diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml
index 047fda64c99b..248374d0fac6 100644
--- a/.github/.OwlBot.lock.yaml
+++ b/.github/.OwlBot.lock.yaml
@@ -13,4 +13,4 @@
 # limitations under the License.
 docker:
   image: gcr.io/cloud-devrel-public-resources/owlbot-python-mono-repo:latest
-  digest: sha256:db9568404b062e3bcf060fb7db09ca9aa51275d187b0f4ccd5f0071e82e4521a
+  digest: sha256:16341f2811634da310cbb106ce4af18ed6394c964f4f2f1bae4dd321566ddaf5
diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml
index 678e2c6a3724..283302455f2d 100644
--- a/.github/workflows/main.yml
+++ b/.github/workflows/main.yml
@@ -45,7 +45,7 @@ jobs:
       - name: Set up Python 3.10
         uses: actions/setup-python@v5
         with:
-          python-version: 3.10.0
+          python-version: "3.10"
       - name: Install script dependencies
         run: pip3 install -r requirements.txt
         working-directory: ./scripts
diff --git a/.github/workflows/unittest.yml b/.github/workflows/unittest.yml
index 977202e4790f..d0768ddc43ad 100644
--- a/.github/workflows/unittest.yml
+++ b/.github/workflows/unittest.yml
@@ -13,7 +13,10 @@ permissions:
 
 jobs:
   unit:
-    runs-on: ubuntu-latest
+    # TODO(https://github.com/googleapis/gapic-generator-python/issues/2303): use `ubuntu-latest` once this bug is fixed.
+    # Use ubuntu-22.04 until Python 3.7 is removed from the test matrix
+    # https://docs.github.com/en/actions/using-github-hosted-runners/using-github-hosted-runners/about-github-hosted-runners#standard-github-hosted-runners-for-public-repositories
+    runs-on: ubuntu-22.04
     strategy:
       matrix:
         python: ['3.7', '3.8', '3.9', '3.10', "3.11", "3.12", "3.13"]
@@ -46,11 +49,13 @@ jobs:
         with:
           name: coverage-artifact-${{ '{{' }} matrix.python {{ '}}' }}
           path: .coverage-${{ matrix.python }}
-  prerelease:
+  unit-extended:
+    name: ${{ matrix.option }}
     runs-on: ubuntu-latest
     strategy:
       matrix:
-        python: ['3.13']
+        python: ["3.13"]
+        option: ["prerelease", "core_deps_from_source"]
     steps:
       - name: Checkout
         uses: actions/checkout@v4
@@ -67,10 +72,10 @@ jobs:
         run: |
           python -m pip install --upgrade setuptools pip wheel
           python -m pip install nox
-      - name: Run prerelease tests
+      - name: Run ${{ matrix.option }} tests
         env:
           BUILD_TYPE: presubmit
-          TEST_TYPE: prerelease
+          TEST_TYPE: ${{ matrix.option }}
           PY_VERSION: ${{ matrix.python }}
         run: |
           ci/run_conditional_tests.sh
diff --git a/.kokoro/docker/docs/Dockerfile b/.kokoro/docker/docs/Dockerfile
index 688814801711..7ff7b6e507d8 100644
--- a/.kokoro/docker/docs/Dockerfile
+++ b/.kokoro/docker/docs/Dockerfile
@@ -81,4 +81,8 @@ RUN wget -O /tmp/get-pip.py 'https://bootstrap.pypa.io/get-pip.py' \
 # Test pip
 RUN python3.10 -m pip
 
+# Install build requirements
+COPY requirements.txt /requirements.txt
+RUN python3.10 -m pip install --require-hashes -r requirements.txt
+
 CMD ["python3.10"]
diff --git a/.kokoro/docker/docs/requirements.in b/.kokoro/docker/docs/requirements.in
new file mode 100644
index 000000000000..586bd07037ae
--- /dev/null
+++ b/.kokoro/docker/docs/requirements.in
@@ -0,0 +1,2 @@
+nox
+gcp-docuploader
diff --git a/.kokoro/docker/docs/requirements.txt b/.kokoro/docker/docs/requirements.txt
new file mode 100644
index 000000000000..a9360a25b707
--- /dev/null
+++ b/.kokoro/docker/docs/requirements.txt
@@ -0,0 +1,297 @@
+#
+# This file is autogenerated by pip-compile with Python 3.10
+# by the following command:
+#
+#    pip-compile --allow-unsafe --generate-hashes requirements.in
+#
+argcomplete==3.5.3 \
+    --hash=sha256:2ab2c4a215c59fd6caaff41a869480a23e8f6a5f910b266c1808037f4e375b61 \
--hash=sha256:c12bf50eded8aebb298c7b7da7a5ff3ee24dffd9f5281867dfe1424b58c55392 + # via nox +cachetools==5.5.0 \ + --hash=sha256:02134e8439cdc2ffb62023ce1debca2944c3f289d66bb17ead3ab3dede74b292 \ + --hash=sha256:2cc24fb4cbe39633fb7badd9db9ca6295d766d9c2995f245725a46715d050f2a + # via google-auth +certifi==2024.12.14 \ + --hash=sha256:1275f7a45be9464efc1173084eaa30f866fe2e47d389406136d332ed4967ec56 \ + --hash=sha256:b650d30f370c2b724812bee08008be0c4163b163ddaec3f2546c1caf65f191db + # via requests +charset-normalizer==3.4.1 \ + --hash=sha256:0167ddc8ab6508fe81860a57dd472b2ef4060e8d378f0cc555707126830f2537 \ + --hash=sha256:01732659ba9b5b873fc117534143e4feefecf3b2078b0a6a2e925271bb6f4cfa \ + --hash=sha256:01ad647cdd609225c5350561d084b42ddf732f4eeefe6e678765636791e78b9a \ + --hash=sha256:04432ad9479fa40ec0f387795ddad4437a2b50417c69fa275e212933519ff294 \ + --hash=sha256:0907f11d019260cdc3f94fbdb23ff9125f6b5d1039b76003b5b0ac9d6a6c9d5b \ + --hash=sha256:0924e81d3d5e70f8126529951dac65c1010cdf117bb75eb02dd12339b57749dd \ + --hash=sha256:09b26ae6b1abf0d27570633b2b078a2a20419c99d66fb2823173d73f188ce601 \ + --hash=sha256:09b5e6733cbd160dcc09589227187e242a30a49ca5cefa5a7edd3f9d19ed53fd \ + --hash=sha256:0af291f4fe114be0280cdd29d533696a77b5b49cfde5467176ecab32353395c4 \ + --hash=sha256:0f55e69f030f7163dffe9fd0752b32f070566451afe180f99dbeeb81f511ad8d \ + --hash=sha256:1a2bc9f351a75ef49d664206d51f8e5ede9da246602dc2d2726837620ea034b2 \ + --hash=sha256:22e14b5d70560b8dd51ec22863f370d1e595ac3d024cb8ad7d308b4cd95f8313 \ + --hash=sha256:234ac59ea147c59ee4da87a0c0f098e9c8d169f4dc2a159ef720f1a61bbe27cd \ + --hash=sha256:2369eea1ee4a7610a860d88f268eb39b95cb588acd7235e02fd5a5601773d4fa \ + --hash=sha256:237bdbe6159cff53b4f24f397d43c6336c6b0b42affbe857970cefbb620911c8 \ + --hash=sha256:28bf57629c75e810b6ae989f03c0828d64d6b26a5e205535585f96093e405ed1 \ + --hash=sha256:2967f74ad52c3b98de4c3b32e1a44e32975e008a9cd2a8cc8966d6a5218c5cb2 \ + --hash=sha256:2a75d49014d118e4198bcee5ee0a6f25856b29b12dbf7cd012791f8a6cc5c496 \ + --hash=sha256:2bdfe3ac2e1bbe5b59a1a63721eb3b95fc9b6817ae4a46debbb4e11f6232428d \ + --hash=sha256:2d074908e1aecee37a7635990b2c6d504cd4766c7bc9fc86d63f9c09af3fa11b \ + --hash=sha256:2fb9bd477fdea8684f78791a6de97a953c51831ee2981f8e4f583ff3b9d9687e \ + --hash=sha256:311f30128d7d333eebd7896965bfcfbd0065f1716ec92bd5638d7748eb6f936a \ + --hash=sha256:329ce159e82018d646c7ac45b01a430369d526569ec08516081727a20e9e4af4 \ + --hash=sha256:345b0426edd4e18138d6528aed636de7a9ed169b4aaf9d61a8c19e39d26838ca \ + --hash=sha256:363e2f92b0f0174b2f8238240a1a30142e3db7b957a5dd5689b0e75fb717cc78 \ + --hash=sha256:3a3bd0dcd373514dcec91c411ddb9632c0d7d92aed7093b8c3bbb6d69ca74408 \ + --hash=sha256:3bed14e9c89dcb10e8f3a29f9ccac4955aebe93c71ae803af79265c9ca5644c5 \ + --hash=sha256:44251f18cd68a75b56585dd00dae26183e102cd5e0f9f1466e6df5da2ed64ea3 \ + --hash=sha256:44ecbf16649486d4aebafeaa7ec4c9fed8b88101f4dd612dcaf65d5e815f837f \ + --hash=sha256:4532bff1b8421fd0a320463030c7520f56a79c9024a4e88f01c537316019005a \ + --hash=sha256:49402233c892a461407c512a19435d1ce275543138294f7ef013f0b63d5d3765 \ + --hash=sha256:4c0907b1928a36d5a998d72d64d8eaa7244989f7aaaf947500d3a800c83a3fd6 \ + --hash=sha256:4d86f7aff21ee58f26dcf5ae81a9addbd914115cdebcbb2217e4f0ed8982e146 \ + --hash=sha256:5777ee0881f9499ed0f71cc82cf873d9a0ca8af166dfa0af8ec4e675b7df48e6 \ + --hash=sha256:5df196eb874dae23dcfb968c83d4f8fdccb333330fe1fc278ac5ceeb101003a9 \ + --hash=sha256:619a609aa74ae43d90ed2e89bdd784765de0a25ca761b93e196d938b8fd1dbbd \ + 
--hash=sha256:6e27f48bcd0957c6d4cb9d6fa6b61d192d0b13d5ef563e5f2ae35feafc0d179c \ + --hash=sha256:6ff8a4a60c227ad87030d76e99cd1698345d4491638dfa6673027c48b3cd395f \ + --hash=sha256:73d94b58ec7fecbc7366247d3b0b10a21681004153238750bb67bd9012414545 \ + --hash=sha256:7461baadb4dc00fd9e0acbe254e3d7d2112e7f92ced2adc96e54ef6501c5f176 \ + --hash=sha256:75832c08354f595c760a804588b9357d34ec00ba1c940c15e31e96d902093770 \ + --hash=sha256:7709f51f5f7c853f0fb938bcd3bc59cdfdc5203635ffd18bf354f6967ea0f824 \ + --hash=sha256:78baa6d91634dfb69ec52a463534bc0df05dbd546209b79a3880a34487f4b84f \ + --hash=sha256:7974a0b5ecd505609e3b19742b60cee7aa2aa2fb3151bc917e6e2646d7667dcf \ + --hash=sha256:7a4f97a081603d2050bfaffdefa5b02a9ec823f8348a572e39032caa8404a487 \ + --hash=sha256:7b1bef6280950ee6c177b326508f86cad7ad4dff12454483b51d8b7d673a2c5d \ + --hash=sha256:7d053096f67cd1241601111b698f5cad775f97ab25d81567d3f59219b5f1adbd \ + --hash=sha256:804a4d582ba6e5b747c625bf1255e6b1507465494a40a2130978bda7b932c90b \ + --hash=sha256:807f52c1f798eef6cf26beb819eeb8819b1622ddfeef9d0977a8502d4db6d534 \ + --hash=sha256:80ed5e856eb7f30115aaf94e4a08114ccc8813e6ed1b5efa74f9f82e8509858f \ + --hash=sha256:8417cb1f36cc0bc7eaba8ccb0e04d55f0ee52df06df3ad55259b9a323555fc8b \ + --hash=sha256:8436c508b408b82d87dc5f62496973a1805cd46727c34440b0d29d8a2f50a6c9 \ + --hash=sha256:89149166622f4db9b4b6a449256291dc87a99ee53151c74cbd82a53c8c2f6ccd \ + --hash=sha256:8bfa33f4f2672964266e940dd22a195989ba31669bd84629f05fab3ef4e2d125 \ + --hash=sha256:8c60ca7339acd497a55b0ea5d506b2a2612afb2826560416f6894e8b5770d4a9 \ + --hash=sha256:91b36a978b5ae0ee86c394f5a54d6ef44db1de0815eb43de826d41d21e4af3de \ + --hash=sha256:955f8851919303c92343d2f66165294848d57e9bba6cf6e3625485a70a038d11 \ + --hash=sha256:97f68b8d6831127e4787ad15e6757232e14e12060bec17091b85eb1486b91d8d \ + --hash=sha256:9b23ca7ef998bc739bf6ffc077c2116917eabcc901f88da1b9856b210ef63f35 \ + --hash=sha256:9f0b8b1c6d84c8034a44893aba5e767bf9c7a211e313a9605d9c617d7083829f \ + --hash=sha256:aabfa34badd18f1da5ec1bc2715cadc8dca465868a4e73a0173466b688f29dda \ + --hash=sha256:ab36c8eb7e454e34e60eb55ca5d241a5d18b2c6244f6827a30e451c42410b5f7 \ + --hash=sha256:b010a7a4fd316c3c484d482922d13044979e78d1861f0e0650423144c616a46a \ + --hash=sha256:b1ac5992a838106edb89654e0aebfc24f5848ae2547d22c2c3f66454daa11971 \ + --hash=sha256:b7b2d86dd06bfc2ade3312a83a5c364c7ec2e3498f8734282c6c3d4b07b346b8 \ + --hash=sha256:b97e690a2118911e39b4042088092771b4ae3fc3aa86518f84b8cf6888dbdb41 \ + --hash=sha256:bc2722592d8998c870fa4e290c2eec2c1569b87fe58618e67d38b4665dfa680d \ + --hash=sha256:c0429126cf75e16c4f0ad00ee0eae4242dc652290f940152ca8c75c3a4b6ee8f \ + --hash=sha256:c30197aa96e8eed02200a83fba2657b4c3acd0f0aa4bdc9f6c1af8e8962e0757 \ + --hash=sha256:c4c3e6da02df6fa1410a7680bd3f63d4f710232d3139089536310d027950696a \ + --hash=sha256:c75cb2a3e389853835e84a2d8fb2b81a10645b503eca9bcb98df6b5a43eb8886 \ + --hash=sha256:c96836c97b1238e9c9e3fe90844c947d5afbf4f4c92762679acfe19927d81d77 \ + --hash=sha256:d7f50a1f8c450f3925cb367d011448c39239bb3eb4117c36a6d354794de4ce76 \ + --hash=sha256:d973f03c0cb71c5ed99037b870f2be986c3c05e63622c017ea9816881d2dd247 \ + --hash=sha256:d98b1668f06378c6dbefec3b92299716b931cd4e6061f3c875a71ced1780ab85 \ + --hash=sha256:d9c3cdf5390dcd29aa8056d13e8e99526cda0305acc038b96b30352aff5ff2bb \ + --hash=sha256:dad3e487649f498dd991eeb901125411559b22e8d7ab25d3aeb1af367df5efd7 \ + --hash=sha256:dccbe65bd2f7f7ec22c4ff99ed56faa1e9f785482b9bbd7c717e26fd723a1d1e \ + 
--hash=sha256:dd78cfcda14a1ef52584dbb008f7ac81c1328c0f58184bf9a84c49c605002da6 \ + --hash=sha256:e218488cd232553829be0664c2292d3af2eeeb94b32bea483cf79ac6a694e037 \ + --hash=sha256:e358e64305fe12299a08e08978f51fc21fac060dcfcddd95453eabe5b93ed0e1 \ + --hash=sha256:ea0d8d539afa5eb2728aa1932a988a9a7af94f18582ffae4bc10b3fbdad0626e \ + --hash=sha256:eab677309cdb30d047996b36d34caeda1dc91149e4fdca0b1a039b3f79d9a807 \ + --hash=sha256:eb8178fe3dba6450a3e024e95ac49ed3400e506fd4e9e5c32d30adda88cbd407 \ + --hash=sha256:ecddf25bee22fe4fe3737a399d0d177d72bc22be6913acfab364b40bce1ba83c \ + --hash=sha256:eea6ee1db730b3483adf394ea72f808b6e18cf3cb6454b4d86e04fa8c4327a12 \ + --hash=sha256:f08ff5e948271dc7e18a35641d2f11a4cd8dfd5634f55228b691e62b37125eb3 \ + --hash=sha256:f30bf9fd9be89ecb2360c7d94a711f00c09b976258846efe40db3d05828e8089 \ + --hash=sha256:fa88b843d6e211393a37219e6a1c1df99d35e8fd90446f1118f4216e307e48cd \ + --hash=sha256:fc54db6c8593ef7d4b2a331b58653356cf04f67c960f584edb7c3d8c97e8f39e \ + --hash=sha256:fd4ec41f914fa74ad1b8304bbc634b3de73d2a0889bd32076342a573e0779e00 \ + --hash=sha256:ffc9202a29ab3920fa812879e95a9e78b2465fd10be7fcbd042899695d75e616 + # via requests +click==8.1.8 \ + --hash=sha256:63c132bbbed01578a06712a2d1f497bb62d9c1c0d329b7903a866228027263b2 \ + --hash=sha256:ed53c9d8990d83c2a27deae68e4ee337473f6330c040a31d4225c9574d16096a + # via gcp-docuploader +colorlog==6.9.0 \ + --hash=sha256:5906e71acd67cb07a71e779c47c4bcb45fb8c2993eebe9e5adcd6a6f1b283eff \ + --hash=sha256:bfba54a1b93b94f54e1f4fe48395725a3d92fd2a4af702f6bd70946bdc0c6ac2 + # via + # gcp-docuploader + # nox +distlib==0.3.9 \ + --hash=sha256:47f8c22fd27c27e25a65601af709b38e4f0a45ea4fc2e710f65755fa8caaaf87 \ + --hash=sha256:a60f20dea646b8a33f3e7772f74dc0b2d0772d2837ee1342a00645c81edf9403 + # via virtualenv +filelock==3.16.1 \ + --hash=sha256:2082e5703d51fbf98ea75855d9d5527e33d8ff23099bec374a134febee6946b0 \ + --hash=sha256:c249fbfcd5db47e5e2d6d62198e565475ee65e4831e2561c8e313fa7eb961435 + # via virtualenv +gcp-docuploader==0.6.5 \ + --hash=sha256:30221d4ac3e5a2b9c69aa52fdbef68cc3f27d0e6d0d90e220fc024584b8d2318 \ + --hash=sha256:b7458ef93f605b9d46a4bf3a8dc1755dad1f31d030c8679edf304e343b347eea + # via -r requirements.in +google-api-core==2.24.0 \ + --hash=sha256:10d82ac0fca69c82a25b3efdeefccf6f28e02ebb97925a8cce8edbfe379929d9 \ + --hash=sha256:e255640547a597a4da010876d333208ddac417d60add22b6851a0c66a831fcaf + # via + # google-cloud-core + # google-cloud-storage +google-auth==2.37.0 \ + --hash=sha256:0054623abf1f9c83492c63d3f47e77f0a544caa3d40b2d98e099a611c2dd5d00 \ + --hash=sha256:42664f18290a6be591be5329a96fe30184be1a1badb7292a7f686a9659de9ca0 + # via + # google-api-core + # google-cloud-core + # google-cloud-storage +google-cloud-core==2.4.1 \ + --hash=sha256:9b7749272a812bde58fff28868d0c5e2f585b82f37e09a1f6ed2d4d10f134073 \ + --hash=sha256:a9e6a4422b9ac5c29f79a0ede9485473338e2ce78d91f2370c01e730eab22e61 + # via google-cloud-storage +google-cloud-storage==2.19.0 \ + --hash=sha256:aeb971b5c29cf8ab98445082cbfe7b161a1f48ed275822f59ed3f1524ea54fba \ + --hash=sha256:cd05e9e7191ba6cb68934d8eb76054d9be4562aa89dbc4236feee4d7d51342b2 + # via gcp-docuploader +google-crc32c==1.6.0 \ + --hash=sha256:05e2d8c9a2f853ff116db9706b4a27350587f341eda835f46db3c0a8c8ce2f24 \ + --hash=sha256:18e311c64008f1f1379158158bb3f0c8d72635b9eb4f9545f8cf990c5668e59d \ + --hash=sha256:236c87a46cdf06384f614e9092b82c05f81bd34b80248021f729396a78e55d7e \ + --hash=sha256:35834855408429cecf495cac67ccbab802de269e948e27478b1e47dfb6465e57 \ + 
--hash=sha256:386122eeaaa76951a8196310432c5b0ef3b53590ef4c317ec7588ec554fec5d2 \ + --hash=sha256:40b05ab32a5067525670880eb5d169529089a26fe35dce8891127aeddc1950e8 \ + --hash=sha256:48abd62ca76a2cbe034542ed1b6aee851b6f28aaca4e6551b5599b6f3ef175cc \ + --hash=sha256:50cf2a96da226dcbff8671233ecf37bf6e95de98b2a2ebadbfdf455e6d05df42 \ + --hash=sha256:51c4f54dd8c6dfeb58d1df5e4f7f97df8abf17a36626a217f169893d1d7f3e9f \ + --hash=sha256:5bcc90b34df28a4b38653c36bb5ada35671ad105c99cfe915fb5bed7ad6924aa \ + --hash=sha256:62f6d4a29fea082ac4a3c9be5e415218255cf11684ac6ef5488eea0c9132689b \ + --hash=sha256:6eceb6ad197656a1ff49ebfbbfa870678c75be4344feb35ac1edf694309413dc \ + --hash=sha256:7aec8e88a3583515f9e0957fe4f5f6d8d4997e36d0f61624e70469771584c760 \ + --hash=sha256:91ca8145b060679ec9176e6de4f89b07363d6805bd4760631ef254905503598d \ + --hash=sha256:a184243544811e4a50d345838a883733461e67578959ac59964e43cca2c791e7 \ + --hash=sha256:a9e4b426c3702f3cd23b933436487eb34e01e00327fac20c9aebb68ccf34117d \ + --hash=sha256:bb0966e1c50d0ef5bc743312cc730b533491d60585a9a08f897274e57c3f70e0 \ + --hash=sha256:bb8b3c75bd157010459b15222c3fd30577042a7060e29d42dabce449c087f2b3 \ + --hash=sha256:bd5e7d2445d1a958c266bfa5d04c39932dc54093fa391736dbfdb0f1929c1fb3 \ + --hash=sha256:c87d98c7c4a69066fd31701c4e10d178a648c2cac3452e62c6b24dc51f9fcc00 \ + --hash=sha256:d2952396dc604544ea7476b33fe87faedc24d666fb0c2d5ac971a2b9576ab871 \ + --hash=sha256:d8797406499f28b5ef791f339594b0b5fdedf54e203b5066675c406ba69d705c \ + --hash=sha256:d9e9913f7bd69e093b81da4535ce27af842e7bf371cde42d1ae9e9bd382dc0e9 \ + --hash=sha256:e2806553238cd076f0a55bddab37a532b53580e699ed8e5606d0de1f856b5205 \ + --hash=sha256:ebab974b1687509e5c973b5c4b8b146683e101e102e17a86bd196ecaa4d099fc \ + --hash=sha256:ed767bf4ba90104c1216b68111613f0d5926fb3780660ea1198fc469af410e9d \ + --hash=sha256:f7a1fc29803712f80879b0806cb83ab24ce62fc8daf0569f2204a0cfd7f68ed4 + # via + # google-cloud-storage + # google-resumable-media +google-resumable-media==2.7.2 \ + --hash=sha256:3ce7551e9fe6d99e9a126101d2536612bb73486721951e9562fee0f90c6ababa \ + --hash=sha256:5280aed4629f2b60b847b0d42f9857fd4935c11af266744df33d8074cae92fe0 + # via google-cloud-storage +googleapis-common-protos==1.66.0 \ + --hash=sha256:c3e7b33d15fdca5374cc0a7346dd92ffa847425cc4ea941d970f13680052ec8c \ + --hash=sha256:d7abcd75fabb2e0ec9f74466401f6c119a0b498e27370e9be4c94cb7e382b8ed + # via google-api-core +idna==3.10 \ + --hash=sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9 \ + --hash=sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3 + # via requests +nox==2024.10.9 \ + --hash=sha256:1d36f309a0a2a853e9bccb76bbef6bb118ba92fa92674d15604ca99adeb29eab \ + --hash=sha256:7aa9dc8d1c27e9f45ab046ffd1c3b2c4f7c91755304769df231308849ebded95 + # via -r requirements.in +packaging==24.2 \ + --hash=sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759 \ + --hash=sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f + # via nox +platformdirs==4.3.6 \ + --hash=sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907 \ + --hash=sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb + # via virtualenv +proto-plus==1.25.0 \ + --hash=sha256:c91fc4a65074ade8e458e95ef8bac34d4008daa7cce4a12d6707066fca648961 \ + --hash=sha256:fbb17f57f7bd05a68b7707e745e26528b0b3c34e378db91eef93912c54982d91 + # via google-api-core +protobuf==5.29.3 \ + --hash=sha256:0a18ed4a24198528f2333802eb075e59dea9d679ab7a6c5efb017a59004d849f \ + 
--hash=sha256:0eb32bfa5219fc8d4111803e9a690658aa2e6366384fd0851064b963b6d1f2a7 \ + --hash=sha256:3ea51771449e1035f26069c4c7fd51fba990d07bc55ba80701c78f886bf9c888 \ + --hash=sha256:5da0f41edaf117bde316404bad1a486cb4ededf8e4a54891296f648e8e076620 \ + --hash=sha256:6ce8cc3389a20693bfde6c6562e03474c40851b44975c9b2bf6df7d8c4f864da \ + --hash=sha256:84a57163a0ccef3f96e4b6a20516cedcf5bb3a95a657131c5c3ac62200d23252 \ + --hash=sha256:a4fa6f80816a9a0678429e84973f2f98cbc218cca434abe8db2ad0bffc98503a \ + --hash=sha256:a8434404bbf139aa9e1300dbf989667a83d42ddda9153d8ab76e0d5dcaca484e \ + --hash=sha256:b89c115d877892a512f79a8114564fb435943b59067615894c3b13cd3e1fa107 \ + --hash=sha256:c027e08a08be10b67c06bf2370b99c811c466398c357e615ca88c91c07f0910f \ + --hash=sha256:daaf63f70f25e8689c072cfad4334ca0ac1d1e05a92fc15c54eb9cf23c3efd84 + # via + # gcp-docuploader + # google-api-core + # googleapis-common-protos + # proto-plus +pyasn1==0.6.1 \ + --hash=sha256:0d632f46f2ba09143da3a8afe9e33fb6f92fa2320ab7e886e2d0f7672af84629 \ + --hash=sha256:6f580d2bdd84365380830acf45550f2511469f673cb4a5ae3857a3170128b034 + # via + # pyasn1-modules + # rsa +pyasn1-modules==0.4.1 \ + --hash=sha256:49bfa96b45a292b711e986f222502c1c9a5e1f4e568fc30e2574a6c7d07838fd \ + --hash=sha256:c28e2dbf9c06ad61c71a075c7e0f9fd0f1b0bb2d2ad4377f240d33ac2ab60a7c + # via google-auth +requests==2.32.3 \ + --hash=sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760 \ + --hash=sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6 + # via + # google-api-core + # google-cloud-storage +rsa==4.9 \ + --hash=sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7 \ + --hash=sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21 + # via google-auth +six==1.17.0 \ + --hash=sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274 \ + --hash=sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81 + # via gcp-docuploader +tomli==2.2.1 \ + --hash=sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6 \ + --hash=sha256:02abe224de6ae62c19f090f68da4e27b10af2b93213d36cf44e6e1c5abd19fdd \ + --hash=sha256:286f0ca2ffeeb5b9bd4fcc8d6c330534323ec51b2f52da063b11c502da16f30c \ + --hash=sha256:2d0f2fdd22b02c6d81637a3c95f8cd77f995846af7414c5c4b8d0545afa1bc4b \ + --hash=sha256:33580bccab0338d00994d7f16f4c4ec25b776af3ffaac1ed74e0b3fc95e885a8 \ + --hash=sha256:400e720fe168c0f8521520190686ef8ef033fb19fc493da09779e592861b78c6 \ + --hash=sha256:40741994320b232529c802f8bc86da4e1aa9f413db394617b9a256ae0f9a7f77 \ + --hash=sha256:465af0e0875402f1d226519c9904f37254b3045fc5084697cefb9bdde1ff99ff \ + --hash=sha256:4a8f6e44de52d5e6c657c9fe83b562f5f4256d8ebbfe4ff922c495620a7f6cea \ + --hash=sha256:4e340144ad7ae1533cb897d406382b4b6fede8890a03738ff1683af800d54192 \ + --hash=sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249 \ + --hash=sha256:6972ca9c9cc9f0acaa56a8ca1ff51e7af152a9f87fb64623e31d5c83700080ee \ + --hash=sha256:7fc04e92e1d624a4a63c76474610238576942d6b8950a2d7f908a340494e67e4 \ + --hash=sha256:889f80ef92701b9dbb224e49ec87c645ce5df3fa2cc548664eb8a25e03127a98 \ + --hash=sha256:8d57ca8095a641b8237d5b079147646153d22552f1c637fd3ba7f4b0b29167a8 \ + --hash=sha256:8dd28b3e155b80f4d54beb40a441d366adcfe740969820caf156c019fb5c7ec4 \ + --hash=sha256:9316dc65bed1684c9a98ee68759ceaed29d229e985297003e494aa825ebb0281 \ + --hash=sha256:a198f10c4d1b1375d7687bc25294306e551bf1abfa4eace6650070a5c1ae2744 \ + 
--hash=sha256:a38aa0308e754b0e3c67e344754dff64999ff9b513e691d0e786265c93583c69 \ + --hash=sha256:a92ef1a44547e894e2a17d24e7557a5e85a9e1d0048b0b5e7541f76c5032cb13 \ + --hash=sha256:ac065718db92ca818f8d6141b5f66369833d4a80a9d74435a268c52bdfa73140 \ + --hash=sha256:b82ebccc8c8a36f2094e969560a1b836758481f3dc360ce9a3277c65f374285e \ + --hash=sha256:c954d2250168d28797dd4e3ac5cf812a406cd5a92674ee4c8f123c889786aa8e \ + --hash=sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc \ + --hash=sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff \ + --hash=sha256:d3f5614314d758649ab2ab3a62d4f2004c825922f9e370b29416484086b264ec \ + --hash=sha256:d920f33822747519673ee656a4b6ac33e382eca9d331c87770faa3eef562aeb2 \ + --hash=sha256:db2b95f9de79181805df90bedc5a5ab4c165e6ec3fe99f970d0e302f384ad222 \ + --hash=sha256:e59e304978767a54663af13c07b3d1af22ddee3bb2fb0618ca1593e4f593a106 \ + --hash=sha256:e85e99945e688e32d5a35c1ff38ed0b3f41f43fad8df0bdf79f72b2ba7bc5272 \ + --hash=sha256:ece47d672db52ac607a3d9599a9d48dcb2f2f735c6c2d1f34130085bb12b112a \ + --hash=sha256:f4039b9cbc3048b2416cc57ab3bda989a6fcf9b36cf8937f01a6e731b64f80d7 + # via nox +urllib3==2.3.0 \ + --hash=sha256:1cee9ad369867bfdbbb48b7dd50374c0967a0bb7710050facf0dd6911440e3df \ + --hash=sha256:f8c5449b3cf0861679ce7e0503c7b44b5ec981bec0d1d3795a07f1ba96f0204d + # via requests +virtualenv==20.28.1 \ + --hash=sha256:412773c85d4dab0409b83ec36f7a6499e72eaf08c80e81e9576bca61831c71cb \ + --hash=sha256:5d34ab240fdb5d21549b76f9e8ff3af28252f5499fb6d6f031adac4e5a8c5329 + # via nox diff --git a/.kokoro/publish-docs.sh b/.kokoro/publish-docs.sh index 0b68b0679bd5..c4ad8794d97f 100755 --- a/.kokoro/publish-docs.sh +++ b/.kokoro/publish-docs.sh @@ -35,10 +35,6 @@ RETVAL=0 export PROJECT_ROOT=$(realpath $(dirname "${BASH_SOURCE[0]}")/..) -# Install nox -python3.10 -m pip install --require-hashes -r "$PROJECT_ROOT/.kokoro/requirements.txt" -python3.10 -m nox --version - # A file for publishing docs publish_docs_script="${PROJECT_ROOT}/.kokoro/publish-docs-single.sh" diff --git a/.kokoro/release.sh b/.kokoro/release.sh deleted file mode 100755 index d9a6177e1dac..000000000000 --- a/.kokoro/release.sh +++ /dev/null @@ -1,93 +0,0 @@ -#!/bin/bash -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# `-e` enables the script to automatically fail when a command fails -# `-o pipefail` sets the exit code to non-zero if any command fails, -# or zero if all commands in the pipeline exit successfully. 
-set -eo pipefail
-
-python3 -m pip install --require-hashes -r github/google-cloud-python/.kokoro/requirements-aoss.txt
-python3 -m keyring --list-backends
-
-echo "[distutils]
-index-servers =
-  aoss-1p-python
-[aoss-1p-python]
-repository: https://us-python.pkg.dev/cloud-aoss-1p/cloud-aoss-1p-python/" >> $HOME/.pypirc
-
-echo "[install]
-index-url = https://us-python.pkg.dev/cloud-aoss-1p/cloud-aoss-1p-python/simple/
-trusted-host = us-python.pkg.dev" >> $HOME/pip.conf
-
-export PIP_CONFIG_FILE=$HOME/pip.conf
-
-# Start the releasetool reporter
-python3 -m pip install --require-hashes -r github/google-cloud-python/.kokoro/requirements.txt
-python3 -m releasetool publish-reporter-script > /tmp/publisher-script; source /tmp/publisher-script
-
-# Disable buffering, so that the logs stream through.
-export PYTHONUNBUFFERED=1
-
-subdirs=(
-  packages
-)
-RETVAL=0
-
-export PROJECT_ROOT=$(realpath $(dirname "${BASH_SOURCE[0]}")/..)
-
-cd "$PROJECT_ROOT"
-
-pwd
-
-git config --global --add safe.directory "$PROJECT_ROOT"
-
-# In order to determine which packages to publish, we need
-# to know the difference in */gapic_version.py from the previous
-# commit (HEAD~1). This assumes we use squash commit when merging PRs.
-git fetch origin main --deepen=1
-
-# A file for publishing packages to PyPI
-publish_script="${PROJECT_ROOT}/.kokoro/release-single.sh"
-
-for subdir in ${subdirs[@]}; do
-  for d in `ls -d ${subdir}/*/`; do
-    should_publish=false
-    echo "checking changes with 'git diff HEAD~.. ${d}/**/gapic_version.py'"
-    set +e
-    changed=$(git diff "HEAD~.." ${d}/**/gapic_version.py | wc -l)
-    set -e
-    if [[ "${changed}" -eq 0 ]]; then
-      echo "no change detected in ${d}, skipping"
-    else
-      echo "change detected in ${d}"
-      should_publish=true
-    fi
-    if [ "${should_publish}" = true ]; then
-      echo "publishing package in ${d}"
-      pushd ${d}
-      # Temporarily allow failure.
-      set +e
-      ${publish_script}
-      ret=$?
-      set -e
-      if [ ${ret} -ne 0 ]; then
-        RETVAL=${ret}
-      fi
-      popd
-    fi
-  done
-done
-
-exit ${RETVAL}
diff --git a/.kokoro/release/common.cfg b/.kokoro/release/common.cfg
deleted file mode 100644
index 6fa7089b57ce..000000000000
--- a/.kokoro/release/common.cfg
+++ /dev/null
@@ -1,43 +0,0 @@
-# Format: //devtools/kokoro/config/proto/build.proto
-
-# Build logs will be here
-action {
-  define_artifacts {
-    regex: "**/*sponge_log.xml"
-  }
-}
-
-# Download trampoline resources.
-gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"
-
-# Use the trampoline script to run in docker.
-build_file: "google-cloud-python/.kokoro/trampoline.sh"
-
-# Configure the docker image for kokoro-trampoline.
-env_vars: {
-  key: "TRAMPOLINE_IMAGE"
-  value: "gcr.io/cloud-devrel-kokoro-resources/python-multi"
-}
-env_vars: {
-  key: "TRAMPOLINE_BUILD_FILE"
-  value: "github/google-cloud-python/.kokoro/release.sh"
-}
-
-# Fetch PyPI password
-before_action {
-  fetch_keystore {
-    keystore_resource {
-      keystore_config_id: 73713
-      keyname: "google-cloud-pypi-token-keystore-3"
-    }
-  }
-}
-
-# Store the packages we uploaded to PyPI. That way, we have a record of exactly
-# what we published, which we can use to generate SBOMs and attestations.
-action { - define_artifacts { - regex: "github/google-cloud-python/**/*.tar.gz" - strip_prefix: "github/google-cloud-python" - } -} diff --git a/.kokoro/release/release.cfg b/.kokoro/release/release.cfg deleted file mode 100644 index 18a4c35325b8..000000000000 --- a/.kokoro/release/release.cfg +++ /dev/null @@ -1 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto diff --git a/.kokoro/requirements-aoss.in b/.kokoro/requirements-aoss.in deleted file mode 100644 index bd6769b591d5..000000000000 --- a/.kokoro/requirements-aoss.in +++ /dev/null @@ -1,2 +0,0 @@ -keyring -keyrings.google-artifactregistry-auth diff --git a/.kokoro/requirements-aoss.txt b/.kokoro/requirements-aoss.txt deleted file mode 100644 index d45d18ec8b73..000000000000 --- a/.kokoro/requirements-aoss.txt +++ /dev/null @@ -1,307 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.9 -# by the following command: -# -# pip-compile --allow-unsafe --generate-hashes requirements-aoss.in -# -backports-tarfile==1.2.0 \ - --hash=sha256:77e284d754527b01fb1e6fa8a1afe577858ebe4e9dad8919e34c862cb399bc34 \ - --hash=sha256:d75e02c268746e1b8144c278978b6e98e85de6ad16f8e4b0844a154557eca991 - # via jaraco-context -cachetools==5.5.0 \ - --hash=sha256:02134e8439cdc2ffb62023ce1debca2944c3f289d66bb17ead3ab3dede74b292 \ - --hash=sha256:2cc24fb4cbe39633fb7badd9db9ca6295d766d9c2995f245725a46715d050f2a - # via google-auth -certifi==2024.8.30 \ - --hash=sha256:922820b53db7a7257ffbda3f597266d435245903d80737e34f8a45ff3e3230d8 \ - --hash=sha256:bec941d2aa8195e248a60b31ff9f0558284cf01a52591ceda73ea9afffd69fd9 - # via requests -cffi==1.17.1 \ - --hash=sha256:045d61c734659cc045141be4bae381a41d89b741f795af1dd018bfb532fd0df8 \ - --hash=sha256:0984a4925a435b1da406122d4d7968dd861c1385afe3b45ba82b750f229811e2 \ - --hash=sha256:0e2b1fac190ae3ebfe37b979cc1ce69c81f4e4fe5746bb401dca63a9062cdaf1 \ - --hash=sha256:0f048dcf80db46f0098ccac01132761580d28e28bc0f78ae0d58048063317e15 \ - --hash=sha256:1257bdabf294dceb59f5e70c64a3e2f462c30c7ad68092d01bbbfb1c16b1ba36 \ - --hash=sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824 \ - --hash=sha256:1d599671f396c4723d016dbddb72fe8e0397082b0a77a4fab8028923bec050e8 \ - --hash=sha256:28b16024becceed8c6dfbc75629e27788d8a3f9030691a1dbf9821a128b22c36 \ - --hash=sha256:2bb1a08b8008b281856e5971307cc386a8e9c5b625ac297e853d36da6efe9c17 \ - --hash=sha256:30c5e0cb5ae493c04c8b42916e52ca38079f1b235c2f8ae5f4527b963c401caf \ - --hash=sha256:31000ec67d4221a71bd3f67df918b1f88f676f1c3b535a7eb473255fdc0b83fc \ - --hash=sha256:386c8bf53c502fff58903061338ce4f4950cbdcb23e2902d86c0f722b786bbe3 \ - --hash=sha256:3edc8d958eb099c634dace3c7e16560ae474aa3803a5df240542b305d14e14ed \ - --hash=sha256:45398b671ac6d70e67da8e4224a065cec6a93541bb7aebe1b198a61b58c7b702 \ - --hash=sha256:46bf43160c1a35f7ec506d254e5c890f3c03648a4dbac12d624e4490a7046cd1 \ - --hash=sha256:4ceb10419a9adf4460ea14cfd6bc43d08701f0835e979bf821052f1805850fe8 \ - --hash=sha256:51392eae71afec0d0c8fb1a53b204dbb3bcabcb3c9b807eedf3e1e6ccf2de903 \ - --hash=sha256:5da5719280082ac6bd9aa7becb3938dc9f9cbd57fac7d2871717b1feb0902ab6 \ - --hash=sha256:610faea79c43e44c71e1ec53a554553fa22321b65fae24889706c0a84d4ad86d \ - --hash=sha256:636062ea65bd0195bc012fea9321aca499c0504409f413dc88af450b57ffd03b \ - --hash=sha256:6883e737d7d9e4899a8a695e00ec36bd4e5e4f18fabe0aca0efe0a4b44cdb13e \ - --hash=sha256:6b8b4a92e1c65048ff98cfe1f735ef8f1ceb72e3d5f0c25fdb12087a23da22be \ - --hash=sha256:6f17be4345073b0a7b8ea599688f692ac3ef23ce28e5df79c04de519dbc4912c \ - 
--hash=sha256:706510fe141c86a69c8ddc029c7910003a17353970cff3b904ff0686a5927683 \ - --hash=sha256:72e72408cad3d5419375fc87d289076ee319835bdfa2caad331e377589aebba9 \ - --hash=sha256:733e99bc2df47476e3848417c5a4540522f234dfd4ef3ab7fafdf555b082ec0c \ - --hash=sha256:7596d6620d3fa590f677e9ee430df2958d2d6d6de2feeae5b20e82c00b76fbf8 \ - --hash=sha256:78122be759c3f8a014ce010908ae03364d00a1f81ab5c7f4a7a5120607ea56e1 \ - --hash=sha256:805b4371bf7197c329fcb3ead37e710d1bca9da5d583f5073b799d5c5bd1eee4 \ - --hash=sha256:85a950a4ac9c359340d5963966e3e0a94a676bd6245a4b55bc43949eee26a655 \ - --hash=sha256:8f2cdc858323644ab277e9bb925ad72ae0e67f69e804f4898c070998d50b1a67 \ - --hash=sha256:9755e4345d1ec879e3849e62222a18c7174d65a6a92d5b346b1863912168b595 \ - --hash=sha256:98e3969bcff97cae1b2def8ba499ea3d6f31ddfdb7635374834cf89a1a08ecf0 \ - --hash=sha256:a08d7e755f8ed21095a310a693525137cfe756ce62d066e53f502a83dc550f65 \ - --hash=sha256:a1ed2dd2972641495a3ec98445e09766f077aee98a1c896dcb4ad0d303628e41 \ - --hash=sha256:a24ed04c8ffd54b0729c07cee15a81d964e6fee0e3d4d342a27b020d22959dc6 \ - --hash=sha256:a45e3c6913c5b87b3ff120dcdc03f6131fa0065027d0ed7ee6190736a74cd401 \ - --hash=sha256:a9b15d491f3ad5d692e11f6b71f7857e7835eb677955c00cc0aefcd0669adaf6 \ - --hash=sha256:ad9413ccdeda48c5afdae7e4fa2192157e991ff761e7ab8fdd8926f40b160cc3 \ - --hash=sha256:b2ab587605f4ba0bf81dc0cb08a41bd1c0a5906bd59243d56bad7668a6fc6c16 \ - --hash=sha256:b62ce867176a75d03a665bad002af8e6d54644fad99a3c70905c543130e39d93 \ - --hash=sha256:c03e868a0b3bc35839ba98e74211ed2b05d2119be4e8a0f224fba9384f1fe02e \ - --hash=sha256:c59d6e989d07460165cc5ad3c61f9fd8f1b4796eacbd81cee78957842b834af4 \ - --hash=sha256:c7eac2ef9b63c79431bc4b25f1cd649d7f061a28808cbc6c47b534bd789ef964 \ - --hash=sha256:c9c3d058ebabb74db66e431095118094d06abf53284d9c81f27300d0e0d8bc7c \ - --hash=sha256:ca74b8dbe6e8e8263c0ffd60277de77dcee6c837a3d0881d8c1ead7268c9e576 \ - --hash=sha256:caaf0640ef5f5517f49bc275eca1406b0ffa6aa184892812030f04c2abf589a0 \ - --hash=sha256:cdf5ce3acdfd1661132f2a9c19cac174758dc2352bfe37d98aa7512c6b7178b3 \ - --hash=sha256:d016c76bdd850f3c626af19b0542c9677ba156e4ee4fccfdd7848803533ef662 \ - --hash=sha256:d01b12eeeb4427d3110de311e1774046ad344f5b1a7403101878976ecd7a10f3 \ - --hash=sha256:d63afe322132c194cf832bfec0dc69a99fb9bb6bbd550f161a49e9e855cc78ff \ - --hash=sha256:da95af8214998d77a98cc14e3a3bd00aa191526343078b530ceb0bd710fb48a5 \ - --hash=sha256:dd398dbc6773384a17fe0d3e7eeb8d1a21c2200473ee6806bb5e6a8e62bb73dd \ - --hash=sha256:de2ea4b5833625383e464549fec1bc395c1bdeeb5f25c4a3a82b5a8c756ec22f \ - --hash=sha256:de55b766c7aa2e2a3092c51e0483d700341182f08e67c63630d5b6f200bb28e5 \ - --hash=sha256:df8b1c11f177bc2313ec4b2d46baec87a5f3e71fc8b45dab2ee7cae86d9aba14 \ - --hash=sha256:e03eab0a8677fa80d646b5ddece1cbeaf556c313dcfac435ba11f107ba117b5d \ - --hash=sha256:e221cf152cff04059d011ee126477f0d9588303eb57e88923578ace7baad17f9 \ - --hash=sha256:e31ae45bc2e29f6b2abd0de1cc3b9d5205aa847cafaecb8af1476a609a2f6eb7 \ - --hash=sha256:edae79245293e15384b51f88b00613ba9f7198016a5948b5dddf4917d4d26382 \ - --hash=sha256:f1e22e8c4419538cb197e4dd60acc919d7696e5ef98ee4da4e01d3f8cfa4cc5a \ - --hash=sha256:f3a2b4222ce6b60e2e8b337bb9596923045681d71e5a082783484d845390938e \ - --hash=sha256:f6a16c31041f09ead72d69f583767292f750d24913dadacf5756b966aacb3f1a \ - --hash=sha256:f75c7ab1f9e4aca5414ed4d8e5c0e303a34f4421f8a0d47a4d019ceff0ab6af4 \ - --hash=sha256:f79fc4fc25f1c8698ff97788206bb3c2598949bfe0fef03d299eb1b5356ada99 \ - 
--hash=sha256:f7f5baafcc48261359e14bcd6d9bff6d4b28d9103847c9e136694cb0501aef87 \ - --hash=sha256:fc48c783f9c87e60831201f2cce7f3b2e4846bf4d8728eabe54d60700b318a0b - # via cryptography -charset-normalizer==3.4.0 \ - --hash=sha256:0099d79bdfcf5c1f0c2c72f91516702ebf8b0b8ddd8905f97a8aecf49712c621 \ - --hash=sha256:0713f3adb9d03d49d365b70b84775d0a0d18e4ab08d12bc46baa6132ba78aaf6 \ - --hash=sha256:07afec21bbbbf8a5cc3651aa96b980afe2526e7f048fdfb7f1014d84acc8b6d8 \ - --hash=sha256:0b309d1747110feb25d7ed6b01afdec269c647d382c857ef4663bbe6ad95a912 \ - --hash=sha256:0d99dd8ff461990f12d6e42c7347fd9ab2532fb70e9621ba520f9e8637161d7c \ - --hash=sha256:0de7b687289d3c1b3e8660d0741874abe7888100efe14bd0f9fd7141bcbda92b \ - --hash=sha256:1110e22af8ca26b90bd6364fe4c763329b0ebf1ee213ba32b68c73de5752323d \ - --hash=sha256:130272c698667a982a5d0e626851ceff662565379baf0ff2cc58067b81d4f11d \ - --hash=sha256:136815f06a3ae311fae551c3df1f998a1ebd01ddd424aa5603a4336997629e95 \ - --hash=sha256:14215b71a762336254351b00ec720a8e85cada43b987da5a042e4ce3e82bd68e \ - --hash=sha256:1db4e7fefefd0f548d73e2e2e041f9df5c59e178b4c72fbac4cc6f535cfb1565 \ - --hash=sha256:1ffd9493de4c922f2a38c2bf62b831dcec90ac673ed1ca182fe11b4d8e9f2a64 \ - --hash=sha256:2006769bd1640bdf4d5641c69a3d63b71b81445473cac5ded39740a226fa88ab \ - --hash=sha256:20587d20f557fe189b7947d8e7ec5afa110ccf72a3128d61a2a387c3313f46be \ - --hash=sha256:223217c3d4f82c3ac5e29032b3f1c2eb0fb591b72161f86d93f5719079dae93e \ - --hash=sha256:27623ba66c183eca01bf9ff833875b459cad267aeeb044477fedac35e19ba907 \ - --hash=sha256:285e96d9d53422efc0d7a17c60e59f37fbf3dfa942073f666db4ac71e8d726d0 \ - --hash=sha256:2de62e8801ddfff069cd5c504ce3bc9672b23266597d4e4f50eda28846c322f2 \ - --hash=sha256:2f6c34da58ea9c1a9515621f4d9ac379871a8f21168ba1b5e09d74250de5ad62 \ - --hash=sha256:309a7de0a0ff3040acaebb35ec45d18db4b28232f21998851cfa709eeff49d62 \ - --hash=sha256:35c404d74c2926d0287fbd63ed5d27eb911eb9e4a3bb2c6d294f3cfd4a9e0c23 \ - --hash=sha256:3710a9751938947e6327ea9f3ea6332a09bf0ba0c09cae9cb1f250bd1f1549bc \ - --hash=sha256:3d59d125ffbd6d552765510e3f31ed75ebac2c7470c7274195b9161a32350284 \ - --hash=sha256:40d3ff7fc90b98c637bda91c89d51264a3dcf210cade3a2c6f838c7268d7a4ca \ - --hash=sha256:425c5f215d0eecee9a56cdb703203dda90423247421bf0d67125add85d0c4455 \ - --hash=sha256:43193c5cda5d612f247172016c4bb71251c784d7a4d9314677186a838ad34858 \ - --hash=sha256:44aeb140295a2f0659e113b31cfe92c9061622cadbc9e2a2f7b8ef6b1e29ef4b \ - --hash=sha256:47334db71978b23ebcf3c0f9f5ee98b8d65992b65c9c4f2d34c2eaf5bcaf0594 \ - --hash=sha256:4796efc4faf6b53a18e3d46343535caed491776a22af773f366534056c4e1fbc \ - --hash=sha256:4a51b48f42d9358460b78725283f04bddaf44a9358197b889657deba38f329db \ - --hash=sha256:4b67fdab07fdd3c10bb21edab3cbfe8cf5696f453afce75d815d9d7223fbe88b \ - --hash=sha256:4ec9dd88a5b71abfc74e9df5ebe7921c35cbb3b641181a531ca65cdb5e8e4dea \ - --hash=sha256:4f9fc98dad6c2eaa32fc3af1417d95b5e3d08aff968df0cd320066def971f9a6 \ - --hash=sha256:54b6a92d009cbe2fb11054ba694bc9e284dad30a26757b1e372a1fdddaf21920 \ - --hash=sha256:55f56e2ebd4e3bc50442fbc0888c9d8c94e4e06a933804e2af3e89e2f9c1c749 \ - --hash=sha256:5726cf76c982532c1863fb64d8c6dd0e4c90b6ece9feb06c9f202417a31f7dd7 \ - --hash=sha256:5d447056e2ca60382d460a604b6302d8db69476fd2015c81e7c35417cfabe4cd \ - --hash=sha256:5ed2e36c3e9b4f21dd9422f6893dec0abf2cca553af509b10cd630f878d3eb99 \ - --hash=sha256:5ff2ed8194587faf56555927b3aa10e6fb69d931e33953943bc4f837dfee2242 \ - --hash=sha256:62f60aebecfc7f4b82e3f639a7d1433a20ec32824db2199a11ad4f5e146ef5ee \ - 
--hash=sha256:63bc5c4ae26e4bc6be6469943b8253c0fd4e4186c43ad46e713ea61a0ba49129 \ - --hash=sha256:6b40e8d38afe634559e398cc32b1472f376a4099c75fe6299ae607e404c033b2 \ - --hash=sha256:6b493a043635eb376e50eedf7818f2f322eabbaa974e948bd8bdd29eb7ef2a51 \ - --hash=sha256:6dba5d19c4dfab08e58d5b36304b3f92f3bd5d42c1a3fa37b5ba5cdf6dfcbcee \ - --hash=sha256:6fd30dc99682dc2c603c2b315bded2799019cea829f8bf57dc6b61efde6611c8 \ - --hash=sha256:707b82d19e65c9bd28b81dde95249b07bf9f5b90ebe1ef17d9b57473f8a64b7b \ - --hash=sha256:7706f5850360ac01d80c89bcef1640683cc12ed87f42579dab6c5d3ed6888613 \ - --hash=sha256:7782afc9b6b42200f7362858f9e73b1f8316afb276d316336c0ec3bd73312742 \ - --hash=sha256:79983512b108e4a164b9c8d34de3992f76d48cadc9554c9e60b43f308988aabe \ - --hash=sha256:7f683ddc7eedd742e2889d2bfb96d69573fde1d92fcb811979cdb7165bb9c7d3 \ - --hash=sha256:82357d85de703176b5587dbe6ade8ff67f9f69a41c0733cf2425378b49954de5 \ - --hash=sha256:84450ba661fb96e9fd67629b93d2941c871ca86fc38d835d19d4225ff946a631 \ - --hash=sha256:86f4e8cca779080f66ff4f191a685ced73d2f72d50216f7112185dc02b90b9b7 \ - --hash=sha256:8cda06946eac330cbe6598f77bb54e690b4ca93f593dee1568ad22b04f347c15 \ - --hash=sha256:8ce7fd6767a1cc5a92a639b391891bf1c268b03ec7e021c7d6d902285259685c \ - --hash=sha256:8ff4e7cdfdb1ab5698e675ca622e72d58a6fa2a8aa58195de0c0061288e6e3ea \ - --hash=sha256:9289fd5dddcf57bab41d044f1756550f9e7cf0c8e373b8cdf0ce8773dc4bd417 \ - --hash=sha256:92a7e36b000bf022ef3dbb9c46bfe2d52c047d5e3f3343f43204263c5addc250 \ - --hash=sha256:92db3c28b5b2a273346bebb24857fda45601aef6ae1c011c0a997106581e8a88 \ - --hash=sha256:95c3c157765b031331dd4db3c775e58deaee050a3042fcad72cbc4189d7c8dca \ - --hash=sha256:980b4f289d1d90ca5efcf07958d3eb38ed9c0b7676bf2831a54d4f66f9c27dfa \ - --hash=sha256:9ae4ef0b3f6b41bad6366fb0ea4fc1d7ed051528e113a60fa2a65a9abb5b1d99 \ - --hash=sha256:9c98230f5042f4945f957d006edccc2af1e03ed5e37ce7c373f00a5a4daa6149 \ - --hash=sha256:9fa2566ca27d67c86569e8c85297aaf413ffab85a8960500f12ea34ff98e4c41 \ - --hash=sha256:a14969b8691f7998e74663b77b4c36c0337cb1df552da83d5c9004a93afdb574 \ - --hash=sha256:a8aacce6e2e1edcb6ac625fb0f8c3a9570ccc7bfba1f63419b3769ccf6a00ed0 \ - --hash=sha256:a8e538f46104c815be19c975572d74afb53f29650ea2025bbfaef359d2de2f7f \ - --hash=sha256:aa41e526a5d4a9dfcfbab0716c7e8a1b215abd3f3df5a45cf18a12721d31cb5d \ - --hash=sha256:aa693779a8b50cd97570e5a0f343538a8dbd3e496fa5dcb87e29406ad0299654 \ - --hash=sha256:ab22fbd9765e6954bc0bcff24c25ff71dcbfdb185fcdaca49e81bac68fe724d3 \ - --hash=sha256:ab2e5bef076f5a235c3774b4f4028a680432cded7cad37bba0fd90d64b187d19 \ - --hash=sha256:ab973df98fc99ab39080bfb0eb3a925181454d7c3ac8a1e695fddfae696d9e90 \ - --hash=sha256:af73657b7a68211996527dbfeffbb0864e043d270580c5aef06dc4b659a4b578 \ - --hash=sha256:b197e7094f232959f8f20541ead1d9862ac5ebea1d58e9849c1bf979255dfac9 \ - --hash=sha256:b295729485b06c1a0683af02a9e42d2caa9db04a373dc38a6a58cdd1e8abddf1 \ - --hash=sha256:b8831399554b92b72af5932cdbbd4ddc55c55f631bb13ff8fe4e6536a06c5c51 \ - --hash=sha256:b8dcd239c743aa2f9c22ce674a145e0a25cb1566c495928440a181ca1ccf6719 \ - --hash=sha256:bcb4f8ea87d03bc51ad04add8ceaf9b0f085ac045ab4d74e73bbc2dc033f0236 \ - --hash=sha256:bd7af3717683bea4c87acd8c0d3d5b44d56120b26fd3f8a692bdd2d5260c620a \ - --hash=sha256:bf4475b82be41b07cc5e5ff94810e6a01f276e37c2d55571e3fe175e467a1a1c \ - --hash=sha256:c3e446d253bd88f6377260d07c895816ebf33ffffd56c1c792b13bff9c3e1ade \ - --hash=sha256:c57516e58fd17d03ebe67e181a4e4e2ccab1168f8c2976c6a334d4f819fe5944 \ - 
--hash=sha256:c94057af19bc953643a33581844649a7fdab902624d2eb739738a30e2b3e60fc \ - --hash=sha256:cab5d0b79d987c67f3b9e9c53f54a61360422a5a0bc075f43cab5621d530c3b6 \ - --hash=sha256:ce031db0408e487fd2775d745ce30a7cd2923667cf3b69d48d219f1d8f5ddeb6 \ - --hash=sha256:cee4373f4d3ad28f1ab6290684d8e2ebdb9e7a1b74fdc39e4c211995f77bec27 \ - --hash=sha256:d5b054862739d276e09928de37c79ddeec42a6e1bfc55863be96a36ba22926f6 \ - --hash=sha256:dbe03226baf438ac4fda9e2d0715022fd579cb641c4cf639fa40d53b2fe6f3e2 \ - --hash=sha256:dc15e99b2d8a656f8e666854404f1ba54765871104e50c8e9813af8a7db07f12 \ - --hash=sha256:dcaf7c1524c0542ee2fc82cc8ec337f7a9f7edee2532421ab200d2b920fc97cf \ - --hash=sha256:dd4eda173a9fcccb5f2e2bd2a9f423d180194b1bf17cf59e3269899235b2a114 \ - --hash=sha256:dd9a8bd8900e65504a305bf8ae6fa9fbc66de94178c420791d0293702fce2df7 \ - --hash=sha256:de7376c29d95d6719048c194a9cf1a1b0393fbe8488a22008610b0361d834ecf \ - --hash=sha256:e7fdd52961feb4c96507aa649550ec2a0d527c086d284749b2f582f2d40a2e0d \ - --hash=sha256:e91f541a85298cf35433bf66f3fab2a4a2cff05c127eeca4af174f6d497f0d4b \ - --hash=sha256:e9e3c4c9e1ed40ea53acf11e2a386383c3304212c965773704e4603d589343ed \ - --hash=sha256:ee803480535c44e7f5ad00788526da7d85525cfefaf8acf8ab9a310000be4b03 \ - --hash=sha256:f09cb5a7bbe1ecae6e87901a2eb23e0256bb524a79ccc53eb0b7629fbe7677c4 \ - --hash=sha256:f19c1585933c82098c2a520f8ec1227f20e339e33aca8fa6f956f6691b784e67 \ - --hash=sha256:f1a2f519ae173b5b6a2c9d5fa3116ce16e48b3462c8b96dfdded11055e3d6365 \ - --hash=sha256:f28f891ccd15c514a0981f3b9db9aa23d62fe1a99997512b0491d2ed323d229a \ - --hash=sha256:f3e73a4255342d4eb26ef6df01e3962e73aa29baa3124a8e824c5d3364a65748 \ - --hash=sha256:f606a1881d2663630ea5b8ce2efe2111740df4b687bd78b34a8131baa007f79b \ - --hash=sha256:fe9f97feb71aa9896b81973a7bbada8c49501dc73e58a10fcef6663af95e5079 \ - --hash=sha256:ffc519621dce0c767e96b9c53f09c5d215578e10b02c285809f76509a3931482 - # via requests -cryptography==43.0.1 \ - --hash=sha256:014f58110f53237ace6a408b5beb6c427b64e084eb451ef25a28308270086494 \ - --hash=sha256:1bbcce1a551e262dfbafb6e6252f1ae36a248e615ca44ba302df077a846a8806 \ - --hash=sha256:203e92a75716d8cfb491dc47c79e17d0d9207ccffcbcb35f598fbe463ae3444d \ - --hash=sha256:27e613d7077ac613e399270253259d9d53872aaf657471473ebfc9a52935c062 \ - --hash=sha256:2bd51274dcd59f09dd952afb696bf9c61a7a49dfc764c04dd33ef7a6b502a1e2 \ - --hash=sha256:38926c50cff6f533f8a2dae3d7f19541432610d114a70808f0926d5aaa7121e4 \ - --hash=sha256:511f4273808ab590912a93ddb4e3914dfd8a388fed883361b02dea3791f292e1 \ - --hash=sha256:58d4e9129985185a06d849aa6df265bdd5a74ca6e1b736a77959b498e0505b85 \ - --hash=sha256:5b43d1ea6b378b54a1dc99dd8a2b5be47658fe9a7ce0a58ff0b55f4b43ef2b84 \ - --hash=sha256:61ec41068b7b74268fa86e3e9e12b9f0c21fcf65434571dbb13d954bceb08042 \ - --hash=sha256:666ae11966643886c2987b3b721899d250855718d6d9ce41b521252a17985f4d \ - --hash=sha256:68aaecc4178e90719e95298515979814bda0cbada1256a4485414860bd7ab962 \ - --hash=sha256:7c05650fe8023c5ed0d46793d4b7d7e6cd9c04e68eabe5b0aeea836e37bdcec2 \ - --hash=sha256:80eda8b3e173f0f247f711eef62be51b599b5d425c429b5d4ca6a05e9e856baa \ - --hash=sha256:8385d98f6a3bf8bb2d65a73e17ed87a3ba84f6991c155691c51112075f9ffc5d \ - --hash=sha256:88cce104c36870d70c49c7c8fd22885875d950d9ee6ab54df2745f83ba0dc365 \ - --hash=sha256:9d3cdb25fa98afdd3d0892d132b8d7139e2c087da1712041f6b762e4f807cc96 \ - --hash=sha256:a575913fb06e05e6b4b814d7f7468c2c660e8bb16d8d5a1faf9b33ccc569dd47 \ - --hash=sha256:ac119bb76b9faa00f48128b7f5679e1d8d437365c5d26f1c2c3f0da4ce1b553d \ - 
--hash=sha256:c1332724be35d23a854994ff0b66530119500b6053d0bd3363265f7e5e77288d \ - --hash=sha256:d03a475165f3134f773d1388aeb19c2d25ba88b6a9733c5c590b9ff7bbfa2e0c \ - --hash=sha256:d75601ad10b059ec832e78823b348bfa1a59f6b8d545db3a24fd44362a1564cb \ - --hash=sha256:de41fd81a41e53267cb020bb3a7212861da53a7d39f863585d13ea11049cf277 \ - --hash=sha256:e710bf40870f4db63c3d7d929aa9e09e4e7ee219e703f949ec4073b4294f6172 \ - --hash=sha256:ea25acb556320250756e53f9e20a4177515f012c9eaea17eb7587a8c4d8ae034 \ - --hash=sha256:f98bf604c82c416bc829e490c700ca1553eafdf2912a91e23a79d97d9801372a \ - --hash=sha256:fba1007b3ef89946dbbb515aeeb41e30203b004f0b4b00e5e16078b518563289 - # via secretstorage -google-auth==2.35.0 \ - --hash=sha256:25df55f327ef021de8be50bad0dfd4a916ad0de96da86cd05661c9297723ad3f \ - --hash=sha256:f4c64ed4e01e8e8b646ef34c018f8bf3338df0c8e37d8b3bba40e7f574a3278a - # via keyrings-google-artifactregistry-auth -idna==3.10 \ - --hash=sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9 \ - --hash=sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3 - # via requests -importlib-metadata==8.5.0 \ - --hash=sha256:45e54197d28b7a7f1559e60b95e7c567032b602131fbd588f1497f47880aa68b \ - --hash=sha256:71522656f0abace1d072b9e5481a48f07c138e00f079c38c8f883823f9c26bd7 - # via keyring -jaraco-classes==3.4.0 \ - --hash=sha256:47a024b51d0239c0dd8c8540c6c7f484be3b8fcf0b2d85c13825780d3b3f3acd \ - --hash=sha256:f662826b6bed8cace05e7ff873ce0f9283b5c924470fe664fff1c2f00f581790 - # via keyring -jaraco-context==6.0.1 \ - --hash=sha256:9bae4ea555cf0b14938dc0aee7c9f32ed303aa20a3b73e7dc80111628792d1b3 \ - --hash=sha256:f797fc481b490edb305122c9181830a3a5b76d84ef6d1aef2fb9b47ab956f9e4 - # via keyring -jaraco-functools==4.1.0 \ - --hash=sha256:70f7e0e2ae076498e212562325e805204fc092d7b4c17e0e86c959e249701a9d \ - --hash=sha256:ad159f13428bc4acbf5541ad6dec511f91573b90fba04df61dafa2a1231cf649 - # via keyring -jeepney==0.8.0 \ - --hash=sha256:5efe48d255973902f6badc3ce55e2aa6c5c3b3bc642059ef3a91247bcfcc5806 \ - --hash=sha256:c0a454ad016ca575060802ee4d590dd912e35c122fa04e70306de3d076cce755 - # via - # keyring - # secretstorage -keyring==25.4.1 \ - --hash=sha256:5426f817cf7f6f007ba5ec722b1bcad95a75b27d780343772ad76b17cb47b0bf \ - --hash=sha256:b07ebc55f3e8ed86ac81dd31ef14e81ace9dd9c3d4b5d77a6e9a2016d0d71a1b - # via - # -r requirements-aoss.in - # keyrings-google-artifactregistry-auth -keyrings-google-artifactregistry-auth==1.1.2 \ - --hash=sha256:bd6abb72740d2dfeb4a5c03c3b105c6f7dba169caa29dee3959694f1f02c77de \ - --hash=sha256:e3f18b50fa945c786593014dc225810d191671d4f5f8e12d9259e39bad3605a3 - # via -r requirements-aoss.in -more-itertools==10.5.0 \ - --hash=sha256:037b0d3203ce90cca8ab1defbbdac29d5f993fc20131f3664dc8d6acfa872aef \ - --hash=sha256:5482bfef7849c25dc3c6dd53a6173ae4795da2a41a80faea6700d9f5846c5da6 - # via - # jaraco-classes - # jaraco-functools -pluggy==1.5.0 \ - --hash=sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1 \ - --hash=sha256:d5783d8a2575b1d2f22c03e92b0a2e18892b45eadc5a8e41625767aa5e6bcc52 - # via keyrings-google-artifactregistry-auth -pyasn1==0.6.1 \ - --hash=sha256:0d632f46f2ba09143da3a8afe9e33fb6f92fa2320ab7e886e2d0f7672af84629 \ - --hash=sha256:6f580d2bdd84365380830acf45550f2511469f673cb4a5ae3857a3170128b034 - # via - # pyasn1-modules - # rsa -pyasn1-modules==0.4.1 \ - --hash=sha256:49bfa96b45a292b711e986f222502c1c9a5e1f4e568fc30e2574a6c7d07838fd \ - --hash=sha256:c28e2dbf9c06ad61c71a075c7e0f9fd0f1b0bb2d2ad4377f240d33ac2ab60a7c - # via google-auth 
-pycparser==2.22 \ - --hash=sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6 \ - --hash=sha256:d47c5472466f7131bb482b7dd186918f73a2e087d05d2a50d88957a8498377e5 - # via cffi -requests==2.32.3 \ - --hash=sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760 \ - --hash=sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6 - # via keyrings-google-artifactregistry-auth -rsa==4.9 \ - --hash=sha256:bbe333816d27ed8355b433f85795665a1e9ecec1b7a022906ec6ab9d60bfcbef \ - --hash=sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21 - # via google-auth -secretstorage==3.3.3 \ - --hash=sha256:2403533ef369eca6d2ba81718576c5e0f564d5cca1b58f73a8b23e7d4eeebd77 \ - --hash=sha256:7ef3a3c14fd6975684be05cf30cb13cc17936814b3bd02664b6ab8378aaf0c5b - # via keyring -urllib3==2.2.3 \ - --hash=sha256:ca899ca043dcb1bafa3e262d73aa25c465bfb49e0bd9dd5d59f1d0acba2f8fac \ - --hash=sha256:e7d814a81dad81e6caf2ec9fdedb284ecc9c73076b62654547cc64ccdcae26e9 - # via requests -zipp==3.20.2 \ - --hash=sha256:a817ac80d6cf4b23bf7f2828b7cabf326f15a001bea8b1f9b49631780ba28350 \ - --hash=sha256:bc9eb26f4506fda01b81bcde0ca78103b6e62f991b381fec825435c836edbc29 - # via importlib-metadata diff --git a/.kokoro/requirements.in b/.kokoro/requirements.in deleted file mode 100644 index dbfece72c44e..000000000000 --- a/.kokoro/requirements.in +++ /dev/null @@ -1,10 +0,0 @@ -gcp-docuploader -gcp-releasetool>=2 # required for compatibility with cryptography>=42.x -importlib-metadata -typing-extensions -twine -wheel -setuptools -nox>=2022.11.21 # required to remove dependency on py -charset-normalizer<3 -cryptography>=42.0.5 diff --git a/.kokoro/requirements.txt b/.kokoro/requirements.txt deleted file mode 100644 index a4192b1425ee..000000000000 --- a/.kokoro/requirements.txt +++ /dev/null @@ -1,508 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.9 -# by the following command: -# -# pip-compile --allow-unsafe --generate-hashes requirements.in -# -argcomplete==3.5.1 \ - --hash=sha256:1a1d148bdaa3e3b93454900163403df41448a248af01b6e849edc5ac08e6c363 \ - --hash=sha256:eb1ee355aa2557bd3d0145de7b06b2a45b0ce461e1e7813f5d066039ab4177b4 - # via nox -attrs==24.2.0 \ - --hash=sha256:5cfb1b9148b5b086569baec03f20d7b6bf3bcacc9a42bebf87ffaaca362f6346 \ - --hash=sha256:81921eb96de3191c8258c199618104dd27ac608d9366f5e35d011eae1867ede2 - # via gcp-releasetool -backports-tarfile==1.2.0 \ - --hash=sha256:77e284d754527b01fb1e6fa8a1afe577858ebe4e9dad8919e34c862cb399bc34 \ - --hash=sha256:d75e02c268746e1b8144c278978b6e98e85de6ad16f8e4b0844a154557eca991 - # via jaraco-context -cachetools==5.5.0 \ - --hash=sha256:02134e8439cdc2ffb62023ce1debca2944c3f289d66bb17ead3ab3dede74b292 \ - --hash=sha256:2cc24fb4cbe39633fb7badd9db9ca6295d766d9c2995f245725a46715d050f2a - # via google-auth -certifi==2024.8.30 \ - --hash=sha256:922820b53db7a7257ffbda3f597266d435245903d80737e34f8a45ff3e3230d8 \ - --hash=sha256:bec941d2aa8195e248a60b31ff9f0558284cf01a52591ceda73ea9afffd69fd9 - # via requests -cffi==1.17.1 \ - --hash=sha256:045d61c734659cc045141be4bae381a41d89b741f795af1dd018bfb532fd0df8 \ - --hash=sha256:0984a4925a435b1da406122d4d7968dd861c1385afe3b45ba82b750f229811e2 \ - --hash=sha256:0e2b1fac190ae3ebfe37b979cc1ce69c81f4e4fe5746bb401dca63a9062cdaf1 \ - --hash=sha256:0f048dcf80db46f0098ccac01132761580d28e28bc0f78ae0d58048063317e15 \ - --hash=sha256:1257bdabf294dceb59f5e70c64a3e2f462c30c7ad68092d01bbbfb1c16b1ba36 \ - --hash=sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824 
\ - --hash=sha256:1d599671f396c4723d016dbddb72fe8e0397082b0a77a4fab8028923bec050e8 \ - --hash=sha256:28b16024becceed8c6dfbc75629e27788d8a3f9030691a1dbf9821a128b22c36 \ - --hash=sha256:2bb1a08b8008b281856e5971307cc386a8e9c5b625ac297e853d36da6efe9c17 \ - --hash=sha256:30c5e0cb5ae493c04c8b42916e52ca38079f1b235c2f8ae5f4527b963c401caf \ - --hash=sha256:31000ec67d4221a71bd3f67df918b1f88f676f1c3b535a7eb473255fdc0b83fc \ - --hash=sha256:386c8bf53c502fff58903061338ce4f4950cbdcb23e2902d86c0f722b786bbe3 \ - --hash=sha256:3edc8d958eb099c634dace3c7e16560ae474aa3803a5df240542b305d14e14ed \ - --hash=sha256:45398b671ac6d70e67da8e4224a065cec6a93541bb7aebe1b198a61b58c7b702 \ - --hash=sha256:46bf43160c1a35f7ec506d254e5c890f3c03648a4dbac12d624e4490a7046cd1 \ - --hash=sha256:4ceb10419a9adf4460ea14cfd6bc43d08701f0835e979bf821052f1805850fe8 \ - --hash=sha256:51392eae71afec0d0c8fb1a53b204dbb3bcabcb3c9b807eedf3e1e6ccf2de903 \ - --hash=sha256:5da5719280082ac6bd9aa7becb3938dc9f9cbd57fac7d2871717b1feb0902ab6 \ - --hash=sha256:610faea79c43e44c71e1ec53a554553fa22321b65fae24889706c0a84d4ad86d \ - --hash=sha256:636062ea65bd0195bc012fea9321aca499c0504409f413dc88af450b57ffd03b \ - --hash=sha256:6883e737d7d9e4899a8a695e00ec36bd4e5e4f18fabe0aca0efe0a4b44cdb13e \ - --hash=sha256:6b8b4a92e1c65048ff98cfe1f735ef8f1ceb72e3d5f0c25fdb12087a23da22be \ - --hash=sha256:6f17be4345073b0a7b8ea599688f692ac3ef23ce28e5df79c04de519dbc4912c \ - --hash=sha256:706510fe141c86a69c8ddc029c7910003a17353970cff3b904ff0686a5927683 \ - --hash=sha256:72e72408cad3d5419375fc87d289076ee319835bdfa2caad331e377589aebba9 \ - --hash=sha256:733e99bc2df47476e3848417c5a4540522f234dfd4ef3ab7fafdf555b082ec0c \ - --hash=sha256:7596d6620d3fa590f677e9ee430df2958d2d6d6de2feeae5b20e82c00b76fbf8 \ - --hash=sha256:78122be759c3f8a014ce010908ae03364d00a1f81ab5c7f4a7a5120607ea56e1 \ - --hash=sha256:805b4371bf7197c329fcb3ead37e710d1bca9da5d583f5073b799d5c5bd1eee4 \ - --hash=sha256:85a950a4ac9c359340d5963966e3e0a94a676bd6245a4b55bc43949eee26a655 \ - --hash=sha256:8f2cdc858323644ab277e9bb925ad72ae0e67f69e804f4898c070998d50b1a67 \ - --hash=sha256:9755e4345d1ec879e3849e62222a18c7174d65a6a92d5b346b1863912168b595 \ - --hash=sha256:98e3969bcff97cae1b2def8ba499ea3d6f31ddfdb7635374834cf89a1a08ecf0 \ - --hash=sha256:a08d7e755f8ed21095a310a693525137cfe756ce62d066e53f502a83dc550f65 \ - --hash=sha256:a1ed2dd2972641495a3ec98445e09766f077aee98a1c896dcb4ad0d303628e41 \ - --hash=sha256:a24ed04c8ffd54b0729c07cee15a81d964e6fee0e3d4d342a27b020d22959dc6 \ - --hash=sha256:a45e3c6913c5b87b3ff120dcdc03f6131fa0065027d0ed7ee6190736a74cd401 \ - --hash=sha256:a9b15d491f3ad5d692e11f6b71f7857e7835eb677955c00cc0aefcd0669adaf6 \ - --hash=sha256:ad9413ccdeda48c5afdae7e4fa2192157e991ff761e7ab8fdd8926f40b160cc3 \ - --hash=sha256:b2ab587605f4ba0bf81dc0cb08a41bd1c0a5906bd59243d56bad7668a6fc6c16 \ - --hash=sha256:b62ce867176a75d03a665bad002af8e6d54644fad99a3c70905c543130e39d93 \ - --hash=sha256:c03e868a0b3bc35839ba98e74211ed2b05d2119be4e8a0f224fba9384f1fe02e \ - --hash=sha256:c59d6e989d07460165cc5ad3c61f9fd8f1b4796eacbd81cee78957842b834af4 \ - --hash=sha256:c7eac2ef9b63c79431bc4b25f1cd649d7f061a28808cbc6c47b534bd789ef964 \ - --hash=sha256:c9c3d058ebabb74db66e431095118094d06abf53284d9c81f27300d0e0d8bc7c \ - --hash=sha256:ca74b8dbe6e8e8263c0ffd60277de77dcee6c837a3d0881d8c1ead7268c9e576 \ - --hash=sha256:caaf0640ef5f5517f49bc275eca1406b0ffa6aa184892812030f04c2abf589a0 \ - --hash=sha256:cdf5ce3acdfd1661132f2a9c19cac174758dc2352bfe37d98aa7512c6b7178b3 \ - 
--hash=sha256:d016c76bdd850f3c626af19b0542c9677ba156e4ee4fccfdd7848803533ef662 \ - --hash=sha256:d01b12eeeb4427d3110de311e1774046ad344f5b1a7403101878976ecd7a10f3 \ - --hash=sha256:d63afe322132c194cf832bfec0dc69a99fb9bb6bbd550f161a49e9e855cc78ff \ - --hash=sha256:da95af8214998d77a98cc14e3a3bd00aa191526343078b530ceb0bd710fb48a5 \ - --hash=sha256:dd398dbc6773384a17fe0d3e7eeb8d1a21c2200473ee6806bb5e6a8e62bb73dd \ - --hash=sha256:de2ea4b5833625383e464549fec1bc395c1bdeeb5f25c4a3a82b5a8c756ec22f \ - --hash=sha256:de55b766c7aa2e2a3092c51e0483d700341182f08e67c63630d5b6f200bb28e5 \ - --hash=sha256:df8b1c11f177bc2313ec4b2d46baec87a5f3e71fc8b45dab2ee7cae86d9aba14 \ - --hash=sha256:e03eab0a8677fa80d646b5ddece1cbeaf556c313dcfac435ba11f107ba117b5d \ - --hash=sha256:e221cf152cff04059d011ee126477f0d9588303eb57e88923578ace7baad17f9 \ - --hash=sha256:e31ae45bc2e29f6b2abd0de1cc3b9d5205aa847cafaecb8af1476a609a2f6eb7 \ - --hash=sha256:edae79245293e15384b51f88b00613ba9f7198016a5948b5dddf4917d4d26382 \ - --hash=sha256:f1e22e8c4419538cb197e4dd60acc919d7696e5ef98ee4da4e01d3f8cfa4cc5a \ - --hash=sha256:f3a2b4222ce6b60e2e8b337bb9596923045681d71e5a082783484d845390938e \ - --hash=sha256:f6a16c31041f09ead72d69f583767292f750d24913dadacf5756b966aacb3f1a \ - --hash=sha256:f75c7ab1f9e4aca5414ed4d8e5c0e303a34f4421f8a0d47a4d019ceff0ab6af4 \ - --hash=sha256:f79fc4fc25f1c8698ff97788206bb3c2598949bfe0fef03d299eb1b5356ada99 \ - --hash=sha256:f7f5baafcc48261359e14bcd6d9bff6d4b28d9103847c9e136694cb0501aef87 \ - --hash=sha256:fc48c783f9c87e60831201f2cce7f3b2e4846bf4d8728eabe54d60700b318a0b - # via cryptography -charset-normalizer==2.1.1 \ - --hash=sha256:5a3d016c7c547f69d6f81fb0db9449ce888b418b5b9952cc5e6e66843e9dd845 \ - --hash=sha256:83e9a75d1911279afd89352c68b45348559d1fc0506b054b346651b5e7fee29f - # via - # -r requirements.in - # requests -click==8.0.4 \ - --hash=sha256:6a7a62563bbfabfda3a38f3023a1db4a35978c0abd76f6c9605ecd6554d6d9b1 \ - --hash=sha256:8458d7b1287c5fb128c90e23381cf99dcde74beaf6c7ff6384ce84d6fe090adb - # via - # gcp-docuploader - # gcp-releasetool -colorlog==6.8.2 \ - --hash=sha256:3e3e079a41feb5a1b64f978b5ea4f46040a94f11f0e8bbb8261e3dbbeca64d44 \ - --hash=sha256:4dcbb62368e2800cb3c5abd348da7e53f6c362dda502ec27c560b2e58a66bd33 - # via - # gcp-docuploader - # nox -cryptography==43.0.1 \ - --hash=sha256:014f58110f53237ace6a408b5beb6c427b64e084eb451ef25a28308270086494 \ - --hash=sha256:1bbcce1a551e262dfbafb6e6252f1ae36a248e615ca44ba302df077a846a8806 \ - --hash=sha256:203e92a75716d8cfb491dc47c79e17d0d9207ccffcbcb35f598fbe463ae3444d \ - --hash=sha256:27e613d7077ac613e399270253259d9d53872aaf657471473ebfc9a52935c062 \ - --hash=sha256:2bd51274dcd59f09dd952afb696bf9c61a7a49dfc764c04dd33ef7a6b502a1e2 \ - --hash=sha256:38926c50cff6f533f8a2dae3d7f19541432610d114a70808f0926d5aaa7121e4 \ - --hash=sha256:511f4273808ab590912a93ddb4e3914dfd8a388fed883361b02dea3791f292e1 \ - --hash=sha256:58d4e9129985185a06d849aa6df265bdd5a74ca6e1b736a77959b498e0505b85 \ - --hash=sha256:5b43d1ea6b378b54a1dc99dd8a2b5be47658fe9a7ce0a58ff0b55f4b43ef2b84 \ - --hash=sha256:61ec41068b7b74268fa86e3e9e12b9f0c21fcf65434571dbb13d954bceb08042 \ - --hash=sha256:666ae11966643886c2987b3b721899d250855718d6d9ce41b521252a17985f4d \ - --hash=sha256:68aaecc4178e90719e95298515979814bda0cbada1256a4485414860bd7ab962 \ - --hash=sha256:7c05650fe8023c5ed0d46793d4b7d7e6cd9c04e68eabe5b0aeea836e37bdcec2 \ - --hash=sha256:80eda8b3e173f0f247f711eef62be51b599b5d425c429b5d4ca6a05e9e856baa \ - --hash=sha256:8385d98f6a3bf8bb2d65a73e17ed87a3ba84f6991c155691c51112075f9ffc5d \ - 
--hash=sha256:88cce104c36870d70c49c7c8fd22885875d950d9ee6ab54df2745f83ba0dc365 \ - --hash=sha256:9d3cdb25fa98afdd3d0892d132b8d7139e2c087da1712041f6b762e4f807cc96 \ - --hash=sha256:a575913fb06e05e6b4b814d7f7468c2c660e8bb16d8d5a1faf9b33ccc569dd47 \ - --hash=sha256:ac119bb76b9faa00f48128b7f5679e1d8d437365c5d26f1c2c3f0da4ce1b553d \ - --hash=sha256:c1332724be35d23a854994ff0b66530119500b6053d0bd3363265f7e5e77288d \ - --hash=sha256:d03a475165f3134f773d1388aeb19c2d25ba88b6a9733c5c590b9ff7bbfa2e0c \ - --hash=sha256:d75601ad10b059ec832e78823b348bfa1a59f6b8d545db3a24fd44362a1564cb \ - --hash=sha256:de41fd81a41e53267cb020bb3a7212861da53a7d39f863585d13ea11049cf277 \ - --hash=sha256:e710bf40870f4db63c3d7d929aa9e09e4e7ee219e703f949ec4073b4294f6172 \ - --hash=sha256:ea25acb556320250756e53f9e20a4177515f012c9eaea17eb7587a8c4d8ae034 \ - --hash=sha256:f98bf604c82c416bc829e490c700ca1553eafdf2912a91e23a79d97d9801372a \ - --hash=sha256:fba1007b3ef89946dbbb515aeeb41e30203b004f0b4b00e5e16078b518563289 - # via - # -r requirements.in - # gcp-releasetool - # secretstorage -distlib==0.3.9 \ - --hash=sha256:47f8c22fd27c27e25a65601af709b38e4f0a45ea4fc2e710f65755fa8caaaf87 \ - --hash=sha256:a60f20dea646b8a33f3e7772f74dc0b2d0772d2837ee1342a00645c81edf9403 - # via virtualenv -docutils==0.21.2 \ - --hash=sha256:3a6b18732edf182daa3cd12775bbb338cf5691468f91eeeb109deff6ebfa986f \ - --hash=sha256:dafca5b9e384f0e419294eb4d2ff9fa826435bf15f15b7bd45723e8ad76811b2 - # via readme-renderer -filelock==3.16.1 \ - --hash=sha256:2082e5703d51fbf98ea75855d9d5527e33d8ff23099bec374a134febee6946b0 \ - --hash=sha256:c249fbfcd5db47e5e2d6d62198e565475ee65e4831e2561c8e313fa7eb961435 - # via virtualenv -gcp-docuploader==0.6.5 \ - --hash=sha256:30221d4ac3e5a2b9c69aa52fdbef68cc3f27d0e6d0d90e220fc024584b8d2318 \ - --hash=sha256:b7458ef93f605b9d46a4bf3a8dc1755dad1f31d030c8679edf304e343b347eea - # via -r requirements.in -gcp-releasetool==2.1.1 \ - --hash=sha256:25639269f4eae510094f9dbed9894977e1966933211eb155a451deebc3fc0b30 \ - --hash=sha256:845f4ded3d9bfe8cc7fdaad789e83f4ea014affa77785259a7ddac4b243e099e - # via -r requirements.in -google-api-core==2.21.0 \ - --hash=sha256:4a152fd11a9f774ea606388d423b68aa7e6d6a0ffe4c8266f74979613ec09f81 \ - --hash=sha256:6869eacb2a37720380ba5898312af79a4d30b8bca1548fb4093e0697dc4bdf5d - # via - # google-cloud-core - # google-cloud-storage -google-auth==2.35.0 \ - --hash=sha256:25df55f327ef021de8be50bad0dfd4a916ad0de96da86cd05661c9297723ad3f \ - --hash=sha256:f4c64ed4e01e8e8b646ef34c018f8bf3338df0c8e37d8b3bba40e7f574a3278a - # via - # gcp-releasetool - # google-api-core - # google-cloud-core - # google-cloud-storage -google-cloud-core==2.4.1 \ - --hash=sha256:9b7749272a812bde58fff28868d0c5e2f585b82f37e09a1f6ed2d4d10f134073 \ - --hash=sha256:a9e6a4422b9ac5c29f79a0ede9485473338e2ce78d91f2370c01e730eab22e61 - # via google-cloud-storage -google-cloud-storage==2.18.2 \ - --hash=sha256:97a4d45c368b7d401ed48c4fdfe86e1e1cb96401c9e199e419d289e2c0370166 \ - --hash=sha256:aaf7acd70cdad9f274d29332673fcab98708d0e1f4dceb5a5356aaef06af4d99 - # via gcp-docuploader -google-crc32c==1.6.0 \ - --hash=sha256:05e2d8c9a2f853ff116db9706b4a27350587f341eda835f46db3c0a8c8ce2f24 \ - --hash=sha256:18e311c64008f1f1379158158bb3f0c8d72635b9eb4f9545f8cf990c5668e59d \ - --hash=sha256:236c87a46cdf06384f614e9092b82c05f81bd34b80248021f729396a78e55d7e \ - --hash=sha256:35834855408429cecf495cac67ccbab802de269e948e27478b1e47dfb6465e57 \ - --hash=sha256:386122eeaaa76951a8196310432c5b0ef3b53590ef4c317ec7588ec554fec5d2 \ - 
--hash=sha256:40b05ab32a5067525670880eb5d169529089a26fe35dce8891127aeddc1950e8 \ - --hash=sha256:48abd62ca76a2cbe034542ed1b6aee851b6f28aaca4e6551b5599b6f3ef175cc \ - --hash=sha256:50cf2a96da226dcbff8671233ecf37bf6e95de98b2a2ebadbfdf455e6d05df42 \ - --hash=sha256:51c4f54dd8c6dfeb58d1df5e4f7f97df8abf17a36626a217f169893d1d7f3e9f \ - --hash=sha256:5bcc90b34df28a4b38653c36bb5ada35671ad105c99cfe915fb5bed7ad6924aa \ - --hash=sha256:62f6d4a29fea082ac4a3c9be5e415218255cf11684ac6ef5488eea0c9132689b \ - --hash=sha256:6eceb6ad197656a1ff49ebfbbfa870678c75be4344feb35ac1edf694309413dc \ - --hash=sha256:7aec8e88a3583515f9e0957fe4f5f6d8d4997e36d0f61624e70469771584c760 \ - --hash=sha256:91ca8145b060679ec9176e6de4f89b07363d6805bd4760631ef254905503598d \ - --hash=sha256:a184243544811e4a50d345838a883733461e67578959ac59964e43cca2c791e7 \ - --hash=sha256:a9e4b426c3702f3cd23b933436487eb34e01e00327fac20c9aebb68ccf34117d \ - --hash=sha256:bb0966e1c50d0ef5bc743312cc730b533491d60585a9a08f897274e57c3f70e0 \ - --hash=sha256:bb8b3c75bd157010459b15222c3fd30577042a7060e29d42dabce449c087f2b3 \ - --hash=sha256:bd5e7d2445d1a958c266bfa5d04c39932dc54093fa391736dbfdb0f1929c1fb3 \ - --hash=sha256:c87d98c7c4a69066fd31701c4e10d178a648c2cac3452e62c6b24dc51f9fcc00 \ - --hash=sha256:d2952396dc604544ea7476b33fe87faedc24d666fb0c2d5ac971a2b9576ab871 \ - --hash=sha256:d8797406499f28b5ef791f339594b0b5fdedf54e203b5066675c406ba69d705c \ - --hash=sha256:d9e9913f7bd69e093b81da4535ce27af842e7bf371cde42d1ae9e9bd382dc0e9 \ - --hash=sha256:e2806553238cd076f0a55bddab37a532b53580e699ed8e5606d0de1f856b5205 \ - --hash=sha256:ebab974b1687509e5c973b5c4b8b146683e101e102e17a86bd196ecaa4d099fc \ - --hash=sha256:ed767bf4ba90104c1216b68111613f0d5926fb3780660ea1198fc469af410e9d \ - --hash=sha256:f7a1fc29803712f80879b0806cb83ab24ce62fc8daf0569f2204a0cfd7f68ed4 - # via - # google-cloud-storage - # google-resumable-media -google-resumable-media==2.7.2 \ - --hash=sha256:3ce7551e9fe6d99e9a126101d2536612bb73486721951e9562fee0f90c6ababa \ - --hash=sha256:5280aed4629f2b60b847b0d42f9857fd4935c11af266744df33d8074cae92fe0 - # via google-cloud-storage -googleapis-common-protos==1.65.0 \ - --hash=sha256:2972e6c496f435b92590fd54045060867f3fe9be2c82ab148fc8885035479a63 \ - --hash=sha256:334a29d07cddc3aa01dee4988f9afd9b2916ee2ff49d6b757155dc0d197852c0 - # via google-api-core -idna==3.10 \ - --hash=sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9 \ - --hash=sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3 - # via requests -importlib-metadata==8.5.0 \ - --hash=sha256:45e54197d28b7a7f1559e60b95e7c567032b602131fbd588f1497f47880aa68b \ - --hash=sha256:71522656f0abace1d072b9e5481a48f07c138e00f079c38c8f883823f9c26bd7 - # via - # -r requirements.in - # keyring - # twine -jaraco-classes==3.4.0 \ - --hash=sha256:47a024b51d0239c0dd8c8540c6c7f484be3b8fcf0b2d85c13825780d3b3f3acd \ - --hash=sha256:f662826b6bed8cace05e7ff873ce0f9283b5c924470fe664fff1c2f00f581790 - # via keyring -jaraco-context==6.0.1 \ - --hash=sha256:9bae4ea555cf0b14938dc0aee7c9f32ed303aa20a3b73e7dc80111628792d1b3 \ - --hash=sha256:f797fc481b490edb305122c9181830a3a5b76d84ef6d1aef2fb9b47ab956f9e4 - # via keyring -jaraco-functools==4.1.0 \ - --hash=sha256:70f7e0e2ae076498e212562325e805204fc092d7b4c17e0e86c959e249701a9d \ - --hash=sha256:ad159f13428bc4acbf5541ad6dec511f91573b90fba04df61dafa2a1231cf649 - # via keyring -jeepney==0.8.0 \ - --hash=sha256:5efe48d255973902f6badc3ce55e2aa6c5c3b3bc642059ef3a91247bcfcc5806 \ - 
--hash=sha256:c0a454ad016ca575060802ee4d590dd912e35c122fa04e70306de3d076cce755 - # via - # keyring - # secretstorage -jinja2==3.1.4 \ - --hash=sha256:4a3aee7acbbe7303aede8e9648d13b8bf88a429282aa6122a993f0ac800cb369 \ - --hash=sha256:bc5dd2abb727a5319567b7a813e6a2e7318c39f4f487cfe6c89c6f9c7d25197d - # via gcp-releasetool -keyring==25.4.1 \ - --hash=sha256:5426f817cf7f6f007ba5ec722b1bcad95a75b27d780343772ad76b17cb47b0bf \ - --hash=sha256:b07ebc55f3e8ed86ac81dd31ef14e81ace9dd9c3d4b5d77a6e9a2016d0d71a1b - # via - # gcp-releasetool - # twine -markdown-it-py==3.0.0 \ - --hash=sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1 \ - --hash=sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb - # via rich -markupsafe==3.0.1 \ - --hash=sha256:0778de17cff1acaeccc3ff30cd99a3fd5c50fc58ad3d6c0e0c4c58092b859396 \ - --hash=sha256:0f84af7e813784feb4d5e4ff7db633aba6c8ca64a833f61d8e4eade234ef0c38 \ - --hash=sha256:17b2aea42a7280db02ac644db1d634ad47dcc96faf38ab304fe26ba2680d359a \ - --hash=sha256:242d6860f1fd9191aef5fae22b51c5c19767f93fb9ead4d21924e0bcb17619d8 \ - --hash=sha256:244dbe463d5fb6d7ce161301a03a6fe744dac9072328ba9fc82289238582697b \ - --hash=sha256:26627785a54a947f6d7336ce5963569b5d75614619e75193bdb4e06e21d447ad \ - --hash=sha256:2a4b34a8d14649315c4bc26bbfa352663eb51d146e35eef231dd739d54a5430a \ - --hash=sha256:2ae99f31f47d849758a687102afdd05bd3d3ff7dbab0a8f1587981b58a76152a \ - --hash=sha256:312387403cd40699ab91d50735ea7a507b788091c416dd007eac54434aee51da \ - --hash=sha256:3341c043c37d78cc5ae6e3e305e988532b072329639007fd408a476642a89fd6 \ - --hash=sha256:33d1c36b90e570ba7785dacd1faaf091203d9942bc036118fab8110a401eb1a8 \ - --hash=sha256:3e683ee4f5d0fa2dde4db77ed8dd8a876686e3fc417655c2ece9a90576905344 \ - --hash=sha256:3ffb4a8e7d46ed96ae48805746755fadd0909fea2306f93d5d8233ba23dda12a \ - --hash=sha256:40621d60d0e58aa573b68ac5e2d6b20d44392878e0bfc159012a5787c4e35bc8 \ - --hash=sha256:40f1e10d51c92859765522cbd79c5c8989f40f0419614bcdc5015e7b6bf97fc5 \ - --hash=sha256:45d42d132cff577c92bfba536aefcfea7e26efb975bd455db4e6602f5c9f45e7 \ - --hash=sha256:48488d999ed50ba8d38c581d67e496f955821dc183883550a6fbc7f1aefdc170 \ - --hash=sha256:4935dd7883f1d50e2ffecca0aa33dc1946a94c8f3fdafb8df5c330e48f71b132 \ - --hash=sha256:4c2d64fdba74ad16138300815cfdc6ab2f4647e23ced81f59e940d7d4a1469d9 \ - --hash=sha256:4c8817557d0de9349109acb38b9dd570b03cc5014e8aabf1cbddc6e81005becd \ - --hash=sha256:4ffaaac913c3f7345579db4f33b0020db693f302ca5137f106060316761beea9 \ - --hash=sha256:5a4cb365cb49b750bdb60b846b0c0bc49ed62e59a76635095a179d440540c346 \ - --hash=sha256:62fada2c942702ef8952754abfc1a9f7658a4d5460fabe95ac7ec2cbe0d02abc \ - --hash=sha256:67c519635a4f64e495c50e3107d9b4075aec33634272b5db1cde839e07367589 \ - --hash=sha256:6a54c43d3ec4cf2a39f4387ad044221c66a376e58c0d0e971d47c475ba79c6b5 \ - --hash=sha256:7044312a928a66a4c2a22644147bc61a199c1709712069a344a3fb5cfcf16915 \ - --hash=sha256:730d86af59e0e43ce277bb83970530dd223bf7f2a838e086b50affa6ec5f9295 \ - --hash=sha256:800100d45176652ded796134277ecb13640c1a537cad3b8b53da45aa96330453 \ - --hash=sha256:80fcbf3add8790caddfab6764bde258b5d09aefbe9169c183f88a7410f0f6dea \ - --hash=sha256:82b5dba6eb1bcc29cc305a18a3c5365d2af06ee71b123216416f7e20d2a84e5b \ - --hash=sha256:852dc840f6d7c985603e60b5deaae1d89c56cb038b577f6b5b8c808c97580f1d \ - --hash=sha256:8ad4ad1429cd4f315f32ef263c1342166695fad76c100c5d979c45d5570ed58b \ - --hash=sha256:8ae369e84466aa70f3154ee23c1451fda10a8ee1b63923ce76667e3077f2b0c4 \ - 
--hash=sha256:93e8248d650e7e9d49e8251f883eed60ecbc0e8ffd6349e18550925e31bd029b \ - --hash=sha256:973a371a55ce9ed333a3a0f8e0bcfae9e0d637711534bcb11e130af2ab9334e7 \ - --hash=sha256:9ba25a71ebf05b9bb0e2ae99f8bc08a07ee8e98c612175087112656ca0f5c8bf \ - --hash=sha256:a10860e00ded1dd0a65b83e717af28845bb7bd16d8ace40fe5531491de76b79f \ - --hash=sha256:a4792d3b3a6dfafefdf8e937f14906a51bd27025a36f4b188728a73382231d91 \ - --hash=sha256:a7420ceda262dbb4b8d839a4ec63d61c261e4e77677ed7c66c99f4e7cb5030dd \ - --hash=sha256:ad91738f14eb8da0ff82f2acd0098b6257621410dcbd4df20aaa5b4233d75a50 \ - --hash=sha256:b6a387d61fe41cdf7ea95b38e9af11cfb1a63499af2759444b99185c4ab33f5b \ - --hash=sha256:b954093679d5750495725ea6f88409946d69cfb25ea7b4c846eef5044194f583 \ - --hash=sha256:bbde71a705f8e9e4c3e9e33db69341d040c827c7afa6789b14c6e16776074f5a \ - --hash=sha256:beeebf760a9c1f4c07ef6a53465e8cfa776ea6a2021eda0d0417ec41043fe984 \ - --hash=sha256:c91b394f7601438ff79a4b93d16be92f216adb57d813a78be4446fe0f6bc2d8c \ - --hash=sha256:c97ff7fedf56d86bae92fa0a646ce1a0ec7509a7578e1ed238731ba13aabcd1c \ - --hash=sha256:cb53e2a99df28eee3b5f4fea166020d3ef9116fdc5764bc5117486e6d1211b25 \ - --hash=sha256:cbf445eb5628981a80f54087f9acdbf84f9b7d862756110d172993b9a5ae81aa \ - --hash=sha256:d06b24c686a34c86c8c1fba923181eae6b10565e4d80bdd7bc1c8e2f11247aa4 \ - --hash=sha256:d98e66a24497637dd31ccab090b34392dddb1f2f811c4b4cd80c230205c074a3 \ - --hash=sha256:db15ce28e1e127a0013dfb8ac243a8e392db8c61eae113337536edb28bdc1f97 \ - --hash=sha256:db842712984e91707437461930e6011e60b39136c7331e971952bb30465bc1a1 \ - --hash=sha256:e24bfe89c6ac4c31792793ad9f861b8f6dc4546ac6dc8f1c9083c7c4f2b335cd \ - --hash=sha256:e81c52638315ff4ac1b533d427f50bc0afc746deb949210bc85f05d4f15fd772 \ - --hash=sha256:e9393357f19954248b00bed7c56f29a25c930593a77630c719653d51e7669c2a \ - --hash=sha256:ee3941769bd2522fe39222206f6dd97ae83c442a94c90f2b7a25d847d40f4729 \ - --hash=sha256:f31ae06f1328595d762c9a2bf29dafd8621c7d3adc130cbb46278079758779ca \ - --hash=sha256:f94190df587738280d544971500b9cafc9b950d32efcb1fba9ac10d84e6aa4e6 \ - --hash=sha256:fa7d686ed9883f3d664d39d5a8e74d3c5f63e603c2e3ff0abcba23eac6542635 \ - --hash=sha256:fb532dd9900381d2e8f48172ddc5a59db4c445a11b9fab40b3b786da40d3b56b \ - --hash=sha256:fe32482b37b4b00c7a52a07211b479653b7fe4f22b2e481b9a9b099d8a430f2f - # via jinja2 -mdurl==0.1.2 \ - --hash=sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8 \ - --hash=sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba - # via markdown-it-py -more-itertools==10.5.0 \ - --hash=sha256:037b0d3203ce90cca8ab1defbbdac29d5f993fc20131f3664dc8d6acfa872aef \ - --hash=sha256:5482bfef7849c25dc3c6dd53a6173ae4795da2a41a80faea6700d9f5846c5da6 - # via - # jaraco-classes - # jaraco-functools -nh3==0.2.18 \ - --hash=sha256:0411beb0589eacb6734f28d5497ca2ed379eafab8ad8c84b31bb5c34072b7164 \ - --hash=sha256:14c5a72e9fe82aea5fe3072116ad4661af5cf8e8ff8fc5ad3450f123e4925e86 \ - --hash=sha256:19aaba96e0f795bd0a6c56291495ff59364f4300d4a39b29a0abc9cb3774a84b \ - --hash=sha256:34c03fa78e328c691f982b7c03d4423bdfd7da69cd707fe572f544cf74ac23ad \ - --hash=sha256:36c95d4b70530b320b365659bb5034341316e6a9b30f0b25fa9c9eff4c27a204 \ - --hash=sha256:3a157ab149e591bb638a55c8c6bcb8cdb559c8b12c13a8affaba6cedfe51713a \ - --hash=sha256:42c64511469005058cd17cc1537578eac40ae9f7200bedcfd1fc1a05f4f8c200 \ - --hash=sha256:5f36b271dae35c465ef5e9090e1fdaba4a60a56f0bb0ba03e0932a66f28b9189 \ - --hash=sha256:6955369e4d9f48f41e3f238a9e60f9410645db7e07435e62c6a9ea6135a4907f \ - 
--hash=sha256:7b7c2a3c9eb1a827d42539aa64091640bd275b81e097cd1d8d82ef91ffa2e811 \ - --hash=sha256:8ce0f819d2f1933953fca255db2471ad58184a60508f03e6285e5114b6254844 \ - --hash=sha256:94a166927e53972a9698af9542ace4e38b9de50c34352b962f4d9a7d4c927af4 \ - --hash=sha256:a7f1b5b2c15866f2db413a3649a8fe4fd7b428ae58be2c0f6bca5eefd53ca2be \ - --hash=sha256:c8b3a1cebcba9b3669ed1a84cc65bf005728d2f0bc1ed2a6594a992e817f3a50 \ - --hash=sha256:de3ceed6e661954871d6cd78b410213bdcb136f79aafe22aa7182e028b8c7307 \ - --hash=sha256:f0eca9ca8628dbb4e916ae2491d72957fdd35f7a5d326b7032a345f111ac07fe - # via readme-renderer -nox==2024.10.9 \ - --hash=sha256:1d36f309a0a2a853e9bccb76bbef6bb118ba92fa92674d15604ca99adeb29eab \ - --hash=sha256:7aa9dc8d1c27e9f45ab046ffd1c3b2c4f7c91755304769df231308849ebded95 - # via -r requirements.in -packaging==24.1 \ - --hash=sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002 \ - --hash=sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124 - # via - # gcp-releasetool - # nox -pkginfo==1.10.0 \ - --hash=sha256:5df73835398d10db79f8eecd5cd86b1f6d29317589ea70796994d49399af6297 \ - --hash=sha256:889a6da2ed7ffc58ab5b900d888ddce90bce912f2d2de1dc1c26f4cb9fe65097 - # via twine -platformdirs==4.3.6 \ - --hash=sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907 \ - --hash=sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb - # via virtualenv -proto-plus==1.24.0 \ - --hash=sha256:30b72a5ecafe4406b0d339db35b56c4059064e69227b8c3bda7462397f966445 \ - --hash=sha256:402576830425e5f6ce4c2a6702400ac79897dab0b4343821aa5188b0fab81a12 - # via google-api-core -protobuf==5.28.2 \ - --hash=sha256:2c69461a7fcc8e24be697624c09a839976d82ae75062b11a0972e41fd2cd9132 \ - --hash=sha256:35cfcb15f213449af7ff6198d6eb5f739c37d7e4f1c09b5d0641babf2cc0c68f \ - --hash=sha256:52235802093bd8a2811abbe8bf0ab9c5f54cca0a751fdd3f6ac2a21438bffece \ - --hash=sha256:59379674ff119717404f7454647913787034f03fe7049cbef1d74a97bb4593f0 \ - --hash=sha256:5e8a95246d581eef20471b5d5ba010d55f66740942b95ba9b872d918c459452f \ - --hash=sha256:87317e9bcda04a32f2ee82089a204d3a2f0d3c8aeed16568c7daf4756e4f1fe0 \ - --hash=sha256:8ddc60bf374785fb7cb12510b267f59067fa10087325b8e1855b898a0d81d276 \ - --hash=sha256:a8b9403fc70764b08d2f593ce44f1d2920c5077bf7d311fefec999f8c40f78b7 \ - --hash=sha256:c0ea0123dac3399a2eeb1a1443d82b7afc9ff40241433296769f7da42d142ec3 \ - --hash=sha256:ca53faf29896c526863366a52a8f4d88e69cd04ec9571ed6082fa117fac3ab36 \ - --hash=sha256:eeea10f3dc0ac7e6b4933d32db20662902b4ab81bf28df12218aa389e9c2102d - # via - # gcp-docuploader - # gcp-releasetool - # google-api-core - # googleapis-common-protos - # proto-plus -pyasn1==0.6.1 \ - --hash=sha256:0d632f46f2ba09143da3a8afe9e33fb6f92fa2320ab7e886e2d0f7672af84629 \ - --hash=sha256:6f580d2bdd84365380830acf45550f2511469f673cb4a5ae3857a3170128b034 - # via - # pyasn1-modules - # rsa -pyasn1-modules==0.4.1 \ - --hash=sha256:49bfa96b45a292b711e986f222502c1c9a5e1f4e568fc30e2574a6c7d07838fd \ - --hash=sha256:c28e2dbf9c06ad61c71a075c7e0f9fd0f1b0bb2d2ad4377f240d33ac2ab60a7c - # via google-auth -pycparser==2.22 \ - --hash=sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6 \ - --hash=sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc - # via cffi -pygments==2.18.0 \ - --hash=sha256:786ff802f32e91311bff3889f6e9a86e81505fe99f2735bb6d60ae0c5004f199 \ - --hash=sha256:b8e6aca0523f3ab76fee51799c488e38782ac06eafcf95e7ba832985c8e7b13a - # via - # readme-renderer - # rich -pyjwt==2.9.0 \ - 
--hash=sha256:3b02fb0f44517787776cf48f2ae25d8e14f300e6d7545a4315cee571a415e850 \ - --hash=sha256:7e1e5b56cc735432a7369cbfa0efe50fa113ebecdc04ae6922deba8b84582d0c - # via gcp-releasetool -pyperclip==1.9.0 \ - --hash=sha256:b7de0142ddc81bfc5c7507eea19da920b92252b548b96186caf94a5e2527d310 - # via gcp-releasetool -python-dateutil==2.9.0.post0 \ - --hash=sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3 \ - --hash=sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427 - # via gcp-releasetool -readme-renderer==44.0 \ - --hash=sha256:2fbca89b81a08526aadf1357a8c2ae889ec05fb03f5da67f9769c9a592166151 \ - --hash=sha256:8712034eabbfa6805cacf1402b4eeb2a73028f72d1166d6f5cb7f9c047c5d1e1 - # via twine -requests==2.32.3 \ - --hash=sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760 \ - --hash=sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6 - # via - # gcp-releasetool - # google-api-core - # google-cloud-storage - # requests-toolbelt - # twine -requests-toolbelt==1.0.0 \ - --hash=sha256:7681a0a3d047012b5bdc0ee37d7f8f07ebe76ab08caeccfc3921ce23c88d5bc6 \ - --hash=sha256:cccfdd665f0a24fcf4726e690f65639d272bb0637b9b92dfd91a5568ccf6bd06 - # via twine -rfc3986==2.0.0 \ - --hash=sha256:50b1502b60e289cb37883f3dfd34532b8873c7de9f49bb546641ce9cbd256ebd \ - --hash=sha256:97aacf9dbd4bfd829baad6e6309fa6573aaf1be3f6fa735c8ab05e46cecb261c - # via twine -rich==13.9.2 \ - --hash=sha256:51a2c62057461aaf7152b4d611168f93a9fc73068f8ded2790f29fe2b5366d0c \ - --hash=sha256:8c82a3d3f8dcfe9e734771313e606b39d8247bb6b826e196f4914b333b743cf1 - # via twine -rsa==4.9 \ - --hash=sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7 \ - --hash=sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21 - # via google-auth -secretstorage==3.3.3 \ - --hash=sha256:2403533ef369eca6d2ba81718576c5e0f564d5cca1b58f73a8b23e7d4eeebd77 \ - --hash=sha256:f356e6628222568e3af06f2eba8df495efa13b3b63081dafd4f7d9a7b7bc9f99 - # via keyring -six==1.16.0 \ - --hash=sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926 \ - --hash=sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254 - # via - # gcp-docuploader - # python-dateutil -tomli==2.0.2 \ - --hash=sha256:2ebe24485c53d303f690b0ec092806a085f07af5a5aa1464f3931eec36caaa38 \ - --hash=sha256:d46d457a85337051c36524bc5349dd91b1877838e2979ac5ced3e710ed8a60ed - # via nox -twine==5.1.1 \ - --hash=sha256:215dbe7b4b94c2c50a7315c0275d2258399280fbb7d04182c7e55e24b5f93997 \ - --hash=sha256:9aa0825139c02b3434d913545c7b847a21c835e11597f5255842d457da2322db - # via -r requirements.in -typing-extensions==4.12.2 \ - --hash=sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d \ - --hash=sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8 - # via - # -r requirements.in - # rich -urllib3==2.2.3 \ - --hash=sha256:ca899ca043dcb1bafa3e262d73aa25c465bfb49e0bd9dd5d59f1d0acba2f8fac \ - --hash=sha256:e7d814a81dad81e6caf2ec9fdedb284ecc9c73076b62654547cc64ccdcae26e9 - # via - # requests - # twine -virtualenv==20.26.6 \ - --hash=sha256:280aede09a2a5c317e409a00102e7077c6432c5a38f0ef938e643805a7ad2c48 \ - --hash=sha256:7345cc5b25405607a624d8418154577459c3e0277f5466dd79c49d5e492995f2 - # via nox -wheel==0.44.0 \ - --hash=sha256:2376a90c98cc337d18623527a97c31797bd02bad0033d41547043a1cbfbe448f \ - --hash=sha256:a29c3f2817e95ab89aa4660681ad547c0e9547f20e75b0562fe7723c9a2a9d49 - # via -r requirements.in -zipp==3.20.2 \ - 
--hash=sha256:a817ac80d6cf4b23bf7f2828b7cabf326f15a001bea8b1f9b49631780ba28350 \ - --hash=sha256:bc9eb26f4506fda01b81bcde0ca78103b6e62f991b381fec825435c836edbc29 - # via importlib-metadata - -# The following packages are considered to be unsafe in a requirements file: -setuptools==75.1.0 \ - --hash=sha256:35ab7fd3bcd95e6b7fd704e4a1539513edad446c097797f2985e0e4b960772f2 \ - --hash=sha256:d59a21b17a275fb872a9c3dae73963160ae079f1049ed956880cd7c09b120538 - # via -r requirements.in diff --git a/.release-please-manifest.json b/.release-please-manifest.json index 8c18487fbed5..fa9947f7ddc9 100644 --- a/.release-please-manifest.json +++ b/.release-please-manifest.json @@ -1,197 +1,202 @@ { - "packages/google-ads-admanager": "0.2.3", - "packages/google-ads-marketingplatform-admin": "0.1.3", - "packages/google-ai-generativelanguage": "0.6.14", - "packages/google-analytics-admin": "0.23.3", - "packages/google-analytics-data": "0.18.16", - "packages/google-apps-card": "0.1.5", - "packages/google-apps-chat": "0.2.0", - "packages/google-apps-events-subscriptions": "0.1.5", - "packages/google-apps-meet": "0.1.11", - "packages/google-apps-script-type": "0.3.11", - "packages/google-area120-tables": "0.11.14", - "packages/google-cloud-access-approval": "1.15.0", - "packages/google-cloud-advisorynotifications": "0.3.13", - "packages/google-cloud-alloydb": "0.4.1", - "packages/google-cloud-alloydb-connectors": "0.1.7", - "packages/google-cloud-api-gateway": "1.11.0", - "packages/google-cloud-api-keys": "0.5.14", - "packages/google-cloud-apigee-connect": "1.11.0", - "packages/google-cloud-apigee-registry": "0.6.14", - "packages/google-cloud-apihub": "0.2.3", - "packages/google-cloud-appengine-admin": "1.13.0", - "packages/google-cloud-appengine-logging": "1.5.0", - "packages/google-cloud-apphub": "0.1.5", - "packages/google-cloud-artifact-registry": "1.14.0", - "packages/google-cloud-asset": "3.28.0", - "packages/google-cloud-assured-workloads": "1.14.0", - "packages/google-cloud-automl": "2.15.0", - "packages/google-cloud-backupdr": "0.1.7", - "packages/google-cloud-bare-metal-solution": "1.9.0", - "packages/google-cloud-batch": "0.17.32", - "packages/google-cloud-beyondcorp-appconnections": "0.4.14", - "packages/google-cloud-beyondcorp-appconnectors": "0.4.14", - "packages/google-cloud-beyondcorp-appgateways": "0.4.14", - "packages/google-cloud-beyondcorp-clientconnectorservices": "0.4.14", - "packages/google-cloud-beyondcorp-clientgateways": "0.4.13", - "packages/google-cloud-bigquery-analyticshub": "0.4.14", - "packages/google-cloud-bigquery-biglake": "0.4.12", - "packages/google-cloud-bigquery-connection": "1.17.0", - "packages/google-cloud-bigquery-data-exchange": "0.5.16", - "packages/google-cloud-bigquery-datapolicies": "0.6.11", - "packages/google-cloud-bigquery-datatransfer": "3.18.0", - "packages/google-cloud-bigquery-logging": "1.5.0", - "packages/google-cloud-bigquery-migration": "0.11.12", - "packages/google-cloud-bigquery-reservation": "1.15.0", - "packages/google-cloud-billing": "1.15.0", - "packages/google-cloud-billing-budgets": "1.16.0", - "packages/google-cloud-binary-authorization": "1.12.0", - "packages/google-cloud-build": "3.28.0", - "packages/google-cloud-certificate-manager": "1.9.0", - "packages/google-cloud-channel": "1.21.0", - "packages/google-cloud-cloudcontrolspartner": "0.2.3", - "packages/google-cloud-commerce-consumer-procurement": "0.1.11", - "packages/google-cloud-common": "1.4.0", - "packages/google-cloud-compute": "1.23.0", - "packages/google-cloud-confidentialcomputing": 
"0.4.14", - "packages/google-cloud-config": "0.1.14", - "packages/google-cloud-contact-center-insights": "1.21.0", - "packages/google-cloud-container": "2.55.0", - "packages/google-cloud-containeranalysis": "2.16.0", - "packages/google-cloud-contentwarehouse": "0.7.12", - "packages/google-cloud-data-fusion": "1.12.0", - "packages/google-cloud-data-qna": "0.10.14", - "packages/google-cloud-datacatalog": "3.24.0", - "packages/google-cloud-datacatalog-lineage": "0.3.11", - "packages/google-cloud-dataflow-client": "0.8.15", - "packages/google-cloud-dataform": "0.5.14", - "packages/google-cloud-datalabeling": "1.12.0", - "packages/google-cloud-dataplex": "2.5.0", - "packages/google-cloud-dataproc": "5.16.0", - "packages/google-cloud-dataproc-metastore": "1.17.0", - "packages/google-cloud-datastream": "1.11.0", - "packages/google-cloud-deploy": "2.5.0", - "packages/google-cloud-developerconnect": "0.1.6", - "packages/google-cloud-dialogflow": "2.37.0", - "packages/google-cloud-dialogflow-cx": "1.38.0", - "packages/google-cloud-discoveryengine": "0.13.5", - "packages/google-cloud-dlp": "3.26.0", - "packages/google-cloud-dms": "1.11.0", - "packages/google-cloud-documentai": "3.1.0", - "packages/google-cloud-domains": "1.9.0", - "packages/google-cloud-edgecontainer": "0.5.15", - "packages/google-cloud-edgenetwork": "0.1.14", - "packages/google-cloud-enterpriseknowledgegraph": "0.3.14", - "packages/google-cloud-essential-contacts": "1.9.0", - "packages/google-cloud-eventarc": "1.14.0", - "packages/google-cloud-eventarc-publishing": "0.6.15", - "packages/google-cloud-filestore": "1.11.0", - "packages/google-cloud-functions": "1.19.0", - "packages/google-cloud-gdchardwaremanagement": "0.1.9", - "packages/google-cloud-gke-backup": "0.5.14", - "packages/google-cloud-gke-connect-gateway": "0.10.1", - "packages/google-cloud-gke-hub": "1.16.0", - "packages/google-cloud-gke-multicloud": "0.6.16", - "packages/google-cloud-gsuiteaddons": "0.3.13", - "packages/google-cloud-iam": "2.17.0", + "packages/google-ads-admanager": "0.2.4", + "packages/google-ads-marketingplatform-admin": "0.1.4", + "packages/google-ai-generativelanguage": "0.6.16", + "packages/google-analytics-admin": "0.23.4", + "packages/google-analytics-data": "0.18.17", + "packages/google-apps-card": "0.1.6", + "packages/google-apps-chat": "0.2.2", + "packages/google-apps-events-subscriptions": "0.1.6", + "packages/google-apps-meet": "0.1.13", + "packages/google-apps-script-type": "0.3.13", + "packages/google-area120-tables": "0.11.15", + "packages/google-cloud-access-approval": "1.16.0", + "packages/google-cloud-access-context-manager": "0.2.1", + "packages/google-cloud-advisorynotifications": "0.3.14", + "packages/google-cloud-alloydb": "0.4.2", + "packages/google-cloud-alloydb-connectors": "0.1.8", + "packages/google-cloud-api-gateway": "1.12.0", + "packages/google-cloud-api-keys": "0.5.15", + "packages/google-cloud-apigee-connect": "1.12.0", + "packages/google-cloud-apigee-registry": "0.6.15", + "packages/google-cloud-apihub": "0.2.4", + "packages/google-cloud-appengine-admin": "1.14.0", + "packages/google-cloud-appengine-logging": "1.6.0", + "packages/google-cloud-apphub": "0.1.6", + "packages/google-cloud-artifact-registry": "1.15.0", + "packages/google-cloud-asset": "3.29.0", + "packages/google-cloud-assured-workloads": "1.15.0", + "packages/google-cloud-audit-log": "0.3.0", + "packages/google-cloud-automl": "2.16.0", + "packages/google-cloud-backupdr": "0.2.1", + "packages/google-cloud-bare-metal-solution": "1.10.0", + 
"packages/google-cloud-batch": "0.17.34", + "packages/google-cloud-beyondcorp-appconnections": "0.4.15", + "packages/google-cloud-beyondcorp-appconnectors": "0.4.15", + "packages/google-cloud-beyondcorp-appgateways": "0.4.15", + "packages/google-cloud-beyondcorp-clientconnectorservices": "0.4.15", + "packages/google-cloud-beyondcorp-clientgateways": "0.4.14", + "packages/google-cloud-bigquery-analyticshub": "0.4.15", + "packages/google-cloud-bigquery-biglake": "0.4.13", + "packages/google-cloud-bigquery-connection": "1.18.0", + "packages/google-cloud-bigquery-data-exchange": "0.5.17", + "packages/google-cloud-bigquery-datapolicies": "0.6.12", + "packages/google-cloud-bigquery-datatransfer": "3.19.0", + "packages/google-cloud-bigquery-logging": "1.6.0", + "packages/google-cloud-bigquery-migration": "0.11.13", + "packages/google-cloud-bigquery-reservation": "1.16.0", + "packages/google-cloud-billing": "1.16.0", + "packages/google-cloud-billing-budgets": "1.17.0", + "packages/google-cloud-binary-authorization": "1.13.0", + "packages/google-cloud-build": "3.30.0", + "packages/google-cloud-certificate-manager": "1.10.0", + "packages/google-cloud-channel": "1.22.0", + "packages/google-cloud-cloudcontrolspartner": "0.2.5", + "packages/google-cloud-commerce-consumer-procurement": "0.1.12", + "packages/google-cloud-common": "1.5.0", + "packages/google-cloud-compute": "1.25.0", + "packages/google-cloud-confidentialcomputing": "0.4.15", + "packages/google-cloud-config": "0.1.15", + "packages/google-cloud-contact-center-insights": "1.23.0", + "packages/google-cloud-container": "2.56.0", + "packages/google-cloud-containeranalysis": "2.17.0", + "packages/google-cloud-contentwarehouse": "0.7.13", + "packages/google-cloud-data-fusion": "1.13.0", + "packages/google-cloud-data-qna": "0.10.15", + "packages/google-cloud-datacatalog": "3.25.0", + "packages/google-cloud-datacatalog-lineage": "0.3.12", + "packages/google-cloud-dataflow-client": "0.8.16", + "packages/google-cloud-dataform": "0.5.15", + "packages/google-cloud-datalabeling": "1.13.0", + "packages/google-cloud-dataplex": "2.7.0", + "packages/google-cloud-dataproc": "5.17.0", + "packages/google-cloud-dataproc-metastore": "1.18.0", + "packages/google-cloud-datastream": "1.13.0", + "packages/google-cloud-deploy": "2.6.0", + "packages/google-cloud-developerconnect": "0.1.7", + "packages/google-cloud-dialogflow": "2.39.0", + "packages/google-cloud-dialogflow-cx": "1.39.0", + "packages/google-cloud-discoveryengine": "0.13.6", + "packages/google-cloud-dlp": "3.27.0", + "packages/google-cloud-dms": "1.12.0", + "packages/google-cloud-documentai": "3.2.0", + "packages/google-cloud-domains": "1.10.0", + "packages/google-cloud-edgecontainer": "0.5.16", + "packages/google-cloud-edgenetwork": "0.1.15", + "packages/google-cloud-enterpriseknowledgegraph": "0.3.15", + "packages/google-cloud-essential-contacts": "1.10.0", + "packages/google-cloud-eventarc": "1.15.0", + "packages/google-cloud-eventarc-publishing": "0.6.16", + "packages/google-cloud-filestore": "1.12.0", + "packages/google-cloud-functions": "1.20.0", + "packages/google-cloud-gdchardwaremanagement": "0.1.10", + "packages/google-cloud-gke-backup": "0.5.15", + "packages/google-cloud-gke-connect-gateway": "0.10.2", + "packages/google-cloud-gke-hub": "1.17.0", + "packages/google-cloud-gke-multicloud": "0.6.19", + "packages/google-cloud-gsuiteaddons": "0.3.15", + "packages/google-cloud-iam": "2.18.0", "packages/google-cloud-iam-logging": "1.4.0", - "packages/google-cloud-iap": "1.15.0", - 
"packages/google-cloud-ids": "1.9.0", - "packages/google-cloud-kms": "3.2.0", - "packages/google-cloud-kms-inventory": "0.2.12", - "packages/google-cloud-language": "2.16.0", - "packages/google-cloud-life-sciences": "0.9.15", - "packages/google-cloud-managed-identities": "1.11.0", - "packages/google-cloud-managedkafka": "0.1.6", - "packages/google-cloud-media-translation": "0.11.14", - "packages/google-cloud-memcache": "1.11.0", - "packages/google-cloud-memorystore": "0.1.0", - "packages/google-cloud-migrationcenter": "0.1.12", - "packages/google-cloud-monitoring": "2.24.0", - "packages/google-cloud-monitoring-dashboards": "2.17.0", - "packages/google-cloud-monitoring-metrics-scopes": "1.8.0", - "packages/google-cloud-netapp": "0.3.17", - "packages/google-cloud-network-connectivity": "2.6.0", - "packages/google-cloud-network-management": "1.23.0", - "packages/google-cloud-network-security": "0.9.14", - "packages/google-cloud-network-services": "0.5.17", - "packages/google-cloud-notebooks": "1.12.0", - "packages/google-cloud-optimization": "1.10.0", - "packages/google-cloud-oracledatabase": "0.1.3", - "packages/google-cloud-orchestration-airflow": "1.16.0", - "packages/google-cloud-os-config": "1.19.0", - "packages/google-cloud-os-login": "2.16.0", - "packages/google-cloud-parallelstore": "0.2.7", + "packages/google-cloud-iap": "1.16.0", + "packages/google-cloud-ids": "1.10.0", + "packages/google-cloud-kms": "3.3.0", + "packages/google-cloud-kms-inventory": "0.2.13", + "packages/google-cloud-language": "2.17.0", + "packages/google-cloud-life-sciences": "0.9.16", + "packages/google-cloud-managed-identities": "1.12.0", + "packages/google-cloud-managedkafka": "0.1.7", + "packages/google-cloud-media-translation": "0.11.15", + "packages/google-cloud-memcache": "1.12.0", + "packages/google-cloud-memorystore": "0.1.1", + "packages/google-cloud-migrationcenter": "0.1.13", + "packages/google-cloud-modelarmor": "0.1.1", + "packages/google-cloud-monitoring": "2.27.0", + "packages/google-cloud-monitoring-dashboards": "2.18.0", + "packages/google-cloud-monitoring-metrics-scopes": "1.9.0", + "packages/google-cloud-netapp": "0.3.19", + "packages/google-cloud-network-connectivity": "2.7.0", + "packages/google-cloud-network-management": "1.25.0", + "packages/google-cloud-network-security": "0.9.15", + "packages/google-cloud-network-services": "0.5.18", + "packages/google-cloud-notebooks": "1.13.0", + "packages/google-cloud-optimization": "1.11.0", + "packages/google-cloud-oracledatabase": "0.1.5", + "packages/google-cloud-orchestration-airflow": "1.17.0", + "packages/google-cloud-org-policy": "1.13.0", + "packages/google-cloud-os-config": "1.20.0", + "packages/google-cloud-os-login": "2.17.0", + "packages/google-cloud-parallelstore": "0.2.10", + "packages/google-cloud-parametermanager": "0.1.0", "packages/google-cloud-phishing-protection": "1.13.0", "packages/google-cloud-policy-troubleshooter": "1.13.0", "packages/google-cloud-policysimulator": "0.1.11", "packages/google-cloud-policytroubleshooter-iam": "0.1.10", "packages/google-cloud-private-ca": "1.14.0", - "packages/google-cloud-private-catalog": "0.9.14", + "packages/google-cloud-private-catalog": "0.9.15", "packages/google-cloud-privilegedaccessmanager": "0.1.5", "packages/google-cloud-public-ca": "0.3.15", - "packages/google-cloud-quotas": "0.1.14", + "packages/google-cloud-quotas": "0.1.15", "packages/google-cloud-rapidmigrationassessment": "0.1.12", - "packages/google-cloud-recaptcha-enterprise": "1.26.0", + 
"packages/google-cloud-recaptcha-enterprise": "1.26.1", "packages/google-cloud-recommendations-ai": "0.10.15", "packages/google-cloud-recommender": "2.17.0", "packages/google-cloud-redis": "2.17.0", - "packages/google-cloud-redis-cluster": "0.1.11", + "packages/google-cloud-redis-cluster": "0.1.13", "packages/google-cloud-resource-manager": "1.14.0", "packages/google-cloud-resource-settings": "1.11.0", "packages/google-cloud-retail": "1.24.0", - "packages/google-cloud-run": "0.10.14", - "packages/google-cloud-scheduler": "2.15.0", - "packages/google-cloud-secret-manager": "2.22.0", - "packages/google-cloud-securesourcemanager": "0.1.12", - "packages/google-cloud-securitycenter": "1.36.0", - "packages/google-cloud-securitycentermanagement": "0.1.18", - "packages/google-cloud-service-control": "1.14.0", - "packages/google-cloud-service-directory": "1.13.0", - "packages/google-cloud-service-management": "1.12.0", - "packages/google-cloud-service-usage": "1.12.0", - "packages/google-cloud-servicehealth": "0.1.9", - "packages/google-cloud-shell": "1.11.0", - "packages/google-cloud-source-context": "1.6.0", - "packages/google-cloud-speech": "2.29.0", - "packages/google-cloud-storage-control": "1.2.0", - "packages/google-cloud-storage-transfer": "1.14.0", - "packages/google-cloud-storageinsights": "0.1.13", - "packages/google-cloud-support": "0.1.12", - "packages/google-cloud-talent": "2.15.0", - "packages/google-cloud-tasks": "2.18.0", - "packages/google-cloud-telcoautomation": "0.2.8", - "packages/google-cloud-texttospeech": "2.22.0", - "packages/google-cloud-tpu": "1.20.0", - "packages/google-cloud-trace": "1.15.0", - "packages/google-cloud-translate": "3.19.0", - "packages/google-cloud-video-live-stream": "1.10.0", - "packages/google-cloud-video-stitcher": "0.7.15", - "packages/google-cloud-video-transcoder": "1.14.0", - "packages/google-cloud-videointelligence": "2.15.0", - "packages/google-cloud-vision": "3.9.0", - "packages/google-cloud-visionai": "0.1.6", - "packages/google-cloud-vm-migration": "1.10.0", - "packages/google-cloud-vmwareengine": "1.7.0", - "packages/google-cloud-vpc-access": "1.12.0", - "packages/google-cloud-webrisk": "1.16.0", - "packages/google-cloud-websecurityscanner": "1.16.0", - "packages/google-cloud-workflows": "1.16.0", - "packages/google-cloud-workstations": "0.5.11", - "packages/google-geo-type": "0.3.10", - "packages/google-maps-addressvalidation": "0.3.16", - "packages/google-maps-areainsights": "0.1.3", - "packages/google-maps-fleetengine": "0.2.5", - "packages/google-maps-fleetengine-delivery": "0.2.7", + "packages/google-cloud-run": "0.10.15", + "packages/google-cloud-scheduler": "2.15.1", + "packages/google-cloud-secret-manager": "2.23.0", + "packages/google-cloud-securesourcemanager": "0.1.13", + "packages/google-cloud-securitycenter": "1.37.0", + "packages/google-cloud-securitycentermanagement": "0.1.19", + "packages/google-cloud-service-control": "1.15.0", + "packages/google-cloud-service-directory": "1.14.0", + "packages/google-cloud-service-management": "1.13.0", + "packages/google-cloud-service-usage": "1.13.0", + "packages/google-cloud-servicehealth": "0.1.11", + "packages/google-cloud-shell": "1.12.0", + "packages/google-cloud-source-context": "1.7.0", + "packages/google-cloud-speech": "2.31.0", + "packages/google-cloud-storage-control": "1.3.0", + "packages/google-cloud-storage-transfer": "1.16.0", + "packages/google-cloud-storageinsights": "0.1.14", + "packages/google-cloud-support": "0.1.13", + "packages/google-cloud-talent": "2.17.0", + 
"packages/google-cloud-tasks": "2.19.0", + "packages/google-cloud-telcoautomation": "0.2.9", + "packages/google-cloud-texttospeech": "2.25.0", + "packages/google-cloud-tpu": "1.23.0", + "packages/google-cloud-trace": "1.16.0", + "packages/google-cloud-translate": "3.20.0", + "packages/google-cloud-video-live-stream": "1.11.0", + "packages/google-cloud-video-stitcher": "0.7.16", + "packages/google-cloud-video-transcoder": "1.15.0", + "packages/google-cloud-videointelligence": "2.16.0", + "packages/google-cloud-vision": "3.10.0", + "packages/google-cloud-visionai": "0.1.7", + "packages/google-cloud-vm-migration": "1.11.0", + "packages/google-cloud-vmwareengine": "1.8.0", + "packages/google-cloud-vpc-access": "1.13.0", + "packages/google-cloud-webrisk": "1.17.0", + "packages/google-cloud-websecurityscanner": "1.17.0", + "packages/google-cloud-workflows": "1.17.0", + "packages/google-cloud-workstations": "0.5.12", + "packages/google-geo-type": "0.3.11", + "packages/google-maps-addressvalidation": "0.3.17", + "packages/google-maps-areainsights": "0.1.4", + "packages/google-maps-fleetengine": "0.2.6", + "packages/google-maps-fleetengine-delivery": "0.2.8", "packages/google-maps-mapsplatformdatasets": "0.4.5", - "packages/google-maps-places": "0.1.21", + "packages/google-maps-places": "0.1.23", "packages/google-maps-routeoptimization": "0.1.7", "packages/google-maps-routing": "0.6.13", "packages/google-maps-solar": "0.1.5", - "packages/google-shopping-css": "0.1.11", + "packages/google-shopping-css": "0.1.12", "packages/google-shopping-merchant-accounts": "0.2.3", "packages/google-shopping-merchant-conversions": "0.1.6", - "packages/google-shopping-merchant-datasources": "0.1.6", + "packages/google-shopping-merchant-datasources": "0.1.7", "packages/google-shopping-merchant-inventories": "0.1.12", "packages/google-shopping-merchant-lfp": "0.1.6", "packages/google-shopping-merchant-notifications": "0.1.5", @@ -201,5 +206,7 @@ "packages/google-shopping-merchant-reports": "0.1.12", "packages/google-shopping-merchant-reviews": "0.1.0", "packages/google-shopping-type": "0.1.9", - "packages/grafeas": "1.13.0" + "packages/googleapis-common-protos": "1.67.0", + "packages/grafeas": "1.13.0", + "packages/grpc-google-iam-v1": "0.14.0" } diff --git a/CHANGELOG.md b/CHANGELOG.md index af7095f03300..8f82b7120196 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,205 +2,214 @@ Please refer to each API's `CHANGELOG.md` file under the `packages/` directory Changelogs ----- -- [google-ads-admanager==0.2.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-ads-admanager/CHANGELOG.md) -- [google-ads-marketingplatform-admin==0.1.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-ads-marketingplatform-admin/CHANGELOG.md) -- [google-ai-generativelanguage==0.6.13](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-ai-generativelanguage/CHANGELOG.md) -- [google-analytics-admin==0.23.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-analytics-admin/CHANGELOG.md) -- [google-analytics-data==0.18.15](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-analytics-data/CHANGELOG.md) +- [google-ads-admanager==0.2.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-ads-admanager/CHANGELOG.md) +- [google-ads-marketingplatform-admin==0.1.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-ads-marketingplatform-admin/CHANGELOG.md) +- 
[google-ai-generativelanguage==0.6.15](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-ai-generativelanguage/CHANGELOG.md) +- [google-analytics-admin==0.23.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-analytics-admin/CHANGELOG.md) +- [google-analytics-data==0.18.16](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-analytics-data/CHANGELOG.md) - [google-apps-card==0.1.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-apps-card/CHANGELOG.md) -- [google-apps-chat==0.1.14](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-apps-chat/CHANGELOG.md) -- [google-apps-events-subscriptions==0.1.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-apps-events-subscriptions/CHANGELOG.md) -- [google-apps-meet==0.1.10](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-apps-meet/CHANGELOG.md) -- [google-apps-script-type==0.3.11](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-apps-script-type/CHANGELOG.md) -- [google-area120-tables==0.11.13](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-area120-tables/CHANGELOG.md) -- [google-cloud-access-approval==1.14.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-access-approval/CHANGELOG.md) -- [google-cloud-advisorynotifications==0.3.12](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-advisorynotifications/CHANGELOG.md) +- [google-apps-chat==0.2.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-apps-chat/CHANGELOG.md) +- [google-apps-events-subscriptions==0.1.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-apps-events-subscriptions/CHANGELOG.md) +- [google-apps-meet==0.1.12](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-apps-meet/CHANGELOG.md) +- [google-apps-script-type==0.3.12](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-apps-script-type/CHANGELOG.md) +- [google-area120-tables==0.11.14](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-area120-tables/CHANGELOG.md) +- [google-cloud-access-approval==1.15.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-access-approval/CHANGELOG.md) +- [google-cloud-access-context-manager==0.2.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-access-context-manager/CHANGELOG.md) +- [google-cloud-advisorynotifications==0.3.13](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-advisorynotifications/CHANGELOG.md) - [google-cloud-alloydb-connectors==0.1.7](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-alloydb-connectors/CHANGELOG.md) -- [google-cloud-alloydb==0.4.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-alloydb/CHANGELOG.md) -- [google-cloud-api-gateway==1.10.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-api-gateway/CHANGELOG.md) -- [google-cloud-api-keys==0.5.13](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-api-keys/CHANGELOG.md) -- [google-cloud-apigee-connect==1.10.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-apigee-connect/CHANGELOG.md) -- 
[google-cloud-apigee-registry==0.6.13](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-apigee-registry/CHANGELOG.md) -- [google-cloud-apihub==0.2.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-apihub/CHANGELOG.md) -- [google-cloud-appengine-admin==1.12.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-appengine-admin/CHANGELOG.md) +- [google-cloud-alloydb==0.4.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-alloydb/CHANGELOG.md) +- [google-cloud-api-gateway==1.11.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-api-gateway/CHANGELOG.md) +- [google-cloud-api-keys==0.5.14](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-api-keys/CHANGELOG.md) +- [google-cloud-apigee-connect==1.11.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-apigee-connect/CHANGELOG.md) +- [google-cloud-apigee-registry==0.6.14](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-apigee-registry/CHANGELOG.md) +- [google-cloud-apihub==0.2.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-apihub/CHANGELOG.md) +- [google-cloud-appengine-admin==1.13.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-appengine-admin/CHANGELOG.md) - [google-cloud-appengine-logging==1.5.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-appengine-logging/CHANGELOG.md) -- [google-cloud-apphub==0.1.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-apphub/CHANGELOG.md) -- [google-cloud-artifact-registry==1.13.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-artifact-registry/CHANGELOG.md) -- [google-cloud-asset==3.27.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-asset/CHANGELOG.md) -- [google-cloud-assured-workloads==1.13.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-assured-workloads/CHANGELOG.md) -- [google-cloud-automl==2.14.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-automl/CHANGELOG.md) -- [google-cloud-backupdr==0.1.6](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-backupdr/CHANGELOG.md) -- [google-cloud-bare-metal-solution==1.8.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bare-metal-solution/CHANGELOG.md) -- [google-cloud-batch==0.17.31](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-batch/CHANGELOG.md) -- [google-cloud-beyondcorp-appconnections==0.4.13](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-beyondcorp-appconnections/CHANGELOG.md) -- [google-cloud-beyondcorp-appconnectors==0.4.13](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-beyondcorp-appconnectors/CHANGELOG.md) -- [google-cloud-beyondcorp-appgateways==0.4.13](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-beyondcorp-appgateways/CHANGELOG.md) -- [google-cloud-beyondcorp-clientconnectorservices==0.4.13](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-beyondcorp-clientconnectorservices/CHANGELOG.md) -- 
[google-cloud-beyondcorp-clientgateways==0.4.12](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-beyondcorp-clientgateways/CHANGELOG.md) -- [google-cloud-bigquery-analyticshub==0.4.13](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bigquery-analyticshub/CHANGELOG.md) -- [google-cloud-bigquery-biglake==0.4.11](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bigquery-biglake/CHANGELOG.md) -- [google-cloud-bigquery-connection==1.16.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bigquery-connection/CHANGELOG.md) -- [google-cloud-bigquery-data-exchange==0.5.15](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bigquery-data-exchange/CHANGELOG.md) -- [google-cloud-bigquery-datapolicies==0.6.10](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bigquery-datapolicies/CHANGELOG.md) -- [google-cloud-bigquery-datatransfer==3.17.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bigquery-datatransfer/CHANGELOG.md) +- [google-cloud-apphub==0.1.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-apphub/CHANGELOG.md) +- [google-cloud-artifact-registry==1.14.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-artifact-registry/CHANGELOG.md) +- [google-cloud-asset==3.28.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-asset/CHANGELOG.md) +- [google-cloud-assured-workloads==1.14.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-assured-workloads/CHANGELOG.md) +- [google-cloud-audit-log==0.3.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-audit-log/CHANGELOG.md) +- [google-cloud-automl==2.15.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-automl/CHANGELOG.md) +- [google-cloud-backupdr==0.2.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-backupdr/CHANGELOG.md) +- [google-cloud-bare-metal-solution==1.9.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bare-metal-solution/CHANGELOG.md) +- [google-cloud-batch==0.17.33](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-batch/CHANGELOG.md) +- [google-cloud-beyondcorp-appconnections==0.4.14](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-beyondcorp-appconnections/CHANGELOG.md) +- [google-cloud-beyondcorp-appconnectors==0.4.14](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-beyondcorp-appconnectors/CHANGELOG.md) +- [google-cloud-beyondcorp-appgateways==0.4.14](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-beyondcorp-appgateways/CHANGELOG.md) +- [google-cloud-beyondcorp-clientconnectorservices==0.4.14](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-beyondcorp-clientconnectorservices/CHANGELOG.md) +- [google-cloud-beyondcorp-clientgateways==0.4.13](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-beyondcorp-clientgateways/CHANGELOG.md) +- [google-cloud-bigquery-analyticshub==0.4.14](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bigquery-analyticshub/CHANGELOG.md) +- 
[google-cloud-bigquery-biglake==0.4.12](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bigquery-biglake/CHANGELOG.md) +- [google-cloud-bigquery-connection==1.17.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bigquery-connection/CHANGELOG.md) +- [google-cloud-bigquery-data-exchange==0.5.16](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bigquery-data-exchange/CHANGELOG.md) +- [google-cloud-bigquery-datapolicies==0.6.11](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bigquery-datapolicies/CHANGELOG.md) +- [google-cloud-bigquery-datatransfer==3.18.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bigquery-datatransfer/CHANGELOG.md) - [google-cloud-bigquery-logging==1.5.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bigquery-logging/CHANGELOG.md) -- [google-cloud-bigquery-migration==0.11.11](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bigquery-migration/CHANGELOG.md) -- [google-cloud-bigquery-reservation==1.14.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bigquery-reservation/CHANGELOG.md) -- [google-cloud-billing-budgets==1.15.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-billing-budgets/CHANGELOG.md) -- [google-cloud-billing==1.14.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-billing/CHANGELOG.md) -- [google-cloud-binary-authorization==1.11.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-binary-authorization/CHANGELOG.md) -- [google-cloud-build==3.27.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-build/CHANGELOG.md) -- [google-cloud-certificate-manager==1.8.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-certificate-manager/CHANGELOG.md) -- [google-cloud-channel==1.20.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-channel/CHANGELOG.md) -- [google-cloud-cloudcontrolspartner==0.2.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-cloudcontrolspartner/CHANGELOG.md) -- [google-cloud-commerce-consumer-procurement==0.1.10](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-commerce-consumer-procurement/CHANGELOG.md) +- [google-cloud-bigquery-migration==0.11.12](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bigquery-migration/CHANGELOG.md) +- [google-cloud-bigquery-reservation==1.15.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bigquery-reservation/CHANGELOG.md) +- [google-cloud-billing-budgets==1.16.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-billing-budgets/CHANGELOG.md) +- [google-cloud-billing==1.15.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-billing/CHANGELOG.md) +- [google-cloud-binary-authorization==1.12.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-binary-authorization/CHANGELOG.md) +- [google-cloud-build==3.29.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-build/CHANGELOG.md) +- 
[google-cloud-certificate-manager==1.9.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-certificate-manager/CHANGELOG.md) +- [google-cloud-channel==1.21.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-channel/CHANGELOG.md) +- [google-cloud-cloudcontrolspartner==0.2.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-cloudcontrolspartner/CHANGELOG.md) +- [google-cloud-commerce-consumer-procurement==0.1.11](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-commerce-consumer-procurement/CHANGELOG.md) - [google-cloud-common==1.4.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-common/CHANGELOG.md) -- [google-cloud-compute==1.22.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-compute/CHANGELOG.md) -- [google-cloud-confidentialcomputing==0.4.13](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-confidentialcomputing/CHANGELOG.md) -- [google-cloud-config==0.1.13](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-config/CHANGELOG.md) -- [google-cloud-contact-center-insights==1.20.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-contact-center-insights/CHANGELOG.md) -- [google-cloud-container==2.54.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-container/CHANGELOG.md) -- [google-cloud-containeranalysis==2.15.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-containeranalysis/CHANGELOG.md) -- [google-cloud-contentwarehouse==0.7.11](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-contentwarehouse/CHANGELOG.md) -- [google-cloud-data-fusion==1.11.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-data-fusion/CHANGELOG.md) -- [google-cloud-data-qna==0.10.13](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-data-qna/CHANGELOG.md) -- [google-cloud-datacatalog-lineage==0.3.10](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-datacatalog-lineage/CHANGELOG.md) -- [google-cloud-datacatalog==3.23.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-datacatalog/CHANGELOG.md) -- [google-cloud-dataflow-client==0.8.14](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-dataflow-client/CHANGELOG.md) -- [google-cloud-dataform==0.5.13](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-dataform/CHANGELOG.md) -- [google-cloud-datalabeling==1.11.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-datalabeling/CHANGELOG.md) -- [google-cloud-dataplex==2.4.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-dataplex/CHANGELOG.md) -- [google-cloud-dataproc-metastore==1.16.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-dataproc-metastore/CHANGELOG.md) -- [google-cloud-dataproc==5.15.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-dataproc/CHANGELOG.md) -- [google-cloud-datastream==1.10.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-datastream/CHANGELOG.md) -- 
[google-cloud-deploy==2.4.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-deploy/CHANGELOG.md) -- [google-cloud-developerconnect==0.1.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-developerconnect/CHANGELOG.md) -- [google-cloud-dialogflow-cx==1.37.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-dialogflow-cx/CHANGELOG.md) -- [google-cloud-dialogflow==2.36.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-dialogflow/CHANGELOG.md) -- [google-cloud-discoveryengine==0.13.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-discoveryengine/CHANGELOG.md) -- [google-cloud-dlp==3.25.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-dlp/CHANGELOG.md) -- [google-cloud-dms==1.10.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-dms/CHANGELOG.md) -- [google-cloud-documentai==3.0.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-documentai/CHANGELOG.md) -- [google-cloud-domains==1.8.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-domains/CHANGELOG.md) -- [google-cloud-edgecontainer==0.5.14](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-edgecontainer/CHANGELOG.md) -- [google-cloud-edgenetwork==0.1.13](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-edgenetwork/CHANGELOG.md) -- [google-cloud-enterpriseknowledgegraph==0.3.13](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-enterpriseknowledgegraph/CHANGELOG.md) -- [google-cloud-essential-contacts==1.8.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-essential-contacts/CHANGELOG.md) -- [google-cloud-eventarc-publishing==0.6.14](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-eventarc-publishing/CHANGELOG.md) -- [google-cloud-eventarc==1.13.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-eventarc/CHANGELOG.md) -- [google-cloud-filestore==1.10.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-filestore/CHANGELOG.md) -- [google-cloud-functions==1.18.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-functions/CHANGELOG.md) -- [google-cloud-gdchardwaremanagement==0.1.8](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-gdchardwaremanagement/CHANGELOG.md) -- [google-cloud-gke-backup==0.5.13](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-gke-backup/CHANGELOG.md) -- [google-cloud-gke-connect-gateway==0.10.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-gke-connect-gateway/CHANGELOG.md) -- [google-cloud-gke-hub==1.15.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-gke-hub/CHANGELOG.md) -- [google-cloud-gke-multicloud==0.6.15](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-gke-multicloud/CHANGELOG.md) -- [google-cloud-gsuiteaddons==0.3.12](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-gsuiteaddons/CHANGELOG.md) +- [google-cloud-compute==1.24.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-compute/CHANGELOG.md) +- 
[google-cloud-confidentialcomputing==0.4.14](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-confidentialcomputing/CHANGELOG.md) +- [google-cloud-config==0.1.14](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-config/CHANGELOG.md) +- [google-cloud-contact-center-insights==1.22.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-contact-center-insights/CHANGELOG.md) +- [google-cloud-container==2.55.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-container/CHANGELOG.md) +- [google-cloud-containeranalysis==2.16.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-containeranalysis/CHANGELOG.md) +- [google-cloud-contentwarehouse==0.7.12](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-contentwarehouse/CHANGELOG.md) +- [google-cloud-data-fusion==1.12.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-data-fusion/CHANGELOG.md) +- [google-cloud-data-qna==0.10.14](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-data-qna/CHANGELOG.md) +- [google-cloud-datacatalog-lineage==0.3.11](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-datacatalog-lineage/CHANGELOG.md) +- [google-cloud-datacatalog==3.24.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-datacatalog/CHANGELOG.md) +- [google-cloud-dataflow-client==0.8.15](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-dataflow-client/CHANGELOG.md) +- [google-cloud-dataform==0.5.14](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-dataform/CHANGELOG.md) +- [google-cloud-datalabeling==1.12.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-datalabeling/CHANGELOG.md) +- [google-cloud-dataplex==2.6.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-dataplex/CHANGELOG.md) +- [google-cloud-dataproc-metastore==1.17.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-dataproc-metastore/CHANGELOG.md) +- [google-cloud-dataproc==5.16.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-dataproc/CHANGELOG.md) +- [google-cloud-datastream==1.12.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-datastream/CHANGELOG.md) +- [google-cloud-deploy==2.5.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-deploy/CHANGELOG.md) +- [google-cloud-developerconnect==0.1.6](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-developerconnect/CHANGELOG.md) +- [google-cloud-dialogflow-cx==1.38.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-dialogflow-cx/CHANGELOG.md) +- [google-cloud-dialogflow==2.38.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-dialogflow/CHANGELOG.md) +- [google-cloud-discoveryengine==0.13.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-discoveryengine/CHANGELOG.md) +- [google-cloud-dlp==3.26.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-dlp/CHANGELOG.md) +- [google-cloud-dms==1.11.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-dms/CHANGELOG.md) +- 
[google-cloud-documentai==3.1.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-documentai/CHANGELOG.md) +- [google-cloud-domains==1.9.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-domains/CHANGELOG.md) +- [google-cloud-edgecontainer==0.5.15](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-edgecontainer/CHANGELOG.md) +- [google-cloud-edgenetwork==0.1.14](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-edgenetwork/CHANGELOG.md) +- [google-cloud-enterpriseknowledgegraph==0.3.14](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-enterpriseknowledgegraph/CHANGELOG.md) +- [google-cloud-essential-contacts==1.9.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-essential-contacts/CHANGELOG.md) +- [google-cloud-eventarc-publishing==0.6.15](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-eventarc-publishing/CHANGELOG.md) +- [google-cloud-eventarc==1.14.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-eventarc/CHANGELOG.md) +- [google-cloud-filestore==1.11.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-filestore/CHANGELOG.md) +- [google-cloud-functions==1.19.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-functions/CHANGELOG.md) +- [google-cloud-gdchardwaremanagement==0.1.9](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-gdchardwaremanagement/CHANGELOG.md) +- [google-cloud-gke-backup==0.5.14](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-gke-backup/CHANGELOG.md) +- [google-cloud-gke-connect-gateway==0.10.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-gke-connect-gateway/CHANGELOG.md) +- [google-cloud-gke-hub==1.16.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-gke-hub/CHANGELOG.md) +- [google-cloud-gke-multicloud==0.6.18](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-gke-multicloud/CHANGELOG.md) +- [google-cloud-gsuiteaddons==0.3.14](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-gsuiteaddons/CHANGELOG.md) - [google-cloud-iam-logging==1.4.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-iam-logging/CHANGELOG.md) -- [google-cloud-iam==2.16.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-iam/CHANGELOG.md) -- [google-cloud-iap==1.14.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-iap/CHANGELOG.md) -- [google-cloud-ids==1.8.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-ids/CHANGELOG.md) -- [google-cloud-kms-inventory==0.2.11](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-kms-inventory/CHANGELOG.md) -- [google-cloud-kms==3.1.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-kms/CHANGELOG.md) -- [google-cloud-language==2.15.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-language/CHANGELOG.md) -- [google-cloud-life-sciences==0.9.14](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-life-sciences/CHANGELOG.md) -- 
[google-cloud-managed-identities==1.10.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-managed-identities/CHANGELOG.md) -- [google-cloud-managedkafka==0.1.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-managedkafka/CHANGELOG.md) -- [google-cloud-media-translation==0.11.13](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-media-translation/CHANGELOG.md) -- [google-cloud-memcache==1.10.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-memcache/CHANGELOG.md) -- [google-cloud-migrationcenter==0.1.11](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-migrationcenter/CHANGELOG.md) -- [google-cloud-monitoring-dashboards==2.16.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-monitoring-dashboards/CHANGELOG.md) -- [google-cloud-monitoring-metrics-scopes==1.7.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-monitoring-metrics-scopes/CHANGELOG.md) -- [google-cloud-monitoring==2.23.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-monitoring/CHANGELOG.md) -- [google-cloud-netapp==0.3.16](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-netapp/CHANGELOG.md) -- [google-cloud-network-connectivity==2.5.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-network-connectivity/CHANGELOG.md) -- [google-cloud-network-management==1.22.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-network-management/CHANGELOG.md) -- [google-cloud-network-security==0.9.13](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-network-security/CHANGELOG.md) -- [google-cloud-network-services==0.5.16](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-network-services/CHANGELOG.md) -- [google-cloud-notebooks==1.11.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-notebooks/CHANGELOG.md) -- [google-cloud-optimization==1.9.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-optimization/CHANGELOG.md) -- [google-cloud-oracledatabase==0.1.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-oracledatabase/CHANGELOG.md) -- [google-cloud-orchestration-airflow==1.15.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-orchestration-airflow/CHANGELOG.md) -- [google-cloud-os-config==1.18.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-os-config/CHANGELOG.md) -- [google-cloud-os-login==2.15.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-os-login/CHANGELOG.md) -- [google-cloud-parallelstore==0.2.6](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-parallelstore/CHANGELOG.md) -- [google-cloud-phishing-protection==1.12.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-phishing-protection/CHANGELOG.md) -- [google-cloud-policy-troubleshooter==1.12.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-policy-troubleshooter/CHANGELOG.md) -- [google-cloud-policysimulator==0.1.10](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-policysimulator/CHANGELOG.md) -- 
[google-cloud-policytroubleshooter-iam==0.1.9](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-policytroubleshooter-iam/CHANGELOG.md) -- [google-cloud-private-ca==1.13.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-private-ca/CHANGELOG.md) -- [google-cloud-private-catalog==0.9.13](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-private-catalog/CHANGELOG.md) -- [google-cloud-privilegedaccessmanager==0.1.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-privilegedaccessmanager/CHANGELOG.md) -- [google-cloud-public-ca==0.3.14](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-public-ca/CHANGELOG.md) -- [google-cloud-quotas==0.1.13](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-quotas/CHANGELOG.md) -- [google-cloud-rapidmigrationassessment==0.1.11](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-rapidmigrationassessment/CHANGELOG.md) -- [google-cloud-recaptcha-enterprise==1.25.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-recaptcha-enterprise/CHANGELOG.md) -- [google-cloud-recommendations-ai==0.10.14](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-recommendations-ai/CHANGELOG.md) -- [google-cloud-recommender==2.16.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-recommender/CHANGELOG.md) -- [google-cloud-redis-cluster==0.1.10](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-redis-cluster/CHANGELOG.md) -- [google-cloud-redis==2.16.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-redis/CHANGELOG.md) -- [google-cloud-resource-manager==1.13.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-resource-manager/CHANGELOG.md) -- [google-cloud-resource-settings==1.10.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-resource-settings/CHANGELOG.md) -- [google-cloud-retail==1.23.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-retail/CHANGELOG.md) -- [google-cloud-run==0.10.13](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-run/CHANGELOG.md) -- [google-cloud-scheduler==2.14.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-scheduler/CHANGELOG.md) -- [google-cloud-secret-manager==2.21.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-secret-manager/CHANGELOG.md) -- [google-cloud-securesourcemanager==0.1.11](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-securesourcemanager/CHANGELOG.md) -- [google-cloud-securitycenter==1.35.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-securitycenter/CHANGELOG.md) -- [google-cloud-securitycentermanagement==0.1.17](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-securitycentermanagement/CHANGELOG.md) -- [google-cloud-service-control==1.13.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-service-control/CHANGELOG.md) -- [google-cloud-service-directory==1.12.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-service-directory/CHANGELOG.md) -- 
[google-cloud-service-management==1.11.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-service-management/CHANGELOG.md) -- [google-cloud-service-usage==1.11.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-service-usage/CHANGELOG.md) -- [google-cloud-servicehealth==0.1.8](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-servicehealth/CHANGELOG.md) -- [google-cloud-shell==1.10.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-shell/CHANGELOG.md) +- [google-cloud-iam==2.17.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-iam/CHANGELOG.md) +- [google-cloud-iap==1.15.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-iap/CHANGELOG.md) +- [google-cloud-ids==1.9.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-ids/CHANGELOG.md) +- [google-cloud-kms-inventory==0.2.12](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-kms-inventory/CHANGELOG.md) +- [google-cloud-kms==3.2.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-kms/CHANGELOG.md) +- [google-cloud-language==2.16.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-language/CHANGELOG.md) +- [google-cloud-life-sciences==0.9.15](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-life-sciences/CHANGELOG.md) +- [google-cloud-managed-identities==1.11.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-managed-identities/CHANGELOG.md) +- [google-cloud-managedkafka==0.1.6](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-managedkafka/CHANGELOG.md) +- [google-cloud-media-translation==0.11.14](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-media-translation/CHANGELOG.md) +- [google-cloud-memcache==1.11.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-memcache/CHANGELOG.md) +- [google-cloud-memorystore==0.1.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-memorystore/CHANGELOG.md) +- [google-cloud-migrationcenter==0.1.12](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-migrationcenter/CHANGELOG.md) +- [google-cloud-modelarmor==0.1.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-modelarmor/CHANGELOG.md) +- [google-cloud-monitoring-dashboards==2.17.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-monitoring-dashboards/CHANGELOG.md) +- [google-cloud-monitoring-metrics-scopes==1.8.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-monitoring-metrics-scopes/CHANGELOG.md) +- [google-cloud-monitoring==2.26.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-monitoring/CHANGELOG.md) +- [google-cloud-netapp==0.3.18](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-netapp/CHANGELOG.md) +- [google-cloud-network-connectivity==2.6.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-network-connectivity/CHANGELOG.md) +- [google-cloud-network-management==1.24.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-network-management/CHANGELOG.md) +- 
[google-cloud-network-security==0.9.14](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-network-security/CHANGELOG.md) +- [google-cloud-network-services==0.5.17](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-network-services/CHANGELOG.md) +- [google-cloud-notebooks==1.12.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-notebooks/CHANGELOG.md) +- [google-cloud-optimization==1.10.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-optimization/CHANGELOG.md) +- [google-cloud-oracledatabase==0.1.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-oracledatabase/CHANGELOG.md) +- [google-cloud-orchestration-airflow==1.16.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-orchestration-airflow/CHANGELOG.md) +- [google-cloud-org-policy==1.12.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-org-policy/CHANGELOG.md) +- [google-cloud-os-config==1.19.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-os-config/CHANGELOG.md) +- [google-cloud-os-login==2.16.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-os-login/CHANGELOG.md) +- [google-cloud-parallelstore==0.2.9](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-parallelstore/CHANGELOG.md) +- [google-cloud-parametermanager==0.1.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-parametermanager/CHANGELOG.md) +- [google-cloud-phishing-protection==1.13.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-phishing-protection/CHANGELOG.md) +- [google-cloud-policy-troubleshooter==1.13.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-policy-troubleshooter/CHANGELOG.md) +- [google-cloud-policysimulator==0.1.11](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-policysimulator/CHANGELOG.md) +- [google-cloud-policytroubleshooter-iam==0.1.10](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-policytroubleshooter-iam/CHANGELOG.md) +- [google-cloud-private-ca==1.14.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-private-ca/CHANGELOG.md) +- [google-cloud-private-catalog==0.9.15](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-private-catalog/CHANGELOG.md) +- [google-cloud-privilegedaccessmanager==0.1.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-privilegedaccessmanager/CHANGELOG.md) +- [google-cloud-public-ca==0.3.15](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-public-ca/CHANGELOG.md) +- [google-cloud-quotas==0.1.15](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-quotas/CHANGELOG.md) +- [google-cloud-rapidmigrationassessment==0.1.12](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-rapidmigrationassessment/CHANGELOG.md) +- [google-cloud-recaptcha-enterprise==1.26.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-recaptcha-enterprise/CHANGELOG.md) +- [google-cloud-recommendations-ai==0.10.15](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-recommendations-ai/CHANGELOG.md) +- 
[google-cloud-recommender==2.17.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-recommender/CHANGELOG.md) +- [google-cloud-redis-cluster==0.1.13](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-redis-cluster/CHANGELOG.md) +- [google-cloud-redis==2.17.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-redis/CHANGELOG.md) +- [google-cloud-resource-manager==1.14.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-resource-manager/CHANGELOG.md) +- [google-cloud-resource-settings==1.11.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-resource-settings/CHANGELOG.md) +- [google-cloud-retail==1.24.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-retail/CHANGELOG.md) +- [google-cloud-run==0.10.14](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-run/CHANGELOG.md) +- [google-cloud-scheduler==2.15.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-scheduler/CHANGELOG.md) +- [google-cloud-secret-manager==2.22.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-secret-manager/CHANGELOG.md) +- [google-cloud-securesourcemanager==0.1.12](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-securesourcemanager/CHANGELOG.md) +- [google-cloud-securitycenter==1.36.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-securitycenter/CHANGELOG.md) +- [google-cloud-securitycentermanagement==0.1.18](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-securitycentermanagement/CHANGELOG.md) +- [google-cloud-service-control==1.14.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-service-control/CHANGELOG.md) +- [google-cloud-service-directory==1.13.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-service-directory/CHANGELOG.md) +- [google-cloud-service-management==1.12.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-service-management/CHANGELOG.md) +- [google-cloud-service-usage==1.12.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-service-usage/CHANGELOG.md) +- [google-cloud-servicehealth==0.1.10](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-servicehealth/CHANGELOG.md) +- [google-cloud-shell==1.11.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-shell/CHANGELOG.md) - [google-cloud-source-context==1.6.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-source-context/CHANGELOG.md) -- [google-cloud-speech==2.28.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-speech/CHANGELOG.md) -- [google-cloud-storage-control==1.1.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-storage-control/CHANGELOG.md) -- [google-cloud-storage-transfer==1.13.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-storage-transfer/CHANGELOG.md) -- [google-cloud-storageinsights==0.1.12](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-storageinsights/CHANGELOG.md) -- 
[google-cloud-support==0.1.11](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-support/CHANGELOG.md) -- [google-cloud-talent==2.14.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-talent/CHANGELOG.md) -- [google-cloud-tasks==2.17.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-tasks/CHANGELOG.md) -- [google-cloud-telcoautomation==0.2.7](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-telcoautomation/CHANGELOG.md) -- [google-cloud-texttospeech==2.21.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-texttospeech/CHANGELOG.md) -- [google-cloud-tpu==1.19.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-tpu/CHANGELOG.md) -- [google-cloud-trace==1.14.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-trace/CHANGELOG.md) -- [google-cloud-translate==3.18.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-translate/CHANGELOG.md) -- [google-cloud-video-live-stream==1.9.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-video-live-stream/CHANGELOG.md) -- [google-cloud-video-stitcher==0.7.14](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-video-stitcher/CHANGELOG.md) -- [google-cloud-video-transcoder==1.13.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-video-transcoder/CHANGELOG.md) -- [google-cloud-videointelligence==2.14.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-videointelligence/CHANGELOG.md) -- [google-cloud-vision==3.8.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-vision/CHANGELOG.md) -- [google-cloud-visionai==0.1.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-visionai/CHANGELOG.md) -- [google-cloud-vm-migration==1.9.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-vm-migration/CHANGELOG.md) -- [google-cloud-vmwareengine==1.6.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-vmwareengine/CHANGELOG.md) -- [google-cloud-vpc-access==1.11.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-vpc-access/CHANGELOG.md) -- [google-cloud-webrisk==1.15.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-webrisk/CHANGELOG.md) -- [google-cloud-websecurityscanner==1.15.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-websecurityscanner/CHANGELOG.md) -- [google-cloud-workflows==1.15.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-workflows/CHANGELOG.md) -- [google-cloud-workstations==0.5.10](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-workstations/CHANGELOG.md) +- [google-cloud-speech==2.30.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-speech/CHANGELOG.md) +- [google-cloud-storage-control==1.2.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-storage-control/CHANGELOG.md) +- [google-cloud-storage-transfer==1.15.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-storage-transfer/CHANGELOG.md) +- 
[google-cloud-storageinsights==0.1.13](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-storageinsights/CHANGELOG.md) +- [google-cloud-support==0.1.12](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-support/CHANGELOG.md) +- [google-cloud-talent==2.16.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-talent/CHANGELOG.md) +- [google-cloud-tasks==2.18.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-tasks/CHANGELOG.md) +- [google-cloud-telcoautomation==0.2.8](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-telcoautomation/CHANGELOG.md) +- [google-cloud-texttospeech==2.24.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-texttospeech/CHANGELOG.md) +- [google-cloud-tpu==1.22.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-tpu/CHANGELOG.md) +- [google-cloud-trace==1.15.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-trace/CHANGELOG.md) +- [google-cloud-translate==3.19.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-translate/CHANGELOG.md) +- [google-cloud-video-live-stream==1.10.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-video-live-stream/CHANGELOG.md) +- [google-cloud-video-stitcher==0.7.15](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-video-stitcher/CHANGELOG.md) +- [google-cloud-video-transcoder==1.14.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-video-transcoder/CHANGELOG.md) +- [google-cloud-videointelligence==2.15.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-videointelligence/CHANGELOG.md) +- [google-cloud-vision==3.9.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-vision/CHANGELOG.md) +- [google-cloud-visionai==0.1.6](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-visionai/CHANGELOG.md) +- [google-cloud-vm-migration==1.10.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-vm-migration/CHANGELOG.md) +- [google-cloud-vmwareengine==1.7.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-vmwareengine/CHANGELOG.md) +- [google-cloud-vpc-access==1.12.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-vpc-access/CHANGELOG.md) +- [google-cloud-webrisk==1.16.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-webrisk/CHANGELOG.md) +- [google-cloud-websecurityscanner==1.16.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-websecurityscanner/CHANGELOG.md) +- [google-cloud-workflows==1.16.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-workflows/CHANGELOG.md) +- [google-cloud-workstations==0.5.11](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-workstations/CHANGELOG.md) - [google-geo-type==0.3.10](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-geo-type/CHANGELOG.md) -- [google-maps-addressvalidation==0.3.15](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-maps-addressvalidation/CHANGELOG.md) -- 
[google-maps-areainsights==0.1.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-maps-areainsights/CHANGELOG.md) -- [google-maps-fleetengine-delivery==0.2.6](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-maps-fleetengine-delivery/CHANGELOG.md) -- [google-maps-fleetengine==0.2.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-maps-fleetengine/CHANGELOG.md) -- [google-maps-mapsplatformdatasets==0.4.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-maps-mapsplatformdatasets/CHANGELOG.md) -- [google-maps-places==0.1.20](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-maps-places/CHANGELOG.md) -- [google-maps-routeoptimization==0.1.6](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-maps-routeoptimization/CHANGELOG.md) -- [google-maps-routing==0.6.12](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-maps-routing/CHANGELOG.md) -- [google-maps-solar==0.1.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-maps-solar/CHANGELOG.md) -- [google-shopping-css==0.1.10](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-shopping-css/CHANGELOG.md) -- [google-shopping-merchant-accounts==0.2.2](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-shopping-merchant-accounts/CHANGELOG.md) -- [google-shopping-merchant-conversions==0.1.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-shopping-merchant-conversions/CHANGELOG.md) -- [google-shopping-merchant-datasources==0.1.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-shopping-merchant-datasources/CHANGELOG.md) -- [google-shopping-merchant-inventories==0.1.11](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-shopping-merchant-inventories/CHANGELOG.md) -- [google-shopping-merchant-lfp==0.1.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-shopping-merchant-lfp/CHANGELOG.md) -- [google-shopping-merchant-notifications==0.1.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-shopping-merchant-notifications/CHANGELOG.md) -- [google-shopping-merchant-products==0.1.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-shopping-merchant-products/CHANGELOG.md) -- [google-shopping-merchant-promotions==0.1.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-shopping-merchant-promotions/CHANGELOG.md) -- [google-shopping-merchant-quota==0.1.4](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-shopping-merchant-quota/CHANGELOG.md) -- [google-shopping-merchant-reports==0.1.11](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-shopping-merchant-reports/CHANGELOG.md) +- [google-maps-addressvalidation==0.3.16](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-maps-addressvalidation/CHANGELOG.md) +- [google-maps-areainsights==0.1.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-maps-areainsights/CHANGELOG.md) +- [google-maps-fleetengine-delivery==0.2.7](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-maps-fleetengine-delivery/CHANGELOG.md) +- [google-maps-fleetengine==0.2.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-maps-fleetengine/CHANGELOG.md) +- 
[google-maps-mapsplatformdatasets==0.4.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-maps-mapsplatformdatasets/CHANGELOG.md) +- [google-maps-places==0.1.22](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-maps-places/CHANGELOG.md) +- [google-maps-routeoptimization==0.1.7](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-maps-routeoptimization/CHANGELOG.md) +- [google-maps-routing==0.6.13](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-maps-routing/CHANGELOG.md) +- [google-maps-solar==0.1.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-maps-solar/CHANGELOG.md) +- [google-shopping-css==0.1.12](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-shopping-css/CHANGELOG.md) +- [google-shopping-merchant-accounts==0.2.3](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-shopping-merchant-accounts/CHANGELOG.md) +- [google-shopping-merchant-conversions==0.1.6](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-shopping-merchant-conversions/CHANGELOG.md) +- [google-shopping-merchant-datasources==0.1.7](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-shopping-merchant-datasources/CHANGELOG.md) +- [google-shopping-merchant-inventories==0.1.12](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-shopping-merchant-inventories/CHANGELOG.md) +- [google-shopping-merchant-lfp==0.1.6](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-shopping-merchant-lfp/CHANGELOG.md) +- [google-shopping-merchant-notifications==0.1.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-shopping-merchant-notifications/CHANGELOG.md) +- [google-shopping-merchant-products==0.2.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-shopping-merchant-products/CHANGELOG.md) +- [google-shopping-merchant-promotions==0.1.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-shopping-merchant-promotions/CHANGELOG.md) +- [google-shopping-merchant-quota==0.1.5](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-shopping-merchant-quota/CHANGELOG.md) +- [google-shopping-merchant-reports==0.1.12](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-shopping-merchant-reports/CHANGELOG.md) +- [google-shopping-merchant-reviews==0.1.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-shopping-merchant-reviews/CHANGELOG.md) - [google-shopping-type==0.1.9](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-shopping-type/CHANGELOG.md) -- [grafeas==1.12.1](https://github.com/googleapis/google-cloud-python/tree/main/packages/grafeas/CHANGELOG.md) +- [googleapis-common-protos==1.66.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/googleapis-common-protos/CHANGELOG.md) +- [grafeas==1.13.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/grafeas/CHANGELOG.md) +- [grpc-google-iam-v1==0.14.0](https://github.com/googleapis/google-cloud-python/tree/main/packages/grpc-google-iam-v1/CHANGELOG.md) diff --git a/README.rst b/README.rst index afed928d5568..c8bf0e50d208 100644 --- a/README.rst +++ b/README.rst @@ -97,6 +97,12 @@ Libraries - - - `Client Library Issues `_ + * - `APIs Common Protos `_ + - stable + - |PyPI-googleapis-common-protos| + - + - + 
- `Client Library Issues `_ * - `Access Approval `_ - stable - |PyPI-google-cloud-access-approval| @@ -409,15 +415,15 @@ Libraries - - - `Client Library Issues `_ - * - `Identity and Access Management `_ + * - `Identity and Access Management `_ - stable - - |PyPI-grpc-google-iam-v1| + - |PyPI-google-cloud-iam| - `API Issues `_ - `File an API Issue `_ - - `Client Library Issues `_ - * - `Identity and Access Management `_ + - `Client Library Issues `_ + * - `Identity and Access Management `_ - stable - - |PyPI-google-cloud-iam| + - |PyPI-grpc-google-iam-v1| - `API Issues `_ - `File an API Issue `_ - `Client Library Issues `_ @@ -517,12 +523,12 @@ Libraries - - - `Client Library Issues `_ - * - `Organization Policy `_ + * - `Organization Policy `_ - stable - |PyPI-google-cloud-org-policy| - - - - `Client Library Issues `_ + - `Client Library Issues `_ * - `Pandas Data Types for SQL systems (BigQuery, Spanner) `_ - stable - |PyPI-db-dtypes| @@ -769,18 +775,24 @@ Libraries - - - `Client Library Issues `_ + * - `API Hub API `_ + - preview + - |PyPI-google-cloud-apihub| + - `API Issues `_ + - `File an API Issue `_ + - `Client Library Issues `_ * - `API Keys `_ - preview - |PyPI-google-cloud-api-keys| - - - `Client Library Issues `_ - * - `Access Context Manager `_ + * - `Access Context Manager `_ - preview - |PyPI-google-cloud-access-context-manager| - - - - `Client Library Issues `_ + - `Client Library Issues `_ * - `Ad Manager `_ - preview - |PyPI-google-ads-admanager| @@ -829,12 +841,6 @@ Libraries - - - `Client Library Issues `_ - * - `Apache Kafka for BigQuery API `_ - - preview - - |PyPI-google-cloud-managedkafka| - - `API Issues `_ - - `File an API Issue `_ - - `Client Library Issues `_ * - `Apigee Registry API `_ - preview - |PyPI-google-cloud-apigee-registry| @@ -865,12 +871,12 @@ Libraries - - - `Client Library Issues `_ - * - `Audit Log `_ + * - `Audit Log API `_ - preview - |PyPI-google-cloud-audit-log| - - - - `Client Library Issues `_ + - `Client Library Issues `_ * - `Backup and DR Service API `_ - preview - |PyPI-google-cloud-backupdr| @@ -1129,6 +1135,12 @@ Libraries - - - `Client Library Issues `_ + * - `Managed Service for Apache Kafka `_ + - preview + - |PyPI-google-cloud-managedkafka| + - `API Issues `_ + - `File an API Issue `_ + - `Client Library Issues `_ * - `Maps Platform Datasets API `_ - preview - |PyPI-google-maps-mapsplatformdatasets| @@ -1141,6 +1153,12 @@ Libraries - - - `Client Library Issues `_ + * - `Marketing Platform Admin API `_ + - preview + - |PyPI-google-ads-marketingplatform-admin| + - + - + - `Client Library Issues `_ * - `Media Translation `_ - preview - |PyPI-google-cloud-media-translation| @@ -1153,6 +1171,12 @@ Libraries - `API Issues `_ - `File an API Issue `_ - `Client Library Issues `_ + * - `Memorystore `_ + - preview + - |PyPI-google-cloud-memorystore| + - + - + - `Client Library Issues `_ * - `Memorystore for Redis API `_ - preview - |PyPI-google-cloud-redis-cluster| @@ -1213,12 +1237,24 @@ Libraries - `API Issues `_ - `File an API Issue `_ - `Client Library Issues `_ + * - `Merchant Reviews API `_ + - preview + - |PyPI-google-shopping-merchant-reviews| + - + - + - `Client Library Issues `_ * - `Migration Center API `_ - preview - |PyPI-google-cloud-migrationcenter| - - - `Client Library Issues `_ + * - `Model Armor API `_ + - preview + - |PyPI-google-cloud-modelarmor| + - `API Issues `_ + - `File an API Issue `_ + - `Client Library Issues `_ * - `NetApp API `_ - preview - |PyPI-google-cloud-netapp| @@ -1237,12 +1273,24 @@ Libraries - - - 
`Client Library Issues `_ + * - `Oracle Database@API `_ + - preview + - |PyPI-google-cloud-oracledatabase| + - `API Issues `_ + - `File an API Issue `_ + - `Client Library Issues `_ * - `Parallelstore API `_ - preview - |PyPI-google-cloud-parallelstore| - - - `Client Library Issues `_ + * - `Parameter Manager API `_ + - preview + - |PyPI-google-cloud-parametermanager| + - `API Issues `_ + - `File an API Issue `_ + - `Client Library Issues `_ * - `Phishing Protection `_ - preview - |PyPI-google-cloud-phishing-protection| @@ -1255,6 +1303,12 @@ Libraries - - - `Client Library Issues `_ + * - `Places Insights API `_ + - preview + - |PyPI-google-maps-areainsights| + - `API Issues `_ + - `File an API Issue `_ + - `Client Library Issues `_ * - `Policy Simulator API `_ - preview - |PyPI-google-cloud-policysimulator| @@ -1273,15 +1327,21 @@ Libraries - - - `Client Library Issues `_ + * - `Privileged Access Manager API `_ + - preview + - |PyPI-google-cloud-privilegedaccessmanager| + - + - + - `Client Library Issues `_ * - `Public Certificate Authority `_ - preview - |PyPI-google-cloud-public-ca| - - - `Client Library Issues `_ - * - `Quotas API `_ + * - `Quotas API `_ - preview - - |PyPI-google-cloud-cloudquotas| + - |PyPI-google-cloud-quotas| - `API Issues `_ - `File an API Issue `_ - `Client Library Issues `_ @@ -1420,6 +1480,8 @@ Libraries :target: https://pypi.org/project/google-cloud-notebooks .. |PyPI-google-cloud-api-gateway| image:: https://img.shields.io/pypi/v/google-cloud-api-gateway.svg :target: https://pypi.org/project/google-cloud-api-gateway +.. |PyPI-googleapis-common-protos| image:: https://img.shields.io/pypi/v/googleapis-common-protos.svg + :target: https://pypi.org/project/googleapis-common-protos .. |PyPI-google-cloud-access-approval| image:: https://img.shields.io/pypi/v/google-cloud-access-approval.svg :target: https://pypi.org/project/google-cloud-access-approval .. |PyPI-google-cloud-apigee-connect| image:: https://img.shields.io/pypi/v/google-cloud-apigee-connect.svg @@ -1524,10 +1586,10 @@ Libraries :target: https://pypi.org/project/google-cloud-policy-troubleshooter .. |PyPI-google-cloud-ids| image:: https://img.shields.io/pypi/v/google-cloud-ids.svg :target: https://pypi.org/project/google-cloud-ids -.. |PyPI-grpc-google-iam-v1| image:: https://img.shields.io/pypi/v/grpc-google-iam-v1.svg - :target: https://pypi.org/project/grpc-google-iam-v1 .. |PyPI-google-cloud-iam| image:: https://img.shields.io/pypi/v/google-cloud-iam.svg :target: https://pypi.org/project/google-cloud-iam +.. |PyPI-grpc-google-iam-v1| image:: https://img.shields.io/pypi/v/grpc-google-iam-v1.svg + :target: https://pypi.org/project/grpc-google-iam-v1 .. |PyPI-google-cloud-iap| image:: https://img.shields.io/pypi/v/google-cloud-iap.svg :target: https://pypi.org/project/google-cloud-iap .. |PyPI-google-cloud-kms| image:: https://img.shields.io/pypi/v/google-cloud-kms.svg @@ -1644,6 +1706,8 @@ Libraries :target: https://pypi.org/project/google-cloud-recaptcha-enterprise .. |PyPI-bigframes| image:: https://img.shields.io/pypi/v/bigframes.svg :target: https://pypi.org/project/bigframes +.. |PyPI-google-cloud-apihub| image:: https://img.shields.io/pypi/v/google-cloud-apihub.svg + :target: https://pypi.org/project/google-cloud-apihub .. |PyPI-google-cloud-api-keys| image:: https://img.shields.io/pypi/v/google-cloud-api-keys.svg :target: https://pypi.org/project/google-cloud-api-keys .. 
|PyPI-google-cloud-access-context-manager| image:: https://img.shields.io/pypi/v/google-cloud-access-context-manager.svg @@ -1664,8 +1728,6 @@ Libraries :target: https://pypi.org/project/google-analytics-data .. |PyPI-google-cloud-gke-multicloud| image:: https://img.shields.io/pypi/v/google-cloud-gke-multicloud.svg :target: https://pypi.org/project/google-cloud-gke-multicloud -.. |PyPI-google-cloud-managedkafka| image:: https://img.shields.io/pypi/v/google-cloud-managedkafka.svg - :target: https://pypi.org/project/google-cloud-managedkafka .. |PyPI-google-cloud-apigee-registry| image:: https://img.shields.io/pypi/v/google-cloud-apigee-registry.svg :target: https://pypi.org/project/google-cloud-apigee-registry .. |PyPI-google-cloud-apphub| image:: https://img.shields.io/pypi/v/google-cloud-apphub.svg @@ -1764,14 +1826,20 @@ Libraries :target: https://pypi.org/project/google-cloud-life-sciences .. |PyPI-google-maps-fleetengine| image:: https://img.shields.io/pypi/v/google-maps-fleetengine.svg :target: https://pypi.org/project/google-maps-fleetengine +.. |PyPI-google-cloud-managedkafka| image:: https://img.shields.io/pypi/v/google-cloud-managedkafka.svg + :target: https://pypi.org/project/google-cloud-managedkafka .. |PyPI-google-maps-mapsplatformdatasets| image:: https://img.shields.io/pypi/v/google-maps-mapsplatformdatasets.svg :target: https://pypi.org/project/google-maps-mapsplatformdatasets .. |PyPI-google-maps-routing| image:: https://img.shields.io/pypi/v/google-maps-routing.svg :target: https://pypi.org/project/google-maps-routing +.. |PyPI-google-ads-marketingplatform-admin| image:: https://img.shields.io/pypi/v/google-ads-marketingplatform-admin.svg + :target: https://pypi.org/project/google-ads-marketingplatform-admin .. |PyPI-google-cloud-media-translation| image:: https://img.shields.io/pypi/v/google-cloud-media-translation.svg :target: https://pypi.org/project/google-cloud-media-translation .. |PyPI-google-apps-meet| image:: https://img.shields.io/pypi/v/google-apps-meet.svg :target: https://pypi.org/project/google-apps-meet +.. |PyPI-google-cloud-memorystore| image:: https://img.shields.io/pypi/v/google-cloud-memorystore.svg + :target: https://pypi.org/project/google-cloud-memorystore .. |PyPI-google-cloud-redis-cluster| image:: https://img.shields.io/pypi/v/google-cloud-redis-cluster.svg :target: https://pypi.org/project/google-cloud-redis-cluster .. |PyPI-google-shopping-merchant-accounts| image:: https://img.shields.io/pypi/v/google-shopping-merchant-accounts.svg @@ -1792,30 +1860,42 @@ Libraries :target: https://pypi.org/project/google-shopping-merchant-inventories .. |PyPI-google-shopping-merchant-reports| image:: https://img.shields.io/pypi/v/google-shopping-merchant-reports.svg :target: https://pypi.org/project/google-shopping-merchant-reports +.. |PyPI-google-shopping-merchant-reviews| image:: https://img.shields.io/pypi/v/google-shopping-merchant-reviews.svg + :target: https://pypi.org/project/google-shopping-merchant-reviews .. |PyPI-google-cloud-migrationcenter| image:: https://img.shields.io/pypi/v/google-cloud-migrationcenter.svg :target: https://pypi.org/project/google-cloud-migrationcenter +.. |PyPI-google-cloud-modelarmor| image:: https://img.shields.io/pypi/v/google-cloud-modelarmor.svg + :target: https://pypi.org/project/google-cloud-modelarmor .. |PyPI-google-cloud-netapp| image:: https://img.shields.io/pypi/v/google-cloud-netapp.svg :target: https://pypi.org/project/google-cloud-netapp .. 
|PyPI-google-cloud-network-security| image:: https://img.shields.io/pypi/v/google-cloud-network-security.svg :target: https://pypi.org/project/google-cloud-network-security .. |PyPI-google-cloud-network-services| image:: https://img.shields.io/pypi/v/google-cloud-network-services.svg :target: https://pypi.org/project/google-cloud-network-services +.. |PyPI-google-cloud-oracledatabase| image:: https://img.shields.io/pypi/v/google-cloud-oracledatabase.svg + :target: https://pypi.org/project/google-cloud-oracledatabase .. |PyPI-google-cloud-parallelstore| image:: https://img.shields.io/pypi/v/google-cloud-parallelstore.svg :target: https://pypi.org/project/google-cloud-parallelstore +.. |PyPI-google-cloud-parametermanager| image:: https://img.shields.io/pypi/v/google-cloud-parametermanager.svg + :target: https://pypi.org/project/google-cloud-parametermanager .. |PyPI-google-cloud-phishing-protection| image:: https://img.shields.io/pypi/v/google-cloud-phishing-protection.svg :target: https://pypi.org/project/google-cloud-phishing-protection .. |PyPI-google-maps-places| image:: https://img.shields.io/pypi/v/google-maps-places.svg :target: https://pypi.org/project/google-maps-places +.. |PyPI-google-maps-areainsights| image:: https://img.shields.io/pypi/v/google-maps-areainsights.svg + :target: https://pypi.org/project/google-maps-areainsights .. |PyPI-google-cloud-policysimulator| image:: https://img.shields.io/pypi/v/google-cloud-policysimulator.svg :target: https://pypi.org/project/google-cloud-policysimulator .. |PyPI-google-cloud-policytroubleshooter-iam| image:: https://img.shields.io/pypi/v/google-cloud-policytroubleshooter-iam.svg :target: https://pypi.org/project/google-cloud-policytroubleshooter-iam .. |PyPI-google-cloud-private-catalog| image:: https://img.shields.io/pypi/v/google-cloud-private-catalog.svg :target: https://pypi.org/project/google-cloud-private-catalog +.. |PyPI-google-cloud-privilegedaccessmanager| image:: https://img.shields.io/pypi/v/google-cloud-privilegedaccessmanager.svg + :target: https://pypi.org/project/google-cloud-privilegedaccessmanager .. |PyPI-google-cloud-public-ca| image:: https://img.shields.io/pypi/v/google-cloud-public-ca.svg :target: https://pypi.org/project/google-cloud-public-ca -.. |PyPI-google-cloud-cloudquotas| image:: https://img.shields.io/pypi/v/google-cloud-cloudquotas.svg - :target: https://pypi.org/project/google-cloud-cloudquotas +.. |PyPI-google-cloud-quotas| image:: https://img.shields.io/pypi/v/google-cloud-quotas.svg + :target: https://pypi.org/project/google-cloud-quotas .. |PyPI-google-cloud-rapidmigrationassessment| image:: https://img.shields.io/pypi/v/google-cloud-rapidmigrationassessment.svg :target: https://pypi.org/project/google-cloud-rapidmigrationassessment .. |PyPI-google-cloud-recommendations-ai| image:: https://img.shields.io/pypi/v/google-cloud-recommendations-ai.svg diff --git a/ci/run_conditional_tests.sh b/ci/run_conditional_tests.sh index cba8757f95a9..91e34a6de1e3 100755 --- a/ci/run_conditional_tests.sh +++ b/ci/run_conditional_tests.sh @@ -15,7 +15,8 @@ # This script requires the following environment variables to be set: # `BUILD_TYPE` should be one of ["presubmit", "continuous"] -# `TEST_TYPE` should be one of ["lint", "lint_setup_py", "docs", "docfx", "prerelease", "unit"] +# `TEST_TYPE` should be one of ["docs", "docfx", "prerelease", "unit"] +# or match the name of the nox session that you want to run. 
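(For example — an illustrative invocation, not taken from this diff — a session that no longer has a dedicated branch, such as `lint`, can still be run with `BUILD_TYPE=presubmit TEST_TYPE=lint PY_VERSION=3.12 ci/run_conditional_tests.sh`: the new catch-all branch added to `ci/run_single_test.sh` below simply dispatches to `nox -s ${TEST_TYPE}`.)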
# `PY_VERSION` should be one of ["3.7", "3.8", "3.9", "3.10", "3.11", "3.12"] # `TEST_TYPE` and `PY_VERSION` are required by the script `ci/run_single_test.sh` diff --git a/ci/run_single_test.sh b/ci/run_single_test.sh index 791c5ba3832d..ddbe4c43581b 100755 --- a/ci/run_single_test.sh +++ b/ci/run_single_test.sh @@ -40,14 +40,6 @@ fi set +e case ${TEST_TYPE} in - lint) - nox -s lint - retval=$? - ;; - lint_setup_py) - nox -s lint_setup_py - retval=$? - ;; docs) nox -s docs # This line needs to be directly after `nox -s docs` in order @@ -103,9 +95,16 @@ case ${TEST_TYPE} in retval=$? ;; *) + echo "unsupported PY_VERSION" + exit 1 ;; esac -esac + ;; + *) + nox -s ${TEST_TYPE} + retval=$? + ;; + esac # Clean up `__pycache__` and `.nox` directories to avoid error # `No space left on device` seen when running tests in Github Actions diff --git a/packages/google-ads-admanager/CHANGELOG.md b/packages/google-ads-admanager/CHANGELOG.md index a889acc59a27..2c3810fef2ad 100644 --- a/packages/google-ads-admanager/CHANGELOG.md +++ b/packages/google-ads-admanager/CHANGELOG.md @@ -1,5 +1,13 @@ # Changelog +## [0.2.4](https://github.com/googleapis/google-cloud-python/compare/google-ads-admanager-v0.2.3...google-ads-admanager-v0.2.4) (2025-02-12) + + +### Features + +* Add REST Interceptors which support reading metadata ([a961bc0](https://github.com/googleapis/google-cloud-python/commit/a961bc029201b72fc4923490aeb3d82781853e6a)) +* Add support for reading selective GAPIC generation methods from service YAML ([a961bc0](https://github.com/googleapis/google-cloud-python/commit/a961bc029201b72fc4923490aeb3d82781853e6a)) + ## [0.2.3](https://github.com/googleapis/google-cloud-python/compare/google-ads-admanager-v0.2.2...google-ads-admanager-v0.2.3) (2024-12-12) diff --git a/packages/google-ads-admanager/google/ads/admanager/gapic_version.py b/packages/google-ads-admanager/google/ads/admanager/gapic_version.py index 355df6b536f8..668eac0d72ce 100644 --- a/packages/google-ads-admanager/google/ads/admanager/gapic_version.py +++ b/packages/google-ads-admanager/google/ads/admanager/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.2.3" # {x-release-please-version} +__version__ = "0.2.4" # {x-release-please-version} diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/gapic_version.py b/packages/google-ads-admanager/google/ads/admanager_v1/gapic_version.py index 355df6b536f8..668eac0d72ce 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/gapic_version.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.2.3" # {x-release-please-version} +__version__ = "0.2.4" # {x-release-please-version} diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/ad_unit_service/client.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/ad_unit_service/client.py index e64a74862298..0f390bb76c34 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/ad_unit_service/client.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/ad_unit_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. 
# from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -531,6 +533,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -1126,16 +1155,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/ad_unit_service/transports/rest.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/ad_unit_service/transports/rest.py index e82b2eb5f252..f5e70c65d36d 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/ad_unit_service/transports/rest.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/ad_unit_service/transports/rest.py @@ -118,12 +118,35 @@ def post_get_ad_unit( ) -> ad_unit_messages.AdUnit: """Post-rpc interceptor for get_ad_unit - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_ad_unit_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AdUnitService server but before - it is returned to user code. + it is returned to user code. This `post_get_ad_unit` interceptor runs + before the `post_get_ad_unit_with_metadata` interceptor. """ return response + def post_get_ad_unit_with_metadata( + self, + response: ad_unit_messages.AdUnit, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ad_unit_messages.AdUnit, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_ad_unit + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AdUnitService server but before it is returned to user code. + + We recommend only using this `post_get_ad_unit_with_metadata` + interceptor in new development instead of the `post_get_ad_unit` interceptor. + When both interceptors are used, this `post_get_ad_unit_with_metadata` interceptor runs after the + `post_get_ad_unit` interceptor. 
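A note on the `_add_cred_info_for_auth_errors` helper introduced in each client above: it only appends a JSON-encoded credential summary to the error's details when the RPC fails with 401, 403, or 404 and the installed google-auth exposes `get_cred_info` (2.35.0+). A minimal sketch of how that surfaces to calling code, assuming the google-ads-admanager package and google-auth>=2.35.0 are installed and using a hypothetical resource name:

from google.api_core import exceptions as core_exceptions
from google.ads import admanager_v1

client = admanager_v1.AdUnitServiceClient()
try:
    # Hypothetical resource name; any request that fails with 401/403/404 works.
    client.get_ad_unit(request={"name": "networks/123456/adUnits/7890"})
except core_exceptions.GoogleAPICallError as exc:
    # When google-auth>=2.35.0 provides get_cred_info(), the client appends a
    # JSON string describing the credential it used to exc.details, which
    # helps diagnose IAM and wrong-project issues.
    for detail in exc.details:
        print(detail)
    raise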
The (possibly modified) response returned by + `post_get_ad_unit` will be passed to + `post_get_ad_unit_with_metadata`. + """ + return response, metadata + def pre_list_ad_units( self, request: ad_unit_service.ListAdUnitsRequest, @@ -143,12 +166,37 @@ def post_list_ad_units( ) -> ad_unit_service.ListAdUnitsResponse: """Post-rpc interceptor for list_ad_units - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_ad_units_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AdUnitService server but before - it is returned to user code. + it is returned to user code. This `post_list_ad_units` interceptor runs + before the `post_list_ad_units_with_metadata` interceptor. """ return response + def post_list_ad_units_with_metadata( + self, + response: ad_unit_service.ListAdUnitsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + ad_unit_service.ListAdUnitsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_ad_units + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AdUnitService server but before it is returned to user code. + + We recommend only using this `post_list_ad_units_with_metadata` + interceptor in new development instead of the `post_list_ad_units` interceptor. + When both interceptors are used, this `post_list_ad_units_with_metadata` interceptor runs after the + `post_list_ad_units` interceptor. The (possibly modified) response returned by + `post_list_ad_units` will be passed to + `post_list_ad_units_with_metadata`. + """ + return response, metadata + def pre_list_ad_unit_sizes( self, request: ad_unit_service.ListAdUnitSizesRequest, @@ -168,12 +216,37 @@ def post_list_ad_unit_sizes( ) -> ad_unit_service.ListAdUnitSizesResponse: """Post-rpc interceptor for list_ad_unit_sizes - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_ad_unit_sizes_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AdUnitService server but before - it is returned to user code. + it is returned to user code. This `post_list_ad_unit_sizes` interceptor runs + before the `post_list_ad_unit_sizes_with_metadata` interceptor. """ return response + def post_list_ad_unit_sizes_with_metadata( + self, + response: ad_unit_service.ListAdUnitSizesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + ad_unit_service.ListAdUnitSizesResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_ad_unit_sizes + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AdUnitService server but before it is returned to user code. + + We recommend only using this `post_list_ad_unit_sizes_with_metadata` + interceptor in new development instead of the `post_list_ad_unit_sizes` interceptor. + When both interceptors are used, this `post_list_ad_unit_sizes_with_metadata` interceptor runs after the + `post_list_ad_unit_sizes` interceptor. The (possibly modified) response returned by + `post_list_ad_unit_sizes` will be passed to + `post_list_ad_unit_sizes_with_metadata`. 
+ """ + return response, metadata + def pre_get_operation( self, request: operations_pb2.GetOperationRequest, @@ -407,6 +480,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_ad_unit(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_ad_unit_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -553,6 +630,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_ad_units(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_ad_units_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -701,6 +782,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_ad_unit_sizes(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_ad_unit_sizes_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/company_service/client.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/company_service/client.py index 6ae632c4105b..87775fc8661a 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/company_service/client.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/company_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -554,6 +556,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -1028,16 +1057,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. 
+ return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/company_service/transports/rest.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/company_service/transports/rest.py index 99607ae444c1..268351452a69 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/company_service/transports/rest.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/company_service/transports/rest.py @@ -110,12 +110,35 @@ def post_get_company( ) -> company_messages.Company: """Post-rpc interceptor for get_company - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_company_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CompanyService server but before - it is returned to user code. + it is returned to user code. This `post_get_company` interceptor runs + before the `post_get_company_with_metadata` interceptor. """ return response + def post_get_company_with_metadata( + self, + response: company_messages.Company, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[company_messages.Company, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_company + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CompanyService server but before it is returned to user code. + + We recommend only using this `post_get_company_with_metadata` + interceptor in new development instead of the `post_get_company` interceptor. + When both interceptors are used, this `post_get_company_with_metadata` interceptor runs after the + `post_get_company` interceptor. The (possibly modified) response returned by + `post_get_company` will be passed to + `post_get_company_with_metadata`. + """ + return response, metadata + def pre_list_companies( self, request: company_service.ListCompaniesRequest, @@ -135,12 +158,37 @@ def post_list_companies( ) -> company_service.ListCompaniesResponse: """Post-rpc interceptor for list_companies - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_companies_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CompanyService server but before - it is returned to user code. + it is returned to user code. This `post_list_companies` interceptor runs + before the `post_list_companies_with_metadata` interceptor. """ return response + def post_list_companies_with_metadata( + self, + response: company_service.ListCompaniesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + company_service.ListCompaniesResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_companies + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CompanyService server but before it is returned to user code. + + We recommend only using this `post_list_companies_with_metadata` + interceptor in new development instead of the `post_list_companies` interceptor. + When both interceptors are used, this `post_list_companies_with_metadata` interceptor runs after the + `post_list_companies` interceptor. 
The (possibly modified) response returned by + `post_list_companies` will be passed to + `post_list_companies_with_metadata`. + """ + return response, metadata + def pre_get_operation( self, request: operations_pb2.GetOperationRequest, @@ -372,6 +420,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_company(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_company_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -514,6 +566,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_companies(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_companies_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_field_service/client.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_field_service/client.py index 70d1153c5851..93528e67de0d 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_field_service/client.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_field_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -498,6 +500,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -983,16 +1012,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. 
+ return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_field_service/transports/rest.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_field_service/transports/rest.py index 4f302464ca68..5c619820e4ee 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_field_service/transports/rest.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_field_service/transports/rest.py @@ -111,12 +111,37 @@ def post_get_custom_field( ) -> custom_field_messages.CustomField: """Post-rpc interceptor for get_custom_field - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_custom_field_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CustomFieldService server but before - it is returned to user code. + it is returned to user code. This `post_get_custom_field` interceptor runs + before the `post_get_custom_field_with_metadata` interceptor. """ return response + def post_get_custom_field_with_metadata( + self, + response: custom_field_messages.CustomField, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + custom_field_messages.CustomField, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for get_custom_field + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CustomFieldService server but before it is returned to user code. + + We recommend only using this `post_get_custom_field_with_metadata` + interceptor in new development instead of the `post_get_custom_field` interceptor. + When both interceptors are used, this `post_get_custom_field_with_metadata` interceptor runs after the + `post_get_custom_field` interceptor. The (possibly modified) response returned by + `post_get_custom_field` will be passed to + `post_get_custom_field_with_metadata`. + """ + return response, metadata + def pre_list_custom_fields( self, request: custom_field_service.ListCustomFieldsRequest, @@ -137,12 +162,38 @@ def post_list_custom_fields( ) -> custom_field_service.ListCustomFieldsResponse: """Post-rpc interceptor for list_custom_fields - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_custom_fields_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CustomFieldService server but before - it is returned to user code. + it is returned to user code. This `post_list_custom_fields` interceptor runs + before the `post_list_custom_fields_with_metadata` interceptor. """ return response + def post_list_custom_fields_with_metadata( + self, + response: custom_field_service.ListCustomFieldsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + custom_field_service.ListCustomFieldsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_custom_fields + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CustomFieldService server but before it is returned to user code. + + We recommend only using this `post_list_custom_fields_with_metadata` + interceptor in new development instead of the `post_list_custom_fields` interceptor. 
+ When both interceptors are used, this `post_list_custom_fields_with_metadata` interceptor runs after the + `post_list_custom_fields` interceptor. The (possibly modified) response returned by + `post_list_custom_fields` will be passed to + `post_list_custom_fields_with_metadata`. + """ + return response, metadata + def pre_get_operation( self, request: operations_pb2.GetOperationRequest, @@ -377,6 +428,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_custom_field(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_custom_field_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -524,6 +579,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_custom_fields(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_custom_fields_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_targeting_key_service/client.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_targeting_key_service/client.py index fd79c9e1f297..7a3022f44e7b 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_targeting_key_service/client.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_targeting_key_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -503,6 +505,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -997,16 +1026,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. 
+ return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_targeting_key_service/transports/rest.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_targeting_key_service/transports/rest.py index cfb4d42815d2..61e566af21c2 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_targeting_key_service/transports/rest.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_targeting_key_service/transports/rest.py @@ -114,12 +114,38 @@ def post_get_custom_targeting_key( ) -> custom_targeting_key_messages.CustomTargetingKey: """Post-rpc interceptor for get_custom_targeting_key - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_custom_targeting_key_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CustomTargetingKeyService server but before - it is returned to user code. + it is returned to user code. This `post_get_custom_targeting_key` interceptor runs + before the `post_get_custom_targeting_key_with_metadata` interceptor. """ return response + def post_get_custom_targeting_key_with_metadata( + self, + response: custom_targeting_key_messages.CustomTargetingKey, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + custom_targeting_key_messages.CustomTargetingKey, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for get_custom_targeting_key + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CustomTargetingKeyService server but before it is returned to user code. + + We recommend only using this `post_get_custom_targeting_key_with_metadata` + interceptor in new development instead of the `post_get_custom_targeting_key` interceptor. + When both interceptors are used, this `post_get_custom_targeting_key_with_metadata` interceptor runs after the + `post_get_custom_targeting_key` interceptor. The (possibly modified) response returned by + `post_get_custom_targeting_key` will be passed to + `post_get_custom_targeting_key_with_metadata`. + """ + return response, metadata + def pre_list_custom_targeting_keys( self, request: custom_targeting_key_service.ListCustomTargetingKeysRequest, @@ -140,12 +166,38 @@ def post_list_custom_targeting_keys( ) -> custom_targeting_key_service.ListCustomTargetingKeysResponse: """Post-rpc interceptor for list_custom_targeting_keys - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_custom_targeting_keys_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CustomTargetingKeyService server but before - it is returned to user code. + it is returned to user code. This `post_list_custom_targeting_keys` interceptor runs + before the `post_list_custom_targeting_keys_with_metadata` interceptor. 
""" return response + def post_list_custom_targeting_keys_with_metadata( + self, + response: custom_targeting_key_service.ListCustomTargetingKeysResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + custom_targeting_key_service.ListCustomTargetingKeysResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_custom_targeting_keys + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CustomTargetingKeyService server but before it is returned to user code. + + We recommend only using this `post_list_custom_targeting_keys_with_metadata` + interceptor in new development instead of the `post_list_custom_targeting_keys` interceptor. + When both interceptors are used, this `post_list_custom_targeting_keys_with_metadata` interceptor runs after the + `post_list_custom_targeting_keys` interceptor. The (possibly modified) response returned by + `post_list_custom_targeting_keys` will be passed to + `post_list_custom_targeting_keys_with_metadata`. + """ + return response, metadata + def pre_get_operation( self, request: operations_pb2.GetOperationRequest, @@ -380,6 +432,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_custom_targeting_key(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_custom_targeting_key_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -534,6 +590,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_custom_targeting_keys(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_custom_targeting_keys_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_targeting_value_service/client.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_targeting_value_service/client.py index eb982112d026..881de1ec24e3 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_targeting_value_service/client.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_targeting_value_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -512,6 +514,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. 
+ """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -1013,16 +1042,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_targeting_value_service/transports/rest.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_targeting_value_service/transports/rest.py index 1528b6133e5e..1177b344ce46 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_targeting_value_service/transports/rest.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/custom_targeting_value_service/transports/rest.py @@ -114,12 +114,38 @@ def post_get_custom_targeting_value( ) -> custom_targeting_value_messages.CustomTargetingValue: """Post-rpc interceptor for get_custom_targeting_value - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_custom_targeting_value_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CustomTargetingValueService server but before - it is returned to user code. + it is returned to user code. This `post_get_custom_targeting_value` interceptor runs + before the `post_get_custom_targeting_value_with_metadata` interceptor. """ return response + def post_get_custom_targeting_value_with_metadata( + self, + response: custom_targeting_value_messages.CustomTargetingValue, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + custom_targeting_value_messages.CustomTargetingValue, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for get_custom_targeting_value + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CustomTargetingValueService server but before it is returned to user code. + + We recommend only using this `post_get_custom_targeting_value_with_metadata` + interceptor in new development instead of the `post_get_custom_targeting_value` interceptor. + When both interceptors are used, this `post_get_custom_targeting_value_with_metadata` interceptor runs after the + `post_get_custom_targeting_value` interceptor. The (possibly modified) response returned by + `post_get_custom_targeting_value` will be passed to + `post_get_custom_targeting_value_with_metadata`. 
+ """ + return response, metadata + def pre_list_custom_targeting_values( self, request: custom_targeting_value_service.ListCustomTargetingValuesRequest, @@ -140,12 +166,38 @@ def post_list_custom_targeting_values( ) -> custom_targeting_value_service.ListCustomTargetingValuesResponse: """Post-rpc interceptor for list_custom_targeting_values - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_custom_targeting_values_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CustomTargetingValueService server but before - it is returned to user code. + it is returned to user code. This `post_list_custom_targeting_values` interceptor runs + before the `post_list_custom_targeting_values_with_metadata` interceptor. """ return response + def post_list_custom_targeting_values_with_metadata( + self, + response: custom_targeting_value_service.ListCustomTargetingValuesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + custom_targeting_value_service.ListCustomTargetingValuesResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_custom_targeting_values + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CustomTargetingValueService server but before it is returned to user code. + + We recommend only using this `post_list_custom_targeting_values_with_metadata` + interceptor in new development instead of the `post_list_custom_targeting_values` interceptor. + When both interceptors are used, this `post_list_custom_targeting_values_with_metadata` interceptor runs after the + `post_list_custom_targeting_values` interceptor. The (possibly modified) response returned by + `post_list_custom_targeting_values` will be passed to + `post_list_custom_targeting_values_with_metadata`. + """ + return response, metadata + def pre_get_operation( self, request: operations_pb2.GetOperationRequest, @@ -383,6 +435,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_custom_targeting_value(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_custom_targeting_value_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -539,6 +595,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_custom_targeting_values(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_custom_targeting_values_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/entity_signals_mapping_service/client.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/entity_signals_mapping_service/client.py index f8a09d16a91a..7a84529a19c4 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/entity_signals_mapping_service/client.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/entity_signals_mapping_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. 
# from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -503,6 +505,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -1552,16 +1581,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/entity_signals_mapping_service/transports/rest.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/entity_signals_mapping_service/transports/rest.py index 7883b1005e15..43d89cdcce16 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/entity_signals_mapping_service/transports/rest.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/entity_signals_mapping_service/transports/rest.py @@ -147,12 +147,38 @@ def post_batch_create_entity_signals_mappings( ) -> entity_signals_mapping_service.BatchCreateEntitySignalsMappingsResponse: """Post-rpc interceptor for batch_create_entity_signals_mappings - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_batch_create_entity_signals_mappings_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the EntitySignalsMappingService server but before - it is returned to user code. + it is returned to user code. This `post_batch_create_entity_signals_mappings` interceptor runs + before the `post_batch_create_entity_signals_mappings_with_metadata` interceptor. 
""" return response + def post_batch_create_entity_signals_mappings_with_metadata( + self, + response: entity_signals_mapping_service.BatchCreateEntitySignalsMappingsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + entity_signals_mapping_service.BatchCreateEntitySignalsMappingsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for batch_create_entity_signals_mappings + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the EntitySignalsMappingService server but before it is returned to user code. + + We recommend only using this `post_batch_create_entity_signals_mappings_with_metadata` + interceptor in new development instead of the `post_batch_create_entity_signals_mappings` interceptor. + When both interceptors are used, this `post_batch_create_entity_signals_mappings_with_metadata` interceptor runs after the + `post_batch_create_entity_signals_mappings` interceptor. The (possibly modified) response returned by + `post_batch_create_entity_signals_mappings` will be passed to + `post_batch_create_entity_signals_mappings_with_metadata`. + """ + return response, metadata + def pre_batch_update_entity_signals_mappings( self, request: entity_signals_mapping_service.BatchUpdateEntitySignalsMappingsRequest, @@ -174,12 +200,38 @@ def post_batch_update_entity_signals_mappings( ) -> entity_signals_mapping_service.BatchUpdateEntitySignalsMappingsResponse: """Post-rpc interceptor for batch_update_entity_signals_mappings - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_batch_update_entity_signals_mappings_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the EntitySignalsMappingService server but before - it is returned to user code. + it is returned to user code. This `post_batch_update_entity_signals_mappings` interceptor runs + before the `post_batch_update_entity_signals_mappings_with_metadata` interceptor. """ return response + def post_batch_update_entity_signals_mappings_with_metadata( + self, + response: entity_signals_mapping_service.BatchUpdateEntitySignalsMappingsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + entity_signals_mapping_service.BatchUpdateEntitySignalsMappingsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for batch_update_entity_signals_mappings + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the EntitySignalsMappingService server but before it is returned to user code. + + We recommend only using this `post_batch_update_entity_signals_mappings_with_metadata` + interceptor in new development instead of the `post_batch_update_entity_signals_mappings` interceptor. + When both interceptors are used, this `post_batch_update_entity_signals_mappings_with_metadata` interceptor runs after the + `post_batch_update_entity_signals_mappings` interceptor. The (possibly modified) response returned by + `post_batch_update_entity_signals_mappings` will be passed to + `post_batch_update_entity_signals_mappings_with_metadata`. 
+ """ + return response, metadata + def pre_create_entity_signals_mapping( self, request: entity_signals_mapping_service.CreateEntitySignalsMappingRequest, @@ -200,12 +252,38 @@ def post_create_entity_signals_mapping( ) -> entity_signals_mapping_messages.EntitySignalsMapping: """Post-rpc interceptor for create_entity_signals_mapping - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_entity_signals_mapping_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the EntitySignalsMappingService server but before - it is returned to user code. + it is returned to user code. This `post_create_entity_signals_mapping` interceptor runs + before the `post_create_entity_signals_mapping_with_metadata` interceptor. """ return response + def post_create_entity_signals_mapping_with_metadata( + self, + response: entity_signals_mapping_messages.EntitySignalsMapping, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + entity_signals_mapping_messages.EntitySignalsMapping, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for create_entity_signals_mapping + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the EntitySignalsMappingService server but before it is returned to user code. + + We recommend only using this `post_create_entity_signals_mapping_with_metadata` + interceptor in new development instead of the `post_create_entity_signals_mapping` interceptor. + When both interceptors are used, this `post_create_entity_signals_mapping_with_metadata` interceptor runs after the + `post_create_entity_signals_mapping` interceptor. The (possibly modified) response returned by + `post_create_entity_signals_mapping` will be passed to + `post_create_entity_signals_mapping_with_metadata`. + """ + return response, metadata + def pre_get_entity_signals_mapping( self, request: entity_signals_mapping_service.GetEntitySignalsMappingRequest, @@ -226,12 +304,38 @@ def post_get_entity_signals_mapping( ) -> entity_signals_mapping_messages.EntitySignalsMapping: """Post-rpc interceptor for get_entity_signals_mapping - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_entity_signals_mapping_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the EntitySignalsMappingService server but before - it is returned to user code. + it is returned to user code. This `post_get_entity_signals_mapping` interceptor runs + before the `post_get_entity_signals_mapping_with_metadata` interceptor. """ return response + def post_get_entity_signals_mapping_with_metadata( + self, + response: entity_signals_mapping_messages.EntitySignalsMapping, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + entity_signals_mapping_messages.EntitySignalsMapping, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for get_entity_signals_mapping + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the EntitySignalsMappingService server but before it is returned to user code. + + We recommend only using this `post_get_entity_signals_mapping_with_metadata` + interceptor in new development instead of the `post_get_entity_signals_mapping` interceptor. 
+ When both interceptors are used, this `post_get_entity_signals_mapping_with_metadata` interceptor runs after the + `post_get_entity_signals_mapping` interceptor. The (possibly modified) response returned by + `post_get_entity_signals_mapping` will be passed to + `post_get_entity_signals_mapping_with_metadata`. + """ + return response, metadata + def pre_list_entity_signals_mappings( self, request: entity_signals_mapping_service.ListEntitySignalsMappingsRequest, @@ -252,12 +356,38 @@ def post_list_entity_signals_mappings( ) -> entity_signals_mapping_service.ListEntitySignalsMappingsResponse: """Post-rpc interceptor for list_entity_signals_mappings - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_entity_signals_mappings_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the EntitySignalsMappingService server but before - it is returned to user code. + it is returned to user code. This `post_list_entity_signals_mappings` interceptor runs + before the `post_list_entity_signals_mappings_with_metadata` interceptor. """ return response + def post_list_entity_signals_mappings_with_metadata( + self, + response: entity_signals_mapping_service.ListEntitySignalsMappingsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + entity_signals_mapping_service.ListEntitySignalsMappingsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_entity_signals_mappings + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the EntitySignalsMappingService server but before it is returned to user code. + + We recommend only using this `post_list_entity_signals_mappings_with_metadata` + interceptor in new development instead of the `post_list_entity_signals_mappings` interceptor. + When both interceptors are used, this `post_list_entity_signals_mappings_with_metadata` interceptor runs after the + `post_list_entity_signals_mappings` interceptor. The (possibly modified) response returned by + `post_list_entity_signals_mappings` will be passed to + `post_list_entity_signals_mappings_with_metadata`. + """ + return response, metadata + def pre_update_entity_signals_mapping( self, request: entity_signals_mapping_service.UpdateEntitySignalsMappingRequest, @@ -278,12 +408,38 @@ def post_update_entity_signals_mapping( ) -> entity_signals_mapping_messages.EntitySignalsMapping: """Post-rpc interceptor for update_entity_signals_mapping - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_entity_signals_mapping_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the EntitySignalsMappingService server but before - it is returned to user code. + it is returned to user code. This `post_update_entity_signals_mapping` interceptor runs + before the `post_update_entity_signals_mapping_with_metadata` interceptor. 
""" return response + def post_update_entity_signals_mapping_with_metadata( + self, + response: entity_signals_mapping_messages.EntitySignalsMapping, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + entity_signals_mapping_messages.EntitySignalsMapping, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for update_entity_signals_mapping + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the EntitySignalsMappingService server but before it is returned to user code. + + We recommend only using this `post_update_entity_signals_mapping_with_metadata` + interceptor in new development instead of the `post_update_entity_signals_mapping` interceptor. + When both interceptors are used, this `post_update_entity_signals_mapping_with_metadata` interceptor runs after the + `post_update_entity_signals_mapping` interceptor. The (possibly modified) response returned by + `post_update_entity_signals_mapping` will be passed to + `post_update_entity_signals_mapping_with_metadata`. + """ + return response, metadata + def pre_get_operation( self, request: operations_pb2.GetOperationRequest, @@ -537,6 +693,13 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_batch_create_entity_signals_mappings(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_batch_create_entity_signals_mappings_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -701,6 +864,13 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_batch_update_entity_signals_mappings(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_batch_update_entity_signals_mappings_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -856,6 +1026,13 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_entity_signals_mapping(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_create_entity_signals_mapping_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1006,6 +1183,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_entity_signals_mapping(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_entity_signals_mapping_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1162,6 +1343,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_entity_signals_mappings(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_entity_signals_mappings_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1317,6 +1502,13 @@ def __call__( json_format.Parse(response.content, pb_resp, 
ignore_unknown_fields=True) resp = self._interceptor.post_update_entity_signals_mapping(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_update_entity_signals_mapping_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/network_service/client.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/network_service/client.py index a55f2e1974bc..4d030db58f3c 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/network_service/client.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/network_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -491,6 +493,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -921,16 +950,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/network_service/transports/rest.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/network_service/transports/rest.py index 3546c7b0022b..a5963cc724e2 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/network_service/transports/rest.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/network_service/transports/rest.py @@ -110,12 +110,35 @@ def post_get_network( ) -> network_messages.Network: """Post-rpc interceptor for get_network - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_network_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the NetworkService server but before - it is returned to user code. + it is returned to user code. 
This `post_get_network` interceptor runs + before the `post_get_network_with_metadata` interceptor. """ return response + def post_get_network_with_metadata( + self, + response: network_messages.Network, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[network_messages.Network, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_network + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the NetworkService server but before it is returned to user code. + + We recommend only using this `post_get_network_with_metadata` + interceptor in new development instead of the `post_get_network` interceptor. + When both interceptors are used, this `post_get_network_with_metadata` interceptor runs after the + `post_get_network` interceptor. The (possibly modified) response returned by + `post_get_network` will be passed to + `post_get_network_with_metadata`. + """ + return response, metadata + def pre_list_networks( self, request: network_service.ListNetworksRequest, @@ -135,12 +158,37 @@ def post_list_networks( ) -> network_service.ListNetworksResponse: """Post-rpc interceptor for list_networks - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_networks_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the NetworkService server but before - it is returned to user code. + it is returned to user code. This `post_list_networks` interceptor runs + before the `post_list_networks_with_metadata` interceptor. """ return response + def post_list_networks_with_metadata( + self, + response: network_service.ListNetworksResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + network_service.ListNetworksResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_networks + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the NetworkService server but before it is returned to user code. + + We recommend only using this `post_list_networks_with_metadata` + interceptor in new development instead of the `post_list_networks` interceptor. + When both interceptors are used, this `post_list_networks_with_metadata` interceptor runs after the + `post_list_networks` interceptor. The (possibly modified) response returned by + `post_list_networks` will be passed to + `post_list_networks_with_metadata`. 
+ """ + return response, metadata + def pre_get_operation( self, request: operations_pb2.GetOperationRequest, @@ -372,6 +420,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_network(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_network_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -512,6 +564,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_networks(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_networks_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/order_service/client.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/order_service/client.py index 9f0bd4093669..6ea148703b63 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/order_service/client.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/order_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -605,6 +607,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -1082,16 +1111,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. 
+ return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/order_service/transports/rest.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/order_service/transports/rest.py index 8e683e62aca0..8a02a4452efd 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/order_service/transports/rest.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/order_service/transports/rest.py @@ -106,12 +106,35 @@ def pre_get_order( def post_get_order(self, response: order_messages.Order) -> order_messages.Order: """Post-rpc interceptor for get_order - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_order_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the OrderService server but before - it is returned to user code. + it is returned to user code. This `post_get_order` interceptor runs + before the `post_get_order_with_metadata` interceptor. """ return response + def post_get_order_with_metadata( + self, + response: order_messages.Order, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[order_messages.Order, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_order + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the OrderService server but before it is returned to user code. + + We recommend only using this `post_get_order_with_metadata` + interceptor in new development instead of the `post_get_order` interceptor. + When both interceptors are used, this `post_get_order_with_metadata` interceptor runs after the + `post_get_order` interceptor. The (possibly modified) response returned by + `post_get_order` will be passed to + `post_get_order_with_metadata`. + """ + return response, metadata + def pre_list_orders( self, request: order_service.ListOrdersRequest, @@ -131,12 +154,37 @@ def post_list_orders( ) -> order_service.ListOrdersResponse: """Post-rpc interceptor for list_orders - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_orders_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the OrderService server but before - it is returned to user code. + it is returned to user code. This `post_list_orders` interceptor runs + before the `post_list_orders_with_metadata` interceptor. """ return response + def post_list_orders_with_metadata( + self, + response: order_service.ListOrdersResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + order_service.ListOrdersResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_orders + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the OrderService server but before it is returned to user code. + + We recommend only using this `post_list_orders_with_metadata` + interceptor in new development instead of the `post_list_orders` interceptor. + When both interceptors are used, this `post_list_orders_with_metadata` interceptor runs after the + `post_list_orders` interceptor. The (possibly modified) response returned by + `post_list_orders` will be passed to + `post_list_orders_with_metadata`. 
+ """ + return response, metadata + def pre_get_operation( self, request: operations_pb2.GetOperationRequest, @@ -368,6 +416,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_order(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_order_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -514,6 +566,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_orders(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_orders_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/placement_service/client.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/placement_service/client.py index df86540e30dd..ecaea412ee2b 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/placement_service/client.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/placement_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -516,6 +518,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -993,16 +1022,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. 
+ return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/placement_service/transports/rest.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/placement_service/transports/rest.py index 8e8f883e3453..194b9216f16e 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/placement_service/transports/rest.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/placement_service/transports/rest.py @@ -110,12 +110,35 @@ def post_get_placement( ) -> placement_messages.Placement: """Post-rpc interceptor for get_placement - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_placement_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the PlacementService server but before - it is returned to user code. + it is returned to user code. This `post_get_placement` interceptor runs + before the `post_get_placement_with_metadata` interceptor. """ return response + def post_get_placement_with_metadata( + self, + response: placement_messages.Placement, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[placement_messages.Placement, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_placement + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the PlacementService server but before it is returned to user code. + + We recommend only using this `post_get_placement_with_metadata` + interceptor in new development instead of the `post_get_placement` interceptor. + When both interceptors are used, this `post_get_placement_with_metadata` interceptor runs after the + `post_get_placement` interceptor. The (possibly modified) response returned by + `post_get_placement` will be passed to + `post_get_placement_with_metadata`. + """ + return response, metadata + def pre_list_placements( self, request: placement_service.ListPlacementsRequest, @@ -135,12 +158,38 @@ def post_list_placements( ) -> placement_service.ListPlacementsResponse: """Post-rpc interceptor for list_placements - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_placements_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the PlacementService server but before - it is returned to user code. + it is returned to user code. This `post_list_placements` interceptor runs + before the `post_list_placements_with_metadata` interceptor. """ return response + def post_list_placements_with_metadata( + self, + response: placement_service.ListPlacementsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + placement_service.ListPlacementsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_placements + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the PlacementService server but before it is returned to user code. + + We recommend only using this `post_list_placements_with_metadata` + interceptor in new development instead of the `post_list_placements` interceptor. + When both interceptors are used, this `post_list_placements_with_metadata` interceptor runs after the + `post_list_placements` interceptor. 
The (possibly modified) response returned by + `post_list_placements` will be passed to + `post_list_placements_with_metadata`. + """ + return response, metadata + def pre_get_operation( self, request: operations_pb2.GetOperationRequest, @@ -370,6 +419,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_placement(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_placement_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -512,6 +565,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_placements(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_placements_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/report_service/client.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/report_service/client.py index 8fcc1028591a..fc8ef970aa63 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/report_service/client.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/report_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -494,6 +496,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -1453,16 +1482,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. 
+ return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/report_service/transports/rest.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/report_service/transports/rest.py index 96bd9a927d92..928527f8565a 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/report_service/transports/rest.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/report_service/transports/rest.py @@ -142,12 +142,35 @@ def post_create_report( ) -> report_service.Report: """Post-rpc interceptor for create_report - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_report_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ReportService server but before - it is returned to user code. + it is returned to user code. This `post_create_report` interceptor runs + before the `post_create_report_with_metadata` interceptor. """ return response + def post_create_report_with_metadata( + self, + response: report_service.Report, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[report_service.Report, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_report + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ReportService server but before it is returned to user code. + + We recommend only using this `post_create_report_with_metadata` + interceptor in new development instead of the `post_create_report` interceptor. + When both interceptors are used, this `post_create_report_with_metadata` interceptor runs after the + `post_create_report` interceptor. The (possibly modified) response returned by + `post_create_report` will be passed to + `post_create_report_with_metadata`. + """ + return response, metadata + def pre_fetch_report_result_rows( self, request: report_service.FetchReportResultRowsRequest, @@ -168,12 +191,38 @@ def post_fetch_report_result_rows( ) -> report_service.FetchReportResultRowsResponse: """Post-rpc interceptor for fetch_report_result_rows - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_fetch_report_result_rows_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ReportService server but before - it is returned to user code. + it is returned to user code. This `post_fetch_report_result_rows` interceptor runs + before the `post_fetch_report_result_rows_with_metadata` interceptor. """ return response + def post_fetch_report_result_rows_with_metadata( + self, + response: report_service.FetchReportResultRowsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + report_service.FetchReportResultRowsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for fetch_report_result_rows + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ReportService server but before it is returned to user code. + + We recommend only using this `post_fetch_report_result_rows_with_metadata` + interceptor in new development instead of the `post_fetch_report_result_rows` interceptor. 
+ When both interceptors are used, this `post_fetch_report_result_rows_with_metadata` interceptor runs after the + `post_fetch_report_result_rows` interceptor. The (possibly modified) response returned by + `post_fetch_report_result_rows` will be passed to + `post_fetch_report_result_rows_with_metadata`. + """ + return response, metadata + def pre_get_report( self, request: report_service.GetReportRequest, @@ -191,12 +240,35 @@ def pre_get_report( def post_get_report(self, response: report_service.Report) -> report_service.Report: """Post-rpc interceptor for get_report - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_report_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ReportService server but before - it is returned to user code. + it is returned to user code. This `post_get_report` interceptor runs + before the `post_get_report_with_metadata` interceptor. """ return response + def post_get_report_with_metadata( + self, + response: report_service.Report, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[report_service.Report, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_report + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ReportService server but before it is returned to user code. + + We recommend only using this `post_get_report_with_metadata` + interceptor in new development instead of the `post_get_report` interceptor. + When both interceptors are used, this `post_get_report_with_metadata` interceptor runs after the + `post_get_report` interceptor. The (possibly modified) response returned by + `post_get_report` will be passed to + `post_get_report_with_metadata`. + """ + return response, metadata + def pre_list_reports( self, request: report_service.ListReportsRequest, @@ -216,12 +288,37 @@ def post_list_reports( ) -> report_service.ListReportsResponse: """Post-rpc interceptor for list_reports - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_reports_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ReportService server but before - it is returned to user code. + it is returned to user code. This `post_list_reports` interceptor runs + before the `post_list_reports_with_metadata` interceptor. """ return response + def post_list_reports_with_metadata( + self, + response: report_service.ListReportsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + report_service.ListReportsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_reports + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ReportService server but before it is returned to user code. + + We recommend only using this `post_list_reports_with_metadata` + interceptor in new development instead of the `post_list_reports` interceptor. + When both interceptors are used, this `post_list_reports_with_metadata` interceptor runs after the + `post_list_reports` interceptor. The (possibly modified) response returned by + `post_list_reports` will be passed to + `post_list_reports_with_metadata`. 
+ """ + return response, metadata + def pre_run_report( self, request: report_service.RunReportRequest, @@ -241,12 +338,35 @@ def post_run_report( ) -> operations_pb2.Operation: """Post-rpc interceptor for run_report - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_run_report_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ReportService server but before - it is returned to user code. + it is returned to user code. This `post_run_report` interceptor runs + before the `post_run_report_with_metadata` interceptor. """ return response + def post_run_report_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for run_report + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ReportService server but before it is returned to user code. + + We recommend only using this `post_run_report_with_metadata` + interceptor in new development instead of the `post_run_report` interceptor. + When both interceptors are used, this `post_run_report_with_metadata` interceptor runs after the + `post_run_report` interceptor. The (possibly modified) response returned by + `post_run_report` will be passed to + `post_run_report_with_metadata`. + """ + return response, metadata + def pre_update_report( self, request: report_service.UpdateReportRequest, @@ -266,12 +386,35 @@ def post_update_report( ) -> report_service.Report: """Post-rpc interceptor for update_report - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_report_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ReportService server but before - it is returned to user code. + it is returned to user code. This `post_update_report` interceptor runs + before the `post_update_report_with_metadata` interceptor. """ return response + def post_update_report_with_metadata( + self, + response: report_service.Report, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[report_service.Report, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_report + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ReportService server but before it is returned to user code. + + We recommend only using this `post_update_report_with_metadata` + interceptor in new development instead of the `post_update_report` interceptor. + When both interceptors are used, this `post_update_report_with_metadata` interceptor runs after the + `post_update_report` interceptor. The (possibly modified) response returned by + `post_update_report` will be passed to + `post_update_report_with_metadata`. 
+ """ + return response, metadata + def pre_get_operation( self, request: operations_pb2.GetOperationRequest, @@ -546,6 +689,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_report(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_report_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -692,6 +839,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_fetch_report_result_rows(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_fetch_report_result_rows_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -838,6 +989,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_report(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_report_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -982,6 +1137,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_reports(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_reports_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1138,6 +1297,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_run_report(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_run_report_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1284,6 +1447,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_report(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_report_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/role_service/client.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/role_service/client.py index 9b00a7dcfc53..99268a57bfbb 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/role_service/client.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/role_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -490,6 +492,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. 
+ + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -961,16 +990,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/role_service/transports/rest.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/role_service/transports/rest.py index f2f0211e46f0..32daa99c54d4 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/role_service/transports/rest.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/role_service/transports/rest.py @@ -106,12 +106,35 @@ def pre_get_role( def post_get_role(self, response: role_messages.Role) -> role_messages.Role: """Post-rpc interceptor for get_role - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_role_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RoleService server but before - it is returned to user code. + it is returned to user code. This `post_get_role` interceptor runs + before the `post_get_role_with_metadata` interceptor. """ return response + def post_get_role_with_metadata( + self, + response: role_messages.Role, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[role_messages.Role, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_role + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RoleService server but before it is returned to user code. + + We recommend only using this `post_get_role_with_metadata` + interceptor in new development instead of the `post_get_role` interceptor. + When both interceptors are used, this `post_get_role_with_metadata` interceptor runs after the + `post_get_role` interceptor. The (possibly modified) response returned by + `post_get_role` will be passed to + `post_get_role_with_metadata`. + """ + return response, metadata + def pre_list_roles( self, request: role_service.ListRolesRequest, @@ -129,12 +152,35 @@ def post_list_roles( ) -> role_service.ListRolesResponse: """Post-rpc interceptor for list_roles - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_roles_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response after it is returned by the RoleService server but before - it is returned to user code. + it is returned to user code. This `post_list_roles` interceptor runs + before the `post_list_roles_with_metadata` interceptor. """ return response + def post_list_roles_with_metadata( + self, + response: role_service.ListRolesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[role_service.ListRolesResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_roles + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RoleService server but before it is returned to user code. + + We recommend only using this `post_list_roles_with_metadata` + interceptor in new development instead of the `post_list_roles` interceptor. + When both interceptors are used, this `post_list_roles_with_metadata` interceptor runs after the + `post_list_roles` interceptor. The (possibly modified) response returned by + `post_list_roles` will be passed to + `post_list_roles_with_metadata`. + """ + return response, metadata + def pre_get_operation( self, request: operations_pb2.GetOperationRequest, @@ -366,6 +412,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_role(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_role_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -510,6 +560,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_roles(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_roles_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/taxonomy_category_service/client.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/taxonomy_category_service/client.py index 831d3ae1ddcc..6eaf76b731ba 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/taxonomy_category_service/client.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/taxonomy_category_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -501,6 +503,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. 
+ """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -991,16 +1020,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/taxonomy_category_service/transports/rest.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/taxonomy_category_service/transports/rest.py index 4e32840d3ddd..ec40c279205d 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/taxonomy_category_service/transports/rest.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/taxonomy_category_service/transports/rest.py @@ -114,12 +114,38 @@ def post_get_taxonomy_category( ) -> taxonomy_category_messages.TaxonomyCategory: """Post-rpc interceptor for get_taxonomy_category - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_taxonomy_category_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the TaxonomyCategoryService server but before - it is returned to user code. + it is returned to user code. This `post_get_taxonomy_category` interceptor runs + before the `post_get_taxonomy_category_with_metadata` interceptor. """ return response + def post_get_taxonomy_category_with_metadata( + self, + response: taxonomy_category_messages.TaxonomyCategory, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + taxonomy_category_messages.TaxonomyCategory, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for get_taxonomy_category + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the TaxonomyCategoryService server but before it is returned to user code. + + We recommend only using this `post_get_taxonomy_category_with_metadata` + interceptor in new development instead of the `post_get_taxonomy_category` interceptor. + When both interceptors are used, this `post_get_taxonomy_category_with_metadata` interceptor runs after the + `post_get_taxonomy_category` interceptor. The (possibly modified) response returned by + `post_get_taxonomy_category` will be passed to + `post_get_taxonomy_category_with_metadata`. 
+ """ + return response, metadata + def pre_list_taxonomy_categories( self, request: taxonomy_category_service.ListTaxonomyCategoriesRequest, @@ -140,12 +166,38 @@ def post_list_taxonomy_categories( ) -> taxonomy_category_service.ListTaxonomyCategoriesResponse: """Post-rpc interceptor for list_taxonomy_categories - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_taxonomy_categories_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the TaxonomyCategoryService server but before - it is returned to user code. + it is returned to user code. This `post_list_taxonomy_categories` interceptor runs + before the `post_list_taxonomy_categories_with_metadata` interceptor. """ return response + def post_list_taxonomy_categories_with_metadata( + self, + response: taxonomy_category_service.ListTaxonomyCategoriesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + taxonomy_category_service.ListTaxonomyCategoriesResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_taxonomy_categories + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the TaxonomyCategoryService server but before it is returned to user code. + + We recommend only using this `post_list_taxonomy_categories_with_metadata` + interceptor in new development instead of the `post_list_taxonomy_categories` interceptor. + When both interceptors are used, this `post_list_taxonomy_categories_with_metadata` interceptor runs after the + `post_list_taxonomy_categories` interceptor. The (possibly modified) response returned by + `post_list_taxonomy_categories` will be passed to + `post_list_taxonomy_categories_with_metadata`. + """ + return response, metadata + def pre_get_operation( self, request: operations_pb2.GetOperationRequest, @@ -380,6 +432,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_taxonomy_category(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_taxonomy_category_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -527,6 +583,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_taxonomy_categories(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_taxonomy_categories_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/user_service/client.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/user_service/client.py index fbd1b70e7d1d..fc99625902d7 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/user_service/client.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/user_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -491,6 +493,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. 
return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -846,16 +875,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-ads-admanager/google/ads/admanager_v1/services/user_service/transports/rest.py b/packages/google-ads-admanager/google/ads/admanager_v1/services/user_service/transports/rest.py index 294fbf1cd532..33d60cefd300 100644 --- a/packages/google-ads-admanager/google/ads/admanager_v1/services/user_service/transports/rest.py +++ b/packages/google-ads-admanager/google/ads/admanager_v1/services/user_service/transports/rest.py @@ -98,12 +98,35 @@ def pre_get_user( def post_get_user(self, response: user_messages.User) -> user_messages.User: """Post-rpc interceptor for get_user - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_user_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the UserService server but before - it is returned to user code. + it is returned to user code. This `post_get_user` interceptor runs + before the `post_get_user_with_metadata` interceptor. """ return response + def post_get_user_with_metadata( + self, + response: user_messages.User, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[user_messages.User, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_user + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the UserService server but before it is returned to user code. + + We recommend only using this `post_get_user_with_metadata` + interceptor in new development instead of the `post_get_user` interceptor. + When both interceptors are used, this `post_get_user_with_metadata` interceptor runs after the + `post_get_user` interceptor. The (possibly modified) response returned by + `post_get_user` will be passed to + `post_get_user_with_metadata`. 
+ """ + return response, metadata + def pre_get_operation( self, request: operations_pb2.GetOperationRequest, @@ -335,6 +358,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_user(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_user_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-ads-admanager/samples/generated_samples/snippet_metadata_google.ads.admanager.v1.json b/packages/google-ads-admanager/samples/generated_samples/snippet_metadata_google.ads.admanager.v1.json index a3d2acec31ba..0421e99bdda8 100644 --- a/packages/google-ads-admanager/samples/generated_samples/snippet_metadata_google.ads.admanager.v1.json +++ b/packages/google-ads-admanager/samples/generated_samples/snippet_metadata_google.ads.admanager.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-ads-admanager", - "version": "0.2.3" + "version": "0.2.4" }, "snippets": [ { diff --git a/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_ad_unit_service.py b/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_ad_unit_service.py index cbbc892a654a..c32e26f6eebc 100644 --- a/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_ad_unit_service.py +++ b/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_ad_unit_service.py @@ -67,6 +67,13 @@ applied_label, ) +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -316,6 +323,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = AdUnitServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = AdUnitServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -1804,10 +1854,13 @@ def test_get_ad_unit_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AdUnitServiceRestInterceptor, "post_get_ad_unit" ) as post, mock.patch.object( + transports.AdUnitServiceRestInterceptor, "post_get_ad_unit_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AdUnitServiceRestInterceptor, "pre_get_ad_unit" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = ad_unit_service.GetAdUnitRequest.pb( ad_unit_service.GetAdUnitRequest() ) @@ -1831,6 +1884,7 @@ def test_get_ad_unit_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = ad_unit_messages.AdUnit() + post_with_metadata.return_value = ad_unit_messages.AdUnit(), metadata client.get_ad_unit( request, @@ -1842,6 +1896,7 @@ def test_get_ad_unit_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_ad_units_rest_bad_request( @@ -1928,10 +1983,13 @@ def test_list_ad_units_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AdUnitServiceRestInterceptor, "post_list_ad_units" ) as post, mock.patch.object( + transports.AdUnitServiceRestInterceptor, "post_list_ad_units_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AdUnitServiceRestInterceptor, "pre_list_ad_units" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = ad_unit_service.ListAdUnitsRequest.pb( ad_unit_service.ListAdUnitsRequest() ) @@ -1957,6 +2015,10 @@ def test_list_ad_units_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = ad_unit_service.ListAdUnitsResponse() + post_with_metadata.return_value = ( + ad_unit_service.ListAdUnitsResponse(), + metadata, + ) client.list_ad_units( request, @@ -1968,6 +2030,7 @@ def test_list_ad_units_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_ad_unit_sizes_rest_bad_request( @@ -2054,10 
+2117,13 @@ def test_list_ad_unit_sizes_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AdUnitServiceRestInterceptor, "post_list_ad_unit_sizes" ) as post, mock.patch.object( + transports.AdUnitServiceRestInterceptor, "post_list_ad_unit_sizes_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AdUnitServiceRestInterceptor, "pre_list_ad_unit_sizes" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = ad_unit_service.ListAdUnitSizesRequest.pb( ad_unit_service.ListAdUnitSizesRequest() ) @@ -2083,6 +2149,10 @@ def test_list_ad_unit_sizes_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = ad_unit_service.ListAdUnitSizesResponse() + post_with_metadata.return_value = ( + ad_unit_service.ListAdUnitSizesResponse(), + metadata, + ) client.list_ad_unit_sizes( request, @@ -2094,6 +2164,7 @@ def test_list_ad_unit_sizes_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_operation_rest_bad_request( diff --git a/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_company_service.py b/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_company_service.py index ea9adbde0902..9338a6c3d62a 100644 --- a/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_company_service.py +++ b/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_company_service.py @@ -67,6 +67,13 @@ company_type_enum, ) +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -320,6 +327,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
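
Note (editor, illustration only): the interceptor tests patch the new `post_*_with_metadata` hooks alongside the deprecated `post_*` hooks. A minimal sketch of overriding the new hook in user code follows; the import paths and the `interceptor=` constructor argument follow the generated rest.py/transport layout shown elsewhere in this diff and should be treated as assumptions rather than confirmed public API:

    from google.ads.admanager_v1.services.company_service import CompanyServiceClient
    from google.ads.admanager_v1.services.company_service.transports.rest import (
        CompanyServiceRestInterceptor,
        CompanyServiceRestTransport,
    )

    class HeaderLoggingInterceptor(CompanyServiceRestInterceptor):
        def post_get_company_with_metadata(self, response, metadata):
            # metadata is the list of (header, value) pairs the transport builds
            # from response.headers; it can be inspected or replaced here instead
            # of in the deprecated post_get_company hook.
            print(dict(metadata).get("content-type"))
            return response, metadata

    transport = CompanyServiceRestTransport(interceptor=HeaderLoggingInterceptor())
    client = CompanyServiceClient(transport=transport)
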
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = CompanyServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = CompanyServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -1530,10 +1580,13 @@ def test_get_company_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.CompanyServiceRestInterceptor, "post_get_company" ) as post, mock.patch.object( + transports.CompanyServiceRestInterceptor, "post_get_company_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.CompanyServiceRestInterceptor, "pre_get_company" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = company_service.GetCompanyRequest.pb( company_service.GetCompanyRequest() ) @@ -1557,6 +1610,7 @@ def test_get_company_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = company_messages.Company() + post_with_metadata.return_value = company_messages.Company(), metadata client.get_company( request, @@ -1568,6 +1622,7 @@ def test_get_company_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_companies_rest_bad_request( @@ -1654,10 +1709,13 @@ def test_list_companies_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.CompanyServiceRestInterceptor, "post_list_companies" ) as post, mock.patch.object( + transports.CompanyServiceRestInterceptor, "post_list_companies_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.CompanyServiceRestInterceptor, "pre_list_companies" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = company_service.ListCompaniesRequest.pb( company_service.ListCompaniesRequest() ) @@ -1683,6 +1741,10 @@ def test_list_companies_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = company_service.ListCompaniesResponse() + post_with_metadata.return_value = ( + company_service.ListCompaniesResponse(), + metadata, + ) client.list_companies( request, @@ -1694,6 +1756,7 @@ def test_list_companies_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def 
test_get_operation_rest_bad_request( diff --git a/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_custom_field_service.py b/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_custom_field_service.py index cf1494107490..4f4473d90db4 100644 --- a/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_custom_field_service.py +++ b/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_custom_field_service.py @@ -64,6 +64,13 @@ custom_field_service, ) +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -330,6 +337,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = CustomFieldServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = CustomFieldServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -1566,10 +1616,14 @@ def test_get_custom_field_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.CustomFieldServiceRestInterceptor, "post_get_custom_field" ) as post, mock.patch.object( + transports.CustomFieldServiceRestInterceptor, + "post_get_custom_field_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.CustomFieldServiceRestInterceptor, "pre_get_custom_field" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = custom_field_service.GetCustomFieldRequest.pb( custom_field_service.GetCustomFieldRequest() ) @@ -1595,6 +1649,7 @@ def test_get_custom_field_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = custom_field_messages.CustomField() + post_with_metadata.return_value = custom_field_messages.CustomField(), metadata client.get_custom_field( request, @@ -1606,6 +1661,7 @@ def test_get_custom_field_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_custom_fields_rest_bad_request( @@ -1692,10 +1748,14 @@ def 
test_list_custom_fields_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.CustomFieldServiceRestInterceptor, "post_list_custom_fields" ) as post, mock.patch.object( + transports.CustomFieldServiceRestInterceptor, + "post_list_custom_fields_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.CustomFieldServiceRestInterceptor, "pre_list_custom_fields" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = custom_field_service.ListCustomFieldsRequest.pb( custom_field_service.ListCustomFieldsRequest() ) @@ -1721,6 +1781,10 @@ def test_list_custom_fields_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = custom_field_service.ListCustomFieldsResponse() + post_with_metadata.return_value = ( + custom_field_service.ListCustomFieldsResponse(), + metadata, + ) client.list_custom_fields( request, @@ -1732,6 +1796,7 @@ def test_list_custom_fields_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_operation_rest_bad_request( diff --git a/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_custom_targeting_key_service.py b/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_custom_targeting_key_service.py index 4aed3143f97d..5a552ec6ed77 100644 --- a/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_custom_targeting_key_service.py +++ b/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_custom_targeting_key_service.py @@ -64,6 +64,13 @@ custom_targeting_key_service, ) +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -338,6 +345,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = CustomTargetingKeyServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = CustomTargetingKeyServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -1602,11 +1652,15 @@ def test_get_custom_targeting_key_rest_interceptors(null_interceptor): transports.CustomTargetingKeyServiceRestInterceptor, "post_get_custom_targeting_key", ) as post, mock.patch.object( + transports.CustomTargetingKeyServiceRestInterceptor, + "post_get_custom_targeting_key_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.CustomTargetingKeyServiceRestInterceptor, "pre_get_custom_targeting_key", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = custom_targeting_key_service.GetCustomTargetingKeyRequest.pb( custom_targeting_key_service.GetCustomTargetingKeyRequest() ) @@ -1632,6 +1686,10 @@ def test_get_custom_targeting_key_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = custom_targeting_key_messages.CustomTargetingKey() + post_with_metadata.return_value = ( + custom_targeting_key_messages.CustomTargetingKey(), + metadata, + ) client.get_custom_targeting_key( request, @@ -1643,6 +1701,7 @@ def test_get_custom_targeting_key_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_custom_targeting_keys_rest_bad_request( @@ -1732,11 +1791,15 @@ def test_list_custom_targeting_keys_rest_interceptors(null_interceptor): transports.CustomTargetingKeyServiceRestInterceptor, "post_list_custom_targeting_keys", ) as post, mock.patch.object( + transports.CustomTargetingKeyServiceRestInterceptor, + "post_list_custom_targeting_keys_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.CustomTargetingKeyServiceRestInterceptor, "pre_list_custom_targeting_keys", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = custom_targeting_key_service.ListCustomTargetingKeysRequest.pb( custom_targeting_key_service.ListCustomTargetingKeysRequest() ) @@ -1766,6 +1829,10 @@ def test_list_custom_targeting_keys_rest_interceptors(null_interceptor): post.return_value = ( custom_targeting_key_service.ListCustomTargetingKeysResponse() ) + 
post_with_metadata.return_value = ( + custom_targeting_key_service.ListCustomTargetingKeysResponse(), + metadata, + ) client.list_custom_targeting_keys( request, @@ -1777,6 +1844,7 @@ def test_list_custom_targeting_keys_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_operation_rest_bad_request( diff --git a/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_custom_targeting_value_service.py b/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_custom_targeting_value_service.py index 7012ac0c2344..d61a340e705f 100644 --- a/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_custom_targeting_value_service.py +++ b/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_custom_targeting_value_service.py @@ -64,6 +64,13 @@ custom_targeting_value_service, ) +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -342,6 +349,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = CustomTargetingValueServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = CustomTargetingValueServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -1615,11 +1665,15 @@ def test_get_custom_targeting_value_rest_interceptors(null_interceptor): transports.CustomTargetingValueServiceRestInterceptor, "post_get_custom_targeting_value", ) as post, mock.patch.object( + transports.CustomTargetingValueServiceRestInterceptor, + "post_get_custom_targeting_value_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.CustomTargetingValueServiceRestInterceptor, "pre_get_custom_targeting_value", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = custom_targeting_value_service.GetCustomTargetingValueRequest.pb( custom_targeting_value_service.GetCustomTargetingValueRequest() ) @@ -1645,6 +1699,10 @@ def test_get_custom_targeting_value_rest_interceptors(null_interceptor): ] 
pre.return_value = request, metadata post.return_value = custom_targeting_value_messages.CustomTargetingValue() + post_with_metadata.return_value = ( + custom_targeting_value_messages.CustomTargetingValue(), + metadata, + ) client.get_custom_targeting_value( request, @@ -1656,6 +1714,7 @@ def test_get_custom_targeting_value_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_custom_targeting_values_rest_bad_request( @@ -1747,11 +1806,15 @@ def test_list_custom_targeting_values_rest_interceptors(null_interceptor): transports.CustomTargetingValueServiceRestInterceptor, "post_list_custom_targeting_values", ) as post, mock.patch.object( + transports.CustomTargetingValueServiceRestInterceptor, + "post_list_custom_targeting_values_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.CustomTargetingValueServiceRestInterceptor, "pre_list_custom_targeting_values", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = custom_targeting_value_service.ListCustomTargetingValuesRequest.pb( custom_targeting_value_service.ListCustomTargetingValuesRequest() ) @@ -1781,6 +1844,10 @@ def test_list_custom_targeting_values_rest_interceptors(null_interceptor): post.return_value = ( custom_targeting_value_service.ListCustomTargetingValuesResponse() ) + post_with_metadata.return_value = ( + custom_targeting_value_service.ListCustomTargetingValuesResponse(), + metadata, + ) client.list_custom_targeting_values( request, @@ -1792,6 +1859,7 @@ def test_list_custom_targeting_values_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_operation_rest_bad_request( diff --git a/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_entity_signals_mapping_service.py b/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_entity_signals_mapping_service.py index 2368f1e1d2e5..48c875d6ead0 100644 --- a/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_entity_signals_mapping_service.py +++ b/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_entity_signals_mapping_service.py @@ -64,6 +64,13 @@ entity_signals_mapping_service, ) +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -342,6 +349,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = EntitySignalsMappingServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = EntitySignalsMappingServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -2442,11 +2492,15 @@ def test_get_entity_signals_mapping_rest_interceptors(null_interceptor): transports.EntitySignalsMappingServiceRestInterceptor, "post_get_entity_signals_mapping", ) as post, mock.patch.object( + transports.EntitySignalsMappingServiceRestInterceptor, + "post_get_entity_signals_mapping_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.EntitySignalsMappingServiceRestInterceptor, "pre_get_entity_signals_mapping", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = entity_signals_mapping_service.GetEntitySignalsMappingRequest.pb( entity_signals_mapping_service.GetEntitySignalsMappingRequest() ) @@ -2472,6 +2526,10 @@ def test_get_entity_signals_mapping_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = entity_signals_mapping_messages.EntitySignalsMapping() + post_with_metadata.return_value = ( + entity_signals_mapping_messages.EntitySignalsMapping(), + metadata, + ) client.get_entity_signals_mapping( request, @@ -2483,6 +2541,7 @@ def test_get_entity_signals_mapping_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_entity_signals_mappings_rest_bad_request( @@ -2574,11 +2633,15 @@ def test_list_entity_signals_mappings_rest_interceptors(null_interceptor): transports.EntitySignalsMappingServiceRestInterceptor, "post_list_entity_signals_mappings", ) as post, mock.patch.object( + transports.EntitySignalsMappingServiceRestInterceptor, + "post_list_entity_signals_mappings_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.EntitySignalsMappingServiceRestInterceptor, "pre_list_entity_signals_mappings", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = entity_signals_mapping_service.ListEntitySignalsMappingsRequest.pb( entity_signals_mapping_service.ListEntitySignalsMappingsRequest() ) @@ -2608,6 +2671,10 @@ def test_list_entity_signals_mappings_rest_interceptors(null_interceptor): post.return_value = ( 
entity_signals_mapping_service.ListEntitySignalsMappingsResponse() ) + post_with_metadata.return_value = ( + entity_signals_mapping_service.ListEntitySignalsMappingsResponse(), + metadata, + ) client.list_entity_signals_mappings( request, @@ -2619,6 +2686,7 @@ def test_list_entity_signals_mappings_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_entity_signals_mapping_rest_bad_request( @@ -2792,11 +2860,15 @@ def test_create_entity_signals_mapping_rest_interceptors(null_interceptor): transports.EntitySignalsMappingServiceRestInterceptor, "post_create_entity_signals_mapping", ) as post, mock.patch.object( + transports.EntitySignalsMappingServiceRestInterceptor, + "post_create_entity_signals_mapping_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.EntitySignalsMappingServiceRestInterceptor, "pre_create_entity_signals_mapping", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = ( entity_signals_mapping_service.CreateEntitySignalsMappingRequest.pb( entity_signals_mapping_service.CreateEntitySignalsMappingRequest() @@ -2824,6 +2896,10 @@ def test_create_entity_signals_mapping_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = entity_signals_mapping_messages.EntitySignalsMapping() + post_with_metadata.return_value = ( + entity_signals_mapping_messages.EntitySignalsMapping(), + metadata, + ) client.create_entity_signals_mapping( request, @@ -2835,6 +2911,7 @@ def test_create_entity_signals_mapping_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_entity_signals_mapping_rest_bad_request( @@ -3016,11 +3093,15 @@ def test_update_entity_signals_mapping_rest_interceptors(null_interceptor): transports.EntitySignalsMappingServiceRestInterceptor, "post_update_entity_signals_mapping", ) as post, mock.patch.object( + transports.EntitySignalsMappingServiceRestInterceptor, + "post_update_entity_signals_mapping_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.EntitySignalsMappingServiceRestInterceptor, "pre_update_entity_signals_mapping", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = ( entity_signals_mapping_service.UpdateEntitySignalsMappingRequest.pb( entity_signals_mapping_service.UpdateEntitySignalsMappingRequest() @@ -3048,6 +3129,10 @@ def test_update_entity_signals_mapping_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = entity_signals_mapping_messages.EntitySignalsMapping() + post_with_metadata.return_value = ( + entity_signals_mapping_messages.EntitySignalsMapping(), + metadata, + ) client.update_entity_signals_mapping( request, @@ -3059,6 +3144,7 @@ def test_update_entity_signals_mapping_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_batch_create_entity_signals_mappings_rest_bad_request( @@ -3150,11 +3236,15 @@ def test_batch_create_entity_signals_mappings_rest_interceptors(null_interceptor transports.EntitySignalsMappingServiceRestInterceptor, "post_batch_create_entity_signals_mappings", ) as post, mock.patch.object( + transports.EntitySignalsMappingServiceRestInterceptor, + "post_batch_create_entity_signals_mappings_with_metadata", + ) as 
post_with_metadata, mock.patch.object( transports.EntitySignalsMappingServiceRestInterceptor, "pre_batch_create_entity_signals_mappings", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = ( entity_signals_mapping_service.BatchCreateEntitySignalsMappingsRequest.pb( entity_signals_mapping_service.BatchCreateEntitySignalsMappingsRequest() @@ -3186,6 +3276,10 @@ def test_batch_create_entity_signals_mappings_rest_interceptors(null_interceptor post.return_value = ( entity_signals_mapping_service.BatchCreateEntitySignalsMappingsResponse() ) + post_with_metadata.return_value = ( + entity_signals_mapping_service.BatchCreateEntitySignalsMappingsResponse(), + metadata, + ) client.batch_create_entity_signals_mappings( request, @@ -3197,6 +3291,7 @@ def test_batch_create_entity_signals_mappings_rest_interceptors(null_interceptor pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_batch_update_entity_signals_mappings_rest_bad_request( @@ -3288,11 +3383,15 @@ def test_batch_update_entity_signals_mappings_rest_interceptors(null_interceptor transports.EntitySignalsMappingServiceRestInterceptor, "post_batch_update_entity_signals_mappings", ) as post, mock.patch.object( + transports.EntitySignalsMappingServiceRestInterceptor, + "post_batch_update_entity_signals_mappings_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.EntitySignalsMappingServiceRestInterceptor, "pre_batch_update_entity_signals_mappings", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = ( entity_signals_mapping_service.BatchUpdateEntitySignalsMappingsRequest.pb( entity_signals_mapping_service.BatchUpdateEntitySignalsMappingsRequest() @@ -3324,6 +3423,10 @@ def test_batch_update_entity_signals_mappings_rest_interceptors(null_interceptor post.return_value = ( entity_signals_mapping_service.BatchUpdateEntitySignalsMappingsResponse() ) + post_with_metadata.return_value = ( + entity_signals_mapping_service.BatchUpdateEntitySignalsMappingsResponse(), + metadata, + ) client.batch_update_entity_signals_mappings( request, @@ -3335,6 +3438,7 @@ def test_batch_update_entity_signals_mappings_rest_interceptors(null_interceptor pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_operation_rest_bad_request( diff --git a/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_network_service.py b/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_network_service.py index 2ca64e50c3ee..6524641e689f 100644 --- a/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_network_service.py +++ b/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_network_service.py @@ -59,6 +59,13 @@ ) from google.ads.admanager_v1.types import network_messages, network_service +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -312,6 +319,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = NetworkServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = NetworkServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -1287,10 +1337,13 @@ def test_get_network_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.NetworkServiceRestInterceptor, "post_get_network" ) as post, mock.patch.object( + transports.NetworkServiceRestInterceptor, "post_get_network_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.NetworkServiceRestInterceptor, "pre_get_network" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = network_service.GetNetworkRequest.pb( network_service.GetNetworkRequest() ) @@ -1314,6 +1367,7 @@ def test_get_network_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = network_messages.Network() + post_with_metadata.return_value = network_messages.Network(), metadata client.get_network( request, @@ -1325,6 +1379,7 @@ def test_get_network_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_networks_rest_bad_request( @@ -1406,10 +1461,13 @@ def test_list_networks_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.NetworkServiceRestInterceptor, "post_list_networks" ) as post, mock.patch.object( + transports.NetworkServiceRestInterceptor, "post_list_networks_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.NetworkServiceRestInterceptor, "pre_list_networks" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = network_service.ListNetworksRequest.pb( network_service.ListNetworksRequest() ) @@ -1435,6 +1493,10 @@ def test_list_networks_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = network_service.ListNetworksResponse() + post_with_metadata.return_value = ( + network_service.ListNetworksResponse(), + metadata, + ) client.list_networks( request, @@ -1446,6 +1508,7 @@ def test_list_networks_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_operation_rest_bad_request( diff 
--git a/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_order_service.py b/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_order_service.py index 62013e27fa51..a214e33fab4c 100644 --- a/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_order_service.py +++ b/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_order_service.py @@ -67,6 +67,13 @@ order_service, ) +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -305,6 +312,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = OrderServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = OrderServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -1522,10 +1572,13 @@ def test_get_order_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.OrderServiceRestInterceptor, "post_get_order" ) as post, mock.patch.object( + transports.OrderServiceRestInterceptor, "post_get_order_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.OrderServiceRestInterceptor, "pre_get_order" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = order_service.GetOrderRequest.pb(order_service.GetOrderRequest()) transcode.return_value = { "method": "post", @@ -1547,6 +1600,7 @@ def test_get_order_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = order_messages.Order() + post_with_metadata.return_value = order_messages.Order(), metadata client.get_order( request, @@ -1558,6 +1612,7 @@ def test_get_order_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_orders_rest_bad_request(request_type=order_service.ListOrdersRequest): @@ -1642,10 +1697,13 @@ def test_list_orders_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.OrderServiceRestInterceptor, "post_list_orders" ) as post, mock.patch.object( + 
transports.OrderServiceRestInterceptor, "post_list_orders_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.OrderServiceRestInterceptor, "pre_list_orders" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = order_service.ListOrdersRequest.pb( order_service.ListOrdersRequest() ) @@ -1671,6 +1729,7 @@ def test_list_orders_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = order_service.ListOrdersResponse() + post_with_metadata.return_value = order_service.ListOrdersResponse(), metadata client.list_orders( request, @@ -1682,6 +1741,7 @@ def test_list_orders_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_operation_rest_bad_request( diff --git a/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_placement_service.py b/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_placement_service.py index aeb3edbe704e..79f8c74f31e1 100644 --- a/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_placement_service.py +++ b/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_placement_service.py @@ -65,6 +65,13 @@ placement_service, ) +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -319,6 +326,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = PlacementServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = PlacementServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -1533,10 +1583,13 @@ def test_get_placement_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.PlacementServiceRestInterceptor, "post_get_placement" ) as post, mock.patch.object( + transports.PlacementServiceRestInterceptor, "post_get_placement_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.PlacementServiceRestInterceptor, "pre_get_placement" ) as pre: 
pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = placement_service.GetPlacementRequest.pb( placement_service.GetPlacementRequest() ) @@ -1562,6 +1615,7 @@ def test_get_placement_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = placement_messages.Placement() + post_with_metadata.return_value = placement_messages.Placement(), metadata client.get_placement( request, @@ -1573,6 +1627,7 @@ def test_get_placement_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_placements_rest_bad_request( @@ -1659,10 +1714,13 @@ def test_list_placements_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.PlacementServiceRestInterceptor, "post_list_placements" ) as post, mock.patch.object( + transports.PlacementServiceRestInterceptor, "post_list_placements_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.PlacementServiceRestInterceptor, "pre_list_placements" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = placement_service.ListPlacementsRequest.pb( placement_service.ListPlacementsRequest() ) @@ -1688,6 +1746,10 @@ def test_list_placements_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = placement_service.ListPlacementsResponse() + post_with_metadata.return_value = ( + placement_service.ListPlacementsResponse(), + metadata, + ) client.list_placements( request, @@ -1699,6 +1761,7 @@ def test_list_placements_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_operation_rest_bad_request( diff --git a/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_report_service.py b/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_report_service.py index 53295bb95d07..412dd291ae78 100644 --- a/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_report_service.py +++ b/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_report_service.py @@ -74,6 +74,13 @@ ) from google.ads.admanager_v1.types import report_service +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -323,6 +330,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = ReportServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = ReportServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -2217,10 +2267,13 @@ def test_get_report_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ReportServiceRestInterceptor, "post_get_report" ) as post, mock.patch.object( + transports.ReportServiceRestInterceptor, "post_get_report_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ReportServiceRestInterceptor, "pre_get_report" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = report_service.GetReportRequest.pb( report_service.GetReportRequest() ) @@ -2244,6 +2297,7 @@ def test_get_report_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = report_service.Report() + post_with_metadata.return_value = report_service.Report(), metadata client.get_report( request, @@ -2255,6 +2309,7 @@ def test_get_report_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_reports_rest_bad_request(request_type=report_service.ListReportsRequest): @@ -2339,10 +2394,13 @@ def test_list_reports_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ReportServiceRestInterceptor, "post_list_reports" ) as post, mock.patch.object( + transports.ReportServiceRestInterceptor, "post_list_reports_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ReportServiceRestInterceptor, "pre_list_reports" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = report_service.ListReportsRequest.pb( report_service.ListReportsRequest() ) @@ -2368,6 +2426,7 @@ def test_list_reports_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = report_service.ListReportsResponse() + post_with_metadata.return_value = report_service.ListReportsResponse(), metadata client.list_reports( request, @@ -2379,6 +2438,7 @@ def test_list_reports_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_report_rest_bad_request( @@ -2621,10 
+2681,13 @@ def test_create_report_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ReportServiceRestInterceptor, "post_create_report" ) as post, mock.patch.object( + transports.ReportServiceRestInterceptor, "post_create_report_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ReportServiceRestInterceptor, "pre_create_report" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = report_service.CreateReportRequest.pb( report_service.CreateReportRequest() ) @@ -2648,6 +2711,7 @@ def test_create_report_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = report_service.Report() + post_with_metadata.return_value = report_service.Report(), metadata client.create_report( request, @@ -2659,6 +2723,7 @@ def test_create_report_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_report_rest_bad_request( @@ -2901,10 +2966,13 @@ def test_update_report_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ReportServiceRestInterceptor, "post_update_report" ) as post, mock.patch.object( + transports.ReportServiceRestInterceptor, "post_update_report_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ReportServiceRestInterceptor, "pre_update_report" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = report_service.UpdateReportRequest.pb( report_service.UpdateReportRequest() ) @@ -2928,6 +2996,7 @@ def test_update_report_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = report_service.Report() + post_with_metadata.return_value = report_service.Report(), metadata client.update_report( request, @@ -2939,6 +3008,7 @@ def test_update_report_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_run_report_rest_bad_request(request_type=report_service.RunReportRequest): @@ -3017,10 +3087,13 @@ def test_run_report_rest_interceptors(null_interceptor): ), mock.patch.object( transports.ReportServiceRestInterceptor, "post_run_report" ) as post, mock.patch.object( + transports.ReportServiceRestInterceptor, "post_run_report_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ReportServiceRestInterceptor, "pre_run_report" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = report_service.RunReportRequest.pb( report_service.RunReportRequest() ) @@ -3044,6 +3117,7 @@ def test_run_report_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.run_report( request, @@ -3055,6 +3129,7 @@ def test_run_report_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_fetch_report_result_rows_rest_bad_request( @@ -3141,10 +3216,14 @@ def test_fetch_report_result_rows_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ReportServiceRestInterceptor, "post_fetch_report_result_rows" ) as post, mock.patch.object( + transports.ReportServiceRestInterceptor, + "post_fetch_report_result_rows_with_metadata", + ) as 
post_with_metadata, mock.patch.object( transports.ReportServiceRestInterceptor, "pre_fetch_report_result_rows" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = report_service.FetchReportResultRowsRequest.pb( report_service.FetchReportResultRowsRequest() ) @@ -3170,6 +3249,10 @@ def test_fetch_report_result_rows_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = report_service.FetchReportResultRowsResponse() + post_with_metadata.return_value = ( + report_service.FetchReportResultRowsResponse(), + metadata, + ) client.fetch_report_result_rows( request, @@ -3181,6 +3264,7 @@ def test_fetch_report_result_rows_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_operation_rest_bad_request( diff --git a/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_role_service.py b/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_role_service.py index 539557e51834..e6ad78ef2745 100644 --- a/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_role_service.py +++ b/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_role_service.py @@ -60,6 +60,13 @@ ) from google.ads.admanager_v1.types import role_enums, role_messages, role_service +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -294,6 +301,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = RoleServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = RoleServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -1477,10 +1527,13 @@ def test_get_role_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RoleServiceRestInterceptor, "post_get_role" ) as post, mock.patch.object( + transports.RoleServiceRestInterceptor, "post_get_role_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.RoleServiceRestInterceptor, "pre_get_role" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = role_service.GetRoleRequest.pb(role_service.GetRoleRequest()) transcode.return_value = { "method": "post", @@ -1502,6 +1555,7 @@ def test_get_role_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = role_messages.Role() + post_with_metadata.return_value = role_messages.Role(), metadata client.get_role( request, @@ -1513,6 +1567,7 @@ def test_get_role_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_roles_rest_bad_request(request_type=role_service.ListRolesRequest): @@ -1597,10 +1652,13 @@ def test_list_roles_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RoleServiceRestInterceptor, "post_list_roles" ) as post, mock.patch.object( + transports.RoleServiceRestInterceptor, "post_list_roles_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.RoleServiceRestInterceptor, "pre_list_roles" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = role_service.ListRolesRequest.pb(role_service.ListRolesRequest()) transcode.return_value = { "method": "post", @@ -1624,6 +1682,7 @@ def test_list_roles_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = role_service.ListRolesResponse() + post_with_metadata.return_value = role_service.ListRolesResponse(), metadata client.list_roles( request, @@ -1635,6 +1694,7 @@ def test_list_roles_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_operation_rest_bad_request( diff 
--git a/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_taxonomy_category_service.py b/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_taxonomy_category_service.py index b8dfaa8586f0..e17de5923ede 100644 --- a/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_taxonomy_category_service.py +++ b/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_taxonomy_category_service.py @@ -64,6 +64,13 @@ taxonomy_type_enum, ) +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -334,6 +341,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = TaxonomyCategoryServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = TaxonomyCategoryServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -1582,10 +1632,14 @@ def test_get_taxonomy_category_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.TaxonomyCategoryServiceRestInterceptor, "post_get_taxonomy_category" ) as post, mock.patch.object( + transports.TaxonomyCategoryServiceRestInterceptor, + "post_get_taxonomy_category_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.TaxonomyCategoryServiceRestInterceptor, "pre_get_taxonomy_category" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = taxonomy_category_service.GetTaxonomyCategoryRequest.pb( taxonomy_category_service.GetTaxonomyCategoryRequest() ) @@ -1611,6 +1665,10 @@ def test_get_taxonomy_category_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = taxonomy_category_messages.TaxonomyCategory() + post_with_metadata.return_value = ( + taxonomy_category_messages.TaxonomyCategory(), + metadata, + ) client.get_taxonomy_category( request, @@ -1622,6 +1680,7 @@ def test_get_taxonomy_category_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def 
test_list_taxonomy_categories_rest_bad_request( @@ -1711,11 +1770,15 @@ def test_list_taxonomy_categories_rest_interceptors(null_interceptor): transports.TaxonomyCategoryServiceRestInterceptor, "post_list_taxonomy_categories", ) as post, mock.patch.object( + transports.TaxonomyCategoryServiceRestInterceptor, + "post_list_taxonomy_categories_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.TaxonomyCategoryServiceRestInterceptor, "pre_list_taxonomy_categories", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = taxonomy_category_service.ListTaxonomyCategoriesRequest.pb( taxonomy_category_service.ListTaxonomyCategoriesRequest() ) @@ -1741,6 +1804,10 @@ def test_list_taxonomy_categories_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = taxonomy_category_service.ListTaxonomyCategoriesResponse() + post_with_metadata.return_value = ( + taxonomy_category_service.ListTaxonomyCategoriesResponse(), + metadata, + ) client.list_taxonomy_categories( request, @@ -1752,6 +1819,7 @@ def test_list_taxonomy_categories_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_operation_rest_bad_request( diff --git a/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_user_service.py b/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_user_service.py index e7c70abf8365..7819d3308ae5 100644 --- a/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_user_service.py +++ b/packages/google-ads-admanager/tests/unit/gapic/admanager_v1/test_user_service.py @@ -56,6 +56,13 @@ from google.ads.admanager_v1.services.user_service import UserServiceClient, transports from google.ads.admanager_v1.types import user_messages, user_service +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -290,6 +297,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = UserServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = UserServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -1223,10 +1273,13 @@ def test_get_user_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.UserServiceRestInterceptor, "post_get_user" ) as post, mock.patch.object( + transports.UserServiceRestInterceptor, "post_get_user_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.UserServiceRestInterceptor, "pre_get_user" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = user_service.GetUserRequest.pb(user_service.GetUserRequest()) transcode.return_value = { "method": "post", @@ -1248,6 +1301,7 @@ def test_get_user_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = user_messages.User() + post_with_metadata.return_value = user_messages.User(), metadata client.get_user( request, @@ -1259,6 +1313,7 @@ def test_get_user_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_operation_rest_bad_request( diff --git a/packages/google-ads-marketingplatform-admin/CHANGELOG.md b/packages/google-ads-marketingplatform-admin/CHANGELOG.md index f494b07d9789..96087c6a641a 100644 --- a/packages/google-ads-marketingplatform-admin/CHANGELOG.md +++ b/packages/google-ads-marketingplatform-admin/CHANGELOG.md @@ -1,5 +1,13 @@ # Changelog +## [0.1.4](https://github.com/googleapis/google-cloud-python/compare/google-ads-marketingplatform-admin-v0.1.3...google-ads-marketingplatform-admin-v0.1.4) (2025-02-12) + + +### Features + +* Add REST Interceptors which support reading metadata ([a961bc0](https://github.com/googleapis/google-cloud-python/commit/a961bc029201b72fc4923490aeb3d82781853e6a)) +* Add support for reading selective GAPIC generation methods from service YAML ([a961bc0](https://github.com/googleapis/google-cloud-python/commit/a961bc029201b72fc4923490aeb3d82781853e6a)) + ## [0.1.3](https://github.com/googleapis/google-cloud-python/compare/google-ads-marketingplatform-admin-v0.1.2...google-ads-marketingplatform-admin-v0.1.3) (2024-12-12) diff --git 
a/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin/gapic_version.py b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin/gapic_version.py index 114e40645800..937ede8823ef 100644 --- a/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin/gapic_version.py +++ b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.1.3" # {x-release-please-version} +__version__ = "0.1.4" # {x-release-please-version} diff --git a/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/gapic_version.py b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/gapic_version.py index 114e40645800..937ede8823ef 100644 --- a/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/gapic_version.py +++ b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.1.3" # {x-release-please-version} +__version__ = "0.1.4" # {x-release-please-version} diff --git a/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/marketingplatform_admin_service/client.py b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/marketingplatform_admin_service/client.py index 15e600d41a85..35c9a48b8075 100644 --- a/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/marketingplatform_admin_service/client.py +++ b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/marketingplatform_admin_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -540,6 +542,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. 
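The client hunk above adds `_add_cred_info_for_auth_errors`, which the new unit tests in this change exercise. Below is a minimal, self-contained sketch of the behavior those tests assert; it is not part of the generated diff, and `FakeCredentials` is a hypothetical stand-in for a google-auth credentials object. The idea: credential info is JSON-serialized and appended to the error details only for 401/403/404 responses, and only when the credentials expose `get_cred_info()` (available in google-auth >= 2.35.0).

import json
from http import HTTPStatus

def add_cred_info_for_auth_errors(credentials, error_code, details):
    """Sketch (not the generated code): append serialized credential info
    to `details` only for 401/403/404 error codes."""
    if error_code not in (
        HTTPStatus.UNAUTHORIZED,
        HTTPStatus.FORBIDDEN,
        HTTPStatus.NOT_FOUND,
    ):
        return details
    # get_cred_info() only exists on google-auth >= 2.35.0 credentials.
    get_cred_info = getattr(credentials, "get_cred_info", None)
    if get_cred_info is None:
        return details
    cred_info = get_cred_info()
    if cred_info:
        details.append(json.dumps(cred_info))
    return details

class FakeCredentials:
    """Hypothetical stand-in for a google-auth credentials object."""
    def get_cred_info(self):
        return {
            "credential_source": "/path/to/file",
            "credential_type": "service account credentials",
            "principal": "service-account@example.com",
        }

# A 403 gains the serialized credential info; a 500 is left untouched.
print(add_cred_info_for_auth_errors(FakeCredentials(), 403, ["foo"]))
print(add_cred_info_for_auth_errors(FakeCredentials(), 500, ["foo"]))

Running the sketch prints a two-element list (the original "foo" plus the JSON string) for the 403 case and the unchanged ["foo"] for the 500 case, which mirrors the parametrized expectations in the new `test__add_cred_info_for_auth_errors` tests.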
diff --git a/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/marketingplatform_admin_service/transports/rest.py b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/marketingplatform_admin_service/transports/rest.py index 517279fa52d3..53f2ab002944 100644 --- a/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/marketingplatform_admin_service/transports/rest.py +++ b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/marketingplatform_admin_service/transports/rest.py @@ -134,12 +134,35 @@ def post_create_analytics_account_link( ) -> resources.AnalyticsAccountLink: """Post-rpc interceptor for create_analytics_account_link - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_analytics_account_link_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the MarketingplatformAdminService server but before - it is returned to user code. + it is returned to user code. This `post_create_analytics_account_link` interceptor runs + before the `post_create_analytics_account_link_with_metadata` interceptor. """ return response + def post_create_analytics_account_link_with_metadata( + self, + response: resources.AnalyticsAccountLink, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.AnalyticsAccountLink, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_analytics_account_link + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the MarketingplatformAdminService server but before it is returned to user code. + + We recommend only using this `post_create_analytics_account_link_with_metadata` + interceptor in new development instead of the `post_create_analytics_account_link` interceptor. + When both interceptors are used, this `post_create_analytics_account_link_with_metadata` interceptor runs after the + `post_create_analytics_account_link` interceptor. The (possibly modified) response returned by + `post_create_analytics_account_link` will be passed to + `post_create_analytics_account_link_with_metadata`. + """ + return response, metadata + def pre_delete_analytics_account_link( self, request: marketingplatform_admin.DeleteAnalyticsAccountLinkRequest, @@ -175,12 +198,35 @@ def post_get_organization( ) -> resources.Organization: """Post-rpc interceptor for get_organization - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_organization_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the MarketingplatformAdminService server but before - it is returned to user code. + it is returned to user code. This `post_get_organization` interceptor runs + before the `post_get_organization_with_metadata` interceptor. """ return response + def post_get_organization_with_metadata( + self, + response: resources.Organization, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.Organization, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_organization + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the MarketingplatformAdminService server but before it is returned to user code. 
+ + We recommend only using this `post_get_organization_with_metadata` + interceptor in new development instead of the `post_get_organization` interceptor. + When both interceptors are used, this `post_get_organization_with_metadata` interceptor runs after the + `post_get_organization` interceptor. The (possibly modified) response returned by + `post_get_organization` will be passed to + `post_get_organization_with_metadata`. + """ + return response, metadata + def pre_list_analytics_account_links( self, request: marketingplatform_admin.ListAnalyticsAccountLinksRequest, @@ -201,12 +247,38 @@ def post_list_analytics_account_links( ) -> marketingplatform_admin.ListAnalyticsAccountLinksResponse: """Post-rpc interceptor for list_analytics_account_links - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_analytics_account_links_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the MarketingplatformAdminService server but before - it is returned to user code. + it is returned to user code. This `post_list_analytics_account_links` interceptor runs + before the `post_list_analytics_account_links_with_metadata` interceptor. """ return response + def post_list_analytics_account_links_with_metadata( + self, + response: marketingplatform_admin.ListAnalyticsAccountLinksResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + marketingplatform_admin.ListAnalyticsAccountLinksResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_analytics_account_links + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the MarketingplatformAdminService server but before it is returned to user code. + + We recommend only using this `post_list_analytics_account_links_with_metadata` + interceptor in new development instead of the `post_list_analytics_account_links` interceptor. + When both interceptors are used, this `post_list_analytics_account_links_with_metadata` interceptor runs after the + `post_list_analytics_account_links` interceptor. The (possibly modified) response returned by + `post_list_analytics_account_links` will be passed to + `post_list_analytics_account_links_with_metadata`. + """ + return response, metadata + def pre_set_property_service_level( self, request: marketingplatform_admin.SetPropertyServiceLevelRequest, @@ -227,12 +299,38 @@ def post_set_property_service_level( ) -> marketingplatform_admin.SetPropertyServiceLevelResponse: """Post-rpc interceptor for set_property_service_level - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_set_property_service_level_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the MarketingplatformAdminService server but before - it is returned to user code. + it is returned to user code. This `post_set_property_service_level` interceptor runs + before the `post_set_property_service_level_with_metadata` interceptor. 
""" return response + def post_set_property_service_level_with_metadata( + self, + response: marketingplatform_admin.SetPropertyServiceLevelResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + marketingplatform_admin.SetPropertyServiceLevelResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for set_property_service_level + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the MarketingplatformAdminService server but before it is returned to user code. + + We recommend only using this `post_set_property_service_level_with_metadata` + interceptor in new development instead of the `post_set_property_service_level` interceptor. + When both interceptors are used, this `post_set_property_service_level_with_metadata` interceptor runs after the + `post_set_property_service_level` interceptor. The (possibly modified) response returned by + `post_set_property_service_level` will be passed to + `post_set_property_service_level_with_metadata`. + """ + return response, metadata + @dataclasses.dataclass class MarketingplatformAdminServiceRestStub: @@ -459,6 +557,13 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_analytics_account_link(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_create_analytics_account_link_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -718,6 +823,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_organization(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_organization_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -867,6 +976,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_analytics_account_links(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_analytics_account_links_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1024,6 +1137,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_set_property_service_level(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_set_property_service_level_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-ads-marketingplatform-admin/samples/generated_samples/snippet_metadata_google.marketingplatform.admin.v1alpha.json b/packages/google-ads-marketingplatform-admin/samples/generated_samples/snippet_metadata_google.marketingplatform.admin.v1alpha.json index c4d2a89fac50..1c841f3768f6 100644 --- a/packages/google-ads-marketingplatform-admin/samples/generated_samples/snippet_metadata_google.marketingplatform.admin.v1alpha.json +++ b/packages/google-ads-marketingplatform-admin/samples/generated_samples/snippet_metadata_google.marketingplatform.admin.v1alpha.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", 
"name": "google-ads-marketingplatform-admin", - "version": "0.1.3" + "version": "0.1.4" }, "snippets": [ { diff --git a/packages/google-ads-marketingplatform-admin/tests/unit/gapic/marketingplatform_admin_v1alpha/test_marketingplatform_admin_service.py b/packages/google-ads-marketingplatform-admin/tests/unit/gapic/marketingplatform_admin_v1alpha/test_marketingplatform_admin_service.py index 09189bc7f2a3..76e2117fc452 100644 --- a/packages/google-ads-marketingplatform-admin/tests/unit/gapic/marketingplatform_admin_v1alpha/test_marketingplatform_admin_service.py +++ b/packages/google-ads-marketingplatform-admin/tests/unit/gapic/marketingplatform_admin_v1alpha/test_marketingplatform_admin_service.py @@ -63,6 +63,13 @@ resources, ) +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -350,6 +357,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = MarketingplatformAdminServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = MarketingplatformAdminServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -4560,10 +4610,14 @@ def test_get_organization_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.MarketingplatformAdminServiceRestInterceptor, "post_get_organization" ) as post, mock.patch.object( + transports.MarketingplatformAdminServiceRestInterceptor, + "post_get_organization_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.MarketingplatformAdminServiceRestInterceptor, "pre_get_organization" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = marketingplatform_admin.GetOrganizationRequest.pb( marketingplatform_admin.GetOrganizationRequest() ) @@ -4587,6 +4641,7 @@ def test_get_organization_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.Organization() + post_with_metadata.return_value = resources.Organization(), metadata client.get_organization( request, @@ -4598,6 +4653,7 @@ def 
test_get_organization_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_analytics_account_links_rest_bad_request( @@ -4685,11 +4741,15 @@ def test_list_analytics_account_links_rest_interceptors(null_interceptor): transports.MarketingplatformAdminServiceRestInterceptor, "post_list_analytics_account_links", ) as post, mock.patch.object( + transports.MarketingplatformAdminServiceRestInterceptor, + "post_list_analytics_account_links_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.MarketingplatformAdminServiceRestInterceptor, "pre_list_analytics_account_links", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = marketingplatform_admin.ListAnalyticsAccountLinksRequest.pb( marketingplatform_admin.ListAnalyticsAccountLinksRequest() ) @@ -4717,6 +4777,10 @@ def test_list_analytics_account_links_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = marketingplatform_admin.ListAnalyticsAccountLinksResponse() + post_with_metadata.return_value = ( + marketingplatform_admin.ListAnalyticsAccountLinksResponse(), + metadata, + ) client.list_analytics_account_links( request, @@ -4728,6 +4792,7 @@ def test_list_analytics_account_links_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_analytics_account_link_rest_bad_request( @@ -4899,11 +4964,15 @@ def test_create_analytics_account_link_rest_interceptors(null_interceptor): transports.MarketingplatformAdminServiceRestInterceptor, "post_create_analytics_account_link", ) as post, mock.patch.object( + transports.MarketingplatformAdminServiceRestInterceptor, + "post_create_analytics_account_link_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.MarketingplatformAdminServiceRestInterceptor, "pre_create_analytics_account_link", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = marketingplatform_admin.CreateAnalyticsAccountLinkRequest.pb( marketingplatform_admin.CreateAnalyticsAccountLinkRequest() ) @@ -4929,6 +4998,7 @@ def test_create_analytics_account_link_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.AnalyticsAccountLink() + post_with_metadata.return_value = resources.AnalyticsAccountLink(), metadata client.create_analytics_account_link( request, @@ -4940,6 +5010,7 @@ def test_create_analytics_account_link_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_analytics_account_link_rest_bad_request( @@ -5138,11 +5209,15 @@ def test_set_property_service_level_rest_interceptors(null_interceptor): transports.MarketingplatformAdminServiceRestInterceptor, "post_set_property_service_level", ) as post, mock.patch.object( + transports.MarketingplatformAdminServiceRestInterceptor, + "post_set_property_service_level_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.MarketingplatformAdminServiceRestInterceptor, "pre_set_property_service_level", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = marketingplatform_admin.SetPropertyServiceLevelRequest.pb( marketingplatform_admin.SetPropertyServiceLevelRequest() ) @@ -5168,6 +5243,10 @@ def 
test_set_property_service_level_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = marketingplatform_admin.SetPropertyServiceLevelResponse() + post_with_metadata.return_value = ( + marketingplatform_admin.SetPropertyServiceLevelResponse(), + metadata, + ) client.set_property_service_level( request, @@ -5179,6 +5258,7 @@ def test_set_property_service_level_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_initialize_client_w_rest(): diff --git a/packages/google-ai-generativelanguage/CHANGELOG.md b/packages/google-ai-generativelanguage/CHANGELOG.md index 5c3e6bba7a9b..6e297a21a3fc 100644 --- a/packages/google-ai-generativelanguage/CHANGELOG.md +++ b/packages/google-ai-generativelanguage/CHANGELOG.md @@ -1,5 +1,34 @@ # Changelog +## [0.6.16](https://github.com/googleapis/google-cloud-python/compare/google-ai-generativelanguage-v0.6.15...google-ai-generativelanguage-v0.6.16) (2025-02-12) + + +### Features + +* Add REST Interceptors which support reading metadata ([a961bc0](https://github.com/googleapis/google-cloud-python/commit/a961bc029201b72fc4923490aeb3d82781853e6a)) +* Add support for reading selective GAPIC generation methods from service YAML ([a961bc0](https://github.com/googleapis/google-cloud-python/commit/a961bc029201b72fc4923490aeb3d82781853e6a)) + +## [0.6.15](https://github.com/googleapis/google-cloud-python/compare/google-ai-generativelanguage-v0.6.14...google-ai-generativelanguage-v0.6.15) (2025-01-13) + + +### Features + +* Add BidiGenerateContent + all the necessary protos ([2c1e359](https://github.com/googleapis/google-cloud-python/commit/2c1e35981f7064f293669109097eb4b8c4942692)) +* Add civic_integrity toggle to generation_config ([2c1e359](https://github.com/googleapis/google-cloud-python/commit/2c1e35981f7064f293669109097eb4b8c4942692)) +* Add GoogleSearch tool type ([2c1e359](https://github.com/googleapis/google-cloud-python/commit/2c1e35981f7064f293669109097eb4b8c4942692)) +* Add id to FunctionCall and FunctionResponse ([2c1e359](https://github.com/googleapis/google-cloud-python/commit/2c1e35981f7064f293669109097eb4b8c4942692)) +* Add image_safety block_reason + finish_reason ([2c1e359](https://github.com/googleapis/google-cloud-python/commit/2c1e35981f7064f293669109097eb4b8c4942692)) +* Add response_modalities to generation_config ([2c1e359](https://github.com/googleapis/google-cloud-python/commit/2c1e35981f7064f293669109097eb4b8c4942692)) +* Add return type `Schema response` to function declarations ([2c1e359](https://github.com/googleapis/google-cloud-python/commit/2c1e35981f7064f293669109097eb4b8c4942692)) +* Add TuningMultiturnExample ([2c1e359](https://github.com/googleapis/google-cloud-python/commit/2c1e35981f7064f293669109097eb4b8c4942692)) +* Add v1alpha ([2c1e359](https://github.com/googleapis/google-cloud-python/commit/2c1e35981f7064f293669109097eb4b8c4942692)) +* Add voice_config to generation_config ([2c1e359](https://github.com/googleapis/google-cloud-python/commit/2c1e35981f7064f293669109097eb4b8c4942692)) + + +### Documentation + +* Update safety filter list to include civic_integrity ([2c1e359](https://github.com/googleapis/google-cloud-python/commit/2c1e35981f7064f293669109097eb4b8c4942692)) + ## [0.6.14](https://github.com/googleapis/google-cloud-python/compare/google-ai-generativelanguage-v0.6.13...google-ai-generativelanguage-v0.6.14) (2024-12-12) diff --git 
a/packages/google-ai-generativelanguage/docs/generativelanguage_v1alpha/cache_service.rst b/packages/google-ai-generativelanguage/docs/generativelanguage_v1alpha/cache_service.rst new file mode 100644 index 000000000000..6d1b22455271 --- /dev/null +++ b/packages/google-ai-generativelanguage/docs/generativelanguage_v1alpha/cache_service.rst @@ -0,0 +1,10 @@ +CacheService +------------------------------ + +.. automodule:: google.ai.generativelanguage_v1alpha.services.cache_service + :members: + :inherited-members: + +.. automodule:: google.ai.generativelanguage_v1alpha.services.cache_service.pagers + :members: + :inherited-members: diff --git a/packages/google-ai-generativelanguage/docs/generativelanguage_v1alpha/discuss_service.rst b/packages/google-ai-generativelanguage/docs/generativelanguage_v1alpha/discuss_service.rst new file mode 100644 index 000000000000..4996aa39dbf3 --- /dev/null +++ b/packages/google-ai-generativelanguage/docs/generativelanguage_v1alpha/discuss_service.rst @@ -0,0 +1,6 @@ +DiscussService +-------------------------------- + +.. automodule:: google.ai.generativelanguage_v1alpha.services.discuss_service + :members: + :inherited-members: diff --git a/packages/google-ai-generativelanguage/docs/generativelanguage_v1alpha/file_service.rst b/packages/google-ai-generativelanguage/docs/generativelanguage_v1alpha/file_service.rst new file mode 100644 index 000000000000..fc5f5538643c --- /dev/null +++ b/packages/google-ai-generativelanguage/docs/generativelanguage_v1alpha/file_service.rst @@ -0,0 +1,10 @@ +FileService +----------------------------- + +.. automodule:: google.ai.generativelanguage_v1alpha.services.file_service + :members: + :inherited-members: + +.. automodule:: google.ai.generativelanguage_v1alpha.services.file_service.pagers + :members: + :inherited-members: diff --git a/packages/google-ai-generativelanguage/docs/generativelanguage_v1alpha/generative_service.rst b/packages/google-ai-generativelanguage/docs/generativelanguage_v1alpha/generative_service.rst new file mode 100644 index 000000000000..6c27b40a2a46 --- /dev/null +++ b/packages/google-ai-generativelanguage/docs/generativelanguage_v1alpha/generative_service.rst @@ -0,0 +1,6 @@ +GenerativeService +----------------------------------- + +.. automodule:: google.ai.generativelanguage_v1alpha.services.generative_service + :members: + :inherited-members: diff --git a/packages/google-ai-generativelanguage/docs/generativelanguage_v1alpha/model_service.rst b/packages/google-ai-generativelanguage/docs/generativelanguage_v1alpha/model_service.rst new file mode 100644 index 000000000000..7f119e39a597 --- /dev/null +++ b/packages/google-ai-generativelanguage/docs/generativelanguage_v1alpha/model_service.rst @@ -0,0 +1,10 @@ +ModelService +------------------------------ + +.. automodule:: google.ai.generativelanguage_v1alpha.services.model_service + :members: + :inherited-members: + +.. automodule:: google.ai.generativelanguage_v1alpha.services.model_service.pagers + :members: + :inherited-members: diff --git a/packages/google-ai-generativelanguage/docs/generativelanguage_v1alpha/permission_service.rst b/packages/google-ai-generativelanguage/docs/generativelanguage_v1alpha/permission_service.rst new file mode 100644 index 000000000000..0873faf0c999 --- /dev/null +++ b/packages/google-ai-generativelanguage/docs/generativelanguage_v1alpha/permission_service.rst @@ -0,0 +1,10 @@ +PermissionService +----------------------------------- + +.. 
automodule:: google.ai.generativelanguage_v1alpha.services.permission_service + :members: + :inherited-members: + +.. automodule:: google.ai.generativelanguage_v1alpha.services.permission_service.pagers + :members: + :inherited-members: diff --git a/packages/google-ai-generativelanguage/docs/generativelanguage_v1alpha/prediction_service.rst b/packages/google-ai-generativelanguage/docs/generativelanguage_v1alpha/prediction_service.rst new file mode 100644 index 000000000000..6a93c8cc2eb7 --- /dev/null +++ b/packages/google-ai-generativelanguage/docs/generativelanguage_v1alpha/prediction_service.rst @@ -0,0 +1,6 @@ +PredictionService +----------------------------------- + +.. automodule:: google.ai.generativelanguage_v1alpha.services.prediction_service + :members: + :inherited-members: diff --git a/packages/google-ai-generativelanguage/docs/generativelanguage_v1alpha/retriever_service.rst b/packages/google-ai-generativelanguage/docs/generativelanguage_v1alpha/retriever_service.rst new file mode 100644 index 000000000000..5d9b9f330225 --- /dev/null +++ b/packages/google-ai-generativelanguage/docs/generativelanguage_v1alpha/retriever_service.rst @@ -0,0 +1,10 @@ +RetrieverService +---------------------------------- + +.. automodule:: google.ai.generativelanguage_v1alpha.services.retriever_service + :members: + :inherited-members: + +.. automodule:: google.ai.generativelanguage_v1alpha.services.retriever_service.pagers + :members: + :inherited-members: diff --git a/packages/google-ai-generativelanguage/docs/generativelanguage_v1alpha/services_.rst b/packages/google-ai-generativelanguage/docs/generativelanguage_v1alpha/services_.rst new file mode 100644 index 000000000000..17ca3d964c15 --- /dev/null +++ b/packages/google-ai-generativelanguage/docs/generativelanguage_v1alpha/services_.rst @@ -0,0 +1,14 @@ +Services for Google Ai Generativelanguage v1alpha API +===================================================== +.. toctree:: + :maxdepth: 2 + + cache_service + discuss_service + file_service + generative_service + model_service + permission_service + prediction_service + retriever_service + text_service diff --git a/packages/google-ai-generativelanguage/docs/generativelanguage_v1alpha/text_service.rst b/packages/google-ai-generativelanguage/docs/generativelanguage_v1alpha/text_service.rst new file mode 100644 index 000000000000..2c27b0cc60b7 --- /dev/null +++ b/packages/google-ai-generativelanguage/docs/generativelanguage_v1alpha/text_service.rst @@ -0,0 +1,6 @@ +TextService +----------------------------- + +.. automodule:: google.ai.generativelanguage_v1alpha.services.text_service + :members: + :inherited-members: diff --git a/packages/google-ai-generativelanguage/docs/generativelanguage_v1alpha/types_.rst b/packages/google-ai-generativelanguage/docs/generativelanguage_v1alpha/types_.rst new file mode 100644 index 000000000000..ea7512835fb8 --- /dev/null +++ b/packages/google-ai-generativelanguage/docs/generativelanguage_v1alpha/types_.rst @@ -0,0 +1,6 @@ +Types for Google Ai Generativelanguage v1alpha API +================================================== + +.. 
automodule:: google.ai.generativelanguage_v1alpha.types + :members: + :show-inheritance: diff --git a/packages/google-ai-generativelanguage/docs/index.rst b/packages/google-ai-generativelanguage/docs/index.rst index 5688bf71543b..b973b49f5784 100644 --- a/packages/google-ai-generativelanguage/docs/index.rst +++ b/packages/google-ai-generativelanguage/docs/index.rst @@ -22,6 +22,14 @@ API Reference generativelanguage_v1/services_ generativelanguage_v1/types_ +API Reference +------------- +.. toctree:: + :maxdepth: 2 + + generativelanguage_v1alpha/services_ + generativelanguage_v1alpha/types_ + API Reference ------------- .. toctree:: diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage/__init__.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage/__init__.py index 750b54051c3f..fb8789d5bab9 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage/__init__.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage/__init__.py @@ -144,11 +144,14 @@ GroundingMetadata, GroundingSupport, LogprobsResult, + PrebuiltVoiceConfig, RetrievalMetadata, SearchEntryPoint, Segment, SemanticRetrieverConfig, + SpeechConfig, TaskType, + VoiceConfig, ) from google.ai.generativelanguage_v1beta.types.model import Model from google.ai.generativelanguage_v1beta.types.model_service import ( @@ -330,10 +333,13 @@ "GroundingMetadata", "GroundingSupport", "LogprobsResult", + "PrebuiltVoiceConfig", "RetrievalMetadata", "SearchEntryPoint", "Segment", "SemanticRetrieverConfig", + "SpeechConfig", + "VoiceConfig", "TaskType", "Model", "CreateTunedModelMetadata", diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage/gapic_version.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage/gapic_version.py index 0b6dbde2b051..a22e7bbe7e4a 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage/gapic_version.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.6.14" # {x-release-please-version} +__version__ = "0.6.16" # {x-release-please-version} diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/gapic_version.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/gapic_version.py index 0b6dbde2b051..a22e7bbe7e4a 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/gapic_version.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.6.14" # {x-release-please-version} +__version__ = "0.6.16" # {x-release-please-version} diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/generative_service/async_client.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/generative_service/async_client.py index 2a0471ed2dc9..88a9c247893c 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/generative_service/async_client.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/generative_service/async_client.py @@ -353,7 +353,7 @@ async def sample_generate_content(): Required. 
The name of the ``Model`` to use for generating the completion. - Format: ``name=models/{model}``. + Format: ``models/{model}``. This corresponds to the ``model`` field on the ``request`` instance; if ``request`` is provided, this @@ -494,7 +494,7 @@ async def sample_stream_generate_content(): Required. The name of the ``Model`` to use for generating the completion. - Format: ``name=models/{model}``. + Format: ``models/{model}``. This corresponds to the ``model`` field on the ``request`` instance; if ``request`` is provided, this diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/generative_service/client.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/generative_service/client.py index 258a77134a14..0b0f0af5c846 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/generative_service/client.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/generative_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -483,6 +485,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -740,7 +769,7 @@ def sample_generate_content(): Required. The name of the ``Model`` to use for generating the completion. - Format: ``name=models/{model}``. + Format: ``models/{model}``. This corresponds to the ``model`` field on the ``request`` instance; if ``request`` is provided, this @@ -878,7 +907,7 @@ def sample_stream_generate_content(): Required. The name of the ``Model`` to use for generating the completion. - Format: ``name=models/{model}``. + Format: ``models/{model}``. This corresponds to the ``model`` field on the ``request`` instance; if ``request`` is provided, this @@ -1399,16 +1428,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -1454,16 +1487,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. 
- response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def cancel_operation( self, diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/generative_service/transports/rest.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/generative_service/transports/rest.py index 152a68d02151..e659cd0e866e 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/generative_service/transports/rest.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/generative_service/transports/rest.py @@ -135,12 +135,38 @@ def post_batch_embed_contents( ) -> generative_service.BatchEmbedContentsResponse: """Post-rpc interceptor for batch_embed_contents - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_batch_embed_contents_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the GenerativeService server but before - it is returned to user code. + it is returned to user code. This `post_batch_embed_contents` interceptor runs + before the `post_batch_embed_contents_with_metadata` interceptor. """ return response + def post_batch_embed_contents_with_metadata( + self, + response: generative_service.BatchEmbedContentsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + generative_service.BatchEmbedContentsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for batch_embed_contents + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the GenerativeService server but before it is returned to user code. + + We recommend only using this `post_batch_embed_contents_with_metadata` + interceptor in new development instead of the `post_batch_embed_contents` interceptor. + When both interceptors are used, this `post_batch_embed_contents_with_metadata` interceptor runs after the + `post_batch_embed_contents` interceptor. The (possibly modified) response returned by + `post_batch_embed_contents` will be passed to + `post_batch_embed_contents_with_metadata`. + """ + return response, metadata + def pre_count_tokens( self, request: generative_service.CountTokensRequest, @@ -160,12 +186,37 @@ def post_count_tokens( ) -> generative_service.CountTokensResponse: """Post-rpc interceptor for count_tokens - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_count_tokens_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the GenerativeService server but before - it is returned to user code. + it is returned to user code. This `post_count_tokens` interceptor runs + before the `post_count_tokens_with_metadata` interceptor. 
""" return response + def post_count_tokens_with_metadata( + self, + response: generative_service.CountTokensResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + generative_service.CountTokensResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for count_tokens + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the GenerativeService server but before it is returned to user code. + + We recommend only using this `post_count_tokens_with_metadata` + interceptor in new development instead of the `post_count_tokens` interceptor. + When both interceptors are used, this `post_count_tokens_with_metadata` interceptor runs after the + `post_count_tokens` interceptor. The (possibly modified) response returned by + `post_count_tokens` will be passed to + `post_count_tokens_with_metadata`. + """ + return response, metadata + def pre_embed_content( self, request: generative_service.EmbedContentRequest, @@ -185,12 +236,37 @@ def post_embed_content( ) -> generative_service.EmbedContentResponse: """Post-rpc interceptor for embed_content - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_embed_content_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the GenerativeService server but before - it is returned to user code. + it is returned to user code. This `post_embed_content` interceptor runs + before the `post_embed_content_with_metadata` interceptor. """ return response + def post_embed_content_with_metadata( + self, + response: generative_service.EmbedContentResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + generative_service.EmbedContentResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for embed_content + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the GenerativeService server but before it is returned to user code. + + We recommend only using this `post_embed_content_with_metadata` + interceptor in new development instead of the `post_embed_content` interceptor. + When both interceptors are used, this `post_embed_content_with_metadata` interceptor runs after the + `post_embed_content` interceptor. The (possibly modified) response returned by + `post_embed_content` will be passed to + `post_embed_content_with_metadata`. + """ + return response, metadata + def pre_generate_content( self, request: generative_service.GenerateContentRequest, @@ -211,12 +287,38 @@ def post_generate_content( ) -> generative_service.GenerateContentResponse: """Post-rpc interceptor for generate_content - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_generate_content_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the GenerativeService server but before - it is returned to user code. + it is returned to user code. This `post_generate_content` interceptor runs + before the `post_generate_content_with_metadata` interceptor. 
""" return response + def post_generate_content_with_metadata( + self, + response: generative_service.GenerateContentResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + generative_service.GenerateContentResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for generate_content + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the GenerativeService server but before it is returned to user code. + + We recommend only using this `post_generate_content_with_metadata` + interceptor in new development instead of the `post_generate_content` interceptor. + When both interceptors are used, this `post_generate_content_with_metadata` interceptor runs after the + `post_generate_content` interceptor. The (possibly modified) response returned by + `post_generate_content` will be passed to + `post_generate_content_with_metadata`. + """ + return response, metadata + def pre_stream_generate_content( self, request: generative_service.GenerateContentRequest, @@ -237,12 +339,37 @@ def post_stream_generate_content( ) -> rest_streaming.ResponseIterator: """Post-rpc interceptor for stream_generate_content - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_stream_generate_content_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the GenerativeService server but before - it is returned to user code. + it is returned to user code. This `post_stream_generate_content` interceptor runs + before the `post_stream_generate_content_with_metadata` interceptor. """ return response + def post_stream_generate_content_with_metadata( + self, + response: rest_streaming.ResponseIterator, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + rest_streaming.ResponseIterator, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for stream_generate_content + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the GenerativeService server but before it is returned to user code. + + We recommend only using this `post_stream_generate_content_with_metadata` + interceptor in new development instead of the `post_stream_generate_content` interceptor. + When both interceptors are used, this `post_stream_generate_content_with_metadata` interceptor runs after the + `post_stream_generate_content` interceptor. The (possibly modified) response returned by + `post_stream_generate_content` will be passed to + `post_stream_generate_content_with_metadata`. 
+ """ + return response, metadata + def pre_cancel_operation( self, request: operations_pb2.CancelOperationRequest, @@ -531,6 +658,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_batch_embed_contents(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_batch_embed_contents_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -687,6 +818,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_count_tokens(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_count_tokens_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -836,6 +971,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_embed_content(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_embed_content_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1004,6 +1143,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_generate_content(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_generate_content_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1174,6 +1317,10 @@ def __call__( ) resp = self._interceptor.post_stream_generate_content(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_stream_generate_content_with_metadata( + resp, response_metadata + ) return resp @property diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/model_service/client.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/model_service/client.py index 8eb5853bd189..b3c62cd5cfac 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/model_service/client.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/model_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -479,6 +481,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. 
+ """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -977,16 +1006,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -1032,16 +1065,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def cancel_operation( self, diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/model_service/transports/rest.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/model_service/transports/rest.py index 99e1125ccde2..55fe63d2c8cc 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/model_service/transports/rest.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/services/model_service/transports/rest.py @@ -106,12 +106,33 @@ def pre_get_model( def post_get_model(self, response: model.Model) -> model.Model: """Post-rpc interceptor for get_model - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_model_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ModelService server but before - it is returned to user code. + it is returned to user code. This `post_get_model` interceptor runs + before the `post_get_model_with_metadata` interceptor. """ return response + def post_get_model_with_metadata( + self, response: model.Model, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[model.Model, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_model + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ModelService server but before it is returned to user code. + + We recommend only using this `post_get_model_with_metadata` + interceptor in new development instead of the `post_get_model` interceptor. + When both interceptors are used, this `post_get_model_with_metadata` interceptor runs after the + `post_get_model` interceptor. 
The (possibly modified) response returned by + `post_get_model` will be passed to + `post_get_model_with_metadata`. + """ + return response, metadata + def pre_list_models( self, request: model_service.ListModelsRequest, @@ -131,12 +152,37 @@ def post_list_models( ) -> model_service.ListModelsResponse: """Post-rpc interceptor for list_models - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_models_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ModelService server but before - it is returned to user code. + it is returned to user code. This `post_list_models` interceptor runs + before the `post_list_models_with_metadata` interceptor. """ return response + def post_list_models_with_metadata( + self, + response: model_service.ListModelsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + model_service.ListModelsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_models + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ModelService server but before it is returned to user code. + + We recommend only using this `post_list_models_with_metadata` + interceptor in new development instead of the `post_list_models` interceptor. + When both interceptors are used, this `post_list_models_with_metadata` interceptor runs after the + `post_list_models` interceptor. The (possibly modified) response returned by + `post_list_models` will be passed to + `post_list_models_with_metadata`. + """ + return response, metadata + def pre_cancel_operation( self, request: operations_pb2.CancelOperationRequest, @@ -420,6 +466,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_model(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_model_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -566,6 +616,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_models(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_models_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/types/generative_service.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/types/generative_service.py index f52aa49108f1..bcc8bd6f3ed6 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/types/generative_service.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1/types/generative_service.py @@ -98,7 +98,7 @@ class GenerateContentRequest(proto.Message): Required. The name of the ``Model`` to use for generating the completion. - Format: ``name=models/{model}``. + Format: ``models/{model}``. contents (MutableSequence[google.ai.generativelanguage_v1.types.Content]): Required. The content of the current conversation with the model. @@ -124,8 +124,8 @@ class GenerateContentRequest(proto.Message): will use the default safety setting for that category. 
Harm categories HARM_CATEGORY_HATE_SPEECH, HARM_CATEGORY_SEXUALLY_EXPLICIT, - HARM_CATEGORY_DANGEROUS_CONTENT, HARM_CATEGORY_HARASSMENT - are supported. Refer to the + HARM_CATEGORY_DANGEROUS_CONTENT, HARM_CATEGORY_HARASSMENT, + HARM_CATEGORY_CIVIC_INTEGRITY are supported. Refer to the `guide `__ for detailed information on available safety settings. Also refer to the `Safety @@ -287,6 +287,11 @@ class GenerationConfig(proto.Message): [Candidate.logprobs_result][google.ai.generativelanguage.v1.Candidate.logprobs_result]. This field is a member of `oneof`_ ``_logprobs``. + enable_enhanced_civic_answers (bool): + Optional. Enables enhanced civic answers. It + may not be available for all models. + + This field is a member of `oneof`_ ``_enable_enhanced_civic_answers``. """ candidate_count: int = proto.Field( @@ -338,6 +343,11 @@ class GenerationConfig(proto.Message): number=18, optional=True, ) + enable_enhanced_civic_answers: bool = proto.Field( + proto.BOOL, + number=19, + optional=True, + ) class GenerateContentResponse(proto.Message): @@ -397,12 +407,16 @@ class BlockReason(proto.Enum): included from the terminology blocklist. PROHIBITED_CONTENT (4): Prompt was blocked due to prohibited content. + IMAGE_SAFETY (5): + Candidates blocked due to unsafe image + generation content. """ BLOCK_REASON_UNSPECIFIED = 0 SAFETY = 1 OTHER = 2 BLOCKLIST = 3 PROHIBITED_CONTENT = 4 + IMAGE_SAFETY = 5 block_reason: "GenerateContentResponse.PromptFeedback.BlockReason" = ( proto.Field( @@ -548,6 +562,9 @@ class FinishReason(proto.Enum): MALFORMED_FUNCTION_CALL (10): The function call generated by the model is invalid. + IMAGE_SAFETY (11): + Token generation stopped because generated + images contain safety violations. """ FINISH_REASON_UNSPECIFIED = 0 STOP = 1 @@ -560,6 +577,7 @@ class FinishReason(proto.Enum): PROHIBITED_CONTENT = 8 SPII = 9 MALFORMED_FUNCTION_CALL = 10 + IMAGE_SAFETY = 11 index: int = proto.Field( proto.INT32, diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/__init__.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/__init__.py new file mode 100644 index 000000000000..33d2e7c26a31 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/__init__.py @@ -0,0 +1,413 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
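The client hunk at the start of this section wires `_add_cred_info_for_auth_errors` into `list_operations` and `get_operation`, so UNAUTHORIZED/FORBIDDEN/NOT_FOUND failures carry credential diagnostics whenever google-auth >= 2.35.0 exposes `get_cred_info`. A minimal caller-side sketch of observing those enriched details; the client choice and operation name are illustrative, not part of the change:

from google.api_core import exceptions as core_exceptions

from google.ai import generativelanguage_v1

client = generativelanguage_v1.ModelServiceClient()

try:
    client.get_operation(request={"name": "operations/example"})  # illustrative name
except core_exceptions.GoogleAPICallError as exc:
    # When google-auth >= 2.35.0 is installed, 401/403/404 errors may now carry
    # an extra JSON-encoded credential-info entry appended to their details.
    for detail in exc.details:
        print(detail)
    raise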
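The deprecation notes above introduce the `post_get_model_with_metadata` and `post_list_models_with_metadata` hooks, which receive the HTTP response headers alongside the response. A minimal sketch of a subclass using the new hook; the interceptor base-class name `ModelServiceRestInterceptor` and its wiring through the REST transport follow the usual generated layout and should be treated as assumptions:

from typing import Sequence, Tuple, Union

from google.ai.generativelanguage_v1.services.model_service.transports.rest import (
    ModelServiceRestInterceptor,
)
from google.ai.generativelanguage_v1.types import model


class HeaderLoggingInterceptor(ModelServiceRestInterceptor):
    """Illustrative subclass that reads response headers via the new hook."""

    def post_get_model_with_metadata(
        self,
        response: model.Model,
        metadata: Sequence[Tuple[str, Union[str, bytes]]],
    ) -> Tuple[model.Model, Sequence[Tuple[str, Union[str, bytes]]]]:
        # `metadata` is built from the HTTP response headers by the transport;
        # `response` may already have passed through a legacy post_get_model override.
        for key, value in metadata:
            print(f"{key}: {value}")
        return response, metadata

Existing `post_get_model` overrides keep working: per the docstrings above they run first, and their (possibly modified) return value is what the new `_with_metadata` hook receives.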
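The generative_service types above also gain an optional `enable_enhanced_civic_answers` flag on `GenerationConfig` (field 19) and an `IMAGE_SAFETY` value on both the prompt-feedback `BlockReason` and the candidate `FinishReason` enums. A short usage sketch against the v1 surface; the model name and prompt are illustrative:

from google.ai import generativelanguage_v1

client = generativelanguage_v1.GenerativeServiceClient()

request = generativelanguage_v1.GenerateContentRequest(
    model="models/example-model",  # illustrative; the documented format is models/{model}
    contents=[{"parts": [{"text": "Explain how mail-in ballots are counted."}]}],
    generation_config=generativelanguage_v1.GenerationConfig(
        # New optional field; per the docstring it may not be available for all models.
        enable_enhanced_civic_answers=True,
    ),
)
response = client.generate_content(request=request)

BlockReason = generativelanguage_v1.GenerateContentResponse.PromptFeedback.BlockReason
if response.prompt_feedback.block_reason == BlockReason.IMAGE_SAFETY:
    print("Prompt blocked due to unsafe image generation content.")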
+# +from google.ai.generativelanguage_v1alpha import gapic_version as package_version + +__version__ = package_version.__version__ + + +from .services.cache_service import CacheServiceAsyncClient, CacheServiceClient +from .services.discuss_service import DiscussServiceAsyncClient, DiscussServiceClient +from .services.file_service import FileServiceAsyncClient, FileServiceClient +from .services.generative_service import ( + GenerativeServiceAsyncClient, + GenerativeServiceClient, +) +from .services.model_service import ModelServiceAsyncClient, ModelServiceClient +from .services.permission_service import ( + PermissionServiceAsyncClient, + PermissionServiceClient, +) +from .services.prediction_service import ( + PredictionServiceAsyncClient, + PredictionServiceClient, +) +from .services.retriever_service import ( + RetrieverServiceAsyncClient, + RetrieverServiceClient, +) +from .services.text_service import TextServiceAsyncClient, TextServiceClient +from .types.cache_service import ( + CreateCachedContentRequest, + DeleteCachedContentRequest, + GetCachedContentRequest, + ListCachedContentsRequest, + ListCachedContentsResponse, + UpdateCachedContentRequest, +) +from .types.cached_content import CachedContent +from .types.citation import CitationMetadata, CitationSource +from .types.content import ( + Blob, + CodeExecution, + CodeExecutionResult, + Content, + DynamicRetrievalConfig, + ExecutableCode, + FileData, + FunctionCall, + FunctionCallingConfig, + FunctionDeclaration, + FunctionResponse, + GoogleSearchRetrieval, + GroundingPassage, + GroundingPassages, + Part, + Schema, + Tool, + ToolConfig, + Type, +) +from .types.discuss_service import ( + CountMessageTokensRequest, + CountMessageTokensResponse, + Example, + GenerateMessageRequest, + GenerateMessageResponse, + Message, + MessagePrompt, +) +from .types.file import File, VideoMetadata +from .types.file_service import ( + CreateFileRequest, + CreateFileResponse, + DeleteFileRequest, + GetFileRequest, + ListFilesRequest, + ListFilesResponse, +) +from .types.generative_service import ( + AttributionSourceId, + BatchEmbedContentsRequest, + BatchEmbedContentsResponse, + BidiGenerateContentClientContent, + BidiGenerateContentClientMessage, + BidiGenerateContentRealtimeInput, + BidiGenerateContentServerContent, + BidiGenerateContentServerMessage, + BidiGenerateContentSetup, + BidiGenerateContentSetupComplete, + BidiGenerateContentToolCall, + BidiGenerateContentToolCallCancellation, + BidiGenerateContentToolResponse, + Candidate, + ContentEmbedding, + CountTokensRequest, + CountTokensResponse, + EmbedContentRequest, + EmbedContentResponse, + GenerateAnswerRequest, + GenerateAnswerResponse, + GenerateContentRequest, + GenerateContentResponse, + GenerationConfig, + GroundingAttribution, + GroundingChunk, + GroundingMetadata, + GroundingSupport, + LogprobsResult, + PrebuiltVoiceConfig, + RetrievalMetadata, + SearchEntryPoint, + Segment, + SemanticRetrieverConfig, + SpeechConfig, + TaskType, + VoiceConfig, +) +from .types.model import Model +from .types.model_service import ( + CreateTunedModelMetadata, + CreateTunedModelRequest, + DeleteTunedModelRequest, + GetModelRequest, + GetTunedModelRequest, + ListModelsRequest, + ListModelsResponse, + ListTunedModelsRequest, + ListTunedModelsResponse, + UpdateTunedModelRequest, +) +from .types.permission import Permission +from .types.permission_service import ( + CreatePermissionRequest, + DeletePermissionRequest, + GetPermissionRequest, + ListPermissionsRequest, + ListPermissionsResponse, + 
TransferOwnershipRequest, + TransferOwnershipResponse, + UpdatePermissionRequest, +) +from .types.prediction_service import PredictRequest, PredictResponse +from .types.retriever import ( + Chunk, + ChunkData, + Condition, + Corpus, + CustomMetadata, + Document, + MetadataFilter, + StringList, +) +from .types.retriever_service import ( + BatchCreateChunksRequest, + BatchCreateChunksResponse, + BatchDeleteChunksRequest, + BatchUpdateChunksRequest, + BatchUpdateChunksResponse, + CreateChunkRequest, + CreateCorpusRequest, + CreateDocumentRequest, + DeleteChunkRequest, + DeleteCorpusRequest, + DeleteDocumentRequest, + GetChunkRequest, + GetCorpusRequest, + GetDocumentRequest, + ListChunksRequest, + ListChunksResponse, + ListCorporaRequest, + ListCorporaResponse, + ListDocumentsRequest, + ListDocumentsResponse, + QueryCorpusRequest, + QueryCorpusResponse, + QueryDocumentRequest, + QueryDocumentResponse, + RelevantChunk, + UpdateChunkRequest, + UpdateCorpusRequest, + UpdateDocumentRequest, +) +from .types.safety import ( + ContentFilter, + HarmCategory, + SafetyFeedback, + SafetyRating, + SafetySetting, +) +from .types.text_service import ( + BatchEmbedTextRequest, + BatchEmbedTextResponse, + CountTextTokensRequest, + CountTextTokensResponse, + Embedding, + EmbedTextRequest, + EmbedTextResponse, + GenerateTextRequest, + GenerateTextResponse, + TextCompletion, + TextPrompt, +) +from .types.tuned_model import ( + Dataset, + Hyperparameters, + TunedModel, + TunedModelSource, + TuningContent, + TuningExample, + TuningExamples, + TuningMultiturnExample, + TuningPart, + TuningSnapshot, + TuningTask, +) + +__all__ = ( + "CacheServiceAsyncClient", + "DiscussServiceAsyncClient", + "FileServiceAsyncClient", + "GenerativeServiceAsyncClient", + "ModelServiceAsyncClient", + "PermissionServiceAsyncClient", + "PredictionServiceAsyncClient", + "RetrieverServiceAsyncClient", + "TextServiceAsyncClient", + "AttributionSourceId", + "BatchCreateChunksRequest", + "BatchCreateChunksResponse", + "BatchDeleteChunksRequest", + "BatchEmbedContentsRequest", + "BatchEmbedContentsResponse", + "BatchEmbedTextRequest", + "BatchEmbedTextResponse", + "BatchUpdateChunksRequest", + "BatchUpdateChunksResponse", + "BidiGenerateContentClientContent", + "BidiGenerateContentClientMessage", + "BidiGenerateContentRealtimeInput", + "BidiGenerateContentServerContent", + "BidiGenerateContentServerMessage", + "BidiGenerateContentSetup", + "BidiGenerateContentSetupComplete", + "BidiGenerateContentToolCall", + "BidiGenerateContentToolCallCancellation", + "BidiGenerateContentToolResponse", + "Blob", + "CacheServiceClient", + "CachedContent", + "Candidate", + "Chunk", + "ChunkData", + "CitationMetadata", + "CitationSource", + "CodeExecution", + "CodeExecutionResult", + "Condition", + "Content", + "ContentEmbedding", + "ContentFilter", + "Corpus", + "CountMessageTokensRequest", + "CountMessageTokensResponse", + "CountTextTokensRequest", + "CountTextTokensResponse", + "CountTokensRequest", + "CountTokensResponse", + "CreateCachedContentRequest", + "CreateChunkRequest", + "CreateCorpusRequest", + "CreateDocumentRequest", + "CreateFileRequest", + "CreateFileResponse", + "CreatePermissionRequest", + "CreateTunedModelMetadata", + "CreateTunedModelRequest", + "CustomMetadata", + "Dataset", + "DeleteCachedContentRequest", + "DeleteChunkRequest", + "DeleteCorpusRequest", + "DeleteDocumentRequest", + "DeleteFileRequest", + "DeletePermissionRequest", + "DeleteTunedModelRequest", + "DiscussServiceClient", + "Document", + "DynamicRetrievalConfig", + 
"EmbedContentRequest", + "EmbedContentResponse", + "EmbedTextRequest", + "EmbedTextResponse", + "Embedding", + "Example", + "ExecutableCode", + "File", + "FileData", + "FileServiceClient", + "FunctionCall", + "FunctionCallingConfig", + "FunctionDeclaration", + "FunctionResponse", + "GenerateAnswerRequest", + "GenerateAnswerResponse", + "GenerateContentRequest", + "GenerateContentResponse", + "GenerateMessageRequest", + "GenerateMessageResponse", + "GenerateTextRequest", + "GenerateTextResponse", + "GenerationConfig", + "GenerativeServiceClient", + "GetCachedContentRequest", + "GetChunkRequest", + "GetCorpusRequest", + "GetDocumentRequest", + "GetFileRequest", + "GetModelRequest", + "GetPermissionRequest", + "GetTunedModelRequest", + "GoogleSearchRetrieval", + "GroundingAttribution", + "GroundingChunk", + "GroundingMetadata", + "GroundingPassage", + "GroundingPassages", + "GroundingSupport", + "HarmCategory", + "Hyperparameters", + "ListCachedContentsRequest", + "ListCachedContentsResponse", + "ListChunksRequest", + "ListChunksResponse", + "ListCorporaRequest", + "ListCorporaResponse", + "ListDocumentsRequest", + "ListDocumentsResponse", + "ListFilesRequest", + "ListFilesResponse", + "ListModelsRequest", + "ListModelsResponse", + "ListPermissionsRequest", + "ListPermissionsResponse", + "ListTunedModelsRequest", + "ListTunedModelsResponse", + "LogprobsResult", + "Message", + "MessagePrompt", + "MetadataFilter", + "Model", + "ModelServiceClient", + "Part", + "Permission", + "PermissionServiceClient", + "PrebuiltVoiceConfig", + "PredictRequest", + "PredictResponse", + "PredictionServiceClient", + "QueryCorpusRequest", + "QueryCorpusResponse", + "QueryDocumentRequest", + "QueryDocumentResponse", + "RelevantChunk", + "RetrievalMetadata", + "RetrieverServiceClient", + "SafetyFeedback", + "SafetyRating", + "SafetySetting", + "Schema", + "SearchEntryPoint", + "Segment", + "SemanticRetrieverConfig", + "SpeechConfig", + "StringList", + "TaskType", + "TextCompletion", + "TextPrompt", + "TextServiceClient", + "Tool", + "ToolConfig", + "TransferOwnershipRequest", + "TransferOwnershipResponse", + "TunedModel", + "TunedModelSource", + "TuningContent", + "TuningExample", + "TuningExamples", + "TuningMultiturnExample", + "TuningPart", + "TuningSnapshot", + "TuningTask", + "Type", + "UpdateCachedContentRequest", + "UpdateChunkRequest", + "UpdateCorpusRequest", + "UpdateDocumentRequest", + "UpdatePermissionRequest", + "UpdateTunedModelRequest", + "VideoMetadata", + "VoiceConfig", +) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/gapic_metadata.json b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/gapic_metadata.json new file mode 100644 index 000000000000..db219e407c36 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/gapic_metadata.json @@ -0,0 +1,1020 @@ + { + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "python", + "libraryPackage": "google.ai.generativelanguage_v1alpha", + "protoPackage": "google.ai.generativelanguage.v1alpha", + "schema": "1.0", + "services": { + "CacheService": { + "clients": { + "grpc": { + "libraryClient": "CacheServiceClient", + "rpcs": { + "CreateCachedContent": { + "methods": [ + "create_cached_content" + ] + }, + "DeleteCachedContent": { + "methods": [ + "delete_cached_content" + ] + }, + "GetCachedContent": { + "methods": [ + "get_cached_content" + ] + }, + "ListCachedContents": { + "methods": [ + 
"list_cached_contents" + ] + }, + "UpdateCachedContent": { + "methods": [ + "update_cached_content" + ] + } + } + }, + "grpc-async": { + "libraryClient": "CacheServiceAsyncClient", + "rpcs": { + "CreateCachedContent": { + "methods": [ + "create_cached_content" + ] + }, + "DeleteCachedContent": { + "methods": [ + "delete_cached_content" + ] + }, + "GetCachedContent": { + "methods": [ + "get_cached_content" + ] + }, + "ListCachedContents": { + "methods": [ + "list_cached_contents" + ] + }, + "UpdateCachedContent": { + "methods": [ + "update_cached_content" + ] + } + } + }, + "rest": { + "libraryClient": "CacheServiceClient", + "rpcs": { + "CreateCachedContent": { + "methods": [ + "create_cached_content" + ] + }, + "DeleteCachedContent": { + "methods": [ + "delete_cached_content" + ] + }, + "GetCachedContent": { + "methods": [ + "get_cached_content" + ] + }, + "ListCachedContents": { + "methods": [ + "list_cached_contents" + ] + }, + "UpdateCachedContent": { + "methods": [ + "update_cached_content" + ] + } + } + } + } + }, + "DiscussService": { + "clients": { + "grpc": { + "libraryClient": "DiscussServiceClient", + "rpcs": { + "CountMessageTokens": { + "methods": [ + "count_message_tokens" + ] + }, + "GenerateMessage": { + "methods": [ + "generate_message" + ] + } + } + }, + "grpc-async": { + "libraryClient": "DiscussServiceAsyncClient", + "rpcs": { + "CountMessageTokens": { + "methods": [ + "count_message_tokens" + ] + }, + "GenerateMessage": { + "methods": [ + "generate_message" + ] + } + } + }, + "rest": { + "libraryClient": "DiscussServiceClient", + "rpcs": { + "CountMessageTokens": { + "methods": [ + "count_message_tokens" + ] + }, + "GenerateMessage": { + "methods": [ + "generate_message" + ] + } + } + } + } + }, + "FileService": { + "clients": { + "grpc": { + "libraryClient": "FileServiceClient", + "rpcs": { + "CreateFile": { + "methods": [ + "create_file" + ] + }, + "DeleteFile": { + "methods": [ + "delete_file" + ] + }, + "GetFile": { + "methods": [ + "get_file" + ] + }, + "ListFiles": { + "methods": [ + "list_files" + ] + } + } + }, + "grpc-async": { + "libraryClient": "FileServiceAsyncClient", + "rpcs": { + "CreateFile": { + "methods": [ + "create_file" + ] + }, + "DeleteFile": { + "methods": [ + "delete_file" + ] + }, + "GetFile": { + "methods": [ + "get_file" + ] + }, + "ListFiles": { + "methods": [ + "list_files" + ] + } + } + }, + "rest": { + "libraryClient": "FileServiceClient", + "rpcs": { + "CreateFile": { + "methods": [ + "create_file" + ] + }, + "DeleteFile": { + "methods": [ + "delete_file" + ] + }, + "GetFile": { + "methods": [ + "get_file" + ] + }, + "ListFiles": { + "methods": [ + "list_files" + ] + } + } + } + } + }, + "GenerativeService": { + "clients": { + "grpc": { + "libraryClient": "GenerativeServiceClient", + "rpcs": { + "BatchEmbedContents": { + "methods": [ + "batch_embed_contents" + ] + }, + "BidiGenerateContent": { + "methods": [ + "bidi_generate_content" + ] + }, + "CountTokens": { + "methods": [ + "count_tokens" + ] + }, + "EmbedContent": { + "methods": [ + "embed_content" + ] + }, + "GenerateAnswer": { + "methods": [ + "generate_answer" + ] + }, + "GenerateContent": { + "methods": [ + "generate_content" + ] + }, + "StreamGenerateContent": { + "methods": [ + "stream_generate_content" + ] + } + } + }, + "grpc-async": { + "libraryClient": "GenerativeServiceAsyncClient", + "rpcs": { + "BatchEmbedContents": { + "methods": [ + "batch_embed_contents" + ] + }, + "BidiGenerateContent": { + "methods": [ + "bidi_generate_content" + ] + }, + "CountTokens": { + 
"methods": [ + "count_tokens" + ] + }, + "EmbedContent": { + "methods": [ + "embed_content" + ] + }, + "GenerateAnswer": { + "methods": [ + "generate_answer" + ] + }, + "GenerateContent": { + "methods": [ + "generate_content" + ] + }, + "StreamGenerateContent": { + "methods": [ + "stream_generate_content" + ] + } + } + }, + "rest": { + "libraryClient": "GenerativeServiceClient", + "rpcs": { + "BatchEmbedContents": { + "methods": [ + "batch_embed_contents" + ] + }, + "BidiGenerateContent": { + "methods": [ + "bidi_generate_content" + ] + }, + "CountTokens": { + "methods": [ + "count_tokens" + ] + }, + "EmbedContent": { + "methods": [ + "embed_content" + ] + }, + "GenerateAnswer": { + "methods": [ + "generate_answer" + ] + }, + "GenerateContent": { + "methods": [ + "generate_content" + ] + }, + "StreamGenerateContent": { + "methods": [ + "stream_generate_content" + ] + } + } + } + } + }, + "ModelService": { + "clients": { + "grpc": { + "libraryClient": "ModelServiceClient", + "rpcs": { + "CreateTunedModel": { + "methods": [ + "create_tuned_model" + ] + }, + "DeleteTunedModel": { + "methods": [ + "delete_tuned_model" + ] + }, + "GetModel": { + "methods": [ + "get_model" + ] + }, + "GetTunedModel": { + "methods": [ + "get_tuned_model" + ] + }, + "ListModels": { + "methods": [ + "list_models" + ] + }, + "ListTunedModels": { + "methods": [ + "list_tuned_models" + ] + }, + "UpdateTunedModel": { + "methods": [ + "update_tuned_model" + ] + } + } + }, + "grpc-async": { + "libraryClient": "ModelServiceAsyncClient", + "rpcs": { + "CreateTunedModel": { + "methods": [ + "create_tuned_model" + ] + }, + "DeleteTunedModel": { + "methods": [ + "delete_tuned_model" + ] + }, + "GetModel": { + "methods": [ + "get_model" + ] + }, + "GetTunedModel": { + "methods": [ + "get_tuned_model" + ] + }, + "ListModels": { + "methods": [ + "list_models" + ] + }, + "ListTunedModels": { + "methods": [ + "list_tuned_models" + ] + }, + "UpdateTunedModel": { + "methods": [ + "update_tuned_model" + ] + } + } + }, + "rest": { + "libraryClient": "ModelServiceClient", + "rpcs": { + "CreateTunedModel": { + "methods": [ + "create_tuned_model" + ] + }, + "DeleteTunedModel": { + "methods": [ + "delete_tuned_model" + ] + }, + "GetModel": { + "methods": [ + "get_model" + ] + }, + "GetTunedModel": { + "methods": [ + "get_tuned_model" + ] + }, + "ListModels": { + "methods": [ + "list_models" + ] + }, + "ListTunedModels": { + "methods": [ + "list_tuned_models" + ] + }, + "UpdateTunedModel": { + "methods": [ + "update_tuned_model" + ] + } + } + } + } + }, + "PermissionService": { + "clients": { + "grpc": { + "libraryClient": "PermissionServiceClient", + "rpcs": { + "CreatePermission": { + "methods": [ + "create_permission" + ] + }, + "DeletePermission": { + "methods": [ + "delete_permission" + ] + }, + "GetPermission": { + "methods": [ + "get_permission" + ] + }, + "ListPermissions": { + "methods": [ + "list_permissions" + ] + }, + "TransferOwnership": { + "methods": [ + "transfer_ownership" + ] + }, + "UpdatePermission": { + "methods": [ + "update_permission" + ] + } + } + }, + "grpc-async": { + "libraryClient": "PermissionServiceAsyncClient", + "rpcs": { + "CreatePermission": { + "methods": [ + "create_permission" + ] + }, + "DeletePermission": { + "methods": [ + "delete_permission" + ] + }, + "GetPermission": { + "methods": [ + "get_permission" + ] + }, + "ListPermissions": { + "methods": [ + "list_permissions" + ] + }, + "TransferOwnership": { + "methods": [ + "transfer_ownership" + ] + }, + "UpdatePermission": { + "methods": [ + 
"update_permission" + ] + } + } + }, + "rest": { + "libraryClient": "PermissionServiceClient", + "rpcs": { + "CreatePermission": { + "methods": [ + "create_permission" + ] + }, + "DeletePermission": { + "methods": [ + "delete_permission" + ] + }, + "GetPermission": { + "methods": [ + "get_permission" + ] + }, + "ListPermissions": { + "methods": [ + "list_permissions" + ] + }, + "TransferOwnership": { + "methods": [ + "transfer_ownership" + ] + }, + "UpdatePermission": { + "methods": [ + "update_permission" + ] + } + } + } + } + }, + "PredictionService": { + "clients": { + "grpc": { + "libraryClient": "PredictionServiceClient", + "rpcs": { + "Predict": { + "methods": [ + "predict" + ] + } + } + }, + "grpc-async": { + "libraryClient": "PredictionServiceAsyncClient", + "rpcs": { + "Predict": { + "methods": [ + "predict" + ] + } + } + }, + "rest": { + "libraryClient": "PredictionServiceClient", + "rpcs": { + "Predict": { + "methods": [ + "predict" + ] + } + } + } + } + }, + "RetrieverService": { + "clients": { + "grpc": { + "libraryClient": "RetrieverServiceClient", + "rpcs": { + "BatchCreateChunks": { + "methods": [ + "batch_create_chunks" + ] + }, + "BatchDeleteChunks": { + "methods": [ + "batch_delete_chunks" + ] + }, + "BatchUpdateChunks": { + "methods": [ + "batch_update_chunks" + ] + }, + "CreateChunk": { + "methods": [ + "create_chunk" + ] + }, + "CreateCorpus": { + "methods": [ + "create_corpus" + ] + }, + "CreateDocument": { + "methods": [ + "create_document" + ] + }, + "DeleteChunk": { + "methods": [ + "delete_chunk" + ] + }, + "DeleteCorpus": { + "methods": [ + "delete_corpus" + ] + }, + "DeleteDocument": { + "methods": [ + "delete_document" + ] + }, + "GetChunk": { + "methods": [ + "get_chunk" + ] + }, + "GetCorpus": { + "methods": [ + "get_corpus" + ] + }, + "GetDocument": { + "methods": [ + "get_document" + ] + }, + "ListChunks": { + "methods": [ + "list_chunks" + ] + }, + "ListCorpora": { + "methods": [ + "list_corpora" + ] + }, + "ListDocuments": { + "methods": [ + "list_documents" + ] + }, + "QueryCorpus": { + "methods": [ + "query_corpus" + ] + }, + "QueryDocument": { + "methods": [ + "query_document" + ] + }, + "UpdateChunk": { + "methods": [ + "update_chunk" + ] + }, + "UpdateCorpus": { + "methods": [ + "update_corpus" + ] + }, + "UpdateDocument": { + "methods": [ + "update_document" + ] + } + } + }, + "grpc-async": { + "libraryClient": "RetrieverServiceAsyncClient", + "rpcs": { + "BatchCreateChunks": { + "methods": [ + "batch_create_chunks" + ] + }, + "BatchDeleteChunks": { + "methods": [ + "batch_delete_chunks" + ] + }, + "BatchUpdateChunks": { + "methods": [ + "batch_update_chunks" + ] + }, + "CreateChunk": { + "methods": [ + "create_chunk" + ] + }, + "CreateCorpus": { + "methods": [ + "create_corpus" + ] + }, + "CreateDocument": { + "methods": [ + "create_document" + ] + }, + "DeleteChunk": { + "methods": [ + "delete_chunk" + ] + }, + "DeleteCorpus": { + "methods": [ + "delete_corpus" + ] + }, + "DeleteDocument": { + "methods": [ + "delete_document" + ] + }, + "GetChunk": { + "methods": [ + "get_chunk" + ] + }, + "GetCorpus": { + "methods": [ + "get_corpus" + ] + }, + "GetDocument": { + "methods": [ + "get_document" + ] + }, + "ListChunks": { + "methods": [ + "list_chunks" + ] + }, + "ListCorpora": { + "methods": [ + "list_corpora" + ] + }, + "ListDocuments": { + "methods": [ + "list_documents" + ] + }, + "QueryCorpus": { + "methods": [ + "query_corpus" + ] + }, + "QueryDocument": { + "methods": [ + "query_document" + ] + }, + "UpdateChunk": { + "methods": [ + 
"update_chunk" + ] + }, + "UpdateCorpus": { + "methods": [ + "update_corpus" + ] + }, + "UpdateDocument": { + "methods": [ + "update_document" + ] + } + } + }, + "rest": { + "libraryClient": "RetrieverServiceClient", + "rpcs": { + "BatchCreateChunks": { + "methods": [ + "batch_create_chunks" + ] + }, + "BatchDeleteChunks": { + "methods": [ + "batch_delete_chunks" + ] + }, + "BatchUpdateChunks": { + "methods": [ + "batch_update_chunks" + ] + }, + "CreateChunk": { + "methods": [ + "create_chunk" + ] + }, + "CreateCorpus": { + "methods": [ + "create_corpus" + ] + }, + "CreateDocument": { + "methods": [ + "create_document" + ] + }, + "DeleteChunk": { + "methods": [ + "delete_chunk" + ] + }, + "DeleteCorpus": { + "methods": [ + "delete_corpus" + ] + }, + "DeleteDocument": { + "methods": [ + "delete_document" + ] + }, + "GetChunk": { + "methods": [ + "get_chunk" + ] + }, + "GetCorpus": { + "methods": [ + "get_corpus" + ] + }, + "GetDocument": { + "methods": [ + "get_document" + ] + }, + "ListChunks": { + "methods": [ + "list_chunks" + ] + }, + "ListCorpora": { + "methods": [ + "list_corpora" + ] + }, + "ListDocuments": { + "methods": [ + "list_documents" + ] + }, + "QueryCorpus": { + "methods": [ + "query_corpus" + ] + }, + "QueryDocument": { + "methods": [ + "query_document" + ] + }, + "UpdateChunk": { + "methods": [ + "update_chunk" + ] + }, + "UpdateCorpus": { + "methods": [ + "update_corpus" + ] + }, + "UpdateDocument": { + "methods": [ + "update_document" + ] + } + } + } + } + }, + "TextService": { + "clients": { + "grpc": { + "libraryClient": "TextServiceClient", + "rpcs": { + "BatchEmbedText": { + "methods": [ + "batch_embed_text" + ] + }, + "CountTextTokens": { + "methods": [ + "count_text_tokens" + ] + }, + "EmbedText": { + "methods": [ + "embed_text" + ] + }, + "GenerateText": { + "methods": [ + "generate_text" + ] + } + } + }, + "grpc-async": { + "libraryClient": "TextServiceAsyncClient", + "rpcs": { + "BatchEmbedText": { + "methods": [ + "batch_embed_text" + ] + }, + "CountTextTokens": { + "methods": [ + "count_text_tokens" + ] + }, + "EmbedText": { + "methods": [ + "embed_text" + ] + }, + "GenerateText": { + "methods": [ + "generate_text" + ] + } + } + }, + "rest": { + "libraryClient": "TextServiceClient", + "rpcs": { + "BatchEmbedText": { + "methods": [ + "batch_embed_text" + ] + }, + "CountTextTokens": { + "methods": [ + "count_text_tokens" + ] + }, + "EmbedText": { + "methods": [ + "embed_text" + ] + }, + "GenerateText": { + "methods": [ + "generate_text" + ] + } + } + } + } + } + } +} diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/gapic_version.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/gapic_version.py new file mode 100644 index 000000000000..a22e7bbe7e4a --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +__version__ = "0.6.16" # {x-release-please-version} diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/py.typed b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/py.typed new file mode 100644 index 000000000000..38773eee6363 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-ai-generativelanguage package uses inline types. diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/__init__.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/__init__.py new file mode 100644 index 000000000000..8f6cf068242c --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/cache_service/__init__.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/cache_service/__init__.py new file mode 100644 index 000000000000..2f8bc2e1ba03 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/cache_service/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .async_client import CacheServiceAsyncClient +from .client import CacheServiceClient + +__all__ = ( + "CacheServiceClient", + "CacheServiceAsyncClient", +) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/cache_service/async_client.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/cache_service/async_client.py new file mode 100644 index 000000000000..ff56bab13223 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/cache_service/async_client.py @@ -0,0 +1,950 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import logging as std_logging +import re +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry_async as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.ai.generativelanguage_v1alpha import gapic_version as package_version + +try: + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore + +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + +from google.ai.generativelanguage_v1alpha.services.cache_service import pagers +from google.ai.generativelanguage_v1alpha.types import ( + cached_content as gag_cached_content, +) +from google.ai.generativelanguage_v1alpha.types import cache_service +from google.ai.generativelanguage_v1alpha.types import cached_content +from google.ai.generativelanguage_v1alpha.types import content + +from .client import CacheServiceClient +from .transports.base import DEFAULT_CLIENT_INFO, CacheServiceTransport +from .transports.grpc_asyncio import CacheServiceGrpcAsyncIOTransport + +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + + +class CacheServiceAsyncClient: + """API for managing cache of content (CachedContent resources) + that can be used in GenerativeService requests. This way + generate content requests can benefit from preprocessing work + being done earlier, possibly lowering their computational cost. + It is intended to be used with large contexts. + """ + + _client: CacheServiceClient + + # Copy defaults from the synchronous client for use here. + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. 
+ DEFAULT_ENDPOINT = CacheServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = CacheServiceClient.DEFAULT_MTLS_ENDPOINT + _DEFAULT_ENDPOINT_TEMPLATE = CacheServiceClient._DEFAULT_ENDPOINT_TEMPLATE + _DEFAULT_UNIVERSE = CacheServiceClient._DEFAULT_UNIVERSE + + cached_content_path = staticmethod(CacheServiceClient.cached_content_path) + parse_cached_content_path = staticmethod( + CacheServiceClient.parse_cached_content_path + ) + model_path = staticmethod(CacheServiceClient.model_path) + parse_model_path = staticmethod(CacheServiceClient.parse_model_path) + common_billing_account_path = staticmethod( + CacheServiceClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + CacheServiceClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(CacheServiceClient.common_folder_path) + parse_common_folder_path = staticmethod(CacheServiceClient.parse_common_folder_path) + common_organization_path = staticmethod(CacheServiceClient.common_organization_path) + parse_common_organization_path = staticmethod( + CacheServiceClient.parse_common_organization_path + ) + common_project_path = staticmethod(CacheServiceClient.common_project_path) + parse_common_project_path = staticmethod( + CacheServiceClient.parse_common_project_path + ) + common_location_path = staticmethod(CacheServiceClient.common_location_path) + parse_common_location_path = staticmethod( + CacheServiceClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + CacheServiceAsyncClient: The constructed client. + """ + return CacheServiceClient.from_service_account_info.__func__(CacheServiceAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + CacheServiceAsyncClient: The constructed client. + """ + return CacheServiceClient.from_service_account_file.__func__(CacheServiceAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. 
+ (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return CacheServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> CacheServiceTransport: + """Returns the transport used by the client instance. + + Returns: + CacheServiceTransport: The transport used by the client instance. + """ + return self._client.transport + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._client._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used + by the client instance. + """ + return self._client._universe_domain + + get_transport_class = CacheServiceClient.get_transport_class + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[str, CacheServiceTransport, Callable[..., CacheServiceTransport]] + ] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the cache service async client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,CacheServiceTransport,Callable[..., CacheServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport to use. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the CacheServiceTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. 
If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = CacheServiceClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ): # pragma: NO COVER + _LOGGER.debug( + "Created client `google.ai.generativelanguage_v1alpha.CacheServiceAsyncClient`.", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.CacheService", + "universeDomain": getattr( + self._client._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._client._transport, "_credentials") + else { + "serviceName": "google.ai.generativelanguage.v1alpha.CacheService", + "credentialsType": None, + }, + ) + + async def list_cached_contents( + self, + request: Optional[Union[cache_service.ListCachedContentsRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListCachedContentsAsyncPager: + r"""Lists CachedContents. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1alpha + + async def sample_list_cached_contents(): + # Create a client + client = generativelanguage_v1alpha.CacheServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.ListCachedContentsRequest( + ) + + # Make the request + page_result = client.list_cached_contents(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.ai.generativelanguage_v1alpha.types.ListCachedContentsRequest, dict]]): + The request object. Request to list CachedContents. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.ai.generativelanguage_v1alpha.services.cache_service.pagers.ListCachedContentsAsyncPager: + Response with CachedContents list. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, cache_service.ListCachedContentsRequest): + request = cache_service.ListCachedContentsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_cached_contents + ] + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListCachedContentsAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_cached_content( + self, + request: Optional[Union[cache_service.CreateCachedContentRequest, dict]] = None, + *, + cached_content: Optional[gag_cached_content.CachedContent] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> gag_cached_content.CachedContent: + r"""Creates CachedContent resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1alpha + + async def sample_create_cached_content(): + # Create a client + client = generativelanguage_v1alpha.CacheServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.CreateCachedContentRequest( + ) + + # Make the request + response = await client.create_cached_content(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.ai.generativelanguage_v1alpha.types.CreateCachedContentRequest, dict]]): + The request object. Request to create CachedContent. + cached_content (:class:`google.ai.generativelanguage_v1alpha.types.CachedContent`): + Required. The cached content to + create. + + This corresponds to the ``cached_content`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.ai.generativelanguage_v1alpha.types.CachedContent: + Content that has been preprocessed + and can be used in subsequent request to + GenerativeService. + + Cached content can be only used with + model it was created for. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([cached_content]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, cache_service.CreateCachedContentRequest): + request = cache_service.CreateCachedContentRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if cached_content is not None: + request.cached_content = cached_content + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_cached_content + ] + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_cached_content( + self, + request: Optional[Union[cache_service.GetCachedContentRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> cached_content.CachedContent: + r"""Reads CachedContent resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1alpha + + async def sample_get_cached_content(): + # Create a client + client = generativelanguage_v1alpha.CacheServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.GetCachedContentRequest( + name="name_value", + ) + + # Make the request + response = await client.get_cached_content(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.ai.generativelanguage_v1alpha.types.GetCachedContentRequest, dict]]): + The request object. Request to read CachedContent. + name (:class:`str`): + Required. The resource name referring to the content + cache entry. Format: ``cachedContents/{id}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.ai.generativelanguage_v1alpha.types.CachedContent: + Content that has been preprocessed + and can be used in subsequent request to + GenerativeService. + + Cached content can be only used with + model it was created for. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, cache_service.GetCachedContentRequest): + request = cache_service.GetCachedContentRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_cached_content + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def update_cached_content( + self, + request: Optional[Union[cache_service.UpdateCachedContentRequest, dict]] = None, + *, + cached_content: Optional[gag_cached_content.CachedContent] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> gag_cached_content.CachedContent: + r"""Updates CachedContent resource (only expiration is + updatable). + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1alpha + + async def sample_update_cached_content(): + # Create a client + client = generativelanguage_v1alpha.CacheServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.UpdateCachedContentRequest( + ) + + # Make the request + response = await client.update_cached_content(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.ai.generativelanguage_v1alpha.types.UpdateCachedContentRequest, dict]]): + The request object. Request to update CachedContent. + cached_content (:class:`google.ai.generativelanguage_v1alpha.types.CachedContent`): + Required. The content cache entry to + update + + This corresponds to the ``cached_content`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + The list of fields to update. + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.ai.generativelanguage_v1alpha.types.CachedContent: + Content that has been preprocessed + and can be used in subsequent request to + GenerativeService. + + Cached content can be only used with + model it was created for. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([cached_content, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, cache_service.UpdateCachedContentRequest): + request = cache_service.UpdateCachedContentRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if cached_content is not None: + request.cached_content = cached_content + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_cached_content + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("cached_content.name", request.cached_content.name),) + ), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def delete_cached_content( + self, + request: Optional[Union[cache_service.DeleteCachedContentRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Deletes CachedContent resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1alpha + + async def sample_delete_cached_content(): + # Create a client + client = generativelanguage_v1alpha.CacheServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.DeleteCachedContentRequest( + name="name_value", + ) + + # Make the request + await client.delete_cached_content(request=request) + + Args: + request (Optional[Union[google.ai.generativelanguage_v1alpha.types.DeleteCachedContentRequest, dict]]): + The request object. Request to delete CachedContent. + name (:class:`str`): + Required. The resource name referring to the content + cache entry Format: ``cachedContents/{id}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, cache_service.DeleteCachedContentRequest): + request = cache_service.DeleteCachedContentRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_cached_content + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. 
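Every call accepts the ``retry``, ``timeout`` and ``metadata`` parameters documented above. A sketch that overrides all three on ``delete_cached_content``; the retry policy values and the custom metadata key are illustrative only:

.. code-block:: python

    import asyncio

    from google.ai import generativelanguage_v1alpha
    from google.api_core import exceptions as core_exceptions
    from google.api_core import retry_async
    from google.api_core.retry import if_exception_type


    async def delete_with_options():
        client = generativelanguage_v1alpha.CacheServiceAsyncClient()

        retry = retry_async.AsyncRetry(
            predicate=if_exception_type(core_exceptions.ServiceUnavailable),
            initial=1.0,
            maximum=10.0,
            multiplier=2.0,
            timeout=60.0,
        )

        # Returns None; the user-supplied metadata comes first and the method
        # appends its own ("name", request.name) routing header after it.
        await client.delete_cached_content(
            name="cachedContents/example-id",
            retry=retry,
            timeout=30.0,
            metadata=[("x-example-note", "cleanup")],
        )


    asyncio.run(delete_with_options())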
+ await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.list_operations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.get_operation] + + # Certain fields should be provided within the metadata header; + # add these here. 
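The operations helpers take raw ``operations_pb2`` messages, or a plain dict that is expanded into one. A small sketch; the operation names are placeholders, since the resource format is not shown in this diff:

.. code-block:: python

    import asyncio

    from google.ai import generativelanguage_v1alpha
    from google.longrunning import operations_pb2


    async def inspect_operations():
        client = generativelanguage_v1alpha.CacheServiceAsyncClient()

        # A dict is accepted and expanded into ListOperationsRequest(**request).
        listing = await client.list_operations(
            request={"name": "tunedModels/example-model", "page_size": 10}
        )
        for op in listing.operations:
            print(op.name, op.done)

        # Or build the proto message directly.
        op = await client.get_operation(
            request=operations_pb2.GetOperationRequest(
                name="tunedModels/example-model/operations/example-op"
            )
        )
        print(op.done)


    asyncio.run(inspect_operations())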
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def __aenter__(self) -> "CacheServiceAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("CacheServiceAsyncClient",) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/cache_service/client.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/cache_service/client.py new file mode 100644 index 000000000000..131318a9c1e1 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/cache_service/client.py @@ -0,0 +1,1377 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from http import HTTPStatus +import json +import logging as std_logging +import os +import re +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) +import warnings + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.ai.generativelanguage_v1alpha import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + +from google.ai.generativelanguage_v1alpha.services.cache_service import pagers +from google.ai.generativelanguage_v1alpha.types import ( + cached_content as gag_cached_content, +) +from google.ai.generativelanguage_v1alpha.types import 
cache_service +from google.ai.generativelanguage_v1alpha.types import cached_content +from google.ai.generativelanguage_v1alpha.types import content + +from .transports.base import DEFAULT_CLIENT_INFO, CacheServiceTransport +from .transports.grpc import CacheServiceGrpcTransport +from .transports.grpc_asyncio import CacheServiceGrpcAsyncIOTransport +from .transports.rest import CacheServiceRestTransport + + +class CacheServiceClientMeta(type): + """Metaclass for the CacheService client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + + _transport_registry = OrderedDict() # type: Dict[str, Type[CacheServiceTransport]] + _transport_registry["grpc"] = CacheServiceGrpcTransport + _transport_registry["grpc_asyncio"] = CacheServiceGrpcAsyncIOTransport + _transport_registry["rest"] = CacheServiceRestTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[CacheServiceTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class CacheServiceClient(metaclass=CacheServiceClientMeta): + """API for managing cache of content (CachedContent resources) + that can be used in GenerativeService requests. This way + generate content requests can benefit from preprocessing work + being done earlier, possibly lowering their computational cost. + It is intended to be used with large contexts. + """ + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. + DEFAULT_ENDPOINT = "generativelanguage.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + _DEFAULT_ENDPOINT_TEMPLATE = "generativelanguage.{UNIVERSE_DOMAIN}" + _DEFAULT_UNIVERSE = "googleapis.com" + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + CacheServiceClient: The constructed client. 
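The metaclass registry above maps the labels ``"grpc"``, ``"grpc_asyncio"`` and ``"rest"`` to concrete transport classes, and the first entry is the default when no label is given. A sketch of selecting a transport explicitly; anonymous credentials keep the example offline:

.. code-block:: python

    from google.ai import generativelanguage_v1alpha
    from google.auth.credentials import AnonymousCredentials

    # No label: the first registered transport (gRPC) is used.
    grpc_client = generativelanguage_v1alpha.CacheServiceClient(
        credentials=AnonymousCredentials(),
    )

    # Explicit label: resolved via CacheServiceClientMeta.get_transport_class("rest").
    rest_client = generativelanguage_v1alpha.CacheServiceClient(
        transport="rest",
        credentials=AnonymousCredentials(),
    )

    print(type(grpc_client.transport).__name__)  # CacheServiceGrpcTransport
    print(type(rest_client.transport).__name__)  # CacheServiceRestTransport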
+ """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + CacheServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> CacheServiceTransport: + """Returns the transport used by the client instance. + + Returns: + CacheServiceTransport: The transport used by the client + instance. + """ + return self._transport + + @staticmethod + def cached_content_path( + id: str, + ) -> str: + """Returns a fully-qualified cached_content string.""" + return "cachedContents/{id}".format( + id=id, + ) + + @staticmethod + def parse_cached_content_path(path: str) -> Dict[str, str]: + """Parses a cached_content path into its component segments.""" + m = re.match(r"^cachedContents/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def model_path( + model: str, + ) -> str: + """Returns a fully-qualified model string.""" + return "models/{model}".format( + model=model, + ) + + @staticmethod + def parse_model_path(path: str) -> Dict[str, str]: + """Parses a model path into its component segments.""" + m = re.match(r"^models/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path( + billing_account: str, + ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path( + folder: str, + ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format( + folder=folder, + ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path( + organization: str, + ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format( + organization=organization, + ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path( + project: str, + ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format( + project=project, + ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def 
common_location_path( + project: str, + location: str, + ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): + """Deprecated. Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + + warnings.warn( + "get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", + DeprecationWarning, + ) + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + @staticmethod + def _read_environment_variables(): + """Returns the environment variables used by the client. 
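The environment variables read here determine which endpoint the client resolves. A sketch showing the effect through the ``api_endpoint`` property; anonymous credentials keep it offline:

.. code-block:: python

    import os

    from google.ai import generativelanguage_v1alpha
    from google.auth.credentials import AnonymousCredentials

    # "never" forces the regular endpoint even if a client certificate is present;
    # "always" forces the mTLS endpoint; "auto" (the default) switches on cert presence.
    os.environ["GOOGLE_API_USE_MTLS_ENDPOINT"] = "never"
    os.environ["GOOGLE_API_USE_CLIENT_CERTIFICATE"] = "false"

    client = generativelanguage_v1alpha.CacheServiceClient(
        credentials=AnonymousCredentials(),
    )
    print(client.api_endpoint)  # generativelanguage.googleapis.com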
+ + Returns: + Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, + GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. + + Raises: + ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not + any of ["true", "false"]. + google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT + is not any of ["auto", "never", "always"]. + """ + use_client_cert = os.getenv( + "GOOGLE_API_USE_CLIENT_CERTIFICATE", "false" + ).lower() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() + universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + return use_client_cert == "true", use_mtls_endpoint, universe_domain_env + + @staticmethod + def _get_client_cert_source(provided_cert_source, use_cert_flag): + """Return the client cert source to be used by the client. + + Args: + provided_cert_source (bytes): The client certificate source provided. + use_cert_flag (bool): A flag indicating whether to use the client certificate. + + Returns: + bytes or None: The client cert source to be used by the client. + """ + client_cert_source = None + if use_cert_flag: + if provided_cert_source: + client_cert_source = provided_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + return client_cert_source + + @staticmethod + def _get_api_endpoint( + api_override, client_cert_source, universe_domain, use_mtls_endpoint + ): + """Return the API endpoint used by the client. + + Args: + api_override (str): The API endpoint override. If specified, this is always + the return value of this function and the other arguments are not used. + client_cert_source (bytes): The client certificate source used by the client. + universe_domain (str): The universe domain used by the client. + use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. + Possible values are "always", "auto", or "never". + + Returns: + str: The API endpoint to be used by the client. + """ + if api_override is not None: + api_endpoint = api_override + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + _default_universe = CacheServiceClient._DEFAULT_UNIVERSE + if universe_domain != _default_universe: + raise MutualTLSChannelError( + f"mTLS is not supported in any universe other than {_default_universe}." + ) + api_endpoint = CacheServiceClient.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = CacheServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=universe_domain + ) + return api_endpoint + + @staticmethod + def _get_universe_domain( + client_universe_domain: Optional[str], universe_domain_env: Optional[str] + ) -> str: + """Return the universe domain used by the client. + + Args: + client_universe_domain (Optional[str]): The universe domain configured via the client options. + universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. + + Returns: + str: The universe domain to be used by the client. + + Raises: + ValueError: If the universe domain is an empty string. 
+ """ + universe_domain = CacheServiceClient._DEFAULT_UNIVERSE + if client_universe_domain is not None: + universe_domain = client_universe_domain + elif universe_domain_env is not None: + universe_domain = universe_domain_env + if len(universe_domain.strip()) == 0: + raise ValueError("Universe Domain cannot be an empty string.") + return universe_domain + + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. + + Returns: + bool: True iff the configured universe domain is valid. + + Raises: + ValueError: If the configured universe domain is not valid. + """ + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True + + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used by the client instance. + """ + return self._universe_domain + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[str, CacheServiceTransport, Callable[..., CacheServiceTransport]] + ] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the cache service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,CacheServiceTransport,Callable[..., CacheServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the CacheServiceTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. 
Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that the ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client_options = client_options + if isinstance(self._client_options, dict): + self._client_options = client_options_lib.from_dict(self._client_options) + if self._client_options is None: + self._client_options = client_options_lib.ClientOptions() + self._client_options = cast( + client_options_lib.ClientOptions, self._client_options + ) + + universe_domain_opt = getattr(self._client_options, "universe_domain", None) + + ( + self._use_client_cert, + self._use_mtls_endpoint, + self._universe_domain_env, + ) = CacheServiceClient._read_environment_variables() + self._client_cert_source = CacheServiceClient._get_client_cert_source( + self._client_options.client_cert_source, self._use_client_cert + ) + self._universe_domain = CacheServiceClient._get_universe_domain( + universe_domain_opt, self._universe_domain_env + ) + self._api_endpoint = None # updated below, depending on `transport` + + # Initialize the universe domain validation. + self._is_universe_domain_valid = False + + if CLIENT_LOGGING_SUPPORTED: # pragma: NO COVER + # Setup logging. + client_logging.initialize_logging() + + api_key_value = getattr(self._client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + transport_provided = isinstance(transport, CacheServiceTransport) + if transport_provided: + # transport is a CacheServiceTransport instance. + if credentials or self._client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if self._client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = cast(CacheServiceTransport, transport) + self._api_endpoint = self._transport.host + + self._api_endpoint = self._api_endpoint or CacheServiceClient._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint, + ) + + if not transport_provided: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + transport_init: Union[ + Type[CacheServiceTransport], Callable[..., CacheServiceTransport] + ] = ( + CacheServiceClient.get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., CacheServiceTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( + credentials=credentials, + credentials_file=self._client_options.credentials_file, + host=self._api_endpoint, + scopes=self._client_options.scopes, + client_cert_source_for_mtls=self._client_cert_source, + quota_project_id=self._client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=self._client_options.api_audience, + ) + + if "async" not in str(self._transport): + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ): # pragma: NO COVER + _LOGGER.debug( + "Created client `google.ai.generativelanguage_v1alpha.CacheServiceClient`.", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.CacheService", + "universeDomain": getattr( + self._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._transport, "_credentials") + else { + "serviceName": "google.ai.generativelanguage.v1alpha.CacheService", + "credentialsType": None, + }, + ) + + def list_cached_contents( + self, + request: Optional[Union[cache_service.ListCachedContentsRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListCachedContentsPager: + r"""Lists CachedContents. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1alpha + + def sample_list_cached_contents(): + # Create a client + client = generativelanguage_v1alpha.CacheServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.ListCachedContentsRequest( + ) + + # Make the request + page_result = client.list_cached_contents(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.ai.generativelanguage_v1alpha.types.ListCachedContentsRequest, dict]): + The request object. Request to list CachedContents. 
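The constructor also accepts an API key through ``client_options``; as enforced above it is mutually exclusive with ``credentials``, and when the installed google-auth provides ``get_api_key_credentials`` the key is converted into credentials. A sketch with a placeholder key:

.. code-block:: python

    from google.ai import generativelanguage_v1alpha
    from google.api_core.client_options import ClientOptions

    # Placeholder value; a real Generative Language API key goes here.
    client = generativelanguage_v1alpha.CacheServiceClient(
        client_options=ClientOptions(api_key="YOUR_API_KEY"),
    )

    # Passing both an API key and credentials raises ValueError:
    # "client_options.api_key and credentials are mutually exclusive"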
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.ai.generativelanguage_v1alpha.services.cache_service.pagers.ListCachedContentsPager: + Response with CachedContents list. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, cache_service.ListCachedContentsRequest): + request = cache_service.ListCachedContentsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_cached_contents] + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListCachedContentsPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_cached_content( + self, + request: Optional[Union[cache_service.CreateCachedContentRequest, dict]] = None, + *, + cached_content: Optional[gag_cached_content.CachedContent] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> gag_cached_content.CachedContent: + r"""Creates CachedContent resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1alpha + + def sample_create_cached_content(): + # Create a client + client = generativelanguage_v1alpha.CacheServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.CreateCachedContentRequest( + ) + + # Make the request + response = client.create_cached_content(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.ai.generativelanguage_v1alpha.types.CreateCachedContentRequest, dict]): + The request object. Request to create CachedContent. + cached_content (google.ai.generativelanguage_v1alpha.types.CachedContent): + Required. The cached content to + create. + + This corresponds to the ``cached_content`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
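``list_cached_contents`` returns a ``ListCachedContentsPager``: iterating it yields items and follows ``next_page_token`` transparently, while ``pages`` yields the raw responses. A sketch; the page size is illustrative and default credentials are assumed:

.. code-block:: python

    from google.ai import generativelanguage_v1alpha


    def dump_cached_contents():
        client = generativelanguage_v1alpha.CacheServiceClient()

        # Item-level iteration: additional pages are fetched automatically.
        for cached in client.list_cached_contents(request={"page_size": 25}):
            print(cached.name)

        # Page-level iteration: one ListCachedContentsResponse per request.
        for page in client.list_cached_contents(request={"page_size": 25}).pages:
            print(len(page.cached_contents), "items on this page")


    dump_cached_contents()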
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.ai.generativelanguage_v1alpha.types.CachedContent: + Content that has been preprocessed + and can be used in subsequent request to + GenerativeService. + + Cached content can be only used with + model it was created for. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([cached_content]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, cache_service.CreateCachedContentRequest): + request = cache_service.CreateCachedContentRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if cached_content is not None: + request.cached_content = cached_content + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_cached_content] + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_cached_content( + self, + request: Optional[Union[cache_service.GetCachedContentRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> cached_content.CachedContent: + r"""Reads CachedContent resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1alpha + + def sample_get_cached_content(): + # Create a client + client = generativelanguage_v1alpha.CacheServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.GetCachedContentRequest( + name="name_value", + ) + + # Make the request + response = client.get_cached_content(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.ai.generativelanguage_v1alpha.types.GetCachedContentRequest, dict]): + The request object. Request to read CachedContent. + name (str): + Required. The resource name referring to the content + cache entry. Format: ``cachedContents/{id}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.ai.generativelanguage_v1alpha.types.CachedContent: + Content that has been preprocessed + and can be used in subsequent request to + GenerativeService. + + Cached content can be only used with + model it was created for. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, cache_service.GetCachedContentRequest): + request = cache_service.GetCachedContentRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_cached_content] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_cached_content( + self, + request: Optional[Union[cache_service.UpdateCachedContentRequest, dict]] = None, + *, + cached_content: Optional[gag_cached_content.CachedContent] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> gag_cached_content.CachedContent: + r"""Updates CachedContent resource (only expiration is + updatable). + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1alpha + + def sample_update_cached_content(): + # Create a client + client = generativelanguage_v1alpha.CacheServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.UpdateCachedContentRequest( + ) + + # Make the request + response = client.update_cached_content(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.ai.generativelanguage_v1alpha.types.UpdateCachedContentRequest, dict]): + The request object. Request to update CachedContent. 
+ cached_content (google.ai.generativelanguage_v1alpha.types.CachedContent): + Required. The content cache entry to + update + + This corresponds to the ``cached_content`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + The list of fields to update. + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.ai.generativelanguage_v1alpha.types.CachedContent: + Content that has been preprocessed + and can be used in subsequent request to + GenerativeService. + + Cached content can be only used with + model it was created for. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([cached_content, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, cache_service.UpdateCachedContentRequest): + request = cache_service.UpdateCachedContentRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if cached_content is not None: + request.cached_content = cached_content + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_cached_content] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("cached_content.name", request.cached_content.name),) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_cached_content( + self, + request: Optional[Union[cache_service.DeleteCachedContentRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Deletes CachedContent resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1alpha + + def sample_delete_cached_content(): + # Create a client + client = generativelanguage_v1alpha.CacheServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.DeleteCachedContentRequest( + name="name_value", + ) + + # Make the request + client.delete_cached_content(request=request) + + Args: + request (Union[google.ai.generativelanguage_v1alpha.types.DeleteCachedContentRequest, dict]): + The request object. Request to delete CachedContent. + name (str): + Required. The resource name referring to the content + cache entry Format: ``cachedContents/{id}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, cache_service.DeleteCachedContentRequest): + request = cache_service.DeleteCachedContentRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_cached_content] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def __enter__(self) -> "CacheServiceClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. 
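As the ``__exit__`` warning notes, leaving the ``with`` block closes the underlying transport, so the context manager should only be used when the transport is not shared with other clients. A sketch, assuming default credentials:

.. code-block:: python

    from google.ai import generativelanguage_v1alpha

    # The transport is closed when the block exits; keep all calls inside it
    # and do not reuse this client (or its transport) afterwards.
    with generativelanguage_v1alpha.CacheServiceClient() as client:
        for cached in client.list_cached_contents():
            print(cached.name)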
+ + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_operations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e + + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("CacheServiceClient",) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/cache_service/pagers.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/cache_service/pagers.py new file mode 100644 index 000000000000..0a43aadf2d53 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/cache_service/pagers.py @@ -0,0 +1,197 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import ( + Any, + AsyncIterator, + Awaitable, + Callable, + Iterator, + Optional, + Sequence, + Tuple, + Union, +) + +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core import retry_async as retries_async + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] + OptionalAsyncRetry = Union[ + retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None + ] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore + +from google.ai.generativelanguage_v1alpha.types import cache_service, cached_content + + +class ListCachedContentsPager: + """A pager for iterating through ``list_cached_contents`` requests. + + This class thinly wraps an initial + :class:`google.ai.generativelanguage_v1alpha.types.ListCachedContentsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``cached_contents`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListCachedContents`` requests and continue to iterate + through the ``cached_contents`` field on the + corresponding responses. + + All the usual :class:`google.ai.generativelanguage_v1alpha.types.ListCachedContentsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., cache_service.ListCachedContentsResponse], + request: cache_service.ListCachedContentsRequest, + response: cache_service.ListCachedContentsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.ai.generativelanguage_v1alpha.types.ListCachedContentsRequest): + The initial request object. 
+ response (google.ai.generativelanguage_v1alpha.types.ListCachedContentsResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + self._method = method + self._request = cache_service.ListCachedContentsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[cache_service.ListCachedContentsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__(self) -> Iterator[cached_content.CachedContent]: + for page in self.pages: + yield from page.cached_contents + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListCachedContentsAsyncPager: + """A pager for iterating through ``list_cached_contents`` requests. + + This class thinly wraps an initial + :class:`google.ai.generativelanguage_v1alpha.types.ListCachedContentsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``cached_contents`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListCachedContents`` requests and continue to iterate + through the ``cached_contents`` field on the + corresponding responses. + + All the usual :class:`google.ai.generativelanguage_v1alpha.types.ListCachedContentsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[cache_service.ListCachedContentsResponse]], + request: cache_service.ListCachedContentsRequest, + response: cache_service.ListCachedContentsResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.ai.generativelanguage_v1alpha.types.ListCachedContentsRequest): + The initial request object. + response (google.ai.generativelanguage_v1alpha.types.ListCachedContentsResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = cache_service.ListCachedContentsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[cache_service.ListCachedContentsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __aiter__(self) -> AsyncIterator[cached_content.CachedContent]: + async def async_generator(): + async for page in self.pages: + for response in page.cached_contents: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/cache_service/transports/README.rst b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/cache_service/transports/README.rst new file mode 100644 index 000000000000..8647ac1a9f9b --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/cache_service/transports/README.rst @@ -0,0 +1,9 @@ + +transport inheritance structure +_______________________________ + +`CacheServiceTransport` is the ABC for all transports. +- public child `CacheServiceGrpcTransport` for sync gRPC transport (defined in `grpc.py`). +- public child `CacheServiceGrpcAsyncIOTransport` for async gRPC transport (defined in `grpc_asyncio.py`). +- private child `_BaseCacheServiceRestTransport` for base REST transport with inner classes `_BaseMETHOD` (defined in `rest_base.py`). +- public child `CacheServiceRestTransport` for sync REST transport with inner classes `METHOD` derived from the parent's corresponding `_BaseMETHOD` classes (defined in `rest.py`). diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/cache_service/transports/__init__.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/cache_service/transports/__init__.py new file mode 100644 index 000000000000..cef091cd23ab --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/cache_service/transports/__init__.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import CacheServiceTransport +from .grpc import CacheServiceGrpcTransport +from .grpc_asyncio import CacheServiceGrpcAsyncIOTransport +from .rest import CacheServiceRestInterceptor, CacheServiceRestTransport + +# Compile a registry of transports. 
+_transport_registry = OrderedDict() # type: Dict[str, Type[CacheServiceTransport]] +_transport_registry["grpc"] = CacheServiceGrpcTransport +_transport_registry["grpc_asyncio"] = CacheServiceGrpcAsyncIOTransport +_transport_registry["rest"] = CacheServiceRestTransport + +__all__ = ( + "CacheServiceTransport", + "CacheServiceGrpcTransport", + "CacheServiceGrpcAsyncIOTransport", + "CacheServiceRestTransport", + "CacheServiceRestInterceptor", +) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/cache_service/transports/base.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/cache_service/transports/base.py new file mode 100644 index 000000000000..ee82d216203c --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/cache_service/transports/base.py @@ -0,0 +1,263 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account # type: ignore +from google.protobuf import empty_pb2 # type: ignore + +from google.ai.generativelanguage_v1alpha import gapic_version as package_version +from google.ai.generativelanguage_v1alpha.types import ( + cached_content as gag_cached_content, +) +from google.ai.generativelanguage_v1alpha.types import cache_service +from google.ai.generativelanguage_v1alpha.types import cached_content + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class CacheServiceTransport(abc.ABC): + """Abstract transport class for CacheService.""" + + AUTH_SCOPES = () + + DEFAULT_HOST: str = "generativelanguage.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'generativelanguage.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. 
+ credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + if not hasattr(self, "_ignore_credentials"): + self._ignore_credentials: bool = False + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None and not self._ignore_credentials: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + @property + def host(self): + return self._host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.list_cached_contents: gapic_v1.method.wrap_method( + self.list_cached_contents, + default_timeout=None, + client_info=client_info, + ), + self.create_cached_content: gapic_v1.method.wrap_method( + self.create_cached_content, + default_timeout=None, + client_info=client_info, + ), + self.get_cached_content: gapic_v1.method.wrap_method( + self.get_cached_content, + default_timeout=None, + client_info=client_info, + ), + self.update_cached_content: gapic_v1.method.wrap_method( + self.update_cached_content, + default_timeout=None, + client_info=client_info, + ), + self.delete_cached_content: gapic_v1.method.wrap_method( + self.delete_cached_content, + default_timeout=None, + client_info=client_info, + ), + self.get_operation: gapic_v1.method.wrap_method( + self.get_operation, + default_timeout=None, + client_info=client_info, + ), + self.list_operations: gapic_v1.method.wrap_method( + self.list_operations, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. 
warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + + @property + def list_cached_contents( + self, + ) -> Callable[ + [cache_service.ListCachedContentsRequest], + Union[ + cache_service.ListCachedContentsResponse, + Awaitable[cache_service.ListCachedContentsResponse], + ], + ]: + raise NotImplementedError() + + @property + def create_cached_content( + self, + ) -> Callable[ + [cache_service.CreateCachedContentRequest], + Union[ + gag_cached_content.CachedContent, + Awaitable[gag_cached_content.CachedContent], + ], + ]: + raise NotImplementedError() + + @property + def get_cached_content( + self, + ) -> Callable[ + [cache_service.GetCachedContentRequest], + Union[cached_content.CachedContent, Awaitable[cached_content.CachedContent]], + ]: + raise NotImplementedError() + + @property + def update_cached_content( + self, + ) -> Callable[ + [cache_service.UpdateCachedContentRequest], + Union[ + gag_cached_content.CachedContent, + Awaitable[gag_cached_content.CachedContent], + ], + ]: + raise NotImplementedError() + + @property + def delete_cached_content( + self, + ) -> Callable[ + [cache_service.DeleteCachedContentRequest], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], + ]: + raise NotImplementedError() + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], + Union[ + operations_pb2.ListOperationsResponse, + Awaitable[operations_pb2.ListOperationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_operation( + self, + ) -> Callable[ + [operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("CacheServiceTransport",) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/cache_service/transports/grpc.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/cache_service/transports/grpc.py new file mode 100644 index 000000000000..c337588e39d7 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/cache_service/transports/grpc.py @@ -0,0 +1,517 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
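+#
+# This module provides the synchronous gRPC transport for CacheService; the
+# _LoggingClientInterceptor defined below logs request and response payloads
+# only when DEBUG logging is enabled for this module's logger.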
+# +import json +import logging as std_logging +import pickle +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf.json_format import MessageToJson +import google.protobuf.message +import grpc # type: ignore +import proto # type: ignore + +from google.ai.generativelanguage_v1alpha.types import ( + cached_content as gag_cached_content, +) +from google.ai.generativelanguage_v1alpha.types import cache_service +from google.ai.generativelanguage_v1alpha.types import cached_content + +from .base import DEFAULT_CLIENT_INFO, CacheServiceTransport + +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + + +class _LoggingClientInterceptor(grpc.UnaryUnaryClientInterceptor): # pragma: NO COVER + def intercept_unary_unary(self, continuation, client_call_details, request): + logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ) + if logging_enabled: # pragma: NO COVER + request_metadata = client_call_details.metadata + if isinstance(request, proto.Message): + request_payload = type(request).to_json(request) + elif isinstance(request, google.protobuf.message.Message): + request_payload = MessageToJson(request) + else: + request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" + + request_metadata = { + key: value.decode("utf-8") if isinstance(value, bytes) else value + for key, value in request_metadata + } + grpc_request = { + "payload": request_payload, + "requestMethod": "grpc", + "metadata": dict(request_metadata), + } + _LOGGER.debug( + f"Sending request for {client_call_details.method}", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.CacheService", + "rpcName": client_call_details.method, + "request": grpc_request, + "metadata": grpc_request["metadata"], + }, + ) + + response = continuation(client_call_details, request) + if logging_enabled: # pragma: NO COVER + response_metadata = response.trailing_metadata() + # Convert gRPC metadata `` to list of tuples + metadata = ( + dict([(k, str(v)) for k, v in response_metadata]) + if response_metadata + else None + ) + result = response.result() + if isinstance(result, proto.Message): + response_payload = type(result).to_json(result) + elif isinstance(result, google.protobuf.message.Message): + response_payload = MessageToJson(result) + else: + response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" + grpc_response = { + "payload": response_payload, + "metadata": metadata, + "status": "OK", + } + _LOGGER.debug( + f"Received response for {client_call_details.method}.", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.CacheService", + "rpcName": client_call_details.method, + "response": grpc_response, + "metadata": grpc_response["metadata"], + }, + ) + return response + + +class CacheServiceGrpcTransport(CacheServiceTransport): + """gRPC backend transport for CacheService. 
+ + API for managing cache of content (CachedContent resources) + that can be used in GenerativeService requests. This way + generate content requests can benefit from preprocessing work + being done earlier, possibly lowering their computational cost. + It is intended to be used with large contexts. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "generativelanguage.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'generativelanguage.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if a ``channel`` instance is provided. + channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. 
+ quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, grpc.Channel): + # Ignore credentials if a channel was passed. + credentials = None + self._ignore_credentials = True + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + self._interceptor = _LoggingClientInterceptor() + self._logged_channel = grpc.intercept_channel( + self._grpc_channel, self._interceptor + ) + + # Wrap messages. This must be done after self._logged_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "generativelanguage.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. 
+ Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service.""" + return self._grpc_channel + + @property + def list_cached_contents( + self, + ) -> Callable[ + [cache_service.ListCachedContentsRequest], + cache_service.ListCachedContentsResponse, + ]: + r"""Return a callable for the list cached contents method over gRPC. + + Lists CachedContents. + + Returns: + Callable[[~.ListCachedContentsRequest], + ~.ListCachedContentsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_cached_contents" not in self._stubs: + self._stubs["list_cached_contents"] = self._logged_channel.unary_unary( + "/google.ai.generativelanguage.v1alpha.CacheService/ListCachedContents", + request_serializer=cache_service.ListCachedContentsRequest.serialize, + response_deserializer=cache_service.ListCachedContentsResponse.deserialize, + ) + return self._stubs["list_cached_contents"] + + @property + def create_cached_content( + self, + ) -> Callable[ + [cache_service.CreateCachedContentRequest], gag_cached_content.CachedContent + ]: + r"""Return a callable for the create cached content method over gRPC. + + Creates CachedContent resource. + + Returns: + Callable[[~.CreateCachedContentRequest], + ~.CachedContent]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
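+ # The resulting stub is cached in self._stubs, so repeated property
+ # access reuses the same callable on this channel.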
+ if "create_cached_content" not in self._stubs: + self._stubs["create_cached_content"] = self._logged_channel.unary_unary( + "/google.ai.generativelanguage.v1alpha.CacheService/CreateCachedContent", + request_serializer=cache_service.CreateCachedContentRequest.serialize, + response_deserializer=gag_cached_content.CachedContent.deserialize, + ) + return self._stubs["create_cached_content"] + + @property + def get_cached_content( + self, + ) -> Callable[ + [cache_service.GetCachedContentRequest], cached_content.CachedContent + ]: + r"""Return a callable for the get cached content method over gRPC. + + Reads CachedContent resource. + + Returns: + Callable[[~.GetCachedContentRequest], + ~.CachedContent]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_cached_content" not in self._stubs: + self._stubs["get_cached_content"] = self._logged_channel.unary_unary( + "/google.ai.generativelanguage.v1alpha.CacheService/GetCachedContent", + request_serializer=cache_service.GetCachedContentRequest.serialize, + response_deserializer=cached_content.CachedContent.deserialize, + ) + return self._stubs["get_cached_content"] + + @property + def update_cached_content( + self, + ) -> Callable[ + [cache_service.UpdateCachedContentRequest], gag_cached_content.CachedContent + ]: + r"""Return a callable for the update cached content method over gRPC. + + Updates CachedContent resource (only expiration is + updatable). + + Returns: + Callable[[~.UpdateCachedContentRequest], + ~.CachedContent]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_cached_content" not in self._stubs: + self._stubs["update_cached_content"] = self._logged_channel.unary_unary( + "/google.ai.generativelanguage.v1alpha.CacheService/UpdateCachedContent", + request_serializer=cache_service.UpdateCachedContentRequest.serialize, + response_deserializer=gag_cached_content.CachedContent.deserialize, + ) + return self._stubs["update_cached_content"] + + @property + def delete_cached_content( + self, + ) -> Callable[[cache_service.DeleteCachedContentRequest], empty_pb2.Empty]: + r"""Return a callable for the delete cached content method over gRPC. + + Deletes CachedContent resource. + + Returns: + Callable[[~.DeleteCachedContentRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "delete_cached_content" not in self._stubs: + self._stubs["delete_cached_content"] = self._logged_channel.unary_unary( + "/google.ai.generativelanguage.v1alpha.CacheService/DeleteCachedContent", + request_serializer=cache_service.DeleteCachedContentRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_cached_content"] + + def close(self): + self._logged_channel.close() + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("CacheServiceGrpcTransport",) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/cache_service/transports/grpc_asyncio.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/cache_service/transports/grpc_asyncio.py new file mode 100644 index 000000000000..764e2a1cb29f --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/cache_service/transports/grpc_asyncio.py @@ -0,0 +1,573 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import inspect +import json +import logging as std_logging +import pickle +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, grpc_helpers_async +from google.api_core import retry_async as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf.json_format import MessageToJson +import google.protobuf.message +import grpc # type: ignore +from grpc.experimental import aio # type: ignore +import proto # type: ignore + +from google.ai.generativelanguage_v1alpha.types import ( + cached_content as gag_cached_content, +) +from google.ai.generativelanguage_v1alpha.types import cache_service +from google.ai.generativelanguage_v1alpha.types import cached_content + +from .base import DEFAULT_CLIENT_INFO, CacheServiceTransport +from .grpc import CacheServiceGrpcTransport + +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + + +class _LoggingClientAIOInterceptor( + grpc.aio.UnaryUnaryClientInterceptor +): # pragma: NO COVER + async def intercept_unary_unary(self, continuation, client_call_details, request): + logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ) + if logging_enabled: # pragma: NO COVER + request_metadata = client_call_details.metadata + if isinstance(request, proto.Message): + request_payload = type(request).to_json(request) + elif isinstance(request, google.protobuf.message.Message): + request_payload = MessageToJson(request) + else: + request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" + + request_metadata = { + key: value.decode("utf-8") if isinstance(value, bytes) else value + for key, value in request_metadata + } + grpc_request = { + "payload": request_payload, + "requestMethod": "grpc", + "metadata": dict(request_metadata), + } + _LOGGER.debug( + f"Sending request for {client_call_details.method}", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.CacheService", + "rpcName": str(client_call_details.method), + "request": grpc_request, + "metadata": grpc_request["metadata"], + }, + ) + response = await continuation(client_call_details, request) + if logging_enabled: # pragma: NO COVER + response_metadata = await response.trailing_metadata() + # Convert gRPC metadata `` to list of tuples + metadata = ( + dict([(k, str(v)) for k, v in response_metadata]) + if response_metadata + else None + ) + result = await response + if isinstance(result, proto.Message): + response_payload = type(result).to_json(result) + elif isinstance(result, google.protobuf.message.Message): + response_payload = MessageToJson(result) + else: + response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" + grpc_response = { + "payload": response_payload, + "metadata": metadata, + "status": "OK", + } + _LOGGER.debug( + f"Received response to rpc {client_call_details.method}.", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.CacheService", + "rpcName": str(client_call_details.method), + "response": grpc_response, + "metadata": grpc_response["metadata"], + 
}, + ) + return response + + +class CacheServiceGrpcAsyncIOTransport(CacheServiceTransport): + """gRPC AsyncIO backend transport for CacheService. + + API for managing cache of content (CachedContent resources) + that can be used in GenerativeService requests. This way + generate content requests can benefit from preprocessing work + being done earlier, possibly lowering their computational cost. + It is intended to be used with large contexts. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "generativelanguage.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. + """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "generativelanguage.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'generativelanguage.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. 
+ This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, aio.Channel): + # Ignore credentials if a channel was passed. + credentials = None + self._ignore_credentials = True + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + self._interceptor = _LoggingClientAIOInterceptor() + self._grpc_channel._unary_unary_interceptors.append(self._interceptor) + self._logged_channel = self._grpc_channel + self._wrap_with_kind = ( + "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters + ) + # Wrap messages. This must be done after self._logged_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def list_cached_contents( + self, + ) -> Callable[ + [cache_service.ListCachedContentsRequest], + Awaitable[cache_service.ListCachedContentsResponse], + ]: + r"""Return a callable for the list cached contents method over gRPC. + + Lists CachedContents. + + Returns: + Callable[[~.ListCachedContentsRequest], + Awaitable[~.ListCachedContentsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_cached_contents" not in self._stubs: + self._stubs["list_cached_contents"] = self._logged_channel.unary_unary( + "/google.ai.generativelanguage.v1alpha.CacheService/ListCachedContents", + request_serializer=cache_service.ListCachedContentsRequest.serialize, + response_deserializer=cache_service.ListCachedContentsResponse.deserialize, + ) + return self._stubs["list_cached_contents"] + + @property + def create_cached_content( + self, + ) -> Callable[ + [cache_service.CreateCachedContentRequest], + Awaitable[gag_cached_content.CachedContent], + ]: + r"""Return a callable for the create cached content method over gRPC. + + Creates CachedContent resource. + + Returns: + Callable[[~.CreateCachedContentRequest], + Awaitable[~.CachedContent]]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_cached_content" not in self._stubs: + self._stubs["create_cached_content"] = self._logged_channel.unary_unary( + "/google.ai.generativelanguage.v1alpha.CacheService/CreateCachedContent", + request_serializer=cache_service.CreateCachedContentRequest.serialize, + response_deserializer=gag_cached_content.CachedContent.deserialize, + ) + return self._stubs["create_cached_content"] + + @property + def get_cached_content( + self, + ) -> Callable[ + [cache_service.GetCachedContentRequest], Awaitable[cached_content.CachedContent] + ]: + r"""Return a callable for the get cached content method over gRPC. + + Reads CachedContent resource. + + Returns: + Callable[[~.GetCachedContentRequest], + Awaitable[~.CachedContent]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_cached_content" not in self._stubs: + self._stubs["get_cached_content"] = self._logged_channel.unary_unary( + "/google.ai.generativelanguage.v1alpha.CacheService/GetCachedContent", + request_serializer=cache_service.GetCachedContentRequest.serialize, + response_deserializer=cached_content.CachedContent.deserialize, + ) + return self._stubs["get_cached_content"] + + @property + def update_cached_content( + self, + ) -> Callable[ + [cache_service.UpdateCachedContentRequest], + Awaitable[gag_cached_content.CachedContent], + ]: + r"""Return a callable for the update cached content method over gRPC. + + Updates CachedContent resource (only expiration is + updatable). + + Returns: + Callable[[~.UpdateCachedContentRequest], + Awaitable[~.CachedContent]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_cached_content" not in self._stubs: + self._stubs["update_cached_content"] = self._logged_channel.unary_unary( + "/google.ai.generativelanguage.v1alpha.CacheService/UpdateCachedContent", + request_serializer=cache_service.UpdateCachedContentRequest.serialize, + response_deserializer=gag_cached_content.CachedContent.deserialize, + ) + return self._stubs["update_cached_content"] + + @property + def delete_cached_content( + self, + ) -> Callable[ + [cache_service.DeleteCachedContentRequest], Awaitable[empty_pb2.Empty] + ]: + r"""Return a callable for the delete cached content method over gRPC. + + Deletes CachedContent resource. + + Returns: + Callable[[~.DeleteCachedContentRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "delete_cached_content" not in self._stubs: + self._stubs["delete_cached_content"] = self._logged_channel.unary_unary( + "/google.ai.generativelanguage.v1alpha.CacheService/DeleteCachedContent", + request_serializer=cache_service.DeleteCachedContentRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_cached_content"] + + def _prep_wrapped_messages(self, client_info): + """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + self._wrapped_methods = { + self.list_cached_contents: self._wrap_method( + self.list_cached_contents, + default_timeout=None, + client_info=client_info, + ), + self.create_cached_content: self._wrap_method( + self.create_cached_content, + default_timeout=None, + client_info=client_info, + ), + self.get_cached_content: self._wrap_method( + self.get_cached_content, + default_timeout=None, + client_info=client_info, + ), + self.update_cached_content: self._wrap_method( + self.update_cached_content, + default_timeout=None, + client_info=client_info, + ), + self.delete_cached_content: self._wrap_method( + self.delete_cached_content, + default_timeout=None, + client_info=client_info, + ), + self.get_operation: self._wrap_method( + self.get_operation, + default_timeout=None, + client_info=client_info, + ), + self.list_operations: self._wrap_method( + self.list_operations, + default_timeout=None, + client_info=client_info, + ), + } + + def _wrap_method(self, func, *args, **kwargs): + if self._wrap_with_kind: # pragma: NO COVER + kwargs["kind"] = self.kind + return gapic_v1.method_async.wrap_method(func, *args, **kwargs) + + def close(self): + return self._logged_channel.close() + + @property + def kind(self) -> str: + return "grpc_asyncio" + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + +__all__ = ("CacheServiceGrpcAsyncIOTransport",) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/cache_service/transports/rest.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/cache_service/transports/rest.py new file mode 100644 index 000000000000..9ca15b4f12e0 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/cache_service/transports/rest.py @@ -0,0 +1,1543 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import dataclasses +import json # type: ignore +import logging +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, rest_helpers, rest_streaming +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import json_format +from requests import __version__ as requests_version + +from google.ai.generativelanguage_v1alpha.types import ( + cached_content as gag_cached_content, +) +from google.ai.generativelanguage_v1alpha.types import cache_service +from google.ai.generativelanguage_v1alpha.types import cached_content + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .rest_base import _BaseCacheServiceRestTransport + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = logging.getLogger(__name__) + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=f"requests@{requests_version}", +) + + +class CacheServiceRestInterceptor: + """Interceptor for CacheService. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. 
+ Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the CacheServiceRestTransport. + + .. code-block:: python + class MyCustomCacheServiceInterceptor(CacheServiceRestInterceptor): + def pre_create_cached_content(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_cached_content(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_cached_content(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_get_cached_content(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_cached_content(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_cached_contents(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_cached_contents(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_cached_content(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_cached_content(self, response): + logging.log(f"Received response: {response}") + return response + + transport = CacheServiceRestTransport(interceptor=MyCustomCacheServiceInterceptor()) + client = CacheServiceClient(transport=transport) + + + """ + + def pre_create_cached_content( + self, + request: cache_service.CreateCachedContentRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + cache_service.CreateCachedContentRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for create_cached_content + + Override in a subclass to manipulate the request or metadata + before they are sent to the CacheService server. + """ + return request, metadata + + def post_create_cached_content( + self, response: gag_cached_content.CachedContent + ) -> gag_cached_content.CachedContent: + """Post-rpc interceptor for create_cached_content + + DEPRECATED. Please use the `post_create_cached_content_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the CacheService server but before + it is returned to user code. This `post_create_cached_content` interceptor runs + before the `post_create_cached_content_with_metadata` interceptor. + """ + return response + + def post_create_cached_content_with_metadata( + self, + response: gag_cached_content.CachedContent, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + gag_cached_content.CachedContent, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for create_cached_content + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CacheService server but before it is returned to user code. + + We recommend only using this `post_create_cached_content_with_metadata` + interceptor in new development instead of the `post_create_cached_content` interceptor. + When both interceptors are used, this `post_create_cached_content_with_metadata` interceptor runs after the + `post_create_cached_content` interceptor. 
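Concretely, a subclass can hook the metadata-aware variant described here. The sketch below uses only names defined in this module; `AnonymousCredentials` stands in for real credentials so the construction stays self-contained:

```python
# Sketch: wiring a custom interceptor into the REST transport. The
# *_with_metadata hook receives the response plus the HTTP response headers
# as (key, value) pairs and must return both.
from typing import Sequence, Tuple, Union

from google.auth import credentials as ga_credentials

from google.ai.generativelanguage_v1alpha.services.cache_service import CacheServiceClient
from google.ai.generativelanguage_v1alpha.services.cache_service.transports.rest import (
    CacheServiceRestInterceptor,
    CacheServiceRestTransport,
)
from google.ai.generativelanguage_v1alpha.types import cached_content as gag_cached_content


class AuditingInterceptor(CacheServiceRestInterceptor):
    def post_create_cached_content_with_metadata(
        self,
        response: gag_cached_content.CachedContent,
        metadata: Sequence[Tuple[str, Union[str, bytes]]],
    ):
        print("CreateCachedContent response headers:", dict(metadata))
        return response, metadata  # pass both through unchanged


transport = CacheServiceRestTransport(
    interceptor=AuditingInterceptor(),
    credentials=ga_credentials.AnonymousCredentials(),  # placeholder credentials
)
client = CacheServiceClient(transport=transport)
```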
The (possibly modified) response returned by + `post_create_cached_content` will be passed to + `post_create_cached_content_with_metadata`. + """ + return response, metadata + + def pre_delete_cached_content( + self, + request: cache_service.DeleteCachedContentRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + cache_service.DeleteCachedContentRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for delete_cached_content + + Override in a subclass to manipulate the request or metadata + before they are sent to the CacheService server. + """ + return request, metadata + + def pre_get_cached_content( + self, + request: cache_service.GetCachedContentRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + cache_service.GetCachedContentRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for get_cached_content + + Override in a subclass to manipulate the request or metadata + before they are sent to the CacheService server. + """ + return request, metadata + + def post_get_cached_content( + self, response: cached_content.CachedContent + ) -> cached_content.CachedContent: + """Post-rpc interceptor for get_cached_content + + DEPRECATED. Please use the `post_get_cached_content_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the CacheService server but before + it is returned to user code. This `post_get_cached_content` interceptor runs + before the `post_get_cached_content_with_metadata` interceptor. + """ + return response + + def post_get_cached_content_with_metadata( + self, + response: cached_content.CachedContent, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[cached_content.CachedContent, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_cached_content + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CacheService server but before it is returned to user code. + + We recommend only using this `post_get_cached_content_with_metadata` + interceptor in new development instead of the `post_get_cached_content` interceptor. + When both interceptors are used, this `post_get_cached_content_with_metadata` interceptor runs after the + `post_get_cached_content` interceptor. The (possibly modified) response returned by + `post_get_cached_content` will be passed to + `post_get_cached_content_with_metadata`. + """ + return response, metadata + + def pre_list_cached_contents( + self, + request: cache_service.ListCachedContentsRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + cache_service.ListCachedContentsRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for list_cached_contents + + Override in a subclass to manipulate the request or metadata + before they are sent to the CacheService server. + """ + return request, metadata + + def post_list_cached_contents( + self, response: cache_service.ListCachedContentsResponse + ) -> cache_service.ListCachedContentsResponse: + """Post-rpc interceptor for list_cached_contents + + DEPRECATED. Please use the `post_list_cached_contents_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the CacheService server but before + it is returned to user code. This `post_list_cached_contents` interceptor runs + before the `post_list_cached_contents_with_metadata` interceptor. 
+ """ + return response + + def post_list_cached_contents_with_metadata( + self, + response: cache_service.ListCachedContentsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + cache_service.ListCachedContentsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_cached_contents + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CacheService server but before it is returned to user code. + + We recommend only using this `post_list_cached_contents_with_metadata` + interceptor in new development instead of the `post_list_cached_contents` interceptor. + When both interceptors are used, this `post_list_cached_contents_with_metadata` interceptor runs after the + `post_list_cached_contents` interceptor. The (possibly modified) response returned by + `post_list_cached_contents` will be passed to + `post_list_cached_contents_with_metadata`. + """ + return response, metadata + + def pre_update_cached_content( + self, + request: cache_service.UpdateCachedContentRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + cache_service.UpdateCachedContentRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for update_cached_content + + Override in a subclass to manipulate the request or metadata + before they are sent to the CacheService server. + """ + return request, metadata + + def post_update_cached_content( + self, response: gag_cached_content.CachedContent + ) -> gag_cached_content.CachedContent: + """Post-rpc interceptor for update_cached_content + + DEPRECATED. Please use the `post_update_cached_content_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the CacheService server but before + it is returned to user code. This `post_update_cached_content` interceptor runs + before the `post_update_cached_content_with_metadata` interceptor. + """ + return response + + def post_update_cached_content_with_metadata( + self, + response: gag_cached_content.CachedContent, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + gag_cached_content.CachedContent, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for update_cached_content + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CacheService server but before it is returned to user code. + + We recommend only using this `post_update_cached_content_with_metadata` + interceptor in new development instead of the `post_update_cached_content` interceptor. + When both interceptors are used, this `post_update_cached_content_with_metadata` interceptor runs after the + `post_update_cached_content` interceptor. The (possibly modified) response returned by + `post_update_cached_content` will be passed to + `post_update_cached_content_with_metadata`. + """ + return response, metadata + + def pre_get_operation( + self, + request: operations_pb2.GetOperationRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + operations_pb2.GetOperationRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for get_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the CacheService server. 
+ """ + return request, metadata + + def post_get_operation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for get_operation + + Override in a subclass to manipulate the response + after it is returned by the CacheService server but before + it is returned to user code. + """ + return response + + def pre_list_operations( + self, + request: operations_pb2.ListOperationsRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + operations_pb2.ListOperationsRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for list_operations + + Override in a subclass to manipulate the request or metadata + before they are sent to the CacheService server. + """ + return request, metadata + + def post_list_operations( + self, response: operations_pb2.ListOperationsResponse + ) -> operations_pb2.ListOperationsResponse: + """Post-rpc interceptor for list_operations + + Override in a subclass to manipulate the response + after it is returned by the CacheService server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class CacheServiceRestStub: + _session: AuthorizedSession + _host: str + _interceptor: CacheServiceRestInterceptor + + +class CacheServiceRestTransport(_BaseCacheServiceRestTransport): + """REST backend synchronous transport for CacheService. + + API for managing cache of content (CachedContent resources) + that can be used in GenerativeService requests. This way + generate content requests can benefit from preprocessing work + being done earlier, possibly lowering their computational cost. + It is intended to be used with large contexts. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + """ + + def __init__( + self, + *, + host: str = "generativelanguage.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[CacheServiceRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'generativelanguage.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. 
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + url_scheme=url_scheme, + api_audience=api_audience, + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST + ) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or CacheServiceRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _CreateCachedContent( + _BaseCacheServiceRestTransport._BaseCreateCachedContent, CacheServiceRestStub + ): + def __hash__(self): + return hash("CacheServiceRestTransport.CreateCachedContent") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: cache_service.CreateCachedContentRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> gag_cached_content.CachedContent: + r"""Call the create cached content method over HTTP. + + Args: + request (~.cache_service.CreateCachedContentRequest): + The request object. Request to create CachedContent. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.gag_cached_content.CachedContent: + Content that has been preprocessed + and can be used in subsequent request to + GenerativeService. + + Cached content can be only used with + model it was created for. 
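From the caller's side, the handler defined here is reached through the client's `create_cached_content` method. A sketch under the assumption that credentials (for example ADC) are configured; the `model` and `display_name` fields mirror the v1beta `CachedContent` message and are assumptions for v1alpha:

```python
# Sketch: creating cached content through the generated client; the REST
# handler above serializes the `cached_content` field as the request body.
from google.ai.generativelanguage_v1alpha.services.cache_service import CacheServiceClient
from google.ai.generativelanguage_v1alpha.types import cache_service, cached_content

client = CacheServiceClient()  # assumes credentials are available in the environment

request = cache_service.CreateCachedContentRequest(
    cached_content=cached_content.CachedContent(
        model="models/gemini-1.5-flash-001",  # assumed field name, value illustrative
        display_name="docs-cache",            # assumed field name
    )
)
created = client.create_cached_content(request=request)
print(created.name)
```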
+ + """ + + http_options = ( + _BaseCacheServiceRestTransport._BaseCreateCachedContent._get_http_options() + ) + + request, metadata = self._interceptor.pre_create_cached_content( + request, metadata + ) + transcoded_request = _BaseCacheServiceRestTransport._BaseCreateCachedContent._get_transcoded_request( + http_options, request + ) + + body = _BaseCacheServiceRestTransport._BaseCreateCachedContent._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseCacheServiceRestTransport._BaseCreateCachedContent._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.ai.generativelanguage_v1alpha.CacheServiceClient.CreateCachedContent", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.CacheService", + "rpcName": "CreateCachedContent", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = CacheServiceRestTransport._CreateCachedContent._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = gag_cached_content.CachedContent() + pb_resp = gag_cached_content.CachedContent.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_create_cached_content(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_cached_content_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = gag_cached_content.CachedContent.to_json( + response + ) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.ai.generativelanguage_v1alpha.CacheServiceClient.create_cached_content", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.CacheService", + "rpcName": "CreateCachedContent", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _DeleteCachedContent( + _BaseCacheServiceRestTransport._BaseDeleteCachedContent, CacheServiceRestStub + ): + def __hash__(self): + return hash("CacheServiceRestTransport.DeleteCachedContent") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, 
strict=True), + ) + return response + + def __call__( + self, + request: cache_service.DeleteCachedContentRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ): + r"""Call the delete cached content method over HTTP. + + Args: + request (~.cache_service.DeleteCachedContentRequest): + The request object. Request to delete CachedContent. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + + http_options = ( + _BaseCacheServiceRestTransport._BaseDeleteCachedContent._get_http_options() + ) + + request, metadata = self._interceptor.pre_delete_cached_content( + request, metadata + ) + transcoded_request = _BaseCacheServiceRestTransport._BaseDeleteCachedContent._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseCacheServiceRestTransport._BaseDeleteCachedContent._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.ai.generativelanguage_v1alpha.CacheServiceClient.DeleteCachedContent", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.CacheService", + "rpcName": "DeleteCachedContent", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = CacheServiceRestTransport._DeleteCachedContent._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _GetCachedContent( + _BaseCacheServiceRestTransport._BaseGetCachedContent, CacheServiceRestStub + ): + def __hash__(self): + return hash("CacheServiceRestTransport.GetCachedContent") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: cache_service.GetCachedContentRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> cached_content.CachedContent: + r"""Call the get cached content method over HTTP. 
+ + Args: + request (~.cache_service.GetCachedContentRequest): + The request object. Request to read CachedContent. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.cached_content.CachedContent: + Content that has been preprocessed + and can be used in subsequent request to + GenerativeService. + + Cached content can be only used with + model it was created for. + + """ + + http_options = ( + _BaseCacheServiceRestTransport._BaseGetCachedContent._get_http_options() + ) + + request, metadata = self._interceptor.pre_get_cached_content( + request, metadata + ) + transcoded_request = _BaseCacheServiceRestTransport._BaseGetCachedContent._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseCacheServiceRestTransport._BaseGetCachedContent._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.ai.generativelanguage_v1alpha.CacheServiceClient.GetCachedContent", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.CacheService", + "rpcName": "GetCachedContent", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = CacheServiceRestTransport._GetCachedContent._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
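The status check that follows turns any non-2xx response into the matching `google.api_core` exception via `core_exceptions.from_http_response`, so callers can catch typed errors. A sketch from the caller's side (the resource name is a placeholder and credentials are assumed to be available):

```python
# Sketch: HTTP errors surface as typed google.api_core exceptions, e.g. a 404
# from GetCachedContent becomes NotFound.
from google.api_core import exceptions as core_exceptions

from google.ai.generativelanguage_v1alpha.services.cache_service import CacheServiceClient
from google.ai.generativelanguage_v1alpha.types import cache_service

client = CacheServiceClient()  # assumes credentials are available in the environment

try:
    client.get_cached_content(
        request=cache_service.GetCachedContentRequest(
            name="cachedContents/does-not-exist"  # placeholder resource name
        )
    )
except core_exceptions.NotFound as exc:
    print("cached content missing:", exc)
```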
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = cached_content.CachedContent() + pb_resp = cached_content.CachedContent.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_get_cached_content(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_cached_content_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = cached_content.CachedContent.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.ai.generativelanguage_v1alpha.CacheServiceClient.get_cached_content", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.CacheService", + "rpcName": "GetCachedContent", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _ListCachedContents( + _BaseCacheServiceRestTransport._BaseListCachedContents, CacheServiceRestStub + ): + def __hash__(self): + return hash("CacheServiceRestTransport.ListCachedContents") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: cache_service.ListCachedContentsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> cache_service.ListCachedContentsResponse: + r"""Call the list cached contents method over HTTP. + + Args: + request (~.cache_service.ListCachedContentsRequest): + The request object. Request to list CachedContents. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.cache_service.ListCachedContentsResponse: + Response with CachedContents list. 
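The response type documented here is paginated; the generated client normally wraps it in a pager that fetches follow-up pages on iteration. A sketch assuming the usual `page_size` field and pager behaviour (both taken from the v1beta surface):

```python
# Sketch: iterating cached contents across pages via the generated client.
from google.ai.generativelanguage_v1alpha.services.cache_service import CacheServiceClient
from google.ai.generativelanguage_v1alpha.types import cache_service

client = CacheServiceClient()  # assumes credentials are available in the environment

pager = client.list_cached_contents(
    request=cache_service.ListCachedContentsRequest(page_size=10)  # page_size assumed
)
for item in pager:  # yields CachedContent messages, fetching pages as needed
    print(item.name)
```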
+ """ + + http_options = ( + _BaseCacheServiceRestTransport._BaseListCachedContents._get_http_options() + ) + + request, metadata = self._interceptor.pre_list_cached_contents( + request, metadata + ) + transcoded_request = _BaseCacheServiceRestTransport._BaseListCachedContents._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseCacheServiceRestTransport._BaseListCachedContents._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.ai.generativelanguage_v1alpha.CacheServiceClient.ListCachedContents", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.CacheService", + "rpcName": "ListCachedContents", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = CacheServiceRestTransport._ListCachedContents._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = cache_service.ListCachedContentsResponse() + pb_resp = cache_service.ListCachedContentsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_list_cached_contents(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_cached_contents_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = cache_service.ListCachedContentsResponse.to_json( + response + ) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.ai.generativelanguage_v1alpha.CacheServiceClient.list_cached_contents", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.CacheService", + "rpcName": "ListCachedContents", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _UpdateCachedContent( + _BaseCacheServiceRestTransport._BaseUpdateCachedContent, CacheServiceRestStub + ): + def __hash__(self): + return hash("CacheServiceRestTransport.UpdateCachedContent") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: 
cache_service.UpdateCachedContentRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> gag_cached_content.CachedContent: + r"""Call the update cached content method over HTTP. + + Args: + request (~.cache_service.UpdateCachedContentRequest): + The request object. Request to update CachedContent. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.gag_cached_content.CachedContent: + Content that has been preprocessed + and can be used in subsequent request to + GenerativeService. + + Cached content can be only used with + model it was created for. + + """ + + http_options = ( + _BaseCacheServiceRestTransport._BaseUpdateCachedContent._get_http_options() + ) + + request, metadata = self._interceptor.pre_update_cached_content( + request, metadata + ) + transcoded_request = _BaseCacheServiceRestTransport._BaseUpdateCachedContent._get_transcoded_request( + http_options, request + ) + + body = _BaseCacheServiceRestTransport._BaseUpdateCachedContent._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseCacheServiceRestTransport._BaseUpdateCachedContent._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.ai.generativelanguage_v1alpha.CacheServiceClient.UpdateCachedContent", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.CacheService", + "rpcName": "UpdateCachedContent", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = CacheServiceRestTransport._UpdateCachedContent._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
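For completeness, the PATCH binding handled here is driven from the client like this; `update_mask` and the `ttl` field follow the v1beta `UpdateCachedContentRequest`/`CachedContent` messages and are assumptions for v1alpha, and the resource name is a placeholder:

```python
# Sketch: updating a cached content's TTL; only fields named in update_mask
# are patched, matching the {cached_content.name=cachedContents/*} binding.
import datetime

from google.protobuf import field_mask_pb2

from google.ai.generativelanguage_v1alpha.services.cache_service import CacheServiceClient
from google.ai.generativelanguage_v1alpha.types import cache_service, cached_content

client = CacheServiceClient()  # assumes credentials are available in the environment

request = cache_service.UpdateCachedContentRequest(
    cached_content=cached_content.CachedContent(
        name="cachedContents/example-id",  # placeholder resource name
        ttl=datetime.timedelta(hours=1),   # assumed field name
    ),
    update_mask=field_mask_pb2.FieldMask(paths=["ttl"]),  # assumed field name
)
updated = client.update_cached_content(request=request)
```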
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = gag_cached_content.CachedContent() + pb_resp = gag_cached_content.CachedContent.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_update_cached_content(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_cached_content_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = gag_cached_content.CachedContent.to_json( + response + ) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.ai.generativelanguage_v1alpha.CacheServiceClient.update_cached_content", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.CacheService", + "rpcName": "UpdateCachedContent", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + @property + def create_cached_content( + self, + ) -> Callable[ + [cache_service.CreateCachedContentRequest], gag_cached_content.CachedContent + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateCachedContent(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_cached_content( + self, + ) -> Callable[[cache_service.DeleteCachedContentRequest], empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteCachedContent(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_cached_content( + self, + ) -> Callable[ + [cache_service.GetCachedContentRequest], cached_content.CachedContent + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetCachedContent(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_cached_contents( + self, + ) -> Callable[ + [cache_service.ListCachedContentsRequest], + cache_service.ListCachedContentsResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListCachedContents(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_cached_content( + self, + ) -> Callable[ + [cache_service.UpdateCachedContentRequest], gag_cached_content.CachedContent + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._UpdateCachedContent(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_operation(self): + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + + class _GetOperation( + _BaseCacheServiceRestTransport._BaseGetOperation, CacheServiceRestStub + ): + def __hash__(self): + return hash("CacheServiceRestTransport.GetOperation") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: operations_pb2.GetOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Call the get operation method over HTTP. + + Args: + request (operations_pb2.GetOperationRequest): + The request object for GetOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + operations_pb2.Operation: Response from GetOperation method. + """ + + http_options = ( + _BaseCacheServiceRestTransport._BaseGetOperation._get_http_options() + ) + + request, metadata = self._interceptor.pre_get_operation(request, metadata) + transcoded_request = _BaseCacheServiceRestTransport._BaseGetOperation._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = ( + _BaseCacheServiceRestTransport._BaseGetOperation._get_query_params_json( + transcoded_request + ) + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.ai.generativelanguage_v1alpha.CacheServiceClient.GetOperation", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.CacheService", + "rpcName": "GetOperation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = CacheServiceRestTransport._GetOperation._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
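The operations mixin handled here is reachable from the client as well; the binding maps `GetOperation` onto the `/v1alpha/{name=.../operations/*}` routes listed in `rest_base.py`. A sketch with a placeholder operation name (credentials assumed available):

```python
# Sketch: polling a long-running operation through the same transport.
from google.longrunning import operations_pb2

from google.ai.generativelanguage_v1alpha.services.cache_service import CacheServiceClient

client = CacheServiceClient()  # assumes credentials are available in the environment

op = client.get_operation(
    request=operations_pb2.GetOperationRequest(
        name="tunedModels/example/operations/example-op"  # placeholder name
    )
)
print(op.name, op.done)
```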
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = operations_pb2.Operation() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_get_operation(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.ai.generativelanguage_v1alpha.CacheServiceAsyncClient.GetOperation", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.CacheService", + "rpcName": "GetOperation", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) + return resp + + @property + def list_operations(self): + return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore + + class _ListOperations( + _BaseCacheServiceRestTransport._BaseListOperations, CacheServiceRestStub + ): + def __hash__(self): + return hash("CacheServiceRestTransport.ListOperations") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: operations_pb2.ListOperationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Call the list operations method over HTTP. + + Args: + request (operations_pb2.ListOperationsRequest): + The request object for ListOperations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + operations_pb2.ListOperationsResponse: Response from ListOperations method. 
+ """ + + http_options = ( + _BaseCacheServiceRestTransport._BaseListOperations._get_http_options() + ) + + request, metadata = self._interceptor.pre_list_operations(request, metadata) + transcoded_request = _BaseCacheServiceRestTransport._BaseListOperations._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseCacheServiceRestTransport._BaseListOperations._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.ai.generativelanguage_v1alpha.CacheServiceClient.ListOperations", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.CacheService", + "rpcName": "ListOperations", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = CacheServiceRestTransport._ListOperations._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = operations_pb2.ListOperationsResponse() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_list_operations(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.ai.generativelanguage_v1alpha.CacheServiceAsyncClient.ListOperations", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.CacheService", + "rpcName": "ListOperations", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) + return resp + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("CacheServiceRestTransport",) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/cache_service/transports/rest_base.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/cache_service/transports/rest_base.py new file mode 100644 index 000000000000..cb05981f3fdd --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/cache_service/transports/rest_base.py @@ -0,0 +1,399 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union + +from google.api_core import gapic_v1, path_template +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import json_format + +from google.ai.generativelanguage_v1alpha.types import ( + cached_content as gag_cached_content, +) +from google.ai.generativelanguage_v1alpha.types import cache_service +from google.ai.generativelanguage_v1alpha.types import cached_content + +from .base import DEFAULT_CLIENT_INFO, CacheServiceTransport + + +class _BaseCacheServiceRestTransport(CacheServiceTransport): + """Base REST backend transport for CacheService. + + Note: This class is not meant to be used directly. Use its sync and + async sub-classes instead. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + """ + + def __init__( + self, + *, + host: str = "generativelanguage.googleapis.com", + credentials: Optional[Any] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + Args: + host (Optional[str]): + The hostname to connect to (default: 'generativelanguage.googleapis.com'). + credentials (Optional[Any]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. 
+ """ + # Run the base constructor + maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) + if maybe_url_match is None: + raise ValueError( + f"Unexpected hostname structure: {host}" + ) # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + class _BaseCreateCachedContent: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1alpha/cachedContents", + "body": "cached_content", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = cache_service.CreateCachedContentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseCacheServiceRestTransport._BaseCreateCachedContent._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseDeleteCachedContent: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1alpha/{name=cachedContents/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = cache_service.DeleteCachedContentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseCacheServiceRestTransport._BaseDeleteCachedContent._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGetCachedContent: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + 
@staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha/{name=cachedContents/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = cache_service.GetCachedContentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseCacheServiceRestTransport._BaseGetCachedContent._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseListCachedContents: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha/cachedContents", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = cache_service.ListCachedContentsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseUpdateCachedContent: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1alpha/{cached_content.name=cachedContents/*}", + "body": "cached_content", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = cache_service.UpdateCachedContentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseCacheServiceRestTransport._BaseUpdateCachedContent._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGetOperation: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha/{name=tunedModels/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1alpha/{name=generatedFiles/*/operations/*}", + }, + { + "method": "get", + "uri": 
"/v1alpha/{name=models/*/operations/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + return query_params + + class _BaseListOperations: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha/{name=tunedModels/*}/operations", + }, + { + "method": "get", + "uri": "/v1alpha/{name=models/*}/operations", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + return query_params + + +__all__ = ("_BaseCacheServiceRestTransport",) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/discuss_service/__init__.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/discuss_service/__init__.py new file mode 100644 index 000000000000..0f3a84e6ba34 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/discuss_service/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .async_client import DiscussServiceAsyncClient +from .client import DiscussServiceClient + +__all__ = ( + "DiscussServiceClient", + "DiscussServiceAsyncClient", +) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/discuss_service/async_client.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/discuss_service/async_client.py new file mode 100644 index 000000000000..badd7396f4a8 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/discuss_service/async_client.py @@ -0,0 +1,740 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import logging as std_logging +import re +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry_async as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.ai.generativelanguage_v1alpha import gapic_version as package_version + +try: + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore + +from google.longrunning import operations_pb2 # type: ignore + +from google.ai.generativelanguage_v1alpha.types import discuss_service, safety + +from .client import DiscussServiceClient +from .transports.base import DEFAULT_CLIENT_INFO, DiscussServiceTransport +from .transports.grpc_asyncio import DiscussServiceGrpcAsyncIOTransport + +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + + +class DiscussServiceAsyncClient: + """An API for using Generative Language Models (GLMs) in dialog + applications. + Also known as large language models (LLMs), this API provides + models that are trained for multi-turn dialog. + """ + + _client: DiscussServiceClient + + # Copy defaults from the synchronous client for use here. + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. + DEFAULT_ENDPOINT = DiscussServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = DiscussServiceClient.DEFAULT_MTLS_ENDPOINT + _DEFAULT_ENDPOINT_TEMPLATE = DiscussServiceClient._DEFAULT_ENDPOINT_TEMPLATE + _DEFAULT_UNIVERSE = DiscussServiceClient._DEFAULT_UNIVERSE + + model_path = staticmethod(DiscussServiceClient.model_path) + parse_model_path = staticmethod(DiscussServiceClient.parse_model_path) + common_billing_account_path = staticmethod( + DiscussServiceClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + DiscussServiceClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(DiscussServiceClient.common_folder_path) + parse_common_folder_path = staticmethod( + DiscussServiceClient.parse_common_folder_path + ) + common_organization_path = staticmethod( + DiscussServiceClient.common_organization_path + ) + parse_common_organization_path = staticmethod( + DiscussServiceClient.parse_common_organization_path + ) + common_project_path = staticmethod(DiscussServiceClient.common_project_path) + parse_common_project_path = staticmethod( + DiscussServiceClient.parse_common_project_path + ) + common_location_path = staticmethod(DiscussServiceClient.common_location_path) + parse_common_location_path = staticmethod( + DiscussServiceClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. 
+ args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + DiscussServiceAsyncClient: The constructed client. + """ + return DiscussServiceClient.from_service_account_info.__func__(DiscussServiceAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + DiscussServiceAsyncClient: The constructed client. + """ + return DiscussServiceClient.from_service_account_file.__func__(DiscussServiceAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return DiscussServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> DiscussServiceTransport: + """Returns the transport used by the client instance. + + Returns: + DiscussServiceTransport: The transport used by the client instance. + """ + return self._client.transport + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._client._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used + by the client instance. 
+ """ + return self._client._universe_domain + + get_transport_class = DiscussServiceClient.get_transport_class + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[str, DiscussServiceTransport, Callable[..., DiscussServiceTransport]] + ] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the discuss service async client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,DiscussServiceTransport,Callable[..., DiscussServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport to use. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the DiscussServiceTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. 
+ """ + self._client = DiscussServiceClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ): # pragma: NO COVER + _LOGGER.debug( + "Created client `google.ai.generativelanguage_v1alpha.DiscussServiceAsyncClient`.", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.DiscussService", + "universeDomain": getattr( + self._client._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._client._transport, "_credentials") + else { + "serviceName": "google.ai.generativelanguage.v1alpha.DiscussService", + "credentialsType": None, + }, + ) + + async def generate_message( + self, + request: Optional[Union[discuss_service.GenerateMessageRequest, dict]] = None, + *, + model: Optional[str] = None, + prompt: Optional[discuss_service.MessagePrompt] = None, + temperature: Optional[float] = None, + candidate_count: Optional[int] = None, + top_p: Optional[float] = None, + top_k: Optional[int] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> discuss_service.GenerateMessageResponse: + r"""Generates a response from the model given an input + ``MessagePrompt``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1alpha + + async def sample_generate_message(): + # Create a client + client = generativelanguage_v1alpha.DiscussServiceAsyncClient() + + # Initialize request argument(s) + prompt = generativelanguage_v1alpha.MessagePrompt() + prompt.messages.content = "content_value" + + request = generativelanguage_v1alpha.GenerateMessageRequest( + model="model_value", + prompt=prompt, + ) + + # Make the request + response = await client.generate_message(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.ai.generativelanguage_v1alpha.types.GenerateMessageRequest, dict]]): + The request object. Request to generate a message + response from the model. + model (:class:`str`): + Required. The name of the model to use. + + Format: ``name=models/{model}``. + + This corresponds to the ``model`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + prompt (:class:`google.ai.generativelanguage_v1alpha.types.MessagePrompt`): + Required. The structured textual + input given to the model as a prompt. + Given a + prompt, the model will return what it + predicts is the next message in the + discussion. + + This corresponds to the ``prompt`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + temperature (:class:`float`): + Optional. Controls the randomness of the output. + + Values can range over ``[0.0,1.0]``, inclusive. 
A value + closer to ``1.0`` will produce responses that are more + varied, while a value closer to ``0.0`` will typically + result in less surprising responses from the model. + + This corresponds to the ``temperature`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + candidate_count (:class:`int`): + Optional. The number of generated response messages to + return. + + This value must be between ``[1, 8]``, inclusive. If + unset, this will default to ``1``. + + This corresponds to the ``candidate_count`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + top_p (:class:`float`): + Optional. The maximum cumulative probability of tokens + to consider when sampling. + + The model uses combined Top-k and nucleus sampling. + + Nucleus sampling considers the smallest set of tokens + whose probability sum is at least ``top_p``. + + This corresponds to the ``top_p`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + top_k (:class:`int`): + Optional. The maximum number of tokens to consider when + sampling. + + The model uses combined Top-k and nucleus sampling. + + Top-k sampling considers the set of ``top_k`` most + probable tokens. + + This corresponds to the ``top_k`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.ai.generativelanguage_v1alpha.types.GenerateMessageResponse: + The response from the model. + + This includes candidate messages and + conversation history in the form of + chronologically-ordered messages. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any( + [model, prompt, temperature, candidate_count, top_p, top_k] + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, discuss_service.GenerateMessageRequest): + request = discuss_service.GenerateMessageRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if model is not None: + request.model = model + if prompt is not None: + request.prompt = prompt + if temperature is not None: + request.temperature = temperature + if candidate_count is not None: + request.candidate_count = candidate_count + if top_p is not None: + request.top_p = top_p + if top_k is not None: + request.top_k = top_k + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.generate_message + ] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("model", request.model),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def count_message_tokens( + self, + request: Optional[ + Union[discuss_service.CountMessageTokensRequest, dict] + ] = None, + *, + model: Optional[str] = None, + prompt: Optional[discuss_service.MessagePrompt] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> discuss_service.CountMessageTokensResponse: + r"""Runs a model's tokenizer on a string and returns the + token count. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1alpha + + async def sample_count_message_tokens(): + # Create a client + client = generativelanguage_v1alpha.DiscussServiceAsyncClient() + + # Initialize request argument(s) + prompt = generativelanguage_v1alpha.MessagePrompt() + prompt.messages.content = "content_value" + + request = generativelanguage_v1alpha.CountMessageTokensRequest( + model="model_value", + prompt=prompt, + ) + + # Make the request + response = await client.count_message_tokens(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.ai.generativelanguage_v1alpha.types.CountMessageTokensRequest, dict]]): + The request object. Counts the number of tokens in the ``prompt`` sent to a + model. + + Models may tokenize text differently, so each model may + return a different ``token_count``. + model (:class:`str`): + Required. The model's resource name. This serves as an + ID for the Model to use. + + This name should match a model name returned by the + ``ListModels`` method. + + Format: ``models/{model}`` + + This corresponds to the ``model`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + prompt (:class:`google.ai.generativelanguage_v1alpha.types.MessagePrompt`): + Required. The prompt, whose token + count is to be returned. + + This corresponds to the ``prompt`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.ai.generativelanguage_v1alpha.types.CountMessageTokensResponse: + A response from CountMessageTokens. + + It returns the model's token_count for the prompt. + + """ + # Create or coerce a protobuf request object. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([model, prompt]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, discuss_service.CountMessageTokensRequest): + request = discuss_service.CountMessageTokensRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if model is not None: + request.model = model + if prompt is not None: + request.prompt = prompt + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.count_message_tokens + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("model", request.model),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.list_operations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.get_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def __aenter__(self) -> "DiscussServiceAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("DiscussServiceAsyncClient",) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/discuss_service/client.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/discuss_service/client.py new file mode 100644 index 000000000000..57e764cb37c2 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/discuss_service/client.py @@ -0,0 +1,1165 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
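For orientation, a usage sketch of the `DiscussServiceAsyncClient` defined above, adapted from its docstring samples; it assumes application default credentials are available in the environment and that the placeholder model and message values are replaced with real ones:

import asyncio

from google.ai import generativelanguage_v1alpha


async def main():
    # The client supports "async with"; __aexit__ closes the underlying transport.
    async with generativelanguage_v1alpha.DiscussServiceAsyncClient() as client:
        prompt = generativelanguage_v1alpha.MessagePrompt()
        prompt.messages.content = "content_value"

        request = generativelanguage_v1alpha.GenerateMessageRequest(
            model="model_value",
            prompt=prompt,
        )
        response = await client.generate_message(request=request)
        print(response)


asyncio.run(main())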
+# +from collections import OrderedDict +from http import HTTPStatus +import json +import logging as std_logging +import os +import re +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) +import warnings + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.ai.generativelanguage_v1alpha import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + +from google.longrunning import operations_pb2 # type: ignore + +from google.ai.generativelanguage_v1alpha.types import discuss_service, safety + +from .transports.base import DEFAULT_CLIENT_INFO, DiscussServiceTransport +from .transports.grpc import DiscussServiceGrpcTransport +from .transports.grpc_asyncio import DiscussServiceGrpcAsyncIOTransport +from .transports.rest import DiscussServiceRestTransport + + +class DiscussServiceClientMeta(type): + """Metaclass for the DiscussService client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + + _transport_registry = ( + OrderedDict() + ) # type: Dict[str, Type[DiscussServiceTransport]] + _transport_registry["grpc"] = DiscussServiceGrpcTransport + _transport_registry["grpc_asyncio"] = DiscussServiceGrpcAsyncIOTransport + _transport_registry["rest"] = DiscussServiceRestTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[DiscussServiceTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class DiscussServiceClient(metaclass=DiscussServiceClientMeta): + """An API for using Generative Language Models (GLMs) in dialog + applications. + Also known as large language models (LLMs), this API provides + models that are trained for multi-turn dialog. + """ + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. 
+ """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. + DEFAULT_ENDPOINT = "generativelanguage.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + _DEFAULT_ENDPOINT_TEMPLATE = "generativelanguage.{UNIVERSE_DOMAIN}" + _DEFAULT_UNIVERSE = "googleapis.com" + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + DiscussServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + DiscussServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> DiscussServiceTransport: + """Returns the transport used by the client instance. + + Returns: + DiscussServiceTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def model_path( + model: str, + ) -> str: + """Returns a fully-qualified model string.""" + return "models/{model}".format( + model=model, + ) + + @staticmethod + def parse_model_path(path: str) -> Dict[str, str]: + """Parses a model path into its component segments.""" + m = re.match(r"^models/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path( + billing_account: str, + ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path( + folder: str, + ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format( + folder=folder, + ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path( + organization: str, + ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format( + organization=organization, + ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path( + project: str, + ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format( + project=project, + ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path( + project: str, + location: str, + ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): + """Deprecated. Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. 
+ (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + + warnings.warn( + "get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", + DeprecationWarning, + ) + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + @staticmethod + def _read_environment_variables(): + """Returns the environment variables used by the client. + + Returns: + Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, + GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. + + Raises: + ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not + any of ["true", "false"]. + google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT + is not any of ["auto", "never", "always"]. + """ + use_client_cert = os.getenv( + "GOOGLE_API_USE_CLIENT_CERTIFICATE", "false" + ).lower() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() + universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + return use_client_cert == "true", use_mtls_endpoint, universe_domain_env + + @staticmethod + def _get_client_cert_source(provided_cert_source, use_cert_flag): + """Return the client cert source to be used by the client. 
+ + Args: + provided_cert_source (bytes): The client certificate source provided. + use_cert_flag (bool): A flag indicating whether to use the client certificate. + + Returns: + bytes or None: The client cert source to be used by the client. + """ + client_cert_source = None + if use_cert_flag: + if provided_cert_source: + client_cert_source = provided_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + return client_cert_source + + @staticmethod + def _get_api_endpoint( + api_override, client_cert_source, universe_domain, use_mtls_endpoint + ): + """Return the API endpoint used by the client. + + Args: + api_override (str): The API endpoint override. If specified, this is always + the return value of this function and the other arguments are not used. + client_cert_source (bytes): The client certificate source used by the client. + universe_domain (str): The universe domain used by the client. + use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. + Possible values are "always", "auto", or "never". + + Returns: + str: The API endpoint to be used by the client. + """ + if api_override is not None: + api_endpoint = api_override + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + _default_universe = DiscussServiceClient._DEFAULT_UNIVERSE + if universe_domain != _default_universe: + raise MutualTLSChannelError( + f"mTLS is not supported in any universe other than {_default_universe}." + ) + api_endpoint = DiscussServiceClient.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = DiscussServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=universe_domain + ) + return api_endpoint + + @staticmethod + def _get_universe_domain( + client_universe_domain: Optional[str], universe_domain_env: Optional[str] + ) -> str: + """Return the universe domain used by the client. + + Args: + client_universe_domain (Optional[str]): The universe domain configured via the client options. + universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. + + Returns: + str: The universe domain to be used by the client. + + Raises: + ValueError: If the universe domain is an empty string. + """ + universe_domain = DiscussServiceClient._DEFAULT_UNIVERSE + if client_universe_domain is not None: + universe_domain = client_universe_domain + elif universe_domain_env is not None: + universe_domain = universe_domain_env + if len(universe_domain.strip()) == 0: + raise ValueError("Universe Domain cannot be an empty string.") + return universe_domain + + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. + + Returns: + bool: True iff the configured universe domain is valid. + + Raises: + ValueError: If the configured universe domain is not valid. + """ + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True + + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. 
+ """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used by the client instance. + """ + return self._universe_domain + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[str, DiscussServiceTransport, Callable[..., DiscussServiceTransport]] + ] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the discuss service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,DiscussServiceTransport,Callable[..., DiscussServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the DiscussServiceTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that the ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. 
+ Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client_options = client_options + if isinstance(self._client_options, dict): + self._client_options = client_options_lib.from_dict(self._client_options) + if self._client_options is None: + self._client_options = client_options_lib.ClientOptions() + self._client_options = cast( + client_options_lib.ClientOptions, self._client_options + ) + + universe_domain_opt = getattr(self._client_options, "universe_domain", None) + + ( + self._use_client_cert, + self._use_mtls_endpoint, + self._universe_domain_env, + ) = DiscussServiceClient._read_environment_variables() + self._client_cert_source = DiscussServiceClient._get_client_cert_source( + self._client_options.client_cert_source, self._use_client_cert + ) + self._universe_domain = DiscussServiceClient._get_universe_domain( + universe_domain_opt, self._universe_domain_env + ) + self._api_endpoint = None # updated below, depending on `transport` + + # Initialize the universe domain validation. + self._is_universe_domain_valid = False + + if CLIENT_LOGGING_SUPPORTED: # pragma: NO COVER + # Setup logging. + client_logging.initialize_logging() + + api_key_value = getattr(self._client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + transport_provided = isinstance(transport, DiscussServiceTransport) + if transport_provided: + # transport is a DiscussServiceTransport instance. + if credentials or self._client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if self._client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = cast(DiscussServiceTransport, transport) + self._api_endpoint = self._transport.host + + self._api_endpoint = ( + self._api_endpoint + or DiscussServiceClient._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint, + ) + ) + + if not transport_provided: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + transport_init: Union[ + Type[DiscussServiceTransport], Callable[..., DiscussServiceTransport] + ] = ( + DiscussServiceClient.get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., DiscussServiceTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( + credentials=credentials, + credentials_file=self._client_options.credentials_file, + host=self._api_endpoint, + scopes=self._client_options.scopes, + client_cert_source_for_mtls=self._client_cert_source, + quota_project_id=self._client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=self._client_options.api_audience, + ) + + if "async" not in str(self._transport): + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ): # pragma: NO COVER + _LOGGER.debug( + "Created client `google.ai.generativelanguage_v1alpha.DiscussServiceClient`.", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.DiscussService", + "universeDomain": getattr( + self._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._transport, "_credentials") + else { + "serviceName": "google.ai.generativelanguage.v1alpha.DiscussService", + "credentialsType": None, + }, + ) + + def generate_message( + self, + request: Optional[Union[discuss_service.GenerateMessageRequest, dict]] = None, + *, + model: Optional[str] = None, + prompt: Optional[discuss_service.MessagePrompt] = None, + temperature: Optional[float] = None, + candidate_count: Optional[int] = None, + top_p: Optional[float] = None, + top_k: Optional[int] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> discuss_service.GenerateMessageResponse: + r"""Generates a response from the model given an input + ``MessagePrompt``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1alpha + + def sample_generate_message(): + # Create a client + client = generativelanguage_v1alpha.DiscussServiceClient() + + # Initialize request argument(s) + prompt = generativelanguage_v1alpha.MessagePrompt() + prompt.messages.content = "content_value" + + request = generativelanguage_v1alpha.GenerateMessageRequest( + model="model_value", + prompt=prompt, + ) + + # Make the request + response = client.generate_message(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.ai.generativelanguage_v1alpha.types.GenerateMessageRequest, dict]): + The request object. Request to generate a message + response from the model. + model (str): + Required. The name of the model to use. + + Format: ``name=models/{model}``. + + This corresponds to the ``model`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + prompt (google.ai.generativelanguage_v1alpha.types.MessagePrompt): + Required. The structured textual + input given to the model as a prompt. + Given a + prompt, the model will return what it + predicts is the next message in the + discussion. + + This corresponds to the ``prompt`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + temperature (float): + Optional. Controls the randomness of the output. + + Values can range over ``[0.0,1.0]``, inclusive. A value + closer to ``1.0`` will produce responses that are more + varied, while a value closer to ``0.0`` will typically + result in less surprising responses from the model. + + This corresponds to the ``temperature`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + candidate_count (int): + Optional. The number of generated response messages to + return. + + This value must be between ``[1, 8]``, inclusive. If + unset, this will default to ``1``. + + This corresponds to the ``candidate_count`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + top_p (float): + Optional. The maximum cumulative probability of tokens + to consider when sampling. + + The model uses combined Top-k and nucleus sampling. + + Nucleus sampling considers the smallest set of tokens + whose probability sum is at least ``top_p``. + + This corresponds to the ``top_p`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + top_k (int): + Optional. The maximum number of tokens to consider when + sampling. + + The model uses combined Top-k and nucleus sampling. + + Top-k sampling considers the set of ``top_k`` most + probable tokens. + + This corresponds to the ``top_k`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.ai.generativelanguage_v1alpha.types.GenerateMessageResponse: + The response from the model. 
+ + This includes candidate messages and + conversation history in the form of + chronologically-ordered messages. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any( + [model, prompt, temperature, candidate_count, top_p, top_k] + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, discuss_service.GenerateMessageRequest): + request = discuss_service.GenerateMessageRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if model is not None: + request.model = model + if prompt is not None: + request.prompt = prompt + if temperature is not None: + request.temperature = temperature + if candidate_count is not None: + request.candidate_count = candidate_count + if top_p is not None: + request.top_p = top_p + if top_k is not None: + request.top_k = top_k + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.generate_message] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("model", request.model),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def count_message_tokens( + self, + request: Optional[ + Union[discuss_service.CountMessageTokensRequest, dict] + ] = None, + *, + model: Optional[str] = None, + prompt: Optional[discuss_service.MessagePrompt] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> discuss_service.CountMessageTokensResponse: + r"""Runs a model's tokenizer on a string and returns the + token count. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1alpha + + def sample_count_message_tokens(): + # Create a client + client = generativelanguage_v1alpha.DiscussServiceClient() + + # Initialize request argument(s) + prompt = generativelanguage_v1alpha.MessagePrompt() + prompt.messages.content = "content_value" + + request = generativelanguage_v1alpha.CountMessageTokensRequest( + model="model_value", + prompt=prompt, + ) + + # Make the request + response = client.count_message_tokens(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.ai.generativelanguage_v1alpha.types.CountMessageTokensRequest, dict]): + The request object. Counts the number of tokens in the ``prompt`` sent to a + model. 
+ + Models may tokenize text differently, so each model may + return a different ``token_count``. + model (str): + Required. The model's resource name. This serves as an + ID for the Model to use. + + This name should match a model name returned by the + ``ListModels`` method. + + Format: ``models/{model}`` + + This corresponds to the ``model`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + prompt (google.ai.generativelanguage_v1alpha.types.MessagePrompt): + Required. The prompt, whose token + count is to be returned. + + This corresponds to the ``prompt`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.ai.generativelanguage_v1alpha.types.CountMessageTokensResponse: + A response from CountMessageTokens. + + It returns the model's token_count for the prompt. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([model, prompt]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, discuss_service.CountMessageTokensRequest): + request = discuss_service.CountMessageTokensRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if model is not None: + request.model = model + if prompt is not None: + request.prompt = prompt + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.count_message_tokens] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("model", request.model),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "DiscussServiceClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. 
+ + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_operations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e + + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("DiscussServiceClient",) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/discuss_service/transports/README.rst b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/discuss_service/transports/README.rst new file mode 100644 index 000000000000..864ca745c717 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/discuss_service/transports/README.rst @@ -0,0 +1,9 @@ + +transport inheritance structure +_______________________________ + +`DiscussServiceTransport` is the ABC for all transports. +- public child `DiscussServiceGrpcTransport` for sync gRPC transport (defined in `grpc.py`). +- public child `DiscussServiceGrpcAsyncIOTransport` for async gRPC transport (defined in `grpc_asyncio.py`). +- private child `_BaseDiscussServiceRestTransport` for base REST transport with inner classes `_BaseMETHOD` (defined in `rest_base.py`). +- public child `DiscussServiceRestTransport` for sync REST transport with inner classes `METHOD` derived from the parent's corresponding `_BaseMETHOD` classes (defined in `rest.py`). diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/discuss_service/transports/__init__.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/discuss_service/transports/__init__.py new file mode 100644 index 000000000000..05b2d4522c01 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/discuss_service/transports/__init__.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import DiscussServiceTransport +from .grpc import DiscussServiceGrpcTransport +from .grpc_asyncio import DiscussServiceGrpcAsyncIOTransport +from .rest import DiscussServiceRestInterceptor, DiscussServiceRestTransport + +# Compile a registry of transports. 
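+# A minimal usage sketch (an assumption based on the standard GAPIC constructor,
+# not text from this patch): the client accepts one of the registry keys below
+# as a transport name, e.g.:
+#
+#     from google.ai import generativelanguage_v1alpha
+#     client = generativelanguage_v1alpha.DiscussServiceClient(transport="rest")
+#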
+_transport_registry = OrderedDict() # type: Dict[str, Type[DiscussServiceTransport]] +_transport_registry["grpc"] = DiscussServiceGrpcTransport +_transport_registry["grpc_asyncio"] = DiscussServiceGrpcAsyncIOTransport +_transport_registry["rest"] = DiscussServiceRestTransport + +__all__ = ( + "DiscussServiceTransport", + "DiscussServiceGrpcTransport", + "DiscussServiceGrpcAsyncIOTransport", + "DiscussServiceRestTransport", + "DiscussServiceRestInterceptor", +) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/discuss_service/transports/base.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/discuss_service/transports/base.py new file mode 100644 index 000000000000..345e7b694b51 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/discuss_service/transports/base.py @@ -0,0 +1,213 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.ai.generativelanguage_v1alpha import gapic_version as package_version +from google.ai.generativelanguage_v1alpha.types import discuss_service + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class DiscussServiceTransport(abc.ABC): + """Abstract transport class for DiscussService.""" + + AUTH_SCOPES = () + + DEFAULT_HOST: str = "generativelanguage.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'generativelanguage.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. 
+ scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + if not hasattr(self, "_ignore_credentials"): + self._ignore_credentials: bool = False + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None and not self._ignore_credentials: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + @property + def host(self): + return self._host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.generate_message: gapic_v1.method.wrap_method( + self.generate_message, + default_timeout=None, + client_info=client_info, + ), + self.count_message_tokens: gapic_v1.method.wrap_method( + self.count_message_tokens, + default_timeout=None, + client_info=client_info, + ), + self.get_operation: gapic_v1.method.wrap_method( + self.get_operation, + default_timeout=None, + client_info=client_info, + ), + self.list_operations: gapic_v1.method.wrap_method( + self.list_operations, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + + @property + def generate_message( + self, + ) -> Callable[ + [discuss_service.GenerateMessageRequest], + Union[ + discuss_service.GenerateMessageResponse, + Awaitable[discuss_service.GenerateMessageResponse], + ], + ]: + raise NotImplementedError() + + @property + def count_message_tokens( + self, + ) -> Callable[ + [discuss_service.CountMessageTokensRequest], + Union[ + discuss_service.CountMessageTokensResponse, + Awaitable[discuss_service.CountMessageTokensResponse], + ], + ]: + raise NotImplementedError() + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], + Union[ + operations_pb2.ListOperationsResponse, + Awaitable[operations_pb2.ListOperationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_operation( + self, + ) -> Callable[ + [operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("DiscussServiceTransport",) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/discuss_service/transports/grpc.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/discuss_service/transports/grpc.py new file mode 100644 index 000000000000..b614519801a2 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/discuss_service/transports/grpc.py @@ -0,0 +1,431 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import json +import logging as std_logging +import pickle +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf.json_format import MessageToJson +import google.protobuf.message +import grpc # type: ignore +import proto # type: ignore + +from google.ai.generativelanguage_v1alpha.types import discuss_service + +from .base import DEFAULT_CLIENT_INFO, DiscussServiceTransport + +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + + +class _LoggingClientInterceptor(grpc.UnaryUnaryClientInterceptor): # pragma: NO COVER + def intercept_unary_unary(self, continuation, client_call_details, request): + logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ) + if logging_enabled: # pragma: NO COVER + request_metadata = client_call_details.metadata + if isinstance(request, proto.Message): + request_payload = type(request).to_json(request) + elif isinstance(request, google.protobuf.message.Message): + request_payload = MessageToJson(request) + else: + request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" + + request_metadata = { + key: value.decode("utf-8") if isinstance(value, bytes) else value + for key, value in request_metadata + } + grpc_request = { + "payload": request_payload, + "requestMethod": "grpc", + "metadata": dict(request_metadata), + } + _LOGGER.debug( + f"Sending request for {client_call_details.method}", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.DiscussService", + "rpcName": client_call_details.method, + "request": grpc_request, + "metadata": grpc_request["metadata"], + }, + ) + + response = continuation(client_call_details, request) + if logging_enabled: # pragma: NO COVER + response_metadata = response.trailing_metadata() + # Convert gRPC metadata `` to list of tuples + metadata = ( + dict([(k, str(v)) for k, v in response_metadata]) + if response_metadata + else None + ) + result = response.result() + if isinstance(result, proto.Message): + response_payload = type(result).to_json(result) + elif isinstance(result, google.protobuf.message.Message): + response_payload = MessageToJson(result) + else: + response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" + grpc_response = { + "payload": response_payload, + "metadata": metadata, + "status": "OK", + } + _LOGGER.debug( + f"Received response for {client_call_details.method}.", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.DiscussService", + "rpcName": client_call_details.method, + "response": grpc_response, + "metadata": grpc_response["metadata"], + }, + ) + return response + + +class DiscussServiceGrpcTransport(DiscussServiceTransport): + """gRPC backend transport for DiscussService. + + An API for using Generative Language Models (GLMs) in dialog + applications. + Also known as large language models (LLMs), this API provides + models that are trained for multi-turn dialog. 
+ + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "generativelanguage.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'generativelanguage.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if a ``channel`` instance is provided. + channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. 
+ Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, grpc.Channel): + # Ignore credentials if a channel was passed. + credentials = None + self._ignore_credentials = True + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + self._interceptor = _LoggingClientInterceptor() + self._logged_channel = grpc.intercept_channel( + self._grpc_channel, self._interceptor + ) + + # Wrap messages. This must be done after self._logged_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "generativelanguage.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. 
+ credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service.""" + return self._grpc_channel + + @property + def generate_message( + self, + ) -> Callable[ + [discuss_service.GenerateMessageRequest], + discuss_service.GenerateMessageResponse, + ]: + r"""Return a callable for the generate message method over gRPC. + + Generates a response from the model given an input + ``MessagePrompt``. + + Returns: + Callable[[~.GenerateMessageRequest], + ~.GenerateMessageResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "generate_message" not in self._stubs: + self._stubs["generate_message"] = self._logged_channel.unary_unary( + "/google.ai.generativelanguage.v1alpha.DiscussService/GenerateMessage", + request_serializer=discuss_service.GenerateMessageRequest.serialize, + response_deserializer=discuss_service.GenerateMessageResponse.deserialize, + ) + return self._stubs["generate_message"] + + @property + def count_message_tokens( + self, + ) -> Callable[ + [discuss_service.CountMessageTokensRequest], + discuss_service.CountMessageTokensResponse, + ]: + r"""Return a callable for the count message tokens method over gRPC. + + Runs a model's tokenizer on a string and returns the + token count. + + Returns: + Callable[[~.CountMessageTokensRequest], + ~.CountMessageTokensResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
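+        # Hedged usage sketch (assumed, not part of this patch): the property
+        # returns a plain callable, so a caller could invoke the RPC directly:
+        #     stub = transport.count_message_tokens
+        #     response = stub(discuss_service.CountMessageTokensRequest(model="model_value"))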
+ if "count_message_tokens" not in self._stubs: + self._stubs["count_message_tokens"] = self._logged_channel.unary_unary( + "/google.ai.generativelanguage.v1alpha.DiscussService/CountMessageTokens", + request_serializer=discuss_service.CountMessageTokensRequest.serialize, + response_deserializer=discuss_service.CountMessageTokensResponse.deserialize, + ) + return self._stubs["count_message_tokens"] + + def close(self): + self._logged_channel.close() + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("DiscussServiceGrpcTransport",) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/discuss_service/transports/grpc_asyncio.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/discuss_service/transports/grpc_asyncio.py new file mode 100644 index 000000000000..acd1b9564f63 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/discuss_service/transports/grpc_asyncio.py @@ -0,0 +1,468 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import inspect +import json +import logging as std_logging +import pickle +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, grpc_helpers_async +from google.api_core import retry_async as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf.json_format import MessageToJson +import google.protobuf.message +import grpc # type: ignore +from grpc.experimental import aio # type: ignore +import proto # type: ignore + +from google.ai.generativelanguage_v1alpha.types import discuss_service + +from .base import DEFAULT_CLIENT_INFO, DiscussServiceTransport +from .grpc import DiscussServiceGrpcTransport + +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + + +class _LoggingClientAIOInterceptor( + grpc.aio.UnaryUnaryClientInterceptor +): # pragma: NO COVER + async def intercept_unary_unary(self, continuation, client_call_details, request): + logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ) + if logging_enabled: # pragma: NO COVER + request_metadata = client_call_details.metadata + if isinstance(request, proto.Message): + request_payload = type(request).to_json(request) + elif isinstance(request, google.protobuf.message.Message): + request_payload = MessageToJson(request) + else: + request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" + + request_metadata = { + key: value.decode("utf-8") if isinstance(value, bytes) else value + for key, value in request_metadata + } + grpc_request = { + "payload": request_payload, + "requestMethod": "grpc", + "metadata": dict(request_metadata), + } + _LOGGER.debug( + f"Sending request for {client_call_details.method}", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.DiscussService", + "rpcName": str(client_call_details.method), + "request": grpc_request, + "metadata": grpc_request["metadata"], + }, + ) + response = await continuation(client_call_details, request) + if logging_enabled: # pragma: NO COVER + response_metadata = await response.trailing_metadata() + # Convert gRPC metadata `` to list of tuples + metadata = ( + dict([(k, str(v)) for k, v in response_metadata]) + if response_metadata + else None + ) + result = await response + if isinstance(result, proto.Message): + response_payload = type(result).to_json(result) + elif isinstance(result, google.protobuf.message.Message): + response_payload = MessageToJson(result) + else: + response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" + grpc_response = { + "payload": response_payload, + "metadata": metadata, + "status": "OK", + } + _LOGGER.debug( + f"Received response to rpc {client_call_details.method}.", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.DiscussService", + "rpcName": str(client_call_details.method), + "response": grpc_response, + "metadata": grpc_response["metadata"], + }, + ) + return response + + +class DiscussServiceGrpcAsyncIOTransport(DiscussServiceTransport): + """gRPC AsyncIO backend transport for DiscussService. 
+ + An API for using Generative Language Models (GLMs) in dialog + applications. + Also known as large language models (LLMs), this API provides + models that are trained for multi-turn dialog. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "generativelanguage.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. + """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "generativelanguage.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'generativelanguage.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. 
+ scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, aio.Channel): + # Ignore credentials if a channel was passed. + credentials = None + self._ignore_credentials = True + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
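+                # Hedged illustration (assumption, not from this patch): a cert
+                # source is a zero-argument callable returning PEM-encoded bytes, e.g.:
+                #     def client_cert_source():
+                #         with open("client.crt", "rb") as c, open("client.key", "rb") as k:
+                #             return c.read(), k.read()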
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + self._interceptor = _LoggingClientAIOInterceptor() + self._grpc_channel._unary_unary_interceptors.append(self._interceptor) + self._logged_channel = self._grpc_channel + self._wrap_with_kind = ( + "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters + ) + # Wrap messages. This must be done after self._logged_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def generate_message( + self, + ) -> Callable[ + [discuss_service.GenerateMessageRequest], + Awaitable[discuss_service.GenerateMessageResponse], + ]: + r"""Return a callable for the generate message method over gRPC. + + Generates a response from the model given an input + ``MessagePrompt``. + + Returns: + Callable[[~.GenerateMessageRequest], + Awaitable[~.GenerateMessageResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "generate_message" not in self._stubs: + self._stubs["generate_message"] = self._logged_channel.unary_unary( + "/google.ai.generativelanguage.v1alpha.DiscussService/GenerateMessage", + request_serializer=discuss_service.GenerateMessageRequest.serialize, + response_deserializer=discuss_service.GenerateMessageResponse.deserialize, + ) + return self._stubs["generate_message"] + + @property + def count_message_tokens( + self, + ) -> Callable[ + [discuss_service.CountMessageTokensRequest], + Awaitable[discuss_service.CountMessageTokensResponse], + ]: + r"""Return a callable for the count message tokens method over gRPC. + + Runs a model's tokenizer on a string and returns the + token count. 
+ + Returns: + Callable[[~.CountMessageTokensRequest], + Awaitable[~.CountMessageTokensResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "count_message_tokens" not in self._stubs: + self._stubs["count_message_tokens"] = self._logged_channel.unary_unary( + "/google.ai.generativelanguage.v1alpha.DiscussService/CountMessageTokens", + request_serializer=discuss_service.CountMessageTokensRequest.serialize, + response_deserializer=discuss_service.CountMessageTokensResponse.deserialize, + ) + return self._stubs["count_message_tokens"] + + def _prep_wrapped_messages(self, client_info): + """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + self._wrapped_methods = { + self.generate_message: self._wrap_method( + self.generate_message, + default_timeout=None, + client_info=client_info, + ), + self.count_message_tokens: self._wrap_method( + self.count_message_tokens, + default_timeout=None, + client_info=client_info, + ), + self.get_operation: self._wrap_method( + self.get_operation, + default_timeout=None, + client_info=client_info, + ), + self.list_operations: self._wrap_method( + self.list_operations, + default_timeout=None, + client_info=client_info, + ), + } + + def _wrap_method(self, func, *args, **kwargs): + if self._wrap_with_kind: # pragma: NO COVER + kwargs["kind"] = self.kind + return gapic_v1.method_async.wrap_method(func, *args, **kwargs) + + def close(self): + return self._logged_channel.close() + + @property + def kind(self) -> str: + return "grpc_asyncio" + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + +__all__ = ("DiscussServiceGrpcAsyncIOTransport",) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/discuss_service/transports/rest.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/discuss_service/transports/rest.py new file mode 100644 index 000000000000..e0d416b9d02d --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/discuss_service/transports/rest.py @@ -0,0 +1,968 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import dataclasses +import json # type: ignore +import logging +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, rest_helpers, rest_streaming +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import json_format +from requests import __version__ as requests_version + +from google.ai.generativelanguage_v1alpha.types import discuss_service + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .rest_base import _BaseDiscussServiceRestTransport + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = logging.getLogger(__name__) + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=f"requests@{requests_version}", +) + + +class DiscussServiceRestInterceptor: + """Interceptor for DiscussService. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the DiscussServiceRestTransport. + + .. 
code-block:: python + class MyCustomDiscussServiceInterceptor(DiscussServiceRestInterceptor): + def pre_count_message_tokens(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_count_message_tokens(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_generate_message(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_generate_message(self, response): + logging.log(f"Received response: {response}") + return response + + transport = DiscussServiceRestTransport(interceptor=MyCustomDiscussServiceInterceptor()) + client = DiscussServiceClient(transport=transport) + + + """ + + def pre_count_message_tokens( + self, + request: discuss_service.CountMessageTokensRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + discuss_service.CountMessageTokensRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for count_message_tokens + + Override in a subclass to manipulate the request or metadata + before they are sent to the DiscussService server. + """ + return request, metadata + + def post_count_message_tokens( + self, response: discuss_service.CountMessageTokensResponse + ) -> discuss_service.CountMessageTokensResponse: + """Post-rpc interceptor for count_message_tokens + + DEPRECATED. Please use the `post_count_message_tokens_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the DiscussService server but before + it is returned to user code. This `post_count_message_tokens` interceptor runs + before the `post_count_message_tokens_with_metadata` interceptor. + """ + return response + + def post_count_message_tokens_with_metadata( + self, + response: discuss_service.CountMessageTokensResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + discuss_service.CountMessageTokensResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for count_message_tokens + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DiscussService server but before it is returned to user code. + + We recommend only using this `post_count_message_tokens_with_metadata` + interceptor in new development instead of the `post_count_message_tokens` interceptor. + When both interceptors are used, this `post_count_message_tokens_with_metadata` interceptor runs after the + `post_count_message_tokens` interceptor. The (possibly modified) response returned by + `post_count_message_tokens` will be passed to + `post_count_message_tokens_with_metadata`. + """ + return response, metadata + + def pre_generate_message( + self, + request: discuss_service.GenerateMessageRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + discuss_service.GenerateMessageRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for generate_message + + Override in a subclass to manipulate the request or metadata + before they are sent to the DiscussService server. + """ + return request, metadata + + def post_generate_message( + self, response: discuss_service.GenerateMessageResponse + ) -> discuss_service.GenerateMessageResponse: + """Post-rpc interceptor for generate_message + + DEPRECATED. Please use the `post_generate_message_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response + after it is returned by the DiscussService server but before + it is returned to user code. This `post_generate_message` interceptor runs + before the `post_generate_message_with_metadata` interceptor. + """ + return response + + def post_generate_message_with_metadata( + self, + response: discuss_service.GenerateMessageResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + discuss_service.GenerateMessageResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for generate_message + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DiscussService server but before it is returned to user code. + + We recommend only using this `post_generate_message_with_metadata` + interceptor in new development instead of the `post_generate_message` interceptor. + When both interceptors are used, this `post_generate_message_with_metadata` interceptor runs after the + `post_generate_message` interceptor. The (possibly modified) response returned by + `post_generate_message` will be passed to + `post_generate_message_with_metadata`. + """ + return response, metadata + + def pre_get_operation( + self, + request: operations_pb2.GetOperationRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + operations_pb2.GetOperationRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for get_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the DiscussService server. + """ + return request, metadata + + def post_get_operation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for get_operation + + Override in a subclass to manipulate the response + after it is returned by the DiscussService server but before + it is returned to user code. + """ + return response + + def pre_list_operations( + self, + request: operations_pb2.ListOperationsRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + operations_pb2.ListOperationsRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for list_operations + + Override in a subclass to manipulate the request or metadata + before they are sent to the DiscussService server. + """ + return request, metadata + + def post_list_operations( + self, response: operations_pb2.ListOperationsResponse + ) -> operations_pb2.ListOperationsResponse: + """Post-rpc interceptor for list_operations + + Override in a subclass to manipulate the response + after it is returned by the DiscussService server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class DiscussServiceRestStub: + _session: AuthorizedSession + _host: str + _interceptor: DiscussServiceRestInterceptor + + +class DiscussServiceRestTransport(_BaseDiscussServiceRestTransport): + """REST backend synchronous transport for DiscussService. + + An API for using Generative Language Models (GLMs) in dialog + applications. + Also known as large language models (LLMs), this API provides + models that are trained for multi-turn dialog. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. 
+ + It sends JSON representations of protocol buffers over HTTP/1.1 + """ + + def __init__( + self, + *, + host: str = "generativelanguage.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[DiscussServiceRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'generativelanguage.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
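        # A rough usage sketch for this constructor, mirroring the example in the
        # DiscussServiceRestInterceptor docstring above (the interceptor subclass
        # name is illustrative):
        #
        #     transport = DiscussServiceRestTransport(
        #         interceptor=MyCustomDiscussServiceInterceptor(),
        #     )
        #     client = DiscussServiceClient(transport=transport)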
+ # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + url_scheme=url_scheme, + api_audience=api_audience, + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST + ) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or DiscussServiceRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _CountMessageTokens( + _BaseDiscussServiceRestTransport._BaseCountMessageTokens, DiscussServiceRestStub + ): + def __hash__(self): + return hash("DiscussServiceRestTransport.CountMessageTokens") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: discuss_service.CountMessageTokensRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> discuss_service.CountMessageTokensResponse: + r"""Call the count message tokens method over HTTP. + + Args: + request (~.discuss_service.CountMessageTokensRequest): + The request object. Counts the number of tokens in the ``prompt`` sent to a + model. + + Models may tokenize text differently, so each model may + return a different ``token_count``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.discuss_service.CountMessageTokensResponse: + A response from ``CountMessageTokens``. + + It returns the model's ``token_count`` for the + ``prompt``. 
+ + """ + + http_options = ( + _BaseDiscussServiceRestTransport._BaseCountMessageTokens._get_http_options() + ) + + request, metadata = self._interceptor.pre_count_message_tokens( + request, metadata + ) + transcoded_request = _BaseDiscussServiceRestTransport._BaseCountMessageTokens._get_transcoded_request( + http_options, request + ) + + body = _BaseDiscussServiceRestTransport._BaseCountMessageTokens._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseDiscussServiceRestTransport._BaseCountMessageTokens._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.ai.generativelanguage_v1alpha.DiscussServiceClient.CountMessageTokens", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.DiscussService", + "rpcName": "CountMessageTokens", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DiscussServiceRestTransport._CountMessageTokens._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = discuss_service.CountMessageTokensResponse() + pb_resp = discuss_service.CountMessageTokensResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_count_message_tokens(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_count_message_tokens_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = ( + discuss_service.CountMessageTokensResponse.to_json(response) + ) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.ai.generativelanguage_v1alpha.DiscussServiceClient.count_message_tokens", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.DiscussService", + "rpcName": "CountMessageTokens", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _GenerateMessage( + _BaseDiscussServiceRestTransport._BaseGenerateMessage, DiscussServiceRestStub + ): + def __hash__(self): + return hash("DiscussServiceRestTransport.GenerateMessage") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + 
params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: discuss_service.GenerateMessageRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> discuss_service.GenerateMessageResponse: + r"""Call the generate message method over HTTP. + + Args: + request (~.discuss_service.GenerateMessageRequest): + The request object. Request to generate a message + response from the model. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.discuss_service.GenerateMessageResponse: + The response from the model. + + This includes candidate messages and + conversation history in the form of + chronologically-ordered messages. + + """ + + http_options = ( + _BaseDiscussServiceRestTransport._BaseGenerateMessage._get_http_options() + ) + + request, metadata = self._interceptor.pre_generate_message( + request, metadata + ) + transcoded_request = _BaseDiscussServiceRestTransport._BaseGenerateMessage._get_transcoded_request( + http_options, request + ) + + body = _BaseDiscussServiceRestTransport._BaseGenerateMessage._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseDiscussServiceRestTransport._BaseGenerateMessage._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.ai.generativelanguage_v1alpha.DiscussServiceClient.GenerateMessage", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.DiscussService", + "rpcName": "GenerateMessage", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DiscussServiceRestTransport._GenerateMessage._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
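            # `from_http_response` below maps a non-2xx status to the matching
            # `GoogleAPICallError` subclass (for example 404 -> NotFound,
            # 429 -> TooManyRequests), so callers can catch specific errors.
            # A sketch, assuming a prepared request object:
            #
            #     try:
            #         client.generate_message(request=request)
            #     except core_exceptions.InvalidArgument:
            #         ...  # e.g. a malformed prompt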
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = discuss_service.GenerateMessageResponse() + pb_resp = discuss_service.GenerateMessageResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_generate_message(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_generate_message_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = discuss_service.GenerateMessageResponse.to_json( + response + ) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.ai.generativelanguage_v1alpha.DiscussServiceClient.generate_message", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.DiscussService", + "rpcName": "GenerateMessage", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + @property + def count_message_tokens( + self, + ) -> Callable[ + [discuss_service.CountMessageTokensRequest], + discuss_service.CountMessageTokensResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CountMessageTokens(self._session, self._host, self._interceptor) # type: ignore + + @property + def generate_message( + self, + ) -> Callable[ + [discuss_service.GenerateMessageRequest], + discuss_service.GenerateMessageResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GenerateMessage(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_operation(self): + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + + class _GetOperation( + _BaseDiscussServiceRestTransport._BaseGetOperation, DiscussServiceRestStub + ): + def __hash__(self): + return hash("DiscussServiceRestTransport.GetOperation") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: operations_pb2.GetOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Call the get operation method over HTTP. + + Args: + request (operations_pb2.GetOperationRequest): + The request object for GetOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + operations_pb2.Operation: Response from GetOperation method. + """ + + http_options = ( + _BaseDiscussServiceRestTransport._BaseGetOperation._get_http_options() + ) + + request, metadata = self._interceptor.pre_get_operation(request, metadata) + transcoded_request = _BaseDiscussServiceRestTransport._BaseGetOperation._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseDiscussServiceRestTransport._BaseGetOperation._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.ai.generativelanguage_v1alpha.DiscussServiceClient.GetOperation", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.DiscussService", + "rpcName": "GetOperation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DiscussServiceRestTransport._GetOperation._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = operations_pb2.Operation() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_get_operation(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.ai.generativelanguage_v1alpha.DiscussServiceAsyncClient.GetOperation", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.DiscussService", + "rpcName": "GetOperation", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) + return resp + + @property + def list_operations(self): + return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore + + class _ListOperations( + _BaseDiscussServiceRestTransport._BaseListOperations, DiscussServiceRestStub + ): + def __hash__(self): + return hash("DiscussServiceRestTransport.ListOperations") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: 
operations_pb2.ListOperationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Call the list operations method over HTTP. + + Args: + request (operations_pb2.ListOperationsRequest): + The request object for ListOperations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + operations_pb2.ListOperationsResponse: Response from ListOperations method. + """ + + http_options = ( + _BaseDiscussServiceRestTransport._BaseListOperations._get_http_options() + ) + + request, metadata = self._interceptor.pre_list_operations(request, metadata) + transcoded_request = _BaseDiscussServiceRestTransport._BaseListOperations._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseDiscussServiceRestTransport._BaseListOperations._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.ai.generativelanguage_v1alpha.DiscussServiceClient.ListOperations", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.DiscussService", + "rpcName": "ListOperations", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DiscussServiceRestTransport._ListOperations._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
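            # The "Sending request" / "Received response" DEBUG records in this
            # method are emitted only when `google.api_core.client_logging` is
            # importable and the library logger is enabled; one way to enable it
            # (a sketch using standard logging) is:
            #
            #     import logging
            #     logging.getLogger("google.ai.generativelanguage_v1alpha").setLevel(logging.DEBUG)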
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = operations_pb2.ListOperationsResponse() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_list_operations(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.ai.generativelanguage_v1alpha.DiscussServiceAsyncClient.ListOperations", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.DiscussService", + "rpcName": "ListOperations", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) + return resp + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("DiscussServiceRestTransport",) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/discuss_service/transports/rest_base.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/discuss_service/transports/rest_base.py new file mode 100644 index 000000000000..863221cd4479 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/discuss_service/transports/rest_base.py @@ -0,0 +1,268 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union + +from google.api_core import gapic_v1, path_template +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import json_format + +from google.ai.generativelanguage_v1alpha.types import discuss_service + +from .base import DEFAULT_CLIENT_INFO, DiscussServiceTransport + + +class _BaseDiscussServiceRestTransport(DiscussServiceTransport): + """Base REST backend transport for DiscussService. + + Note: This class is not meant to be used directly. Use its sync and + async sub-classes instead. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + """ + + def __init__( + self, + *, + host: str = "generativelanguage.googleapis.com", + credentials: Optional[Any] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + Args: + host (Optional[str]): + The hostname to connect to (default: 'generativelanguage.googleapis.com'). 
+ credentials (Optional[Any]): The
+ authorization credentials to attach to requests. These
+ credentials identify the application to the service; if none
+ are specified, the client will attempt to ascertain the
+ credentials from the environment.
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+ The client info used to send a user-agent string along with
+ API requests. If ``None``, then default info will be used.
+ Generally, you only need to set this if you are developing
+ your own client library.
+ always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+ be used for service account credentials.
+ url_scheme: the protocol scheme for the API endpoint. Normally
+ "https", but for testing or local servers,
+ "http" can be specified.
+ """
+ # Run the base constructor
+ maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
+ if maybe_url_match is None:
+ raise ValueError(
+ f"Unexpected hostname structure: {host}"
+ ) # pragma: NO COVER
+
+ url_match_items = maybe_url_match.groupdict()
+
+ host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
+
+ super().__init__(
+ host=host,
+ credentials=credentials,
+ client_info=client_info,
+ always_use_jwt_access=always_use_jwt_access,
+ api_audience=api_audience,
+ )
+
+ class _BaseCountMessageTokens:
+ def __hash__(self): # pragma: NO COVER
+ return NotImplementedError("__hash__ must be implemented.")
+
+ __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {}
+
+ @classmethod
+ def _get_unset_required_fields(cls, message_dict):
+ return {
+ k: v
+ for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items()
+ if k not in message_dict
+ }
+
+ @staticmethod
+ def _get_http_options():
+ http_options: List[Dict[str, str]] = [
+ {
+ "method": "post",
+ "uri": "/v1alpha/{model=models/*}:countMessageTokens",
+ "body": "*",
+ },
+ ]
+ return http_options
+
+ @staticmethod
+ def _get_transcoded_request(http_options, request):
+ pb_request = discuss_service.CountMessageTokensRequest.pb(request)
+ transcoded_request = path_template.transcode(http_options, pb_request)
+ return transcoded_request
+
+ @staticmethod
+ def _get_request_body_json(transcoded_request):
+ # Jsonify the request body
+
+ body = json_format.MessageToJson(
+ transcoded_request["body"], use_integers_for_enums=True
+ )
+ return body
+
+ @staticmethod
+ def _get_query_params_json(transcoded_request):
+ query_params = json.loads(
+ json_format.MessageToJson(
+ transcoded_request["query_params"],
+ use_integers_for_enums=True,
+ )
+ )
+ query_params.update(
+ _BaseDiscussServiceRestTransport._BaseCountMessageTokens._get_unset_required_fields(
+ query_params
+ )
+ )
+
+ query_params["$alt"] = "json;enum-encoding=int"
+ return query_params
+
+ class _BaseGenerateMessage:
+ def __hash__(self): # pragma: NO COVER
+ return NotImplementedError("__hash__ must be implemented.")
+
+ __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {}
+
+ @classmethod
+ def _get_unset_required_fields(cls, message_dict):
+ return {
+ k: v
+ for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items()
+ if k not in message_dict
+ }
+
+ @staticmethod
+ def _get_http_options():
+ http_options: List[Dict[str, str]] = [
+ {
+ "method": "post",
+ "uri": "/v1alpha/{model=models/*}:generateMessage",
+ "body": "*",
+ },
+ ]
+ return http_options
+
+ @staticmethod
+ def _get_transcoded_request(http_options, request):
+ pb_request = discuss_service.GenerateMessageRequest.pb(request)
+ transcoded_request = path_template.transcode(http_options, pb_request)
+ return
transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseDiscussServiceRestTransport._BaseGenerateMessage._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGetOperation: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha/{name=tunedModels/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1alpha/{name=generatedFiles/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1alpha/{name=models/*/operations/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + return query_params + + class _BaseListOperations: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha/{name=tunedModels/*}/operations", + }, + { + "method": "get", + "uri": "/v1alpha/{name=models/*}/operations", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + return query_params + + +__all__ = ("_BaseDiscussServiceRestTransport",) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/file_service/__init__.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/file_service/__init__.py new file mode 100644 index 000000000000..ccbe9be60abc --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/file_service/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .async_client import FileServiceAsyncClient +from .client import FileServiceClient + +__all__ = ( + "FileServiceClient", + "FileServiceAsyncClient", +) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/file_service/async_client.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/file_service/async_client.py new file mode 100644 index 000000000000..2fb70165a176 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/file_service/async_client.py @@ -0,0 +1,780 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import logging as std_logging +import re +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry_async as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.ai.generativelanguage_v1alpha import gapic_version as package_version + +try: + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore + +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore + +from google.ai.generativelanguage_v1alpha.services.file_service import pagers +from google.ai.generativelanguage_v1alpha.types import file, file_service + +from .client import FileServiceClient +from .transports.base import DEFAULT_CLIENT_INFO, FileServiceTransport +from .transports.grpc_asyncio import FileServiceGrpcAsyncIOTransport + +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + + +class FileServiceAsyncClient: + """An API for uploading and managing files.""" + + _client: FileServiceClient + + # Copy defaults from the synchronous client for use here. + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. 
+ DEFAULT_ENDPOINT = FileServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = FileServiceClient.DEFAULT_MTLS_ENDPOINT + _DEFAULT_ENDPOINT_TEMPLATE = FileServiceClient._DEFAULT_ENDPOINT_TEMPLATE + _DEFAULT_UNIVERSE = FileServiceClient._DEFAULT_UNIVERSE + + file_path = staticmethod(FileServiceClient.file_path) + parse_file_path = staticmethod(FileServiceClient.parse_file_path) + common_billing_account_path = staticmethod( + FileServiceClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + FileServiceClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(FileServiceClient.common_folder_path) + parse_common_folder_path = staticmethod(FileServiceClient.parse_common_folder_path) + common_organization_path = staticmethod(FileServiceClient.common_organization_path) + parse_common_organization_path = staticmethod( + FileServiceClient.parse_common_organization_path + ) + common_project_path = staticmethod(FileServiceClient.common_project_path) + parse_common_project_path = staticmethod( + FileServiceClient.parse_common_project_path + ) + common_location_path = staticmethod(FileServiceClient.common_location_path) + parse_common_location_path = staticmethod( + FileServiceClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + FileServiceAsyncClient: The constructed client. + """ + return FileServiceClient.from_service_account_info.__func__(FileServiceAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + FileServiceAsyncClient: The constructed client. + """ + return FileServiceClient.from_service_account_file.__func__(FileServiceAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. 
+ + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return FileServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> FileServiceTransport: + """Returns the transport used by the client instance. + + Returns: + FileServiceTransport: The transport used by the client instance. + """ + return self._client.transport + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._client._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used + by the client instance. + """ + return self._client._universe_domain + + get_transport_class = FileServiceClient.get_transport_class + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[str, FileServiceTransport, Callable[..., FileServiceTransport]] + ] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the file service async client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,FileServiceTransport,Callable[..., FileServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport to use. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the FileServiceTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. 
Note that ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = FileServiceClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ): # pragma: NO COVER + _LOGGER.debug( + "Created client `google.ai.generativelanguage_v1alpha.FileServiceAsyncClient`.", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.FileService", + "universeDomain": getattr( + self._client._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._client._transport, "_credentials") + else { + "serviceName": "google.ai.generativelanguage.v1alpha.FileService", + "credentialsType": None, + }, + ) + + async def create_file( + self, + request: Optional[Union[file_service.CreateFileRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> file_service.CreateFileResponse: + r"""Creates a ``File``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1alpha + + async def sample_create_file(): + # Create a client + client = generativelanguage_v1alpha.FileServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.CreateFileRequest( + ) + + # Make the request + response = await client.create_file(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.ai.generativelanguage_v1alpha.types.CreateFileRequest, dict]]): + The request object. Request for ``CreateFile``. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.ai.generativelanguage_v1alpha.types.CreateFileResponse: + Response for CreateFile. + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
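        # Because `request` is a proto-plus message type, a plain dict and a
        # constructed request object are both accepted and coerced below. A sketch
        # (empty requests, as in the generated sample above):
        #
        #     await client.create_file(request={})
        #     await client.create_file(request=generativelanguage_v1alpha.CreateFileRequest())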
+ if not isinstance(request, file_service.CreateFileRequest): + request = file_service.CreateFileRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_file + ] + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_files( + self, + request: Optional[Union[file_service.ListFilesRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListFilesAsyncPager: + r"""Lists the metadata for ``File``\ s owned by the requesting + project. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1alpha + + async def sample_list_files(): + # Create a client + client = generativelanguage_v1alpha.FileServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.ListFilesRequest( + ) + + # Make the request + page_result = client.list_files(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.ai.generativelanguage_v1alpha.types.ListFilesRequest, dict]]): + The request object. Request for ``ListFiles``. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.ai.generativelanguage_v1alpha.services.file_service.pagers.ListFilesAsyncPager: + Response for ListFiles. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, file_service.ListFilesRequest): + request = file_service.ListFilesRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_files + ] + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. 
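        # A rough usage sketch of the pager built below: `async for` yields
        # individual `File` messages and fetches further pages lazily.
        #
        #     pager = await client.list_files()
        #     async for f in pager:
        #         print(f.name)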
+ response = pagers.ListFilesAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_file( + self, + request: Optional[Union[file_service.GetFileRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> file.File: + r"""Gets the metadata for the given ``File``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1alpha + + async def sample_get_file(): + # Create a client + client = generativelanguage_v1alpha.FileServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.GetFileRequest( + name="name_value", + ) + + # Make the request + response = await client.get_file(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.ai.generativelanguage_v1alpha.types.GetFileRequest, dict]]): + The request object. Request for ``GetFile``. + name (:class:`str`): + Required. The name of the ``File`` to get. Example: + ``files/abc-123`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.ai.generativelanguage_v1alpha.types.File: + A file uploaded to the API. + Next ID: 15 + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, file_service.GetFileRequest): + request = file_service.GetFileRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.get_file] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. 
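        # This check compares the client's configured universe domain (default
        # "googleapis.com") against the credentials before any request is sent and
        # raises on a mismatch instead of calling the wrong endpoint.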
+ self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_file( + self, + request: Optional[Union[file_service.DeleteFileRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Deletes the ``File``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1alpha + + async def sample_delete_file(): + # Create a client + client = generativelanguage_v1alpha.FileServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.DeleteFileRequest( + name="name_value", + ) + + # Make the request + await client.delete_file(request=request) + + Args: + request (Optional[Union[google.ai.generativelanguage_v1alpha.types.DeleteFileRequest, dict]]): + The request object. Request for ``DeleteFile``. + name (:class:`str`): + Required. The name of the ``File`` to delete. Example: + ``files/abc-123`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, file_service.DeleteFileRequest): + request = file_service.DeleteFileRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_file + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. 
+ await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.list_operations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.get_operation] + + # Certain fields should be provided within the metadata header; + # add these here. 
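+        # The routing parameter is sent to the server in the
+        # ``x-goog-request-params`` request metadata entry, so the backend
+        # can route the call based on the resource name.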
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def __aenter__(self) -> "FileServiceAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("FileServiceAsyncClient",) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/file_service/client.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/file_service/client.py new file mode 100644 index 000000000000..f5c535a4bb8a --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/file_service/client.py @@ -0,0 +1,1202 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from http import HTTPStatus +import json +import logging as std_logging +import os +import re +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) +import warnings + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.ai.generativelanguage_v1alpha import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore + +from google.ai.generativelanguage_v1alpha.services.file_service import pagers +from google.ai.generativelanguage_v1alpha.types import file, file_service + +from .transports.base import DEFAULT_CLIENT_INFO, FileServiceTransport +from .transports.grpc import FileServiceGrpcTransport +from .transports.grpc_asyncio 
import FileServiceGrpcAsyncIOTransport
+from .transports.rest import FileServiceRestTransport
+
+
+class FileServiceClientMeta(type):
+    """Metaclass for the FileService client.
+
+    This provides class-level methods for building and retrieving
+    support objects (e.g. transport) without polluting the client instance
+    objects.
+    """
+
+    _transport_registry = OrderedDict()  # type: Dict[str, Type[FileServiceTransport]]
+    _transport_registry["grpc"] = FileServiceGrpcTransport
+    _transport_registry["grpc_asyncio"] = FileServiceGrpcAsyncIOTransport
+    _transport_registry["rest"] = FileServiceRestTransport
+
+    def get_transport_class(
+        cls,
+        label: Optional[str] = None,
+    ) -> Type[FileServiceTransport]:
+        """Returns an appropriate transport class.
+
+        Args:
+            label: The name of the desired transport. If none is
+                provided, then the first transport in the registry is used.
+
+        Returns:
+            The transport class to use.
+        """
+        # If a specific transport is requested, return that one.
+        if label:
+            return cls._transport_registry[label]
+
+        # No transport is requested; return the default (that is, the first one
+        # in the dictionary).
+        return next(iter(cls._transport_registry.values()))
+
+
+class FileServiceClient(metaclass=FileServiceClientMeta):
+    """An API for uploading and managing files."""
+
+    @staticmethod
+    def _get_default_mtls_endpoint(api_endpoint):
+        """Converts api endpoint to mTLS endpoint.
+
+        Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
+        "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
+        Args:
+            api_endpoint (Optional[str]): the api endpoint to convert.
+        Returns:
+            str: converted mTLS api endpoint.
+        """
+        if not api_endpoint:
+            return api_endpoint
+
+        mtls_endpoint_re = re.compile(
+            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
+        )
+
+        m = mtls_endpoint_re.match(api_endpoint)
+        name, mtls, sandbox, googledomain = m.groups()
+        if mtls or not googledomain:
+            return api_endpoint
+
+        if sandbox:
+            return api_endpoint.replace(
+                "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
+            )
+
+        return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
+
+    # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead.
+    DEFAULT_ENDPOINT = "generativelanguage.googleapis.com"
+    DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__(  # type: ignore
+        DEFAULT_ENDPOINT
+    )
+
+    _DEFAULT_ENDPOINT_TEMPLATE = "generativelanguage.{UNIVERSE_DOMAIN}"
+    _DEFAULT_UNIVERSE = "googleapis.com"
+
+    @classmethod
+    def from_service_account_info(cls, info: dict, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            info.
+
+        Args:
+            info (dict): The service account private key info.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            FileServiceClient: The constructed client.
+        """
+        credentials = service_account.Credentials.from_service_account_info(info)
+        kwargs["credentials"] = credentials
+        return cls(*args, **kwargs)
+
+    @classmethod
+    def from_service_account_file(cls, filename: str, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            file.
+
+        Args:
+            filename (str): The path to the service account private key json
+                file.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            FileServiceClient: The constructed client.
+        """
+        credentials = service_account.Credentials.from_service_account_file(filename)
+        kwargs["credentials"] = credentials
+        return cls(*args, **kwargs)
+
+    from_service_account_json = from_service_account_file
+
+    @property
+    def transport(self) -> FileServiceTransport:
+        """Returns the transport used by the client instance.
+
+        Returns:
+            FileServiceTransport: The transport used by the client
+                instance.
+        """
+        return self._transport
+
+    @staticmethod
+    def file_path(
+        file: str,
+    ) -> str:
+        """Returns a fully-qualified file string."""
+        return "files/{file}".format(
+            file=file,
+        )
+
+    @staticmethod
+    def parse_file_path(path: str) -> Dict[str, str]:
+        """Parses a file path into its component segments."""
+        m = re.match(r"^files/(?P<file>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_billing_account_path(
+        billing_account: str,
+    ) -> str:
+        """Returns a fully-qualified billing_account string."""
+        return "billingAccounts/{billing_account}".format(
+            billing_account=billing_account,
+        )
+
+    @staticmethod
+    def parse_common_billing_account_path(path: str) -> Dict[str, str]:
+        """Parse a billing_account path into its component segments."""
+        m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_folder_path(
+        folder: str,
+    ) -> str:
+        """Returns a fully-qualified folder string."""
+        return "folders/{folder}".format(
+            folder=folder,
+        )
+
+    @staticmethod
+    def parse_common_folder_path(path: str) -> Dict[str, str]:
+        """Parse a folder path into its component segments."""
+        m = re.match(r"^folders/(?P<folder>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_organization_path(
+        organization: str,
+    ) -> str:
+        """Returns a fully-qualified organization string."""
+        return "organizations/{organization}".format(
+            organization=organization,
+        )
+
+    @staticmethod
+    def parse_common_organization_path(path: str) -> Dict[str, str]:
+        """Parse a organization path into its component segments."""
+        m = re.match(r"^organizations/(?P<organization>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_project_path(
+        project: str,
+    ) -> str:
+        """Returns a fully-qualified project string."""
+        return "projects/{project}".format(
+            project=project,
+        )
+
+    @staticmethod
+    def parse_common_project_path(path: str) -> Dict[str, str]:
+        """Parse a project path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_location_path(
+        project: str,
+        location: str,
+    ) -> str:
+        """Returns a fully-qualified location string."""
+        return "projects/{project}/locations/{location}".format(
+            project=project,
+            location=location,
+        )
+
+    @staticmethod
+    def parse_common_location_path(path: str) -> Dict[str, str]:
+        """Parse a location path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @classmethod
+    def get_mtls_endpoint_and_cert_source(
+        cls, client_options: Optional[client_options_lib.ClientOptions] = None
+    ):
+        """Deprecated. Return the API endpoint and client cert source for mutual TLS.
+
+        The client cert source is determined in the following order:
+        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+        client cert source is None.
+ (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + + warnings.warn( + "get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", + DeprecationWarning, + ) + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + @staticmethod + def _read_environment_variables(): + """Returns the environment variables used by the client. + + Returns: + Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, + GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. + + Raises: + ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not + any of ["true", "false"]. + google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT + is not any of ["auto", "never", "always"]. 
+ """ + use_client_cert = os.getenv( + "GOOGLE_API_USE_CLIENT_CERTIFICATE", "false" + ).lower() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() + universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + return use_client_cert == "true", use_mtls_endpoint, universe_domain_env + + @staticmethod + def _get_client_cert_source(provided_cert_source, use_cert_flag): + """Return the client cert source to be used by the client. + + Args: + provided_cert_source (bytes): The client certificate source provided. + use_cert_flag (bool): A flag indicating whether to use the client certificate. + + Returns: + bytes or None: The client cert source to be used by the client. + """ + client_cert_source = None + if use_cert_flag: + if provided_cert_source: + client_cert_source = provided_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + return client_cert_source + + @staticmethod + def _get_api_endpoint( + api_override, client_cert_source, universe_domain, use_mtls_endpoint + ): + """Return the API endpoint used by the client. + + Args: + api_override (str): The API endpoint override. If specified, this is always + the return value of this function and the other arguments are not used. + client_cert_source (bytes): The client certificate source used by the client. + universe_domain (str): The universe domain used by the client. + use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. + Possible values are "always", "auto", or "never". + + Returns: + str: The API endpoint to be used by the client. + """ + if api_override is not None: + api_endpoint = api_override + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + _default_universe = FileServiceClient._DEFAULT_UNIVERSE + if universe_domain != _default_universe: + raise MutualTLSChannelError( + f"mTLS is not supported in any universe other than {_default_universe}." + ) + api_endpoint = FileServiceClient.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = FileServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=universe_domain + ) + return api_endpoint + + @staticmethod + def _get_universe_domain( + client_universe_domain: Optional[str], universe_domain_env: Optional[str] + ) -> str: + """Return the universe domain used by the client. + + Args: + client_universe_domain (Optional[str]): The universe domain configured via the client options. + universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. + + Returns: + str: The universe domain to be used by the client. + + Raises: + ValueError: If the universe domain is an empty string. 
+ """ + universe_domain = FileServiceClient._DEFAULT_UNIVERSE + if client_universe_domain is not None: + universe_domain = client_universe_domain + elif universe_domain_env is not None: + universe_domain = universe_domain_env + if len(universe_domain.strip()) == 0: + raise ValueError("Universe Domain cannot be an empty string.") + return universe_domain + + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. + + Returns: + bool: True iff the configured universe domain is valid. + + Raises: + ValueError: If the configured universe domain is not valid. + """ + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True + + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used by the client instance. + """ + return self._universe_domain + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[str, FileServiceTransport, Callable[..., FileServiceTransport]] + ] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the file service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,FileServiceTransport,Callable[..., FileServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the FileServiceTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. 
Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that the ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client_options = client_options + if isinstance(self._client_options, dict): + self._client_options = client_options_lib.from_dict(self._client_options) + if self._client_options is None: + self._client_options = client_options_lib.ClientOptions() + self._client_options = cast( + client_options_lib.ClientOptions, self._client_options + ) + + universe_domain_opt = getattr(self._client_options, "universe_domain", None) + + ( + self._use_client_cert, + self._use_mtls_endpoint, + self._universe_domain_env, + ) = FileServiceClient._read_environment_variables() + self._client_cert_source = FileServiceClient._get_client_cert_source( + self._client_options.client_cert_source, self._use_client_cert + ) + self._universe_domain = FileServiceClient._get_universe_domain( + universe_domain_opt, self._universe_domain_env + ) + self._api_endpoint = None # updated below, depending on `transport` + + # Initialize the universe domain validation. + self._is_universe_domain_valid = False + + if CLIENT_LOGGING_SUPPORTED: # pragma: NO COVER + # Setup logging. + client_logging.initialize_logging() + + api_key_value = getattr(self._client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + transport_provided = isinstance(transport, FileServiceTransport) + if transport_provided: + # transport is a FileServiceTransport instance. + if credentials or self._client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if self._client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = cast(FileServiceTransport, transport) + self._api_endpoint = self._transport.host + + self._api_endpoint = self._api_endpoint or FileServiceClient._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint, + ) + + if not transport_provided: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + transport_init: Union[ + Type[FileServiceTransport], Callable[..., FileServiceTransport] + ] = ( + FileServiceClient.get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., FileServiceTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( + credentials=credentials, + credentials_file=self._client_options.credentials_file, + host=self._api_endpoint, + scopes=self._client_options.scopes, + client_cert_source_for_mtls=self._client_cert_source, + quota_project_id=self._client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=self._client_options.api_audience, + ) + + if "async" not in str(self._transport): + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ): # pragma: NO COVER + _LOGGER.debug( + "Created client `google.ai.generativelanguage_v1alpha.FileServiceClient`.", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.FileService", + "universeDomain": getattr( + self._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._transport, "_credentials") + else { + "serviceName": "google.ai.generativelanguage.v1alpha.FileService", + "credentialsType": None, + }, + ) + + def create_file( + self, + request: Optional[Union[file_service.CreateFileRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> file_service.CreateFileResponse: + r"""Creates a ``File``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1alpha + + def sample_create_file(): + # Create a client + client = generativelanguage_v1alpha.FileServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.CreateFileRequest( + ) + + # Make the request + response = client.create_file(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.ai.generativelanguage_v1alpha.types.CreateFileRequest, dict]): + The request object. Request for ``CreateFile``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.ai.generativelanguage_v1alpha.types.CreateFileResponse: + Response for CreateFile. + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, file_service.CreateFileRequest): + request = file_service.CreateFileRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_file] + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_files( + self, + request: Optional[Union[file_service.ListFilesRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListFilesPager: + r"""Lists the metadata for ``File``\ s owned by the requesting + project. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1alpha + + def sample_list_files(): + # Create a client + client = generativelanguage_v1alpha.FileServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.ListFilesRequest( + ) + + # Make the request + page_result = client.list_files(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.ai.generativelanguage_v1alpha.types.ListFilesRequest, dict]): + The request object. Request for ``ListFiles``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.ai.generativelanguage_v1alpha.services.file_service.pagers.ListFilesPager: + Response for ListFiles. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, file_service.ListFilesRequest): + request = file_service.ListFilesRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.list_files] + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListFilesPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_file( + self, + request: Optional[Union[file_service.GetFileRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> file.File: + r"""Gets the metadata for the given ``File``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1alpha + + def sample_get_file(): + # Create a client + client = generativelanguage_v1alpha.FileServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.GetFileRequest( + name="name_value", + ) + + # Make the request + response = client.get_file(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.ai.generativelanguage_v1alpha.types.GetFileRequest, dict]): + The request object. Request for ``GetFile``. + name (str): + Required. The name of the ``File`` to get. Example: + ``files/abc-123`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.ai.generativelanguage_v1alpha.types.File: + A file uploaded to the API. + Next ID: 15 + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, file_service.GetFileRequest): + request = file_service.GetFileRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
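+        # The wrapped method carries the default retry and timeout settings
+        # configured for this RPC; explicit ``retry``/``timeout`` arguments
+        # passed by the caller take precedence over those defaults.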
+ rpc = self._transport._wrapped_methods[self._transport.get_file] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_file( + self, + request: Optional[Union[file_service.DeleteFileRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Deletes the ``File``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1alpha + + def sample_delete_file(): + # Create a client + client = generativelanguage_v1alpha.FileServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.DeleteFileRequest( + name="name_value", + ) + + # Make the request + client.delete_file(request=request) + + Args: + request (Union[google.ai.generativelanguage_v1alpha.types.DeleteFileRequest, dict]): + The request object. Request for ``DeleteFile``. + name (str): + Required. The name of the ``File`` to delete. Example: + ``files/abc-123`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, file_service.DeleteFileRequest): + request = file_service.DeleteFileRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_file] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def __enter__(self) -> "FileServiceClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_operations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e + + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("FileServiceClient",) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/file_service/pagers.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/file_service/pagers.py new file mode 100644 index 000000000000..adff51f2d9a9 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/file_service/pagers.py @@ -0,0 +1,197 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import ( + Any, + AsyncIterator, + Awaitable, + Callable, + Iterator, + Optional, + Sequence, + Tuple, + Union, +) + +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core import retry_async as retries_async + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] + OptionalAsyncRetry = Union[ + retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None + ] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore + +from google.ai.generativelanguage_v1alpha.types import file, file_service + + +class ListFilesPager: + """A pager for iterating through ``list_files`` requests. + + This class thinly wraps an initial + :class:`google.ai.generativelanguage_v1alpha.types.ListFilesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``files`` field. 
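+
+    A minimal usage sketch (illustrative only; it assumes default
+    credentials are available in the environment):
+
+    .. code-block:: python
+
+        from google.ai import generativelanguage_v1alpha
+
+        client = generativelanguage_v1alpha.FileServiceClient()
+        request = generativelanguage_v1alpha.ListFilesRequest()
+        for f in client.list_files(request=request):
+            print(f.name)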
+ + If there are more pages, the ``__iter__`` method will make additional + ``ListFiles`` requests and continue to iterate + through the ``files`` field on the + corresponding responses. + + All the usual :class:`google.ai.generativelanguage_v1alpha.types.ListFilesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., file_service.ListFilesResponse], + request: file_service.ListFilesRequest, + response: file_service.ListFilesResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.ai.generativelanguage_v1alpha.types.ListFilesRequest): + The initial request object. + response (google.ai.generativelanguage_v1alpha.types.ListFilesResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + self._method = method + self._request = file_service.ListFilesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[file_service.ListFilesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__(self) -> Iterator[file.File]: + for page in self.pages: + yield from page.files + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListFilesAsyncPager: + """A pager for iterating through ``list_files`` requests. + + This class thinly wraps an initial + :class:`google.ai.generativelanguage_v1alpha.types.ListFilesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``files`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListFiles`` requests and continue to iterate + through the ``files`` field on the + corresponding responses. + + All the usual :class:`google.ai.generativelanguage_v1alpha.types.ListFilesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[file_service.ListFilesResponse]], + request: file_service.ListFilesRequest, + response: file_service.ListFilesResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): + """Instantiates the pager. 
+ + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.ai.generativelanguage_v1alpha.types.ListFilesRequest): + The initial request object. + response (google.ai.generativelanguage_v1alpha.types.ListFilesResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + self._method = method + self._request = file_service.ListFilesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[file_service.ListFilesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __aiter__(self) -> AsyncIterator[file.File]: + async def async_generator(): + async for page in self.pages: + for response in page.files: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/file_service/transports/README.rst b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/file_service/transports/README.rst new file mode 100644 index 000000000000..f61177d3c2c0 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/file_service/transports/README.rst @@ -0,0 +1,9 @@ + +transport inheritance structure +_______________________________ + +`FileServiceTransport` is the ABC for all transports. +- public child `FileServiceGrpcTransport` for sync gRPC transport (defined in `grpc.py`). +- public child `FileServiceGrpcAsyncIOTransport` for async gRPC transport (defined in `grpc_asyncio.py`). +- private child `_BaseFileServiceRestTransport` for base REST transport with inner classes `_BaseMETHOD` (defined in `rest_base.py`). +- public child `FileServiceRestTransport` for sync REST transport with inner classes `METHOD` derived from the parent's corresponding `_BaseMETHOD` classes (defined in `rest.py`). diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/file_service/transports/__init__.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/file_service/transports/__init__.py new file mode 100644 index 000000000000..4ea65f1e94d8 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/file_service/transports/__init__.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import FileServiceTransport +from .grpc import FileServiceGrpcTransport +from .grpc_asyncio import FileServiceGrpcAsyncIOTransport +from .rest import FileServiceRestInterceptor, FileServiceRestTransport + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[FileServiceTransport]] +_transport_registry["grpc"] = FileServiceGrpcTransport +_transport_registry["grpc_asyncio"] = FileServiceGrpcAsyncIOTransport +_transport_registry["rest"] = FileServiceRestTransport + +__all__ = ( + "FileServiceTransport", + "FileServiceGrpcTransport", + "FileServiceGrpcAsyncIOTransport", + "FileServiceRestTransport", + "FileServiceRestInterceptor", +) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/file_service/transports/base.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/file_service/transports/base.py new file mode 100644 index 000000000000..6730b2082aef --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/file_service/transports/base.py @@ -0,0 +1,239 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
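The ``_transport_registry`` above is how the string values ``"grpc"``, ``"grpc_asyncio"``, and ``"rest"`` passed as a ``transport`` argument are resolved to concrete classes. A sketch of that lookup pattern is below; the helper function and the anonymous-credentials shortcut are illustrative assumptions, and real code normally lets ``FileServiceClient`` construct the transport itself.

.. code-block:: python

    # Illustrative sketch of resolving a transport name to a class, mirroring
    # the OrderedDict registry above; not part of the generated sources.
    from google.auth import credentials as ga_credentials

    from google.ai.generativelanguage_v1alpha.services.file_service import transports


    def make_transport(name: str = "rest") -> transports.FileServiceTransport:
        registry = {
            "grpc": transports.FileServiceGrpcTransport,
            "grpc_asyncio": transports.FileServiceGrpcAsyncIOTransport,
            "rest": transports.FileServiceRestTransport,
        }
        # Anonymous credentials avoid an ADC lookup here; for real traffic pass
        # proper credentials, or simply use FileServiceClient(transport=name).
        # The grpc_asyncio transport is best created from within a running
        # event loop.
        return registry[name](credentials=ga_credentials.AnonymousCredentials())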
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account # type: ignore +from google.protobuf import empty_pb2 # type: ignore + +from google.ai.generativelanguage_v1alpha import gapic_version as package_version +from google.ai.generativelanguage_v1alpha.types import file, file_service + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class FileServiceTransport(abc.ABC): + """Abstract transport class for FileService.""" + + AUTH_SCOPES = () + + DEFAULT_HOST: str = "generativelanguage.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'generativelanguage.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + if not hasattr(self, "_ignore_credentials"): + self._ignore_credentials: bool = False + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None and not self._ignore_credentials: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. 
+ if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + @property + def host(self): + return self._host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.create_file: gapic_v1.method.wrap_method( + self.create_file, + default_timeout=None, + client_info=client_info, + ), + self.list_files: gapic_v1.method.wrap_method( + self.list_files, + default_timeout=None, + client_info=client_info, + ), + self.get_file: gapic_v1.method.wrap_method( + self.get_file, + default_timeout=None, + client_info=client_info, + ), + self.delete_file: gapic_v1.method.wrap_method( + self.delete_file, + default_timeout=None, + client_info=client_info, + ), + self.get_operation: gapic_v1.method.wrap_method( + self.get_operation, + default_timeout=None, + client_info=client_info, + ), + self.list_operations: gapic_v1.method.wrap_method( + self.list_operations, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + + @property + def create_file( + self, + ) -> Callable[ + [file_service.CreateFileRequest], + Union[ + file_service.CreateFileResponse, Awaitable[file_service.CreateFileResponse] + ], + ]: + raise NotImplementedError() + + @property + def list_files( + self, + ) -> Callable[ + [file_service.ListFilesRequest], + Union[ + file_service.ListFilesResponse, Awaitable[file_service.ListFilesResponse] + ], + ]: + raise NotImplementedError() + + @property + def get_file( + self, + ) -> Callable[ + [file_service.GetFileRequest], Union[file.File, Awaitable[file.File]] + ]: + raise NotImplementedError() + + @property + def delete_file( + self, + ) -> Callable[ + [file_service.DeleteFileRequest], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], + ]: + raise NotImplementedError() + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], + Union[ + operations_pb2.ListOperationsResponse, + Awaitable[operations_pb2.ListOperationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_operation( + self, + ) -> Callable[ + [operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("FileServiceTransport",) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/file_service/transports/grpc.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/file_service/transports/grpc.py new file mode 100644 index 000000000000..46de87d6157e --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/file_service/transports/grpc.py @@ -0,0 +1,472 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import json +import logging as std_logging +import pickle +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf.json_format import MessageToJson +import google.protobuf.message +import grpc # type: ignore +import proto # type: ignore + +from google.ai.generativelanguage_v1alpha.types import file, file_service + +from .base import DEFAULT_CLIENT_INFO, FileServiceTransport + +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + + +class _LoggingClientInterceptor(grpc.UnaryUnaryClientInterceptor): # pragma: NO COVER + def intercept_unary_unary(self, continuation, client_call_details, request): + logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ) + if logging_enabled: # pragma: NO COVER + request_metadata = client_call_details.metadata + if isinstance(request, proto.Message): + request_payload = type(request).to_json(request) + elif isinstance(request, google.protobuf.message.Message): + request_payload = MessageToJson(request) + else: + request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" + + request_metadata = { + key: value.decode("utf-8") if isinstance(value, bytes) else value + for key, value in request_metadata + } + grpc_request = { + "payload": request_payload, + "requestMethod": "grpc", + "metadata": dict(request_metadata), + } + _LOGGER.debug( + f"Sending request for {client_call_details.method}", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.FileService", + "rpcName": client_call_details.method, + "request": grpc_request, + "metadata": grpc_request["metadata"], + }, + ) + + response = continuation(client_call_details, request) + if logging_enabled: # pragma: NO COVER + response_metadata = response.trailing_metadata() + # Convert gRPC metadata `` to list of tuples + metadata = ( + dict([(k, str(v)) for k, v in response_metadata]) + if response_metadata + else None + ) + result = response.result() + if isinstance(result, proto.Message): + response_payload = type(result).to_json(result) + elif isinstance(result, google.protobuf.message.Message): + response_payload = MessageToJson(result) + else: + response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" + grpc_response = { + "payload": response_payload, + "metadata": metadata, + "status": "OK", + } + _LOGGER.debug( + f"Received response for {client_call_details.method}.", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.FileService", + "rpcName": client_call_details.method, + "response": grpc_response, + "metadata": grpc_response["metadata"], + }, + ) + return response + + +class FileServiceGrpcTransport(FileServiceTransport): + """gRPC backend transport for FileService. + + An API for uploading and managing files. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. 
+ + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "generativelanguage.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'generativelanguage.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if a ``channel`` instance is provided. + channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. 
+ + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, grpc.Channel): + # Ignore credentials if a channel was passed. + credentials = None + self._ignore_credentials = True + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + self._interceptor = _LoggingClientInterceptor() + self._logged_channel = grpc.intercept_channel( + self._grpc_channel, self._interceptor + ) + + # Wrap messages. This must be done after self._logged_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "generativelanguage.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. 
+ scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service.""" + return self._grpc_channel + + @property + def create_file( + self, + ) -> Callable[[file_service.CreateFileRequest], file_service.CreateFileResponse]: + r"""Return a callable for the create file method over gRPC. + + Creates a ``File``. + + Returns: + Callable[[~.CreateFileRequest], + ~.CreateFileResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_file" not in self._stubs: + self._stubs["create_file"] = self._logged_channel.unary_unary( + "/google.ai.generativelanguage.v1alpha.FileService/CreateFile", + request_serializer=file_service.CreateFileRequest.serialize, + response_deserializer=file_service.CreateFileResponse.deserialize, + ) + return self._stubs["create_file"] + + @property + def list_files( + self, + ) -> Callable[[file_service.ListFilesRequest], file_service.ListFilesResponse]: + r"""Return a callable for the list files method over gRPC. + + Lists the metadata for ``File``\ s owned by the requesting + project. + + Returns: + Callable[[~.ListFilesRequest], + ~.ListFilesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_files" not in self._stubs: + self._stubs["list_files"] = self._logged_channel.unary_unary( + "/google.ai.generativelanguage.v1alpha.FileService/ListFiles", + request_serializer=file_service.ListFilesRequest.serialize, + response_deserializer=file_service.ListFilesResponse.deserialize, + ) + return self._stubs["list_files"] + + @property + def get_file(self) -> Callable[[file_service.GetFileRequest], file.File]: + r"""Return a callable for the get file method over gRPC. + + Gets the metadata for the given ``File``. + + Returns: + Callable[[~.GetFileRequest], + ~.File]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_file" not in self._stubs: + self._stubs["get_file"] = self._logged_channel.unary_unary( + "/google.ai.generativelanguage.v1alpha.FileService/GetFile", + request_serializer=file_service.GetFileRequest.serialize, + response_deserializer=file.File.deserialize, + ) + return self._stubs["get_file"] + + @property + def delete_file( + self, + ) -> Callable[[file_service.DeleteFileRequest], empty_pb2.Empty]: + r"""Return a callable for the delete file method over gRPC. + + Deletes the ``File``. + + Returns: + Callable[[~.DeleteFileRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_file" not in self._stubs: + self._stubs["delete_file"] = self._logged_channel.unary_unary( + "/google.ai.generativelanguage.v1alpha.FileService/DeleteFile", + request_serializer=file_service.DeleteFileRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_file"] + + def close(self): + self._logged_channel.close() + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("FileServiceGrpcTransport",) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/file_service/transports/grpc_asyncio.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/file_service/transports/grpc_asyncio.py new file mode 100644 index 000000000000..c1d9aa30ce58 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/file_service/transports/grpc_asyncio.py @@ -0,0 +1,523 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import inspect +import json +import logging as std_logging +import pickle +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, grpc_helpers_async +from google.api_core import retry_async as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf.json_format import MessageToJson +import google.protobuf.message +import grpc # type: ignore +from grpc.experimental import aio # type: ignore +import proto # type: ignore + +from google.ai.generativelanguage_v1alpha.types import file, file_service + +from .base import DEFAULT_CLIENT_INFO, FileServiceTransport +from .grpc import FileServiceGrpcTransport + +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + + +class _LoggingClientAIOInterceptor( + grpc.aio.UnaryUnaryClientInterceptor +): # pragma: NO COVER + async def intercept_unary_unary(self, continuation, client_call_details, request): + logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ) + if logging_enabled: # pragma: NO COVER + request_metadata = client_call_details.metadata + if isinstance(request, proto.Message): + request_payload = type(request).to_json(request) + elif isinstance(request, google.protobuf.message.Message): + request_payload = MessageToJson(request) + else: + request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" + + request_metadata = { + key: value.decode("utf-8") if isinstance(value, bytes) else value + for key, value in request_metadata + } + grpc_request = { + "payload": request_payload, + "requestMethod": "grpc", + "metadata": dict(request_metadata), + } + _LOGGER.debug( + f"Sending request for {client_call_details.method}", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.FileService", + "rpcName": str(client_call_details.method), + "request": grpc_request, + "metadata": grpc_request["metadata"], + }, + ) + response = await continuation(client_call_details, request) + if logging_enabled: # pragma: NO COVER + response_metadata = await response.trailing_metadata() + # Convert gRPC metadata `` to list of tuples + metadata = ( + dict([(k, str(v)) for k, v in response_metadata]) + if response_metadata + else None + ) + result = await response + if isinstance(result, proto.Message): + response_payload = type(result).to_json(result) + elif isinstance(result, google.protobuf.message.Message): + response_payload = MessageToJson(result) + else: + response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" + grpc_response = { + "payload": response_payload, + "metadata": metadata, + "status": "OK", + } + _LOGGER.debug( + f"Received 
response to rpc {client_call_details.method}.", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.FileService", + "rpcName": str(client_call_details.method), + "response": grpc_response, + "metadata": grpc_response["metadata"], + }, + ) + return response + + +class FileServiceGrpcAsyncIOTransport(FileServiceTransport): + """gRPC AsyncIO backend transport for FileService. + + An API for uploading and managing files. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "generativelanguage.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. + """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "generativelanguage.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'generativelanguage.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. 
+ This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, aio.Channel): + # Ignore credentials if a channel was passed. + credentials = None + self._ignore_credentials = True + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + self._interceptor = _LoggingClientAIOInterceptor() + self._grpc_channel._unary_unary_interceptors.append(self._interceptor) + self._logged_channel = self._grpc_channel + self._wrap_with_kind = ( + "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters + ) + # Wrap messages. This must be done after self._logged_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def create_file( + self, + ) -> Callable[ + [file_service.CreateFileRequest], Awaitable[file_service.CreateFileResponse] + ]: + r"""Return a callable for the create file method over gRPC. + + Creates a ``File``. + + Returns: + Callable[[~.CreateFileRequest], + Awaitable[~.CreateFileResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_file" not in self._stubs: + self._stubs["create_file"] = self._logged_channel.unary_unary( + "/google.ai.generativelanguage.v1alpha.FileService/CreateFile", + request_serializer=file_service.CreateFileRequest.serialize, + response_deserializer=file_service.CreateFileResponse.deserialize, + ) + return self._stubs["create_file"] + + @property + def list_files( + self, + ) -> Callable[ + [file_service.ListFilesRequest], Awaitable[file_service.ListFilesResponse] + ]: + r"""Return a callable for the list files method over gRPC. + + Lists the metadata for ``File``\ s owned by the requesting + project. + + Returns: + Callable[[~.ListFilesRequest], + Awaitable[~.ListFilesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_files" not in self._stubs: + self._stubs["list_files"] = self._logged_channel.unary_unary( + "/google.ai.generativelanguage.v1alpha.FileService/ListFiles", + request_serializer=file_service.ListFilesRequest.serialize, + response_deserializer=file_service.ListFilesResponse.deserialize, + ) + return self._stubs["list_files"] + + @property + def get_file(self) -> Callable[[file_service.GetFileRequest], Awaitable[file.File]]: + r"""Return a callable for the get file method over gRPC. + + Gets the metadata for the given ``File``. + + Returns: + Callable[[~.GetFileRequest], + Awaitable[~.File]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_file" not in self._stubs: + self._stubs["get_file"] = self._logged_channel.unary_unary( + "/google.ai.generativelanguage.v1alpha.FileService/GetFile", + request_serializer=file_service.GetFileRequest.serialize, + response_deserializer=file.File.deserialize, + ) + return self._stubs["get_file"] + + @property + def delete_file( + self, + ) -> Callable[[file_service.DeleteFileRequest], Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete file method over gRPC. + + Deletes the ``File``. + + Returns: + Callable[[~.DeleteFileRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "delete_file" not in self._stubs: + self._stubs["delete_file"] = self._logged_channel.unary_unary( + "/google.ai.generativelanguage.v1alpha.FileService/DeleteFile", + request_serializer=file_service.DeleteFileRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_file"] + + def _prep_wrapped_messages(self, client_info): + """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + self._wrapped_methods = { + self.create_file: self._wrap_method( + self.create_file, + default_timeout=None, + client_info=client_info, + ), + self.list_files: self._wrap_method( + self.list_files, + default_timeout=None, + client_info=client_info, + ), + self.get_file: self._wrap_method( + self.get_file, + default_timeout=None, + client_info=client_info, + ), + self.delete_file: self._wrap_method( + self.delete_file, + default_timeout=None, + client_info=client_info, + ), + self.get_operation: self._wrap_method( + self.get_operation, + default_timeout=None, + client_info=client_info, + ), + self.list_operations: self._wrap_method( + self.list_operations, + default_timeout=None, + client_info=client_info, + ), + } + + def _wrap_method(self, func, *args, **kwargs): + if self._wrap_with_kind: # pragma: NO COVER + kwargs["kind"] = self.kind + return gapic_v1.method_async.wrap_method(func, *args, **kwargs) + + def close(self): + return self._logged_channel.close() + + @property + def kind(self) -> str: + return "grpc_asyncio" + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + +__all__ = ("FileServiceGrpcAsyncIOTransport",) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/file_service/transports/rest.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/file_service/transports/rest.py new file mode 100644 index 000000000000..62059fbfb08a --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/file_service/transports/rest.py @@ -0,0 +1,1272 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import dataclasses +import json # type: ignore +import logging +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, rest_helpers, rest_streaming +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import json_format +from requests import __version__ as requests_version + +from google.ai.generativelanguage_v1alpha.types import file, file_service + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .rest_base import _BaseFileServiceRestTransport + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = logging.getLogger(__name__) + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=f"requests@{requests_version}", +) + + +class FileServiceRestInterceptor: + """Interceptor for FileService. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the FileServiceRestTransport. + + .. 
code-block:: python + class MyCustomFileServiceInterceptor(FileServiceRestInterceptor): + def pre_create_file(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_file(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_file(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_get_file(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_file(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_files(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_files(self, response): + logging.log(f"Received response: {response}") + return response + + transport = FileServiceRestTransport(interceptor=MyCustomFileServiceInterceptor()) + client = FileServiceClient(transport=transport) + + + """ + + def pre_create_file( + self, + request: file_service.CreateFileRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[file_service.CreateFileRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for create_file + + Override in a subclass to manipulate the request or metadata + before they are sent to the FileService server. + """ + return request, metadata + + def post_create_file( + self, response: file_service.CreateFileResponse + ) -> file_service.CreateFileResponse: + """Post-rpc interceptor for create_file + + DEPRECATED. Please use the `post_create_file_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the FileService server but before + it is returned to user code. This `post_create_file` interceptor runs + before the `post_create_file_with_metadata` interceptor. + """ + return response + + def post_create_file_with_metadata( + self, + response: file_service.CreateFileResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + file_service.CreateFileResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for create_file + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the FileService server but before it is returned to user code. + + We recommend only using this `post_create_file_with_metadata` + interceptor in new development instead of the `post_create_file` interceptor. + When both interceptors are used, this `post_create_file_with_metadata` interceptor runs after the + `post_create_file` interceptor. The (possibly modified) response returned by + `post_create_file` will be passed to + `post_create_file_with_metadata`. + """ + return response, metadata + + def pre_delete_file( + self, + request: file_service.DeleteFileRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[file_service.DeleteFileRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for delete_file + + Override in a subclass to manipulate the request or metadata + before they are sent to the FileService server. 
+ """ + return request, metadata + + def pre_get_file( + self, + request: file_service.GetFileRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[file_service.GetFileRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for get_file + + Override in a subclass to manipulate the request or metadata + before they are sent to the FileService server. + """ + return request, metadata + + def post_get_file(self, response: file.File) -> file.File: + """Post-rpc interceptor for get_file + + DEPRECATED. Please use the `post_get_file_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the FileService server but before + it is returned to user code. This `post_get_file` interceptor runs + before the `post_get_file_with_metadata` interceptor. + """ + return response + + def post_get_file_with_metadata( + self, response: file.File, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[file.File, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_file + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the FileService server but before it is returned to user code. + + We recommend only using this `post_get_file_with_metadata` + interceptor in new development instead of the `post_get_file` interceptor. + When both interceptors are used, this `post_get_file_with_metadata` interceptor runs after the + `post_get_file` interceptor. The (possibly modified) response returned by + `post_get_file` will be passed to + `post_get_file_with_metadata`. + """ + return response, metadata + + def pre_list_files( + self, + request: file_service.ListFilesRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[file_service.ListFilesRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for list_files + + Override in a subclass to manipulate the request or metadata + before they are sent to the FileService server. + """ + return request, metadata + + def post_list_files( + self, response: file_service.ListFilesResponse + ) -> file_service.ListFilesResponse: + """Post-rpc interceptor for list_files + + DEPRECATED. Please use the `post_list_files_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the FileService server but before + it is returned to user code. This `post_list_files` interceptor runs + before the `post_list_files_with_metadata` interceptor. + """ + return response + + def post_list_files_with_metadata( + self, + response: file_service.ListFilesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[file_service.ListFilesResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_files + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the FileService server but before it is returned to user code. + + We recommend only using this `post_list_files_with_metadata` + interceptor in new development instead of the `post_list_files` interceptor. + When both interceptors are used, this `post_list_files_with_metadata` interceptor runs after the + `post_list_files` interceptor. The (possibly modified) response returned by + `post_list_files` will be passed to + `post_list_files_with_metadata`. 
+ """ + return response, metadata + + def pre_get_operation( + self, + request: operations_pb2.GetOperationRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + operations_pb2.GetOperationRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for get_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the FileService server. + """ + return request, metadata + + def post_get_operation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for get_operation + + Override in a subclass to manipulate the response + after it is returned by the FileService server but before + it is returned to user code. + """ + return response + + def pre_list_operations( + self, + request: operations_pb2.ListOperationsRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + operations_pb2.ListOperationsRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for list_operations + + Override in a subclass to manipulate the request or metadata + before they are sent to the FileService server. + """ + return request, metadata + + def post_list_operations( + self, response: operations_pb2.ListOperationsResponse + ) -> operations_pb2.ListOperationsResponse: + """Post-rpc interceptor for list_operations + + Override in a subclass to manipulate the response + after it is returned by the FileService server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class FileServiceRestStub: + _session: AuthorizedSession + _host: str + _interceptor: FileServiceRestInterceptor + + +class FileServiceRestTransport(_BaseFileServiceRestTransport): + """REST backend synchronous transport for FileService. + + An API for uploading and managing files. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + """ + + def __init__( + self, + *, + host: str = "generativelanguage.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[FileServiceRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'generativelanguage.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. 
It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + url_scheme=url_scheme, + api_audience=api_audience, + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST + ) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or FileServiceRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _CreateFile( + _BaseFileServiceRestTransport._BaseCreateFile, FileServiceRestStub + ): + def __hash__(self): + return hash("FileServiceRestTransport.CreateFile") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: file_service.CreateFileRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> file_service.CreateFileResponse: + r"""Call the create file method over HTTP. + + Args: + request (~.file_service.CreateFileRequest): + The request object. Request for ``CreateFile``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.file_service.CreateFileResponse: + Response for ``CreateFile``. 
+ """ + + http_options = ( + _BaseFileServiceRestTransport._BaseCreateFile._get_http_options() + ) + + request, metadata = self._interceptor.pre_create_file(request, metadata) + transcoded_request = ( + _BaseFileServiceRestTransport._BaseCreateFile._get_transcoded_request( + http_options, request + ) + ) + + body = _BaseFileServiceRestTransport._BaseCreateFile._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = ( + _BaseFileServiceRestTransport._BaseCreateFile._get_query_params_json( + transcoded_request + ) + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.ai.generativelanguage_v1alpha.FileServiceClient.CreateFile", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.FileService", + "rpcName": "CreateFile", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = FileServiceRestTransport._CreateFile._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = file_service.CreateFileResponse() + pb_resp = file_service.CreateFileResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_create_file(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_file_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = file_service.CreateFileResponse.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.ai.generativelanguage_v1alpha.FileServiceClient.create_file", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.FileService", + "rpcName": "CreateFile", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _DeleteFile( + _BaseFileServiceRestTransport._BaseDeleteFile, FileServiceRestStub + ): + def __hash__(self): + return hash("FileServiceRestTransport.DeleteFile") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: file_service.DeleteFileRequest, + *, + retry: OptionalRetry = 
gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ): + r"""Call the delete file method over HTTP. + + Args: + request (~.file_service.DeleteFileRequest): + The request object. Request for ``DeleteFile``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + + http_options = ( + _BaseFileServiceRestTransport._BaseDeleteFile._get_http_options() + ) + + request, metadata = self._interceptor.pre_delete_file(request, metadata) + transcoded_request = ( + _BaseFileServiceRestTransport._BaseDeleteFile._get_transcoded_request( + http_options, request + ) + ) + + # Jsonify the query params + query_params = ( + _BaseFileServiceRestTransport._BaseDeleteFile._get_query_params_json( + transcoded_request + ) + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.ai.generativelanguage_v1alpha.FileServiceClient.DeleteFile", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.FileService", + "rpcName": "DeleteFile", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = FileServiceRestTransport._DeleteFile._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _GetFile(_BaseFileServiceRestTransport._BaseGetFile, FileServiceRestStub): + def __hash__(self): + return hash("FileServiceRestTransport.GetFile") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: file_service.GetFileRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> file.File: + r"""Call the get file method over HTTP. + + Args: + request (~.file_service.GetFileRequest): + The request object. Request for ``GetFile``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.file.File: + A file uploaded to the API. + Next ID: 15 + + """ + + http_options = ( + _BaseFileServiceRestTransport._BaseGetFile._get_http_options() + ) + + request, metadata = self._interceptor.pre_get_file(request, metadata) + transcoded_request = ( + _BaseFileServiceRestTransport._BaseGetFile._get_transcoded_request( + http_options, request + ) + ) + + # Jsonify the query params + query_params = ( + _BaseFileServiceRestTransport._BaseGetFile._get_query_params_json( + transcoded_request + ) + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.ai.generativelanguage_v1alpha.FileServiceClient.GetFile", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.FileService", + "rpcName": "GetFile", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = FileServiceRestTransport._GetFile._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
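# Caller-side sketch, illustrative rather than generated code: because any HTTP
# status >= 400 is turned into a GoogleAPICallError subclass via
# core_exceptions.from_http_response(), a missing file surfaces as NotFound.
# Assumes `client` is a FileServiceClient built on this REST transport; the
# file id is a placeholder.
from google.api_core import exceptions as core_exceptions

try:
    found = client.get_file(request={"name": "files/example-id"})
except core_exceptions.NotFound:
    found = None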
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = file.File() + pb_resp = file.File.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_get_file(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_file_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = file.File.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.ai.generativelanguage_v1alpha.FileServiceClient.get_file", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.FileService", + "rpcName": "GetFile", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _ListFiles(_BaseFileServiceRestTransport._BaseListFiles, FileServiceRestStub): + def __hash__(self): + return hash("FileServiceRestTransport.ListFiles") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: file_service.ListFilesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> file_service.ListFilesResponse: + r"""Call the list files method over HTTP. + + Args: + request (~.file_service.ListFilesRequest): + The request object. Request for ``ListFiles``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.file_service.ListFilesResponse: + Response for ``ListFiles``. 
+ """ + + http_options = ( + _BaseFileServiceRestTransport._BaseListFiles._get_http_options() + ) + + request, metadata = self._interceptor.pre_list_files(request, metadata) + transcoded_request = ( + _BaseFileServiceRestTransport._BaseListFiles._get_transcoded_request( + http_options, request + ) + ) + + # Jsonify the query params + query_params = ( + _BaseFileServiceRestTransport._BaseListFiles._get_query_params_json( + transcoded_request + ) + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.ai.generativelanguage_v1alpha.FileServiceClient.ListFiles", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.FileService", + "rpcName": "ListFiles", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = FileServiceRestTransport._ListFiles._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = file_service.ListFilesResponse() + pb_resp = file_service.ListFilesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_list_files(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_files_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = file_service.ListFilesResponse.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.ai.generativelanguage_v1alpha.FileServiceClient.list_files", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.FileService", + "rpcName": "ListFiles", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + @property + def create_file( + self, + ) -> Callable[[file_service.CreateFileRequest], file_service.CreateFileResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateFile(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_file( + self, + ) -> Callable[[file_service.DeleteFileRequest], empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteFile(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_file(self) -> Callable[[file_service.GetFileRequest], file.File]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._GetFile(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_files( + self, + ) -> Callable[[file_service.ListFilesRequest], file_service.ListFilesResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListFiles(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_operation(self): + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + + class _GetOperation( + _BaseFileServiceRestTransport._BaseGetOperation, FileServiceRestStub + ): + def __hash__(self): + return hash("FileServiceRestTransport.GetOperation") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: operations_pb2.GetOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Call the get operation method over HTTP. + + Args: + request (operations_pb2.GetOperationRequest): + The request object for GetOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + operations_pb2.Operation: Response from GetOperation method. 
+ """ + + http_options = ( + _BaseFileServiceRestTransport._BaseGetOperation._get_http_options() + ) + + request, metadata = self._interceptor.pre_get_operation(request, metadata) + transcoded_request = ( + _BaseFileServiceRestTransport._BaseGetOperation._get_transcoded_request( + http_options, request + ) + ) + + # Jsonify the query params + query_params = ( + _BaseFileServiceRestTransport._BaseGetOperation._get_query_params_json( + transcoded_request + ) + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.ai.generativelanguage_v1alpha.FileServiceClient.GetOperation", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.FileService", + "rpcName": "GetOperation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = FileServiceRestTransport._GetOperation._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = operations_pb2.Operation() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_get_operation(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.ai.generativelanguage_v1alpha.FileServiceAsyncClient.GetOperation", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.FileService", + "rpcName": "GetOperation", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) + return resp + + @property + def list_operations(self): + return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore + + class _ListOperations( + _BaseFileServiceRestTransport._BaseListOperations, FileServiceRestStub + ): + def __hash__(self): + return hash("FileServiceRestTransport.ListOperations") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: operations_pb2.ListOperationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Call the list operations method over 
HTTP. + + Args: + request (operations_pb2.ListOperationsRequest): + The request object for ListOperations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + operations_pb2.ListOperationsResponse: Response from ListOperations method. + """ + + http_options = ( + _BaseFileServiceRestTransport._BaseListOperations._get_http_options() + ) + + request, metadata = self._interceptor.pre_list_operations(request, metadata) + transcoded_request = _BaseFileServiceRestTransport._BaseListOperations._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseFileServiceRestTransport._BaseListOperations._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.ai.generativelanguage_v1alpha.FileServiceClient.ListOperations", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.FileService", + "rpcName": "ListOperations", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = FileServiceRestTransport._ListOperations._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
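# A small illustrative sketch (not generated code) of what the
# _get_transcoded_request helpers do via google.api_core.path_template.transcode.
# The generated code passes the protobuf request itself; plain keyword arguments
# are used here only to keep the example self-contained.
from google.api_core import path_template

transcoded = path_template.transcode(
    [{"method": "get", "uri": "/v1alpha/{name=files/*}"}],
    **{"name": "files/example-id"},
)
assert transcoded["method"] == "get"
assert transcoded["uri"] == "/v1alpha/files/example-id"
# transcoded["query_params"] holds any request fields not bound into the path ({} here).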
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = operations_pb2.ListOperationsResponse() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_list_operations(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.ai.generativelanguage_v1alpha.FileServiceAsyncClient.ListOperations", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.FileService", + "rpcName": "ListOperations", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) + return resp + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("FileServiceRestTransport",) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/file_service/transports/rest_base.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/file_service/transports/rest_base.py new file mode 100644 index 000000000000..0d97db6b8db0 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/file_service/transports/rest_base.py @@ -0,0 +1,323 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union + +from google.api_core import gapic_v1, path_template +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import json_format + +from google.ai.generativelanguage_v1alpha.types import file, file_service + +from .base import DEFAULT_CLIENT_INFO, FileServiceTransport + + +class _BaseFileServiceRestTransport(FileServiceTransport): + """Base REST backend transport for FileService. + + Note: This class is not meant to be used directly. Use its sync and + async sub-classes instead. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + """ + + def __init__( + self, + *, + host: str = "generativelanguage.googleapis.com", + credentials: Optional[Any] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + Args: + host (Optional[str]): + The hostname to connect to (default: 'generativelanguage.googleapis.com'). 
+ credentials (Optional[Any]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) + if maybe_url_match is None: + raise ValueError( + f"Unexpected hostname structure: {host}" + ) # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + class _BaseCreateFile: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1alpha/files", + "body": "*", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = file_service.CreateFileRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseDeleteFile: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1alpha/{name=files/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = file_service.DeleteFileRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseFileServiceRestTransport._BaseDeleteFile._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGetFile: + def __hash__(self): # pragma: 
NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha/{name=files/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = file_service.GetFileRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseFileServiceRestTransport._BaseGetFile._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseListFiles: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha/files", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = file_service.ListFilesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGetOperation: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha/{name=tunedModels/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1alpha/{name=generatedFiles/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1alpha/{name=models/*/operations/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + return query_params + + class _BaseListOperations: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha/{name=tunedModels/*}/operations", + }, + { + "method": "get", + "uri": "/v1alpha/{name=models/*}/operations", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = 
json.loads(json.dumps(transcoded_request["query_params"])) + return query_params + + +__all__ = ("_BaseFileServiceRestTransport",) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/generative_service/__init__.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/generative_service/__init__.py new file mode 100644 index 000000000000..221b8e46bdaa --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/generative_service/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .async_client import GenerativeServiceAsyncClient +from .client import GenerativeServiceClient + +__all__ = ( + "GenerativeServiceClient", + "GenerativeServiceAsyncClient", +) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/generative_service/async_client.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/generative_service/async_client.py new file mode 100644 index 000000000000..863c225af329 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/generative_service/async_client.py @@ -0,0 +1,1366 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import logging as std_logging +import re +from typing import ( + AsyncIterable, + AsyncIterator, + Awaitable, + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry_async as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.ai.generativelanguage_v1alpha import gapic_version as package_version + +try: + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore + +from google.longrunning import operations_pb2 # type: ignore + +from google.ai.generativelanguage_v1alpha.types import generative_service, safety +from google.ai.generativelanguage_v1alpha.types import content +from google.ai.generativelanguage_v1alpha.types import content as gag_content + +from .client import GenerativeServiceClient +from .transports.base import DEFAULT_CLIENT_INFO, GenerativeServiceTransport +from .transports.grpc_asyncio import GenerativeServiceGrpcAsyncIOTransport + +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + + +class GenerativeServiceAsyncClient: + """API for using Large Models that generate multimodal content + and have additional capabilities beyond text generation. + """ + + _client: GenerativeServiceClient + + # Copy defaults from the synchronous client for use here. + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. 
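# Illustrative usage sketch, not generated code: like its synchronous
# counterpart, this async client can be constructed from application default
# credentials or, as assumed below, from a service account key file (the path
# is a placeholder).
client = GenerativeServiceAsyncClient.from_service_account_file("/path/to/key.json")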
+ DEFAULT_ENDPOINT = GenerativeServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = GenerativeServiceClient.DEFAULT_MTLS_ENDPOINT + _DEFAULT_ENDPOINT_TEMPLATE = GenerativeServiceClient._DEFAULT_ENDPOINT_TEMPLATE + _DEFAULT_UNIVERSE = GenerativeServiceClient._DEFAULT_UNIVERSE + + cached_content_path = staticmethod(GenerativeServiceClient.cached_content_path) + parse_cached_content_path = staticmethod( + GenerativeServiceClient.parse_cached_content_path + ) + model_path = staticmethod(GenerativeServiceClient.model_path) + parse_model_path = staticmethod(GenerativeServiceClient.parse_model_path) + common_billing_account_path = staticmethod( + GenerativeServiceClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + GenerativeServiceClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(GenerativeServiceClient.common_folder_path) + parse_common_folder_path = staticmethod( + GenerativeServiceClient.parse_common_folder_path + ) + common_organization_path = staticmethod( + GenerativeServiceClient.common_organization_path + ) + parse_common_organization_path = staticmethod( + GenerativeServiceClient.parse_common_organization_path + ) + common_project_path = staticmethod(GenerativeServiceClient.common_project_path) + parse_common_project_path = staticmethod( + GenerativeServiceClient.parse_common_project_path + ) + common_location_path = staticmethod(GenerativeServiceClient.common_location_path) + parse_common_location_path = staticmethod( + GenerativeServiceClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + GenerativeServiceAsyncClient: The constructed client. + """ + return GenerativeServiceClient.from_service_account_info.__func__(GenerativeServiceAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + GenerativeServiceAsyncClient: The constructed client. + """ + return GenerativeServiceClient.from_service_account_file.__func__(GenerativeServiceAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. 
+ (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return GenerativeServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> GenerativeServiceTransport: + """Returns the transport used by the client instance. + + Returns: + GenerativeServiceTransport: The transport used by the client instance. + """ + return self._client.transport + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._client._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used + by the client instance. + """ + return self._client._universe_domain + + get_transport_class = GenerativeServiceClient.get_transport_class + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[ + str, + GenerativeServiceTransport, + Callable[..., GenerativeServiceTransport], + ] + ] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the generative service async client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,GenerativeServiceTransport,Callable[..., GenerativeServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport to use. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the GenerativeServiceTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. 
If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = GenerativeServiceClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ): # pragma: NO COVER + _LOGGER.debug( + "Created client `google.ai.generativelanguage_v1alpha.GenerativeServiceAsyncClient`.", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.GenerativeService", + "universeDomain": getattr( + self._client._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._client._transport, "_credentials") + else { + "serviceName": "google.ai.generativelanguage.v1alpha.GenerativeService", + "credentialsType": None, + }, + ) + + async def generate_content( + self, + request: Optional[ + Union[generative_service.GenerateContentRequest, dict] + ] = None, + *, + model: Optional[str] = None, + contents: Optional[MutableSequence[content.Content]] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> generative_service.GenerateContentResponse: + r"""Generates a model response given an input + ``GenerateContentRequest``. Refer to the `text generation + guide `__ + for detailed usage information. Input capabilities differ + between models, including tuned models. Refer to the `model + guide `__ + and `tuning + guide `__ + for details. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1alpha + + async def sample_generate_content(): + # Create a client + client = generativelanguage_v1alpha.GenerativeServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.GenerateContentRequest( + model="model_value", + ) + + # Make the request + response = await client.generate_content(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.ai.generativelanguage_v1alpha.types.GenerateContentRequest, dict]]): + The request object. Request to generate a completion from + the model. + model (:class:`str`): + Required. The name of the ``Model`` to use for + generating the completion. + + Format: ``models/{model}``. + + This corresponds to the ``model`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + contents (:class:`MutableSequence[google.ai.generativelanguage_v1alpha.types.Content]`): + Required. The content of the current conversation with + the model. + + For single-turn queries, this is a single instance. For + multi-turn queries like + `chat `__, + this is a repeated field that contains the conversation + history and the latest request. + + This corresponds to the ``contents`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.ai.generativelanguage_v1alpha.types.GenerateContentResponse: + Response from the model supporting multiple candidate + responses. + + Safety ratings and content filtering are reported for + both prompt in + GenerateContentResponse.prompt_feedback and for each + candidate in finish_reason and in safety_ratings. The + API: - Returns either all requested candidates or + none of them - Returns no candidates at all only if + there was something wrong with the prompt (check + prompt_feedback) - Reports feedback on each candidate + in finish_reason and safety_ratings. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([model, contents]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, generative_service.GenerateContentRequest): + request = generative_service.GenerateContentRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if model is not None: + request.model = model + if contents: + request.contents.extend(contents) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
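# A hedged sketch (not generated code) showing how the per-call retry and
# timeout arguments documented above can be supplied explicitly; the model name
# and backoff numbers are illustrative. This must run inside an async function
# with a constructed GenerativeServiceAsyncClient.
from google.api_core import retry_async as retries

response = await client.generate_content(
    request={
        "model": "models/example-model",
        "contents": [{"role": "user", "parts": [{"text": "Hello"}]}],
    },
    retry=retries.AsyncRetry(initial=1.0, maximum=10.0, multiplier=2.0, timeout=60.0),
    timeout=120.0,
)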
+ rpc = self._client._transport._wrapped_methods[ + self._client._transport.generate_content + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("model", request.model),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def generate_answer( + self, + request: Optional[Union[generative_service.GenerateAnswerRequest, dict]] = None, + *, + model: Optional[str] = None, + contents: Optional[MutableSequence[content.Content]] = None, + safety_settings: Optional[MutableSequence[safety.SafetySetting]] = None, + answer_style: Optional[ + generative_service.GenerateAnswerRequest.AnswerStyle + ] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> generative_service.GenerateAnswerResponse: + r"""Generates a grounded answer from the model given an input + ``GenerateAnswerRequest``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1alpha + + async def sample_generate_answer(): + # Create a client + client = generativelanguage_v1alpha.GenerativeServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.GenerateAnswerRequest( + model="model_value", + answer_style="VERBOSE", + ) + + # Make the request + response = await client.generate_answer(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.ai.generativelanguage_v1alpha.types.GenerateAnswerRequest, dict]]): + The request object. Request to generate a grounded answer from the + ``Model``. + model (:class:`str`): + Required. The name of the ``Model`` to use for + generating the grounded response. + + Format: ``model=models/{model}``. + + This corresponds to the ``model`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + contents (:class:`MutableSequence[google.ai.generativelanguage_v1alpha.types.Content]`): + Required. The content of the current conversation with + the ``Model``. For single-turn queries, this is a single + question to answer. For multi-turn queries, this is a + repeated field that contains conversation history and + the last ``Content`` in the list containing the + question. + + Note: ``GenerateAnswer`` only supports queries in + English. + + This corresponds to the ``contents`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + safety_settings (:class:`MutableSequence[google.ai.generativelanguage_v1alpha.types.SafetySetting]`): + Optional. A list of unique ``SafetySetting`` instances + for blocking unsafe content. + + This will be enforced on the + ``GenerateAnswerRequest.contents`` and + ``GenerateAnswerResponse.candidate``. 
There should not + be more than one setting for each ``SafetyCategory`` + type. The API will block any contents and responses that + fail to meet the thresholds set by these settings. This + list overrides the default settings for each + ``SafetyCategory`` specified in the safety_settings. If + there is no ``SafetySetting`` for a given + ``SafetyCategory`` provided in the list, the API will + use the default safety setting for that category. Harm + categories HARM_CATEGORY_HATE_SPEECH, + HARM_CATEGORY_SEXUALLY_EXPLICIT, + HARM_CATEGORY_DANGEROUS_CONTENT, + HARM_CATEGORY_HARASSMENT are supported. Refer to the + `guide `__ + for detailed information on available safety settings. + Also refer to the `Safety + guidance `__ + to learn how to incorporate safety considerations in + your AI applications. + + This corresponds to the ``safety_settings`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + answer_style (:class:`google.ai.generativelanguage_v1alpha.types.GenerateAnswerRequest.AnswerStyle`): + Required. Style in which answers + should be returned. + + This corresponds to the ``answer_style`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.ai.generativelanguage_v1alpha.types.GenerateAnswerResponse: + Response from the model for a + grounded answer. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([model, contents, safety_settings, answer_style]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, generative_service.GenerateAnswerRequest): + request = generative_service.GenerateAnswerRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if model is not None: + request.model = model + if answer_style is not None: + request.answer_style = answer_style + if contents: + request.contents.extend(contents) + if safety_settings: + request.safety_settings.extend(safety_settings) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.generate_answer + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("model", request.model),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def stream_generate_content( + self, + request: Optional[ + Union[generative_service.GenerateContentRequest, dict] + ] = None, + *, + model: Optional[str] = None, + contents: Optional[MutableSequence[content.Content]] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> Awaitable[AsyncIterable[generative_service.GenerateContentResponse]]: + r"""Generates a `streamed + response `__ + from the model given an input ``GenerateContentRequest``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1alpha + + async def sample_stream_generate_content(): + # Create a client + client = generativelanguage_v1alpha.GenerativeServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.GenerateContentRequest( + model="model_value", + ) + + # Make the request + stream = await client.stream_generate_content(request=request) + + # Handle the response + async for response in stream: + print(response) + + Args: + request (Optional[Union[google.ai.generativelanguage_v1alpha.types.GenerateContentRequest, dict]]): + The request object. Request to generate a completion from + the model. + model (:class:`str`): + Required. The name of the ``Model`` to use for + generating the completion. + + Format: ``models/{model}``. + + This corresponds to the ``model`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + contents (:class:`MutableSequence[google.ai.generativelanguage_v1alpha.types.Content]`): + Required. The content of the current conversation with + the model. + + For single-turn queries, this is a single instance. For + multi-turn queries like + `chat `__, + this is a repeated field that contains the conversation + history and the latest request. + + This corresponds to the ``contents`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + AsyncIterable[google.ai.generativelanguage_v1alpha.types.GenerateContentResponse]: + Response from the model supporting multiple candidate + responses. + + Safety ratings and content filtering are reported for + both prompt in + GenerateContentResponse.prompt_feedback and for each + candidate in finish_reason and in safety_ratings. The + API: - Returns either all requested candidates or + none of them - Returns no candidates at all only if + there was something wrong with the prompt (check + prompt_feedback) - Reports feedback on each candidate + in finish_reason and safety_ratings. + + """ + # Create or coerce a protobuf request object. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([model, contents]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, generative_service.GenerateContentRequest): + request = generative_service.GenerateContentRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if model is not None: + request.model = model + if contents: + request.contents.extend(contents) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.stream_generate_content + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("model", request.model),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def embed_content( + self, + request: Optional[Union[generative_service.EmbedContentRequest, dict]] = None, + *, + model: Optional[str] = None, + content: Optional[gag_content.Content] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> generative_service.EmbedContentResponse: + r"""Generates a text embedding vector from the input ``Content`` + using the specified `Gemini Embedding + model `__. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1alpha + + async def sample_embed_content(): + # Create a client + client = generativelanguage_v1alpha.GenerativeServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.EmbedContentRequest( + model="model_value", + ) + + # Make the request + response = await client.embed_content(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.ai.generativelanguage_v1alpha.types.EmbedContentRequest, dict]]): + The request object. Request containing the ``Content`` for the model to + embed. + model (:class:`str`): + Required. The model's resource name. This serves as an + ID for the Model to use. + + This name should match a model name returned by the + ``ListModels`` method. + + Format: ``models/{model}`` + + This corresponds to the ``model`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + content (:class:`google.ai.generativelanguage_v1alpha.types.Content`): + Required. The content to embed. Only the ``parts.text`` + fields will be counted. 
+ + This corresponds to the ``content`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.ai.generativelanguage_v1alpha.types.EmbedContentResponse: + The response to an EmbedContentRequest. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([model, content]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, generative_service.EmbedContentRequest): + request = generative_service.EmbedContentRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if model is not None: + request.model = model + if content is not None: + request.content = content + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.embed_content + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("model", request.model),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def batch_embed_contents( + self, + request: Optional[ + Union[generative_service.BatchEmbedContentsRequest, dict] + ] = None, + *, + model: Optional[str] = None, + requests: Optional[ + MutableSequence[generative_service.EmbedContentRequest] + ] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> generative_service.BatchEmbedContentsResponse: + r"""Generates multiple embedding vectors from the input ``Content`` + which consists of a batch of strings represented as + ``EmbedContentRequest`` objects. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1alpha + + async def sample_batch_embed_contents(): + # Create a client + client = generativelanguage_v1alpha.GenerativeServiceAsyncClient() + + # Initialize request argument(s) + requests = generativelanguage_v1alpha.EmbedContentRequest() + requests.model = "model_value" + + request = generativelanguage_v1alpha.BatchEmbedContentsRequest( + model="model_value", + requests=requests, + ) + + # Make the request + response = await client.batch_embed_contents(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.ai.generativelanguage_v1alpha.types.BatchEmbedContentsRequest, dict]]): + The request object. Batch request to get embeddings from + the model for a list of prompts. + model (:class:`str`): + Required. The model's resource name. This serves as an + ID for the Model to use. + + This name should match a model name returned by the + ``ListModels`` method. + + Format: ``models/{model}`` + + This corresponds to the ``model`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + requests (:class:`MutableSequence[google.ai.generativelanguage_v1alpha.types.EmbedContentRequest]`): + Required. Embed requests for the batch. The model in + each of these requests must match the model specified + ``BatchEmbedContentsRequest.model``. + + This corresponds to the ``requests`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.ai.generativelanguage_v1alpha.types.BatchEmbedContentsResponse: + The response to a BatchEmbedContentsRequest. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([model, requests]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, generative_service.BatchEmbedContentsRequest): + request = generative_service.BatchEmbedContentsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if model is not None: + request.model = model + if requests: + request.requests.extend(requests) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.batch_embed_contents + ] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("model", request.model),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def count_tokens( + self, + request: Optional[Union[generative_service.CountTokensRequest, dict]] = None, + *, + model: Optional[str] = None, + contents: Optional[MutableSequence[content.Content]] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> generative_service.CountTokensResponse: + r"""Runs a model's tokenizer on input ``Content`` and returns the + token count. Refer to the `tokens + guide `__ to learn + more about tokens. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1alpha + + async def sample_count_tokens(): + # Create a client + client = generativelanguage_v1alpha.GenerativeServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.CountTokensRequest( + model="model_value", + ) + + # Make the request + response = await client.count_tokens(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.ai.generativelanguage_v1alpha.types.CountTokensRequest, dict]]): + The request object. Counts the number of tokens in the ``prompt`` sent to a + model. + + Models may tokenize text differently, so each model may + return a different ``token_count``. + model (:class:`str`): + Required. The model's resource name. This serves as an + ID for the Model to use. + + This name should match a model name returned by the + ``ListModels`` method. + + Format: ``models/{model}`` + + This corresponds to the ``model`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + contents (:class:`MutableSequence[google.ai.generativelanguage_v1alpha.types.Content]`): + Optional. The input given to the model as a prompt. This + field is ignored when ``generate_content_request`` is + set. + + This corresponds to the ``contents`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.ai.generativelanguage_v1alpha.types.CountTokensResponse: + A response from CountTokens. + + It returns the model's token_count for the prompt. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([model, contents]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, generative_service.CountTokensRequest): + request = generative_service.CountTokensRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if model is not None: + request.model = model + if contents: + request.contents.extend(contents) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.count_tokens + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("model", request.model),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def bidi_generate_content( + self, + requests: Optional[ + AsyncIterator[generative_service.BidiGenerateContentClientMessage] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> Awaitable[AsyncIterable[generative_service.BidiGenerateContentServerMessage]]: + r"""Low-Latency bidirectional streaming API that supports + audio and video streaming inputs can produce multimodal + output streams (audio and text). + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1alpha + + async def sample_bidi_generate_content(): + # Create a client + client = generativelanguage_v1alpha.GenerativeServiceAsyncClient() + + # Initialize request argument(s) + setup = generativelanguage_v1alpha.BidiGenerateContentSetup() + setup.model = "model_value" + + request = generativelanguage_v1alpha.BidiGenerateContentClientMessage( + setup=setup, + ) + + # This method expects an iterator which contains + # 'generativelanguage_v1alpha.BidiGenerateContentClientMessage' objects + # Here we create a generator that yields a single `request` for + # demonstrative purposes. + requests = [request] + + def request_generator(): + for request in requests: + yield request + + # Make the request + stream = await client.bidi_generate_content(requests=request_generator()) + + # Handle the response + async for response in stream: + print(response) + + Args: + requests (AsyncIterator[`google.ai.generativelanguage_v1alpha.types.BidiGenerateContentClientMessage`]): + The request object AsyncIterator. Messages sent by the client in the + BidiGenerateContent call. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + AsyncIterable[google.ai.generativelanguage_v1alpha.types.BidiGenerateContentServerMessage]: + Response message for the + BidiGenerateContent call. + + """ + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.bidi_generate_content + ] + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = rpc( + requests, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.list_operations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.get_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def __aenter__(self) -> "GenerativeServiceAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("GenerativeServiceAsyncClient",) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/generative_service/client.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/generative_service/client.py new file mode 100644 index 000000000000..60be25f450c8 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/generative_service/client.py @@ -0,0 +1,1788 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +from http import HTTPStatus +import json +import logging as std_logging +import os +import re +from typing import ( + Callable, + Dict, + Iterable, + Iterator, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) +import warnings + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.ai.generativelanguage_v1alpha import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + +from google.longrunning import operations_pb2 # type: ignore + +from google.ai.generativelanguage_v1alpha.types import generative_service, safety +from google.ai.generativelanguage_v1alpha.types import content +from google.ai.generativelanguage_v1alpha.types import content as gag_content + +from .transports.base import DEFAULT_CLIENT_INFO, GenerativeServiceTransport +from .transports.grpc import GenerativeServiceGrpcTransport +from .transports.grpc_asyncio import GenerativeServiceGrpcAsyncIOTransport +from .transports.rest import GenerativeServiceRestTransport + + +class GenerativeServiceClientMeta(type): + """Metaclass for the GenerativeService client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + + _transport_registry = ( + OrderedDict() + ) # type: Dict[str, Type[GenerativeServiceTransport]] + _transport_registry["grpc"] = GenerativeServiceGrpcTransport + _transport_registry["grpc_asyncio"] = GenerativeServiceGrpcAsyncIOTransport + _transport_registry["rest"] = GenerativeServiceRestTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[GenerativeServiceTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class GenerativeServiceClient(metaclass=GenerativeServiceClientMeta): + """API for using Large Models that generate multimodal content + and have additional capabilities beyond text generation. + """ + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. 
+
+ Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
+ "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
+ Args:
+ api_endpoint (Optional[str]): the api endpoint to convert.
+ Returns:
+ str: converted mTLS api endpoint.
+ """
+ if not api_endpoint:
+ return api_endpoint
+
+ mtls_endpoint_re = re.compile(
+ r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
+ )
+
+ m = mtls_endpoint_re.match(api_endpoint)
+ name, mtls, sandbox, googledomain = m.groups()
+ if mtls or not googledomain:
+ return api_endpoint
+
+ if sandbox:
+ return api_endpoint.replace(
+ "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
+ )
+
+ return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
+
+ # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead.
+ DEFAULT_ENDPOINT = "generativelanguage.googleapis.com"
+ DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore
+ DEFAULT_ENDPOINT
+ )
+
+ _DEFAULT_ENDPOINT_TEMPLATE = "generativelanguage.{UNIVERSE_DOMAIN}"
+ _DEFAULT_UNIVERSE = "googleapis.com"
+
+ @classmethod
+ def from_service_account_info(cls, info: dict, *args, **kwargs):
+ """Creates an instance of this client using the provided credentials
+ info.
+
+ Args:
+ info (dict): The service account private key info.
+ args: Additional arguments to pass to the constructor.
+ kwargs: Additional arguments to pass to the constructor.
+
+ Returns:
+ GenerativeServiceClient: The constructed client.
+ """
+ credentials = service_account.Credentials.from_service_account_info(info)
+ kwargs["credentials"] = credentials
+ return cls(*args, **kwargs)
+
+ @classmethod
+ def from_service_account_file(cls, filename: str, *args, **kwargs):
+ """Creates an instance of this client using the provided credentials
+ file.
+
+ Args:
+ filename (str): The path to the service account private key json
+ file.
+ args: Additional arguments to pass to the constructor.
+ kwargs: Additional arguments to pass to the constructor.
+
+ Returns:
+ GenerativeServiceClient: The constructed client.
+ """
+ credentials = service_account.Credentials.from_service_account_file(filename)
+ kwargs["credentials"] = credentials
+ return cls(*args, **kwargs)
+
+ from_service_account_json = from_service_account_file
+
+ @property
+ def transport(self) -> GenerativeServiceTransport:
+ """Returns the transport used by the client instance.
+
+ Returns:
+ GenerativeServiceTransport: The transport used by the client
+ instance.
+ """
+ return self._transport
+
+ @staticmethod
+ def cached_content_path(
+ id: str,
+ ) -> str:
+ """Returns a fully-qualified cached_content string."""
+ return "cachedContents/{id}".format(
+ id=id,
+ )
+
+ @staticmethod
+ def parse_cached_content_path(path: str) -> Dict[str, str]:
+ """Parses a cached_content path into its component segments."""
+ m = re.match(r"^cachedContents/(?P<id>.+?)$", path)
+ return m.groupdict() if m else {}
+
+ @staticmethod
+ def model_path(
+ model: str,
+ ) -> str:
+ """Returns a fully-qualified model string."""
+ return "models/{model}".format(
+ model=model,
+ )
+
+ @staticmethod
+ def parse_model_path(path: str) -> Dict[str, str]:
+ """Parses a model path into its component segments."""
+ m = re.match(r"^models/(?P<model>.+?)$", path)
+ return m.groupdict() if m else {}
+
+ @staticmethod
+ def common_billing_account_path(
+ billing_account: str,
+ ) -> str:
+ """Returns a fully-qualified billing_account string."""
+ return "billingAccounts/{billing_account}".format(
+ billing_account=billing_account,
+ )
+
+ @staticmethod
+ def parse_common_billing_account_path(path: str) -> Dict[str, str]:
+ """Parse a billing_account path into its component segments."""
+ m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
+ return m.groupdict() if m else {}
+
+ @staticmethod
+ def common_folder_path(
+ folder: str,
+ ) -> str:
+ """Returns a fully-qualified folder string."""
+ return "folders/{folder}".format(
+ folder=folder,
+ )
+
+ @staticmethod
+ def parse_common_folder_path(path: str) -> Dict[str, str]:
+ """Parse a folder path into its component segments."""
+ m = re.match(r"^folders/(?P<folder>.+?)$", path)
+ return m.groupdict() if m else {}
+
+ @staticmethod
+ def common_organization_path(
+ organization: str,
+ ) -> str:
+ """Returns a fully-qualified organization string."""
+ return "organizations/{organization}".format(
+ organization=organization,
+ )
+
+ @staticmethod
+ def parse_common_organization_path(path: str) -> Dict[str, str]:
+ """Parse a organization path into its component segments."""
+ m = re.match(r"^organizations/(?P<organization>.+?)$", path)
+ return m.groupdict() if m else {}
+
+ @staticmethod
+ def common_project_path(
+ project: str,
+ ) -> str:
+ """Returns a fully-qualified project string."""
+ return "projects/{project}".format(
+ project=project,
+ )
+
+ @staticmethod
+ def parse_common_project_path(path: str) -> Dict[str, str]:
+ """Parse a project path into its component segments."""
+ m = re.match(r"^projects/(?P<project>.+?)$", path)
+ return m.groupdict() if m else {}
+
+ @staticmethod
+ def common_location_path(
+ project: str,
+ location: str,
+ ) -> str:
+ """Returns a fully-qualified location string."""
+ return "projects/{project}/locations/{location}".format(
+ project=project,
+ location=location,
+ )
+
+ @staticmethod
+ def parse_common_location_path(path: str) -> Dict[str, str]:
+ """Parse a location path into its component segments."""
+ m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
+ return m.groupdict() if m else {}
+
+ @classmethod
+ def get_mtls_endpoint_and_cert_source(
+ cls, client_options: Optional[client_options_lib.ClientOptions] = None
+ ):
+ """Deprecated. Return the API endpoint and client cert source for mutual TLS.
+
+ The client cert source is determined in the following order:
+ (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+ client cert source is None.
+ (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + + warnings.warn( + "get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", + DeprecationWarning, + ) + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + @staticmethod + def _read_environment_variables(): + """Returns the environment variables used by the client. + + Returns: + Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, + GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. + + Raises: + ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not + any of ["true", "false"]. + google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT + is not any of ["auto", "never", "always"]. 
+ """ + use_client_cert = os.getenv( + "GOOGLE_API_USE_CLIENT_CERTIFICATE", "false" + ).lower() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() + universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + return use_client_cert == "true", use_mtls_endpoint, universe_domain_env + + @staticmethod + def _get_client_cert_source(provided_cert_source, use_cert_flag): + """Return the client cert source to be used by the client. + + Args: + provided_cert_source (bytes): The client certificate source provided. + use_cert_flag (bool): A flag indicating whether to use the client certificate. + + Returns: + bytes or None: The client cert source to be used by the client. + """ + client_cert_source = None + if use_cert_flag: + if provided_cert_source: + client_cert_source = provided_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + return client_cert_source + + @staticmethod + def _get_api_endpoint( + api_override, client_cert_source, universe_domain, use_mtls_endpoint + ): + """Return the API endpoint used by the client. + + Args: + api_override (str): The API endpoint override. If specified, this is always + the return value of this function and the other arguments are not used. + client_cert_source (bytes): The client certificate source used by the client. + universe_domain (str): The universe domain used by the client. + use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. + Possible values are "always", "auto", or "never". + + Returns: + str: The API endpoint to be used by the client. + """ + if api_override is not None: + api_endpoint = api_override + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + _default_universe = GenerativeServiceClient._DEFAULT_UNIVERSE + if universe_domain != _default_universe: + raise MutualTLSChannelError( + f"mTLS is not supported in any universe other than {_default_universe}." + ) + api_endpoint = GenerativeServiceClient.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = GenerativeServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=universe_domain + ) + return api_endpoint + + @staticmethod + def _get_universe_domain( + client_universe_domain: Optional[str], universe_domain_env: Optional[str] + ) -> str: + """Return the universe domain used by the client. + + Args: + client_universe_domain (Optional[str]): The universe domain configured via the client options. + universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. + + Returns: + str: The universe domain to be used by the client. + + Raises: + ValueError: If the universe domain is an empty string. 
+ """ + universe_domain = GenerativeServiceClient._DEFAULT_UNIVERSE + if client_universe_domain is not None: + universe_domain = client_universe_domain + elif universe_domain_env is not None: + universe_domain = universe_domain_env + if len(universe_domain.strip()) == 0: + raise ValueError("Universe Domain cannot be an empty string.") + return universe_domain + + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. + + Returns: + bool: True iff the configured universe domain is valid. + + Raises: + ValueError: If the configured universe domain is not valid. + """ + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True + + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used by the client instance. + """ + return self._universe_domain + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[ + str, + GenerativeServiceTransport, + Callable[..., GenerativeServiceTransport], + ] + ] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the generative service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,GenerativeServiceTransport,Callable[..., GenerativeServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the GenerativeServiceTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. 
Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that the ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client_options = client_options + if isinstance(self._client_options, dict): + self._client_options = client_options_lib.from_dict(self._client_options) + if self._client_options is None: + self._client_options = client_options_lib.ClientOptions() + self._client_options = cast( + client_options_lib.ClientOptions, self._client_options + ) + + universe_domain_opt = getattr(self._client_options, "universe_domain", None) + + ( + self._use_client_cert, + self._use_mtls_endpoint, + self._universe_domain_env, + ) = GenerativeServiceClient._read_environment_variables() + self._client_cert_source = GenerativeServiceClient._get_client_cert_source( + self._client_options.client_cert_source, self._use_client_cert + ) + self._universe_domain = GenerativeServiceClient._get_universe_domain( + universe_domain_opt, self._universe_domain_env + ) + self._api_endpoint = None # updated below, depending on `transport` + + # Initialize the universe domain validation. + self._is_universe_domain_valid = False + + if CLIENT_LOGGING_SUPPORTED: # pragma: NO COVER + # Setup logging. + client_logging.initialize_logging() + + api_key_value = getattr(self._client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + transport_provided = isinstance(transport, GenerativeServiceTransport) + if transport_provided: + # transport is a GenerativeServiceTransport instance. + if credentials or self._client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if self._client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = cast(GenerativeServiceTransport, transport) + self._api_endpoint = self._transport.host + + self._api_endpoint = ( + self._api_endpoint + or GenerativeServiceClient._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint, + ) + ) + + if not transport_provided: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + transport_init: Union[ + Type[GenerativeServiceTransport], + Callable[..., GenerativeServiceTransport], + ] = ( + GenerativeServiceClient.get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., GenerativeServiceTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( + credentials=credentials, + credentials_file=self._client_options.credentials_file, + host=self._api_endpoint, + scopes=self._client_options.scopes, + client_cert_source_for_mtls=self._client_cert_source, + quota_project_id=self._client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=self._client_options.api_audience, + ) + + if "async" not in str(self._transport): + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ): # pragma: NO COVER + _LOGGER.debug( + "Created client `google.ai.generativelanguage_v1alpha.GenerativeServiceClient`.", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.GenerativeService", + "universeDomain": getattr( + self._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._transport, "_credentials") + else { + "serviceName": "google.ai.generativelanguage.v1alpha.GenerativeService", + "credentialsType": None, + }, + ) + + def generate_content( + self, + request: Optional[ + Union[generative_service.GenerateContentRequest, dict] + ] = None, + *, + model: Optional[str] = None, + contents: Optional[MutableSequence[content.Content]] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> generative_service.GenerateContentResponse: + r"""Generates a model response given an input + ``GenerateContentRequest``. Refer to the `text generation + guide `__ + for detailed usage information. Input capabilities differ + between models, including tuned models. Refer to the `model + guide `__ + and `tuning + guide `__ + for details. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1alpha + + def sample_generate_content(): + # Create a client + client = generativelanguage_v1alpha.GenerativeServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.GenerateContentRequest( + model="model_value", + ) + + # Make the request + response = client.generate_content(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.ai.generativelanguage_v1alpha.types.GenerateContentRequest, dict]): + The request object. Request to generate a completion from + the model. + model (str): + Required. The name of the ``Model`` to use for + generating the completion. + + Format: ``models/{model}``. + + This corresponds to the ``model`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + contents (MutableSequence[google.ai.generativelanguage_v1alpha.types.Content]): + Required. The content of the current conversation with + the model. + + For single-turn queries, this is a single instance. For + multi-turn queries like + `chat `__, + this is a repeated field that contains the conversation + history and the latest request. + + This corresponds to the ``contents`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.ai.generativelanguage_v1alpha.types.GenerateContentResponse: + Response from the model supporting multiple candidate + responses. + + Safety ratings and content filtering are reported for + both prompt in + GenerateContentResponse.prompt_feedback and for each + candidate in finish_reason and in safety_ratings. The + API: - Returns either all requested candidates or + none of them - Returns no candidates at all only if + there was something wrong with the prompt (check + prompt_feedback) - Reports feedback on each candidate + in finish_reason and safety_ratings. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([model, contents]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, generative_service.GenerateContentRequest): + request = generative_service.GenerateContentRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if model is not None: + request.model = model + if contents is not None: + request.contents = contents + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.generate_content] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("model", request.model),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def generate_answer( + self, + request: Optional[Union[generative_service.GenerateAnswerRequest, dict]] = None, + *, + model: Optional[str] = None, + contents: Optional[MutableSequence[content.Content]] = None, + safety_settings: Optional[MutableSequence[safety.SafetySetting]] = None, + answer_style: Optional[ + generative_service.GenerateAnswerRequest.AnswerStyle + ] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> generative_service.GenerateAnswerResponse: + r"""Generates a grounded answer from the model given an input + ``GenerateAnswerRequest``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1alpha + + def sample_generate_answer(): + # Create a client + client = generativelanguage_v1alpha.GenerativeServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.GenerateAnswerRequest( + model="model_value", + answer_style="VERBOSE", + ) + + # Make the request + response = client.generate_answer(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.ai.generativelanguage_v1alpha.types.GenerateAnswerRequest, dict]): + The request object. Request to generate a grounded answer from the + ``Model``. + model (str): + Required. The name of the ``Model`` to use for + generating the grounded response. + + Format: ``model=models/{model}``. + + This corresponds to the ``model`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + contents (MutableSequence[google.ai.generativelanguage_v1alpha.types.Content]): + Required. The content of the current conversation with + the ``Model``. For single-turn queries, this is a single + question to answer. For multi-turn queries, this is a + repeated field that contains conversation history and + the last ``Content`` in the list containing the + question. + + Note: ``GenerateAnswer`` only supports queries in + English. + + This corresponds to the ``contents`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + safety_settings (MutableSequence[google.ai.generativelanguage_v1alpha.types.SafetySetting]): + Optional. A list of unique ``SafetySetting`` instances + for blocking unsafe content. + + This will be enforced on the + ``GenerateAnswerRequest.contents`` and + ``GenerateAnswerResponse.candidate``. There should not + be more than one setting for each ``SafetyCategory`` + type. 
The API will block any contents and responses that + fail to meet the thresholds set by these settings. This + list overrides the default settings for each + ``SafetyCategory`` specified in the safety_settings. If + there is no ``SafetySetting`` for a given + ``SafetyCategory`` provided in the list, the API will + use the default safety setting for that category. Harm + categories HARM_CATEGORY_HATE_SPEECH, + HARM_CATEGORY_SEXUALLY_EXPLICIT, + HARM_CATEGORY_DANGEROUS_CONTENT, + HARM_CATEGORY_HARASSMENT are supported. Refer to the + `guide `__ + for detailed information on available safety settings. + Also refer to the `Safety + guidance `__ + to learn how to incorporate safety considerations in + your AI applications. + + This corresponds to the ``safety_settings`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + answer_style (google.ai.generativelanguage_v1alpha.types.GenerateAnswerRequest.AnswerStyle): + Required. Style in which answers + should be returned. + + This corresponds to the ``answer_style`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.ai.generativelanguage_v1alpha.types.GenerateAnswerResponse: + Response from the model for a + grounded answer. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([model, contents, safety_settings, answer_style]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, generative_service.GenerateAnswerRequest): + request = generative_service.GenerateAnswerRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if model is not None: + request.model = model + if contents is not None: + request.contents = contents + if safety_settings is not None: + request.safety_settings = safety_settings + if answer_style is not None: + request.answer_style = answer_style + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.generate_answer] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("model", request.model),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def stream_generate_content( + self, + request: Optional[ + Union[generative_service.GenerateContentRequest, dict] + ] = None, + *, + model: Optional[str] = None, + contents: Optional[MutableSequence[content.Content]] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> Iterable[generative_service.GenerateContentResponse]: + r"""Generates a `streamed + response `__ + from the model given an input ``GenerateContentRequest``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1alpha + + def sample_stream_generate_content(): + # Create a client + client = generativelanguage_v1alpha.GenerativeServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.GenerateContentRequest( + model="model_value", + ) + + # Make the request + stream = client.stream_generate_content(request=request) + + # Handle the response + for response in stream: + print(response) + + Args: + request (Union[google.ai.generativelanguage_v1alpha.types.GenerateContentRequest, dict]): + The request object. Request to generate a completion from + the model. + model (str): + Required. The name of the ``Model`` to use for + generating the completion. + + Format: ``models/{model}``. + + This corresponds to the ``model`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + contents (MutableSequence[google.ai.generativelanguage_v1alpha.types.Content]): + Required. The content of the current conversation with + the model. + + For single-turn queries, this is a single instance. For + multi-turn queries like + `chat `__, + this is a repeated field that contains the conversation + history and the latest request. + + This corresponds to the ``contents`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + Iterable[google.ai.generativelanguage_v1alpha.types.GenerateContentResponse]: + Response from the model supporting multiple candidate + responses. + + Safety ratings and content filtering are reported for + both prompt in + GenerateContentResponse.prompt_feedback and for each + candidate in finish_reason and in safety_ratings. The + API: - Returns either all requested candidates or + none of them - Returns no candidates at all only if + there was something wrong with the prompt (check + prompt_feedback) - Reports feedback on each candidate + in finish_reason and safety_ratings. + + """ + # Create or coerce a protobuf request object. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([model, contents]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, generative_service.GenerateContentRequest): + request = generative_service.GenerateContentRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if model is not None: + request.model = model + if contents is not None: + request.contents = contents + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.stream_generate_content] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("model", request.model),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def embed_content( + self, + request: Optional[Union[generative_service.EmbedContentRequest, dict]] = None, + *, + model: Optional[str] = None, + content: Optional[gag_content.Content] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> generative_service.EmbedContentResponse: + r"""Generates a text embedding vector from the input ``Content`` + using the specified `Gemini Embedding + model `__. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1alpha + + def sample_embed_content(): + # Create a client + client = generativelanguage_v1alpha.GenerativeServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.EmbedContentRequest( + model="model_value", + ) + + # Make the request + response = client.embed_content(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.ai.generativelanguage_v1alpha.types.EmbedContentRequest, dict]): + The request object. Request containing the ``Content`` for the model to + embed. + model (str): + Required. The model's resource name. This serves as an + ID for the Model to use. + + This name should match a model name returned by the + ``ListModels`` method. + + Format: ``models/{model}`` + + This corresponds to the ``model`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + content (google.ai.generativelanguage_v1alpha.types.Content): + Required. The content to embed. Only the ``parts.text`` + fields will be counted. 
+ + This corresponds to the ``content`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.ai.generativelanguage_v1alpha.types.EmbedContentResponse: + The response to an EmbedContentRequest. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([model, content]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, generative_service.EmbedContentRequest): + request = generative_service.EmbedContentRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if model is not None: + request.model = model + if content is not None: + request.content = content + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.embed_content] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("model", request.model),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def batch_embed_contents( + self, + request: Optional[ + Union[generative_service.BatchEmbedContentsRequest, dict] + ] = None, + *, + model: Optional[str] = None, + requests: Optional[ + MutableSequence[generative_service.EmbedContentRequest] + ] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> generative_service.BatchEmbedContentsResponse: + r"""Generates multiple embedding vectors from the input ``Content`` + which consists of a batch of strings represented as + ``EmbedContentRequest`` objects. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1alpha + + def sample_batch_embed_contents(): + # Create a client + client = generativelanguage_v1alpha.GenerativeServiceClient() + + # Initialize request argument(s) + requests = generativelanguage_v1alpha.EmbedContentRequest() + requests.model = "model_value" + + request = generativelanguage_v1alpha.BatchEmbedContentsRequest( + model="model_value", + requests=requests, + ) + + # Make the request + response = client.batch_embed_contents(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.ai.generativelanguage_v1alpha.types.BatchEmbedContentsRequest, dict]): + The request object. Batch request to get embeddings from + the model for a list of prompts. + model (str): + Required. The model's resource name. This serves as an + ID for the Model to use. + + This name should match a model name returned by the + ``ListModels`` method. + + Format: ``models/{model}`` + + This corresponds to the ``model`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + requests (MutableSequence[google.ai.generativelanguage_v1alpha.types.EmbedContentRequest]): + Required. Embed requests for the batch. The model in + each of these requests must match the model specified + ``BatchEmbedContentsRequest.model``. + + This corresponds to the ``requests`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.ai.generativelanguage_v1alpha.types.BatchEmbedContentsResponse: + The response to a BatchEmbedContentsRequest. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([model, requests]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, generative_service.BatchEmbedContentsRequest): + request = generative_service.BatchEmbedContentsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if model is not None: + request.model = model + if requests is not None: + request.requests = requests + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.batch_embed_contents] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("model", request.model),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def count_tokens( + self, + request: Optional[Union[generative_service.CountTokensRequest, dict]] = None, + *, + model: Optional[str] = None, + contents: Optional[MutableSequence[content.Content]] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> generative_service.CountTokensResponse: + r"""Runs a model's tokenizer on input ``Content`` and returns the + token count. Refer to the `tokens + guide `__ to learn + more about tokens. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1alpha + + def sample_count_tokens(): + # Create a client + client = generativelanguage_v1alpha.GenerativeServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.CountTokensRequest( + model="model_value", + ) + + # Make the request + response = client.count_tokens(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.ai.generativelanguage_v1alpha.types.CountTokensRequest, dict]): + The request object. Counts the number of tokens in the ``prompt`` sent to a + model. + + Models may tokenize text differently, so each model may + return a different ``token_count``. + model (str): + Required. The model's resource name. This serves as an + ID for the Model to use. + + This name should match a model name returned by the + ``ListModels`` method. + + Format: ``models/{model}`` + + This corresponds to the ``model`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + contents (MutableSequence[google.ai.generativelanguage_v1alpha.types.Content]): + Optional. The input given to the model as a prompt. This + field is ignored when ``generate_content_request`` is + set. + + This corresponds to the ``contents`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.ai.generativelanguage_v1alpha.types.CountTokensResponse: + A response from CountTokens. + + It returns the model's token_count for the prompt. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([model, contents]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, generative_service.CountTokensRequest): + request = generative_service.CountTokensRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if model is not None: + request.model = model + if contents is not None: + request.contents = contents + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.count_tokens] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("model", request.model),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def bidi_generate_content( + self, + requests: Optional[ + Iterator[generative_service.BidiGenerateContentClientMessage] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> Iterable[generative_service.BidiGenerateContentServerMessage]: + r"""Low-Latency bidirectional streaming API that supports + audio and video streaming inputs can produce multimodal + output streams (audio and text). + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1alpha + + def sample_bidi_generate_content(): + # Create a client + client = generativelanguage_v1alpha.GenerativeServiceClient() + + # Initialize request argument(s) + setup = generativelanguage_v1alpha.BidiGenerateContentSetup() + setup.model = "model_value" + + request = generativelanguage_v1alpha.BidiGenerateContentClientMessage( + setup=setup, + ) + + # This method expects an iterator which contains + # 'generativelanguage_v1alpha.BidiGenerateContentClientMessage' objects + # Here we create a generator that yields a single `request` for + # demonstrative purposes. + requests = [request] + + def request_generator(): + for request in requests: + yield request + + # Make the request + stream = client.bidi_generate_content(requests=request_generator()) + + # Handle the response + for response in stream: + print(response) + + Args: + requests (Iterator[google.ai.generativelanguage_v1alpha.types.BidiGenerateContentClientMessage]): + The request object iterator. Messages sent by the client in the + BidiGenerateContent call. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ + Returns: + Iterable[google.ai.generativelanguage_v1alpha.types.BidiGenerateContentServerMessage]: + Response message for the + BidiGenerateContent call. + + """ + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.bidi_generate_content] + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + requests, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "GenerativeServiceClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_operations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e + + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. 
+ retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("GenerativeServiceClient",) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/generative_service/transports/README.rst b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/generative_service/transports/README.rst new file mode 100644 index 000000000000..b8180cab1e5c --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/generative_service/transports/README.rst @@ -0,0 +1,9 @@ + +transport inheritance structure +_______________________________ + +`GenerativeServiceTransport` is the ABC for all transports. +- public child `GenerativeServiceGrpcTransport` for sync gRPC transport (defined in `grpc.py`). +- public child `GenerativeServiceGrpcAsyncIOTransport` for async gRPC transport (defined in `grpc_asyncio.py`). +- private child `_BaseGenerativeServiceRestTransport` for base REST transport with inner classes `_BaseMETHOD` (defined in `rest_base.py`). +- public child `GenerativeServiceRestTransport` for sync REST transport with inner classes `METHOD` derived from the parent's corresponding `_BaseMETHOD` classes (defined in `rest.py`). diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/generative_service/transports/__init__.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/generative_service/transports/__init__.py new file mode 100644 index 000000000000..1321d30dc967 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/generative_service/transports/__init__.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
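The client methods above accept either a fully-formed request object or the flattened fields (``model``, ``contents``, and so on), but not both at once. A minimal sketch of the flattened calling style, assuming the ``Content`` and ``Part`` message types exported by this package and a placeholder model name:

.. code-block:: python

    from google.ai import generativelanguage_v1alpha

    client = generativelanguage_v1alpha.GenerativeServiceClient()

    # Flattened arguments are copied onto a GenerateContentRequest internally;
    # passing `request=` together with `model=`/`contents=` raises ValueError.
    response = client.generate_content(
        model="models/some-model",  # placeholder model name
        contents=[
            generativelanguage_v1alpha.Content(
                parts=[generativelanguage_v1alpha.Part(text="Hello")]
            )
        ],
    )
    print(response)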
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import GenerativeServiceTransport +from .grpc import GenerativeServiceGrpcTransport +from .grpc_asyncio import GenerativeServiceGrpcAsyncIOTransport +from .rest import GenerativeServiceRestInterceptor, GenerativeServiceRestTransport + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[GenerativeServiceTransport]] +_transport_registry["grpc"] = GenerativeServiceGrpcTransport +_transport_registry["grpc_asyncio"] = GenerativeServiceGrpcAsyncIOTransport +_transport_registry["rest"] = GenerativeServiceRestTransport + +__all__ = ( + "GenerativeServiceTransport", + "GenerativeServiceGrpcTransport", + "GenerativeServiceGrpcAsyncIOTransport", + "GenerativeServiceRestTransport", + "GenerativeServiceRestInterceptor", +) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/generative_service/transports/base.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/generative_service/transports/base.py new file mode 100644 index 000000000000..f35e8e7c53df --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/generative_service/transports/base.py @@ -0,0 +1,298 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
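The registry above is what backs the string form of the client's ``transport`` argument. A minimal sketch, assuming Application Default Credentials are available, of asking the synchronous client for the REST transport instead of the default gRPC one:

.. code-block:: python

    from google.ai import generativelanguage_v1alpha

    # "grpc" (the default) and "rest" are the registered names used by the
    # synchronous client; "grpc_asyncio" backs the async client.
    client = generativelanguage_v1alpha.GenerativeServiceClient(transport="rest")
    print(type(client.transport).__name__)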
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.ai.generativelanguage_v1alpha import gapic_version as package_version +from google.ai.generativelanguage_v1alpha.types import generative_service + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class GenerativeServiceTransport(abc.ABC): + """Abstract transport class for GenerativeService.""" + + AUTH_SCOPES = () + + DEFAULT_HOST: str = "generativelanguage.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'generativelanguage.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + if not hasattr(self, "_ignore_credentials"): + self._ignore_credentials: bool = False + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None and not self._ignore_credentials: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. 
+ if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + @property + def host(self): + return self._host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.generate_content: gapic_v1.method.wrap_method( + self.generate_content, + default_timeout=None, + client_info=client_info, + ), + self.generate_answer: gapic_v1.method.wrap_method( + self.generate_answer, + default_timeout=None, + client_info=client_info, + ), + self.stream_generate_content: gapic_v1.method.wrap_method( + self.stream_generate_content, + default_timeout=None, + client_info=client_info, + ), + self.embed_content: gapic_v1.method.wrap_method( + self.embed_content, + default_timeout=None, + client_info=client_info, + ), + self.batch_embed_contents: gapic_v1.method.wrap_method( + self.batch_embed_contents, + default_timeout=None, + client_info=client_info, + ), + self.count_tokens: gapic_v1.method.wrap_method( + self.count_tokens, + default_timeout=None, + client_info=client_info, + ), + self.bidi_generate_content: gapic_v1.method.wrap_method( + self.bidi_generate_content, + default_timeout=None, + client_info=client_info, + ), + self.get_operation: gapic_v1.method.wrap_method( + self.get_operation, + default_timeout=None, + client_info=client_info, + ), + self.list_operations: gapic_v1.method.wrap_method( + self.list_operations, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + + @property + def generate_content( + self, + ) -> Callable[ + [generative_service.GenerateContentRequest], + Union[ + generative_service.GenerateContentResponse, + Awaitable[generative_service.GenerateContentResponse], + ], + ]: + raise NotImplementedError() + + @property + def generate_answer( + self, + ) -> Callable[ + [generative_service.GenerateAnswerRequest], + Union[ + generative_service.GenerateAnswerResponse, + Awaitable[generative_service.GenerateAnswerResponse], + ], + ]: + raise NotImplementedError() + + @property + def stream_generate_content( + self, + ) -> Callable[ + [generative_service.GenerateContentRequest], + Union[ + generative_service.GenerateContentResponse, + Awaitable[generative_service.GenerateContentResponse], + ], + ]: + raise NotImplementedError() + + @property + def embed_content( + self, + ) -> Callable[ + [generative_service.EmbedContentRequest], + Union[ + generative_service.EmbedContentResponse, + Awaitable[generative_service.EmbedContentResponse], + ], + ]: + raise NotImplementedError() + + @property + def batch_embed_contents( + self, + ) -> Callable[ + [generative_service.BatchEmbedContentsRequest], + Union[ + generative_service.BatchEmbedContentsResponse, + Awaitable[generative_service.BatchEmbedContentsResponse], + ], + ]: + raise NotImplementedError() + + @property + def count_tokens( + self, + ) -> Callable[ + [generative_service.CountTokensRequest], + Union[ + generative_service.CountTokensResponse, + Awaitable[generative_service.CountTokensResponse], + ], + ]: + raise NotImplementedError() + + @property + def bidi_generate_content( + self, + ) -> Callable[ + [generative_service.BidiGenerateContentClientMessage], + Union[ + generative_service.BidiGenerateContentServerMessage, + Awaitable[generative_service.BidiGenerateContentServerMessage], + ], + ]: + raise NotImplementedError() + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], + Union[ + operations_pb2.ListOperationsResponse, + Awaitable[operations_pb2.ListOperationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_operation( + self, + ) -> Callable[ + [operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("GenerativeServiceTransport",) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/generative_service/transports/grpc.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/generative_service/transports/grpc.py new file mode 100644 index 000000000000..e17edf1c6a1d --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/generative_service/transports/grpc.py @@ -0,0 +1,591 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import json +import logging as std_logging +import pickle +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf.json_format import MessageToJson +import google.protobuf.message +import grpc # type: ignore +import proto # type: ignore + +from google.ai.generativelanguage_v1alpha.types import generative_service + +from .base import DEFAULT_CLIENT_INFO, GenerativeServiceTransport + +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + + +class _LoggingClientInterceptor(grpc.UnaryUnaryClientInterceptor): # pragma: NO COVER + def intercept_unary_unary(self, continuation, client_call_details, request): + logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ) + if logging_enabled: # pragma: NO COVER + request_metadata = client_call_details.metadata + if isinstance(request, proto.Message): + request_payload = type(request).to_json(request) + elif isinstance(request, google.protobuf.message.Message): + request_payload = MessageToJson(request) + else: + request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" + + request_metadata = { + key: value.decode("utf-8") if isinstance(value, bytes) else value + for key, value in request_metadata + } + grpc_request = { + "payload": request_payload, + "requestMethod": "grpc", + "metadata": dict(request_metadata), + } + _LOGGER.debug( + f"Sending request for {client_call_details.method}", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.GenerativeService", + "rpcName": client_call_details.method, + "request": grpc_request, + "metadata": grpc_request["metadata"], + }, + ) + + response = continuation(client_call_details, request) + if logging_enabled: # pragma: NO COVER + response_metadata = response.trailing_metadata() + # Convert gRPC metadata `` to list of tuples + metadata = ( + dict([(k, str(v)) for k, v in response_metadata]) + if response_metadata + else None + ) + result = response.result() + if isinstance(result, proto.Message): + response_payload = type(result).to_json(result) + elif isinstance(result, google.protobuf.message.Message): + response_payload = MessageToJson(result) + else: + response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" + grpc_response = { + "payload": response_payload, + "metadata": metadata, + "status": "OK", + } + _LOGGER.debug( + f"Received response for {client_call_details.method}.", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.GenerativeService", + "rpcName": client_call_details.method, + "response": grpc_response, + "metadata": grpc_response["metadata"], + }, + ) + return response + + +class GenerativeServiceGrpcTransport(GenerativeServiceTransport): + """gRPC backend transport for GenerativeService. + + API for using Large Models that generate multimodal content + and have additional capabilities beyond text generation. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. 
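The logging interceptor above only emits its request/response records when DEBUG logging is enabled for the transport's logger (and when ``google.api_core`` exposes ``client_logging``). A minimal sketch of turning that on, assuming the transport loggers live under this package's namespace:

.. code-block:: python

    import logging

    logging.basicConfig(level=logging.INFO)
    # Assumption: the transport module loggers are children of this package
    # logger, so raising it to DEBUG satisfies the isEnabledFor(DEBUG) check
    # that guards the gRPC request/response log entries.
    logging.getLogger("google.ai.generativelanguage_v1alpha").setLevel(logging.DEBUG)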
+ + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "generativelanguage.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'generativelanguage.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if a ``channel`` instance is provided. + channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. 
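When the defaults are not enough (for example, to reuse a pre-built channel or tune gRPC options), the transport can be constructed directly and handed to the client. A minimal sketch, assuming Application Default Credentials are available for ``create_channel``:

.. code-block:: python

    from google.ai import generativelanguage_v1alpha
    from google.ai.generativelanguage_v1alpha.services.generative_service.transports.grpc import (
        GenerativeServiceGrpcTransport,
    )

    # Build the channel explicitly, then wrap it in the gRPC transport; when a
    # channel instance is supplied, the transport ignores credential arguments.
    channel = GenerativeServiceGrpcTransport.create_channel(
        "generativelanguage.googleapis.com"
    )
    transport = GenerativeServiceGrpcTransport(channel=channel)
    client = generativelanguage_v1alpha.GenerativeServiceClient(transport=transport)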
+ + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, grpc.Channel): + # Ignore credentials if a channel was passed. + credentials = None + self._ignore_credentials = True + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + self._interceptor = _LoggingClientInterceptor() + self._logged_channel = grpc.intercept_channel( + self._grpc_channel, self._interceptor + ) + + # Wrap messages. This must be done after self._logged_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "generativelanguage.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. 
+ scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service.""" + return self._grpc_channel + + @property + def generate_content( + self, + ) -> Callable[ + [generative_service.GenerateContentRequest], + generative_service.GenerateContentResponse, + ]: + r"""Return a callable for the generate content method over gRPC. + + Generates a model response given an input + ``GenerateContentRequest``. Refer to the `text generation + guide `__ + for detailed usage information. Input capabilities differ + between models, including tuned models. Refer to the `model + guide `__ + and `tuning + guide `__ + for details. + + Returns: + Callable[[~.GenerateContentRequest], + ~.GenerateContentResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "generate_content" not in self._stubs: + self._stubs["generate_content"] = self._logged_channel.unary_unary( + "/google.ai.generativelanguage.v1alpha.GenerativeService/GenerateContent", + request_serializer=generative_service.GenerateContentRequest.serialize, + response_deserializer=generative_service.GenerateContentResponse.deserialize, + ) + return self._stubs["generate_content"] + + @property + def generate_answer( + self, + ) -> Callable[ + [generative_service.GenerateAnswerRequest], + generative_service.GenerateAnswerResponse, + ]: + r"""Return a callable for the generate answer method over gRPC. + + Generates a grounded answer from the model given an input + ``GenerateAnswerRequest``. + + Returns: + Callable[[~.GenerateAnswerRequest], + ~.GenerateAnswerResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "generate_answer" not in self._stubs: + self._stubs["generate_answer"] = self._logged_channel.unary_unary( + "/google.ai.generativelanguage.v1alpha.GenerativeService/GenerateAnswer", + request_serializer=generative_service.GenerateAnswerRequest.serialize, + response_deserializer=generative_service.GenerateAnswerResponse.deserialize, + ) + return self._stubs["generate_answer"] + + @property + def stream_generate_content( + self, + ) -> Callable[ + [generative_service.GenerateContentRequest], + generative_service.GenerateContentResponse, + ]: + r"""Return a callable for the stream generate content method over gRPC. 
+ + Generates a `streamed + response `__ + from the model given an input ``GenerateContentRequest``. + + Returns: + Callable[[~.GenerateContentRequest], + ~.GenerateContentResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "stream_generate_content" not in self._stubs: + self._stubs["stream_generate_content"] = self._logged_channel.unary_stream( + "/google.ai.generativelanguage.v1alpha.GenerativeService/StreamGenerateContent", + request_serializer=generative_service.GenerateContentRequest.serialize, + response_deserializer=generative_service.GenerateContentResponse.deserialize, + ) + return self._stubs["stream_generate_content"] + + @property + def embed_content( + self, + ) -> Callable[ + [generative_service.EmbedContentRequest], + generative_service.EmbedContentResponse, + ]: + r"""Return a callable for the embed content method over gRPC. + + Generates a text embedding vector from the input ``Content`` + using the specified `Gemini Embedding + model `__. + + Returns: + Callable[[~.EmbedContentRequest], + ~.EmbedContentResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "embed_content" not in self._stubs: + self._stubs["embed_content"] = self._logged_channel.unary_unary( + "/google.ai.generativelanguage.v1alpha.GenerativeService/EmbedContent", + request_serializer=generative_service.EmbedContentRequest.serialize, + response_deserializer=generative_service.EmbedContentResponse.deserialize, + ) + return self._stubs["embed_content"] + + @property + def batch_embed_contents( + self, + ) -> Callable[ + [generative_service.BatchEmbedContentsRequest], + generative_service.BatchEmbedContentsResponse, + ]: + r"""Return a callable for the batch embed contents method over gRPC. + + Generates multiple embedding vectors from the input ``Content`` + which consists of a batch of strings represented as + ``EmbedContentRequest`` objects. + + Returns: + Callable[[~.BatchEmbedContentsRequest], + ~.BatchEmbedContentsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "batch_embed_contents" not in self._stubs: + self._stubs["batch_embed_contents"] = self._logged_channel.unary_unary( + "/google.ai.generativelanguage.v1alpha.GenerativeService/BatchEmbedContents", + request_serializer=generative_service.BatchEmbedContentsRequest.serialize, + response_deserializer=generative_service.BatchEmbedContentsResponse.deserialize, + ) + return self._stubs["batch_embed_contents"] + + @property + def count_tokens( + self, + ) -> Callable[ + [generative_service.CountTokensRequest], generative_service.CountTokensResponse + ]: + r"""Return a callable for the count tokens method over gRPC. + + Runs a model's tokenizer on input ``Content`` and returns the + token count. Refer to the `tokens + guide `__ to learn + more about tokens. 
+ + Returns: + Callable[[~.CountTokensRequest], + ~.CountTokensResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "count_tokens" not in self._stubs: + self._stubs["count_tokens"] = self._logged_channel.unary_unary( + "/google.ai.generativelanguage.v1alpha.GenerativeService/CountTokens", + request_serializer=generative_service.CountTokensRequest.serialize, + response_deserializer=generative_service.CountTokensResponse.deserialize, + ) + return self._stubs["count_tokens"] + + @property + def bidi_generate_content( + self, + ) -> Callable[ + [generative_service.BidiGenerateContentClientMessage], + generative_service.BidiGenerateContentServerMessage, + ]: + r"""Return a callable for the bidi generate content method over gRPC. + + Low-Latency bidirectional streaming API that supports + audio and video streaming inputs can produce multimodal + output streams (audio and text). + + Returns: + Callable[[~.BidiGenerateContentClientMessage], + ~.BidiGenerateContentServerMessage]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "bidi_generate_content" not in self._stubs: + self._stubs["bidi_generate_content"] = self._logged_channel.stream_stream( + "/google.ai.generativelanguage.v1alpha.GenerativeService/BidiGenerateContent", + request_serializer=generative_service.BidiGenerateContentClientMessage.serialize, + response_deserializer=generative_service.BidiGenerateContentServerMessage.deserialize, + ) + return self._stubs["bidi_generate_content"] + + def close(self): + self._logged_channel.close() + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
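+        # The stub is created once and cached in ``self._stubs`` keyed by
+        # method name, so repeated property access reuses the same callable
+        # instead of re-registering it on the channel.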
+ if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("GenerativeServiceGrpcTransport",) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/generative_service/transports/grpc_asyncio.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/generative_service/transports/grpc_asyncio.py new file mode 100644 index 000000000000..584719dd4572 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/generative_service/transports/grpc_asyncio.py @@ -0,0 +1,654 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import inspect +import json +import logging as std_logging +import pickle +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, grpc_helpers_async +from google.api_core import retry_async as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf.json_format import MessageToJson +import google.protobuf.message +import grpc # type: ignore +from grpc.experimental import aio # type: ignore +import proto # type: ignore + +from google.ai.generativelanguage_v1alpha.types import generative_service + +from .base import DEFAULT_CLIENT_INFO, GenerativeServiceTransport +from .grpc import GenerativeServiceGrpcTransport + +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + + +class _LoggingClientAIOInterceptor( + grpc.aio.UnaryUnaryClientInterceptor +): # pragma: NO COVER + async def intercept_unary_unary(self, continuation, client_call_details, request): + logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ) + if logging_enabled: # pragma: NO COVER + request_metadata = client_call_details.metadata + if isinstance(request, proto.Message): + request_payload = type(request).to_json(request) + elif isinstance(request, google.protobuf.message.Message): + request_payload = MessageToJson(request) + else: + request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" + + request_metadata = { + key: value.decode("utf-8") if isinstance(value, bytes) else value + for key, value in request_metadata + } + grpc_request = { 
+ "payload": request_payload, + "requestMethod": "grpc", + "metadata": dict(request_metadata), + } + _LOGGER.debug( + f"Sending request for {client_call_details.method}", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.GenerativeService", + "rpcName": str(client_call_details.method), + "request": grpc_request, + "metadata": grpc_request["metadata"], + }, + ) + response = await continuation(client_call_details, request) + if logging_enabled: # pragma: NO COVER + response_metadata = await response.trailing_metadata() + # Convert gRPC metadata `` to list of tuples + metadata = ( + dict([(k, str(v)) for k, v in response_metadata]) + if response_metadata + else None + ) + result = await response + if isinstance(result, proto.Message): + response_payload = type(result).to_json(result) + elif isinstance(result, google.protobuf.message.Message): + response_payload = MessageToJson(result) + else: + response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" + grpc_response = { + "payload": response_payload, + "metadata": metadata, + "status": "OK", + } + _LOGGER.debug( + f"Received response to rpc {client_call_details.method}.", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.GenerativeService", + "rpcName": str(client_call_details.method), + "response": grpc_response, + "metadata": grpc_response["metadata"], + }, + ) + return response + + +class GenerativeServiceGrpcAsyncIOTransport(GenerativeServiceTransport): + """gRPC AsyncIO backend transport for GenerativeService. + + API for using Large Models that generate multimodal content + and have additional capabilities beyond text generation. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "generativelanguage.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. 
+ """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "generativelanguage.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'generativelanguage.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. 
+ Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, aio.Channel): + # Ignore credentials if a channel was passed. + credentials = None + self._ignore_credentials = True + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + self._interceptor = _LoggingClientAIOInterceptor() + self._grpc_channel._unary_unary_interceptors.append(self._interceptor) + self._logged_channel = self._grpc_channel + self._wrap_with_kind = ( + "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters + ) + # Wrap messages. This must be done after self._logged_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def generate_content( + self, + ) -> Callable[ + [generative_service.GenerateContentRequest], + Awaitable[generative_service.GenerateContentResponse], + ]: + r"""Return a callable for the generate content method over gRPC. 
+ + Generates a model response given an input + ``GenerateContentRequest``. Refer to the `text generation + guide `__ + for detailed usage information. Input capabilities differ + between models, including tuned models. Refer to the `model + guide `__ + and `tuning + guide `__ + for details. + + Returns: + Callable[[~.GenerateContentRequest], + Awaitable[~.GenerateContentResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "generate_content" not in self._stubs: + self._stubs["generate_content"] = self._logged_channel.unary_unary( + "/google.ai.generativelanguage.v1alpha.GenerativeService/GenerateContent", + request_serializer=generative_service.GenerateContentRequest.serialize, + response_deserializer=generative_service.GenerateContentResponse.deserialize, + ) + return self._stubs["generate_content"] + + @property + def generate_answer( + self, + ) -> Callable[ + [generative_service.GenerateAnswerRequest], + Awaitable[generative_service.GenerateAnswerResponse], + ]: + r"""Return a callable for the generate answer method over gRPC. + + Generates a grounded answer from the model given an input + ``GenerateAnswerRequest``. + + Returns: + Callable[[~.GenerateAnswerRequest], + Awaitable[~.GenerateAnswerResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "generate_answer" not in self._stubs: + self._stubs["generate_answer"] = self._logged_channel.unary_unary( + "/google.ai.generativelanguage.v1alpha.GenerativeService/GenerateAnswer", + request_serializer=generative_service.GenerateAnswerRequest.serialize, + response_deserializer=generative_service.GenerateAnswerResponse.deserialize, + ) + return self._stubs["generate_answer"] + + @property + def stream_generate_content( + self, + ) -> Callable[ + [generative_service.GenerateContentRequest], + Awaitable[generative_service.GenerateContentResponse], + ]: + r"""Return a callable for the stream generate content method over gRPC. + + Generates a `streamed + response `__ + from the model given an input ``GenerateContentRequest``. + + Returns: + Callable[[~.GenerateContentRequest], + Awaitable[~.GenerateContentResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "stream_generate_content" not in self._stubs: + self._stubs["stream_generate_content"] = self._logged_channel.unary_stream( + "/google.ai.generativelanguage.v1alpha.GenerativeService/StreamGenerateContent", + request_serializer=generative_service.GenerateContentRequest.serialize, + response_deserializer=generative_service.GenerateContentResponse.deserialize, + ) + return self._stubs["stream_generate_content"] + + @property + def embed_content( + self, + ) -> Callable[ + [generative_service.EmbedContentRequest], + Awaitable[generative_service.EmbedContentResponse], + ]: + r"""Return a callable for the embed content method over gRPC. 
+ + Generates a text embedding vector from the input ``Content`` + using the specified `Gemini Embedding + model `__. + + Returns: + Callable[[~.EmbedContentRequest], + Awaitable[~.EmbedContentResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "embed_content" not in self._stubs: + self._stubs["embed_content"] = self._logged_channel.unary_unary( + "/google.ai.generativelanguage.v1alpha.GenerativeService/EmbedContent", + request_serializer=generative_service.EmbedContentRequest.serialize, + response_deserializer=generative_service.EmbedContentResponse.deserialize, + ) + return self._stubs["embed_content"] + + @property + def batch_embed_contents( + self, + ) -> Callable[ + [generative_service.BatchEmbedContentsRequest], + Awaitable[generative_service.BatchEmbedContentsResponse], + ]: + r"""Return a callable for the batch embed contents method over gRPC. + + Generates multiple embedding vectors from the input ``Content`` + which consists of a batch of strings represented as + ``EmbedContentRequest`` objects. + + Returns: + Callable[[~.BatchEmbedContentsRequest], + Awaitable[~.BatchEmbedContentsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "batch_embed_contents" not in self._stubs: + self._stubs["batch_embed_contents"] = self._logged_channel.unary_unary( + "/google.ai.generativelanguage.v1alpha.GenerativeService/BatchEmbedContents", + request_serializer=generative_service.BatchEmbedContentsRequest.serialize, + response_deserializer=generative_service.BatchEmbedContentsResponse.deserialize, + ) + return self._stubs["batch_embed_contents"] + + @property + def count_tokens( + self, + ) -> Callable[ + [generative_service.CountTokensRequest], + Awaitable[generative_service.CountTokensResponse], + ]: + r"""Return a callable for the count tokens method over gRPC. + + Runs a model's tokenizer on input ``Content`` and returns the + token count. Refer to the `tokens + guide `__ to learn + more about tokens. + + Returns: + Callable[[~.CountTokensRequest], + Awaitable[~.CountTokensResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "count_tokens" not in self._stubs: + self._stubs["count_tokens"] = self._logged_channel.unary_unary( + "/google.ai.generativelanguage.v1alpha.GenerativeService/CountTokens", + request_serializer=generative_service.CountTokensRequest.serialize, + response_deserializer=generative_service.CountTokensResponse.deserialize, + ) + return self._stubs["count_tokens"] + + @property + def bidi_generate_content( + self, + ) -> Callable[ + [generative_service.BidiGenerateContentClientMessage], + Awaitable[generative_service.BidiGenerateContentServerMessage], + ]: + r"""Return a callable for the bidi generate content method over gRPC. 
+ + Low-Latency bidirectional streaming API that supports + audio and video streaming inputs can produce multimodal + output streams (audio and text). + + Returns: + Callable[[~.BidiGenerateContentClientMessage], + Awaitable[~.BidiGenerateContentServerMessage]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "bidi_generate_content" not in self._stubs: + self._stubs["bidi_generate_content"] = self._logged_channel.stream_stream( + "/google.ai.generativelanguage.v1alpha.GenerativeService/BidiGenerateContent", + request_serializer=generative_service.BidiGenerateContentClientMessage.serialize, + response_deserializer=generative_service.BidiGenerateContentServerMessage.deserialize, + ) + return self._stubs["bidi_generate_content"] + + def _prep_wrapped_messages(self, client_info): + """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + self._wrapped_methods = { + self.generate_content: self._wrap_method( + self.generate_content, + default_timeout=None, + client_info=client_info, + ), + self.generate_answer: self._wrap_method( + self.generate_answer, + default_timeout=None, + client_info=client_info, + ), + self.stream_generate_content: self._wrap_method( + self.stream_generate_content, + default_timeout=None, + client_info=client_info, + ), + self.embed_content: self._wrap_method( + self.embed_content, + default_timeout=None, + client_info=client_info, + ), + self.batch_embed_contents: self._wrap_method( + self.batch_embed_contents, + default_timeout=None, + client_info=client_info, + ), + self.count_tokens: self._wrap_method( + self.count_tokens, + default_timeout=None, + client_info=client_info, + ), + self.bidi_generate_content: self._wrap_method( + self.bidi_generate_content, + default_timeout=None, + client_info=client_info, + ), + self.get_operation: self._wrap_method( + self.get_operation, + default_timeout=None, + client_info=client_info, + ), + self.list_operations: self._wrap_method( + self.list_operations, + default_timeout=None, + client_info=client_info, + ), + } + + def _wrap_method(self, func, *args, **kwargs): + if self._wrap_with_kind: # pragma: NO COVER + kwargs["kind"] = self.kind + return gapic_v1.method_async.wrap_method(func, *args, **kwargs) + + def close(self): + return self._logged_channel.close() + + @property + def kind(self) -> str: + return "grpc_asyncio" + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
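+        # Because the logging interceptor was appended to the channel's
+        # unary-unary interceptors in ``__init__``, calls made through this
+        # stub are also captured by DEBUG-level client logging when enabled.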
+ if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + +__all__ = ("GenerativeServiceGrpcAsyncIOTransport",) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/generative_service/transports/rest.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/generative_service/transports/rest.py new file mode 100644 index 000000000000..97a44bb5c61c --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/generative_service/transports/rest.py @@ -0,0 +1,1903 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import dataclasses +import json # type: ignore +import logging +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, rest_helpers, rest_streaming +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import json_format +from requests import __version__ as requests_version + +from google.ai.generativelanguage_v1alpha.types import generative_service + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .rest_base import _BaseGenerativeServiceRestTransport + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = logging.getLogger(__name__) + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=f"requests@{requests_version}", +) + + +class GenerativeServiceRestInterceptor: + """Interceptor for GenerativeService. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the GenerativeServiceRestTransport. + + .. 
code-block:: python + class MyCustomGenerativeServiceInterceptor(GenerativeServiceRestInterceptor): + def pre_batch_embed_contents(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_batch_embed_contents(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_count_tokens(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_count_tokens(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_embed_content(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_embed_content(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_generate_answer(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_generate_answer(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_generate_content(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_generate_content(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_stream_generate_content(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_stream_generate_content(self, response): + logging.log(f"Received response: {response}") + return response + + transport = GenerativeServiceRestTransport(interceptor=MyCustomGenerativeServiceInterceptor()) + client = GenerativeServiceClient(transport=transport) + + + """ + + def pre_batch_embed_contents( + self, + request: generative_service.BatchEmbedContentsRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + generative_service.BatchEmbedContentsRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for batch_embed_contents + + Override in a subclass to manipulate the request or metadata + before they are sent to the GenerativeService server. + """ + return request, metadata + + def post_batch_embed_contents( + self, response: generative_service.BatchEmbedContentsResponse + ) -> generative_service.BatchEmbedContentsResponse: + """Post-rpc interceptor for batch_embed_contents + + DEPRECATED. Please use the `post_batch_embed_contents_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the GenerativeService server but before + it is returned to user code. This `post_batch_embed_contents` interceptor runs + before the `post_batch_embed_contents_with_metadata` interceptor. + """ + return response + + def post_batch_embed_contents_with_metadata( + self, + response: generative_service.BatchEmbedContentsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + generative_service.BatchEmbedContentsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for batch_embed_contents + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the GenerativeService server but before it is returned to user code. + + We recommend only using this `post_batch_embed_contents_with_metadata` + interceptor in new development instead of the `post_batch_embed_contents` interceptor. 
+ When both interceptors are used, this `post_batch_embed_contents_with_metadata` interceptor runs after the + `post_batch_embed_contents` interceptor. The (possibly modified) response returned by + `post_batch_embed_contents` will be passed to + `post_batch_embed_contents_with_metadata`. + """ + return response, metadata + + def pre_count_tokens( + self, + request: generative_service.CountTokensRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + generative_service.CountTokensRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for count_tokens + + Override in a subclass to manipulate the request or metadata + before they are sent to the GenerativeService server. + """ + return request, metadata + + def post_count_tokens( + self, response: generative_service.CountTokensResponse + ) -> generative_service.CountTokensResponse: + """Post-rpc interceptor for count_tokens + + DEPRECATED. Please use the `post_count_tokens_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the GenerativeService server but before + it is returned to user code. This `post_count_tokens` interceptor runs + before the `post_count_tokens_with_metadata` interceptor. + """ + return response + + def post_count_tokens_with_metadata( + self, + response: generative_service.CountTokensResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + generative_service.CountTokensResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for count_tokens + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the GenerativeService server but before it is returned to user code. + + We recommend only using this `post_count_tokens_with_metadata` + interceptor in new development instead of the `post_count_tokens` interceptor. + When both interceptors are used, this `post_count_tokens_with_metadata` interceptor runs after the + `post_count_tokens` interceptor. The (possibly modified) response returned by + `post_count_tokens` will be passed to + `post_count_tokens_with_metadata`. + """ + return response, metadata + + def pre_embed_content( + self, + request: generative_service.EmbedContentRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + generative_service.EmbedContentRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for embed_content + + Override in a subclass to manipulate the request or metadata + before they are sent to the GenerativeService server. + """ + return request, metadata + + def post_embed_content( + self, response: generative_service.EmbedContentResponse + ) -> generative_service.EmbedContentResponse: + """Post-rpc interceptor for embed_content + + DEPRECATED. Please use the `post_embed_content_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the GenerativeService server but before + it is returned to user code. This `post_embed_content` interceptor runs + before the `post_embed_content_with_metadata` interceptor. 
+ """ + return response + + def post_embed_content_with_metadata( + self, + response: generative_service.EmbedContentResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + generative_service.EmbedContentResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for embed_content + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the GenerativeService server but before it is returned to user code. + + We recommend only using this `post_embed_content_with_metadata` + interceptor in new development instead of the `post_embed_content` interceptor. + When both interceptors are used, this `post_embed_content_with_metadata` interceptor runs after the + `post_embed_content` interceptor. The (possibly modified) response returned by + `post_embed_content` will be passed to + `post_embed_content_with_metadata`. + """ + return response, metadata + + def pre_generate_answer( + self, + request: generative_service.GenerateAnswerRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + generative_service.GenerateAnswerRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for generate_answer + + Override in a subclass to manipulate the request or metadata + before they are sent to the GenerativeService server. + """ + return request, metadata + + def post_generate_answer( + self, response: generative_service.GenerateAnswerResponse + ) -> generative_service.GenerateAnswerResponse: + """Post-rpc interceptor for generate_answer + + DEPRECATED. Please use the `post_generate_answer_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the GenerativeService server but before + it is returned to user code. This `post_generate_answer` interceptor runs + before the `post_generate_answer_with_metadata` interceptor. + """ + return response + + def post_generate_answer_with_metadata( + self, + response: generative_service.GenerateAnswerResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + generative_service.GenerateAnswerResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for generate_answer + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the GenerativeService server but before it is returned to user code. + + We recommend only using this `post_generate_answer_with_metadata` + interceptor in new development instead of the `post_generate_answer` interceptor. + When both interceptors are used, this `post_generate_answer_with_metadata` interceptor runs after the + `post_generate_answer` interceptor. The (possibly modified) response returned by + `post_generate_answer` will be passed to + `post_generate_answer_with_metadata`. + """ + return response, metadata + + def pre_generate_content( + self, + request: generative_service.GenerateContentRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + generative_service.GenerateContentRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for generate_content + + Override in a subclass to manipulate the request or metadata + before they are sent to the GenerativeService server. + """ + return request, metadata + + def post_generate_content( + self, response: generative_service.GenerateContentResponse + ) -> generative_service.GenerateContentResponse: + """Post-rpc interceptor for generate_content + + DEPRECATED. 
Please use the `post_generate_content_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the GenerativeService server but before + it is returned to user code. This `post_generate_content` interceptor runs + before the `post_generate_content_with_metadata` interceptor. + """ + return response + + def post_generate_content_with_metadata( + self, + response: generative_service.GenerateContentResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + generative_service.GenerateContentResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for generate_content + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the GenerativeService server but before it is returned to user code. + + We recommend only using this `post_generate_content_with_metadata` + interceptor in new development instead of the `post_generate_content` interceptor. + When both interceptors are used, this `post_generate_content_with_metadata` interceptor runs after the + `post_generate_content` interceptor. The (possibly modified) response returned by + `post_generate_content` will be passed to + `post_generate_content_with_metadata`. + """ + return response, metadata + + def pre_stream_generate_content( + self, + request: generative_service.GenerateContentRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + generative_service.GenerateContentRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for stream_generate_content + + Override in a subclass to manipulate the request or metadata + before they are sent to the GenerativeService server. + """ + return request, metadata + + def post_stream_generate_content( + self, response: rest_streaming.ResponseIterator + ) -> rest_streaming.ResponseIterator: + """Post-rpc interceptor for stream_generate_content + + DEPRECATED. Please use the `post_stream_generate_content_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the GenerativeService server but before + it is returned to user code. This `post_stream_generate_content` interceptor runs + before the `post_stream_generate_content_with_metadata` interceptor. + """ + return response + + def post_stream_generate_content_with_metadata( + self, + response: rest_streaming.ResponseIterator, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + rest_streaming.ResponseIterator, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for stream_generate_content + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the GenerativeService server but before it is returned to user code. + + We recommend only using this `post_stream_generate_content_with_metadata` + interceptor in new development instead of the `post_stream_generate_content` interceptor. + When both interceptors are used, this `post_stream_generate_content_with_metadata` interceptor runs after the + `post_stream_generate_content` interceptor. The (possibly modified) response returned by + `post_stream_generate_content` will be passed to + `post_stream_generate_content_with_metadata`. 
+ """ + return response, metadata + + def pre_get_operation( + self, + request: operations_pb2.GetOperationRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + operations_pb2.GetOperationRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for get_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the GenerativeService server. + """ + return request, metadata + + def post_get_operation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for get_operation + + Override in a subclass to manipulate the response + after it is returned by the GenerativeService server but before + it is returned to user code. + """ + return response + + def pre_list_operations( + self, + request: operations_pb2.ListOperationsRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + operations_pb2.ListOperationsRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for list_operations + + Override in a subclass to manipulate the request or metadata + before they are sent to the GenerativeService server. + """ + return request, metadata + + def post_list_operations( + self, response: operations_pb2.ListOperationsResponse + ) -> operations_pb2.ListOperationsResponse: + """Post-rpc interceptor for list_operations + + Override in a subclass to manipulate the response + after it is returned by the GenerativeService server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class GenerativeServiceRestStub: + _session: AuthorizedSession + _host: str + _interceptor: GenerativeServiceRestInterceptor + + +class GenerativeServiceRestTransport(_BaseGenerativeServiceRestTransport): + """REST backend synchronous transport for GenerativeService. + + API for using Large Models that generate multimodal content + and have additional capabilities beyond text generation. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + """ + + def __init__( + self, + *, + host: str = "generativelanguage.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[GenerativeServiceRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'generativelanguage.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. 
This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + url_scheme=url_scheme, + api_audience=api_audience, + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST + ) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or GenerativeServiceRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _BatchEmbedContents( + _BaseGenerativeServiceRestTransport._BaseBatchEmbedContents, + GenerativeServiceRestStub, + ): + def __hash__(self): + return hash("GenerativeServiceRestTransport.BatchEmbedContents") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: generative_service.BatchEmbedContentsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> generative_service.BatchEmbedContentsResponse: + r"""Call the batch embed contents method over HTTP. + + Args: + request (~.generative_service.BatchEmbedContentsRequest): + The request object. Batch request to get embeddings from + the model for a list of prompts. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.generative_service.BatchEmbedContentsResponse: + The response to a ``BatchEmbedContentsRequest``. 
+ """ + + http_options = ( + _BaseGenerativeServiceRestTransport._BaseBatchEmbedContents._get_http_options() + ) + + request, metadata = self._interceptor.pre_batch_embed_contents( + request, metadata + ) + transcoded_request = _BaseGenerativeServiceRestTransport._BaseBatchEmbedContents._get_transcoded_request( + http_options, request + ) + + body = _BaseGenerativeServiceRestTransport._BaseBatchEmbedContents._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseGenerativeServiceRestTransport._BaseBatchEmbedContents._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.ai.generativelanguage_v1alpha.GenerativeServiceClient.BatchEmbedContents", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.GenerativeService", + "rpcName": "BatchEmbedContents", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = GenerativeServiceRestTransport._BatchEmbedContents._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = generative_service.BatchEmbedContentsResponse() + pb_resp = generative_service.BatchEmbedContentsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_batch_embed_contents(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_batch_embed_contents_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = ( + generative_service.BatchEmbedContentsResponse.to_json(response) + ) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.ai.generativelanguage_v1alpha.GenerativeServiceClient.batch_embed_contents", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.GenerativeService", + "rpcName": "BatchEmbedContents", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _BidiGenerateContent( + _BaseGenerativeServiceRestTransport._BaseBidiGenerateContent, + GenerativeServiceRestStub, + ): + def __hash__(self): + return hash("GenerativeServiceRestTransport.BidiGenerateContent") + + def __call__( + self, + request: generative_service.BidiGenerateContentClientMessage, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> rest_streaming.ResponseIterator: + raise NotImplementedError( + "Method BidiGenerateContent is not available over REST transport" 
+ ) + + class _CountTokens( + _BaseGenerativeServiceRestTransport._BaseCountTokens, GenerativeServiceRestStub + ): + def __hash__(self): + return hash("GenerativeServiceRestTransport.CountTokens") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: generative_service.CountTokensRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> generative_service.CountTokensResponse: + r"""Call the count tokens method over HTTP. + + Args: + request (~.generative_service.CountTokensRequest): + The request object. Counts the number of tokens in the ``prompt`` sent to a + model. + + Models may tokenize text differently, so each model may + return a different ``token_count``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.generative_service.CountTokensResponse: + A response from ``CountTokens``. + + It returns the model's ``token_count`` for the + ``prompt``. + + """ + + http_options = ( + _BaseGenerativeServiceRestTransport._BaseCountTokens._get_http_options() + ) + + request, metadata = self._interceptor.pre_count_tokens(request, metadata) + transcoded_request = _BaseGenerativeServiceRestTransport._BaseCountTokens._get_transcoded_request( + http_options, request + ) + + body = _BaseGenerativeServiceRestTransport._BaseCountTokens._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseGenerativeServiceRestTransport._BaseCountTokens._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.ai.generativelanguage_v1alpha.GenerativeServiceClient.CountTokens", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.GenerativeService", + "rpcName": "CountTokens", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = GenerativeServiceRestTransport._CountTokens._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = generative_service.CountTokensResponse() + pb_resp = generative_service.CountTokensResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_count_tokens(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_count_tokens_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = generative_service.CountTokensResponse.to_json( + response + ) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.ai.generativelanguage_v1alpha.GenerativeServiceClient.count_tokens", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.GenerativeService", + "rpcName": "CountTokens", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _EmbedContent( + _BaseGenerativeServiceRestTransport._BaseEmbedContent, GenerativeServiceRestStub + ): + def __hash__(self): + return hash("GenerativeServiceRestTransport.EmbedContent") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: generative_service.EmbedContentRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> generative_service.EmbedContentResponse: + r"""Call the embed content method over HTTP. + + Args: + request (~.generative_service.EmbedContentRequest): + The request object. Request containing the ``Content`` for the model to + embed. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.generative_service.EmbedContentResponse: + The response to an ``EmbedContentRequest``. 
+ """ + + http_options = ( + _BaseGenerativeServiceRestTransport._BaseEmbedContent._get_http_options() + ) + + request, metadata = self._interceptor.pre_embed_content(request, metadata) + transcoded_request = _BaseGenerativeServiceRestTransport._BaseEmbedContent._get_transcoded_request( + http_options, request + ) + + body = _BaseGenerativeServiceRestTransport._BaseEmbedContent._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseGenerativeServiceRestTransport._BaseEmbedContent._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.ai.generativelanguage_v1alpha.GenerativeServiceClient.EmbedContent", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.GenerativeService", + "rpcName": "EmbedContent", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = GenerativeServiceRestTransport._EmbedContent._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = generative_service.EmbedContentResponse() + pb_resp = generative_service.EmbedContentResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_embed_content(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_embed_content_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = generative_service.EmbedContentResponse.to_json( + response + ) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.ai.generativelanguage_v1alpha.GenerativeServiceClient.embed_content", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.GenerativeService", + "rpcName": "EmbedContent", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _GenerateAnswer( + _BaseGenerativeServiceRestTransport._BaseGenerateAnswer, + GenerativeServiceRestStub, + ): + def __hash__(self): + return hash("GenerativeServiceRestTransport.GenerateAnswer") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + 
) + return response + + def __call__( + self, + request: generative_service.GenerateAnswerRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> generative_service.GenerateAnswerResponse: + r"""Call the generate answer method over HTTP. + + Args: + request (~.generative_service.GenerateAnswerRequest): + The request object. Request to generate a grounded answer from the + ``Model``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.generative_service.GenerateAnswerResponse: + Response from the model for a + grounded answer. + + """ + + http_options = ( + _BaseGenerativeServiceRestTransport._BaseGenerateAnswer._get_http_options() + ) + + request, metadata = self._interceptor.pre_generate_answer(request, metadata) + transcoded_request = _BaseGenerativeServiceRestTransport._BaseGenerateAnswer._get_transcoded_request( + http_options, request + ) + + body = _BaseGenerativeServiceRestTransport._BaseGenerateAnswer._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseGenerativeServiceRestTransport._BaseGenerateAnswer._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.ai.generativelanguage_v1alpha.GenerativeServiceClient.GenerateAnswer", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.GenerativeService", + "rpcName": "GenerateAnswer", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = GenerativeServiceRestTransport._GenerateAnswer._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = generative_service.GenerateAnswerResponse() + pb_resp = generative_service.GenerateAnswerResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_generate_answer(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_generate_answer_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = ( + generative_service.GenerateAnswerResponse.to_json(response) + ) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.ai.generativelanguage_v1alpha.GenerativeServiceClient.generate_answer", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.GenerativeService", + "rpcName": "GenerateAnswer", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _GenerateContent( + _BaseGenerativeServiceRestTransport._BaseGenerateContent, + GenerativeServiceRestStub, + ): + def __hash__(self): + return hash("GenerativeServiceRestTransport.GenerateContent") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: generative_service.GenerateContentRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> generative_service.GenerateContentResponse: + r"""Call the generate content method over HTTP. + + Args: + request (~.generative_service.GenerateContentRequest): + The request object. Request to generate a completion from + the model. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.generative_service.GenerateContentResponse: + Response from the model supporting multiple candidate + responses. + + Safety ratings and content filtering are reported for + both prompt in + ``GenerateContentResponse.prompt_feedback`` and for each + candidate in ``finish_reason`` and in + ``safety_ratings``. The API: + + - Returns either all requested candidates or none of + them + - Returns no candidates at all only if there was + something wrong with the prompt (check + ``prompt_feedback``) + - Reports feedback on each candidate in + ``finish_reason`` and ``safety_ratings``. 
+ + """ + + http_options = ( + _BaseGenerativeServiceRestTransport._BaseGenerateContent._get_http_options() + ) + + request, metadata = self._interceptor.pre_generate_content( + request, metadata + ) + transcoded_request = _BaseGenerativeServiceRestTransport._BaseGenerateContent._get_transcoded_request( + http_options, request + ) + + body = _BaseGenerativeServiceRestTransport._BaseGenerateContent._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseGenerativeServiceRestTransport._BaseGenerateContent._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.ai.generativelanguage_v1alpha.GenerativeServiceClient.GenerateContent", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.GenerativeService", + "rpcName": "GenerateContent", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = GenerativeServiceRestTransport._GenerateContent._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = generative_service.GenerateContentResponse() + pb_resp = generative_service.GenerateContentResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_generate_content(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_generate_content_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = ( + generative_service.GenerateContentResponse.to_json(response) + ) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.ai.generativelanguage_v1alpha.GenerativeServiceClient.generate_content", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.GenerativeService", + "rpcName": "GenerateContent", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _StreamGenerateContent( + _BaseGenerativeServiceRestTransport._BaseStreamGenerateContent, + GenerativeServiceRestStub, + ): + def __hash__(self): + return hash("GenerativeServiceRestTransport.StreamGenerateContent") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + 
params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + stream=True, + ) + return response + + def __call__( + self, + request: generative_service.GenerateContentRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> rest_streaming.ResponseIterator: + r"""Call the stream generate content method over HTTP. + + Args: + request (~.generative_service.GenerateContentRequest): + The request object. Request to generate a completion from + the model. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.generative_service.GenerateContentResponse: + Response from the model supporting multiple candidate + responses. + + Safety ratings and content filtering are reported for + both prompt in + ``GenerateContentResponse.prompt_feedback`` and for each + candidate in ``finish_reason`` and in + ``safety_ratings``. The API: + + - Returns either all requested candidates or none of + them + - Returns no candidates at all only if there was + something wrong with the prompt (check + ``prompt_feedback``) + - Reports feedback on each candidate in + ``finish_reason`` and ``safety_ratings``. + + """ + + http_options = ( + _BaseGenerativeServiceRestTransport._BaseStreamGenerateContent._get_http_options() + ) + + request, metadata = self._interceptor.pre_stream_generate_content( + request, metadata + ) + transcoded_request = _BaseGenerativeServiceRestTransport._BaseStreamGenerateContent._get_transcoded_request( + http_options, request + ) + + body = _BaseGenerativeServiceRestTransport._BaseStreamGenerateContent._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseGenerativeServiceRestTransport._BaseStreamGenerateContent._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.ai.generativelanguage_v1alpha.GenerativeServiceClient.StreamGenerateContent", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.GenerativeService", + "rpcName": "StreamGenerateContent", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = ( + GenerativeServiceRestTransport._StreamGenerateContent._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = rest_streaming.ResponseIterator( + response, generative_service.GenerateContentResponse + ) + + resp = self._interceptor.post_stream_generate_content(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_stream_generate_content_with_metadata( + resp, response_metadata + ) + return resp + + @property + def batch_embed_contents( + self, + ) -> Callable[ + [generative_service.BatchEmbedContentsRequest], + generative_service.BatchEmbedContentsResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._BatchEmbedContents(self._session, self._host, self._interceptor) # type: ignore + + @property + def bidi_generate_content( + self, + ) -> Callable[ + [generative_service.BidiGenerateContentClientMessage], + generative_service.BidiGenerateContentServerMessage, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._BidiGenerateContent(self._session, self._host, self._interceptor) # type: ignore + + @property + def count_tokens( + self, + ) -> Callable[ + [generative_service.CountTokensRequest], generative_service.CountTokensResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CountTokens(self._session, self._host, self._interceptor) # type: ignore + + @property + def embed_content( + self, + ) -> Callable[ + [generative_service.EmbedContentRequest], + generative_service.EmbedContentResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._EmbedContent(self._session, self._host, self._interceptor) # type: ignore + + @property + def generate_answer( + self, + ) -> Callable[ + [generative_service.GenerateAnswerRequest], + generative_service.GenerateAnswerResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GenerateAnswer(self._session, self._host, self._interceptor) # type: ignore + + @property + def generate_content( + self, + ) -> Callable[ + [generative_service.GenerateContentRequest], + generative_service.GenerateContentResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GenerateContent(self._session, self._host, self._interceptor) # type: ignore + + @property + def stream_generate_content( + self, + ) -> Callable[ + [generative_service.GenerateContentRequest], + generative_service.GenerateContentResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
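These property accessors are what calling code ultimately reaches once the REST transport is selected. A minimal usage sketch, assuming an API key and a placeholder model name (neither value comes from this patch), could look like the following; the call is served by the `_CountTokens` stub defined earlier in this file.

```python
# Hedged sketch: select the REST transport explicitly and call CountTokens.
# The api_key and model name are placeholders, not values from the patch.
from google.ai import generativelanguage_v1alpha as glm

client = glm.GenerativeServiceClient(
    transport="rest",
    client_options={"api_key": "YOUR_API_KEY"},
)
response = client.count_tokens(
    request=glm.CountTokensRequest(
        model="models/gemini-1.5-flash",
        contents=[glm.Content(parts=[glm.Part(text="Hello world")])],
    )
)
print(response.total_tokens)
```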
+ # In C++ this would require a dynamic_cast + return self._StreamGenerateContent(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_operation(self): + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + + class _GetOperation( + _BaseGenerativeServiceRestTransport._BaseGetOperation, GenerativeServiceRestStub + ): + def __hash__(self): + return hash("GenerativeServiceRestTransport.GetOperation") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: operations_pb2.GetOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Call the get operation method over HTTP. + + Args: + request (operations_pb2.GetOperationRequest): + The request object for GetOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + operations_pb2.Operation: Response from GetOperation method. + """ + + http_options = ( + _BaseGenerativeServiceRestTransport._BaseGetOperation._get_http_options() + ) + + request, metadata = self._interceptor.pre_get_operation(request, metadata) + transcoded_request = _BaseGenerativeServiceRestTransport._BaseGetOperation._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseGenerativeServiceRestTransport._BaseGetOperation._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.ai.generativelanguage_v1alpha.GenerativeServiceClient.GetOperation", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.GenerativeService", + "rpcName": "GetOperation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = GenerativeServiceRestTransport._GetOperation._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = operations_pb2.Operation() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_get_operation(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.ai.generativelanguage_v1alpha.GenerativeServiceAsyncClient.GetOperation", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.GenerativeService", + "rpcName": "GetOperation", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) + return resp + + @property + def list_operations(self): + return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore + + class _ListOperations( + _BaseGenerativeServiceRestTransport._BaseListOperations, + GenerativeServiceRestStub, + ): + def __hash__(self): + return hash("GenerativeServiceRestTransport.ListOperations") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: operations_pb2.ListOperationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Call the list operations method over HTTP. + + Args: + request (operations_pb2.ListOperationsRequest): + The request object for ListOperations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + operations_pb2.ListOperationsResponse: Response from ListOperations method. 
+ """ + + http_options = ( + _BaseGenerativeServiceRestTransport._BaseListOperations._get_http_options() + ) + + request, metadata = self._interceptor.pre_list_operations(request, metadata) + transcoded_request = _BaseGenerativeServiceRestTransport._BaseListOperations._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseGenerativeServiceRestTransport._BaseListOperations._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.ai.generativelanguage_v1alpha.GenerativeServiceClient.ListOperations", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.GenerativeService", + "rpcName": "ListOperations", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = GenerativeServiceRestTransport._ListOperations._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = operations_pb2.ListOperationsResponse() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_list_operations(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.ai.generativelanguage_v1alpha.GenerativeServiceAsyncClient.ListOperations", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.GenerativeService", + "rpcName": "ListOperations", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) + return resp + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("GenerativeServiceRestTransport",) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/generative_service/transports/rest_base.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/generative_service/transports/rest_base.py new file mode 100644 index 000000000000..f622ef84db0d --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/generative_service/transports/rest_base.py @@ -0,0 +1,510 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union + +from google.api_core import gapic_v1, path_template +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import json_format + +from google.ai.generativelanguage_v1alpha.types import generative_service + +from .base import DEFAULT_CLIENT_INFO, GenerativeServiceTransport + + +class _BaseGenerativeServiceRestTransport(GenerativeServiceTransport): + """Base REST backend transport for GenerativeService. + + Note: This class is not meant to be used directly. Use its sync and + async sub-classes instead. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + """ + + def __init__( + self, + *, + host: str = "generativelanguage.googleapis.com", + credentials: Optional[Any] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + Args: + host (Optional[str]): + The hostname to connect to (default: 'generativelanguage.googleapis.com'). + credentials (Optional[Any]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. 
+ """ + # Run the base constructor + maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) + if maybe_url_match is None: + raise ValueError( + f"Unexpected hostname structure: {host}" + ) # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + class _BaseBatchEmbedContents: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1alpha/{model=models/*}:batchEmbedContents", + "body": "*", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = generative_service.BatchEmbedContentsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseGenerativeServiceRestTransport._BaseBatchEmbedContents._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseBidiGenerateContent: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + class _BaseCountTokens: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1alpha/{model=models/*}:countTokens", + "body": "*", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = generative_service.CountTokensRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseGenerativeServiceRestTransport._BaseCountTokens._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class 
_BaseEmbedContent: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1alpha/{model=models/*}:embedContent", + "body": "*", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = generative_service.EmbedContentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseGenerativeServiceRestTransport._BaseEmbedContent._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGenerateAnswer: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1alpha/{model=models/*}:generateAnswer", + "body": "*", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = generative_service.GenerateAnswerRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseGenerativeServiceRestTransport._BaseGenerateAnswer._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGenerateContent: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1alpha/{model=models/*}:generateContent", + "body": "*", + }, + { + "method": "post", + "uri": "/v1alpha/{model=tunedModels/*}:generateContent", + "body": "*", + }, + ] + return http_options + + 
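Each `_Base*` helper pairs its list of `http_options` with `path_template.transcode`, which selects the first rule whose URI template matches the request. A rough illustration using keyword arguments and a made-up model name (the real helpers pass the protobuf request object instead) is sketched below.

```python
# Rough sketch of what _get_transcoded_request does under the hood; the model
# name is illustrative only.
from google.api_core import path_template

http_options = [
    {
        "method": "post",
        "uri": "/v1alpha/{model=models/*}:generateContent",
        "body": "*",
    },
    {
        "method": "post",
        "uri": "/v1alpha/{model=tunedModels/*}:generateContent",
        "body": "*",
    },
]

transcoded = path_template.transcode(http_options, model="models/gemini-pro")
# Expected (roughly): {"method": "post",
#                      "uri": "/v1alpha/models/gemini-pro:generateContent",
#                      "body": {}, "query_params": {}}
```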
@staticmethod + def _get_transcoded_request(http_options, request): + pb_request = generative_service.GenerateContentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseGenerativeServiceRestTransport._BaseGenerateContent._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseStreamGenerateContent: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1alpha/{model=models/*}:streamGenerateContent", + "body": "*", + }, + { + "method": "post", + "uri": "/v1alpha/{model=tunedModels/*}:streamGenerateContent", + "body": "*", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = generative_service.GenerateContentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseGenerativeServiceRestTransport._BaseStreamGenerateContent._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGetOperation: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha/{name=tunedModels/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1alpha/{name=generatedFiles/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1alpha/{name=models/*/operations/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + return query_params + + class _BaseListOperations: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + 
"method": "get", + "uri": "/v1alpha/{name=tunedModels/*}/operations", + }, + { + "method": "get", + "uri": "/v1alpha/{name=models/*}/operations", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + return query_params + + +__all__ = ("_BaseGenerativeServiceRestTransport",) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/model_service/__init__.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/model_service/__init__.py new file mode 100644 index 000000000000..86afc5d9278d --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/model_service/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .async_client import ModelServiceAsyncClient +from .client import ModelServiceClient + +__all__ = ( + "ModelServiceClient", + "ModelServiceAsyncClient", +) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/model_service/async_client.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/model_service/async_client.py new file mode 100644 index 000000000000..1844d64aea7f --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/model_service/async_client.py @@ -0,0 +1,1261 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import logging as std_logging +import re +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry_async as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.ai.generativelanguage_v1alpha import gapic_version as package_version + +try: + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + +from google.ai.generativelanguage_v1alpha.services.model_service import pagers +from google.ai.generativelanguage_v1alpha.types import tuned_model as gag_tuned_model +from google.ai.generativelanguage_v1alpha.types import model, model_service +from google.ai.generativelanguage_v1alpha.types import tuned_model + +from .client import ModelServiceClient +from .transports.base import DEFAULT_CLIENT_INFO, ModelServiceTransport +from .transports.grpc_asyncio import ModelServiceGrpcAsyncIOTransport + +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + + +class ModelServiceAsyncClient: + """Provides methods for getting metadata information about + Generative Models. + """ + + _client: ModelServiceClient + + # Copy defaults from the synchronous client for use here. + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. 
+ DEFAULT_ENDPOINT = ModelServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = ModelServiceClient.DEFAULT_MTLS_ENDPOINT + _DEFAULT_ENDPOINT_TEMPLATE = ModelServiceClient._DEFAULT_ENDPOINT_TEMPLATE + _DEFAULT_UNIVERSE = ModelServiceClient._DEFAULT_UNIVERSE + + model_path = staticmethod(ModelServiceClient.model_path) + parse_model_path = staticmethod(ModelServiceClient.parse_model_path) + tuned_model_path = staticmethod(ModelServiceClient.tuned_model_path) + parse_tuned_model_path = staticmethod(ModelServiceClient.parse_tuned_model_path) + common_billing_account_path = staticmethod( + ModelServiceClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + ModelServiceClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(ModelServiceClient.common_folder_path) + parse_common_folder_path = staticmethod(ModelServiceClient.parse_common_folder_path) + common_organization_path = staticmethod(ModelServiceClient.common_organization_path) + parse_common_organization_path = staticmethod( + ModelServiceClient.parse_common_organization_path + ) + common_project_path = staticmethod(ModelServiceClient.common_project_path) + parse_common_project_path = staticmethod( + ModelServiceClient.parse_common_project_path + ) + common_location_path = staticmethod(ModelServiceClient.common_location_path) + parse_common_location_path = staticmethod( + ModelServiceClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ModelServiceAsyncClient: The constructed client. + """ + return ModelServiceClient.from_service_account_info.__func__(ModelServiceAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ModelServiceAsyncClient: The constructed client. + """ + return ModelServiceClient.from_service_account_file.__func__(ModelServiceAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. 
+ (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return ModelServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> ModelServiceTransport: + """Returns the transport used by the client instance. + + Returns: + ModelServiceTransport: The transport used by the client instance. + """ + return self._client.transport + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._client._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used + by the client instance. + """ + return self._client._universe_domain + + get_transport_class = ModelServiceClient.get_transport_class + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[str, ModelServiceTransport, Callable[..., ModelServiceTransport]] + ] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the model service async client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,ModelServiceTransport,Callable[..., ModelServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport to use. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the ModelServiceTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. 
If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = ModelServiceClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ): # pragma: NO COVER + _LOGGER.debug( + "Created client `google.ai.generativelanguage_v1alpha.ModelServiceAsyncClient`.", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.ModelService", + "universeDomain": getattr( + self._client._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._client._transport, "_credentials") + else { + "serviceName": "google.ai.generativelanguage.v1alpha.ModelService", + "credentialsType": None, + }, + ) + + async def get_model( + self, + request: Optional[Union[model_service.GetModelRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> model.Model: + r"""Gets information about a specific ``Model`` such as its version + number, token limits, + `parameters `__ + and other metadata. Refer to the `Gemini models + guide `__ + for detailed model information. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1alpha + + async def sample_get_model(): + # Create a client + client = generativelanguage_v1alpha.ModelServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.GetModelRequest( + name="name_value", + ) + + # Make the request + response = await client.get_model(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.ai.generativelanguage_v1alpha.types.GetModelRequest, dict]]): + The request object. Request for getting information about + a specific Model. + name (:class:`str`): + Required. The resource name of the model. 
+ + This name should match a model name returned by the + ``ListModels`` method. + + Format: ``models/{model}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.ai.generativelanguage_v1alpha.types.Model: + Information about a Generative + Language Model. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, model_service.GetModelRequest): + request = model_service.GetModelRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_model + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_models( + self, + request: Optional[Union[model_service.ListModelsRequest, dict]] = None, + *, + page_size: Optional[int] = None, + page_token: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListModelsAsyncPager: + r"""Lists the + ```Model``\ s `__ + available through the Gemini API. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1alpha + + async def sample_list_models(): + # Create a client + client = generativelanguage_v1alpha.ModelServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.ListModelsRequest( + ) + + # Make the request + page_result = client.list_models(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.ai.generativelanguage_v1alpha.types.ListModelsRequest, dict]]): + The request object. Request for listing all Models. + page_size (:class:`int`): + The maximum number of ``Models`` to return (per page). + + If unspecified, 50 models will be returned per page. + This method returns at most 1000 models per page, even + if you pass a larger page_size. + + This corresponds to the ``page_size`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + page_token (:class:`str`): + A page token, received from a previous ``ListModels`` + call. + + Provide the ``page_token`` returned by one request as an + argument to the next request to retrieve the next page. + + When paginating, all other parameters provided to + ``ListModels`` must match the call that provided the + page token. + + This corresponds to the ``page_token`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.ai.generativelanguage_v1alpha.services.model_service.pagers.ListModelsAsyncPager: + Response from ListModel containing a paginated list of + Models. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([page_size, page_token]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, model_service.ListModelsRequest): + request = model_service.ListModelsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if page_size is not None: + request.page_size = page_size + if page_token is not None: + request.page_token = page_token + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_models + ] + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. 
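+        # The call below returns a single ListModelsResponse page; the pager
+        # constructed afterwards re-invokes `rpc` with the returned page token
+        # as additional pages are iterated.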
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListModelsAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_tuned_model( + self, + request: Optional[Union[model_service.GetTunedModelRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> tuned_model.TunedModel: + r"""Gets information about a specific TunedModel. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1alpha + + async def sample_get_tuned_model(): + # Create a client + client = generativelanguage_v1alpha.ModelServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.GetTunedModelRequest( + name="name_value", + ) + + # Make the request + response = await client.get_tuned_model(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.ai.generativelanguage_v1alpha.types.GetTunedModelRequest, dict]]): + The request object. Request for getting information about + a specific Model. + name (:class:`str`): + Required. The resource name of the model. + + Format: ``tunedModels/my-model-id`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.ai.generativelanguage_v1alpha.types.TunedModel: + A fine-tuned model created using + ModelService.CreateTunedModel. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, model_service.GetTunedModelRequest): + request = model_service.GetTunedModelRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
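+        # The wrapped method also carries the default retry/timeout settings
+        # configured on the transport for GetTunedModel.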
+ rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_tuned_model + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_tuned_models( + self, + request: Optional[Union[model_service.ListTunedModelsRequest, dict]] = None, + *, + page_size: Optional[int] = None, + page_token: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListTunedModelsAsyncPager: + r"""Lists created tuned models. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1alpha + + async def sample_list_tuned_models(): + # Create a client + client = generativelanguage_v1alpha.ModelServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.ListTunedModelsRequest( + ) + + # Make the request + page_result = client.list_tuned_models(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.ai.generativelanguage_v1alpha.types.ListTunedModelsRequest, dict]]): + The request object. Request for listing TunedModels. + page_size (:class:`int`): + Optional. The maximum number of ``TunedModels`` to + return (per page). The service may return fewer tuned + models. + + If unspecified, at most 10 tuned models will be + returned. This method returns at most 1000 models per + page, even if you pass a larger page_size. + + This corresponds to the ``page_size`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + page_token (:class:`str`): + Optional. A page token, received from a previous + ``ListTunedModels`` call. + + Provide the ``page_token`` returned by one request as an + argument to the next request to retrieve the next page. + + When paginating, all other parameters provided to + ``ListTunedModels`` must match the call that provided + the page token. + + This corresponds to the ``page_token`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.ai.generativelanguage_v1alpha.services.model_service.pagers.ListTunedModelsAsyncPager: + Response from ListTunedModels containing a paginated + list of Models. 
+ + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([page_size, page_token]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, model_service.ListTunedModelsRequest): + request = model_service.ListTunedModelsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if page_size is not None: + request.page_size = page_size + if page_token is not None: + request.page_token = page_token + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_tuned_models + ] + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListTunedModelsAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_tuned_model( + self, + request: Optional[Union[model_service.CreateTunedModelRequest, dict]] = None, + *, + tuned_model: Optional[gag_tuned_model.TunedModel] = None, + tuned_model_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates a tuned model. Check intermediate tuning progress (if + any) through the [google.longrunning.Operations] service. + + Access status and results through the Operations service. + Example: GET /v1/tunedModels/az2mb0bpw6i/operations/000-111-222 + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1alpha + + async def sample_create_tuned_model(): + # Create a client + client = generativelanguage_v1alpha.ModelServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.CreateTunedModelRequest( + ) + + # Make the request + operation = client.create_tuned_model(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.ai.generativelanguage_v1alpha.types.CreateTunedModelRequest, dict]]): + The request object. Request to create a TunedModel. 
+ tuned_model (:class:`google.ai.generativelanguage_v1alpha.types.TunedModel`): + Required. The tuned model to create. + This corresponds to the ``tuned_model`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + tuned_model_id (:class:`str`): + Optional. The unique id for the tuned model if + specified. This value should be up to 40 characters, the + first character must be a letter, the last could be a + letter or a number. The id must match the regular + expression: ``[a-z]([a-z0-9-]{0,38}[a-z0-9])?``. + + This corresponds to the ``tuned_model_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.ai.generativelanguage_v1alpha.types.TunedModel` + A fine-tuned model created using + ModelService.CreateTunedModel. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([tuned_model, tuned_model_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, model_service.CreateTunedModelRequest): + request = model_service.CreateTunedModelRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if tuned_model is not None: + request.tuned_model = tuned_model + if tuned_model_id is not None: + request.tuned_model_id = tuned_model_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_tuned_model + ] + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + gag_tuned_model.TunedModel, + metadata_type=model_service.CreateTunedModelMetadata, + ) + + # Done; return the response. + return response + + async def update_tuned_model( + self, + request: Optional[Union[model_service.UpdateTunedModelRequest, dict]] = None, + *, + tuned_model: Optional[gag_tuned_model.TunedModel] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> gag_tuned_model.TunedModel: + r"""Updates a tuned model. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1alpha + + async def sample_update_tuned_model(): + # Create a client + client = generativelanguage_v1alpha.ModelServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.UpdateTunedModelRequest( + ) + + # Make the request + response = await client.update_tuned_model(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.ai.generativelanguage_v1alpha.types.UpdateTunedModelRequest, dict]]): + The request object. Request to update a TunedModel. + tuned_model (:class:`google.ai.generativelanguage_v1alpha.types.TunedModel`): + Required. The tuned model to update. + This corresponds to the ``tuned_model`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Optional. The list of fields to + update. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.ai.generativelanguage_v1alpha.types.TunedModel: + A fine-tuned model created using + ModelService.CreateTunedModel. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([tuned_model, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, model_service.UpdateTunedModelRequest): + request = model_service.UpdateTunedModelRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if tuned_model is not None: + request.tuned_model = tuned_model + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_tuned_model + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("tuned_model.name", request.tuned_model.name),) + ), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. 
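+        # Only the fields listed in `update_mask` (when provided) are changed;
+        # the call returns the updated TunedModel.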
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_tuned_model( + self, + request: Optional[Union[model_service.DeleteTunedModelRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Deletes a tuned model. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1alpha + + async def sample_delete_tuned_model(): + # Create a client + client = generativelanguage_v1alpha.ModelServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.DeleteTunedModelRequest( + name="name_value", + ) + + # Make the request + await client.delete_tuned_model(request=request) + + Args: + request (Optional[Union[google.ai.generativelanguage_v1alpha.types.DeleteTunedModelRequest, dict]]): + The request object. Request to delete a TunedModel. + name (:class:`str`): + Required. The resource name of the model. Format: + ``tunedModels/my-model-id`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, model_service.DeleteTunedModelRequest): + request = model_service.DeleteTunedModelRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_tuned_model + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. 
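+        # The delete RPC has no meaningful response body, so nothing is
+        # returned to the caller.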
+ await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.list_operations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.get_operation] + + # Certain fields should be provided within the metadata header; + # add these here. 
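+        # The operation's resource name is attached as an
+        # `x-goog-request-params` routing header.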
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def __aenter__(self) -> "ModelServiceAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("ModelServiceAsyncClient",) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/model_service/client.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/model_service/client.py new file mode 100644 index 000000000000..3973fb52d16e --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/model_service/client.py @@ -0,0 +1,1683 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from http import HTTPStatus +import json +import logging as std_logging +import os +import re +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) +import warnings + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.ai.generativelanguage_v1alpha import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + +from google.ai.generativelanguage_v1alpha.services.model_service import pagers +from google.ai.generativelanguage_v1alpha.types import tuned_model as gag_tuned_model +from 
google.ai.generativelanguage_v1alpha.types import model, model_service
+from google.ai.generativelanguage_v1alpha.types import tuned_model
+
+from .transports.base import DEFAULT_CLIENT_INFO, ModelServiceTransport
+from .transports.grpc import ModelServiceGrpcTransport
+from .transports.grpc_asyncio import ModelServiceGrpcAsyncIOTransport
+from .transports.rest import ModelServiceRestTransport
+
+
+class ModelServiceClientMeta(type):
+    """Metaclass for the ModelService client.
+
+    This provides class-level methods for building and retrieving
+    support objects (e.g. transport) without polluting the client instance
+    objects.
+    """
+
+    _transport_registry = OrderedDict()  # type: Dict[str, Type[ModelServiceTransport]]
+    _transport_registry["grpc"] = ModelServiceGrpcTransport
+    _transport_registry["grpc_asyncio"] = ModelServiceGrpcAsyncIOTransport
+    _transport_registry["rest"] = ModelServiceRestTransport
+
+    def get_transport_class(
+        cls,
+        label: Optional[str] = None,
+    ) -> Type[ModelServiceTransport]:
+        """Returns an appropriate transport class.
+
+        Args:
+            label: The name of the desired transport. If none is
+                provided, then the first transport in the registry is used.
+
+        Returns:
+            The transport class to use.
+        """
+        # If a specific transport is requested, return that one.
+        if label:
+            return cls._transport_registry[label]
+
+        # No transport is requested; return the default (that is, the first one
+        # in the dictionary).
+        return next(iter(cls._transport_registry.values()))
+
+
+class ModelServiceClient(metaclass=ModelServiceClientMeta):
+    """Provides methods for getting metadata information about
+    Generative Models.
+    """
+
+    @staticmethod
+    def _get_default_mtls_endpoint(api_endpoint):
+        """Converts api endpoint to mTLS endpoint.
+
+        Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
+        "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
+        Args:
+            api_endpoint (Optional[str]): the api endpoint to convert.
+        Returns:
+            str: converted mTLS api endpoint.
+        """
+        if not api_endpoint:
+            return api_endpoint
+
+        mtls_endpoint_re = re.compile(
+            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
+        )
+
+        m = mtls_endpoint_re.match(api_endpoint)
+        name, mtls, sandbox, googledomain = m.groups()
+        if mtls or not googledomain:
+            return api_endpoint
+
+        if sandbox:
+            return api_endpoint.replace(
+                "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
+            )
+
+        return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
+
+    # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead.
+    DEFAULT_ENDPOINT = "generativelanguage.googleapis.com"
+    DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__(  # type: ignore
+        DEFAULT_ENDPOINT
+    )
+
+    _DEFAULT_ENDPOINT_TEMPLATE = "generativelanguage.{UNIVERSE_DOMAIN}"
+    _DEFAULT_UNIVERSE = "googleapis.com"
+
+    @classmethod
+    def from_service_account_info(cls, info: dict, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            info.
+
+        Args:
+            info (dict): The service account private key info.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            ModelServiceClient: The constructed client.
+        """
+        credentials = service_account.Credentials.from_service_account_info(info)
+        kwargs["credentials"] = credentials
+        return cls(*args, **kwargs)
+
+    @classmethod
+    def from_service_account_file(cls, filename: str, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            file.
+
+        Args:
+            filename (str): The path to the service account private key json
+                file.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            ModelServiceClient: The constructed client.
+        """
+        credentials = service_account.Credentials.from_service_account_file(filename)
+        kwargs["credentials"] = credentials
+        return cls(*args, **kwargs)
+
+    from_service_account_json = from_service_account_file
+
+    @property
+    def transport(self) -> ModelServiceTransport:
+        """Returns the transport used by the client instance.
+
+        Returns:
+            ModelServiceTransport: The transport used by the client
+                instance.
+        """
+        return self._transport
+
+    @staticmethod
+    def model_path(
+        model: str,
+    ) -> str:
+        """Returns a fully-qualified model string."""
+        return "models/{model}".format(
+            model=model,
+        )
+
+    @staticmethod
+    def parse_model_path(path: str) -> Dict[str, str]:
+        """Parses a model path into its component segments."""
+        m = re.match(r"^models/(?P<model>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def tuned_model_path(
+        tuned_model: str,
+    ) -> str:
+        """Returns a fully-qualified tuned_model string."""
+        return "tunedModels/{tuned_model}".format(
+            tuned_model=tuned_model,
+        )
+
+    @staticmethod
+    def parse_tuned_model_path(path: str) -> Dict[str, str]:
+        """Parses a tuned_model path into its component segments."""
+        m = re.match(r"^tunedModels/(?P<tuned_model>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_billing_account_path(
+        billing_account: str,
+    ) -> str:
+        """Returns a fully-qualified billing_account string."""
+        return "billingAccounts/{billing_account}".format(
+            billing_account=billing_account,
+        )
+
+    @staticmethod
+    def parse_common_billing_account_path(path: str) -> Dict[str, str]:
+        """Parse a billing_account path into its component segments."""
+        m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_folder_path(
+        folder: str,
+    ) -> str:
+        """Returns a fully-qualified folder string."""
+        return "folders/{folder}".format(
+            folder=folder,
+        )
+
+    @staticmethod
+    def parse_common_folder_path(path: str) -> Dict[str, str]:
+        """Parse a folder path into its component segments."""
+        m = re.match(r"^folders/(?P<folder>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_organization_path(
+        organization: str,
+    ) -> str:
+        """Returns a fully-qualified organization string."""
+        return "organizations/{organization}".format(
+            organization=organization,
+        )
+
+    @staticmethod
+    def parse_common_organization_path(path: str) -> Dict[str, str]:
+        """Parse an organization path into its component segments."""
+        m = re.match(r"^organizations/(?P<organization>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_project_path(
+        project: str,
+    ) -> str:
+        """Returns a fully-qualified project string."""
+        return "projects/{project}".format(
+            project=project,
+        )
+
+    @staticmethod
+    def parse_common_project_path(path: str) -> Dict[str, str]:
+        """Parse a project path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_location_path(
+        project: str,
+        location: str,
+    ) -> str:
+        """Returns a fully-qualified location string."""
+        return "projects/{project}/locations/{location}".format(
+            project=project,
+            location=location,
+        )
+
+    @staticmethod
+    def parse_common_location_path(path: str) -> Dict[str, str]:
+        """Parse a location path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @classmethod
+    def get_mtls_endpoint_and_cert_source(
+        cls, client_options: Optional[client_options_lib.ClientOptions] = None
+    ):
+        """Deprecated. Return the API endpoint and client cert source for mutual TLS.
+
+        The client cert source is determined in the following order:
+        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+        client cert source is None.
+        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+        default client cert source exists, use the default one; otherwise the client cert
+        source is None.
+
+        The API endpoint is determined in the following order:
+        (1) if `client_options.api_endpoint` is provided, use the provided one.
+        (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
+        default mTLS endpoint; if the environment variable is "never", use the default API
+        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
+        use the default API endpoint.
+
+        More details can be found at https://google.aip.dev/auth/4114.
+
+        Args:
+            client_options (google.api_core.client_options.ClientOptions): Custom options for the
+                client. Only the `api_endpoint` and `client_cert_source` properties may be used
+                in this method.
+
+        Returns:
+            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
+                client cert source to use.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
+        """
+
+        warnings.warn(
+            "get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.",
+            DeprecationWarning,
+        )
+        if client_options is None:
+            client_options = client_options_lib.ClientOptions()
+        use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
+        use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
+        if use_client_cert not in ("true", "false"):
+            raise ValueError(
+                "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"
+            )
+        if use_mtls_endpoint not in ("auto", "never", "always"):
+            raise MutualTLSChannelError(
+                "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`"
+            )
+
+        # Figure out the client cert source to use.
+        client_cert_source = None
+        if use_client_cert == "true":
+            if client_options.client_cert_source:
+                client_cert_source = client_options.client_cert_source
+            elif mtls.has_default_client_cert_source():
+                client_cert_source = mtls.default_client_cert_source()
+
+        # Figure out which api endpoint to use.
+        if client_options.api_endpoint is not None:
+            api_endpoint = client_options.api_endpoint
+        elif use_mtls_endpoint == "always" or (
+            use_mtls_endpoint == "auto" and client_cert_source
+        ):
+            api_endpoint = cls.DEFAULT_MTLS_ENDPOINT
+        else:
+            api_endpoint = cls.DEFAULT_ENDPOINT
+
+        return api_endpoint, client_cert_source
+
+    @staticmethod
+    def _read_environment_variables():
+        """Returns the environment variables used by the client.
+ + Returns: + Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, + GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. + + Raises: + ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not + any of ["true", "false"]. + google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT + is not any of ["auto", "never", "always"]. + """ + use_client_cert = os.getenv( + "GOOGLE_API_USE_CLIENT_CERTIFICATE", "false" + ).lower() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() + universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + return use_client_cert == "true", use_mtls_endpoint, universe_domain_env + + @staticmethod + def _get_client_cert_source(provided_cert_source, use_cert_flag): + """Return the client cert source to be used by the client. + + Args: + provided_cert_source (bytes): The client certificate source provided. + use_cert_flag (bool): A flag indicating whether to use the client certificate. + + Returns: + bytes or None: The client cert source to be used by the client. + """ + client_cert_source = None + if use_cert_flag: + if provided_cert_source: + client_cert_source = provided_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + return client_cert_source + + @staticmethod + def _get_api_endpoint( + api_override, client_cert_source, universe_domain, use_mtls_endpoint + ): + """Return the API endpoint used by the client. + + Args: + api_override (str): The API endpoint override. If specified, this is always + the return value of this function and the other arguments are not used. + client_cert_source (bytes): The client certificate source used by the client. + universe_domain (str): The universe domain used by the client. + use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. + Possible values are "always", "auto", or "never". + + Returns: + str: The API endpoint to be used by the client. + """ + if api_override is not None: + api_endpoint = api_override + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + _default_universe = ModelServiceClient._DEFAULT_UNIVERSE + if universe_domain != _default_universe: + raise MutualTLSChannelError( + f"mTLS is not supported in any universe other than {_default_universe}." + ) + api_endpoint = ModelServiceClient.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = ModelServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=universe_domain + ) + return api_endpoint + + @staticmethod + def _get_universe_domain( + client_universe_domain: Optional[str], universe_domain_env: Optional[str] + ) -> str: + """Return the universe domain used by the client. + + Args: + client_universe_domain (Optional[str]): The universe domain configured via the client options. + universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. + + Returns: + str: The universe domain to be used by the client. + + Raises: + ValueError: If the universe domain is an empty string. 
+ """ + universe_domain = ModelServiceClient._DEFAULT_UNIVERSE + if client_universe_domain is not None: + universe_domain = client_universe_domain + elif universe_domain_env is not None: + universe_domain = universe_domain_env + if len(universe_domain.strip()) == 0: + raise ValueError("Universe Domain cannot be an empty string.") + return universe_domain + + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. + + Returns: + bool: True iff the configured universe domain is valid. + + Raises: + ValueError: If the configured universe domain is not valid. + """ + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True + + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used by the client instance. + """ + return self._universe_domain + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[str, ModelServiceTransport, Callable[..., ModelServiceTransport]] + ] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the model service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,ModelServiceTransport,Callable[..., ModelServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the ModelServiceTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. 
Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that the ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client_options = client_options + if isinstance(self._client_options, dict): + self._client_options = client_options_lib.from_dict(self._client_options) + if self._client_options is None: + self._client_options = client_options_lib.ClientOptions() + self._client_options = cast( + client_options_lib.ClientOptions, self._client_options + ) + + universe_domain_opt = getattr(self._client_options, "universe_domain", None) + + ( + self._use_client_cert, + self._use_mtls_endpoint, + self._universe_domain_env, + ) = ModelServiceClient._read_environment_variables() + self._client_cert_source = ModelServiceClient._get_client_cert_source( + self._client_options.client_cert_source, self._use_client_cert + ) + self._universe_domain = ModelServiceClient._get_universe_domain( + universe_domain_opt, self._universe_domain_env + ) + self._api_endpoint = None # updated below, depending on `transport` + + # Initialize the universe domain validation. + self._is_universe_domain_valid = False + + if CLIENT_LOGGING_SUPPORTED: # pragma: NO COVER + # Setup logging. + client_logging.initialize_logging() + + api_key_value = getattr(self._client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + transport_provided = isinstance(transport, ModelServiceTransport) + if transport_provided: + # transport is a ModelServiceTransport instance. + if credentials or self._client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if self._client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = cast(ModelServiceTransport, transport) + self._api_endpoint = self._transport.host + + self._api_endpoint = self._api_endpoint or ModelServiceClient._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint, + ) + + if not transport_provided: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + transport_init: Union[ + Type[ModelServiceTransport], Callable[..., ModelServiceTransport] + ] = ( + ModelServiceClient.get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., ModelServiceTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( + credentials=credentials, + credentials_file=self._client_options.credentials_file, + host=self._api_endpoint, + scopes=self._client_options.scopes, + client_cert_source_for_mtls=self._client_cert_source, + quota_project_id=self._client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=self._client_options.api_audience, + ) + + if "async" not in str(self._transport): + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ): # pragma: NO COVER + _LOGGER.debug( + "Created client `google.ai.generativelanguage_v1alpha.ModelServiceClient`.", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.ModelService", + "universeDomain": getattr( + self._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._transport, "_credentials") + else { + "serviceName": "google.ai.generativelanguage.v1alpha.ModelService", + "credentialsType": None, + }, + ) + + def get_model( + self, + request: Optional[Union[model_service.GetModelRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> model.Model: + r"""Gets information about a specific ``Model`` such as its version + number, token limits, + `parameters `__ + and other metadata. Refer to the `Gemini models + guide `__ + for detailed model information. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1alpha + + def sample_get_model(): + # Create a client + client = generativelanguage_v1alpha.ModelServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.GetModelRequest( + name="name_value", + ) + + # Make the request + response = client.get_model(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.ai.generativelanguage_v1alpha.types.GetModelRequest, dict]): + The request object. Request for getting information about + a specific Model. + name (str): + Required. The resource name of the model. + + This name should match a model name returned by the + ``ListModels`` method. + + Format: ``models/{model}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.ai.generativelanguage_v1alpha.types.Model: + Information about a Generative + Language Model. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, model_service.GetModelRequest): + request = model_service.GetModelRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_model] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_models( + self, + request: Optional[Union[model_service.ListModelsRequest, dict]] = None, + *, + page_size: Optional[int] = None, + page_token: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListModelsPager: + r"""Lists the + ```Model``\ s `__ + available through the Gemini API. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1alpha + + def sample_list_models(): + # Create a client + client = generativelanguage_v1alpha.ModelServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.ListModelsRequest( + ) + + # Make the request + page_result = client.list_models(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.ai.generativelanguage_v1alpha.types.ListModelsRequest, dict]): + The request object. Request for listing all Models. + page_size (int): + The maximum number of ``Models`` to return (per page). + + If unspecified, 50 models will be returned per page. + This method returns at most 1000 models per page, even + if you pass a larger page_size. + + This corresponds to the ``page_size`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + page_token (str): + A page token, received from a previous ``ListModels`` + call. + + Provide the ``page_token`` returned by one request as an + argument to the next request to retrieve the next page. + + When paginating, all other parameters provided to + ``ListModels`` must match the call that provided the + page token. + + This corresponds to the ``page_token`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.ai.generativelanguage_v1alpha.services.model_service.pagers.ListModelsPager: + Response from ListModel containing a paginated list of + Models. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([page_size, page_token]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, model_service.ListModelsRequest): + request = model_service.ListModelsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if page_size is not None: + request.page_size = page_size + if page_token is not None: + request.page_token = page_token + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_models] + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListModelsPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_tuned_model( + self, + request: Optional[Union[model_service.GetTunedModelRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> tuned_model.TunedModel: + r"""Gets information about a specific TunedModel. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1alpha + + def sample_get_tuned_model(): + # Create a client + client = generativelanguage_v1alpha.ModelServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.GetTunedModelRequest( + name="name_value", + ) + + # Make the request + response = client.get_tuned_model(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.ai.generativelanguage_v1alpha.types.GetTunedModelRequest, dict]): + The request object. Request for getting information about + a specific Model. + name (str): + Required. The resource name of the model. + + Format: ``tunedModels/my-model-id`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.ai.generativelanguage_v1alpha.types.TunedModel: + A fine-tuned model created using + ModelService.CreateTunedModel. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, model_service.GetTunedModelRequest): + request = model_service.GetTunedModelRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
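+        # (These wrapped methods are precomputed by the transport's
+        # ``_prep_wrapped_messages``, which applies ``gapic_v1.method.wrap_method``
+        # with a default timeout and the client info for each RPC.)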
+ rpc = self._transport._wrapped_methods[self._transport.get_tuned_model] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_tuned_models( + self, + request: Optional[Union[model_service.ListTunedModelsRequest, dict]] = None, + *, + page_size: Optional[int] = None, + page_token: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListTunedModelsPager: + r"""Lists created tuned models. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1alpha + + def sample_list_tuned_models(): + # Create a client + client = generativelanguage_v1alpha.ModelServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.ListTunedModelsRequest( + ) + + # Make the request + page_result = client.list_tuned_models(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.ai.generativelanguage_v1alpha.types.ListTunedModelsRequest, dict]): + The request object. Request for listing TunedModels. + page_size (int): + Optional. The maximum number of ``TunedModels`` to + return (per page). The service may return fewer tuned + models. + + If unspecified, at most 10 tuned models will be + returned. This method returns at most 1000 models per + page, even if you pass a larger page_size. + + This corresponds to the ``page_size`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + page_token (str): + Optional. A page token, received from a previous + ``ListTunedModels`` call. + + Provide the ``page_token`` returned by one request as an + argument to the next request to retrieve the next page. + + When paginating, all other parameters provided to + ``ListTunedModels`` must match the call that provided + the page token. + + This corresponds to the ``page_token`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.ai.generativelanguage_v1alpha.services.model_service.pagers.ListTunedModelsPager: + Response from ListTunedModels containing a paginated + list of Models. + + Iterating over this object will yield results and + resolve additional pages automatically. 
+ + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([page_size, page_token]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, model_service.ListTunedModelsRequest): + request = model_service.ListTunedModelsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if page_size is not None: + request.page_size = page_size + if page_token is not None: + request.page_token = page_token + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_tuned_models] + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListTunedModelsPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_tuned_model( + self, + request: Optional[Union[model_service.CreateTunedModelRequest, dict]] = None, + *, + tuned_model: Optional[gag_tuned_model.TunedModel] = None, + tuned_model_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: + r"""Creates a tuned model. Check intermediate tuning progress (if + any) through the [google.longrunning.Operations] service. + + Access status and results through the Operations service. + Example: GET /v1/tunedModels/az2mb0bpw6i/operations/000-111-222 + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1alpha + + def sample_create_tuned_model(): + # Create a client + client = generativelanguage_v1alpha.ModelServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.CreateTunedModelRequest( + ) + + # Make the request + operation = client.create_tuned_model(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.ai.generativelanguage_v1alpha.types.CreateTunedModelRequest, dict]): + The request object. Request to create a TunedModel. + tuned_model (google.ai.generativelanguage_v1alpha.types.TunedModel): + Required. The tuned model to create. + This corresponds to the ``tuned_model`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ tuned_model_id (str): + Optional. The unique id for the tuned model if + specified. This value should be up to 40 characters, the + first character must be a letter, the last could be a + letter or a number. The id must match the regular + expression: ``[a-z]([a-z0-9-]{0,38}[a-z0-9])?``. + + This corresponds to the ``tuned_model_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.ai.generativelanguage_v1alpha.types.TunedModel` + A fine-tuned model created using + ModelService.CreateTunedModel. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([tuned_model, tuned_model_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, model_service.CreateTunedModelRequest): + request = model_service.CreateTunedModelRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if tuned_model is not None: + request.tuned_model = tuned_model + if tuned_model_id is not None: + request.tuned_model_id = tuned_model_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_tuned_model] + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + gag_tuned_model.TunedModel, + metadata_type=model_service.CreateTunedModelMetadata, + ) + + # Done; return the response. + return response + + def update_tuned_model( + self, + request: Optional[Union[model_service.UpdateTunedModelRequest, dict]] = None, + *, + tuned_model: Optional[gag_tuned_model.TunedModel] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> gag_tuned_model.TunedModel: + r"""Updates a tuned model. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1alpha + + def sample_update_tuned_model(): + # Create a client + client = generativelanguage_v1alpha.ModelServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.UpdateTunedModelRequest( + ) + + # Make the request + response = client.update_tuned_model(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.ai.generativelanguage_v1alpha.types.UpdateTunedModelRequest, dict]): + The request object. Request to update a TunedModel. + tuned_model (google.ai.generativelanguage_v1alpha.types.TunedModel): + Required. The tuned model to update. + This corresponds to the ``tuned_model`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Optional. The list of fields to + update. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.ai.generativelanguage_v1alpha.types.TunedModel: + A fine-tuned model created using + ModelService.CreateTunedModel. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([tuned_model, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, model_service.UpdateTunedModelRequest): + request = model_service.UpdateTunedModelRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if tuned_model is not None: + request.tuned_model = tuned_model + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_tuned_model] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("tuned_model.name", request.tuned_model.name),) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def delete_tuned_model( + self, + request: Optional[Union[model_service.DeleteTunedModelRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Deletes a tuned model. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1alpha + + def sample_delete_tuned_model(): + # Create a client + client = generativelanguage_v1alpha.ModelServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.DeleteTunedModelRequest( + name="name_value", + ) + + # Make the request + client.delete_tuned_model(request=request) + + Args: + request (Union[google.ai.generativelanguage_v1alpha.types.DeleteTunedModelRequest, dict]): + The request object. Request to delete a TunedModel. + name (str): + Required. The resource name of the model. Format: + ``tunedModels/my-model-id`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, model_service.DeleteTunedModelRequest): + request = model_service.DeleteTunedModelRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_tuned_model] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def __enter__(self) -> "ModelServiceClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. 
warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_operations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e + + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. 
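+        # For example, a dict such as
+        # ``{"name": "tunedModels/my-model-id/operations/some-operation"}``
+        # (an illustrative name) is expanded into
+        # ``operations_pb2.GetOperationRequest(name=...)``.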
+ if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("ModelServiceClient",) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/model_service/pagers.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/model_service/pagers.py new file mode 100644 index 000000000000..c2266d4db067 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/model_service/pagers.py @@ -0,0 +1,353 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import ( + Any, + AsyncIterator, + Awaitable, + Callable, + Iterator, + Optional, + Sequence, + Tuple, + Union, +) + +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core import retry_async as retries_async + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] + OptionalAsyncRetry = Union[ + retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None + ] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore + +from google.ai.generativelanguage_v1alpha.types import model, model_service, tuned_model + + +class ListModelsPager: + """A pager for iterating through ``list_models`` requests. + + This class thinly wraps an initial + :class:`google.ai.generativelanguage_v1alpha.types.ListModelsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``models`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListModels`` requests and continue to iterate + through the ``models`` field on the + corresponding responses. + + All the usual :class:`google.ai.generativelanguage_v1alpha.types.ListModelsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
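+
+    A minimal usage sketch (mirroring the client's ``list_models`` sample;
+    assumes ``client`` is a ``ModelServiceClient``)::
+
+        pager = client.list_models()
+        for model in pager:  # transparently fetches additional pages
+            print(model)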
+ """ + + def __init__( + self, + method: Callable[..., model_service.ListModelsResponse], + request: model_service.ListModelsRequest, + response: model_service.ListModelsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.ai.generativelanguage_v1alpha.types.ListModelsRequest): + The initial request object. + response (google.ai.generativelanguage_v1alpha.types.ListModelsResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + self._method = method + self._request = model_service.ListModelsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[model_service.ListModelsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__(self) -> Iterator[model.Model]: + for page in self.pages: + yield from page.models + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListModelsAsyncPager: + """A pager for iterating through ``list_models`` requests. + + This class thinly wraps an initial + :class:`google.ai.generativelanguage_v1alpha.types.ListModelsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``models`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListModels`` requests and continue to iterate + through the ``models`` field on the + corresponding responses. + + All the usual :class:`google.ai.generativelanguage_v1alpha.types.ListModelsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[model_service.ListModelsResponse]], + request: model_service.ListModelsRequest, + response: model_service.ListModelsResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.ai.generativelanguage_v1alpha.types.ListModelsRequest): + The initial request object. + response (google.ai.generativelanguage_v1alpha.types.ListModelsResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + self._method = method + self._request = model_service.ListModelsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[model_service.ListModelsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __aiter__(self) -> AsyncIterator[model.Model]: + async def async_generator(): + async for page in self.pages: + for response in page.models: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListTunedModelsPager: + """A pager for iterating through ``list_tuned_models`` requests. + + This class thinly wraps an initial + :class:`google.ai.generativelanguage_v1alpha.types.ListTunedModelsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``tuned_models`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListTunedModels`` requests and continue to iterate + through the ``tuned_models`` field on the + corresponding responses. + + All the usual :class:`google.ai.generativelanguage_v1alpha.types.ListTunedModelsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., model_service.ListTunedModelsResponse], + request: model_service.ListTunedModelsRequest, + response: model_service.ListTunedModelsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.ai.generativelanguage_v1alpha.types.ListTunedModelsRequest): + The initial request object. + response (google.ai.generativelanguage_v1alpha.types.ListTunedModelsResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = model_service.ListTunedModelsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[model_service.ListTunedModelsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__(self) -> Iterator[tuned_model.TunedModel]: + for page in self.pages: + yield from page.tuned_models + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListTunedModelsAsyncPager: + """A pager for iterating through ``list_tuned_models`` requests. + + This class thinly wraps an initial + :class:`google.ai.generativelanguage_v1alpha.types.ListTunedModelsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``tuned_models`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListTunedModels`` requests and continue to iterate + through the ``tuned_models`` field on the + corresponding responses. + + All the usual :class:`google.ai.generativelanguage_v1alpha.types.ListTunedModelsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[model_service.ListTunedModelsResponse]], + request: model_service.ListTunedModelsRequest, + response: model_service.ListTunedModelsResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.ai.generativelanguage_v1alpha.types.ListTunedModelsRequest): + The initial request object. + response (google.ai.generativelanguage_v1alpha.types.ListTunedModelsResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = model_service.ListTunedModelsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[model_service.ListTunedModelsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __aiter__(self) -> AsyncIterator[tuned_model.TunedModel]: + async def async_generator(): + async for page in self.pages: + for response in page.tuned_models: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/model_service/transports/README.rst b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/model_service/transports/README.rst new file mode 100644 index 000000000000..05dddc4c34ad --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/model_service/transports/README.rst @@ -0,0 +1,9 @@ + +transport inheritance structure +_______________________________ + +`ModelServiceTransport` is the ABC for all transports. +- public child `ModelServiceGrpcTransport` for sync gRPC transport (defined in `grpc.py`). +- public child `ModelServiceGrpcAsyncIOTransport` for async gRPC transport (defined in `grpc_asyncio.py`). +- private child `_BaseModelServiceRestTransport` for base REST transport with inner classes `_BaseMETHOD` (defined in `rest_base.py`). +- public child `ModelServiceRestTransport` for sync REST transport with inner classes `METHOD` derived from the parent's corresponding `_BaseMETHOD` classes (defined in `rest.py`). diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/model_service/transports/__init__.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/model_service/transports/__init__.py new file mode 100644 index 000000000000..64fd23752c6d --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/model_service/transports/__init__.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import ModelServiceTransport +from .grpc import ModelServiceGrpcTransport +from .grpc_asyncio import ModelServiceGrpcAsyncIOTransport +from .rest import ModelServiceRestInterceptor, ModelServiceRestTransport + +# Compile a registry of transports. 
+_transport_registry = OrderedDict() # type: Dict[str, Type[ModelServiceTransport]] +_transport_registry["grpc"] = ModelServiceGrpcTransport +_transport_registry["grpc_asyncio"] = ModelServiceGrpcAsyncIOTransport +_transport_registry["rest"] = ModelServiceRestTransport + +__all__ = ( + "ModelServiceTransport", + "ModelServiceGrpcTransport", + "ModelServiceGrpcAsyncIOTransport", + "ModelServiceRestTransport", + "ModelServiceRestInterceptor", +) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/model_service/transports/base.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/model_service/transports/base.py new file mode 100644 index 000000000000..234dba88faef --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/model_service/transports/base.py @@ -0,0 +1,290 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, operations_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account # type: ignore +from google.protobuf import empty_pb2 # type: ignore + +from google.ai.generativelanguage_v1alpha import gapic_version as package_version +from google.ai.generativelanguage_v1alpha.types import tuned_model as gag_tuned_model +from google.ai.generativelanguage_v1alpha.types import model, model_service +from google.ai.generativelanguage_v1alpha.types import tuned_model + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class ModelServiceTransport(abc.ABC): + """Abstract transport class for ModelService.""" + + AUTH_SCOPES = () + + DEFAULT_HOST: str = "generativelanguage.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'generativelanguage.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. 
+ credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + if not hasattr(self, "_ignore_credentials"): + self._ignore_credentials: bool = False + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None and not self._ignore_credentials: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + @property + def host(self): + return self._host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. 
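+        # ``gapic_v1.method.wrap_method`` layers default retry/timeout settings and
+        # the client-info user-agent onto each transport callable. As a hedged
+        # sketch, one entry below is roughly equivalent to:
+        #
+        #     wrapped = gapic_v1.method.wrap_method(
+        #         self.get_model, default_timeout=None, client_info=client_info
+        #     )
+        #     wrapped(request, timeout=30.0)  # per-call overrides still apply
+        #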
+ self._wrapped_methods = { + self.get_model: gapic_v1.method.wrap_method( + self.get_model, + default_timeout=None, + client_info=client_info, + ), + self.list_models: gapic_v1.method.wrap_method( + self.list_models, + default_timeout=None, + client_info=client_info, + ), + self.get_tuned_model: gapic_v1.method.wrap_method( + self.get_tuned_model, + default_timeout=None, + client_info=client_info, + ), + self.list_tuned_models: gapic_v1.method.wrap_method( + self.list_tuned_models, + default_timeout=None, + client_info=client_info, + ), + self.create_tuned_model: gapic_v1.method.wrap_method( + self.create_tuned_model, + default_timeout=None, + client_info=client_info, + ), + self.update_tuned_model: gapic_v1.method.wrap_method( + self.update_tuned_model, + default_timeout=None, + client_info=client_info, + ), + self.delete_tuned_model: gapic_v1.method.wrap_method( + self.delete_tuned_model, + default_timeout=None, + client_info=client_info, + ), + self.get_operation: gapic_v1.method.wrap_method( + self.get_operation, + default_timeout=None, + client_info=client_info, + ), + self.list_operations: gapic_v1.method.wrap_method( + self.list_operations, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + + @property + def operations_client(self): + """Return the client designed to process long-running operations.""" + raise NotImplementedError() + + @property + def get_model( + self, + ) -> Callable[ + [model_service.GetModelRequest], Union[model.Model, Awaitable[model.Model]] + ]: + raise NotImplementedError() + + @property + def list_models( + self, + ) -> Callable[ + [model_service.ListModelsRequest], + Union[ + model_service.ListModelsResponse, + Awaitable[model_service.ListModelsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_tuned_model( + self, + ) -> Callable[ + [model_service.GetTunedModelRequest], + Union[tuned_model.TunedModel, Awaitable[tuned_model.TunedModel]], + ]: + raise NotImplementedError() + + @property + def list_tuned_models( + self, + ) -> Callable[ + [model_service.ListTunedModelsRequest], + Union[ + model_service.ListTunedModelsResponse, + Awaitable[model_service.ListTunedModelsResponse], + ], + ]: + raise NotImplementedError() + + @property + def create_tuned_model( + self, + ) -> Callable[ + [model_service.CreateTunedModelRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def update_tuned_model( + self, + ) -> Callable[ + [model_service.UpdateTunedModelRequest], + Union[gag_tuned_model.TunedModel, Awaitable[gag_tuned_model.TunedModel]], + ]: + raise NotImplementedError() + + @property + def delete_tuned_model( + self, + ) -> Callable[ + [model_service.DeleteTunedModelRequest], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], + ]: + raise NotImplementedError() + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], + Union[ + operations_pb2.ListOperationsResponse, + Awaitable[operations_pb2.ListOperationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_operation( + self, + ) -> Callable[ + [operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + 
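+    # Concrete transports implement each RPC property above as a lazily created
+    # callable bound to their channel or session. A hedged usage sketch (request
+    # values are assumed, not taken from this module):
+    #
+    #     transport = ModelServiceGrpcTransport(credentials=creds)
+    #     get_model = transport.get_model
+    #     model = get_model(model_service.GetModelRequest(name="models/example"))
+    #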
@property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("ModelServiceTransport",) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/model_service/transports/grpc.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/model_service/transports/grpc.py new file mode 100644 index 000000000000..501f86f0dde2 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/model_service/transports/grpc.py @@ -0,0 +1,582 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import json +import logging as std_logging +import pickle +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers, operations_v1 +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf.json_format import MessageToJson +import google.protobuf.message +import grpc # type: ignore +import proto # type: ignore + +from google.ai.generativelanguage_v1alpha.types import tuned_model as gag_tuned_model +from google.ai.generativelanguage_v1alpha.types import model, model_service +from google.ai.generativelanguage_v1alpha.types import tuned_model + +from .base import DEFAULT_CLIENT_INFO, ModelServiceTransport + +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + + +class _LoggingClientInterceptor(grpc.UnaryUnaryClientInterceptor): # pragma: NO COVER + def intercept_unary_unary(self, continuation, client_call_details, request): + logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ) + if logging_enabled: # pragma: NO COVER + request_metadata = client_call_details.metadata + if isinstance(request, proto.Message): + request_payload = type(request).to_json(request) + elif isinstance(request, google.protobuf.message.Message): + request_payload = MessageToJson(request) + else: + request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" + + request_metadata = { + key: value.decode("utf-8") if isinstance(value, bytes) else value + for key, value in request_metadata + } + grpc_request = { + "payload": request_payload, + "requestMethod": "grpc", + "metadata": dict(request_metadata), + } + _LOGGER.debug( + f"Sending request for {client_call_details.method}", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.ModelService", + "rpcName": client_call_details.method, + "request": grpc_request, + "metadata": grpc_request["metadata"], + }, + ) + + response = 
continuation(client_call_details, request) + if logging_enabled: # pragma: NO COVER + response_metadata = response.trailing_metadata() + # Convert gRPC metadata `` to list of tuples + metadata = ( + dict([(k, str(v)) for k, v in response_metadata]) + if response_metadata + else None + ) + result = response.result() + if isinstance(result, proto.Message): + response_payload = type(result).to_json(result) + elif isinstance(result, google.protobuf.message.Message): + response_payload = MessageToJson(result) + else: + response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" + grpc_response = { + "payload": response_payload, + "metadata": metadata, + "status": "OK", + } + _LOGGER.debug( + f"Received response for {client_call_details.method}.", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.ModelService", + "rpcName": client_call_details.method, + "response": grpc_response, + "metadata": grpc_response["metadata"], + }, + ) + return response + + +class ModelServiceGrpcTransport(ModelServiceTransport): + """gRPC backend transport for ModelService. + + Provides methods for getting metadata information about + Generative Models. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "generativelanguage.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'generativelanguage.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if a ``channel`` instance is provided. + channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. 
+ If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, grpc.Channel): + # Ignore credentials if a channel was passed. + credentials = None + self._ignore_credentials = True + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + self._interceptor = _LoggingClientInterceptor() + self._logged_channel = grpc.intercept_channel( + self._grpc_channel, self._interceptor + ) + + # Wrap messages. This must be done after self._logged_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "generativelanguage.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. 
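+
+        A hedged usage sketch (the credentials and project id are assumed values):
+
+        .. code-block:: python
+
+            channel = ModelServiceGrpcTransport.create_channel(
+                credentials=creds, quota_project_id="my-project"
+            )
+            transport = ModelServiceGrpcTransport(channel=channel)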
+ """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service.""" + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsClient( + self._logged_channel + ) + + # Return the client from cache. + return self._operations_client + + @property + def get_model(self) -> Callable[[model_service.GetModelRequest], model.Model]: + r"""Return a callable for the get model method over gRPC. + + Gets information about a specific ``Model`` such as its version + number, token limits, + `parameters `__ + and other metadata. Refer to the `Gemini models + guide `__ + for detailed model information. + + Returns: + Callable[[~.GetModelRequest], + ~.Model]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_model" not in self._stubs: + self._stubs["get_model"] = self._logged_channel.unary_unary( + "/google.ai.generativelanguage.v1alpha.ModelService/GetModel", + request_serializer=model_service.GetModelRequest.serialize, + response_deserializer=model.Model.deserialize, + ) + return self._stubs["get_model"] + + @property + def list_models( + self, + ) -> Callable[[model_service.ListModelsRequest], model_service.ListModelsResponse]: + r"""Return a callable for the list models method over gRPC. + + Lists the + ```Model``\ s `__ + available through the Gemini API. + + Returns: + Callable[[~.ListModelsRequest], + ~.ListModelsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_models" not in self._stubs: + self._stubs["list_models"] = self._logged_channel.unary_unary( + "/google.ai.generativelanguage.v1alpha.ModelService/ListModels", + request_serializer=model_service.ListModelsRequest.serialize, + response_deserializer=model_service.ListModelsResponse.deserialize, + ) + return self._stubs["list_models"] + + @property + def get_tuned_model( + self, + ) -> Callable[[model_service.GetTunedModelRequest], tuned_model.TunedModel]: + r"""Return a callable for the get tuned model method over gRPC. + + Gets information about a specific TunedModel. + + Returns: + Callable[[~.GetTunedModelRequest], + ~.TunedModel]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
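+        # (The stub is created once and cached in ``self._stubs``; later accesses
+        # of this property reuse the same channel-bound callable.)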
+ if "get_tuned_model" not in self._stubs: + self._stubs["get_tuned_model"] = self._logged_channel.unary_unary( + "/google.ai.generativelanguage.v1alpha.ModelService/GetTunedModel", + request_serializer=model_service.GetTunedModelRequest.serialize, + response_deserializer=tuned_model.TunedModel.deserialize, + ) + return self._stubs["get_tuned_model"] + + @property + def list_tuned_models( + self, + ) -> Callable[ + [model_service.ListTunedModelsRequest], model_service.ListTunedModelsResponse + ]: + r"""Return a callable for the list tuned models method over gRPC. + + Lists created tuned models. + + Returns: + Callable[[~.ListTunedModelsRequest], + ~.ListTunedModelsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_tuned_models" not in self._stubs: + self._stubs["list_tuned_models"] = self._logged_channel.unary_unary( + "/google.ai.generativelanguage.v1alpha.ModelService/ListTunedModels", + request_serializer=model_service.ListTunedModelsRequest.serialize, + response_deserializer=model_service.ListTunedModelsResponse.deserialize, + ) + return self._stubs["list_tuned_models"] + + @property + def create_tuned_model( + self, + ) -> Callable[[model_service.CreateTunedModelRequest], operations_pb2.Operation]: + r"""Return a callable for the create tuned model method over gRPC. + + Creates a tuned model. Check intermediate tuning progress (if + any) through the [google.longrunning.Operations] service. + + Access status and results through the Operations service. + Example: GET /v1/tunedModels/az2mb0bpw6i/operations/000-111-222 + + Returns: + Callable[[~.CreateTunedModelRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_tuned_model" not in self._stubs: + self._stubs["create_tuned_model"] = self._logged_channel.unary_unary( + "/google.ai.generativelanguage.v1alpha.ModelService/CreateTunedModel", + request_serializer=model_service.CreateTunedModelRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_tuned_model"] + + @property + def update_tuned_model( + self, + ) -> Callable[[model_service.UpdateTunedModelRequest], gag_tuned_model.TunedModel]: + r"""Return a callable for the update tuned model method over gRPC. + + Updates a tuned model. + + Returns: + Callable[[~.UpdateTunedModelRequest], + ~.TunedModel]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "update_tuned_model" not in self._stubs: + self._stubs["update_tuned_model"] = self._logged_channel.unary_unary( + "/google.ai.generativelanguage.v1alpha.ModelService/UpdateTunedModel", + request_serializer=model_service.UpdateTunedModelRequest.serialize, + response_deserializer=gag_tuned_model.TunedModel.deserialize, + ) + return self._stubs["update_tuned_model"] + + @property + def delete_tuned_model( + self, + ) -> Callable[[model_service.DeleteTunedModelRequest], empty_pb2.Empty]: + r"""Return a callable for the delete tuned model method over gRPC. + + Deletes a tuned model. + + Returns: + Callable[[~.DeleteTunedModelRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_tuned_model" not in self._stubs: + self._stubs["delete_tuned_model"] = self._logged_channel.unary_unary( + "/google.ai.generativelanguage.v1alpha.ModelService/DeleteTunedModel", + request_serializer=model_service.DeleteTunedModelRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_tuned_model"] + + def close(self): + self._logged_channel.close() + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("ModelServiceGrpcTransport",) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/model_service/transports/grpc_asyncio.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/model_service/transports/grpc_asyncio.py new file mode 100644 index 000000000000..a97df75a5c45 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/model_service/transports/grpc_asyncio.py @@ -0,0 +1,655 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import inspect +import json +import logging as std_logging +import pickle +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, grpc_helpers_async, operations_v1 +from google.api_core import retry_async as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf.json_format import MessageToJson +import google.protobuf.message +import grpc # type: ignore +from grpc.experimental import aio # type: ignore +import proto # type: ignore + +from google.ai.generativelanguage_v1alpha.types import tuned_model as gag_tuned_model +from google.ai.generativelanguage_v1alpha.types import model, model_service +from google.ai.generativelanguage_v1alpha.types import tuned_model + +from .base import DEFAULT_CLIENT_INFO, ModelServiceTransport +from .grpc import ModelServiceGrpcTransport + +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + + +class _LoggingClientAIOInterceptor( + grpc.aio.UnaryUnaryClientInterceptor +): # pragma: NO COVER + async def intercept_unary_unary(self, continuation, client_call_details, request): + logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ) + if logging_enabled: # pragma: NO COVER + request_metadata = client_call_details.metadata + if isinstance(request, proto.Message): + request_payload = type(request).to_json(request) + elif isinstance(request, google.protobuf.message.Message): + request_payload = MessageToJson(request) + else: + request_payload = 
f"{type(request).__name__}: {pickle.dumps(request)}" + + request_metadata = { + key: value.decode("utf-8") if isinstance(value, bytes) else value + for key, value in request_metadata + } + grpc_request = { + "payload": request_payload, + "requestMethod": "grpc", + "metadata": dict(request_metadata), + } + _LOGGER.debug( + f"Sending request for {client_call_details.method}", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.ModelService", + "rpcName": str(client_call_details.method), + "request": grpc_request, + "metadata": grpc_request["metadata"], + }, + ) + response = await continuation(client_call_details, request) + if logging_enabled: # pragma: NO COVER + response_metadata = await response.trailing_metadata() + # Convert gRPC metadata `` to list of tuples + metadata = ( + dict([(k, str(v)) for k, v in response_metadata]) + if response_metadata + else None + ) + result = await response + if isinstance(result, proto.Message): + response_payload = type(result).to_json(result) + elif isinstance(result, google.protobuf.message.Message): + response_payload = MessageToJson(result) + else: + response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" + grpc_response = { + "payload": response_payload, + "metadata": metadata, + "status": "OK", + } + _LOGGER.debug( + f"Received response to rpc {client_call_details.method}.", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.ModelService", + "rpcName": str(client_call_details.method), + "response": grpc_response, + "metadata": grpc_response["metadata"], + }, + ) + return response + + +class ModelServiceGrpcAsyncIOTransport(ModelServiceTransport): + """gRPC AsyncIO backend transport for ModelService. + + Provides methods for getting metadata information about + Generative Models. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "generativelanguage.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. 
+ """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "generativelanguage.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'generativelanguage.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. 
+ Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, aio.Channel): + # Ignore credentials if a channel was passed. + credentials = None + self._ignore_credentials = True + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + self._interceptor = _LoggingClientAIOInterceptor() + self._grpc_channel._unary_unary_interceptors.append(self._interceptor) + self._logged_channel = self._grpc_channel + self._wrap_with_kind = ( + "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters + ) + # Wrap messages. This must be done after self._logged_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsAsyncClient: + """Create the client designed to process long-running operations. 
+ + This property caches on the instance; repeated calls return the same + client. + """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsAsyncClient( + self._logged_channel + ) + + # Return the client from cache. + return self._operations_client + + @property + def get_model( + self, + ) -> Callable[[model_service.GetModelRequest], Awaitable[model.Model]]: + r"""Return a callable for the get model method over gRPC. + + Gets information about a specific ``Model`` such as its version + number, token limits, + `parameters `__ + and other metadata. Refer to the `Gemini models + guide `__ + for detailed model information. + + Returns: + Callable[[~.GetModelRequest], + Awaitable[~.Model]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_model" not in self._stubs: + self._stubs["get_model"] = self._logged_channel.unary_unary( + "/google.ai.generativelanguage.v1alpha.ModelService/GetModel", + request_serializer=model_service.GetModelRequest.serialize, + response_deserializer=model.Model.deserialize, + ) + return self._stubs["get_model"] + + @property + def list_models( + self, + ) -> Callable[ + [model_service.ListModelsRequest], Awaitable[model_service.ListModelsResponse] + ]: + r"""Return a callable for the list models method over gRPC. + + Lists the + ```Model``\ s `__ + available through the Gemini API. + + Returns: + Callable[[~.ListModelsRequest], + Awaitable[~.ListModelsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_models" not in self._stubs: + self._stubs["list_models"] = self._logged_channel.unary_unary( + "/google.ai.generativelanguage.v1alpha.ModelService/ListModels", + request_serializer=model_service.ListModelsRequest.serialize, + response_deserializer=model_service.ListModelsResponse.deserialize, + ) + return self._stubs["list_models"] + + @property + def get_tuned_model( + self, + ) -> Callable[ + [model_service.GetTunedModelRequest], Awaitable[tuned_model.TunedModel] + ]: + r"""Return a callable for the get tuned model method over gRPC. + + Gets information about a specific TunedModel. + + Returns: + Callable[[~.GetTunedModelRequest], + Awaitable[~.TunedModel]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_tuned_model" not in self._stubs: + self._stubs["get_tuned_model"] = self._logged_channel.unary_unary( + "/google.ai.generativelanguage.v1alpha.ModelService/GetTunedModel", + request_serializer=model_service.GetTunedModelRequest.serialize, + response_deserializer=tuned_model.TunedModel.deserialize, + ) + return self._stubs["get_tuned_model"] + + @property + def list_tuned_models( + self, + ) -> Callable[ + [model_service.ListTunedModelsRequest], + Awaitable[model_service.ListTunedModelsResponse], + ]: + r"""Return a callable for the list tuned models method over gRPC. + + Lists created tuned models. + + Returns: + Callable[[~.ListTunedModelsRequest], + Awaitable[~.ListTunedModelsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_tuned_models" not in self._stubs: + self._stubs["list_tuned_models"] = self._logged_channel.unary_unary( + "/google.ai.generativelanguage.v1alpha.ModelService/ListTunedModels", + request_serializer=model_service.ListTunedModelsRequest.serialize, + response_deserializer=model_service.ListTunedModelsResponse.deserialize, + ) + return self._stubs["list_tuned_models"] + + @property + def create_tuned_model( + self, + ) -> Callable[ + [model_service.CreateTunedModelRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the create tuned model method over gRPC. + + Creates a tuned model. Check intermediate tuning progress (if + any) through the [google.longrunning.Operations] service. + + Access status and results through the Operations service. + Example: GET /v1/tunedModels/az2mb0bpw6i/operations/000-111-222 + + Returns: + Callable[[~.CreateTunedModelRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_tuned_model" not in self._stubs: + self._stubs["create_tuned_model"] = self._logged_channel.unary_unary( + "/google.ai.generativelanguage.v1alpha.ModelService/CreateTunedModel", + request_serializer=model_service.CreateTunedModelRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_tuned_model"] + + @property + def update_tuned_model( + self, + ) -> Callable[ + [model_service.UpdateTunedModelRequest], Awaitable[gag_tuned_model.TunedModel] + ]: + r"""Return a callable for the update tuned model method over gRPC. + + Updates a tuned model. + + Returns: + Callable[[~.UpdateTunedModelRequest], + Awaitable[~.TunedModel]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "update_tuned_model" not in self._stubs: + self._stubs["update_tuned_model"] = self._logged_channel.unary_unary( + "/google.ai.generativelanguage.v1alpha.ModelService/UpdateTunedModel", + request_serializer=model_service.UpdateTunedModelRequest.serialize, + response_deserializer=gag_tuned_model.TunedModel.deserialize, + ) + return self._stubs["update_tuned_model"] + + @property + def delete_tuned_model( + self, + ) -> Callable[[model_service.DeleteTunedModelRequest], Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete tuned model method over gRPC. + + Deletes a tuned model. + + Returns: + Callable[[~.DeleteTunedModelRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_tuned_model" not in self._stubs: + self._stubs["delete_tuned_model"] = self._logged_channel.unary_unary( + "/google.ai.generativelanguage.v1alpha.ModelService/DeleteTunedModel", + request_serializer=model_service.DeleteTunedModelRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_tuned_model"] + + def _prep_wrapped_messages(self, client_info): + """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + self._wrapped_methods = { + self.get_model: self._wrap_method( + self.get_model, + default_timeout=None, + client_info=client_info, + ), + self.list_models: self._wrap_method( + self.list_models, + default_timeout=None, + client_info=client_info, + ), + self.get_tuned_model: self._wrap_method( + self.get_tuned_model, + default_timeout=None, + client_info=client_info, + ), + self.list_tuned_models: self._wrap_method( + self.list_tuned_models, + default_timeout=None, + client_info=client_info, + ), + self.create_tuned_model: self._wrap_method( + self.create_tuned_model, + default_timeout=None, + client_info=client_info, + ), + self.update_tuned_model: self._wrap_method( + self.update_tuned_model, + default_timeout=None, + client_info=client_info, + ), + self.delete_tuned_model: self._wrap_method( + self.delete_tuned_model, + default_timeout=None, + client_info=client_info, + ), + self.get_operation: self._wrap_method( + self.get_operation, + default_timeout=None, + client_info=client_info, + ), + self.list_operations: self._wrap_method( + self.list_operations, + default_timeout=None, + client_info=client_info, + ), + } + + def _wrap_method(self, func, *args, **kwargs): + if self._wrap_with_kind: # pragma: NO COVER + kwargs["kind"] = self.kind + return gapic_v1.method_async.wrap_method(func, *args, **kwargs) + + def close(self): + return self._logged_channel.close() + + @property + def kind(self) -> str: + return "grpc_asyncio" + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + +__all__ = ("ModelServiceGrpcAsyncIOTransport",) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/model_service/transports/rest.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/model_service/transports/rest.py new file mode 100644 index 000000000000..da30e2ee76f2 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/model_service/transports/rest.py @@ -0,0 +1,1983 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import dataclasses +import json # type: ignore +import logging +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, operations_v1, rest_helpers, rest_streaming +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import json_format +from requests import __version__ as requests_version + +from google.ai.generativelanguage_v1alpha.types import tuned_model as gag_tuned_model +from google.ai.generativelanguage_v1alpha.types import model, model_service +from google.ai.generativelanguage_v1alpha.types import tuned_model + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .rest_base import _BaseModelServiceRestTransport + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = logging.getLogger(__name__) + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=f"requests@{requests_version}", +) + + +class ModelServiceRestInterceptor: + """Interceptor for ModelService. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the ModelServiceRestTransport. + + .. 
code-block:: python + class MyCustomModelServiceInterceptor(ModelServiceRestInterceptor): + def pre_create_tuned_model(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_tuned_model(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_tuned_model(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_get_model(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_model(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_tuned_model(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_tuned_model(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_models(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_models(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_tuned_models(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_tuned_models(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_tuned_model(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_tuned_model(self, response): + logging.log(f"Received response: {response}") + return response + + transport = ModelServiceRestTransport(interceptor=MyCustomModelServiceInterceptor()) + client = ModelServiceClient(transport=transport) + + + """ + + def pre_create_tuned_model( + self, + request: model_service.CreateTunedModelRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + model_service.CreateTunedModelRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for create_tuned_model + + Override in a subclass to manipulate the request or metadata + before they are sent to the ModelService server. + """ + return request, metadata + + def post_create_tuned_model( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for create_tuned_model + + DEPRECATED. Please use the `post_create_tuned_model_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the ModelService server but before + it is returned to user code. This `post_create_tuned_model` interceptor runs + before the `post_create_tuned_model_with_metadata` interceptor. + """ + return response + + def post_create_tuned_model_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_tuned_model + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ModelService server but before it is returned to user code. + + We recommend only using this `post_create_tuned_model_with_metadata` + interceptor in new development instead of the `post_create_tuned_model` interceptor. + When both interceptors are used, this `post_create_tuned_model_with_metadata` interceptor runs after the + `post_create_tuned_model` interceptor. 
The (possibly modified) response returned by + `post_create_tuned_model` will be passed to + `post_create_tuned_model_with_metadata`. + """ + return response, metadata + + def pre_delete_tuned_model( + self, + request: model_service.DeleteTunedModelRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + model_service.DeleteTunedModelRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for delete_tuned_model + + Override in a subclass to manipulate the request or metadata + before they are sent to the ModelService server. + """ + return request, metadata + + def pre_get_model( + self, + request: model_service.GetModelRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[model_service.GetModelRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for get_model + + Override in a subclass to manipulate the request or metadata + before they are sent to the ModelService server. + """ + return request, metadata + + def post_get_model(self, response: model.Model) -> model.Model: + """Post-rpc interceptor for get_model + + DEPRECATED. Please use the `post_get_model_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the ModelService server but before + it is returned to user code. This `post_get_model` interceptor runs + before the `post_get_model_with_metadata` interceptor. + """ + return response + + def post_get_model_with_metadata( + self, response: model.Model, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[model.Model, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_model + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ModelService server but before it is returned to user code. + + We recommend only using this `post_get_model_with_metadata` + interceptor in new development instead of the `post_get_model` interceptor. + When both interceptors are used, this `post_get_model_with_metadata` interceptor runs after the + `post_get_model` interceptor. The (possibly modified) response returned by + `post_get_model` will be passed to + `post_get_model_with_metadata`. + """ + return response, metadata + + def pre_get_tuned_model( + self, + request: model_service.GetTunedModelRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + model_service.GetTunedModelRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for get_tuned_model + + Override in a subclass to manipulate the request or metadata + before they are sent to the ModelService server. + """ + return request, metadata + + def post_get_tuned_model( + self, response: tuned_model.TunedModel + ) -> tuned_model.TunedModel: + """Post-rpc interceptor for get_tuned_model + + DEPRECATED. Please use the `post_get_tuned_model_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the ModelService server but before + it is returned to user code. This `post_get_tuned_model` interceptor runs + before the `post_get_tuned_model_with_metadata` interceptor. 
+ """ + return response + + def post_get_tuned_model_with_metadata( + self, + response: tuned_model.TunedModel, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[tuned_model.TunedModel, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_tuned_model + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ModelService server but before it is returned to user code. + + We recommend only using this `post_get_tuned_model_with_metadata` + interceptor in new development instead of the `post_get_tuned_model` interceptor. + When both interceptors are used, this `post_get_tuned_model_with_metadata` interceptor runs after the + `post_get_tuned_model` interceptor. The (possibly modified) response returned by + `post_get_tuned_model` will be passed to + `post_get_tuned_model_with_metadata`. + """ + return response, metadata + + def pre_list_models( + self, + request: model_service.ListModelsRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + model_service.ListModelsRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for list_models + + Override in a subclass to manipulate the request or metadata + before they are sent to the ModelService server. + """ + return request, metadata + + def post_list_models( + self, response: model_service.ListModelsResponse + ) -> model_service.ListModelsResponse: + """Post-rpc interceptor for list_models + + DEPRECATED. Please use the `post_list_models_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the ModelService server but before + it is returned to user code. This `post_list_models` interceptor runs + before the `post_list_models_with_metadata` interceptor. + """ + return response + + def post_list_models_with_metadata( + self, + response: model_service.ListModelsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + model_service.ListModelsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_models + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ModelService server but before it is returned to user code. + + We recommend only using this `post_list_models_with_metadata` + interceptor in new development instead of the `post_list_models` interceptor. + When both interceptors are used, this `post_list_models_with_metadata` interceptor runs after the + `post_list_models` interceptor. The (possibly modified) response returned by + `post_list_models` will be passed to + `post_list_models_with_metadata`. + """ + return response, metadata + + def pre_list_tuned_models( + self, + request: model_service.ListTunedModelsRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + model_service.ListTunedModelsRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for list_tuned_models + + Override in a subclass to manipulate the request or metadata + before they are sent to the ModelService server. + """ + return request, metadata + + def post_list_tuned_models( + self, response: model_service.ListTunedModelsResponse + ) -> model_service.ListTunedModelsResponse: + """Post-rpc interceptor for list_tuned_models + + DEPRECATED. Please use the `post_list_tuned_models_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response + after it is returned by the ModelService server but before + it is returned to user code. This `post_list_tuned_models` interceptor runs + before the `post_list_tuned_models_with_metadata` interceptor. + """ + return response + + def post_list_tuned_models_with_metadata( + self, + response: model_service.ListTunedModelsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + model_service.ListTunedModelsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_tuned_models + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ModelService server but before it is returned to user code. + + We recommend only using this `post_list_tuned_models_with_metadata` + interceptor in new development instead of the `post_list_tuned_models` interceptor. + When both interceptors are used, this `post_list_tuned_models_with_metadata` interceptor runs after the + `post_list_tuned_models` interceptor. The (possibly modified) response returned by + `post_list_tuned_models` will be passed to + `post_list_tuned_models_with_metadata`. + """ + return response, metadata + + def pre_update_tuned_model( + self, + request: model_service.UpdateTunedModelRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + model_service.UpdateTunedModelRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for update_tuned_model + + Override in a subclass to manipulate the request or metadata + before they are sent to the ModelService server. + """ + return request, metadata + + def post_update_tuned_model( + self, response: gag_tuned_model.TunedModel + ) -> gag_tuned_model.TunedModel: + """Post-rpc interceptor for update_tuned_model + + DEPRECATED. Please use the `post_update_tuned_model_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the ModelService server but before + it is returned to user code. This `post_update_tuned_model` interceptor runs + before the `post_update_tuned_model_with_metadata` interceptor. + """ + return response + + def post_update_tuned_model_with_metadata( + self, + response: gag_tuned_model.TunedModel, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gag_tuned_model.TunedModel, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_tuned_model + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ModelService server but before it is returned to user code. + + We recommend only using this `post_update_tuned_model_with_metadata` + interceptor in new development instead of the `post_update_tuned_model` interceptor. + When both interceptors are used, this `post_update_tuned_model_with_metadata` interceptor runs after the + `post_update_tuned_model` interceptor. The (possibly modified) response returned by + `post_update_tuned_model` will be passed to + `post_update_tuned_model_with_metadata`. + """ + return response, metadata + + def pre_get_operation( + self, + request: operations_pb2.GetOperationRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + operations_pb2.GetOperationRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for get_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the ModelService server. 
+ """ + return request, metadata + + def post_get_operation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for get_operation + + Override in a subclass to manipulate the response + after it is returned by the ModelService server but before + it is returned to user code. + """ + return response + + def pre_list_operations( + self, + request: operations_pb2.ListOperationsRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + operations_pb2.ListOperationsRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for list_operations + + Override in a subclass to manipulate the request or metadata + before they are sent to the ModelService server. + """ + return request, metadata + + def post_list_operations( + self, response: operations_pb2.ListOperationsResponse + ) -> operations_pb2.ListOperationsResponse: + """Post-rpc interceptor for list_operations + + Override in a subclass to manipulate the response + after it is returned by the ModelService server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class ModelServiceRestStub: + _session: AuthorizedSession + _host: str + _interceptor: ModelServiceRestInterceptor + + +class ModelServiceRestTransport(_BaseModelServiceRestTransport): + """REST backend synchronous transport for ModelService. + + Provides methods for getting metadata information about + Generative Models. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + """ + + def __init__( + self, + *, + host: str = "generativelanguage.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[ModelServiceRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'generativelanguage.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. 
+ Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + url_scheme=url_scheme, + api_audience=api_audience, + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST + ) + self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or ModelServiceRestInterceptor() + self._prep_wrapped_messages(client_info) + + @property + def operations_client(self) -> operations_v1.AbstractOperationsClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Only create a new client if we do not already have one. + if self._operations_client is None: + http_options: Dict[str, List[Dict[str, str]]] = { + "google.longrunning.Operations.GetOperation": [ + { + "method": "get", + "uri": "/v1alpha/{name=tunedModels/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1alpha/{name=generatedFiles/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1alpha/{name=models/*/operations/*}", + }, + ], + "google.longrunning.Operations.ListOperations": [ + { + "method": "get", + "uri": "/v1alpha/{name=tunedModels/*}/operations", + }, + { + "method": "get", + "uri": "/v1alpha/{name=models/*}/operations", + }, + ], + } + + rest_transport = operations_v1.OperationsRestTransport( + host=self._host, + # use the credentials which are saved + credentials=self._credentials, + scopes=self._scopes, + http_options=http_options, + path_prefix="v1alpha", + ) + + self._operations_client = operations_v1.AbstractOperationsClient( + transport=rest_transport + ) + + # Return the client from cache. + return self._operations_client + + class _CreateTunedModel( + _BaseModelServiceRestTransport._BaseCreateTunedModel, ModelServiceRestStub + ): + def __hash__(self): + return hash("ModelServiceRestTransport.CreateTunedModel") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: model_service.CreateTunedModelRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Call the create tuned model method over HTTP. + + Args: + request (~.model_service.CreateTunedModelRequest): + The request object. 
Request to create a TunedModel. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options = ( + _BaseModelServiceRestTransport._BaseCreateTunedModel._get_http_options() + ) + + request, metadata = self._interceptor.pre_create_tuned_model( + request, metadata + ) + transcoded_request = _BaseModelServiceRestTransport._BaseCreateTunedModel._get_transcoded_request( + http_options, request + ) + + body = _BaseModelServiceRestTransport._BaseCreateTunedModel._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseModelServiceRestTransport._BaseCreateTunedModel._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.ai.generativelanguage_v1alpha.ModelServiceClient.CreateTunedModel", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.ModelService", + "rpcName": "CreateTunedModel", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = ModelServiceRestTransport._CreateTunedModel._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
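+            # Note: google.api_core's from_http_response() maps the HTTP status code to
+            # the matching GoogleAPICallError subclass (e.g. 404 -> NotFound), using the
+            # error message from the response body when one is present.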
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_create_tuned_model(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_tuned_model_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.ai.generativelanguage_v1alpha.ModelServiceClient.create_tuned_model", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.ModelService", + "rpcName": "CreateTunedModel", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _DeleteTunedModel( + _BaseModelServiceRestTransport._BaseDeleteTunedModel, ModelServiceRestStub + ): + def __hash__(self): + return hash("ModelServiceRestTransport.DeleteTunedModel") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: model_service.DeleteTunedModelRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ): + r"""Call the delete tuned model method over HTTP. + + Args: + request (~.model_service.DeleteTunedModelRequest): + The request object. Request to delete a TunedModel. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + + http_options = ( + _BaseModelServiceRestTransport._BaseDeleteTunedModel._get_http_options() + ) + + request, metadata = self._interceptor.pre_delete_tuned_model( + request, metadata + ) + transcoded_request = _BaseModelServiceRestTransport._BaseDeleteTunedModel._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseModelServiceRestTransport._BaseDeleteTunedModel._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.ai.generativelanguage_v1alpha.ModelServiceClient.DeleteTunedModel", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.ModelService", + "rpcName": "DeleteTunedModel", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = ModelServiceRestTransport._DeleteTunedModel._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _GetModel(_BaseModelServiceRestTransport._BaseGetModel, ModelServiceRestStub): + def __hash__(self): + return hash("ModelServiceRestTransport.GetModel") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: model_service.GetModelRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> model.Model: + r"""Call the get model method over HTTP. + + Args: + request (~.model_service.GetModelRequest): + The request object. Request for getting information about + a specific Model. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.model.Model: + Information about a Generative + Language Model. 
+ + """ + + http_options = ( + _BaseModelServiceRestTransport._BaseGetModel._get_http_options() + ) + + request, metadata = self._interceptor.pre_get_model(request, metadata) + transcoded_request = ( + _BaseModelServiceRestTransport._BaseGetModel._get_transcoded_request( + http_options, request + ) + ) + + # Jsonify the query params + query_params = ( + _BaseModelServiceRestTransport._BaseGetModel._get_query_params_json( + transcoded_request + ) + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.ai.generativelanguage_v1alpha.ModelServiceClient.GetModel", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.ModelService", + "rpcName": "GetModel", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = ModelServiceRestTransport._GetModel._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = model.Model() + pb_resp = model.Model.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_get_model(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_model_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = model.Model.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.ai.generativelanguage_v1alpha.ModelServiceClient.get_model", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.ModelService", + "rpcName": "GetModel", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _GetTunedModel( + _BaseModelServiceRestTransport._BaseGetTunedModel, ModelServiceRestStub + ): + def __hash__(self): + return hash("ModelServiceRestTransport.GetTunedModel") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: model_service.GetTunedModelRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> tuned_model.TunedModel: + r"""Call the get tuned model method 
over HTTP. + + Args: + request (~.model_service.GetTunedModelRequest): + The request object. Request for getting information about + a specific Model. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.tuned_model.TunedModel: + A fine-tuned model created using + ModelService.CreateTunedModel. + + """ + + http_options = ( + _BaseModelServiceRestTransport._BaseGetTunedModel._get_http_options() + ) + + request, metadata = self._interceptor.pre_get_tuned_model(request, metadata) + transcoded_request = _BaseModelServiceRestTransport._BaseGetTunedModel._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseModelServiceRestTransport._BaseGetTunedModel._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.ai.generativelanguage_v1alpha.ModelServiceClient.GetTunedModel", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.ModelService", + "rpcName": "GetTunedModel", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = ModelServiceRestTransport._GetTunedModel._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = tuned_model.TunedModel() + pb_resp = tuned_model.TunedModel.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_get_tuned_model(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_tuned_model_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = tuned_model.TunedModel.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.ai.generativelanguage_v1alpha.ModelServiceClient.get_tuned_model", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.ModelService", + "rpcName": "GetTunedModel", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _ListModels( + _BaseModelServiceRestTransport._BaseListModels, ModelServiceRestStub + ): + def __hash__(self): + return hash("ModelServiceRestTransport.ListModels") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: model_service.ListModelsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> model_service.ListModelsResponse: + r"""Call the list models method over HTTP. + + Args: + request (~.model_service.ListModelsRequest): + The request object. Request for listing all Models. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.model_service.ListModelsResponse: + Response from ``ListModel`` containing a paginated list + of Models. 
+ + """ + + http_options = ( + _BaseModelServiceRestTransport._BaseListModels._get_http_options() + ) + + request, metadata = self._interceptor.pre_list_models(request, metadata) + transcoded_request = ( + _BaseModelServiceRestTransport._BaseListModels._get_transcoded_request( + http_options, request + ) + ) + + # Jsonify the query params + query_params = ( + _BaseModelServiceRestTransport._BaseListModels._get_query_params_json( + transcoded_request + ) + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.ai.generativelanguage_v1alpha.ModelServiceClient.ListModels", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.ModelService", + "rpcName": "ListModels", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = ModelServiceRestTransport._ListModels._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = model_service.ListModelsResponse() + pb_resp = model_service.ListModelsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_list_models(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_models_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = model_service.ListModelsResponse.to_json( + response + ) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.ai.generativelanguage_v1alpha.ModelServiceClient.list_models", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.ModelService", + "rpcName": "ListModels", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _ListTunedModels( + _BaseModelServiceRestTransport._BaseListTunedModels, ModelServiceRestStub + ): + def __hash__(self): + return hash("ModelServiceRestTransport.ListTunedModels") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: model_service.ListTunedModelsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: 
Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> model_service.ListTunedModelsResponse: + r"""Call the list tuned models method over HTTP. + + Args: + request (~.model_service.ListTunedModelsRequest): + The request object. Request for listing TunedModels. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.model_service.ListTunedModelsResponse: + Response from ``ListTunedModels`` containing a paginated + list of Models. + + """ + + http_options = ( + _BaseModelServiceRestTransport._BaseListTunedModels._get_http_options() + ) + + request, metadata = self._interceptor.pre_list_tuned_models( + request, metadata + ) + transcoded_request = _BaseModelServiceRestTransport._BaseListTunedModels._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseModelServiceRestTransport._BaseListTunedModels._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.ai.generativelanguage_v1alpha.ModelServiceClient.ListTunedModels", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.ModelService", + "rpcName": "ListTunedModels", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = ModelServiceRestTransport._ListTunedModels._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = model_service.ListTunedModelsResponse() + pb_resp = model_service.ListTunedModelsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_list_tuned_models(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_tuned_models_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = model_service.ListTunedModelsResponse.to_json( + response + ) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.ai.generativelanguage_v1alpha.ModelServiceClient.list_tuned_models", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.ModelService", + "rpcName": "ListTunedModels", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _UpdateTunedModel( + _BaseModelServiceRestTransport._BaseUpdateTunedModel, ModelServiceRestStub + ): + def __hash__(self): + return hash("ModelServiceRestTransport.UpdateTunedModel") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: model_service.UpdateTunedModelRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> gag_tuned_model.TunedModel: + r"""Call the update tuned model method over HTTP. + + Args: + request (~.model_service.UpdateTunedModelRequest): + The request object. Request to update a TunedModel. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.gag_tuned_model.TunedModel: + A fine-tuned model created using + ModelService.CreateTunedModel. 
+ + """ + + http_options = ( + _BaseModelServiceRestTransport._BaseUpdateTunedModel._get_http_options() + ) + + request, metadata = self._interceptor.pre_update_tuned_model( + request, metadata + ) + transcoded_request = _BaseModelServiceRestTransport._BaseUpdateTunedModel._get_transcoded_request( + http_options, request + ) + + body = _BaseModelServiceRestTransport._BaseUpdateTunedModel._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseModelServiceRestTransport._BaseUpdateTunedModel._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.ai.generativelanguage_v1alpha.ModelServiceClient.UpdateTunedModel", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.ModelService", + "rpcName": "UpdateTunedModel", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = ModelServiceRestTransport._UpdateTunedModel._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = gag_tuned_model.TunedModel() + pb_resp = gag_tuned_model.TunedModel.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_update_tuned_model(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_tuned_model_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = gag_tuned_model.TunedModel.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.ai.generativelanguage_v1alpha.ModelServiceClient.update_tuned_model", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.ModelService", + "rpcName": "UpdateTunedModel", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + @property + def create_tuned_model( + self, + ) -> Callable[[model_service.CreateTunedModelRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateTunedModel(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_tuned_model( + self, + ) -> Callable[[model_service.DeleteTunedModelRequest], empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._DeleteTunedModel(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_model(self) -> Callable[[model_service.GetModelRequest], model.Model]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetModel(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_tuned_model( + self, + ) -> Callable[[model_service.GetTunedModelRequest], tuned_model.TunedModel]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetTunedModel(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_models( + self, + ) -> Callable[[model_service.ListModelsRequest], model_service.ListModelsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListModels(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_tuned_models( + self, + ) -> Callable[ + [model_service.ListTunedModelsRequest], model_service.ListTunedModelsResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListTunedModels(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_tuned_model( + self, + ) -> Callable[[model_service.UpdateTunedModelRequest], gag_tuned_model.TunedModel]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateTunedModel(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_operation(self): + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + + class _GetOperation( + _BaseModelServiceRestTransport._BaseGetOperation, ModelServiceRestStub + ): + def __hash__(self): + return hash("ModelServiceRestTransport.GetOperation") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: operations_pb2.GetOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Call the get operation method over HTTP. + + Args: + request (operations_pb2.GetOperationRequest): + The request object for GetOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + operations_pb2.Operation: Response from GetOperation method. + """ + + http_options = ( + _BaseModelServiceRestTransport._BaseGetOperation._get_http_options() + ) + + request, metadata = self._interceptor.pre_get_operation(request, metadata) + transcoded_request = _BaseModelServiceRestTransport._BaseGetOperation._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = ( + _BaseModelServiceRestTransport._BaseGetOperation._get_query_params_json( + transcoded_request + ) + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.ai.generativelanguage_v1alpha.ModelServiceClient.GetOperation", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.ModelService", + "rpcName": "GetOperation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = ModelServiceRestTransport._GetOperation._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = operations_pb2.Operation() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_get_operation(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.ai.generativelanguage_v1alpha.ModelServiceAsyncClient.GetOperation", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.ModelService", + "rpcName": "GetOperation", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) + return resp + + @property + def list_operations(self): + return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore + + class _ListOperations( + _BaseModelServiceRestTransport._BaseListOperations, ModelServiceRestStub + ): + def __hash__(self): + return hash("ModelServiceRestTransport.ListOperations") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: operations_pb2.ListOperationsRequest, + *, + 
retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Call the list operations method over HTTP. + + Args: + request (operations_pb2.ListOperationsRequest): + The request object for ListOperations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + operations_pb2.ListOperationsResponse: Response from ListOperations method. + """ + + http_options = ( + _BaseModelServiceRestTransport._BaseListOperations._get_http_options() + ) + + request, metadata = self._interceptor.pre_list_operations(request, metadata) + transcoded_request = _BaseModelServiceRestTransport._BaseListOperations._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseModelServiceRestTransport._BaseListOperations._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.ai.generativelanguage_v1alpha.ModelServiceClient.ListOperations", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.ModelService", + "rpcName": "ListOperations", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = ModelServiceRestTransport._ListOperations._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = operations_pb2.ListOperationsResponse() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_list_operations(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.ai.generativelanguage_v1alpha.ModelServiceAsyncClient.ListOperations", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.ModelService", + "rpcName": "ListOperations", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) + return resp + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("ModelServiceRestTransport",) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/model_service/transports/rest_base.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/model_service/transports/rest_base.py new file mode 100644 index 000000000000..da731c43b6d6 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/model_service/transports/rest_base.py @@ -0,0 +1,476 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union + +from google.api_core import gapic_v1, path_template +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import json_format + +from google.ai.generativelanguage_v1alpha.types import tuned_model as gag_tuned_model +from google.ai.generativelanguage_v1alpha.types import model, model_service +from google.ai.generativelanguage_v1alpha.types import tuned_model + +from .base import DEFAULT_CLIENT_INFO, ModelServiceTransport + + +class _BaseModelServiceRestTransport(ModelServiceTransport): + """Base REST backend transport for ModelService. + + Note: This class is not meant to be used directly. Use its sync and + async sub-classes instead. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. 
+
+    It sends JSON representations of protocol buffers over HTTP/1.1
+    """
+
+    def __init__(
+        self,
+        *,
+        host: str = "generativelanguage.googleapis.com",
+        credentials: Optional[Any] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+        always_use_jwt_access: Optional[bool] = False,
+        url_scheme: str = "https",
+        api_audience: Optional[str] = None,
+    ) -> None:
+        """Instantiate the transport.
+        Args:
+            host (Optional[str]):
+                The hostname to connect to (default: 'generativelanguage.googleapis.com').
+            credentials (Optional[Any]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you are developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+                be used for service account credentials.
+            url_scheme: the protocol scheme for the API endpoint.  Normally
+                "https", but for testing or local servers,
+                "http" can be specified.
+        """
+        # Run the base constructor
+        maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
+        if maybe_url_match is None:
+            raise ValueError(
+                f"Unexpected hostname structure: {host}"
+            )  # pragma: NO COVER
+
+        url_match_items = maybe_url_match.groupdict()
+
+        host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
+
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience,
+        )
+
+    class _BaseCreateTunedModel:
+        def __hash__(self):  # pragma: NO COVER
+            return NotImplementedError("__hash__ must be implemented.")
+
+        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {}
+
+        @classmethod
+        def _get_unset_required_fields(cls, message_dict):
+            return {
+                k: v
+                for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items()
+                if k not in message_dict
+            }
+
+        @staticmethod
+        def _get_http_options():
+            http_options: List[Dict[str, str]] = [
+                {
+                    "method": "post",
+                    "uri": "/v1alpha/tunedModels",
+                    "body": "tuned_model",
+                },
+            ]
+            return http_options
+
+        @staticmethod
+        def _get_transcoded_request(http_options, request):
+            pb_request = model_service.CreateTunedModelRequest.pb(request)
+            transcoded_request = path_template.transcode(http_options, pb_request)
+            return transcoded_request
+
+        @staticmethod
+        def _get_request_body_json(transcoded_request):
+            # Jsonify the request body
+
+            body = json_format.MessageToJson(
+                transcoded_request["body"], use_integers_for_enums=True
+            )
+            return body
+
+        @staticmethod
+        def _get_query_params_json(transcoded_request):
+            query_params = json.loads(
+                json_format.MessageToJson(
+                    transcoded_request["query_params"],
+                    use_integers_for_enums=True,
+                )
+            )
+            query_params.update(
+                _BaseModelServiceRestTransport._BaseCreateTunedModel._get_unset_required_fields(
+                    query_params
+                )
+            )
+
+            query_params["$alt"] = "json;enum-encoding=int"
+            return query_params
+
+    class _BaseDeleteTunedModel:
+        def __hash__(self):  # pragma: NO COVER
+            return NotImplementedError("__hash__ must be implemented.")
+
+        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {}
+
+        @classmethod
+        def _get_unset_required_fields(cls, message_dict):
+            return {
+                k: v
+                for k, v in
cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1alpha/{name=tunedModels/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = model_service.DeleteTunedModelRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseModelServiceRestTransport._BaseDeleteTunedModel._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGetModel: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha/{name=models/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = model_service.GetModelRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseModelServiceRestTransport._BaseGetModel._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGetTunedModel: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha/{name=tunedModels/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = model_service.GetTunedModelRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseModelServiceRestTransport._BaseGetTunedModel._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseListModels: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha/models", + }, + ] + return 
http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = model_service.ListModelsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseListTunedModels: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha/tunedModels", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = model_service.ListTunedModelsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseUpdateTunedModel: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1alpha/{tuned_model.name=tunedModels/*}", + "body": "tuned_model", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = model_service.UpdateTunedModelRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseModelServiceRestTransport._BaseUpdateTunedModel._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGetOperation: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha/{name=tunedModels/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1alpha/{name=generatedFiles/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1alpha/{name=models/*/operations/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def 
_get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + return query_params + + class _BaseListOperations: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha/{name=tunedModels/*}/operations", + }, + { + "method": "get", + "uri": "/v1alpha/{name=models/*}/operations", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + return query_params + + +__all__ = ("_BaseModelServiceRestTransport",) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/permission_service/__init__.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/permission_service/__init__.py new file mode 100644 index 000000000000..ff0f9217fa0e --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/permission_service/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .async_client import PermissionServiceAsyncClient +from .client import PermissionServiceClient + +__all__ = ( + "PermissionServiceClient", + "PermissionServiceAsyncClient", +) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/permission_service/async_client.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/permission_service/async_client.py new file mode 100644 index 000000000000..7be8e8f26253 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/permission_service/async_client.py @@ -0,0 +1,1150 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
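For reference, the ``_get_http_options`` / ``_get_transcoded_request`` helpers defined in ``rest_base.py`` above resolve a request onto a concrete URI via ``google.api_core.path_template.transcode``. A small illustrative sketch (the operation name is hypothetical):

.. code-block:: python

    from google.api_core import path_template

    http_options = [
        {"method": "get", "uri": "/v1alpha/{name=tunedModels/*/operations/*}"},
    ]
    transcoded = path_template.transcode(
        http_options, name="tunedModels/my-model/operations/my-operation"
    )
    # Expected: transcoded["method"] == "get" and
    # transcoded["uri"] == "/v1alpha/tunedModels/my-model/operations/my-operation"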
+# +from collections import OrderedDict +import logging as std_logging +import re +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry_async as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.ai.generativelanguage_v1alpha import gapic_version as package_version + +try: + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore + +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore + +from google.ai.generativelanguage_v1alpha.services.permission_service import pagers +from google.ai.generativelanguage_v1alpha.types import permission as gag_permission +from google.ai.generativelanguage_v1alpha.types import permission +from google.ai.generativelanguage_v1alpha.types import permission_service + +from .client import PermissionServiceClient +from .transports.base import DEFAULT_CLIENT_INFO, PermissionServiceTransport +from .transports.grpc_asyncio import PermissionServiceGrpcAsyncIOTransport + +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + + +class PermissionServiceAsyncClient: + """Provides methods for managing permissions to PaLM API + resources. + """ + + _client: PermissionServiceClient + + # Copy defaults from the synchronous client for use here. + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. 
+ DEFAULT_ENDPOINT = PermissionServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = PermissionServiceClient.DEFAULT_MTLS_ENDPOINT + _DEFAULT_ENDPOINT_TEMPLATE = PermissionServiceClient._DEFAULT_ENDPOINT_TEMPLATE + _DEFAULT_UNIVERSE = PermissionServiceClient._DEFAULT_UNIVERSE + + permission_path = staticmethod(PermissionServiceClient.permission_path) + parse_permission_path = staticmethod(PermissionServiceClient.parse_permission_path) + common_billing_account_path = staticmethod( + PermissionServiceClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + PermissionServiceClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(PermissionServiceClient.common_folder_path) + parse_common_folder_path = staticmethod( + PermissionServiceClient.parse_common_folder_path + ) + common_organization_path = staticmethod( + PermissionServiceClient.common_organization_path + ) + parse_common_organization_path = staticmethod( + PermissionServiceClient.parse_common_organization_path + ) + common_project_path = staticmethod(PermissionServiceClient.common_project_path) + parse_common_project_path = staticmethod( + PermissionServiceClient.parse_common_project_path + ) + common_location_path = staticmethod(PermissionServiceClient.common_location_path) + parse_common_location_path = staticmethod( + PermissionServiceClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + PermissionServiceAsyncClient: The constructed client. + """ + return PermissionServiceClient.from_service_account_info.__func__(PermissionServiceAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + PermissionServiceAsyncClient: The constructed client. + """ + return PermissionServiceClient.from_service_account_file.__func__(PermissionServiceAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. 
+ (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return PermissionServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> PermissionServiceTransport: + """Returns the transport used by the client instance. + + Returns: + PermissionServiceTransport: The transport used by the client instance. + """ + return self._client.transport + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._client._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used + by the client instance. + """ + return self._client._universe_domain + + get_transport_class = PermissionServiceClient.get_transport_class + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[ + str, + PermissionServiceTransport, + Callable[..., PermissionServiceTransport], + ] + ] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the permission service async client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,PermissionServiceTransport,Callable[..., PermissionServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport to use. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the PermissionServiceTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. 
If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = PermissionServiceClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ): # pragma: NO COVER + _LOGGER.debug( + "Created client `google.ai.generativelanguage_v1alpha.PermissionServiceAsyncClient`.", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.PermissionService", + "universeDomain": getattr( + self._client._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._client._transport, "_credentials") + else { + "serviceName": "google.ai.generativelanguage.v1alpha.PermissionService", + "credentialsType": None, + }, + ) + + async def create_permission( + self, + request: Optional[ + Union[permission_service.CreatePermissionRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + permission: Optional[gag_permission.Permission] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> gag_permission.Permission: + r"""Create a permission to a specific resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1alpha + + async def sample_create_permission(): + # Create a client + client = generativelanguage_v1alpha.PermissionServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.CreatePermissionRequest( + parent="parent_value", + ) + + # Make the request + response = await client.create_permission(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.ai.generativelanguage_v1alpha.types.CreatePermissionRequest, dict]]): + The request object. Request to create a ``Permission``. + parent (:class:`str`): + Required. The parent resource of the ``Permission``. 
+ Formats: ``tunedModels/{tuned_model}`` + ``corpora/{corpus}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + permission (:class:`google.ai.generativelanguage_v1alpha.types.Permission`): + Required. The permission to create. + This corresponds to the ``permission`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.ai.generativelanguage_v1alpha.types.Permission: + Permission resource grants user, + group or the rest of the world access to + the PaLM API resource (e.g. a tuned + model, corpus). + + A role is a collection of permitted + operations that allows users to perform + specific actions on PaLM API resources. + To make them available to users, groups, + or service accounts, you assign roles. + When you assign a role, you grant + permissions that the role contains. + + There are three concentric roles. Each + role is a superset of the previous + role's permitted operations: + + - reader can use the resource (e.g. + tuned model, corpus) for inference + - writer has reader's permissions and + additionally can edit and share + - owner has writer's permissions and + additionally can delete + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, permission]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, permission_service.CreatePermissionRequest): + request = permission_service.CreatePermissionRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if permission is not None: + request.permission = permission + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_permission + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
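Equivalent to the request-object sample in the docstring above, the flattened ``parent`` / ``permission`` arguments can be passed instead (the resource name below is hypothetical); supplying both a ``request`` and flattened fields raises ``ValueError``:

.. code-block:: python

    from google.ai import generativelanguage_v1alpha

    async def create_reader_permission():
        client = generativelanguage_v1alpha.PermissionServiceAsyncClient()
        # Flattened fields instead of a CreatePermissionRequest object.
        return await client.create_permission(
            parent="tunedModels/my-tuned-model",
            permission=generativelanguage_v1alpha.Permission(
                grantee_type=generativelanguage_v1alpha.Permission.GranteeType.EVERYONE,
                role=generativelanguage_v1alpha.Permission.Role.READER,
            ),
        )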
+ return response + + async def get_permission( + self, + request: Optional[Union[permission_service.GetPermissionRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> permission.Permission: + r"""Gets information about a specific Permission. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1alpha + + async def sample_get_permission(): + # Create a client + client = generativelanguage_v1alpha.PermissionServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.GetPermissionRequest( + name="name_value", + ) + + # Make the request + response = await client.get_permission(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.ai.generativelanguage_v1alpha.types.GetPermissionRequest, dict]]): + The request object. Request for getting information about a specific + ``Permission``. + name (:class:`str`): + Required. The resource name of the permission. + + Formats: + ``tunedModels/{tuned_model}/permissions/{permission}`` + ``corpora/{corpus}/permissions/{permission}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.ai.generativelanguage_v1alpha.types.Permission: + Permission resource grants user, + group or the rest of the world access to + the PaLM API resource (e.g. a tuned + model, corpus). + + A role is a collection of permitted + operations that allows users to perform + specific actions on PaLM API resources. + To make them available to users, groups, + or service accounts, you assign roles. + When you assign a role, you grant + permissions that the role contains. + + There are three concentric roles. Each + role is a superset of the previous + role's permitted operations: + + - reader can use the resource (e.g. + tuned model, corpus) for inference + - writer has reader's permissions and + additionally can edit and share + - owner has writer's permissions and + additionally can delete + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, permission_service.GetPermissionRequest): + request = permission_service.GetPermissionRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_permission + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_permissions( + self, + request: Optional[ + Union[permission_service.ListPermissionsRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListPermissionsAsyncPager: + r"""Lists permissions for the specific resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1alpha + + async def sample_list_permissions(): + # Create a client + client = generativelanguage_v1alpha.PermissionServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.ListPermissionsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_permissions(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.ai.generativelanguage_v1alpha.types.ListPermissionsRequest, dict]]): + The request object. Request for listing permissions. + parent (:class:`str`): + Required. The parent resource of the permissions. + Formats: ``tunedModels/{tuned_model}`` + ``corpora/{corpus}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.ai.generativelanguage_v1alpha.services.permission_service.pagers.ListPermissionsAsyncPager: + Response from ListPermissions containing a paginated list of + permissions. + + Iterating over this object will yield results and + resolve additional pages automatically. 
+ + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, permission_service.ListPermissionsRequest): + request = permission_service.ListPermissionsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_permissions + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListPermissionsAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def update_permission( + self, + request: Optional[ + Union[permission_service.UpdatePermissionRequest, dict] + ] = None, + *, + permission: Optional[gag_permission.Permission] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> gag_permission.Permission: + r"""Updates the permission. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1alpha + + async def sample_update_permission(): + # Create a client + client = generativelanguage_v1alpha.PermissionServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.UpdatePermissionRequest( + ) + + # Make the request + response = await client.update_permission(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.ai.generativelanguage_v1alpha.types.UpdatePermissionRequest, dict]]): + The request object. Request to update the ``Permission``. + permission (:class:`google.ai.generativelanguage_v1alpha.types.Permission`): + Required. The permission to update. + + The permission's ``name`` field is used to identify the + permission to update. 
+ + This corresponds to the ``permission`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. The list of fields to update. Accepted ones: + + - role (``Permission.role`` field) + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.ai.generativelanguage_v1alpha.types.Permission: + Permission resource grants user, + group or the rest of the world access to + the PaLM API resource (e.g. a tuned + model, corpus). + + A role is a collection of permitted + operations that allows users to perform + specific actions on PaLM API resources. + To make them available to users, groups, + or service accounts, you assign roles. + When you assign a role, you grant + permissions that the role contains. + + There are three concentric roles. Each + role is a superset of the previous + role's permitted operations: + + - reader can use the resource (e.g. + tuned model, corpus) for inference + - writer has reader's permissions and + additionally can edit and share + - owner has writer's permissions and + additionally can delete + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([permission, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, permission_service.UpdatePermissionRequest): + request = permission_service.UpdatePermissionRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if permission is not None: + request.permission = permission + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_permission + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("permission.name", request.permission.name),) + ), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
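A flattened-call sketch for the update described above, assuming a hypothetical permission name; per the docstring, only the ``role`` path is accepted in the update mask:

.. code-block:: python

    from google.protobuf import field_mask_pb2

    from google.ai import generativelanguage_v1alpha

    async def demote_to_reader():
        client = generativelanguage_v1alpha.PermissionServiceAsyncClient()
        return await client.update_permission(
            permission=generativelanguage_v1alpha.Permission(
                name="tunedModels/my-tuned-model/permissions/my-permission",
                role=generativelanguage_v1alpha.Permission.Role.READER,
            ),
            update_mask=field_mask_pb2.FieldMask(paths=["role"]),
        )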
+ return response + + async def delete_permission( + self, + request: Optional[ + Union[permission_service.DeletePermissionRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Deletes the permission. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1alpha + + async def sample_delete_permission(): + # Create a client + client = generativelanguage_v1alpha.PermissionServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.DeletePermissionRequest( + name="name_value", + ) + + # Make the request + await client.delete_permission(request=request) + + Args: + request (Optional[Union[google.ai.generativelanguage_v1alpha.types.DeletePermissionRequest, dict]]): + The request object. Request to delete the ``Permission``. + name (:class:`str`): + Required. The resource name of the permission. Formats: + ``tunedModels/{tuned_model}/permissions/{permission}`` + ``corpora/{corpus}/permissions/{permission}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, permission_service.DeletePermissionRequest): + request = permission_service.DeletePermissionRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_permission + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. 
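Because ``delete_permission`` returns ``None``, callers typically only handle errors; a sketch (hypothetical resource name) of treating an already-deleted permission as success via the mapped exception type:

.. code-block:: python

    from google.api_core import exceptions as core_exceptions

    from google.ai import generativelanguage_v1alpha

    async def delete_if_present():
        client = generativelanguage_v1alpha.PermissionServiceAsyncClient()
        try:
            await client.delete_permission(
                name="tunedModels/my-tuned-model/permissions/my-permission"
            )
        except core_exceptions.NotFound:
            # Already gone; nothing to do.
            pass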
+ await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def transfer_ownership( + self, + request: Optional[ + Union[permission_service.TransferOwnershipRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> permission_service.TransferOwnershipResponse: + r"""Transfers ownership of the tuned model. + This is the only way to change ownership of the tuned + model. The current owner will be downgraded to writer + role. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1alpha + + async def sample_transfer_ownership(): + # Create a client + client = generativelanguage_v1alpha.PermissionServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.TransferOwnershipRequest( + name="name_value", + email_address="email_address_value", + ) + + # Make the request + response = await client.transfer_ownership(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.ai.generativelanguage_v1alpha.types.TransferOwnershipRequest, dict]]): + The request object. Request to transfer the ownership of + the tuned model. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.ai.generativelanguage_v1alpha.types.TransferOwnershipResponse: + Response from TransferOwnership. + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, permission_service.TransferOwnershipRequest): + request = permission_service.TransferOwnershipRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.transfer_ownership + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
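The ``metadata`` parameter accepted by each method follows the convention stated in the docstrings: values are ``str`` unless the key ends in ``-bin``, in which case ``bytes`` are expected. A sketch with hypothetical header names and a hypothetical tuned model:

.. code-block:: python

    from google.ai import generativelanguage_v1alpha

    async def transfer_with_metadata():
        client = generativelanguage_v1alpha.PermissionServiceAsyncClient()
        request = generativelanguage_v1alpha.TransferOwnershipRequest(
            name="tunedModels/my-tuned-model",
            email_address="new-owner@example.com",
        )
        return await client.transfer_ownership(
            request=request,
            metadata=(
                ("x-goog-example", "text-value"),
                ("x-goog-example-bin", b"\x00\x01"),
            ),
        )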
+ return response + + async def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.list_operations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.get_operation] + + # Certain fields should be provided within the metadata header; + # add these here. 
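As the comment in ``list_operations`` notes, these operations requests are raw protobuf messages rather than proto-plus types, so a plain ``dict`` is also accepted and expanded into the request via keyword expansion. A sketch with a hypothetical tuned-model name:

.. code-block:: python

    from google.longrunning import operations_pb2

    async def list_tuning_operations(client):
        # Either a raw operations_pb2 message or a plain dict works here.
        by_message = await client.list_operations(
            operations_pb2.ListOperationsRequest(name="tunedModels/my-tuned-model")
        )
        by_dict = await client.list_operations({"name": "tunedModels/my-tuned-model"})
        return by_message, by_dict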
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def __aenter__(self) -> "PermissionServiceAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("PermissionServiceAsyncClient",) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/permission_service/client.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/permission_service/client.py new file mode 100644 index 000000000000..fcd82480e9d3 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/permission_service/client.py @@ -0,0 +1,1569 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from http import HTTPStatus +import json +import logging as std_logging +import os +import re +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) +import warnings + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.ai.generativelanguage_v1alpha import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore + +from google.ai.generativelanguage_v1alpha.services.permission_service import pagers +from google.ai.generativelanguage_v1alpha.types import permission as gag_permission +from google.ai.generativelanguage_v1alpha.types import permission +from google.ai.generativelanguage_v1alpha.types import permission_service + +from .transports.base 
import DEFAULT_CLIENT_INFO, PermissionServiceTransport
+from .transports.grpc import PermissionServiceGrpcTransport
+from .transports.grpc_asyncio import PermissionServiceGrpcAsyncIOTransport
+from .transports.rest import PermissionServiceRestTransport
+
+
+class PermissionServiceClientMeta(type):
+    """Metaclass for the PermissionService client.
+
+    This provides class-level methods for building and retrieving
+    support objects (e.g. transport) without polluting the client instance
+    objects.
+    """
+
+    _transport_registry = (
+        OrderedDict()
+    )  # type: Dict[str, Type[PermissionServiceTransport]]
+    _transport_registry["grpc"] = PermissionServiceGrpcTransport
+    _transport_registry["grpc_asyncio"] = PermissionServiceGrpcAsyncIOTransport
+    _transport_registry["rest"] = PermissionServiceRestTransport
+
+    def get_transport_class(
+        cls,
+        label: Optional[str] = None,
+    ) -> Type[PermissionServiceTransport]:
+        """Returns an appropriate transport class.
+
+        Args:
+            label: The name of the desired transport. If none is
+                provided, then the first transport in the registry is used.
+
+        Returns:
+            The transport class to use.
+        """
+        # If a specific transport is requested, return that one.
+        if label:
+            return cls._transport_registry[label]
+
+        # No transport is requested; return the default (that is, the first one
+        # in the dictionary).
+        return next(iter(cls._transport_registry.values()))
+
+
+class PermissionServiceClient(metaclass=PermissionServiceClientMeta):
+    """Provides methods for managing permissions to PaLM API
+    resources.
+    """
+
+    @staticmethod
+    def _get_default_mtls_endpoint(api_endpoint):
+        """Converts api endpoint to mTLS endpoint.
+
+        Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
+        "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
+        Args:
+            api_endpoint (Optional[str]): the api endpoint to convert.
+        Returns:
+            str: converted mTLS api endpoint.
+        """
+        if not api_endpoint:
+            return api_endpoint
+
+        mtls_endpoint_re = re.compile(
+            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
+        )
+
+        m = mtls_endpoint_re.match(api_endpoint)
+        name, mtls, sandbox, googledomain = m.groups()
+        if mtls or not googledomain:
+            return api_endpoint
+
+        if sandbox:
+            return api_endpoint.replace(
+                "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
+            )
+
+        return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
+
+    # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead.
+    DEFAULT_ENDPOINT = "generativelanguage.googleapis.com"
+    DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__(  # type: ignore
+        DEFAULT_ENDPOINT
+    )
+
+    _DEFAULT_ENDPOINT_TEMPLATE = "generativelanguage.{UNIVERSE_DOMAIN}"
+    _DEFAULT_UNIVERSE = "googleapis.com"
+
+    @classmethod
+    def from_service_account_info(cls, info: dict, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            info.
+
+        Args:
+            info (dict): The service account private key info.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            PermissionServiceClient: The constructed client.
+        """
+        credentials = service_account.Credentials.from_service_account_info(info)
+        kwargs["credentials"] = credentials
+        return cls(*args, **kwargs)
+
+    @classmethod
+    def from_service_account_file(cls, filename: str, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            file.
+
+        Args:
+            filename (str): The path to the service account private key json
+                file.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            PermissionServiceClient: The constructed client.
+        """
+        credentials = service_account.Credentials.from_service_account_file(filename)
+        kwargs["credentials"] = credentials
+        return cls(*args, **kwargs)
+
+    from_service_account_json = from_service_account_file
+
+    @property
+    def transport(self) -> PermissionServiceTransport:
+        """Returns the transport used by the client instance.
+
+        Returns:
+            PermissionServiceTransport: The transport used by the client
+                instance.
+        """
+        return self._transport
+
+    @staticmethod
+    def permission_path(
+        tuned_model: str,
+        permission: str,
+    ) -> str:
+        """Returns a fully-qualified permission string."""
+        return "tunedModels/{tuned_model}/permissions/{permission}".format(
+            tuned_model=tuned_model,
+            permission=permission,
+        )
+
+    @staticmethod
+    def parse_permission_path(path: str) -> Dict[str, str]:
+        """Parses a permission path into its component segments."""
+        m = re.match(
+            r"^tunedModels/(?P<tuned_model>.+?)/permissions/(?P<permission>.+?)$", path
+        )
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_billing_account_path(
+        billing_account: str,
+    ) -> str:
+        """Returns a fully-qualified billing_account string."""
+        return "billingAccounts/{billing_account}".format(
+            billing_account=billing_account,
+        )
+
+    @staticmethod
+    def parse_common_billing_account_path(path: str) -> Dict[str, str]:
+        """Parse a billing_account path into its component segments."""
+        m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_folder_path(
+        folder: str,
+    ) -> str:
+        """Returns a fully-qualified folder string."""
+        return "folders/{folder}".format(
+            folder=folder,
+        )
+
+    @staticmethod
+    def parse_common_folder_path(path: str) -> Dict[str, str]:
+        """Parse a folder path into its component segments."""
+        m = re.match(r"^folders/(?P<folder>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_organization_path(
+        organization: str,
+    ) -> str:
+        """Returns a fully-qualified organization string."""
+        return "organizations/{organization}".format(
+            organization=organization,
+        )
+
+    @staticmethod
+    def parse_common_organization_path(path: str) -> Dict[str, str]:
+        """Parse a organization path into its component segments."""
+        m = re.match(r"^organizations/(?P<organization>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_project_path(
+        project: str,
+    ) -> str:
+        """Returns a fully-qualified project string."""
+        return "projects/{project}".format(
+            project=project,
+        )
+
+    @staticmethod
+    def parse_common_project_path(path: str) -> Dict[str, str]:
+        """Parse a project path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_location_path(
+        project: str,
+        location: str,
+    ) -> str:
+        """Returns a fully-qualified location string."""
+        return "projects/{project}/locations/{location}".format(
+            project=project,
+            location=location,
+        )
+
+    @staticmethod
+    def parse_common_location_path(path: str) -> Dict[str, str]:
+        """Parse a location path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @classmethod
+    def get_mtls_endpoint_and_cert_source(
+        cls, client_options: 
Optional[client_options_lib.ClientOptions] = None + ): + """Deprecated. Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + + warnings.warn( + "get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", + DeprecationWarning, + ) + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + @staticmethod + def _read_environment_variables(): + """Returns the environment variables used by the client. + + Returns: + Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, + GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. + + Raises: + ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not + any of ["true", "false"]. + google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT + is not any of ["auto", "never", "always"]. 
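+
+        For example, with ``GOOGLE_API_USE_CLIENT_CERTIFICATE="true"`` and the
+        other two variables unset, this returns ``(True, "auto", None)``.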
+ """ + use_client_cert = os.getenv( + "GOOGLE_API_USE_CLIENT_CERTIFICATE", "false" + ).lower() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() + universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + return use_client_cert == "true", use_mtls_endpoint, universe_domain_env + + @staticmethod + def _get_client_cert_source(provided_cert_source, use_cert_flag): + """Return the client cert source to be used by the client. + + Args: + provided_cert_source (bytes): The client certificate source provided. + use_cert_flag (bool): A flag indicating whether to use the client certificate. + + Returns: + bytes or None: The client cert source to be used by the client. + """ + client_cert_source = None + if use_cert_flag: + if provided_cert_source: + client_cert_source = provided_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + return client_cert_source + + @staticmethod + def _get_api_endpoint( + api_override, client_cert_source, universe_domain, use_mtls_endpoint + ): + """Return the API endpoint used by the client. + + Args: + api_override (str): The API endpoint override. If specified, this is always + the return value of this function and the other arguments are not used. + client_cert_source (bytes): The client certificate source used by the client. + universe_domain (str): The universe domain used by the client. + use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. + Possible values are "always", "auto", or "never". + + Returns: + str: The API endpoint to be used by the client. + """ + if api_override is not None: + api_endpoint = api_override + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + _default_universe = PermissionServiceClient._DEFAULT_UNIVERSE + if universe_domain != _default_universe: + raise MutualTLSChannelError( + f"mTLS is not supported in any universe other than {_default_universe}." + ) + api_endpoint = PermissionServiceClient.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = PermissionServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=universe_domain + ) + return api_endpoint + + @staticmethod + def _get_universe_domain( + client_universe_domain: Optional[str], universe_domain_env: Optional[str] + ) -> str: + """Return the universe domain used by the client. + + Args: + client_universe_domain (Optional[str]): The universe domain configured via the client options. + universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. + + Returns: + str: The universe domain to be used by the client. + + Raises: + ValueError: If the universe domain is an empty string. 
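+
+        For example (illustrative values): a ``client_universe_domain`` of
+        ``"example.com"`` takes precedence over the environment variable, and
+        when neither is set the default ``"googleapis.com"`` is used.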
+ """ + universe_domain = PermissionServiceClient._DEFAULT_UNIVERSE + if client_universe_domain is not None: + universe_domain = client_universe_domain + elif universe_domain_env is not None: + universe_domain = universe_domain_env + if len(universe_domain.strip()) == 0: + raise ValueError("Universe Domain cannot be an empty string.") + return universe_domain + + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. + + Returns: + bool: True iff the configured universe domain is valid. + + Raises: + ValueError: If the configured universe domain is not valid. + """ + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True + + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used by the client instance. + """ + return self._universe_domain + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[ + str, + PermissionServiceTransport, + Callable[..., PermissionServiceTransport], + ] + ] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the permission service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,PermissionServiceTransport,Callable[..., PermissionServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the PermissionServiceTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. 
Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that the ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client_options = client_options + if isinstance(self._client_options, dict): + self._client_options = client_options_lib.from_dict(self._client_options) + if self._client_options is None: + self._client_options = client_options_lib.ClientOptions() + self._client_options = cast( + client_options_lib.ClientOptions, self._client_options + ) + + universe_domain_opt = getattr(self._client_options, "universe_domain", None) + + ( + self._use_client_cert, + self._use_mtls_endpoint, + self._universe_domain_env, + ) = PermissionServiceClient._read_environment_variables() + self._client_cert_source = PermissionServiceClient._get_client_cert_source( + self._client_options.client_cert_source, self._use_client_cert + ) + self._universe_domain = PermissionServiceClient._get_universe_domain( + universe_domain_opt, self._universe_domain_env + ) + self._api_endpoint = None # updated below, depending on `transport` + + # Initialize the universe domain validation. + self._is_universe_domain_valid = False + + if CLIENT_LOGGING_SUPPORTED: # pragma: NO COVER + # Setup logging. + client_logging.initialize_logging() + + api_key_value = getattr(self._client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + transport_provided = isinstance(transport, PermissionServiceTransport) + if transport_provided: + # transport is a PermissionServiceTransport instance. + if credentials or self._client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if self._client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = cast(PermissionServiceTransport, transport) + self._api_endpoint = self._transport.host + + self._api_endpoint = ( + self._api_endpoint + or PermissionServiceClient._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint, + ) + ) + + if not transport_provided: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + transport_init: Union[ + Type[PermissionServiceTransport], + Callable[..., PermissionServiceTransport], + ] = ( + PermissionServiceClient.get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., PermissionServiceTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( + credentials=credentials, + credentials_file=self._client_options.credentials_file, + host=self._api_endpoint, + scopes=self._client_options.scopes, + client_cert_source_for_mtls=self._client_cert_source, + quota_project_id=self._client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=self._client_options.api_audience, + ) + + if "async" not in str(self._transport): + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ): # pragma: NO COVER + _LOGGER.debug( + "Created client `google.ai.generativelanguage_v1alpha.PermissionServiceClient`.", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.PermissionService", + "universeDomain": getattr( + self._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._transport, "_credentials") + else { + "serviceName": "google.ai.generativelanguage.v1alpha.PermissionService", + "credentialsType": None, + }, + ) + + def create_permission( + self, + request: Optional[ + Union[permission_service.CreatePermissionRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + permission: Optional[gag_permission.Permission] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> gag_permission.Permission: + r"""Create a permission to a specific resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1alpha + + def sample_create_permission(): + # Create a client + client = generativelanguage_v1alpha.PermissionServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.CreatePermissionRequest( + parent="parent_value", + ) + + # Make the request + response = client.create_permission(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.ai.generativelanguage_v1alpha.types.CreatePermissionRequest, dict]): + The request object. Request to create a ``Permission``. + parent (str): + Required. The parent resource of the ``Permission``. + Formats: ``tunedModels/{tuned_model}`` + ``corpora/{corpus}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + permission (google.ai.generativelanguage_v1alpha.types.Permission): + Required. The permission to create. + This corresponds to the ``permission`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.ai.generativelanguage_v1alpha.types.Permission: + Permission resource grants user, + group or the rest of the world access to + the PaLM API resource (e.g. a tuned + model, corpus). + + A role is a collection of permitted + operations that allows users to perform + specific actions on PaLM API resources. + To make them available to users, groups, + or service accounts, you assign roles. + When you assign a role, you grant + permissions that the role contains. + + There are three concentric roles. Each + role is a superset of the previous + role's permitted operations: + + - reader can use the resource (e.g. + tuned model, corpus) for inference + - writer has reader's permissions and + additionally can edit and share + - owner has writer's permissions and + additionally can delete + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, permission]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, permission_service.CreatePermissionRequest): + request = permission_service.CreatePermissionRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if permission is not None: + request.permission = permission + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.create_permission] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_permission( + self, + request: Optional[Union[permission_service.GetPermissionRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> permission.Permission: + r"""Gets information about a specific Permission. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1alpha + + def sample_get_permission(): + # Create a client + client = generativelanguage_v1alpha.PermissionServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.GetPermissionRequest( + name="name_value", + ) + + # Make the request + response = client.get_permission(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.ai.generativelanguage_v1alpha.types.GetPermissionRequest, dict]): + The request object. Request for getting information about a specific + ``Permission``. + name (str): + Required. The resource name of the permission. + + Formats: + ``tunedModels/{tuned_model}/permissions/{permission}`` + ``corpora/{corpus}/permissions/{permission}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.ai.generativelanguage_v1alpha.types.Permission: + Permission resource grants user, + group or the rest of the world access to + the PaLM API resource (e.g. a tuned + model, corpus). + + A role is a collection of permitted + operations that allows users to perform + specific actions on PaLM API resources. + To make them available to users, groups, + or service accounts, you assign roles. + When you assign a role, you grant + permissions that the role contains. + + There are three concentric roles. Each + role is a superset of the previous + role's permitted operations: + + - reader can use the resource (e.g. + tuned model, corpus) for inference + - writer has reader's permissions and + additionally can edit and share + - owner has writer's permissions and + additionally can delete + + """ + # Create or coerce a protobuf request object. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, permission_service.GetPermissionRequest): + request = permission_service.GetPermissionRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_permission] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_permissions( + self, + request: Optional[ + Union[permission_service.ListPermissionsRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListPermissionsPager: + r"""Lists permissions for the specific resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1alpha + + def sample_list_permissions(): + # Create a client + client = generativelanguage_v1alpha.PermissionServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.ListPermissionsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_permissions(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.ai.generativelanguage_v1alpha.types.ListPermissionsRequest, dict]): + The request object. Request for listing permissions. + parent (str): + Required. The parent resource of the permissions. + Formats: ``tunedModels/{tuned_model}`` + ``corpora/{corpus}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ + Returns: + google.ai.generativelanguage_v1alpha.services.permission_service.pagers.ListPermissionsPager: + Response from ListPermissions containing a paginated list of + permissions. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, permission_service.ListPermissionsRequest): + request = permission_service.ListPermissionsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_permissions] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListPermissionsPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_permission( + self, + request: Optional[ + Union[permission_service.UpdatePermissionRequest, dict] + ] = None, + *, + permission: Optional[gag_permission.Permission] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> gag_permission.Permission: + r"""Updates the permission. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1alpha + + def sample_update_permission(): + # Create a client + client = generativelanguage_v1alpha.PermissionServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.UpdatePermissionRequest( + ) + + # Make the request + response = client.update_permission(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.ai.generativelanguage_v1alpha.types.UpdatePermissionRequest, dict]): + The request object. Request to update the ``Permission``. + permission (google.ai.generativelanguage_v1alpha.types.Permission): + Required. The permission to update. 
+ + The permission's ``name`` field is used to identify the + permission to update. + + This corresponds to the ``permission`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. The list of fields to update. Accepted ones: + + - role (``Permission.role`` field) + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.ai.generativelanguage_v1alpha.types.Permission: + Permission resource grants user, + group or the rest of the world access to + the PaLM API resource (e.g. a tuned + model, corpus). + + A role is a collection of permitted + operations that allows users to perform + specific actions on PaLM API resources. + To make them available to users, groups, + or service accounts, you assign roles. + When you assign a role, you grant + permissions that the role contains. + + There are three concentric roles. Each + role is a superset of the previous + role's permitted operations: + + - reader can use the resource (e.g. + tuned model, corpus) for inference + - writer has reader's permissions and + additionally can edit and share + - owner has writer's permissions and + additionally can delete + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([permission, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, permission_service.UpdatePermissionRequest): + request = permission_service.UpdatePermissionRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if permission is not None: + request.permission = permission + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_permission] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("permission.name", request.permission.name),) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def delete_permission( + self, + request: Optional[ + Union[permission_service.DeletePermissionRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Deletes the permission. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1alpha + + def sample_delete_permission(): + # Create a client + client = generativelanguage_v1alpha.PermissionServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.DeletePermissionRequest( + name="name_value", + ) + + # Make the request + client.delete_permission(request=request) + + Args: + request (Union[google.ai.generativelanguage_v1alpha.types.DeletePermissionRequest, dict]): + The request object. Request to delete the ``Permission``. + name (str): + Required. The resource name of the permission. Formats: + ``tunedModels/{tuned_model}/permissions/{permission}`` + ``corpora/{corpus}/permissions/{permission}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, permission_service.DeletePermissionRequest): + request = permission_service.DeletePermissionRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_permission] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
+ rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def transfer_ownership( + self, + request: Optional[ + Union[permission_service.TransferOwnershipRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> permission_service.TransferOwnershipResponse: + r"""Transfers ownership of the tuned model. + This is the only way to change ownership of the tuned + model. The current owner will be downgraded to writer + role. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1alpha + + def sample_transfer_ownership(): + # Create a client + client = generativelanguage_v1alpha.PermissionServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.TransferOwnershipRequest( + name="name_value", + email_address="email_address_value", + ) + + # Make the request + response = client.transfer_ownership(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.ai.generativelanguage_v1alpha.types.TransferOwnershipRequest, dict]): + The request object. Request to transfer the ownership of + the tuned model. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.ai.generativelanguage_v1alpha.types.TransferOwnershipResponse: + Response from TransferOwnership. + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, permission_service.TransferOwnershipRequest): + request = permission_service.TransferOwnershipRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.transfer_ownership] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "PermissionServiceClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! 
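+
+        A minimal usage sketch (the resource name below is a placeholder)::
+
+            with PermissionServiceClient() as client:
+                client.delete_permission(
+                    name="tunedModels/my-model/permissions/my-permission"
+                )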
+ """ + self.transport.close() + + def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_operations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e + + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_operation] + + # Certain fields should be provided within the metadata header; + # add these here. 
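+        # (The name is sent as "x-goog-request-params" request metadata so the
+        # backend can route the call to the correct resource.)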
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("PermissionServiceClient",) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/permission_service/pagers.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/permission_service/pagers.py new file mode 100644 index 000000000000..939d51890314 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/permission_service/pagers.py @@ -0,0 +1,197 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import ( + Any, + AsyncIterator, + Awaitable, + Callable, + Iterator, + Optional, + Sequence, + Tuple, + Union, +) + +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core import retry_async as retries_async + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] + OptionalAsyncRetry = Union[ + retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None + ] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore + +from google.ai.generativelanguage_v1alpha.types import permission, permission_service + + +class ListPermissionsPager: + """A pager for iterating through ``list_permissions`` requests. + + This class thinly wraps an initial + :class:`google.ai.generativelanguage_v1alpha.types.ListPermissionsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``permissions`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListPermissions`` requests and continue to iterate + through the ``permissions`` field on the + corresponding responses. + + All the usual :class:`google.ai.generativelanguage_v1alpha.types.ListPermissionsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
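+
+    A minimal usage sketch (``client`` is an instantiated
+    ``PermissionServiceClient``; the parent value is a placeholder)::
+
+        pager = client.list_permissions(parent="tunedModels/my-tuned-model")
+        for permission in pager:
+            print(permission.name)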
+ """ + + def __init__( + self, + method: Callable[..., permission_service.ListPermissionsResponse], + request: permission_service.ListPermissionsRequest, + response: permission_service.ListPermissionsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.ai.generativelanguage_v1alpha.types.ListPermissionsRequest): + The initial request object. + response (google.ai.generativelanguage_v1alpha.types.ListPermissionsResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + self._method = method + self._request = permission_service.ListPermissionsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[permission_service.ListPermissionsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__(self) -> Iterator[permission.Permission]: + for page in self.pages: + yield from page.permissions + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListPermissionsAsyncPager: + """A pager for iterating through ``list_permissions`` requests. + + This class thinly wraps an initial + :class:`google.ai.generativelanguage_v1alpha.types.ListPermissionsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``permissions`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListPermissions`` requests and continue to iterate + through the ``permissions`` field on the + corresponding responses. + + All the usual :class:`google.ai.generativelanguage_v1alpha.types.ListPermissionsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[permission_service.ListPermissionsResponse]], + request: permission_service.ListPermissionsRequest, + response: permission_service.ListPermissionsResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.ai.generativelanguage_v1alpha.types.ListPermissionsRequest): + The initial request object. 
+ response (google.ai.generativelanguage_v1alpha.types.ListPermissionsResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + self._method = method + self._request = permission_service.ListPermissionsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[permission_service.ListPermissionsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __aiter__(self) -> AsyncIterator[permission.Permission]: + async def async_generator(): + async for page in self.pages: + for response in page.permissions: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/permission_service/transports/README.rst b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/permission_service/transports/README.rst new file mode 100644 index 000000000000..240b1d751a3a --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/permission_service/transports/README.rst @@ -0,0 +1,9 @@ + +transport inheritance structure +_______________________________ + +`PermissionServiceTransport` is the ABC for all transports. +- public child `PermissionServiceGrpcTransport` for sync gRPC transport (defined in `grpc.py`). +- public child `PermissionServiceGrpcAsyncIOTransport` for async gRPC transport (defined in `grpc_asyncio.py`). +- private child `_BasePermissionServiceRestTransport` for base REST transport with inner classes `_BaseMETHOD` (defined in `rest_base.py`). +- public child `PermissionServiceRestTransport` for sync REST transport with inner classes `METHOD` derived from the parent's corresponding `_BaseMETHOD` classes (defined in `rest.py`). diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/permission_service/transports/__init__.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/permission_service/transports/__init__.py new file mode 100644 index 000000000000..05310b85abe5 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/permission_service/transports/__init__.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import PermissionServiceTransport +from .grpc import PermissionServiceGrpcTransport +from .grpc_asyncio import PermissionServiceGrpcAsyncIOTransport +from .rest import PermissionServiceRestInterceptor, PermissionServiceRestTransport + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[PermissionServiceTransport]] +_transport_registry["grpc"] = PermissionServiceGrpcTransport +_transport_registry["grpc_asyncio"] = PermissionServiceGrpcAsyncIOTransport +_transport_registry["rest"] = PermissionServiceRestTransport + +__all__ = ( + "PermissionServiceTransport", + "PermissionServiceGrpcTransport", + "PermissionServiceGrpcAsyncIOTransport", + "PermissionServiceRestTransport", + "PermissionServiceRestInterceptor", +) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/permission_service/transports/base.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/permission_service/transports/base.py new file mode 100644 index 000000000000..b7201aa373aa --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/permission_service/transports/base.py @@ -0,0 +1,272 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
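The registry in the transports ``__init__.py`` above is what allows a transport to be selected by name; the same strings are accepted by the client constructor. A short illustrative sketch (``_transport_registry`` is an internal detail, shown only to make the mapping concrete):

    from google.ai.generativelanguage_v1alpha.services.permission_service import (
        PermissionServiceClient,
        transports,
    )

    # "grpc", "grpc_asyncio" and "rest" map to the classes registered above.
    assert transports._transport_registry["rest"] is transports.PermissionServiceRestTransport

    # The client accepts the same name and instantiates the transport internally.
    client = PermissionServiceClient(transport="rest")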
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account # type: ignore +from google.protobuf import empty_pb2 # type: ignore + +from google.ai.generativelanguage_v1alpha import gapic_version as package_version +from google.ai.generativelanguage_v1alpha.types import permission as gag_permission +from google.ai.generativelanguage_v1alpha.types import permission +from google.ai.generativelanguage_v1alpha.types import permission_service + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class PermissionServiceTransport(abc.ABC): + """Abstract transport class for PermissionService.""" + + AUTH_SCOPES = () + + DEFAULT_HOST: str = "generativelanguage.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'generativelanguage.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + if not hasattr(self, "_ignore_credentials"): + self._ignore_credentials: bool = False + + # If no credentials are provided, then determine the appropriate + # defaults. 
+ if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None and not self._ignore_credentials: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + @property + def host(self): + return self._host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.create_permission: gapic_v1.method.wrap_method( + self.create_permission, + default_timeout=None, + client_info=client_info, + ), + self.get_permission: gapic_v1.method.wrap_method( + self.get_permission, + default_timeout=None, + client_info=client_info, + ), + self.list_permissions: gapic_v1.method.wrap_method( + self.list_permissions, + default_timeout=None, + client_info=client_info, + ), + self.update_permission: gapic_v1.method.wrap_method( + self.update_permission, + default_timeout=None, + client_info=client_info, + ), + self.delete_permission: gapic_v1.method.wrap_method( + self.delete_permission, + default_timeout=None, + client_info=client_info, + ), + self.transfer_ownership: gapic_v1.method.wrap_method( + self.transfer_ownership, + default_timeout=None, + client_info=client_info, + ), + self.get_operation: gapic_v1.method.wrap_method( + self.get_operation, + default_timeout=None, + client_info=client_info, + ), + self.list_operations: gapic_v1.method.wrap_method( + self.list_operations, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
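The constructor logic above resolves credentials in a fixed order: an explicit ``credentials`` object, then ``credentials_file``, then application default credentials, and it refuses both explicit forms at once. A hedged sketch of the two explicit options (the service-account path is a placeholder):

    from google.oauth2 import service_account

    from google.ai.generativelanguage_v1alpha.services.permission_service.transports import (
        PermissionServiceGrpcTransport,
    )

    # Option 1: pass a credentials object directly.
    creds = service_account.Credentials.from_service_account_file("sa.json")
    transport = PermissionServiceGrpcTransport(credentials=creds)

    # Option 2: let the transport load the file itself. Passing both this and
    # `credentials=` raises DuplicateCredentialArgs, as enforced above.
    transport = PermissionServiceGrpcTransport(credentials_file="sa.json")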
+ """ + raise NotImplementedError() + + @property + def create_permission( + self, + ) -> Callable[ + [permission_service.CreatePermissionRequest], + Union[gag_permission.Permission, Awaitable[gag_permission.Permission]], + ]: + raise NotImplementedError() + + @property + def get_permission( + self, + ) -> Callable[ + [permission_service.GetPermissionRequest], + Union[permission.Permission, Awaitable[permission.Permission]], + ]: + raise NotImplementedError() + + @property + def list_permissions( + self, + ) -> Callable[ + [permission_service.ListPermissionsRequest], + Union[ + permission_service.ListPermissionsResponse, + Awaitable[permission_service.ListPermissionsResponse], + ], + ]: + raise NotImplementedError() + + @property + def update_permission( + self, + ) -> Callable[ + [permission_service.UpdatePermissionRequest], + Union[gag_permission.Permission, Awaitable[gag_permission.Permission]], + ]: + raise NotImplementedError() + + @property + def delete_permission( + self, + ) -> Callable[ + [permission_service.DeletePermissionRequest], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], + ]: + raise NotImplementedError() + + @property + def transfer_ownership( + self, + ) -> Callable[ + [permission_service.TransferOwnershipRequest], + Union[ + permission_service.TransferOwnershipResponse, + Awaitable[permission_service.TransferOwnershipResponse], + ], + ]: + raise NotImplementedError() + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], + Union[ + operations_pb2.ListOperationsResponse, + Awaitable[operations_pb2.ListOperationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_operation( + self, + ) -> Callable[ + [operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("PermissionServiceTransport",) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/permission_service/transports/grpc.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/permission_service/transports/grpc.py new file mode 100644 index 000000000000..c5f22f7aa880 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/permission_service/transports/grpc.py @@ -0,0 +1,541 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import json +import logging as std_logging +import pickle +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf.json_format import MessageToJson +import google.protobuf.message +import grpc # type: ignore +import proto # type: ignore + +from google.ai.generativelanguage_v1alpha.types import permission as gag_permission +from google.ai.generativelanguage_v1alpha.types import permission +from google.ai.generativelanguage_v1alpha.types import permission_service + +from .base import DEFAULT_CLIENT_INFO, PermissionServiceTransport + +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + + +class _LoggingClientInterceptor(grpc.UnaryUnaryClientInterceptor): # pragma: NO COVER + def intercept_unary_unary(self, continuation, client_call_details, request): + logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ) + if logging_enabled: # pragma: NO COVER + request_metadata = client_call_details.metadata + if isinstance(request, proto.Message): + request_payload = type(request).to_json(request) + elif isinstance(request, google.protobuf.message.Message): + request_payload = MessageToJson(request) + else: + request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" + + request_metadata = { + key: value.decode("utf-8") if isinstance(value, bytes) else value + for key, value in request_metadata + } + grpc_request = { + "payload": request_payload, + "requestMethod": "grpc", + "metadata": dict(request_metadata), + } + _LOGGER.debug( + f"Sending request for {client_call_details.method}", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.PermissionService", + "rpcName": client_call_details.method, + "request": grpc_request, + "metadata": grpc_request["metadata"], + }, + ) + + response = continuation(client_call_details, request) + if logging_enabled: # pragma: NO COVER + response_metadata = response.trailing_metadata() + # Convert gRPC metadata `` to list of tuples + metadata = ( + dict([(k, str(v)) for k, v in response_metadata]) + if response_metadata + else None + ) + result = response.result() + if isinstance(result, proto.Message): + response_payload = type(result).to_json(result) + elif isinstance(result, google.protobuf.message.Message): + response_payload = MessageToJson(result) + else: + response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" + grpc_response = { + "payload": response_payload, + "metadata": metadata, + "status": "OK", + } + _LOGGER.debug( + f"Received response for {client_call_details.method}.", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.PermissionService", + "rpcName": client_call_details.method, + "response": grpc_response, + "metadata": grpc_response["metadata"], + }, + ) + return response + + +class PermissionServiceGrpcTransport(PermissionServiceTransport): + """gRPC backend transport for PermissionService. + + Provides methods for managing permissions to PaLM API + resources. 
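The interceptor above only logs request and response payloads when two conditions hold: the installed ``google-api-core`` provides ``client_logging`` (``CLIENT_LOGGING_SUPPORTED``) and this module's logger is enabled for DEBUG. A minimal way to satisfy the second condition; whether payloads actually appear still depends on the first:

    import logging

    # Child loggers inherit the effective level, so enabling DEBUG on the
    # package logger makes _LOGGER.isEnabledFor(DEBUG) true in this module.
    logging.basicConfig()
    logging.getLogger("google.ai.generativelanguage_v1alpha").setLevel(logging.DEBUG)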
+ + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "generativelanguage.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'generativelanguage.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if a ``channel`` instance is provided. + channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. 
+ Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, grpc.Channel): + # Ignore credentials if a channel was passed. + credentials = None + self._ignore_credentials = True + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + self._interceptor = _LoggingClientInterceptor() + self._logged_channel = grpc.intercept_channel( + self._grpc_channel, self._interceptor + ) + + # Wrap messages. This must be done after self._logged_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "generativelanguage.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. 
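As the docstring notes, ``channel`` may be either a ready ``grpc.Channel`` (in which case credentials arguments are ignored) or a callable invoked with the same arguments as ``create_channel``. A hedged sketch of both forms; the local endpoint is a placeholder, e.g. an emulator:

    import grpc

    from google.ai.generativelanguage_v1alpha.services.permission_service.transports import (
        PermissionServiceGrpcTransport,
    )

    # 1) Hand the transport an already-built channel; credentials are ignored.
    channel = grpc.insecure_channel("localhost:8080")
    transport = PermissionServiceGrpcTransport(channel=channel)

    # 2) Pass a callable; credentials are resolved first and forwarded to it.
    transport = PermissionServiceGrpcTransport(
        channel=PermissionServiceGrpcTransport.create_channel,
    )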
+ credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service.""" + return self._grpc_channel + + @property + def create_permission( + self, + ) -> Callable[ + [permission_service.CreatePermissionRequest], gag_permission.Permission + ]: + r"""Return a callable for the create permission method over gRPC. + + Create a permission to a specific resource. + + Returns: + Callable[[~.CreatePermissionRequest], + ~.Permission]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_permission" not in self._stubs: + self._stubs["create_permission"] = self._logged_channel.unary_unary( + "/google.ai.generativelanguage.v1alpha.PermissionService/CreatePermission", + request_serializer=permission_service.CreatePermissionRequest.serialize, + response_deserializer=gag_permission.Permission.deserialize, + ) + return self._stubs["create_permission"] + + @property + def get_permission( + self, + ) -> Callable[[permission_service.GetPermissionRequest], permission.Permission]: + r"""Return a callable for the get permission method over gRPC. + + Gets information about a specific Permission. + + Returns: + Callable[[~.GetPermissionRequest], + ~.Permission]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_permission" not in self._stubs: + self._stubs["get_permission"] = self._logged_channel.unary_unary( + "/google.ai.generativelanguage.v1alpha.PermissionService/GetPermission", + request_serializer=permission_service.GetPermissionRequest.serialize, + response_deserializer=permission.Permission.deserialize, + ) + return self._stubs["get_permission"] + + @property + def list_permissions( + self, + ) -> Callable[ + [permission_service.ListPermissionsRequest], + permission_service.ListPermissionsResponse, + ]: + r"""Return a callable for the list permissions method over gRPC. + + Lists permissions for the specific resource. + + Returns: + Callable[[~.ListPermissionsRequest], + ~.ListPermissionsResponse]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_permissions" not in self._stubs: + self._stubs["list_permissions"] = self._logged_channel.unary_unary( + "/google.ai.generativelanguage.v1alpha.PermissionService/ListPermissions", + request_serializer=permission_service.ListPermissionsRequest.serialize, + response_deserializer=permission_service.ListPermissionsResponse.deserialize, + ) + return self._stubs["list_permissions"] + + @property + def update_permission( + self, + ) -> Callable[ + [permission_service.UpdatePermissionRequest], gag_permission.Permission + ]: + r"""Return a callable for the update permission method over gRPC. + + Updates the permission. + + Returns: + Callable[[~.UpdatePermissionRequest], + ~.Permission]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_permission" not in self._stubs: + self._stubs["update_permission"] = self._logged_channel.unary_unary( + "/google.ai.generativelanguage.v1alpha.PermissionService/UpdatePermission", + request_serializer=permission_service.UpdatePermissionRequest.serialize, + response_deserializer=gag_permission.Permission.deserialize, + ) + return self._stubs["update_permission"] + + @property + def delete_permission( + self, + ) -> Callable[[permission_service.DeletePermissionRequest], empty_pb2.Empty]: + r"""Return a callable for the delete permission method over gRPC. + + Deletes the permission. + + Returns: + Callable[[~.DeletePermissionRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_permission" not in self._stubs: + self._stubs["delete_permission"] = self._logged_channel.unary_unary( + "/google.ai.generativelanguage.v1alpha.PermissionService/DeletePermission", + request_serializer=permission_service.DeletePermissionRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_permission"] + + @property + def transfer_ownership( + self, + ) -> Callable[ + [permission_service.TransferOwnershipRequest], + permission_service.TransferOwnershipResponse, + ]: + r"""Return a callable for the transfer ownership method over gRPC. + + Transfers ownership of the tuned model. + This is the only way to change ownership of the tuned + model. The current owner will be downgraded to writer + role. + + Returns: + Callable[[~.TransferOwnershipRequest], + ~.TransferOwnershipResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "transfer_ownership" not in self._stubs: + self._stubs["transfer_ownership"] = self._logged_channel.unary_unary( + "/google.ai.generativelanguage.v1alpha.PermissionService/TransferOwnership", + request_serializer=permission_service.TransferOwnershipRequest.serialize, + response_deserializer=permission_service.TransferOwnershipResponse.deserialize, + ) + return self._stubs["transfer_ownership"] + + def close(self): + self._logged_channel.close() + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("PermissionServiceGrpcTransport",) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/permission_service/transports/grpc_asyncio.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/permission_service/transports/grpc_asyncio.py new file mode 100644 index 000000000000..134d9d327043 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/permission_service/transports/grpc_asyncio.py @@ -0,0 +1,604 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import inspect +import json +import logging as std_logging +import pickle +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, grpc_helpers_async +from google.api_core import retry_async as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf.json_format import MessageToJson +import google.protobuf.message +import grpc # type: ignore +from grpc.experimental import aio # type: ignore +import proto # type: ignore + +from google.ai.generativelanguage_v1alpha.types import permission as gag_permission +from google.ai.generativelanguage_v1alpha.types import permission +from google.ai.generativelanguage_v1alpha.types import permission_service + +from .base import DEFAULT_CLIENT_INFO, PermissionServiceTransport +from .grpc import PermissionServiceGrpcTransport + +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + + +class _LoggingClientAIOInterceptor( + grpc.aio.UnaryUnaryClientInterceptor +): # pragma: NO COVER + async def intercept_unary_unary(self, continuation, client_call_details, request): + logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ) + if logging_enabled: # pragma: NO COVER + request_metadata = client_call_details.metadata + if isinstance(request, proto.Message): + request_payload = type(request).to_json(request) + elif isinstance(request, google.protobuf.message.Message): + request_payload = MessageToJson(request) + else: + request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" + + request_metadata = { + key: value.decode("utf-8") if isinstance(value, bytes) else value + for key, value in request_metadata + } + grpc_request = { + "payload": request_payload, + "requestMethod": "grpc", + "metadata": dict(request_metadata), + } + _LOGGER.debug( + f"Sending request for {client_call_details.method}", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.PermissionService", + "rpcName": str(client_call_details.method), + "request": grpc_request, + "metadata": grpc_request["metadata"], + }, + ) + response = await continuation(client_call_details, request) + if logging_enabled: # pragma: NO COVER + response_metadata = await response.trailing_metadata() + # Convert gRPC metadata `` to list of tuples + metadata = ( + dict([(k, str(v)) for k, v in response_metadata]) + if response_metadata + else None + ) + result = await response + if isinstance(result, proto.Message): + response_payload = type(result).to_json(result) + elif isinstance(result, google.protobuf.message.Message): + response_payload = MessageToJson(result) + else: + response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" + grpc_response = { + "payload": response_payload, + "metadata": metadata, + "status": "OK", + } + _LOGGER.debug( + f"Received response to rpc {client_call_details.method}.", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.PermissionService", + "rpcName": str(client_call_details.method), + "response": grpc_response, + "metadata": 
grpc_response["metadata"], + }, + ) + return response + + +class PermissionServiceGrpcAsyncIOTransport(PermissionServiceTransport): + """gRPC AsyncIO backend transport for PermissionService. + + Provides methods for managing permissions to PaLM API + resources. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "generativelanguage.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. + """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "generativelanguage.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'generativelanguage.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. 
+ This argument is ignored if a ``channel`` instance is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, aio.Channel): + # Ignore credentials if a channel was passed. + credentials = None + self._ignore_credentials = True + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + self._interceptor = _LoggingClientAIOInterceptor() + self._grpc_channel._unary_unary_interceptors.append(self._interceptor) + self._logged_channel = self._grpc_channel + self._wrap_with_kind = ( + "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters + ) + # Wrap messages. This must be done after self._logged_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def create_permission( + self, + ) -> Callable[ + [permission_service.CreatePermissionRequest], + Awaitable[gag_permission.Permission], + ]: + r"""Return a callable for the create permission method over gRPC. + + Create a permission to a specific resource. + + Returns: + Callable[[~.CreatePermissionRequest], + Awaitable[~.Permission]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_permission" not in self._stubs: + self._stubs["create_permission"] = self._logged_channel.unary_unary( + "/google.ai.generativelanguage.v1alpha.PermissionService/CreatePermission", + request_serializer=permission_service.CreatePermissionRequest.serialize, + response_deserializer=gag_permission.Permission.deserialize, + ) + return self._stubs["create_permission"] + + @property + def get_permission( + self, + ) -> Callable[ + [permission_service.GetPermissionRequest], Awaitable[permission.Permission] + ]: + r"""Return a callable for the get permission method over gRPC. + + Gets information about a specific Permission. + + Returns: + Callable[[~.GetPermissionRequest], + Awaitable[~.Permission]]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_permission" not in self._stubs: + self._stubs["get_permission"] = self._logged_channel.unary_unary( + "/google.ai.generativelanguage.v1alpha.PermissionService/GetPermission", + request_serializer=permission_service.GetPermissionRequest.serialize, + response_deserializer=permission.Permission.deserialize, + ) + return self._stubs["get_permission"] + + @property + def list_permissions( + self, + ) -> Callable[ + [permission_service.ListPermissionsRequest], + Awaitable[permission_service.ListPermissionsResponse], + ]: + r"""Return a callable for the list permissions method over gRPC. + + Lists permissions for the specific resource. + + Returns: + Callable[[~.ListPermissionsRequest], + Awaitable[~.ListPermissionsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_permissions" not in self._stubs: + self._stubs["list_permissions"] = self._logged_channel.unary_unary( + "/google.ai.generativelanguage.v1alpha.PermissionService/ListPermissions", + request_serializer=permission_service.ListPermissionsRequest.serialize, + response_deserializer=permission_service.ListPermissionsResponse.deserialize, + ) + return self._stubs["list_permissions"] + + @property + def update_permission( + self, + ) -> Callable[ + [permission_service.UpdatePermissionRequest], + Awaitable[gag_permission.Permission], + ]: + r"""Return a callable for the update permission method over gRPC. + + Updates the permission. + + Returns: + Callable[[~.UpdatePermissionRequest], + Awaitable[~.Permission]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_permission" not in self._stubs: + self._stubs["update_permission"] = self._logged_channel.unary_unary( + "/google.ai.generativelanguage.v1alpha.PermissionService/UpdatePermission", + request_serializer=permission_service.UpdatePermissionRequest.serialize, + response_deserializer=gag_permission.Permission.deserialize, + ) + return self._stubs["update_permission"] + + @property + def delete_permission( + self, + ) -> Callable[ + [permission_service.DeletePermissionRequest], Awaitable[empty_pb2.Empty] + ]: + r"""Return a callable for the delete permission method over gRPC. + + Deletes the permission. + + Returns: + Callable[[~.DeletePermissionRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "delete_permission" not in self._stubs: + self._stubs["delete_permission"] = self._logged_channel.unary_unary( + "/google.ai.generativelanguage.v1alpha.PermissionService/DeletePermission", + request_serializer=permission_service.DeletePermissionRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_permission"] + + @property + def transfer_ownership( + self, + ) -> Callable[ + [permission_service.TransferOwnershipRequest], + Awaitable[permission_service.TransferOwnershipResponse], + ]: + r"""Return a callable for the transfer ownership method over gRPC. + + Transfers ownership of the tuned model. + This is the only way to change ownership of the tuned + model. The current owner will be downgraded to writer + role. + + Returns: + Callable[[~.TransferOwnershipRequest], + Awaitable[~.TransferOwnershipResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "transfer_ownership" not in self._stubs: + self._stubs["transfer_ownership"] = self._logged_channel.unary_unary( + "/google.ai.generativelanguage.v1alpha.PermissionService/TransferOwnership", + request_serializer=permission_service.TransferOwnershipRequest.serialize, + response_deserializer=permission_service.TransferOwnershipResponse.deserialize, + ) + return self._stubs["transfer_ownership"] + + def _prep_wrapped_messages(self, client_info): + """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + self._wrapped_methods = { + self.create_permission: self._wrap_method( + self.create_permission, + default_timeout=None, + client_info=client_info, + ), + self.get_permission: self._wrap_method( + self.get_permission, + default_timeout=None, + client_info=client_info, + ), + self.list_permissions: self._wrap_method( + self.list_permissions, + default_timeout=None, + client_info=client_info, + ), + self.update_permission: self._wrap_method( + self.update_permission, + default_timeout=None, + client_info=client_info, + ), + self.delete_permission: self._wrap_method( + self.delete_permission, + default_timeout=None, + client_info=client_info, + ), + self.transfer_ownership: self._wrap_method( + self.transfer_ownership, + default_timeout=None, + client_info=client_info, + ), + self.get_operation: self._wrap_method( + self.get_operation, + default_timeout=None, + client_info=client_info, + ), + self.list_operations: self._wrap_method( + self.list_operations, + default_timeout=None, + client_info=client_info, + ), + } + + def _wrap_method(self, func, *args, **kwargs): + if self._wrap_with_kind: # pragma: NO COVER + kwargs["kind"] = self.kind + return gapic_v1.method_async.wrap_method(func, *args, **kwargs) + + def close(self): + return self._logged_channel.close() + + @property + def kind(self) -> str: + return "grpc_asyncio" + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + +__all__ = ("PermissionServiceGrpcAsyncIOTransport",) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/permission_service/transports/rest.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/permission_service/transports/rest.py new file mode 100644 index 000000000000..4043914bb97a --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/permission_service/transports/rest.py @@ -0,0 +1,1812 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import dataclasses +import json # type: ignore +import logging +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, rest_helpers, rest_streaming +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import json_format +from requests import __version__ as requests_version + +from google.ai.generativelanguage_v1alpha.types import permission as gag_permission +from google.ai.generativelanguage_v1alpha.types import permission +from google.ai.generativelanguage_v1alpha.types import permission_service + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .rest_base import _BasePermissionServiceRestTransport + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = logging.getLogger(__name__) + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=f"requests@{requests_version}", +) + + +class PermissionServiceRestInterceptor: + """Interceptor for PermissionService. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the PermissionServiceRestTransport. + + .. 
code-block:: python + class MyCustomPermissionServiceInterceptor(PermissionServiceRestInterceptor): + def pre_create_permission(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_permission(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_permission(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_get_permission(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_permission(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_permissions(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_permissions(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_transfer_ownership(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_transfer_ownership(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_permission(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_permission(self, response): + logging.log(f"Received response: {response}") + return response + + transport = PermissionServiceRestTransport(interceptor=MyCustomPermissionServiceInterceptor()) + client = PermissionServiceClient(transport=transport) + + + """ + + def pre_create_permission( + self, + request: permission_service.CreatePermissionRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + permission_service.CreatePermissionRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for create_permission + + Override in a subclass to manipulate the request or metadata + before they are sent to the PermissionService server. + """ + return request, metadata + + def post_create_permission( + self, response: gag_permission.Permission + ) -> gag_permission.Permission: + """Post-rpc interceptor for create_permission + + DEPRECATED. Please use the `post_create_permission_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the PermissionService server but before + it is returned to user code. This `post_create_permission` interceptor runs + before the `post_create_permission_with_metadata` interceptor. + """ + return response + + def post_create_permission_with_metadata( + self, + response: gag_permission.Permission, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gag_permission.Permission, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_permission + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the PermissionService server but before it is returned to user code. + + We recommend only using this `post_create_permission_with_metadata` + interceptor in new development instead of the `post_create_permission` interceptor. + When both interceptors are used, this `post_create_permission_with_metadata` interceptor runs after the + `post_create_permission` interceptor. The (possibly modified) response returned by + `post_create_permission` will be passed to + `post_create_permission_with_metadata`. 
+ """ + return response, metadata + + def pre_delete_permission( + self, + request: permission_service.DeletePermissionRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + permission_service.DeletePermissionRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for delete_permission + + Override in a subclass to manipulate the request or metadata + before they are sent to the PermissionService server. + """ + return request, metadata + + def pre_get_permission( + self, + request: permission_service.GetPermissionRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + permission_service.GetPermissionRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for get_permission + + Override in a subclass to manipulate the request or metadata + before they are sent to the PermissionService server. + """ + return request, metadata + + def post_get_permission( + self, response: permission.Permission + ) -> permission.Permission: + """Post-rpc interceptor for get_permission + + DEPRECATED. Please use the `post_get_permission_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the PermissionService server but before + it is returned to user code. This `post_get_permission` interceptor runs + before the `post_get_permission_with_metadata` interceptor. + """ + return response + + def post_get_permission_with_metadata( + self, + response: permission.Permission, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[permission.Permission, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_permission + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the PermissionService server but before it is returned to user code. + + We recommend only using this `post_get_permission_with_metadata` + interceptor in new development instead of the `post_get_permission` interceptor. + When both interceptors are used, this `post_get_permission_with_metadata` interceptor runs after the + `post_get_permission` interceptor. The (possibly modified) response returned by + `post_get_permission` will be passed to + `post_get_permission_with_metadata`. + """ + return response, metadata + + def pre_list_permissions( + self, + request: permission_service.ListPermissionsRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + permission_service.ListPermissionsRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for list_permissions + + Override in a subclass to manipulate the request or metadata + before they are sent to the PermissionService server. + """ + return request, metadata + + def post_list_permissions( + self, response: permission_service.ListPermissionsResponse + ) -> permission_service.ListPermissionsResponse: + """Post-rpc interceptor for list_permissions + + DEPRECATED. Please use the `post_list_permissions_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the PermissionService server but before + it is returned to user code. This `post_list_permissions` interceptor runs + before the `post_list_permissions_with_metadata` interceptor. 
+ """ + return response + + def post_list_permissions_with_metadata( + self, + response: permission_service.ListPermissionsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + permission_service.ListPermissionsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_permissions + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the PermissionService server but before it is returned to user code. + + We recommend only using this `post_list_permissions_with_metadata` + interceptor in new development instead of the `post_list_permissions` interceptor. + When both interceptors are used, this `post_list_permissions_with_metadata` interceptor runs after the + `post_list_permissions` interceptor. The (possibly modified) response returned by + `post_list_permissions` will be passed to + `post_list_permissions_with_metadata`. + """ + return response, metadata + + def pre_transfer_ownership( + self, + request: permission_service.TransferOwnershipRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + permission_service.TransferOwnershipRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for transfer_ownership + + Override in a subclass to manipulate the request or metadata + before they are sent to the PermissionService server. + """ + return request, metadata + + def post_transfer_ownership( + self, response: permission_service.TransferOwnershipResponse + ) -> permission_service.TransferOwnershipResponse: + """Post-rpc interceptor for transfer_ownership + + DEPRECATED. Please use the `post_transfer_ownership_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the PermissionService server but before + it is returned to user code. This `post_transfer_ownership` interceptor runs + before the `post_transfer_ownership_with_metadata` interceptor. + """ + return response + + def post_transfer_ownership_with_metadata( + self, + response: permission_service.TransferOwnershipResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + permission_service.TransferOwnershipResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for transfer_ownership + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the PermissionService server but before it is returned to user code. + + We recommend only using this `post_transfer_ownership_with_metadata` + interceptor in new development instead of the `post_transfer_ownership` interceptor. + When both interceptors are used, this `post_transfer_ownership_with_metadata` interceptor runs after the + `post_transfer_ownership` interceptor. The (possibly modified) response returned by + `post_transfer_ownership` will be passed to + `post_transfer_ownership_with_metadata`. + """ + return response, metadata + + def pre_update_permission( + self, + request: permission_service.UpdatePermissionRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + permission_service.UpdatePermissionRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for update_permission + + Override in a subclass to manipulate the request or metadata + before they are sent to the PermissionService server. 
+ """ + return request, metadata + + def post_update_permission( + self, response: gag_permission.Permission + ) -> gag_permission.Permission: + """Post-rpc interceptor for update_permission + + DEPRECATED. Please use the `post_update_permission_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the PermissionService server but before + it is returned to user code. This `post_update_permission` interceptor runs + before the `post_update_permission_with_metadata` interceptor. + """ + return response + + def post_update_permission_with_metadata( + self, + response: gag_permission.Permission, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gag_permission.Permission, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_permission + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the PermissionService server but before it is returned to user code. + + We recommend only using this `post_update_permission_with_metadata` + interceptor in new development instead of the `post_update_permission` interceptor. + When both interceptors are used, this `post_update_permission_with_metadata` interceptor runs after the + `post_update_permission` interceptor. The (possibly modified) response returned by + `post_update_permission` will be passed to + `post_update_permission_with_metadata`. + """ + return response, metadata + + def pre_get_operation( + self, + request: operations_pb2.GetOperationRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + operations_pb2.GetOperationRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for get_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the PermissionService server. + """ + return request, metadata + + def post_get_operation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for get_operation + + Override in a subclass to manipulate the response + after it is returned by the PermissionService server but before + it is returned to user code. + """ + return response + + def pre_list_operations( + self, + request: operations_pb2.ListOperationsRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + operations_pb2.ListOperationsRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for list_operations + + Override in a subclass to manipulate the request or metadata + before they are sent to the PermissionService server. + """ + return request, metadata + + def post_list_operations( + self, response: operations_pb2.ListOperationsResponse + ) -> operations_pb2.ListOperationsResponse: + """Post-rpc interceptor for list_operations + + Override in a subclass to manipulate the response + after it is returned by the PermissionService server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class PermissionServiceRestStub: + _session: AuthorizedSession + _host: str + _interceptor: PermissionServiceRestInterceptor + + +class PermissionServiceRestTransport(_BasePermissionServiceRestTransport): + """REST backend synchronous transport for PermissionService. + + Provides methods for managing permissions to PaLM API + resources. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. 
+ + It sends JSON representations of protocol buffers over HTTP/1.1 + """ + + def __init__( + self, + *, + host: str = "generativelanguage.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[PermissionServiceRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'generativelanguage.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
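+        # Note: the shared base constructor (see _BasePermissionServiceRestTransport in
+        # rest_base.py) normalizes `host`, prepending `url_scheme` when the hostname does
+        # not already carry a scheme.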
+ # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + url_scheme=url_scheme, + api_audience=api_audience, + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST + ) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or PermissionServiceRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _CreatePermission( + _BasePermissionServiceRestTransport._BaseCreatePermission, + PermissionServiceRestStub, + ): + def __hash__(self): + return hash("PermissionServiceRestTransport.CreatePermission") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: permission_service.CreatePermissionRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> gag_permission.Permission: + r"""Call the create permission method over HTTP. + + Args: + request (~.permission_service.CreatePermissionRequest): + The request object. Request to create a ``Permission``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.gag_permission.Permission: + Permission resource grants user, + group or the rest of the world access to + the PaLM API resource (e.g. a tuned + model, corpus). + + A role is a collection of permitted + operations that allows users to perform + specific actions on PaLM API resources. + To make them available to users, groups, + or service accounts, you assign roles. + When you assign a role, you grant + permissions that the role contains. + + There are three concentric roles. Each + role is a superset of the previous + role's permitted operations: + + - reader can use the resource (e.g. 
+ tuned model, corpus) for inference + - writer has reader's permissions and + additionally can edit and share + - owner has writer's permissions and + additionally can delete + + """ + + http_options = ( + _BasePermissionServiceRestTransport._BaseCreatePermission._get_http_options() + ) + + request, metadata = self._interceptor.pre_create_permission( + request, metadata + ) + transcoded_request = _BasePermissionServiceRestTransport._BaseCreatePermission._get_transcoded_request( + http_options, request + ) + + body = _BasePermissionServiceRestTransport._BaseCreatePermission._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BasePermissionServiceRestTransport._BaseCreatePermission._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.ai.generativelanguage_v1alpha.PermissionServiceClient.CreatePermission", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.PermissionService", + "rpcName": "CreatePermission", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = PermissionServiceRestTransport._CreatePermission._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
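+            # `core_exceptions.from_http_response` builds the typed exception matching the
+            # status code (e.g. NotFound for a 404), using the JSON error payload for the
+            # message, so callers can catch specific google.api_core exception classes.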
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = gag_permission.Permission() + pb_resp = gag_permission.Permission.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_create_permission(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_permission_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = gag_permission.Permission.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.ai.generativelanguage_v1alpha.PermissionServiceClient.create_permission", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.PermissionService", + "rpcName": "CreatePermission", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _DeletePermission( + _BasePermissionServiceRestTransport._BaseDeletePermission, + PermissionServiceRestStub, + ): + def __hash__(self): + return hash("PermissionServiceRestTransport.DeletePermission") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: permission_service.DeletePermissionRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ): + r"""Call the delete permission method over HTTP. + + Args: + request (~.permission_service.DeletePermissionRequest): + The request object. Request to delete the ``Permission``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + + http_options = ( + _BasePermissionServiceRestTransport._BaseDeletePermission._get_http_options() + ) + + request, metadata = self._interceptor.pre_delete_permission( + request, metadata + ) + transcoded_request = _BasePermissionServiceRestTransport._BaseDeletePermission._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BasePermissionServiceRestTransport._BaseDeletePermission._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.ai.generativelanguage_v1alpha.PermissionServiceClient.DeletePermission", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.PermissionService", + "rpcName": "DeletePermission", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = PermissionServiceRestTransport._DeletePermission._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _GetPermission( + _BasePermissionServiceRestTransport._BaseGetPermission, + PermissionServiceRestStub, + ): + def __hash__(self): + return hash("PermissionServiceRestTransport.GetPermission") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: permission_service.GetPermissionRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> permission.Permission: + r"""Call the get permission method over HTTP. + + Args: + request (~.permission_service.GetPermissionRequest): + The request object. Request for getting information about a specific + ``Permission``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.permission.Permission: + Permission resource grants user, + group or the rest of the world access to + the PaLM API resource (e.g. a tuned + model, corpus). + + A role is a collection of permitted + operations that allows users to perform + specific actions on PaLM API resources. 
+ To make them available to users, groups, + or service accounts, you assign roles. + When you assign a role, you grant + permissions that the role contains. + + There are three concentric roles. Each + role is a superset of the previous + role's permitted operations: + + - reader can use the resource (e.g. + tuned model, corpus) for inference + - writer has reader's permissions and + additionally can edit and share + - owner has writer's permissions and + additionally can delete + + """ + + http_options = ( + _BasePermissionServiceRestTransport._BaseGetPermission._get_http_options() + ) + + request, metadata = self._interceptor.pre_get_permission(request, metadata) + transcoded_request = _BasePermissionServiceRestTransport._BaseGetPermission._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BasePermissionServiceRestTransport._BaseGetPermission._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.ai.generativelanguage_v1alpha.PermissionServiceClient.GetPermission", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.PermissionService", + "rpcName": "GetPermission", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = PermissionServiceRestTransport._GetPermission._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
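+            # On success the body is parsed into a Permission below and then routed through
+            # post_get_permission and post_get_permission_with_metadata, the latter also
+            # receiving the HTTP response headers as (key, value) metadata pairs.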
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = permission.Permission() + pb_resp = permission.Permission.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_get_permission(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_permission_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = permission.Permission.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.ai.generativelanguage_v1alpha.PermissionServiceClient.get_permission", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.PermissionService", + "rpcName": "GetPermission", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _ListPermissions( + _BasePermissionServiceRestTransport._BaseListPermissions, + PermissionServiceRestStub, + ): + def __hash__(self): + return hash("PermissionServiceRestTransport.ListPermissions") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: permission_service.ListPermissionsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> permission_service.ListPermissionsResponse: + r"""Call the list permissions method over HTTP. + + Args: + request (~.permission_service.ListPermissionsRequest): + The request object. Request for listing permissions. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.permission_service.ListPermissionsResponse: + Response from ``ListPermissions`` containing a paginated + list of permissions. 
+ + """ + + http_options = ( + _BasePermissionServiceRestTransport._BaseListPermissions._get_http_options() + ) + + request, metadata = self._interceptor.pre_list_permissions( + request, metadata + ) + transcoded_request = _BasePermissionServiceRestTransport._BaseListPermissions._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BasePermissionServiceRestTransport._BaseListPermissions._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.ai.generativelanguage_v1alpha.PermissionServiceClient.ListPermissions", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.PermissionService", + "rpcName": "ListPermissions", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = PermissionServiceRestTransport._ListPermissions._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = permission_service.ListPermissionsResponse() + pb_resp = permission_service.ListPermissionsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_list_permissions(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_permissions_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = ( + permission_service.ListPermissionsResponse.to_json(response) + ) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.ai.generativelanguage_v1alpha.PermissionServiceClient.list_permissions", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.PermissionService", + "rpcName": "ListPermissions", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _TransferOwnership( + _BasePermissionServiceRestTransport._BaseTransferOwnership, + PermissionServiceRestStub, + ): + def __hash__(self): + return hash("PermissionServiceRestTransport.TransferOwnership") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: 
permission_service.TransferOwnershipRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> permission_service.TransferOwnershipResponse: + r"""Call the transfer ownership method over HTTP. + + Args: + request (~.permission_service.TransferOwnershipRequest): + The request object. Request to transfer the ownership of + the tuned model. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.permission_service.TransferOwnershipResponse: + Response from ``TransferOwnership``. + """ + + http_options = ( + _BasePermissionServiceRestTransport._BaseTransferOwnership._get_http_options() + ) + + request, metadata = self._interceptor.pre_transfer_ownership( + request, metadata + ) + transcoded_request = _BasePermissionServiceRestTransport._BaseTransferOwnership._get_transcoded_request( + http_options, request + ) + + body = _BasePermissionServiceRestTransport._BaseTransferOwnership._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BasePermissionServiceRestTransport._BaseTransferOwnership._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.ai.generativelanguage_v1alpha.PermissionServiceClient.TransferOwnership", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.PermissionService", + "rpcName": "TransferOwnership", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = PermissionServiceRestTransport._TransferOwnership._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
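+            # Unlike the GET/DELETE handlers above, this request carried a JSON body: the
+            # transcoded request body was serialized by `_get_request_body_json` and passed
+            # to the session as `data`.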
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = permission_service.TransferOwnershipResponse() + pb_resp = permission_service.TransferOwnershipResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_transfer_ownership(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_transfer_ownership_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = ( + permission_service.TransferOwnershipResponse.to_json(response) + ) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.ai.generativelanguage_v1alpha.PermissionServiceClient.transfer_ownership", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.PermissionService", + "rpcName": "TransferOwnership", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _UpdatePermission( + _BasePermissionServiceRestTransport._BaseUpdatePermission, + PermissionServiceRestStub, + ): + def __hash__(self): + return hash("PermissionServiceRestTransport.UpdatePermission") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: permission_service.UpdatePermissionRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> gag_permission.Permission: + r"""Call the update permission method over HTTP. + + Args: + request (~.permission_service.UpdatePermissionRequest): + The request object. Request to update the ``Permission``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.gag_permission.Permission: + Permission resource grants user, + group or the rest of the world access to + the PaLM API resource (e.g. a tuned + model, corpus). + + A role is a collection of permitted + operations that allows users to perform + specific actions on PaLM API resources. + To make them available to users, groups, + or service accounts, you assign roles. + When you assign a role, you grant + permissions that the role contains. + + There are three concentric roles. Each + role is a superset of the previous + role's permitted operations: + + - reader can use the resource (e.g. 
+ tuned model, corpus) for inference + - writer has reader's permissions and + additionally can edit and share + - owner has writer's permissions and + additionally can delete + + """ + + http_options = ( + _BasePermissionServiceRestTransport._BaseUpdatePermission._get_http_options() + ) + + request, metadata = self._interceptor.pre_update_permission( + request, metadata + ) + transcoded_request = _BasePermissionServiceRestTransport._BaseUpdatePermission._get_transcoded_request( + http_options, request + ) + + body = _BasePermissionServiceRestTransport._BaseUpdatePermission._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BasePermissionServiceRestTransport._BaseUpdatePermission._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.ai.generativelanguage_v1alpha.PermissionServiceClient.UpdatePermission", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.PermissionService", + "rpcName": "UpdatePermission", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = PermissionServiceRestTransport._UpdatePermission._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = gag_permission.Permission() + pb_resp = gag_permission.Permission.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_update_permission(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_permission_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = gag_permission.Permission.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.ai.generativelanguage_v1alpha.PermissionServiceClient.update_permission", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.PermissionService", + "rpcName": "UpdatePermission", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + @property + def create_permission( + self, + ) -> Callable[ + [permission_service.CreatePermissionRequest], gag_permission.Permission + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
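+        # (Each of these properties returns the matching request-handler stub; its __call__
+        # accepts the request type named in the Callable annotation, so at runtime the
+        # returned object behaves like a bound RPC method.)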
+ # In C++ this would require a dynamic_cast + return self._CreatePermission(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_permission( + self, + ) -> Callable[[permission_service.DeletePermissionRequest], empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeletePermission(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_permission( + self, + ) -> Callable[[permission_service.GetPermissionRequest], permission.Permission]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetPermission(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_permissions( + self, + ) -> Callable[ + [permission_service.ListPermissionsRequest], + permission_service.ListPermissionsResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListPermissions(self._session, self._host, self._interceptor) # type: ignore + + @property + def transfer_ownership( + self, + ) -> Callable[ + [permission_service.TransferOwnershipRequest], + permission_service.TransferOwnershipResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._TransferOwnership(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_permission( + self, + ) -> Callable[ + [permission_service.UpdatePermissionRequest], gag_permission.Permission + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdatePermission(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_operation(self): + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + + class _GetOperation( + _BasePermissionServiceRestTransport._BaseGetOperation, PermissionServiceRestStub + ): + def __hash__(self): + return hash("PermissionServiceRestTransport.GetOperation") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: operations_pb2.GetOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Call the get operation method over HTTP. + + Args: + request (operations_pb2.GetOperationRequest): + The request object for GetOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + operations_pb2.Operation: Response from GetOperation method. + """ + + http_options = ( + _BasePermissionServiceRestTransport._BaseGetOperation._get_http_options() + ) + + request, metadata = self._interceptor.pre_get_operation(request, metadata) + transcoded_request = _BasePermissionServiceRestTransport._BaseGetOperation._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BasePermissionServiceRestTransport._BaseGetOperation._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.ai.generativelanguage_v1alpha.PermissionServiceClient.GetOperation", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.PermissionService", + "rpcName": "GetOperation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = PermissionServiceRestTransport._GetOperation._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = operations_pb2.Operation() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_get_operation(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.ai.generativelanguage_v1alpha.PermissionServiceAsyncClient.GetOperation", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.PermissionService", + "rpcName": "GetOperation", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) + return resp + + @property + def list_operations(self): + return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore + + class _ListOperations( + _BasePermissionServiceRestTransport._BaseListOperations, + PermissionServiceRestStub, + ): + def __hash__(self): + return hash("PermissionServiceRestTransport.ListOperations") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: 
operations_pb2.ListOperationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Call the list operations method over HTTP. + + Args: + request (operations_pb2.ListOperationsRequest): + The request object for ListOperations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + operations_pb2.ListOperationsResponse: Response from ListOperations method. + """ + + http_options = ( + _BasePermissionServiceRestTransport._BaseListOperations._get_http_options() + ) + + request, metadata = self._interceptor.pre_list_operations(request, metadata) + transcoded_request = _BasePermissionServiceRestTransport._BaseListOperations._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BasePermissionServiceRestTransport._BaseListOperations._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.ai.generativelanguage_v1alpha.PermissionServiceClient.ListOperations", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.PermissionService", + "rpcName": "ListOperations", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = PermissionServiceRestTransport._ListOperations._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
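+            # The operations RPCs use plain protobuf messages (operations_pb2), so below the
+            # body is decoded and parsed directly with `json_format.Parse` rather than via a
+            # proto-plus `.pb()` wrapper as in the Permission RPCs.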
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = operations_pb2.ListOperationsResponse() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_list_operations(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.ai.generativelanguage_v1alpha.PermissionServiceAsyncClient.ListOperations", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.PermissionService", + "rpcName": "ListOperations", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) + return resp + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("PermissionServiceRestTransport",) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/permission_service/transports/rest_base.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/permission_service/transports/rest_base.py new file mode 100644 index 000000000000..56e09878b023 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/permission_service/transports/rest_base.py @@ -0,0 +1,493 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union + +from google.api_core import gapic_v1, path_template +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import json_format + +from google.ai.generativelanguage_v1alpha.types import permission as gag_permission +from google.ai.generativelanguage_v1alpha.types import permission +from google.ai.generativelanguage_v1alpha.types import permission_service + +from .base import DEFAULT_CLIENT_INFO, PermissionServiceTransport + + +class _BasePermissionServiceRestTransport(PermissionServiceTransport): + """Base REST backend transport for PermissionService. + + Note: This class is not meant to be used directly. Use its sync and + async sub-classes instead. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. 
+ + It sends JSON representations of protocol buffers over HTTP/1.1 + """ + + def __init__( + self, + *, + host: str = "generativelanguage.googleapis.com", + credentials: Optional[Any] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + Args: + host (Optional[str]): + The hostname to connect to (default: 'generativelanguage.googleapis.com'). + credentials (Optional[Any]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) + if maybe_url_match is None: + raise ValueError( + f"Unexpected hostname structure: {host}" + ) # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + class _BaseCreatePermission: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1alpha/{parent=tunedModels/*}/permissions", + "body": "permission", + }, + { + "method": "post", + "uri": "/v1alpha/{parent=corpora/*}/permissions", + "body": "permission", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = permission_service.CreatePermissionRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BasePermissionServiceRestTransport._BaseCreatePermission._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseDeletePermission: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + 
__REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1alpha/{name=tunedModels/*/permissions/*}", + }, + { + "method": "delete", + "uri": "/v1alpha/{name=corpora/*/permissions/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = permission_service.DeletePermissionRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BasePermissionServiceRestTransport._BaseDeletePermission._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGetPermission: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha/{name=tunedModels/*/permissions/*}", + }, + { + "method": "get", + "uri": "/v1alpha/{name=corpora/*/permissions/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = permission_service.GetPermissionRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BasePermissionServiceRestTransport._BaseGetPermission._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseListPermissions: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha/{parent=tunedModels/*}/permissions", + }, + { + "method": "get", + "uri": "/v1alpha/{parent=corpora/*}/permissions", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = permission_service.ListPermissionsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + 
query_params.update( + _BasePermissionServiceRestTransport._BaseListPermissions._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseTransferOwnership: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1alpha/{name=tunedModels/*}:transferOwnership", + "body": "*", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = permission_service.TransferOwnershipRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BasePermissionServiceRestTransport._BaseTransferOwnership._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseUpdatePermission: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "updateMask": {}, + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1alpha/{permission.name=tunedModels/*/permissions/*}", + "body": "permission", + }, + { + "method": "patch", + "uri": "/v1alpha/{permission.name=corpora/*/permissions/*}", + "body": "permission", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = permission_service.UpdatePermissionRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BasePermissionServiceRestTransport._BaseUpdatePermission._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGetOperation: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": 
"/v1alpha/{name=tunedModels/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1alpha/{name=generatedFiles/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1alpha/{name=models/*/operations/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + return query_params + + class _BaseListOperations: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha/{name=tunedModels/*}/operations", + }, + { + "method": "get", + "uri": "/v1alpha/{name=models/*}/operations", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + return query_params + + +__all__ = ("_BasePermissionServiceRestTransport",) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/prediction_service/__init__.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/prediction_service/__init__.py new file mode 100644 index 000000000000..6c64cf5ad1c0 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/prediction_service/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .async_client import PredictionServiceAsyncClient +from .client import PredictionServiceClient + +__all__ = ( + "PredictionServiceClient", + "PredictionServiceAsyncClient", +) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/prediction_service/async_client.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/prediction_service/async_client.py new file mode 100644 index 000000000000..fe4f615d5413 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/prediction_service/async_client.py @@ -0,0 +1,535 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import logging as std_logging +import re +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry_async as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.ai.generativelanguage_v1alpha import gapic_version as package_version + +try: + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore + +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import struct_pb2 # type: ignore + +from google.ai.generativelanguage_v1alpha.types import prediction_service + +from .client import PredictionServiceClient +from .transports.base import DEFAULT_CLIENT_INFO, PredictionServiceTransport +from .transports.grpc_asyncio import PredictionServiceGrpcAsyncIOTransport + +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + + +class PredictionServiceAsyncClient: + """A service for online predictions and explanations.""" + + _client: PredictionServiceClient + + # Copy defaults from the synchronous client for use here. + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. 
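+    # Illustrative example: for the default universe the template expands to the
+    # default endpoint, i.e.
+    # _DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN="googleapis.com")
+    # == "generativelanguage.googleapis.com".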
+ DEFAULT_ENDPOINT = PredictionServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = PredictionServiceClient.DEFAULT_MTLS_ENDPOINT + _DEFAULT_ENDPOINT_TEMPLATE = PredictionServiceClient._DEFAULT_ENDPOINT_TEMPLATE + _DEFAULT_UNIVERSE = PredictionServiceClient._DEFAULT_UNIVERSE + + model_path = staticmethod(PredictionServiceClient.model_path) + parse_model_path = staticmethod(PredictionServiceClient.parse_model_path) + common_billing_account_path = staticmethod( + PredictionServiceClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + PredictionServiceClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(PredictionServiceClient.common_folder_path) + parse_common_folder_path = staticmethod( + PredictionServiceClient.parse_common_folder_path + ) + common_organization_path = staticmethod( + PredictionServiceClient.common_organization_path + ) + parse_common_organization_path = staticmethod( + PredictionServiceClient.parse_common_organization_path + ) + common_project_path = staticmethod(PredictionServiceClient.common_project_path) + parse_common_project_path = staticmethod( + PredictionServiceClient.parse_common_project_path + ) + common_location_path = staticmethod(PredictionServiceClient.common_location_path) + parse_common_location_path = staticmethod( + PredictionServiceClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + PredictionServiceAsyncClient: The constructed client. + """ + return PredictionServiceClient.from_service_account_info.__func__(PredictionServiceAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + PredictionServiceAsyncClient: The constructed client. + """ + return PredictionServiceClient.from_service_account_file.__func__(PredictionServiceAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. 
+ (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return PredictionServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> PredictionServiceTransport: + """Returns the transport used by the client instance. + + Returns: + PredictionServiceTransport: The transport used by the client instance. + """ + return self._client.transport + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._client._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used + by the client instance. + """ + return self._client._universe_domain + + get_transport_class = PredictionServiceClient.get_transport_class + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[ + str, + PredictionServiceTransport, + Callable[..., PredictionServiceTransport], + ] + ] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the prediction service async client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,PredictionServiceTransport,Callable[..., PredictionServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport to use. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the PredictionServiceTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. 
If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = PredictionServiceClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ): # pragma: NO COVER + _LOGGER.debug( + "Created client `google.ai.generativelanguage_v1alpha.PredictionServiceAsyncClient`.", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.PredictionService", + "universeDomain": getattr( + self._client._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._client._transport, "_credentials") + else { + "serviceName": "google.ai.generativelanguage.v1alpha.PredictionService", + "credentialsType": None, + }, + ) + + async def predict( + self, + request: Optional[Union[prediction_service.PredictRequest, dict]] = None, + *, + model: Optional[str] = None, + instances: Optional[MutableSequence[struct_pb2.Value]] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> prediction_service.PredictResponse: + r"""Performs a prediction request. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1alpha + + async def sample_predict(): + # Create a client + client = generativelanguage_v1alpha.PredictionServiceAsyncClient() + + # Initialize request argument(s) + instances = generativelanguage_v1alpha.Value() + instances.null_value = "NULL_VALUE" + + request = generativelanguage_v1alpha.PredictRequest( + model="model_value", + instances=instances, + ) + + # Make the request + response = await client.predict(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.ai.generativelanguage_v1alpha.types.PredictRequest, dict]]): + The request object. 
Request message for + [PredictionService.Predict][google.ai.generativelanguage.v1alpha.PredictionService.Predict]. + model (:class:`str`): + Required. The name of the model for prediction. Format: + ``name=models/{model}``. + + This corresponds to the ``model`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instances (:class:`MutableSequence[google.protobuf.struct_pb2.Value]`): + Required. The instances that are the + input to the prediction call. + + This corresponds to the ``instances`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.ai.generativelanguage_v1alpha.types.PredictResponse: + Response message for [PredictionService.Predict]. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([model, instances]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, prediction_service.PredictRequest): + request = prediction_service.PredictRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if model is not None: + request.model = model + if instances: + request.instances.extend(instances) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.predict] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("model", request.model),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.list_operations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.get_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def __aenter__(self) -> "PredictionServiceAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("PredictionServiceAsyncClient",) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/prediction_service/client.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/prediction_service/client.py new file mode 100644 index 000000000000..229e599a4943 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/prediction_service/client.py @@ -0,0 +1,966 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from http import HTTPStatus +import json +import logging as std_logging +import os +import re +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) +import warnings + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.ai.generativelanguage_v1alpha import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import struct_pb2 # type: ignore + +from google.ai.generativelanguage_v1alpha.types import prediction_service + +from .transports.base import DEFAULT_CLIENT_INFO, PredictionServiceTransport +from .transports.grpc import PredictionServiceGrpcTransport +from .transports.grpc_asyncio import PredictionServiceGrpcAsyncIOTransport +from .transports.rest import PredictionServiceRestTransport + + +class PredictionServiceClientMeta(type): + """Metaclass for the PredictionService client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. 
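+
+    For example (illustrative), ``PredictionServiceClient.get_transport_class("rest")``
+    resolves to ``PredictionServiceRestTransport``, while calling it without a label
+    falls back to the first transport registered below (gRPC).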
+    """
+
+    _transport_registry = (
+        OrderedDict()
+    )  # type: Dict[str, Type[PredictionServiceTransport]]
+    _transport_registry["grpc"] = PredictionServiceGrpcTransport
+    _transport_registry["grpc_asyncio"] = PredictionServiceGrpcAsyncIOTransport
+    _transport_registry["rest"] = PredictionServiceRestTransport
+
+    def get_transport_class(
+        cls,
+        label: Optional[str] = None,
+    ) -> Type[PredictionServiceTransport]:
+        """Returns an appropriate transport class.
+
+        Args:
+            label: The name of the desired transport. If none is
+                provided, then the first transport in the registry is used.
+
+        Returns:
+            The transport class to use.
+        """
+        # If a specific transport is requested, return that one.
+        if label:
+            return cls._transport_registry[label]
+
+        # No transport is requested; return the default (that is, the first one
+        # in the dictionary).
+        return next(iter(cls._transport_registry.values()))
+
+
+class PredictionServiceClient(metaclass=PredictionServiceClientMeta):
+    """A service for online predictions and explanations."""
+
+    @staticmethod
+    def _get_default_mtls_endpoint(api_endpoint):
+        """Converts api endpoint to mTLS endpoint.
+
+        Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
+        "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
+        Args:
+            api_endpoint (Optional[str]): the api endpoint to convert.
+        Returns:
+            str: converted mTLS api endpoint.
+        """
+        if not api_endpoint:
+            return api_endpoint
+
+        mtls_endpoint_re = re.compile(
+            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
+        )
+
+        m = mtls_endpoint_re.match(api_endpoint)
+        name, mtls, sandbox, googledomain = m.groups()
+        if mtls or not googledomain:
+            return api_endpoint
+
+        if sandbox:
+            return api_endpoint.replace(
+                "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
+            )
+
+        return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
+
+    # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead.
+    DEFAULT_ENDPOINT = "generativelanguage.googleapis.com"
+    DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__(  # type: ignore
+        DEFAULT_ENDPOINT
+    )
+
+    _DEFAULT_ENDPOINT_TEMPLATE = "generativelanguage.{UNIVERSE_DOMAIN}"
+    _DEFAULT_UNIVERSE = "googleapis.com"
+
+    @classmethod
+    def from_service_account_info(cls, info: dict, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            info.
+
+        Args:
+            info (dict): The service account private key info.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            PredictionServiceClient: The constructed client.
+        """
+        credentials = service_account.Credentials.from_service_account_info(info)
+        kwargs["credentials"] = credentials
+        return cls(*args, **kwargs)
+
+    @classmethod
+    def from_service_account_file(cls, filename: str, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            file.
+
+        Args:
+            filename (str): The path to the service account private key json
+                file.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            PredictionServiceClient: The constructed client.
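+
+        Example (illustrative; ``key.json`` is a placeholder path):
+
+        .. code-block:: python
+
+            client = PredictionServiceClient.from_service_account_file("key.json")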
+        """
+        credentials = service_account.Credentials.from_service_account_file(filename)
+        kwargs["credentials"] = credentials
+        return cls(*args, **kwargs)
+
+    from_service_account_json = from_service_account_file
+
+    @property
+    def transport(self) -> PredictionServiceTransport:
+        """Returns the transport used by the client instance.
+
+        Returns:
+            PredictionServiceTransport: The transport used by the client
+                instance.
+        """
+        return self._transport
+
+    @staticmethod
+    def model_path(
+        model: str,
+    ) -> str:
+        """Returns a fully-qualified model string."""
+        return "models/{model}".format(
+            model=model,
+        )
+
+    @staticmethod
+    def parse_model_path(path: str) -> Dict[str, str]:
+        """Parses a model path into its component segments."""
+        m = re.match(r"^models/(?P<model>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_billing_account_path(
+        billing_account: str,
+    ) -> str:
+        """Returns a fully-qualified billing_account string."""
+        return "billingAccounts/{billing_account}".format(
+            billing_account=billing_account,
+        )
+
+    @staticmethod
+    def parse_common_billing_account_path(path: str) -> Dict[str, str]:
+        """Parse a billing_account path into its component segments."""
+        m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_folder_path(
+        folder: str,
+    ) -> str:
+        """Returns a fully-qualified folder string."""
+        return "folders/{folder}".format(
+            folder=folder,
+        )
+
+    @staticmethod
+    def parse_common_folder_path(path: str) -> Dict[str, str]:
+        """Parse a folder path into its component segments."""
+        m = re.match(r"^folders/(?P<folder>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_organization_path(
+        organization: str,
+    ) -> str:
+        """Returns a fully-qualified organization string."""
+        return "organizations/{organization}".format(
+            organization=organization,
+        )
+
+    @staticmethod
+    def parse_common_organization_path(path: str) -> Dict[str, str]:
+        """Parse a organization path into its component segments."""
+        m = re.match(r"^organizations/(?P<organization>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_project_path(
+        project: str,
+    ) -> str:
+        """Returns a fully-qualified project string."""
+        return "projects/{project}".format(
+            project=project,
+        )
+
+    @staticmethod
+    def parse_common_project_path(path: str) -> Dict[str, str]:
+        """Parse a project path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_location_path(
+        project: str,
+        location: str,
+    ) -> str:
+        """Returns a fully-qualified location string."""
+        return "projects/{project}/locations/{location}".format(
+            project=project,
+            location=location,
+        )
+
+    @staticmethod
+    def parse_common_location_path(path: str) -> Dict[str, str]:
+        """Parse a location path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @classmethod
+    def get_mtls_endpoint_and_cert_source(
+        cls, client_options: Optional[client_options_lib.ClientOptions] = None
+    ):
+        """Deprecated. Return the API endpoint and client cert source for mutual TLS.
+
+        The client cert source is determined in the following order:
+        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+        client cert source is None.
+ (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + + warnings.warn( + "get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", + DeprecationWarning, + ) + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + @staticmethod + def _read_environment_variables(): + """Returns the environment variables used by the client. + + Returns: + Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, + GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. + + Raises: + ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not + any of ["true", "false"]. + google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT + is not any of ["auto", "never", "always"]. 
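+
+        For example (illustrative): with ``GOOGLE_API_USE_CLIENT_CERTIFICATE=true``
+        and the other two variables unset, this returns ``(True, "auto", None)``.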
+ """ + use_client_cert = os.getenv( + "GOOGLE_API_USE_CLIENT_CERTIFICATE", "false" + ).lower() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() + universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + return use_client_cert == "true", use_mtls_endpoint, universe_domain_env + + @staticmethod + def _get_client_cert_source(provided_cert_source, use_cert_flag): + """Return the client cert source to be used by the client. + + Args: + provided_cert_source (bytes): The client certificate source provided. + use_cert_flag (bool): A flag indicating whether to use the client certificate. + + Returns: + bytes or None: The client cert source to be used by the client. + """ + client_cert_source = None + if use_cert_flag: + if provided_cert_source: + client_cert_source = provided_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + return client_cert_source + + @staticmethod + def _get_api_endpoint( + api_override, client_cert_source, universe_domain, use_mtls_endpoint + ): + """Return the API endpoint used by the client. + + Args: + api_override (str): The API endpoint override. If specified, this is always + the return value of this function and the other arguments are not used. + client_cert_source (bytes): The client certificate source used by the client. + universe_domain (str): The universe domain used by the client. + use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. + Possible values are "always", "auto", or "never". + + Returns: + str: The API endpoint to be used by the client. + """ + if api_override is not None: + api_endpoint = api_override + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + _default_universe = PredictionServiceClient._DEFAULT_UNIVERSE + if universe_domain != _default_universe: + raise MutualTLSChannelError( + f"mTLS is not supported in any universe other than {_default_universe}." + ) + api_endpoint = PredictionServiceClient.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = PredictionServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=universe_domain + ) + return api_endpoint + + @staticmethod + def _get_universe_domain( + client_universe_domain: Optional[str], universe_domain_env: Optional[str] + ) -> str: + """Return the universe domain used by the client. + + Args: + client_universe_domain (Optional[str]): The universe domain configured via the client options. + universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. + + Returns: + str: The universe domain to be used by the client. + + Raises: + ValueError: If the universe domain is an empty string. 
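+
+        For example (illustrative): ``_get_universe_domain(None, None)`` yields the
+        default ``"googleapis.com"``, while ``_get_universe_domain("example.com", None)``
+        yields ``"example.com"`` because the client option takes precedence over the
+        environment variable.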
+ """ + universe_domain = PredictionServiceClient._DEFAULT_UNIVERSE + if client_universe_domain is not None: + universe_domain = client_universe_domain + elif universe_domain_env is not None: + universe_domain = universe_domain_env + if len(universe_domain.strip()) == 0: + raise ValueError("Universe Domain cannot be an empty string.") + return universe_domain + + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. + + Returns: + bool: True iff the configured universe domain is valid. + + Raises: + ValueError: If the configured universe domain is not valid. + """ + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True + + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used by the client instance. + """ + return self._universe_domain + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[ + str, + PredictionServiceTransport, + Callable[..., PredictionServiceTransport], + ] + ] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the prediction service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,PredictionServiceTransport,Callable[..., PredictionServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the PredictionServiceTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. 
Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that the ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client_options = client_options + if isinstance(self._client_options, dict): + self._client_options = client_options_lib.from_dict(self._client_options) + if self._client_options is None: + self._client_options = client_options_lib.ClientOptions() + self._client_options = cast( + client_options_lib.ClientOptions, self._client_options + ) + + universe_domain_opt = getattr(self._client_options, "universe_domain", None) + + ( + self._use_client_cert, + self._use_mtls_endpoint, + self._universe_domain_env, + ) = PredictionServiceClient._read_environment_variables() + self._client_cert_source = PredictionServiceClient._get_client_cert_source( + self._client_options.client_cert_source, self._use_client_cert + ) + self._universe_domain = PredictionServiceClient._get_universe_domain( + universe_domain_opt, self._universe_domain_env + ) + self._api_endpoint = None # updated below, depending on `transport` + + # Initialize the universe domain validation. + self._is_universe_domain_valid = False + + if CLIENT_LOGGING_SUPPORTED: # pragma: NO COVER + # Setup logging. + client_logging.initialize_logging() + + api_key_value = getattr(self._client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + transport_provided = isinstance(transport, PredictionServiceTransport) + if transport_provided: + # transport is a PredictionServiceTransport instance. + if credentials or self._client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if self._client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = cast(PredictionServiceTransport, transport) + self._api_endpoint = self._transport.host + + self._api_endpoint = ( + self._api_endpoint + or PredictionServiceClient._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint, + ) + ) + + if not transport_provided: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + transport_init: Union[ + Type[PredictionServiceTransport], + Callable[..., PredictionServiceTransport], + ] = ( + PredictionServiceClient.get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., PredictionServiceTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( + credentials=credentials, + credentials_file=self._client_options.credentials_file, + host=self._api_endpoint, + scopes=self._client_options.scopes, + client_cert_source_for_mtls=self._client_cert_source, + quota_project_id=self._client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=self._client_options.api_audience, + ) + + if "async" not in str(self._transport): + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ): # pragma: NO COVER + _LOGGER.debug( + "Created client `google.ai.generativelanguage_v1alpha.PredictionServiceClient`.", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.PredictionService", + "universeDomain": getattr( + self._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._transport, "_credentials") + else { + "serviceName": "google.ai.generativelanguage.v1alpha.PredictionService", + "credentialsType": None, + }, + ) + + def predict( + self, + request: Optional[Union[prediction_service.PredictRequest, dict]] = None, + *, + model: Optional[str] = None, + instances: Optional[MutableSequence[struct_pb2.Value]] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> prediction_service.PredictResponse: + r"""Performs a prediction request. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1alpha + + def sample_predict(): + # Create a client + client = generativelanguage_v1alpha.PredictionServiceClient() + + # Initialize request argument(s) + instances = generativelanguage_v1alpha.Value() + instances.null_value = "NULL_VALUE" + + request = generativelanguage_v1alpha.PredictRequest( + model="model_value", + instances=instances, + ) + + # Make the request + response = client.predict(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.ai.generativelanguage_v1alpha.types.PredictRequest, dict]): + The request object. Request message for + [PredictionService.Predict][google.ai.generativelanguage.v1alpha.PredictionService.Predict]. + model (str): + Required. The name of the model for prediction. Format: + ``name=models/{model}``. + + This corresponds to the ``model`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instances (MutableSequence[google.protobuf.struct_pb2.Value]): + Required. The instances that are the + input to the prediction call. + + This corresponds to the ``instances`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.ai.generativelanguage_v1alpha.types.PredictResponse: + Response message for [PredictionService.Predict]. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([model, instances]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, prediction_service.PredictRequest): + request = prediction_service.PredictRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if model is not None: + request.model = model + if instances is not None: + request.instances.extend(instances) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.predict] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("model", request.model),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def __enter__(self) -> "PredictionServiceClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_operations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e + + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. 
+ # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("PredictionServiceClient",) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/prediction_service/transports/README.rst b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/prediction_service/transports/README.rst new file mode 100644 index 000000000000..504aaca0a144 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/prediction_service/transports/README.rst @@ -0,0 +1,9 @@ + +transport inheritance structure +_______________________________ + +`PredictionServiceTransport` is the ABC for all transports. +- public child `PredictionServiceGrpcTransport` for sync gRPC transport (defined in `grpc.py`). +- public child `PredictionServiceGrpcAsyncIOTransport` for async gRPC transport (defined in `grpc_asyncio.py`). +- private child `_BasePredictionServiceRestTransport` for base REST transport with inner classes `_BaseMETHOD` (defined in `rest_base.py`). +- public child `PredictionServiceRestTransport` for sync REST transport with inner classes `METHOD` derived from the parent's corresponding `_BaseMETHOD` classes (defined in `rest.py`). diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/prediction_service/transports/__init__.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/prediction_service/transports/__init__.py new file mode 100644 index 000000000000..d6d645ba1ff1 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/prediction_service/transports/__init__.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +from typing import Dict, Type + +from .base import PredictionServiceTransport +from .grpc import PredictionServiceGrpcTransport +from .grpc_asyncio import PredictionServiceGrpcAsyncIOTransport +from .rest import PredictionServiceRestInterceptor, PredictionServiceRestTransport + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[PredictionServiceTransport]] +_transport_registry["grpc"] = PredictionServiceGrpcTransport +_transport_registry["grpc_asyncio"] = PredictionServiceGrpcAsyncIOTransport +_transport_registry["rest"] = PredictionServiceRestTransport + +__all__ = ( + "PredictionServiceTransport", + "PredictionServiceGrpcTransport", + "PredictionServiceGrpcAsyncIOTransport", + "PredictionServiceRestTransport", + "PredictionServiceRestInterceptor", +) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/prediction_service/transports/base.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/prediction_service/transports/base.py new file mode 100644 index 000000000000..e3b9053178c3 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/prediction_service/transports/base.py @@ -0,0 +1,196 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.ai.generativelanguage_v1alpha import gapic_version as package_version +from google.ai.generativelanguage_v1alpha.types import prediction_service + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class PredictionServiceTransport(abc.ABC): + """Abstract transport class for PredictionService.""" + + AUTH_SCOPES = () + + DEFAULT_HOST: str = "generativelanguage.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'generativelanguage.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. 
These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + if not hasattr(self, "_ignore_credentials"): + self._ignore_credentials: bool = False + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None and not self._ignore_credentials: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + @property + def host(self): + return self._host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.predict: gapic_v1.method.wrap_method( + self.predict, + default_timeout=None, + client_info=client_info, + ), + self.get_operation: gapic_v1.method.wrap_method( + self.get_operation, + default_timeout=None, + client_info=client_info, + ), + self.list_operations: gapic_v1.method.wrap_method( + self.list_operations, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
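+
+        A minimal usage sketch (assuming a concrete subclass such as
+        ``PredictionServiceGrpcTransport``; any other concrete transport
+        behaves the same way)::
+
+            transport = PredictionServiceGrpcTransport()
+            try:
+                ...  # issue RPCs through a client that owns this transport
+            finally:
+                transport.close()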
+ """ + raise NotImplementedError() + + @property + def predict( + self, + ) -> Callable[ + [prediction_service.PredictRequest], + Union[ + prediction_service.PredictResponse, + Awaitable[prediction_service.PredictResponse], + ], + ]: + raise NotImplementedError() + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], + Union[ + operations_pb2.ListOperationsResponse, + Awaitable[operations_pb2.ListOperationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_operation( + self, + ) -> Callable[ + [operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("PredictionServiceTransport",) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/prediction_service/transports/grpc.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/prediction_service/transports/grpc.py new file mode 100644 index 000000000000..dd72b9c0b00e --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/prediction_service/transports/grpc.py @@ -0,0 +1,396 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import json +import logging as std_logging +import pickle +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf.json_format import MessageToJson +import google.protobuf.message +import grpc # type: ignore +import proto # type: ignore + +from google.ai.generativelanguage_v1alpha.types import prediction_service + +from .base import DEFAULT_CLIENT_INFO, PredictionServiceTransport + +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + + +class _LoggingClientInterceptor(grpc.UnaryUnaryClientInterceptor): # pragma: NO COVER + def intercept_unary_unary(self, continuation, client_call_details, request): + logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ) + if logging_enabled: # pragma: NO COVER + request_metadata = client_call_details.metadata + if isinstance(request, proto.Message): + request_payload = type(request).to_json(request) + elif isinstance(request, google.protobuf.message.Message): + request_payload = MessageToJson(request) + else: + request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" + + request_metadata = { + key: value.decode("utf-8") if isinstance(value, bytes) else value + for key, value in request_metadata + } + grpc_request = { + "payload": request_payload, + "requestMethod": "grpc", + "metadata": dict(request_metadata), + } + _LOGGER.debug( + f"Sending request for {client_call_details.method}", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.PredictionService", + "rpcName": client_call_details.method, + "request": grpc_request, + "metadata": grpc_request["metadata"], + }, + ) + + response = continuation(client_call_details, request) + if logging_enabled: # pragma: NO COVER + response_metadata = response.trailing_metadata() + # Convert gRPC metadata `` to list of tuples + metadata = ( + dict([(k, str(v)) for k, v in response_metadata]) + if response_metadata + else None + ) + result = response.result() + if isinstance(result, proto.Message): + response_payload = type(result).to_json(result) + elif isinstance(result, google.protobuf.message.Message): + response_payload = MessageToJson(result) + else: + response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" + grpc_response = { + "payload": response_payload, + "metadata": metadata, + "status": "OK", + } + _LOGGER.debug( + f"Received response for {client_call_details.method}.", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.PredictionService", + "rpcName": client_call_details.method, + "response": grpc_response, + "metadata": grpc_response["metadata"], + }, + ) + return response + + +class PredictionServiceGrpcTransport(PredictionServiceTransport): + """gRPC backend transport for PredictionService. + + A service for online predictions and explanations. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. 
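+
+    For example, a client constructed as
+    ``PredictionServiceClient(transport="grpc")`` resolves this class through
+    the transport registry defined in ``transports/__init__.py``.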
+ + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "generativelanguage.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'generativelanguage.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if a ``channel`` instance is provided. + channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. 
+ + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, grpc.Channel): + # Ignore credentials if a channel was passed. + credentials = None + self._ignore_credentials = True + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + self._interceptor = _LoggingClientInterceptor() + self._logged_channel = grpc.intercept_channel( + self._grpc_channel, self._interceptor + ) + + # Wrap messages. This must be done after self._logged_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "generativelanguage.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. 
+ scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service.""" + return self._grpc_channel + + @property + def predict( + self, + ) -> Callable[ + [prediction_service.PredictRequest], prediction_service.PredictResponse + ]: + r"""Return a callable for the predict method over gRPC. + + Performs a prediction request. + + Returns: + Callable[[~.PredictRequest], + ~.PredictResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "predict" not in self._stubs: + self._stubs["predict"] = self._logged_channel.unary_unary( + "/google.ai.generativelanguage.v1alpha.PredictionService/Predict", + request_serializer=prediction_service.PredictRequest.serialize, + response_deserializer=prediction_service.PredictResponse.deserialize, + ) + return self._stubs["predict"] + + def close(self): + self._logged_channel.close() + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
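+        # The stub is created once and cached in self._stubs, so repeated
+        # property access reuses the same callable.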
+ if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("PredictionServiceGrpcTransport",) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/prediction_service/transports/grpc_asyncio.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/prediction_service/transports/grpc_asyncio.py new file mode 100644 index 000000000000..cbab29697995 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/prediction_service/transports/grpc_asyncio.py @@ -0,0 +1,429 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import inspect +import json +import logging as std_logging +import pickle +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, grpc_helpers_async +from google.api_core import retry_async as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf.json_format import MessageToJson +import google.protobuf.message +import grpc # type: ignore +from grpc.experimental import aio # type: ignore +import proto # type: ignore + +from google.ai.generativelanguage_v1alpha.types import prediction_service + +from .base import DEFAULT_CLIENT_INFO, PredictionServiceTransport +from .grpc import PredictionServiceGrpcTransport + +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + + +class _LoggingClientAIOInterceptor( + grpc.aio.UnaryUnaryClientInterceptor +): # pragma: NO COVER + async def intercept_unary_unary(self, continuation, client_call_details, request): + logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ) + if logging_enabled: # pragma: NO COVER + request_metadata = client_call_details.metadata + if isinstance(request, proto.Message): + request_payload = type(request).to_json(request) + elif isinstance(request, google.protobuf.message.Message): + request_payload = MessageToJson(request) + else: + request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" + + request_metadata = { + key: value.decode("utf-8") if isinstance(value, bytes) else value + for key, value in request_metadata + } + grpc_request = { 
+ "payload": request_payload, + "requestMethod": "grpc", + "metadata": dict(request_metadata), + } + _LOGGER.debug( + f"Sending request for {client_call_details.method}", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.PredictionService", + "rpcName": str(client_call_details.method), + "request": grpc_request, + "metadata": grpc_request["metadata"], + }, + ) + response = await continuation(client_call_details, request) + if logging_enabled: # pragma: NO COVER + response_metadata = await response.trailing_metadata() + # Convert gRPC metadata `` to list of tuples + metadata = ( + dict([(k, str(v)) for k, v in response_metadata]) + if response_metadata + else None + ) + result = await response + if isinstance(result, proto.Message): + response_payload = type(result).to_json(result) + elif isinstance(result, google.protobuf.message.Message): + response_payload = MessageToJson(result) + else: + response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" + grpc_response = { + "payload": response_payload, + "metadata": metadata, + "status": "OK", + } + _LOGGER.debug( + f"Received response to rpc {client_call_details.method}.", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.PredictionService", + "rpcName": str(client_call_details.method), + "response": grpc_response, + "metadata": grpc_response["metadata"], + }, + ) + return response + + +class PredictionServiceGrpcAsyncIOTransport(PredictionServiceTransport): + """gRPC AsyncIO backend transport for PredictionService. + + A service for online predictions and explanations. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "generativelanguage.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. 
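+
+        A hypothetical sketch of calling this directly (the transport
+        normally creates the channel for you)::
+
+            channel = PredictionServiceGrpcAsyncIOTransport.create_channel(
+                "generativelanguage.googleapis.com"
+            )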
+ """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "generativelanguage.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'generativelanguage.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. 
+ Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, aio.Channel): + # Ignore credentials if a channel was passed. + credentials = None + self._ignore_credentials = True + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + self._interceptor = _LoggingClientAIOInterceptor() + self._grpc_channel._unary_unary_interceptors.append(self._interceptor) + self._logged_channel = self._grpc_channel + self._wrap_with_kind = ( + "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters + ) + # Wrap messages. This must be done after self._logged_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def predict( + self, + ) -> Callable[ + [prediction_service.PredictRequest], + Awaitable[prediction_service.PredictResponse], + ]: + r"""Return a callable for the predict method over gRPC. + + Performs a prediction request. 
+ + Returns: + Callable[[~.PredictRequest], + Awaitable[~.PredictResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "predict" not in self._stubs: + self._stubs["predict"] = self._logged_channel.unary_unary( + "/google.ai.generativelanguage.v1alpha.PredictionService/Predict", + request_serializer=prediction_service.PredictRequest.serialize, + response_deserializer=prediction_service.PredictResponse.deserialize, + ) + return self._stubs["predict"] + + def _prep_wrapped_messages(self, client_info): + """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + self._wrapped_methods = { + self.predict: self._wrap_method( + self.predict, + default_timeout=None, + client_info=client_info, + ), + self.get_operation: self._wrap_method( + self.get_operation, + default_timeout=None, + client_info=client_info, + ), + self.list_operations: self._wrap_method( + self.list_operations, + default_timeout=None, + client_info=client_info, + ), + } + + def _wrap_method(self, func, *args, **kwargs): + if self._wrap_with_kind: # pragma: NO COVER + kwargs["kind"] = self.kind + return gapic_v1.method_async.wrap_method(func, *args, **kwargs) + + def close(self): + return self._logged_channel.close() + + @property + def kind(self) -> str: + return "grpc_asyncio" + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
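+        # In the AsyncIO transport the returned callable produces an
+        # awaitable call object; callers are expected to ``await`` the result.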
+ if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + +__all__ = ("PredictionServiceGrpcAsyncIOTransport",) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/prediction_service/transports/rest.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/prediction_service/transports/rest.py new file mode 100644 index 000000000000..3b01905389b6 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/prediction_service/transports/rest.py @@ -0,0 +1,729 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import dataclasses +import json # type: ignore +import logging +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, rest_helpers, rest_streaming +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import json_format +from requests import __version__ as requests_version + +from google.ai.generativelanguage_v1alpha.types import prediction_service + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .rest_base import _BasePredictionServiceRestTransport + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = logging.getLogger(__name__) + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=f"requests@{requests_version}", +) + + +class PredictionServiceRestInterceptor: + """Interceptor for PredictionService. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the PredictionServiceRestTransport. + + .. 
code-block:: python + class MyCustomPredictionServiceInterceptor(PredictionServiceRestInterceptor): + def pre_predict(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_predict(self, response): + logging.log(f"Received response: {response}") + return response + + transport = PredictionServiceRestTransport(interceptor=MyCustomPredictionServiceInterceptor()) + client = PredictionServiceClient(transport=transport) + + + """ + + def pre_predict( + self, + request: prediction_service.PredictRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + prediction_service.PredictRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for predict + + Override in a subclass to manipulate the request or metadata + before they are sent to the PredictionService server. + """ + return request, metadata + + def post_predict( + self, response: prediction_service.PredictResponse + ) -> prediction_service.PredictResponse: + """Post-rpc interceptor for predict + + DEPRECATED. Please use the `post_predict_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the PredictionService server but before + it is returned to user code. This `post_predict` interceptor runs + before the `post_predict_with_metadata` interceptor. + """ + return response + + def post_predict_with_metadata( + self, + response: prediction_service.PredictResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + prediction_service.PredictResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for predict + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the PredictionService server but before it is returned to user code. + + We recommend only using this `post_predict_with_metadata` + interceptor in new development instead of the `post_predict` interceptor. + When both interceptors are used, this `post_predict_with_metadata` interceptor runs after the + `post_predict` interceptor. The (possibly modified) response returned by + `post_predict` will be passed to + `post_predict_with_metadata`. + """ + return response, metadata + + def pre_get_operation( + self, + request: operations_pb2.GetOperationRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + operations_pb2.GetOperationRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for get_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the PredictionService server. + """ + return request, metadata + + def post_get_operation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for get_operation + + Override in a subclass to manipulate the response + after it is returned by the PredictionService server but before + it is returned to user code. + """ + return response + + def pre_list_operations( + self, + request: operations_pb2.ListOperationsRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + operations_pb2.ListOperationsRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for list_operations + + Override in a subclass to manipulate the request or metadata + before they are sent to the PredictionService server. 
+ """ + return request, metadata + + def post_list_operations( + self, response: operations_pb2.ListOperationsResponse + ) -> operations_pb2.ListOperationsResponse: + """Post-rpc interceptor for list_operations + + Override in a subclass to manipulate the response + after it is returned by the PredictionService server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class PredictionServiceRestStub: + _session: AuthorizedSession + _host: str + _interceptor: PredictionServiceRestInterceptor + + +class PredictionServiceRestTransport(_BasePredictionServiceRestTransport): + """REST backend synchronous transport for PredictionService. + + A service for online predictions and explanations. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + """ + + def __init__( + self, + *, + host: str = "generativelanguage.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[PredictionServiceRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'generativelanguage.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
+ # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + url_scheme=url_scheme, + api_audience=api_audience, + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST + ) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or PredictionServiceRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _Predict( + _BasePredictionServiceRestTransport._BasePredict, PredictionServiceRestStub + ): + def __hash__(self): + return hash("PredictionServiceRestTransport.Predict") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: prediction_service.PredictRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> prediction_service.PredictResponse: + r"""Call the predict method over HTTP. + + Args: + request (~.prediction_service.PredictRequest): + The request object. Request message for + [PredictionService.Predict][google.ai.generativelanguage.v1alpha.PredictionService.Predict]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.prediction_service.PredictResponse: + Response message for [PredictionService.Predict]. 
+ """ + + http_options = ( + _BasePredictionServiceRestTransport._BasePredict._get_http_options() + ) + + request, metadata = self._interceptor.pre_predict(request, metadata) + transcoded_request = _BasePredictionServiceRestTransport._BasePredict._get_transcoded_request( + http_options, request + ) + + body = ( + _BasePredictionServiceRestTransport._BasePredict._get_request_body_json( + transcoded_request + ) + ) + + # Jsonify the query params + query_params = ( + _BasePredictionServiceRestTransport._BasePredict._get_query_params_json( + transcoded_request + ) + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.ai.generativelanguage_v1alpha.PredictionServiceClient.Predict", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.PredictionService", + "rpcName": "Predict", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = PredictionServiceRestTransport._Predict._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = prediction_service.PredictResponse() + pb_resp = prediction_service.PredictResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_predict(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_predict_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = prediction_service.PredictResponse.to_json( + response + ) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.ai.generativelanguage_v1alpha.PredictionServiceClient.predict", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.PredictionService", + "rpcName": "Predict", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + @property + def predict( + self, + ) -> Callable[ + [prediction_service.PredictRequest], prediction_service.PredictResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._Predict(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_operation(self): + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + + class _GetOperation( + _BasePredictionServiceRestTransport._BaseGetOperation, PredictionServiceRestStub + ): + def __hash__(self): + return hash("PredictionServiceRestTransport.GetOperation") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: operations_pb2.GetOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Call the get operation method over HTTP. + + Args: + request (operations_pb2.GetOperationRequest): + The request object for GetOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + operations_pb2.Operation: Response from GetOperation method. + """ + + http_options = ( + _BasePredictionServiceRestTransport._BaseGetOperation._get_http_options() + ) + + request, metadata = self._interceptor.pre_get_operation(request, metadata) + transcoded_request = _BasePredictionServiceRestTransport._BaseGetOperation._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BasePredictionServiceRestTransport._BaseGetOperation._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.ai.generativelanguage_v1alpha.PredictionServiceClient.GetOperation", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.PredictionService", + "rpcName": "GetOperation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = PredictionServiceRestTransport._GetOperation._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = operations_pb2.Operation() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_get_operation(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.ai.generativelanguage_v1alpha.PredictionServiceAsyncClient.GetOperation", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.PredictionService", + "rpcName": "GetOperation", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) + return resp + + @property + def list_operations(self): + return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore + + class _ListOperations( + _BasePredictionServiceRestTransport._BaseListOperations, + PredictionServiceRestStub, + ): + def __hash__(self): + return hash("PredictionServiceRestTransport.ListOperations") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: operations_pb2.ListOperationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Call the list operations method over HTTP. + + Args: + request (operations_pb2.ListOperationsRequest): + The request object for ListOperations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + operations_pb2.ListOperationsResponse: Response from ListOperations method. 
+ """ + + http_options = ( + _BasePredictionServiceRestTransport._BaseListOperations._get_http_options() + ) + + request, metadata = self._interceptor.pre_list_operations(request, metadata) + transcoded_request = _BasePredictionServiceRestTransport._BaseListOperations._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BasePredictionServiceRestTransport._BaseListOperations._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.ai.generativelanguage_v1alpha.PredictionServiceClient.ListOperations", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.PredictionService", + "rpcName": "ListOperations", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = PredictionServiceRestTransport._ListOperations._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = operations_pb2.ListOperationsResponse() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_list_operations(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.ai.generativelanguage_v1alpha.PredictionServiceAsyncClient.ListOperations", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.PredictionService", + "rpcName": "ListOperations", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) + return resp + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("PredictionServiceRestTransport",) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/prediction_service/transports/rest_base.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/prediction_service/transports/rest_base.py new file mode 100644 index 000000000000..8fb2d91436b3 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/prediction_service/transports/rest_base.py @@ -0,0 +1,211 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union + +from google.api_core import gapic_v1, path_template +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import json_format + +from google.ai.generativelanguage_v1alpha.types import prediction_service + +from .base import DEFAULT_CLIENT_INFO, PredictionServiceTransport + + +class _BasePredictionServiceRestTransport(PredictionServiceTransport): + """Base REST backend transport for PredictionService. + + Note: This class is not meant to be used directly. Use its sync and + async sub-classes instead. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + """ + + def __init__( + self, + *, + host: str = "generativelanguage.googleapis.com", + credentials: Optional[Any] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + Args: + host (Optional[str]): + The hostname to connect to (default: 'generativelanguage.googleapis.com'). + credentials (Optional[Any]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. 
+        """
+        # Run the base constructor
+        maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
+        if maybe_url_match is None:
+            raise ValueError(
+                f"Unexpected hostname structure: {host}"
+            )  # pragma: NO COVER
+
+        url_match_items = maybe_url_match.groupdict()
+
+        host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
+
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience,
+        )
+
+    class _BasePredict:
+        def __hash__(self):  # pragma: NO COVER
+            return NotImplementedError("__hash__ must be implemented.")
+
+        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {}
+
+        @classmethod
+        def _get_unset_required_fields(cls, message_dict):
+            return {
+                k: v
+                for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items()
+                if k not in message_dict
+            }
+
+        @staticmethod
+        def _get_http_options():
+            http_options: List[Dict[str, str]] = [
+                {
+                    "method": "post",
+                    "uri": "/v1alpha/{model=models/*}:predict",
+                    "body": "*",
+                },
+            ]
+            return http_options
+
+        @staticmethod
+        def _get_transcoded_request(http_options, request):
+            pb_request = prediction_service.PredictRequest.pb(request)
+            transcoded_request = path_template.transcode(http_options, pb_request)
+            return transcoded_request
+
+        @staticmethod
+        def _get_request_body_json(transcoded_request):
+            # Jsonify the request body
+
+            body = json_format.MessageToJson(
+                transcoded_request["body"], use_integers_for_enums=True
+            )
+            return body
+
+        @staticmethod
+        def _get_query_params_json(transcoded_request):
+            query_params = json.loads(
+                json_format.MessageToJson(
+                    transcoded_request["query_params"],
+                    use_integers_for_enums=True,
+                )
+            )
+            query_params.update(
+                _BasePredictionServiceRestTransport._BasePredict._get_unset_required_fields(
+                    query_params
+                )
+            )
+
+            query_params["$alt"] = "json;enum-encoding=int"
+            return query_params
+
+    class _BaseGetOperation:
+        def __hash__(self):  # pragma: NO COVER
+            return NotImplementedError("__hash__ must be implemented.")
+
+        @staticmethod
+        def _get_http_options():
+            http_options: List[Dict[str, str]] = [
+                {
+                    "method": "get",
+                    "uri": "/v1alpha/{name=tunedModels/*/operations/*}",
+                },
+                {
+                    "method": "get",
+                    "uri": "/v1alpha/{name=generatedFiles/*/operations/*}",
+                },
+                {
+                    "method": "get",
+                    "uri": "/v1alpha/{name=models/*/operations/*}",
+                },
+            ]
+            return http_options
+
+        @staticmethod
+        def _get_transcoded_request(http_options, request):
+            request_kwargs = json_format.MessageToDict(request)
+            transcoded_request = path_template.transcode(http_options, **request_kwargs)
+            return transcoded_request
+
+        @staticmethod
+        def _get_query_params_json(transcoded_request):
+            query_params = json.loads(json.dumps(transcoded_request["query_params"]))
+            return query_params
+
+    class _BaseListOperations:
+        def __hash__(self):  # pragma: NO COVER
+            return NotImplementedError("__hash__ must be implemented.")
+
+        @staticmethod
+        def _get_http_options():
+            http_options: List[Dict[str, str]] = [
+                {
+                    "method": "get",
+                    "uri": "/v1alpha/{name=tunedModels/*}/operations",
+                },
+                {
+                    "method": "get",
+                    "uri": "/v1alpha/{name=models/*}/operations",
+                },
+            ]
+            return http_options
+
+        @staticmethod
+        def _get_transcoded_request(http_options, request):
+            request_kwargs = json_format.MessageToDict(request)
+            transcoded_request = path_template.transcode(http_options, **request_kwargs)
+            return transcoded_request
+
+        @staticmethod
+        def _get_query_params_json(transcoded_request):
+            query_params = 
json.loads(json.dumps(transcoded_request["query_params"])) + return query_params + + +__all__ = ("_BasePredictionServiceRestTransport",) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/retriever_service/__init__.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/retriever_service/__init__.py new file mode 100644 index 000000000000..8c3e3ba77075 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/retriever_service/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .async_client import RetrieverServiceAsyncClient +from .client import RetrieverServiceClient + +__all__ = ( + "RetrieverServiceClient", + "RetrieverServiceAsyncClient", +) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/retriever_service/async_client.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/retriever_service/async_client.py new file mode 100644 index 000000000000..e04f95549d7b --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/retriever_service/async_client.py @@ -0,0 +1,2499 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import logging as std_logging +import re +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry_async as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.ai.generativelanguage_v1alpha import gapic_version as package_version + +try: + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore + +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + +from google.ai.generativelanguage_v1alpha.services.retriever_service import pagers +from google.ai.generativelanguage_v1alpha.types import retriever, retriever_service + +from .client import RetrieverServiceClient +from .transports.base import DEFAULT_CLIENT_INFO, RetrieverServiceTransport +from .transports.grpc_asyncio import RetrieverServiceGrpcAsyncIOTransport + +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + + +class RetrieverServiceAsyncClient: + """An API for semantic search over a corpus of user uploaded + content. + """ + + _client: RetrieverServiceClient + + # Copy defaults from the synchronous client for use here. + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. 
+ DEFAULT_ENDPOINT = RetrieverServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = RetrieverServiceClient.DEFAULT_MTLS_ENDPOINT + _DEFAULT_ENDPOINT_TEMPLATE = RetrieverServiceClient._DEFAULT_ENDPOINT_TEMPLATE + _DEFAULT_UNIVERSE = RetrieverServiceClient._DEFAULT_UNIVERSE + + chunk_path = staticmethod(RetrieverServiceClient.chunk_path) + parse_chunk_path = staticmethod(RetrieverServiceClient.parse_chunk_path) + corpus_path = staticmethod(RetrieverServiceClient.corpus_path) + parse_corpus_path = staticmethod(RetrieverServiceClient.parse_corpus_path) + document_path = staticmethod(RetrieverServiceClient.document_path) + parse_document_path = staticmethod(RetrieverServiceClient.parse_document_path) + common_billing_account_path = staticmethod( + RetrieverServiceClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + RetrieverServiceClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(RetrieverServiceClient.common_folder_path) + parse_common_folder_path = staticmethod( + RetrieverServiceClient.parse_common_folder_path + ) + common_organization_path = staticmethod( + RetrieverServiceClient.common_organization_path + ) + parse_common_organization_path = staticmethod( + RetrieverServiceClient.parse_common_organization_path + ) + common_project_path = staticmethod(RetrieverServiceClient.common_project_path) + parse_common_project_path = staticmethod( + RetrieverServiceClient.parse_common_project_path + ) + common_location_path = staticmethod(RetrieverServiceClient.common_location_path) + parse_common_location_path = staticmethod( + RetrieverServiceClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + RetrieverServiceAsyncClient: The constructed client. + """ + return RetrieverServiceClient.from_service_account_info.__func__(RetrieverServiceAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + RetrieverServiceAsyncClient: The constructed client. + """ + return RetrieverServiceClient.from_service_account_file.__func__(RetrieverServiceAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. 
+ (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return RetrieverServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> RetrieverServiceTransport: + """Returns the transport used by the client instance. + + Returns: + RetrieverServiceTransport: The transport used by the client instance. + """ + return self._client.transport + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._client._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used + by the client instance. + """ + return self._client._universe_domain + + get_transport_class = RetrieverServiceClient.get_transport_class + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[ + str, RetrieverServiceTransport, Callable[..., RetrieverServiceTransport] + ] + ] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the retriever service async client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,RetrieverServiceTransport,Callable[..., RetrieverServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport to use. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the RetrieverServiceTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. 
If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = RetrieverServiceClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ): # pragma: NO COVER + _LOGGER.debug( + "Created client `google.ai.generativelanguage_v1alpha.RetrieverServiceAsyncClient`.", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.RetrieverService", + "universeDomain": getattr( + self._client._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._client._transport, "_credentials") + else { + "serviceName": "google.ai.generativelanguage.v1alpha.RetrieverService", + "credentialsType": None, + }, + ) + + async def create_corpus( + self, + request: Optional[Union[retriever_service.CreateCorpusRequest, dict]] = None, + *, + corpus: Optional[retriever.Corpus] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> retriever.Corpus: + r"""Creates an empty ``Corpus``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1alpha + + async def sample_create_corpus(): + # Create a client + client = generativelanguage_v1alpha.RetrieverServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.CreateCorpusRequest( + ) + + # Make the request + response = await client.create_corpus(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.ai.generativelanguage_v1alpha.types.CreateCorpusRequest, dict]]): + The request object. Request to create a ``Corpus``. + corpus (:class:`google.ai.generativelanguage_v1alpha.types.Corpus`): + Required. The ``Corpus`` to create. 
+ This corresponds to the ``corpus`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.ai.generativelanguage_v1alpha.types.Corpus: + A Corpus is a collection of Documents. + A project can create up to 5 corpora. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([corpus]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, retriever_service.CreateCorpusRequest): + request = retriever_service.CreateCorpusRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if corpus is not None: + request.corpus = corpus + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_corpus + ] + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_corpus( + self, + request: Optional[Union[retriever_service.GetCorpusRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> retriever.Corpus: + r"""Gets information about a specific ``Corpus``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1alpha + + async def sample_get_corpus(): + # Create a client + client = generativelanguage_v1alpha.RetrieverServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.GetCorpusRequest( + name="name_value", + ) + + # Make the request + response = await client.get_corpus(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.ai.generativelanguage_v1alpha.types.GetCorpusRequest, dict]]): + The request object. Request for getting information about a specific + ``Corpus``. + name (:class:`str`): + Required. The name of the ``Corpus``. 
Example: + ``corpora/my-corpus-123`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.ai.generativelanguage_v1alpha.types.Corpus: + A Corpus is a collection of Documents. + A project can create up to 5 corpora. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, retriever_service.GetCorpusRequest): + request = retriever_service.GetCorpusRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_corpus + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def update_corpus( + self, + request: Optional[Union[retriever_service.UpdateCorpusRequest, dict]] = None, + *, + corpus: Optional[retriever.Corpus] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> retriever.Corpus: + r"""Updates a ``Corpus``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1alpha + + async def sample_update_corpus(): + # Create a client + client = generativelanguage_v1alpha.RetrieverServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.UpdateCorpusRequest( + ) + + # Make the request + response = await client.update_corpus(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.ai.generativelanguage_v1alpha.types.UpdateCorpusRequest, dict]]): + The request object. Request to update a ``Corpus``. + corpus (:class:`google.ai.generativelanguage_v1alpha.types.Corpus`): + Required. The ``Corpus`` to update. + This corresponds to the ``corpus`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. The list of fields to update. Currently, this + only supports updating ``display_name``. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.ai.generativelanguage_v1alpha.types.Corpus: + A Corpus is a collection of Documents. + A project can create up to 5 corpora. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([corpus, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, retriever_service.UpdateCorpusRequest): + request = retriever_service.UpdateCorpusRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if corpus is not None: + request.corpus = corpus + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_corpus + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("corpus.name", request.corpus.name),) + ), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def delete_corpus( + self, + request: Optional[Union[retriever_service.DeleteCorpusRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Deletes a ``Corpus``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1alpha + + async def sample_delete_corpus(): + # Create a client + client = generativelanguage_v1alpha.RetrieverServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.DeleteCorpusRequest( + name="name_value", + ) + + # Make the request + await client.delete_corpus(request=request) + + Args: + request (Optional[Union[google.ai.generativelanguage_v1alpha.types.DeleteCorpusRequest, dict]]): + The request object. Request to delete a ``Corpus``. + name (:class:`str`): + Required. The resource name of the ``Corpus``. Example: + ``corpora/my-corpus-123`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, retriever_service.DeleteCorpusRequest): + request = retriever_service.DeleteCorpusRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_corpus + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. 
+ await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def list_corpora( + self, + request: Optional[Union[retriever_service.ListCorporaRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListCorporaAsyncPager: + r"""Lists all ``Corpora`` owned by the user. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1alpha + + async def sample_list_corpora(): + # Create a client + client = generativelanguage_v1alpha.RetrieverServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.ListCorporaRequest( + ) + + # Make the request + page_result = client.list_corpora(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.ai.generativelanguage_v1alpha.types.ListCorporaRequest, dict]]): + The request object. Request for listing ``Corpora``. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.ai.generativelanguage_v1alpha.services.retriever_service.pagers.ListCorporaAsyncPager: + Response from ListCorpora containing a paginated list of Corpora. + The results are sorted by ascending + corpus.create_time. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, retriever_service.ListCorporaRequest): + request = retriever_service.ListCorporaRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_corpora + ] + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListCorporaAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def query_corpus( + self, + request: Optional[Union[retriever_service.QueryCorpusRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> retriever_service.QueryCorpusResponse: + r"""Performs semantic search over a ``Corpus``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1alpha + + async def sample_query_corpus(): + # Create a client + client = generativelanguage_v1alpha.RetrieverServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.QueryCorpusRequest( + name="name_value", + query="query_value", + ) + + # Make the request + response = await client.query_corpus(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.ai.generativelanguage_v1alpha.types.QueryCorpusRequest, dict]]): + The request object. Request for querying a ``Corpus``. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.ai.generativelanguage_v1alpha.types.QueryCorpusResponse: + Response from QueryCorpus containing a list of relevant + chunks. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, retriever_service.QueryCorpusRequest): + request = retriever_service.QueryCorpusRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.query_corpus + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_document( + self, + request: Optional[Union[retriever_service.CreateDocumentRequest, dict]] = None, + *, + parent: Optional[str] = None, + document: Optional[retriever.Document] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> retriever.Document: + r"""Creates an empty ``Document``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1alpha + + async def sample_create_document(): + # Create a client + client = generativelanguage_v1alpha.RetrieverServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.CreateDocumentRequest( + parent="parent_value", + ) + + # Make the request + response = await client.create_document(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.ai.generativelanguage_v1alpha.types.CreateDocumentRequest, dict]]): + The request object. Request to create a ``Document``. + parent (:class:`str`): + Required. The name of the ``Corpus`` where this + ``Document`` will be created. Example: + ``corpora/my-corpus-123`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + document (:class:`google.ai.generativelanguage_v1alpha.types.Document`): + Required. The ``Document`` to create. + This corresponds to the ``document`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.ai.generativelanguage_v1alpha.types.Document: + A Document is a collection of Chunks. + A Corpus can have a maximum of 10,000 Documents. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, document]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, retriever_service.CreateDocumentRequest): + request = retriever_service.CreateDocumentRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if document is not None: + request.document = document + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_document + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def get_document( + self, + request: Optional[Union[retriever_service.GetDocumentRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> retriever.Document: + r"""Gets information about a specific ``Document``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1alpha + + async def sample_get_document(): + # Create a client + client = generativelanguage_v1alpha.RetrieverServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.GetDocumentRequest( + name="name_value", + ) + + # Make the request + response = await client.get_document(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.ai.generativelanguage_v1alpha.types.GetDocumentRequest, dict]]): + The request object. Request for getting information about a specific + ``Document``. + name (:class:`str`): + Required. The name of the ``Document`` to retrieve. + Example: ``corpora/my-corpus-123/documents/the-doc-abc`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.ai.generativelanguage_v1alpha.types.Document: + A Document is a collection of Chunks. + A Corpus can have a maximum of 10,000 Documents. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, retriever_service.GetDocumentRequest): + request = retriever_service.GetDocumentRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_document + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. 
+ self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def update_document( + self, + request: Optional[Union[retriever_service.UpdateDocumentRequest, dict]] = None, + *, + document: Optional[retriever.Document] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> retriever.Document: + r"""Updates a ``Document``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1alpha + + async def sample_update_document(): + # Create a client + client = generativelanguage_v1alpha.RetrieverServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.UpdateDocumentRequest( + ) + + # Make the request + response = await client.update_document(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.ai.generativelanguage_v1alpha.types.UpdateDocumentRequest, dict]]): + The request object. Request to update a ``Document``. + document (:class:`google.ai.generativelanguage_v1alpha.types.Document`): + Required. The ``Document`` to update. + This corresponds to the ``document`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. The list of fields to update. Currently, this + only supports updating ``display_name`` and + ``custom_metadata``. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.ai.generativelanguage_v1alpha.types.Document: + A Document is a collection of Chunks. + A Corpus can have a maximum of 10,000 Documents. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([document, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance(request, retriever_service.UpdateDocumentRequest): + request = retriever_service.UpdateDocumentRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if document is not None: + request.document = document + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_document + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("document.name", request.document.name),) + ), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_document( + self, + request: Optional[Union[retriever_service.DeleteDocumentRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Deletes a ``Document``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1alpha + + async def sample_delete_document(): + # Create a client + client = generativelanguage_v1alpha.RetrieverServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.DeleteDocumentRequest( + name="name_value", + ) + + # Make the request + await client.delete_document(request=request) + + Args: + request (Optional[Union[google.ai.generativelanguage_v1alpha.types.DeleteDocumentRequest, dict]]): + The request object. Request to delete a ``Document``. + name (:class:`str`): + Required. The resource name of the ``Document`` to + delete. Example: + ``corpora/my-corpus-123/documents/the-doc-abc`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, retriever_service.DeleteDocumentRequest): + request = retriever_service.DeleteDocumentRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_document + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def list_documents( + self, + request: Optional[Union[retriever_service.ListDocumentsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListDocumentsAsyncPager: + r"""Lists all ``Document``\ s in a ``Corpus``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1alpha + + async def sample_list_documents(): + # Create a client + client = generativelanguage_v1alpha.RetrieverServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.ListDocumentsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_documents(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.ai.generativelanguage_v1alpha.types.ListDocumentsRequest, dict]]): + The request object. Request for listing ``Document``\ s. + parent (:class:`str`): + Required. The name of the ``Corpus`` containing + ``Document``\ s. Example: ``corpora/my-corpus-123`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.ai.generativelanguage_v1alpha.services.retriever_service.pagers.ListDocumentsAsyncPager: + Response from ListDocuments containing a paginated list of Documents. + The Documents are sorted by ascending + document.create_time. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, retriever_service.ListDocumentsRequest): + request = retriever_service.ListDocumentsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_documents + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListDocumentsAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def query_document( + self, + request: Optional[Union[retriever_service.QueryDocumentRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> retriever_service.QueryDocumentResponse: + r"""Performs semantic search over a ``Document``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1alpha + + async def sample_query_document(): + # Create a client + client = generativelanguage_v1alpha.RetrieverServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.QueryDocumentRequest( + name="name_value", + query="query_value", + ) + + # Make the request + response = await client.query_document(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.ai.generativelanguage_v1alpha.types.QueryDocumentRequest, dict]]): + The request object. Request for querying a ``Document``. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ + Returns: + google.ai.generativelanguage_v1alpha.types.QueryDocumentResponse: + Response from QueryDocument containing a list of + relevant chunks. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, retriever_service.QueryDocumentRequest): + request = retriever_service.QueryDocumentRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.query_document + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_chunk( + self, + request: Optional[Union[retriever_service.CreateChunkRequest, dict]] = None, + *, + parent: Optional[str] = None, + chunk: Optional[retriever.Chunk] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> retriever.Chunk: + r"""Creates a ``Chunk``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1alpha + + async def sample_create_chunk(): + # Create a client + client = generativelanguage_v1alpha.RetrieverServiceAsyncClient() + + # Initialize request argument(s) + chunk = generativelanguage_v1alpha.Chunk() + chunk.data.string_value = "string_value_value" + + request = generativelanguage_v1alpha.CreateChunkRequest( + parent="parent_value", + chunk=chunk, + ) + + # Make the request + response = await client.create_chunk(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.ai.generativelanguage_v1alpha.types.CreateChunkRequest, dict]]): + The request object. Request to create a ``Chunk``. + parent (:class:`str`): + Required. The name of the ``Document`` where this + ``Chunk`` will be created. Example: + ``corpora/my-corpus-123/documents/the-doc-abc`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + chunk (:class:`google.ai.generativelanguage_v1alpha.types.Chunk`): + Required. The ``Chunk`` to create. + This corresponds to the ``chunk`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.ai.generativelanguage_v1alpha.types.Chunk: + A Chunk is a subpart of a Document that is treated as an independent unit + for the purposes of vector representation and + storage. A Corpus can have a maximum of 1 million + Chunks. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, chunk]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, retriever_service.CreateChunkRequest): + request = retriever_service.CreateChunkRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if chunk is not None: + request.chunk = chunk + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_chunk + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def batch_create_chunks( + self, + request: Optional[ + Union[retriever_service.BatchCreateChunksRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> retriever_service.BatchCreateChunksResponse: + r"""Batch create ``Chunk``\ s. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1alpha + + async def sample_batch_create_chunks(): + # Create a client + client = generativelanguage_v1alpha.RetrieverServiceAsyncClient() + + # Initialize request argument(s) + requests = generativelanguage_v1alpha.CreateChunkRequest() + requests.parent = "parent_value" + requests.chunk.data.string_value = "string_value_value" + + request = generativelanguage_v1alpha.BatchCreateChunksRequest( + requests=requests, + ) + + # Make the request + response = await client.batch_create_chunks(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.ai.generativelanguage_v1alpha.types.BatchCreateChunksRequest, dict]]): + The request object. Request to batch create ``Chunk``\ s. 
+ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.ai.generativelanguage_v1alpha.types.BatchCreateChunksResponse: + Response from BatchCreateChunks containing a list of + created Chunks. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, retriever_service.BatchCreateChunksRequest): + request = retriever_service.BatchCreateChunksRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.batch_create_chunks + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_chunk( + self, + request: Optional[Union[retriever_service.GetChunkRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> retriever.Chunk: + r"""Gets information about a specific ``Chunk``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1alpha + + async def sample_get_chunk(): + # Create a client + client = generativelanguage_v1alpha.RetrieverServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.GetChunkRequest( + name="name_value", + ) + + # Make the request + response = await client.get_chunk(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.ai.generativelanguage_v1alpha.types.GetChunkRequest, dict]]): + The request object. Request for getting information about a specific + ``Chunk``. + name (:class:`str`): + Required. The name of the ``Chunk`` to retrieve. + Example: + ``corpora/my-corpus-123/documents/the-doc-abc/chunks/some-chunk`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.ai.generativelanguage_v1alpha.types.Chunk: + A Chunk is a subpart of a Document that is treated as an independent unit + for the purposes of vector representation and + storage. A Corpus can have a maximum of 1 million + Chunks. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, retriever_service.GetChunkRequest): + request = retriever_service.GetChunkRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_chunk + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def update_chunk( + self, + request: Optional[Union[retriever_service.UpdateChunkRequest, dict]] = None, + *, + chunk: Optional[retriever.Chunk] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> retriever.Chunk: + r"""Updates a ``Chunk``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1alpha + + async def sample_update_chunk(): + # Create a client + client = generativelanguage_v1alpha.RetrieverServiceAsyncClient() + + # Initialize request argument(s) + chunk = generativelanguage_v1alpha.Chunk() + chunk.data.string_value = "string_value_value" + + request = generativelanguage_v1alpha.UpdateChunkRequest( + chunk=chunk, + ) + + # Make the request + response = await client.update_chunk(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.ai.generativelanguage_v1alpha.types.UpdateChunkRequest, dict]]): + The request object. Request to update a ``Chunk``. + chunk (:class:`google.ai.generativelanguage_v1alpha.types.Chunk`): + Required. The ``Chunk`` to update. 
+ This corresponds to the ``chunk`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. The list of fields to update. Currently, this + only supports updating ``custom_metadata`` and ``data``. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.ai.generativelanguage_v1alpha.types.Chunk: + A Chunk is a subpart of a Document that is treated as an independent unit + for the purposes of vector representation and + storage. A Corpus can have a maximum of 1 million + Chunks. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([chunk, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, retriever_service.UpdateChunkRequest): + request = retriever_service.UpdateChunkRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if chunk is not None: + request.chunk = chunk + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_chunk + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("chunk.name", request.chunk.name),) + ), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def batch_update_chunks( + self, + request: Optional[ + Union[retriever_service.BatchUpdateChunksRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> retriever_service.BatchUpdateChunksResponse: + r"""Batch update ``Chunk``\ s. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1alpha + + async def sample_batch_update_chunks(): + # Create a client + client = generativelanguage_v1alpha.RetrieverServiceAsyncClient() + + # Initialize request argument(s) + requests = generativelanguage_v1alpha.UpdateChunkRequest() + requests.chunk.data.string_value = "string_value_value" + + request = generativelanguage_v1alpha.BatchUpdateChunksRequest( + requests=requests, + ) + + # Make the request + response = await client.batch_update_chunks(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.ai.generativelanguage_v1alpha.types.BatchUpdateChunksRequest, dict]]): + The request object. Request to batch update ``Chunk``\ s. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.ai.generativelanguage_v1alpha.types.BatchUpdateChunksResponse: + Response from BatchUpdateChunks containing a list of + updated Chunks. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, retriever_service.BatchUpdateChunksRequest): + request = retriever_service.BatchUpdateChunksRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.batch_update_chunks + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_chunk( + self, + request: Optional[Union[retriever_service.DeleteChunkRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Deletes a ``Chunk``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1alpha + + async def sample_delete_chunk(): + # Create a client + client = generativelanguage_v1alpha.RetrieverServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.DeleteChunkRequest( + name="name_value", + ) + + # Make the request + await client.delete_chunk(request=request) + + Args: + request (Optional[Union[google.ai.generativelanguage_v1alpha.types.DeleteChunkRequest, dict]]): + The request object. Request to delete a ``Chunk``. + name (:class:`str`): + Required. The resource name of the ``Chunk`` to delete. + Example: + ``corpora/my-corpus-123/documents/the-doc-abc/chunks/some-chunk`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, retriever_service.DeleteChunkRequest): + request = retriever_service.DeleteChunkRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_chunk + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def batch_delete_chunks( + self, + request: Optional[ + Union[retriever_service.BatchDeleteChunksRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Batch delete ``Chunk``\ s. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1alpha + + async def sample_batch_delete_chunks(): + # Create a client + client = generativelanguage_v1alpha.RetrieverServiceAsyncClient() + + # Initialize request argument(s) + requests = generativelanguage_v1alpha.DeleteChunkRequest() + requests.name = "name_value" + + request = generativelanguage_v1alpha.BatchDeleteChunksRequest( + requests=requests, + ) + + # Make the request + await client.batch_delete_chunks(request=request) + + Args: + request (Optional[Union[google.ai.generativelanguage_v1alpha.types.BatchDeleteChunksRequest, dict]]): + The request object. Request to batch delete ``Chunk``\ s. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, retriever_service.BatchDeleteChunksRequest): + request = retriever_service.BatchDeleteChunksRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.batch_delete_chunks + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def list_chunks( + self, + request: Optional[Union[retriever_service.ListChunksRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListChunksAsyncPager: + r"""Lists all ``Chunk``\ s in a ``Document``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1alpha + + async def sample_list_chunks(): + # Create a client + client = generativelanguage_v1alpha.RetrieverServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.ListChunksRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_chunks(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.ai.generativelanguage_v1alpha.types.ListChunksRequest, dict]]): + The request object. Request for listing ``Chunk``\ s. + parent (:class:`str`): + Required. The name of the ``Document`` containing + ``Chunk``\ s. Example: + ``corpora/my-corpus-123/documents/the-doc-abc`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.ai.generativelanguage_v1alpha.services.retriever_service.pagers.ListChunksAsyncPager: + Response from ListChunks containing a paginated list of Chunks. + The Chunks are sorted by ascending chunk.create_time. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, retriever_service.ListChunksRequest): + request = retriever_service.ListChunksRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_chunks + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListChunksAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.list_operations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.get_operation] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def __aenter__(self) -> "RetrieverServiceAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("RetrieverServiceAsyncClient",) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/retriever_service/client.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/retriever_service/client.py new file mode 100644 index 000000000000..d36942c0641e --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/retriever_service/client.py @@ -0,0 +1,2912 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from http import HTTPStatus +import json +import logging as std_logging +import os +import re +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) +import warnings + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.ai.generativelanguage_v1alpha import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + +from google.ai.generativelanguage_v1alpha.services.retriever_service import pagers +from google.ai.generativelanguage_v1alpha.types import retriever, retriever_service + +from .transports.base import DEFAULT_CLIENT_INFO, RetrieverServiceTransport +from .transports.grpc import 
RetrieverServiceGrpcTransport +from .transports.grpc_asyncio import RetrieverServiceGrpcAsyncIOTransport +from .transports.rest import RetrieverServiceRestTransport + + +class RetrieverServiceClientMeta(type): + """Metaclass for the RetrieverService client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + + _transport_registry = ( + OrderedDict() + ) # type: Dict[str, Type[RetrieverServiceTransport]] + _transport_registry["grpc"] = RetrieverServiceGrpcTransport + _transport_registry["grpc_asyncio"] = RetrieverServiceGrpcAsyncIOTransport + _transport_registry["rest"] = RetrieverServiceRestTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[RetrieverServiceTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class RetrieverServiceClient(metaclass=RetrieverServiceClientMeta): + """An API for semantic search over a corpus of user uploaded + content. + """ + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. + DEFAULT_ENDPOINT = "generativelanguage.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + _DEFAULT_ENDPOINT_TEMPLATE = "generativelanguage.{UNIVERSE_DOMAIN}" + _DEFAULT_UNIVERSE = "googleapis.com" + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + RetrieverServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor.
+ kwargs: Additional arguments to pass to the constructor. + + Returns: + RetrieverServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> RetrieverServiceTransport: + """Returns the transport used by the client instance. + + Returns: + RetrieverServiceTransport: The transport used by the client + instance. + """ + return self._transport + + @staticmethod + def chunk_path( + corpus: str, + document: str, + chunk: str, + ) -> str: + """Returns a fully-qualified chunk string.""" + return "corpora/{corpus}/documents/{document}/chunks/{chunk}".format( + corpus=corpus, + document=document, + chunk=chunk, + ) + + @staticmethod + def parse_chunk_path(path: str) -> Dict[str, str]: + """Parses a chunk path into its component segments.""" + m = re.match( + r"^corpora/(?P<corpus>.+?)/documents/(?P<document>.+?)/chunks/(?P<chunk>.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def corpus_path( + corpus: str, + ) -> str: + """Returns a fully-qualified corpus string.""" + return "corpora/{corpus}".format( + corpus=corpus, + ) + + @staticmethod + def parse_corpus_path(path: str) -> Dict[str, str]: + """Parses a corpus path into its component segments.""" + m = re.match(r"^corpora/(?P<corpus>.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def document_path( + corpus: str, + document: str, + ) -> str: + """Returns a fully-qualified document string.""" + return "corpora/{corpus}/documents/{document}".format( + corpus=corpus, + document=document, + ) + + @staticmethod + def parse_document_path(path: str) -> Dict[str, str]: + """Parses a document path into its component segments.""" + m = re.match(r"^corpora/(?P<corpus>.+?)/documents/(?P<document>.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path( + billing_account: str, + ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path( + folder: str, + ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format( + folder=folder, + ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P<folder>.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path( + organization: str, + ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format( + organization=organization, + ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse an organization path into its component segments.""" + m = re.match(r"^organizations/(?P<organization>.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path( + project: str, + ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format( + project=project, + ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path
+        m = re.match(r"^projects/(?P<project>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_location_path(
+        project: str,
+        location: str,
+    ) -> str:
+        """Returns a fully-qualified location string."""
+        return "projects/{project}/locations/{location}".format(
+            project=project,
+            location=location,
+        )
+
+    @staticmethod
+    def parse_common_location_path(path: str) -> Dict[str, str]:
+        """Parse a location path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @classmethod
+    def get_mtls_endpoint_and_cert_source(
+        cls, client_options: Optional[client_options_lib.ClientOptions] = None
+    ):
+        """Deprecated. Return the API endpoint and client cert source for mutual TLS.
+
+        The client cert source is determined in the following order:
+        (1) if the `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+        client cert source is None.
+        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+        default client cert source exists, use the default one; otherwise the client cert
+        source is None.
+
+        The API endpoint is determined in the following order:
+        (1) if `client_options.api_endpoint` is provided, use the provided one.
+        (2) if the `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
+        default mTLS endpoint; if the environment variable is "never", use the default API
+        endpoint; otherwise, if a client cert source exists, use the default mTLS endpoint,
+        otherwise use the default API endpoint.
+
+        More details can be found at https://google.aip.dev/auth/4114.
+
+        Args:
+            client_options (google.api_core.client_options.ClientOptions): Custom options for the
+                client. Only the `api_endpoint` and `client_cert_source` properties may be used
+                in this method.
+
+        Returns:
+            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
+                client cert source to use.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
+        """
+
+        warnings.warn(
+            "get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.",
+            DeprecationWarning,
+        )
+        if client_options is None:
+            client_options = client_options_lib.ClientOptions()
+        use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
+        use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
+        if use_client_cert not in ("true", "false"):
+            raise ValueError(
+                "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"
+            )
+        if use_mtls_endpoint not in ("auto", "never", "always"):
+            raise MutualTLSChannelError(
+                "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`"
+            )
+
+        # Figure out the client cert source to use.
+        client_cert_source = None
+        if use_client_cert == "true":
+            if client_options.client_cert_source:
+                client_cert_source = client_options.client_cert_source
+            elif mtls.has_default_client_cert_source():
+                client_cert_source = mtls.default_client_cert_source()
+
+        # Figure out which api endpoint to use.
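+        # Precedence (mirrors the branches below): an explicit
+        # `client_options.api_endpoint` always wins; otherwise
+        # GOOGLE_API_USE_MTLS_ENDPOINT="always" (or "auto" when a client
+        # certificate is available) selects the default mTLS endpoint, and any
+        # other case falls back to the default regular endpoint.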
+ if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + @staticmethod + def _read_environment_variables(): + """Returns the environment variables used by the client. + + Returns: + Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, + GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. + + Raises: + ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not + any of ["true", "false"]. + google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT + is not any of ["auto", "never", "always"]. + """ + use_client_cert = os.getenv( + "GOOGLE_API_USE_CLIENT_CERTIFICATE", "false" + ).lower() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() + universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + return use_client_cert == "true", use_mtls_endpoint, universe_domain_env + + @staticmethod + def _get_client_cert_source(provided_cert_source, use_cert_flag): + """Return the client cert source to be used by the client. + + Args: + provided_cert_source (bytes): The client certificate source provided. + use_cert_flag (bool): A flag indicating whether to use the client certificate. + + Returns: + bytes or None: The client cert source to be used by the client. + """ + client_cert_source = None + if use_cert_flag: + if provided_cert_source: + client_cert_source = provided_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + return client_cert_source + + @staticmethod + def _get_api_endpoint( + api_override, client_cert_source, universe_domain, use_mtls_endpoint + ): + """Return the API endpoint used by the client. + + Args: + api_override (str): The API endpoint override. If specified, this is always + the return value of this function and the other arguments are not used. + client_cert_source (bytes): The client certificate source used by the client. + universe_domain (str): The universe domain used by the client. + use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. + Possible values are "always", "auto", or "never". + + Returns: + str: The API endpoint to be used by the client. + """ + if api_override is not None: + api_endpoint = api_override + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + _default_universe = RetrieverServiceClient._DEFAULT_UNIVERSE + if universe_domain != _default_universe: + raise MutualTLSChannelError( + f"mTLS is not supported in any universe other than {_default_universe}." 
+ ) + api_endpoint = RetrieverServiceClient.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = RetrieverServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=universe_domain + ) + return api_endpoint + + @staticmethod + def _get_universe_domain( + client_universe_domain: Optional[str], universe_domain_env: Optional[str] + ) -> str: + """Return the universe domain used by the client. + + Args: + client_universe_domain (Optional[str]): The universe domain configured via the client options. + universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. + + Returns: + str: The universe domain to be used by the client. + + Raises: + ValueError: If the universe domain is an empty string. + """ + universe_domain = RetrieverServiceClient._DEFAULT_UNIVERSE + if client_universe_domain is not None: + universe_domain = client_universe_domain + elif universe_domain_env is not None: + universe_domain = universe_domain_env + if len(universe_domain.strip()) == 0: + raise ValueError("Universe Domain cannot be an empty string.") + return universe_domain + + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. + + Returns: + bool: True iff the configured universe domain is valid. + + Raises: + ValueError: If the configured universe domain is not valid. + """ + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True + + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used by the client instance. + """ + return self._universe_domain + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[ + str, RetrieverServiceTransport, Callable[..., RetrieverServiceTransport] + ] + ] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the retriever service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. 
+ transport (Optional[Union[str,RetrieverServiceTransport,Callable[..., RetrieverServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the RetrieverServiceTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that the ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client_options = client_options + if isinstance(self._client_options, dict): + self._client_options = client_options_lib.from_dict(self._client_options) + if self._client_options is None: + self._client_options = client_options_lib.ClientOptions() + self._client_options = cast( + client_options_lib.ClientOptions, self._client_options + ) + + universe_domain_opt = getattr(self._client_options, "universe_domain", None) + + ( + self._use_client_cert, + self._use_mtls_endpoint, + self._universe_domain_env, + ) = RetrieverServiceClient._read_environment_variables() + self._client_cert_source = RetrieverServiceClient._get_client_cert_source( + self._client_options.client_cert_source, self._use_client_cert + ) + self._universe_domain = RetrieverServiceClient._get_universe_domain( + universe_domain_opt, self._universe_domain_env + ) + self._api_endpoint = None # updated below, depending on `transport` + + # Initialize the universe domain validation. + self._is_universe_domain_valid = False + + if CLIENT_LOGGING_SUPPORTED: # pragma: NO COVER + # Setup logging. + client_logging.initialize_logging() + + api_key_value = getattr(self._client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. 
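+        # `transport` may be None, a registry key ("grpc", "grpc_asyncio" or
+        # "rest"), an already-constructed RetrieverServiceTransport instance,
+        # or a callable that returns a transport; each case is handled below.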
+ # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + transport_provided = isinstance(transport, RetrieverServiceTransport) + if transport_provided: + # transport is a RetrieverServiceTransport instance. + if credentials or self._client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if self._client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." + ) + self._transport = cast(RetrieverServiceTransport, transport) + self._api_endpoint = self._transport.host + + self._api_endpoint = ( + self._api_endpoint + or RetrieverServiceClient._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint, + ) + ) + + if not transport_provided: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + transport_init: Union[ + Type[RetrieverServiceTransport], + Callable[..., RetrieverServiceTransport], + ] = ( + RetrieverServiceClient.get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., RetrieverServiceTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( + credentials=credentials, + credentials_file=self._client_options.credentials_file, + host=self._api_endpoint, + scopes=self._client_options.scopes, + client_cert_source_for_mtls=self._client_cert_source, + quota_project_id=self._client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=self._client_options.api_audience, + ) + + if "async" not in str(self._transport): + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ): # pragma: NO COVER + _LOGGER.debug( + "Created client `google.ai.generativelanguage_v1alpha.RetrieverServiceClient`.", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.RetrieverService", + "universeDomain": getattr( + self._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._transport, "_credentials") + else { + "serviceName": "google.ai.generativelanguage.v1alpha.RetrieverService", + "credentialsType": None, + }, + ) + + def create_corpus( + self, + request: Optional[Union[retriever_service.CreateCorpusRequest, dict]] = None, + *, + corpus: Optional[retriever.Corpus] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> retriever.Corpus: + r"""Creates an empty ``Corpus``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1alpha + + def sample_create_corpus(): + # Create a client + client = generativelanguage_v1alpha.RetrieverServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.CreateCorpusRequest( + ) + + # Make the request + response = client.create_corpus(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.ai.generativelanguage_v1alpha.types.CreateCorpusRequest, dict]): + The request object. Request to create a ``Corpus``. + corpus (google.ai.generativelanguage_v1alpha.types.Corpus): + Required. The ``Corpus`` to create. + This corresponds to the ``corpus`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.ai.generativelanguage_v1alpha.types.Corpus: + A Corpus is a collection of Documents. + A project can create up to 5 corpora. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([corpus]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, retriever_service.CreateCorpusRequest): + request = retriever_service.CreateCorpusRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if corpus is not None: + request.corpus = corpus + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_corpus] + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_corpus( + self, + request: Optional[Union[retriever_service.GetCorpusRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> retriever.Corpus: + r"""Gets information about a specific ``Corpus``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1alpha + + def sample_get_corpus(): + # Create a client + client = generativelanguage_v1alpha.RetrieverServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.GetCorpusRequest( + name="name_value", + ) + + # Make the request + response = client.get_corpus(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.ai.generativelanguage_v1alpha.types.GetCorpusRequest, dict]): + The request object. Request for getting information about a specific + ``Corpus``. + name (str): + Required. The name of the ``Corpus``. Example: + ``corpora/my-corpus-123`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.ai.generativelanguage_v1alpha.types.Corpus: + A Corpus is a collection of Documents. + A project can create up to 5 corpora. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, retriever_service.GetCorpusRequest): + request = retriever_service.GetCorpusRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_corpus] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_corpus( + self, + request: Optional[Union[retriever_service.UpdateCorpusRequest, dict]] = None, + *, + corpus: Optional[retriever.Corpus] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> retriever.Corpus: + r"""Updates a ``Corpus``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1alpha + + def sample_update_corpus(): + # Create a client + client = generativelanguage_v1alpha.RetrieverServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.UpdateCorpusRequest( + ) + + # Make the request + response = client.update_corpus(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.ai.generativelanguage_v1alpha.types.UpdateCorpusRequest, dict]): + The request object. Request to update a ``Corpus``. + corpus (google.ai.generativelanguage_v1alpha.types.Corpus): + Required. The ``Corpus`` to update. + This corresponds to the ``corpus`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. The list of fields to update. Currently, this + only supports updating ``display_name``. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.ai.generativelanguage_v1alpha.types.Corpus: + A Corpus is a collection of Documents. + A project can create up to 5 corpora. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([corpus, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, retriever_service.UpdateCorpusRequest): + request = retriever_service.UpdateCorpusRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if corpus is not None: + request.corpus = corpus + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_corpus] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("corpus.name", request.corpus.name),) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def delete_corpus( + self, + request: Optional[Union[retriever_service.DeleteCorpusRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Deletes a ``Corpus``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1alpha + + def sample_delete_corpus(): + # Create a client + client = generativelanguage_v1alpha.RetrieverServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.DeleteCorpusRequest( + name="name_value", + ) + + # Make the request + client.delete_corpus(request=request) + + Args: + request (Union[google.ai.generativelanguage_v1alpha.types.DeleteCorpusRequest, dict]): + The request object. Request to delete a ``Corpus``. + name (str): + Required. The resource name of the ``Corpus``. Example: + ``corpora/my-corpus-123`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, retriever_service.DeleteCorpusRequest): + request = retriever_service.DeleteCorpusRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_corpus] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
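+        # The delete RPC has no meaningful response payload, so the result is
+        # discarded and this method returns None.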
+ rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def list_corpora( + self, + request: Optional[Union[retriever_service.ListCorporaRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListCorporaPager: + r"""Lists all ``Corpora`` owned by the user. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1alpha + + def sample_list_corpora(): + # Create a client + client = generativelanguage_v1alpha.RetrieverServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.ListCorporaRequest( + ) + + # Make the request + page_result = client.list_corpora(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.ai.generativelanguage_v1alpha.types.ListCorporaRequest, dict]): + The request object. Request for listing ``Corpora``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.ai.generativelanguage_v1alpha.services.retriever_service.pagers.ListCorporaPager: + Response from ListCorpora containing a paginated list of Corpora. + The results are sorted by ascending + corpus.create_time. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, retriever_service.ListCorporaRequest): + request = retriever_service.ListCorporaRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_corpora] + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListCorporaPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def query_corpus( + self, + request: Optional[Union[retriever_service.QueryCorpusRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> retriever_service.QueryCorpusResponse: + r"""Performs semantic search over a ``Corpus``. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1alpha + + def sample_query_corpus(): + # Create a client + client = generativelanguage_v1alpha.RetrieverServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.QueryCorpusRequest( + name="name_value", + query="query_value", + ) + + # Make the request + response = client.query_corpus(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.ai.generativelanguage_v1alpha.types.QueryCorpusRequest, dict]): + The request object. Request for querying a ``Corpus``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.ai.generativelanguage_v1alpha.types.QueryCorpusResponse: + Response from QueryCorpus containing a list of relevant + chunks. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, retriever_service.QueryCorpusRequest): + request = retriever_service.QueryCorpusRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.query_corpus] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_document( + self, + request: Optional[Union[retriever_service.CreateDocumentRequest, dict]] = None, + *, + parent: Optional[str] = None, + document: Optional[retriever.Document] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> retriever.Document: + r"""Creates an empty ``Document``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1alpha + + def sample_create_document(): + # Create a client + client = generativelanguage_v1alpha.RetrieverServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.CreateDocumentRequest( + parent="parent_value", + ) + + # Make the request + response = client.create_document(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.ai.generativelanguage_v1alpha.types.CreateDocumentRequest, dict]): + The request object. Request to create a ``Document``. + parent (str): + Required. The name of the ``Corpus`` where this + ``Document`` will be created. Example: + ``corpora/my-corpus-123`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + document (google.ai.generativelanguage_v1alpha.types.Document): + Required. The ``Document`` to create. + This corresponds to the ``document`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.ai.generativelanguage_v1alpha.types.Document: + A Document is a collection of Chunks. + A Corpus can have a maximum of 10,000 Documents. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, document]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, retriever_service.CreateDocumentRequest): + request = retriever_service.CreateDocumentRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if document is not None: + request.document = document + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_document] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def get_document( + self, + request: Optional[Union[retriever_service.GetDocumentRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> retriever.Document: + r"""Gets information about a specific ``Document``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1alpha + + def sample_get_document(): + # Create a client + client = generativelanguage_v1alpha.RetrieverServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.GetDocumentRequest( + name="name_value", + ) + + # Make the request + response = client.get_document(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.ai.generativelanguage_v1alpha.types.GetDocumentRequest, dict]): + The request object. Request for getting information about a specific + ``Document``. + name (str): + Required. The name of the ``Document`` to retrieve. + Example: ``corpora/my-corpus-123/documents/the-doc-abc`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.ai.generativelanguage_v1alpha.types.Document: + A Document is a collection of Chunks. + A Corpus can have a maximum of 10,000 Documents. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, retriever_service.GetDocumentRequest): + request = retriever_service.GetDocumentRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_document] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_document( + self, + request: Optional[Union[retriever_service.UpdateDocumentRequest, dict]] = None, + *, + document: Optional[retriever.Document] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> retriever.Document: + r"""Updates a ``Document``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1alpha + + def sample_update_document(): + # Create a client + client = generativelanguage_v1alpha.RetrieverServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.UpdateDocumentRequest( + ) + + # Make the request + response = client.update_document(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.ai.generativelanguage_v1alpha.types.UpdateDocumentRequest, dict]): + The request object. Request to update a ``Document``. + document (google.ai.generativelanguage_v1alpha.types.Document): + Required. The ``Document`` to update. + This corresponds to the ``document`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. The list of fields to update. Currently, this + only supports updating ``display_name`` and + ``custom_metadata``. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.ai.generativelanguage_v1alpha.types.Document: + A Document is a collection of Chunks. + A Corpus can have a maximum of 10,000 Documents. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([document, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, retriever_service.UpdateDocumentRequest): + request = retriever_service.UpdateDocumentRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if document is not None: + request.document = document + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_document] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("document.name", request.document.name),) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_document( + self, + request: Optional[Union[retriever_service.DeleteDocumentRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Deletes a ``Document``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1alpha + + def sample_delete_document(): + # Create a client + client = generativelanguage_v1alpha.RetrieverServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.DeleteDocumentRequest( + name="name_value", + ) + + # Make the request + client.delete_document(request=request) + + Args: + request (Union[google.ai.generativelanguage_v1alpha.types.DeleteDocumentRequest, dict]): + The request object. Request to delete a ``Document``. + name (str): + Required. The resource name of the ``Document`` to + delete. Example: + ``corpora/my-corpus-123/documents/the-doc-abc`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, retriever_service.DeleteDocumentRequest): + request = retriever_service.DeleteDocumentRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_document] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def list_documents( + self, + request: Optional[Union[retriever_service.ListDocumentsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListDocumentsPager: + r"""Lists all ``Document``\ s in a ``Corpus``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1alpha + + def sample_list_documents(): + # Create a client + client = generativelanguage_v1alpha.RetrieverServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.ListDocumentsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_documents(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.ai.generativelanguage_v1alpha.types.ListDocumentsRequest, dict]): + The request object. Request for listing ``Document``\ s. + parent (str): + Required. The name of the ``Corpus`` containing + ``Document``\ s. Example: ``corpora/my-corpus-123`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.ai.generativelanguage_v1alpha.services.retriever_service.pagers.ListDocumentsPager: + Response from ListDocuments containing a paginated list of Documents. + The Documents are sorted by ascending + document.create_time. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, retriever_service.ListDocumentsRequest): + request = retriever_service.ListDocumentsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_documents] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListDocumentsPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def query_document( + self, + request: Optional[Union[retriever_service.QueryDocumentRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> retriever_service.QueryDocumentResponse: + r"""Performs semantic search over a ``Document``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1alpha + + def sample_query_document(): + # Create a client + client = generativelanguage_v1alpha.RetrieverServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.QueryDocumentRequest( + name="name_value", + query="query_value", + ) + + # Make the request + response = client.query_document(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.ai.generativelanguage_v1alpha.types.QueryDocumentRequest, dict]): + The request object. Request for querying a ``Document``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.ai.generativelanguage_v1alpha.types.QueryDocumentResponse: + Response from QueryDocument containing a list of + relevant chunks. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance(request, retriever_service.QueryDocumentRequest): + request = retriever_service.QueryDocumentRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.query_document] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_chunk( + self, + request: Optional[Union[retriever_service.CreateChunkRequest, dict]] = None, + *, + parent: Optional[str] = None, + chunk: Optional[retriever.Chunk] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> retriever.Chunk: + r"""Creates a ``Chunk``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1alpha + + def sample_create_chunk(): + # Create a client + client = generativelanguage_v1alpha.RetrieverServiceClient() + + # Initialize request argument(s) + chunk = generativelanguage_v1alpha.Chunk() + chunk.data.string_value = "string_value_value" + + request = generativelanguage_v1alpha.CreateChunkRequest( + parent="parent_value", + chunk=chunk, + ) + + # Make the request + response = client.create_chunk(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.ai.generativelanguage_v1alpha.types.CreateChunkRequest, dict]): + The request object. Request to create a ``Chunk``. + parent (str): + Required. The name of the ``Document`` where this + ``Chunk`` will be created. Example: + ``corpora/my-corpus-123/documents/the-doc-abc`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + chunk (google.ai.generativelanguage_v1alpha.types.Chunk): + Required. The ``Chunk`` to create. + This corresponds to the ``chunk`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.ai.generativelanguage_v1alpha.types.Chunk: + A Chunk is a subpart of a Document that is treated as an independent unit + for the purposes of vector representation and + storage. A Corpus can have a maximum of 1 million + Chunks. + + """ + # Create or coerce a protobuf request object. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, chunk]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, retriever_service.CreateChunkRequest): + request = retriever_service.CreateChunkRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if chunk is not None: + request.chunk = chunk + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_chunk] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def batch_create_chunks( + self, + request: Optional[ + Union[retriever_service.BatchCreateChunksRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> retriever_service.BatchCreateChunksResponse: + r"""Batch create ``Chunk``\ s. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1alpha + + def sample_batch_create_chunks(): + # Create a client + client = generativelanguage_v1alpha.RetrieverServiceClient() + + # Initialize request argument(s) + requests = generativelanguage_v1alpha.CreateChunkRequest() + requests.parent = "parent_value" + requests.chunk.data.string_value = "string_value_value" + + request = generativelanguage_v1alpha.BatchCreateChunksRequest( + requests=requests, + ) + + # Make the request + response = client.batch_create_chunks(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.ai.generativelanguage_v1alpha.types.BatchCreateChunksRequest, dict]): + The request object. Request to batch create ``Chunk``\ s. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.ai.generativelanguage_v1alpha.types.BatchCreateChunksResponse: + Response from BatchCreateChunks containing a list of + created Chunks. 
+ + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, retriever_service.BatchCreateChunksRequest): + request = retriever_service.BatchCreateChunksRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.batch_create_chunks] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_chunk( + self, + request: Optional[Union[retriever_service.GetChunkRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> retriever.Chunk: + r"""Gets information about a specific ``Chunk``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1alpha + + def sample_get_chunk(): + # Create a client + client = generativelanguage_v1alpha.RetrieverServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.GetChunkRequest( + name="name_value", + ) + + # Make the request + response = client.get_chunk(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.ai.generativelanguage_v1alpha.types.GetChunkRequest, dict]): + The request object. Request for getting information about a specific + ``Chunk``. + name (str): + Required. The name of the ``Chunk`` to retrieve. + Example: + ``corpora/my-corpus-123/documents/the-doc-abc/chunks/some-chunk`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.ai.generativelanguage_v1alpha.types.Chunk: + A Chunk is a subpart of a Document that is treated as an independent unit + for the purposes of vector representation and + storage. A Corpus can have a maximum of 1 million + Chunks. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, retriever_service.GetChunkRequest): + request = retriever_service.GetChunkRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_chunk] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_chunk( + self, + request: Optional[Union[retriever_service.UpdateChunkRequest, dict]] = None, + *, + chunk: Optional[retriever.Chunk] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> retriever.Chunk: + r"""Updates a ``Chunk``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1alpha + + def sample_update_chunk(): + # Create a client + client = generativelanguage_v1alpha.RetrieverServiceClient() + + # Initialize request argument(s) + chunk = generativelanguage_v1alpha.Chunk() + chunk.data.string_value = "string_value_value" + + request = generativelanguage_v1alpha.UpdateChunkRequest( + chunk=chunk, + ) + + # Make the request + response = client.update_chunk(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.ai.generativelanguage_v1alpha.types.UpdateChunkRequest, dict]): + The request object. Request to update a ``Chunk``. + chunk (google.ai.generativelanguage_v1alpha.types.Chunk): + Required. The ``Chunk`` to update. + This corresponds to the ``chunk`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. The list of fields to update. Currently, this + only supports updating ``custom_metadata`` and ``data``. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.ai.generativelanguage_v1alpha.types.Chunk: + A Chunk is a subpart of a Document that is treated as an independent unit + for the purposes of vector representation and + storage. A Corpus can have a maximum of 1 million + Chunks. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([chunk, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, retriever_service.UpdateChunkRequest): + request = retriever_service.UpdateChunkRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if chunk is not None: + request.chunk = chunk + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_chunk] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("chunk.name", request.chunk.name),) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def batch_update_chunks( + self, + request: Optional[ + Union[retriever_service.BatchUpdateChunksRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> retriever_service.BatchUpdateChunksResponse: + r"""Batch update ``Chunk``\ s. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1alpha + + def sample_batch_update_chunks(): + # Create a client + client = generativelanguage_v1alpha.RetrieverServiceClient() + + # Initialize request argument(s) + requests = generativelanguage_v1alpha.UpdateChunkRequest() + requests.chunk.data.string_value = "string_value_value" + + request = generativelanguage_v1alpha.BatchUpdateChunksRequest( + requests=requests, + ) + + # Make the request + response = client.batch_update_chunks(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.ai.generativelanguage_v1alpha.types.BatchUpdateChunksRequest, dict]): + The request object. Request to batch update ``Chunk``\ s. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.ai.generativelanguage_v1alpha.types.BatchUpdateChunksResponse: + Response from BatchUpdateChunks containing a list of + updated Chunks. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, retriever_service.BatchUpdateChunksRequest): + request = retriever_service.BatchUpdateChunksRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.batch_update_chunks] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_chunk( + self, + request: Optional[Union[retriever_service.DeleteChunkRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Deletes a ``Chunk``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1alpha + + def sample_delete_chunk(): + # Create a client + client = generativelanguage_v1alpha.RetrieverServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.DeleteChunkRequest( + name="name_value", + ) + + # Make the request + client.delete_chunk(request=request) + + Args: + request (Union[google.ai.generativelanguage_v1alpha.types.DeleteChunkRequest, dict]): + The request object. Request to delete a ``Chunk``. + name (str): + Required. The resource name of the ``Chunk`` to delete. + Example: + ``corpora/my-corpus-123/documents/the-doc-abc/chunks/some-chunk`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + # Create or coerce a protobuf request object. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, retriever_service.DeleteChunkRequest): + request = retriever_service.DeleteChunkRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_chunk] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def batch_delete_chunks( + self, + request: Optional[ + Union[retriever_service.BatchDeleteChunksRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Batch delete ``Chunk``\ s. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1alpha + + def sample_batch_delete_chunks(): + # Create a client + client = generativelanguage_v1alpha.RetrieverServiceClient() + + # Initialize request argument(s) + requests = generativelanguage_v1alpha.DeleteChunkRequest() + requests.name = "name_value" + + request = generativelanguage_v1alpha.BatchDeleteChunksRequest( + requests=requests, + ) + + # Make the request + client.batch_delete_chunks(request=request) + + Args: + request (Union[google.ai.generativelanguage_v1alpha.types.BatchDeleteChunksRequest, dict]): + The request object. Request to batch delete ``Chunk``\ s. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, retriever_service.BatchDeleteChunksRequest): + request = retriever_service.BatchDeleteChunksRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.batch_delete_chunks] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def list_chunks( + self, + request: Optional[Union[retriever_service.ListChunksRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListChunksPager: + r"""Lists all ``Chunk``\ s in a ``Document``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1alpha + + def sample_list_chunks(): + # Create a client + client = generativelanguage_v1alpha.RetrieverServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.ListChunksRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_chunks(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.ai.generativelanguage_v1alpha.types.ListChunksRequest, dict]): + The request object. Request for listing ``Chunk``\ s. + parent (str): + Required. The name of the ``Document`` containing + ``Chunk``\ s. Example: + ``corpora/my-corpus-123/documents/the-doc-abc`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.ai.generativelanguage_v1alpha.services.retriever_service.pagers.ListChunksPager: + Response from ListChunks containing a paginated list of Chunks. + The Chunks are sorted by ascending chunk.create_time. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance(request, retriever_service.ListChunksRequest): + request = retriever_service.ListChunksRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_chunks] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListChunksPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "RetrieverServiceClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_operations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e + + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("RetrieverServiceClient",) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/retriever_service/pagers.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/retriever_service/pagers.py new file mode 100644 index 000000000000..c2428ed2dca7 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/retriever_service/pagers.py @@ -0,0 +1,509 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
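The synchronous ``RetrieverServiceClient`` methods above hand paginated results to the pager classes defined in the new ``pagers.py`` module below. A minimal consumption sketch, assuming an existing corpus and document; the resource names are placeholders borrowed from the docstring examples, and credentials are resolved from the environment:

.. code-block:: python

    from google.ai import generativelanguage_v1alpha

    def sample_browse_corpus():
        # Credentials are discovered from the environment by default.
        client = generativelanguage_v1alpha.RetrieverServiceClient()

        # list_documents returns a ListDocumentsPager; iterating it fetches
        # additional pages transparently via next_page_token.
        documents = client.list_documents(
            request=generativelanguage_v1alpha.ListDocumentsRequest(
                parent="corpora/my-corpus-123",  # placeholder corpus name
            )
        )
        for document in documents:
            print(document.name)

        # list_chunks behaves the same way for the Chunks of one Document.
        chunks = client.list_chunks(
            request=generativelanguage_v1alpha.ListChunksRequest(
                parent="corpora/my-corpus-123/documents/the-doc-abc",  # placeholder
            )
        )
        for chunk in chunks:
            print(chunk.name)

        # query_document performs semantic search over a single Document.
        response = client.query_document(
            request=generativelanguage_v1alpha.QueryDocumentRequest(
                name="corpora/my-corpus-123/documents/the-doc-abc",  # placeholder
                query="example query",
            )
        )
        print(response)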
+# +from typing import ( + Any, + AsyncIterator, + Awaitable, + Callable, + Iterator, + Optional, + Sequence, + Tuple, + Union, +) + +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core import retry_async as retries_async + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] + OptionalAsyncRetry = Union[ + retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None + ] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore + +from google.ai.generativelanguage_v1alpha.types import retriever, retriever_service + + +class ListCorporaPager: + """A pager for iterating through ``list_corpora`` requests. + + This class thinly wraps an initial + :class:`google.ai.generativelanguage_v1alpha.types.ListCorporaResponse` object, and + provides an ``__iter__`` method to iterate through its + ``corpora`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListCorpora`` requests and continue to iterate + through the ``corpora`` field on the + corresponding responses. + + All the usual :class:`google.ai.generativelanguage_v1alpha.types.ListCorporaResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., retriever_service.ListCorporaResponse], + request: retriever_service.ListCorporaRequest, + response: retriever_service.ListCorporaResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.ai.generativelanguage_v1alpha.types.ListCorporaRequest): + The initial request object. + response (google.ai.generativelanguage_v1alpha.types.ListCorporaResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = retriever_service.ListCorporaRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[retriever_service.ListCorporaResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__(self) -> Iterator[retriever.Corpus]: + for page in self.pages: + yield from page.corpora + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListCorporaAsyncPager: + """A pager for iterating through ``list_corpora`` requests. + + This class thinly wraps an initial + :class:`google.ai.generativelanguage_v1alpha.types.ListCorporaResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``corpora`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListCorpora`` requests and continue to iterate + through the ``corpora`` field on the + corresponding responses. + + All the usual :class:`google.ai.generativelanguage_v1alpha.types.ListCorporaResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[retriever_service.ListCorporaResponse]], + request: retriever_service.ListCorporaRequest, + response: retriever_service.ListCorporaResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.ai.generativelanguage_v1alpha.types.ListCorporaRequest): + The initial request object. + response (google.ai.generativelanguage_v1alpha.types.ListCorporaResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = retriever_service.ListCorporaRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[retriever_service.ListCorporaResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __aiter__(self) -> AsyncIterator[retriever.Corpus]: + async def async_generator(): + async for page in self.pages: + for response in page.corpora: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListDocumentsPager: + """A pager for iterating through ``list_documents`` requests. + + This class thinly wraps an initial + :class:`google.ai.generativelanguage_v1alpha.types.ListDocumentsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``documents`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListDocuments`` requests and continue to iterate + through the ``documents`` field on the + corresponding responses. + + All the usual :class:`google.ai.generativelanguage_v1alpha.types.ListDocumentsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., retriever_service.ListDocumentsResponse], + request: retriever_service.ListDocumentsRequest, + response: retriever_service.ListDocumentsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.ai.generativelanguage_v1alpha.types.ListDocumentsRequest): + The initial request object. + response (google.ai.generativelanguage_v1alpha.types.ListDocumentsResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = retriever_service.ListDocumentsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[retriever_service.ListDocumentsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__(self) -> Iterator[retriever.Document]: + for page in self.pages: + yield from page.documents + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListDocumentsAsyncPager: + """A pager for iterating through ``list_documents`` requests. + + This class thinly wraps an initial + :class:`google.ai.generativelanguage_v1alpha.types.ListDocumentsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``documents`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListDocuments`` requests and continue to iterate + through the ``documents`` field on the + corresponding responses. + + All the usual :class:`google.ai.generativelanguage_v1alpha.types.ListDocumentsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[retriever_service.ListDocumentsResponse]], + request: retriever_service.ListDocumentsRequest, + response: retriever_service.ListDocumentsResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.ai.generativelanguage_v1alpha.types.ListDocumentsRequest): + The initial request object. + response (google.ai.generativelanguage_v1alpha.types.ListDocumentsResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = retriever_service.ListDocumentsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[retriever_service.ListDocumentsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __aiter__(self) -> AsyncIterator[retriever.Document]: + async def async_generator(): + async for page in self.pages: + for response in page.documents: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListChunksPager: + """A pager for iterating through ``list_chunks`` requests. + + This class thinly wraps an initial + :class:`google.ai.generativelanguage_v1alpha.types.ListChunksResponse` object, and + provides an ``__iter__`` method to iterate through its + ``chunks`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListChunks`` requests and continue to iterate + through the ``chunks`` field on the + corresponding responses. + + All the usual :class:`google.ai.generativelanguage_v1alpha.types.ListChunksResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., retriever_service.ListChunksResponse], + request: retriever_service.ListChunksRequest, + response: retriever_service.ListChunksResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.ai.generativelanguage_v1alpha.types.ListChunksRequest): + The initial request object. + response (google.ai.generativelanguage_v1alpha.types.ListChunksResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = retriever_service.ListChunksRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[retriever_service.ListChunksResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__(self) -> Iterator[retriever.Chunk]: + for page in self.pages: + yield from page.chunks + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListChunksAsyncPager: + """A pager for iterating through ``list_chunks`` requests. + + This class thinly wraps an initial + :class:`google.ai.generativelanguage_v1alpha.types.ListChunksResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``chunks`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListChunks`` requests and continue to iterate + through the ``chunks`` field on the + corresponding responses. + + All the usual :class:`google.ai.generativelanguage_v1alpha.types.ListChunksResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[retriever_service.ListChunksResponse]], + request: retriever_service.ListChunksRequest, + response: retriever_service.ListChunksResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.ai.generativelanguage_v1alpha.types.ListChunksRequest): + The initial request object. + response (google.ai.generativelanguage_v1alpha.types.ListChunksResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = retriever_service.ListChunksRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[retriever_service.ListChunksResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __aiter__(self) -> AsyncIterator[retriever.Chunk]: + async def async_generator(): + async for page in self.pages: + for response in page.chunks: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/retriever_service/transports/README.rst b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/retriever_service/transports/README.rst new file mode 100644 index 000000000000..5f241959ab33 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/retriever_service/transports/README.rst @@ -0,0 +1,9 @@ + +transport inheritance structure +_______________________________ + +`RetrieverServiceTransport` is the ABC for all transports. +- public child `RetrieverServiceGrpcTransport` for sync gRPC transport (defined in `grpc.py`). +- public child `RetrieverServiceGrpcAsyncIOTransport` for async gRPC transport (defined in `grpc_asyncio.py`). +- private child `_BaseRetrieverServiceRestTransport` for base REST transport with inner classes `_BaseMETHOD` (defined in `rest_base.py`). +- public child `RetrieverServiceRestTransport` for sync REST transport with inner classes `METHOD` derived from the parent's corresponding `_BaseMETHOD` classes (defined in `rest.py`). diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/retriever_service/transports/__init__.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/retriever_service/transports/__init__.py new file mode 100644 index 000000000000..81046f334905 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/retriever_service/transports/__init__.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import RetrieverServiceTransport +from .grpc import RetrieverServiceGrpcTransport +from .grpc_asyncio import RetrieverServiceGrpcAsyncIOTransport +from .rest import RetrieverServiceRestInterceptor, RetrieverServiceRestTransport + +# Compile a registry of transports. 
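+# The registry maps a transport name ("grpc", "grpc_asyncio", or "rest") to its
+# concrete class, so a transport can be selected by label, for example when a
+# client is constructed with transport="rest".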
+_transport_registry = OrderedDict() # type: Dict[str, Type[RetrieverServiceTransport]] +_transport_registry["grpc"] = RetrieverServiceGrpcTransport +_transport_registry["grpc_asyncio"] = RetrieverServiceGrpcAsyncIOTransport +_transport_registry["rest"] = RetrieverServiceRestTransport + +__all__ = ( + "RetrieverServiceTransport", + "RetrieverServiceGrpcTransport", + "RetrieverServiceGrpcAsyncIOTransport", + "RetrieverServiceRestTransport", + "RetrieverServiceRestInterceptor", +) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/retriever_service/transports/base.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/retriever_service/transports/base.py new file mode 100644 index 000000000000..92bec7927842 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/retriever_service/transports/base.py @@ -0,0 +1,481 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account # type: ignore +from google.protobuf import empty_pb2 # type: ignore + +from google.ai.generativelanguage_v1alpha import gapic_version as package_version +from google.ai.generativelanguage_v1alpha.types import retriever, retriever_service + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class RetrieverServiceTransport(abc.ABC): + """Abstract transport class for RetrieverService.""" + + AUTH_SCOPES = () + + DEFAULT_HOST: str = "generativelanguage.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'generativelanguage.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. 
+ This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + if not hasattr(self, "_ignore_credentials"): + self._ignore_credentials: bool = False + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None and not self._ignore_credentials: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + @property + def host(self): + return self._host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. 
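The base transport constructor above resolves credentials in a fixed order: reject the case where both `credentials` and `credentials_file` are passed, load from the file if one is given, otherwise fall back to Application Default Credentials. A standalone sketch of that ordering using `google.auth`; the `default_scopes` handling from the diff is omitted here for brevity:

```python
from typing import Optional, Sequence

import google.auth
from google.api_core import exceptions as core_exceptions
from google.auth import credentials as ga_credentials


def resolve_credentials(
    credentials: Optional[ga_credentials.Credentials] = None,
    credentials_file: Optional[str] = None,
    scopes: Optional[Sequence[str]] = None,
) -> ga_credentials.Credentials:
    # The two sources are mutually exclusive, as in the transport base class.
    if credentials and credentials_file:
        raise core_exceptions.DuplicateCredentialArgs(
            "'credentials_file' and 'credentials' are mutually exclusive"
        )
    if credentials_file is not None:
        credentials, _ = google.auth.load_credentials_from_file(
            credentials_file, scopes=scopes
        )
    elif credentials is None:
        # Fall back to Application Default Credentials from the environment.
        credentials, _ = google.auth.default(scopes=scopes)
    return credentials
```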
+ self._wrapped_methods = { + self.create_corpus: gapic_v1.method.wrap_method( + self.create_corpus, + default_timeout=None, + client_info=client_info, + ), + self.get_corpus: gapic_v1.method.wrap_method( + self.get_corpus, + default_timeout=None, + client_info=client_info, + ), + self.update_corpus: gapic_v1.method.wrap_method( + self.update_corpus, + default_timeout=None, + client_info=client_info, + ), + self.delete_corpus: gapic_v1.method.wrap_method( + self.delete_corpus, + default_timeout=None, + client_info=client_info, + ), + self.list_corpora: gapic_v1.method.wrap_method( + self.list_corpora, + default_timeout=None, + client_info=client_info, + ), + self.query_corpus: gapic_v1.method.wrap_method( + self.query_corpus, + default_timeout=None, + client_info=client_info, + ), + self.create_document: gapic_v1.method.wrap_method( + self.create_document, + default_timeout=None, + client_info=client_info, + ), + self.get_document: gapic_v1.method.wrap_method( + self.get_document, + default_timeout=None, + client_info=client_info, + ), + self.update_document: gapic_v1.method.wrap_method( + self.update_document, + default_timeout=None, + client_info=client_info, + ), + self.delete_document: gapic_v1.method.wrap_method( + self.delete_document, + default_timeout=None, + client_info=client_info, + ), + self.list_documents: gapic_v1.method.wrap_method( + self.list_documents, + default_timeout=None, + client_info=client_info, + ), + self.query_document: gapic_v1.method.wrap_method( + self.query_document, + default_timeout=None, + client_info=client_info, + ), + self.create_chunk: gapic_v1.method.wrap_method( + self.create_chunk, + default_timeout=None, + client_info=client_info, + ), + self.batch_create_chunks: gapic_v1.method.wrap_method( + self.batch_create_chunks, + default_timeout=None, + client_info=client_info, + ), + self.get_chunk: gapic_v1.method.wrap_method( + self.get_chunk, + default_timeout=None, + client_info=client_info, + ), + self.update_chunk: gapic_v1.method.wrap_method( + self.update_chunk, + default_timeout=None, + client_info=client_info, + ), + self.batch_update_chunks: gapic_v1.method.wrap_method( + self.batch_update_chunks, + default_timeout=None, + client_info=client_info, + ), + self.delete_chunk: gapic_v1.method.wrap_method( + self.delete_chunk, + default_timeout=None, + client_info=client_info, + ), + self.batch_delete_chunks: gapic_v1.method.wrap_method( + self.batch_delete_chunks, + default_timeout=None, + client_info=client_info, + ), + self.list_chunks: gapic_v1.method.wrap_method( + self.list_chunks, + default_timeout=None, + client_info=client_info, + ), + self.get_operation: gapic_v1.method.wrap_method( + self.get_operation, + default_timeout=None, + client_info=client_info, + ), + self.list_operations: gapic_v1.method.wrap_method( + self.list_operations, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
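`_prep_wrapped_messages` above wraps every RPC with `gapic_v1.method.wrap_method`, which layers default timeout/retry handling and the `client_info` user-agent metadata onto a bare callable. A small sketch of that wrapping applied to an ordinary function; the `echo` stand-in and the 30-second default are assumptions for illustration (the diff itself uses `default_timeout=None`):

```python
from google.api_core import gapic_v1


def echo(request, *, timeout=None, metadata=()):
    """Stand-in for a transport stub callable."""
    return {"request": request, "timeout": timeout, "metadata": list(metadata)}


wrapped_echo = gapic_v1.method.wrap_method(
    echo,
    default_timeout=30.0,  # assumed default for the sketch
    client_info=gapic_v1.client_info.ClientInfo(),
)

# Per-call arguments override the defaults; the client_info user-agent header
# is appended to the outgoing metadata before ``echo`` is invoked.
print(wrapped_echo({"name": "corpora/example"}, timeout=5.0))
```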
+ """ + raise NotImplementedError() + + @property + def create_corpus( + self, + ) -> Callable[ + [retriever_service.CreateCorpusRequest], + Union[retriever.Corpus, Awaitable[retriever.Corpus]], + ]: + raise NotImplementedError() + + @property + def get_corpus( + self, + ) -> Callable[ + [retriever_service.GetCorpusRequest], + Union[retriever.Corpus, Awaitable[retriever.Corpus]], + ]: + raise NotImplementedError() + + @property + def update_corpus( + self, + ) -> Callable[ + [retriever_service.UpdateCorpusRequest], + Union[retriever.Corpus, Awaitable[retriever.Corpus]], + ]: + raise NotImplementedError() + + @property + def delete_corpus( + self, + ) -> Callable[ + [retriever_service.DeleteCorpusRequest], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], + ]: + raise NotImplementedError() + + @property + def list_corpora( + self, + ) -> Callable[ + [retriever_service.ListCorporaRequest], + Union[ + retriever_service.ListCorporaResponse, + Awaitable[retriever_service.ListCorporaResponse], + ], + ]: + raise NotImplementedError() + + @property + def query_corpus( + self, + ) -> Callable[ + [retriever_service.QueryCorpusRequest], + Union[ + retriever_service.QueryCorpusResponse, + Awaitable[retriever_service.QueryCorpusResponse], + ], + ]: + raise NotImplementedError() + + @property + def create_document( + self, + ) -> Callable[ + [retriever_service.CreateDocumentRequest], + Union[retriever.Document, Awaitable[retriever.Document]], + ]: + raise NotImplementedError() + + @property + def get_document( + self, + ) -> Callable[ + [retriever_service.GetDocumentRequest], + Union[retriever.Document, Awaitable[retriever.Document]], + ]: + raise NotImplementedError() + + @property + def update_document( + self, + ) -> Callable[ + [retriever_service.UpdateDocumentRequest], + Union[retriever.Document, Awaitable[retriever.Document]], + ]: + raise NotImplementedError() + + @property + def delete_document( + self, + ) -> Callable[ + [retriever_service.DeleteDocumentRequest], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], + ]: + raise NotImplementedError() + + @property + def list_documents( + self, + ) -> Callable[ + [retriever_service.ListDocumentsRequest], + Union[ + retriever_service.ListDocumentsResponse, + Awaitable[retriever_service.ListDocumentsResponse], + ], + ]: + raise NotImplementedError() + + @property + def query_document( + self, + ) -> Callable[ + [retriever_service.QueryDocumentRequest], + Union[ + retriever_service.QueryDocumentResponse, + Awaitable[retriever_service.QueryDocumentResponse], + ], + ]: + raise NotImplementedError() + + @property + def create_chunk( + self, + ) -> Callable[ + [retriever_service.CreateChunkRequest], + Union[retriever.Chunk, Awaitable[retriever.Chunk]], + ]: + raise NotImplementedError() + + @property + def batch_create_chunks( + self, + ) -> Callable[ + [retriever_service.BatchCreateChunksRequest], + Union[ + retriever_service.BatchCreateChunksResponse, + Awaitable[retriever_service.BatchCreateChunksResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_chunk( + self, + ) -> Callable[ + [retriever_service.GetChunkRequest], + Union[retriever.Chunk, Awaitable[retriever.Chunk]], + ]: + raise NotImplementedError() + + @property + def update_chunk( + self, + ) -> Callable[ + [retriever_service.UpdateChunkRequest], + Union[retriever.Chunk, Awaitable[retriever.Chunk]], + ]: + raise NotImplementedError() + + @property + def batch_update_chunks( + self, + ) -> Callable[ + [retriever_service.BatchUpdateChunksRequest], + Union[ + 
retriever_service.BatchUpdateChunksResponse, + Awaitable[retriever_service.BatchUpdateChunksResponse], + ], + ]: + raise NotImplementedError() + + @property + def delete_chunk( + self, + ) -> Callable[ + [retriever_service.DeleteChunkRequest], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], + ]: + raise NotImplementedError() + + @property + def batch_delete_chunks( + self, + ) -> Callable[ + [retriever_service.BatchDeleteChunksRequest], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], + ]: + raise NotImplementedError() + + @property + def list_chunks( + self, + ) -> Callable[ + [retriever_service.ListChunksRequest], + Union[ + retriever_service.ListChunksResponse, + Awaitable[retriever_service.ListChunksResponse], + ], + ]: + raise NotImplementedError() + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], + Union[ + operations_pb2.ListOperationsResponse, + Awaitable[operations_pb2.ListOperationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_operation( + self, + ) -> Callable[ + [operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("RetrieverServiceTransport",) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/retriever_service/transports/grpc.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/retriever_service/transports/grpc.py new file mode 100644 index 000000000000..5467ec9eb439 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/retriever_service/transports/grpc.py @@ -0,0 +1,908 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
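`base.py`, which ends just above with `__all__ = ("RetrieverServiceTransport",)`, defines each RPC as a read-only property that returns a callable and raises `NotImplementedError` until a concrete transport overrides it. A toy sketch of that contract with a made-up `PingTransport`, showing how a concrete subclass satisfies it by returning a real callable:

```python
import abc
from typing import Callable


class PingTransport(abc.ABC):
    """Minimal stand-in for the generated transport ABC."""

    @property
    def ping(self) -> Callable[[str], str]:
        raise NotImplementedError()


class InProcessPingTransport(PingTransport):
    @property
    def ping(self) -> Callable[[str], str]:
        # A real transport would return a cached gRPC or REST stub here.
        return lambda name: f"pong: {name}"


transport = InProcessPingTransport()
print(transport.ping("corpora/example"))  # -> pong: corpora/example
```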
+# +import json +import logging as std_logging +import pickle +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf.json_format import MessageToJson +import google.protobuf.message +import grpc # type: ignore +import proto # type: ignore + +from google.ai.generativelanguage_v1alpha.types import retriever, retriever_service + +from .base import DEFAULT_CLIENT_INFO, RetrieverServiceTransport + +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + + +class _LoggingClientInterceptor(grpc.UnaryUnaryClientInterceptor): # pragma: NO COVER + def intercept_unary_unary(self, continuation, client_call_details, request): + logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ) + if logging_enabled: # pragma: NO COVER + request_metadata = client_call_details.metadata + if isinstance(request, proto.Message): + request_payload = type(request).to_json(request) + elif isinstance(request, google.protobuf.message.Message): + request_payload = MessageToJson(request) + else: + request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" + + request_metadata = { + key: value.decode("utf-8") if isinstance(value, bytes) else value + for key, value in request_metadata + } + grpc_request = { + "payload": request_payload, + "requestMethod": "grpc", + "metadata": dict(request_metadata), + } + _LOGGER.debug( + f"Sending request for {client_call_details.method}", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.RetrieverService", + "rpcName": client_call_details.method, + "request": grpc_request, + "metadata": grpc_request["metadata"], + }, + ) + + response = continuation(client_call_details, request) + if logging_enabled: # pragma: NO COVER + response_metadata = response.trailing_metadata() + # Convert gRPC metadata `` to list of tuples + metadata = ( + dict([(k, str(v)) for k, v in response_metadata]) + if response_metadata + else None + ) + result = response.result() + if isinstance(result, proto.Message): + response_payload = type(result).to_json(result) + elif isinstance(result, google.protobuf.message.Message): + response_payload = MessageToJson(result) + else: + response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" + grpc_response = { + "payload": response_payload, + "metadata": metadata, + "status": "OK", + } + _LOGGER.debug( + f"Received response for {client_call_details.method}.", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.RetrieverService", + "rpcName": client_call_details.method, + "response": grpc_response, + "metadata": grpc_response["metadata"], + }, + ) + return response + + +class RetrieverServiceGrpcTransport(RetrieverServiceTransport): + """gRPC backend transport for RetrieverService. + + An API for semantic search over a corpus of user uploaded + content. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. 
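`_LoggingClientInterceptor` above implements `grpc.UnaryUnaryClientInterceptor` and is later attached with `grpc.intercept_channel`, so every unary RPC can be logged at DEBUG level. A stripped-down sketch of the same mechanism; the target address is a placeholder:

```python
import logging

import grpc


class DebugLoggingInterceptor(grpc.UnaryUnaryClientInterceptor):
    """Log every unary-unary RPC flowing through the channel."""

    def intercept_unary_unary(self, continuation, client_call_details, request):
        logging.debug("Sending request for %s", client_call_details.method)
        response = continuation(client_call_details, request)
        logging.debug("Received response for %s", client_call_details.method)
        return response


# Wrap an existing channel; all stubs created from ``logged_channel`` are logged.
channel = grpc.insecure_channel("localhost:50051")  # placeholder target
logged_channel = grpc.intercept_channel(channel, DebugLoggingInterceptor())
```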
+ + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "generativelanguage.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'generativelanguage.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if a ``channel`` instance is provided. + channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. 
+ + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, grpc.Channel): + # Ignore credentials if a channel was passed. + credentials = None + self._ignore_credentials = True + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + self._interceptor = _LoggingClientInterceptor() + self._logged_channel = grpc.intercept_channel( + self._grpc_channel, self._interceptor + ) + + # Wrap messages. This must be done after self._logged_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "generativelanguage.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. 
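The `create_channel` classmethod documented here (its body follows below) delegates to `google.api_core.grpc_helpers.create_channel`, which merges credentials, scopes, and extra channel options. A hedged sketch of calling that helper directly; it assumes Application Default Credentials are available, and the unlimited message-size options mirror the ones the constructor above passes:

```python
from google.api_core import grpc_helpers

channel = grpc_helpers.create_channel(
    "generativelanguage.googleapis.com:443",
    options=[
        # Lift gRPC's default 4 MiB message-size limits, as the constructor does.
        ("grpc.max_send_message_length", -1),
        ("grpc.max_receive_message_length", -1),
    ],
)
```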
+ scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service.""" + return self._grpc_channel + + @property + def create_corpus( + self, + ) -> Callable[[retriever_service.CreateCorpusRequest], retriever.Corpus]: + r"""Return a callable for the create corpus method over gRPC. + + Creates an empty ``Corpus``. + + Returns: + Callable[[~.CreateCorpusRequest], + ~.Corpus]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_corpus" not in self._stubs: + self._stubs["create_corpus"] = self._logged_channel.unary_unary( + "/google.ai.generativelanguage.v1alpha.RetrieverService/CreateCorpus", + request_serializer=retriever_service.CreateCorpusRequest.serialize, + response_deserializer=retriever.Corpus.deserialize, + ) + return self._stubs["create_corpus"] + + @property + def get_corpus( + self, + ) -> Callable[[retriever_service.GetCorpusRequest], retriever.Corpus]: + r"""Return a callable for the get corpus method over gRPC. + + Gets information about a specific ``Corpus``. + + Returns: + Callable[[~.GetCorpusRequest], + ~.Corpus]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_corpus" not in self._stubs: + self._stubs["get_corpus"] = self._logged_channel.unary_unary( + "/google.ai.generativelanguage.v1alpha.RetrieverService/GetCorpus", + request_serializer=retriever_service.GetCorpusRequest.serialize, + response_deserializer=retriever.Corpus.deserialize, + ) + return self._stubs["get_corpus"] + + @property + def update_corpus( + self, + ) -> Callable[[retriever_service.UpdateCorpusRequest], retriever.Corpus]: + r"""Return a callable for the update corpus method over gRPC. + + Updates a ``Corpus``. + + Returns: + Callable[[~.UpdateCorpusRequest], + ~.Corpus]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "update_corpus" not in self._stubs: + self._stubs["update_corpus"] = self._logged_channel.unary_unary( + "/google.ai.generativelanguage.v1alpha.RetrieverService/UpdateCorpus", + request_serializer=retriever_service.UpdateCorpusRequest.serialize, + response_deserializer=retriever.Corpus.deserialize, + ) + return self._stubs["update_corpus"] + + @property + def delete_corpus( + self, + ) -> Callable[[retriever_service.DeleteCorpusRequest], empty_pb2.Empty]: + r"""Return a callable for the delete corpus method over gRPC. + + Deletes a ``Corpus``. + + Returns: + Callable[[~.DeleteCorpusRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_corpus" not in self._stubs: + self._stubs["delete_corpus"] = self._logged_channel.unary_unary( + "/google.ai.generativelanguage.v1alpha.RetrieverService/DeleteCorpus", + request_serializer=retriever_service.DeleteCorpusRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_corpus"] + + @property + def list_corpora( + self, + ) -> Callable[ + [retriever_service.ListCorporaRequest], retriever_service.ListCorporaResponse + ]: + r"""Return a callable for the list corpora method over gRPC. + + Lists all ``Corpora`` owned by the user. + + Returns: + Callable[[~.ListCorporaRequest], + ~.ListCorporaResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_corpora" not in self._stubs: + self._stubs["list_corpora"] = self._logged_channel.unary_unary( + "/google.ai.generativelanguage.v1alpha.RetrieverService/ListCorpora", + request_serializer=retriever_service.ListCorporaRequest.serialize, + response_deserializer=retriever_service.ListCorporaResponse.deserialize, + ) + return self._stubs["list_corpora"] + + @property + def query_corpus( + self, + ) -> Callable[ + [retriever_service.QueryCorpusRequest], retriever_service.QueryCorpusResponse + ]: + r"""Return a callable for the query corpus method over gRPC. + + Performs semantic search over a ``Corpus``. + + Returns: + Callable[[~.QueryCorpusRequest], + ~.QueryCorpusResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "query_corpus" not in self._stubs: + self._stubs["query_corpus"] = self._logged_channel.unary_unary( + "/google.ai.generativelanguage.v1alpha.RetrieverService/QueryCorpus", + request_serializer=retriever_service.QueryCorpusRequest.serialize, + response_deserializer=retriever_service.QueryCorpusResponse.deserialize, + ) + return self._stubs["query_corpus"] + + @property + def create_document( + self, + ) -> Callable[[retriever_service.CreateDocumentRequest], retriever.Document]: + r"""Return a callable for the create document method over gRPC. + + Creates an empty ``Document``. + + Returns: + Callable[[~.CreateDocumentRequest], + ~.Document]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_document" not in self._stubs: + self._stubs["create_document"] = self._logged_channel.unary_unary( + "/google.ai.generativelanguage.v1alpha.RetrieverService/CreateDocument", + request_serializer=retriever_service.CreateDocumentRequest.serialize, + response_deserializer=retriever.Document.deserialize, + ) + return self._stubs["create_document"] + + @property + def get_document( + self, + ) -> Callable[[retriever_service.GetDocumentRequest], retriever.Document]: + r"""Return a callable for the get document method over gRPC. + + Gets information about a specific ``Document``. + + Returns: + Callable[[~.GetDocumentRequest], + ~.Document]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_document" not in self._stubs: + self._stubs["get_document"] = self._logged_channel.unary_unary( + "/google.ai.generativelanguage.v1alpha.RetrieverService/GetDocument", + request_serializer=retriever_service.GetDocumentRequest.serialize, + response_deserializer=retriever.Document.deserialize, + ) + return self._stubs["get_document"] + + @property + def update_document( + self, + ) -> Callable[[retriever_service.UpdateDocumentRequest], retriever.Document]: + r"""Return a callable for the update document method over gRPC. + + Updates a ``Document``. + + Returns: + Callable[[~.UpdateDocumentRequest], + ~.Document]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_document" not in self._stubs: + self._stubs["update_document"] = self._logged_channel.unary_unary( + "/google.ai.generativelanguage.v1alpha.RetrieverService/UpdateDocument", + request_serializer=retriever_service.UpdateDocumentRequest.serialize, + response_deserializer=retriever.Document.deserialize, + ) + return self._stubs["update_document"] + + @property + def delete_document( + self, + ) -> Callable[[retriever_service.DeleteDocumentRequest], empty_pb2.Empty]: + r"""Return a callable for the delete document method over gRPC. + + Deletes a ``Document``. + + Returns: + Callable[[~.DeleteDocumentRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_document" not in self._stubs: + self._stubs["delete_document"] = self._logged_channel.unary_unary( + "/google.ai.generativelanguage.v1alpha.RetrieverService/DeleteDocument", + request_serializer=retriever_service.DeleteDocumentRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_document"] + + @property + def list_documents( + self, + ) -> Callable[ + [retriever_service.ListDocumentsRequest], + retriever_service.ListDocumentsResponse, + ]: + r"""Return a callable for the list documents method over gRPC. 
+ + Lists all ``Document``\ s in a ``Corpus``. + + Returns: + Callable[[~.ListDocumentsRequest], + ~.ListDocumentsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_documents" not in self._stubs: + self._stubs["list_documents"] = self._logged_channel.unary_unary( + "/google.ai.generativelanguage.v1alpha.RetrieverService/ListDocuments", + request_serializer=retriever_service.ListDocumentsRequest.serialize, + response_deserializer=retriever_service.ListDocumentsResponse.deserialize, + ) + return self._stubs["list_documents"] + + @property + def query_document( + self, + ) -> Callable[ + [retriever_service.QueryDocumentRequest], + retriever_service.QueryDocumentResponse, + ]: + r"""Return a callable for the query document method over gRPC. + + Performs semantic search over a ``Document``. + + Returns: + Callable[[~.QueryDocumentRequest], + ~.QueryDocumentResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "query_document" not in self._stubs: + self._stubs["query_document"] = self._logged_channel.unary_unary( + "/google.ai.generativelanguage.v1alpha.RetrieverService/QueryDocument", + request_serializer=retriever_service.QueryDocumentRequest.serialize, + response_deserializer=retriever_service.QueryDocumentResponse.deserialize, + ) + return self._stubs["query_document"] + + @property + def create_chunk( + self, + ) -> Callable[[retriever_service.CreateChunkRequest], retriever.Chunk]: + r"""Return a callable for the create chunk method over gRPC. + + Creates a ``Chunk``. + + Returns: + Callable[[~.CreateChunkRequest], + ~.Chunk]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_chunk" not in self._stubs: + self._stubs["create_chunk"] = self._logged_channel.unary_unary( + "/google.ai.generativelanguage.v1alpha.RetrieverService/CreateChunk", + request_serializer=retriever_service.CreateChunkRequest.serialize, + response_deserializer=retriever.Chunk.deserialize, + ) + return self._stubs["create_chunk"] + + @property + def batch_create_chunks( + self, + ) -> Callable[ + [retriever_service.BatchCreateChunksRequest], + retriever_service.BatchCreateChunksResponse, + ]: + r"""Return a callable for the batch create chunks method over gRPC. + + Batch create ``Chunk``\ s. + + Returns: + Callable[[~.BatchCreateChunksRequest], + ~.BatchCreateChunksResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "batch_create_chunks" not in self._stubs: + self._stubs["batch_create_chunks"] = self._logged_channel.unary_unary( + "/google.ai.generativelanguage.v1alpha.RetrieverService/BatchCreateChunks", + request_serializer=retriever_service.BatchCreateChunksRequest.serialize, + response_deserializer=retriever_service.BatchCreateChunksResponse.deserialize, + ) + return self._stubs["batch_create_chunks"] + + @property + def get_chunk( + self, + ) -> Callable[[retriever_service.GetChunkRequest], retriever.Chunk]: + r"""Return a callable for the get chunk method over gRPC. + + Gets information about a specific ``Chunk``. + + Returns: + Callable[[~.GetChunkRequest], + ~.Chunk]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_chunk" not in self._stubs: + self._stubs["get_chunk"] = self._logged_channel.unary_unary( + "/google.ai.generativelanguage.v1alpha.RetrieverService/GetChunk", + request_serializer=retriever_service.GetChunkRequest.serialize, + response_deserializer=retriever.Chunk.deserialize, + ) + return self._stubs["get_chunk"] + + @property + def update_chunk( + self, + ) -> Callable[[retriever_service.UpdateChunkRequest], retriever.Chunk]: + r"""Return a callable for the update chunk method over gRPC. + + Updates a ``Chunk``. + + Returns: + Callable[[~.UpdateChunkRequest], + ~.Chunk]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_chunk" not in self._stubs: + self._stubs["update_chunk"] = self._logged_channel.unary_unary( + "/google.ai.generativelanguage.v1alpha.RetrieverService/UpdateChunk", + request_serializer=retriever_service.UpdateChunkRequest.serialize, + response_deserializer=retriever.Chunk.deserialize, + ) + return self._stubs["update_chunk"] + + @property + def batch_update_chunks( + self, + ) -> Callable[ + [retriever_service.BatchUpdateChunksRequest], + retriever_service.BatchUpdateChunksResponse, + ]: + r"""Return a callable for the batch update chunks method over gRPC. + + Batch update ``Chunk``\ s. + + Returns: + Callable[[~.BatchUpdateChunksRequest], + ~.BatchUpdateChunksResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "batch_update_chunks" not in self._stubs: + self._stubs["batch_update_chunks"] = self._logged_channel.unary_unary( + "/google.ai.generativelanguage.v1alpha.RetrieverService/BatchUpdateChunks", + request_serializer=retriever_service.BatchUpdateChunksRequest.serialize, + response_deserializer=retriever_service.BatchUpdateChunksResponse.deserialize, + ) + return self._stubs["batch_update_chunks"] + + @property + def delete_chunk( + self, + ) -> Callable[[retriever_service.DeleteChunkRequest], empty_pb2.Empty]: + r"""Return a callable for the delete chunk method over gRPC. + + Deletes a ``Chunk``. + + Returns: + Callable[[~.DeleteChunkRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_chunk" not in self._stubs: + self._stubs["delete_chunk"] = self._logged_channel.unary_unary( + "/google.ai.generativelanguage.v1alpha.RetrieverService/DeleteChunk", + request_serializer=retriever_service.DeleteChunkRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_chunk"] + + @property + def batch_delete_chunks( + self, + ) -> Callable[[retriever_service.BatchDeleteChunksRequest], empty_pb2.Empty]: + r"""Return a callable for the batch delete chunks method over gRPC. + + Batch delete ``Chunk``\ s. + + Returns: + Callable[[~.BatchDeleteChunksRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "batch_delete_chunks" not in self._stubs: + self._stubs["batch_delete_chunks"] = self._logged_channel.unary_unary( + "/google.ai.generativelanguage.v1alpha.RetrieverService/BatchDeleteChunks", + request_serializer=retriever_service.BatchDeleteChunksRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["batch_delete_chunks"] + + @property + def list_chunks( + self, + ) -> Callable[ + [retriever_service.ListChunksRequest], retriever_service.ListChunksResponse + ]: + r"""Return a callable for the list chunks method over gRPC. + + Lists all ``Chunk``\ s in a ``Document``. + + Returns: + Callable[[~.ListChunksRequest], + ~.ListChunksResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_chunks" not in self._stubs: + self._stubs["list_chunks"] = self._logged_channel.unary_unary( + "/google.ai.generativelanguage.v1alpha.RetrieverService/ListChunks", + request_serializer=retriever_service.ListChunksRequest.serialize, + response_deserializer=retriever_service.ListChunksResponse.deserialize, + ) + return self._stubs["list_chunks"] + + def close(self): + self._logged_channel.close() + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("RetrieverServiceGrpcTransport",) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/retriever_service/transports/grpc_asyncio.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/retriever_service/transports/grpc_asyncio.py new file mode 100644 index 000000000000..19135680af8d --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/retriever_service/transports/grpc_asyncio.py @@ -0,0 +1,1048 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import inspect +import json +import logging as std_logging +import pickle +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, grpc_helpers_async +from google.api_core import retry_async as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf.json_format import MessageToJson +import google.protobuf.message +import grpc # type: ignore +from grpc.experimental import aio # type: ignore +import proto # type: ignore + +from google.ai.generativelanguage_v1alpha.types import retriever, retriever_service + +from .base import DEFAULT_CLIENT_INFO, RetrieverServiceTransport +from .grpc import RetrieverServiceGrpcTransport + +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + + +class _LoggingClientAIOInterceptor( + grpc.aio.UnaryUnaryClientInterceptor +): # pragma: NO COVER + async def intercept_unary_unary(self, continuation, client_call_details, request): + logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ) + if logging_enabled: # pragma: NO COVER + request_metadata = client_call_details.metadata + if isinstance(request, proto.Message): + request_payload = type(request).to_json(request) + elif isinstance(request, google.protobuf.message.Message): + request_payload = MessageToJson(request) + else: + request_payload = f"{type(request).__name__}: 
{pickle.dumps(request)}" + + request_metadata = { + key: value.decode("utf-8") if isinstance(value, bytes) else value + for key, value in request_metadata + } + grpc_request = { + "payload": request_payload, + "requestMethod": "grpc", + "metadata": dict(request_metadata), + } + _LOGGER.debug( + f"Sending request for {client_call_details.method}", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.RetrieverService", + "rpcName": str(client_call_details.method), + "request": grpc_request, + "metadata": grpc_request["metadata"], + }, + ) + response = await continuation(client_call_details, request) + if logging_enabled: # pragma: NO COVER + response_metadata = await response.trailing_metadata() + # Convert gRPC metadata `` to list of tuples + metadata = ( + dict([(k, str(v)) for k, v in response_metadata]) + if response_metadata + else None + ) + result = await response + if isinstance(result, proto.Message): + response_payload = type(result).to_json(result) + elif isinstance(result, google.protobuf.message.Message): + response_payload = MessageToJson(result) + else: + response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" + grpc_response = { + "payload": response_payload, + "metadata": metadata, + "status": "OK", + } + _LOGGER.debug( + f"Received response to rpc {client_call_details.method}.", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.RetrieverService", + "rpcName": str(client_call_details.method), + "response": grpc_response, + "metadata": grpc_response["metadata"], + }, + ) + return response + + +class RetrieverServiceGrpcAsyncIOTransport(RetrieverServiceTransport): + """gRPC AsyncIO backend transport for RetrieverService. + + An API for semantic search over a corpus of user uploaded + content. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "generativelanguage.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. 
+ """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "generativelanguage.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'generativelanguage.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. 
+ Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, aio.Channel): + # Ignore credentials if a channel was passed. + credentials = None + self._ignore_credentials = True + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + self._interceptor = _LoggingClientAIOInterceptor() + self._grpc_channel._unary_unary_interceptors.append(self._interceptor) + self._logged_channel = self._grpc_channel + self._wrap_with_kind = ( + "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters + ) + # Wrap messages. This must be done after self._logged_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def create_corpus( + self, + ) -> Callable[[retriever_service.CreateCorpusRequest], Awaitable[retriever.Corpus]]: + r"""Return a callable for the create corpus method over gRPC. + + Creates an empty ``Corpus``. 
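One detail in the async constructor above: it inspects `inspect.signature(gapic_v1.method_async.wrap_method).parameters` to learn whether the installed `google-api-core` accepts a `kind` argument before passing it. A general sketch of that feature-detection pattern with a hypothetical callee:

```python
import inspect


def old_wrap(func, default_timeout=None):
    """Hypothetical older helper that does not know about ``kind``."""
    return func


def call_wrap(wrap, func, **kwargs):
    # Only forward ``kind`` when the installed helper actually accepts it,
    # so the same calling code works across library versions.
    if "kind" not in inspect.signature(wrap).parameters:
        kwargs.pop("kind", None)
    return wrap(func, **kwargs)


wrapped = call_wrap(old_wrap, lambda: None, kind="grpc_asyncio", default_timeout=5.0)
```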
+ + Returns: + Callable[[~.CreateCorpusRequest], + Awaitable[~.Corpus]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_corpus" not in self._stubs: + self._stubs["create_corpus"] = self._logged_channel.unary_unary( + "/google.ai.generativelanguage.v1alpha.RetrieverService/CreateCorpus", + request_serializer=retriever_service.CreateCorpusRequest.serialize, + response_deserializer=retriever.Corpus.deserialize, + ) + return self._stubs["create_corpus"] + + @property + def get_corpus( + self, + ) -> Callable[[retriever_service.GetCorpusRequest], Awaitable[retriever.Corpus]]: + r"""Return a callable for the get corpus method over gRPC. + + Gets information about a specific ``Corpus``. + + Returns: + Callable[[~.GetCorpusRequest], + Awaitable[~.Corpus]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_corpus" not in self._stubs: + self._stubs["get_corpus"] = self._logged_channel.unary_unary( + "/google.ai.generativelanguage.v1alpha.RetrieverService/GetCorpus", + request_serializer=retriever_service.GetCorpusRequest.serialize, + response_deserializer=retriever.Corpus.deserialize, + ) + return self._stubs["get_corpus"] + + @property + def update_corpus( + self, + ) -> Callable[[retriever_service.UpdateCorpusRequest], Awaitable[retriever.Corpus]]: + r"""Return a callable for the update corpus method over gRPC. + + Updates a ``Corpus``. + + Returns: + Callable[[~.UpdateCorpusRequest], + Awaitable[~.Corpus]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_corpus" not in self._stubs: + self._stubs["update_corpus"] = self._logged_channel.unary_unary( + "/google.ai.generativelanguage.v1alpha.RetrieverService/UpdateCorpus", + request_serializer=retriever_service.UpdateCorpusRequest.serialize, + response_deserializer=retriever.Corpus.deserialize, + ) + return self._stubs["update_corpus"] + + @property + def delete_corpus( + self, + ) -> Callable[[retriever_service.DeleteCorpusRequest], Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete corpus method over gRPC. + + Deletes a ``Corpus``. + + Returns: + Callable[[~.DeleteCorpusRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "delete_corpus" not in self._stubs: + self._stubs["delete_corpus"] = self._logged_channel.unary_unary( + "/google.ai.generativelanguage.v1alpha.RetrieverService/DeleteCorpus", + request_serializer=retriever_service.DeleteCorpusRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_corpus"] + + @property + def list_corpora( + self, + ) -> Callable[ + [retriever_service.ListCorporaRequest], + Awaitable[retriever_service.ListCorporaResponse], + ]: + r"""Return a callable for the list corpora method over gRPC. + + Lists all ``Corpora`` owned by the user. + + Returns: + Callable[[~.ListCorporaRequest], + Awaitable[~.ListCorporaResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_corpora" not in self._stubs: + self._stubs["list_corpora"] = self._logged_channel.unary_unary( + "/google.ai.generativelanguage.v1alpha.RetrieverService/ListCorpora", + request_serializer=retriever_service.ListCorporaRequest.serialize, + response_deserializer=retriever_service.ListCorporaResponse.deserialize, + ) + return self._stubs["list_corpora"] + + @property + def query_corpus( + self, + ) -> Callable[ + [retriever_service.QueryCorpusRequest], + Awaitable[retriever_service.QueryCorpusResponse], + ]: + r"""Return a callable for the query corpus method over gRPC. + + Performs semantic search over a ``Corpus``. + + Returns: + Callable[[~.QueryCorpusRequest], + Awaitable[~.QueryCorpusResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "query_corpus" not in self._stubs: + self._stubs["query_corpus"] = self._logged_channel.unary_unary( + "/google.ai.generativelanguage.v1alpha.RetrieverService/QueryCorpus", + request_serializer=retriever_service.QueryCorpusRequest.serialize, + response_deserializer=retriever_service.QueryCorpusResponse.deserialize, + ) + return self._stubs["query_corpus"] + + @property + def create_document( + self, + ) -> Callable[ + [retriever_service.CreateDocumentRequest], Awaitable[retriever.Document] + ]: + r"""Return a callable for the create document method over gRPC. + + Creates an empty ``Document``. + + Returns: + Callable[[~.CreateDocumentRequest], + Awaitable[~.Document]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_document" not in self._stubs: + self._stubs["create_document"] = self._logged_channel.unary_unary( + "/google.ai.generativelanguage.v1alpha.RetrieverService/CreateDocument", + request_serializer=retriever_service.CreateDocumentRequest.serialize, + response_deserializer=retriever.Document.deserialize, + ) + return self._stubs["create_document"] + + @property + def get_document( + self, + ) -> Callable[ + [retriever_service.GetDocumentRequest], Awaitable[retriever.Document] + ]: + r"""Return a callable for the get document method over gRPC. + + Gets information about a specific ``Document``. 
+ + Returns: + Callable[[~.GetDocumentRequest], + Awaitable[~.Document]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_document" not in self._stubs: + self._stubs["get_document"] = self._logged_channel.unary_unary( + "/google.ai.generativelanguage.v1alpha.RetrieverService/GetDocument", + request_serializer=retriever_service.GetDocumentRequest.serialize, + response_deserializer=retriever.Document.deserialize, + ) + return self._stubs["get_document"] + + @property + def update_document( + self, + ) -> Callable[ + [retriever_service.UpdateDocumentRequest], Awaitable[retriever.Document] + ]: + r"""Return a callable for the update document method over gRPC. + + Updates a ``Document``. + + Returns: + Callable[[~.UpdateDocumentRequest], + Awaitable[~.Document]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_document" not in self._stubs: + self._stubs["update_document"] = self._logged_channel.unary_unary( + "/google.ai.generativelanguage.v1alpha.RetrieverService/UpdateDocument", + request_serializer=retriever_service.UpdateDocumentRequest.serialize, + response_deserializer=retriever.Document.deserialize, + ) + return self._stubs["update_document"] + + @property + def delete_document( + self, + ) -> Callable[ + [retriever_service.DeleteDocumentRequest], Awaitable[empty_pb2.Empty] + ]: + r"""Return a callable for the delete document method over gRPC. + + Deletes a ``Document``. + + Returns: + Callable[[~.DeleteDocumentRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_document" not in self._stubs: + self._stubs["delete_document"] = self._logged_channel.unary_unary( + "/google.ai.generativelanguage.v1alpha.RetrieverService/DeleteDocument", + request_serializer=retriever_service.DeleteDocumentRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_document"] + + @property + def list_documents( + self, + ) -> Callable[ + [retriever_service.ListDocumentsRequest], + Awaitable[retriever_service.ListDocumentsResponse], + ]: + r"""Return a callable for the list documents method over gRPC. + + Lists all ``Document``\ s in a ``Corpus``. + + Returns: + Callable[[~.ListDocumentsRequest], + Awaitable[~.ListDocumentsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_documents" not in self._stubs: + self._stubs["list_documents"] = self._logged_channel.unary_unary( + "/google.ai.generativelanguage.v1alpha.RetrieverService/ListDocuments", + request_serializer=retriever_service.ListDocumentsRequest.serialize, + response_deserializer=retriever_service.ListDocumentsResponse.deserialize, + ) + return self._stubs["list_documents"] + + @property + def query_document( + self, + ) -> Callable[ + [retriever_service.QueryDocumentRequest], + Awaitable[retriever_service.QueryDocumentResponse], + ]: + r"""Return a callable for the query document method over gRPC. + + Performs semantic search over a ``Document``. + + Returns: + Callable[[~.QueryDocumentRequest], + Awaitable[~.QueryDocumentResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "query_document" not in self._stubs: + self._stubs["query_document"] = self._logged_channel.unary_unary( + "/google.ai.generativelanguage.v1alpha.RetrieverService/QueryDocument", + request_serializer=retriever_service.QueryDocumentRequest.serialize, + response_deserializer=retriever_service.QueryDocumentResponse.deserialize, + ) + return self._stubs["query_document"] + + @property + def create_chunk( + self, + ) -> Callable[[retriever_service.CreateChunkRequest], Awaitable[retriever.Chunk]]: + r"""Return a callable for the create chunk method over gRPC. + + Creates a ``Chunk``. + + Returns: + Callable[[~.CreateChunkRequest], + Awaitable[~.Chunk]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_chunk" not in self._stubs: + self._stubs["create_chunk"] = self._logged_channel.unary_unary( + "/google.ai.generativelanguage.v1alpha.RetrieverService/CreateChunk", + request_serializer=retriever_service.CreateChunkRequest.serialize, + response_deserializer=retriever.Chunk.deserialize, + ) + return self._stubs["create_chunk"] + + @property + def batch_create_chunks( + self, + ) -> Callable[ + [retriever_service.BatchCreateChunksRequest], + Awaitable[retriever_service.BatchCreateChunksResponse], + ]: + r"""Return a callable for the batch create chunks method over gRPC. + + Batch create ``Chunk``\ s. + + Returns: + Callable[[~.BatchCreateChunksRequest], + Awaitable[~.BatchCreateChunksResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "batch_create_chunks" not in self._stubs: + self._stubs["batch_create_chunks"] = self._logged_channel.unary_unary( + "/google.ai.generativelanguage.v1alpha.RetrieverService/BatchCreateChunks", + request_serializer=retriever_service.BatchCreateChunksRequest.serialize, + response_deserializer=retriever_service.BatchCreateChunksResponse.deserialize, + ) + return self._stubs["batch_create_chunks"] + + @property + def get_chunk( + self, + ) -> Callable[[retriever_service.GetChunkRequest], Awaitable[retriever.Chunk]]: + r"""Return a callable for the get chunk method over gRPC. 
+ + Gets information about a specific ``Chunk``. + + Returns: + Callable[[~.GetChunkRequest], + Awaitable[~.Chunk]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_chunk" not in self._stubs: + self._stubs["get_chunk"] = self._logged_channel.unary_unary( + "/google.ai.generativelanguage.v1alpha.RetrieverService/GetChunk", + request_serializer=retriever_service.GetChunkRequest.serialize, + response_deserializer=retriever.Chunk.deserialize, + ) + return self._stubs["get_chunk"] + + @property + def update_chunk( + self, + ) -> Callable[[retriever_service.UpdateChunkRequest], Awaitable[retriever.Chunk]]: + r"""Return a callable for the update chunk method over gRPC. + + Updates a ``Chunk``. + + Returns: + Callable[[~.UpdateChunkRequest], + Awaitable[~.Chunk]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_chunk" not in self._stubs: + self._stubs["update_chunk"] = self._logged_channel.unary_unary( + "/google.ai.generativelanguage.v1alpha.RetrieverService/UpdateChunk", + request_serializer=retriever_service.UpdateChunkRequest.serialize, + response_deserializer=retriever.Chunk.deserialize, + ) + return self._stubs["update_chunk"] + + @property + def batch_update_chunks( + self, + ) -> Callable[ + [retriever_service.BatchUpdateChunksRequest], + Awaitable[retriever_service.BatchUpdateChunksResponse], + ]: + r"""Return a callable for the batch update chunks method over gRPC. + + Batch update ``Chunk``\ s. + + Returns: + Callable[[~.BatchUpdateChunksRequest], + Awaitable[~.BatchUpdateChunksResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "batch_update_chunks" not in self._stubs: + self._stubs["batch_update_chunks"] = self._logged_channel.unary_unary( + "/google.ai.generativelanguage.v1alpha.RetrieverService/BatchUpdateChunks", + request_serializer=retriever_service.BatchUpdateChunksRequest.serialize, + response_deserializer=retriever_service.BatchUpdateChunksResponse.deserialize, + ) + return self._stubs["batch_update_chunks"] + + @property + def delete_chunk( + self, + ) -> Callable[[retriever_service.DeleteChunkRequest], Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete chunk method over gRPC. + + Deletes a ``Chunk``. + + Returns: + Callable[[~.DeleteChunkRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "delete_chunk" not in self._stubs: + self._stubs["delete_chunk"] = self._logged_channel.unary_unary( + "/google.ai.generativelanguage.v1alpha.RetrieverService/DeleteChunk", + request_serializer=retriever_service.DeleteChunkRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_chunk"] + + @property + def batch_delete_chunks( + self, + ) -> Callable[ + [retriever_service.BatchDeleteChunksRequest], Awaitable[empty_pb2.Empty] + ]: + r"""Return a callable for the batch delete chunks method over gRPC. + + Batch delete ``Chunk``\ s. + + Returns: + Callable[[~.BatchDeleteChunksRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "batch_delete_chunks" not in self._stubs: + self._stubs["batch_delete_chunks"] = self._logged_channel.unary_unary( + "/google.ai.generativelanguage.v1alpha.RetrieverService/BatchDeleteChunks", + request_serializer=retriever_service.BatchDeleteChunksRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["batch_delete_chunks"] + + @property + def list_chunks( + self, + ) -> Callable[ + [retriever_service.ListChunksRequest], + Awaitable[retriever_service.ListChunksResponse], + ]: + r"""Return a callable for the list chunks method over gRPC. + + Lists all ``Chunk``\ s in a ``Document``. + + Returns: + Callable[[~.ListChunksRequest], + Awaitable[~.ListChunksResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_chunks" not in self._stubs: + self._stubs["list_chunks"] = self._logged_channel.unary_unary( + "/google.ai.generativelanguage.v1alpha.RetrieverService/ListChunks", + request_serializer=retriever_service.ListChunksRequest.serialize, + response_deserializer=retriever_service.ListChunksResponse.deserialize, + ) + return self._stubs["list_chunks"] + + def _prep_wrapped_messages(self, client_info): + """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + self._wrapped_methods = { + self.create_corpus: self._wrap_method( + self.create_corpus, + default_timeout=None, + client_info=client_info, + ), + self.get_corpus: self._wrap_method( + self.get_corpus, + default_timeout=None, + client_info=client_info, + ), + self.update_corpus: self._wrap_method( + self.update_corpus, + default_timeout=None, + client_info=client_info, + ), + self.delete_corpus: self._wrap_method( + self.delete_corpus, + default_timeout=None, + client_info=client_info, + ), + self.list_corpora: self._wrap_method( + self.list_corpora, + default_timeout=None, + client_info=client_info, + ), + self.query_corpus: self._wrap_method( + self.query_corpus, + default_timeout=None, + client_info=client_info, + ), + self.create_document: self._wrap_method( + self.create_document, + default_timeout=None, + client_info=client_info, + ), + self.get_document: self._wrap_method( + self.get_document, + default_timeout=None, + client_info=client_info, + ), + self.update_document: self._wrap_method( + self.update_document, + default_timeout=None, + client_info=client_info, + ), + self.delete_document: self._wrap_method( + self.delete_document, + default_timeout=None, + client_info=client_info, + ), + self.list_documents: self._wrap_method( + self.list_documents, + default_timeout=None, + client_info=client_info, + ), + self.query_document: self._wrap_method( + self.query_document, + default_timeout=None, + client_info=client_info, + ), + self.create_chunk: self._wrap_method( + self.create_chunk, + default_timeout=None, + client_info=client_info, + ), + self.batch_create_chunks: self._wrap_method( + self.batch_create_chunks, + default_timeout=None, + client_info=client_info, + ), + self.get_chunk: self._wrap_method( + self.get_chunk, + default_timeout=None, + client_info=client_info, + ), + self.update_chunk: self._wrap_method( + self.update_chunk, + default_timeout=None, + client_info=client_info, + ), + self.batch_update_chunks: self._wrap_method( + self.batch_update_chunks, + default_timeout=None, + client_info=client_info, + ), + self.delete_chunk: self._wrap_method( + self.delete_chunk, + default_timeout=None, + client_info=client_info, + ), + self.batch_delete_chunks: self._wrap_method( + self.batch_delete_chunks, + default_timeout=None, + client_info=client_info, + ), + self.list_chunks: self._wrap_method( + self.list_chunks, + default_timeout=None, + client_info=client_info, + ), + self.get_operation: self._wrap_method( + self.get_operation, + default_timeout=None, + client_info=client_info, + ), + self.list_operations: self._wrap_method( + self.list_operations, + default_timeout=None, + client_info=client_info, + ), + } + + def _wrap_method(self, func, *args, **kwargs): + if self._wrap_with_kind: # pragma: NO COVER + kwargs["kind"] = self.kind + return gapic_v1.method_async.wrap_method(func, *args, **kwargs) + + def close(self): + return self._logged_channel.close() + + @property + def kind(self) -> str: + return "grpc_asyncio" + + @property + def get_operation( + self, + ) -> 
Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + +__all__ = ("RetrieverServiceGrpcAsyncIOTransport",) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/retriever_service/transports/rest.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/retriever_service/transports/rest.py new file mode 100644 index 000000000000..6ee1d5ae4c83 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/retriever_service/transports/rest.py @@ -0,0 +1,4515 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import dataclasses +import json # type: ignore +import logging +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, rest_helpers, rest_streaming +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import json_format +from requests import __version__ as requests_version + +from google.ai.generativelanguage_v1alpha.types import retriever, retriever_service + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .rest_base import _BaseRetrieverServiceRestTransport + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = logging.getLogger(__name__) + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=f"requests@{requests_version}", +) + + +class RetrieverServiceRestInterceptor: + """Interceptor for RetrieverService. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the RetrieverServiceRestTransport. + + .. 
code-block:: python + class MyCustomRetrieverServiceInterceptor(RetrieverServiceRestInterceptor): + def pre_batch_create_chunks(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_batch_create_chunks(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_batch_delete_chunks(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_batch_update_chunks(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_batch_update_chunks(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_chunk(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_chunk(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_corpus(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_corpus(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_document(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_document(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_chunk(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_delete_corpus(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_delete_document(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_get_chunk(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_chunk(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_corpus(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_corpus(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_document(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_document(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_chunks(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_chunks(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_corpora(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_corpora(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_documents(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_documents(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_query_corpus(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_query_corpus(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_query_document(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + 
+ def post_query_document(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_chunk(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_chunk(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_corpus(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_corpus(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_document(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_document(self, response): + logging.log(f"Received response: {response}") + return response + + transport = RetrieverServiceRestTransport(interceptor=MyCustomRetrieverServiceInterceptor()) + client = RetrieverServiceClient(transport=transport) + + + """ + + def pre_batch_create_chunks( + self, + request: retriever_service.BatchCreateChunksRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + retriever_service.BatchCreateChunksRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for batch_create_chunks + + Override in a subclass to manipulate the request or metadata + before they are sent to the RetrieverService server. + """ + return request, metadata + + def post_batch_create_chunks( + self, response: retriever_service.BatchCreateChunksResponse + ) -> retriever_service.BatchCreateChunksResponse: + """Post-rpc interceptor for batch_create_chunks + + DEPRECATED. Please use the `post_batch_create_chunks_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the RetrieverService server but before + it is returned to user code. This `post_batch_create_chunks` interceptor runs + before the `post_batch_create_chunks_with_metadata` interceptor. + """ + return response + + def post_batch_create_chunks_with_metadata( + self, + response: retriever_service.BatchCreateChunksResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + retriever_service.BatchCreateChunksResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for batch_create_chunks + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RetrieverService server but before it is returned to user code. + + We recommend only using this `post_batch_create_chunks_with_metadata` + interceptor in new development instead of the `post_batch_create_chunks` interceptor. + When both interceptors are used, this `post_batch_create_chunks_with_metadata` interceptor runs after the + `post_batch_create_chunks` interceptor. The (possibly modified) response returned by + `post_batch_create_chunks` will be passed to + `post_batch_create_chunks_with_metadata`. + """ + return response, metadata + + def pre_batch_delete_chunks( + self, + request: retriever_service.BatchDeleteChunksRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + retriever_service.BatchDeleteChunksRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for batch_delete_chunks + + Override in a subclass to manipulate the request or metadata + before they are sent to the RetrieverService server. 
+ """ + return request, metadata + + def pre_batch_update_chunks( + self, + request: retriever_service.BatchUpdateChunksRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + retriever_service.BatchUpdateChunksRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for batch_update_chunks + + Override in a subclass to manipulate the request or metadata + before they are sent to the RetrieverService server. + """ + return request, metadata + + def post_batch_update_chunks( + self, response: retriever_service.BatchUpdateChunksResponse + ) -> retriever_service.BatchUpdateChunksResponse: + """Post-rpc interceptor for batch_update_chunks + + DEPRECATED. Please use the `post_batch_update_chunks_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the RetrieverService server but before + it is returned to user code. This `post_batch_update_chunks` interceptor runs + before the `post_batch_update_chunks_with_metadata` interceptor. + """ + return response + + def post_batch_update_chunks_with_metadata( + self, + response: retriever_service.BatchUpdateChunksResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + retriever_service.BatchUpdateChunksResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for batch_update_chunks + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RetrieverService server but before it is returned to user code. + + We recommend only using this `post_batch_update_chunks_with_metadata` + interceptor in new development instead of the `post_batch_update_chunks` interceptor. + When both interceptors are used, this `post_batch_update_chunks_with_metadata` interceptor runs after the + `post_batch_update_chunks` interceptor. The (possibly modified) response returned by + `post_batch_update_chunks` will be passed to + `post_batch_update_chunks_with_metadata`. + """ + return response, metadata + + def pre_create_chunk( + self, + request: retriever_service.CreateChunkRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + retriever_service.CreateChunkRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for create_chunk + + Override in a subclass to manipulate the request or metadata + before they are sent to the RetrieverService server. + """ + return request, metadata + + def post_create_chunk(self, response: retriever.Chunk) -> retriever.Chunk: + """Post-rpc interceptor for create_chunk + + DEPRECATED. Please use the `post_create_chunk_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the RetrieverService server but before + it is returned to user code. This `post_create_chunk` interceptor runs + before the `post_create_chunk_with_metadata` interceptor. + """ + return response + + def post_create_chunk_with_metadata( + self, + response: retriever.Chunk, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[retriever.Chunk, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_chunk + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RetrieverService server but before it is returned to user code. + + We recommend only using this `post_create_chunk_with_metadata` + interceptor in new development instead of the `post_create_chunk` interceptor. 
+ When both interceptors are used, this `post_create_chunk_with_metadata` interceptor runs after the + `post_create_chunk` interceptor. The (possibly modified) response returned by + `post_create_chunk` will be passed to + `post_create_chunk_with_metadata`. + """ + return response, metadata + + def pre_create_corpus( + self, + request: retriever_service.CreateCorpusRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + retriever_service.CreateCorpusRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for create_corpus + + Override in a subclass to manipulate the request or metadata + before they are sent to the RetrieverService server. + """ + return request, metadata + + def post_create_corpus(self, response: retriever.Corpus) -> retriever.Corpus: + """Post-rpc interceptor for create_corpus + + DEPRECATED. Please use the `post_create_corpus_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the RetrieverService server but before + it is returned to user code. This `post_create_corpus` interceptor runs + before the `post_create_corpus_with_metadata` interceptor. + """ + return response + + def post_create_corpus_with_metadata( + self, + response: retriever.Corpus, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[retriever.Corpus, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_corpus + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RetrieverService server but before it is returned to user code. + + We recommend only using this `post_create_corpus_with_metadata` + interceptor in new development instead of the `post_create_corpus` interceptor. + When both interceptors are used, this `post_create_corpus_with_metadata` interceptor runs after the + `post_create_corpus` interceptor. The (possibly modified) response returned by + `post_create_corpus` will be passed to + `post_create_corpus_with_metadata`. + """ + return response, metadata + + def pre_create_document( + self, + request: retriever_service.CreateDocumentRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + retriever_service.CreateDocumentRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for create_document + + Override in a subclass to manipulate the request or metadata + before they are sent to the RetrieverService server. + """ + return request, metadata + + def post_create_document(self, response: retriever.Document) -> retriever.Document: + """Post-rpc interceptor for create_document + + DEPRECATED. Please use the `post_create_document_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the RetrieverService server but before + it is returned to user code. This `post_create_document` interceptor runs + before the `post_create_document_with_metadata` interceptor. + """ + return response + + def post_create_document_with_metadata( + self, + response: retriever.Document, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[retriever.Document, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_document + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RetrieverService server but before it is returned to user code. 
+ + We recommend only using this `post_create_document_with_metadata` + interceptor in new development instead of the `post_create_document` interceptor. + When both interceptors are used, this `post_create_document_with_metadata` interceptor runs after the + `post_create_document` interceptor. The (possibly modified) response returned by + `post_create_document` will be passed to + `post_create_document_with_metadata`. + """ + return response, metadata + + def pre_delete_chunk( + self, + request: retriever_service.DeleteChunkRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + retriever_service.DeleteChunkRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for delete_chunk + + Override in a subclass to manipulate the request or metadata + before they are sent to the RetrieverService server. + """ + return request, metadata + + def pre_delete_corpus( + self, + request: retriever_service.DeleteCorpusRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + retriever_service.DeleteCorpusRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for delete_corpus + + Override in a subclass to manipulate the request or metadata + before they are sent to the RetrieverService server. + """ + return request, metadata + + def pre_delete_document( + self, + request: retriever_service.DeleteDocumentRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + retriever_service.DeleteDocumentRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for delete_document + + Override in a subclass to manipulate the request or metadata + before they are sent to the RetrieverService server. + """ + return request, metadata + + def pre_get_chunk( + self, + request: retriever_service.GetChunkRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + retriever_service.GetChunkRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for get_chunk + + Override in a subclass to manipulate the request or metadata + before they are sent to the RetrieverService server. + """ + return request, metadata + + def post_get_chunk(self, response: retriever.Chunk) -> retriever.Chunk: + """Post-rpc interceptor for get_chunk + + DEPRECATED. Please use the `post_get_chunk_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the RetrieverService server but before + it is returned to user code. This `post_get_chunk` interceptor runs + before the `post_get_chunk_with_metadata` interceptor. + """ + return response + + def post_get_chunk_with_metadata( + self, + response: retriever.Chunk, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[retriever.Chunk, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_chunk + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RetrieverService server but before it is returned to user code. + + We recommend only using this `post_get_chunk_with_metadata` + interceptor in new development instead of the `post_get_chunk` interceptor. + When both interceptors are used, this `post_get_chunk_with_metadata` interceptor runs after the + `post_get_chunk` interceptor. The (possibly modified) response returned by + `post_get_chunk` will be passed to + `post_get_chunk_with_metadata`. 
+ """ + return response, metadata + + def pre_get_corpus( + self, + request: retriever_service.GetCorpusRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + retriever_service.GetCorpusRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for get_corpus + + Override in a subclass to manipulate the request or metadata + before they are sent to the RetrieverService server. + """ + return request, metadata + + def post_get_corpus(self, response: retriever.Corpus) -> retriever.Corpus: + """Post-rpc interceptor for get_corpus + + DEPRECATED. Please use the `post_get_corpus_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the RetrieverService server but before + it is returned to user code. This `post_get_corpus` interceptor runs + before the `post_get_corpus_with_metadata` interceptor. + """ + return response + + def post_get_corpus_with_metadata( + self, + response: retriever.Corpus, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[retriever.Corpus, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_corpus + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RetrieverService server but before it is returned to user code. + + We recommend only using this `post_get_corpus_with_metadata` + interceptor in new development instead of the `post_get_corpus` interceptor. + When both interceptors are used, this `post_get_corpus_with_metadata` interceptor runs after the + `post_get_corpus` interceptor. The (possibly modified) response returned by + `post_get_corpus` will be passed to + `post_get_corpus_with_metadata`. + """ + return response, metadata + + def pre_get_document( + self, + request: retriever_service.GetDocumentRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + retriever_service.GetDocumentRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for get_document + + Override in a subclass to manipulate the request or metadata + before they are sent to the RetrieverService server. + """ + return request, metadata + + def post_get_document(self, response: retriever.Document) -> retriever.Document: + """Post-rpc interceptor for get_document + + DEPRECATED. Please use the `post_get_document_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the RetrieverService server but before + it is returned to user code. This `post_get_document` interceptor runs + before the `post_get_document_with_metadata` interceptor. + """ + return response + + def post_get_document_with_metadata( + self, + response: retriever.Document, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[retriever.Document, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_document + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RetrieverService server but before it is returned to user code. + + We recommend only using this `post_get_document_with_metadata` + interceptor in new development instead of the `post_get_document` interceptor. + When both interceptors are used, this `post_get_document_with_metadata` interceptor runs after the + `post_get_document` interceptor. The (possibly modified) response returned by + `post_get_document` will be passed to + `post_get_document_with_metadata`. 
+ """ + return response, metadata + + def pre_list_chunks( + self, + request: retriever_service.ListChunksRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + retriever_service.ListChunksRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for list_chunks + + Override in a subclass to manipulate the request or metadata + before they are sent to the RetrieverService server. + """ + return request, metadata + + def post_list_chunks( + self, response: retriever_service.ListChunksResponse + ) -> retriever_service.ListChunksResponse: + """Post-rpc interceptor for list_chunks + + DEPRECATED. Please use the `post_list_chunks_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the RetrieverService server but before + it is returned to user code. This `post_list_chunks` interceptor runs + before the `post_list_chunks_with_metadata` interceptor. + """ + return response + + def post_list_chunks_with_metadata( + self, + response: retriever_service.ListChunksResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + retriever_service.ListChunksResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_chunks + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RetrieverService server but before it is returned to user code. + + We recommend only using this `post_list_chunks_with_metadata` + interceptor in new development instead of the `post_list_chunks` interceptor. + When both interceptors are used, this `post_list_chunks_with_metadata` interceptor runs after the + `post_list_chunks` interceptor. The (possibly modified) response returned by + `post_list_chunks` will be passed to + `post_list_chunks_with_metadata`. + """ + return response, metadata + + def pre_list_corpora( + self, + request: retriever_service.ListCorporaRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + retriever_service.ListCorporaRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for list_corpora + + Override in a subclass to manipulate the request or metadata + before they are sent to the RetrieverService server. + """ + return request, metadata + + def post_list_corpora( + self, response: retriever_service.ListCorporaResponse + ) -> retriever_service.ListCorporaResponse: + """Post-rpc interceptor for list_corpora + + DEPRECATED. Please use the `post_list_corpora_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the RetrieverService server but before + it is returned to user code. This `post_list_corpora` interceptor runs + before the `post_list_corpora_with_metadata` interceptor. + """ + return response + + def post_list_corpora_with_metadata( + self, + response: retriever_service.ListCorporaResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + retriever_service.ListCorporaResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_corpora + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RetrieverService server but before it is returned to user code. + + We recommend only using this `post_list_corpora_with_metadata` + interceptor in new development instead of the `post_list_corpora` interceptor. 
+ When both interceptors are used, this `post_list_corpora_with_metadata` interceptor runs after the + `post_list_corpora` interceptor. The (possibly modified) response returned by + `post_list_corpora` will be passed to + `post_list_corpora_with_metadata`. + """ + return response, metadata + + def pre_list_documents( + self, + request: retriever_service.ListDocumentsRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + retriever_service.ListDocumentsRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for list_documents + + Override in a subclass to manipulate the request or metadata + before they are sent to the RetrieverService server. + """ + return request, metadata + + def post_list_documents( + self, response: retriever_service.ListDocumentsResponse + ) -> retriever_service.ListDocumentsResponse: + """Post-rpc interceptor for list_documents + + DEPRECATED. Please use the `post_list_documents_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the RetrieverService server but before + it is returned to user code. This `post_list_documents` interceptor runs + before the `post_list_documents_with_metadata` interceptor. + """ + return response + + def post_list_documents_with_metadata( + self, + response: retriever_service.ListDocumentsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + retriever_service.ListDocumentsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_documents + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RetrieverService server but before it is returned to user code. + + We recommend only using this `post_list_documents_with_metadata` + interceptor in new development instead of the `post_list_documents` interceptor. + When both interceptors are used, this `post_list_documents_with_metadata` interceptor runs after the + `post_list_documents` interceptor. The (possibly modified) response returned by + `post_list_documents` will be passed to + `post_list_documents_with_metadata`. + """ + return response, metadata + + def pre_query_corpus( + self, + request: retriever_service.QueryCorpusRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + retriever_service.QueryCorpusRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for query_corpus + + Override in a subclass to manipulate the request or metadata + before they are sent to the RetrieverService server. + """ + return request, metadata + + def post_query_corpus( + self, response: retriever_service.QueryCorpusResponse + ) -> retriever_service.QueryCorpusResponse: + """Post-rpc interceptor for query_corpus + + DEPRECATED. Please use the `post_query_corpus_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the RetrieverService server but before + it is returned to user code. This `post_query_corpus` interceptor runs + before the `post_query_corpus_with_metadata` interceptor. 
+ """ + return response + + def post_query_corpus_with_metadata( + self, + response: retriever_service.QueryCorpusResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + retriever_service.QueryCorpusResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for query_corpus + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RetrieverService server but before it is returned to user code. + + We recommend only using this `post_query_corpus_with_metadata` + interceptor in new development instead of the `post_query_corpus` interceptor. + When both interceptors are used, this `post_query_corpus_with_metadata` interceptor runs after the + `post_query_corpus` interceptor. The (possibly modified) response returned by + `post_query_corpus` will be passed to + `post_query_corpus_with_metadata`. + """ + return response, metadata + + def pre_query_document( + self, + request: retriever_service.QueryDocumentRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + retriever_service.QueryDocumentRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for query_document + + Override in a subclass to manipulate the request or metadata + before they are sent to the RetrieverService server. + """ + return request, metadata + + def post_query_document( + self, response: retriever_service.QueryDocumentResponse + ) -> retriever_service.QueryDocumentResponse: + """Post-rpc interceptor for query_document + + DEPRECATED. Please use the `post_query_document_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the RetrieverService server but before + it is returned to user code. This `post_query_document` interceptor runs + before the `post_query_document_with_metadata` interceptor. + """ + return response + + def post_query_document_with_metadata( + self, + response: retriever_service.QueryDocumentResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + retriever_service.QueryDocumentResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for query_document + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RetrieverService server but before it is returned to user code. + + We recommend only using this `post_query_document_with_metadata` + interceptor in new development instead of the `post_query_document` interceptor. + When both interceptors are used, this `post_query_document_with_metadata` interceptor runs after the + `post_query_document` interceptor. The (possibly modified) response returned by + `post_query_document` will be passed to + `post_query_document_with_metadata`. + """ + return response, metadata + + def pre_update_chunk( + self, + request: retriever_service.UpdateChunkRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + retriever_service.UpdateChunkRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for update_chunk + + Override in a subclass to manipulate the request or metadata + before they are sent to the RetrieverService server. + """ + return request, metadata + + def post_update_chunk(self, response: retriever.Chunk) -> retriever.Chunk: + """Post-rpc interceptor for update_chunk + + DEPRECATED. Please use the `post_update_chunk_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response + after it is returned by the RetrieverService server but before + it is returned to user code. This `post_update_chunk` interceptor runs + before the `post_update_chunk_with_metadata` interceptor. + """ + return response + + def post_update_chunk_with_metadata( + self, + response: retriever.Chunk, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[retriever.Chunk, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_chunk + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RetrieverService server but before it is returned to user code. + + We recommend only using this `post_update_chunk_with_metadata` + interceptor in new development instead of the `post_update_chunk` interceptor. + When both interceptors are used, this `post_update_chunk_with_metadata` interceptor runs after the + `post_update_chunk` interceptor. The (possibly modified) response returned by + `post_update_chunk` will be passed to + `post_update_chunk_with_metadata`. + """ + return response, metadata + + def pre_update_corpus( + self, + request: retriever_service.UpdateCorpusRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + retriever_service.UpdateCorpusRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for update_corpus + + Override in a subclass to manipulate the request or metadata + before they are sent to the RetrieverService server. + """ + return request, metadata + + def post_update_corpus(self, response: retriever.Corpus) -> retriever.Corpus: + """Post-rpc interceptor for update_corpus + + DEPRECATED. Please use the `post_update_corpus_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the RetrieverService server but before + it is returned to user code. This `post_update_corpus` interceptor runs + before the `post_update_corpus_with_metadata` interceptor. + """ + return response + + def post_update_corpus_with_metadata( + self, + response: retriever.Corpus, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[retriever.Corpus, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_corpus + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RetrieverService server but before it is returned to user code. + + We recommend only using this `post_update_corpus_with_metadata` + interceptor in new development instead of the `post_update_corpus` interceptor. + When both interceptors are used, this `post_update_corpus_with_metadata` interceptor runs after the + `post_update_corpus` interceptor. The (possibly modified) response returned by + `post_update_corpus` will be passed to + `post_update_corpus_with_metadata`. + """ + return response, metadata + + def pre_update_document( + self, + request: retriever_service.UpdateDocumentRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + retriever_service.UpdateDocumentRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for update_document + + Override in a subclass to manipulate the request or metadata + before they are sent to the RetrieverService server. + """ + return request, metadata + + def post_update_document(self, response: retriever.Document) -> retriever.Document: + """Post-rpc interceptor for update_document + + DEPRECATED. 
Please use the `post_update_document_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the RetrieverService server but before + it is returned to user code. This `post_update_document` interceptor runs + before the `post_update_document_with_metadata` interceptor. + """ + return response + + def post_update_document_with_metadata( + self, + response: retriever.Document, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[retriever.Document, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_document + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RetrieverService server but before it is returned to user code. + + We recommend only using this `post_update_document_with_metadata` + interceptor in new development instead of the `post_update_document` interceptor. + When both interceptors are used, this `post_update_document_with_metadata` interceptor runs after the + `post_update_document` interceptor. The (possibly modified) response returned by + `post_update_document` will be passed to + `post_update_document_with_metadata`. + """ + return response, metadata + + def pre_get_operation( + self, + request: operations_pb2.GetOperationRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + operations_pb2.GetOperationRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for get_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the RetrieverService server. + """ + return request, metadata + + def post_get_operation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for get_operation + + Override in a subclass to manipulate the response + after it is returned by the RetrieverService server but before + it is returned to user code. + """ + return response + + def pre_list_operations( + self, + request: operations_pb2.ListOperationsRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + operations_pb2.ListOperationsRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for list_operations + + Override in a subclass to manipulate the request or metadata + before they are sent to the RetrieverService server. + """ + return request, metadata + + def post_list_operations( + self, response: operations_pb2.ListOperationsResponse + ) -> operations_pb2.ListOperationsResponse: + """Post-rpc interceptor for list_operations + + Override in a subclass to manipulate the response + after it is returned by the RetrieverService server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class RetrieverServiceRestStub: + _session: AuthorizedSession + _host: str + _interceptor: RetrieverServiceRestInterceptor + + +class RetrieverServiceRestTransport(_BaseRetrieverServiceRestTransport): + """REST backend synchronous transport for RetrieverService. + + An API for semantic search over a corpus of user uploaded + content. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. 
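+
+    A minimal usage sketch (illustrative only, not generated code): it assumes
+    that a custom ``RetrieverServiceRestInterceptor`` subclass can be passed to
+    this transport and that ``RetrieverServiceClient`` accepts a ``transport``
+    instance, as with other GAPIC REST transports::
+
+        class LoggingInterceptor(RetrieverServiceRestInterceptor):
+            def post_query_corpus_with_metadata(self, response, metadata):
+                # Inspect (or modify) the response and metadata before they
+                # are handed back to user code.
+                print(len(response.relevant_chunks), "relevant chunks")
+                return response, metadata
+
+        transport = RetrieverServiceRestTransport(interceptor=LoggingInterceptor())
+        client = RetrieverServiceClient(transport=transport)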
+ + It sends JSON representations of protocol buffers over HTTP/1.1 + """ + + def __init__( + self, + *, + host: str = "generativelanguage.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[RetrieverServiceRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'generativelanguage.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
+ # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + url_scheme=url_scheme, + api_audience=api_audience, + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST + ) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or RetrieverServiceRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _BatchCreateChunks( + _BaseRetrieverServiceRestTransport._BaseBatchCreateChunks, + RetrieverServiceRestStub, + ): + def __hash__(self): + return hash("RetrieverServiceRestTransport.BatchCreateChunks") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: retriever_service.BatchCreateChunksRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> retriever_service.BatchCreateChunksResponse: + r"""Call the batch create chunks method over HTTP. + + Args: + request (~.retriever_service.BatchCreateChunksRequest): + The request object. Request to batch create ``Chunk``\ s. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.retriever_service.BatchCreateChunksResponse: + Response from ``BatchCreateChunks`` containing a list of + created ``Chunk``\ s. 
+ + """ + + http_options = ( + _BaseRetrieverServiceRestTransport._BaseBatchCreateChunks._get_http_options() + ) + + request, metadata = self._interceptor.pre_batch_create_chunks( + request, metadata + ) + transcoded_request = _BaseRetrieverServiceRestTransport._BaseBatchCreateChunks._get_transcoded_request( + http_options, request + ) + + body = _BaseRetrieverServiceRestTransport._BaseBatchCreateChunks._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseRetrieverServiceRestTransport._BaseBatchCreateChunks._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.ai.generativelanguage_v1alpha.RetrieverServiceClient.BatchCreateChunks", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.RetrieverService", + "rpcName": "BatchCreateChunks", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = RetrieverServiceRestTransport._BatchCreateChunks._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = retriever_service.BatchCreateChunksResponse() + pb_resp = retriever_service.BatchCreateChunksResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_batch_create_chunks(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_batch_create_chunks_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = ( + retriever_service.BatchCreateChunksResponse.to_json(response) + ) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.ai.generativelanguage_v1alpha.RetrieverServiceClient.batch_create_chunks", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.RetrieverService", + "rpcName": "BatchCreateChunks", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _BatchDeleteChunks( + _BaseRetrieverServiceRestTransport._BaseBatchDeleteChunks, + RetrieverServiceRestStub, + ): + def __hash__(self): + return hash("RetrieverServiceRestTransport.BatchDeleteChunks") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + 
params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: retriever_service.BatchDeleteChunksRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ): + r"""Call the batch delete chunks method over HTTP. + + Args: + request (~.retriever_service.BatchDeleteChunksRequest): + The request object. Request to batch delete ``Chunk``\ s. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + + http_options = ( + _BaseRetrieverServiceRestTransport._BaseBatchDeleteChunks._get_http_options() + ) + + request, metadata = self._interceptor.pre_batch_delete_chunks( + request, metadata + ) + transcoded_request = _BaseRetrieverServiceRestTransport._BaseBatchDeleteChunks._get_transcoded_request( + http_options, request + ) + + body = _BaseRetrieverServiceRestTransport._BaseBatchDeleteChunks._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseRetrieverServiceRestTransport._BaseBatchDeleteChunks._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.ai.generativelanguage_v1alpha.RetrieverServiceClient.BatchDeleteChunks", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.RetrieverService", + "rpcName": "BatchDeleteChunks", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = RetrieverServiceRestTransport._BatchDeleteChunks._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
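+            # from_http_response builds the matching typed exception from the
+            # status code and payload (e.g. 404 -> NotFound, 403 -> PermissionDenied),
+            # so callers can catch specific google.api_core.exceptions classes.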
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _BatchUpdateChunks( + _BaseRetrieverServiceRestTransport._BaseBatchUpdateChunks, + RetrieverServiceRestStub, + ): + def __hash__(self): + return hash("RetrieverServiceRestTransport.BatchUpdateChunks") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: retriever_service.BatchUpdateChunksRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> retriever_service.BatchUpdateChunksResponse: + r"""Call the batch update chunks method over HTTP. + + Args: + request (~.retriever_service.BatchUpdateChunksRequest): + The request object. Request to batch update ``Chunk``\ s. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.retriever_service.BatchUpdateChunksResponse: + Response from ``BatchUpdateChunks`` containing a list of + updated ``Chunk``\ s. + + """ + + http_options = ( + _BaseRetrieverServiceRestTransport._BaseBatchUpdateChunks._get_http_options() + ) + + request, metadata = self._interceptor.pre_batch_update_chunks( + request, metadata + ) + transcoded_request = _BaseRetrieverServiceRestTransport._BaseBatchUpdateChunks._get_transcoded_request( + http_options, request + ) + + body = _BaseRetrieverServiceRestTransport._BaseBatchUpdateChunks._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseRetrieverServiceRestTransport._BaseBatchUpdateChunks._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.ai.generativelanguage_v1alpha.RetrieverServiceClient.BatchUpdateChunks", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.RetrieverService", + "rpcName": "BatchUpdateChunks", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = RetrieverServiceRestTransport._BatchUpdateChunks._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = retriever_service.BatchUpdateChunksResponse() + pb_resp = retriever_service.BatchUpdateChunksResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_batch_update_chunks(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_batch_update_chunks_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = ( + retriever_service.BatchUpdateChunksResponse.to_json(response) + ) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.ai.generativelanguage_v1alpha.RetrieverServiceClient.batch_update_chunks", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.RetrieverService", + "rpcName": "BatchUpdateChunks", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _CreateChunk( + _BaseRetrieverServiceRestTransport._BaseCreateChunk, RetrieverServiceRestStub + ): + def __hash__(self): + return hash("RetrieverServiceRestTransport.CreateChunk") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: retriever_service.CreateChunkRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> retriever.Chunk: + r"""Call the create chunk method over HTTP. + + Args: + request (~.retriever_service.CreateChunkRequest): + The request object. Request to create a ``Chunk``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.retriever.Chunk: + A ``Chunk`` is a subpart of a ``Document`` that is + treated as an independent unit for the purposes of + vector representation and storage. A ``Corpus`` can have + a maximum of 1 million ``Chunk``\ s. 
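+
+            Example (an illustrative sketch only; this stub is normally reached
+            through the wrapped ``RetrieverServiceClient.create_chunk`` method,
+            and the flattened ``parent``/``chunk`` arguments and ``glm``
+            re-exports below are assumed rather than taken from this module)::
+
+                from google.ai import generativelanguage_v1alpha as glm
+
+                client = glm.RetrieverServiceClient(transport="rest")
+                chunk = client.create_chunk(
+                    parent="corpora/my-corpus/documents/my-doc",
+                    chunk=glm.Chunk(data=glm.ChunkData(string_value="Some text.")),
+                )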
+ + """ + + http_options = ( + _BaseRetrieverServiceRestTransport._BaseCreateChunk._get_http_options() + ) + + request, metadata = self._interceptor.pre_create_chunk(request, metadata) + transcoded_request = _BaseRetrieverServiceRestTransport._BaseCreateChunk._get_transcoded_request( + http_options, request + ) + + body = _BaseRetrieverServiceRestTransport._BaseCreateChunk._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseRetrieverServiceRestTransport._BaseCreateChunk._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.ai.generativelanguage_v1alpha.RetrieverServiceClient.CreateChunk", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.RetrieverService", + "rpcName": "CreateChunk", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = RetrieverServiceRestTransport._CreateChunk._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = retriever.Chunk() + pb_resp = retriever.Chunk.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_create_chunk(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_chunk_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = retriever.Chunk.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.ai.generativelanguage_v1alpha.RetrieverServiceClient.create_chunk", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.RetrieverService", + "rpcName": "CreateChunk", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _CreateCorpus( + _BaseRetrieverServiceRestTransport._BaseCreateCorpus, RetrieverServiceRestStub + ): + def __hash__(self): + return hash("RetrieverServiceRestTransport.CreateCorpus") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: retriever_service.CreateCorpusRequest, + *, + 
retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> retriever.Corpus: + r"""Call the create corpus method over HTTP. + + Args: + request (~.retriever_service.CreateCorpusRequest): + The request object. Request to create a ``Corpus``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.retriever.Corpus: + A ``Corpus`` is a collection of ``Document``\ s. A + project can create up to 5 corpora. + + """ + + http_options = ( + _BaseRetrieverServiceRestTransport._BaseCreateCorpus._get_http_options() + ) + + request, metadata = self._interceptor.pre_create_corpus(request, metadata) + transcoded_request = _BaseRetrieverServiceRestTransport._BaseCreateCorpus._get_transcoded_request( + http_options, request + ) + + body = _BaseRetrieverServiceRestTransport._BaseCreateCorpus._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseRetrieverServiceRestTransport._BaseCreateCorpus._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.ai.generativelanguage_v1alpha.RetrieverServiceClient.CreateCorpus", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.RetrieverService", + "rpcName": "CreateCorpus", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = RetrieverServiceRestTransport._CreateCorpus._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = retriever.Corpus() + pb_resp = retriever.Corpus.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_create_corpus(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_corpus_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = retriever.Corpus.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.ai.generativelanguage_v1alpha.RetrieverServiceClient.create_corpus", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.RetrieverService", + "rpcName": "CreateCorpus", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _CreateDocument( + _BaseRetrieverServiceRestTransport._BaseCreateDocument, RetrieverServiceRestStub + ): + def __hash__(self): + return hash("RetrieverServiceRestTransport.CreateDocument") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: retriever_service.CreateDocumentRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> retriever.Document: + r"""Call the create document method over HTTP. + + Args: + request (~.retriever_service.CreateDocumentRequest): + The request object. Request to create a ``Document``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.retriever.Document: + A ``Document`` is a collection of ``Chunk``\ s. A + ``Corpus`` can have a maximum of 10,000 ``Document``\ s. 
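+
+            Example (an illustrative sketch only, assuming the same ``glm``
+            import and ``client`` as in the ``create_chunk`` example, and that
+            ``CustomMetadata`` exposes ``key`` and ``string_value`` as in the
+            retriever protos)::
+
+                document = client.create_document(
+                    parent="corpora/my-corpus",
+                    document=glm.Document(
+                        display_name="release-notes",
+                        custom_metadata=[
+                            glm.CustomMetadata(key="version", string_value="1.2.0"),
+                        ],
+                    ),
+                )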
+ + """ + + http_options = ( + _BaseRetrieverServiceRestTransport._BaseCreateDocument._get_http_options() + ) + + request, metadata = self._interceptor.pre_create_document(request, metadata) + transcoded_request = _BaseRetrieverServiceRestTransport._BaseCreateDocument._get_transcoded_request( + http_options, request + ) + + body = _BaseRetrieverServiceRestTransport._BaseCreateDocument._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseRetrieverServiceRestTransport._BaseCreateDocument._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.ai.generativelanguage_v1alpha.RetrieverServiceClient.CreateDocument", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.RetrieverService", + "rpcName": "CreateDocument", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = RetrieverServiceRestTransport._CreateDocument._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = retriever.Document() + pb_resp = retriever.Document.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_create_document(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_document_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = retriever.Document.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.ai.generativelanguage_v1alpha.RetrieverServiceClient.create_document", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.RetrieverService", + "rpcName": "CreateDocument", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _DeleteChunk( + _BaseRetrieverServiceRestTransport._BaseDeleteChunk, RetrieverServiceRestStub + ): + def __hash__(self): + return hash("RetrieverServiceRestTransport.DeleteChunk") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: 
retriever_service.DeleteChunkRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ): + r"""Call the delete chunk method over HTTP. + + Args: + request (~.retriever_service.DeleteChunkRequest): + The request object. Request to delete a ``Chunk``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + + http_options = ( + _BaseRetrieverServiceRestTransport._BaseDeleteChunk._get_http_options() + ) + + request, metadata = self._interceptor.pre_delete_chunk(request, metadata) + transcoded_request = _BaseRetrieverServiceRestTransport._BaseDeleteChunk._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseRetrieverServiceRestTransport._BaseDeleteChunk._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.ai.generativelanguage_v1alpha.RetrieverServiceClient.DeleteChunk", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.RetrieverService", + "rpcName": "DeleteChunk", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = RetrieverServiceRestTransport._DeleteChunk._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _DeleteCorpus( + _BaseRetrieverServiceRestTransport._BaseDeleteCorpus, RetrieverServiceRestStub + ): + def __hash__(self): + return hash("RetrieverServiceRestTransport.DeleteCorpus") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: retriever_service.DeleteCorpusRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ): + r"""Call the delete corpus method over HTTP. + + Args: + request (~.retriever_service.DeleteCorpusRequest): + The request object. Request to delete a ``Corpus``. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + + http_options = ( + _BaseRetrieverServiceRestTransport._BaseDeleteCorpus._get_http_options() + ) + + request, metadata = self._interceptor.pre_delete_corpus(request, metadata) + transcoded_request = _BaseRetrieverServiceRestTransport._BaseDeleteCorpus._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseRetrieverServiceRestTransport._BaseDeleteCorpus._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.ai.generativelanguage_v1alpha.RetrieverServiceClient.DeleteCorpus", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.RetrieverService", + "rpcName": "DeleteCorpus", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = RetrieverServiceRestTransport._DeleteCorpus._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _DeleteDocument( + _BaseRetrieverServiceRestTransport._BaseDeleteDocument, RetrieverServiceRestStub + ): + def __hash__(self): + return hash("RetrieverServiceRestTransport.DeleteDocument") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: retriever_service.DeleteDocumentRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ): + r"""Call the delete document method over HTTP. + + Args: + request (~.retriever_service.DeleteDocumentRequest): + The request object. Request to delete a ``Document``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + + http_options = ( + _BaseRetrieverServiceRestTransport._BaseDeleteDocument._get_http_options() + ) + + request, metadata = self._interceptor.pre_delete_document(request, metadata) + transcoded_request = _BaseRetrieverServiceRestTransport._BaseDeleteDocument._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseRetrieverServiceRestTransport._BaseDeleteDocument._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.ai.generativelanguage_v1alpha.RetrieverServiceClient.DeleteDocument", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.RetrieverService", + "rpcName": "DeleteDocument", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = RetrieverServiceRestTransport._DeleteDocument._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _GetChunk( + _BaseRetrieverServiceRestTransport._BaseGetChunk, RetrieverServiceRestStub + ): + def __hash__(self): + return hash("RetrieverServiceRestTransport.GetChunk") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: retriever_service.GetChunkRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> retriever.Chunk: + r"""Call the get chunk method over HTTP. + + Args: + request (~.retriever_service.GetChunkRequest): + The request object. Request for getting information about a specific + ``Chunk``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.retriever.Chunk: + A ``Chunk`` is a subpart of a ``Document`` that is + treated as an independent unit for the purposes of + vector representation and storage. 
A ``Corpus`` can have + a maximum of 1 million ``Chunk``\ s. + + """ + + http_options = ( + _BaseRetrieverServiceRestTransport._BaseGetChunk._get_http_options() + ) + + request, metadata = self._interceptor.pre_get_chunk(request, metadata) + transcoded_request = _BaseRetrieverServiceRestTransport._BaseGetChunk._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = ( + _BaseRetrieverServiceRestTransport._BaseGetChunk._get_query_params_json( + transcoded_request + ) + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.ai.generativelanguage_v1alpha.RetrieverServiceClient.GetChunk", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.RetrieverService", + "rpcName": "GetChunk", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = RetrieverServiceRestTransport._GetChunk._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = retriever.Chunk() + pb_resp = retriever.Chunk.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_get_chunk(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_chunk_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = retriever.Chunk.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.ai.generativelanguage_v1alpha.RetrieverServiceClient.get_chunk", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.RetrieverService", + "rpcName": "GetChunk", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _GetCorpus( + _BaseRetrieverServiceRestTransport._BaseGetCorpus, RetrieverServiceRestStub + ): + def __hash__(self): + return hash("RetrieverServiceRestTransport.GetCorpus") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: retriever_service.GetCorpusRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, 
Union[str, bytes]]] = (), + ) -> retriever.Corpus: + r"""Call the get corpus method over HTTP. + + Args: + request (~.retriever_service.GetCorpusRequest): + The request object. Request for getting information about a specific + ``Corpus``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.retriever.Corpus: + A ``Corpus`` is a collection of ``Document``\ s. A + project can create up to 5 corpora. + + """ + + http_options = ( + _BaseRetrieverServiceRestTransport._BaseGetCorpus._get_http_options() + ) + + request, metadata = self._interceptor.pre_get_corpus(request, metadata) + transcoded_request = _BaseRetrieverServiceRestTransport._BaseGetCorpus._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseRetrieverServiceRestTransport._BaseGetCorpus._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.ai.generativelanguage_v1alpha.RetrieverServiceClient.GetCorpus", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.RetrieverService", + "rpcName": "GetCorpus", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = RetrieverServiceRestTransport._GetCorpus._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = retriever.Corpus() + pb_resp = retriever.Corpus.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_get_corpus(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_corpus_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = retriever.Corpus.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.ai.generativelanguage_v1alpha.RetrieverServiceClient.get_corpus", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.RetrieverService", + "rpcName": "GetCorpus", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _GetDocument( + _BaseRetrieverServiceRestTransport._BaseGetDocument, RetrieverServiceRestStub + ): + def __hash__(self): + return hash("RetrieverServiceRestTransport.GetDocument") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: retriever_service.GetDocumentRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> retriever.Document: + r"""Call the get document method over HTTP. + + Args: + request (~.retriever_service.GetDocumentRequest): + The request object. Request for getting information about a specific + ``Document``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.retriever.Document: + A ``Document`` is a collection of ``Chunk``\ s. A + ``Corpus`` can have a maximum of 10,000 ``Document``\ s. 
+ + """ + + http_options = ( + _BaseRetrieverServiceRestTransport._BaseGetDocument._get_http_options() + ) + + request, metadata = self._interceptor.pre_get_document(request, metadata) + transcoded_request = _BaseRetrieverServiceRestTransport._BaseGetDocument._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseRetrieverServiceRestTransport._BaseGetDocument._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.ai.generativelanguage_v1alpha.RetrieverServiceClient.GetDocument", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.RetrieverService", + "rpcName": "GetDocument", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = RetrieverServiceRestTransport._GetDocument._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = retriever.Document() + pb_resp = retriever.Document.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_get_document(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_document_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = retriever.Document.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.ai.generativelanguage_v1alpha.RetrieverServiceClient.get_document", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.RetrieverService", + "rpcName": "GetDocument", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _ListChunks( + _BaseRetrieverServiceRestTransport._BaseListChunks, RetrieverServiceRestStub + ): + def __hash__(self): + return hash("RetrieverServiceRestTransport.ListChunks") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: retriever_service.ListChunksRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = 
(), + ) -> retriever_service.ListChunksResponse: + r"""Call the list chunks method over HTTP. + + Args: + request (~.retriever_service.ListChunksRequest): + The request object. Request for listing ``Chunk``\ s. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.retriever_service.ListChunksResponse: + Response from ``ListChunks`` containing a paginated list + of ``Chunk``\ s. The ``Chunk``\ s are sorted by + ascending ``chunk.create_time``. + + """ + + http_options = ( + _BaseRetrieverServiceRestTransport._BaseListChunks._get_http_options() + ) + + request, metadata = self._interceptor.pre_list_chunks(request, metadata) + transcoded_request = _BaseRetrieverServiceRestTransport._BaseListChunks._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseRetrieverServiceRestTransport._BaseListChunks._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.ai.generativelanguage_v1alpha.RetrieverServiceClient.ListChunks", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.RetrieverService", + "rpcName": "ListChunks", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = RetrieverServiceRestTransport._ListChunks._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = retriever_service.ListChunksResponse() + pb_resp = retriever_service.ListChunksResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_list_chunks(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_chunks_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = retriever_service.ListChunksResponse.to_json( + response + ) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.ai.generativelanguage_v1alpha.RetrieverServiceClient.list_chunks", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.RetrieverService", + "rpcName": "ListChunks", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _ListCorpora( + _BaseRetrieverServiceRestTransport._BaseListCorpora, RetrieverServiceRestStub + ): + def __hash__(self): + return hash("RetrieverServiceRestTransport.ListCorpora") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: retriever_service.ListCorporaRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> retriever_service.ListCorporaResponse: + r"""Call the list corpora method over HTTP. + + Args: + request (~.retriever_service.ListCorporaRequest): + The request object. Request for listing ``Corpora``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.retriever_service.ListCorporaResponse: + Response from ``ListCorpora`` containing a paginated + list of ``Corpora``. The results are sorted by ascending + ``corpus.create_time``. 
+ + """ + + http_options = ( + _BaseRetrieverServiceRestTransport._BaseListCorpora._get_http_options() + ) + + request, metadata = self._interceptor.pre_list_corpora(request, metadata) + transcoded_request = _BaseRetrieverServiceRestTransport._BaseListCorpora._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseRetrieverServiceRestTransport._BaseListCorpora._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.ai.generativelanguage_v1alpha.RetrieverServiceClient.ListCorpora", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.RetrieverService", + "rpcName": "ListCorpora", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = RetrieverServiceRestTransport._ListCorpora._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = retriever_service.ListCorporaResponse() + pb_resp = retriever_service.ListCorporaResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_list_corpora(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_corpora_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = retriever_service.ListCorporaResponse.to_json( + response + ) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.ai.generativelanguage_v1alpha.RetrieverServiceClient.list_corpora", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.RetrieverService", + "rpcName": "ListCorpora", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _ListDocuments( + _BaseRetrieverServiceRestTransport._BaseListDocuments, RetrieverServiceRestStub + ): + def __hash__(self): + return hash("RetrieverServiceRestTransport.ListDocuments") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: retriever_service.ListDocumentsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: 
Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> retriever_service.ListDocumentsResponse: + r"""Call the list documents method over HTTP. + + Args: + request (~.retriever_service.ListDocumentsRequest): + The request object. Request for listing ``Document``\ s. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.retriever_service.ListDocumentsResponse: + Response from ``ListDocuments`` containing a paginated + list of ``Document``\ s. The ``Document``\ s are sorted + by ascending ``document.create_time``. + + """ + + http_options = ( + _BaseRetrieverServiceRestTransport._BaseListDocuments._get_http_options() + ) + + request, metadata = self._interceptor.pre_list_documents(request, metadata) + transcoded_request = _BaseRetrieverServiceRestTransport._BaseListDocuments._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseRetrieverServiceRestTransport._BaseListDocuments._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.ai.generativelanguage_v1alpha.RetrieverServiceClient.ListDocuments", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.RetrieverService", + "rpcName": "ListDocuments", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = RetrieverServiceRestTransport._ListDocuments._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
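+            # On success, the JSON body is parsed below into a fresh
+            # ListDocumentsResponse protobuf; unknown fields are ignored so that
+            # newer server responses do not break older client versions.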
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = retriever_service.ListDocumentsResponse() + pb_resp = retriever_service.ListDocumentsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_list_documents(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_documents_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = retriever_service.ListDocumentsResponse.to_json( + response + ) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.ai.generativelanguage_v1alpha.RetrieverServiceClient.list_documents", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.RetrieverService", + "rpcName": "ListDocuments", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _QueryCorpus( + _BaseRetrieverServiceRestTransport._BaseQueryCorpus, RetrieverServiceRestStub + ): + def __hash__(self): + return hash("RetrieverServiceRestTransport.QueryCorpus") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: retriever_service.QueryCorpusRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> retriever_service.QueryCorpusResponse: + r"""Call the query corpus method over HTTP. + + Args: + request (~.retriever_service.QueryCorpusRequest): + The request object. Request for querying a ``Corpus``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.retriever_service.QueryCorpusResponse: + Response from ``QueryCorpus`` containing a list of + relevant chunks. 
+ + """ + + http_options = ( + _BaseRetrieverServiceRestTransport._BaseQueryCorpus._get_http_options() + ) + + request, metadata = self._interceptor.pre_query_corpus(request, metadata) + transcoded_request = _BaseRetrieverServiceRestTransport._BaseQueryCorpus._get_transcoded_request( + http_options, request + ) + + body = _BaseRetrieverServiceRestTransport._BaseQueryCorpus._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseRetrieverServiceRestTransport._BaseQueryCorpus._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.ai.generativelanguage_v1alpha.RetrieverServiceClient.QueryCorpus", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.RetrieverService", + "rpcName": "QueryCorpus", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = RetrieverServiceRestTransport._QueryCorpus._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = retriever_service.QueryCorpusResponse() + pb_resp = retriever_service.QueryCorpusResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_query_corpus(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_query_corpus_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = retriever_service.QueryCorpusResponse.to_json( + response + ) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.ai.generativelanguage_v1alpha.RetrieverServiceClient.query_corpus", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.RetrieverService", + "rpcName": "QueryCorpus", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _QueryDocument( + _BaseRetrieverServiceRestTransport._BaseQueryDocument, RetrieverServiceRestStub + ): + def __hash__(self): + return hash("RetrieverServiceRestTransport.QueryDocument") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def 
__call__( + self, + request: retriever_service.QueryDocumentRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> retriever_service.QueryDocumentResponse: + r"""Call the query document method over HTTP. + + Args: + request (~.retriever_service.QueryDocumentRequest): + The request object. Request for querying a ``Document``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.retriever_service.QueryDocumentResponse: + Response from ``QueryDocument`` containing a list of + relevant chunks. + + """ + + http_options = ( + _BaseRetrieverServiceRestTransport._BaseQueryDocument._get_http_options() + ) + + request, metadata = self._interceptor.pre_query_document(request, metadata) + transcoded_request = _BaseRetrieverServiceRestTransport._BaseQueryDocument._get_transcoded_request( + http_options, request + ) + + body = _BaseRetrieverServiceRestTransport._BaseQueryDocument._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseRetrieverServiceRestTransport._BaseQueryDocument._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.ai.generativelanguage_v1alpha.RetrieverServiceClient.QueryDocument", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.RetrieverService", + "rpcName": "QueryDocument", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = RetrieverServiceRestTransport._QueryDocument._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = retriever_service.QueryDocumentResponse() + pb_resp = retriever_service.QueryDocumentResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_query_document(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_query_document_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = retriever_service.QueryDocumentResponse.to_json( + response + ) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.ai.generativelanguage_v1alpha.RetrieverServiceClient.query_document", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.RetrieverService", + "rpcName": "QueryDocument", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _UpdateChunk( + _BaseRetrieverServiceRestTransport._BaseUpdateChunk, RetrieverServiceRestStub + ): + def __hash__(self): + return hash("RetrieverServiceRestTransport.UpdateChunk") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: retriever_service.UpdateChunkRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> retriever.Chunk: + r"""Call the update chunk method over HTTP. + + Args: + request (~.retriever_service.UpdateChunkRequest): + The request object. Request to update a ``Chunk``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.retriever.Chunk: + A ``Chunk`` is a subpart of a ``Document`` that is + treated as an independent unit for the purposes of + vector representation and storage. A ``Corpus`` can have + a maximum of 1 million ``Chunk``\ s. 
+ + """ + + http_options = ( + _BaseRetrieverServiceRestTransport._BaseUpdateChunk._get_http_options() + ) + + request, metadata = self._interceptor.pre_update_chunk(request, metadata) + transcoded_request = _BaseRetrieverServiceRestTransport._BaseUpdateChunk._get_transcoded_request( + http_options, request + ) + + body = _BaseRetrieverServiceRestTransport._BaseUpdateChunk._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseRetrieverServiceRestTransport._BaseUpdateChunk._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.ai.generativelanguage_v1alpha.RetrieverServiceClient.UpdateChunk", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.RetrieverService", + "rpcName": "UpdateChunk", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = RetrieverServiceRestTransport._UpdateChunk._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = retriever.Chunk() + pb_resp = retriever.Chunk.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_update_chunk(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_chunk_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = retriever.Chunk.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.ai.generativelanguage_v1alpha.RetrieverServiceClient.update_chunk", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.RetrieverService", + "rpcName": "UpdateChunk", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _UpdateCorpus( + _BaseRetrieverServiceRestTransport._BaseUpdateCorpus, RetrieverServiceRestStub + ): + def __hash__(self): + return hash("RetrieverServiceRestTransport.UpdateCorpus") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: retriever_service.UpdateCorpusRequest, + *, + 
retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> retriever.Corpus: + r"""Call the update corpus method over HTTP. + + Args: + request (~.retriever_service.UpdateCorpusRequest): + The request object. Request to update a ``Corpus``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.retriever.Corpus: + A ``Corpus`` is a collection of ``Document``\ s. A + project can create up to 5 corpora. + + """ + + http_options = ( + _BaseRetrieverServiceRestTransport._BaseUpdateCorpus._get_http_options() + ) + + request, metadata = self._interceptor.pre_update_corpus(request, metadata) + transcoded_request = _BaseRetrieverServiceRestTransport._BaseUpdateCorpus._get_transcoded_request( + http_options, request + ) + + body = _BaseRetrieverServiceRestTransport._BaseUpdateCorpus._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseRetrieverServiceRestTransport._BaseUpdateCorpus._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.ai.generativelanguage_v1alpha.RetrieverServiceClient.UpdateCorpus", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.RetrieverService", + "rpcName": "UpdateCorpus", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = RetrieverServiceRestTransport._UpdateCorpus._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
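+            # Once the response is deserialized below, the interceptor's
+            # post_update_corpus and post_update_corpus_with_metadata hooks run,
+            # giving subclasses a chance to adjust the returned Corpus and its
+            # response metadata before they reach the caller.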
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = retriever.Corpus() + pb_resp = retriever.Corpus.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_update_corpus(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_corpus_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = retriever.Corpus.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.ai.generativelanguage_v1alpha.RetrieverServiceClient.update_corpus", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.RetrieverService", + "rpcName": "UpdateCorpus", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _UpdateDocument( + _BaseRetrieverServiceRestTransport._BaseUpdateDocument, RetrieverServiceRestStub + ): + def __hash__(self): + return hash("RetrieverServiceRestTransport.UpdateDocument") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: retriever_service.UpdateDocumentRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> retriever.Document: + r"""Call the update document method over HTTP. + + Args: + request (~.retriever_service.UpdateDocumentRequest): + The request object. Request to update a ``Document``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.retriever.Document: + A ``Document`` is a collection of ``Chunk``\ s. A + ``Corpus`` can have a maximum of 10,000 ``Document``\ s. 
+ + """ + + http_options = ( + _BaseRetrieverServiceRestTransport._BaseUpdateDocument._get_http_options() + ) + + request, metadata = self._interceptor.pre_update_document(request, metadata) + transcoded_request = _BaseRetrieverServiceRestTransport._BaseUpdateDocument._get_transcoded_request( + http_options, request + ) + + body = _BaseRetrieverServiceRestTransport._BaseUpdateDocument._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseRetrieverServiceRestTransport._BaseUpdateDocument._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.ai.generativelanguage_v1alpha.RetrieverServiceClient.UpdateDocument", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.RetrieverService", + "rpcName": "UpdateDocument", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = RetrieverServiceRestTransport._UpdateDocument._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = retriever.Document() + pb_resp = retriever.Document.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_update_document(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_document_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = retriever.Document.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.ai.generativelanguage_v1alpha.RetrieverServiceClient.update_document", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.RetrieverService", + "rpcName": "UpdateDocument", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + @property + def batch_create_chunks( + self, + ) -> Callable[ + [retriever_service.BatchCreateChunksRequest], + retriever_service.BatchCreateChunksResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._BatchCreateChunks(self._session, self._host, self._interceptor) # type: ignore + + @property + def batch_delete_chunks( + self, + ) -> Callable[[retriever_service.BatchDeleteChunksRequest], empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._BatchDeleteChunks(self._session, self._host, self._interceptor) # type: ignore + + @property + def batch_update_chunks( + self, + ) -> Callable[ + [retriever_service.BatchUpdateChunksRequest], + retriever_service.BatchUpdateChunksResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._BatchUpdateChunks(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_chunk( + self, + ) -> Callable[[retriever_service.CreateChunkRequest], retriever.Chunk]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateChunk(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_corpus( + self, + ) -> Callable[[retriever_service.CreateCorpusRequest], retriever.Corpus]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateCorpus(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_document( + self, + ) -> Callable[[retriever_service.CreateDocumentRequest], retriever.Document]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateDocument(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_chunk( + self, + ) -> Callable[[retriever_service.DeleteChunkRequest], empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteChunk(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_corpus( + self, + ) -> Callable[[retriever_service.DeleteCorpusRequest], empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteCorpus(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_document( + self, + ) -> Callable[[retriever_service.DeleteDocumentRequest], empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteDocument(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_chunk( + self, + ) -> Callable[[retriever_service.GetChunkRequest], retriever.Chunk]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetChunk(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_corpus( + self, + ) -> Callable[[retriever_service.GetCorpusRequest], retriever.Corpus]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._GetCorpus(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_document( + self, + ) -> Callable[[retriever_service.GetDocumentRequest], retriever.Document]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetDocument(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_chunks( + self, + ) -> Callable[ + [retriever_service.ListChunksRequest], retriever_service.ListChunksResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListChunks(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_corpora( + self, + ) -> Callable[ + [retriever_service.ListCorporaRequest], retriever_service.ListCorporaResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListCorpora(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_documents( + self, + ) -> Callable[ + [retriever_service.ListDocumentsRequest], + retriever_service.ListDocumentsResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListDocuments(self._session, self._host, self._interceptor) # type: ignore + + @property + def query_corpus( + self, + ) -> Callable[ + [retriever_service.QueryCorpusRequest], retriever_service.QueryCorpusResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._QueryCorpus(self._session, self._host, self._interceptor) # type: ignore + + @property + def query_document( + self, + ) -> Callable[ + [retriever_service.QueryDocumentRequest], + retriever_service.QueryDocumentResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._QueryDocument(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_chunk( + self, + ) -> Callable[[retriever_service.UpdateChunkRequest], retriever.Chunk]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateChunk(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_corpus( + self, + ) -> Callable[[retriever_service.UpdateCorpusRequest], retriever.Corpus]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateCorpus(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_document( + self, + ) -> Callable[[retriever_service.UpdateDocumentRequest], retriever.Document]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._UpdateDocument(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_operation(self): + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + + class _GetOperation( + _BaseRetrieverServiceRestTransport._BaseGetOperation, RetrieverServiceRestStub + ): + def __hash__(self): + return hash("RetrieverServiceRestTransport.GetOperation") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: operations_pb2.GetOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Call the get operation method over HTTP. + + Args: + request (operations_pb2.GetOperationRequest): + The request object for GetOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + operations_pb2.Operation: Response from GetOperation method. + """ + + http_options = ( + _BaseRetrieverServiceRestTransport._BaseGetOperation._get_http_options() + ) + + request, metadata = self._interceptor.pre_get_operation(request, metadata) + transcoded_request = _BaseRetrieverServiceRestTransport._BaseGetOperation._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseRetrieverServiceRestTransport._BaseGetOperation._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.ai.generativelanguage_v1alpha.RetrieverServiceClient.GetOperation", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.RetrieverService", + "rpcName": "GetOperation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = RetrieverServiceRestTransport._GetOperation._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
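+            # On success, the decoded body is parsed below directly into an
+            # operations_pb2.Operation with json_format.Parse, since this
+            # operations mixin works with raw long-running-operation protos
+            # rather than generated service message types.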
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = operations_pb2.Operation() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_get_operation(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.ai.generativelanguage_v1alpha.RetrieverServiceAsyncClient.GetOperation", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.RetrieverService", + "rpcName": "GetOperation", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) + return resp + + @property + def list_operations(self): + return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore + + class _ListOperations( + _BaseRetrieverServiceRestTransport._BaseListOperations, RetrieverServiceRestStub + ): + def __hash__(self): + return hash("RetrieverServiceRestTransport.ListOperations") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: operations_pb2.ListOperationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Call the list operations method over HTTP. + + Args: + request (operations_pb2.ListOperationsRequest): + The request object for ListOperations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + operations_pb2.ListOperationsResponse: Response from ListOperations method. 
+ """ + + http_options = ( + _BaseRetrieverServiceRestTransport._BaseListOperations._get_http_options() + ) + + request, metadata = self._interceptor.pre_list_operations(request, metadata) + transcoded_request = _BaseRetrieverServiceRestTransport._BaseListOperations._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseRetrieverServiceRestTransport._BaseListOperations._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.ai.generativelanguage_v1alpha.RetrieverServiceClient.ListOperations", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.RetrieverService", + "rpcName": "ListOperations", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = RetrieverServiceRestTransport._ListOperations._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = operations_pb2.ListOperationsResponse() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_list_operations(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.ai.generativelanguage_v1alpha.RetrieverServiceAsyncClient.ListOperations", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.RetrieverService", + "rpcName": "ListOperations", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) + return resp + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("RetrieverServiceRestTransport",) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/retriever_service/transports/rest_base.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/retriever_service/transports/rest_base.py new file mode 100644 index 000000000000..b5fb721e220f --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/retriever_service/transports/rest_base.py @@ -0,0 +1,1196 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union + +from google.api_core import gapic_v1, path_template +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import json_format + +from google.ai.generativelanguage_v1alpha.types import retriever, retriever_service + +from .base import DEFAULT_CLIENT_INFO, RetrieverServiceTransport + + +class _BaseRetrieverServiceRestTransport(RetrieverServiceTransport): + """Base REST backend transport for RetrieverService. + + Note: This class is not meant to be used directly. Use its sync and + async sub-classes instead. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + """ + + def __init__( + self, + *, + host: str = "generativelanguage.googleapis.com", + credentials: Optional[Any] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + Args: + host (Optional[str]): + The hostname to connect to (default: 'generativelanguage.googleapis.com'). + credentials (Optional[Any]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. 
+ """ + # Run the base constructor + maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) + if maybe_url_match is None: + raise ValueError( + f"Unexpected hostname structure: {host}" + ) # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + class _BaseBatchCreateChunks: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1alpha/{parent=corpora/*/documents/*}/chunks:batchCreate", + "body": "*", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = retriever_service.BatchCreateChunksRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseRetrieverServiceRestTransport._BaseBatchCreateChunks._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseBatchDeleteChunks: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1alpha/{parent=corpora/*/documents/*}/chunks:batchDelete", + "body": "*", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = retriever_service.BatchDeleteChunksRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseRetrieverServiceRestTransport._BaseBatchDeleteChunks._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseBatchUpdateChunks: + def __hash__(self): # pragma: NO COVER + return 
NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1alpha/{parent=corpora/*/documents/*}/chunks:batchUpdate", + "body": "*", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = retriever_service.BatchUpdateChunksRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseRetrieverServiceRestTransport._BaseBatchUpdateChunks._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseCreateChunk: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1alpha/{parent=corpora/*/documents/*}/chunks", + "body": "chunk", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = retriever_service.CreateChunkRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseRetrieverServiceRestTransport._BaseCreateChunk._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseCreateCorpus: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1alpha/corpora", + "body": "corpus", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = retriever_service.CreateCorpusRequest.pb(request) + transcoded_request = 
path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseRetrieverServiceRestTransport._BaseCreateCorpus._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseCreateDocument: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1alpha/{parent=corpora/*}/documents", + "body": "document", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = retriever_service.CreateDocumentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseRetrieverServiceRestTransport._BaseCreateDocument._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseDeleteChunk: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1alpha/{name=corpora/*/documents/*/chunks/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = retriever_service.DeleteChunkRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseRetrieverServiceRestTransport._BaseDeleteChunk._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseDeleteCorpus: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod 
+ def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1alpha/{name=corpora/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = retriever_service.DeleteCorpusRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseRetrieverServiceRestTransport._BaseDeleteCorpus._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseDeleteDocument: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1alpha/{name=corpora/*/documents/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = retriever_service.DeleteDocumentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseRetrieverServiceRestTransport._BaseDeleteDocument._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGetChunk: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha/{name=corpora/*/documents/*/chunks/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = retriever_service.GetChunkRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseRetrieverServiceRestTransport._BaseGetChunk._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGetCorpus: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + 
__REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha/{name=corpora/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = retriever_service.GetCorpusRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseRetrieverServiceRestTransport._BaseGetCorpus._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGetDocument: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha/{name=corpora/*/documents/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = retriever_service.GetDocumentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseRetrieverServiceRestTransport._BaseGetDocument._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseListChunks: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha/{parent=corpora/*/documents/*}/chunks", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = retriever_service.ListChunksRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseRetrieverServiceRestTransport._BaseListChunks._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseListCorpora: + def __hash__(self): # pragma: NO COVER + return 
NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha/corpora", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = retriever_service.ListCorporaRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseListDocuments: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha/{parent=corpora/*}/documents", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = retriever_service.ListDocumentsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseRetrieverServiceRestTransport._BaseListDocuments._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseQueryCorpus: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1alpha/{name=corpora/*}:query", + "body": "*", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = retriever_service.QueryCorpusRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseRetrieverServiceRestTransport._BaseQueryCorpus._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseQueryDocument: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + 
+ @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1alpha/{name=corpora/*/documents/*}:query", + "body": "*", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = retriever_service.QueryDocumentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseRetrieverServiceRestTransport._BaseQueryDocument._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseUpdateChunk: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "updateMask": {}, + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1alpha/{chunk.name=corpora/*/documents/*/chunks/*}", + "body": "chunk", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = retriever_service.UpdateChunkRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseRetrieverServiceRestTransport._BaseUpdateChunk._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseUpdateCorpus: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "updateMask": {}, + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1alpha/{corpus.name=corpora/*}", + "body": "corpus", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = retriever_service.UpdateCorpusRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + 
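+        # Explanatory note: ``updateMask`` is a required field for this RPC,
+        # so ``_get_query_params_json`` below merges the default from
+        # ``__REQUIRED_FIELDS_DEFAULT_VALUES`` whenever the caller leaves it
+        # unset.  Illustrative values only:
+        #
+        #   {"updateMask": "display_name"}  ->  left untouched
+        #   {}                              ->  {"updateMask": {}}
+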
@staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseRetrieverServiceRestTransport._BaseUpdateCorpus._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseUpdateDocument: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "updateMask": {}, + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1alpha/{document.name=corpora/*/documents/*}", + "body": "document", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = retriever_service.UpdateDocumentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseRetrieverServiceRestTransport._BaseUpdateDocument._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGetOperation: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha/{name=tunedModels/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1alpha/{name=generatedFiles/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1alpha/{name=models/*/operations/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + return query_params + + class _BaseListOperations: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha/{name=tunedModels/*}/operations", + }, + { + "method": "get", + "uri": "/v1alpha/{name=models/*}/operations", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = 
path_template.transcode(http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + return query_params + + +__all__ = ("_BaseRetrieverServiceRestTransport",) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/text_service/__init__.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/text_service/__init__.py new file mode 100644 index 000000000000..56818276d243 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/text_service/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .async_client import TextServiceAsyncClient +from .client import TextServiceClient + +__all__ = ( + "TextServiceClient", + "TextServiceAsyncClient", +) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/text_service/async_client.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/text_service/async_client.py new file mode 100644 index 000000000000..f9dd0d629eba --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/text_service/async_client.py @@ -0,0 +1,1005 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import logging as std_logging +import re +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry_async as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.ai.generativelanguage_v1alpha import gapic_version as package_version + +try: + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore + +from google.longrunning import operations_pb2 # type: ignore + +from google.ai.generativelanguage_v1alpha.types import safety, text_service + +from .client import TextServiceClient +from .transports.base import DEFAULT_CLIENT_INFO, TextServiceTransport +from .transports.grpc_asyncio import TextServiceGrpcAsyncIOTransport + +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + + +class TextServiceAsyncClient: + """API for using Generative Language Models (GLMs) trained to + generate text. + Also known as Large Language Models (LLM)s, these generate text + given an input prompt from the user. + """ + + _client: TextServiceClient + + # Copy defaults from the synchronous client for use here. + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. + DEFAULT_ENDPOINT = TextServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = TextServiceClient.DEFAULT_MTLS_ENDPOINT + _DEFAULT_ENDPOINT_TEMPLATE = TextServiceClient._DEFAULT_ENDPOINT_TEMPLATE + _DEFAULT_UNIVERSE = TextServiceClient._DEFAULT_UNIVERSE + + model_path = staticmethod(TextServiceClient.model_path) + parse_model_path = staticmethod(TextServiceClient.parse_model_path) + common_billing_account_path = staticmethod( + TextServiceClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + TextServiceClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(TextServiceClient.common_folder_path) + parse_common_folder_path = staticmethod(TextServiceClient.parse_common_folder_path) + common_organization_path = staticmethod(TextServiceClient.common_organization_path) + parse_common_organization_path = staticmethod( + TextServiceClient.parse_common_organization_path + ) + common_project_path = staticmethod(TextServiceClient.common_project_path) + parse_common_project_path = staticmethod( + TextServiceClient.parse_common_project_path + ) + common_location_path = staticmethod(TextServiceClient.common_location_path) + parse_common_location_path = staticmethod( + TextServiceClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + TextServiceAsyncClient: The constructed client. 
+ """ + return TextServiceClient.from_service_account_info.__func__(TextServiceAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + TextServiceAsyncClient: The constructed client. + """ + return TextServiceClient.from_service_account_file.__func__(TextServiceAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return TextServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> TextServiceTransport: + """Returns the transport used by the client instance. + + Returns: + TextServiceTransport: The transport used by the client instance. + """ + return self._client.transport + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._client._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used + by the client instance. + """ + return self._client._universe_domain + + get_transport_class = TextServiceClient.get_transport_class + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[str, TextServiceTransport, Callable[..., TextServiceTransport]] + ] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the text service async client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. 
These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,TextServiceTransport,Callable[..., TextServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport to use. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the TextServiceTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. 
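+
+        For illustration, a minimal construction (every argument is
+        optional; the defaults described above apply when nothing is
+        passed):
+
+        .. code-block:: python
+
+            from google.ai import generativelanguage_v1alpha
+
+            # Uses credentials discovered from the environment and the
+            # default "grpc_asyncio" transport.
+            client = generativelanguage_v1alpha.TextServiceAsyncClient()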
+ """ + self._client = TextServiceClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ): # pragma: NO COVER + _LOGGER.debug( + "Created client `google.ai.generativelanguage_v1alpha.TextServiceAsyncClient`.", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.TextService", + "universeDomain": getattr( + self._client._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._client._transport, "_credentials") + else { + "serviceName": "google.ai.generativelanguage.v1alpha.TextService", + "credentialsType": None, + }, + ) + + async def generate_text( + self, + request: Optional[Union[text_service.GenerateTextRequest, dict]] = None, + *, + model: Optional[str] = None, + prompt: Optional[text_service.TextPrompt] = None, + temperature: Optional[float] = None, + candidate_count: Optional[int] = None, + max_output_tokens: Optional[int] = None, + top_p: Optional[float] = None, + top_k: Optional[int] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> text_service.GenerateTextResponse: + r"""Generates a response from the model given an input + message. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1alpha + + async def sample_generate_text(): + # Create a client + client = generativelanguage_v1alpha.TextServiceAsyncClient() + + # Initialize request argument(s) + prompt = generativelanguage_v1alpha.TextPrompt() + prompt.text = "text_value" + + request = generativelanguage_v1alpha.GenerateTextRequest( + model="model_value", + prompt=prompt, + ) + + # Make the request + response = await client.generate_text(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.ai.generativelanguage_v1alpha.types.GenerateTextRequest, dict]]): + The request object. Request to generate a text completion + response from the model. + model (:class:`str`): + Required. The name of the ``Model`` or ``TunedModel`` to + use for generating the completion. Examples: + models/text-bison-001 + tunedModels/sentence-translator-u3b7m + + This corresponds to the ``model`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + prompt (:class:`google.ai.generativelanguage_v1alpha.types.TextPrompt`): + Required. The free-form input text + given to the model as a prompt. + Given a prompt, the model will generate + a TextCompletion response it predicts as + the completion of the input text. + + This corresponds to the ``prompt`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + temperature (:class:`float`): + Optional. Controls the randomness of the output. 
Note: + The default value varies by model, see the + ``Model.temperature`` attribute of the ``Model`` + returned the ``getModel`` function. + + Values can range from [0.0,1.0], inclusive. A value + closer to 1.0 will produce responses that are more + varied and creative, while a value closer to 0.0 will + typically result in more straightforward responses from + the model. + + This corresponds to the ``temperature`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + candidate_count (:class:`int`): + Optional. Number of generated responses to return. + + This value must be between [1, 8], inclusive. If unset, + this will default to 1. + + This corresponds to the ``candidate_count`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + max_output_tokens (:class:`int`): + Optional. The maximum number of tokens to include in a + candidate. + + If unset, this will default to output_token_limit + specified in the ``Model`` specification. + + This corresponds to the ``max_output_tokens`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + top_p (:class:`float`): + Optional. The maximum cumulative probability of tokens + to consider when sampling. + + The model uses combined Top-k and nucleus sampling. + + Tokens are sorted based on their assigned probabilities + so that only the most likely tokens are considered. + Top-k sampling directly limits the maximum number of + tokens to consider, while Nucleus sampling limits number + of tokens based on the cumulative probability. + + Note: The default value varies by model, see the + ``Model.top_p`` attribute of the ``Model`` returned the + ``getModel`` function. + + This corresponds to the ``top_p`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + top_k (:class:`int`): + Optional. The maximum number of tokens to consider when + sampling. + + The model uses combined Top-k and nucleus sampling. + + Top-k sampling considers the set of ``top_k`` most + probable tokens. Defaults to 40. + + Note: The default value varies by model, see the + ``Model.top_k`` attribute of the ``Model`` returned the + ``getModel`` function. + + This corresponds to the ``top_k`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.ai.generativelanguage_v1alpha.types.GenerateTextResponse: + The response from the model, + including candidate completions. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any( + [ + model, + prompt, + temperature, + candidate_count, + max_output_tokens, + top_p, + top_k, + ] + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, text_service.GenerateTextRequest): + request = text_service.GenerateTextRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if model is not None: + request.model = model + if prompt is not None: + request.prompt = prompt + if temperature is not None: + request.temperature = temperature + if candidate_count is not None: + request.candidate_count = candidate_count + if max_output_tokens is not None: + request.max_output_tokens = max_output_tokens + if top_p is not None: + request.top_p = top_p + if top_k is not None: + request.top_k = top_k + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.generate_text + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("model", request.model),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def embed_text( + self, + request: Optional[Union[text_service.EmbedTextRequest, dict]] = None, + *, + model: Optional[str] = None, + text: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> text_service.EmbedTextResponse: + r"""Generates an embedding from the model given an input + message. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1alpha + + async def sample_embed_text(): + # Create a client + client = generativelanguage_v1alpha.TextServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.EmbedTextRequest( + model="model_value", + ) + + # Make the request + response = await client.embed_text(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.ai.generativelanguage_v1alpha.types.EmbedTextRequest, dict]]): + The request object. Request to get a text embedding from + the model. + model (:class:`str`): + Required. The model name to use with + the format model=models/{model}. + + This corresponds to the ``model`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + text (:class:`str`): + Optional. The free-form input text + that the model will turn into an + embedding. + + This corresponds to the ``text`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.ai.generativelanguage_v1alpha.types.EmbedTextResponse: + The response to a EmbedTextRequest. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([model, text]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, text_service.EmbedTextRequest): + request = text_service.EmbedTextRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if model is not None: + request.model = model + if text is not None: + request.text = text + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.embed_text + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("model", request.model),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def batch_embed_text( + self, + request: Optional[Union[text_service.BatchEmbedTextRequest, dict]] = None, + *, + model: Optional[str] = None, + texts: Optional[MutableSequence[str]] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> text_service.BatchEmbedTextResponse: + r"""Generates multiple embeddings from the model given + input text in a synchronous call. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1alpha + + async def sample_batch_embed_text(): + # Create a client + client = generativelanguage_v1alpha.TextServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.BatchEmbedTextRequest( + model="model_value", + ) + + # Make the request + response = await client.batch_embed_text(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.ai.generativelanguage_v1alpha.types.BatchEmbedTextRequest, dict]]): + The request object. Batch request to get a text embedding + from the model. + model (:class:`str`): + Required. The name of the ``Model`` to use for + generating the embedding. 
Examples: + models/embedding-gecko-001 + + This corresponds to the ``model`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + texts (:class:`MutableSequence[str]`): + Optional. The free-form input texts + that the model will turn into an + embedding. The current limit is 100 + texts, over which an error will be + thrown. + + This corresponds to the ``texts`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.ai.generativelanguage_v1alpha.types.BatchEmbedTextResponse: + The response to a EmbedTextRequest. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([model, texts]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, text_service.BatchEmbedTextRequest): + request = text_service.BatchEmbedTextRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if model is not None: + request.model = model + if texts: + request.texts.extend(texts) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.batch_embed_text + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("model", request.model),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def count_text_tokens( + self, + request: Optional[Union[text_service.CountTextTokensRequest, dict]] = None, + *, + model: Optional[str] = None, + prompt: Optional[text_service.TextPrompt] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> text_service.CountTextTokensResponse: + r"""Runs a model's tokenizer on a text and returns the + token count. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1alpha + + async def sample_count_text_tokens(): + # Create a client + client = generativelanguage_v1alpha.TextServiceAsyncClient() + + # Initialize request argument(s) + prompt = generativelanguage_v1alpha.TextPrompt() + prompt.text = "text_value" + + request = generativelanguage_v1alpha.CountTextTokensRequest( + model="model_value", + prompt=prompt, + ) + + # Make the request + response = await client.count_text_tokens(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.ai.generativelanguage_v1alpha.types.CountTextTokensRequest, dict]]): + The request object. Counts the number of tokens in the ``prompt`` sent to a + model. + + Models may tokenize text differently, so each model may + return a different ``token_count``. + model (:class:`str`): + Required. The model's resource name. This serves as an + ID for the Model to use. + + This name should match a model name returned by the + ``ListModels`` method. + + Format: ``models/{model}`` + + This corresponds to the ``model`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + prompt (:class:`google.ai.generativelanguage_v1alpha.types.TextPrompt`): + Required. The free-form input text + given to the model as a prompt. + + This corresponds to the ``prompt`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.ai.generativelanguage_v1alpha.types.CountTextTokensResponse: + A response from CountTextTokens. + + It returns the model's token_count for the prompt. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([model, prompt]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, text_service.CountTextTokensRequest): + request = text_service.CountTextTokensRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if model is not None: + request.model = model + if prompt is not None: + request.prompt = prompt + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.count_text_tokens + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("model", request.model),)), + ) + + # Validate the universe domain. 
+ self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.list_operations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
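+        # (In rough terms, each entry in ``_wrapped_methods`` is the bare
+        #  transport method wrapped so that the client's default retry,
+        #  timeout and client-info metadata are applied when the caller
+        #  passes ``gapic_v1.method.DEFAULT``; this is a description of the
+        #  intent, not the literal implementation.)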
+ rpc = self.transport._wrapped_methods[self._client._transport.get_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def __aenter__(self) -> "TextServiceAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("TextServiceAsyncClient",) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/text_service/client.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/text_service/client.py new file mode 100644 index 000000000000..239868ef37e4 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/text_service/client.py @@ -0,0 +1,1423 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
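+#
+# Editorial note (illustrative sketch, not part of the generated sources): the
+# ``TextServiceAsyncClient`` defined in the preceding module implements
+# ``__aenter__``/``__aexit__``, so it can manage its transport with
+# ``async with``. Assuming default credentials are available in the
+# environment, usage might look like:
+#
+#     import asyncio
+#     from google.ai import generativelanguage_v1alpha
+#
+#     async def main():
+#         async with generativelanguage_v1alpha.TextServiceAsyncClient() as client:
+#             prompt = generativelanguage_v1alpha.TextPrompt(text="Hello")
+#             request = generativelanguage_v1alpha.CountTextTokensRequest(
+#                 model="models/text-bison-001", prompt=prompt
+#             )
+#             response = await client.count_text_tokens(request=request)
+#             print(response.token_count)
+#
+#     asyncio.run(main())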
+# +from collections import OrderedDict +from http import HTTPStatus +import json +import logging as std_logging +import os +import re +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) +import warnings + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.ai.generativelanguage_v1alpha import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + +from google.longrunning import operations_pb2 # type: ignore + +from google.ai.generativelanguage_v1alpha.types import safety, text_service + +from .transports.base import DEFAULT_CLIENT_INFO, TextServiceTransport +from .transports.grpc import TextServiceGrpcTransport +from .transports.grpc_asyncio import TextServiceGrpcAsyncIOTransport +from .transports.rest import TextServiceRestTransport + + +class TextServiceClientMeta(type): + """Metaclass for the TextService client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + + _transport_registry = OrderedDict() # type: Dict[str, Type[TextServiceTransport]] + _transport_registry["grpc"] = TextServiceGrpcTransport + _transport_registry["grpc_asyncio"] = TextServiceGrpcAsyncIOTransport + _transport_registry["rest"] = TextServiceRestTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[TextServiceTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class TextServiceClient(metaclass=TextServiceClientMeta): + """API for using Generative Language Models (GLMs) trained to + generate text. + Also known as Large Language Models (LLM)s, these generate text + given an input prompt from the user. + """ + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. 
+ """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. + DEFAULT_ENDPOINT = "generativelanguage.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + _DEFAULT_ENDPOINT_TEMPLATE = "generativelanguage.{UNIVERSE_DOMAIN}" + _DEFAULT_UNIVERSE = "googleapis.com" + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + TextServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + TextServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> TextServiceTransport: + """Returns the transport used by the client instance. + + Returns: + TextServiceTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def model_path( + model: str, + ) -> str: + """Returns a fully-qualified model string.""" + return "models/{model}".format( + model=model, + ) + + @staticmethod + def parse_model_path(path: str) -> Dict[str, str]: + """Parses a model path into its component segments.""" + m = re.match(r"^models/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path( + billing_account: str, + ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path( + folder: str, + ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format( + folder=folder, + ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path( + organization: str, + ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format( + organization=organization, + ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path( + project: str, + ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format( + project=project, + ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path( + project: str, + location: str, + ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): + """Deprecated. Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. 
+ (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + + warnings.warn( + "get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", + DeprecationWarning, + ) + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + @staticmethod + def _read_environment_variables(): + """Returns the environment variables used by the client. + + Returns: + Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, + GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. + + Raises: + ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not + any of ["true", "false"]. + google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT + is not any of ["auto", "never", "always"]. + """ + use_client_cert = os.getenv( + "GOOGLE_API_USE_CLIENT_CERTIFICATE", "false" + ).lower() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() + universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + return use_client_cert == "true", use_mtls_endpoint, universe_domain_env + + @staticmethod + def _get_client_cert_source(provided_cert_source, use_cert_flag): + """Return the client cert source to be used by the client. 
+ + Args: + provided_cert_source (bytes): The client certificate source provided. + use_cert_flag (bool): A flag indicating whether to use the client certificate. + + Returns: + bytes or None: The client cert source to be used by the client. + """ + client_cert_source = None + if use_cert_flag: + if provided_cert_source: + client_cert_source = provided_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + return client_cert_source + + @staticmethod + def _get_api_endpoint( + api_override, client_cert_source, universe_domain, use_mtls_endpoint + ): + """Return the API endpoint used by the client. + + Args: + api_override (str): The API endpoint override. If specified, this is always + the return value of this function and the other arguments are not used. + client_cert_source (bytes): The client certificate source used by the client. + universe_domain (str): The universe domain used by the client. + use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. + Possible values are "always", "auto", or "never". + + Returns: + str: The API endpoint to be used by the client. + """ + if api_override is not None: + api_endpoint = api_override + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + _default_universe = TextServiceClient._DEFAULT_UNIVERSE + if universe_domain != _default_universe: + raise MutualTLSChannelError( + f"mTLS is not supported in any universe other than {_default_universe}." + ) + api_endpoint = TextServiceClient.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = TextServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=universe_domain + ) + return api_endpoint + + @staticmethod + def _get_universe_domain( + client_universe_domain: Optional[str], universe_domain_env: Optional[str] + ) -> str: + """Return the universe domain used by the client. + + Args: + client_universe_domain (Optional[str]): The universe domain configured via the client options. + universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. + + Returns: + str: The universe domain to be used by the client. + + Raises: + ValueError: If the universe domain is an empty string. + """ + universe_domain = TextServiceClient._DEFAULT_UNIVERSE + if client_universe_domain is not None: + universe_domain = client_universe_domain + elif universe_domain_env is not None: + universe_domain = universe_domain_env + if len(universe_domain.strip()) == 0: + raise ValueError("Universe Domain cannot be an empty string.") + return universe_domain + + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. + + Returns: + bool: True iff the configured universe domain is valid. + + Raises: + ValueError: If the configured universe domain is not valid. + """ + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True + + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. 
+ """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used by the client instance. + """ + return self._universe_domain + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[str, TextServiceTransport, Callable[..., TextServiceTransport]] + ] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the text service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,TextServiceTransport,Callable[..., TextServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the TextServiceTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that the ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. 
+ Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client_options = client_options + if isinstance(self._client_options, dict): + self._client_options = client_options_lib.from_dict(self._client_options) + if self._client_options is None: + self._client_options = client_options_lib.ClientOptions() + self._client_options = cast( + client_options_lib.ClientOptions, self._client_options + ) + + universe_domain_opt = getattr(self._client_options, "universe_domain", None) + + ( + self._use_client_cert, + self._use_mtls_endpoint, + self._universe_domain_env, + ) = TextServiceClient._read_environment_variables() + self._client_cert_source = TextServiceClient._get_client_cert_source( + self._client_options.client_cert_source, self._use_client_cert + ) + self._universe_domain = TextServiceClient._get_universe_domain( + universe_domain_opt, self._universe_domain_env + ) + self._api_endpoint = None # updated below, depending on `transport` + + # Initialize the universe domain validation. + self._is_universe_domain_valid = False + + if CLIENT_LOGGING_SUPPORTED: # pragma: NO COVER + # Setup logging. + client_logging.initialize_logging() + + api_key_value = getattr(self._client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + transport_provided = isinstance(transport, TextServiceTransport) + if transport_provided: + # transport is a TextServiceTransport instance. + if credentials or self._client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if self._client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = cast(TextServiceTransport, transport) + self._api_endpoint = self._transport.host + + self._api_endpoint = self._api_endpoint or TextServiceClient._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint, + ) + + if not transport_provided: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + transport_init: Union[ + Type[TextServiceTransport], Callable[..., TextServiceTransport] + ] = ( + TextServiceClient.get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., TextServiceTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( + credentials=credentials, + credentials_file=self._client_options.credentials_file, + host=self._api_endpoint, + scopes=self._client_options.scopes, + client_cert_source_for_mtls=self._client_cert_source, + quota_project_id=self._client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=self._client_options.api_audience, + ) + + if "async" not in str(self._transport): + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ): # pragma: NO COVER + _LOGGER.debug( + "Created client `google.ai.generativelanguage_v1alpha.TextServiceClient`.", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.TextService", + "universeDomain": getattr( + self._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._transport, "_credentials") + else { + "serviceName": "google.ai.generativelanguage.v1alpha.TextService", + "credentialsType": None, + }, + ) + + def generate_text( + self, + request: Optional[Union[text_service.GenerateTextRequest, dict]] = None, + *, + model: Optional[str] = None, + prompt: Optional[text_service.TextPrompt] = None, + temperature: Optional[float] = None, + candidate_count: Optional[int] = None, + max_output_tokens: Optional[int] = None, + top_p: Optional[float] = None, + top_k: Optional[int] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> text_service.GenerateTextResponse: + r"""Generates a response from the model given an input + message. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1alpha + + def sample_generate_text(): + # Create a client + client = generativelanguage_v1alpha.TextServiceClient() + + # Initialize request argument(s) + prompt = generativelanguage_v1alpha.TextPrompt() + prompt.text = "text_value" + + request = generativelanguage_v1alpha.GenerateTextRequest( + model="model_value", + prompt=prompt, + ) + + # Make the request + response = client.generate_text(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.ai.generativelanguage_v1alpha.types.GenerateTextRequest, dict]): + The request object. Request to generate a text completion + response from the model. + model (str): + Required. The name of the ``Model`` or ``TunedModel`` to + use for generating the completion. Examples: + models/text-bison-001 + tunedModels/sentence-translator-u3b7m + + This corresponds to the ``model`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + prompt (google.ai.generativelanguage_v1alpha.types.TextPrompt): + Required. The free-form input text + given to the model as a prompt. + Given a prompt, the model will generate + a TextCompletion response it predicts as + the completion of the input text. + + This corresponds to the ``prompt`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + temperature (float): + Optional. Controls the randomness of the output. Note: + The default value varies by model, see the + ``Model.temperature`` attribute of the ``Model`` + returned the ``getModel`` function. + + Values can range from [0.0,1.0], inclusive. A value + closer to 1.0 will produce responses that are more + varied and creative, while a value closer to 0.0 will + typically result in more straightforward responses from + the model. + + This corresponds to the ``temperature`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + candidate_count (int): + Optional. Number of generated responses to return. + + This value must be between [1, 8], inclusive. If unset, + this will default to 1. + + This corresponds to the ``candidate_count`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + max_output_tokens (int): + Optional. The maximum number of tokens to include in a + candidate. + + If unset, this will default to output_token_limit + specified in the ``Model`` specification. + + This corresponds to the ``max_output_tokens`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + top_p (float): + Optional. The maximum cumulative probability of tokens + to consider when sampling. + + The model uses combined Top-k and nucleus sampling. + + Tokens are sorted based on their assigned probabilities + so that only the most likely tokens are considered. + Top-k sampling directly limits the maximum number of + tokens to consider, while Nucleus sampling limits number + of tokens based on the cumulative probability. + + Note: The default value varies by model, see the + ``Model.top_p`` attribute of the ``Model`` returned the + ``getModel`` function. + + This corresponds to the ``top_p`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + top_k (int): + Optional. 
The maximum number of tokens to consider when + sampling. + + The model uses combined Top-k and nucleus sampling. + + Top-k sampling considers the set of ``top_k`` most + probable tokens. Defaults to 40. + + Note: The default value varies by model, see the + ``Model.top_k`` attribute of the ``Model`` returned the + ``getModel`` function. + + This corresponds to the ``top_k`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.ai.generativelanguage_v1alpha.types.GenerateTextResponse: + The response from the model, + including candidate completions. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any( + [ + model, + prompt, + temperature, + candidate_count, + max_output_tokens, + top_p, + top_k, + ] + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, text_service.GenerateTextRequest): + request = text_service.GenerateTextRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if model is not None: + request.model = model + if prompt is not None: + request.prompt = prompt + if temperature is not None: + request.temperature = temperature + if candidate_count is not None: + request.candidate_count = candidate_count + if max_output_tokens is not None: + request.max_output_tokens = max_output_tokens + if top_p is not None: + request.top_p = top_p + if top_k is not None: + request.top_k = top_k + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.generate_text] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("model", request.model),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def embed_text( + self, + request: Optional[Union[text_service.EmbedTextRequest, dict]] = None, + *, + model: Optional[str] = None, + text: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> text_service.EmbedTextResponse: + r"""Generates an embedding from the model given an input + message. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1alpha + + def sample_embed_text(): + # Create a client + client = generativelanguage_v1alpha.TextServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.EmbedTextRequest( + model="model_value", + ) + + # Make the request + response = client.embed_text(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.ai.generativelanguage_v1alpha.types.EmbedTextRequest, dict]): + The request object. Request to get a text embedding from + the model. + model (str): + Required. The model name to use with + the format model=models/{model}. + + This corresponds to the ``model`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + text (str): + Optional. The free-form input text + that the model will turn into an + embedding. + + This corresponds to the ``text`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.ai.generativelanguage_v1alpha.types.EmbedTextResponse: + The response to a EmbedTextRequest. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([model, text]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, text_service.EmbedTextRequest): + request = text_service.EmbedTextRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if model is not None: + request.model = model + if text is not None: + request.text = text + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.embed_text] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("model", request.model),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
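+        # Editorial note (illustrative sketch, not part of the generated
+        # client): assuming default credentials, a flattened call to this
+        # method might look like
+        #
+        #     client = generativelanguage_v1alpha.TextServiceClient()
+        #     response = client.embed_text(
+        #         model="models/embedding-gecko-001",
+        #         text="Hello world",
+        #     )
+        #     print(response)
+        #
+        # where ``models/embedding-gecko-001`` is the example model name given
+        # in the ``batch_embed_text`` docstring below.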
+ return response + + def batch_embed_text( + self, + request: Optional[Union[text_service.BatchEmbedTextRequest, dict]] = None, + *, + model: Optional[str] = None, + texts: Optional[MutableSequence[str]] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> text_service.BatchEmbedTextResponse: + r"""Generates multiple embeddings from the model given + input text in a synchronous call. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1alpha + + def sample_batch_embed_text(): + # Create a client + client = generativelanguage_v1alpha.TextServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.BatchEmbedTextRequest( + model="model_value", + ) + + # Make the request + response = client.batch_embed_text(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.ai.generativelanguage_v1alpha.types.BatchEmbedTextRequest, dict]): + The request object. Batch request to get a text embedding + from the model. + model (str): + Required. The name of the ``Model`` to use for + generating the embedding. Examples: + models/embedding-gecko-001 + + This corresponds to the ``model`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + texts (MutableSequence[str]): + Optional. The free-form input texts + that the model will turn into an + embedding. The current limit is 100 + texts, over which an error will be + thrown. + + This corresponds to the ``texts`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.ai.generativelanguage_v1alpha.types.BatchEmbedTextResponse: + The response to a EmbedTextRequest. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([model, texts]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, text_service.BatchEmbedTextRequest): + request = text_service.BatchEmbedTextRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if model is not None: + request.model = model + if texts is not None: + request.texts = texts + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.batch_embed_text] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("model", request.model),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def count_text_tokens( + self, + request: Optional[Union[text_service.CountTextTokensRequest, dict]] = None, + *, + model: Optional[str] = None, + prompt: Optional[text_service.TextPrompt] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> text_service.CountTextTokensResponse: + r"""Runs a model's tokenizer on a text and returns the + token count. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ai import generativelanguage_v1alpha + + def sample_count_text_tokens(): + # Create a client + client = generativelanguage_v1alpha.TextServiceClient() + + # Initialize request argument(s) + prompt = generativelanguage_v1alpha.TextPrompt() + prompt.text = "text_value" + + request = generativelanguage_v1alpha.CountTextTokensRequest( + model="model_value", + prompt=prompt, + ) + + # Make the request + response = client.count_text_tokens(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.ai.generativelanguage_v1alpha.types.CountTextTokensRequest, dict]): + The request object. Counts the number of tokens in the ``prompt`` sent to a + model. + + Models may tokenize text differently, so each model may + return a different ``token_count``. + model (str): + Required. The model's resource name. This serves as an + ID for the Model to use. + + This name should match a model name returned by the + ``ListModels`` method. + + Format: ``models/{model}`` + + This corresponds to the ``model`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + prompt (google.ai.generativelanguage_v1alpha.types.TextPrompt): + Required. The free-form input text + given to the model as a prompt. + + This corresponds to the ``prompt`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ + Returns: + google.ai.generativelanguage_v1alpha.types.CountTextTokensResponse: + A response from CountTextTokens. + + It returns the model's token_count for the prompt. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([model, prompt]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, text_service.CountTextTokensRequest): + request = text_service.CountTextTokensRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if model is not None: + request.model = model + if prompt is not None: + request.prompt = prompt + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.count_text_tokens] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("model", request.model),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "TextServiceClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.list_operations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e + + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("TextServiceClient",) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/text_service/transports/README.rst b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/text_service/transports/README.rst new file mode 100644 index 000000000000..b603a3ea7af6 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/text_service/transports/README.rst @@ -0,0 +1,9 @@ + +transport inheritance structure +_______________________________ + +`TextServiceTransport` is the ABC for all transports. +- public child `TextServiceGrpcTransport` for sync gRPC transport (defined in `grpc.py`). +- public child `TextServiceGrpcAsyncIOTransport` for async gRPC transport (defined in `grpc_asyncio.py`). 
+- private child `_BaseTextServiceRestTransport` for base REST transport with inner classes `_BaseMETHOD` (defined in `rest_base.py`). +- public child `TextServiceRestTransport` for sync REST transport with inner classes `METHOD` derived from the parent's corresponding `_BaseMETHOD` classes (defined in `rest.py`). diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/text_service/transports/__init__.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/text_service/transports/__init__.py new file mode 100644 index 000000000000..8670693e2f18 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/text_service/transports/__init__.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import TextServiceTransport +from .grpc import TextServiceGrpcTransport +from .grpc_asyncio import TextServiceGrpcAsyncIOTransport +from .rest import TextServiceRestInterceptor, TextServiceRestTransport + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[TextServiceTransport]] +_transport_registry["grpc"] = TextServiceGrpcTransport +_transport_registry["grpc_asyncio"] = TextServiceGrpcAsyncIOTransport +_transport_registry["rest"] = TextServiceRestTransport + +__all__ = ( + "TextServiceTransport", + "TextServiceGrpcTransport", + "TextServiceGrpcAsyncIOTransport", + "TextServiceRestTransport", + "TextServiceRestInterceptor", +) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/text_service/transports/base.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/text_service/transports/base.py new file mode 100644 index 000000000000..25b16f03ca97 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/text_service/transports/base.py @@ -0,0 +1,246 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
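+#
+# Editorial note (illustrative sketch, not part of the generated sources): the
+# registry in ``transports/__init__.py`` above maps the labels "grpc",
+# "grpc_asyncio" and "rest" to concrete transport classes, and the client
+# accepts such a label through its ``transport`` argument. Assuming default
+# credentials:
+#
+#     from google.ai import generativelanguage_v1alpha
+#
+#     # Use the REST transport instead of the default gRPC transport.
+#     client = generativelanguage_v1alpha.TextServiceClient(transport="rest")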
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.ai.generativelanguage_v1alpha import gapic_version as package_version +from google.ai.generativelanguage_v1alpha.types import text_service + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class TextServiceTransport(abc.ABC): + """Abstract transport class for TextService.""" + + AUTH_SCOPES = () + + DEFAULT_HOST: str = "generativelanguage.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'generativelanguage.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + if not hasattr(self, "_ignore_credentials"): + self._ignore_credentials: bool = False + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None and not self._ignore_credentials: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. 
+ if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + @property + def host(self): + return self._host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.generate_text: gapic_v1.method.wrap_method( + self.generate_text, + default_timeout=None, + client_info=client_info, + ), + self.embed_text: gapic_v1.method.wrap_method( + self.embed_text, + default_timeout=None, + client_info=client_info, + ), + self.batch_embed_text: gapic_v1.method.wrap_method( + self.batch_embed_text, + default_timeout=None, + client_info=client_info, + ), + self.count_text_tokens: gapic_v1.method.wrap_method( + self.count_text_tokens, + default_timeout=None, + client_info=client_info, + ), + self.get_operation: gapic_v1.method.wrap_method( + self.get_operation, + default_timeout=None, + client_info=client_info, + ), + self.list_operations: gapic_v1.method.wrap_method( + self.list_operations, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + + @property + def generate_text( + self, + ) -> Callable[ + [text_service.GenerateTextRequest], + Union[ + text_service.GenerateTextResponse, + Awaitable[text_service.GenerateTextResponse], + ], + ]: + raise NotImplementedError() + + @property + def embed_text( + self, + ) -> Callable[ + [text_service.EmbedTextRequest], + Union[ + text_service.EmbedTextResponse, Awaitable[text_service.EmbedTextResponse] + ], + ]: + raise NotImplementedError() + + @property + def batch_embed_text( + self, + ) -> Callable[ + [text_service.BatchEmbedTextRequest], + Union[ + text_service.BatchEmbedTextResponse, + Awaitable[text_service.BatchEmbedTextResponse], + ], + ]: + raise NotImplementedError() + + @property + def count_text_tokens( + self, + ) -> Callable[ + [text_service.CountTextTokensRequest], + Union[ + text_service.CountTextTokensResponse, + Awaitable[text_service.CountTextTokensResponse], + ], + ]: + raise NotImplementedError() + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], + Union[ + operations_pb2.ListOperationsResponse, + Awaitable[operations_pb2.ListOperationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_operation( + self, + ) -> Callable[ + [operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("TextServiceTransport",) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/text_service/transports/grpc.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/text_service/transports/grpc.py new file mode 100644 index 000000000000..4ea6970fa349 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/text_service/transports/grpc.py @@ -0,0 +1,485 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
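The abstract base transport above only declares each RPC as a property that returns a callable; the concrete gRPC transport that follows implements those properties by lazily creating one stub per method on first access and caching it in self._stubs. A minimal sketch of that memoization pattern (simplified, not the generated implementation; `channel` here is assumed to be an already-created grpc.Channel, whereas the real transport builds and wraps its channel in __init__):

    # Sketch of the lazy per-method stub cache used by the concrete transports.
    class _StubCacheSketch:
        def __init__(self, channel):
            # `channel` is assumed to be a ready grpc.Channel-like object.
            self._channel = channel
            self._stubs = {}

        @property
        def generate_text(self):
            # Build the stub for this RPC path only once, then reuse it.
            if "generate_text" not in self._stubs:
                self._stubs["generate_text"] = self._channel.unary_unary(
                    "/google.ai.generativelanguage.v1alpha.TextService/GenerateText"
                )
            return self._stubs["generate_text"]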
+# +import json +import logging as std_logging +import pickle +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf.json_format import MessageToJson +import google.protobuf.message +import grpc # type: ignore +import proto # type: ignore + +from google.ai.generativelanguage_v1alpha.types import text_service + +from .base import DEFAULT_CLIENT_INFO, TextServiceTransport + +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + + +class _LoggingClientInterceptor(grpc.UnaryUnaryClientInterceptor): # pragma: NO COVER + def intercept_unary_unary(self, continuation, client_call_details, request): + logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ) + if logging_enabled: # pragma: NO COVER + request_metadata = client_call_details.metadata + if isinstance(request, proto.Message): + request_payload = type(request).to_json(request) + elif isinstance(request, google.protobuf.message.Message): + request_payload = MessageToJson(request) + else: + request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" + + request_metadata = { + key: value.decode("utf-8") if isinstance(value, bytes) else value + for key, value in request_metadata + } + grpc_request = { + "payload": request_payload, + "requestMethod": "grpc", + "metadata": dict(request_metadata), + } + _LOGGER.debug( + f"Sending request for {client_call_details.method}", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.TextService", + "rpcName": client_call_details.method, + "request": grpc_request, + "metadata": grpc_request["metadata"], + }, + ) + + response = continuation(client_call_details, request) + if logging_enabled: # pragma: NO COVER + response_metadata = response.trailing_metadata() + # Convert gRPC metadata `` to list of tuples + metadata = ( + dict([(k, str(v)) for k, v in response_metadata]) + if response_metadata + else None + ) + result = response.result() + if isinstance(result, proto.Message): + response_payload = type(result).to_json(result) + elif isinstance(result, google.protobuf.message.Message): + response_payload = MessageToJson(result) + else: + response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" + grpc_response = { + "payload": response_payload, + "metadata": metadata, + "status": "OK", + } + _LOGGER.debug( + f"Received response for {client_call_details.method}.", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.TextService", + "rpcName": client_call_details.method, + "response": grpc_response, + "metadata": grpc_response["metadata"], + }, + ) + return response + + +class TextServiceGrpcTransport(TextServiceTransport): + """gRPC backend transport for TextService. + + API for using Generative Language Models (GLMs) trained to + generate text. + Also known as Large Language Models (LLM)s, these generate text + given an input prompt from the user. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. 
+ + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "generativelanguage.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'generativelanguage.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if a ``channel`` instance is provided. + channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. 
+ + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, grpc.Channel): + # Ignore credentials if a channel was passed. + credentials = None + self._ignore_credentials = True + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + self._interceptor = _LoggingClientInterceptor() + self._logged_channel = grpc.intercept_channel( + self._grpc_channel, self._interceptor + ) + + # Wrap messages. This must be done after self._logged_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "generativelanguage.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. 
+ scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service.""" + return self._grpc_channel + + @property + def generate_text( + self, + ) -> Callable[ + [text_service.GenerateTextRequest], text_service.GenerateTextResponse + ]: + r"""Return a callable for the generate text method over gRPC. + + Generates a response from the model given an input + message. + + Returns: + Callable[[~.GenerateTextRequest], + ~.GenerateTextResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "generate_text" not in self._stubs: + self._stubs["generate_text"] = self._logged_channel.unary_unary( + "/google.ai.generativelanguage.v1alpha.TextService/GenerateText", + request_serializer=text_service.GenerateTextRequest.serialize, + response_deserializer=text_service.GenerateTextResponse.deserialize, + ) + return self._stubs["generate_text"] + + @property + def embed_text( + self, + ) -> Callable[[text_service.EmbedTextRequest], text_service.EmbedTextResponse]: + r"""Return a callable for the embed text method over gRPC. + + Generates an embedding from the model given an input + message. + + Returns: + Callable[[~.EmbedTextRequest], + ~.EmbedTextResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "embed_text" not in self._stubs: + self._stubs["embed_text"] = self._logged_channel.unary_unary( + "/google.ai.generativelanguage.v1alpha.TextService/EmbedText", + request_serializer=text_service.EmbedTextRequest.serialize, + response_deserializer=text_service.EmbedTextResponse.deserialize, + ) + return self._stubs["embed_text"] + + @property + def batch_embed_text( + self, + ) -> Callable[ + [text_service.BatchEmbedTextRequest], text_service.BatchEmbedTextResponse + ]: + r"""Return a callable for the batch embed text method over gRPC. + + Generates multiple embeddings from the model given + input text in a synchronous call. + + Returns: + Callable[[~.BatchEmbedTextRequest], + ~.BatchEmbedTextResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "batch_embed_text" not in self._stubs: + self._stubs["batch_embed_text"] = self._logged_channel.unary_unary( + "/google.ai.generativelanguage.v1alpha.TextService/BatchEmbedText", + request_serializer=text_service.BatchEmbedTextRequest.serialize, + response_deserializer=text_service.BatchEmbedTextResponse.deserialize, + ) + return self._stubs["batch_embed_text"] + + @property + def count_text_tokens( + self, + ) -> Callable[ + [text_service.CountTextTokensRequest], text_service.CountTextTokensResponse + ]: + r"""Return a callable for the count text tokens method over gRPC. + + Runs a model's tokenizer on a text and returns the + token count. + + Returns: + Callable[[~.CountTextTokensRequest], + ~.CountTextTokensResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "count_text_tokens" not in self._stubs: + self._stubs["count_text_tokens"] = self._logged_channel.unary_unary( + "/google.ai.generativelanguage.v1alpha.TextService/CountTextTokens", + request_serializer=text_service.CountTextTokensRequest.serialize, + response_deserializer=text_service.CountTextTokensResponse.deserialize, + ) + return self._stubs["count_text_tokens"] + + def close(self): + self._logged_channel.close() + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("TextServiceGrpcTransport",) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/text_service/transports/grpc_asyncio.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/text_service/transports/grpc_asyncio.py new file mode 100644 index 000000000000..24519afd29bb --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/text_service/transports/grpc_asyncio.py @@ -0,0 +1,536 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import inspect +import json +import logging as std_logging +import pickle +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, grpc_helpers_async +from google.api_core import retry_async as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf.json_format import MessageToJson +import google.protobuf.message +import grpc # type: ignore +from grpc.experimental import aio # type: ignore +import proto # type: ignore + +from google.ai.generativelanguage_v1alpha.types import text_service + +from .base import DEFAULT_CLIENT_INFO, TextServiceTransport +from .grpc import TextServiceGrpcTransport + +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + + +class _LoggingClientAIOInterceptor( + grpc.aio.UnaryUnaryClientInterceptor +): # pragma: NO COVER + async def intercept_unary_unary(self, continuation, client_call_details, request): + logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ) + if logging_enabled: # pragma: NO COVER + request_metadata = client_call_details.metadata + if isinstance(request, proto.Message): + request_payload = type(request).to_json(request) + elif isinstance(request, google.protobuf.message.Message): + request_payload = MessageToJson(request) + else: + request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" + + request_metadata = { + key: value.decode("utf-8") if isinstance(value, bytes) else value + for key, value in request_metadata + } + grpc_request = { + "payload": request_payload, + 
"requestMethod": "grpc", + "metadata": dict(request_metadata), + } + _LOGGER.debug( + f"Sending request for {client_call_details.method}", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.TextService", + "rpcName": str(client_call_details.method), + "request": grpc_request, + "metadata": grpc_request["metadata"], + }, + ) + response = await continuation(client_call_details, request) + if logging_enabled: # pragma: NO COVER + response_metadata = await response.trailing_metadata() + # Convert gRPC metadata `` to list of tuples + metadata = ( + dict([(k, str(v)) for k, v in response_metadata]) + if response_metadata + else None + ) + result = await response + if isinstance(result, proto.Message): + response_payload = type(result).to_json(result) + elif isinstance(result, google.protobuf.message.Message): + response_payload = MessageToJson(result) + else: + response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" + grpc_response = { + "payload": response_payload, + "metadata": metadata, + "status": "OK", + } + _LOGGER.debug( + f"Received response to rpc {client_call_details.method}.", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.TextService", + "rpcName": str(client_call_details.method), + "response": grpc_response, + "metadata": grpc_response["metadata"], + }, + ) + return response + + +class TextServiceGrpcAsyncIOTransport(TextServiceTransport): + """gRPC AsyncIO backend transport for TextService. + + API for using Generative Language Models (GLMs) trained to + generate text. + Also known as Large Language Models (LLM)s, these generate text + given an input prompt from the user. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "generativelanguage.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. 
+ """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "generativelanguage.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'generativelanguage.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. 
+ Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, aio.Channel): + # Ignore credentials if a channel was passed. + credentials = None + self._ignore_credentials = True + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + self._interceptor = _LoggingClientAIOInterceptor() + self._grpc_channel._unary_unary_interceptors.append(self._interceptor) + self._logged_channel = self._grpc_channel + self._wrap_with_kind = ( + "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters + ) + # Wrap messages. This must be done after self._logged_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def generate_text( + self, + ) -> Callable[ + [text_service.GenerateTextRequest], Awaitable[text_service.GenerateTextResponse] + ]: + r"""Return a callable for the generate text method over gRPC. + + Generates a response from the model given an input + message. 
+ + Returns: + Callable[[~.GenerateTextRequest], + Awaitable[~.GenerateTextResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "generate_text" not in self._stubs: + self._stubs["generate_text"] = self._logged_channel.unary_unary( + "/google.ai.generativelanguage.v1alpha.TextService/GenerateText", + request_serializer=text_service.GenerateTextRequest.serialize, + response_deserializer=text_service.GenerateTextResponse.deserialize, + ) + return self._stubs["generate_text"] + + @property + def embed_text( + self, + ) -> Callable[ + [text_service.EmbedTextRequest], Awaitable[text_service.EmbedTextResponse] + ]: + r"""Return a callable for the embed text method over gRPC. + + Generates an embedding from the model given an input + message. + + Returns: + Callable[[~.EmbedTextRequest], + Awaitable[~.EmbedTextResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "embed_text" not in self._stubs: + self._stubs["embed_text"] = self._logged_channel.unary_unary( + "/google.ai.generativelanguage.v1alpha.TextService/EmbedText", + request_serializer=text_service.EmbedTextRequest.serialize, + response_deserializer=text_service.EmbedTextResponse.deserialize, + ) + return self._stubs["embed_text"] + + @property + def batch_embed_text( + self, + ) -> Callable[ + [text_service.BatchEmbedTextRequest], + Awaitable[text_service.BatchEmbedTextResponse], + ]: + r"""Return a callable for the batch embed text method over gRPC. + + Generates multiple embeddings from the model given + input text in a synchronous call. + + Returns: + Callable[[~.BatchEmbedTextRequest], + Awaitable[~.BatchEmbedTextResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "batch_embed_text" not in self._stubs: + self._stubs["batch_embed_text"] = self._logged_channel.unary_unary( + "/google.ai.generativelanguage.v1alpha.TextService/BatchEmbedText", + request_serializer=text_service.BatchEmbedTextRequest.serialize, + response_deserializer=text_service.BatchEmbedTextResponse.deserialize, + ) + return self._stubs["batch_embed_text"] + + @property + def count_text_tokens( + self, + ) -> Callable[ + [text_service.CountTextTokensRequest], + Awaitable[text_service.CountTextTokensResponse], + ]: + r"""Return a callable for the count text tokens method over gRPC. + + Runs a model's tokenizer on a text and returns the + token count. + + Returns: + Callable[[~.CountTextTokensRequest], + Awaitable[~.CountTextTokensResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "count_text_tokens" not in self._stubs: + self._stubs["count_text_tokens"] = self._logged_channel.unary_unary( + "/google.ai.generativelanguage.v1alpha.TextService/CountTextTokens", + request_serializer=text_service.CountTextTokensRequest.serialize, + response_deserializer=text_service.CountTextTokensResponse.deserialize, + ) + return self._stubs["count_text_tokens"] + + def _prep_wrapped_messages(self, client_info): + """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + self._wrapped_methods = { + self.generate_text: self._wrap_method( + self.generate_text, + default_timeout=None, + client_info=client_info, + ), + self.embed_text: self._wrap_method( + self.embed_text, + default_timeout=None, + client_info=client_info, + ), + self.batch_embed_text: self._wrap_method( + self.batch_embed_text, + default_timeout=None, + client_info=client_info, + ), + self.count_text_tokens: self._wrap_method( + self.count_text_tokens, + default_timeout=None, + client_info=client_info, + ), + self.get_operation: self._wrap_method( + self.get_operation, + default_timeout=None, + client_info=client_info, + ), + self.list_operations: self._wrap_method( + self.list_operations, + default_timeout=None, + client_info=client_info, + ), + } + + def _wrap_method(self, func, *args, **kwargs): + if self._wrap_with_kind: # pragma: NO COVER + kwargs["kind"] = self.kind + return gapic_v1.method_async.wrap_method(func, *args, **kwargs) + + def close(self): + return self._logged_channel.close() + + @property + def kind(self) -> str: + return "grpc_asyncio" + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + +__all__ = ("TextServiceGrpcAsyncIOTransport",) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/text_service/transports/rest.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/text_service/transports/rest.py new file mode 100644 index 000000000000..8eae9d59a7c1 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/text_service/transports/rest.py @@ -0,0 +1,1407 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import dataclasses +import json # type: ignore +import logging +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, rest_helpers, rest_streaming +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import json_format +from requests import __version__ as requests_version + +from google.ai.generativelanguage_v1alpha.types import text_service + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .rest_base import _BaseTextServiceRestTransport + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = logging.getLogger(__name__) + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=f"requests@{requests_version}", +) + + +class TextServiceRestInterceptor: + """Interceptor for TextService. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the TextServiceRestTransport. + + .. 
code-block:: python + class MyCustomTextServiceInterceptor(TextServiceRestInterceptor): + def pre_batch_embed_text(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_batch_embed_text(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_count_text_tokens(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_count_text_tokens(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_embed_text(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_embed_text(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_generate_text(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_generate_text(self, response): + logging.log(f"Received response: {response}") + return response + + transport = TextServiceRestTransport(interceptor=MyCustomTextServiceInterceptor()) + client = TextServiceClient(transport=transport) + + + """ + + def pre_batch_embed_text( + self, + request: text_service.BatchEmbedTextRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + text_service.BatchEmbedTextRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for batch_embed_text + + Override in a subclass to manipulate the request or metadata + before they are sent to the TextService server. + """ + return request, metadata + + def post_batch_embed_text( + self, response: text_service.BatchEmbedTextResponse + ) -> text_service.BatchEmbedTextResponse: + """Post-rpc interceptor for batch_embed_text + + DEPRECATED. Please use the `post_batch_embed_text_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the TextService server but before + it is returned to user code. This `post_batch_embed_text` interceptor runs + before the `post_batch_embed_text_with_metadata` interceptor. + """ + return response + + def post_batch_embed_text_with_metadata( + self, + response: text_service.BatchEmbedTextResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + text_service.BatchEmbedTextResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for batch_embed_text + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the TextService server but before it is returned to user code. + + We recommend only using this `post_batch_embed_text_with_metadata` + interceptor in new development instead of the `post_batch_embed_text` interceptor. + When both interceptors are used, this `post_batch_embed_text_with_metadata` interceptor runs after the + `post_batch_embed_text` interceptor. The (possibly modified) response returned by + `post_batch_embed_text` will be passed to + `post_batch_embed_text_with_metadata`. + """ + return response, metadata + + def pre_count_text_tokens( + self, + request: text_service.CountTextTokensRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + text_service.CountTextTokensRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for count_text_tokens + + Override in a subclass to manipulate the request or metadata + before they are sent to the TextService server. 
+ """ + return request, metadata + + def post_count_text_tokens( + self, response: text_service.CountTextTokensResponse + ) -> text_service.CountTextTokensResponse: + """Post-rpc interceptor for count_text_tokens + + DEPRECATED. Please use the `post_count_text_tokens_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the TextService server but before + it is returned to user code. This `post_count_text_tokens` interceptor runs + before the `post_count_text_tokens_with_metadata` interceptor. + """ + return response + + def post_count_text_tokens_with_metadata( + self, + response: text_service.CountTextTokensResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + text_service.CountTextTokensResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for count_text_tokens + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the TextService server but before it is returned to user code. + + We recommend only using this `post_count_text_tokens_with_metadata` + interceptor in new development instead of the `post_count_text_tokens` interceptor. + When both interceptors are used, this `post_count_text_tokens_with_metadata` interceptor runs after the + `post_count_text_tokens` interceptor. The (possibly modified) response returned by + `post_count_text_tokens` will be passed to + `post_count_text_tokens_with_metadata`. + """ + return response, metadata + + def pre_embed_text( + self, + request: text_service.EmbedTextRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[text_service.EmbedTextRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for embed_text + + Override in a subclass to manipulate the request or metadata + before they are sent to the TextService server. + """ + return request, metadata + + def post_embed_text( + self, response: text_service.EmbedTextResponse + ) -> text_service.EmbedTextResponse: + """Post-rpc interceptor for embed_text + + DEPRECATED. Please use the `post_embed_text_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the TextService server but before + it is returned to user code. This `post_embed_text` interceptor runs + before the `post_embed_text_with_metadata` interceptor. + """ + return response + + def post_embed_text_with_metadata( + self, + response: text_service.EmbedTextResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[text_service.EmbedTextResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for embed_text + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the TextService server but before it is returned to user code. + + We recommend only using this `post_embed_text_with_metadata` + interceptor in new development instead of the `post_embed_text` interceptor. + When both interceptors are used, this `post_embed_text_with_metadata` interceptor runs after the + `post_embed_text` interceptor. The (possibly modified) response returned by + `post_embed_text` will be passed to + `post_embed_text_with_metadata`. 
+ """ + return response, metadata + + def pre_generate_text( + self, + request: text_service.GenerateTextRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + text_service.GenerateTextRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for generate_text + + Override in a subclass to manipulate the request or metadata + before they are sent to the TextService server. + """ + return request, metadata + + def post_generate_text( + self, response: text_service.GenerateTextResponse + ) -> text_service.GenerateTextResponse: + """Post-rpc interceptor for generate_text + + DEPRECATED. Please use the `post_generate_text_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the TextService server but before + it is returned to user code. This `post_generate_text` interceptor runs + before the `post_generate_text_with_metadata` interceptor. + """ + return response + + def post_generate_text_with_metadata( + self, + response: text_service.GenerateTextResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + text_service.GenerateTextResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for generate_text + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the TextService server but before it is returned to user code. + + We recommend only using this `post_generate_text_with_metadata` + interceptor in new development instead of the `post_generate_text` interceptor. + When both interceptors are used, this `post_generate_text_with_metadata` interceptor runs after the + `post_generate_text` interceptor. The (possibly modified) response returned by + `post_generate_text` will be passed to + `post_generate_text_with_metadata`. + """ + return response, metadata + + def pre_get_operation( + self, + request: operations_pb2.GetOperationRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + operations_pb2.GetOperationRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for get_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the TextService server. + """ + return request, metadata + + def post_get_operation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for get_operation + + Override in a subclass to manipulate the response + after it is returned by the TextService server but before + it is returned to user code. + """ + return response + + def pre_list_operations( + self, + request: operations_pb2.ListOperationsRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + operations_pb2.ListOperationsRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for list_operations + + Override in a subclass to manipulate the request or metadata + before they are sent to the TextService server. + """ + return request, metadata + + def post_list_operations( + self, response: operations_pb2.ListOperationsResponse + ) -> operations_pb2.ListOperationsResponse: + """Post-rpc interceptor for list_operations + + Override in a subclass to manipulate the response + after it is returned by the TextService server but before + it is returned to user code. 
+ """ + return response + + +@dataclasses.dataclass +class TextServiceRestStub: + _session: AuthorizedSession + _host: str + _interceptor: TextServiceRestInterceptor + + +class TextServiceRestTransport(_BaseTextServiceRestTransport): + """REST backend synchronous transport for TextService. + + API for using Generative Language Models (GLMs) trained to + generate text. + Also known as Large Language Models (LLM)s, these generate text + given an input prompt from the user. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + """ + + def __init__( + self, + *, + host: str = "generativelanguage.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[TextServiceRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'generativelanguage.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
+ # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + url_scheme=url_scheme, + api_audience=api_audience, + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST + ) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or TextServiceRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _BatchEmbedText( + _BaseTextServiceRestTransport._BaseBatchEmbedText, TextServiceRestStub + ): + def __hash__(self): + return hash("TextServiceRestTransport.BatchEmbedText") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: text_service.BatchEmbedTextRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> text_service.BatchEmbedTextResponse: + r"""Call the batch embed text method over HTTP. + + Args: + request (~.text_service.BatchEmbedTextRequest): + The request object. Batch request to get a text embedding + from the model. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.text_service.BatchEmbedTextResponse: + The response to a EmbedTextRequest. 
+ """ + + http_options = ( + _BaseTextServiceRestTransport._BaseBatchEmbedText._get_http_options() + ) + + request, metadata = self._interceptor.pre_batch_embed_text( + request, metadata + ) + transcoded_request = _BaseTextServiceRestTransport._BaseBatchEmbedText._get_transcoded_request( + http_options, request + ) + + body = _BaseTextServiceRestTransport._BaseBatchEmbedText._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseTextServiceRestTransport._BaseBatchEmbedText._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.ai.generativelanguage_v1alpha.TextServiceClient.BatchEmbedText", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.TextService", + "rpcName": "BatchEmbedText", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = TextServiceRestTransport._BatchEmbedText._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = text_service.BatchEmbedTextResponse() + pb_resp = text_service.BatchEmbedTextResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_batch_embed_text(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_batch_embed_text_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = text_service.BatchEmbedTextResponse.to_json( + response + ) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.ai.generativelanguage_v1alpha.TextServiceClient.batch_embed_text", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.TextService", + "rpcName": "BatchEmbedText", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _CountTextTokens( + _BaseTextServiceRestTransport._BaseCountTextTokens, TextServiceRestStub + ): + def __hash__(self): + return hash("TextServiceRestTransport.CountTextTokens") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + 
request: text_service.CountTextTokensRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> text_service.CountTextTokensResponse: + r"""Call the count text tokens method over HTTP. + + Args: + request (~.text_service.CountTextTokensRequest): + The request object. Counts the number of tokens in the ``prompt`` sent to a + model. + + Models may tokenize text differently, so each model may + return a different ``token_count``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.text_service.CountTextTokensResponse: + A response from ``CountTextTokens``. + + It returns the model's ``token_count`` for the + ``prompt``. + + """ + + http_options = ( + _BaseTextServiceRestTransport._BaseCountTextTokens._get_http_options() + ) + + request, metadata = self._interceptor.pre_count_text_tokens( + request, metadata + ) + transcoded_request = _BaseTextServiceRestTransport._BaseCountTextTokens._get_transcoded_request( + http_options, request + ) + + body = _BaseTextServiceRestTransport._BaseCountTextTokens._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseTextServiceRestTransport._BaseCountTextTokens._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.ai.generativelanguage_v1alpha.TextServiceClient.CountTextTokens", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.TextService", + "rpcName": "CountTextTokens", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = TextServiceRestTransport._CountTextTokens._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = text_service.CountTextTokensResponse() + pb_resp = text_service.CountTextTokensResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_count_text_tokens(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_count_text_tokens_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = text_service.CountTextTokensResponse.to_json( + response + ) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.ai.generativelanguage_v1alpha.TextServiceClient.count_text_tokens", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.TextService", + "rpcName": "CountTextTokens", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _EmbedText(_BaseTextServiceRestTransport._BaseEmbedText, TextServiceRestStub): + def __hash__(self): + return hash("TextServiceRestTransport.EmbedText") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: text_service.EmbedTextRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> text_service.EmbedTextResponse: + r"""Call the embed text method over HTTP. + + Args: + request (~.text_service.EmbedTextRequest): + The request object. Request to get a text embedding from + the model. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.text_service.EmbedTextResponse: + The response to a EmbedTextRequest. 
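Editor's note: these per-RPC classes are normally reached through the generated ``TextServiceClient`` rather than invoked directly. A rough usage sketch (the model name is a placeholder and application default credentials are assumed):

    from google.ai import generativelanguage_v1alpha as glm

    client = glm.TextServiceClient(transport="rest")
    request = glm.EmbedTextRequest(model="models/your-embedding-model", text="Hello world")
    response = client.embed_text(request=request)
    print(len(response.embedding.value))  # number of dimensions in the embedding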
+ """ + + http_options = ( + _BaseTextServiceRestTransport._BaseEmbedText._get_http_options() + ) + + request, metadata = self._interceptor.pre_embed_text(request, metadata) + transcoded_request = ( + _BaseTextServiceRestTransport._BaseEmbedText._get_transcoded_request( + http_options, request + ) + ) + + body = _BaseTextServiceRestTransport._BaseEmbedText._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = ( + _BaseTextServiceRestTransport._BaseEmbedText._get_query_params_json( + transcoded_request + ) + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.ai.generativelanguage_v1alpha.TextServiceClient.EmbedText", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.TextService", + "rpcName": "EmbedText", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = TextServiceRestTransport._EmbedText._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = text_service.EmbedTextResponse() + pb_resp = text_service.EmbedTextResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_embed_text(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_embed_text_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = text_service.EmbedTextResponse.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.ai.generativelanguage_v1alpha.TextServiceClient.embed_text", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.TextService", + "rpcName": "EmbedText", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _GenerateText( + _BaseTextServiceRestTransport._BaseGenerateText, TextServiceRestStub + ): + def __hash__(self): + return hash("TextServiceRestTransport.GenerateText") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: text_service.GenerateTextRequest, + *, + retry: OptionalRetry = 
gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> text_service.GenerateTextResponse: + r"""Call the generate text method over HTTP. + + Args: + request (~.text_service.GenerateTextRequest): + The request object. Request to generate a text completion + response from the model. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.text_service.GenerateTextResponse: + The response from the model, + including candidate completions. + + """ + + http_options = ( + _BaseTextServiceRestTransport._BaseGenerateText._get_http_options() + ) + + request, metadata = self._interceptor.pre_generate_text(request, metadata) + transcoded_request = ( + _BaseTextServiceRestTransport._BaseGenerateText._get_transcoded_request( + http_options, request + ) + ) + + body = ( + _BaseTextServiceRestTransport._BaseGenerateText._get_request_body_json( + transcoded_request + ) + ) + + # Jsonify the query params + query_params = ( + _BaseTextServiceRestTransport._BaseGenerateText._get_query_params_json( + transcoded_request + ) + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.ai.generativelanguage_v1alpha.TextServiceClient.GenerateText", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.TextService", + "rpcName": "GenerateText", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = TextServiceRestTransport._GenerateText._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = text_service.GenerateTextResponse() + pb_resp = text_service.GenerateTextResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_generate_text(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_generate_text_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = text_service.GenerateTextResponse.to_json( + response + ) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.ai.generativelanguage_v1alpha.TextServiceClient.generate_text", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.TextService", + "rpcName": "GenerateText", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + @property + def batch_embed_text( + self, + ) -> Callable[ + [text_service.BatchEmbedTextRequest], text_service.BatchEmbedTextResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._BatchEmbedText(self._session, self._host, self._interceptor) # type: ignore + + @property + def count_text_tokens( + self, + ) -> Callable[ + [text_service.CountTextTokensRequest], text_service.CountTextTokensResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CountTextTokens(self._session, self._host, self._interceptor) # type: ignore + + @property + def embed_text( + self, + ) -> Callable[[text_service.EmbedTextRequest], text_service.EmbedTextResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._EmbedText(self._session, self._host, self._interceptor) # type: ignore + + @property + def generate_text( + self, + ) -> Callable[ + [text_service.GenerateTextRequest], text_service.GenerateTextResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._GenerateText(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_operation(self): + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + + class _GetOperation( + _BaseTextServiceRestTransport._BaseGetOperation, TextServiceRestStub + ): + def __hash__(self): + return hash("TextServiceRestTransport.GetOperation") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: operations_pb2.GetOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Call the get operation method over HTTP. + + Args: + request (operations_pb2.GetOperationRequest): + The request object for GetOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + operations_pb2.Operation: Response from GetOperation method. + """ + + http_options = ( + _BaseTextServiceRestTransport._BaseGetOperation._get_http_options() + ) + + request, metadata = self._interceptor.pre_get_operation(request, metadata) + transcoded_request = ( + _BaseTextServiceRestTransport._BaseGetOperation._get_transcoded_request( + http_options, request + ) + ) + + # Jsonify the query params + query_params = ( + _BaseTextServiceRestTransport._BaseGetOperation._get_query_params_json( + transcoded_request + ) + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.ai.generativelanguage_v1alpha.TextServiceClient.GetOperation", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.TextService", + "rpcName": "GetOperation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = TextServiceRestTransport._GetOperation._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = operations_pb2.Operation() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_get_operation(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.ai.generativelanguage_v1alpha.TextServiceAsyncClient.GetOperation", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.TextService", + "rpcName": "GetOperation", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) + return resp + + @property + def list_operations(self): + return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore + + class _ListOperations( + _BaseTextServiceRestTransport._BaseListOperations, TextServiceRestStub + ): + def __hash__(self): + return hash("TextServiceRestTransport.ListOperations") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: operations_pb2.ListOperationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Call the list operations method over HTTP. + + Args: + request (operations_pb2.ListOperationsRequest): + The request object for ListOperations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + operations_pb2.ListOperationsResponse: Response from ListOperations method. 
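Editor's note: the two operations RPCs are surfaced on the client through the standard operations mixin; a short sketch (the tuned-model name is a placeholder, credentials come from the environment):

    from google.longrunning import operations_pb2

    from google.ai import generativelanguage_v1alpha as glm

    client = glm.TextServiceClient(transport="rest")
    request = operations_pb2.ListOperationsRequest(name="tunedModels/your-model")
    response = client.list_operations(request=request)
    for operation in response.operations:
        print(operation.name, operation.done)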
+ """ + + http_options = ( + _BaseTextServiceRestTransport._BaseListOperations._get_http_options() + ) + + request, metadata = self._interceptor.pre_list_operations(request, metadata) + transcoded_request = _BaseTextServiceRestTransport._BaseListOperations._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseTextServiceRestTransport._BaseListOperations._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.ai.generativelanguage_v1alpha.TextServiceClient.ListOperations", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.TextService", + "rpcName": "ListOperations", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = TextServiceRestTransport._ListOperations._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = operations_pb2.ListOperationsResponse() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_list_operations(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.ai.generativelanguage_v1alpha.TextServiceAsyncClient.ListOperations", + extra={ + "serviceName": "google.ai.generativelanguage.v1alpha.TextService", + "rpcName": "ListOperations", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) + return resp + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("TextServiceRestTransport",) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/text_service/transports/rest_base.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/text_service/transports/rest_base.py new file mode 100644 index 000000000000..6139a0ec95a1 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/services/text_service/transports/rest_base.py @@ -0,0 +1,387 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union + +from google.api_core import gapic_v1, path_template +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import json_format + +from google.ai.generativelanguage_v1alpha.types import text_service + +from .base import DEFAULT_CLIENT_INFO, TextServiceTransport + + +class _BaseTextServiceRestTransport(TextServiceTransport): + """Base REST backend transport for TextService. + + Note: This class is not meant to be used directly. Use its sync and + async sub-classes instead. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + """ + + def __init__( + self, + *, + host: str = "generativelanguage.googleapis.com", + credentials: Optional[Any] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + Args: + host (Optional[str]): + The hostname to connect to (default: 'generativelanguage.googleapis.com'). + credentials (Optional[Any]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. 
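Editor's note: each per-method base class below pairs an HTTP rule from ``_get_http_options`` with ``path_template.transcode`` to turn a request message into a concrete URI, body, and query parameters. An illustration of the idea with plain keyword arguments (the real code passes the protobuf request; the model name is a placeholder):

    from google.api_core import path_template

    http_options = [
        {"method": "post", "uri": "/v1alpha/{model=models/*}:generateText", "body": "*"},
    ]
    transcoded = path_template.transcode(http_options, model="models/your-model")
    # transcoded["method"] is "post" and transcoded["uri"] is
    # "/v1alpha/models/your-model:generateText"; fields not used in the URI land in the body.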
+ """ + # Run the base constructor + maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) + if maybe_url_match is None: + raise ValueError( + f"Unexpected hostname structure: {host}" + ) # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + class _BaseBatchEmbedText: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1alpha/{model=models/*}:batchEmbedText", + "body": "*", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = text_service.BatchEmbedTextRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseTextServiceRestTransport._BaseBatchEmbedText._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseCountTextTokens: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1alpha/{model=models/*}:countTextTokens", + "body": "*", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = text_service.CountTextTokensRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseTextServiceRestTransport._BaseCountTextTokens._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseEmbedText: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, 
Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1alpha/{model=models/*}:embedText", + "body": "*", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = text_service.EmbedTextRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseTextServiceRestTransport._BaseEmbedText._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGenerateText: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1alpha/{model=models/*}:generateText", + "body": "*", + }, + { + "method": "post", + "uri": "/v1alpha/{model=tunedModels/*}:generateText", + "body": "*", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = text_service.GenerateTextRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseTextServiceRestTransport._BaseGenerateText._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGetOperation: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha/{name=tunedModels/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1alpha/{name=generatedFiles/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1alpha/{name=models/*/operations/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + 
query_params = json.loads(json.dumps(transcoded_request["query_params"])) + return query_params + + class _BaseListOperations: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha/{name=tunedModels/*}/operations", + }, + { + "method": "get", + "uri": "/v1alpha/{name=models/*}/operations", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + return query_params + + +__all__ = ("_BaseTextServiceRestTransport",) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/types/__init__.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/types/__init__.py new file mode 100644 index 000000000000..bb351f14ec55 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/types/__init__.py @@ -0,0 +1,369 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .cache_service import ( + CreateCachedContentRequest, + DeleteCachedContentRequest, + GetCachedContentRequest, + ListCachedContentsRequest, + ListCachedContentsResponse, + UpdateCachedContentRequest, +) +from .cached_content import CachedContent +from .citation import CitationMetadata, CitationSource +from .content import ( + Blob, + CodeExecution, + CodeExecutionResult, + Content, + DynamicRetrievalConfig, + ExecutableCode, + FileData, + FunctionCall, + FunctionCallingConfig, + FunctionDeclaration, + FunctionResponse, + GoogleSearchRetrieval, + GroundingPassage, + GroundingPassages, + Part, + Schema, + Tool, + ToolConfig, + Type, +) +from .discuss_service import ( + CountMessageTokensRequest, + CountMessageTokensResponse, + Example, + GenerateMessageRequest, + GenerateMessageResponse, + Message, + MessagePrompt, +) +from .file import File, VideoMetadata +from .file_service import ( + CreateFileRequest, + CreateFileResponse, + DeleteFileRequest, + GetFileRequest, + ListFilesRequest, + ListFilesResponse, +) +from .generative_service import ( + AttributionSourceId, + BatchEmbedContentsRequest, + BatchEmbedContentsResponse, + BidiGenerateContentClientContent, + BidiGenerateContentClientMessage, + BidiGenerateContentRealtimeInput, + BidiGenerateContentServerContent, + BidiGenerateContentServerMessage, + BidiGenerateContentSetup, + BidiGenerateContentSetupComplete, + BidiGenerateContentToolCall, + BidiGenerateContentToolCallCancellation, + BidiGenerateContentToolResponse, + Candidate, + ContentEmbedding, + CountTokensRequest, + CountTokensResponse, + EmbedContentRequest, + EmbedContentResponse, + GenerateAnswerRequest, + GenerateAnswerResponse, + GenerateContentRequest, + GenerateContentResponse, + GenerationConfig, + GroundingAttribution, + GroundingChunk, + GroundingMetadata, + GroundingSupport, + LogprobsResult, + PrebuiltVoiceConfig, + RetrievalMetadata, + SearchEntryPoint, + Segment, + SemanticRetrieverConfig, + SpeechConfig, + TaskType, + VoiceConfig, +) +from .model import Model +from .model_service import ( + CreateTunedModelMetadata, + CreateTunedModelRequest, + DeleteTunedModelRequest, + GetModelRequest, + GetTunedModelRequest, + ListModelsRequest, + ListModelsResponse, + ListTunedModelsRequest, + ListTunedModelsResponse, + UpdateTunedModelRequest, +) +from .permission import Permission +from .permission_service import ( + CreatePermissionRequest, + DeletePermissionRequest, + GetPermissionRequest, + ListPermissionsRequest, + ListPermissionsResponse, + TransferOwnershipRequest, + TransferOwnershipResponse, + UpdatePermissionRequest, +) +from .prediction_service import PredictRequest, PredictResponse +from .retriever import ( + Chunk, + ChunkData, + Condition, + Corpus, + CustomMetadata, + Document, + MetadataFilter, + StringList, +) +from .retriever_service import ( + BatchCreateChunksRequest, + BatchCreateChunksResponse, + BatchDeleteChunksRequest, + BatchUpdateChunksRequest, + BatchUpdateChunksResponse, + CreateChunkRequest, + CreateCorpusRequest, + CreateDocumentRequest, + DeleteChunkRequest, + DeleteCorpusRequest, + DeleteDocumentRequest, + GetChunkRequest, + GetCorpusRequest, + GetDocumentRequest, + ListChunksRequest, + ListChunksResponse, + ListCorporaRequest, + ListCorporaResponse, + ListDocumentsRequest, + ListDocumentsResponse, + QueryCorpusRequest, + QueryCorpusResponse, + QueryDocumentRequest, + QueryDocumentResponse, + RelevantChunk, + UpdateChunkRequest, + UpdateCorpusRequest, + UpdateDocumentRequest, +) +from .safety import ( + ContentFilter, + HarmCategory, + 
SafetyFeedback, + SafetyRating, + SafetySetting, +) +from .text_service import ( + BatchEmbedTextRequest, + BatchEmbedTextResponse, + CountTextTokensRequest, + CountTextTokensResponse, + Embedding, + EmbedTextRequest, + EmbedTextResponse, + GenerateTextRequest, + GenerateTextResponse, + TextCompletion, + TextPrompt, +) +from .tuned_model import ( + Dataset, + Hyperparameters, + TunedModel, + TunedModelSource, + TuningContent, + TuningExample, + TuningExamples, + TuningMultiturnExample, + TuningPart, + TuningSnapshot, + TuningTask, +) + +__all__ = ( + "CreateCachedContentRequest", + "DeleteCachedContentRequest", + "GetCachedContentRequest", + "ListCachedContentsRequest", + "ListCachedContentsResponse", + "UpdateCachedContentRequest", + "CachedContent", + "CitationMetadata", + "CitationSource", + "Blob", + "CodeExecution", + "CodeExecutionResult", + "Content", + "DynamicRetrievalConfig", + "ExecutableCode", + "FileData", + "FunctionCall", + "FunctionCallingConfig", + "FunctionDeclaration", + "FunctionResponse", + "GoogleSearchRetrieval", + "GroundingPassage", + "GroundingPassages", + "Part", + "Schema", + "Tool", + "ToolConfig", + "Type", + "CountMessageTokensRequest", + "CountMessageTokensResponse", + "Example", + "GenerateMessageRequest", + "GenerateMessageResponse", + "Message", + "MessagePrompt", + "File", + "VideoMetadata", + "CreateFileRequest", + "CreateFileResponse", + "DeleteFileRequest", + "GetFileRequest", + "ListFilesRequest", + "ListFilesResponse", + "AttributionSourceId", + "BatchEmbedContentsRequest", + "BatchEmbedContentsResponse", + "BidiGenerateContentClientContent", + "BidiGenerateContentClientMessage", + "BidiGenerateContentRealtimeInput", + "BidiGenerateContentServerContent", + "BidiGenerateContentServerMessage", + "BidiGenerateContentSetup", + "BidiGenerateContentSetupComplete", + "BidiGenerateContentToolCall", + "BidiGenerateContentToolCallCancellation", + "BidiGenerateContentToolResponse", + "Candidate", + "ContentEmbedding", + "CountTokensRequest", + "CountTokensResponse", + "EmbedContentRequest", + "EmbedContentResponse", + "GenerateAnswerRequest", + "GenerateAnswerResponse", + "GenerateContentRequest", + "GenerateContentResponse", + "GenerationConfig", + "GroundingAttribution", + "GroundingChunk", + "GroundingMetadata", + "GroundingSupport", + "LogprobsResult", + "PrebuiltVoiceConfig", + "RetrievalMetadata", + "SearchEntryPoint", + "Segment", + "SemanticRetrieverConfig", + "SpeechConfig", + "VoiceConfig", + "TaskType", + "Model", + "CreateTunedModelMetadata", + "CreateTunedModelRequest", + "DeleteTunedModelRequest", + "GetModelRequest", + "GetTunedModelRequest", + "ListModelsRequest", + "ListModelsResponse", + "ListTunedModelsRequest", + "ListTunedModelsResponse", + "UpdateTunedModelRequest", + "Permission", + "CreatePermissionRequest", + "DeletePermissionRequest", + "GetPermissionRequest", + "ListPermissionsRequest", + "ListPermissionsResponse", + "TransferOwnershipRequest", + "TransferOwnershipResponse", + "UpdatePermissionRequest", + "PredictRequest", + "PredictResponse", + "Chunk", + "ChunkData", + "Condition", + "Corpus", + "CustomMetadata", + "Document", + "MetadataFilter", + "StringList", + "BatchCreateChunksRequest", + "BatchCreateChunksResponse", + "BatchDeleteChunksRequest", + "BatchUpdateChunksRequest", + "BatchUpdateChunksResponse", + "CreateChunkRequest", + "CreateCorpusRequest", + "CreateDocumentRequest", + "DeleteChunkRequest", + "DeleteCorpusRequest", + "DeleteDocumentRequest", + "GetChunkRequest", + "GetCorpusRequest", + "GetDocumentRequest", + 
"ListChunksRequest", + "ListChunksResponse", + "ListCorporaRequest", + "ListCorporaResponse", + "ListDocumentsRequest", + "ListDocumentsResponse", + "QueryCorpusRequest", + "QueryCorpusResponse", + "QueryDocumentRequest", + "QueryDocumentResponse", + "RelevantChunk", + "UpdateChunkRequest", + "UpdateCorpusRequest", + "UpdateDocumentRequest", + "ContentFilter", + "SafetyFeedback", + "SafetyRating", + "SafetySetting", + "HarmCategory", + "BatchEmbedTextRequest", + "BatchEmbedTextResponse", + "CountTextTokensRequest", + "CountTextTokensResponse", + "Embedding", + "EmbedTextRequest", + "EmbedTextResponse", + "GenerateTextRequest", + "GenerateTextResponse", + "TextCompletion", + "TextPrompt", + "Dataset", + "Hyperparameters", + "TunedModel", + "TunedModelSource", + "TuningContent", + "TuningExample", + "TuningExamples", + "TuningMultiturnExample", + "TuningPart", + "TuningSnapshot", + "TuningTask", +) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/types/cache_service.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/types/cache_service.py new file mode 100644 index 000000000000..74a6145eb924 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/types/cache_service.py @@ -0,0 +1,167 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import field_mask_pb2 # type: ignore +import proto # type: ignore + +from google.ai.generativelanguage_v1alpha.types import ( + cached_content as gag_cached_content, +) + +__protobuf__ = proto.module( + package="google.ai.generativelanguage.v1alpha", + manifest={ + "ListCachedContentsRequest", + "ListCachedContentsResponse", + "CreateCachedContentRequest", + "GetCachedContentRequest", + "UpdateCachedContentRequest", + "DeleteCachedContentRequest", + }, +) + + +class ListCachedContentsRequest(proto.Message): + r"""Request to list CachedContents. + + Attributes: + page_size (int): + Optional. The maximum number of cached + contents to return. The service may return fewer + than this value. If unspecified, some default + (under maximum) number of items will be + returned. The maximum value is 1000; values + above 1000 will be coerced to 1000. + page_token (str): + Optional. A page token, received from a previous + ``ListCachedContents`` call. Provide this to retrieve the + subsequent page. + + When paginating, all other parameters provided to + ``ListCachedContents`` must match the call that provided the + page token. + """ + + page_size: int = proto.Field( + proto.INT32, + number=1, + ) + page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class ListCachedContentsResponse(proto.Message): + r"""Response with CachedContents list. + + Attributes: + cached_contents (MutableSequence[google.ai.generativelanguage_v1alpha.types.CachedContent]): + List of cached contents. 
+ next_page_token (str): + A token, which can be sent as ``page_token`` to retrieve the + next page. If this field is omitted, there are no subsequent + pages. + """ + + @property + def raw_page(self): + return self + + cached_contents: MutableSequence[ + gag_cached_content.CachedContent + ] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=gag_cached_content.CachedContent, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class CreateCachedContentRequest(proto.Message): + r"""Request to create CachedContent. + + Attributes: + cached_content (google.ai.generativelanguage_v1alpha.types.CachedContent): + Required. The cached content to create. + """ + + cached_content: gag_cached_content.CachedContent = proto.Field( + proto.MESSAGE, + number=1, + message=gag_cached_content.CachedContent, + ) + + +class GetCachedContentRequest(proto.Message): + r"""Request to read CachedContent. + + Attributes: + name (str): + Required. The resource name referring to the content cache + entry. Format: ``cachedContents/{id}`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class UpdateCachedContentRequest(proto.Message): + r"""Request to update CachedContent. + + Attributes: + cached_content (google.ai.generativelanguage_v1alpha.types.CachedContent): + Required. The content cache entry to update + update_mask (google.protobuf.field_mask_pb2.FieldMask): + The list of fields to update. + """ + + cached_content: gag_cached_content.CachedContent = proto.Field( + proto.MESSAGE, + number=1, + message=gag_cached_content.CachedContent, + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + + +class DeleteCachedContentRequest(proto.Message): + r"""Request to delete CachedContent. + + Attributes: + name (str): + Required. The resource name referring to the content cache + entry Format: ``cachedContents/{id}`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/types/cached_content.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/types/cached_content.py new file mode 100644 index 000000000000..3eeed986fcc9 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/types/cached_content.py @@ -0,0 +1,182 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +import proto # type: ignore + +from google.ai.generativelanguage_v1alpha.types import content + +__protobuf__ = proto.module( + package="google.ai.generativelanguage.v1alpha", + manifest={ + "CachedContent", + }, +) + + +class CachedContent(proto.Message): + r"""Content that has been preprocessed and can be used in + subsequent request to GenerativeService. + + Cached content can be only used with model it was created for. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + expire_time (google.protobuf.timestamp_pb2.Timestamp): + Timestamp in UTC of when this resource is considered + expired. This is *always* provided on output, regardless of + what was sent on input. + + This field is a member of `oneof`_ ``expiration``. + ttl (google.protobuf.duration_pb2.Duration): + Input only. New TTL for this resource, input + only. + + This field is a member of `oneof`_ ``expiration``. + name (str): + Optional. Identifier. The resource name referring to the + cached content. Format: ``cachedContents/{id}`` + + This field is a member of `oneof`_ ``_name``. + display_name (str): + Optional. Immutable. The user-generated + meaningful display name of the cached content. + Maximum 128 Unicode characters. + + This field is a member of `oneof`_ ``_display_name``. + model (str): + Required. Immutable. The name of the ``Model`` to use for + cached content Format: ``models/{model}`` + + This field is a member of `oneof`_ ``_model``. + system_instruction (google.ai.generativelanguage_v1alpha.types.Content): + Optional. Input only. Immutable. Developer + set system instruction. Currently text only. + + This field is a member of `oneof`_ ``_system_instruction``. + contents (MutableSequence[google.ai.generativelanguage_v1alpha.types.Content]): + Optional. Input only. Immutable. The content + to cache. + tools (MutableSequence[google.ai.generativelanguage_v1alpha.types.Tool]): + Optional. Input only. Immutable. A list of ``Tools`` the + model may use to generate the next response + tool_config (google.ai.generativelanguage_v1alpha.types.ToolConfig): + Optional. Input only. Immutable. Tool config. + This config is shared for all tools. + + This field is a member of `oneof`_ ``_tool_config``. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Creation time of the cache + entry. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. When the cache entry was last + updated in UTC time. + usage_metadata (google.ai.generativelanguage_v1alpha.types.CachedContent.UsageMetadata): + Output only. Metadata on the usage of the + cached content. + """ + + class UsageMetadata(proto.Message): + r"""Metadata on the usage of the cached content. + + Attributes: + total_token_count (int): + Total number of tokens that the cached + content consumes. 
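Editor's note: as the ``expiration`` oneof above implies, ``ttl`` and ``expire_time`` are mutually exclusive: setting one clears the other. A small construction sketch (the model name and TTL are placeholders):

    from google.protobuf import duration_pb2

    from google.ai.generativelanguage_v1alpha.types import CachedContent

    cached = CachedContent(
        model="models/your-model",
        display_name="example cache",
        ttl=duration_pb2.Duration(seconds=3600),  # expire one hour after creation
    )
    # Assigning cached.expire_time later would clear ttl, since both share the oneof.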
+ """ + + total_token_count: int = proto.Field( + proto.INT32, + number=1, + ) + + expire_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=9, + oneof="expiration", + message=timestamp_pb2.Timestamp, + ) + ttl: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=10, + oneof="expiration", + message=duration_pb2.Duration, + ) + name: str = proto.Field( + proto.STRING, + number=1, + optional=True, + ) + display_name: str = proto.Field( + proto.STRING, + number=11, + optional=True, + ) + model: str = proto.Field( + proto.STRING, + number=2, + optional=True, + ) + system_instruction: content.Content = proto.Field( + proto.MESSAGE, + number=3, + optional=True, + message=content.Content, + ) + contents: MutableSequence[content.Content] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message=content.Content, + ) + tools: MutableSequence[content.Tool] = proto.RepeatedField( + proto.MESSAGE, + number=5, + message=content.Tool, + ) + tool_config: content.ToolConfig = proto.Field( + proto.MESSAGE, + number=6, + optional=True, + message=content.ToolConfig, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=7, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=8, + message=timestamp_pb2.Timestamp, + ) + usage_metadata: UsageMetadata = proto.Field( + proto.MESSAGE, + number=12, + message=UsageMetadata, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/types/citation.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/types/citation.py new file mode 100644 index 000000000000..837bd3f4b2ea --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/types/citation.py @@ -0,0 +1,101 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.ai.generativelanguage.v1alpha", + manifest={ + "CitationMetadata", + "CitationSource", + }, +) + + +class CitationMetadata(proto.Message): + r"""A collection of source attributions for a piece of content. + + Attributes: + citation_sources (MutableSequence[google.ai.generativelanguage_v1alpha.types.CitationSource]): + Citations to sources for a specific response. + """ + + citation_sources: MutableSequence["CitationSource"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="CitationSource", + ) + + +class CitationSource(proto.Message): + r"""A citation to a source for a portion of a specific response. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + start_index (int): + Optional. Start of segment of the response + that is attributed to this source. 
+ + Index indicates the start of the segment, + measured in bytes. + + This field is a member of `oneof`_ ``_start_index``. + end_index (int): + Optional. End of the attributed segment, + exclusive. + + This field is a member of `oneof`_ ``_end_index``. + uri (str): + Optional. URI that is attributed as a source + for a portion of the text. + + This field is a member of `oneof`_ ``_uri``. + license_ (str): + Optional. License for the GitHub project that + is attributed as a source for segment. + + License info is required for code citations. + + This field is a member of `oneof`_ ``_license``. + """ + + start_index: int = proto.Field( + proto.INT32, + number=1, + optional=True, + ) + end_index: int = proto.Field( + proto.INT32, + number=2, + optional=True, + ) + uri: str = proto.Field( + proto.STRING, + number=3, + optional=True, + ) + license_: str = proto.Field( + proto.STRING, + number=4, + optional=True, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/types/content.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/types/content.py new file mode 100644 index 000000000000..d8ec135a889f --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/types/content.py @@ -0,0 +1,819 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import struct_pb2 # type: ignore +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.ai.generativelanguage.v1alpha", + manifest={ + "Type", + "Content", + "Part", + "Blob", + "FileData", + "ExecutableCode", + "CodeExecutionResult", + "Tool", + "GoogleSearchRetrieval", + "DynamicRetrievalConfig", + "CodeExecution", + "ToolConfig", + "FunctionCallingConfig", + "FunctionDeclaration", + "FunctionCall", + "FunctionResponse", + "Schema", + "GroundingPassage", + "GroundingPassages", + }, +) + + +class Type(proto.Enum): + r"""Type contains the list of OpenAPI data types as defined by + https://spec.openapis.org/oas/v3.0.3#data-types + + Values: + TYPE_UNSPECIFIED (0): + Not specified, should not be used. + STRING (1): + String type. + NUMBER (2): + Number type. + INTEGER (3): + Integer type. + BOOLEAN (4): + Boolean type. + ARRAY (5): + Array type. + OBJECT (6): + Object type. + """ + TYPE_UNSPECIFIED = 0 + STRING = 1 + NUMBER = 2 + INTEGER = 3 + BOOLEAN = 4 + ARRAY = 5 + OBJECT = 6 + + +class Content(proto.Message): + r"""The base structured datatype containing multi-part content of a + message. + + A ``Content`` includes a ``role`` field designating the producer of + the ``Content`` and a ``parts`` field containing multi-part data + that contains the content of the message turn. + + Attributes: + parts (MutableSequence[google.ai.generativelanguage_v1alpha.types.Part]): + Ordered ``Parts`` that constitute a single message. 
Parts + may have different MIME types. + role (str): + Optional. The producer of the content. Must + be either 'user' or 'model'. + Useful to set for multi-turn conversations, + otherwise can be left blank or unset. + """ + + parts: MutableSequence["Part"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="Part", + ) + role: str = proto.Field( + proto.STRING, + number=2, + ) + + +class Part(proto.Message): + r"""A datatype containing media that is part of a multi-part ``Content`` + message. + + A ``Part`` consists of data which has an associated datatype. A + ``Part`` can only contain one of the accepted types in + ``Part.data``. + + A ``Part`` must have a fixed IANA MIME type identifying the type and + subtype of the media if the ``inline_data`` field is filled with raw + bytes. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + text (str): + Inline text. + + This field is a member of `oneof`_ ``data``. + inline_data (google.ai.generativelanguage_v1alpha.types.Blob): + Inline media bytes. + + This field is a member of `oneof`_ ``data``. + function_call (google.ai.generativelanguage_v1alpha.types.FunctionCall): + A predicted ``FunctionCall`` returned from the model that + contains a string representing the + ``FunctionDeclaration.name`` with the arguments and their + values. + + This field is a member of `oneof`_ ``data``. + function_response (google.ai.generativelanguage_v1alpha.types.FunctionResponse): + The result output of a ``FunctionCall`` that contains a + string representing the ``FunctionDeclaration.name`` and a + structured JSON object containing any output from the + function is used as context to the model. + + This field is a member of `oneof`_ ``data``. + file_data (google.ai.generativelanguage_v1alpha.types.FileData): + URI based data. + + This field is a member of `oneof`_ ``data``. + executable_code (google.ai.generativelanguage_v1alpha.types.ExecutableCode): + Code generated by the model that is meant to + be executed. + + This field is a member of `oneof`_ ``data``. + code_execution_result (google.ai.generativelanguage_v1alpha.types.CodeExecutionResult): + Result of executing the ``ExecutableCode``. + + This field is a member of `oneof`_ ``data``. + """ + + text: str = proto.Field( + proto.STRING, + number=2, + oneof="data", + ) + inline_data: "Blob" = proto.Field( + proto.MESSAGE, + number=3, + oneof="data", + message="Blob", + ) + function_call: "FunctionCall" = proto.Field( + proto.MESSAGE, + number=4, + oneof="data", + message="FunctionCall", + ) + function_response: "FunctionResponse" = proto.Field( + proto.MESSAGE, + number=5, + oneof="data", + message="FunctionResponse", + ) + file_data: "FileData" = proto.Field( + proto.MESSAGE, + number=6, + oneof="data", + message="FileData", + ) + executable_code: "ExecutableCode" = proto.Field( + proto.MESSAGE, + number=9, + oneof="data", + message="ExecutableCode", + ) + code_execution_result: "CodeExecutionResult" = proto.Field( + proto.MESSAGE, + number=10, + oneof="data", + message="CodeExecutionResult", + ) + + +class Blob(proto.Message): + r"""Raw media bytes. + + Text should not be sent as raw bytes, use the 'text' field. + + Attributes: + mime_type (str): + The IANA standard MIME type of the source data. 
Examples: + + - image/png + - image/jpeg If an unsupported MIME type is provided, an + error will be returned. For a complete list of supported + types, see `Supported file + formats `__. + data (bytes): + Raw bytes for media formats. + """ + + mime_type: str = proto.Field( + proto.STRING, + number=1, + ) + data: bytes = proto.Field( + proto.BYTES, + number=2, + ) + + +class FileData(proto.Message): + r"""URI based data. + + Attributes: + mime_type (str): + Optional. The IANA standard MIME type of the + source data. + file_uri (str): + Required. URI. + """ + + mime_type: str = proto.Field( + proto.STRING, + number=1, + ) + file_uri: str = proto.Field( + proto.STRING, + number=2, + ) + + +class ExecutableCode(proto.Message): + r"""Code generated by the model that is meant to be executed, and the + result returned to the model. + + Only generated when using the ``CodeExecution`` tool, in which the + code will be automatically executed, and a corresponding + ``CodeExecutionResult`` will also be generated. + + Attributes: + language (google.ai.generativelanguage_v1alpha.types.ExecutableCode.Language): + Required. Programming language of the ``code``. + code (str): + Required. The code to be executed. + """ + + class Language(proto.Enum): + r"""Supported programming languages for the generated code. + + Values: + LANGUAGE_UNSPECIFIED (0): + Unspecified language. This value should not + be used. + PYTHON (1): + Python >= 3.10, with numpy and simpy + available. + """ + LANGUAGE_UNSPECIFIED = 0 + PYTHON = 1 + + language: Language = proto.Field( + proto.ENUM, + number=1, + enum=Language, + ) + code: str = proto.Field( + proto.STRING, + number=2, + ) + + +class CodeExecutionResult(proto.Message): + r"""Result of executing the ``ExecutableCode``. + + Only generated when using the ``CodeExecution``, and always follows + a ``part`` containing the ``ExecutableCode``. + + Attributes: + outcome (google.ai.generativelanguage_v1alpha.types.CodeExecutionResult.Outcome): + Required. Outcome of the code execution. + output (str): + Optional. Contains stdout when code execution + is successful, stderr or other description + otherwise. + """ + + class Outcome(proto.Enum): + r"""Enumeration of possible outcomes of the code execution. + + Values: + OUTCOME_UNSPECIFIED (0): + Unspecified status. This value should not be + used. + OUTCOME_OK (1): + Code execution completed successfully. + OUTCOME_FAILED (2): + Code execution finished but with a failure. ``stderr`` + should contain the reason. + OUTCOME_DEADLINE_EXCEEDED (3): + Code execution ran for too long, and was + cancelled. There may or may not be a partial + output present. + """ + OUTCOME_UNSPECIFIED = 0 + OUTCOME_OK = 1 + OUTCOME_FAILED = 2 + OUTCOME_DEADLINE_EXCEEDED = 3 + + outcome: Outcome = proto.Field( + proto.ENUM, + number=1, + enum=Outcome, + ) + output: str = proto.Field( + proto.STRING, + number=2, + ) + + +class Tool(proto.Message): + r"""Tool details that the model may use to generate response. + + A ``Tool`` is a piece of code that enables the system to interact + with external systems to perform an action, or set of actions, + outside of knowledge and scope of the model. + + Attributes: + function_declarations (MutableSequence[google.ai.generativelanguage_v1alpha.types.FunctionDeclaration]): + Optional. A list of ``FunctionDeclarations`` available to + the model that can be used for function calling. + + The model or system does not execute the function. 
Instead + the defined function may be returned as a + [FunctionCall][google.ai.generativelanguage.v1alpha.Part.function_call] + with arguments to the client side for execution. The model + may decide to call a subset of these functions by populating + [FunctionCall][google.ai.generativelanguage.v1alpha.Part.function_call] + in the response. The next conversation turn may contain a + [FunctionResponse][google.ai.generativelanguage.v1alpha.Part.function_response] + with the + [Content.role][google.ai.generativelanguage.v1alpha.Content.role] + "function" generation context for the next model turn. + google_search_retrieval (google.ai.generativelanguage_v1alpha.types.GoogleSearchRetrieval): + Optional. Retrieval tool that is powered by + Google search. + code_execution (google.ai.generativelanguage_v1alpha.types.CodeExecution): + Optional. Enables the model to execute code + as part of generation. + google_search (google.ai.generativelanguage_v1alpha.types.Tool.GoogleSearch): + Optional. GoogleSearch tool type. + Tool to support Google Search in Model. Powered + by Google. + """ + + class GoogleSearch(proto.Message): + r"""GoogleSearch tool type. + Tool to support Google Search in Model. Powered by Google. + + """ + + function_declarations: MutableSequence["FunctionDeclaration"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="FunctionDeclaration", + ) + google_search_retrieval: "GoogleSearchRetrieval" = proto.Field( + proto.MESSAGE, + number=2, + message="GoogleSearchRetrieval", + ) + code_execution: "CodeExecution" = proto.Field( + proto.MESSAGE, + number=3, + message="CodeExecution", + ) + google_search: GoogleSearch = proto.Field( + proto.MESSAGE, + number=4, + message=GoogleSearch, + ) + + +class GoogleSearchRetrieval(proto.Message): + r"""Tool to retrieve public web data for grounding, powered by + Google. + + Attributes: + dynamic_retrieval_config (google.ai.generativelanguage_v1alpha.types.DynamicRetrievalConfig): + Specifies the dynamic retrieval configuration + for the given source. + """ + + dynamic_retrieval_config: "DynamicRetrievalConfig" = proto.Field( + proto.MESSAGE, + number=1, + message="DynamicRetrievalConfig", + ) + + +class DynamicRetrievalConfig(proto.Message): + r"""Describes the options to customize dynamic retrieval. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + mode (google.ai.generativelanguage_v1alpha.types.DynamicRetrievalConfig.Mode): + The mode of the predictor to be used in + dynamic retrieval. + dynamic_threshold (float): + The threshold to be used in dynamic + retrieval. If not set, a system default value is + used. + + This field is a member of `oneof`_ ``_dynamic_threshold``. + """ + + class Mode(proto.Enum): + r"""The mode of the predictor to be used in dynamic retrieval. + + Values: + MODE_UNSPECIFIED (0): + Always trigger retrieval. + MODE_DYNAMIC (1): + Run retrieval only when system decides it is + necessary. + """ + MODE_UNSPECIFIED = 0 + MODE_DYNAMIC = 1 + + mode: Mode = proto.Field( + proto.ENUM, + number=1, + enum=Mode, + ) + dynamic_threshold: float = proto.Field( + proto.FLOAT, + number=2, + optional=True, + ) + + +class CodeExecution(proto.Message): + r"""Tool that executes code generated by the model, and automatically + returns the result to the model. + + See also ``ExecutableCode`` and ``CodeExecutionResult`` which are + only generated when using this tool. 
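Two small sketches of how these tool types compose (editorial, assuming the ``content`` types module shown in this diff): ``CodeExecution`` has no fields, so its mere presence on a ``Tool`` enables the capability, while Google Search grounding is tuned through ``DynamicRetrievalConfig``.

from google.ai.generativelanguage_v1alpha.types import content

# Presence of the empty CodeExecution message is what turns code execution on.
code_tool = content.Tool(code_execution=content.CodeExecution())

# Ground on Google Search, but only retrieve when the model decides it helps.
search_tool = content.Tool(
    google_search_retrieval=content.GoogleSearchRetrieval(
        dynamic_retrieval_config=content.DynamicRetrievalConfig(
            mode=content.DynamicRetrievalConfig.Mode.MODE_DYNAMIC,
            dynamic_threshold=0.7,  # illustrative threshold
        )
    )
)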
+ + """ + + +class ToolConfig(proto.Message): + r"""The Tool configuration containing parameters for specifying ``Tool`` + use in the request. + + Attributes: + function_calling_config (google.ai.generativelanguage_v1alpha.types.FunctionCallingConfig): + Optional. Function calling config. + """ + + function_calling_config: "FunctionCallingConfig" = proto.Field( + proto.MESSAGE, + number=1, + message="FunctionCallingConfig", + ) + + +class FunctionCallingConfig(proto.Message): + r"""Configuration for specifying function calling behavior. + + Attributes: + mode (google.ai.generativelanguage_v1alpha.types.FunctionCallingConfig.Mode): + Optional. Specifies the mode in which + function calling should execute. If unspecified, + the default value will be set to AUTO. + allowed_function_names (MutableSequence[str]): + Optional. A set of function names that, when provided, + limits the functions the model will call. + + This should only be set when the Mode is ANY. Function names + should match [FunctionDeclaration.name]. With mode set to + ANY, model will predict a function call from the set of + function names provided. + """ + + class Mode(proto.Enum): + r"""Defines the execution behavior for function calling by + defining the execution mode. + + Values: + MODE_UNSPECIFIED (0): + Unspecified function calling mode. This value + should not be used. + AUTO (1): + Default model behavior, model decides to + predict either a function call or a natural + language response. + ANY (2): + Model is constrained to always predicting a function call + only. If "allowed_function_names" are set, the predicted + function call will be limited to any one of + "allowed_function_names", else the predicted function call + will be any one of the provided "function_declarations". + NONE (3): + Model will not predict any function call. + Model behavior is same as when not passing any + function declarations. + """ + MODE_UNSPECIFIED = 0 + AUTO = 1 + ANY = 2 + NONE = 3 + + mode: Mode = proto.Field( + proto.ENUM, + number=1, + enum=Mode, + ) + allowed_function_names: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) + + +class FunctionDeclaration(proto.Message): + r"""Structured representation of a function declaration as defined by + the `OpenAPI 3.03 + specification `__. Included in + this declaration are the function name and parameters. This + FunctionDeclaration is a representation of a block of code that can + be used as a ``Tool`` by the model and executed by the client. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + Required. The name of the function. + Must be a-z, A-Z, 0-9, or contain underscores + and dashes, with a maximum length of 63. + description (str): + Required. A brief description of the + function. + parameters (google.ai.generativelanguage_v1alpha.types.Schema): + Optional. Describes the parameters to this + function. Reflects the Open API 3.03 Parameter + Object string Key: the name of the parameter. + Parameter names are case sensitive. Schema + Value: the Schema defining the type used for the + parameter. + + This field is a member of `oneof`_ ``_parameters``. + response (google.ai.generativelanguage_v1alpha.types.Schema): + Optional. Describes the output from this + function in JSON Schema format. Reflects the + Open API 3.03 Response Object. The Schema + defines the type used for the response value of + the function. + + This field is a member of `oneof`_ ``_response``. 
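An editorial sketch of declaring a function and then constraining the model to call it; ``get_weather`` is a made-up client-side function used only for illustration.

from google.ai.generativelanguage_v1alpha.types import content

weather_tool = content.Tool(
    function_declarations=[
        content.FunctionDeclaration(
            name="get_weather",
            description="Look up the current forecast for a city.",
            parameters=content.Schema(
                type_=content.Type.OBJECT,
                properties={"city": content.Schema(type_=content.Type.STRING)},
                required=["city"],
            ),
        )
    ]
)

# Mode.ANY forces a function call, limited here to the one declared function.
tool_config = content.ToolConfig(
    function_calling_config=content.FunctionCallingConfig(
        mode=content.FunctionCallingConfig.Mode.ANY,
        allowed_function_names=["get_weather"],
    )
)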
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + description: str = proto.Field( + proto.STRING, + number=2, + ) + parameters: "Schema" = proto.Field( + proto.MESSAGE, + number=3, + optional=True, + message="Schema", + ) + response: "Schema" = proto.Field( + proto.MESSAGE, + number=4, + optional=True, + message="Schema", + ) + + +class FunctionCall(proto.Message): + r"""A predicted ``FunctionCall`` returned from the model that contains a + string representing the ``FunctionDeclaration.name`` with the + arguments and their values. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + id (str): + Optional. The unique id of the function call. If populated, + the client to execute the ``function_call`` and return the + response with the matching ``id``. + name (str): + Required. The name of the function to call. + Must be a-z, A-Z, 0-9, or contain underscores + and dashes, with a maximum length of 63. + args (google.protobuf.struct_pb2.Struct): + Optional. The function parameters and values + in JSON object format. + + This field is a member of `oneof`_ ``_args``. + """ + + id: str = proto.Field( + proto.STRING, + number=3, + ) + name: str = proto.Field( + proto.STRING, + number=1, + ) + args: struct_pb2.Struct = proto.Field( + proto.MESSAGE, + number=2, + optional=True, + message=struct_pb2.Struct, + ) + + +class FunctionResponse(proto.Message): + r"""The result output from a ``FunctionCall`` that contains a string + representing the ``FunctionDeclaration.name`` and a structured JSON + object containing any output from the function is used as context to + the model. This should contain the result of a\ ``FunctionCall`` + made based on model prediction. + + Attributes: + id (str): + Optional. The id of the function call this response is for. + Populated by the client to match the corresponding function + call ``id``. + name (str): + Required. The name of the function to call. + Must be a-z, A-Z, 0-9, or contain underscores + and dashes, with a maximum length of 63. + response (google.protobuf.struct_pb2.Struct): + Required. The function response in JSON + object format. + """ + + id: str = proto.Field( + proto.STRING, + number=3, + ) + name: str = proto.Field( + proto.STRING, + number=1, + ) + response: struct_pb2.Struct = proto.Field( + proto.MESSAGE, + number=2, + message=struct_pb2.Struct, + ) + + +class Schema(proto.Message): + r"""The ``Schema`` object allows the definition of input and output data + types. These types can be objects, but also primitives and arrays. + Represents a select subset of an `OpenAPI 3.0 schema + object `__. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + type_ (google.ai.generativelanguage_v1alpha.types.Type): + Required. Data type. + format_ (str): + Optional. The format of the data. This is + used only for primitive datatypes. Supported + formats: + + for NUMBER type: float, double + for INTEGER type: int32, int64 + for STRING type: enum + description (str): + Optional. A brief description of the + parameter. This could contain examples of use. + Parameter description may be formatted as + Markdown. + nullable (bool): + Optional. Indicates if the value may be null. + enum (MutableSequence[str]): + Optional. Possible values of the element of Type.STRING with + enum format. 
For example we can define an Enum Direction as + : {type:STRING, format:enum, enum:["EAST", NORTH", "SOUTH", + "WEST"]} + items (google.ai.generativelanguage_v1alpha.types.Schema): + Optional. Schema of the elements of + Type.ARRAY. + + This field is a member of `oneof`_ ``_items``. + max_items (int): + Optional. Maximum number of the elements for + Type.ARRAY. + min_items (int): + Optional. Minimum number of the elements for + Type.ARRAY. + properties (MutableMapping[str, google.ai.generativelanguage_v1alpha.types.Schema]): + Optional. Properties of Type.OBJECT. + required (MutableSequence[str]): + Optional. Required properties of Type.OBJECT. + """ + + type_: "Type" = proto.Field( + proto.ENUM, + number=1, + enum="Type", + ) + format_: str = proto.Field( + proto.STRING, + number=2, + ) + description: str = proto.Field( + proto.STRING, + number=3, + ) + nullable: bool = proto.Field( + proto.BOOL, + number=4, + ) + enum: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=5, + ) + items: "Schema" = proto.Field( + proto.MESSAGE, + number=6, + optional=True, + message="Schema", + ) + max_items: int = proto.Field( + proto.INT64, + number=21, + ) + min_items: int = proto.Field( + proto.INT64, + number=22, + ) + properties: MutableMapping[str, "Schema"] = proto.MapField( + proto.STRING, + proto.MESSAGE, + number=7, + message="Schema", + ) + required: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=8, + ) + + +class GroundingPassage(proto.Message): + r"""Passage included inline with a grounding configuration. + + Attributes: + id (str): + Identifier for the passage for attributing + this passage in grounded answers. + content (google.ai.generativelanguage_v1alpha.types.Content): + Content of the passage. + """ + + id: str = proto.Field( + proto.STRING, + number=1, + ) + content: "Content" = proto.Field( + proto.MESSAGE, + number=2, + message="Content", + ) + + +class GroundingPassages(proto.Message): + r"""A repeated list of passages. + + Attributes: + passages (MutableSequence[google.ai.generativelanguage_v1alpha.types.GroundingPassage]): + List of passages. + """ + + passages: MutableSequence["GroundingPassage"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="GroundingPassage", + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/types/discuss_service.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/types/discuss_service.py new file mode 100644 index 000000000000..2ce5d18dc6b1 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/types/discuss_service.py @@ -0,0 +1,356 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
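The "Direction" enum example from the ``Schema`` docstring above, written out as code (editorial sketch using the ``content`` types module from this diff):

from google.ai.generativelanguage_v1alpha.types import content

# STRING type with "enum" format restricts values to the listed strings.
direction = content.Schema(
    type_=content.Type.STRING,
    format_="enum",
    enum=["EAST", "NORTH", "SOUTH", "WEST"],
)

# The same schema nested inside an OBJECT, e.g. as a function parameter.
move_params = content.Schema(
    type_=content.Type.OBJECT,
    properties={"direction": direction},
    required=["direction"],
)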
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.ai.generativelanguage_v1alpha.types import citation, safety + +__protobuf__ = proto.module( + package="google.ai.generativelanguage.v1alpha", + manifest={ + "GenerateMessageRequest", + "GenerateMessageResponse", + "Message", + "MessagePrompt", + "Example", + "CountMessageTokensRequest", + "CountMessageTokensResponse", + }, +) + + +class GenerateMessageRequest(proto.Message): + r"""Request to generate a message response from the model. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + model (str): + Required. The name of the model to use. + + Format: ``name=models/{model}``. + prompt (google.ai.generativelanguage_v1alpha.types.MessagePrompt): + Required. The structured textual input given + to the model as a prompt. + Given a + prompt, the model will return what it predicts + is the next message in the discussion. + temperature (float): + Optional. Controls the randomness of the output. + + Values can range over ``[0.0,1.0]``, inclusive. A value + closer to ``1.0`` will produce responses that are more + varied, while a value closer to ``0.0`` will typically + result in less surprising responses from the model. + + This field is a member of `oneof`_ ``_temperature``. + candidate_count (int): + Optional. The number of generated response messages to + return. + + This value must be between ``[1, 8]``, inclusive. If unset, + this will default to ``1``. + + This field is a member of `oneof`_ ``_candidate_count``. + top_p (float): + Optional. The maximum cumulative probability of tokens to + consider when sampling. + + The model uses combined Top-k and nucleus sampling. + + Nucleus sampling considers the smallest set of tokens whose + probability sum is at least ``top_p``. + + This field is a member of `oneof`_ ``_top_p``. + top_k (int): + Optional. The maximum number of tokens to consider when + sampling. + + The model uses combined Top-k and nucleus sampling. + + Top-k sampling considers the set of ``top_k`` most probable + tokens. + + This field is a member of `oneof`_ ``_top_k``. + """ + + model: str = proto.Field( + proto.STRING, + number=1, + ) + prompt: "MessagePrompt" = proto.Field( + proto.MESSAGE, + number=2, + message="MessagePrompt", + ) + temperature: float = proto.Field( + proto.FLOAT, + number=3, + optional=True, + ) + candidate_count: int = proto.Field( + proto.INT32, + number=4, + optional=True, + ) + top_p: float = proto.Field( + proto.FLOAT, + number=5, + optional=True, + ) + top_k: int = proto.Field( + proto.INT32, + number=6, + optional=True, + ) + + +class GenerateMessageResponse(proto.Message): + r"""The response from the model. + + This includes candidate messages and + conversation history in the form of chronologically-ordered + messages. + + Attributes: + candidates (MutableSequence[google.ai.generativelanguage_v1alpha.types.Message]): + Candidate response messages from the model. + messages (MutableSequence[google.ai.generativelanguage_v1alpha.types.Message]): + The conversation history used by the model. + filters (MutableSequence[google.ai.generativelanguage_v1alpha.types.ContentFilter]): + A set of content filtering metadata for the prompt and + response text. 
+ + This indicates which ``SafetyCategory``\ (s) blocked a + candidate from this response, the lowest ``HarmProbability`` + that triggered a block, and the HarmThreshold setting for + that category. + """ + + candidates: MutableSequence["Message"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="Message", + ) + messages: MutableSequence["Message"] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message="Message", + ) + filters: MutableSequence[safety.ContentFilter] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message=safety.ContentFilter, + ) + + +class Message(proto.Message): + r"""The base unit of structured text. + + A ``Message`` includes an ``author`` and the ``content`` of the + ``Message``. + + The ``author`` is used to tag messages when they are fed to the + model as text. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + author (str): + Optional. The author of this Message. + + This serves as a key for tagging + the content of this Message when it is fed to + the model as text. + + The author can be any alphanumeric string. + content (str): + Required. The text content of the structured ``Message``. + citation_metadata (google.ai.generativelanguage_v1alpha.types.CitationMetadata): + Output only. Citation information for model-generated + ``content`` in this ``Message``. + + If this ``Message`` was generated as output from the model, + this field may be populated with attribution information for + any text included in the ``content``. This field is used + only on output. + + This field is a member of `oneof`_ ``_citation_metadata``. + """ + + author: str = proto.Field( + proto.STRING, + number=1, + ) + content: str = proto.Field( + proto.STRING, + number=2, + ) + citation_metadata: citation.CitationMetadata = proto.Field( + proto.MESSAGE, + number=3, + optional=True, + message=citation.CitationMetadata, + ) + + +class MessagePrompt(proto.Message): + r"""All of the structured input text passed to the model as a prompt. + + A ``MessagePrompt`` contains a structured set of fields that provide + context for the conversation, examples of user input/model output + message pairs that prime the model to respond in different ways, and + the conversation history or list of messages representing the + alternating turns of the conversation between the user and the + model. + + Attributes: + context (str): + Optional. Text that should be provided to the model first to + ground the response. + + If not empty, this ``context`` will be given to the model + first before the ``examples`` and ``messages``. When using a + ``context`` be sure to provide it with every request to + maintain continuity. + + This field can be a description of your prompt to the model + to help provide context and guide the responses. Examples: + "Translate the phrase from English to French." or "Given a + statement, classify the sentiment as happy, sad or neutral." + + Anything included in this field will take precedence over + message history if the total input size exceeds the model's + ``input_token_limit`` and the input request is truncated. + examples (MutableSequence[google.ai.generativelanguage_v1alpha.types.Example]): + Optional. Examples of what the model should generate. + + This includes both user input and the response that the + model should emulate. 
+ + These ``examples`` are treated identically to conversation + messages except that they take precedence over the history + in ``messages``: If the total input size exceeds the model's + ``input_token_limit`` the input will be truncated. Items + will be dropped from ``messages`` before ``examples``. + messages (MutableSequence[google.ai.generativelanguage_v1alpha.types.Message]): + Required. A snapshot of the recent conversation history + sorted chronologically. + + Turns alternate between two authors. + + If the total input size exceeds the model's + ``input_token_limit`` the input will be truncated: The + oldest items will be dropped from ``messages``. + """ + + context: str = proto.Field( + proto.STRING, + number=1, + ) + examples: MutableSequence["Example"] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message="Example", + ) + messages: MutableSequence["Message"] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message="Message", + ) + + +class Example(proto.Message): + r"""An input/output example used to instruct the Model. + + It demonstrates how the model should respond or format its + response. + + Attributes: + input (google.ai.generativelanguage_v1alpha.types.Message): + Required. An example of an input ``Message`` from the user. + output (google.ai.generativelanguage_v1alpha.types.Message): + Required. An example of what the model should + output given the input. + """ + + input: "Message" = proto.Field( + proto.MESSAGE, + number=1, + message="Message", + ) + output: "Message" = proto.Field( + proto.MESSAGE, + number=2, + message="Message", + ) + + +class CountMessageTokensRequest(proto.Message): + r"""Counts the number of tokens in the ``prompt`` sent to a model. + + Models may tokenize text differently, so each model may return a + different ``token_count``. + + Attributes: + model (str): + Required. The model's resource name. This serves as an ID + for the Model to use. + + This name should match a model name returned by the + ``ListModels`` method. + + Format: ``models/{model}`` + prompt (google.ai.generativelanguage_v1alpha.types.MessagePrompt): + Required. The prompt, whose token count is to + be returned. + """ + + model: str = proto.Field( + proto.STRING, + number=1, + ) + prompt: "MessagePrompt" = proto.Field( + proto.MESSAGE, + number=2, + message="MessagePrompt", + ) + + +class CountMessageTokensResponse(proto.Message): + r"""A response from ``CountMessageTokens``. + + It returns the model's ``token_count`` for the ``prompt``. + + Attributes: + token_count (int): + The number of tokens that the ``model`` tokenizes the + ``prompt`` into. + + Always non-negative. + """ + + token_count: int = proto.Field( + proto.INT32, + number=1, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/types/file.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/types/file.py new file mode 100644 index 000000000000..8a3fdac30930 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/types/file.py @@ -0,0 +1,174 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
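An editorial sketch of assembling the discuss-service types above into a request; the model name and the example texts are placeholders.

from google.ai.generativelanguage_v1alpha.types import discuss_service

prompt = discuss_service.MessagePrompt(
    context="Reply in the style of a formal butler.",
    examples=[
        discuss_service.Example(
            input=discuss_service.Message(content="hi"),
            output=discuss_service.Message(content="Good day. How may I help?"),
        )
    ],
    messages=[discuss_service.Message(author="user", content="What's for dinner?")],
)

request = discuss_service.GenerateMessageRequest(
    model="models/some-chat-model",  # placeholder model name
    prompt=prompt,
    temperature=0.5,
    candidate_count=2,
)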
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.ai.generativelanguage.v1alpha", + manifest={ + "File", + "VideoMetadata", + }, +) + + +class File(proto.Message): + r"""A file uploaded to the API. + Next ID: 15 + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + video_metadata (google.ai.generativelanguage_v1alpha.types.VideoMetadata): + Output only. Metadata for a video. + + This field is a member of `oneof`_ ``metadata``. + name (str): + Immutable. Identifier. The ``File`` resource name. The ID + (name excluding the "files/" prefix) can contain up to 40 + characters that are lowercase alphanumeric or dashes (-). + The ID cannot start or end with a dash. If the name is empty + on create, a unique name will be generated. Example: + ``files/123-456`` + display_name (str): + Optional. The human-readable display name for the ``File``. + The display name must be no more than 512 characters in + length, including spaces. Example: "Welcome Image". + mime_type (str): + Output only. MIME type of the file. + size_bytes (int): + Output only. Size of the file in bytes. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The timestamp of when the ``File`` was created. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The timestamp of when the ``File`` was last + updated. + expiration_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The timestamp of when the ``File`` will be + deleted. Only set if the ``File`` is scheduled to expire. + sha256_hash (bytes): + Output only. SHA-256 hash of the uploaded + bytes. + uri (str): + Output only. The uri of the ``File``. + state (google.ai.generativelanguage_v1alpha.types.File.State): + Output only. Processing state of the File. + error (google.rpc.status_pb2.Status): + Output only. Error status if File processing + failed. + """ + + class State(proto.Enum): + r"""States for the lifecycle of a File. + + Values: + STATE_UNSPECIFIED (0): + The default value. This value is used if the + state is omitted. + PROCESSING (1): + File is being processed and cannot be used + for inference yet. + ACTIVE (2): + File is processed and available for + inference. + FAILED (10): + File failed processing. 
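A small editorial sketch of working with ``File.State``: only ``ACTIVE`` files are usable for inference, and ``FAILED`` files carry an ``error`` status. The alias ``gag_file`` mirrors the import style used elsewhere in this diff.

from google.ai.generativelanguage_v1alpha.types import file as gag_file

def is_ready(f: gag_file.File) -> bool:
    # PROCESSING files are not yet usable; ACTIVE ones are.
    return f.state == gag_file.File.State.ACTIVE

def raise_if_failed(f: gag_file.File) -> None:
    if f.state == gag_file.File.State.FAILED:
        # ``error`` is a google.rpc.Status explaining why processing failed.
        raise RuntimeError(f"{f.name} failed processing: {f.error.message}")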
+ """ + STATE_UNSPECIFIED = 0 + PROCESSING = 1 + ACTIVE = 2 + FAILED = 10 + + video_metadata: "VideoMetadata" = proto.Field( + proto.MESSAGE, + number=12, + oneof="metadata", + message="VideoMetadata", + ) + name: str = proto.Field( + proto.STRING, + number=1, + ) + display_name: str = proto.Field( + proto.STRING, + number=2, + ) + mime_type: str = proto.Field( + proto.STRING, + number=3, + ) + size_bytes: int = proto.Field( + proto.INT64, + number=4, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=6, + message=timestamp_pb2.Timestamp, + ) + expiration_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=7, + message=timestamp_pb2.Timestamp, + ) + sha256_hash: bytes = proto.Field( + proto.BYTES, + number=8, + ) + uri: str = proto.Field( + proto.STRING, + number=9, + ) + state: State = proto.Field( + proto.ENUM, + number=10, + enum=State, + ) + error: status_pb2.Status = proto.Field( + proto.MESSAGE, + number=11, + message=status_pb2.Status, + ) + + +class VideoMetadata(proto.Message): + r"""Metadata for a video ``File``. + + Attributes: + video_duration (google.protobuf.duration_pb2.Duration): + Duration of the video. + """ + + video_duration: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=1, + message=duration_pb2.Duration, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/types/file_service.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/types/file_service.py new file mode 100644 index 000000000000..6398c075f2d3 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/types/file_service.py @@ -0,0 +1,145 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.ai.generativelanguage_v1alpha.types import file as gag_file + +__protobuf__ = proto.module( + package="google.ai.generativelanguage.v1alpha", + manifest={ + "CreateFileRequest", + "CreateFileResponse", + "ListFilesRequest", + "ListFilesResponse", + "GetFileRequest", + "DeleteFileRequest", + }, +) + + +class CreateFileRequest(proto.Message): + r"""Request for ``CreateFile``. + + Attributes: + file (google.ai.generativelanguage_v1alpha.types.File): + Optional. Metadata for the file to create. + """ + + file: gag_file.File = proto.Field( + proto.MESSAGE, + number=1, + message=gag_file.File, + ) + + +class CreateFileResponse(proto.Message): + r"""Response for ``CreateFile``. + + Attributes: + file (google.ai.generativelanguage_v1alpha.types.File): + Metadata for the created file. 
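An editorial sketch of paging through ``ListFiles`` by hand using the request and response messages below; the ``client`` object (with a ``list_files`` method) is assumed and not shown here, and generated clients typically also offer auto-paginating helpers.

from google.ai.generativelanguage_v1alpha.types import file_service

def list_all_file_names(client):
    names = []
    request = file_service.ListFilesRequest(page_size=10)
    while True:
        response = client.list_files(request=request)
        names.extend(f.name for f in response.files)
        if not response.next_page_token:
            break
        # Chain pages by echoing next_page_token back as page_token.
        request = file_service.ListFilesRequest(
            page_size=10, page_token=response.next_page_token
        )
    return names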
+ """ + + file: gag_file.File = proto.Field( + proto.MESSAGE, + number=1, + message=gag_file.File, + ) + + +class ListFilesRequest(proto.Message): + r"""Request for ``ListFiles``. + + Attributes: + page_size (int): + Optional. Maximum number of ``File``\ s to return per page. + If unspecified, defaults to 10. Maximum ``page_size`` is + 100. + page_token (str): + Optional. A page token from a previous ``ListFiles`` call. + """ + + page_size: int = proto.Field( + proto.INT32, + number=1, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ListFilesResponse(proto.Message): + r"""Response for ``ListFiles``. + + Attributes: + files (MutableSequence[google.ai.generativelanguage_v1alpha.types.File]): + The list of ``File``\ s. + next_page_token (str): + A token that can be sent as a ``page_token`` into a + subsequent ``ListFiles`` call. + """ + + @property + def raw_page(self): + return self + + files: MutableSequence[gag_file.File] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=gag_file.File, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class GetFileRequest(proto.Message): + r"""Request for ``GetFile``. + + Attributes: + name (str): + Required. The name of the ``File`` to get. Example: + ``files/abc-123`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class DeleteFileRequest(proto.Message): + r"""Request for ``DeleteFile``. + + Attributes: + name (str): + Required. The name of the ``File`` to delete. Example: + ``files/abc-123`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/types/generative_service.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/types/generative_service.py new file mode 100644 index 000000000000..c1088ab1682c --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/types/generative_service.py @@ -0,0 +1,2139 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.ai.generativelanguage_v1alpha.types import citation +from google.ai.generativelanguage_v1alpha.types import content as gag_content +from google.ai.generativelanguage_v1alpha.types import retriever, safety + +__protobuf__ = proto.module( + package="google.ai.generativelanguage.v1alpha", + manifest={ + "TaskType", + "GenerateContentRequest", + "PrebuiltVoiceConfig", + "VoiceConfig", + "SpeechConfig", + "GenerationConfig", + "SemanticRetrieverConfig", + "GenerateContentResponse", + "Candidate", + "LogprobsResult", + "AttributionSourceId", + "GroundingAttribution", + "RetrievalMetadata", + "GroundingMetadata", + "SearchEntryPoint", + "GroundingChunk", + "Segment", + "GroundingSupport", + "GenerateAnswerRequest", + "GenerateAnswerResponse", + "EmbedContentRequest", + "ContentEmbedding", + "EmbedContentResponse", + "BatchEmbedContentsRequest", + "BatchEmbedContentsResponse", + "CountTokensRequest", + "CountTokensResponse", + "BidiGenerateContentSetup", + "BidiGenerateContentClientContent", + "BidiGenerateContentRealtimeInput", + "BidiGenerateContentToolResponse", + "BidiGenerateContentClientMessage", + "BidiGenerateContentSetupComplete", + "BidiGenerateContentServerContent", + "BidiGenerateContentToolCall", + "BidiGenerateContentToolCallCancellation", + "BidiGenerateContentServerMessage", + }, +) + + +class TaskType(proto.Enum): + r"""Type of task for which the embedding will be used. + + Values: + TASK_TYPE_UNSPECIFIED (0): + Unset value, which will default to one of the + other enum values. + RETRIEVAL_QUERY (1): + Specifies the given text is a query in a + search/retrieval setting. + RETRIEVAL_DOCUMENT (2): + Specifies the given text is a document from + the corpus being searched. + SEMANTIC_SIMILARITY (3): + Specifies the given text will be used for + STS. + CLASSIFICATION (4): + Specifies that the given text will be + classified. + CLUSTERING (5): + Specifies that the embeddings will be used + for clustering. + QUESTION_ANSWERING (6): + Specifies that the given text will be used + for question answering. + FACT_VERIFICATION (7): + Specifies that the given text will be used + for fact verification. + """ + TASK_TYPE_UNSPECIFIED = 0 + RETRIEVAL_QUERY = 1 + RETRIEVAL_DOCUMENT = 2 + SEMANTIC_SIMILARITY = 3 + CLASSIFICATION = 4 + CLUSTERING = 5 + QUESTION_ANSWERING = 6 + FACT_VERIFICATION = 7 + + +class GenerateContentRequest(proto.Message): + r"""Request to generate a completion from the model. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + model (str): + Required. The name of the ``Model`` to use for generating + the completion. + + Format: ``models/{model}``. + system_instruction (google.ai.generativelanguage_v1alpha.types.Content): + Optional. Developer set `system + instruction(s) `__. + Currently, text only. + + This field is a member of `oneof`_ ``_system_instruction``. + contents (MutableSequence[google.ai.generativelanguage_v1alpha.types.Content]): + Required. The content of the current conversation with the + model. + + For single-turn queries, this is a single instance. For + multi-turn queries like + `chat `__, + this is a repeated field that contains the conversation + history and the latest request. + tools (MutableSequence[google.ai.generativelanguage_v1alpha.types.Tool]): + Optional. A list of ``Tools`` the ``Model`` may use to + generate the next response. 
+ + A ``Tool`` is a piece of code that enables the system to + interact with external systems to perform an action, or set + of actions, outside of knowledge and scope of the ``Model``. + Supported ``Tool``\ s are ``Function`` and + ``code_execution``. Refer to the `Function + calling `__ + and the `Code + execution `__ + guides to learn more. + tool_config (google.ai.generativelanguage_v1alpha.types.ToolConfig): + Optional. Tool configuration for any ``Tool`` specified in + the request. Refer to the `Function calling + guide `__ + for a usage example. + safety_settings (MutableSequence[google.ai.generativelanguage_v1alpha.types.SafetySetting]): + Optional. A list of unique ``SafetySetting`` instances for + blocking unsafe content. + + This will be enforced on the + ``GenerateContentRequest.contents`` and + ``GenerateContentResponse.candidates``. There should not be + more than one setting for each ``SafetyCategory`` type. The + API will block any contents and responses that fail to meet + the thresholds set by these settings. This list overrides + the default settings for each ``SafetyCategory`` specified + in the safety_settings. If there is no ``SafetySetting`` for + a given ``SafetyCategory`` provided in the list, the API + will use the default safety setting for that category. Harm + categories HARM_CATEGORY_HATE_SPEECH, + HARM_CATEGORY_SEXUALLY_EXPLICIT, + HARM_CATEGORY_DANGEROUS_CONTENT, HARM_CATEGORY_HARASSMENT, + HARM_CATEGORY_CIVIC_INTEGRITY are supported. Refer to the + `guide `__ + for detailed information on available safety settings. Also + refer to the `Safety + guidance `__ + to learn how to incorporate safety considerations in your AI + applications. + generation_config (google.ai.generativelanguage_v1alpha.types.GenerationConfig): + Optional. Configuration options for model + generation and outputs. + + This field is a member of `oneof`_ ``_generation_config``. + cached_content (str): + Optional. The name of the content + `cached `__ + to use as context to serve the prediction. Format: + ``cachedContents/{cachedContent}`` + + This field is a member of `oneof`_ ``_cached_content``. + """ + + model: str = proto.Field( + proto.STRING, + number=1, + ) + system_instruction: gag_content.Content = proto.Field( + proto.MESSAGE, + number=8, + optional=True, + message=gag_content.Content, + ) + contents: MutableSequence[gag_content.Content] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message=gag_content.Content, + ) + tools: MutableSequence[gag_content.Tool] = proto.RepeatedField( + proto.MESSAGE, + number=5, + message=gag_content.Tool, + ) + tool_config: gag_content.ToolConfig = proto.Field( + proto.MESSAGE, + number=7, + message=gag_content.ToolConfig, + ) + safety_settings: MutableSequence[safety.SafetySetting] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message=safety.SafetySetting, + ) + generation_config: "GenerationConfig" = proto.Field( + proto.MESSAGE, + number=4, + optional=True, + message="GenerationConfig", + ) + cached_content: str = proto.Field( + proto.STRING, + number=9, + optional=True, + ) + + +class PrebuiltVoiceConfig(proto.Message): + r"""The configuration for the prebuilt speaker to use. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + voice_name (str): + The name of the preset voice to use. + + This field is a member of `oneof`_ ``_voice_name``. 
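An editorial sketch wiring ``PrebuiltVoiceConfig`` through ``VoiceConfig`` and ``SpeechConfig`` into a ``GenerationConfig`` that asks for an audio response; the voice and model names are placeholders.

from google.ai.generativelanguage_v1alpha.types import content, generative_service

speech = generative_service.SpeechConfig(
    voice_config=generative_service.VoiceConfig(
        prebuilt_voice_config=generative_service.PrebuiltVoiceConfig(
            voice_name="some-prebuilt-voice",  # placeholder voice name
        )
    )
)

request = generative_service.GenerateContentRequest(
    model="models/some-model",                 # placeholder model name
    contents=[content.Content(role="user", parts=[content.Part(text="Say hello")])],
    generation_config=generative_service.GenerationConfig(
        response_modalities=[generative_service.GenerationConfig.Modality.AUDIO],
        speech_config=speech,
    ),
)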
+ """ + + voice_name: str = proto.Field( + proto.STRING, + number=1, + optional=True, + ) + + +class VoiceConfig(proto.Message): + r"""The configuration for the voice to use. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + prebuilt_voice_config (google.ai.generativelanguage_v1alpha.types.PrebuiltVoiceConfig): + The configuration for the prebuilt voice to + use. + + This field is a member of `oneof`_ ``voice_config``. + """ + + prebuilt_voice_config: "PrebuiltVoiceConfig" = proto.Field( + proto.MESSAGE, + number=1, + oneof="voice_config", + message="PrebuiltVoiceConfig", + ) + + +class SpeechConfig(proto.Message): + r"""The speech generation config. + + Attributes: + voice_config (google.ai.generativelanguage_v1alpha.types.VoiceConfig): + The configuration for the speaker to use. + """ + + voice_config: "VoiceConfig" = proto.Field( + proto.MESSAGE, + number=1, + message="VoiceConfig", + ) + + +class GenerationConfig(proto.Message): + r"""Configuration options for model generation and outputs. Not + all parameters are configurable for every model. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + candidate_count (int): + Optional. Number of generated responses to + return. + Currently, this value can only be set to 1. If + unset, this will default to 1. + + This field is a member of `oneof`_ ``_candidate_count``. + stop_sequences (MutableSequence[str]): + Optional. The set of character sequences (up to 5) that will + stop output generation. If specified, the API will stop at + the first appearance of a ``stop_sequence``. The stop + sequence will not be included as part of the response. + max_output_tokens (int): + Optional. The maximum number of tokens to include in a + response candidate. + + Note: The default value varies by model, see the + ``Model.output_token_limit`` attribute of the ``Model`` + returned from the ``getModel`` function. + + This field is a member of `oneof`_ ``_max_output_tokens``. + temperature (float): + Optional. Controls the randomness of the output. + + Note: The default value varies by model, see the + ``Model.temperature`` attribute of the ``Model`` returned + from the ``getModel`` function. + + Values can range from [0.0, 2.0]. + + This field is a member of `oneof`_ ``_temperature``. + top_p (float): + Optional. The maximum cumulative probability of tokens to + consider when sampling. + + The model uses combined Top-k and Top-p (nucleus) sampling. + + Tokens are sorted based on their assigned probabilities so + that only the most likely tokens are considered. Top-k + sampling directly limits the maximum number of tokens to + consider, while Nucleus sampling limits the number of tokens + based on the cumulative probability. + + Note: The default value varies by ``Model`` and is specified + by the\ ``Model.top_p`` attribute returned from the + ``getModel`` function. An empty ``top_k`` attribute + indicates that the model doesn't apply top-k sampling and + doesn't allow setting ``top_k`` on requests. + + This field is a member of `oneof`_ ``_top_p``. + top_k (int): + Optional. The maximum number of tokens to consider when + sampling. + + Gemini models use Top-p (nucleus) sampling or a combination + of Top-k and nucleus sampling. Top-k sampling considers the + set of ``top_k`` most probable tokens. Models running with + nucleus sampling don't allow top_k setting. 
+
+            Note: The default value varies by ``Model`` and is specified
+            by the\ ``Model.top_p`` attribute returned from the
+            ``getModel`` function. An empty ``top_k`` attribute
+            indicates that the model doesn't apply top-k sampling and
+            doesn't allow setting ``top_k`` on requests.
+
+            This field is a member of `oneof`_ ``_top_k``.
+        response_mime_type (str):
+            Optional. MIME type of the generated candidate text.
+            Supported MIME types are: ``text/plain``: (default) Text
+            output. ``application/json``: JSON response in the response
+            candidates. ``text/x.enum``: ENUM as a string response in
+            the response candidates. Refer to the
+            `docs `__
+            for a list of all supported text MIME types.
+        response_schema (google.ai.generativelanguage_v1alpha.types.Schema):
+            Optional. Output schema of the generated candidate text.
+            Schemas must be a subset of the `OpenAPI
+            schema `__ and
+            can be objects, primitives or arrays.
+
+            If set, a compatible ``response_mime_type`` must also be
+            set. Compatible MIME types: ``application/json``: Schema for
+            JSON response. Refer to the `JSON text generation
+            guide `__
+            for more details.
+        presence_penalty (float):
+            Optional. Presence penalty applied to the next token's
+            logprobs if the token has already been seen in the response.
+
+            This penalty is binary on/off and not dependent on the
+            number of times the token is used (after the first). Use
+            [frequency_penalty][google.ai.generativelanguage.v1alpha.GenerationConfig.frequency_penalty]
+            for a penalty that increases with each use.
+
+            A positive penalty will discourage the use of tokens that
+            have already been used in the response, increasing the
+            vocabulary.
+
+            A negative penalty will encourage the use of tokens that
+            have already been used in the response, decreasing the
+            vocabulary.
+
+            This field is a member of `oneof`_ ``_presence_penalty``.
+        frequency_penalty (float):
+            Optional. Frequency penalty applied to the next token's
+            logprobs, multiplied by the number of times each token has
+            been seen in the response so far.
+
+            A positive penalty will discourage the use of tokens that
+            have already been used, proportional to the number of times
+            the token has been used: The more a token is used, the more
+            difficult it is for the model to use that token again,
+            increasing the vocabulary of responses.
+
+            Caution: A *negative* penalty will encourage the model to
+            reuse tokens proportional to the number of times the token
+            has been used. Small negative values will reduce the
+            vocabulary of a response. Larger negative values will cause
+            the model to start repeating a common token until it hits
+            the
+            [max_output_tokens][google.ai.generativelanguage.v1alpha.GenerationConfig.max_output_tokens]
+            limit.
+
+            This field is a member of `oneof`_ ``_frequency_penalty``.
+        response_logprobs (bool):
+            Optional. If true, export the logprobs
+            results in response.
+
+            This field is a member of `oneof`_ ``_response_logprobs``.
+        logprobs (int):
+            Optional. Only valid if
+            [response_logprobs=True][google.ai.generativelanguage.v1alpha.GenerationConfig.response_logprobs].
+            This sets the number of top logprobs to return at each
+            decoding step in the
+            [Candidate.logprobs_result][google.ai.generativelanguage.v1alpha.Candidate.logprobs_result].
+
+            This field is a member of `oneof`_ ``_logprobs``.
+        enable_enhanced_civic_answers (bool):
+            Optional. Enables enhanced civic answers. It
+            may not be available for all models.
+
+            This field is a member of `oneof`_ ``_enable_enhanced_civic_answers``.
+ response_modalities (MutableSequence[google.ai.generativelanguage_v1alpha.types.GenerationConfig.Modality]): + Optional. The requested modalities of the + response. Represents the set of modalities that + the model can return, and should be expected in + the response. This is an exact match to the + modalities of the response. + + A model may have multiple combinations of + supported modalities. If the requested + modalities do not match any of the supported + combinations, an error will be returned. + + An empty list is equivalent to requesting only + text. + speech_config (google.ai.generativelanguage_v1alpha.types.SpeechConfig): + Optional. The speech generation config. + + This field is a member of `oneof`_ ``_speech_config``. + """ + + class Modality(proto.Enum): + r"""Supported modalities of the response. + + Values: + MODALITY_UNSPECIFIED (0): + Default value. + TEXT (1): + Indicates the model should return text. + IMAGE (2): + Indicates the model should return images. + AUDIO (3): + Indicates the model should return audio. + """ + MODALITY_UNSPECIFIED = 0 + TEXT = 1 + IMAGE = 2 + AUDIO = 3 + + candidate_count: int = proto.Field( + proto.INT32, + number=1, + optional=True, + ) + stop_sequences: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) + max_output_tokens: int = proto.Field( + proto.INT32, + number=4, + optional=True, + ) + temperature: float = proto.Field( + proto.FLOAT, + number=5, + optional=True, + ) + top_p: float = proto.Field( + proto.FLOAT, + number=6, + optional=True, + ) + top_k: int = proto.Field( + proto.INT32, + number=7, + optional=True, + ) + response_mime_type: str = proto.Field( + proto.STRING, + number=13, + ) + response_schema: gag_content.Schema = proto.Field( + proto.MESSAGE, + number=14, + message=gag_content.Schema, + ) + presence_penalty: float = proto.Field( + proto.FLOAT, + number=15, + optional=True, + ) + frequency_penalty: float = proto.Field( + proto.FLOAT, + number=16, + optional=True, + ) + response_logprobs: bool = proto.Field( + proto.BOOL, + number=17, + optional=True, + ) + logprobs: int = proto.Field( + proto.INT32, + number=18, + optional=True, + ) + enable_enhanced_civic_answers: bool = proto.Field( + proto.BOOL, + number=19, + optional=True, + ) + response_modalities: MutableSequence[Modality] = proto.RepeatedField( + proto.ENUM, + number=20, + enum=Modality, + ) + speech_config: "SpeechConfig" = proto.Field( + proto.MESSAGE, + number=21, + optional=True, + message="SpeechConfig", + ) + + +class SemanticRetrieverConfig(proto.Message): + r"""Configuration for retrieving grounding content from a ``Corpus`` or + ``Document`` created using the Semantic Retriever API. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + source (str): + Required. Name of the resource for retrieval. Example: + ``corpora/123`` or ``corpora/123/documents/abc``. + query (google.ai.generativelanguage_v1alpha.types.Content): + Required. Query to use for matching ``Chunk``\ s in the + given resource by similarity. + metadata_filters (MutableSequence[google.ai.generativelanguage_v1alpha.types.MetadataFilter]): + Optional. Filters for selecting ``Document``\ s and/or + ``Chunk``\ s from the resource. + max_chunks_count (int): + Optional. Maximum number of relevant ``Chunk``\ s to + retrieve. + + This field is a member of `oneof`_ ``_max_chunks_count``. + minimum_relevance_score (float): + Optional. Minimum relevance score for retrieved relevant + ``Chunk``\ s. 
+ + This field is a member of `oneof`_ ``_minimum_relevance_score``. + """ + + source: str = proto.Field( + proto.STRING, + number=1, + ) + query: gag_content.Content = proto.Field( + proto.MESSAGE, + number=2, + message=gag_content.Content, + ) + metadata_filters: MutableSequence[retriever.MetadataFilter] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message=retriever.MetadataFilter, + ) + max_chunks_count: int = proto.Field( + proto.INT32, + number=4, + optional=True, + ) + minimum_relevance_score: float = proto.Field( + proto.FLOAT, + number=5, + optional=True, + ) + + +class GenerateContentResponse(proto.Message): + r"""Response from the model supporting multiple candidate responses. + + Safety ratings and content filtering are reported for both prompt in + ``GenerateContentResponse.prompt_feedback`` and for each candidate + in ``finish_reason`` and in ``safety_ratings``. The API: + + - Returns either all requested candidates or none of them + - Returns no candidates at all only if there was something wrong + with the prompt (check ``prompt_feedback``) + - Reports feedback on each candidate in ``finish_reason`` and + ``safety_ratings``. + + Attributes: + candidates (MutableSequence[google.ai.generativelanguage_v1alpha.types.Candidate]): + Candidate responses from the model. + prompt_feedback (google.ai.generativelanguage_v1alpha.types.GenerateContentResponse.PromptFeedback): + Returns the prompt's feedback related to the + content filters. + usage_metadata (google.ai.generativelanguage_v1alpha.types.GenerateContentResponse.UsageMetadata): + Output only. Metadata on the generation + requests' token usage. + model_version (str): + Output only. The model version used to + generate the response. + """ + + class PromptFeedback(proto.Message): + r"""A set of the feedback metadata the prompt specified in + ``GenerateContentRequest.content``. + + Attributes: + block_reason (google.ai.generativelanguage_v1alpha.types.GenerateContentResponse.PromptFeedback.BlockReason): + Optional. If set, the prompt was blocked and + no candidates are returned. Rephrase the prompt. + safety_ratings (MutableSequence[google.ai.generativelanguage_v1alpha.types.SafetyRating]): + Ratings for safety of the prompt. + There is at most one rating per category. + """ + + class BlockReason(proto.Enum): + r"""Specifies the reason why the prompt was blocked. + + Values: + BLOCK_REASON_UNSPECIFIED (0): + Default value. This value is unused. + SAFETY (1): + Prompt was blocked due to safety reasons. Inspect + ``safety_ratings`` to understand which safety category + blocked it. + OTHER (2): + Prompt was blocked due to unknown reasons. + BLOCKLIST (3): + Prompt was blocked due to the terms which are + included from the terminology blocklist. + PROHIBITED_CONTENT (4): + Prompt was blocked due to prohibited content. + IMAGE_SAFETY (5): + Candidates blocked due to unsafe image + generation content. + """ + BLOCK_REASON_UNSPECIFIED = 0 + SAFETY = 1 + OTHER = 2 + BLOCKLIST = 3 + PROHIBITED_CONTENT = 4 + IMAGE_SAFETY = 5 + + block_reason: "GenerateContentResponse.PromptFeedback.BlockReason" = ( + proto.Field( + proto.ENUM, + number=1, + enum="GenerateContentResponse.PromptFeedback.BlockReason", + ) + ) + safety_ratings: MutableSequence[safety.SafetyRating] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message=safety.SafetyRating, + ) + + class UsageMetadata(proto.Message): + r"""Metadata on the generation request's token usage. + + Attributes: + prompt_token_count (int): + Number of tokens in the prompt. 
When ``cached_content`` is + set, this is still the total effective prompt size meaning + this includes the number of tokens in the cached content. + cached_content_token_count (int): + Number of tokens in the cached part of the + prompt (the cached content) + candidates_token_count (int): + Total number of tokens across all the + generated response candidates. + total_token_count (int): + Total token count for the generation request + (prompt + response candidates). + """ + + prompt_token_count: int = proto.Field( + proto.INT32, + number=1, + ) + cached_content_token_count: int = proto.Field( + proto.INT32, + number=4, + ) + candidates_token_count: int = proto.Field( + proto.INT32, + number=2, + ) + total_token_count: int = proto.Field( + proto.INT32, + number=3, + ) + + candidates: MutableSequence["Candidate"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="Candidate", + ) + prompt_feedback: PromptFeedback = proto.Field( + proto.MESSAGE, + number=2, + message=PromptFeedback, + ) + usage_metadata: UsageMetadata = proto.Field( + proto.MESSAGE, + number=3, + message=UsageMetadata, + ) + model_version: str = proto.Field( + proto.STRING, + number=4, + ) + + +class Candidate(proto.Message): + r"""A response candidate generated from the model. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + index (int): + Output only. Index of the candidate in the + list of response candidates. + + This field is a member of `oneof`_ ``_index``. + content (google.ai.generativelanguage_v1alpha.types.Content): + Output only. Generated content returned from + the model. + finish_reason (google.ai.generativelanguage_v1alpha.types.Candidate.FinishReason): + Optional. Output only. The reason why the + model stopped generating tokens. + If empty, the model has not stopped generating + tokens. + safety_ratings (MutableSequence[google.ai.generativelanguage_v1alpha.types.SafetyRating]): + List of ratings for the safety of a response + candidate. + There is at most one rating per category. + citation_metadata (google.ai.generativelanguage_v1alpha.types.CitationMetadata): + Output only. Citation information for model-generated + candidate. + + This field may be populated with recitation information for + any text included in the ``content``. These are passages + that are "recited" from copyrighted material in the + foundational LLM's training data. + token_count (int): + Output only. Token count for this candidate. + grounding_attributions (MutableSequence[google.ai.generativelanguage_v1alpha.types.GroundingAttribution]): + Output only. Attribution information for sources that + contributed to a grounded answer. + + This field is populated for ``GenerateAnswer`` calls. + grounding_metadata (google.ai.generativelanguage_v1alpha.types.GroundingMetadata): + Output only. Grounding metadata for the candidate. + + This field is populated for ``GenerateContent`` calls. + avg_logprobs (float): + Output only. Average log probability score of + the candidate. + logprobs_result (google.ai.generativelanguage_v1alpha.types.LogprobsResult): + Output only. Log-likelihood scores for the + response tokens and top tokens + """ + + class FinishReason(proto.Enum): + r"""Defines the reason why the model stopped generating tokens. + + Values: + FINISH_REASON_UNSPECIFIED (0): + Default value. This value is unused. + STOP (1): + Natural stop point of the model or provided + stop sequence. 
+ MAX_TOKENS (2): + The maximum number of tokens as specified in + the request was reached. + SAFETY (3): + The response candidate content was flagged + for safety reasons. + RECITATION (4): + The response candidate content was flagged + for recitation reasons. + LANGUAGE (6): + The response candidate content was flagged + for using an unsupported language. + OTHER (5): + Unknown reason. + BLOCKLIST (7): + Token generation stopped because the content + contains forbidden terms. + PROHIBITED_CONTENT (8): + Token generation stopped for potentially + containing prohibited content. + SPII (9): + Token generation stopped because the content + potentially contains Sensitive Personally + Identifiable Information (SPII). + MALFORMED_FUNCTION_CALL (10): + The function call generated by the model is + invalid. + IMAGE_SAFETY (11): + Token generation stopped because generated + images contain safety violations. + """ + FINISH_REASON_UNSPECIFIED = 0 + STOP = 1 + MAX_TOKENS = 2 + SAFETY = 3 + RECITATION = 4 + LANGUAGE = 6 + OTHER = 5 + BLOCKLIST = 7 + PROHIBITED_CONTENT = 8 + SPII = 9 + MALFORMED_FUNCTION_CALL = 10 + IMAGE_SAFETY = 11 + + index: int = proto.Field( + proto.INT32, + number=3, + optional=True, + ) + content: gag_content.Content = proto.Field( + proto.MESSAGE, + number=1, + message=gag_content.Content, + ) + finish_reason: FinishReason = proto.Field( + proto.ENUM, + number=2, + enum=FinishReason, + ) + safety_ratings: MutableSequence[safety.SafetyRating] = proto.RepeatedField( + proto.MESSAGE, + number=5, + message=safety.SafetyRating, + ) + citation_metadata: citation.CitationMetadata = proto.Field( + proto.MESSAGE, + number=6, + message=citation.CitationMetadata, + ) + token_count: int = proto.Field( + proto.INT32, + number=7, + ) + grounding_attributions: MutableSequence[ + "GroundingAttribution" + ] = proto.RepeatedField( + proto.MESSAGE, + number=8, + message="GroundingAttribution", + ) + grounding_metadata: "GroundingMetadata" = proto.Field( + proto.MESSAGE, + number=9, + message="GroundingMetadata", + ) + avg_logprobs: float = proto.Field( + proto.DOUBLE, + number=10, + ) + logprobs_result: "LogprobsResult" = proto.Field( + proto.MESSAGE, + number=11, + message="LogprobsResult", + ) + + +class LogprobsResult(proto.Message): + r"""Logprobs Result + + Attributes: + top_candidates (MutableSequence[google.ai.generativelanguage_v1alpha.types.LogprobsResult.TopCandidates]): + Length = total number of decoding steps. + chosen_candidates (MutableSequence[google.ai.generativelanguage_v1alpha.types.LogprobsResult.Candidate]): + Length = total number of decoding steps. The chosen + candidates may or may not be in top_candidates. + """ + + class Candidate(proto.Message): + r"""Candidate for the logprobs token and score. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + token (str): + The candidate’s token string value. + + This field is a member of `oneof`_ ``_token``. + token_id (int): + The candidate’s token id value. + + This field is a member of `oneof`_ ``_token_id``. + log_probability (float): + The candidate's log probability. + + This field is a member of `oneof`_ ``_log_probability``. 
+ """ + + token: str = proto.Field( + proto.STRING, + number=1, + optional=True, + ) + token_id: int = proto.Field( + proto.INT32, + number=3, + optional=True, + ) + log_probability: float = proto.Field( + proto.FLOAT, + number=2, + optional=True, + ) + + class TopCandidates(proto.Message): + r"""Candidates with top log probabilities at each decoding step. + + Attributes: + candidates (MutableSequence[google.ai.generativelanguage_v1alpha.types.LogprobsResult.Candidate]): + Sorted by log probability in descending + order. + """ + + candidates: MutableSequence["LogprobsResult.Candidate"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="LogprobsResult.Candidate", + ) + + top_candidates: MutableSequence[TopCandidates] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=TopCandidates, + ) + chosen_candidates: MutableSequence[Candidate] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message=Candidate, + ) + + +class AttributionSourceId(proto.Message): + r"""Identifier for the source contributing to this attribution. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + grounding_passage (google.ai.generativelanguage_v1alpha.types.AttributionSourceId.GroundingPassageId): + Identifier for an inline passage. + + This field is a member of `oneof`_ ``source``. + semantic_retriever_chunk (google.ai.generativelanguage_v1alpha.types.AttributionSourceId.SemanticRetrieverChunk): + Identifier for a ``Chunk`` fetched via Semantic Retriever. + + This field is a member of `oneof`_ ``source``. + """ + + class GroundingPassageId(proto.Message): + r"""Identifier for a part within a ``GroundingPassage``. + + Attributes: + passage_id (str): + Output only. ID of the passage matching the + ``GenerateAnswerRequest``'s ``GroundingPassage.id``. + part_index (int): + Output only. Index of the part within the + ``GenerateAnswerRequest``'s ``GroundingPassage.content``. + """ + + passage_id: str = proto.Field( + proto.STRING, + number=1, + ) + part_index: int = proto.Field( + proto.INT32, + number=2, + ) + + class SemanticRetrieverChunk(proto.Message): + r"""Identifier for a ``Chunk`` retrieved via Semantic Retriever + specified in the ``GenerateAnswerRequest`` using + ``SemanticRetrieverConfig``. + + Attributes: + source (str): + Output only. Name of the source matching the request's + ``SemanticRetrieverConfig.source``. Example: ``corpora/123`` + or ``corpora/123/documents/abc`` + chunk (str): + Output only. Name of the ``Chunk`` containing the attributed + text. Example: ``corpora/123/documents/abc/chunks/xyz`` + """ + + source: str = proto.Field( + proto.STRING, + number=1, + ) + chunk: str = proto.Field( + proto.STRING, + number=2, + ) + + grounding_passage: GroundingPassageId = proto.Field( + proto.MESSAGE, + number=1, + oneof="source", + message=GroundingPassageId, + ) + semantic_retriever_chunk: SemanticRetrieverChunk = proto.Field( + proto.MESSAGE, + number=2, + oneof="source", + message=SemanticRetrieverChunk, + ) + + +class GroundingAttribution(proto.Message): + r"""Attribution for a source that contributed to an answer. + + Attributes: + source_id (google.ai.generativelanguage_v1alpha.types.AttributionSourceId): + Output only. Identifier for the source + contributing to this attribution. 
+ content (google.ai.generativelanguage_v1alpha.types.Content): + Grounding source content that makes up this + attribution. + """ + + source_id: "AttributionSourceId" = proto.Field( + proto.MESSAGE, + number=3, + message="AttributionSourceId", + ) + content: gag_content.Content = proto.Field( + proto.MESSAGE, + number=2, + message=gag_content.Content, + ) + + +class RetrievalMetadata(proto.Message): + r"""Metadata related to retrieval in the grounding flow. + + Attributes: + google_search_dynamic_retrieval_score (float): + Optional. Score indicating how likely information from + google search could help answer the prompt. The score is in + the range [0, 1], where 0 is the least likely and 1 is the + most likely. This score is only populated when google search + grounding and dynamic retrieval is enabled. It will be + compared to the threshold to determine whether to trigger + google search. + """ + + google_search_dynamic_retrieval_score: float = proto.Field( + proto.FLOAT, + number=2, + ) + + +class GroundingMetadata(proto.Message): + r"""Metadata returned to client when grounding is enabled. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + search_entry_point (google.ai.generativelanguage_v1alpha.types.SearchEntryPoint): + Optional. Google search entry for the + following-up web searches. + + This field is a member of `oneof`_ ``_search_entry_point``. + grounding_chunks (MutableSequence[google.ai.generativelanguage_v1alpha.types.GroundingChunk]): + List of supporting references retrieved from + specified grounding source. + grounding_supports (MutableSequence[google.ai.generativelanguage_v1alpha.types.GroundingSupport]): + List of grounding support. + retrieval_metadata (google.ai.generativelanguage_v1alpha.types.RetrievalMetadata): + Metadata related to retrieval in the + grounding flow. + + This field is a member of `oneof`_ ``_retrieval_metadata``. + web_search_queries (MutableSequence[str]): + Web search queries for the following-up web + search. + """ + + search_entry_point: "SearchEntryPoint" = proto.Field( + proto.MESSAGE, + number=1, + optional=True, + message="SearchEntryPoint", + ) + grounding_chunks: MutableSequence["GroundingChunk"] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message="GroundingChunk", + ) + grounding_supports: MutableSequence["GroundingSupport"] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message="GroundingSupport", + ) + retrieval_metadata: "RetrievalMetadata" = proto.Field( + proto.MESSAGE, + number=4, + optional=True, + message="RetrievalMetadata", + ) + web_search_queries: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=5, + ) + + +class SearchEntryPoint(proto.Message): + r"""Google search entry point. + + Attributes: + rendered_content (str): + Optional. Web content snippet that can be + embedded in a web page or an app webview. + sdk_blob (bytes): + Optional. Base64 encoded JSON representing + array of tuple. + """ + + rendered_content: str = proto.Field( + proto.STRING, + number=1, + ) + sdk_blob: bytes = proto.Field( + proto.BYTES, + number=2, + ) + + +class GroundingChunk(proto.Message): + r"""Grounding chunk. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + web (google.ai.generativelanguage_v1alpha.types.GroundingChunk.Web): + Grounding chunk from the web. + + This field is a member of `oneof`_ ``chunk_type``. 
+ """ + + class Web(proto.Message): + r"""Chunk from the web. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + uri (str): + URI reference of the chunk. + + This field is a member of `oneof`_ ``_uri``. + title (str): + Title of the chunk. + + This field is a member of `oneof`_ ``_title``. + """ + + uri: str = proto.Field( + proto.STRING, + number=1, + optional=True, + ) + title: str = proto.Field( + proto.STRING, + number=2, + optional=True, + ) + + web: Web = proto.Field( + proto.MESSAGE, + number=1, + oneof="chunk_type", + message=Web, + ) + + +class Segment(proto.Message): + r"""Segment of the content. + + Attributes: + part_index (int): + Output only. The index of a Part object + within its parent Content object. + start_index (int): + Output only. Start index in the given Part, + measured in bytes. Offset from the start of the + Part, inclusive, starting at zero. + end_index (int): + Output only. End index in the given Part, + measured in bytes. Offset from the start of the + Part, exclusive, starting at zero. + text (str): + Output only. The text corresponding to the + segment from the response. + """ + + part_index: int = proto.Field( + proto.INT32, + number=1, + ) + start_index: int = proto.Field( + proto.INT32, + number=2, + ) + end_index: int = proto.Field( + proto.INT32, + number=3, + ) + text: str = proto.Field( + proto.STRING, + number=4, + ) + + +class GroundingSupport(proto.Message): + r"""Grounding support. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + segment (google.ai.generativelanguage_v1alpha.types.Segment): + Segment of the content this support belongs + to. + + This field is a member of `oneof`_ ``_segment``. + grounding_chunk_indices (MutableSequence[int]): + A list of indices (into 'grounding_chunk') specifying the + citations associated with the claim. For instance [1,3,4] + means that grounding_chunk[1], grounding_chunk[3], + grounding_chunk[4] are the retrieved content attributed to + the claim. + confidence_scores (MutableSequence[float]): + Confidence score of the support references. Ranges from 0 to + 1. 1 is the most confident. This list must have the same + size as the grounding_chunk_indices. + """ + + segment: "Segment" = proto.Field( + proto.MESSAGE, + number=1, + optional=True, + message="Segment", + ) + grounding_chunk_indices: MutableSequence[int] = proto.RepeatedField( + proto.INT32, + number=2, + ) + confidence_scores: MutableSequence[float] = proto.RepeatedField( + proto.FLOAT, + number=3, + ) + + +class GenerateAnswerRequest(proto.Message): + r"""Request to generate a grounded answer from the ``Model``. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + inline_passages (google.ai.generativelanguage_v1alpha.types.GroundingPassages): + Passages provided inline with the request. + + This field is a member of `oneof`_ ``grounding_source``. + semantic_retriever (google.ai.generativelanguage_v1alpha.types.SemanticRetrieverConfig): + Content retrieved from resources created via + the Semantic Retriever API. + + This field is a member of `oneof`_ ``grounding_source``. + model (str): + Required. 
The name of the ``Model`` to use for generating
+            the grounded response.
+
+            Format: ``model=models/{model}``.
+        contents (MutableSequence[google.ai.generativelanguage_v1alpha.types.Content]):
+            Required. The content of the current conversation with the
+            ``Model``. For single-turn queries, this is a single
+            question to answer. For multi-turn queries, this is a
+            repeated field that contains conversation history and the
+            last ``Content`` in the list containing the question.
+
+            Note: ``GenerateAnswer`` only supports queries in English.
+        answer_style (google.ai.generativelanguage_v1alpha.types.GenerateAnswerRequest.AnswerStyle):
+            Required. Style in which answers should be
+            returned.
+        safety_settings (MutableSequence[google.ai.generativelanguage_v1alpha.types.SafetySetting]):
+            Optional. A list of unique ``SafetySetting`` instances for
+            blocking unsafe content.
+
+            This will be enforced on the
+            ``GenerateAnswerRequest.contents`` and
+            ``GenerateAnswerResponse.candidate``. There should not be
+            more than one setting for each ``SafetyCategory`` type. The
+            API will block any contents and responses that fail to meet
+            the thresholds set by these settings. This list overrides
+            the default settings for each ``SafetyCategory`` specified
+            in the safety_settings. If there is no ``SafetySetting`` for
+            a given ``SafetyCategory`` provided in the list, the API
+            will use the default safety setting for that category. Harm
+            categories HARM_CATEGORY_HATE_SPEECH,
+            HARM_CATEGORY_SEXUALLY_EXPLICIT,
+            HARM_CATEGORY_DANGEROUS_CONTENT, HARM_CATEGORY_HARASSMENT
+            are supported. Refer to the
+            `guide `__
+            for detailed information on available safety settings. Also
+            refer to the `Safety
+            guidance `__
+            to learn how to incorporate safety considerations in your AI
+            applications.
+        temperature (float):
+            Optional. Controls the randomness of the output.
+
+            Values can range from [0.0,1.0], inclusive. A value closer
+            to 1.0 will produce responses that are more varied and
+            creative, while a value closer to 0.0 will typically result
+            in more straightforward responses from the model. A low
+            temperature (~0.2) is usually recommended for
+            Attributed-Question-Answering use cases.
+
+            This field is a member of `oneof`_ ``_temperature``.
+    """
+
+    class AnswerStyle(proto.Enum):
+        r"""Style for grounded answers.
+
+        Values:
+            ANSWER_STYLE_UNSPECIFIED (0):
+                Unspecified answer style.
+            ABSTRACTIVE (1):
+                Succinct but abstract style.
+            EXTRACTIVE (2):
+                Very brief and extractive style.
+            VERBOSE (3):
+                Verbose style including extra details. The
+                response may be formatted as a sentence,
+                paragraph, multiple paragraphs, or bullet
+                points, etc.
+ """ + ANSWER_STYLE_UNSPECIFIED = 0 + ABSTRACTIVE = 1 + EXTRACTIVE = 2 + VERBOSE = 3 + + inline_passages: gag_content.GroundingPassages = proto.Field( + proto.MESSAGE, + number=6, + oneof="grounding_source", + message=gag_content.GroundingPassages, + ) + semantic_retriever: "SemanticRetrieverConfig" = proto.Field( + proto.MESSAGE, + number=7, + oneof="grounding_source", + message="SemanticRetrieverConfig", + ) + model: str = proto.Field( + proto.STRING, + number=1, + ) + contents: MutableSequence[gag_content.Content] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message=gag_content.Content, + ) + answer_style: AnswerStyle = proto.Field( + proto.ENUM, + number=5, + enum=AnswerStyle, + ) + safety_settings: MutableSequence[safety.SafetySetting] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message=safety.SafetySetting, + ) + temperature: float = proto.Field( + proto.FLOAT, + number=4, + optional=True, + ) + + +class GenerateAnswerResponse(proto.Message): + r"""Response from the model for a grounded answer. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + answer (google.ai.generativelanguage_v1alpha.types.Candidate): + Candidate answer from the model. + + Note: The model *always* attempts to provide a grounded + answer, even when the answer is unlikely to be answerable + from the given passages. In that case, a low-quality or + ungrounded answer may be provided, along with a low + ``answerable_probability``. + answerable_probability (float): + Output only. The model's estimate of the probability that + its answer is correct and grounded in the input passages. + + A low ``answerable_probability`` indicates that the answer + might not be grounded in the sources. + + When ``answerable_probability`` is low, you may want to: + + - Display a message to the effect of "We couldn’t answer + that question" to the user. + - Fall back to a general-purpose LLM that answers the + question from world knowledge. The threshold and nature + of such fallbacks will depend on individual use cases. + ``0.5`` is a good starting threshold. + + This field is a member of `oneof`_ ``_answerable_probability``. + input_feedback (google.ai.generativelanguage_v1alpha.types.GenerateAnswerResponse.InputFeedback): + Output only. Feedback related to the input data used to + answer the question, as opposed to the model-generated + response to the question. + + The input data can be one or more of the following: + + - Question specified by the last entry in + ``GenerateAnswerRequest.content`` + - Conversation history specified by the other entries in + ``GenerateAnswerRequest.content`` + - Grounding sources + (``GenerateAnswerRequest.semantic_retriever`` or + ``GenerateAnswerRequest.inline_passages``) + + This field is a member of `oneof`_ ``_input_feedback``. + """ + + class InputFeedback(proto.Message): + r"""Feedback related to the input data used to answer the + question, as opposed to the model-generated response to the + question. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + block_reason (google.ai.generativelanguage_v1alpha.types.GenerateAnswerResponse.InputFeedback.BlockReason): + Optional. If set, the input was blocked and + no candidates are returned. Rephrase the input. + + This field is a member of `oneof`_ ``_block_reason``. 
+ safety_ratings (MutableSequence[google.ai.generativelanguage_v1alpha.types.SafetyRating]): + Ratings for safety of the input. + There is at most one rating per category. + """ + + class BlockReason(proto.Enum): + r"""Specifies what was the reason why input was blocked. + + Values: + BLOCK_REASON_UNSPECIFIED (0): + Default value. This value is unused. + SAFETY (1): + Input was blocked due to safety reasons. Inspect + ``safety_ratings`` to understand which safety category + blocked it. + OTHER (2): + Input was blocked due to other reasons. + """ + BLOCK_REASON_UNSPECIFIED = 0 + SAFETY = 1 + OTHER = 2 + + block_reason: "GenerateAnswerResponse.InputFeedback.BlockReason" = proto.Field( + proto.ENUM, + number=1, + optional=True, + enum="GenerateAnswerResponse.InputFeedback.BlockReason", + ) + safety_ratings: MutableSequence[safety.SafetyRating] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message=safety.SafetyRating, + ) + + answer: "Candidate" = proto.Field( + proto.MESSAGE, + number=1, + message="Candidate", + ) + answerable_probability: float = proto.Field( + proto.FLOAT, + number=2, + optional=True, + ) + input_feedback: InputFeedback = proto.Field( + proto.MESSAGE, + number=3, + optional=True, + message=InputFeedback, + ) + + +class EmbedContentRequest(proto.Message): + r"""Request containing the ``Content`` for the model to embed. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + model (str): + Required. The model's resource name. This serves as an ID + for the Model to use. + + This name should match a model name returned by the + ``ListModels`` method. + + Format: ``models/{model}`` + content (google.ai.generativelanguage_v1alpha.types.Content): + Required. The content to embed. Only the ``parts.text`` + fields will be counted. + task_type (google.ai.generativelanguage_v1alpha.types.TaskType): + Optional. Optional task type for which the embeddings will + be used. Can only be set for ``models/embedding-001``. + + This field is a member of `oneof`_ ``_task_type``. + title (str): + Optional. An optional title for the text. Only applicable + when TaskType is ``RETRIEVAL_DOCUMENT``. + + Note: Specifying a ``title`` for ``RETRIEVAL_DOCUMENT`` + provides better quality embeddings for retrieval. + + This field is a member of `oneof`_ ``_title``. + output_dimensionality (int): + Optional. Optional reduced dimension for the output + embedding. If set, excessive values in the output embedding + are truncated from the end. Supported by newer models since + 2024 only. You cannot set this value if using the earlier + model (``models/embedding-001``). + + This field is a member of `oneof`_ ``_output_dimensionality``. + """ + + model: str = proto.Field( + proto.STRING, + number=1, + ) + content: gag_content.Content = proto.Field( + proto.MESSAGE, + number=2, + message=gag_content.Content, + ) + task_type: "TaskType" = proto.Field( + proto.ENUM, + number=3, + optional=True, + enum="TaskType", + ) + title: str = proto.Field( + proto.STRING, + number=4, + optional=True, + ) + output_dimensionality: int = proto.Field( + proto.INT32, + number=5, + optional=True, + ) + + +class ContentEmbedding(proto.Message): + r"""A list of floats representing an embedding. + + Attributes: + values (MutableSequence[float]): + The embedding values. 
+ """ + + values: MutableSequence[float] = proto.RepeatedField( + proto.FLOAT, + number=1, + ) + + +class EmbedContentResponse(proto.Message): + r"""The response to an ``EmbedContentRequest``. + + Attributes: + embedding (google.ai.generativelanguage_v1alpha.types.ContentEmbedding): + Output only. The embedding generated from the + input content. + """ + + embedding: "ContentEmbedding" = proto.Field( + proto.MESSAGE, + number=1, + message="ContentEmbedding", + ) + + +class BatchEmbedContentsRequest(proto.Message): + r"""Batch request to get embeddings from the model for a list of + prompts. + + Attributes: + model (str): + Required. The model's resource name. This serves as an ID + for the Model to use. + + This name should match a model name returned by the + ``ListModels`` method. + + Format: ``models/{model}`` + requests (MutableSequence[google.ai.generativelanguage_v1alpha.types.EmbedContentRequest]): + Required. Embed requests for the batch. The model in each of + these requests must match the model specified + ``BatchEmbedContentsRequest.model``. + """ + + model: str = proto.Field( + proto.STRING, + number=1, + ) + requests: MutableSequence["EmbedContentRequest"] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message="EmbedContentRequest", + ) + + +class BatchEmbedContentsResponse(proto.Message): + r"""The response to a ``BatchEmbedContentsRequest``. + + Attributes: + embeddings (MutableSequence[google.ai.generativelanguage_v1alpha.types.ContentEmbedding]): + Output only. The embeddings for each request, + in the same order as provided in the batch + request. + """ + + embeddings: MutableSequence["ContentEmbedding"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="ContentEmbedding", + ) + + +class CountTokensRequest(proto.Message): + r"""Counts the number of tokens in the ``prompt`` sent to a model. + + Models may tokenize text differently, so each model may return a + different ``token_count``. + + Attributes: + model (str): + Required. The model's resource name. This serves as an ID + for the Model to use. + + This name should match a model name returned by the + ``ListModels`` method. + + Format: ``models/{model}`` + contents (MutableSequence[google.ai.generativelanguage_v1alpha.types.Content]): + Optional. The input given to the model as a prompt. This + field is ignored when ``generate_content_request`` is set. + generate_content_request (google.ai.generativelanguage_v1alpha.types.GenerateContentRequest): + Optional. The overall input given to the ``Model``. This + includes the prompt as well as other model steering + information like `system + instructions `__, + and/or function declarations for `function + calling `__. + ``Model``\ s/\ ``Content``\ s and + ``generate_content_request``\ s are mutually exclusive. You + can either send ``Model`` + ``Content``\ s or a + ``generate_content_request``, but never both. + """ + + model: str = proto.Field( + proto.STRING, + number=1, + ) + contents: MutableSequence[gag_content.Content] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message=gag_content.Content, + ) + generate_content_request: "GenerateContentRequest" = proto.Field( + proto.MESSAGE, + number=3, + message="GenerateContentRequest", + ) + + +class CountTokensResponse(proto.Message): + r"""A response from ``CountTokens``. + + It returns the model's ``token_count`` for the ``prompt``. + + Attributes: + total_tokens (int): + The number of tokens that the ``Model`` tokenizes the + ``prompt`` into. Always non-negative. 
+ cached_content_token_count (int): + Number of tokens in the cached part of the + prompt (the cached content). + """ + + total_tokens: int = proto.Field( + proto.INT32, + number=1, + ) + cached_content_token_count: int = proto.Field( + proto.INT32, + number=5, + ) + + +class BidiGenerateContentSetup(proto.Message): + r"""Message to be sent in the first and only first + ``BidiGenerateContentClientMessage``. Contains configuration that + will apply for the duration of the streaming RPC. + + Clients should wait for a ``BidiGenerateContentSetupComplete`` + message before sending any additional messages. + + Attributes: + model (str): + Required. The model's resource name. This serves as an ID + for the Model to use. + + Format: ``models/{model}`` + generation_config (google.ai.generativelanguage_v1alpha.types.GenerationConfig): + Optional. Generation config. + + The following fields are not supported: + + - ``response_logprobs`` + - ``response_mime_type`` + - ``logprobs`` + - ``response_schema`` + - ``stop_sequence`` + - ``routing_config`` + - ``audio_timestamp`` + system_instruction (google.ai.generativelanguage_v1alpha.types.Content): + Optional. The user provided system + instructions for the model. + Note: Only text should be used in parts and + content in each part will be in a separate + paragraph. + tools (MutableSequence[google.ai.generativelanguage_v1alpha.types.Tool]): + Optional. A list of ``Tools`` the model may use to generate + the next response. + + A ``Tool`` is a piece of code that enables the system to + interact with external systems to perform an action, or set + of actions, outside of knowledge and scope of the model. + """ + + model: str = proto.Field( + proto.STRING, + number=1, + ) + generation_config: "GenerationConfig" = proto.Field( + proto.MESSAGE, + number=2, + message="GenerationConfig", + ) + system_instruction: gag_content.Content = proto.Field( + proto.MESSAGE, + number=3, + message=gag_content.Content, + ) + tools: MutableSequence[gag_content.Tool] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message=gag_content.Tool, + ) + + +class BidiGenerateContentClientContent(proto.Message): + r"""Incremental update of the current conversation delivered from + the client. All of the content here is unconditionally appended + to the conversation history and used as part of the prompt to + the model to generate content. + + A message here will interrupt any current model generation. + + Attributes: + turns (MutableSequence[google.ai.generativelanguage_v1alpha.types.Content]): + Optional. The content appended to the current + conversation with the model. + For single-turn queries, this is a single + instance. For multi-turn queries, this is a + repeated field that contains conversation + history and the latest request. + turn_complete (bool): + Optional. If true, indicates that the server + content generation should start with the + currently accumulated prompt. Otherwise, the + server awaits additional messages before + starting generation. + """ + + turns: MutableSequence[gag_content.Content] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=gag_content.Content, + ) + turn_complete: bool = proto.Field( + proto.BOOL, + number=2, + ) + + +class BidiGenerateContentRealtimeInput(proto.Message): + r"""User input that is sent in real time. 
+ + This is different from + [BidiGenerateContentClientContent][google.ai.generativelanguage.v1alpha.BidiGenerateContentClientContent] + in a few ways: + + - Can be sent continuously without interruption to model + generation. + - If there is a need to mix data interleaved across the + [BidiGenerateContentClientContent][google.ai.generativelanguage.v1alpha.BidiGenerateContentClientContent] + and the + [BidiGenerateContentRealtimeInput][google.ai.generativelanguage.v1alpha.BidiGenerateContentRealtimeInput], + the server attempts to optimize for best response, but there are + no guarantees. + - End of turn is not explicitly specified, but is rather derived + from user activity (for example, end of speech). + - Even before the end of turn, the data is processed incrementally + to optimize for a fast start of the response from the model. + - Is always direct user input that is sent in real time. Can be + sent continuously without interruptions. The model automatically + detects the beginning and the end of user speech and starts or + terminates streaming the response accordingly. Data is processed + incrementally as it arrives, minimizing latency. + + Attributes: + media_chunks (MutableSequence[google.ai.generativelanguage_v1alpha.types.Blob]): + Optional. Inlined bytes data for media input. + """ + + media_chunks: MutableSequence[gag_content.Blob] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=gag_content.Blob, + ) + + +class BidiGenerateContentToolResponse(proto.Message): + r"""Client generated response to a ``ToolCall`` received from the + server. Individual ``FunctionResponse`` objects are matched to the + respective ``FunctionCall`` objects by the ``id`` field. + + Note that in the unary and server-streaming GenerateContent APIs + function calling happens by exchanging the ``Content`` parts, while + in the bidi GenerateContent APIs function calling happens over these + dedicated set of messages. + + Attributes: + function_responses (MutableSequence[google.ai.generativelanguage_v1alpha.types.FunctionResponse]): + Optional. The response to the function calls. + """ + + function_responses: MutableSequence[ + gag_content.FunctionResponse + ] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=gag_content.FunctionResponse, + ) + + +class BidiGenerateContentClientMessage(proto.Message): + r"""Messages sent by the client in the BidiGenerateContent call. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + setup (google.ai.generativelanguage_v1alpha.types.BidiGenerateContentSetup): + Optional. Session configuration sent in the + first and only first client message. + + This field is a member of `oneof`_ ``message_type``. + client_content (google.ai.generativelanguage_v1alpha.types.BidiGenerateContentClientContent): + Optional. Incremental update of the current + conversation delivered from the client. + + This field is a member of `oneof`_ ``message_type``. + realtime_input (google.ai.generativelanguage_v1alpha.types.BidiGenerateContentRealtimeInput): + Optional. User input that is sent in real + time. + + This field is a member of `oneof`_ ``message_type``. + tool_response (google.ai.generativelanguage_v1alpha.types.BidiGenerateContentToolResponse): + Optional. 
Response to a ``ToolCallMessage`` received from + the server. + + This field is a member of `oneof`_ ``message_type``. + """ + + setup: "BidiGenerateContentSetup" = proto.Field( + proto.MESSAGE, + number=1, + oneof="message_type", + message="BidiGenerateContentSetup", + ) + client_content: "BidiGenerateContentClientContent" = proto.Field( + proto.MESSAGE, + number=2, + oneof="message_type", + message="BidiGenerateContentClientContent", + ) + realtime_input: "BidiGenerateContentRealtimeInput" = proto.Field( + proto.MESSAGE, + number=3, + oneof="message_type", + message="BidiGenerateContentRealtimeInput", + ) + tool_response: "BidiGenerateContentToolResponse" = proto.Field( + proto.MESSAGE, + number=4, + oneof="message_type", + message="BidiGenerateContentToolResponse", + ) + + +class BidiGenerateContentSetupComplete(proto.Message): + r"""Sent in response to a ``BidiGenerateContentSetup`` message from the + client. + + """ + + +class BidiGenerateContentServerContent(proto.Message): + r"""Incremental server update generated by the model in response + to client messages. + + Content is generated as quickly as possible, and not in real + time. Clients may choose to buffer and play it out in real time. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + model_turn (google.ai.generativelanguage_v1alpha.types.Content): + Output only. The content that the model has + generated as part of the current conversation + with the user. + + This field is a member of `oneof`_ ``_model_turn``. + turn_complete (bool): + Output only. If true, indicates that the model is done + generating. Generation will only start in response to + additional client messages. Can be set alongside + ``content``, indicating that the ``content`` is the last in + the turn. + interrupted (bool): + Output only. If true, indicates that a client + message has interrupted current model + generation. If the client is playing out the + content in real time, this is a good signal to + stop and empty the current playback queue. + grounding_metadata (google.ai.generativelanguage_v1alpha.types.GroundingMetadata): + Output only. Grounding metadata for the + generated content. + """ + + model_turn: gag_content.Content = proto.Field( + proto.MESSAGE, + number=1, + optional=True, + message=gag_content.Content, + ) + turn_complete: bool = proto.Field( + proto.BOOL, + number=2, + ) + interrupted: bool = proto.Field( + proto.BOOL, + number=3, + ) + grounding_metadata: "GroundingMetadata" = proto.Field( + proto.MESSAGE, + number=4, + message="GroundingMetadata", + ) + + +class BidiGenerateContentToolCall(proto.Message): + r"""Request for the client to execute the ``function_calls`` and return + the responses with the matching ``id``\ s. + + Attributes: + function_calls (MutableSequence[google.ai.generativelanguage_v1alpha.types.FunctionCall]): + Output only. The function call to be + executed. + """ + + function_calls: MutableSequence[gag_content.FunctionCall] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message=gag_content.FunctionCall, + ) + + +class BidiGenerateContentToolCallCancellation(proto.Message): + r"""Notification for the client that a previously issued + ``ToolCallMessage`` with the specified ``id``\ s should have been + not executed and should be cancelled. If there were side-effects to + those tool calls, clients may attempt to undo the tool calls. This + message occurs only in cases where the clients interrupt server + turns. 
+ + Attributes: + ids (MutableSequence[str]): + Output only. The ids of the tool calls to be + cancelled. + """ + + ids: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + + +class BidiGenerateContentServerMessage(proto.Message): + r"""Response message for the BidiGenerateContent call. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + setup_complete (google.ai.generativelanguage_v1alpha.types.BidiGenerateContentSetupComplete): + Output only. Sent in response to a + ``BidiGenerateContentSetup`` message from the client when + setup is complete. + + This field is a member of `oneof`_ ``message_type``. + server_content (google.ai.generativelanguage_v1alpha.types.BidiGenerateContentServerContent): + Output only. Content generated by the model + in response to client messages. + + This field is a member of `oneof`_ ``message_type``. + tool_call (google.ai.generativelanguage_v1alpha.types.BidiGenerateContentToolCall): + Output only. Request for the client to execute the + ``function_calls`` and return the responses with the + matching ``id``\ s. + + This field is a member of `oneof`_ ``message_type``. + tool_call_cancellation (google.ai.generativelanguage_v1alpha.types.BidiGenerateContentToolCallCancellation): + Output only. Notification for the client that a previously + issued ``ToolCallMessage`` with the specified ``id``\ s + should be cancelled. + + This field is a member of `oneof`_ ``message_type``. + """ + + setup_complete: "BidiGenerateContentSetupComplete" = proto.Field( + proto.MESSAGE, + number=2, + oneof="message_type", + message="BidiGenerateContentSetupComplete", + ) + server_content: "BidiGenerateContentServerContent" = proto.Field( + proto.MESSAGE, + number=3, + oneof="message_type", + message="BidiGenerateContentServerContent", + ) + tool_call: "BidiGenerateContentToolCall" = proto.Field( + proto.MESSAGE, + number=4, + oneof="message_type", + message="BidiGenerateContentToolCall", + ) + tool_call_cancellation: "BidiGenerateContentToolCallCancellation" = proto.Field( + proto.MESSAGE, + number=5, + oneof="message_type", + message="BidiGenerateContentToolCallCancellation", + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/types/model.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/types/model.py new file mode 100644 index 000000000000..9549cefb8fb6 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/types/model.py @@ -0,0 +1,171 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.ai.generativelanguage.v1alpha", + manifest={ + "Model", + }, +) + + +class Model(proto.Message): + r"""Information about a Generative Language Model. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + Required. The resource name of the ``Model``. Refer to + `Model + variants `__ + for all allowed values. + + Format: ``models/{model}`` with a ``{model}`` naming + convention of: + + - "{base_model_id}-{version}" + + Examples: + + - ``models/gemini-1.5-flash-001`` + base_model_id (str): + Required. The name of the base model, pass this to the + generation request. + + Examples: + + - ``gemini-1.5-flash`` + version (str): + Required. The version number of the model. + + This represents the major version (``1.0`` or ``1.5``) + display_name (str): + The human-readable name of the model. E.g. + "Gemini 1.5 Flash". + The name can be up to 128 characters long and + can consist of any UTF-8 characters. + description (str): + A short description of the model. + input_token_limit (int): + Maximum number of input tokens allowed for + this model. + output_token_limit (int): + Maximum number of output tokens available for + this model. + supported_generation_methods (MutableSequence[str]): + The model's supported generation methods. + + The corresponding API method names are defined as Pascal + case strings, such as ``generateMessage`` and + ``generateContent``. + temperature (float): + Controls the randomness of the output. + + Values can range over ``[0.0,max_temperature]``, inclusive. + A higher value will produce responses that are more varied, + while a value closer to ``0.0`` will typically result in + less surprising responses from the model. This value + specifies default to be used by the backend while making the + call to the model. + + This field is a member of `oneof`_ ``_temperature``. + max_temperature (float): + The maximum temperature this model can use. + + This field is a member of `oneof`_ ``_max_temperature``. + top_p (float): + For `Nucleus + sampling `__. + + Nucleus sampling considers the smallest set of tokens whose + probability sum is at least ``top_p``. This value specifies + default to be used by the backend while making the call to + the model. + + This field is a member of `oneof`_ ``_top_p``. + top_k (int): + For Top-k sampling. + + Top-k sampling considers the set of ``top_k`` most probable + tokens. This value specifies default to be used by the + backend while making the call to the model. If empty, + indicates the model doesn't use top-k sampling, and + ``top_k`` isn't allowed as a generation parameter. + + This field is a member of `oneof`_ ``_top_k``. 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + base_model_id: str = proto.Field( + proto.STRING, + number=2, + ) + version: str = proto.Field( + proto.STRING, + number=3, + ) + display_name: str = proto.Field( + proto.STRING, + number=4, + ) + description: str = proto.Field( + proto.STRING, + number=5, + ) + input_token_limit: int = proto.Field( + proto.INT32, + number=6, + ) + output_token_limit: int = proto.Field( + proto.INT32, + number=7, + ) + supported_generation_methods: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=8, + ) + temperature: float = proto.Field( + proto.FLOAT, + number=9, + optional=True, + ) + max_temperature: float = proto.Field( + proto.FLOAT, + number=13, + optional=True, + ) + top_p: float = proto.Field( + proto.FLOAT, + number=10, + optional=True, + ) + top_k: int = proto.Field( + proto.INT32, + number=11, + optional=True, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/types/model_service.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/types/model_service.py new file mode 100644 index 000000000000..afa4fc0bc748 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/types/model_service.py @@ -0,0 +1,332 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import field_mask_pb2 # type: ignore +import proto # type: ignore + +from google.ai.generativelanguage_v1alpha.types import tuned_model as gag_tuned_model +from google.ai.generativelanguage_v1alpha.types import model + +__protobuf__ = proto.module( + package="google.ai.generativelanguage.v1alpha", + manifest={ + "GetModelRequest", + "ListModelsRequest", + "ListModelsResponse", + "GetTunedModelRequest", + "ListTunedModelsRequest", + "ListTunedModelsResponse", + "CreateTunedModelRequest", + "CreateTunedModelMetadata", + "UpdateTunedModelRequest", + "DeleteTunedModelRequest", + }, +) + + +class GetModelRequest(proto.Message): + r"""Request for getting information about a specific Model. + + Attributes: + name (str): + Required. The resource name of the model. + + This name should match a model name returned by the + ``ListModels`` method. + + Format: ``models/{model}`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListModelsRequest(proto.Message): + r"""Request for listing all Models. + + Attributes: + page_size (int): + The maximum number of ``Models`` to return (per page). + + If unspecified, 50 models will be returned per page. This + method returns at most 1000 models per page, even if you + pass a larger page_size. + page_token (str): + A page token, received from a previous ``ListModels`` call. 
+ + Provide the ``page_token`` returned by one request as an + argument to the next request to retrieve the next page. + + When paginating, all other parameters provided to + ``ListModels`` must match the call that provided the page + token. + """ + + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ListModelsResponse(proto.Message): + r"""Response from ``ListModel`` containing a paginated list of Models. + + Attributes: + models (MutableSequence[google.ai.generativelanguage_v1alpha.types.Model]): + The returned Models. + next_page_token (str): + A token, which can be sent as ``page_token`` to retrieve the + next page. + + If this field is omitted, there are no more pages. + """ + + @property + def raw_page(self): + return self + + models: MutableSequence[model.Model] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=model.Model, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class GetTunedModelRequest(proto.Message): + r"""Request for getting information about a specific Model. + + Attributes: + name (str): + Required. The resource name of the model. + + Format: ``tunedModels/my-model-id`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListTunedModelsRequest(proto.Message): + r"""Request for listing TunedModels. + + Attributes: + page_size (int): + Optional. The maximum number of ``TunedModels`` to return + (per page). The service may return fewer tuned models. + + If unspecified, at most 10 tuned models will be returned. + This method returns at most 1000 models per page, even if + you pass a larger page_size. + page_token (str): + Optional. A page token, received from a previous + ``ListTunedModels`` call. + + Provide the ``page_token`` returned by one request as an + argument to the next request to retrieve the next page. + + When paginating, all other parameters provided to + ``ListTunedModels`` must match the call that provided the + page token. + filter (str): + Optional. A filter is a full text search over + the tuned model's description and display name. + By default, results will not include tuned + models shared with everyone. + + Additional operators: + + - owner:me + - writers:me + - readers:me + - readers:everyone + + Examples: + + "owner:me" returns all tuned models to which + caller has owner role "readers:me" returns all + tuned models to which caller has reader role + "readers:everyone" returns all tuned models that + are shared with everyone + """ + + page_size: int = proto.Field( + proto.INT32, + number=1, + ) + page_token: str = proto.Field( + proto.STRING, + number=2, + ) + filter: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ListTunedModelsResponse(proto.Message): + r"""Response from ``ListTunedModels`` containing a paginated list of + Models. + + Attributes: + tuned_models (MutableSequence[google.ai.generativelanguage_v1alpha.types.TunedModel]): + The returned Models. + next_page_token (str): + A token, which can be sent as ``page_token`` to retrieve the + next page. + + If this field is omitted, there are no more pages. + """ + + @property + def raw_page(self): + return self + + tuned_models: MutableSequence[gag_tuned_model.TunedModel] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=gag_tuned_model.TunedModel, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class CreateTunedModelRequest(proto.Message): + r"""Request to create a TunedModel. 
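
[Editorial note, not part of the generated patch] A minimal usage sketch for the model-listing request messages defined above. It only uses the types added in this diff; the model name, page size, and page token are illustrative placeholders, and a service client (defined elsewhere in this PR) would actually send these requests.

    from google.ai.generativelanguage_v1alpha.types import model_service

    # Fetch a single model by resource name (placeholder name).
    get_req = model_service.GetModelRequest(name="models/gemini-1.5-flash-001")

    # First page of models; the service caps the page at 1000 entries.
    list_req = model_service.ListModelsRequest(page_size=50)

    # Subsequent pages reuse the next_page_token from the previous response.
    next_req = model_service.ListModelsRequest(
        page_size=50,
        page_token="<next_page_token from the previous ListModelsResponse>",
    )
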
+ + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + tuned_model_id (str): + Optional. The unique id for the tuned model if specified. + This value should be up to 40 characters, the first + character must be a letter, the last could be a letter or a + number. The id must match the regular expression: + ``[a-z]([a-z0-9-]{0,38}[a-z0-9])?``. + + This field is a member of `oneof`_ ``_tuned_model_id``. + tuned_model (google.ai.generativelanguage_v1alpha.types.TunedModel): + Required. The tuned model to create. + """ + + tuned_model_id: str = proto.Field( + proto.STRING, + number=1, + optional=True, + ) + tuned_model: gag_tuned_model.TunedModel = proto.Field( + proto.MESSAGE, + number=2, + message=gag_tuned_model.TunedModel, + ) + + +class CreateTunedModelMetadata(proto.Message): + r"""Metadata about the state and progress of creating a tuned + model returned from the long-running operation + + Attributes: + tuned_model (str): + Name of the tuned model associated with the + tuning operation. + total_steps (int): + The total number of tuning steps. + completed_steps (int): + The number of steps completed. + completed_percent (float): + The completed percentage for the tuning + operation. + snapshots (MutableSequence[google.ai.generativelanguage_v1alpha.types.TuningSnapshot]): + Metrics collected during tuning. + """ + + tuned_model: str = proto.Field( + proto.STRING, + number=5, + ) + total_steps: int = proto.Field( + proto.INT32, + number=1, + ) + completed_steps: int = proto.Field( + proto.INT32, + number=2, + ) + completed_percent: float = proto.Field( + proto.FLOAT, + number=3, + ) + snapshots: MutableSequence[gag_tuned_model.TuningSnapshot] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message=gag_tuned_model.TuningSnapshot, + ) + + +class UpdateTunedModelRequest(proto.Message): + r"""Request to update a TunedModel. + + Attributes: + tuned_model (google.ai.generativelanguage_v1alpha.types.TunedModel): + Required. The tuned model to update. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Optional. The list of fields to update. + """ + + tuned_model: gag_tuned_model.TunedModel = proto.Field( + proto.MESSAGE, + number=1, + message=gag_tuned_model.TunedModel, + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + + +class DeleteTunedModelRequest(proto.Message): + r"""Request to delete a TunedModel. + + Attributes: + name (str): + Required. The resource name of the model. Format: + ``tunedModels/my-model-id`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/types/permission.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/types/permission.py new file mode 100644 index 000000000000..a73758c6c688 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/types/permission.py @@ -0,0 +1,141 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
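
[Editorial note, not part of the generated patch] A hedged sketch of the tuned-model request envelopes defined above. The TunedModel payload lives in tuned_model.py (imported earlier as gag_tuned_model) and is not shown in this excerpt, so it is left empty here; the "display_name" update-mask path is a hypothetical example, not confirmed by this diff.

    from google.protobuf import field_mask_pb2

    from google.ai.generativelanguage_v1alpha.types import model_service
    from google.ai.generativelanguage_v1alpha.types import tuned_model as gag_tuned_model

    # The id must match [a-z]([a-z0-9-]{0,38}[a-z0-9])? per the docstring above.
    create_req = model_service.CreateTunedModelRequest(
        tuned_model_id="my-model-id",
        tuned_model=gag_tuned_model.TunedModel(),  # fields defined in tuned_model.py
    )

    update_req = model_service.UpdateTunedModelRequest(
        tuned_model=gag_tuned_model.TunedModel(),
        update_mask=field_mask_pb2.FieldMask(paths=["display_name"]),  # hypothetical path
    )

    delete_req = model_service.DeleteTunedModelRequest(name="tunedModels/my-model-id")
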
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.ai.generativelanguage.v1alpha", + manifest={ + "Permission", + }, +) + + +class Permission(proto.Message): + r"""Permission resource grants user, group or the rest of the + world access to the PaLM API resource (e.g. a tuned model, + corpus). + + A role is a collection of permitted operations that allows users + to perform specific actions on PaLM API resources. To make them + available to users, groups, or service accounts, you assign + roles. When you assign a role, you grant permissions that the + role contains. + + There are three concentric roles. Each role is a superset of the + previous role's permitted operations: + + - reader can use the resource (e.g. tuned model, corpus) for + inference + - writer has reader's permissions and additionally can edit and + share + - owner has writer's permissions and additionally can delete + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + Output only. Identifier. The permission name. A unique name + will be generated on create. Examples: + tunedModels/{tuned_model}/permissions/{permission} + corpora/{corpus}/permissions/{permission} Output only. + grantee_type (google.ai.generativelanguage_v1alpha.types.Permission.GranteeType): + Optional. Immutable. The type of the grantee. + + This field is a member of `oneof`_ ``_grantee_type``. + email_address (str): + Optional. Immutable. The email address of the + user of group which this permission refers. + Field is not set when permission's grantee type + is EVERYONE. + + This field is a member of `oneof`_ ``_email_address``. + role (google.ai.generativelanguage_v1alpha.types.Permission.Role): + Required. The role granted by this + permission. + + This field is a member of `oneof`_ ``_role``. + """ + + class GranteeType(proto.Enum): + r"""Defines types of the grantee of this permission. + + Values: + GRANTEE_TYPE_UNSPECIFIED (0): + The default value. This value is unused. + USER (1): + Represents a user. When set, you must provide email_address + for the user. + GROUP (2): + Represents a group. When set, you must provide email_address + for the group. + EVERYONE (3): + Represents access to everyone. No extra + information is required. + """ + GRANTEE_TYPE_UNSPECIFIED = 0 + USER = 1 + GROUP = 2 + EVERYONE = 3 + + class Role(proto.Enum): + r"""Defines the role granted by this permission. + + Values: + ROLE_UNSPECIFIED (0): + The default value. This value is unused. + OWNER (1): + Owner can use, update, share and delete the + resource. + WRITER (2): + Writer can use, update and share the + resource. + READER (3): + Reader can use the resource. 
+ """ + ROLE_UNSPECIFIED = 0 + OWNER = 1 + WRITER = 2 + READER = 3 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + grantee_type: GranteeType = proto.Field( + proto.ENUM, + number=2, + optional=True, + enum=GranteeType, + ) + email_address: str = proto.Field( + proto.STRING, + number=3, + optional=True, + ) + role: Role = proto.Field( + proto.ENUM, + number=4, + optional=True, + enum=Role, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/types/permission_service.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/types/permission_service.py new file mode 100644 index 000000000000..1e64faf3fe0d --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/types/permission_service.py @@ -0,0 +1,220 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import field_mask_pb2 # type: ignore +import proto # type: ignore + +from google.ai.generativelanguage_v1alpha.types import permission as gag_permission + +__protobuf__ = proto.module( + package="google.ai.generativelanguage.v1alpha", + manifest={ + "CreatePermissionRequest", + "GetPermissionRequest", + "ListPermissionsRequest", + "ListPermissionsResponse", + "UpdatePermissionRequest", + "DeletePermissionRequest", + "TransferOwnershipRequest", + "TransferOwnershipResponse", + }, +) + + +class CreatePermissionRequest(proto.Message): + r"""Request to create a ``Permission``. + + Attributes: + parent (str): + Required. The parent resource of the ``Permission``. + Formats: ``tunedModels/{tuned_model}`` ``corpora/{corpus}`` + permission (google.ai.generativelanguage_v1alpha.types.Permission): + Required. The permission to create. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + permission: gag_permission.Permission = proto.Field( + proto.MESSAGE, + number=2, + message=gag_permission.Permission, + ) + + +class GetPermissionRequest(proto.Message): + r"""Request for getting information about a specific ``Permission``. + + Attributes: + name (str): + Required. The resource name of the permission. + + Formats: + ``tunedModels/{tuned_model}/permissions/{permission}`` + ``corpora/{corpus}/permissions/{permission}`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListPermissionsRequest(proto.Message): + r"""Request for listing permissions. + + Attributes: + parent (str): + Required. The parent resource of the permissions. Formats: + ``tunedModels/{tuned_model}`` ``corpora/{corpus}`` + page_size (int): + Optional. The maximum number of ``Permission``\ s to return + (per page). The service may return fewer permissions. + + If unspecified, at most 10 permissions will be returned. + This method returns at most 1000 permissions per page, even + if you pass larger page_size. 
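
[Editorial note, not part of the generated patch] A minimal sketch of granting a user reader access, built only from the Permission and CreatePermissionRequest messages shown above. The parent resource and email address are placeholders.

    from google.ai.generativelanguage_v1alpha.types import permission as gag_permission
    from google.ai.generativelanguage_v1alpha.types import permission_service

    perm = gag_permission.Permission(
        grantee_type=gag_permission.Permission.GranteeType.USER,
        email_address="user@example.com",
        role=gag_permission.Permission.Role.READER,
    )

    create_req = permission_service.CreatePermissionRequest(
        parent="tunedModels/my-model-id",  # or "corpora/{corpus}" per the docstring
        permission=perm,
    )
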
+ page_token (str): + Optional. A page token, received from a previous + ``ListPermissions`` call. + + Provide the ``page_token`` returned by one request as an + argument to the next request to retrieve the next page. + + When paginating, all other parameters provided to + ``ListPermissions`` must match the call that provided the + page token. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ListPermissionsResponse(proto.Message): + r"""Response from ``ListPermissions`` containing a paginated list of + permissions. + + Attributes: + permissions (MutableSequence[google.ai.generativelanguage_v1alpha.types.Permission]): + Returned permissions. + next_page_token (str): + A token, which can be sent as ``page_token`` to retrieve the + next page. + + If this field is omitted, there are no more pages. + """ + + @property + def raw_page(self): + return self + + permissions: MutableSequence[gag_permission.Permission] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=gag_permission.Permission, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class UpdatePermissionRequest(proto.Message): + r"""Request to update the ``Permission``. + + Attributes: + permission (google.ai.generativelanguage_v1alpha.types.Permission): + Required. The permission to update. + + The permission's ``name`` field is used to identify the + permission to update. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. The list of fields to update. Accepted ones: + + - role (``Permission.role`` field) + """ + + permission: gag_permission.Permission = proto.Field( + proto.MESSAGE, + number=1, + message=gag_permission.Permission, + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + + +class DeletePermissionRequest(proto.Message): + r"""Request to delete the ``Permission``. + + Attributes: + name (str): + Required. The resource name of the permission. Formats: + ``tunedModels/{tuned_model}/permissions/{permission}`` + ``corpora/{corpus}/permissions/{permission}`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class TransferOwnershipRequest(proto.Message): + r"""Request to transfer the ownership of the tuned model. + + Attributes: + name (str): + Required. The resource name of the tuned model to transfer + ownership. + + Format: ``tunedModels/my-model-id`` + email_address (str): + Required. The email address of the user to + whom the tuned model is being transferred to. 
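
[Editorial note, not part of the generated patch] A hedged sketch of changing a grantee's role. Per the UpdatePermissionRequest docstring above, only the "role" field is accepted in the update mask; the permission name identifies which permission to update.

    from google.protobuf import field_mask_pb2

    from google.ai.generativelanguage_v1alpha.types import permission as gag_permission
    from google.ai.generativelanguage_v1alpha.types import permission_service

    update_req = permission_service.UpdatePermissionRequest(
        permission=gag_permission.Permission(
            name="tunedModels/my-model-id/permissions/some-permission",  # placeholder
            role=gag_permission.Permission.Role.WRITER,
        ),
        update_mask=field_mask_pb2.FieldMask(paths=["role"]),
    )
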
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + email_address: str = proto.Field( + proto.STRING, + number=2, + ) + + +class TransferOwnershipResponse(proto.Message): + r"""Response from ``TransferOwnership``.""" + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/types/prediction_service.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/types/prediction_service.py new file mode 100644 index 000000000000..bb5326c4a9db --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/types/prediction_service.py @@ -0,0 +1,79 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import struct_pb2 # type: ignore +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.ai.generativelanguage.v1alpha", + manifest={ + "PredictRequest", + "PredictResponse", + }, +) + + +class PredictRequest(proto.Message): + r"""Request message for + [PredictionService.Predict][google.ai.generativelanguage.v1alpha.PredictionService.Predict]. + + Attributes: + model (str): + Required. The name of the model for prediction. Format: + ``name=models/{model}``. + instances (MutableSequence[google.protobuf.struct_pb2.Value]): + Required. The instances that are the input to + the prediction call. + parameters (google.protobuf.struct_pb2.Value): + Optional. The parameters that govern the + prediction call. + """ + + model: str = proto.Field( + proto.STRING, + number=1, + ) + instances: MutableSequence[struct_pb2.Value] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message=struct_pb2.Value, + ) + parameters: struct_pb2.Value = proto.Field( + proto.MESSAGE, + number=3, + message=struct_pb2.Value, + ) + + +class PredictResponse(proto.Message): + r"""Response message for [PredictionService.Predict]. + + Attributes: + predictions (MutableSequence[google.protobuf.struct_pb2.Value]): + The outputs of the prediction call. + """ + + predictions: MutableSequence[struct_pb2.Value] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=struct_pb2.Value, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/types/retriever.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/types/retriever.py new file mode 100644 index 000000000000..79efd68a6ed4 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/types/retriever.py @@ -0,0 +1,411 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
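
[Editorial note, not part of the generated patch] A minimal sketch of a PredictRequest, which carries its instances as protobuf Struct-backed Values as the message above specifies. The model name and prompt are placeholders; the optional parameters field (also a google.protobuf.Value) is omitted here.

    from google.protobuf import struct_pb2

    from google.ai.generativelanguage_v1alpha.types import prediction_service

    request = prediction_service.PredictRequest(
        model="models/example-prediction-model",  # placeholder model name
        instances=[struct_pb2.Value(string_value="a prompt for the model")],
    )
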
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import timestamp_pb2 # type: ignore +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.ai.generativelanguage.v1alpha", + manifest={ + "Corpus", + "Document", + "StringList", + "CustomMetadata", + "MetadataFilter", + "Condition", + "Chunk", + "ChunkData", + }, +) + + +class Corpus(proto.Message): + r"""A ``Corpus`` is a collection of ``Document``\ s. A project can + create up to 5 corpora. + + Attributes: + name (str): + Immutable. Identifier. The ``Corpus`` resource name. The ID + (name excluding the "corpora/" prefix) can contain up to 40 + characters that are lowercase alphanumeric or dashes (-). + The ID cannot start or end with a dash. If the name is empty + on create, a unique name will be derived from + ``display_name`` along with a 12 character random suffix. + Example: ``corpora/my-awesome-corpora-123a456b789c`` + display_name (str): + Optional. The human-readable display name for the + ``Corpus``. The display name must be no more than 512 + characters in length, including spaces. Example: "Docs on + Semantic Retriever". + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The Timestamp of when the ``Corpus`` was + created. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The Timestamp of when the ``Corpus`` was last + updated. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + display_name: str = proto.Field( + proto.STRING, + number=2, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + + +class Document(proto.Message): + r"""A ``Document`` is a collection of ``Chunk``\ s. A ``Corpus`` can + have a maximum of 10,000 ``Document``\ s. + + Attributes: + name (str): + Immutable. Identifier. The ``Document`` resource name. The + ID (name excluding the `corpora/*/documents/` prefix) can + contain up to 40 characters that are lowercase alphanumeric + or dashes (-). The ID cannot start or end with a dash. If + the name is empty on create, a unique name will be derived + from ``display_name`` along with a 12 character random + suffix. Example: + ``corpora/{corpus_id}/documents/my-awesome-doc-123a456b789c`` + display_name (str): + Optional. The human-readable display name for the + ``Document``. The display name must be no more than 512 + characters in length, including spaces. Example: "Semantic + Retriever Documentation". + custom_metadata (MutableSequence[google.ai.generativelanguage_v1alpha.types.CustomMetadata]): + Optional. User provided custom metadata stored as key-value + pairs used for querying. A ``Document`` can have a maximum + of 20 ``CustomMetadata``. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The Timestamp of when the ``Document`` was last + updated. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. 
The Timestamp of when the ``Document`` was + created. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + display_name: str = proto.Field( + proto.STRING, + number=2, + ) + custom_metadata: MutableSequence["CustomMetadata"] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message="CustomMetadata", + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) + + +class StringList(proto.Message): + r"""User provided string values assigned to a single metadata + key. + + Attributes: + values (MutableSequence[str]): + The string values of the metadata to store. + """ + + values: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + + +class CustomMetadata(proto.Message): + r"""User provided metadata stored as key-value pairs. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + string_value (str): + The string value of the metadata to store. + + This field is a member of `oneof`_ ``value``. + string_list_value (google.ai.generativelanguage_v1alpha.types.StringList): + The StringList value of the metadata to + store. + + This field is a member of `oneof`_ ``value``. + numeric_value (float): + The numeric value of the metadata to store. + + This field is a member of `oneof`_ ``value``. + key (str): + Required. The key of the metadata to store. + """ + + string_value: str = proto.Field( + proto.STRING, + number=2, + oneof="value", + ) + string_list_value: "StringList" = proto.Field( + proto.MESSAGE, + number=6, + oneof="value", + message="StringList", + ) + numeric_value: float = proto.Field( + proto.FLOAT, + number=7, + oneof="value", + ) + key: str = proto.Field( + proto.STRING, + number=1, + ) + + +class MetadataFilter(proto.Message): + r"""User provided filter to limit retrieval based on ``Chunk`` or + ``Document`` level metadata values. Example (genre = drama OR genre + = action): key = "document.custom_metadata.genre" conditions = + [{string_value = "drama", operation = EQUAL}, {string_value = + "action", operation = EQUAL}] + + Attributes: + key (str): + Required. The key of the metadata to filter + on. + conditions (MutableSequence[google.ai.generativelanguage_v1alpha.types.Condition]): + Required. The ``Condition``\ s for the given key that will + trigger this filter. Multiple ``Condition``\ s are joined by + logical ORs. + """ + + key: str = proto.Field( + proto.STRING, + number=1, + ) + conditions: MutableSequence["Condition"] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message="Condition", + ) + + +class Condition(proto.Message): + r"""Filter condition applicable to a single key. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + string_value (str): + The string value to filter the metadata on. + + This field is a member of `oneof`_ ``value``. 
+ numeric_value (float): + The numeric value to filter the metadata on. + + This field is a member of `oneof`_ ``value``. + operation (google.ai.generativelanguage_v1alpha.types.Condition.Operator): + Required. Operator applied to the given + key-value pair to trigger the condition. + """ + + class Operator(proto.Enum): + r"""Defines the valid operators that can be applied to a + key-value pair. + + Values: + OPERATOR_UNSPECIFIED (0): + The default value. This value is unused. + LESS (1): + Supported by numeric. + LESS_EQUAL (2): + Supported by numeric. + EQUAL (3): + Supported by numeric & string. + GREATER_EQUAL (4): + Supported by numeric. + GREATER (5): + Supported by numeric. + NOT_EQUAL (6): + Supported by numeric & string. + INCLUDES (7): + Supported by string only when ``CustomMetadata`` value type + for the given key has a ``string_list_value``. + EXCLUDES (8): + Supported by string only when ``CustomMetadata`` value type + for the given key has a ``string_list_value``. + """ + OPERATOR_UNSPECIFIED = 0 + LESS = 1 + LESS_EQUAL = 2 + EQUAL = 3 + GREATER_EQUAL = 4 + GREATER = 5 + NOT_EQUAL = 6 + INCLUDES = 7 + EXCLUDES = 8 + + string_value: str = proto.Field( + proto.STRING, + number=1, + oneof="value", + ) + numeric_value: float = proto.Field( + proto.FLOAT, + number=6, + oneof="value", + ) + operation: Operator = proto.Field( + proto.ENUM, + number=5, + enum=Operator, + ) + + +class Chunk(proto.Message): + r"""A ``Chunk`` is a subpart of a ``Document`` that is treated as an + independent unit for the purposes of vector representation and + storage. A ``Corpus`` can have a maximum of 1 million ``Chunk``\ s. + + Attributes: + name (str): + Immutable. Identifier. The ``Chunk`` resource name. The ID + (name excluding the "corpora/*/documents/*/chunks/" prefix) + can contain up to 40 characters that are lowercase + alphanumeric or dashes (-). The ID cannot start or end with + a dash. If the name is empty on create, a random + 12-character unique ID will be generated. Example: + ``corpora/{corpus_id}/documents/{document_id}/chunks/123a456b789c`` + data (google.ai.generativelanguage_v1alpha.types.ChunkData): + Required. The content for the ``Chunk``, such as the text + string. The maximum number of tokens per chunk is 2043. + custom_metadata (MutableSequence[google.ai.generativelanguage_v1alpha.types.CustomMetadata]): + Optional. User provided custom metadata stored as key-value + pairs. The maximum number of ``CustomMetadata`` per chunk is + 20. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The Timestamp of when the ``Chunk`` was + created. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The Timestamp of when the ``Chunk`` was last + updated. + state (google.ai.generativelanguage_v1alpha.types.Chunk.State): + Output only. Current state of the ``Chunk``. + """ + + class State(proto.Enum): + r"""States for the lifecycle of a ``Chunk``. + + Values: + STATE_UNSPECIFIED (0): + The default value. This value is used if the + state is omitted. + STATE_PENDING_PROCESSING (1): + ``Chunk`` is being processed (embedding and vector storage). + STATE_ACTIVE (2): + ``Chunk`` is processed and available for querying. + STATE_FAILED (10): + ``Chunk`` failed processing. 
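
[Editorial note, not part of the generated patch] A minimal sketch of the filter example from the MetadataFilter docstring, (genre = drama OR genre = action), using the Condition and Operator types defined above. Conditions listed under one MetadataFilter are joined by logical ORs.

    from google.ai.generativelanguage_v1alpha.types import retriever

    genre_filter = retriever.MetadataFilter(
        key="document.custom_metadata.genre",
        conditions=[
            retriever.Condition(
                string_value="drama",
                operation=retriever.Condition.Operator.EQUAL,
            ),
            retriever.Condition(
                string_value="action",
                operation=retriever.Condition.Operator.EQUAL,
            ),
        ],
    )
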
+ """ + STATE_UNSPECIFIED = 0 + STATE_PENDING_PROCESSING = 1 + STATE_ACTIVE = 2 + STATE_FAILED = 10 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + data: "ChunkData" = proto.Field( + proto.MESSAGE, + number=2, + message="ChunkData", + ) + custom_metadata: MutableSequence["CustomMetadata"] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message="CustomMetadata", + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) + state: State = proto.Field( + proto.ENUM, + number=6, + enum=State, + ) + + +class ChunkData(proto.Message): + r"""Extracted data that represents the ``Chunk`` content. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + string_value (str): + The ``Chunk`` content as a string. The maximum number of + tokens per chunk is 2043. + + This field is a member of `oneof`_ ``data``. + """ + + string_value: str = proto.Field( + proto.STRING, + number=1, + oneof="data", + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/types/retriever_service.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/types/retriever_service.py new file mode 100644 index 000000000000..0aa460d1e3b3 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/types/retriever_service.py @@ -0,0 +1,793 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import field_mask_pb2 # type: ignore +import proto # type: ignore + +from google.ai.generativelanguage_v1alpha.types import retriever + +__protobuf__ = proto.module( + package="google.ai.generativelanguage.v1alpha", + manifest={ + "CreateCorpusRequest", + "GetCorpusRequest", + "UpdateCorpusRequest", + "DeleteCorpusRequest", + "ListCorporaRequest", + "ListCorporaResponse", + "QueryCorpusRequest", + "QueryCorpusResponse", + "RelevantChunk", + "CreateDocumentRequest", + "GetDocumentRequest", + "UpdateDocumentRequest", + "DeleteDocumentRequest", + "ListDocumentsRequest", + "ListDocumentsResponse", + "QueryDocumentRequest", + "QueryDocumentResponse", + "CreateChunkRequest", + "BatchCreateChunksRequest", + "BatchCreateChunksResponse", + "GetChunkRequest", + "UpdateChunkRequest", + "BatchUpdateChunksRequest", + "BatchUpdateChunksResponse", + "DeleteChunkRequest", + "BatchDeleteChunksRequest", + "ListChunksRequest", + "ListChunksResponse", + }, +) + + +class CreateCorpusRequest(proto.Message): + r"""Request to create a ``Corpus``. + + Attributes: + corpus (google.ai.generativelanguage_v1alpha.types.Corpus): + Required. The ``Corpus`` to create. 
+ """ + + corpus: retriever.Corpus = proto.Field( + proto.MESSAGE, + number=1, + message=retriever.Corpus, + ) + + +class GetCorpusRequest(proto.Message): + r"""Request for getting information about a specific ``Corpus``. + + Attributes: + name (str): + Required. The name of the ``Corpus``. Example: + ``corpora/my-corpus-123`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class UpdateCorpusRequest(proto.Message): + r"""Request to update a ``Corpus``. + + Attributes: + corpus (google.ai.generativelanguage_v1alpha.types.Corpus): + Required. The ``Corpus`` to update. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. The list of fields to update. Currently, this only + supports updating ``display_name``. + """ + + corpus: retriever.Corpus = proto.Field( + proto.MESSAGE, + number=1, + message=retriever.Corpus, + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + + +class DeleteCorpusRequest(proto.Message): + r"""Request to delete a ``Corpus``. + + Attributes: + name (str): + Required. The resource name of the ``Corpus``. Example: + ``corpora/my-corpus-123`` + force (bool): + Optional. If set to true, any ``Document``\ s and objects + related to this ``Corpus`` will also be deleted. + + If false (the default), a ``FAILED_PRECONDITION`` error will + be returned if ``Corpus`` contains any ``Document``\ s. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + force: bool = proto.Field( + proto.BOOL, + number=2, + ) + + +class ListCorporaRequest(proto.Message): + r"""Request for listing ``Corpora``. + + Attributes: + page_size (int): + Optional. The maximum number of ``Corpora`` to return (per + page). The service may return fewer ``Corpora``. + + If unspecified, at most 10 ``Corpora`` will be returned. The + maximum size limit is 20 ``Corpora`` per page. + page_token (str): + Optional. A page token, received from a previous + ``ListCorpora`` call. + + Provide the ``next_page_token`` returned in the response as + an argument to the next request to retrieve the next page. + + When paginating, all other parameters provided to + ``ListCorpora`` must match the call that provided the page + token. + """ + + page_size: int = proto.Field( + proto.INT32, + number=1, + ) + page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class ListCorporaResponse(proto.Message): + r"""Response from ``ListCorpora`` containing a paginated list of + ``Corpora``. The results are sorted by ascending + ``corpus.create_time``. + + Attributes: + corpora (MutableSequence[google.ai.generativelanguage_v1alpha.types.Corpus]): + The returned corpora. + next_page_token (str): + A token, which can be sent as ``page_token`` to retrieve the + next page. If this field is omitted, there are no more + pages. + """ + + @property + def raw_page(self): + return self + + corpora: MutableSequence[retriever.Corpus] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=retriever.Corpus, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class QueryCorpusRequest(proto.Message): + r"""Request for querying a ``Corpus``. + + Attributes: + name (str): + Required. The name of the ``Corpus`` to query. Example: + ``corpora/my-corpus-123`` + query (str): + Required. Query string to perform semantic + search. + metadata_filters (MutableSequence[google.ai.generativelanguage_v1alpha.types.MetadataFilter]): + Optional. Filter for ``Chunk`` and ``Document`` metadata. 
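
[Editorial note, not part of the generated patch] A hedged sketch of creating and renaming a Corpus with the request messages above. Per the UpdateCorpusRequest docstring, only display_name can currently be updated; resource names are placeholders.

    from google.protobuf import field_mask_pb2

    from google.ai.generativelanguage_v1alpha.types import retriever, retriever_service

    create_req = retriever_service.CreateCorpusRequest(
        corpus=retriever.Corpus(display_name="Docs on Semantic Retriever"),
    )

    update_req = retriever_service.UpdateCorpusRequest(
        corpus=retriever.Corpus(
            name="corpora/my-corpus-123",  # placeholder corpus name
            display_name="Docs on Semantic Retriever (v2)",
        ),
        update_mask=field_mask_pb2.FieldMask(paths=["display_name"]),
    )
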
+ Each ``MetadataFilter`` object should correspond to a unique + key. Multiple ``MetadataFilter`` objects are joined by + logical "AND"s. + + Example query at document level: (year >= 2020 OR year < + 2010) AND (genre = drama OR genre = action) + + ``MetadataFilter`` object list: metadata_filters = [ {key = + "document.custom_metadata.year" conditions = [{int_value = + 2020, operation = GREATER_EQUAL}, {int_value = 2010, + operation = LESS}]}, {key = "document.custom_metadata.year" + conditions = [{int_value = 2020, operation = GREATER_EQUAL}, + {int_value = 2010, operation = LESS}]}, {key = + "document.custom_metadata.genre" conditions = [{string_value + = "drama", operation = EQUAL}, {string_value = "action", + operation = EQUAL}]}] + + Example query at chunk level for a numeric range of values: + (year > 2015 AND year <= 2020) + + ``MetadataFilter`` object list: metadata_filters = [ {key = + "chunk.custom_metadata.year" conditions = [{int_value = + 2015, operation = GREATER}]}, {key = + "chunk.custom_metadata.year" conditions = [{int_value = + 2020, operation = LESS_EQUAL}]}] + + Note: "AND"s for the same key are only supported for numeric + values. String values only support "OR"s for the same key. + results_count (int): + Optional. The maximum number of ``Chunk``\ s to return. The + service may return fewer ``Chunk``\ s. + + If unspecified, at most 10 ``Chunk``\ s will be returned. + The maximum specified result count is 100. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + query: str = proto.Field( + proto.STRING, + number=2, + ) + metadata_filters: MutableSequence[retriever.MetadataFilter] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message=retriever.MetadataFilter, + ) + results_count: int = proto.Field( + proto.INT32, + number=4, + ) + + +class QueryCorpusResponse(proto.Message): + r"""Response from ``QueryCorpus`` containing a list of relevant chunks. + + Attributes: + relevant_chunks (MutableSequence[google.ai.generativelanguage_v1alpha.types.RelevantChunk]): + The relevant chunks. + """ + + relevant_chunks: MutableSequence["RelevantChunk"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="RelevantChunk", + ) + + +class RelevantChunk(proto.Message): + r"""The information for a chunk relevant to a query. + + Attributes: + chunk_relevance_score (float): + ``Chunk`` relevance to the query. + chunk (google.ai.generativelanguage_v1alpha.types.Chunk): + ``Chunk`` associated with the query. + """ + + chunk_relevance_score: float = proto.Field( + proto.FLOAT, + number=1, + ) + chunk: retriever.Chunk = proto.Field( + proto.MESSAGE, + number=2, + message=retriever.Chunk, + ) + + +class CreateDocumentRequest(proto.Message): + r"""Request to create a ``Document``. + + Attributes: + parent (str): + Required. The name of the ``Corpus`` where this ``Document`` + will be created. Example: ``corpora/my-corpus-123`` + document (google.ai.generativelanguage_v1alpha.types.Document): + Required. The ``Document`` to create. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + document: retriever.Document = proto.Field( + proto.MESSAGE, + number=2, + message=retriever.Document, + ) + + +class GetDocumentRequest(proto.Message): + r"""Request for getting information about a specific ``Document``. + + Attributes: + name (str): + Required. The name of the ``Document`` to retrieve. 
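
[Editorial note, not part of the generated patch] A minimal sketch of the numeric-range query described in the QueryCorpusRequest docstring above, (year > 2015 AND year <= 2020): two MetadataFilter objects on the same key are ANDed together. Note that the Condition message defined earlier carries the number in its numeric_value field; the query text is a placeholder.

    from google.ai.generativelanguage_v1alpha.types import retriever, retriever_service

    query_req = retriever_service.QueryCorpusRequest(
        name="corpora/my-corpus-123",
        query="semantic retriever quickstart",  # placeholder query string
        results_count=10,
        metadata_filters=[
            retriever.MetadataFilter(
                key="chunk.custom_metadata.year",
                conditions=[retriever.Condition(
                    numeric_value=2015,
                    operation=retriever.Condition.Operator.GREATER,
                )],
            ),
            retriever.MetadataFilter(
                key="chunk.custom_metadata.year",
                conditions=[retriever.Condition(
                    numeric_value=2020,
                    operation=retriever.Condition.Operator.LESS_EQUAL,
                )],
            ),
        ],
    )
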
Example: + ``corpora/my-corpus-123/documents/the-doc-abc`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class UpdateDocumentRequest(proto.Message): + r"""Request to update a ``Document``. + + Attributes: + document (google.ai.generativelanguage_v1alpha.types.Document): + Required. The ``Document`` to update. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. The list of fields to update. Currently, this only + supports updating ``display_name`` and ``custom_metadata``. + """ + + document: retriever.Document = proto.Field( + proto.MESSAGE, + number=1, + message=retriever.Document, + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + + +class DeleteDocumentRequest(proto.Message): + r"""Request to delete a ``Document``. + + Attributes: + name (str): + Required. The resource name of the ``Document`` to delete. + Example: ``corpora/my-corpus-123/documents/the-doc-abc`` + force (bool): + Optional. If set to true, any ``Chunk``\ s and objects + related to this ``Document`` will also be deleted. + + If false (the default), a ``FAILED_PRECONDITION`` error will + be returned if ``Document`` contains any ``Chunk``\ s. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + force: bool = proto.Field( + proto.BOOL, + number=2, + ) + + +class ListDocumentsRequest(proto.Message): + r"""Request for listing ``Document``\ s. + + Attributes: + parent (str): + Required. The name of the ``Corpus`` containing + ``Document``\ s. Example: ``corpora/my-corpus-123`` + page_size (int): + Optional. The maximum number of ``Document``\ s to return + (per page). The service may return fewer ``Document``\ s. + + If unspecified, at most 10 ``Document``\ s will be returned. + The maximum size limit is 20 ``Document``\ s per page. + page_token (str): + Optional. A page token, received from a previous + ``ListDocuments`` call. + + Provide the ``next_page_token`` returned in the response as + an argument to the next request to retrieve the next page. + + When paginating, all other parameters provided to + ``ListDocuments`` must match the call that provided the page + token. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ListDocumentsResponse(proto.Message): + r"""Response from ``ListDocuments`` containing a paginated list of + ``Document``\ s. The ``Document``\ s are sorted by ascending + ``document.create_time``. + + Attributes: + documents (MutableSequence[google.ai.generativelanguage_v1alpha.types.Document]): + The returned ``Document``\ s. + next_page_token (str): + A token, which can be sent as ``page_token`` to retrieve the + next page. If this field is omitted, there are no more + pages. + """ + + @property + def raw_page(self): + return self + + documents: MutableSequence[retriever.Document] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=retriever.Document, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class QueryDocumentRequest(proto.Message): + r"""Request for querying a ``Document``. + + Attributes: + name (str): + Required. The name of the ``Document`` to query. Example: + ``corpora/my-corpus-123/documents/the-doc-abc`` + query (str): + Required. Query string to perform semantic + search. + results_count (int): + Optional. The maximum number of ``Chunk``\ s to return. 
The + service may return fewer ``Chunk``\ s. + + If unspecified, at most 10 ``Chunk``\ s will be returned. + The maximum specified result count is 100. + metadata_filters (MutableSequence[google.ai.generativelanguage_v1alpha.types.MetadataFilter]): + Optional. Filter for ``Chunk`` metadata. Each + ``MetadataFilter`` object should correspond to a unique key. + Multiple ``MetadataFilter`` objects are joined by logical + "AND"s. + + Note: ``Document``-level filtering is not supported for this + request because a ``Document`` name is already specified. + + Example query: (year >= 2020 OR year < 2010) AND (genre = + drama OR genre = action) + + ``MetadataFilter`` object list: metadata_filters = [ {key = + "chunk.custom_metadata.year" conditions = [{int_value = + 2020, operation = GREATER_EQUAL}, {int_value = 2010, + operation = LESS}}, {key = "chunk.custom_metadata.genre" + conditions = [{string_value = "drama", operation = EQUAL}, + {string_value = "action", operation = EQUAL}}] + + Example query for a numeric range of values: (year > 2015 + AND year <= 2020) + + ``MetadataFilter`` object list: metadata_filters = [ {key = + "chunk.custom_metadata.year" conditions = [{int_value = + 2015, operation = GREATER}]}, {key = + "chunk.custom_metadata.year" conditions = [{int_value = + 2020, operation = LESS_EQUAL}]}] + + Note: "AND"s for the same key are only supported for numeric + values. String values only support "OR"s for the same key. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + query: str = proto.Field( + proto.STRING, + number=2, + ) + results_count: int = proto.Field( + proto.INT32, + number=3, + ) + metadata_filters: MutableSequence[retriever.MetadataFilter] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message=retriever.MetadataFilter, + ) + + +class QueryDocumentResponse(proto.Message): + r"""Response from ``QueryDocument`` containing a list of relevant + chunks. + + Attributes: + relevant_chunks (MutableSequence[google.ai.generativelanguage_v1alpha.types.RelevantChunk]): + The returned relevant chunks. + """ + + relevant_chunks: MutableSequence["RelevantChunk"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="RelevantChunk", + ) + + +class CreateChunkRequest(proto.Message): + r"""Request to create a ``Chunk``. + + Attributes: + parent (str): + Required. The name of the ``Document`` where this ``Chunk`` + will be created. Example: + ``corpora/my-corpus-123/documents/the-doc-abc`` + chunk (google.ai.generativelanguage_v1alpha.types.Chunk): + Required. The ``Chunk`` to create. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + chunk: retriever.Chunk = proto.Field( + proto.MESSAGE, + number=2, + message=retriever.Chunk, + ) + + +class BatchCreateChunksRequest(proto.Message): + r"""Request to batch create ``Chunk``\ s. + + Attributes: + parent (str): + Optional. The name of the ``Document`` where this batch of + ``Chunk``\ s will be created. The parent field in every + ``CreateChunkRequest`` must match this value. Example: + ``corpora/my-corpus-123/documents/the-doc-abc`` + requests (MutableSequence[google.ai.generativelanguage_v1alpha.types.CreateChunkRequest]): + Required. The request messages specifying the ``Chunk``\ s + to create. A maximum of 100 ``Chunk``\ s can be created in a + batch. 
+ """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + requests: MutableSequence["CreateChunkRequest"] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message="CreateChunkRequest", + ) + + +class BatchCreateChunksResponse(proto.Message): + r"""Response from ``BatchCreateChunks`` containing a list of created + ``Chunk``\ s. + + Attributes: + chunks (MutableSequence[google.ai.generativelanguage_v1alpha.types.Chunk]): + ``Chunk``\ s created. + """ + + chunks: MutableSequence[retriever.Chunk] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=retriever.Chunk, + ) + + +class GetChunkRequest(proto.Message): + r"""Request for getting information about a specific ``Chunk``. + + Attributes: + name (str): + Required. The name of the ``Chunk`` to retrieve. Example: + ``corpora/my-corpus-123/documents/the-doc-abc/chunks/some-chunk`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class UpdateChunkRequest(proto.Message): + r"""Request to update a ``Chunk``. + + Attributes: + chunk (google.ai.generativelanguage_v1alpha.types.Chunk): + Required. The ``Chunk`` to update. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. The list of fields to update. Currently, this only + supports updating ``custom_metadata`` and ``data``. + """ + + chunk: retriever.Chunk = proto.Field( + proto.MESSAGE, + number=1, + message=retriever.Chunk, + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + + +class BatchUpdateChunksRequest(proto.Message): + r"""Request to batch update ``Chunk``\ s. + + Attributes: + parent (str): + Optional. The name of the ``Document`` containing the + ``Chunk``\ s to update. The parent field in every + ``UpdateChunkRequest`` must match this value. Example: + ``corpora/my-corpus-123/documents/the-doc-abc`` + requests (MutableSequence[google.ai.generativelanguage_v1alpha.types.UpdateChunkRequest]): + Required. The request messages specifying the ``Chunk``\ s + to update. A maximum of 100 ``Chunk``\ s can be updated in a + batch. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + requests: MutableSequence["UpdateChunkRequest"] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message="UpdateChunkRequest", + ) + + +class BatchUpdateChunksResponse(proto.Message): + r"""Response from ``BatchUpdateChunks`` containing a list of updated + ``Chunk``\ s. + + Attributes: + chunks (MutableSequence[google.ai.generativelanguage_v1alpha.types.Chunk]): + ``Chunk``\ s updated. + """ + + chunks: MutableSequence[retriever.Chunk] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=retriever.Chunk, + ) + + +class DeleteChunkRequest(proto.Message): + r"""Request to delete a ``Chunk``. + + Attributes: + name (str): + Required. The resource name of the ``Chunk`` to delete. + Example: + ``corpora/my-corpus-123/documents/the-doc-abc/chunks/some-chunk`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class BatchDeleteChunksRequest(proto.Message): + r"""Request to batch delete ``Chunk``\ s. + + Attributes: + parent (str): + Optional. The name of the ``Document`` containing the + ``Chunk``\ s to delete. The parent field in every + ``DeleteChunkRequest`` must match this value. Example: + ``corpora/my-corpus-123/documents/the-doc-abc`` + requests (MutableSequence[google.ai.generativelanguage_v1alpha.types.DeleteChunkRequest]): + Required. The request messages specifying the ``Chunk``\ s + to delete. 
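
[Editorial note, not part of the generated patch] A minimal sketch of batch-creating chunks under one document with the request messages above. Each nested CreateChunkRequest must use the same parent, and a batch may hold at most 100 chunks; names and text are placeholders.

    from google.ai.generativelanguage_v1alpha.types import retriever, retriever_service

    parent = "corpora/my-corpus-123/documents/the-doc-abc"

    batch_req = retriever_service.BatchCreateChunksRequest(
        parent=parent,
        requests=[
            retriever_service.CreateChunkRequest(
                parent=parent,
                chunk=retriever.Chunk(
                    data=retriever.ChunkData(string_value=text),
                ),
            )
            for text in (
                "First passage of the document.",
                "Second passage of the document.",
            )
        ],
    )
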
+ """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + requests: MutableSequence["DeleteChunkRequest"] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message="DeleteChunkRequest", + ) + + +class ListChunksRequest(proto.Message): + r"""Request for listing ``Chunk``\ s. + + Attributes: + parent (str): + Required. The name of the ``Document`` containing + ``Chunk``\ s. Example: + ``corpora/my-corpus-123/documents/the-doc-abc`` + page_size (int): + Optional. The maximum number of ``Chunk``\ s to return (per + page). The service may return fewer ``Chunk``\ s. + + If unspecified, at most 10 ``Chunk``\ s will be returned. + The maximum size limit is 100 ``Chunk``\ s per page. + page_token (str): + Optional. A page token, received from a previous + ``ListChunks`` call. + + Provide the ``next_page_token`` returned in the response as + an argument to the next request to retrieve the next page. + + When paginating, all other parameters provided to + ``ListChunks`` must match the call that provided the page + token. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ListChunksResponse(proto.Message): + r"""Response from ``ListChunks`` containing a paginated list of + ``Chunk``\ s. The ``Chunk``\ s are sorted by ascending + ``chunk.create_time``. + + Attributes: + chunks (MutableSequence[google.ai.generativelanguage_v1alpha.types.Chunk]): + The returned ``Chunk``\ s. + next_page_token (str): + A token, which can be sent as ``page_token`` to retrieve the + next page. If this field is omitted, there are no more + pages. + """ + + @property + def raw_page(self): + return self + + chunks: MutableSequence[retriever.Chunk] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=retriever.Chunk, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/types/safety.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/types/safety.py new file mode 100644 index 000000000000..3fbad41feeff --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/types/safety.py @@ -0,0 +1,276 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.ai.generativelanguage.v1alpha", + manifest={ + "HarmCategory", + "ContentFilter", + "SafetyFeedback", + "SafetyRating", + "SafetySetting", + }, +) + + +class HarmCategory(proto.Enum): + r"""The category of a rating. + + These categories cover various kinds of harms that developers + may wish to adjust. + + Values: + HARM_CATEGORY_UNSPECIFIED (0): + Category is unspecified. 
+ HARM_CATEGORY_DEROGATORY (1): + **PaLM** - Negative or harmful comments targeting identity + and/or protected attribute. + HARM_CATEGORY_TOXICITY (2): + **PaLM** - Content that is rude, disrespectful, or profane. + HARM_CATEGORY_VIOLENCE (3): + **PaLM** - Describes scenarios depicting violence against an + individual or group, or general descriptions of gore. + HARM_CATEGORY_SEXUAL (4): + **PaLM** - Contains references to sexual acts or other lewd + content. + HARM_CATEGORY_MEDICAL (5): + **PaLM** - Promotes unchecked medical advice. + HARM_CATEGORY_DANGEROUS (6): + **PaLM** - Dangerous content that promotes, facilitates, or + encourages harmful acts. + HARM_CATEGORY_HARASSMENT (7): + **Gemini** - Harassment content. + HARM_CATEGORY_HATE_SPEECH (8): + **Gemini** - Hate speech and content. + HARM_CATEGORY_SEXUALLY_EXPLICIT (9): + **Gemini** - Sexually explicit content. + HARM_CATEGORY_DANGEROUS_CONTENT (10): + **Gemini** - Dangerous content. + HARM_CATEGORY_CIVIC_INTEGRITY (11): + **Gemini** - Content that may be used to harm civic + integrity. + """ + HARM_CATEGORY_UNSPECIFIED = 0 + HARM_CATEGORY_DEROGATORY = 1 + HARM_CATEGORY_TOXICITY = 2 + HARM_CATEGORY_VIOLENCE = 3 + HARM_CATEGORY_SEXUAL = 4 + HARM_CATEGORY_MEDICAL = 5 + HARM_CATEGORY_DANGEROUS = 6 + HARM_CATEGORY_HARASSMENT = 7 + HARM_CATEGORY_HATE_SPEECH = 8 + HARM_CATEGORY_SEXUALLY_EXPLICIT = 9 + HARM_CATEGORY_DANGEROUS_CONTENT = 10 + HARM_CATEGORY_CIVIC_INTEGRITY = 11 + + +class ContentFilter(proto.Message): + r"""Content filtering metadata associated with processing a + single request. + ContentFilter contains a reason and an optional supporting + string. The reason may be unspecified. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + reason (google.ai.generativelanguage_v1alpha.types.ContentFilter.BlockedReason): + The reason content was blocked during request + processing. + message (str): + A string that describes the filtering + behavior in more detail. + + This field is a member of `oneof`_ ``_message``. + """ + + class BlockedReason(proto.Enum): + r"""A list of reasons why content may have been blocked. + + Values: + BLOCKED_REASON_UNSPECIFIED (0): + A blocked reason was not specified. + SAFETY (1): + Content was blocked by safety settings. + OTHER (2): + Content was blocked, but the reason is + uncategorized. + """ + BLOCKED_REASON_UNSPECIFIED = 0 + SAFETY = 1 + OTHER = 2 + + reason: BlockedReason = proto.Field( + proto.ENUM, + number=1, + enum=BlockedReason, + ) + message: str = proto.Field( + proto.STRING, + number=2, + optional=True, + ) + + +class SafetyFeedback(proto.Message): + r"""Safety feedback for an entire request. + + This field is populated if content in the input and/or response + is blocked due to safety settings. SafetyFeedback may not exist + for every HarmCategory. Each SafetyFeedback will return the + safety settings used by the request as well as the lowest + HarmProbability that should be allowed in order to return a + result. + + Attributes: + rating (google.ai.generativelanguage_v1alpha.types.SafetyRating): + Safety rating evaluated from content. + setting (google.ai.generativelanguage_v1alpha.types.SafetySetting): + Safety settings applied to the request. 
+ """ + + rating: "SafetyRating" = proto.Field( + proto.MESSAGE, + number=1, + message="SafetyRating", + ) + setting: "SafetySetting" = proto.Field( + proto.MESSAGE, + number=2, + message="SafetySetting", + ) + + +class SafetyRating(proto.Message): + r"""Safety rating for a piece of content. + + The safety rating contains the category of harm and the harm + probability level in that category for a piece of content. + Content is classified for safety across a number of harm + categories and the probability of the harm classification is + included here. + + Attributes: + category (google.ai.generativelanguage_v1alpha.types.HarmCategory): + Required. The category for this rating. + probability (google.ai.generativelanguage_v1alpha.types.SafetyRating.HarmProbability): + Required. The probability of harm for this + content. + blocked (bool): + Was this content blocked because of this + rating? + """ + + class HarmProbability(proto.Enum): + r"""The probability that a piece of content is harmful. + + The classification system gives the probability of the content + being unsafe. This does not indicate the severity of harm for a + piece of content. + + Values: + HARM_PROBABILITY_UNSPECIFIED (0): + Probability is unspecified. + NEGLIGIBLE (1): + Content has a negligible chance of being + unsafe. + LOW (2): + Content has a low chance of being unsafe. + MEDIUM (3): + Content has a medium chance of being unsafe. + HIGH (4): + Content has a high chance of being unsafe. + """ + HARM_PROBABILITY_UNSPECIFIED = 0 + NEGLIGIBLE = 1 + LOW = 2 + MEDIUM = 3 + HIGH = 4 + + category: "HarmCategory" = proto.Field( + proto.ENUM, + number=3, + enum="HarmCategory", + ) + probability: HarmProbability = proto.Field( + proto.ENUM, + number=4, + enum=HarmProbability, + ) + blocked: bool = proto.Field( + proto.BOOL, + number=5, + ) + + +class SafetySetting(proto.Message): + r"""Safety setting, affecting the safety-blocking behavior. + + Passing a safety setting for a category changes the allowed + probability that content is blocked. + + Attributes: + category (google.ai.generativelanguage_v1alpha.types.HarmCategory): + Required. The category for this setting. + threshold (google.ai.generativelanguage_v1alpha.types.SafetySetting.HarmBlockThreshold): + Required. Controls the probability threshold + at which harm is blocked. + """ + + class HarmBlockThreshold(proto.Enum): + r"""Block at and beyond a specified harm probability. + + Values: + HARM_BLOCK_THRESHOLD_UNSPECIFIED (0): + Threshold is unspecified. + BLOCK_LOW_AND_ABOVE (1): + Content with NEGLIGIBLE will be allowed. + BLOCK_MEDIUM_AND_ABOVE (2): + Content with NEGLIGIBLE and LOW will be + allowed. + BLOCK_ONLY_HIGH (3): + Content with NEGLIGIBLE, LOW, and MEDIUM will + be allowed. + BLOCK_NONE (4): + All content will be allowed. + OFF (5): + Turn off the safety filter. 
+ """ + HARM_BLOCK_THRESHOLD_UNSPECIFIED = 0 + BLOCK_LOW_AND_ABOVE = 1 + BLOCK_MEDIUM_AND_ABOVE = 2 + BLOCK_ONLY_HIGH = 3 + BLOCK_NONE = 4 + OFF = 5 + + category: "HarmCategory" = proto.Field( + proto.ENUM, + number=3, + enum="HarmCategory", + ) + threshold: HarmBlockThreshold = proto.Field( + proto.ENUM, + number=4, + enum=HarmBlockThreshold, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/types/text_service.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/types/text_service.py new file mode 100644 index 000000000000..9daa38137d05 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/types/text_service.py @@ -0,0 +1,441 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.ai.generativelanguage_v1alpha.types import citation, safety + +__protobuf__ = proto.module( + package="google.ai.generativelanguage.v1alpha", + manifest={ + "GenerateTextRequest", + "GenerateTextResponse", + "TextPrompt", + "TextCompletion", + "EmbedTextRequest", + "EmbedTextResponse", + "BatchEmbedTextRequest", + "BatchEmbedTextResponse", + "Embedding", + "CountTextTokensRequest", + "CountTextTokensResponse", + }, +) + + +class GenerateTextRequest(proto.Message): + r"""Request to generate a text completion response from the + model. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + model (str): + Required. The name of the ``Model`` or ``TunedModel`` to use + for generating the completion. Examples: + models/text-bison-001 tunedModels/sentence-translator-u3b7m + prompt (google.ai.generativelanguage_v1alpha.types.TextPrompt): + Required. The free-form input text given to + the model as a prompt. + Given a prompt, the model will generate a + TextCompletion response it predicts as the + completion of the input text. + temperature (float): + Optional. Controls the randomness of the output. Note: The + default value varies by model, see the ``Model.temperature`` + attribute of the ``Model`` returned the ``getModel`` + function. + + Values can range from [0.0,1.0], inclusive. A value closer + to 1.0 will produce responses that are more varied and + creative, while a value closer to 0.0 will typically result + in more straightforward responses from the model. + + This field is a member of `oneof`_ ``_temperature``. + candidate_count (int): + Optional. Number of generated responses to return. + + This value must be between [1, 8], inclusive. If unset, this + will default to 1. + + This field is a member of `oneof`_ ``_candidate_count``. + max_output_tokens (int): + Optional. The maximum number of tokens to include in a + candidate. 
+ + If unset, this will default to output_token_limit specified + in the ``Model`` specification. + + This field is a member of `oneof`_ ``_max_output_tokens``. + top_p (float): + Optional. The maximum cumulative probability of tokens to + consider when sampling. + + The model uses combined Top-k and nucleus sampling. + + Tokens are sorted based on their assigned probabilities so + that only the most likely tokens are considered. Top-k + sampling directly limits the maximum number of tokens to + consider, while Nucleus sampling limits number of tokens + based on the cumulative probability. + + Note: The default value varies by model, see the + ``Model.top_p`` attribute of the ``Model`` returned the + ``getModel`` function. + + This field is a member of `oneof`_ ``_top_p``. + top_k (int): + Optional. The maximum number of tokens to consider when + sampling. + + The model uses combined Top-k and nucleus sampling. + + Top-k sampling considers the set of ``top_k`` most probable + tokens. Defaults to 40. + + Note: The default value varies by model, see the + ``Model.top_k`` attribute of the ``Model`` returned the + ``getModel`` function. + + This field is a member of `oneof`_ ``_top_k``. + safety_settings (MutableSequence[google.ai.generativelanguage_v1alpha.types.SafetySetting]): + Optional. A list of unique ``SafetySetting`` instances for + blocking unsafe content. + + that will be enforced on the ``GenerateTextRequest.prompt`` + and ``GenerateTextResponse.candidates``. There should not be + more than one setting for each ``SafetyCategory`` type. The + API will block any prompts and responses that fail to meet + the thresholds set by these settings. This list overrides + the default settings for each ``SafetyCategory`` specified + in the safety_settings. If there is no ``SafetySetting`` for + a given ``SafetyCategory`` provided in the list, the API + will use the default safety setting for that category. Harm + categories HARM_CATEGORY_DEROGATORY, HARM_CATEGORY_TOXICITY, + HARM_CATEGORY_VIOLENCE, HARM_CATEGORY_SEXUAL, + HARM_CATEGORY_MEDICAL, HARM_CATEGORY_DANGEROUS are supported + in text service. + stop_sequences (MutableSequence[str]): + The set of character sequences (up to 5) that + will stop output generation. If specified, the + API will stop at the first appearance of a stop + sequence. The stop sequence will not be included + as part of the response. + """ + + model: str = proto.Field( + proto.STRING, + number=1, + ) + prompt: "TextPrompt" = proto.Field( + proto.MESSAGE, + number=2, + message="TextPrompt", + ) + temperature: float = proto.Field( + proto.FLOAT, + number=3, + optional=True, + ) + candidate_count: int = proto.Field( + proto.INT32, + number=4, + optional=True, + ) + max_output_tokens: int = proto.Field( + proto.INT32, + number=5, + optional=True, + ) + top_p: float = proto.Field( + proto.FLOAT, + number=6, + optional=True, + ) + top_k: int = proto.Field( + proto.INT32, + number=7, + optional=True, + ) + safety_settings: MutableSequence[safety.SafetySetting] = proto.RepeatedField( + proto.MESSAGE, + number=8, + message=safety.SafetySetting, + ) + stop_sequences: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=9, + ) + + +class GenerateTextResponse(proto.Message): + r"""The response from the model, including candidate completions. + + Attributes: + candidates (MutableSequence[google.ai.generativelanguage_v1alpha.types.TextCompletion]): + Candidate responses from the model. 
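A minimal request-building sketch for the GenerateTextRequest defined above, assuming the matching generated v1alpha TextServiceClient; the model name comes from the docstring example and the prompt text is illustrative.

from google.ai import generativelanguage_v1alpha as glm

client = glm.TextServiceClient()
request = glm.GenerateTextRequest(
    model="models/text-bison-001",
    prompt=glm.TextPrompt(text="Write a two-line poem about the sea."),
    temperature=0.2,          # range [0.0, 1.0]; lower is less varied
    candidate_count=1,        # must be in [1, 8]
    max_output_tokens=256,
    stop_sequences=["\n\n"],  # up to 5 stop sequences
    safety_settings=[
        glm.SafetySetting(
            category=glm.HarmCategory.HARM_CATEGORY_DEROGATORY,
            threshold=glm.SafetySetting.HarmBlockThreshold.BLOCK_MEDIUM_AND_ABOVE,
        ),
    ],
)
response = client.generate_text(request=request)
for candidate in response.candidates:
    print(candidate.output)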
+ filters (MutableSequence[google.ai.generativelanguage_v1alpha.types.ContentFilter]): + A set of content filtering metadata for the prompt and + response text. + + This indicates which ``SafetyCategory``\ (s) blocked a + candidate from this response, the lowest ``HarmProbability`` + that triggered a block, and the HarmThreshold setting for + that category. This indicates the smallest change to the + ``SafetySettings`` that would be necessary to unblock at + least 1 response. + + The blocking is configured by the ``SafetySettings`` in the + request (or the default ``SafetySettings`` of the API). + safety_feedback (MutableSequence[google.ai.generativelanguage_v1alpha.types.SafetyFeedback]): + Returns any safety feedback related to + content filtering. + """ + + candidates: MutableSequence["TextCompletion"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="TextCompletion", + ) + filters: MutableSequence[safety.ContentFilter] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message=safety.ContentFilter, + ) + safety_feedback: MutableSequence[safety.SafetyFeedback] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message=safety.SafetyFeedback, + ) + + +class TextPrompt(proto.Message): + r"""Text given to the model as a prompt. + + The Model will use this TextPrompt to Generate a text + completion. + + Attributes: + text (str): + Required. The prompt text. + """ + + text: str = proto.Field( + proto.STRING, + number=1, + ) + + +class TextCompletion(proto.Message): + r"""Output text returned from a model. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + output (str): + Output only. The generated text returned from + the model. + safety_ratings (MutableSequence[google.ai.generativelanguage_v1alpha.types.SafetyRating]): + Ratings for the safety of a response. + + There is at most one rating per category. + citation_metadata (google.ai.generativelanguage_v1alpha.types.CitationMetadata): + Output only. Citation information for model-generated + ``output`` in this ``TextCompletion``. + + This field may be populated with attribution information for + any text included in the ``output``. + + This field is a member of `oneof`_ ``_citation_metadata``. + """ + + output: str = proto.Field( + proto.STRING, + number=1, + ) + safety_ratings: MutableSequence[safety.SafetyRating] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message=safety.SafetyRating, + ) + citation_metadata: citation.CitationMetadata = proto.Field( + proto.MESSAGE, + number=3, + optional=True, + message=citation.CitationMetadata, + ) + + +class EmbedTextRequest(proto.Message): + r"""Request to get a text embedding from the model. + + Attributes: + model (str): + Required. The model name to use with the + format model=models/{model}. + text (str): + Optional. The free-form input text that the + model will turn into an embedding. + """ + + model: str = proto.Field( + proto.STRING, + number=1, + ) + text: str = proto.Field( + proto.STRING, + number=2, + ) + + +class EmbedTextResponse(proto.Message): + r"""The response to a EmbedTextRequest. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + embedding (google.ai.generativelanguage_v1alpha.types.Embedding): + Output only. The embedding generated from the + input text. + + This field is a member of `oneof`_ ``_embedding``. 
+ """ + + embedding: "Embedding" = proto.Field( + proto.MESSAGE, + number=1, + optional=True, + message="Embedding", + ) + + +class BatchEmbedTextRequest(proto.Message): + r"""Batch request to get a text embedding from the model. + + Attributes: + model (str): + Required. The name of the ``Model`` to use for generating + the embedding. Examples: models/embedding-gecko-001 + texts (MutableSequence[str]): + Optional. The free-form input texts that the + model will turn into an embedding. The current + limit is 100 texts, over which an error will be + thrown. + requests (MutableSequence[google.ai.generativelanguage_v1alpha.types.EmbedTextRequest]): + Optional. Embed requests for the batch. Only one of + ``texts`` or ``requests`` can be set. + """ + + model: str = proto.Field( + proto.STRING, + number=1, + ) + texts: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) + requests: MutableSequence["EmbedTextRequest"] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message="EmbedTextRequest", + ) + + +class BatchEmbedTextResponse(proto.Message): + r"""The response to a EmbedTextRequest. + + Attributes: + embeddings (MutableSequence[google.ai.generativelanguage_v1alpha.types.Embedding]): + Output only. The embeddings generated from + the input text. + """ + + embeddings: MutableSequence["Embedding"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="Embedding", + ) + + +class Embedding(proto.Message): + r"""A list of floats representing the embedding. + + Attributes: + value (MutableSequence[float]): + The embedding values. + """ + + value: MutableSequence[float] = proto.RepeatedField( + proto.FLOAT, + number=1, + ) + + +class CountTextTokensRequest(proto.Message): + r"""Counts the number of tokens in the ``prompt`` sent to a model. + + Models may tokenize text differently, so each model may return a + different ``token_count``. + + Attributes: + model (str): + Required. The model's resource name. This serves as an ID + for the Model to use. + + This name should match a model name returned by the + ``ListModels`` method. + + Format: ``models/{model}`` + prompt (google.ai.generativelanguage_v1alpha.types.TextPrompt): + Required. The free-form input text given to + the model as a prompt. + """ + + model: str = proto.Field( + proto.STRING, + number=1, + ) + prompt: "TextPrompt" = proto.Field( + proto.MESSAGE, + number=2, + message="TextPrompt", + ) + + +class CountTextTokensResponse(proto.Message): + r"""A response from ``CountTextTokens``. + + It returns the model's ``token_count`` for the ``prompt``. + + Attributes: + token_count (int): + The number of tokens that the ``model`` tokenizes the + ``prompt`` into. + + Always non-negative. + """ + + token_count: int = proto.Field( + proto.INT32, + number=1, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/types/tuned_model.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/types/tuned_model.py new file mode 100644 index 000000000000..d714c1f8c4d5 --- /dev/null +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1alpha/types/tuned_model.py @@ -0,0 +1,542 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import timestamp_pb2 # type: ignore +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.ai.generativelanguage.v1alpha", + manifest={ + "TunedModel", + "TunedModelSource", + "TuningTask", + "Hyperparameters", + "Dataset", + "TuningExamples", + "TuningPart", + "TuningContent", + "TuningMultiturnExample", + "TuningExample", + "TuningSnapshot", + }, +) + + +class TunedModel(proto.Message): + r"""A fine-tuned model created using + ModelService.CreateTunedModel. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + tuned_model_source (google.ai.generativelanguage_v1alpha.types.TunedModelSource): + Optional. TunedModel to use as the starting + point for training the new model. + + This field is a member of `oneof`_ ``source_model``. + base_model (str): + Immutable. The name of the ``Model`` to tune. Example: + ``models/gemini-1.5-flash-001`` + + This field is a member of `oneof`_ ``source_model``. + name (str): + Output only. The tuned model name. A unique name will be + generated on create. Example: ``tunedModels/az2mb0bpw6i`` If + display_name is set on create, the id portion of the name + will be set by concatenating the words of the display_name + with hyphens and adding a random portion for uniqueness. + + Example: + + - display_name = ``Sentence Translator`` + - name = ``tunedModels/sentence-translator-u3b7m`` + display_name (str): + Optional. The name to display for this model + in user interfaces. The display name must be up + to 40 characters including spaces. + description (str): + Optional. A short description of this model. + temperature (float): + Optional. Controls the randomness of the output. + + Values can range over ``[0.0,1.0]``, inclusive. A value + closer to ``1.0`` will produce responses that are more + varied, while a value closer to ``0.0`` will typically + result in less surprising responses from the model. + + This value specifies default to be the one used by the base + model while creating the model. + + This field is a member of `oneof`_ ``_temperature``. + top_p (float): + Optional. For Nucleus sampling. + + Nucleus sampling considers the smallest set of tokens whose + probability sum is at least ``top_p``. + + This value specifies default to be the one used by the base + model while creating the model. + + This field is a member of `oneof`_ ``_top_p``. + top_k (int): + Optional. For Top-k sampling. + + Top-k sampling considers the set of ``top_k`` most probable + tokens. This value specifies default to be used by the + backend while making the call to the model. + + This value specifies default to be the one used by the base + model while creating the model. + + This field is a member of `oneof`_ ``_top_k``. 
+ state (google.ai.generativelanguage_v1alpha.types.TunedModel.State): + Output only. The state of the tuned model. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The timestamp when this model + was created. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The timestamp when this model + was updated. + tuning_task (google.ai.generativelanguage_v1alpha.types.TuningTask): + Required. The tuning task that creates the + tuned model. + reader_project_numbers (MutableSequence[int]): + Optional. List of project numbers that have + read access to the tuned model. + """ + + class State(proto.Enum): + r"""The state of the tuned model. + + Values: + STATE_UNSPECIFIED (0): + The default value. This value is unused. + CREATING (1): + The model is being created. + ACTIVE (2): + The model is ready to be used. + FAILED (3): + The model failed to be created. + """ + STATE_UNSPECIFIED = 0 + CREATING = 1 + ACTIVE = 2 + FAILED = 3 + + tuned_model_source: "TunedModelSource" = proto.Field( + proto.MESSAGE, + number=3, + oneof="source_model", + message="TunedModelSource", + ) + base_model: str = proto.Field( + proto.STRING, + number=4, + oneof="source_model", + ) + name: str = proto.Field( + proto.STRING, + number=1, + ) + display_name: str = proto.Field( + proto.STRING, + number=5, + ) + description: str = proto.Field( + proto.STRING, + number=6, + ) + temperature: float = proto.Field( + proto.FLOAT, + number=11, + optional=True, + ) + top_p: float = proto.Field( + proto.FLOAT, + number=12, + optional=True, + ) + top_k: int = proto.Field( + proto.INT32, + number=13, + optional=True, + ) + state: State = proto.Field( + proto.ENUM, + number=7, + enum=State, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=8, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=9, + message=timestamp_pb2.Timestamp, + ) + tuning_task: "TuningTask" = proto.Field( + proto.MESSAGE, + number=10, + message="TuningTask", + ) + reader_project_numbers: MutableSequence[int] = proto.RepeatedField( + proto.INT64, + number=14, + ) + + +class TunedModelSource(proto.Message): + r"""Tuned model as a source for training a new model. + + Attributes: + tuned_model (str): + Immutable. The name of the ``TunedModel`` to use as the + starting point for training the new model. Example: + ``tunedModels/my-tuned-model`` + base_model (str): + Output only. The name of the base ``Model`` this + ``TunedModel`` was tuned from. Example: + ``models/gemini-1.5-flash-001`` + """ + + tuned_model: str = proto.Field( + proto.STRING, + number=1, + ) + base_model: str = proto.Field( + proto.STRING, + number=2, + ) + + +class TuningTask(proto.Message): + r"""Tuning tasks that create tuned models. + + Attributes: + start_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The timestamp when tuning this + model started. + complete_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The timestamp when tuning this + model completed. + snapshots (MutableSequence[google.ai.generativelanguage_v1alpha.types.TuningSnapshot]): + Output only. Metrics collected during tuning. + training_data (google.ai.generativelanguage_v1alpha.types.Dataset): + Required. Input only. Immutable. The model + training data. + hyperparameters (google.ai.generativelanguage_v1alpha.types.Hyperparameters): + Immutable. Hyperparameters controlling the + tuning process. If not provided, default values + will be used. 
+ """ + + start_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + complete_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + snapshots: MutableSequence["TuningSnapshot"] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message="TuningSnapshot", + ) + training_data: "Dataset" = proto.Field( + proto.MESSAGE, + number=4, + message="Dataset", + ) + hyperparameters: "Hyperparameters" = proto.Field( + proto.MESSAGE, + number=5, + message="Hyperparameters", + ) + + +class Hyperparameters(proto.Message): + r"""Hyperparameters controlling the tuning process. Read more at + https://ai.google.dev/docs/model_tuning_guidance + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + learning_rate (float): + Optional. Immutable. The learning rate + hyperparameter for tuning. If not set, a default + of 0.001 or 0.0002 will be calculated based on + the number of training examples. + + This field is a member of `oneof`_ ``learning_rate_option``. + learning_rate_multiplier (float): + Optional. Immutable. The learning rate multiplier is used to + calculate a final learning_rate based on the default + (recommended) value. Actual learning rate := + learning_rate_multiplier \* default learning rate Default + learning rate is dependent on base model and dataset size. + If not set, a default of 1.0 will be used. + + This field is a member of `oneof`_ ``learning_rate_option``. + epoch_count (int): + Immutable. The number of training epochs. An + epoch is one pass through the training data. If + not set, a default of 5 will be used. + + This field is a member of `oneof`_ ``_epoch_count``. + batch_size (int): + Immutable. The batch size hyperparameter for + tuning. If not set, a default of 4 or 16 will be + used based on the number of training examples. + + This field is a member of `oneof`_ ``_batch_size``. + """ + + learning_rate: float = proto.Field( + proto.FLOAT, + number=16, + oneof="learning_rate_option", + ) + learning_rate_multiplier: float = proto.Field( + proto.FLOAT, + number=17, + oneof="learning_rate_option", + ) + epoch_count: int = proto.Field( + proto.INT32, + number=14, + optional=True, + ) + batch_size: int = proto.Field( + proto.INT32, + number=15, + optional=True, + ) + + +class Dataset(proto.Message): + r"""Dataset for training or validation. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + examples (google.ai.generativelanguage_v1alpha.types.TuningExamples): + Optional. Inline examples with simple + input/output text. + + This field is a member of `oneof`_ ``dataset``. + """ + + examples: "TuningExamples" = proto.Field( + proto.MESSAGE, + number=1, + oneof="dataset", + message="TuningExamples", + ) + + +class TuningExamples(proto.Message): + r"""A set of tuning examples. Can be training or validation data. + + Attributes: + examples (MutableSequence[google.ai.generativelanguage_v1alpha.types.TuningExample]): + The examples. Example input can be for text + or discuss, but all examples in a set must be of + the same type. 
+ multiturn_examples (MutableSequence[google.ai.generativelanguage_v1alpha.types.TuningMultiturnExample]): + Content examples. For multiturn + conversations. + """ + + examples: MutableSequence["TuningExample"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="TuningExample", + ) + multiturn_examples: MutableSequence["TuningMultiturnExample"] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message="TuningMultiturnExample", + ) + + +class TuningPart(proto.Message): + r"""A datatype containing data that is part of a multi-part + ``TuningContent`` message. + + This is a subset of the Part used for model inference, with limited + type support. + + A ``Part`` consists of data which has an associated datatype. A + ``Part`` can only contain one of the accepted types in + ``Part.data``. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + text (str): + Inline text. + + This field is a member of `oneof`_ ``data``. + """ + + text: str = proto.Field( + proto.STRING, + number=2, + oneof="data", + ) + + +class TuningContent(proto.Message): + r"""The structured datatype containing multi-part content of an example + message. + + This is a subset of the Content proto used during model inference + with limited type support. A ``Content`` includes a ``role`` field + designating the producer of the ``Content`` and a ``parts`` field + containing multi-part data that contains the content of the message + turn. + + Attributes: + parts (MutableSequence[google.ai.generativelanguage_v1alpha.types.TuningPart]): + Ordered ``Parts`` that constitute a single message. Parts + may have different MIME types. + role (str): + Optional. The producer of the content. Must + be either 'user' or 'model'. + Useful to set for multi-turn conversations, + otherwise can be left blank or unset. + """ + + parts: MutableSequence["TuningPart"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="TuningPart", + ) + role: str = proto.Field( + proto.STRING, + number=2, + ) + + +class TuningMultiturnExample(proto.Message): + r"""A tuning example with multiturn input. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + system_instruction (google.ai.generativelanguage_v1alpha.types.TuningContent): + Optional. Developer set system instructions. + Currently, text only. + + This field is a member of `oneof`_ ``_system_instruction``. + contents (MutableSequence[google.ai.generativelanguage_v1alpha.types.TuningContent]): + Each Content represents a turn in the + conversation. + """ + + system_instruction: "TuningContent" = proto.Field( + proto.MESSAGE, + number=8, + optional=True, + message="TuningContent", + ) + contents: MutableSequence["TuningContent"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="TuningContent", + ) + + +class TuningExample(proto.Message): + r"""A single example for tuning. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + text_input (str): + Optional. Text model input. + + This field is a member of `oneof`_ ``model_input``. + output (str): + Required. The expected model output. + """ + + text_input: str = proto.Field( + proto.STRING, + number=1, + oneof="model_input", + ) + output: str = proto.Field( + proto.STRING, + number=3, + ) + + +class TuningSnapshot(proto.Message): + r"""Record for a single tuning step. 
+ + Attributes: + step (int): + Output only. The tuning step. + epoch (int): + Output only. The epoch this step was part of. + mean_loss (float): + Output only. The mean loss of the training + examples for this step. + compute_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The timestamp when this metric + was computed. + """ + + step: int = proto.Field( + proto.INT32, + number=1, + ) + epoch: int = proto.Field( + proto.INT32, + number=2, + ) + mean_loss: float = proto.Field( + proto.FLOAT, + number=3, + ) + compute_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/__init__.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/__init__.py index 73da8c53fefc..9540c3ab3502 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/__init__.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/__init__.py @@ -108,11 +108,14 @@ GroundingMetadata, GroundingSupport, LogprobsResult, + PrebuiltVoiceConfig, RetrievalMetadata, SearchEntryPoint, Segment, SemanticRetrieverConfig, + SpeechConfig, TaskType, + VoiceConfig, ) from .types.model import Model from .types.model_service import ( @@ -338,6 +341,7 @@ "Part", "Permission", "PermissionServiceClient", + "PrebuiltVoiceConfig", "PredictRequest", "PredictResponse", "PredictionServiceClient", @@ -355,6 +359,7 @@ "SearchEntryPoint", "Segment", "SemanticRetrieverConfig", + "SpeechConfig", "StringList", "TaskType", "TextCompletion", @@ -378,4 +383,5 @@ "UpdatePermissionRequest", "UpdateTunedModelRequest", "VideoMetadata", + "VoiceConfig", ) diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/gapic_version.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/gapic_version.py index 0b6dbde2b051..a22e7bbe7e4a 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/gapic_version.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.6.14" # {x-release-please-version} +__version__ = "0.6.16" # {x-release-please-version} diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/cache_service/client.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/cache_service/client.py index 86d3bd0e8ad7..c0091e864644 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/cache_service/client.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/cache_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -505,6 +507,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. 
+ """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -1265,16 +1294,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -1320,16 +1353,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/cache_service/transports/rest.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/cache_service/transports/rest.py index 965f16b2493e..f8d9db07a578 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/cache_service/transports/rest.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/cache_service/transports/rest.py @@ -136,12 +136,37 @@ def post_create_cached_content( ) -> gag_cached_content.CachedContent: """Post-rpc interceptor for create_cached_content - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_cached_content_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CacheService server but before - it is returned to user code. + it is returned to user code. This `post_create_cached_content` interceptor runs + before the `post_create_cached_content_with_metadata` interceptor. """ return response + def post_create_cached_content_with_metadata( + self, + response: gag_cached_content.CachedContent, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + gag_cached_content.CachedContent, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for create_cached_content + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CacheService server but before it is returned to user code. + + We recommend only using this `post_create_cached_content_with_metadata` + interceptor in new development instead of the `post_create_cached_content` interceptor. 
+ When both interceptors are used, this `post_create_cached_content_with_metadata` interceptor runs after the + `post_create_cached_content` interceptor. The (possibly modified) response returned by + `post_create_cached_content` will be passed to + `post_create_cached_content_with_metadata`. + """ + return response, metadata + def pre_delete_cached_content( self, request: cache_service.DeleteCachedContentRequest, @@ -176,12 +201,35 @@ def post_get_cached_content( ) -> cached_content.CachedContent: """Post-rpc interceptor for get_cached_content - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_cached_content_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CacheService server but before - it is returned to user code. + it is returned to user code. This `post_get_cached_content` interceptor runs + before the `post_get_cached_content_with_metadata` interceptor. """ return response + def post_get_cached_content_with_metadata( + self, + response: cached_content.CachedContent, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[cached_content.CachedContent, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_cached_content + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CacheService server but before it is returned to user code. + + We recommend only using this `post_get_cached_content_with_metadata` + interceptor in new development instead of the `post_get_cached_content` interceptor. + When both interceptors are used, this `post_get_cached_content_with_metadata` interceptor runs after the + `post_get_cached_content` interceptor. The (possibly modified) response returned by + `post_get_cached_content` will be passed to + `post_get_cached_content_with_metadata`. + """ + return response, metadata + def pre_list_cached_contents( self, request: cache_service.ListCachedContentsRequest, @@ -201,12 +249,38 @@ def post_list_cached_contents( ) -> cache_service.ListCachedContentsResponse: """Post-rpc interceptor for list_cached_contents - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_cached_contents_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CacheService server but before - it is returned to user code. + it is returned to user code. This `post_list_cached_contents` interceptor runs + before the `post_list_cached_contents_with_metadata` interceptor. """ return response + def post_list_cached_contents_with_metadata( + self, + response: cache_service.ListCachedContentsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + cache_service.ListCachedContentsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_cached_contents + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CacheService server but before it is returned to user code. + + We recommend only using this `post_list_cached_contents_with_metadata` + interceptor in new development instead of the `post_list_cached_contents` interceptor. + When both interceptors are used, this `post_list_cached_contents_with_metadata` interceptor runs after the + `post_list_cached_contents` interceptor. 
The (possibly modified) response returned by + `post_list_cached_contents` will be passed to + `post_list_cached_contents_with_metadata`. + """ + return response, metadata + def pre_update_cached_content( self, request: cache_service.UpdateCachedContentRequest, @@ -227,12 +301,37 @@ def post_update_cached_content( ) -> gag_cached_content.CachedContent: """Post-rpc interceptor for update_cached_content - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_cached_content_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CacheService server but before - it is returned to user code. + it is returned to user code. This `post_update_cached_content` interceptor runs + before the `post_update_cached_content_with_metadata` interceptor. """ return response + def post_update_cached_content_with_metadata( + self, + response: gag_cached_content.CachedContent, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + gag_cached_content.CachedContent, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for update_cached_content + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CacheService server but before it is returned to user code. + + We recommend only using this `post_update_cached_content_with_metadata` + interceptor in new development instead of the `post_update_cached_content` interceptor. + When both interceptors are used, this `post_update_cached_content_with_metadata` interceptor runs after the + `post_update_cached_content` interceptor. The (possibly modified) response returned by + `post_update_cached_content` will be passed to + `post_update_cached_content_with_metadata`. 
+ """ + return response, metadata + def pre_get_operation( self, request: operations_pb2.GetOperationRequest, @@ -505,6 +604,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_cached_content(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_cached_content_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -763,6 +866,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_cached_content(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_cached_content_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -905,6 +1012,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_cached_contents(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_cached_contents_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1061,6 +1172,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_cached_content(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_cached_content_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/discuss_service/client.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/discuss_service/client.py index 5d236e1acefe..b9a4a15fc61d 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/discuss_service/client.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/discuss_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -482,6 +484,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. 
@@ -1053,16 +1082,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -1108,16 +1141,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/discuss_service/transports/rest.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/discuss_service/transports/rest.py index 80a1cc50d0fc..dbe4b09ab536 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/discuss_service/transports/rest.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/discuss_service/transports/rest.py @@ -111,12 +111,38 @@ def post_count_message_tokens( ) -> discuss_service.CountMessageTokensResponse: """Post-rpc interceptor for count_message_tokens - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_count_message_tokens_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DiscussService server but before - it is returned to user code. + it is returned to user code. This `post_count_message_tokens` interceptor runs + before the `post_count_message_tokens_with_metadata` interceptor. """ return response + def post_count_message_tokens_with_metadata( + self, + response: discuss_service.CountMessageTokensResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + discuss_service.CountMessageTokensResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for count_message_tokens + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DiscussService server but before it is returned to user code. + + We recommend only using this `post_count_message_tokens_with_metadata` + interceptor in new development instead of the `post_count_message_tokens` interceptor. + When both interceptors are used, this `post_count_message_tokens_with_metadata` interceptor runs after the + `post_count_message_tokens` interceptor. The (possibly modified) response returned by + `post_count_message_tokens` will be passed to + `post_count_message_tokens_with_metadata`. 
+ """ + return response, metadata + def pre_generate_message( self, request: discuss_service.GenerateMessageRequest, @@ -136,12 +162,37 @@ def post_generate_message( ) -> discuss_service.GenerateMessageResponse: """Post-rpc interceptor for generate_message - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_generate_message_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DiscussService server but before - it is returned to user code. + it is returned to user code. This `post_generate_message` interceptor runs + before the `post_generate_message_with_metadata` interceptor. """ return response + def post_generate_message_with_metadata( + self, + response: discuss_service.GenerateMessageResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + discuss_service.GenerateMessageResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for generate_message + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DiscussService server but before it is returned to user code. + + We recommend only using this `post_generate_message_with_metadata` + interceptor in new development instead of the `post_generate_message` interceptor. + When both interceptors are used, this `post_generate_message_with_metadata` interceptor runs after the + `post_generate_message` interceptor. The (possibly modified) response returned by + `post_generate_message` will be passed to + `post_generate_message_with_metadata`. + """ + return response, metadata + def pre_get_operation( self, request: operations_pb2.GetOperationRequest, @@ -415,6 +466,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_count_message_tokens(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_count_message_tokens_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -571,6 +626,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_generate_message(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_generate_message_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/file_service/async_client.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/file_service/async_client.py index cc4d74a856c1..a466720b829e 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/file_service/async_client.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/file_service/async_client.py @@ -513,6 +513,8 @@ async def sample_get_file(): Returns: google.ai.generativelanguage_v1beta.types.File: A file uploaded to the API. + Next ID: 15 + """ # Create or coerce a protobuf request object. 
# - Quick check: If we got a request object, we should *not* have diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/file_service/client.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/file_service/client.py index 3de779357670..923a9849cdca 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/file_service/client.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/file_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -479,6 +481,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -895,6 +924,8 @@ def sample_get_file(): Returns: google.ai.generativelanguage_v1beta.types.File: A file uploaded to the API. + Next ID: 15 + """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have @@ -1088,16 +1119,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -1143,16 +1178,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. 
+ return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/file_service/transports/rest.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/file_service/transports/rest.py index ab839007a690..de91eb2dd2ea 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/file_service/transports/rest.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/file_service/transports/rest.py @@ -121,12 +121,37 @@ def post_create_file( ) -> file_service.CreateFileResponse: """Post-rpc interceptor for create_file - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_file_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the FileService server but before - it is returned to user code. + it is returned to user code. This `post_create_file` interceptor runs + before the `post_create_file_with_metadata` interceptor. """ return response + def post_create_file_with_metadata( + self, + response: file_service.CreateFileResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + file_service.CreateFileResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for create_file + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the FileService server but before it is returned to user code. + + We recommend only using this `post_create_file_with_metadata` + interceptor in new development instead of the `post_create_file` interceptor. + When both interceptors are used, this `post_create_file_with_metadata` interceptor runs after the + `post_create_file` interceptor. The (possibly modified) response returned by + `post_create_file` will be passed to + `post_create_file_with_metadata`. + """ + return response, metadata + def pre_delete_file( self, request: file_service.DeleteFileRequest, @@ -154,12 +179,33 @@ def pre_get_file( def post_get_file(self, response: file.File) -> file.File: """Post-rpc interceptor for get_file - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_file_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the FileService server but before - it is returned to user code. + it is returned to user code. This `post_get_file` interceptor runs + before the `post_get_file_with_metadata` interceptor. """ return response + def post_get_file_with_metadata( + self, response: file.File, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[file.File, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_file + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the FileService server but before it is returned to user code. + + We recommend only using this `post_get_file_with_metadata` + interceptor in new development instead of the `post_get_file` interceptor. + When both interceptors are used, this `post_get_file_with_metadata` interceptor runs after the + `post_get_file` interceptor. The (possibly modified) response returned by + `post_get_file` will be passed to + `post_get_file_with_metadata`. 
+ """ + return response, metadata + def pre_list_files( self, request: file_service.ListFilesRequest, @@ -177,12 +223,35 @@ def post_list_files( ) -> file_service.ListFilesResponse: """Post-rpc interceptor for list_files - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_files_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the FileService server but before - it is returned to user code. + it is returned to user code. This `post_list_files` interceptor runs + before the `post_list_files_with_metadata` interceptor. """ return response + def post_list_files_with_metadata( + self, + response: file_service.ListFilesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[file_service.ListFilesResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_files + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the FileService server but before it is returned to user code. + + We recommend only using this `post_list_files_with_metadata` + interceptor in new development instead of the `post_list_files` interceptor. + When both interceptors are used, this `post_list_files_with_metadata` interceptor runs after the + `post_list_files` interceptor. The (possibly modified) response returned by + `post_list_files` will be passed to + `post_list_files_with_metadata`. + """ + return response, metadata + def pre_get_operation( self, request: operations_pb2.GetOperationRequest, @@ -447,6 +516,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_file(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_file_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -630,6 +703,8 @@ def __call__( Returns: ~.file.File: A file uploaded to the API. + Next ID: 15 + """ http_options = ( @@ -699,6 +774,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_file(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_file_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -841,6 +920,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_files(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_files_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/generative_service/async_client.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/generative_service/async_client.py index 535e5cff0d45..efc2b8b8b21d 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/generative_service/async_client.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/generative_service/async_client.py @@ -357,7 +357,7 @@ async def sample_generate_content(): Required. 
The name of the ``Model`` to use for generating the completion. - Format: ``name=models/{model}``. + Format: ``models/{model}``. This corresponds to the ``model`` field on the ``request`` instance; if ``request`` is provided, this @@ -671,7 +671,7 @@ async def sample_stream_generate_content(): Required. The name of the ``Model`` to use for generating the completion. - Format: ``name=models/{model}``. + Format: ``models/{model}``. This corresponds to the ``model`` field on the ``request`` instance; if ``request`` is provided, this diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/generative_service/client.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/generative_service/client.py index 02c6b988a806..ad896faec13d 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/generative_service/client.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/generative_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -498,6 +500,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -755,7 +784,7 @@ def sample_generate_content(): Required. The name of the ``Model`` to use for generating the completion. - Format: ``name=models/{model}``. + Format: ``models/{model}``. This corresponds to the ``model`` field on the ``request`` instance; if ``request`` is provided, this @@ -1063,7 +1092,7 @@ def sample_stream_generate_content(): Required. The name of the ``Model`` to use for generating the completion. - Format: ``name=models/{model}``. + Format: ``models/{model}``. This corresponds to the ``model`` field on the ``request`` instance; if ``request`` is provided, this @@ -1584,16 +1613,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -1639,16 +1672,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. 
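The corrected docstrings give the `model` argument in plain resource form, with no `name=` prefix. A minimal sketch (the model id is illustrative) of the unary and streaming calls using that format:

from google.ai import generativelanguage_v1beta

client = generativelanguage_v1beta.GenerativeServiceClient()

# Unary call: `model` uses the plain resource format, models/{model}.
response = client.generate_content(
    model="models/gemini-1.5-flash",  # illustrative model id
    contents=[{"parts": [{"text": "Say hello."}]}],
)
print(response.candidates[0].content.parts[0].text)

# The streaming variant uses the same format and yields partial responses.
for chunk in client.stream_generate_content(
    model="models/gemini-1.5-flash",
    contents=[{"parts": [{"text": "Write one haiku."}]}],
):
    if chunk.candidates:
        print(chunk.candidates[0].content.parts[0].text, end="")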
- response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/generative_service/transports/rest.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/generative_service/transports/rest.py index e37b9eeb1a00..e8f00c32dbda 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/generative_service/transports/rest.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/generative_service/transports/rest.py @@ -143,12 +143,38 @@ def post_batch_embed_contents( ) -> generative_service.BatchEmbedContentsResponse: """Post-rpc interceptor for batch_embed_contents - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_batch_embed_contents_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the GenerativeService server but before - it is returned to user code. + it is returned to user code. This `post_batch_embed_contents` interceptor runs + before the `post_batch_embed_contents_with_metadata` interceptor. """ return response + def post_batch_embed_contents_with_metadata( + self, + response: generative_service.BatchEmbedContentsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + generative_service.BatchEmbedContentsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for batch_embed_contents + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the GenerativeService server but before it is returned to user code. + + We recommend only using this `post_batch_embed_contents_with_metadata` + interceptor in new development instead of the `post_batch_embed_contents` interceptor. + When both interceptors are used, this `post_batch_embed_contents_with_metadata` interceptor runs after the + `post_batch_embed_contents` interceptor. The (possibly modified) response returned by + `post_batch_embed_contents` will be passed to + `post_batch_embed_contents_with_metadata`. + """ + return response, metadata + def pre_count_tokens( self, request: generative_service.CountTokensRequest, @@ -168,12 +194,37 @@ def post_count_tokens( ) -> generative_service.CountTokensResponse: """Post-rpc interceptor for count_tokens - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_count_tokens_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the GenerativeService server but before - it is returned to user code. + it is returned to user code. This `post_count_tokens` interceptor runs + before the `post_count_tokens_with_metadata` interceptor. 
""" return response + def post_count_tokens_with_metadata( + self, + response: generative_service.CountTokensResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + generative_service.CountTokensResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for count_tokens + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the GenerativeService server but before it is returned to user code. + + We recommend only using this `post_count_tokens_with_metadata` + interceptor in new development instead of the `post_count_tokens` interceptor. + When both interceptors are used, this `post_count_tokens_with_metadata` interceptor runs after the + `post_count_tokens` interceptor. The (possibly modified) response returned by + `post_count_tokens` will be passed to + `post_count_tokens_with_metadata`. + """ + return response, metadata + def pre_embed_content( self, request: generative_service.EmbedContentRequest, @@ -193,12 +244,37 @@ def post_embed_content( ) -> generative_service.EmbedContentResponse: """Post-rpc interceptor for embed_content - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_embed_content_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the GenerativeService server but before - it is returned to user code. + it is returned to user code. This `post_embed_content` interceptor runs + before the `post_embed_content_with_metadata` interceptor. """ return response + def post_embed_content_with_metadata( + self, + response: generative_service.EmbedContentResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + generative_service.EmbedContentResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for embed_content + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the GenerativeService server but before it is returned to user code. + + We recommend only using this `post_embed_content_with_metadata` + interceptor in new development instead of the `post_embed_content` interceptor. + When both interceptors are used, this `post_embed_content_with_metadata` interceptor runs after the + `post_embed_content` interceptor. The (possibly modified) response returned by + `post_embed_content` will be passed to + `post_embed_content_with_metadata`. + """ + return response, metadata + def pre_generate_answer( self, request: generative_service.GenerateAnswerRequest, @@ -219,12 +295,38 @@ def post_generate_answer( ) -> generative_service.GenerateAnswerResponse: """Post-rpc interceptor for generate_answer - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_generate_answer_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the GenerativeService server but before - it is returned to user code. + it is returned to user code. This `post_generate_answer` interceptor runs + before the `post_generate_answer_with_metadata` interceptor. 
""" return response + def post_generate_answer_with_metadata( + self, + response: generative_service.GenerateAnswerResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + generative_service.GenerateAnswerResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for generate_answer + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the GenerativeService server but before it is returned to user code. + + We recommend only using this `post_generate_answer_with_metadata` + interceptor in new development instead of the `post_generate_answer` interceptor. + When both interceptors are used, this `post_generate_answer_with_metadata` interceptor runs after the + `post_generate_answer` interceptor. The (possibly modified) response returned by + `post_generate_answer` will be passed to + `post_generate_answer_with_metadata`. + """ + return response, metadata + def pre_generate_content( self, request: generative_service.GenerateContentRequest, @@ -245,12 +347,38 @@ def post_generate_content( ) -> generative_service.GenerateContentResponse: """Post-rpc interceptor for generate_content - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_generate_content_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the GenerativeService server but before - it is returned to user code. + it is returned to user code. This `post_generate_content` interceptor runs + before the `post_generate_content_with_metadata` interceptor. """ return response + def post_generate_content_with_metadata( + self, + response: generative_service.GenerateContentResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + generative_service.GenerateContentResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for generate_content + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the GenerativeService server but before it is returned to user code. + + We recommend only using this `post_generate_content_with_metadata` + interceptor in new development instead of the `post_generate_content` interceptor. + When both interceptors are used, this `post_generate_content_with_metadata` interceptor runs after the + `post_generate_content` interceptor. The (possibly modified) response returned by + `post_generate_content` will be passed to + `post_generate_content_with_metadata`. + """ + return response, metadata + def pre_stream_generate_content( self, request: generative_service.GenerateContentRequest, @@ -271,12 +399,37 @@ def post_stream_generate_content( ) -> rest_streaming.ResponseIterator: """Post-rpc interceptor for stream_generate_content - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_stream_generate_content_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the GenerativeService server but before - it is returned to user code. + it is returned to user code. This `post_stream_generate_content` interceptor runs + before the `post_stream_generate_content_with_metadata` interceptor. 
""" return response + def post_stream_generate_content_with_metadata( + self, + response: rest_streaming.ResponseIterator, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + rest_streaming.ResponseIterator, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for stream_generate_content + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the GenerativeService server but before it is returned to user code. + + We recommend only using this `post_stream_generate_content_with_metadata` + interceptor in new development instead of the `post_stream_generate_content` interceptor. + When both interceptors are used, this `post_stream_generate_content_with_metadata` interceptor runs after the + `post_stream_generate_content` interceptor. The (possibly modified) response returned by + `post_stream_generate_content` will be passed to + `post_stream_generate_content_with_metadata`. + """ + return response, metadata + def pre_get_operation( self, request: operations_pb2.GetOperationRequest, @@ -542,6 +695,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_batch_embed_contents(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_batch_embed_contents_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -698,6 +855,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_count_tokens(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_count_tokens_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -847,6 +1008,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_embed_content(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_embed_content_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -999,6 +1164,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_generate_answer(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_generate_answer_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1167,6 +1336,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_generate_content(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_generate_content_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1337,6 +1510,10 @@ def __call__( ) resp = self._interceptor.post_stream_generate_content(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_stream_generate_content_with_metadata( + resp, response_metadata + ) return resp @property diff --git 
a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/model_service/client.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/model_service/client.py index 702523ff103f..490a5900cbff 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/model_service/client.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/model_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -500,6 +502,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -1571,16 +1600,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -1626,16 +1659,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. 
+ return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/model_service/transports/rest.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/model_service/transports/rest.py index 88613b2bba21..40ba4b06a086 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/model_service/transports/rest.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/model_service/transports/rest.py @@ -149,12 +149,35 @@ def post_create_tuned_model( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_tuned_model - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_tuned_model_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ModelService server but before - it is returned to user code. + it is returned to user code. This `post_create_tuned_model` interceptor runs + before the `post_create_tuned_model_with_metadata` interceptor. """ return response + def post_create_tuned_model_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_tuned_model + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ModelService server but before it is returned to user code. + + We recommend only using this `post_create_tuned_model_with_metadata` + interceptor in new development instead of the `post_create_tuned_model` interceptor. + When both interceptors are used, this `post_create_tuned_model_with_metadata` interceptor runs after the + `post_create_tuned_model` interceptor. The (possibly modified) response returned by + `post_create_tuned_model` will be passed to + `post_create_tuned_model_with_metadata`. + """ + return response, metadata + def pre_delete_tuned_model( self, request: model_service.DeleteTunedModelRequest, @@ -184,12 +207,33 @@ def pre_get_model( def post_get_model(self, response: model.Model) -> model.Model: """Post-rpc interceptor for get_model - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_model_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ModelService server but before - it is returned to user code. + it is returned to user code. This `post_get_model` interceptor runs + before the `post_get_model_with_metadata` interceptor. """ return response + def post_get_model_with_metadata( + self, response: model.Model, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[model.Model, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_model + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ModelService server but before it is returned to user code. + + We recommend only using this `post_get_model_with_metadata` + interceptor in new development instead of the `post_get_model` interceptor. + When both interceptors are used, this `post_get_model_with_metadata` interceptor runs after the + `post_get_model` interceptor. 
The (possibly modified) response returned by + `post_get_model` will be passed to + `post_get_model_with_metadata`. + """ + return response, metadata + def pre_get_tuned_model( self, request: model_service.GetTunedModelRequest, @@ -209,12 +253,35 @@ def post_get_tuned_model( ) -> tuned_model.TunedModel: """Post-rpc interceptor for get_tuned_model - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_tuned_model_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ModelService server but before - it is returned to user code. + it is returned to user code. This `post_get_tuned_model` interceptor runs + before the `post_get_tuned_model_with_metadata` interceptor. """ return response + def post_get_tuned_model_with_metadata( + self, + response: tuned_model.TunedModel, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[tuned_model.TunedModel, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_tuned_model + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ModelService server but before it is returned to user code. + + We recommend only using this `post_get_tuned_model_with_metadata` + interceptor in new development instead of the `post_get_tuned_model` interceptor. + When both interceptors are used, this `post_get_tuned_model_with_metadata` interceptor runs after the + `post_get_tuned_model` interceptor. The (possibly modified) response returned by + `post_get_tuned_model` will be passed to + `post_get_tuned_model_with_metadata`. + """ + return response, metadata + def pre_list_models( self, request: model_service.ListModelsRequest, @@ -234,12 +301,37 @@ def post_list_models( ) -> model_service.ListModelsResponse: """Post-rpc interceptor for list_models - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_models_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ModelService server but before - it is returned to user code. + it is returned to user code. This `post_list_models` interceptor runs + before the `post_list_models_with_metadata` interceptor. """ return response + def post_list_models_with_metadata( + self, + response: model_service.ListModelsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + model_service.ListModelsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_models + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ModelService server but before it is returned to user code. + + We recommend only using this `post_list_models_with_metadata` + interceptor in new development instead of the `post_list_models` interceptor. + When both interceptors are used, this `post_list_models_with_metadata` interceptor runs after the + `post_list_models` interceptor. The (possibly modified) response returned by + `post_list_models` will be passed to + `post_list_models_with_metadata`. + """ + return response, metadata + def pre_list_tuned_models( self, request: model_service.ListTunedModelsRequest, @@ -259,12 +351,37 @@ def post_list_tuned_models( ) -> model_service.ListTunedModelsResponse: """Post-rpc interceptor for list_tuned_models - Override in a subclass to manipulate the response + DEPRECATED. 
Please use the `post_list_tuned_models_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ModelService server but before - it is returned to user code. + it is returned to user code. This `post_list_tuned_models` interceptor runs + before the `post_list_tuned_models_with_metadata` interceptor. """ return response + def post_list_tuned_models_with_metadata( + self, + response: model_service.ListTunedModelsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + model_service.ListTunedModelsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_tuned_models + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ModelService server but before it is returned to user code. + + We recommend only using this `post_list_tuned_models_with_metadata` + interceptor in new development instead of the `post_list_tuned_models` interceptor. + When both interceptors are used, this `post_list_tuned_models_with_metadata` interceptor runs after the + `post_list_tuned_models` interceptor. The (possibly modified) response returned by + `post_list_tuned_models` will be passed to + `post_list_tuned_models_with_metadata`. + """ + return response, metadata + def pre_update_tuned_model( self, request: model_service.UpdateTunedModelRequest, @@ -284,12 +401,35 @@ def post_update_tuned_model( ) -> gag_tuned_model.TunedModel: """Post-rpc interceptor for update_tuned_model - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_tuned_model_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ModelService server but before - it is returned to user code. + it is returned to user code. This `post_update_tuned_model` interceptor runs + before the `post_update_tuned_model_with_metadata` interceptor. """ return response + def post_update_tuned_model_with_metadata( + self, + response: gag_tuned_model.TunedModel, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gag_tuned_model.TunedModel, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_tuned_model + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ModelService server but before it is returned to user code. + + We recommend only using this `post_update_tuned_model_with_metadata` + interceptor in new development instead of the `post_update_tuned_model` interceptor. + When both interceptors are used, this `post_update_tuned_model_with_metadata` interceptor runs after the + `post_update_tuned_model` interceptor. The (possibly modified) response returned by + `post_update_tuned_model` will be passed to + `post_update_tuned_model_with_metadata`. 
+ """ + return response, metadata + def pre_get_operation( self, request: operations_pb2.GetOperationRequest, @@ -607,6 +747,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_tuned_model(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_tuned_model_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -860,6 +1004,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_model(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_model_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1003,6 +1151,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_tuned_model(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_tuned_model_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1149,6 +1301,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_models(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_models_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1295,6 +1451,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_tuned_models(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_tuned_models_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1447,6 +1607,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_tuned_model(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_tuned_model_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/permission_service/client.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/permission_service/client.py index bc656856e4b8..e5cc68cc691e 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/permission_service/client.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/permission_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -488,6 +490,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. 
return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -1457,16 +1486,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -1512,16 +1545,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/permission_service/transports/rest.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/permission_service/transports/rest.py index 8c8f5528a444..1c742a7bfbc6 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/permission_service/transports/rest.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/permission_service/transports/rest.py @@ -142,12 +142,35 @@ def post_create_permission( ) -> gag_permission.Permission: """Post-rpc interceptor for create_permission - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_permission_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the PermissionService server but before - it is returned to user code. + it is returned to user code. This `post_create_permission` interceptor runs + before the `post_create_permission_with_metadata` interceptor. 
""" return response + def post_create_permission_with_metadata( + self, + response: gag_permission.Permission, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gag_permission.Permission, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_permission + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the PermissionService server but before it is returned to user code. + + We recommend only using this `post_create_permission_with_metadata` + interceptor in new development instead of the `post_create_permission` interceptor. + When both interceptors are used, this `post_create_permission_with_metadata` interceptor runs after the + `post_create_permission` interceptor. The (possibly modified) response returned by + `post_create_permission` will be passed to + `post_create_permission_with_metadata`. + """ + return response, metadata + def pre_delete_permission( self, request: permission_service.DeletePermissionRequest, @@ -182,12 +205,35 @@ def post_get_permission( ) -> permission.Permission: """Post-rpc interceptor for get_permission - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_permission_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the PermissionService server but before - it is returned to user code. + it is returned to user code. This `post_get_permission` interceptor runs + before the `post_get_permission_with_metadata` interceptor. """ return response + def post_get_permission_with_metadata( + self, + response: permission.Permission, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[permission.Permission, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_permission + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the PermissionService server but before it is returned to user code. + + We recommend only using this `post_get_permission_with_metadata` + interceptor in new development instead of the `post_get_permission` interceptor. + When both interceptors are used, this `post_get_permission_with_metadata` interceptor runs after the + `post_get_permission` interceptor. The (possibly modified) response returned by + `post_get_permission` will be passed to + `post_get_permission_with_metadata`. + """ + return response, metadata + def pre_list_permissions( self, request: permission_service.ListPermissionsRequest, @@ -208,12 +254,38 @@ def post_list_permissions( ) -> permission_service.ListPermissionsResponse: """Post-rpc interceptor for list_permissions - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_permissions_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the PermissionService server but before - it is returned to user code. + it is returned to user code. This `post_list_permissions` interceptor runs + before the `post_list_permissions_with_metadata` interceptor. 
""" return response + def post_list_permissions_with_metadata( + self, + response: permission_service.ListPermissionsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + permission_service.ListPermissionsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_permissions + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the PermissionService server but before it is returned to user code. + + We recommend only using this `post_list_permissions_with_metadata` + interceptor in new development instead of the `post_list_permissions` interceptor. + When both interceptors are used, this `post_list_permissions_with_metadata` interceptor runs after the + `post_list_permissions` interceptor. The (possibly modified) response returned by + `post_list_permissions` will be passed to + `post_list_permissions_with_metadata`. + """ + return response, metadata + def pre_transfer_ownership( self, request: permission_service.TransferOwnershipRequest, @@ -234,12 +306,38 @@ def post_transfer_ownership( ) -> permission_service.TransferOwnershipResponse: """Post-rpc interceptor for transfer_ownership - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_transfer_ownership_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the PermissionService server but before - it is returned to user code. + it is returned to user code. This `post_transfer_ownership` interceptor runs + before the `post_transfer_ownership_with_metadata` interceptor. """ return response + def post_transfer_ownership_with_metadata( + self, + response: permission_service.TransferOwnershipResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + permission_service.TransferOwnershipResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for transfer_ownership + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the PermissionService server but before it is returned to user code. + + We recommend only using this `post_transfer_ownership_with_metadata` + interceptor in new development instead of the `post_transfer_ownership` interceptor. + When both interceptors are used, this `post_transfer_ownership_with_metadata` interceptor runs after the + `post_transfer_ownership` interceptor. The (possibly modified) response returned by + `post_transfer_ownership` will be passed to + `post_transfer_ownership_with_metadata`. + """ + return response, metadata + def pre_update_permission( self, request: permission_service.UpdatePermissionRequest, @@ -260,12 +358,35 @@ def post_update_permission( ) -> gag_permission.Permission: """Post-rpc interceptor for update_permission - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_permission_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the PermissionService server but before - it is returned to user code. + it is returned to user code. This `post_update_permission` interceptor runs + before the `post_update_permission_with_metadata` interceptor. 
""" return response + def post_update_permission_with_metadata( + self, + response: gag_permission.Permission, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gag_permission.Permission, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_permission + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the PermissionService server but before it is returned to user code. + + We recommend only using this `post_update_permission_with_metadata` + interceptor in new development instead of the `post_update_permission` interceptor. + When both interceptors are used, this `post_update_permission_with_metadata` interceptor runs after the + `post_update_permission` interceptor. The (possibly modified) response returned by + `post_update_permission` will be passed to + `post_update_permission_with_metadata`. + """ + return response, metadata + def pre_get_operation( self, request: operations_pb2.GetOperationRequest, @@ -553,6 +674,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_permission(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_permission_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -827,6 +952,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_permission(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_permission_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -972,6 +1101,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_permissions(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_permissions_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1124,6 +1257,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_transfer_ownership(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_transfer_ownership_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1298,6 +1435,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_permission(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_permission_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/prediction_service/client.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/prediction_service/client.py index 38034e60d0d8..64ba1dd39e5c 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/prediction_service/client.py +++ 
b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/prediction_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -479,6 +481,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -854,16 +883,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -909,16 +942,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/prediction_service/transports/rest.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/prediction_service/transports/rest.py index dcd026c2e771..81551293ad63 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/prediction_service/transports/rest.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/prediction_service/transports/rest.py @@ -102,12 +102,37 @@ def post_predict( ) -> prediction_service.PredictResponse: """Post-rpc interceptor for predict - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_predict_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the PredictionService server but before - it is returned to user code. + it is returned to user code. This `post_predict` interceptor runs + before the `post_predict_with_metadata` interceptor. 
""" return response + def post_predict_with_metadata( + self, + response: prediction_service.PredictResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + prediction_service.PredictResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for predict + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the PredictionService server but before it is returned to user code. + + We recommend only using this `post_predict_with_metadata` + interceptor in new development instead of the `post_predict` interceptor. + When both interceptors are used, this `post_predict_with_metadata` interceptor runs after the + `post_predict` interceptor. The (possibly modified) response returned by + `post_predict` will be passed to + `post_predict_with_metadata`. + """ + return response, metadata + def pre_get_operation( self, request: operations_pb2.GetOperationRequest, @@ -373,6 +398,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_predict(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_predict_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/retriever_service/client.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/retriever_service/client.py index 61d6ebc8278a..5344ea53f35a 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/retriever_service/client.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/retriever_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -522,6 +524,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -2800,16 +2829,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. 
+ return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -2855,16 +2888,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/retriever_service/transports/rest.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/retriever_service/transports/rest.py index ef7dafcc451e..ec3f85c30308 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/retriever_service/transports/rest.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/retriever_service/transports/rest.py @@ -240,12 +240,38 @@ def post_batch_create_chunks( ) -> retriever_service.BatchCreateChunksResponse: """Post-rpc interceptor for batch_create_chunks - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_batch_create_chunks_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RetrieverService server but before - it is returned to user code. + it is returned to user code. This `post_batch_create_chunks` interceptor runs + before the `post_batch_create_chunks_with_metadata` interceptor. """ return response + def post_batch_create_chunks_with_metadata( + self, + response: retriever_service.BatchCreateChunksResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + retriever_service.BatchCreateChunksResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for batch_create_chunks + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RetrieverService server but before it is returned to user code. + + We recommend only using this `post_batch_create_chunks_with_metadata` + interceptor in new development instead of the `post_batch_create_chunks` interceptor. + When both interceptors are used, this `post_batch_create_chunks_with_metadata` interceptor runs after the + `post_batch_create_chunks` interceptor. The (possibly modified) response returned by + `post_batch_create_chunks` will be passed to + `post_batch_create_chunks_with_metadata`. + """ + return response, metadata + def pre_batch_delete_chunks( self, request: retriever_service.BatchDeleteChunksRequest, @@ -281,12 +307,38 @@ def post_batch_update_chunks( ) -> retriever_service.BatchUpdateChunksResponse: """Post-rpc interceptor for batch_update_chunks - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_batch_update_chunks_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RetrieverService server but before - it is returned to user code. + it is returned to user code. 
This `post_batch_update_chunks` interceptor runs + before the `post_batch_update_chunks_with_metadata` interceptor. """ return response + def post_batch_update_chunks_with_metadata( + self, + response: retriever_service.BatchUpdateChunksResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + retriever_service.BatchUpdateChunksResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for batch_update_chunks + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RetrieverService server but before it is returned to user code. + + We recommend only using this `post_batch_update_chunks_with_metadata` + interceptor in new development instead of the `post_batch_update_chunks` interceptor. + When both interceptors are used, this `post_batch_update_chunks_with_metadata` interceptor runs after the + `post_batch_update_chunks` interceptor. The (possibly modified) response returned by + `post_batch_update_chunks` will be passed to + `post_batch_update_chunks_with_metadata`. + """ + return response, metadata + def pre_create_chunk( self, request: retriever_service.CreateChunkRequest, @@ -304,12 +356,35 @@ def pre_create_chunk( def post_create_chunk(self, response: retriever.Chunk) -> retriever.Chunk: """Post-rpc interceptor for create_chunk - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_chunk_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RetrieverService server but before - it is returned to user code. + it is returned to user code. This `post_create_chunk` interceptor runs + before the `post_create_chunk_with_metadata` interceptor. """ return response + def post_create_chunk_with_metadata( + self, + response: retriever.Chunk, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[retriever.Chunk, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_chunk + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RetrieverService server but before it is returned to user code. + + We recommend only using this `post_create_chunk_with_metadata` + interceptor in new development instead of the `post_create_chunk` interceptor. + When both interceptors are used, this `post_create_chunk_with_metadata` interceptor runs after the + `post_create_chunk` interceptor. The (possibly modified) response returned by + `post_create_chunk` will be passed to + `post_create_chunk_with_metadata`. + """ + return response, metadata + def pre_create_corpus( self, request: retriever_service.CreateCorpusRequest, @@ -327,12 +402,35 @@ def pre_create_corpus( def post_create_corpus(self, response: retriever.Corpus) -> retriever.Corpus: """Post-rpc interceptor for create_corpus - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_corpus_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RetrieverService server but before - it is returned to user code. + it is returned to user code. This `post_create_corpus` interceptor runs + before the `post_create_corpus_with_metadata` interceptor. 
""" return response + def post_create_corpus_with_metadata( + self, + response: retriever.Corpus, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[retriever.Corpus, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_corpus + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RetrieverService server but before it is returned to user code. + + We recommend only using this `post_create_corpus_with_metadata` + interceptor in new development instead of the `post_create_corpus` interceptor. + When both interceptors are used, this `post_create_corpus_with_metadata` interceptor runs after the + `post_create_corpus` interceptor. The (possibly modified) response returned by + `post_create_corpus` will be passed to + `post_create_corpus_with_metadata`. + """ + return response, metadata + def pre_create_document( self, request: retriever_service.CreateDocumentRequest, @@ -350,12 +448,35 @@ def pre_create_document( def post_create_document(self, response: retriever.Document) -> retriever.Document: """Post-rpc interceptor for create_document - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_document_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RetrieverService server but before - it is returned to user code. + it is returned to user code. This `post_create_document` interceptor runs + before the `post_create_document_with_metadata` interceptor. """ return response + def post_create_document_with_metadata( + self, + response: retriever.Document, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[retriever.Document, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_document + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RetrieverService server but before it is returned to user code. + + We recommend only using this `post_create_document_with_metadata` + interceptor in new development instead of the `post_create_document` interceptor. + When both interceptors are used, this `post_create_document_with_metadata` interceptor runs after the + `post_create_document` interceptor. The (possibly modified) response returned by + `post_create_document` will be passed to + `post_create_document_with_metadata`. + """ + return response, metadata + def pre_delete_chunk( self, request: retriever_service.DeleteChunkRequest, @@ -415,12 +536,35 @@ def pre_get_chunk( def post_get_chunk(self, response: retriever.Chunk) -> retriever.Chunk: """Post-rpc interceptor for get_chunk - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_chunk_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RetrieverService server but before - it is returned to user code. + it is returned to user code. This `post_get_chunk` interceptor runs + before the `post_get_chunk_with_metadata` interceptor. """ return response + def post_get_chunk_with_metadata( + self, + response: retriever.Chunk, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[retriever.Chunk, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_chunk + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RetrieverService server but before it is returned to user code. 
+ + We recommend only using this `post_get_chunk_with_metadata` + interceptor in new development instead of the `post_get_chunk` interceptor. + When both interceptors are used, this `post_get_chunk_with_metadata` interceptor runs after the + `post_get_chunk` interceptor. The (possibly modified) response returned by + `post_get_chunk` will be passed to + `post_get_chunk_with_metadata`. + """ + return response, metadata + def pre_get_corpus( self, request: retriever_service.GetCorpusRequest, @@ -438,12 +582,35 @@ def pre_get_corpus( def post_get_corpus(self, response: retriever.Corpus) -> retriever.Corpus: """Post-rpc interceptor for get_corpus - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_corpus_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RetrieverService server but before - it is returned to user code. + it is returned to user code. This `post_get_corpus` interceptor runs + before the `post_get_corpus_with_metadata` interceptor. """ return response + def post_get_corpus_with_metadata( + self, + response: retriever.Corpus, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[retriever.Corpus, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_corpus + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RetrieverService server but before it is returned to user code. + + We recommend only using this `post_get_corpus_with_metadata` + interceptor in new development instead of the `post_get_corpus` interceptor. + When both interceptors are used, this `post_get_corpus_with_metadata` interceptor runs after the + `post_get_corpus` interceptor. The (possibly modified) response returned by + `post_get_corpus` will be passed to + `post_get_corpus_with_metadata`. + """ + return response, metadata + def pre_get_document( self, request: retriever_service.GetDocumentRequest, @@ -461,12 +628,35 @@ def pre_get_document( def post_get_document(self, response: retriever.Document) -> retriever.Document: """Post-rpc interceptor for get_document - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_document_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RetrieverService server but before - it is returned to user code. + it is returned to user code. This `post_get_document` interceptor runs + before the `post_get_document_with_metadata` interceptor. """ return response + def post_get_document_with_metadata( + self, + response: retriever.Document, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[retriever.Document, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_document + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RetrieverService server but before it is returned to user code. + + We recommend only using this `post_get_document_with_metadata` + interceptor in new development instead of the `post_get_document` interceptor. + When both interceptors are used, this `post_get_document_with_metadata` interceptor runs after the + `post_get_document` interceptor. The (possibly modified) response returned by + `post_get_document` will be passed to + `post_get_document_with_metadata`. 
+ """ + return response, metadata + def pre_list_chunks( self, request: retriever_service.ListChunksRequest, @@ -486,12 +676,37 @@ def post_list_chunks( ) -> retriever_service.ListChunksResponse: """Post-rpc interceptor for list_chunks - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_chunks_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RetrieverService server but before - it is returned to user code. + it is returned to user code. This `post_list_chunks` interceptor runs + before the `post_list_chunks_with_metadata` interceptor. """ return response + def post_list_chunks_with_metadata( + self, + response: retriever_service.ListChunksResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + retriever_service.ListChunksResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_chunks + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RetrieverService server but before it is returned to user code. + + We recommend only using this `post_list_chunks_with_metadata` + interceptor in new development instead of the `post_list_chunks` interceptor. + When both interceptors are used, this `post_list_chunks_with_metadata` interceptor runs after the + `post_list_chunks` interceptor. The (possibly modified) response returned by + `post_list_chunks` will be passed to + `post_list_chunks_with_metadata`. + """ + return response, metadata + def pre_list_corpora( self, request: retriever_service.ListCorporaRequest, @@ -511,12 +726,37 @@ def post_list_corpora( ) -> retriever_service.ListCorporaResponse: """Post-rpc interceptor for list_corpora - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_corpora_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RetrieverService server but before - it is returned to user code. + it is returned to user code. This `post_list_corpora` interceptor runs + before the `post_list_corpora_with_metadata` interceptor. """ return response + def post_list_corpora_with_metadata( + self, + response: retriever_service.ListCorporaResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + retriever_service.ListCorporaResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_corpora + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RetrieverService server but before it is returned to user code. + + We recommend only using this `post_list_corpora_with_metadata` + interceptor in new development instead of the `post_list_corpora` interceptor. + When both interceptors are used, this `post_list_corpora_with_metadata` interceptor runs after the + `post_list_corpora` interceptor. The (possibly modified) response returned by + `post_list_corpora` will be passed to + `post_list_corpora_with_metadata`. + """ + return response, metadata + def pre_list_documents( self, request: retriever_service.ListDocumentsRequest, @@ -536,12 +776,37 @@ def post_list_documents( ) -> retriever_service.ListDocumentsResponse: """Post-rpc interceptor for list_documents - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_documents_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response after it is returned by the RetrieverService server but before - it is returned to user code. + it is returned to user code. This `post_list_documents` interceptor runs + before the `post_list_documents_with_metadata` interceptor. """ return response + def post_list_documents_with_metadata( + self, + response: retriever_service.ListDocumentsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + retriever_service.ListDocumentsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_documents + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RetrieverService server but before it is returned to user code. + + We recommend only using this `post_list_documents_with_metadata` + interceptor in new development instead of the `post_list_documents` interceptor. + When both interceptors are used, this `post_list_documents_with_metadata` interceptor runs after the + `post_list_documents` interceptor. The (possibly modified) response returned by + `post_list_documents` will be passed to + `post_list_documents_with_metadata`. + """ + return response, metadata + def pre_query_corpus( self, request: retriever_service.QueryCorpusRequest, @@ -561,12 +826,37 @@ def post_query_corpus( ) -> retriever_service.QueryCorpusResponse: """Post-rpc interceptor for query_corpus - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_query_corpus_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RetrieverService server but before - it is returned to user code. + it is returned to user code. This `post_query_corpus` interceptor runs + before the `post_query_corpus_with_metadata` interceptor. """ return response + def post_query_corpus_with_metadata( + self, + response: retriever_service.QueryCorpusResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + retriever_service.QueryCorpusResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for query_corpus + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RetrieverService server but before it is returned to user code. + + We recommend only using this `post_query_corpus_with_metadata` + interceptor in new development instead of the `post_query_corpus` interceptor. + When both interceptors are used, this `post_query_corpus_with_metadata` interceptor runs after the + `post_query_corpus` interceptor. The (possibly modified) response returned by + `post_query_corpus` will be passed to + `post_query_corpus_with_metadata`. + """ + return response, metadata + def pre_query_document( self, request: retriever_service.QueryDocumentRequest, @@ -586,12 +876,37 @@ def post_query_document( ) -> retriever_service.QueryDocumentResponse: """Post-rpc interceptor for query_document - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_query_document_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RetrieverService server but before - it is returned to user code. + it is returned to user code. This `post_query_document` interceptor runs + before the `post_query_document_with_metadata` interceptor. 
""" return response + def post_query_document_with_metadata( + self, + response: retriever_service.QueryDocumentResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + retriever_service.QueryDocumentResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for query_document + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RetrieverService server but before it is returned to user code. + + We recommend only using this `post_query_document_with_metadata` + interceptor in new development instead of the `post_query_document` interceptor. + When both interceptors are used, this `post_query_document_with_metadata` interceptor runs after the + `post_query_document` interceptor. The (possibly modified) response returned by + `post_query_document` will be passed to + `post_query_document_with_metadata`. + """ + return response, metadata + def pre_update_chunk( self, request: retriever_service.UpdateChunkRequest, @@ -609,12 +924,35 @@ def pre_update_chunk( def post_update_chunk(self, response: retriever.Chunk) -> retriever.Chunk: """Post-rpc interceptor for update_chunk - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_chunk_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RetrieverService server but before - it is returned to user code. + it is returned to user code. This `post_update_chunk` interceptor runs + before the `post_update_chunk_with_metadata` interceptor. """ return response + def post_update_chunk_with_metadata( + self, + response: retriever.Chunk, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[retriever.Chunk, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_chunk + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RetrieverService server but before it is returned to user code. + + We recommend only using this `post_update_chunk_with_metadata` + interceptor in new development instead of the `post_update_chunk` interceptor. + When both interceptors are used, this `post_update_chunk_with_metadata` interceptor runs after the + `post_update_chunk` interceptor. The (possibly modified) response returned by + `post_update_chunk` will be passed to + `post_update_chunk_with_metadata`. + """ + return response, metadata + def pre_update_corpus( self, request: retriever_service.UpdateCorpusRequest, @@ -632,12 +970,35 @@ def pre_update_corpus( def post_update_corpus(self, response: retriever.Corpus) -> retriever.Corpus: """Post-rpc interceptor for update_corpus - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_corpus_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RetrieverService server but before - it is returned to user code. + it is returned to user code. This `post_update_corpus` interceptor runs + before the `post_update_corpus_with_metadata` interceptor. 
""" return response + def post_update_corpus_with_metadata( + self, + response: retriever.Corpus, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[retriever.Corpus, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_corpus + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RetrieverService server but before it is returned to user code. + + We recommend only using this `post_update_corpus_with_metadata` + interceptor in new development instead of the `post_update_corpus` interceptor. + When both interceptors are used, this `post_update_corpus_with_metadata` interceptor runs after the + `post_update_corpus` interceptor. The (possibly modified) response returned by + `post_update_corpus` will be passed to + `post_update_corpus_with_metadata`. + """ + return response, metadata + def pre_update_document( self, request: retriever_service.UpdateDocumentRequest, @@ -655,12 +1016,35 @@ def pre_update_document( def post_update_document(self, response: retriever.Document) -> retriever.Document: """Post-rpc interceptor for update_document - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_document_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RetrieverService server but before - it is returned to user code. + it is returned to user code. This `post_update_document` interceptor runs + before the `post_update_document_with_metadata` interceptor. """ return response + def post_update_document_with_metadata( + self, + response: retriever.Document, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[retriever.Document, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_document + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RetrieverService server but before it is returned to user code. + + We recommend only using this `post_update_document_with_metadata` + interceptor in new development instead of the `post_update_document` interceptor. + When both interceptors are used, this `post_update_document_with_metadata` interceptor runs after the + `post_update_document` interceptor. The (possibly modified) response returned by + `post_update_document` will be passed to + `post_update_document_with_metadata`. 
+ """ + return response, metadata + def pre_get_operation( self, request: operations_pb2.GetOperationRequest, @@ -927,6 +1311,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_batch_create_chunks(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_batch_create_chunks_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1195,6 +1583,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_batch_update_chunks(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_batch_update_chunks_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1347,6 +1739,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_chunk(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_chunk_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1495,6 +1891,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_corpus(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_corpus_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1643,6 +2043,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_document(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_document_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2108,6 +2512,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_chunk(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_chunk_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2251,6 +2659,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_corpus(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_corpus_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2394,6 +2806,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_document(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_document_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2537,6 +2953,10 @@ def __call__( json_format.Parse(response.content, pb_resp, 
ignore_unknown_fields=True) resp = self._interceptor.post_list_chunks(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_chunks_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2682,6 +3102,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_corpora(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_corpora_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2827,6 +3251,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_documents(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_documents_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2977,6 +3405,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_query_corpus(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_query_corpus_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3127,6 +3559,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_query_document(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_query_document_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3279,6 +3715,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_chunk(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_chunk_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3427,6 +3867,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_corpus(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_corpus_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3575,6 +4019,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_document(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_document_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/text_service/client.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/text_service/client.py index 7043738543b9..cf1830a62280 100644 --- 
a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/text_service/client.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/text_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -480,6 +482,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -1311,16 +1340,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -1366,16 +1399,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/text_service/transports/rest.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/text_service/transports/rest.py index 2fdc0c445f2e..260bc5d231ed 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/text_service/transports/rest.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/services/text_service/transports/rest.py @@ -126,12 +126,37 @@ def post_batch_embed_text( ) -> text_service.BatchEmbedTextResponse: """Post-rpc interceptor for batch_embed_text - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_batch_embed_text_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the TextService server but before - it is returned to user code. 
+ it is returned to user code. This `post_batch_embed_text` interceptor runs + before the `post_batch_embed_text_with_metadata` interceptor. """ return response + def post_batch_embed_text_with_metadata( + self, + response: text_service.BatchEmbedTextResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + text_service.BatchEmbedTextResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for batch_embed_text + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the TextService server but before it is returned to user code. + + We recommend only using this `post_batch_embed_text_with_metadata` + interceptor in new development instead of the `post_batch_embed_text` interceptor. + When both interceptors are used, this `post_batch_embed_text_with_metadata` interceptor runs after the + `post_batch_embed_text` interceptor. The (possibly modified) response returned by + `post_batch_embed_text` will be passed to + `post_batch_embed_text_with_metadata`. + """ + return response, metadata + def pre_count_text_tokens( self, request: text_service.CountTextTokensRequest, @@ -151,12 +176,37 @@ def post_count_text_tokens( ) -> text_service.CountTextTokensResponse: """Post-rpc interceptor for count_text_tokens - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_count_text_tokens_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the TextService server but before - it is returned to user code. + it is returned to user code. This `post_count_text_tokens` interceptor runs + before the `post_count_text_tokens_with_metadata` interceptor. """ return response + def post_count_text_tokens_with_metadata( + self, + response: text_service.CountTextTokensResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + text_service.CountTextTokensResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for count_text_tokens + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the TextService server but before it is returned to user code. + + We recommend only using this `post_count_text_tokens_with_metadata` + interceptor in new development instead of the `post_count_text_tokens` interceptor. + When both interceptors are used, this `post_count_text_tokens_with_metadata` interceptor runs after the + `post_count_text_tokens` interceptor. The (possibly modified) response returned by + `post_count_text_tokens` will be passed to + `post_count_text_tokens_with_metadata`. + """ + return response, metadata + def pre_embed_text( self, request: text_service.EmbedTextRequest, @@ -174,12 +224,35 @@ def post_embed_text( ) -> text_service.EmbedTextResponse: """Post-rpc interceptor for embed_text - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_embed_text_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the TextService server but before - it is returned to user code. + it is returned to user code. This `post_embed_text` interceptor runs + before the `post_embed_text_with_metadata` interceptor. 
""" return response + def post_embed_text_with_metadata( + self, + response: text_service.EmbedTextResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[text_service.EmbedTextResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for embed_text + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the TextService server but before it is returned to user code. + + We recommend only using this `post_embed_text_with_metadata` + interceptor in new development instead of the `post_embed_text` interceptor. + When both interceptors are used, this `post_embed_text_with_metadata` interceptor runs after the + `post_embed_text` interceptor. The (possibly modified) response returned by + `post_embed_text` will be passed to + `post_embed_text_with_metadata`. + """ + return response, metadata + def pre_generate_text( self, request: text_service.GenerateTextRequest, @@ -199,12 +272,37 @@ def post_generate_text( ) -> text_service.GenerateTextResponse: """Post-rpc interceptor for generate_text - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_generate_text_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the TextService server but before - it is returned to user code. + it is returned to user code. This `post_generate_text` interceptor runs + before the `post_generate_text_with_metadata` interceptor. """ return response + def post_generate_text_with_metadata( + self, + response: text_service.GenerateTextResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + text_service.GenerateTextResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for generate_text + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the TextService server but before it is returned to user code. + + We recommend only using this `post_generate_text_with_metadata` + interceptor in new development instead of the `post_generate_text` interceptor. + When both interceptors are used, this `post_generate_text_with_metadata` interceptor runs after the + `post_generate_text` interceptor. The (possibly modified) response returned by + `post_generate_text` will be passed to + `post_generate_text_with_metadata`. 
+ """ + return response, metadata + def pre_get_operation( self, request: operations_pb2.GetOperationRequest, @@ -471,6 +569,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_batch_embed_text(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_batch_embed_text_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -629,6 +731,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_count_text_tokens(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_count_text_tokens_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -780,6 +886,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_embed_text(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_embed_text_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -935,6 +1045,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_generate_text(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_generate_text_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/__init__.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/__init__.py index 9dd7a564142d..19e8bf5d18bb 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/__init__.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/__init__.py @@ -82,11 +82,14 @@ GroundingMetadata, GroundingSupport, LogprobsResult, + PrebuiltVoiceConfig, RetrievalMetadata, SearchEntryPoint, Segment, SemanticRetrieverConfig, + SpeechConfig, TaskType, + VoiceConfig, ) from .model import Model from .model_service import ( @@ -247,10 +250,13 @@ "GroundingMetadata", "GroundingSupport", "LogprobsResult", + "PrebuiltVoiceConfig", "RetrievalMetadata", "SearchEntryPoint", "Segment", "SemanticRetrieverConfig", + "SpeechConfig", + "VoiceConfig", "TaskType", "Model", "CreateTunedModelMetadata", diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/content.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/content.py index 6b5d37cd15ce..04712f6f88df 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/content.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/content.py @@ -371,8 +371,18 @@ class Tool(proto.Message): code_execution (google.ai.generativelanguage_v1beta.types.CodeExecution): Optional. Enables the model to execute code as part of generation. + google_search (google.ai.generativelanguage_v1beta.types.Tool.GoogleSearch): + Optional. GoogleSearch tool type. + Tool to support Google Search in Model. Powered + by Google. 
""" + class GoogleSearch(proto.Message): + r"""GoogleSearch tool type. + Tool to support Google Search in Model. Powered by Google. + + """ + function_declarations: MutableSequence["FunctionDeclaration"] = proto.RepeatedField( proto.MESSAGE, number=1, @@ -388,6 +398,11 @@ class Tool(proto.Message): number=3, message="CodeExecution", ) + google_search: GoogleSearch = proto.Field( + proto.MESSAGE, + number=4, + message=GoogleSearch, + ) class GoogleSearchRetrieval(proto.Message): @@ -560,6 +575,14 @@ class FunctionDeclaration(proto.Message): parameter. This field is a member of `oneof`_ ``_parameters``. + response (google.ai.generativelanguage_v1beta.types.Schema): + Optional. Describes the output from this + function in JSON Schema format. Reflects the + Open API 3.03 Response Object. The Schema + defines the type used for the response value of + the function. + + This field is a member of `oneof`_ ``_response``. """ name: str = proto.Field( @@ -576,6 +599,12 @@ class FunctionDeclaration(proto.Message): optional=True, message="Schema", ) + response: "Schema" = proto.Field( + proto.MESSAGE, + number=4, + optional=True, + message="Schema", + ) class FunctionCall(proto.Message): @@ -587,6 +616,10 @@ class FunctionCall(proto.Message): .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields Attributes: + id (str): + Optional. The unique id of the function call. If populated, + the client to execute the ``function_call`` and return the + response with the matching ``id``. name (str): Required. The name of the function to call. Must be a-z, A-Z, 0-9, or contain underscores @@ -598,6 +631,10 @@ class FunctionCall(proto.Message): This field is a member of `oneof`_ ``_args``. """ + id: str = proto.Field( + proto.STRING, + number=3, + ) name: str = proto.Field( proto.STRING, number=1, @@ -618,6 +655,10 @@ class FunctionResponse(proto.Message): made based on model prediction. Attributes: + id (str): + Optional. The id of the function call this response is for. + Populated by the client to match the corresponding function + call ``id``. name (str): Required. The name of the function to call. Must be a-z, A-Z, 0-9, or contain underscores @@ -627,6 +668,10 @@ class FunctionResponse(proto.Message): object format. """ + id: str = proto.Field( + proto.STRING, + number=3, + ) name: str = proto.Field( proto.STRING, number=1, diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/file.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/file.py index 387d00aafbf7..b5621298c671 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/file.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/file.py @@ -33,6 +33,8 @@ class File(proto.Message): r"""A file uploaded to the API. + Next ID: 15 + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/generative_service.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/generative_service.py index 27a344b99ec5..c66908d84188 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/generative_service.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta/types/generative_service.py @@ -28,6 +28,9 @@ manifest={ "TaskType", "GenerateContentRequest", + "PrebuiltVoiceConfig", + "VoiceConfig", + "SpeechConfig", "GenerationConfig", "SemanticRetrieverConfig", "GenerateContentResponse", @@ -103,7 +106,7 @@ class GenerateContentRequest(proto.Message): Required. The name of the ``Model`` to use for generating the completion. - Format: ``name=models/{model}``. + Format: ``models/{model}``. system_instruction (google.ai.generativelanguage_v1beta.types.Content): Optional. Developer set `system instruction(s) `__. @@ -153,8 +156,8 @@ class GenerateContentRequest(proto.Message): will use the default safety setting for that category. Harm categories HARM_CATEGORY_HATE_SPEECH, HARM_CATEGORY_SEXUALLY_EXPLICIT, - HARM_CATEGORY_DANGEROUS_CONTENT, HARM_CATEGORY_HARASSMENT - are supported. Refer to the + HARM_CATEGORY_DANGEROUS_CONTENT, HARM_CATEGORY_HARASSMENT, + HARM_CATEGORY_CIVIC_INTEGRITY are supported. Refer to the `guide `__ for detailed information on available safety settings. Also refer to the `Safety @@ -218,6 +221,61 @@ class GenerateContentRequest(proto.Message): ) +class PrebuiltVoiceConfig(proto.Message): + r"""The configuration for the prebuilt speaker to use. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + voice_name (str): + The name of the preset voice to use. + + This field is a member of `oneof`_ ``_voice_name``. + """ + + voice_name: str = proto.Field( + proto.STRING, + number=1, + optional=True, + ) + + +class VoiceConfig(proto.Message): + r"""The configuration for the voice to use. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + prebuilt_voice_config (google.ai.generativelanguage_v1beta.types.PrebuiltVoiceConfig): + The configuration for the prebuilt voice to + use. + + This field is a member of `oneof`_ ``voice_config``. + """ + + prebuilt_voice_config: "PrebuiltVoiceConfig" = proto.Field( + proto.MESSAGE, + number=1, + oneof="voice_config", + message="PrebuiltVoiceConfig", + ) + + +class SpeechConfig(proto.Message): + r"""The speech generation config. + + Attributes: + voice_config (google.ai.generativelanguage_v1beta.types.VoiceConfig): + The configuration for the speaker to use. + """ + + voice_config: "VoiceConfig" = proto.Field( + proto.MESSAGE, + number=1, + message="VoiceConfig", + ) + + class GenerationConfig(proto.Message): r"""Configuration options for model generation and outputs. Not all parameters are configurable for every model. @@ -363,8 +421,49 @@ class GenerationConfig(proto.Message): [Candidate.logprobs_result][google.ai.generativelanguage.v1beta.Candidate.logprobs_result]. This field is a member of `oneof`_ ``_logprobs``. + enable_enhanced_civic_answers (bool): + Optional. Enables enhanced civic answers. It + may not be available for all models. 
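The content.py changes above add a built-in Google Search tool type, an optional `response` schema on `FunctionDeclaration`, and `id` fields that pair a `FunctionResponse` with the `FunctionCall` it answers. A minimal sketch using the new fields (all names and values are hypothetical):

from google.ai.generativelanguage_v1beta import types as glm

# The built-in Google Search tool can now be attached directly to a Tool.
search_tool = glm.Tool(google_search=glm.Tool.GoogleSearch())

# A declaration that also describes its output via the new ``response`` schema.
get_weather = glm.FunctionDeclaration(
    name="get_weather",
    parameters=glm.Schema(type_=glm.Type.OBJECT),
    response=glm.Schema(type_=glm.Type.OBJECT),
)

# Echo a function call's id back on the matching response.
call = glm.FunctionCall(id="call-1", name="get_weather", args={"city": "Paris"})
reply = glm.FunctionResponse(id=call.id, name=call.name, response={"temp_c": 21})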
+ + This field is a member of `oneof`_ ``_enable_enhanced_civic_answers``. + response_modalities (MutableSequence[google.ai.generativelanguage_v1beta.types.GenerationConfig.Modality]): + Optional. The requested modalities of the + response. Represents the set of modalities that + the model can return, and should be expected in + the response. This is an exact match to the + modalities of the response. + + A model may have multiple combinations of + supported modalities. If the requested + modalities do not match any of the supported + combinations, an error will be returned. + + An empty list is equivalent to requesting only + text. + speech_config (google.ai.generativelanguage_v1beta.types.SpeechConfig): + Optional. The speech generation config. + + This field is a member of `oneof`_ ``_speech_config``. """ + class Modality(proto.Enum): + r"""Supported modalities of the response. + + Values: + MODALITY_UNSPECIFIED (0): + Default value. + TEXT (1): + Indicates the model should return text. + IMAGE (2): + Indicates the model should return images. + AUDIO (3): + Indicates the model should return audio. + """ + MODALITY_UNSPECIFIED = 0 + TEXT = 1 + IMAGE = 2 + AUDIO = 3 + candidate_count: int = proto.Field( proto.INT32, number=1, @@ -423,6 +522,22 @@ class GenerationConfig(proto.Message): number=18, optional=True, ) + enable_enhanced_civic_answers: bool = proto.Field( + proto.BOOL, + number=19, + optional=True, + ) + response_modalities: MutableSequence[Modality] = proto.RepeatedField( + proto.ENUM, + number=20, + enum=Modality, + ) + speech_config: "SpeechConfig" = proto.Field( + proto.MESSAGE, + number=21, + optional=True, + message="SpeechConfig", + ) class SemanticRetrieverConfig(proto.Message): @@ -537,12 +652,16 @@ class BlockReason(proto.Enum): included from the terminology blocklist. PROHIBITED_CONTENT (4): Prompt was blocked due to prohibited content. + IMAGE_SAFETY (5): + Candidates blocked due to unsafe image + generation content. """ BLOCK_REASON_UNSPECIFIED = 0 SAFETY = 1 OTHER = 2 BLOCKLIST = 3 PROHIBITED_CONTENT = 4 + IMAGE_SAFETY = 5 block_reason: "GenerateContentResponse.PromptFeedback.BlockReason" = ( proto.Field( @@ -700,6 +819,9 @@ class FinishReason(proto.Enum): MALFORMED_FUNCTION_CALL (10): The function call generated by the model is invalid. + IMAGE_SAFETY (11): + Token generation stopped because generated + images contain safety violations. """ FINISH_REASON_UNSPECIFIED = 0 STOP = 1 @@ -712,6 +834,7 @@ class FinishReason(proto.Enum): PROHIBITED_CONTENT = 8 SPII = 9 MALFORMED_FUNCTION_CALL = 10 + IMAGE_SAFETY = 11 index: int = proto.Field( proto.INT32, diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/gapic_version.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/gapic_version.py index 0b6dbde2b051..a22e7bbe7e4a 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/gapic_version.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
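generative_service.py now exposes response modalities and a speech generation config on `GenerationConfig`, backed by the new `SpeechConfig`, `VoiceConfig` and `PrebuiltVoiceConfig` messages. A minimal request sketch (model and voice names are hypothetical placeholders, and not every model supports these fields):

from google.ai.generativelanguage_v1beta import types as glm

config = glm.GenerationConfig(
    response_modalities=[glm.GenerationConfig.Modality.AUDIO],
    speech_config=glm.SpeechConfig(
        voice_config=glm.VoiceConfig(
            prebuilt_voice_config=glm.PrebuiltVoiceConfig(voice_name="Kore"),
        ),
    ),
)

request = glm.GenerateContentRequest(
    model="models/some-audio-capable-model",
    contents=[glm.Content(role="user", parts=[glm.Part(text="Say hello.")])],
    generation_config=config,
)

The same file also adds `IMAGE_SAFETY` to both `PromptFeedback.BlockReason` and `Candidate.FinishReason`, so code that switches exhaustively on those enums should account for the new value.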
# -__version__ = "0.6.14" # {x-release-please-version} +__version__ = "0.6.16" # {x-release-please-version} diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/services/discuss_service/client.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/services/discuss_service/client.py index 4d2db83aacba..8aacad49752f 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/services/discuss_service/client.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/services/discuss_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -480,6 +482,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/services/discuss_service/transports/rest.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/services/discuss_service/transports/rest.py index a3f6d345ef27..8afd439d6dc1 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/services/discuss_service/transports/rest.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/services/discuss_service/transports/rest.py @@ -110,12 +110,38 @@ def post_count_message_tokens( ) -> discuss_service.CountMessageTokensResponse: """Post-rpc interceptor for count_message_tokens - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_count_message_tokens_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DiscussService server but before - it is returned to user code. + it is returned to user code. This `post_count_message_tokens` interceptor runs + before the `post_count_message_tokens_with_metadata` interceptor. """ return response + def post_count_message_tokens_with_metadata( + self, + response: discuss_service.CountMessageTokensResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + discuss_service.CountMessageTokensResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for count_message_tokens + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DiscussService server but before it is returned to user code. 
+ + We recommend only using this `post_count_message_tokens_with_metadata` + interceptor in new development instead of the `post_count_message_tokens` interceptor. + When both interceptors are used, this `post_count_message_tokens_with_metadata` interceptor runs after the + `post_count_message_tokens` interceptor. The (possibly modified) response returned by + `post_count_message_tokens` will be passed to + `post_count_message_tokens_with_metadata`. + """ + return response, metadata + def pre_generate_message( self, request: discuss_service.GenerateMessageRequest, @@ -135,12 +161,37 @@ def post_generate_message( ) -> discuss_service.GenerateMessageResponse: """Post-rpc interceptor for generate_message - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_generate_message_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DiscussService server but before - it is returned to user code. + it is returned to user code. This `post_generate_message` interceptor runs + before the `post_generate_message_with_metadata` interceptor. """ return response + def post_generate_message_with_metadata( + self, + response: discuss_service.GenerateMessageResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + discuss_service.GenerateMessageResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for generate_message + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DiscussService server but before it is returned to user code. + + We recommend only using this `post_generate_message_with_metadata` + interceptor in new development instead of the `post_generate_message` interceptor. + When both interceptors are used, this `post_generate_message_with_metadata` interceptor runs after the + `post_generate_message` interceptor. The (possibly modified) response returned by + `post_generate_message` will be passed to + `post_generate_message_with_metadata`. 
+ """ + return response, metadata + @dataclasses.dataclass class DiscussServiceRestStub: @@ -364,6 +415,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_count_message_tokens(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_count_message_tokens_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -520,6 +575,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_generate_message(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_generate_message_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/services/model_service/client.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/services/model_service/client.py index 81b78eb9aebb..f4d592ac197c 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/services/model_service/client.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/services/model_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -477,6 +479,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/services/model_service/transports/rest.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/services/model_service/transports/rest.py index 87bc85f49a7e..2ff1f5d178f4 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/services/model_service/transports/rest.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/services/model_service/transports/rest.py @@ -105,12 +105,33 @@ def pre_get_model( def post_get_model(self, response: model.Model) -> model.Model: """Post-rpc interceptor for get_model - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_model_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response after it is returned by the ModelService server but before - it is returned to user code. + it is returned to user code. This `post_get_model` interceptor runs + before the `post_get_model_with_metadata` interceptor. """ return response + def post_get_model_with_metadata( + self, response: model.Model, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[model.Model, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_model + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ModelService server but before it is returned to user code. + + We recommend only using this `post_get_model_with_metadata` + interceptor in new development instead of the `post_get_model` interceptor. + When both interceptors are used, this `post_get_model_with_metadata` interceptor runs after the + `post_get_model` interceptor. The (possibly modified) response returned by + `post_get_model` will be passed to + `post_get_model_with_metadata`. + """ + return response, metadata + def pre_list_models( self, request: model_service.ListModelsRequest, @@ -130,12 +151,37 @@ def post_list_models( ) -> model_service.ListModelsResponse: """Post-rpc interceptor for list_models - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_models_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ModelService server but before - it is returned to user code. + it is returned to user code. This `post_list_models` interceptor runs + before the `post_list_models_with_metadata` interceptor. """ return response + def post_list_models_with_metadata( + self, + response: model_service.ListModelsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + model_service.ListModelsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_models + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ModelService server but before it is returned to user code. + + We recommend only using this `post_list_models_with_metadata` + interceptor in new development instead of the `post_list_models` interceptor. + When both interceptors are used, this `post_list_models_with_metadata` interceptor runs after the + `post_list_models` interceptor. The (possibly modified) response returned by + `post_list_models` will be passed to + `post_list_models_with_metadata`. 
+ """ + return response, metadata + @dataclasses.dataclass class ModelServiceRestStub: @@ -346,6 +392,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_model(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_model_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -492,6 +542,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_models(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_models_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/services/text_service/client.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/services/text_service/client.py index 0fffeee6b68b..21093740f96a 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/services/text_service/client.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/services/text_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -478,6 +480,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/services/text_service/transports/rest.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/services/text_service/transports/rest.py index ece0f776739c..b0af51a7ef65 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/services/text_service/transports/rest.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta2/services/text_service/transports/rest.py @@ -107,12 +107,35 @@ def post_embed_text( ) -> text_service.EmbedTextResponse: """Post-rpc interceptor for embed_text - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_embed_text_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the TextService server but before - it is returned to user code. 
+ it is returned to user code. This `post_embed_text` interceptor runs + before the `post_embed_text_with_metadata` interceptor. """ return response + def post_embed_text_with_metadata( + self, + response: text_service.EmbedTextResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[text_service.EmbedTextResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for embed_text + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the TextService server but before it is returned to user code. + + We recommend only using this `post_embed_text_with_metadata` + interceptor in new development instead of the `post_embed_text` interceptor. + When both interceptors are used, this `post_embed_text_with_metadata` interceptor runs after the + `post_embed_text` interceptor. The (possibly modified) response returned by + `post_embed_text` will be passed to + `post_embed_text_with_metadata`. + """ + return response, metadata + def pre_generate_text( self, request: text_service.GenerateTextRequest, @@ -132,12 +155,37 @@ def post_generate_text( ) -> text_service.GenerateTextResponse: """Post-rpc interceptor for generate_text - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_generate_text_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the TextService server but before - it is returned to user code. + it is returned to user code. This `post_generate_text` interceptor runs + before the `post_generate_text_with_metadata` interceptor. """ return response + def post_generate_text_with_metadata( + self, + response: text_service.GenerateTextResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + text_service.GenerateTextResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for generate_text + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the TextService server but before it is returned to user code. + + We recommend only using this `post_generate_text_with_metadata` + interceptor in new development instead of the `post_generate_text` interceptor. + When both interceptors are used, this `post_generate_text_with_metadata` interceptor runs after the + `post_generate_text` interceptor. The (possibly modified) response returned by + `post_generate_text` will be passed to + `post_generate_text_with_metadata`. 
+ """ + return response, metadata + @dataclasses.dataclass class TextServiceRestStub: @@ -354,6 +402,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_embed_text(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_embed_text_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -509,6 +561,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_generate_text(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_generate_text_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/gapic_version.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/gapic_version.py index 0b6dbde2b051..a22e7bbe7e4a 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/gapic_version.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.6.14" # {x-release-please-version} +__version__ = "0.6.16" # {x-release-please-version} diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/services/discuss_service/client.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/services/discuss_service/client.py index 8b0c864b00f4..e42243c00e8b 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/services/discuss_service/client.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/services/discuss_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -482,6 +484,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. 
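Throughout this diff each REST interceptor gains a `post_<rpc>_with_metadata` hook that receives the decoded response together with the HTTP response headers. A hedged sketch of how a subclass might use the new hook, assuming the conventional `TextServiceRestInterceptor` class name from the v1beta2 text_service REST transport shown above:

from typing import Sequence, Tuple, Union

from google.ai.generativelanguage_v1beta2.services.text_service.transports.rest import (
    TextServiceRestInterceptor,
)
from google.ai.generativelanguage_v1beta2.types import text_service


class HeaderLoggingInterceptor(TextServiceRestInterceptor):
    def post_generate_text_with_metadata(
        self,
        response: text_service.GenerateTextResponse,
        metadata: Sequence[Tuple[str, Union[str, bytes]]],
    ) -> Tuple[
        text_service.GenerateTextResponse,
        Sequence[Tuple[str, Union[str, bytes]]],
    ]:
        # `metadata` is built from the HTTP response headers by the __call__
        # wiring added in this diff; `response` has already passed through the
        # legacy post_generate_text hook.
        for key, value in metadata:
            print(f"generate_text response header: {key}={value}")
        return response, metadata

The interceptor instance would typically be handed to the REST transport (for example via its `interceptor=` constructor argument) and the transport passed to the client; that wiring follows the existing generated-transport constructors and is not changed by this diff.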
diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/services/discuss_service/transports/rest.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/services/discuss_service/transports/rest.py index 19d8130c40e8..565a9e6464ce 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/services/discuss_service/transports/rest.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/services/discuss_service/transports/rest.py @@ -111,12 +111,38 @@ def post_count_message_tokens( ) -> discuss_service.CountMessageTokensResponse: """Post-rpc interceptor for count_message_tokens - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_count_message_tokens_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DiscussService server but before - it is returned to user code. + it is returned to user code. This `post_count_message_tokens` interceptor runs + before the `post_count_message_tokens_with_metadata` interceptor. """ return response + def post_count_message_tokens_with_metadata( + self, + response: discuss_service.CountMessageTokensResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + discuss_service.CountMessageTokensResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for count_message_tokens + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DiscussService server but before it is returned to user code. + + We recommend only using this `post_count_message_tokens_with_metadata` + interceptor in new development instead of the `post_count_message_tokens` interceptor. + When both interceptors are used, this `post_count_message_tokens_with_metadata` interceptor runs after the + `post_count_message_tokens` interceptor. The (possibly modified) response returned by + `post_count_message_tokens` will be passed to + `post_count_message_tokens_with_metadata`. + """ + return response, metadata + def pre_generate_message( self, request: discuss_service.GenerateMessageRequest, @@ -136,12 +162,37 @@ def post_generate_message( ) -> discuss_service.GenerateMessageResponse: """Post-rpc interceptor for generate_message - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_generate_message_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DiscussService server but before - it is returned to user code. + it is returned to user code. This `post_generate_message` interceptor runs + before the `post_generate_message_with_metadata` interceptor. """ return response + def post_generate_message_with_metadata( + self, + response: discuss_service.GenerateMessageResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + discuss_service.GenerateMessageResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for generate_message + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DiscussService server but before it is returned to user code. + + We recommend only using this `post_generate_message_with_metadata` + interceptor in new development instead of the `post_generate_message` interceptor. 
+ When both interceptors are used, this `post_generate_message_with_metadata` interceptor runs after the + `post_generate_message` interceptor. The (possibly modified) response returned by + `post_generate_message` will be passed to + `post_generate_message_with_metadata`. + """ + return response, metadata + @dataclasses.dataclass class DiscussServiceRestStub: @@ -365,6 +416,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_count_message_tokens(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_count_message_tokens_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -521,6 +576,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_generate_message(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_generate_message_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/services/model_service/client.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/services/model_service/client.py index 0f6ab9afaf39..3ae12ebdc147 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/services/model_service/client.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/services/model_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -500,6 +502,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. 
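Each client gains the private `_add_cred_info_for_auth_errors` helper shown above. Its observable effect, sketched below under the assumption that the surrounding generated code invokes it on failed calls and that google-auth >= 2.35.0 supplies `get_cred_info`, is that 401/403/404 errors reach user code with a JSON-encoded credential description appended to their details (the model name below is a placeholder):

from google.api_core import exceptions as core_exceptions
from google.ai import generativelanguage_v1beta3 as glm

client = glm.ModelServiceClient()  # assumes default credentials are configured

try:
    client.get_model(name="models/does-not-exist")  # placeholder name, expected 404
except core_exceptions.GoogleAPICallError as exc:
    for detail in exc.details:
        # The appended credential info arrives as a JSON string; other detail
        # entries from the API may also be present.
        if isinstance(detail, str):
            print(detail)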
diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/services/model_service/transports/rest.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/services/model_service/transports/rest.py index ba17fc8955bd..0b1be8fa076a 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/services/model_service/transports/rest.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/services/model_service/transports/rest.py @@ -149,12 +149,35 @@ def post_create_tuned_model( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_tuned_model - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_tuned_model_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ModelService server but before - it is returned to user code. + it is returned to user code. This `post_create_tuned_model` interceptor runs + before the `post_create_tuned_model_with_metadata` interceptor. """ return response + def post_create_tuned_model_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_tuned_model + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ModelService server but before it is returned to user code. + + We recommend only using this `post_create_tuned_model_with_metadata` + interceptor in new development instead of the `post_create_tuned_model` interceptor. + When both interceptors are used, this `post_create_tuned_model_with_metadata` interceptor runs after the + `post_create_tuned_model` interceptor. The (possibly modified) response returned by + `post_create_tuned_model` will be passed to + `post_create_tuned_model_with_metadata`. + """ + return response, metadata + def pre_delete_tuned_model( self, request: model_service.DeleteTunedModelRequest, @@ -184,12 +207,33 @@ def pre_get_model( def post_get_model(self, response: model.Model) -> model.Model: """Post-rpc interceptor for get_model - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_model_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ModelService server but before - it is returned to user code. + it is returned to user code. This `post_get_model` interceptor runs + before the `post_get_model_with_metadata` interceptor. """ return response + def post_get_model_with_metadata( + self, response: model.Model, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[model.Model, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_model + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ModelService server but before it is returned to user code. + + We recommend only using this `post_get_model_with_metadata` + interceptor in new development instead of the `post_get_model` interceptor. + When both interceptors are used, this `post_get_model_with_metadata` interceptor runs after the + `post_get_model` interceptor. The (possibly modified) response returned by + `post_get_model` will be passed to + `post_get_model_with_metadata`. 
+ """ + return response, metadata + def pre_get_tuned_model( self, request: model_service.GetTunedModelRequest, @@ -209,12 +253,35 @@ def post_get_tuned_model( ) -> tuned_model.TunedModel: """Post-rpc interceptor for get_tuned_model - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_tuned_model_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ModelService server but before - it is returned to user code. + it is returned to user code. This `post_get_tuned_model` interceptor runs + before the `post_get_tuned_model_with_metadata` interceptor. """ return response + def post_get_tuned_model_with_metadata( + self, + response: tuned_model.TunedModel, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[tuned_model.TunedModel, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_tuned_model + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ModelService server but before it is returned to user code. + + We recommend only using this `post_get_tuned_model_with_metadata` + interceptor in new development instead of the `post_get_tuned_model` interceptor. + When both interceptors are used, this `post_get_tuned_model_with_metadata` interceptor runs after the + `post_get_tuned_model` interceptor. The (possibly modified) response returned by + `post_get_tuned_model` will be passed to + `post_get_tuned_model_with_metadata`. + """ + return response, metadata + def pre_list_models( self, request: model_service.ListModelsRequest, @@ -234,12 +301,37 @@ def post_list_models( ) -> model_service.ListModelsResponse: """Post-rpc interceptor for list_models - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_models_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ModelService server but before - it is returned to user code. + it is returned to user code. This `post_list_models` interceptor runs + before the `post_list_models_with_metadata` interceptor. """ return response + def post_list_models_with_metadata( + self, + response: model_service.ListModelsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + model_service.ListModelsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_models + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ModelService server but before it is returned to user code. + + We recommend only using this `post_list_models_with_metadata` + interceptor in new development instead of the `post_list_models` interceptor. + When both interceptors are used, this `post_list_models_with_metadata` interceptor runs after the + `post_list_models` interceptor. The (possibly modified) response returned by + `post_list_models` will be passed to + `post_list_models_with_metadata`. + """ + return response, metadata + def pre_list_tuned_models( self, request: model_service.ListTunedModelsRequest, @@ -259,12 +351,37 @@ def post_list_tuned_models( ) -> model_service.ListTunedModelsResponse: """Post-rpc interceptor for list_tuned_models - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_tuned_models_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response after it is returned by the ModelService server but before - it is returned to user code. + it is returned to user code. This `post_list_tuned_models` interceptor runs + before the `post_list_tuned_models_with_metadata` interceptor. """ return response + def post_list_tuned_models_with_metadata( + self, + response: model_service.ListTunedModelsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + model_service.ListTunedModelsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_tuned_models + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ModelService server but before it is returned to user code. + + We recommend only using this `post_list_tuned_models_with_metadata` + interceptor in new development instead of the `post_list_tuned_models` interceptor. + When both interceptors are used, this `post_list_tuned_models_with_metadata` interceptor runs after the + `post_list_tuned_models` interceptor. The (possibly modified) response returned by + `post_list_tuned_models` will be passed to + `post_list_tuned_models_with_metadata`. + """ + return response, metadata + def pre_update_tuned_model( self, request: model_service.UpdateTunedModelRequest, @@ -284,12 +401,35 @@ def post_update_tuned_model( ) -> gag_tuned_model.TunedModel: """Post-rpc interceptor for update_tuned_model - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_tuned_model_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ModelService server but before - it is returned to user code. + it is returned to user code. This `post_update_tuned_model` interceptor runs + before the `post_update_tuned_model_with_metadata` interceptor. """ return response + def post_update_tuned_model_with_metadata( + self, + response: gag_tuned_model.TunedModel, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gag_tuned_model.TunedModel, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_tuned_model + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ModelService server but before it is returned to user code. + + We recommend only using this `post_update_tuned_model_with_metadata` + interceptor in new development instead of the `post_update_tuned_model` interceptor. + When both interceptors are used, this `post_update_tuned_model_with_metadata` interceptor runs after the + `post_update_tuned_model` interceptor. The (possibly modified) response returned by + `post_update_tuned_model` will be passed to + `post_update_tuned_model_with_metadata`. 
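The docstrings above spell out the ordering contract between the legacy `post_<rpc>` hooks and the new `post_<rpc>_with_metadata` hooks: the legacy hook runs first, and its (possibly modified) return value is what the metadata-aware hook receives. A small sketch of both hooks on one subclass, assuming the conventional `ModelServiceRestInterceptor` class name from the v1beta3 model_service REST transport:

from typing import Sequence, Tuple, Union

from google.ai.generativelanguage_v1beta3.services.model_service.transports.rest import (
    ModelServiceRestInterceptor,
)
from google.ai.generativelanguage_v1beta3.types import model


class AuditInterceptor(ModelServiceRestInterceptor):
    def post_get_model(self, response: model.Model) -> model.Model:
        # Legacy hook: still supported, runs first.
        response.display_name = f"{response.display_name} [audited]"
        return response

    def post_get_model_with_metadata(
        self,
        response: model.Model,
        metadata: Sequence[Tuple[str, Union[str, bytes]]],
    ) -> Tuple[model.Model, Sequence[Tuple[str, Union[str, bytes]]]]:
        # Runs second; `response` is the value returned by post_get_model above
        # and `metadata` carries the HTTP response headers.
        return response, metadata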
+ """ + return response, metadata + @dataclasses.dataclass class ModelServiceRestStub: @@ -532,6 +672,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_tuned_model(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_tuned_model_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -785,6 +929,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_model(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_model_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -928,6 +1076,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_tuned_model(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_tuned_model_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1074,6 +1226,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_models(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_models_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1220,6 +1376,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_tuned_models(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_tuned_models_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1372,6 +1532,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_tuned_model(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_tuned_model_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/services/permission_service/client.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/services/permission_service/client.py index c3ce5cd4cb63..5a418e18d57b 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/services/permission_service/client.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/services/permission_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -503,6 +505,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. 
return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/services/permission_service/transports/rest.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/services/permission_service/transports/rest.py index b97f34e8cd62..e011a16734d3 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/services/permission_service/transports/rest.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/services/permission_service/transports/rest.py @@ -142,12 +142,35 @@ def post_create_permission( ) -> gag_permission.Permission: """Post-rpc interceptor for create_permission - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_permission_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the PermissionService server but before - it is returned to user code. + it is returned to user code. This `post_create_permission` interceptor runs + before the `post_create_permission_with_metadata` interceptor. """ return response + def post_create_permission_with_metadata( + self, + response: gag_permission.Permission, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gag_permission.Permission, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_permission + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the PermissionService server but before it is returned to user code. + + We recommend only using this `post_create_permission_with_metadata` + interceptor in new development instead of the `post_create_permission` interceptor. + When both interceptors are used, this `post_create_permission_with_metadata` interceptor runs after the + `post_create_permission` interceptor. The (possibly modified) response returned by + `post_create_permission` will be passed to + `post_create_permission_with_metadata`. + """ + return response, metadata + def pre_delete_permission( self, request: permission_service.DeletePermissionRequest, @@ -182,12 +205,35 @@ def post_get_permission( ) -> permission.Permission: """Post-rpc interceptor for get_permission - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_permission_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the PermissionService server but before - it is returned to user code. + it is returned to user code. 
This `post_get_permission` interceptor runs + before the `post_get_permission_with_metadata` interceptor. """ return response + def post_get_permission_with_metadata( + self, + response: permission.Permission, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[permission.Permission, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_permission + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the PermissionService server but before it is returned to user code. + + We recommend only using this `post_get_permission_with_metadata` + interceptor in new development instead of the `post_get_permission` interceptor. + When both interceptors are used, this `post_get_permission_with_metadata` interceptor runs after the + `post_get_permission` interceptor. The (possibly modified) response returned by + `post_get_permission` will be passed to + `post_get_permission_with_metadata`. + """ + return response, metadata + def pre_list_permissions( self, request: permission_service.ListPermissionsRequest, @@ -208,12 +254,38 @@ def post_list_permissions( ) -> permission_service.ListPermissionsResponse: """Post-rpc interceptor for list_permissions - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_permissions_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the PermissionService server but before - it is returned to user code. + it is returned to user code. This `post_list_permissions` interceptor runs + before the `post_list_permissions_with_metadata` interceptor. """ return response + def post_list_permissions_with_metadata( + self, + response: permission_service.ListPermissionsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + permission_service.ListPermissionsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_permissions + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the PermissionService server but before it is returned to user code. + + We recommend only using this `post_list_permissions_with_metadata` + interceptor in new development instead of the `post_list_permissions` interceptor. + When both interceptors are used, this `post_list_permissions_with_metadata` interceptor runs after the + `post_list_permissions` interceptor. The (possibly modified) response returned by + `post_list_permissions` will be passed to + `post_list_permissions_with_metadata`. + """ + return response, metadata + def pre_transfer_ownership( self, request: permission_service.TransferOwnershipRequest, @@ -234,12 +306,38 @@ def post_transfer_ownership( ) -> permission_service.TransferOwnershipResponse: """Post-rpc interceptor for transfer_ownership - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_transfer_ownership_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the PermissionService server but before - it is returned to user code. + it is returned to user code. This `post_transfer_ownership` interceptor runs + before the `post_transfer_ownership_with_metadata` interceptor. 
""" return response + def post_transfer_ownership_with_metadata( + self, + response: permission_service.TransferOwnershipResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + permission_service.TransferOwnershipResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for transfer_ownership + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the PermissionService server but before it is returned to user code. + + We recommend only using this `post_transfer_ownership_with_metadata` + interceptor in new development instead of the `post_transfer_ownership` interceptor. + When both interceptors are used, this `post_transfer_ownership_with_metadata` interceptor runs after the + `post_transfer_ownership` interceptor. The (possibly modified) response returned by + `post_transfer_ownership` will be passed to + `post_transfer_ownership_with_metadata`. + """ + return response, metadata + def pre_update_permission( self, request: permission_service.UpdatePermissionRequest, @@ -260,12 +358,35 @@ def post_update_permission( ) -> gag_permission.Permission: """Post-rpc interceptor for update_permission - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_permission_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the PermissionService server but before - it is returned to user code. + it is returned to user code. This `post_update_permission` interceptor runs + before the `post_update_permission_with_metadata` interceptor. """ return response + def post_update_permission_with_metadata( + self, + response: gag_permission.Permission, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gag_permission.Permission, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_permission + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the PermissionService server but before it is returned to user code. + + We recommend only using this `post_update_permission_with_metadata` + interceptor in new development instead of the `post_update_permission` interceptor. + When both interceptors are used, this `post_update_permission_with_metadata` interceptor runs after the + `post_update_permission` interceptor. The (possibly modified) response returned by + `post_update_permission` will be passed to + `post_update_permission_with_metadata`. 
+ """ + return response, metadata + @dataclasses.dataclass class PermissionServiceRestStub: @@ -503,6 +624,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_permission(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_permission_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -777,6 +902,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_permission(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_permission_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -922,6 +1051,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_permissions(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_permissions_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1074,6 +1207,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_transfer_ownership(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_transfer_ownership_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1248,6 +1385,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_permission(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_permission_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/services/text_service/client.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/services/text_service/client.py index 7e3cdb3458fe..45b795163bab 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/services/text_service/client.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/services/text_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -480,6 +482,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. 
+ """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. diff --git a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/services/text_service/transports/rest.py b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/services/text_service/transports/rest.py index 033bddb0f427..9968dbb7677d 100644 --- a/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/services/text_service/transports/rest.py +++ b/packages/google-ai-generativelanguage/google/ai/generativelanguage_v1beta3/services/text_service/transports/rest.py @@ -126,12 +126,37 @@ def post_batch_embed_text( ) -> text_service.BatchEmbedTextResponse: """Post-rpc interceptor for batch_embed_text - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_batch_embed_text_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the TextService server but before - it is returned to user code. + it is returned to user code. This `post_batch_embed_text` interceptor runs + before the `post_batch_embed_text_with_metadata` interceptor. """ return response + def post_batch_embed_text_with_metadata( + self, + response: text_service.BatchEmbedTextResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + text_service.BatchEmbedTextResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for batch_embed_text + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the TextService server but before it is returned to user code. + + We recommend only using this `post_batch_embed_text_with_metadata` + interceptor in new development instead of the `post_batch_embed_text` interceptor. + When both interceptors are used, this `post_batch_embed_text_with_metadata` interceptor runs after the + `post_batch_embed_text` interceptor. The (possibly modified) response returned by + `post_batch_embed_text` will be passed to + `post_batch_embed_text_with_metadata`. + """ + return response, metadata + def pre_count_text_tokens( self, request: text_service.CountTextTokensRequest, @@ -151,12 +176,37 @@ def post_count_text_tokens( ) -> text_service.CountTextTokensResponse: """Post-rpc interceptor for count_text_tokens - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_count_text_tokens_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the TextService server but before - it is returned to user code. + it is returned to user code. This `post_count_text_tokens` interceptor runs + before the `post_count_text_tokens_with_metadata` interceptor. 
""" return response + def post_count_text_tokens_with_metadata( + self, + response: text_service.CountTextTokensResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + text_service.CountTextTokensResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for count_text_tokens + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the TextService server but before it is returned to user code. + + We recommend only using this `post_count_text_tokens_with_metadata` + interceptor in new development instead of the `post_count_text_tokens` interceptor. + When both interceptors are used, this `post_count_text_tokens_with_metadata` interceptor runs after the + `post_count_text_tokens` interceptor. The (possibly modified) response returned by + `post_count_text_tokens` will be passed to + `post_count_text_tokens_with_metadata`. + """ + return response, metadata + def pre_embed_text( self, request: text_service.EmbedTextRequest, @@ -174,12 +224,35 @@ def post_embed_text( ) -> text_service.EmbedTextResponse: """Post-rpc interceptor for embed_text - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_embed_text_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the TextService server but before - it is returned to user code. + it is returned to user code. This `post_embed_text` interceptor runs + before the `post_embed_text_with_metadata` interceptor. """ return response + def post_embed_text_with_metadata( + self, + response: text_service.EmbedTextResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[text_service.EmbedTextResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for embed_text + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the TextService server but before it is returned to user code. + + We recommend only using this `post_embed_text_with_metadata` + interceptor in new development instead of the `post_embed_text` interceptor. + When both interceptors are used, this `post_embed_text_with_metadata` interceptor runs after the + `post_embed_text` interceptor. The (possibly modified) response returned by + `post_embed_text` will be passed to + `post_embed_text_with_metadata`. + """ + return response, metadata + def pre_generate_text( self, request: text_service.GenerateTextRequest, @@ -199,12 +272,37 @@ def post_generate_text( ) -> text_service.GenerateTextResponse: """Post-rpc interceptor for generate_text - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_generate_text_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the TextService server but before - it is returned to user code. + it is returned to user code. This `post_generate_text` interceptor runs + before the `post_generate_text_with_metadata` interceptor. """ return response + def post_generate_text_with_metadata( + self, + response: text_service.GenerateTextResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + text_service.GenerateTextResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for generate_text + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the TextService server but before it is returned to user code. 
+ + We recommend only using this `post_generate_text_with_metadata` + interceptor in new development instead of the `post_generate_text` interceptor. + When both interceptors are used, this `post_generate_text_with_metadata` interceptor runs after the + `post_generate_text` interceptor. The (possibly modified) response returned by + `post_generate_text` will be passed to + `post_generate_text_with_metadata`. + """ + return response, metadata + @dataclasses.dataclass class TextServiceRestStub: @@ -421,6 +519,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_batch_embed_text(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_batch_embed_text_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -579,6 +681,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_count_text_tokens(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_count_text_tokens_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -730,6 +836,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_embed_text(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_embed_text_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -885,6 +995,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_generate_text(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_generate_text_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_cache_service_create_cached_content_async.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_cache_service_create_cached_content_async.py new file mode 100644 index 000000000000..99169d1f155c --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_cache_service_create_cached_content_async.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateCachedContent +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1alpha_generated_CacheService_CreateCachedContent_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1alpha + + +async def sample_create_cached_content(): + # Create a client + client = generativelanguage_v1alpha.CacheServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.CreateCachedContentRequest( + ) + + # Make the request + response = await client.create_cached_content(request=request) + + # Handle the response + print(response) + +# [END generativelanguage_v1alpha_generated_CacheService_CreateCachedContent_async] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_cache_service_create_cached_content_sync.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_cache_service_create_cached_content_sync.py new file mode 100644 index 000000000000..a1b3a5386fce --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_cache_service_create_cached_content_sync.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateCachedContent +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1alpha_generated_CacheService_CreateCachedContent_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1alpha + + +def sample_create_cached_content(): + # Create a client + client = generativelanguage_v1alpha.CacheServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.CreateCachedContentRequest( + ) + + # Make the request + response = client.create_cached_content(request=request) + + # Handle the response + print(response) + +# [END generativelanguage_v1alpha_generated_CacheService_CreateCachedContent_sync] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_cache_service_delete_cached_content_async.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_cache_service_delete_cached_content_async.py new file mode 100644 index 000000000000..af9ef6361c2f --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_cache_service_delete_cached_content_async.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteCachedContent +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1alpha_generated_CacheService_DeleteCachedContent_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1alpha + + +async def sample_delete_cached_content(): + # Create a client + client = generativelanguage_v1alpha.CacheServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.DeleteCachedContentRequest( + name="name_value", + ) + + # Make the request + await client.delete_cached_content(request=request) + + +# [END generativelanguage_v1alpha_generated_CacheService_DeleteCachedContent_async] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_cache_service_delete_cached_content_sync.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_cache_service_delete_cached_content_sync.py new file mode 100644 index 000000000000..9735af0fb8ea --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_cache_service_delete_cached_content_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteCachedContent +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1alpha_generated_CacheService_DeleteCachedContent_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1alpha + + +def sample_delete_cached_content(): + # Create a client + client = generativelanguage_v1alpha.CacheServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.DeleteCachedContentRequest( + name="name_value", + ) + + # Make the request + client.delete_cached_content(request=request) + + +# [END generativelanguage_v1alpha_generated_CacheService_DeleteCachedContent_sync] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_cache_service_get_cached_content_async.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_cache_service_get_cached_content_async.py new file mode 100644 index 000000000000..2b328840fe16 --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_cache_service_get_cached_content_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetCachedContent +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1alpha_generated_CacheService_GetCachedContent_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1alpha + + +async def sample_get_cached_content(): + # Create a client + client = generativelanguage_v1alpha.CacheServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.GetCachedContentRequest( + name="name_value", + ) + + # Make the request + response = await client.get_cached_content(request=request) + + # Handle the response + print(response) + +# [END generativelanguage_v1alpha_generated_CacheService_GetCachedContent_async] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_cache_service_get_cached_content_sync.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_cache_service_get_cached_content_sync.py new file mode 100644 index 000000000000..2ed52f862be9 --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_cache_service_get_cached_content_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetCachedContent +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1alpha_generated_CacheService_GetCachedContent_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1alpha + + +def sample_get_cached_content(): + # Create a client + client = generativelanguage_v1alpha.CacheServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.GetCachedContentRequest( + name="name_value", + ) + + # Make the request + response = client.get_cached_content(request=request) + + # Handle the response + print(response) + +# [END generativelanguage_v1alpha_generated_CacheService_GetCachedContent_sync] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_cache_service_list_cached_contents_async.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_cache_service_list_cached_contents_async.py new file mode 100644 index 000000000000..1641340b03eb --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_cache_service_list_cached_contents_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListCachedContents +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1alpha_generated_CacheService_ListCachedContents_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1alpha + + +async def sample_list_cached_contents(): + # Create a client + client = generativelanguage_v1alpha.CacheServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.ListCachedContentsRequest( + ) + + # Make the request + page_result = client.list_cached_contents(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END generativelanguage_v1alpha_generated_CacheService_ListCachedContents_async] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_cache_service_list_cached_contents_sync.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_cache_service_list_cached_contents_sync.py new file mode 100644 index 000000000000..48987878ea18 --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_cache_service_list_cached_contents_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListCachedContents +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1alpha_generated_CacheService_ListCachedContents_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1alpha + + +def sample_list_cached_contents(): + # Create a client + client = generativelanguage_v1alpha.CacheServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.ListCachedContentsRequest( + ) + + # Make the request + page_result = client.list_cached_contents(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END generativelanguage_v1alpha_generated_CacheService_ListCachedContents_sync] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_cache_service_update_cached_content_async.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_cache_service_update_cached_content_async.py new file mode 100644 index 000000000000..75ccf93a8333 --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_cache_service_update_cached_content_async.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateCachedContent +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1alpha_generated_CacheService_UpdateCachedContent_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1alpha + + +async def sample_update_cached_content(): + # Create a client + client = generativelanguage_v1alpha.CacheServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.UpdateCachedContentRequest( + ) + + # Make the request + response = await client.update_cached_content(request=request) + + # Handle the response + print(response) + +# [END generativelanguage_v1alpha_generated_CacheService_UpdateCachedContent_async] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_cache_service_update_cached_content_sync.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_cache_service_update_cached_content_sync.py new file mode 100644 index 000000000000..8d08f40b1964 --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_cache_service_update_cached_content_sync.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateCachedContent +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1alpha_generated_CacheService_UpdateCachedContent_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1alpha + + +def sample_update_cached_content(): + # Create a client + client = generativelanguage_v1alpha.CacheServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.UpdateCachedContentRequest( + ) + + # Make the request + response = client.update_cached_content(request=request) + + # Handle the response + print(response) + +# [END generativelanguage_v1alpha_generated_CacheService_UpdateCachedContent_sync] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_discuss_service_count_message_tokens_async.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_discuss_service_count_message_tokens_async.py new file mode 100644 index 000000000000..d500fa672e3f --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_discuss_service_count_message_tokens_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CountMessageTokens +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1alpha_generated_DiscussService_CountMessageTokens_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1alpha + + +async def sample_count_message_tokens(): + # Create a client + client = generativelanguage_v1alpha.DiscussServiceAsyncClient() + + # Initialize request argument(s) + prompt = generativelanguage_v1alpha.MessagePrompt() + prompt.messages.content = "content_value" + + request = generativelanguage_v1alpha.CountMessageTokensRequest( + model="model_value", + prompt=prompt, + ) + + # Make the request + response = await client.count_message_tokens(request=request) + + # Handle the response + print(response) + +# [END generativelanguage_v1alpha_generated_DiscussService_CountMessageTokens_async] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_discuss_service_count_message_tokens_sync.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_discuss_service_count_message_tokens_sync.py new file mode 100644 index 000000000000..223eeae8a2b0 --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_discuss_service_count_message_tokens_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CountMessageTokens +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1alpha_generated_DiscussService_CountMessageTokens_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1alpha + + +def sample_count_message_tokens(): + # Create a client + client = generativelanguage_v1alpha.DiscussServiceClient() + + # Initialize request argument(s) + prompt = generativelanguage_v1alpha.MessagePrompt() + prompt.messages.content = "content_value" + + request = generativelanguage_v1alpha.CountMessageTokensRequest( + model="model_value", + prompt=prompt, + ) + + # Make the request + response = client.count_message_tokens(request=request) + + # Handle the response + print(response) + +# [END generativelanguage_v1alpha_generated_DiscussService_CountMessageTokens_sync] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_discuss_service_generate_message_async.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_discuss_service_generate_message_async.py new file mode 100644 index 000000000000..a3f95f2e9bc0 --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_discuss_service_generate_message_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GenerateMessage +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1alpha_generated_DiscussService_GenerateMessage_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1alpha + + +async def sample_generate_message(): + # Create a client + client = generativelanguage_v1alpha.DiscussServiceAsyncClient() + + # Initialize request argument(s) + prompt = generativelanguage_v1alpha.MessagePrompt() + prompt.messages.content = "content_value" + + request = generativelanguage_v1alpha.GenerateMessageRequest( + model="model_value", + prompt=prompt, + ) + + # Make the request + response = await client.generate_message(request=request) + + # Handle the response + print(response) + +# [END generativelanguage_v1alpha_generated_DiscussService_GenerateMessage_async] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_discuss_service_generate_message_sync.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_discuss_service_generate_message_sync.py new file mode 100644 index 000000000000..1f9247a0e59f --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_discuss_service_generate_message_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GenerateMessage +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1alpha_generated_DiscussService_GenerateMessage_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1alpha + + +def sample_generate_message(): + # Create a client + client = generativelanguage_v1alpha.DiscussServiceClient() + + # Initialize request argument(s) + prompt = generativelanguage_v1alpha.MessagePrompt() + prompt.messages.content = "content_value" + + request = generativelanguage_v1alpha.GenerateMessageRequest( + model="model_value", + prompt=prompt, + ) + + # Make the request + response = client.generate_message(request=request) + + # Handle the response + print(response) + +# [END generativelanguage_v1alpha_generated_DiscussService_GenerateMessage_sync] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_file_service_create_file_async.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_file_service_create_file_async.py new file mode 100644 index 000000000000..f43d178c8c6b --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_file_service_create_file_async.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateFile +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1alpha_generated_FileService_CreateFile_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1alpha + + +async def sample_create_file(): + # Create a client + client = generativelanguage_v1alpha.FileServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.CreateFileRequest( + ) + + # Make the request + response = await client.create_file(request=request) + + # Handle the response + print(response) + +# [END generativelanguage_v1alpha_generated_FileService_CreateFile_async] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_file_service_create_file_sync.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_file_service_create_file_sync.py new file mode 100644 index 000000000000..c5a52005687f --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_file_service_create_file_sync.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateFile +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1alpha_generated_FileService_CreateFile_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1alpha + + +def sample_create_file(): + # Create a client + client = generativelanguage_v1alpha.FileServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.CreateFileRequest( + ) + + # Make the request + response = client.create_file(request=request) + + # Handle the response + print(response) + +# [END generativelanguage_v1alpha_generated_FileService_CreateFile_sync] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_file_service_delete_file_async.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_file_service_delete_file_async.py new file mode 100644 index 000000000000..43417ec52240 --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_file_service_delete_file_async.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteFile +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1alpha_generated_FileService_DeleteFile_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1alpha + + +async def sample_delete_file(): + # Create a client + client = generativelanguage_v1alpha.FileServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.DeleteFileRequest( + name="name_value", + ) + + # Make the request + await client.delete_file(request=request) + + +# [END generativelanguage_v1alpha_generated_FileService_DeleteFile_async] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_file_service_delete_file_sync.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_file_service_delete_file_sync.py new file mode 100644 index 000000000000..d82d529013ed --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_file_service_delete_file_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteFile +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1alpha_generated_FileService_DeleteFile_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1alpha + + +def sample_delete_file(): + # Create a client + client = generativelanguage_v1alpha.FileServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.DeleteFileRequest( + name="name_value", + ) + + # Make the request + client.delete_file(request=request) + + +# [END generativelanguage_v1alpha_generated_FileService_DeleteFile_sync] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_file_service_get_file_async.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_file_service_get_file_async.py new file mode 100644 index 000000000000..a412847208b9 --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_file_service_get_file_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetFile +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1alpha_generated_FileService_GetFile_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1alpha + + +async def sample_get_file(): + # Create a client + client = generativelanguage_v1alpha.FileServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.GetFileRequest( + name="name_value", + ) + + # Make the request + response = await client.get_file(request=request) + + # Handle the response + print(response) + +# [END generativelanguage_v1alpha_generated_FileService_GetFile_async] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_file_service_get_file_sync.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_file_service_get_file_sync.py new file mode 100644 index 000000000000..a079c262d8d5 --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_file_service_get_file_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetFile +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1alpha_generated_FileService_GetFile_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1alpha + + +def sample_get_file(): + # Create a client + client = generativelanguage_v1alpha.FileServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.GetFileRequest( + name="name_value", + ) + + # Make the request + response = client.get_file(request=request) + + # Handle the response + print(response) + +# [END generativelanguage_v1alpha_generated_FileService_GetFile_sync] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_file_service_list_files_async.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_file_service_list_files_async.py new file mode 100644 index 000000000000..ef2e48665528 --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_file_service_list_files_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListFiles +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1alpha_generated_FileService_ListFiles_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1alpha + + +async def sample_list_files(): + # Create a client + client = generativelanguage_v1alpha.FileServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.ListFilesRequest( + ) + + # Make the request + page_result = client.list_files(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END generativelanguage_v1alpha_generated_FileService_ListFiles_async] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_file_service_list_files_sync.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_file_service_list_files_sync.py new file mode 100644 index 000000000000..959d14b0f484 --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_file_service_list_files_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListFiles +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1alpha_generated_FileService_ListFiles_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1alpha + + +def sample_list_files(): + # Create a client + client = generativelanguage_v1alpha.FileServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.ListFilesRequest( + ) + + # Make the request + page_result = client.list_files(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END generativelanguage_v1alpha_generated_FileService_ListFiles_sync] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_generative_service_batch_embed_contents_async.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_generative_service_batch_embed_contents_async.py new file mode 100644 index 000000000000..101edd5d7237 --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_generative_service_batch_embed_contents_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for BatchEmbedContents +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1alpha_generated_GenerativeService_BatchEmbedContents_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1alpha + + +async def sample_batch_embed_contents(): + # Create a client + client = generativelanguage_v1alpha.GenerativeServiceAsyncClient() + + # Initialize request argument(s) + requests = generativelanguage_v1alpha.EmbedContentRequest() + requests.model = "model_value" + + request = generativelanguage_v1alpha.BatchEmbedContentsRequest( + model="model_value", + requests=requests, + ) + + # Make the request + response = await client.batch_embed_contents(request=request) + + # Handle the response + print(response) + +# [END generativelanguage_v1alpha_generated_GenerativeService_BatchEmbedContents_async] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_generative_service_batch_embed_contents_sync.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_generative_service_batch_embed_contents_sync.py new file mode 100644 index 000000000000..cc778d437a35 --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_generative_service_batch_embed_contents_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for BatchEmbedContents +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1alpha_generated_GenerativeService_BatchEmbedContents_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1alpha + + +def sample_batch_embed_contents(): + # Create a client + client = generativelanguage_v1alpha.GenerativeServiceClient() + + # Initialize request argument(s) + requests = generativelanguage_v1alpha.EmbedContentRequest() + requests.model = "model_value" + + request = generativelanguage_v1alpha.BatchEmbedContentsRequest( + model="model_value", + requests=requests, + ) + + # Make the request + response = client.batch_embed_contents(request=request) + + # Handle the response + print(response) + +# [END generativelanguage_v1alpha_generated_GenerativeService_BatchEmbedContents_sync] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_generative_service_bidi_generate_content_async.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_generative_service_bidi_generate_content_async.py new file mode 100644 index 000000000000..64acc497b4c7 --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_generative_service_bidi_generate_content_async.py @@ -0,0 +1,66 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for BidiGenerateContent +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1alpha_generated_GenerativeService_BidiGenerateContent_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1alpha + + +async def sample_bidi_generate_content(): + # Create a client + client = generativelanguage_v1alpha.GenerativeServiceAsyncClient() + + # Initialize request argument(s) + setup = generativelanguage_v1alpha.BidiGenerateContentSetup() + setup.model = "model_value" + + request = generativelanguage_v1alpha.BidiGenerateContentClientMessage( + setup=setup, + ) + + # This method expects an iterator which contains + # 'generativelanguage_v1alpha.BidiGenerateContentClientMessage' objects + # Here we create a generator that yields a single `request` for + # demonstrative purposes. 
+ requests = [request] + + def request_generator(): + for request in requests: + yield request + + # Make the request + stream = await client.bidi_generate_content(requests=request_generator()) + + # Handle the response + async for response in stream: + print(response) + +# [END generativelanguage_v1alpha_generated_GenerativeService_BidiGenerateContent_async] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_generative_service_bidi_generate_content_sync.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_generative_service_bidi_generate_content_sync.py new file mode 100644 index 000000000000..3327c9b41c86 --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_generative_service_bidi_generate_content_sync.py @@ -0,0 +1,66 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for BidiGenerateContent +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1alpha_generated_GenerativeService_BidiGenerateContent_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1alpha + + +def sample_bidi_generate_content(): + # Create a client + client = generativelanguage_v1alpha.GenerativeServiceClient() + + # Initialize request argument(s) + setup = generativelanguage_v1alpha.BidiGenerateContentSetup() + setup.model = "model_value" + + request = generativelanguage_v1alpha.BidiGenerateContentClientMessage( + setup=setup, + ) + + # This method expects an iterator which contains + # 'generativelanguage_v1alpha.BidiGenerateContentClientMessage' objects + # Here we create a generator that yields a single `request` for + # demonstrative purposes. 
+ requests = [request] + + def request_generator(): + for request in requests: + yield request + + # Make the request + stream = client.bidi_generate_content(requests=request_generator()) + + # Handle the response + for response in stream: + print(response) + +# [END generativelanguage_v1alpha_generated_GenerativeService_BidiGenerateContent_sync] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_generative_service_count_tokens_async.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_generative_service_count_tokens_async.py new file mode 100644 index 000000000000..bcfa04fdd133 --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_generative_service_count_tokens_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CountTokens +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1alpha_generated_GenerativeService_CountTokens_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1alpha + + +async def sample_count_tokens(): + # Create a client + client = generativelanguage_v1alpha.GenerativeServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.CountTokensRequest( + model="model_value", + ) + + # Make the request + response = await client.count_tokens(request=request) + + # Handle the response + print(response) + +# [END generativelanguage_v1alpha_generated_GenerativeService_CountTokens_async] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_generative_service_count_tokens_sync.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_generative_service_count_tokens_sync.py new file mode 100644 index 000000000000..01fd050b7bcd --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_generative_service_count_tokens_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CountTokens +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1alpha_generated_GenerativeService_CountTokens_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1alpha + + +def sample_count_tokens(): + # Create a client + client = generativelanguage_v1alpha.GenerativeServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.CountTokensRequest( + model="model_value", + ) + + # Make the request + response = client.count_tokens(request=request) + + # Handle the response + print(response) + +# [END generativelanguage_v1alpha_generated_GenerativeService_CountTokens_sync] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_generative_service_embed_content_async.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_generative_service_embed_content_async.py new file mode 100644 index 000000000000..3ef00ab390ed --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_generative_service_embed_content_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for EmbedContent +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1alpha_generated_GenerativeService_EmbedContent_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1alpha + + +async def sample_embed_content(): + # Create a client + client = generativelanguage_v1alpha.GenerativeServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.EmbedContentRequest( + model="model_value", + ) + + # Make the request + response = await client.embed_content(request=request) + + # Handle the response + print(response) + +# [END generativelanguage_v1alpha_generated_GenerativeService_EmbedContent_async] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_generative_service_embed_content_sync.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_generative_service_embed_content_sync.py new file mode 100644 index 000000000000..713e100205ac --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_generative_service_embed_content_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for EmbedContent +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1alpha_generated_GenerativeService_EmbedContent_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1alpha + + +def sample_embed_content(): + # Create a client + client = generativelanguage_v1alpha.GenerativeServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.EmbedContentRequest( + model="model_value", + ) + + # Make the request + response = client.embed_content(request=request) + + # Handle the response + print(response) + +# [END generativelanguage_v1alpha_generated_GenerativeService_EmbedContent_sync] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_generative_service_generate_answer_async.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_generative_service_generate_answer_async.py new file mode 100644 index 000000000000..f1c6dac52aa3 --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_generative_service_generate_answer_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GenerateAnswer +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1alpha_generated_GenerativeService_GenerateAnswer_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1alpha + + +async def sample_generate_answer(): + # Create a client + client = generativelanguage_v1alpha.GenerativeServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.GenerateAnswerRequest( + model="model_value", + answer_style="VERBOSE", + ) + + # Make the request + response = await client.generate_answer(request=request) + + # Handle the response + print(response) + +# [END generativelanguage_v1alpha_generated_GenerativeService_GenerateAnswer_async] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_generative_service_generate_answer_sync.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_generative_service_generate_answer_sync.py new file mode 100644 index 000000000000..f535aee82342 --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_generative_service_generate_answer_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GenerateAnswer +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1alpha_generated_GenerativeService_GenerateAnswer_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1alpha + + +def sample_generate_answer(): + # Create a client + client = generativelanguage_v1alpha.GenerativeServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.GenerateAnswerRequest( + model="model_value", + answer_style="VERBOSE", + ) + + # Make the request + response = client.generate_answer(request=request) + + # Handle the response + print(response) + +# [END generativelanguage_v1alpha_generated_GenerativeService_GenerateAnswer_sync] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_generative_service_generate_content_async.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_generative_service_generate_content_async.py new file mode 100644 index 000000000000..dadd6494fc01 --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_generative_service_generate_content_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GenerateContent +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1alpha_generated_GenerativeService_GenerateContent_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1alpha + + +async def sample_generate_content(): + # Create a client + client = generativelanguage_v1alpha.GenerativeServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.GenerateContentRequest( + model="model_value", + ) + + # Make the request + response = await client.generate_content(request=request) + + # Handle the response + print(response) + +# [END generativelanguage_v1alpha_generated_GenerativeService_GenerateContent_async] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_generative_service_generate_content_sync.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_generative_service_generate_content_sync.py new file mode 100644 index 000000000000..e0f37948b76d --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_generative_service_generate_content_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GenerateContent +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1alpha_generated_GenerativeService_GenerateContent_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1alpha + + +def sample_generate_content(): + # Create a client + client = generativelanguage_v1alpha.GenerativeServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.GenerateContentRequest( + model="model_value", + ) + + # Make the request + response = client.generate_content(request=request) + + # Handle the response + print(response) + +# [END generativelanguage_v1alpha_generated_GenerativeService_GenerateContent_sync] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_generative_service_stream_generate_content_async.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_generative_service_stream_generate_content_async.py new file mode 100644 index 000000000000..cf3958d6400f --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_generative_service_stream_generate_content_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for StreamGenerateContent +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1alpha_generated_GenerativeService_StreamGenerateContent_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1alpha + + +async def sample_stream_generate_content(): + # Create a client + client = generativelanguage_v1alpha.GenerativeServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.GenerateContentRequest( + model="model_value", + ) + + # Make the request + stream = await client.stream_generate_content(request=request) + + # Handle the response + async for response in stream: + print(response) + +# [END generativelanguage_v1alpha_generated_GenerativeService_StreamGenerateContent_async] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_generative_service_stream_generate_content_sync.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_generative_service_stream_generate_content_sync.py new file mode 100644 index 000000000000..057f8d11ff2e --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_generative_service_stream_generate_content_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for StreamGenerateContent +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1alpha_generated_GenerativeService_StreamGenerateContent_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1alpha + + +def sample_stream_generate_content(): + # Create a client + client = generativelanguage_v1alpha.GenerativeServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.GenerateContentRequest( + model="model_value", + ) + + # Make the request + stream = client.stream_generate_content(request=request) + + # Handle the response + for response in stream: + print(response) + +# [END generativelanguage_v1alpha_generated_GenerativeService_StreamGenerateContent_sync] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_model_service_create_tuned_model_async.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_model_service_create_tuned_model_async.py new file mode 100644 index 000000000000..8b78c9a1fd41 --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_model_service_create_tuned_model_async.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateTunedModel +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1alpha_generated_ModelService_CreateTunedModel_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1alpha + + +async def sample_create_tuned_model(): + # Create a client + client = generativelanguage_v1alpha.ModelServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.CreateTunedModelRequest( + ) + + # Make the request + operation = client.create_tuned_model(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END generativelanguage_v1alpha_generated_ModelService_CreateTunedModel_async] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_model_service_create_tuned_model_sync.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_model_service_create_tuned_model_sync.py new file mode 100644 index 000000000000..5242694677d7 --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_model_service_create_tuned_model_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateTunedModel +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1alpha_generated_ModelService_CreateTunedModel_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1alpha + + +def sample_create_tuned_model(): + # Create a client + client = generativelanguage_v1alpha.ModelServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.CreateTunedModelRequest( + ) + + # Make the request + operation = client.create_tuned_model(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END generativelanguage_v1alpha_generated_ModelService_CreateTunedModel_sync] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_model_service_delete_tuned_model_async.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_model_service_delete_tuned_model_async.py new file mode 100644 index 000000000000..5f154afa887e --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_model_service_delete_tuned_model_async.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteTunedModel +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1alpha_generated_ModelService_DeleteTunedModel_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1alpha + + +async def sample_delete_tuned_model(): + # Create a client + client = generativelanguage_v1alpha.ModelServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.DeleteTunedModelRequest( + name="name_value", + ) + + # Make the request + await client.delete_tuned_model(request=request) + + +# [END generativelanguage_v1alpha_generated_ModelService_DeleteTunedModel_async] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_model_service_delete_tuned_model_sync.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_model_service_delete_tuned_model_sync.py new file mode 100644 index 000000000000..cc63534199d0 --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_model_service_delete_tuned_model_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteTunedModel +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1alpha_generated_ModelService_DeleteTunedModel_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1alpha + + +def sample_delete_tuned_model(): + # Create a client + client = generativelanguage_v1alpha.ModelServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.DeleteTunedModelRequest( + name="name_value", + ) + + # Make the request + client.delete_tuned_model(request=request) + + +# [END generativelanguage_v1alpha_generated_ModelService_DeleteTunedModel_sync] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_model_service_get_model_async.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_model_service_get_model_async.py new file mode 100644 index 000000000000..2e492bb9af48 --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_model_service_get_model_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetModel +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1alpha_generated_ModelService_GetModel_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1alpha + + +async def sample_get_model(): + # Create a client + client = generativelanguage_v1alpha.ModelServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.GetModelRequest( + name="name_value", + ) + + # Make the request + response = await client.get_model(request=request) + + # Handle the response + print(response) + +# [END generativelanguage_v1alpha_generated_ModelService_GetModel_async] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_model_service_get_model_sync.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_model_service_get_model_sync.py new file mode 100644 index 000000000000..f2f357bfc072 --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_model_service_get_model_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetModel +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1alpha_generated_ModelService_GetModel_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1alpha + + +def sample_get_model(): + # Create a client + client = generativelanguage_v1alpha.ModelServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.GetModelRequest( + name="name_value", + ) + + # Make the request + response = client.get_model(request=request) + + # Handle the response + print(response) + +# [END generativelanguage_v1alpha_generated_ModelService_GetModel_sync] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_model_service_get_tuned_model_async.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_model_service_get_tuned_model_async.py new file mode 100644 index 000000000000..2a2cd50e76b1 --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_model_service_get_tuned_model_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetTunedModel +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1alpha_generated_ModelService_GetTunedModel_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1alpha + + +async def sample_get_tuned_model(): + # Create a client + client = generativelanguage_v1alpha.ModelServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.GetTunedModelRequest( + name="name_value", + ) + + # Make the request + response = await client.get_tuned_model(request=request) + + # Handle the response + print(response) + +# [END generativelanguage_v1alpha_generated_ModelService_GetTunedModel_async] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_model_service_get_tuned_model_sync.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_model_service_get_tuned_model_sync.py new file mode 100644 index 000000000000..2d0d781c88d0 --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_model_service_get_tuned_model_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetTunedModel +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1alpha_generated_ModelService_GetTunedModel_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1alpha + + +def sample_get_tuned_model(): + # Create a client + client = generativelanguage_v1alpha.ModelServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.GetTunedModelRequest( + name="name_value", + ) + + # Make the request + response = client.get_tuned_model(request=request) + + # Handle the response + print(response) + +# [END generativelanguage_v1alpha_generated_ModelService_GetTunedModel_sync] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_model_service_list_models_async.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_model_service_list_models_async.py new file mode 100644 index 000000000000..345322778756 --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_model_service_list_models_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListModels +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1alpha_generated_ModelService_ListModels_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1alpha + + +async def sample_list_models(): + # Create a client + client = generativelanguage_v1alpha.ModelServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.ListModelsRequest( + ) + + # Make the request + page_result = client.list_models(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END generativelanguage_v1alpha_generated_ModelService_ListModels_async] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_model_service_list_models_sync.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_model_service_list_models_sync.py new file mode 100644 index 000000000000..9a8e06503616 --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_model_service_list_models_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListModels +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1alpha_generated_ModelService_ListModels_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1alpha + + +def sample_list_models(): + # Create a client + client = generativelanguage_v1alpha.ModelServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.ListModelsRequest( + ) + + # Make the request + page_result = client.list_models(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END generativelanguage_v1alpha_generated_ModelService_ListModels_sync] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_model_service_list_tuned_models_async.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_model_service_list_tuned_models_async.py new file mode 100644 index 000000000000..61ae3811566a --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_model_service_list_tuned_models_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListTunedModels +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1alpha_generated_ModelService_ListTunedModels_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1alpha + + +async def sample_list_tuned_models(): + # Create a client + client = generativelanguage_v1alpha.ModelServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.ListTunedModelsRequest( + ) + + # Make the request + page_result = client.list_tuned_models(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END generativelanguage_v1alpha_generated_ModelService_ListTunedModels_async] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_model_service_list_tuned_models_sync.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_model_service_list_tuned_models_sync.py new file mode 100644 index 000000000000..70cda88c1414 --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_model_service_list_tuned_models_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListTunedModels +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1alpha_generated_ModelService_ListTunedModels_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1alpha + + +def sample_list_tuned_models(): + # Create a client + client = generativelanguage_v1alpha.ModelServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.ListTunedModelsRequest( + ) + + # Make the request + page_result = client.list_tuned_models(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END generativelanguage_v1alpha_generated_ModelService_ListTunedModels_sync] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_model_service_update_tuned_model_async.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_model_service_update_tuned_model_async.py new file mode 100644 index 000000000000..9fa52de8a1b0 --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_model_service_update_tuned_model_async.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateTunedModel +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1alpha_generated_ModelService_UpdateTunedModel_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1alpha + + +async def sample_update_tuned_model(): + # Create a client + client = generativelanguage_v1alpha.ModelServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.UpdateTunedModelRequest( + ) + + # Make the request + response = await client.update_tuned_model(request=request) + + # Handle the response + print(response) + +# [END generativelanguage_v1alpha_generated_ModelService_UpdateTunedModel_async] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_model_service_update_tuned_model_sync.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_model_service_update_tuned_model_sync.py new file mode 100644 index 000000000000..c7092fd8ca61 --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_model_service_update_tuned_model_sync.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateTunedModel +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1alpha_generated_ModelService_UpdateTunedModel_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1alpha + + +def sample_update_tuned_model(): + # Create a client + client = generativelanguage_v1alpha.ModelServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.UpdateTunedModelRequest( + ) + + # Make the request + response = client.update_tuned_model(request=request) + + # Handle the response + print(response) + +# [END generativelanguage_v1alpha_generated_ModelService_UpdateTunedModel_sync] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_permission_service_create_permission_async.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_permission_service_create_permission_async.py new file mode 100644 index 000000000000..80e2d7160063 --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_permission_service_create_permission_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreatePermission +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1alpha_generated_PermissionService_CreatePermission_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1alpha + + +async def sample_create_permission(): + # Create a client + client = generativelanguage_v1alpha.PermissionServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.CreatePermissionRequest( + parent="parent_value", + ) + + # Make the request + response = await client.create_permission(request=request) + + # Handle the response + print(response) + +# [END generativelanguage_v1alpha_generated_PermissionService_CreatePermission_async] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_permission_service_create_permission_sync.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_permission_service_create_permission_sync.py new file mode 100644 index 000000000000..587f8302cee5 --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_permission_service_create_permission_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreatePermission +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1alpha_generated_PermissionService_CreatePermission_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1alpha + + +def sample_create_permission(): + # Create a client + client = generativelanguage_v1alpha.PermissionServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.CreatePermissionRequest( + parent="parent_value", + ) + + # Make the request + response = client.create_permission(request=request) + + # Handle the response + print(response) + +# [END generativelanguage_v1alpha_generated_PermissionService_CreatePermission_sync] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_permission_service_delete_permission_async.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_permission_service_delete_permission_async.py new file mode 100644 index 000000000000..ce903e5cf86e --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_permission_service_delete_permission_async.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeletePermission +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1alpha_generated_PermissionService_DeletePermission_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1alpha + + +async def sample_delete_permission(): + # Create a client + client = generativelanguage_v1alpha.PermissionServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.DeletePermissionRequest( + name="name_value", + ) + + # Make the request + await client.delete_permission(request=request) + + +# [END generativelanguage_v1alpha_generated_PermissionService_DeletePermission_async] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_permission_service_delete_permission_sync.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_permission_service_delete_permission_sync.py new file mode 100644 index 000000000000..c1e4b4080053 --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_permission_service_delete_permission_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeletePermission +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1alpha_generated_PermissionService_DeletePermission_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1alpha + + +def sample_delete_permission(): + # Create a client + client = generativelanguage_v1alpha.PermissionServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.DeletePermissionRequest( + name="name_value", + ) + + # Make the request + client.delete_permission(request=request) + + +# [END generativelanguage_v1alpha_generated_PermissionService_DeletePermission_sync] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_permission_service_get_permission_async.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_permission_service_get_permission_async.py new file mode 100644 index 000000000000..8b5ff9e2f79d --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_permission_service_get_permission_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetPermission +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1alpha_generated_PermissionService_GetPermission_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1alpha + + +async def sample_get_permission(): + # Create a client + client = generativelanguage_v1alpha.PermissionServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.GetPermissionRequest( + name="name_value", + ) + + # Make the request + response = await client.get_permission(request=request) + + # Handle the response + print(response) + +# [END generativelanguage_v1alpha_generated_PermissionService_GetPermission_async] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_permission_service_get_permission_sync.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_permission_service_get_permission_sync.py new file mode 100644 index 000000000000..2fa4ed614567 --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_permission_service_get_permission_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetPermission +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1alpha_generated_PermissionService_GetPermission_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1alpha + + +def sample_get_permission(): + # Create a client + client = generativelanguage_v1alpha.PermissionServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.GetPermissionRequest( + name="name_value", + ) + + # Make the request + response = client.get_permission(request=request) + + # Handle the response + print(response) + +# [END generativelanguage_v1alpha_generated_PermissionService_GetPermission_sync] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_permission_service_list_permissions_async.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_permission_service_list_permissions_async.py new file mode 100644 index 000000000000..01ef5734f136 --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_permission_service_list_permissions_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListPermissions +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1alpha_generated_PermissionService_ListPermissions_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1alpha + + +async def sample_list_permissions(): + # Create a client + client = generativelanguage_v1alpha.PermissionServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.ListPermissionsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_permissions(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END generativelanguage_v1alpha_generated_PermissionService_ListPermissions_async] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_permission_service_list_permissions_sync.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_permission_service_list_permissions_sync.py new file mode 100644 index 000000000000..3b52a1da909b --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_permission_service_list_permissions_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListPermissions +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1alpha_generated_PermissionService_ListPermissions_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1alpha + + +def sample_list_permissions(): + # Create a client + client = generativelanguage_v1alpha.PermissionServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.ListPermissionsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_permissions(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END generativelanguage_v1alpha_generated_PermissionService_ListPermissions_sync] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_permission_service_transfer_ownership_async.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_permission_service_transfer_ownership_async.py new file mode 100644 index 000000000000..968f6c2023ef --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_permission_service_transfer_ownership_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for TransferOwnership +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1alpha_generated_PermissionService_TransferOwnership_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1alpha + + +async def sample_transfer_ownership(): + # Create a client + client = generativelanguage_v1alpha.PermissionServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.TransferOwnershipRequest( + name="name_value", + email_address="email_address_value", + ) + + # Make the request + response = await client.transfer_ownership(request=request) + + # Handle the response + print(response) + +# [END generativelanguage_v1alpha_generated_PermissionService_TransferOwnership_async] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_permission_service_transfer_ownership_sync.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_permission_service_transfer_ownership_sync.py new file mode 100644 index 000000000000..d2b3e909d6c8 --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_permission_service_transfer_ownership_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for TransferOwnership +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1alpha_generated_PermissionService_TransferOwnership_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1alpha + + +def sample_transfer_ownership(): + # Create a client + client = generativelanguage_v1alpha.PermissionServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.TransferOwnershipRequest( + name="name_value", + email_address="email_address_value", + ) + + # Make the request + response = client.transfer_ownership(request=request) + + # Handle the response + print(response) + +# [END generativelanguage_v1alpha_generated_PermissionService_TransferOwnership_sync] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_permission_service_update_permission_async.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_permission_service_update_permission_async.py new file mode 100644 index 000000000000..e956061d1c31 --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_permission_service_update_permission_async.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdatePermission +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1alpha_generated_PermissionService_UpdatePermission_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1alpha + + +async def sample_update_permission(): + # Create a client + client = generativelanguage_v1alpha.PermissionServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.UpdatePermissionRequest( + ) + + # Make the request + response = await client.update_permission(request=request) + + # Handle the response + print(response) + +# [END generativelanguage_v1alpha_generated_PermissionService_UpdatePermission_async] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_permission_service_update_permission_sync.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_permission_service_update_permission_sync.py new file mode 100644 index 000000000000..1e5fbe25f536 --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_permission_service_update_permission_sync.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdatePermission +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1alpha_generated_PermissionService_UpdatePermission_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1alpha + + +def sample_update_permission(): + # Create a client + client = generativelanguage_v1alpha.PermissionServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.UpdatePermissionRequest( + ) + + # Make the request + response = client.update_permission(request=request) + + # Handle the response + print(response) + +# [END generativelanguage_v1alpha_generated_PermissionService_UpdatePermission_sync] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_prediction_service_predict_async.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_prediction_service_predict_async.py new file mode 100644 index 000000000000..2f3022b0a953 --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_prediction_service_predict_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Predict +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1alpha_generated_PredictionService_Predict_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1alpha + + +async def sample_predict(): + # Create a client + client = generativelanguage_v1alpha.PredictionServiceAsyncClient() + + # Initialize request argument(s) + instances = generativelanguage_v1alpha.Value() + instances.null_value = "NULL_VALUE" + + request = generativelanguage_v1alpha.PredictRequest( + model="model_value", + instances=instances, + ) + + # Make the request + response = await client.predict(request=request) + + # Handle the response + print(response) + +# [END generativelanguage_v1alpha_generated_PredictionService_Predict_async] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_prediction_service_predict_sync.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_prediction_service_predict_sync.py new file mode 100644 index 000000000000..800ae88ad788 --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_prediction_service_predict_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Predict +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1alpha_generated_PredictionService_Predict_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1alpha + + +def sample_predict(): + # Create a client + client = generativelanguage_v1alpha.PredictionServiceClient() + + # Initialize request argument(s) + instances = generativelanguage_v1alpha.Value() + instances.null_value = "NULL_VALUE" + + request = generativelanguage_v1alpha.PredictRequest( + model="model_value", + instances=instances, + ) + + # Make the request + response = client.predict(request=request) + + # Handle the response + print(response) + +# [END generativelanguage_v1alpha_generated_PredictionService_Predict_sync] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_retriever_service_batch_create_chunks_async.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_retriever_service_batch_create_chunks_async.py new file mode 100644 index 000000000000..295179010026 --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_retriever_service_batch_create_chunks_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for BatchCreateChunks +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1alpha_generated_RetrieverService_BatchCreateChunks_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1alpha + + +async def sample_batch_create_chunks(): + # Create a client + client = generativelanguage_v1alpha.RetrieverServiceAsyncClient() + + # Initialize request argument(s) + requests = generativelanguage_v1alpha.CreateChunkRequest() + requests.parent = "parent_value" + requests.chunk.data.string_value = "string_value_value" + + request = generativelanguage_v1alpha.BatchCreateChunksRequest( + requests=requests, + ) + + # Make the request + response = await client.batch_create_chunks(request=request) + + # Handle the response + print(response) + +# [END generativelanguage_v1alpha_generated_RetrieverService_BatchCreateChunks_async] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_retriever_service_batch_create_chunks_sync.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_retriever_service_batch_create_chunks_sync.py new file mode 100644 index 000000000000..6155e653e118 --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_retriever_service_batch_create_chunks_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for BatchCreateChunks +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1alpha_generated_RetrieverService_BatchCreateChunks_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1alpha + + +def sample_batch_create_chunks(): + # Create a client + client = generativelanguage_v1alpha.RetrieverServiceClient() + + # Initialize request argument(s) + requests = generativelanguage_v1alpha.CreateChunkRequest() + requests.parent = "parent_value" + requests.chunk.data.string_value = "string_value_value" + + request = generativelanguage_v1alpha.BatchCreateChunksRequest( + requests=requests, + ) + + # Make the request + response = client.batch_create_chunks(request=request) + + # Handle the response + print(response) + +# [END generativelanguage_v1alpha_generated_RetrieverService_BatchCreateChunks_sync] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_retriever_service_batch_delete_chunks_async.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_retriever_service_batch_delete_chunks_async.py new file mode 100644 index 000000000000..a83d4749248b --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_retriever_service_batch_delete_chunks_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for BatchDeleteChunks +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1alpha_generated_RetrieverService_BatchDeleteChunks_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1alpha + + +async def sample_batch_delete_chunks(): + # Create a client + client = generativelanguage_v1alpha.RetrieverServiceAsyncClient() + + # Initialize request argument(s) + requests = generativelanguage_v1alpha.DeleteChunkRequest() + requests.name = "name_value" + + request = generativelanguage_v1alpha.BatchDeleteChunksRequest( + requests=requests, + ) + + # Make the request + await client.batch_delete_chunks(request=request) + + +# [END generativelanguage_v1alpha_generated_RetrieverService_BatchDeleteChunks_async] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_retriever_service_batch_delete_chunks_sync.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_retriever_service_batch_delete_chunks_sync.py new file mode 100644 index 000000000000..6bc1526c61a5 --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_retriever_service_batch_delete_chunks_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for BatchDeleteChunks +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1alpha_generated_RetrieverService_BatchDeleteChunks_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1alpha + + +def sample_batch_delete_chunks(): + # Create a client + client = generativelanguage_v1alpha.RetrieverServiceClient() + + # Initialize request argument(s) + requests = generativelanguage_v1alpha.DeleteChunkRequest() + requests.name = "name_value" + + request = generativelanguage_v1alpha.BatchDeleteChunksRequest( + requests=requests, + ) + + # Make the request + client.batch_delete_chunks(request=request) + + +# [END generativelanguage_v1alpha_generated_RetrieverService_BatchDeleteChunks_sync] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_retriever_service_batch_update_chunks_async.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_retriever_service_batch_update_chunks_async.py new file mode 100644 index 000000000000..bbf4fdb530e9 --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_retriever_service_batch_update_chunks_async.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for BatchUpdateChunks +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1alpha_generated_RetrieverService_BatchUpdateChunks_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1alpha + + +async def sample_batch_update_chunks(): + # Create a client + client = generativelanguage_v1alpha.RetrieverServiceAsyncClient() + + # Initialize request argument(s) + requests = generativelanguage_v1alpha.UpdateChunkRequest() + requests.chunk.data.string_value = "string_value_value" + + request = generativelanguage_v1alpha.BatchUpdateChunksRequest( + requests=requests, + ) + + # Make the request + response = await client.batch_update_chunks(request=request) + + # Handle the response + print(response) + +# [END generativelanguage_v1alpha_generated_RetrieverService_BatchUpdateChunks_async] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_retriever_service_batch_update_chunks_sync.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_retriever_service_batch_update_chunks_sync.py new file mode 100644 index 000000000000..2502a6617d1f --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_retriever_service_batch_update_chunks_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for BatchUpdateChunks +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1alpha_generated_RetrieverService_BatchUpdateChunks_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1alpha + + +def sample_batch_update_chunks(): + # Create a client + client = generativelanguage_v1alpha.RetrieverServiceClient() + + # Initialize request argument(s) + requests = generativelanguage_v1alpha.UpdateChunkRequest() + requests.chunk.data.string_value = "string_value_value" + + request = generativelanguage_v1alpha.BatchUpdateChunksRequest( + requests=requests, + ) + + # Make the request + response = client.batch_update_chunks(request=request) + + # Handle the response + print(response) + +# [END generativelanguage_v1alpha_generated_RetrieverService_BatchUpdateChunks_sync] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_retriever_service_create_chunk_async.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_retriever_service_create_chunk_async.py new file mode 100644 index 000000000000..ffaa5adbf680 --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_retriever_service_create_chunk_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateChunk +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1alpha_generated_RetrieverService_CreateChunk_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1alpha + + +async def sample_create_chunk(): + # Create a client + client = generativelanguage_v1alpha.RetrieverServiceAsyncClient() + + # Initialize request argument(s) + chunk = generativelanguage_v1alpha.Chunk() + chunk.data.string_value = "string_value_value" + + request = generativelanguage_v1alpha.CreateChunkRequest( + parent="parent_value", + chunk=chunk, + ) + + # Make the request + response = await client.create_chunk(request=request) + + # Handle the response + print(response) + +# [END generativelanguage_v1alpha_generated_RetrieverService_CreateChunk_async] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_retriever_service_create_chunk_sync.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_retriever_service_create_chunk_sync.py new file mode 100644 index 000000000000..da7f5dd66659 --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_retriever_service_create_chunk_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateChunk +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1alpha_generated_RetrieverService_CreateChunk_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1alpha + + +def sample_create_chunk(): + # Create a client + client = generativelanguage_v1alpha.RetrieverServiceClient() + + # Initialize request argument(s) + chunk = generativelanguage_v1alpha.Chunk() + chunk.data.string_value = "string_value_value" + + request = generativelanguage_v1alpha.CreateChunkRequest( + parent="parent_value", + chunk=chunk, + ) + + # Make the request + response = client.create_chunk(request=request) + + # Handle the response + print(response) + +# [END generativelanguage_v1alpha_generated_RetrieverService_CreateChunk_sync] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_retriever_service_create_corpus_async.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_retriever_service_create_corpus_async.py new file mode 100644 index 000000000000..4807923df231 --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_retriever_service_create_corpus_async.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateCorpus +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1alpha_generated_RetrieverService_CreateCorpus_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1alpha + + +async def sample_create_corpus(): + # Create a client + client = generativelanguage_v1alpha.RetrieverServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.CreateCorpusRequest( + ) + + # Make the request + response = await client.create_corpus(request=request) + + # Handle the response + print(response) + +# [END generativelanguage_v1alpha_generated_RetrieverService_CreateCorpus_async] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_retriever_service_create_corpus_sync.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_retriever_service_create_corpus_sync.py new file mode 100644 index 000000000000..ffbf03f3980c --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_retriever_service_create_corpus_sync.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateCorpus +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1alpha_generated_RetrieverService_CreateCorpus_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1alpha + + +def sample_create_corpus(): + # Create a client + client = generativelanguage_v1alpha.RetrieverServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.CreateCorpusRequest( + ) + + # Make the request + response = client.create_corpus(request=request) + + # Handle the response + print(response) + +# [END generativelanguage_v1alpha_generated_RetrieverService_CreateCorpus_sync] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_retriever_service_create_document_async.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_retriever_service_create_document_async.py new file mode 100644 index 000000000000..27690e826e2c --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_retriever_service_create_document_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateDocument +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1alpha_generated_RetrieverService_CreateDocument_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1alpha + + +async def sample_create_document(): + # Create a client + client = generativelanguage_v1alpha.RetrieverServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.CreateDocumentRequest( + parent="parent_value", + ) + + # Make the request + response = await client.create_document(request=request) + + # Handle the response + print(response) + +# [END generativelanguage_v1alpha_generated_RetrieverService_CreateDocument_async] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_retriever_service_create_document_sync.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_retriever_service_create_document_sync.py new file mode 100644 index 000000000000..8b848860c70d --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_retriever_service_create_document_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateDocument +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1alpha_generated_RetrieverService_CreateDocument_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1alpha + + +def sample_create_document(): + # Create a client + client = generativelanguage_v1alpha.RetrieverServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.CreateDocumentRequest( + parent="parent_value", + ) + + # Make the request + response = client.create_document(request=request) + + # Handle the response + print(response) + +# [END generativelanguage_v1alpha_generated_RetrieverService_CreateDocument_sync] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_retriever_service_delete_chunk_async.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_retriever_service_delete_chunk_async.py new file mode 100644 index 000000000000..67aa1505ea7f --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_retriever_service_delete_chunk_async.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteChunk +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1alpha_generated_RetrieverService_DeleteChunk_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1alpha + + +async def sample_delete_chunk(): + # Create a client + client = generativelanguage_v1alpha.RetrieverServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.DeleteChunkRequest( + name="name_value", + ) + + # Make the request + await client.delete_chunk(request=request) + + +# [END generativelanguage_v1alpha_generated_RetrieverService_DeleteChunk_async] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_retriever_service_delete_chunk_sync.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_retriever_service_delete_chunk_sync.py new file mode 100644 index 000000000000..3c3b40963e51 --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_retriever_service_delete_chunk_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteChunk +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1alpha_generated_RetrieverService_DeleteChunk_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1alpha + + +def sample_delete_chunk(): + # Create a client + client = generativelanguage_v1alpha.RetrieverServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.DeleteChunkRequest( + name="name_value", + ) + + # Make the request + client.delete_chunk(request=request) + + +# [END generativelanguage_v1alpha_generated_RetrieverService_DeleteChunk_sync] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_retriever_service_delete_corpus_async.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_retriever_service_delete_corpus_async.py new file mode 100644 index 000000000000..86b2d27ac527 --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_retriever_service_delete_corpus_async.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteCorpus +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1alpha_generated_RetrieverService_DeleteCorpus_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1alpha + + +async def sample_delete_corpus(): + # Create a client + client = generativelanguage_v1alpha.RetrieverServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.DeleteCorpusRequest( + name="name_value", + ) + + # Make the request + await client.delete_corpus(request=request) + + +# [END generativelanguage_v1alpha_generated_RetrieverService_DeleteCorpus_async] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_retriever_service_delete_corpus_sync.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_retriever_service_delete_corpus_sync.py new file mode 100644 index 000000000000..958c31c514a6 --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_retriever_service_delete_corpus_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteCorpus +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1alpha_generated_RetrieverService_DeleteCorpus_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1alpha + + +def sample_delete_corpus(): + # Create a client + client = generativelanguage_v1alpha.RetrieverServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.DeleteCorpusRequest( + name="name_value", + ) + + # Make the request + client.delete_corpus(request=request) + + +# [END generativelanguage_v1alpha_generated_RetrieverService_DeleteCorpus_sync] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_retriever_service_delete_document_async.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_retriever_service_delete_document_async.py new file mode 100644 index 000000000000..de5657b962c0 --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_retriever_service_delete_document_async.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteDocument +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1alpha_generated_RetrieverService_DeleteDocument_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1alpha + + +async def sample_delete_document(): + # Create a client + client = generativelanguage_v1alpha.RetrieverServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.DeleteDocumentRequest( + name="name_value", + ) + + # Make the request + await client.delete_document(request=request) + + +# [END generativelanguage_v1alpha_generated_RetrieverService_DeleteDocument_async] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_retriever_service_delete_document_sync.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_retriever_service_delete_document_sync.py new file mode 100644 index 000000000000..3f55752e5ccd --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_retriever_service_delete_document_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteDocument +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1alpha_generated_RetrieverService_DeleteDocument_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1alpha + + +def sample_delete_document(): + # Create a client + client = generativelanguage_v1alpha.RetrieverServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.DeleteDocumentRequest( + name="name_value", + ) + + # Make the request + client.delete_document(request=request) + + +# [END generativelanguage_v1alpha_generated_RetrieverService_DeleteDocument_sync] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_retriever_service_get_chunk_async.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_retriever_service_get_chunk_async.py new file mode 100644 index 000000000000..70b201278e7a --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_retriever_service_get_chunk_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetChunk +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1alpha_generated_RetrieverService_GetChunk_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1alpha + + +async def sample_get_chunk(): + # Create a client + client = generativelanguage_v1alpha.RetrieverServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.GetChunkRequest( + name="name_value", + ) + + # Make the request + response = await client.get_chunk(request=request) + + # Handle the response + print(response) + +# [END generativelanguage_v1alpha_generated_RetrieverService_GetChunk_async] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_retriever_service_get_chunk_sync.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_retriever_service_get_chunk_sync.py new file mode 100644 index 000000000000..dbd3db21212a --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_retriever_service_get_chunk_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetChunk +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1alpha_generated_RetrieverService_GetChunk_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1alpha + + +def sample_get_chunk(): + # Create a client + client = generativelanguage_v1alpha.RetrieverServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.GetChunkRequest( + name="name_value", + ) + + # Make the request + response = client.get_chunk(request=request) + + # Handle the response + print(response) + +# [END generativelanguage_v1alpha_generated_RetrieverService_GetChunk_sync] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_retriever_service_get_corpus_async.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_retriever_service_get_corpus_async.py new file mode 100644 index 000000000000..5dfb52b613d5 --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_retriever_service_get_corpus_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetCorpus +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1alpha_generated_RetrieverService_GetCorpus_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1alpha + + +async def sample_get_corpus(): + # Create a client + client = generativelanguage_v1alpha.RetrieverServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.GetCorpusRequest( + name="name_value", + ) + + # Make the request + response = await client.get_corpus(request=request) + + # Handle the response + print(response) + +# [END generativelanguage_v1alpha_generated_RetrieverService_GetCorpus_async] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_retriever_service_get_corpus_sync.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_retriever_service_get_corpus_sync.py new file mode 100644 index 000000000000..6fe326141045 --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_retriever_service_get_corpus_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetCorpus +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1alpha_generated_RetrieverService_GetCorpus_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1alpha + + +def sample_get_corpus(): + # Create a client + client = generativelanguage_v1alpha.RetrieverServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.GetCorpusRequest( + name="name_value", + ) + + # Make the request + response = client.get_corpus(request=request) + + # Handle the response + print(response) + +# [END generativelanguage_v1alpha_generated_RetrieverService_GetCorpus_sync] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_retriever_service_get_document_async.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_retriever_service_get_document_async.py new file mode 100644 index 000000000000..c100b61c41cf --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_retriever_service_get_document_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetDocument +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1alpha_generated_RetrieverService_GetDocument_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1alpha + + +async def sample_get_document(): + # Create a client + client = generativelanguage_v1alpha.RetrieverServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.GetDocumentRequest( + name="name_value", + ) + + # Make the request + response = await client.get_document(request=request) + + # Handle the response + print(response) + +# [END generativelanguage_v1alpha_generated_RetrieverService_GetDocument_async] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_retriever_service_get_document_sync.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_retriever_service_get_document_sync.py new file mode 100644 index 000000000000..1bca4cad2d00 --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_retriever_service_get_document_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetDocument +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1alpha_generated_RetrieverService_GetDocument_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1alpha + + +def sample_get_document(): + # Create a client + client = generativelanguage_v1alpha.RetrieverServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.GetDocumentRequest( + name="name_value", + ) + + # Make the request + response = client.get_document(request=request) + + # Handle the response + print(response) + +# [END generativelanguage_v1alpha_generated_RetrieverService_GetDocument_sync] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_retriever_service_list_chunks_async.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_retriever_service_list_chunks_async.py new file mode 100644 index 000000000000..817ee4bf01d1 --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_retriever_service_list_chunks_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListChunks +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1alpha_generated_RetrieverService_ListChunks_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1alpha + + +async def sample_list_chunks(): + # Create a client + client = generativelanguage_v1alpha.RetrieverServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.ListChunksRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_chunks(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END generativelanguage_v1alpha_generated_RetrieverService_ListChunks_async] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_retriever_service_list_chunks_sync.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_retriever_service_list_chunks_sync.py new file mode 100644 index 000000000000..a30732170055 --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_retriever_service_list_chunks_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListChunks +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1alpha_generated_RetrieverService_ListChunks_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1alpha + + +def sample_list_chunks(): + # Create a client + client = generativelanguage_v1alpha.RetrieverServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.ListChunksRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_chunks(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END generativelanguage_v1alpha_generated_RetrieverService_ListChunks_sync] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_retriever_service_list_corpora_async.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_retriever_service_list_corpora_async.py new file mode 100644 index 000000000000..2a37061a4636 --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_retriever_service_list_corpora_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListCorpora +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1alpha_generated_RetrieverService_ListCorpora_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1alpha + + +async def sample_list_corpora(): + # Create a client + client = generativelanguage_v1alpha.RetrieverServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.ListCorporaRequest( + ) + + # Make the request + page_result = client.list_corpora(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END generativelanguage_v1alpha_generated_RetrieverService_ListCorpora_async] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_retriever_service_list_corpora_sync.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_retriever_service_list_corpora_sync.py new file mode 100644 index 000000000000..b33c84c3e88f --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_retriever_service_list_corpora_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListCorpora +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1alpha_generated_RetrieverService_ListCorpora_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1alpha + + +def sample_list_corpora(): + # Create a client + client = generativelanguage_v1alpha.RetrieverServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.ListCorporaRequest( + ) + + # Make the request + page_result = client.list_corpora(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END generativelanguage_v1alpha_generated_RetrieverService_ListCorpora_sync] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_retriever_service_list_documents_async.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_retriever_service_list_documents_async.py new file mode 100644 index 000000000000..7cff65392bf8 --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_retriever_service_list_documents_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListDocuments +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1alpha_generated_RetrieverService_ListDocuments_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1alpha + + +async def sample_list_documents(): + # Create a client + client = generativelanguage_v1alpha.RetrieverServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.ListDocumentsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_documents(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END generativelanguage_v1alpha_generated_RetrieverService_ListDocuments_async] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_retriever_service_list_documents_sync.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_retriever_service_list_documents_sync.py new file mode 100644 index 000000000000..934bd69738a4 --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_retriever_service_list_documents_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListDocuments +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1alpha_generated_RetrieverService_ListDocuments_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1alpha + + +def sample_list_documents(): + # Create a client + client = generativelanguage_v1alpha.RetrieverServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.ListDocumentsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_documents(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END generativelanguage_v1alpha_generated_RetrieverService_ListDocuments_sync] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_retriever_service_query_corpus_async.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_retriever_service_query_corpus_async.py new file mode 100644 index 000000000000..ad55c5da5caf --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_retriever_service_query_corpus_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for QueryCorpus +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1alpha_generated_RetrieverService_QueryCorpus_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1alpha + + +async def sample_query_corpus(): + # Create a client + client = generativelanguage_v1alpha.RetrieverServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.QueryCorpusRequest( + name="name_value", + query="query_value", + ) + + # Make the request + response = await client.query_corpus(request=request) + + # Handle the response + print(response) + +# [END generativelanguage_v1alpha_generated_RetrieverService_QueryCorpus_async] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_retriever_service_query_corpus_sync.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_retriever_service_query_corpus_sync.py new file mode 100644 index 000000000000..52f35ca4ad01 --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_retriever_service_query_corpus_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for QueryCorpus +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1alpha_generated_RetrieverService_QueryCorpus_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1alpha + + +def sample_query_corpus(): + # Create a client + client = generativelanguage_v1alpha.RetrieverServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.QueryCorpusRequest( + name="name_value", + query="query_value", + ) + + # Make the request + response = client.query_corpus(request=request) + + # Handle the response + print(response) + +# [END generativelanguage_v1alpha_generated_RetrieverService_QueryCorpus_sync] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_retriever_service_query_document_async.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_retriever_service_query_document_async.py new file mode 100644 index 000000000000..908be016cc95 --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_retriever_service_query_document_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for QueryDocument +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1alpha_generated_RetrieverService_QueryDocument_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1alpha + + +async def sample_query_document(): + # Create a client + client = generativelanguage_v1alpha.RetrieverServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.QueryDocumentRequest( + name="name_value", + query="query_value", + ) + + # Make the request + response = await client.query_document(request=request) + + # Handle the response + print(response) + +# [END generativelanguage_v1alpha_generated_RetrieverService_QueryDocument_async] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_retriever_service_query_document_sync.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_retriever_service_query_document_sync.py new file mode 100644 index 000000000000..5d3b84398329 --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_retriever_service_query_document_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for QueryDocument +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1alpha_generated_RetrieverService_QueryDocument_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1alpha + + +def sample_query_document(): + # Create a client + client = generativelanguage_v1alpha.RetrieverServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.QueryDocumentRequest( + name="name_value", + query="query_value", + ) + + # Make the request + response = client.query_document(request=request) + + # Handle the response + print(response) + +# [END generativelanguage_v1alpha_generated_RetrieverService_QueryDocument_sync] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_retriever_service_update_chunk_async.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_retriever_service_update_chunk_async.py new file mode 100644 index 000000000000..62395c93331a --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_retriever_service_update_chunk_async.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateChunk +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1alpha_generated_RetrieverService_UpdateChunk_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1alpha + + +async def sample_update_chunk(): + # Create a client + client = generativelanguage_v1alpha.RetrieverServiceAsyncClient() + + # Initialize request argument(s) + chunk = generativelanguage_v1alpha.Chunk() + chunk.data.string_value = "string_value_value" + + request = generativelanguage_v1alpha.UpdateChunkRequest( + chunk=chunk, + ) + + # Make the request + response = await client.update_chunk(request=request) + + # Handle the response + print(response) + +# [END generativelanguage_v1alpha_generated_RetrieverService_UpdateChunk_async] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_retriever_service_update_chunk_sync.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_retriever_service_update_chunk_sync.py new file mode 100644 index 000000000000..c542e8e2e302 --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_retriever_service_update_chunk_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateChunk +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1alpha_generated_RetrieverService_UpdateChunk_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1alpha + + +def sample_update_chunk(): + # Create a client + client = generativelanguage_v1alpha.RetrieverServiceClient() + + # Initialize request argument(s) + chunk = generativelanguage_v1alpha.Chunk() + chunk.data.string_value = "string_value_value" + + request = generativelanguage_v1alpha.UpdateChunkRequest( + chunk=chunk, + ) + + # Make the request + response = client.update_chunk(request=request) + + # Handle the response + print(response) + +# [END generativelanguage_v1alpha_generated_RetrieverService_UpdateChunk_sync] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_retriever_service_update_corpus_async.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_retriever_service_update_corpus_async.py new file mode 100644 index 000000000000..6b29f4119273 --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_retriever_service_update_corpus_async.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateCorpus +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1alpha_generated_RetrieverService_UpdateCorpus_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1alpha + + +async def sample_update_corpus(): + # Create a client + client = generativelanguage_v1alpha.RetrieverServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.UpdateCorpusRequest( + ) + + # Make the request + response = await client.update_corpus(request=request) + + # Handle the response + print(response) + +# [END generativelanguage_v1alpha_generated_RetrieverService_UpdateCorpus_async] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_retriever_service_update_corpus_sync.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_retriever_service_update_corpus_sync.py new file mode 100644 index 000000000000..a364eff802db --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_retriever_service_update_corpus_sync.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateCorpus +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1alpha_generated_RetrieverService_UpdateCorpus_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1alpha + + +def sample_update_corpus(): + # Create a client + client = generativelanguage_v1alpha.RetrieverServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.UpdateCorpusRequest( + ) + + # Make the request + response = client.update_corpus(request=request) + + # Handle the response + print(response) + +# [END generativelanguage_v1alpha_generated_RetrieverService_UpdateCorpus_sync] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_retriever_service_update_document_async.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_retriever_service_update_document_async.py new file mode 100644 index 000000000000..a532c772f723 --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_retriever_service_update_document_async.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateDocument +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1alpha_generated_RetrieverService_UpdateDocument_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1alpha + + +async def sample_update_document(): + # Create a client + client = generativelanguage_v1alpha.RetrieverServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.UpdateDocumentRequest( + ) + + # Make the request + response = await client.update_document(request=request) + + # Handle the response + print(response) + +# [END generativelanguage_v1alpha_generated_RetrieverService_UpdateDocument_async] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_retriever_service_update_document_sync.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_retriever_service_update_document_sync.py new file mode 100644 index 000000000000..0037a5f02359 --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_retriever_service_update_document_sync.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateDocument +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1alpha_generated_RetrieverService_UpdateDocument_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1alpha + + +def sample_update_document(): + # Create a client + client = generativelanguage_v1alpha.RetrieverServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.UpdateDocumentRequest( + ) + + # Make the request + response = client.update_document(request=request) + + # Handle the response + print(response) + +# [END generativelanguage_v1alpha_generated_RetrieverService_UpdateDocument_sync] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_text_service_batch_embed_text_async.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_text_service_batch_embed_text_async.py new file mode 100644 index 000000000000..3a27fec77706 --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_text_service_batch_embed_text_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for BatchEmbedText +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1alpha_generated_TextService_BatchEmbedText_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1alpha + + +async def sample_batch_embed_text(): + # Create a client + client = generativelanguage_v1alpha.TextServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.BatchEmbedTextRequest( + model="model_value", + ) + + # Make the request + response = await client.batch_embed_text(request=request) + + # Handle the response + print(response) + +# [END generativelanguage_v1alpha_generated_TextService_BatchEmbedText_async] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_text_service_batch_embed_text_sync.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_text_service_batch_embed_text_sync.py new file mode 100644 index 000000000000..c01f0f7057e9 --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_text_service_batch_embed_text_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for BatchEmbedText +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1alpha_generated_TextService_BatchEmbedText_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1alpha + + +def sample_batch_embed_text(): + # Create a client + client = generativelanguage_v1alpha.TextServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.BatchEmbedTextRequest( + model="model_value", + ) + + # Make the request + response = client.batch_embed_text(request=request) + + # Handle the response + print(response) + +# [END generativelanguage_v1alpha_generated_TextService_BatchEmbedText_sync] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_text_service_count_text_tokens_async.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_text_service_count_text_tokens_async.py new file mode 100644 index 000000000000..499086b4372b --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_text_service_count_text_tokens_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CountTextTokens +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1alpha_generated_TextService_CountTextTokens_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1alpha + + +async def sample_count_text_tokens(): + # Create a client + client = generativelanguage_v1alpha.TextServiceAsyncClient() + + # Initialize request argument(s) + prompt = generativelanguage_v1alpha.TextPrompt() + prompt.text = "text_value" + + request = generativelanguage_v1alpha.CountTextTokensRequest( + model="model_value", + prompt=prompt, + ) + + # Make the request + response = await client.count_text_tokens(request=request) + + # Handle the response + print(response) + +# [END generativelanguage_v1alpha_generated_TextService_CountTextTokens_async] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_text_service_count_text_tokens_sync.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_text_service_count_text_tokens_sync.py new file mode 100644 index 000000000000..933c2887e538 --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_text_service_count_text_tokens_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CountTextTokens +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1alpha_generated_TextService_CountTextTokens_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1alpha + + +def sample_count_text_tokens(): + # Create a client + client = generativelanguage_v1alpha.TextServiceClient() + + # Initialize request argument(s) + prompt = generativelanguage_v1alpha.TextPrompt() + prompt.text = "text_value" + + request = generativelanguage_v1alpha.CountTextTokensRequest( + model="model_value", + prompt=prompt, + ) + + # Make the request + response = client.count_text_tokens(request=request) + + # Handle the response + print(response) + +# [END generativelanguage_v1alpha_generated_TextService_CountTextTokens_sync] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_text_service_embed_text_async.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_text_service_embed_text_async.py new file mode 100644 index 000000000000..2e72d0bb0d20 --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_text_service_embed_text_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for EmbedText +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1alpha_generated_TextService_EmbedText_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1alpha + + +async def sample_embed_text(): + # Create a client + client = generativelanguage_v1alpha.TextServiceAsyncClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.EmbedTextRequest( + model="model_value", + ) + + # Make the request + response = await client.embed_text(request=request) + + # Handle the response + print(response) + +# [END generativelanguage_v1alpha_generated_TextService_EmbedText_async] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_text_service_embed_text_sync.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_text_service_embed_text_sync.py new file mode 100644 index 000000000000..fe370d4fa9e0 --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_text_service_embed_text_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for EmbedText +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1alpha_generated_TextService_EmbedText_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1alpha + + +def sample_embed_text(): + # Create a client + client = generativelanguage_v1alpha.TextServiceClient() + + # Initialize request argument(s) + request = generativelanguage_v1alpha.EmbedTextRequest( + model="model_value", + ) + + # Make the request + response = client.embed_text(request=request) + + # Handle the response + print(response) + +# [END generativelanguage_v1alpha_generated_TextService_EmbedText_sync] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_text_service_generate_text_async.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_text_service_generate_text_async.py new file mode 100644 index 000000000000..f7d6e946807a --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_text_service_generate_text_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GenerateText +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1alpha_generated_TextService_GenerateText_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1alpha + + +async def sample_generate_text(): + # Create a client + client = generativelanguage_v1alpha.TextServiceAsyncClient() + + # Initialize request argument(s) + prompt = generativelanguage_v1alpha.TextPrompt() + prompt.text = "text_value" + + request = generativelanguage_v1alpha.GenerateTextRequest( + model="model_value", + prompt=prompt, + ) + + # Make the request + response = await client.generate_text(request=request) + + # Handle the response + print(response) + +# [END generativelanguage_v1alpha_generated_TextService_GenerateText_async] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_text_service_generate_text_sync.py b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_text_service_generate_text_sync.py new file mode 100644 index 000000000000..db71efe2b5cd --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/generativelanguage_v1alpha_generated_text_service_generate_text_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GenerateText +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ai-generativelanguage + + +# [START generativelanguage_v1alpha_generated_TextService_GenerateText_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ai import generativelanguage_v1alpha + + +def sample_generate_text(): + # Create a client + client = generativelanguage_v1alpha.TextServiceClient() + + # Initialize request argument(s) + prompt = generativelanguage_v1alpha.TextPrompt() + prompt.text = "text_value" + + request = generativelanguage_v1alpha.GenerateTextRequest( + model="model_value", + prompt=prompt, + ) + + # Make the request + response = client.generate_text(request=request) + + # Handle the response + print(response) + +# [END generativelanguage_v1alpha_generated_TextService_GenerateText_sync] diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1.json b/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1.json index 32123624e9e5..d7cdf74402a6 100644 --- a/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1.json +++ b/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-ai-generativelanguage", - "version": "0.6.14" + "version": "0.6.16" }, "snippets": [ { diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1alpha.json b/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1alpha.json new file mode 100644 index 000000000000..a22b786bcc27 --- /dev/null +++ b/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1alpha.json @@ -0,0 +1,9183 @@ +{ + "clientLibrary": { + "apis": [ + { + "id": "google.ai.generativelanguage.v1alpha", + "version": "v1alpha" + } + ], + "language": "PYTHON", + "name": "google-ai-generativelanguage", + "version": "0.6.16" + }, + "snippets": [ + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.ai.generativelanguage_v1alpha.CacheServiceAsyncClient", + "shortName": "CacheServiceAsyncClient" + }, + "fullName": "google.ai.generativelanguage_v1alpha.CacheServiceAsyncClient.create_cached_content", + "method": { + "fullName": "google.ai.generativelanguage.v1alpha.CacheService.CreateCachedContent", + "service": { + "fullName": "google.ai.generativelanguage.v1alpha.CacheService", + "shortName": "CacheService" + }, + "shortName": "CreateCachedContent" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1alpha.types.CreateCachedContentRequest" + }, + { + "name": "cached_content", + "type": "google.ai.generativelanguage_v1alpha.types.CachedContent" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.ai.generativelanguage_v1alpha.types.CachedContent", + "shortName": "create_cached_content" + }, + "description": "Sample for CreateCachedContent", + "file": "generativelanguage_v1alpha_generated_cache_service_create_cached_content_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": 
"generativelanguage_v1alpha_generated_CacheService_CreateCachedContent_async", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1alpha_generated_cache_service_create_cached_content_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.ai.generativelanguage_v1alpha.CacheServiceClient", + "shortName": "CacheServiceClient" + }, + "fullName": "google.ai.generativelanguage_v1alpha.CacheServiceClient.create_cached_content", + "method": { + "fullName": "google.ai.generativelanguage.v1alpha.CacheService.CreateCachedContent", + "service": { + "fullName": "google.ai.generativelanguage.v1alpha.CacheService", + "shortName": "CacheService" + }, + "shortName": "CreateCachedContent" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1alpha.types.CreateCachedContentRequest" + }, + { + "name": "cached_content", + "type": "google.ai.generativelanguage_v1alpha.types.CachedContent" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.ai.generativelanguage_v1alpha.types.CachedContent", + "shortName": "create_cached_content" + }, + "description": "Sample for CreateCachedContent", + "file": "generativelanguage_v1alpha_generated_cache_service_create_cached_content_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1alpha_generated_CacheService_CreateCachedContent_sync", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1alpha_generated_cache_service_create_cached_content_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.ai.generativelanguage_v1alpha.CacheServiceAsyncClient", + "shortName": "CacheServiceAsyncClient" + }, + "fullName": "google.ai.generativelanguage_v1alpha.CacheServiceAsyncClient.delete_cached_content", + "method": { + "fullName": "google.ai.generativelanguage.v1alpha.CacheService.DeleteCachedContent", + "service": { + "fullName": "google.ai.generativelanguage.v1alpha.CacheService", + "shortName": "CacheService" + }, + "shortName": "DeleteCachedContent" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1alpha.types.DeleteCachedContentRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "shortName": "delete_cached_content" + }, + "description": "Sample for DeleteCachedContent", + "file": 
"generativelanguage_v1alpha_generated_cache_service_delete_cached_content_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1alpha_generated_CacheService_DeleteCachedContent_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1alpha_generated_cache_service_delete_cached_content_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.ai.generativelanguage_v1alpha.CacheServiceClient", + "shortName": "CacheServiceClient" + }, + "fullName": "google.ai.generativelanguage_v1alpha.CacheServiceClient.delete_cached_content", + "method": { + "fullName": "google.ai.generativelanguage.v1alpha.CacheService.DeleteCachedContent", + "service": { + "fullName": "google.ai.generativelanguage.v1alpha.CacheService", + "shortName": "CacheService" + }, + "shortName": "DeleteCachedContent" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1alpha.types.DeleteCachedContentRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "shortName": "delete_cached_content" + }, + "description": "Sample for DeleteCachedContent", + "file": "generativelanguage_v1alpha_generated_cache_service_delete_cached_content_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1alpha_generated_CacheService_DeleteCachedContent_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1alpha_generated_cache_service_delete_cached_content_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.ai.generativelanguage_v1alpha.CacheServiceAsyncClient", + "shortName": "CacheServiceAsyncClient" + }, + "fullName": "google.ai.generativelanguage_v1alpha.CacheServiceAsyncClient.get_cached_content", + "method": { + "fullName": "google.ai.generativelanguage.v1alpha.CacheService.GetCachedContent", + "service": { + "fullName": "google.ai.generativelanguage.v1alpha.CacheService", + "shortName": "CacheService" + }, + "shortName": "GetCachedContent" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1alpha.types.GetCachedContentRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.ai.generativelanguage_v1alpha.types.CachedContent", + "shortName": "get_cached_content" + }, + "description": "Sample for GetCachedContent", + "file": 
"generativelanguage_v1alpha_generated_cache_service_get_cached_content_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1alpha_generated_CacheService_GetCachedContent_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1alpha_generated_cache_service_get_cached_content_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.ai.generativelanguage_v1alpha.CacheServiceClient", + "shortName": "CacheServiceClient" + }, + "fullName": "google.ai.generativelanguage_v1alpha.CacheServiceClient.get_cached_content", + "method": { + "fullName": "google.ai.generativelanguage.v1alpha.CacheService.GetCachedContent", + "service": { + "fullName": "google.ai.generativelanguage.v1alpha.CacheService", + "shortName": "CacheService" + }, + "shortName": "GetCachedContent" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1alpha.types.GetCachedContentRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.ai.generativelanguage_v1alpha.types.CachedContent", + "shortName": "get_cached_content" + }, + "description": "Sample for GetCachedContent", + "file": "generativelanguage_v1alpha_generated_cache_service_get_cached_content_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1alpha_generated_CacheService_GetCachedContent_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1alpha_generated_cache_service_get_cached_content_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.ai.generativelanguage_v1alpha.CacheServiceAsyncClient", + "shortName": "CacheServiceAsyncClient" + }, + "fullName": "google.ai.generativelanguage_v1alpha.CacheServiceAsyncClient.list_cached_contents", + "method": { + "fullName": "google.ai.generativelanguage.v1alpha.CacheService.ListCachedContents", + "service": { + "fullName": "google.ai.generativelanguage.v1alpha.CacheService", + "shortName": "CacheService" + }, + "shortName": "ListCachedContents" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1alpha.types.ListCachedContentsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.ai.generativelanguage_v1alpha.services.cache_service.pagers.ListCachedContentsAsyncPager", + "shortName": 
"list_cached_contents" + }, + "description": "Sample for ListCachedContents", + "file": "generativelanguage_v1alpha_generated_cache_service_list_cached_contents_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1alpha_generated_CacheService_ListCachedContents_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1alpha_generated_cache_service_list_cached_contents_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.ai.generativelanguage_v1alpha.CacheServiceClient", + "shortName": "CacheServiceClient" + }, + "fullName": "google.ai.generativelanguage_v1alpha.CacheServiceClient.list_cached_contents", + "method": { + "fullName": "google.ai.generativelanguage.v1alpha.CacheService.ListCachedContents", + "service": { + "fullName": "google.ai.generativelanguage.v1alpha.CacheService", + "shortName": "CacheService" + }, + "shortName": "ListCachedContents" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1alpha.types.ListCachedContentsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.ai.generativelanguage_v1alpha.services.cache_service.pagers.ListCachedContentsPager", + "shortName": "list_cached_contents" + }, + "description": "Sample for ListCachedContents", + "file": "generativelanguage_v1alpha_generated_cache_service_list_cached_contents_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1alpha_generated_CacheService_ListCachedContents_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1alpha_generated_cache_service_list_cached_contents_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.ai.generativelanguage_v1alpha.CacheServiceAsyncClient", + "shortName": "CacheServiceAsyncClient" + }, + "fullName": "google.ai.generativelanguage_v1alpha.CacheServiceAsyncClient.update_cached_content", + "method": { + "fullName": "google.ai.generativelanguage.v1alpha.CacheService.UpdateCachedContent", + "service": { + "fullName": "google.ai.generativelanguage.v1alpha.CacheService", + "shortName": "CacheService" + }, + "shortName": "UpdateCachedContent" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1alpha.types.UpdateCachedContentRequest" + }, + { + "name": "cached_content", + "type": "google.ai.generativelanguage_v1alpha.types.CachedContent" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": 
"google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.ai.generativelanguage_v1alpha.types.CachedContent", + "shortName": "update_cached_content" + }, + "description": "Sample for UpdateCachedContent", + "file": "generativelanguage_v1alpha_generated_cache_service_update_cached_content_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1alpha_generated_CacheService_UpdateCachedContent_async", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1alpha_generated_cache_service_update_cached_content_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.ai.generativelanguage_v1alpha.CacheServiceClient", + "shortName": "CacheServiceClient" + }, + "fullName": "google.ai.generativelanguage_v1alpha.CacheServiceClient.update_cached_content", + "method": { + "fullName": "google.ai.generativelanguage.v1alpha.CacheService.UpdateCachedContent", + "service": { + "fullName": "google.ai.generativelanguage.v1alpha.CacheService", + "shortName": "CacheService" + }, + "shortName": "UpdateCachedContent" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1alpha.types.UpdateCachedContentRequest" + }, + { + "name": "cached_content", + "type": "google.ai.generativelanguage_v1alpha.types.CachedContent" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.ai.generativelanguage_v1alpha.types.CachedContent", + "shortName": "update_cached_content" + }, + "description": "Sample for UpdateCachedContent", + "file": "generativelanguage_v1alpha_generated_cache_service_update_cached_content_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1alpha_generated_CacheService_UpdateCachedContent_sync", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1alpha_generated_cache_service_update_cached_content_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.ai.generativelanguage_v1alpha.DiscussServiceAsyncClient", + "shortName": "DiscussServiceAsyncClient" + }, + "fullName": "google.ai.generativelanguage_v1alpha.DiscussServiceAsyncClient.count_message_tokens", + "method": { + "fullName": "google.ai.generativelanguage.v1alpha.DiscussService.CountMessageTokens", + "service": { + "fullName": "google.ai.generativelanguage.v1alpha.DiscussService", + 
"shortName": "DiscussService" + }, + "shortName": "CountMessageTokens" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1alpha.types.CountMessageTokensRequest" + }, + { + "name": "model", + "type": "str" + }, + { + "name": "prompt", + "type": "google.ai.generativelanguage_v1alpha.types.MessagePrompt" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.ai.generativelanguage_v1alpha.types.CountMessageTokensResponse", + "shortName": "count_message_tokens" + }, + "description": "Sample for CountMessageTokens", + "file": "generativelanguage_v1alpha_generated_discuss_service_count_message_tokens_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1alpha_generated_DiscussService_CountMessageTokens_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1alpha_generated_discuss_service_count_message_tokens_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.ai.generativelanguage_v1alpha.DiscussServiceClient", + "shortName": "DiscussServiceClient" + }, + "fullName": "google.ai.generativelanguage_v1alpha.DiscussServiceClient.count_message_tokens", + "method": { + "fullName": "google.ai.generativelanguage.v1alpha.DiscussService.CountMessageTokens", + "service": { + "fullName": "google.ai.generativelanguage.v1alpha.DiscussService", + "shortName": "DiscussService" + }, + "shortName": "CountMessageTokens" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1alpha.types.CountMessageTokensRequest" + }, + { + "name": "model", + "type": "str" + }, + { + "name": "prompt", + "type": "google.ai.generativelanguage_v1alpha.types.MessagePrompt" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.ai.generativelanguage_v1alpha.types.CountMessageTokensResponse", + "shortName": "count_message_tokens" + }, + "description": "Sample for CountMessageTokens", + "file": "generativelanguage_v1alpha_generated_discuss_service_count_message_tokens_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1alpha_generated_DiscussService_CountMessageTokens_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1alpha_generated_discuss_service_count_message_tokens_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": 
"google.ai.generativelanguage_v1alpha.DiscussServiceAsyncClient", + "shortName": "DiscussServiceAsyncClient" + }, + "fullName": "google.ai.generativelanguage_v1alpha.DiscussServiceAsyncClient.generate_message", + "method": { + "fullName": "google.ai.generativelanguage.v1alpha.DiscussService.GenerateMessage", + "service": { + "fullName": "google.ai.generativelanguage.v1alpha.DiscussService", + "shortName": "DiscussService" + }, + "shortName": "GenerateMessage" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1alpha.types.GenerateMessageRequest" + }, + { + "name": "model", + "type": "str" + }, + { + "name": "prompt", + "type": "google.ai.generativelanguage_v1alpha.types.MessagePrompt" + }, + { + "name": "temperature", + "type": "float" + }, + { + "name": "candidate_count", + "type": "int" + }, + { + "name": "top_p", + "type": "float" + }, + { + "name": "top_k", + "type": "int" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.ai.generativelanguage_v1alpha.types.GenerateMessageResponse", + "shortName": "generate_message" + }, + "description": "Sample for GenerateMessage", + "file": "generativelanguage_v1alpha_generated_discuss_service_generate_message_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1alpha_generated_DiscussService_GenerateMessage_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1alpha_generated_discuss_service_generate_message_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.ai.generativelanguage_v1alpha.DiscussServiceClient", + "shortName": "DiscussServiceClient" + }, + "fullName": "google.ai.generativelanguage_v1alpha.DiscussServiceClient.generate_message", + "method": { + "fullName": "google.ai.generativelanguage.v1alpha.DiscussService.GenerateMessage", + "service": { + "fullName": "google.ai.generativelanguage.v1alpha.DiscussService", + "shortName": "DiscussService" + }, + "shortName": "GenerateMessage" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1alpha.types.GenerateMessageRequest" + }, + { + "name": "model", + "type": "str" + }, + { + "name": "prompt", + "type": "google.ai.generativelanguage_v1alpha.types.MessagePrompt" + }, + { + "name": "temperature", + "type": "float" + }, + { + "name": "candidate_count", + "type": "int" + }, + { + "name": "top_p", + "type": "float" + }, + { + "name": "top_k", + "type": "int" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.ai.generativelanguage_v1alpha.types.GenerateMessageResponse", + "shortName": "generate_message" + }, + "description": "Sample for GenerateMessage", + "file": "generativelanguage_v1alpha_generated_discuss_service_generate_message_sync.py", + "language": "PYTHON", + "origin": 
"API_DEFINITION", + "regionTag": "generativelanguage_v1alpha_generated_DiscussService_GenerateMessage_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1alpha_generated_discuss_service_generate_message_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.ai.generativelanguage_v1alpha.FileServiceAsyncClient", + "shortName": "FileServiceAsyncClient" + }, + "fullName": "google.ai.generativelanguage_v1alpha.FileServiceAsyncClient.create_file", + "method": { + "fullName": "google.ai.generativelanguage.v1alpha.FileService.CreateFile", + "service": { + "fullName": "google.ai.generativelanguage.v1alpha.FileService", + "shortName": "FileService" + }, + "shortName": "CreateFile" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1alpha.types.CreateFileRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.ai.generativelanguage_v1alpha.types.CreateFileResponse", + "shortName": "create_file" + }, + "description": "Sample for CreateFile", + "file": "generativelanguage_v1alpha_generated_file_service_create_file_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1alpha_generated_FileService_CreateFile_async", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1alpha_generated_file_service_create_file_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.ai.generativelanguage_v1alpha.FileServiceClient", + "shortName": "FileServiceClient" + }, + "fullName": "google.ai.generativelanguage_v1alpha.FileServiceClient.create_file", + "method": { + "fullName": "google.ai.generativelanguage.v1alpha.FileService.CreateFile", + "service": { + "fullName": "google.ai.generativelanguage.v1alpha.FileService", + "shortName": "FileService" + }, + "shortName": "CreateFile" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1alpha.types.CreateFileRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.ai.generativelanguage_v1alpha.types.CreateFileResponse", + "shortName": "create_file" + }, + "description": "Sample for CreateFile", + "file": "generativelanguage_v1alpha_generated_file_service_create_file_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1alpha_generated_FileService_CreateFile_sync", + "segments": [ + { + 
"end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1alpha_generated_file_service_create_file_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.ai.generativelanguage_v1alpha.FileServiceAsyncClient", + "shortName": "FileServiceAsyncClient" + }, + "fullName": "google.ai.generativelanguage_v1alpha.FileServiceAsyncClient.delete_file", + "method": { + "fullName": "google.ai.generativelanguage.v1alpha.FileService.DeleteFile", + "service": { + "fullName": "google.ai.generativelanguage.v1alpha.FileService", + "shortName": "FileService" + }, + "shortName": "DeleteFile" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1alpha.types.DeleteFileRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "shortName": "delete_file" + }, + "description": "Sample for DeleteFile", + "file": "generativelanguage_v1alpha_generated_file_service_delete_file_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1alpha_generated_FileService_DeleteFile_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1alpha_generated_file_service_delete_file_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.ai.generativelanguage_v1alpha.FileServiceClient", + "shortName": "FileServiceClient" + }, + "fullName": "google.ai.generativelanguage_v1alpha.FileServiceClient.delete_file", + "method": { + "fullName": "google.ai.generativelanguage.v1alpha.FileService.DeleteFile", + "service": { + "fullName": "google.ai.generativelanguage.v1alpha.FileService", + "shortName": "FileService" + }, + "shortName": "DeleteFile" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1alpha.types.DeleteFileRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "shortName": "delete_file" + }, + "description": "Sample for DeleteFile", + "file": "generativelanguage_v1alpha_generated_file_service_delete_file_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1alpha_generated_FileService_DeleteFile_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, 
+ { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1alpha_generated_file_service_delete_file_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.ai.generativelanguage_v1alpha.FileServiceAsyncClient", + "shortName": "FileServiceAsyncClient" + }, + "fullName": "google.ai.generativelanguage_v1alpha.FileServiceAsyncClient.get_file", + "method": { + "fullName": "google.ai.generativelanguage.v1alpha.FileService.GetFile", + "service": { + "fullName": "google.ai.generativelanguage.v1alpha.FileService", + "shortName": "FileService" + }, + "shortName": "GetFile" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1alpha.types.GetFileRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.ai.generativelanguage_v1alpha.types.File", + "shortName": "get_file" + }, + "description": "Sample for GetFile", + "file": "generativelanguage_v1alpha_generated_file_service_get_file_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1alpha_generated_FileService_GetFile_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1alpha_generated_file_service_get_file_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.ai.generativelanguage_v1alpha.FileServiceClient", + "shortName": "FileServiceClient" + }, + "fullName": "google.ai.generativelanguage_v1alpha.FileServiceClient.get_file", + "method": { + "fullName": "google.ai.generativelanguage.v1alpha.FileService.GetFile", + "service": { + "fullName": "google.ai.generativelanguage.v1alpha.FileService", + "shortName": "FileService" + }, + "shortName": "GetFile" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1alpha.types.GetFileRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.ai.generativelanguage_v1alpha.types.File", + "shortName": "get_file" + }, + "description": "Sample for GetFile", + "file": "generativelanguage_v1alpha_generated_file_service_get_file_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1alpha_generated_FileService_GetFile_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": 
"generativelanguage_v1alpha_generated_file_service_get_file_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.ai.generativelanguage_v1alpha.FileServiceAsyncClient", + "shortName": "FileServiceAsyncClient" + }, + "fullName": "google.ai.generativelanguage_v1alpha.FileServiceAsyncClient.list_files", + "method": { + "fullName": "google.ai.generativelanguage.v1alpha.FileService.ListFiles", + "service": { + "fullName": "google.ai.generativelanguage.v1alpha.FileService", + "shortName": "FileService" + }, + "shortName": "ListFiles" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1alpha.types.ListFilesRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.ai.generativelanguage_v1alpha.services.file_service.pagers.ListFilesAsyncPager", + "shortName": "list_files" + }, + "description": "Sample for ListFiles", + "file": "generativelanguage_v1alpha_generated_file_service_list_files_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1alpha_generated_FileService_ListFiles_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1alpha_generated_file_service_list_files_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.ai.generativelanguage_v1alpha.FileServiceClient", + "shortName": "FileServiceClient" + }, + "fullName": "google.ai.generativelanguage_v1alpha.FileServiceClient.list_files", + "method": { + "fullName": "google.ai.generativelanguage.v1alpha.FileService.ListFiles", + "service": { + "fullName": "google.ai.generativelanguage.v1alpha.FileService", + "shortName": "FileService" + }, + "shortName": "ListFiles" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1alpha.types.ListFilesRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.ai.generativelanguage_v1alpha.services.file_service.pagers.ListFilesPager", + "shortName": "list_files" + }, + "description": "Sample for ListFiles", + "file": "generativelanguage_v1alpha_generated_file_service_list_files_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1alpha_generated_FileService_ListFiles_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1alpha_generated_file_service_list_files_sync.py" + }, + { + "canonical": true, + "clientMethod": 
{ + "async": true, + "client": { + "fullName": "google.ai.generativelanguage_v1alpha.GenerativeServiceAsyncClient", + "shortName": "GenerativeServiceAsyncClient" + }, + "fullName": "google.ai.generativelanguage_v1alpha.GenerativeServiceAsyncClient.batch_embed_contents", + "method": { + "fullName": "google.ai.generativelanguage.v1alpha.GenerativeService.BatchEmbedContents", + "service": { + "fullName": "google.ai.generativelanguage.v1alpha.GenerativeService", + "shortName": "GenerativeService" + }, + "shortName": "BatchEmbedContents" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1alpha.types.BatchEmbedContentsRequest" + }, + { + "name": "model", + "type": "str" + }, + { + "name": "requests", + "type": "MutableSequence[google.ai.generativelanguage_v1alpha.types.EmbedContentRequest]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.ai.generativelanguage_v1alpha.types.BatchEmbedContentsResponse", + "shortName": "batch_embed_contents" + }, + "description": "Sample for BatchEmbedContents", + "file": "generativelanguage_v1alpha_generated_generative_service_batch_embed_contents_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1alpha_generated_GenerativeService_BatchEmbedContents_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1alpha_generated_generative_service_batch_embed_contents_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.ai.generativelanguage_v1alpha.GenerativeServiceClient", + "shortName": "GenerativeServiceClient" + }, + "fullName": "google.ai.generativelanguage_v1alpha.GenerativeServiceClient.batch_embed_contents", + "method": { + "fullName": "google.ai.generativelanguage.v1alpha.GenerativeService.BatchEmbedContents", + "service": { + "fullName": "google.ai.generativelanguage.v1alpha.GenerativeService", + "shortName": "GenerativeService" + }, + "shortName": "BatchEmbedContents" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1alpha.types.BatchEmbedContentsRequest" + }, + { + "name": "model", + "type": "str" + }, + { + "name": "requests", + "type": "MutableSequence[google.ai.generativelanguage_v1alpha.types.EmbedContentRequest]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.ai.generativelanguage_v1alpha.types.BatchEmbedContentsResponse", + "shortName": "batch_embed_contents" + }, + "description": "Sample for BatchEmbedContents", + "file": "generativelanguage_v1alpha_generated_generative_service_batch_embed_contents_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1alpha_generated_GenerativeService_BatchEmbedContents_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 
55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1alpha_generated_generative_service_batch_embed_contents_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.ai.generativelanguage_v1alpha.GenerativeServiceAsyncClient", + "shortName": "GenerativeServiceAsyncClient" + }, + "fullName": "google.ai.generativelanguage_v1alpha.GenerativeServiceAsyncClient.bidi_generate_content", + "method": { + "fullName": "google.ai.generativelanguage.v1alpha.GenerativeService.BidiGenerateContent", + "service": { + "fullName": "google.ai.generativelanguage.v1alpha.GenerativeService", + "shortName": "GenerativeService" + }, + "shortName": "BidiGenerateContent" + }, + "parameters": [ + { + "name": "requests", + "type": "Iterator[google.ai.generativelanguage_v1alpha.types.BidiGenerateContentClientMessage]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "Iterable[google.ai.generativelanguage_v1alpha.types.BidiGenerateContentServerMessage]", + "shortName": "bidi_generate_content" + }, + "description": "Sample for BidiGenerateContent", + "file": "generativelanguage_v1alpha_generated_generative_service_bidi_generate_content_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1alpha_generated_GenerativeService_BidiGenerateContent_async", + "segments": [ + { + "end": 65, + "start": 27, + "type": "FULL" + }, + { + "end": 65, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 58, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 61, + "start": 59, + "type": "REQUEST_EXECUTION" + }, + { + "end": 66, + "start": 62, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1alpha_generated_generative_service_bidi_generate_content_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.ai.generativelanguage_v1alpha.GenerativeServiceClient", + "shortName": "GenerativeServiceClient" + }, + "fullName": "google.ai.generativelanguage_v1alpha.GenerativeServiceClient.bidi_generate_content", + "method": { + "fullName": "google.ai.generativelanguage.v1alpha.GenerativeService.BidiGenerateContent", + "service": { + "fullName": "google.ai.generativelanguage.v1alpha.GenerativeService", + "shortName": "GenerativeService" + }, + "shortName": "BidiGenerateContent" + }, + "parameters": [ + { + "name": "requests", + "type": "Iterator[google.ai.generativelanguage_v1alpha.types.BidiGenerateContentClientMessage]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "Iterable[google.ai.generativelanguage_v1alpha.types.BidiGenerateContentServerMessage]", + "shortName": "bidi_generate_content" + }, + "description": "Sample for BidiGenerateContent", + "file": "generativelanguage_v1alpha_generated_generative_service_bidi_generate_content_sync.py", + "language": 
"PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1alpha_generated_GenerativeService_BidiGenerateContent_sync", + "segments": [ + { + "end": 65, + "start": 27, + "type": "FULL" + }, + { + "end": 65, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 58, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 61, + "start": 59, + "type": "REQUEST_EXECUTION" + }, + { + "end": 66, + "start": 62, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1alpha_generated_generative_service_bidi_generate_content_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.ai.generativelanguage_v1alpha.GenerativeServiceAsyncClient", + "shortName": "GenerativeServiceAsyncClient" + }, + "fullName": "google.ai.generativelanguage_v1alpha.GenerativeServiceAsyncClient.count_tokens", + "method": { + "fullName": "google.ai.generativelanguage.v1alpha.GenerativeService.CountTokens", + "service": { + "fullName": "google.ai.generativelanguage.v1alpha.GenerativeService", + "shortName": "GenerativeService" + }, + "shortName": "CountTokens" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1alpha.types.CountTokensRequest" + }, + { + "name": "model", + "type": "str" + }, + { + "name": "contents", + "type": "MutableSequence[google.ai.generativelanguage_v1alpha.types.Content]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.ai.generativelanguage_v1alpha.types.CountTokensResponse", + "shortName": "count_tokens" + }, + "description": "Sample for CountTokens", + "file": "generativelanguage_v1alpha_generated_generative_service_count_tokens_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1alpha_generated_GenerativeService_CountTokens_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1alpha_generated_generative_service_count_tokens_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.ai.generativelanguage_v1alpha.GenerativeServiceClient", + "shortName": "GenerativeServiceClient" + }, + "fullName": "google.ai.generativelanguage_v1alpha.GenerativeServiceClient.count_tokens", + "method": { + "fullName": "google.ai.generativelanguage.v1alpha.GenerativeService.CountTokens", + "service": { + "fullName": "google.ai.generativelanguage.v1alpha.GenerativeService", + "shortName": "GenerativeService" + }, + "shortName": "CountTokens" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1alpha.types.CountTokensRequest" + }, + { + "name": "model", + "type": "str" + }, + { + "name": "contents", + "type": "MutableSequence[google.ai.generativelanguage_v1alpha.types.Content]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": 
"Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.ai.generativelanguage_v1alpha.types.CountTokensResponse", + "shortName": "count_tokens" + }, + "description": "Sample for CountTokens", + "file": "generativelanguage_v1alpha_generated_generative_service_count_tokens_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1alpha_generated_GenerativeService_CountTokens_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1alpha_generated_generative_service_count_tokens_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.ai.generativelanguage_v1alpha.GenerativeServiceAsyncClient", + "shortName": "GenerativeServiceAsyncClient" + }, + "fullName": "google.ai.generativelanguage_v1alpha.GenerativeServiceAsyncClient.embed_content", + "method": { + "fullName": "google.ai.generativelanguage.v1alpha.GenerativeService.EmbedContent", + "service": { + "fullName": "google.ai.generativelanguage.v1alpha.GenerativeService", + "shortName": "GenerativeService" + }, + "shortName": "EmbedContent" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1alpha.types.EmbedContentRequest" + }, + { + "name": "model", + "type": "str" + }, + { + "name": "content", + "type": "google.ai.generativelanguage_v1alpha.types.Content" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.ai.generativelanguage_v1alpha.types.EmbedContentResponse", + "shortName": "embed_content" + }, + "description": "Sample for EmbedContent", + "file": "generativelanguage_v1alpha_generated_generative_service_embed_content_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1alpha_generated_GenerativeService_EmbedContent_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1alpha_generated_generative_service_embed_content_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.ai.generativelanguage_v1alpha.GenerativeServiceClient", + "shortName": "GenerativeServiceClient" + }, + "fullName": "google.ai.generativelanguage_v1alpha.GenerativeServiceClient.embed_content", + "method": { + "fullName": "google.ai.generativelanguage.v1alpha.GenerativeService.EmbedContent", + "service": { + "fullName": "google.ai.generativelanguage.v1alpha.GenerativeService", + "shortName": "GenerativeService" + }, + "shortName": "EmbedContent" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1alpha.types.EmbedContentRequest" + }, + { + "name": 
"model", + "type": "str" + }, + { + "name": "content", + "type": "google.ai.generativelanguage_v1alpha.types.Content" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.ai.generativelanguage_v1alpha.types.EmbedContentResponse", + "shortName": "embed_content" + }, + "description": "Sample for EmbedContent", + "file": "generativelanguage_v1alpha_generated_generative_service_embed_content_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1alpha_generated_GenerativeService_EmbedContent_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1alpha_generated_generative_service_embed_content_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.ai.generativelanguage_v1alpha.GenerativeServiceAsyncClient", + "shortName": "GenerativeServiceAsyncClient" + }, + "fullName": "google.ai.generativelanguage_v1alpha.GenerativeServiceAsyncClient.generate_answer", + "method": { + "fullName": "google.ai.generativelanguage.v1alpha.GenerativeService.GenerateAnswer", + "service": { + "fullName": "google.ai.generativelanguage.v1alpha.GenerativeService", + "shortName": "GenerativeService" + }, + "shortName": "GenerateAnswer" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1alpha.types.GenerateAnswerRequest" + }, + { + "name": "model", + "type": "str" + }, + { + "name": "contents", + "type": "MutableSequence[google.ai.generativelanguage_v1alpha.types.Content]" + }, + { + "name": "safety_settings", + "type": "MutableSequence[google.ai.generativelanguage_v1alpha.types.SafetySetting]" + }, + { + "name": "answer_style", + "type": "google.ai.generativelanguage_v1alpha.types.GenerateAnswerRequest.AnswerStyle" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.ai.generativelanguage_v1alpha.types.GenerateAnswerResponse", + "shortName": "generate_answer" + }, + "description": "Sample for GenerateAnswer", + "file": "generativelanguage_v1alpha_generated_generative_service_generate_answer_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1alpha_generated_GenerativeService_GenerateAnswer_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1alpha_generated_generative_service_generate_answer_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": 
"google.ai.generativelanguage_v1alpha.GenerativeServiceClient", + "shortName": "GenerativeServiceClient" + }, + "fullName": "google.ai.generativelanguage_v1alpha.GenerativeServiceClient.generate_answer", + "method": { + "fullName": "google.ai.generativelanguage.v1alpha.GenerativeService.GenerateAnswer", + "service": { + "fullName": "google.ai.generativelanguage.v1alpha.GenerativeService", + "shortName": "GenerativeService" + }, + "shortName": "GenerateAnswer" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1alpha.types.GenerateAnswerRequest" + }, + { + "name": "model", + "type": "str" + }, + { + "name": "contents", + "type": "MutableSequence[google.ai.generativelanguage_v1alpha.types.Content]" + }, + { + "name": "safety_settings", + "type": "MutableSequence[google.ai.generativelanguage_v1alpha.types.SafetySetting]" + }, + { + "name": "answer_style", + "type": "google.ai.generativelanguage_v1alpha.types.GenerateAnswerRequest.AnswerStyle" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.ai.generativelanguage_v1alpha.types.GenerateAnswerResponse", + "shortName": "generate_answer" + }, + "description": "Sample for GenerateAnswer", + "file": "generativelanguage_v1alpha_generated_generative_service_generate_answer_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1alpha_generated_GenerativeService_GenerateAnswer_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1alpha_generated_generative_service_generate_answer_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.ai.generativelanguage_v1alpha.GenerativeServiceAsyncClient", + "shortName": "GenerativeServiceAsyncClient" + }, + "fullName": "google.ai.generativelanguage_v1alpha.GenerativeServiceAsyncClient.generate_content", + "method": { + "fullName": "google.ai.generativelanguage.v1alpha.GenerativeService.GenerateContent", + "service": { + "fullName": "google.ai.generativelanguage.v1alpha.GenerativeService", + "shortName": "GenerativeService" + }, + "shortName": "GenerateContent" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1alpha.types.GenerateContentRequest" + }, + { + "name": "model", + "type": "str" + }, + { + "name": "contents", + "type": "MutableSequence[google.ai.generativelanguage_v1alpha.types.Content]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.ai.generativelanguage_v1alpha.types.GenerateContentResponse", + "shortName": "generate_content" + }, + "description": "Sample for GenerateContent", + "file": "generativelanguage_v1alpha_generated_generative_service_generate_content_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": 
"generativelanguage_v1alpha_generated_GenerativeService_GenerateContent_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1alpha_generated_generative_service_generate_content_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.ai.generativelanguage_v1alpha.GenerativeServiceClient", + "shortName": "GenerativeServiceClient" + }, + "fullName": "google.ai.generativelanguage_v1alpha.GenerativeServiceClient.generate_content", + "method": { + "fullName": "google.ai.generativelanguage.v1alpha.GenerativeService.GenerateContent", + "service": { + "fullName": "google.ai.generativelanguage.v1alpha.GenerativeService", + "shortName": "GenerativeService" + }, + "shortName": "GenerateContent" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1alpha.types.GenerateContentRequest" + }, + { + "name": "model", + "type": "str" + }, + { + "name": "contents", + "type": "MutableSequence[google.ai.generativelanguage_v1alpha.types.Content]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.ai.generativelanguage_v1alpha.types.GenerateContentResponse", + "shortName": "generate_content" + }, + "description": "Sample for GenerateContent", + "file": "generativelanguage_v1alpha_generated_generative_service_generate_content_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1alpha_generated_GenerativeService_GenerateContent_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1alpha_generated_generative_service_generate_content_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.ai.generativelanguage_v1alpha.GenerativeServiceAsyncClient", + "shortName": "GenerativeServiceAsyncClient" + }, + "fullName": "google.ai.generativelanguage_v1alpha.GenerativeServiceAsyncClient.stream_generate_content", + "method": { + "fullName": "google.ai.generativelanguage.v1alpha.GenerativeService.StreamGenerateContent", + "service": { + "fullName": "google.ai.generativelanguage.v1alpha.GenerativeService", + "shortName": "GenerativeService" + }, + "shortName": "StreamGenerateContent" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1alpha.types.GenerateContentRequest" + }, + { + "name": "model", + "type": "str" + }, + { + "name": "contents", + "type": "MutableSequence[google.ai.generativelanguage_v1alpha.types.Content]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + 
"type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "Iterable[google.ai.generativelanguage_v1alpha.types.GenerateContentResponse]", + "shortName": "stream_generate_content" + }, + "description": "Sample for StreamGenerateContent", + "file": "generativelanguage_v1alpha_generated_generative_service_stream_generate_content_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1alpha_generated_GenerativeService_StreamGenerateContent_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1alpha_generated_generative_service_stream_generate_content_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.ai.generativelanguage_v1alpha.GenerativeServiceClient", + "shortName": "GenerativeServiceClient" + }, + "fullName": "google.ai.generativelanguage_v1alpha.GenerativeServiceClient.stream_generate_content", + "method": { + "fullName": "google.ai.generativelanguage.v1alpha.GenerativeService.StreamGenerateContent", + "service": { + "fullName": "google.ai.generativelanguage.v1alpha.GenerativeService", + "shortName": "GenerativeService" + }, + "shortName": "StreamGenerateContent" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1alpha.types.GenerateContentRequest" + }, + { + "name": "model", + "type": "str" + }, + { + "name": "contents", + "type": "MutableSequence[google.ai.generativelanguage_v1alpha.types.Content]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "Iterable[google.ai.generativelanguage_v1alpha.types.GenerateContentResponse]", + "shortName": "stream_generate_content" + }, + "description": "Sample for StreamGenerateContent", + "file": "generativelanguage_v1alpha_generated_generative_service_stream_generate_content_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1alpha_generated_GenerativeService_StreamGenerateContent_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1alpha_generated_generative_service_stream_generate_content_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.ai.generativelanguage_v1alpha.ModelServiceAsyncClient", + "shortName": "ModelServiceAsyncClient" + }, + "fullName": "google.ai.generativelanguage_v1alpha.ModelServiceAsyncClient.create_tuned_model", + "method": { + "fullName": "google.ai.generativelanguage.v1alpha.ModelService.CreateTunedModel", + "service": { + "fullName": "google.ai.generativelanguage.v1alpha.ModelService", + "shortName": "ModelService" + }, + 
"shortName": "CreateTunedModel" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1alpha.types.CreateTunedModelRequest" + }, + { + "name": "tuned_model", + "type": "google.ai.generativelanguage_v1alpha.types.TunedModel" + }, + { + "name": "tuned_model_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_tuned_model" + }, + "description": "Sample for CreateTunedModel", + "file": "generativelanguage_v1alpha_generated_model_service_create_tuned_model_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1alpha_generated_ModelService_CreateTunedModel_async", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1alpha_generated_model_service_create_tuned_model_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.ai.generativelanguage_v1alpha.ModelServiceClient", + "shortName": "ModelServiceClient" + }, + "fullName": "google.ai.generativelanguage_v1alpha.ModelServiceClient.create_tuned_model", + "method": { + "fullName": "google.ai.generativelanguage.v1alpha.ModelService.CreateTunedModel", + "service": { + "fullName": "google.ai.generativelanguage.v1alpha.ModelService", + "shortName": "ModelService" + }, + "shortName": "CreateTunedModel" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1alpha.types.CreateTunedModelRequest" + }, + { + "name": "tuned_model", + "type": "google.ai.generativelanguage_v1alpha.types.TunedModel" + }, + { + "name": "tuned_model_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_tuned_model" + }, + "description": "Sample for CreateTunedModel", + "file": "generativelanguage_v1alpha_generated_model_service_create_tuned_model_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1alpha_generated_ModelService_CreateTunedModel_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1alpha_generated_model_service_create_tuned_model_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.ai.generativelanguage_v1alpha.ModelServiceAsyncClient", + "shortName": "ModelServiceAsyncClient" + }, + "fullName": 
"google.ai.generativelanguage_v1alpha.ModelServiceAsyncClient.delete_tuned_model", + "method": { + "fullName": "google.ai.generativelanguage.v1alpha.ModelService.DeleteTunedModel", + "service": { + "fullName": "google.ai.generativelanguage.v1alpha.ModelService", + "shortName": "ModelService" + }, + "shortName": "DeleteTunedModel" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1alpha.types.DeleteTunedModelRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "shortName": "delete_tuned_model" + }, + "description": "Sample for DeleteTunedModel", + "file": "generativelanguage_v1alpha_generated_model_service_delete_tuned_model_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1alpha_generated_ModelService_DeleteTunedModel_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1alpha_generated_model_service_delete_tuned_model_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.ai.generativelanguage_v1alpha.ModelServiceClient", + "shortName": "ModelServiceClient" + }, + "fullName": "google.ai.generativelanguage_v1alpha.ModelServiceClient.delete_tuned_model", + "method": { + "fullName": "google.ai.generativelanguage.v1alpha.ModelService.DeleteTunedModel", + "service": { + "fullName": "google.ai.generativelanguage.v1alpha.ModelService", + "shortName": "ModelService" + }, + "shortName": "DeleteTunedModel" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1alpha.types.DeleteTunedModelRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "shortName": "delete_tuned_model" + }, + "description": "Sample for DeleteTunedModel", + "file": "generativelanguage_v1alpha_generated_model_service_delete_tuned_model_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1alpha_generated_ModelService_DeleteTunedModel_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1alpha_generated_model_service_delete_tuned_model_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.ai.generativelanguage_v1alpha.ModelServiceAsyncClient", + "shortName": "ModelServiceAsyncClient" + }, + "fullName": "google.ai.generativelanguage_v1alpha.ModelServiceAsyncClient.get_model", + "method": { + "fullName": 
"google.ai.generativelanguage.v1alpha.ModelService.GetModel", + "service": { + "fullName": "google.ai.generativelanguage.v1alpha.ModelService", + "shortName": "ModelService" + }, + "shortName": "GetModel" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1alpha.types.GetModelRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.ai.generativelanguage_v1alpha.types.Model", + "shortName": "get_model" + }, + "description": "Sample for GetModel", + "file": "generativelanguage_v1alpha_generated_model_service_get_model_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1alpha_generated_ModelService_GetModel_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1alpha_generated_model_service_get_model_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.ai.generativelanguage_v1alpha.ModelServiceClient", + "shortName": "ModelServiceClient" + }, + "fullName": "google.ai.generativelanguage_v1alpha.ModelServiceClient.get_model", + "method": { + "fullName": "google.ai.generativelanguage.v1alpha.ModelService.GetModel", + "service": { + "fullName": "google.ai.generativelanguage.v1alpha.ModelService", + "shortName": "ModelService" + }, + "shortName": "GetModel" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1alpha.types.GetModelRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.ai.generativelanguage_v1alpha.types.Model", + "shortName": "get_model" + }, + "description": "Sample for GetModel", + "file": "generativelanguage_v1alpha_generated_model_service_get_model_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1alpha_generated_ModelService_GetModel_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1alpha_generated_model_service_get_model_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.ai.generativelanguage_v1alpha.ModelServiceAsyncClient", + "shortName": "ModelServiceAsyncClient" + }, + "fullName": "google.ai.generativelanguage_v1alpha.ModelServiceAsyncClient.get_tuned_model", + "method": { + "fullName": "google.ai.generativelanguage.v1alpha.ModelService.GetTunedModel", + "service": { + 
"fullName": "google.ai.generativelanguage.v1alpha.ModelService", + "shortName": "ModelService" + }, + "shortName": "GetTunedModel" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1alpha.types.GetTunedModelRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.ai.generativelanguage_v1alpha.types.TunedModel", + "shortName": "get_tuned_model" + }, + "description": "Sample for GetTunedModel", + "file": "generativelanguage_v1alpha_generated_model_service_get_tuned_model_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1alpha_generated_ModelService_GetTunedModel_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1alpha_generated_model_service_get_tuned_model_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.ai.generativelanguage_v1alpha.ModelServiceClient", + "shortName": "ModelServiceClient" + }, + "fullName": "google.ai.generativelanguage_v1alpha.ModelServiceClient.get_tuned_model", + "method": { + "fullName": "google.ai.generativelanguage.v1alpha.ModelService.GetTunedModel", + "service": { + "fullName": "google.ai.generativelanguage.v1alpha.ModelService", + "shortName": "ModelService" + }, + "shortName": "GetTunedModel" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1alpha.types.GetTunedModelRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.ai.generativelanguage_v1alpha.types.TunedModel", + "shortName": "get_tuned_model" + }, + "description": "Sample for GetTunedModel", + "file": "generativelanguage_v1alpha_generated_model_service_get_tuned_model_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1alpha_generated_ModelService_GetTunedModel_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1alpha_generated_model_service_get_tuned_model_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.ai.generativelanguage_v1alpha.ModelServiceAsyncClient", + "shortName": "ModelServiceAsyncClient" + }, + "fullName": "google.ai.generativelanguage_v1alpha.ModelServiceAsyncClient.list_models", + "method": { + "fullName": "google.ai.generativelanguage.v1alpha.ModelService.ListModels", + "service": { 
+ "fullName": "google.ai.generativelanguage.v1alpha.ModelService", + "shortName": "ModelService" + }, + "shortName": "ListModels" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1alpha.types.ListModelsRequest" + }, + { + "name": "page_size", + "type": "int" + }, + { + "name": "page_token", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.ai.generativelanguage_v1alpha.services.model_service.pagers.ListModelsAsyncPager", + "shortName": "list_models" + }, + "description": "Sample for ListModels", + "file": "generativelanguage_v1alpha_generated_model_service_list_models_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1alpha_generated_ModelService_ListModels_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1alpha_generated_model_service_list_models_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.ai.generativelanguage_v1alpha.ModelServiceClient", + "shortName": "ModelServiceClient" + }, + "fullName": "google.ai.generativelanguage_v1alpha.ModelServiceClient.list_models", + "method": { + "fullName": "google.ai.generativelanguage.v1alpha.ModelService.ListModels", + "service": { + "fullName": "google.ai.generativelanguage.v1alpha.ModelService", + "shortName": "ModelService" + }, + "shortName": "ListModels" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1alpha.types.ListModelsRequest" + }, + { + "name": "page_size", + "type": "int" + }, + { + "name": "page_token", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.ai.generativelanguage_v1alpha.services.model_service.pagers.ListModelsPager", + "shortName": "list_models" + }, + "description": "Sample for ListModels", + "file": "generativelanguage_v1alpha_generated_model_service_list_models_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1alpha_generated_ModelService_ListModels_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1alpha_generated_model_service_list_models_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.ai.generativelanguage_v1alpha.ModelServiceAsyncClient", + "shortName": "ModelServiceAsyncClient" + }, + "fullName": 
"google.ai.generativelanguage_v1alpha.ModelServiceAsyncClient.list_tuned_models", + "method": { + "fullName": "google.ai.generativelanguage.v1alpha.ModelService.ListTunedModels", + "service": { + "fullName": "google.ai.generativelanguage.v1alpha.ModelService", + "shortName": "ModelService" + }, + "shortName": "ListTunedModels" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1alpha.types.ListTunedModelsRequest" + }, + { + "name": "page_size", + "type": "int" + }, + { + "name": "page_token", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.ai.generativelanguage_v1alpha.services.model_service.pagers.ListTunedModelsAsyncPager", + "shortName": "list_tuned_models" + }, + "description": "Sample for ListTunedModels", + "file": "generativelanguage_v1alpha_generated_model_service_list_tuned_models_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1alpha_generated_ModelService_ListTunedModels_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1alpha_generated_model_service_list_tuned_models_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.ai.generativelanguage_v1alpha.ModelServiceClient", + "shortName": "ModelServiceClient" + }, + "fullName": "google.ai.generativelanguage_v1alpha.ModelServiceClient.list_tuned_models", + "method": { + "fullName": "google.ai.generativelanguage.v1alpha.ModelService.ListTunedModels", + "service": { + "fullName": "google.ai.generativelanguage.v1alpha.ModelService", + "shortName": "ModelService" + }, + "shortName": "ListTunedModels" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1alpha.types.ListTunedModelsRequest" + }, + { + "name": "page_size", + "type": "int" + }, + { + "name": "page_token", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.ai.generativelanguage_v1alpha.services.model_service.pagers.ListTunedModelsPager", + "shortName": "list_tuned_models" + }, + "description": "Sample for ListTunedModels", + "file": "generativelanguage_v1alpha_generated_model_service_list_tuned_models_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1alpha_generated_ModelService_ListTunedModels_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": 
"generativelanguage_v1alpha_generated_model_service_list_tuned_models_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.ai.generativelanguage_v1alpha.ModelServiceAsyncClient", + "shortName": "ModelServiceAsyncClient" + }, + "fullName": "google.ai.generativelanguage_v1alpha.ModelServiceAsyncClient.update_tuned_model", + "method": { + "fullName": "google.ai.generativelanguage.v1alpha.ModelService.UpdateTunedModel", + "service": { + "fullName": "google.ai.generativelanguage.v1alpha.ModelService", + "shortName": "ModelService" + }, + "shortName": "UpdateTunedModel" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1alpha.types.UpdateTunedModelRequest" + }, + { + "name": "tuned_model", + "type": "google.ai.generativelanguage_v1alpha.types.TunedModel" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.ai.generativelanguage_v1alpha.types.TunedModel", + "shortName": "update_tuned_model" + }, + "description": "Sample for UpdateTunedModel", + "file": "generativelanguage_v1alpha_generated_model_service_update_tuned_model_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1alpha_generated_ModelService_UpdateTunedModel_async", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1alpha_generated_model_service_update_tuned_model_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.ai.generativelanguage_v1alpha.ModelServiceClient", + "shortName": "ModelServiceClient" + }, + "fullName": "google.ai.generativelanguage_v1alpha.ModelServiceClient.update_tuned_model", + "method": { + "fullName": "google.ai.generativelanguage.v1alpha.ModelService.UpdateTunedModel", + "service": { + "fullName": "google.ai.generativelanguage.v1alpha.ModelService", + "shortName": "ModelService" + }, + "shortName": "UpdateTunedModel" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1alpha.types.UpdateTunedModelRequest" + }, + { + "name": "tuned_model", + "type": "google.ai.generativelanguage_v1alpha.types.TunedModel" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.ai.generativelanguage_v1alpha.types.TunedModel", + "shortName": "update_tuned_model" + }, + "description": "Sample for UpdateTunedModel", + "file": "generativelanguage_v1alpha_generated_model_service_update_tuned_model_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1alpha_generated_ModelService_UpdateTunedModel_sync", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + 
}, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1alpha_generated_model_service_update_tuned_model_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.ai.generativelanguage_v1alpha.PermissionServiceAsyncClient", + "shortName": "PermissionServiceAsyncClient" + }, + "fullName": "google.ai.generativelanguage_v1alpha.PermissionServiceAsyncClient.create_permission", + "method": { + "fullName": "google.ai.generativelanguage.v1alpha.PermissionService.CreatePermission", + "service": { + "fullName": "google.ai.generativelanguage.v1alpha.PermissionService", + "shortName": "PermissionService" + }, + "shortName": "CreatePermission" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1alpha.types.CreatePermissionRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "permission", + "type": "google.ai.generativelanguage_v1alpha.types.Permission" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.ai.generativelanguage_v1alpha.types.Permission", + "shortName": "create_permission" + }, + "description": "Sample for CreatePermission", + "file": "generativelanguage_v1alpha_generated_permission_service_create_permission_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1alpha_generated_PermissionService_CreatePermission_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1alpha_generated_permission_service_create_permission_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.ai.generativelanguage_v1alpha.PermissionServiceClient", + "shortName": "PermissionServiceClient" + }, + "fullName": "google.ai.generativelanguage_v1alpha.PermissionServiceClient.create_permission", + "method": { + "fullName": "google.ai.generativelanguage.v1alpha.PermissionService.CreatePermission", + "service": { + "fullName": "google.ai.generativelanguage.v1alpha.PermissionService", + "shortName": "PermissionService" + }, + "shortName": "CreatePermission" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1alpha.types.CreatePermissionRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "permission", + "type": "google.ai.generativelanguage_v1alpha.types.Permission" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.ai.generativelanguage_v1alpha.types.Permission", + "shortName": "create_permission" + }, + "description": 
"Sample for CreatePermission", + "file": "generativelanguage_v1alpha_generated_permission_service_create_permission_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1alpha_generated_PermissionService_CreatePermission_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1alpha_generated_permission_service_create_permission_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.ai.generativelanguage_v1alpha.PermissionServiceAsyncClient", + "shortName": "PermissionServiceAsyncClient" + }, + "fullName": "google.ai.generativelanguage_v1alpha.PermissionServiceAsyncClient.delete_permission", + "method": { + "fullName": "google.ai.generativelanguage.v1alpha.PermissionService.DeletePermission", + "service": { + "fullName": "google.ai.generativelanguage.v1alpha.PermissionService", + "shortName": "PermissionService" + }, + "shortName": "DeletePermission" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1alpha.types.DeletePermissionRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "shortName": "delete_permission" + }, + "description": "Sample for DeletePermission", + "file": "generativelanguage_v1alpha_generated_permission_service_delete_permission_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1alpha_generated_PermissionService_DeletePermission_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1alpha_generated_permission_service_delete_permission_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.ai.generativelanguage_v1alpha.PermissionServiceClient", + "shortName": "PermissionServiceClient" + }, + "fullName": "google.ai.generativelanguage_v1alpha.PermissionServiceClient.delete_permission", + "method": { + "fullName": "google.ai.generativelanguage.v1alpha.PermissionService.DeletePermission", + "service": { + "fullName": "google.ai.generativelanguage.v1alpha.PermissionService", + "shortName": "PermissionService" + }, + "shortName": "DeletePermission" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1alpha.types.DeletePermissionRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "shortName": "delete_permission" + }, + "description": "Sample for 
DeletePermission", + "file": "generativelanguage_v1alpha_generated_permission_service_delete_permission_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1alpha_generated_PermissionService_DeletePermission_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1alpha_generated_permission_service_delete_permission_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.ai.generativelanguage_v1alpha.PermissionServiceAsyncClient", + "shortName": "PermissionServiceAsyncClient" + }, + "fullName": "google.ai.generativelanguage_v1alpha.PermissionServiceAsyncClient.get_permission", + "method": { + "fullName": "google.ai.generativelanguage.v1alpha.PermissionService.GetPermission", + "service": { + "fullName": "google.ai.generativelanguage.v1alpha.PermissionService", + "shortName": "PermissionService" + }, + "shortName": "GetPermission" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1alpha.types.GetPermissionRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.ai.generativelanguage_v1alpha.types.Permission", + "shortName": "get_permission" + }, + "description": "Sample for GetPermission", + "file": "generativelanguage_v1alpha_generated_permission_service_get_permission_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1alpha_generated_PermissionService_GetPermission_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1alpha_generated_permission_service_get_permission_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.ai.generativelanguage_v1alpha.PermissionServiceClient", + "shortName": "PermissionServiceClient" + }, + "fullName": "google.ai.generativelanguage_v1alpha.PermissionServiceClient.get_permission", + "method": { + "fullName": "google.ai.generativelanguage.v1alpha.PermissionService.GetPermission", + "service": { + "fullName": "google.ai.generativelanguage.v1alpha.PermissionService", + "shortName": "PermissionService" + }, + "shortName": "GetPermission" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1alpha.types.GetPermissionRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": 
"google.ai.generativelanguage_v1alpha.types.Permission", + "shortName": "get_permission" + }, + "description": "Sample for GetPermission", + "file": "generativelanguage_v1alpha_generated_permission_service_get_permission_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1alpha_generated_PermissionService_GetPermission_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1alpha_generated_permission_service_get_permission_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.ai.generativelanguage_v1alpha.PermissionServiceAsyncClient", + "shortName": "PermissionServiceAsyncClient" + }, + "fullName": "google.ai.generativelanguage_v1alpha.PermissionServiceAsyncClient.list_permissions", + "method": { + "fullName": "google.ai.generativelanguage.v1alpha.PermissionService.ListPermissions", + "service": { + "fullName": "google.ai.generativelanguage.v1alpha.PermissionService", + "shortName": "PermissionService" + }, + "shortName": "ListPermissions" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1alpha.types.ListPermissionsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.ai.generativelanguage_v1alpha.services.permission_service.pagers.ListPermissionsAsyncPager", + "shortName": "list_permissions" + }, + "description": "Sample for ListPermissions", + "file": "generativelanguage_v1alpha_generated_permission_service_list_permissions_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1alpha_generated_PermissionService_ListPermissions_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1alpha_generated_permission_service_list_permissions_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.ai.generativelanguage_v1alpha.PermissionServiceClient", + "shortName": "PermissionServiceClient" + }, + "fullName": "google.ai.generativelanguage_v1alpha.PermissionServiceClient.list_permissions", + "method": { + "fullName": "google.ai.generativelanguage.v1alpha.PermissionService.ListPermissions", + "service": { + "fullName": "google.ai.generativelanguage.v1alpha.PermissionService", + "shortName": "PermissionService" + }, + "shortName": "ListPermissions" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1alpha.types.ListPermissionsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": 
"google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.ai.generativelanguage_v1alpha.services.permission_service.pagers.ListPermissionsPager", + "shortName": "list_permissions" + }, + "description": "Sample for ListPermissions", + "file": "generativelanguage_v1alpha_generated_permission_service_list_permissions_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1alpha_generated_PermissionService_ListPermissions_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1alpha_generated_permission_service_list_permissions_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.ai.generativelanguage_v1alpha.PermissionServiceAsyncClient", + "shortName": "PermissionServiceAsyncClient" + }, + "fullName": "google.ai.generativelanguage_v1alpha.PermissionServiceAsyncClient.transfer_ownership", + "method": { + "fullName": "google.ai.generativelanguage.v1alpha.PermissionService.TransferOwnership", + "service": { + "fullName": "google.ai.generativelanguage.v1alpha.PermissionService", + "shortName": "PermissionService" + }, + "shortName": "TransferOwnership" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1alpha.types.TransferOwnershipRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.ai.generativelanguage_v1alpha.types.TransferOwnershipResponse", + "shortName": "transfer_ownership" + }, + "description": "Sample for TransferOwnership", + "file": "generativelanguage_v1alpha_generated_permission_service_transfer_ownership_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1alpha_generated_PermissionService_TransferOwnership_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1alpha_generated_permission_service_transfer_ownership_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.ai.generativelanguage_v1alpha.PermissionServiceClient", + "shortName": "PermissionServiceClient" + }, + "fullName": "google.ai.generativelanguage_v1alpha.PermissionServiceClient.transfer_ownership", + "method": { + "fullName": "google.ai.generativelanguage.v1alpha.PermissionService.TransferOwnership", + "service": { + "fullName": "google.ai.generativelanguage.v1alpha.PermissionService", + "shortName": "PermissionService" + }, + "shortName": "TransferOwnership" + }, + "parameters": [ + { + "name": 
"request", + "type": "google.ai.generativelanguage_v1alpha.types.TransferOwnershipRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.ai.generativelanguage_v1alpha.types.TransferOwnershipResponse", + "shortName": "transfer_ownership" + }, + "description": "Sample for TransferOwnership", + "file": "generativelanguage_v1alpha_generated_permission_service_transfer_ownership_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1alpha_generated_PermissionService_TransferOwnership_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1alpha_generated_permission_service_transfer_ownership_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.ai.generativelanguage_v1alpha.PermissionServiceAsyncClient", + "shortName": "PermissionServiceAsyncClient" + }, + "fullName": "google.ai.generativelanguage_v1alpha.PermissionServiceAsyncClient.update_permission", + "method": { + "fullName": "google.ai.generativelanguage.v1alpha.PermissionService.UpdatePermission", + "service": { + "fullName": "google.ai.generativelanguage.v1alpha.PermissionService", + "shortName": "PermissionService" + }, + "shortName": "UpdatePermission" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1alpha.types.UpdatePermissionRequest" + }, + { + "name": "permission", + "type": "google.ai.generativelanguage_v1alpha.types.Permission" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.ai.generativelanguage_v1alpha.types.Permission", + "shortName": "update_permission" + }, + "description": "Sample for UpdatePermission", + "file": "generativelanguage_v1alpha_generated_permission_service_update_permission_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1alpha_generated_PermissionService_UpdatePermission_async", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1alpha_generated_permission_service_update_permission_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.ai.generativelanguage_v1alpha.PermissionServiceClient", + "shortName": "PermissionServiceClient" + }, + "fullName": "google.ai.generativelanguage_v1alpha.PermissionServiceClient.update_permission", + "method": { + "fullName": 
"google.ai.generativelanguage.v1alpha.PermissionService.UpdatePermission", + "service": { + "fullName": "google.ai.generativelanguage.v1alpha.PermissionService", + "shortName": "PermissionService" + }, + "shortName": "UpdatePermission" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1alpha.types.UpdatePermissionRequest" + }, + { + "name": "permission", + "type": "google.ai.generativelanguage_v1alpha.types.Permission" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.ai.generativelanguage_v1alpha.types.Permission", + "shortName": "update_permission" + }, + "description": "Sample for UpdatePermission", + "file": "generativelanguage_v1alpha_generated_permission_service_update_permission_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1alpha_generated_PermissionService_UpdatePermission_sync", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1alpha_generated_permission_service_update_permission_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.ai.generativelanguage_v1alpha.PredictionServiceAsyncClient", + "shortName": "PredictionServiceAsyncClient" + }, + "fullName": "google.ai.generativelanguage_v1alpha.PredictionServiceAsyncClient.predict", + "method": { + "fullName": "google.ai.generativelanguage.v1alpha.PredictionService.Predict", + "service": { + "fullName": "google.ai.generativelanguage.v1alpha.PredictionService", + "shortName": "PredictionService" + }, + "shortName": "Predict" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1alpha.types.PredictRequest" + }, + { + "name": "model", + "type": "str" + }, + { + "name": "instances", + "type": "MutableSequence[google.protobuf.struct_pb2.Value]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.ai.generativelanguage_v1alpha.types.PredictResponse", + "shortName": "predict" + }, + "description": "Sample for Predict", + "file": "generativelanguage_v1alpha_generated_prediction_service_predict_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1alpha_generated_PredictionService_Predict_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1alpha_generated_prediction_service_predict_async.py" + }, + { 
+ "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.ai.generativelanguage_v1alpha.PredictionServiceClient", + "shortName": "PredictionServiceClient" + }, + "fullName": "google.ai.generativelanguage_v1alpha.PredictionServiceClient.predict", + "method": { + "fullName": "google.ai.generativelanguage.v1alpha.PredictionService.Predict", + "service": { + "fullName": "google.ai.generativelanguage.v1alpha.PredictionService", + "shortName": "PredictionService" + }, + "shortName": "Predict" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1alpha.types.PredictRequest" + }, + { + "name": "model", + "type": "str" + }, + { + "name": "instances", + "type": "MutableSequence[google.protobuf.struct_pb2.Value]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.ai.generativelanguage_v1alpha.types.PredictResponse", + "shortName": "predict" + }, + "description": "Sample for Predict", + "file": "generativelanguage_v1alpha_generated_prediction_service_predict_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1alpha_generated_PredictionService_Predict_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1alpha_generated_prediction_service_predict_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.ai.generativelanguage_v1alpha.RetrieverServiceAsyncClient", + "shortName": "RetrieverServiceAsyncClient" + }, + "fullName": "google.ai.generativelanguage_v1alpha.RetrieverServiceAsyncClient.batch_create_chunks", + "method": { + "fullName": "google.ai.generativelanguage.v1alpha.RetrieverService.BatchCreateChunks", + "service": { + "fullName": "google.ai.generativelanguage.v1alpha.RetrieverService", + "shortName": "RetrieverService" + }, + "shortName": "BatchCreateChunks" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1alpha.types.BatchCreateChunksRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.ai.generativelanguage_v1alpha.types.BatchCreateChunksResponse", + "shortName": "batch_create_chunks" + }, + "description": "Sample for BatchCreateChunks", + "file": "generativelanguage_v1alpha_generated_retriever_service_batch_create_chunks_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1alpha_generated_RetrieverService_BatchCreateChunks_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": 
"RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1alpha_generated_retriever_service_batch_create_chunks_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.ai.generativelanguage_v1alpha.RetrieverServiceClient", + "shortName": "RetrieverServiceClient" + }, + "fullName": "google.ai.generativelanguage_v1alpha.RetrieverServiceClient.batch_create_chunks", + "method": { + "fullName": "google.ai.generativelanguage.v1alpha.RetrieverService.BatchCreateChunks", + "service": { + "fullName": "google.ai.generativelanguage.v1alpha.RetrieverService", + "shortName": "RetrieverService" + }, + "shortName": "BatchCreateChunks" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1alpha.types.BatchCreateChunksRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.ai.generativelanguage_v1alpha.types.BatchCreateChunksResponse", + "shortName": "batch_create_chunks" + }, + "description": "Sample for BatchCreateChunks", + "file": "generativelanguage_v1alpha_generated_retriever_service_batch_create_chunks_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1alpha_generated_RetrieverService_BatchCreateChunks_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1alpha_generated_retriever_service_batch_create_chunks_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.ai.generativelanguage_v1alpha.RetrieverServiceAsyncClient", + "shortName": "RetrieverServiceAsyncClient" + }, + "fullName": "google.ai.generativelanguage_v1alpha.RetrieverServiceAsyncClient.batch_delete_chunks", + "method": { + "fullName": "google.ai.generativelanguage.v1alpha.RetrieverService.BatchDeleteChunks", + "service": { + "fullName": "google.ai.generativelanguage.v1alpha.RetrieverService", + "shortName": "RetrieverService" + }, + "shortName": "BatchDeleteChunks" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1alpha.types.BatchDeleteChunksRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "shortName": "batch_delete_chunks" + }, + "description": "Sample for BatchDeleteChunks", + "file": "generativelanguage_v1alpha_generated_retriever_service_batch_delete_chunks_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1alpha_generated_RetrieverService_BatchDeleteChunks_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "type": "RESPONSE_HANDLING" 
+ } + ], + "title": "generativelanguage_v1alpha_generated_retriever_service_batch_delete_chunks_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.ai.generativelanguage_v1alpha.RetrieverServiceClient", + "shortName": "RetrieverServiceClient" + }, + "fullName": "google.ai.generativelanguage_v1alpha.RetrieverServiceClient.batch_delete_chunks", + "method": { + "fullName": "google.ai.generativelanguage.v1alpha.RetrieverService.BatchDeleteChunks", + "service": { + "fullName": "google.ai.generativelanguage.v1alpha.RetrieverService", + "shortName": "RetrieverService" + }, + "shortName": "BatchDeleteChunks" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1alpha.types.BatchDeleteChunksRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "shortName": "batch_delete_chunks" + }, + "description": "Sample for BatchDeleteChunks", + "file": "generativelanguage_v1alpha_generated_retriever_service_batch_delete_chunks_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1alpha_generated_RetrieverService_BatchDeleteChunks_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1alpha_generated_retriever_service_batch_delete_chunks_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.ai.generativelanguage_v1alpha.RetrieverServiceAsyncClient", + "shortName": "RetrieverServiceAsyncClient" + }, + "fullName": "google.ai.generativelanguage_v1alpha.RetrieverServiceAsyncClient.batch_update_chunks", + "method": { + "fullName": "google.ai.generativelanguage.v1alpha.RetrieverService.BatchUpdateChunks", + "service": { + "fullName": "google.ai.generativelanguage.v1alpha.RetrieverService", + "shortName": "RetrieverService" + }, + "shortName": "BatchUpdateChunks" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1alpha.types.BatchUpdateChunksRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.ai.generativelanguage_v1alpha.types.BatchUpdateChunksResponse", + "shortName": "batch_update_chunks" + }, + "description": "Sample for BatchUpdateChunks", + "file": "generativelanguage_v1alpha_generated_retriever_service_batch_update_chunks_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1alpha_generated_RetrieverService_BatchUpdateChunks_async", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": 
"generativelanguage_v1alpha_generated_retriever_service_batch_update_chunks_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.ai.generativelanguage_v1alpha.RetrieverServiceClient", + "shortName": "RetrieverServiceClient" + }, + "fullName": "google.ai.generativelanguage_v1alpha.RetrieverServiceClient.batch_update_chunks", + "method": { + "fullName": "google.ai.generativelanguage.v1alpha.RetrieverService.BatchUpdateChunks", + "service": { + "fullName": "google.ai.generativelanguage.v1alpha.RetrieverService", + "shortName": "RetrieverService" + }, + "shortName": "BatchUpdateChunks" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1alpha.types.BatchUpdateChunksRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.ai.generativelanguage_v1alpha.types.BatchUpdateChunksResponse", + "shortName": "batch_update_chunks" + }, + "description": "Sample for BatchUpdateChunks", + "file": "generativelanguage_v1alpha_generated_retriever_service_batch_update_chunks_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1alpha_generated_RetrieverService_BatchUpdateChunks_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1alpha_generated_retriever_service_batch_update_chunks_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.ai.generativelanguage_v1alpha.RetrieverServiceAsyncClient", + "shortName": "RetrieverServiceAsyncClient" + }, + "fullName": "google.ai.generativelanguage_v1alpha.RetrieverServiceAsyncClient.create_chunk", + "method": { + "fullName": "google.ai.generativelanguage.v1alpha.RetrieverService.CreateChunk", + "service": { + "fullName": "google.ai.generativelanguage.v1alpha.RetrieverService", + "shortName": "RetrieverService" + }, + "shortName": "CreateChunk" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1alpha.types.CreateChunkRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "chunk", + "type": "google.ai.generativelanguage_v1alpha.types.Chunk" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.ai.generativelanguage_v1alpha.types.Chunk", + "shortName": "create_chunk" + }, + "description": "Sample for CreateChunk", + "file": "generativelanguage_v1alpha_generated_retriever_service_create_chunk_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1alpha_generated_RetrieverService_CreateChunk_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": 
"REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1alpha_generated_retriever_service_create_chunk_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.ai.generativelanguage_v1alpha.RetrieverServiceClient", + "shortName": "RetrieverServiceClient" + }, + "fullName": "google.ai.generativelanguage_v1alpha.RetrieverServiceClient.create_chunk", + "method": { + "fullName": "google.ai.generativelanguage.v1alpha.RetrieverService.CreateChunk", + "service": { + "fullName": "google.ai.generativelanguage.v1alpha.RetrieverService", + "shortName": "RetrieverService" + }, + "shortName": "CreateChunk" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1alpha.types.CreateChunkRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "chunk", + "type": "google.ai.generativelanguage_v1alpha.types.Chunk" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.ai.generativelanguage_v1alpha.types.Chunk", + "shortName": "create_chunk" + }, + "description": "Sample for CreateChunk", + "file": "generativelanguage_v1alpha_generated_retriever_service_create_chunk_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1alpha_generated_RetrieverService_CreateChunk_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1alpha_generated_retriever_service_create_chunk_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.ai.generativelanguage_v1alpha.RetrieverServiceAsyncClient", + "shortName": "RetrieverServiceAsyncClient" + }, + "fullName": "google.ai.generativelanguage_v1alpha.RetrieverServiceAsyncClient.create_corpus", + "method": { + "fullName": "google.ai.generativelanguage.v1alpha.RetrieverService.CreateCorpus", + "service": { + "fullName": "google.ai.generativelanguage.v1alpha.RetrieverService", + "shortName": "RetrieverService" + }, + "shortName": "CreateCorpus" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1alpha.types.CreateCorpusRequest" + }, + { + "name": "corpus", + "type": "google.ai.generativelanguage_v1alpha.types.Corpus" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.ai.generativelanguage_v1alpha.types.Corpus", + "shortName": "create_corpus" + }, + "description": "Sample for CreateCorpus", + "file": "generativelanguage_v1alpha_generated_retriever_service_create_corpus_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1alpha_generated_RetrieverService_CreateCorpus_async", + "segments": [ + { + "end": 50, + "start": 27, + "type": 
"FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1alpha_generated_retriever_service_create_corpus_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.ai.generativelanguage_v1alpha.RetrieverServiceClient", + "shortName": "RetrieverServiceClient" + }, + "fullName": "google.ai.generativelanguage_v1alpha.RetrieverServiceClient.create_corpus", + "method": { + "fullName": "google.ai.generativelanguage.v1alpha.RetrieverService.CreateCorpus", + "service": { + "fullName": "google.ai.generativelanguage.v1alpha.RetrieverService", + "shortName": "RetrieverService" + }, + "shortName": "CreateCorpus" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1alpha.types.CreateCorpusRequest" + }, + { + "name": "corpus", + "type": "google.ai.generativelanguage_v1alpha.types.Corpus" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.ai.generativelanguage_v1alpha.types.Corpus", + "shortName": "create_corpus" + }, + "description": "Sample for CreateCorpus", + "file": "generativelanguage_v1alpha_generated_retriever_service_create_corpus_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1alpha_generated_RetrieverService_CreateCorpus_sync", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1alpha_generated_retriever_service_create_corpus_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.ai.generativelanguage_v1alpha.RetrieverServiceAsyncClient", + "shortName": "RetrieverServiceAsyncClient" + }, + "fullName": "google.ai.generativelanguage_v1alpha.RetrieverServiceAsyncClient.create_document", + "method": { + "fullName": "google.ai.generativelanguage.v1alpha.RetrieverService.CreateDocument", + "service": { + "fullName": "google.ai.generativelanguage.v1alpha.RetrieverService", + "shortName": "RetrieverService" + }, + "shortName": "CreateDocument" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1alpha.types.CreateDocumentRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "document", + "type": "google.ai.generativelanguage_v1alpha.types.Document" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.ai.generativelanguage_v1alpha.types.Document", + "shortName": "create_document" + }, + "description": "Sample for CreateDocument", + "file": 
"generativelanguage_v1alpha_generated_retriever_service_create_document_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1alpha_generated_RetrieverService_CreateDocument_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1alpha_generated_retriever_service_create_document_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.ai.generativelanguage_v1alpha.RetrieverServiceClient", + "shortName": "RetrieverServiceClient" + }, + "fullName": "google.ai.generativelanguage_v1alpha.RetrieverServiceClient.create_document", + "method": { + "fullName": "google.ai.generativelanguage.v1alpha.RetrieverService.CreateDocument", + "service": { + "fullName": "google.ai.generativelanguage.v1alpha.RetrieverService", + "shortName": "RetrieverService" + }, + "shortName": "CreateDocument" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1alpha.types.CreateDocumentRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "document", + "type": "google.ai.generativelanguage_v1alpha.types.Document" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.ai.generativelanguage_v1alpha.types.Document", + "shortName": "create_document" + }, + "description": "Sample for CreateDocument", + "file": "generativelanguage_v1alpha_generated_retriever_service_create_document_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1alpha_generated_RetrieverService_CreateDocument_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1alpha_generated_retriever_service_create_document_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.ai.generativelanguage_v1alpha.RetrieverServiceAsyncClient", + "shortName": "RetrieverServiceAsyncClient" + }, + "fullName": "google.ai.generativelanguage_v1alpha.RetrieverServiceAsyncClient.delete_chunk", + "method": { + "fullName": "google.ai.generativelanguage.v1alpha.RetrieverService.DeleteChunk", + "service": { + "fullName": "google.ai.generativelanguage.v1alpha.RetrieverService", + "shortName": "RetrieverService" + }, + "shortName": "DeleteChunk" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1alpha.types.DeleteChunkRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + 
} + ], + "shortName": "delete_chunk" + }, + "description": "Sample for DeleteChunk", + "file": "generativelanguage_v1alpha_generated_retriever_service_delete_chunk_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1alpha_generated_RetrieverService_DeleteChunk_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1alpha_generated_retriever_service_delete_chunk_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.ai.generativelanguage_v1alpha.RetrieverServiceClient", + "shortName": "RetrieverServiceClient" + }, + "fullName": "google.ai.generativelanguage_v1alpha.RetrieverServiceClient.delete_chunk", + "method": { + "fullName": "google.ai.generativelanguage.v1alpha.RetrieverService.DeleteChunk", + "service": { + "fullName": "google.ai.generativelanguage.v1alpha.RetrieverService", + "shortName": "RetrieverService" + }, + "shortName": "DeleteChunk" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1alpha.types.DeleteChunkRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "shortName": "delete_chunk" + }, + "description": "Sample for DeleteChunk", + "file": "generativelanguage_v1alpha_generated_retriever_service_delete_chunk_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1alpha_generated_RetrieverService_DeleteChunk_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1alpha_generated_retriever_service_delete_chunk_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.ai.generativelanguage_v1alpha.RetrieverServiceAsyncClient", + "shortName": "RetrieverServiceAsyncClient" + }, + "fullName": "google.ai.generativelanguage_v1alpha.RetrieverServiceAsyncClient.delete_corpus", + "method": { + "fullName": "google.ai.generativelanguage.v1alpha.RetrieverService.DeleteCorpus", + "service": { + "fullName": "google.ai.generativelanguage.v1alpha.RetrieverService", + "shortName": "RetrieverService" + }, + "shortName": "DeleteCorpus" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1alpha.types.DeleteCorpusRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "shortName": "delete_corpus" + }, + "description": "Sample for DeleteCorpus", + "file": 
"generativelanguage_v1alpha_generated_retriever_service_delete_corpus_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1alpha_generated_RetrieverService_DeleteCorpus_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1alpha_generated_retriever_service_delete_corpus_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.ai.generativelanguage_v1alpha.RetrieverServiceClient", + "shortName": "RetrieverServiceClient" + }, + "fullName": "google.ai.generativelanguage_v1alpha.RetrieverServiceClient.delete_corpus", + "method": { + "fullName": "google.ai.generativelanguage.v1alpha.RetrieverService.DeleteCorpus", + "service": { + "fullName": "google.ai.generativelanguage.v1alpha.RetrieverService", + "shortName": "RetrieverService" + }, + "shortName": "DeleteCorpus" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1alpha.types.DeleteCorpusRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "shortName": "delete_corpus" + }, + "description": "Sample for DeleteCorpus", + "file": "generativelanguage_v1alpha_generated_retriever_service_delete_corpus_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1alpha_generated_RetrieverService_DeleteCorpus_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1alpha_generated_retriever_service_delete_corpus_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.ai.generativelanguage_v1alpha.RetrieverServiceAsyncClient", + "shortName": "RetrieverServiceAsyncClient" + }, + "fullName": "google.ai.generativelanguage_v1alpha.RetrieverServiceAsyncClient.delete_document", + "method": { + "fullName": "google.ai.generativelanguage.v1alpha.RetrieverService.DeleteDocument", + "service": { + "fullName": "google.ai.generativelanguage.v1alpha.RetrieverService", + "shortName": "RetrieverService" + }, + "shortName": "DeleteDocument" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1alpha.types.DeleteDocumentRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "shortName": "delete_document" + }, + "description": "Sample for DeleteDocument", + "file": "generativelanguage_v1alpha_generated_retriever_service_delete_document_async.py", + "language": "PYTHON", + "origin": 
"API_DEFINITION", + "regionTag": "generativelanguage_v1alpha_generated_RetrieverService_DeleteDocument_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1alpha_generated_retriever_service_delete_document_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.ai.generativelanguage_v1alpha.RetrieverServiceClient", + "shortName": "RetrieverServiceClient" + }, + "fullName": "google.ai.generativelanguage_v1alpha.RetrieverServiceClient.delete_document", + "method": { + "fullName": "google.ai.generativelanguage.v1alpha.RetrieverService.DeleteDocument", + "service": { + "fullName": "google.ai.generativelanguage.v1alpha.RetrieverService", + "shortName": "RetrieverService" + }, + "shortName": "DeleteDocument" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1alpha.types.DeleteDocumentRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "shortName": "delete_document" + }, + "description": "Sample for DeleteDocument", + "file": "generativelanguage_v1alpha_generated_retriever_service_delete_document_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1alpha_generated_RetrieverService_DeleteDocument_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1alpha_generated_retriever_service_delete_document_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.ai.generativelanguage_v1alpha.RetrieverServiceAsyncClient", + "shortName": "RetrieverServiceAsyncClient" + }, + "fullName": "google.ai.generativelanguage_v1alpha.RetrieverServiceAsyncClient.get_chunk", + "method": { + "fullName": "google.ai.generativelanguage.v1alpha.RetrieverService.GetChunk", + "service": { + "fullName": "google.ai.generativelanguage.v1alpha.RetrieverService", + "shortName": "RetrieverService" + }, + "shortName": "GetChunk" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1alpha.types.GetChunkRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.ai.generativelanguage_v1alpha.types.Chunk", + "shortName": "get_chunk" + }, + "description": "Sample for GetChunk", + "file": "generativelanguage_v1alpha_generated_retriever_service_get_chunk_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": 
"generativelanguage_v1alpha_generated_RetrieverService_GetChunk_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1alpha_generated_retriever_service_get_chunk_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.ai.generativelanguage_v1alpha.RetrieverServiceClient", + "shortName": "RetrieverServiceClient" + }, + "fullName": "google.ai.generativelanguage_v1alpha.RetrieverServiceClient.get_chunk", + "method": { + "fullName": "google.ai.generativelanguage.v1alpha.RetrieverService.GetChunk", + "service": { + "fullName": "google.ai.generativelanguage.v1alpha.RetrieverService", + "shortName": "RetrieverService" + }, + "shortName": "GetChunk" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1alpha.types.GetChunkRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.ai.generativelanguage_v1alpha.types.Chunk", + "shortName": "get_chunk" + }, + "description": "Sample for GetChunk", + "file": "generativelanguage_v1alpha_generated_retriever_service_get_chunk_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1alpha_generated_RetrieverService_GetChunk_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1alpha_generated_retriever_service_get_chunk_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.ai.generativelanguage_v1alpha.RetrieverServiceAsyncClient", + "shortName": "RetrieverServiceAsyncClient" + }, + "fullName": "google.ai.generativelanguage_v1alpha.RetrieverServiceAsyncClient.get_corpus", + "method": { + "fullName": "google.ai.generativelanguage.v1alpha.RetrieverService.GetCorpus", + "service": { + "fullName": "google.ai.generativelanguage.v1alpha.RetrieverService", + "shortName": "RetrieverService" + }, + "shortName": "GetCorpus" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1alpha.types.GetCorpusRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.ai.generativelanguage_v1alpha.types.Corpus", + "shortName": "get_corpus" + }, + "description": "Sample for GetCorpus", + "file": "generativelanguage_v1alpha_generated_retriever_service_get_corpus_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": 
"generativelanguage_v1alpha_generated_RetrieverService_GetCorpus_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1alpha_generated_retriever_service_get_corpus_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.ai.generativelanguage_v1alpha.RetrieverServiceClient", + "shortName": "RetrieverServiceClient" + }, + "fullName": "google.ai.generativelanguage_v1alpha.RetrieverServiceClient.get_corpus", + "method": { + "fullName": "google.ai.generativelanguage.v1alpha.RetrieverService.GetCorpus", + "service": { + "fullName": "google.ai.generativelanguage.v1alpha.RetrieverService", + "shortName": "RetrieverService" + }, + "shortName": "GetCorpus" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1alpha.types.GetCorpusRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.ai.generativelanguage_v1alpha.types.Corpus", + "shortName": "get_corpus" + }, + "description": "Sample for GetCorpus", + "file": "generativelanguage_v1alpha_generated_retriever_service_get_corpus_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1alpha_generated_RetrieverService_GetCorpus_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1alpha_generated_retriever_service_get_corpus_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.ai.generativelanguage_v1alpha.RetrieverServiceAsyncClient", + "shortName": "RetrieverServiceAsyncClient" + }, + "fullName": "google.ai.generativelanguage_v1alpha.RetrieverServiceAsyncClient.get_document", + "method": { + "fullName": "google.ai.generativelanguage.v1alpha.RetrieverService.GetDocument", + "service": { + "fullName": "google.ai.generativelanguage.v1alpha.RetrieverService", + "shortName": "RetrieverService" + }, + "shortName": "GetDocument" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1alpha.types.GetDocumentRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.ai.generativelanguage_v1alpha.types.Document", + "shortName": "get_document" + }, + "description": "Sample for GetDocument", + "file": "generativelanguage_v1alpha_generated_retriever_service_get_document_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + 
"regionTag": "generativelanguage_v1alpha_generated_RetrieverService_GetDocument_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1alpha_generated_retriever_service_get_document_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.ai.generativelanguage_v1alpha.RetrieverServiceClient", + "shortName": "RetrieverServiceClient" + }, + "fullName": "google.ai.generativelanguage_v1alpha.RetrieverServiceClient.get_document", + "method": { + "fullName": "google.ai.generativelanguage.v1alpha.RetrieverService.GetDocument", + "service": { + "fullName": "google.ai.generativelanguage.v1alpha.RetrieverService", + "shortName": "RetrieverService" + }, + "shortName": "GetDocument" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1alpha.types.GetDocumentRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.ai.generativelanguage_v1alpha.types.Document", + "shortName": "get_document" + }, + "description": "Sample for GetDocument", + "file": "generativelanguage_v1alpha_generated_retriever_service_get_document_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1alpha_generated_RetrieverService_GetDocument_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1alpha_generated_retriever_service_get_document_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.ai.generativelanguage_v1alpha.RetrieverServiceAsyncClient", + "shortName": "RetrieverServiceAsyncClient" + }, + "fullName": "google.ai.generativelanguage_v1alpha.RetrieverServiceAsyncClient.list_chunks", + "method": { + "fullName": "google.ai.generativelanguage.v1alpha.RetrieverService.ListChunks", + "service": { + "fullName": "google.ai.generativelanguage.v1alpha.RetrieverService", + "shortName": "RetrieverService" + }, + "shortName": "ListChunks" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1alpha.types.ListChunksRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.ai.generativelanguage_v1alpha.services.retriever_service.pagers.ListChunksAsyncPager", + "shortName": "list_chunks" + }, + "description": "Sample for ListChunks", + "file": 
"generativelanguage_v1alpha_generated_retriever_service_list_chunks_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1alpha_generated_RetrieverService_ListChunks_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1alpha_generated_retriever_service_list_chunks_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.ai.generativelanguage_v1alpha.RetrieverServiceClient", + "shortName": "RetrieverServiceClient" + }, + "fullName": "google.ai.generativelanguage_v1alpha.RetrieverServiceClient.list_chunks", + "method": { + "fullName": "google.ai.generativelanguage.v1alpha.RetrieverService.ListChunks", + "service": { + "fullName": "google.ai.generativelanguage.v1alpha.RetrieverService", + "shortName": "RetrieverService" + }, + "shortName": "ListChunks" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1alpha.types.ListChunksRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.ai.generativelanguage_v1alpha.services.retriever_service.pagers.ListChunksPager", + "shortName": "list_chunks" + }, + "description": "Sample for ListChunks", + "file": "generativelanguage_v1alpha_generated_retriever_service_list_chunks_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1alpha_generated_RetrieverService_ListChunks_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1alpha_generated_retriever_service_list_chunks_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.ai.generativelanguage_v1alpha.RetrieverServiceAsyncClient", + "shortName": "RetrieverServiceAsyncClient" + }, + "fullName": "google.ai.generativelanguage_v1alpha.RetrieverServiceAsyncClient.list_corpora", + "method": { + "fullName": "google.ai.generativelanguage.v1alpha.RetrieverService.ListCorpora", + "service": { + "fullName": "google.ai.generativelanguage.v1alpha.RetrieverService", + "shortName": "RetrieverService" + }, + "shortName": "ListCorpora" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1alpha.types.ListCorporaRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.ai.generativelanguage_v1alpha.services.retriever_service.pagers.ListCorporaAsyncPager", + "shortName": "list_corpora" + 
}, + "description": "Sample for ListCorpora", + "file": "generativelanguage_v1alpha_generated_retriever_service_list_corpora_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1alpha_generated_RetrieverService_ListCorpora_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1alpha_generated_retriever_service_list_corpora_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.ai.generativelanguage_v1alpha.RetrieverServiceClient", + "shortName": "RetrieverServiceClient" + }, + "fullName": "google.ai.generativelanguage_v1alpha.RetrieverServiceClient.list_corpora", + "method": { + "fullName": "google.ai.generativelanguage.v1alpha.RetrieverService.ListCorpora", + "service": { + "fullName": "google.ai.generativelanguage.v1alpha.RetrieverService", + "shortName": "RetrieverService" + }, + "shortName": "ListCorpora" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1alpha.types.ListCorporaRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.ai.generativelanguage_v1alpha.services.retriever_service.pagers.ListCorporaPager", + "shortName": "list_corpora" + }, + "description": "Sample for ListCorpora", + "file": "generativelanguage_v1alpha_generated_retriever_service_list_corpora_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1alpha_generated_RetrieverService_ListCorpora_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1alpha_generated_retriever_service_list_corpora_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.ai.generativelanguage_v1alpha.RetrieverServiceAsyncClient", + "shortName": "RetrieverServiceAsyncClient" + }, + "fullName": "google.ai.generativelanguage_v1alpha.RetrieverServiceAsyncClient.list_documents", + "method": { + "fullName": "google.ai.generativelanguage.v1alpha.RetrieverService.ListDocuments", + "service": { + "fullName": "google.ai.generativelanguage.v1alpha.RetrieverService", + "shortName": "RetrieverService" + }, + "shortName": "ListDocuments" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1alpha.types.ListDocumentsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": 
"google.ai.generativelanguage_v1alpha.services.retriever_service.pagers.ListDocumentsAsyncPager", + "shortName": "list_documents" + }, + "description": "Sample for ListDocuments", + "file": "generativelanguage_v1alpha_generated_retriever_service_list_documents_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1alpha_generated_RetrieverService_ListDocuments_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1alpha_generated_retriever_service_list_documents_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.ai.generativelanguage_v1alpha.RetrieverServiceClient", + "shortName": "RetrieverServiceClient" + }, + "fullName": "google.ai.generativelanguage_v1alpha.RetrieverServiceClient.list_documents", + "method": { + "fullName": "google.ai.generativelanguage.v1alpha.RetrieverService.ListDocuments", + "service": { + "fullName": "google.ai.generativelanguage.v1alpha.RetrieverService", + "shortName": "RetrieverService" + }, + "shortName": "ListDocuments" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1alpha.types.ListDocumentsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.ai.generativelanguage_v1alpha.services.retriever_service.pagers.ListDocumentsPager", + "shortName": "list_documents" + }, + "description": "Sample for ListDocuments", + "file": "generativelanguage_v1alpha_generated_retriever_service_list_documents_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1alpha_generated_RetrieverService_ListDocuments_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1alpha_generated_retriever_service_list_documents_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.ai.generativelanguage_v1alpha.RetrieverServiceAsyncClient", + "shortName": "RetrieverServiceAsyncClient" + }, + "fullName": "google.ai.generativelanguage_v1alpha.RetrieverServiceAsyncClient.query_corpus", + "method": { + "fullName": "google.ai.generativelanguage.v1alpha.RetrieverService.QueryCorpus", + "service": { + "fullName": "google.ai.generativelanguage.v1alpha.RetrieverService", + "shortName": "RetrieverService" + }, + "shortName": "QueryCorpus" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1alpha.types.QueryCorpusRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + 
{ + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.ai.generativelanguage_v1alpha.types.QueryCorpusResponse", + "shortName": "query_corpus" + }, + "description": "Sample for QueryCorpus", + "file": "generativelanguage_v1alpha_generated_retriever_service_query_corpus_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1alpha_generated_RetrieverService_QueryCorpus_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1alpha_generated_retriever_service_query_corpus_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.ai.generativelanguage_v1alpha.RetrieverServiceClient", + "shortName": "RetrieverServiceClient" + }, + "fullName": "google.ai.generativelanguage_v1alpha.RetrieverServiceClient.query_corpus", + "method": { + "fullName": "google.ai.generativelanguage.v1alpha.RetrieverService.QueryCorpus", + "service": { + "fullName": "google.ai.generativelanguage.v1alpha.RetrieverService", + "shortName": "RetrieverService" + }, + "shortName": "QueryCorpus" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1alpha.types.QueryCorpusRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.ai.generativelanguage_v1alpha.types.QueryCorpusResponse", + "shortName": "query_corpus" + }, + "description": "Sample for QueryCorpus", + "file": "generativelanguage_v1alpha_generated_retriever_service_query_corpus_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1alpha_generated_RetrieverService_QueryCorpus_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1alpha_generated_retriever_service_query_corpus_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.ai.generativelanguage_v1alpha.RetrieverServiceAsyncClient", + "shortName": "RetrieverServiceAsyncClient" + }, + "fullName": "google.ai.generativelanguage_v1alpha.RetrieverServiceAsyncClient.query_document", + "method": { + "fullName": "google.ai.generativelanguage.v1alpha.RetrieverService.QueryDocument", + "service": { + "fullName": "google.ai.generativelanguage.v1alpha.RetrieverService", + "shortName": "RetrieverService" + }, + "shortName": "QueryDocument" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1alpha.types.QueryDocumentRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", 
+ "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.ai.generativelanguage_v1alpha.types.QueryDocumentResponse", + "shortName": "query_document" + }, + "description": "Sample for QueryDocument", + "file": "generativelanguage_v1alpha_generated_retriever_service_query_document_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1alpha_generated_RetrieverService_QueryDocument_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1alpha_generated_retriever_service_query_document_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.ai.generativelanguage_v1alpha.RetrieverServiceClient", + "shortName": "RetrieverServiceClient" + }, + "fullName": "google.ai.generativelanguage_v1alpha.RetrieverServiceClient.query_document", + "method": { + "fullName": "google.ai.generativelanguage.v1alpha.RetrieverService.QueryDocument", + "service": { + "fullName": "google.ai.generativelanguage.v1alpha.RetrieverService", + "shortName": "RetrieverService" + }, + "shortName": "QueryDocument" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1alpha.types.QueryDocumentRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.ai.generativelanguage_v1alpha.types.QueryDocumentResponse", + "shortName": "query_document" + }, + "description": "Sample for QueryDocument", + "file": "generativelanguage_v1alpha_generated_retriever_service_query_document_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1alpha_generated_RetrieverService_QueryDocument_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1alpha_generated_retriever_service_query_document_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.ai.generativelanguage_v1alpha.RetrieverServiceAsyncClient", + "shortName": "RetrieverServiceAsyncClient" + }, + "fullName": "google.ai.generativelanguage_v1alpha.RetrieverServiceAsyncClient.update_chunk", + "method": { + "fullName": "google.ai.generativelanguage.v1alpha.RetrieverService.UpdateChunk", + "service": { + "fullName": "google.ai.generativelanguage.v1alpha.RetrieverService", + "shortName": "RetrieverService" + }, + "shortName": "UpdateChunk" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1alpha.types.UpdateChunkRequest" + }, + { + "name": "chunk", + "type": "google.ai.generativelanguage_v1alpha.types.Chunk" + }, + { + "name": "update_mask", + "type": 
"google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.ai.generativelanguage_v1alpha.types.Chunk", + "shortName": "update_chunk" + }, + "description": "Sample for UpdateChunk", + "file": "generativelanguage_v1alpha_generated_retriever_service_update_chunk_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1alpha_generated_RetrieverService_UpdateChunk_async", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1alpha_generated_retriever_service_update_chunk_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.ai.generativelanguage_v1alpha.RetrieverServiceClient", + "shortName": "RetrieverServiceClient" + }, + "fullName": "google.ai.generativelanguage_v1alpha.RetrieverServiceClient.update_chunk", + "method": { + "fullName": "google.ai.generativelanguage.v1alpha.RetrieverService.UpdateChunk", + "service": { + "fullName": "google.ai.generativelanguage.v1alpha.RetrieverService", + "shortName": "RetrieverService" + }, + "shortName": "UpdateChunk" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1alpha.types.UpdateChunkRequest" + }, + { + "name": "chunk", + "type": "google.ai.generativelanguage_v1alpha.types.Chunk" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.ai.generativelanguage_v1alpha.types.Chunk", + "shortName": "update_chunk" + }, + "description": "Sample for UpdateChunk", + "file": "generativelanguage_v1alpha_generated_retriever_service_update_chunk_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1alpha_generated_RetrieverService_UpdateChunk_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1alpha_generated_retriever_service_update_chunk_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.ai.generativelanguage_v1alpha.RetrieverServiceAsyncClient", + "shortName": "RetrieverServiceAsyncClient" + }, + "fullName": "google.ai.generativelanguage_v1alpha.RetrieverServiceAsyncClient.update_corpus", + "method": { + "fullName": "google.ai.generativelanguage.v1alpha.RetrieverService.UpdateCorpus", + "service": { + "fullName": "google.ai.generativelanguage.v1alpha.RetrieverService", + "shortName": 
"RetrieverService" + }, + "shortName": "UpdateCorpus" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1alpha.types.UpdateCorpusRequest" + }, + { + "name": "corpus", + "type": "google.ai.generativelanguage_v1alpha.types.Corpus" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.ai.generativelanguage_v1alpha.types.Corpus", + "shortName": "update_corpus" + }, + "description": "Sample for UpdateCorpus", + "file": "generativelanguage_v1alpha_generated_retriever_service_update_corpus_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1alpha_generated_RetrieverService_UpdateCorpus_async", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1alpha_generated_retriever_service_update_corpus_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.ai.generativelanguage_v1alpha.RetrieverServiceClient", + "shortName": "RetrieverServiceClient" + }, + "fullName": "google.ai.generativelanguage_v1alpha.RetrieverServiceClient.update_corpus", + "method": { + "fullName": "google.ai.generativelanguage.v1alpha.RetrieverService.UpdateCorpus", + "service": { + "fullName": "google.ai.generativelanguage.v1alpha.RetrieverService", + "shortName": "RetrieverService" + }, + "shortName": "UpdateCorpus" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1alpha.types.UpdateCorpusRequest" + }, + { + "name": "corpus", + "type": "google.ai.generativelanguage_v1alpha.types.Corpus" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.ai.generativelanguage_v1alpha.types.Corpus", + "shortName": "update_corpus" + }, + "description": "Sample for UpdateCorpus", + "file": "generativelanguage_v1alpha_generated_retriever_service_update_corpus_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1alpha_generated_RetrieverService_UpdateCorpus_sync", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1alpha_generated_retriever_service_update_corpus_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.ai.generativelanguage_v1alpha.RetrieverServiceAsyncClient", + "shortName": 
"RetrieverServiceAsyncClient" + }, + "fullName": "google.ai.generativelanguage_v1alpha.RetrieverServiceAsyncClient.update_document", + "method": { + "fullName": "google.ai.generativelanguage.v1alpha.RetrieverService.UpdateDocument", + "service": { + "fullName": "google.ai.generativelanguage.v1alpha.RetrieverService", + "shortName": "RetrieverService" + }, + "shortName": "UpdateDocument" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1alpha.types.UpdateDocumentRequest" + }, + { + "name": "document", + "type": "google.ai.generativelanguage_v1alpha.types.Document" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.ai.generativelanguage_v1alpha.types.Document", + "shortName": "update_document" + }, + "description": "Sample for UpdateDocument", + "file": "generativelanguage_v1alpha_generated_retriever_service_update_document_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1alpha_generated_RetrieverService_UpdateDocument_async", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1alpha_generated_retriever_service_update_document_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.ai.generativelanguage_v1alpha.RetrieverServiceClient", + "shortName": "RetrieverServiceClient" + }, + "fullName": "google.ai.generativelanguage_v1alpha.RetrieverServiceClient.update_document", + "method": { + "fullName": "google.ai.generativelanguage.v1alpha.RetrieverService.UpdateDocument", + "service": { + "fullName": "google.ai.generativelanguage.v1alpha.RetrieverService", + "shortName": "RetrieverService" + }, + "shortName": "UpdateDocument" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1alpha.types.UpdateDocumentRequest" + }, + { + "name": "document", + "type": "google.ai.generativelanguage_v1alpha.types.Document" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.ai.generativelanguage_v1alpha.types.Document", + "shortName": "update_document" + }, + "description": "Sample for UpdateDocument", + "file": "generativelanguage_v1alpha_generated_retriever_service_update_document_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1alpha_generated_RetrieverService_UpdateDocument_sync", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": 
"REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1alpha_generated_retriever_service_update_document_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.ai.generativelanguage_v1alpha.TextServiceAsyncClient", + "shortName": "TextServiceAsyncClient" + }, + "fullName": "google.ai.generativelanguage_v1alpha.TextServiceAsyncClient.batch_embed_text", + "method": { + "fullName": "google.ai.generativelanguage.v1alpha.TextService.BatchEmbedText", + "service": { + "fullName": "google.ai.generativelanguage.v1alpha.TextService", + "shortName": "TextService" + }, + "shortName": "BatchEmbedText" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1alpha.types.BatchEmbedTextRequest" + }, + { + "name": "model", + "type": "str" + }, + { + "name": "texts", + "type": "MutableSequence[str]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.ai.generativelanguage_v1alpha.types.BatchEmbedTextResponse", + "shortName": "batch_embed_text" + }, + "description": "Sample for BatchEmbedText", + "file": "generativelanguage_v1alpha_generated_text_service_batch_embed_text_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1alpha_generated_TextService_BatchEmbedText_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1alpha_generated_text_service_batch_embed_text_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.ai.generativelanguage_v1alpha.TextServiceClient", + "shortName": "TextServiceClient" + }, + "fullName": "google.ai.generativelanguage_v1alpha.TextServiceClient.batch_embed_text", + "method": { + "fullName": "google.ai.generativelanguage.v1alpha.TextService.BatchEmbedText", + "service": { + "fullName": "google.ai.generativelanguage.v1alpha.TextService", + "shortName": "TextService" + }, + "shortName": "BatchEmbedText" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1alpha.types.BatchEmbedTextRequest" + }, + { + "name": "model", + "type": "str" + }, + { + "name": "texts", + "type": "MutableSequence[str]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.ai.generativelanguage_v1alpha.types.BatchEmbedTextResponse", + "shortName": "batch_embed_text" + }, + "description": "Sample for BatchEmbedText", + "file": "generativelanguage_v1alpha_generated_text_service_batch_embed_text_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1alpha_generated_TextService_BatchEmbedText_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + 
"start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1alpha_generated_text_service_batch_embed_text_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.ai.generativelanguage_v1alpha.TextServiceAsyncClient", + "shortName": "TextServiceAsyncClient" + }, + "fullName": "google.ai.generativelanguage_v1alpha.TextServiceAsyncClient.count_text_tokens", + "method": { + "fullName": "google.ai.generativelanguage.v1alpha.TextService.CountTextTokens", + "service": { + "fullName": "google.ai.generativelanguage.v1alpha.TextService", + "shortName": "TextService" + }, + "shortName": "CountTextTokens" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1alpha.types.CountTextTokensRequest" + }, + { + "name": "model", + "type": "str" + }, + { + "name": "prompt", + "type": "google.ai.generativelanguage_v1alpha.types.TextPrompt" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.ai.generativelanguage_v1alpha.types.CountTextTokensResponse", + "shortName": "count_text_tokens" + }, + "description": "Sample for CountTextTokens", + "file": "generativelanguage_v1alpha_generated_text_service_count_text_tokens_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1alpha_generated_TextService_CountTextTokens_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1alpha_generated_text_service_count_text_tokens_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.ai.generativelanguage_v1alpha.TextServiceClient", + "shortName": "TextServiceClient" + }, + "fullName": "google.ai.generativelanguage_v1alpha.TextServiceClient.count_text_tokens", + "method": { + "fullName": "google.ai.generativelanguage.v1alpha.TextService.CountTextTokens", + "service": { + "fullName": "google.ai.generativelanguage.v1alpha.TextService", + "shortName": "TextService" + }, + "shortName": "CountTextTokens" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1alpha.types.CountTextTokensRequest" + }, + { + "name": "model", + "type": "str" + }, + { + "name": "prompt", + "type": "google.ai.generativelanguage_v1alpha.types.TextPrompt" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.ai.generativelanguage_v1alpha.types.CountTextTokensResponse", + "shortName": "count_text_tokens" + }, + "description": "Sample for CountTextTokens", + "file": "generativelanguage_v1alpha_generated_text_service_count_text_tokens_sync.py", + "language": "PYTHON", + "origin": 
"API_DEFINITION", + "regionTag": "generativelanguage_v1alpha_generated_TextService_CountTextTokens_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1alpha_generated_text_service_count_text_tokens_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.ai.generativelanguage_v1alpha.TextServiceAsyncClient", + "shortName": "TextServiceAsyncClient" + }, + "fullName": "google.ai.generativelanguage_v1alpha.TextServiceAsyncClient.embed_text", + "method": { + "fullName": "google.ai.generativelanguage.v1alpha.TextService.EmbedText", + "service": { + "fullName": "google.ai.generativelanguage.v1alpha.TextService", + "shortName": "TextService" + }, + "shortName": "EmbedText" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1alpha.types.EmbedTextRequest" + }, + { + "name": "model", + "type": "str" + }, + { + "name": "text", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.ai.generativelanguage_v1alpha.types.EmbedTextResponse", + "shortName": "embed_text" + }, + "description": "Sample for EmbedText", + "file": "generativelanguage_v1alpha_generated_text_service_embed_text_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1alpha_generated_TextService_EmbedText_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1alpha_generated_text_service_embed_text_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.ai.generativelanguage_v1alpha.TextServiceClient", + "shortName": "TextServiceClient" + }, + "fullName": "google.ai.generativelanguage_v1alpha.TextServiceClient.embed_text", + "method": { + "fullName": "google.ai.generativelanguage.v1alpha.TextService.EmbedText", + "service": { + "fullName": "google.ai.generativelanguage.v1alpha.TextService", + "shortName": "TextService" + }, + "shortName": "EmbedText" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1alpha.types.EmbedTextRequest" + }, + { + "name": "model", + "type": "str" + }, + { + "name": "text", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.ai.generativelanguage_v1alpha.types.EmbedTextResponse", + "shortName": "embed_text" + }, + "description": "Sample for EmbedText", + "file": "generativelanguage_v1alpha_generated_text_service_embed_text_sync.py", + 
"language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1alpha_generated_TextService_EmbedText_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1alpha_generated_text_service_embed_text_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.ai.generativelanguage_v1alpha.TextServiceAsyncClient", + "shortName": "TextServiceAsyncClient" + }, + "fullName": "google.ai.generativelanguage_v1alpha.TextServiceAsyncClient.generate_text", + "method": { + "fullName": "google.ai.generativelanguage.v1alpha.TextService.GenerateText", + "service": { + "fullName": "google.ai.generativelanguage.v1alpha.TextService", + "shortName": "TextService" + }, + "shortName": "GenerateText" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1alpha.types.GenerateTextRequest" + }, + { + "name": "model", + "type": "str" + }, + { + "name": "prompt", + "type": "google.ai.generativelanguage_v1alpha.types.TextPrompt" + }, + { + "name": "temperature", + "type": "float" + }, + { + "name": "candidate_count", + "type": "int" + }, + { + "name": "max_output_tokens", + "type": "int" + }, + { + "name": "top_p", + "type": "float" + }, + { + "name": "top_k", + "type": "int" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.ai.generativelanguage_v1alpha.types.GenerateTextResponse", + "shortName": "generate_text" + }, + "description": "Sample for GenerateText", + "file": "generativelanguage_v1alpha_generated_text_service_generate_text_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1alpha_generated_TextService_GenerateText_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1alpha_generated_text_service_generate_text_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.ai.generativelanguage_v1alpha.TextServiceClient", + "shortName": "TextServiceClient" + }, + "fullName": "google.ai.generativelanguage_v1alpha.TextServiceClient.generate_text", + "method": { + "fullName": "google.ai.generativelanguage.v1alpha.TextService.GenerateText", + "service": { + "fullName": "google.ai.generativelanguage.v1alpha.TextService", + "shortName": "TextService" + }, + "shortName": "GenerateText" + }, + "parameters": [ + { + "name": "request", + "type": "google.ai.generativelanguage_v1alpha.types.GenerateTextRequest" + }, + { + "name": "model", + "type": "str" + }, + { + "name": "prompt", + "type": "google.ai.generativelanguage_v1alpha.types.TextPrompt" + }, + { + "name": "temperature", + 
"type": "float" + }, + { + "name": "candidate_count", + "type": "int" + }, + { + "name": "max_output_tokens", + "type": "int" + }, + { + "name": "top_p", + "type": "float" + }, + { + "name": "top_k", + "type": "int" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.ai.generativelanguage_v1alpha.types.GenerateTextResponse", + "shortName": "generate_text" + }, + "description": "Sample for GenerateText", + "file": "generativelanguage_v1alpha_generated_text_service_generate_text_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "generativelanguage_v1alpha_generated_TextService_GenerateText_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "generativelanguage_v1alpha_generated_text_service_generate_text_sync.py" + } + ] +} diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1beta.json b/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1beta.json index 3f701f0c95ae..d0fc8436b841 100644 --- a/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1beta.json +++ b/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1beta.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-ai-generativelanguage", - "version": "0.6.14" + "version": "0.6.16" }, "snippets": [ { diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1beta2.json b/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1beta2.json index 6711a3cbfb9f..8a49a96f6dd4 100644 --- a/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1beta2.json +++ b/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1beta2.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-ai-generativelanguage", - "version": "0.6.14" + "version": "0.6.16" }, "snippets": [ { diff --git a/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1beta3.json b/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1beta3.json index 117d37b5d52f..89c896b9ef5d 100644 --- a/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1beta3.json +++ b/packages/google-ai-generativelanguage/samples/generated_samples/snippet_metadata_google.ai.generativelanguage.v1beta3.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-ai-generativelanguage", - "version": "0.6.14" + "version": "0.6.16" }, "snippets": [ { diff --git a/packages/google-ai-generativelanguage/scripts/fixup_generativelanguage_v1alpha_keywords.py 
b/packages/google-ai-generativelanguage/scripts/fixup_generativelanguage_v1alpha_keywords.py new file mode 100644 index 000000000000..1e4d9e291ab0 --- /dev/null +++ b/packages/google-ai-generativelanguage/scripts/fixup_generativelanguage_v1alpha_keywords.py @@ -0,0 +1,231 @@ +#! /usr/bin/env python3 +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import argparse +import os +import libcst as cst +import pathlib +import sys +from typing import (Any, Callable, Dict, List, Sequence, Tuple) + + +def partition( + predicate: Callable[[Any], bool], + iterator: Sequence[Any] +) -> Tuple[List[Any], List[Any]]: + """A stable, out-of-place partition.""" + results = ([], []) + + for i in iterator: + results[int(predicate(i))].append(i) + + # Returns trueList, falseList + return results[1], results[0] + + +class generativelanguageCallTransformer(cst.CSTTransformer): + CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') + METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { + 'batch_create_chunks': ('requests', 'parent', ), + 'batch_delete_chunks': ('requests', 'parent', ), + 'batch_embed_contents': ('model', 'requests', ), + 'batch_embed_text': ('model', 'texts', 'requests', ), + 'batch_update_chunks': ('requests', 'parent', ), + 'bidi_generate_content': ('setup', 'client_content', 'realtime_input', 'tool_response', ), + 'count_message_tokens': ('model', 'prompt', ), + 'count_text_tokens': ('model', 'prompt', ), + 'count_tokens': ('model', 'contents', 'generate_content_request', ), + 'create_cached_content': ('cached_content', ), + 'create_chunk': ('parent', 'chunk', ), + 'create_corpus': ('corpus', ), + 'create_document': ('parent', 'document', ), + 'create_file': ('file', ), + 'create_permission': ('parent', 'permission', ), + 'create_tuned_model': ('tuned_model', 'tuned_model_id', ), + 'delete_cached_content': ('name', ), + 'delete_chunk': ('name', ), + 'delete_corpus': ('name', 'force', ), + 'delete_document': ('name', 'force', ), + 'delete_file': ('name', ), + 'delete_permission': ('name', ), + 'delete_tuned_model': ('name', ), + 'embed_content': ('model', 'content', 'task_type', 'title', 'output_dimensionality', ), + 'embed_text': ('model', 'text', ), + 'generate_answer': ('model', 'contents', 'answer_style', 'inline_passages', 'semantic_retriever', 'safety_settings', 'temperature', ), + 'generate_content': ('model', 'contents', 'system_instruction', 'tools', 'tool_config', 'safety_settings', 'generation_config', 'cached_content', ), + 'generate_message': ('model', 'prompt', 'temperature', 'candidate_count', 'top_p', 'top_k', ), + 'generate_text': ('model', 'prompt', 'temperature', 'candidate_count', 'max_output_tokens', 'top_p', 'top_k', 'safety_settings', 'stop_sequences', ), + 'get_cached_content': ('name', ), + 'get_chunk': ('name', ), + 'get_corpus': ('name', ), + 'get_document': ('name', ), + 'get_file': ('name', ), + 'get_model': ('name', ), + 'get_permission': ('name', ), + 'get_tuned_model': ('name', ), + 'list_cached_contents': 
('page_size', 'page_token', ), + 'list_chunks': ('parent', 'page_size', 'page_token', ), + 'list_corpora': ('page_size', 'page_token', ), + 'list_documents': ('parent', 'page_size', 'page_token', ), + 'list_files': ('page_size', 'page_token', ), + 'list_models': ('page_size', 'page_token', ), + 'list_permissions': ('parent', 'page_size', 'page_token', ), + 'list_tuned_models': ('page_size', 'page_token', 'filter', ), + 'predict': ('model', 'instances', 'parameters', ), + 'query_corpus': ('name', 'query', 'metadata_filters', 'results_count', ), + 'query_document': ('name', 'query', 'results_count', 'metadata_filters', ), + 'stream_generate_content': ('model', 'contents', 'system_instruction', 'tools', 'tool_config', 'safety_settings', 'generation_config', 'cached_content', ), + 'transfer_ownership': ('name', 'email_address', ), + 'update_cached_content': ('cached_content', 'update_mask', ), + 'update_chunk': ('chunk', 'update_mask', ), + 'update_corpus': ('corpus', 'update_mask', ), + 'update_document': ('document', 'update_mask', ), + 'update_permission': ('permission', 'update_mask', ), + 'update_tuned_model': ('tuned_model', 'update_mask', ), + } + + def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: + try: + key = original.func.attr.value + kword_params = self.METHOD_TO_PARAMS[key] + except (AttributeError, KeyError): + # Either not a method from the API or too convoluted to be sure. + return updated + + # If the existing code is valid, keyword args come after positional args. + # Therefore, all positional args must map to the first parameters. + args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) + if any(k.keyword.value == "request" for k in kwargs): + # We've already fixed this file, don't fix it again. + return updated + + kwargs, ctrl_kwargs = partition( + lambda a: a.keyword.value not in self.CTRL_PARAMS, + kwargs + ) + + args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] + ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) + for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) + + request_arg = cst.Arg( + value=cst.Dict([ + cst.DictElement( + cst.SimpleString("'{}'".format(name)), +cst.Element(value=arg.value) + ) + # Note: the args + kwargs looks silly, but keep in mind that + # the control parameters had to be stripped out, and that + # those could have been passed positionally or by keyword. + for name, arg in zip(kword_params, args + kwargs)]), + keyword=cst.Name("request") + ) + + return updated.with_changes( + args=[request_arg] + ctrl_kwargs + ) + + +def fix_files( + in_dir: pathlib.Path, + out_dir: pathlib.Path, + *, + transformer=generativelanguageCallTransformer(), +): + """Duplicate the input dir to the output dir, fixing file method calls. + + Preconditions: + * in_dir is a real directory + * out_dir is a real, empty directory + """ + pyfile_gen = ( + pathlib.Path(os.path.join(root, f)) + for root, _, files in os.walk(in_dir) + for f in files if os.path.splitext(f)[1] == ".py" + ) + + for fpath in pyfile_gen: + with open(fpath, 'r') as f: + src = f.read() + + # Parse the code and insert method call fixes. + tree = cst.parse_module(src) + updated = tree.visit(transformer) + + # Create the path and directory structure for the new file. + updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) + updated_path.parent.mkdir(parents=True, exist_ok=True) + + # Generate the updated source file at the corresponding path. 
+ with open(updated_path, 'w') as f: + f.write(updated.code) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser( + description="""Fix up source that uses the generativelanguage client library. + +The existing sources are NOT overwritten but are copied to output_dir with changes made. + +Note: This tool operates at a best-effort level at converting positional + parameters in client method calls to keyword based parameters. + Cases where it WILL FAIL include + A) * or ** expansion in a method call. + B) Calls via function or method alias (includes free function calls) + C) Indirect or dispatched calls (e.g. the method is looked up dynamically) + + These all constitute false negatives. The tool will also detect false + positives when an API method shares a name with another method. +""") + parser.add_argument( + '-d', + '--input-directory', + required=True, + dest='input_dir', + help='the input directory to walk for python files to fix up', + ) + parser.add_argument( + '-o', + '--output-directory', + required=True, + dest='output_dir', + help='the directory to output files fixed via un-flattening', + ) + args = parser.parse_args() + input_dir = pathlib.Path(args.input_dir) + output_dir = pathlib.Path(args.output_dir) + if not input_dir.is_dir(): + print( + f"input directory '{input_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if not output_dir.is_dir(): + print( + f"output directory '{output_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if os.listdir(output_dir): + print( + f"output directory '{output_dir}' is not empty", + file=sys.stderr, + ) + sys.exit(-1) + + fix_files(input_dir, output_dir) diff --git a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1/test_generative_service.py b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1/test_generative_service.py index 3f811fb7f4af..a684d970a183 100644 --- a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1/test_generative_service.py +++ b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1/test_generative_service.py @@ -62,6 +62,13 @@ from google.ai.generativelanguage_v1.types import content as gag_content from google.ai.generativelanguage_v1.types import generative_service, safety +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -329,6 +336,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
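The fixup script added above folds the flattened positional/keyword arguments of client method calls into a single request dict, while the control parameters (retry, timeout, metadata) remain ordinary keywords. A minimal sketch of that rewrite, using update_corpus purely as an illustration (the call shown is hypothetical, not taken from the generated samples; the parameter order follows the METHOD_TO_PARAMS entry 'update_corpus': ('corpus', 'update_mask')):

    # Before fixup: flattened arguments, as accepted by older call sites
    client.update_corpus(corpus, update_mask, timeout=30.0)

    # After fixup: API parameters folded into a request dict; control params stay as keywords
    client.update_corpus(request={'corpus': corpus, 'update_mask': update_mask}, timeout=30.0)

Calls that already pass request=..., or that the transformer cannot safely analyze (the failure cases listed in the script's help text), are left unchanged.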
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = GenerativeServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = GenerativeServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -4193,10 +4243,14 @@ def test_generate_content_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.GenerativeServiceRestInterceptor, "post_generate_content" ) as post, mock.patch.object( + transports.GenerativeServiceRestInterceptor, + "post_generate_content_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.GenerativeServiceRestInterceptor, "pre_generate_content" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = generative_service.GenerateContentRequest.pb( generative_service.GenerateContentRequest() ) @@ -4222,6 +4276,10 @@ def test_generate_content_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = generative_service.GenerateContentResponse() + post_with_metadata.return_value = ( + generative_service.GenerateContentResponse(), + metadata, + ) client.generate_content( request, @@ -4233,6 +4291,7 @@ def test_generate_content_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_stream_generate_content_rest_bad_request( @@ -4321,10 +4380,14 @@ def test_stream_generate_content_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.GenerativeServiceRestInterceptor, "post_stream_generate_content" ) as post, mock.patch.object( + transports.GenerativeServiceRestInterceptor, + "post_stream_generate_content_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.GenerativeServiceRestInterceptor, "pre_stream_generate_content" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = generative_service.GenerateContentRequest.pb( generative_service.GenerateContentRequest() ) @@ -4350,6 +4413,10 @@ def test_stream_generate_content_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = generative_service.GenerateContentResponse() + post_with_metadata.return_value = ( + generative_service.GenerateContentResponse(), + metadata, + ) client.stream_generate_content( request, 
@@ -4361,6 +4428,7 @@ def test_stream_generate_content_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_embed_content_rest_bad_request( @@ -4442,10 +4510,13 @@ def test_embed_content_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.GenerativeServiceRestInterceptor, "post_embed_content" ) as post, mock.patch.object( + transports.GenerativeServiceRestInterceptor, "post_embed_content_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.GenerativeServiceRestInterceptor, "pre_embed_content" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = generative_service.EmbedContentRequest.pb( generative_service.EmbedContentRequest() ) @@ -4471,6 +4542,10 @@ def test_embed_content_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = generative_service.EmbedContentResponse() + post_with_metadata.return_value = ( + generative_service.EmbedContentResponse(), + metadata, + ) client.embed_content( request, @@ -4482,6 +4557,7 @@ def test_embed_content_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_batch_embed_contents_rest_bad_request( @@ -4563,10 +4639,14 @@ def test_batch_embed_contents_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.GenerativeServiceRestInterceptor, "post_batch_embed_contents" ) as post, mock.patch.object( + transports.GenerativeServiceRestInterceptor, + "post_batch_embed_contents_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.GenerativeServiceRestInterceptor, "pre_batch_embed_contents" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = generative_service.BatchEmbedContentsRequest.pb( generative_service.BatchEmbedContentsRequest() ) @@ -4592,6 +4672,10 @@ def test_batch_embed_contents_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = generative_service.BatchEmbedContentsResponse() + post_with_metadata.return_value = ( + generative_service.BatchEmbedContentsResponse(), + metadata, + ) client.batch_embed_contents( request, @@ -4603,6 +4687,7 @@ def test_batch_embed_contents_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_count_tokens_rest_bad_request( @@ -4687,10 +4772,13 @@ def test_count_tokens_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.GenerativeServiceRestInterceptor, "post_count_tokens" ) as post, mock.patch.object( + transports.GenerativeServiceRestInterceptor, "post_count_tokens_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.GenerativeServiceRestInterceptor, "pre_count_tokens" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = generative_service.CountTokensRequest.pb( generative_service.CountTokensRequest() ) @@ -4716,6 +4804,10 @@ def test_count_tokens_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = generative_service.CountTokensResponse() + post_with_metadata.return_value = ( + generative_service.CountTokensResponse(), + metadata, + ) client.count_tokens( request, @@ -4727,6 +4819,7 @@ def 
test_count_tokens_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_cancel_operation_rest_bad_request( diff --git a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1/test_model_service.py b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1/test_model_service.py index d2403acffb14..1f65cb794424 100644 --- a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1/test_model_service.py +++ b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1/test_model_service.py @@ -61,6 +61,13 @@ ) from google.ai.generativelanguage_v1.types import model, model_service +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -304,6 +311,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = ModelServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = ModelServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -2531,10 +2581,13 @@ def test_get_model_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ModelServiceRestInterceptor, "post_get_model" ) as post, mock.patch.object( + transports.ModelServiceRestInterceptor, "post_get_model_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ModelServiceRestInterceptor, "pre_get_model" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = model_service.GetModelRequest.pb(model_service.GetModelRequest()) transcode.return_value = { "method": "post", @@ -2556,6 +2609,7 @@ def test_get_model_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = model.Model() + post_with_metadata.return_value = model.Model(), metadata client.get_model( request, @@ -2567,6 +2621,7 @@ def test_get_model_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + 
post_with_metadata.assert_called_once() def test_list_models_rest_bad_request(request_type=model_service.ListModelsRequest): @@ -2649,10 +2704,13 @@ def test_list_models_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ModelServiceRestInterceptor, "post_list_models" ) as post, mock.patch.object( + transports.ModelServiceRestInterceptor, "post_list_models_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ModelServiceRestInterceptor, "pre_list_models" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = model_service.ListModelsRequest.pb( model_service.ListModelsRequest() ) @@ -2678,6 +2736,7 @@ def test_list_models_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = model_service.ListModelsResponse() + post_with_metadata.return_value = model_service.ListModelsResponse(), metadata client.list_models( request, @@ -2689,6 +2748,7 @@ def test_list_models_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_cancel_operation_rest_bad_request( diff --git a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1alpha/__init__.py b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1alpha/__init__.py new file mode 100644 index 000000000000..8f6cf068242c --- /dev/null +++ b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1alpha/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1alpha/test_cache_service.py b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1alpha/test_cache_service.py new file mode 100644 index 000000000000..a8015a1afd55 --- /dev/null +++ b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1alpha/test_cache_service.py @@ -0,0 +1,6187 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +from collections.abc import AsyncIterable, Iterable +import json +import math + +from google.api_core import api_core_version +from google.protobuf import json_format +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +try: + from google.auth.aio import credentials as ga_credentials_async + + HAS_GOOGLE_AUTH_AIO = True +except ImportError: # pragma: NO COVER + HAS_GOOGLE_AUTH_AIO = False + +from google.api_core import gapic_v1, grpc_helpers, grpc_helpers_async, path_template +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import struct_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + +from google.ai.generativelanguage_v1alpha.services.cache_service import ( + CacheServiceAsyncClient, + CacheServiceClient, + pagers, + transports, +) +from google.ai.generativelanguage_v1alpha.types import ( + cached_content as gag_cached_content, +) +from google.ai.generativelanguage_v1alpha.types import cache_service +from google.ai.generativelanguage_v1alpha.types import cached_content +from google.ai.generativelanguage_v1alpha.types import content + +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + + +async def mock_async_gen(data, chunk_size=1): + for i in range(0, len(data)): # pragma: NO COVER + chunk = data[i : i + chunk_size] + yield chunk.encode("utf-8") + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded. +# See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107. +def async_anonymous_credentials(): + if HAS_GOOGLE_AUTH_AIO: + return ga_credentials_async.AnonymousCredentials() + return ga_credentials.AnonymousCredentials() + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +# If default endpoint template is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint template so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint_template(client): + return ( + "test.{UNIVERSE_DOMAIN}" + if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) + else client._DEFAULT_ENDPOINT_TEMPLATE + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert CacheServiceClient._get_default_mtls_endpoint(None) is None + assert ( + CacheServiceClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + ) + assert ( + CacheServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + CacheServiceClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + CacheServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert CacheServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +def test__read_environment_variables(): + assert CacheServiceClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert CacheServiceClient._read_environment_variables() == (True, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert CacheServiceClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + CacheServiceClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + assert CacheServiceClient._read_environment_variables() == ( + False, + "never", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + assert CacheServiceClient._read_environment_variables() == ( + False, + "always", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): + assert CacheServiceClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + CacheServiceClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): + assert CacheServiceClient._read_environment_variables() == ( + False, + "auto", + "foo.com", + ) + + +def test__get_client_cert_source(): + mock_provided_cert_source = mock.Mock() + mock_default_cert_source = mock.Mock() + + assert CacheServiceClient._get_client_cert_source(None, False) is None + assert ( + CacheServiceClient._get_client_cert_source(mock_provided_cert_source, False) + is None + ) + assert ( + CacheServiceClient._get_client_cert_source(mock_provided_cert_source, True) + == mock_provided_cert_source + ) + + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", return_value=True + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_default_cert_source, + ): + assert ( + 
CacheServiceClient._get_client_cert_source(None, True) + is mock_default_cert_source + ) + assert ( + CacheServiceClient._get_client_cert_source( + mock_provided_cert_source, "true" + ) + is mock_provided_cert_source + ) + + +@mock.patch.object( + CacheServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(CacheServiceClient), +) +@mock.patch.object( + CacheServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(CacheServiceAsyncClient), +) +def test__get_api_endpoint(): + api_override = "foo.com" + mock_client_cert_source = mock.Mock() + default_universe = CacheServiceClient._DEFAULT_UNIVERSE + default_endpoint = CacheServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = CacheServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + assert ( + CacheServiceClient._get_api_endpoint( + api_override, mock_client_cert_source, default_universe, "always" + ) + == api_override + ) + assert ( + CacheServiceClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "auto" + ) + == CacheServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + CacheServiceClient._get_api_endpoint(None, None, default_universe, "auto") + == default_endpoint + ) + assert ( + CacheServiceClient._get_api_endpoint(None, None, default_universe, "always") + == CacheServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + CacheServiceClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "always" + ) + == CacheServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + CacheServiceClient._get_api_endpoint(None, None, mock_universe, "never") + == mock_endpoint + ) + assert ( + CacheServiceClient._get_api_endpoint(None, None, default_universe, "never") + == default_endpoint + ) + + with pytest.raises(MutualTLSChannelError) as excinfo: + CacheServiceClient._get_api_endpoint( + None, mock_client_cert_source, mock_universe, "auto" + ) + assert ( + str(excinfo.value) + == "mTLS is not supported in any universe other than googleapis.com." + ) + + +def test__get_universe_domain(): + client_universe_domain = "foo.com" + universe_domain_env = "bar.com" + + assert ( + CacheServiceClient._get_universe_domain( + client_universe_domain, universe_domain_env + ) + == client_universe_domain + ) + assert ( + CacheServiceClient._get_universe_domain(None, universe_domain_env) + == universe_domain_env + ) + assert ( + CacheServiceClient._get_universe_domain(None, None) + == CacheServiceClient._DEFAULT_UNIVERSE + ) + + with pytest.raises(ValueError) as excinfo: + CacheServiceClient._get_universe_domain("", None) + assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+ + +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = CacheServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = CacheServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (CacheServiceClient, "grpc"), + (CacheServiceAsyncClient, "grpc_asyncio"), + (CacheServiceClient, "rest"), + ], +) +def test_cache_service_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "generativelanguage.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://generativelanguage.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.CacheServiceGrpcTransport, "grpc"), + (transports.CacheServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.CacheServiceRestTransport, "rest"), + ], +) +def test_cache_service_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (CacheServiceClient, "grpc"), + (CacheServiceAsyncClient, "grpc_asyncio"), + (CacheServiceClient, "rest"), + ], +) +def test_cache_service_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = 
client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "generativelanguage.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://generativelanguage.googleapis.com" + ) + + +def test_cache_service_client_get_transport_class(): + transport = CacheServiceClient.get_transport_class() + available_transports = [ + transports.CacheServiceGrpcTransport, + transports.CacheServiceRestTransport, + ] + assert transport in available_transports + + transport = CacheServiceClient.get_transport_class("grpc") + assert transport == transports.CacheServiceGrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (CacheServiceClient, transports.CacheServiceGrpcTransport, "grpc"), + ( + CacheServiceAsyncClient, + transports.CacheServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (CacheServiceClient, transports.CacheServiceRestTransport, "rest"), + ], +) +@mock.patch.object( + CacheServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(CacheServiceClient), +) +@mock.patch.object( + CacheServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(CacheServiceAsyncClient), +) +def test_cache_service_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(CacheServiceClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(CacheServiceClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + (CacheServiceClient, transports.CacheServiceGrpcTransport, "grpc", "true"), + ( + CacheServiceAsyncClient, + transports.CacheServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + (CacheServiceClient, transports.CacheServiceGrpcTransport, "grpc", "false"), + ( + CacheServiceAsyncClient, + transports.CacheServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + (CacheServiceClient, transports.CacheServiceRestTransport, "rest", "true"), + (CacheServiceClient, transports.CacheServiceRestTransport, "rest", "false"), + ], +) +@mock.patch.object( + CacheServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(CacheServiceClient), +) +@mock.patch.object( + CacheServiceAsyncClient, + 
"_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(CacheServiceAsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_cache_service_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [CacheServiceClient, CacheServiceAsyncClient]) +@mock.patch.object( + CacheServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(CacheServiceClient) +) +@mock.patch.object( + CacheServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(CacheServiceAsyncClient), +) +def test_cache_service_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + +@pytest.mark.parametrize("client_class", [CacheServiceClient, CacheServiceAsyncClient]) +@mock.patch.object( + CacheServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(CacheServiceClient), +) +@mock.patch.object( + CacheServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(CacheServiceAsyncClient), +) +def test_cache_service_client_client_api_endpoint(client_class): + mock_client_cert_source = client_cert_source_callback + api_override = "foo.com" + default_universe = CacheServiceClient._DEFAULT_UNIVERSE + default_endpoint = CacheServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = CacheServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", + # use ClientOptions.api_endpoint as the api endpoint regardless. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ): + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=api_override + ) + client = client_class( + client_options=options, + credentials=ga_credentials.AnonymousCredentials(), + ) + assert client.api_endpoint == api_override + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, + # and ClientOptions.universe_domain="bar.com", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. + options = client_options.ClientOptions() + universe_exists = hasattr(options, "universe_domain") + if universe_exists: + options = client_options.ClientOptions(universe_domain=mock_universe) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + else: + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == ( + mock_endpoint if universe_exists else default_endpoint + ) + assert client.universe_domain == ( + mock_universe if universe_exists else default_universe + ) + + # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + options = client_options.ClientOptions() + if hasattr(options, "universe_domain"): + delattr(options, "universe_domain") + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == default_endpoint + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (CacheServiceClient, transports.CacheServiceGrpcTransport, "grpc"), + ( + CacheServiceAsyncClient, + transports.CacheServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (CacheServiceClient, transports.CacheServiceRestTransport, "rest"), + ], +) +def test_cache_service_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + CacheServiceClient, + transports.CacheServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + CacheServiceAsyncClient, + transports.CacheServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + (CacheServiceClient, transports.CacheServiceRestTransport, "rest", None), + ], +) +def test_cache_service_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_cache_service_client_client_options_from_dict(): + with mock.patch( + "google.ai.generativelanguage_v1alpha.services.cache_service.transports.CacheServiceGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = CacheServiceClient(client_options={"api_endpoint": "squid.clam.whelk"}) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + CacheServiceClient, + transports.CacheServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + CacheServiceAsyncClient, + transports.CacheServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_cache_service_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. 
+ with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "generativelanguage.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=(), + scopes=None, + default_host="generativelanguage.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cache_service.ListCachedContentsRequest, + dict, + ], +) +def test_list_cached_contents(request_type, transport: str = "grpc"): + client = CacheServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_cached_contents), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = cache_service.ListCachedContentsResponse( + next_page_token="next_page_token_value", + ) + response = client.list_cached_contents(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = cache_service.ListCachedContentsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListCachedContentsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_cached_contents_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = CacheServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = cache_service.ListCachedContentsRequest( + page_token="page_token_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_cached_contents), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.list_cached_contents(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cache_service.ListCachedContentsRequest( + page_token="page_token_value", + ) + + +def test_list_cached_contents_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CacheServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_cached_contents in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_cached_contents + ] = mock_rpc + request = {} + client.list_cached_contents(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_cached_contents(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_cached_contents_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = CacheServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_cached_contents + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.list_cached_contents + ] = mock_rpc + + request = {} + await client.list_cached_contents(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.list_cached_contents(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_cached_contents_async( + transport: str = "grpc_asyncio", + request_type=cache_service.ListCachedContentsRequest, +): + client = CacheServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_cached_contents), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cache_service.ListCachedContentsResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_cached_contents(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = cache_service.ListCachedContentsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListCachedContentsAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_cached_contents_async_from_dict(): + await test_list_cached_contents_async(request_type=dict) + + +def test_list_cached_contents_pager(transport_name: str = "grpc"): + client = CacheServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_cached_contents), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + cache_service.ListCachedContentsResponse( + cached_contents=[ + cached_content.CachedContent(), + cached_content.CachedContent(), + cached_content.CachedContent(), + ], + next_page_token="abc", + ), + cache_service.ListCachedContentsResponse( + cached_contents=[], + next_page_token="def", + ), + cache_service.ListCachedContentsResponse( + cached_contents=[ + cached_content.CachedContent(), + ], + next_page_token="ghi", + ), + cache_service.ListCachedContentsResponse( + cached_contents=[ + cached_content.CachedContent(), + cached_content.CachedContent(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + pager = client.list_cached_contents(request={}, retry=retry, timeout=timeout) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, cached_content.CachedContent) for i in results) + + +def test_list_cached_contents_pages(transport_name: str = "grpc"): + client = CacheServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_cached_contents), "__call__" + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + cache_service.ListCachedContentsResponse( + cached_contents=[ + cached_content.CachedContent(), + cached_content.CachedContent(), + cached_content.CachedContent(), + ], + next_page_token="abc", + ), + cache_service.ListCachedContentsResponse( + cached_contents=[], + next_page_token="def", + ), + cache_service.ListCachedContentsResponse( + cached_contents=[ + cached_content.CachedContent(), + ], + next_page_token="ghi", + ), + cache_service.ListCachedContentsResponse( + cached_contents=[ + cached_content.CachedContent(), + cached_content.CachedContent(), + ], + ), + RuntimeError, + ) + pages = list(client.list_cached_contents(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_cached_contents_async_pager(): + client = CacheServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_cached_contents), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + cache_service.ListCachedContentsResponse( + cached_contents=[ + cached_content.CachedContent(), + cached_content.CachedContent(), + cached_content.CachedContent(), + ], + next_page_token="abc", + ), + cache_service.ListCachedContentsResponse( + cached_contents=[], + next_page_token="def", + ), + cache_service.ListCachedContentsResponse( + cached_contents=[ + cached_content.CachedContent(), + ], + next_page_token="ghi", + ), + cache_service.ListCachedContentsResponse( + cached_contents=[ + cached_content.CachedContent(), + cached_content.CachedContent(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_cached_contents( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, cached_content.CachedContent) for i in responses) + + +@pytest.mark.asyncio +async def test_list_cached_contents_async_pages(): + client = CacheServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_cached_contents), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + cache_service.ListCachedContentsResponse( + cached_contents=[ + cached_content.CachedContent(), + cached_content.CachedContent(), + cached_content.CachedContent(), + ], + next_page_token="abc", + ), + cache_service.ListCachedContentsResponse( + cached_contents=[], + next_page_token="def", + ), + cache_service.ListCachedContentsResponse( + cached_contents=[ + cached_content.CachedContent(), + ], + next_page_token="ghi", + ), + cache_service.ListCachedContentsResponse( + cached_contents=[ + cached_content.CachedContent(), + cached_content.CachedContent(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_cached_contents(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + cache_service.CreateCachedContentRequest, + dict, + ], +) +def test_create_cached_content(request_type, transport: str = "grpc"): + client = CacheServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_cached_content), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = gag_cached_content.CachedContent( + name="name_value", + display_name="display_name_value", + model="model_value", + ) + response = client.create_cached_content(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = cache_service.CreateCachedContentRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, gag_cached_content.CachedContent) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.model == "model_value" + + +def test_create_cached_content_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = CacheServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = cache_service.CreateCachedContentRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_cached_content), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.create_cached_content(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cache_service.CreateCachedContentRequest() + + +def test_create_cached_content_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CacheServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.create_cached_content + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_cached_content + ] = mock_rpc + request = {} + client.create_cached_content(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.create_cached_content(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_cached_content_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = CacheServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.create_cached_content + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.create_cached_content + ] = mock_rpc + + request = {} + await client.create_cached_content(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.create_cached_content(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_cached_content_async( + transport: str = "grpc_asyncio", + request_type=cache_service.CreateCachedContentRequest, +): + client = CacheServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_cached_content), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gag_cached_content.CachedContent( + name="name_value", + display_name="display_name_value", + model="model_value", + ) + ) + response = await client.create_cached_content(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = cache_service.CreateCachedContentRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, gag_cached_content.CachedContent) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.model == "model_value" + + +@pytest.mark.asyncio +async def test_create_cached_content_async_from_dict(): + await test_create_cached_content_async(request_type=dict) + + +def test_create_cached_content_flattened(): + client = CacheServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_cached_content), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = gag_cached_content.CachedContent() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_cached_content( + cached_content=gag_cached_content.CachedContent( + expire_time=timestamp_pb2.Timestamp(seconds=751) + ), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].cached_content + mock_val = gag_cached_content.CachedContent( + expire_time=timestamp_pb2.Timestamp(seconds=751) + ) + assert arg == mock_val + + +def test_create_cached_content_flattened_error(): + client = CacheServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_cached_content( + cache_service.CreateCachedContentRequest(), + cached_content=gag_cached_content.CachedContent( + expire_time=timestamp_pb2.Timestamp(seconds=751) + ), + ) + + +@pytest.mark.asyncio +async def test_create_cached_content_flattened_async(): + client = CacheServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_cached_content), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = gag_cached_content.CachedContent() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gag_cached_content.CachedContent() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_cached_content( + cached_content=gag_cached_content.CachedContent( + expire_time=timestamp_pb2.Timestamp(seconds=751) + ), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].cached_content + mock_val = gag_cached_content.CachedContent( + expire_time=timestamp_pb2.Timestamp(seconds=751) + ) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_cached_content_flattened_error_async(): + client = CacheServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_cached_content( + cache_service.CreateCachedContentRequest(), + cached_content=gag_cached_content.CachedContent( + expire_time=timestamp_pb2.Timestamp(seconds=751) + ), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cache_service.GetCachedContentRequest, + dict, + ], +) +def test_get_cached_content(request_type, transport: str = "grpc"): + client = CacheServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_cached_content), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = cached_content.CachedContent( + name="name_value", + display_name="display_name_value", + model="model_value", + ) + response = client.get_cached_content(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = cache_service.GetCachedContentRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, cached_content.CachedContent) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.model == "model_value" + + +def test_get_cached_content_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = CacheServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = cache_service.GetCachedContentRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_cached_content), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.get_cached_content(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cache_service.GetCachedContentRequest( + name="name_value", + ) + + +def test_get_cached_content_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CacheServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.get_cached_content in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_cached_content + ] = mock_rpc + request = {} + client.get_cached_content(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_cached_content(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_cached_content_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = CacheServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_cached_content + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.get_cached_content + ] = mock_rpc + + request = {} + await client.get_cached_content(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.get_cached_content(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_cached_content_async( + transport: str = "grpc_asyncio", request_type=cache_service.GetCachedContentRequest +): + client = CacheServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_cached_content), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cached_content.CachedContent( + name="name_value", + display_name="display_name_value", + model="model_value", + ) + ) + response = await client.get_cached_content(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = cache_service.GetCachedContentRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, cached_content.CachedContent) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.model == "model_value" + + +@pytest.mark.asyncio +async def test_get_cached_content_async_from_dict(): + await test_get_cached_content_async(request_type=dict) + + +def test_get_cached_content_field_headers(): + client = CacheServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cache_service.GetCachedContentRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_cached_content), "__call__" + ) as call: + call.return_value = cached_content.CachedContent() + client.get_cached_content(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_cached_content_field_headers_async(): + client = CacheServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cache_service.GetCachedContentRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_cached_content), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cached_content.CachedContent() + ) + await client.get_cached_content(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_cached_content_flattened(): + client = CacheServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_cached_content), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = cached_content.CachedContent() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_cached_content( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_cached_content_flattened_error(): + client = CacheServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_cached_content( + cache_service.GetCachedContentRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_cached_content_flattened_async(): + client = CacheServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_cached_content), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = cached_content.CachedContent() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cached_content.CachedContent() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_cached_content( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_cached_content_flattened_error_async(): + client = CacheServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_cached_content( + cache_service.GetCachedContentRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cache_service.UpdateCachedContentRequest, + dict, + ], +) +def test_update_cached_content(request_type, transport: str = "grpc"): + client = CacheServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_cached_content), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = gag_cached_content.CachedContent( + name="name_value", + display_name="display_name_value", + model="model_value", + ) + response = client.update_cached_content(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = cache_service.UpdateCachedContentRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, gag_cached_content.CachedContent) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.model == "model_value" + + +def test_update_cached_content_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = CacheServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = cache_service.UpdateCachedContentRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_cached_content), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.update_cached_content(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cache_service.UpdateCachedContentRequest() + + +def test_update_cached_content_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CacheServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.update_cached_content + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_cached_content + ] = mock_rpc + request = {} + client.update_cached_content(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.update_cached_content(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_cached_content_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = CacheServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.update_cached_content + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.update_cached_content + ] = mock_rpc + + request = {} + await client.update_cached_content(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + await client.update_cached_content(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_cached_content_async( + transport: str = "grpc_asyncio", + request_type=cache_service.UpdateCachedContentRequest, +): + client = CacheServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_cached_content), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gag_cached_content.CachedContent( + name="name_value", + display_name="display_name_value", + model="model_value", + ) + ) + response = await client.update_cached_content(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = cache_service.UpdateCachedContentRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, gag_cached_content.CachedContent) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.model == "model_value" + + +@pytest.mark.asyncio +async def test_update_cached_content_async_from_dict(): + await test_update_cached_content_async(request_type=dict) + + +def test_update_cached_content_field_headers(): + client = CacheServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cache_service.UpdateCachedContentRequest() + + request.cached_content.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_cached_content), "__call__" + ) as call: + call.return_value = gag_cached_content.CachedContent() + client.update_cached_content(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "cached_content.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_cached_content_field_headers_async(): + client = CacheServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cache_service.UpdateCachedContentRequest() + + request.cached_content.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_cached_content), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gag_cached_content.CachedContent() + ) + await client.update_cached_content(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "cached_content.name=name_value", + ) in kw["metadata"] + + +def test_update_cached_content_flattened(): + client = CacheServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_cached_content), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = gag_cached_content.CachedContent() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_cached_content( + cached_content=gag_cached_content.CachedContent( + expire_time=timestamp_pb2.Timestamp(seconds=751) + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].cached_content + mock_val = gag_cached_content.CachedContent( + expire_time=timestamp_pb2.Timestamp(seconds=751) + ) + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +def test_update_cached_content_flattened_error(): + client = CacheServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_cached_content( + cache_service.UpdateCachedContentRequest(), + cached_content=gag_cached_content.CachedContent( + expire_time=timestamp_pb2.Timestamp(seconds=751) + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_update_cached_content_flattened_async(): + client = CacheServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_cached_content), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = gag_cached_content.CachedContent() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gag_cached_content.CachedContent() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_cached_content( + cached_content=gag_cached_content.CachedContent( + expire_time=timestamp_pb2.Timestamp(seconds=751) + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].cached_content + mock_val = gag_cached_content.CachedContent( + expire_time=timestamp_pb2.Timestamp(seconds=751) + ) + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_update_cached_content_flattened_error_async(): + client = CacheServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_cached_content( + cache_service.UpdateCachedContentRequest(), + cached_content=gag_cached_content.CachedContent( + expire_time=timestamp_pb2.Timestamp(seconds=751) + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cache_service.DeleteCachedContentRequest, + dict, + ], +) +def test_delete_cached_content(request_type, transport: str = "grpc"): + client = CacheServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_cached_content), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_cached_content(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = cache_service.DeleteCachedContentRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_cached_content_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = CacheServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = cache_service.DeleteCachedContentRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_cached_content), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.delete_cached_content(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cache_service.DeleteCachedContentRequest( + name="name_value", + ) + + +def test_delete_cached_content_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CacheServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.delete_cached_content + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_cached_content + ] = mock_rpc + request = {} + client.delete_cached_content(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.delete_cached_content(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_cached_content_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = CacheServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.delete_cached_content + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.delete_cached_content + ] = mock_rpc + + request = {} + await client.delete_cached_content(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.delete_cached_content(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_cached_content_async( + transport: str = "grpc_asyncio", + request_type=cache_service.DeleteCachedContentRequest, +): + client = CacheServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_cached_content), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_cached_content(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = cache_service.DeleteCachedContentRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_cached_content_async_from_dict(): + await test_delete_cached_content_async(request_type=dict) + + +def test_delete_cached_content_field_headers(): + client = CacheServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cache_service.DeleteCachedContentRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_cached_content), "__call__" + ) as call: + call.return_value = None + client.delete_cached_content(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_cached_content_field_headers_async(): + client = CacheServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cache_service.DeleteCachedContentRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_cached_content), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_cached_content(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_cached_content_flattened(): + client = CacheServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_cached_content), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_cached_content( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_cached_content_flattened_error(): + client = CacheServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_cached_content( + cache_service.DeleteCachedContentRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_cached_content_flattened_async(): + client = CacheServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_cached_content), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_cached_content( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_cached_content_flattened_error_async(): + client = CacheServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_cached_content( + cache_service.DeleteCachedContentRequest(), + name="name_value", + ) + + +def test_list_cached_contents_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CacheServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_cached_contents in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_cached_contents + ] = mock_rpc + + request = {} + client.list_cached_contents(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_cached_contents(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_cached_contents_rest_pager(transport: str = "rest"): + client = CacheServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + cache_service.ListCachedContentsResponse( + cached_contents=[ + cached_content.CachedContent(), + cached_content.CachedContent(), + cached_content.CachedContent(), + ], + next_page_token="abc", + ), + cache_service.ListCachedContentsResponse( + cached_contents=[], + next_page_token="def", + ), + cache_service.ListCachedContentsResponse( + cached_contents=[ + cached_content.CachedContent(), + ], + next_page_token="ghi", + ), + cache_service.ListCachedContentsResponse( + cached_contents=[ + cached_content.CachedContent(), + cached_content.CachedContent(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + cache_service.ListCachedContentsResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {} + + pager = client.list_cached_contents(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, cached_content.CachedContent) for i in results) + + pages = list(client.list_cached_contents(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_create_cached_content_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CacheServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.create_cached_content + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_cached_content + ] = mock_rpc + + request = {} + client.create_cached_content(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.create_cached_content(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_cached_content_rest_required_fields( + request_type=cache_service.CreateCachedContentRequest, +): + transport_class = transports.CacheServiceRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_cached_content._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_cached_content._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = CacheServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = gag_cached_content.CachedContent() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = gag_cached_content.CachedContent.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.create_cached_content(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_cached_content_rest_unset_required_fields(): + transport = transports.CacheServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_cached_content._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("cachedContent",))) + + +def test_create_cached_content_rest_flattened(): + client = CacheServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = gag_cached_content.CachedContent() + + # get arguments that satisfy an http rule for this method + sample_request = {} + + # get truthy value for each flattened field + mock_args = dict( + cached_content=gag_cached_content.CachedContent( + expire_time=timestamp_pb2.Timestamp(seconds=751) + ), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = gag_cached_content.CachedContent.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.create_cached_content(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/cachedContents" % client.transport._host, args[1] + ) + + +def test_create_cached_content_rest_flattened_error(transport: str = "rest"): + client = CacheServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_cached_content( + cache_service.CreateCachedContentRequest(), + cached_content=gag_cached_content.CachedContent( + expire_time=timestamp_pb2.Timestamp(seconds=751) + ), + ) + + +def test_get_cached_content_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CacheServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.get_cached_content in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_cached_content + ] = mock_rpc + + request = {} + client.get_cached_content(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_cached_content(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_cached_content_rest_required_fields( + request_type=cache_service.GetCachedContentRequest, +): + transport_class = transports.CacheServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_cached_content._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_cached_content._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = CacheServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = cached_content.CachedContent() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = cached_content.CachedContent.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_cached_content(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_cached_content_rest_unset_required_fields(): + transport = transports.CacheServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_cached_content._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +def test_get_cached_content_rest_flattened(): + client = CacheServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = cached_content.CachedContent() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "cachedContents/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = cached_content.CachedContent.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.get_cached_content(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{name=cachedContents/*}" % client.transport._host, args[1] + ) + + +def test_get_cached_content_rest_flattened_error(transport: str = "rest"): + client = CacheServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_cached_content( + cache_service.GetCachedContentRequest(), + name="name_value", + ) + + +def test_update_cached_content_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CacheServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.update_cached_content + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_cached_content + ] = mock_rpc + + request = {} + client.update_cached_content(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.update_cached_content(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_cached_content_rest_required_fields( + request_type=cache_service.UpdateCachedContentRequest, +): + transport_class = transports.CacheServiceRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_cached_content._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_cached_content._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("update_mask",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = CacheServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = gag_cached_content.CachedContent() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = gag_cached_content.CachedContent.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.update_cached_content(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_cached_content_rest_unset_required_fields(): + transport = transports.CacheServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_cached_content._get_unset_required_fields({}) + assert set(unset_fields) == (set(("updateMask",)) & set(("cachedContent",))) + + +def test_update_cached_content_rest_flattened(): + client = CacheServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = gag_cached_content.CachedContent() + + # get arguments that satisfy an http rule for this method + sample_request = {"cached_content": {"name": "cachedContents/sample1"}} + + # get truthy value for each flattened field + mock_args = dict( + cached_content=gag_cached_content.CachedContent( + expire_time=timestamp_pb2.Timestamp(seconds=751) + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = gag_cached_content.CachedContent.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.update_cached_content(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{cached_content.name=cachedContents/*}" + % client.transport._host, + args[1], + ) + + +def test_update_cached_content_rest_flattened_error(transport: str = "rest"): + client = CacheServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_cached_content( + cache_service.UpdateCachedContentRequest(), + cached_content=gag_cached_content.CachedContent( + expire_time=timestamp_pb2.Timestamp(seconds=751) + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_delete_cached_content_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CacheServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.delete_cached_content + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_cached_content + ] = mock_rpc + + request = {} + client.delete_cached_content(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.delete_cached_content(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_cached_content_rest_required_fields( + request_type=cache_service.DeleteCachedContentRequest, +): + transport_class = transports.CacheServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_cached_content._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_cached_content._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = CacheServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.delete_cached_content(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_cached_content_rest_unset_required_fields(): + transport = transports.CacheServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_cached_content._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +def test_delete_cached_content_rest_flattened(): + client = CacheServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "cachedContents/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.delete_cached_content(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{name=cachedContents/*}" % client.transport._host, args[1] + ) + + +def test_delete_cached_content_rest_flattened_error(transport: str = "rest"): + client = CacheServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_cached_content( + cache_service.DeleteCachedContentRequest(), + name="name_value", + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.CacheServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = CacheServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.CacheServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = CacheServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.CacheServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = CacheServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = CacheServiceClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.CacheServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = CacheServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.CacheServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = CacheServiceClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. 
+ transport = transports.CacheServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.CacheServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.CacheServiceGrpcTransport, + transports.CacheServiceGrpcAsyncIOTransport, + transports.CacheServiceRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +def test_transport_kind_grpc(): + transport = CacheServiceClient.get_transport_class("grpc")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "grpc" + + +def test_initialize_client_w_grpc(): + client = CacheServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_cached_contents_empty_call_grpc(): + client = CacheServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_cached_contents), "__call__" + ) as call: + call.return_value = cache_service.ListCachedContentsResponse() + client.list_cached_contents(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = cache_service.ListCachedContentsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_cached_content_empty_call_grpc(): + client = CacheServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_cached_content), "__call__" + ) as call: + call.return_value = gag_cached_content.CachedContent() + client.create_cached_content(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = cache_service.CreateCachedContentRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_cached_content_empty_call_grpc(): + client = CacheServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_cached_content), "__call__" + ) as call: + call.return_value = cached_content.CachedContent() + client.get_cached_content(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = cache_service.GetCachedContentRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. 
request == None and no flattened fields passed, work. +def test_update_cached_content_empty_call_grpc(): + client = CacheServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_cached_content), "__call__" + ) as call: + call.return_value = gag_cached_content.CachedContent() + client.update_cached_content(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = cache_service.UpdateCachedContentRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_cached_content_empty_call_grpc(): + client = CacheServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_cached_content), "__call__" + ) as call: + call.return_value = None + client.delete_cached_content(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = cache_service.DeleteCachedContentRequest() + + assert args[0] == request_msg + + +def test_transport_kind_grpc_asyncio(): + transport = CacheServiceAsyncClient.get_transport_class("grpc_asyncio")( + credentials=async_anonymous_credentials() + ) + assert transport.kind == "grpc_asyncio" + + +def test_initialize_client_w_grpc_asyncio(): + client = CacheServiceAsyncClient( + credentials=async_anonymous_credentials(), transport="grpc_asyncio" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_cached_contents_empty_call_grpc_asyncio(): + client = CacheServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_cached_contents), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cache_service.ListCachedContentsResponse( + next_page_token="next_page_token_value", + ) + ) + await client.list_cached_contents(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = cache_service.ListCachedContentsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_cached_content_empty_call_grpc_asyncio(): + client = CacheServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_cached_content), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gag_cached_content.CachedContent( + name="name_value", + display_name="display_name_value", + model="model_value", + ) + ) + await client.create_cached_content(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = cache_service.CreateCachedContentRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_cached_content_empty_call_grpc_asyncio(): + client = CacheServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_cached_content), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cached_content.CachedContent( + name="name_value", + display_name="display_name_value", + model="model_value", + ) + ) + await client.get_cached_content(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = cache_service.GetCachedContentRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_cached_content_empty_call_grpc_asyncio(): + client = CacheServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_cached_content), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gag_cached_content.CachedContent( + name="name_value", + display_name="display_name_value", + model="model_value", + ) + ) + await client.update_cached_content(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = cache_service.UpdateCachedContentRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_cached_content_empty_call_grpc_asyncio(): + client = CacheServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_cached_content), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_cached_content(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = cache_service.DeleteCachedContentRequest() + + assert args[0] == request_msg + + +def test_transport_kind_rest(): + transport = CacheServiceClient.get_transport_class("rest")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "rest" + + +def test_list_cached_contents_rest_bad_request( + request_type=cache_service.ListCachedContentsRequest, +): + client = CacheServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_cached_contents(request) + + +@pytest.mark.parametrize( + "request_type", + [ + cache_service.ListCachedContentsRequest, + dict, + ], +) +def test_list_cached_contents_rest_call_success(request_type): + client = CacheServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = cache_service.ListCachedContentsResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = cache_service.ListCachedContentsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.list_cached_contents(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListCachedContentsPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_cached_contents_rest_interceptors(null_interceptor): + transport = transports.CacheServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CacheServiceRestInterceptor(), + ) + client = CacheServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.CacheServiceRestInterceptor, "post_list_cached_contents" + ) as post, mock.patch.object( + transports.CacheServiceRestInterceptor, + "post_list_cached_contents_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.CacheServiceRestInterceptor, "pre_list_cached_contents" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = cache_service.ListCachedContentsRequest.pb( + cache_service.ListCachedContentsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = cache_service.ListCachedContentsResponse.to_json( + cache_service.ListCachedContentsResponse() + ) + req.return_value.content = return_value + + request = cache_service.ListCachedContentsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = cache_service.ListCachedContentsResponse() + post_with_metadata.return_value = ( + cache_service.ListCachedContentsResponse(), + metadata, + ) + + client.list_cached_contents( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_create_cached_content_rest_bad_request( + request_type=cache_service.CreateCachedContentRequest, +): + client = CacheServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.create_cached_content(request) + + +@pytest.mark.parametrize( + "request_type", + [ + cache_service.CreateCachedContentRequest, + dict, + ], +) +def test_create_cached_content_rest_call_success(request_type): + client = CacheServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {} + request_init["cached_content"] = { + "expire_time": {"seconds": 751, "nanos": 543}, + "ttl": {"seconds": 751, "nanos": 543}, + "name": "name_value", + "display_name": "display_name_value", + "model": "model_value", + "system_instruction": { + "parts": [ + { + "text": "text_value", + "inline_data": { + "mime_type": "mime_type_value", + "data": b"data_blob", + }, + "function_call": { + "id": "id_value", + "name": "name_value", + "args": {"fields": {}}, + }, + "function_response": { + "id": "id_value", + "name": "name_value", + "response": {}, + }, + "file_data": { + "mime_type": "mime_type_value", + "file_uri": "file_uri_value", + }, + "executable_code": {"language": 1, "code": "code_value"}, + "code_execution_result": {"outcome": 1, "output": "output_value"}, + } + ], + "role": "role_value", + }, + "contents": {}, + "tools": [ + { + "function_declarations": [ + { + "name": "name_value", + "description": "description_value", + "parameters": { + "type_": 1, + "format_": "format__value", + "description": "description_value", + "nullable": True, + "enum": ["enum_value1", "enum_value2"], + "items": {}, + "max_items": 967, + "min_items": 965, + "properties": {}, + "required": ["required_value1", "required_value2"], + }, + "response": {}, + } + ], + "google_search_retrieval": { + "dynamic_retrieval_config": {"mode": 1, "dynamic_threshold": 0.1809} + }, + "code_execution": {}, + "google_search": {}, + } + ], + "tool_config": { + "function_calling_config": { + "mode": 1, + "allowed_function_names": [ + "allowed_function_names_value1", + "allowed_function_names_value2", + ], + } + }, + "create_time": {}, + "update_time": {}, + "usage_metadata": {"total_token_count": 1836}, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = cache_service.CreateCachedContentRequest.meta.fields["cached_content"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["cached_content"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["cached_content"][field])): + del request_init["cached_content"][field][i][subfield] + else: + del request_init["cached_content"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = gag_cached_content.CachedContent( + name="name_value", + display_name="display_name_value", + model="model_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = gag_cached_content.CachedContent.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.create_cached_content(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, gag_cached_content.CachedContent) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.model == "model_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_cached_content_rest_interceptors(null_interceptor): + transport = transports.CacheServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CacheServiceRestInterceptor(), + ) + client = CacheServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.CacheServiceRestInterceptor, "post_create_cached_content" + ) as post, mock.patch.object( + transports.CacheServiceRestInterceptor, + "post_create_cached_content_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.CacheServiceRestInterceptor, "pre_create_cached_content" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = cache_service.CreateCachedContentRequest.pb( + cache_service.CreateCachedContentRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = gag_cached_content.CachedContent.to_json( + gag_cached_content.CachedContent() + ) + req.return_value.content = return_value + + request = cache_service.CreateCachedContentRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = gag_cached_content.CachedContent() + post_with_metadata.return_value = gag_cached_content.CachedContent(), metadata + + client.create_cached_content( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_get_cached_content_rest_bad_request( + request_type=cache_service.GetCachedContentRequest, +): + client = CacheServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"name": "cachedContents/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_cached_content(request) + + +@pytest.mark.parametrize( + "request_type", + [ + cache_service.GetCachedContentRequest, + dict, + ], +) +def test_get_cached_content_rest_call_success(request_type): + client = CacheServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"name": "cachedContents/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = cached_content.CachedContent( + name="name_value", + display_name="display_name_value", + model="model_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = cached_content.CachedContent.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.get_cached_content(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, cached_content.CachedContent) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.model == "model_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_cached_content_rest_interceptors(null_interceptor): + transport = transports.CacheServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CacheServiceRestInterceptor(), + ) + client = CacheServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.CacheServiceRestInterceptor, "post_get_cached_content" + ) as post, mock.patch.object( + transports.CacheServiceRestInterceptor, "post_get_cached_content_with_metadata" + ) as post_with_metadata, mock.patch.object( + transports.CacheServiceRestInterceptor, "pre_get_cached_content" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = cache_service.GetCachedContentRequest.pb( + cache_service.GetCachedContentRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = cached_content.CachedContent.to_json( + cached_content.CachedContent() + ) + req.return_value.content = return_value + + request = cache_service.GetCachedContentRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = cached_content.CachedContent() + post_with_metadata.return_value = cached_content.CachedContent(), metadata + + client.get_cached_content( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_update_cached_content_rest_bad_request( + request_type=cache_service.UpdateCachedContentRequest, +): + client = CacheServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"cached_content": {"name": "cachedContents/sample1"}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.update_cached_content(request) + + +@pytest.mark.parametrize( + "request_type", + [ + cache_service.UpdateCachedContentRequest, + dict, + ], +) +def test_update_cached_content_rest_call_success(request_type): + client = CacheServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"cached_content": {"name": "cachedContents/sample1"}} + request_init["cached_content"] = { + "expire_time": {"seconds": 751, "nanos": 543}, + "ttl": {"seconds": 751, "nanos": 543}, + "name": "cachedContents/sample1", + "display_name": "display_name_value", + "model": "model_value", + "system_instruction": { + "parts": [ + { + "text": "text_value", + "inline_data": { + "mime_type": "mime_type_value", + "data": b"data_blob", + }, + "function_call": { + "id": "id_value", + "name": "name_value", + "args": {"fields": {}}, + }, + "function_response": { + "id": "id_value", + "name": "name_value", + "response": {}, + }, + "file_data": { + "mime_type": "mime_type_value", + "file_uri": "file_uri_value", + }, + "executable_code": {"language": 1, "code": "code_value"}, + "code_execution_result": {"outcome": 1, "output": "output_value"}, + } + ], + "role": "role_value", + }, + "contents": {}, + "tools": [ + { + "function_declarations": [ + { + "name": "name_value", + "description": "description_value", + "parameters": { + "type_": 1, + "format_": "format__value", + "description": "description_value", + "nullable": True, + "enum": ["enum_value1", "enum_value2"], + "items": {}, + "max_items": 967, + "min_items": 965, + "properties": {}, + "required": ["required_value1", "required_value2"], + }, + "response": {}, + } + ], + "google_search_retrieval": { + "dynamic_retrieval_config": {"mode": 1, "dynamic_threshold": 0.1809} + }, + "code_execution": {}, + "google_search": {}, + } + ], + "tool_config": { + "function_calling_config": { + "mode": 1, + "allowed_function_names": [ + "allowed_function_names_value1", + "allowed_function_names_value2", + ], + } + }, + "create_time": {}, + "update_time": {}, + "usage_metadata": {"total_token_count": 1836}, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = cache_service.UpdateCachedContentRequest.meta.fields["cached_content"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["cached_content"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["cached_content"][field])): + del request_init["cached_content"][field][i][subfield] + else: + del request_init["cached_content"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = gag_cached_content.CachedContent( + name="name_value", + display_name="display_name_value", + model="model_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = gag_cached_content.CachedContent.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.update_cached_content(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, gag_cached_content.CachedContent) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.model == "model_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_cached_content_rest_interceptors(null_interceptor): + transport = transports.CacheServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CacheServiceRestInterceptor(), + ) + client = CacheServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.CacheServiceRestInterceptor, "post_update_cached_content" + ) as post, mock.patch.object( + transports.CacheServiceRestInterceptor, + "post_update_cached_content_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.CacheServiceRestInterceptor, "pre_update_cached_content" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = cache_service.UpdateCachedContentRequest.pb( + cache_service.UpdateCachedContentRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = gag_cached_content.CachedContent.to_json( + gag_cached_content.CachedContent() + ) + req.return_value.content = return_value + + request = cache_service.UpdateCachedContentRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = gag_cached_content.CachedContent() + post_with_metadata.return_value = gag_cached_content.CachedContent(), metadata + + client.update_cached_content( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_delete_cached_content_rest_bad_request( + request_type=cache_service.DeleteCachedContentRequest, +): + client = CacheServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"name": "cachedContents/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.delete_cached_content(request) + + +@pytest.mark.parametrize( + "request_type", + [ + cache_service.DeleteCachedContentRequest, + dict, + ], +) +def test_delete_cached_content_rest_call_success(request_type): + client = CacheServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"name": "cachedContents/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = "" + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.delete_cached_content(request) + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_cached_content_rest_interceptors(null_interceptor): + transport = transports.CacheServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CacheServiceRestInterceptor(), + ) + client = CacheServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.CacheServiceRestInterceptor, "pre_delete_cached_content" + ) as pre: + pre.assert_not_called() + pb_message = cache_service.DeleteCachedContentRequest.pb( + cache_service.DeleteCachedContentRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + request = cache_service.DeleteCachedContentRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.delete_cached_content( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + + +def test_get_operation_rest_bad_request( + request_type=operations_pb2.GetOperationRequest, +): + client = CacheServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict( + {"name": "tunedModels/sample1/operations/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.GetOperationRequest, + dict, + ], +) +def test_get_operation_rest(request_type): + client = CacheServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {"name": "tunedModels/sample1/operations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_operation(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_list_operations_rest_bad_request( + request_type=operations_pb2.ListOperationsRequest, +): + client = CacheServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict({"name": "tunedModels/sample1"}, request) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_operations(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.ListOperationsRequest, + dict, + ], +) +def test_list_operations_rest(request_type): + client = CacheServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {"name": "tunedModels/sample1"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.ListOperationsResponse() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_operations(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_initialize_client_w_rest(): + client = CacheServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_cached_contents_empty_call_rest(): + client = CacheServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_cached_contents), "__call__" + ) as call: + client.list_cached_contents(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = cache_service.ListCachedContentsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_cached_content_empty_call_rest(): + client = CacheServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_cached_content), "__call__" + ) as call: + client.create_cached_content(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = cache_service.CreateCachedContentRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_cached_content_empty_call_rest(): + client = CacheServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_cached_content), "__call__" + ) as call: + client.get_cached_content(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = cache_service.GetCachedContentRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_cached_content_empty_call_rest(): + client = CacheServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_cached_content), "__call__" + ) as call: + client.update_cached_content(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = cache_service.UpdateCachedContentRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_cached_content_empty_call_rest(): + client = CacheServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_cached_content), "__call__" + ) as call: + client.delete_cached_content(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = cache_service.DeleteCachedContentRequest() + + assert args[0] == request_msg + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = CacheServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.CacheServiceGrpcTransport, + ) + + +def test_cache_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.CacheServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_cache_service_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.ai.generativelanguage_v1alpha.services.cache_service.transports.CacheServiceTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.CacheServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. + methods = ( + "list_cached_contents", + "create_cached_content", + "get_cached_content", + "update_cached_content", + "delete_cached_content", + "get_operation", + "list_operations", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_cache_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.ai.generativelanguage_v1alpha.services.cache_service.transports.CacheServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.CacheServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=(), + quota_project_id="octopus", + ) + + +def test_cache_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.ai.generativelanguage_v1alpha.services.cache_service.transports.CacheServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.CacheServiceTransport() + adc.assert_called_once() + + +def test_cache_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + CacheServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=(), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.CacheServiceGrpcTransport, + transports.CacheServiceGrpcAsyncIOTransport, + ], +) +def test_cache_service_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=(), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.CacheServiceGrpcTransport, + transports.CacheServiceGrpcAsyncIOTransport, + transports.CacheServiceRestTransport, + ], +) +def test_cache_service_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.CacheServiceGrpcTransport, grpc_helpers), + (transports.CacheServiceGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +def test_cache_service_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "generativelanguage.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=(), + scopes=["1", "2"], + default_host="generativelanguage.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [transports.CacheServiceGrpcTransport, transports.CacheServiceGrpcAsyncIOTransport], +) +def test_cache_service_grpc_transport_client_cert_source_for_mtls(transport_class): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. 
+ with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. + with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + +def test_cache_service_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.CacheServiceRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_cache_service_host_no_port(transport_name): + client = CacheServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="generativelanguage.googleapis.com" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "generativelanguage.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://generativelanguage.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_cache_service_host_with_port(transport_name): + client = CacheServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="generativelanguage.googleapis.com:8000" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "generativelanguage.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://generativelanguage.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_cache_service_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = CacheServiceClient( + credentials=creds1, + transport=transport_name, + ) + client2 = CacheServiceClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.list_cached_contents._session + session2 = client2.transport.list_cached_contents._session + assert session1 != session2 + session1 = client1.transport.create_cached_content._session + session2 = client2.transport.create_cached_content._session + assert session1 != session2 + session1 = client1.transport.get_cached_content._session + session2 = client2.transport.get_cached_content._session + assert session1 != session2 + session1 = 
client1.transport.update_cached_content._session + session2 = client2.transport.update_cached_content._session + assert session1 != session2 + session1 = client1.transport.delete_cached_content._session + session2 = client2.transport.delete_cached_content._session + assert session1 != session2 + + +def test_cache_service_grpc_transport_channel(): + channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.CacheServiceGrpcTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +def test_cache_service_grpc_asyncio_transport_channel(): + channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.CacheServiceGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize( + "transport_class", + [transports.CacheServiceGrpcTransport, transports.CacheServiceGrpcAsyncIOTransport], +) +def test_cache_service_transport_channel_mtls_with_client_cert_source(transport_class): + with mock.patch( + "grpc.ssl_channel_credentials", autospec=True + ) as grpc_ssl_channel_cred: + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. 
+@pytest.mark.parametrize( + "transport_class", + [transports.CacheServiceGrpcTransport, transports.CacheServiceGrpcAsyncIOTransport], +) +def test_cache_service_transport_channel_mtls_with_adc(transport_class): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_cached_content_path(): + id = "squid" + expected = "cachedContents/{id}".format( + id=id, + ) + actual = CacheServiceClient.cached_content_path(id) + assert expected == actual + + +def test_parse_cached_content_path(): + expected = { + "id": "clam", + } + path = CacheServiceClient.cached_content_path(**expected) + + # Check that the path construction is reversible. + actual = CacheServiceClient.parse_cached_content_path(path) + assert expected == actual + + +def test_model_path(): + model = "whelk" + expected = "models/{model}".format( + model=model, + ) + actual = CacheServiceClient.model_path(model) + assert expected == actual + + +def test_parse_model_path(): + expected = { + "model": "octopus", + } + path = CacheServiceClient.model_path(**expected) + + # Check that the path construction is reversible. + actual = CacheServiceClient.parse_model_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "oyster" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = CacheServiceClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "nudibranch", + } + path = CacheServiceClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = CacheServiceClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "cuttlefish" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = CacheServiceClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "mussel", + } + path = CacheServiceClient.common_folder_path(**expected) + + # Check that the path construction is reversible. 
+ actual = CacheServiceClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "winkle" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = CacheServiceClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nautilus", + } + path = CacheServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = CacheServiceClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "scallop" + expected = "projects/{project}".format( + project=project, + ) + actual = CacheServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "abalone", + } + path = CacheServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = CacheServiceClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "squid" + location = "clam" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = CacheServiceClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "whelk", + "location": "octopus", + } + path = CacheServiceClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = CacheServiceClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.CacheServiceTransport, "_prep_wrapped_messages" + ) as prep: + client = CacheServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.CacheServiceTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = CacheServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +def test_get_operation(transport: str = "grpc"): + client = CacheServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + response = client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, operations_pb2.Operation) + + +@pytest.mark.asyncio +async def test_get_operation_async(transport: str = "grpc_asyncio"): + client = CacheServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_get_operation_field_headers(): + client = CacheServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = operations_pb2.Operation() + + client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_operation_field_headers_async(): + client = CacheServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_get_operation_from_dict(): + client = CacheServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation() + + response = client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_get_operation_from_dict_async(): + client = CacheServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_operations(transport: str = "grpc"): + client = CacheServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + response = client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +@pytest.mark.asyncio +async def test_list_operations_async(transport: str = "grpc_asyncio"): + client = CacheServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_list_operations_field_headers(): + client = CacheServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = operations_pb2.ListOperationsResponse() + + client.list_operations(request) + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_operations_field_headers_async(): + client = CacheServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_list_operations_from_dict(): + client = CacheServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + + response = client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_list_operations_from_dict_async(): + client = CacheServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_transport_close_grpc(): + client = CacheServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc" + ) + with mock.patch.object( + type(getattr(client.transport, "_grpc_channel")), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +@pytest.mark.asyncio +async def test_transport_close_grpc_asyncio(): + client = CacheServiceAsyncClient( + credentials=async_anonymous_credentials(), transport="grpc_asyncio" + ) + with mock.patch.object( + type(getattr(client.transport, "_grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_transport_close_rest(): + client = CacheServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + with mock.patch.object( + type(getattr(client.transport, "_session")), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + "grpc", + ] + for transport in transports: + client = CacheServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. + with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (CacheServiceClient, transports.CacheServiceGrpcTransport), + (CacheServiceAsyncClient, transports.CacheServiceGrpcAsyncIOTransport), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1alpha/test_discuss_service.py b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1alpha/test_discuss_service.py new file mode 100644 index 000000000000..e85fde131759 --- /dev/null +++ b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1alpha/test_discuss_service.py @@ -0,0 +1,3823 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +from collections.abc import AsyncIterable, Iterable +import json +import math + +from google.api_core import api_core_version +from google.protobuf import json_format +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +try: + from google.auth.aio import credentials as ga_credentials_async + + HAS_GOOGLE_AUTH_AIO = True +except ImportError: # pragma: NO COVER + HAS_GOOGLE_AUTH_AIO = False + +from google.api_core import gapic_v1, grpc_helpers, grpc_helpers_async, path_template +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account + +from google.ai.generativelanguage_v1alpha.services.discuss_service import ( + DiscussServiceAsyncClient, + DiscussServiceClient, + transports, +) +from google.ai.generativelanguage_v1alpha.types import citation, discuss_service, safety + +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + + +async def mock_async_gen(data, chunk_size=1): + for i in range(0, len(data)): # pragma: NO COVER + chunk = data[i : i + chunk_size] + yield chunk.encode("utf-8") + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded. +# See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107. +def async_anonymous_credentials(): + if HAS_GOOGLE_AUTH_AIO: + return ga_credentials_async.AnonymousCredentials() + return ga_credentials.AnonymousCredentials() + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +# If default endpoint template is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint template so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint_template(client): + return ( + "test.{UNIVERSE_DOMAIN}" + if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) + else client._DEFAULT_ENDPOINT_TEMPLATE + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert DiscussServiceClient._get_default_mtls_endpoint(None) is None + assert ( + DiscussServiceClient._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + DiscussServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + DiscussServiceClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + DiscussServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + DiscussServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + ) + + +def test__read_environment_variables(): + assert DiscussServiceClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert DiscussServiceClient._read_environment_variables() == ( + True, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert DiscussServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + DiscussServiceClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + assert DiscussServiceClient._read_environment_variables() == ( + False, + "never", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + assert DiscussServiceClient._read_environment_variables() == ( + False, + "always", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): + assert DiscussServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + DiscussServiceClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): + assert DiscussServiceClient._read_environment_variables() == ( + False, + "auto", + "foo.com", + ) + + +def test__get_client_cert_source(): + mock_provided_cert_source = mock.Mock() + mock_default_cert_source = mock.Mock() + + assert DiscussServiceClient._get_client_cert_source(None, False) is None + assert ( + DiscussServiceClient._get_client_cert_source(mock_provided_cert_source, False) + is None + ) + assert ( + DiscussServiceClient._get_client_cert_source(mock_provided_cert_source, True) + == mock_provided_cert_source + ) + + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", return_value=True + ): + with mock.patch( + 
"google.auth.transport.mtls.default_client_cert_source", + return_value=mock_default_cert_source, + ): + assert ( + DiscussServiceClient._get_client_cert_source(None, True) + is mock_default_cert_source + ) + assert ( + DiscussServiceClient._get_client_cert_source( + mock_provided_cert_source, "true" + ) + is mock_provided_cert_source + ) + + +@mock.patch.object( + DiscussServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(DiscussServiceClient), +) +@mock.patch.object( + DiscussServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(DiscussServiceAsyncClient), +) +def test__get_api_endpoint(): + api_override = "foo.com" + mock_client_cert_source = mock.Mock() + default_universe = DiscussServiceClient._DEFAULT_UNIVERSE + default_endpoint = DiscussServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = DiscussServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + assert ( + DiscussServiceClient._get_api_endpoint( + api_override, mock_client_cert_source, default_universe, "always" + ) + == api_override + ) + assert ( + DiscussServiceClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "auto" + ) + == DiscussServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + DiscussServiceClient._get_api_endpoint(None, None, default_universe, "auto") + == default_endpoint + ) + assert ( + DiscussServiceClient._get_api_endpoint(None, None, default_universe, "always") + == DiscussServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + DiscussServiceClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "always" + ) + == DiscussServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + DiscussServiceClient._get_api_endpoint(None, None, mock_universe, "never") + == mock_endpoint + ) + assert ( + DiscussServiceClient._get_api_endpoint(None, None, default_universe, "never") + == default_endpoint + ) + + with pytest.raises(MutualTLSChannelError) as excinfo: + DiscussServiceClient._get_api_endpoint( + None, mock_client_cert_source, mock_universe, "auto" + ) + assert ( + str(excinfo.value) + == "mTLS is not supported in any universe other than googleapis.com." + ) + + +def test__get_universe_domain(): + client_universe_domain = "foo.com" + universe_domain_env = "bar.com" + + assert ( + DiscussServiceClient._get_universe_domain( + client_universe_domain, universe_domain_env + ) + == client_universe_domain + ) + assert ( + DiscussServiceClient._get_universe_domain(None, universe_domain_env) + == universe_domain_env + ) + assert ( + DiscussServiceClient._get_universe_domain(None, None) + == DiscussServiceClient._DEFAULT_UNIVERSE + ) + + with pytest.raises(ValueError) as excinfo: + DiscussServiceClient._get_universe_domain("", None) + assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+ + +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = DiscussServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = DiscussServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (DiscussServiceClient, "grpc"), + (DiscussServiceAsyncClient, "grpc_asyncio"), + (DiscussServiceClient, "rest"), + ], +) +def test_discuss_service_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "generativelanguage.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://generativelanguage.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.DiscussServiceGrpcTransport, "grpc"), + (transports.DiscussServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.DiscussServiceRestTransport, "rest"), + ], +) +def test_discuss_service_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (DiscussServiceClient, "grpc"), + (DiscussServiceAsyncClient, "grpc_asyncio"), + (DiscussServiceClient, "rest"), + ], +) +def test_discuss_service_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = 
client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "generativelanguage.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://generativelanguage.googleapis.com" + ) + + +def test_discuss_service_client_get_transport_class(): + transport = DiscussServiceClient.get_transport_class() + available_transports = [ + transports.DiscussServiceGrpcTransport, + transports.DiscussServiceRestTransport, + ] + assert transport in available_transports + + transport = DiscussServiceClient.get_transport_class("grpc") + assert transport == transports.DiscussServiceGrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (DiscussServiceClient, transports.DiscussServiceGrpcTransport, "grpc"), + ( + DiscussServiceAsyncClient, + transports.DiscussServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (DiscussServiceClient, transports.DiscussServiceRestTransport, "rest"), + ], +) +@mock.patch.object( + DiscussServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(DiscussServiceClient), +) +@mock.patch.object( + DiscussServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(DiscussServiceAsyncClient), +) +def test_discuss_service_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(DiscussServiceClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(DiscussServiceClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + (DiscussServiceClient, 
transports.DiscussServiceGrpcTransport, "grpc", "true"), + ( + DiscussServiceAsyncClient, + transports.DiscussServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + (DiscussServiceClient, transports.DiscussServiceGrpcTransport, "grpc", "false"), + ( + DiscussServiceAsyncClient, + transports.DiscussServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + (DiscussServiceClient, transports.DiscussServiceRestTransport, "rest", "true"), + (DiscussServiceClient, transports.DiscussServiceRestTransport, "rest", "false"), + ], +) +@mock.patch.object( + DiscussServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(DiscussServiceClient), +) +@mock.patch.object( + DiscussServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(DiscussServiceAsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_discuss_service_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class", [DiscussServiceClient, DiscussServiceAsyncClient] +) +@mock.patch.object( + DiscussServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(DiscussServiceClient), +) +@mock.patch.object( + DiscussServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(DiscussServiceAsyncClient), +) +def test_discuss_service_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + +@pytest.mark.parametrize( + "client_class", [DiscussServiceClient, DiscussServiceAsyncClient] +) +@mock.patch.object( + DiscussServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(DiscussServiceClient), +) +@mock.patch.object( + DiscussServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(DiscussServiceAsyncClient), +) +def test_discuss_service_client_client_api_endpoint(client_class): + mock_client_cert_source = client_cert_source_callback + api_override = "foo.com" + default_universe = DiscussServiceClient._DEFAULT_UNIVERSE + default_endpoint = DiscussServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = DiscussServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", + # use ClientOptions.api_endpoint as the api endpoint regardless. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ): + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=api_override + ) + client = client_class( + client_options=options, + credentials=ga_credentials.AnonymousCredentials(), + ) + assert client.api_endpoint == api_override + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, + # and ClientOptions.universe_domain="bar.com", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. + options = client_options.ClientOptions() + universe_exists = hasattr(options, "universe_domain") + if universe_exists: + options = client_options.ClientOptions(universe_domain=mock_universe) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + else: + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == ( + mock_endpoint if universe_exists else default_endpoint + ) + assert client.universe_domain == ( + mock_universe if universe_exists else default_universe + ) + + # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + options = client_options.ClientOptions() + if hasattr(options, "universe_domain"): + delattr(options, "universe_domain") + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == default_endpoint + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (DiscussServiceClient, transports.DiscussServiceGrpcTransport, "grpc"), + ( + DiscussServiceAsyncClient, + transports.DiscussServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (DiscussServiceClient, transports.DiscussServiceRestTransport, "rest"), + ], +) +def test_discuss_service_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. 
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + DiscussServiceClient, + transports.DiscussServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + DiscussServiceAsyncClient, + transports.DiscussServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + (DiscussServiceClient, transports.DiscussServiceRestTransport, "rest", None), + ], +) +def test_discuss_service_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_discuss_service_client_client_options_from_dict(): + with mock.patch( + "google.ai.generativelanguage_v1alpha.services.discuss_service.transports.DiscussServiceGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = DiscussServiceClient( + client_options={"api_endpoint": "squid.clam.whelk"} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + DiscussServiceClient, + transports.DiscussServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + DiscussServiceAsyncClient, + transports.DiscussServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_discuss_service_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "generativelanguage.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=(), + scopes=None, + default_host="generativelanguage.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + discuss_service.GenerateMessageRequest, + dict, + ], +) +def test_generate_message(request_type, transport: str = "grpc"): + client = DiscussServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.generate_message), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = discuss_service.GenerateMessageResponse() + response = client.generate_message(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = discuss_service.GenerateMessageRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, discuss_service.GenerateMessageResponse) + + +def test_generate_message_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DiscussServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = discuss_service.GenerateMessageRequest( + model="model_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.generate_message), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.generate_message(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == discuss_service.GenerateMessageRequest( + model="model_value", + ) + + +def test_generate_message_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DiscussServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.generate_message in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.generate_message + ] = mock_rpc + request = {} + client.generate_message(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.generate_message(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_generate_message_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DiscussServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.generate_message + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.generate_message + ] = mock_rpc + + request = {} + await client.generate_message(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.generate_message(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_generate_message_async( + transport: str = "grpc_asyncio", request_type=discuss_service.GenerateMessageRequest +): + client = DiscussServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.generate_message), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + discuss_service.GenerateMessageResponse() + ) + response = await client.generate_message(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = discuss_service.GenerateMessageRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, discuss_service.GenerateMessageResponse) + + +@pytest.mark.asyncio +async def test_generate_message_async_from_dict(): + await test_generate_message_async(request_type=dict) + + +def test_generate_message_field_headers(): + client = DiscussServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = discuss_service.GenerateMessageRequest() + + request.model = "model_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.generate_message), "__call__") as call: + call.return_value = discuss_service.GenerateMessageResponse() + client.generate_message(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "model=model_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_generate_message_field_headers_async(): + client = DiscussServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = discuss_service.GenerateMessageRequest() + + request.model = "model_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.generate_message), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + discuss_service.GenerateMessageResponse() + ) + await client.generate_message(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "model=model_value", + ) in kw["metadata"] + + +def test_generate_message_flattened(): + client = DiscussServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.generate_message), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = discuss_service.GenerateMessageResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.generate_message( + model="model_value", + prompt=discuss_service.MessagePrompt(context="context_value"), + temperature=0.1198, + candidate_count=1573, + top_p=0.546, + top_k=541, + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].model + mock_val = "model_value" + assert arg == mock_val + arg = args[0].prompt + mock_val = discuss_service.MessagePrompt(context="context_value") + assert arg == mock_val + assert math.isclose(args[0].temperature, 0.1198, rel_tol=1e-6) + arg = args[0].candidate_count + mock_val = 1573 + assert arg == mock_val + assert math.isclose(args[0].top_p, 0.546, rel_tol=1e-6) + arg = args[0].top_k + mock_val = 541 + assert arg == mock_val + + +def test_generate_message_flattened_error(): + client = DiscussServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.generate_message( + discuss_service.GenerateMessageRequest(), + model="model_value", + prompt=discuss_service.MessagePrompt(context="context_value"), + temperature=0.1198, + candidate_count=1573, + top_p=0.546, + top_k=541, + ) + + +@pytest.mark.asyncio +async def test_generate_message_flattened_async(): + client = DiscussServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.generate_message), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = discuss_service.GenerateMessageResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + discuss_service.GenerateMessageResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.generate_message( + model="model_value", + prompt=discuss_service.MessagePrompt(context="context_value"), + temperature=0.1198, + candidate_count=1573, + top_p=0.546, + top_k=541, + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].model + mock_val = "model_value" + assert arg == mock_val + arg = args[0].prompt + mock_val = discuss_service.MessagePrompt(context="context_value") + assert arg == mock_val + assert math.isclose(args[0].temperature, 0.1198, rel_tol=1e-6) + arg = args[0].candidate_count + mock_val = 1573 + assert arg == mock_val + assert math.isclose(args[0].top_p, 0.546, rel_tol=1e-6) + arg = args[0].top_k + mock_val = 541 + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_generate_message_flattened_error_async(): + client = DiscussServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.generate_message( + discuss_service.GenerateMessageRequest(), + model="model_value", + prompt=discuss_service.MessagePrompt(context="context_value"), + temperature=0.1198, + candidate_count=1573, + top_p=0.546, + top_k=541, + ) + + +@pytest.mark.parametrize( + "request_type", + [ + discuss_service.CountMessageTokensRequest, + dict, + ], +) +def test_count_message_tokens(request_type, transport: str = "grpc"): + client = DiscussServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.count_message_tokens), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = discuss_service.CountMessageTokensResponse( + token_count=1193, + ) + response = client.count_message_tokens(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = discuss_service.CountMessageTokensRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, discuss_service.CountMessageTokensResponse) + assert response.token_count == 1193 + + +def test_count_message_tokens_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DiscussServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = discuss_service.CountMessageTokensRequest( + model="model_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.count_message_tokens), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.count_message_tokens(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == discuss_service.CountMessageTokensRequest( + model="model_value", + ) + + +def test_count_message_tokens_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DiscussServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.count_message_tokens in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.count_message_tokens + ] = mock_rpc + request = {} + client.count_message_tokens(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.count_message_tokens(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_count_message_tokens_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DiscussServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.count_message_tokens + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.count_message_tokens + ] = mock_rpc + + request = {} + await client.count_message_tokens(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.count_message_tokens(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_count_message_tokens_async( + transport: str = "grpc_asyncio", + request_type=discuss_service.CountMessageTokensRequest, +): + client = DiscussServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.count_message_tokens), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + discuss_service.CountMessageTokensResponse( + token_count=1193, + ) + ) + response = await client.count_message_tokens(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = discuss_service.CountMessageTokensRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, discuss_service.CountMessageTokensResponse) + assert response.token_count == 1193 + + +@pytest.mark.asyncio +async def test_count_message_tokens_async_from_dict(): + await test_count_message_tokens_async(request_type=dict) + + +def test_count_message_tokens_field_headers(): + client = DiscussServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = discuss_service.CountMessageTokensRequest() + + request.model = "model_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.count_message_tokens), "__call__" + ) as call: + call.return_value = discuss_service.CountMessageTokensResponse() + client.count_message_tokens(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "model=model_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_count_message_tokens_field_headers_async(): + client = DiscussServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = discuss_service.CountMessageTokensRequest() + + request.model = "model_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.count_message_tokens), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + discuss_service.CountMessageTokensResponse() + ) + await client.count_message_tokens(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "model=model_value", + ) in kw["metadata"] + + +def test_count_message_tokens_flattened(): + client = DiscussServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.count_message_tokens), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = discuss_service.CountMessageTokensResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.count_message_tokens( + model="model_value", + prompt=discuss_service.MessagePrompt(context="context_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].model + mock_val = "model_value" + assert arg == mock_val + arg = args[0].prompt + mock_val = discuss_service.MessagePrompt(context="context_value") + assert arg == mock_val + + +def test_count_message_tokens_flattened_error(): + client = DiscussServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.count_message_tokens( + discuss_service.CountMessageTokensRequest(), + model="model_value", + prompt=discuss_service.MessagePrompt(context="context_value"), + ) + + +@pytest.mark.asyncio +async def test_count_message_tokens_flattened_async(): + client = DiscussServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.count_message_tokens), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = discuss_service.CountMessageTokensResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + discuss_service.CountMessageTokensResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.count_message_tokens( + model="model_value", + prompt=discuss_service.MessagePrompt(context="context_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].model + mock_val = "model_value" + assert arg == mock_val + arg = args[0].prompt + mock_val = discuss_service.MessagePrompt(context="context_value") + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_count_message_tokens_flattened_error_async(): + client = DiscussServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.count_message_tokens( + discuss_service.CountMessageTokensRequest(), + model="model_value", + prompt=discuss_service.MessagePrompt(context="context_value"), + ) + + +def test_generate_message_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DiscussServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.generate_message in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.generate_message + ] = mock_rpc + + request = {} + client.generate_message(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.generate_message(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_generate_message_rest_required_fields( + request_type=discuss_service.GenerateMessageRequest, +): + transport_class = transports.DiscussServiceRestTransport + + request_init = {} + request_init["model"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).generate_message._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["model"] = "model_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).generate_message._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "model" in jsonified_request + assert jsonified_request["model"] == "model_value" + + client = DiscussServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = discuss_service.GenerateMessageResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = discuss_service.GenerateMessageResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.generate_message(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_generate_message_rest_unset_required_fields(): + transport = transports.DiscussServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.generate_message._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "model", + "prompt", + ) + ) + ) + + +def test_generate_message_rest_flattened(): + client = DiscussServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = discuss_service.GenerateMessageResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"model": "models/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + model="model_value", + prompt=discuss_service.MessagePrompt(context="context_value"), + temperature=0.1198, + candidate_count=1573, + top_p=0.546, + top_k=541, + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = discuss_service.GenerateMessageResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.generate_message(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{model=models/*}:generateMessage" % client.transport._host, + args[1], + ) + + +def test_generate_message_rest_flattened_error(transport: str = "rest"): + client = DiscussServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.generate_message( + discuss_service.GenerateMessageRequest(), + model="model_value", + prompt=discuss_service.MessagePrompt(context="context_value"), + temperature=0.1198, + candidate_count=1573, + top_p=0.546, + top_k=541, + ) + + +def test_count_message_tokens_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DiscussServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.count_message_tokens in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.count_message_tokens + ] = mock_rpc + + request = {} + client.count_message_tokens(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.count_message_tokens(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_count_message_tokens_rest_required_fields( + request_type=discuss_service.CountMessageTokensRequest, +): + transport_class = transports.DiscussServiceRestTransport + + request_init = {} + request_init["model"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).count_message_tokens._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["model"] = "model_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).count_message_tokens._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "model" in jsonified_request + assert jsonified_request["model"] == "model_value" + + client = DiscussServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = discuss_service.CountMessageTokensResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = discuss_service.CountMessageTokensResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.count_message_tokens(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_count_message_tokens_rest_unset_required_fields(): + transport = transports.DiscussServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.count_message_tokens._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "model", + "prompt", + ) + ) + ) + + +def test_count_message_tokens_rest_flattened(): + client = DiscussServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = discuss_service.CountMessageTokensResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"model": "models/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + model="model_value", + prompt=discuss_service.MessagePrompt(context="context_value"), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = discuss_service.CountMessageTokensResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.count_message_tokens(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{model=models/*}:countMessageTokens" % client.transport._host, + args[1], + ) + + +def test_count_message_tokens_rest_flattened_error(transport: str = "rest"): + client = DiscussServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.count_message_tokens( + discuss_service.CountMessageTokensRequest(), + model="model_value", + prompt=discuss_service.MessagePrompt(context="context_value"), + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.DiscussServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = DiscussServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.DiscussServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = DiscussServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.DiscussServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = DiscussServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = DiscussServiceClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. 
+ transport = transports.DiscussServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = DiscussServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.DiscussServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = DiscussServiceClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.DiscussServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.DiscussServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.DiscussServiceGrpcTransport, + transports.DiscussServiceGrpcAsyncIOTransport, + transports.DiscussServiceRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +def test_transport_kind_grpc(): + transport = DiscussServiceClient.get_transport_class("grpc")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "grpc" + + +def test_initialize_client_w_grpc(): + client = DiscussServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_generate_message_empty_call_grpc(): + client = DiscussServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.generate_message), "__call__") as call: + call.return_value = discuss_service.GenerateMessageResponse() + client.generate_message(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = discuss_service.GenerateMessageRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_count_message_tokens_empty_call_grpc(): + client = DiscussServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.count_message_tokens), "__call__" + ) as call: + call.return_value = discuss_service.CountMessageTokensResponse() + client.count_message_tokens(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = discuss_service.CountMessageTokensRequest() + + assert args[0] == request_msg + + +def test_transport_kind_grpc_asyncio(): + transport = DiscussServiceAsyncClient.get_transport_class("grpc_asyncio")( + credentials=async_anonymous_credentials() + ) + assert transport.kind == "grpc_asyncio" + + +def test_initialize_client_w_grpc_asyncio(): + client = DiscussServiceAsyncClient( + credentials=async_anonymous_credentials(), transport="grpc_asyncio" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_generate_message_empty_call_grpc_asyncio(): + client = DiscussServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.generate_message), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + discuss_service.GenerateMessageResponse() + ) + await client.generate_message(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = discuss_service.GenerateMessageRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_count_message_tokens_empty_call_grpc_asyncio(): + client = DiscussServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.count_message_tokens), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + discuss_service.CountMessageTokensResponse( + token_count=1193, + ) + ) + await client.count_message_tokens(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = discuss_service.CountMessageTokensRequest() + + assert args[0] == request_msg + + +def test_transport_kind_rest(): + transport = DiscussServiceClient.get_transport_class("rest")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "rest" + + +def test_generate_message_rest_bad_request( + request_type=discuss_service.GenerateMessageRequest, +): + client = DiscussServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"model": "models/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.generate_message(request) + + +@pytest.mark.parametrize( + "request_type", + [ + discuss_service.GenerateMessageRequest, + dict, + ], +) +def test_generate_message_rest_call_success(request_type): + client = DiscussServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"model": "models/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = discuss_service.GenerateMessageResponse() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = discuss_service.GenerateMessageResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.generate_message(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, discuss_service.GenerateMessageResponse) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_generate_message_rest_interceptors(null_interceptor): + transport = transports.DiscussServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DiscussServiceRestInterceptor(), + ) + client = DiscussServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DiscussServiceRestInterceptor, "post_generate_message" + ) as post, mock.patch.object( + transports.DiscussServiceRestInterceptor, "post_generate_message_with_metadata" + ) as post_with_metadata, mock.patch.object( + transports.DiscussServiceRestInterceptor, "pre_generate_message" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = discuss_service.GenerateMessageRequest.pb( + discuss_service.GenerateMessageRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = discuss_service.GenerateMessageResponse.to_json( + discuss_service.GenerateMessageResponse() + ) + req.return_value.content = return_value + + request = discuss_service.GenerateMessageRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = discuss_service.GenerateMessageResponse() + post_with_metadata.return_value = 
( + discuss_service.GenerateMessageResponse(), + metadata, + ) + + client.generate_message( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_count_message_tokens_rest_bad_request( + request_type=discuss_service.CountMessageTokensRequest, +): + client = DiscussServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"model": "models/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.count_message_tokens(request) + + +@pytest.mark.parametrize( + "request_type", + [ + discuss_service.CountMessageTokensRequest, + dict, + ], +) +def test_count_message_tokens_rest_call_success(request_type): + client = DiscussServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"model": "models/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = discuss_service.CountMessageTokensResponse( + token_count=1193, + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = discuss_service.CountMessageTokensResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.count_message_tokens(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, discuss_service.CountMessageTokensResponse) + assert response.token_count == 1193 + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_count_message_tokens_rest_interceptors(null_interceptor): + transport = transports.DiscussServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DiscussServiceRestInterceptor(), + ) + client = DiscussServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DiscussServiceRestInterceptor, "post_count_message_tokens" + ) as post, mock.patch.object( + transports.DiscussServiceRestInterceptor, + "post_count_message_tokens_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.DiscussServiceRestInterceptor, "pre_count_message_tokens" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = discuss_service.CountMessageTokensRequest.pb( + discuss_service.CountMessageTokensRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = discuss_service.CountMessageTokensResponse.to_json( + discuss_service.CountMessageTokensResponse() + ) + req.return_value.content = return_value + + request = discuss_service.CountMessageTokensRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = discuss_service.CountMessageTokensResponse() + post_with_metadata.return_value = ( + discuss_service.CountMessageTokensResponse(), + metadata, + ) + + client.count_message_tokens( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_get_operation_rest_bad_request( + request_type=operations_pb2.GetOperationRequest, +): + client = DiscussServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict( + {"name": "tunedModels/sample1/operations/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.GetOperationRequest, + dict, + ], +) +def test_get_operation_rest(request_type): + client = DiscussServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {"name": "tunedModels/sample1/operations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_operation(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_list_operations_rest_bad_request( + request_type=operations_pb2.ListOperationsRequest, +): + client = DiscussServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict({"name": "tunedModels/sample1"}, request) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_operations(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.ListOperationsRequest, + dict, + ], +) +def test_list_operations_rest(request_type): + client = DiscussServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {"name": "tunedModels/sample1"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.ListOperationsResponse() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_operations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_initialize_client_w_rest(): + client = DiscussServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_generate_message_empty_call_rest(): + client = DiscussServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.generate_message), "__call__") as call: + client.generate_message(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = discuss_service.GenerateMessageRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_count_message_tokens_empty_call_rest(): + client = DiscussServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.count_message_tokens), "__call__" + ) as call: + client.count_message_tokens(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = discuss_service.CountMessageTokensRequest() + + assert args[0] == request_msg + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = DiscussServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.DiscussServiceGrpcTransport, + ) + + +def test_discuss_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.DiscussServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_discuss_service_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.ai.generativelanguage_v1alpha.services.discuss_service.transports.DiscussServiceTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.DiscussServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. + methods = ( + "generate_message", + "count_message_tokens", + "get_operation", + "list_operations", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_discuss_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.ai.generativelanguage_v1alpha.services.discuss_service.transports.DiscussServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.DiscussServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=(), + quota_project_id="octopus", + ) + + +def test_discuss_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.ai.generativelanguage_v1alpha.services.discuss_service.transports.DiscussServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.DiscussServiceTransport() + adc.assert_called_once() + + +def test_discuss_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + DiscussServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=(), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.DiscussServiceGrpcTransport, + transports.DiscussServiceGrpcAsyncIOTransport, + ], +) +def test_discuss_service_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=(), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.DiscussServiceGrpcTransport, + transports.DiscussServiceGrpcAsyncIOTransport, + transports.DiscussServiceRestTransport, + ], +) +def test_discuss_service_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.DiscussServiceGrpcTransport, grpc_helpers), + (transports.DiscussServiceGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +def test_discuss_service_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "generativelanguage.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=(), + scopes=["1", "2"], + default_host="generativelanguage.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.DiscussServiceGrpcTransport, + transports.DiscussServiceGrpcAsyncIOTransport, + ], +) +def test_discuss_service_grpc_transport_client_cert_source_for_mtls(transport_class): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. + with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + +def test_discuss_service_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.DiscussServiceRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_discuss_service_host_no_port(transport_name): + client = DiscussServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="generativelanguage.googleapis.com" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "generativelanguage.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://generativelanguage.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_discuss_service_host_with_port(transport_name): + client = DiscussServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="generativelanguage.googleapis.com:8000" + ), + transport=transport_name, + ) + assert client.transport._host == 
( + "generativelanguage.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://generativelanguage.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_discuss_service_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = DiscussServiceClient( + credentials=creds1, + transport=transport_name, + ) + client2 = DiscussServiceClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.generate_message._session + session2 = client2.transport.generate_message._session + assert session1 != session2 + session1 = client1.transport.count_message_tokens._session + session2 = client2.transport.count_message_tokens._session + assert session1 != session2 + + +def test_discuss_service_grpc_transport_channel(): + channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.DiscussServiceGrpcTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +def test_discuss_service_grpc_asyncio_transport_channel(): + channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.DiscussServiceGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. 
+@pytest.mark.parametrize( + "transport_class", + [ + transports.DiscussServiceGrpcTransport, + transports.DiscussServiceGrpcAsyncIOTransport, + ], +) +def test_discuss_service_transport_channel_mtls_with_client_cert_source( + transport_class, +): + with mock.patch( + "grpc.ssl_channel_credentials", autospec=True + ) as grpc_ssl_channel_cred: + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize( + "transport_class", + [ + transports.DiscussServiceGrpcTransport, + transports.DiscussServiceGrpcAsyncIOTransport, + ], +) +def test_discuss_service_transport_channel_mtls_with_adc(transport_class): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_model_path(): + model = "squid" + expected = "models/{model}".format( + model=model, + ) + actual = DiscussServiceClient.model_path(model) + assert expected == actual + + +def test_parse_model_path(): + expected = { + "model": "clam", + } + path = DiscussServiceClient.model_path(**expected) + + # Check that the path construction is reversible. 
+ actual = DiscussServiceClient.parse_model_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "whelk" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = DiscussServiceClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "octopus", + } + path = DiscussServiceClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = DiscussServiceClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "oyster" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = DiscussServiceClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "nudibranch", + } + path = DiscussServiceClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = DiscussServiceClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "cuttlefish" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = DiscussServiceClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "mussel", + } + path = DiscussServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = DiscussServiceClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "winkle" + expected = "projects/{project}".format( + project=project, + ) + actual = DiscussServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "nautilus", + } + path = DiscussServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = DiscussServiceClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "scallop" + location = "abalone" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = DiscussServiceClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "squid", + "location": "clam", + } + path = DiscussServiceClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = DiscussServiceClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.DiscussServiceTransport, "_prep_wrapped_messages" + ) as prep: + client = DiscussServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.DiscussServiceTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = DiscussServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +def test_get_operation(transport: str = "grpc"): + client = DiscussServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + response = client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +@pytest.mark.asyncio +async def test_get_operation_async(transport: str = "grpc_asyncio"): + client = DiscussServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_get_operation_field_headers(): + client = DiscussServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = operations_pb2.Operation() + + client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_operation_field_headers_async(): + client = DiscussServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_get_operation_from_dict(): + client = DiscussServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + + response = client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_get_operation_from_dict_async(): + client = DiscussServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_operations(transport: str = "grpc"): + client = DiscussServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + response = client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +@pytest.mark.asyncio +async def test_list_operations_async(transport: str = "grpc_asyncio"): + client = DiscussServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_list_operations_field_headers(): + client = DiscussServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = operations_pb2.ListOperationsResponse() + + client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_operations_field_headers_async(): + client = DiscussServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_list_operations_from_dict(): + client = DiscussServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + + response = client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_list_operations_from_dict_async(): + client = DiscussServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_transport_close_grpc(): + client = DiscussServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc" + ) + with mock.patch.object( + type(getattr(client.transport, "_grpc_channel")), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +@pytest.mark.asyncio +async def test_transport_close_grpc_asyncio(): + client = DiscussServiceAsyncClient( + credentials=async_anonymous_credentials(), transport="grpc_asyncio" + ) + with mock.patch.object( + type(getattr(client.transport, "_grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_transport_close_rest(): + client = DiscussServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + with mock.patch.object( + type(getattr(client.transport, "_session")), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + "grpc", + ] + for transport in transports: + client = DiscussServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. + with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (DiscussServiceClient, transports.DiscussServiceGrpcTransport), + (DiscussServiceAsyncClient, transports.DiscussServiceGrpcAsyncIOTransport), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1alpha/test_file_service.py b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1alpha/test_file_service.py new file mode 100644 index 000000000000..c1be7c92979c --- /dev/null +++ b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1alpha/test_file_service.py @@ -0,0 +1,4709 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +from collections.abc import AsyncIterable, Iterable +import json +import math + +from google.api_core import api_core_version +from google.protobuf import json_format +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +try: + from google.auth.aio import credentials as ga_credentials_async + + HAS_GOOGLE_AUTH_AIO = True +except ImportError: # pragma: NO COVER + HAS_GOOGLE_AUTH_AIO = False + +from google.api_core import gapic_v1, grpc_helpers, grpc_helpers_async, path_template +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account +from google.protobuf import any_pb2 # type: ignore +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore + +from google.ai.generativelanguage_v1alpha.services.file_service import ( + FileServiceAsyncClient, + FileServiceClient, + pagers, + transports, +) +from google.ai.generativelanguage_v1alpha.types import file, file_service + +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + + +async def mock_async_gen(data, chunk_size=1): + for i in range(0, len(data)): # pragma: NO COVER + chunk = data[i : i + chunk_size] + yield chunk.encode("utf-8") + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded. +# See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107. +def async_anonymous_credentials(): + if HAS_GOOGLE_AUTH_AIO: + return ga_credentials_async.AnonymousCredentials() + return ga_credentials.AnonymousCredentials() + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +# If default endpoint template is localhost, then default mtls endpoint will be the same. 
+# This method modifies the default endpoint template so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint_template(client): + return ( + "test.{UNIVERSE_DOMAIN}" + if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) + else client._DEFAULT_ENDPOINT_TEMPLATE + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert FileServiceClient._get_default_mtls_endpoint(None) is None + assert ( + FileServiceClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + ) + assert ( + FileServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + FileServiceClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + FileServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert FileServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +def test__read_environment_variables(): + assert FileServiceClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert FileServiceClient._read_environment_variables() == (True, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert FileServiceClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + FileServiceClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + assert FileServiceClient._read_environment_variables() == (False, "never", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + assert FileServiceClient._read_environment_variables() == ( + False, + "always", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): + assert FileServiceClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + FileServiceClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): + assert FileServiceClient._read_environment_variables() == ( + False, + "auto", + "foo.com", + ) + + +def test__get_client_cert_source(): + mock_provided_cert_source = mock.Mock() + mock_default_cert_source = mock.Mock() + + assert FileServiceClient._get_client_cert_source(None, False) is None + assert ( + FileServiceClient._get_client_cert_source(mock_provided_cert_source, False) + is None + ) + assert ( + FileServiceClient._get_client_cert_source(mock_provided_cert_source, True) + == mock_provided_cert_source + ) + + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", return_value=True + ): + with mock.patch( + 
"google.auth.transport.mtls.default_client_cert_source", + return_value=mock_default_cert_source, + ): + assert ( + FileServiceClient._get_client_cert_source(None, True) + is mock_default_cert_source + ) + assert ( + FileServiceClient._get_client_cert_source( + mock_provided_cert_source, "true" + ) + is mock_provided_cert_source + ) + + +@mock.patch.object( + FileServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(FileServiceClient), +) +@mock.patch.object( + FileServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(FileServiceAsyncClient), +) +def test__get_api_endpoint(): + api_override = "foo.com" + mock_client_cert_source = mock.Mock() + default_universe = FileServiceClient._DEFAULT_UNIVERSE + default_endpoint = FileServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = FileServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + assert ( + FileServiceClient._get_api_endpoint( + api_override, mock_client_cert_source, default_universe, "always" + ) + == api_override + ) + assert ( + FileServiceClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "auto" + ) + == FileServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + FileServiceClient._get_api_endpoint(None, None, default_universe, "auto") + == default_endpoint + ) + assert ( + FileServiceClient._get_api_endpoint(None, None, default_universe, "always") + == FileServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + FileServiceClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "always" + ) + == FileServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + FileServiceClient._get_api_endpoint(None, None, mock_universe, "never") + == mock_endpoint + ) + assert ( + FileServiceClient._get_api_endpoint(None, None, default_universe, "never") + == default_endpoint + ) + + with pytest.raises(MutualTLSChannelError) as excinfo: + FileServiceClient._get_api_endpoint( + None, mock_client_cert_source, mock_universe, "auto" + ) + assert ( + str(excinfo.value) + == "mTLS is not supported in any universe other than googleapis.com." + ) + + +def test__get_universe_domain(): + client_universe_domain = "foo.com" + universe_domain_env = "bar.com" + + assert ( + FileServiceClient._get_universe_domain( + client_universe_domain, universe_domain_env + ) + == client_universe_domain + ) + assert ( + FileServiceClient._get_universe_domain(None, universe_domain_env) + == universe_domain_env + ) + assert ( + FileServiceClient._get_universe_domain(None, None) + == FileServiceClient._DEFAULT_UNIVERSE + ) + + with pytest.raises(ValueError) as excinfo: + FileServiceClient._get_universe_domain("", None) + assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+ + +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = FileServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = FileServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (FileServiceClient, "grpc"), + (FileServiceAsyncClient, "grpc_asyncio"), + (FileServiceClient, "rest"), + ], +) +def test_file_service_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "generativelanguage.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://generativelanguage.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.FileServiceGrpcTransport, "grpc"), + (transports.FileServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.FileServiceRestTransport, "rest"), + ], +) +def test_file_service_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (FileServiceClient, "grpc"), + (FileServiceAsyncClient, "grpc_asyncio"), + (FileServiceClient, "rest"), + ], +) +def test_file_service_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + 
"dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "generativelanguage.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://generativelanguage.googleapis.com" + ) + + +def test_file_service_client_get_transport_class(): + transport = FileServiceClient.get_transport_class() + available_transports = [ + transports.FileServiceGrpcTransport, + transports.FileServiceRestTransport, + ] + assert transport in available_transports + + transport = FileServiceClient.get_transport_class("grpc") + assert transport == transports.FileServiceGrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (FileServiceClient, transports.FileServiceGrpcTransport, "grpc"), + ( + FileServiceAsyncClient, + transports.FileServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (FileServiceClient, transports.FileServiceRestTransport, "rest"), + ], +) +@mock.patch.object( + FileServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(FileServiceClient), +) +@mock.patch.object( + FileServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(FileServiceAsyncClient), +) +def test_file_service_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(FileServiceClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(FileServiceClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + (FileServiceClient, transports.FileServiceGrpcTransport, "grpc", "true"), + ( + FileServiceAsyncClient, + transports.FileServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + (FileServiceClient, transports.FileServiceGrpcTransport, "grpc", "false"), + ( + FileServiceAsyncClient, + transports.FileServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + (FileServiceClient, transports.FileServiceRestTransport, "rest", "true"), + (FileServiceClient, transports.FileServiceRestTransport, "rest", "false"), + ], +) +@mock.patch.object( + FileServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(FileServiceClient), +) +@mock.patch.object( + FileServiceAsyncClient, + 
"_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(FileServiceAsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_file_service_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [FileServiceClient, FileServiceAsyncClient]) +@mock.patch.object( + FileServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(FileServiceClient) +) +@mock.patch.object( + FileServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(FileServiceAsyncClient), +) +def test_file_service_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + +@pytest.mark.parametrize("client_class", [FileServiceClient, FileServiceAsyncClient]) +@mock.patch.object( + FileServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(FileServiceClient), +) +@mock.patch.object( + FileServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(FileServiceAsyncClient), +) +def test_file_service_client_client_api_endpoint(client_class): + mock_client_cert_source = client_cert_source_callback + api_override = "foo.com" + default_universe = FileServiceClient._DEFAULT_UNIVERSE + default_endpoint = FileServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = FileServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", + # use ClientOptions.api_endpoint as the api endpoint regardless. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ): + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=api_override + ) + client = client_class( + client_options=options, + credentials=ga_credentials.AnonymousCredentials(), + ) + assert client.api_endpoint == api_override + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, + # and ClientOptions.universe_domain="bar.com", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. + options = client_options.ClientOptions() + universe_exists = hasattr(options, "universe_domain") + if universe_exists: + options = client_options.ClientOptions(universe_domain=mock_universe) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + else: + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == ( + mock_endpoint if universe_exists else default_endpoint + ) + assert client.universe_domain == ( + mock_universe if universe_exists else default_universe + ) + + # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + options = client_options.ClientOptions() + if hasattr(options, "universe_domain"): + delattr(options, "universe_domain") + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == default_endpoint + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (FileServiceClient, transports.FileServiceGrpcTransport, "grpc"), + ( + FileServiceAsyncClient, + transports.FileServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (FileServiceClient, transports.FileServiceRestTransport, "rest"), + ], +) +def test_file_service_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + (FileServiceClient, transports.FileServiceGrpcTransport, "grpc", grpc_helpers), + ( + FileServiceAsyncClient, + transports.FileServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + (FileServiceClient, transports.FileServiceRestTransport, "rest", None), + ], +) +def test_file_service_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_file_service_client_client_options_from_dict(): + with mock.patch( + "google.ai.generativelanguage_v1alpha.services.file_service.transports.FileServiceGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = FileServiceClient(client_options={"api_endpoint": "squid.clam.whelk"}) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + (FileServiceClient, transports.FileServiceGrpcTransport, "grpc", grpc_helpers), + ( + FileServiceAsyncClient, + transports.FileServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_file_service_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. 
+ with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "generativelanguage.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=(), + scopes=None, + default_host="generativelanguage.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + file_service.CreateFileRequest, + dict, + ], +) +def test_create_file(request_type, transport: str = "grpc"): + client = FileServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_file), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = file_service.CreateFileResponse() + response = client.create_file(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = file_service.CreateFileRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, file_service.CreateFileResponse) + + +def test_create_file_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = FileServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = file_service.CreateFileRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_file), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.create_file(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == file_service.CreateFileRequest() + + +def test_create_file_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = FileServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_file in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.create_file] = mock_rpc + request = {} + client.create_file(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.create_file(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_file_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = FileServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.create_file + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.create_file + ] = mock_rpc + + request = {} + await client.create_file(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.create_file(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_file_async( + transport: str = "grpc_asyncio", request_type=file_service.CreateFileRequest +): + client = FileServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_file), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + file_service.CreateFileResponse() + ) + response = await client.create_file(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = file_service.CreateFileRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, file_service.CreateFileResponse) + + +@pytest.mark.asyncio +async def test_create_file_async_from_dict(): + await test_create_file_async(request_type=dict) + + +@pytest.mark.parametrize( + "request_type", + [ + file_service.ListFilesRequest, + dict, + ], +) +def test_list_files(request_type, transport: str = "grpc"): + client = FileServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_files), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = file_service.ListFilesResponse( + next_page_token="next_page_token_value", + ) + response = client.list_files(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = file_service.ListFilesRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListFilesPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_files_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = FileServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = file_service.ListFilesRequest( + page_token="page_token_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_files), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_files(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == file_service.ListFilesRequest( + page_token="page_token_value", + ) + + +def test_list_files_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = FileServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_files in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_files] = mock_rpc + request = {} + client.list_files(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_files(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_files_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = FileServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_files + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.list_files + ] = mock_rpc + + request = {} + await client.list_files(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.list_files(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_files_async( + transport: str = "grpc_asyncio", request_type=file_service.ListFilesRequest +): + client = FileServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_files), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + file_service.ListFilesResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_files(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = file_service.ListFilesRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListFilesAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_files_async_from_dict(): + await test_list_files_async(request_type=dict) + + +def test_list_files_pager(transport_name: str = "grpc"): + client = FileServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_files), "__call__") as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + file_service.ListFilesResponse( + files=[ + file.File(), + file.File(), + file.File(), + ], + next_page_token="abc", + ), + file_service.ListFilesResponse( + files=[], + next_page_token="def", + ), + file_service.ListFilesResponse( + files=[ + file.File(), + ], + next_page_token="ghi", + ), + file_service.ListFilesResponse( + files=[ + file.File(), + file.File(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + pager = client.list_files(request={}, retry=retry, timeout=timeout) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, file.File) for i in results) + + +def test_list_files_pages(transport_name: str = "grpc"): + client = FileServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_files), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + file_service.ListFilesResponse( + files=[ + file.File(), + file.File(), + file.File(), + ], + next_page_token="abc", + ), + file_service.ListFilesResponse( + files=[], + next_page_token="def", + ), + file_service.ListFilesResponse( + files=[ + file.File(), + ], + next_page_token="ghi", + ), + file_service.ListFilesResponse( + files=[ + file.File(), + file.File(), + ], + ), + RuntimeError, + ) + pages = list(client.list_files(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_files_async_pager(): + client = FileServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_files), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + file_service.ListFilesResponse( + files=[ + file.File(), + file.File(), + file.File(), + ], + next_page_token="abc", + ), + file_service.ListFilesResponse( + files=[], + next_page_token="def", + ), + file_service.ListFilesResponse( + files=[ + file.File(), + ], + next_page_token="ghi", + ), + file_service.ListFilesResponse( + files=[ + file.File(), + file.File(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_files( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, file.File) for i in responses) + + +@pytest.mark.asyncio +async def test_list_files_async_pages(): + client = FileServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_files), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + file_service.ListFilesResponse( + files=[ + file.File(), + file.File(), + file.File(), + ], + next_page_token="abc", + ), + file_service.ListFilesResponse( + files=[], + next_page_token="def", + ), + file_service.ListFilesResponse( + files=[ + file.File(), + ], + next_page_token="ghi", + ), + file_service.ListFilesResponse( + files=[ + file.File(), + file.File(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_files(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + file_service.GetFileRequest, + dict, + ], +) +def test_get_file(request_type, transport: str = "grpc"): + client = FileServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_file), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = file.File( + name="name_value", + display_name="display_name_value", + mime_type="mime_type_value", + size_bytes=1089, + sha256_hash=b"sha256_hash_blob", + uri="uri_value", + state=file.File.State.PROCESSING, + ) + response = client.get_file(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = file_service.GetFileRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, file.File) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.mime_type == "mime_type_value" + assert response.size_bytes == 1089 + assert response.sha256_hash == b"sha256_hash_blob" + assert response.uri == "uri_value" + assert response.state == file.File.State.PROCESSING + + +def test_get_file_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = FileServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = file_service.GetFileRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_file), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.get_file(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == file_service.GetFileRequest( + name="name_value", + ) + + +def test_get_file_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = FileServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_file in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_file] = mock_rpc + request = {} + client.get_file(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_file(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_file_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = FileServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_file + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.get_file + ] = mock_rpc + + request = {} + await client.get_file(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.get_file(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_file_async( + transport: str = "grpc_asyncio", request_type=file_service.GetFileRequest +): + client = FileServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_file), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + file.File( + name="name_value", + display_name="display_name_value", + mime_type="mime_type_value", + size_bytes=1089, + sha256_hash=b"sha256_hash_blob", + uri="uri_value", + state=file.File.State.PROCESSING, + ) + ) + response = await client.get_file(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = file_service.GetFileRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, file.File) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.mime_type == "mime_type_value" + assert response.size_bytes == 1089 + assert response.sha256_hash == b"sha256_hash_blob" + assert response.uri == "uri_value" + assert response.state == file.File.State.PROCESSING + + +@pytest.mark.asyncio +async def test_get_file_async_from_dict(): + await test_get_file_async(request_type=dict) + + +def test_get_file_field_headers(): + client = FileServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = file_service.GetFileRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_file), "__call__") as call: + call.return_value = file.File() + client.get_file(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_file_field_headers_async(): + client = FileServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = file_service.GetFileRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_file), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(file.File()) + await client.get_file(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_file_flattened(): + client = FileServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_file), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = file.File() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_file( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_file_flattened_error(): + client = FileServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_file( + file_service.GetFileRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_file_flattened_async(): + client = FileServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_file), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = file.File() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(file.File()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_file( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_file_flattened_error_async(): + client = FileServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_file( + file_service.GetFileRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + file_service.DeleteFileRequest, + dict, + ], +) +def test_delete_file(request_type, transport: str = "grpc"): + client = FileServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_file), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_file(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = file_service.DeleteFileRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_file_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = FileServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = file_service.DeleteFileRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_file), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.delete_file(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == file_service.DeleteFileRequest( + name="name_value", + ) + + +def test_delete_file_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = FileServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_file in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete_file] = mock_rpc + request = {} + client.delete_file(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.delete_file(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_file_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = FileServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.delete_file + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.delete_file + ] = mock_rpc + + request = {} + await client.delete_file(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.delete_file(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_file_async( + transport: str = "grpc_asyncio", request_type=file_service.DeleteFileRequest +): + client = FileServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_file), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_file(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = file_service.DeleteFileRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_file_async_from_dict(): + await test_delete_file_async(request_type=dict) + + +def test_delete_file_field_headers(): + client = FileServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = file_service.DeleteFileRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_file), "__call__") as call: + call.return_value = None + client.delete_file(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_file_field_headers_async(): + client = FileServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = file_service.DeleteFileRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_file), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_file(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_file_flattened(): + client = FileServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_file), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_file( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_file_flattened_error(): + client = FileServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_file( + file_service.DeleteFileRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_file_flattened_async(): + client = FileServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.delete_file), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_file( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_file_flattened_error_async(): + client = FileServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_file( + file_service.DeleteFileRequest(), + name="name_value", + ) + + +def test_create_file_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = FileServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_file in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.create_file] = mock_rpc + + request = {} + client.create_file(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.create_file(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_files_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = FileServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_files in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_files] = mock_rpc + + request = {} + client.list_files(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_files(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_files_rest_pager(transport: str = "rest"): + client = FileServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + file_service.ListFilesResponse( + files=[ + file.File(), + file.File(), + file.File(), + ], + next_page_token="abc", + ), + file_service.ListFilesResponse( + files=[], + next_page_token="def", + ), + file_service.ListFilesResponse( + files=[ + file.File(), + ], + next_page_token="ghi", + ), + file_service.ListFilesResponse( + files=[ + file.File(), + file.File(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(file_service.ListFilesResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {} + + pager = client.list_files(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, file.File) for i in results) + + pages = list(client.list_files(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_get_file_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = FileServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_file in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_file] = mock_rpc + + request = {} + client.get_file(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_file(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_file_rest_required_fields(request_type=file_service.GetFileRequest): + transport_class = transports.FileServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_file._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_file._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = FileServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = file.File() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = file.File.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_file(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_file_rest_unset_required_fields(): + transport = transports.FileServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_file._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +def test_get_file_rest_flattened(): + client = FileServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = file.File() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "files/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = file.File.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.get_file(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{name=files/*}" % client.transport._host, args[1] + ) + + +def test_get_file_rest_flattened_error(transport: str = "rest"): + client = FileServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_file( + file_service.GetFileRequest(), + name="name_value", + ) + + +def test_delete_file_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = FileServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_file in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete_file] = mock_rpc + + request = {} + client.delete_file(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.delete_file(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_file_rest_required_fields(request_type=file_service.DeleteFileRequest): + transport_class = transports.FileServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_file._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_file._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = FileServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.delete_file(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_file_rest_unset_required_fields(): + transport = transports.FileServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_file._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +def test_delete_file_rest_flattened(): + client = FileServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "files/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.delete_file(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{name=files/*}" % client.transport._host, args[1] + ) + + +def test_delete_file_rest_flattened_error(transport: str = "rest"): + client = FileServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_file( + file_service.DeleteFileRequest(), + name="name_value", + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.FileServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = FileServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.FileServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = FileServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.FileServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = FileServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = FileServiceClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.FileServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = FileServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.FileServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = FileServiceClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. 
+ transport = transports.FileServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.FileServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.FileServiceGrpcTransport, + transports.FileServiceGrpcAsyncIOTransport, + transports.FileServiceRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +def test_transport_kind_grpc(): + transport = FileServiceClient.get_transport_class("grpc")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "grpc" + + +def test_initialize_client_w_grpc(): + client = FileServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_file_empty_call_grpc(): + client = FileServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.create_file), "__call__") as call: + call.return_value = file_service.CreateFileResponse() + client.create_file(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = file_service.CreateFileRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_files_empty_call_grpc(): + client = FileServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.list_files), "__call__") as call: + call.return_value = file_service.ListFilesResponse() + client.list_files(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = file_service.ListFilesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_file_empty_call_grpc(): + client = FileServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_file), "__call__") as call: + call.return_value = file.File() + client.get_file(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = file_service.GetFileRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_delete_file_empty_call_grpc(): + client = FileServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.delete_file), "__call__") as call: + call.return_value = None + client.delete_file(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = file_service.DeleteFileRequest() + + assert args[0] == request_msg + + +def test_transport_kind_grpc_asyncio(): + transport = FileServiceAsyncClient.get_transport_class("grpc_asyncio")( + credentials=async_anonymous_credentials() + ) + assert transport.kind == "grpc_asyncio" + + +def test_initialize_client_w_grpc_asyncio(): + client = FileServiceAsyncClient( + credentials=async_anonymous_credentials(), transport="grpc_asyncio" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_file_empty_call_grpc_asyncio(): + client = FileServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.create_file), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + file_service.CreateFileResponse() + ) + await client.create_file(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = file_service.CreateFileRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_files_empty_call_grpc_asyncio(): + client = FileServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.list_files), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + file_service.ListFilesResponse( + next_page_token="next_page_token_value", + ) + ) + await client.list_files(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = file_service.ListFilesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_file_empty_call_grpc_asyncio(): + client = FileServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_file), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + file.File( + name="name_value", + display_name="display_name_value", + mime_type="mime_type_value", + size_bytes=1089, + sha256_hash=b"sha256_hash_blob", + uri="uri_value", + state=file.File.State.PROCESSING, + ) + ) + await client.get_file(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = file_service.GetFileRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_file_empty_call_grpc_asyncio(): + client = FileServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.delete_file), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_file(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = file_service.DeleteFileRequest() + + assert args[0] == request_msg + + +def test_transport_kind_rest(): + transport = FileServiceClient.get_transport_class("rest")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "rest" + + +def test_create_file_rest_bad_request(request_type=file_service.CreateFileRequest): + client = FileServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.create_file(request) + + +@pytest.mark.parametrize( + "request_type", + [ + file_service.CreateFileRequest, + dict, + ], +) +def test_create_file_rest_call_success(request_type): + client = FileServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = file_service.CreateFileResponse() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = file_service.CreateFileResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.create_file(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, file_service.CreateFileResponse) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_file_rest_interceptors(null_interceptor): + transport = transports.FileServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.FileServiceRestInterceptor(), + ) + client = FileServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.FileServiceRestInterceptor, "post_create_file" + ) as post, mock.patch.object( + transports.FileServiceRestInterceptor, "post_create_file_with_metadata" + ) as post_with_metadata, mock.patch.object( + transports.FileServiceRestInterceptor, "pre_create_file" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = file_service.CreateFileRequest.pb(file_service.CreateFileRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = file_service.CreateFileResponse.to_json( + file_service.CreateFileResponse() + ) + req.return_value.content = return_value + + request = file_service.CreateFileRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = file_service.CreateFileResponse() + post_with_metadata.return_value = file_service.CreateFileResponse(), metadata + + client.create_file( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_list_files_rest_bad_request(request_type=file_service.ListFilesRequest): + client = FileServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_files(request) + + +@pytest.mark.parametrize( + "request_type", + [ + file_service.ListFilesRequest, + dict, + ], +) +def test_list_files_rest_call_success(request_type): + client = FileServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = file_service.ListFilesResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = file_service.ListFilesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.list_files(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListFilesPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_files_rest_interceptors(null_interceptor): + transport = transports.FileServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.FileServiceRestInterceptor(), + ) + client = FileServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.FileServiceRestInterceptor, "post_list_files" + ) as post, mock.patch.object( + transports.FileServiceRestInterceptor, "post_list_files_with_metadata" + ) as post_with_metadata, mock.patch.object( + transports.FileServiceRestInterceptor, "pre_list_files" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = file_service.ListFilesRequest.pb(file_service.ListFilesRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = file_service.ListFilesResponse.to_json( + file_service.ListFilesResponse() + ) + req.return_value.content = return_value + + request = file_service.ListFilesRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = file_service.ListFilesResponse() + post_with_metadata.return_value = file_service.ListFilesResponse(), metadata + + client.list_files( + request, + 
metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_get_file_rest_bad_request(request_type=file_service.GetFileRequest): + client = FileServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"name": "files/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_file(request) + + +@pytest.mark.parametrize( + "request_type", + [ + file_service.GetFileRequest, + dict, + ], +) +def test_get_file_rest_call_success(request_type): + client = FileServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"name": "files/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = file.File( + name="name_value", + display_name="display_name_value", + mime_type="mime_type_value", + size_bytes=1089, + sha256_hash=b"sha256_hash_blob", + uri="uri_value", + state=file.File.State.PROCESSING, + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = file.File.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.get_file(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, file.File) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.mime_type == "mime_type_value" + assert response.size_bytes == 1089 + assert response.sha256_hash == b"sha256_hash_blob" + assert response.uri == "uri_value" + assert response.state == file.File.State.PROCESSING + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_file_rest_interceptors(null_interceptor): + transport = transports.FileServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.FileServiceRestInterceptor(), + ) + client = FileServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.FileServiceRestInterceptor, "post_get_file" + ) as post, mock.patch.object( + transports.FileServiceRestInterceptor, "post_get_file_with_metadata" + ) as post_with_metadata, mock.patch.object( + transports.FileServiceRestInterceptor, "pre_get_file" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = file_service.GetFileRequest.pb(file_service.GetFileRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = file.File.to_json(file.File()) + req.return_value.content = return_value + + request = file_service.GetFileRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = file.File() + post_with_metadata.return_value = file.File(), metadata + + client.get_file( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_delete_file_rest_bad_request(request_type=file_service.DeleteFileRequest): + client = FileServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"name": "files/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.delete_file(request) + + +@pytest.mark.parametrize( + "request_type", + [ + file_service.DeleteFileRequest, + dict, + ], +) +def test_delete_file_rest_call_success(request_type): + client = FileServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"name": "files/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = "" + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.delete_file(request) + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_file_rest_interceptors(null_interceptor): + transport = transports.FileServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.FileServiceRestInterceptor(), + ) + client = FileServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.FileServiceRestInterceptor, "pre_delete_file" + ) as pre: + pre.assert_not_called() + pb_message = file_service.DeleteFileRequest.pb(file_service.DeleteFileRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + request = file_service.DeleteFileRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.delete_file( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + + +def test_get_operation_rest_bad_request( + request_type=operations_pb2.GetOperationRequest, +): + client = FileServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict( + {"name": "tunedModels/sample1/operations/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.GetOperationRequest, + dict, + ], +) +def test_get_operation_rest(request_type): + client = FileServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {"name": "tunedModels/sample1/operations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_operation(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_list_operations_rest_bad_request( + request_type=operations_pb2.ListOperationsRequest, +): + client = FileServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict({"name": "tunedModels/sample1"}, request) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_operations(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.ListOperationsRequest, + dict, + ], +) +def test_list_operations_rest(request_type): + client = FileServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {"name": "tunedModels/sample1"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.ListOperationsResponse() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_operations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_initialize_client_w_rest(): + client = FileServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_file_empty_call_rest(): + client = FileServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.create_file), "__call__") as call: + client.create_file(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = file_service.CreateFileRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_list_files_empty_call_rest(): + client = FileServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.list_files), "__call__") as call: + client.list_files(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = file_service.ListFilesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_file_empty_call_rest(): + client = FileServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_file), "__call__") as call: + client.get_file(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = file_service.GetFileRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_file_empty_call_rest(): + client = FileServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.delete_file), "__call__") as call: + client.delete_file(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = file_service.DeleteFileRequest() + + assert args[0] == request_msg + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = FileServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.FileServiceGrpcTransport, + ) + + +def test_file_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.FileServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_file_service_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.ai.generativelanguage_v1alpha.services.file_service.transports.FileServiceTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.FileServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + "create_file", + "list_files", + "get_file", + "delete_file", + "get_operation", + "list_operations", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_file_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.ai.generativelanguage_v1alpha.services.file_service.transports.FileServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.FileServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=(), + quota_project_id="octopus", + ) + + +def test_file_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.ai.generativelanguage_v1alpha.services.file_service.transports.FileServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.FileServiceTransport() + adc.assert_called_once() + + +def test_file_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + FileServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=(), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.FileServiceGrpcTransport, + transports.FileServiceGrpcAsyncIOTransport, + ], +) +def test_file_service_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=(), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.FileServiceGrpcTransport, + transports.FileServiceGrpcAsyncIOTransport, + transports.FileServiceRestTransport, + ], +) +def test_file_service_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.FileServiceGrpcTransport, grpc_helpers), + (transports.FileServiceGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +def test_file_service_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "generativelanguage.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=(), + scopes=["1", "2"], + default_host="generativelanguage.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [transports.FileServiceGrpcTransport, transports.FileServiceGrpcAsyncIOTransport], +) +def test_file_service_grpc_transport_client_cert_source_for_mtls(transport_class): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + +def test_file_service_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.FileServiceRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_file_service_host_no_port(transport_name): + client = FileServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="generativelanguage.googleapis.com" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "generativelanguage.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://generativelanguage.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_file_service_host_with_port(transport_name): + client = FileServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="generativelanguage.googleapis.com:8000" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "generativelanguage.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://generativelanguage.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_file_service_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = FileServiceClient( + credentials=creds1, + transport=transport_name, + ) + client2 = FileServiceClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.create_file._session + session2 = client2.transport.create_file._session + assert session1 != session2 + session1 = client1.transport.list_files._session + session2 = client2.transport.list_files._session + assert session1 != session2 + session1 = client1.transport.get_file._session + session2 = client2.transport.get_file._session + assert session1 != session2 + session1 = client1.transport.delete_file._session + session2 = client2.transport.delete_file._session + assert session1 != session2 + + +def test_file_service_grpc_transport_channel(): + channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. 
+ transport = transports.FileServiceGrpcTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +def test_file_service_grpc_asyncio_transport_channel(): + channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.FileServiceGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize( + "transport_class", + [transports.FileServiceGrpcTransport, transports.FileServiceGrpcAsyncIOTransport], +) +def test_file_service_transport_channel_mtls_with_client_cert_source(transport_class): + with mock.patch( + "grpc.ssl_channel_credentials", autospec=True + ) as grpc_ssl_channel_cred: + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. 
+@pytest.mark.parametrize( + "transport_class", + [transports.FileServiceGrpcTransport, transports.FileServiceGrpcAsyncIOTransport], +) +def test_file_service_transport_channel_mtls_with_adc(transport_class): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_file_path(): + file = "squid" + expected = "files/{file}".format( + file=file, + ) + actual = FileServiceClient.file_path(file) + assert expected == actual + + +def test_parse_file_path(): + expected = { + "file": "clam", + } + path = FileServiceClient.file_path(**expected) + + # Check that the path construction is reversible. + actual = FileServiceClient.parse_file_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "whelk" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = FileServiceClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "octopus", + } + path = FileServiceClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = FileServiceClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "oyster" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = FileServiceClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "nudibranch", + } + path = FileServiceClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = FileServiceClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "cuttlefish" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = FileServiceClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "mussel", + } + path = FileServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. 
+ actual = FileServiceClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "winkle" + expected = "projects/{project}".format( + project=project, + ) + actual = FileServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "nautilus", + } + path = FileServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = FileServiceClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "scallop" + location = "abalone" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = FileServiceClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "squid", + "location": "clam", + } + path = FileServiceClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = FileServiceClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.FileServiceTransport, "_prep_wrapped_messages" + ) as prep: + client = FileServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.FileServiceTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = FileServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +def test_get_operation(transport: str = "grpc"): + client = FileServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + response = client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +@pytest.mark.asyncio +async def test_get_operation_async(transport: str = "grpc_asyncio"): + client = FileServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_get_operation_field_headers(): + client = FileServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = operations_pb2.Operation() + + client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_operation_field_headers_async(): + client = FileServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_get_operation_from_dict(): + client = FileServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + + response = client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_get_operation_from_dict_async(): + client = FileServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_operations(transport: str = "grpc"): + client = FileServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + response = client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +@pytest.mark.asyncio +async def test_list_operations_async(transport: str = "grpc_asyncio"): + client = FileServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_list_operations_field_headers(): + client = FileServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = operations_pb2.ListOperationsResponse() + + client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_operations_field_headers_async(): + client = FileServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_list_operations_from_dict(): + client = FileServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + + response = client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_list_operations_from_dict_async(): + client = FileServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_transport_close_grpc(): + client = FileServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc" + ) + with mock.patch.object( + type(getattr(client.transport, "_grpc_channel")), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +@pytest.mark.asyncio +async def test_transport_close_grpc_asyncio(): + client = FileServiceAsyncClient( + credentials=async_anonymous_credentials(), transport="grpc_asyncio" + ) + with mock.patch.object( + type(getattr(client.transport, "_grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_transport_close_rest(): + client = FileServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + with mock.patch.object( + type(getattr(client.transport, "_session")), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + "grpc", + ] + for transport in transports: + client = FileServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (FileServiceClient, transports.FileServiceGrpcTransport), + (FileServiceAsyncClient, transports.FileServiceGrpcAsyncIOTransport), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1alpha/test_generative_service.py b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1alpha/test_generative_service.py new file mode 100644 index 000000000000..b5453d1a4246 --- /dev/null +++ b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1alpha/test_generative_service.py @@ -0,0 +1,7041 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +from collections.abc import AsyncIterable, Iterable +import json +import math + +from google.api_core import api_core_version +from google.protobuf import json_format +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +try: + from google.auth.aio import credentials as ga_credentials_async + + HAS_GOOGLE_AUTH_AIO = True +except ImportError: # pragma: NO COVER + HAS_GOOGLE_AUTH_AIO = False + +from google.api_core import gapic_v1, grpc_helpers, grpc_helpers_async, path_template +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account +from google.protobuf import struct_pb2 # type: ignore + +from google.ai.generativelanguage_v1alpha.services.generative_service import ( + GenerativeServiceAsyncClient, + GenerativeServiceClient, + transports, +) +from google.ai.generativelanguage_v1alpha.types import ( + generative_service, + retriever, + safety, +) +from google.ai.generativelanguage_v1alpha.types import content +from google.ai.generativelanguage_v1alpha.types import content as gag_content + +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + + +async def mock_async_gen(data, chunk_size=1): + for i in range(0, len(data)): # pragma: NO COVER + chunk = data[i : i + chunk_size] + yield chunk.encode("utf-8") + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded. +# See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107. +def async_anonymous_credentials(): + if HAS_GOOGLE_AUTH_AIO: + return ga_credentials_async.AnonymousCredentials() + return ga_credentials.AnonymousCredentials() + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +# If default endpoint template is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint template so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint_template(client): + return ( + "test.{UNIVERSE_DOMAIN}" + if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) + else client._DEFAULT_ENDPOINT_TEMPLATE + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert GenerativeServiceClient._get_default_mtls_endpoint(None) is None + assert ( + GenerativeServiceClient._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + GenerativeServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + GenerativeServiceClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + GenerativeServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + GenerativeServiceClient._get_default_mtls_endpoint(non_googleapi) + == non_googleapi + ) + + +def test__read_environment_variables(): + assert GenerativeServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert GenerativeServiceClient._read_environment_variables() == ( + True, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert GenerativeServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + GenerativeServiceClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + assert GenerativeServiceClient._read_environment_variables() == ( + False, + "never", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + assert GenerativeServiceClient._read_environment_variables() == ( + False, + "always", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): + assert GenerativeServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + GenerativeServiceClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): + assert GenerativeServiceClient._read_environment_variables() == ( + False, + "auto", + "foo.com", + ) + + +def test__get_client_cert_source(): + mock_provided_cert_source = mock.Mock() + mock_default_cert_source = mock.Mock() + + assert GenerativeServiceClient._get_client_cert_source(None, False) is None + assert ( + GenerativeServiceClient._get_client_cert_source( + mock_provided_cert_source, False + ) + is None + ) + assert ( + GenerativeServiceClient._get_client_cert_source(mock_provided_cert_source, True) + == mock_provided_cert_source + ) + + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", 
return_value=True + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_default_cert_source, + ): + assert ( + GenerativeServiceClient._get_client_cert_source(None, True) + is mock_default_cert_source + ) + assert ( + GenerativeServiceClient._get_client_cert_source( + mock_provided_cert_source, "true" + ) + is mock_provided_cert_source + ) + + +@mock.patch.object( + GenerativeServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(GenerativeServiceClient), +) +@mock.patch.object( + GenerativeServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(GenerativeServiceAsyncClient), +) +def test__get_api_endpoint(): + api_override = "foo.com" + mock_client_cert_source = mock.Mock() + default_universe = GenerativeServiceClient._DEFAULT_UNIVERSE + default_endpoint = GenerativeServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = GenerativeServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + assert ( + GenerativeServiceClient._get_api_endpoint( + api_override, mock_client_cert_source, default_universe, "always" + ) + == api_override + ) + assert ( + GenerativeServiceClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "auto" + ) + == GenerativeServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + GenerativeServiceClient._get_api_endpoint(None, None, default_universe, "auto") + == default_endpoint + ) + assert ( + GenerativeServiceClient._get_api_endpoint( + None, None, default_universe, "always" + ) + == GenerativeServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + GenerativeServiceClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "always" + ) + == GenerativeServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + GenerativeServiceClient._get_api_endpoint(None, None, mock_universe, "never") + == mock_endpoint + ) + assert ( + GenerativeServiceClient._get_api_endpoint(None, None, default_universe, "never") + == default_endpoint + ) + + with pytest.raises(MutualTLSChannelError) as excinfo: + GenerativeServiceClient._get_api_endpoint( + None, mock_client_cert_source, mock_universe, "auto" + ) + assert ( + str(excinfo.value) + == "mTLS is not supported in any universe other than googleapis.com." + ) + + +def test__get_universe_domain(): + client_universe_domain = "foo.com" + universe_domain_env = "bar.com" + + assert ( + GenerativeServiceClient._get_universe_domain( + client_universe_domain, universe_domain_env + ) + == client_universe_domain + ) + assert ( + GenerativeServiceClient._get_universe_domain(None, universe_domain_env) + == universe_domain_env + ) + assert ( + GenerativeServiceClient._get_universe_domain(None, None) + == GenerativeServiceClient._DEFAULT_UNIVERSE + ) + + with pytest.raises(ValueError) as excinfo: + GenerativeServiceClient._get_universe_domain("", None) + assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+ + +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = GenerativeServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = GenerativeServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (GenerativeServiceClient, "grpc"), + (GenerativeServiceAsyncClient, "grpc_asyncio"), + (GenerativeServiceClient, "rest"), + ], +) +def test_generative_service_client_from_service_account_info( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "generativelanguage.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://generativelanguage.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.GenerativeServiceGrpcTransport, "grpc"), + (transports.GenerativeServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.GenerativeServiceRestTransport, "rest"), + ], +) +def test_generative_service_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (GenerativeServiceClient, "grpc"), + (GenerativeServiceAsyncClient, "grpc_asyncio"), + (GenerativeServiceClient, "rest"), + ], +) +def test_generative_service_client_from_service_account_file( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as 
factory: + factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "generativelanguage.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://generativelanguage.googleapis.com" + ) + + +def test_generative_service_client_get_transport_class(): + transport = GenerativeServiceClient.get_transport_class() + available_transports = [ + transports.GenerativeServiceGrpcTransport, + transports.GenerativeServiceRestTransport, + ] + assert transport in available_transports + + transport = GenerativeServiceClient.get_transport_class("grpc") + assert transport == transports.GenerativeServiceGrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (GenerativeServiceClient, transports.GenerativeServiceGrpcTransport, "grpc"), + ( + GenerativeServiceAsyncClient, + transports.GenerativeServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (GenerativeServiceClient, transports.GenerativeServiceRestTransport, "rest"), + ], +) +@mock.patch.object( + GenerativeServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(GenerativeServiceClient), +) +@mock.patch.object( + GenerativeServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(GenerativeServiceAsyncClient), +) +def test_generative_service_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(GenerativeServiceClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(GenerativeServiceClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". 
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
+        with mock.patch.object(transport_class, "__init__") as patched:
+            patched.return_value = None
+            client = client_class(transport=transport_name)
+            patched.assert_called_once_with(
+                credentials=None,
+                credentials_file=None,
+                host=client._DEFAULT_ENDPOINT_TEMPLATE.format(
+                    UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE
+                ),
+                scopes=None,
+                client_cert_source_for_mtls=None,
+                quota_project_id=None,
+                client_info=transports.base.DEFAULT_CLIENT_INFO,
+                always_use_jwt_access=True,
+                api_audience=None,
+            )
+
+    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
+    # "always".
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
+        with mock.patch.object(transport_class, "__init__") as patched:
+            patched.return_value = None
+            client = client_class(transport=transport_name)
+            patched.assert_called_once_with(
+                credentials=None,
+                credentials_file=None,
+                host=client.DEFAULT_MTLS_ENDPOINT,
+                scopes=None,
+                client_cert_source_for_mtls=None,
+                quota_project_id=None,
+                client_info=transports.base.DEFAULT_CLIENT_INFO,
+                always_use_jwt_access=True,
+                api_audience=None,
+            )
+
+    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has
+    # unsupported value.
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}):
+        with pytest.raises(MutualTLSChannelError) as excinfo:
+            client = client_class(transport=transport_name)
+        assert (
+            str(excinfo.value)
+            == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`"
+        )
+
+    # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value.
+    with mock.patch.dict(
+        os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}
+    ):
+        with pytest.raises(ValueError) as excinfo:
+            client = client_class(transport=transport_name)
+        assert (
+            str(excinfo.value)
+            == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"
+        )
+
+    # Check the case quota_project_id is provided
+    options = client_options.ClientOptions(quota_project_id="octopus")
+    with mock.patch.object(transport_class, "__init__") as patched:
+        patched.return_value = None
+        client = client_class(client_options=options, transport=transport_name)
+        patched.assert_called_once_with(
+            credentials=None,
+            credentials_file=None,
+            host=client._DEFAULT_ENDPOINT_TEMPLATE.format(
+                UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE
+            ),
+            scopes=None,
+            client_cert_source_for_mtls=None,
+            quota_project_id="octopus",
+            client_info=transports.base.DEFAULT_CLIENT_INFO,
+            always_use_jwt_access=True,
+            api_audience=None,
+        )
+    # Check the case api_audience is provided
+    options = client_options.ClientOptions(
+        api_audience="https://language.googleapis.com"
+    )
+    with mock.patch.object(transport_class, "__init__") as patched:
+        patched.return_value = None
+        client = client_class(client_options=options, transport=transport_name)
+        patched.assert_called_once_with(
+            credentials=None,
+            credentials_file=None,
+            host=client._DEFAULT_ENDPOINT_TEMPLATE.format(
+                UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE
+            ),
+            scopes=None,
+            client_cert_source_for_mtls=None,
+            quota_project_id=None,
+            client_info=transports.base.DEFAULT_CLIENT_INFO,
+            always_use_jwt_access=True,
+            api_audience="https://language.googleapis.com",
+        )
+
+
+@pytest.mark.parametrize(
+    "client_class,transport_class,transport_name,use_client_cert_env",
+    [
+        (
+            GenerativeServiceClient,
+            
transports.GenerativeServiceGrpcTransport, + "grpc", + "true", + ), + ( + GenerativeServiceAsyncClient, + transports.GenerativeServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + ( + GenerativeServiceClient, + transports.GenerativeServiceGrpcTransport, + "grpc", + "false", + ), + ( + GenerativeServiceAsyncClient, + transports.GenerativeServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + ( + GenerativeServiceClient, + transports.GenerativeServiceRestTransport, + "rest", + "true", + ), + ( + GenerativeServiceClient, + transports.GenerativeServiceRestTransport, + "rest", + "false", + ), + ], +) +@mock.patch.object( + GenerativeServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(GenerativeServiceClient), +) +@mock.patch.object( + GenerativeServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(GenerativeServiceAsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_generative_service_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class", [GenerativeServiceClient, GenerativeServiceAsyncClient] +) +@mock.patch.object( + GenerativeServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(GenerativeServiceClient), +) +@mock.patch.object( + GenerativeServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(GenerativeServiceAsyncClient), +) +def test_generative_service_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + +@pytest.mark.parametrize( + "client_class", [GenerativeServiceClient, GenerativeServiceAsyncClient] +) +@mock.patch.object( + GenerativeServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(GenerativeServiceClient), +) +@mock.patch.object( + GenerativeServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(GenerativeServiceAsyncClient), +) +def test_generative_service_client_client_api_endpoint(client_class): + mock_client_cert_source = client_cert_source_callback + api_override = "foo.com" + default_universe = GenerativeServiceClient._DEFAULT_UNIVERSE + default_endpoint = GenerativeServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = GenerativeServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", + # use ClientOptions.api_endpoint as the api endpoint regardless. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ): + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=api_override + ) + client = client_class( + client_options=options, + credentials=ga_credentials.AnonymousCredentials(), + ) + assert client.api_endpoint == api_override + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, + # and ClientOptions.universe_domain="bar.com", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. + options = client_options.ClientOptions() + universe_exists = hasattr(options, "universe_domain") + if universe_exists: + options = client_options.ClientOptions(universe_domain=mock_universe) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + else: + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == ( + mock_endpoint if universe_exists else default_endpoint + ) + assert client.universe_domain == ( + mock_universe if universe_exists else default_universe + ) + + # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + options = client_options.ClientOptions() + if hasattr(options, "universe_domain"): + delattr(options, "universe_domain") + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == default_endpoint + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (GenerativeServiceClient, transports.GenerativeServiceGrpcTransport, "grpc"), + ( + GenerativeServiceAsyncClient, + transports.GenerativeServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (GenerativeServiceClient, transports.GenerativeServiceRestTransport, "rest"), + ], +) +def test_generative_service_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. 
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + GenerativeServiceClient, + transports.GenerativeServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + GenerativeServiceAsyncClient, + transports.GenerativeServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ( + GenerativeServiceClient, + transports.GenerativeServiceRestTransport, + "rest", + None, + ), + ], +) +def test_generative_service_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_generative_service_client_client_options_from_dict(): + with mock.patch( + "google.ai.generativelanguage_v1alpha.services.generative_service.transports.GenerativeServiceGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = GenerativeServiceClient( + client_options={"api_endpoint": "squid.clam.whelk"} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + GenerativeServiceClient, + transports.GenerativeServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + GenerativeServiceAsyncClient, + transports.GenerativeServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_generative_service_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "generativelanguage.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=(), + scopes=None, + default_host="generativelanguage.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + generative_service.GenerateContentRequest, + dict, + ], +) +def test_generate_content(request_type, transport: str = "grpc"): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.generate_content), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = generative_service.GenerateContentResponse( + model_version="model_version_value", + ) + response = client.generate_content(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = generative_service.GenerateContentRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, generative_service.GenerateContentResponse) + assert response.model_version == "model_version_value" + + +def test_generate_content_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = generative_service.GenerateContentRequest( + model="model_value", + cached_content="cached_content_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.generate_content), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.generate_content(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == generative_service.GenerateContentRequest( + model="model_value", + cached_content="cached_content_value", + ) + + +def test_generate_content_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.generate_content in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.generate_content + ] = mock_rpc + request = {} + client.generate_content(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.generate_content(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_generate_content_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = GenerativeServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.generate_content + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.generate_content + ] = mock_rpc + + request = {} + await client.generate_content(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.generate_content(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_generate_content_async( + transport: str = "grpc_asyncio", + request_type=generative_service.GenerateContentRequest, +): + client = GenerativeServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.generate_content), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + generative_service.GenerateContentResponse( + model_version="model_version_value", + ) + ) + response = await client.generate_content(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = generative_service.GenerateContentRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, generative_service.GenerateContentResponse) + assert response.model_version == "model_version_value" + + +@pytest.mark.asyncio +async def test_generate_content_async_from_dict(): + await test_generate_content_async(request_type=dict) + + +def test_generate_content_field_headers(): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = generative_service.GenerateContentRequest() + + request.model = "model_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.generate_content), "__call__") as call: + call.return_value = generative_service.GenerateContentResponse() + client.generate_content(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "model=model_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_generate_content_field_headers_async(): + client = GenerativeServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = generative_service.GenerateContentRequest() + + request.model = "model_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.generate_content), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + generative_service.GenerateContentResponse() + ) + await client.generate_content(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "model=model_value", + ) in kw["metadata"] + + +def test_generate_content_flattened(): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.generate_content), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = generative_service.GenerateContentResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.generate_content( + model="model_value", + contents=[content.Content(parts=[content.Part(text="text_value")])], + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].model + mock_val = "model_value" + assert arg == mock_val + arg = args[0].contents + mock_val = [content.Content(parts=[content.Part(text="text_value")])] + assert arg == mock_val + + +def test_generate_content_flattened_error(): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.generate_content( + generative_service.GenerateContentRequest(), + model="model_value", + contents=[content.Content(parts=[content.Part(text="text_value")])], + ) + + +@pytest.mark.asyncio +async def test_generate_content_flattened_async(): + client = GenerativeServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.generate_content), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = generative_service.GenerateContentResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + generative_service.GenerateContentResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.generate_content( + model="model_value", + contents=[content.Content(parts=[content.Part(text="text_value")])], + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].model + mock_val = "model_value" + assert arg == mock_val + arg = args[0].contents + mock_val = [content.Content(parts=[content.Part(text="text_value")])] + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_generate_content_flattened_error_async(): + client = GenerativeServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.generate_content( + generative_service.GenerateContentRequest(), + model="model_value", + contents=[content.Content(parts=[content.Part(text="text_value")])], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + generative_service.GenerateAnswerRequest, + dict, + ], +) +def test_generate_answer(request_type, transport: str = "grpc"): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.generate_answer), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = generative_service.GenerateAnswerResponse( + answerable_probability=0.234, + ) + response = client.generate_answer(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = generative_service.GenerateAnswerRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, generative_service.GenerateAnswerResponse) + assert math.isclose(response.answerable_probability, 0.234, rel_tol=1e-6) + + +def test_generate_answer_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = generative_service.GenerateAnswerRequest( + model="model_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.generate_answer), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.generate_answer(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == generative_service.GenerateAnswerRequest( + model="model_value", + ) + + +def test_generate_answer_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.generate_answer in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.generate_answer] = mock_rpc + request = {} + client.generate_answer(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.generate_answer(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_generate_answer_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = GenerativeServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.generate_answer + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.generate_answer + ] = mock_rpc + + request = {} + await client.generate_answer(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + await client.generate_answer(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_generate_answer_async( + transport: str = "grpc_asyncio", + request_type=generative_service.GenerateAnswerRequest, +): + client = GenerativeServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.generate_answer), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + generative_service.GenerateAnswerResponse( + answerable_probability=0.234, + ) + ) + response = await client.generate_answer(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = generative_service.GenerateAnswerRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, generative_service.GenerateAnswerResponse) + assert math.isclose(response.answerable_probability, 0.234, rel_tol=1e-6) + + +@pytest.mark.asyncio +async def test_generate_answer_async_from_dict(): + await test_generate_answer_async(request_type=dict) + + +def test_generate_answer_field_headers(): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = generative_service.GenerateAnswerRequest() + + request.model = "model_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.generate_answer), "__call__") as call: + call.return_value = generative_service.GenerateAnswerResponse() + client.generate_answer(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "model=model_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_generate_answer_field_headers_async(): + client = GenerativeServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = generative_service.GenerateAnswerRequest() + + request.model = "model_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.generate_answer), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + generative_service.GenerateAnswerResponse() + ) + await client.generate_answer(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "model=model_value", + ) in kw["metadata"] + + +def test_generate_answer_flattened(): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.generate_answer), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = generative_service.GenerateAnswerResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.generate_answer( + model="model_value", + contents=[content.Content(parts=[content.Part(text="text_value")])], + safety_settings=[ + safety.SafetySetting( + category=safety.HarmCategory.HARM_CATEGORY_DEROGATORY + ) + ], + answer_style=generative_service.GenerateAnswerRequest.AnswerStyle.ABSTRACTIVE, + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].model + mock_val = "model_value" + assert arg == mock_val + arg = args[0].contents + mock_val = [content.Content(parts=[content.Part(text="text_value")])] + assert arg == mock_val + arg = args[0].safety_settings + mock_val = [ + safety.SafetySetting(category=safety.HarmCategory.HARM_CATEGORY_DEROGATORY) + ] + assert arg == mock_val + arg = args[0].answer_style + mock_val = generative_service.GenerateAnswerRequest.AnswerStyle.ABSTRACTIVE + assert arg == mock_val + + +def test_generate_answer_flattened_error(): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.generate_answer( + generative_service.GenerateAnswerRequest(), + model="model_value", + contents=[content.Content(parts=[content.Part(text="text_value")])], + safety_settings=[ + safety.SafetySetting( + category=safety.HarmCategory.HARM_CATEGORY_DEROGATORY + ) + ], + answer_style=generative_service.GenerateAnswerRequest.AnswerStyle.ABSTRACTIVE, + ) + + +@pytest.mark.asyncio +async def test_generate_answer_flattened_async(): + client = GenerativeServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.generate_answer), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = generative_service.GenerateAnswerResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + generative_service.GenerateAnswerResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.generate_answer( + model="model_value", + contents=[content.Content(parts=[content.Part(text="text_value")])], + safety_settings=[ + safety.SafetySetting( + category=safety.HarmCategory.HARM_CATEGORY_DEROGATORY + ) + ], + answer_style=generative_service.GenerateAnswerRequest.AnswerStyle.ABSTRACTIVE, + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].model + mock_val = "model_value" + assert arg == mock_val + arg = args[0].contents + mock_val = [content.Content(parts=[content.Part(text="text_value")])] + assert arg == mock_val + arg = args[0].safety_settings + mock_val = [ + safety.SafetySetting(category=safety.HarmCategory.HARM_CATEGORY_DEROGATORY) + ] + assert arg == mock_val + arg = args[0].answer_style + mock_val = generative_service.GenerateAnswerRequest.AnswerStyle.ABSTRACTIVE + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_generate_answer_flattened_error_async(): + client = GenerativeServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.generate_answer( + generative_service.GenerateAnswerRequest(), + model="model_value", + contents=[content.Content(parts=[content.Part(text="text_value")])], + safety_settings=[ + safety.SafetySetting( + category=safety.HarmCategory.HARM_CATEGORY_DEROGATORY + ) + ], + answer_style=generative_service.GenerateAnswerRequest.AnswerStyle.ABSTRACTIVE, + ) + + +@pytest.mark.parametrize( + "request_type", + [ + generative_service.GenerateContentRequest, + dict, + ], +) +def test_stream_generate_content(request_type, transport: str = "grpc"): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.stream_generate_content), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = iter([generative_service.GenerateContentResponse()]) + response = client.stream_generate_content(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = generative_service.GenerateContentRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + for message in response: + assert isinstance(message, generative_service.GenerateContentResponse) + + +def test_stream_generate_content_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = generative_service.GenerateContentRequest( + model="model_value", + cached_content="cached_content_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.stream_generate_content), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.stream_generate_content(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == generative_service.GenerateContentRequest( + model="model_value", + cached_content="cached_content_value", + ) + + +def test_stream_generate_content_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.stream_generate_content + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.stream_generate_content + ] = mock_rpc + request = {} + client.stream_generate_content(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.stream_generate_content(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_stream_generate_content_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = GenerativeServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.stream_generate_content + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.stream_generate_content + ] = mock_rpc + + request = {} + await client.stream_generate_content(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.stream_generate_content(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_stream_generate_content_async( + transport: str = "grpc_asyncio", + request_type=generative_service.GenerateContentRequest, +): + client = GenerativeServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.stream_generate_content), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = mock.Mock(aio.UnaryStreamCall, autospec=True) + call.return_value.read = mock.AsyncMock( + side_effect=[generative_service.GenerateContentResponse()] + ) + response = await client.stream_generate_content(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = generative_service.GenerateContentRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + message = await response.read() + assert isinstance(message, generative_service.GenerateContentResponse) + + +@pytest.mark.asyncio +async def test_stream_generate_content_async_from_dict(): + await test_stream_generate_content_async(request_type=dict) + + +def test_stream_generate_content_field_headers(): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = generative_service.GenerateContentRequest() + + request.model = "model_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.stream_generate_content), "__call__" + ) as call: + call.return_value = iter([generative_service.GenerateContentResponse()]) + client.stream_generate_content(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "model=model_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_stream_generate_content_field_headers_async(): + client = GenerativeServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = generative_service.GenerateContentRequest() + + request.model = "model_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.stream_generate_content), "__call__" + ) as call: + call.return_value = mock.Mock(aio.UnaryStreamCall, autospec=True) + call.return_value.read = mock.AsyncMock( + side_effect=[generative_service.GenerateContentResponse()] + ) + await client.stream_generate_content(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "model=model_value", + ) in kw["metadata"] + + +def test_stream_generate_content_flattened(): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.stream_generate_content), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = iter([generative_service.GenerateContentResponse()]) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ client.stream_generate_content( + model="model_value", + contents=[content.Content(parts=[content.Part(text="text_value")])], + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].model + mock_val = "model_value" + assert arg == mock_val + arg = args[0].contents + mock_val = [content.Content(parts=[content.Part(text="text_value")])] + assert arg == mock_val + + +def test_stream_generate_content_flattened_error(): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.stream_generate_content( + generative_service.GenerateContentRequest(), + model="model_value", + contents=[content.Content(parts=[content.Part(text="text_value")])], + ) + + +@pytest.mark.asyncio +async def test_stream_generate_content_flattened_async(): + client = GenerativeServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.stream_generate_content), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = iter([generative_service.GenerateContentResponse()]) + + call.return_value = mock.Mock(aio.UnaryStreamCall, autospec=True) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.stream_generate_content( + model="model_value", + contents=[content.Content(parts=[content.Part(text="text_value")])], + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].model + mock_val = "model_value" + assert arg == mock_val + arg = args[0].contents + mock_val = [content.Content(parts=[content.Part(text="text_value")])] + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_stream_generate_content_flattened_error_async(): + client = GenerativeServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.stream_generate_content( + generative_service.GenerateContentRequest(), + model="model_value", + contents=[content.Content(parts=[content.Part(text="text_value")])], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + generative_service.EmbedContentRequest, + dict, + ], +) +def test_embed_content(request_type, transport: str = "grpc"): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.embed_content), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = generative_service.EmbedContentResponse() + response = client.embed_content(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = generative_service.EmbedContentRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, generative_service.EmbedContentResponse) + + +def test_embed_content_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = generative_service.EmbedContentRequest( + model="model_value", + title="title_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.embed_content), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.embed_content(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == generative_service.EmbedContentRequest( + model="model_value", + title="title_value", + ) + + +def test_embed_content_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.embed_content in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.embed_content] = mock_rpc + request = {} + client.embed_content(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.embed_content(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_embed_content_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = GenerativeServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.embed_content + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.embed_content + ] = mock_rpc + + request = {} + await client.embed_content(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.embed_content(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_embed_content_async( + transport: str = "grpc_asyncio", request_type=generative_service.EmbedContentRequest +): + client = GenerativeServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.embed_content), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + generative_service.EmbedContentResponse() + ) + response = await client.embed_content(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = generative_service.EmbedContentRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, generative_service.EmbedContentResponse) + + +@pytest.mark.asyncio +async def test_embed_content_async_from_dict(): + await test_embed_content_async(request_type=dict) + + +def test_embed_content_field_headers(): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = generative_service.EmbedContentRequest() + + request.model = "model_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.embed_content), "__call__") as call: + call.return_value = generative_service.EmbedContentResponse() + client.embed_content(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "model=model_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_embed_content_field_headers_async(): + client = GenerativeServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = generative_service.EmbedContentRequest() + + request.model = "model_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.embed_content), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + generative_service.EmbedContentResponse() + ) + await client.embed_content(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "model=model_value", + ) in kw["metadata"] + + +def test_embed_content_flattened(): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.embed_content), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = generative_service.EmbedContentResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.embed_content( + model="model_value", + content=gag_content.Content(parts=[gag_content.Part(text="text_value")]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].model + mock_val = "model_value" + assert arg == mock_val + arg = args[0].content + mock_val = gag_content.Content(parts=[gag_content.Part(text="text_value")]) + assert arg == mock_val + + +def test_embed_content_flattened_error(): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.embed_content( + generative_service.EmbedContentRequest(), + model="model_value", + content=gag_content.Content(parts=[gag_content.Part(text="text_value")]), + ) + + +@pytest.mark.asyncio +async def test_embed_content_flattened_async(): + client = GenerativeServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.embed_content), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = generative_service.EmbedContentResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + generative_service.EmbedContentResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.embed_content( + model="model_value", + content=gag_content.Content(parts=[gag_content.Part(text="text_value")]), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].model + mock_val = "model_value" + assert arg == mock_val + arg = args[0].content + mock_val = gag_content.Content(parts=[gag_content.Part(text="text_value")]) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_embed_content_flattened_error_async(): + client = GenerativeServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.embed_content( + generative_service.EmbedContentRequest(), + model="model_value", + content=gag_content.Content(parts=[gag_content.Part(text="text_value")]), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + generative_service.BatchEmbedContentsRequest, + dict, + ], +) +def test_batch_embed_contents(request_type, transport: str = "grpc"): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_embed_contents), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = generative_service.BatchEmbedContentsResponse() + response = client.batch_embed_contents(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = generative_service.BatchEmbedContentsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, generative_service.BatchEmbedContentsResponse) + + +def test_batch_embed_contents_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = generative_service.BatchEmbedContentsRequest( + model="model_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_embed_contents), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.batch_embed_contents(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == generative_service.BatchEmbedContentsRequest( + model="model_value", + ) + + +def test_batch_embed_contents_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.batch_embed_contents in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.batch_embed_contents + ] = mock_rpc + request = {} + client.batch_embed_contents(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.batch_embed_contents(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_batch_embed_contents_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = GenerativeServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.batch_embed_contents + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.batch_embed_contents + ] = mock_rpc + + request = {} + await client.batch_embed_contents(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.batch_embed_contents(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_batch_embed_contents_async( + transport: str = "grpc_asyncio", + request_type=generative_service.BatchEmbedContentsRequest, +): + client = GenerativeServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_embed_contents), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + generative_service.BatchEmbedContentsResponse() + ) + response = await client.batch_embed_contents(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = generative_service.BatchEmbedContentsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, generative_service.BatchEmbedContentsResponse) + + +@pytest.mark.asyncio +async def test_batch_embed_contents_async_from_dict(): + await test_batch_embed_contents_async(request_type=dict) + + +def test_batch_embed_contents_field_headers(): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = generative_service.BatchEmbedContentsRequest() + + request.model = "model_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_embed_contents), "__call__" + ) as call: + call.return_value = generative_service.BatchEmbedContentsResponse() + client.batch_embed_contents(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "model=model_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_batch_embed_contents_field_headers_async(): + client = GenerativeServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = generative_service.BatchEmbedContentsRequest() + + request.model = "model_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_embed_contents), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + generative_service.BatchEmbedContentsResponse() + ) + await client.batch_embed_contents(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "model=model_value", + ) in kw["metadata"] + + +def test_batch_embed_contents_flattened(): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_embed_contents), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = generative_service.BatchEmbedContentsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.batch_embed_contents( + model="model_value", + requests=[generative_service.EmbedContentRequest(model="model_value")], + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].model + mock_val = "model_value" + assert arg == mock_val + arg = args[0].requests + mock_val = [generative_service.EmbedContentRequest(model="model_value")] + assert arg == mock_val + + +def test_batch_embed_contents_flattened_error(): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.batch_embed_contents( + generative_service.BatchEmbedContentsRequest(), + model="model_value", + requests=[generative_service.EmbedContentRequest(model="model_value")], + ) + + +@pytest.mark.asyncio +async def test_batch_embed_contents_flattened_async(): + client = GenerativeServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_embed_contents), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = generative_service.BatchEmbedContentsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + generative_service.BatchEmbedContentsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.batch_embed_contents( + model="model_value", + requests=[generative_service.EmbedContentRequest(model="model_value")], + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].model + mock_val = "model_value" + assert arg == mock_val + arg = args[0].requests + mock_val = [generative_service.EmbedContentRequest(model="model_value")] + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_batch_embed_contents_flattened_error_async(): + client = GenerativeServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.batch_embed_contents( + generative_service.BatchEmbedContentsRequest(), + model="model_value", + requests=[generative_service.EmbedContentRequest(model="model_value")], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + generative_service.CountTokensRequest, + dict, + ], +) +def test_count_tokens(request_type, transport: str = "grpc"): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.count_tokens), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = generative_service.CountTokensResponse( + total_tokens=1303, + cached_content_token_count=2746, + ) + response = client.count_tokens(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = generative_service.CountTokensRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, generative_service.CountTokensResponse) + assert response.total_tokens == 1303 + assert response.cached_content_token_count == 2746 + + +def test_count_tokens_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = generative_service.CountTokensRequest( + model="model_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.count_tokens), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.count_tokens(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == generative_service.CountTokensRequest( + model="model_value", + ) + + +def test_count_tokens_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.count_tokens in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.count_tokens] = mock_rpc + request = {} + client.count_tokens(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.count_tokens(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_count_tokens_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = GenerativeServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.count_tokens + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.count_tokens + ] = mock_rpc + + request = {} + await client.count_tokens(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + await client.count_tokens(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_count_tokens_async( + transport: str = "grpc_asyncio", request_type=generative_service.CountTokensRequest +): + client = GenerativeServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.count_tokens), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + generative_service.CountTokensResponse( + total_tokens=1303, + cached_content_token_count=2746, + ) + ) + response = await client.count_tokens(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = generative_service.CountTokensRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, generative_service.CountTokensResponse) + assert response.total_tokens == 1303 + assert response.cached_content_token_count == 2746 + + +@pytest.mark.asyncio +async def test_count_tokens_async_from_dict(): + await test_count_tokens_async(request_type=dict) + + +def test_count_tokens_field_headers(): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = generative_service.CountTokensRequest() + + request.model = "model_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.count_tokens), "__call__") as call: + call.return_value = generative_service.CountTokensResponse() + client.count_tokens(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "model=model_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_count_tokens_field_headers_async(): + client = GenerativeServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = generative_service.CountTokensRequest() + + request.model = "model_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.count_tokens), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + generative_service.CountTokensResponse() + ) + await client.count_tokens(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "model=model_value", + ) in kw["metadata"] + + +def test_count_tokens_flattened(): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.count_tokens), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = generative_service.CountTokensResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.count_tokens( + model="model_value", + contents=[content.Content(parts=[content.Part(text="text_value")])], + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].model + mock_val = "model_value" + assert arg == mock_val + arg = args[0].contents + mock_val = [content.Content(parts=[content.Part(text="text_value")])] + assert arg == mock_val + + +def test_count_tokens_flattened_error(): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.count_tokens( + generative_service.CountTokensRequest(), + model="model_value", + contents=[content.Content(parts=[content.Part(text="text_value")])], + ) + + +@pytest.mark.asyncio +async def test_count_tokens_flattened_async(): + client = GenerativeServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.count_tokens), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = generative_service.CountTokensResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + generative_service.CountTokensResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.count_tokens( + model="model_value", + contents=[content.Content(parts=[content.Part(text="text_value")])], + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].model + mock_val = "model_value" + assert arg == mock_val + arg = args[0].contents + mock_val = [content.Content(parts=[content.Part(text="text_value")])] + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_count_tokens_flattened_error_async(): + client = GenerativeServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.count_tokens( + generative_service.CountTokensRequest(), + model="model_value", + contents=[content.Content(parts=[content.Part(text="text_value")])], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + generative_service.BidiGenerateContentClientMessage, + dict, + ], +) +def test_bidi_generate_content(request_type, transport: str = "grpc"): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + requests = [request] + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.bidi_generate_content), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = iter( + [generative_service.BidiGenerateContentServerMessage()] + ) + response = client.bidi_generate_content(iter(requests)) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert next(args[0]) == request + + # Establish that the response is the type that we expect. + for message in response: + assert isinstance(message, generative_service.BidiGenerateContentServerMessage) + + +def test_bidi_generate_content_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.bidi_generate_content + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.bidi_generate_content + ] = mock_rpc + request = [{}] + client.bidi_generate_content(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.bidi_generate_content(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_bidi_generate_content_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = GenerativeServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.bidi_generate_content + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.bidi_generate_content + ] = mock_rpc + + request = [{}] + await client.bidi_generate_content(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.bidi_generate_content(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_bidi_generate_content_async( + transport: str = "grpc_asyncio", + request_type=generative_service.BidiGenerateContentClientMessage, +): + client = GenerativeServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + requests = [request] + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.bidi_generate_content), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = mock.Mock(aio.StreamStreamCall, autospec=True) + call.return_value.read = mock.AsyncMock( + side_effect=[generative_service.BidiGenerateContentServerMessage()] + ) + response = await client.bidi_generate_content(iter(requests)) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert next(args[0]) == request + + # Establish that the response is the type that we expect. 
+ message = await response.read() + assert isinstance(message, generative_service.BidiGenerateContentServerMessage) + + +@pytest.mark.asyncio +async def test_bidi_generate_content_async_from_dict(): + await test_bidi_generate_content_async(request_type=dict) + + +def test_generate_content_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.generate_content in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.generate_content + ] = mock_rpc + + request = {} + client.generate_content(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.generate_content(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_generate_content_rest_required_fields( + request_type=generative_service.GenerateContentRequest, +): + transport_class = transports.GenerativeServiceRestTransport + + request_init = {} + request_init["model"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).generate_content._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["model"] = "model_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).generate_content._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "model" in jsonified_request + assert jsonified_request["model"] == "model_value" + + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = generative_service.GenerateContentResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = generative_service.GenerateContentResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.generate_content(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_generate_content_rest_unset_required_fields(): + transport = transports.GenerativeServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.generate_content._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "model", + "contents", + ) + ) + ) + + +def test_generate_content_rest_flattened(): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = generative_service.GenerateContentResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"model": "models/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + model="model_value", + contents=[content.Content(parts=[content.Part(text="text_value")])], + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = generative_service.GenerateContentResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.generate_content(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{model=models/*}:generateContent" % client.transport._host, + args[1], + ) + + +def test_generate_content_rest_flattened_error(transport: str = "rest"): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.generate_content( + generative_service.GenerateContentRequest(), + model="model_value", + contents=[content.Content(parts=[content.Part(text="text_value")])], + ) + + +def test_generate_answer_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.generate_answer in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.generate_answer] = mock_rpc + + request = {} + client.generate_answer(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.generate_answer(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_generate_answer_rest_required_fields( + request_type=generative_service.GenerateAnswerRequest, +): + transport_class = transports.GenerativeServiceRestTransport + + request_init = {} + request_init["model"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).generate_answer._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["model"] = "model_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).generate_answer._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "model" in jsonified_request + assert jsonified_request["model"] == "model_value" + + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = generative_service.GenerateAnswerResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = generative_service.GenerateAnswerResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.generate_answer(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_generate_answer_rest_unset_required_fields(): + transport = transports.GenerativeServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.generate_answer._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "model", + "contents", + "answerStyle", + ) + ) + ) + + +def test_generate_answer_rest_flattened(): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = generative_service.GenerateAnswerResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"model": "models/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + model="model_value", + contents=[content.Content(parts=[content.Part(text="text_value")])], + safety_settings=[ + safety.SafetySetting( + category=safety.HarmCategory.HARM_CATEGORY_DEROGATORY + ) + ], + answer_style=generative_service.GenerateAnswerRequest.AnswerStyle.ABSTRACTIVE, + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = generative_service.GenerateAnswerResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.generate_answer(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{model=models/*}:generateAnswer" % client.transport._host, + args[1], + ) + + +def test_generate_answer_rest_flattened_error(transport: str = "rest"): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.generate_answer( + generative_service.GenerateAnswerRequest(), + model="model_value", + contents=[content.Content(parts=[content.Part(text="text_value")])], + safety_settings=[ + safety.SafetySetting( + category=safety.HarmCategory.HARM_CATEGORY_DEROGATORY + ) + ], + answer_style=generative_service.GenerateAnswerRequest.AnswerStyle.ABSTRACTIVE, + ) + + +def test_stream_generate_content_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.stream_generate_content + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.stream_generate_content + ] = mock_rpc + + request = {} + client.stream_generate_content(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.stream_generate_content(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_stream_generate_content_rest_required_fields( + request_type=generative_service.GenerateContentRequest, +): + transport_class = transports.GenerativeServiceRestTransport + + request_init = {} + request_init["model"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).stream_generate_content._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["model"] = "model_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).stream_generate_content._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "model" in jsonified_request + assert jsonified_request["model"] == "model_value" + + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = generative_service.GenerateContentResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = generative_service.GenerateContentResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + json_return_value = "[{}]".format(json_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + with mock.patch.object(response_value, "iter_content") as iter_content: + iter_content.return_value = iter(json_return_value) + response = client.stream_generate_content(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_stream_generate_content_rest_unset_required_fields(): + transport = transports.GenerativeServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.stream_generate_content._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "model", + "contents", + ) + ) + ) + + +def test_stream_generate_content_rest_flattened(): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = generative_service.GenerateContentResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"model": "models/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + model="model_value", + contents=[content.Content(parts=[content.Part(text="text_value")])], + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = generative_service.GenerateContentResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + json_return_value = "[{}]".format(json_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + with mock.patch.object(response_value, "iter_content") as iter_content: + iter_content.return_value = iter(json_return_value) + client.stream_generate_content(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{model=models/*}:streamGenerateContent" + % client.transport._host, + args[1], + ) + + +def test_stream_generate_content_rest_flattened_error(transport: str = "rest"): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.stream_generate_content( + generative_service.GenerateContentRequest(), + model="model_value", + contents=[content.Content(parts=[content.Part(text="text_value")])], + ) + + +def test_embed_content_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.embed_content in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.embed_content] = mock_rpc + + request = {} + client.embed_content(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.embed_content(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_embed_content_rest_required_fields( + request_type=generative_service.EmbedContentRequest, +): + transport_class = transports.GenerativeServiceRestTransport + + request_init = {} + request_init["model"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).embed_content._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["model"] = "model_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).embed_content._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "model" in jsonified_request + assert jsonified_request["model"] == "model_value" + + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = generative_service.EmbedContentResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = generative_service.EmbedContentResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.embed_content(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_embed_content_rest_unset_required_fields(): + transport = transports.GenerativeServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.embed_content._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "model", + "content", + ) + ) + ) + + +def test_embed_content_rest_flattened(): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = generative_service.EmbedContentResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"model": "models/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + model="model_value", + content=gag_content.Content(parts=[gag_content.Part(text="text_value")]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = generative_service.EmbedContentResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.embed_content(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{model=models/*}:embedContent" % client.transport._host, args[1] + ) + + +def test_embed_content_rest_flattened_error(transport: str = "rest"): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.embed_content( + generative_service.EmbedContentRequest(), + model="model_value", + content=gag_content.Content(parts=[gag_content.Part(text="text_value")]), + ) + + +def test_batch_embed_contents_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.batch_embed_contents in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.batch_embed_contents + ] = mock_rpc + + request = {} + client.batch_embed_contents(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.batch_embed_contents(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_batch_embed_contents_rest_required_fields( + request_type=generative_service.BatchEmbedContentsRequest, +): + transport_class = transports.GenerativeServiceRestTransport + + request_init = {} + request_init["model"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).batch_embed_contents._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["model"] = "model_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).batch_embed_contents._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "model" in jsonified_request + assert jsonified_request["model"] == "model_value" + + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = generative_service.BatchEmbedContentsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = generative_service.BatchEmbedContentsResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.batch_embed_contents(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_batch_embed_contents_rest_unset_required_fields(): + transport = transports.GenerativeServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.batch_embed_contents._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "model", + "requests", + ) + ) + ) + + +def test_batch_embed_contents_rest_flattened(): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = generative_service.BatchEmbedContentsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"model": "models/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + model="model_value", + requests=[generative_service.EmbedContentRequest(model="model_value")], + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = generative_service.BatchEmbedContentsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.batch_embed_contents(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{model=models/*}:batchEmbedContents" % client.transport._host, + args[1], + ) + + +def test_batch_embed_contents_rest_flattened_error(transport: str = "rest"): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.batch_embed_contents( + generative_service.BatchEmbedContentsRequest(), + model="model_value", + requests=[generative_service.EmbedContentRequest(model="model_value")], + ) + + +def test_count_tokens_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.count_tokens in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.count_tokens] = mock_rpc + + request = {} + client.count_tokens(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.count_tokens(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_count_tokens_rest_required_fields( + request_type=generative_service.CountTokensRequest, +): + transport_class = transports.GenerativeServiceRestTransport + + request_init = {} + request_init["model"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).count_tokens._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["model"] = "model_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).count_tokens._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "model" in jsonified_request + assert jsonified_request["model"] == "model_value" + + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = generative_service.CountTokensResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = generative_service.CountTokensResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.count_tokens(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_count_tokens_rest_unset_required_fields(): + transport = transports.GenerativeServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.count_tokens._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("model",))) + + +def test_count_tokens_rest_flattened(): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = generative_service.CountTokensResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"model": "models/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + model="model_value", + contents=[content.Content(parts=[content.Part(text="text_value")])], + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = generative_service.CountTokensResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.count_tokens(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{model=models/*}:countTokens" % client.transport._host, args[1] + ) + + +def test_count_tokens_rest_flattened_error(transport: str = "rest"): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.count_tokens( + generative_service.CountTokensRequest(), + model="model_value", + contents=[content.Content(parts=[content.Part(text="text_value")])], + ) + + +def test_bidi_generate_content_rest_no_http_options(): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = generative_service.BidiGenerateContentClientMessage() + requests = [request] + with pytest.raises(RuntimeError): + client.bidi_generate_content(requests) + + +def test_bidi_generate_content_rest_error(): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # Since a `google.api.http` annotation is required for using a rest transport + # method, this should error. + with pytest.raises(NotImplementedError) as not_implemented_error: + client.bidi_generate_content({}) + assert "Method BidiGenerateContent is not available over REST transport" in str( + not_implemented_error.value + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.GenerativeServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.GenerativeServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = GenerativeServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.GenerativeServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = GenerativeServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = GenerativeServiceClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.GenerativeServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = GenerativeServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.GenerativeServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = GenerativeServiceClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. 
+ transport = transports.GenerativeServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.GenerativeServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.GenerativeServiceGrpcTransport, + transports.GenerativeServiceGrpcAsyncIOTransport, + transports.GenerativeServiceRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +def test_transport_kind_grpc(): + transport = GenerativeServiceClient.get_transport_class("grpc")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "grpc" + + +def test_initialize_client_w_grpc(): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_generate_content_empty_call_grpc(): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.generate_content), "__call__") as call: + call.return_value = generative_service.GenerateContentResponse() + client.generate_content(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = generative_service.GenerateContentRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_generate_answer_empty_call_grpc(): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.generate_answer), "__call__") as call: + call.return_value = generative_service.GenerateAnswerResponse() + client.generate_answer(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = generative_service.GenerateAnswerRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_stream_generate_content_empty_call_grpc(): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.stream_generate_content), "__call__" + ) as call: + call.return_value = iter([generative_service.GenerateContentResponse()]) + client.stream_generate_content(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = generative_service.GenerateContentRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_embed_content_empty_call_grpc(): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.embed_content), "__call__") as call: + call.return_value = generative_service.EmbedContentResponse() + client.embed_content(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = generative_service.EmbedContentRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_batch_embed_contents_empty_call_grpc(): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.batch_embed_contents), "__call__" + ) as call: + call.return_value = generative_service.BatchEmbedContentsResponse() + client.batch_embed_contents(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = generative_service.BatchEmbedContentsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_count_tokens_empty_call_grpc(): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.count_tokens), "__call__") as call: + call.return_value = generative_service.CountTokensResponse() + client.count_tokens(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = generative_service.CountTokensRequest() + + assert args[0] == request_msg + + +def test_transport_kind_grpc_asyncio(): + transport = GenerativeServiceAsyncClient.get_transport_class("grpc_asyncio")( + credentials=async_anonymous_credentials() + ) + assert transport.kind == "grpc_asyncio" + + +def test_initialize_client_w_grpc_asyncio(): + client = GenerativeServiceAsyncClient( + credentials=async_anonymous_credentials(), transport="grpc_asyncio" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_generate_content_empty_call_grpc_asyncio(): + client = GenerativeServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.generate_content), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + generative_service.GenerateContentResponse( + model_version="model_version_value", + ) + ) + await client.generate_content(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = generative_service.GenerateContentRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_generate_answer_empty_call_grpc_asyncio(): + client = GenerativeServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.generate_answer), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + generative_service.GenerateAnswerResponse( + answerable_probability=0.234, + ) + ) + await client.generate_answer(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = generative_service.GenerateAnswerRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_stream_generate_content_empty_call_grpc_asyncio(): + client = GenerativeServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.stream_generate_content), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = mock.Mock(aio.UnaryStreamCall, autospec=True) + call.return_value.read = mock.AsyncMock( + side_effect=[generative_service.GenerateContentResponse()] + ) + await client.stream_generate_content(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = generative_service.GenerateContentRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_embed_content_empty_call_grpc_asyncio(): + client = GenerativeServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.embed_content), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + generative_service.EmbedContentResponse() + ) + await client.embed_content(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = generative_service.EmbedContentRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+@pytest.mark.asyncio +async def test_batch_embed_contents_empty_call_grpc_asyncio(): + client = GenerativeServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.batch_embed_contents), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + generative_service.BatchEmbedContentsResponse() + ) + await client.batch_embed_contents(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = generative_service.BatchEmbedContentsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_count_tokens_empty_call_grpc_asyncio(): + client = GenerativeServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.count_tokens), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + generative_service.CountTokensResponse( + total_tokens=1303, + cached_content_token_count=2746, + ) + ) + await client.count_tokens(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = generative_service.CountTokensRequest() + + assert args[0] == request_msg + + +def test_transport_kind_rest(): + transport = GenerativeServiceClient.get_transport_class("rest")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "rest" + + +def test_generate_content_rest_bad_request( + request_type=generative_service.GenerateContentRequest, +): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"model": "models/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.generate_content(request) + + +@pytest.mark.parametrize( + "request_type", + [ + generative_service.GenerateContentRequest, + dict, + ], +) +def test_generate_content_rest_call_success(request_type): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"model": "models/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = generative_service.GenerateContentResponse( + model_version="model_version_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = generative_service.GenerateContentResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.generate_content(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, generative_service.GenerateContentResponse) + assert response.model_version == "model_version_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_generate_content_rest_interceptors(null_interceptor): + transport = transports.GenerativeServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.GenerativeServiceRestInterceptor(), + ) + client = GenerativeServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.GenerativeServiceRestInterceptor, "post_generate_content" + ) as post, mock.patch.object( + transports.GenerativeServiceRestInterceptor, + "post_generate_content_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.GenerativeServiceRestInterceptor, "pre_generate_content" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = generative_service.GenerateContentRequest.pb( + generative_service.GenerateContentRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = generative_service.GenerateContentResponse.to_json( + generative_service.GenerateContentResponse() + ) + req.return_value.content = return_value + + request = generative_service.GenerateContentRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = generative_service.GenerateContentResponse() + post_with_metadata.return_value = ( + generative_service.GenerateContentResponse(), + metadata, + ) + + client.generate_content( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_generate_answer_rest_bad_request( + request_type=generative_service.GenerateAnswerRequest, +): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"model": "models/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.generate_answer(request) + + +@pytest.mark.parametrize( + "request_type", + [ + generative_service.GenerateAnswerRequest, + dict, + ], +) +def test_generate_answer_rest_call_success(request_type): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"model": "models/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = generative_service.GenerateAnswerResponse( + answerable_probability=0.234, + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = generative_service.GenerateAnswerResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.generate_answer(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, generative_service.GenerateAnswerResponse) + assert math.isclose(response.answerable_probability, 0.234, rel_tol=1e-6) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_generate_answer_rest_interceptors(null_interceptor): + transport = transports.GenerativeServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.GenerativeServiceRestInterceptor(), + ) + client = GenerativeServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.GenerativeServiceRestInterceptor, "post_generate_answer" + ) as post, mock.patch.object( + transports.GenerativeServiceRestInterceptor, + "post_generate_answer_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.GenerativeServiceRestInterceptor, "pre_generate_answer" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = generative_service.GenerateAnswerRequest.pb( + generative_service.GenerateAnswerRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = generative_service.GenerateAnswerResponse.to_json( + generative_service.GenerateAnswerResponse() + ) + req.return_value.content = return_value + + request = generative_service.GenerateAnswerRequest() + metadata = [ + ("key", "val"), + ("cephalopod", 
"squid"), + ] + pre.return_value = request, metadata + post.return_value = generative_service.GenerateAnswerResponse() + post_with_metadata.return_value = ( + generative_service.GenerateAnswerResponse(), + metadata, + ) + + client.generate_answer( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_stream_generate_content_rest_bad_request( + request_type=generative_service.GenerateContentRequest, +): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"model": "models/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.stream_generate_content(request) + + +@pytest.mark.parametrize( + "request_type", + [ + generative_service.GenerateContentRequest, + dict, + ], +) +def test_stream_generate_content_rest_call_success(request_type): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"model": "models/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = generative_service.GenerateContentResponse( + model_version="model_version_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = generative_service.GenerateContentResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + json_return_value = "[{}]".format(json_return_value) + response_value.iter_content = mock.Mock(return_value=iter(json_return_value)) + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.stream_generate_content(request) + + assert isinstance(response, Iterable) + response = next(response) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, generative_service.GenerateContentResponse) + assert response.model_version == "model_version_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_stream_generate_content_rest_interceptors(null_interceptor): + transport = transports.GenerativeServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.GenerativeServiceRestInterceptor(), + ) + client = GenerativeServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.GenerativeServiceRestInterceptor, "post_stream_generate_content" + ) as post, mock.patch.object( + transports.GenerativeServiceRestInterceptor, + "post_stream_generate_content_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.GenerativeServiceRestInterceptor, "pre_stream_generate_content" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = generative_service.GenerateContentRequest.pb( + generative_service.GenerateContentRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = generative_service.GenerateContentResponse.to_json( + generative_service.GenerateContentResponse() + ) + req.return_value.iter_content = mock.Mock(return_value=iter(return_value)) + + request = generative_service.GenerateContentRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = generative_service.GenerateContentResponse() + post_with_metadata.return_value = ( + generative_service.GenerateContentResponse(), + metadata, + ) + + client.stream_generate_content( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_embed_content_rest_bad_request( + request_type=generative_service.EmbedContentRequest, +): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"model": "models/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.embed_content(request) + + +@pytest.mark.parametrize( + "request_type", + [ + generative_service.EmbedContentRequest, + dict, + ], +) +def test_embed_content_rest_call_success(request_type): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"model": "models/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = generative_service.EmbedContentResponse() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = generative_service.EmbedContentResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.embed_content(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, generative_service.EmbedContentResponse) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_embed_content_rest_interceptors(null_interceptor): + transport = transports.GenerativeServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.GenerativeServiceRestInterceptor(), + ) + client = GenerativeServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.GenerativeServiceRestInterceptor, "post_embed_content" + ) as post, mock.patch.object( + transports.GenerativeServiceRestInterceptor, "post_embed_content_with_metadata" + ) as post_with_metadata, mock.patch.object( + transports.GenerativeServiceRestInterceptor, "pre_embed_content" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = generative_service.EmbedContentRequest.pb( + generative_service.EmbedContentRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = generative_service.EmbedContentResponse.to_json( + generative_service.EmbedContentResponse() + ) + req.return_value.content = return_value + + request = generative_service.EmbedContentRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = generative_service.EmbedContentResponse() + post_with_metadata.return_value = 
( + generative_service.EmbedContentResponse(), + metadata, + ) + + client.embed_content( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_batch_embed_contents_rest_bad_request( + request_type=generative_service.BatchEmbedContentsRequest, +): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"model": "models/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.batch_embed_contents(request) + + +@pytest.mark.parametrize( + "request_type", + [ + generative_service.BatchEmbedContentsRequest, + dict, + ], +) +def test_batch_embed_contents_rest_call_success(request_type): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"model": "models/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = generative_service.BatchEmbedContentsResponse() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = generative_service.BatchEmbedContentsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.batch_embed_contents(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, generative_service.BatchEmbedContentsResponse) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_batch_embed_contents_rest_interceptors(null_interceptor): + transport = transports.GenerativeServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.GenerativeServiceRestInterceptor(), + ) + client = GenerativeServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.GenerativeServiceRestInterceptor, "post_batch_embed_contents" + ) as post, mock.patch.object( + transports.GenerativeServiceRestInterceptor, + "post_batch_embed_contents_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.GenerativeServiceRestInterceptor, "pre_batch_embed_contents" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = generative_service.BatchEmbedContentsRequest.pb( + generative_service.BatchEmbedContentsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = generative_service.BatchEmbedContentsResponse.to_json( + generative_service.BatchEmbedContentsResponse() + ) + req.return_value.content = return_value + + request = generative_service.BatchEmbedContentsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = generative_service.BatchEmbedContentsResponse() + post_with_metadata.return_value = ( + generative_service.BatchEmbedContentsResponse(), + metadata, + ) + + client.batch_embed_contents( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_count_tokens_rest_bad_request( + request_type=generative_service.CountTokensRequest, +): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"model": "models/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.count_tokens(request) + + +@pytest.mark.parametrize( + "request_type", + [ + generative_service.CountTokensRequest, + dict, + ], +) +def test_count_tokens_rest_call_success(request_type): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"model": "models/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = generative_service.CountTokensResponse( + total_tokens=1303, + cached_content_token_count=2746, + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = generative_service.CountTokensResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.count_tokens(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, generative_service.CountTokensResponse) + assert response.total_tokens == 1303 + assert response.cached_content_token_count == 2746 + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_count_tokens_rest_interceptors(null_interceptor): + transport = transports.GenerativeServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.GenerativeServiceRestInterceptor(), + ) + client = GenerativeServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.GenerativeServiceRestInterceptor, "post_count_tokens" + ) as post, mock.patch.object( + transports.GenerativeServiceRestInterceptor, "post_count_tokens_with_metadata" + ) as post_with_metadata, mock.patch.object( + transports.GenerativeServiceRestInterceptor, "pre_count_tokens" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = generative_service.CountTokensRequest.pb( + generative_service.CountTokensRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = generative_service.CountTokensResponse.to_json( + generative_service.CountTokensResponse() + ) + req.return_value.content = return_value + + request = generative_service.CountTokensRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = generative_service.CountTokensResponse() + post_with_metadata.return_value = ( + generative_service.CountTokensResponse(), + metadata, + ) + + client.count_tokens( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_bidi_generate_content_rest_error(): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + with pytest.raises(NotImplementedError) as not_implemented_error: + client.bidi_generate_content({}) + assert "Method BidiGenerateContent is not available over REST transport" in str( + not_implemented_error.value + ) + + +def test_get_operation_rest_bad_request( + request_type=operations_pb2.GetOperationRequest, +): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict( + {"name": "tunedModels/sample1/operations/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.GetOperationRequest, + dict, + ], +) +def test_get_operation_rest(request_type): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {"name": "tunedModels/sample1/operations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_operation(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_list_operations_rest_bad_request( + request_type=operations_pb2.ListOperationsRequest, +): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict({"name": "tunedModels/sample1"}, request) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_operations(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.ListOperationsRequest, + dict, + ], +) +def test_list_operations_rest(request_type): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {"name": "tunedModels/sample1"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.ListOperationsResponse() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_operations(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_initialize_client_w_rest(): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_generate_content_empty_call_rest(): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.generate_content), "__call__") as call: + client.generate_content(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = generative_service.GenerateContentRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_generate_answer_empty_call_rest(): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.generate_answer), "__call__") as call: + client.generate_answer(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = generative_service.GenerateAnswerRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_stream_generate_content_empty_call_rest(): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.stream_generate_content), "__call__" + ) as call: + client.stream_generate_content(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = generative_service.GenerateContentRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_embed_content_empty_call_rest(): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.embed_content), "__call__") as call: + client.embed_content(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = generative_service.EmbedContentRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_batch_embed_contents_empty_call_rest(): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.batch_embed_contents), "__call__" + ) as call: + client.batch_embed_contents(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = generative_service.BatchEmbedContentsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_count_tokens_empty_call_rest(): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.count_tokens), "__call__") as call: + client.count_tokens(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = generative_service.CountTokensRequest() + + assert args[0] == request_msg + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.GenerativeServiceGrpcTransport, + ) + + +def test_generative_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.GenerativeServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_generative_service_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.ai.generativelanguage_v1alpha.services.generative_service.transports.GenerativeServiceTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.GenerativeServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + "generate_content", + "generate_answer", + "stream_generate_content", + "embed_content", + "batch_embed_contents", + "count_tokens", + "bidi_generate_content", + "get_operation", + "list_operations", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_generative_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.ai.generativelanguage_v1alpha.services.generative_service.transports.GenerativeServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.GenerativeServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=(), + quota_project_id="octopus", + ) + + +def test_generative_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.ai.generativelanguage_v1alpha.services.generative_service.transports.GenerativeServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.GenerativeServiceTransport() + adc.assert_called_once() + + +def test_generative_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + GenerativeServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=(), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.GenerativeServiceGrpcTransport, + transports.GenerativeServiceGrpcAsyncIOTransport, + ], +) +def test_generative_service_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=(), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.GenerativeServiceGrpcTransport, + transports.GenerativeServiceGrpcAsyncIOTransport, + transports.GenerativeServiceRestTransport, + ], +) +def test_generative_service_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.GenerativeServiceGrpcTransport, grpc_helpers), + (transports.GenerativeServiceGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +def test_generative_service_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "generativelanguage.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=(), + scopes=["1", "2"], + default_host="generativelanguage.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.GenerativeServiceGrpcTransport, + transports.GenerativeServiceGrpcAsyncIOTransport, + ], +) +def test_generative_service_grpc_transport_client_cert_source_for_mtls(transport_class): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + +def test_generative_service_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.GenerativeServiceRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_generative_service_host_no_port(transport_name): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="generativelanguage.googleapis.com" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "generativelanguage.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://generativelanguage.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_generative_service_host_with_port(transport_name): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="generativelanguage.googleapis.com:8000" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "generativelanguage.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://generativelanguage.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_generative_service_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = GenerativeServiceClient( + credentials=creds1, + transport=transport_name, + ) + client2 = GenerativeServiceClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.generate_content._session + session2 = client2.transport.generate_content._session + assert session1 != session2 + session1 = client1.transport.generate_answer._session + session2 = client2.transport.generate_answer._session + assert session1 != session2 + session1 = client1.transport.stream_generate_content._session + session2 = client2.transport.stream_generate_content._session + assert session1 != session2 + session1 = client1.transport.embed_content._session + session2 = client2.transport.embed_content._session + assert session1 != session2 + session1 = client1.transport.batch_embed_contents._session + session2 = client2.transport.batch_embed_contents._session + assert session1 != session2 + session1 = client1.transport.count_tokens._session + session2 = client2.transport.count_tokens._session + assert session1 != session2 + session1 = client1.transport.bidi_generate_content._session + session2 = client2.transport.bidi_generate_content._session + assert session1 != session2 + + +def test_generative_service_grpc_transport_channel(): + 
channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.GenerativeServiceGrpcTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +def test_generative_service_grpc_asyncio_transport_channel(): + channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.GenerativeServiceGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize( + "transport_class", + [ + transports.GenerativeServiceGrpcTransport, + transports.GenerativeServiceGrpcAsyncIOTransport, + ], +) +def test_generative_service_transport_channel_mtls_with_client_cert_source( + transport_class, +): + with mock.patch( + "grpc.ssl_channel_credentials", autospec=True + ) as grpc_ssl_channel_cred: + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. 
+@pytest.mark.parametrize( + "transport_class", + [ + transports.GenerativeServiceGrpcTransport, + transports.GenerativeServiceGrpcAsyncIOTransport, + ], +) +def test_generative_service_transport_channel_mtls_with_adc(transport_class): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_cached_content_path(): + id = "squid" + expected = "cachedContents/{id}".format( + id=id, + ) + actual = GenerativeServiceClient.cached_content_path(id) + assert expected == actual + + +def test_parse_cached_content_path(): + expected = { + "id": "clam", + } + path = GenerativeServiceClient.cached_content_path(**expected) + + # Check that the path construction is reversible. + actual = GenerativeServiceClient.parse_cached_content_path(path) + assert expected == actual + + +def test_model_path(): + model = "whelk" + expected = "models/{model}".format( + model=model, + ) + actual = GenerativeServiceClient.model_path(model) + assert expected == actual + + +def test_parse_model_path(): + expected = { + "model": "octopus", + } + path = GenerativeServiceClient.model_path(**expected) + + # Check that the path construction is reversible. + actual = GenerativeServiceClient.parse_model_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "oyster" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = GenerativeServiceClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "nudibranch", + } + path = GenerativeServiceClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = GenerativeServiceClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "cuttlefish" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = GenerativeServiceClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "mussel", + } + path = GenerativeServiceClient.common_folder_path(**expected) + + # Check that the path construction is reversible. 
+ actual = GenerativeServiceClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "winkle" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = GenerativeServiceClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nautilus", + } + path = GenerativeServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = GenerativeServiceClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "scallop" + expected = "projects/{project}".format( + project=project, + ) + actual = GenerativeServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "abalone", + } + path = GenerativeServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = GenerativeServiceClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "squid" + location = "clam" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = GenerativeServiceClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "whelk", + "location": "octopus", + } + path = GenerativeServiceClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = GenerativeServiceClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.GenerativeServiceTransport, "_prep_wrapped_messages" + ) as prep: + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.GenerativeServiceTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = GenerativeServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +def test_get_operation(transport: str = "grpc"): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + response = client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, operations_pb2.Operation) + + +@pytest.mark.asyncio +async def test_get_operation_async(transport: str = "grpc_asyncio"): + client = GenerativeServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_get_operation_field_headers(): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = operations_pb2.Operation() + + client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_operation_field_headers_async(): + client = GenerativeServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_get_operation_from_dict(): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation() + + response = client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_get_operation_from_dict_async(): + client = GenerativeServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_operations(transport: str = "grpc"): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + response = client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +@pytest.mark.asyncio +async def test_list_operations_async(transport: str = "grpc_asyncio"): + client = GenerativeServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_list_operations_field_headers(): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = operations_pb2.ListOperationsResponse() + + client.list_operations(request) + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_operations_field_headers_async(): + client = GenerativeServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_list_operations_from_dict(): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + + response = client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_list_operations_from_dict_async(): + client = GenerativeServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_transport_close_grpc(): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc" + ) + with mock.patch.object( + type(getattr(client.transport, "_grpc_channel")), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +@pytest.mark.asyncio +async def test_transport_close_grpc_asyncio(): + client = GenerativeServiceAsyncClient( + credentials=async_anonymous_credentials(), transport="grpc_asyncio" + ) + with mock.patch.object( + type(getattr(client.transport, "_grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_transport_close_rest(): + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + with mock.patch.object( + type(getattr(client.transport, "_session")), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + "grpc", + ] + for transport in transports: + client = GenerativeServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. + with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (GenerativeServiceClient, transports.GenerativeServiceGrpcTransport), + ( + GenerativeServiceAsyncClient, + transports.GenerativeServiceGrpcAsyncIOTransport, + ), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1alpha/test_model_service.py b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1alpha/test_model_service.py new file mode 100644 index 000000000000..3cc274de06d4 --- /dev/null +++ b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1alpha/test_model_service.py @@ -0,0 +1,7984 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +from collections.abc import AsyncIterable, Iterable +import json +import math + +from google.api_core import api_core_version +from google.protobuf import json_format +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +try: + from google.auth.aio import credentials as ga_credentials_async + + HAS_GOOGLE_AUTH_AIO = True +except ImportError: # pragma: NO COVER + HAS_GOOGLE_AUTH_AIO = False + +from google.api_core import ( + future, + gapic_v1, + grpc_helpers, + grpc_helpers_async, + operation, + operations_v1, + path_template, +) +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import operation_async # type: ignore +from google.api_core import retry as retries +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + +from google.ai.generativelanguage_v1alpha.services.model_service import ( + ModelServiceAsyncClient, + ModelServiceClient, + pagers, + transports, +) +from google.ai.generativelanguage_v1alpha.types import tuned_model as gag_tuned_model +from google.ai.generativelanguage_v1alpha.types import model, model_service +from google.ai.generativelanguage_v1alpha.types import tuned_model + +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + + +async def mock_async_gen(data, chunk_size=1): + for i in range(0, len(data)): # pragma: NO COVER + chunk = data[i : i + chunk_size] + yield chunk.encode("utf-8") + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded. +# See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107. +def async_anonymous_credentials(): + if HAS_GOOGLE_AUTH_AIO: + return ga_credentials_async.AnonymousCredentials() + return ga_credentials.AnonymousCredentials() + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +# If default endpoint template is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint template so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint_template(client): + return ( + "test.{UNIVERSE_DOMAIN}" + if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) + else client._DEFAULT_ENDPOINT_TEMPLATE + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert ModelServiceClient._get_default_mtls_endpoint(None) is None + assert ( + ModelServiceClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + ) + assert ( + ModelServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + ModelServiceClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + ModelServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert ModelServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +def test__read_environment_variables(): + assert ModelServiceClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert ModelServiceClient._read_environment_variables() == (True, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert ModelServiceClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + ModelServiceClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + assert ModelServiceClient._read_environment_variables() == ( + False, + "never", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + assert ModelServiceClient._read_environment_variables() == ( + False, + "always", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): + assert ModelServiceClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + ModelServiceClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): + assert ModelServiceClient._read_environment_variables() == ( + False, + "auto", + "foo.com", + ) + + +def test__get_client_cert_source(): + mock_provided_cert_source = mock.Mock() + mock_default_cert_source = mock.Mock() + + assert ModelServiceClient._get_client_cert_source(None, False) is None + assert ( + ModelServiceClient._get_client_cert_source(mock_provided_cert_source, False) + 
is None + ) + assert ( + ModelServiceClient._get_client_cert_source(mock_provided_cert_source, True) + == mock_provided_cert_source + ) + + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", return_value=True + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_default_cert_source, + ): + assert ( + ModelServiceClient._get_client_cert_source(None, True) + is mock_default_cert_source + ) + assert ( + ModelServiceClient._get_client_cert_source( + mock_provided_cert_source, "true" + ) + is mock_provided_cert_source + ) + + +@mock.patch.object( + ModelServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(ModelServiceClient), +) +@mock.patch.object( + ModelServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(ModelServiceAsyncClient), +) +def test__get_api_endpoint(): + api_override = "foo.com" + mock_client_cert_source = mock.Mock() + default_universe = ModelServiceClient._DEFAULT_UNIVERSE + default_endpoint = ModelServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = ModelServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + assert ( + ModelServiceClient._get_api_endpoint( + api_override, mock_client_cert_source, default_universe, "always" + ) + == api_override + ) + assert ( + ModelServiceClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "auto" + ) + == ModelServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + ModelServiceClient._get_api_endpoint(None, None, default_universe, "auto") + == default_endpoint + ) + assert ( + ModelServiceClient._get_api_endpoint(None, None, default_universe, "always") + == ModelServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + ModelServiceClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "always" + ) + == ModelServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + ModelServiceClient._get_api_endpoint(None, None, mock_universe, "never") + == mock_endpoint + ) + assert ( + ModelServiceClient._get_api_endpoint(None, None, default_universe, "never") + == default_endpoint + ) + + with pytest.raises(MutualTLSChannelError) as excinfo: + ModelServiceClient._get_api_endpoint( + None, mock_client_cert_source, mock_universe, "auto" + ) + assert ( + str(excinfo.value) + == "mTLS is not supported in any universe other than googleapis.com." + ) + + +def test__get_universe_domain(): + client_universe_domain = "foo.com" + universe_domain_env = "bar.com" + + assert ( + ModelServiceClient._get_universe_domain( + client_universe_domain, universe_domain_env + ) + == client_universe_domain + ) + assert ( + ModelServiceClient._get_universe_domain(None, universe_domain_env) + == universe_domain_env + ) + assert ( + ModelServiceClient._get_universe_domain(None, None) + == ModelServiceClient._DEFAULT_UNIVERSE + ) + + with pytest.raises(ValueError) as excinfo: + ModelServiceClient._get_universe_domain("", None) + assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
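+
+# NOTE: quick reference for the precedence exercised above (a summary of the
+# assertions only, not new behavior; the resolution itself lives in
+# ModelServiceClient._get_universe_domain):
+#
+#   _get_universe_domain("foo.com", "bar.com")  -> "foo.com"          (explicit client option wins)
+#   _get_universe_domain(None, "bar.com")       -> "bar.com"          (env-derived domain is next)
+#   _get_universe_domain(None, None)            -> _DEFAULT_UNIVERSE  ("googleapis.com")
+#   _get_universe_domain("", None)              -> raises ValueError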
+ + +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = ModelServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = ModelServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (ModelServiceClient, "grpc"), + (ModelServiceAsyncClient, "grpc_asyncio"), + (ModelServiceClient, "rest"), + ], +) +def test_model_service_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "generativelanguage.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://generativelanguage.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.ModelServiceGrpcTransport, "grpc"), + (transports.ModelServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.ModelServiceRestTransport, "rest"), + ], +) +def test_model_service_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (ModelServiceClient, "grpc"), + (ModelServiceAsyncClient, "grpc_asyncio"), + (ModelServiceClient, "rest"), + ], +) +def test_model_service_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = 
client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "generativelanguage.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://generativelanguage.googleapis.com" + ) + + +def test_model_service_client_get_transport_class(): + transport = ModelServiceClient.get_transport_class() + available_transports = [ + transports.ModelServiceGrpcTransport, + transports.ModelServiceRestTransport, + ] + assert transport in available_transports + + transport = ModelServiceClient.get_transport_class("grpc") + assert transport == transports.ModelServiceGrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (ModelServiceClient, transports.ModelServiceGrpcTransport, "grpc"), + ( + ModelServiceAsyncClient, + transports.ModelServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (ModelServiceClient, transports.ModelServiceRestTransport, "rest"), + ], +) +@mock.patch.object( + ModelServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(ModelServiceClient), +) +@mock.patch.object( + ModelServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(ModelServiceAsyncClient), +) +def test_model_service_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(ModelServiceClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(ModelServiceClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + (ModelServiceClient, transports.ModelServiceGrpcTransport, "grpc", "true"), + ( + ModelServiceAsyncClient, + transports.ModelServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + (ModelServiceClient, transports.ModelServiceGrpcTransport, "grpc", "false"), + ( + ModelServiceAsyncClient, + transports.ModelServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + (ModelServiceClient, transports.ModelServiceRestTransport, "rest", "true"), + (ModelServiceClient, transports.ModelServiceRestTransport, "rest", "false"), + ], +) +@mock.patch.object( + ModelServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(ModelServiceClient), +) +@mock.patch.object( + ModelServiceAsyncClient, + 
"_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(ModelServiceAsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_model_service_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [ModelServiceClient, ModelServiceAsyncClient]) +@mock.patch.object( + ModelServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(ModelServiceClient) +) +@mock.patch.object( + ModelServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(ModelServiceAsyncClient), +) +def test_model_service_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + +@pytest.mark.parametrize("client_class", [ModelServiceClient, ModelServiceAsyncClient]) +@mock.patch.object( + ModelServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(ModelServiceClient), +) +@mock.patch.object( + ModelServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(ModelServiceAsyncClient), +) +def test_model_service_client_client_api_endpoint(client_class): + mock_client_cert_source = client_cert_source_callback + api_override = "foo.com" + default_universe = ModelServiceClient._DEFAULT_UNIVERSE + default_endpoint = ModelServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = ModelServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", + # use ClientOptions.api_endpoint as the api endpoint regardless. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ): + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=api_override + ) + client = client_class( + client_options=options, + credentials=ga_credentials.AnonymousCredentials(), + ) + assert client.api_endpoint == api_override + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, + # and ClientOptions.universe_domain="bar.com", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. + options = client_options.ClientOptions() + universe_exists = hasattr(options, "universe_domain") + if universe_exists: + options = client_options.ClientOptions(universe_domain=mock_universe) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + else: + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == ( + mock_endpoint if universe_exists else default_endpoint + ) + assert client.universe_domain == ( + mock_universe if universe_exists else default_universe + ) + + # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + options = client_options.ClientOptions() + if hasattr(options, "universe_domain"): + delattr(options, "universe_domain") + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == default_endpoint + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (ModelServiceClient, transports.ModelServiceGrpcTransport, "grpc"), + ( + ModelServiceAsyncClient, + transports.ModelServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (ModelServiceClient, transports.ModelServiceRestTransport, "rest"), + ], +) +def test_model_service_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + ModelServiceClient, + transports.ModelServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + ModelServiceAsyncClient, + transports.ModelServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + (ModelServiceClient, transports.ModelServiceRestTransport, "rest", None), + ], +) +def test_model_service_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_model_service_client_client_options_from_dict(): + with mock.patch( + "google.ai.generativelanguage_v1alpha.services.model_service.transports.ModelServiceGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = ModelServiceClient(client_options={"api_endpoint": "squid.clam.whelk"}) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + ModelServiceClient, + transports.ModelServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + ModelServiceAsyncClient, + transports.ModelServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_model_service_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. 
+ with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "generativelanguage.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=(), + scopes=None, + default_host="generativelanguage.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + model_service.GetModelRequest, + dict, + ], +) +def test_get_model(request_type, transport: str = "grpc"): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_model), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = model.Model( + name="name_value", + base_model_id="base_model_id_value", + version="version_value", + display_name="display_name_value", + description="description_value", + input_token_limit=1838, + output_token_limit=1967, + supported_generation_methods=["supported_generation_methods_value"], + temperature=0.1198, + max_temperature=0.16190000000000002, + top_p=0.546, + top_k=541, + ) + response = client.get_model(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = model_service.GetModelRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, model.Model) + assert response.name == "name_value" + assert response.base_model_id == "base_model_id_value" + assert response.version == "version_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.input_token_limit == 1838 + assert response.output_token_limit == 1967 + assert response.supported_generation_methods == [ + "supported_generation_methods_value" + ] + assert math.isclose(response.temperature, 0.1198, rel_tol=1e-6) + assert math.isclose(response.max_temperature, 0.16190000000000002, rel_tol=1e-6) + assert math.isclose(response.top_p, 0.546, rel_tol=1e-6) + assert response.top_k == 541 + + +def test_get_model_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. 
+ request = model_service.GetModelRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_model), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_model(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == model_service.GetModelRequest( + name="name_value", + ) + + +def test_get_model_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_model in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_model] = mock_rpc + request = {} + client.get_model(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_model(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_model_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ModelServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_model + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.get_model + ] = mock_rpc + + request = {} + await client.get_model(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.get_model(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_model_async( + transport: str = "grpc_asyncio", request_type=model_service.GetModelRequest +): + client = ModelServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_model), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + model.Model( + name="name_value", + base_model_id="base_model_id_value", + version="version_value", + display_name="display_name_value", + description="description_value", + input_token_limit=1838, + output_token_limit=1967, + supported_generation_methods=["supported_generation_methods_value"], + temperature=0.1198, + max_temperature=0.16190000000000002, + top_p=0.546, + top_k=541, + ) + ) + response = await client.get_model(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = model_service.GetModelRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, model.Model) + assert response.name == "name_value" + assert response.base_model_id == "base_model_id_value" + assert response.version == "version_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.input_token_limit == 1838 + assert response.output_token_limit == 1967 + assert response.supported_generation_methods == [ + "supported_generation_methods_value" + ] + assert math.isclose(response.temperature, 0.1198, rel_tol=1e-6) + assert math.isclose(response.max_temperature, 0.16190000000000002, rel_tol=1e-6) + assert math.isclose(response.top_p, 0.546, rel_tol=1e-6) + assert response.top_k == 541 + + +@pytest.mark.asyncio +async def test_get_model_async_from_dict(): + await test_get_model_async(request_type=dict) + + +def test_get_model_field_headers(): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = model_service.GetModelRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_model), "__call__") as call: + call.return_value = model.Model() + client.get_model(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_model_field_headers_async(): + client = ModelServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = model_service.GetModelRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_model), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(model.Model()) + await client.get_model(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_model_flattened(): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_model), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = model.Model() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_model( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_model_flattened_error(): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_model( + model_service.GetModelRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_model_flattened_async(): + client = ModelServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_model), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = model.Model() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(model.Model()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_model( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_model_flattened_error_async(): + client = ModelServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_model( + model_service.GetModelRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + model_service.ListModelsRequest, + dict, + ], +) +def test_list_models(request_type, transport: str = "grpc"): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_models), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = model_service.ListModelsResponse( + next_page_token="next_page_token_value", + ) + response = client.list_models(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = model_service.ListModelsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListModelsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_models_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = model_service.ListModelsRequest( + page_token="page_token_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_models), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_models(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == model_service.ListModelsRequest( + page_token="page_token_value", + ) + + +def test_list_models_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_models in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_models] = mock_rpc + request = {} + client.list_models(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_models(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_models_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ModelServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_models + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.list_models + ] = mock_rpc + + request = {} + await client.list_models(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + await client.list_models(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_models_async( + transport: str = "grpc_asyncio", request_type=model_service.ListModelsRequest +): + client = ModelServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_models), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + model_service.ListModelsResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_models(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = model_service.ListModelsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListModelsAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_models_async_from_dict(): + await test_list_models_async(request_type=dict) + + +def test_list_models_flattened(): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_models), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = model_service.ListModelsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_models( + page_size=951, + page_token="page_token_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].page_size + mock_val = 951 + assert arg == mock_val + arg = args[0].page_token + mock_val = "page_token_value" + assert arg == mock_val + + +def test_list_models_flattened_error(): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_models( + model_service.ListModelsRequest(), + page_size=951, + page_token="page_token_value", + ) + + +@pytest.mark.asyncio +async def test_list_models_flattened_async(): + client = ModelServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_models), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = model_service.ListModelsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + model_service.ListModelsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.list_models( + page_size=951, + page_token="page_token_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].page_size + mock_val = 951 + assert arg == mock_val + arg = args[0].page_token + mock_val = "page_token_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_models_flattened_error_async(): + client = ModelServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_models( + model_service.ListModelsRequest(), + page_size=951, + page_token="page_token_value", + ) + + +def test_list_models_pager(transport_name: str = "grpc"): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_models), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + model_service.ListModelsResponse( + models=[ + model.Model(), + model.Model(), + model.Model(), + ], + next_page_token="abc", + ), + model_service.ListModelsResponse( + models=[], + next_page_token="def", + ), + model_service.ListModelsResponse( + models=[ + model.Model(), + ], + next_page_token="ghi", + ), + model_service.ListModelsResponse( + models=[ + model.Model(), + model.Model(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + pager = client.list_models(request={}, retry=retry, timeout=timeout) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, model.Model) for i in results) + + +def test_list_models_pages(transport_name: str = "grpc"): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_models), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + model_service.ListModelsResponse( + models=[ + model.Model(), + model.Model(), + model.Model(), + ], + next_page_token="abc", + ), + model_service.ListModelsResponse( + models=[], + next_page_token="def", + ), + model_service.ListModelsResponse( + models=[ + model.Model(), + ], + next_page_token="ghi", + ), + model_service.ListModelsResponse( + models=[ + model.Model(), + model.Model(), + ], + ), + RuntimeError, + ) + pages = list(client.list_models(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_models_async_pager(): + client = ModelServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_models), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + model_service.ListModelsResponse( + models=[ + model.Model(), + model.Model(), + model.Model(), + ], + next_page_token="abc", + ), + model_service.ListModelsResponse( + models=[], + next_page_token="def", + ), + model_service.ListModelsResponse( + models=[ + model.Model(), + ], + next_page_token="ghi", + ), + model_service.ListModelsResponse( + models=[ + model.Model(), + model.Model(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_models( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, model.Model) for i in responses) + + +@pytest.mark.asyncio +async def test_list_models_async_pages(): + client = ModelServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_models), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + model_service.ListModelsResponse( + models=[ + model.Model(), + model.Model(), + model.Model(), + ], + next_page_token="abc", + ), + model_service.ListModelsResponse( + models=[], + next_page_token="def", + ), + model_service.ListModelsResponse( + models=[ + model.Model(), + ], + next_page_token="ghi", + ), + model_service.ListModelsResponse( + models=[ + model.Model(), + model.Model(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_models(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + model_service.GetTunedModelRequest, + dict, + ], +) +def test_get_tuned_model(request_type, transport: str = "grpc"): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_tuned_model), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = tuned_model.TunedModel( + name="name_value", + display_name="display_name_value", + description="description_value", + temperature=0.1198, + top_p=0.546, + top_k=541, + state=tuned_model.TunedModel.State.CREATING, + reader_project_numbers=[2340], + base_model="base_model_value", + ) + response = client.get_tuned_model(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = model_service.GetTunedModelRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, tuned_model.TunedModel) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert math.isclose(response.temperature, 0.1198, rel_tol=1e-6) + assert math.isclose(response.top_p, 0.546, rel_tol=1e-6) + assert response.top_k == 541 + assert response.state == tuned_model.TunedModel.State.CREATING + assert response.reader_project_numbers == [2340] + + +def test_get_tuned_model_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = model_service.GetTunedModelRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_tuned_model), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_tuned_model(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == model_service.GetTunedModelRequest( + name="name_value", + ) + + +def test_get_tuned_model_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_tuned_model in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_tuned_model] = mock_rpc + request = {} + client.get_tuned_model(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_tuned_model(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_tuned_model_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ModelServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_tuned_model + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.get_tuned_model + ] = mock_rpc + + request = {} + await client.get_tuned_model(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.get_tuned_model(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_tuned_model_async( + transport: str = "grpc_asyncio", request_type=model_service.GetTunedModelRequest +): + client = ModelServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_tuned_model), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + tuned_model.TunedModel( + name="name_value", + display_name="display_name_value", + description="description_value", + temperature=0.1198, + top_p=0.546, + top_k=541, + state=tuned_model.TunedModel.State.CREATING, + reader_project_numbers=[2340], + ) + ) + response = await client.get_tuned_model(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = model_service.GetTunedModelRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, tuned_model.TunedModel) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert math.isclose(response.temperature, 0.1198, rel_tol=1e-6) + assert math.isclose(response.top_p, 0.546, rel_tol=1e-6) + assert response.top_k == 541 + assert response.state == tuned_model.TunedModel.State.CREATING + assert response.reader_project_numbers == [2340] + + +@pytest.mark.asyncio +async def test_get_tuned_model_async_from_dict(): + await test_get_tuned_model_async(request_type=dict) + + +def test_get_tuned_model_field_headers(): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = model_service.GetTunedModelRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_tuned_model), "__call__") as call: + call.return_value = tuned_model.TunedModel() + client.get_tuned_model(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_tuned_model_field_headers_async(): + client = ModelServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = model_service.GetTunedModelRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_tuned_model), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + tuned_model.TunedModel() + ) + await client.get_tuned_model(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_tuned_model_flattened(): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_tuned_model), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = tuned_model.TunedModel() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_tuned_model( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_tuned_model_flattened_error(): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_tuned_model( + model_service.GetTunedModelRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_tuned_model_flattened_async(): + client = ModelServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_tuned_model), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = tuned_model.TunedModel() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + tuned_model.TunedModel() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_tuned_model( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_tuned_model_flattened_error_async(): + client = ModelServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_tuned_model( + model_service.GetTunedModelRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + model_service.ListTunedModelsRequest, + dict, + ], +) +def test_list_tuned_models(request_type, transport: str = "grpc"): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_tuned_models), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = model_service.ListTunedModelsResponse( + next_page_token="next_page_token_value", + ) + response = client.list_tuned_models(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = model_service.ListTunedModelsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListTunedModelsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_tuned_models_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = model_service.ListTunedModelsRequest( + page_token="page_token_value", + filter="filter_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_tuned_models), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_tuned_models(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == model_service.ListTunedModelsRequest( + page_token="page_token_value", + filter="filter_value", + ) + + +def test_list_tuned_models_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_tuned_models in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_tuned_models + ] = mock_rpc + request = {} + client.list_tuned_models(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_tuned_models(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_tuned_models_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ModelServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_tuned_models + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.list_tuned_models + ] = mock_rpc + + request = {} + await client.list_tuned_models(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.list_tuned_models(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_tuned_models_async( + transport: str = "grpc_asyncio", request_type=model_service.ListTunedModelsRequest +): + client = ModelServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_tuned_models), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
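+        # FakeUnaryUnaryCall wraps the canned response so the async client can
+        # await it as if it came from a real gRPC channel.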
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + model_service.ListTunedModelsResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_tuned_models(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = model_service.ListTunedModelsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListTunedModelsAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_tuned_models_async_from_dict(): + await test_list_tuned_models_async(request_type=dict) + + +def test_list_tuned_models_flattened(): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_tuned_models), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = model_service.ListTunedModelsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_tuned_models( + page_size=951, + page_token="page_token_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].page_size + mock_val = 951 + assert arg == mock_val + arg = args[0].page_token + mock_val = "page_token_value" + assert arg == mock_val + + +def test_list_tuned_models_flattened_error(): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_tuned_models( + model_service.ListTunedModelsRequest(), + page_size=951, + page_token="page_token_value", + ) + + +@pytest.mark.asyncio +async def test_list_tuned_models_flattened_async(): + client = ModelServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_tuned_models), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = model_service.ListTunedModelsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + model_service.ListTunedModelsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_tuned_models( + page_size=951, + page_token="page_token_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].page_size + mock_val = 951 + assert arg == mock_val + arg = args[0].page_token + mock_val = "page_token_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_tuned_models_flattened_error_async(): + client = ModelServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.list_tuned_models( + model_service.ListTunedModelsRequest(), + page_size=951, + page_token="page_token_value", + ) + + +def test_list_tuned_models_pager(transport_name: str = "grpc"): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_tuned_models), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + model_service.ListTunedModelsResponse( + tuned_models=[ + tuned_model.TunedModel(), + tuned_model.TunedModel(), + tuned_model.TunedModel(), + ], + next_page_token="abc", + ), + model_service.ListTunedModelsResponse( + tuned_models=[], + next_page_token="def", + ), + model_service.ListTunedModelsResponse( + tuned_models=[ + tuned_model.TunedModel(), + ], + next_page_token="ghi", + ), + model_service.ListTunedModelsResponse( + tuned_models=[ + tuned_model.TunedModel(), + tuned_model.TunedModel(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + pager = client.list_tuned_models(request={}, retry=retry, timeout=timeout) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, tuned_model.TunedModel) for i in results) + + +def test_list_tuned_models_pages(transport_name: str = "grpc"): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_tuned_models), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + model_service.ListTunedModelsResponse( + tuned_models=[ + tuned_model.TunedModel(), + tuned_model.TunedModel(), + tuned_model.TunedModel(), + ], + next_page_token="abc", + ), + model_service.ListTunedModelsResponse( + tuned_models=[], + next_page_token="def", + ), + model_service.ListTunedModelsResponse( + tuned_models=[ + tuned_model.TunedModel(), + ], + next_page_token="ghi", + ), + model_service.ListTunedModelsResponse( + tuned_models=[ + tuned_model.TunedModel(), + tuned_model.TunedModel(), + ], + ), + RuntimeError, + ) + pages = list(client.list_tuned_models(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_tuned_models_async_pager(): + client = ModelServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_tuned_models), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
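+        # The trailing RuntimeError guards against the pager requesting an
+        # extra page once the final response (no next_page_token) is consumed.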
+ call.side_effect = ( + model_service.ListTunedModelsResponse( + tuned_models=[ + tuned_model.TunedModel(), + tuned_model.TunedModel(), + tuned_model.TunedModel(), + ], + next_page_token="abc", + ), + model_service.ListTunedModelsResponse( + tuned_models=[], + next_page_token="def", + ), + model_service.ListTunedModelsResponse( + tuned_models=[ + tuned_model.TunedModel(), + ], + next_page_token="ghi", + ), + model_service.ListTunedModelsResponse( + tuned_models=[ + tuned_model.TunedModel(), + tuned_model.TunedModel(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_tuned_models( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, tuned_model.TunedModel) for i in responses) + + +@pytest.mark.asyncio +async def test_list_tuned_models_async_pages(): + client = ModelServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_tuned_models), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + model_service.ListTunedModelsResponse( + tuned_models=[ + tuned_model.TunedModel(), + tuned_model.TunedModel(), + tuned_model.TunedModel(), + ], + next_page_token="abc", + ), + model_service.ListTunedModelsResponse( + tuned_models=[], + next_page_token="def", + ), + model_service.ListTunedModelsResponse( + tuned_models=[ + tuned_model.TunedModel(), + ], + next_page_token="ghi", + ), + model_service.ListTunedModelsResponse( + tuned_models=[ + tuned_model.TunedModel(), + tuned_model.TunedModel(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_tuned_models(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + model_service.CreateTunedModelRequest, + dict, + ], +) +def test_create_tuned_model(request_type, transport: str = "grpc"): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_tuned_model), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.create_tuned_model(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = model_service.CreateTunedModelRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
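+    # create_tuned_model is a long-running operation, so the client surfaces
+    # the mocked operations_pb2.Operation as an operation future.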
+ assert isinstance(response, future.Future) + + +def test_create_tuned_model_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = model_service.CreateTunedModelRequest( + tuned_model_id="tuned_model_id_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_tuned_model), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.create_tuned_model(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == model_service.CreateTunedModelRequest( + tuned_model_id="tuned_model_id_value", + ) + + +def test_create_tuned_model_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.create_tuned_model in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_tuned_model + ] = mock_rpc + request = {} + client.create_tuned_model(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_tuned_model(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_tuned_model_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ModelServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.create_tuned_model + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.create_tuned_model + ] = mock_rpc + + request = {} + await client.create_tuned_model(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.create_tuned_model(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_tuned_model_async( + transport: str = "grpc_asyncio", request_type=model_service.CreateTunedModelRequest +): + client = ModelServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_tuned_model), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.create_tuned_model(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = model_service.CreateTunedModelRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_tuned_model_async_from_dict(): + await test_create_tuned_model_async(request_type=dict) + + +def test_create_tuned_model_flattened(): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_tuned_model), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ client.create_tuned_model( + tuned_model=gag_tuned_model.TunedModel( + tuned_model_source=gag_tuned_model.TunedModelSource( + tuned_model="tuned_model_value" + ) + ), + tuned_model_id="tuned_model_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].tuned_model + mock_val = gag_tuned_model.TunedModel( + tuned_model_source=gag_tuned_model.TunedModelSource( + tuned_model="tuned_model_value" + ) + ) + assert arg == mock_val + arg = args[0].tuned_model_id + mock_val = "tuned_model_id_value" + assert arg == mock_val + + +def test_create_tuned_model_flattened_error(): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_tuned_model( + model_service.CreateTunedModelRequest(), + tuned_model=gag_tuned_model.TunedModel( + tuned_model_source=gag_tuned_model.TunedModelSource( + tuned_model="tuned_model_value" + ) + ), + tuned_model_id="tuned_model_id_value", + ) + + +@pytest.mark.asyncio +async def test_create_tuned_model_flattened_async(): + client = ModelServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_tuned_model), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_tuned_model( + tuned_model=gag_tuned_model.TunedModel( + tuned_model_source=gag_tuned_model.TunedModelSource( + tuned_model="tuned_model_value" + ) + ), + tuned_model_id="tuned_model_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].tuned_model + mock_val = gag_tuned_model.TunedModel( + tuned_model_source=gag_tuned_model.TunedModelSource( + tuned_model="tuned_model_value" + ) + ) + assert arg == mock_val + arg = args[0].tuned_model_id + mock_val = "tuned_model_id_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_tuned_model_flattened_error_async(): + client = ModelServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.create_tuned_model( + model_service.CreateTunedModelRequest(), + tuned_model=gag_tuned_model.TunedModel( + tuned_model_source=gag_tuned_model.TunedModelSource( + tuned_model="tuned_model_value" + ) + ), + tuned_model_id="tuned_model_id_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + model_service.UpdateTunedModelRequest, + dict, + ], +) +def test_update_tuned_model(request_type, transport: str = "grpc"): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_tuned_model), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = gag_tuned_model.TunedModel( + name="name_value", + display_name="display_name_value", + description="description_value", + temperature=0.1198, + top_p=0.546, + top_k=541, + state=gag_tuned_model.TunedModel.State.CREATING, + reader_project_numbers=[2340], + base_model="base_model_value", + ) + response = client.update_tuned_model(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = model_service.UpdateTunedModelRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, gag_tuned_model.TunedModel) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert math.isclose(response.temperature, 0.1198, rel_tol=1e-6) + assert math.isclose(response.top_p, 0.546, rel_tol=1e-6) + assert response.top_k == 541 + assert response.state == gag_tuned_model.TunedModel.State.CREATING + assert response.reader_project_numbers == [2340] + + +def test_update_tuned_model_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = model_service.UpdateTunedModelRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_tuned_model), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.update_tuned_model(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == model_service.UpdateTunedModelRequest() + + +def test_update_tuned_model_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.update_tuned_model in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_tuned_model + ] = mock_rpc + request = {} + client.update_tuned_model(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.update_tuned_model(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_tuned_model_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ModelServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.update_tuned_model + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.update_tuned_model + ] = mock_rpc + + request = {} + await client.update_tuned_model(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.update_tuned_model(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_tuned_model_async( + transport: str = "grpc_asyncio", request_type=model_service.UpdateTunedModelRequest +): + client = ModelServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_tuned_model), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gag_tuned_model.TunedModel( + name="name_value", + display_name="display_name_value", + description="description_value", + temperature=0.1198, + top_p=0.546, + top_k=541, + state=gag_tuned_model.TunedModel.State.CREATING, + reader_project_numbers=[2340], + ) + ) + response = await client.update_tuned_model(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = model_service.UpdateTunedModelRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, gag_tuned_model.TunedModel) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert math.isclose(response.temperature, 0.1198, rel_tol=1e-6) + assert math.isclose(response.top_p, 0.546, rel_tol=1e-6) + assert response.top_k == 541 + assert response.state == gag_tuned_model.TunedModel.State.CREATING + assert response.reader_project_numbers == [2340] + + +@pytest.mark.asyncio +async def test_update_tuned_model_async_from_dict(): + await test_update_tuned_model_async(request_type=dict) + + +def test_update_tuned_model_field_headers(): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = model_service.UpdateTunedModelRequest() + + request.tuned_model.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_tuned_model), "__call__" + ) as call: + call.return_value = gag_tuned_model.TunedModel() + client.update_tuned_model(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "tuned_model.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_tuned_model_field_headers_async(): + client = ModelServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = model_service.UpdateTunedModelRequest() + + request.tuned_model.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_tuned_model), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gag_tuned_model.TunedModel() + ) + await client.update_tuned_model(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "tuned_model.name=name_value", + ) in kw["metadata"] + + +def test_update_tuned_model_flattened(): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_tuned_model), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = gag_tuned_model.TunedModel() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_tuned_model( + tuned_model=gag_tuned_model.TunedModel( + tuned_model_source=gag_tuned_model.TunedModelSource( + tuned_model="tuned_model_value" + ) + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].tuned_model + mock_val = gag_tuned_model.TunedModel( + tuned_model_source=gag_tuned_model.TunedModelSource( + tuned_model="tuned_model_value" + ) + ) + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +def test_update_tuned_model_flattened_error(): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_tuned_model( + model_service.UpdateTunedModelRequest(), + tuned_model=gag_tuned_model.TunedModel( + tuned_model_source=gag_tuned_model.TunedModelSource( + tuned_model="tuned_model_value" + ) + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_update_tuned_model_flattened_async(): + client = ModelServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_tuned_model), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = gag_tuned_model.TunedModel() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gag_tuned_model.TunedModel() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_tuned_model( + tuned_model=gag_tuned_model.TunedModel( + tuned_model_source=gag_tuned_model.TunedModelSource( + tuned_model="tuned_model_value" + ) + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].tuned_model + mock_val = gag_tuned_model.TunedModel( + tuned_model_source=gag_tuned_model.TunedModelSource( + tuned_model="tuned_model_value" + ) + ) + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_update_tuned_model_flattened_error_async(): + client = ModelServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.update_tuned_model( + model_service.UpdateTunedModelRequest(), + tuned_model=gag_tuned_model.TunedModel( + tuned_model_source=gag_tuned_model.TunedModelSource( + tuned_model="tuned_model_value" + ) + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + model_service.DeleteTunedModelRequest, + dict, + ], +) +def test_delete_tuned_model(request_type, transport: str = "grpc"): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_tuned_model), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_tuned_model(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = model_service.DeleteTunedModelRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_tuned_model_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = model_service.DeleteTunedModelRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_tuned_model), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.delete_tuned_model(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == model_service.DeleteTunedModelRequest( + name="name_value", + ) + + +def test_delete_tuned_model_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.delete_tuned_model in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_tuned_model + ] = mock_rpc + request = {} + client.delete_tuned_model(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.delete_tuned_model(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_tuned_model_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ModelServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.delete_tuned_model + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.delete_tuned_model + ] = mock_rpc + + request = {} + await client.delete_tuned_model(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.delete_tuned_model(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_tuned_model_async( + transport: str = "grpc_asyncio", request_type=model_service.DeleteTunedModelRequest +): + client = ModelServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_tuned_model), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_tuned_model(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = model_service.DeleteTunedModelRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_tuned_model_async_from_dict(): + await test_delete_tuned_model_async(request_type=dict) + + +def test_delete_tuned_model_field_headers(): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = model_service.DeleteTunedModelRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_tuned_model), "__call__" + ) as call: + call.return_value = None + client.delete_tuned_model(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_tuned_model_field_headers_async(): + client = ModelServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = model_service.DeleteTunedModelRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_tuned_model), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_tuned_model(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_tuned_model_flattened(): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_tuned_model), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_tuned_model( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_tuned_model_flattened_error(): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_tuned_model( + model_service.DeleteTunedModelRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_tuned_model_flattened_async(): + client = ModelServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_tuned_model), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_tuned_model( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_tuned_model_flattened_error_async(): + client = ModelServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.delete_tuned_model( + model_service.DeleteTunedModelRequest(), + name="name_value", + ) + + +def test_get_model_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_model in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_model] = mock_rpc + + request = {} + client.get_model(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_model(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_model_rest_required_fields(request_type=model_service.GetModelRequest): + transport_class = transports.ModelServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_model._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_model._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = model.Model() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = model.Model.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_model(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_model_rest_unset_required_fields(): + transport = transports.ModelServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_model._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +def test_get_model_rest_flattened(): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = model.Model() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "models/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = model.Model.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.get_model(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{name=models/*}" % client.transport._host, args[1] + ) + + +def test_get_model_rest_flattened_error(transport: str = "rest"): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_model( + model_service.GetModelRequest(), + name="name_value", + ) + + +def test_list_models_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_models in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_models] = mock_rpc + + request = {} + client.list_models(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_models(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_models_rest_flattened(): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = model_service.ListModelsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {} + + # get truthy value for each flattened field + mock_args = dict( + page_size=951, + page_token="page_token_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = model_service.ListModelsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.list_models(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/models" % client.transport._host, args[1] + ) + + +def test_list_models_rest_flattened_error(transport: str = "rest"): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_models( + model_service.ListModelsRequest(), + page_size=951, + page_token="page_token_value", + ) + + +def test_list_models_rest_pager(transport: str = "rest"): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + model_service.ListModelsResponse( + models=[ + model.Model(), + model.Model(), + model.Model(), + ], + next_page_token="abc", + ), + model_service.ListModelsResponse( + models=[], + next_page_token="def", + ), + model_service.ListModelsResponse( + models=[ + model.Model(), + ], + next_page_token="ghi", + ), + model_service.ListModelsResponse( + models=[ + model.Model(), + model.Model(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(model_service.ListModelsResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {} + + pager = client.list_models(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, model.Model) for i in results) + + pages = list(client.list_models(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_get_tuned_model_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_tuned_model in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_tuned_model] = mock_rpc + + request = {} + client.get_tuned_model(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_tuned_model(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_tuned_model_rest_required_fields( + request_type=model_service.GetTunedModelRequest, +): + transport_class = transports.ModelServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_tuned_model._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_tuned_model._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = tuned_model.TunedModel() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = tuned_model.TunedModel.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_tuned_model(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_tuned_model_rest_unset_required_fields(): + transport = transports.ModelServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_tuned_model._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +def test_get_tuned_model_rest_flattened(): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = tuned_model.TunedModel() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "tunedModels/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = tuned_model.TunedModel.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.get_tuned_model(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{name=tunedModels/*}" % client.transport._host, args[1] + ) + + +def test_get_tuned_model_rest_flattened_error(transport: str = "rest"): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_tuned_model( + model_service.GetTunedModelRequest(), + name="name_value", + ) + + +def test_list_tuned_models_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_tuned_models in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_tuned_models + ] = mock_rpc + + request = {} + client.list_tuned_models(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_tuned_models(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_tuned_models_rest_flattened(): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = model_service.ListTunedModelsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {} + + # get truthy value for each flattened field + mock_args = dict( + page_size=951, + page_token="page_token_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = model_service.ListTunedModelsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.list_tuned_models(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/tunedModels" % client.transport._host, args[1] + ) + + +def test_list_tuned_models_rest_flattened_error(transport: str = "rest"): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_tuned_models( + model_service.ListTunedModelsRequest(), + page_size=951, + page_token="page_token_value", + ) + + +def test_list_tuned_models_rest_pager(transport: str = "rest"): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + model_service.ListTunedModelsResponse( + tuned_models=[ + tuned_model.TunedModel(), + tuned_model.TunedModel(), + tuned_model.TunedModel(), + ], + next_page_token="abc", + ), + model_service.ListTunedModelsResponse( + tuned_models=[], + next_page_token="def", + ), + model_service.ListTunedModelsResponse( + tuned_models=[ + tuned_model.TunedModel(), + ], + next_page_token="ghi", + ), + model_service.ListTunedModelsResponse( + tuned_models=[ + tuned_model.TunedModel(), + tuned_model.TunedModel(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + model_service.ListTunedModelsResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {} + + pager = client.list_tuned_models(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, tuned_model.TunedModel) for i in results) + + pages = list(client.list_tuned_models(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_create_tuned_model_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.create_tuned_model in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_tuned_model + ] = mock_rpc + + request = {} + client.create_tuned_model(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_tuned_model(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_tuned_model_rest_required_fields( + request_type=model_service.CreateTunedModelRequest, +): + transport_class = transports.ModelServiceRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_tuned_model._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_tuned_model._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("tuned_model_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.create_tuned_model(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_tuned_model_rest_unset_required_fields(): + transport = transports.ModelServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_tuned_model._get_unset_required_fields({}) + assert set(unset_fields) == (set(("tunedModelId",)) & set(("tunedModel",))) + + +def test_create_tuned_model_rest_flattened(): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = {} + + # get truthy value for each flattened field + mock_args = dict( + tuned_model=gag_tuned_model.TunedModel( + tuned_model_source=gag_tuned_model.TunedModelSource( + tuned_model="tuned_model_value" + ) + ), + tuned_model_id="tuned_model_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.create_tuned_model(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/tunedModels" % client.transport._host, args[1] + ) + + +def test_create_tuned_model_rest_flattened_error(transport: str = "rest"): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_tuned_model( + model_service.CreateTunedModelRequest(), + tuned_model=gag_tuned_model.TunedModel( + tuned_model_source=gag_tuned_model.TunedModelSource( + tuned_model="tuned_model_value" + ) + ), + tuned_model_id="tuned_model_id_value", + ) + + +def test_update_tuned_model_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.update_tuned_model in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_tuned_model + ] = mock_rpc + + request = {} + client.update_tuned_model(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.update_tuned_model(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_tuned_model_rest_required_fields( + request_type=model_service.UpdateTunedModelRequest, +): + transport_class = transports.ModelServiceRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_tuned_model._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_tuned_model._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("update_mask",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = gag_tuned_model.TunedModel() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = gag_tuned_model.TunedModel.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.update_tuned_model(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_tuned_model_rest_unset_required_fields(): + transport = transports.ModelServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_tuned_model._get_unset_required_fields({}) + assert set(unset_fields) == (set(("updateMask",)) & set(("tunedModel",))) + + +def test_update_tuned_model_rest_flattened(): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = gag_tuned_model.TunedModel() + + # get arguments that satisfy an http rule for this method + sample_request = {"tuned_model": {"name": "tunedModels/sample1"}} + + # get truthy value for each flattened field + mock_args = dict( + tuned_model=gag_tuned_model.TunedModel( + tuned_model_source=gag_tuned_model.TunedModelSource( + tuned_model="tuned_model_value" + ) + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = gag_tuned_model.TunedModel.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.update_tuned_model(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{tuned_model.name=tunedModels/*}" % client.transport._host, + args[1], + ) + + +def test_update_tuned_model_rest_flattened_error(transport: str = "rest"): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_tuned_model( + model_service.UpdateTunedModelRequest(), + tuned_model=gag_tuned_model.TunedModel( + tuned_model_source=gag_tuned_model.TunedModelSource( + tuned_model="tuned_model_value" + ) + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_delete_tuned_model_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.delete_tuned_model in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_tuned_model + ] = mock_rpc + + request = {} + client.delete_tuned_model(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.delete_tuned_model(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_tuned_model_rest_required_fields( + request_type=model_service.DeleteTunedModelRequest, +): + transport_class = transports.ModelServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_tuned_model._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_tuned_model._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.delete_tuned_model(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_tuned_model_rest_unset_required_fields(): + transport = transports.ModelServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_tuned_model._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +def test_delete_tuned_model_rest_flattened(): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "tunedModels/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.delete_tuned_model(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{name=tunedModels/*}" % client.transport._host, args[1] + ) + + +def test_delete_tuned_model_rest_flattened_error(transport: str = "rest"): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_tuned_model( + model_service.DeleteTunedModelRequest(), + name="name_value", + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.ModelServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.ModelServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ModelServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.ModelServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = ModelServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = ModelServiceClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.ModelServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ModelServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.ModelServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = ModelServiceClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. 
+ transport = transports.ModelServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.ModelServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.ModelServiceGrpcTransport, + transports.ModelServiceGrpcAsyncIOTransport, + transports.ModelServiceRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +def test_transport_kind_grpc(): + transport = ModelServiceClient.get_transport_class("grpc")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "grpc" + + +def test_initialize_client_w_grpc(): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_model_empty_call_grpc(): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_model), "__call__") as call: + call.return_value = model.Model() + client.get_model(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = model_service.GetModelRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_models_empty_call_grpc(): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.list_models), "__call__") as call: + call.return_value = model_service.ListModelsResponse() + client.list_models(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = model_service.ListModelsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_tuned_model_empty_call_grpc(): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_tuned_model), "__call__") as call: + call.return_value = tuned_model.TunedModel() + client.get_tuned_model(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = model_service.GetTunedModelRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_list_tuned_models_empty_call_grpc(): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_tuned_models), "__call__" + ) as call: + call.return_value = model_service.ListTunedModelsResponse() + client.list_tuned_models(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = model_service.ListTunedModelsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_tuned_model_empty_call_grpc(): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_tuned_model), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_tuned_model(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = model_service.CreateTunedModelRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_tuned_model_empty_call_grpc(): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_tuned_model), "__call__" + ) as call: + call.return_value = gag_tuned_model.TunedModel() + client.update_tuned_model(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = model_service.UpdateTunedModelRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_tuned_model_empty_call_grpc(): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_tuned_model), "__call__" + ) as call: + call.return_value = None + client.delete_tuned_model(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = model_service.DeleteTunedModelRequest() + + assert args[0] == request_msg + + +def test_transport_kind_grpc_asyncio(): + transport = ModelServiceAsyncClient.get_transport_class("grpc_asyncio")( + credentials=async_anonymous_credentials() + ) + assert transport.kind == "grpc_asyncio" + + +def test_initialize_client_w_grpc_asyncio(): + client = ModelServiceAsyncClient( + credentials=async_anonymous_credentials(), transport="grpc_asyncio" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+@pytest.mark.asyncio +async def test_get_model_empty_call_grpc_asyncio(): + client = ModelServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_model), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + model.Model( + name="name_value", + base_model_id="base_model_id_value", + version="version_value", + display_name="display_name_value", + description="description_value", + input_token_limit=1838, + output_token_limit=1967, + supported_generation_methods=["supported_generation_methods_value"], + temperature=0.1198, + max_temperature=0.16190000000000002, + top_p=0.546, + top_k=541, + ) + ) + await client.get_model(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = model_service.GetModelRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_models_empty_call_grpc_asyncio(): + client = ModelServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.list_models), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + model_service.ListModelsResponse( + next_page_token="next_page_token_value", + ) + ) + await client.list_models(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = model_service.ListModelsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_tuned_model_empty_call_grpc_asyncio(): + client = ModelServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_tuned_model), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + tuned_model.TunedModel( + name="name_value", + display_name="display_name_value", + description="description_value", + temperature=0.1198, + top_p=0.546, + top_k=541, + state=tuned_model.TunedModel.State.CREATING, + reader_project_numbers=[2340], + ) + ) + await client.get_tuned_model(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = model_service.GetTunedModelRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_tuned_models_empty_call_grpc_asyncio(): + client = ModelServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_tuned_models), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + model_service.ListTunedModelsResponse( + next_page_token="next_page_token_value", + ) + ) + await client.list_tuned_models(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = model_service.ListTunedModelsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_tuned_model_empty_call_grpc_asyncio(): + client = ModelServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_tuned_model), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.create_tuned_model(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = model_service.CreateTunedModelRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_tuned_model_empty_call_grpc_asyncio(): + client = ModelServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_tuned_model), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gag_tuned_model.TunedModel( + name="name_value", + display_name="display_name_value", + description="description_value", + temperature=0.1198, + top_p=0.546, + top_k=541, + state=gag_tuned_model.TunedModel.State.CREATING, + reader_project_numbers=[2340], + ) + ) + await client.update_tuned_model(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = model_service.UpdateTunedModelRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_tuned_model_empty_call_grpc_asyncio(): + client = ModelServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_tuned_model), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_tuned_model(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = model_service.DeleteTunedModelRequest() + + assert args[0] == request_msg + + +def test_transport_kind_rest(): + transport = ModelServiceClient.get_transport_class("rest")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "rest" + + +def test_get_model_rest_bad_request(request_type=model_service.GetModelRequest): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"name": "models/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_model(request) + + +@pytest.mark.parametrize( + "request_type", + [ + model_service.GetModelRequest, + dict, + ], +) +def test_get_model_rest_call_success(request_type): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"name": "models/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = model.Model( + name="name_value", + base_model_id="base_model_id_value", + version="version_value", + display_name="display_name_value", + description="description_value", + input_token_limit=1838, + output_token_limit=1967, + supported_generation_methods=["supported_generation_methods_value"], + temperature=0.1198, + max_temperature=0.16190000000000002, + top_p=0.546, + top_k=541, + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = model.Model.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.get_model(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, model.Model) + assert response.name == "name_value" + assert response.base_model_id == "base_model_id_value" + assert response.version == "version_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.input_token_limit == 1838 + assert response.output_token_limit == 1967 + assert response.supported_generation_methods == [ + "supported_generation_methods_value" + ] + assert math.isclose(response.temperature, 0.1198, rel_tol=1e-6) + assert math.isclose(response.max_temperature, 0.16190000000000002, rel_tol=1e-6) + assert math.isclose(response.top_p, 0.546, rel_tol=1e-6) + assert response.top_k == 541 + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_model_rest_interceptors(null_interceptor): + transport = transports.ModelServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ModelServiceRestInterceptor(), + ) + client = ModelServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ModelServiceRestInterceptor, "post_get_model" + ) as post, mock.patch.object( + transports.ModelServiceRestInterceptor, "post_get_model_with_metadata" + ) as post_with_metadata, mock.patch.object( + transports.ModelServiceRestInterceptor, "pre_get_model" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = model_service.GetModelRequest.pb(model_service.GetModelRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = model.Model.to_json(model.Model()) + req.return_value.content = return_value + + request = model_service.GetModelRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = model.Model() + post_with_metadata.return_value = model.Model(), metadata + + client.get_model( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_list_models_rest_bad_request(request_type=model_service.ListModelsRequest): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_models(request) + + +@pytest.mark.parametrize( + "request_type", + [ + model_service.ListModelsRequest, + dict, + ], +) +def test_list_models_rest_call_success(request_type): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = model_service.ListModelsResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = model_service.ListModelsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.list_models(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListModelsPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_models_rest_interceptors(null_interceptor): + transport = transports.ModelServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ModelServiceRestInterceptor(), + ) + client = ModelServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ModelServiceRestInterceptor, "post_list_models" + ) as post, mock.patch.object( + transports.ModelServiceRestInterceptor, "post_list_models_with_metadata" + ) as post_with_metadata, mock.patch.object( + transports.ModelServiceRestInterceptor, "pre_list_models" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = model_service.ListModelsRequest.pb( + model_service.ListModelsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = model_service.ListModelsResponse.to_json( + model_service.ListModelsResponse() + ) + req.return_value.content = return_value + + request = model_service.ListModelsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = model_service.ListModelsResponse() + post_with_metadata.return_value = model_service.ListModelsResponse(), metadata + 
+ client.list_models( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_get_tuned_model_rest_bad_request( + request_type=model_service.GetTunedModelRequest, +): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"name": "tunedModels/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_tuned_model(request) + + +@pytest.mark.parametrize( + "request_type", + [ + model_service.GetTunedModelRequest, + dict, + ], +) +def test_get_tuned_model_rest_call_success(request_type): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"name": "tunedModels/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = tuned_model.TunedModel( + name="name_value", + display_name="display_name_value", + description="description_value", + temperature=0.1198, + top_p=0.546, + top_k=541, + state=tuned_model.TunedModel.State.CREATING, + reader_project_numbers=[2340], + base_model="base_model_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = tuned_model.TunedModel.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.get_tuned_model(request) + + # Establish that the response is the type that we expect. 
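+ # Float fields (temperature, top_p) are compared with math.isclose rather than
+ # == to tolerate floating-point rounding in the JSON round trip.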
+ assert isinstance(response, tuned_model.TunedModel) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert math.isclose(response.temperature, 0.1198, rel_tol=1e-6) + assert math.isclose(response.top_p, 0.546, rel_tol=1e-6) + assert response.top_k == 541 + assert response.state == tuned_model.TunedModel.State.CREATING + assert response.reader_project_numbers == [2340] + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_tuned_model_rest_interceptors(null_interceptor): + transport = transports.ModelServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ModelServiceRestInterceptor(), + ) + client = ModelServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ModelServiceRestInterceptor, "post_get_tuned_model" + ) as post, mock.patch.object( + transports.ModelServiceRestInterceptor, "post_get_tuned_model_with_metadata" + ) as post_with_metadata, mock.patch.object( + transports.ModelServiceRestInterceptor, "pre_get_tuned_model" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = model_service.GetTunedModelRequest.pb( + model_service.GetTunedModelRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = tuned_model.TunedModel.to_json(tuned_model.TunedModel()) + req.return_value.content = return_value + + request = model_service.GetTunedModelRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = tuned_model.TunedModel() + post_with_metadata.return_value = tuned_model.TunedModel(), metadata + + client.get_tuned_model( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_list_tuned_models_rest_bad_request( + request_type=model_service.ListTunedModelsRequest, +): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
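+ # A mocked 400 status should surface to the caller as core_exceptions.BadRequest.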
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_tuned_models(request) + + +@pytest.mark.parametrize( + "request_type", + [ + model_service.ListTunedModelsRequest, + dict, + ], +) +def test_list_tuned_models_rest_call_success(request_type): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = model_service.ListTunedModelsResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = model_service.ListTunedModelsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.list_tuned_models(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListTunedModelsPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_tuned_models_rest_interceptors(null_interceptor): + transport = transports.ModelServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ModelServiceRestInterceptor(), + ) + client = ModelServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ModelServiceRestInterceptor, "post_list_tuned_models" + ) as post, mock.patch.object( + transports.ModelServiceRestInterceptor, "post_list_tuned_models_with_metadata" + ) as post_with_metadata, mock.patch.object( + transports.ModelServiceRestInterceptor, "pre_list_tuned_models" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = model_service.ListTunedModelsRequest.pb( + model_service.ListTunedModelsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = model_service.ListTunedModelsResponse.to_json( + model_service.ListTunedModelsResponse() + ) + req.return_value.content = return_value + + request = model_service.ListTunedModelsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = 
model_service.ListTunedModelsResponse() + post_with_metadata.return_value = ( + model_service.ListTunedModelsResponse(), + metadata, + ) + + client.list_tuned_models( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_create_tuned_model_rest_bad_request( + request_type=model_service.CreateTunedModelRequest, +): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.create_tuned_model(request) + + +@pytest.mark.parametrize( + "request_type", + [ + model_service.CreateTunedModelRequest, + dict, + ], +) +def test_create_tuned_model_rest_call_success(request_type): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {} + request_init["tuned_model"] = { + "tuned_model_source": { + "tuned_model": "tuned_model_value", + "base_model": "base_model_value", + }, + "base_model": "base_model_value", + "name": "name_value", + "display_name": "display_name_value", + "description": "description_value", + "temperature": 0.1198, + "top_p": 0.546, + "top_k": 541, + "state": 1, + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "tuning_task": { + "start_time": {}, + "complete_time": {}, + "snapshots": [ + {"step": 444, "epoch": 527, "mean_loss": 0.961, "compute_time": {}} + ], + "training_data": { + "examples": { + "examples": [ + {"text_input": "text_input_value", "output": "output_value"} + ], + "multiturn_examples": [ + { + "system_instruction": { + "parts": [{"text": "text_value"}], + "role": "role_value", + }, + "contents": {}, + } + ], + } + }, + "hyperparameters": { + "learning_rate": 0.1371, + "learning_rate_multiplier": 0.2561, + "epoch_count": 1175, + "batch_size": 1052, + }, + }, + "reader_project_numbers": [2341, 2342], + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = model_service.CreateTunedModelRequest.meta.fields["tuned_model"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
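+ # Proto-plus message types expose their fields via `.meta.fields`, while raw
+ # protobuf types expose them via `.DESCRIPTOR.fields`; both cases are handled here.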
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["tuned_model"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["tuned_model"][field])): + del request_init["tuned_model"][field][i][subfield] + else: + del request_init["tuned_model"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.create_tuned_model(request) + + # Establish that the response is the type that we expect. 
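+ # create_tuned_model is a long-running operation, so the mocked payload is an
+ # Operation proto rather than a typed TunedModel response.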
+ json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_tuned_model_rest_interceptors(null_interceptor): + transport = transports.ModelServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ModelServiceRestInterceptor(), + ) + client = ModelServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.ModelServiceRestInterceptor, "post_create_tuned_model" + ) as post, mock.patch.object( + transports.ModelServiceRestInterceptor, "post_create_tuned_model_with_metadata" + ) as post_with_metadata, mock.patch.object( + transports.ModelServiceRestInterceptor, "pre_create_tuned_model" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = model_service.CreateTunedModelRequest.pb( + model_service.CreateTunedModelRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = model_service.CreateTunedModelRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata + + client.create_tuned_model( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_update_tuned_model_rest_bad_request( + request_type=model_service.UpdateTunedModelRequest, +): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"tuned_model": {"name": "tunedModels/sample1"}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.update_tuned_model(request) + + +@pytest.mark.parametrize( + "request_type", + [ + model_service.UpdateTunedModelRequest, + dict, + ], +) +def test_update_tuned_model_rest_call_success(request_type): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"tuned_model": {"name": "tunedModels/sample1"}} + request_init["tuned_model"] = { + "tuned_model_source": { + "tuned_model": "tuned_model_value", + "base_model": "base_model_value", + }, + "base_model": "base_model_value", + "name": "tunedModels/sample1", + "display_name": "display_name_value", + "description": "description_value", + "temperature": 0.1198, + "top_p": 0.546, + "top_k": 541, + "state": 1, + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "tuning_task": { + "start_time": {}, + "complete_time": {}, + "snapshots": [ + {"step": 444, "epoch": 527, "mean_loss": 0.961, "compute_time": {}} + ], + "training_data": { + "examples": { + "examples": [ + {"text_input": "text_input_value", "output": "output_value"} + ], + "multiturn_examples": [ + { + "system_instruction": { + "parts": [{"text": "text_value"}], + "role": "role_value", + }, + "contents": {}, + } + ], + } + }, + "hyperparameters": { + "learning_rate": 0.1371, + "learning_rate_multiplier": 0.2561, + "epoch_count": 1175, + "batch_size": 1052, + }, + }, + "reader_project_numbers": [2341, 2342], + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = model_service.UpdateTunedModelRequest.meta.fields["tuned_model"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["tuned_model"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["tuned_model"][field])): + del request_init["tuned_model"][field][i][subfield] + else: + del request_init["tuned_model"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = gag_tuned_model.TunedModel( + name="name_value", + display_name="display_name_value", + description="description_value", + temperature=0.1198, + top_p=0.546, + top_k=541, + state=gag_tuned_model.TunedModel.State.CREATING, + reader_project_numbers=[2340], + base_model="base_model_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = gag_tuned_model.TunedModel.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.update_tuned_model(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, gag_tuned_model.TunedModel) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert math.isclose(response.temperature, 0.1198, rel_tol=1e-6) + assert math.isclose(response.top_p, 0.546, rel_tol=1e-6) + assert response.top_k == 541 + assert response.state == gag_tuned_model.TunedModel.State.CREATING + assert response.reader_project_numbers == [2340] + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_tuned_model_rest_interceptors(null_interceptor): + transport = transports.ModelServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ModelServiceRestInterceptor(), + ) + client = ModelServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ModelServiceRestInterceptor, "post_update_tuned_model" + ) as post, mock.patch.object( + transports.ModelServiceRestInterceptor, "post_update_tuned_model_with_metadata" + ) as post_with_metadata, mock.patch.object( + transports.ModelServiceRestInterceptor, "pre_update_tuned_model" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = model_service.UpdateTunedModelRequest.pb( + model_service.UpdateTunedModelRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = gag_tuned_model.TunedModel.to_json(gag_tuned_model.TunedModel()) + req.return_value.content = return_value + + request = model_service.UpdateTunedModelRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = gag_tuned_model.TunedModel() + post_with_metadata.return_value = gag_tuned_model.TunedModel(), metadata + + client.update_tuned_model( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_delete_tuned_model_rest_bad_request( + request_type=model_service.DeleteTunedModelRequest, +): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"name": "tunedModels/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.delete_tuned_model(request) + + +@pytest.mark.parametrize( + "request_type", + [ + model_service.DeleteTunedModelRequest, + dict, + ], +) +def test_delete_tuned_model_rest_call_success(request_type): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"name": "tunedModels/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = "" + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.delete_tuned_model(request) + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_tuned_model_rest_interceptors(null_interceptor): + transport = transports.ModelServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ModelServiceRestInterceptor(), + ) + client = ModelServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ModelServiceRestInterceptor, "pre_delete_tuned_model" + ) as pre: + pre.assert_not_called() + pb_message = model_service.DeleteTunedModelRequest.pb( + model_service.DeleteTunedModelRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + request = model_service.DeleteTunedModelRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.delete_tuned_model( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + + +def test_get_operation_rest_bad_request( + request_type=operations_pb2.GetOperationRequest, +): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict( + {"name": "tunedModels/sample1/operations/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. 
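+ # The operations-mixin tests below wrap the fake response in real
+ # requests.Response / Request objects rather than bare mocks.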
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.GetOperationRequest, + dict, + ], +) +def test_get_operation_rest(request_type): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {"name": "tunedModels/sample1/operations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_operation(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_list_operations_rest_bad_request( + request_type=operations_pb2.ListOperationsRequest, +): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict({"name": "tunedModels/sample1"}, request) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_operations(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.ListOperationsRequest, + dict, + ], +) +def test_list_operations_rest(request_type): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {"name": "tunedModels/sample1"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.ListOperationsResponse() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_operations(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_initialize_client_w_rest(): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_model_empty_call_rest(): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_model), "__call__") as call: + client.get_model(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = model_service.GetModelRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_models_empty_call_rest(): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.list_models), "__call__") as call: + client.list_models(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = model_service.ListModelsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_tuned_model_empty_call_rest(): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_tuned_model), "__call__") as call: + client.get_tuned_model(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = model_service.GetTunedModelRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_tuned_models_empty_call_rest(): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_tuned_models), "__call__" + ) as call: + client.list_tuned_models(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = model_service.ListTunedModelsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_tuned_model_empty_call_rest(): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_tuned_model), "__call__" + ) as call: + client.create_tuned_model(request=None) + + # Establish that the underlying stub method was called. 
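+ # Even with request=None, the transport should have received a
+ # default-constructed CreateTunedModelRequest.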
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = model_service.CreateTunedModelRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_tuned_model_empty_call_rest(): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_tuned_model), "__call__" + ) as call: + client.update_tuned_model(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = model_service.UpdateTunedModelRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_tuned_model_empty_call_rest(): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_tuned_model), "__call__" + ) as call: + client.delete_tuned_model(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = model_service.DeleteTunedModelRequest() + + assert args[0] == request_msg + + +def test_model_service_rest_lro_client(): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + transport = client.transport + + # Ensure that we have an api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.AbstractOperationsClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.ModelServiceGrpcTransport, + ) + + +def test_model_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.ModelServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_model_service_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.ai.generativelanguage_v1alpha.services.model_service.transports.ModelServiceTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.ModelServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + "get_model", + "list_models", + "get_tuned_model", + "list_tuned_models", + "create_tuned_model", + "update_tuned_model", + "delete_tuned_model", + "get_operation", + "list_operations", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Additionally, the LRO client (a property) should + # also raise NotImplementedError + with pytest.raises(NotImplementedError): + transport.operations_client + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_model_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.ai.generativelanguage_v1alpha.services.model_service.transports.ModelServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.ModelServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=(), + quota_project_id="octopus", + ) + + +def test_model_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.ai.generativelanguage_v1alpha.services.model_service.transports.ModelServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.ModelServiceTransport() + adc.assert_called_once() + + +def test_model_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + ModelServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=(), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.ModelServiceGrpcTransport, + transports.ModelServiceGrpcAsyncIOTransport, + ], +) +def test_model_service_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
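+ # google.auth.default() is patched so the test can assert that the requested
+ # scopes and quota project are forwarded to ADC.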
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=(), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.ModelServiceGrpcTransport, + transports.ModelServiceGrpcAsyncIOTransport, + transports.ModelServiceRestTransport, + ], +) +def test_model_service_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.ModelServiceGrpcTransport, grpc_helpers), + (transports.ModelServiceGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +def test_model_service_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "generativelanguage.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=(), + scopes=["1", "2"], + default_host="generativelanguage.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [transports.ModelServiceGrpcTransport, transports.ModelServiceGrpcAsyncIOTransport], +) +def test_model_service_grpc_transport_client_cert_source_for_mtls(transport_class): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
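+ # In that case the transport is expected to build grpc.ssl_channel_credentials
+ # from the cert/key pair returned by the callback.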
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + +def test_model_service_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.ModelServiceRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_model_service_host_no_port(transport_name): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="generativelanguage.googleapis.com" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "generativelanguage.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://generativelanguage.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_model_service_host_with_port(transport_name): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="generativelanguage.googleapis.com:8000" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "generativelanguage.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://generativelanguage.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_model_service_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = ModelServiceClient( + credentials=creds1, + transport=transport_name, + ) + client2 = ModelServiceClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.get_model._session + session2 = client2.transport.get_model._session + assert session1 != session2 + session1 = client1.transport.list_models._session + session2 = client2.transport.list_models._session + assert session1 != session2 + session1 = client1.transport.get_tuned_model._session + session2 = client2.transport.get_tuned_model._session + assert session1 != session2 + session1 = client1.transport.list_tuned_models._session + session2 = client2.transport.list_tuned_models._session + assert session1 != session2 + session1 = client1.transport.create_tuned_model._session + session2 = client2.transport.create_tuned_model._session + assert session1 != session2 + session1 = client1.transport.update_tuned_model._session + session2 = client2.transport.update_tuned_model._session + assert session1 != session2 + session1 = client1.transport.delete_tuned_model._session + session2 = client2.transport.delete_tuned_model._session + assert session1 != session2 + + +def test_model_service_grpc_transport_channel(): + channel = grpc.secure_channel("http://localhost/", 
grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.ModelServiceGrpcTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +def test_model_service_grpc_asyncio_transport_channel(): + channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.ModelServiceGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize( + "transport_class", + [transports.ModelServiceGrpcTransport, transports.ModelServiceGrpcAsyncIOTransport], +) +def test_model_service_transport_channel_mtls_with_client_cert_source(transport_class): + with mock.patch( + "grpc.ssl_channel_credentials", autospec=True + ) as grpc_ssl_channel_cred: + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. 
+@pytest.mark.parametrize( + "transport_class", + [transports.ModelServiceGrpcTransport, transports.ModelServiceGrpcAsyncIOTransport], +) +def test_model_service_transport_channel_mtls_with_adc(transport_class): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_model_service_grpc_lro_client(): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + transport = client.transport + + # Ensure that we have a api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.OperationsClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client + + +def test_model_service_grpc_lro_async_client(): + client = ModelServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + transport = client.transport + + # Ensure that we have a api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.OperationsAsyncClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client + + +def test_model_path(): + model = "squid" + expected = "models/{model}".format( + model=model, + ) + actual = ModelServiceClient.model_path(model) + assert expected == actual + + +def test_parse_model_path(): + expected = { + "model": "clam", + } + path = ModelServiceClient.model_path(**expected) + + # Check that the path construction is reversible. + actual = ModelServiceClient.parse_model_path(path) + assert expected == actual + + +def test_tuned_model_path(): + tuned_model = "whelk" + expected = "tunedModels/{tuned_model}".format( + tuned_model=tuned_model, + ) + actual = ModelServiceClient.tuned_model_path(tuned_model) + assert expected == actual + + +def test_parse_tuned_model_path(): + expected = { + "tuned_model": "octopus", + } + path = ModelServiceClient.tuned_model_path(**expected) + + # Check that the path construction is reversible. 
+ actual = ModelServiceClient.parse_tuned_model_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "oyster" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = ModelServiceClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "nudibranch", + } + path = ModelServiceClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = ModelServiceClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "cuttlefish" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = ModelServiceClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "mussel", + } + path = ModelServiceClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = ModelServiceClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "winkle" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = ModelServiceClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nautilus", + } + path = ModelServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = ModelServiceClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "scallop" + expected = "projects/{project}".format( + project=project, + ) + actual = ModelServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "abalone", + } + path = ModelServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = ModelServiceClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "squid" + location = "clam" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = ModelServiceClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "whelk", + "location": "octopus", + } + path = ModelServiceClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = ModelServiceClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.ModelServiceTransport, "_prep_wrapped_messages" + ) as prep: + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.ModelServiceTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = ModelServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +def test_get_operation(transport: str = "grpc"): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + response = client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +@pytest.mark.asyncio +async def test_get_operation_async(transport: str = "grpc_asyncio"): + client = ModelServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_get_operation_field_headers(): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = operations_pb2.Operation() + + client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_operation_field_headers_async(): + client = ModelServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_get_operation_from_dict(): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + + response = client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_get_operation_from_dict_async(): + client = ModelServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_operations(transport: str = "grpc"): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + response = client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +@pytest.mark.asyncio +async def test_list_operations_async(transport: str = "grpc_asyncio"): + client = ModelServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_list_operations_field_headers(): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = operations_pb2.ListOperationsResponse() + + client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_operations_field_headers_async(): + client = ModelServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_list_operations_from_dict(): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + + response = client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_list_operations_from_dict_async(): + client = ModelServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_transport_close_grpc(): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc" + ) + with mock.patch.object( + type(getattr(client.transport, "_grpc_channel")), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +@pytest.mark.asyncio +async def test_transport_close_grpc_asyncio(): + client = ModelServiceAsyncClient( + credentials=async_anonymous_credentials(), transport="grpc_asyncio" + ) + with mock.patch.object( + type(getattr(client.transport, "_grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_transport_close_rest(): + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + with mock.patch.object( + type(getattr(client.transport, "_session")), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + "grpc", + ] + for transport in transports: + client = ModelServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. + with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (ModelServiceClient, transports.ModelServiceGrpcTransport), + (ModelServiceAsyncClient, transports.ModelServiceGrpcAsyncIOTransport), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1alpha/test_permission_service.py b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1alpha/test_permission_service.py new file mode 100644 index 000000000000..388511313893 --- /dev/null +++ b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1alpha/test_permission_service.py @@ -0,0 +1,7019 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +from collections.abc import AsyncIterable, Iterable +import json +import math + +from google.api_core import api_core_version +from google.protobuf import json_format +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +try: + from google.auth.aio import credentials as ga_credentials_async + + HAS_GOOGLE_AUTH_AIO = True +except ImportError: # pragma: NO COVER + HAS_GOOGLE_AUTH_AIO = False + +from google.api_core import gapic_v1, grpc_helpers, grpc_helpers_async, path_template +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account +from google.protobuf import field_mask_pb2 # type: ignore + +from google.ai.generativelanguage_v1alpha.services.permission_service import ( + PermissionServiceAsyncClient, + PermissionServiceClient, + pagers, + transports, +) +from google.ai.generativelanguage_v1alpha.types import permission as gag_permission +from google.ai.generativelanguage_v1alpha.types import permission +from google.ai.generativelanguage_v1alpha.types import permission_service + +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + + +async def mock_async_gen(data, chunk_size=1): + for i in range(0, len(data)): # pragma: NO COVER + chunk = data[i : i + chunk_size] + yield chunk.encode("utf-8") + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded. +# See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107. +def async_anonymous_credentials(): + if HAS_GOOGLE_AUTH_AIO: + return ga_credentials_async.AnonymousCredentials() + return ga_credentials.AnonymousCredentials() + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +# If default endpoint template is localhost, then default mtls endpoint will be the same. 
+# This method modifies the default endpoint template so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint_template(client): + return ( + "test.{UNIVERSE_DOMAIN}" + if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) + else client._DEFAULT_ENDPOINT_TEMPLATE + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert PermissionServiceClient._get_default_mtls_endpoint(None) is None + assert ( + PermissionServiceClient._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + PermissionServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + PermissionServiceClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + PermissionServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + PermissionServiceClient._get_default_mtls_endpoint(non_googleapi) + == non_googleapi + ) + + +def test__read_environment_variables(): + assert PermissionServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert PermissionServiceClient._read_environment_variables() == ( + True, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert PermissionServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + PermissionServiceClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + assert PermissionServiceClient._read_environment_variables() == ( + False, + "never", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + assert PermissionServiceClient._read_environment_variables() == ( + False, + "always", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): + assert PermissionServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + PermissionServiceClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): + assert PermissionServiceClient._read_environment_variables() == ( + False, + "auto", + "foo.com", + ) + + +def test__get_client_cert_source(): + mock_provided_cert_source = mock.Mock() + mock_default_cert_source = mock.Mock() + + assert PermissionServiceClient._get_client_cert_source(None, False) is None + assert ( + PermissionServiceClient._get_client_cert_source( + mock_provided_cert_source, False + ) + is None + ) + assert ( + PermissionServiceClient._get_client_cert_source(mock_provided_cert_source, 
True) + == mock_provided_cert_source + ) + + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", return_value=True + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_default_cert_source, + ): + assert ( + PermissionServiceClient._get_client_cert_source(None, True) + is mock_default_cert_source + ) + assert ( + PermissionServiceClient._get_client_cert_source( + mock_provided_cert_source, "true" + ) + is mock_provided_cert_source + ) + + +@mock.patch.object( + PermissionServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(PermissionServiceClient), +) +@mock.patch.object( + PermissionServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(PermissionServiceAsyncClient), +) +def test__get_api_endpoint(): + api_override = "foo.com" + mock_client_cert_source = mock.Mock() + default_universe = PermissionServiceClient._DEFAULT_UNIVERSE + default_endpoint = PermissionServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = PermissionServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + assert ( + PermissionServiceClient._get_api_endpoint( + api_override, mock_client_cert_source, default_universe, "always" + ) + == api_override + ) + assert ( + PermissionServiceClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "auto" + ) + == PermissionServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + PermissionServiceClient._get_api_endpoint(None, None, default_universe, "auto") + == default_endpoint + ) + assert ( + PermissionServiceClient._get_api_endpoint( + None, None, default_universe, "always" + ) + == PermissionServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + PermissionServiceClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "always" + ) + == PermissionServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + PermissionServiceClient._get_api_endpoint(None, None, mock_universe, "never") + == mock_endpoint + ) + assert ( + PermissionServiceClient._get_api_endpoint(None, None, default_universe, "never") + == default_endpoint + ) + + with pytest.raises(MutualTLSChannelError) as excinfo: + PermissionServiceClient._get_api_endpoint( + None, mock_client_cert_source, mock_universe, "auto" + ) + assert ( + str(excinfo.value) + == "mTLS is not supported in any universe other than googleapis.com." + ) + + +def test__get_universe_domain(): + client_universe_domain = "foo.com" + universe_domain_env = "bar.com" + + assert ( + PermissionServiceClient._get_universe_domain( + client_universe_domain, universe_domain_env + ) + == client_universe_domain + ) + assert ( + PermissionServiceClient._get_universe_domain(None, universe_domain_env) + == universe_domain_env + ) + assert ( + PermissionServiceClient._get_universe_domain(None, None) + == PermissionServiceClient._DEFAULT_UNIVERSE + ) + + with pytest.raises(ValueError) as excinfo: + PermissionServiceClient._get_universe_domain("", None) + assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+ + +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = PermissionServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = PermissionServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (PermissionServiceClient, "grpc"), + (PermissionServiceAsyncClient, "grpc_asyncio"), + (PermissionServiceClient, "rest"), + ], +) +def test_permission_service_client_from_service_account_info( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "generativelanguage.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://generativelanguage.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.PermissionServiceGrpcTransport, "grpc"), + (transports.PermissionServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.PermissionServiceRestTransport, "rest"), + ], +) +def test_permission_service_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (PermissionServiceClient, "grpc"), + (PermissionServiceAsyncClient, "grpc_asyncio"), + (PermissionServiceClient, "rest"), + ], +) +def test_permission_service_client_from_service_account_file( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as 
factory: + factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "generativelanguage.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://generativelanguage.googleapis.com" + ) + + +def test_permission_service_client_get_transport_class(): + transport = PermissionServiceClient.get_transport_class() + available_transports = [ + transports.PermissionServiceGrpcTransport, + transports.PermissionServiceRestTransport, + ] + assert transport in available_transports + + transport = PermissionServiceClient.get_transport_class("grpc") + assert transport == transports.PermissionServiceGrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (PermissionServiceClient, transports.PermissionServiceGrpcTransport, "grpc"), + ( + PermissionServiceAsyncClient, + transports.PermissionServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (PermissionServiceClient, transports.PermissionServiceRestTransport, "rest"), + ], +) +@mock.patch.object( + PermissionServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(PermissionServiceClient), +) +@mock.patch.object( + PermissionServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(PermissionServiceAsyncClient), +) +def test_permission_service_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(PermissionServiceClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(PermissionServiceClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + ( + PermissionServiceClient, + 
transports.PermissionServiceGrpcTransport, + "grpc", + "true", + ), + ( + PermissionServiceAsyncClient, + transports.PermissionServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + ( + PermissionServiceClient, + transports.PermissionServiceGrpcTransport, + "grpc", + "false", + ), + ( + PermissionServiceAsyncClient, + transports.PermissionServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + ( + PermissionServiceClient, + transports.PermissionServiceRestTransport, + "rest", + "true", + ), + ( + PermissionServiceClient, + transports.PermissionServiceRestTransport, + "rest", + "false", + ), + ], +) +@mock.patch.object( + PermissionServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(PermissionServiceClient), +) +@mock.patch.object( + PermissionServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(PermissionServiceAsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_permission_service_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class", [PermissionServiceClient, PermissionServiceAsyncClient] +) +@mock.patch.object( + PermissionServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(PermissionServiceClient), +) +@mock.patch.object( + PermissionServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(PermissionServiceAsyncClient), +) +def test_permission_service_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + +@pytest.mark.parametrize( + "client_class", [PermissionServiceClient, PermissionServiceAsyncClient] +) +@mock.patch.object( + PermissionServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(PermissionServiceClient), +) +@mock.patch.object( + PermissionServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(PermissionServiceAsyncClient), +) +def test_permission_service_client_client_api_endpoint(client_class): + mock_client_cert_source = client_cert_source_callback + api_override = "foo.com" + default_universe = PermissionServiceClient._DEFAULT_UNIVERSE + default_endpoint = PermissionServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = PermissionServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", + # use ClientOptions.api_endpoint as the api endpoint regardless. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ): + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=api_override + ) + client = client_class( + client_options=options, + credentials=ga_credentials.AnonymousCredentials(), + ) + assert client.api_endpoint == api_override + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, + # and ClientOptions.universe_domain="bar.com", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. + options = client_options.ClientOptions() + universe_exists = hasattr(options, "universe_domain") + if universe_exists: + options = client_options.ClientOptions(universe_domain=mock_universe) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + else: + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == ( + mock_endpoint if universe_exists else default_endpoint + ) + assert client.universe_domain == ( + mock_universe if universe_exists else default_universe + ) + + # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + options = client_options.ClientOptions() + if hasattr(options, "universe_domain"): + delattr(options, "universe_domain") + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == default_endpoint + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (PermissionServiceClient, transports.PermissionServiceGrpcTransport, "grpc"), + ( + PermissionServiceAsyncClient, + transports.PermissionServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (PermissionServiceClient, transports.PermissionServiceRestTransport, "rest"), + ], +) +def test_permission_service_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. 
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + PermissionServiceClient, + transports.PermissionServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + PermissionServiceAsyncClient, + transports.PermissionServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ( + PermissionServiceClient, + transports.PermissionServiceRestTransport, + "rest", + None, + ), + ], +) +def test_permission_service_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_permission_service_client_client_options_from_dict(): + with mock.patch( + "google.ai.generativelanguage_v1alpha.services.permission_service.transports.PermissionServiceGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = PermissionServiceClient( + client_options={"api_endpoint": "squid.clam.whelk"} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + PermissionServiceClient, + transports.PermissionServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + PermissionServiceAsyncClient, + transports.PermissionServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_permission_service_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "generativelanguage.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=(), + scopes=None, + default_host="generativelanguage.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + permission_service.CreatePermissionRequest, + dict, + ], +) +def test_create_permission(request_type, transport: str = "grpc"): + client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_permission), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = gag_permission.Permission( + name="name_value", + grantee_type=gag_permission.Permission.GranteeType.USER, + email_address="email_address_value", + role=gag_permission.Permission.Role.OWNER, + ) + response = client.create_permission(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = permission_service.CreatePermissionRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, gag_permission.Permission) + assert response.name == "name_value" + assert response.grantee_type == gag_permission.Permission.GranteeType.USER + assert response.email_address == "email_address_value" + assert response.role == gag_permission.Permission.Role.OWNER + + +def test_create_permission_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = permission_service.CreatePermissionRequest( + parent="parent_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_permission), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.create_permission(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == permission_service.CreatePermissionRequest( + parent="parent_value", + ) + + +def test_create_permission_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_permission in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_permission + ] = mock_rpc + request = {} + client.create_permission(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.create_permission(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_permission_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = PermissionServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.create_permission + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.create_permission + ] = mock_rpc + + request = {} + await client.create_permission(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + await client.create_permission(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_permission_async( + transport: str = "grpc_asyncio", + request_type=permission_service.CreatePermissionRequest, +): + client = PermissionServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_permission), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gag_permission.Permission( + name="name_value", + grantee_type=gag_permission.Permission.GranteeType.USER, + email_address="email_address_value", + role=gag_permission.Permission.Role.OWNER, + ) + ) + response = await client.create_permission(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = permission_service.CreatePermissionRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, gag_permission.Permission) + assert response.name == "name_value" + assert response.grantee_type == gag_permission.Permission.GranteeType.USER + assert response.email_address == "email_address_value" + assert response.role == gag_permission.Permission.Role.OWNER + + +@pytest.mark.asyncio +async def test_create_permission_async_from_dict(): + await test_create_permission_async(request_type=dict) + + +def test_create_permission_field_headers(): + client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = permission_service.CreatePermissionRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_permission), "__call__" + ) as call: + call.return_value = gag_permission.Permission() + client.create_permission(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_permission_field_headers_async(): + client = PermissionServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = permission_service.CreatePermissionRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_permission), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gag_permission.Permission() + ) + await client.create_permission(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_permission_flattened(): + client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_permission), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = gag_permission.Permission() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_permission( + parent="parent_value", + permission=gag_permission.Permission(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].permission + mock_val = gag_permission.Permission(name="name_value") + assert arg == mock_val + + +def test_create_permission_flattened_error(): + client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_permission( + permission_service.CreatePermissionRequest(), + parent="parent_value", + permission=gag_permission.Permission(name="name_value"), + ) + + +@pytest.mark.asyncio +async def test_create_permission_flattened_async(): + client = PermissionServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_permission), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = gag_permission.Permission() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gag_permission.Permission() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_permission( + parent="parent_value", + permission=gag_permission.Permission(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].permission + mock_val = gag_permission.Permission(name="name_value") + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_permission_flattened_error_async(): + client = PermissionServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.create_permission( + permission_service.CreatePermissionRequest(), + parent="parent_value", + permission=gag_permission.Permission(name="name_value"), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + permission_service.GetPermissionRequest, + dict, + ], +) +def test_get_permission(request_type, transport: str = "grpc"): + client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_permission), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = permission.Permission( + name="name_value", + grantee_type=permission.Permission.GranteeType.USER, + email_address="email_address_value", + role=permission.Permission.Role.OWNER, + ) + response = client.get_permission(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = permission_service.GetPermissionRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, permission.Permission) + assert response.name == "name_value" + assert response.grantee_type == permission.Permission.GranteeType.USER + assert response.email_address == "email_address_value" + assert response.role == permission.Permission.Role.OWNER + + +def test_get_permission_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = permission_service.GetPermissionRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_permission), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_permission(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == permission_service.GetPermissionRequest( + name="name_value", + ) + + +def test_get_permission_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_permission in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[client._transport.get_permission] = mock_rpc + request = {} + client.get_permission(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_permission(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_permission_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = PermissionServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_permission + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.get_permission + ] = mock_rpc + + request = {} + await client.get_permission(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.get_permission(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_permission_async( + transport: str = "grpc_asyncio", + request_type=permission_service.GetPermissionRequest, +): + client = PermissionServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_permission), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + permission.Permission( + name="name_value", + grantee_type=permission.Permission.GranteeType.USER, + email_address="email_address_value", + role=permission.Permission.Role.OWNER, + ) + ) + response = await client.get_permission(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = permission_service.GetPermissionRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, permission.Permission) + assert response.name == "name_value" + assert response.grantee_type == permission.Permission.GranteeType.USER + assert response.email_address == "email_address_value" + assert response.role == permission.Permission.Role.OWNER + + +@pytest.mark.asyncio +async def test_get_permission_async_from_dict(): + await test_get_permission_async(request_type=dict) + + +def test_get_permission_field_headers(): + client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = permission_service.GetPermissionRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_permission), "__call__") as call: + call.return_value = permission.Permission() + client.get_permission(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_permission_field_headers_async(): + client = PermissionServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = permission_service.GetPermissionRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_permission), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + permission.Permission() + ) + await client.get_permission(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_permission_flattened(): + client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_permission), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = permission.Permission() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_permission( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_permission_flattened_error(): + client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_permission( + permission_service.GetPermissionRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_permission_flattened_async(): + client = PermissionServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_permission), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = permission.Permission() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + permission.Permission() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.get_permission( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_permission_flattened_error_async(): + client = PermissionServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_permission( + permission_service.GetPermissionRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + permission_service.ListPermissionsRequest, + dict, + ], +) +def test_list_permissions(request_type, transport: str = "grpc"): + client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_permissions), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = permission_service.ListPermissionsResponse( + next_page_token="next_page_token_value", + ) + response = client.list_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = permission_service.ListPermissionsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListPermissionsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_permissions_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = permission_service.ListPermissionsRequest( + parent="parent_value", + page_token="page_token_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_permissions), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.list_permissions(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == permission_service.ListPermissionsRequest( + parent="parent_value", + page_token="page_token_value", + ) + + +def test_list_permissions_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_permissions in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_permissions + ] = mock_rpc + request = {} + client.list_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_permissions(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_permissions_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = PermissionServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_permissions + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.list_permissions + ] = mock_rpc + + request = {} + await client.list_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.list_permissions(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_permissions_async( + transport: str = "grpc_asyncio", + request_type=permission_service.ListPermissionsRequest, +): + client = PermissionServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_permissions), "__call__") as call: + # Designate an appropriate return value for the call. 
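+        # Wrapping the response in FakeUnaryUnaryCall lets the mocked stub be
+        # awaited like a real async unary-unary call and yield the message.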
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + permission_service.ListPermissionsResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = permission_service.ListPermissionsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListPermissionsAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_permissions_async_from_dict(): + await test_list_permissions_async(request_type=dict) + + +def test_list_permissions_field_headers(): + client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = permission_service.ListPermissionsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_permissions), "__call__") as call: + call.return_value = permission_service.ListPermissionsResponse() + client.list_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_permissions_field_headers_async(): + client = PermissionServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = permission_service.ListPermissionsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_permissions), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + permission_service.ListPermissionsResponse() + ) + await client.list_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_permissions_flattened(): + client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_permissions), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = permission_service.ListPermissionsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_permissions( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_permissions_flattened_error(): + client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_permissions( + permission_service.ListPermissionsRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_permissions_flattened_async(): + client = PermissionServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_permissions), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = permission_service.ListPermissionsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + permission_service.ListPermissionsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_permissions( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_permissions_flattened_error_async(): + client = PermissionServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_permissions( + permission_service.ListPermissionsRequest(), + parent="parent_value", + ) + + +def test_list_permissions_pager(transport_name: str = "grpc"): + client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_permissions), "__call__") as call: + # Set the response to a series of pages. 
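+        # Each side_effect entry is consumed by one page fetch; the trailing
+        # RuntimeError fails the test if the pager requests more pages than expected.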
+ call.side_effect = ( + permission_service.ListPermissionsResponse( + permissions=[ + permission.Permission(), + permission.Permission(), + permission.Permission(), + ], + next_page_token="abc", + ), + permission_service.ListPermissionsResponse( + permissions=[], + next_page_token="def", + ), + permission_service.ListPermissionsResponse( + permissions=[ + permission.Permission(), + ], + next_page_token="ghi", + ), + permission_service.ListPermissionsResponse( + permissions=[ + permission.Permission(), + permission.Permission(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_permissions(request={}, retry=retry, timeout=timeout) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, permission.Permission) for i in results) + + +def test_list_permissions_pages(transport_name: str = "grpc"): + client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_permissions), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + permission_service.ListPermissionsResponse( + permissions=[ + permission.Permission(), + permission.Permission(), + permission.Permission(), + ], + next_page_token="abc", + ), + permission_service.ListPermissionsResponse( + permissions=[], + next_page_token="def", + ), + permission_service.ListPermissionsResponse( + permissions=[ + permission.Permission(), + ], + next_page_token="ghi", + ), + permission_service.ListPermissionsResponse( + permissions=[ + permission.Permission(), + permission.Permission(), + ], + ), + RuntimeError, + ) + pages = list(client.list_permissions(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_permissions_async_pager(): + client = PermissionServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_permissions), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
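+        # With new_callable=mock.AsyncMock the patched stub is awaitable, so each
+        # awaited call consumes the next response from side_effect.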
+ call.side_effect = ( + permission_service.ListPermissionsResponse( + permissions=[ + permission.Permission(), + permission.Permission(), + permission.Permission(), + ], + next_page_token="abc", + ), + permission_service.ListPermissionsResponse( + permissions=[], + next_page_token="def", + ), + permission_service.ListPermissionsResponse( + permissions=[ + permission.Permission(), + ], + next_page_token="ghi", + ), + permission_service.ListPermissionsResponse( + permissions=[ + permission.Permission(), + permission.Permission(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_permissions( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, permission.Permission) for i in responses) + + +@pytest.mark.asyncio +async def test_list_permissions_async_pages(): + client = PermissionServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_permissions), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + permission_service.ListPermissionsResponse( + permissions=[ + permission.Permission(), + permission.Permission(), + permission.Permission(), + ], + next_page_token="abc", + ), + permission_service.ListPermissionsResponse( + permissions=[], + next_page_token="def", + ), + permission_service.ListPermissionsResponse( + permissions=[ + permission.Permission(), + ], + next_page_token="ghi", + ), + permission_service.ListPermissionsResponse( + permissions=[ + permission.Permission(), + permission.Permission(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_permissions(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + permission_service.UpdatePermissionRequest, + dict, + ], +) +def test_update_permission(request_type, transport: str = "grpc"): + client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_permission), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = gag_permission.Permission( + name="name_value", + grantee_type=gag_permission.Permission.GranteeType.USER, + email_address="email_address_value", + role=gag_permission.Permission.Role.OWNER, + ) + response = client.update_permission(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = permission_service.UpdatePermissionRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, gag_permission.Permission) + assert response.name == "name_value" + assert response.grantee_type == gag_permission.Permission.GranteeType.USER + assert response.email_address == "email_address_value" + assert response.role == gag_permission.Permission.Role.OWNER + + +def test_update_permission_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = permission_service.UpdatePermissionRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_permission), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.update_permission(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == permission_service.UpdatePermissionRequest() + + +def test_update_permission_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_permission in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_permission + ] = mock_rpc + request = {} + client.update_permission(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.update_permission(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_permission_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = PermissionServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.update_permission + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.update_permission + ] = mock_rpc + + request = {} + await client.update_permission(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.update_permission(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_permission_async( + transport: str = "grpc_asyncio", + request_type=permission_service.UpdatePermissionRequest, +): + client = PermissionServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_permission), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gag_permission.Permission( + name="name_value", + grantee_type=gag_permission.Permission.GranteeType.USER, + email_address="email_address_value", + role=gag_permission.Permission.Role.OWNER, + ) + ) + response = await client.update_permission(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = permission_service.UpdatePermissionRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, gag_permission.Permission) + assert response.name == "name_value" + assert response.grantee_type == gag_permission.Permission.GranteeType.USER + assert response.email_address == "email_address_value" + assert response.role == gag_permission.Permission.Role.OWNER + + +@pytest.mark.asyncio +async def test_update_permission_async_from_dict(): + await test_update_permission_async(request_type=dict) + + +def test_update_permission_field_headers(): + client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
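+    # For a nested field the routing header key is the full field path
+    # ("permission.name"), as asserted on the request metadata below.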
+ request = permission_service.UpdatePermissionRequest() + + request.permission.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_permission), "__call__" + ) as call: + call.return_value = gag_permission.Permission() + client.update_permission(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "permission.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_permission_field_headers_async(): + client = PermissionServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = permission_service.UpdatePermissionRequest() + + request.permission.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_permission), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gag_permission.Permission() + ) + await client.update_permission(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "permission.name=name_value", + ) in kw["metadata"] + + +def test_update_permission_flattened(): + client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_permission), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = gag_permission.Permission() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_permission( + permission=gag_permission.Permission(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].permission + mock_val = gag_permission.Permission(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +def test_update_permission_flattened_error(): + client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_permission( + permission_service.UpdatePermissionRequest(), + permission=gag_permission.Permission(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_update_permission_flattened_async(): + client = PermissionServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_permission), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = gag_permission.Permission() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gag_permission.Permission() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_permission( + permission=gag_permission.Permission(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].permission + mock_val = gag_permission.Permission(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_update_permission_flattened_error_async(): + client = PermissionServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_permission( + permission_service.UpdatePermissionRequest(), + permission=gag_permission.Permission(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + permission_service.DeletePermissionRequest, + dict, + ], +) +def test_delete_permission(request_type, transport: str = "grpc"): + client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_permission), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_permission(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = permission_service.DeletePermissionRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_permission_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = permission_service.DeletePermissionRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_permission), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.delete_permission(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == permission_service.DeletePermissionRequest( + name="name_value", + ) + + +def test_delete_permission_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_permission in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_permission + ] = mock_rpc + request = {} + client.delete_permission(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.delete_permission(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_permission_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = PermissionServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.delete_permission + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.delete_permission + ] = mock_rpc + + request = {} + await client.delete_permission(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.delete_permission(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_permission_async( + transport: str = "grpc_asyncio", + request_type=permission_service.DeletePermissionRequest, +): + client = PermissionServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_permission), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_permission(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = permission_service.DeletePermissionRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_permission_async_from_dict(): + await test_delete_permission_async(request_type=dict) + + +def test_delete_permission_field_headers(): + client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = permission_service.DeletePermissionRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_permission), "__call__" + ) as call: + call.return_value = None + client.delete_permission(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_permission_field_headers_async(): + client = PermissionServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = permission_service.DeletePermissionRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_permission), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_permission(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_permission_flattened(): + client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_permission), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_permission( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_permission_flattened_error(): + client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_permission( + permission_service.DeletePermissionRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_permission_flattened_async(): + client = PermissionServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_permission), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_permission( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_permission_flattened_error_async(): + client = PermissionServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_permission( + permission_service.DeletePermissionRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + permission_service.TransferOwnershipRequest, + dict, + ], +) +def test_transfer_ownership(request_type, transport: str = "grpc"): + client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.transfer_ownership), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = permission_service.TransferOwnershipResponse() + response = client.transfer_ownership(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = permission_service.TransferOwnershipRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, permission_service.TransferOwnershipResponse) + + +def test_transfer_ownership_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = permission_service.TransferOwnershipRequest( + name="name_value", + email_address="email_address_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.transfer_ownership), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.transfer_ownership(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == permission_service.TransferOwnershipRequest( + name="name_value", + email_address="email_address_value", + ) + + +def test_transfer_ownership_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.transfer_ownership in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.transfer_ownership + ] = mock_rpc + request = {} + client.transfer_ownership(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.transfer_ownership(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_transfer_ownership_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = PermissionServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.transfer_ownership + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.transfer_ownership + ] = mock_rpc + + request = {} + await client.transfer_ownership(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.transfer_ownership(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_transfer_ownership_async( + transport: str = "grpc_asyncio", + request_type=permission_service.TransferOwnershipRequest, +): + client = PermissionServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.transfer_ownership), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + permission_service.TransferOwnershipResponse() + ) + response = await client.transfer_ownership(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = permission_service.TransferOwnershipRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, permission_service.TransferOwnershipResponse) + + +@pytest.mark.asyncio +async def test_transfer_ownership_async_from_dict(): + await test_transfer_ownership_async(request_type=dict) + + +def test_transfer_ownership_field_headers(): + client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = permission_service.TransferOwnershipRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.transfer_ownership), "__call__" + ) as call: + call.return_value = permission_service.TransferOwnershipResponse() + client.transfer_ownership(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_transfer_ownership_field_headers_async(): + client = PermissionServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = permission_service.TransferOwnershipRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.transfer_ownership), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + permission_service.TransferOwnershipResponse() + ) + await client.transfer_ownership(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_create_permission_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_permission in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[ + client._transport.create_permission + ] = mock_rpc + + request = {} + client.create_permission(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.create_permission(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_permission_rest_required_fields( + request_type=permission_service.CreatePermissionRequest, +): + transport_class = transports.PermissionServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_permission._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_permission._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = gag_permission.Permission() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
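+            # transcode() is mocked as well, so the request below does not have
+            # to match the real http_options; the synthetic result stands in for
+            # the URI/method/query_params/body normally derived from them.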
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = gag_permission.Permission.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.create_permission(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_permission_rest_unset_required_fields(): + transport = transports.PermissionServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_permission._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "parent", + "permission", + ) + ) + ) + + +def test_create_permission_rest_flattened(): + client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = gag_permission.Permission() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "tunedModels/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + permission=gag_permission.Permission(name="name_value"), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = gag_permission.Permission.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.create_permission(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{parent=tunedModels/*}/permissions" % client.transport._host, + args[1], + ) + + +def test_create_permission_rest_flattened_error(transport: str = "rest"): + client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_permission( + permission_service.CreatePermissionRequest(), + parent="parent_value", + permission=gag_permission.Permission(name="name_value"), + ) + + +def test_get_permission_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_permission in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_permission] = mock_rpc + + request = {} + client.get_permission(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_permission(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_permission_rest_required_fields( + request_type=permission_service.GetPermissionRequest, +): + transport_class = transports.PermissionServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_permission._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_permission._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = permission.Permission() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = permission.Permission.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_permission(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_permission_rest_unset_required_fields(): + transport = transports.PermissionServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_permission._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +def test_get_permission_rest_flattened(): + client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = permission.Permission() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "tunedModels/sample1/permissions/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = permission.Permission.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.get_permission(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{name=tunedModels/*/permissions/*}" % client.transport._host, + args[1], + ) + + +def test_get_permission_rest_flattened_error(transport: str = "rest"): + client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
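+    # The client rejects this ambiguous combination up front, before any
+    # HTTP request is attempted.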
+ with pytest.raises(ValueError): + client.get_permission( + permission_service.GetPermissionRequest(), + name="name_value", + ) + + +def test_list_permissions_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_permissions in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_permissions + ] = mock_rpc + + request = {} + client.list_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_permissions(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_permissions_rest_required_fields( + request_type=permission_service.ListPermissionsRequest, +): + transport_class = transports.PermissionServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_permissions._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_permissions._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = permission_service.ListPermissionsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = permission_service.ListPermissionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_permissions(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_permissions_rest_unset_required_fields(): + transport = transports.PermissionServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_permissions._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +def test_list_permissions_rest_flattened(): + client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = permission_service.ListPermissionsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "tunedModels/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = permission_service.ListPermissionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.list_permissions(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{parent=tunedModels/*}/permissions" % client.transport._host, + args[1], + ) + + +def test_list_permissions_rest_flattened_error(transport: str = "rest"): + client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_permissions( + permission_service.ListPermissionsRequest(), + parent="parent_value", + ) + + +def test_list_permissions_rest_pager(transport: str = "rest"): + client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + permission_service.ListPermissionsResponse( + permissions=[ + permission.Permission(), + permission.Permission(), + permission.Permission(), + ], + next_page_token="abc", + ), + permission_service.ListPermissionsResponse( + permissions=[], + next_page_token="def", + ), + permission_service.ListPermissionsResponse( + permissions=[ + permission.Permission(), + ], + next_page_token="ghi", + ), + permission_service.ListPermissionsResponse( + permissions=[ + permission.Permission(), + permission.Permission(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + permission_service.ListPermissionsResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "tunedModels/sample1"} + + pager = client.list_permissions(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, permission.Permission) for i in results) + + pages = list(client.list_permissions(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_update_permission_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_permission in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_permission + ] = mock_rpc + + request = {} + client.update_permission(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.update_permission(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_permission_rest_required_fields( + request_type=permission_service.UpdatePermissionRequest, +): + transport_class = transports.PermissionServiceRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_permission._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_permission._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("update_mask",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = gag_permission.Permission() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = gag_permission.Permission.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.update_permission(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_permission_rest_unset_required_fields(): + transport = transports.PermissionServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_permission._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("updateMask",)) + & set( + ( + "permission", + "updateMask", + ) + ) + ) + + +def test_update_permission_rest_flattened(): + client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
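+    # The flattened arguments below must transcode to a URI matching the
+    # v1alpha binding {permission.name=tunedModels/*/permissions/*}, which is
+    # what the path_template.validate() assertion at the end verifies.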
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = gag_permission.Permission() + + # get arguments that satisfy an http rule for this method + sample_request = { + "permission": {"name": "tunedModels/sample1/permissions/sample2"} + } + + # get truthy value for each flattened field + mock_args = dict( + permission=gag_permission.Permission(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = gag_permission.Permission.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.update_permission(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{permission.name=tunedModels/*/permissions/*}" + % client.transport._host, + args[1], + ) + + +def test_update_permission_rest_flattened_error(transport: str = "rest"): + client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_permission( + permission_service.UpdatePermissionRequest(), + permission=gag_permission.Permission(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_delete_permission_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_permission in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_permission + ] = mock_rpc + + request = {} + client.delete_permission(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.delete_permission(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_permission_rest_required_fields( + request_type=permission_service.DeletePermissionRequest, +): + transport_class = transports.PermissionServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_permission._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_permission._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.delete_permission(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_permission_rest_unset_required_fields(): + transport = transports.PermissionServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_permission._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +def test_delete_permission_rest_flattened(): + client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
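+        # delete_permission has no response payload, so an empty JSON body
+        # with a 200 status is all the mocked session needs to return.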
+ return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "tunedModels/sample1/permissions/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.delete_permission(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{name=tunedModels/*/permissions/*}" % client.transport._host, + args[1], + ) + + +def test_delete_permission_rest_flattened_error(transport: str = "rest"): + client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_permission( + permission_service.DeletePermissionRequest(), + name="name_value", + ) + + +def test_transfer_ownership_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.transfer_ownership in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.transfer_ownership + ] = mock_rpc + + request = {} + client.transfer_ownership(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.transfer_ownership(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_transfer_ownership_rest_required_fields( + request_type=permission_service.TransferOwnershipRequest, +): + transport_class = transports.PermissionServiceRestTransport + + request_init = {} + request_init["name"] = "" + request_init["email_address"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).transfer_ownership._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + jsonified_request["emailAddress"] = "email_address_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).transfer_ownership._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + assert "emailAddress" in jsonified_request + assert jsonified_request["emailAddress"] == "email_address_value" + + client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = permission_service.TransferOwnershipResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = permission_service.TransferOwnershipResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.transfer_ownership(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_transfer_ownership_rest_unset_required_fields(): + transport = transports.PermissionServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.transfer_ownership._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "name", + "emailAddress", + ) + ) + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.PermissionServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.PermissionServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = PermissionServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.PermissionServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = PermissionServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = PermissionServiceClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.PermissionServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = PermissionServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.PermissionServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = PermissionServiceClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. 
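+    # Both the sync and asyncio gRPC transports expose their underlying
+    # channel via the grpc_channel property checked below.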
+ transport = transports.PermissionServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.PermissionServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.PermissionServiceGrpcTransport, + transports.PermissionServiceGrpcAsyncIOTransport, + transports.PermissionServiceRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +def test_transport_kind_grpc(): + transport = PermissionServiceClient.get_transport_class("grpc")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "grpc" + + +def test_initialize_client_w_grpc(): + client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_permission_empty_call_grpc(): + client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_permission), "__call__" + ) as call: + call.return_value = gag_permission.Permission() + client.create_permission(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = permission_service.CreatePermissionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_permission_empty_call_grpc(): + client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_permission), "__call__") as call: + call.return_value = permission.Permission() + client.get_permission(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = permission_service.GetPermissionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_permissions_empty_call_grpc(): + client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.list_permissions), "__call__") as call: + call.return_value = permission_service.ListPermissionsResponse() + client.list_permissions(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = permission_service.ListPermissionsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. 
request == None and no flattened fields passed, work. +def test_update_permission_empty_call_grpc(): + client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_permission), "__call__" + ) as call: + call.return_value = gag_permission.Permission() + client.update_permission(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = permission_service.UpdatePermissionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_permission_empty_call_grpc(): + client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_permission), "__call__" + ) as call: + call.return_value = None + client.delete_permission(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = permission_service.DeletePermissionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_transfer_ownership_empty_call_grpc(): + client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.transfer_ownership), "__call__" + ) as call: + call.return_value = permission_service.TransferOwnershipResponse() + client.transfer_ownership(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = permission_service.TransferOwnershipRequest() + + assert args[0] == request_msg + + +def test_transport_kind_grpc_asyncio(): + transport = PermissionServiceAsyncClient.get_transport_class("grpc_asyncio")( + credentials=async_anonymous_credentials() + ) + assert transport.kind == "grpc_asyncio" + + +def test_initialize_client_w_grpc_asyncio(): + client = PermissionServiceAsyncClient( + credentials=async_anonymous_credentials(), transport="grpc_asyncio" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_permission_empty_call_grpc_asyncio(): + client = PermissionServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_permission), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gag_permission.Permission( + name="name_value", + grantee_type=gag_permission.Permission.GranteeType.USER, + email_address="email_address_value", + role=gag_permission.Permission.Role.OWNER, + ) + ) + await client.create_permission(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = permission_service.CreatePermissionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_permission_empty_call_grpc_asyncio(): + client = PermissionServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_permission), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + permission.Permission( + name="name_value", + grantee_type=permission.Permission.GranteeType.USER, + email_address="email_address_value", + role=permission.Permission.Role.OWNER, + ) + ) + await client.get_permission(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = permission_service.GetPermissionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_permissions_empty_call_grpc_asyncio(): + client = PermissionServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.list_permissions), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + permission_service.ListPermissionsResponse( + next_page_token="next_page_token_value", + ) + ) + await client.list_permissions(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = permission_service.ListPermissionsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_permission_empty_call_grpc_asyncio(): + client = PermissionServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_permission), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gag_permission.Permission( + name="name_value", + grantee_type=gag_permission.Permission.GranteeType.USER, + email_address="email_address_value", + role=gag_permission.Permission.Role.OWNER, + ) + ) + await client.update_permission(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = permission_service.UpdatePermissionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+@pytest.mark.asyncio +async def test_delete_permission_empty_call_grpc_asyncio(): + client = PermissionServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_permission), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_permission(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = permission_service.DeletePermissionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_transfer_ownership_empty_call_grpc_asyncio(): + client = PermissionServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.transfer_ownership), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + permission_service.TransferOwnershipResponse() + ) + await client.transfer_ownership(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = permission_service.TransferOwnershipRequest() + + assert args[0] == request_msg + + +def test_transport_kind_rest(): + transport = PermissionServiceClient.get_transport_class("rest")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "rest" + + +def test_create_permission_rest_bad_request( + request_type=permission_service.CreatePermissionRequest, +): + client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"parent": "tunedModels/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.create_permission(request) + + +@pytest.mark.parametrize( + "request_type", + [ + permission_service.CreatePermissionRequest, + dict, + ], +) +def test_create_permission_rest_call_success(request_type): + client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "tunedModels/sample1"} + request_init["permission"] = { + "name": "name_value", + "grantee_type": 1, + "email_address": "email_address_value", + "role": 1, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = permission_service.CreatePermissionRequest.meta.fields["permission"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["permission"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["permission"][field])): + del request_init["permission"][field][i][subfield] + else: + del request_init["permission"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
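+        # The Permission below is what the REST layer is expected to decode
+        # from the JSON-encoded response body faked further down.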
+ return_value = gag_permission.Permission( + name="name_value", + grantee_type=gag_permission.Permission.GranteeType.USER, + email_address="email_address_value", + role=gag_permission.Permission.Role.OWNER, + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = gag_permission.Permission.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.create_permission(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, gag_permission.Permission) + assert response.name == "name_value" + assert response.grantee_type == gag_permission.Permission.GranteeType.USER + assert response.email_address == "email_address_value" + assert response.role == gag_permission.Permission.Role.OWNER + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_permission_rest_interceptors(null_interceptor): + transport = transports.PermissionServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.PermissionServiceRestInterceptor(), + ) + client = PermissionServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.PermissionServiceRestInterceptor, "post_create_permission" + ) as post, mock.patch.object( + transports.PermissionServiceRestInterceptor, + "post_create_permission_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.PermissionServiceRestInterceptor, "pre_create_permission" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = permission_service.CreatePermissionRequest.pb( + permission_service.CreatePermissionRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = gag_permission.Permission.to_json(gag_permission.Permission()) + req.return_value.content = return_value + + request = permission_service.CreatePermissionRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = gag_permission.Permission() + post_with_metadata.return_value = gag_permission.Permission(), metadata + + client.create_permission( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_get_permission_rest_bad_request( + request_type=permission_service.GetPermissionRequest, +): + client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"name": "tunedModels/sample1/permissions/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_permission(request) + + +@pytest.mark.parametrize( + "request_type", + [ + permission_service.GetPermissionRequest, + dict, + ], +) +def test_get_permission_rest_call_success(request_type): + client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"name": "tunedModels/sample1/permissions/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = permission.Permission( + name="name_value", + grantee_type=permission.Permission.GranteeType.USER, + email_address="email_address_value", + role=permission.Permission.Role.OWNER, + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = permission.Permission.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.get_permission(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, permission.Permission) + assert response.name == "name_value" + assert response.grantee_type == permission.Permission.GranteeType.USER + assert response.email_address == "email_address_value" + assert response.role == permission.Permission.Role.OWNER + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_permission_rest_interceptors(null_interceptor): + transport = transports.PermissionServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.PermissionServiceRestInterceptor(), + ) + client = PermissionServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.PermissionServiceRestInterceptor, "post_get_permission" + ) as post, mock.patch.object( + transports.PermissionServiceRestInterceptor, "post_get_permission_with_metadata" + ) as post_with_metadata, mock.patch.object( + transports.PermissionServiceRestInterceptor, "pre_get_permission" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = permission_service.GetPermissionRequest.pb( + permission_service.GetPermissionRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = permission.Permission.to_json(permission.Permission()) + req.return_value.content = return_value + + request = permission_service.GetPermissionRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = permission.Permission() + post_with_metadata.return_value = permission.Permission(), metadata + + client.get_permission( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_list_permissions_rest_bad_request( + request_type=permission_service.ListPermissionsRequest, +): + client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"parent": "tunedModels/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_permissions(request) + + +@pytest.mark.parametrize( + "request_type", + [ + permission_service.ListPermissionsRequest, + dict, + ], +) +def test_list_permissions_rest_call_success(request_type): + client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "tunedModels/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = permission_service.ListPermissionsResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = permission_service.ListPermissionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.list_permissions(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListPermissionsPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_permissions_rest_interceptors(null_interceptor): + transport = transports.PermissionServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.PermissionServiceRestInterceptor(), + ) + client = PermissionServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.PermissionServiceRestInterceptor, "post_list_permissions" + ) as post, mock.patch.object( + transports.PermissionServiceRestInterceptor, + "post_list_permissions_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.PermissionServiceRestInterceptor, "pre_list_permissions" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = permission_service.ListPermissionsRequest.pb( + permission_service.ListPermissionsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = permission_service.ListPermissionsResponse.to_json( + permission_service.ListPermissionsResponse() + ) + req.return_value.content = return_value + + request = permission_service.ListPermissionsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = permission_service.ListPermissionsResponse() + post_with_metadata.return_value = ( + permission_service.ListPermissionsResponse(), + metadata, + ) + + client.list_permissions( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_update_permission_rest_bad_request( + request_type=permission_service.UpdatePermissionRequest, +): + client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"permission": {"name": "tunedModels/sample1/permissions/sample2"}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.update_permission(request) + + +@pytest.mark.parametrize( + "request_type", + [ + permission_service.UpdatePermissionRequest, + dict, + ], +) +def test_update_permission_rest_call_success(request_type): + client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"permission": {"name": "tunedModels/sample1/permissions/sample2"}} + request_init["permission"] = { + "name": "tunedModels/sample1/permissions/sample2", + "grantee_type": 1, + "email_address": "email_address_value", + "role": 1, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = permission_service.UpdatePermissionRequest.meta.fields["permission"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["permission"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in 
range(0, len(request_init["permission"][field])): + del request_init["permission"][field][i][subfield] + else: + del request_init["permission"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = gag_permission.Permission( + name="name_value", + grantee_type=gag_permission.Permission.GranteeType.USER, + email_address="email_address_value", + role=gag_permission.Permission.Role.OWNER, + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = gag_permission.Permission.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.update_permission(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, gag_permission.Permission) + assert response.name == "name_value" + assert response.grantee_type == gag_permission.Permission.GranteeType.USER + assert response.email_address == "email_address_value" + assert response.role == gag_permission.Permission.Role.OWNER + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_permission_rest_interceptors(null_interceptor): + transport = transports.PermissionServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.PermissionServiceRestInterceptor(), + ) + client = PermissionServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.PermissionServiceRestInterceptor, "post_update_permission" + ) as post, mock.patch.object( + transports.PermissionServiceRestInterceptor, + "post_update_permission_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.PermissionServiceRestInterceptor, "pre_update_permission" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = permission_service.UpdatePermissionRequest.pb( + permission_service.UpdatePermissionRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = gag_permission.Permission.to_json(gag_permission.Permission()) + req.return_value.content = return_value + + request = permission_service.UpdatePermissionRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = gag_permission.Permission() + post_with_metadata.return_value = gag_permission.Permission(), metadata + + client.update_permission( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_delete_permission_rest_bad_request( + 
request_type=permission_service.DeletePermissionRequest, +): + client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"name": "tunedModels/sample1/permissions/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.delete_permission(request) + + +@pytest.mark.parametrize( + "request_type", + [ + permission_service.DeletePermissionRequest, + dict, + ], +) +def test_delete_permission_rest_call_success(request_type): + client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"name": "tunedModels/sample1/permissions/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = "" + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.delete_permission(request) + + # Establish that the response is the type that we expect. 
+ assert response is None + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_permission_rest_interceptors(null_interceptor): + transport = transports.PermissionServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.PermissionServiceRestInterceptor(), + ) + client = PermissionServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.PermissionServiceRestInterceptor, "pre_delete_permission" + ) as pre: + pre.assert_not_called() + pb_message = permission_service.DeletePermissionRequest.pb( + permission_service.DeletePermissionRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + request = permission_service.DeletePermissionRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.delete_permission( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + + +def test_transfer_ownership_rest_bad_request( + request_type=permission_service.TransferOwnershipRequest, +): + client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"name": "tunedModels/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.transfer_ownership(request) + + +@pytest.mark.parametrize( + "request_type", + [ + permission_service.TransferOwnershipRequest, + dict, + ], +) +def test_transfer_ownership_rest_call_success(request_type): + client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"name": "tunedModels/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = permission_service.TransferOwnershipResponse() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = permission_service.TransferOwnershipResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.transfer_ownership(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, permission_service.TransferOwnershipResponse) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_transfer_ownership_rest_interceptors(null_interceptor): + transport = transports.PermissionServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.PermissionServiceRestInterceptor(), + ) + client = PermissionServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.PermissionServiceRestInterceptor, "post_transfer_ownership" + ) as post, mock.patch.object( + transports.PermissionServiceRestInterceptor, + "post_transfer_ownership_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.PermissionServiceRestInterceptor, "pre_transfer_ownership" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = permission_service.TransferOwnershipRequest.pb( + permission_service.TransferOwnershipRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = permission_service.TransferOwnershipResponse.to_json( + permission_service.TransferOwnershipResponse() + ) + req.return_value.content = return_value + + request = permission_service.TransferOwnershipRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = permission_service.TransferOwnershipResponse() + post_with_metadata.return_value = ( + permission_service.TransferOwnershipResponse(), + metadata, + ) + + client.transfer_ownership( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_get_operation_rest_bad_request( + request_type=operations_pb2.GetOperationRequest, +): + client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict( + {"name": "tunedModels/sample1/operations/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.GetOperationRequest, + dict, + ], +) +def test_get_operation_rest(request_type): + client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {"name": "tunedModels/sample1/operations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_operation(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_list_operations_rest_bad_request( + request_type=operations_pb2.ListOperationsRequest, +): + client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict({"name": "tunedModels/sample1"}, request) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_operations(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.ListOperationsRequest, + dict, + ], +) +def test_list_operations_rest(request_type): + client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {"name": "tunedModels/sample1"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.ListOperationsResponse() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_operations(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_initialize_client_w_rest(): + client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_permission_empty_call_rest(): + client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_permission), "__call__" + ) as call: + client.create_permission(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = permission_service.CreatePermissionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_permission_empty_call_rest(): + client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_permission), "__call__") as call: + client.get_permission(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = permission_service.GetPermissionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_permissions_empty_call_rest(): + client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.list_permissions), "__call__") as call: + client.list_permissions(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = permission_service.ListPermissionsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_permission_empty_call_rest(): + client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_permission), "__call__" + ) as call: + client.update_permission(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = permission_service.UpdatePermissionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_permission_empty_call_rest(): + client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_permission), "__call__" + ) as call: + client.delete_permission(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = permission_service.DeletePermissionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_transfer_ownership_empty_call_rest(): + client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.transfer_ownership), "__call__" + ) as call: + client.transfer_ownership(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = permission_service.TransferOwnershipRequest() + + assert args[0] == request_msg + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.PermissionServiceGrpcTransport, + ) + + +def test_permission_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.PermissionServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_permission_service_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.ai.generativelanguage_v1alpha.services.permission_service.transports.PermissionServiceTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.PermissionServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + "create_permission", + "get_permission", + "list_permissions", + "update_permission", + "delete_permission", + "transfer_ownership", + "get_operation", + "list_operations", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_permission_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.ai.generativelanguage_v1alpha.services.permission_service.transports.PermissionServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.PermissionServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=(), + quota_project_id="octopus", + ) + + +def test_permission_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.ai.generativelanguage_v1alpha.services.permission_service.transports.PermissionServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.PermissionServiceTransport() + adc.assert_called_once() + + +def test_permission_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + PermissionServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=(), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.PermissionServiceGrpcTransport, + transports.PermissionServiceGrpcAsyncIOTransport, + ], +) +def test_permission_service_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=(), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.PermissionServiceGrpcTransport, + transports.PermissionServiceGrpcAsyncIOTransport, + transports.PermissionServiceRestTransport, + ], +) +def test_permission_service_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.PermissionServiceGrpcTransport, grpc_helpers), + (transports.PermissionServiceGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +def test_permission_service_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "generativelanguage.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=(), + scopes=["1", "2"], + default_host="generativelanguage.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.PermissionServiceGrpcTransport, + transports.PermissionServiceGrpcAsyncIOTransport, + ], +) +def test_permission_service_grpc_transport_client_cert_source_for_mtls(transport_class): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + +def test_permission_service_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.PermissionServiceRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_permission_service_host_no_port(transport_name): + client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="generativelanguage.googleapis.com" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "generativelanguage.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://generativelanguage.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_permission_service_host_with_port(transport_name): + client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="generativelanguage.googleapis.com:8000" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "generativelanguage.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://generativelanguage.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_permission_service_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = PermissionServiceClient( + credentials=creds1, + transport=transport_name, + ) + client2 = PermissionServiceClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.create_permission._session + session2 = client2.transport.create_permission._session + assert session1 != session2 + session1 = client1.transport.get_permission._session + session2 = client2.transport.get_permission._session + assert session1 != session2 + session1 = client1.transport.list_permissions._session + session2 = client2.transport.list_permissions._session + assert session1 != session2 + session1 = client1.transport.update_permission._session + session2 = client2.transport.update_permission._session + assert session1 != session2 + session1 = client1.transport.delete_permission._session + session2 = client2.transport.delete_permission._session + assert session1 != session2 + session1 = client1.transport.transfer_ownership._session + session2 = client2.transport.transfer_ownership._session + assert session1 != session2 + + +def test_permission_service_grpc_transport_channel(): + channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. 
+ transport = transports.PermissionServiceGrpcTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +def test_permission_service_grpc_asyncio_transport_channel(): + channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.PermissionServiceGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize( + "transport_class", + [ + transports.PermissionServiceGrpcTransport, + transports.PermissionServiceGrpcAsyncIOTransport, + ], +) +def test_permission_service_transport_channel_mtls_with_client_cert_source( + transport_class, +): + with mock.patch( + "grpc.ssl_channel_credentials", autospec=True + ) as grpc_ssl_channel_cred: + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. 
+@pytest.mark.parametrize( + "transport_class", + [ + transports.PermissionServiceGrpcTransport, + transports.PermissionServiceGrpcAsyncIOTransport, + ], +) +def test_permission_service_transport_channel_mtls_with_adc(transport_class): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_permission_path(): + tuned_model = "squid" + permission = "clam" + expected = "tunedModels/{tuned_model}/permissions/{permission}".format( + tuned_model=tuned_model, + permission=permission, + ) + actual = PermissionServiceClient.permission_path(tuned_model, permission) + assert expected == actual + + +def test_parse_permission_path(): + expected = { + "tuned_model": "whelk", + "permission": "octopus", + } + path = PermissionServiceClient.permission_path(**expected) + + # Check that the path construction is reversible. + actual = PermissionServiceClient.parse_permission_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "oyster" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = PermissionServiceClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "nudibranch", + } + path = PermissionServiceClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = PermissionServiceClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "cuttlefish" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = PermissionServiceClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "mussel", + } + path = PermissionServiceClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = PermissionServiceClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "winkle" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = PermissionServiceClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nautilus", + } + path = PermissionServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. 
+ actual = PermissionServiceClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "scallop" + expected = "projects/{project}".format( + project=project, + ) + actual = PermissionServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "abalone", + } + path = PermissionServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = PermissionServiceClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "squid" + location = "clam" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = PermissionServiceClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "whelk", + "location": "octopus", + } + path = PermissionServiceClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = PermissionServiceClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.PermissionServiceTransport, "_prep_wrapped_messages" + ) as prep: + client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.PermissionServiceTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = PermissionServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +def test_get_operation(transport: str = "grpc"): + client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + response = client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +@pytest.mark.asyncio +async def test_get_operation_async(transport: str = "grpc_asyncio"): + client = PermissionServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_get_operation_field_headers(): + client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = operations_pb2.Operation() + + client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_operation_field_headers_async(): + client = PermissionServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_get_operation_from_dict(): + client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + + response = client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_get_operation_from_dict_async(): + client = PermissionServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_operations(transport: str = "grpc"): + client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + response = client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +@pytest.mark.asyncio +async def test_list_operations_async(transport: str = "grpc_asyncio"): + client = PermissionServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_list_operations_field_headers(): + client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = operations_pb2.ListOperationsResponse() + + client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_operations_field_headers_async(): + client = PermissionServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_list_operations_from_dict(): + client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + + response = client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_list_operations_from_dict_async(): + client = PermissionServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_transport_close_grpc(): + client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc" + ) + with mock.patch.object( + type(getattr(client.transport, "_grpc_channel")), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +@pytest.mark.asyncio +async def test_transport_close_grpc_asyncio(): + client = PermissionServiceAsyncClient( + credentials=async_anonymous_credentials(), transport="grpc_asyncio" + ) + with mock.patch.object( + type(getattr(client.transport, "_grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_transport_close_rest(): + client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + with mock.patch.object( + type(getattr(client.transport, "_session")), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + "grpc", + ] + for transport in transports: + client = PermissionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (PermissionServiceClient, transports.PermissionServiceGrpcTransport), + ( + PermissionServiceAsyncClient, + transports.PermissionServiceGrpcAsyncIOTransport, + ), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1alpha/test_prediction_service.py b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1alpha/test_prediction_service.py new file mode 100644 index 000000000000..0d29e41c0d55 --- /dev/null +++ b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1alpha/test_prediction_service.py @@ -0,0 +1,3056 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +from collections.abc import AsyncIterable, Iterable +import json +import math + +from google.api_core import api_core_version +from google.protobuf import json_format +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +try: + from google.auth.aio import credentials as ga_credentials_async + + HAS_GOOGLE_AUTH_AIO = True +except ImportError: # pragma: NO COVER + HAS_GOOGLE_AUTH_AIO = False + +from google.api_core import gapic_v1, grpc_helpers, grpc_helpers_async, path_template +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account +from google.protobuf import struct_pb2 # type: ignore + +from google.ai.generativelanguage_v1alpha.services.prediction_service import ( + PredictionServiceAsyncClient, + PredictionServiceClient, + transports, +) +from google.ai.generativelanguage_v1alpha.types import prediction_service + +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + + +async def mock_async_gen(data, chunk_size=1): + for i in range(0, len(data)): # pragma: NO COVER + chunk = data[i : i + chunk_size] + yield chunk.encode("utf-8") + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded. +# See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107. +def async_anonymous_credentials(): + if HAS_GOOGLE_AUTH_AIO: + return ga_credentials_async.AnonymousCredentials() + return ga_credentials.AnonymousCredentials() + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +# If default endpoint template is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint template so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint_template(client): + return ( + "test.{UNIVERSE_DOMAIN}" + if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) + else client._DEFAULT_ENDPOINT_TEMPLATE + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert PredictionServiceClient._get_default_mtls_endpoint(None) is None + assert ( + PredictionServiceClient._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + PredictionServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + PredictionServiceClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + PredictionServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + PredictionServiceClient._get_default_mtls_endpoint(non_googleapi) + == non_googleapi + ) + + +def test__read_environment_variables(): + assert PredictionServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert PredictionServiceClient._read_environment_variables() == ( + True, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert PredictionServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + PredictionServiceClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + assert PredictionServiceClient._read_environment_variables() == ( + False, + "never", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + assert PredictionServiceClient._read_environment_variables() == ( + False, + "always", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): + assert PredictionServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + PredictionServiceClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): + assert PredictionServiceClient._read_environment_variables() == ( + False, + "auto", + "foo.com", + ) + + +def test__get_client_cert_source(): + mock_provided_cert_source = mock.Mock() + mock_default_cert_source = mock.Mock() + + assert PredictionServiceClient._get_client_cert_source(None, False) is None + assert ( + PredictionServiceClient._get_client_cert_source( + mock_provided_cert_source, False + ) + is None + ) + assert ( + PredictionServiceClient._get_client_cert_source(mock_provided_cert_source, True) + == mock_provided_cert_source + ) + + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", 
return_value=True + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_default_cert_source, + ): + assert ( + PredictionServiceClient._get_client_cert_source(None, True) + is mock_default_cert_source + ) + assert ( + PredictionServiceClient._get_client_cert_source( + mock_provided_cert_source, "true" + ) + is mock_provided_cert_source + ) + + +@mock.patch.object( + PredictionServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(PredictionServiceClient), +) +@mock.patch.object( + PredictionServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(PredictionServiceAsyncClient), +) +def test__get_api_endpoint(): + api_override = "foo.com" + mock_client_cert_source = mock.Mock() + default_universe = PredictionServiceClient._DEFAULT_UNIVERSE + default_endpoint = PredictionServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = PredictionServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + assert ( + PredictionServiceClient._get_api_endpoint( + api_override, mock_client_cert_source, default_universe, "always" + ) + == api_override + ) + assert ( + PredictionServiceClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "auto" + ) + == PredictionServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + PredictionServiceClient._get_api_endpoint(None, None, default_universe, "auto") + == default_endpoint + ) + assert ( + PredictionServiceClient._get_api_endpoint( + None, None, default_universe, "always" + ) + == PredictionServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + PredictionServiceClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "always" + ) + == PredictionServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + PredictionServiceClient._get_api_endpoint(None, None, mock_universe, "never") + == mock_endpoint + ) + assert ( + PredictionServiceClient._get_api_endpoint(None, None, default_universe, "never") + == default_endpoint + ) + + with pytest.raises(MutualTLSChannelError) as excinfo: + PredictionServiceClient._get_api_endpoint( + None, mock_client_cert_source, mock_universe, "auto" + ) + assert ( + str(excinfo.value) + == "mTLS is not supported in any universe other than googleapis.com." + ) + + +def test__get_universe_domain(): + client_universe_domain = "foo.com" + universe_domain_env = "bar.com" + + assert ( + PredictionServiceClient._get_universe_domain( + client_universe_domain, universe_domain_env + ) + == client_universe_domain + ) + assert ( + PredictionServiceClient._get_universe_domain(None, universe_domain_env) + == universe_domain_env + ) + assert ( + PredictionServiceClient._get_universe_domain(None, None) + == PredictionServiceClient._DEFAULT_UNIVERSE + ) + + with pytest.raises(ValueError) as excinfo: + PredictionServiceClient._get_universe_domain("", None) + assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
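+
+# Editor's note (illustrative sketch, not part of the generated test file): the
+# precedence exercised by test__get_universe_domain above is, roughly, an
+# explicitly supplied universe domain first, then the value read from the
+# GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variable, then the library default.
+# `resolve_universe_domain` below is a hypothetical helper written only to show
+# that ordering; the real logic lives in
+# PredictionServiceClient._get_universe_domain.
+#
+# def resolve_universe_domain(client_value, env_value, default="googleapis.com"):
+#     # An explicit empty string is rejected, mirroring the ValueError asserted above.
+#     if client_value == "":
+#         raise ValueError("Universe Domain cannot be an empty string.")
+#     # Client-provided value wins, then the environment value, then the default.
+#     return client_value or env_value or default
+#
+# assert resolve_universe_domain("foo.com", "bar.com") == "foo.com"
+# assert resolve_universe_domain(None, "bar.com") == "bar.com"
+# assert resolve_universe_domain(None, None) == "googleapis.com"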
+ + +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = PredictionServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = PredictionServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (PredictionServiceClient, "grpc"), + (PredictionServiceAsyncClient, "grpc_asyncio"), + (PredictionServiceClient, "rest"), + ], +) +def test_prediction_service_client_from_service_account_info( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "generativelanguage.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://generativelanguage.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.PredictionServiceGrpcTransport, "grpc"), + (transports.PredictionServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.PredictionServiceRestTransport, "rest"), + ], +) +def test_prediction_service_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (PredictionServiceClient, "grpc"), + (PredictionServiceAsyncClient, "grpc_asyncio"), + (PredictionServiceClient, "rest"), + ], +) +def test_prediction_service_client_from_service_account_file( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as 
factory: + factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "generativelanguage.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://generativelanguage.googleapis.com" + ) + + +def test_prediction_service_client_get_transport_class(): + transport = PredictionServiceClient.get_transport_class() + available_transports = [ + transports.PredictionServiceGrpcTransport, + transports.PredictionServiceRestTransport, + ] + assert transport in available_transports + + transport = PredictionServiceClient.get_transport_class("grpc") + assert transport == transports.PredictionServiceGrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (PredictionServiceClient, transports.PredictionServiceGrpcTransport, "grpc"), + ( + PredictionServiceAsyncClient, + transports.PredictionServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (PredictionServiceClient, transports.PredictionServiceRestTransport, "rest"), + ], +) +@mock.patch.object( + PredictionServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(PredictionServiceClient), +) +@mock.patch.object( + PredictionServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(PredictionServiceAsyncClient), +) +def test_prediction_service_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(PredictionServiceClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(PredictionServiceClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + ( + PredictionServiceClient, + 
transports.PredictionServiceGrpcTransport, + "grpc", + "true", + ), + ( + PredictionServiceAsyncClient, + transports.PredictionServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + ( + PredictionServiceClient, + transports.PredictionServiceGrpcTransport, + "grpc", + "false", + ), + ( + PredictionServiceAsyncClient, + transports.PredictionServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + ( + PredictionServiceClient, + transports.PredictionServiceRestTransport, + "rest", + "true", + ), + ( + PredictionServiceClient, + transports.PredictionServiceRestTransport, + "rest", + "false", + ), + ], +) +@mock.patch.object( + PredictionServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(PredictionServiceClient), +) +@mock.patch.object( + PredictionServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(PredictionServiceAsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_prediction_service_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class", [PredictionServiceClient, PredictionServiceAsyncClient] +) +@mock.patch.object( + PredictionServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(PredictionServiceClient), +) +@mock.patch.object( + PredictionServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(PredictionServiceAsyncClient), +) +def test_prediction_service_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + +@pytest.mark.parametrize( + "client_class", [PredictionServiceClient, PredictionServiceAsyncClient] +) +@mock.patch.object( + PredictionServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(PredictionServiceClient), +) +@mock.patch.object( + PredictionServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(PredictionServiceAsyncClient), +) +def test_prediction_service_client_client_api_endpoint(client_class): + mock_client_cert_source = client_cert_source_callback + api_override = "foo.com" + default_universe = PredictionServiceClient._DEFAULT_UNIVERSE + default_endpoint = PredictionServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = PredictionServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", + # use ClientOptions.api_endpoint as the api endpoint regardless. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ): + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=api_override + ) + client = client_class( + client_options=options, + credentials=ga_credentials.AnonymousCredentials(), + ) + assert client.api_endpoint == api_override + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, + # and ClientOptions.universe_domain="bar.com", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. + options = client_options.ClientOptions() + universe_exists = hasattr(options, "universe_domain") + if universe_exists: + options = client_options.ClientOptions(universe_domain=mock_universe) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + else: + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == ( + mock_endpoint if universe_exists else default_endpoint + ) + assert client.universe_domain == ( + mock_universe if universe_exists else default_universe + ) + + # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + options = client_options.ClientOptions() + if hasattr(options, "universe_domain"): + delattr(options, "universe_domain") + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == default_endpoint + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (PredictionServiceClient, transports.PredictionServiceGrpcTransport, "grpc"), + ( + PredictionServiceAsyncClient, + transports.PredictionServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (PredictionServiceClient, transports.PredictionServiceRestTransport, "rest"), + ], +) +def test_prediction_service_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. 
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + PredictionServiceClient, + transports.PredictionServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + PredictionServiceAsyncClient, + transports.PredictionServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ( + PredictionServiceClient, + transports.PredictionServiceRestTransport, + "rest", + None, + ), + ], +) +def test_prediction_service_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_prediction_service_client_client_options_from_dict(): + with mock.patch( + "google.ai.generativelanguage_v1alpha.services.prediction_service.transports.PredictionServiceGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = PredictionServiceClient( + client_options={"api_endpoint": "squid.clam.whelk"} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + PredictionServiceClient, + transports.PredictionServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + PredictionServiceAsyncClient, + transports.PredictionServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_prediction_service_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "generativelanguage.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=(), + scopes=None, + default_host="generativelanguage.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + prediction_service.PredictRequest, + dict, + ], +) +def test_predict(request_type, transport: str = "grpc"): + client = PredictionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.predict), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = prediction_service.PredictResponse() + response = client.predict(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = prediction_service.PredictRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, prediction_service.PredictResponse) + + +def test_predict_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = PredictionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = prediction_service.PredictRequest( + model="model_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.predict), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.predict(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == prediction_service.PredictRequest( + model="model_value", + ) + + +def test_predict_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = PredictionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.predict in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.predict] = mock_rpc + request = {} + client.predict(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.predict(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_predict_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = PredictionServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.predict + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.predict + ] = mock_rpc + + request = {} + await client.predict(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.predict(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_predict_async( + transport: str = "grpc_asyncio", request_type=prediction_service.PredictRequest +): + client = PredictionServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.predict), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + prediction_service.PredictResponse() + ) + response = await client.predict(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = prediction_service.PredictRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, prediction_service.PredictResponse) + + +@pytest.mark.asyncio +async def test_predict_async_from_dict(): + await test_predict_async(request_type=dict) + + +def test_predict_field_headers(): + client = PredictionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = prediction_service.PredictRequest() + + request.model = "model_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.predict), "__call__") as call: + call.return_value = prediction_service.PredictResponse() + client.predict(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "model=model_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_predict_field_headers_async(): + client = PredictionServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = prediction_service.PredictRequest() + + request.model = "model_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.predict), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + prediction_service.PredictResponse() + ) + await client.predict(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "model=model_value", + ) in kw["metadata"] + + +def test_predict_flattened(): + client = PredictionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.predict), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = prediction_service.PredictResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.predict( + model="model_value", + instances=[struct_pb2.Value(null_value=struct_pb2.NullValue.NULL_VALUE)], + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].model + mock_val = "model_value" + assert arg == mock_val + arg = args[0].instances + mock_val = [struct_pb2.Value(null_value=struct_pb2.NullValue.NULL_VALUE)] + assert arg == mock_val + + +def test_predict_flattened_error(): + client = PredictionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.predict( + prediction_service.PredictRequest(), + model="model_value", + instances=[struct_pb2.Value(null_value=struct_pb2.NullValue.NULL_VALUE)], + ) + + +@pytest.mark.asyncio +async def test_predict_flattened_async(): + client = PredictionServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.predict), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = prediction_service.PredictResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + prediction_service.PredictResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.predict( + model="model_value", + instances=[struct_pb2.Value(null_value=struct_pb2.NullValue.NULL_VALUE)], + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].model + mock_val = "model_value" + assert arg == mock_val + arg = args[0].instances + mock_val = [struct_pb2.Value(null_value=struct_pb2.NullValue.NULL_VALUE)] + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_predict_flattened_error_async(): + client = PredictionServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.predict( + prediction_service.PredictRequest(), + model="model_value", + instances=[struct_pb2.Value(null_value=struct_pb2.NullValue.NULL_VALUE)], + ) + + +def test_predict_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = PredictionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.predict in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.predict] = mock_rpc + + request = {} + client.predict(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.predict(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_predict_rest_required_fields(request_type=prediction_service.PredictRequest): + transport_class = transports.PredictionServiceRestTransport + + request_init = {} + request_init["model"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).predict._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["model"] = "model_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).predict._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "model" in jsonified_request + assert jsonified_request["model"] == "model_value" + + client = PredictionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = prediction_service.PredictResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = prediction_service.PredictResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.predict(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_predict_rest_unset_required_fields(): + transport = transports.PredictionServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.predict._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "model", + "instances", + ) + ) + ) + + +def test_predict_rest_flattened(): + client = PredictionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = prediction_service.PredictResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"model": "models/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + model="model_value", + instances=[struct_pb2.Value(null_value=struct_pb2.NullValue.NULL_VALUE)], + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = prediction_service.PredictResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.predict(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{model=models/*}:predict" % client.transport._host, args[1] + ) + + +def test_predict_rest_flattened_error(transport: str = "rest"): + client = PredictionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.predict( + prediction_service.PredictRequest(), + model="model_value", + instances=[struct_pb2.Value(null_value=struct_pb2.NullValue.NULL_VALUE)], + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.PredictionServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = PredictionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.PredictionServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = PredictionServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.PredictionServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = PredictionServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = PredictionServiceClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. 
+ transport = transports.PredictionServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = PredictionServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.PredictionServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = PredictionServiceClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.PredictionServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.PredictionServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.PredictionServiceGrpcTransport, + transports.PredictionServiceGrpcAsyncIOTransport, + transports.PredictionServiceRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +def test_transport_kind_grpc(): + transport = PredictionServiceClient.get_transport_class("grpc")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "grpc" + + +def test_initialize_client_w_grpc(): + client = PredictionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_predict_empty_call_grpc(): + client = PredictionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.predict), "__call__") as call: + call.return_value = prediction_service.PredictResponse() + client.predict(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = prediction_service.PredictRequest() + + assert args[0] == request_msg + + +def test_transport_kind_grpc_asyncio(): + transport = PredictionServiceAsyncClient.get_transport_class("grpc_asyncio")( + credentials=async_anonymous_credentials() + ) + assert transport.kind == "grpc_asyncio" + + +def test_initialize_client_w_grpc_asyncio(): + client = PredictionServiceAsyncClient( + credentials=async_anonymous_credentials(), transport="grpc_asyncio" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_predict_empty_call_grpc_asyncio(): + client = PredictionServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.predict), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + prediction_service.PredictResponse() + ) + await client.predict(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = prediction_service.PredictRequest() + + assert args[0] == request_msg + + +def test_transport_kind_rest(): + transport = PredictionServiceClient.get_transport_class("rest")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "rest" + + +def test_predict_rest_bad_request(request_type=prediction_service.PredictRequest): + client = PredictionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"model": "models/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.predict(request) + + +@pytest.mark.parametrize( + "request_type", + [ + prediction_service.PredictRequest, + dict, + ], +) +def test_predict_rest_call_success(request_type): + client = PredictionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"model": "models/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = prediction_service.PredictResponse() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = prediction_service.PredictResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.predict(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, prediction_service.PredictResponse) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_predict_rest_interceptors(null_interceptor): + transport = transports.PredictionServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.PredictionServiceRestInterceptor(), + ) + client = PredictionServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.PredictionServiceRestInterceptor, "post_predict" + ) as post, mock.patch.object( + transports.PredictionServiceRestInterceptor, "post_predict_with_metadata" + ) as post_with_metadata, mock.patch.object( + transports.PredictionServiceRestInterceptor, "pre_predict" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = prediction_service.PredictRequest.pb( + prediction_service.PredictRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = prediction_service.PredictResponse.to_json( + prediction_service.PredictResponse() + ) + req.return_value.content = return_value + + request = prediction_service.PredictRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = prediction_service.PredictResponse() + post_with_metadata.return_value = prediction_service.PredictResponse(), metadata + + client.predict( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_get_operation_rest_bad_request( + request_type=operations_pb2.GetOperationRequest, +): + client = PredictionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict( + {"name": "tunedModels/sample1/operations/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.GetOperationRequest, + dict, + ], +) +def test_get_operation_rest(request_type): + client = PredictionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {"name": "tunedModels/sample1/operations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_operation(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_list_operations_rest_bad_request( + request_type=operations_pb2.ListOperationsRequest, +): + client = PredictionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict({"name": "tunedModels/sample1"}, request) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_operations(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.ListOperationsRequest, + dict, + ], +) +def test_list_operations_rest(request_type): + client = PredictionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {"name": "tunedModels/sample1"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.ListOperationsResponse() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_operations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_initialize_client_w_rest(): + client = PredictionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_predict_empty_call_rest(): + client = PredictionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.predict), "__call__") as call: + client.predict(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = prediction_service.PredictRequest() + + assert args[0] == request_msg + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. 
+ client = PredictionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.PredictionServiceGrpcTransport, + ) + + +def test_prediction_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.PredictionServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_prediction_service_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.ai.generativelanguage_v1alpha.services.prediction_service.transports.PredictionServiceTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.PredictionServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. + methods = ( + "predict", + "get_operation", + "list_operations", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_prediction_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.ai.generativelanguage_v1alpha.services.prediction_service.transports.PredictionServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.PredictionServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=(), + quota_project_id="octopus", + ) + + +def test_prediction_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.ai.generativelanguage_v1alpha.services.prediction_service.transports.PredictionServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.PredictionServiceTransport() + adc.assert_called_once() + + +def test_prediction_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + PredictionServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=(), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.PredictionServiceGrpcTransport, + transports.PredictionServiceGrpcAsyncIOTransport, + ], +) +def test_prediction_service_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=(), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.PredictionServiceGrpcTransport, + transports.PredictionServiceGrpcAsyncIOTransport, + transports.PredictionServiceRestTransport, + ], +) +def test_prediction_service_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.PredictionServiceGrpcTransport, grpc_helpers), + (transports.PredictionServiceGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +def test_prediction_service_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "generativelanguage.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=(), + scopes=["1", "2"], + default_host="generativelanguage.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.PredictionServiceGrpcTransport, + transports.PredictionServiceGrpcAsyncIOTransport, + ], +) +def test_prediction_service_grpc_transport_client_cert_source_for_mtls(transport_class): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + +def test_prediction_service_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.PredictionServiceRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_prediction_service_host_no_port(transport_name): + client = PredictionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="generativelanguage.googleapis.com" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "generativelanguage.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://generativelanguage.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_prediction_service_host_with_port(transport_name): + client = PredictionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="generativelanguage.googleapis.com:8000" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "generativelanguage.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://generativelanguage.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_prediction_service_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = PredictionServiceClient( + credentials=creds1, + transport=transport_name, + ) + client2 = PredictionServiceClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.predict._session + session2 = client2.transport.predict._session + assert session1 != session2 + + +def test_prediction_service_grpc_transport_channel(): + channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.PredictionServiceGrpcTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +def test_prediction_service_grpc_asyncio_transport_channel(): + channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. 
+ transport = transports.PredictionServiceGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize( + "transport_class", + [ + transports.PredictionServiceGrpcTransport, + transports.PredictionServiceGrpcAsyncIOTransport, + ], +) +def test_prediction_service_transport_channel_mtls_with_client_cert_source( + transport_class, +): + with mock.patch( + "grpc.ssl_channel_credentials", autospec=True + ) as grpc_ssl_channel_cred: + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. 
+@pytest.mark.parametrize( + "transport_class", + [ + transports.PredictionServiceGrpcTransport, + transports.PredictionServiceGrpcAsyncIOTransport, + ], +) +def test_prediction_service_transport_channel_mtls_with_adc(transport_class): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_model_path(): + model = "squid" + expected = "models/{model}".format( + model=model, + ) + actual = PredictionServiceClient.model_path(model) + assert expected == actual + + +def test_parse_model_path(): + expected = { + "model": "clam", + } + path = PredictionServiceClient.model_path(**expected) + + # Check that the path construction is reversible. + actual = PredictionServiceClient.parse_model_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "whelk" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = PredictionServiceClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "octopus", + } + path = PredictionServiceClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = PredictionServiceClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "oyster" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = PredictionServiceClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "nudibranch", + } + path = PredictionServiceClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = PredictionServiceClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "cuttlefish" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = PredictionServiceClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "mussel", + } + path = PredictionServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. 
+ actual = PredictionServiceClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "winkle" + expected = "projects/{project}".format( + project=project, + ) + actual = PredictionServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "nautilus", + } + path = PredictionServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = PredictionServiceClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "scallop" + location = "abalone" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = PredictionServiceClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "squid", + "location": "clam", + } + path = PredictionServiceClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = PredictionServiceClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.PredictionServiceTransport, "_prep_wrapped_messages" + ) as prep: + client = PredictionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.PredictionServiceTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = PredictionServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +def test_get_operation(transport: str = "grpc"): + client = PredictionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + response = client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +@pytest.mark.asyncio +async def test_get_operation_async(transport: str = "grpc_asyncio"): + client = PredictionServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_get_operation_field_headers(): + client = PredictionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = operations_pb2.Operation() + + client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_operation_field_headers_async(): + client = PredictionServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_get_operation_from_dict(): + client = PredictionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + + response = client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_get_operation_from_dict_async(): + client = PredictionServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_operations(transport: str = "grpc"): + client = PredictionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + response = client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +@pytest.mark.asyncio +async def test_list_operations_async(transport: str = "grpc_asyncio"): + client = PredictionServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_list_operations_field_headers(): + client = PredictionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = operations_pb2.ListOperationsResponse() + + client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_operations_field_headers_async(): + client = PredictionServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_list_operations_from_dict(): + client = PredictionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + + response = client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_list_operations_from_dict_async(): + client = PredictionServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_transport_close_grpc(): + client = PredictionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc" + ) + with mock.patch.object( + type(getattr(client.transport, "_grpc_channel")), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +@pytest.mark.asyncio +async def test_transport_close_grpc_asyncio(): + client = PredictionServiceAsyncClient( + credentials=async_anonymous_credentials(), transport="grpc_asyncio" + ) + with mock.patch.object( + type(getattr(client.transport, "_grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_transport_close_rest(): + client = PredictionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + with mock.patch.object( + type(getattr(client.transport, "_session")), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + "grpc", + ] + for transport in transports: + client = PredictionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (PredictionServiceClient, transports.PredictionServiceGrpcTransport), + ( + PredictionServiceAsyncClient, + transports.PredictionServiceGrpcAsyncIOTransport, + ), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1alpha/test_retriever_service.py b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1alpha/test_retriever_service.py new file mode 100644 index 000000000000..3c558a65e1aa --- /dev/null +++ b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1alpha/test_retriever_service.py @@ -0,0 +1,16639 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +from collections.abc import AsyncIterable, Iterable +import json +import math + +from google.api_core import api_core_version +from google.protobuf import json_format +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +try: + from google.auth.aio import credentials as ga_credentials_async + + HAS_GOOGLE_AUTH_AIO = True +except ImportError: # pragma: NO COVER + HAS_GOOGLE_AUTH_AIO = False + +from google.api_core import gapic_v1, grpc_helpers, grpc_helpers_async, path_template +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + +from google.ai.generativelanguage_v1alpha.services.retriever_service import ( + RetrieverServiceAsyncClient, + RetrieverServiceClient, + pagers, + transports, +) +from google.ai.generativelanguage_v1alpha.types import retriever, retriever_service + +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + + +async def mock_async_gen(data, chunk_size=1): + for i in range(0, len(data)): # pragma: NO COVER + chunk = data[i : i + chunk_size] + yield chunk.encode("utf-8") + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded. +# See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107. +def async_anonymous_credentials(): + if HAS_GOOGLE_AUTH_AIO: + return ga_credentials_async.AnonymousCredentials() + return ga_credentials.AnonymousCredentials() + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +# If default endpoint template is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint template so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint_template(client): + return ( + "test.{UNIVERSE_DOMAIN}" + if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) + else client._DEFAULT_ENDPOINT_TEMPLATE + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert RetrieverServiceClient._get_default_mtls_endpoint(None) is None + assert ( + RetrieverServiceClient._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + RetrieverServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + RetrieverServiceClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + RetrieverServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + RetrieverServiceClient._get_default_mtls_endpoint(non_googleapi) + == non_googleapi + ) + + +def test__read_environment_variables(): + assert RetrieverServiceClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert RetrieverServiceClient._read_environment_variables() == ( + True, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert RetrieverServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + RetrieverServiceClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + assert RetrieverServiceClient._read_environment_variables() == ( + False, + "never", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + assert RetrieverServiceClient._read_environment_variables() == ( + False, + "always", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): + assert RetrieverServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + RetrieverServiceClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): + assert RetrieverServiceClient._read_environment_variables() == ( + False, + "auto", + "foo.com", + ) + + +def test__get_client_cert_source(): + mock_provided_cert_source = mock.Mock() + mock_default_cert_source = mock.Mock() + + assert RetrieverServiceClient._get_client_cert_source(None, False) is None + assert ( + RetrieverServiceClient._get_client_cert_source(mock_provided_cert_source, False) + is None + ) + assert ( + RetrieverServiceClient._get_client_cert_source(mock_provided_cert_source, True) + == mock_provided_cert_source + ) + + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", return_value=True + ): + with mock.patch( + 
"google.auth.transport.mtls.default_client_cert_source", + return_value=mock_default_cert_source, + ): + assert ( + RetrieverServiceClient._get_client_cert_source(None, True) + is mock_default_cert_source + ) + assert ( + RetrieverServiceClient._get_client_cert_source( + mock_provided_cert_source, "true" + ) + is mock_provided_cert_source + ) + + +@mock.patch.object( + RetrieverServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(RetrieverServiceClient), +) +@mock.patch.object( + RetrieverServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(RetrieverServiceAsyncClient), +) +def test__get_api_endpoint(): + api_override = "foo.com" + mock_client_cert_source = mock.Mock() + default_universe = RetrieverServiceClient._DEFAULT_UNIVERSE + default_endpoint = RetrieverServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = RetrieverServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + assert ( + RetrieverServiceClient._get_api_endpoint( + api_override, mock_client_cert_source, default_universe, "always" + ) + == api_override + ) + assert ( + RetrieverServiceClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "auto" + ) + == RetrieverServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + RetrieverServiceClient._get_api_endpoint(None, None, default_universe, "auto") + == default_endpoint + ) + assert ( + RetrieverServiceClient._get_api_endpoint(None, None, default_universe, "always") + == RetrieverServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + RetrieverServiceClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "always" + ) + == RetrieverServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + RetrieverServiceClient._get_api_endpoint(None, None, mock_universe, "never") + == mock_endpoint + ) + assert ( + RetrieverServiceClient._get_api_endpoint(None, None, default_universe, "never") + == default_endpoint + ) + + with pytest.raises(MutualTLSChannelError) as excinfo: + RetrieverServiceClient._get_api_endpoint( + None, mock_client_cert_source, mock_universe, "auto" + ) + assert ( + str(excinfo.value) + == "mTLS is not supported in any universe other than googleapis.com." + ) + + +def test__get_universe_domain(): + client_universe_domain = "foo.com" + universe_domain_env = "bar.com" + + assert ( + RetrieverServiceClient._get_universe_domain( + client_universe_domain, universe_domain_env + ) + == client_universe_domain + ) + assert ( + RetrieverServiceClient._get_universe_domain(None, universe_domain_env) + == universe_domain_env + ) + assert ( + RetrieverServiceClient._get_universe_domain(None, None) + == RetrieverServiceClient._DEFAULT_UNIVERSE + ) + + with pytest.raises(ValueError) as excinfo: + RetrieverServiceClient._get_universe_domain("", None) + assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+ + +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = RetrieverServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = RetrieverServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (RetrieverServiceClient, "grpc"), + (RetrieverServiceAsyncClient, "grpc_asyncio"), + (RetrieverServiceClient, "rest"), + ], +) +def test_retriever_service_client_from_service_account_info( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "generativelanguage.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://generativelanguage.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.RetrieverServiceGrpcTransport, "grpc"), + (transports.RetrieverServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.RetrieverServiceRestTransport, "rest"), + ], +) +def test_retriever_service_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (RetrieverServiceClient, "grpc"), + (RetrieverServiceAsyncClient, "grpc_asyncio"), + (RetrieverServiceClient, "rest"), + ], +) +def test_retriever_service_client_from_service_account_file( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + 
factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "generativelanguage.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://generativelanguage.googleapis.com" + ) + + +def test_retriever_service_client_get_transport_class(): + transport = RetrieverServiceClient.get_transport_class() + available_transports = [ + transports.RetrieverServiceGrpcTransport, + transports.RetrieverServiceRestTransport, + ] + assert transport in available_transports + + transport = RetrieverServiceClient.get_transport_class("grpc") + assert transport == transports.RetrieverServiceGrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (RetrieverServiceClient, transports.RetrieverServiceGrpcTransport, "grpc"), + ( + RetrieverServiceAsyncClient, + transports.RetrieverServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (RetrieverServiceClient, transports.RetrieverServiceRestTransport, "rest"), + ], +) +@mock.patch.object( + RetrieverServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(RetrieverServiceClient), +) +@mock.patch.object( + RetrieverServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(RetrieverServiceAsyncClient), +) +def test_retriever_service_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(RetrieverServiceClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(RetrieverServiceClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". 
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
+        with mock.patch.object(transport_class, "__init__") as patched:
+            patched.return_value = None
+            client = client_class(transport=transport_name)
+            patched.assert_called_once_with(
+                credentials=None,
+                credentials_file=None,
+                host=client._DEFAULT_ENDPOINT_TEMPLATE.format(
+                    UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE
+                ),
+                scopes=None,
+                client_cert_source_for_mtls=None,
+                quota_project_id=None,
+                client_info=transports.base.DEFAULT_CLIENT_INFO,
+                always_use_jwt_access=True,
+                api_audience=None,
+            )
+
+    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
+    # "always".
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
+        with mock.patch.object(transport_class, "__init__") as patched:
+            patched.return_value = None
+            client = client_class(transport=transport_name)
+            patched.assert_called_once_with(
+                credentials=None,
+                credentials_file=None,
+                host=client.DEFAULT_MTLS_ENDPOINT,
+                scopes=None,
+                client_cert_source_for_mtls=None,
+                quota_project_id=None,
+                client_info=transports.base.DEFAULT_CLIENT_INFO,
+                always_use_jwt_access=True,
+                api_audience=None,
+            )
+
+    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has
+    # unsupported value.
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}):
+        with pytest.raises(MutualTLSChannelError) as excinfo:
+            client = client_class(transport=transport_name)
+        assert (
+            str(excinfo.value)
+            == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`"
+        )
+
+    # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value.
+    with mock.patch.dict(
+        os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}
+    ):
+        with pytest.raises(ValueError) as excinfo:
+            client = client_class(transport=transport_name)
+        assert (
+            str(excinfo.value)
+            == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"
+        )
+
+    # Check the case quota_project_id is provided
+    options = client_options.ClientOptions(quota_project_id="octopus")
+    with mock.patch.object(transport_class, "__init__") as patched:
+        patched.return_value = None
+        client = client_class(client_options=options, transport=transport_name)
+        patched.assert_called_once_with(
+            credentials=None,
+            credentials_file=None,
+            host=client._DEFAULT_ENDPOINT_TEMPLATE.format(
+                UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE
+            ),
+            scopes=None,
+            client_cert_source_for_mtls=None,
+            quota_project_id="octopus",
+            client_info=transports.base.DEFAULT_CLIENT_INFO,
+            always_use_jwt_access=True,
+            api_audience=None,
+        )
+    # Check the case api_audience is provided
+    options = client_options.ClientOptions(
+        api_audience="https://language.googleapis.com"
+    )
+    with mock.patch.object(transport_class, "__init__") as patched:
+        patched.return_value = None
+        client = client_class(client_options=options, transport=transport_name)
+        patched.assert_called_once_with(
+            credentials=None,
+            credentials_file=None,
+            host=client._DEFAULT_ENDPOINT_TEMPLATE.format(
+                UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE
+            ),
+            scopes=None,
+            client_cert_source_for_mtls=None,
+            quota_project_id=None,
+            client_info=transports.base.DEFAULT_CLIENT_INFO,
+            always_use_jwt_access=True,
+            api_audience="https://language.googleapis.com",
+        )
+
+
+@pytest.mark.parametrize(
+    "client_class,transport_class,transport_name,use_client_cert_env",
+    [
+        (
+            RetrieverServiceClient,
transports.RetrieverServiceGrpcTransport, + "grpc", + "true", + ), + ( + RetrieverServiceAsyncClient, + transports.RetrieverServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + ( + RetrieverServiceClient, + transports.RetrieverServiceGrpcTransport, + "grpc", + "false", + ), + ( + RetrieverServiceAsyncClient, + transports.RetrieverServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + ( + RetrieverServiceClient, + transports.RetrieverServiceRestTransport, + "rest", + "true", + ), + ( + RetrieverServiceClient, + transports.RetrieverServiceRestTransport, + "rest", + "false", + ), + ], +) +@mock.patch.object( + RetrieverServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(RetrieverServiceClient), +) +@mock.patch.object( + RetrieverServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(RetrieverServiceAsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_retriever_service_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class", [RetrieverServiceClient, RetrieverServiceAsyncClient] +) +@mock.patch.object( + RetrieverServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(RetrieverServiceClient), +) +@mock.patch.object( + RetrieverServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(RetrieverServiceAsyncClient), +) +def test_retriever_service_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + +@pytest.mark.parametrize( + "client_class", [RetrieverServiceClient, RetrieverServiceAsyncClient] +) +@mock.patch.object( + RetrieverServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(RetrieverServiceClient), +) +@mock.patch.object( + RetrieverServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(RetrieverServiceAsyncClient), +) +def test_retriever_service_client_client_api_endpoint(client_class): + mock_client_cert_source = client_cert_source_callback + api_override = "foo.com" + default_universe = RetrieverServiceClient._DEFAULT_UNIVERSE + default_endpoint = RetrieverServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = RetrieverServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", + # use ClientOptions.api_endpoint as the api endpoint regardless. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ): + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=api_override + ) + client = client_class( + client_options=options, + credentials=ga_credentials.AnonymousCredentials(), + ) + assert client.api_endpoint == api_override + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, + # and ClientOptions.universe_domain="bar.com", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. + options = client_options.ClientOptions() + universe_exists = hasattr(options, "universe_domain") + if universe_exists: + options = client_options.ClientOptions(universe_domain=mock_universe) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + else: + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == ( + mock_endpoint if universe_exists else default_endpoint + ) + assert client.universe_domain == ( + mock_universe if universe_exists else default_universe + ) + + # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + options = client_options.ClientOptions() + if hasattr(options, "universe_domain"): + delattr(options, "universe_domain") + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == default_endpoint + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (RetrieverServiceClient, transports.RetrieverServiceGrpcTransport, "grpc"), + ( + RetrieverServiceAsyncClient, + transports.RetrieverServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (RetrieverServiceClient, transports.RetrieverServiceRestTransport, "rest"), + ], +) +def test_retriever_service_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. 
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + RetrieverServiceClient, + transports.RetrieverServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + RetrieverServiceAsyncClient, + transports.RetrieverServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ( + RetrieverServiceClient, + transports.RetrieverServiceRestTransport, + "rest", + None, + ), + ], +) +def test_retriever_service_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_retriever_service_client_client_options_from_dict(): + with mock.patch( + "google.ai.generativelanguage_v1alpha.services.retriever_service.transports.RetrieverServiceGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = RetrieverServiceClient( + client_options={"api_endpoint": "squid.clam.whelk"} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + RetrieverServiceClient, + transports.RetrieverServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + RetrieverServiceAsyncClient, + transports.RetrieverServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_retriever_service_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "generativelanguage.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=(), + scopes=None, + default_host="generativelanguage.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + retriever_service.CreateCorpusRequest, + dict, + ], +) +def test_create_corpus(request_type, transport: str = "grpc"): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_corpus), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = retriever.Corpus( + name="name_value", + display_name="display_name_value", + ) + response = client.create_corpus(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = retriever_service.CreateCorpusRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, retriever.Corpus) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + + +def test_create_corpus_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = retriever_service.CreateCorpusRequest() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.create_corpus), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.create_corpus(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == retriever_service.CreateCorpusRequest() + + +def test_create_corpus_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_corpus in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.create_corpus] = mock_rpc + request = {} + client.create_corpus(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.create_corpus(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_corpus_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = RetrieverServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.create_corpus + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.create_corpus + ] = mock_rpc + + request = {} + await client.create_corpus(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.create_corpus(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_corpus_async( + transport: str = "grpc_asyncio", request_type=retriever_service.CreateCorpusRequest +): + client = RetrieverServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_corpus), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + retriever.Corpus( + name="name_value", + display_name="display_name_value", + ) + ) + response = await client.create_corpus(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = retriever_service.CreateCorpusRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, retriever.Corpus) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + + +@pytest.mark.asyncio +async def test_create_corpus_async_from_dict(): + await test_create_corpus_async(request_type=dict) + + +def test_create_corpus_flattened(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_corpus), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = retriever.Corpus() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_corpus( + corpus=retriever.Corpus(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].corpus + mock_val = retriever.Corpus(name="name_value") + assert arg == mock_val + + +def test_create_corpus_flattened_error(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_corpus( + retriever_service.CreateCorpusRequest(), + corpus=retriever.Corpus(name="name_value"), + ) + + +@pytest.mark.asyncio +async def test_create_corpus_flattened_async(): + client = RetrieverServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_corpus), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = retriever.Corpus() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(retriever.Corpus()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_corpus( + corpus=retriever.Corpus(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].corpus + mock_val = retriever.Corpus(name="name_value") + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_corpus_flattened_error_async(): + client = RetrieverServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.create_corpus( + retriever_service.CreateCorpusRequest(), + corpus=retriever.Corpus(name="name_value"), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + retriever_service.GetCorpusRequest, + dict, + ], +) +def test_get_corpus(request_type, transport: str = "grpc"): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_corpus), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = retriever.Corpus( + name="name_value", + display_name="display_name_value", + ) + response = client.get_corpus(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = retriever_service.GetCorpusRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, retriever.Corpus) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + + +def test_get_corpus_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = retriever_service.GetCorpusRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_corpus), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_corpus(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == retriever_service.GetCorpusRequest( + name="name_value", + ) + + +def test_get_corpus_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_corpus in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_corpus] = mock_rpc + request = {} + client.get_corpus(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_corpus(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_corpus_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = RetrieverServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_corpus + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.get_corpus + ] = mock_rpc + + request = {} + await client.get_corpus(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.get_corpus(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_corpus_async( + transport: str = "grpc_asyncio", request_type=retriever_service.GetCorpusRequest +): + client = RetrieverServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_corpus), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + retriever.Corpus( + name="name_value", + display_name="display_name_value", + ) + ) + response = await client.get_corpus(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = retriever_service.GetCorpusRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, retriever.Corpus) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + + +@pytest.mark.asyncio +async def test_get_corpus_async_from_dict(): + await test_get_corpus_async(request_type=dict) + + +def test_get_corpus_field_headers(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = retriever_service.GetCorpusRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_corpus), "__call__") as call: + call.return_value = retriever.Corpus() + client.get_corpus(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_corpus_field_headers_async(): + client = RetrieverServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = retriever_service.GetCorpusRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_corpus), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(retriever.Corpus()) + await client.get_corpus(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_corpus_flattened(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_corpus), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = retriever.Corpus() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_corpus( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_corpus_flattened_error(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_corpus( + retriever_service.GetCorpusRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_corpus_flattened_async(): + client = RetrieverServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_corpus), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = retriever.Corpus() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(retriever.Corpus()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_corpus( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_corpus_flattened_error_async(): + client = RetrieverServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.get_corpus( + retriever_service.GetCorpusRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + retriever_service.UpdateCorpusRequest, + dict, + ], +) +def test_update_corpus(request_type, transport: str = "grpc"): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_corpus), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = retriever.Corpus( + name="name_value", + display_name="display_name_value", + ) + response = client.update_corpus(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = retriever_service.UpdateCorpusRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, retriever.Corpus) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + + +def test_update_corpus_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = retriever_service.UpdateCorpusRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_corpus), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.update_corpus(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == retriever_service.UpdateCorpusRequest() + + +def test_update_corpus_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_corpus in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.update_corpus] = mock_rpc + request = {} + client.update_corpus(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.update_corpus(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_corpus_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = RetrieverServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.update_corpus + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.update_corpus + ] = mock_rpc + + request = {} + await client.update_corpus(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.update_corpus(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_corpus_async( + transport: str = "grpc_asyncio", request_type=retriever_service.UpdateCorpusRequest +): + client = RetrieverServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_corpus), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + retriever.Corpus( + name="name_value", + display_name="display_name_value", + ) + ) + response = await client.update_corpus(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = retriever_service.UpdateCorpusRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, retriever.Corpus) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + + +@pytest.mark.asyncio +async def test_update_corpus_async_from_dict(): + await test_update_corpus_async(request_type=dict) + + +def test_update_corpus_field_headers(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = retriever_service.UpdateCorpusRequest() + + request.corpus.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_corpus), "__call__") as call: + call.return_value = retriever.Corpus() + client.update_corpus(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "corpus.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_corpus_field_headers_async(): + client = RetrieverServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = retriever_service.UpdateCorpusRequest() + + request.corpus.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_corpus), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(retriever.Corpus()) + await client.update_corpus(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "corpus.name=name_value", + ) in kw["metadata"] + + +def test_update_corpus_flattened(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_corpus), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = retriever.Corpus() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_corpus( + corpus=retriever.Corpus(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].corpus + mock_val = retriever.Corpus(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +def test_update_corpus_flattened_error(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_corpus( + retriever_service.UpdateCorpusRequest(), + corpus=retriever.Corpus(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_update_corpus_flattened_async(): + client = RetrieverServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_corpus), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = retriever.Corpus() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(retriever.Corpus()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.update_corpus( + corpus=retriever.Corpus(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].corpus + mock_val = retriever.Corpus(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_update_corpus_flattened_error_async(): + client = RetrieverServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_corpus( + retriever_service.UpdateCorpusRequest(), + corpus=retriever.Corpus(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + retriever_service.DeleteCorpusRequest, + dict, + ], +) +def test_delete_corpus(request_type, transport: str = "grpc"): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_corpus), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_corpus(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = retriever_service.DeleteCorpusRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_corpus_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = retriever_service.DeleteCorpusRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_corpus), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.delete_corpus(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == retriever_service.DeleteCorpusRequest( + name="name_value", + ) + + +def test_delete_corpus_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_corpus in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete_corpus] = mock_rpc + request = {} + client.delete_corpus(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.delete_corpus(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_corpus_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = RetrieverServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.delete_corpus + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.delete_corpus + ] = mock_rpc + + request = {} + await client.delete_corpus(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.delete_corpus(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_corpus_async( + transport: str = "grpc_asyncio", request_type=retriever_service.DeleteCorpusRequest +): + client = RetrieverServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_corpus), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_corpus(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = retriever_service.DeleteCorpusRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_corpus_async_from_dict(): + await test_delete_corpus_async(request_type=dict) + + +def test_delete_corpus_field_headers(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = retriever_service.DeleteCorpusRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_corpus), "__call__") as call: + call.return_value = None + client.delete_corpus(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_corpus_field_headers_async(): + client = RetrieverServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = retriever_service.DeleteCorpusRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_corpus), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_corpus(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_corpus_flattened(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_corpus), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_corpus( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_corpus_flattened_error(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_corpus( + retriever_service.DeleteCorpusRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_corpus_flattened_async(): + client = RetrieverServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.delete_corpus), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_corpus( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_corpus_flattened_error_async(): + client = RetrieverServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_corpus( + retriever_service.DeleteCorpusRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + retriever_service.ListCorporaRequest, + dict, + ], +) +def test_list_corpora(request_type, transport: str = "grpc"): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_corpora), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = retriever_service.ListCorporaResponse( + next_page_token="next_page_token_value", + ) + response = client.list_corpora(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = retriever_service.ListCorporaRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListCorporaPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_corpora_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = retriever_service.ListCorporaRequest( + page_token="page_token_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_corpora), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.list_corpora(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == retriever_service.ListCorporaRequest( + page_token="page_token_value", + ) + + +def test_list_corpora_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_corpora in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_corpora] = mock_rpc + request = {} + client.list_corpora(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_corpora(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_corpora_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = RetrieverServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_corpora + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.list_corpora + ] = mock_rpc + + request = {} + await client.list_corpora(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.list_corpora(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_corpora_async( + transport: str = "grpc_asyncio", request_type=retriever_service.ListCorporaRequest +): + client = RetrieverServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_corpora), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + retriever_service.ListCorporaResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_corpora(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = retriever_service.ListCorporaRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListCorporaAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_corpora_async_from_dict(): + await test_list_corpora_async(request_type=dict) + + +def test_list_corpora_pager(transport_name: str = "grpc"): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_corpora), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + retriever_service.ListCorporaResponse( + corpora=[ + retriever.Corpus(), + retriever.Corpus(), + retriever.Corpus(), + ], + next_page_token="abc", + ), + retriever_service.ListCorporaResponse( + corpora=[], + next_page_token="def", + ), + retriever_service.ListCorporaResponse( + corpora=[ + retriever.Corpus(), + ], + next_page_token="ghi", + ), + retriever_service.ListCorporaResponse( + corpora=[ + retriever.Corpus(), + retriever.Corpus(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + pager = client.list_corpora(request={}, retry=retry, timeout=timeout) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, retriever.Corpus) for i in results) + + +def test_list_corpora_pages(transport_name: str = "grpc"): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_corpora), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + retriever_service.ListCorporaResponse( + corpora=[ + retriever.Corpus(), + retriever.Corpus(), + retriever.Corpus(), + ], + next_page_token="abc", + ), + retriever_service.ListCorporaResponse( + corpora=[], + next_page_token="def", + ), + retriever_service.ListCorporaResponse( + corpora=[ + retriever.Corpus(), + ], + next_page_token="ghi", + ), + retriever_service.ListCorporaResponse( + corpora=[ + retriever.Corpus(), + retriever.Corpus(), + ], + ), + RuntimeError, + ) + pages = list(client.list_corpora(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_corpora_async_pager(): + client = RetrieverServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_corpora), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + retriever_service.ListCorporaResponse( + corpora=[ + retriever.Corpus(), + retriever.Corpus(), + retriever.Corpus(), + ], + next_page_token="abc", + ), + retriever_service.ListCorporaResponse( + corpora=[], + next_page_token="def", + ), + retriever_service.ListCorporaResponse( + corpora=[ + retriever.Corpus(), + ], + next_page_token="ghi", + ), + retriever_service.ListCorporaResponse( + corpora=[ + retriever.Corpus(), + retriever.Corpus(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_corpora( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, retriever.Corpus) for i in responses) + + +@pytest.mark.asyncio +async def test_list_corpora_async_pages(): + client = RetrieverServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_corpora), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + retriever_service.ListCorporaResponse( + corpora=[ + retriever.Corpus(), + retriever.Corpus(), + retriever.Corpus(), + ], + next_page_token="abc", + ), + retriever_service.ListCorporaResponse( + corpora=[], + next_page_token="def", + ), + retriever_service.ListCorporaResponse( + corpora=[ + retriever.Corpus(), + ], + next_page_token="ghi", + ), + retriever_service.ListCorporaResponse( + corpora=[ + retriever.Corpus(), + retriever.Corpus(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_corpora(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + retriever_service.QueryCorpusRequest, + dict, + ], +) +def test_query_corpus(request_type, transport: str = "grpc"): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.query_corpus), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = retriever_service.QueryCorpusResponse() + response = client.query_corpus(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = retriever_service.QueryCorpusRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, retriever_service.QueryCorpusResponse) + + +def test_query_corpus_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = retriever_service.QueryCorpusRequest( + name="name_value", + query="query_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.query_corpus), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.query_corpus(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == retriever_service.QueryCorpusRequest( + name="name_value", + query="query_value", + ) + + +def test_query_corpus_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.query_corpus in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.query_corpus] = mock_rpc + request = {} + client.query_corpus(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.query_corpus(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_query_corpus_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = RetrieverServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.query_corpus + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.query_corpus + ] = mock_rpc + + request = {} + await client.query_corpus(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + await client.query_corpus(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_query_corpus_async( + transport: str = "grpc_asyncio", request_type=retriever_service.QueryCorpusRequest +): + client = RetrieverServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.query_corpus), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + retriever_service.QueryCorpusResponse() + ) + response = await client.query_corpus(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = retriever_service.QueryCorpusRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, retriever_service.QueryCorpusResponse) + + +@pytest.mark.asyncio +async def test_query_corpus_async_from_dict(): + await test_query_corpus_async(request_type=dict) + + +def test_query_corpus_field_headers(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = retriever_service.QueryCorpusRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.query_corpus), "__call__") as call: + call.return_value = retriever_service.QueryCorpusResponse() + client.query_corpus(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_query_corpus_field_headers_async(): + client = RetrieverServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = retriever_service.QueryCorpusRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.query_corpus), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + retriever_service.QueryCorpusResponse() + ) + await client.query_corpus(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.parametrize( + "request_type", + [ + retriever_service.CreateDocumentRequest, + dict, + ], +) +def test_create_document(request_type, transport: str = "grpc"): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_document), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = retriever.Document( + name="name_value", + display_name="display_name_value", + ) + response = client.create_document(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = retriever_service.CreateDocumentRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, retriever.Document) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + + +def test_create_document_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = retriever_service.CreateDocumentRequest( + parent="parent_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_document), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.create_document(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == retriever_service.CreateDocumentRequest( + parent="parent_value", + ) + + +def test_create_document_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_document in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.create_document] = mock_rpc + request = {} + client.create_document(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.create_document(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_document_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = RetrieverServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.create_document + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.create_document + ] = mock_rpc + + request = {} + await client.create_document(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.create_document(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_document_async( + transport: str = "grpc_asyncio", + request_type=retriever_service.CreateDocumentRequest, +): + client = RetrieverServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_document), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + retriever.Document( + name="name_value", + display_name="display_name_value", + ) + ) + response = await client.create_document(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = retriever_service.CreateDocumentRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, retriever.Document) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + + +@pytest.mark.asyncio +async def test_create_document_async_from_dict(): + await test_create_document_async(request_type=dict) + + +def test_create_document_field_headers(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = retriever_service.CreateDocumentRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_document), "__call__") as call: + call.return_value = retriever.Document() + client.create_document(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_document_field_headers_async(): + client = RetrieverServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = retriever_service.CreateDocumentRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_document), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(retriever.Document()) + await client.create_document(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_document_flattened(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_document), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = retriever.Document() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_document( + parent="parent_value", + document=retriever.Document(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].document + mock_val = retriever.Document(name="name_value") + assert arg == mock_val + + +def test_create_document_flattened_error(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_document( + retriever_service.CreateDocumentRequest(), + parent="parent_value", + document=retriever.Document(name="name_value"), + ) + + +@pytest.mark.asyncio +async def test_create_document_flattened_async(): + client = RetrieverServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_document), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = retriever.Document() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(retriever.Document()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_document( + parent="parent_value", + document=retriever.Document(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].document + mock_val = retriever.Document(name="name_value") + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_document_flattened_error_async(): + client = RetrieverServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_document( + retriever_service.CreateDocumentRequest(), + parent="parent_value", + document=retriever.Document(name="name_value"), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + retriever_service.GetDocumentRequest, + dict, + ], +) +def test_get_document(request_type, transport: str = "grpc"): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_document), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = retriever.Document( + name="name_value", + display_name="display_name_value", + ) + response = client.get_document(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = retriever_service.GetDocumentRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, retriever.Document) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + + +def test_get_document_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = retriever_service.GetDocumentRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_document), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.get_document(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == retriever_service.GetDocumentRequest( + name="name_value", + ) + + +def test_get_document_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_document in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_document] = mock_rpc + request = {} + client.get_document(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_document(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_document_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = RetrieverServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_document + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.get_document + ] = mock_rpc + + request = {} + await client.get_document(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.get_document(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_document_async( + transport: str = "grpc_asyncio", request_type=retriever_service.GetDocumentRequest +): + client = RetrieverServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_document), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + retriever.Document( + name="name_value", + display_name="display_name_value", + ) + ) + response = await client.get_document(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = retriever_service.GetDocumentRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, retriever.Document) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + + +@pytest.mark.asyncio +async def test_get_document_async_from_dict(): + await test_get_document_async(request_type=dict) + + +def test_get_document_field_headers(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = retriever_service.GetDocumentRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_document), "__call__") as call: + call.return_value = retriever.Document() + client.get_document(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_document_field_headers_async(): + client = RetrieverServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = retriever_service.GetDocumentRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_document), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(retriever.Document()) + await client.get_document(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_document_flattened(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_document), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = retriever.Document() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_document( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_document_flattened_error(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_document( + retriever_service.GetDocumentRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_document_flattened_async(): + client = RetrieverServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_document), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = retriever.Document() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(retriever.Document()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_document( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_document_flattened_error_async(): + client = RetrieverServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_document( + retriever_service.GetDocumentRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + retriever_service.UpdateDocumentRequest, + dict, + ], +) +def test_update_document(request_type, transport: str = "grpc"): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_document), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = retriever.Document( + name="name_value", + display_name="display_name_value", + ) + response = client.update_document(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = retriever_service.UpdateDocumentRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, retriever.Document) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + + +def test_update_document_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = retriever_service.UpdateDocumentRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_document), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.update_document(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == retriever_service.UpdateDocumentRequest() + + +def test_update_document_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_document in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.update_document] = mock_rpc + request = {} + client.update_document(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.update_document(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_document_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = RetrieverServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.update_document + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.update_document + ] = mock_rpc + + request = {} + await client.update_document(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.update_document(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_document_async( + transport: str = "grpc_asyncio", + request_type=retriever_service.UpdateDocumentRequest, +): + client = RetrieverServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_document), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + retriever.Document( + name="name_value", + display_name="display_name_value", + ) + ) + response = await client.update_document(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = retriever_service.UpdateDocumentRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, retriever.Document) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + + +@pytest.mark.asyncio +async def test_update_document_async_from_dict(): + await test_update_document_async(request_type=dict) + + +def test_update_document_field_headers(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = retriever_service.UpdateDocumentRequest() + + request.document.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_document), "__call__") as call: + call.return_value = retriever.Document() + client.update_document(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "document.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_document_field_headers_async(): + client = RetrieverServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = retriever_service.UpdateDocumentRequest() + + request.document.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_document), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(retriever.Document()) + await client.update_document(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "document.name=name_value", + ) in kw["metadata"] + + +def test_update_document_flattened(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_document), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = retriever.Document() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_document( + document=retriever.Document(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].document + mock_val = retriever.Document(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +def test_update_document_flattened_error(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_document( + retriever_service.UpdateDocumentRequest(), + document=retriever.Document(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_update_document_flattened_async(): + client = RetrieverServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_document), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = retriever.Document() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(retriever.Document()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_document( + document=retriever.Document(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].document + mock_val = retriever.Document(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_update_document_flattened_error_async(): + client = RetrieverServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_document( + retriever_service.UpdateDocumentRequest(), + document=retriever.Document(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + retriever_service.DeleteDocumentRequest, + dict, + ], +) +def test_delete_document(request_type, transport: str = "grpc"): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_document), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_document(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = retriever_service.DeleteDocumentRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
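+ # DeleteDocument surfaces no response payload: the stub was mocked to return
+ # None above, and the client is expected to pass that through unchanged.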
+ assert response is None + + +def test_delete_document_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = retriever_service.DeleteDocumentRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_document), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.delete_document(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == retriever_service.DeleteDocumentRequest( + name="name_value", + ) + + +def test_delete_document_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_document in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete_document] = mock_rpc + request = {} + client.delete_document(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.delete_document(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_document_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = RetrieverServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.delete_document + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.delete_document + ] = mock_rpc + + request = {} + await client.delete_document(request) + + # Establish that the underlying gRPC stub method was called. 
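+ # A call count of 1 on the injected mock (and zero new wrap_method calls
+ # asserted below) shows the client reused the wrapper cached in
+ # _wrapped_methods at construction time instead of re-wrapping the RPC on
+ # every request.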
+ assert mock_rpc.call_count == 1 + + await client.delete_document(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_document_async( + transport: str = "grpc_asyncio", + request_type=retriever_service.DeleteDocumentRequest, +): + client = RetrieverServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_document), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_document(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = retriever_service.DeleteDocumentRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_document_async_from_dict(): + await test_delete_document_async(request_type=dict) + + +def test_delete_document_field_headers(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = retriever_service.DeleteDocumentRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_document), "__call__") as call: + call.return_value = None + client.delete_document(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_document_field_headers_async(): + client = RetrieverServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = retriever_service.DeleteDocumentRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_document), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_document(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_document_flattened(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.delete_document), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_document( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_document_flattened_error(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_document( + retriever_service.DeleteDocumentRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_document_flattened_async(): + client = RetrieverServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_document), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_document( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_document_flattened_error_async(): + client = RetrieverServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_document( + retriever_service.DeleteDocumentRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + retriever_service.ListDocumentsRequest, + dict, + ], +) +def test_list_documents(request_type, transport: str = "grpc"): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_documents), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = retriever_service.ListDocumentsResponse( + next_page_token="next_page_token_value", + ) + response = client.list_documents(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = retriever_service.ListDocumentsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListDocumentsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_documents_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = retriever_service.ListDocumentsRequest( + parent="parent_value", + page_token="page_token_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_documents), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_documents(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == retriever_service.ListDocumentsRequest( + parent="parent_value", + page_token="page_token_value", + ) + + +def test_list_documents_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_documents in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_documents] = mock_rpc + request = {} + client.list_documents(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_documents(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_documents_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = RetrieverServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_documents + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.list_documents + ] = mock_rpc + + request = {} + await client.list_documents(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + await client.list_documents(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_documents_async( + transport: str = "grpc_asyncio", request_type=retriever_service.ListDocumentsRequest +): + client = RetrieverServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_documents), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + retriever_service.ListDocumentsResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_documents(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = retriever_service.ListDocumentsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListDocumentsAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_documents_async_from_dict(): + await test_list_documents_async(request_type=dict) + + +def test_list_documents_field_headers(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = retriever_service.ListDocumentsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_documents), "__call__") as call: + call.return_value = retriever_service.ListDocumentsResponse() + client.list_documents(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_documents_field_headers_async(): + client = RetrieverServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = retriever_service.ListDocumentsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_documents), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + retriever_service.ListDocumentsResponse() + ) + await client.list_documents(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
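+ # The generated client builds the routing header from the request's resource
+ # field, so the metadata passed to the stub should include an
+ # ("x-goog-request-params", "parent=parent_value") entry, checked below.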
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_documents_flattened(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_documents), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = retriever_service.ListDocumentsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_documents( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_documents_flattened_error(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_documents( + retriever_service.ListDocumentsRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_documents_flattened_async(): + client = RetrieverServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_documents), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = retriever_service.ListDocumentsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + retriever_service.ListDocumentsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_documents( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_documents_flattened_error_async(): + client = RetrieverServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_documents( + retriever_service.ListDocumentsRequest(), + parent="parent_value", + ) + + +def test_list_documents_pager(transport_name: str = "grpc"): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_documents), "__call__") as call: + # Set the response to a series of pages. 
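+ # Each ListDocumentsResponse below is one page; the pager returned by
+ # list_documents keeps requesting the next page while next_page_token is
+ # non-empty, so iterating it should yield all six Document messages. The
+ # trailing RuntimeError guards against an unexpected extra page request.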
+ call.side_effect = ( + retriever_service.ListDocumentsResponse( + documents=[ + retriever.Document(), + retriever.Document(), + retriever.Document(), + ], + next_page_token="abc", + ), + retriever_service.ListDocumentsResponse( + documents=[], + next_page_token="def", + ), + retriever_service.ListDocumentsResponse( + documents=[ + retriever.Document(), + ], + next_page_token="ghi", + ), + retriever_service.ListDocumentsResponse( + documents=[ + retriever.Document(), + retriever.Document(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_documents(request={}, retry=retry, timeout=timeout) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, retriever.Document) for i in results) + + +def test_list_documents_pages(transport_name: str = "grpc"): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_documents), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + retriever_service.ListDocumentsResponse( + documents=[ + retriever.Document(), + retriever.Document(), + retriever.Document(), + ], + next_page_token="abc", + ), + retriever_service.ListDocumentsResponse( + documents=[], + next_page_token="def", + ), + retriever_service.ListDocumentsResponse( + documents=[ + retriever.Document(), + ], + next_page_token="ghi", + ), + retriever_service.ListDocumentsResponse( + documents=[ + retriever.Document(), + retriever.Document(), + ], + ), + RuntimeError, + ) + pages = list(client.list_documents(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_documents_async_pager(): + client = RetrieverServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_documents), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
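+ # Same page sequence as the sync pager test above; the async pager is
+ # consumed with `async for` further down and should likewise yield six
+ # Document messages across the four pages.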
+ call.side_effect = ( + retriever_service.ListDocumentsResponse( + documents=[ + retriever.Document(), + retriever.Document(), + retriever.Document(), + ], + next_page_token="abc", + ), + retriever_service.ListDocumentsResponse( + documents=[], + next_page_token="def", + ), + retriever_service.ListDocumentsResponse( + documents=[ + retriever.Document(), + ], + next_page_token="ghi", + ), + retriever_service.ListDocumentsResponse( + documents=[ + retriever.Document(), + retriever.Document(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_documents( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, retriever.Document) for i in responses) + + +@pytest.mark.asyncio +async def test_list_documents_async_pages(): + client = RetrieverServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_documents), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + retriever_service.ListDocumentsResponse( + documents=[ + retriever.Document(), + retriever.Document(), + retriever.Document(), + ], + next_page_token="abc", + ), + retriever_service.ListDocumentsResponse( + documents=[], + next_page_token="def", + ), + retriever_service.ListDocumentsResponse( + documents=[ + retriever.Document(), + ], + next_page_token="ghi", + ), + retriever_service.ListDocumentsResponse( + documents=[ + retriever.Document(), + retriever.Document(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_documents(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + retriever_service.QueryDocumentRequest, + dict, + ], +) +def test_query_document(request_type, transport: str = "grpc"): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.query_document), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = retriever_service.QueryDocumentResponse() + response = client.query_document(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = retriever_service.QueryDocumentRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, retriever_service.QueryDocumentResponse) + + +def test_query_document_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
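+ # Under AIP-4235, request fields marked for auto-population are filled with a
+ # UUID4 by the client when the caller leaves them unset; the explicitly set
+ # string fields below (`name`, `query`) must still pass through unchanged.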
+ client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = retriever_service.QueryDocumentRequest( + name="name_value", + query="query_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.query_document), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.query_document(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == retriever_service.QueryDocumentRequest( + name="name_value", + query="query_value", + ) + + +def test_query_document_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.query_document in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.query_document] = mock_rpc + request = {} + client.query_document(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.query_document(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_query_document_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = RetrieverServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.query_document + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.query_document + ] = mock_rpc + + request = {} + await client.query_document(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + await client.query_document(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_query_document_async( + transport: str = "grpc_asyncio", request_type=retriever_service.QueryDocumentRequest +): + client = RetrieverServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.query_document), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + retriever_service.QueryDocumentResponse() + ) + response = await client.query_document(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = retriever_service.QueryDocumentRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, retriever_service.QueryDocumentResponse) + + +@pytest.mark.asyncio +async def test_query_document_async_from_dict(): + await test_query_document_async(request_type=dict) + + +def test_query_document_field_headers(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = retriever_service.QueryDocumentRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.query_document), "__call__") as call: + call.return_value = retriever_service.QueryDocumentResponse() + client.query_document(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_query_document_field_headers_async(): + client = RetrieverServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = retriever_service.QueryDocumentRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.query_document), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + retriever_service.QueryDocumentResponse() + ) + await client.query_document(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.parametrize( + "request_type", + [ + retriever_service.CreateChunkRequest, + dict, + ], +) +def test_create_chunk(request_type, transport: str = "grpc"): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_chunk), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = retriever.Chunk( + name="name_value", + state=retriever.Chunk.State.STATE_PENDING_PROCESSING, + ) + response = client.create_chunk(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = retriever_service.CreateChunkRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, retriever.Chunk) + assert response.name == "name_value" + assert response.state == retriever.Chunk.State.STATE_PENDING_PROCESSING + + +def test_create_chunk_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = retriever_service.CreateChunkRequest( + parent="parent_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_chunk), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.create_chunk(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == retriever_service.CreateChunkRequest( + parent="parent_value", + ) + + +def test_create_chunk_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_chunk in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.create_chunk] = mock_rpc + request = {} + client.create_chunk(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.create_chunk(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_chunk_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = RetrieverServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.create_chunk + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.create_chunk + ] = mock_rpc + + request = {} + await client.create_chunk(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.create_chunk(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_chunk_async( + transport: str = "grpc_asyncio", request_type=retriever_service.CreateChunkRequest +): + client = RetrieverServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_chunk), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + retriever.Chunk( + name="name_value", + state=retriever.Chunk.State.STATE_PENDING_PROCESSING, + ) + ) + response = await client.create_chunk(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = retriever_service.CreateChunkRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, retriever.Chunk) + assert response.name == "name_value" + assert response.state == retriever.Chunk.State.STATE_PENDING_PROCESSING + + +@pytest.mark.asyncio +async def test_create_chunk_async_from_dict(): + await test_create_chunk_async(request_type=dict) + + +def test_create_chunk_field_headers(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = retriever_service.CreateChunkRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_chunk), "__call__") as call: + call.return_value = retriever.Chunk() + client.create_chunk(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_chunk_field_headers_async(): + client = RetrieverServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = retriever_service.CreateChunkRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_chunk), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(retriever.Chunk()) + await client.create_chunk(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_chunk_flattened(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_chunk), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = retriever.Chunk() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_chunk( + parent="parent_value", + chunk=retriever.Chunk(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].chunk + mock_val = retriever.Chunk(name="name_value") + assert arg == mock_val + + +def test_create_chunk_flattened_error(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_chunk( + retriever_service.CreateChunkRequest(), + parent="parent_value", + chunk=retriever.Chunk(name="name_value"), + ) + + +@pytest.mark.asyncio +async def test_create_chunk_flattened_async(): + client = RetrieverServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_chunk), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = retriever.Chunk() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(retriever.Chunk()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_chunk( + parent="parent_value", + chunk=retriever.Chunk(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
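+ # The second call.return_value assignment above (a FakeUnaryUnaryCall
+ # wrapping a Chunk) supersedes the plain Chunk, so the awaited call resolved
+ # to the fake response; the assertions below check the request the stub
+ # captured.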
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].chunk + mock_val = retriever.Chunk(name="name_value") + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_chunk_flattened_error_async(): + client = RetrieverServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_chunk( + retriever_service.CreateChunkRequest(), + parent="parent_value", + chunk=retriever.Chunk(name="name_value"), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + retriever_service.BatchCreateChunksRequest, + dict, + ], +) +def test_batch_create_chunks(request_type, transport: str = "grpc"): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_create_chunks), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = retriever_service.BatchCreateChunksResponse() + response = client.batch_create_chunks(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = retriever_service.BatchCreateChunksRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, retriever_service.BatchCreateChunksResponse) + + +def test_batch_create_chunks_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = retriever_service.BatchCreateChunksRequest( + parent="parent_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_create_chunks), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.batch_create_chunks(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == retriever_service.BatchCreateChunksRequest( + parent="parent_value", + ) + + +def test_batch_create_chunks_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.batch_create_chunks in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.batch_create_chunks + ] = mock_rpc + request = {} + client.batch_create_chunks(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.batch_create_chunks(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_batch_create_chunks_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = RetrieverServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.batch_create_chunks + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.batch_create_chunks + ] = mock_rpc + + request = {} + await client.batch_create_chunks(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.batch_create_chunks(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_batch_create_chunks_async( + transport: str = "grpc_asyncio", + request_type=retriever_service.BatchCreateChunksRequest, +): + client = RetrieverServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_create_chunks), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + retriever_service.BatchCreateChunksResponse() + ) + response = await client.batch_create_chunks(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = retriever_service.BatchCreateChunksRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, retriever_service.BatchCreateChunksResponse) + + +@pytest.mark.asyncio +async def test_batch_create_chunks_async_from_dict(): + await test_batch_create_chunks_async(request_type=dict) + + +def test_batch_create_chunks_field_headers(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = retriever_service.BatchCreateChunksRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_create_chunks), "__call__" + ) as call: + call.return_value = retriever_service.BatchCreateChunksResponse() + client.batch_create_chunks(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_batch_create_chunks_field_headers_async(): + client = RetrieverServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = retriever_service.BatchCreateChunksRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_create_chunks), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + retriever_service.BatchCreateChunksResponse() + ) + await client.batch_create_chunks(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.parametrize( + "request_type", + [ + retriever_service.GetChunkRequest, + dict, + ], +) +def test_get_chunk(request_type, transport: str = "grpc"): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_chunk), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = retriever.Chunk( + name="name_value", + state=retriever.Chunk.State.STATE_PENDING_PROCESSING, + ) + response = client.get_chunk(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = retriever_service.GetChunkRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, retriever.Chunk) + assert response.name == "name_value" + assert response.state == retriever.Chunk.State.STATE_PENDING_PROCESSING + + +def test_get_chunk_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = retriever_service.GetChunkRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_chunk), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_chunk(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == retriever_service.GetChunkRequest( + name="name_value", + ) + + +def test_get_chunk_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_chunk in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_chunk] = mock_rpc + request = {} + client.get_chunk(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_chunk(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_chunk_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = RetrieverServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_chunk + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.get_chunk + ] = mock_rpc + + request = {} + await client.get_chunk(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + await client.get_chunk(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_chunk_async( + transport: str = "grpc_asyncio", request_type=retriever_service.GetChunkRequest +): + client = RetrieverServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_chunk), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + retriever.Chunk( + name="name_value", + state=retriever.Chunk.State.STATE_PENDING_PROCESSING, + ) + ) + response = await client.get_chunk(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = retriever_service.GetChunkRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, retriever.Chunk) + assert response.name == "name_value" + assert response.state == retriever.Chunk.State.STATE_PENDING_PROCESSING + + +@pytest.mark.asyncio +async def test_get_chunk_async_from_dict(): + await test_get_chunk_async(request_type=dict) + + +def test_get_chunk_field_headers(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = retriever_service.GetChunkRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_chunk), "__call__") as call: + call.return_value = retriever.Chunk() + client.get_chunk(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_chunk_field_headers_async(): + client = RetrieverServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = retriever_service.GetChunkRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_chunk), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(retriever.Chunk()) + await client.get_chunk(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
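+    # Routing information is carried in the x-goog-request-params metadata entry.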
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_chunk_flattened(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_chunk), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = retriever.Chunk() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_chunk( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_chunk_flattened_error(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_chunk( + retriever_service.GetChunkRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_chunk_flattened_async(): + client = RetrieverServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_chunk), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = retriever.Chunk() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(retriever.Chunk()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_chunk( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_chunk_flattened_error_async(): + client = RetrieverServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_chunk( + retriever_service.GetChunkRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + retriever_service.UpdateChunkRequest, + dict, + ], +) +def test_update_chunk(request_type, transport: str = "grpc"): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_chunk), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = retriever.Chunk( + name="name_value", + state=retriever.Chunk.State.STATE_PENDING_PROCESSING, + ) + response = client.update_chunk(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = retriever_service.UpdateChunkRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, retriever.Chunk) + assert response.name == "name_value" + assert response.state == retriever.Chunk.State.STATE_PENDING_PROCESSING + + +def test_update_chunk_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = retriever_service.UpdateChunkRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_chunk), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.update_chunk(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == retriever_service.UpdateChunkRequest() + + +def test_update_chunk_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_chunk in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.update_chunk] = mock_rpc + request = {} + client.update_chunk(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.update_chunk(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_chunk_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = RetrieverServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.update_chunk + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.update_chunk + ] = mock_rpc + + request = {} + await client.update_chunk(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + await client.update_chunk(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_chunk_async( + transport: str = "grpc_asyncio", request_type=retriever_service.UpdateChunkRequest +): + client = RetrieverServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_chunk), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + retriever.Chunk( + name="name_value", + state=retriever.Chunk.State.STATE_PENDING_PROCESSING, + ) + ) + response = await client.update_chunk(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = retriever_service.UpdateChunkRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, retriever.Chunk) + assert response.name == "name_value" + assert response.state == retriever.Chunk.State.STATE_PENDING_PROCESSING + + +@pytest.mark.asyncio +async def test_update_chunk_async_from_dict(): + await test_update_chunk_async(request_type=dict) + + +def test_update_chunk_field_headers(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = retriever_service.UpdateChunkRequest() + + request.chunk.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_chunk), "__call__") as call: + call.return_value = retriever.Chunk() + client.update_chunk(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "chunk.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_chunk_field_headers_async(): + client = RetrieverServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = retriever_service.UpdateChunkRequest() + + request.chunk.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_chunk), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(retriever.Chunk()) + await client.update_chunk(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "chunk.name=name_value", + ) in kw["metadata"] + + +def test_update_chunk_flattened(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_chunk), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = retriever.Chunk() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_chunk( + chunk=retriever.Chunk(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].chunk + mock_val = retriever.Chunk(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +def test_update_chunk_flattened_error(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_chunk( + retriever_service.UpdateChunkRequest(), + chunk=retriever.Chunk(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_update_chunk_flattened_async(): + client = RetrieverServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_chunk), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = retriever.Chunk() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(retriever.Chunk()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_chunk( + chunk=retriever.Chunk(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].chunk + mock_val = retriever.Chunk(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_update_chunk_flattened_error_async(): + client = RetrieverServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.update_chunk( + retriever_service.UpdateChunkRequest(), + chunk=retriever.Chunk(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + retriever_service.BatchUpdateChunksRequest, + dict, + ], +) +def test_batch_update_chunks(request_type, transport: str = "grpc"): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_update_chunks), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = retriever_service.BatchUpdateChunksResponse() + response = client.batch_update_chunks(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = retriever_service.BatchUpdateChunksRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, retriever_service.BatchUpdateChunksResponse) + + +def test_batch_update_chunks_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = retriever_service.BatchUpdateChunksRequest( + parent="parent_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_update_chunks), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.batch_update_chunks(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == retriever_service.BatchUpdateChunksRequest( + parent="parent_value", + ) + + +def test_batch_update_chunks_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.batch_update_chunks in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.batch_update_chunks + ] = mock_rpc + request = {} + client.batch_update_chunks(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.batch_update_chunks(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_batch_update_chunks_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = RetrieverServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.batch_update_chunks + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.batch_update_chunks + ] = mock_rpc + + request = {} + await client.batch_update_chunks(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.batch_update_chunks(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_batch_update_chunks_async( + transport: str = "grpc_asyncio", + request_type=retriever_service.BatchUpdateChunksRequest, +): + client = RetrieverServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_update_chunks), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + retriever_service.BatchUpdateChunksResponse() + ) + response = await client.batch_update_chunks(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = retriever_service.BatchUpdateChunksRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, retriever_service.BatchUpdateChunksResponse) + + +@pytest.mark.asyncio +async def test_batch_update_chunks_async_from_dict(): + await test_batch_update_chunks_async(request_type=dict) + + +def test_batch_update_chunks_field_headers(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = retriever_service.BatchUpdateChunksRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_update_chunks), "__call__" + ) as call: + call.return_value = retriever_service.BatchUpdateChunksResponse() + client.batch_update_chunks(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_batch_update_chunks_field_headers_async(): + client = RetrieverServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = retriever_service.BatchUpdateChunksRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_update_chunks), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + retriever_service.BatchUpdateChunksResponse() + ) + await client.batch_update_chunks(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.parametrize( + "request_type", + [ + retriever_service.DeleteChunkRequest, + dict, + ], +) +def test_delete_chunk(request_type, transport: str = "grpc"): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_chunk), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_chunk(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = retriever_service.DeleteChunkRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_chunk_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = retriever_service.DeleteChunkRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_chunk), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.delete_chunk(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == retriever_service.DeleteChunkRequest( + name="name_value", + ) + + +def test_delete_chunk_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_chunk in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete_chunk] = mock_rpc + request = {} + client.delete_chunk(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.delete_chunk(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_chunk_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = RetrieverServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.delete_chunk + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.delete_chunk + ] = mock_rpc + + request = {} + await client.delete_chunk(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.delete_chunk(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_chunk_async( + transport: str = "grpc_asyncio", request_type=retriever_service.DeleteChunkRequest +): + client = RetrieverServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_chunk), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_chunk(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = retriever_service.DeleteChunkRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_chunk_async_from_dict(): + await test_delete_chunk_async(request_type=dict) + + +def test_delete_chunk_field_headers(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = retriever_service.DeleteChunkRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_chunk), "__call__") as call: + call.return_value = None + client.delete_chunk(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_chunk_field_headers_async(): + client = RetrieverServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = retriever_service.DeleteChunkRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_chunk), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_chunk(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_chunk_flattened(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_chunk), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_chunk( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_chunk_flattened_error(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_chunk( + retriever_service.DeleteChunkRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_chunk_flattened_async(): + client = RetrieverServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
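+    # The async client awaits the stub call, so the mocked return value is wrapped in FakeUnaryUnaryCall below.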
+ with mock.patch.object(type(client.transport.delete_chunk), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_chunk( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_chunk_flattened_error_async(): + client = RetrieverServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_chunk( + retriever_service.DeleteChunkRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + retriever_service.BatchDeleteChunksRequest, + dict, + ], +) +def test_batch_delete_chunks(request_type, transport: str = "grpc"): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_delete_chunks), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.batch_delete_chunks(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = retriever_service.BatchDeleteChunksRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_batch_delete_chunks_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = retriever_service.BatchDeleteChunksRequest( + parent="parent_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_delete_chunks), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.batch_delete_chunks(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == retriever_service.BatchDeleteChunksRequest( + parent="parent_value", + ) + + +def test_batch_delete_chunks_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.batch_delete_chunks in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.batch_delete_chunks + ] = mock_rpc + request = {} + client.batch_delete_chunks(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.batch_delete_chunks(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_batch_delete_chunks_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = RetrieverServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.batch_delete_chunks + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.batch_delete_chunks + ] = mock_rpc + + request = {} + await client.batch_delete_chunks(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.batch_delete_chunks(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_batch_delete_chunks_async( + transport: str = "grpc_asyncio", + request_type=retriever_service.BatchDeleteChunksRequest, +): + client = RetrieverServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_delete_chunks), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.batch_delete_chunks(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = retriever_service.BatchDeleteChunksRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_batch_delete_chunks_async_from_dict(): + await test_batch_delete_chunks_async(request_type=dict) + + +def test_batch_delete_chunks_field_headers(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = retriever_service.BatchDeleteChunksRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_delete_chunks), "__call__" + ) as call: + call.return_value = None + client.batch_delete_chunks(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_batch_delete_chunks_field_headers_async(): + client = RetrieverServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = retriever_service.BatchDeleteChunksRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_delete_chunks), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.batch_delete_chunks(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.parametrize( + "request_type", + [ + retriever_service.ListChunksRequest, + dict, + ], +) +def test_list_chunks(request_type, transport: str = "grpc"): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_chunks), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = retriever_service.ListChunksResponse( + next_page_token="next_page_token_value", + ) + response = client.list_chunks(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = retriever_service.ListChunksRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
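+    # list_chunks wraps the raw response in a pager that lazily fetches subsequent pages.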
+ assert isinstance(response, pagers.ListChunksPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_chunks_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = retriever_service.ListChunksRequest( + parent="parent_value", + page_token="page_token_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_chunks), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_chunks(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == retriever_service.ListChunksRequest( + parent="parent_value", + page_token="page_token_value", + ) + + +def test_list_chunks_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_chunks in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_chunks] = mock_rpc + request = {} + client.list_chunks(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_chunks(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_chunks_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = RetrieverServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_chunks + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.list_chunks + ] = mock_rpc + + request = {} + await client.list_chunks(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + await client.list_chunks(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_chunks_async( + transport: str = "grpc_asyncio", request_type=retriever_service.ListChunksRequest +): + client = RetrieverServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_chunks), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + retriever_service.ListChunksResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_chunks(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = retriever_service.ListChunksRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListChunksAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_chunks_async_from_dict(): + await test_list_chunks_async(request_type=dict) + + +def test_list_chunks_field_headers(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = retriever_service.ListChunksRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_chunks), "__call__") as call: + call.return_value = retriever_service.ListChunksResponse() + client.list_chunks(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_chunks_field_headers_async(): + client = RetrieverServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = retriever_service.ListChunksRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_chunks), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + retriever_service.ListChunksResponse() + ) + await client.list_chunks(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_chunks_flattened(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_chunks), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = retriever_service.ListChunksResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_chunks( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_chunks_flattened_error(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_chunks( + retriever_service.ListChunksRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_chunks_flattened_async(): + client = RetrieverServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_chunks), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = retriever_service.ListChunksResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + retriever_service.ListChunksResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_chunks( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_chunks_flattened_error_async(): + client = RetrieverServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_chunks( + retriever_service.ListChunksRequest(), + parent="parent_value", + ) + + +def test_list_chunks_pager(transport_name: str = "grpc"): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_chunks), "__call__") as call: + # Set the response to a series of pages. 
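+        # A trailing RuntimeError guards against the pager requesting more pages than are provided.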
+ call.side_effect = ( + retriever_service.ListChunksResponse( + chunks=[ + retriever.Chunk(), + retriever.Chunk(), + retriever.Chunk(), + ], + next_page_token="abc", + ), + retriever_service.ListChunksResponse( + chunks=[], + next_page_token="def", + ), + retriever_service.ListChunksResponse( + chunks=[ + retriever.Chunk(), + ], + next_page_token="ghi", + ), + retriever_service.ListChunksResponse( + chunks=[ + retriever.Chunk(), + retriever.Chunk(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_chunks(request={}, retry=retry, timeout=timeout) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, retriever.Chunk) for i in results) + + +def test_list_chunks_pages(transport_name: str = "grpc"): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_chunks), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + retriever_service.ListChunksResponse( + chunks=[ + retriever.Chunk(), + retriever.Chunk(), + retriever.Chunk(), + ], + next_page_token="abc", + ), + retriever_service.ListChunksResponse( + chunks=[], + next_page_token="def", + ), + retriever_service.ListChunksResponse( + chunks=[ + retriever.Chunk(), + ], + next_page_token="ghi", + ), + retriever_service.ListChunksResponse( + chunks=[ + retriever.Chunk(), + retriever.Chunk(), + ], + ), + RuntimeError, + ) + pages = list(client.list_chunks(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_chunks_async_pager(): + client = RetrieverServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_chunks), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + retriever_service.ListChunksResponse( + chunks=[ + retriever.Chunk(), + retriever.Chunk(), + retriever.Chunk(), + ], + next_page_token="abc", + ), + retriever_service.ListChunksResponse( + chunks=[], + next_page_token="def", + ), + retriever_service.ListChunksResponse( + chunks=[ + retriever.Chunk(), + ], + next_page_token="ghi", + ), + retriever_service.ListChunksResponse( + chunks=[ + retriever.Chunk(), + retriever.Chunk(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_chunks( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, retriever.Chunk) for i in responses) + + +@pytest.mark.asyncio +async def test_list_chunks_async_pages(): + client = RetrieverServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
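+    # new_callable=mock.AsyncMock makes each mocked page awaitable, as the async pager expects.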
+ with mock.patch.object( + type(client.transport.list_chunks), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + retriever_service.ListChunksResponse( + chunks=[ + retriever.Chunk(), + retriever.Chunk(), + retriever.Chunk(), + ], + next_page_token="abc", + ), + retriever_service.ListChunksResponse( + chunks=[], + next_page_token="def", + ), + retriever_service.ListChunksResponse( + chunks=[ + retriever.Chunk(), + ], + next_page_token="ghi", + ), + retriever_service.ListChunksResponse( + chunks=[ + retriever.Chunk(), + retriever.Chunk(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_chunks(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_create_corpus_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_corpus in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.create_corpus] = mock_rpc + + request = {} + client.create_corpus(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.create_corpus(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_corpus_rest_required_fields( + request_type=retriever_service.CreateCorpusRequest, +): + transport_class = transports.RetrieverServiceRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_corpus._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_corpus._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = retriever.Corpus() + # Mock the http request call within the method and fake a response. 
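+    # The REST transport sends traffic through requests.Session, so Session.request is patched to return a canned response.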
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = retriever.Corpus.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.create_corpus(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_corpus_rest_unset_required_fields(): + transport = transports.RetrieverServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_corpus._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("corpus",))) + + +def test_create_corpus_rest_flattened(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = retriever.Corpus() + + # get arguments that satisfy an http rule for this method + sample_request = {} + + # get truthy value for each flattened field + mock_args = dict( + corpus=retriever.Corpus(name="name_value"), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = retriever.Corpus.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.create_corpus(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/corpora" % client.transport._host, args[1] + ) + + +def test_create_corpus_rest_flattened_error(transport: str = "rest"): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_corpus( + retriever_service.CreateCorpusRequest(), + corpus=retriever.Corpus(name="name_value"), + ) + + +def test_get_corpus_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_corpus in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_corpus] = mock_rpc + + request = {} + client.get_corpus(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_corpus(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_corpus_rest_required_fields( + request_type=retriever_service.GetCorpusRequest, +): + transport_class = transports.RetrieverServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_corpus._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_corpus._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = retriever.Corpus() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = retriever.Corpus.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_corpus(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_corpus_rest_unset_required_fields(): + transport = transports.RetrieverServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_corpus._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +def test_get_corpus_rest_flattened(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = retriever.Corpus() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "corpora/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = retriever.Corpus.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.get_corpus(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{name=corpora/*}" % client.transport._host, args[1] + ) + + +def test_get_corpus_rest_flattened_error(transport: str = "rest"): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_corpus( + retriever_service.GetCorpusRequest(), + name="name_value", + ) + + +def test_update_corpus_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_corpus in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.update_corpus] = mock_rpc + + request = {} + client.update_corpus(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.update_corpus(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_corpus_rest_required_fields( + request_type=retriever_service.UpdateCorpusRequest, +): + transport_class = transports.RetrieverServiceRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_corpus._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_corpus._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("update_mask",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = retriever.Corpus() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = retriever.Corpus.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.update_corpus(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_corpus_rest_unset_required_fields(): + transport = transports.RetrieverServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_corpus._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("updateMask",)) + & set( + ( + "corpus", + "updateMask", + ) + ) + ) + + +def test_update_corpus_rest_flattened(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = retriever.Corpus() + + # get arguments that satisfy an http rule for this method + sample_request = {"corpus": {"name": "corpora/sample1"}} + + # get truthy value for each flattened field + mock_args = dict( + corpus=retriever.Corpus(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = retriever.Corpus.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.update_corpus(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{corpus.name=corpora/*}" % client.transport._host, args[1] + ) + + +def test_update_corpus_rest_flattened_error(transport: str = "rest"): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_corpus( + retriever_service.UpdateCorpusRequest(), + corpus=retriever.Corpus(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_delete_corpus_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_corpus in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete_corpus] = mock_rpc + + request = {} + client.delete_corpus(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.delete_corpus(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_corpus_rest_required_fields( + request_type=retriever_service.DeleteCorpusRequest, +): + transport_class = transports.RetrieverServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_corpus._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_corpus._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("force",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.delete_corpus(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_corpus_rest_unset_required_fields(): + transport = transports.RetrieverServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_corpus._get_unset_required_fields({}) + assert set(unset_fields) == (set(("force",)) & set(("name",))) + + +def test_delete_corpus_rest_flattened(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "corpora/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.delete_corpus(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{name=corpora/*}" % client.transport._host, args[1] + ) + + +def test_delete_corpus_rest_flattened_error(transport: str = "rest"): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_corpus( + retriever_service.DeleteCorpusRequest(), + name="name_value", + ) + + +def test_list_corpora_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_corpora in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[client._transport.list_corpora] = mock_rpc + + request = {} + client.list_corpora(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_corpora(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_corpora_rest_pager(transport: str = "rest"): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + retriever_service.ListCorporaResponse( + corpora=[ + retriever.Corpus(), + retriever.Corpus(), + retriever.Corpus(), + ], + next_page_token="abc", + ), + retriever_service.ListCorporaResponse( + corpora=[], + next_page_token="def", + ), + retriever_service.ListCorporaResponse( + corpora=[ + retriever.Corpus(), + ], + next_page_token="ghi", + ), + retriever_service.ListCorporaResponse( + corpora=[ + retriever.Corpus(), + retriever.Corpus(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + retriever_service.ListCorporaResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {} + + pager = client.list_corpora(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, retriever.Corpus) for i in results) + + pages = list(client.list_corpora(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_query_corpus_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.query_corpus in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.query_corpus] = mock_rpc + + request = {} + client.query_corpus(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.query_corpus(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_query_corpus_rest_required_fields( + request_type=retriever_service.QueryCorpusRequest, +): + transport_class = transports.RetrieverServiceRestTransport + + request_init = {} + request_init["name"] = "" + request_init["query"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).query_corpus._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + jsonified_request["query"] = "query_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).query_corpus._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + assert "query" in jsonified_request + assert jsonified_request["query"] == "query_value" + + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = retriever_service.QueryCorpusResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = retriever_service.QueryCorpusResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.query_corpus(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_query_corpus_rest_unset_required_fields(): + transport = transports.RetrieverServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.query_corpus._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "name", + "query", + ) + ) + ) + + +def test_create_document_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_document in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.create_document] = mock_rpc + + request = {} + client.create_document(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.create_document(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_document_rest_required_fields( + request_type=retriever_service.CreateDocumentRequest, +): + transport_class = transports.RetrieverServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_document._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_document._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = retriever.Document() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = retriever.Document.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.create_document(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_document_rest_unset_required_fields(): + transport = transports.RetrieverServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_document._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "parent", + "document", + ) + ) + ) + + +def test_create_document_rest_flattened(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
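+    # The flattened-call test below patches the transport's own session so it can
+    # validate the final request URL against the v1alpha http rule for this method.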
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = retriever.Document() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "corpora/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + document=retriever.Document(name="name_value"), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = retriever.Document.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.create_document(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{parent=corpora/*}/documents" % client.transport._host, args[1] + ) + + +def test_create_document_rest_flattened_error(transport: str = "rest"): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_document( + retriever_service.CreateDocumentRequest(), + parent="parent_value", + document=retriever.Document(name="name_value"), + ) + + +def test_get_document_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_document in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_document] = mock_rpc + + request = {} + client.get_document(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_document(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_document_rest_required_fields( + request_type=retriever_service.GetDocumentRequest, +): + transport_class = transports.RetrieverServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_document._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_document._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = retriever.Document() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = retriever.Document.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_document(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_document_rest_unset_required_fields(): + transport = transports.RetrieverServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_document._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +def test_get_document_rest_flattened(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = retriever.Document() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "corpora/sample1/documents/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = retriever.Document.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.get_document(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{name=corpora/*/documents/*}" % client.transport._host, args[1] + ) + + +def test_get_document_rest_flattened_error(transport: str = "rest"): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_document( + retriever_service.GetDocumentRequest(), + name="name_value", + ) + + +def test_update_document_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_document in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.update_document] = mock_rpc + + request = {} + client.update_document(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.update_document(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_document_rest_required_fields( + request_type=retriever_service.UpdateDocumentRequest, +): + transport_class = transports.RetrieverServiceRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_document._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_document._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("update_mask",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = retriever.Document() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = retriever.Document.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.update_document(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_document_rest_unset_required_fields(): + transport = transports.RetrieverServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_document._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("updateMask",)) + & set( + ( + "document", + "updateMask", + ) + ) + ) + + +def test_update_document_rest_flattened(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = retriever.Document() + + # get arguments that satisfy an http rule for this method + sample_request = {"document": {"name": "corpora/sample1/documents/sample2"}} + + # get truthy value for each flattened field + mock_args = dict( + document=retriever.Document(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = retriever.Document.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.update_document(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{document.name=corpora/*/documents/*}" % client.transport._host, + args[1], + ) + + +def test_update_document_rest_flattened_error(transport: str = "rest"): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_document( + retriever_service.UpdateDocumentRequest(), + document=retriever.Document(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_delete_document_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_document in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete_document] = mock_rpc + + request = {} + client.delete_document(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.delete_document(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_document_rest_required_fields( + request_type=retriever_service.DeleteDocumentRequest, +): + transport_class = transports.RetrieverServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_document._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_document._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("force",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.delete_document(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_document_rest_unset_required_fields(): + transport = transports.RetrieverServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_document._get_unset_required_fields({}) + assert set(unset_fields) == (set(("force",)) & set(("name",))) + + +def test_delete_document_rest_flattened(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
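+        # The delete RPC has no response payload, so the faked HTTP body is an
+        # empty string and the client call returns None.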
+ return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "corpora/sample1/documents/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.delete_document(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{name=corpora/*/documents/*}" % client.transport._host, args[1] + ) + + +def test_delete_document_rest_flattened_error(transport: str = "rest"): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_document( + retriever_service.DeleteDocumentRequest(), + name="name_value", + ) + + +def test_list_documents_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_documents in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_documents] = mock_rpc + + request = {} + client.list_documents(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_documents(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_documents_rest_required_fields( + request_type=retriever_service.ListDocumentsRequest, +): + transport_class = transports.RetrieverServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_documents._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_documents._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
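+    # The only fields allowed to remain unset here are the optional pagination
+    # query parameters; anything else would mean a path or body field leaked
+    # into the query string.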
+ assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = retriever_service.ListDocumentsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = retriever_service.ListDocumentsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_documents(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_documents_rest_unset_required_fields(): + transport = transports.RetrieverServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_documents._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +def test_list_documents_rest_flattened(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = retriever_service.ListDocumentsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "corpora/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = retriever_service.ListDocumentsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.list_documents(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{parent=corpora/*}/documents" % client.transport._host, args[1] + ) + + +def test_list_documents_rest_flattened_error(transport: str = "rest"): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_documents( + retriever_service.ListDocumentsRequest(), + parent="parent_value", + ) + + +def test_list_documents_rest_pager(transport: str = "rest"): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + retriever_service.ListDocumentsResponse( + documents=[ + retriever.Document(), + retriever.Document(), + retriever.Document(), + ], + next_page_token="abc", + ), + retriever_service.ListDocumentsResponse( + documents=[], + next_page_token="def", + ), + retriever_service.ListDocumentsResponse( + documents=[ + retriever.Document(), + ], + next_page_token="ghi", + ), + retriever_service.ListDocumentsResponse( + documents=[ + retriever.Document(), + retriever.Document(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + retriever_service.ListDocumentsResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "corpora/sample1"} + + pager = client.list_documents(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, retriever.Document) for i in results) + + pages = list(client.list_documents(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_query_document_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.query_document in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.query_document] = mock_rpc + + request = {} + client.query_document(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.query_document(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_query_document_rest_required_fields( + request_type=retriever_service.QueryDocumentRequest, +): + transport_class = transports.RetrieverServiceRestTransport + + request_init = {} + request_init["name"] = "" + request_init["query"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).query_document._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + jsonified_request["query"] = "query_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).query_document._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + assert "query" in jsonified_request + assert jsonified_request["query"] == "query_value" + + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = retriever_service.QueryDocumentResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = retriever_service.QueryDocumentResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.query_document(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_query_document_rest_unset_required_fields(): + transport = transports.RetrieverServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.query_document._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "name", + "query", + ) + ) + ) + + +def test_create_chunk_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_chunk in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.create_chunk] = mock_rpc + + request = {} + client.create_chunk(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.create_chunk(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_chunk_rest_required_fields( + request_type=retriever_service.CreateChunkRequest, +): + transport_class = transports.RetrieverServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_chunk._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_chunk._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = retriever.Chunk() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = retriever.Chunk.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.create_chunk(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_chunk_rest_unset_required_fields(): + transport = transports.RetrieverServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_chunk._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "parent", + "chunk", + ) + ) + ) + + +def test_create_chunk_rest_flattened(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = retriever.Chunk() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "corpora/sample1/documents/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + chunk=retriever.Chunk(name="name_value"), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = retriever.Chunk.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.create_chunk(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{parent=corpora/*/documents/*}/chunks" % client.transport._host, + args[1], + ) + + +def test_create_chunk_rest_flattened_error(transport: str = "rest"): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_chunk( + retriever_service.CreateChunkRequest(), + parent="parent_value", + chunk=retriever.Chunk(name="name_value"), + ) + + +def test_batch_create_chunks_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.batch_create_chunks in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.batch_create_chunks + ] = mock_rpc + + request = {} + client.batch_create_chunks(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.batch_create_chunks(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_batch_create_chunks_rest_required_fields( + request_type=retriever_service.BatchCreateChunksRequest, +): + transport_class = transports.RetrieverServiceRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).batch_create_chunks._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).batch_create_chunks._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = retriever_service.BatchCreateChunksResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = retriever_service.BatchCreateChunksResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.batch_create_chunks(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_batch_create_chunks_rest_unset_required_fields(): + transport = transports.RetrieverServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.batch_create_chunks._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("requests",))) + + +def test_get_chunk_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_chunk in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_chunk] = mock_rpc + + request = {} + client.get_chunk(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_chunk(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_chunk_rest_required_fields(request_type=retriever_service.GetChunkRequest): + transport_class = transports.RetrieverServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_chunk._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_chunk._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = retriever.Chunk() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = retriever.Chunk.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_chunk(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_chunk_rest_unset_required_fields(): + transport = transports.RetrieverServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_chunk._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +def test_get_chunk_rest_flattened(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = retriever.Chunk() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "corpora/sample1/documents/sample2/chunks/sample3"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = retriever.Chunk.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.get_chunk(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{name=corpora/*/documents/*/chunks/*}" % client.transport._host, + args[1], + ) + + +def test_get_chunk_rest_flattened_error(transport: str = "rest"): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_chunk( + retriever_service.GetChunkRequest(), + name="name_value", + ) + + +def test_update_chunk_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_chunk in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.update_chunk] = mock_rpc + + request = {} + client.update_chunk(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.update_chunk(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_chunk_rest_required_fields( + request_type=retriever_service.UpdateChunkRequest, +): + transport_class = transports.RetrieverServiceRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_chunk._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_chunk._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("update_mask",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = retriever.Chunk() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = retriever.Chunk.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.update_chunk(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_chunk_rest_unset_required_fields(): + transport = transports.RetrieverServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_chunk._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("updateMask",)) + & set( + ( + "chunk", + "updateMask", + ) + ) + ) + + +def test_update_chunk_rest_flattened(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = retriever.Chunk() + + # get arguments that satisfy an http rule for this method + sample_request = { + "chunk": {"name": "corpora/sample1/documents/sample2/chunks/sample3"} + } + + # get truthy value for each flattened field + mock_args = dict( + chunk=retriever.Chunk(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = retriever.Chunk.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.update_chunk(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{chunk.name=corpora/*/documents/*/chunks/*}" + % client.transport._host, + args[1], + ) + + +def test_update_chunk_rest_flattened_error(transport: str = "rest"): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_chunk( + retriever_service.UpdateChunkRequest(), + chunk=retriever.Chunk(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_batch_update_chunks_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.batch_update_chunks in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.batch_update_chunks + ] = mock_rpc + + request = {} + client.batch_update_chunks(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.batch_update_chunks(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_batch_update_chunks_rest_required_fields( + request_type=retriever_service.BatchUpdateChunksRequest, +): + transport_class = transports.RetrieverServiceRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).batch_update_chunks._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).batch_update_chunks._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = retriever_service.BatchUpdateChunksResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = retriever_service.BatchUpdateChunksResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.batch_update_chunks(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_batch_update_chunks_rest_unset_required_fields(): + transport = transports.RetrieverServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.batch_update_chunks._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("requests",))) + + +def test_delete_chunk_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_chunk in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete_chunk] = mock_rpc + + request = {} + client.delete_chunk(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.delete_chunk(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_chunk_rest_required_fields( + request_type=retriever_service.DeleteChunkRequest, +): + transport_class = transports.RetrieverServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_chunk._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_chunk._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.delete_chunk(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_chunk_rest_unset_required_fields(): + transport = transports.RetrieverServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_chunk._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +def test_delete_chunk_rest_flattened(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "corpora/sample1/documents/sample2/chunks/sample3"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.delete_chunk(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{name=corpora/*/documents/*/chunks/*}" % client.transport._host, + args[1], + ) + + +def test_delete_chunk_rest_flattened_error(transport: str = "rest"): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_chunk( + retriever_service.DeleteChunkRequest(), + name="name_value", + ) + + +def test_batch_delete_chunks_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.batch_delete_chunks in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.batch_delete_chunks + ] = mock_rpc + + request = {} + client.batch_delete_chunks(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.batch_delete_chunks(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_batch_delete_chunks_rest_required_fields( + request_type=retriever_service.BatchDeleteChunksRequest, +): + transport_class = transports.RetrieverServiceRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).batch_delete_chunks._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).batch_delete_chunks._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.batch_delete_chunks(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_batch_delete_chunks_rest_unset_required_fields(): + transport = transports.RetrieverServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.batch_delete_chunks._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("requests",))) + + +def test_list_chunks_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_chunks in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_chunks] = mock_rpc + + request = {} + client.list_chunks(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_chunks(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_chunks_rest_required_fields( + request_type=retriever_service.ListChunksRequest, +): + transport_class = transports.RetrieverServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_chunks._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_chunks._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = retriever_service.ListChunksResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = retriever_service.ListChunksResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_chunks(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_chunks_rest_unset_required_fields(): + transport = transports.RetrieverServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_chunks._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +def test_list_chunks_rest_flattened(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = retriever_service.ListChunksResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "corpora/sample1/documents/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = retriever_service.ListChunksResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.list_chunks(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{parent=corpora/*/documents/*}/chunks" % client.transport._host, + args[1], + ) + + +def test_list_chunks_rest_flattened_error(transport: str = "rest"): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_chunks( + retriever_service.ListChunksRequest(), + parent="parent_value", + ) + + +def test_list_chunks_rest_pager(transport: str = "rest"): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + retriever_service.ListChunksResponse( + chunks=[ + retriever.Chunk(), + retriever.Chunk(), + retriever.Chunk(), + ], + next_page_token="abc", + ), + retriever_service.ListChunksResponse( + chunks=[], + next_page_token="def", + ), + retriever_service.ListChunksResponse( + chunks=[ + retriever.Chunk(), + ], + next_page_token="ghi", + ), + retriever_service.ListChunksResponse( + chunks=[ + retriever.Chunk(), + retriever.Chunk(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + retriever_service.ListChunksResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "corpora/sample1/documents/sample2"} + + pager = client.list_chunks(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, retriever.Chunk) for i in results) + + pages = list(client.list_chunks(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.RetrieverServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.RetrieverServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = RetrieverServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. 
+ transport = transports.RetrieverServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = RetrieverServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = RetrieverServiceClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.RetrieverServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = RetrieverServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.RetrieverServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = RetrieverServiceClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.RetrieverServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.RetrieverServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.RetrieverServiceGrpcTransport, + transports.RetrieverServiceGrpcAsyncIOTransport, + transports.RetrieverServiceRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +def test_transport_kind_grpc(): + transport = RetrieverServiceClient.get_transport_class("grpc")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "grpc" + + +def test_initialize_client_w_grpc(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_corpus_empty_call_grpc(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.create_corpus), "__call__") as call: + call.return_value = retriever.Corpus() + client.create_corpus(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = retriever_service.CreateCorpusRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_get_corpus_empty_call_grpc(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_corpus), "__call__") as call: + call.return_value = retriever.Corpus() + client.get_corpus(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = retriever_service.GetCorpusRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_corpus_empty_call_grpc(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.update_corpus), "__call__") as call: + call.return_value = retriever.Corpus() + client.update_corpus(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = retriever_service.UpdateCorpusRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_corpus_empty_call_grpc(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.delete_corpus), "__call__") as call: + call.return_value = None + client.delete_corpus(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = retriever_service.DeleteCorpusRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_corpora_empty_call_grpc(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.list_corpora), "__call__") as call: + call.return_value = retriever_service.ListCorporaResponse() + client.list_corpora(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = retriever_service.ListCorporaRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_query_corpus_empty_call_grpc(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.query_corpus), "__call__") as call: + call.return_value = retriever_service.QueryCorpusResponse() + client.query_corpus(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = retriever_service.QueryCorpusRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. 
request == None and no flattened fields passed, work. +def test_create_document_empty_call_grpc(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.create_document), "__call__") as call: + call.return_value = retriever.Document() + client.create_document(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = retriever_service.CreateDocumentRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_document_empty_call_grpc(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_document), "__call__") as call: + call.return_value = retriever.Document() + client.get_document(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = retriever_service.GetDocumentRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_document_empty_call_grpc(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.update_document), "__call__") as call: + call.return_value = retriever.Document() + client.update_document(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = retriever_service.UpdateDocumentRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_document_empty_call_grpc(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.delete_document), "__call__") as call: + call.return_value = None + client.delete_document(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = retriever_service.DeleteDocumentRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_documents_empty_call_grpc(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.list_documents), "__call__") as call: + call.return_value = retriever_service.ListDocumentsResponse() + client.list_documents(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = retriever_service.ListDocumentsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_query_document_empty_call_grpc(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.query_document), "__call__") as call: + call.return_value = retriever_service.QueryDocumentResponse() + client.query_document(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = retriever_service.QueryDocumentRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_chunk_empty_call_grpc(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.create_chunk), "__call__") as call: + call.return_value = retriever.Chunk() + client.create_chunk(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = retriever_service.CreateChunkRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_batch_create_chunks_empty_call_grpc(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.batch_create_chunks), "__call__" + ) as call: + call.return_value = retriever_service.BatchCreateChunksResponse() + client.batch_create_chunks(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = retriever_service.BatchCreateChunksRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_chunk_empty_call_grpc(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_chunk), "__call__") as call: + call.return_value = retriever.Chunk() + client.get_chunk(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = retriever_service.GetChunkRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_chunk_empty_call_grpc(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. 
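+    # Patching __call__ on the stub callable's type intercepts the RPC, so no channel traffic occurs and the outgoing request message can be inspected.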
+ with mock.patch.object(type(client.transport.update_chunk), "__call__") as call: + call.return_value = retriever.Chunk() + client.update_chunk(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = retriever_service.UpdateChunkRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_batch_update_chunks_empty_call_grpc(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.batch_update_chunks), "__call__" + ) as call: + call.return_value = retriever_service.BatchUpdateChunksResponse() + client.batch_update_chunks(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = retriever_service.BatchUpdateChunksRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_chunk_empty_call_grpc(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.delete_chunk), "__call__") as call: + call.return_value = None + client.delete_chunk(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = retriever_service.DeleteChunkRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_batch_delete_chunks_empty_call_grpc(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.batch_delete_chunks), "__call__" + ) as call: + call.return_value = None + client.batch_delete_chunks(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = retriever_service.BatchDeleteChunksRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_chunks_empty_call_grpc(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.list_chunks), "__call__") as call: + call.return_value = retriever_service.ListChunksResponse() + client.list_chunks(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = retriever_service.ListChunksRequest() + + assert args[0] == request_msg + + +def test_transport_kind_grpc_asyncio(): + transport = RetrieverServiceAsyncClient.get_transport_class("grpc_asyncio")( + credentials=async_anonymous_credentials() + ) + assert transport.kind == "grpc_asyncio" + + +def test_initialize_client_w_grpc_asyncio(): + client = RetrieverServiceAsyncClient( + credentials=async_anonymous_credentials(), transport="grpc_asyncio" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_corpus_empty_call_grpc_asyncio(): + client = RetrieverServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.create_corpus), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + retriever.Corpus( + name="name_value", + display_name="display_name_value", + ) + ) + await client.create_corpus(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = retriever_service.CreateCorpusRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_corpus_empty_call_grpc_asyncio(): + client = RetrieverServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_corpus), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + retriever.Corpus( + name="name_value", + display_name="display_name_value", + ) + ) + await client.get_corpus(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = retriever_service.GetCorpusRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_corpus_empty_call_grpc_asyncio(): + client = RetrieverServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.update_corpus), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + retriever.Corpus( + name="name_value", + display_name="display_name_value", + ) + ) + await client.update_corpus(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = retriever_service.UpdateCorpusRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+@pytest.mark.asyncio +async def test_delete_corpus_empty_call_grpc_asyncio(): + client = RetrieverServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.delete_corpus), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_corpus(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = retriever_service.DeleteCorpusRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_corpora_empty_call_grpc_asyncio(): + client = RetrieverServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.list_corpora), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + retriever_service.ListCorporaResponse( + next_page_token="next_page_token_value", + ) + ) + await client.list_corpora(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = retriever_service.ListCorporaRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_query_corpus_empty_call_grpc_asyncio(): + client = RetrieverServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.query_corpus), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + retriever_service.QueryCorpusResponse() + ) + await client.query_corpus(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = retriever_service.QueryCorpusRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_document_empty_call_grpc_asyncio(): + client = RetrieverServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.create_document), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + retriever.Document( + name="name_value", + display_name="display_name_value", + ) + ) + await client.create_document(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = retriever_service.CreateDocumentRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. 
request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_document_empty_call_grpc_asyncio(): + client = RetrieverServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_document), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + retriever.Document( + name="name_value", + display_name="display_name_value", + ) + ) + await client.get_document(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = retriever_service.GetDocumentRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_document_empty_call_grpc_asyncio(): + client = RetrieverServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.update_document), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + retriever.Document( + name="name_value", + display_name="display_name_value", + ) + ) + await client.update_document(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = retriever_service.UpdateDocumentRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_document_empty_call_grpc_asyncio(): + client = RetrieverServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.delete_document), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_document(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = retriever_service.DeleteDocumentRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_documents_empty_call_grpc_asyncio(): + client = RetrieverServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.list_documents), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + retriever_service.ListDocumentsResponse( + next_page_token="next_page_token_value", + ) + ) + await client.list_documents(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = retriever_service.ListDocumentsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_query_document_empty_call_grpc_asyncio(): + client = RetrieverServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.query_document), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + retriever_service.QueryDocumentResponse() + ) + await client.query_document(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = retriever_service.QueryDocumentRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_chunk_empty_call_grpc_asyncio(): + client = RetrieverServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.create_chunk), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + retriever.Chunk( + name="name_value", + state=retriever.Chunk.State.STATE_PENDING_PROCESSING, + ) + ) + await client.create_chunk(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = retriever_service.CreateChunkRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_batch_create_chunks_empty_call_grpc_asyncio(): + client = RetrieverServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.batch_create_chunks), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + retriever_service.BatchCreateChunksResponse() + ) + await client.batch_create_chunks(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = retriever_service.BatchCreateChunksRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_chunk_empty_call_grpc_asyncio(): + client = RetrieverServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_chunk), "__call__") as call: + # Designate an appropriate return value for the call. 
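+        # FakeUnaryUnaryCall wraps the value in an awaitable, which is what the async client awaits when the call completes.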
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + retriever.Chunk( + name="name_value", + state=retriever.Chunk.State.STATE_PENDING_PROCESSING, + ) + ) + await client.get_chunk(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = retriever_service.GetChunkRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_chunk_empty_call_grpc_asyncio(): + client = RetrieverServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.update_chunk), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + retriever.Chunk( + name="name_value", + state=retriever.Chunk.State.STATE_PENDING_PROCESSING, + ) + ) + await client.update_chunk(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = retriever_service.UpdateChunkRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_batch_update_chunks_empty_call_grpc_asyncio(): + client = RetrieverServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.batch_update_chunks), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + retriever_service.BatchUpdateChunksResponse() + ) + await client.batch_update_chunks(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = retriever_service.BatchUpdateChunksRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_chunk_empty_call_grpc_asyncio(): + client = RetrieverServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.delete_chunk), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_chunk(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = retriever_service.DeleteChunkRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_batch_delete_chunks_empty_call_grpc_asyncio(): + client = RetrieverServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. 
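+    # This RPC has no response payload, so the mocked awaitable below simply resolves to None.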
+ with mock.patch.object( + type(client.transport.batch_delete_chunks), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.batch_delete_chunks(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = retriever_service.BatchDeleteChunksRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_chunks_empty_call_grpc_asyncio(): + client = RetrieverServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.list_chunks), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + retriever_service.ListChunksResponse( + next_page_token="next_page_token_value", + ) + ) + await client.list_chunks(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = retriever_service.ListChunksRequest() + + assert args[0] == request_msg + + +def test_transport_kind_rest(): + transport = RetrieverServiceClient.get_transport_class("rest")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "rest" + + +def test_create_corpus_rest_bad_request( + request_type=retriever_service.CreateCorpusRequest, +): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.create_corpus(request) + + +@pytest.mark.parametrize( + "request_type", + [ + retriever_service.CreateCorpusRequest, + dict, + ], +) +def test_create_corpus_rest_call_success(request_type): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {} + request_init["corpus"] = { + "name": "name_value", + "display_name": "display_name_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = retriever_service.CreateCorpusRequest.meta.fields["corpus"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. 
+ # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["corpus"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["corpus"][field])): + del request_init["corpus"][field][i][subfield] + else: + del request_init["corpus"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = retriever.Corpus( + name="name_value", + display_name="display_name_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = retriever.Corpus.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.create_corpus(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, retriever.Corpus) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_corpus_rest_interceptors(null_interceptor): + transport = transports.RetrieverServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.RetrieverServiceRestInterceptor(), + ) + client = RetrieverServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.RetrieverServiceRestInterceptor, "post_create_corpus" + ) as post, mock.patch.object( + transports.RetrieverServiceRestInterceptor, "post_create_corpus_with_metadata" + ) as post_with_metadata, mock.patch.object( + transports.RetrieverServiceRestInterceptor, "pre_create_corpus" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = retriever_service.CreateCorpusRequest.pb( + retriever_service.CreateCorpusRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = retriever.Corpus.to_json(retriever.Corpus()) + req.return_value.content = return_value + + request = retriever_service.CreateCorpusRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = retriever.Corpus() + post_with_metadata.return_value = retriever.Corpus(), metadata + + client.create_corpus( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_get_corpus_rest_bad_request(request_type=retriever_service.GetCorpusRequest): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"name": "corpora/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_corpus(request) + + +@pytest.mark.parametrize( + "request_type", + [ + retriever_service.GetCorpusRequest, + dict, + ], +) +def test_get_corpus_rest_call_success(request_type): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"name": "corpora/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
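+        # The REST transport reads the JSON body from response.content, so the proto below is serialized with json_format.MessageToJson before being returned by the mocked session.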
+ return_value = retriever.Corpus( + name="name_value", + display_name="display_name_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = retriever.Corpus.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.get_corpus(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, retriever.Corpus) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_corpus_rest_interceptors(null_interceptor): + transport = transports.RetrieverServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.RetrieverServiceRestInterceptor(), + ) + client = RetrieverServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.RetrieverServiceRestInterceptor, "post_get_corpus" + ) as post, mock.patch.object( + transports.RetrieverServiceRestInterceptor, "post_get_corpus_with_metadata" + ) as post_with_metadata, mock.patch.object( + transports.RetrieverServiceRestInterceptor, "pre_get_corpus" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = retriever_service.GetCorpusRequest.pb( + retriever_service.GetCorpusRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = retriever.Corpus.to_json(retriever.Corpus()) + req.return_value.content = return_value + + request = retriever_service.GetCorpusRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = retriever.Corpus() + post_with_metadata.return_value = retriever.Corpus(), metadata + + client.get_corpus( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_update_corpus_rest_bad_request( + request_type=retriever_service.UpdateCorpusRequest, +): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"corpus": {"name": "corpora/sample1"}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
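+    # The mocked Session returns a 400 status, which the transport is expected to surface as core_exceptions.BadRequest.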
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.update_corpus(request) + + +@pytest.mark.parametrize( + "request_type", + [ + retriever_service.UpdateCorpusRequest, + dict, + ], +) +def test_update_corpus_rest_call_success(request_type): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"corpus": {"name": "corpora/sample1"}} + request_init["corpus"] = { + "name": "corpora/sample1", + "display_name": "display_name_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = retriever_service.UpdateCorpusRequest.meta.fields["corpus"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["corpus"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["corpus"][field])): + 
del request_init["corpus"][field][i][subfield] + else: + del request_init["corpus"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = retriever.Corpus( + name="name_value", + display_name="display_name_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = retriever.Corpus.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.update_corpus(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, retriever.Corpus) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_corpus_rest_interceptors(null_interceptor): + transport = transports.RetrieverServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.RetrieverServiceRestInterceptor(), + ) + client = RetrieverServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.RetrieverServiceRestInterceptor, "post_update_corpus" + ) as post, mock.patch.object( + transports.RetrieverServiceRestInterceptor, "post_update_corpus_with_metadata" + ) as post_with_metadata, mock.patch.object( + transports.RetrieverServiceRestInterceptor, "pre_update_corpus" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = retriever_service.UpdateCorpusRequest.pb( + retriever_service.UpdateCorpusRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = retriever.Corpus.to_json(retriever.Corpus()) + req.return_value.content = return_value + + request = retriever_service.UpdateCorpusRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = retriever.Corpus() + post_with_metadata.return_value = retriever.Corpus(), metadata + + client.update_corpus( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_delete_corpus_rest_bad_request( + request_type=retriever_service.DeleteCorpusRequest, +): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"name": "corpora/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.delete_corpus(request) + + +@pytest.mark.parametrize( + "request_type", + [ + retriever_service.DeleteCorpusRequest, + dict, + ], +) +def test_delete_corpus_rest_call_success(request_type): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"name": "corpora/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = "" + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.delete_corpus(request) + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_corpus_rest_interceptors(null_interceptor): + transport = transports.RetrieverServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.RetrieverServiceRestInterceptor(), + ) + client = RetrieverServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.RetrieverServiceRestInterceptor, "pre_delete_corpus" + ) as pre: + pre.assert_not_called() + pb_message = retriever_service.DeleteCorpusRequest.pb( + retriever_service.DeleteCorpusRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + request = retriever_service.DeleteCorpusRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.delete_corpus( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + + +def test_list_corpora_rest_bad_request( + request_type=retriever_service.ListCorporaRequest, +): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_corpora(request) + + +@pytest.mark.parametrize( + "request_type", + [ + retriever_service.ListCorporaRequest, + dict, + ], +) +def test_list_corpora_rest_call_success(request_type): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = retriever_service.ListCorporaResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = retriever_service.ListCorporaResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.list_corpora(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListCorporaPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_corpora_rest_interceptors(null_interceptor): + transport = transports.RetrieverServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.RetrieverServiceRestInterceptor(), + ) + client = RetrieverServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.RetrieverServiceRestInterceptor, "post_list_corpora" + ) as post, mock.patch.object( + transports.RetrieverServiceRestInterceptor, "post_list_corpora_with_metadata" + ) as post_with_metadata, mock.patch.object( + transports.RetrieverServiceRestInterceptor, "pre_list_corpora" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = retriever_service.ListCorporaRequest.pb( + retriever_service.ListCorporaRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = retriever_service.ListCorporaResponse.to_json( + retriever_service.ListCorporaResponse() + ) + req.return_value.content = return_value + + request = retriever_service.ListCorporaRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = retriever_service.ListCorporaResponse() + 
post_with_metadata.return_value = ( + retriever_service.ListCorporaResponse(), + metadata, + ) + + client.list_corpora( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_query_corpus_rest_bad_request( + request_type=retriever_service.QueryCorpusRequest, +): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"name": "corpora/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.query_corpus(request) + + +@pytest.mark.parametrize( + "request_type", + [ + retriever_service.QueryCorpusRequest, + dict, + ], +) +def test_query_corpus_rest_call_success(request_type): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"name": "corpora/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = retriever_service.QueryCorpusResponse() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = retriever_service.QueryCorpusResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.query_corpus(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, retriever_service.QueryCorpusResponse) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_query_corpus_rest_interceptors(null_interceptor): + transport = transports.RetrieverServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.RetrieverServiceRestInterceptor(), + ) + client = RetrieverServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.RetrieverServiceRestInterceptor, "post_query_corpus" + ) as post, mock.patch.object( + transports.RetrieverServiceRestInterceptor, "post_query_corpus_with_metadata" + ) as post_with_metadata, mock.patch.object( + transports.RetrieverServiceRestInterceptor, "pre_query_corpus" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = retriever_service.QueryCorpusRequest.pb( + retriever_service.QueryCorpusRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = retriever_service.QueryCorpusResponse.to_json( + retriever_service.QueryCorpusResponse() + ) + req.return_value.content = return_value + + request = retriever_service.QueryCorpusRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = retriever_service.QueryCorpusResponse() + post_with_metadata.return_value = ( + retriever_service.QueryCorpusResponse(), + metadata, + ) + + client.query_corpus( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_create_document_rest_bad_request( + request_type=retriever_service.CreateDocumentRequest, +): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"parent": "corpora/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.create_document(request) + + +@pytest.mark.parametrize( + "request_type", + [ + retriever_service.CreateDocumentRequest, + dict, + ], +) +def test_create_document_rest_call_success(request_type): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "corpora/sample1"} + request_init["document"] = { + "name": "name_value", + "display_name": "display_name_value", + "custom_metadata": [ + { + "string_value": "string_value_value", + "string_list_value": {"values": ["values_value1", "values_value2"]}, + "numeric_value": 0.1391, + "key": "key_value", + } + ], + "update_time": {"seconds": 751, "nanos": 543}, + "create_time": {}, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = retriever_service.CreateDocumentRequest.meta.fields["document"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["document"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["document"][field])): + del request_init["document"][field][i][subfield] + else: + del request_init["document"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = retriever.Document( + name="name_value", + display_name="display_name_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = retriever.Document.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.create_document(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, retriever.Document) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_document_rest_interceptors(null_interceptor): + transport = transports.RetrieverServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.RetrieverServiceRestInterceptor(), + ) + client = RetrieverServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.RetrieverServiceRestInterceptor, "post_create_document" + ) as post, mock.patch.object( + transports.RetrieverServiceRestInterceptor, "post_create_document_with_metadata" + ) as post_with_metadata, mock.patch.object( + transports.RetrieverServiceRestInterceptor, "pre_create_document" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = retriever_service.CreateDocumentRequest.pb( + retriever_service.CreateDocumentRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = retriever.Document.to_json(retriever.Document()) + req.return_value.content = return_value + + request = retriever_service.CreateDocumentRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = retriever.Document() + post_with_metadata.return_value = retriever.Document(), metadata + + client.create_document( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_get_document_rest_bad_request( + request_type=retriever_service.GetDocumentRequest, +): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"name": "corpora/sample1/documents/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_document(request) + + +@pytest.mark.parametrize( + "request_type", + [ + retriever_service.GetDocumentRequest, + dict, + ], +) +def test_get_document_rest_call_success(request_type): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"name": "corpora/sample1/documents/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = retriever.Document( + name="name_value", + display_name="display_name_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = retriever.Document.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.get_document(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, retriever.Document) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_document_rest_interceptors(null_interceptor): + transport = transports.RetrieverServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.RetrieverServiceRestInterceptor(), + ) + client = RetrieverServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.RetrieverServiceRestInterceptor, "post_get_document" + ) as post, mock.patch.object( + transports.RetrieverServiceRestInterceptor, "post_get_document_with_metadata" + ) as post_with_metadata, mock.patch.object( + transports.RetrieverServiceRestInterceptor, "pre_get_document" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = retriever_service.GetDocumentRequest.pb( + retriever_service.GetDocumentRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = retriever.Document.to_json(retriever.Document()) + req.return_value.content = return_value + + request = retriever_service.GetDocumentRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = retriever.Document() + post_with_metadata.return_value = retriever.Document(), metadata + + client.get_document( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_update_document_rest_bad_request( + request_type=retriever_service.UpdateDocumentRequest, +): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"document": {"name": "corpora/sample1/documents/sample2"}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.update_document(request) + + +@pytest.mark.parametrize( + "request_type", + [ + retriever_service.UpdateDocumentRequest, + dict, + ], +) +def test_update_document_rest_call_success(request_type): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"document": {"name": "corpora/sample1/documents/sample2"}} + request_init["document"] = { + "name": "corpora/sample1/documents/sample2", + "display_name": "display_name_value", + "custom_metadata": [ + { + "string_value": "string_value_value", + "string_list_value": {"values": ["values_value1", "values_value2"]}, + "numeric_value": 0.1391, + "key": "key_value", + } + ], + "update_time": {"seconds": 751, "nanos": 543}, + "create_time": {}, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = retriever_service.UpdateDocumentRequest.meta.fields["document"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["document"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["document"][field])): + del request_init["document"][field][i][subfield] + else: + del request_init["document"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = retriever.Document( + name="name_value", + display_name="display_name_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = retriever.Document.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.update_document(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, retriever.Document) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_document_rest_interceptors(null_interceptor): + transport = transports.RetrieverServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.RetrieverServiceRestInterceptor(), + ) + client = RetrieverServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.RetrieverServiceRestInterceptor, "post_update_document" + ) as post, mock.patch.object( + transports.RetrieverServiceRestInterceptor, "post_update_document_with_metadata" + ) as post_with_metadata, mock.patch.object( + transports.RetrieverServiceRestInterceptor, "pre_update_document" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = retriever_service.UpdateDocumentRequest.pb( + retriever_service.UpdateDocumentRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = retriever.Document.to_json(retriever.Document()) + req.return_value.content = return_value + + request = retriever_service.UpdateDocumentRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = retriever.Document() + post_with_metadata.return_value = retriever.Document(), metadata + + client.update_document( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_delete_document_rest_bad_request( + request_type=retriever_service.DeleteDocumentRequest, +): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"name": "corpora/sample1/documents/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.delete_document(request) + + +@pytest.mark.parametrize( + "request_type", + [ + retriever_service.DeleteDocumentRequest, + dict, + ], +) +def test_delete_document_rest_call_success(request_type): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"name": "corpora/sample1/documents/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = "" + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.delete_document(request) + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_document_rest_interceptors(null_interceptor): + transport = transports.RetrieverServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.RetrieverServiceRestInterceptor(), + ) + client = RetrieverServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.RetrieverServiceRestInterceptor, "pre_delete_document" + ) as pre: + pre.assert_not_called() + pb_message = retriever_service.DeleteDocumentRequest.pb( + retriever_service.DeleteDocumentRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + request = retriever_service.DeleteDocumentRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.delete_document( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + + +def test_list_documents_rest_bad_request( + request_type=retriever_service.ListDocumentsRequest, +): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"parent": "corpora/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_documents(request) + + +@pytest.mark.parametrize( + "request_type", + [ + retriever_service.ListDocumentsRequest, + dict, + ], +) +def test_list_documents_rest_call_success(request_type): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "corpora/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = retriever_service.ListDocumentsResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = retriever_service.ListDocumentsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.list_documents(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListDocumentsPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_documents_rest_interceptors(null_interceptor): + transport = transports.RetrieverServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.RetrieverServiceRestInterceptor(), + ) + client = RetrieverServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.RetrieverServiceRestInterceptor, "post_list_documents" + ) as post, mock.patch.object( + transports.RetrieverServiceRestInterceptor, "post_list_documents_with_metadata" + ) as post_with_metadata, mock.patch.object( + transports.RetrieverServiceRestInterceptor, "pre_list_documents" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = retriever_service.ListDocumentsRequest.pb( + retriever_service.ListDocumentsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = retriever_service.ListDocumentsResponse.to_json( + retriever_service.ListDocumentsResponse() + ) + req.return_value.content = return_value + + request = retriever_service.ListDocumentsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = retriever_service.ListDocumentsResponse() + post_with_metadata.return_value = ( + retriever_service.ListDocumentsResponse(), + metadata, + ) + + client.list_documents( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_query_document_rest_bad_request( + request_type=retriever_service.QueryDocumentRequest, +): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"name": "corpora/sample1/documents/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.query_document(request) + + +@pytest.mark.parametrize( + "request_type", + [ + retriever_service.QueryDocumentRequest, + dict, + ], +) +def test_query_document_rest_call_success(request_type): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"name": "corpora/sample1/documents/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = retriever_service.QueryDocumentResponse() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = retriever_service.QueryDocumentResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.query_document(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, retriever_service.QueryDocumentResponse) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_query_document_rest_interceptors(null_interceptor): + transport = transports.RetrieverServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.RetrieverServiceRestInterceptor(), + ) + client = RetrieverServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.RetrieverServiceRestInterceptor, "post_query_document" + ) as post, mock.patch.object( + transports.RetrieverServiceRestInterceptor, "post_query_document_with_metadata" + ) as post_with_metadata, mock.patch.object( + transports.RetrieverServiceRestInterceptor, "pre_query_document" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = retriever_service.QueryDocumentRequest.pb( + retriever_service.QueryDocumentRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = retriever_service.QueryDocumentResponse.to_json( + retriever_service.QueryDocumentResponse() + ) + req.return_value.content = return_value + + request = retriever_service.QueryDocumentRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = retriever_service.QueryDocumentResponse() + 
post_with_metadata.return_value = ( + retriever_service.QueryDocumentResponse(), + metadata, + ) + + client.query_document( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_create_chunk_rest_bad_request( + request_type=retriever_service.CreateChunkRequest, +): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"parent": "corpora/sample1/documents/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.create_chunk(request) + + +@pytest.mark.parametrize( + "request_type", + [ + retriever_service.CreateChunkRequest, + dict, + ], +) +def test_create_chunk_rest_call_success(request_type): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "corpora/sample1/documents/sample2"} + request_init["chunk"] = { + "name": "name_value", + "data": {"string_value": "string_value_value"}, + "custom_metadata": [ + { + "string_value": "string_value_value", + "string_list_value": {"values": ["values_value1", "values_value2"]}, + "numeric_value": 0.1391, + "key": "key_value", + } + ], + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "state": 1, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = retriever_service.CreateChunkRequest.meta.fields["chunk"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["chunk"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["chunk"][field])): + del request_init["chunk"][field][i][subfield] + else: + del request_init["chunk"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = retriever.Chunk( + name="name_value", + state=retriever.Chunk.State.STATE_PENDING_PROCESSING, + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = retriever.Chunk.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.create_chunk(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, retriever.Chunk) + assert response.name == "name_value" + assert response.state == retriever.Chunk.State.STATE_PENDING_PROCESSING + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_chunk_rest_interceptors(null_interceptor): + transport = transports.RetrieverServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.RetrieverServiceRestInterceptor(), + ) + client = RetrieverServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.RetrieverServiceRestInterceptor, "post_create_chunk" + ) as post, mock.patch.object( + transports.RetrieverServiceRestInterceptor, "post_create_chunk_with_metadata" + ) as post_with_metadata, mock.patch.object( + transports.RetrieverServiceRestInterceptor, "pre_create_chunk" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = retriever_service.CreateChunkRequest.pb( + retriever_service.CreateChunkRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = retriever.Chunk.to_json(retriever.Chunk()) + req.return_value.content = return_value + + request = retriever_service.CreateChunkRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = retriever.Chunk() + post_with_metadata.return_value = retriever.Chunk(), metadata + + client.create_chunk( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_batch_create_chunks_rest_bad_request( + request_type=retriever_service.BatchCreateChunksRequest, +): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"parent": "corpora/sample1/documents/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.batch_create_chunks(request) + + +@pytest.mark.parametrize( + "request_type", + [ + retriever_service.BatchCreateChunksRequest, + dict, + ], +) +def test_batch_create_chunks_rest_call_success(request_type): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "corpora/sample1/documents/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = retriever_service.BatchCreateChunksResponse() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = retriever_service.BatchCreateChunksResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.batch_create_chunks(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, retriever_service.BatchCreateChunksResponse) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_batch_create_chunks_rest_interceptors(null_interceptor): + transport = transports.RetrieverServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.RetrieverServiceRestInterceptor(), + ) + client = RetrieverServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.RetrieverServiceRestInterceptor, "post_batch_create_chunks" + ) as post, mock.patch.object( + transports.RetrieverServiceRestInterceptor, + "post_batch_create_chunks_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.RetrieverServiceRestInterceptor, "pre_batch_create_chunks" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = retriever_service.BatchCreateChunksRequest.pb( + retriever_service.BatchCreateChunksRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = retriever_service.BatchCreateChunksResponse.to_json( + retriever_service.BatchCreateChunksResponse() + ) + req.return_value.content = return_value + + request = retriever_service.BatchCreateChunksRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = retriever_service.BatchCreateChunksResponse() + post_with_metadata.return_value = ( + retriever_service.BatchCreateChunksResponse(), + metadata, + ) + + client.batch_create_chunks( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_get_chunk_rest_bad_request(request_type=retriever_service.GetChunkRequest): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"name": "corpora/sample1/documents/sample2/chunks/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_chunk(request) + + +@pytest.mark.parametrize( + "request_type", + [ + retriever_service.GetChunkRequest, + dict, + ], +) +def test_get_chunk_rest_call_success(request_type): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"name": "corpora/sample1/documents/sample2/chunks/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = retriever.Chunk( + name="name_value", + state=retriever.Chunk.State.STATE_PENDING_PROCESSING, + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = retriever.Chunk.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.get_chunk(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, retriever.Chunk) + assert response.name == "name_value" + assert response.state == retriever.Chunk.State.STATE_PENDING_PROCESSING + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_chunk_rest_interceptors(null_interceptor): + transport = transports.RetrieverServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.RetrieverServiceRestInterceptor(), + ) + client = RetrieverServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.RetrieverServiceRestInterceptor, "post_get_chunk" + ) as post, mock.patch.object( + transports.RetrieverServiceRestInterceptor, "post_get_chunk_with_metadata" + ) as post_with_metadata, mock.patch.object( + transports.RetrieverServiceRestInterceptor, "pre_get_chunk" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = retriever_service.GetChunkRequest.pb( + retriever_service.GetChunkRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = retriever.Chunk.to_json(retriever.Chunk()) + req.return_value.content = return_value + + request = retriever_service.GetChunkRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = retriever.Chunk() + 
post_with_metadata.return_value = retriever.Chunk(), metadata + + client.get_chunk( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_update_chunk_rest_bad_request( + request_type=retriever_service.UpdateChunkRequest, +): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = { + "chunk": {"name": "corpora/sample1/documents/sample2/chunks/sample3"} + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.update_chunk(request) + + +@pytest.mark.parametrize( + "request_type", + [ + retriever_service.UpdateChunkRequest, + dict, + ], +) +def test_update_chunk_rest_call_success(request_type): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = { + "chunk": {"name": "corpora/sample1/documents/sample2/chunks/sample3"} + } + request_init["chunk"] = { + "name": "corpora/sample1/documents/sample2/chunks/sample3", + "data": {"string_value": "string_value_value"}, + "custom_metadata": [ + { + "string_value": "string_value_value", + "string_list_value": {"values": ["values_value1", "values_value2"]}, + "numeric_value": 0.1391, + "key": "key_value", + } + ], + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "state": 1, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = retriever_service.UpdateChunkRequest.meta.fields["chunk"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["chunk"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["chunk"][field])): + del request_init["chunk"][field][i][subfield] + else: + del request_init["chunk"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = retriever.Chunk( + name="name_value", + state=retriever.Chunk.State.STATE_PENDING_PROCESSING, + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = retriever.Chunk.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.update_chunk(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, retriever.Chunk) + assert response.name == "name_value" + assert response.state == retriever.Chunk.State.STATE_PENDING_PROCESSING + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_chunk_rest_interceptors(null_interceptor): + transport = transports.RetrieverServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.RetrieverServiceRestInterceptor(), + ) + client = RetrieverServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.RetrieverServiceRestInterceptor, "post_update_chunk" + ) as post, mock.patch.object( + transports.RetrieverServiceRestInterceptor, "post_update_chunk_with_metadata" + ) as post_with_metadata, mock.patch.object( + transports.RetrieverServiceRestInterceptor, "pre_update_chunk" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = retriever_service.UpdateChunkRequest.pb( + retriever_service.UpdateChunkRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = retriever.Chunk.to_json(retriever.Chunk()) + req.return_value.content = return_value + + request = retriever_service.UpdateChunkRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = retriever.Chunk() + post_with_metadata.return_value = retriever.Chunk(), metadata + + client.update_chunk( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_batch_update_chunks_rest_bad_request( + request_type=retriever_service.BatchUpdateChunksRequest, +): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"parent": "corpora/sample1/documents/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.batch_update_chunks(request) + + +@pytest.mark.parametrize( + "request_type", + [ + retriever_service.BatchUpdateChunksRequest, + dict, + ], +) +def test_batch_update_chunks_rest_call_success(request_type): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "corpora/sample1/documents/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = retriever_service.BatchUpdateChunksResponse() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = retriever_service.BatchUpdateChunksResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.batch_update_chunks(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, retriever_service.BatchUpdateChunksResponse) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_batch_update_chunks_rest_interceptors(null_interceptor): + transport = transports.RetrieverServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.RetrieverServiceRestInterceptor(), + ) + client = RetrieverServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.RetrieverServiceRestInterceptor, "post_batch_update_chunks" + ) as post, mock.patch.object( + transports.RetrieverServiceRestInterceptor, + "post_batch_update_chunks_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.RetrieverServiceRestInterceptor, "pre_batch_update_chunks" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = retriever_service.BatchUpdateChunksRequest.pb( + retriever_service.BatchUpdateChunksRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = retriever_service.BatchUpdateChunksResponse.to_json( + retriever_service.BatchUpdateChunksResponse() + ) + req.return_value.content = return_value + + request = retriever_service.BatchUpdateChunksRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = retriever_service.BatchUpdateChunksResponse() + post_with_metadata.return_value = ( + retriever_service.BatchUpdateChunksResponse(), + metadata, + ) + + client.batch_update_chunks( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_delete_chunk_rest_bad_request( + request_type=retriever_service.DeleteChunkRequest, +): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"name": "corpora/sample1/documents/sample2/chunks/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.delete_chunk(request) + + +@pytest.mark.parametrize( + "request_type", + [ + retriever_service.DeleteChunkRequest, + dict, + ], +) +def test_delete_chunk_rest_call_success(request_type): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"name": "corpora/sample1/documents/sample2/chunks/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = "" + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.delete_chunk(request) + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_chunk_rest_interceptors(null_interceptor): + transport = transports.RetrieverServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.RetrieverServiceRestInterceptor(), + ) + client = RetrieverServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.RetrieverServiceRestInterceptor, "pre_delete_chunk" + ) as pre: + pre.assert_not_called() + pb_message = retriever_service.DeleteChunkRequest.pb( + retriever_service.DeleteChunkRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + request = retriever_service.DeleteChunkRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.delete_chunk( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + + +def test_batch_delete_chunks_rest_bad_request( + request_type=retriever_service.BatchDeleteChunksRequest, +): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"parent": "corpora/sample1/documents/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.batch_delete_chunks(request) + + +@pytest.mark.parametrize( + "request_type", + [ + retriever_service.BatchDeleteChunksRequest, + dict, + ], +) +def test_batch_delete_chunks_rest_call_success(request_type): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "corpora/sample1/documents/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = "" + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.batch_delete_chunks(request) + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_batch_delete_chunks_rest_interceptors(null_interceptor): + transport = transports.RetrieverServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.RetrieverServiceRestInterceptor(), + ) + client = RetrieverServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.RetrieverServiceRestInterceptor, "pre_batch_delete_chunks" + ) as pre: + pre.assert_not_called() + pb_message = retriever_service.BatchDeleteChunksRequest.pb( + retriever_service.BatchDeleteChunksRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + request = retriever_service.BatchDeleteChunksRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.batch_delete_chunks( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + + +def test_list_chunks_rest_bad_request(request_type=retriever_service.ListChunksRequest): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"parent": "corpora/sample1/documents/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_chunks(request) + + +@pytest.mark.parametrize( + "request_type", + [ + retriever_service.ListChunksRequest, + dict, + ], +) +def test_list_chunks_rest_call_success(request_type): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "corpora/sample1/documents/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = retriever_service.ListChunksResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = retriever_service.ListChunksResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.list_chunks(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListChunksPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_chunks_rest_interceptors(null_interceptor): + transport = transports.RetrieverServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.RetrieverServiceRestInterceptor(), + ) + client = RetrieverServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.RetrieverServiceRestInterceptor, "post_list_chunks" + ) as post, mock.patch.object( + transports.RetrieverServiceRestInterceptor, "post_list_chunks_with_metadata" + ) as post_with_metadata, mock.patch.object( + transports.RetrieverServiceRestInterceptor, "pre_list_chunks" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = retriever_service.ListChunksRequest.pb( + retriever_service.ListChunksRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = retriever_service.ListChunksResponse.to_json( + retriever_service.ListChunksResponse() + ) + req.return_value.content = return_value + + request = retriever_service.ListChunksRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = retriever_service.ListChunksResponse() + post_with_metadata.return_value = ( + retriever_service.ListChunksResponse(), + metadata, + ) + + client.list_chunks( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_get_operation_rest_bad_request( + request_type=operations_pb2.GetOperationRequest, +): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict( + {"name": "tunedModels/sample1/operations/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.GetOperationRequest, + dict, + ], +) +def test_get_operation_rest(request_type): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {"name": "tunedModels/sample1/operations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_operation(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_list_operations_rest_bad_request( + request_type=operations_pb2.ListOperationsRequest, +): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict({"name": "tunedModels/sample1"}, request) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_operations(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.ListOperationsRequest, + dict, + ], +) +def test_list_operations_rest(request_type): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {"name": "tunedModels/sample1"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.ListOperationsResponse() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_operations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_initialize_client_w_rest(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_corpus_empty_call_rest(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.create_corpus), "__call__") as call: + client.create_corpus(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = retriever_service.CreateCorpusRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_get_corpus_empty_call_rest(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_corpus), "__call__") as call: + client.get_corpus(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = retriever_service.GetCorpusRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_corpus_empty_call_rest(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.update_corpus), "__call__") as call: + client.update_corpus(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = retriever_service.UpdateCorpusRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_corpus_empty_call_rest(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.delete_corpus), "__call__") as call: + client.delete_corpus(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = retriever_service.DeleteCorpusRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_corpora_empty_call_rest(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.list_corpora), "__call__") as call: + client.list_corpora(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = retriever_service.ListCorporaRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_query_corpus_empty_call_rest(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.query_corpus), "__call__") as call: + client.query_corpus(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = retriever_service.QueryCorpusRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_create_document_empty_call_rest(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.create_document), "__call__") as call: + client.create_document(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = retriever_service.CreateDocumentRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_document_empty_call_rest(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_document), "__call__") as call: + client.get_document(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = retriever_service.GetDocumentRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_document_empty_call_rest(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.update_document), "__call__") as call: + client.update_document(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = retriever_service.UpdateDocumentRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_document_empty_call_rest(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.delete_document), "__call__") as call: + client.delete_document(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = retriever_service.DeleteDocumentRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_documents_empty_call_rest(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.list_documents), "__call__") as call: + client.list_documents(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = retriever_service.ListDocumentsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_query_document_empty_call_rest(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.query_document), "__call__") as call: + client.query_document(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = retriever_service.QueryDocumentRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_chunk_empty_call_rest(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.create_chunk), "__call__") as call: + client.create_chunk(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = retriever_service.CreateChunkRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_batch_create_chunks_empty_call_rest(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.batch_create_chunks), "__call__" + ) as call: + client.batch_create_chunks(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = retriever_service.BatchCreateChunksRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_chunk_empty_call_rest(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_chunk), "__call__") as call: + client.get_chunk(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = retriever_service.GetChunkRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_chunk_empty_call_rest(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.update_chunk), "__call__") as call: + client.update_chunk(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = retriever_service.UpdateChunkRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_batch_update_chunks_empty_call_rest(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.batch_update_chunks), "__call__" + ) as call: + client.batch_update_chunks(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = retriever_service.BatchUpdateChunksRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_chunk_empty_call_rest(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.delete_chunk), "__call__") as call: + client.delete_chunk(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = retriever_service.DeleteChunkRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_batch_delete_chunks_empty_call_rest(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.batch_delete_chunks), "__call__" + ) as call: + client.batch_delete_chunks(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = retriever_service.BatchDeleteChunksRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_chunks_empty_call_rest(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.list_chunks), "__call__") as call: + client.list_chunks(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = retriever_service.ListChunksRequest() + + assert args[0] == request_msg + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.RetrieverServiceGrpcTransport, + ) + + +def test_retriever_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.RetrieverServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_retriever_service_base_transport(): + # Instantiate the base transport. 
+ with mock.patch( + "google.ai.generativelanguage_v1alpha.services.retriever_service.transports.RetrieverServiceTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.RetrieverServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. + methods = ( + "create_corpus", + "get_corpus", + "update_corpus", + "delete_corpus", + "list_corpora", + "query_corpus", + "create_document", + "get_document", + "update_document", + "delete_document", + "list_documents", + "query_document", + "create_chunk", + "batch_create_chunks", + "get_chunk", + "update_chunk", + "batch_update_chunks", + "delete_chunk", + "batch_delete_chunks", + "list_chunks", + "get_operation", + "list_operations", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_retriever_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.ai.generativelanguage_v1alpha.services.retriever_service.transports.RetrieverServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.RetrieverServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=(), + quota_project_id="octopus", + ) + + +def test_retriever_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.ai.generativelanguage_v1alpha.services.retriever_service.transports.RetrieverServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.RetrieverServiceTransport() + adc.assert_called_once() + + +def test_retriever_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + RetrieverServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=(), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.RetrieverServiceGrpcTransport, + transports.RetrieverServiceGrpcAsyncIOTransport, + ], +) +def test_retriever_service_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=(), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.RetrieverServiceGrpcTransport, + transports.RetrieverServiceGrpcAsyncIOTransport, + transports.RetrieverServiceRestTransport, + ], +) +def test_retriever_service_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.RetrieverServiceGrpcTransport, grpc_helpers), + (transports.RetrieverServiceGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +def test_retriever_service_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "generativelanguage.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=(), + scopes=["1", "2"], + default_host="generativelanguage.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.RetrieverServiceGrpcTransport, + transports.RetrieverServiceGrpcAsyncIOTransport, + ], +) +def test_retriever_service_grpc_transport_client_cert_source_for_mtls(transport_class): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + +def test_retriever_service_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.RetrieverServiceRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_retriever_service_host_no_port(transport_name): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="generativelanguage.googleapis.com" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "generativelanguage.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://generativelanguage.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_retriever_service_host_with_port(transport_name): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="generativelanguage.googleapis.com:8000" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "generativelanguage.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://generativelanguage.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_retriever_service_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = RetrieverServiceClient( + credentials=creds1, + transport=transport_name, + ) + client2 = RetrieverServiceClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.create_corpus._session + session2 = client2.transport.create_corpus._session + assert session1 != session2 + session1 = client1.transport.get_corpus._session + session2 = client2.transport.get_corpus._session + assert session1 != session2 + session1 = client1.transport.update_corpus._session + session2 = client2.transport.update_corpus._session + assert session1 != session2 + session1 = client1.transport.delete_corpus._session + session2 = client2.transport.delete_corpus._session + assert session1 != session2 + session1 = client1.transport.list_corpora._session + session2 = client2.transport.list_corpora._session + assert session1 != session2 + session1 = client1.transport.query_corpus._session + session2 = client2.transport.query_corpus._session + assert session1 != session2 + session1 = client1.transport.create_document._session + session2 = client2.transport.create_document._session + assert session1 != session2 + session1 = client1.transport.get_document._session + session2 = client2.transport.get_document._session + assert session1 != session2 
+ session1 = client1.transport.update_document._session + session2 = client2.transport.update_document._session + assert session1 != session2 + session1 = client1.transport.delete_document._session + session2 = client2.transport.delete_document._session + assert session1 != session2 + session1 = client1.transport.list_documents._session + session2 = client2.transport.list_documents._session + assert session1 != session2 + session1 = client1.transport.query_document._session + session2 = client2.transport.query_document._session + assert session1 != session2 + session1 = client1.transport.create_chunk._session + session2 = client2.transport.create_chunk._session + assert session1 != session2 + session1 = client1.transport.batch_create_chunks._session + session2 = client2.transport.batch_create_chunks._session + assert session1 != session2 + session1 = client1.transport.get_chunk._session + session2 = client2.transport.get_chunk._session + assert session1 != session2 + session1 = client1.transport.update_chunk._session + session2 = client2.transport.update_chunk._session + assert session1 != session2 + session1 = client1.transport.batch_update_chunks._session + session2 = client2.transport.batch_update_chunks._session + assert session1 != session2 + session1 = client1.transport.delete_chunk._session + session2 = client2.transport.delete_chunk._session + assert session1 != session2 + session1 = client1.transport.batch_delete_chunks._session + session2 = client2.transport.batch_delete_chunks._session + assert session1 != session2 + session1 = client1.transport.list_chunks._session + session2 = client2.transport.list_chunks._session + assert session1 != session2 + + +def test_retriever_service_grpc_transport_channel(): + channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.RetrieverServiceGrpcTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +def test_retriever_service_grpc_asyncio_transport_channel(): + channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.RetrieverServiceGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. 
+@pytest.mark.parametrize( + "transport_class", + [ + transports.RetrieverServiceGrpcTransport, + transports.RetrieverServiceGrpcAsyncIOTransport, + ], +) +def test_retriever_service_transport_channel_mtls_with_client_cert_source( + transport_class, +): + with mock.patch( + "grpc.ssl_channel_credentials", autospec=True + ) as grpc_ssl_channel_cred: + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize( + "transport_class", + [ + transports.RetrieverServiceGrpcTransport, + transports.RetrieverServiceGrpcAsyncIOTransport, + ], +) +def test_retriever_service_transport_channel_mtls_with_adc(transport_class): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_chunk_path(): + corpus = "squid" + document = "clam" + chunk = "whelk" + expected = "corpora/{corpus}/documents/{document}/chunks/{chunk}".format( + corpus=corpus, + document=document, + chunk=chunk, + ) + actual = RetrieverServiceClient.chunk_path(corpus, document, chunk) + assert expected == actual + + +def test_parse_chunk_path(): + expected = { + "corpus": "octopus", + "document": "oyster", + "chunk": "nudibranch", + } + path = RetrieverServiceClient.chunk_path(**expected) + + # Check that the path construction is reversible. 
+ actual = RetrieverServiceClient.parse_chunk_path(path) + assert expected == actual + + +def test_corpus_path(): + corpus = "cuttlefish" + expected = "corpora/{corpus}".format( + corpus=corpus, + ) + actual = RetrieverServiceClient.corpus_path(corpus) + assert expected == actual + + +def test_parse_corpus_path(): + expected = { + "corpus": "mussel", + } + path = RetrieverServiceClient.corpus_path(**expected) + + # Check that the path construction is reversible. + actual = RetrieverServiceClient.parse_corpus_path(path) + assert expected == actual + + +def test_document_path(): + corpus = "winkle" + document = "nautilus" + expected = "corpora/{corpus}/documents/{document}".format( + corpus=corpus, + document=document, + ) + actual = RetrieverServiceClient.document_path(corpus, document) + assert expected == actual + + +def test_parse_document_path(): + expected = { + "corpus": "scallop", + "document": "abalone", + } + path = RetrieverServiceClient.document_path(**expected) + + # Check that the path construction is reversible. + actual = RetrieverServiceClient.parse_document_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "squid" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = RetrieverServiceClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "clam", + } + path = RetrieverServiceClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = RetrieverServiceClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "whelk" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = RetrieverServiceClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "octopus", + } + path = RetrieverServiceClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = RetrieverServiceClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "oyster" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = RetrieverServiceClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nudibranch", + } + path = RetrieverServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = RetrieverServiceClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "cuttlefish" + expected = "projects/{project}".format( + project=project, + ) + actual = RetrieverServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "mussel", + } + path = RetrieverServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. 
+ actual = RetrieverServiceClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "winkle" + location = "nautilus" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = RetrieverServiceClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "scallop", + "location": "abalone", + } + path = RetrieverServiceClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = RetrieverServiceClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.RetrieverServiceTransport, "_prep_wrapped_messages" + ) as prep: + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.RetrieverServiceTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = RetrieverServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +def test_get_operation(transport: str = "grpc"): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + response = client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +@pytest.mark.asyncio +async def test_get_operation_async(transport: str = "grpc_asyncio"): + client = RetrieverServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, operations_pb2.Operation) + + +def test_get_operation_field_headers(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = operations_pb2.Operation() + + client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_operation_field_headers_async(): + client = RetrieverServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_get_operation_from_dict(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + + response = client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_get_operation_from_dict_async(): + client = RetrieverServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_operations(transport: str = "grpc"): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + response = client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +@pytest.mark.asyncio +async def test_list_operations_async(transport: str = "grpc_asyncio"): + client = RetrieverServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_list_operations_field_headers(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = operations_pb2.ListOperationsResponse() + + client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_operations_field_headers_async(): + client = RetrieverServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_list_operations_from_dict(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + + response = client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_list_operations_from_dict_async(): + client = RetrieverServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_transport_close_grpc(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc" + ) + with mock.patch.object( + type(getattr(client.transport, "_grpc_channel")), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +@pytest.mark.asyncio +async def test_transport_close_grpc_asyncio(): + client = RetrieverServiceAsyncClient( + credentials=async_anonymous_credentials(), transport="grpc_asyncio" + ) + with mock.patch.object( + type(getattr(client.transport, "_grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_transport_close_rest(): + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + with mock.patch.object( + type(getattr(client.transport, "_session")), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + "grpc", + ] + for transport in transports: + client = RetrieverServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (RetrieverServiceClient, transports.RetrieverServiceGrpcTransport), + (RetrieverServiceAsyncClient, transports.RetrieverServiceGrpcAsyncIOTransport), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1alpha/test_text_service.py b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1alpha/test_text_service.py new file mode 100644 index 000000000000..c73969a8a3ac --- /dev/null +++ b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1alpha/test_text_service.py @@ -0,0 +1,5177 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +from collections.abc import AsyncIterable, Iterable +import json +import math + +from google.api_core import api_core_version +from google.protobuf import json_format +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +try: + from google.auth.aio import credentials as ga_credentials_async + + HAS_GOOGLE_AUTH_AIO = True +except ImportError: # pragma: NO COVER + HAS_GOOGLE_AUTH_AIO = False + +from google.api_core import gapic_v1, grpc_helpers, grpc_helpers_async, path_template +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account + +from google.ai.generativelanguage_v1alpha.services.text_service import ( + TextServiceAsyncClient, + TextServiceClient, + transports, +) +from google.ai.generativelanguage_v1alpha.types import safety, text_service + +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + + +async def mock_async_gen(data, chunk_size=1): + for i in range(0, len(data)): # pragma: NO COVER + chunk = data[i : i + chunk_size] + yield chunk.encode("utf-8") + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded. +# See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107. +def async_anonymous_credentials(): + if HAS_GOOGLE_AUTH_AIO: + return ga_credentials_async.AnonymousCredentials() + return ga_credentials.AnonymousCredentials() + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +# If default endpoint template is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint template so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint_template(client): + return ( + "test.{UNIVERSE_DOMAIN}" + if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) + else client._DEFAULT_ENDPOINT_TEMPLATE + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert TextServiceClient._get_default_mtls_endpoint(None) is None + assert ( + TextServiceClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + ) + assert ( + TextServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + TextServiceClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + TextServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert TextServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +def test__read_environment_variables(): + assert TextServiceClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert TextServiceClient._read_environment_variables() == (True, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert TextServiceClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + TextServiceClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + assert TextServiceClient._read_environment_variables() == (False, "never", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + assert TextServiceClient._read_environment_variables() == ( + False, + "always", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): + assert TextServiceClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + TextServiceClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): + assert TextServiceClient._read_environment_variables() == ( + False, + "auto", + "foo.com", + ) + + +def test__get_client_cert_source(): + mock_provided_cert_source = mock.Mock() + mock_default_cert_source = mock.Mock() + + assert TextServiceClient._get_client_cert_source(None, False) is None + assert ( + TextServiceClient._get_client_cert_source(mock_provided_cert_source, False) + is None + ) + assert ( + TextServiceClient._get_client_cert_source(mock_provided_cert_source, True) + == mock_provided_cert_source + ) + + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", return_value=True + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_default_cert_source, + ): + assert ( + 
TextServiceClient._get_client_cert_source(None, True) + is mock_default_cert_source + ) + assert ( + TextServiceClient._get_client_cert_source( + mock_provided_cert_source, "true" + ) + is mock_provided_cert_source + ) + + +@mock.patch.object( + TextServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(TextServiceClient), +) +@mock.patch.object( + TextServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(TextServiceAsyncClient), +) +def test__get_api_endpoint(): + api_override = "foo.com" + mock_client_cert_source = mock.Mock() + default_universe = TextServiceClient._DEFAULT_UNIVERSE + default_endpoint = TextServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = TextServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + assert ( + TextServiceClient._get_api_endpoint( + api_override, mock_client_cert_source, default_universe, "always" + ) + == api_override + ) + assert ( + TextServiceClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "auto" + ) + == TextServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + TextServiceClient._get_api_endpoint(None, None, default_universe, "auto") + == default_endpoint + ) + assert ( + TextServiceClient._get_api_endpoint(None, None, default_universe, "always") + == TextServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + TextServiceClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "always" + ) + == TextServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + TextServiceClient._get_api_endpoint(None, None, mock_universe, "never") + == mock_endpoint + ) + assert ( + TextServiceClient._get_api_endpoint(None, None, default_universe, "never") + == default_endpoint + ) + + with pytest.raises(MutualTLSChannelError) as excinfo: + TextServiceClient._get_api_endpoint( + None, mock_client_cert_source, mock_universe, "auto" + ) + assert ( + str(excinfo.value) + == "mTLS is not supported in any universe other than googleapis.com." + ) + + +def test__get_universe_domain(): + client_universe_domain = "foo.com" + universe_domain_env = "bar.com" + + assert ( + TextServiceClient._get_universe_domain( + client_universe_domain, universe_domain_env + ) + == client_universe_domain + ) + assert ( + TextServiceClient._get_universe_domain(None, universe_domain_env) + == universe_domain_env + ) + assert ( + TextServiceClient._get_universe_domain(None, None) + == TextServiceClient._DEFAULT_UNIVERSE + ) + + with pytest.raises(ValueError) as excinfo: + TextServiceClient._get_universe_domain("", None) + assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
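+
+
+# The preceding test pins down the precedence that universe-domain resolution
+# follows for these clients: an explicitly supplied client value wins over the
+# GOOGLE_CLOUD_UNIVERSE_DOMAIN environment value, which in turn wins over the
+# class-level default (TextServiceClient._DEFAULT_UNIVERSE), and an empty
+# string is rejected outright. A minimal sketch of that rule, using a
+# hypothetical helper name purely for illustration (it is not part of the
+# generated client surface):
+#
+#     def resolve_universe_domain(client_value, env_value, default):
+#         if client_value == "":
+#             raise ValueError("Universe Domain cannot be an empty string.")
+#         return client_value or env_value or default
+#
+#     resolve_universe_domain("foo.com", "bar.com", "googleapis.com")  # -> "foo.com"
+#     resolve_universe_domain(None, "bar.com", "googleapis.com")       # -> "bar.com"
+#     resolve_universe_domain(None, None, "googleapis.com")            # -> "googleapis.com"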
+ + +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = TextServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = TextServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (TextServiceClient, "grpc"), + (TextServiceAsyncClient, "grpc_asyncio"), + (TextServiceClient, "rest"), + ], +) +def test_text_service_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "generativelanguage.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://generativelanguage.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.TextServiceGrpcTransport, "grpc"), + (transports.TextServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.TextServiceRestTransport, "rest"), + ], +) +def test_text_service_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (TextServiceClient, "grpc"), + (TextServiceAsyncClient, "grpc_asyncio"), + (TextServiceClient, "rest"), + ], +) +def test_text_service_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + 
"dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "generativelanguage.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://generativelanguage.googleapis.com" + ) + + +def test_text_service_client_get_transport_class(): + transport = TextServiceClient.get_transport_class() + available_transports = [ + transports.TextServiceGrpcTransport, + transports.TextServiceRestTransport, + ] + assert transport in available_transports + + transport = TextServiceClient.get_transport_class("grpc") + assert transport == transports.TextServiceGrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (TextServiceClient, transports.TextServiceGrpcTransport, "grpc"), + ( + TextServiceAsyncClient, + transports.TextServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (TextServiceClient, transports.TextServiceRestTransport, "rest"), + ], +) +@mock.patch.object( + TextServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(TextServiceClient), +) +@mock.patch.object( + TextServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(TextServiceAsyncClient), +) +def test_text_service_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(TextServiceClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(TextServiceClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + (TextServiceClient, transports.TextServiceGrpcTransport, "grpc", "true"), + ( + TextServiceAsyncClient, + transports.TextServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + (TextServiceClient, transports.TextServiceGrpcTransport, "grpc", "false"), + ( + TextServiceAsyncClient, + transports.TextServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + (TextServiceClient, transports.TextServiceRestTransport, "rest", "true"), + (TextServiceClient, transports.TextServiceRestTransport, "rest", "false"), + ], +) +@mock.patch.object( + TextServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(TextServiceClient), +) +@mock.patch.object( + TextServiceAsyncClient, + 
"_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(TextServiceAsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_text_service_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [TextServiceClient, TextServiceAsyncClient]) +@mock.patch.object( + TextServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(TextServiceClient) +) +@mock.patch.object( + TextServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(TextServiceAsyncClient), +) +def test_text_service_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + +@pytest.mark.parametrize("client_class", [TextServiceClient, TextServiceAsyncClient]) +@mock.patch.object( + TextServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(TextServiceClient), +) +@mock.patch.object( + TextServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(TextServiceAsyncClient), +) +def test_text_service_client_client_api_endpoint(client_class): + mock_client_cert_source = client_cert_source_callback + api_override = "foo.com" + default_universe = TextServiceClient._DEFAULT_UNIVERSE + default_endpoint = TextServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = TextServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", + # use ClientOptions.api_endpoint as the api endpoint regardless. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ): + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=api_override + ) + client = client_class( + client_options=options, + credentials=ga_credentials.AnonymousCredentials(), + ) + assert client.api_endpoint == api_override + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, + # and ClientOptions.universe_domain="bar.com", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. + options = client_options.ClientOptions() + universe_exists = hasattr(options, "universe_domain") + if universe_exists: + options = client_options.ClientOptions(universe_domain=mock_universe) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + else: + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == ( + mock_endpoint if universe_exists else default_endpoint + ) + assert client.universe_domain == ( + mock_universe if universe_exists else default_universe + ) + + # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + options = client_options.ClientOptions() + if hasattr(options, "universe_domain"): + delattr(options, "universe_domain") + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == default_endpoint + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (TextServiceClient, transports.TextServiceGrpcTransport, "grpc"), + ( + TextServiceAsyncClient, + transports.TextServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (TextServiceClient, transports.TextServiceRestTransport, "rest"), + ], +) +def test_text_service_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + (TextServiceClient, transports.TextServiceGrpcTransport, "grpc", grpc_helpers), + ( + TextServiceAsyncClient, + transports.TextServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + (TextServiceClient, transports.TextServiceRestTransport, "rest", None), + ], +) +def test_text_service_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_text_service_client_client_options_from_dict(): + with mock.patch( + "google.ai.generativelanguage_v1alpha.services.text_service.transports.TextServiceGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = TextServiceClient(client_options={"api_endpoint": "squid.clam.whelk"}) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + (TextServiceClient, transports.TextServiceGrpcTransport, "grpc", grpc_helpers), + ( + TextServiceAsyncClient, + transports.TextServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_text_service_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. 
+ with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "generativelanguage.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=(), + scopes=None, + default_host="generativelanguage.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + text_service.GenerateTextRequest, + dict, + ], +) +def test_generate_text(request_type, transport: str = "grpc"): + client = TextServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.generate_text), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = text_service.GenerateTextResponse() + response = client.generate_text(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = text_service.GenerateTextRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, text_service.GenerateTextResponse) + + +def test_generate_text_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = TextServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = text_service.GenerateTextRequest( + model="model_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.generate_text), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.generate_text(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == text_service.GenerateTextRequest( + model="model_value", + ) + + +def test_generate_text_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = TextServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.generate_text in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.generate_text] = mock_rpc + request = {} + client.generate_text(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.generate_text(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_generate_text_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = TextServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.generate_text + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.generate_text + ] = mock_rpc + + request = {} + await client.generate_text(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.generate_text(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_generate_text_async( + transport: str = "grpc_asyncio", request_type=text_service.GenerateTextRequest +): + client = TextServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.generate_text), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + text_service.GenerateTextResponse() + ) + response = await client.generate_text(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = text_service.GenerateTextRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, text_service.GenerateTextResponse) + + +@pytest.mark.asyncio +async def test_generate_text_async_from_dict(): + await test_generate_text_async(request_type=dict) + + +def test_generate_text_field_headers(): + client = TextServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = text_service.GenerateTextRequest() + + request.model = "model_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.generate_text), "__call__") as call: + call.return_value = text_service.GenerateTextResponse() + client.generate_text(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "model=model_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_generate_text_field_headers_async(): + client = TextServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = text_service.GenerateTextRequest() + + request.model = "model_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.generate_text), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + text_service.GenerateTextResponse() + ) + await client.generate_text(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "model=model_value", + ) in kw["metadata"] + + +def test_generate_text_flattened(): + client = TextServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.generate_text), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = text_service.GenerateTextResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.generate_text( + model="model_value", + prompt=text_service.TextPrompt(text="text_value"), + temperature=0.1198, + candidate_count=1573, + max_output_tokens=1865, + top_p=0.546, + top_k=541, + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].model + mock_val = "model_value" + assert arg == mock_val + arg = args[0].prompt + mock_val = text_service.TextPrompt(text="text_value") + assert arg == mock_val + assert math.isclose(args[0].temperature, 0.1198, rel_tol=1e-6) + arg = args[0].candidate_count + mock_val = 1573 + assert arg == mock_val + arg = args[0].max_output_tokens + mock_val = 1865 + assert arg == mock_val + assert math.isclose(args[0].top_p, 0.546, rel_tol=1e-6) + arg = args[0].top_k + mock_val = 541 + assert arg == mock_val + + +def test_generate_text_flattened_error(): + client = TextServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.generate_text( + text_service.GenerateTextRequest(), + model="model_value", + prompt=text_service.TextPrompt(text="text_value"), + temperature=0.1198, + candidate_count=1573, + max_output_tokens=1865, + top_p=0.546, + top_k=541, + ) + + +@pytest.mark.asyncio +async def test_generate_text_flattened_async(): + client = TextServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.generate_text), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = text_service.GenerateTextResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + text_service.GenerateTextResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.generate_text( + model="model_value", + prompt=text_service.TextPrompt(text="text_value"), + temperature=0.1198, + candidate_count=1573, + max_output_tokens=1865, + top_p=0.546, + top_k=541, + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].model + mock_val = "model_value" + assert arg == mock_val + arg = args[0].prompt + mock_val = text_service.TextPrompt(text="text_value") + assert arg == mock_val + assert math.isclose(args[0].temperature, 0.1198, rel_tol=1e-6) + arg = args[0].candidate_count + mock_val = 1573 + assert arg == mock_val + arg = args[0].max_output_tokens + mock_val = 1865 + assert arg == mock_val + assert math.isclose(args[0].top_p, 0.546, rel_tol=1e-6) + arg = args[0].top_k + mock_val = 541 + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_generate_text_flattened_error_async(): + client = TextServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.generate_text( + text_service.GenerateTextRequest(), + model="model_value", + prompt=text_service.TextPrompt(text="text_value"), + temperature=0.1198, + candidate_count=1573, + max_output_tokens=1865, + top_p=0.546, + top_k=541, + ) + + +@pytest.mark.parametrize( + "request_type", + [ + text_service.EmbedTextRequest, + dict, + ], +) +def test_embed_text(request_type, transport: str = "grpc"): + client = TextServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.embed_text), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = text_service.EmbedTextResponse() + response = client.embed_text(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = text_service.EmbedTextRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, text_service.EmbedTextResponse) + + +def test_embed_text_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = TextServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = text_service.EmbedTextRequest( + model="model_value", + text="text_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.embed_text), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.embed_text(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == text_service.EmbedTextRequest( + model="model_value", + text="text_value", + ) + + +def test_embed_text_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = TextServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.embed_text in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.embed_text] = mock_rpc + request = {} + client.embed_text(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.embed_text(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_embed_text_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = TextServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.embed_text + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.embed_text + ] = mock_rpc + + request = {} + await client.embed_text(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.embed_text(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_embed_text_async( + transport: str = "grpc_asyncio", request_type=text_service.EmbedTextRequest +): + client = TextServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.embed_text), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + text_service.EmbedTextResponse() + ) + response = await client.embed_text(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = text_service.EmbedTextRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, text_service.EmbedTextResponse) + + +@pytest.mark.asyncio +async def test_embed_text_async_from_dict(): + await test_embed_text_async(request_type=dict) + + +def test_embed_text_field_headers(): + client = TextServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = text_service.EmbedTextRequest() + + request.model = "model_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.embed_text), "__call__") as call: + call.return_value = text_service.EmbedTextResponse() + client.embed_text(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "model=model_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_embed_text_field_headers_async(): + client = TextServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = text_service.EmbedTextRequest() + + request.model = "model_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.embed_text), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + text_service.EmbedTextResponse() + ) + await client.embed_text(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "model=model_value", + ) in kw["metadata"] + + +def test_embed_text_flattened(): + client = TextServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.embed_text), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = text_service.EmbedTextResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.embed_text( + model="model_value", + text="text_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].model + mock_val = "model_value" + assert arg == mock_val + arg = args[0].text + mock_val = "text_value" + assert arg == mock_val + + +def test_embed_text_flattened_error(): + client = TextServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.embed_text( + text_service.EmbedTextRequest(), + model="model_value", + text="text_value", + ) + + +@pytest.mark.asyncio +async def test_embed_text_flattened_async(): + client = TextServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.embed_text), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = text_service.EmbedTextResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + text_service.EmbedTextResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.embed_text( + model="model_value", + text="text_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].model + mock_val = "model_value" + assert arg == mock_val + arg = args[0].text + mock_val = "text_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_embed_text_flattened_error_async(): + client = TextServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.embed_text( + text_service.EmbedTextRequest(), + model="model_value", + text="text_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + text_service.BatchEmbedTextRequest, + dict, + ], +) +def test_batch_embed_text(request_type, transport: str = "grpc"): + client = TextServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.batch_embed_text), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = text_service.BatchEmbedTextResponse() + response = client.batch_embed_text(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = text_service.BatchEmbedTextRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, text_service.BatchEmbedTextResponse) + + +def test_batch_embed_text_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = TextServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = text_service.BatchEmbedTextRequest( + model="model_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.batch_embed_text), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.batch_embed_text(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == text_service.BatchEmbedTextRequest( + model="model_value", + ) + + +def test_batch_embed_text_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = TextServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.batch_embed_text in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[ + client._transport.batch_embed_text + ] = mock_rpc + request = {} + client.batch_embed_text(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.batch_embed_text(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_batch_embed_text_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = TextServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.batch_embed_text + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.batch_embed_text + ] = mock_rpc + + request = {} + await client.batch_embed_text(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.batch_embed_text(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_batch_embed_text_async( + transport: str = "grpc_asyncio", request_type=text_service.BatchEmbedTextRequest +): + client = TextServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.batch_embed_text), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + text_service.BatchEmbedTextResponse() + ) + response = await client.batch_embed_text(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = text_service.BatchEmbedTextRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, text_service.BatchEmbedTextResponse) + + +@pytest.mark.asyncio +async def test_batch_embed_text_async_from_dict(): + await test_batch_embed_text_async(request_type=dict) + + +def test_batch_embed_text_field_headers(): + client = TextServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = text_service.BatchEmbedTextRequest() + + request.model = "model_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.batch_embed_text), "__call__") as call: + call.return_value = text_service.BatchEmbedTextResponse() + client.batch_embed_text(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "model=model_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_batch_embed_text_field_headers_async(): + client = TextServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = text_service.BatchEmbedTextRequest() + + request.model = "model_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.batch_embed_text), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + text_service.BatchEmbedTextResponse() + ) + await client.batch_embed_text(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "model=model_value", + ) in kw["metadata"] + + +def test_batch_embed_text_flattened(): + client = TextServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.batch_embed_text), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = text_service.BatchEmbedTextResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.batch_embed_text( + model="model_value", + texts=["texts_value"], + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].model + mock_val = "model_value" + assert arg == mock_val + arg = args[0].texts + mock_val = ["texts_value"] + assert arg == mock_val + + +def test_batch_embed_text_flattened_error(): + client = TextServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.batch_embed_text( + text_service.BatchEmbedTextRequest(), + model="model_value", + texts=["texts_value"], + ) + + +@pytest.mark.asyncio +async def test_batch_embed_text_flattened_async(): + client = TextServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.batch_embed_text), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = text_service.BatchEmbedTextResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + text_service.BatchEmbedTextResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.batch_embed_text( + model="model_value", + texts=["texts_value"], + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].model + mock_val = "model_value" + assert arg == mock_val + arg = args[0].texts + mock_val = ["texts_value"] + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_batch_embed_text_flattened_error_async(): + client = TextServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.batch_embed_text( + text_service.BatchEmbedTextRequest(), + model="model_value", + texts=["texts_value"], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + text_service.CountTextTokensRequest, + dict, + ], +) +def test_count_text_tokens(request_type, transport: str = "grpc"): + client = TextServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.count_text_tokens), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = text_service.CountTextTokensResponse( + token_count=1193, + ) + response = client.count_text_tokens(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = text_service.CountTextTokensRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, text_service.CountTextTokensResponse) + assert response.token_count == 1193 + + +def test_count_text_tokens_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = TextServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = text_service.CountTextTokensRequest( + model="model_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.count_text_tokens), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.count_text_tokens(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == text_service.CountTextTokensRequest( + model="model_value", + ) + + +def test_count_text_tokens_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = TextServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.count_text_tokens in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.count_text_tokens + ] = mock_rpc + request = {} + client.count_text_tokens(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.count_text_tokens(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_count_text_tokens_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = TextServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.count_text_tokens + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.count_text_tokens + ] = mock_rpc + + request = {} + await client.count_text_tokens(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.count_text_tokens(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_count_text_tokens_async( + transport: str = "grpc_asyncio", request_type=text_service.CountTextTokensRequest +): + client = TextServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.count_text_tokens), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + text_service.CountTextTokensResponse( + token_count=1193, + ) + ) + response = await client.count_text_tokens(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = text_service.CountTextTokensRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, text_service.CountTextTokensResponse) + assert response.token_count == 1193 + + +@pytest.mark.asyncio +async def test_count_text_tokens_async_from_dict(): + await test_count_text_tokens_async(request_type=dict) + + +def test_count_text_tokens_field_headers(): + client = TextServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = text_service.CountTextTokensRequest() + + request.model = "model_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.count_text_tokens), "__call__" + ) as call: + call.return_value = text_service.CountTextTokensResponse() + client.count_text_tokens(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "model=model_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_count_text_tokens_field_headers_async(): + client = TextServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = text_service.CountTextTokensRequest() + + request.model = "model_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.count_text_tokens), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + text_service.CountTextTokensResponse() + ) + await client.count_text_tokens(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "model=model_value", + ) in kw["metadata"] + + +def test_count_text_tokens_flattened(): + client = TextServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.count_text_tokens), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = text_service.CountTextTokensResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.count_text_tokens( + model="model_value", + prompt=text_service.TextPrompt(text="text_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].model + mock_val = "model_value" + assert arg == mock_val + arg = args[0].prompt + mock_val = text_service.TextPrompt(text="text_value") + assert arg == mock_val + + +def test_count_text_tokens_flattened_error(): + client = TextServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.count_text_tokens( + text_service.CountTextTokensRequest(), + model="model_value", + prompt=text_service.TextPrompt(text="text_value"), + ) + + +@pytest.mark.asyncio +async def test_count_text_tokens_flattened_async(): + client = TextServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.count_text_tokens), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = text_service.CountTextTokensResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + text_service.CountTextTokensResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.count_text_tokens( + model="model_value", + prompt=text_service.TextPrompt(text="text_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].model + mock_val = "model_value" + assert arg == mock_val + arg = args[0].prompt + mock_val = text_service.TextPrompt(text="text_value") + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_count_text_tokens_flattened_error_async(): + client = TextServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.count_text_tokens( + text_service.CountTextTokensRequest(), + model="model_value", + prompt=text_service.TextPrompt(text="text_value"), + ) + + +def test_generate_text_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = TextServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.generate_text in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.generate_text] = mock_rpc + + request = {} + client.generate_text(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.generate_text(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_generate_text_rest_required_fields( + request_type=text_service.GenerateTextRequest, +): + transport_class = transports.TextServiceRestTransport + + request_init = {} + request_init["model"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).generate_text._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["model"] = "model_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).generate_text._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "model" in jsonified_request + assert jsonified_request["model"] == "model_value" + + client = TextServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = text_service.GenerateTextResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = text_service.GenerateTextResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.generate_text(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_generate_text_rest_unset_required_fields(): + transport = transports.TextServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.generate_text._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "model", + "prompt", + ) + ) + ) + + +def test_generate_text_rest_flattened(): + client = TextServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = text_service.GenerateTextResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"model": "models/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + model="model_value", + prompt=text_service.TextPrompt(text="text_value"), + temperature=0.1198, + candidate_count=1573, + max_output_tokens=1865, + top_p=0.546, + top_k=541, + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = text_service.GenerateTextResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.generate_text(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{model=models/*}:generateText" % client.transport._host, args[1] + ) + + +def test_generate_text_rest_flattened_error(transport: str = "rest"): + client = TextServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.generate_text( + text_service.GenerateTextRequest(), + model="model_value", + prompt=text_service.TextPrompt(text="text_value"), + temperature=0.1198, + candidate_count=1573, + max_output_tokens=1865, + top_p=0.546, + top_k=541, + ) + + +def test_embed_text_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = TextServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.embed_text in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.embed_text] = mock_rpc + + request = {} + client.embed_text(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.embed_text(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_embed_text_rest_required_fields(request_type=text_service.EmbedTextRequest): + transport_class = transports.TextServiceRestTransport + + request_init = {} + request_init["model"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).embed_text._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["model"] = "model_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).embed_text._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "model" in jsonified_request + assert jsonified_request["model"] == "model_value" + + client = TextServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = text_service.EmbedTextResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = text_service.EmbedTextResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.embed_text(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_embed_text_rest_unset_required_fields(): + transport = transports.TextServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.embed_text._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("model",))) + + +def test_embed_text_rest_flattened(): + client = TextServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = text_service.EmbedTextResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"model": "models/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + model="model_value", + text="text_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = text_service.EmbedTextResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.embed_text(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{model=models/*}:embedText" % client.transport._host, args[1] + ) + + +def test_embed_text_rest_flattened_error(transport: str = "rest"): + client = TextServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.embed_text( + text_service.EmbedTextRequest(), + model="model_value", + text="text_value", + ) + + +def test_batch_embed_text_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = TextServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.batch_embed_text in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.batch_embed_text + ] = mock_rpc + + request = {} + client.batch_embed_text(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.batch_embed_text(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_batch_embed_text_rest_required_fields( + request_type=text_service.BatchEmbedTextRequest, +): + transport_class = transports.TextServiceRestTransport + + request_init = {} + request_init["model"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).batch_embed_text._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["model"] = "model_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).batch_embed_text._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "model" in jsonified_request + assert jsonified_request["model"] == "model_value" + + client = TextServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = text_service.BatchEmbedTextResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = text_service.BatchEmbedTextResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.batch_embed_text(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_batch_embed_text_rest_unset_required_fields(): + transport = transports.TextServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.batch_embed_text._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("model",))) + + +def test_batch_embed_text_rest_flattened(): + client = TextServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = text_service.BatchEmbedTextResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"model": "models/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + model="model_value", + texts=["texts_value"], + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = text_service.BatchEmbedTextResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.batch_embed_text(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{model=models/*}:batchEmbedText" % client.transport._host, + args[1], + ) + + +def test_batch_embed_text_rest_flattened_error(transport: str = "rest"): + client = TextServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.batch_embed_text( + text_service.BatchEmbedTextRequest(), + model="model_value", + texts=["texts_value"], + ) + + +def test_count_text_tokens_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = TextServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.count_text_tokens in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.count_text_tokens + ] = mock_rpc + + request = {} + client.count_text_tokens(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.count_text_tokens(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_count_text_tokens_rest_required_fields( + request_type=text_service.CountTextTokensRequest, +): + transport_class = transports.TextServiceRestTransport + + request_init = {} + request_init["model"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).count_text_tokens._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["model"] = "model_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).count_text_tokens._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "model" in jsonified_request + assert jsonified_request["model"] == "model_value" + + client = TextServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = text_service.CountTextTokensResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = text_service.CountTextTokensResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.count_text_tokens(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_count_text_tokens_rest_unset_required_fields(): + transport = transports.TextServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.count_text_tokens._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "model", + "prompt", + ) + ) + ) + + +def test_count_text_tokens_rest_flattened(): + client = TextServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = text_service.CountTextTokensResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"model": "models/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + model="model_value", + prompt=text_service.TextPrompt(text="text_value"), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = text_service.CountTextTokensResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.count_text_tokens(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{model=models/*}:countTextTokens" % client.transport._host, + args[1], + ) + + +def test_count_text_tokens_rest_flattened_error(transport: str = "rest"): + client = TextServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.count_text_tokens( + text_service.CountTextTokensRequest(), + model="model_value", + prompt=text_service.TextPrompt(text="text_value"), + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.TextServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = TextServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.TextServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = TextServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.TextServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = TextServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = TextServiceClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.TextServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = TextServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. 
+ transport = transports.TextServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = TextServiceClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.TextServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.TextServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.TextServiceGrpcTransport, + transports.TextServiceGrpcAsyncIOTransport, + transports.TextServiceRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +def test_transport_kind_grpc(): + transport = TextServiceClient.get_transport_class("grpc")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "grpc" + + +def test_initialize_client_w_grpc(): + client = TextServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_generate_text_empty_call_grpc(): + client = TextServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.generate_text), "__call__") as call: + call.return_value = text_service.GenerateTextResponse() + client.generate_text(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = text_service.GenerateTextRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_embed_text_empty_call_grpc(): + client = TextServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.embed_text), "__call__") as call: + call.return_value = text_service.EmbedTextResponse() + client.embed_text(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = text_service.EmbedTextRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_batch_embed_text_empty_call_grpc(): + client = TextServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.batch_embed_text), "__call__") as call: + call.return_value = text_service.BatchEmbedTextResponse() + client.batch_embed_text(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = text_service.BatchEmbedTextRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_count_text_tokens_empty_call_grpc(): + client = TextServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.count_text_tokens), "__call__" + ) as call: + call.return_value = text_service.CountTextTokensResponse() + client.count_text_tokens(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = text_service.CountTextTokensRequest() + + assert args[0] == request_msg + + +def test_transport_kind_grpc_asyncio(): + transport = TextServiceAsyncClient.get_transport_class("grpc_asyncio")( + credentials=async_anonymous_credentials() + ) + assert transport.kind == "grpc_asyncio" + + +def test_initialize_client_w_grpc_asyncio(): + client = TextServiceAsyncClient( + credentials=async_anonymous_credentials(), transport="grpc_asyncio" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_generate_text_empty_call_grpc_asyncio(): + client = TextServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.generate_text), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + text_service.GenerateTextResponse() + ) + await client.generate_text(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = text_service.GenerateTextRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_embed_text_empty_call_grpc_asyncio(): + client = TextServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.embed_text), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + text_service.EmbedTextResponse() + ) + await client.embed_text(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = text_service.EmbedTextRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_batch_embed_text_empty_call_grpc_asyncio(): + client = TextServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.batch_embed_text), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + text_service.BatchEmbedTextResponse() + ) + await client.batch_embed_text(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = text_service.BatchEmbedTextRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_count_text_tokens_empty_call_grpc_asyncio(): + client = TextServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.count_text_tokens), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + text_service.CountTextTokensResponse( + token_count=1193, + ) + ) + await client.count_text_tokens(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = text_service.CountTextTokensRequest() + + assert args[0] == request_msg + + +def test_transport_kind_rest(): + transport = TextServiceClient.get_transport_class("rest")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "rest" + + +def test_generate_text_rest_bad_request(request_type=text_service.GenerateTextRequest): + client = TextServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"model": "models/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.generate_text(request) + + +@pytest.mark.parametrize( + "request_type", + [ + text_service.GenerateTextRequest, + dict, + ], +) +def test_generate_text_rest_call_success(request_type): + client = TextServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"model": "models/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = text_service.GenerateTextResponse() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = text_service.GenerateTextResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.generate_text(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, text_service.GenerateTextResponse) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_generate_text_rest_interceptors(null_interceptor): + transport = transports.TextServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.TextServiceRestInterceptor(), + ) + client = TextServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.TextServiceRestInterceptor, "post_generate_text" + ) as post, mock.patch.object( + transports.TextServiceRestInterceptor, "post_generate_text_with_metadata" + ) as post_with_metadata, mock.patch.object( + transports.TextServiceRestInterceptor, "pre_generate_text" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = text_service.GenerateTextRequest.pb( + text_service.GenerateTextRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = text_service.GenerateTextResponse.to_json( + text_service.GenerateTextResponse() + ) + req.return_value.content = return_value + + request = text_service.GenerateTextRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = text_service.GenerateTextResponse() + post_with_metadata.return_value = text_service.GenerateTextResponse(), metadata + + client.generate_text( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_embed_text_rest_bad_request(request_type=text_service.EmbedTextRequest): + client = TextServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"model": "models/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.embed_text(request) + + +@pytest.mark.parametrize( + "request_type", + [ + text_service.EmbedTextRequest, + dict, + ], +) +def test_embed_text_rest_call_success(request_type): + client = TextServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"model": "models/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = text_service.EmbedTextResponse() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = text_service.EmbedTextResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.embed_text(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, text_service.EmbedTextResponse) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_embed_text_rest_interceptors(null_interceptor): + transport = transports.TextServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.TextServiceRestInterceptor(), + ) + client = TextServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.TextServiceRestInterceptor, "post_embed_text" + ) as post, mock.patch.object( + transports.TextServiceRestInterceptor, "post_embed_text_with_metadata" + ) as post_with_metadata, mock.patch.object( + transports.TextServiceRestInterceptor, "pre_embed_text" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = text_service.EmbedTextRequest.pb(text_service.EmbedTextRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = text_service.EmbedTextResponse.to_json( + text_service.EmbedTextResponse() + ) + req.return_value.content = return_value + + request = text_service.EmbedTextRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = text_service.EmbedTextResponse() + post_with_metadata.return_value = text_service.EmbedTextResponse(), metadata + + client.embed_text( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + 
pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_batch_embed_text_rest_bad_request( + request_type=text_service.BatchEmbedTextRequest, +): + client = TextServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"model": "models/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.batch_embed_text(request) + + +@pytest.mark.parametrize( + "request_type", + [ + text_service.BatchEmbedTextRequest, + dict, + ], +) +def test_batch_embed_text_rest_call_success(request_type): + client = TextServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"model": "models/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = text_service.BatchEmbedTextResponse() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = text_service.BatchEmbedTextResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.batch_embed_text(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, text_service.BatchEmbedTextResponse) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_batch_embed_text_rest_interceptors(null_interceptor): + transport = transports.TextServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.TextServiceRestInterceptor(), + ) + client = TextServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.TextServiceRestInterceptor, "post_batch_embed_text" + ) as post, mock.patch.object( + transports.TextServiceRestInterceptor, "post_batch_embed_text_with_metadata" + ) as post_with_metadata, mock.patch.object( + transports.TextServiceRestInterceptor, "pre_batch_embed_text" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = text_service.BatchEmbedTextRequest.pb( + text_service.BatchEmbedTextRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = text_service.BatchEmbedTextResponse.to_json( + text_service.BatchEmbedTextResponse() + ) + req.return_value.content = return_value + + request = text_service.BatchEmbedTextRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = text_service.BatchEmbedTextResponse() + post_with_metadata.return_value = ( + text_service.BatchEmbedTextResponse(), + metadata, + ) + + client.batch_embed_text( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_count_text_tokens_rest_bad_request( + request_type=text_service.CountTextTokensRequest, +): + client = TextServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"model": "models/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.count_text_tokens(request) + + +@pytest.mark.parametrize( + "request_type", + [ + text_service.CountTextTokensRequest, + dict, + ], +) +def test_count_text_tokens_rest_call_success(request_type): + client = TextServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"model": "models/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = text_service.CountTextTokensResponse( + token_count=1193, + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = text_service.CountTextTokensResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.count_text_tokens(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, text_service.CountTextTokensResponse) + assert response.token_count == 1193 + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_count_text_tokens_rest_interceptors(null_interceptor): + transport = transports.TextServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.TextServiceRestInterceptor(), + ) + client = TextServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.TextServiceRestInterceptor, "post_count_text_tokens" + ) as post, mock.patch.object( + transports.TextServiceRestInterceptor, "post_count_text_tokens_with_metadata" + ) as post_with_metadata, mock.patch.object( + transports.TextServiceRestInterceptor, "pre_count_text_tokens" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = text_service.CountTextTokensRequest.pb( + text_service.CountTextTokensRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = text_service.CountTextTokensResponse.to_json( + text_service.CountTextTokensResponse() + ) + req.return_value.content = return_value + + request = text_service.CountTextTokensRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = text_service.CountTextTokensResponse() + post_with_metadata.return_value = ( + text_service.CountTextTokensResponse(), + metadata, + ) + + client.count_text_tokens( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_get_operation_rest_bad_request( + request_type=operations_pb2.GetOperationRequest, +): + client = TextServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict( + {"name": "tunedModels/sample1/operations/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.GetOperationRequest, + dict, + ], +) +def test_get_operation_rest(request_type): + client = TextServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {"name": "tunedModels/sample1/operations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_operation(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_list_operations_rest_bad_request( + request_type=operations_pb2.ListOperationsRequest, +): + client = TextServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict({"name": "tunedModels/sample1"}, request) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_operations(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.ListOperationsRequest, + dict, + ], +) +def test_list_operations_rest(request_type): + client = TextServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {"name": "tunedModels/sample1"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.ListOperationsResponse() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_operations(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_initialize_client_w_rest(): + client = TextServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_generate_text_empty_call_rest(): + client = TextServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.generate_text), "__call__") as call: + client.generate_text(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = text_service.GenerateTextRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_embed_text_empty_call_rest(): + client = TextServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.embed_text), "__call__") as call: + client.embed_text(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = text_service.EmbedTextRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_batch_embed_text_empty_call_rest(): + client = TextServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.batch_embed_text), "__call__") as call: + client.batch_embed_text(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = text_service.BatchEmbedTextRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_count_text_tokens_empty_call_rest(): + client = TextServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.count_text_tokens), "__call__" + ) as call: + client.count_text_tokens(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = text_service.CountTextTokensRequest() + + assert args[0] == request_msg + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. 
+ client = TextServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.TextServiceGrpcTransport, + ) + + +def test_text_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.TextServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_text_service_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.ai.generativelanguage_v1alpha.services.text_service.transports.TextServiceTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.TextServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. + methods = ( + "generate_text", + "embed_text", + "batch_embed_text", + "count_text_tokens", + "get_operation", + "list_operations", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_text_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.ai.generativelanguage_v1alpha.services.text_service.transports.TextServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.TextServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=(), + quota_project_id="octopus", + ) + + +def test_text_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.ai.generativelanguage_v1alpha.services.text_service.transports.TextServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.TextServiceTransport() + adc.assert_called_once() + + +def test_text_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + TextServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=(), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.TextServiceGrpcTransport, + transports.TextServiceGrpcAsyncIOTransport, + ], +) +def test_text_service_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=(), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.TextServiceGrpcTransport, + transports.TextServiceGrpcAsyncIOTransport, + transports.TextServiceRestTransport, + ], +) +def test_text_service_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.TextServiceGrpcTransport, grpc_helpers), + (transports.TextServiceGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +def test_text_service_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "generativelanguage.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=(), + scopes=["1", "2"], + default_host="generativelanguage.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [transports.TextServiceGrpcTransport, transports.TextServiceGrpcAsyncIOTransport], +) +def test_text_service_grpc_transport_client_cert_source_for_mtls(transport_class): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + +def test_text_service_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.TextServiceRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_text_service_host_no_port(transport_name): + client = TextServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="generativelanguage.googleapis.com" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "generativelanguage.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://generativelanguage.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_text_service_host_with_port(transport_name): + client = TextServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="generativelanguage.googleapis.com:8000" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "generativelanguage.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://generativelanguage.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_text_service_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = TextServiceClient( + credentials=creds1, + transport=transport_name, + ) + client2 = TextServiceClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.generate_text._session + session2 = client2.transport.generate_text._session + assert session1 != session2 + session1 = client1.transport.embed_text._session + session2 = client2.transport.embed_text._session + assert session1 != session2 + session1 = client1.transport.batch_embed_text._session + session2 = client2.transport.batch_embed_text._session + assert session1 != session2 + session1 = client1.transport.count_text_tokens._session + session2 = client2.transport.count_text_tokens._session + assert session1 != session2 + + +def test_text_service_grpc_transport_channel(): + channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. 
+ transport = transports.TextServiceGrpcTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +def test_text_service_grpc_asyncio_transport_channel(): + channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.TextServiceGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize( + "transport_class", + [transports.TextServiceGrpcTransport, transports.TextServiceGrpcAsyncIOTransport], +) +def test_text_service_transport_channel_mtls_with_client_cert_source(transport_class): + with mock.patch( + "grpc.ssl_channel_credentials", autospec=True + ) as grpc_ssl_channel_cred: + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. 
+@pytest.mark.parametrize( + "transport_class", + [transports.TextServiceGrpcTransport, transports.TextServiceGrpcAsyncIOTransport], +) +def test_text_service_transport_channel_mtls_with_adc(transport_class): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_model_path(): + model = "squid" + expected = "models/{model}".format( + model=model, + ) + actual = TextServiceClient.model_path(model) + assert expected == actual + + +def test_parse_model_path(): + expected = { + "model": "clam", + } + path = TextServiceClient.model_path(**expected) + + # Check that the path construction is reversible. + actual = TextServiceClient.parse_model_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "whelk" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = TextServiceClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "octopus", + } + path = TextServiceClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = TextServiceClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "oyster" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = TextServiceClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "nudibranch", + } + path = TextServiceClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = TextServiceClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "cuttlefish" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = TextServiceClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "mussel", + } + path = TextServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. 
+ actual = TextServiceClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "winkle" + expected = "projects/{project}".format( + project=project, + ) + actual = TextServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "nautilus", + } + path = TextServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = TextServiceClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "scallop" + location = "abalone" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = TextServiceClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "squid", + "location": "clam", + } + path = TextServiceClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = TextServiceClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.TextServiceTransport, "_prep_wrapped_messages" + ) as prep: + client = TextServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.TextServiceTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = TextServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +def test_get_operation(transport: str = "grpc"): + client = TextServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + response = client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +@pytest.mark.asyncio +async def test_get_operation_async(transport: str = "grpc_asyncio"): + client = TextServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_get_operation_field_headers(): + client = TextServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = operations_pb2.Operation() + + client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_operation_field_headers_async(): + client = TextServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_get_operation_from_dict(): + client = TextServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + + response = client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_get_operation_from_dict_async(): + client = TextServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_operations(transport: str = "grpc"): + client = TextServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + response = client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +@pytest.mark.asyncio +async def test_list_operations_async(transport: str = "grpc_asyncio"): + client = TextServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_list_operations_field_headers(): + client = TextServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = operations_pb2.ListOperationsResponse() + + client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_operations_field_headers_async(): + client = TextServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_list_operations_from_dict(): + client = TextServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + + response = client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_list_operations_from_dict_async(): + client = TextServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_transport_close_grpc(): + client = TextServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc" + ) + with mock.patch.object( + type(getattr(client.transport, "_grpc_channel")), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +@pytest.mark.asyncio +async def test_transport_close_grpc_asyncio(): + client = TextServiceAsyncClient( + credentials=async_anonymous_credentials(), transport="grpc_asyncio" + ) + with mock.patch.object( + type(getattr(client.transport, "_grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_transport_close_rest(): + client = TextServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + with mock.patch.object( + type(getattr(client.transport, "_session")), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + "grpc", + ] + for transport in transports: + client = TextServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (TextServiceClient, transports.TextServiceGrpcTransport), + (TextServiceAsyncClient, transports.TextServiceGrpcAsyncIOTransport), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_cache_service.py b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_cache_service.py index 94bb3702eed2..ba03dc928f5f 100644 --- a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_cache_service.py +++ b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_cache_service.py @@ -70,6 +70,13 @@ from google.ai.generativelanguage_v1beta.types import cached_content from google.ai.generativelanguage_v1beta.types import content +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -313,6 +320,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = CacheServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = CacheServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -4077,10 +4127,14 @@ def test_list_cached_contents_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.CacheServiceRestInterceptor, "post_list_cached_contents" ) as post, mock.patch.object( + transports.CacheServiceRestInterceptor, + "post_list_cached_contents_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.CacheServiceRestInterceptor, "pre_list_cached_contents" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = cache_service.ListCachedContentsRequest.pb( cache_service.ListCachedContentsRequest() ) @@ -4106,6 +4160,10 @@ def test_list_cached_contents_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = cache_service.ListCachedContentsResponse() + post_with_metadata.return_value = ( + cache_service.ListCachedContentsResponse(), + metadata, + ) client.list_cached_contents( request, @@ -4117,6 +4175,7 @@ def test_list_cached_contents_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_cached_content_rest_bad_request( @@ -4172,8 +4231,16 @@ def test_create_cached_content_rest_call_success(request_type): "mime_type": "mime_type_value", "data": b"data_blob", }, - "function_call": {"name": "name_value", "args": {"fields": {}}}, - "function_response": {"name": "name_value", "response": {}}, + "function_call": { + "id": "id_value", + "name": "name_value", + "args": {"fields": {}}, + }, + "function_response": { + "id": "id_value", + "name": "name_value", + "response": {}, + }, "file_data": { "mime_type": "mime_type_value", "file_uri": "file_uri_value", @@ -4203,12 +4270,14 @@ def test_create_cached_content_rest_call_success(request_type): "properties": {}, "required": ["required_value1", "required_value2"], }, + "response": {}, } ], "google_search_retrieval": { "dynamic_retrieval_config": {"mode": 1, "dynamic_threshold": 0.1809} }, "code_execution": {}, + "google_search": {}, } ], "tool_config": { @@ -4338,10 +4407,14 @@ def test_create_cached_content_rest_interceptors(null_interceptor): ) as transcode, 
mock.patch.object( transports.CacheServiceRestInterceptor, "post_create_cached_content" ) as post, mock.patch.object( + transports.CacheServiceRestInterceptor, + "post_create_cached_content_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.CacheServiceRestInterceptor, "pre_create_cached_content" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = cache_service.CreateCachedContentRequest.pb( cache_service.CreateCachedContentRequest() ) @@ -4367,6 +4440,7 @@ def test_create_cached_content_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gag_cached_content.CachedContent() + post_with_metadata.return_value = gag_cached_content.CachedContent(), metadata client.create_cached_content( request, @@ -4378,6 +4452,7 @@ def test_create_cached_content_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_cached_content_rest_bad_request( @@ -4466,10 +4541,13 @@ def test_get_cached_content_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.CacheServiceRestInterceptor, "post_get_cached_content" ) as post, mock.patch.object( + transports.CacheServiceRestInterceptor, "post_get_cached_content_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.CacheServiceRestInterceptor, "pre_get_cached_content" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = cache_service.GetCachedContentRequest.pb( cache_service.GetCachedContentRequest() ) @@ -4495,6 +4573,7 @@ def test_get_cached_content_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = cached_content.CachedContent() + post_with_metadata.return_value = cached_content.CachedContent(), metadata client.get_cached_content( request, @@ -4506,6 +4585,7 @@ def test_get_cached_content_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_cached_content_rest_bad_request( @@ -4561,8 +4641,16 @@ def test_update_cached_content_rest_call_success(request_type): "mime_type": "mime_type_value", "data": b"data_blob", }, - "function_call": {"name": "name_value", "args": {"fields": {}}}, - "function_response": {"name": "name_value", "response": {}}, + "function_call": { + "id": "id_value", + "name": "name_value", + "args": {"fields": {}}, + }, + "function_response": { + "id": "id_value", + "name": "name_value", + "response": {}, + }, "file_data": { "mime_type": "mime_type_value", "file_uri": "file_uri_value", @@ -4592,12 +4680,14 @@ def test_update_cached_content_rest_call_success(request_type): "properties": {}, "required": ["required_value1", "required_value2"], }, + "response": {}, } ], "google_search_retrieval": { "dynamic_retrieval_config": {"mode": 1, "dynamic_threshold": 0.1809} }, "code_execution": {}, + "google_search": {}, } ], "tool_config": { @@ -4727,10 +4817,14 @@ def test_update_cached_content_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.CacheServiceRestInterceptor, "post_update_cached_content" ) as post, mock.patch.object( + transports.CacheServiceRestInterceptor, + "post_update_cached_content_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.CacheServiceRestInterceptor, "pre_update_cached_content" ) as pre: pre.assert_not_called() 
post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = cache_service.UpdateCachedContentRequest.pb( cache_service.UpdateCachedContentRequest() ) @@ -4756,6 +4850,7 @@ def test_update_cached_content_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gag_cached_content.CachedContent() + post_with_metadata.return_value = gag_cached_content.CachedContent(), metadata client.update_cached_content( request, @@ -4767,6 +4862,7 @@ def test_update_cached_content_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_cached_content_rest_bad_request( diff --git a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_discuss_service.py b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_discuss_service.py index 0f0e41c98c2c..660d8105cd8c 100644 --- a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_discuss_service.py +++ b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_discuss_service.py @@ -60,6 +60,13 @@ ) from google.ai.generativelanguage_v1beta.types import citation, discuss_service, safety +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -318,6 +325,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = DiscussServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = DiscussServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -2467,10 +2517,13 @@ def test_generate_message_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DiscussServiceRestInterceptor, "post_generate_message" ) as post, mock.patch.object( + transports.DiscussServiceRestInterceptor, "post_generate_message_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DiscussServiceRestInterceptor, "pre_generate_message" ) as pre: pre.assert_not_called() 
post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = discuss_service.GenerateMessageRequest.pb( discuss_service.GenerateMessageRequest() ) @@ -2496,6 +2549,10 @@ def test_generate_message_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = discuss_service.GenerateMessageResponse() + post_with_metadata.return_value = ( + discuss_service.GenerateMessageResponse(), + metadata, + ) client.generate_message( request, @@ -2507,6 +2564,7 @@ def test_generate_message_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_count_message_tokens_rest_bad_request( @@ -2591,10 +2649,14 @@ def test_count_message_tokens_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DiscussServiceRestInterceptor, "post_count_message_tokens" ) as post, mock.patch.object( + transports.DiscussServiceRestInterceptor, + "post_count_message_tokens_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DiscussServiceRestInterceptor, "pre_count_message_tokens" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = discuss_service.CountMessageTokensRequest.pb( discuss_service.CountMessageTokensRequest() ) @@ -2620,6 +2682,10 @@ def test_count_message_tokens_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = discuss_service.CountMessageTokensResponse() + post_with_metadata.return_value = ( + discuss_service.CountMessageTokensResponse(), + metadata, + ) client.count_message_tokens( request, @@ -2631,6 +2697,7 @@ def test_count_message_tokens_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_operation_rest_bad_request( diff --git a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_file_service.py b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_file_service.py index dba74cbfa712..3e5a3f5c4f44 100644 --- a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_file_service.py +++ b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_file_service.py @@ -65,6 +65,13 @@ ) from google.ai.generativelanguage_v1beta.types import file, file_service +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -304,6 +311,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
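The cached-content request bodies earlier in this diff pick up a few new fields: function_call and function_response now carry an id, function declarations gain a response schema, and a bare google_search tool sits alongside google_search_retrieval and code_execution. An illustrative plain-dict fragment in the same shape as the test fixtures (the values are placeholders, not an official sample):

    content_part = {
        "function_call": {
            "id": "id_value",        # new: identifier for the tool call
            "name": "name_value",
            "args": {"fields": {}},
        },
        "function_response": {
            "id": "id_value",        # new: pairs the response with its call
            "name": "name_value",
            "response": {},
        },
    }

    tool = {
        "function_declarations": [
            {
                "name": "name_value",
                "description": "description_value",
                "parameters": {"type_": 1, "properties": {}},
                "response": {},      # new: declared response schema
            }
        ],
        "google_search_retrieval": {
            "dynamic_retrieval_config": {"mode": 1, "dynamic_threshold": 0.1809}
        },
        "code_execution": {},
        "google_search": {},         # new: built-in Google Search tool
    }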
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = FileServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = FileServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -3091,10 +3141,13 @@ def test_create_file_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.FileServiceRestInterceptor, "post_create_file" ) as post, mock.patch.object( + transports.FileServiceRestInterceptor, "post_create_file_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.FileServiceRestInterceptor, "pre_create_file" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = file_service.CreateFileRequest.pb(file_service.CreateFileRequest()) transcode.return_value = { "method": "post", @@ -3118,6 +3171,7 @@ def test_create_file_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = file_service.CreateFileResponse() + post_with_metadata.return_value = file_service.CreateFileResponse(), metadata client.create_file( request, @@ -3129,6 +3183,7 @@ def test_create_file_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_files_rest_bad_request(request_type=file_service.ListFilesRequest): @@ -3211,10 +3266,13 @@ def test_list_files_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.FileServiceRestInterceptor, "post_list_files" ) as post, mock.patch.object( + transports.FileServiceRestInterceptor, "post_list_files_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.FileServiceRestInterceptor, "pre_list_files" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = file_service.ListFilesRequest.pb(file_service.ListFilesRequest()) transcode.return_value = { "method": "post", @@ -3238,6 +3296,7 @@ def test_list_files_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = file_service.ListFilesResponse() + post_with_metadata.return_value = file_service.ListFilesResponse(), metadata client.list_files( request, @@ -3249,6 +3308,7 @@ def test_list_files_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + 
post_with_metadata.assert_called_once() def test_get_file_rest_bad_request(request_type=file_service.GetFileRequest): @@ -3343,10 +3403,13 @@ def test_get_file_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.FileServiceRestInterceptor, "post_get_file" ) as post, mock.patch.object( + transports.FileServiceRestInterceptor, "post_get_file_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.FileServiceRestInterceptor, "pre_get_file" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = file_service.GetFileRequest.pb(file_service.GetFileRequest()) transcode.return_value = { "method": "post", @@ -3368,6 +3431,7 @@ def test_get_file_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = file.File() + post_with_metadata.return_value = file.File(), metadata client.get_file( request, @@ -3379,6 +3443,7 @@ def test_get_file_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_file_rest_bad_request(request_type=file_service.DeleteFileRequest): diff --git a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_generative_service.py b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_generative_service.py index 6264482575e5..5bf4b88b7033 100644 --- a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_generative_service.py +++ b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_generative_service.py @@ -67,6 +67,13 @@ from google.ai.generativelanguage_v1beta.types import content from google.ai.generativelanguage_v1beta.types import content as gag_content +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -334,6 +341,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
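Every REST interceptor test in this diff now also patches a post_*_with_metadata hook and has it return the response together with the trailing metadata, in addition to the existing pre_* and post_* hooks. A hedged sketch of a user-defined interceptor overriding one of these hooks (the base class and hook names appear in the tests; the exact signature and body are assumptions):

    from google.ai.generativelanguage_v1beta.services.generative_service import transports

    class MetadataLoggingInterceptor(transports.GenerativeServiceRestInterceptor):
        def post_generate_content_with_metadata(self, response, metadata):
            # Unlike post_generate_content, this hook also receives the response
            # metadata and is expected to hand back (response, metadata).
            print("generate_content metadata:", metadata)
            return response, metadata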
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = GenerativeServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = GenerativeServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -4831,10 +4881,14 @@ def test_generate_content_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.GenerativeServiceRestInterceptor, "post_generate_content" ) as post, mock.patch.object( + transports.GenerativeServiceRestInterceptor, + "post_generate_content_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.GenerativeServiceRestInterceptor, "pre_generate_content" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = generative_service.GenerateContentRequest.pb( generative_service.GenerateContentRequest() ) @@ -4860,6 +4914,10 @@ def test_generate_content_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = generative_service.GenerateContentResponse() + post_with_metadata.return_value = ( + generative_service.GenerateContentResponse(), + metadata, + ) client.generate_content( request, @@ -4871,6 +4929,7 @@ def test_generate_content_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_generate_answer_rest_bad_request( @@ -4955,10 +5014,14 @@ def test_generate_answer_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.GenerativeServiceRestInterceptor, "post_generate_answer" ) as post, mock.patch.object( + transports.GenerativeServiceRestInterceptor, + "post_generate_answer_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.GenerativeServiceRestInterceptor, "pre_generate_answer" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = generative_service.GenerateAnswerRequest.pb( generative_service.GenerateAnswerRequest() ) @@ -4984,6 +5047,10 @@ def test_generate_answer_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = generative_service.GenerateAnswerResponse() + post_with_metadata.return_value = ( + generative_service.GenerateAnswerResponse(), + metadata, + ) client.generate_answer( request, @@ -4995,6 +5062,7 @@ def 
test_generate_answer_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_stream_generate_content_rest_bad_request( @@ -5083,10 +5151,14 @@ def test_stream_generate_content_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.GenerativeServiceRestInterceptor, "post_stream_generate_content" ) as post, mock.patch.object( + transports.GenerativeServiceRestInterceptor, + "post_stream_generate_content_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.GenerativeServiceRestInterceptor, "pre_stream_generate_content" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = generative_service.GenerateContentRequest.pb( generative_service.GenerateContentRequest() ) @@ -5112,6 +5184,10 @@ def test_stream_generate_content_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = generative_service.GenerateContentResponse() + post_with_metadata.return_value = ( + generative_service.GenerateContentResponse(), + metadata, + ) client.stream_generate_content( request, @@ -5123,6 +5199,7 @@ def test_stream_generate_content_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_embed_content_rest_bad_request( @@ -5204,10 +5281,13 @@ def test_embed_content_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.GenerativeServiceRestInterceptor, "post_embed_content" ) as post, mock.patch.object( + transports.GenerativeServiceRestInterceptor, "post_embed_content_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.GenerativeServiceRestInterceptor, "pre_embed_content" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = generative_service.EmbedContentRequest.pb( generative_service.EmbedContentRequest() ) @@ -5233,6 +5313,10 @@ def test_embed_content_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = generative_service.EmbedContentResponse() + post_with_metadata.return_value = ( + generative_service.EmbedContentResponse(), + metadata, + ) client.embed_content( request, @@ -5244,6 +5328,7 @@ def test_embed_content_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_batch_embed_contents_rest_bad_request( @@ -5325,10 +5410,14 @@ def test_batch_embed_contents_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.GenerativeServiceRestInterceptor, "post_batch_embed_contents" ) as post, mock.patch.object( + transports.GenerativeServiceRestInterceptor, + "post_batch_embed_contents_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.GenerativeServiceRestInterceptor, "pre_batch_embed_contents" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = generative_service.BatchEmbedContentsRequest.pb( generative_service.BatchEmbedContentsRequest() ) @@ -5354,6 +5443,10 @@ def test_batch_embed_contents_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = generative_service.BatchEmbedContentsResponse() + post_with_metadata.return_value = ( + generative_service.BatchEmbedContentsResponse(), + metadata, + ) client.batch_embed_contents( 
request, @@ -5365,6 +5458,7 @@ def test_batch_embed_contents_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_count_tokens_rest_bad_request( @@ -5451,10 +5545,13 @@ def test_count_tokens_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.GenerativeServiceRestInterceptor, "post_count_tokens" ) as post, mock.patch.object( + transports.GenerativeServiceRestInterceptor, "post_count_tokens_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.GenerativeServiceRestInterceptor, "pre_count_tokens" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = generative_service.CountTokensRequest.pb( generative_service.CountTokensRequest() ) @@ -5480,6 +5577,10 @@ def test_count_tokens_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = generative_service.CountTokensResponse() + post_with_metadata.return_value = ( + generative_service.CountTokensResponse(), + metadata, + ) client.count_tokens( request, @@ -5491,6 +5592,7 @@ def test_count_tokens_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_operation_rest_bad_request( diff --git a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_model_service.py b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_model_service.py index 0a325329ebe4..13fe4958a472 100644 --- a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_model_service.py +++ b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_model_service.py @@ -74,6 +74,13 @@ from google.ai.generativelanguage_v1beta.types import model, model_service from google.ai.generativelanguage_v1beta.types import tuned_model +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -317,6 +324,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
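To be picked up at runtime, an interceptor like the one sketched earlier is passed to the REST transport when the client is built, the same way these tests construct their clients (anonymous credentials are used here only to keep the example self-contained; any valid credentials object works the same way):

    from google.auth import credentials as ga_credentials
    from google.ai.generativelanguage_v1beta.services.generative_service import (
        GenerativeServiceClient,
        transports,
    )

    transport = transports.GenerativeServiceRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
        interceptor=MetadataLoggingInterceptor(),  # from the earlier sketch
    )
    client = GenerativeServiceClient(transport=transport)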
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = ModelServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = ModelServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -5576,10 +5626,13 @@ def test_get_model_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ModelServiceRestInterceptor, "post_get_model" ) as post, mock.patch.object( + transports.ModelServiceRestInterceptor, "post_get_model_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ModelServiceRestInterceptor, "pre_get_model" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = model_service.GetModelRequest.pb(model_service.GetModelRequest()) transcode.return_value = { "method": "post", @@ -5601,6 +5654,7 @@ def test_get_model_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = model.Model() + post_with_metadata.return_value = model.Model(), metadata client.get_model( request, @@ -5612,6 +5666,7 @@ def test_get_model_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_models_rest_bad_request(request_type=model_service.ListModelsRequest): @@ -5694,10 +5749,13 @@ def test_list_models_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ModelServiceRestInterceptor, "post_list_models" ) as post, mock.patch.object( + transports.ModelServiceRestInterceptor, "post_list_models_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ModelServiceRestInterceptor, "pre_list_models" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = model_service.ListModelsRequest.pb( model_service.ListModelsRequest() ) @@ -5723,6 +5781,7 @@ def test_list_models_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = model_service.ListModelsResponse() + post_with_metadata.return_value = model_service.ListModelsResponse(), metadata client.list_models( request, @@ -5734,6 +5793,7 @@ def test_list_models_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_tuned_model_rest_bad_request( @@ -5833,10 +5893,13 @@ 
def test_get_tuned_model_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ModelServiceRestInterceptor, "post_get_tuned_model" ) as post, mock.patch.object( + transports.ModelServiceRestInterceptor, "post_get_tuned_model_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ModelServiceRestInterceptor, "pre_get_tuned_model" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = model_service.GetTunedModelRequest.pb( model_service.GetTunedModelRequest() ) @@ -5860,6 +5923,7 @@ def test_get_tuned_model_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = tuned_model.TunedModel() + post_with_metadata.return_value = tuned_model.TunedModel(), metadata client.get_tuned_model( request, @@ -5871,6 +5935,7 @@ def test_get_tuned_model_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_tuned_models_rest_bad_request( @@ -5955,10 +6020,13 @@ def test_list_tuned_models_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ModelServiceRestInterceptor, "post_list_tuned_models" ) as post, mock.patch.object( + transports.ModelServiceRestInterceptor, "post_list_tuned_models_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ModelServiceRestInterceptor, "pre_list_tuned_models" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = model_service.ListTunedModelsRequest.pb( model_service.ListTunedModelsRequest() ) @@ -5984,6 +6052,10 @@ def test_list_tuned_models_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = model_service.ListTunedModelsResponse() + post_with_metadata.return_value = ( + model_service.ListTunedModelsResponse(), + metadata, + ) client.list_tuned_models( request, @@ -5995,6 +6067,7 @@ def test_list_tuned_models_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_tuned_model_rest_bad_request( @@ -6179,10 +6252,13 @@ def test_create_tuned_model_rest_interceptors(null_interceptor): ), mock.patch.object( transports.ModelServiceRestInterceptor, "post_create_tuned_model" ) as post, mock.patch.object( + transports.ModelServiceRestInterceptor, "post_create_tuned_model_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ModelServiceRestInterceptor, "pre_create_tuned_model" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = model_service.CreateTunedModelRequest.pb( model_service.CreateTunedModelRequest() ) @@ -6206,6 +6282,7 @@ def test_create_tuned_model_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_tuned_model( request, @@ -6217,6 +6294,7 @@ def test_create_tuned_model_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_tuned_model_rest_bad_request( @@ -6420,10 +6498,13 @@ def test_update_tuned_model_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ModelServiceRestInterceptor, "post_update_tuned_model" ) as post, mock.patch.object( + 
transports.ModelServiceRestInterceptor, "post_update_tuned_model_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ModelServiceRestInterceptor, "pre_update_tuned_model" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = model_service.UpdateTunedModelRequest.pb( model_service.UpdateTunedModelRequest() ) @@ -6447,6 +6528,7 @@ def test_update_tuned_model_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gag_tuned_model.TunedModel() + post_with_metadata.return_value = gag_tuned_model.TunedModel(), metadata client.update_tuned_model( request, @@ -6458,6 +6540,7 @@ def test_update_tuned_model_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_tuned_model_rest_bad_request( diff --git a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_permission_service.py b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_permission_service.py index 5d1db613de20..3736995c2bb8 100644 --- a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_permission_service.py +++ b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_permission_service.py @@ -64,6 +64,13 @@ from google.ai.generativelanguage_v1beta.types import permission from google.ai.generativelanguage_v1beta.types import permission_service +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -331,6 +338,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = PermissionServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = PermissionServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -4980,10 +5030,14 @@ def test_create_permission_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.PermissionServiceRestInterceptor, "post_create_permission" ) as post, mock.patch.object( + transports.PermissionServiceRestInterceptor, + "post_create_permission_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.PermissionServiceRestInterceptor, "pre_create_permission" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = permission_service.CreatePermissionRequest.pb( permission_service.CreatePermissionRequest() ) @@ -5007,6 +5061,7 @@ def test_create_permission_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gag_permission.Permission() + post_with_metadata.return_value = gag_permission.Permission(), metadata client.create_permission( request, @@ -5018,6 +5073,7 @@ def test_create_permission_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_permission_rest_bad_request( @@ -5108,10 +5164,13 @@ def test_get_permission_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.PermissionServiceRestInterceptor, "post_get_permission" ) as post, mock.patch.object( + transports.PermissionServiceRestInterceptor, "post_get_permission_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.PermissionServiceRestInterceptor, "pre_get_permission" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = permission_service.GetPermissionRequest.pb( permission_service.GetPermissionRequest() ) @@ -5135,6 +5194,7 @@ def test_get_permission_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = permission.Permission() + post_with_metadata.return_value = permission.Permission(), metadata client.get_permission( request, @@ -5146,6 +5206,7 @@ def test_get_permission_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + 
post_with_metadata.assert_called_once() def test_list_permissions_rest_bad_request( @@ -5230,10 +5291,14 @@ def test_list_permissions_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.PermissionServiceRestInterceptor, "post_list_permissions" ) as post, mock.patch.object( + transports.PermissionServiceRestInterceptor, + "post_list_permissions_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.PermissionServiceRestInterceptor, "pre_list_permissions" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = permission_service.ListPermissionsRequest.pb( permission_service.ListPermissionsRequest() ) @@ -5259,6 +5324,10 @@ def test_list_permissions_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = permission_service.ListPermissionsResponse() + post_with_metadata.return_value = ( + permission_service.ListPermissionsResponse(), + metadata, + ) client.list_permissions( request, @@ -5270,6 +5339,7 @@ def test_list_permissions_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_permission_rest_bad_request( @@ -5433,10 +5503,14 @@ def test_update_permission_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.PermissionServiceRestInterceptor, "post_update_permission" ) as post, mock.patch.object( + transports.PermissionServiceRestInterceptor, + "post_update_permission_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.PermissionServiceRestInterceptor, "pre_update_permission" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = permission_service.UpdatePermissionRequest.pb( permission_service.UpdatePermissionRequest() ) @@ -5460,6 +5534,7 @@ def test_update_permission_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gag_permission.Permission() + post_with_metadata.return_value = gag_permission.Permission(), metadata client.update_permission( request, @@ -5471,6 +5546,7 @@ def test_update_permission_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_permission_rest_bad_request( @@ -5661,10 +5737,14 @@ def test_transfer_ownership_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.PermissionServiceRestInterceptor, "post_transfer_ownership" ) as post, mock.patch.object( + transports.PermissionServiceRestInterceptor, + "post_transfer_ownership_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.PermissionServiceRestInterceptor, "pre_transfer_ownership" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = permission_service.TransferOwnershipRequest.pb( permission_service.TransferOwnershipRequest() ) @@ -5690,6 +5770,10 @@ def test_transfer_ownership_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = permission_service.TransferOwnershipResponse() + post_with_metadata.return_value = ( + permission_service.TransferOwnershipResponse(), + metadata, + ) client.transfer_ownership( request, @@ -5701,6 +5785,7 @@ def test_transfer_ownership_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() 
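On the caller side, the credential diagnostics exercised by the test__add_cred_info_for_auth_errors tests surface as an extra JSON string among the error's details. A hedged usage sketch (the resource name is hypothetical, and scanning every detail entry is an assumption; the tests only show that the JSON string is appended to details):

    import json
    from google.api_core import exceptions as core_exceptions
    from google.ai.generativelanguage_v1beta import PermissionServiceClient

    client = PermissionServiceClient()  # assumes default credentials are configured

    try:
        # Hypothetical resource name, for illustration only.
        client.get_permission(name="tunedModels/my-model/permissions/my-permission")
    except core_exceptions.GoogleAPICallError as exc:
        if exc.code in (401, 403, 404):
            for detail in exc.details:
                try:
                    cred_info = json.loads(detail)
                except (TypeError, ValueError):
                    continue  # not the appended credential JSON
                if isinstance(cred_info, dict) and "principal" in cred_info:
                    print("request was authenticated as:", cred_info["principal"])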
def test_get_operation_rest_bad_request( diff --git a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_prediction_service.py b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_prediction_service.py index 2da4cddf7b82..d2054f044d62 100644 --- a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_prediction_service.py +++ b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_prediction_service.py @@ -61,6 +61,13 @@ ) from google.ai.generativelanguage_v1beta.types import prediction_service +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -328,6 +335,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = PredictionServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = PredictionServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -1859,10 +1909,13 @@ def test_predict_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.PredictionServiceRestInterceptor, "post_predict" ) as post, mock.patch.object( + transports.PredictionServiceRestInterceptor, "post_predict_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.PredictionServiceRestInterceptor, "pre_predict" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = prediction_service.PredictRequest.pb( prediction_service.PredictRequest() ) @@ -1888,6 +1941,7 @@ def test_predict_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = prediction_service.PredictResponse() + post_with_metadata.return_value = prediction_service.PredictResponse(), metadata client.predict( request, @@ -1899,6 +1953,7 @@ def test_predict_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_operation_rest_bad_request( diff --git 
a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_retriever_service.py b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_retriever_service.py index 4ecb0a8c1b04..1fcaccedcbe8 100644 --- a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_retriever_service.py +++ b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_retriever_service.py @@ -63,6 +63,13 @@ ) from google.ai.generativelanguage_v1beta.types import retriever, retriever_service +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -322,6 +329,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = RetrieverServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = RetrieverServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -12157,10 +12207,13 @@ def test_create_corpus_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RetrieverServiceRestInterceptor, "post_create_corpus" ) as post, mock.patch.object( + transports.RetrieverServiceRestInterceptor, "post_create_corpus_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.RetrieverServiceRestInterceptor, "pre_create_corpus" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = retriever_service.CreateCorpusRequest.pb( retriever_service.CreateCorpusRequest() ) @@ -12184,6 +12237,7 @@ def test_create_corpus_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = retriever.Corpus() + post_with_metadata.return_value = retriever.Corpus(), metadata client.create_corpus( request, @@ -12195,6 +12249,7 @@ def test_create_corpus_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_corpus_rest_bad_request(request_type=retriever_service.GetCorpusRequest): @@ -12279,10 +12334,13 @@ def 
test_get_corpus_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RetrieverServiceRestInterceptor, "post_get_corpus" ) as post, mock.patch.object( + transports.RetrieverServiceRestInterceptor, "post_get_corpus_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.RetrieverServiceRestInterceptor, "pre_get_corpus" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = retriever_service.GetCorpusRequest.pb( retriever_service.GetCorpusRequest() ) @@ -12306,6 +12364,7 @@ def test_get_corpus_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = retriever.Corpus() + post_with_metadata.return_value = retriever.Corpus(), metadata client.get_corpus( request, @@ -12317,6 +12376,7 @@ def test_get_corpus_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_corpus_rest_bad_request( @@ -12476,10 +12536,13 @@ def test_update_corpus_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RetrieverServiceRestInterceptor, "post_update_corpus" ) as post, mock.patch.object( + transports.RetrieverServiceRestInterceptor, "post_update_corpus_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.RetrieverServiceRestInterceptor, "pre_update_corpus" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = retriever_service.UpdateCorpusRequest.pb( retriever_service.UpdateCorpusRequest() ) @@ -12503,6 +12566,7 @@ def test_update_corpus_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = retriever.Corpus() + post_with_metadata.return_value = retriever.Corpus(), metadata client.update_corpus( request, @@ -12514,6 +12578,7 @@ def test_update_corpus_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_corpus_rest_bad_request( @@ -12707,10 +12772,13 @@ def test_list_corpora_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RetrieverServiceRestInterceptor, "post_list_corpora" ) as post, mock.patch.object( + transports.RetrieverServiceRestInterceptor, "post_list_corpora_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.RetrieverServiceRestInterceptor, "pre_list_corpora" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = retriever_service.ListCorporaRequest.pb( retriever_service.ListCorporaRequest() ) @@ -12736,6 +12804,10 @@ def test_list_corpora_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = retriever_service.ListCorporaResponse() + post_with_metadata.return_value = ( + retriever_service.ListCorporaResponse(), + metadata, + ) client.list_corpora( request, @@ -12747,6 +12819,7 @@ def test_list_corpora_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_query_corpus_rest_bad_request( @@ -12828,10 +12901,13 @@ def test_query_corpus_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RetrieverServiceRestInterceptor, "post_query_corpus" ) as post, mock.patch.object( + transports.RetrieverServiceRestInterceptor, "post_query_corpus_with_metadata" + ) as post_with_metadata, 
mock.patch.object( transports.RetrieverServiceRestInterceptor, "pre_query_corpus" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = retriever_service.QueryCorpusRequest.pb( retriever_service.QueryCorpusRequest() ) @@ -12857,6 +12933,10 @@ def test_query_corpus_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = retriever_service.QueryCorpusResponse() + post_with_metadata.return_value = ( + retriever_service.QueryCorpusResponse(), + metadata, + ) client.query_corpus( request, @@ -12868,6 +12948,7 @@ def test_query_corpus_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_document_rest_bad_request( @@ -13035,10 +13116,13 @@ def test_create_document_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RetrieverServiceRestInterceptor, "post_create_document" ) as post, mock.patch.object( + transports.RetrieverServiceRestInterceptor, "post_create_document_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.RetrieverServiceRestInterceptor, "pre_create_document" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = retriever_service.CreateDocumentRequest.pb( retriever_service.CreateDocumentRequest() ) @@ -13062,6 +13146,7 @@ def test_create_document_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = retriever.Document() + post_with_metadata.return_value = retriever.Document(), metadata client.create_document( request, @@ -13073,6 +13158,7 @@ def test_create_document_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_document_rest_bad_request( @@ -13159,10 +13245,13 @@ def test_get_document_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RetrieverServiceRestInterceptor, "post_get_document" ) as post, mock.patch.object( + transports.RetrieverServiceRestInterceptor, "post_get_document_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.RetrieverServiceRestInterceptor, "pre_get_document" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = retriever_service.GetDocumentRequest.pb( retriever_service.GetDocumentRequest() ) @@ -13186,6 +13275,7 @@ def test_get_document_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = retriever.Document() + post_with_metadata.return_value = retriever.Document(), metadata client.get_document( request, @@ -13197,6 +13287,7 @@ def test_get_document_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_document_rest_bad_request( @@ -13364,10 +13455,13 @@ def test_update_document_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RetrieverServiceRestInterceptor, "post_update_document" ) as post, mock.patch.object( + transports.RetrieverServiceRestInterceptor, "post_update_document_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.RetrieverServiceRestInterceptor, "pre_update_document" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = 
retriever_service.UpdateDocumentRequest.pb( retriever_service.UpdateDocumentRequest() ) @@ -13391,6 +13485,7 @@ def test_update_document_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = retriever.Document() + post_with_metadata.return_value = retriever.Document(), metadata client.update_document( request, @@ -13402,6 +13497,7 @@ def test_update_document_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_document_rest_bad_request( @@ -13595,10 +13691,13 @@ def test_list_documents_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RetrieverServiceRestInterceptor, "post_list_documents" ) as post, mock.patch.object( + transports.RetrieverServiceRestInterceptor, "post_list_documents_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.RetrieverServiceRestInterceptor, "pre_list_documents" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = retriever_service.ListDocumentsRequest.pb( retriever_service.ListDocumentsRequest() ) @@ -13624,6 +13723,10 @@ def test_list_documents_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = retriever_service.ListDocumentsResponse() + post_with_metadata.return_value = ( + retriever_service.ListDocumentsResponse(), + metadata, + ) client.list_documents( request, @@ -13635,6 +13738,7 @@ def test_list_documents_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_query_document_rest_bad_request( @@ -13716,10 +13820,13 @@ def test_query_document_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RetrieverServiceRestInterceptor, "post_query_document" ) as post, mock.patch.object( + transports.RetrieverServiceRestInterceptor, "post_query_document_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.RetrieverServiceRestInterceptor, "pre_query_document" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = retriever_service.QueryDocumentRequest.pb( retriever_service.QueryDocumentRequest() ) @@ -13745,6 +13852,10 @@ def test_query_document_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = retriever_service.QueryDocumentResponse() + post_with_metadata.return_value = ( + retriever_service.QueryDocumentResponse(), + metadata, + ) client.query_document( request, @@ -13756,6 +13867,7 @@ def test_query_document_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_chunk_rest_bad_request( @@ -13924,10 +14036,13 @@ def test_create_chunk_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RetrieverServiceRestInterceptor, "post_create_chunk" ) as post, mock.patch.object( + transports.RetrieverServiceRestInterceptor, "post_create_chunk_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.RetrieverServiceRestInterceptor, "pre_create_chunk" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = retriever_service.CreateChunkRequest.pb( retriever_service.CreateChunkRequest() ) @@ -13951,6 +14066,7 @@ def test_create_chunk_rest_interceptors(null_interceptor): ] 
pre.return_value = request, metadata post.return_value = retriever.Chunk() + post_with_metadata.return_value = retriever.Chunk(), metadata client.create_chunk( request, @@ -13962,6 +14078,7 @@ def test_create_chunk_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_batch_create_chunks_rest_bad_request( @@ -14043,10 +14160,14 @@ def test_batch_create_chunks_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RetrieverServiceRestInterceptor, "post_batch_create_chunks" ) as post, mock.patch.object( + transports.RetrieverServiceRestInterceptor, + "post_batch_create_chunks_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.RetrieverServiceRestInterceptor, "pre_batch_create_chunks" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = retriever_service.BatchCreateChunksRequest.pb( retriever_service.BatchCreateChunksRequest() ) @@ -14072,6 +14193,10 @@ def test_batch_create_chunks_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = retriever_service.BatchCreateChunksResponse() + post_with_metadata.return_value = ( + retriever_service.BatchCreateChunksResponse(), + metadata, + ) client.batch_create_chunks( request, @@ -14083,6 +14208,7 @@ def test_batch_create_chunks_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_chunk_rest_bad_request(request_type=retriever_service.GetChunkRequest): @@ -14167,10 +14293,13 @@ def test_get_chunk_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RetrieverServiceRestInterceptor, "post_get_chunk" ) as post, mock.patch.object( + transports.RetrieverServiceRestInterceptor, "post_get_chunk_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.RetrieverServiceRestInterceptor, "pre_get_chunk" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = retriever_service.GetChunkRequest.pb( retriever_service.GetChunkRequest() ) @@ -14194,6 +14323,7 @@ def test_get_chunk_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = retriever.Chunk() + post_with_metadata.return_value = retriever.Chunk(), metadata client.get_chunk( request, @@ -14205,6 +14335,7 @@ def test_get_chunk_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_chunk_rest_bad_request( @@ -14377,10 +14508,13 @@ def test_update_chunk_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RetrieverServiceRestInterceptor, "post_update_chunk" ) as post, mock.patch.object( + transports.RetrieverServiceRestInterceptor, "post_update_chunk_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.RetrieverServiceRestInterceptor, "pre_update_chunk" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = retriever_service.UpdateChunkRequest.pb( retriever_service.UpdateChunkRequest() ) @@ -14404,6 +14538,7 @@ def test_update_chunk_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = retriever.Chunk() + post_with_metadata.return_value = retriever.Chunk(), metadata client.update_chunk( request, @@ -14415,6 +14550,7 @@ 
def test_update_chunk_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_batch_update_chunks_rest_bad_request( @@ -14496,10 +14632,14 @@ def test_batch_update_chunks_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RetrieverServiceRestInterceptor, "post_batch_update_chunks" ) as post, mock.patch.object( + transports.RetrieverServiceRestInterceptor, + "post_batch_update_chunks_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.RetrieverServiceRestInterceptor, "pre_batch_update_chunks" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = retriever_service.BatchUpdateChunksRequest.pb( retriever_service.BatchUpdateChunksRequest() ) @@ -14525,6 +14665,10 @@ def test_batch_update_chunks_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = retriever_service.BatchUpdateChunksResponse() + post_with_metadata.return_value = ( + retriever_service.BatchUpdateChunksResponse(), + metadata, + ) client.batch_update_chunks( request, @@ -14536,6 +14680,7 @@ def test_batch_update_chunks_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_chunk_rest_bad_request( @@ -14836,10 +14981,13 @@ def test_list_chunks_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RetrieverServiceRestInterceptor, "post_list_chunks" ) as post, mock.patch.object( + transports.RetrieverServiceRestInterceptor, "post_list_chunks_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.RetrieverServiceRestInterceptor, "pre_list_chunks" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = retriever_service.ListChunksRequest.pb( retriever_service.ListChunksRequest() ) @@ -14865,6 +15013,10 @@ def test_list_chunks_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = retriever_service.ListChunksResponse() + post_with_metadata.return_value = ( + retriever_service.ListChunksResponse(), + metadata, + ) client.list_chunks( request, @@ -14876,6 +15028,7 @@ def test_list_chunks_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_operation_rest_bad_request( diff --git a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_text_service.py b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_text_service.py index f4c3411358f8..53e5ff4d544e 100644 --- a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_text_service.py +++ b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta/test_text_service.py @@ -60,6 +60,13 @@ ) from google.ai.generativelanguage_v1beta.types import safety, text_service +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -299,6 +306,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = TextServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = TextServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -3537,10 +3587,13 @@ def test_generate_text_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.TextServiceRestInterceptor, "post_generate_text" ) as post, mock.patch.object( + transports.TextServiceRestInterceptor, "post_generate_text_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.TextServiceRestInterceptor, "pre_generate_text" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = text_service.GenerateTextRequest.pb( text_service.GenerateTextRequest() ) @@ -3566,6 +3619,7 @@ def test_generate_text_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = text_service.GenerateTextResponse() + post_with_metadata.return_value = text_service.GenerateTextResponse(), metadata client.generate_text( request, @@ -3577,6 +3631,7 @@ def test_generate_text_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_embed_text_rest_bad_request(request_type=text_service.EmbedTextRequest): @@ -3656,10 +3711,13 @@ def test_embed_text_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.TextServiceRestInterceptor, "post_embed_text" ) as post, mock.patch.object( + transports.TextServiceRestInterceptor, "post_embed_text_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.TextServiceRestInterceptor, "pre_embed_text" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = text_service.EmbedTextRequest.pb(text_service.EmbedTextRequest()) transcode.return_value = { "method": "post", @@ -3683,6 +3741,7 @@ def test_embed_text_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = text_service.EmbedTextResponse() + post_with_metadata.return_value = text_service.EmbedTextResponse(), metadata client.embed_text( request, @@ -3694,6 +3753,7 @@ def test_embed_text_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def 
test_batch_embed_text_rest_bad_request( @@ -3775,10 +3835,13 @@ def test_batch_embed_text_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.TextServiceRestInterceptor, "post_batch_embed_text" ) as post, mock.patch.object( + transports.TextServiceRestInterceptor, "post_batch_embed_text_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.TextServiceRestInterceptor, "pre_batch_embed_text" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = text_service.BatchEmbedTextRequest.pb( text_service.BatchEmbedTextRequest() ) @@ -3804,6 +3867,10 @@ def test_batch_embed_text_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = text_service.BatchEmbedTextResponse() + post_with_metadata.return_value = ( + text_service.BatchEmbedTextResponse(), + metadata, + ) client.batch_embed_text( request, @@ -3815,6 +3882,7 @@ def test_batch_embed_text_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_count_text_tokens_rest_bad_request( @@ -3899,10 +3967,13 @@ def test_count_text_tokens_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.TextServiceRestInterceptor, "post_count_text_tokens" ) as post, mock.patch.object( + transports.TextServiceRestInterceptor, "post_count_text_tokens_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.TextServiceRestInterceptor, "pre_count_text_tokens" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = text_service.CountTextTokensRequest.pb( text_service.CountTextTokensRequest() ) @@ -3928,6 +3999,10 @@ def test_count_text_tokens_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = text_service.CountTextTokensResponse() + post_with_metadata.return_value = ( + text_service.CountTextTokensResponse(), + metadata, + ) client.count_text_tokens( request, @@ -3939,6 +4014,7 @@ def test_count_text_tokens_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_operation_rest_bad_request( diff --git a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta2/test_discuss_service.py b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta2/test_discuss_service.py index ae755c101e55..d2d2b1d5372a 100644 --- a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta2/test_discuss_service.py +++ b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta2/test_discuss_service.py @@ -59,6 +59,13 @@ ) from google.ai.generativelanguage_v1beta2.types import citation, discuss_service, safety +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -317,6 +324,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
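The `_add_cred_info_for_auth_errors` tests added below, and repeated near-verbatim in each service's test module, all pin down the same client behaviour: for 401/403/404 errors, serialize the credential's `get_cred_info()` payload and append it to the error details, and do nothing when the credential has no such method or the status code is not auth-related. A minimal, self-contained sketch of that behaviour, inferred purely from the test assertions (the `_FakeError` stand-in and the free function name here are illustrative, not the generated client code):

import json
from dataclasses import dataclass, field


@dataclass
class _FakeError:
    # Stand-in for google.api_core.exceptions.GoogleAPICallError in this sketch.
    code: int
    details: list = field(default_factory=list)


def add_cred_info_for_auth_errors(error, credentials):
    # Only auth-related status codes get the extra credential context.
    if error.code not in (401, 403, 404):
        return
    # Credentials without get_cred_info (or returning None) leave the error untouched.
    get_cred_info = getattr(credentials, "get_cred_info", None)
    cred_info = get_cred_info() if get_cred_info else None
    if cred_info:
        error.details.append(json.dumps(cred_info))


class _Creds:
    def get_cred_info(self):
        return {"principal": "service-account@example.com"}


err = _FakeError(code=403, details=["foo"])
add_cred_info_for_auth_errors(err, _Creds())
assert err.details == ["foo", json.dumps({"principal": "service-account@example.com"})]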
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = DiscussServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = DiscussServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -2466,10 +2516,13 @@ def test_generate_message_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DiscussServiceRestInterceptor, "post_generate_message" ) as post, mock.patch.object( + transports.DiscussServiceRestInterceptor, "post_generate_message_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DiscussServiceRestInterceptor, "pre_generate_message" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = discuss_service.GenerateMessageRequest.pb( discuss_service.GenerateMessageRequest() ) @@ -2495,6 +2548,10 @@ def test_generate_message_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = discuss_service.GenerateMessageResponse() + post_with_metadata.return_value = ( + discuss_service.GenerateMessageResponse(), + metadata, + ) client.generate_message( request, @@ -2506,6 +2563,7 @@ def test_generate_message_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_count_message_tokens_rest_bad_request( @@ -2590,10 +2648,14 @@ def test_count_message_tokens_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DiscussServiceRestInterceptor, "post_count_message_tokens" ) as post, mock.patch.object( + transports.DiscussServiceRestInterceptor, + "post_count_message_tokens_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DiscussServiceRestInterceptor, "pre_count_message_tokens" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = discuss_service.CountMessageTokensRequest.pb( discuss_service.CountMessageTokensRequest() ) @@ -2619,6 +2681,10 @@ def test_count_message_tokens_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = discuss_service.CountMessageTokensResponse() + post_with_metadata.return_value = ( + discuss_service.CountMessageTokensResponse(), + metadata, + ) client.count_message_tokens( request, @@ -2630,6 +2696,7 @@ def 
test_count_message_tokens_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_initialize_client_w_rest(): diff --git a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta2/test_model_service.py b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta2/test_model_service.py index 344c9b785904..6fd7eff500ef 100644 --- a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta2/test_model_service.py +++ b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta2/test_model_service.py @@ -60,6 +60,13 @@ ) from google.ai.generativelanguage_v1beta2.types import model, model_service +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -303,6 +310,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = ModelServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = ModelServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -2525,10 +2575,13 @@ def test_get_model_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ModelServiceRestInterceptor, "post_get_model" ) as post, mock.patch.object( + transports.ModelServiceRestInterceptor, "post_get_model_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ModelServiceRestInterceptor, "pre_get_model" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = model_service.GetModelRequest.pb(model_service.GetModelRequest()) transcode.return_value = { "method": "post", @@ -2550,6 +2603,7 @@ def test_get_model_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = model.Model() + post_with_metadata.return_value = model.Model(), metadata client.get_model( request, @@ -2561,6 +2615,7 @@ def test_get_model_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + 
post_with_metadata.assert_called_once() def test_list_models_rest_bad_request(request_type=model_service.ListModelsRequest): @@ -2643,10 +2698,13 @@ def test_list_models_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ModelServiceRestInterceptor, "post_list_models" ) as post, mock.patch.object( + transports.ModelServiceRestInterceptor, "post_list_models_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ModelServiceRestInterceptor, "pre_list_models" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = model_service.ListModelsRequest.pb( model_service.ListModelsRequest() ) @@ -2672,6 +2730,7 @@ def test_list_models_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = model_service.ListModelsResponse() + post_with_metadata.return_value = model_service.ListModelsResponse(), metadata client.list_models( request, @@ -2683,6 +2742,7 @@ def test_list_models_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_initialize_client_w_rest(): diff --git a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta2/test_text_service.py b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta2/test_text_service.py index d0e3e74b6eb9..f372edf9581e 100644 --- a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta2/test_text_service.py +++ b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta2/test_text_service.py @@ -59,6 +59,13 @@ ) from google.ai.generativelanguage_v1beta2.types import safety, text_service +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -298,6 +305,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = TextServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = TextServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -2400,10 +2450,13 @@ def test_generate_text_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.TextServiceRestInterceptor, "post_generate_text" ) as post, mock.patch.object( + transports.TextServiceRestInterceptor, "post_generate_text_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.TextServiceRestInterceptor, "pre_generate_text" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = text_service.GenerateTextRequest.pb( text_service.GenerateTextRequest() ) @@ -2429,6 +2482,7 @@ def test_generate_text_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = text_service.GenerateTextResponse() + post_with_metadata.return_value = text_service.GenerateTextResponse(), metadata client.generate_text( request, @@ -2440,6 +2494,7 @@ def test_generate_text_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_embed_text_rest_bad_request(request_type=text_service.EmbedTextRequest): @@ -2519,10 +2574,13 @@ def test_embed_text_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.TextServiceRestInterceptor, "post_embed_text" ) as post, mock.patch.object( + transports.TextServiceRestInterceptor, "post_embed_text_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.TextServiceRestInterceptor, "pre_embed_text" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = text_service.EmbedTextRequest.pb(text_service.EmbedTextRequest()) transcode.return_value = { "method": "post", @@ -2546,6 +2604,7 @@ def test_embed_text_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = text_service.EmbedTextResponse() + post_with_metadata.return_value = text_service.EmbedTextResponse(), metadata client.embed_text( request, @@ -2557,6 +2616,7 @@ def test_embed_text_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def 
test_initialize_client_w_rest(): diff --git a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta3/test_discuss_service.py b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta3/test_discuss_service.py index b0e1f7beddc8..6d167a96bebe 100644 --- a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta3/test_discuss_service.py +++ b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta3/test_discuss_service.py @@ -60,6 +60,13 @@ ) from google.ai.generativelanguage_v1beta3.types import citation, discuss_service, safety +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -318,6 +325,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = DiscussServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = DiscussServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -2467,10 +2517,13 @@ def test_generate_message_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DiscussServiceRestInterceptor, "post_generate_message" ) as post, mock.patch.object( + transports.DiscussServiceRestInterceptor, "post_generate_message_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DiscussServiceRestInterceptor, "pre_generate_message" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = discuss_service.GenerateMessageRequest.pb( discuss_service.GenerateMessageRequest() ) @@ -2496,6 +2549,10 @@ def test_generate_message_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = discuss_service.GenerateMessageResponse() + post_with_metadata.return_value = ( + discuss_service.GenerateMessageResponse(), + metadata, + ) client.generate_message( request, @@ -2507,6 +2564,7 @@ def test_generate_message_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def 
test_count_message_tokens_rest_bad_request( @@ -2591,10 +2649,14 @@ def test_count_message_tokens_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DiscussServiceRestInterceptor, "post_count_message_tokens" ) as post, mock.patch.object( + transports.DiscussServiceRestInterceptor, + "post_count_message_tokens_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DiscussServiceRestInterceptor, "pre_count_message_tokens" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = discuss_service.CountMessageTokensRequest.pb( discuss_service.CountMessageTokensRequest() ) @@ -2620,6 +2682,10 @@ def test_count_message_tokens_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = discuss_service.CountMessageTokensResponse() + post_with_metadata.return_value = ( + discuss_service.CountMessageTokensResponse(), + metadata, + ) client.count_message_tokens( request, @@ -2631,6 +2697,7 @@ def test_count_message_tokens_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_initialize_client_w_rest(): diff --git a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta3/test_model_service.py b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta3/test_model_service.py index 18414da7feef..bc516fe51427 100644 --- a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta3/test_model_service.py +++ b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta3/test_model_service.py @@ -74,6 +74,13 @@ from google.ai.generativelanguage_v1beta3.types import model, model_service from google.ai.generativelanguage_v1beta3.types import tuned_model +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -317,6 +324,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = ModelServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = ModelServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -5565,10 +5615,13 @@ def test_get_model_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ModelServiceRestInterceptor, "post_get_model" ) as post, mock.patch.object( + transports.ModelServiceRestInterceptor, "post_get_model_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ModelServiceRestInterceptor, "pre_get_model" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = model_service.GetModelRequest.pb(model_service.GetModelRequest()) transcode.return_value = { "method": "post", @@ -5590,6 +5643,7 @@ def test_get_model_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = model.Model() + post_with_metadata.return_value = model.Model(), metadata client.get_model( request, @@ -5601,6 +5655,7 @@ def test_get_model_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_models_rest_bad_request(request_type=model_service.ListModelsRequest): @@ -5683,10 +5738,13 @@ def test_list_models_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ModelServiceRestInterceptor, "post_list_models" ) as post, mock.patch.object( + transports.ModelServiceRestInterceptor, "post_list_models_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ModelServiceRestInterceptor, "pre_list_models" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = model_service.ListModelsRequest.pb( model_service.ListModelsRequest() ) @@ -5712,6 +5770,7 @@ def test_list_models_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = model_service.ListModelsResponse() + post_with_metadata.return_value = model_service.ListModelsResponse(), metadata client.list_models( request, @@ -5723,6 +5782,7 @@ def test_list_models_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_tuned_model_rest_bad_request( @@ -5820,10 +5880,13 @@ 
def test_get_tuned_model_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ModelServiceRestInterceptor, "post_get_tuned_model" ) as post, mock.patch.object( + transports.ModelServiceRestInterceptor, "post_get_tuned_model_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ModelServiceRestInterceptor, "pre_get_tuned_model" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = model_service.GetTunedModelRequest.pb( model_service.GetTunedModelRequest() ) @@ -5847,6 +5910,7 @@ def test_get_tuned_model_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = tuned_model.TunedModel() + post_with_metadata.return_value = tuned_model.TunedModel(), metadata client.get_tuned_model( request, @@ -5858,6 +5922,7 @@ def test_get_tuned_model_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_tuned_models_rest_bad_request( @@ -5942,10 +6007,13 @@ def test_list_tuned_models_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ModelServiceRestInterceptor, "post_list_tuned_models" ) as post, mock.patch.object( + transports.ModelServiceRestInterceptor, "post_list_tuned_models_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ModelServiceRestInterceptor, "pre_list_tuned_models" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = model_service.ListTunedModelsRequest.pb( model_service.ListTunedModelsRequest() ) @@ -5971,6 +6039,10 @@ def test_list_tuned_models_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = model_service.ListTunedModelsResponse() + post_with_metadata.return_value = ( + model_service.ListTunedModelsResponse(), + metadata, + ) client.list_tuned_models( request, @@ -5982,6 +6054,7 @@ def test_list_tuned_models_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_tuned_model_rest_bad_request( @@ -6164,10 +6237,13 @@ def test_create_tuned_model_rest_interceptors(null_interceptor): ), mock.patch.object( transports.ModelServiceRestInterceptor, "post_create_tuned_model" ) as post, mock.patch.object( + transports.ModelServiceRestInterceptor, "post_create_tuned_model_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ModelServiceRestInterceptor, "pre_create_tuned_model" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = model_service.CreateTunedModelRequest.pb( model_service.CreateTunedModelRequest() ) @@ -6191,6 +6267,7 @@ def test_create_tuned_model_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_tuned_model( request, @@ -6202,6 +6279,7 @@ def test_create_tuned_model_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_tuned_model_rest_bad_request( @@ -6401,10 +6479,13 @@ def test_update_tuned_model_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ModelServiceRestInterceptor, "post_update_tuned_model" ) as post, mock.patch.object( + 
transports.ModelServiceRestInterceptor, "post_update_tuned_model_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ModelServiceRestInterceptor, "pre_update_tuned_model" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = model_service.UpdateTunedModelRequest.pb( model_service.UpdateTunedModelRequest() ) @@ -6428,6 +6509,7 @@ def test_update_tuned_model_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gag_tuned_model.TunedModel() + post_with_metadata.return_value = gag_tuned_model.TunedModel(), metadata client.update_tuned_model( request, @@ -6439,6 +6521,7 @@ def test_update_tuned_model_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_tuned_model_rest_bad_request( diff --git a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta3/test_permission_service.py b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta3/test_permission_service.py index b10783b57a84..20a5c4f3311a 100644 --- a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta3/test_permission_service.py +++ b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta3/test_permission_service.py @@ -64,6 +64,13 @@ from google.ai.generativelanguage_v1beta3.types import permission from google.ai.generativelanguage_v1beta3.types import permission_service +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -331,6 +338,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = PermissionServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = PermissionServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -4980,10 +5030,14 @@ def test_create_permission_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.PermissionServiceRestInterceptor, "post_create_permission" ) as post, mock.patch.object( + transports.PermissionServiceRestInterceptor, + "post_create_permission_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.PermissionServiceRestInterceptor, "pre_create_permission" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = permission_service.CreatePermissionRequest.pb( permission_service.CreatePermissionRequest() ) @@ -5007,6 +5061,7 @@ def test_create_permission_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gag_permission.Permission() + post_with_metadata.return_value = gag_permission.Permission(), metadata client.create_permission( request, @@ -5018,6 +5073,7 @@ def test_create_permission_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_permission_rest_bad_request( @@ -5108,10 +5164,13 @@ def test_get_permission_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.PermissionServiceRestInterceptor, "post_get_permission" ) as post, mock.patch.object( + transports.PermissionServiceRestInterceptor, "post_get_permission_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.PermissionServiceRestInterceptor, "pre_get_permission" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = permission_service.GetPermissionRequest.pb( permission_service.GetPermissionRequest() ) @@ -5135,6 +5194,7 @@ def test_get_permission_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = permission.Permission() + post_with_metadata.return_value = permission.Permission(), metadata client.get_permission( request, @@ -5146,6 +5206,7 @@ def test_get_permission_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + 
post_with_metadata.assert_called_once() def test_list_permissions_rest_bad_request( @@ -5230,10 +5291,14 @@ def test_list_permissions_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.PermissionServiceRestInterceptor, "post_list_permissions" ) as post, mock.patch.object( + transports.PermissionServiceRestInterceptor, + "post_list_permissions_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.PermissionServiceRestInterceptor, "pre_list_permissions" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = permission_service.ListPermissionsRequest.pb( permission_service.ListPermissionsRequest() ) @@ -5259,6 +5324,10 @@ def test_list_permissions_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = permission_service.ListPermissionsResponse() + post_with_metadata.return_value = ( + permission_service.ListPermissionsResponse(), + metadata, + ) client.list_permissions( request, @@ -5270,6 +5339,7 @@ def test_list_permissions_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_permission_rest_bad_request( @@ -5433,10 +5503,14 @@ def test_update_permission_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.PermissionServiceRestInterceptor, "post_update_permission" ) as post, mock.patch.object( + transports.PermissionServiceRestInterceptor, + "post_update_permission_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.PermissionServiceRestInterceptor, "pre_update_permission" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = permission_service.UpdatePermissionRequest.pb( permission_service.UpdatePermissionRequest() ) @@ -5460,6 +5534,7 @@ def test_update_permission_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gag_permission.Permission() + post_with_metadata.return_value = gag_permission.Permission(), metadata client.update_permission( request, @@ -5471,6 +5546,7 @@ def test_update_permission_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_permission_rest_bad_request( @@ -5661,10 +5737,14 @@ def test_transfer_ownership_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.PermissionServiceRestInterceptor, "post_transfer_ownership" ) as post, mock.patch.object( + transports.PermissionServiceRestInterceptor, + "post_transfer_ownership_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.PermissionServiceRestInterceptor, "pre_transfer_ownership" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = permission_service.TransferOwnershipRequest.pb( permission_service.TransferOwnershipRequest() ) @@ -5690,6 +5770,10 @@ def test_transfer_ownership_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = permission_service.TransferOwnershipResponse() + post_with_metadata.return_value = ( + permission_service.TransferOwnershipResponse(), + metadata, + ) client.transfer_ownership( request, @@ -5701,6 +5785,7 @@ def test_transfer_ownership_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() 
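Every `*_rest_interceptors` test in this change now also patches a `post_<rpc>_with_metadata` hook and has it return a `(response, metadata)` tuple alongside the existing `post_<rpc>` hook. A hypothetical user-side interceptor built on that convention could look like the sketch below; the hook signature shown is an assumption inferred from the test shape, not copied from the generated transports:

from typing import Sequence, Tuple


class LoggingRestInterceptor:
    """Hypothetical interceptor using the newer *_with_metadata hooks."""

    def post_get_model(self, response):
        # Older hook: only the decoded response passes through.
        return response

    def post_get_model_with_metadata(
        self, response, metadata: Sequence[Tuple[str, str]]
    ):
        # Newer hook: the response travels together with the trailing metadata,
        # so headers can be inspected here without being dropped.
        print(f"get_model returned with {len(list(metadata))} metadata entries")
        return response, metadata


interceptor = LoggingRestInterceptor()
resp, md = interceptor.post_get_model_with_metadata(
    {"name": "models/foo"}, [("x-goog-request-id", "123")]
)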
def test_initialize_client_w_rest(): diff --git a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta3/test_text_service.py b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta3/test_text_service.py index 17dce0231d24..060cd7bb739a 100644 --- a/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta3/test_text_service.py +++ b/packages/google-ai-generativelanguage/tests/unit/gapic/generativelanguage_v1beta3/test_text_service.py @@ -60,6 +60,13 @@ ) from google.ai.generativelanguage_v1beta3.types import safety, text_service +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -299,6 +306,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = TextServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = TextServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -3561,10 +3611,13 @@ def test_generate_text_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.TextServiceRestInterceptor, "post_generate_text" ) as post, mock.patch.object( + transports.TextServiceRestInterceptor, "post_generate_text_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.TextServiceRestInterceptor, "pre_generate_text" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = text_service.GenerateTextRequest.pb( text_service.GenerateTextRequest() ) @@ -3590,6 +3643,7 @@ def test_generate_text_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = text_service.GenerateTextResponse() + post_with_metadata.return_value = text_service.GenerateTextResponse(), metadata client.generate_text( request, @@ -3601,6 +3655,7 @@ def test_generate_text_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_embed_text_rest_bad_request(request_type=text_service.EmbedTextRequest): @@ -3680,10 +3735,13 
@@ def test_embed_text_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.TextServiceRestInterceptor, "post_embed_text" ) as post, mock.patch.object( + transports.TextServiceRestInterceptor, "post_embed_text_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.TextServiceRestInterceptor, "pre_embed_text" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = text_service.EmbedTextRequest.pb(text_service.EmbedTextRequest()) transcode.return_value = { "method": "post", @@ -3707,6 +3765,7 @@ def test_embed_text_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = text_service.EmbedTextResponse() + post_with_metadata.return_value = text_service.EmbedTextResponse(), metadata client.embed_text( request, @@ -3718,6 +3777,7 @@ def test_embed_text_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_batch_embed_text_rest_bad_request( @@ -3799,10 +3859,13 @@ def test_batch_embed_text_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.TextServiceRestInterceptor, "post_batch_embed_text" ) as post, mock.patch.object( + transports.TextServiceRestInterceptor, "post_batch_embed_text_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.TextServiceRestInterceptor, "pre_batch_embed_text" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = text_service.BatchEmbedTextRequest.pb( text_service.BatchEmbedTextRequest() ) @@ -3828,6 +3891,10 @@ def test_batch_embed_text_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = text_service.BatchEmbedTextResponse() + post_with_metadata.return_value = ( + text_service.BatchEmbedTextResponse(), + metadata, + ) client.batch_embed_text( request, @@ -3839,6 +3906,7 @@ def test_batch_embed_text_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_count_text_tokens_rest_bad_request( @@ -3923,10 +3991,13 @@ def test_count_text_tokens_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.TextServiceRestInterceptor, "post_count_text_tokens" ) as post, mock.patch.object( + transports.TextServiceRestInterceptor, "post_count_text_tokens_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.TextServiceRestInterceptor, "pre_count_text_tokens" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = text_service.CountTextTokensRequest.pb( text_service.CountTextTokensRequest() ) @@ -3952,6 +4023,10 @@ def test_count_text_tokens_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = text_service.CountTextTokensResponse() + post_with_metadata.return_value = ( + text_service.CountTextTokensResponse(), + metadata, + ) client.count_text_tokens( request, @@ -3963,6 +4038,7 @@ def test_count_text_tokens_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_initialize_client_w_rest(): diff --git a/packages/google-analytics-admin/CHANGELOG.md b/packages/google-analytics-admin/CHANGELOG.md index 546e34387f20..f3b288235611 100644 --- a/packages/google-analytics-admin/CHANGELOG.md +++ 
b/packages/google-analytics-admin/CHANGELOG.md @@ -1,5 +1,27 @@ # Changelog +## [0.23.4](https://github.com/googleapis/google-cloud-python/compare/google-analytics-admin-v0.23.3...google-analytics-admin-v0.23.4) (2025-02-12) + + +### Features + +* add `user_data_retention` field to `DataRetentionSettings` and mark as `REQUIRED` ([96aefef](https://github.com/googleapis/google-cloud-python/commit/96aefef02f806d12a2f4c1847a228181ab5b4afa)) +* add `user_data_retention` field to `DataRetentionSettings` and mark as `REQUIRED` ([96aefef](https://github.com/googleapis/google-cloud-python/commit/96aefef02f806d12a2f4c1847a228181ab5b4afa)) +* Add REST Interceptors which support reading metadata ([96aefef](https://github.com/googleapis/google-cloud-python/commit/96aefef02f806d12a2f4c1847a228181ab5b4afa)) +* Add support for reading selective GAPIC generation methods from service YAML ([96aefef](https://github.com/googleapis/google-cloud-python/commit/96aefef02f806d12a2f4c1847a228181ab5b4afa)) + + +### Bug Fixes + +* mark `event_data_retention` field in `DataRetentionSettings` as `REQUIRED` ([96aefef](https://github.com/googleapis/google-cloud-python/commit/96aefef02f806d12a2f4c1847a228181ab5b4afa)) +* mark `event_data_retention` field in `DataRetentionSettings` as `REQUIRED` ([96aefef](https://github.com/googleapis/google-cloud-python/commit/96aefef02f806d12a2f4c1847a228181ab5b4afa)) + + +### Documentation + +* replace "GA4" with "Google Analytics" or "GA" in all comments ([96aefef](https://github.com/googleapis/google-cloud-python/commit/96aefef02f806d12a2f4c1847a228181ab5b4afa)) +* replace "GA4" with "Google Analytics" or "GA" in all comments ([96aefef](https://github.com/googleapis/google-cloud-python/commit/96aefef02f806d12a2f4c1847a228181ab5b4afa)) + ## [0.23.3](https://github.com/googleapis/google-cloud-python/compare/google-analytics-admin-v0.23.2...google-analytics-admin-v0.23.3) (2024-12-12) diff --git a/packages/google-analytics-admin/google/analytics/admin/gapic_version.py b/packages/google-analytics-admin/google/analytics/admin/gapic_version.py index dd30746d3360..eb79334047e7 100644 --- a/packages/google-analytics-admin/google/analytics/admin/gapic_version.py +++ b/packages/google-analytics-admin/google/analytics/admin/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.23.3" # {x-release-please-version} +__version__ = "0.23.4" # {x-release-please-version} diff --git a/packages/google-analytics-admin/google/analytics/admin_v1alpha/gapic_version.py b/packages/google-analytics-admin/google/analytics/admin_v1alpha/gapic_version.py index dd30746d3360..eb79334047e7 100644 --- a/packages/google-analytics-admin/google/analytics/admin_v1alpha/gapic_version.py +++ b/packages/google-analytics-admin/google/analytics/admin_v1alpha/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.23.3" # {x-release-please-version} +__version__ = "0.23.4" # {x-release-please-version} diff --git a/packages/google-analytics-admin/google/analytics/admin_v1alpha/services/analytics_admin_service/async_client.py b/packages/google-analytics-admin/google/analytics/admin_v1alpha/services/analytics_admin_service/async_client.py index b3e3c29acd1f..1b6308eb449c 100644 --- a/packages/google-analytics-admin/google/analytics/admin_v1alpha/services/analytics_admin_service/async_client.py +++ b/packages/google-analytics-admin/google/analytics/admin_v1alpha/services/analytics_admin_service/async_client.py @@ -80,7 +80,7 @@ class AnalyticsAdminServiceAsyncClient: - """Service Interface for the Analytics Admin API (GA4).""" + """Service Interface for the Google Analytics Admin API.""" _client: AnalyticsAdminServiceClient @@ -575,7 +575,7 @@ async def list_accounts( ) -> pagers.ListAccountsAsyncPager: r"""Returns all accounts accessible by the caller. - Note that these accounts might not currently have GA4 + Note that these accounts might not currently have GA properties. Soft-deleted (ie: "trashed") accounts are excluded by default. Returns an empty list if no relevant accounts are found. @@ -956,7 +956,7 @@ async def get_property( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> resources.Property: - r"""Lookup for a single "GA4" Property. + r"""Lookup for a single GA Property. Args: request (Optional[Union[google.analytics.admin_v1alpha.types.GetPropertyRequest, dict]]): @@ -979,7 +979,7 @@ async def get_property( Returns: google.analytics.admin_v1alpha.types.Property: A resource message representing a - Google Analytics GA4 property. + Google Analytics property. """ # Create or coerce a protobuf request object. @@ -1038,7 +1038,6 @@ async def list_properties( ) -> pagers.ListPropertiesAsyncPager: r"""Returns child Properties under the specified parent Account. - Only "GA4" properties will be returned. Properties will be excluded if the caller does not have access. Soft-deleted (ie: "trashed") properties are excluded by default. Returns an empty list if no @@ -1111,8 +1110,8 @@ async def create_property( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> resources.Property: - r"""Creates an "GA4" property with the specified location - and attributes. + r"""Creates a Google Analytics property with the + specified location and attributes. Args: request (Optional[Union[google.analytics.admin_v1alpha.types.CreatePropertyRequest, dict]]): @@ -1137,7 +1136,7 @@ async def create_property( Returns: google.analytics.admin_v1alpha.types.Property: A resource message representing a - Google Analytics GA4 property. + Google Analytics property. """ # Create or coerce a protobuf request object. @@ -1201,8 +1200,7 @@ async def delete_property( permanently purged. https://support.google.com/analytics/answer/6154772 - Returns an error if the target is not found, or is not a - GA4 Property. + Returns an error if the target is not found. Args: request (Optional[Union[google.analytics.admin_v1alpha.types.DeletePropertyRequest, dict]]): @@ -1227,7 +1225,7 @@ async def delete_property( Returns: google.analytics.admin_v1alpha.types.Property: A resource message representing a - Google Analytics GA4 property. + Google Analytics property. """ # Create or coerce a protobuf request object. 
@@ -1321,7 +1319,7 @@ async def update_property( Returns: google.analytics.admin_v1alpha.types.Property: A resource message representing a - Google Analytics GA4 property. + Google Analytics property. """ # Create or coerce a protobuf request object. @@ -1419,8 +1417,8 @@ async def create_firebase_link( Returns: google.analytics.admin_v1alpha.types.FirebaseLink: - A link between a GA4 property and a - Firebase project. + A link between a Google Analytics + property and a Firebase project. """ # Create or coerce a protobuf request object. @@ -1778,8 +1776,8 @@ async def create_google_ads_link( Returns: google.analytics.admin_v1alpha.types.GoogleAdsLink: - A link between a GA4 property and a - Google Ads account. + A link between a Google Analytics + property and a Google Ads account. """ # Create or coerce a protobuf request object. @@ -1873,8 +1871,8 @@ async def update_google_ads_link( Returns: google.analytics.admin_v1alpha.types.GoogleAdsLink: - A link between a GA4 property and a - Google Ads account. + A link between a Google Analytics + property and a Google Ads account. """ # Create or coerce a protobuf request object. @@ -2199,7 +2197,7 @@ async def get_measurement_protocol_secret( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> resources.MeasurementProtocolSecret: - r"""Lookup for a single "GA4" MeasurementProtocolSecret. + r"""Lookup for a single MeasurementProtocolSecret. Args: request (Optional[Union[google.analytics.admin_v1alpha.types.GetMeasurementProtocolSecretRequest, dict]]): @@ -3235,6 +3233,9 @@ async def search_change_history_events( r"""Searches through all changes to an account or its children given the specified set of filters. + Only returns the subset of changes supported by the API. + The UI may return additional changes. + Args: request (Optional[Union[google.analytics.admin_v1alpha.types.SearchChangeHistoryEventsRequest, dict]]): The request object. Request message for @@ -4456,8 +4457,9 @@ async def get_display_video360_advertiser_link( Returns: google.analytics.admin_v1alpha.types.DisplayVideo360AdvertiserLink: - A link between a GA4 property and a - Display & Video 360 advertiser. + A link between a Google Analytics + property and a Display & Video 360 + advertiser. """ # Create or coerce a protobuf request object. @@ -4658,8 +4660,9 @@ async def create_display_video360_advertiser_link( Returns: google.analytics.admin_v1alpha.types.DisplayVideo360AdvertiserLink: - A link between a GA4 property and a - Display & Video 360 advertiser. + A link between a Google Analytics + property and a Display & Video 360 + advertiser. """ # Create or coerce a protobuf request object. @@ -4845,8 +4848,9 @@ async def update_display_video360_advertiser_link( Returns: google.analytics.admin_v1alpha.types.DisplayVideo360AdvertiserLink: - A link between a GA4 property and a - Display & Video 360 advertiser. + A link between a Google Analytics + property and a Display & Video 360 + advertiser. """ # Create or coerce a protobuf request object. @@ -4948,9 +4952,9 @@ async def get_display_video360_advertiser_link_proposal( Returns: google.analytics.admin_v1alpha.types.DisplayVideo360AdvertiserLinkProposal: - A proposal for a link between a GA4 - property and a Display & Video 360 - advertiser. + A proposal for a link between a + Google Analytics property and a Display + & Video 360 advertiser. 
A proposal is converted to a DisplayVideo360AdvertiserLink once @@ -5164,9 +5168,9 @@ async def create_display_video360_advertiser_link_proposal( Returns: google.analytics.admin_v1alpha.types.DisplayVideo360AdvertiserLinkProposal: - A proposal for a link between a GA4 - property and a Display & Video 360 - advertiser. + A proposal for a link between a + Google Analytics property and a Display + & Video 360 advertiser. A proposal is converted to a DisplayVideo360AdvertiserLink once @@ -5431,9 +5435,9 @@ async def cancel_display_video360_advertiser_link_proposal( Returns: google.analytics.admin_v1alpha.types.DisplayVideo360AdvertiserLinkProposal: - A proposal for a link between a GA4 - property and a Display & Video 360 - advertiser. + A proposal for a link between a + Google Analytics property and a Display + & Video 360 advertiser. A proposal is converted to a DisplayVideo360AdvertiserLink once @@ -7027,7 +7031,7 @@ async def get_audience( Returns: google.analytics.admin_v1alpha.types.Audience: - A resource message representing a GA4 + A resource message representing an Audience. """ @@ -7213,7 +7217,7 @@ async def create_audience( Returns: google.analytics.admin_v1alpha.types.Audience: - A resource message representing a GA4 + A resource message representing an Audience. """ @@ -7309,7 +7313,7 @@ async def update_audience( Returns: google.analytics.admin_v1alpha.types.Audience: - A resource message representing a GA4 + A resource message representing an Audience. """ @@ -7449,8 +7453,8 @@ async def get_search_ads360_link( Returns: google.analytics.admin_v1alpha.types.SearchAds360Link: - A link between a GA4 property and a - Search Ads 360 entity. + A link between a Google Analytics + property and a Search Ads 360 entity. """ # Create or coerce a protobuf request object. @@ -7639,8 +7643,8 @@ async def create_search_ads360_link( Returns: google.analytics.admin_v1alpha.types.SearchAds360Link: - A link between a GA4 property and a - Search Ads 360 entity. + A link between a Google Analytics + property and a Search Ads 360 entity. """ # Create or coerce a protobuf request object. @@ -7810,8 +7814,8 @@ async def update_search_ads360_link( Returns: google.analytics.admin_v1alpha.types.SearchAds360Link: - A link between a GA4 property and a - Search Ads 360 entity. + A link between a Google Analytics + property and a Search Ads 360 entity. """ # Create or coerce a protobuf request object. @@ -8068,14 +8072,19 @@ async def run_access_report( related to quota can only be requested on Google Analytics 360 properties. This method is only available to Administrators. - These data access records include GA4 UI Reporting, GA4 UI - Explorations, GA4 Data API, and other products like Firebase & + These data access records include GA UI Reporting, GA UI + Explorations, GA Data API, and other products like Firebase & Admob that can retrieve data from Google Analytics through a linkage. These records don't include property configuration changes like adding a stream or changing a property's time zone. For configuration change history, see `searchChangeHistoryEvents `__. + To give your feedback on this API, complete the `Google + Analytics Access Reports + feedback `__ + form. + Args: request (Optional[Union[google.analytics.admin_v1alpha.types.RunAccessReportRequest, dict]]): The request object. The request for a Data Access Record @@ -8856,9 +8865,7 @@ async def get_expanded_data_set( Returns: google.analytics.admin_v1alpha.types.ExpandedDataSet: - A resource message representing a GA4 - ExpandedDataSet. 
- + A resource message representing an ExpandedDataSet. """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have @@ -9046,9 +9053,7 @@ async def create_expanded_data_set( Returns: google.analytics.admin_v1alpha.types.ExpandedDataSet: - A resource message representing a GA4 - ExpandedDataSet. - + A resource message representing an ExpandedDataSet. """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have @@ -9144,9 +9149,7 @@ async def update_expanded_data_set( Returns: google.analytics.admin_v1alpha.types.ExpandedDataSet: - A resource message representing a GA4 - ExpandedDataSet. - + A resource message representing an ExpandedDataSet. """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have @@ -9888,8 +9891,8 @@ async def create_big_query_link( Returns: google.analytics.admin_v1alpha.types.BigQueryLink: - A link between a GA4 Property and - BigQuery project. + A link between a Google Analytics + property and BigQuery project. """ # Create or coerce a protobuf request object. @@ -9974,8 +9977,8 @@ async def get_big_query_link( Returns: google.analytics.admin_v1alpha.types.BigQueryLink: - A link between a GA4 Property and - BigQuery project. + A link between a Google Analytics + property and BigQuery project. """ # Create or coerce a protobuf request object. @@ -10242,8 +10245,8 @@ async def update_big_query_link( Returns: google.analytics.admin_v1alpha.types.BigQueryLink: - A link between a GA4 Property and - BigQuery project. + A link between a Google Analytics + property and BigQuery project. """ # Create or coerce a protobuf request object. @@ -10760,8 +10763,9 @@ async def get_ad_sense_link( Returns: google.analytics.admin_v1alpha.types.AdSenseLink: - A link between a GA4 Property and an - AdSense for Content ad client. + A link between a Google Analytics + property and an AdSense for Content ad + client. """ # Create or coerce a protobuf request object. @@ -10850,8 +10854,9 @@ async def create_ad_sense_link( Returns: google.analytics.admin_v1alpha.types.AdSenseLink: - A link between a GA4 Property and an - AdSense for Content ad client. + A link between a Google Analytics + property and an AdSense for Content ad + client. """ # Create or coerce a protobuf request object. @@ -13299,8 +13304,9 @@ async def create_subproperty_event_filter( Returns: google.analytics.admin_v1alpha.types.SubpropertyEventFilter: - A resource message representing a GA4 - Subproperty event filter. + A resource message representing a + Google Analytics subproperty event + filter. """ # Create or coerce a protobuf request object. @@ -13387,8 +13393,9 @@ async def get_subproperty_event_filter( Returns: google.analytics.admin_v1alpha.types.SubpropertyEventFilter: - A resource message representing a GA4 - Subproperty event filter. + A resource message representing a + Google Analytics subproperty event + filter. """ # Create or coerce a protobuf request object. @@ -13582,8 +13589,9 @@ async def update_subproperty_event_filter( Returns: google.analytics.admin_v1alpha.types.SubpropertyEventFilter: - A resource message representing a GA4 - Subproperty event filter. + A resource message representing a + Google Analytics subproperty event + filter. """ # Create or coerce a protobuf request object. 
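# --- editor's example (not part of the diff) ---------------------------------
# Hedged sketch of the change-history search described in the async client above:
# only the subset of changes supported by the API is returned.  The `account`
# request field and the "accounts/100" value are assumptions for illustration;
# the client, method and request type names come from the docstrings in this diff.
import asyncio

from google.analytics import admin_v1alpha


async def list_recent_changes() -> None:
    client = admin_v1alpha.AnalyticsAdminServiceAsyncClient()

    request = admin_v1alpha.SearchChangeHistoryEventsRequest(
        account="accounts/100",  # placeholder account resource name (assumed field)
    )

    # The awaited call returns an async pager of ChangeHistoryEvent messages.
    page_result = await client.search_change_history_events(request=request)
    async for event in page_result:
        print(event)


if __name__ == "__main__":
    asyncio.run(list_recent_changes())
# ------------------------------------------------------------------------------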
diff --git a/packages/google-analytics-admin/google/analytics/admin_v1alpha/services/analytics_admin_service/client.py b/packages/google-analytics-admin/google/analytics/admin_v1alpha/services/analytics_admin_service/client.py index ccf6bb0fe82c..9ef9e7fc0d40 100644 --- a/packages/google-analytics-admin/google/analytics/admin_v1alpha/services/analytics_admin_service/client.py +++ b/packages/google-analytics-admin/google/analytics/admin_v1alpha/services/analytics_admin_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -123,7 +125,7 @@ def get_transport_class( class AnalyticsAdminServiceClient(metaclass=AnalyticsAdminServiceClientMeta): - """Service Interface for the Analytics Admin API (GA4).""" + """Service Interface for the Google Analytics Admin API.""" @staticmethod def _get_default_mtls_endpoint(api_endpoint): @@ -1123,6 +1125,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -1413,7 +1442,7 @@ def list_accounts( ) -> pagers.ListAccountsPager: r"""Returns all accounts accessible by the caller. - Note that these accounts might not currently have GA4 + Note that these accounts might not currently have GA properties. Soft-deleted (ie: "trashed") accounts are excluded by default. Returns an empty list if no relevant accounts are found. @@ -1782,7 +1811,7 @@ def get_property( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> resources.Property: - r"""Lookup for a single "GA4" Property. + r"""Lookup for a single GA Property. Args: request (Union[google.analytics.admin_v1alpha.types.GetPropertyRequest, dict]): @@ -1805,7 +1834,7 @@ def get_property( Returns: google.analytics.admin_v1alpha.types.Property: A resource message representing a - Google Analytics GA4 property. + Google Analytics property. """ # Create or coerce a protobuf request object. @@ -1861,7 +1890,6 @@ def list_properties( ) -> pagers.ListPropertiesPager: r"""Returns child Properties under the specified parent Account. - Only "GA4" properties will be returned. Properties will be excluded if the caller does not have access. Soft-deleted (ie: "trashed") properties are excluded by default. 
Returns an empty list if no @@ -1932,8 +1960,8 @@ def create_property( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> resources.Property: - r"""Creates an "GA4" property with the specified location - and attributes. + r"""Creates a Google Analytics property with the + specified location and attributes. Args: request (Union[google.analytics.admin_v1alpha.types.CreatePropertyRequest, dict]): @@ -1958,7 +1986,7 @@ def create_property( Returns: google.analytics.admin_v1alpha.types.Property: A resource message representing a - Google Analytics GA4 property. + Google Analytics property. """ # Create or coerce a protobuf request object. @@ -2019,8 +2047,7 @@ def delete_property( permanently purged. https://support.google.com/analytics/answer/6154772 - Returns an error if the target is not found, or is not a - GA4 Property. + Returns an error if the target is not found. Args: request (Union[google.analytics.admin_v1alpha.types.DeletePropertyRequest, dict]): @@ -2045,7 +2072,7 @@ def delete_property( Returns: google.analytics.admin_v1alpha.types.Property: A resource message representing a - Google Analytics GA4 property. + Google Analytics property. """ # Create or coerce a protobuf request object. @@ -2136,7 +2163,7 @@ def update_property( Returns: google.analytics.admin_v1alpha.types.Property: A resource message representing a - Google Analytics GA4 property. + Google Analytics property. """ # Create or coerce a protobuf request object. @@ -2231,8 +2258,8 @@ def create_firebase_link( Returns: google.analytics.admin_v1alpha.types.FirebaseLink: - A link between a GA4 property and a - Firebase project. + A link between a Google Analytics + property and a Firebase project. """ # Create or coerce a protobuf request object. @@ -2578,8 +2605,8 @@ def create_google_ads_link( Returns: google.analytics.admin_v1alpha.types.GoogleAdsLink: - A link between a GA4 property and a - Google Ads account. + A link between a Google Analytics + property and a Google Ads account. """ # Create or coerce a protobuf request object. @@ -2670,8 +2697,8 @@ def update_google_ads_link( Returns: google.analytics.admin_v1alpha.types.GoogleAdsLink: - A link between a GA4 property and a - Google Ads account. + A link between a Google Analytics + property and a Google Ads account. """ # Create or coerce a protobuf request object. @@ -2986,7 +3013,7 @@ def get_measurement_protocol_secret( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> resources.MeasurementProtocolSecret: - r"""Lookup for a single "GA4" MeasurementProtocolSecret. + r"""Lookup for a single MeasurementProtocolSecret. Args: request (Union[google.analytics.admin_v1alpha.types.GetMeasurementProtocolSecretRequest, dict]): @@ -4012,6 +4039,9 @@ def search_change_history_events( r"""Searches through all changes to an account or its children given the specified set of filters. + Only returns the subset of changes supported by the API. + The UI may return additional changes. + Args: request (Union[google.analytics.admin_v1alpha.types.SearchChangeHistoryEventsRequest, dict]): The request object. Request message for @@ -5201,8 +5231,9 @@ def get_display_video360_advertiser_link( Returns: google.analytics.admin_v1alpha.types.DisplayVideo360AdvertiserLink: - A link between a GA4 property and a - Display & Video 360 advertiser. + A link between a Google Analytics + property and a Display & Video 360 + advertiser. 
""" # Create or coerce a protobuf request object. @@ -5401,8 +5432,9 @@ def create_display_video360_advertiser_link( Returns: google.analytics.admin_v1alpha.types.DisplayVideo360AdvertiserLink: - A link between a GA4 property and a - Display & Video 360 advertiser. + A link between a Google Analytics + property and a Display & Video 360 + advertiser. """ # Create or coerce a protobuf request object. @@ -5586,8 +5618,9 @@ def update_display_video360_advertiser_link( Returns: google.analytics.admin_v1alpha.types.DisplayVideo360AdvertiserLink: - A link between a GA4 property and a - Display & Video 360 advertiser. + A link between a Google Analytics + property and a Display & Video 360 + advertiser. """ # Create or coerce a protobuf request object. @@ -5688,9 +5721,9 @@ def get_display_video360_advertiser_link_proposal( Returns: google.analytics.admin_v1alpha.types.DisplayVideo360AdvertiserLinkProposal: - A proposal for a link between a GA4 - property and a Display & Video 360 - advertiser. + A proposal for a link between a + Google Analytics property and a Display + & Video 360 advertiser. A proposal is converted to a DisplayVideo360AdvertiserLink once @@ -5902,9 +5935,9 @@ def create_display_video360_advertiser_link_proposal( Returns: google.analytics.admin_v1alpha.types.DisplayVideo360AdvertiserLinkProposal: - A proposal for a link between a GA4 - property and a Display & Video 360 - advertiser. + A proposal for a link between a + Google Analytics property and a Display + & Video 360 advertiser. A proposal is converted to a DisplayVideo360AdvertiserLink once @@ -6167,9 +6200,9 @@ def cancel_display_video360_advertiser_link_proposal( Returns: google.analytics.admin_v1alpha.types.DisplayVideo360AdvertiserLinkProposal: - A proposal for a link between a GA4 - property and a Display & Video 360 - advertiser. + A proposal for a link between a + Google Analytics property and a Display + & Video 360 advertiser. A proposal is converted to a DisplayVideo360AdvertiserLink once @@ -7716,7 +7749,7 @@ def get_audience( Returns: google.analytics.admin_v1alpha.types.Audience: - A resource message representing a GA4 + A resource message representing an Audience. """ @@ -7896,7 +7929,7 @@ def create_audience( Returns: google.analytics.admin_v1alpha.types.Audience: - A resource message representing a GA4 + A resource message representing an Audience. """ @@ -7989,7 +8022,7 @@ def update_audience( Returns: google.analytics.admin_v1alpha.types.Audience: - A resource message representing a GA4 + A resource message representing an Audience. """ @@ -8124,8 +8157,8 @@ def get_search_ads360_link( Returns: google.analytics.admin_v1alpha.types.SearchAds360Link: - A link between a GA4 property and a - Search Ads 360 entity. + A link between a Google Analytics + property and a Search Ads 360 entity. """ # Create or coerce a protobuf request object. @@ -8308,8 +8341,8 @@ def create_search_ads360_link( Returns: google.analytics.admin_v1alpha.types.SearchAds360Link: - A link between a GA4 property and a - Search Ads 360 entity. + A link between a Google Analytics + property and a Search Ads 360 entity. """ # Create or coerce a protobuf request object. @@ -8477,8 +8510,8 @@ def update_search_ads360_link( Returns: google.analytics.admin_v1alpha.types.SearchAds360Link: - A link between a GA4 property and a - Search Ads 360 entity. + A link between a Google Analytics + property and a Search Ads 360 entity. """ # Create or coerce a protobuf request object. 
@@ -8730,14 +8763,19 @@ def run_access_report( related to quota can only be requested on Google Analytics 360 properties. This method is only available to Administrators. - These data access records include GA4 UI Reporting, GA4 UI - Explorations, GA4 Data API, and other products like Firebase & + These data access records include GA UI Reporting, GA UI + Explorations, GA Data API, and other products like Firebase & Admob that can retrieve data from Google Analytics through a linkage. These records don't include property configuration changes like adding a stream or changing a property's time zone. For configuration change history, see `searchChangeHistoryEvents `__. + To give your feedback on this API, complete the `Google + Analytics Access Reports + feedback `__ + form. + Args: request (Union[google.analytics.admin_v1alpha.types.RunAccessReportRequest, dict]): The request object. The request for a Data Access Record @@ -9501,9 +9539,7 @@ def get_expanded_data_set( Returns: google.analytics.admin_v1alpha.types.ExpandedDataSet: - A resource message representing a GA4 - ExpandedDataSet. - + A resource message representing an ExpandedDataSet. """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have @@ -9685,9 +9721,7 @@ def create_expanded_data_set( Returns: google.analytics.admin_v1alpha.types.ExpandedDataSet: - A resource message representing a GA4 - ExpandedDataSet. - + A resource message representing an ExpandedDataSet. """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have @@ -9780,9 +9814,7 @@ def update_expanded_data_set( Returns: google.analytics.admin_v1alpha.types.ExpandedDataSet: - A resource message representing a GA4 - ExpandedDataSet. - + A resource message representing an ExpandedDataSet. """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have @@ -10503,8 +10535,8 @@ def create_big_query_link( Returns: google.analytics.admin_v1alpha.types.BigQueryLink: - A link between a GA4 Property and - BigQuery project. + A link between a Google Analytics + property and BigQuery project. """ # Create or coerce a protobuf request object. @@ -10586,8 +10618,8 @@ def get_big_query_link( Returns: google.analytics.admin_v1alpha.types.BigQueryLink: - A link between a GA4 Property and - BigQuery project. + A link between a Google Analytics + property and BigQuery project. """ # Create or coerce a protobuf request object. @@ -10845,8 +10877,8 @@ def update_big_query_link( Returns: google.analytics.admin_v1alpha.types.BigQueryLink: - A link between a GA4 Property and - BigQuery project. + A link between a Google Analytics + property and BigQuery project. """ # Create or coerce a protobuf request object. @@ -11356,8 +11388,9 @@ def get_ad_sense_link( Returns: google.analytics.admin_v1alpha.types.AdSenseLink: - A link between a GA4 Property and an - AdSense for Content ad client. + A link between a Google Analytics + property and an AdSense for Content ad + client. """ # Create or coerce a protobuf request object. @@ -11443,8 +11476,9 @@ def create_ad_sense_link( Returns: google.analytics.admin_v1alpha.types.AdSenseLink: - A link between a GA4 Property and an - AdSense for Content ad client. + A link between a Google Analytics + property and an AdSense for Content ad + client. """ # Create or coerce a protobuf request object. 
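# --- editor's example (not part of the diff) ---------------------------------
# Hedged sketch of the data-access report described in run_access_report above.
# The AccessDimension/AccessMetric/AccessDateRange field names and the
# "userEmail"/"accessCount" schema names are assumptions about the public Access
# Report schema, not part of this diff; the client and request type are from it.
from google.analytics import admin_v1alpha


def report_recent_data_access(property_id: str) -> None:
    client = admin_v1alpha.AnalyticsAdminServiceClient()

    request = admin_v1alpha.RunAccessReportRequest(
        entity=f"properties/{property_id}",
        dimensions=[admin_v1alpha.AccessDimension(dimension_name="userEmail")],
        metrics=[admin_v1alpha.AccessMetric(metric_name="accessCount")],
        date_ranges=[
            admin_v1alpha.AccessDateRange(start_date="7daysAgo", end_date="today")
        ],
    )

    response = client.run_access_report(request=request)
    for row in response.rows:
        print(row)


if __name__ == "__main__":
    report_recent_data_access("1000")  # placeholder property ID
# ------------------------------------------------------------------------------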
@@ -13826,8 +13860,9 @@ def create_subproperty_event_filter( Returns: google.analytics.admin_v1alpha.types.SubpropertyEventFilter: - A resource message representing a GA4 - Subproperty event filter. + A resource message representing a + Google Analytics subproperty event + filter. """ # Create or coerce a protobuf request object. @@ -13913,8 +13948,9 @@ def get_subproperty_event_filter( Returns: google.analytics.admin_v1alpha.types.SubpropertyEventFilter: - A resource message representing a GA4 - Subproperty event filter. + A resource message representing a + Google Analytics subproperty event + filter. """ # Create or coerce a protobuf request object. @@ -14106,8 +14142,9 @@ def update_subproperty_event_filter( Returns: google.analytics.admin_v1alpha.types.SubpropertyEventFilter: - A resource message representing a GA4 - Subproperty event filter. + A resource message representing a + Google Analytics subproperty event + filter. """ # Create or coerce a protobuf request object. diff --git a/packages/google-analytics-admin/google/analytics/admin_v1alpha/services/analytics_admin_service/transports/grpc.py b/packages/google-analytics-admin/google/analytics/admin_v1alpha/services/analytics_admin_service/transports/grpc.py index 04146df37ed8..a3349d0d0901 100644 --- a/packages/google-analytics-admin/google/analytics/admin_v1alpha/services/analytics_admin_service/transports/grpc.py +++ b/packages/google-analytics-admin/google/analytics/admin_v1alpha/services/analytics_admin_service/transports/grpc.py @@ -126,7 +126,7 @@ def intercept_unary_unary(self, continuation, client_call_details, request): class AnalyticsAdminServiceGrpcTransport(AnalyticsAdminServiceTransport): """gRPC backend transport for AnalyticsAdminService. - Service Interface for the Analytics Admin API (GA4). + Service Interface for the Google Analytics Admin API. This class defines the same methods as the primary client, so the primary client can load the underlying transport implementation @@ -371,7 +371,7 @@ def list_accounts( Returns all accounts accessible by the caller. - Note that these accounts might not currently have GA4 + Note that these accounts might not currently have GA properties. Soft-deleted (ie: "trashed") accounts are excluded by default. Returns an empty list if no relevant accounts are found. @@ -523,7 +523,7 @@ def get_property( ) -> Callable[[analytics_admin.GetPropertyRequest], resources.Property]: r"""Return a callable for the get property method over gRPC. - Lookup for a single "GA4" Property. + Lookup for a single GA Property. Returns: Callable[[~.GetPropertyRequest], @@ -553,7 +553,6 @@ def list_properties( Returns child Properties under the specified parent Account. - Only "GA4" properties will be returned. Properties will be excluded if the caller does not have access. Soft-deleted (ie: "trashed") properties are excluded by default. Returns an empty list if no @@ -583,8 +582,8 @@ def create_property( ) -> Callable[[analytics_admin.CreatePropertyRequest], resources.Property]: r"""Return a callable for the create property method over gRPC. - Creates an "GA4" property with the specified location - and attributes. + Creates a Google Analytics property with the + specified location and attributes. Returns: Callable[[~.CreatePropertyRequest], @@ -622,8 +621,7 @@ def delete_property( permanently purged. https://support.google.com/analytics/answer/6154772 - Returns an error if the target is not found, or is not a - GA4 Property. + Returns an error if the target is not found. 
Returns: Callable[[~.DeletePropertyRequest], @@ -930,7 +928,7 @@ def get_measurement_protocol_secret( r"""Return a callable for the get measurement protocol secret method over gRPC. - Lookup for a single "GA4" MeasurementProtocolSecret. + Lookup for a single MeasurementProtocolSecret. Returns: Callable[[~.GetMeasurementProtocolSecretRequest], @@ -1289,6 +1287,9 @@ def search_change_history_events( Searches through all changes to an account or its children given the specified set of filters. + Only returns the subset of changes supported by the API. + The UI may return additional changes. + Returns: Callable[[~.SearchChangeHistoryEventsRequest], ~.SearchChangeHistoryEventsResponse]: @@ -2844,14 +2845,19 @@ def run_access_report( related to quota can only be requested on Google Analytics 360 properties. This method is only available to Administrators. - These data access records include GA4 UI Reporting, GA4 UI - Explorations, GA4 Data API, and other products like Firebase & + These data access records include GA UI Reporting, GA UI + Explorations, GA Data API, and other products like Firebase & Admob that can retrieve data from Google Analytics through a linkage. These records don't include property configuration changes like adding a stream or changing a property's time zone. For configuration change history, see `searchChangeHistoryEvents `__. + To give your feedback on this API, complete the `Google + Analytics Access Reports + feedback `__ + form. + Returns: Callable[[~.RunAccessReportRequest], ~.RunAccessReportResponse]: diff --git a/packages/google-analytics-admin/google/analytics/admin_v1alpha/services/analytics_admin_service/transports/grpc_asyncio.py b/packages/google-analytics-admin/google/analytics/admin_v1alpha/services/analytics_admin_service/transports/grpc_asyncio.py index 62fa6cfd1056..1bf6bb7b5a27 100644 --- a/packages/google-analytics-admin/google/analytics/admin_v1alpha/services/analytics_admin_service/transports/grpc_asyncio.py +++ b/packages/google-analytics-admin/google/analytics/admin_v1alpha/services/analytics_admin_service/transports/grpc_asyncio.py @@ -131,7 +131,7 @@ async def intercept_unary_unary(self, continuation, client_call_details, request class AnalyticsAdminServiceGrpcAsyncIOTransport(AnalyticsAdminServiceTransport): """gRPC AsyncIO backend transport for AnalyticsAdminService. - Service Interface for the Analytics Admin API (GA4). + Service Interface for the Google Analytics Admin API. This class defines the same methods as the primary client, so the primary client can load the underlying transport implementation @@ -379,7 +379,7 @@ def list_accounts( Returns all accounts accessible by the caller. - Note that these accounts might not currently have GA4 + Note that these accounts might not currently have GA properties. Soft-deleted (ie: "trashed") accounts are excluded by default. Returns an empty list if no relevant accounts are found. @@ -531,7 +531,7 @@ def get_property( ) -> Callable[[analytics_admin.GetPropertyRequest], Awaitable[resources.Property]]: r"""Return a callable for the get property method over gRPC. - Lookup for a single "GA4" Property. + Lookup for a single GA Property. Returns: Callable[[~.GetPropertyRequest], @@ -562,7 +562,6 @@ def list_properties( Returns child Properties under the specified parent Account. - Only "GA4" properties will be returned. Properties will be excluded if the caller does not have access. Soft-deleted (ie: "trashed") properties are excluded by default. 
Returns an empty list if no @@ -594,8 +593,8 @@ def create_property( ]: r"""Return a callable for the create property method over gRPC. - Creates an "GA4" property with the specified location - and attributes. + Creates a Google Analytics property with the + specified location and attributes. Returns: Callable[[~.CreatePropertyRequest], @@ -635,8 +634,7 @@ def delete_property( permanently purged. https://support.google.com/analytics/answer/6154772 - Returns an error if the target is not found, or is not a - GA4 Property. + Returns an error if the target is not found. Returns: Callable[[~.DeletePropertyRequest], @@ -954,7 +952,7 @@ def get_measurement_protocol_secret( r"""Return a callable for the get measurement protocol secret method over gRPC. - Lookup for a single "GA4" MeasurementProtocolSecret. + Lookup for a single MeasurementProtocolSecret. Returns: Callable[[~.GetMeasurementProtocolSecretRequest], @@ -1315,6 +1313,9 @@ def search_change_history_events( Searches through all changes to an account or its children given the specified set of filters. + Only returns the subset of changes supported by the API. + The UI may return additional changes. + Returns: Callable[[~.SearchChangeHistoryEventsRequest], Awaitable[~.SearchChangeHistoryEventsResponse]]: @@ -2913,14 +2914,19 @@ def run_access_report( related to quota can only be requested on Google Analytics 360 properties. This method is only available to Administrators. - These data access records include GA4 UI Reporting, GA4 UI - Explorations, GA4 Data API, and other products like Firebase & + These data access records include GA UI Reporting, GA UI + Explorations, GA Data API, and other products like Firebase & Admob that can retrieve data from Google Analytics through a linkage. These records don't include property configuration changes like adding a stream or changing a property's time zone. For configuration change history, see `searchChangeHistoryEvents `__. + To give your feedback on this API, complete the `Google + Analytics Access Reports + feedback `__ + form. + Returns: Callable[[~.RunAccessReportRequest], Awaitable[~.RunAccessReportResponse]]: diff --git a/packages/google-analytics-admin/google/analytics/admin_v1alpha/services/analytics_admin_service/transports/rest.py b/packages/google-analytics-admin/google/analytics/admin_v1alpha/services/analytics_admin_service/transports/rest.py index f9cdd0e8ea09..5bf2070d281a 100644 --- a/packages/google-analytics-admin/google/analytics/admin_v1alpha/services/analytics_admin_service/transports/rest.py +++ b/packages/google-analytics-admin/google/analytics/admin_v1alpha/services/analytics_admin_service/transports/rest.py @@ -1201,12 +1201,38 @@ def post_acknowledge_user_data_collection( ) -> analytics_admin.AcknowledgeUserDataCollectionResponse: """Post-rpc interceptor for acknowledge_user_data_collection - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_acknowledge_user_data_collection_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AnalyticsAdminService server but before - it is returned to user code. + it is returned to user code. This `post_acknowledge_user_data_collection` interceptor runs + before the `post_acknowledge_user_data_collection_with_metadata` interceptor. 
""" return response + def post_acknowledge_user_data_collection_with_metadata( + self, + response: analytics_admin.AcknowledgeUserDataCollectionResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + analytics_admin.AcknowledgeUserDataCollectionResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for acknowledge_user_data_collection + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnalyticsAdminService server but before it is returned to user code. + + We recommend only using this `post_acknowledge_user_data_collection_with_metadata` + interceptor in new development instead of the `post_acknowledge_user_data_collection` interceptor. + When both interceptors are used, this `post_acknowledge_user_data_collection_with_metadata` interceptor runs after the + `post_acknowledge_user_data_collection` interceptor. The (possibly modified) response returned by + `post_acknowledge_user_data_collection` will be passed to + `post_acknowledge_user_data_collection_with_metadata`. + """ + return response, metadata + def pre_approve_display_video360_advertiser_link_proposal( self, request: analytics_admin.ApproveDisplayVideo360AdvertiserLinkProposalRequest, @@ -1228,12 +1254,38 @@ def post_approve_display_video360_advertiser_link_proposal( ) -> analytics_admin.ApproveDisplayVideo360AdvertiserLinkProposalResponse: """Post-rpc interceptor for approve_display_video360_advertiser_link_proposal - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_approve_display_video360_advertiser_link_proposal_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AnalyticsAdminService server but before - it is returned to user code. + it is returned to user code. This `post_approve_display_video360_advertiser_link_proposal` interceptor runs + before the `post_approve_display_video360_advertiser_link_proposal_with_metadata` interceptor. """ return response + def post_approve_display_video360_advertiser_link_proposal_with_metadata( + self, + response: analytics_admin.ApproveDisplayVideo360AdvertiserLinkProposalResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + analytics_admin.ApproveDisplayVideo360AdvertiserLinkProposalResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for approve_display_video360_advertiser_link_proposal + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnalyticsAdminService server but before it is returned to user code. + + We recommend only using this `post_approve_display_video360_advertiser_link_proposal_with_metadata` + interceptor in new development instead of the `post_approve_display_video360_advertiser_link_proposal` interceptor. + When both interceptors are used, this `post_approve_display_video360_advertiser_link_proposal_with_metadata` interceptor runs after the + `post_approve_display_video360_advertiser_link_proposal` interceptor. The (possibly modified) response returned by + `post_approve_display_video360_advertiser_link_proposal` will be passed to + `post_approve_display_video360_advertiser_link_proposal_with_metadata`. 
+ """ + return response, metadata + def pre_archive_audience( self, request: analytics_admin.ArchiveAudienceRequest, @@ -1298,12 +1350,38 @@ def post_batch_create_access_bindings( ) -> analytics_admin.BatchCreateAccessBindingsResponse: """Post-rpc interceptor for batch_create_access_bindings - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_batch_create_access_bindings_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AnalyticsAdminService server but before - it is returned to user code. + it is returned to user code. This `post_batch_create_access_bindings` interceptor runs + before the `post_batch_create_access_bindings_with_metadata` interceptor. """ return response + def post_batch_create_access_bindings_with_metadata( + self, + response: analytics_admin.BatchCreateAccessBindingsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + analytics_admin.BatchCreateAccessBindingsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for batch_create_access_bindings + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnalyticsAdminService server but before it is returned to user code. + + We recommend only using this `post_batch_create_access_bindings_with_metadata` + interceptor in new development instead of the `post_batch_create_access_bindings` interceptor. + When both interceptors are used, this `post_batch_create_access_bindings_with_metadata` interceptor runs after the + `post_batch_create_access_bindings` interceptor. The (possibly modified) response returned by + `post_batch_create_access_bindings` will be passed to + `post_batch_create_access_bindings_with_metadata`. + """ + return response, metadata + def pre_batch_delete_access_bindings( self, request: analytics_admin.BatchDeleteAccessBindingsRequest, @@ -1339,12 +1417,38 @@ def post_batch_get_access_bindings( ) -> analytics_admin.BatchGetAccessBindingsResponse: """Post-rpc interceptor for batch_get_access_bindings - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_batch_get_access_bindings_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AnalyticsAdminService server but before - it is returned to user code. + it is returned to user code. This `post_batch_get_access_bindings` interceptor runs + before the `post_batch_get_access_bindings_with_metadata` interceptor. """ return response + def post_batch_get_access_bindings_with_metadata( + self, + response: analytics_admin.BatchGetAccessBindingsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + analytics_admin.BatchGetAccessBindingsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for batch_get_access_bindings + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnalyticsAdminService server but before it is returned to user code. + + We recommend only using this `post_batch_get_access_bindings_with_metadata` + interceptor in new development instead of the `post_batch_get_access_bindings` interceptor. + When both interceptors are used, this `post_batch_get_access_bindings_with_metadata` interceptor runs after the + `post_batch_get_access_bindings` interceptor. 
The (possibly modified) response returned by + `post_batch_get_access_bindings` will be passed to + `post_batch_get_access_bindings_with_metadata`. + """ + return response, metadata + def pre_batch_update_access_bindings( self, request: analytics_admin.BatchUpdateAccessBindingsRequest, @@ -1365,12 +1469,38 @@ def post_batch_update_access_bindings( ) -> analytics_admin.BatchUpdateAccessBindingsResponse: """Post-rpc interceptor for batch_update_access_bindings - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_batch_update_access_bindings_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AnalyticsAdminService server but before - it is returned to user code. + it is returned to user code. This `post_batch_update_access_bindings` interceptor runs + before the `post_batch_update_access_bindings_with_metadata` interceptor. """ return response + def post_batch_update_access_bindings_with_metadata( + self, + response: analytics_admin.BatchUpdateAccessBindingsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + analytics_admin.BatchUpdateAccessBindingsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for batch_update_access_bindings + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnalyticsAdminService server but before it is returned to user code. + + We recommend only using this `post_batch_update_access_bindings_with_metadata` + interceptor in new development instead of the `post_batch_update_access_bindings` interceptor. + When both interceptors are used, this `post_batch_update_access_bindings_with_metadata` interceptor runs after the + `post_batch_update_access_bindings` interceptor. The (possibly modified) response returned by + `post_batch_update_access_bindings` will be passed to + `post_batch_update_access_bindings_with_metadata`. + """ + return response, metadata + def pre_cancel_display_video360_advertiser_link_proposal( self, request: analytics_admin.CancelDisplayVideo360AdvertiserLinkProposalRequest, @@ -1391,12 +1521,38 @@ def post_cancel_display_video360_advertiser_link_proposal( ) -> resources.DisplayVideo360AdvertiserLinkProposal: """Post-rpc interceptor for cancel_display_video360_advertiser_link_proposal - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_cancel_display_video360_advertiser_link_proposal_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AnalyticsAdminService server but before - it is returned to user code. + it is returned to user code. This `post_cancel_display_video360_advertiser_link_proposal` interceptor runs + before the `post_cancel_display_video360_advertiser_link_proposal_with_metadata` interceptor. """ return response + def post_cancel_display_video360_advertiser_link_proposal_with_metadata( + self, + response: resources.DisplayVideo360AdvertiserLinkProposal, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + resources.DisplayVideo360AdvertiserLinkProposal, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for cancel_display_video360_advertiser_link_proposal + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnalyticsAdminService server but before it is returned to user code. 
+ + We recommend only using this `post_cancel_display_video360_advertiser_link_proposal_with_metadata` + interceptor in new development instead of the `post_cancel_display_video360_advertiser_link_proposal` interceptor. + When both interceptors are used, this `post_cancel_display_video360_advertiser_link_proposal_with_metadata` interceptor runs after the + `post_cancel_display_video360_advertiser_link_proposal` interceptor. The (possibly modified) response returned by + `post_cancel_display_video360_advertiser_link_proposal` will be passed to + `post_cancel_display_video360_advertiser_link_proposal_with_metadata`. + """ + return response, metadata + def pre_create_access_binding( self, request: analytics_admin.CreateAccessBindingRequest, @@ -1417,12 +1573,35 @@ def post_create_access_binding( ) -> resources.AccessBinding: """Post-rpc interceptor for create_access_binding - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_access_binding_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AnalyticsAdminService server but before - it is returned to user code. + it is returned to user code. This `post_create_access_binding` interceptor runs + before the `post_create_access_binding_with_metadata` interceptor. """ return response + def post_create_access_binding_with_metadata( + self, + response: resources.AccessBinding, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.AccessBinding, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_access_binding + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnalyticsAdminService server but before it is returned to user code. + + We recommend only using this `post_create_access_binding_with_metadata` + interceptor in new development instead of the `post_create_access_binding` interceptor. + When both interceptors are used, this `post_create_access_binding_with_metadata` interceptor runs after the + `post_create_access_binding` interceptor. The (possibly modified) response returned by + `post_create_access_binding` will be passed to + `post_create_access_binding_with_metadata`. + """ + return response, metadata + def pre_create_ad_sense_link( self, request: analytics_admin.CreateAdSenseLinkRequest, @@ -1443,12 +1622,35 @@ def post_create_ad_sense_link( ) -> resources.AdSenseLink: """Post-rpc interceptor for create_ad_sense_link - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_ad_sense_link_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AnalyticsAdminService server but before - it is returned to user code. + it is returned to user code. This `post_create_ad_sense_link` interceptor runs + before the `post_create_ad_sense_link_with_metadata` interceptor. """ return response + def post_create_ad_sense_link_with_metadata( + self, + response: resources.AdSenseLink, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.AdSenseLink, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_ad_sense_link + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnalyticsAdminService server but before it is returned to user code. 
+ + We recommend only using this `post_create_ad_sense_link_with_metadata` + interceptor in new development instead of the `post_create_ad_sense_link` interceptor. + When both interceptors are used, this `post_create_ad_sense_link_with_metadata` interceptor runs after the + `post_create_ad_sense_link` interceptor. The (possibly modified) response returned by + `post_create_ad_sense_link` will be passed to + `post_create_ad_sense_link_with_metadata`. + """ + return response, metadata + def pre_create_audience( self, request: analytics_admin.CreateAudienceRequest, @@ -1468,12 +1670,35 @@ def post_create_audience( ) -> gaa_audience.Audience: """Post-rpc interceptor for create_audience - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_audience_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AnalyticsAdminService server but before - it is returned to user code. + it is returned to user code. This `post_create_audience` interceptor runs + before the `post_create_audience_with_metadata` interceptor. """ return response + def post_create_audience_with_metadata( + self, + response: gaa_audience.Audience, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gaa_audience.Audience, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_audience + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnalyticsAdminService server but before it is returned to user code. + + We recommend only using this `post_create_audience_with_metadata` + interceptor in new development instead of the `post_create_audience` interceptor. + When both interceptors are used, this `post_create_audience_with_metadata` interceptor runs after the + `post_create_audience` interceptor. The (possibly modified) response returned by + `post_create_audience` will be passed to + `post_create_audience_with_metadata`. + """ + return response, metadata + def pre_create_big_query_link( self, request: analytics_admin.CreateBigQueryLinkRequest, @@ -1494,12 +1719,35 @@ def post_create_big_query_link( ) -> resources.BigQueryLink: """Post-rpc interceptor for create_big_query_link - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_big_query_link_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AnalyticsAdminService server but before - it is returned to user code. + it is returned to user code. This `post_create_big_query_link` interceptor runs + before the `post_create_big_query_link_with_metadata` interceptor. """ return response + def post_create_big_query_link_with_metadata( + self, + response: resources.BigQueryLink, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.BigQueryLink, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_big_query_link + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnalyticsAdminService server but before it is returned to user code. + + We recommend only using this `post_create_big_query_link_with_metadata` + interceptor in new development instead of the `post_create_big_query_link` interceptor. + When both interceptors are used, this `post_create_big_query_link_with_metadata` interceptor runs after the + `post_create_big_query_link` interceptor. 
The (possibly modified) response returned by + `post_create_big_query_link` will be passed to + `post_create_big_query_link_with_metadata`. + """ + return response, metadata + def pre_create_calculated_metric( self, request: analytics_admin.CreateCalculatedMetricRequest, @@ -1520,12 +1768,35 @@ def post_create_calculated_metric( ) -> resources.CalculatedMetric: """Post-rpc interceptor for create_calculated_metric - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_calculated_metric_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AnalyticsAdminService server but before - it is returned to user code. + it is returned to user code. This `post_create_calculated_metric` interceptor runs + before the `post_create_calculated_metric_with_metadata` interceptor. """ return response + def post_create_calculated_metric_with_metadata( + self, + response: resources.CalculatedMetric, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.CalculatedMetric, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_calculated_metric + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnalyticsAdminService server but before it is returned to user code. + + We recommend only using this `post_create_calculated_metric_with_metadata` + interceptor in new development instead of the `post_create_calculated_metric` interceptor. + When both interceptors are used, this `post_create_calculated_metric_with_metadata` interceptor runs after the + `post_create_calculated_metric` interceptor. The (possibly modified) response returned by + `post_create_calculated_metric` will be passed to + `post_create_calculated_metric_with_metadata`. + """ + return response, metadata + def pre_create_channel_group( self, request: analytics_admin.CreateChannelGroupRequest, @@ -1546,12 +1817,35 @@ def post_create_channel_group( ) -> gaa_channel_group.ChannelGroup: """Post-rpc interceptor for create_channel_group - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_channel_group_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AnalyticsAdminService server but before - it is returned to user code. + it is returned to user code. This `post_create_channel_group` interceptor runs + before the `post_create_channel_group_with_metadata` interceptor. """ return response + def post_create_channel_group_with_metadata( + self, + response: gaa_channel_group.ChannelGroup, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gaa_channel_group.ChannelGroup, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_channel_group + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnalyticsAdminService server but before it is returned to user code. + + We recommend only using this `post_create_channel_group_with_metadata` + interceptor in new development instead of the `post_create_channel_group` interceptor. + When both interceptors are used, this `post_create_channel_group_with_metadata` interceptor runs after the + `post_create_channel_group` interceptor. The (possibly modified) response returned by + `post_create_channel_group` will be passed to + `post_create_channel_group_with_metadata`. 
+ """ + return response, metadata + def pre_create_connected_site_tag( self, request: analytics_admin.CreateConnectedSiteTagRequest, @@ -1572,12 +1866,38 @@ def post_create_connected_site_tag( ) -> analytics_admin.CreateConnectedSiteTagResponse: """Post-rpc interceptor for create_connected_site_tag - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_connected_site_tag_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AnalyticsAdminService server but before - it is returned to user code. + it is returned to user code. This `post_create_connected_site_tag` interceptor runs + before the `post_create_connected_site_tag_with_metadata` interceptor. """ return response + def post_create_connected_site_tag_with_metadata( + self, + response: analytics_admin.CreateConnectedSiteTagResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + analytics_admin.CreateConnectedSiteTagResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for create_connected_site_tag + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnalyticsAdminService server but before it is returned to user code. + + We recommend only using this `post_create_connected_site_tag_with_metadata` + interceptor in new development instead of the `post_create_connected_site_tag` interceptor. + When both interceptors are used, this `post_create_connected_site_tag_with_metadata` interceptor runs after the + `post_create_connected_site_tag` interceptor. The (possibly modified) response returned by + `post_create_connected_site_tag` will be passed to + `post_create_connected_site_tag_with_metadata`. + """ + return response, metadata + def pre_create_conversion_event( self, request: analytics_admin.CreateConversionEventRequest, @@ -1598,12 +1918,35 @@ def post_create_conversion_event( ) -> resources.ConversionEvent: """Post-rpc interceptor for create_conversion_event - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_conversion_event_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AnalyticsAdminService server but before - it is returned to user code. + it is returned to user code. This `post_create_conversion_event` interceptor runs + before the `post_create_conversion_event_with_metadata` interceptor. """ return response + def post_create_conversion_event_with_metadata( + self, + response: resources.ConversionEvent, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.ConversionEvent, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_conversion_event + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnalyticsAdminService server but before it is returned to user code. + + We recommend only using this `post_create_conversion_event_with_metadata` + interceptor in new development instead of the `post_create_conversion_event` interceptor. + When both interceptors are used, this `post_create_conversion_event_with_metadata` interceptor runs after the + `post_create_conversion_event` interceptor. The (possibly modified) response returned by + `post_create_conversion_event` will be passed to + `post_create_conversion_event_with_metadata`. 
+ """ + return response, metadata + def pre_create_custom_dimension( self, request: analytics_admin.CreateCustomDimensionRequest, @@ -1624,12 +1967,35 @@ def post_create_custom_dimension( ) -> resources.CustomDimension: """Post-rpc interceptor for create_custom_dimension - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_custom_dimension_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AnalyticsAdminService server but before - it is returned to user code. + it is returned to user code. This `post_create_custom_dimension` interceptor runs + before the `post_create_custom_dimension_with_metadata` interceptor. """ return response + def post_create_custom_dimension_with_metadata( + self, + response: resources.CustomDimension, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.CustomDimension, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_custom_dimension + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnalyticsAdminService server but before it is returned to user code. + + We recommend only using this `post_create_custom_dimension_with_metadata` + interceptor in new development instead of the `post_create_custom_dimension` interceptor. + When both interceptors are used, this `post_create_custom_dimension_with_metadata` interceptor runs after the + `post_create_custom_dimension` interceptor. The (possibly modified) response returned by + `post_create_custom_dimension` will be passed to + `post_create_custom_dimension_with_metadata`. + """ + return response, metadata + def pre_create_custom_metric( self, request: analytics_admin.CreateCustomMetricRequest, @@ -1650,12 +2016,35 @@ def post_create_custom_metric( ) -> resources.CustomMetric: """Post-rpc interceptor for create_custom_metric - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_custom_metric_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AnalyticsAdminService server but before - it is returned to user code. + it is returned to user code. This `post_create_custom_metric` interceptor runs + before the `post_create_custom_metric_with_metadata` interceptor. """ return response + def post_create_custom_metric_with_metadata( + self, + response: resources.CustomMetric, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.CustomMetric, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_custom_metric + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnalyticsAdminService server but before it is returned to user code. + + We recommend only using this `post_create_custom_metric_with_metadata` + interceptor in new development instead of the `post_create_custom_metric` interceptor. + When both interceptors are used, this `post_create_custom_metric_with_metadata` interceptor runs after the + `post_create_custom_metric` interceptor. The (possibly modified) response returned by + `post_create_custom_metric` will be passed to + `post_create_custom_metric_with_metadata`. 
+ """ + return response, metadata + def pre_create_data_stream( self, request: analytics_admin.CreateDataStreamRequest, @@ -1675,12 +2064,35 @@ def post_create_data_stream( ) -> resources.DataStream: """Post-rpc interceptor for create_data_stream - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_data_stream_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AnalyticsAdminService server but before - it is returned to user code. + it is returned to user code. This `post_create_data_stream` interceptor runs + before the `post_create_data_stream_with_metadata` interceptor. """ return response + def post_create_data_stream_with_metadata( + self, + response: resources.DataStream, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.DataStream, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_data_stream + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnalyticsAdminService server but before it is returned to user code. + + We recommend only using this `post_create_data_stream_with_metadata` + interceptor in new development instead of the `post_create_data_stream` interceptor. + When both interceptors are used, this `post_create_data_stream_with_metadata` interceptor runs after the + `post_create_data_stream` interceptor. The (possibly modified) response returned by + `post_create_data_stream` will be passed to + `post_create_data_stream_with_metadata`. + """ + return response, metadata + def pre_create_display_video360_advertiser_link( self, request: analytics_admin.CreateDisplayVideo360AdvertiserLinkRequest, @@ -1701,12 +2113,37 @@ def post_create_display_video360_advertiser_link( ) -> resources.DisplayVideo360AdvertiserLink: """Post-rpc interceptor for create_display_video360_advertiser_link - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_display_video360_advertiser_link_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AnalyticsAdminService server but before - it is returned to user code. + it is returned to user code. This `post_create_display_video360_advertiser_link` interceptor runs + before the `post_create_display_video360_advertiser_link_with_metadata` interceptor. """ return response + def post_create_display_video360_advertiser_link_with_metadata( + self, + response: resources.DisplayVideo360AdvertiserLink, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + resources.DisplayVideo360AdvertiserLink, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for create_display_video360_advertiser_link + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnalyticsAdminService server but before it is returned to user code. + + We recommend only using this `post_create_display_video360_advertiser_link_with_metadata` + interceptor in new development instead of the `post_create_display_video360_advertiser_link` interceptor. + When both interceptors are used, this `post_create_display_video360_advertiser_link_with_metadata` interceptor runs after the + `post_create_display_video360_advertiser_link` interceptor. 
The (possibly modified) response returned by + `post_create_display_video360_advertiser_link` will be passed to + `post_create_display_video360_advertiser_link_with_metadata`. + """ + return response, metadata + def pre_create_display_video360_advertiser_link_proposal( self, request: analytics_admin.CreateDisplayVideo360AdvertiserLinkProposalRequest, @@ -1727,12 +2164,38 @@ def post_create_display_video360_advertiser_link_proposal( ) -> resources.DisplayVideo360AdvertiserLinkProposal: """Post-rpc interceptor for create_display_video360_advertiser_link_proposal - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_display_video360_advertiser_link_proposal_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AnalyticsAdminService server but before - it is returned to user code. + it is returned to user code. This `post_create_display_video360_advertiser_link_proposal` interceptor runs + before the `post_create_display_video360_advertiser_link_proposal_with_metadata` interceptor. """ return response + def post_create_display_video360_advertiser_link_proposal_with_metadata( + self, + response: resources.DisplayVideo360AdvertiserLinkProposal, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + resources.DisplayVideo360AdvertiserLinkProposal, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for create_display_video360_advertiser_link_proposal + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnalyticsAdminService server but before it is returned to user code. + + We recommend only using this `post_create_display_video360_advertiser_link_proposal_with_metadata` + interceptor in new development instead of the `post_create_display_video360_advertiser_link_proposal` interceptor. + When both interceptors are used, this `post_create_display_video360_advertiser_link_proposal_with_metadata` interceptor runs after the + `post_create_display_video360_advertiser_link_proposal` interceptor. The (possibly modified) response returned by + `post_create_display_video360_advertiser_link_proposal` will be passed to + `post_create_display_video360_advertiser_link_proposal_with_metadata`. + """ + return response, metadata + def pre_create_event_create_rule( self, request: analytics_admin.CreateEventCreateRuleRequest, @@ -1753,12 +2216,37 @@ def post_create_event_create_rule( ) -> event_create_and_edit.EventCreateRule: """Post-rpc interceptor for create_event_create_rule - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_event_create_rule_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AnalyticsAdminService server but before - it is returned to user code. + it is returned to user code. This `post_create_event_create_rule` interceptor runs + before the `post_create_event_create_rule_with_metadata` interceptor. 
""" return response + def post_create_event_create_rule_with_metadata( + self, + response: event_create_and_edit.EventCreateRule, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + event_create_and_edit.EventCreateRule, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for create_event_create_rule + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnalyticsAdminService server but before it is returned to user code. + + We recommend only using this `post_create_event_create_rule_with_metadata` + interceptor in new development instead of the `post_create_event_create_rule` interceptor. + When both interceptors are used, this `post_create_event_create_rule_with_metadata` interceptor runs after the + `post_create_event_create_rule` interceptor. The (possibly modified) response returned by + `post_create_event_create_rule` will be passed to + `post_create_event_create_rule_with_metadata`. + """ + return response, metadata + def pre_create_event_edit_rule( self, request: analytics_admin.CreateEventEditRuleRequest, @@ -1779,12 +2267,37 @@ def post_create_event_edit_rule( ) -> event_create_and_edit.EventEditRule: """Post-rpc interceptor for create_event_edit_rule - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_event_edit_rule_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AnalyticsAdminService server but before - it is returned to user code. + it is returned to user code. This `post_create_event_edit_rule` interceptor runs + before the `post_create_event_edit_rule_with_metadata` interceptor. """ return response + def post_create_event_edit_rule_with_metadata( + self, + response: event_create_and_edit.EventEditRule, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + event_create_and_edit.EventEditRule, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for create_event_edit_rule + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnalyticsAdminService server but before it is returned to user code. + + We recommend only using this `post_create_event_edit_rule_with_metadata` + interceptor in new development instead of the `post_create_event_edit_rule` interceptor. + When both interceptors are used, this `post_create_event_edit_rule_with_metadata` interceptor runs after the + `post_create_event_edit_rule` interceptor. The (possibly modified) response returned by + `post_create_event_edit_rule` will be passed to + `post_create_event_edit_rule_with_metadata`. + """ + return response, metadata + def pre_create_expanded_data_set( self, request: analytics_admin.CreateExpandedDataSetRequest, @@ -1805,12 +2318,37 @@ def post_create_expanded_data_set( ) -> gaa_expanded_data_set.ExpandedDataSet: """Post-rpc interceptor for create_expanded_data_set - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_expanded_data_set_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AnalyticsAdminService server but before - it is returned to user code. + it is returned to user code. This `post_create_expanded_data_set` interceptor runs + before the `post_create_expanded_data_set_with_metadata` interceptor. 
""" return response + def post_create_expanded_data_set_with_metadata( + self, + response: gaa_expanded_data_set.ExpandedDataSet, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + gaa_expanded_data_set.ExpandedDataSet, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for create_expanded_data_set + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnalyticsAdminService server but before it is returned to user code. + + We recommend only using this `post_create_expanded_data_set_with_metadata` + interceptor in new development instead of the `post_create_expanded_data_set` interceptor. + When both interceptors are used, this `post_create_expanded_data_set_with_metadata` interceptor runs after the + `post_create_expanded_data_set` interceptor. The (possibly modified) response returned by + `post_create_expanded_data_set` will be passed to + `post_create_expanded_data_set_with_metadata`. + """ + return response, metadata + def pre_create_firebase_link( self, request: analytics_admin.CreateFirebaseLinkRequest, @@ -1831,12 +2369,35 @@ def post_create_firebase_link( ) -> resources.FirebaseLink: """Post-rpc interceptor for create_firebase_link - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_firebase_link_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AnalyticsAdminService server but before - it is returned to user code. + it is returned to user code. This `post_create_firebase_link` interceptor runs + before the `post_create_firebase_link_with_metadata` interceptor. """ return response + def post_create_firebase_link_with_metadata( + self, + response: resources.FirebaseLink, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.FirebaseLink, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_firebase_link + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnalyticsAdminService server but before it is returned to user code. + + We recommend only using this `post_create_firebase_link_with_metadata` + interceptor in new development instead of the `post_create_firebase_link` interceptor. + When both interceptors are used, this `post_create_firebase_link_with_metadata` interceptor runs after the + `post_create_firebase_link` interceptor. The (possibly modified) response returned by + `post_create_firebase_link` will be passed to + `post_create_firebase_link_with_metadata`. + """ + return response, metadata + def pre_create_google_ads_link( self, request: analytics_admin.CreateGoogleAdsLinkRequest, @@ -1857,12 +2418,35 @@ def post_create_google_ads_link( ) -> resources.GoogleAdsLink: """Post-rpc interceptor for create_google_ads_link - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_google_ads_link_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AnalyticsAdminService server but before - it is returned to user code. + it is returned to user code. This `post_create_google_ads_link` interceptor runs + before the `post_create_google_ads_link_with_metadata` interceptor. 
""" return response + def post_create_google_ads_link_with_metadata( + self, + response: resources.GoogleAdsLink, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.GoogleAdsLink, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_google_ads_link + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnalyticsAdminService server but before it is returned to user code. + + We recommend only using this `post_create_google_ads_link_with_metadata` + interceptor in new development instead of the `post_create_google_ads_link` interceptor. + When both interceptors are used, this `post_create_google_ads_link_with_metadata` interceptor runs after the + `post_create_google_ads_link` interceptor. The (possibly modified) response returned by + `post_create_google_ads_link` will be passed to + `post_create_google_ads_link_with_metadata`. + """ + return response, metadata + def pre_create_key_event( self, request: analytics_admin.CreateKeyEventRequest, @@ -1880,12 +2464,35 @@ def pre_create_key_event( def post_create_key_event(self, response: resources.KeyEvent) -> resources.KeyEvent: """Post-rpc interceptor for create_key_event - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_key_event_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AnalyticsAdminService server but before - it is returned to user code. + it is returned to user code. This `post_create_key_event` interceptor runs + before the `post_create_key_event_with_metadata` interceptor. """ return response + def post_create_key_event_with_metadata( + self, + response: resources.KeyEvent, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.KeyEvent, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_key_event + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnalyticsAdminService server but before it is returned to user code. + + We recommend only using this `post_create_key_event_with_metadata` + interceptor in new development instead of the `post_create_key_event` interceptor. + When both interceptors are used, this `post_create_key_event_with_metadata` interceptor runs after the + `post_create_key_event` interceptor. The (possibly modified) response returned by + `post_create_key_event` will be passed to + `post_create_key_event_with_metadata`. + """ + return response, metadata + def pre_create_measurement_protocol_secret( self, request: analytics_admin.CreateMeasurementProtocolSecretRequest, @@ -1906,12 +2513,37 @@ def post_create_measurement_protocol_secret( ) -> resources.MeasurementProtocolSecret: """Post-rpc interceptor for create_measurement_protocol_secret - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_measurement_protocol_secret_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AnalyticsAdminService server but before - it is returned to user code. + it is returned to user code. This `post_create_measurement_protocol_secret` interceptor runs + before the `post_create_measurement_protocol_secret_with_metadata` interceptor. 
""" return response + def post_create_measurement_protocol_secret_with_metadata( + self, + response: resources.MeasurementProtocolSecret, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + resources.MeasurementProtocolSecret, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for create_measurement_protocol_secret + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnalyticsAdminService server but before it is returned to user code. + + We recommend only using this `post_create_measurement_protocol_secret_with_metadata` + interceptor in new development instead of the `post_create_measurement_protocol_secret` interceptor. + When both interceptors are used, this `post_create_measurement_protocol_secret_with_metadata` interceptor runs after the + `post_create_measurement_protocol_secret` interceptor. The (possibly modified) response returned by + `post_create_measurement_protocol_secret` will be passed to + `post_create_measurement_protocol_secret_with_metadata`. + """ + return response, metadata + def pre_create_property( self, request: analytics_admin.CreatePropertyRequest, @@ -1929,18 +2561,41 @@ def pre_create_property( def post_create_property(self, response: resources.Property) -> resources.Property: """Post-rpc interceptor for create_property - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_property_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AnalyticsAdminService server but before - it is returned to user code. + it is returned to user code. This `post_create_property` interceptor runs + before the `post_create_property_with_metadata` interceptor. """ return response - def pre_create_rollup_property( + def post_create_property_with_metadata( self, - request: analytics_admin.CreateRollupPropertyRequest, + response: resources.Property, metadata: Sequence[Tuple[str, Union[str, bytes]]], - ) -> Tuple[ - analytics_admin.CreateRollupPropertyRequest, + ) -> Tuple[resources.Property, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_property + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnalyticsAdminService server but before it is returned to user code. + + We recommend only using this `post_create_property_with_metadata` + interceptor in new development instead of the `post_create_property` interceptor. + When both interceptors are used, this `post_create_property_with_metadata` interceptor runs after the + `post_create_property` interceptor. The (possibly modified) response returned by + `post_create_property` will be passed to + `post_create_property_with_metadata`. + """ + return response, metadata + + def pre_create_rollup_property( + self, + request: analytics_admin.CreateRollupPropertyRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + analytics_admin.CreateRollupPropertyRequest, Sequence[Tuple[str, Union[str, bytes]]], ]: """Pre-rpc interceptor for create_rollup_property @@ -1955,12 +2610,38 @@ def post_create_rollup_property( ) -> analytics_admin.CreateRollupPropertyResponse: """Post-rpc interceptor for create_rollup_property - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_rollup_property_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response after it is returned by the AnalyticsAdminService server but before - it is returned to user code. + it is returned to user code. This `post_create_rollup_property` interceptor runs + before the `post_create_rollup_property_with_metadata` interceptor. """ return response + def post_create_rollup_property_with_metadata( + self, + response: analytics_admin.CreateRollupPropertyResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + analytics_admin.CreateRollupPropertyResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for create_rollup_property + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnalyticsAdminService server but before it is returned to user code. + + We recommend only using this `post_create_rollup_property_with_metadata` + interceptor in new development instead of the `post_create_rollup_property` interceptor. + When both interceptors are used, this `post_create_rollup_property_with_metadata` interceptor runs after the + `post_create_rollup_property` interceptor. The (possibly modified) response returned by + `post_create_rollup_property` will be passed to + `post_create_rollup_property_with_metadata`. + """ + return response, metadata + def pre_create_rollup_property_source_link( self, request: analytics_admin.CreateRollupPropertySourceLinkRequest, @@ -1981,12 +2662,37 @@ def post_create_rollup_property_source_link( ) -> resources.RollupPropertySourceLink: """Post-rpc interceptor for create_rollup_property_source_link - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_rollup_property_source_link_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AnalyticsAdminService server but before - it is returned to user code. + it is returned to user code. This `post_create_rollup_property_source_link` interceptor runs + before the `post_create_rollup_property_source_link_with_metadata` interceptor. """ return response + def post_create_rollup_property_source_link_with_metadata( + self, + response: resources.RollupPropertySourceLink, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + resources.RollupPropertySourceLink, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for create_rollup_property_source_link + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnalyticsAdminService server but before it is returned to user code. + + We recommend only using this `post_create_rollup_property_source_link_with_metadata` + interceptor in new development instead of the `post_create_rollup_property_source_link` interceptor. + When both interceptors are used, this `post_create_rollup_property_source_link_with_metadata` interceptor runs after the + `post_create_rollup_property_source_link` interceptor. The (possibly modified) response returned by + `post_create_rollup_property_source_link` will be passed to + `post_create_rollup_property_source_link_with_metadata`. + """ + return response, metadata + def pre_create_search_ads360_link( self, request: analytics_admin.CreateSearchAds360LinkRequest, @@ -2007,12 +2713,35 @@ def post_create_search_ads360_link( ) -> resources.SearchAds360Link: """Post-rpc interceptor for create_search_ads360_link - Override in a subclass to manipulate the response + DEPRECATED. 
Please use the `post_create_search_ads360_link_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AnalyticsAdminService server but before - it is returned to user code. + it is returned to user code. This `post_create_search_ads360_link` interceptor runs + before the `post_create_search_ads360_link_with_metadata` interceptor. """ return response + def post_create_search_ads360_link_with_metadata( + self, + response: resources.SearchAds360Link, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.SearchAds360Link, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_search_ads360_link + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnalyticsAdminService server but before it is returned to user code. + + We recommend only using this `post_create_search_ads360_link_with_metadata` + interceptor in new development instead of the `post_create_search_ads360_link` interceptor. + When both interceptors are used, this `post_create_search_ads360_link_with_metadata` interceptor runs after the + `post_create_search_ads360_link` interceptor. The (possibly modified) response returned by + `post_create_search_ads360_link` will be passed to + `post_create_search_ads360_link_with_metadata`. + """ + return response, metadata + def pre_create_sk_ad_network_conversion_value_schema( self, request: analytics_admin.CreateSKAdNetworkConversionValueSchemaRequest, @@ -2033,12 +2762,38 @@ def post_create_sk_ad_network_conversion_value_schema( ) -> resources.SKAdNetworkConversionValueSchema: """Post-rpc interceptor for create_sk_ad_network_conversion_value_schema - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_sk_ad_network_conversion_value_schema_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AnalyticsAdminService server but before - it is returned to user code. + it is returned to user code. This `post_create_sk_ad_network_conversion_value_schema` interceptor runs + before the `post_create_sk_ad_network_conversion_value_schema_with_metadata` interceptor. """ return response + def post_create_sk_ad_network_conversion_value_schema_with_metadata( + self, + response: resources.SKAdNetworkConversionValueSchema, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + resources.SKAdNetworkConversionValueSchema, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for create_sk_ad_network_conversion_value_schema + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnalyticsAdminService server but before it is returned to user code. + + We recommend only using this `post_create_sk_ad_network_conversion_value_schema_with_metadata` + interceptor in new development instead of the `post_create_sk_ad_network_conversion_value_schema` interceptor. + When both interceptors are used, this `post_create_sk_ad_network_conversion_value_schema_with_metadata` interceptor runs after the + `post_create_sk_ad_network_conversion_value_schema` interceptor. The (possibly modified) response returned by + `post_create_sk_ad_network_conversion_value_schema` will be passed to + `post_create_sk_ad_network_conversion_value_schema_with_metadata`. 
+ """ + return response, metadata + def pre_create_subproperty_event_filter( self, request: analytics_admin.CreateSubpropertyEventFilterRequest, @@ -2059,12 +2814,38 @@ def post_create_subproperty_event_filter( ) -> gaa_subproperty_event_filter.SubpropertyEventFilter: """Post-rpc interceptor for create_subproperty_event_filter - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_subproperty_event_filter_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AnalyticsAdminService server but before - it is returned to user code. + it is returned to user code. This `post_create_subproperty_event_filter` interceptor runs + before the `post_create_subproperty_event_filter_with_metadata` interceptor. """ return response + def post_create_subproperty_event_filter_with_metadata( + self, + response: gaa_subproperty_event_filter.SubpropertyEventFilter, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + gaa_subproperty_event_filter.SubpropertyEventFilter, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for create_subproperty_event_filter + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnalyticsAdminService server but before it is returned to user code. + + We recommend only using this `post_create_subproperty_event_filter_with_metadata` + interceptor in new development instead of the `post_create_subproperty_event_filter` interceptor. + When both interceptors are used, this `post_create_subproperty_event_filter_with_metadata` interceptor runs after the + `post_create_subproperty_event_filter` interceptor. The (possibly modified) response returned by + `post_create_subproperty_event_filter` will be passed to + `post_create_subproperty_event_filter_with_metadata`. + """ + return response, metadata + def pre_delete_access_binding( self, request: analytics_admin.DeleteAccessBindingRequest, @@ -2349,12 +3130,35 @@ def pre_delete_property( def post_delete_property(self, response: resources.Property) -> resources.Property: """Post-rpc interceptor for delete_property - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_property_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AnalyticsAdminService server but before - it is returned to user code. + it is returned to user code. This `post_delete_property` interceptor runs + before the `post_delete_property_with_metadata` interceptor. """ return response + def post_delete_property_with_metadata( + self, + response: resources.Property, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.Property, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_property + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnalyticsAdminService server but before it is returned to user code. + + We recommend only using this `post_delete_property_with_metadata` + interceptor in new development instead of the `post_delete_property` interceptor. + When both interceptors are used, this `post_delete_property_with_metadata` interceptor runs after the + `post_delete_property` interceptor. The (possibly modified) response returned by + `post_delete_property` will be passed to + `post_delete_property_with_metadata`. 
+ """ + return response, metadata + def pre_delete_rollup_property_source_link( self, request: analytics_admin.DeleteRollupPropertySourceLinkRequest, @@ -2435,12 +3239,38 @@ def post_fetch_automated_ga4_configuration_opt_out( ) -> analytics_admin.FetchAutomatedGa4ConfigurationOptOutResponse: """Post-rpc interceptor for fetch_automated_ga4_configuration_opt_out - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_fetch_automated_ga4_configuration_opt_out_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AnalyticsAdminService server but before - it is returned to user code. + it is returned to user code. This `post_fetch_automated_ga4_configuration_opt_out` interceptor runs + before the `post_fetch_automated_ga4_configuration_opt_out_with_metadata` interceptor. """ return response + def post_fetch_automated_ga4_configuration_opt_out_with_metadata( + self, + response: analytics_admin.FetchAutomatedGa4ConfigurationOptOutResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + analytics_admin.FetchAutomatedGa4ConfigurationOptOutResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for fetch_automated_ga4_configuration_opt_out + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnalyticsAdminService server but before it is returned to user code. + + We recommend only using this `post_fetch_automated_ga4_configuration_opt_out_with_metadata` + interceptor in new development instead of the `post_fetch_automated_ga4_configuration_opt_out` interceptor. + When both interceptors are used, this `post_fetch_automated_ga4_configuration_opt_out_with_metadata` interceptor runs after the + `post_fetch_automated_ga4_configuration_opt_out` interceptor. The (possibly modified) response returned by + `post_fetch_automated_ga4_configuration_opt_out` will be passed to + `post_fetch_automated_ga4_configuration_opt_out_with_metadata`. + """ + return response, metadata + def pre_fetch_connected_ga4_property( self, request: analytics_admin.FetchConnectedGa4PropertyRequest, @@ -2461,12 +3291,38 @@ def post_fetch_connected_ga4_property( ) -> analytics_admin.FetchConnectedGa4PropertyResponse: """Post-rpc interceptor for fetch_connected_ga4_property - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_fetch_connected_ga4_property_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AnalyticsAdminService server but before - it is returned to user code. + it is returned to user code. This `post_fetch_connected_ga4_property` interceptor runs + before the `post_fetch_connected_ga4_property_with_metadata` interceptor. """ return response + def post_fetch_connected_ga4_property_with_metadata( + self, + response: analytics_admin.FetchConnectedGa4PropertyResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + analytics_admin.FetchConnectedGa4PropertyResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for fetch_connected_ga4_property + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnalyticsAdminService server but before it is returned to user code. 
+ + We recommend only using this `post_fetch_connected_ga4_property_with_metadata` + interceptor in new development instead of the `post_fetch_connected_ga4_property` interceptor. + When both interceptors are used, this `post_fetch_connected_ga4_property_with_metadata` interceptor runs after the + `post_fetch_connected_ga4_property` interceptor. The (possibly modified) response returned by + `post_fetch_connected_ga4_property` will be passed to + `post_fetch_connected_ga4_property_with_metadata`. + """ + return response, metadata + def pre_get_access_binding( self, request: analytics_admin.GetAccessBindingRequest, @@ -2486,12 +3342,35 @@ def post_get_access_binding( ) -> resources.AccessBinding: """Post-rpc interceptor for get_access_binding - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_access_binding_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AnalyticsAdminService server but before - it is returned to user code. + it is returned to user code. This `post_get_access_binding` interceptor runs + before the `post_get_access_binding_with_metadata` interceptor. """ return response + def post_get_access_binding_with_metadata( + self, + response: resources.AccessBinding, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.AccessBinding, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_access_binding + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnalyticsAdminService server but before it is returned to user code. + + We recommend only using this `post_get_access_binding_with_metadata` + interceptor in new development instead of the `post_get_access_binding` interceptor. + When both interceptors are used, this `post_get_access_binding_with_metadata` interceptor runs after the + `post_get_access_binding` interceptor. The (possibly modified) response returned by + `post_get_access_binding` will be passed to + `post_get_access_binding_with_metadata`. + """ + return response, metadata + def pre_get_account( self, request: analytics_admin.GetAccountRequest, @@ -2509,12 +3388,35 @@ def pre_get_account( def post_get_account(self, response: resources.Account) -> resources.Account: """Post-rpc interceptor for get_account - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_account_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AnalyticsAdminService server but before - it is returned to user code. + it is returned to user code. This `post_get_account` interceptor runs + before the `post_get_account_with_metadata` interceptor. """ return response + def post_get_account_with_metadata( + self, + response: resources.Account, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.Account, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_account + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnalyticsAdminService server but before it is returned to user code. + + We recommend only using this `post_get_account_with_metadata` + interceptor in new development instead of the `post_get_account` interceptor. + When both interceptors are used, this `post_get_account_with_metadata` interceptor runs after the + `post_get_account` interceptor. 
The (possibly modified) response returned by + `post_get_account` will be passed to + `post_get_account_with_metadata`. + """ + return response, metadata + def pre_get_ad_sense_link( self, request: analytics_admin.GetAdSenseLinkRequest, @@ -2534,12 +3436,35 @@ def post_get_ad_sense_link( ) -> resources.AdSenseLink: """Post-rpc interceptor for get_ad_sense_link - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_ad_sense_link_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AnalyticsAdminService server but before - it is returned to user code. + it is returned to user code. This `post_get_ad_sense_link` interceptor runs + before the `post_get_ad_sense_link_with_metadata` interceptor. """ return response + def post_get_ad_sense_link_with_metadata( + self, + response: resources.AdSenseLink, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.AdSenseLink, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_ad_sense_link + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnalyticsAdminService server but before it is returned to user code. + + We recommend only using this `post_get_ad_sense_link_with_metadata` + interceptor in new development instead of the `post_get_ad_sense_link` interceptor. + When both interceptors are used, this `post_get_ad_sense_link_with_metadata` interceptor runs after the + `post_get_ad_sense_link` interceptor. The (possibly modified) response returned by + `post_get_ad_sense_link` will be passed to + `post_get_ad_sense_link_with_metadata`. + """ + return response, metadata + def pre_get_attribution_settings( self, request: analytics_admin.GetAttributionSettingsRequest, @@ -2560,12 +3485,35 @@ def post_get_attribution_settings( ) -> resources.AttributionSettings: """Post-rpc interceptor for get_attribution_settings - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_attribution_settings_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AnalyticsAdminService server but before - it is returned to user code. + it is returned to user code. This `post_get_attribution_settings` interceptor runs + before the `post_get_attribution_settings_with_metadata` interceptor. """ return response + def post_get_attribution_settings_with_metadata( + self, + response: resources.AttributionSettings, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.AttributionSettings, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_attribution_settings + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnalyticsAdminService server but before it is returned to user code. + + We recommend only using this `post_get_attribution_settings_with_metadata` + interceptor in new development instead of the `post_get_attribution_settings` interceptor. + When both interceptors are used, this `post_get_attribution_settings_with_metadata` interceptor runs after the + `post_get_attribution_settings` interceptor. The (possibly modified) response returned by + `post_get_attribution_settings` will be passed to + `post_get_attribution_settings_with_metadata`. 
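# Illustrative sketch, not generated code: the pre_* hooks keep the same
# (request, metadata) -> (request, metadata) shape, so request metadata can be
# extended before the call is sent. The header name below is hypothetical.
from typing import Sequence, Tuple, Union

from google.analytics.admin_v1alpha.services.analytics_admin_service.transports.rest import (
    AnalyticsAdminServiceRestInterceptor,
)
from google.analytics.admin_v1alpha.types import analytics_admin


class RequestTaggingInterceptor(AnalyticsAdminServiceRestInterceptor):
    def pre_get_account(
        self,
        request: analytics_admin.GetAccountRequest,
        metadata: Sequence[Tuple[str, Union[str, bytes]]],
    ) -> Tuple[
        analytics_admin.GetAccountRequest, Sequence[Tuple[str, Union[str, bytes]]]
    ]:
        # Append a (hypothetical) tracing header without mutating the original sequence.
        metadata = list(metadata) + [("x-example-trace", "get-account")]
        return request, metadata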
+ """ + return response, metadata + def pre_get_audience( self, request: analytics_admin.GetAudienceRequest, @@ -2583,12 +3531,35 @@ def pre_get_audience( def post_get_audience(self, response: audience.Audience) -> audience.Audience: """Post-rpc interceptor for get_audience - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_audience_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AnalyticsAdminService server but before - it is returned to user code. + it is returned to user code. This `post_get_audience` interceptor runs + before the `post_get_audience_with_metadata` interceptor. """ return response + def post_get_audience_with_metadata( + self, + response: audience.Audience, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[audience.Audience, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_audience + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnalyticsAdminService server but before it is returned to user code. + + We recommend only using this `post_get_audience_with_metadata` + interceptor in new development instead of the `post_get_audience` interceptor. + When both interceptors are used, this `post_get_audience_with_metadata` interceptor runs after the + `post_get_audience` interceptor. The (possibly modified) response returned by + `post_get_audience` will be passed to + `post_get_audience_with_metadata`. + """ + return response, metadata + def pre_get_big_query_link( self, request: analytics_admin.GetBigQueryLinkRequest, @@ -2608,12 +3579,35 @@ def post_get_big_query_link( ) -> resources.BigQueryLink: """Post-rpc interceptor for get_big_query_link - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_big_query_link_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AnalyticsAdminService server but before - it is returned to user code. + it is returned to user code. This `post_get_big_query_link` interceptor runs + before the `post_get_big_query_link_with_metadata` interceptor. """ return response + def post_get_big_query_link_with_metadata( + self, + response: resources.BigQueryLink, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.BigQueryLink, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_big_query_link + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnalyticsAdminService server but before it is returned to user code. + + We recommend only using this `post_get_big_query_link_with_metadata` + interceptor in new development instead of the `post_get_big_query_link` interceptor. + When both interceptors are used, this `post_get_big_query_link_with_metadata` interceptor runs after the + `post_get_big_query_link` interceptor. The (possibly modified) response returned by + `post_get_big_query_link` will be passed to + `post_get_big_query_link_with_metadata`. + """ + return response, metadata + def pre_get_calculated_metric( self, request: analytics_admin.GetCalculatedMetricRequest, @@ -2634,12 +3628,35 @@ def post_get_calculated_metric( ) -> resources.CalculatedMetric: """Post-rpc interceptor for get_calculated_metric - Override in a subclass to manipulate the response + DEPRECATED. 
Please use the `post_get_calculated_metric_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AnalyticsAdminService server but before - it is returned to user code. + it is returned to user code. This `post_get_calculated_metric` interceptor runs + before the `post_get_calculated_metric_with_metadata` interceptor. """ return response + def post_get_calculated_metric_with_metadata( + self, + response: resources.CalculatedMetric, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.CalculatedMetric, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_calculated_metric + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnalyticsAdminService server but before it is returned to user code. + + We recommend only using this `post_get_calculated_metric_with_metadata` + interceptor in new development instead of the `post_get_calculated_metric` interceptor. + When both interceptors are used, this `post_get_calculated_metric_with_metadata` interceptor runs after the + `post_get_calculated_metric` interceptor. The (possibly modified) response returned by + `post_get_calculated_metric` will be passed to + `post_get_calculated_metric_with_metadata`. + """ + return response, metadata + def pre_get_channel_group( self, request: analytics_admin.GetChannelGroupRequest, @@ -2659,12 +3676,35 @@ def post_get_channel_group( ) -> channel_group.ChannelGroup: """Post-rpc interceptor for get_channel_group - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_channel_group_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AnalyticsAdminService server but before - it is returned to user code. + it is returned to user code. This `post_get_channel_group` interceptor runs + before the `post_get_channel_group_with_metadata` interceptor. """ return response + def post_get_channel_group_with_metadata( + self, + response: channel_group.ChannelGroup, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[channel_group.ChannelGroup, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_channel_group + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnalyticsAdminService server but before it is returned to user code. + + We recommend only using this `post_get_channel_group_with_metadata` + interceptor in new development instead of the `post_get_channel_group` interceptor. + When both interceptors are used, this `post_get_channel_group_with_metadata` interceptor runs after the + `post_get_channel_group` interceptor. The (possibly modified) response returned by + `post_get_channel_group` will be passed to + `post_get_channel_group_with_metadata`. + """ + return response, metadata + def pre_get_conversion_event( self, request: analytics_admin.GetConversionEventRequest, @@ -2685,12 +3725,35 @@ def post_get_conversion_event( ) -> resources.ConversionEvent: """Post-rpc interceptor for get_conversion_event - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_conversion_event_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AnalyticsAdminService server but before - it is returned to user code. + it is returned to user code. 
This `post_get_conversion_event` interceptor runs + before the `post_get_conversion_event_with_metadata` interceptor. """ return response + def post_get_conversion_event_with_metadata( + self, + response: resources.ConversionEvent, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.ConversionEvent, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_conversion_event + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnalyticsAdminService server but before it is returned to user code. + + We recommend only using this `post_get_conversion_event_with_metadata` + interceptor in new development instead of the `post_get_conversion_event` interceptor. + When both interceptors are used, this `post_get_conversion_event_with_metadata` interceptor runs after the + `post_get_conversion_event` interceptor. The (possibly modified) response returned by + `post_get_conversion_event` will be passed to + `post_get_conversion_event_with_metadata`. + """ + return response, metadata + def pre_get_custom_dimension( self, request: analytics_admin.GetCustomDimensionRequest, @@ -2711,12 +3774,35 @@ def post_get_custom_dimension( ) -> resources.CustomDimension: """Post-rpc interceptor for get_custom_dimension - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_custom_dimension_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AnalyticsAdminService server but before - it is returned to user code. + it is returned to user code. This `post_get_custom_dimension` interceptor runs + before the `post_get_custom_dimension_with_metadata` interceptor. """ return response + def post_get_custom_dimension_with_metadata( + self, + response: resources.CustomDimension, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.CustomDimension, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_custom_dimension + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnalyticsAdminService server but before it is returned to user code. + + We recommend only using this `post_get_custom_dimension_with_metadata` + interceptor in new development instead of the `post_get_custom_dimension` interceptor. + When both interceptors are used, this `post_get_custom_dimension_with_metadata` interceptor runs after the + `post_get_custom_dimension` interceptor. The (possibly modified) response returned by + `post_get_custom_dimension` will be passed to + `post_get_custom_dimension_with_metadata`. + """ + return response, metadata + def pre_get_custom_metric( self, request: analytics_admin.GetCustomMetricRequest, @@ -2736,12 +3822,35 @@ def post_get_custom_metric( ) -> resources.CustomMetric: """Post-rpc interceptor for get_custom_metric - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_custom_metric_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AnalyticsAdminService server but before - it is returned to user code. + it is returned to user code. This `post_get_custom_metric` interceptor runs + before the `post_get_custom_metric_with_metadata` interceptor. 
""" return response + def post_get_custom_metric_with_metadata( + self, + response: resources.CustomMetric, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.CustomMetric, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_custom_metric + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnalyticsAdminService server but before it is returned to user code. + + We recommend only using this `post_get_custom_metric_with_metadata` + interceptor in new development instead of the `post_get_custom_metric` interceptor. + When both interceptors are used, this `post_get_custom_metric_with_metadata` interceptor runs after the + `post_get_custom_metric` interceptor. The (possibly modified) response returned by + `post_get_custom_metric` will be passed to + `post_get_custom_metric_with_metadata`. + """ + return response, metadata + def pre_get_data_redaction_settings( self, request: analytics_admin.GetDataRedactionSettingsRequest, @@ -2762,12 +3871,37 @@ def post_get_data_redaction_settings( ) -> resources.DataRedactionSettings: """Post-rpc interceptor for get_data_redaction_settings - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_data_redaction_settings_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AnalyticsAdminService server but before - it is returned to user code. + it is returned to user code. This `post_get_data_redaction_settings` interceptor runs + before the `post_get_data_redaction_settings_with_metadata` interceptor. """ return response + def post_get_data_redaction_settings_with_metadata( + self, + response: resources.DataRedactionSettings, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + resources.DataRedactionSettings, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for get_data_redaction_settings + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnalyticsAdminService server but before it is returned to user code. + + We recommend only using this `post_get_data_redaction_settings_with_metadata` + interceptor in new development instead of the `post_get_data_redaction_settings` interceptor. + When both interceptors are used, this `post_get_data_redaction_settings_with_metadata` interceptor runs after the + `post_get_data_redaction_settings` interceptor. The (possibly modified) response returned by + `post_get_data_redaction_settings` will be passed to + `post_get_data_redaction_settings_with_metadata`. + """ + return response, metadata + def pre_get_data_retention_settings( self, request: analytics_admin.GetDataRetentionSettingsRequest, @@ -2788,12 +3922,37 @@ def post_get_data_retention_settings( ) -> resources.DataRetentionSettings: """Post-rpc interceptor for get_data_retention_settings - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_data_retention_settings_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AnalyticsAdminService server but before - it is returned to user code. + it is returned to user code. This `post_get_data_retention_settings` interceptor runs + before the `post_get_data_retention_settings_with_metadata` interceptor. 
""" return response + def post_get_data_retention_settings_with_metadata( + self, + response: resources.DataRetentionSettings, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + resources.DataRetentionSettings, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for get_data_retention_settings + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnalyticsAdminService server but before it is returned to user code. + + We recommend only using this `post_get_data_retention_settings_with_metadata` + interceptor in new development instead of the `post_get_data_retention_settings` interceptor. + When both interceptors are used, this `post_get_data_retention_settings_with_metadata` interceptor runs after the + `post_get_data_retention_settings` interceptor. The (possibly modified) response returned by + `post_get_data_retention_settings` will be passed to + `post_get_data_retention_settings_with_metadata`. + """ + return response, metadata + def pre_get_data_sharing_settings( self, request: analytics_admin.GetDataSharingSettingsRequest, @@ -2814,12 +3973,35 @@ def post_get_data_sharing_settings( ) -> resources.DataSharingSettings: """Post-rpc interceptor for get_data_sharing_settings - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_data_sharing_settings_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AnalyticsAdminService server but before - it is returned to user code. + it is returned to user code. This `post_get_data_sharing_settings` interceptor runs + before the `post_get_data_sharing_settings_with_metadata` interceptor. """ return response + def post_get_data_sharing_settings_with_metadata( + self, + response: resources.DataSharingSettings, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.DataSharingSettings, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_data_sharing_settings + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnalyticsAdminService server but before it is returned to user code. + + We recommend only using this `post_get_data_sharing_settings_with_metadata` + interceptor in new development instead of the `post_get_data_sharing_settings` interceptor. + When both interceptors are used, this `post_get_data_sharing_settings_with_metadata` interceptor runs after the + `post_get_data_sharing_settings` interceptor. The (possibly modified) response returned by + `post_get_data_sharing_settings` will be passed to + `post_get_data_sharing_settings_with_metadata`. + """ + return response, metadata + def pre_get_data_stream( self, request: analytics_admin.GetDataStreamRequest, @@ -2839,12 +4021,35 @@ def post_get_data_stream( ) -> resources.DataStream: """Post-rpc interceptor for get_data_stream - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_data_stream_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AnalyticsAdminService server but before - it is returned to user code. + it is returned to user code. This `post_get_data_stream` interceptor runs + before the `post_get_data_stream_with_metadata` interceptor. 
""" return response + def post_get_data_stream_with_metadata( + self, + response: resources.DataStream, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.DataStream, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_data_stream + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnalyticsAdminService server but before it is returned to user code. + + We recommend only using this `post_get_data_stream_with_metadata` + interceptor in new development instead of the `post_get_data_stream` interceptor. + When both interceptors are used, this `post_get_data_stream_with_metadata` interceptor runs after the + `post_get_data_stream` interceptor. The (possibly modified) response returned by + `post_get_data_stream` will be passed to + `post_get_data_stream_with_metadata`. + """ + return response, metadata + def pre_get_display_video360_advertiser_link( self, request: analytics_admin.GetDisplayVideo360AdvertiserLinkRequest, @@ -2865,12 +4070,37 @@ def post_get_display_video360_advertiser_link( ) -> resources.DisplayVideo360AdvertiserLink: """Post-rpc interceptor for get_display_video360_advertiser_link - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_display_video360_advertiser_link_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AnalyticsAdminService server but before - it is returned to user code. + it is returned to user code. This `post_get_display_video360_advertiser_link` interceptor runs + before the `post_get_display_video360_advertiser_link_with_metadata` interceptor. """ return response + def post_get_display_video360_advertiser_link_with_metadata( + self, + response: resources.DisplayVideo360AdvertiserLink, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + resources.DisplayVideo360AdvertiserLink, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for get_display_video360_advertiser_link + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnalyticsAdminService server but before it is returned to user code. + + We recommend only using this `post_get_display_video360_advertiser_link_with_metadata` + interceptor in new development instead of the `post_get_display_video360_advertiser_link` interceptor. + When both interceptors are used, this `post_get_display_video360_advertiser_link_with_metadata` interceptor runs after the + `post_get_display_video360_advertiser_link` interceptor. The (possibly modified) response returned by + `post_get_display_video360_advertiser_link` will be passed to + `post_get_display_video360_advertiser_link_with_metadata`. + """ + return response, metadata + def pre_get_display_video360_advertiser_link_proposal( self, request: analytics_admin.GetDisplayVideo360AdvertiserLinkProposalRequest, @@ -2891,12 +4121,38 @@ def post_get_display_video360_advertiser_link_proposal( ) -> resources.DisplayVideo360AdvertiserLinkProposal: """Post-rpc interceptor for get_display_video360_advertiser_link_proposal - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_display_video360_advertiser_link_proposal_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AnalyticsAdminService server but before - it is returned to user code. + it is returned to user code. 
This `post_get_display_video360_advertiser_link_proposal` interceptor runs + before the `post_get_display_video360_advertiser_link_proposal_with_metadata` interceptor. """ return response + def post_get_display_video360_advertiser_link_proposal_with_metadata( + self, + response: resources.DisplayVideo360AdvertiserLinkProposal, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + resources.DisplayVideo360AdvertiserLinkProposal, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for get_display_video360_advertiser_link_proposal + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnalyticsAdminService server but before it is returned to user code. + + We recommend only using this `post_get_display_video360_advertiser_link_proposal_with_metadata` + interceptor in new development instead of the `post_get_display_video360_advertiser_link_proposal` interceptor. + When both interceptors are used, this `post_get_display_video360_advertiser_link_proposal_with_metadata` interceptor runs after the + `post_get_display_video360_advertiser_link_proposal` interceptor. The (possibly modified) response returned by + `post_get_display_video360_advertiser_link_proposal` will be passed to + `post_get_display_video360_advertiser_link_proposal_with_metadata`. + """ + return response, metadata + def pre_get_enhanced_measurement_settings( self, request: analytics_admin.GetEnhancedMeasurementSettingsRequest, @@ -2917,12 +4173,37 @@ def post_get_enhanced_measurement_settings( ) -> resources.EnhancedMeasurementSettings: """Post-rpc interceptor for get_enhanced_measurement_settings - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_enhanced_measurement_settings_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AnalyticsAdminService server but before - it is returned to user code. + it is returned to user code. This `post_get_enhanced_measurement_settings` interceptor runs + before the `post_get_enhanced_measurement_settings_with_metadata` interceptor. """ return response + def post_get_enhanced_measurement_settings_with_metadata( + self, + response: resources.EnhancedMeasurementSettings, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + resources.EnhancedMeasurementSettings, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for get_enhanced_measurement_settings + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnalyticsAdminService server but before it is returned to user code. + + We recommend only using this `post_get_enhanced_measurement_settings_with_metadata` + interceptor in new development instead of the `post_get_enhanced_measurement_settings` interceptor. + When both interceptors are used, this `post_get_enhanced_measurement_settings_with_metadata` interceptor runs after the + `post_get_enhanced_measurement_settings` interceptor. The (possibly modified) response returned by + `post_get_enhanced_measurement_settings` will be passed to + `post_get_enhanced_measurement_settings_with_metadata`. 
+ """ + return response, metadata + def pre_get_event_create_rule( self, request: analytics_admin.GetEventCreateRuleRequest, @@ -2943,15 +4224,40 @@ def post_get_event_create_rule( ) -> event_create_and_edit.EventCreateRule: """Post-rpc interceptor for get_event_create_rule - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_event_create_rule_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AnalyticsAdminService server but before - it is returned to user code. + it is returned to user code. This `post_get_event_create_rule` interceptor runs + before the `post_get_event_create_rule_with_metadata` interceptor. """ return response - def pre_get_event_edit_rule( + def post_get_event_create_rule_with_metadata( self, - request: analytics_admin.GetEventEditRuleRequest, + response: event_create_and_edit.EventCreateRule, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + event_create_and_edit.EventCreateRule, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for get_event_create_rule + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnalyticsAdminService server but before it is returned to user code. + + We recommend only using this `post_get_event_create_rule_with_metadata` + interceptor in new development instead of the `post_get_event_create_rule` interceptor. + When both interceptors are used, this `post_get_event_create_rule_with_metadata` interceptor runs after the + `post_get_event_create_rule` interceptor. The (possibly modified) response returned by + `post_get_event_create_rule` will be passed to + `post_get_event_create_rule_with_metadata`. + """ + return response, metadata + + def pre_get_event_edit_rule( + self, + request: analytics_admin.GetEventEditRuleRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]], ) -> Tuple[ analytics_admin.GetEventEditRuleRequest, Sequence[Tuple[str, Union[str, bytes]]] @@ -2968,12 +4274,37 @@ def post_get_event_edit_rule( ) -> event_create_and_edit.EventEditRule: """Post-rpc interceptor for get_event_edit_rule - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_event_edit_rule_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AnalyticsAdminService server but before - it is returned to user code. + it is returned to user code. This `post_get_event_edit_rule` interceptor runs + before the `post_get_event_edit_rule_with_metadata` interceptor. """ return response + def post_get_event_edit_rule_with_metadata( + self, + response: event_create_and_edit.EventEditRule, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + event_create_and_edit.EventEditRule, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for get_event_edit_rule + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnalyticsAdminService server but before it is returned to user code. + + We recommend only using this `post_get_event_edit_rule_with_metadata` + interceptor in new development instead of the `post_get_event_edit_rule` interceptor. + When both interceptors are used, this `post_get_event_edit_rule_with_metadata` interceptor runs after the + `post_get_event_edit_rule` interceptor. 
The (possibly modified) response returned by + `post_get_event_edit_rule` will be passed to + `post_get_event_edit_rule_with_metadata`. + """ + return response, metadata + def pre_get_expanded_data_set( self, request: analytics_admin.GetExpandedDataSetRequest, @@ -2994,12 +4325,37 @@ def post_get_expanded_data_set( ) -> expanded_data_set.ExpandedDataSet: """Post-rpc interceptor for get_expanded_data_set - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_expanded_data_set_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AnalyticsAdminService server but before - it is returned to user code. + it is returned to user code. This `post_get_expanded_data_set` interceptor runs + before the `post_get_expanded_data_set_with_metadata` interceptor. """ return response + def post_get_expanded_data_set_with_metadata( + self, + response: expanded_data_set.ExpandedDataSet, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + expanded_data_set.ExpandedDataSet, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for get_expanded_data_set + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnalyticsAdminService server but before it is returned to user code. + + We recommend only using this `post_get_expanded_data_set_with_metadata` + interceptor in new development instead of the `post_get_expanded_data_set` interceptor. + When both interceptors are used, this `post_get_expanded_data_set_with_metadata` interceptor runs after the + `post_get_expanded_data_set` interceptor. The (possibly modified) response returned by + `post_get_expanded_data_set` will be passed to + `post_get_expanded_data_set_with_metadata`. + """ + return response, metadata + def pre_get_global_site_tag( self, request: analytics_admin.GetGlobalSiteTagRequest, @@ -3019,12 +4375,35 @@ def post_get_global_site_tag( ) -> resources.GlobalSiteTag: """Post-rpc interceptor for get_global_site_tag - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_global_site_tag_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AnalyticsAdminService server but before - it is returned to user code. + it is returned to user code. This `post_get_global_site_tag` interceptor runs + before the `post_get_global_site_tag_with_metadata` interceptor. """ return response + def post_get_global_site_tag_with_metadata( + self, + response: resources.GlobalSiteTag, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.GlobalSiteTag, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_global_site_tag + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnalyticsAdminService server but before it is returned to user code. + + We recommend only using this `post_get_global_site_tag_with_metadata` + interceptor in new development instead of the `post_get_global_site_tag` interceptor. + When both interceptors are used, this `post_get_global_site_tag_with_metadata` interceptor runs after the + `post_get_global_site_tag` interceptor. The (possibly modified) response returned by + `post_get_global_site_tag` will be passed to + `post_get_global_site_tag_with_metadata`. 
+ """ + return response, metadata + def pre_get_google_signals_settings( self, request: analytics_admin.GetGoogleSignalsSettingsRequest, @@ -3045,12 +4424,37 @@ def post_get_google_signals_settings( ) -> resources.GoogleSignalsSettings: """Post-rpc interceptor for get_google_signals_settings - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_google_signals_settings_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AnalyticsAdminService server but before - it is returned to user code. + it is returned to user code. This `post_get_google_signals_settings` interceptor runs + before the `post_get_google_signals_settings_with_metadata` interceptor. """ return response + def post_get_google_signals_settings_with_metadata( + self, + response: resources.GoogleSignalsSettings, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + resources.GoogleSignalsSettings, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for get_google_signals_settings + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnalyticsAdminService server but before it is returned to user code. + + We recommend only using this `post_get_google_signals_settings_with_metadata` + interceptor in new development instead of the `post_get_google_signals_settings` interceptor. + When both interceptors are used, this `post_get_google_signals_settings_with_metadata` interceptor runs after the + `post_get_google_signals_settings` interceptor. The (possibly modified) response returned by + `post_get_google_signals_settings` will be passed to + `post_get_google_signals_settings_with_metadata`. + """ + return response, metadata + def pre_get_key_event( self, request: analytics_admin.GetKeyEventRequest, @@ -3068,12 +4472,35 @@ def pre_get_key_event( def post_get_key_event(self, response: resources.KeyEvent) -> resources.KeyEvent: """Post-rpc interceptor for get_key_event - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_key_event_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AnalyticsAdminService server but before - it is returned to user code. + it is returned to user code. This `post_get_key_event` interceptor runs + before the `post_get_key_event_with_metadata` interceptor. """ return response + def post_get_key_event_with_metadata( + self, + response: resources.KeyEvent, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.KeyEvent, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_key_event + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnalyticsAdminService server but before it is returned to user code. + + We recommend only using this `post_get_key_event_with_metadata` + interceptor in new development instead of the `post_get_key_event` interceptor. + When both interceptors are used, this `post_get_key_event_with_metadata` interceptor runs after the + `post_get_key_event` interceptor. The (possibly modified) response returned by + `post_get_key_event` will be passed to + `post_get_key_event_with_metadata`. 
+ """ + return response, metadata + def pre_get_measurement_protocol_secret( self, request: analytics_admin.GetMeasurementProtocolSecretRequest, @@ -3094,12 +4521,37 @@ def post_get_measurement_protocol_secret( ) -> resources.MeasurementProtocolSecret: """Post-rpc interceptor for get_measurement_protocol_secret - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_measurement_protocol_secret_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AnalyticsAdminService server but before - it is returned to user code. + it is returned to user code. This `post_get_measurement_protocol_secret` interceptor runs + before the `post_get_measurement_protocol_secret_with_metadata` interceptor. """ return response + def post_get_measurement_protocol_secret_with_metadata( + self, + response: resources.MeasurementProtocolSecret, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + resources.MeasurementProtocolSecret, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for get_measurement_protocol_secret + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnalyticsAdminService server but before it is returned to user code. + + We recommend only using this `post_get_measurement_protocol_secret_with_metadata` + interceptor in new development instead of the `post_get_measurement_protocol_secret` interceptor. + When both interceptors are used, this `post_get_measurement_protocol_secret_with_metadata` interceptor runs after the + `post_get_measurement_protocol_secret` interceptor. The (possibly modified) response returned by + `post_get_measurement_protocol_secret` will be passed to + `post_get_measurement_protocol_secret_with_metadata`. + """ + return response, metadata + def pre_get_property( self, request: analytics_admin.GetPropertyRequest, @@ -3117,12 +4569,35 @@ def pre_get_property( def post_get_property(self, response: resources.Property) -> resources.Property: """Post-rpc interceptor for get_property - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_property_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AnalyticsAdminService server but before - it is returned to user code. + it is returned to user code. This `post_get_property` interceptor runs + before the `post_get_property_with_metadata` interceptor. """ return response + def post_get_property_with_metadata( + self, + response: resources.Property, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.Property, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_property + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnalyticsAdminService server but before it is returned to user code. + + We recommend only using this `post_get_property_with_metadata` + interceptor in new development instead of the `post_get_property` interceptor. + When both interceptors are used, this `post_get_property_with_metadata` interceptor runs after the + `post_get_property` interceptor. The (possibly modified) response returned by + `post_get_property` will be passed to + `post_get_property_with_metadata`. 
+ """ + return response, metadata + def pre_get_rollup_property_source_link( self, request: analytics_admin.GetRollupPropertySourceLinkRequest, @@ -3143,12 +4618,37 @@ def post_get_rollup_property_source_link( ) -> resources.RollupPropertySourceLink: """Post-rpc interceptor for get_rollup_property_source_link - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_rollup_property_source_link_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AnalyticsAdminService server but before - it is returned to user code. + it is returned to user code. This `post_get_rollup_property_source_link` interceptor runs + before the `post_get_rollup_property_source_link_with_metadata` interceptor. """ return response + def post_get_rollup_property_source_link_with_metadata( + self, + response: resources.RollupPropertySourceLink, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + resources.RollupPropertySourceLink, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for get_rollup_property_source_link + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnalyticsAdminService server but before it is returned to user code. + + We recommend only using this `post_get_rollup_property_source_link_with_metadata` + interceptor in new development instead of the `post_get_rollup_property_source_link` interceptor. + When both interceptors are used, this `post_get_rollup_property_source_link_with_metadata` interceptor runs after the + `post_get_rollup_property_source_link` interceptor. The (possibly modified) response returned by + `post_get_rollup_property_source_link` will be passed to + `post_get_rollup_property_source_link_with_metadata`. + """ + return response, metadata + def pre_get_search_ads360_link( self, request: analytics_admin.GetSearchAds360LinkRequest, @@ -3169,12 +4669,35 @@ def post_get_search_ads360_link( ) -> resources.SearchAds360Link: """Post-rpc interceptor for get_search_ads360_link - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_search_ads360_link_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AnalyticsAdminService server but before - it is returned to user code. + it is returned to user code. This `post_get_search_ads360_link` interceptor runs + before the `post_get_search_ads360_link_with_metadata` interceptor. """ return response + def post_get_search_ads360_link_with_metadata( + self, + response: resources.SearchAds360Link, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.SearchAds360Link, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_search_ads360_link + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnalyticsAdminService server but before it is returned to user code. + + We recommend only using this `post_get_search_ads360_link_with_metadata` + interceptor in new development instead of the `post_get_search_ads360_link` interceptor. + When both interceptors are used, this `post_get_search_ads360_link_with_metadata` interceptor runs after the + `post_get_search_ads360_link` interceptor. The (possibly modified) response returned by + `post_get_search_ads360_link` will be passed to + `post_get_search_ads360_link_with_metadata`. 
+ """ + return response, metadata + def pre_get_sk_ad_network_conversion_value_schema( self, request: analytics_admin.GetSKAdNetworkConversionValueSchemaRequest, @@ -3195,12 +4718,38 @@ def post_get_sk_ad_network_conversion_value_schema( ) -> resources.SKAdNetworkConversionValueSchema: """Post-rpc interceptor for get_sk_ad_network_conversion_value_schema - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_sk_ad_network_conversion_value_schema_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AnalyticsAdminService server but before - it is returned to user code. + it is returned to user code. This `post_get_sk_ad_network_conversion_value_schema` interceptor runs + before the `post_get_sk_ad_network_conversion_value_schema_with_metadata` interceptor. """ return response + def post_get_sk_ad_network_conversion_value_schema_with_metadata( + self, + response: resources.SKAdNetworkConversionValueSchema, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + resources.SKAdNetworkConversionValueSchema, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for get_sk_ad_network_conversion_value_schema + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnalyticsAdminService server but before it is returned to user code. + + We recommend only using this `post_get_sk_ad_network_conversion_value_schema_with_metadata` + interceptor in new development instead of the `post_get_sk_ad_network_conversion_value_schema` interceptor. + When both interceptors are used, this `post_get_sk_ad_network_conversion_value_schema_with_metadata` interceptor runs after the + `post_get_sk_ad_network_conversion_value_schema` interceptor. The (possibly modified) response returned by + `post_get_sk_ad_network_conversion_value_schema` will be passed to + `post_get_sk_ad_network_conversion_value_schema_with_metadata`. + """ + return response, metadata + def pre_get_subproperty_event_filter( self, request: analytics_admin.GetSubpropertyEventFilterRequest, @@ -3221,12 +4770,38 @@ def post_get_subproperty_event_filter( ) -> subproperty_event_filter.SubpropertyEventFilter: """Post-rpc interceptor for get_subproperty_event_filter - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_subproperty_event_filter_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AnalyticsAdminService server but before - it is returned to user code. + it is returned to user code. This `post_get_subproperty_event_filter` interceptor runs + before the `post_get_subproperty_event_filter_with_metadata` interceptor. """ return response + def post_get_subproperty_event_filter_with_metadata( + self, + response: subproperty_event_filter.SubpropertyEventFilter, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + subproperty_event_filter.SubpropertyEventFilter, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for get_subproperty_event_filter + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnalyticsAdminService server but before it is returned to user code. + + We recommend only using this `post_get_subproperty_event_filter_with_metadata` + interceptor in new development instead of the `post_get_subproperty_event_filter` interceptor. 
+ When both interceptors are used, this `post_get_subproperty_event_filter_with_metadata` interceptor runs after the + `post_get_subproperty_event_filter` interceptor. The (possibly modified) response returned by + `post_get_subproperty_event_filter` will be passed to + `post_get_subproperty_event_filter_with_metadata`. + """ + return response, metadata + def pre_list_access_bindings( self, request: analytics_admin.ListAccessBindingsRequest, @@ -3247,12 +4822,38 @@ def post_list_access_bindings( ) -> analytics_admin.ListAccessBindingsResponse: """Post-rpc interceptor for list_access_bindings - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_access_bindings_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AnalyticsAdminService server but before - it is returned to user code. + it is returned to user code. This `post_list_access_bindings` interceptor runs + before the `post_list_access_bindings_with_metadata` interceptor. """ return response + def post_list_access_bindings_with_metadata( + self, + response: analytics_admin.ListAccessBindingsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + analytics_admin.ListAccessBindingsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_access_bindings + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnalyticsAdminService server but before it is returned to user code. + + We recommend only using this `post_list_access_bindings_with_metadata` + interceptor in new development instead of the `post_list_access_bindings` interceptor. + When both interceptors are used, this `post_list_access_bindings_with_metadata` interceptor runs after the + `post_list_access_bindings` interceptor. The (possibly modified) response returned by + `post_list_access_bindings` will be passed to + `post_list_access_bindings_with_metadata`. + """ + return response, metadata + def pre_list_accounts( self, request: analytics_admin.ListAccountsRequest, @@ -3272,12 +4873,37 @@ def post_list_accounts( ) -> analytics_admin.ListAccountsResponse: """Post-rpc interceptor for list_accounts - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_accounts_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AnalyticsAdminService server but before - it is returned to user code. + it is returned to user code. This `post_list_accounts` interceptor runs + before the `post_list_accounts_with_metadata` interceptor. """ return response + def post_list_accounts_with_metadata( + self, + response: analytics_admin.ListAccountsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + analytics_admin.ListAccountsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_accounts + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnalyticsAdminService server but before it is returned to user code. + + We recommend only using this `post_list_accounts_with_metadata` + interceptor in new development instead of the `post_list_accounts` interceptor. + When both interceptors are used, this `post_list_accounts_with_metadata` interceptor runs after the + `post_list_accounts` interceptor. 
The (possibly modified) response returned by + `post_list_accounts` will be passed to + `post_list_accounts_with_metadata`. + """ + return response, metadata + def pre_list_account_summaries( self, request: analytics_admin.ListAccountSummariesRequest, @@ -3298,12 +4924,38 @@ def post_list_account_summaries( ) -> analytics_admin.ListAccountSummariesResponse: """Post-rpc interceptor for list_account_summaries - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_account_summaries_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AnalyticsAdminService server but before - it is returned to user code. + it is returned to user code. This `post_list_account_summaries` interceptor runs + before the `post_list_account_summaries_with_metadata` interceptor. """ return response + def post_list_account_summaries_with_metadata( + self, + response: analytics_admin.ListAccountSummariesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + analytics_admin.ListAccountSummariesResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_account_summaries + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnalyticsAdminService server but before it is returned to user code. + + We recommend only using this `post_list_account_summaries_with_metadata` + interceptor in new development instead of the `post_list_account_summaries` interceptor. + When both interceptors are used, this `post_list_account_summaries_with_metadata` interceptor runs after the + `post_list_account_summaries` interceptor. The (possibly modified) response returned by + `post_list_account_summaries` will be passed to + `post_list_account_summaries_with_metadata`. + """ + return response, metadata + def pre_list_ad_sense_links( self, request: analytics_admin.ListAdSenseLinksRequest, @@ -3323,12 +4975,38 @@ def post_list_ad_sense_links( ) -> analytics_admin.ListAdSenseLinksResponse: """Post-rpc interceptor for list_ad_sense_links - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_ad_sense_links_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AnalyticsAdminService server but before - it is returned to user code. + it is returned to user code. This `post_list_ad_sense_links` interceptor runs + before the `post_list_ad_sense_links_with_metadata` interceptor. """ return response + def post_list_ad_sense_links_with_metadata( + self, + response: analytics_admin.ListAdSenseLinksResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + analytics_admin.ListAdSenseLinksResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_ad_sense_links + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnalyticsAdminService server but before it is returned to user code. + + We recommend only using this `post_list_ad_sense_links_with_metadata` + interceptor in new development instead of the `post_list_ad_sense_links` interceptor. + When both interceptors are used, this `post_list_ad_sense_links_with_metadata` interceptor runs after the + `post_list_ad_sense_links` interceptor. The (possibly modified) response returned by + `post_list_ad_sense_links` will be passed to + `post_list_ad_sense_links_with_metadata`. 
+ """ + return response, metadata + def pre_list_audiences( self, request: analytics_admin.ListAudiencesRequest, @@ -3348,12 +5026,37 @@ def post_list_audiences( ) -> analytics_admin.ListAudiencesResponse: """Post-rpc interceptor for list_audiences - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_audiences_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AnalyticsAdminService server but before - it is returned to user code. + it is returned to user code. This `post_list_audiences` interceptor runs + before the `post_list_audiences_with_metadata` interceptor. """ return response + def post_list_audiences_with_metadata( + self, + response: analytics_admin.ListAudiencesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + analytics_admin.ListAudiencesResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_audiences + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnalyticsAdminService server but before it is returned to user code. + + We recommend only using this `post_list_audiences_with_metadata` + interceptor in new development instead of the `post_list_audiences` interceptor. + When both interceptors are used, this `post_list_audiences_with_metadata` interceptor runs after the + `post_list_audiences` interceptor. The (possibly modified) response returned by + `post_list_audiences` will be passed to + `post_list_audiences_with_metadata`. + """ + return response, metadata + def pre_list_big_query_links( self, request: analytics_admin.ListBigQueryLinksRequest, @@ -3374,12 +5077,38 @@ def post_list_big_query_links( ) -> analytics_admin.ListBigQueryLinksResponse: """Post-rpc interceptor for list_big_query_links - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_big_query_links_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AnalyticsAdminService server but before - it is returned to user code. + it is returned to user code. This `post_list_big_query_links` interceptor runs + before the `post_list_big_query_links_with_metadata` interceptor. """ return response + def post_list_big_query_links_with_metadata( + self, + response: analytics_admin.ListBigQueryLinksResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + analytics_admin.ListBigQueryLinksResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_big_query_links + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnalyticsAdminService server but before it is returned to user code. + + We recommend only using this `post_list_big_query_links_with_metadata` + interceptor in new development instead of the `post_list_big_query_links` interceptor. + When both interceptors are used, this `post_list_big_query_links_with_metadata` interceptor runs after the + `post_list_big_query_links` interceptor. The (possibly modified) response returned by + `post_list_big_query_links` will be passed to + `post_list_big_query_links_with_metadata`. 
+ """ + return response, metadata + def pre_list_calculated_metrics( self, request: analytics_admin.ListCalculatedMetricsRequest, @@ -3400,12 +5129,38 @@ def post_list_calculated_metrics( ) -> analytics_admin.ListCalculatedMetricsResponse: """Post-rpc interceptor for list_calculated_metrics - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_calculated_metrics_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AnalyticsAdminService server but before - it is returned to user code. + it is returned to user code. This `post_list_calculated_metrics` interceptor runs + before the `post_list_calculated_metrics_with_metadata` interceptor. """ return response + def post_list_calculated_metrics_with_metadata( + self, + response: analytics_admin.ListCalculatedMetricsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + analytics_admin.ListCalculatedMetricsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_calculated_metrics + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnalyticsAdminService server but before it is returned to user code. + + We recommend only using this `post_list_calculated_metrics_with_metadata` + interceptor in new development instead of the `post_list_calculated_metrics` interceptor. + When both interceptors are used, this `post_list_calculated_metrics_with_metadata` interceptor runs after the + `post_list_calculated_metrics` interceptor. The (possibly modified) response returned by + `post_list_calculated_metrics` will be passed to + `post_list_calculated_metrics_with_metadata`. + """ + return response, metadata + def pre_list_channel_groups( self, request: analytics_admin.ListChannelGroupsRequest, @@ -3426,12 +5181,38 @@ def post_list_channel_groups( ) -> analytics_admin.ListChannelGroupsResponse: """Post-rpc interceptor for list_channel_groups - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_channel_groups_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AnalyticsAdminService server but before - it is returned to user code. + it is returned to user code. This `post_list_channel_groups` interceptor runs + before the `post_list_channel_groups_with_metadata` interceptor. """ return response + def post_list_channel_groups_with_metadata( + self, + response: analytics_admin.ListChannelGroupsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + analytics_admin.ListChannelGroupsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_channel_groups + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnalyticsAdminService server but before it is returned to user code. + + We recommend only using this `post_list_channel_groups_with_metadata` + interceptor in new development instead of the `post_list_channel_groups` interceptor. + When both interceptors are used, this `post_list_channel_groups_with_metadata` interceptor runs after the + `post_list_channel_groups` interceptor. The (possibly modified) response returned by + `post_list_channel_groups` will be passed to + `post_list_channel_groups_with_metadata`. 
+ """ + return response, metadata + def pre_list_connected_site_tags( self, request: analytics_admin.ListConnectedSiteTagsRequest, @@ -3452,12 +5233,38 @@ def post_list_connected_site_tags( ) -> analytics_admin.ListConnectedSiteTagsResponse: """Post-rpc interceptor for list_connected_site_tags - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_connected_site_tags_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AnalyticsAdminService server but before - it is returned to user code. + it is returned to user code. This `post_list_connected_site_tags` interceptor runs + before the `post_list_connected_site_tags_with_metadata` interceptor. """ return response + def post_list_connected_site_tags_with_metadata( + self, + response: analytics_admin.ListConnectedSiteTagsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + analytics_admin.ListConnectedSiteTagsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_connected_site_tags + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnalyticsAdminService server but before it is returned to user code. + + We recommend only using this `post_list_connected_site_tags_with_metadata` + interceptor in new development instead of the `post_list_connected_site_tags` interceptor. + When both interceptors are used, this `post_list_connected_site_tags_with_metadata` interceptor runs after the + `post_list_connected_site_tags` interceptor. The (possibly modified) response returned by + `post_list_connected_site_tags` will be passed to + `post_list_connected_site_tags_with_metadata`. + """ + return response, metadata + def pre_list_conversion_events( self, request: analytics_admin.ListConversionEventsRequest, @@ -3478,12 +5285,38 @@ def post_list_conversion_events( ) -> analytics_admin.ListConversionEventsResponse: """Post-rpc interceptor for list_conversion_events - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_conversion_events_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AnalyticsAdminService server but before - it is returned to user code. + it is returned to user code. This `post_list_conversion_events` interceptor runs + before the `post_list_conversion_events_with_metadata` interceptor. """ return response + def post_list_conversion_events_with_metadata( + self, + response: analytics_admin.ListConversionEventsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + analytics_admin.ListConversionEventsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_conversion_events + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnalyticsAdminService server but before it is returned to user code. + + We recommend only using this `post_list_conversion_events_with_metadata` + interceptor in new development instead of the `post_list_conversion_events` interceptor. + When both interceptors are used, this `post_list_conversion_events_with_metadata` interceptor runs after the + `post_list_conversion_events` interceptor. The (possibly modified) response returned by + `post_list_conversion_events` will be passed to + `post_list_conversion_events_with_metadata`. 
+ """ + return response, metadata + def pre_list_custom_dimensions( self, request: analytics_admin.ListCustomDimensionsRequest, @@ -3504,12 +5337,38 @@ def post_list_custom_dimensions( ) -> analytics_admin.ListCustomDimensionsResponse: """Post-rpc interceptor for list_custom_dimensions - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_custom_dimensions_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AnalyticsAdminService server but before - it is returned to user code. + it is returned to user code. This `post_list_custom_dimensions` interceptor runs + before the `post_list_custom_dimensions_with_metadata` interceptor. """ return response + def post_list_custom_dimensions_with_metadata( + self, + response: analytics_admin.ListCustomDimensionsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + analytics_admin.ListCustomDimensionsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_custom_dimensions + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnalyticsAdminService server but before it is returned to user code. + + We recommend only using this `post_list_custom_dimensions_with_metadata` + interceptor in new development instead of the `post_list_custom_dimensions` interceptor. + When both interceptors are used, this `post_list_custom_dimensions_with_metadata` interceptor runs after the + `post_list_custom_dimensions` interceptor. The (possibly modified) response returned by + `post_list_custom_dimensions` will be passed to + `post_list_custom_dimensions_with_metadata`. + """ + return response, metadata + def pre_list_custom_metrics( self, request: analytics_admin.ListCustomMetricsRequest, @@ -3530,12 +5389,38 @@ def post_list_custom_metrics( ) -> analytics_admin.ListCustomMetricsResponse: """Post-rpc interceptor for list_custom_metrics - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_custom_metrics_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AnalyticsAdminService server but before - it is returned to user code. + it is returned to user code. This `post_list_custom_metrics` interceptor runs + before the `post_list_custom_metrics_with_metadata` interceptor. """ return response + def post_list_custom_metrics_with_metadata( + self, + response: analytics_admin.ListCustomMetricsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + analytics_admin.ListCustomMetricsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_custom_metrics + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnalyticsAdminService server but before it is returned to user code. + + We recommend only using this `post_list_custom_metrics_with_metadata` + interceptor in new development instead of the `post_list_custom_metrics` interceptor. + When both interceptors are used, this `post_list_custom_metrics_with_metadata` interceptor runs after the + `post_list_custom_metrics` interceptor. The (possibly modified) response returned by + `post_list_custom_metrics` will be passed to + `post_list_custom_metrics_with_metadata`. 
+ """ + return response, metadata + def pre_list_data_streams( self, request: analytics_admin.ListDataStreamsRequest, @@ -3555,12 +5440,37 @@ def post_list_data_streams( ) -> analytics_admin.ListDataStreamsResponse: """Post-rpc interceptor for list_data_streams - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_data_streams_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AnalyticsAdminService server but before - it is returned to user code. + it is returned to user code. This `post_list_data_streams` interceptor runs + before the `post_list_data_streams_with_metadata` interceptor. """ return response + def post_list_data_streams_with_metadata( + self, + response: analytics_admin.ListDataStreamsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + analytics_admin.ListDataStreamsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_data_streams + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnalyticsAdminService server but before it is returned to user code. + + We recommend only using this `post_list_data_streams_with_metadata` + interceptor in new development instead of the `post_list_data_streams` interceptor. + When both interceptors are used, this `post_list_data_streams_with_metadata` interceptor runs after the + `post_list_data_streams` interceptor. The (possibly modified) response returned by + `post_list_data_streams` will be passed to + `post_list_data_streams_with_metadata`. + """ + return response, metadata + def pre_list_display_video360_advertiser_link_proposals( self, request: analytics_admin.ListDisplayVideo360AdvertiserLinkProposalsRequest, @@ -3582,12 +5492,38 @@ def post_list_display_video360_advertiser_link_proposals( ) -> analytics_admin.ListDisplayVideo360AdvertiserLinkProposalsResponse: """Post-rpc interceptor for list_display_video360_advertiser_link_proposals - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_display_video360_advertiser_link_proposals_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AnalyticsAdminService server but before - it is returned to user code. + it is returned to user code. This `post_list_display_video360_advertiser_link_proposals` interceptor runs + before the `post_list_display_video360_advertiser_link_proposals_with_metadata` interceptor. """ return response + def post_list_display_video360_advertiser_link_proposals_with_metadata( + self, + response: analytics_admin.ListDisplayVideo360AdvertiserLinkProposalsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + analytics_admin.ListDisplayVideo360AdvertiserLinkProposalsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_display_video360_advertiser_link_proposals + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnalyticsAdminService server but before it is returned to user code. + + We recommend only using this `post_list_display_video360_advertiser_link_proposals_with_metadata` + interceptor in new development instead of the `post_list_display_video360_advertiser_link_proposals` interceptor. 
+ When both interceptors are used, this `post_list_display_video360_advertiser_link_proposals_with_metadata` interceptor runs after the + `post_list_display_video360_advertiser_link_proposals` interceptor. The (possibly modified) response returned by + `post_list_display_video360_advertiser_link_proposals` will be passed to + `post_list_display_video360_advertiser_link_proposals_with_metadata`. + """ + return response, metadata + def pre_list_display_video360_advertiser_links( self, request: analytics_admin.ListDisplayVideo360AdvertiserLinksRequest, @@ -3608,12 +5544,38 @@ def post_list_display_video360_advertiser_links( ) -> analytics_admin.ListDisplayVideo360AdvertiserLinksResponse: """Post-rpc interceptor for list_display_video360_advertiser_links - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_display_video360_advertiser_links_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AnalyticsAdminService server but before - it is returned to user code. + it is returned to user code. This `post_list_display_video360_advertiser_links` interceptor runs + before the `post_list_display_video360_advertiser_links_with_metadata` interceptor. """ return response + def post_list_display_video360_advertiser_links_with_metadata( + self, + response: analytics_admin.ListDisplayVideo360AdvertiserLinksResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + analytics_admin.ListDisplayVideo360AdvertiserLinksResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_display_video360_advertiser_links + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnalyticsAdminService server but before it is returned to user code. + + We recommend only using this `post_list_display_video360_advertiser_links_with_metadata` + interceptor in new development instead of the `post_list_display_video360_advertiser_links` interceptor. + When both interceptors are used, this `post_list_display_video360_advertiser_links_with_metadata` interceptor runs after the + `post_list_display_video360_advertiser_links` interceptor. The (possibly modified) response returned by + `post_list_display_video360_advertiser_links` will be passed to + `post_list_display_video360_advertiser_links_with_metadata`. + """ + return response, metadata + def pre_list_event_create_rules( self, request: analytics_admin.ListEventCreateRulesRequest, @@ -3634,12 +5596,38 @@ def post_list_event_create_rules( ) -> analytics_admin.ListEventCreateRulesResponse: """Post-rpc interceptor for list_event_create_rules - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_event_create_rules_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AnalyticsAdminService server but before - it is returned to user code. + it is returned to user code. This `post_list_event_create_rules` interceptor runs + before the `post_list_event_create_rules_with_metadata` interceptor. 
""" return response + def post_list_event_create_rules_with_metadata( + self, + response: analytics_admin.ListEventCreateRulesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + analytics_admin.ListEventCreateRulesResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_event_create_rules + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnalyticsAdminService server but before it is returned to user code. + + We recommend only using this `post_list_event_create_rules_with_metadata` + interceptor in new development instead of the `post_list_event_create_rules` interceptor. + When both interceptors are used, this `post_list_event_create_rules_with_metadata` interceptor runs after the + `post_list_event_create_rules` interceptor. The (possibly modified) response returned by + `post_list_event_create_rules` will be passed to + `post_list_event_create_rules_with_metadata`. + """ + return response, metadata + def pre_list_event_edit_rules( self, request: analytics_admin.ListEventEditRulesRequest, @@ -3660,12 +5648,38 @@ def post_list_event_edit_rules( ) -> analytics_admin.ListEventEditRulesResponse: """Post-rpc interceptor for list_event_edit_rules - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_event_edit_rules_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AnalyticsAdminService server but before - it is returned to user code. + it is returned to user code. This `post_list_event_edit_rules` interceptor runs + before the `post_list_event_edit_rules_with_metadata` interceptor. """ return response + def post_list_event_edit_rules_with_metadata( + self, + response: analytics_admin.ListEventEditRulesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + analytics_admin.ListEventEditRulesResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_event_edit_rules + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnalyticsAdminService server but before it is returned to user code. + + We recommend only using this `post_list_event_edit_rules_with_metadata` + interceptor in new development instead of the `post_list_event_edit_rules` interceptor. + When both interceptors are used, this `post_list_event_edit_rules_with_metadata` interceptor runs after the + `post_list_event_edit_rules` interceptor. The (possibly modified) response returned by + `post_list_event_edit_rules` will be passed to + `post_list_event_edit_rules_with_metadata`. + """ + return response, metadata + def pre_list_expanded_data_sets( self, request: analytics_admin.ListExpandedDataSetsRequest, @@ -3686,12 +5700,38 @@ def post_list_expanded_data_sets( ) -> analytics_admin.ListExpandedDataSetsResponse: """Post-rpc interceptor for list_expanded_data_sets - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_expanded_data_sets_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AnalyticsAdminService server but before - it is returned to user code. + it is returned to user code. This `post_list_expanded_data_sets` interceptor runs + before the `post_list_expanded_data_sets_with_metadata` interceptor. 
""" return response + def post_list_expanded_data_sets_with_metadata( + self, + response: analytics_admin.ListExpandedDataSetsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + analytics_admin.ListExpandedDataSetsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_expanded_data_sets + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnalyticsAdminService server but before it is returned to user code. + + We recommend only using this `post_list_expanded_data_sets_with_metadata` + interceptor in new development instead of the `post_list_expanded_data_sets` interceptor. + When both interceptors are used, this `post_list_expanded_data_sets_with_metadata` interceptor runs after the + `post_list_expanded_data_sets` interceptor. The (possibly modified) response returned by + `post_list_expanded_data_sets` will be passed to + `post_list_expanded_data_sets_with_metadata`. + """ + return response, metadata + def pre_list_firebase_links( self, request: analytics_admin.ListFirebaseLinksRequest, @@ -3712,12 +5752,38 @@ def post_list_firebase_links( ) -> analytics_admin.ListFirebaseLinksResponse: """Post-rpc interceptor for list_firebase_links - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_firebase_links_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AnalyticsAdminService server but before - it is returned to user code. + it is returned to user code. This `post_list_firebase_links` interceptor runs + before the `post_list_firebase_links_with_metadata` interceptor. """ return response + def post_list_firebase_links_with_metadata( + self, + response: analytics_admin.ListFirebaseLinksResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + analytics_admin.ListFirebaseLinksResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_firebase_links + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnalyticsAdminService server but before it is returned to user code. + + We recommend only using this `post_list_firebase_links_with_metadata` + interceptor in new development instead of the `post_list_firebase_links` interceptor. + When both interceptors are used, this `post_list_firebase_links_with_metadata` interceptor runs after the + `post_list_firebase_links` interceptor. The (possibly modified) response returned by + `post_list_firebase_links` will be passed to + `post_list_firebase_links_with_metadata`. + """ + return response, metadata + def pre_list_google_ads_links( self, request: analytics_admin.ListGoogleAdsLinksRequest, @@ -3738,12 +5804,38 @@ def post_list_google_ads_links( ) -> analytics_admin.ListGoogleAdsLinksResponse: """Post-rpc interceptor for list_google_ads_links - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_google_ads_links_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AnalyticsAdminService server but before - it is returned to user code. + it is returned to user code. This `post_list_google_ads_links` interceptor runs + before the `post_list_google_ads_links_with_metadata` interceptor. 
""" return response + def post_list_google_ads_links_with_metadata( + self, + response: analytics_admin.ListGoogleAdsLinksResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + analytics_admin.ListGoogleAdsLinksResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_google_ads_links + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnalyticsAdminService server but before it is returned to user code. + + We recommend only using this `post_list_google_ads_links_with_metadata` + interceptor in new development instead of the `post_list_google_ads_links` interceptor. + When both interceptors are used, this `post_list_google_ads_links_with_metadata` interceptor runs after the + `post_list_google_ads_links` interceptor. The (possibly modified) response returned by + `post_list_google_ads_links` will be passed to + `post_list_google_ads_links_with_metadata`. + """ + return response, metadata + def pre_list_key_events( self, request: analytics_admin.ListKeyEventsRequest, @@ -3763,12 +5855,37 @@ def post_list_key_events( ) -> analytics_admin.ListKeyEventsResponse: """Post-rpc interceptor for list_key_events - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_key_events_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AnalyticsAdminService server but before - it is returned to user code. + it is returned to user code. This `post_list_key_events` interceptor runs + before the `post_list_key_events_with_metadata` interceptor. """ return response + def post_list_key_events_with_metadata( + self, + response: analytics_admin.ListKeyEventsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + analytics_admin.ListKeyEventsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_key_events + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnalyticsAdminService server but before it is returned to user code. + + We recommend only using this `post_list_key_events_with_metadata` + interceptor in new development instead of the `post_list_key_events` interceptor. + When both interceptors are used, this `post_list_key_events_with_metadata` interceptor runs after the + `post_list_key_events` interceptor. The (possibly modified) response returned by + `post_list_key_events` will be passed to + `post_list_key_events_with_metadata`. + """ + return response, metadata + def pre_list_measurement_protocol_secrets( self, request: analytics_admin.ListMeasurementProtocolSecretsRequest, @@ -3789,12 +5906,38 @@ def post_list_measurement_protocol_secrets( ) -> analytics_admin.ListMeasurementProtocolSecretsResponse: """Post-rpc interceptor for list_measurement_protocol_secrets - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_measurement_protocol_secrets_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AnalyticsAdminService server but before - it is returned to user code. + it is returned to user code. This `post_list_measurement_protocol_secrets` interceptor runs + before the `post_list_measurement_protocol_secrets_with_metadata` interceptor. 
""" return response + def post_list_measurement_protocol_secrets_with_metadata( + self, + response: analytics_admin.ListMeasurementProtocolSecretsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + analytics_admin.ListMeasurementProtocolSecretsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_measurement_protocol_secrets + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnalyticsAdminService server but before it is returned to user code. + + We recommend only using this `post_list_measurement_protocol_secrets_with_metadata` + interceptor in new development instead of the `post_list_measurement_protocol_secrets` interceptor. + When both interceptors are used, this `post_list_measurement_protocol_secrets_with_metadata` interceptor runs after the + `post_list_measurement_protocol_secrets` interceptor. The (possibly modified) response returned by + `post_list_measurement_protocol_secrets` will be passed to + `post_list_measurement_protocol_secrets_with_metadata`. + """ + return response, metadata + def pre_list_properties( self, request: analytics_admin.ListPropertiesRequest, @@ -3814,11 +5957,36 @@ def post_list_properties( ) -> analytics_admin.ListPropertiesResponse: """Post-rpc interceptor for list_properties - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_properties_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AnalyticsAdminService server but before - it is returned to user code. + it is returned to user code. This `post_list_properties` interceptor runs + before the `post_list_properties_with_metadata` interceptor. + """ + return response + + def post_list_properties_with_metadata( + self, + response: analytics_admin.ListPropertiesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + analytics_admin.ListPropertiesResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_properties + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnalyticsAdminService server but before it is returned to user code. + + We recommend only using this `post_list_properties_with_metadata` + interceptor in new development instead of the `post_list_properties` interceptor. + When both interceptors are used, this `post_list_properties_with_metadata` interceptor runs after the + `post_list_properties` interceptor. The (possibly modified) response returned by + `post_list_properties` will be passed to + `post_list_properties_with_metadata`. """ - return response + return response, metadata def pre_list_rollup_property_source_links( self, @@ -3840,12 +6008,38 @@ def post_list_rollup_property_source_links( ) -> analytics_admin.ListRollupPropertySourceLinksResponse: """Post-rpc interceptor for list_rollup_property_source_links - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_rollup_property_source_links_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AnalyticsAdminService server but before - it is returned to user code. + it is returned to user code. This `post_list_rollup_property_source_links` interceptor runs + before the `post_list_rollup_property_source_links_with_metadata` interceptor. 
""" return response + def post_list_rollup_property_source_links_with_metadata( + self, + response: analytics_admin.ListRollupPropertySourceLinksResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + analytics_admin.ListRollupPropertySourceLinksResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_rollup_property_source_links + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnalyticsAdminService server but before it is returned to user code. + + We recommend only using this `post_list_rollup_property_source_links_with_metadata` + interceptor in new development instead of the `post_list_rollup_property_source_links` interceptor. + When both interceptors are used, this `post_list_rollup_property_source_links_with_metadata` interceptor runs after the + `post_list_rollup_property_source_links` interceptor. The (possibly modified) response returned by + `post_list_rollup_property_source_links` will be passed to + `post_list_rollup_property_source_links_with_metadata`. + """ + return response, metadata + def pre_list_search_ads360_links( self, request: analytics_admin.ListSearchAds360LinksRequest, @@ -3866,12 +6060,38 @@ def post_list_search_ads360_links( ) -> analytics_admin.ListSearchAds360LinksResponse: """Post-rpc interceptor for list_search_ads360_links - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_search_ads360_links_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AnalyticsAdminService server but before - it is returned to user code. + it is returned to user code. This `post_list_search_ads360_links` interceptor runs + before the `post_list_search_ads360_links_with_metadata` interceptor. """ return response + def post_list_search_ads360_links_with_metadata( + self, + response: analytics_admin.ListSearchAds360LinksResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + analytics_admin.ListSearchAds360LinksResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_search_ads360_links + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnalyticsAdminService server but before it is returned to user code. + + We recommend only using this `post_list_search_ads360_links_with_metadata` + interceptor in new development instead of the `post_list_search_ads360_links` interceptor. + When both interceptors are used, this `post_list_search_ads360_links_with_metadata` interceptor runs after the + `post_list_search_ads360_links` interceptor. The (possibly modified) response returned by + `post_list_search_ads360_links` will be passed to + `post_list_search_ads360_links_with_metadata`. + """ + return response, metadata + def pre_list_sk_ad_network_conversion_value_schemas( self, request: analytics_admin.ListSKAdNetworkConversionValueSchemasRequest, @@ -3892,12 +6112,38 @@ def post_list_sk_ad_network_conversion_value_schemas( ) -> analytics_admin.ListSKAdNetworkConversionValueSchemasResponse: """Post-rpc interceptor for list_sk_ad_network_conversion_value_schemas - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_sk_ad_network_conversion_value_schemas_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response after it is returned by the AnalyticsAdminService server but before - it is returned to user code. + it is returned to user code. This `post_list_sk_ad_network_conversion_value_schemas` interceptor runs + before the `post_list_sk_ad_network_conversion_value_schemas_with_metadata` interceptor. """ return response + def post_list_sk_ad_network_conversion_value_schemas_with_metadata( + self, + response: analytics_admin.ListSKAdNetworkConversionValueSchemasResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + analytics_admin.ListSKAdNetworkConversionValueSchemasResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_sk_ad_network_conversion_value_schemas + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnalyticsAdminService server but before it is returned to user code. + + We recommend only using this `post_list_sk_ad_network_conversion_value_schemas_with_metadata` + interceptor in new development instead of the `post_list_sk_ad_network_conversion_value_schemas` interceptor. + When both interceptors are used, this `post_list_sk_ad_network_conversion_value_schemas_with_metadata` interceptor runs after the + `post_list_sk_ad_network_conversion_value_schemas` interceptor. The (possibly modified) response returned by + `post_list_sk_ad_network_conversion_value_schemas` will be passed to + `post_list_sk_ad_network_conversion_value_schemas_with_metadata`. + """ + return response, metadata + def pre_list_subproperty_event_filters( self, request: analytics_admin.ListSubpropertyEventFiltersRequest, @@ -3918,12 +6164,38 @@ def post_list_subproperty_event_filters( ) -> analytics_admin.ListSubpropertyEventFiltersResponse: """Post-rpc interceptor for list_subproperty_event_filters - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_subproperty_event_filters_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AnalyticsAdminService server but before - it is returned to user code. + it is returned to user code. This `post_list_subproperty_event_filters` interceptor runs + before the `post_list_subproperty_event_filters_with_metadata` interceptor. """ return response + def post_list_subproperty_event_filters_with_metadata( + self, + response: analytics_admin.ListSubpropertyEventFiltersResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + analytics_admin.ListSubpropertyEventFiltersResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_subproperty_event_filters + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnalyticsAdminService server but before it is returned to user code. + + We recommend only using this `post_list_subproperty_event_filters_with_metadata` + interceptor in new development instead of the `post_list_subproperty_event_filters` interceptor. + When both interceptors are used, this `post_list_subproperty_event_filters_with_metadata` interceptor runs after the + `post_list_subproperty_event_filters` interceptor. The (possibly modified) response returned by + `post_list_subproperty_event_filters` will be passed to + `post_list_subproperty_event_filters_with_metadata`. 
+ """ + return response, metadata + def pre_provision_account_ticket( self, request: analytics_admin.ProvisionAccountTicketRequest, @@ -3944,12 +6216,38 @@ def post_provision_account_ticket( ) -> analytics_admin.ProvisionAccountTicketResponse: """Post-rpc interceptor for provision_account_ticket - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_provision_account_ticket_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AnalyticsAdminService server but before - it is returned to user code. + it is returned to user code. This `post_provision_account_ticket` interceptor runs + before the `post_provision_account_ticket_with_metadata` interceptor. """ return response + def post_provision_account_ticket_with_metadata( + self, + response: analytics_admin.ProvisionAccountTicketResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + analytics_admin.ProvisionAccountTicketResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for provision_account_ticket + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnalyticsAdminService server but before it is returned to user code. + + We recommend only using this `post_provision_account_ticket_with_metadata` + interceptor in new development instead of the `post_provision_account_ticket` interceptor. + When both interceptors are used, this `post_provision_account_ticket_with_metadata` interceptor runs after the + `post_provision_account_ticket` interceptor. The (possibly modified) response returned by + `post_provision_account_ticket` will be passed to + `post_provision_account_ticket_with_metadata`. + """ + return response, metadata + def pre_provision_subproperty( self, request: analytics_admin.ProvisionSubpropertyRequest, @@ -3970,12 +6268,38 @@ def post_provision_subproperty( ) -> analytics_admin.ProvisionSubpropertyResponse: """Post-rpc interceptor for provision_subproperty - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_provision_subproperty_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AnalyticsAdminService server but before - it is returned to user code. + it is returned to user code. This `post_provision_subproperty` interceptor runs + before the `post_provision_subproperty_with_metadata` interceptor. """ return response + def post_provision_subproperty_with_metadata( + self, + response: analytics_admin.ProvisionSubpropertyResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + analytics_admin.ProvisionSubpropertyResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for provision_subproperty + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnalyticsAdminService server but before it is returned to user code. + + We recommend only using this `post_provision_subproperty_with_metadata` + interceptor in new development instead of the `post_provision_subproperty` interceptor. + When both interceptors are used, this `post_provision_subproperty_with_metadata` interceptor runs after the + `post_provision_subproperty` interceptor. The (possibly modified) response returned by + `post_provision_subproperty` will be passed to + `post_provision_subproperty_with_metadata`. 
+ """ + return response, metadata + def pre_reorder_event_edit_rules( self, request: analytics_admin.ReorderEventEditRulesRequest, @@ -4010,12 +6334,37 @@ def post_run_access_report( ) -> analytics_admin.RunAccessReportResponse: """Post-rpc interceptor for run_access_report - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_run_access_report_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AnalyticsAdminService server but before - it is returned to user code. + it is returned to user code. This `post_run_access_report` interceptor runs + before the `post_run_access_report_with_metadata` interceptor. """ return response + def post_run_access_report_with_metadata( + self, + response: analytics_admin.RunAccessReportResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + analytics_admin.RunAccessReportResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for run_access_report + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnalyticsAdminService server but before it is returned to user code. + + We recommend only using this `post_run_access_report_with_metadata` + interceptor in new development instead of the `post_run_access_report` interceptor. + When both interceptors are used, this `post_run_access_report_with_metadata` interceptor runs after the + `post_run_access_report` interceptor. The (possibly modified) response returned by + `post_run_access_report` will be passed to + `post_run_access_report_with_metadata`. + """ + return response, metadata + def pre_search_change_history_events( self, request: analytics_admin.SearchChangeHistoryEventsRequest, @@ -4036,12 +6385,38 @@ def post_search_change_history_events( ) -> analytics_admin.SearchChangeHistoryEventsResponse: """Post-rpc interceptor for search_change_history_events - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_search_change_history_events_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AnalyticsAdminService server but before - it is returned to user code. + it is returned to user code. This `post_search_change_history_events` interceptor runs + before the `post_search_change_history_events_with_metadata` interceptor. """ return response + def post_search_change_history_events_with_metadata( + self, + response: analytics_admin.SearchChangeHistoryEventsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + analytics_admin.SearchChangeHistoryEventsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for search_change_history_events + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnalyticsAdminService server but before it is returned to user code. + + We recommend only using this `post_search_change_history_events_with_metadata` + interceptor in new development instead of the `post_search_change_history_events` interceptor. + When both interceptors are used, this `post_search_change_history_events_with_metadata` interceptor runs after the + `post_search_change_history_events` interceptor. The (possibly modified) response returned by + `post_search_change_history_events` will be passed to + `post_search_change_history_events_with_metadata`. 
+ """ + return response, metadata + def pre_set_automated_ga4_configuration_opt_out( self, request: analytics_admin.SetAutomatedGa4ConfigurationOptOutRequest, @@ -4062,12 +6437,38 @@ def post_set_automated_ga4_configuration_opt_out( ) -> analytics_admin.SetAutomatedGa4ConfigurationOptOutResponse: """Post-rpc interceptor for set_automated_ga4_configuration_opt_out - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_set_automated_ga4_configuration_opt_out_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AnalyticsAdminService server but before - it is returned to user code. + it is returned to user code. This `post_set_automated_ga4_configuration_opt_out` interceptor runs + before the `post_set_automated_ga4_configuration_opt_out_with_metadata` interceptor. """ return response + def post_set_automated_ga4_configuration_opt_out_with_metadata( + self, + response: analytics_admin.SetAutomatedGa4ConfigurationOptOutResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + analytics_admin.SetAutomatedGa4ConfigurationOptOutResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for set_automated_ga4_configuration_opt_out + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnalyticsAdminService server but before it is returned to user code. + + We recommend only using this `post_set_automated_ga4_configuration_opt_out_with_metadata` + interceptor in new development instead of the `post_set_automated_ga4_configuration_opt_out` interceptor. + When both interceptors are used, this `post_set_automated_ga4_configuration_opt_out_with_metadata` interceptor runs after the + `post_set_automated_ga4_configuration_opt_out` interceptor. The (possibly modified) response returned by + `post_set_automated_ga4_configuration_opt_out` will be passed to + `post_set_automated_ga4_configuration_opt_out_with_metadata`. + """ + return response, metadata + def pre_update_access_binding( self, request: analytics_admin.UpdateAccessBindingRequest, @@ -4088,12 +6489,35 @@ def post_update_access_binding( ) -> resources.AccessBinding: """Post-rpc interceptor for update_access_binding - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_access_binding_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AnalyticsAdminService server but before - it is returned to user code. + it is returned to user code. This `post_update_access_binding` interceptor runs + before the `post_update_access_binding_with_metadata` interceptor. """ return response + def post_update_access_binding_with_metadata( + self, + response: resources.AccessBinding, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.AccessBinding, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_access_binding + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnalyticsAdminService server but before it is returned to user code. + + We recommend only using this `post_update_access_binding_with_metadata` + interceptor in new development instead of the `post_update_access_binding` interceptor. + When both interceptors are used, this `post_update_access_binding_with_metadata` interceptor runs after the + `post_update_access_binding` interceptor. 
The (possibly modified) response returned by + `post_update_access_binding` will be passed to + `post_update_access_binding_with_metadata`. + """ + return response, metadata + def pre_update_account( self, request: analytics_admin.UpdateAccountRequest, @@ -4111,12 +6535,35 @@ def pre_update_account( def post_update_account(self, response: resources.Account) -> resources.Account: """Post-rpc interceptor for update_account - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_account_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AnalyticsAdminService server but before - it is returned to user code. + it is returned to user code. This `post_update_account` interceptor runs + before the `post_update_account_with_metadata` interceptor. """ return response + def post_update_account_with_metadata( + self, + response: resources.Account, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.Account, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_account + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnalyticsAdminService server but before it is returned to user code. + + We recommend only using this `post_update_account_with_metadata` + interceptor in new development instead of the `post_update_account` interceptor. + When both interceptors are used, this `post_update_account_with_metadata` interceptor runs after the + `post_update_account` interceptor. The (possibly modified) response returned by + `post_update_account` will be passed to + `post_update_account_with_metadata`. + """ + return response, metadata + def pre_update_attribution_settings( self, request: analytics_admin.UpdateAttributionSettingsRequest, @@ -4137,12 +6584,35 @@ def post_update_attribution_settings( ) -> resources.AttributionSettings: """Post-rpc interceptor for update_attribution_settings - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_attribution_settings_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AnalyticsAdminService server but before - it is returned to user code. + it is returned to user code. This `post_update_attribution_settings` interceptor runs + before the `post_update_attribution_settings_with_metadata` interceptor. """ return response + def post_update_attribution_settings_with_metadata( + self, + response: resources.AttributionSettings, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.AttributionSettings, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_attribution_settings + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnalyticsAdminService server but before it is returned to user code. + + We recommend only using this `post_update_attribution_settings_with_metadata` + interceptor in new development instead of the `post_update_attribution_settings` interceptor. + When both interceptors are used, this `post_update_attribution_settings_with_metadata` interceptor runs after the + `post_update_attribution_settings` interceptor. The (possibly modified) response returned by + `post_update_attribution_settings` will be passed to + `post_update_attribution_settings_with_metadata`. 
+ """ + return response, metadata + def pre_update_audience( self, request: analytics_admin.UpdateAudienceRequest, @@ -4162,12 +6632,35 @@ def post_update_audience( ) -> gaa_audience.Audience: """Post-rpc interceptor for update_audience - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_audience_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AnalyticsAdminService server but before - it is returned to user code. + it is returned to user code. This `post_update_audience` interceptor runs + before the `post_update_audience_with_metadata` interceptor. """ return response + def post_update_audience_with_metadata( + self, + response: gaa_audience.Audience, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gaa_audience.Audience, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_audience + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnalyticsAdminService server but before it is returned to user code. + + We recommend only using this `post_update_audience_with_metadata` + interceptor in new development instead of the `post_update_audience` interceptor. + When both interceptors are used, this `post_update_audience_with_metadata` interceptor runs after the + `post_update_audience` interceptor. The (possibly modified) response returned by + `post_update_audience` will be passed to + `post_update_audience_with_metadata`. + """ + return response, metadata + def pre_update_big_query_link( self, request: analytics_admin.UpdateBigQueryLinkRequest, @@ -4188,12 +6681,35 @@ def post_update_big_query_link( ) -> resources.BigQueryLink: """Post-rpc interceptor for update_big_query_link - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_big_query_link_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AnalyticsAdminService server but before - it is returned to user code. + it is returned to user code. This `post_update_big_query_link` interceptor runs + before the `post_update_big_query_link_with_metadata` interceptor. """ return response + def post_update_big_query_link_with_metadata( + self, + response: resources.BigQueryLink, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.BigQueryLink, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_big_query_link + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnalyticsAdminService server but before it is returned to user code. + + We recommend only using this `post_update_big_query_link_with_metadata` + interceptor in new development instead of the `post_update_big_query_link` interceptor. + When both interceptors are used, this `post_update_big_query_link_with_metadata` interceptor runs after the + `post_update_big_query_link` interceptor. The (possibly modified) response returned by + `post_update_big_query_link` will be passed to + `post_update_big_query_link_with_metadata`. + """ + return response, metadata + def pre_update_calculated_metric( self, request: analytics_admin.UpdateCalculatedMetricRequest, @@ -4214,12 +6730,35 @@ def post_update_calculated_metric( ) -> resources.CalculatedMetric: """Post-rpc interceptor for update_calculated_metric - Override in a subclass to manipulate the response + DEPRECATED. 
Please use the `post_update_calculated_metric_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AnalyticsAdminService server but before - it is returned to user code. + it is returned to user code. This `post_update_calculated_metric` interceptor runs + before the `post_update_calculated_metric_with_metadata` interceptor. """ return response + def post_update_calculated_metric_with_metadata( + self, + response: resources.CalculatedMetric, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.CalculatedMetric, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_calculated_metric + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnalyticsAdminService server but before it is returned to user code. + + We recommend only using this `post_update_calculated_metric_with_metadata` + interceptor in new development instead of the `post_update_calculated_metric` interceptor. + When both interceptors are used, this `post_update_calculated_metric_with_metadata` interceptor runs after the + `post_update_calculated_metric` interceptor. The (possibly modified) response returned by + `post_update_calculated_metric` will be passed to + `post_update_calculated_metric_with_metadata`. + """ + return response, metadata + def pre_update_channel_group( self, request: analytics_admin.UpdateChannelGroupRequest, @@ -4240,12 +6779,35 @@ def post_update_channel_group( ) -> gaa_channel_group.ChannelGroup: """Post-rpc interceptor for update_channel_group - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_channel_group_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AnalyticsAdminService server but before - it is returned to user code. + it is returned to user code. This `post_update_channel_group` interceptor runs + before the `post_update_channel_group_with_metadata` interceptor. """ return response + def post_update_channel_group_with_metadata( + self, + response: gaa_channel_group.ChannelGroup, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gaa_channel_group.ChannelGroup, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_channel_group + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnalyticsAdminService server but before it is returned to user code. + + We recommend only using this `post_update_channel_group_with_metadata` + interceptor in new development instead of the `post_update_channel_group` interceptor. + When both interceptors are used, this `post_update_channel_group_with_metadata` interceptor runs after the + `post_update_channel_group` interceptor. The (possibly modified) response returned by + `post_update_channel_group` will be passed to + `post_update_channel_group_with_metadata`. + """ + return response, metadata + def pre_update_conversion_event( self, request: analytics_admin.UpdateConversionEventRequest, @@ -4266,12 +6828,35 @@ def post_update_conversion_event( ) -> resources.ConversionEvent: """Post-rpc interceptor for update_conversion_event - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_conversion_event_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response after it is returned by the AnalyticsAdminService server but before - it is returned to user code. + it is returned to user code. This `post_update_conversion_event` interceptor runs + before the `post_update_conversion_event_with_metadata` interceptor. """ return response + def post_update_conversion_event_with_metadata( + self, + response: resources.ConversionEvent, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.ConversionEvent, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_conversion_event + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnalyticsAdminService server but before it is returned to user code. + + We recommend only using this `post_update_conversion_event_with_metadata` + interceptor in new development instead of the `post_update_conversion_event` interceptor. + When both interceptors are used, this `post_update_conversion_event_with_metadata` interceptor runs after the + `post_update_conversion_event` interceptor. The (possibly modified) response returned by + `post_update_conversion_event` will be passed to + `post_update_conversion_event_with_metadata`. + """ + return response, metadata + def pre_update_custom_dimension( self, request: analytics_admin.UpdateCustomDimensionRequest, @@ -4292,12 +6877,35 @@ def post_update_custom_dimension( ) -> resources.CustomDimension: """Post-rpc interceptor for update_custom_dimension - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_custom_dimension_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AnalyticsAdminService server but before - it is returned to user code. + it is returned to user code. This `post_update_custom_dimension` interceptor runs + before the `post_update_custom_dimension_with_metadata` interceptor. """ return response + def post_update_custom_dimension_with_metadata( + self, + response: resources.CustomDimension, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.CustomDimension, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_custom_dimension + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnalyticsAdminService server but before it is returned to user code. + + We recommend only using this `post_update_custom_dimension_with_metadata` + interceptor in new development instead of the `post_update_custom_dimension` interceptor. + When both interceptors are used, this `post_update_custom_dimension_with_metadata` interceptor runs after the + `post_update_custom_dimension` interceptor. The (possibly modified) response returned by + `post_update_custom_dimension` will be passed to + `post_update_custom_dimension_with_metadata`. + """ + return response, metadata + def pre_update_custom_metric( self, request: analytics_admin.UpdateCustomMetricRequest, @@ -4318,12 +6926,35 @@ def post_update_custom_metric( ) -> resources.CustomMetric: """Post-rpc interceptor for update_custom_metric - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_custom_metric_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AnalyticsAdminService server but before - it is returned to user code. + it is returned to user code. 
This `post_update_custom_metric` interceptor runs + before the `post_update_custom_metric_with_metadata` interceptor. """ return response + def post_update_custom_metric_with_metadata( + self, + response: resources.CustomMetric, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.CustomMetric, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_custom_metric + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnalyticsAdminService server but before it is returned to user code. + + We recommend only using this `post_update_custom_metric_with_metadata` + interceptor in new development instead of the `post_update_custom_metric` interceptor. + When both interceptors are used, this `post_update_custom_metric_with_metadata` interceptor runs after the + `post_update_custom_metric` interceptor. The (possibly modified) response returned by + `post_update_custom_metric` will be passed to + `post_update_custom_metric_with_metadata`. + """ + return response, metadata + def pre_update_data_redaction_settings( self, request: analytics_admin.UpdateDataRedactionSettingsRequest, @@ -4344,12 +6975,37 @@ def post_update_data_redaction_settings( ) -> resources.DataRedactionSettings: """Post-rpc interceptor for update_data_redaction_settings - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_data_redaction_settings_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AnalyticsAdminService server but before - it is returned to user code. + it is returned to user code. This `post_update_data_redaction_settings` interceptor runs + before the `post_update_data_redaction_settings_with_metadata` interceptor. """ return response + def post_update_data_redaction_settings_with_metadata( + self, + response: resources.DataRedactionSettings, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + resources.DataRedactionSettings, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for update_data_redaction_settings + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnalyticsAdminService server but before it is returned to user code. + + We recommend only using this `post_update_data_redaction_settings_with_metadata` + interceptor in new development instead of the `post_update_data_redaction_settings` interceptor. + When both interceptors are used, this `post_update_data_redaction_settings_with_metadata` interceptor runs after the + `post_update_data_redaction_settings` interceptor. The (possibly modified) response returned by + `post_update_data_redaction_settings` will be passed to + `post_update_data_redaction_settings_with_metadata`. + """ + return response, metadata + def pre_update_data_retention_settings( self, request: analytics_admin.UpdateDataRetentionSettingsRequest, @@ -4370,12 +7026,37 @@ def post_update_data_retention_settings( ) -> resources.DataRetentionSettings: """Post-rpc interceptor for update_data_retention_settings - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_data_retention_settings_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AnalyticsAdminService server but before - it is returned to user code. + it is returned to user code. 
This `post_update_data_retention_settings` interceptor runs + before the `post_update_data_retention_settings_with_metadata` interceptor. """ return response + def post_update_data_retention_settings_with_metadata( + self, + response: resources.DataRetentionSettings, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + resources.DataRetentionSettings, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for update_data_retention_settings + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnalyticsAdminService server but before it is returned to user code. + + We recommend only using this `post_update_data_retention_settings_with_metadata` + interceptor in new development instead of the `post_update_data_retention_settings` interceptor. + When both interceptors are used, this `post_update_data_retention_settings_with_metadata` interceptor runs after the + `post_update_data_retention_settings` interceptor. The (possibly modified) response returned by + `post_update_data_retention_settings` will be passed to + `post_update_data_retention_settings_with_metadata`. + """ + return response, metadata + def pre_update_data_stream( self, request: analytics_admin.UpdateDataStreamRequest, @@ -4395,12 +7076,35 @@ def post_update_data_stream( ) -> resources.DataStream: """Post-rpc interceptor for update_data_stream - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_data_stream_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AnalyticsAdminService server but before - it is returned to user code. + it is returned to user code. This `post_update_data_stream` interceptor runs + before the `post_update_data_stream_with_metadata` interceptor. """ return response + def post_update_data_stream_with_metadata( + self, + response: resources.DataStream, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.DataStream, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_data_stream + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnalyticsAdminService server but before it is returned to user code. + + We recommend only using this `post_update_data_stream_with_metadata` + interceptor in new development instead of the `post_update_data_stream` interceptor. + When both interceptors are used, this `post_update_data_stream_with_metadata` interceptor runs after the + `post_update_data_stream` interceptor. The (possibly modified) response returned by + `post_update_data_stream` will be passed to + `post_update_data_stream_with_metadata`. + """ + return response, metadata + def pre_update_display_video360_advertiser_link( self, request: analytics_admin.UpdateDisplayVideo360AdvertiserLinkRequest, @@ -4421,12 +7125,37 @@ def post_update_display_video360_advertiser_link( ) -> resources.DisplayVideo360AdvertiserLink: """Post-rpc interceptor for update_display_video360_advertiser_link - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_display_video360_advertiser_link_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AnalyticsAdminService server but before - it is returned to user code. + it is returned to user code. 
This `post_update_display_video360_advertiser_link` interceptor runs + before the `post_update_display_video360_advertiser_link_with_metadata` interceptor. """ return response + def post_update_display_video360_advertiser_link_with_metadata( + self, + response: resources.DisplayVideo360AdvertiserLink, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + resources.DisplayVideo360AdvertiserLink, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for update_display_video360_advertiser_link + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnalyticsAdminService server but before it is returned to user code. + + We recommend only using this `post_update_display_video360_advertiser_link_with_metadata` + interceptor in new development instead of the `post_update_display_video360_advertiser_link` interceptor. + When both interceptors are used, this `post_update_display_video360_advertiser_link_with_metadata` interceptor runs after the + `post_update_display_video360_advertiser_link` interceptor. The (possibly modified) response returned by + `post_update_display_video360_advertiser_link` will be passed to + `post_update_display_video360_advertiser_link_with_metadata`. + """ + return response, metadata + def pre_update_enhanced_measurement_settings( self, request: analytics_admin.UpdateEnhancedMeasurementSettingsRequest, @@ -4447,12 +7176,37 @@ def post_update_enhanced_measurement_settings( ) -> resources.EnhancedMeasurementSettings: """Post-rpc interceptor for update_enhanced_measurement_settings - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_enhanced_measurement_settings_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AnalyticsAdminService server but before - it is returned to user code. + it is returned to user code. This `post_update_enhanced_measurement_settings` interceptor runs + before the `post_update_enhanced_measurement_settings_with_metadata` interceptor. """ return response + def post_update_enhanced_measurement_settings_with_metadata( + self, + response: resources.EnhancedMeasurementSettings, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + resources.EnhancedMeasurementSettings, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for update_enhanced_measurement_settings + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnalyticsAdminService server but before it is returned to user code. + + We recommend only using this `post_update_enhanced_measurement_settings_with_metadata` + interceptor in new development instead of the `post_update_enhanced_measurement_settings` interceptor. + When both interceptors are used, this `post_update_enhanced_measurement_settings_with_metadata` interceptor runs after the + `post_update_enhanced_measurement_settings` interceptor. The (possibly modified) response returned by + `post_update_enhanced_measurement_settings` will be passed to + `post_update_enhanced_measurement_settings_with_metadata`. + """ + return response, metadata + def pre_update_event_create_rule( self, request: analytics_admin.UpdateEventCreateRuleRequest, @@ -4473,12 +7227,37 @@ def post_update_event_create_rule( ) -> event_create_and_edit.EventCreateRule: """Post-rpc interceptor for update_event_create_rule - Override in a subclass to manipulate the response + DEPRECATED. 
Please use the `post_update_event_create_rule_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AnalyticsAdminService server but before - it is returned to user code. + it is returned to user code. This `post_update_event_create_rule` interceptor runs + before the `post_update_event_create_rule_with_metadata` interceptor. """ return response + def post_update_event_create_rule_with_metadata( + self, + response: event_create_and_edit.EventCreateRule, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + event_create_and_edit.EventCreateRule, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for update_event_create_rule + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnalyticsAdminService server but before it is returned to user code. + + We recommend only using this `post_update_event_create_rule_with_metadata` + interceptor in new development instead of the `post_update_event_create_rule` interceptor. + When both interceptors are used, this `post_update_event_create_rule_with_metadata` interceptor runs after the + `post_update_event_create_rule` interceptor. The (possibly modified) response returned by + `post_update_event_create_rule` will be passed to + `post_update_event_create_rule_with_metadata`. + """ + return response, metadata + def pre_update_event_edit_rule( self, request: analytics_admin.UpdateEventEditRuleRequest, @@ -4499,11 +7278,36 @@ def post_update_event_edit_rule( ) -> event_create_and_edit.EventEditRule: """Post-rpc interceptor for update_event_edit_rule - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_event_edit_rule_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AnalyticsAdminService server but before - it is returned to user code. + it is returned to user code. This `post_update_event_edit_rule` interceptor runs + before the `post_update_event_edit_rule_with_metadata` interceptor. + """ + return response + + def post_update_event_edit_rule_with_metadata( + self, + response: event_create_and_edit.EventEditRule, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + event_create_and_edit.EventEditRule, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for update_event_edit_rule + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnalyticsAdminService server but before it is returned to user code. + + We recommend only using this `post_update_event_edit_rule_with_metadata` + interceptor in new development instead of the `post_update_event_edit_rule` interceptor. + When both interceptors are used, this `post_update_event_edit_rule_with_metadata` interceptor runs after the + `post_update_event_edit_rule` interceptor. The (possibly modified) response returned by + `post_update_event_edit_rule` will be passed to + `post_update_event_edit_rule_with_metadata`. """ - return response + return response, metadata def pre_update_expanded_data_set( self, @@ -4525,12 +7329,37 @@ def post_update_expanded_data_set( ) -> gaa_expanded_data_set.ExpandedDataSet: """Post-rpc interceptor for update_expanded_data_set - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_expanded_data_set_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response after it is returned by the AnalyticsAdminService server but before - it is returned to user code. + it is returned to user code. This `post_update_expanded_data_set` interceptor runs + before the `post_update_expanded_data_set_with_metadata` interceptor. """ return response + def post_update_expanded_data_set_with_metadata( + self, + response: gaa_expanded_data_set.ExpandedDataSet, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + gaa_expanded_data_set.ExpandedDataSet, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for update_expanded_data_set + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnalyticsAdminService server but before it is returned to user code. + + We recommend only using this `post_update_expanded_data_set_with_metadata` + interceptor in new development instead of the `post_update_expanded_data_set` interceptor. + When both interceptors are used, this `post_update_expanded_data_set_with_metadata` interceptor runs after the + `post_update_expanded_data_set` interceptor. The (possibly modified) response returned by + `post_update_expanded_data_set` will be passed to + `post_update_expanded_data_set_with_metadata`. + """ + return response, metadata + def pre_update_google_ads_link( self, request: analytics_admin.UpdateGoogleAdsLinkRequest, @@ -4551,12 +7380,35 @@ def post_update_google_ads_link( ) -> resources.GoogleAdsLink: """Post-rpc interceptor for update_google_ads_link - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_google_ads_link_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AnalyticsAdminService server but before - it is returned to user code. + it is returned to user code. This `post_update_google_ads_link` interceptor runs + before the `post_update_google_ads_link_with_metadata` interceptor. """ return response + def post_update_google_ads_link_with_metadata( + self, + response: resources.GoogleAdsLink, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.GoogleAdsLink, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_google_ads_link + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnalyticsAdminService server but before it is returned to user code. + + We recommend only using this `post_update_google_ads_link_with_metadata` + interceptor in new development instead of the `post_update_google_ads_link` interceptor. + When both interceptors are used, this `post_update_google_ads_link_with_metadata` interceptor runs after the + `post_update_google_ads_link` interceptor. The (possibly modified) response returned by + `post_update_google_ads_link` will be passed to + `post_update_google_ads_link_with_metadata`. + """ + return response, metadata + def pre_update_google_signals_settings( self, request: analytics_admin.UpdateGoogleSignalsSettingsRequest, @@ -4577,12 +7429,37 @@ def post_update_google_signals_settings( ) -> resources.GoogleSignalsSettings: """Post-rpc interceptor for update_google_signals_settings - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_google_signals_settings_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response after it is returned by the AnalyticsAdminService server but before - it is returned to user code. + it is returned to user code. This `post_update_google_signals_settings` interceptor runs + before the `post_update_google_signals_settings_with_metadata` interceptor. """ return response + def post_update_google_signals_settings_with_metadata( + self, + response: resources.GoogleSignalsSettings, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + resources.GoogleSignalsSettings, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for update_google_signals_settings + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnalyticsAdminService server but before it is returned to user code. + + We recommend only using this `post_update_google_signals_settings_with_metadata` + interceptor in new development instead of the `post_update_google_signals_settings` interceptor. + When both interceptors are used, this `post_update_google_signals_settings_with_metadata` interceptor runs after the + `post_update_google_signals_settings` interceptor. The (possibly modified) response returned by + `post_update_google_signals_settings` will be passed to + `post_update_google_signals_settings_with_metadata`. + """ + return response, metadata + def pre_update_key_event( self, request: analytics_admin.UpdateKeyEventRequest, @@ -4600,12 +7477,35 @@ def pre_update_key_event( def post_update_key_event(self, response: resources.KeyEvent) -> resources.KeyEvent: """Post-rpc interceptor for update_key_event - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_key_event_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AnalyticsAdminService server but before - it is returned to user code. + it is returned to user code. This `post_update_key_event` interceptor runs + before the `post_update_key_event_with_metadata` interceptor. """ return response + def post_update_key_event_with_metadata( + self, + response: resources.KeyEvent, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.KeyEvent, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_key_event + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnalyticsAdminService server but before it is returned to user code. + + We recommend only using this `post_update_key_event_with_metadata` + interceptor in new development instead of the `post_update_key_event` interceptor. + When both interceptors are used, this `post_update_key_event_with_metadata` interceptor runs after the + `post_update_key_event` interceptor. The (possibly modified) response returned by + `post_update_key_event` will be passed to + `post_update_key_event_with_metadata`. + """ + return response, metadata + def pre_update_measurement_protocol_secret( self, request: analytics_admin.UpdateMeasurementProtocolSecretRequest, @@ -4626,12 +7526,37 @@ def post_update_measurement_protocol_secret( ) -> resources.MeasurementProtocolSecret: """Post-rpc interceptor for update_measurement_protocol_secret - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_measurement_protocol_secret_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response after it is returned by the AnalyticsAdminService server but before - it is returned to user code. + it is returned to user code. This `post_update_measurement_protocol_secret` interceptor runs + before the `post_update_measurement_protocol_secret_with_metadata` interceptor. """ return response + def post_update_measurement_protocol_secret_with_metadata( + self, + response: resources.MeasurementProtocolSecret, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + resources.MeasurementProtocolSecret, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for update_measurement_protocol_secret + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnalyticsAdminService server but before it is returned to user code. + + We recommend only using this `post_update_measurement_protocol_secret_with_metadata` + interceptor in new development instead of the `post_update_measurement_protocol_secret` interceptor. + When both interceptors are used, this `post_update_measurement_protocol_secret_with_metadata` interceptor runs after the + `post_update_measurement_protocol_secret` interceptor. The (possibly modified) response returned by + `post_update_measurement_protocol_secret` will be passed to + `post_update_measurement_protocol_secret_with_metadata`. + """ + return response, metadata + def pre_update_property( self, request: analytics_admin.UpdatePropertyRequest, @@ -4649,12 +7574,35 @@ def pre_update_property( def post_update_property(self, response: resources.Property) -> resources.Property: """Post-rpc interceptor for update_property - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_property_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AnalyticsAdminService server but before - it is returned to user code. + it is returned to user code. This `post_update_property` interceptor runs + before the `post_update_property_with_metadata` interceptor. """ return response + def post_update_property_with_metadata( + self, + response: resources.Property, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.Property, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_property + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnalyticsAdminService server but before it is returned to user code. + + We recommend only using this `post_update_property_with_metadata` + interceptor in new development instead of the `post_update_property` interceptor. + When both interceptors are used, this `post_update_property_with_metadata` interceptor runs after the + `post_update_property` interceptor. The (possibly modified) response returned by + `post_update_property` will be passed to + `post_update_property_with_metadata`. + """ + return response, metadata + def pre_update_search_ads360_link( self, request: analytics_admin.UpdateSearchAds360LinkRequest, @@ -4675,12 +7623,35 @@ def post_update_search_ads360_link( ) -> resources.SearchAds360Link: """Post-rpc interceptor for update_search_ads360_link - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_search_ads360_link_with_metadata` + interceptor instead. 
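# Illustrative sketch, not part of the patch: how the paired hooks documented
# above compose when both are overridden. Per the docstrings, the deprecated
# `post_update_property` hook still runs first, and its (possibly modified)
# response is then handed to `post_update_property_with_metadata` together with
# the response metadata. Module path and types assume the v1alpha package
# layout; adjust for your version.
from typing import Sequence, Tuple, Union

from google.analytics import admin_v1alpha
from google.analytics.admin_v1alpha.services.analytics_admin_service.transports.rest import (
    AnalyticsAdminServiceRestInterceptor,
)


class LoggingInterceptor(AnalyticsAdminServiceRestInterceptor):
    def post_update_property(
        self, response: admin_v1alpha.Property
    ) -> admin_v1alpha.Property:
        # Deprecated hook: still invoked, and runs before the *_with_metadata hook.
        print("post_update_property:", response.name)
        return response

    def post_update_property_with_metadata(
        self,
        response: admin_v1alpha.Property,
        metadata: Sequence[Tuple[str, Union[str, bytes]]],
    ) -> Tuple[admin_v1alpha.Property, Sequence[Tuple[str, Union[str, bytes]]]]:
        # Recommended hook: receives the response returned by post_update_property
        # plus the HTTP response headers as metadata.
        print("post_update_property_with_metadata headers:", dict(metadata))
        return response, metadata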
+ + Override in a subclass to read or manipulate the response after it is returned by the AnalyticsAdminService server but before - it is returned to user code. + it is returned to user code. This `post_update_search_ads360_link` interceptor runs + before the `post_update_search_ads360_link_with_metadata` interceptor. """ return response + def post_update_search_ads360_link_with_metadata( + self, + response: resources.SearchAds360Link, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.SearchAds360Link, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_search_ads360_link + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnalyticsAdminService server but before it is returned to user code. + + We recommend only using this `post_update_search_ads360_link_with_metadata` + interceptor in new development instead of the `post_update_search_ads360_link` interceptor. + When both interceptors are used, this `post_update_search_ads360_link_with_metadata` interceptor runs after the + `post_update_search_ads360_link` interceptor. The (possibly modified) response returned by + `post_update_search_ads360_link` will be passed to + `post_update_search_ads360_link_with_metadata`. + """ + return response, metadata + def pre_update_sk_ad_network_conversion_value_schema( self, request: analytics_admin.UpdateSKAdNetworkConversionValueSchemaRequest, @@ -4701,12 +7672,38 @@ def post_update_sk_ad_network_conversion_value_schema( ) -> resources.SKAdNetworkConversionValueSchema: """Post-rpc interceptor for update_sk_ad_network_conversion_value_schema - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_sk_ad_network_conversion_value_schema_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AnalyticsAdminService server but before - it is returned to user code. + it is returned to user code. This `post_update_sk_ad_network_conversion_value_schema` interceptor runs + before the `post_update_sk_ad_network_conversion_value_schema_with_metadata` interceptor. """ return response + def post_update_sk_ad_network_conversion_value_schema_with_metadata( + self, + response: resources.SKAdNetworkConversionValueSchema, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + resources.SKAdNetworkConversionValueSchema, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for update_sk_ad_network_conversion_value_schema + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnalyticsAdminService server but before it is returned to user code. + + We recommend only using this `post_update_sk_ad_network_conversion_value_schema_with_metadata` + interceptor in new development instead of the `post_update_sk_ad_network_conversion_value_schema` interceptor. + When both interceptors are used, this `post_update_sk_ad_network_conversion_value_schema_with_metadata` interceptor runs after the + `post_update_sk_ad_network_conversion_value_schema` interceptor. The (possibly modified) response returned by + `post_update_sk_ad_network_conversion_value_schema` will be passed to + `post_update_sk_ad_network_conversion_value_schema_with_metadata`. 
+ """ + return response, metadata + def pre_update_subproperty_event_filter( self, request: analytics_admin.UpdateSubpropertyEventFilterRequest, @@ -4727,12 +7724,38 @@ def post_update_subproperty_event_filter( ) -> gaa_subproperty_event_filter.SubpropertyEventFilter: """Post-rpc interceptor for update_subproperty_event_filter - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_subproperty_event_filter_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AnalyticsAdminService server but before - it is returned to user code. + it is returned to user code. This `post_update_subproperty_event_filter` interceptor runs + before the `post_update_subproperty_event_filter_with_metadata` interceptor. """ return response + def post_update_subproperty_event_filter_with_metadata( + self, + response: gaa_subproperty_event_filter.SubpropertyEventFilter, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + gaa_subproperty_event_filter.SubpropertyEventFilter, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for update_subproperty_event_filter + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnalyticsAdminService server but before it is returned to user code. + + We recommend only using this `post_update_subproperty_event_filter_with_metadata` + interceptor in new development instead of the `post_update_subproperty_event_filter` interceptor. + When both interceptors are used, this `post_update_subproperty_event_filter_with_metadata` interceptor runs after the + `post_update_subproperty_event_filter` interceptor. The (possibly modified) response returned by + `post_update_subproperty_event_filter` will be passed to + `post_update_subproperty_event_filter_with_metadata`. + """ + return response, metadata + @dataclasses.dataclass class AnalyticsAdminServiceRestStub: @@ -4744,7 +7767,7 @@ class AnalyticsAdminServiceRestStub: class AnalyticsAdminServiceRestTransport(_BaseAnalyticsAdminServiceRestTransport): """REST backend synchronous transport for AnalyticsAdminService. - Service Interface for the Analytics Admin API (GA4). + Service Interface for the Google Analytics Admin API. 
This class defines the same methods as the primary client, so the primary client can load the underlying transport implementation @@ -4952,6 +7975,13 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_acknowledge_user_data_collection(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_acknowledge_user_data_collection_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -5124,6 +8154,13 @@ def __call__( resp = self._interceptor.post_approve_display_video360_advertiser_link_proposal( resp ) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_approve_display_video360_advertiser_link_proposal_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -5631,6 +8668,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_batch_create_access_bindings(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_batch_create_access_bindings_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -5898,6 +8939,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_batch_get_access_bindings(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_batch_get_access_bindings_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -6053,6 +9098,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_batch_update_access_bindings(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_batch_update_access_bindings_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -6138,9 +9187,9 @@ def __call__( Returns: ~.resources.DisplayVideo360AdvertiserLinkProposal: - A proposal for a link between a GA4 - property and a Display & Video 360 - advertiser. + A proposal for a link between a + Google Analytics property and a Display + & Video 360 advertiser. 
A proposal is converted to a DisplayVideo360AdvertiserLink once @@ -6228,6 +9277,13 @@ def __call__( resp ) ) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_cancel_display_video360_advertiser_link_proposal_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -6386,6 +9442,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_access_binding(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_access_binding_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -6463,8 +9523,9 @@ def __call__( Returns: ~.resources.AdSenseLink: - A link between a GA4 Property and an - AdSense for Content ad client. + A link between a Google Analytics + property and an AdSense for Content ad + client. """ @@ -6540,6 +9601,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_ad_sense_link(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_ad_sense_link_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -6617,7 +9682,7 @@ def __call__( Returns: ~.gaa_audience.Audience: - A resource message representing a GA4 + A resource message representing an Audience. """ @@ -6690,6 +9755,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_audience(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_audience_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -6767,8 +9836,8 @@ def __call__( Returns: ~.resources.BigQueryLink: - A link between a GA4 Property and - BigQuery project. + A link between a Google Analytics + property and BigQuery project. 
""" @@ -6844,6 +9913,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_big_query_link(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_big_query_link_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -6994,6 +10067,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_calculated_metric(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_calculated_metric_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -7148,6 +10225,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_channel_group(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_channel_group_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -7300,6 +10381,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_connected_site_tag(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_connected_site_tag_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -7456,6 +10541,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_conversion_event(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_conversion_event_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -7608,6 +10697,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_custom_dimension(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_custom_dimension_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -7760,6 +10853,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_custom_metric(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_custom_metric_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -7914,6 +11011,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_data_stream(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_data_stream_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -7994,8 +11095,9 @@ def __call__( Returns: 
~.resources.DisplayVideo360AdvertiserLink: - A link between a GA4 property and a - Display & Video 360 advertiser. + A link between a Google Analytics + property and a Display & Video 360 + advertiser. """ @@ -8072,6 +11174,13 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_display_video360_advertiser_link(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_create_display_video360_advertiser_link_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -8155,9 +11264,9 @@ def __call__( Returns: ~.resources.DisplayVideo360AdvertiserLinkProposal: - A proposal for a link between a GA4 - property and a Display & Video 360 - advertiser. + A proposal for a link between a + Google Analytics property and a Display + & Video 360 advertiser. A proposal is converted to a DisplayVideo360AdvertiserLink once @@ -8245,6 +11354,13 @@ def __call__( resp ) ) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_create_display_video360_advertiser_link_proposal_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -8415,6 +11531,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_event_create_rule(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_event_create_rule_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -8583,6 +11703,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_event_edit_rule(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_event_edit_rule_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -8662,9 +11786,7 @@ def __call__( Returns: ~.gaa_expanded_data_set.ExpandedDataSet: - A resource message representing a GA4 - ExpandedDataSet. - + A resource message representing an ``ExpandedDataSet``. """ http_options = ( @@ -8739,6 +11861,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_expanded_data_set(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_expanded_data_set_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -8818,8 +11944,8 @@ def __call__( Returns: ~.resources.FirebaseLink: - A link between a GA4 property and a - Firebase project. + A link between a Google Analytics + property and a Firebase project. 
""" @@ -8895,6 +12021,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_firebase_link(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_firebase_link_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -8972,8 +12102,8 @@ def __call__( Returns: ~.resources.GoogleAdsLink: - A link between a GA4 property and a - Google Ads account. + A link between a Google Analytics + property and a Google Ads account. """ @@ -9049,6 +12179,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_google_ads_link(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_google_ads_link_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -9201,6 +12335,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_key_event(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_key_event_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -9359,6 +12497,13 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_measurement_protocol_secret(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_create_measurement_protocol_secret_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -9439,7 +12584,7 @@ def __call__( Returns: ~.resources.Property: A resource message representing a - Google Analytics GA4 property. + Google Analytics property. 
""" @@ -9511,6 +12656,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_property(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_property_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -9665,6 +12814,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_rollup_property(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_rollup_property_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -9826,6 +12979,13 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_rollup_property_source_link(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_create_rollup_property_source_link_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -9905,8 +13065,8 @@ def __call__( Returns: ~.resources.SearchAds360Link: - A link between a GA4 property and a - Search Ads 360 entity. + A link between a Google Analytics + property and a Search Ads 360 entity. """ @@ -9980,6 +13140,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_search_ads360_link(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_search_ads360_link_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -10141,6 +13305,13 @@ def __call__( resp = self._interceptor.post_create_sk_ad_network_conversion_value_schema( resp ) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_create_sk_ad_network_conversion_value_schema_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -10223,8 +13394,9 @@ def __call__( Returns: ~.gaa_subproperty_event_filter.SubpropertyEventFilter: - A resource message representing a GA4 - Subproperty event filter. + A resource message representing a + Google Analytics subproperty event + filter. """ @@ -10298,6 +13470,13 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_subproperty_event_filter(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_create_subproperty_event_filter_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -12404,7 +15583,7 @@ def __call__( Returns: ~.resources.Property: A resource message representing a - Google Analytics GA4 property. + Google Analytics property. 
""" @@ -12471,6 +15650,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_property(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_property_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -13091,6 +16274,13 @@ def __call__( resp = self._interceptor.post_fetch_automated_ga4_configuration_opt_out( resp ) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_fetch_automated_ga4_configuration_opt_out_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -13240,6 +16430,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_fetch_connected_ga4_property(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_fetch_connected_ga4_property_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -13392,6 +16586,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_access_binding(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_access_binding_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -13535,6 +16733,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_account(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_account_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -13611,8 +16813,9 @@ def __call__( Returns: ~.resources.AdSenseLink: - A link between a GA4 Property and an - AdSense for Content ad client. + A link between a Google Analytics + property and an AdSense for Content ad + client. """ @@ -13681,6 +16884,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_ad_sense_link(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_ad_sense_link_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -13828,6 +17035,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_attribution_settings(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_attribution_settings_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -13903,7 +17114,7 @@ def __call__( Returns: ~.audience.Audience: - A resource message representing a GA4 + A resource message representing an Audience. 
""" @@ -13971,6 +17182,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_audience(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_audience_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -14047,8 +17262,8 @@ def __call__( Returns: ~.resources.BigQueryLink: - A link between a GA4 Property and - BigQuery project. + A link between a Google Analytics + property and BigQuery project. """ @@ -14119,6 +17334,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_big_query_link(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_big_query_link_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -14265,6 +17484,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_calculated_metric(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_calculated_metric_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -14413,6 +17636,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_channel_group(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_channel_group_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -14561,6 +17788,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_conversion_event(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_conversion_event_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -14707,6 +17938,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_custom_dimension(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_custom_dimension_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -14853,6 +18088,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_custom_metric(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_custom_metric_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -15001,6 +18240,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_data_redaction_settings(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_data_redaction_settings_with_metadata( + resp, 
response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -15148,6 +18391,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_data_retention_settings(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_data_retention_settings_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -15295,6 +18542,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_data_sharing_settings(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_data_sharing_settings_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -15439,6 +18690,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_data_stream(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_data_stream_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -15518,8 +18773,9 @@ def __call__( Returns: ~.resources.DisplayVideo360AdvertiserLink: - A link between a GA4 property and a - Display & Video 360 advertiser. + A link between a Google Analytics + property and a Display & Video 360 + advertiser. """ @@ -15591,6 +18847,13 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_display_video360_advertiser_link(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_get_display_video360_advertiser_link_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -15673,9 +18936,9 @@ def __call__( Returns: ~.resources.DisplayVideo360AdvertiserLinkProposal: - A proposal for a link between a GA4 - property and a Display & Video 360 - advertiser. + A proposal for a link between a + Google Analytics property and a Display + & Video 360 advertiser. 
A proposal is converted to a DisplayVideo360AdvertiserLink once @@ -15756,6 +19019,13 @@ def __call__( resp = self._interceptor.post_get_display_video360_advertiser_link_proposal( resp ) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_get_display_video360_advertiser_link_proposal_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -15911,6 +19181,13 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_enhanced_measurement_settings(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_get_enhanced_measurement_settings_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -16073,6 +19350,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_event_create_rule(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_event_create_rule_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -16235,6 +19516,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_event_edit_rule(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_event_edit_rule_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -16313,9 +19598,7 @@ def __call__( Returns: ~.expanded_data_set.ExpandedDataSet: - A resource message representing a GA4 - ExpandedDataSet. - + A resource message representing an ``ExpandedDataSet``. 
""" http_options = ( @@ -16385,6 +19668,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_expanded_data_set(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_expanded_data_set_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -16537,6 +19824,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_global_site_tag(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_global_site_tag_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -16684,6 +19975,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_google_signals_settings(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_google_signals_settings_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -16827,6 +20122,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_key_event(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_key_event_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -16976,6 +20275,13 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_measurement_protocol_secret(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_get_measurement_protocol_secret_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -17054,7 +20360,7 @@ def __call__( Returns: ~.resources.Property: A resource message representing a - Google Analytics GA4 property. + Google Analytics property. """ @@ -17121,6 +20427,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_property(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_property_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -17271,6 +20581,13 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_rollup_property_source_link(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_get_rollup_property_source_link_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -17349,8 +20666,8 @@ def __call__( Returns: ~.resources.SearchAds360Link: - A link between a GA4 property and a - Search Ads 360 entity. + A link between a Google Analytics + property and a Search Ads 360 entity. 
""" @@ -17421,6 +20738,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_search_ads360_link(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_search_ads360_link_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -17575,6 +20896,13 @@ def __call__( resp = self._interceptor.post_get_sk_ad_network_conversion_value_schema( resp ) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_get_sk_ad_network_conversion_value_schema_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -17654,8 +20982,9 @@ def __call__( Returns: ~.subproperty_event_filter.SubpropertyEventFilter: - A resource message representing a GA4 - Subproperty event filter. + A resource message representing a + Google Analytics subproperty event + filter. """ @@ -17724,6 +21053,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_subproperty_event_filter(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_subproperty_event_filter_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -17876,6 +21209,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_access_bindings(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_access_bindings_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -18019,6 +21356,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_accounts(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_accounts_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -18169,6 +21510,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_account_summaries(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_account_summaries_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -18319,6 +21664,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_ad_sense_links(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_ad_sense_links_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -18465,6 +21814,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_audiences(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = 
self._interceptor.post_list_audiences_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -18615,6 +21968,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_big_query_links(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_big_query_links_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -18765,6 +22122,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_calculated_metrics(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_calculated_metrics_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -18915,6 +22276,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_channel_groups(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_channel_groups_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -19071,6 +22436,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_connected_site_tags(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_connected_site_tags_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -19221,6 +22590,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_conversion_events(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_conversion_events_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -19371,6 +22744,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_custom_dimensions(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_custom_dimensions_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -19521,6 +22898,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_custom_metrics(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_custom_metrics_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -19671,6 +23052,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_data_streams(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_data_streams_with_metadata( + resp, response_metadata 
+ ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -19835,6 +23220,13 @@ def __call__( resp ) ) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_list_display_video360_advertiser_link_proposals_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -19991,6 +23383,13 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_display_video360_advertiser_links(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_list_display_video360_advertiser_links_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -20141,6 +23540,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_event_create_rules(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_event_create_rules_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -20291,6 +23694,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_event_edit_rules(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_event_edit_rules_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -20441,6 +23848,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_expanded_data_sets(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_expanded_data_sets_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -20591,6 +24002,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_firebase_links(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_firebase_links_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -20741,6 +24156,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_google_ads_links(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_google_ads_links_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -20886,6 +24305,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_key_events(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_key_events_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -21037,6 +24460,13 @@ def 
__call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_measurement_protocol_secrets(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_list_measurement_protocol_secrets_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -21185,6 +24615,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_properties(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_properties_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -21336,6 +24770,13 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_rollup_property_source_links(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_list_rollup_property_source_links_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -21488,6 +24929,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_search_ads360_links(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_search_ads360_links_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -21648,6 +25093,13 @@ def __call__( resp = self._interceptor.post_list_sk_ad_network_conversion_value_schemas( resp ) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_list_sk_ad_network_conversion_value_schemas_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -21799,6 +25251,13 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_subproperty_event_filters(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_list_subproperty_event_filters_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -21955,6 +25414,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_provision_account_ticket(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_provision_account_ticket_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -22111,6 +25574,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_provision_subproperty(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_provision_subproperty_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -22385,6 
+25852,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_run_access_report(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_run_access_report_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -22540,6 +26011,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_search_change_history_events(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_search_change_history_events_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -22706,6 +26181,13 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_set_automated_ga4_configuration_opt_out(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_set_automated_ga4_configuration_opt_out_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -22862,6 +26344,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_access_binding(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_access_binding_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -23012,6 +26498,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_account(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_account_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -23166,6 +26656,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_attribution_settings(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_attribution_settings_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -23243,7 +26737,7 @@ def __call__( Returns: ~.gaa_audience.Audience: - A resource message representing a GA4 + A resource message representing an Audience. """ @@ -23316,6 +26810,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_audience(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_audience_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -23393,8 +26891,8 @@ def __call__( Returns: ~.resources.BigQueryLink: - A link between a GA4 Property and - BigQuery project. + A link between a Google Analytics + property and BigQuery project. 
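Note on the repeated hunk above: after parsing each REST response, the generated stub now converts the HTTP response headers into a sequence of (key, value) string tuples and routes the result through the new per-RPC post_*_with_metadata interceptor hook, keeping the returned response and discarding the (possibly modified) metadata. A minimal sketch of that transformation, using a plain dict in place of the requests response headers (an illustration only, not code from this diff):

.. code-block:: python

    from typing import List, Tuple

    def headers_to_metadata(headers: dict) -> List[Tuple[str, str]]:
        # Mirrors `[(k, str(v)) for k, v in response.headers.items()]`
        # from the generated call sites above.
        return [(k, str(v)) for k, v in headers.items()]

    # Example: what the *_with_metadata hook receives alongside the parsed response.
    print(headers_to_metadata({"content-type": "application/json", "x-goog-request-id": "abc123"}))
    # [('content-type', 'application/json'), ('x-goog-request-id', 'abc123')]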
""" @@ -23470,6 +26968,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_big_query_link(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_big_query_link_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -23620,6 +27122,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_calculated_metric(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_calculated_metric_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -23774,6 +27280,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_channel_group(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_channel_group_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -23928,6 +27438,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_conversion_event(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_conversion_event_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -24080,6 +27594,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_custom_dimension(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_custom_dimension_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -24232,6 +27750,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_custom_metric(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_custom_metric_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -24388,6 +27910,13 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_data_redaction_settings(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_update_data_redaction_settings_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -24543,6 +28072,13 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_data_retention_settings(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_update_data_retention_settings_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ 
-24697,6 +28233,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_data_stream(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_data_stream_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -24777,8 +28317,9 @@ def __call__( Returns: ~.resources.DisplayVideo360AdvertiserLink: - A link between a GA4 property and a - Display & Video 360 advertiser. + A link between a Google Analytics + property and a Display & Video 360 + advertiser. """ @@ -24855,6 +28396,13 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_display_video360_advertiser_link(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_update_display_video360_advertiser_link_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -25017,6 +28565,13 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_enhanced_measurement_settings(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_update_enhanced_measurement_settings_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -25185,6 +28740,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_event_create_rule(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_event_create_rule_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -25353,6 +28912,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_event_edit_rule(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_event_edit_rule_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -25432,9 +28995,7 @@ def __call__( Returns: ~.gaa_expanded_data_set.ExpandedDataSet: - A resource message representing a GA4 - ExpandedDataSet. - + A resource message representing an ``ExpandedDataSet``. """ http_options = ( @@ -25509,6 +29070,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_expanded_data_set(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_expanded_data_set_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -25588,8 +29153,8 @@ def __call__( Returns: ~.resources.GoogleAdsLink: - A link between a GA4 property and a - Google Ads account. + A link between a Google Analytics + property and a Google Ads account. 
""" @@ -25665,6 +29230,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_google_ads_link(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_google_ads_link_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -25820,6 +29389,13 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_google_signals_settings(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_update_google_signals_settings_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -25972,6 +29548,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_key_event(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_key_event_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -26130,6 +29710,13 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_measurement_protocol_secret(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_update_measurement_protocol_secret_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -26210,7 +29797,7 @@ def __call__( Returns: ~.resources.Property: A resource message representing a - Google Analytics GA4 property. + Google Analytics property. """ @@ -26282,6 +29869,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_property(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_property_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -26359,8 +29950,8 @@ def __call__( Returns: ~.resources.SearchAds360Link: - A link between a GA4 property and a - Search Ads 360 entity. + A link between a Google Analytics + property and a Search Ads 360 entity. 
""" @@ -26434,6 +30025,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_search_ads360_link(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_search_ads360_link_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -26595,6 +30190,13 @@ def __call__( resp = self._interceptor.post_update_sk_ad_network_conversion_value_schema( resp ) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_update_sk_ad_network_conversion_value_schema_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -26677,8 +30279,9 @@ def __call__( Returns: ~.gaa_subproperty_event_filter.SubpropertyEventFilter: - A resource message representing a GA4 - Subproperty event filter. + A resource message representing a + Google Analytics subproperty event + filter. """ @@ -26752,6 +30355,13 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_subproperty_event_filter(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_update_subproperty_event_filter_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-analytics-admin/google/analytics/admin_v1alpha/types/analytics_admin.py b/packages/google-analytics-admin/google/analytics/admin_v1alpha/types/analytics_admin.py index 97fb01fe2add..2c1482e577c7 100644 --- a/packages/google-analytics-admin/google/analytics/admin_v1alpha/types/analytics_admin.py +++ b/packages/google-analytics-admin/google/analytics/admin_v1alpha/types/analytics_admin.py @@ -244,10 +244,10 @@ class RunAccessReportRequest(proto.Message): To request at the property level, entity should be for example 'properties/123' if "123" is your - GA4 property ID. To request at the account - level, entity should be for example - 'accounts/1234' if "1234" is your GA4 Account - ID. + Google Analytics property ID. To request at the + account level, entity should be for example + 'accounts/1234' if "1234" is your Google + Analytics Account ID. dimensions (MutableSequence[google.analytics.admin_v1alpha.types.AccessDimension]): The dimensions requested and displayed in the response. Requests are allowed up to 9 @@ -1169,12 +1169,17 @@ class SearchChangeHistoryEventsRequest(proto.Message): Optional. If set, only return changes made before this time (inclusive). page_size (int): - Optional. The maximum number of - ChangeHistoryEvent items to return. The service - may return fewer than this value, even if there - are additional pages. If unspecified, at most 50 - items will be returned. The maximum value is 200 - (higher values will be coerced to the maximum). + Optional. The maximum number of ChangeHistoryEvent items to + return. If unspecified, at most 50 items will be returned. + The maximum value is 200 (higher values will be coerced to + the maximum). + + Note that the service may return a page with fewer items + than this value specifies (potentially even zero), and that + there still may be additional pages. 
If you want a + particular number of items, you'll need to continue + requesting additional pages using ``page_token`` until you + get the needed number. page_token (str): Optional. A page token, received from a previous ``SearchChangeHistoryEvents`` call. Provide this to retrieve diff --git a/packages/google-analytics-admin/google/analytics/admin_v1alpha/types/audience.py b/packages/google-analytics-admin/google/analytics/admin_v1alpha/types/audience.py index fad860d1132e..b4ce9e84e922 100644 --- a/packages/google-analytics-admin/google/analytics/admin_v1alpha/types/audience.py +++ b/packages/google-analytics-admin/google/analytics/admin_v1alpha/types/audience.py @@ -680,7 +680,7 @@ class LogCondition(proto.Enum): class Audience(proto.Message): - r"""A resource message representing a GA4 Audience. + r"""A resource message representing an Audience. Attributes: name (str): diff --git a/packages/google-analytics-admin/google/analytics/admin_v1alpha/types/expanded_data_set.py b/packages/google-analytics-admin/google/analytics/admin_v1alpha/types/expanded_data_set.py index 8330b7d9bda6..1968b9558188 100644 --- a/packages/google-analytics-admin/google/analytics/admin_v1alpha/types/expanded_data_set.py +++ b/packages/google-analytics-admin/google/analytics/admin_v1alpha/types/expanded_data_set.py @@ -214,7 +214,7 @@ class ExpandedDataSetFilterExpressionList(proto.Message): class ExpandedDataSet(proto.Message): - r"""A resource message representing a GA4 ExpandedDataSet. + r"""A resource message representing an ``ExpandedDataSet``. Attributes: name (str): diff --git a/packages/google-analytics-admin/google/analytics/admin_v1alpha/types/resources.py b/packages/google-analytics-admin/google/analytics/admin_v1alpha/types/resources.py index dee751dedb76..3532bffd23d3 100644 --- a/packages/google-analytics-admin/google/analytics/admin_v1alpha/types/resources.py +++ b/packages/google-analytics-admin/google/analytics/admin_v1alpha/types/resources.py @@ -413,17 +413,17 @@ class LinkProposalState(proto.Enum): class PropertyType(proto.Enum): - r"""Types of Property resources. + r"""Types of ``Property`` resources. Values: PROPERTY_TYPE_UNSPECIFIED (0): Unknown or unspecified property type PROPERTY_TYPE_ORDINARY (1): - Ordinary GA4 property + Ordinary Google Analytics property PROPERTY_TYPE_SUBPROPERTY (2): - GA4 subproperty + Google Analytics subproperty PROPERTY_TYPE_ROLLUP (3): - GA4 rollup property + Google Analytics rollup property """ PROPERTY_TYPE_UNSPECIFIED = 0 PROPERTY_TYPE_ORDINARY = 1 @@ -518,8 +518,7 @@ class Account(proto.Message): class Property(proto.Message): - r"""A resource message representing a Google Analytics GA4 - property. + r"""A resource message representing a Google Analytics property. Attributes: name (str): @@ -831,7 +830,8 @@ class IosAppStreamData(proto.Message): class FirebaseLink(proto.Message): - r"""A link between a GA4 property and a Firebase project. + r"""A link between a Google Analytics property and a Firebase + project. Attributes: name (str): @@ -893,7 +893,8 @@ class GlobalSiteTag(proto.Message): class GoogleAdsLink(proto.Message): - r"""A link between a GA4 property and a Google Ads account. + r"""A link between a Google Analytics property and a Google Ads + account. Attributes: name (str): @@ -1020,7 +1021,7 @@ class DataSharingSettings(proto.Message): class AccountSummary(proto.Message): r"""A virtual resource representing an overview of an account and - all its child GA4 properties. + all its child Google Analytics properties. 
Attributes: name (str): @@ -1058,7 +1059,8 @@ class AccountSummary(proto.Message): class PropertySummary(proto.Message): - r"""A virtual resource representing metadata for a GA4 property. + r"""A virtual resource representing metadata for a Google + Analytics property. Attributes: property (str): @@ -1312,9 +1314,9 @@ class EventMapping(proto.Message): Attributes: event_name (str): - Required. Name of the GA4 event. It must - always be set. The max allowed display name - length is 40 UTF-16 code units. + Required. Name of the Google Analytics event. + It must always be set. The max allowed display + name length is 40 UTF-16 code units. min_event_count (int): At least one of the following four min/max values must be set. The values set will be ANDed @@ -1770,8 +1772,8 @@ class ChangeHistoryResource(proto.Message): class DisplayVideo360AdvertiserLink(proto.Message): - r"""A link between a GA4 property and a Display & Video 360 - advertiser. + r"""A link between a Google Analytics property and a Display & + Video 360 advertiser. Attributes: name (str): @@ -1794,18 +1796,18 @@ class DisplayVideo360AdvertiserLink(proto.Message): on create/update, it will be defaulted to true. campaign_data_sharing_enabled (google.protobuf.wrappers_pb2.BoolValue): Immutable. Enables the import of campaign - data from Display & Video 360 into the GA4 - property. After link creation, this can only be - updated from the Display & Video 360 product. If - this field is not set on create, it will be - defaulted to true. + data from Display & Video 360 into the Google + Analytics property. After link creation, this + can only be updated from the Display & Video 360 + product. If this field is not set on create, it + will be defaulted to true. cost_data_sharing_enabled (google.protobuf.wrappers_pb2.BoolValue): Immutable. Enables the import of cost data from Display & - Video 360 into the GA4 property. This can only be enabled if - campaign_data_sharing_enabled is enabled. After link - creation, this can only be updated from the Display & Video - 360 product. If this field is not set on create, it will be - defaulted to true. + Video 360 into the Google Analytics property. This can only + be enabled if ``campaign_data_sharing_enabled`` is true. + After link creation, this can only be updated from the + Display & Video 360 product. If this field is not set on + create, it will be defaulted to true. """ name: str = proto.Field( @@ -1838,8 +1840,8 @@ class DisplayVideo360AdvertiserLink(proto.Message): class DisplayVideo360AdvertiserLinkProposal(proto.Message): - r"""A proposal for a link between a GA4 property and a Display & - Video 360 advertiser. + r"""A proposal for a link between a Google Analytics property and + a Display & Video 360 advertiser. A proposal is converted to a DisplayVideo360AdvertiserLink once approved. Google Analytics admins approve inbound proposals @@ -1929,7 +1931,8 @@ class DisplayVideo360AdvertiserLinkProposal(proto.Message): class SearchAds360Link(proto.Message): - r"""A link between a GA4 property and a Search Ads 360 entity. + r"""A link between a Google Analytics property and a Search Ads + 360 entity. Attributes: name (str): @@ -1945,17 +1948,17 @@ class SearchAds360Link(proto.Message): that has been linked. campaign_data_sharing_enabled (google.protobuf.wrappers_pb2.BoolValue): Immutable. Enables the import of campaign - data from Search Ads 360 into the GA4 property. - After link creation, this can only be updated - from the Search Ads 360 product. 
- If this field is not set on create, it will be - defaulted to true. + data from Search Ads 360 into the Google + Analytics property. After link creation, this + can only be updated from the Search Ads 360 + product. If this field is not set on create, it + will be defaulted to true. cost_data_sharing_enabled (google.protobuf.wrappers_pb2.BoolValue): Immutable. Enables the import of cost data from Search Ads - 360 to the GA4 property. This can only be enabled if - campaign_data_sharing_enabled is enabled. After link - creation, this can only be updated from the Search Ads 360 - product. If this field is not set on create, it will be + 360 to the Google Analytics property. This can only be + enabled if campaign_data_sharing_enabled is enabled. After + link creation, this can only be updated from the Search Ads + 360 product. If this field is not set on create, it will be defaulted to true. advertiser_display_name (str): Output only. The display name of the Search @@ -2687,8 +2690,11 @@ class DataRetentionSettings(proto.Message): DataRetentionSetting resource. Format: properties/{property}/dataRetentionSettings event_data_retention (google.analytics.admin_v1alpha.types.DataRetentionSettings.RetentionDuration): - The length of time that event-level data is - retained. + Required. The length of time that event-level + data is retained. + user_data_retention (google.analytics.admin_v1alpha.types.DataRetentionSettings.RetentionDuration): + Required. The length of time that user-level + data is retained. reset_user_data_on_new_activity (bool): If true, reset the retention period for the user identifier with every event from that user. @@ -2709,12 +2715,15 @@ class RetentionDuration(proto.Enum): TWENTY_SIX_MONTHS (4): The data retention time duration is 26 months. Available to 360 properties only. + Available for event data only. THIRTY_EIGHT_MONTHS (5): The data retention time duration is 38 months. Available to 360 properties only. + Available for event data only. FIFTY_MONTHS (6): The data retention time duration is 50 months. Available to 360 properties only. + Available for event data only. """ RETENTION_DURATION_UNSPECIFIED = 0 TWO_MONTHS = 1 @@ -2732,6 +2741,11 @@ class RetentionDuration(proto.Enum): number=2, enum=RetentionDuration, ) + user_data_retention: RetentionDuration = proto.Field( + proto.ENUM, + number=4, + enum=RetentionDuration, + ) reset_user_data_on_new_activity: bool = proto.Field( proto.BOOL, number=3, @@ -2940,7 +2954,8 @@ class AccessBinding(proto.Message): class BigQueryLink(proto.Message): - r"""A link between a GA4 Property and BigQuery project. + r"""A link between a Google Analytics property and BigQuery + project. Attributes: name (str): @@ -3207,8 +3222,8 @@ class DataRedactionSettings(proto.Message): class AdSenseLink(proto.Message): - r"""A link between a GA4 Property and an AdSense for Content ad - client. + r"""A link between a Google Analytics property and an AdSense for + Content ad client. Attributes: name (str): @@ -3218,8 +3233,8 @@ class AdSenseLink(proto.Message): Example: properties/1234/adSenseLinks/6789 ad_client_code (str): Immutable. The AdSense ad client code that - the GA4 property is linked to. Example format: - "ca-pub-1234567890". + the Google Analytics property is linked to. + Example format: "ca-pub-1234567890". 
""" name: str = proto.Field( diff --git a/packages/google-analytics-admin/google/analytics/admin_v1alpha/types/subproperty_event_filter.py b/packages/google-analytics-admin/google/analytics/admin_v1alpha/types/subproperty_event_filter.py index 11cbc4abd244..10b12dafdfe2 100644 --- a/packages/google-analytics-admin/google/analytics/admin_v1alpha/types/subproperty_event_filter.py +++ b/packages/google-analytics-admin/google/analytics/admin_v1alpha/types/subproperty_event_filter.py @@ -248,8 +248,8 @@ class FilterClauseType(proto.Enum): class SubpropertyEventFilter(proto.Message): - r"""A resource message representing a GA4 Subproperty event - filter. + r"""A resource message representing a Google Analytics + subproperty event filter. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields diff --git a/packages/google-analytics-admin/google/analytics/admin_v1beta/gapic_version.py b/packages/google-analytics-admin/google/analytics/admin_v1beta/gapic_version.py index dd30746d3360..eb79334047e7 100644 --- a/packages/google-analytics-admin/google/analytics/admin_v1beta/gapic_version.py +++ b/packages/google-analytics-admin/google/analytics/admin_v1beta/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.23.3" # {x-release-please-version} +__version__ = "0.23.4" # {x-release-please-version} diff --git a/packages/google-analytics-admin/google/analytics/admin_v1beta/services/analytics_admin_service/async_client.py b/packages/google-analytics-admin/google/analytics/admin_v1beta/services/analytics_admin_service/async_client.py index 107b8f72bce8..cdd574dd08f3 100644 --- a/packages/google-analytics-admin/google/analytics/admin_v1beta/services/analytics_admin_service/async_client.py +++ b/packages/google-analytics-admin/google/analytics/admin_v1beta/services/analytics_admin_service/async_client.py @@ -70,7 +70,7 @@ class AnalyticsAdminServiceAsyncClient: - """Service Interface for the Analytics Admin API (GA4).""" + """Service Interface for the Google Analytics Admin API.""" _client: AnalyticsAdminServiceClient @@ -483,7 +483,7 @@ async def list_accounts( ) -> pagers.ListAccountsAsyncPager: r"""Returns all accounts accessible by the caller. - Note that these accounts might not currently have GA4 + Note that these accounts might not currently have GA properties. Soft-deleted (ie: "trashed") accounts are excluded by default. Returns an empty list if no relevant accounts are found. @@ -993,7 +993,7 @@ async def get_property( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> resources.Property: - r"""Lookup for a single "GA4" Property. + r"""Lookup for a single GA Property. .. code-block:: python @@ -1042,7 +1042,7 @@ async def sample_get_property(): Returns: google.analytics.admin_v1beta.types.Property: A resource message representing a - Google Analytics GA4 property. + Google Analytics property. """ # Create or coerce a protobuf request object. @@ -1101,7 +1101,6 @@ async def list_properties( ) -> pagers.ListPropertiesAsyncPager: r"""Returns child Properties under the specified parent Account. - Only "GA4" properties will be returned. Properties will be excluded if the caller does not have access. Soft-deleted (ie: "trashed") properties are excluded by default. 
Returns an empty list if no @@ -1201,8 +1200,8 @@ async def create_property( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> resources.Property: - r"""Creates an "GA4" property with the specified location - and attributes. + r"""Creates a Google Analytics property with the + specified location and attributes. .. code-block:: python @@ -1257,7 +1256,7 @@ async def sample_create_property(): Returns: google.analytics.admin_v1beta.types.Property: A resource message representing a - Google Analytics GA4 property. + Google Analytics property. """ # Create or coerce a protobuf request object. @@ -1321,8 +1320,7 @@ async def delete_property( permanently purged. https://support.google.com/analytics/answer/6154772 - Returns an error if the target is not found, or is not a - GA4 Property. + Returns an error if the target is not found. .. code-block:: python @@ -1373,7 +1371,7 @@ async def sample_delete_property(): Returns: google.analytics.admin_v1beta.types.Property: A resource message representing a - Google Analytics GA4 property. + Google Analytics property. """ # Create or coerce a protobuf request object. @@ -1497,7 +1495,7 @@ async def sample_update_property(): Returns: google.analytics.admin_v1beta.types.Property: A resource message representing a - Google Analytics GA4 property. + Google Analytics property. """ # Create or coerce a protobuf request object. @@ -1621,8 +1619,8 @@ async def sample_create_firebase_link(): Returns: google.analytics.admin_v1beta.types.FirebaseLink: - A link between a GA4 property and a - Firebase project. + A link between a Google Analytics + property and a Firebase project. """ # Create or coerce a protobuf request object. @@ -1966,8 +1964,8 @@ async def sample_create_google_ads_link(): Returns: google.analytics.admin_v1beta.types.GoogleAdsLink: - A link between a GA4 property and a - Google Ads account. + A link between a Google Analytics + property and a Google Ads account. """ # Create or coerce a protobuf request object. @@ -2086,8 +2084,8 @@ async def sample_update_google_ads_link(): Returns: google.analytics.admin_v1beta.types.GoogleAdsLink: - A link between a GA4 property and a - Google Ads account. + A link between a Google Analytics + property and a Google Ads account. """ # Create or coerce a protobuf request object. @@ -2488,7 +2486,7 @@ async def get_measurement_protocol_secret( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> resources.MeasurementProtocolSecret: - r"""Lookup for a single "GA4" MeasurementProtocolSecret. + r"""Lookup for a single MeasurementProtocolSecret. .. code-block:: python @@ -3194,6 +3192,9 @@ async def search_change_history_events( r"""Searches through all changes to an account or its children given the specified set of filters. + Only returns the subset of changes supported by the API. + The UI may return additional changes. + .. 
code-block:: python # This snippet has been automatically generated and should be regarded as a @@ -5773,7 +5774,12 @@ async def sample_update_data_retention_settings(): client = admin_v1beta.AnalyticsAdminServiceAsyncClient() # Initialize request argument(s) + data_retention_settings = admin_v1beta.DataRetentionSettings() + data_retention_settings.event_data_retention = "FIFTY_MONTHS" + data_retention_settings.user_data_retention = "FIFTY_MONTHS" + request = admin_v1beta.UpdateDataRetentionSettingsRequest( + data_retention_settings=data_retention_settings, ) # Make the request @@ -6457,14 +6463,19 @@ async def run_access_report( related to quota can only be requested on Google Analytics 360 properties. This method is only available to Administrators. - These data access records include GA4 UI Reporting, GA4 UI - Explorations, GA4 Data API, and other products like Firebase & + These data access records include GA UI Reporting, GA UI + Explorations, GA Data API, and other products like Firebase & Admob that can retrieve data from Google Analytics through a linkage. These records don't include property configuration changes like adding a stream or changing a property's time zone. For configuration change history, see `searchChangeHistoryEvents `__. + To give your feedback on this API, complete the `Google + Analytics Access Reports + feedback `__ + form. + .. code-block:: python # This snippet has been automatically generated and should be regarded as a diff --git a/packages/google-analytics-admin/google/analytics/admin_v1beta/services/analytics_admin_service/client.py b/packages/google-analytics-admin/google/analytics/admin_v1beta/services/analytics_admin_service/client.py index 3a4869c9106b..dd39883f3888 100644 --- a/packages/google-analytics-admin/google/analytics/admin_v1beta/services/analytics_admin_service/client.py +++ b/packages/google-analytics-admin/google/analytics/admin_v1beta/services/analytics_admin_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -113,7 +115,7 @@ def get_transport_class( class AnalyticsAdminServiceClient(metaclass=AnalyticsAdminServiceClientMeta): - """Service Interface for the Analytics Admin API (GA4).""" + """Service Interface for the Google Analytics Admin API.""" @staticmethod def _get_default_mtls_endpoint(api_endpoint): @@ -720,6 +722,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. 
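The new _add_cred_info_for_auth_errors helper above only augments 401/403/404 errors, and only when the credentials object exposes get_cred_info (available in google-auth >= 2.35.0). A self-contained sketch of that behavior with stand-in classes; FakeCredentials and FakeError are illustrative stubs, not part of the library:

.. code-block:: python

    import json
    from http import HTTPStatus

    class FakeCredentials:
        def get_cred_info(self):
            # The shape of this dict is illustrative only.
            return {"credential_source": "metadata server", "principal": "sa@example.iam.gserviceaccount.com"}

    class FakeError:
        def __init__(self, code):
            self.code = code
            self._details = []

    def add_cred_info_for_auth_errors(error, cred):
        # Same guards as the generated helper: only 401/403/404, and only
        # if the credentials can describe themselves.
        if error.code not in (HTTPStatus.UNAUTHORIZED, HTTPStatus.FORBIDDEN, HTTPStatus.NOT_FOUND):
            return
        if not hasattr(cred, "get_cred_info"):
            return
        cred_info = cred.get_cred_info()
        if cred_info and hasattr(error._details, "append"):
            error._details.append(json.dumps(cred_info))

    err = FakeError(HTTPStatus.FORBIDDEN)
    add_cred_info_for_auth_errors(err, FakeCredentials())
    assert err._details  # now carries a JSON description of the credential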
@@ -1036,7 +1065,7 @@ def list_accounts( ) -> pagers.ListAccountsPager: r"""Returns all accounts accessible by the caller. - Note that these accounts might not currently have GA4 + Note that these accounts might not currently have GA properties. Soft-deleted (ie: "trashed") accounts are excluded by default. Returns an empty list if no relevant accounts are found. @@ -1534,7 +1563,7 @@ def get_property( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> resources.Property: - r"""Lookup for a single "GA4" Property. + r"""Lookup for a single GA Property. .. code-block:: python @@ -1583,7 +1612,7 @@ def sample_get_property(): Returns: google.analytics.admin_v1beta.types.Property: A resource message representing a - Google Analytics GA4 property. + Google Analytics property. """ # Create or coerce a protobuf request object. @@ -1639,7 +1668,6 @@ def list_properties( ) -> pagers.ListPropertiesPager: r"""Returns child Properties under the specified parent Account. - Only "GA4" properties will be returned. Properties will be excluded if the caller does not have access. Soft-deleted (ie: "trashed") properties are excluded by default. Returns an empty list if no @@ -1737,8 +1765,8 @@ def create_property( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> resources.Property: - r"""Creates an "GA4" property with the specified location - and attributes. + r"""Creates a Google Analytics property with the + specified location and attributes. .. code-block:: python @@ -1793,7 +1821,7 @@ def sample_create_property(): Returns: google.analytics.admin_v1beta.types.Property: A resource message representing a - Google Analytics GA4 property. + Google Analytics property. """ # Create or coerce a protobuf request object. @@ -1854,8 +1882,7 @@ def delete_property( permanently purged. https://support.google.com/analytics/answer/6154772 - Returns an error if the target is not found, or is not a - GA4 Property. + Returns an error if the target is not found. .. code-block:: python @@ -1906,7 +1933,7 @@ def sample_delete_property(): Returns: google.analytics.admin_v1beta.types.Property: A resource message representing a - Google Analytics GA4 property. + Google Analytics property. """ # Create or coerce a protobuf request object. @@ -2027,7 +2054,7 @@ def sample_update_property(): Returns: google.analytics.admin_v1beta.types.Property: A resource message representing a - Google Analytics GA4 property. + Google Analytics property. """ # Create or coerce a protobuf request object. @@ -2148,8 +2175,8 @@ def sample_create_firebase_link(): Returns: google.analytics.admin_v1beta.types.FirebaseLink: - A link between a GA4 property and a - Firebase project. + A link between a Google Analytics + property and a Firebase project. """ # Create or coerce a protobuf request object. @@ -2484,8 +2511,8 @@ def sample_create_google_ads_link(): Returns: google.analytics.admin_v1beta.types.GoogleAdsLink: - A link between a GA4 property and a - Google Ads account. + A link between a Google Analytics + property and a Google Ads account. """ # Create or coerce a protobuf request object. @@ -2601,8 +2628,8 @@ def sample_update_google_ads_link(): Returns: google.analytics.admin_v1beta.types.GoogleAdsLink: - A link between a GA4 property and a - Google Ads account. + A link between a Google Analytics + property and a Google Ads account. """ # Create or coerce a protobuf request object. 
@@ -2993,7 +3020,7 @@ def get_measurement_protocol_secret( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> resources.MeasurementProtocolSecret: - r"""Lookup for a single "GA4" MeasurementProtocolSecret. + r"""Lookup for a single MeasurementProtocolSecret. .. code-block:: python @@ -3694,6 +3721,9 @@ def search_change_history_events( r"""Searches through all changes to an account or its children given the specified set of filters. + Only returns the subset of changes supported by the API. + The UI may return additional changes. + .. code-block:: python # This snippet has been automatically generated and should be regarded as a @@ -6212,7 +6242,12 @@ def sample_update_data_retention_settings(): client = admin_v1beta.AnalyticsAdminServiceClient() # Initialize request argument(s) + data_retention_settings = admin_v1beta.DataRetentionSettings() + data_retention_settings.event_data_retention = "FIFTY_MONTHS" + data_retention_settings.user_data_retention = "FIFTY_MONTHS" + request = admin_v1beta.UpdateDataRetentionSettingsRequest( + data_retention_settings=data_retention_settings, ) # Make the request @@ -6880,14 +6915,19 @@ def run_access_report( related to quota can only be requested on Google Analytics 360 properties. This method is only available to Administrators. - These data access records include GA4 UI Reporting, GA4 UI - Explorations, GA4 Data API, and other products like Firebase & + These data access records include GA UI Reporting, GA UI + Explorations, GA Data API, and other products like Firebase & Admob that can retrieve data from Google Analytics through a linkage. These records don't include property configuration changes like adding a stream or changing a property's time zone. For configuration change history, see `searchChangeHistoryEvents `__. + To give your feedback on this API, complete the `Google + Analytics Access Reports + feedback `__ + form. + .. code-block:: python # This snippet has been automatically generated and should be regarded as a diff --git a/packages/google-analytics-admin/google/analytics/admin_v1beta/services/analytics_admin_service/transports/grpc.py b/packages/google-analytics-admin/google/analytics/admin_v1beta/services/analytics_admin_service/transports/grpc.py index e9a092d465a2..409b4a9f2ea5 100644 --- a/packages/google-analytics-admin/google/analytics/admin_v1beta/services/analytics_admin_service/transports/grpc.py +++ b/packages/google-analytics-admin/google/analytics/admin_v1beta/services/analytics_admin_service/transports/grpc.py @@ -112,7 +112,7 @@ def intercept_unary_unary(self, continuation, client_call_details, request): class AnalyticsAdminServiceGrpcTransport(AnalyticsAdminServiceTransport): """gRPC backend transport for AnalyticsAdminService. - Service Interface for the Analytics Admin API (GA4). + Service Interface for the Google Analytics Admin API. This class defines the same methods as the primary client, so the primary client can load the underlying transport implementation @@ -357,7 +357,7 @@ def list_accounts( Returns all accounts accessible by the caller. - Note that these accounts might not currently have GA4 + Note that these accounts might not currently have GA properties. Soft-deleted (ie: "trashed") accounts are excluded by default. Returns an empty list if no relevant accounts are found. 
@@ -509,7 +509,7 @@ def get_property( ) -> Callable[[analytics_admin.GetPropertyRequest], resources.Property]: r"""Return a callable for the get property method over gRPC. - Lookup for a single "GA4" Property. + Lookup for a single GA Property. Returns: Callable[[~.GetPropertyRequest], @@ -539,7 +539,6 @@ def list_properties( Returns child Properties under the specified parent Account. - Only "GA4" properties will be returned. Properties will be excluded if the caller does not have access. Soft-deleted (ie: "trashed") properties are excluded by default. Returns an empty list if no @@ -569,8 +568,8 @@ def create_property( ) -> Callable[[analytics_admin.CreatePropertyRequest], resources.Property]: r"""Return a callable for the create property method over gRPC. - Creates an "GA4" property with the specified location - and attributes. + Creates a Google Analytics property with the + specified location and attributes. Returns: Callable[[~.CreatePropertyRequest], @@ -608,8 +607,7 @@ def delete_property( permanently purged. https://support.google.com/analytics/answer/6154772 - Returns an error if the target is not found, or is not a - GA4 Property. + Returns an error if the target is not found. Returns: Callable[[~.DeletePropertyRequest], @@ -889,7 +887,7 @@ def get_measurement_protocol_secret( r"""Return a callable for the get measurement protocol secret method over gRPC. - Lookup for a single "GA4" MeasurementProtocolSecret. + Lookup for a single MeasurementProtocolSecret. Returns: Callable[[~.GetMeasurementProtocolSecretRequest], @@ -1087,6 +1085,9 @@ def search_change_history_events( Searches through all changes to an account or its children given the specified set of filters. + Only returns the subset of changes supported by the API. + The UI may return additional changes. + Returns: Callable[[~.SearchChangeHistoryEventsRequest], ~.SearchChangeHistoryEventsResponse]: @@ -1874,14 +1875,19 @@ def run_access_report( related to quota can only be requested on Google Analytics 360 properties. This method is only available to Administrators. - These data access records include GA4 UI Reporting, GA4 UI - Explorations, GA4 Data API, and other products like Firebase & + These data access records include GA UI Reporting, GA UI + Explorations, GA Data API, and other products like Firebase & Admob that can retrieve data from Google Analytics through a linkage. These records don't include property configuration changes like adding a stream or changing a property's time zone. For configuration change history, see `searchChangeHistoryEvents `__. + To give your feedback on this API, complete the `Google + Analytics Access Reports + feedback `__ + form. 
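The run_access_report documentation updated above describes the data-access records the report covers. A minimal request sketch against the v1beta client; the dimension and metric names ("userEmail", "accessCount") and the relative date strings are assumptions based on the Access Report schema rather than values taken from this diff:

.. code-block:: python

    from google.analytics import admin_v1beta

    def sample_run_access_report():
        client = admin_v1beta.AnalyticsAdminServiceClient()
        request = admin_v1beta.RunAccessReportRequest(
            entity="properties/123",  # or "accounts/1234" for account-level records
            dimensions=[admin_v1beta.AccessDimension(dimension_name="userEmail")],
            metrics=[admin_v1beta.AccessMetric(metric_name="accessCount")],
            date_ranges=[admin_v1beta.AccessDateRange(start_date="7daysAgo", end_date="today")],
        )
        return client.run_access_report(request=request)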
+ Returns: Callable[[~.RunAccessReportRequest], ~.RunAccessReportResponse]: diff --git a/packages/google-analytics-admin/google/analytics/admin_v1beta/services/analytics_admin_service/transports/grpc_asyncio.py b/packages/google-analytics-admin/google/analytics/admin_v1beta/services/analytics_admin_service/transports/grpc_asyncio.py index 70f5f4969ec4..33070e7cec55 100644 --- a/packages/google-analytics-admin/google/analytics/admin_v1beta/services/analytics_admin_service/transports/grpc_asyncio.py +++ b/packages/google-analytics-admin/google/analytics/admin_v1beta/services/analytics_admin_service/transports/grpc_asyncio.py @@ -117,7 +117,7 @@ async def intercept_unary_unary(self, continuation, client_call_details, request class AnalyticsAdminServiceGrpcAsyncIOTransport(AnalyticsAdminServiceTransport): """gRPC AsyncIO backend transport for AnalyticsAdminService. - Service Interface for the Analytics Admin API (GA4). + Service Interface for the Google Analytics Admin API. This class defines the same methods as the primary client, so the primary client can load the underlying transport implementation @@ -365,7 +365,7 @@ def list_accounts( Returns all accounts accessible by the caller. - Note that these accounts might not currently have GA4 + Note that these accounts might not currently have GA properties. Soft-deleted (ie: "trashed") accounts are excluded by default. Returns an empty list if no relevant accounts are found. @@ -517,7 +517,7 @@ def get_property( ) -> Callable[[analytics_admin.GetPropertyRequest], Awaitable[resources.Property]]: r"""Return a callable for the get property method over gRPC. - Lookup for a single "GA4" Property. + Lookup for a single GA Property. Returns: Callable[[~.GetPropertyRequest], @@ -548,7 +548,6 @@ def list_properties( Returns child Properties under the specified parent Account. - Only "GA4" properties will be returned. Properties will be excluded if the caller does not have access. Soft-deleted (ie: "trashed") properties are excluded by default. Returns an empty list if no @@ -580,8 +579,8 @@ def create_property( ]: r"""Return a callable for the create property method over gRPC. - Creates an "GA4" property with the specified location - and attributes. + Creates a Google Analytics property with the + specified location and attributes. Returns: Callable[[~.CreatePropertyRequest], @@ -621,8 +620,7 @@ def delete_property( permanently purged. https://support.google.com/analytics/answer/6154772 - Returns an error if the target is not found, or is not a - GA4 Property. + Returns an error if the target is not found. Returns: Callable[[~.DeletePropertyRequest], @@ -911,7 +909,7 @@ def get_measurement_protocol_secret( r"""Return a callable for the get measurement protocol secret method over gRPC. - Lookup for a single "GA4" MeasurementProtocolSecret. + Lookup for a single MeasurementProtocolSecret. Returns: Callable[[~.GetMeasurementProtocolSecretRequest], @@ -1110,6 +1108,9 @@ def search_change_history_events( Searches through all changes to an account or its children given the specified set of filters. + Only returns the subset of changes supported by the API. + The UI may return additional changes. + Returns: Callable[[~.SearchChangeHistoryEventsRequest], Awaitable[~.SearchChangeHistoryEventsResponse]]: @@ -1928,14 +1929,19 @@ def run_access_report( related to quota can only be requested on Google Analytics 360 properties. This method is only available to Administrators. 
- These data access records include GA4 UI Reporting, GA4 UI - Explorations, GA4 Data API, and other products like Firebase & + These data access records include GA UI Reporting, GA UI + Explorations, GA Data API, and other products like Firebase & Admob that can retrieve data from Google Analytics through a linkage. These records don't include property configuration changes like adding a stream or changing a property's time zone. For configuration change history, see `searchChangeHistoryEvents `__. + To give your feedback on this API, complete the `Google + Analytics Access Reports + feedback `__ + form. + Returns: Callable[[~.RunAccessReportRequest], Awaitable[~.RunAccessReportResponse]]: diff --git a/packages/google-analytics-admin/google/analytics/admin_v1beta/services/analytics_admin_service/transports/rest.py b/packages/google-analytics-admin/google/analytics/admin_v1beta/services/analytics_admin_service/transports/rest.py index 8d87cf5c5233..08ba6a9db674 100644 --- a/packages/google-analytics-admin/google/analytics/admin_v1beta/services/analytics_admin_service/transports/rest.py +++ b/packages/google-analytics-admin/google/analytics/admin_v1beta/services/analytics_admin_service/transports/rest.py @@ -491,12 +491,38 @@ def post_acknowledge_user_data_collection( ) -> analytics_admin.AcknowledgeUserDataCollectionResponse: """Post-rpc interceptor for acknowledge_user_data_collection - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_acknowledge_user_data_collection_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AnalyticsAdminService server but before - it is returned to user code. + it is returned to user code. This `post_acknowledge_user_data_collection` interceptor runs + before the `post_acknowledge_user_data_collection_with_metadata` interceptor. """ return response + def post_acknowledge_user_data_collection_with_metadata( + self, + response: analytics_admin.AcknowledgeUserDataCollectionResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + analytics_admin.AcknowledgeUserDataCollectionResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for acknowledge_user_data_collection + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnalyticsAdminService server but before it is returned to user code. + + We recommend only using this `post_acknowledge_user_data_collection_with_metadata` + interceptor in new development instead of the `post_acknowledge_user_data_collection` interceptor. + When both interceptors are used, this `post_acknowledge_user_data_collection_with_metadata` interceptor runs after the + `post_acknowledge_user_data_collection` interceptor. The (possibly modified) response returned by + `post_acknowledge_user_data_collection` will be passed to + `post_acknowledge_user_data_collection_with_metadata`. + """ + return response, metadata + def pre_archive_custom_dimension( self, request: analytics_admin.ArchiveCustomDimensionRequest, @@ -547,12 +573,35 @@ def post_create_conversion_event( ) -> resources.ConversionEvent: """Post-rpc interceptor for create_conversion_event - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_conversion_event_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response after it is returned by the AnalyticsAdminService server but before - it is returned to user code. + it is returned to user code. This `post_create_conversion_event` interceptor runs + before the `post_create_conversion_event_with_metadata` interceptor. """ return response + def post_create_conversion_event_with_metadata( + self, + response: resources.ConversionEvent, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.ConversionEvent, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_conversion_event + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnalyticsAdminService server but before it is returned to user code. + + We recommend only using this `post_create_conversion_event_with_metadata` + interceptor in new development instead of the `post_create_conversion_event` interceptor. + When both interceptors are used, this `post_create_conversion_event_with_metadata` interceptor runs after the + `post_create_conversion_event` interceptor. The (possibly modified) response returned by + `post_create_conversion_event` will be passed to + `post_create_conversion_event_with_metadata`. + """ + return response, metadata + def pre_create_custom_dimension( self, request: analytics_admin.CreateCustomDimensionRequest, @@ -573,12 +622,35 @@ def post_create_custom_dimension( ) -> resources.CustomDimension: """Post-rpc interceptor for create_custom_dimension - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_custom_dimension_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AnalyticsAdminService server but before - it is returned to user code. + it is returned to user code. This `post_create_custom_dimension` interceptor runs + before the `post_create_custom_dimension_with_metadata` interceptor. """ return response + def post_create_custom_dimension_with_metadata( + self, + response: resources.CustomDimension, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.CustomDimension, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_custom_dimension + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnalyticsAdminService server but before it is returned to user code. + + We recommend only using this `post_create_custom_dimension_with_metadata` + interceptor in new development instead of the `post_create_custom_dimension` interceptor. + When both interceptors are used, this `post_create_custom_dimension_with_metadata` interceptor runs after the + `post_create_custom_dimension` interceptor. The (possibly modified) response returned by + `post_create_custom_dimension` will be passed to + `post_create_custom_dimension_with_metadata`. + """ + return response, metadata + def pre_create_custom_metric( self, request: analytics_admin.CreateCustomMetricRequest, @@ -599,12 +671,35 @@ def post_create_custom_metric( ) -> resources.CustomMetric: """Post-rpc interceptor for create_custom_metric - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_custom_metric_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AnalyticsAdminService server but before - it is returned to user code. + it is returned to user code. 
This `post_create_custom_metric` interceptor runs + before the `post_create_custom_metric_with_metadata` interceptor. """ return response + def post_create_custom_metric_with_metadata( + self, + response: resources.CustomMetric, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.CustomMetric, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_custom_metric + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnalyticsAdminService server but before it is returned to user code. + + We recommend only using this `post_create_custom_metric_with_metadata` + interceptor in new development instead of the `post_create_custom_metric` interceptor. + When both interceptors are used, this `post_create_custom_metric_with_metadata` interceptor runs after the + `post_create_custom_metric` interceptor. The (possibly modified) response returned by + `post_create_custom_metric` will be passed to + `post_create_custom_metric_with_metadata`. + """ + return response, metadata + def pre_create_data_stream( self, request: analytics_admin.CreateDataStreamRequest, @@ -624,12 +719,35 @@ def post_create_data_stream( ) -> resources.DataStream: """Post-rpc interceptor for create_data_stream - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_data_stream_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AnalyticsAdminService server but before - it is returned to user code. + it is returned to user code. This `post_create_data_stream` interceptor runs + before the `post_create_data_stream_with_metadata` interceptor. """ return response + def post_create_data_stream_with_metadata( + self, + response: resources.DataStream, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.DataStream, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_data_stream + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnalyticsAdminService server but before it is returned to user code. + + We recommend only using this `post_create_data_stream_with_metadata` + interceptor in new development instead of the `post_create_data_stream` interceptor. + When both interceptors are used, this `post_create_data_stream_with_metadata` interceptor runs after the + `post_create_data_stream` interceptor. The (possibly modified) response returned by + `post_create_data_stream` will be passed to + `post_create_data_stream_with_metadata`. + """ + return response, metadata + def pre_create_firebase_link( self, request: analytics_admin.CreateFirebaseLinkRequest, @@ -650,12 +768,35 @@ def post_create_firebase_link( ) -> resources.FirebaseLink: """Post-rpc interceptor for create_firebase_link - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_firebase_link_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AnalyticsAdminService server but before - it is returned to user code. + it is returned to user code. This `post_create_firebase_link` interceptor runs + before the `post_create_firebase_link_with_metadata` interceptor. 
""" return response + def post_create_firebase_link_with_metadata( + self, + response: resources.FirebaseLink, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.FirebaseLink, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_firebase_link + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnalyticsAdminService server but before it is returned to user code. + + We recommend only using this `post_create_firebase_link_with_metadata` + interceptor in new development instead of the `post_create_firebase_link` interceptor. + When both interceptors are used, this `post_create_firebase_link_with_metadata` interceptor runs after the + `post_create_firebase_link` interceptor. The (possibly modified) response returned by + `post_create_firebase_link` will be passed to + `post_create_firebase_link_with_metadata`. + """ + return response, metadata + def pre_create_google_ads_link( self, request: analytics_admin.CreateGoogleAdsLinkRequest, @@ -676,12 +817,35 @@ def post_create_google_ads_link( ) -> resources.GoogleAdsLink: """Post-rpc interceptor for create_google_ads_link - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_google_ads_link_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AnalyticsAdminService server but before - it is returned to user code. + it is returned to user code. This `post_create_google_ads_link` interceptor runs + before the `post_create_google_ads_link_with_metadata` interceptor. """ return response + def post_create_google_ads_link_with_metadata( + self, + response: resources.GoogleAdsLink, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.GoogleAdsLink, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_google_ads_link + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnalyticsAdminService server but before it is returned to user code. + + We recommend only using this `post_create_google_ads_link_with_metadata` + interceptor in new development instead of the `post_create_google_ads_link` interceptor. + When both interceptors are used, this `post_create_google_ads_link_with_metadata` interceptor runs after the + `post_create_google_ads_link` interceptor. The (possibly modified) response returned by + `post_create_google_ads_link` will be passed to + `post_create_google_ads_link_with_metadata`. + """ + return response, metadata + def pre_create_key_event( self, request: analytics_admin.CreateKeyEventRequest, @@ -699,12 +863,35 @@ def pre_create_key_event( def post_create_key_event(self, response: resources.KeyEvent) -> resources.KeyEvent: """Post-rpc interceptor for create_key_event - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_key_event_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AnalyticsAdminService server but before - it is returned to user code. + it is returned to user code. This `post_create_key_event` interceptor runs + before the `post_create_key_event_with_metadata` interceptor. 
""" return response + def post_create_key_event_with_metadata( + self, + response: resources.KeyEvent, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.KeyEvent, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_key_event + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnalyticsAdminService server but before it is returned to user code. + + We recommend only using this `post_create_key_event_with_metadata` + interceptor in new development instead of the `post_create_key_event` interceptor. + When both interceptors are used, this `post_create_key_event_with_metadata` interceptor runs after the + `post_create_key_event` interceptor. The (possibly modified) response returned by + `post_create_key_event` will be passed to + `post_create_key_event_with_metadata`. + """ + return response, metadata + def pre_create_measurement_protocol_secret( self, request: analytics_admin.CreateMeasurementProtocolSecretRequest, @@ -725,12 +912,37 @@ def post_create_measurement_protocol_secret( ) -> resources.MeasurementProtocolSecret: """Post-rpc interceptor for create_measurement_protocol_secret - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_measurement_protocol_secret_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AnalyticsAdminService server but before - it is returned to user code. + it is returned to user code. This `post_create_measurement_protocol_secret` interceptor runs + before the `post_create_measurement_protocol_secret_with_metadata` interceptor. """ return response + def post_create_measurement_protocol_secret_with_metadata( + self, + response: resources.MeasurementProtocolSecret, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + resources.MeasurementProtocolSecret, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for create_measurement_protocol_secret + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnalyticsAdminService server but before it is returned to user code. + + We recommend only using this `post_create_measurement_protocol_secret_with_metadata` + interceptor in new development instead of the `post_create_measurement_protocol_secret` interceptor. + When both interceptors are used, this `post_create_measurement_protocol_secret_with_metadata` interceptor runs after the + `post_create_measurement_protocol_secret` interceptor. The (possibly modified) response returned by + `post_create_measurement_protocol_secret` will be passed to + `post_create_measurement_protocol_secret_with_metadata`. + """ + return response, metadata + def pre_create_property( self, request: analytics_admin.CreatePropertyRequest, @@ -748,12 +960,35 @@ def pre_create_property( def post_create_property(self, response: resources.Property) -> resources.Property: """Post-rpc interceptor for create_property - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_property_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AnalyticsAdminService server but before - it is returned to user code. + it is returned to user code. This `post_create_property` interceptor runs + before the `post_create_property_with_metadata` interceptor. 
""" return response + def post_create_property_with_metadata( + self, + response: resources.Property, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.Property, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_property + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnalyticsAdminService server but before it is returned to user code. + + We recommend only using this `post_create_property_with_metadata` + interceptor in new development instead of the `post_create_property` interceptor. + When both interceptors are used, this `post_create_property_with_metadata` interceptor runs after the + `post_create_property` interceptor. The (possibly modified) response returned by + `post_create_property` will be passed to + `post_create_property_with_metadata`. + """ + return response, metadata + def pre_delete_account( self, request: analytics_admin.DeleteAccountRequest, @@ -873,12 +1108,35 @@ def pre_delete_property( def post_delete_property(self, response: resources.Property) -> resources.Property: """Post-rpc interceptor for delete_property - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_property_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AnalyticsAdminService server but before - it is returned to user code. + it is returned to user code. This `post_delete_property` interceptor runs + before the `post_delete_property_with_metadata` interceptor. """ return response + def post_delete_property_with_metadata( + self, + response: resources.Property, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.Property, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_property + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnalyticsAdminService server but before it is returned to user code. + + We recommend only using this `post_delete_property_with_metadata` + interceptor in new development instead of the `post_delete_property` interceptor. + When both interceptors are used, this `post_delete_property_with_metadata` interceptor runs after the + `post_delete_property` interceptor. The (possibly modified) response returned by + `post_delete_property` will be passed to + `post_delete_property_with_metadata`. + """ + return response, metadata + def pre_get_account( self, request: analytics_admin.GetAccountRequest, @@ -896,12 +1154,35 @@ def pre_get_account( def post_get_account(self, response: resources.Account) -> resources.Account: """Post-rpc interceptor for get_account - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_account_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AnalyticsAdminService server but before - it is returned to user code. + it is returned to user code. This `post_get_account` interceptor runs + before the `post_get_account_with_metadata` interceptor. 
""" return response + def post_get_account_with_metadata( + self, + response: resources.Account, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.Account, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_account + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnalyticsAdminService server but before it is returned to user code. + + We recommend only using this `post_get_account_with_metadata` + interceptor in new development instead of the `post_get_account` interceptor. + When both interceptors are used, this `post_get_account_with_metadata` interceptor runs after the + `post_get_account` interceptor. The (possibly modified) response returned by + `post_get_account` will be passed to + `post_get_account_with_metadata`. + """ + return response, metadata + def pre_get_conversion_event( self, request: analytics_admin.GetConversionEventRequest, @@ -922,12 +1203,35 @@ def post_get_conversion_event( ) -> resources.ConversionEvent: """Post-rpc interceptor for get_conversion_event - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_conversion_event_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AnalyticsAdminService server but before - it is returned to user code. + it is returned to user code. This `post_get_conversion_event` interceptor runs + before the `post_get_conversion_event_with_metadata` interceptor. """ return response + def post_get_conversion_event_with_metadata( + self, + response: resources.ConversionEvent, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.ConversionEvent, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_conversion_event + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnalyticsAdminService server but before it is returned to user code. + + We recommend only using this `post_get_conversion_event_with_metadata` + interceptor in new development instead of the `post_get_conversion_event` interceptor. + When both interceptors are used, this `post_get_conversion_event_with_metadata` interceptor runs after the + `post_get_conversion_event` interceptor. The (possibly modified) response returned by + `post_get_conversion_event` will be passed to + `post_get_conversion_event_with_metadata`. + """ + return response, metadata + def pre_get_custom_dimension( self, request: analytics_admin.GetCustomDimensionRequest, @@ -948,12 +1252,35 @@ def post_get_custom_dimension( ) -> resources.CustomDimension: """Post-rpc interceptor for get_custom_dimension - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_custom_dimension_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AnalyticsAdminService server but before - it is returned to user code. + it is returned to user code. This `post_get_custom_dimension` interceptor runs + before the `post_get_custom_dimension_with_metadata` interceptor. 
""" return response + def post_get_custom_dimension_with_metadata( + self, + response: resources.CustomDimension, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.CustomDimension, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_custom_dimension + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnalyticsAdminService server but before it is returned to user code. + + We recommend only using this `post_get_custom_dimension_with_metadata` + interceptor in new development instead of the `post_get_custom_dimension` interceptor. + When both interceptors are used, this `post_get_custom_dimension_with_metadata` interceptor runs after the + `post_get_custom_dimension` interceptor. The (possibly modified) response returned by + `post_get_custom_dimension` will be passed to + `post_get_custom_dimension_with_metadata`. + """ + return response, metadata + def pre_get_custom_metric( self, request: analytics_admin.GetCustomMetricRequest, @@ -973,12 +1300,35 @@ def post_get_custom_metric( ) -> resources.CustomMetric: """Post-rpc interceptor for get_custom_metric - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_custom_metric_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AnalyticsAdminService server but before - it is returned to user code. + it is returned to user code. This `post_get_custom_metric` interceptor runs + before the `post_get_custom_metric_with_metadata` interceptor. """ return response + def post_get_custom_metric_with_metadata( + self, + response: resources.CustomMetric, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.CustomMetric, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_custom_metric + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnalyticsAdminService server but before it is returned to user code. + + We recommend only using this `post_get_custom_metric_with_metadata` + interceptor in new development instead of the `post_get_custom_metric` interceptor. + When both interceptors are used, this `post_get_custom_metric_with_metadata` interceptor runs after the + `post_get_custom_metric` interceptor. The (possibly modified) response returned by + `post_get_custom_metric` will be passed to + `post_get_custom_metric_with_metadata`. + """ + return response, metadata + def pre_get_data_retention_settings( self, request: analytics_admin.GetDataRetentionSettingsRequest, @@ -999,12 +1349,37 @@ def post_get_data_retention_settings( ) -> resources.DataRetentionSettings: """Post-rpc interceptor for get_data_retention_settings - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_data_retention_settings_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AnalyticsAdminService server but before - it is returned to user code. + it is returned to user code. This `post_get_data_retention_settings` interceptor runs + before the `post_get_data_retention_settings_with_metadata` interceptor. 
""" return response + def post_get_data_retention_settings_with_metadata( + self, + response: resources.DataRetentionSettings, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + resources.DataRetentionSettings, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for get_data_retention_settings + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnalyticsAdminService server but before it is returned to user code. + + We recommend only using this `post_get_data_retention_settings_with_metadata` + interceptor in new development instead of the `post_get_data_retention_settings` interceptor. + When both interceptors are used, this `post_get_data_retention_settings_with_metadata` interceptor runs after the + `post_get_data_retention_settings` interceptor. The (possibly modified) response returned by + `post_get_data_retention_settings` will be passed to + `post_get_data_retention_settings_with_metadata`. + """ + return response, metadata + def pre_get_data_sharing_settings( self, request: analytics_admin.GetDataSharingSettingsRequest, @@ -1025,12 +1400,35 @@ def post_get_data_sharing_settings( ) -> resources.DataSharingSettings: """Post-rpc interceptor for get_data_sharing_settings - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_data_sharing_settings_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AnalyticsAdminService server but before - it is returned to user code. + it is returned to user code. This `post_get_data_sharing_settings` interceptor runs + before the `post_get_data_sharing_settings_with_metadata` interceptor. """ return response + def post_get_data_sharing_settings_with_metadata( + self, + response: resources.DataSharingSettings, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.DataSharingSettings, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_data_sharing_settings + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnalyticsAdminService server but before it is returned to user code. + + We recommend only using this `post_get_data_sharing_settings_with_metadata` + interceptor in new development instead of the `post_get_data_sharing_settings` interceptor. + When both interceptors are used, this `post_get_data_sharing_settings_with_metadata` interceptor runs after the + `post_get_data_sharing_settings` interceptor. The (possibly modified) response returned by + `post_get_data_sharing_settings` will be passed to + `post_get_data_sharing_settings_with_metadata`. + """ + return response, metadata + def pre_get_data_stream( self, request: analytics_admin.GetDataStreamRequest, @@ -1050,12 +1448,35 @@ def post_get_data_stream( ) -> resources.DataStream: """Post-rpc interceptor for get_data_stream - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_data_stream_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AnalyticsAdminService server but before - it is returned to user code. + it is returned to user code. This `post_get_data_stream` interceptor runs + before the `post_get_data_stream_with_metadata` interceptor. 
""" return response + def post_get_data_stream_with_metadata( + self, + response: resources.DataStream, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.DataStream, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_data_stream + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnalyticsAdminService server but before it is returned to user code. + + We recommend only using this `post_get_data_stream_with_metadata` + interceptor in new development instead of the `post_get_data_stream` interceptor. + When both interceptors are used, this `post_get_data_stream_with_metadata` interceptor runs after the + `post_get_data_stream` interceptor. The (possibly modified) response returned by + `post_get_data_stream` will be passed to + `post_get_data_stream_with_metadata`. + """ + return response, metadata + def pre_get_key_event( self, request: analytics_admin.GetKeyEventRequest, @@ -1073,12 +1494,35 @@ def pre_get_key_event( def post_get_key_event(self, response: resources.KeyEvent) -> resources.KeyEvent: """Post-rpc interceptor for get_key_event - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_key_event_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AnalyticsAdminService server but before - it is returned to user code. + it is returned to user code. This `post_get_key_event` interceptor runs + before the `post_get_key_event_with_metadata` interceptor. """ return response + def post_get_key_event_with_metadata( + self, + response: resources.KeyEvent, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.KeyEvent, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_key_event + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnalyticsAdminService server but before it is returned to user code. + + We recommend only using this `post_get_key_event_with_metadata` + interceptor in new development instead of the `post_get_key_event` interceptor. + When both interceptors are used, this `post_get_key_event_with_metadata` interceptor runs after the + `post_get_key_event` interceptor. The (possibly modified) response returned by + `post_get_key_event` will be passed to + `post_get_key_event_with_metadata`. + """ + return response, metadata + def pre_get_measurement_protocol_secret( self, request: analytics_admin.GetMeasurementProtocolSecretRequest, @@ -1099,12 +1543,37 @@ def post_get_measurement_protocol_secret( ) -> resources.MeasurementProtocolSecret: """Post-rpc interceptor for get_measurement_protocol_secret - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_measurement_protocol_secret_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AnalyticsAdminService server but before - it is returned to user code. + it is returned to user code. This `post_get_measurement_protocol_secret` interceptor runs + before the `post_get_measurement_protocol_secret_with_metadata` interceptor. 
""" return response + def post_get_measurement_protocol_secret_with_metadata( + self, + response: resources.MeasurementProtocolSecret, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + resources.MeasurementProtocolSecret, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for get_measurement_protocol_secret + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnalyticsAdminService server but before it is returned to user code. + + We recommend only using this `post_get_measurement_protocol_secret_with_metadata` + interceptor in new development instead of the `post_get_measurement_protocol_secret` interceptor. + When both interceptors are used, this `post_get_measurement_protocol_secret_with_metadata` interceptor runs after the + `post_get_measurement_protocol_secret` interceptor. The (possibly modified) response returned by + `post_get_measurement_protocol_secret` will be passed to + `post_get_measurement_protocol_secret_with_metadata`. + """ + return response, metadata + def pre_get_property( self, request: analytics_admin.GetPropertyRequest, @@ -1122,12 +1591,35 @@ def pre_get_property( def post_get_property(self, response: resources.Property) -> resources.Property: """Post-rpc interceptor for get_property - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_property_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AnalyticsAdminService server but before - it is returned to user code. + it is returned to user code. This `post_get_property` interceptor runs + before the `post_get_property_with_metadata` interceptor. """ return response + def post_get_property_with_metadata( + self, + response: resources.Property, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.Property, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_property + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnalyticsAdminService server but before it is returned to user code. + + We recommend only using this `post_get_property_with_metadata` + interceptor in new development instead of the `post_get_property` interceptor. + When both interceptors are used, this `post_get_property_with_metadata` interceptor runs after the + `post_get_property` interceptor. The (possibly modified) response returned by + `post_get_property` will be passed to + `post_get_property_with_metadata`. + """ + return response, metadata + def pre_list_accounts( self, request: analytics_admin.ListAccountsRequest, @@ -1147,12 +1639,37 @@ def post_list_accounts( ) -> analytics_admin.ListAccountsResponse: """Post-rpc interceptor for list_accounts - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_accounts_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AnalyticsAdminService server but before - it is returned to user code. + it is returned to user code. This `post_list_accounts` interceptor runs + before the `post_list_accounts_with_metadata` interceptor. 
""" return response + def post_list_accounts_with_metadata( + self, + response: analytics_admin.ListAccountsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + analytics_admin.ListAccountsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_accounts + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnalyticsAdminService server but before it is returned to user code. + + We recommend only using this `post_list_accounts_with_metadata` + interceptor in new development instead of the `post_list_accounts` interceptor. + When both interceptors are used, this `post_list_accounts_with_metadata` interceptor runs after the + `post_list_accounts` interceptor. The (possibly modified) response returned by + `post_list_accounts` will be passed to + `post_list_accounts_with_metadata`. + """ + return response, metadata + def pre_list_account_summaries( self, request: analytics_admin.ListAccountSummariesRequest, @@ -1173,12 +1690,38 @@ def post_list_account_summaries( ) -> analytics_admin.ListAccountSummariesResponse: """Post-rpc interceptor for list_account_summaries - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_account_summaries_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AnalyticsAdminService server but before - it is returned to user code. + it is returned to user code. This `post_list_account_summaries` interceptor runs + before the `post_list_account_summaries_with_metadata` interceptor. """ return response + def post_list_account_summaries_with_metadata( + self, + response: analytics_admin.ListAccountSummariesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + analytics_admin.ListAccountSummariesResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_account_summaries + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnalyticsAdminService server but before it is returned to user code. + + We recommend only using this `post_list_account_summaries_with_metadata` + interceptor in new development instead of the `post_list_account_summaries` interceptor. + When both interceptors are used, this `post_list_account_summaries_with_metadata` interceptor runs after the + `post_list_account_summaries` interceptor. The (possibly modified) response returned by + `post_list_account_summaries` will be passed to + `post_list_account_summaries_with_metadata`. + """ + return response, metadata + def pre_list_conversion_events( self, request: analytics_admin.ListConversionEventsRequest, @@ -1199,12 +1742,38 @@ def post_list_conversion_events( ) -> analytics_admin.ListConversionEventsResponse: """Post-rpc interceptor for list_conversion_events - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_conversion_events_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AnalyticsAdminService server but before - it is returned to user code. + it is returned to user code. This `post_list_conversion_events` interceptor runs + before the `post_list_conversion_events_with_metadata` interceptor. 
""" return response + def post_list_conversion_events_with_metadata( + self, + response: analytics_admin.ListConversionEventsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + analytics_admin.ListConversionEventsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_conversion_events + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnalyticsAdminService server but before it is returned to user code. + + We recommend only using this `post_list_conversion_events_with_metadata` + interceptor in new development instead of the `post_list_conversion_events` interceptor. + When both interceptors are used, this `post_list_conversion_events_with_metadata` interceptor runs after the + `post_list_conversion_events` interceptor. The (possibly modified) response returned by + `post_list_conversion_events` will be passed to + `post_list_conversion_events_with_metadata`. + """ + return response, metadata + def pre_list_custom_dimensions( self, request: analytics_admin.ListCustomDimensionsRequest, @@ -1225,12 +1794,38 @@ def post_list_custom_dimensions( ) -> analytics_admin.ListCustomDimensionsResponse: """Post-rpc interceptor for list_custom_dimensions - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_custom_dimensions_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AnalyticsAdminService server but before - it is returned to user code. + it is returned to user code. This `post_list_custom_dimensions` interceptor runs + before the `post_list_custom_dimensions_with_metadata` interceptor. """ return response + def post_list_custom_dimensions_with_metadata( + self, + response: analytics_admin.ListCustomDimensionsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + analytics_admin.ListCustomDimensionsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_custom_dimensions + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnalyticsAdminService server but before it is returned to user code. + + We recommend only using this `post_list_custom_dimensions_with_metadata` + interceptor in new development instead of the `post_list_custom_dimensions` interceptor. + When both interceptors are used, this `post_list_custom_dimensions_with_metadata` interceptor runs after the + `post_list_custom_dimensions` interceptor. The (possibly modified) response returned by + `post_list_custom_dimensions` will be passed to + `post_list_custom_dimensions_with_metadata`. + """ + return response, metadata + def pre_list_custom_metrics( self, request: analytics_admin.ListCustomMetricsRequest, @@ -1251,12 +1846,38 @@ def post_list_custom_metrics( ) -> analytics_admin.ListCustomMetricsResponse: """Post-rpc interceptor for list_custom_metrics - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_custom_metrics_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AnalyticsAdminService server but before - it is returned to user code. + it is returned to user code. This `post_list_custom_metrics` interceptor runs + before the `post_list_custom_metrics_with_metadata` interceptor. 
""" return response + def post_list_custom_metrics_with_metadata( + self, + response: analytics_admin.ListCustomMetricsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + analytics_admin.ListCustomMetricsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_custom_metrics + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnalyticsAdminService server but before it is returned to user code. + + We recommend only using this `post_list_custom_metrics_with_metadata` + interceptor in new development instead of the `post_list_custom_metrics` interceptor. + When both interceptors are used, this `post_list_custom_metrics_with_metadata` interceptor runs after the + `post_list_custom_metrics` interceptor. The (possibly modified) response returned by + `post_list_custom_metrics` will be passed to + `post_list_custom_metrics_with_metadata`. + """ + return response, metadata + def pre_list_data_streams( self, request: analytics_admin.ListDataStreamsRequest, @@ -1276,12 +1897,37 @@ def post_list_data_streams( ) -> analytics_admin.ListDataStreamsResponse: """Post-rpc interceptor for list_data_streams - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_data_streams_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AnalyticsAdminService server but before - it is returned to user code. + it is returned to user code. This `post_list_data_streams` interceptor runs + before the `post_list_data_streams_with_metadata` interceptor. """ return response + def post_list_data_streams_with_metadata( + self, + response: analytics_admin.ListDataStreamsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + analytics_admin.ListDataStreamsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_data_streams + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnalyticsAdminService server but before it is returned to user code. + + We recommend only using this `post_list_data_streams_with_metadata` + interceptor in new development instead of the `post_list_data_streams` interceptor. + When both interceptors are used, this `post_list_data_streams_with_metadata` interceptor runs after the + `post_list_data_streams` interceptor. The (possibly modified) response returned by + `post_list_data_streams` will be passed to + `post_list_data_streams_with_metadata`. + """ + return response, metadata + def pre_list_firebase_links( self, request: analytics_admin.ListFirebaseLinksRequest, @@ -1302,12 +1948,38 @@ def post_list_firebase_links( ) -> analytics_admin.ListFirebaseLinksResponse: """Post-rpc interceptor for list_firebase_links - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_firebase_links_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AnalyticsAdminService server but before - it is returned to user code. + it is returned to user code. This `post_list_firebase_links` interceptor runs + before the `post_list_firebase_links_with_metadata` interceptor. 
""" return response + def post_list_firebase_links_with_metadata( + self, + response: analytics_admin.ListFirebaseLinksResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + analytics_admin.ListFirebaseLinksResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_firebase_links + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnalyticsAdminService server but before it is returned to user code. + + We recommend only using this `post_list_firebase_links_with_metadata` + interceptor in new development instead of the `post_list_firebase_links` interceptor. + When both interceptors are used, this `post_list_firebase_links_with_metadata` interceptor runs after the + `post_list_firebase_links` interceptor. The (possibly modified) response returned by + `post_list_firebase_links` will be passed to + `post_list_firebase_links_with_metadata`. + """ + return response, metadata + def pre_list_google_ads_links( self, request: analytics_admin.ListGoogleAdsLinksRequest, @@ -1328,12 +2000,38 @@ def post_list_google_ads_links( ) -> analytics_admin.ListGoogleAdsLinksResponse: """Post-rpc interceptor for list_google_ads_links - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_google_ads_links_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AnalyticsAdminService server but before - it is returned to user code. + it is returned to user code. This `post_list_google_ads_links` interceptor runs + before the `post_list_google_ads_links_with_metadata` interceptor. """ return response + def post_list_google_ads_links_with_metadata( + self, + response: analytics_admin.ListGoogleAdsLinksResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + analytics_admin.ListGoogleAdsLinksResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_google_ads_links + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnalyticsAdminService server but before it is returned to user code. + + We recommend only using this `post_list_google_ads_links_with_metadata` + interceptor in new development instead of the `post_list_google_ads_links` interceptor. + When both interceptors are used, this `post_list_google_ads_links_with_metadata` interceptor runs after the + `post_list_google_ads_links` interceptor. The (possibly modified) response returned by + `post_list_google_ads_links` will be passed to + `post_list_google_ads_links_with_metadata`. + """ + return response, metadata + def pre_list_key_events( self, request: analytics_admin.ListKeyEventsRequest, @@ -1353,12 +2051,37 @@ def post_list_key_events( ) -> analytics_admin.ListKeyEventsResponse: """Post-rpc interceptor for list_key_events - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_key_events_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AnalyticsAdminService server but before - it is returned to user code. + it is returned to user code. This `post_list_key_events` interceptor runs + before the `post_list_key_events_with_metadata` interceptor. 
""" return response + def post_list_key_events_with_metadata( + self, + response: analytics_admin.ListKeyEventsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + analytics_admin.ListKeyEventsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_key_events + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnalyticsAdminService server but before it is returned to user code. + + We recommend only using this `post_list_key_events_with_metadata` + interceptor in new development instead of the `post_list_key_events` interceptor. + When both interceptors are used, this `post_list_key_events_with_metadata` interceptor runs after the + `post_list_key_events` interceptor. The (possibly modified) response returned by + `post_list_key_events` will be passed to + `post_list_key_events_with_metadata`. + """ + return response, metadata + def pre_list_measurement_protocol_secrets( self, request: analytics_admin.ListMeasurementProtocolSecretsRequest, @@ -1379,12 +2102,38 @@ def post_list_measurement_protocol_secrets( ) -> analytics_admin.ListMeasurementProtocolSecretsResponse: """Post-rpc interceptor for list_measurement_protocol_secrets - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_measurement_protocol_secrets_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AnalyticsAdminService server but before - it is returned to user code. + it is returned to user code. This `post_list_measurement_protocol_secrets` interceptor runs + before the `post_list_measurement_protocol_secrets_with_metadata` interceptor. """ return response + def post_list_measurement_protocol_secrets_with_metadata( + self, + response: analytics_admin.ListMeasurementProtocolSecretsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + analytics_admin.ListMeasurementProtocolSecretsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_measurement_protocol_secrets + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnalyticsAdminService server but before it is returned to user code. + + We recommend only using this `post_list_measurement_protocol_secrets_with_metadata` + interceptor in new development instead of the `post_list_measurement_protocol_secrets` interceptor. + When both interceptors are used, this `post_list_measurement_protocol_secrets_with_metadata` interceptor runs after the + `post_list_measurement_protocol_secrets` interceptor. The (possibly modified) response returned by + `post_list_measurement_protocol_secrets` will be passed to + `post_list_measurement_protocol_secrets_with_metadata`. + """ + return response, metadata + def pre_list_properties( self, request: analytics_admin.ListPropertiesRequest, @@ -1404,12 +2153,37 @@ def post_list_properties( ) -> analytics_admin.ListPropertiesResponse: """Post-rpc interceptor for list_properties - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_properties_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AnalyticsAdminService server but before - it is returned to user code. + it is returned to user code. This `post_list_properties` interceptor runs + before the `post_list_properties_with_metadata` interceptor. 
""" return response + def post_list_properties_with_metadata( + self, + response: analytics_admin.ListPropertiesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + analytics_admin.ListPropertiesResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_properties + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnalyticsAdminService server but before it is returned to user code. + + We recommend only using this `post_list_properties_with_metadata` + interceptor in new development instead of the `post_list_properties` interceptor. + When both interceptors are used, this `post_list_properties_with_metadata` interceptor runs after the + `post_list_properties` interceptor. The (possibly modified) response returned by + `post_list_properties` will be passed to + `post_list_properties_with_metadata`. + """ + return response, metadata + def pre_provision_account_ticket( self, request: analytics_admin.ProvisionAccountTicketRequest, @@ -1430,12 +2204,38 @@ def post_provision_account_ticket( ) -> analytics_admin.ProvisionAccountTicketResponse: """Post-rpc interceptor for provision_account_ticket - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_provision_account_ticket_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AnalyticsAdminService server but before - it is returned to user code. + it is returned to user code. This `post_provision_account_ticket` interceptor runs + before the `post_provision_account_ticket_with_metadata` interceptor. """ return response + def post_provision_account_ticket_with_metadata( + self, + response: analytics_admin.ProvisionAccountTicketResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + analytics_admin.ProvisionAccountTicketResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for provision_account_ticket + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnalyticsAdminService server but before it is returned to user code. + + We recommend only using this `post_provision_account_ticket_with_metadata` + interceptor in new development instead of the `post_provision_account_ticket` interceptor. + When both interceptors are used, this `post_provision_account_ticket_with_metadata` interceptor runs after the + `post_provision_account_ticket` interceptor. The (possibly modified) response returned by + `post_provision_account_ticket` will be passed to + `post_provision_account_ticket_with_metadata`. + """ + return response, metadata + def pre_run_access_report( self, request: analytics_admin.RunAccessReportRequest, @@ -1455,12 +2255,37 @@ def post_run_access_report( ) -> analytics_admin.RunAccessReportResponse: """Post-rpc interceptor for run_access_report - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_run_access_report_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AnalyticsAdminService server but before - it is returned to user code. + it is returned to user code. This `post_run_access_report` interceptor runs + before the `post_run_access_report_with_metadata` interceptor. 
""" return response + def post_run_access_report_with_metadata( + self, + response: analytics_admin.RunAccessReportResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + analytics_admin.RunAccessReportResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for run_access_report + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnalyticsAdminService server but before it is returned to user code. + + We recommend only using this `post_run_access_report_with_metadata` + interceptor in new development instead of the `post_run_access_report` interceptor. + When both interceptors are used, this `post_run_access_report_with_metadata` interceptor runs after the + `post_run_access_report` interceptor. The (possibly modified) response returned by + `post_run_access_report` will be passed to + `post_run_access_report_with_metadata`. + """ + return response, metadata + def pre_search_change_history_events( self, request: analytics_admin.SearchChangeHistoryEventsRequest, @@ -1481,12 +2306,38 @@ def post_search_change_history_events( ) -> analytics_admin.SearchChangeHistoryEventsResponse: """Post-rpc interceptor for search_change_history_events - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_search_change_history_events_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AnalyticsAdminService server but before - it is returned to user code. + it is returned to user code. This `post_search_change_history_events` interceptor runs + before the `post_search_change_history_events_with_metadata` interceptor. """ return response + def post_search_change_history_events_with_metadata( + self, + response: analytics_admin.SearchChangeHistoryEventsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + analytics_admin.SearchChangeHistoryEventsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for search_change_history_events + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnalyticsAdminService server but before it is returned to user code. + + We recommend only using this `post_search_change_history_events_with_metadata` + interceptor in new development instead of the `post_search_change_history_events` interceptor. + When both interceptors are used, this `post_search_change_history_events_with_metadata` interceptor runs after the + `post_search_change_history_events` interceptor. The (possibly modified) response returned by + `post_search_change_history_events` will be passed to + `post_search_change_history_events_with_metadata`. + """ + return response, metadata + def pre_update_account( self, request: analytics_admin.UpdateAccountRequest, @@ -1504,12 +2355,35 @@ def pre_update_account( def post_update_account(self, response: resources.Account) -> resources.Account: """Post-rpc interceptor for update_account - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_account_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AnalyticsAdminService server but before - it is returned to user code. + it is returned to user code. This `post_update_account` interceptor runs + before the `post_update_account_with_metadata` interceptor. 
""" return response + def post_update_account_with_metadata( + self, + response: resources.Account, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.Account, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_account + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnalyticsAdminService server but before it is returned to user code. + + We recommend only using this `post_update_account_with_metadata` + interceptor in new development instead of the `post_update_account` interceptor. + When both interceptors are used, this `post_update_account_with_metadata` interceptor runs after the + `post_update_account` interceptor. The (possibly modified) response returned by + `post_update_account` will be passed to + `post_update_account_with_metadata`. + """ + return response, metadata + def pre_update_conversion_event( self, request: analytics_admin.UpdateConversionEventRequest, @@ -1530,12 +2404,35 @@ def post_update_conversion_event( ) -> resources.ConversionEvent: """Post-rpc interceptor for update_conversion_event - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_conversion_event_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AnalyticsAdminService server but before - it is returned to user code. + it is returned to user code. This `post_update_conversion_event` interceptor runs + before the `post_update_conversion_event_with_metadata` interceptor. """ return response + def post_update_conversion_event_with_metadata( + self, + response: resources.ConversionEvent, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.ConversionEvent, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_conversion_event + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnalyticsAdminService server but before it is returned to user code. + + We recommend only using this `post_update_conversion_event_with_metadata` + interceptor in new development instead of the `post_update_conversion_event` interceptor. + When both interceptors are used, this `post_update_conversion_event_with_metadata` interceptor runs after the + `post_update_conversion_event` interceptor. The (possibly modified) response returned by + `post_update_conversion_event` will be passed to + `post_update_conversion_event_with_metadata`. + """ + return response, metadata + def pre_update_custom_dimension( self, request: analytics_admin.UpdateCustomDimensionRequest, @@ -1556,12 +2453,35 @@ def post_update_custom_dimension( ) -> resources.CustomDimension: """Post-rpc interceptor for update_custom_dimension - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_custom_dimension_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AnalyticsAdminService server but before - it is returned to user code. + it is returned to user code. This `post_update_custom_dimension` interceptor runs + before the `post_update_custom_dimension_with_metadata` interceptor. 
""" return response + def post_update_custom_dimension_with_metadata( + self, + response: resources.CustomDimension, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.CustomDimension, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_custom_dimension + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnalyticsAdminService server but before it is returned to user code. + + We recommend only using this `post_update_custom_dimension_with_metadata` + interceptor in new development instead of the `post_update_custom_dimension` interceptor. + When both interceptors are used, this `post_update_custom_dimension_with_metadata` interceptor runs after the + `post_update_custom_dimension` interceptor. The (possibly modified) response returned by + `post_update_custom_dimension` will be passed to + `post_update_custom_dimension_with_metadata`. + """ + return response, metadata + def pre_update_custom_metric( self, request: analytics_admin.UpdateCustomMetricRequest, @@ -1582,12 +2502,35 @@ def post_update_custom_metric( ) -> resources.CustomMetric: """Post-rpc interceptor for update_custom_metric - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_custom_metric_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AnalyticsAdminService server but before - it is returned to user code. + it is returned to user code. This `post_update_custom_metric` interceptor runs + before the `post_update_custom_metric_with_metadata` interceptor. """ return response + def post_update_custom_metric_with_metadata( + self, + response: resources.CustomMetric, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.CustomMetric, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_custom_metric + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnalyticsAdminService server but before it is returned to user code. + + We recommend only using this `post_update_custom_metric_with_metadata` + interceptor in new development instead of the `post_update_custom_metric` interceptor. + When both interceptors are used, this `post_update_custom_metric_with_metadata` interceptor runs after the + `post_update_custom_metric` interceptor. The (possibly modified) response returned by + `post_update_custom_metric` will be passed to + `post_update_custom_metric_with_metadata`. + """ + return response, metadata + def pre_update_data_retention_settings( self, request: analytics_admin.UpdateDataRetentionSettingsRequest, @@ -1608,12 +2551,37 @@ def post_update_data_retention_settings( ) -> resources.DataRetentionSettings: """Post-rpc interceptor for update_data_retention_settings - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_data_retention_settings_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AnalyticsAdminService server but before - it is returned to user code. + it is returned to user code. This `post_update_data_retention_settings` interceptor runs + before the `post_update_data_retention_settings_with_metadata` interceptor. 
""" return response + def post_update_data_retention_settings_with_metadata( + self, + response: resources.DataRetentionSettings, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + resources.DataRetentionSettings, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for update_data_retention_settings + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnalyticsAdminService server but before it is returned to user code. + + We recommend only using this `post_update_data_retention_settings_with_metadata` + interceptor in new development instead of the `post_update_data_retention_settings` interceptor. + When both interceptors are used, this `post_update_data_retention_settings_with_metadata` interceptor runs after the + `post_update_data_retention_settings` interceptor. The (possibly modified) response returned by + `post_update_data_retention_settings` will be passed to + `post_update_data_retention_settings_with_metadata`. + """ + return response, metadata + def pre_update_data_stream( self, request: analytics_admin.UpdateDataStreamRequest, @@ -1633,12 +2601,35 @@ def post_update_data_stream( ) -> resources.DataStream: """Post-rpc interceptor for update_data_stream - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_data_stream_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AnalyticsAdminService server but before - it is returned to user code. + it is returned to user code. This `post_update_data_stream` interceptor runs + before the `post_update_data_stream_with_metadata` interceptor. """ return response + def post_update_data_stream_with_metadata( + self, + response: resources.DataStream, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.DataStream, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_data_stream + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnalyticsAdminService server but before it is returned to user code. + + We recommend only using this `post_update_data_stream_with_metadata` + interceptor in new development instead of the `post_update_data_stream` interceptor. + When both interceptors are used, this `post_update_data_stream_with_metadata` interceptor runs after the + `post_update_data_stream` interceptor. The (possibly modified) response returned by + `post_update_data_stream` will be passed to + `post_update_data_stream_with_metadata`. + """ + return response, metadata + def pre_update_google_ads_link( self, request: analytics_admin.UpdateGoogleAdsLinkRequest, @@ -1659,12 +2650,35 @@ def post_update_google_ads_link( ) -> resources.GoogleAdsLink: """Post-rpc interceptor for update_google_ads_link - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_google_ads_link_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AnalyticsAdminService server but before - it is returned to user code. + it is returned to user code. This `post_update_google_ads_link` interceptor runs + before the `post_update_google_ads_link_with_metadata` interceptor. 
""" return response + def post_update_google_ads_link_with_metadata( + self, + response: resources.GoogleAdsLink, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.GoogleAdsLink, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_google_ads_link + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnalyticsAdminService server but before it is returned to user code. + + We recommend only using this `post_update_google_ads_link_with_metadata` + interceptor in new development instead of the `post_update_google_ads_link` interceptor. + When both interceptors are used, this `post_update_google_ads_link_with_metadata` interceptor runs after the + `post_update_google_ads_link` interceptor. The (possibly modified) response returned by + `post_update_google_ads_link` will be passed to + `post_update_google_ads_link_with_metadata`. + """ + return response, metadata + def pre_update_key_event( self, request: analytics_admin.UpdateKeyEventRequest, @@ -1682,12 +2696,35 @@ def pre_update_key_event( def post_update_key_event(self, response: resources.KeyEvent) -> resources.KeyEvent: """Post-rpc interceptor for update_key_event - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_key_event_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AnalyticsAdminService server but before - it is returned to user code. + it is returned to user code. This `post_update_key_event` interceptor runs + before the `post_update_key_event_with_metadata` interceptor. """ return response + def post_update_key_event_with_metadata( + self, + response: resources.KeyEvent, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.KeyEvent, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_key_event + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnalyticsAdminService server but before it is returned to user code. + + We recommend only using this `post_update_key_event_with_metadata` + interceptor in new development instead of the `post_update_key_event` interceptor. + When both interceptors are used, this `post_update_key_event_with_metadata` interceptor runs after the + `post_update_key_event` interceptor. The (possibly modified) response returned by + `post_update_key_event` will be passed to + `post_update_key_event_with_metadata`. + """ + return response, metadata + def pre_update_measurement_protocol_secret( self, request: analytics_admin.UpdateMeasurementProtocolSecretRequest, @@ -1708,12 +2745,37 @@ def post_update_measurement_protocol_secret( ) -> resources.MeasurementProtocolSecret: """Post-rpc interceptor for update_measurement_protocol_secret - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_measurement_protocol_secret_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AnalyticsAdminService server but before - it is returned to user code. + it is returned to user code. This `post_update_measurement_protocol_secret` interceptor runs + before the `post_update_measurement_protocol_secret_with_metadata` interceptor. 
""" return response + def post_update_measurement_protocol_secret_with_metadata( + self, + response: resources.MeasurementProtocolSecret, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + resources.MeasurementProtocolSecret, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for update_measurement_protocol_secret + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnalyticsAdminService server but before it is returned to user code. + + We recommend only using this `post_update_measurement_protocol_secret_with_metadata` + interceptor in new development instead of the `post_update_measurement_protocol_secret` interceptor. + When both interceptors are used, this `post_update_measurement_protocol_secret_with_metadata` interceptor runs after the + `post_update_measurement_protocol_secret` interceptor. The (possibly modified) response returned by + `post_update_measurement_protocol_secret` will be passed to + `post_update_measurement_protocol_secret_with_metadata`. + """ + return response, metadata + def pre_update_property( self, request: analytics_admin.UpdatePropertyRequest, @@ -1731,12 +2793,35 @@ def pre_update_property( def post_update_property(self, response: resources.Property) -> resources.Property: """Post-rpc interceptor for update_property - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_property_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AnalyticsAdminService server but before - it is returned to user code. + it is returned to user code. This `post_update_property` interceptor runs + before the `post_update_property_with_metadata` interceptor. """ return response + def post_update_property_with_metadata( + self, + response: resources.Property, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.Property, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_property + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnalyticsAdminService server but before it is returned to user code. + + We recommend only using this `post_update_property_with_metadata` + interceptor in new development instead of the `post_update_property` interceptor. + When both interceptors are used, this `post_update_property_with_metadata` interceptor runs after the + `post_update_property` interceptor. The (possibly modified) response returned by + `post_update_property` will be passed to + `post_update_property_with_metadata`. + """ + return response, metadata + @dataclasses.dataclass class AnalyticsAdminServiceRestStub: @@ -1748,7 +2833,7 @@ class AnalyticsAdminServiceRestStub: class AnalyticsAdminServiceRestTransport(_BaseAnalyticsAdminServiceRestTransport): """REST backend synchronous transport for AnalyticsAdminService. - Service Interface for the Analytics Admin API (GA4). + Service Interface for the Google Analytics Admin API. 
This class defines the same methods as the primary client, so the primary client can load the underlying transport implementation @@ -1956,6 +3041,13 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_acknowledge_user_data_collection(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_acknowledge_user_data_collection_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2348,6 +3440,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_conversion_event(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_conversion_event_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2500,6 +3596,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_custom_dimension(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_custom_dimension_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2652,6 +3752,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_custom_metric(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_custom_metric_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2806,6 +3910,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_data_stream(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_data_stream_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2883,8 +3991,8 @@ def __call__( Returns: ~.resources.FirebaseLink: - A link between a GA4 property and a - Firebase project. + A link between a Google Analytics + property and a Firebase project. """ @@ -2960,6 +4068,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_firebase_link(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_firebase_link_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3037,8 +4149,8 @@ def __call__( Returns: ~.resources.GoogleAdsLink: - A link between a GA4 property and a - Google Ads account. + A link between a Google Analytics + property and a Google Ads account. 
""" @@ -3114,6 +4226,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_google_ads_link(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_google_ads_link_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3266,6 +4382,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_key_event(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_key_event_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3424,6 +4544,13 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_measurement_protocol_secret(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_create_measurement_protocol_secret_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3504,7 +4631,7 @@ def __call__( Returns: ~.resources.Property: A resource message representing a - Google Analytics GA4 property. + Google Analytics property. """ @@ -3576,6 +4703,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_property(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_property_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4435,7 +5566,7 @@ def __call__( Returns: ~.resources.Property: A resource message representing a - Google Analytics GA4 property. + Google Analytics property. 
""" @@ -4502,6 +5633,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_property(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_property_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4645,6 +5780,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_account(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_account_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4793,6 +5932,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_conversion_event(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_conversion_event_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4939,6 +6082,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_custom_dimension(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_custom_dimension_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -5085,6 +6232,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_custom_metric(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_custom_metric_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -5232,6 +6383,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_data_retention_settings(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_data_retention_settings_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -5379,6 +6534,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_data_sharing_settings(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_data_sharing_settings_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -5523,6 +6682,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_data_stream(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_data_stream_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -5666,6 +6829,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = 
self._interceptor.post_get_key_event(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_key_event_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -5815,6 +6982,13 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_measurement_protocol_secret(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_get_measurement_protocol_secret_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -5893,7 +7067,7 @@ def __call__( Returns: ~.resources.Property: A resource message representing a - Google Analytics GA4 property. + Google Analytics property. """ @@ -5960,6 +7134,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_property(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_property_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -6101,6 +7279,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_accounts(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_accounts_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -6251,6 +7433,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_account_summaries(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_account_summaries_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -6401,6 +7587,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_conversion_events(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_conversion_events_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -6551,6 +7741,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_custom_dimensions(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_custom_dimensions_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -6701,6 +7895,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_custom_metrics(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_custom_metrics_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -6851,6 +8049,10 @@ def __call__( 
json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_data_streams(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_data_streams_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -7001,6 +8203,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_firebase_links(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_firebase_links_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -7151,6 +8357,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_google_ads_links(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_google_ads_links_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -7296,6 +8506,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_key_events(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_key_events_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -7447,6 +8661,13 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_measurement_protocol_secrets(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_list_measurement_protocol_secrets_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -7595,6 +8816,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_properties(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_properties_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -7749,6 +8974,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_provision_account_ticket(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_provision_account_ticket_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -7905,6 +9134,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_run_access_report(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_run_access_report_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -8060,6 +9293,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = 
self._interceptor.post_search_change_history_events(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_search_change_history_events_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -8214,6 +9451,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_account(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_account_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -8368,6 +9609,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_conversion_event(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_conversion_event_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -8520,6 +9765,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_custom_dimension(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_custom_dimension_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -8672,6 +9921,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_custom_metric(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_custom_metric_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -8827,6 +10080,13 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_data_retention_settings(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_update_data_retention_settings_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -8981,6 +10241,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_data_stream(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_data_stream_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -9058,8 +10322,8 @@ def __call__( Returns: ~.resources.GoogleAdsLink: - A link between a GA4 property and a - Google Ads account. + A link between a Google Analytics + property and a Google Ads account. 
""" @@ -9135,6 +10399,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_google_ads_link(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_google_ads_link_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -9287,6 +10555,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_key_event(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_key_event_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -9445,6 +10717,13 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_measurement_protocol_secret(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_update_measurement_protocol_secret_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -9525,7 +10804,7 @@ def __call__( Returns: ~.resources.Property: A resource message representing a - Google Analytics GA4 property. + Google Analytics property. """ @@ -9597,6 +10876,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_property(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_property_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-analytics-admin/google/analytics/admin_v1beta/types/analytics_admin.py b/packages/google-analytics-admin/google/analytics/admin_v1beta/types/analytics_admin.py index 777729066725..ca5d2d3ed521 100644 --- a/packages/google-analytics-admin/google/analytics/admin_v1beta/types/analytics_admin.py +++ b/packages/google-analytics-admin/google/analytics/admin_v1beta/types/analytics_admin.py @@ -112,10 +112,10 @@ class RunAccessReportRequest(proto.Message): To request at the property level, entity should be for example 'properties/123' if "123" is your - GA4 property ID. To request at the account - level, entity should be for example - 'accounts/1234' if "1234" is your GA4 Account - ID. + Google Analytics property ID. To request at the + account level, entity should be for example + 'accounts/1234' if "1234" is your Google + Analytics Account ID. dimensions (MutableSequence[google.analytics.admin_v1beta.types.AccessDimension]): The dimensions requested and displayed in the response. Requests are allowed up to 9 @@ -1019,12 +1019,17 @@ class SearchChangeHistoryEventsRequest(proto.Message): Optional. If set, only return changes made before this time (inclusive). page_size (int): - Optional. The maximum number of - ChangeHistoryEvent items to return. The service - may return fewer than this value, even if there - are additional pages. If unspecified, at most 50 - items will be returned. The maximum value is 200 - (higher values will be coerced to the maximum). + Optional. The maximum number of ChangeHistoryEvent items to + return. If unspecified, at most 50 items will be returned. 
+ The maximum value is 200 (higher values will be coerced to + the maximum). + + Note that the service may return a page with fewer items + than this value specifies (potentially even zero), and that + there still may be additional pages. If you want a + particular number of items, you'll need to continue + requesting additional pages using ``page_token`` until you + get the needed number. page_token (str): Optional. A page token, received from a previous ``SearchChangeHistoryEvents`` call. Provide this to retrieve diff --git a/packages/google-analytics-admin/google/analytics/admin_v1beta/types/resources.py b/packages/google-analytics-admin/google/analytics/admin_v1beta/types/resources.py index 536fd5361a41..581ec88d033e 100644 --- a/packages/google-analytics-admin/google/analytics/admin_v1beta/types/resources.py +++ b/packages/google-analytics-admin/google/analytics/admin_v1beta/types/resources.py @@ -217,6 +217,10 @@ class ChangeHistoryResourceType(proto.Enum): ConversionEvent resource MEASUREMENT_PROTOCOL_SECRET (10): MeasurementProtocolSecret resource + CUSTOM_DIMENSION (11): + CustomDimension resource + CUSTOM_METRIC (12): + CustomMetric resource DATA_RETENTION_SETTINGS (13): DataRetentionSettings resource DISPLAY_VIDEO_360_ADVERTISER_LINK (14): @@ -237,6 +241,8 @@ class ChangeHistoryResourceType(proto.Enum): GOOGLE_SIGNALS_SETTINGS = 8 CONVERSION_EVENT = 9 MEASUREMENT_PROTOCOL_SECRET = 10 + CUSTOM_DIMENSION = 11 + CUSTOM_METRIC = 12 DATA_RETENTION_SETTINGS = 13 DISPLAY_VIDEO_360_ADVERTISER_LINK = 14 DISPLAY_VIDEO_360_ADVERTISER_LINK_PROPOSAL = 15 @@ -245,17 +251,17 @@ class ChangeHistoryResourceType(proto.Enum): class PropertyType(proto.Enum): - r"""Types of Property resources. + r"""Types of ``Property`` resources. Values: PROPERTY_TYPE_UNSPECIFIED (0): Unknown or unspecified property type PROPERTY_TYPE_ORDINARY (1): - Ordinary GA4 property + Ordinary Google Analytics property PROPERTY_TYPE_SUBPROPERTY (2): - GA4 subproperty + Google Analytics subproperty PROPERTY_TYPE_ROLLUP (3): - GA4 rollup property + Google Analytics rollup property """ PROPERTY_TYPE_UNSPECIFIED = 0 PROPERTY_TYPE_ORDINARY = 1 @@ -328,8 +334,7 @@ class Account(proto.Message): class Property(proto.Message): - r"""A resource message representing a Google Analytics GA4 - property. + r"""A resource message representing a Google Analytics property. Attributes: name (str): @@ -641,7 +646,8 @@ class IosAppStreamData(proto.Message): class FirebaseLink(proto.Message): - r"""A link between a GA4 property and a Firebase project. + r"""A link between a Google Analytics property and a Firebase + project. Attributes: name (str): @@ -676,7 +682,8 @@ class FirebaseLink(proto.Message): class GoogleAdsLink(proto.Message): - r"""A link between a GA4 property and a Google Ads account. + r"""A link between a Google Analytics property and a Google Ads + account. Attributes: name (str): @@ -803,7 +810,7 @@ class DataSharingSettings(proto.Message): class AccountSummary(proto.Message): r"""A virtual resource representing an overview of an account and - all its child GA4 properties. + all its child Google Analytics properties. Attributes: name (str): @@ -841,7 +848,8 @@ class AccountSummary(proto.Message): class PropertySummary(proto.Message): - r"""A virtual resource representing metadata for a GA4 property. + r"""A virtual resource representing metadata for a Google + Analytics property. Attributes: property (str): @@ -1604,8 +1612,11 @@ class DataRetentionSettings(proto.Message): DataRetentionSetting resource. 
Format: properties/{property}/dataRetentionSettings event_data_retention (google.analytics.admin_v1beta.types.DataRetentionSettings.RetentionDuration): - The length of time that event-level data is - retained. + Required. The length of time that event-level + data is retained. + user_data_retention (google.analytics.admin_v1beta.types.DataRetentionSettings.RetentionDuration): + Required. The length of time that user-level + data is retained. reset_user_data_on_new_activity (bool): If true, reset the retention period for the user identifier with every event from that user. @@ -1626,12 +1637,15 @@ class RetentionDuration(proto.Enum): TWENTY_SIX_MONTHS (4): The data retention time duration is 26 months. Available to 360 properties only. + Available for event data only. THIRTY_EIGHT_MONTHS (5): The data retention time duration is 38 months. Available to 360 properties only. + Available for event data only. FIFTY_MONTHS (6): The data retention time duration is 50 months. Available to 360 properties only. + Available for event data only. """ RETENTION_DURATION_UNSPECIFIED = 0 TWO_MONTHS = 1 @@ -1649,6 +1663,11 @@ class RetentionDuration(proto.Enum): number=2, enum=RetentionDuration, ) + user_data_retention: RetentionDuration = proto.Field( + proto.ENUM, + number=4, + enum=RetentionDuration, + ) reset_user_data_on_new_activity: bool = proto.Field( proto.BOOL, number=3, diff --git a/packages/google-analytics-admin/samples/generated_samples/analyticsadmin_v1beta_generated_analytics_admin_service_update_data_retention_settings_async.py b/packages/google-analytics-admin/samples/generated_samples/analyticsadmin_v1beta_generated_analytics_admin_service_update_data_retention_settings_async.py index 61997c77c6f5..96ecc02b6e28 100644 --- a/packages/google-analytics-admin/samples/generated_samples/analyticsadmin_v1beta_generated_analytics_admin_service_update_data_retention_settings_async.py +++ b/packages/google-analytics-admin/samples/generated_samples/analyticsadmin_v1beta_generated_analytics_admin_service_update_data_retention_settings_async.py @@ -39,7 +39,12 @@ async def sample_update_data_retention_settings(): client = admin_v1beta.AnalyticsAdminServiceAsyncClient() # Initialize request argument(s) + data_retention_settings = admin_v1beta.DataRetentionSettings() + data_retention_settings.event_data_retention = "FIFTY_MONTHS" + data_retention_settings.user_data_retention = "FIFTY_MONTHS" + request = admin_v1beta.UpdateDataRetentionSettingsRequest( + data_retention_settings=data_retention_settings, ) # Make the request diff --git a/packages/google-analytics-admin/samples/generated_samples/analyticsadmin_v1beta_generated_analytics_admin_service_update_data_retention_settings_sync.py b/packages/google-analytics-admin/samples/generated_samples/analyticsadmin_v1beta_generated_analytics_admin_service_update_data_retention_settings_sync.py index 7a5997ec34c3..e8d228cd9b25 100644 --- a/packages/google-analytics-admin/samples/generated_samples/analyticsadmin_v1beta_generated_analytics_admin_service_update_data_retention_settings_sync.py +++ b/packages/google-analytics-admin/samples/generated_samples/analyticsadmin_v1beta_generated_analytics_admin_service_update_data_retention_settings_sync.py @@ -39,7 +39,12 @@ def sample_update_data_retention_settings(): client = admin_v1beta.AnalyticsAdminServiceClient() # Initialize request argument(s) + data_retention_settings = admin_v1beta.DataRetentionSettings() + data_retention_settings.event_data_retention = "FIFTY_MONTHS" + 
data_retention_settings.user_data_retention = "FIFTY_MONTHS" + request = admin_v1beta.UpdateDataRetentionSettingsRequest( + data_retention_settings=data_retention_settings, ) # Make the request diff --git a/packages/google-analytics-admin/samples/generated_samples/snippet_metadata_google.analytics.admin.v1beta.json b/packages/google-analytics-admin/samples/generated_samples/snippet_metadata_google.analytics.admin.v1beta.json index 075af12d2094..68c1856c71a5 100644 --- a/packages/google-analytics-admin/samples/generated_samples/snippet_metadata_google.analytics.admin.v1beta.json +++ b/packages/google-analytics-admin/samples/generated_samples/snippet_metadata_google.analytics.admin.v1beta.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-analytics-admin", - "version": "0.23.3" + "version": "0.23.4" }, "snippets": [ { @@ -7778,12 +7778,12 @@ "regionTag": "analyticsadmin_v1beta_generated_AnalyticsAdminService_UpdateDataRetentionSettings_async", "segments": [ { - "end": 50, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 50, + "end": 55, "start": 27, "type": "SHORT" }, @@ -7793,18 +7793,18 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 44, + "end": 49, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 47, - "start": 45, + "end": 52, + "start": 50, "type": "REQUEST_EXECUTION" }, { - "end": 51, - "start": 48, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], @@ -7862,12 +7862,12 @@ "regionTag": "analyticsadmin_v1beta_generated_AnalyticsAdminService_UpdateDataRetentionSettings_sync", "segments": [ { - "end": 50, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 50, + "end": 55, "start": 27, "type": "SHORT" }, @@ -7877,18 +7877,18 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 44, + "end": 49, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 47, - "start": 45, + "end": 52, + "start": 50, "type": "REQUEST_EXECUTION" }, { - "end": 51, - "start": 48, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], diff --git a/packages/google-analytics-admin/tests/unit/gapic/admin_v1alpha/test_analytics_admin_service.py b/packages/google-analytics-admin/tests/unit/gapic/admin_v1alpha/test_analytics_admin_service.py index 1fc6486dafa7..b9434d0fba80 100644 --- a/packages/google-analytics-admin/tests/unit/gapic/admin_v1alpha/test_analytics_admin_service.py +++ b/packages/google-analytics-admin/tests/unit/gapic/admin_v1alpha/test_analytics_admin_service.py @@ -78,6 +78,13 @@ from google.analytics.admin_v1alpha.types import resources from google.analytics.admin_v1alpha.types import subproperty_event_filter +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -353,6 +360,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = AnalyticsAdminServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = AnalyticsAdminServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -25797,6 +25847,7 @@ def test_get_data_retention_settings(request_type, transport: str = "grpc"): call.return_value = resources.DataRetentionSettings( name="name_value", event_data_retention=resources.DataRetentionSettings.RetentionDuration.TWO_MONTHS, + user_data_retention=resources.DataRetentionSettings.RetentionDuration.TWO_MONTHS, reset_user_data_on_new_activity=True, ) response = client.get_data_retention_settings(request) @@ -25814,6 +25865,10 @@ def test_get_data_retention_settings(request_type, transport: str = "grpc"): response.event_data_retention == resources.DataRetentionSettings.RetentionDuration.TWO_MONTHS ) + assert ( + response.user_data_retention + == resources.DataRetentionSettings.RetentionDuration.TWO_MONTHS + ) assert response.reset_user_data_on_new_activity is True @@ -25952,6 +26007,7 @@ async def test_get_data_retention_settings_async( resources.DataRetentionSettings( name="name_value", event_data_retention=resources.DataRetentionSettings.RetentionDuration.TWO_MONTHS, + user_data_retention=resources.DataRetentionSettings.RetentionDuration.TWO_MONTHS, reset_user_data_on_new_activity=True, ) ) @@ -25970,6 +26026,10 @@ async def test_get_data_retention_settings_async( response.event_data_retention == resources.DataRetentionSettings.RetentionDuration.TWO_MONTHS ) + assert ( + response.user_data_retention + == resources.DataRetentionSettings.RetentionDuration.TWO_MONTHS + ) assert response.reset_user_data_on_new_activity is True @@ -26154,6 +26214,7 @@ def test_update_data_retention_settings(request_type, transport: str = "grpc"): call.return_value = resources.DataRetentionSettings( name="name_value", event_data_retention=resources.DataRetentionSettings.RetentionDuration.TWO_MONTHS, + user_data_retention=resources.DataRetentionSettings.RetentionDuration.TWO_MONTHS, reset_user_data_on_new_activity=True, ) response = client.update_data_retention_settings(request) @@ -26171,6 +26232,10 @@ def test_update_data_retention_settings(request_type, transport: str = "grpc"): response.event_data_retention == resources.DataRetentionSettings.RetentionDuration.TWO_MONTHS ) + assert ( + response.user_data_retention + == 
resources.DataRetentionSettings.RetentionDuration.TWO_MONTHS + ) assert response.reset_user_data_on_new_activity is True @@ -26305,6 +26370,7 @@ async def test_update_data_retention_settings_async( resources.DataRetentionSettings( name="name_value", event_data_retention=resources.DataRetentionSettings.RetentionDuration.TWO_MONTHS, + user_data_retention=resources.DataRetentionSettings.RetentionDuration.TWO_MONTHS, reset_user_data_on_new_activity=True, ) ) @@ -26323,6 +26389,10 @@ async def test_update_data_retention_settings_async( response.event_data_retention == resources.DataRetentionSettings.RetentionDuration.TWO_MONTHS ) + assert ( + response.user_data_retention + == resources.DataRetentionSettings.RetentionDuration.TWO_MONTHS + ) assert response.reset_user_data_on_new_activity is True @@ -90346,6 +90416,7 @@ async def test_get_data_retention_settings_empty_call_grpc_asyncio(): resources.DataRetentionSettings( name="name_value", event_data_retention=resources.DataRetentionSettings.RetentionDuration.TWO_MONTHS, + user_data_retention=resources.DataRetentionSettings.RetentionDuration.TWO_MONTHS, reset_user_data_on_new_activity=True, ) ) @@ -90377,6 +90448,7 @@ async def test_update_data_retention_settings_empty_call_grpc_asyncio(): resources.DataRetentionSettings( name="name_value", event_data_retention=resources.DataRetentionSettings.RetentionDuration.TWO_MONTHS, + user_data_retention=resources.DataRetentionSettings.RetentionDuration.TWO_MONTHS, reset_user_data_on_new_activity=True, ) ) @@ -92958,10 +93030,14 @@ def test_get_account_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "post_get_account" ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "post_get_account_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "pre_get_account" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_admin.GetAccountRequest.pb( analytics_admin.GetAccountRequest() ) @@ -92985,6 +93061,7 @@ def test_get_account_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.Account() + post_with_metadata.return_value = resources.Account(), metadata client.get_account( request, @@ -92996,6 +93073,7 @@ def test_get_account_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_accounts_rest_bad_request( @@ -93080,10 +93158,14 @@ def test_list_accounts_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "post_list_accounts" ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "post_list_accounts_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "pre_list_accounts" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_admin.ListAccountsRequest.pb( analytics_admin.ListAccountsRequest() ) @@ -93109,6 +93191,10 @@ def test_list_accounts_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = analytics_admin.ListAccountsResponse() + post_with_metadata.return_value = ( + analytics_admin.ListAccountsResponse(), + metadata, + ) client.list_accounts( request, @@ -93120,6 +93206,7 @@ def 
test_list_accounts_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_account_rest_bad_request( @@ -93397,10 +93484,14 @@ def test_update_account_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "post_update_account" ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "post_update_account_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "pre_update_account" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_admin.UpdateAccountRequest.pb( analytics_admin.UpdateAccountRequest() ) @@ -93424,6 +93515,7 @@ def test_update_account_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.Account() + post_with_metadata.return_value = resources.Account(), metadata client.update_account( request, @@ -93435,6 +93527,7 @@ def test_update_account_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_provision_account_ticket_rest_bad_request( @@ -93519,10 +93612,14 @@ def test_provision_account_ticket_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "post_provision_account_ticket" ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "post_provision_account_ticket_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "pre_provision_account_ticket" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_admin.ProvisionAccountTicketRequest.pb( analytics_admin.ProvisionAccountTicketRequest() ) @@ -93548,6 +93645,10 @@ def test_provision_account_ticket_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = analytics_admin.ProvisionAccountTicketResponse() + post_with_metadata.return_value = ( + analytics_admin.ProvisionAccountTicketResponse(), + metadata, + ) client.provision_account_ticket( request, @@ -93559,6 +93660,7 @@ def test_provision_account_ticket_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_account_summaries_rest_bad_request( @@ -93643,10 +93745,14 @@ def test_list_account_summaries_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "post_list_account_summaries" ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "post_list_account_summaries_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "pre_list_account_summaries" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_admin.ListAccountSummariesRequest.pb( analytics_admin.ListAccountSummariesRequest() ) @@ -93672,6 +93778,10 @@ def test_list_account_summaries_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = analytics_admin.ListAccountSummariesResponse() + post_with_metadata.return_value = ( + analytics_admin.ListAccountSummariesResponse(), + metadata, + ) 
client.list_account_summaries( request, @@ -93683,6 +93793,7 @@ def test_list_account_summaries_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_property_rest_bad_request(request_type=analytics_admin.GetPropertyRequest): @@ -93781,10 +93892,14 @@ def test_get_property_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "post_get_property" ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "post_get_property_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "pre_get_property" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_admin.GetPropertyRequest.pb( analytics_admin.GetPropertyRequest() ) @@ -93808,6 +93923,7 @@ def test_get_property_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.Property() + post_with_metadata.return_value = resources.Property(), metadata client.get_property( request, @@ -93819,6 +93935,7 @@ def test_get_property_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_properties_rest_bad_request( @@ -93903,10 +94020,14 @@ def test_list_properties_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "post_list_properties" ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "post_list_properties_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "pre_list_properties" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_admin.ListPropertiesRequest.pb( analytics_admin.ListPropertiesRequest() ) @@ -93932,6 +94053,10 @@ def test_list_properties_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = analytics_admin.ListPropertiesResponse() + post_with_metadata.return_value = ( + analytics_admin.ListPropertiesResponse(), + metadata, + ) client.list_properties( request, @@ -93943,6 +94068,7 @@ def test_list_properties_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_property_rest_bad_request( @@ -94125,10 +94251,14 @@ def test_create_property_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "post_create_property" ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "post_create_property_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "pre_create_property" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_admin.CreatePropertyRequest.pb( analytics_admin.CreatePropertyRequest() ) @@ -94152,6 +94282,7 @@ def test_create_property_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.Property() + post_with_metadata.return_value = resources.Property(), metadata client.create_property( request, @@ -94163,6 +94294,7 @@ def 
test_create_property_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_property_rest_bad_request( @@ -94263,10 +94395,14 @@ def test_delete_property_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "post_delete_property" ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "post_delete_property_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "pre_delete_property" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_admin.DeletePropertyRequest.pb( analytics_admin.DeletePropertyRequest() ) @@ -94290,6 +94426,7 @@ def test_delete_property_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.Property() + post_with_metadata.return_value = resources.Property(), metadata client.delete_property( request, @@ -94301,6 +94438,7 @@ def test_delete_property_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_property_rest_bad_request( @@ -94483,10 +94621,14 @@ def test_update_property_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "post_update_property" ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "post_update_property_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "pre_update_property" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_admin.UpdatePropertyRequest.pb( analytics_admin.UpdatePropertyRequest() ) @@ -94510,6 +94652,7 @@ def test_update_property_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.Property() + post_with_metadata.return_value = resources.Property(), metadata client.update_property( request, @@ -94521,6 +94664,7 @@ def test_update_property_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_firebase_link_rest_bad_request( @@ -94679,10 +94823,14 @@ def test_create_firebase_link_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "post_create_firebase_link" ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "post_create_firebase_link_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "pre_create_firebase_link" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_admin.CreateFirebaseLinkRequest.pb( analytics_admin.CreateFirebaseLinkRequest() ) @@ -94706,6 +94854,7 @@ def test_create_firebase_link_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.FirebaseLink() + post_with_metadata.return_value = resources.FirebaseLink(), metadata client.create_firebase_link( request, @@ -94717,6 +94866,7 @@ def test_create_firebase_link_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + 
post_with_metadata.assert_called_once() def test_delete_firebase_link_rest_bad_request( @@ -94910,10 +95060,14 @@ def test_list_firebase_links_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "post_list_firebase_links" ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "post_list_firebase_links_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "pre_list_firebase_links" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_admin.ListFirebaseLinksRequest.pb( analytics_admin.ListFirebaseLinksRequest() ) @@ -94939,6 +95093,10 @@ def test_list_firebase_links_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = analytics_admin.ListFirebaseLinksResponse() + post_with_metadata.return_value = ( + analytics_admin.ListFirebaseLinksResponse(), + metadata, + ) client.list_firebase_links( request, @@ -94950,6 +95108,7 @@ def test_list_firebase_links_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_global_site_tag_rest_bad_request( @@ -95036,10 +95195,14 @@ def test_get_global_site_tag_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "post_get_global_site_tag" ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "post_get_global_site_tag_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "pre_get_global_site_tag" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_admin.GetGlobalSiteTagRequest.pb( analytics_admin.GetGlobalSiteTagRequest() ) @@ -95063,6 +95226,7 @@ def test_get_global_site_tag_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.GlobalSiteTag() + post_with_metadata.return_value = resources.GlobalSiteTag(), metadata client.get_global_site_tag( request, @@ -95074,6 +95238,7 @@ def test_get_global_site_tag_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_google_ads_link_rest_bad_request( @@ -95242,10 +95407,14 @@ def test_create_google_ads_link_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "post_create_google_ads_link" ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "post_create_google_ads_link_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "pre_create_google_ads_link" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_admin.CreateGoogleAdsLinkRequest.pb( analytics_admin.CreateGoogleAdsLinkRequest() ) @@ -95269,6 +95438,7 @@ def test_create_google_ads_link_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.GoogleAdsLink() + post_with_metadata.return_value = resources.GoogleAdsLink(), metadata client.create_google_ads_link( request, @@ -95280,6 +95450,7 @@ def test_create_google_ads_link_rest_interceptors(null_interceptor): pre.assert_called_once() 
post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_google_ads_link_rest_bad_request( @@ -95452,10 +95623,14 @@ def test_update_google_ads_link_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "post_update_google_ads_link" ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "post_update_google_ads_link_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "pre_update_google_ads_link" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_admin.UpdateGoogleAdsLinkRequest.pb( analytics_admin.UpdateGoogleAdsLinkRequest() ) @@ -95479,6 +95654,7 @@ def test_update_google_ads_link_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.GoogleAdsLink() + post_with_metadata.return_value = resources.GoogleAdsLink(), metadata client.update_google_ads_link( request, @@ -95490,6 +95666,7 @@ def test_update_google_ads_link_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_google_ads_link_rest_bad_request( @@ -95683,10 +95860,14 @@ def test_list_google_ads_links_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "post_list_google_ads_links" ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "post_list_google_ads_links_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "pre_list_google_ads_links" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_admin.ListGoogleAdsLinksRequest.pb( analytics_admin.ListGoogleAdsLinksRequest() ) @@ -95712,6 +95893,10 @@ def test_list_google_ads_links_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = analytics_admin.ListGoogleAdsLinksResponse() + post_with_metadata.return_value = ( + analytics_admin.ListGoogleAdsLinksResponse(), + metadata, + ) client.list_google_ads_links( request, @@ -95723,6 +95908,7 @@ def test_list_google_ads_links_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_data_sharing_settings_rest_bad_request( @@ -95818,10 +96004,14 @@ def test_get_data_sharing_settings_rest_interceptors(null_interceptor): transports.AnalyticsAdminServiceRestInterceptor, "post_get_data_sharing_settings", ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "post_get_data_sharing_settings_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "pre_get_data_sharing_settings" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_admin.GetDataSharingSettingsRequest.pb( analytics_admin.GetDataSharingSettingsRequest() ) @@ -95847,6 +96037,7 @@ def test_get_data_sharing_settings_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.DataSharingSettings() + post_with_metadata.return_value = resources.DataSharingSettings(), metadata client.get_data_sharing_settings( request, @@ -95858,6 +96049,7 @@ def 
test_get_data_sharing_settings_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_measurement_protocol_secret_rest_bad_request( @@ -95951,11 +96143,15 @@ def test_get_measurement_protocol_secret_rest_interceptors(null_interceptor): transports.AnalyticsAdminServiceRestInterceptor, "post_get_measurement_protocol_secret", ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "post_get_measurement_protocol_secret_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "pre_get_measurement_protocol_secret", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_admin.GetMeasurementProtocolSecretRequest.pb( analytics_admin.GetMeasurementProtocolSecretRequest() ) @@ -95981,6 +96177,10 @@ def test_get_measurement_protocol_secret_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.MeasurementProtocolSecret() + post_with_metadata.return_value = ( + resources.MeasurementProtocolSecret(), + metadata, + ) client.get_measurement_protocol_secret( request, @@ -95992,6 +96192,7 @@ def test_get_measurement_protocol_secret_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_measurement_protocol_secrets_rest_bad_request( @@ -96079,11 +96280,15 @@ def test_list_measurement_protocol_secrets_rest_interceptors(null_interceptor): transports.AnalyticsAdminServiceRestInterceptor, "post_list_measurement_protocol_secrets", ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "post_list_measurement_protocol_secrets_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "pre_list_measurement_protocol_secrets", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_admin.ListMeasurementProtocolSecretsRequest.pb( analytics_admin.ListMeasurementProtocolSecretsRequest() ) @@ -96109,6 +96314,10 @@ def test_list_measurement_protocol_secrets_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = analytics_admin.ListMeasurementProtocolSecretsResponse() + post_with_metadata.return_value = ( + analytics_admin.ListMeasurementProtocolSecretsResponse(), + metadata, + ) client.list_measurement_protocol_secrets( request, @@ -96120,6 +96329,7 @@ def test_list_measurement_protocol_secrets_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_measurement_protocol_secret_rest_bad_request( @@ -96287,11 +96497,15 @@ def test_create_measurement_protocol_secret_rest_interceptors(null_interceptor): transports.AnalyticsAdminServiceRestInterceptor, "post_create_measurement_protocol_secret", ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "post_create_measurement_protocol_secret_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "pre_create_measurement_protocol_secret", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_admin.CreateMeasurementProtocolSecretRequest.pb( 
analytics_admin.CreateMeasurementProtocolSecretRequest() ) @@ -96317,6 +96531,10 @@ def test_create_measurement_protocol_secret_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.MeasurementProtocolSecret() + post_with_metadata.return_value = ( + resources.MeasurementProtocolSecret(), + metadata, + ) client.create_measurement_protocol_secret( request, @@ -96328,6 +96546,7 @@ def test_create_measurement_protocol_secret_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_measurement_protocol_secret_rest_bad_request( @@ -96617,11 +96836,15 @@ def test_update_measurement_protocol_secret_rest_interceptors(null_interceptor): transports.AnalyticsAdminServiceRestInterceptor, "post_update_measurement_protocol_secret", ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "post_update_measurement_protocol_secret_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "pre_update_measurement_protocol_secret", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_admin.UpdateMeasurementProtocolSecretRequest.pb( analytics_admin.UpdateMeasurementProtocolSecretRequest() ) @@ -96647,6 +96870,10 @@ def test_update_measurement_protocol_secret_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.MeasurementProtocolSecret() + post_with_metadata.return_value = ( + resources.MeasurementProtocolSecret(), + metadata, + ) client.update_measurement_protocol_secret( request, @@ -96658,6 +96885,7 @@ def test_update_measurement_protocol_secret_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_acknowledge_user_data_collection_rest_bad_request( @@ -96742,11 +96970,15 @@ def test_acknowledge_user_data_collection_rest_interceptors(null_interceptor): transports.AnalyticsAdminServiceRestInterceptor, "post_acknowledge_user_data_collection", ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "post_acknowledge_user_data_collection_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "pre_acknowledge_user_data_collection", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_admin.AcknowledgeUserDataCollectionRequest.pb( analytics_admin.AcknowledgeUserDataCollectionRequest() ) @@ -96772,6 +97004,10 @@ def test_acknowledge_user_data_collection_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = analytics_admin.AcknowledgeUserDataCollectionResponse() + post_with_metadata.return_value = ( + analytics_admin.AcknowledgeUserDataCollectionResponse(), + metadata, + ) client.acknowledge_user_data_collection( request, @@ -96783,6 +97019,7 @@ def test_acknowledge_user_data_collection_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_sk_ad_network_conversion_value_schema_rest_bad_request( @@ -96874,11 +97111,15 @@ def test_get_sk_ad_network_conversion_value_schema_rest_interceptors(null_interc transports.AnalyticsAdminServiceRestInterceptor, "post_get_sk_ad_network_conversion_value_schema", ) as post, 
mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "post_get_sk_ad_network_conversion_value_schema_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "pre_get_sk_ad_network_conversion_value_schema", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_admin.GetSKAdNetworkConversionValueSchemaRequest.pb( analytics_admin.GetSKAdNetworkConversionValueSchemaRequest() ) @@ -96904,6 +97145,10 @@ def test_get_sk_ad_network_conversion_value_schema_rest_interceptors(null_interc ] pre.return_value = request, metadata post.return_value = resources.SKAdNetworkConversionValueSchema() + post_with_metadata.return_value = ( + resources.SKAdNetworkConversionValueSchema(), + metadata, + ) client.get_sk_ad_network_conversion_value_schema( request, @@ -96915,6 +97160,7 @@ def test_get_sk_ad_network_conversion_value_schema_rest_interceptors(null_interc pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_sk_ad_network_conversion_value_schema_rest_bad_request( @@ -97107,11 +97353,15 @@ def test_create_sk_ad_network_conversion_value_schema_rest_interceptors( transports.AnalyticsAdminServiceRestInterceptor, "post_create_sk_ad_network_conversion_value_schema", ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "post_create_sk_ad_network_conversion_value_schema_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "pre_create_sk_ad_network_conversion_value_schema", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_admin.CreateSKAdNetworkConversionValueSchemaRequest.pb( analytics_admin.CreateSKAdNetworkConversionValueSchemaRequest() ) @@ -97137,6 +97387,10 @@ def test_create_sk_ad_network_conversion_value_schema_rest_interceptors( ] pre.return_value = request, metadata post.return_value = resources.SKAdNetworkConversionValueSchema() + post_with_metadata.return_value = ( + resources.SKAdNetworkConversionValueSchema(), + metadata, + ) client.create_sk_ad_network_conversion_value_schema( request, @@ -97148,6 +97402,7 @@ def test_create_sk_ad_network_conversion_value_schema_rest_interceptors( pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_sk_ad_network_conversion_value_schema_rest_bad_request( @@ -97464,11 +97719,15 @@ def test_update_sk_ad_network_conversion_value_schema_rest_interceptors( transports.AnalyticsAdminServiceRestInterceptor, "post_update_sk_ad_network_conversion_value_schema", ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "post_update_sk_ad_network_conversion_value_schema_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "pre_update_sk_ad_network_conversion_value_schema", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_admin.UpdateSKAdNetworkConversionValueSchemaRequest.pb( analytics_admin.UpdateSKAdNetworkConversionValueSchemaRequest() ) @@ -97494,6 +97753,10 @@ def test_update_sk_ad_network_conversion_value_schema_rest_interceptors( ] pre.return_value = request, metadata post.return_value = resources.SKAdNetworkConversionValueSchema() + post_with_metadata.return_value = ( + 
resources.SKAdNetworkConversionValueSchema(), + metadata, + ) client.update_sk_ad_network_conversion_value_schema( request, @@ -97505,6 +97768,7 @@ def test_update_sk_ad_network_conversion_value_schema_rest_interceptors( pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_sk_ad_network_conversion_value_schemas_rest_bad_request( @@ -97594,11 +97858,15 @@ def test_list_sk_ad_network_conversion_value_schemas_rest_interceptors( transports.AnalyticsAdminServiceRestInterceptor, "post_list_sk_ad_network_conversion_value_schemas", ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "post_list_sk_ad_network_conversion_value_schemas_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "pre_list_sk_ad_network_conversion_value_schemas", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_admin.ListSKAdNetworkConversionValueSchemasRequest.pb( analytics_admin.ListSKAdNetworkConversionValueSchemasRequest() ) @@ -97628,6 +97896,10 @@ def test_list_sk_ad_network_conversion_value_schemas_rest_interceptors( post.return_value = ( analytics_admin.ListSKAdNetworkConversionValueSchemasResponse() ) + post_with_metadata.return_value = ( + analytics_admin.ListSKAdNetworkConversionValueSchemasResponse(), + metadata, + ) client.list_sk_ad_network_conversion_value_schemas( request, @@ -97639,6 +97911,7 @@ def test_list_sk_ad_network_conversion_value_schemas_rest_interceptors( pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_search_change_history_events_rest_bad_request( @@ -97726,11 +97999,15 @@ def test_search_change_history_events_rest_interceptors(null_interceptor): transports.AnalyticsAdminServiceRestInterceptor, "post_search_change_history_events", ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "post_search_change_history_events_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "pre_search_change_history_events", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_admin.SearchChangeHistoryEventsRequest.pb( analytics_admin.SearchChangeHistoryEventsRequest() ) @@ -97756,6 +98033,10 @@ def test_search_change_history_events_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = analytics_admin.SearchChangeHistoryEventsResponse() + post_with_metadata.return_value = ( + analytics_admin.SearchChangeHistoryEventsResponse(), + metadata, + ) client.search_change_history_events( request, @@ -97767,6 +98048,7 @@ def test_search_change_history_events_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_google_signals_settings_rest_bad_request( @@ -97859,11 +98141,15 @@ def test_get_google_signals_settings_rest_interceptors(null_interceptor): transports.AnalyticsAdminServiceRestInterceptor, "post_get_google_signals_settings", ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "post_get_google_signals_settings_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "pre_get_google_signals_settings", ) as pre: pre.assert_not_called() post.assert_not_called() + 
post_with_metadata.assert_not_called() pb_message = analytics_admin.GetGoogleSignalsSettingsRequest.pb( analytics_admin.GetGoogleSignalsSettingsRequest() ) @@ -97889,6 +98175,7 @@ def test_get_google_signals_settings_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.GoogleSignalsSettings() + post_with_metadata.return_value = resources.GoogleSignalsSettings(), metadata client.get_google_signals_settings( request, @@ -97900,6 +98187,7 @@ def test_get_google_signals_settings_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_google_signals_settings_rest_bad_request( @@ -98072,11 +98360,15 @@ def test_update_google_signals_settings_rest_interceptors(null_interceptor): transports.AnalyticsAdminServiceRestInterceptor, "post_update_google_signals_settings", ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "post_update_google_signals_settings_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "pre_update_google_signals_settings", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_admin.UpdateGoogleSignalsSettingsRequest.pb( analytics_admin.UpdateGoogleSignalsSettingsRequest() ) @@ -98102,6 +98394,7 @@ def test_update_google_signals_settings_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.GoogleSignalsSettings() + post_with_metadata.return_value = resources.GoogleSignalsSettings(), metadata client.update_google_signals_settings( request, @@ -98113,6 +98406,7 @@ def test_update_google_signals_settings_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_conversion_event_rest_bad_request( @@ -98289,10 +98583,14 @@ def test_create_conversion_event_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "post_create_conversion_event" ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "post_create_conversion_event_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "pre_create_conversion_event" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_admin.CreateConversionEventRequest.pb( analytics_admin.CreateConversionEventRequest() ) @@ -98316,6 +98614,7 @@ def test_create_conversion_event_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.ConversionEvent() + post_with_metadata.return_value = resources.ConversionEvent(), metadata client.create_conversion_event( request, @@ -98327,6 +98626,7 @@ def test_create_conversion_event_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_conversion_event_rest_bad_request( @@ -98507,10 +98807,14 @@ def test_update_conversion_event_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "post_update_conversion_event" ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "post_update_conversion_event_with_metadata", + ) as post_with_metadata, 
mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "pre_update_conversion_event" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_admin.UpdateConversionEventRequest.pb( analytics_admin.UpdateConversionEventRequest() ) @@ -98534,6 +98838,7 @@ def test_update_conversion_event_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.ConversionEvent() + post_with_metadata.return_value = resources.ConversionEvent(), metadata client.update_conversion_event( request, @@ -98545,6 +98850,7 @@ def test_update_conversion_event_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_conversion_event_rest_bad_request( @@ -98640,10 +98946,14 @@ def test_get_conversion_event_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "post_get_conversion_event" ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "post_get_conversion_event_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "pre_get_conversion_event" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_admin.GetConversionEventRequest.pb( analytics_admin.GetConversionEventRequest() ) @@ -98667,6 +98977,7 @@ def test_get_conversion_event_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.ConversionEvent() + post_with_metadata.return_value = resources.ConversionEvent(), metadata client.get_conversion_event( request, @@ -98678,6 +98989,7 @@ def test_get_conversion_event_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_conversion_event_rest_bad_request( @@ -98871,10 +99183,14 @@ def test_list_conversion_events_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "post_list_conversion_events" ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "post_list_conversion_events_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "pre_list_conversion_events" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_admin.ListConversionEventsRequest.pb( analytics_admin.ListConversionEventsRequest() ) @@ -98900,6 +99216,10 @@ def test_list_conversion_events_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = analytics_admin.ListConversionEventsResponse() + post_with_metadata.return_value = ( + analytics_admin.ListConversionEventsResponse(), + metadata, + ) client.list_conversion_events( request, @@ -98911,6 +99231,7 @@ def test_list_conversion_events_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_key_event_rest_bad_request( @@ -99082,10 +99403,14 @@ def test_create_key_event_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "post_create_key_event" ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + 
"post_create_key_event_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "pre_create_key_event" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_admin.CreateKeyEventRequest.pb( analytics_admin.CreateKeyEventRequest() ) @@ -99109,6 +99434,7 @@ def test_create_key_event_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.KeyEvent() + post_with_metadata.return_value = resources.KeyEvent(), metadata client.create_key_event( request, @@ -99120,6 +99446,7 @@ def test_create_key_event_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_key_event_rest_bad_request( @@ -99291,10 +99618,14 @@ def test_update_key_event_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "post_update_key_event" ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "post_update_key_event_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "pre_update_key_event" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_admin.UpdateKeyEventRequest.pb( analytics_admin.UpdateKeyEventRequest() ) @@ -99318,6 +99649,7 @@ def test_update_key_event_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.KeyEvent() + post_with_metadata.return_value = resources.KeyEvent(), metadata client.update_key_event( request, @@ -99329,6 +99661,7 @@ def test_update_key_event_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_key_event_rest_bad_request( @@ -99421,10 +99754,14 @@ def test_get_key_event_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "post_get_key_event" ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "post_get_key_event_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "pre_get_key_event" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_admin.GetKeyEventRequest.pb( analytics_admin.GetKeyEventRequest() ) @@ -99448,6 +99785,7 @@ def test_get_key_event_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.KeyEvent() + post_with_metadata.return_value = resources.KeyEvent(), metadata client.get_key_event( request, @@ -99459,6 +99797,7 @@ def test_get_key_event_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_key_event_rest_bad_request( @@ -99652,10 +99991,14 @@ def test_list_key_events_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "post_list_key_events" ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "post_list_key_events_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "pre_list_key_events" ) as pre: pre.assert_not_called() 
post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_admin.ListKeyEventsRequest.pb( analytics_admin.ListKeyEventsRequest() ) @@ -99681,6 +100024,10 @@ def test_list_key_events_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = analytics_admin.ListKeyEventsResponse() + post_with_metadata.return_value = ( + analytics_admin.ListKeyEventsResponse(), + metadata, + ) client.list_key_events( request, @@ -99692,6 +100039,7 @@ def test_list_key_events_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_display_video360_advertiser_link_rest_bad_request( @@ -99781,11 +100129,15 @@ def test_get_display_video360_advertiser_link_rest_interceptors(null_interceptor transports.AnalyticsAdminServiceRestInterceptor, "post_get_display_video360_advertiser_link", ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "post_get_display_video360_advertiser_link_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "pre_get_display_video360_advertiser_link", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_admin.GetDisplayVideo360AdvertiserLinkRequest.pb( analytics_admin.GetDisplayVideo360AdvertiserLinkRequest() ) @@ -99811,6 +100163,10 @@ def test_get_display_video360_advertiser_link_rest_interceptors(null_interceptor ] pre.return_value = request, metadata post.return_value = resources.DisplayVideo360AdvertiserLink() + post_with_metadata.return_value = ( + resources.DisplayVideo360AdvertiserLink(), + metadata, + ) client.get_display_video360_advertiser_link( request, @@ -99822,6 +100178,7 @@ def test_get_display_video360_advertiser_link_rest_interceptors(null_interceptor pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_display_video360_advertiser_links_rest_bad_request( @@ -99909,11 +100266,15 @@ def test_list_display_video360_advertiser_links_rest_interceptors(null_intercept transports.AnalyticsAdminServiceRestInterceptor, "post_list_display_video360_advertiser_links", ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "post_list_display_video360_advertiser_links_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "pre_list_display_video360_advertiser_links", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_admin.ListDisplayVideo360AdvertiserLinksRequest.pb( analytics_admin.ListDisplayVideo360AdvertiserLinksRequest() ) @@ -99941,6 +100302,10 @@ def test_list_display_video360_advertiser_links_rest_interceptors(null_intercept ] pre.return_value = request, metadata post.return_value = analytics_admin.ListDisplayVideo360AdvertiserLinksResponse() + post_with_metadata.return_value = ( + analytics_admin.ListDisplayVideo360AdvertiserLinksResponse(), + metadata, + ) client.list_display_video360_advertiser_links( request, @@ -99952,6 +100317,7 @@ def test_list_display_video360_advertiser_links_rest_interceptors(null_intercept pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_display_video360_advertiser_link_rest_bad_request( @@ -100124,11 +100490,15 @@ def 
test_create_display_video360_advertiser_link_rest_interceptors(null_intercep transports.AnalyticsAdminServiceRestInterceptor, "post_create_display_video360_advertiser_link", ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "post_create_display_video360_advertiser_link_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "pre_create_display_video360_advertiser_link", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_admin.CreateDisplayVideo360AdvertiserLinkRequest.pb( analytics_admin.CreateDisplayVideo360AdvertiserLinkRequest() ) @@ -100154,6 +100524,10 @@ def test_create_display_video360_advertiser_link_rest_interceptors(null_intercep ] pre.return_value = request, metadata post.return_value = resources.DisplayVideo360AdvertiserLink() + post_with_metadata.return_value = ( + resources.DisplayVideo360AdvertiserLink(), + metadata, + ) client.create_display_video360_advertiser_link( request, @@ -100165,6 +100539,7 @@ def test_create_display_video360_advertiser_link_rest_interceptors(null_intercep pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_display_video360_advertiser_link_rest_bad_request( @@ -100455,11 +100830,15 @@ def test_update_display_video360_advertiser_link_rest_interceptors(null_intercep transports.AnalyticsAdminServiceRestInterceptor, "post_update_display_video360_advertiser_link", ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "post_update_display_video360_advertiser_link_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "pre_update_display_video360_advertiser_link", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_admin.UpdateDisplayVideo360AdvertiserLinkRequest.pb( analytics_admin.UpdateDisplayVideo360AdvertiserLinkRequest() ) @@ -100485,6 +100864,10 @@ def test_update_display_video360_advertiser_link_rest_interceptors(null_intercep ] pre.return_value = request, metadata post.return_value = resources.DisplayVideo360AdvertiserLink() + post_with_metadata.return_value = ( + resources.DisplayVideo360AdvertiserLink(), + metadata, + ) client.update_display_video360_advertiser_link( request, @@ -100496,6 +100879,7 @@ def test_update_display_video360_advertiser_link_rest_interceptors(null_intercep pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_display_video360_advertiser_link_proposal_rest_bad_request( @@ -100593,11 +100977,15 @@ def test_get_display_video360_advertiser_link_proposal_rest_interceptors( transports.AnalyticsAdminServiceRestInterceptor, "post_get_display_video360_advertiser_link_proposal", ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "post_get_display_video360_advertiser_link_proposal_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "pre_get_display_video360_advertiser_link_proposal", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_admin.GetDisplayVideo360AdvertiserLinkProposalRequest.pb( analytics_admin.GetDisplayVideo360AdvertiserLinkProposalRequest() ) @@ -100623,6 +101011,10 @@ def 
test_get_display_video360_advertiser_link_proposal_rest_interceptors( ] pre.return_value = request, metadata post.return_value = resources.DisplayVideo360AdvertiserLinkProposal() + post_with_metadata.return_value = ( + resources.DisplayVideo360AdvertiserLinkProposal(), + metadata, + ) client.get_display_video360_advertiser_link_proposal( request, @@ -100634,6 +101026,7 @@ def test_get_display_video360_advertiser_link_proposal_rest_interceptors( pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_display_video360_advertiser_link_proposals_rest_bad_request( @@ -100729,11 +101122,15 @@ def test_list_display_video360_advertiser_link_proposals_rest_interceptors( transports.AnalyticsAdminServiceRestInterceptor, "post_list_display_video360_advertiser_link_proposals", ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "post_list_display_video360_advertiser_link_proposals_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "pre_list_display_video360_advertiser_link_proposals", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = ( analytics_admin.ListDisplayVideo360AdvertiserLinkProposalsRequest.pb( analytics_admin.ListDisplayVideo360AdvertiserLinkProposalsRequest() @@ -100765,6 +101162,10 @@ def test_list_display_video360_advertiser_link_proposals_rest_interceptors( post.return_value = ( analytics_admin.ListDisplayVideo360AdvertiserLinkProposalsResponse() ) + post_with_metadata.return_value = ( + analytics_admin.ListDisplayVideo360AdvertiserLinkProposalsResponse(), + metadata, + ) client.list_display_video360_advertiser_link_proposals( request, @@ -100776,6 +101177,7 @@ def test_list_display_video360_advertiser_link_proposals_rest_interceptors( pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_display_video360_advertiser_link_proposal_rest_bad_request( @@ -100969,11 +101371,15 @@ def test_create_display_video360_advertiser_link_proposal_rest_interceptors( transports.AnalyticsAdminServiceRestInterceptor, "post_create_display_video360_advertiser_link_proposal", ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "post_create_display_video360_advertiser_link_proposal_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "pre_create_display_video360_advertiser_link_proposal", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = ( analytics_admin.CreateDisplayVideo360AdvertiserLinkProposalRequest.pb( analytics_admin.CreateDisplayVideo360AdvertiserLinkProposalRequest() @@ -101001,6 +101407,10 @@ def test_create_display_video360_advertiser_link_proposal_rest_interceptors( ] pre.return_value = request, metadata post.return_value = resources.DisplayVideo360AdvertiserLinkProposal() + post_with_metadata.return_value = ( + resources.DisplayVideo360AdvertiserLinkProposal(), + metadata, + ) client.create_display_video360_advertiser_link_proposal( request, @@ -101012,6 +101422,7 @@ def test_create_display_video360_advertiser_link_proposal_rest_interceptors( pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_display_video360_advertiser_link_proposal_rest_bad_request( @@ -101230,11 +101641,15 @@ def 
test_approve_display_video360_advertiser_link_proposal_rest_interceptors( transports.AnalyticsAdminServiceRestInterceptor, "post_approve_display_video360_advertiser_link_proposal", ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "post_approve_display_video360_advertiser_link_proposal_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "pre_approve_display_video360_advertiser_link_proposal", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = ( analytics_admin.ApproveDisplayVideo360AdvertiserLinkProposalRequest.pb( analytics_admin.ApproveDisplayVideo360AdvertiserLinkProposalRequest() @@ -101264,6 +101679,10 @@ def test_approve_display_video360_advertiser_link_proposal_rest_interceptors( post.return_value = ( analytics_admin.ApproveDisplayVideo360AdvertiserLinkProposalResponse() ) + post_with_metadata.return_value = ( + analytics_admin.ApproveDisplayVideo360AdvertiserLinkProposalResponse(), + metadata, + ) client.approve_display_video360_advertiser_link_proposal( request, @@ -101275,6 +101694,7 @@ def test_approve_display_video360_advertiser_link_proposal_rest_interceptors( pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_cancel_display_video360_advertiser_link_proposal_rest_bad_request( @@ -101374,11 +101794,15 @@ def test_cancel_display_video360_advertiser_link_proposal_rest_interceptors( transports.AnalyticsAdminServiceRestInterceptor, "post_cancel_display_video360_advertiser_link_proposal", ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "post_cancel_display_video360_advertiser_link_proposal_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "pre_cancel_display_video360_advertiser_link_proposal", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = ( analytics_admin.CancelDisplayVideo360AdvertiserLinkProposalRequest.pb( analytics_admin.CancelDisplayVideo360AdvertiserLinkProposalRequest() @@ -101406,6 +101830,10 @@ def test_cancel_display_video360_advertiser_link_proposal_rest_interceptors( ] pre.return_value = request, metadata post.return_value = resources.DisplayVideo360AdvertiserLinkProposal() + post_with_metadata.return_value = ( + resources.DisplayVideo360AdvertiserLinkProposal(), + metadata, + ) client.cancel_display_video360_advertiser_link_proposal( request, @@ -101417,6 +101845,7 @@ def test_cancel_display_video360_advertiser_link_proposal_rest_interceptors( pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_custom_dimension_rest_bad_request( @@ -101588,10 +102017,14 @@ def test_create_custom_dimension_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "post_create_custom_dimension" ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "post_create_custom_dimension_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "pre_create_custom_dimension" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_admin.CreateCustomDimensionRequest.pb( analytics_admin.CreateCustomDimensionRequest() ) @@ -101615,6 +102048,7 @@ def 
test_create_custom_dimension_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.CustomDimension() + post_with_metadata.return_value = resources.CustomDimension(), metadata client.create_custom_dimension( request, @@ -101626,6 +102060,7 @@ def test_create_custom_dimension_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_custom_dimension_rest_bad_request( @@ -101801,10 +102236,14 @@ def test_update_custom_dimension_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "post_update_custom_dimension" ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "post_update_custom_dimension_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "pre_update_custom_dimension" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_admin.UpdateCustomDimensionRequest.pb( analytics_admin.UpdateCustomDimensionRequest() ) @@ -101828,6 +102267,7 @@ def test_update_custom_dimension_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.CustomDimension() + post_with_metadata.return_value = resources.CustomDimension(), metadata client.update_custom_dimension( request, @@ -101839,6 +102279,7 @@ def test_update_custom_dimension_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_custom_dimensions_rest_bad_request( @@ -101923,10 +102364,14 @@ def test_list_custom_dimensions_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "post_list_custom_dimensions" ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "post_list_custom_dimensions_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "pre_list_custom_dimensions" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_admin.ListCustomDimensionsRequest.pb( analytics_admin.ListCustomDimensionsRequest() ) @@ -101952,6 +102397,10 @@ def test_list_custom_dimensions_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = analytics_admin.ListCustomDimensionsResponse() + post_with_metadata.return_value = ( + analytics_admin.ListCustomDimensionsResponse(), + metadata, + ) client.list_custom_dimensions( request, @@ -101963,6 +102412,7 @@ def test_list_custom_dimensions_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_archive_custom_dimension_rest_bad_request( @@ -102166,10 +102616,14 @@ def test_get_custom_dimension_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "post_get_custom_dimension" ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "post_get_custom_dimension_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "pre_get_custom_dimension" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = 
analytics_admin.GetCustomDimensionRequest.pb( analytics_admin.GetCustomDimensionRequest() ) @@ -102193,6 +102647,7 @@ def test_get_custom_dimension_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.CustomDimension() + post_with_metadata.return_value = resources.CustomDimension(), metadata client.get_custom_dimension( request, @@ -102204,6 +102659,7 @@ def test_get_custom_dimension_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_custom_metric_rest_bad_request( @@ -102380,10 +102836,14 @@ def test_create_custom_metric_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "post_create_custom_metric" ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "post_create_custom_metric_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "pre_create_custom_metric" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_admin.CreateCustomMetricRequest.pb( analytics_admin.CreateCustomMetricRequest() ) @@ -102407,6 +102867,7 @@ def test_create_custom_metric_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.CustomMetric() + post_with_metadata.return_value = resources.CustomMetric(), metadata client.create_custom_metric( request, @@ -102418,6 +102879,7 @@ def test_create_custom_metric_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_custom_metric_rest_bad_request( @@ -102598,10 +103060,14 @@ def test_update_custom_metric_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "post_update_custom_metric" ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "post_update_custom_metric_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "pre_update_custom_metric" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_admin.UpdateCustomMetricRequest.pb( analytics_admin.UpdateCustomMetricRequest() ) @@ -102625,6 +103091,7 @@ def test_update_custom_metric_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.CustomMetric() + post_with_metadata.return_value = resources.CustomMetric(), metadata client.update_custom_metric( request, @@ -102636,6 +103103,7 @@ def test_update_custom_metric_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_custom_metrics_rest_bad_request( @@ -102720,10 +103188,14 @@ def test_list_custom_metrics_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "post_list_custom_metrics" ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "post_list_custom_metrics_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "pre_list_custom_metrics" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = 
analytics_admin.ListCustomMetricsRequest.pb( analytics_admin.ListCustomMetricsRequest() ) @@ -102749,6 +103221,10 @@ def test_list_custom_metrics_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = analytics_admin.ListCustomMetricsResponse() + post_with_metadata.return_value = ( + analytics_admin.ListCustomMetricsResponse(), + metadata, + ) client.list_custom_metrics( request, @@ -102760,6 +103236,7 @@ def test_list_custom_metrics_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_archive_custom_metric_rest_bad_request( @@ -102969,10 +103446,14 @@ def test_get_custom_metric_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "post_get_custom_metric" ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "post_get_custom_metric_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "pre_get_custom_metric" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_admin.GetCustomMetricRequest.pb( analytics_admin.GetCustomMetricRequest() ) @@ -102996,6 +103477,7 @@ def test_get_custom_metric_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.CustomMetric() + post_with_metadata.return_value = resources.CustomMetric(), metadata client.get_custom_metric( request, @@ -103007,6 +103489,7 @@ def test_get_custom_metric_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_data_retention_settings_rest_bad_request( @@ -103056,6 +103539,7 @@ def test_get_data_retention_settings_rest_call_success(request_type): return_value = resources.DataRetentionSettings( name="name_value", event_data_retention=resources.DataRetentionSettings.RetentionDuration.TWO_MONTHS, + user_data_retention=resources.DataRetentionSettings.RetentionDuration.TWO_MONTHS, reset_user_data_on_new_activity=True, ) @@ -103078,6 +103562,10 @@ def test_get_data_retention_settings_rest_call_success(request_type): response.event_data_retention == resources.DataRetentionSettings.RetentionDuration.TWO_MONTHS ) + assert ( + response.user_data_retention + == resources.DataRetentionSettings.RetentionDuration.TWO_MONTHS + ) assert response.reset_user_data_on_new_activity is True @@ -103099,11 +103587,15 @@ def test_get_data_retention_settings_rest_interceptors(null_interceptor): transports.AnalyticsAdminServiceRestInterceptor, "post_get_data_retention_settings", ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "post_get_data_retention_settings_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "pre_get_data_retention_settings", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_admin.GetDataRetentionSettingsRequest.pb( analytics_admin.GetDataRetentionSettingsRequest() ) @@ -103129,6 +103621,7 @@ def test_get_data_retention_settings_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.DataRetentionSettings() + post_with_metadata.return_value = resources.DataRetentionSettings(), metadata client.get_data_retention_settings( request, @@ -103140,6 
+103633,7 @@ def test_get_data_retention_settings_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_data_retention_settings_rest_bad_request( @@ -103188,6 +103682,7 @@ def test_update_data_retention_settings_rest_call_success(request_type): request_init["data_retention_settings"] = { "name": "properties/sample1/dataRetentionSettings", "event_data_retention": 1, + "user_data_retention": 1, "reset_user_data_on_new_activity": True, } # The version of a generated dependency at test runtime may differ from the version used during generation. @@ -103269,6 +103764,7 @@ def get_message_fields(field): return_value = resources.DataRetentionSettings( name="name_value", event_data_retention=resources.DataRetentionSettings.RetentionDuration.TWO_MONTHS, + user_data_retention=resources.DataRetentionSettings.RetentionDuration.TWO_MONTHS, reset_user_data_on_new_activity=True, ) @@ -103291,6 +103787,10 @@ def get_message_fields(field): response.event_data_retention == resources.DataRetentionSettings.RetentionDuration.TWO_MONTHS ) + assert ( + response.user_data_retention + == resources.DataRetentionSettings.RetentionDuration.TWO_MONTHS + ) assert response.reset_user_data_on_new_activity is True @@ -103312,11 +103812,15 @@ def test_update_data_retention_settings_rest_interceptors(null_interceptor): transports.AnalyticsAdminServiceRestInterceptor, "post_update_data_retention_settings", ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "post_update_data_retention_settings_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "pre_update_data_retention_settings", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_admin.UpdateDataRetentionSettingsRequest.pb( analytics_admin.UpdateDataRetentionSettingsRequest() ) @@ -103342,6 +103846,7 @@ def test_update_data_retention_settings_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.DataRetentionSettings() + post_with_metadata.return_value = resources.DataRetentionSettings(), metadata client.update_data_retention_settings( request, @@ -103353,6 +103858,7 @@ def test_update_data_retention_settings_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_data_stream_rest_bad_request( @@ -103528,10 +104034,14 @@ def test_create_data_stream_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "post_create_data_stream" ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "post_create_data_stream_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "pre_create_data_stream" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_admin.CreateDataStreamRequest.pb( analytics_admin.CreateDataStreamRequest() ) @@ -103555,6 +104065,7 @@ def test_create_data_stream_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.DataStream() + post_with_metadata.return_value = resources.DataStream(), metadata client.create_data_stream( request, @@ -103566,6 +104077,7 @@ def 
test_create_data_stream_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_data_stream_rest_bad_request( @@ -103850,10 +104362,14 @@ def test_update_data_stream_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "post_update_data_stream" ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "post_update_data_stream_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "pre_update_data_stream" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_admin.UpdateDataStreamRequest.pb( analytics_admin.UpdateDataStreamRequest() ) @@ -103877,6 +104393,7 @@ def test_update_data_stream_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.DataStream() + post_with_metadata.return_value = resources.DataStream(), metadata client.update_data_stream( request, @@ -103888,6 +104405,7 @@ def test_update_data_stream_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_data_streams_rest_bad_request( @@ -103972,10 +104490,14 @@ def test_list_data_streams_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "post_list_data_streams" ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "post_list_data_streams_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "pre_list_data_streams" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_admin.ListDataStreamsRequest.pb( analytics_admin.ListDataStreamsRequest() ) @@ -104001,6 +104523,10 @@ def test_list_data_streams_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = analytics_admin.ListDataStreamsResponse() + post_with_metadata.return_value = ( + analytics_admin.ListDataStreamsResponse(), + metadata, + ) client.list_data_streams( request, @@ -104012,6 +104538,7 @@ def test_list_data_streams_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_data_stream_rest_bad_request( @@ -104100,10 +104627,14 @@ def test_get_data_stream_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "post_get_data_stream" ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "post_get_data_stream_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "pre_get_data_stream" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_admin.GetDataStreamRequest.pb( analytics_admin.GetDataStreamRequest() ) @@ -104127,6 +104658,7 @@ def test_get_data_stream_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.DataStream() + post_with_metadata.return_value = resources.DataStream(), metadata client.get_data_stream( request, @@ -104138,6 +104670,7 @@ def test_get_data_stream_rest_interceptors(null_interceptor): 
pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_audience_rest_bad_request(request_type=analytics_admin.GetAudienceRequest): @@ -104233,10 +104766,14 @@ def test_get_audience_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "post_get_audience" ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "post_get_audience_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "pre_get_audience" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_admin.GetAudienceRequest.pb( analytics_admin.GetAudienceRequest() ) @@ -104260,6 +104797,7 @@ def test_get_audience_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = audience.Audience() + post_with_metadata.return_value = audience.Audience(), metadata client.get_audience( request, @@ -104271,6 +104809,7 @@ def test_get_audience_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_audiences_rest_bad_request( @@ -104355,10 +104894,14 @@ def test_list_audiences_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "post_list_audiences" ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "post_list_audiences_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "pre_list_audiences" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_admin.ListAudiencesRequest.pb( analytics_admin.ListAudiencesRequest() ) @@ -104384,6 +104927,10 @@ def test_list_audiences_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = analytics_admin.ListAudiencesResponse() + post_with_metadata.return_value = ( + analytics_admin.ListAudiencesResponse(), + metadata, + ) client.list_audiences( request, @@ -104395,6 +104942,7 @@ def test_list_audiences_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_audience_rest_bad_request( @@ -104620,10 +105168,14 @@ def test_create_audience_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "post_create_audience" ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "post_create_audience_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "pre_create_audience" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_admin.CreateAudienceRequest.pb( analytics_admin.CreateAudienceRequest() ) @@ -104647,6 +105199,7 @@ def test_create_audience_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gaa_audience.Audience() + post_with_metadata.return_value = gaa_audience.Audience(), metadata client.create_audience( request, @@ -104658,6 +105211,7 @@ def test_create_audience_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def 
test_update_audience_rest_bad_request( @@ -104883,10 +105437,14 @@ def test_update_audience_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "post_update_audience" ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "post_update_audience_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "pre_update_audience" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_admin.UpdateAudienceRequest.pb( analytics_admin.UpdateAudienceRequest() ) @@ -104910,6 +105468,7 @@ def test_update_audience_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gaa_audience.Audience() + post_with_metadata.return_value = gaa_audience.Audience(), metadata client.update_audience( request, @@ -104921,6 +105480,7 @@ def test_update_audience_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_archive_audience_rest_bad_request( @@ -105118,10 +105678,14 @@ def test_get_search_ads360_link_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "post_get_search_ads360_link" ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "post_get_search_ads360_link_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "pre_get_search_ads360_link" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_admin.GetSearchAds360LinkRequest.pb( analytics_admin.GetSearchAds360LinkRequest() ) @@ -105145,6 +105709,7 @@ def test_get_search_ads360_link_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.SearchAds360Link() + post_with_metadata.return_value = resources.SearchAds360Link(), metadata client.get_search_ads360_link( request, @@ -105156,6 +105721,7 @@ def test_get_search_ads360_link_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_search_ads360_links_rest_bad_request( @@ -105240,10 +105806,14 @@ def test_list_search_ads360_links_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "post_list_search_ads360_links" ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "post_list_search_ads360_links_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "pre_list_search_ads360_links" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_admin.ListSearchAds360LinksRequest.pb( analytics_admin.ListSearchAds360LinksRequest() ) @@ -105269,6 +105839,10 @@ def test_list_search_ads360_links_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = analytics_admin.ListSearchAds360LinksResponse() + post_with_metadata.return_value = ( + analytics_admin.ListSearchAds360LinksResponse(), + metadata, + ) client.list_search_ads360_links( request, @@ -105280,6 +105854,7 @@ def test_list_search_ads360_links_rest_interceptors(null_interceptor): pre.assert_called_once() 
post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_search_ads360_link_rest_bad_request( @@ -105447,10 +106022,14 @@ def test_create_search_ads360_link_rest_interceptors(null_interceptor): transports.AnalyticsAdminServiceRestInterceptor, "post_create_search_ads360_link", ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "post_create_search_ads360_link_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "pre_create_search_ads360_link" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_admin.CreateSearchAds360LinkRequest.pb( analytics_admin.CreateSearchAds360LinkRequest() ) @@ -105474,6 +106053,7 @@ def test_create_search_ads360_link_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.SearchAds360Link() + post_with_metadata.return_value = resources.SearchAds360Link(), metadata client.create_search_ads360_link( request, @@ -105485,6 +106065,7 @@ def test_create_search_ads360_link_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_search_ads360_link_rest_bad_request( @@ -105765,10 +106346,14 @@ def test_update_search_ads360_link_rest_interceptors(null_interceptor): transports.AnalyticsAdminServiceRestInterceptor, "post_update_search_ads360_link", ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "post_update_search_ads360_link_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "pre_update_search_ads360_link" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_admin.UpdateSearchAds360LinkRequest.pb( analytics_admin.UpdateSearchAds360LinkRequest() ) @@ -105792,6 +106377,7 @@ def test_update_search_ads360_link_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.SearchAds360Link() + post_with_metadata.return_value = resources.SearchAds360Link(), metadata client.update_search_ads360_link( request, @@ -105803,6 +106389,7 @@ def test_update_search_ads360_link_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_attribution_settings_rest_bad_request( @@ -105907,10 +106494,14 @@ def test_get_attribution_settings_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "post_get_attribution_settings" ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "post_get_attribution_settings_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "pre_get_attribution_settings" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_admin.GetAttributionSettingsRequest.pb( analytics_admin.GetAttributionSettingsRequest() ) @@ -105936,6 +106527,7 @@ def test_get_attribution_settings_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.AttributionSettings() + post_with_metadata.return_value = resources.AttributionSettings(), metadata client.get_attribution_settings( request, @@ -105947,6 +106539,7 @@ def 
test_get_attribution_settings_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_attribution_settings_rest_bad_request( @@ -106134,11 +106727,15 @@ def test_update_attribution_settings_rest_interceptors(null_interceptor): transports.AnalyticsAdminServiceRestInterceptor, "post_update_attribution_settings", ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "post_update_attribution_settings_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "pre_update_attribution_settings", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_admin.UpdateAttributionSettingsRequest.pb( analytics_admin.UpdateAttributionSettingsRequest() ) @@ -106164,6 +106761,7 @@ def test_update_attribution_settings_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.AttributionSettings() + post_with_metadata.return_value = resources.AttributionSettings(), metadata client.update_attribution_settings( request, @@ -106175,6 +106773,7 @@ def test_update_attribution_settings_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_run_access_report_rest_bad_request( @@ -106259,10 +106858,14 @@ def test_run_access_report_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "post_run_access_report" ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "post_run_access_report_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "pre_run_access_report" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_admin.RunAccessReportRequest.pb( analytics_admin.RunAccessReportRequest() ) @@ -106288,6 +106891,10 @@ def test_run_access_report_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = analytics_admin.RunAccessReportResponse() + post_with_metadata.return_value = ( + analytics_admin.RunAccessReportResponse(), + metadata, + ) client.run_access_report( request, @@ -106299,6 +106906,7 @@ def test_run_access_report_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_access_binding_rest_bad_request( @@ -106460,10 +107068,14 @@ def test_create_access_binding_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "post_create_access_binding" ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "post_create_access_binding_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "pre_create_access_binding" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_admin.CreateAccessBindingRequest.pb( analytics_admin.CreateAccessBindingRequest() ) @@ -106487,6 +107099,7 @@ def test_create_access_binding_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.AccessBinding() + post_with_metadata.return_value = resources.AccessBinding(), metadata 
client.create_access_binding( request, @@ -106498,6 +107111,7 @@ def test_create_access_binding_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_access_binding_rest_bad_request( @@ -106585,10 +107199,14 @@ def test_get_access_binding_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "post_get_access_binding" ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "post_get_access_binding_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "pre_get_access_binding" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_admin.GetAccessBindingRequest.pb( analytics_admin.GetAccessBindingRequest() ) @@ -106612,6 +107230,7 @@ def test_get_access_binding_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.AccessBinding() + post_with_metadata.return_value = resources.AccessBinding(), metadata client.get_access_binding( request, @@ -106623,6 +107242,7 @@ def test_get_access_binding_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_access_binding_rest_bad_request( @@ -106788,10 +107408,14 @@ def test_update_access_binding_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "post_update_access_binding" ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "post_update_access_binding_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "pre_update_access_binding" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_admin.UpdateAccessBindingRequest.pb( analytics_admin.UpdateAccessBindingRequest() ) @@ -106815,6 +107439,7 @@ def test_update_access_binding_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.AccessBinding() + post_with_metadata.return_value = resources.AccessBinding(), metadata client.update_access_binding( request, @@ -106826,6 +107451,7 @@ def test_update_access_binding_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_access_binding_rest_bad_request( @@ -107019,10 +107645,14 @@ def test_list_access_bindings_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "post_list_access_bindings" ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "post_list_access_bindings_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "pre_list_access_bindings" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_admin.ListAccessBindingsRequest.pb( analytics_admin.ListAccessBindingsRequest() ) @@ -107048,6 +107678,10 @@ def test_list_access_bindings_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = analytics_admin.ListAccessBindingsResponse() + post_with_metadata.return_value = ( + 
analytics_admin.ListAccessBindingsResponse(), + metadata, + ) client.list_access_bindings( request, @@ -107059,6 +107693,7 @@ def test_list_access_bindings_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_batch_create_access_bindings_rest_bad_request( @@ -107143,11 +107778,15 @@ def test_batch_create_access_bindings_rest_interceptors(null_interceptor): transports.AnalyticsAdminServiceRestInterceptor, "post_batch_create_access_bindings", ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "post_batch_create_access_bindings_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "pre_batch_create_access_bindings", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_admin.BatchCreateAccessBindingsRequest.pb( analytics_admin.BatchCreateAccessBindingsRequest() ) @@ -107173,6 +107812,10 @@ def test_batch_create_access_bindings_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = analytics_admin.BatchCreateAccessBindingsResponse() + post_with_metadata.return_value = ( + analytics_admin.BatchCreateAccessBindingsResponse(), + metadata, + ) client.batch_create_access_bindings( request, @@ -107184,6 +107827,7 @@ def test_batch_create_access_bindings_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_batch_get_access_bindings_rest_bad_request( @@ -107266,10 +107910,14 @@ def test_batch_get_access_bindings_rest_interceptors(null_interceptor): transports.AnalyticsAdminServiceRestInterceptor, "post_batch_get_access_bindings", ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "post_batch_get_access_bindings_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "pre_batch_get_access_bindings" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_admin.BatchGetAccessBindingsRequest.pb( analytics_admin.BatchGetAccessBindingsRequest() ) @@ -107295,6 +107943,10 @@ def test_batch_get_access_bindings_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = analytics_admin.BatchGetAccessBindingsResponse() + post_with_metadata.return_value = ( + analytics_admin.BatchGetAccessBindingsResponse(), + metadata, + ) client.batch_get_access_bindings( request, @@ -107306,6 +107958,7 @@ def test_batch_get_access_bindings_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_batch_update_access_bindings_rest_bad_request( @@ -107390,11 +108043,15 @@ def test_batch_update_access_bindings_rest_interceptors(null_interceptor): transports.AnalyticsAdminServiceRestInterceptor, "post_batch_update_access_bindings", ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "post_batch_update_access_bindings_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "pre_batch_update_access_bindings", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_admin.BatchUpdateAccessBindingsRequest.pb( 
analytics_admin.BatchUpdateAccessBindingsRequest() ) @@ -107420,6 +108077,10 @@ def test_batch_update_access_bindings_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = analytics_admin.BatchUpdateAccessBindingsResponse() + post_with_metadata.return_value = ( + analytics_admin.BatchUpdateAccessBindingsResponse(), + metadata, + ) client.batch_update_access_bindings( request, @@ -107431,6 +108092,7 @@ def test_batch_update_access_bindings_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_batch_delete_access_bindings_rest_bad_request( @@ -107633,10 +108295,14 @@ def test_get_expanded_data_set_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "post_get_expanded_data_set" ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "post_get_expanded_data_set_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "pre_get_expanded_data_set" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_admin.GetExpandedDataSetRequest.pb( analytics_admin.GetExpandedDataSetRequest() ) @@ -107662,6 +108328,7 @@ def test_get_expanded_data_set_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = expanded_data_set.ExpandedDataSet() + post_with_metadata.return_value = expanded_data_set.ExpandedDataSet(), metadata client.get_expanded_data_set( request, @@ -107673,6 +108340,7 @@ def test_get_expanded_data_set_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_expanded_data_sets_rest_bad_request( @@ -107757,10 +108425,14 @@ def test_list_expanded_data_sets_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "post_list_expanded_data_sets" ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "post_list_expanded_data_sets_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "pre_list_expanded_data_sets" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_admin.ListExpandedDataSetsRequest.pb( analytics_admin.ListExpandedDataSetsRequest() ) @@ -107786,6 +108458,10 @@ def test_list_expanded_data_sets_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = analytics_admin.ListExpandedDataSetsResponse() + post_with_metadata.return_value = ( + analytics_admin.ListExpandedDataSetsResponse(), + metadata, + ) client.list_expanded_data_sets( request, @@ -107797,6 +108473,7 @@ def test_list_expanded_data_sets_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_expanded_data_set_rest_bad_request( @@ -107982,10 +108659,14 @@ def test_create_expanded_data_set_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "post_create_expanded_data_set" ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "post_create_expanded_data_set_with_metadata", + ) as post_with_metadata, mock.patch.object( 
transports.AnalyticsAdminServiceRestInterceptor, "pre_create_expanded_data_set" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_admin.CreateExpandedDataSetRequest.pb( analytics_admin.CreateExpandedDataSetRequest() ) @@ -108011,6 +108692,10 @@ def test_create_expanded_data_set_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gaa_expanded_data_set.ExpandedDataSet() + post_with_metadata.return_value = ( + gaa_expanded_data_set.ExpandedDataSet(), + metadata, + ) client.create_expanded_data_set( request, @@ -108022,6 +108707,7 @@ def test_create_expanded_data_set_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_expanded_data_set_rest_bad_request( @@ -108211,10 +108897,14 @@ def test_update_expanded_data_set_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "post_update_expanded_data_set" ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "post_update_expanded_data_set_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "pre_update_expanded_data_set" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_admin.UpdateExpandedDataSetRequest.pb( analytics_admin.UpdateExpandedDataSetRequest() ) @@ -108240,6 +108930,10 @@ def test_update_expanded_data_set_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gaa_expanded_data_set.ExpandedDataSet() + post_with_metadata.return_value = ( + gaa_expanded_data_set.ExpandedDataSet(), + metadata, + ) client.update_expanded_data_set( request, @@ -108251,6 +108945,7 @@ def test_update_expanded_data_set_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_expanded_data_set_rest_bad_request( @@ -108452,10 +109147,14 @@ def test_get_channel_group_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "post_get_channel_group" ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "post_get_channel_group_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "pre_get_channel_group" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_admin.GetChannelGroupRequest.pb( analytics_admin.GetChannelGroupRequest() ) @@ -108479,6 +109178,7 @@ def test_get_channel_group_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = channel_group.ChannelGroup() + post_with_metadata.return_value = channel_group.ChannelGroup(), metadata client.get_channel_group( request, @@ -108490,6 +109190,7 @@ def test_get_channel_group_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_channel_groups_rest_bad_request( @@ -108574,10 +109275,14 @@ def test_list_channel_groups_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "post_list_channel_groups" ) as post, mock.patch.object( + 
transports.AnalyticsAdminServiceRestInterceptor, + "post_list_channel_groups_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "pre_list_channel_groups" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_admin.ListChannelGroupsRequest.pb( analytics_admin.ListChannelGroupsRequest() ) @@ -108603,6 +109308,10 @@ def test_list_channel_groups_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = analytics_admin.ListChannelGroupsResponse() + post_with_metadata.return_value = ( + analytics_admin.ListChannelGroupsResponse(), + metadata, + ) client.list_channel_groups( request, @@ -108614,6 +109323,7 @@ def test_list_channel_groups_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_channel_group_rest_bad_request( @@ -108797,10 +109507,14 @@ def test_create_channel_group_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "post_create_channel_group" ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "post_create_channel_group_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "pre_create_channel_group" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_admin.CreateChannelGroupRequest.pb( analytics_admin.CreateChannelGroupRequest() ) @@ -108826,6 +109540,7 @@ def test_create_channel_group_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gaa_channel_group.ChannelGroup() + post_with_metadata.return_value = gaa_channel_group.ChannelGroup(), metadata client.create_channel_group( request, @@ -108837,6 +109552,7 @@ def test_create_channel_group_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_channel_group_rest_bad_request( @@ -109024,10 +109740,14 @@ def test_update_channel_group_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "post_update_channel_group" ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "post_update_channel_group_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "pre_update_channel_group" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_admin.UpdateChannelGroupRequest.pb( analytics_admin.UpdateChannelGroupRequest() ) @@ -109053,6 +109773,7 @@ def test_update_channel_group_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gaa_channel_group.ChannelGroup() + post_with_metadata.return_value = gaa_channel_group.ChannelGroup(), metadata client.update_channel_group( request, @@ -109064,6 +109785,7 @@ def test_update_channel_group_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_channel_group_rest_bad_request( @@ -109259,11 +109981,15 @@ def test_set_automated_ga4_configuration_opt_out_rest_interceptors(null_intercep transports.AnalyticsAdminServiceRestInterceptor, 
"post_set_automated_ga4_configuration_opt_out", ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "post_set_automated_ga4_configuration_opt_out_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "pre_set_automated_ga4_configuration_opt_out", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_admin.SetAutomatedGa4ConfigurationOptOutRequest.pb( analytics_admin.SetAutomatedGa4ConfigurationOptOutRequest() ) @@ -109291,6 +110017,10 @@ def test_set_automated_ga4_configuration_opt_out_rest_interceptors(null_intercep ] pre.return_value = request, metadata post.return_value = analytics_admin.SetAutomatedGa4ConfigurationOptOutResponse() + post_with_metadata.return_value = ( + analytics_admin.SetAutomatedGa4ConfigurationOptOutResponse(), + metadata, + ) client.set_automated_ga4_configuration_opt_out( request, @@ -109302,6 +110032,7 @@ def test_set_automated_ga4_configuration_opt_out_rest_interceptors(null_intercep pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_fetch_automated_ga4_configuration_opt_out_rest_bad_request( @@ -109391,11 +110122,15 @@ def test_fetch_automated_ga4_configuration_opt_out_rest_interceptors(null_interc transports.AnalyticsAdminServiceRestInterceptor, "post_fetch_automated_ga4_configuration_opt_out", ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "post_fetch_automated_ga4_configuration_opt_out_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "pre_fetch_automated_ga4_configuration_opt_out", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_admin.FetchAutomatedGa4ConfigurationOptOutRequest.pb( analytics_admin.FetchAutomatedGa4ConfigurationOptOutRequest() ) @@ -109425,6 +110160,10 @@ def test_fetch_automated_ga4_configuration_opt_out_rest_interceptors(null_interc post.return_value = ( analytics_admin.FetchAutomatedGa4ConfigurationOptOutResponse() ) + post_with_metadata.return_value = ( + analytics_admin.FetchAutomatedGa4ConfigurationOptOutResponse(), + metadata, + ) client.fetch_automated_ga4_configuration_opt_out( request, @@ -109436,6 +110175,7 @@ def test_fetch_automated_ga4_configuration_opt_out_rest_interceptors(null_interc pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_big_query_link_rest_bad_request( @@ -109615,10 +110355,14 @@ def test_create_big_query_link_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "post_create_big_query_link" ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "post_create_big_query_link_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "pre_create_big_query_link" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_admin.CreateBigQueryLinkRequest.pb( analytics_admin.CreateBigQueryLinkRequest() ) @@ -109642,6 +110386,7 @@ def test_create_big_query_link_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.BigQueryLink() + post_with_metadata.return_value = resources.BigQueryLink(), metadata client.create_big_query_link( 
request, @@ -109653,6 +110398,7 @@ def test_create_big_query_link_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_big_query_link_rest_bad_request( @@ -109753,10 +110499,14 @@ def test_get_big_query_link_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "post_get_big_query_link" ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "post_get_big_query_link_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "pre_get_big_query_link" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_admin.GetBigQueryLinkRequest.pb( analytics_admin.GetBigQueryLinkRequest() ) @@ -109780,6 +110530,7 @@ def test_get_big_query_link_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.BigQueryLink() + post_with_metadata.return_value = resources.BigQueryLink(), metadata client.get_big_query_link( request, @@ -109791,6 +110542,7 @@ def test_get_big_query_link_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_big_query_links_rest_bad_request( @@ -109875,10 +110627,14 @@ def test_list_big_query_links_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "post_list_big_query_links" ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "post_list_big_query_links_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "pre_list_big_query_links" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_admin.ListBigQueryLinksRequest.pb( analytics_admin.ListBigQueryLinksRequest() ) @@ -109904,6 +110660,10 @@ def test_list_big_query_links_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = analytics_admin.ListBigQueryLinksResponse() + post_with_metadata.return_value = ( + analytics_admin.ListBigQueryLinksResponse(), + metadata, + ) client.list_big_query_links( request, @@ -109915,6 +110675,7 @@ def test_list_big_query_links_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_big_query_link_rest_bad_request( @@ -110207,10 +110968,14 @@ def test_update_big_query_link_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "post_update_big_query_link" ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "post_update_big_query_link_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "pre_update_big_query_link" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_admin.UpdateBigQueryLinkRequest.pb( analytics_admin.UpdateBigQueryLinkRequest() ) @@ -110234,6 +110999,7 @@ def test_update_big_query_link_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.BigQueryLink() + post_with_metadata.return_value = resources.BigQueryLink(), metadata 
client.update_big_query_link( request, @@ -110245,6 +111011,7 @@ def test_update_big_query_link_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_enhanced_measurement_settings_rest_bad_request( @@ -110354,11 +111121,15 @@ def test_get_enhanced_measurement_settings_rest_interceptors(null_interceptor): transports.AnalyticsAdminServiceRestInterceptor, "post_get_enhanced_measurement_settings", ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "post_get_enhanced_measurement_settings_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "pre_get_enhanced_measurement_settings", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_admin.GetEnhancedMeasurementSettingsRequest.pb( analytics_admin.GetEnhancedMeasurementSettingsRequest() ) @@ -110384,6 +111155,10 @@ def test_get_enhanced_measurement_settings_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.EnhancedMeasurementSettings() + post_with_metadata.return_value = ( + resources.EnhancedMeasurementSettings(), + metadata, + ) client.get_enhanced_measurement_settings( request, @@ -110395,6 +111170,7 @@ def test_get_enhanced_measurement_settings_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_enhanced_measurement_settings_rest_bad_request( @@ -110596,11 +111372,15 @@ def test_update_enhanced_measurement_settings_rest_interceptors(null_interceptor transports.AnalyticsAdminServiceRestInterceptor, "post_update_enhanced_measurement_settings", ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "post_update_enhanced_measurement_settings_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "pre_update_enhanced_measurement_settings", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_admin.UpdateEnhancedMeasurementSettingsRequest.pb( analytics_admin.UpdateEnhancedMeasurementSettingsRequest() ) @@ -110626,6 +111406,10 @@ def test_update_enhanced_measurement_settings_rest_interceptors(null_interceptor ] pre.return_value = request, metadata post.return_value = resources.EnhancedMeasurementSettings() + post_with_metadata.return_value = ( + resources.EnhancedMeasurementSettings(), + metadata, + ) client.update_enhanced_measurement_settings( request, @@ -110637,6 +111421,7 @@ def test_update_enhanced_measurement_settings_rest_interceptors(null_interceptor pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_connected_site_tag_rest_bad_request( @@ -110719,10 +111504,14 @@ def test_create_connected_site_tag_rest_interceptors(null_interceptor): transports.AnalyticsAdminServiceRestInterceptor, "post_create_connected_site_tag", ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "post_create_connected_site_tag_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "pre_create_connected_site_tag" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_admin.CreateConnectedSiteTagRequest.pb( 
analytics_admin.CreateConnectedSiteTagRequest() ) @@ -110748,6 +111537,10 @@ def test_create_connected_site_tag_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = analytics_admin.CreateConnectedSiteTagResponse() + post_with_metadata.return_value = ( + analytics_admin.CreateConnectedSiteTagResponse(), + metadata, + ) client.create_connected_site_tag( request, @@ -110759,6 +111552,7 @@ def test_create_connected_site_tag_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_connected_site_tag_rest_bad_request( @@ -110949,10 +111743,14 @@ def test_list_connected_site_tags_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "post_list_connected_site_tags" ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "post_list_connected_site_tags_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "pre_list_connected_site_tags" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_admin.ListConnectedSiteTagsRequest.pb( analytics_admin.ListConnectedSiteTagsRequest() ) @@ -110978,6 +111776,10 @@ def test_list_connected_site_tags_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = analytics_admin.ListConnectedSiteTagsResponse() + post_with_metadata.return_value = ( + analytics_admin.ListConnectedSiteTagsResponse(), + metadata, + ) client.list_connected_site_tags( request, @@ -110989,6 +111791,7 @@ def test_list_connected_site_tags_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_fetch_connected_ga4_property_rest_bad_request( @@ -111076,11 +111879,15 @@ def test_fetch_connected_ga4_property_rest_interceptors(null_interceptor): transports.AnalyticsAdminServiceRestInterceptor, "post_fetch_connected_ga4_property", ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "post_fetch_connected_ga4_property_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "pre_fetch_connected_ga4_property", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_admin.FetchConnectedGa4PropertyRequest.pb( analytics_admin.FetchConnectedGa4PropertyRequest() ) @@ -111106,6 +111913,10 @@ def test_fetch_connected_ga4_property_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = analytics_admin.FetchConnectedGa4PropertyResponse() + post_with_metadata.return_value = ( + analytics_admin.FetchConnectedGa4PropertyResponse(), + metadata, + ) client.fetch_connected_ga4_property( request, @@ -111117,6 +111928,7 @@ def test_fetch_connected_ga4_property_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_ad_sense_link_rest_bad_request( @@ -111203,10 +112015,14 @@ def test_get_ad_sense_link_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "post_get_ad_sense_link" ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "post_get_ad_sense_link_with_metadata", + ) as 
post_with_metadata, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "pre_get_ad_sense_link" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_admin.GetAdSenseLinkRequest.pb( analytics_admin.GetAdSenseLinkRequest() ) @@ -111230,6 +112046,7 @@ def test_get_ad_sense_link_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.AdSenseLink() + post_with_metadata.return_value = resources.AdSenseLink(), metadata client.get_ad_sense_link( request, @@ -111241,6 +112058,7 @@ def test_get_ad_sense_link_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_ad_sense_link_rest_bad_request( @@ -111398,10 +112216,14 @@ def test_create_ad_sense_link_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "post_create_ad_sense_link" ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "post_create_ad_sense_link_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "pre_create_ad_sense_link" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_admin.CreateAdSenseLinkRequest.pb( analytics_admin.CreateAdSenseLinkRequest() ) @@ -111425,6 +112247,7 @@ def test_create_ad_sense_link_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.AdSenseLink() + post_with_metadata.return_value = resources.AdSenseLink(), metadata client.create_ad_sense_link( request, @@ -111436,6 +112259,7 @@ def test_create_ad_sense_link_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_ad_sense_link_rest_bad_request( @@ -111629,10 +112453,14 @@ def test_list_ad_sense_links_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "post_list_ad_sense_links" ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "post_list_ad_sense_links_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "pre_list_ad_sense_links" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_admin.ListAdSenseLinksRequest.pb( analytics_admin.ListAdSenseLinksRequest() ) @@ -111658,6 +112486,10 @@ def test_list_ad_sense_links_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = analytics_admin.ListAdSenseLinksResponse() + post_with_metadata.return_value = ( + analytics_admin.ListAdSenseLinksResponse(), + metadata, + ) client.list_ad_sense_links( request, @@ -111669,6 +112501,7 @@ def test_list_ad_sense_links_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_event_create_rule_rest_bad_request( @@ -111761,10 +112594,14 @@ def test_get_event_create_rule_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "post_get_event_create_rule" ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "post_get_event_create_rule_with_metadata", + ) as 
post_with_metadata, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "pre_get_event_create_rule" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_admin.GetEventCreateRuleRequest.pb( analytics_admin.GetEventCreateRuleRequest() ) @@ -111790,6 +112627,10 @@ def test_get_event_create_rule_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = event_create_and_edit.EventCreateRule() + post_with_metadata.return_value = ( + event_create_and_edit.EventCreateRule(), + metadata, + ) client.get_event_create_rule( request, @@ -111801,6 +112642,7 @@ def test_get_event_create_rule_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_event_create_rules_rest_bad_request( @@ -111885,10 +112727,14 @@ def test_list_event_create_rules_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "post_list_event_create_rules" ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "post_list_event_create_rules_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "pre_list_event_create_rules" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_admin.ListEventCreateRulesRequest.pb( analytics_admin.ListEventCreateRulesRequest() ) @@ -111914,6 +112760,10 @@ def test_list_event_create_rules_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = analytics_admin.ListEventCreateRulesResponse() + post_with_metadata.return_value = ( + analytics_admin.ListEventCreateRulesResponse(), + metadata, + ) client.list_event_create_rules( request, @@ -111925,6 +112775,7 @@ def test_list_event_create_rules_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_event_create_rule_rest_bad_request( @@ -112098,10 +112949,14 @@ def test_create_event_create_rule_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "post_create_event_create_rule" ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "post_create_event_create_rule_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "pre_create_event_create_rule" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_admin.CreateEventCreateRuleRequest.pb( analytics_admin.CreateEventCreateRuleRequest() ) @@ -112127,6 +112982,10 @@ def test_create_event_create_rule_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = event_create_and_edit.EventCreateRule() + post_with_metadata.return_value = ( + event_create_and_edit.EventCreateRule(), + metadata, + ) client.create_event_create_rule( request, @@ -112138,6 +112997,7 @@ def test_create_event_create_rule_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_event_create_rule_rest_bad_request( @@ -112319,10 +113179,14 @@ def test_update_event_create_rule_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( 
transports.AnalyticsAdminServiceRestInterceptor, "post_update_event_create_rule" ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "post_update_event_create_rule_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "pre_update_event_create_rule" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_admin.UpdateEventCreateRuleRequest.pb( analytics_admin.UpdateEventCreateRuleRequest() ) @@ -112348,6 +113212,10 @@ def test_update_event_create_rule_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = event_create_and_edit.EventCreateRule() + post_with_metadata.return_value = ( + event_create_and_edit.EventCreateRule(), + metadata, + ) client.update_event_create_rule( request, @@ -112359,6 +113227,7 @@ def test_update_event_create_rule_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_event_create_rule_rest_bad_request( @@ -112564,10 +113433,14 @@ def test_get_event_edit_rule_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "post_get_event_edit_rule" ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "post_get_event_edit_rule_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "pre_get_event_edit_rule" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_admin.GetEventEditRuleRequest.pb( analytics_admin.GetEventEditRuleRequest() ) @@ -112593,6 +113466,10 @@ def test_get_event_edit_rule_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = event_create_and_edit.EventEditRule() + post_with_metadata.return_value = ( + event_create_and_edit.EventEditRule(), + metadata, + ) client.get_event_edit_rule( request, @@ -112604,6 +113481,7 @@ def test_get_event_edit_rule_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_event_edit_rules_rest_bad_request( @@ -112688,10 +113566,14 @@ def test_list_event_edit_rules_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "post_list_event_edit_rules" ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "post_list_event_edit_rules_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "pre_list_event_edit_rules" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_admin.ListEventEditRulesRequest.pb( analytics_admin.ListEventEditRulesRequest() ) @@ -112717,6 +113599,10 @@ def test_list_event_edit_rules_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = analytics_admin.ListEventEditRulesResponse() + post_with_metadata.return_value = ( + analytics_admin.ListEventEditRulesResponse(), + metadata, + ) client.list_event_edit_rules( request, @@ -112728,6 +113614,7 @@ def test_list_event_edit_rules_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def 
test_create_event_edit_rule_rest_bad_request( @@ -112901,10 +113788,14 @@ def test_create_event_edit_rule_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "post_create_event_edit_rule" ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "post_create_event_edit_rule_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "pre_create_event_edit_rule" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_admin.CreateEventEditRuleRequest.pb( analytics_admin.CreateEventEditRuleRequest() ) @@ -112930,6 +113821,10 @@ def test_create_event_edit_rule_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = event_create_and_edit.EventEditRule() + post_with_metadata.return_value = ( + event_create_and_edit.EventEditRule(), + metadata, + ) client.create_event_edit_rule( request, @@ -112941,6 +113836,7 @@ def test_create_event_edit_rule_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_event_edit_rule_rest_bad_request( @@ -113122,10 +114018,14 @@ def test_update_event_edit_rule_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "post_update_event_edit_rule" ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "post_update_event_edit_rule_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "pre_update_event_edit_rule" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_admin.UpdateEventEditRuleRequest.pb( analytics_admin.UpdateEventEditRuleRequest() ) @@ -113151,6 +114051,10 @@ def test_update_event_edit_rule_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = event_create_and_edit.EventEditRule() + post_with_metadata.return_value = ( + event_create_and_edit.EventEditRule(), + metadata, + ) client.update_event_edit_rule( request, @@ -113162,6 +114066,7 @@ def test_update_event_edit_rule_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_event_edit_rule_rest_bad_request( @@ -113563,11 +114468,15 @@ def test_update_data_redaction_settings_rest_interceptors(null_interceptor): transports.AnalyticsAdminServiceRestInterceptor, "post_update_data_redaction_settings", ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "post_update_data_redaction_settings_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "pre_update_data_redaction_settings", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_admin.UpdateDataRedactionSettingsRequest.pb( analytics_admin.UpdateDataRedactionSettingsRequest() ) @@ -113593,6 +114502,7 @@ def test_update_data_redaction_settings_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.DataRedactionSettings() + post_with_metadata.return_value = resources.DataRedactionSettings(), metadata client.update_data_redaction_settings( request, @@ -113604,6 +114514,7 @@ 
def test_update_data_redaction_settings_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_data_redaction_settings_rest_bad_request( @@ -113699,11 +114610,15 @@ def test_get_data_redaction_settings_rest_interceptors(null_interceptor): transports.AnalyticsAdminServiceRestInterceptor, "post_get_data_redaction_settings", ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "post_get_data_redaction_settings_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "pre_get_data_redaction_settings", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_admin.GetDataRedactionSettingsRequest.pb( analytics_admin.GetDataRedactionSettingsRequest() ) @@ -113729,6 +114644,7 @@ def test_get_data_redaction_settings_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.DataRedactionSettings() + post_with_metadata.return_value = resources.DataRedactionSettings(), metadata client.get_data_redaction_settings( request, @@ -113740,6 +114656,7 @@ def test_get_data_redaction_settings_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_calculated_metric_rest_bad_request( @@ -113842,10 +114759,14 @@ def test_get_calculated_metric_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "post_get_calculated_metric" ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "post_get_calculated_metric_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "pre_get_calculated_metric" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_admin.GetCalculatedMetricRequest.pb( analytics_admin.GetCalculatedMetricRequest() ) @@ -113869,6 +114790,7 @@ def test_get_calculated_metric_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.CalculatedMetric() + post_with_metadata.return_value = resources.CalculatedMetric(), metadata client.get_calculated_metric( request, @@ -113880,6 +114802,7 @@ def test_get_calculated_metric_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_calculated_metric_rest_bad_request( @@ -114061,10 +114984,14 @@ def test_create_calculated_metric_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "post_create_calculated_metric" ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "post_create_calculated_metric_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "pre_create_calculated_metric" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_admin.CreateCalculatedMetricRequest.pb( analytics_admin.CreateCalculatedMetricRequest() ) @@ -114088,6 +115015,7 @@ def test_create_calculated_metric_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.CalculatedMetric() + 
post_with_metadata.return_value = resources.CalculatedMetric(), metadata client.create_calculated_metric( request, @@ -114099,6 +115027,7 @@ def test_create_calculated_metric_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_calculated_metrics_rest_bad_request( @@ -114183,10 +115112,14 @@ def test_list_calculated_metrics_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "post_list_calculated_metrics" ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "post_list_calculated_metrics_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "pre_list_calculated_metrics" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_admin.ListCalculatedMetricsRequest.pb( analytics_admin.ListCalculatedMetricsRequest() ) @@ -114212,6 +115145,10 @@ def test_list_calculated_metrics_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = analytics_admin.ListCalculatedMetricsResponse() + post_with_metadata.return_value = ( + analytics_admin.ListCalculatedMetricsResponse(), + metadata, + ) client.list_calculated_metrics( request, @@ -114223,6 +115160,7 @@ def test_list_calculated_metrics_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_calculated_metric_rest_bad_request( @@ -114408,10 +115346,14 @@ def test_update_calculated_metric_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "post_update_calculated_metric" ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "post_update_calculated_metric_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "pre_update_calculated_metric" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_admin.UpdateCalculatedMetricRequest.pb( analytics_admin.UpdateCalculatedMetricRequest() ) @@ -114435,6 +115377,7 @@ def test_update_calculated_metric_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.CalculatedMetric() + post_with_metadata.return_value = resources.CalculatedMetric(), metadata client.update_calculated_metric( request, @@ -114446,6 +115389,7 @@ def test_update_calculated_metric_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_calculated_metric_rest_bad_request( @@ -114636,10 +115580,14 @@ def test_create_rollup_property_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "post_create_rollup_property" ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "post_create_rollup_property_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "pre_create_rollup_property" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_admin.CreateRollupPropertyRequest.pb( analytics_admin.CreateRollupPropertyRequest() ) @@ -114665,6 +115613,10 @@ 
def test_create_rollup_property_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = analytics_admin.CreateRollupPropertyResponse() + post_with_metadata.return_value = ( + analytics_admin.CreateRollupPropertyResponse(), + metadata, + ) client.create_rollup_property( request, @@ -114676,6 +115628,7 @@ def test_create_rollup_property_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_rollup_property_source_link_rest_bad_request( @@ -114763,11 +115716,15 @@ def test_get_rollup_property_source_link_rest_interceptors(null_interceptor): transports.AnalyticsAdminServiceRestInterceptor, "post_get_rollup_property_source_link", ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "post_get_rollup_property_source_link_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "pre_get_rollup_property_source_link", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_admin.GetRollupPropertySourceLinkRequest.pb( analytics_admin.GetRollupPropertySourceLinkRequest() ) @@ -114793,6 +115750,7 @@ def test_get_rollup_property_source_link_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.RollupPropertySourceLink() + post_with_metadata.return_value = resources.RollupPropertySourceLink(), metadata client.get_rollup_property_source_link( request, @@ -114804,6 +115762,7 @@ def test_get_rollup_property_source_link_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_rollup_property_source_links_rest_bad_request( @@ -114891,11 +115850,15 @@ def test_list_rollup_property_source_links_rest_interceptors(null_interceptor): transports.AnalyticsAdminServiceRestInterceptor, "post_list_rollup_property_source_links", ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "post_list_rollup_property_source_links_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "pre_list_rollup_property_source_links", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_admin.ListRollupPropertySourceLinksRequest.pb( analytics_admin.ListRollupPropertySourceLinksRequest() ) @@ -114921,6 +115884,10 @@ def test_list_rollup_property_source_links_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = analytics_admin.ListRollupPropertySourceLinksResponse() + post_with_metadata.return_value = ( + analytics_admin.ListRollupPropertySourceLinksResponse(), + metadata, + ) client.list_rollup_property_source_links( request, @@ -114932,6 +115899,7 @@ def test_list_rollup_property_source_links_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_rollup_property_source_link_rest_bad_request( @@ -115096,11 +116064,15 @@ def test_create_rollup_property_source_link_rest_interceptors(null_interceptor): transports.AnalyticsAdminServiceRestInterceptor, "post_create_rollup_property_source_link", ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "post_create_rollup_property_source_link_with_metadata", + ) as 
post_with_metadata, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "pre_create_rollup_property_source_link", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_admin.CreateRollupPropertySourceLinkRequest.pb( analytics_admin.CreateRollupPropertySourceLinkRequest() ) @@ -115126,6 +116098,7 @@ def test_create_rollup_property_source_link_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.RollupPropertySourceLink() + post_with_metadata.return_value = resources.RollupPropertySourceLink(), metadata client.create_rollup_property_source_link( request, @@ -115137,6 +116110,7 @@ def test_create_rollup_property_source_link_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_rollup_property_source_link_rest_bad_request( @@ -115328,10 +116302,14 @@ def test_provision_subproperty_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "post_provision_subproperty" ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "post_provision_subproperty_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "pre_provision_subproperty" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_admin.ProvisionSubpropertyRequest.pb( analytics_admin.ProvisionSubpropertyRequest() ) @@ -115357,6 +116335,10 @@ def test_provision_subproperty_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = analytics_admin.ProvisionSubpropertyResponse() + post_with_metadata.return_value = ( + analytics_admin.ProvisionSubpropertyResponse(), + metadata, + ) client.provision_subproperty( request, @@ -115368,6 +116350,7 @@ def test_provision_subproperty_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_subproperty_event_filter_rest_bad_request( @@ -115550,11 +116533,15 @@ def test_create_subproperty_event_filter_rest_interceptors(null_interceptor): transports.AnalyticsAdminServiceRestInterceptor, "post_create_subproperty_event_filter", ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "post_create_subproperty_event_filter_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "pre_create_subproperty_event_filter", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_admin.CreateSubpropertyEventFilterRequest.pb( analytics_admin.CreateSubpropertyEventFilterRequest() ) @@ -115580,6 +116567,10 @@ def test_create_subproperty_event_filter_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gaa_subproperty_event_filter.SubpropertyEventFilter() + post_with_metadata.return_value = ( + gaa_subproperty_event_filter.SubpropertyEventFilter(), + metadata, + ) client.create_subproperty_event_filter( request, @@ -115591,6 +116582,7 @@ def test_create_subproperty_event_filter_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_subproperty_event_filter_rest_bad_request( @@ -115678,11 
+116670,15 @@ def test_get_subproperty_event_filter_rest_interceptors(null_interceptor): transports.AnalyticsAdminServiceRestInterceptor, "post_get_subproperty_event_filter", ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "post_get_subproperty_event_filter_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "pre_get_subproperty_event_filter", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_admin.GetSubpropertyEventFilterRequest.pb( analytics_admin.GetSubpropertyEventFilterRequest() ) @@ -115708,6 +116704,10 @@ def test_get_subproperty_event_filter_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = subproperty_event_filter.SubpropertyEventFilter() + post_with_metadata.return_value = ( + subproperty_event_filter.SubpropertyEventFilter(), + metadata, + ) client.get_subproperty_event_filter( request, @@ -115719,6 +116719,7 @@ def test_get_subproperty_event_filter_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_subproperty_event_filters_rest_bad_request( @@ -115806,11 +116807,15 @@ def test_list_subproperty_event_filters_rest_interceptors(null_interceptor): transports.AnalyticsAdminServiceRestInterceptor, "post_list_subproperty_event_filters", ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "post_list_subproperty_event_filters_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "pre_list_subproperty_event_filters", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_admin.ListSubpropertyEventFiltersRequest.pb( analytics_admin.ListSubpropertyEventFiltersRequest() ) @@ -115836,6 +116841,10 @@ def test_list_subproperty_event_filters_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = analytics_admin.ListSubpropertyEventFiltersResponse() + post_with_metadata.return_value = ( + analytics_admin.ListSubpropertyEventFiltersResponse(), + metadata, + ) client.list_subproperty_event_filters( request, @@ -115847,6 +116856,7 @@ def test_list_subproperty_event_filters_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_subproperty_event_filter_rest_bad_request( @@ -116037,11 +117047,15 @@ def test_update_subproperty_event_filter_rest_interceptors(null_interceptor): transports.AnalyticsAdminServiceRestInterceptor, "post_update_subproperty_event_filter", ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "post_update_subproperty_event_filter_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "pre_update_subproperty_event_filter", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_admin.UpdateSubpropertyEventFilterRequest.pb( analytics_admin.UpdateSubpropertyEventFilterRequest() ) @@ -116067,6 +117081,10 @@ def test_update_subproperty_event_filter_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gaa_subproperty_event_filter.SubpropertyEventFilter() + post_with_metadata.return_value = ( + 
gaa_subproperty_event_filter.SubpropertyEventFilter(), + metadata, + ) client.update_subproperty_event_filter( request, @@ -116078,6 +117096,7 @@ def test_update_subproperty_event_filter_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_subproperty_event_filter_rest_bad_request( diff --git a/packages/google-analytics-admin/tests/unit/gapic/admin_v1beta/test_analytics_admin_service.py b/packages/google-analytics-admin/tests/unit/gapic/admin_v1beta/test_analytics_admin_service.py index 7ac0ce48bdec..1ec8d31c14e1 100644 --- a/packages/google-analytics-admin/tests/unit/gapic/admin_v1beta/test_analytics_admin_service.py +++ b/packages/google-analytics-admin/tests/unit/gapic/admin_v1beta/test_analytics_admin_service.py @@ -67,6 +67,13 @@ resources, ) +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -342,6 +349,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = AnalyticsAdminServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = AnalyticsAdminServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -18541,6 +18591,7 @@ def test_get_data_retention_settings(request_type, transport: str = "grpc"): call.return_value = resources.DataRetentionSettings( name="name_value", event_data_retention=resources.DataRetentionSettings.RetentionDuration.TWO_MONTHS, + user_data_retention=resources.DataRetentionSettings.RetentionDuration.TWO_MONTHS, reset_user_data_on_new_activity=True, ) response = client.get_data_retention_settings(request) @@ -18558,6 +18609,10 @@ def test_get_data_retention_settings(request_type, transport: str = "grpc"): response.event_data_retention == resources.DataRetentionSettings.RetentionDuration.TWO_MONTHS ) + assert ( + response.user_data_retention + == resources.DataRetentionSettings.RetentionDuration.TWO_MONTHS + ) assert response.reset_user_data_on_new_activity is True @@ -18696,6 +18751,7 @@ async def test_get_data_retention_settings_async( 
resources.DataRetentionSettings( name="name_value", event_data_retention=resources.DataRetentionSettings.RetentionDuration.TWO_MONTHS, + user_data_retention=resources.DataRetentionSettings.RetentionDuration.TWO_MONTHS, reset_user_data_on_new_activity=True, ) ) @@ -18714,6 +18770,10 @@ async def test_get_data_retention_settings_async( response.event_data_retention == resources.DataRetentionSettings.RetentionDuration.TWO_MONTHS ) + assert ( + response.user_data_retention + == resources.DataRetentionSettings.RetentionDuration.TWO_MONTHS + ) assert response.reset_user_data_on_new_activity is True @@ -18898,6 +18958,7 @@ def test_update_data_retention_settings(request_type, transport: str = "grpc"): call.return_value = resources.DataRetentionSettings( name="name_value", event_data_retention=resources.DataRetentionSettings.RetentionDuration.TWO_MONTHS, + user_data_retention=resources.DataRetentionSettings.RetentionDuration.TWO_MONTHS, reset_user_data_on_new_activity=True, ) response = client.update_data_retention_settings(request) @@ -18915,6 +18976,10 @@ def test_update_data_retention_settings(request_type, transport: str = "grpc"): response.event_data_retention == resources.DataRetentionSettings.RetentionDuration.TWO_MONTHS ) + assert ( + response.user_data_retention + == resources.DataRetentionSettings.RetentionDuration.TWO_MONTHS + ) assert response.reset_user_data_on_new_activity is True @@ -19049,6 +19114,7 @@ async def test_update_data_retention_settings_async( resources.DataRetentionSettings( name="name_value", event_data_retention=resources.DataRetentionSettings.RetentionDuration.TWO_MONTHS, + user_data_retention=resources.DataRetentionSettings.RetentionDuration.TWO_MONTHS, reset_user_data_on_new_activity=True, ) ) @@ -19067,6 +19133,10 @@ async def test_update_data_retention_settings_async( response.event_data_retention == resources.DataRetentionSettings.RetentionDuration.TWO_MONTHS ) + assert ( + response.user_data_retention + == resources.DataRetentionSettings.RetentionDuration.TWO_MONTHS + ) assert response.reset_user_data_on_new_activity is True @@ -34301,6 +34371,7 @@ async def test_get_data_retention_settings_empty_call_grpc_asyncio(): resources.DataRetentionSettings( name="name_value", event_data_retention=resources.DataRetentionSettings.RetentionDuration.TWO_MONTHS, + user_data_retention=resources.DataRetentionSettings.RetentionDuration.TWO_MONTHS, reset_user_data_on_new_activity=True, ) ) @@ -34332,6 +34403,7 @@ async def test_update_data_retention_settings_empty_call_grpc_asyncio(): resources.DataRetentionSettings( name="name_value", event_data_retention=resources.DataRetentionSettings.RetentionDuration.TWO_MONTHS, + user_data_retention=resources.DataRetentionSettings.RetentionDuration.TWO_MONTHS, reset_user_data_on_new_activity=True, ) ) @@ -34614,10 +34686,14 @@ def test_get_account_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "post_get_account" ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "post_get_account_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "pre_get_account" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_admin.GetAccountRequest.pb( analytics_admin.GetAccountRequest() ) @@ -34641,6 +34717,7 @@ def test_get_account_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = 
resources.Account() + post_with_metadata.return_value = resources.Account(), metadata client.get_account( request, @@ -34652,6 +34729,7 @@ def test_get_account_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_accounts_rest_bad_request( @@ -34736,10 +34814,14 @@ def test_list_accounts_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "post_list_accounts" ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "post_list_accounts_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "pre_list_accounts" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_admin.ListAccountsRequest.pb( analytics_admin.ListAccountsRequest() ) @@ -34765,6 +34847,10 @@ def test_list_accounts_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = analytics_admin.ListAccountsResponse() + post_with_metadata.return_value = ( + analytics_admin.ListAccountsResponse(), + metadata, + ) client.list_accounts( request, @@ -34776,6 +34862,7 @@ def test_list_accounts_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_account_rest_bad_request( @@ -35053,10 +35140,14 @@ def test_update_account_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "post_update_account" ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "post_update_account_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "pre_update_account" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_admin.UpdateAccountRequest.pb( analytics_admin.UpdateAccountRequest() ) @@ -35080,6 +35171,7 @@ def test_update_account_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.Account() + post_with_metadata.return_value = resources.Account(), metadata client.update_account( request, @@ -35091,6 +35183,7 @@ def test_update_account_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_provision_account_ticket_rest_bad_request( @@ -35175,10 +35268,14 @@ def test_provision_account_ticket_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "post_provision_account_ticket" ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "post_provision_account_ticket_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "pre_provision_account_ticket" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_admin.ProvisionAccountTicketRequest.pb( analytics_admin.ProvisionAccountTicketRequest() ) @@ -35204,6 +35301,10 @@ def test_provision_account_ticket_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = analytics_admin.ProvisionAccountTicketResponse() + post_with_metadata.return_value = ( + 
analytics_admin.ProvisionAccountTicketResponse(), + metadata, + ) client.provision_account_ticket( request, @@ -35215,6 +35316,7 @@ def test_provision_account_ticket_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_account_summaries_rest_bad_request( @@ -35299,10 +35401,14 @@ def test_list_account_summaries_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "post_list_account_summaries" ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "post_list_account_summaries_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "pre_list_account_summaries" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_admin.ListAccountSummariesRequest.pb( analytics_admin.ListAccountSummariesRequest() ) @@ -35328,6 +35434,10 @@ def test_list_account_summaries_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = analytics_admin.ListAccountSummariesResponse() + post_with_metadata.return_value = ( + analytics_admin.ListAccountSummariesResponse(), + metadata, + ) client.list_account_summaries( request, @@ -35339,6 +35449,7 @@ def test_list_account_summaries_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_property_rest_bad_request(request_type=analytics_admin.GetPropertyRequest): @@ -35437,10 +35548,14 @@ def test_get_property_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "post_get_property" ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "post_get_property_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "pre_get_property" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_admin.GetPropertyRequest.pb( analytics_admin.GetPropertyRequest() ) @@ -35464,6 +35579,7 @@ def test_get_property_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.Property() + post_with_metadata.return_value = resources.Property(), metadata client.get_property( request, @@ -35475,6 +35591,7 @@ def test_get_property_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_properties_rest_bad_request( @@ -35559,10 +35676,14 @@ def test_list_properties_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "post_list_properties" ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "post_list_properties_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "pre_list_properties" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_admin.ListPropertiesRequest.pb( analytics_admin.ListPropertiesRequest() ) @@ -35588,6 +35709,10 @@ def test_list_properties_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = analytics_admin.ListPropertiesResponse() + 
post_with_metadata.return_value = ( + analytics_admin.ListPropertiesResponse(), + metadata, + ) client.list_properties( request, @@ -35599,6 +35724,7 @@ def test_list_properties_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_property_rest_bad_request( @@ -35781,10 +35907,14 @@ def test_create_property_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "post_create_property" ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "post_create_property_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "pre_create_property" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_admin.CreatePropertyRequest.pb( analytics_admin.CreatePropertyRequest() ) @@ -35808,6 +35938,7 @@ def test_create_property_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.Property() + post_with_metadata.return_value = resources.Property(), metadata client.create_property( request, @@ -35819,6 +35950,7 @@ def test_create_property_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_property_rest_bad_request( @@ -35919,10 +36051,14 @@ def test_delete_property_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "post_delete_property" ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "post_delete_property_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "pre_delete_property" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_admin.DeletePropertyRequest.pb( analytics_admin.DeletePropertyRequest() ) @@ -35946,6 +36082,7 @@ def test_delete_property_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.Property() + post_with_metadata.return_value = resources.Property(), metadata client.delete_property( request, @@ -35957,6 +36094,7 @@ def test_delete_property_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_property_rest_bad_request( @@ -36139,10 +36277,14 @@ def test_update_property_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "post_update_property" ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "post_update_property_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "pre_update_property" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_admin.UpdatePropertyRequest.pb( analytics_admin.UpdatePropertyRequest() ) @@ -36166,6 +36308,7 @@ def test_update_property_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.Property() + post_with_metadata.return_value = resources.Property(), metadata client.update_property( request, @@ -36177,6 +36320,7 @@ def 
test_update_property_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_firebase_link_rest_bad_request( @@ -36335,10 +36479,14 @@ def test_create_firebase_link_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "post_create_firebase_link" ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "post_create_firebase_link_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "pre_create_firebase_link" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_admin.CreateFirebaseLinkRequest.pb( analytics_admin.CreateFirebaseLinkRequest() ) @@ -36362,6 +36510,7 @@ def test_create_firebase_link_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.FirebaseLink() + post_with_metadata.return_value = resources.FirebaseLink(), metadata client.create_firebase_link( request, @@ -36373,6 +36522,7 @@ def test_create_firebase_link_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_firebase_link_rest_bad_request( @@ -36566,10 +36716,14 @@ def test_list_firebase_links_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "post_list_firebase_links" ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "post_list_firebase_links_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "pre_list_firebase_links" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_admin.ListFirebaseLinksRequest.pb( analytics_admin.ListFirebaseLinksRequest() ) @@ -36595,6 +36749,10 @@ def test_list_firebase_links_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = analytics_admin.ListFirebaseLinksResponse() + post_with_metadata.return_value = ( + analytics_admin.ListFirebaseLinksResponse(), + metadata, + ) client.list_firebase_links( request, @@ -36606,6 +36764,7 @@ def test_list_firebase_links_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_google_ads_link_rest_bad_request( @@ -36774,10 +36933,14 @@ def test_create_google_ads_link_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "post_create_google_ads_link" ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "post_create_google_ads_link_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "pre_create_google_ads_link" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_admin.CreateGoogleAdsLinkRequest.pb( analytics_admin.CreateGoogleAdsLinkRequest() ) @@ -36801,6 +36964,7 @@ def test_create_google_ads_link_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.GoogleAdsLink() + post_with_metadata.return_value = resources.GoogleAdsLink(), metadata client.create_google_ads_link( request, @@ 
-36812,6 +36976,7 @@ def test_create_google_ads_link_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_google_ads_link_rest_bad_request( @@ -36984,10 +37149,14 @@ def test_update_google_ads_link_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "post_update_google_ads_link" ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "post_update_google_ads_link_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "pre_update_google_ads_link" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_admin.UpdateGoogleAdsLinkRequest.pb( analytics_admin.UpdateGoogleAdsLinkRequest() ) @@ -37011,6 +37180,7 @@ def test_update_google_ads_link_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.GoogleAdsLink() + post_with_metadata.return_value = resources.GoogleAdsLink(), metadata client.update_google_ads_link( request, @@ -37022,6 +37192,7 @@ def test_update_google_ads_link_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_google_ads_link_rest_bad_request( @@ -37215,10 +37386,14 @@ def test_list_google_ads_links_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "post_list_google_ads_links" ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "post_list_google_ads_links_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "pre_list_google_ads_links" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_admin.ListGoogleAdsLinksRequest.pb( analytics_admin.ListGoogleAdsLinksRequest() ) @@ -37244,6 +37419,10 @@ def test_list_google_ads_links_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = analytics_admin.ListGoogleAdsLinksResponse() + post_with_metadata.return_value = ( + analytics_admin.ListGoogleAdsLinksResponse(), + metadata, + ) client.list_google_ads_links( request, @@ -37255,6 +37434,7 @@ def test_list_google_ads_links_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_data_sharing_settings_rest_bad_request( @@ -37350,10 +37530,14 @@ def test_get_data_sharing_settings_rest_interceptors(null_interceptor): transports.AnalyticsAdminServiceRestInterceptor, "post_get_data_sharing_settings", ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "post_get_data_sharing_settings_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "pre_get_data_sharing_settings" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_admin.GetDataSharingSettingsRequest.pb( analytics_admin.GetDataSharingSettingsRequest() ) @@ -37379,6 +37563,7 @@ def test_get_data_sharing_settings_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.DataSharingSettings() + post_with_metadata.return_value = 
resources.DataSharingSettings(), metadata client.get_data_sharing_settings( request, @@ -37390,6 +37575,7 @@ def test_get_data_sharing_settings_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_measurement_protocol_secret_rest_bad_request( @@ -37483,11 +37669,15 @@ def test_get_measurement_protocol_secret_rest_interceptors(null_interceptor): transports.AnalyticsAdminServiceRestInterceptor, "post_get_measurement_protocol_secret", ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "post_get_measurement_protocol_secret_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "pre_get_measurement_protocol_secret", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_admin.GetMeasurementProtocolSecretRequest.pb( analytics_admin.GetMeasurementProtocolSecretRequest() ) @@ -37513,6 +37703,10 @@ def test_get_measurement_protocol_secret_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.MeasurementProtocolSecret() + post_with_metadata.return_value = ( + resources.MeasurementProtocolSecret(), + metadata, + ) client.get_measurement_protocol_secret( request, @@ -37524,6 +37718,7 @@ def test_get_measurement_protocol_secret_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_measurement_protocol_secrets_rest_bad_request( @@ -37611,11 +37806,15 @@ def test_list_measurement_protocol_secrets_rest_interceptors(null_interceptor): transports.AnalyticsAdminServiceRestInterceptor, "post_list_measurement_protocol_secrets", ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "post_list_measurement_protocol_secrets_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "pre_list_measurement_protocol_secrets", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_admin.ListMeasurementProtocolSecretsRequest.pb( analytics_admin.ListMeasurementProtocolSecretsRequest() ) @@ -37641,6 +37840,10 @@ def test_list_measurement_protocol_secrets_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = analytics_admin.ListMeasurementProtocolSecretsResponse() + post_with_metadata.return_value = ( + analytics_admin.ListMeasurementProtocolSecretsResponse(), + metadata, + ) client.list_measurement_protocol_secrets( request, @@ -37652,6 +37855,7 @@ def test_list_measurement_protocol_secrets_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_measurement_protocol_secret_rest_bad_request( @@ -37819,11 +38023,15 @@ def test_create_measurement_protocol_secret_rest_interceptors(null_interceptor): transports.AnalyticsAdminServiceRestInterceptor, "post_create_measurement_protocol_secret", ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "post_create_measurement_protocol_secret_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "pre_create_measurement_protocol_secret", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message 
= analytics_admin.CreateMeasurementProtocolSecretRequest.pb( analytics_admin.CreateMeasurementProtocolSecretRequest() ) @@ -37849,6 +38057,10 @@ def test_create_measurement_protocol_secret_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.MeasurementProtocolSecret() + post_with_metadata.return_value = ( + resources.MeasurementProtocolSecret(), + metadata, + ) client.create_measurement_protocol_secret( request, @@ -37860,6 +38072,7 @@ def test_create_measurement_protocol_secret_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_measurement_protocol_secret_rest_bad_request( @@ -38149,11 +38362,15 @@ def test_update_measurement_protocol_secret_rest_interceptors(null_interceptor): transports.AnalyticsAdminServiceRestInterceptor, "post_update_measurement_protocol_secret", ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "post_update_measurement_protocol_secret_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "pre_update_measurement_protocol_secret", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_admin.UpdateMeasurementProtocolSecretRequest.pb( analytics_admin.UpdateMeasurementProtocolSecretRequest() ) @@ -38179,6 +38396,10 @@ def test_update_measurement_protocol_secret_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.MeasurementProtocolSecret() + post_with_metadata.return_value = ( + resources.MeasurementProtocolSecret(), + metadata, + ) client.update_measurement_protocol_secret( request, @@ -38190,6 +38411,7 @@ def test_update_measurement_protocol_secret_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_acknowledge_user_data_collection_rest_bad_request( @@ -38274,11 +38496,15 @@ def test_acknowledge_user_data_collection_rest_interceptors(null_interceptor): transports.AnalyticsAdminServiceRestInterceptor, "post_acknowledge_user_data_collection", ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "post_acknowledge_user_data_collection_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "pre_acknowledge_user_data_collection", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_admin.AcknowledgeUserDataCollectionRequest.pb( analytics_admin.AcknowledgeUserDataCollectionRequest() ) @@ -38304,6 +38530,10 @@ def test_acknowledge_user_data_collection_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = analytics_admin.AcknowledgeUserDataCollectionResponse() + post_with_metadata.return_value = ( + analytics_admin.AcknowledgeUserDataCollectionResponse(), + metadata, + ) client.acknowledge_user_data_collection( request, @@ -38315,6 +38545,7 @@ def test_acknowledge_user_data_collection_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_search_change_history_events_rest_bad_request( @@ -38402,11 +38633,15 @@ def test_search_change_history_events_rest_interceptors(null_interceptor): transports.AnalyticsAdminServiceRestInterceptor, 
"post_search_change_history_events", ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "post_search_change_history_events_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "pre_search_change_history_events", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_admin.SearchChangeHistoryEventsRequest.pb( analytics_admin.SearchChangeHistoryEventsRequest() ) @@ -38432,6 +38667,10 @@ def test_search_change_history_events_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = analytics_admin.SearchChangeHistoryEventsResponse() + post_with_metadata.return_value = ( + analytics_admin.SearchChangeHistoryEventsResponse(), + metadata, + ) client.search_change_history_events( request, @@ -38443,6 +38682,7 @@ def test_search_change_history_events_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_conversion_event_rest_bad_request( @@ -38619,10 +38859,14 @@ def test_create_conversion_event_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "post_create_conversion_event" ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "post_create_conversion_event_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "pre_create_conversion_event" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_admin.CreateConversionEventRequest.pb( analytics_admin.CreateConversionEventRequest() ) @@ -38646,6 +38890,7 @@ def test_create_conversion_event_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.ConversionEvent() + post_with_metadata.return_value = resources.ConversionEvent(), metadata client.create_conversion_event( request, @@ -38657,6 +38902,7 @@ def test_create_conversion_event_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_conversion_event_rest_bad_request( @@ -38837,10 +39083,14 @@ def test_update_conversion_event_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "post_update_conversion_event" ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "post_update_conversion_event_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "pre_update_conversion_event" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_admin.UpdateConversionEventRequest.pb( analytics_admin.UpdateConversionEventRequest() ) @@ -38864,6 +39114,7 @@ def test_update_conversion_event_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.ConversionEvent() + post_with_metadata.return_value = resources.ConversionEvent(), metadata client.update_conversion_event( request, @@ -38875,6 +39126,7 @@ def test_update_conversion_event_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_conversion_event_rest_bad_request( @@ 
-38970,10 +39222,14 @@ def test_get_conversion_event_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "post_get_conversion_event" ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "post_get_conversion_event_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "pre_get_conversion_event" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_admin.GetConversionEventRequest.pb( analytics_admin.GetConversionEventRequest() ) @@ -38997,6 +39253,7 @@ def test_get_conversion_event_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.ConversionEvent() + post_with_metadata.return_value = resources.ConversionEvent(), metadata client.get_conversion_event( request, @@ -39008,6 +39265,7 @@ def test_get_conversion_event_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_conversion_event_rest_bad_request( @@ -39201,10 +39459,14 @@ def test_list_conversion_events_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "post_list_conversion_events" ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "post_list_conversion_events_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "pre_list_conversion_events" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_admin.ListConversionEventsRequest.pb( analytics_admin.ListConversionEventsRequest() ) @@ -39230,6 +39492,10 @@ def test_list_conversion_events_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = analytics_admin.ListConversionEventsResponse() + post_with_metadata.return_value = ( + analytics_admin.ListConversionEventsResponse(), + metadata, + ) client.list_conversion_events( request, @@ -39241,6 +39507,7 @@ def test_list_conversion_events_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_key_event_rest_bad_request( @@ -39412,10 +39679,14 @@ def test_create_key_event_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "post_create_key_event" ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "post_create_key_event_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "pre_create_key_event" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_admin.CreateKeyEventRequest.pb( analytics_admin.CreateKeyEventRequest() ) @@ -39439,6 +39710,7 @@ def test_create_key_event_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.KeyEvent() + post_with_metadata.return_value = resources.KeyEvent(), metadata client.create_key_event( request, @@ -39450,6 +39722,7 @@ def test_create_key_event_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_key_event_rest_bad_request( @@ 
-39621,10 +39894,14 @@ def test_update_key_event_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "post_update_key_event" ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "post_update_key_event_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "pre_update_key_event" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_admin.UpdateKeyEventRequest.pb( analytics_admin.UpdateKeyEventRequest() ) @@ -39648,6 +39925,7 @@ def test_update_key_event_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.KeyEvent() + post_with_metadata.return_value = resources.KeyEvent(), metadata client.update_key_event( request, @@ -39659,6 +39937,7 @@ def test_update_key_event_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_key_event_rest_bad_request( @@ -39751,10 +40030,14 @@ def test_get_key_event_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "post_get_key_event" ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "post_get_key_event_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "pre_get_key_event" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_admin.GetKeyEventRequest.pb( analytics_admin.GetKeyEventRequest() ) @@ -39778,6 +40061,7 @@ def test_get_key_event_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.KeyEvent() + post_with_metadata.return_value = resources.KeyEvent(), metadata client.get_key_event( request, @@ -39789,6 +40073,7 @@ def test_get_key_event_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_key_event_rest_bad_request( @@ -39982,10 +40267,14 @@ def test_list_key_events_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "post_list_key_events" ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "post_list_key_events_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "pre_list_key_events" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_admin.ListKeyEventsRequest.pb( analytics_admin.ListKeyEventsRequest() ) @@ -40011,6 +40300,10 @@ def test_list_key_events_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = analytics_admin.ListKeyEventsResponse() + post_with_metadata.return_value = ( + analytics_admin.ListKeyEventsResponse(), + metadata, + ) client.list_key_events( request, @@ -40022,6 +40315,7 @@ def test_list_key_events_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_custom_dimension_rest_bad_request( @@ -40193,10 +40487,14 @@ def test_create_custom_dimension_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( 
transports.AnalyticsAdminServiceRestInterceptor, "post_create_custom_dimension" ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "post_create_custom_dimension_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "pre_create_custom_dimension" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_admin.CreateCustomDimensionRequest.pb( analytics_admin.CreateCustomDimensionRequest() ) @@ -40220,6 +40518,7 @@ def test_create_custom_dimension_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.CustomDimension() + post_with_metadata.return_value = resources.CustomDimension(), metadata client.create_custom_dimension( request, @@ -40231,6 +40530,7 @@ def test_create_custom_dimension_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_custom_dimension_rest_bad_request( @@ -40406,10 +40706,14 @@ def test_update_custom_dimension_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "post_update_custom_dimension" ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "post_update_custom_dimension_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "pre_update_custom_dimension" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_admin.UpdateCustomDimensionRequest.pb( analytics_admin.UpdateCustomDimensionRequest() ) @@ -40433,6 +40737,7 @@ def test_update_custom_dimension_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.CustomDimension() + post_with_metadata.return_value = resources.CustomDimension(), metadata client.update_custom_dimension( request, @@ -40444,6 +40749,7 @@ def test_update_custom_dimension_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_custom_dimensions_rest_bad_request( @@ -40528,10 +40834,14 @@ def test_list_custom_dimensions_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "post_list_custom_dimensions" ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "post_list_custom_dimensions_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "pre_list_custom_dimensions" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_admin.ListCustomDimensionsRequest.pb( analytics_admin.ListCustomDimensionsRequest() ) @@ -40557,6 +40867,10 @@ def test_list_custom_dimensions_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = analytics_admin.ListCustomDimensionsResponse() + post_with_metadata.return_value = ( + analytics_admin.ListCustomDimensionsResponse(), + metadata, + ) client.list_custom_dimensions( request, @@ -40568,6 +40882,7 @@ def test_list_custom_dimensions_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_archive_custom_dimension_rest_bad_request( @@ -40771,10 
+41086,14 @@ def test_get_custom_dimension_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "post_get_custom_dimension" ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "post_get_custom_dimension_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "pre_get_custom_dimension" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_admin.GetCustomDimensionRequest.pb( analytics_admin.GetCustomDimensionRequest() ) @@ -40798,6 +41117,7 @@ def test_get_custom_dimension_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.CustomDimension() + post_with_metadata.return_value = resources.CustomDimension(), metadata client.get_custom_dimension( request, @@ -40809,6 +41129,7 @@ def test_get_custom_dimension_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_custom_metric_rest_bad_request( @@ -40985,10 +41306,14 @@ def test_create_custom_metric_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "post_create_custom_metric" ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "post_create_custom_metric_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "pre_create_custom_metric" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_admin.CreateCustomMetricRequest.pb( analytics_admin.CreateCustomMetricRequest() ) @@ -41012,6 +41337,7 @@ def test_create_custom_metric_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.CustomMetric() + post_with_metadata.return_value = resources.CustomMetric(), metadata client.create_custom_metric( request, @@ -41023,6 +41349,7 @@ def test_create_custom_metric_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_custom_metric_rest_bad_request( @@ -41203,10 +41530,14 @@ def test_update_custom_metric_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "post_update_custom_metric" ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "post_update_custom_metric_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "pre_update_custom_metric" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_admin.UpdateCustomMetricRequest.pb( analytics_admin.UpdateCustomMetricRequest() ) @@ -41230,6 +41561,7 @@ def test_update_custom_metric_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.CustomMetric() + post_with_metadata.return_value = resources.CustomMetric(), metadata client.update_custom_metric( request, @@ -41241,6 +41573,7 @@ def test_update_custom_metric_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_custom_metrics_rest_bad_request( @@ -41325,10 +41658,14 @@ def 
test_list_custom_metrics_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "post_list_custom_metrics" ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "post_list_custom_metrics_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "pre_list_custom_metrics" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_admin.ListCustomMetricsRequest.pb( analytics_admin.ListCustomMetricsRequest() ) @@ -41354,6 +41691,10 @@ def test_list_custom_metrics_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = analytics_admin.ListCustomMetricsResponse() + post_with_metadata.return_value = ( + analytics_admin.ListCustomMetricsResponse(), + metadata, + ) client.list_custom_metrics( request, @@ -41365,6 +41706,7 @@ def test_list_custom_metrics_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_archive_custom_metric_rest_bad_request( @@ -41574,10 +41916,14 @@ def test_get_custom_metric_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "post_get_custom_metric" ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "post_get_custom_metric_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "pre_get_custom_metric" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_admin.GetCustomMetricRequest.pb( analytics_admin.GetCustomMetricRequest() ) @@ -41601,6 +41947,7 @@ def test_get_custom_metric_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.CustomMetric() + post_with_metadata.return_value = resources.CustomMetric(), metadata client.get_custom_metric( request, @@ -41612,6 +41959,7 @@ def test_get_custom_metric_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_data_retention_settings_rest_bad_request( @@ -41661,6 +42009,7 @@ def test_get_data_retention_settings_rest_call_success(request_type): return_value = resources.DataRetentionSettings( name="name_value", event_data_retention=resources.DataRetentionSettings.RetentionDuration.TWO_MONTHS, + user_data_retention=resources.DataRetentionSettings.RetentionDuration.TWO_MONTHS, reset_user_data_on_new_activity=True, ) @@ -41683,6 +42032,10 @@ def test_get_data_retention_settings_rest_call_success(request_type): response.event_data_retention == resources.DataRetentionSettings.RetentionDuration.TWO_MONTHS ) + assert ( + response.user_data_retention + == resources.DataRetentionSettings.RetentionDuration.TWO_MONTHS + ) assert response.reset_user_data_on_new_activity is True @@ -41704,11 +42057,15 @@ def test_get_data_retention_settings_rest_interceptors(null_interceptor): transports.AnalyticsAdminServiceRestInterceptor, "post_get_data_retention_settings", ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "post_get_data_retention_settings_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "pre_get_data_retention_settings", ) as pre: pre.assert_not_called() 
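Every interceptor test added in this file follows the same template, so the change is easier to read condensed. A rough sketch using only names the surrounding tests already define (`mock`, `transports`, `analytics_admin`, `client`, `request`, `metadata`) and omitting the transcode/HTTP-session stubbing that the full tests also set up:

    with mock.patch.object(
        transports.AnalyticsAdminServiceRestInterceptor, "post_list_custom_metrics"
    ) as post, mock.patch.object(
        transports.AnalyticsAdminServiceRestInterceptor,
        "post_list_custom_metrics_with_metadata",
    ) as post_with_metadata:
        # The legacy hook still returns only the response object...
        post.return_value = analytics_admin.ListCustomMetricsResponse()
        # ...while the new hook returns a (response, metadata) tuple.
        post_with_metadata.return_value = (
            analytics_admin.ListCustomMetricsResponse(),
            metadata,
        )
        client.list_custom_metrics(request)
        # Both hooks must run exactly once per call.
        post.assert_called_once()
        post_with_metadata.assert_called_once()

The data-retention hunks just above and below also add a `user_data_retention` field to `DataRetentionSettings`, mirroring the existing `event_data_retention`; for example:

    settings = resources.DataRetentionSettings(
        event_data_retention=resources.DataRetentionSettings.RetentionDuration.TWO_MONTHS,
        user_data_retention=resources.DataRetentionSettings.RetentionDuration.TWO_MONTHS,
    )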
post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_admin.GetDataRetentionSettingsRequest.pb( analytics_admin.GetDataRetentionSettingsRequest() ) @@ -41734,6 +42091,7 @@ def test_get_data_retention_settings_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.DataRetentionSettings() + post_with_metadata.return_value = resources.DataRetentionSettings(), metadata client.get_data_retention_settings( request, @@ -41745,6 +42103,7 @@ def test_get_data_retention_settings_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_data_retention_settings_rest_bad_request( @@ -41793,6 +42152,7 @@ def test_update_data_retention_settings_rest_call_success(request_type): request_init["data_retention_settings"] = { "name": "properties/sample1/dataRetentionSettings", "event_data_retention": 1, + "user_data_retention": 1, "reset_user_data_on_new_activity": True, } # The version of a generated dependency at test runtime may differ from the version used during generation. @@ -41874,6 +42234,7 @@ def get_message_fields(field): return_value = resources.DataRetentionSettings( name="name_value", event_data_retention=resources.DataRetentionSettings.RetentionDuration.TWO_MONTHS, + user_data_retention=resources.DataRetentionSettings.RetentionDuration.TWO_MONTHS, reset_user_data_on_new_activity=True, ) @@ -41896,6 +42257,10 @@ def get_message_fields(field): response.event_data_retention == resources.DataRetentionSettings.RetentionDuration.TWO_MONTHS ) + assert ( + response.user_data_retention + == resources.DataRetentionSettings.RetentionDuration.TWO_MONTHS + ) assert response.reset_user_data_on_new_activity is True @@ -41917,11 +42282,15 @@ def test_update_data_retention_settings_rest_interceptors(null_interceptor): transports.AnalyticsAdminServiceRestInterceptor, "post_update_data_retention_settings", ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "post_update_data_retention_settings_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "pre_update_data_retention_settings", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_admin.UpdateDataRetentionSettingsRequest.pb( analytics_admin.UpdateDataRetentionSettingsRequest() ) @@ -41947,6 +42316,7 @@ def test_update_data_retention_settings_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.DataRetentionSettings() + post_with_metadata.return_value = resources.DataRetentionSettings(), metadata client.update_data_retention_settings( request, @@ -41958,6 +42328,7 @@ def test_update_data_retention_settings_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_data_stream_rest_bad_request( @@ -42133,10 +42504,14 @@ def test_create_data_stream_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "post_create_data_stream" ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "post_create_data_stream_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "pre_create_data_stream" ) as pre: pre.assert_not_called() post.assert_not_called() + 
post_with_metadata.assert_not_called() pb_message = analytics_admin.CreateDataStreamRequest.pb( analytics_admin.CreateDataStreamRequest() ) @@ -42160,6 +42535,7 @@ def test_create_data_stream_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.DataStream() + post_with_metadata.return_value = resources.DataStream(), metadata client.create_data_stream( request, @@ -42171,6 +42547,7 @@ def test_create_data_stream_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_data_stream_rest_bad_request( @@ -42455,10 +42832,14 @@ def test_update_data_stream_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "post_update_data_stream" ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "post_update_data_stream_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "pre_update_data_stream" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_admin.UpdateDataStreamRequest.pb( analytics_admin.UpdateDataStreamRequest() ) @@ -42482,6 +42863,7 @@ def test_update_data_stream_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.DataStream() + post_with_metadata.return_value = resources.DataStream(), metadata client.update_data_stream( request, @@ -42493,6 +42875,7 @@ def test_update_data_stream_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_data_streams_rest_bad_request( @@ -42577,10 +42960,14 @@ def test_list_data_streams_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "post_list_data_streams" ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "post_list_data_streams_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "pre_list_data_streams" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_admin.ListDataStreamsRequest.pb( analytics_admin.ListDataStreamsRequest() ) @@ -42606,6 +42993,10 @@ def test_list_data_streams_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = analytics_admin.ListDataStreamsResponse() + post_with_metadata.return_value = ( + analytics_admin.ListDataStreamsResponse(), + metadata, + ) client.list_data_streams( request, @@ -42617,6 +43008,7 @@ def test_list_data_streams_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_data_stream_rest_bad_request( @@ -42705,10 +43097,14 @@ def test_get_data_stream_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "post_get_data_stream" ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "post_get_data_stream_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "pre_get_data_stream" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = 
analytics_admin.GetDataStreamRequest.pb( analytics_admin.GetDataStreamRequest() ) @@ -42732,6 +43128,7 @@ def test_get_data_stream_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.DataStream() + post_with_metadata.return_value = resources.DataStream(), metadata client.get_data_stream( request, @@ -42743,6 +43140,7 @@ def test_get_data_stream_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_run_access_report_rest_bad_request( @@ -42827,10 +43225,14 @@ def test_run_access_report_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "post_run_access_report" ) as post, mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "post_run_access_report_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AnalyticsAdminServiceRestInterceptor, "pre_run_access_report" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_admin.RunAccessReportRequest.pb( analytics_admin.RunAccessReportRequest() ) @@ -42856,6 +43258,10 @@ def test_run_access_report_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = analytics_admin.RunAccessReportResponse() + post_with_metadata.return_value = ( + analytics_admin.RunAccessReportResponse(), + metadata, + ) client.run_access_report( request, @@ -42867,6 +43273,7 @@ def test_run_access_report_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_initialize_client_w_rest(): diff --git a/packages/google-analytics-data/CHANGELOG.md b/packages/google-analytics-data/CHANGELOG.md index d2d8908d39a3..c07487875db4 100644 --- a/packages/google-analytics-data/CHANGELOG.md +++ b/packages/google-analytics-data/CHANGELOG.md @@ -1,5 +1,13 @@ # Changelog +## [0.18.17](https://github.com/googleapis/google-cloud-python/compare/google-analytics-data-v0.18.16...google-analytics-data-v0.18.17) (2025-02-12) + + +### Features + +* Add REST Interceptors which support reading metadata ([a961bc0](https://github.com/googleapis/google-cloud-python/commit/a961bc029201b72fc4923490aeb3d82781853e6a)) +* Add support for reading selective GAPIC generation methods from service YAML ([a961bc0](https://github.com/googleapis/google-cloud-python/commit/a961bc029201b72fc4923490aeb3d82781853e6a)) + ## [0.18.16](https://github.com/googleapis/google-cloud-python/compare/google-analytics-data-v0.18.15...google-analytics-data-v0.18.16) (2024-12-12) diff --git a/packages/google-analytics-data/google/analytics/data/gapic_version.py b/packages/google-analytics-data/google/analytics/data/gapic_version.py index e7e0463c8ca1..846ea94889d9 100644 --- a/packages/google-analytics-data/google/analytics/data/gapic_version.py +++ b/packages/google-analytics-data/google/analytics/data/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.18.16" # {x-release-please-version} +__version__ = "0.18.17" # {x-release-please-version} diff --git a/packages/google-analytics-data/google/analytics/data_v1alpha/gapic_version.py b/packages/google-analytics-data/google/analytics/data_v1alpha/gapic_version.py index e7e0463c8ca1..846ea94889d9 100644 --- a/packages/google-analytics-data/google/analytics/data_v1alpha/gapic_version.py +++ b/packages/google-analytics-data/google/analytics/data_v1alpha/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.18.16" # {x-release-please-version} +__version__ = "0.18.17" # {x-release-please-version} diff --git a/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/client.py b/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/client.py index 421b316b6c2c..3156f18f5bc6 100644 --- a/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/client.py +++ b/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -540,6 +542,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. diff --git a/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/transports/rest.py b/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/transports/rest.py index faf3c835dd1a..f35081bc2816 100644 --- a/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/transports/rest.py +++ b/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/transports/rest.py @@ -207,12 +207,35 @@ def post_create_audience_list( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_audience_list - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_audience_list_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AlphaAnalyticsData server but before - it is returned to user code. + it is returned to user code. This `post_create_audience_list` interceptor runs + before the `post_create_audience_list_with_metadata` interceptor. 
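The `_add_cred_info_for_auth_errors` helper added to client.py above only acts on 401/403/404 errors, and only when the installed google-auth (>= 2.35.0) exposes `get_cred_info()` on the active credentials. A hedged sketch of how this can surface to callers, assuming the generated RPC wrappers route `GoogleAPICallError`s through the helper before re-raising (that wiring is not part of this excerpt); `client` and `request` stand in for an already-configured client and request:

    from google.api_core import exceptions as core_exceptions

    try:
        response = client.get_audience_list(request=request)
    except core_exceptions.GoogleAPICallError as exc:
        # For UNAUTHORIZED / FORBIDDEN / NOT_FOUND responses, exc.details may now carry
        # an extra JSON string describing the credential in use; other status codes are
        # left untouched.
        print(exc.code, exc.details)
        raise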
""" return response + def post_create_audience_list_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_audience_list + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AlphaAnalyticsData server but before it is returned to user code. + + We recommend only using this `post_create_audience_list_with_metadata` + interceptor in new development instead of the `post_create_audience_list` interceptor. + When both interceptors are used, this `post_create_audience_list_with_metadata` interceptor runs after the + `post_create_audience_list` interceptor. The (possibly modified) response returned by + `post_create_audience_list` will be passed to + `post_create_audience_list_with_metadata`. + """ + return response, metadata + def pre_create_recurring_audience_list( self, request: analytics_data_api.CreateRecurringAudienceListRequest, @@ -233,12 +256,38 @@ def post_create_recurring_audience_list( ) -> analytics_data_api.RecurringAudienceList: """Post-rpc interceptor for create_recurring_audience_list - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_recurring_audience_list_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AlphaAnalyticsData server but before - it is returned to user code. + it is returned to user code. This `post_create_recurring_audience_list` interceptor runs + before the `post_create_recurring_audience_list_with_metadata` interceptor. """ return response + def post_create_recurring_audience_list_with_metadata( + self, + response: analytics_data_api.RecurringAudienceList, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + analytics_data_api.RecurringAudienceList, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for create_recurring_audience_list + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AlphaAnalyticsData server but before it is returned to user code. + + We recommend only using this `post_create_recurring_audience_list_with_metadata` + interceptor in new development instead of the `post_create_recurring_audience_list` interceptor. + When both interceptors are used, this `post_create_recurring_audience_list_with_metadata` interceptor runs after the + `post_create_recurring_audience_list` interceptor. The (possibly modified) response returned by + `post_create_recurring_audience_list` will be passed to + `post_create_recurring_audience_list_with_metadata`. + """ + return response, metadata + def pre_create_report_task( self, request: analytics_data_api.CreateReportTaskRequest, @@ -259,12 +308,35 @@ def post_create_report_task( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_report_task - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_report_task_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AlphaAnalyticsData server but before - it is returned to user code. + it is returned to user code. This `post_create_report_task` interceptor runs + before the `post_create_report_task_with_metadata` interceptor. 
""" return response + def post_create_report_task_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_report_task + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AlphaAnalyticsData server but before it is returned to user code. + + We recommend only using this `post_create_report_task_with_metadata` + interceptor in new development instead of the `post_create_report_task` interceptor. + When both interceptors are used, this `post_create_report_task_with_metadata` interceptor runs after the + `post_create_report_task` interceptor. The (possibly modified) response returned by + `post_create_report_task` will be passed to + `post_create_report_task_with_metadata`. + """ + return response, metadata + def pre_get_audience_list( self, request: analytics_data_api.GetAudienceListRequest, @@ -285,12 +357,37 @@ def post_get_audience_list( ) -> analytics_data_api.AudienceList: """Post-rpc interceptor for get_audience_list - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_audience_list_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AlphaAnalyticsData server but before - it is returned to user code. + it is returned to user code. This `post_get_audience_list` interceptor runs + before the `post_get_audience_list_with_metadata` interceptor. """ return response + def post_get_audience_list_with_metadata( + self, + response: analytics_data_api.AudienceList, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + analytics_data_api.AudienceList, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for get_audience_list + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AlphaAnalyticsData server but before it is returned to user code. + + We recommend only using this `post_get_audience_list_with_metadata` + interceptor in new development instead of the `post_get_audience_list` interceptor. + When both interceptors are used, this `post_get_audience_list_with_metadata` interceptor runs after the + `post_get_audience_list` interceptor. The (possibly modified) response returned by + `post_get_audience_list` will be passed to + `post_get_audience_list_with_metadata`. + """ + return response, metadata + def pre_get_property_quotas_snapshot( self, request: analytics_data_api.GetPropertyQuotasSnapshotRequest, @@ -311,12 +408,38 @@ def post_get_property_quotas_snapshot( ) -> analytics_data_api.PropertyQuotasSnapshot: """Post-rpc interceptor for get_property_quotas_snapshot - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_property_quotas_snapshot_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AlphaAnalyticsData server but before - it is returned to user code. + it is returned to user code. This `post_get_property_quotas_snapshot` interceptor runs + before the `post_get_property_quotas_snapshot_with_metadata` interceptor. 
""" return response + def post_get_property_quotas_snapshot_with_metadata( + self, + response: analytics_data_api.PropertyQuotasSnapshot, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + analytics_data_api.PropertyQuotasSnapshot, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for get_property_quotas_snapshot + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AlphaAnalyticsData server but before it is returned to user code. + + We recommend only using this `post_get_property_quotas_snapshot_with_metadata` + interceptor in new development instead of the `post_get_property_quotas_snapshot` interceptor. + When both interceptors are used, this `post_get_property_quotas_snapshot_with_metadata` interceptor runs after the + `post_get_property_quotas_snapshot` interceptor. The (possibly modified) response returned by + `post_get_property_quotas_snapshot` will be passed to + `post_get_property_quotas_snapshot_with_metadata`. + """ + return response, metadata + def pre_get_recurring_audience_list( self, request: analytics_data_api.GetRecurringAudienceListRequest, @@ -337,12 +460,38 @@ def post_get_recurring_audience_list( ) -> analytics_data_api.RecurringAudienceList: """Post-rpc interceptor for get_recurring_audience_list - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_recurring_audience_list_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AlphaAnalyticsData server but before - it is returned to user code. + it is returned to user code. This `post_get_recurring_audience_list` interceptor runs + before the `post_get_recurring_audience_list_with_metadata` interceptor. """ return response + def post_get_recurring_audience_list_with_metadata( + self, + response: analytics_data_api.RecurringAudienceList, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + analytics_data_api.RecurringAudienceList, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for get_recurring_audience_list + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AlphaAnalyticsData server but before it is returned to user code. + + We recommend only using this `post_get_recurring_audience_list_with_metadata` + interceptor in new development instead of the `post_get_recurring_audience_list` interceptor. + When both interceptors are used, this `post_get_recurring_audience_list_with_metadata` interceptor runs after the + `post_get_recurring_audience_list` interceptor. The (possibly modified) response returned by + `post_get_recurring_audience_list` will be passed to + `post_get_recurring_audience_list_with_metadata`. + """ + return response, metadata + def pre_get_report_task( self, request: analytics_data_api.GetReportTaskRequest, @@ -362,12 +511,35 @@ def post_get_report_task( ) -> analytics_data_api.ReportTask: """Post-rpc interceptor for get_report_task - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_report_task_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AlphaAnalyticsData server but before - it is returned to user code. + it is returned to user code. This `post_get_report_task` interceptor runs + before the `post_get_report_task_with_metadata` interceptor. 
""" return response + def post_get_report_task_with_metadata( + self, + response: analytics_data_api.ReportTask, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[analytics_data_api.ReportTask, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_report_task + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AlphaAnalyticsData server but before it is returned to user code. + + We recommend only using this `post_get_report_task_with_metadata` + interceptor in new development instead of the `post_get_report_task` interceptor. + When both interceptors are used, this `post_get_report_task_with_metadata` interceptor runs after the + `post_get_report_task` interceptor. The (possibly modified) response returned by + `post_get_report_task` will be passed to + `post_get_report_task_with_metadata`. + """ + return response, metadata + def pre_list_audience_lists( self, request: analytics_data_api.ListAudienceListsRequest, @@ -388,12 +560,38 @@ def post_list_audience_lists( ) -> analytics_data_api.ListAudienceListsResponse: """Post-rpc interceptor for list_audience_lists - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_audience_lists_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AlphaAnalyticsData server but before - it is returned to user code. + it is returned to user code. This `post_list_audience_lists` interceptor runs + before the `post_list_audience_lists_with_metadata` interceptor. """ return response + def post_list_audience_lists_with_metadata( + self, + response: analytics_data_api.ListAudienceListsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + analytics_data_api.ListAudienceListsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_audience_lists + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AlphaAnalyticsData server but before it is returned to user code. + + We recommend only using this `post_list_audience_lists_with_metadata` + interceptor in new development instead of the `post_list_audience_lists` interceptor. + When both interceptors are used, this `post_list_audience_lists_with_metadata` interceptor runs after the + `post_list_audience_lists` interceptor. The (possibly modified) response returned by + `post_list_audience_lists` will be passed to + `post_list_audience_lists_with_metadata`. + """ + return response, metadata + def pre_list_recurring_audience_lists( self, request: analytics_data_api.ListRecurringAudienceListsRequest, @@ -414,12 +612,38 @@ def post_list_recurring_audience_lists( ) -> analytics_data_api.ListRecurringAudienceListsResponse: """Post-rpc interceptor for list_recurring_audience_lists - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_recurring_audience_lists_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AlphaAnalyticsData server but before - it is returned to user code. + it is returned to user code. This `post_list_recurring_audience_lists` interceptor runs + before the `post_list_recurring_audience_lists_with_metadata` interceptor. 
""" return response + def post_list_recurring_audience_lists_with_metadata( + self, + response: analytics_data_api.ListRecurringAudienceListsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + analytics_data_api.ListRecurringAudienceListsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_recurring_audience_lists + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AlphaAnalyticsData server but before it is returned to user code. + + We recommend only using this `post_list_recurring_audience_lists_with_metadata` + interceptor in new development instead of the `post_list_recurring_audience_lists` interceptor. + When both interceptors are used, this `post_list_recurring_audience_lists_with_metadata` interceptor runs after the + `post_list_recurring_audience_lists` interceptor. The (possibly modified) response returned by + `post_list_recurring_audience_lists` will be passed to + `post_list_recurring_audience_lists_with_metadata`. + """ + return response, metadata + def pre_list_report_tasks( self, request: analytics_data_api.ListReportTasksRequest, @@ -440,12 +664,38 @@ def post_list_report_tasks( ) -> analytics_data_api.ListReportTasksResponse: """Post-rpc interceptor for list_report_tasks - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_report_tasks_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AlphaAnalyticsData server but before - it is returned to user code. + it is returned to user code. This `post_list_report_tasks` interceptor runs + before the `post_list_report_tasks_with_metadata` interceptor. """ return response + def post_list_report_tasks_with_metadata( + self, + response: analytics_data_api.ListReportTasksResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + analytics_data_api.ListReportTasksResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_report_tasks + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AlphaAnalyticsData server but before it is returned to user code. + + We recommend only using this `post_list_report_tasks_with_metadata` + interceptor in new development instead of the `post_list_report_tasks` interceptor. + When both interceptors are used, this `post_list_report_tasks_with_metadata` interceptor runs after the + `post_list_report_tasks` interceptor. The (possibly modified) response returned by + `post_list_report_tasks` will be passed to + `post_list_report_tasks_with_metadata`. + """ + return response, metadata + def pre_query_audience_list( self, request: analytics_data_api.QueryAudienceListRequest, @@ -466,12 +716,38 @@ def post_query_audience_list( ) -> analytics_data_api.QueryAudienceListResponse: """Post-rpc interceptor for query_audience_list - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_query_audience_list_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AlphaAnalyticsData server but before - it is returned to user code. + it is returned to user code. This `post_query_audience_list` interceptor runs + before the `post_query_audience_list_with_metadata` interceptor. 
""" return response + def post_query_audience_list_with_metadata( + self, + response: analytics_data_api.QueryAudienceListResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + analytics_data_api.QueryAudienceListResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for query_audience_list + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AlphaAnalyticsData server but before it is returned to user code. + + We recommend only using this `post_query_audience_list_with_metadata` + interceptor in new development instead of the `post_query_audience_list` interceptor. + When both interceptors are used, this `post_query_audience_list_with_metadata` interceptor runs after the + `post_query_audience_list` interceptor. The (possibly modified) response returned by + `post_query_audience_list` will be passed to + `post_query_audience_list_with_metadata`. + """ + return response, metadata + def pre_query_report_task( self, request: analytics_data_api.QueryReportTaskRequest, @@ -492,12 +768,38 @@ def post_query_report_task( ) -> analytics_data_api.QueryReportTaskResponse: """Post-rpc interceptor for query_report_task - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_query_report_task_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AlphaAnalyticsData server but before - it is returned to user code. + it is returned to user code. This `post_query_report_task` interceptor runs + before the `post_query_report_task_with_metadata` interceptor. """ return response + def post_query_report_task_with_metadata( + self, + response: analytics_data_api.QueryReportTaskResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + analytics_data_api.QueryReportTaskResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for query_report_task + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AlphaAnalyticsData server but before it is returned to user code. + + We recommend only using this `post_query_report_task_with_metadata` + interceptor in new development instead of the `post_query_report_task` interceptor. + When both interceptors are used, this `post_query_report_task_with_metadata` interceptor runs after the + `post_query_report_task` interceptor. The (possibly modified) response returned by + `post_query_report_task` will be passed to + `post_query_report_task_with_metadata`. + """ + return response, metadata + def pre_run_funnel_report( self, request: analytics_data_api.RunFunnelReportRequest, @@ -518,12 +820,38 @@ def post_run_funnel_report( ) -> analytics_data_api.RunFunnelReportResponse: """Post-rpc interceptor for run_funnel_report - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_run_funnel_report_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AlphaAnalyticsData server but before - it is returned to user code. + it is returned to user code. This `post_run_funnel_report` interceptor runs + before the `post_run_funnel_report_with_metadata` interceptor. 
""" return response + def post_run_funnel_report_with_metadata( + self, + response: analytics_data_api.RunFunnelReportResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + analytics_data_api.RunFunnelReportResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for run_funnel_report + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AlphaAnalyticsData server but before it is returned to user code. + + We recommend only using this `post_run_funnel_report_with_metadata` + interceptor in new development instead of the `post_run_funnel_report` interceptor. + When both interceptors are used, this `post_run_funnel_report_with_metadata` interceptor runs after the + `post_run_funnel_report` interceptor. The (possibly modified) response returned by + `post_run_funnel_report` will be passed to + `post_run_funnel_report_with_metadata`. + """ + return response, metadata + def pre_sheet_export_audience_list( self, request: analytics_data_api.SheetExportAudienceListRequest, @@ -544,12 +872,38 @@ def post_sheet_export_audience_list( ) -> analytics_data_api.SheetExportAudienceListResponse: """Post-rpc interceptor for sheet_export_audience_list - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_sheet_export_audience_list_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AlphaAnalyticsData server but before - it is returned to user code. + it is returned to user code. This `post_sheet_export_audience_list` interceptor runs + before the `post_sheet_export_audience_list_with_metadata` interceptor. """ return response + def post_sheet_export_audience_list_with_metadata( + self, + response: analytics_data_api.SheetExportAudienceListResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + analytics_data_api.SheetExportAudienceListResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for sheet_export_audience_list + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AlphaAnalyticsData server but before it is returned to user code. + + We recommend only using this `post_sheet_export_audience_list_with_metadata` + interceptor in new development instead of the `post_sheet_export_audience_list` interceptor. + When both interceptors are used, this `post_sheet_export_audience_list_with_metadata` interceptor runs after the + `post_sheet_export_audience_list` interceptor. The (possibly modified) response returned by + `post_sheet_export_audience_list` will be passed to + `post_sheet_export_audience_list_with_metadata`. 
+ """ + return response, metadata + @dataclasses.dataclass class AlphaAnalyticsDataRestStub: @@ -795,6 +1149,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_audience_list(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_audience_list_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -953,6 +1311,13 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_recurring_audience_list(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_create_recurring_audience_list_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1105,6 +1470,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_report_task(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_report_task_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1254,6 +1623,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_audience_list(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_audience_list_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1402,6 +1775,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_property_quotas_snapshot(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_property_quotas_snapshot_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1559,6 +1936,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_recurring_audience_list(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_recurring_audience_list_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1703,6 +2084,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_report_task(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_report_task_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1849,6 +2234,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_audience_lists(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_audience_lists_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # 
pragma: NO COVER @@ -1998,6 +2387,13 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_recurring_audience_lists(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_list_recurring_audience_lists_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2148,6 +2544,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_report_tasks(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_report_tasks_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2300,6 +2700,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_query_audience_list(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_query_audience_list_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2454,6 +2858,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_query_report_task(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_query_report_task_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2609,6 +3017,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_run_funnel_report(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_run_funnel_report_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2766,6 +3178,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_sheet_export_audience_list(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_sheet_export_audience_list_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-analytics-data/google/analytics/data_v1beta/gapic_version.py b/packages/google-analytics-data/google/analytics/data_v1beta/gapic_version.py index e7e0463c8ca1..846ea94889d9 100644 --- a/packages/google-analytics-data/google/analytics/data_v1beta/gapic_version.py +++ b/packages/google-analytics-data/google/analytics/data_v1beta/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.18.16" # {x-release-please-version} +__version__ = "0.18.17" # {x-release-please-version} diff --git a/packages/google-analytics-data/google/analytics/data_v1beta/services/beta_analytics_data/client.py b/packages/google-analytics-data/google/analytics/data_v1beta/services/beta_analytics_data/client.py index 0e690944ba66..5d22f84deb35 100644 --- a/packages/google-analytics-data/google/analytics/data_v1beta/services/beta_analytics_data/client.py +++ b/packages/google-analytics-data/google/analytics/data_v1beta/services/beta_analytics_data/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -502,6 +504,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. diff --git a/packages/google-analytics-data/google/analytics/data_v1beta/services/beta_analytics_data/transports/rest.py b/packages/google-analytics-data/google/analytics/data_v1beta/services/beta_analytics_data/transports/rest.py index d0802352956b..1ee31c80866b 100644 --- a/packages/google-analytics-data/google/analytics/data_v1beta/services/beta_analytics_data/transports/rest.py +++ b/packages/google-analytics-data/google/analytics/data_v1beta/services/beta_analytics_data/transports/rest.py @@ -183,12 +183,38 @@ def post_batch_run_pivot_reports( ) -> analytics_data_api.BatchRunPivotReportsResponse: """Post-rpc interceptor for batch_run_pivot_reports - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_batch_run_pivot_reports_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the BetaAnalyticsData server but before - it is returned to user code. + it is returned to user code. This `post_batch_run_pivot_reports` interceptor runs + before the `post_batch_run_pivot_reports_with_metadata` interceptor. """ return response + def post_batch_run_pivot_reports_with_metadata( + self, + response: analytics_data_api.BatchRunPivotReportsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + analytics_data_api.BatchRunPivotReportsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for batch_run_pivot_reports + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BetaAnalyticsData server but before it is returned to user code. 
+ + We recommend only using this `post_batch_run_pivot_reports_with_metadata` + interceptor in new development instead of the `post_batch_run_pivot_reports` interceptor. + When both interceptors are used, this `post_batch_run_pivot_reports_with_metadata` interceptor runs after the + `post_batch_run_pivot_reports` interceptor. The (possibly modified) response returned by + `post_batch_run_pivot_reports` will be passed to + `post_batch_run_pivot_reports_with_metadata`. + """ + return response, metadata + def pre_batch_run_reports( self, request: analytics_data_api.BatchRunReportsRequest, @@ -209,12 +235,38 @@ def post_batch_run_reports( ) -> analytics_data_api.BatchRunReportsResponse: """Post-rpc interceptor for batch_run_reports - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_batch_run_reports_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the BetaAnalyticsData server but before - it is returned to user code. + it is returned to user code. This `post_batch_run_reports` interceptor runs + before the `post_batch_run_reports_with_metadata` interceptor. """ return response + def post_batch_run_reports_with_metadata( + self, + response: analytics_data_api.BatchRunReportsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + analytics_data_api.BatchRunReportsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for batch_run_reports + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BetaAnalyticsData server but before it is returned to user code. + + We recommend only using this `post_batch_run_reports_with_metadata` + interceptor in new development instead of the `post_batch_run_reports` interceptor. + When both interceptors are used, this `post_batch_run_reports_with_metadata` interceptor runs after the + `post_batch_run_reports` interceptor. The (possibly modified) response returned by + `post_batch_run_reports` will be passed to + `post_batch_run_reports_with_metadata`. + """ + return response, metadata + def pre_check_compatibility( self, request: analytics_data_api.CheckCompatibilityRequest, @@ -235,12 +287,38 @@ def post_check_compatibility( ) -> analytics_data_api.CheckCompatibilityResponse: """Post-rpc interceptor for check_compatibility - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_check_compatibility_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the BetaAnalyticsData server but before - it is returned to user code. + it is returned to user code. This `post_check_compatibility` interceptor runs + before the `post_check_compatibility_with_metadata` interceptor. """ return response + def post_check_compatibility_with_metadata( + self, + response: analytics_data_api.CheckCompatibilityResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + analytics_data_api.CheckCompatibilityResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for check_compatibility + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BetaAnalyticsData server but before it is returned to user code. + + We recommend only using this `post_check_compatibility_with_metadata` + interceptor in new development instead of the `post_check_compatibility` interceptor. 
+ When both interceptors are used, this `post_check_compatibility_with_metadata` interceptor runs after the + `post_check_compatibility` interceptor. The (possibly modified) response returned by + `post_check_compatibility` will be passed to + `post_check_compatibility_with_metadata`. + """ + return response, metadata + def pre_create_audience_export( self, request: analytics_data_api.CreateAudienceExportRequest, @@ -261,12 +339,35 @@ def post_create_audience_export( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_audience_export - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_audience_export_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the BetaAnalyticsData server but before - it is returned to user code. + it is returned to user code. This `post_create_audience_export` interceptor runs + before the `post_create_audience_export_with_metadata` interceptor. """ return response + def post_create_audience_export_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_audience_export + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BetaAnalyticsData server but before it is returned to user code. + + We recommend only using this `post_create_audience_export_with_metadata` + interceptor in new development instead of the `post_create_audience_export` interceptor. + When both interceptors are used, this `post_create_audience_export_with_metadata` interceptor runs after the + `post_create_audience_export` interceptor. The (possibly modified) response returned by + `post_create_audience_export` will be passed to + `post_create_audience_export_with_metadata`. + """ + return response, metadata + def pre_get_audience_export( self, request: analytics_data_api.GetAudienceExportRequest, @@ -287,12 +388,37 @@ def post_get_audience_export( ) -> analytics_data_api.AudienceExport: """Post-rpc interceptor for get_audience_export - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_audience_export_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the BetaAnalyticsData server but before - it is returned to user code. + it is returned to user code. This `post_get_audience_export` interceptor runs + before the `post_get_audience_export_with_metadata` interceptor. """ return response + def post_get_audience_export_with_metadata( + self, + response: analytics_data_api.AudienceExport, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + analytics_data_api.AudienceExport, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for get_audience_export + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BetaAnalyticsData server but before it is returned to user code. + + We recommend only using this `post_get_audience_export_with_metadata` + interceptor in new development instead of the `post_get_audience_export` interceptor. + When both interceptors are used, this `post_get_audience_export_with_metadata` interceptor runs after the + `post_get_audience_export` interceptor. 
The (possibly modified) response returned by + `post_get_audience_export` will be passed to + `post_get_audience_export_with_metadata`. + """ + return response, metadata + def pre_get_metadata( self, request: analytics_data_api.GetMetadataRequest, @@ -312,12 +438,35 @@ def post_get_metadata( ) -> analytics_data_api.Metadata: """Post-rpc interceptor for get_metadata - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_metadata_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the BetaAnalyticsData server but before - it is returned to user code. + it is returned to user code. This `post_get_metadata` interceptor runs + before the `post_get_metadata_with_metadata` interceptor. """ return response + def post_get_metadata_with_metadata( + self, + response: analytics_data_api.Metadata, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[analytics_data_api.Metadata, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_metadata + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BetaAnalyticsData server but before it is returned to user code. + + We recommend only using this `post_get_metadata_with_metadata` + interceptor in new development instead of the `post_get_metadata` interceptor. + When both interceptors are used, this `post_get_metadata_with_metadata` interceptor runs after the + `post_get_metadata` interceptor. The (possibly modified) response returned by + `post_get_metadata` will be passed to + `post_get_metadata_with_metadata`. + """ + return response, metadata + def pre_list_audience_exports( self, request: analytics_data_api.ListAudienceExportsRequest, @@ -338,12 +487,38 @@ def post_list_audience_exports( ) -> analytics_data_api.ListAudienceExportsResponse: """Post-rpc interceptor for list_audience_exports - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_audience_exports_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the BetaAnalyticsData server but before - it is returned to user code. + it is returned to user code. This `post_list_audience_exports` interceptor runs + before the `post_list_audience_exports_with_metadata` interceptor. """ return response + def post_list_audience_exports_with_metadata( + self, + response: analytics_data_api.ListAudienceExportsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + analytics_data_api.ListAudienceExportsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_audience_exports + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BetaAnalyticsData server but before it is returned to user code. + + We recommend only using this `post_list_audience_exports_with_metadata` + interceptor in new development instead of the `post_list_audience_exports` interceptor. + When both interceptors are used, this `post_list_audience_exports_with_metadata` interceptor runs after the + `post_list_audience_exports` interceptor. The (possibly modified) response returned by + `post_list_audience_exports` will be passed to + `post_list_audience_exports_with_metadata`. 
+ """ + return response, metadata + def pre_query_audience_export( self, request: analytics_data_api.QueryAudienceExportRequest, @@ -364,12 +539,38 @@ def post_query_audience_export( ) -> analytics_data_api.QueryAudienceExportResponse: """Post-rpc interceptor for query_audience_export - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_query_audience_export_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the BetaAnalyticsData server but before - it is returned to user code. + it is returned to user code. This `post_query_audience_export` interceptor runs + before the `post_query_audience_export_with_metadata` interceptor. """ return response + def post_query_audience_export_with_metadata( + self, + response: analytics_data_api.QueryAudienceExportResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + analytics_data_api.QueryAudienceExportResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for query_audience_export + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BetaAnalyticsData server but before it is returned to user code. + + We recommend only using this `post_query_audience_export_with_metadata` + interceptor in new development instead of the `post_query_audience_export` interceptor. + When both interceptors are used, this `post_query_audience_export_with_metadata` interceptor runs after the + `post_query_audience_export` interceptor. The (possibly modified) response returned by + `post_query_audience_export` will be passed to + `post_query_audience_export_with_metadata`. + """ + return response, metadata + def pre_run_pivot_report( self, request: analytics_data_api.RunPivotReportRequest, @@ -390,12 +591,38 @@ def post_run_pivot_report( ) -> analytics_data_api.RunPivotReportResponse: """Post-rpc interceptor for run_pivot_report - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_run_pivot_report_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the BetaAnalyticsData server but before - it is returned to user code. + it is returned to user code. This `post_run_pivot_report` interceptor runs + before the `post_run_pivot_report_with_metadata` interceptor. """ return response + def post_run_pivot_report_with_metadata( + self, + response: analytics_data_api.RunPivotReportResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + analytics_data_api.RunPivotReportResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for run_pivot_report + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BetaAnalyticsData server but before it is returned to user code. + + We recommend only using this `post_run_pivot_report_with_metadata` + interceptor in new development instead of the `post_run_pivot_report` interceptor. + When both interceptors are used, this `post_run_pivot_report_with_metadata` interceptor runs after the + `post_run_pivot_report` interceptor. The (possibly modified) response returned by + `post_run_pivot_report` will be passed to + `post_run_pivot_report_with_metadata`. 
+ """ + return response, metadata + def pre_run_realtime_report( self, request: analytics_data_api.RunRealtimeReportRequest, @@ -416,12 +643,38 @@ def post_run_realtime_report( ) -> analytics_data_api.RunRealtimeReportResponse: """Post-rpc interceptor for run_realtime_report - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_run_realtime_report_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the BetaAnalyticsData server but before - it is returned to user code. + it is returned to user code. This `post_run_realtime_report` interceptor runs + before the `post_run_realtime_report_with_metadata` interceptor. """ return response + def post_run_realtime_report_with_metadata( + self, + response: analytics_data_api.RunRealtimeReportResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + analytics_data_api.RunRealtimeReportResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for run_realtime_report + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BetaAnalyticsData server but before it is returned to user code. + + We recommend only using this `post_run_realtime_report_with_metadata` + interceptor in new development instead of the `post_run_realtime_report` interceptor. + When both interceptors are used, this `post_run_realtime_report_with_metadata` interceptor runs after the + `post_run_realtime_report` interceptor. The (possibly modified) response returned by + `post_run_realtime_report` will be passed to + `post_run_realtime_report_with_metadata`. + """ + return response, metadata + def pre_run_report( self, request: analytics_data_api.RunReportRequest, @@ -441,12 +694,37 @@ def post_run_report( ) -> analytics_data_api.RunReportResponse: """Post-rpc interceptor for run_report - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_run_report_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the BetaAnalyticsData server but before - it is returned to user code. + it is returned to user code. This `post_run_report` interceptor runs + before the `post_run_report_with_metadata` interceptor. """ return response + def post_run_report_with_metadata( + self, + response: analytics_data_api.RunReportResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + analytics_data_api.RunReportResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for run_report + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BetaAnalyticsData server but before it is returned to user code. + + We recommend only using this `post_run_report_with_metadata` + interceptor in new development instead of the `post_run_report` interceptor. + When both interceptors are used, this `post_run_report_with_metadata` interceptor runs after the + `post_run_report` interceptor. The (possibly modified) response returned by + `post_run_report` will be passed to + `post_run_report_with_metadata`. 
+ """ + return response, metadata + @dataclasses.dataclass class BetaAnalyticsDataRestStub: @@ -693,6 +971,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_batch_run_pivot_reports(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_batch_run_pivot_reports_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -849,6 +1131,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_batch_run_reports(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_batch_run_reports_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1007,6 +1293,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_check_compatibility(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_check_compatibility_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1162,6 +1452,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_audience_export(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_audience_export_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1312,6 +1606,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_audience_export(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_audience_export_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1458,6 +1756,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_metadata(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_metadata_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1606,6 +1908,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_audience_exports(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_audience_exports_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1762,6 +2068,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_query_audience_export(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_query_audience_export_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1916,6 +2226,10 @@ def 
__call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_run_pivot_report(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_run_pivot_report_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2070,6 +2384,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_run_realtime_report(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_run_realtime_report_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2220,6 +2538,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_run_report(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_run_report_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-analytics-data/samples/generated_samples/snippet_metadata_google.analytics.data.v1alpha.json b/packages/google-analytics-data/samples/generated_samples/snippet_metadata_google.analytics.data.v1alpha.json index 00bf06f70394..fca15527accc 100644 --- a/packages/google-analytics-data/samples/generated_samples/snippet_metadata_google.analytics.data.v1alpha.json +++ b/packages/google-analytics-data/samples/generated_samples/snippet_metadata_google.analytics.data.v1alpha.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-analytics-data", - "version": "0.18.16" + "version": "0.18.17" }, "snippets": [ { diff --git a/packages/google-analytics-data/samples/generated_samples/snippet_metadata_google.analytics.data.v1beta.json b/packages/google-analytics-data/samples/generated_samples/snippet_metadata_google.analytics.data.v1beta.json index 5a4307966794..846f20d255b3 100644 --- a/packages/google-analytics-data/samples/generated_samples/snippet_metadata_google.analytics.data.v1beta.json +++ b/packages/google-analytics-data/samples/generated_samples/snippet_metadata_google.analytics.data.v1beta.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-analytics-data", - "version": "0.18.16" + "version": "0.18.17" }, "snippets": [ { diff --git a/packages/google-analytics-data/tests/unit/gapic/data_v1alpha/test_alpha_analytics_data.py b/packages/google-analytics-data/tests/unit/gapic/data_v1alpha/test_alpha_analytics_data.py index fb3ad3b3bf7b..c85d5400591f 100644 --- a/packages/google-analytics-data/tests/unit/gapic/data_v1alpha/test_alpha_analytics_data.py +++ b/packages/google-analytics-data/tests/unit/gapic/data_v1alpha/test_alpha_analytics_data.py @@ -72,6 +72,13 @@ ) from google.analytics.data_v1alpha.types import analytics_data_api, data +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -343,6 +350,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
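From the caller's side, the behavior these new tests cover can be sketched as follows (the property name and the failing call are assumptions; the appended JSON only appears for 401/403/404 errors when the credentials implement ``get_cred_info``, i.e. google-auth >= 2.35.0):

    from google.analytics.data_v1alpha import AlphaAnalyticsDataClient
    from google.analytics.data_v1alpha.types import analytics_data_api
    from google.api_core import exceptions as core_exceptions

    client = AlphaAnalyticsDataClient()
    try:
        # Hypothetical call against a property the credentials cannot access.
        client.run_funnel_report(
            analytics_data_api.RunFunnelReportRequest(property="properties/0")
        )
    except core_exceptions.GoogleAPICallError as err:
        # For 401/403/404 responses, err.details may now also contain a JSON string
        # describing the credential source, type, and principal.
        print(err.details)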
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = AlphaAnalyticsDataClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = AlphaAnalyticsDataClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -10164,10 +10214,14 @@ def test_run_funnel_report_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AlphaAnalyticsDataRestInterceptor, "post_run_funnel_report" ) as post, mock.patch.object( + transports.AlphaAnalyticsDataRestInterceptor, + "post_run_funnel_report_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AlphaAnalyticsDataRestInterceptor, "pre_run_funnel_report" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_data_api.RunFunnelReportRequest.pb( analytics_data_api.RunFunnelReportRequest() ) @@ -10193,6 +10247,10 @@ def test_run_funnel_report_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = analytics_data_api.RunFunnelReportResponse() + post_with_metadata.return_value = ( + analytics_data_api.RunFunnelReportResponse(), + metadata, + ) client.run_funnel_report( request, @@ -10204,6 +10262,7 @@ def test_run_funnel_report_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_audience_list_rest_bad_request( @@ -10370,10 +10429,14 @@ def test_create_audience_list_rest_interceptors(null_interceptor): ), mock.patch.object( transports.AlphaAnalyticsDataRestInterceptor, "post_create_audience_list" ) as post, mock.patch.object( + transports.AlphaAnalyticsDataRestInterceptor, + "post_create_audience_list_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AlphaAnalyticsDataRestInterceptor, "pre_create_audience_list" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_data_api.CreateAudienceListRequest.pb( analytics_data_api.CreateAudienceListRequest() ) @@ -10397,6 +10460,7 @@ def test_create_audience_list_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_audience_list( request, @@ -10408,6 +10472,7 @@ def 
test_create_audience_list_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_query_audience_list_rest_bad_request( @@ -10492,10 +10557,14 @@ def test_query_audience_list_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AlphaAnalyticsDataRestInterceptor, "post_query_audience_list" ) as post, mock.patch.object( + transports.AlphaAnalyticsDataRestInterceptor, + "post_query_audience_list_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AlphaAnalyticsDataRestInterceptor, "pre_query_audience_list" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_data_api.QueryAudienceListRequest.pb( analytics_data_api.QueryAudienceListRequest() ) @@ -10521,6 +10590,10 @@ def test_query_audience_list_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = analytics_data_api.QueryAudienceListResponse() + post_with_metadata.return_value = ( + analytics_data_api.QueryAudienceListResponse(), + metadata, + ) client.query_audience_list( request, @@ -10532,6 +10605,7 @@ def test_query_audience_list_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_sheet_export_audience_list_rest_bad_request( @@ -10622,10 +10696,14 @@ def test_sheet_export_audience_list_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AlphaAnalyticsDataRestInterceptor, "post_sheet_export_audience_list" ) as post, mock.patch.object( + transports.AlphaAnalyticsDataRestInterceptor, + "post_sheet_export_audience_list_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AlphaAnalyticsDataRestInterceptor, "pre_sheet_export_audience_list" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_data_api.SheetExportAudienceListRequest.pb( analytics_data_api.SheetExportAudienceListRequest() ) @@ -10651,6 +10729,10 @@ def test_sheet_export_audience_list_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = analytics_data_api.SheetExportAudienceListResponse() + post_with_metadata.return_value = ( + analytics_data_api.SheetExportAudienceListResponse(), + metadata, + ) client.sheet_export_audience_list( request, @@ -10662,6 +10744,7 @@ def test_sheet_export_audience_list_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_audience_list_rest_bad_request( @@ -10762,10 +10845,14 @@ def test_get_audience_list_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AlphaAnalyticsDataRestInterceptor, "post_get_audience_list" ) as post, mock.patch.object( + transports.AlphaAnalyticsDataRestInterceptor, + "post_get_audience_list_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AlphaAnalyticsDataRestInterceptor, "pre_get_audience_list" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_data_api.GetAudienceListRequest.pb( analytics_data_api.GetAudienceListRequest() ) @@ -10791,6 +10878,7 @@ def test_get_audience_list_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = analytics_data_api.AudienceList() + 
post_with_metadata.return_value = analytics_data_api.AudienceList(), metadata client.get_audience_list( request, @@ -10802,6 +10890,7 @@ def test_get_audience_list_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_audience_lists_rest_bad_request( @@ -10886,10 +10975,14 @@ def test_list_audience_lists_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AlphaAnalyticsDataRestInterceptor, "post_list_audience_lists" ) as post, mock.patch.object( + transports.AlphaAnalyticsDataRestInterceptor, + "post_list_audience_lists_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AlphaAnalyticsDataRestInterceptor, "pre_list_audience_lists" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_data_api.ListAudienceListsRequest.pb( analytics_data_api.ListAudienceListsRequest() ) @@ -10915,6 +11008,10 @@ def test_list_audience_lists_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = analytics_data_api.ListAudienceListsResponse() + post_with_metadata.return_value = ( + analytics_data_api.ListAudienceListsResponse(), + metadata, + ) client.list_audience_lists( request, @@ -10926,6 +11023,7 @@ def test_list_audience_lists_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_recurring_audience_list_rest_bad_request( @@ -11102,11 +11200,15 @@ def test_create_recurring_audience_list_rest_interceptors(null_interceptor): transports.AlphaAnalyticsDataRestInterceptor, "post_create_recurring_audience_list", ) as post, mock.patch.object( + transports.AlphaAnalyticsDataRestInterceptor, + "post_create_recurring_audience_list_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AlphaAnalyticsDataRestInterceptor, "pre_create_recurring_audience_list", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_data_api.CreateRecurringAudienceListRequest.pb( analytics_data_api.CreateRecurringAudienceListRequest() ) @@ -11132,6 +11234,10 @@ def test_create_recurring_audience_list_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = analytics_data_api.RecurringAudienceList() + post_with_metadata.return_value = ( + analytics_data_api.RecurringAudienceList(), + metadata, + ) client.create_recurring_audience_list( request, @@ -11143,6 +11249,7 @@ def test_create_recurring_audience_list_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_recurring_audience_list_rest_bad_request( @@ -11235,10 +11342,14 @@ def test_get_recurring_audience_list_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AlphaAnalyticsDataRestInterceptor, "post_get_recurring_audience_list" ) as post, mock.patch.object( + transports.AlphaAnalyticsDataRestInterceptor, + "post_get_recurring_audience_list_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AlphaAnalyticsDataRestInterceptor, "pre_get_recurring_audience_list" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_data_api.GetRecurringAudienceListRequest.pb( analytics_data_api.GetRecurringAudienceListRequest() ) @@ 
-11264,6 +11375,10 @@ def test_get_recurring_audience_list_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = analytics_data_api.RecurringAudienceList() + post_with_metadata.return_value = ( + analytics_data_api.RecurringAudienceList(), + metadata, + ) client.get_recurring_audience_list( request, @@ -11275,6 +11390,7 @@ def test_get_recurring_audience_list_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_recurring_audience_lists_rest_bad_request( @@ -11362,11 +11478,15 @@ def test_list_recurring_audience_lists_rest_interceptors(null_interceptor): transports.AlphaAnalyticsDataRestInterceptor, "post_list_recurring_audience_lists", ) as post, mock.patch.object( + transports.AlphaAnalyticsDataRestInterceptor, + "post_list_recurring_audience_lists_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AlphaAnalyticsDataRestInterceptor, "pre_list_recurring_audience_lists", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_data_api.ListRecurringAudienceListsRequest.pb( analytics_data_api.ListRecurringAudienceListsRequest() ) @@ -11392,6 +11512,10 @@ def test_list_recurring_audience_lists_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = analytics_data_api.ListRecurringAudienceListsResponse() + post_with_metadata.return_value = ( + analytics_data_api.ListRecurringAudienceListsResponse(), + metadata, + ) client.list_recurring_audience_lists( request, @@ -11403,6 +11527,7 @@ def test_list_recurring_audience_lists_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_property_quotas_snapshot_rest_bad_request( @@ -11488,10 +11613,14 @@ def test_get_property_quotas_snapshot_rest_interceptors(null_interceptor): transports.AlphaAnalyticsDataRestInterceptor, "post_get_property_quotas_snapshot", ) as post, mock.patch.object( + transports.AlphaAnalyticsDataRestInterceptor, + "post_get_property_quotas_snapshot_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AlphaAnalyticsDataRestInterceptor, "pre_get_property_quotas_snapshot" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_data_api.GetPropertyQuotasSnapshotRequest.pb( analytics_data_api.GetPropertyQuotasSnapshotRequest() ) @@ -11517,6 +11646,10 @@ def test_get_property_quotas_snapshot_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = analytics_data_api.PropertyQuotasSnapshot() + post_with_metadata.return_value = ( + analytics_data_api.PropertyQuotasSnapshot(), + metadata, + ) client.get_property_quotas_snapshot( request, @@ -11528,6 +11661,7 @@ def test_get_property_quotas_snapshot_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_report_task_rest_bad_request( @@ -11776,10 +11910,14 @@ def test_create_report_task_rest_interceptors(null_interceptor): ), mock.patch.object( transports.AlphaAnalyticsDataRestInterceptor, "post_create_report_task" ) as post, mock.patch.object( + transports.AlphaAnalyticsDataRestInterceptor, + "post_create_report_task_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AlphaAnalyticsDataRestInterceptor, 
"pre_create_report_task" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_data_api.CreateReportTaskRequest.pb( analytics_data_api.CreateReportTaskRequest() ) @@ -11803,6 +11941,7 @@ def test_create_report_task_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_report_task( request, @@ -11814,6 +11953,7 @@ def test_create_report_task_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_query_report_task_rest_bad_request( @@ -11898,10 +12038,14 @@ def test_query_report_task_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AlphaAnalyticsDataRestInterceptor, "post_query_report_task" ) as post, mock.patch.object( + transports.AlphaAnalyticsDataRestInterceptor, + "post_query_report_task_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AlphaAnalyticsDataRestInterceptor, "pre_query_report_task" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_data_api.QueryReportTaskRequest.pb( analytics_data_api.QueryReportTaskRequest() ) @@ -11927,6 +12071,10 @@ def test_query_report_task_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = analytics_data_api.QueryReportTaskResponse() + post_with_metadata.return_value = ( + analytics_data_api.QueryReportTaskResponse(), + metadata, + ) client.query_report_task( request, @@ -11938,6 +12086,7 @@ def test_query_report_task_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_report_task_rest_bad_request( @@ -12022,10 +12171,14 @@ def test_get_report_task_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AlphaAnalyticsDataRestInterceptor, "post_get_report_task" ) as post, mock.patch.object( + transports.AlphaAnalyticsDataRestInterceptor, + "post_get_report_task_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AlphaAnalyticsDataRestInterceptor, "pre_get_report_task" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_data_api.GetReportTaskRequest.pb( analytics_data_api.GetReportTaskRequest() ) @@ -12051,6 +12204,7 @@ def test_get_report_task_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = analytics_data_api.ReportTask() + post_with_metadata.return_value = analytics_data_api.ReportTask(), metadata client.get_report_task( request, @@ -12062,6 +12216,7 @@ def test_get_report_task_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_report_tasks_rest_bad_request( @@ -12146,10 +12301,14 @@ def test_list_report_tasks_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AlphaAnalyticsDataRestInterceptor, "post_list_report_tasks" ) as post, mock.patch.object( + transports.AlphaAnalyticsDataRestInterceptor, + "post_list_report_tasks_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AlphaAnalyticsDataRestInterceptor, "pre_list_report_tasks" ) as pre: pre.assert_not_called() 
post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_data_api.ListReportTasksRequest.pb( analytics_data_api.ListReportTasksRequest() ) @@ -12175,6 +12334,10 @@ def test_list_report_tasks_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = analytics_data_api.ListReportTasksResponse() + post_with_metadata.return_value = ( + analytics_data_api.ListReportTasksResponse(), + metadata, + ) client.list_report_tasks( request, @@ -12186,6 +12349,7 @@ def test_list_report_tasks_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_initialize_client_w_rest(): diff --git a/packages/google-analytics-data/tests/unit/gapic/data_v1beta/test_beta_analytics_data.py b/packages/google-analytics-data/tests/unit/gapic/data_v1beta/test_beta_analytics_data.py index 0f74b9eed126..dc8aa0324895 100644 --- a/packages/google-analytics-data/tests/unit/gapic/data_v1beta/test_beta_analytics_data.py +++ b/packages/google-analytics-data/tests/unit/gapic/data_v1beta/test_beta_analytics_data.py @@ -71,6 +71,13 @@ ) from google.analytics.data_v1beta.types import analytics_data_api, data +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -338,6 +345,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = BetaAnalyticsDataClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = BetaAnalyticsDataClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -6568,10 +6618,13 @@ def test_run_report_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.BetaAnalyticsDataRestInterceptor, "post_run_report" ) as post, mock.patch.object( + transports.BetaAnalyticsDataRestInterceptor, "post_run_report_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.BetaAnalyticsDataRestInterceptor, "pre_run_report" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = 
analytics_data_api.RunReportRequest.pb( analytics_data_api.RunReportRequest() ) @@ -6597,6 +6650,10 @@ def test_run_report_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = analytics_data_api.RunReportResponse() + post_with_metadata.return_value = ( + analytics_data_api.RunReportResponse(), + metadata, + ) client.run_report( request, @@ -6608,6 +6665,7 @@ def test_run_report_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_run_pivot_report_rest_bad_request( @@ -6692,10 +6750,14 @@ def test_run_pivot_report_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.BetaAnalyticsDataRestInterceptor, "post_run_pivot_report" ) as post, mock.patch.object( + transports.BetaAnalyticsDataRestInterceptor, + "post_run_pivot_report_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.BetaAnalyticsDataRestInterceptor, "pre_run_pivot_report" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_data_api.RunPivotReportRequest.pb( analytics_data_api.RunPivotReportRequest() ) @@ -6721,6 +6783,10 @@ def test_run_pivot_report_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = analytics_data_api.RunPivotReportResponse() + post_with_metadata.return_value = ( + analytics_data_api.RunPivotReportResponse(), + metadata, + ) client.run_pivot_report( request, @@ -6732,6 +6798,7 @@ def test_run_pivot_report_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_batch_run_reports_rest_bad_request( @@ -6816,10 +6883,14 @@ def test_batch_run_reports_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.BetaAnalyticsDataRestInterceptor, "post_batch_run_reports" ) as post, mock.patch.object( + transports.BetaAnalyticsDataRestInterceptor, + "post_batch_run_reports_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.BetaAnalyticsDataRestInterceptor, "pre_batch_run_reports" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_data_api.BatchRunReportsRequest.pb( analytics_data_api.BatchRunReportsRequest() ) @@ -6845,6 +6916,10 @@ def test_batch_run_reports_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = analytics_data_api.BatchRunReportsResponse() + post_with_metadata.return_value = ( + analytics_data_api.BatchRunReportsResponse(), + metadata, + ) client.batch_run_reports( request, @@ -6856,6 +6931,7 @@ def test_batch_run_reports_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_batch_run_pivot_reports_rest_bad_request( @@ -6940,10 +7016,14 @@ def test_batch_run_pivot_reports_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.BetaAnalyticsDataRestInterceptor, "post_batch_run_pivot_reports" ) as post, mock.patch.object( + transports.BetaAnalyticsDataRestInterceptor, + "post_batch_run_pivot_reports_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.BetaAnalyticsDataRestInterceptor, "pre_batch_run_pivot_reports" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = 
analytics_data_api.BatchRunPivotReportsRequest.pb( analytics_data_api.BatchRunPivotReportsRequest() ) @@ -6969,6 +7049,10 @@ def test_batch_run_pivot_reports_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = analytics_data_api.BatchRunPivotReportsResponse() + post_with_metadata.return_value = ( + analytics_data_api.BatchRunPivotReportsResponse(), + metadata, + ) client.batch_run_pivot_reports( request, @@ -6980,6 +7064,7 @@ def test_batch_run_pivot_reports_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_metadata_rest_bad_request( @@ -7064,10 +7149,13 @@ def test_get_metadata_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.BetaAnalyticsDataRestInterceptor, "post_get_metadata" ) as post, mock.patch.object( + transports.BetaAnalyticsDataRestInterceptor, "post_get_metadata_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.BetaAnalyticsDataRestInterceptor, "pre_get_metadata" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_data_api.GetMetadataRequest.pb( analytics_data_api.GetMetadataRequest() ) @@ -7093,6 +7181,7 @@ def test_get_metadata_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = analytics_data_api.Metadata() + post_with_metadata.return_value = analytics_data_api.Metadata(), metadata client.get_metadata( request, @@ -7104,6 +7193,7 @@ def test_get_metadata_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_run_realtime_report_rest_bad_request( @@ -7190,10 +7280,14 @@ def test_run_realtime_report_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.BetaAnalyticsDataRestInterceptor, "post_run_realtime_report" ) as post, mock.patch.object( + transports.BetaAnalyticsDataRestInterceptor, + "post_run_realtime_report_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.BetaAnalyticsDataRestInterceptor, "pre_run_realtime_report" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_data_api.RunRealtimeReportRequest.pb( analytics_data_api.RunRealtimeReportRequest() ) @@ -7219,6 +7313,10 @@ def test_run_realtime_report_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = analytics_data_api.RunRealtimeReportResponse() + post_with_metadata.return_value = ( + analytics_data_api.RunRealtimeReportResponse(), + metadata, + ) client.run_realtime_report( request, @@ -7230,6 +7328,7 @@ def test_run_realtime_report_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_check_compatibility_rest_bad_request( @@ -7311,10 +7410,14 @@ def test_check_compatibility_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.BetaAnalyticsDataRestInterceptor, "post_check_compatibility" ) as post, mock.patch.object( + transports.BetaAnalyticsDataRestInterceptor, + "post_check_compatibility_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.BetaAnalyticsDataRestInterceptor, "pre_check_compatibility" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = 
analytics_data_api.CheckCompatibilityRequest.pb( analytics_data_api.CheckCompatibilityRequest() ) @@ -7340,6 +7443,10 @@ def test_check_compatibility_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = analytics_data_api.CheckCompatibilityResponse() + post_with_metadata.return_value = ( + analytics_data_api.CheckCompatibilityResponse(), + metadata, + ) client.check_compatibility( request, @@ -7351,6 +7458,7 @@ def test_check_compatibility_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_audience_export_rest_bad_request( @@ -7512,10 +7620,14 @@ def test_create_audience_export_rest_interceptors(null_interceptor): ), mock.patch.object( transports.BetaAnalyticsDataRestInterceptor, "post_create_audience_export" ) as post, mock.patch.object( + transports.BetaAnalyticsDataRestInterceptor, + "post_create_audience_export_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.BetaAnalyticsDataRestInterceptor, "pre_create_audience_export" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_data_api.CreateAudienceExportRequest.pb( analytics_data_api.CreateAudienceExportRequest() ) @@ -7539,6 +7651,7 @@ def test_create_audience_export_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_audience_export( request, @@ -7550,6 +7663,7 @@ def test_create_audience_export_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_query_audience_export_rest_bad_request( @@ -7634,10 +7748,14 @@ def test_query_audience_export_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.BetaAnalyticsDataRestInterceptor, "post_query_audience_export" ) as post, mock.patch.object( + transports.BetaAnalyticsDataRestInterceptor, + "post_query_audience_export_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.BetaAnalyticsDataRestInterceptor, "pre_query_audience_export" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_data_api.QueryAudienceExportRequest.pb( analytics_data_api.QueryAudienceExportRequest() ) @@ -7663,6 +7781,10 @@ def test_query_audience_export_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = analytics_data_api.QueryAudienceExportResponse() + post_with_metadata.return_value = ( + analytics_data_api.QueryAudienceExportResponse(), + metadata, + ) client.query_audience_export( request, @@ -7674,6 +7796,7 @@ def test_query_audience_export_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_audience_export_rest_bad_request( @@ -7772,10 +7895,14 @@ def test_get_audience_export_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.BetaAnalyticsDataRestInterceptor, "post_get_audience_export" ) as post, mock.patch.object( + transports.BetaAnalyticsDataRestInterceptor, + "post_get_audience_export_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.BetaAnalyticsDataRestInterceptor, "pre_get_audience_export" ) as pre: pre.assert_not_called() 
post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_data_api.GetAudienceExportRequest.pb( analytics_data_api.GetAudienceExportRequest() ) @@ -7801,6 +7928,7 @@ def test_get_audience_export_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = analytics_data_api.AudienceExport() + post_with_metadata.return_value = analytics_data_api.AudienceExport(), metadata client.get_audience_export( request, @@ -7812,6 +7940,7 @@ def test_get_audience_export_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_audience_exports_rest_bad_request( @@ -7896,10 +8025,14 @@ def test_list_audience_exports_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.BetaAnalyticsDataRestInterceptor, "post_list_audience_exports" ) as post, mock.patch.object( + transports.BetaAnalyticsDataRestInterceptor, + "post_list_audience_exports_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.BetaAnalyticsDataRestInterceptor, "pre_list_audience_exports" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = analytics_data_api.ListAudienceExportsRequest.pb( analytics_data_api.ListAudienceExportsRequest() ) @@ -7925,6 +8058,10 @@ def test_list_audience_exports_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = analytics_data_api.ListAudienceExportsResponse() + post_with_metadata.return_value = ( + analytics_data_api.ListAudienceExportsResponse(), + metadata, + ) client.list_audience_exports( request, @@ -7936,6 +8073,7 @@ def test_list_audience_exports_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_initialize_client_w_rest(): diff --git a/packages/google-apps-card/CHANGELOG.md b/packages/google-apps-card/CHANGELOG.md index dd127eae0ca8..c270a40c0229 100644 --- a/packages/google-apps-card/CHANGELOG.md +++ b/packages/google-apps-card/CHANGELOG.md @@ -1,5 +1,13 @@ # Changelog +## [0.1.6](https://github.com/googleapis/google-cloud-python/compare/google-apps-card-v0.1.5...google-apps-card-v0.1.6) (2025-02-12) + + +### Features + +* Add REST Interceptors which support reading metadata ([a961bc0](https://github.com/googleapis/google-cloud-python/commit/a961bc029201b72fc4923490aeb3d82781853e6a)) +* Add support for reading selective GAPIC generation methods from service YAML ([a961bc0](https://github.com/googleapis/google-cloud-python/commit/a961bc029201b72fc4923490aeb3d82781853e6a)) + ## [0.1.5](https://github.com/googleapis/google-cloud-python/compare/google-apps-card-v0.1.4...google-apps-card-v0.1.5) (2024-10-24) diff --git a/packages/google-apps-card/google/apps/card/gapic_version.py b/packages/google-apps-card/google/apps/card/gapic_version.py index e9c4bb5650f3..51d2795b9d6b 100644 --- a/packages/google-apps-card/google/apps/card/gapic_version.py +++ b/packages/google-apps-card/google/apps/card/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
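The interceptor tests above exercise the new `post_*_with_metadata` hooks described by the changelog entries above ("Add REST Interceptors which support reading metadata"). A minimal usage sketch follows; only `BetaAnalyticsDataRestInterceptor`, `post_run_report_with_metadata`, and the client class are taken directly from the tests above, while the transport wiring shown is the usual generated GAPIC pattern and is assumed here:

    from google.analytics.data_v1beta import BetaAnalyticsDataClient
    from google.analytics.data_v1beta.services.beta_analytics_data import transports
    from google.auth import credentials as ga_credentials


    class HeaderLoggingInterceptor(transports.BetaAnalyticsDataRestInterceptor):
        def post_run_report_with_metadata(self, response, metadata):
            # `metadata` carries the HTTP response headers as (key, value) pairs;
            # return the (possibly modified) response together with the metadata.
            for key, value in metadata:
                print(f"run_report response header: {key}={value}")
            return response, metadata


    # Assumed wiring: pass the interceptor to the REST transport, as the
    # interceptor test fixtures above do.
    client = BetaAnalyticsDataClient(
        transport=transports.BetaAnalyticsDataRestTransport(
            credentials=ga_credentials.AnonymousCredentials(),
            interceptor=HeaderLoggingInterceptor(),
        )
    )

Per the interceptor docstrings later in this diff, new code should prefer the `_with_metadata` variant; the plain `post_run_report` hook is kept for backward compatibility.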
# -__version__ = "0.1.5" # {x-release-please-version} +__version__ = "0.1.6" # {x-release-please-version} diff --git a/packages/google-apps-card/google/apps/card_v1/gapic_version.py b/packages/google-apps-card/google/apps/card_v1/gapic_version.py index e9c4bb5650f3..51d2795b9d6b 100644 --- a/packages/google-apps-card/google/apps/card_v1/gapic_version.py +++ b/packages/google-apps-card/google/apps/card_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.1.5" # {x-release-please-version} +__version__ = "0.1.6" # {x-release-please-version} diff --git a/packages/google-apps-chat/CHANGELOG.md b/packages/google-apps-chat/CHANGELOG.md index 6a64bb51b53c..4c748ec70d09 100644 --- a/packages/google-apps-chat/CHANGELOG.md +++ b/packages/google-apps-chat/CHANGELOG.md @@ -1,5 +1,34 @@ # Changelog +## [0.2.2](https://github.com/googleapis/google-cloud-python/compare/google-apps-chat-v0.2.1...google-apps-chat-v0.2.2) (2025-02-12) + + +### Features + +* Add REST Interceptors which support reading metadata ([a961bc0](https://github.com/googleapis/google-cloud-python/commit/a961bc029201b72fc4923490aeb3d82781853e6a)) +* Add support for reading selective GAPIC generation methods from service YAML ([a961bc0](https://github.com/googleapis/google-cloud-python/commit/a961bc029201b72fc4923490aeb3d82781853e6a)) + + +### Documentation + +* Update Google chat app command documentation ([50d43b1](https://github.com/googleapis/google-cloud-python/commit/50d43b1574812be8eb9bfffd241660ee1bddef15)) + +## [0.2.1](https://github.com/googleapis/google-cloud-python/compare/google-apps-chat-v0.2.0...google-apps-chat-v0.2.1) (2025-01-29) + + +### Features + +* A new field custom_emoji_metadata is added to message `.google.chat.v1.Annotation` ([b165f92](https://github.com/googleapis/google-cloud-python/commit/b165f923cdf47fbb23757531145eace0e3f088cb)) +* A new message `CustomEmojiMetadata` is added ([b165f92](https://github.com/googleapis/google-cloud-python/commit/b165f923cdf47fbb23757531145eace0e3f088cb)) +* A new value `CUSTOM_EMOJI` is added to enum `AnnotationType` ([b165f92](https://github.com/googleapis/google-cloud-python/commit/b165f923cdf47fbb23757531145eace0e3f088cb)) + + +### Documentation + +* A comment for field `custom_emoji` in message `.google.chat.v1.Emoji` is changed ([b165f92](https://github.com/googleapis/google-cloud-python/commit/b165f923cdf47fbb23757531145eace0e3f088cb)) +* A comment for method `CreateReaction` in service `ChatService` is changed ([b165f92](https://github.com/googleapis/google-cloud-python/commit/b165f923cdf47fbb23757531145eace0e3f088cb)) +* A comment for method `DeleteReaction` in service `ChatService` is changed ([b165f92](https://github.com/googleapis/google-cloud-python/commit/b165f923cdf47fbb23757531145eace0e3f088cb)) + ## [0.2.0](https://github.com/googleapis/google-cloud-python/compare/google-apps-chat-v0.1.14...google-apps-chat-v0.2.0) (2024-12-12) diff --git a/packages/google-apps-chat/README.rst b/packages/google-apps-chat/README.rst index 6cd1ba088336..2717c74a1dcc 100644 --- a/packages/google-apps-chat/README.rst +++ b/packages/google-apps-chat/README.rst @@ -26,12 +26,12 @@ In order to use this library, you first need to go through the following steps: 1. `Select or create a Cloud Platform project.`_ 2. `Enable billing for your project.`_ 3. `Enable the Chat API.`_ -4. `Setup Authentication.`_ +4. `Set up Authentication.`_ .. 
_Select or create a Cloud Platform project.: https://console.cloud.google.com/project .. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project .. _Enable the Chat API.: https://developers.google.com/chat/ -.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html +.. _Set up Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html Installation ~~~~~~~~~~~~ diff --git a/packages/google-apps-chat/google/apps/chat/__init__.py b/packages/google-apps-chat/google/apps/chat/__init__.py index 17679ffaa12a..4740c22c5e51 100644 --- a/packages/google-apps-chat/google/apps/chat/__init__.py +++ b/packages/google-apps-chat/google/apps/chat/__init__.py @@ -27,6 +27,7 @@ Annotation, AnnotationType, ChatSpaceLinkData, + CustomEmojiMetadata, DriveLinkData, RichLinkMetadata, SlashCommandMetadata, @@ -141,6 +142,7 @@ "ActionStatus", "Annotation", "ChatSpaceLinkData", + "CustomEmojiMetadata", "DriveLinkData", "RichLinkMetadata", "SlashCommandMetadata", diff --git a/packages/google-apps-chat/google/apps/chat/gapic_version.py b/packages/google-apps-chat/google/apps/chat/gapic_version.py index 364164ddb134..d1a1a883babd 100644 --- a/packages/google-apps-chat/google/apps/chat/gapic_version.py +++ b/packages/google-apps-chat/google/apps/chat/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.2.0" # {x-release-please-version} +__version__ = "0.2.2" # {x-release-please-version} diff --git a/packages/google-apps-chat/google/apps/chat_v1/__init__.py b/packages/google-apps-chat/google/apps/chat_v1/__init__.py index d770a6fbb7ff..1105d7b8697e 100644 --- a/packages/google-apps-chat/google/apps/chat_v1/__init__.py +++ b/packages/google-apps-chat/google/apps/chat_v1/__init__.py @@ -24,6 +24,7 @@ Annotation, AnnotationType, ChatSpaceLinkData, + CustomEmojiMetadata, DriveLinkData, RichLinkMetadata, SlashCommandMetadata, @@ -150,6 +151,7 @@ "CreateReactionRequest", "CreateSpaceRequest", "CustomEmoji", + "CustomEmojiMetadata", "DeleteMembershipRequest", "DeleteMessageRequest", "DeleteReactionRequest", diff --git a/packages/google-apps-chat/google/apps/chat_v1/gapic_version.py b/packages/google-apps-chat/google/apps/chat_v1/gapic_version.py index 364164ddb134..d1a1a883babd 100644 --- a/packages/google-apps-chat/google/apps/chat_v1/gapic_version.py +++ b/packages/google-apps-chat/google/apps/chat_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.2.0" # {x-release-please-version} +__version__ = "0.2.2" # {x-release-please-version} diff --git a/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/async_client.py b/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/async_client.py index 58f76edd0c60..9c5f838f346e 100644 --- a/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/async_client.py +++ b/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/async_client.py @@ -3170,8 +3170,8 @@ async def create_reaction( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> gc_reaction.Reaction: - r"""Creates a reaction and adds it to a message. Only unicode emojis - are supported. For an example, see `Add a reaction to a + r"""Creates a reaction and adds it to a message. 
For an example, see + `Add a reaction to a message `__. Requires `user @@ -3420,8 +3420,7 @@ async def delete_reaction( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Deletes a reaction to a message. Only unicode emojis are - supported. For an example, see `Delete a + r"""Deletes a reaction to a message. For an example, see `Delete a reaction `__. Requires `user diff --git a/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/client.py b/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/client.py index 266684cd5ce2..78f66a8d2cf6 100644 --- a/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/client.py +++ b/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -677,6 +679,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -3648,8 +3677,8 @@ def create_reaction( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> gc_reaction.Reaction: - r"""Creates a reaction and adds it to a message. Only unicode emojis - are supported. For an example, see `Add a reaction to a + r"""Creates a reaction and adds it to a message. For an example, see + `Add a reaction to a message `__. Requires `user @@ -3892,8 +3921,7 @@ def delete_reaction( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""Deletes a reaction to a message. Only unicode emojis are - supported. For an example, see `Delete a + r"""Deletes a reaction to a message. For an example, see `Delete a reaction `__. Requires `user diff --git a/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/transports/grpc.py b/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/transports/grpc.py index 7d85a0a227a9..1ec37a5013c7 100644 --- a/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/transports/grpc.py +++ b/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/transports/grpc.py @@ -1277,8 +1277,8 @@ def create_reaction( ) -> Callable[[gc_reaction.CreateReactionRequest], gc_reaction.Reaction]: r"""Return a callable for the create reaction method over gRPC. - Creates a reaction and adds it to a message. Only unicode emojis - are supported. 
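The `_add_cred_info_for_auth_errors` helper added to the Chat client above only augments 401/403/404 errors, and only when the credentials expose `get_cred_info` (available in google-auth>=2.35.0). A minimal sketch of the behaviour, mirroring the google-analytics-data unit tests earlier in this diff with the Chat client swapped in (the mock credential and its values are illustrative only):

    import json
    from unittest import mock

    from google.api_core import exceptions as core_exceptions
    from google.apps.chat_v1 import ChatServiceClient

    # Illustrative credential object exposing get_cred_info (google-auth>=2.35.0).
    cred = mock.Mock(["get_cred_info"])
    cred.get_cred_info = mock.Mock(
        return_value={"credential_type": "service account credentials"}
    )

    client = ChatServiceClient(credentials=cred)
    client._transport._credentials = cred

    error = core_exceptions.GoogleAPICallError("message", details=["foo"])
    error.code = 401  # 401, 403 and 404 get credential info appended; other codes do not.

    client._add_cred_info_for_auth_errors(error)
    assert error.details == ["foo", json.dumps(cred.get_cred_info())]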
For an example, see `Add a reaction to a + Creates a reaction and adds it to a message. For an example, see + `Add a reaction to a message `__. Requires `user @@ -1339,8 +1339,7 @@ def delete_reaction( ) -> Callable[[reaction.DeleteReactionRequest], empty_pb2.Empty]: r"""Return a callable for the delete reaction method over gRPC. - Deletes a reaction to a message. Only unicode emojis are - supported. For an example, see `Delete a + Deletes a reaction to a message. For an example, see `Delete a reaction `__. Requires `user diff --git a/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/transports/grpc_asyncio.py b/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/transports/grpc_asyncio.py index 8512dd48da9f..66b02f83266e 100644 --- a/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/transports/grpc_asyncio.py +++ b/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/transports/grpc_asyncio.py @@ -1304,8 +1304,8 @@ def create_reaction( ) -> Callable[[gc_reaction.CreateReactionRequest], Awaitable[gc_reaction.Reaction]]: r"""Return a callable for the create reaction method over gRPC. - Creates a reaction and adds it to a message. Only unicode emojis - are supported. For an example, see `Add a reaction to a + Creates a reaction and adds it to a message. For an example, see + `Add a reaction to a message `__. Requires `user @@ -1368,8 +1368,7 @@ def delete_reaction( ) -> Callable[[reaction.DeleteReactionRequest], Awaitable[empty_pb2.Empty]]: r"""Return a callable for the delete reaction method over gRPC. - Deletes a reaction to a message. Only unicode emojis are - supported. For an example, see `Delete a + Deletes a reaction to a message. For an example, see `Delete a reaction `__. Requires `user diff --git a/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/transports/rest.py b/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/transports/rest.py index fe861b816728..01235e4d8427 100644 --- a/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/transports/rest.py +++ b/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/transports/rest.py @@ -326,12 +326,37 @@ def post_complete_import_space( ) -> space.CompleteImportSpaceResponse: """Post-rpc interceptor for complete_import_space - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_complete_import_space_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ChatService server but before - it is returned to user code. + it is returned to user code. This `post_complete_import_space` interceptor runs + before the `post_complete_import_space_with_metadata` interceptor. """ return response + def post_complete_import_space_with_metadata( + self, + response: space.CompleteImportSpaceResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + space.CompleteImportSpaceResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for complete_import_space + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ChatService server but before it is returned to user code. + + We recommend only using this `post_complete_import_space_with_metadata` + interceptor in new development instead of the `post_complete_import_space` interceptor. 
+ When both interceptors are used, this `post_complete_import_space_with_metadata` interceptor runs after the + `post_complete_import_space` interceptor. The (possibly modified) response returned by + `post_complete_import_space` will be passed to + `post_complete_import_space_with_metadata`. + """ + return response, metadata + def pre_create_membership( self, request: gc_membership.CreateMembershipRequest, @@ -351,12 +376,35 @@ def post_create_membership( ) -> gc_membership.Membership: """Post-rpc interceptor for create_membership - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_membership_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ChatService server but before - it is returned to user code. + it is returned to user code. This `post_create_membership` interceptor runs + before the `post_create_membership_with_metadata` interceptor. """ return response + def post_create_membership_with_metadata( + self, + response: gc_membership.Membership, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gc_membership.Membership, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_membership + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ChatService server but before it is returned to user code. + + We recommend only using this `post_create_membership_with_metadata` + interceptor in new development instead of the `post_create_membership` interceptor. + When both interceptors are used, this `post_create_membership_with_metadata` interceptor runs after the + `post_create_membership` interceptor. The (possibly modified) response returned by + `post_create_membership` will be passed to + `post_create_membership_with_metadata`. + """ + return response, metadata + def pre_create_message( self, request: gc_message.CreateMessageRequest, @@ -374,12 +422,35 @@ def pre_create_message( def post_create_message(self, response: gc_message.Message) -> gc_message.Message: """Post-rpc interceptor for create_message - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_message_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ChatService server but before - it is returned to user code. + it is returned to user code. This `post_create_message` interceptor runs + before the `post_create_message_with_metadata` interceptor. """ return response + def post_create_message_with_metadata( + self, + response: gc_message.Message, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gc_message.Message, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_message + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ChatService server but before it is returned to user code. + + We recommend only using this `post_create_message_with_metadata` + interceptor in new development instead of the `post_create_message` interceptor. + When both interceptors are used, this `post_create_message_with_metadata` interceptor runs after the + `post_create_message` interceptor. The (possibly modified) response returned by + `post_create_message` will be passed to + `post_create_message_with_metadata`. 
+ """ + return response, metadata + def pre_create_reaction( self, request: gc_reaction.CreateReactionRequest, @@ -399,12 +470,35 @@ def post_create_reaction( ) -> gc_reaction.Reaction: """Post-rpc interceptor for create_reaction - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_reaction_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ChatService server but before - it is returned to user code. + it is returned to user code. This `post_create_reaction` interceptor runs + before the `post_create_reaction_with_metadata` interceptor. """ return response + def post_create_reaction_with_metadata( + self, + response: gc_reaction.Reaction, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gc_reaction.Reaction, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_reaction + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ChatService server but before it is returned to user code. + + We recommend only using this `post_create_reaction_with_metadata` + interceptor in new development instead of the `post_create_reaction` interceptor. + When both interceptors are used, this `post_create_reaction_with_metadata` interceptor runs after the + `post_create_reaction` interceptor. The (possibly modified) response returned by + `post_create_reaction` will be passed to + `post_create_reaction_with_metadata`. + """ + return response, metadata + def pre_create_space( self, request: gc_space.CreateSpaceRequest, @@ -420,12 +514,35 @@ def pre_create_space( def post_create_space(self, response: gc_space.Space) -> gc_space.Space: """Post-rpc interceptor for create_space - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_space_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ChatService server but before - it is returned to user code. + it is returned to user code. This `post_create_space` interceptor runs + before the `post_create_space_with_metadata` interceptor. """ return response + def post_create_space_with_metadata( + self, + response: gc_space.Space, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gc_space.Space, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_space + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ChatService server but before it is returned to user code. + + We recommend only using this `post_create_space_with_metadata` + interceptor in new development instead of the `post_create_space` interceptor. + When both interceptors are used, this `post_create_space_with_metadata` interceptor runs after the + `post_create_space` interceptor. The (possibly modified) response returned by + `post_create_space` will be passed to + `post_create_space_with_metadata`. + """ + return response, metadata + def pre_delete_membership( self, request: membership.DeleteMembershipRequest, @@ -445,12 +562,35 @@ def post_delete_membership( ) -> membership.Membership: """Post-rpc interceptor for delete_membership - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_membership_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response after it is returned by the ChatService server but before - it is returned to user code. + it is returned to user code. This `post_delete_membership` interceptor runs + before the `post_delete_membership_with_metadata` interceptor. """ return response + def post_delete_membership_with_metadata( + self, + response: membership.Membership, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[membership.Membership, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_membership + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ChatService server but before it is returned to user code. + + We recommend only using this `post_delete_membership_with_metadata` + interceptor in new development instead of the `post_delete_membership` interceptor. + When both interceptors are used, this `post_delete_membership_with_metadata` interceptor runs after the + `post_delete_membership` interceptor. The (possibly modified) response returned by + `post_delete_membership` will be passed to + `post_delete_membership_with_metadata`. + """ + return response, metadata + def pre_delete_message( self, request: message.DeleteMessageRequest, @@ -502,12 +642,33 @@ def pre_find_direct_message( def post_find_direct_message(self, response: space.Space) -> space.Space: """Post-rpc interceptor for find_direct_message - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_find_direct_message_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ChatService server but before - it is returned to user code. + it is returned to user code. This `post_find_direct_message` interceptor runs + before the `post_find_direct_message_with_metadata` interceptor. """ return response + def post_find_direct_message_with_metadata( + self, response: space.Space, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[space.Space, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for find_direct_message + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ChatService server but before it is returned to user code. + + We recommend only using this `post_find_direct_message_with_metadata` + interceptor in new development instead of the `post_find_direct_message` interceptor. + When both interceptors are used, this `post_find_direct_message_with_metadata` interceptor runs after the + `post_find_direct_message` interceptor. The (possibly modified) response returned by + `post_find_direct_message` will be passed to + `post_find_direct_message_with_metadata`. + """ + return response, metadata + def pre_get_attachment( self, request: attachment.GetAttachmentRequest, @@ -527,12 +688,35 @@ def post_get_attachment( ) -> attachment.Attachment: """Post-rpc interceptor for get_attachment - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_attachment_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ChatService server but before - it is returned to user code. + it is returned to user code. This `post_get_attachment` interceptor runs + before the `post_get_attachment_with_metadata` interceptor. 
""" return response + def post_get_attachment_with_metadata( + self, + response: attachment.Attachment, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[attachment.Attachment, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_attachment + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ChatService server but before it is returned to user code. + + We recommend only using this `post_get_attachment_with_metadata` + interceptor in new development instead of the `post_get_attachment` interceptor. + When both interceptors are used, this `post_get_attachment_with_metadata` interceptor runs after the + `post_get_attachment` interceptor. The (possibly modified) response returned by + `post_get_attachment` will be passed to + `post_get_attachment_with_metadata`. + """ + return response, metadata + def pre_get_membership( self, request: membership.GetMembershipRequest, @@ -552,12 +736,35 @@ def post_get_membership( ) -> membership.Membership: """Post-rpc interceptor for get_membership - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_membership_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ChatService server but before - it is returned to user code. + it is returned to user code. This `post_get_membership` interceptor runs + before the `post_get_membership_with_metadata` interceptor. """ return response + def post_get_membership_with_metadata( + self, + response: membership.Membership, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[membership.Membership, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_membership + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ChatService server but before it is returned to user code. + + We recommend only using this `post_get_membership_with_metadata` + interceptor in new development instead of the `post_get_membership` interceptor. + When both interceptors are used, this `post_get_membership_with_metadata` interceptor runs after the + `post_get_membership` interceptor. The (possibly modified) response returned by + `post_get_membership` will be passed to + `post_get_membership_with_metadata`. + """ + return response, metadata + def pre_get_message( self, request: message.GetMessageRequest, @@ -573,12 +780,35 @@ def pre_get_message( def post_get_message(self, response: message.Message) -> message.Message: """Post-rpc interceptor for get_message - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_message_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ChatService server but before - it is returned to user code. + it is returned to user code. This `post_get_message` interceptor runs + before the `post_get_message_with_metadata` interceptor. """ return response + def post_get_message_with_metadata( + self, + response: message.Message, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[message.Message, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_message + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ChatService server but before it is returned to user code. 
+ + We recommend only using this `post_get_message_with_metadata` + interceptor in new development instead of the `post_get_message` interceptor. + When both interceptors are used, this `post_get_message_with_metadata` interceptor runs after the + `post_get_message` interceptor. The (possibly modified) response returned by + `post_get_message` will be passed to + `post_get_message_with_metadata`. + """ + return response, metadata + def pre_get_space( self, request: space.GetSpaceRequest, @@ -594,12 +824,33 @@ def pre_get_space( def post_get_space(self, response: space.Space) -> space.Space: """Post-rpc interceptor for get_space - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_space_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ChatService server but before - it is returned to user code. + it is returned to user code. This `post_get_space` interceptor runs + before the `post_get_space_with_metadata` interceptor. """ return response + def post_get_space_with_metadata( + self, response: space.Space, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[space.Space, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_space + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ChatService server but before it is returned to user code. + + We recommend only using this `post_get_space_with_metadata` + interceptor in new development instead of the `post_get_space` interceptor. + When both interceptors are used, this `post_get_space_with_metadata` interceptor runs after the + `post_get_space` interceptor. The (possibly modified) response returned by + `post_get_space` will be passed to + `post_get_space_with_metadata`. + """ + return response, metadata + def pre_get_space_event( self, request: space_event.GetSpaceEventRequest, @@ -619,12 +870,35 @@ def post_get_space_event( ) -> space_event.SpaceEvent: """Post-rpc interceptor for get_space_event - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_space_event_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ChatService server but before - it is returned to user code. + it is returned to user code. This `post_get_space_event` interceptor runs + before the `post_get_space_event_with_metadata` interceptor. """ return response + def post_get_space_event_with_metadata( + self, + response: space_event.SpaceEvent, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[space_event.SpaceEvent, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_space_event + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ChatService server but before it is returned to user code. + + We recommend only using this `post_get_space_event_with_metadata` + interceptor in new development instead of the `post_get_space_event` interceptor. + When both interceptors are used, this `post_get_space_event_with_metadata` interceptor runs after the + `post_get_space_event` interceptor. The (possibly modified) response returned by + `post_get_space_event` will be passed to + `post_get_space_event_with_metadata`. 
+ """ + return response, metadata + def pre_get_space_read_state( self, request: space_read_state.GetSpaceReadStateRequest, @@ -645,12 +919,37 @@ def post_get_space_read_state( ) -> space_read_state.SpaceReadState: """Post-rpc interceptor for get_space_read_state - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_space_read_state_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ChatService server but before - it is returned to user code. + it is returned to user code. This `post_get_space_read_state` interceptor runs + before the `post_get_space_read_state_with_metadata` interceptor. """ return response + def post_get_space_read_state_with_metadata( + self, + response: space_read_state.SpaceReadState, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + space_read_state.SpaceReadState, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for get_space_read_state + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ChatService server but before it is returned to user code. + + We recommend only using this `post_get_space_read_state_with_metadata` + interceptor in new development instead of the `post_get_space_read_state` interceptor. + When both interceptors are used, this `post_get_space_read_state_with_metadata` interceptor runs after the + `post_get_space_read_state` interceptor. The (possibly modified) response returned by + `post_get_space_read_state` will be passed to + `post_get_space_read_state_with_metadata`. + """ + return response, metadata + def pre_get_thread_read_state( self, request: thread_read_state.GetThreadReadStateRequest, @@ -671,12 +970,37 @@ def post_get_thread_read_state( ) -> thread_read_state.ThreadReadState: """Post-rpc interceptor for get_thread_read_state - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_thread_read_state_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ChatService server but before - it is returned to user code. + it is returned to user code. This `post_get_thread_read_state` interceptor runs + before the `post_get_thread_read_state_with_metadata` interceptor. """ return response + def post_get_thread_read_state_with_metadata( + self, + response: thread_read_state.ThreadReadState, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + thread_read_state.ThreadReadState, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for get_thread_read_state + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ChatService server but before it is returned to user code. + + We recommend only using this `post_get_thread_read_state_with_metadata` + interceptor in new development instead of the `post_get_thread_read_state` interceptor. + When both interceptors are used, this `post_get_thread_read_state_with_metadata` interceptor runs after the + `post_get_thread_read_state` interceptor. The (possibly modified) response returned by + `post_get_thread_read_state` will be passed to + `post_get_thread_read_state_with_metadata`. 
+ """ + return response, metadata + def pre_list_memberships( self, request: membership.ListMembershipsRequest, @@ -696,12 +1020,37 @@ def post_list_memberships( ) -> membership.ListMembershipsResponse: """Post-rpc interceptor for list_memberships - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_memberships_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ChatService server but before - it is returned to user code. + it is returned to user code. This `post_list_memberships` interceptor runs + before the `post_list_memberships_with_metadata` interceptor. """ return response + def post_list_memberships_with_metadata( + self, + response: membership.ListMembershipsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + membership.ListMembershipsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_memberships + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ChatService server but before it is returned to user code. + + We recommend only using this `post_list_memberships_with_metadata` + interceptor in new development instead of the `post_list_memberships` interceptor. + When both interceptors are used, this `post_list_memberships_with_metadata` interceptor runs after the + `post_list_memberships` interceptor. The (possibly modified) response returned by + `post_list_memberships` will be passed to + `post_list_memberships_with_metadata`. + """ + return response, metadata + def pre_list_messages( self, request: message.ListMessagesRequest, @@ -719,12 +1068,35 @@ def post_list_messages( ) -> message.ListMessagesResponse: """Post-rpc interceptor for list_messages - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_messages_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ChatService server but before - it is returned to user code. + it is returned to user code. This `post_list_messages` interceptor runs + before the `post_list_messages_with_metadata` interceptor. """ return response + def post_list_messages_with_metadata( + self, + response: message.ListMessagesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[message.ListMessagesResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_messages + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ChatService server but before it is returned to user code. + + We recommend only using this `post_list_messages_with_metadata` + interceptor in new development instead of the `post_list_messages` interceptor. + When both interceptors are used, this `post_list_messages_with_metadata` interceptor runs after the + `post_list_messages` interceptor. The (possibly modified) response returned by + `post_list_messages` will be passed to + `post_list_messages_with_metadata`. + """ + return response, metadata + def pre_list_reactions( self, request: reaction.ListReactionsRequest, @@ -742,12 +1114,35 @@ def post_list_reactions( ) -> reaction.ListReactionsResponse: """Post-rpc interceptor for list_reactions - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_reactions_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response after it is returned by the ChatService server but before - it is returned to user code. + it is returned to user code. This `post_list_reactions` interceptor runs + before the `post_list_reactions_with_metadata` interceptor. """ return response + def post_list_reactions_with_metadata( + self, + response: reaction.ListReactionsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[reaction.ListReactionsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_reactions + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ChatService server but before it is returned to user code. + + We recommend only using this `post_list_reactions_with_metadata` + interceptor in new development instead of the `post_list_reactions` interceptor. + When both interceptors are used, this `post_list_reactions_with_metadata` interceptor runs after the + `post_list_reactions` interceptor. The (possibly modified) response returned by + `post_list_reactions` will be passed to + `post_list_reactions_with_metadata`. + """ + return response, metadata + def pre_list_space_events( self, request: space_event.ListSpaceEventsRequest, @@ -767,12 +1162,37 @@ def post_list_space_events( ) -> space_event.ListSpaceEventsResponse: """Post-rpc interceptor for list_space_events - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_space_events_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ChatService server but before - it is returned to user code. + it is returned to user code. This `post_list_space_events` interceptor runs + before the `post_list_space_events_with_metadata` interceptor. """ return response + def post_list_space_events_with_metadata( + self, + response: space_event.ListSpaceEventsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + space_event.ListSpaceEventsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_space_events + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ChatService server but before it is returned to user code. + + We recommend only using this `post_list_space_events_with_metadata` + interceptor in new development instead of the `post_list_space_events` interceptor. + When both interceptors are used, this `post_list_space_events_with_metadata` interceptor runs after the + `post_list_space_events` interceptor. The (possibly modified) response returned by + `post_list_space_events` will be passed to + `post_list_space_events_with_metadata`. + """ + return response, metadata + def pre_list_spaces( self, request: space.ListSpacesRequest, @@ -790,12 +1210,35 @@ def post_list_spaces( ) -> space.ListSpacesResponse: """Post-rpc interceptor for list_spaces - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_spaces_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ChatService server but before - it is returned to user code. + it is returned to user code. This `post_list_spaces` interceptor runs + before the `post_list_spaces_with_metadata` interceptor. 
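When both the deprecated hook and its `_with_metadata` counterpart are overridden, the docstrings above spell out the ordering: the plain `post_*` hook runs first, and its (possibly modified) response is handed to `post_*_with_metadata` together with the response headers. A rough sketch, assuming the Chat interceptor class follows the usual `<Service>RestInterceptor` naming (as `BetaAnalyticsDataRestInterceptor` does in the tests earlier in this diff):

    from google.apps.chat_v1.services.chat_service import transports


    class AuditInterceptor(transports.ChatServiceRestInterceptor):
        # Deprecated hook: still runs first when both are overridden.
        def post_list_memberships(self, response):
            print("got", len(response.memberships), "memberships")
            return response

        # New hook: runs second, receiving the response returned by
        # post_list_memberships above plus the HTTP response headers.
        def post_list_memberships_with_metadata(self, response, metadata):
            print("response headers:", dict(metadata))
            return response, metadata

Wiring the subclass into the REST transport's `interceptor` argument follows the same assumed pattern as the analytics sketch above.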
""" return response + def post_list_spaces_with_metadata( + self, + response: space.ListSpacesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[space.ListSpacesResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_spaces + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ChatService server but before it is returned to user code. + + We recommend only using this `post_list_spaces_with_metadata` + interceptor in new development instead of the `post_list_spaces` interceptor. + When both interceptors are used, this `post_list_spaces_with_metadata` interceptor runs after the + `post_list_spaces` interceptor. The (possibly modified) response returned by + `post_list_spaces` will be passed to + `post_list_spaces_with_metadata`. + """ + return response, metadata + def pre_search_spaces( self, request: space.SearchSpacesRequest, @@ -813,12 +1256,35 @@ def post_search_spaces( ) -> space.SearchSpacesResponse: """Post-rpc interceptor for search_spaces - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_search_spaces_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ChatService server but before - it is returned to user code. + it is returned to user code. This `post_search_spaces` interceptor runs + before the `post_search_spaces_with_metadata` interceptor. """ return response + def post_search_spaces_with_metadata( + self, + response: space.SearchSpacesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[space.SearchSpacesResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for search_spaces + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ChatService server but before it is returned to user code. + + We recommend only using this `post_search_spaces_with_metadata` + interceptor in new development instead of the `post_search_spaces` interceptor. + When both interceptors are used, this `post_search_spaces_with_metadata` interceptor runs after the + `post_search_spaces` interceptor. The (possibly modified) response returned by + `post_search_spaces` will be passed to + `post_search_spaces_with_metadata`. + """ + return response, metadata + def pre_set_up_space( self, request: space_setup.SetUpSpaceRequest, @@ -834,12 +1300,33 @@ def pre_set_up_space( def post_set_up_space(self, response: space.Space) -> space.Space: """Post-rpc interceptor for set_up_space - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_set_up_space_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ChatService server but before - it is returned to user code. + it is returned to user code. This `post_set_up_space` interceptor runs + before the `post_set_up_space_with_metadata` interceptor. """ return response + def post_set_up_space_with_metadata( + self, response: space.Space, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[space.Space, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for set_up_space + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ChatService server but before it is returned to user code. 
+ + We recommend only using this `post_set_up_space_with_metadata` + interceptor in new development instead of the `post_set_up_space` interceptor. + When both interceptors are used, this `post_set_up_space_with_metadata` interceptor runs after the + `post_set_up_space` interceptor. The (possibly modified) response returned by + `post_set_up_space` will be passed to + `post_set_up_space_with_metadata`. + """ + return response, metadata + def pre_update_membership( self, request: gc_membership.UpdateMembershipRequest, @@ -859,12 +1346,35 @@ def post_update_membership( ) -> gc_membership.Membership: """Post-rpc interceptor for update_membership - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_membership_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ChatService server but before - it is returned to user code. + it is returned to user code. This `post_update_membership` interceptor runs + before the `post_update_membership_with_metadata` interceptor. """ return response + def post_update_membership_with_metadata( + self, + response: gc_membership.Membership, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gc_membership.Membership, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_membership + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ChatService server but before it is returned to user code. + + We recommend only using this `post_update_membership_with_metadata` + interceptor in new development instead of the `post_update_membership` interceptor. + When both interceptors are used, this `post_update_membership_with_metadata` interceptor runs after the + `post_update_membership` interceptor. The (possibly modified) response returned by + `post_update_membership` will be passed to + `post_update_membership_with_metadata`. + """ + return response, metadata + def pre_update_message( self, request: gc_message.UpdateMessageRequest, @@ -882,12 +1392,35 @@ def pre_update_message( def post_update_message(self, response: gc_message.Message) -> gc_message.Message: """Post-rpc interceptor for update_message - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_message_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ChatService server but before - it is returned to user code. + it is returned to user code. This `post_update_message` interceptor runs + before the `post_update_message_with_metadata` interceptor. """ return response + def post_update_message_with_metadata( + self, + response: gc_message.Message, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gc_message.Message, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_message + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ChatService server but before it is returned to user code. + + We recommend only using this `post_update_message_with_metadata` + interceptor in new development instead of the `post_update_message` interceptor. + When both interceptors are used, this `post_update_message_with_metadata` interceptor runs after the + `post_update_message` interceptor. The (possibly modified) response returned by + `post_update_message` will be passed to + `post_update_message_with_metadata`. 
+ """ + return response, metadata + def pre_update_space( self, request: gc_space.UpdateSpaceRequest, @@ -903,12 +1436,35 @@ def pre_update_space( def post_update_space(self, response: gc_space.Space) -> gc_space.Space: """Post-rpc interceptor for update_space - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_space_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ChatService server but before - it is returned to user code. + it is returned to user code. This `post_update_space` interceptor runs + before the `post_update_space_with_metadata` interceptor. """ return response + def post_update_space_with_metadata( + self, + response: gc_space.Space, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gc_space.Space, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_space + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ChatService server but before it is returned to user code. + + We recommend only using this `post_update_space_with_metadata` + interceptor in new development instead of the `post_update_space` interceptor. + When both interceptors are used, this `post_update_space_with_metadata` interceptor runs after the + `post_update_space` interceptor. The (possibly modified) response returned by + `post_update_space` will be passed to + `post_update_space_with_metadata`. + """ + return response, metadata + def pre_update_space_read_state( self, request: gc_space_read_state.UpdateSpaceReadStateRequest, @@ -929,12 +1485,37 @@ def post_update_space_read_state( ) -> gc_space_read_state.SpaceReadState: """Post-rpc interceptor for update_space_read_state - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_space_read_state_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ChatService server but before - it is returned to user code. + it is returned to user code. This `post_update_space_read_state` interceptor runs + before the `post_update_space_read_state_with_metadata` interceptor. """ return response + def post_update_space_read_state_with_metadata( + self, + response: gc_space_read_state.SpaceReadState, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + gc_space_read_state.SpaceReadState, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for update_space_read_state + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ChatService server but before it is returned to user code. + + We recommend only using this `post_update_space_read_state_with_metadata` + interceptor in new development instead of the `post_update_space_read_state` interceptor. + When both interceptors are used, this `post_update_space_read_state_with_metadata` interceptor runs after the + `post_update_space_read_state` interceptor. The (possibly modified) response returned by + `post_update_space_read_state` will be passed to + `post_update_space_read_state_with_metadata`. + """ + return response, metadata + def pre_upload_attachment( self, request: attachment.UploadAttachmentRequest, @@ -954,12 +1535,37 @@ def post_upload_attachment( ) -> attachment.UploadAttachmentResponse: """Post-rpc interceptor for upload_attachment - Override in a subclass to manipulate the response + DEPRECATED. 
Please use the `post_upload_attachment_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ChatService server but before - it is returned to user code. + it is returned to user code. This `post_upload_attachment` interceptor runs + before the `post_upload_attachment_with_metadata` interceptor. """ return response + def post_upload_attachment_with_metadata( + self, + response: attachment.UploadAttachmentResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + attachment.UploadAttachmentResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for upload_attachment + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ChatService server but before it is returned to user code. + + We recommend only using this `post_upload_attachment_with_metadata` + interceptor in new development instead of the `post_upload_attachment` interceptor. + When both interceptors are used, this `post_upload_attachment_with_metadata` interceptor runs after the + `post_upload_attachment` interceptor. The (possibly modified) response returned by + `post_upload_attachment` will be passed to + `post_upload_attachment_with_metadata`. + """ + return response, metadata + @dataclasses.dataclass class ChatServiceRestStub: @@ -1176,6 +1782,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_complete_import_space(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_complete_import_space_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1331,6 +1941,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_membership(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_membership_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1481,6 +2095,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_message(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_message_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1627,6 +2245,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_reaction(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_reaction_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1784,6 +2406,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_space(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_space_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1931,6 +2557,10 @@ def __call__( 
json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_membership(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_membership_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2402,6 +3032,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_find_direct_message(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_find_direct_message_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2544,6 +3178,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_attachment(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_attachment_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2691,6 +3329,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_membership(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_membership_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2835,6 +3477,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_message(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_message_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2981,6 +3627,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_space(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_space_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3128,6 +3778,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_space_event(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_space_event_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3274,6 +3928,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_space_read_state(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_space_read_state_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3420,6 +4078,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_thread_read_state(resp) + response_metadata = [(k, str(v)) for k, v in 
response.headers.items()] + resp, _ = self._interceptor.post_get_thread_read_state_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3567,6 +4229,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_memberships(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_memberships_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3716,6 +4382,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_messages(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_messages_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3858,6 +4528,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_reactions(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_reactions_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4003,6 +4677,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_space_events(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_space_events_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4152,6 +4830,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_spaces(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_spaces_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4300,6 +4982,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_search_spaces(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_search_spaces_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4455,6 +5141,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_set_up_space(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_set_up_space_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4608,6 +5298,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_membership(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_membership_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # 
pragma: NO COVER @@ -4758,6 +5452,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_message(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_message_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4914,6 +5612,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_space(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_space_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -5066,6 +5768,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_space_read_state(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_space_read_state_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -5216,6 +5922,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_upload_attachment(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_upload_attachment_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-apps-chat/google/apps/chat_v1/types/__init__.py b/packages/google-apps-chat/google/apps/chat_v1/types/__init__.py index d510a888bd89..2c5d4092148d 100644 --- a/packages/google-apps-chat/google/apps/chat_v1/types/__init__.py +++ b/packages/google-apps-chat/google/apps/chat_v1/types/__init__.py @@ -18,6 +18,7 @@ Annotation, AnnotationType, ChatSpaceLinkData, + CustomEmojiMetadata, DriveLinkData, RichLinkMetadata, SlashCommandMetadata, @@ -127,6 +128,7 @@ "ActionStatus", "Annotation", "ChatSpaceLinkData", + "CustomEmojiMetadata", "DriveLinkData", "RichLinkMetadata", "SlashCommandMetadata", diff --git a/packages/google-apps-chat/google/apps/chat_v1/types/annotation.py b/packages/google-apps-chat/google/apps/chat_v1/types/annotation.py index ee3267d68907..36c39de076cb 100644 --- a/packages/google-apps-chat/google/apps/chat_v1/types/annotation.py +++ b/packages/google-apps-chat/google/apps/chat_v1/types/annotation.py @@ -19,7 +19,7 @@ import proto # type: ignore -from google.apps.chat_v1.types import attachment +from google.apps.chat_v1.types import attachment, reaction from google.apps.chat_v1.types import user as gc_user __protobuf__ = proto.module( @@ -30,6 +30,7 @@ "UserMentionMetadata", "SlashCommandMetadata", "RichLinkMetadata", + "CustomEmojiMetadata", "DriveLinkData", "ChatSpaceLinkData", }, @@ -48,11 +49,14 @@ class AnnotationType(proto.Enum): A slash command is invoked. RICH_LINK (3): A rich link annotation. + CUSTOM_EMOJI (4): + A custom emoji annotation. """ ANNOTATION_TYPE_UNSPECIFIED = 0 USER_MENTION = 1 SLASH_COMMAND = 2 RICH_LINK = 3 + CUSTOM_EMOJI = 4 class Annotation(proto.Message): @@ -115,6 +119,10 @@ class Annotation(proto.Message): rich_link_metadata (google.apps.chat_v1.types.RichLinkMetadata): The metadata for a rich link. 
+ This field is a member of `oneof`_ ``metadata``. + custom_emoji_metadata (google.apps.chat_v1.types.CustomEmojiMetadata): + The metadata for a custom emoji. + This field is a member of `oneof`_ ``metadata``. """ @@ -150,6 +158,12 @@ class Annotation(proto.Message): oneof="metadata", message="RichLinkMetadata", ) + custom_emoji_metadata: "CustomEmojiMetadata" = proto.Field( + proto.MESSAGE, + number=7, + oneof="metadata", + message="CustomEmojiMetadata", + ) class UserMentionMetadata(proto.Message): @@ -309,6 +323,21 @@ class RichLinkType(proto.Enum): ) +class CustomEmojiMetadata(proto.Message): + r"""Annotation metadata for custom emoji. + + Attributes: + custom_emoji (google.apps.chat_v1.types.CustomEmoji): + The custom emoji. + """ + + custom_emoji: reaction.CustomEmoji = proto.Field( + proto.MESSAGE, + number=1, + message=reaction.CustomEmoji, + ) + + class DriveLinkData(proto.Message): r"""Data for Google Drive links. diff --git a/packages/google-apps-chat/google/apps/chat_v1/types/reaction.py b/packages/google-apps-chat/google/apps/chat_v1/types/reaction.py index 9b2d3c05c5e0..80a247b89e1d 100644 --- a/packages/google-apps-chat/google/apps/chat_v1/types/reaction.py +++ b/packages/google-apps-chat/google/apps/chat_v1/types/reaction.py @@ -85,7 +85,7 @@ class Emoji(proto.Message): This field is a member of `oneof`_ ``content``. custom_emoji (google.apps.chat_v1.types.CustomEmoji): - Output only. A custom emoji. + A custom emoji. This field is a member of `oneof`_ ``content``. """ diff --git a/packages/google-apps-chat/google/apps/chat_v1/types/slash_command.py b/packages/google-apps-chat/google/apps/chat_v1/types/slash_command.py index 0ded32898f02..726bbb5c9c52 100644 --- a/packages/google-apps-chat/google/apps/chat_v1/types/slash_command.py +++ b/packages/google-apps-chat/google/apps/chat_v1/types/slash_command.py @@ -29,7 +29,7 @@ class SlashCommand(proto.Message): r"""A `slash - command `__ + command `__ in Google Chat. Attributes: diff --git a/packages/google-apps-chat/noxfile.py b/packages/google-apps-chat/noxfile.py index a9ceef47133c..0acc836b384e 100644 --- a/packages/google-apps-chat/noxfile.py +++ b/packages/google-apps-chat/noxfile.py @@ -382,20 +382,29 @@ def docfx(session): ["python", "upb", "cpp"], ) def prerelease_deps(session, protobuf_implementation): - """Run all tests with prerelease versions of dependencies installed.""" + """ + Run all tests with pre-release versions of dependencies installed + rather than the standard non pre-release versions. + Pre-releases versions can be installed using + `pip install --pre `. + """ if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): session.skip("cpp implementation is not supported in python 3.11+") # Install all dependencies - session.install("-e", ".[all, tests, tracing]") + session.install("-e", ".") + unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES + # Install dependencies for the unit test environment session.install(*unit_deps_all) + system_deps_all = ( SYSTEM_TEST_STANDARD_DEPENDENCIES + SYSTEM_TEST_EXTERNAL_DEPENDENCIES + SYSTEM_TEST_EXTRAS ) + # Install dependencies for the system test environment session.install(*system_deps_all) # Because we test minimum dependency versions on the minimum Python @@ -417,6 +426,7 @@ def prerelease_deps(session, protobuf_implementation): ) ] + # Install dependencies specified in `testing/constraints-X.txt`. 
session.install(*constraints_deps) prerel_deps = [ @@ -458,3 +468,70 @@ def prerelease_deps(session, protobuf_implementation): "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, }, ) + + +@nox.session(python="3.13") +@nox.parametrize( + "protobuf_implementation", + ["python", "upb"], +) +def core_deps_from_source(session, protobuf_implementation): + """Run all tests with local versions of core dependencies installed, + rather than pulling core dependencies from PyPI. + """ + + # Install all dependencies + session.install(".") + + # Install dependencies for the unit test environment + unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES + session.install(*unit_deps_all) + + # Install dependencies for the system test environment + system_deps_all = ( + SYSTEM_TEST_STANDARD_DEPENDENCIES + + SYSTEM_TEST_EXTERNAL_DEPENDENCIES + + SYSTEM_TEST_EXTRAS + ) + session.install(*system_deps_all) + + # Because we test minimum dependency versions on the minimum Python + # version, the first version we test with in the unit tests sessions has a + # constraints file containing all dependencies and extras that should be installed. + with open( + CURRENT_DIRECTORY + / "testing" + / f"constraints-{UNIT_TEST_PYTHON_VERSIONS[0]}.txt", + encoding="utf-8", + ) as constraints_file: + constraints_text = constraints_file.read() + + # Ignore leading whitespace and comment lines. + constraints_deps = [ + match.group(1) + for match in re.finditer( + r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE + ) + ] + + # Install dependencies specified in `testing/constraints-X.txt`. + session.install(*constraints_deps) + + core_dependencies_from_source = [ + "google-api-core @ git+https://github.com/googleapis/python-api-core.git", + "google-auth @ git+https://github.com/googleapis/google-auth-library-python.git", + f"{CURRENT_DIRECTORY}/../googleapis-common-protos", + f"{CURRENT_DIRECTORY}/../grpc-google-iam-v1", + "proto-plus @ git+https://github.com/googleapis/proto-plus-python.git", + ] + + for dep in core_dependencies_from_source: + session.install(dep, "--ignore-installed", "--no-deps") + + session.run( + "py.test", + "tests/unit", + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, + ) diff --git a/packages/google-apps-chat/samples/generated_samples/snippet_metadata_google.chat.v1.json b/packages/google-apps-chat/samples/generated_samples/snippet_metadata_google.chat.v1.json index 0d0edfee7fb2..a066eea48312 100644 --- a/packages/google-apps-chat/samples/generated_samples/snippet_metadata_google.chat.v1.json +++ b/packages/google-apps-chat/samples/generated_samples/snippet_metadata_google.chat.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-apps-chat", - "version": "0.2.0" + "version": "0.2.2" }, "snippets": [ { diff --git a/packages/google-apps-chat/tests/unit/gapic/chat_v1/test_chat_service.py b/packages/google-apps-chat/tests/unit/gapic/chat_v1/test_chat_service.py index af05fac42229..8583adbfaaae 100644 --- a/packages/google-apps-chat/tests/unit/gapic/chat_v1/test_chat_service.py +++ b/packages/google-apps-chat/tests/unit/gapic/chat_v1/test_chat_service.py @@ -89,6 +89,13 @@ from google.apps.chat_v1.types import space_read_state from google.apps.chat_v1.types import space_read_state as gc_space_read_state +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async 
def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -328,6 +335,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = ChatServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = ChatServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -18696,6 +18746,7 @@ def test_create_message_rest_call_success(request_type): "message": "message_value", }, }, + "custom_emoji_metadata": {"custom_emoji": {"uid": "uid_value"}}, } ], "thread": {"name": "name_value", "thread_key": "thread_key_value"}, @@ -18774,10 +18825,7 @@ def test_create_message_rest_call_success(request_type): "client_assigned_message_id": "client_assigned_message_id_value", "emoji_reaction_summaries": [ { - "emoji": { - "unicode": "unicode_value", - "custom_emoji": {"uid": "uid_value"}, - }, + "emoji": {"unicode": "unicode_value", "custom_emoji": {}}, "reaction_count": 1501, } ], @@ -18909,10 +18957,13 @@ def test_create_message_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ChatServiceRestInterceptor, "post_create_message" ) as post, mock.patch.object( + transports.ChatServiceRestInterceptor, "post_create_message_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ChatServiceRestInterceptor, "pre_create_message" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gc_message.CreateMessageRequest.pb( gc_message.CreateMessageRequest() ) @@ -18936,6 +18987,7 @@ def test_create_message_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gc_message.Message() + post_with_metadata.return_value = gc_message.Message(), metadata client.create_message( request, @@ -18947,6 +18999,7 @@ def test_create_message_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_messages_rest_bad_request(request_type=message.ListMessagesRequest): @@ -19029,10 +19082,13 @@ def test_list_messages_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ChatServiceRestInterceptor, "post_list_messages" ) as post, mock.patch.object( + transports.ChatServiceRestInterceptor, 
"post_list_messages_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ChatServiceRestInterceptor, "pre_list_messages" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = message.ListMessagesRequest.pb(message.ListMessagesRequest()) transcode.return_value = { "method": "post", @@ -19056,6 +19112,7 @@ def test_list_messages_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = message.ListMessagesResponse() + post_with_metadata.return_value = message.ListMessagesResponse(), metadata client.list_messages( request, @@ -19067,6 +19124,7 @@ def test_list_messages_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_memberships_rest_bad_request( @@ -19151,10 +19209,13 @@ def test_list_memberships_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ChatServiceRestInterceptor, "post_list_memberships" ) as post, mock.patch.object( + transports.ChatServiceRestInterceptor, "post_list_memberships_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ChatServiceRestInterceptor, "pre_list_memberships" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = membership.ListMembershipsRequest.pb( membership.ListMembershipsRequest() ) @@ -19180,6 +19241,7 @@ def test_list_memberships_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = membership.ListMembershipsResponse() + post_with_metadata.return_value = membership.ListMembershipsResponse(), metadata client.list_memberships( request, @@ -19191,6 +19253,7 @@ def test_list_memberships_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_membership_rest_bad_request(request_type=membership.GetMembershipRequest): @@ -19277,10 +19340,13 @@ def test_get_membership_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ChatServiceRestInterceptor, "post_get_membership" ) as post, mock.patch.object( + transports.ChatServiceRestInterceptor, "post_get_membership_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ChatServiceRestInterceptor, "pre_get_membership" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = membership.GetMembershipRequest.pb( membership.GetMembershipRequest() ) @@ -19304,6 +19370,7 @@ def test_get_membership_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = membership.Membership() + post_with_metadata.return_value = membership.Membership(), metadata client.get_membership( request, @@ -19315,6 +19382,7 @@ def test_get_membership_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_message_rest_bad_request(request_type=message.GetMessageRequest): @@ -19409,10 +19477,13 @@ def test_get_message_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ChatServiceRestInterceptor, "post_get_message" ) as post, mock.patch.object( + transports.ChatServiceRestInterceptor, "post_get_message_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ChatServiceRestInterceptor, "pre_get_message" ) as pre: pre.assert_not_called() 
post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = message.GetMessageRequest.pb(message.GetMessageRequest()) transcode.return_value = { "method": "post", @@ -19434,6 +19505,7 @@ def test_get_message_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = message.Message() + post_with_metadata.return_value = message.Message(), metadata client.get_message( request, @@ -19445,6 +19517,7 @@ def test_get_message_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_message_rest_bad_request(request_type=gc_message.UpdateMessageRequest): @@ -19778,6 +19851,7 @@ def test_update_message_rest_call_success(request_type): "message": "message_value", }, }, + "custom_emoji_metadata": {"custom_emoji": {"uid": "uid_value"}}, } ], "thread": {"name": "name_value", "thread_key": "thread_key_value"}, @@ -19856,10 +19930,7 @@ def test_update_message_rest_call_success(request_type): "client_assigned_message_id": "client_assigned_message_id_value", "emoji_reaction_summaries": [ { - "emoji": { - "unicode": "unicode_value", - "custom_emoji": {"uid": "uid_value"}, - }, + "emoji": {"unicode": "unicode_value", "custom_emoji": {}}, "reaction_count": 1501, } ], @@ -19991,10 +20062,13 @@ def test_update_message_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ChatServiceRestInterceptor, "post_update_message" ) as post, mock.patch.object( + transports.ChatServiceRestInterceptor, "post_update_message_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ChatServiceRestInterceptor, "pre_update_message" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gc_message.UpdateMessageRequest.pb( gc_message.UpdateMessageRequest() ) @@ -20018,6 +20092,7 @@ def test_update_message_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gc_message.Message() + post_with_metadata.return_value = gc_message.Message(), metadata client.update_message( request, @@ -20029,6 +20104,7 @@ def test_update_message_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_message_rest_bad_request(request_type=message.DeleteMessageRequest): @@ -20226,10 +20302,13 @@ def test_get_attachment_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ChatServiceRestInterceptor, "post_get_attachment" ) as post, mock.patch.object( + transports.ChatServiceRestInterceptor, "post_get_attachment_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ChatServiceRestInterceptor, "pre_get_attachment" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = attachment.GetAttachmentRequest.pb( attachment.GetAttachmentRequest() ) @@ -20253,6 +20332,7 @@ def test_get_attachment_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = attachment.Attachment() + post_with_metadata.return_value = attachment.Attachment(), metadata client.get_attachment( request, @@ -20264,6 +20344,7 @@ def test_get_attachment_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_upload_attachment_rest_bad_request( @@ -20345,10 +20426,13 @@ def 
test_upload_attachment_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ChatServiceRestInterceptor, "post_upload_attachment" ) as post, mock.patch.object( + transports.ChatServiceRestInterceptor, "post_upload_attachment_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ChatServiceRestInterceptor, "pre_upload_attachment" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = attachment.UploadAttachmentRequest.pb( attachment.UploadAttachmentRequest() ) @@ -20374,6 +20458,10 @@ def test_upload_attachment_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = attachment.UploadAttachmentResponse() + post_with_metadata.return_value = ( + attachment.UploadAttachmentResponse(), + metadata, + ) client.upload_attachment( request, @@ -20385,6 +20473,7 @@ def test_upload_attachment_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_spaces_rest_bad_request(request_type=space.ListSpacesRequest): @@ -20467,10 +20556,13 @@ def test_list_spaces_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ChatServiceRestInterceptor, "post_list_spaces" ) as post, mock.patch.object( + transports.ChatServiceRestInterceptor, "post_list_spaces_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ChatServiceRestInterceptor, "pre_list_spaces" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = space.ListSpacesRequest.pb(space.ListSpacesRequest()) transcode.return_value = { "method": "post", @@ -20492,6 +20584,7 @@ def test_list_spaces_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = space.ListSpacesResponse() + post_with_metadata.return_value = space.ListSpacesResponse(), metadata client.list_spaces( request, @@ -20503,6 +20596,7 @@ def test_list_spaces_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_search_spaces_rest_bad_request(request_type=space.SearchSpacesRequest): @@ -20587,10 +20681,13 @@ def test_search_spaces_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ChatServiceRestInterceptor, "post_search_spaces" ) as post, mock.patch.object( + transports.ChatServiceRestInterceptor, "post_search_spaces_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ChatServiceRestInterceptor, "pre_search_spaces" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = space.SearchSpacesRequest.pb(space.SearchSpacesRequest()) transcode.return_value = { "method": "post", @@ -20612,6 +20709,7 @@ def test_search_spaces_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = space.SearchSpacesResponse() + post_with_metadata.return_value = space.SearchSpacesResponse(), metadata client.search_spaces( request, @@ -20623,6 +20721,7 @@ def test_search_spaces_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_space_rest_bad_request(request_type=space.GetSpaceRequest): @@ -20731,10 +20830,13 @@ def test_get_space_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( 
transports.ChatServiceRestInterceptor, "post_get_space" ) as post, mock.patch.object( + transports.ChatServiceRestInterceptor, "post_get_space_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ChatServiceRestInterceptor, "pre_get_space" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = space.GetSpaceRequest.pb(space.GetSpaceRequest()) transcode.return_value = { "method": "post", @@ -20756,6 +20858,7 @@ def test_get_space_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = space.Space() + post_with_metadata.return_value = space.Space(), metadata client.get_space( request, @@ -20767,6 +20870,7 @@ def test_get_space_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_space_rest_bad_request(request_type=gc_space.CreateSpaceRequest): @@ -20982,10 +21086,13 @@ def test_create_space_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ChatServiceRestInterceptor, "post_create_space" ) as post, mock.patch.object( + transports.ChatServiceRestInterceptor, "post_create_space_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ChatServiceRestInterceptor, "pre_create_space" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gc_space.CreateSpaceRequest.pb(gc_space.CreateSpaceRequest()) transcode.return_value = { "method": "post", @@ -21007,6 +21114,7 @@ def test_create_space_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gc_space.Space() + post_with_metadata.return_value = gc_space.Space(), metadata client.create_space( request, @@ -21018,6 +21126,7 @@ def test_create_space_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_set_up_space_rest_bad_request(request_type=space_setup.SetUpSpaceRequest): @@ -21126,10 +21235,13 @@ def test_set_up_space_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ChatServiceRestInterceptor, "post_set_up_space" ) as post, mock.patch.object( + transports.ChatServiceRestInterceptor, "post_set_up_space_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ChatServiceRestInterceptor, "pre_set_up_space" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = space_setup.SetUpSpaceRequest.pb(space_setup.SetUpSpaceRequest()) transcode.return_value = { "method": "post", @@ -21151,6 +21263,7 @@ def test_set_up_space_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = space.Space() + post_with_metadata.return_value = space.Space(), metadata client.set_up_space( request, @@ -21162,6 +21275,7 @@ def test_set_up_space_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_space_rest_bad_request(request_type=gc_space.UpdateSpaceRequest): @@ -21377,10 +21491,13 @@ def test_update_space_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ChatServiceRestInterceptor, "post_update_space" ) as post, mock.patch.object( + transports.ChatServiceRestInterceptor, "post_update_space_with_metadata" + ) as post_with_metadata, mock.patch.object( 
transports.ChatServiceRestInterceptor, "pre_update_space" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gc_space.UpdateSpaceRequest.pb(gc_space.UpdateSpaceRequest()) transcode.return_value = { "method": "post", @@ -21402,6 +21519,7 @@ def test_update_space_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gc_space.Space() + post_with_metadata.return_value = gc_space.Space(), metadata client.update_space( request, @@ -21413,6 +21531,7 @@ def test_update_space_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_space_rest_bad_request(request_type=space.DeleteSpaceRequest): @@ -21599,10 +21718,14 @@ def test_complete_import_space_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ChatServiceRestInterceptor, "post_complete_import_space" ) as post, mock.patch.object( + transports.ChatServiceRestInterceptor, + "post_complete_import_space_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ChatServiceRestInterceptor, "pre_complete_import_space" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = space.CompleteImportSpaceRequest.pb( space.CompleteImportSpaceRequest() ) @@ -21628,6 +21751,7 @@ def test_complete_import_space_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = space.CompleteImportSpaceResponse() + post_with_metadata.return_value = space.CompleteImportSpaceResponse(), metadata client.complete_import_space( request, @@ -21639,6 +21763,7 @@ def test_complete_import_space_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_find_direct_message_rest_bad_request( @@ -21749,10 +21874,13 @@ def test_find_direct_message_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ChatServiceRestInterceptor, "post_find_direct_message" ) as post, mock.patch.object( + transports.ChatServiceRestInterceptor, "post_find_direct_message_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ChatServiceRestInterceptor, "pre_find_direct_message" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = space.FindDirectMessageRequest.pb(space.FindDirectMessageRequest()) transcode.return_value = { "method": "post", @@ -21774,6 +21902,7 @@ def test_find_direct_message_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = space.Space() + post_with_metadata.return_value = space.Space(), metadata client.find_direct_message( request, @@ -21785,6 +21914,7 @@ def test_find_direct_message_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_membership_rest_bad_request( @@ -21955,10 +22085,13 @@ def test_create_membership_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ChatServiceRestInterceptor, "post_create_membership" ) as post, mock.patch.object( + transports.ChatServiceRestInterceptor, "post_create_membership_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ChatServiceRestInterceptor, "pre_create_membership" ) as pre: pre.assert_not_called() post.assert_not_called() + 
post_with_metadata.assert_not_called() pb_message = gc_membership.CreateMembershipRequest.pb( gc_membership.CreateMembershipRequest() ) @@ -21982,6 +22115,7 @@ def test_create_membership_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gc_membership.Membership() + post_with_metadata.return_value = gc_membership.Membership(), metadata client.create_membership( request, @@ -21993,6 +22127,7 @@ def test_create_membership_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_membership_rest_bad_request( @@ -22163,10 +22298,13 @@ def test_update_membership_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ChatServiceRestInterceptor, "post_update_membership" ) as post, mock.patch.object( + transports.ChatServiceRestInterceptor, "post_update_membership_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ChatServiceRestInterceptor, "pre_update_membership" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gc_membership.UpdateMembershipRequest.pb( gc_membership.UpdateMembershipRequest() ) @@ -22190,6 +22328,7 @@ def test_update_membership_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gc_membership.Membership() + post_with_metadata.return_value = gc_membership.Membership(), metadata client.update_membership( request, @@ -22201,6 +22340,7 @@ def test_update_membership_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_membership_rest_bad_request( @@ -22289,10 +22429,13 @@ def test_delete_membership_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ChatServiceRestInterceptor, "post_delete_membership" ) as post, mock.patch.object( + transports.ChatServiceRestInterceptor, "post_delete_membership_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ChatServiceRestInterceptor, "pre_delete_membership" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = membership.DeleteMembershipRequest.pb( membership.DeleteMembershipRequest() ) @@ -22316,6 +22459,7 @@ def test_delete_membership_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = membership.Membership() + post_with_metadata.return_value = membership.Membership(), metadata client.delete_membership( request, @@ -22327,6 +22471,7 @@ def test_delete_membership_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_reaction_rest_bad_request( @@ -22489,10 +22634,13 @@ def test_create_reaction_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ChatServiceRestInterceptor, "post_create_reaction" ) as post, mock.patch.object( + transports.ChatServiceRestInterceptor, "post_create_reaction_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ChatServiceRestInterceptor, "pre_create_reaction" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gc_reaction.CreateReactionRequest.pb( gc_reaction.CreateReactionRequest() ) @@ -22516,6 +22664,7 @@ def test_create_reaction_rest_interceptors(null_interceptor): ] pre.return_value = 
request, metadata post.return_value = gc_reaction.Reaction() + post_with_metadata.return_value = gc_reaction.Reaction(), metadata client.create_reaction( request, @@ -22527,6 +22676,7 @@ def test_create_reaction_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_reactions_rest_bad_request(request_type=reaction.ListReactionsRequest): @@ -22609,10 +22759,13 @@ def test_list_reactions_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ChatServiceRestInterceptor, "post_list_reactions" ) as post, mock.patch.object( + transports.ChatServiceRestInterceptor, "post_list_reactions_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ChatServiceRestInterceptor, "pre_list_reactions" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = reaction.ListReactionsRequest.pb(reaction.ListReactionsRequest()) transcode.return_value = { "method": "post", @@ -22636,6 +22789,7 @@ def test_list_reactions_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = reaction.ListReactionsResponse() + post_with_metadata.return_value = reaction.ListReactionsResponse(), metadata client.list_reactions( request, @@ -22647,6 +22801,7 @@ def test_list_reactions_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_reaction_rest_bad_request(request_type=reaction.DeleteReactionRequest): @@ -22836,10 +22991,13 @@ def test_get_space_read_state_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ChatServiceRestInterceptor, "post_get_space_read_state" ) as post, mock.patch.object( + transports.ChatServiceRestInterceptor, "post_get_space_read_state_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ChatServiceRestInterceptor, "pre_get_space_read_state" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = space_read_state.GetSpaceReadStateRequest.pb( space_read_state.GetSpaceReadStateRequest() ) @@ -22865,6 +23023,7 @@ def test_get_space_read_state_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = space_read_state.SpaceReadState() + post_with_metadata.return_value = space_read_state.SpaceReadState(), metadata client.get_space_read_state( request, @@ -22876,6 +23035,7 @@ def test_get_space_read_state_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_space_read_state_rest_bad_request( @@ -23037,10 +23197,14 @@ def test_update_space_read_state_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ChatServiceRestInterceptor, "post_update_space_read_state" ) as post, mock.patch.object( + transports.ChatServiceRestInterceptor, + "post_update_space_read_state_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ChatServiceRestInterceptor, "pre_update_space_read_state" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gc_space_read_state.UpdateSpaceReadStateRequest.pb( gc_space_read_state.UpdateSpaceReadStateRequest() ) @@ -23066,6 +23230,7 @@ def test_update_space_read_state_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata 
post.return_value = gc_space_read_state.SpaceReadState() + post_with_metadata.return_value = gc_space_read_state.SpaceReadState(), metadata client.update_space_read_state( request, @@ -23077,6 +23242,7 @@ def test_update_space_read_state_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_thread_read_state_rest_bad_request( @@ -23165,10 +23331,14 @@ def test_get_thread_read_state_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ChatServiceRestInterceptor, "post_get_thread_read_state" ) as post, mock.patch.object( + transports.ChatServiceRestInterceptor, + "post_get_thread_read_state_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ChatServiceRestInterceptor, "pre_get_thread_read_state" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = thread_read_state.GetThreadReadStateRequest.pb( thread_read_state.GetThreadReadStateRequest() ) @@ -23194,6 +23364,7 @@ def test_get_thread_read_state_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = thread_read_state.ThreadReadState() + post_with_metadata.return_value = thread_read_state.ThreadReadState(), metadata client.get_thread_read_state( request, @@ -23205,6 +23376,7 @@ def test_get_thread_read_state_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_space_event_rest_bad_request( @@ -23291,10 +23463,13 @@ def test_get_space_event_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ChatServiceRestInterceptor, "post_get_space_event" ) as post, mock.patch.object( + transports.ChatServiceRestInterceptor, "post_get_space_event_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ChatServiceRestInterceptor, "pre_get_space_event" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = space_event.GetSpaceEventRequest.pb( space_event.GetSpaceEventRequest() ) @@ -23318,6 +23493,7 @@ def test_get_space_event_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = space_event.SpaceEvent() + post_with_metadata.return_value = space_event.SpaceEvent(), metadata client.get_space_event( request, @@ -23329,6 +23505,7 @@ def test_get_space_event_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_space_events_rest_bad_request( @@ -23413,10 +23590,13 @@ def test_list_space_events_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ChatServiceRestInterceptor, "post_list_space_events" ) as post, mock.patch.object( + transports.ChatServiceRestInterceptor, "post_list_space_events_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ChatServiceRestInterceptor, "pre_list_space_events" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = space_event.ListSpaceEventsRequest.pb( space_event.ListSpaceEventsRequest() ) @@ -23442,6 +23622,10 @@ def test_list_space_events_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = space_event.ListSpaceEventsResponse() + post_with_metadata.return_value = ( + space_event.ListSpaceEventsResponse(), + metadata, + ) 
client.list_space_events( request, @@ -23453,6 +23637,7 @@ def test_list_space_events_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_initialize_client_w_rest(): diff --git a/packages/google-apps-events-subscriptions/CHANGELOG.md b/packages/google-apps-events-subscriptions/CHANGELOG.md index a68302ffe5bf..191b74a826b5 100644 --- a/packages/google-apps-events-subscriptions/CHANGELOG.md +++ b/packages/google-apps-events-subscriptions/CHANGELOG.md @@ -1,5 +1,13 @@ # Changelog +## [0.1.6](https://github.com/googleapis/google-cloud-python/compare/google-apps-events-subscriptions-v0.1.5...google-apps-events-subscriptions-v0.1.6) (2025-02-12) + + +### Features + +* Add REST Interceptors which support reading metadata ([a961bc0](https://github.com/googleapis/google-cloud-python/commit/a961bc029201b72fc4923490aeb3d82781853e6a)) +* Add support for reading selective GAPIC generation methods from service YAML ([a961bc0](https://github.com/googleapis/google-cloud-python/commit/a961bc029201b72fc4923490aeb3d82781853e6a)) + ## [0.1.5](https://github.com/googleapis/google-cloud-python/compare/google-apps-events-subscriptions-v0.1.4...google-apps-events-subscriptions-v0.1.5) (2024-12-12) diff --git a/packages/google-apps-events-subscriptions/google/apps/events_subscriptions/gapic_version.py b/packages/google-apps-events-subscriptions/google/apps/events_subscriptions/gapic_version.py index e9c4bb5650f3..51d2795b9d6b 100644 --- a/packages/google-apps-events-subscriptions/google/apps/events_subscriptions/gapic_version.py +++ b/packages/google-apps-events-subscriptions/google/apps/events_subscriptions/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.1.5" # {x-release-please-version} +__version__ = "0.1.6" # {x-release-please-version} diff --git a/packages/google-apps-events-subscriptions/google/apps/events_subscriptions_v1/gapic_version.py b/packages/google-apps-events-subscriptions/google/apps/events_subscriptions_v1/gapic_version.py index e9c4bb5650f3..51d2795b9d6b 100644 --- a/packages/google-apps-events-subscriptions/google/apps/events_subscriptions_v1/gapic_version.py +++ b/packages/google-apps-events-subscriptions/google/apps/events_subscriptions_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.1.5" # {x-release-please-version} +__version__ = "0.1.6" # {x-release-please-version} diff --git a/packages/google-apps-events-subscriptions/google/apps/events_subscriptions_v1/services/subscriptions_service/client.py b/packages/google-apps-events-subscriptions/google/apps/events_subscriptions_v1/services/subscriptions_service/client.py index 578243a08b4a..1e5f218568f4 100644 --- a/packages/google-apps-events-subscriptions/google/apps/events_subscriptions_v1/services/subscriptions_service/client.py +++ b/packages/google-apps-events-subscriptions/google/apps/events_subscriptions_v1/services/subscriptions_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -522,6 +524,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. 
return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -1576,16 +1605,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-apps-events-subscriptions/google/apps/events_subscriptions_v1/services/subscriptions_service/transports/rest.py b/packages/google-apps-events-subscriptions/google/apps/events_subscriptions_v1/services/subscriptions_service/transports/rest.py index 4bf33555dab4..58cdb80af37c 100644 --- a/packages/google-apps-events-subscriptions/google/apps/events_subscriptions_v1/services/subscriptions_service/transports/rest.py +++ b/packages/google-apps-events-subscriptions/google/apps/events_subscriptions_v1/services/subscriptions_service/transports/rest.py @@ -146,12 +146,35 @@ def post_create_subscription( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_subscription - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_subscription_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the SubscriptionsService server but before - it is returned to user code. + it is returned to user code. This `post_create_subscription` interceptor runs + before the `post_create_subscription_with_metadata` interceptor. """ return response + def post_create_subscription_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_subscription + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SubscriptionsService server but before it is returned to user code. + + We recommend only using this `post_create_subscription_with_metadata` + interceptor in new development instead of the `post_create_subscription` interceptor. + When both interceptors are used, this `post_create_subscription_with_metadata` interceptor runs after the + `post_create_subscription` interceptor. 
The (possibly modified) response returned by + `post_create_subscription` will be passed to + `post_create_subscription_with_metadata`. + """ + return response, metadata + def pre_delete_subscription( self, request: subscriptions_service.DeleteSubscriptionRequest, @@ -172,12 +195,35 @@ def post_delete_subscription( ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_subscription - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_subscription_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the SubscriptionsService server but before - it is returned to user code. + it is returned to user code. This `post_delete_subscription` interceptor runs + before the `post_delete_subscription_with_metadata` interceptor. """ return response + def post_delete_subscription_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_subscription + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SubscriptionsService server but before it is returned to user code. + + We recommend only using this `post_delete_subscription_with_metadata` + interceptor in new development instead of the `post_delete_subscription` interceptor. + When both interceptors are used, this `post_delete_subscription_with_metadata` interceptor runs after the + `post_delete_subscription` interceptor. The (possibly modified) response returned by + `post_delete_subscription` will be passed to + `post_delete_subscription_with_metadata`. + """ + return response, metadata + def pre_get_subscription( self, request: subscriptions_service.GetSubscriptionRequest, @@ -198,12 +244,37 @@ def post_get_subscription( ) -> subscription_resource.Subscription: """Post-rpc interceptor for get_subscription - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_subscription_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the SubscriptionsService server but before - it is returned to user code. + it is returned to user code. This `post_get_subscription` interceptor runs + before the `post_get_subscription_with_metadata` interceptor. """ return response + def post_get_subscription_with_metadata( + self, + response: subscription_resource.Subscription, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + subscription_resource.Subscription, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for get_subscription + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SubscriptionsService server but before it is returned to user code. + + We recommend only using this `post_get_subscription_with_metadata` + interceptor in new development instead of the `post_get_subscription` interceptor. + When both interceptors are used, this `post_get_subscription_with_metadata` interceptor runs after the + `post_get_subscription` interceptor. The (possibly modified) response returned by + `post_get_subscription` will be passed to + `post_get_subscription_with_metadata`. 
+ """ + return response, metadata + def pre_list_subscriptions( self, request: subscriptions_service.ListSubscriptionsRequest, @@ -224,12 +295,38 @@ def post_list_subscriptions( ) -> subscriptions_service.ListSubscriptionsResponse: """Post-rpc interceptor for list_subscriptions - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_subscriptions_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the SubscriptionsService server but before - it is returned to user code. + it is returned to user code. This `post_list_subscriptions` interceptor runs + before the `post_list_subscriptions_with_metadata` interceptor. """ return response + def post_list_subscriptions_with_metadata( + self, + response: subscriptions_service.ListSubscriptionsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + subscriptions_service.ListSubscriptionsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_subscriptions + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SubscriptionsService server but before it is returned to user code. + + We recommend only using this `post_list_subscriptions_with_metadata` + interceptor in new development instead of the `post_list_subscriptions` interceptor. + When both interceptors are used, this `post_list_subscriptions_with_metadata` interceptor runs after the + `post_list_subscriptions` interceptor. The (possibly modified) response returned by + `post_list_subscriptions` will be passed to + `post_list_subscriptions_with_metadata`. + """ + return response, metadata + def pre_reactivate_subscription( self, request: subscriptions_service.ReactivateSubscriptionRequest, @@ -250,12 +347,35 @@ def post_reactivate_subscription( ) -> operations_pb2.Operation: """Post-rpc interceptor for reactivate_subscription - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_reactivate_subscription_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the SubscriptionsService server but before - it is returned to user code. + it is returned to user code. This `post_reactivate_subscription` interceptor runs + before the `post_reactivate_subscription_with_metadata` interceptor. """ return response + def post_reactivate_subscription_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for reactivate_subscription + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SubscriptionsService server but before it is returned to user code. + + We recommend only using this `post_reactivate_subscription_with_metadata` + interceptor in new development instead of the `post_reactivate_subscription` interceptor. + When both interceptors are used, this `post_reactivate_subscription_with_metadata` interceptor runs after the + `post_reactivate_subscription` interceptor. The (possibly modified) response returned by + `post_reactivate_subscription` will be passed to + `post_reactivate_subscription_with_metadata`. 
+ """ + return response, metadata + def pre_update_subscription( self, request: subscriptions_service.UpdateSubscriptionRequest, @@ -276,12 +396,35 @@ def post_update_subscription( ) -> operations_pb2.Operation: """Post-rpc interceptor for update_subscription - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_subscription_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the SubscriptionsService server but before - it is returned to user code. + it is returned to user code. This `post_update_subscription` interceptor runs + before the `post_update_subscription_with_metadata` interceptor. """ return response + def post_update_subscription_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_subscription + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SubscriptionsService server but before it is returned to user code. + + We recommend only using this `post_update_subscription_with_metadata` + interceptor in new development instead of the `post_update_subscription` interceptor. + When both interceptors are used, this `post_update_subscription_with_metadata` interceptor runs after the + `post_update_subscription` interceptor. The (possibly modified) response returned by + `post_update_subscription` will be passed to + `post_update_subscription_with_metadata`. + """ + return response, metadata + def pre_get_operation( self, request: operations_pb2.GetOperationRequest, @@ -560,6 +703,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_subscription(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_subscription_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -707,6 +854,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_subscription(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_subscription_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -855,6 +1006,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_subscription(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_subscription_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1005,6 +1160,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_subscriptions(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_subscriptions_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1162,6 +1321,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = 
self._interceptor.post_reactivate_subscription(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_reactivate_subscription_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1315,6 +1478,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_subscription(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_subscription_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-apps-events-subscriptions/samples/generated_samples/snippet_metadata_google.apps.events.subscriptions.v1.json b/packages/google-apps-events-subscriptions/samples/generated_samples/snippet_metadata_google.apps.events.subscriptions.v1.json index 521aa116d51f..b93c9b9c9a31 100644 --- a/packages/google-apps-events-subscriptions/samples/generated_samples/snippet_metadata_google.apps.events.subscriptions.v1.json +++ b/packages/google-apps-events-subscriptions/samples/generated_samples/snippet_metadata_google.apps.events.subscriptions.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-apps-events-subscriptions", - "version": "0.1.5" + "version": "0.1.6" }, "snippets": [ { diff --git a/packages/google-apps-events-subscriptions/tests/unit/gapic/events_subscriptions_v1/test_subscriptions_service.py b/packages/google-apps-events-subscriptions/tests/unit/gapic/events_subscriptions_v1/test_subscriptions_service.py index 639f5130b6ec..207f575b3738 100644 --- a/packages/google-apps-events-subscriptions/tests/unit/gapic/events_subscriptions_v1/test_subscriptions_service.py +++ b/packages/google-apps-events-subscriptions/tests/unit/gapic/events_subscriptions_v1/test_subscriptions_service.py @@ -77,6 +77,13 @@ subscriptions_service, ) +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -350,6 +357,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
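The `_add_cred_info_for_auth_errors` helper and the try/except wrapper around the RPC (see the client.py hunks above) mean that 401/403/404 errors can now carry a JSON-encoded credential summary in their details, which the parametrized tests that follow exercise. Below is a minimal, illustrative sketch of how that surfaces to a caller; it is not part of the generated change. It assumes google-auth>=2.35.0 (so the credentials expose get_cred_info()), assumes the client is importable from google.apps.events_subscriptions_v1 per the package layout in this diff, and uses a purely hypothetical resource name.

from google.api_core import exceptions as core_exceptions
from google.apps import events_subscriptions_v1

client = events_subscriptions_v1.SubscriptionsServiceClient()

try:
    # Hypothetical resource name, used only for illustration.
    client.get_subscription(name="subscriptions/example-subscription")
except core_exceptions.GoogleAPICallError as err:
    # For UNAUTHORIZED/FORBIDDEN/NOT_FOUND responses the client appends a
    # JSON string with credential_source, credential_type and principal
    # (when get_cred_info() is available) to the error details before
    # re-raising; other status codes are left untouched.
    for detail in err.details:
        print(detail)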
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = SubscriptionsServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = SubscriptionsServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -5145,10 +5195,14 @@ def test_create_subscription_rest_interceptors(null_interceptor): ), mock.patch.object( transports.SubscriptionsServiceRestInterceptor, "post_create_subscription" ) as post, mock.patch.object( + transports.SubscriptionsServiceRestInterceptor, + "post_create_subscription_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.SubscriptionsServiceRestInterceptor, "pre_create_subscription" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = subscriptions_service.CreateSubscriptionRequest.pb( subscriptions_service.CreateSubscriptionRequest() ) @@ -5172,6 +5226,7 @@ def test_create_subscription_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_subscription( request, @@ -5183,6 +5238,7 @@ def test_create_subscription_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_subscription_rest_bad_request( @@ -5263,10 +5319,14 @@ def test_delete_subscription_rest_interceptors(null_interceptor): ), mock.patch.object( transports.SubscriptionsServiceRestInterceptor, "post_delete_subscription" ) as post, mock.patch.object( + transports.SubscriptionsServiceRestInterceptor, + "post_delete_subscription_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.SubscriptionsServiceRestInterceptor, "pre_delete_subscription" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = subscriptions_service.DeleteSubscriptionRequest.pb( subscriptions_service.DeleteSubscriptionRequest() ) @@ -5290,6 +5350,7 @@ def test_delete_subscription_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.delete_subscription( request, @@ -5301,6 +5362,7 @@ def 
test_delete_subscription_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_subscription_rest_bad_request( @@ -5404,10 +5466,14 @@ def test_get_subscription_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.SubscriptionsServiceRestInterceptor, "post_get_subscription" ) as post, mock.patch.object( + transports.SubscriptionsServiceRestInterceptor, + "post_get_subscription_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.SubscriptionsServiceRestInterceptor, "pre_get_subscription" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = subscriptions_service.GetSubscriptionRequest.pb( subscriptions_service.GetSubscriptionRequest() ) @@ -5433,6 +5499,7 @@ def test_get_subscription_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = subscription_resource.Subscription() + post_with_metadata.return_value = subscription_resource.Subscription(), metadata client.get_subscription( request, @@ -5444,6 +5511,7 @@ def test_get_subscription_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_subscriptions_rest_bad_request( @@ -5528,10 +5596,14 @@ def test_list_subscriptions_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.SubscriptionsServiceRestInterceptor, "post_list_subscriptions" ) as post, mock.patch.object( + transports.SubscriptionsServiceRestInterceptor, + "post_list_subscriptions_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.SubscriptionsServiceRestInterceptor, "pre_list_subscriptions" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = subscriptions_service.ListSubscriptionsRequest.pb( subscriptions_service.ListSubscriptionsRequest() ) @@ -5557,6 +5629,10 @@ def test_list_subscriptions_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = subscriptions_service.ListSubscriptionsResponse() + post_with_metadata.return_value = ( + subscriptions_service.ListSubscriptionsResponse(), + metadata, + ) client.list_subscriptions( request, @@ -5568,6 +5644,7 @@ def test_list_subscriptions_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_subscription_rest_bad_request( @@ -5737,10 +5814,14 @@ def test_update_subscription_rest_interceptors(null_interceptor): ), mock.patch.object( transports.SubscriptionsServiceRestInterceptor, "post_update_subscription" ) as post, mock.patch.object( + transports.SubscriptionsServiceRestInterceptor, + "post_update_subscription_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.SubscriptionsServiceRestInterceptor, "pre_update_subscription" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = subscriptions_service.UpdateSubscriptionRequest.pb( subscriptions_service.UpdateSubscriptionRequest() ) @@ -5764,6 +5845,7 @@ def test_update_subscription_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.update_subscription( request, @@ -5775,6 +5857,7 @@ def 
test_update_subscription_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_reactivate_subscription_rest_bad_request( @@ -5855,10 +5938,14 @@ def test_reactivate_subscription_rest_interceptors(null_interceptor): ), mock.patch.object( transports.SubscriptionsServiceRestInterceptor, "post_reactivate_subscription" ) as post, mock.patch.object( + transports.SubscriptionsServiceRestInterceptor, + "post_reactivate_subscription_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.SubscriptionsServiceRestInterceptor, "pre_reactivate_subscription" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = subscriptions_service.ReactivateSubscriptionRequest.pb( subscriptions_service.ReactivateSubscriptionRequest() ) @@ -5882,6 +5969,7 @@ def test_reactivate_subscription_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.reactivate_subscription( request, @@ -5893,6 +5981,7 @@ def test_reactivate_subscription_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_operation_rest_bad_request( diff --git a/packages/google-apps-meet/CHANGELOG.md b/packages/google-apps-meet/CHANGELOG.md index 561f2a2c99ba..a93721bbdf54 100644 --- a/packages/google-apps-meet/CHANGELOG.md +++ b/packages/google-apps-meet/CHANGELOG.md @@ -1,5 +1,28 @@ # Changelog +## [0.1.13](https://github.com/googleapis/google-cloud-python/compare/google-apps-meet-v0.1.12...google-apps-meet-v0.1.13) (2025-02-12) + + +### Features + +* Add REST Interceptors which support reading metadata ([a961bc0](https://github.com/googleapis/google-cloud-python/commit/a961bc029201b72fc4923490aeb3d82781853e6a)) +* Add support for reading selective GAPIC generation methods from service YAML ([a961bc0](https://github.com/googleapis/google-cloud-python/commit/a961bc029201b72fc4923490aeb3d82781853e6a)) + +## [0.1.12](https://github.com/googleapis/google-cloud-python/compare/google-apps-meet-v0.1.11...google-apps-meet-v0.1.12) (2025-01-29) + + +### Features + +* Add methods for configuring meeting spaces and members (https://developers.google.com/meet/api/guides/beta/configuration-beta) ([7b23175](https://github.com/googleapis/google-cloud-python/commit/7b23175303cfd26145f3d00bbbd8defe06ae2090)) +* Add new OAuth scope `https://www.googleapis.com/auth/meetings.space.settings` to service `SpacesService` ([7b23175](https://github.com/googleapis/google-cloud-python/commit/7b23175303cfd26145f3d00bbbd8defe06ae2090)) + + +### Documentation + +* Improve docs for `GetSpaceRequest`, `EndActiveConferenceRequest`, `ListConferenceRecordsRequest` ([7b23175](https://github.com/googleapis/google-cloud-python/commit/7b23175303cfd26145f3d00bbbd8defe06ae2090)) +* improve docs for GetSpaceRequest, EndActiveConferenceRequest, ListConferenceRecordsRequest ([7b23175](https://github.com/googleapis/google-cloud-python/commit/7b23175303cfd26145f3d00bbbd8defe06ae2090)) +* Remove *Developer Preview* label from methods that are now generally available ([7b23175](https://github.com/googleapis/google-cloud-python/commit/7b23175303cfd26145f3d00bbbd8defe06ae2090)) + ## [0.1.11](https://github.com/googleapis/google-cloud-python/compare/google-apps-meet-v0.1.10...google-apps-meet-v0.1.11) (2024-12-12) 
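Both changelogs above call out the new metadata-aware REST interceptors. The sketch below shows how an application might adopt one of the post_*_with_metadata hooks documented in the rest.py hunks earlier, using the events-subscriptions service as the example. The SubscriptionsServiceRestInterceptor name and the hook signature come from this diff; the SubscriptionsServiceRestTransport class name and its interceptor argument are assumed from the usual generated-transport layout and are not shown here.

from google.apps import events_subscriptions_v1
from google.apps.events_subscriptions_v1.services.subscriptions_service import (
    transports,
)


class HeaderLoggingInterceptor(transports.SubscriptionsServiceRestInterceptor):
    def post_get_subscription_with_metadata(self, response, metadata):
        # ``metadata`` is the list of (header, value) pairs the transport
        # builds from the HTTP response headers before invoking this hook.
        for key, value in metadata:
            print(f"{key}: {value}")
        # Returning the pair unchanged keeps the default behaviour.
        return response, metadata


# Assumed wiring: pass the interceptor to the REST transport, then hand the
# transport to the client.
transport = transports.SubscriptionsServiceRestTransport(
    interceptor=HeaderLoggingInterceptor()
)
client = events_subscriptions_v1.SubscriptionsServiceClient(transport=transport)

Per the docstrings added above, when both hooks are overridden the plain post_get_subscription interceptor runs first and its (possibly modified) response is what post_get_subscription_with_metadata receives.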
diff --git a/packages/google-apps-meet/docs/meet_v2beta/spaces_service.rst b/packages/google-apps-meet/docs/meet_v2beta/spaces_service.rst index 3e799ddd3d66..a3db8f168df8 100644 --- a/packages/google-apps-meet/docs/meet_v2beta/spaces_service.rst +++ b/packages/google-apps-meet/docs/meet_v2beta/spaces_service.rst @@ -4,3 +4,7 @@ SpacesService .. automodule:: google.apps.meet_v2beta.services.spaces_service :members: :inherited-members: + +.. automodule:: google.apps.meet_v2beta.services.spaces_service.pagers + :members: + :inherited-members: diff --git a/packages/google-apps-meet/google/apps/meet/gapic_version.py b/packages/google-apps-meet/google/apps/meet/gapic_version.py index 4b834789ba9e..7daf9a1dd221 100644 --- a/packages/google-apps-meet/google/apps/meet/gapic_version.py +++ b/packages/google-apps-meet/google/apps/meet/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.1.11" # {x-release-please-version} +__version__ = "0.1.13" # {x-release-please-version} diff --git a/packages/google-apps-meet/google/apps/meet_v2/gapic_version.py b/packages/google-apps-meet/google/apps/meet_v2/gapic_version.py index 4b834789ba9e..7daf9a1dd221 100644 --- a/packages/google-apps-meet/google/apps/meet_v2/gapic_version.py +++ b/packages/google-apps-meet/google/apps/meet_v2/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.1.11" # {x-release-please-version} +__version__ = "0.1.13" # {x-release-please-version} diff --git a/packages/google-apps-meet/google/apps/meet_v2/services/conference_records_service/client.py b/packages/google-apps-meet/google/apps/meet_v2/services/conference_records_service/client.py index 35bd560991ad..875009ac9f4c 100644 --- a/packages/google-apps-meet/google/apps/meet_v2/services/conference_records_service/client.py +++ b/packages/google-apps-meet/google/apps/meet_v2/services/conference_records_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -602,6 +604,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. 
diff --git a/packages/google-apps-meet/google/apps/meet_v2/services/conference_records_service/transports/rest.py b/packages/google-apps-meet/google/apps/meet_v2/services/conference_records_service/transports/rest.py index 4ed223265adf..ebf5f73fc856 100644 --- a/packages/google-apps-meet/google/apps/meet_v2/services/conference_records_service/transports/rest.py +++ b/packages/google-apps-meet/google/apps/meet_v2/services/conference_records_service/transports/rest.py @@ -189,12 +189,35 @@ def post_get_conference_record( ) -> resource.ConferenceRecord: """Post-rpc interceptor for get_conference_record - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_conference_record_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ConferenceRecordsService server but before - it is returned to user code. + it is returned to user code. This `post_get_conference_record` interceptor runs + before the `post_get_conference_record_with_metadata` interceptor. """ return response + def post_get_conference_record_with_metadata( + self, + response: resource.ConferenceRecord, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resource.ConferenceRecord, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_conference_record + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ConferenceRecordsService server but before it is returned to user code. + + We recommend only using this `post_get_conference_record_with_metadata` + interceptor in new development instead of the `post_get_conference_record` interceptor. + When both interceptors are used, this `post_get_conference_record_with_metadata` interceptor runs after the + `post_get_conference_record` interceptor. The (possibly modified) response returned by + `post_get_conference_record` will be passed to + `post_get_conference_record_with_metadata`. + """ + return response, metadata + def pre_get_participant( self, request: service.GetParticipantRequest, @@ -212,12 +235,35 @@ def post_get_participant( ) -> resource.Participant: """Post-rpc interceptor for get_participant - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_participant_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ConferenceRecordsService server but before - it is returned to user code. + it is returned to user code. This `post_get_participant` interceptor runs + before the `post_get_participant_with_metadata` interceptor. """ return response + def post_get_participant_with_metadata( + self, + response: resource.Participant, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resource.Participant, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_participant + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ConferenceRecordsService server but before it is returned to user code. + + We recommend only using this `post_get_participant_with_metadata` + interceptor in new development instead of the `post_get_participant` interceptor. + When both interceptors are used, this `post_get_participant_with_metadata` interceptor runs after the + `post_get_participant` interceptor. 
The (possibly modified) response returned by + `post_get_participant` will be passed to + `post_get_participant_with_metadata`. + """ + return response, metadata + def pre_get_participant_session( self, request: service.GetParticipantSessionRequest, @@ -237,12 +283,35 @@ def post_get_participant_session( ) -> resource.ParticipantSession: """Post-rpc interceptor for get_participant_session - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_participant_session_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ConferenceRecordsService server but before - it is returned to user code. + it is returned to user code. This `post_get_participant_session` interceptor runs + before the `post_get_participant_session_with_metadata` interceptor. """ return response + def post_get_participant_session_with_metadata( + self, + response: resource.ParticipantSession, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resource.ParticipantSession, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_participant_session + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ConferenceRecordsService server but before it is returned to user code. + + We recommend only using this `post_get_participant_session_with_metadata` + interceptor in new development instead of the `post_get_participant_session` interceptor. + When both interceptors are used, this `post_get_participant_session_with_metadata` interceptor runs after the + `post_get_participant_session` interceptor. The (possibly modified) response returned by + `post_get_participant_session` will be passed to + `post_get_participant_session_with_metadata`. + """ + return response, metadata + def pre_get_recording( self, request: service.GetRecordingRequest, @@ -258,12 +327,35 @@ def pre_get_recording( def post_get_recording(self, response: resource.Recording) -> resource.Recording: """Post-rpc interceptor for get_recording - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_recording_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ConferenceRecordsService server but before - it is returned to user code. + it is returned to user code. This `post_get_recording` interceptor runs + before the `post_get_recording_with_metadata` interceptor. """ return response + def post_get_recording_with_metadata( + self, + response: resource.Recording, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resource.Recording, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_recording + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ConferenceRecordsService server but before it is returned to user code. + + We recommend only using this `post_get_recording_with_metadata` + interceptor in new development instead of the `post_get_recording` interceptor. + When both interceptors are used, this `post_get_recording_with_metadata` interceptor runs after the + `post_get_recording` interceptor. The (possibly modified) response returned by + `post_get_recording` will be passed to + `post_get_recording_with_metadata`. 
+ """ + return response, metadata + def pre_get_transcript( self, request: service.GetTranscriptRequest, @@ -279,12 +371,35 @@ def pre_get_transcript( def post_get_transcript(self, response: resource.Transcript) -> resource.Transcript: """Post-rpc interceptor for get_transcript - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_transcript_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ConferenceRecordsService server but before - it is returned to user code. + it is returned to user code. This `post_get_transcript` interceptor runs + before the `post_get_transcript_with_metadata` interceptor. """ return response + def post_get_transcript_with_metadata( + self, + response: resource.Transcript, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resource.Transcript, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_transcript + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ConferenceRecordsService server but before it is returned to user code. + + We recommend only using this `post_get_transcript_with_metadata` + interceptor in new development instead of the `post_get_transcript` interceptor. + When both interceptors are used, this `post_get_transcript_with_metadata` interceptor runs after the + `post_get_transcript` interceptor. The (possibly modified) response returned by + `post_get_transcript` will be passed to + `post_get_transcript_with_metadata`. + """ + return response, metadata + def pre_get_transcript_entry( self, request: service.GetTranscriptEntryRequest, @@ -304,12 +419,35 @@ def post_get_transcript_entry( ) -> resource.TranscriptEntry: """Post-rpc interceptor for get_transcript_entry - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_transcript_entry_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ConferenceRecordsService server but before - it is returned to user code. + it is returned to user code. This `post_get_transcript_entry` interceptor runs + before the `post_get_transcript_entry_with_metadata` interceptor. """ return response + def post_get_transcript_entry_with_metadata( + self, + response: resource.TranscriptEntry, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resource.TranscriptEntry, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_transcript_entry + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ConferenceRecordsService server but before it is returned to user code. + + We recommend only using this `post_get_transcript_entry_with_metadata` + interceptor in new development instead of the `post_get_transcript_entry` interceptor. + When both interceptors are used, this `post_get_transcript_entry_with_metadata` interceptor runs after the + `post_get_transcript_entry` interceptor. The (possibly modified) response returned by + `post_get_transcript_entry` will be passed to + `post_get_transcript_entry_with_metadata`. 
+ """ + return response, metadata + def pre_list_conference_records( self, request: service.ListConferenceRecordsRequest, @@ -329,12 +467,37 @@ def post_list_conference_records( ) -> service.ListConferenceRecordsResponse: """Post-rpc interceptor for list_conference_records - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_conference_records_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ConferenceRecordsService server but before - it is returned to user code. + it is returned to user code. This `post_list_conference_records` interceptor runs + before the `post_list_conference_records_with_metadata` interceptor. """ return response + def post_list_conference_records_with_metadata( + self, + response: service.ListConferenceRecordsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + service.ListConferenceRecordsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_conference_records + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ConferenceRecordsService server but before it is returned to user code. + + We recommend only using this `post_list_conference_records_with_metadata` + interceptor in new development instead of the `post_list_conference_records` interceptor. + When both interceptors are used, this `post_list_conference_records_with_metadata` interceptor runs after the + `post_list_conference_records` interceptor. The (possibly modified) response returned by + `post_list_conference_records` will be passed to + `post_list_conference_records_with_metadata`. + """ + return response, metadata + def pre_list_participants( self, request: service.ListParticipantsRequest, @@ -354,12 +517,37 @@ def post_list_participants( ) -> service.ListParticipantsResponse: """Post-rpc interceptor for list_participants - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_participants_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ConferenceRecordsService server but before - it is returned to user code. + it is returned to user code. This `post_list_participants` interceptor runs + before the `post_list_participants_with_metadata` interceptor. """ return response + def post_list_participants_with_metadata( + self, + response: service.ListParticipantsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + service.ListParticipantsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_participants + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ConferenceRecordsService server but before it is returned to user code. + + We recommend only using this `post_list_participants_with_metadata` + interceptor in new development instead of the `post_list_participants` interceptor. + When both interceptors are used, this `post_list_participants_with_metadata` interceptor runs after the + `post_list_participants` interceptor. The (possibly modified) response returned by + `post_list_participants` will be passed to + `post_list_participants_with_metadata`. 
+ """ + return response, metadata + def pre_list_participant_sessions( self, request: service.ListParticipantSessionsRequest, @@ -379,12 +567,37 @@ def post_list_participant_sessions( ) -> service.ListParticipantSessionsResponse: """Post-rpc interceptor for list_participant_sessions - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_participant_sessions_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ConferenceRecordsService server but before - it is returned to user code. + it is returned to user code. This `post_list_participant_sessions` interceptor runs + before the `post_list_participant_sessions_with_metadata` interceptor. """ return response + def post_list_participant_sessions_with_metadata( + self, + response: service.ListParticipantSessionsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + service.ListParticipantSessionsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_participant_sessions + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ConferenceRecordsService server but before it is returned to user code. + + We recommend only using this `post_list_participant_sessions_with_metadata` + interceptor in new development instead of the `post_list_participant_sessions` interceptor. + When both interceptors are used, this `post_list_participant_sessions_with_metadata` interceptor runs after the + `post_list_participant_sessions` interceptor. The (possibly modified) response returned by + `post_list_participant_sessions` will be passed to + `post_list_participant_sessions_with_metadata`. + """ + return response, metadata + def pre_list_recordings( self, request: service.ListRecordingsRequest, @@ -402,12 +615,35 @@ def post_list_recordings( ) -> service.ListRecordingsResponse: """Post-rpc interceptor for list_recordings - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_recordings_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ConferenceRecordsService server but before - it is returned to user code. + it is returned to user code. This `post_list_recordings` interceptor runs + before the `post_list_recordings_with_metadata` interceptor. """ return response + def post_list_recordings_with_metadata( + self, + response: service.ListRecordingsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[service.ListRecordingsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_recordings + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ConferenceRecordsService server but before it is returned to user code. + + We recommend only using this `post_list_recordings_with_metadata` + interceptor in new development instead of the `post_list_recordings` interceptor. + When both interceptors are used, this `post_list_recordings_with_metadata` interceptor runs after the + `post_list_recordings` interceptor. The (possibly modified) response returned by + `post_list_recordings` will be passed to + `post_list_recordings_with_metadata`. 
+ """ + return response, metadata + def pre_list_transcript_entries( self, request: service.ListTranscriptEntriesRequest, @@ -427,12 +663,37 @@ def post_list_transcript_entries( ) -> service.ListTranscriptEntriesResponse: """Post-rpc interceptor for list_transcript_entries - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_transcript_entries_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ConferenceRecordsService server but before - it is returned to user code. + it is returned to user code. This `post_list_transcript_entries` interceptor runs + before the `post_list_transcript_entries_with_metadata` interceptor. """ return response + def post_list_transcript_entries_with_metadata( + self, + response: service.ListTranscriptEntriesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + service.ListTranscriptEntriesResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_transcript_entries + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ConferenceRecordsService server but before it is returned to user code. + + We recommend only using this `post_list_transcript_entries_with_metadata` + interceptor in new development instead of the `post_list_transcript_entries` interceptor. + When both interceptors are used, this `post_list_transcript_entries_with_metadata` interceptor runs after the + `post_list_transcript_entries` interceptor. The (possibly modified) response returned by + `post_list_transcript_entries` will be passed to + `post_list_transcript_entries_with_metadata`. + """ + return response, metadata + def pre_list_transcripts( self, request: service.ListTranscriptsRequest, @@ -450,12 +711,37 @@ def post_list_transcripts( ) -> service.ListTranscriptsResponse: """Post-rpc interceptor for list_transcripts - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_transcripts_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ConferenceRecordsService server but before - it is returned to user code. + it is returned to user code. This `post_list_transcripts` interceptor runs + before the `post_list_transcripts_with_metadata` interceptor. """ return response + def post_list_transcripts_with_metadata( + self, + response: service.ListTranscriptsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + service.ListTranscriptsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_transcripts + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ConferenceRecordsService server but before it is returned to user code. + + We recommend only using this `post_list_transcripts_with_metadata` + interceptor in new development instead of the `post_list_transcripts` interceptor. + When both interceptors are used, this `post_list_transcripts_with_metadata` interceptor runs after the + `post_list_transcripts` interceptor. The (possibly modified) response returned by + `post_list_transcripts` will be passed to + `post_list_transcripts_with_metadata`. 
+ """ + return response, metadata + @dataclasses.dataclass class ConferenceRecordsServiceRestStub: @@ -665,6 +951,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_conference_record(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_conference_record_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -810,6 +1100,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_participant(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_participant_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -962,6 +1256,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_participant_session(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_participant_session_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1108,6 +1406,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_recording(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_recording_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1255,6 +1557,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_transcript(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_transcript_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1403,6 +1709,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_transcript_entry(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_transcript_entry_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1549,6 +1859,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_conference_records(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_conference_records_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1697,6 +2011,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_participants(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_participants_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1844,6 +2162,10 @@ def __call__( 
json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_participant_sessions(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_participant_sessions_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1989,6 +2311,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_recordings(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_recordings_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2135,6 +2461,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_transcript_entries(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_transcript_entries_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2282,6 +2612,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_transcripts(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_transcripts_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-apps-meet/google/apps/meet_v2/services/spaces_service/async_client.py b/packages/google-apps-meet/google/apps/meet_v2/services/spaces_service/async_client.py index ae4d3a81f6fd..8c6ed5c7b436 100644 --- a/packages/google-apps-meet/google/apps/meet_v2/services/spaces_service/async_client.py +++ b/packages/google-apps-meet/google/apps/meet_v2/services/spaces_service/async_client.py @@ -403,7 +403,10 @@ async def get_space( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> resource.Space: - r"""Gets a space by ``space_id`` or ``meeting_code``. + r"""Gets details about a meeting space. + + For an example, see `Get a meeting + space `__. .. code-block:: python @@ -436,6 +439,29 @@ async def sample_get_space(): The request object. Request to get a space. name (:class:`str`): Required. Resource name of the space. + + Format: ``spaces/{space}`` or ``spaces/{meetingCode}``. + + ``{space}`` is the resource identifier for the space. + It's a unique, server-generated ID and is case + sensitive. For example, ``jQCFfuBOdN5z``. + + ``{meetingCode}`` is an alias for the space. It's a + typeable, unique character string and is non-case + sensitive. For example, ``abc-mnop-xyz``. The maximum + length is 128 characters. + + A ``meetingCode`` shouldn't be stored long term as it + can become dissociated from a meeting space and can be + reused for different meeting spaces in the future. + Generally, a ``meetingCode`` expires 365 days after last + use. For more information, see `Learn about meeting + codes in Google + Meet `__. + + For more information, see `How Meet identifies a meeting + space `__. + This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. 
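The expanded ``name`` documentation above spells out that a space can be addressed either by its server-generated ID (``spaces/{space}``) or by its meeting-code alias. The following is a minimal usage sketch under those docs, not part of the generated change; it assumes the async client is exported as SpacesServiceAsyncClient from google.apps.meet_v2 (the class name is not shown verbatim in this diff) and reuses the example meeting code from the documentation.

import asyncio

from google.apps import meet_v2


async def fetch_space():
    client = meet_v2.SpacesServiceAsyncClient()
    # The meeting-code alias (case-insensitive, and it can expire roughly
    # 365 days after last use) works here; prefer the spaces/{space} ID for
    # long-lived references.
    return await client.get_space(name="spaces/abc-mnop-xyz")


if __name__ == "__main__":
    space = asyncio.run(fetch_space())
    print(space.name)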
@@ -510,7 +536,10 @@ async def update_space( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> resource.Space: - r"""Updates a space. + r"""Updates details about a meeting space. + + For an example, see `Update a meeting + space `__. .. code-block:: python @@ -547,9 +576,11 @@ async def sample_update_space(): should not be set. update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): Optional. Field mask used to specify the fields to be - updated in the space. If update_mask isn't provided, it - defaults to '*' and updates all fields provided in the - request, including deleting fields not set in the + updated in the space. If update_mask isn't provided(not + set, set with empty paths, or only has "" as paths), it + defaults to update all fields provided with values in + the request. Using "*" as update_mask will update all + fields, including deleting fields not set in the request. This corresponds to the ``update_mask`` field @@ -631,6 +662,9 @@ async def end_active_conference( ) -> None: r"""Ends an active conference (if there's one). + For an example, see `End active + conference `__. + .. code-block:: python # This snippet has been automatically generated and should be regarded as a @@ -660,6 +694,16 @@ async def sample_end_active_conference(): of a space. name (:class:`str`): Required. Resource name of the space. + + Format: ``spaces/{space}``. + + ``{space}`` is the resource identifier for the space. + It's a unique, server-generated ID and is case + sensitive. For example, ``jQCFfuBOdN5z``. + + For more information, see `How Meet identifies a meeting + space `__. + This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. diff --git a/packages/google-apps-meet/google/apps/meet_v2/services/spaces_service/client.py b/packages/google-apps-meet/google/apps/meet_v2/services/spaces_service/client.py index 0f1a108b9637..74463bc83da7 100644 --- a/packages/google-apps-meet/google/apps/meet_v2/services/spaces_service/client.py +++ b/packages/google-apps-meet/google/apps/meet_v2/services/spaces_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -491,6 +493,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. 
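A standalone sketch of the behaviour of the ``_add_cred_info_for_auth_errors`` helper added above: only 401/403/404 errors are decorated, and only when the credentials expose ``get_cred_info`` (google-auth >= 2.35.0). The credentials class here is a stand-in, not a real google-auth type.

.. code-block:: python

    import json
    from http import HTTPStatus

    from google.api_core import exceptions as core_exceptions


    class FakeCreds:
        # Stand-in for credentials that expose get_cred_info().
        def get_cred_info(self):
            return {"credential_source": "/path/to/adc.json"}


    def add_cred_info(error: core_exceptions.GoogleAPICallError, cred) -> None:
        # Mirrors the helper above: skip anything other than 401/403/404.
        if error.code not in (
            HTTPStatus.UNAUTHORIZED,
            HTTPStatus.FORBIDDEN,
            HTTPStatus.NOT_FOUND,
        ):
            return
        if not hasattr(cred, "get_cred_info"):
            return
        cred_info = cred.get_cred_info()
        if cred_info and hasattr(error._details, "append"):
            error._details.append(json.dumps(cred_info))


    # details=[] makes the details container mutable so the append is visible.
    err = core_exceptions.PermissionDenied("caller lacks permission", details=[])
    add_cred_info(err, FakeCreds())
    print(err.details)  # ['{"credential_source": "/path/to/adc.json"}']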
@@ -796,7 +825,10 @@ def get_space( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> resource.Space: - r"""Gets a space by ``space_id`` or ``meeting_code``. + r"""Gets details about a meeting space. + + For an example, see `Get a meeting + space `__. .. code-block:: python @@ -829,6 +861,29 @@ def sample_get_space(): The request object. Request to get a space. name (str): Required. Resource name of the space. + + Format: ``spaces/{space}`` or ``spaces/{meetingCode}``. + + ``{space}`` is the resource identifier for the space. + It's a unique, server-generated ID and is case + sensitive. For example, ``jQCFfuBOdN5z``. + + ``{meetingCode}`` is an alias for the space. It's a + typeable, unique character string and is non-case + sensitive. For example, ``abc-mnop-xyz``. The maximum + length is 128 characters. + + A ``meetingCode`` shouldn't be stored long term as it + can become dissociated from a meeting space and can be + reused for different meeting spaces in the future. + Generally, a ``meetingCode`` expires 365 days after last + use. For more information, see `Learn about meeting + codes in Google + Meet `__. + + For more information, see `How Meet identifies a meeting + space `__. + This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -900,7 +955,10 @@ def update_space( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> resource.Space: - r"""Updates a space. + r"""Updates details about a meeting space. + + For an example, see `Update a meeting + space `__. .. code-block:: python @@ -937,9 +995,11 @@ def sample_update_space(): should not be set. update_mask (google.protobuf.field_mask_pb2.FieldMask): Optional. Field mask used to specify the fields to be - updated in the space. If update_mask isn't provided, it - defaults to '*' and updates all fields provided in the - request, including deleting fields not set in the + updated in the space. If update_mask isn't provided(not + set, set with empty paths, or only has "" as paths), it + defaults to update all fields provided with values in + the request. Using "*" as update_mask will update all + fields, including deleting fields not set in the request. This corresponds to the ``update_mask`` field @@ -1018,6 +1078,9 @@ def end_active_conference( ) -> None: r"""Ends an active conference (if there's one). + For an example, see `End active + conference `__. + .. code-block:: python # This snippet has been automatically generated and should be regarded as a @@ -1047,6 +1110,16 @@ def sample_end_active_conference(): of a space. name (str): Required. Resource name of the space. + + Format: ``spaces/{space}``. + + ``{space}`` is the resource identifier for the space. + It's a unique, server-generated ID and is case + sensitive. For example, ``jQCFfuBOdN5z``. + + For more information, see `How Meet identifies a meeting + space `__. + This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. 
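A sketch of the ``update_mask`` semantics clarified above: an explicit mask updates only the listed fields, while ``"*"`` updates every field, clearing any field not set on the request. The field path ``config.access_type`` is illustrative and not spelled out in this diff.

.. code-block:: python

    from google.apps import meet_v2
    from google.protobuf import field_mask_pb2


    def sample_update_space(space_name: str):
        client = meet_v2.SpacesServiceClient()
        space = meet_v2.Space(name=space_name)

        # Update only the fields named in the mask.
        partial = client.update_space(
            request=meet_v2.UpdateSpaceRequest(
                space=space,
                update_mask=field_mask_pb2.FieldMask(paths=["config.access_type"]),
            )
        )

        # "*" updates all fields, deleting any field not set on `space`.
        full = client.update_space(
            request=meet_v2.UpdateSpaceRequest(
                space=space,
                update_mask=field_mask_pb2.FieldMask(paths=["*"]),
            )
        )
        return partial, full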
diff --git a/packages/google-apps-meet/google/apps/meet_v2/services/spaces_service/transports/base.py b/packages/google-apps-meet/google/apps/meet_v2/services/spaces_service/transports/base.py index 571ff4192a03..b116cff76468 100644 --- a/packages/google-apps-meet/google/apps/meet_v2/services/spaces_service/transports/base.py +++ b/packages/google-apps-meet/google/apps/meet_v2/services/spaces_service/transports/base.py @@ -39,6 +39,7 @@ class SpacesServiceTransport(abc.ABC): AUTH_SCOPES = ( "https://www.googleapis.com/auth/meetings.space.created", "https://www.googleapis.com/auth/meetings.space.readonly", + "https://www.googleapis.com/auth/meetings.space.settings", ) DEFAULT_HOST: str = "meet.googleapis.com" diff --git a/packages/google-apps-meet/google/apps/meet_v2/services/spaces_service/transports/grpc.py b/packages/google-apps-meet/google/apps/meet_v2/services/spaces_service/transports/grpc.py index 104bc6e1b50d..cad85152f698 100644 --- a/packages/google-apps-meet/google/apps/meet_v2/services/spaces_service/transports/grpc.py +++ b/packages/google-apps-meet/google/apps/meet_v2/services/spaces_service/transports/grpc.py @@ -349,7 +349,10 @@ def create_space(self) -> Callable[[service.CreateSpaceRequest], resource.Space] def get_space(self) -> Callable[[service.GetSpaceRequest], resource.Space]: r"""Return a callable for the get space method over gRPC. - Gets a space by ``space_id`` or ``meeting_code``. + Gets details about a meeting space. + + For an example, see `Get a meeting + space `__. Returns: Callable[[~.GetSpaceRequest], @@ -373,7 +376,10 @@ def get_space(self) -> Callable[[service.GetSpaceRequest], resource.Space]: def update_space(self) -> Callable[[service.UpdateSpaceRequest], resource.Space]: r"""Return a callable for the update space method over gRPC. - Updates a space. + Updates details about a meeting space. + + For an example, see `Update a meeting + space `__. Returns: Callable[[~.UpdateSpaceRequest], @@ -401,6 +407,9 @@ def end_active_conference( Ends an active conference (if there's one). + For an example, see `End active + conference `__. + Returns: Callable[[~.EndActiveConferenceRequest], ~.Empty]: diff --git a/packages/google-apps-meet/google/apps/meet_v2/services/spaces_service/transports/grpc_asyncio.py b/packages/google-apps-meet/google/apps/meet_v2/services/spaces_service/transports/grpc_asyncio.py index 5bdc968f2bb4..d67acb468854 100644 --- a/packages/google-apps-meet/google/apps/meet_v2/services/spaces_service/transports/grpc_asyncio.py +++ b/packages/google-apps-meet/google/apps/meet_v2/services/spaces_service/transports/grpc_asyncio.py @@ -360,7 +360,10 @@ def get_space( ) -> Callable[[service.GetSpaceRequest], Awaitable[resource.Space]]: r"""Return a callable for the get space method over gRPC. - Gets a space by ``space_id`` or ``meeting_code``. + Gets details about a meeting space. + + For an example, see `Get a meeting + space `__. Returns: Callable[[~.GetSpaceRequest], @@ -386,7 +389,10 @@ def update_space( ) -> Callable[[service.UpdateSpaceRequest], Awaitable[resource.Space]]: r"""Return a callable for the update space method over gRPC. - Updates a space. + Updates details about a meeting space. + + For an example, see `Update a meeting + space `__. Returns: Callable[[~.UpdateSpaceRequest], @@ -414,6 +420,9 @@ def end_active_conference( Ends an active conference (if there's one). + For an example, see `End active + conference `__. 
+ Returns: Callable[[~.EndActiveConferenceRequest], Awaitable[~.Empty]]: diff --git a/packages/google-apps-meet/google/apps/meet_v2/services/spaces_service/transports/rest.py b/packages/google-apps-meet/google/apps/meet_v2/services/spaces_service/transports/rest.py index 5ffeecf68bd7..b18d4cd7f824 100644 --- a/packages/google-apps-meet/google/apps/meet_v2/services/spaces_service/transports/rest.py +++ b/packages/google-apps-meet/google/apps/meet_v2/services/spaces_service/transports/rest.py @@ -118,12 +118,35 @@ def pre_create_space( def post_create_space(self, response: resource.Space) -> resource.Space: """Post-rpc interceptor for create_space - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_space_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the SpacesService server but before - it is returned to user code. + it is returned to user code. This `post_create_space` interceptor runs + before the `post_create_space_with_metadata` interceptor. """ return response + def post_create_space_with_metadata( + self, + response: resource.Space, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resource.Space, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_space + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SpacesService server but before it is returned to user code. + + We recommend only using this `post_create_space_with_metadata` + interceptor in new development instead of the `post_create_space` interceptor. + When both interceptors are used, this `post_create_space_with_metadata` interceptor runs after the + `post_create_space` interceptor. The (possibly modified) response returned by + `post_create_space` will be passed to + `post_create_space_with_metadata`. + """ + return response, metadata + def pre_end_active_conference( self, request: service.EndActiveConferenceRequest, @@ -153,12 +176,35 @@ def pre_get_space( def post_get_space(self, response: resource.Space) -> resource.Space: """Post-rpc interceptor for get_space - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_space_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the SpacesService server but before - it is returned to user code. + it is returned to user code. This `post_get_space` interceptor runs + before the `post_get_space_with_metadata` interceptor. """ return response + def post_get_space_with_metadata( + self, + response: resource.Space, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resource.Space, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_space + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SpacesService server but before it is returned to user code. + + We recommend only using this `post_get_space_with_metadata` + interceptor in new development instead of the `post_get_space` interceptor. + When both interceptors are used, this `post_get_space_with_metadata` interceptor runs after the + `post_get_space` interceptor. The (possibly modified) response returned by + `post_get_space` will be passed to + `post_get_space_with_metadata`. 
+ """ + return response, metadata + def pre_update_space( self, request: service.UpdateSpaceRequest, @@ -174,12 +220,35 @@ def pre_update_space( def post_update_space(self, response: resource.Space) -> resource.Space: """Post-rpc interceptor for update_space - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_space_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the SpacesService server but before - it is returned to user code. + it is returned to user code. This `post_update_space` interceptor runs + before the `post_update_space_with_metadata` interceptor. """ return response + def post_update_space_with_metadata( + self, + response: resource.Space, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resource.Space, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_space + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SpacesService server but before it is returned to user code. + + We recommend only using this `post_update_space_with_metadata` + interceptor in new development instead of the `post_update_space` interceptor. + When both interceptors are used, this `post_update_space_with_metadata` interceptor runs after the + `post_update_space` interceptor. The (possibly modified) response returned by + `post_update_space` will be passed to + `post_update_space_with_metadata`. + """ + return response, metadata + @dataclasses.dataclass class SpacesServiceRestStub: @@ -397,6 +466,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_space(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_space_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -659,6 +732,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_space(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_space_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -812,6 +889,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_space(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_space_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-apps-meet/google/apps/meet_v2/types/resource.py b/packages/google-apps-meet/google/apps/meet_v2/types/resource.py index d5fe5d5b0848..c4554f9d0d9b 100644 --- a/packages/google-apps-meet/google/apps/meet_v2/types/resource.py +++ b/packages/google-apps-meet/google/apps/meet_v2/types/resource.py @@ -47,16 +47,32 @@ class Space(proto.Message): Attributes: name (str): - Immutable. Resource name of the space. Format: - ``spaces/{space}`` + Immutable. Resource name of the space. + + Format: ``spaces/{space}``. + + ``{space}`` is the resource identifier for the space. It's a + unique, server-generated ID and is case sensitive. For + example, ``jQCFfuBOdN5z``. 
+ + For more information, see `How Meet identifies a meeting + space `__. meeting_uri (str): - Output only. URI used to join meetings, such as + Output only. URI used to join meetings consisting of + ``https://meet.google.com/`` followed by the + ``meeting_code``. For example, ``https://meet.google.com/abc-mnop-xyz``. meeting_code (str): - Output only. Type friendly code to join the meeting. Format: - ``[a-z]+-[a-z]+-[a-z]+`` such as ``abc-mnop-xyz``. The - maximum length is 128 characters. Can only be used as an - alias of the space ID to get the space. + Output only. Type friendly unique string used to join the + meeting. + + Format: ``[a-z]+-[a-z]+-[a-z]+``. For example, + ``abc-mnop-xyz``. + + The maximum length is 128 characters. + + Can only be used as an alias of the space name to get the + space. config (google.apps.meet_v2.types.SpaceConfig): Configuration pertaining to the meeting space. diff --git a/packages/google-apps-meet/google/apps/meet_v2/types/service.py b/packages/google-apps-meet/google/apps/meet_v2/types/service.py index 46828ff21597..1869e23b714c 100644 --- a/packages/google-apps-meet/google/apps/meet_v2/types/service.py +++ b/packages/google-apps-meet/google/apps/meet_v2/types/service.py @@ -75,6 +75,27 @@ class GetSpaceRequest(proto.Message): Attributes: name (str): Required. Resource name of the space. + + Format: ``spaces/{space}`` or ``spaces/{meetingCode}``. + + ``{space}`` is the resource identifier for the space. It's a + unique, server-generated ID and is case sensitive. For + example, ``jQCFfuBOdN5z``. + + ``{meetingCode}`` is an alias for the space. It's a + typeable, unique character string and is non-case sensitive. + For example, ``abc-mnop-xyz``. The maximum length is 128 + characters. + + A ``meetingCode`` shouldn't be stored long term as it can + become dissociated from a meeting space and can be reused + for different meeting spaces in the future. Generally, a + ``meetingCode`` expires 365 days after last use. For more + information, see `Learn about meeting codes in Google + Meet `__. + + For more information, see `How Meet identifies a meeting + space `__. """ name: str = proto.Field( @@ -91,9 +112,11 @@ class UpdateSpaceRequest(proto.Message): Required. Space to be updated. update_mask (google.protobuf.field_mask_pb2.FieldMask): Optional. Field mask used to specify the fields to be - updated in the space. If update_mask isn't provided, it - defaults to '*' and updates all fields provided in the - request, including deleting fields not set in the request. + updated in the space. If update_mask isn't provided(not set, + set with empty paths, or only has "" as paths), it defaults + to update all fields provided with values in the request. + Using "*" as update_mask will update all fields, including + deleting fields not set in the request. """ space: resource.Space = proto.Field( @@ -114,6 +137,15 @@ class EndActiveConferenceRequest(proto.Message): Attributes: name (str): Required. Resource name of the space. + + Format: ``spaces/{space}``. + + ``{space}`` is the resource identifier for the space. It's a + unique, server-generated ID and is case sensitive. For + example, ``jQCFfuBOdN5z``. + + For more information, see `How Meet identifies a meeting + space `__. """ name: str = proto.Field( @@ -160,7 +192,12 @@ class ListConferenceRecordsRequest(proto.Message): - ``start_time`` - ``end_time`` - For example, ``space.meeting_code = "abc-mnop-xyz"``. 
+ For example, consider the following filters: + + - ``space.name = "spaces/NAME"`` + - ``space.meeting_code = "abc-mnop-xyz"`` + - ``start_time>="2024-01-01T00:00:00.000Z" AND start_time<="2024-01-02T00:00:00.000Z"`` + - ``end_time IS NULL`` """ page_size: int = proto.Field( diff --git a/packages/google-apps-meet/google/apps/meet_v2beta/__init__.py b/packages/google-apps-meet/google/apps/meet_v2beta/__init__.py index 83b25bf0634a..026b81f460e5 100644 --- a/packages/google-apps-meet/google/apps/meet_v2beta/__init__.py +++ b/packages/google-apps-meet/google/apps/meet_v2beta/__init__.py @@ -29,6 +29,7 @@ ConferenceRecord, DocsDestination, DriveDestination, + Member, Participant, ParticipantSession, PhoneUser, @@ -40,9 +41,12 @@ TranscriptEntry, ) from .types.service import ( + CreateMemberRequest, CreateSpaceRequest, + DeleteMemberRequest, EndActiveConferenceRequest, GetConferenceRecordRequest, + GetMemberRequest, GetParticipantRequest, GetParticipantSessionRequest, GetRecordingRequest, @@ -51,6 +55,8 @@ GetTranscriptRequest, ListConferenceRecordsRequest, ListConferenceRecordsResponse, + ListMembersRequest, + ListMembersResponse, ListParticipantSessionsRequest, ListParticipantSessionsResponse, ListParticipantsRequest, @@ -71,11 +77,14 @@ "AnonymousUser", "ConferenceRecord", "ConferenceRecordsServiceClient", + "CreateMemberRequest", "CreateSpaceRequest", + "DeleteMemberRequest", "DocsDestination", "DriveDestination", "EndActiveConferenceRequest", "GetConferenceRecordRequest", + "GetMemberRequest", "GetParticipantRequest", "GetParticipantSessionRequest", "GetRecordingRequest", @@ -84,6 +93,8 @@ "GetTranscriptRequest", "ListConferenceRecordsRequest", "ListConferenceRecordsResponse", + "ListMembersRequest", + "ListMembersResponse", "ListParticipantSessionsRequest", "ListParticipantSessionsResponse", "ListParticipantsRequest", @@ -94,6 +105,7 @@ "ListTranscriptEntriesResponse", "ListTranscriptsRequest", "ListTranscriptsResponse", + "Member", "Participant", "ParticipantSession", "PhoneUser", diff --git a/packages/google-apps-meet/google/apps/meet_v2beta/gapic_metadata.json b/packages/google-apps-meet/google/apps/meet_v2beta/gapic_metadata.json index 07fcccd295b7..930d36049c3a 100644 --- a/packages/google-apps-meet/google/apps/meet_v2beta/gapic_metadata.json +++ b/packages/google-apps-meet/google/apps/meet_v2beta/gapic_metadata.json @@ -209,21 +209,41 @@ "grpc": { "libraryClient": "SpacesServiceClient", "rpcs": { + "CreateMember": { + "methods": [ + "create_member" + ] + }, "CreateSpace": { "methods": [ "create_space" ] }, + "DeleteMember": { + "methods": [ + "delete_member" + ] + }, "EndActiveConference": { "methods": [ "end_active_conference" ] }, + "GetMember": { + "methods": [ + "get_member" + ] + }, "GetSpace": { "methods": [ "get_space" ] }, + "ListMembers": { + "methods": [ + "list_members" + ] + }, "UpdateSpace": { "methods": [ "update_space" @@ -234,21 +254,41 @@ "grpc-async": { "libraryClient": "SpacesServiceAsyncClient", "rpcs": { + "CreateMember": { + "methods": [ + "create_member" + ] + }, "CreateSpace": { "methods": [ "create_space" ] }, + "DeleteMember": { + "methods": [ + "delete_member" + ] + }, "EndActiveConference": { "methods": [ "end_active_conference" ] }, + "GetMember": { + "methods": [ + "get_member" + ] + }, "GetSpace": { "methods": [ "get_space" ] }, + "ListMembers": { + "methods": [ + "list_members" + ] + }, "UpdateSpace": { "methods": [ "update_space" @@ -259,21 +299,41 @@ "rest": { "libraryClient": "SpacesServiceClient", "rpcs": { + "CreateMember": { + "methods": [ + 
"create_member" + ] + }, "CreateSpace": { "methods": [ "create_space" ] }, + "DeleteMember": { + "methods": [ + "delete_member" + ] + }, "EndActiveConference": { "methods": [ "end_active_conference" ] }, + "GetMember": { + "methods": [ + "get_member" + ] + }, "GetSpace": { "methods": [ "get_space" ] }, + "ListMembers": { + "methods": [ + "list_members" + ] + }, "UpdateSpace": { "methods": [ "update_space" diff --git a/packages/google-apps-meet/google/apps/meet_v2beta/gapic_version.py b/packages/google-apps-meet/google/apps/meet_v2beta/gapic_version.py index 4b834789ba9e..7daf9a1dd221 100644 --- a/packages/google-apps-meet/google/apps/meet_v2beta/gapic_version.py +++ b/packages/google-apps-meet/google/apps/meet_v2beta/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.1.11" # {x-release-please-version} +__version__ = "0.1.13" # {x-release-please-version} diff --git a/packages/google-apps-meet/google/apps/meet_v2beta/services/conference_records_service/async_client.py b/packages/google-apps-meet/google/apps/meet_v2beta/services/conference_records_service/async_client.py index d87b689702c1..7da9b7eebc5a 100644 --- a/packages/google-apps-meet/google/apps/meet_v2beta/services/conference_records_service/async_client.py +++ b/packages/google-apps-meet/google/apps/meet_v2beta/services/conference_records_service/async_client.py @@ -337,9 +337,7 @@ async def get_conference_record( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> resource.ConferenceRecord: - r"""`Developer - Preview `__. - Gets a conference record by conference ID. + r"""Gets a conference record by conference ID. .. code-block:: python @@ -387,8 +385,8 @@ async def sample_get_conference_record(): Returns: google.apps.meet_v2beta.types.ConferenceRecord: - [Developer Preview](\ https://developers.google.com/workspace/preview). - Single instance of a meeting held in a space. + Single instance of a meeting held in + a space. """ # Create or coerce a protobuf request object. @@ -445,10 +443,8 @@ async def list_conference_records( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.ListConferenceRecordsAsyncPager: - r"""`Developer - Preview `__. - Lists the conference records by start time and in descending - order. + r"""Lists the conference records. By default, ordered by + start time and in descending order. .. code-block:: python @@ -543,9 +539,7 @@ async def get_participant( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> resource.Participant: - r"""`Developer - Preview `__. - Gets a participant by participant ID. + r"""Gets a participant by participant ID. .. code-block:: python @@ -575,7 +569,7 @@ async def sample_get_participant(): Args: request (Optional[Union[google.apps.meet_v2beta.types.GetParticipantRequest, dict]]): - The request object. Request to get a Participant. + The request object. Request to get a participant. name (:class:`str`): Required. Resource name of the participant. @@ -593,8 +587,8 @@ async def sample_get_participant(): Returns: google.apps.meet_v2beta.types.Participant: - [Developer Preview](\ https://developers.google.com/workspace/preview). - User who attended or is attending a conference. + User who attended or is attending a + conference. """ # Create or coerce a protobuf request object. 
@@ -652,9 +646,7 @@ async def list_participants( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.ListParticipantsAsyncPager: - r"""`Developer - Preview `__. - Lists the participants in a conference record, by default + r"""Lists the participants in a conference record. By default, ordered by join time and in descending order. This API supports ``fields`` as standard parameters like every other API. However, when the ``fields`` request parameter is omitted, this API @@ -689,7 +681,7 @@ async def sample_list_participants(): Args: request (Optional[Union[google.apps.meet_v2beta.types.ListParticipantsRequest, dict]]): - The request object. Request to fetch list of participant + The request object. Request to fetch list of participants per conference. parent (:class:`str`): Required. Format: @@ -781,9 +773,7 @@ async def get_participant_session( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> resource.ParticipantSession: - r"""`Developer - Preview `__. - Gets a participant session by participant session ID. + r"""Gets a participant session by participant session ID. .. code-block:: python @@ -831,14 +821,15 @@ async def sample_get_participant_session(): Returns: google.apps.meet_v2beta.types.ParticipantSession: - [Developer Preview](\ https://developers.google.com/workspace/preview). - Refers to each unique join/leave session when a user - joins a conference from a device. Note that any time - a user joins the conference a new unique ID is - assigned. That means if a user joins a space multiple - times from the same device, they're assigned - different IDs, and are also be treated as different - participant sessions. + Refers to each unique join or leave + session when a user joins a conference + from a device. Note that any time a user + joins the conference a new unique ID is + assigned. That means if a user joins a + space multiple times from the same + device, they're assigned different IDs, + and are also be treated as different + participant sessions. """ # Create or coerce a protobuf request object. @@ -896,13 +887,11 @@ async def list_participant_sessions( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.ListParticipantSessionsAsyncPager: - r"""`Developer - Preview `__. - Lists the participant sessions of a participant in a conference - record, by default ordered by join time and in descending order. - This API supports ``fields`` as standard parameters like every - other API. However, when the ``fields`` request parameter is - omitted this API defaults to + r"""Lists the participant sessions of a participant in a conference + record. By default, ordered by join time and in descending + order. This API supports ``fields`` as standard parameters like + every other API. However, when the ``fields`` request parameter + is omitted this API defaults to ``'participantsessions/*, next_page_token'``. .. code-block:: python @@ -935,7 +924,7 @@ async def sample_list_participant_sessions(): Args: request (Optional[Union[google.apps.meet_v2beta.types.ListParticipantSessionsRequest, dict]]): The request object. Request to fetch list of participant - sessions per conference record per + sessions per conference record, per participant. parent (:class:`str`): Required. 
Format: @@ -1027,9 +1016,7 @@ async def get_recording( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> resource.Recording: - r"""`Developer - Preview `__. - Gets a recording by recording ID. + r"""Gets a recording by recording ID. .. code-block:: python @@ -1078,9 +1065,8 @@ async def sample_get_recording(): Returns: google.apps.meet_v2beta.types.Recording: - [Developer Preview](\ https://developers.google.com/workspace/preview). - Metadata about a recording created during a - conference. + Metadata about a recording created + during a conference. """ # Create or coerce a protobuf request object. @@ -1138,9 +1124,9 @@ async def list_recordings( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.ListRecordingsAsyncPager: - r"""`Developer - Preview `__. - Lists the recording resources from the conference record. + r"""Lists the recording resources from the conference + record. By default, ordered by start time and in + ascending order. .. code-block:: python @@ -1262,9 +1248,7 @@ async def get_transcript( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> resource.Transcript: - r"""`Developer - Preview `__. - Gets a transcript by transcript ID. + r"""Gets a transcript by transcript ID. .. code-block:: python @@ -1312,11 +1296,10 @@ async def sample_get_transcript(): Returns: google.apps.meet_v2beta.types.Transcript: - [Developer Preview](\ https://developers.google.com/workspace/preview). - Metadata for a transcript generated from a - conference. It refers to the ASR (Automatic Speech - Recognition) result of user's speech during the - conference. + Metadata for a transcript generated + from a conference. It refers to the ASR + (Automatic Speech Recognition) result of + user's speech during the conference. """ # Create or coerce a protobuf request object. @@ -1374,9 +1357,9 @@ async def list_transcripts( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.ListTranscriptsAsyncPager: - r"""`Developer - Preview `__. - Lists the set of transcripts from the conference record. + r"""Lists the set of transcripts from the conference + record. By default, ordered by start time and in + ascending order. .. code-block:: python @@ -1498,9 +1481,7 @@ async def get_transcript_entry( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> resource.TranscriptEntry: - r"""`Developer - Preview `__. - Gets a ``TranscriptEntry`` resource by entry ID. + r"""Gets a ``TranscriptEntry`` resource by entry ID. Note: The transcript entries returned by the Google Meet API might not match the transcription found in the Google Docs @@ -1552,9 +1533,8 @@ async def sample_get_transcript_entry(): Returns: google.apps.meet_v2beta.types.TranscriptEntry: - [Developer Preview](\ https://developers.google.com/workspace/preview). - Single entry for one user’s speech during a - transcript session. + Single entry for one user’s speech + during a transcript session. """ # Create or coerce a protobuf request object. @@ -1612,15 +1592,15 @@ async def list_transcript_entries( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.ListTranscriptEntriesAsyncPager: - r"""`Developer - Preview `__. 
- Lists the structured transcript entries per transcript. By - default, ordered by start time and in ascending order. + r"""Lists the structured transcript entries per + transcript. By default, ordered by start time and in + ascending order. - Note: The transcript entries returned by the Google Meet API - might not match the transcription found in the Google Docs - transcript file. This can occur when the Google Docs transcript - file is modified after generation. + Note: The transcript entries returned by the Google Meet + API might not match the transcription found in the + Google Docs transcript file. This can occur when the + Google Docs transcript file is modified after + generation. .. code-block:: python @@ -1671,7 +1651,7 @@ async def sample_list_transcript_entries(): Returns: google.apps.meet_v2beta.services.conference_records_service.pagers.ListTranscriptEntriesAsyncPager: Response for ListTranscriptEntries - method + method. Iterating over this object will yield results and resolve additional pages automatically. diff --git a/packages/google-apps-meet/google/apps/meet_v2beta/services/conference_records_service/client.py b/packages/google-apps-meet/google/apps/meet_v2beta/services/conference_records_service/client.py index a9eeea0f72e2..57aae5a468e2 100644 --- a/packages/google-apps-meet/google/apps/meet_v2beta/services/conference_records_service/client.py +++ b/packages/google-apps-meet/google/apps/meet_v2beta/services/conference_records_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -602,6 +604,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -814,9 +843,7 @@ def get_conference_record( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> resource.ConferenceRecord: - r"""`Developer - Preview `__. - Gets a conference record by conference ID. + r"""Gets a conference record by conference ID. .. code-block:: python @@ -864,8 +891,8 @@ def sample_get_conference_record(): Returns: google.apps.meet_v2beta.types.ConferenceRecord: - [Developer Preview](\ https://developers.google.com/workspace/preview). - Single instance of a meeting held in a space. + Single instance of a meeting held in + a space. """ # Create or coerce a protobuf request object. 
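From the caller's side, the ``_add_cred_info_for_auth_errors`` helper added to the v2beta conference records client above might surface as extra error details. A sketch under that assumption (the resource name is illustrative, and the credential info only appears when google-auth >= 2.35.0 exposes ``get_cred_info``):

.. code-block:: python

    from google.api_core import exceptions as core_exceptions
    from google.apps import meet_v2beta


    def sample_inspect_auth_error():
        client = meet_v2beta.ConferenceRecordsServiceClient()
        try:
            client.get_conference_record(
                request=meet_v2beta.GetConferenceRecordRequest(
                    name="conferenceRecords/NONEXISTENT"
                )
            )
        except core_exceptions.GoogleAPICallError as exc:
            # The details may include a JSON string describing the credentials
            # used, in addition to the server-provided details.
            print(exc.code, exc.details)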
@@ -919,10 +946,8 @@ def list_conference_records( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.ListConferenceRecordsPager: - r"""`Developer - Preview `__. - Lists the conference records by start time and in descending - order. + r"""Lists the conference records. By default, ordered by + start time and in descending order. .. code-block:: python @@ -1015,9 +1040,7 @@ def get_participant( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> resource.Participant: - r"""`Developer - Preview `__. - Gets a participant by participant ID. + r"""Gets a participant by participant ID. .. code-block:: python @@ -1047,7 +1070,7 @@ def sample_get_participant(): Args: request (Union[google.apps.meet_v2beta.types.GetParticipantRequest, dict]): - The request object. Request to get a Participant. + The request object. Request to get a participant. name (str): Required. Resource name of the participant. @@ -1065,8 +1088,8 @@ def sample_get_participant(): Returns: google.apps.meet_v2beta.types.Participant: - [Developer Preview](\ https://developers.google.com/workspace/preview). - User who attended or is attending a conference. + User who attended or is attending a + conference. """ # Create or coerce a protobuf request object. @@ -1121,9 +1144,7 @@ def list_participants( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.ListParticipantsPager: - r"""`Developer - Preview `__. - Lists the participants in a conference record, by default + r"""Lists the participants in a conference record. By default, ordered by join time and in descending order. This API supports ``fields`` as standard parameters like every other API. However, when the ``fields`` request parameter is omitted, this API @@ -1158,7 +1179,7 @@ def sample_list_participants(): Args: request (Union[google.apps.meet_v2beta.types.ListParticipantsRequest, dict]): - The request object. Request to fetch list of participant + The request object. Request to fetch list of participants per conference. parent (str): Required. Format: @@ -1247,9 +1268,7 @@ def get_participant_session( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> resource.ParticipantSession: - r"""`Developer - Preview `__. - Gets a participant session by participant session ID. + r"""Gets a participant session by participant session ID. .. code-block:: python @@ -1297,14 +1316,15 @@ def sample_get_participant_session(): Returns: google.apps.meet_v2beta.types.ParticipantSession: - [Developer Preview](\ https://developers.google.com/workspace/preview). - Refers to each unique join/leave session when a user - joins a conference from a device. Note that any time - a user joins the conference a new unique ID is - assigned. That means if a user joins a space multiple - times from the same device, they're assigned - different IDs, and are also be treated as different - participant sessions. + Refers to each unique join or leave + session when a user joins a conference + from a device. Note that any time a user + joins the conference a new unique ID is + assigned. That means if a user joins a + space multiple times from the same + device, they're assigned different IDs, + and are also be treated as different + participant sessions. """ # Create or coerce a protobuf request object. 
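A short sketch of listing participants per the reworded docstring above (by default ordered by join time, descending). The parent value is illustrative; the request docs give the exact resource-name format.

.. code-block:: python

    from google.apps import meet_v2beta


    def sample_list_participants(conference_record_name: str):
        client = meet_v2beta.ConferenceRecordsServiceClient()

        pager = client.list_participants(
            request=meet_v2beta.ListParticipantsRequest(
                parent=conference_record_name
            )
        )
        # The pager yields participants and resolves additional pages
        # automatically while iterating.
        for participant in pager:
            print(participant.name)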
@@ -1359,13 +1379,11 @@ def list_participant_sessions( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.ListParticipantSessionsPager: - r"""`Developer - Preview `__. - Lists the participant sessions of a participant in a conference - record, by default ordered by join time and in descending order. - This API supports ``fields`` as standard parameters like every - other API. However, when the ``fields`` request parameter is - omitted this API defaults to + r"""Lists the participant sessions of a participant in a conference + record. By default, ordered by join time and in descending + order. This API supports ``fields`` as standard parameters like + every other API. However, when the ``fields`` request parameter + is omitted this API defaults to ``'participantsessions/*, next_page_token'``. .. code-block:: python @@ -1398,7 +1416,7 @@ def sample_list_participant_sessions(): Args: request (Union[google.apps.meet_v2beta.types.ListParticipantSessionsRequest, dict]): The request object. Request to fetch list of participant - sessions per conference record per + sessions per conference record, per participant. parent (str): Required. Format: @@ -1489,9 +1507,7 @@ def get_recording( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> resource.Recording: - r"""`Developer - Preview `__. - Gets a recording by recording ID. + r"""Gets a recording by recording ID. .. code-block:: python @@ -1540,9 +1556,8 @@ def sample_get_recording(): Returns: google.apps.meet_v2beta.types.Recording: - [Developer Preview](\ https://developers.google.com/workspace/preview). - Metadata about a recording created during a - conference. + Metadata about a recording created + during a conference. """ # Create or coerce a protobuf request object. @@ -1597,9 +1612,9 @@ def list_recordings( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.ListRecordingsPager: - r"""`Developer - Preview `__. - Lists the recording resources from the conference record. + r"""Lists the recording resources from the conference + record. By default, ordered by start time and in + ascending order. .. code-block:: python @@ -1718,9 +1733,7 @@ def get_transcript( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> resource.Transcript: - r"""`Developer - Preview `__. - Gets a transcript by transcript ID. + r"""Gets a transcript by transcript ID. .. code-block:: python @@ -1768,11 +1781,10 @@ def sample_get_transcript(): Returns: google.apps.meet_v2beta.types.Transcript: - [Developer Preview](\ https://developers.google.com/workspace/preview). - Metadata for a transcript generated from a - conference. It refers to the ASR (Automatic Speech - Recognition) result of user's speech during the - conference. + Metadata for a transcript generated + from a conference. It refers to the ASR + (Automatic Speech Recognition) result of + user's speech during the conference. """ # Create or coerce a protobuf request object. @@ -1827,9 +1839,9 @@ def list_transcripts( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.ListTranscriptsPager: - r"""`Developer - Preview `__. - Lists the set of transcripts from the conference record. + r"""Lists the set of transcripts from the conference + record. 
By default, ordered by start time and in + ascending order. .. code-block:: python @@ -1948,9 +1960,7 @@ def get_transcript_entry( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> resource.TranscriptEntry: - r"""`Developer - Preview `__. - Gets a ``TranscriptEntry`` resource by entry ID. + r"""Gets a ``TranscriptEntry`` resource by entry ID. Note: The transcript entries returned by the Google Meet API might not match the transcription found in the Google Docs @@ -2002,9 +2012,8 @@ def sample_get_transcript_entry(): Returns: google.apps.meet_v2beta.types.TranscriptEntry: - [Developer Preview](\ https://developers.google.com/workspace/preview). - Single entry for one user’s speech during a - transcript session. + Single entry for one user’s speech + during a transcript session. """ # Create or coerce a protobuf request object. @@ -2059,15 +2068,15 @@ def list_transcript_entries( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.ListTranscriptEntriesPager: - r"""`Developer - Preview `__. - Lists the structured transcript entries per transcript. By - default, ordered by start time and in ascending order. + r"""Lists the structured transcript entries per + transcript. By default, ordered by start time and in + ascending order. - Note: The transcript entries returned by the Google Meet API - might not match the transcription found in the Google Docs - transcript file. This can occur when the Google Docs transcript - file is modified after generation. + Note: The transcript entries returned by the Google Meet + API might not match the transcription found in the + Google Docs transcript file. This can occur when the + Google Docs transcript file is modified after + generation. .. code-block:: python @@ -2118,7 +2127,7 @@ def sample_list_transcript_entries(): Returns: google.apps.meet_v2beta.services.conference_records_service.pagers.ListTranscriptEntriesPager: Response for ListTranscriptEntries - method + method. Iterating over this object will yield results and resolve additional pages automatically. 
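A sketch of listing transcript entries, which the docstring above notes are ordered by start time, ascending, by default. The parent value is illustrative.

.. code-block:: python

    from google.apps import meet_v2beta


    def sample_list_transcript_entries(transcript_name: str):
        client = meet_v2beta.ConferenceRecordsServiceClient()

        for entry in client.list_transcript_entries(
            request=meet_v2beta.ListTranscriptEntriesRequest(parent=transcript_name)
        ):
            # Entries may drift from the Google Docs transcript file if that
            # file is edited after generation.
            print(entry)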
diff --git a/packages/google-apps-meet/google/apps/meet_v2beta/services/conference_records_service/transports/base.py b/packages/google-apps-meet/google/apps/meet_v2beta/services/conference_records_service/transports/base.py index 0c4a9f025404..f3427791a4f2 100644 --- a/packages/google-apps-meet/google/apps/meet_v2beta/services/conference_records_service/transports/base.py +++ b/packages/google-apps-meet/google/apps/meet_v2beta/services/conference_records_service/transports/base.py @@ -35,7 +35,10 @@ class ConferenceRecordsServiceTransport(abc.ABC): """Abstract transport class for ConferenceRecordsService.""" - AUTH_SCOPES = () + AUTH_SCOPES = ( + "https://www.googleapis.com/auth/meetings.space.created", + "https://www.googleapis.com/auth/meetings.space.readonly", + ) DEFAULT_HOST: str = "meet.googleapis.com" diff --git a/packages/google-apps-meet/google/apps/meet_v2beta/services/conference_records_service/transports/grpc.py b/packages/google-apps-meet/google/apps/meet_v2beta/services/conference_records_service/transports/grpc.py index 7d9245780b3d..ffc9a7965ceb 100644 --- a/packages/google-apps-meet/google/apps/meet_v2beta/services/conference_records_service/transports/grpc.py +++ b/packages/google-apps-meet/google/apps/meet_v2beta/services/conference_records_service/transports/grpc.py @@ -326,8 +326,6 @@ def get_conference_record( ) -> Callable[[service.GetConferenceRecordRequest], resource.ConferenceRecord]: r"""Return a callable for the get conference record method over gRPC. - `Developer - Preview `__. Gets a conference record by conference ID. Returns: @@ -356,10 +354,8 @@ def list_conference_records( ]: r"""Return a callable for the list conference records method over gRPC. - `Developer - Preview `__. - Lists the conference records by start time and in descending - order. + Lists the conference records. By default, ordered by + start time and in descending order. Returns: Callable[[~.ListConferenceRecordsRequest], @@ -385,8 +381,6 @@ def get_participant( ) -> Callable[[service.GetParticipantRequest], resource.Participant]: r"""Return a callable for the get participant method over gRPC. - `Developer - Preview `__. Gets a participant by participant ID. Returns: @@ -413,9 +407,7 @@ def list_participants( ) -> Callable[[service.ListParticipantsRequest], service.ListParticipantsResponse]: r"""Return a callable for the list participants method over gRPC. - `Developer - Preview `__. - Lists the participants in a conference record, by default + Lists the participants in a conference record. By default, ordered by join time and in descending order. This API supports ``fields`` as standard parameters like every other API. However, when the ``fields`` request parameter is omitted, this API @@ -445,8 +437,6 @@ def get_participant_session( ) -> Callable[[service.GetParticipantSessionRequest], resource.ParticipantSession]: r"""Return a callable for the get participant session method over gRPC. - `Developer - Preview `__. Gets a participant session by participant session ID. Returns: @@ -476,13 +466,11 @@ def list_participant_sessions( ]: r"""Return a callable for the list participant sessions method over gRPC. - `Developer - Preview `__. Lists the participant sessions of a participant in a conference - record, by default ordered by join time and in descending order. - This API supports ``fields`` as standard parameters like every - other API. However, when the ``fields`` request parameter is - omitted this API defaults to + record. By default, ordered by join time and in descending + order. 
This API supports ``fields`` as standard parameters like + every other API. However, when the ``fields`` request parameter + is omitted this API defaults to ``'participantsessions/*, next_page_token'``. Returns: @@ -509,8 +497,6 @@ def get_recording( ) -> Callable[[service.GetRecordingRequest], resource.Recording]: r"""Return a callable for the get recording method over gRPC. - `Developer - Preview `__. Gets a recording by recording ID. Returns: @@ -537,9 +523,9 @@ def list_recordings( ) -> Callable[[service.ListRecordingsRequest], service.ListRecordingsResponse]: r"""Return a callable for the list recordings method over gRPC. - `Developer - Preview `__. - Lists the recording resources from the conference record. + Lists the recording resources from the conference + record. By default, ordered by start time and in + ascending order. Returns: Callable[[~.ListRecordingsRequest], @@ -565,8 +551,6 @@ def get_transcript( ) -> Callable[[service.GetTranscriptRequest], resource.Transcript]: r"""Return a callable for the get transcript method over gRPC. - `Developer - Preview `__. Gets a transcript by transcript ID. Returns: @@ -593,9 +577,9 @@ def list_transcripts( ) -> Callable[[service.ListTranscriptsRequest], service.ListTranscriptsResponse]: r"""Return a callable for the list transcripts method over gRPC. - `Developer - Preview `__. - Lists the set of transcripts from the conference record. + Lists the set of transcripts from the conference + record. By default, ordered by start time and in + ascending order. Returns: Callable[[~.ListTranscriptsRequest], @@ -621,8 +605,6 @@ def get_transcript_entry( ) -> Callable[[service.GetTranscriptEntryRequest], resource.TranscriptEntry]: r"""Return a callable for the get transcript entry method over gRPC. - `Developer - Preview `__. Gets a ``TranscriptEntry`` resource by entry ID. Note: The transcript entries returned by the Google Meet API @@ -656,15 +638,15 @@ def list_transcript_entries( ]: r"""Return a callable for the list transcript entries method over gRPC. - `Developer - Preview `__. - Lists the structured transcript entries per transcript. By - default, ordered by start time and in ascending order. + Lists the structured transcript entries per + transcript. By default, ordered by start time and in + ascending order. - Note: The transcript entries returned by the Google Meet API - might not match the transcription found in the Google Docs - transcript file. This can occur when the Google Docs transcript - file is modified after generation. + Note: The transcript entries returned by the Google Meet + API might not match the transcription found in the + Google Docs transcript file. This can occur when the + Google Docs transcript file is modified after + generation. Returns: Callable[[~.ListTranscriptEntriesRequest], diff --git a/packages/google-apps-meet/google/apps/meet_v2beta/services/conference_records_service/transports/grpc_asyncio.py b/packages/google-apps-meet/google/apps/meet_v2beta/services/conference_records_service/transports/grpc_asyncio.py index ecee5c67d7dd..29c253a87b36 100644 --- a/packages/google-apps-meet/google/apps/meet_v2beta/services/conference_records_service/transports/grpc_asyncio.py +++ b/packages/google-apps-meet/google/apps/meet_v2beta/services/conference_records_service/transports/grpc_asyncio.py @@ -335,8 +335,6 @@ def get_conference_record( ]: r"""Return a callable for the get conference record method over gRPC. - `Developer - Preview `__. Gets a conference record by conference ID. 
Returns: @@ -366,10 +364,8 @@ def list_conference_records( ]: r"""Return a callable for the list conference records method over gRPC. - `Developer - Preview `__. - Lists the conference records by start time and in descending - order. + Lists the conference records. By default, ordered by + start time and in descending order. Returns: Callable[[~.ListConferenceRecordsRequest], @@ -395,8 +391,6 @@ def get_participant( ) -> Callable[[service.GetParticipantRequest], Awaitable[resource.Participant]]: r"""Return a callable for the get participant method over gRPC. - `Developer - Preview `__. Gets a participant by participant ID. Returns: @@ -425,9 +419,7 @@ def list_participants( ]: r"""Return a callable for the list participants method over gRPC. - `Developer - Preview `__. - Lists the participants in a conference record, by default + Lists the participants in a conference record. By default, ordered by join time and in descending order. This API supports ``fields`` as standard parameters like every other API. However, when the ``fields`` request parameter is omitted, this API @@ -459,8 +451,6 @@ def get_participant_session( ]: r"""Return a callable for the get participant session method over gRPC. - `Developer - Preview `__. Gets a participant session by participant session ID. Returns: @@ -490,13 +480,11 @@ def list_participant_sessions( ]: r"""Return a callable for the list participant sessions method over gRPC. - `Developer - Preview `__. Lists the participant sessions of a participant in a conference - record, by default ordered by join time and in descending order. - This API supports ``fields`` as standard parameters like every - other API. However, when the ``fields`` request parameter is - omitted this API defaults to + record. By default, ordered by join time and in descending + order. This API supports ``fields`` as standard parameters like + every other API. However, when the ``fields`` request parameter + is omitted this API defaults to ``'participantsessions/*, next_page_token'``. Returns: @@ -523,8 +511,6 @@ def get_recording( ) -> Callable[[service.GetRecordingRequest], Awaitable[resource.Recording]]: r"""Return a callable for the get recording method over gRPC. - `Developer - Preview `__. Gets a recording by recording ID. Returns: @@ -553,9 +539,9 @@ def list_recordings( ]: r"""Return a callable for the list recordings method over gRPC. - `Developer - Preview `__. - Lists the recording resources from the conference record. + Lists the recording resources from the conference + record. By default, ordered by start time and in + ascending order. Returns: Callable[[~.ListRecordingsRequest], @@ -581,8 +567,6 @@ def get_transcript( ) -> Callable[[service.GetTranscriptRequest], Awaitable[resource.Transcript]]: r"""Return a callable for the get transcript method over gRPC. - `Developer - Preview `__. Gets a transcript by transcript ID. Returns: @@ -611,9 +595,9 @@ def list_transcripts( ]: r"""Return a callable for the list transcripts method over gRPC. - `Developer - Preview `__. - Lists the set of transcripts from the conference record. + Lists the set of transcripts from the conference + record. By default, ordered by start time and in + ascending order. Returns: Callable[[~.ListTranscriptsRequest], @@ -641,8 +625,6 @@ def get_transcript_entry( ]: r"""Return a callable for the get transcript entry method over gRPC. - `Developer - Preview `__. Gets a ``TranscriptEntry`` resource by entry ID. 
Note: The transcript entries returned by the Google Meet API @@ -677,15 +659,15 @@ def list_transcript_entries( ]: r"""Return a callable for the list transcript entries method over gRPC. - `Developer - Preview `__. - Lists the structured transcript entries per transcript. By - default, ordered by start time and in ascending order. + Lists the structured transcript entries per + transcript. By default, ordered by start time and in + ascending order. - Note: The transcript entries returned by the Google Meet API - might not match the transcription found in the Google Docs - transcript file. This can occur when the Google Docs transcript - file is modified after generation. + Note: The transcript entries returned by the Google Meet + API might not match the transcription found in the + Google Docs transcript file. This can occur when the + Google Docs transcript file is modified after + generation. Returns: Callable[[~.ListTranscriptEntriesRequest], diff --git a/packages/google-apps-meet/google/apps/meet_v2beta/services/conference_records_service/transports/rest.py b/packages/google-apps-meet/google/apps/meet_v2beta/services/conference_records_service/transports/rest.py index 8d2835db8005..a718fb67d700 100644 --- a/packages/google-apps-meet/google/apps/meet_v2beta/services/conference_records_service/transports/rest.py +++ b/packages/google-apps-meet/google/apps/meet_v2beta/services/conference_records_service/transports/rest.py @@ -189,12 +189,35 @@ def post_get_conference_record( ) -> resource.ConferenceRecord: """Post-rpc interceptor for get_conference_record - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_conference_record_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ConferenceRecordsService server but before - it is returned to user code. + it is returned to user code. This `post_get_conference_record` interceptor runs + before the `post_get_conference_record_with_metadata` interceptor. """ return response + def post_get_conference_record_with_metadata( + self, + response: resource.ConferenceRecord, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resource.ConferenceRecord, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_conference_record + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ConferenceRecordsService server but before it is returned to user code. + + We recommend only using this `post_get_conference_record_with_metadata` + interceptor in new development instead of the `post_get_conference_record` interceptor. + When both interceptors are used, this `post_get_conference_record_with_metadata` interceptor runs after the + `post_get_conference_record` interceptor. The (possibly modified) response returned by + `post_get_conference_record` will be passed to + `post_get_conference_record_with_metadata`. + """ + return response, metadata + def pre_get_participant( self, request: service.GetParticipantRequest, @@ -212,12 +235,35 @@ def post_get_participant( ) -> resource.Participant: """Post-rpc interceptor for get_participant - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_participant_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ConferenceRecordsService server but before - it is returned to user code. + it is returned to user code. 
This `post_get_participant` interceptor runs + before the `post_get_participant_with_metadata` interceptor. """ return response + def post_get_participant_with_metadata( + self, + response: resource.Participant, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resource.Participant, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_participant + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ConferenceRecordsService server but before it is returned to user code. + + We recommend only using this `post_get_participant_with_metadata` + interceptor in new development instead of the `post_get_participant` interceptor. + When both interceptors are used, this `post_get_participant_with_metadata` interceptor runs after the + `post_get_participant` interceptor. The (possibly modified) response returned by + `post_get_participant` will be passed to + `post_get_participant_with_metadata`. + """ + return response, metadata + def pre_get_participant_session( self, request: service.GetParticipantSessionRequest, @@ -237,12 +283,35 @@ def post_get_participant_session( ) -> resource.ParticipantSession: """Post-rpc interceptor for get_participant_session - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_participant_session_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ConferenceRecordsService server but before - it is returned to user code. + it is returned to user code. This `post_get_participant_session` interceptor runs + before the `post_get_participant_session_with_metadata` interceptor. """ return response + def post_get_participant_session_with_metadata( + self, + response: resource.ParticipantSession, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resource.ParticipantSession, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_participant_session + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ConferenceRecordsService server but before it is returned to user code. + + We recommend only using this `post_get_participant_session_with_metadata` + interceptor in new development instead of the `post_get_participant_session` interceptor. + When both interceptors are used, this `post_get_participant_session_with_metadata` interceptor runs after the + `post_get_participant_session` interceptor. The (possibly modified) response returned by + `post_get_participant_session` will be passed to + `post_get_participant_session_with_metadata`. + """ + return response, metadata + def pre_get_recording( self, request: service.GetRecordingRequest, @@ -258,12 +327,35 @@ def pre_get_recording( def post_get_recording(self, response: resource.Recording) -> resource.Recording: """Post-rpc interceptor for get_recording - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_recording_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ConferenceRecordsService server but before - it is returned to user code. + it is returned to user code. This `post_get_recording` interceptor runs + before the `post_get_recording_with_metadata` interceptor. 
""" return response + def post_get_recording_with_metadata( + self, + response: resource.Recording, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resource.Recording, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_recording + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ConferenceRecordsService server but before it is returned to user code. + + We recommend only using this `post_get_recording_with_metadata` + interceptor in new development instead of the `post_get_recording` interceptor. + When both interceptors are used, this `post_get_recording_with_metadata` interceptor runs after the + `post_get_recording` interceptor. The (possibly modified) response returned by + `post_get_recording` will be passed to + `post_get_recording_with_metadata`. + """ + return response, metadata + def pre_get_transcript( self, request: service.GetTranscriptRequest, @@ -279,12 +371,35 @@ def pre_get_transcript( def post_get_transcript(self, response: resource.Transcript) -> resource.Transcript: """Post-rpc interceptor for get_transcript - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_transcript_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ConferenceRecordsService server but before - it is returned to user code. + it is returned to user code. This `post_get_transcript` interceptor runs + before the `post_get_transcript_with_metadata` interceptor. """ return response + def post_get_transcript_with_metadata( + self, + response: resource.Transcript, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resource.Transcript, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_transcript + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ConferenceRecordsService server but before it is returned to user code. + + We recommend only using this `post_get_transcript_with_metadata` + interceptor in new development instead of the `post_get_transcript` interceptor. + When both interceptors are used, this `post_get_transcript_with_metadata` interceptor runs after the + `post_get_transcript` interceptor. The (possibly modified) response returned by + `post_get_transcript` will be passed to + `post_get_transcript_with_metadata`. + """ + return response, metadata + def pre_get_transcript_entry( self, request: service.GetTranscriptEntryRequest, @@ -304,12 +419,35 @@ def post_get_transcript_entry( ) -> resource.TranscriptEntry: """Post-rpc interceptor for get_transcript_entry - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_transcript_entry_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ConferenceRecordsService server but before - it is returned to user code. + it is returned to user code. This `post_get_transcript_entry` interceptor runs + before the `post_get_transcript_entry_with_metadata` interceptor. 
""" return response + def post_get_transcript_entry_with_metadata( + self, + response: resource.TranscriptEntry, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resource.TranscriptEntry, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_transcript_entry + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ConferenceRecordsService server but before it is returned to user code. + + We recommend only using this `post_get_transcript_entry_with_metadata` + interceptor in new development instead of the `post_get_transcript_entry` interceptor. + When both interceptors are used, this `post_get_transcript_entry_with_metadata` interceptor runs after the + `post_get_transcript_entry` interceptor. The (possibly modified) response returned by + `post_get_transcript_entry` will be passed to + `post_get_transcript_entry_with_metadata`. + """ + return response, metadata + def pre_list_conference_records( self, request: service.ListConferenceRecordsRequest, @@ -329,12 +467,37 @@ def post_list_conference_records( ) -> service.ListConferenceRecordsResponse: """Post-rpc interceptor for list_conference_records - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_conference_records_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ConferenceRecordsService server but before - it is returned to user code. + it is returned to user code. This `post_list_conference_records` interceptor runs + before the `post_list_conference_records_with_metadata` interceptor. """ return response + def post_list_conference_records_with_metadata( + self, + response: service.ListConferenceRecordsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + service.ListConferenceRecordsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_conference_records + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ConferenceRecordsService server but before it is returned to user code. + + We recommend only using this `post_list_conference_records_with_metadata` + interceptor in new development instead of the `post_list_conference_records` interceptor. + When both interceptors are used, this `post_list_conference_records_with_metadata` interceptor runs after the + `post_list_conference_records` interceptor. The (possibly modified) response returned by + `post_list_conference_records` will be passed to + `post_list_conference_records_with_metadata`. + """ + return response, metadata + def pre_list_participants( self, request: service.ListParticipantsRequest, @@ -354,12 +517,37 @@ def post_list_participants( ) -> service.ListParticipantsResponse: """Post-rpc interceptor for list_participants - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_participants_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ConferenceRecordsService server but before - it is returned to user code. + it is returned to user code. This `post_list_participants` interceptor runs + before the `post_list_participants_with_metadata` interceptor. 
""" return response + def post_list_participants_with_metadata( + self, + response: service.ListParticipantsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + service.ListParticipantsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_participants + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ConferenceRecordsService server but before it is returned to user code. + + We recommend only using this `post_list_participants_with_metadata` + interceptor in new development instead of the `post_list_participants` interceptor. + When both interceptors are used, this `post_list_participants_with_metadata` interceptor runs after the + `post_list_participants` interceptor. The (possibly modified) response returned by + `post_list_participants` will be passed to + `post_list_participants_with_metadata`. + """ + return response, metadata + def pre_list_participant_sessions( self, request: service.ListParticipantSessionsRequest, @@ -379,12 +567,37 @@ def post_list_participant_sessions( ) -> service.ListParticipantSessionsResponse: """Post-rpc interceptor for list_participant_sessions - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_participant_sessions_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ConferenceRecordsService server but before - it is returned to user code. + it is returned to user code. This `post_list_participant_sessions` interceptor runs + before the `post_list_participant_sessions_with_metadata` interceptor. """ return response + def post_list_participant_sessions_with_metadata( + self, + response: service.ListParticipantSessionsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + service.ListParticipantSessionsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_participant_sessions + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ConferenceRecordsService server but before it is returned to user code. + + We recommend only using this `post_list_participant_sessions_with_metadata` + interceptor in new development instead of the `post_list_participant_sessions` interceptor. + When both interceptors are used, this `post_list_participant_sessions_with_metadata` interceptor runs after the + `post_list_participant_sessions` interceptor. The (possibly modified) response returned by + `post_list_participant_sessions` will be passed to + `post_list_participant_sessions_with_metadata`. + """ + return response, metadata + def pre_list_recordings( self, request: service.ListRecordingsRequest, @@ -402,12 +615,35 @@ def post_list_recordings( ) -> service.ListRecordingsResponse: """Post-rpc interceptor for list_recordings - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_recordings_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ConferenceRecordsService server but before - it is returned to user code. + it is returned to user code. This `post_list_recordings` interceptor runs + before the `post_list_recordings_with_metadata` interceptor. 
""" return response + def post_list_recordings_with_metadata( + self, + response: service.ListRecordingsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[service.ListRecordingsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_recordings + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ConferenceRecordsService server but before it is returned to user code. + + We recommend only using this `post_list_recordings_with_metadata` + interceptor in new development instead of the `post_list_recordings` interceptor. + When both interceptors are used, this `post_list_recordings_with_metadata` interceptor runs after the + `post_list_recordings` interceptor. The (possibly modified) response returned by + `post_list_recordings` will be passed to + `post_list_recordings_with_metadata`. + """ + return response, metadata + def pre_list_transcript_entries( self, request: service.ListTranscriptEntriesRequest, @@ -427,12 +663,37 @@ def post_list_transcript_entries( ) -> service.ListTranscriptEntriesResponse: """Post-rpc interceptor for list_transcript_entries - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_transcript_entries_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ConferenceRecordsService server but before - it is returned to user code. + it is returned to user code. This `post_list_transcript_entries` interceptor runs + before the `post_list_transcript_entries_with_metadata` interceptor. """ return response + def post_list_transcript_entries_with_metadata( + self, + response: service.ListTranscriptEntriesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + service.ListTranscriptEntriesResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_transcript_entries + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ConferenceRecordsService server but before it is returned to user code. + + We recommend only using this `post_list_transcript_entries_with_metadata` + interceptor in new development instead of the `post_list_transcript_entries` interceptor. + When both interceptors are used, this `post_list_transcript_entries_with_metadata` interceptor runs after the + `post_list_transcript_entries` interceptor. The (possibly modified) response returned by + `post_list_transcript_entries` will be passed to + `post_list_transcript_entries_with_metadata`. + """ + return response, metadata + def pre_list_transcripts( self, request: service.ListTranscriptsRequest, @@ -450,12 +711,37 @@ def post_list_transcripts( ) -> service.ListTranscriptsResponse: """Post-rpc interceptor for list_transcripts - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_transcripts_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ConferenceRecordsService server but before - it is returned to user code. + it is returned to user code. This `post_list_transcripts` interceptor runs + before the `post_list_transcripts_with_metadata` interceptor. 
""" return response + def post_list_transcripts_with_metadata( + self, + response: service.ListTranscriptsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + service.ListTranscriptsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_transcripts + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ConferenceRecordsService server but before it is returned to user code. + + We recommend only using this `post_list_transcripts_with_metadata` + interceptor in new development instead of the `post_list_transcripts` interceptor. + When both interceptors are used, this `post_list_transcripts_with_metadata` interceptor runs after the + `post_list_transcripts` interceptor. The (possibly modified) response returned by + `post_list_transcripts` will be passed to + `post_list_transcripts_with_metadata`. + """ + return response, metadata + @dataclasses.dataclass class ConferenceRecordsServiceRestStub: @@ -595,9 +881,8 @@ def __call__( Returns: ~.resource.ConferenceRecord: - `Developer - Preview `__. - Single instance of a meeting held in a space. + Single instance of a meeting held in + a space. """ @@ -666,6 +951,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_conference_record(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_conference_record_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -730,7 +1019,7 @@ def __call__( Args: request (~.service.GetParticipantRequest): - The request object. Request to get a Participant. + The request object. Request to get a participant. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -741,9 +1030,8 @@ def __call__( Returns: ~.resource.Participant: - `Developer - Preview `__. - User who attended or is attending a conference. + User who attended or is attending a + conference. """ @@ -812,6 +1100,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_participant(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_participant_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -887,14 +1179,15 @@ def __call__( Returns: ~.resource.ParticipantSession: - `Developer - Preview `__. - Refers to each unique join/leave session when a user - joins a conference from a device. Note that any time a - user joins the conference a new unique ID is assigned. - That means if a user joins a space multiple times from - the same device, they're assigned different IDs, and are - also be treated as different participant sessions. + Refers to each unique join or leave + session when a user joins a conference + from a device. Note that any time a user + joins the conference a new unique ID is + assigned. That means if a user joins a + space multiple times from the same + device, they're assigned different IDs, + and are also be treated as different + participant sessions. 
""" @@ -963,6 +1256,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_participant_session(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_participant_session_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1039,9 +1336,8 @@ def __call__( Returns: ~.resource.Recording: - `Developer - Preview `__. - Metadata about a recording created during a conference. + Metadata about a recording created + during a conference. """ @@ -1110,6 +1406,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_recording(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_recording_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1185,11 +1485,10 @@ def __call__( Returns: ~.resource.Transcript: - `Developer - Preview `__. - Metadata for a transcript generated from a conference. - It refers to the ASR (Automatic Speech Recognition) - result of user's speech during the conference. + Metadata for a transcript generated + from a conference. It refers to the ASR + (Automatic Speech Recognition) result of + user's speech during the conference. """ @@ -1258,6 +1557,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_transcript(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_transcript_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1334,10 +1637,8 @@ def __call__( Returns: ~.resource.TranscriptEntry: - `Developer - Preview `__. - Single entry for one user’s speech during a transcript - session. + Single entry for one user’s speech + during a transcript session. """ @@ -1408,6 +1709,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_transcript_entry(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_transcript_entry_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1554,6 +1859,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_conference_records(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_conference_records_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1620,7 +1929,7 @@ def __call__( Args: request (~.service.ListParticipantsRequest): - The request object. Request to fetch list of participant + The request object. Request to fetch list of participants per conference. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. 
@@ -1702,6 +2011,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_participants(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_participants_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1769,7 +2082,7 @@ def __call__( Args: request (~.service.ListParticipantSessionsRequest): The request object. Request to fetch list of participant - sessions per conference record per + sessions per conference record, per participant. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. @@ -1849,6 +2162,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_participant_sessions(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_participant_sessions_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1994,6 +2311,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_recordings(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_recordings_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2071,7 +2392,7 @@ def __call__( Returns: ~.service.ListTranscriptEntriesResponse: Response for ListTranscriptEntries - method + method. """ @@ -2140,6 +2461,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_transcript_entries(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_transcript_entries_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2287,6 +2612,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_transcripts(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_transcripts_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-apps-meet/google/apps/meet_v2beta/services/spaces_service/async_client.py b/packages/google-apps-meet/google/apps/meet_v2beta/services/spaces_service/async_client.py index 6f69c1d7dc59..ae7be2d55b03 100644 --- a/packages/google-apps-meet/google/apps/meet_v2beta/services/spaces_service/async_client.py +++ b/packages/google-apps-meet/google/apps/meet_v2beta/services/spaces_service/async_client.py @@ -45,6 +45,7 @@ from google.protobuf import field_mask_pb2 # type: ignore +from google.apps.meet_v2beta.services.spaces_service import pagers from google.apps.meet_v2beta.types import resource, service from .client import SpacesServiceClient @@ -77,6 +78,8 @@ class SpacesServiceAsyncClient: parse_conference_record_path = staticmethod( SpacesServiceClient.parse_conference_record_path ) + member_path = staticmethod(SpacesServiceClient.member_path) + parse_member_path = 
staticmethod(SpacesServiceClient.parse_member_path) space_path = staticmethod(SpacesServiceClient.space_path) parse_space_path = staticmethod(SpacesServiceClient.parse_space_path) common_billing_account_path = staticmethod( @@ -300,9 +303,7 @@ async def create_space( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> resource.Space: - r"""`Developer - Preview `__. - Creates a space. + r"""Creates a space. .. code-block:: python @@ -351,10 +352,9 @@ async def sample_create_space(): Returns: google.apps.meet_v2beta.types.Space: - [Developer Preview](\ https://developers.google.com/workspace/preview). - Virtual place where conferences are held. Only one - active conference can be held in one space at any - given time. + Virtual place where conferences are + held. Only one active conference can be + held in one space at any given time. """ # Create or coerce a protobuf request object. @@ -406,9 +406,10 @@ async def get_space( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> resource.Space: - r"""`Developer - Preview `__. - Gets a space by ``space_id`` or ``meeting_code``. + r"""Gets details about a meeting space. + + For an example, see `Get a meeting + space `__. .. code-block:: python @@ -441,6 +442,29 @@ async def sample_get_space(): The request object. Request to get a space. name (:class:`str`): Required. Resource name of the space. + + Format: ``spaces/{space}`` or ``spaces/{meetingCode}``. + + ``{space}`` is the resource identifier for the space. + It's a unique, server-generated ID and is case + sensitive. For example, ``jQCFfuBOdN5z``. + + ``{meetingCode}`` is an alias for the space. It's a + typeable, unique character string and is non-case + sensitive. For example, ``abc-mnop-xyz``. The maximum + length is 128 characters. + + A ``meetingCode`` shouldn't be stored long term as it + can become dissociated from a meeting space and can be + reused for different meeting spaces in the future. + Generally, a ``meetingCode`` expires 365 days after last + use. For more information, see `Learn about meeting + codes in Google + Meet `__. + + For more information, see `How Meet identifies a meeting + space `__. + This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -454,10 +478,9 @@ async def sample_get_space(): Returns: google.apps.meet_v2beta.types.Space: - [Developer Preview](\ https://developers.google.com/workspace/preview). - Virtual place where conferences are held. Only one - active conference can be held in one space at any - given time. + Virtual place where conferences are + held. Only one active conference can be + held in one space at any given time. """ # Create or coerce a protobuf request object. @@ -516,9 +539,10 @@ async def update_space( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> resource.Space: - r"""`Developer - Preview `__. - Updates a space. + r"""Updates details about a meeting space. + + For an example, see `Update a meeting + space `__. .. code-block:: python @@ -555,9 +579,11 @@ async def sample_update_space(): should not be set. update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): Optional. Field mask used to specify the fields to be - updated in the space. 
If update_mask isn't provided, it - defaults to '*' and updates all fields provided in the - request, including deleting fields not set in the + updated in the space. If update_mask isn't provided(not + set, set with empty paths, or only has "" as paths), it + defaults to update all fields provided with values in + the request. Using "*" as update_mask will update all + fields, including deleting fields not set in the request. This corresponds to the ``update_mask`` field @@ -573,10 +599,9 @@ async def sample_update_space(): Returns: google.apps.meet_v2beta.types.Space: - [Developer Preview](\ https://developers.google.com/workspace/preview). - Virtual place where conferences are held. Only one - active conference can be held in one space at any - given time. + Virtual place where conferences are + held. Only one active conference can be + held in one space at any given time. """ # Create or coerce a protobuf request object. @@ -638,9 +663,10 @@ async def end_active_conference( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""`Developer - Preview `__. - Ends an active conference (if there is one). + r"""Ends an active conference (if there's one). + + For an example, see `End active + conference `__. .. code-block:: python @@ -671,6 +697,16 @@ async def sample_end_active_conference(): of a space. name (:class:`str`): Required. Resource name of the space. + + Format: ``spaces/{space}``. + + ``{space}`` is the resource identifier for the space. + It's a unique, server-generated ID and is case + sensitive. For example, ``jQCFfuBOdN5z``. + + For more information, see `How Meet identifies a meeting + space `__. + This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -725,6 +761,470 @@ async def sample_end_active_conference(): metadata=metadata, ) + async def create_member( + self, + request: Optional[Union[service.CreateMemberRequest, dict]] = None, + *, + parent: Optional[str] = None, + member: Optional[resource.Member] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> resource.Member: + r"""`Developer + Preview `__: + Create a member. + + This API supports the ``fields`` parameter in + `SystemParameterContext `__. + When the ``fields`` parameter is omitted, this API response will + default to "name,email,role,user". + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.apps import meet_v2beta + + async def sample_create_member(): + # Create a client + client = meet_v2beta.SpacesServiceAsyncClient() + + # Initialize request argument(s) + request = meet_v2beta.CreateMemberRequest( + parent="parent_value", + ) + + # Make the request + response = await client.create_member(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.apps.meet_v2beta.types.CreateMemberRequest, dict]]): + The request object. Request to create a member for a + space. + parent (:class:`str`): + Required. 
Format: spaces/{space} + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + member (:class:`google.apps.meet_v2beta.types.Member`): + Required. The member to be created. + This corresponds to the ``member`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.apps.meet_v2beta.types.Member: + Users who are configured to have a + role in the space. These users can join + the space without knocking. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, member]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.CreateMemberRequest): + request = service.CreateMemberRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if member is not None: + request.member = member + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_member + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_member( + self, + request: Optional[Union[service.GetMemberRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> resource.Member: + r"""`Developer + Preview `__: + Get a member. + + This API supports the ``fields`` parameter in + `SystemParameterContext `__. + When the ``fields`` parameter is omitted, this API response will + default to "name,email,role,user". + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.apps import meet_v2beta + + async def sample_get_member(): + # Create a client + client = meet_v2beta.SpacesServiceAsyncClient() + + # Initialize request argument(s) + request = meet_v2beta.GetMemberRequest( + name="name_value", + ) + + # Make the request + response = await client.get_member(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.apps.meet_v2beta.types.GetMemberRequest, dict]]): + The request object. Request to get a member from a space. + name (:class:`str`): + Required. Format: + “spaces/{space}/members/{member}” + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.apps.meet_v2beta.types.Member: + Users who are configured to have a + role in the space. These users can join + the space without knocking. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.GetMemberRequest): + request = service.GetMemberRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_member + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_members( + self, + request: Optional[Union[service.ListMembersRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListMembersAsyncPager: + r"""`Developer + Preview `__: + List members. + + This API supports the ``fields`` parameter in + `SystemParameterContext `__. + When the ``fields`` parameter is omitted this API response will + default to "name,email,role,user". + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.apps import meet_v2beta + + async def sample_list_members(): + # Create a client + client = meet_v2beta.SpacesServiceAsyncClient() + + # Initialize request argument(s) + request = meet_v2beta.ListMembersRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_members(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.apps.meet_v2beta.types.ListMembersRequest, dict]]): + The request object. Request to list all members of a + space. + parent (:class:`str`): + Required. Format: spaces/{space} + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.apps.meet_v2beta.services.spaces_service.pagers.ListMembersAsyncPager: + Response of list members. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.ListMembersRequest): + request = service.ListMembersRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_members + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListMembersAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def delete_member( + self, + request: Optional[Union[service.DeleteMemberRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""`Developer + Preview `__: + Delete the member who was previously assigned roles in the + space. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.apps import meet_v2beta + + async def sample_delete_member(): + # Create a client + client = meet_v2beta.SpacesServiceAsyncClient() + + # Initialize request argument(s) + request = meet_v2beta.DeleteMemberRequest( + name="name_value", + ) + + # Make the request + await client.delete_member(request=request) + + Args: + request (Optional[Union[google.apps.meet_v2beta.types.DeleteMemberRequest, dict]]): + The request object. Request to delete a member from a + space. + name (:class:`str`): + Required. Format: + “spaces/{space}/members/{member}” + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.DeleteMemberRequest): + request = service.DeleteMemberRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_member + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. 
+ await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + async def __aenter__(self) -> "SpacesServiceAsyncClient": return self diff --git a/packages/google-apps-meet/google/apps/meet_v2beta/services/spaces_service/client.py b/packages/google-apps-meet/google/apps/meet_v2beta/services/spaces_service/client.py index 407004ba42b9..4c0fc0cf3c9f 100644 --- a/packages/google-apps-meet/google/apps/meet_v2beta/services/spaces_service/client.py +++ b/packages/google-apps-meet/google/apps/meet_v2beta/services/spaces_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -60,6 +62,7 @@ from google.protobuf import field_mask_pb2 # type: ignore +from google.apps.meet_v2beta.services.spaces_service import pagers from google.apps.meet_v2beta.types import resource, service from .transports.base import DEFAULT_CLIENT_INFO, SpacesServiceTransport @@ -207,6 +210,23 @@ def parse_conference_record_path(path: str) -> Dict[str, str]: m = re.match(r"^conferenceRecords/(?P.+?)$", path) return m.groupdict() if m else {} + @staticmethod + def member_path( + space: str, + member: str, + ) -> str: + """Returns a fully-qualified member string.""" + return "spaces/{space}/members/{member}".format( + space=space, + member=member, + ) + + @staticmethod + def parse_member_path(path: str) -> Dict[str, str]: + """Parses a member path into its component segments.""" + m = re.match(r"^spaces/(?P.+?)/members/(?P.+?)$", path) + return m.groupdict() if m else {} + @staticmethod def space_path( space: str, @@ -491,6 +511,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -696,9 +743,7 @@ def create_space( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> resource.Space: - r"""`Developer - Preview `__. - Creates a space. + r"""Creates a space. .. code-block:: python @@ -747,10 +792,9 @@ def sample_create_space(): Returns: google.apps.meet_v2beta.types.Space: - [Developer Preview](\ https://developers.google.com/workspace/preview). - Virtual place where conferences are held. Only one - active conference can be held in one space at any - given time. + Virtual place where conferences are + held. Only one active conference can be + held in one space at any given time. """ # Create or coerce a protobuf request object. 
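The client.py hunk above adds ``member_path``/``parse_member_path`` helpers alongside the existing space path helpers. A small usage sketch; the member ID ``"123456"`` is an illustrative placeholder, while ``jQCFfuBOdN5z`` is the example space ID used in the docstrings:

.. code-block:: python

    from google.apps import meet_v2beta

    # Both helpers are staticmethods, so no credentials are needed to call them.
    name = meet_v2beta.SpacesServiceClient.member_path("jQCFfuBOdN5z", "123456")
    # -> "spaces/jQCFfuBOdN5z/members/123456"

    parts = meet_v2beta.SpacesServiceClient.parse_member_path(name)
    # -> {"space": "jQCFfuBOdN5z", "member": "123456"}

The resulting name matches the ``spaces/{space}/members/{member}`` format that the new ``get_member`` and ``delete_member`` methods expect.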
@@ -799,9 +843,10 @@ def get_space( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> resource.Space: - r"""`Developer - Preview `__. - Gets a space by ``space_id`` or ``meeting_code``. + r"""Gets details about a meeting space. + + For an example, see `Get a meeting + space `__. .. code-block:: python @@ -834,6 +879,29 @@ def sample_get_space(): The request object. Request to get a space. name (str): Required. Resource name of the space. + + Format: ``spaces/{space}`` or ``spaces/{meetingCode}``. + + ``{space}`` is the resource identifier for the space. + It's a unique, server-generated ID and is case + sensitive. For example, ``jQCFfuBOdN5z``. + + ``{meetingCode}`` is an alias for the space. It's a + typeable, unique character string and is non-case + sensitive. For example, ``abc-mnop-xyz``. The maximum + length is 128 characters. + + A ``meetingCode`` shouldn't be stored long term as it + can become dissociated from a meeting space and can be + reused for different meeting spaces in the future. + Generally, a ``meetingCode`` expires 365 days after last + use. For more information, see `Learn about meeting + codes in Google + Meet `__. + + For more information, see `How Meet identifies a meeting + space `__. + This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -847,10 +915,9 @@ def sample_get_space(): Returns: google.apps.meet_v2beta.types.Space: - [Developer Preview](\ https://developers.google.com/workspace/preview). - Virtual place where conferences are held. Only one - active conference can be held in one space at any - given time. + Virtual place where conferences are + held. Only one active conference can be + held in one space at any given time. """ # Create or coerce a protobuf request object. @@ -906,9 +973,10 @@ def update_space( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> resource.Space: - r"""`Developer - Preview `__. - Updates a space. + r"""Updates details about a meeting space. + + For an example, see `Update a meeting + space `__. .. code-block:: python @@ -945,9 +1013,11 @@ def sample_update_space(): should not be set. update_mask (google.protobuf.field_mask_pb2.FieldMask): Optional. Field mask used to specify the fields to be - updated in the space. If update_mask isn't provided, it - defaults to '*' and updates all fields provided in the - request, including deleting fields not set in the + updated in the space. If update_mask isn't provided(not + set, set with empty paths, or only has "" as paths), it + defaults to update all fields provided with values in + the request. Using "*" as update_mask will update all + fields, including deleting fields not set in the request. This corresponds to the ``update_mask`` field @@ -963,10 +1033,9 @@ def sample_update_space(): Returns: google.apps.meet_v2beta.types.Space: - [Developer Preview](\ https://developers.google.com/workspace/preview). - Virtual place where conferences are held. Only one - active conference can be held in one space at any - given time. + Virtual place where conferences are + held. Only one active conference can be + held in one space at any given time. """ # Create or coerce a protobuf request object. 
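The revised ``update_space`` docstring above spells out the mask semantics: an unset or empty ``update_mask`` updates only the fields that carry values in the request, while ``"*"`` rewrites every field and clears the ones left unset. A hedged sketch with an explicit mask; the ``config.access_type`` field and ``TRUSTED`` value are assumptions drawn from the public Meet API reference, not from this diff:

.. code-block:: python

    from google.apps import meet_v2beta
    from google.protobuf import field_mask_pb2


    def sample_update_access_type():
        client = meet_v2beta.SpacesServiceClient()

        # Explicit mask: only config.access_type is written; other Space
        # fields are left untouched. Omitting the mask would instead update
        # every field set on `space`, and paths=["*"] would rewrite all
        # fields, clearing the ones not set here.
        space = meet_v2beta.Space(
            name="spaces/jQCFfuBOdN5z",
            config=meet_v2beta.SpaceConfig(
                access_type=meet_v2beta.SpaceConfig.AccessType.TRUSTED,
            ),
        )
        request = meet_v2beta.UpdateSpaceRequest(
            space=space,
            update_mask=field_mask_pb2.FieldMask(paths=["config.access_type"]),
        )
        return client.update_space(request=request)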
@@ -1025,9 +1094,10 @@ def end_active_conference( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: - r"""`Developer - Preview `__. - Ends an active conference (if there is one). + r"""Ends an active conference (if there's one). + + For an example, see `End active + conference `__. .. code-block:: python @@ -1058,6 +1128,16 @@ def sample_end_active_conference(): of a space. name (str): Required. Resource name of the space. + + Format: ``spaces/{space}``. + + ``{space}`` is the resource identifier for the space. + It's a unique, server-generated ID and is case + sensitive. For example, ``jQCFfuBOdN5z``. + + For more information, see `How Meet identifies a meeting + space `__. + This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -1109,6 +1189,458 @@ def sample_end_active_conference(): metadata=metadata, ) + def create_member( + self, + request: Optional[Union[service.CreateMemberRequest, dict]] = None, + *, + parent: Optional[str] = None, + member: Optional[resource.Member] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> resource.Member: + r"""`Developer + Preview `__: + Create a member. + + This API supports the ``fields`` parameter in + `SystemParameterContext `__. + When the ``fields`` parameter is omitted, this API response will + default to "name,email,role,user". + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.apps import meet_v2beta + + def sample_create_member(): + # Create a client + client = meet_v2beta.SpacesServiceClient() + + # Initialize request argument(s) + request = meet_v2beta.CreateMemberRequest( + parent="parent_value", + ) + + # Make the request + response = client.create_member(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.apps.meet_v2beta.types.CreateMemberRequest, dict]): + The request object. Request to create a member for a + space. + parent (str): + Required. Format: spaces/{space} + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + member (google.apps.meet_v2beta.types.Member): + Required. The member to be created. + This corresponds to the ``member`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.apps.meet_v2beta.types.Member: + Users who are configured to have a + role in the space. These users can join + the space without knocking. + + """ + # Create or coerce a protobuf request object. 
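The generated sample above builds a full CreateMemberRequest; the flattened arguments work too. A hedged sketch, assuming only the Member fields named in the docstring default ("name,email,role,user"); the email and parent are placeholders.

from google.apps import meet_v2beta

client = meet_v2beta.SpacesServiceClient()

member = meet_v2beta.Member(email="someone@example.com")  # placeholder address
created = client.create_member(parent="spaces/jQCFfuBOdN5z", member=member)
print(created.name)  # "spaces/{space}/members/{member}"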
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, member]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.CreateMemberRequest): + request = service.CreateMemberRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if member is not None: + request.member = member + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_member] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_member( + self, + request: Optional[Union[service.GetMemberRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> resource.Member: + r"""`Developer + Preview `__: + Get a member. + + This API supports the ``fields`` parameter in + `SystemParameterContext `__. + When the ``fields`` parameter is omitted, this API response will + default to "name,email,role,user". + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.apps import meet_v2beta + + def sample_get_member(): + # Create a client + client = meet_v2beta.SpacesServiceClient() + + # Initialize request argument(s) + request = meet_v2beta.GetMemberRequest( + name="name_value", + ) + + # Make the request + response = client.get_member(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.apps.meet_v2beta.types.GetMemberRequest, dict]): + The request object. Request to get a member from a space. + name (str): + Required. Format: + “spaces/{space}/members/{member}” + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
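A short sketch of the request-versus-flattened-arguments rule enforced by the quick check above: a full request object and individual field arguments are mutually exclusive.

from google.apps import meet_v2beta

client = meet_v2beta.SpacesServiceClient()
name = "spaces/jQCFfuBOdN5z/members/123456"  # placeholder

# Either pass a request object...
member = client.get_member(request=meet_v2beta.GetMemberRequest(name=name))

# ...or the flattened field, but never both.
member = client.get_member(name=name)

try:
    client.get_member(request=meet_v2beta.GetMemberRequest(name=name), name=name)
except ValueError:
    pass  # mixing the two is rejected before any RPC is sent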
+ + Returns: + google.apps.meet_v2beta.types.Member: + Users who are configured to have a + role in the space. These users can join + the space without knocking. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.GetMemberRequest): + request = service.GetMemberRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_member] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_members( + self, + request: Optional[Union[service.ListMembersRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListMembersPager: + r"""`Developer + Preview `__: + List members. + + This API supports the ``fields`` parameter in + `SystemParameterContext `__. + When the ``fields`` parameter is omitted this API response will + default to "name,email,role,user". + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.apps import meet_v2beta + + def sample_list_members(): + # Create a client + client = meet_v2beta.SpacesServiceClient() + + # Initialize request argument(s) + request = meet_v2beta.ListMembersRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_members(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.apps.meet_v2beta.types.ListMembersRequest, dict]): + The request object. Request to list all members of a + space. + parent (str): + Required. Format: spaces/{space} + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.apps.meet_v2beta.services.spaces_service.pagers.ListMembersPager: + Response of list members. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.ListMembersRequest): + request = service.ListMembersRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_members] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListMembersPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_member( + self, + request: Optional[Union[service.DeleteMemberRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""`Developer + Preview `__: + Delete the member who was previously assigned roles in the + space. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.apps import meet_v2beta + + def sample_delete_member(): + # Create a client + client = meet_v2beta.SpacesServiceClient() + + # Initialize request argument(s) + request = meet_v2beta.DeleteMemberRequest( + name="name_value", + ) + + # Make the request + client.delete_member(request=request) + + Args: + request (Union[google.apps.meet_v2beta.types.DeleteMemberRequest, dict]): + The request object. Request to delete a member from a + space. + name (str): + Required. Format: + “spaces/{space}/members/{member}” + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
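A brief sketch of consuming the ListMembersPager returned above; the parent ID is a placeholder.

from google.apps import meet_v2beta

client = meet_v2beta.SpacesServiceClient()

# Item-by-item iteration fetches additional pages transparently...
for member in client.list_members(parent="spaces/jQCFfuBOdN5z"):
    print(member.name)

# ...or walk whole pages via a fresh pager when page boundaries matter.
for page in client.list_members(parent="spaces/jQCFfuBOdN5z").pages:
    print(len(page.members))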
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.DeleteMemberRequest): + request = service.DeleteMemberRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_member] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + def __enter__(self) -> "SpacesServiceClient": return self diff --git a/packages/google-apps-meet/google/apps/meet_v2beta/services/spaces_service/pagers.py b/packages/google-apps-meet/google/apps/meet_v2beta/services/spaces_service/pagers.py new file mode 100644 index 000000000000..7a822dda143e --- /dev/null +++ b/packages/google-apps-meet/google/apps/meet_v2beta/services/spaces_service/pagers.py @@ -0,0 +1,197 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from typing import ( + Any, + AsyncIterator, + Awaitable, + Callable, + Iterator, + Optional, + Sequence, + Tuple, + Union, +) + +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core import retry_async as retries_async + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] + OptionalAsyncRetry = Union[ + retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None + ] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore + +from google.apps.meet_v2beta.types import resource, service + + +class ListMembersPager: + """A pager for iterating through ``list_members`` requests. + + This class thinly wraps an initial + :class:`google.apps.meet_v2beta.types.ListMembersResponse` object, and + provides an ``__iter__`` method to iterate through its + ``members`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListMembers`` requests and continue to iterate + through the ``members`` field on the + corresponding responses. + + All the usual :class:`google.apps.meet_v2beta.types.ListMembersResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., service.ListMembersResponse], + request: service.ListMembersRequest, + response: service.ListMembersResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.apps.meet_v2beta.types.ListMembersRequest): + The initial request object. + response (google.apps.meet_v2beta.types.ListMembersResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + self._method = method + self._request = service.ListMembersRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[service.ListMembersResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__(self) -> Iterator[resource.Member]: + for page in self.pages: + yield from page.members + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListMembersAsyncPager: + """A pager for iterating through ``list_members`` requests. 
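A small sketch of the attribute delegation the pager provides through __getattr__ above: response-level fields stay reachable without unwrapping the pager. IDs are placeholders.

from google.apps import meet_v2beta

client = meet_v2beta.SpacesServiceClient()
pager = client.list_members(parent="spaces/jQCFfuBOdN5z")

print(list(pager.members))    # `members` on the most recent ListMembersResponse
print(pager.next_page_token)  # also delegated to the underlying response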
+ + This class thinly wraps an initial + :class:`google.apps.meet_v2beta.types.ListMembersResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``members`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListMembers`` requests and continue to iterate + through the ``members`` field on the + corresponding responses. + + All the usual :class:`google.apps.meet_v2beta.types.ListMembersResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[service.ListMembersResponse]], + request: service.ListMembersRequest, + response: service.ListMembersResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.apps.meet_v2beta.types.ListMembersRequest): + The initial request object. + response (google.apps.meet_v2beta.types.ListMembersResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + self._method = method + self._request = service.ListMembersRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[service.ListMembersResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __aiter__(self) -> AsyncIterator[resource.Member]: + async def async_generator(): + async for page in self.pages: + for response in page.members: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-apps-meet/google/apps/meet_v2beta/services/spaces_service/transports/base.py b/packages/google-apps-meet/google/apps/meet_v2beta/services/spaces_service/transports/base.py index 2b61146aef38..a26541e5e2d9 100644 --- a/packages/google-apps-meet/google/apps/meet_v2beta/services/spaces_service/transports/base.py +++ b/packages/google-apps-meet/google/apps/meet_v2beta/services/spaces_service/transports/base.py @@ -36,7 +36,11 @@ class SpacesServiceTransport(abc.ABC): """Abstract transport class for SpacesService.""" - AUTH_SCOPES = () + AUTH_SCOPES = ( + "https://www.googleapis.com/auth/meetings.space.created", + "https://www.googleapis.com/auth/meetings.space.readonly", + "https://www.googleapis.com/auth/meetings.space.settings", + ) DEFAULT_HOST: str = "meet.googleapis.com" @@ -158,6 +162,26 @@ def _prep_wrapped_messages(self, client_info): 
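A hedged sketch of consuming the async pager defined above; it assumes the async client gained a matching list_members method (only its __aenter__ appears in this change), and the parent ID is a placeholder.

import asyncio

from google.apps import meet_v2beta


async def main():
    client = meet_v2beta.SpacesServiceAsyncClient()

    # __aiter__ resolves additional pages as they are needed.
    async for member in await client.list_members(parent="spaces/jQCFfuBOdN5z"):
        print(member.name)

    # Page-level iteration mirrors the sync pager's `pages` property.
    pager = await client.list_members(parent="spaces/jQCFfuBOdN5z")
    async for page in pager.pages:
        print(len(page.members))


asyncio.run(main())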
default_timeout=60.0, client_info=client_info, ), + self.create_member: gapic_v1.method.wrap_method( + self.create_member, + default_timeout=60.0, + client_info=client_info, + ), + self.get_member: gapic_v1.method.wrap_method( + self.get_member, + default_timeout=60.0, + client_info=client_info, + ), + self.list_members: gapic_v1.method.wrap_method( + self.list_members, + default_timeout=60.0, + client_info=client_info, + ), + self.delete_member: gapic_v1.method.wrap_method( + self.delete_member, + default_timeout=60.0, + client_info=client_info, + ), } def close(self): @@ -202,6 +226,41 @@ def end_active_conference( ]: raise NotImplementedError() + @property + def create_member( + self, + ) -> Callable[ + [service.CreateMemberRequest], + Union[resource.Member, Awaitable[resource.Member]], + ]: + raise NotImplementedError() + + @property + def get_member( + self, + ) -> Callable[ + [service.GetMemberRequest], Union[resource.Member, Awaitable[resource.Member]] + ]: + raise NotImplementedError() + + @property + def list_members( + self, + ) -> Callable[ + [service.ListMembersRequest], + Union[service.ListMembersResponse, Awaitable[service.ListMembersResponse]], + ]: + raise NotImplementedError() + + @property + def delete_member( + self, + ) -> Callable[ + [service.DeleteMemberRequest], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], + ]: + raise NotImplementedError() + @property def kind(self) -> str: raise NotImplementedError() diff --git a/packages/google-apps-meet/google/apps/meet_v2beta/services/spaces_service/transports/grpc.py b/packages/google-apps-meet/google/apps/meet_v2beta/services/spaces_service/transports/grpc.py index 9c6a33e1355a..c02301971bc4 100644 --- a/packages/google-apps-meet/google/apps/meet_v2beta/services/spaces_service/transports/grpc.py +++ b/packages/google-apps-meet/google/apps/meet_v2beta/services/spaces_service/transports/grpc.py @@ -325,8 +325,6 @@ def grpc_channel(self) -> grpc.Channel: def create_space(self) -> Callable[[service.CreateSpaceRequest], resource.Space]: r"""Return a callable for the create space method over gRPC. - `Developer - Preview `__. Creates a space. Returns: @@ -351,9 +349,10 @@ def create_space(self) -> Callable[[service.CreateSpaceRequest], resource.Space] def get_space(self) -> Callable[[service.GetSpaceRequest], resource.Space]: r"""Return a callable for the get space method over gRPC. - `Developer - Preview `__. - Gets a space by ``space_id`` or ``meeting_code``. + Gets details about a meeting space. + + For an example, see `Get a meeting + space `__. Returns: Callable[[~.GetSpaceRequest], @@ -377,9 +376,10 @@ def get_space(self) -> Callable[[service.GetSpaceRequest], resource.Space]: def update_space(self) -> Callable[[service.UpdateSpaceRequest], resource.Space]: r"""Return a callable for the update space method over gRPC. - `Developer - Preview `__. - Updates a space. + Updates details about a meeting space. + + For an example, see `Update a meeting + space `__. Returns: Callable[[~.UpdateSpaceRequest], @@ -405,9 +405,10 @@ def end_active_conference( ) -> Callable[[service.EndActiveConferenceRequest], empty_pb2.Empty]: r"""Return a callable for the end active conference method over gRPC. - `Developer - Preview `__. - Ends an active conference (if there is one). + Ends an active conference (if there's one). + + For an example, see `End active + conference `__. 
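The wrappers above give every member RPC a 60-second default timeout; per-call overrides still apply. A sketch with an illustrative (not default) retry policy and a placeholder resource name:

from google.api_core import retry as retries
from google.apps import meet_v2beta

client = meet_v2beta.SpacesServiceClient()
member = client.get_member(
    name="spaces/jQCFfuBOdN5z/members/123456",   # placeholder name
    timeout=10.0,                                # overrides the 60s default for this call
    retry=retries.Retry(initial=0.5, maximum=5.0, timeout=30.0),
)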
Returns: Callable[[~.EndActiveConferenceRequest], @@ -427,6 +428,128 @@ def end_active_conference( ) return self._stubs["end_active_conference"] + @property + def create_member(self) -> Callable[[service.CreateMemberRequest], resource.Member]: + r"""Return a callable for the create member method over gRPC. + + `Developer + Preview `__: + Create a member. + + This API supports the ``fields`` parameter in + `SystemParameterContext `__. + When the ``fields`` parameter is omitted, this API response will + default to "name,email,role,user". + + Returns: + Callable[[~.CreateMemberRequest], + ~.Member]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_member" not in self._stubs: + self._stubs["create_member"] = self._logged_channel.unary_unary( + "/google.apps.meet.v2beta.SpacesService/CreateMember", + request_serializer=service.CreateMemberRequest.serialize, + response_deserializer=resource.Member.deserialize, + ) + return self._stubs["create_member"] + + @property + def get_member(self) -> Callable[[service.GetMemberRequest], resource.Member]: + r"""Return a callable for the get member method over gRPC. + + `Developer + Preview `__: + Get a member. + + This API supports the ``fields`` parameter in + `SystemParameterContext `__. + When the ``fields`` parameter is omitted, this API response will + default to "name,email,role,user". + + Returns: + Callable[[~.GetMemberRequest], + ~.Member]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_member" not in self._stubs: + self._stubs["get_member"] = self._logged_channel.unary_unary( + "/google.apps.meet.v2beta.SpacesService/GetMember", + request_serializer=service.GetMemberRequest.serialize, + response_deserializer=resource.Member.deserialize, + ) + return self._stubs["get_member"] + + @property + def list_members( + self, + ) -> Callable[[service.ListMembersRequest], service.ListMembersResponse]: + r"""Return a callable for the list members method over gRPC. + + `Developer + Preview `__: + List members. + + This API supports the ``fields`` parameter in + `SystemParameterContext `__. + When the ``fields`` parameter is omitted this API response will + default to "name,email,role,user". + + Returns: + Callable[[~.ListMembersRequest], + ~.ListMembersResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_members" not in self._stubs: + self._stubs["list_members"] = self._logged_channel.unary_unary( + "/google.apps.meet.v2beta.SpacesService/ListMembers", + request_serializer=service.ListMembersRequest.serialize, + response_deserializer=service.ListMembersResponse.deserialize, + ) + return self._stubs["list_members"] + + @property + def delete_member(self) -> Callable[[service.DeleteMemberRequest], empty_pb2.Empty]: + r"""Return a callable for the delete member method over gRPC. 
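A quick sketch of the lazy stub caching used by the gRPC transport properties above: the callable is created on first access and reused afterwards.

from google.apps import meet_v2beta

client = meet_v2beta.SpacesServiceClient()   # gRPC is the default transport
transport = client.transport

stub_a = transport.create_member
stub_b = transport.create_member
print(stub_a is stub_b)  # True: built once, then served from self._stubs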
+ + `Developer + Preview `__: + Delete the member who was previously assigned roles in the + space. + + Returns: + Callable[[~.DeleteMemberRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_member" not in self._stubs: + self._stubs["delete_member"] = self._logged_channel.unary_unary( + "/google.apps.meet.v2beta.SpacesService/DeleteMember", + request_serializer=service.DeleteMemberRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_member"] + def close(self): self._logged_channel.close() diff --git a/packages/google-apps-meet/google/apps/meet_v2beta/services/spaces_service/transports/grpc_asyncio.py b/packages/google-apps-meet/google/apps/meet_v2beta/services/spaces_service/transports/grpc_asyncio.py index 79986cd4c0f6..f97c6f19ad4d 100644 --- a/packages/google-apps-meet/google/apps/meet_v2beta/services/spaces_service/transports/grpc_asyncio.py +++ b/packages/google-apps-meet/google/apps/meet_v2beta/services/spaces_service/transports/grpc_asyncio.py @@ -334,8 +334,6 @@ def create_space( ) -> Callable[[service.CreateSpaceRequest], Awaitable[resource.Space]]: r"""Return a callable for the create space method over gRPC. - `Developer - Preview `__. Creates a space. Returns: @@ -362,9 +360,10 @@ def get_space( ) -> Callable[[service.GetSpaceRequest], Awaitable[resource.Space]]: r"""Return a callable for the get space method over gRPC. - `Developer - Preview `__. - Gets a space by ``space_id`` or ``meeting_code``. + Gets details about a meeting space. + + For an example, see `Get a meeting + space `__. Returns: Callable[[~.GetSpaceRequest], @@ -390,9 +389,10 @@ def update_space( ) -> Callable[[service.UpdateSpaceRequest], Awaitable[resource.Space]]: r"""Return a callable for the update space method over gRPC. - `Developer - Preview `__. - Updates a space. + Updates details about a meeting space. + + For an example, see `Update a meeting + space `__. Returns: Callable[[~.UpdateSpaceRequest], @@ -418,9 +418,10 @@ def end_active_conference( ) -> Callable[[service.EndActiveConferenceRequest], Awaitable[empty_pb2.Empty]]: r"""Return a callable for the end active conference method over gRPC. - `Developer - Preview `__. - Ends an active conference (if there is one). + Ends an active conference (if there's one). + + For an example, see `End active + conference `__. Returns: Callable[[~.EndActiveConferenceRequest], @@ -440,6 +441,134 @@ def end_active_conference( ) return self._stubs["end_active_conference"] + @property + def create_member( + self, + ) -> Callable[[service.CreateMemberRequest], Awaitable[resource.Member]]: + r"""Return a callable for the create member method over gRPC. + + `Developer + Preview `__: + Create a member. + + This API supports the ``fields`` parameter in + `SystemParameterContext `__. + When the ``fields`` parameter is omitted, this API response will + default to "name,email,role,user". + + Returns: + Callable[[~.CreateMemberRequest], + Awaitable[~.Member]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "create_member" not in self._stubs: + self._stubs["create_member"] = self._logged_channel.unary_unary( + "/google.apps.meet.v2beta.SpacesService/CreateMember", + request_serializer=service.CreateMemberRequest.serialize, + response_deserializer=resource.Member.deserialize, + ) + return self._stubs["create_member"] + + @property + def get_member( + self, + ) -> Callable[[service.GetMemberRequest], Awaitable[resource.Member]]: + r"""Return a callable for the get member method over gRPC. + + `Developer + Preview `__: + Get a member. + + This API supports the ``fields`` parameter in + `SystemParameterContext `__. + When the ``fields`` parameter is omitted, this API response will + default to "name,email,role,user". + + Returns: + Callable[[~.GetMemberRequest], + Awaitable[~.Member]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_member" not in self._stubs: + self._stubs["get_member"] = self._logged_channel.unary_unary( + "/google.apps.meet.v2beta.SpacesService/GetMember", + request_serializer=service.GetMemberRequest.serialize, + response_deserializer=resource.Member.deserialize, + ) + return self._stubs["get_member"] + + @property + def list_members( + self, + ) -> Callable[[service.ListMembersRequest], Awaitable[service.ListMembersResponse]]: + r"""Return a callable for the list members method over gRPC. + + `Developer + Preview `__: + List members. + + This API supports the ``fields`` parameter in + `SystemParameterContext `__. + When the ``fields`` parameter is omitted this API response will + default to "name,email,role,user". + + Returns: + Callable[[~.ListMembersRequest], + Awaitable[~.ListMembersResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_members" not in self._stubs: + self._stubs["list_members"] = self._logged_channel.unary_unary( + "/google.apps.meet.v2beta.SpacesService/ListMembers", + request_serializer=service.ListMembersRequest.serialize, + response_deserializer=service.ListMembersResponse.deserialize, + ) + return self._stubs["list_members"] + + @property + def delete_member( + self, + ) -> Callable[[service.DeleteMemberRequest], Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete member method over gRPC. + + `Developer + Preview `__: + Delete the member who was previously assigned roles in the + space. + + Returns: + Callable[[~.DeleteMemberRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "delete_member" not in self._stubs: + self._stubs["delete_member"] = self._logged_channel.unary_unary( + "/google.apps.meet.v2beta.SpacesService/DeleteMember", + request_serializer=service.DeleteMemberRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_member"] + def _prep_wrapped_messages(self, client_info): """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" self._wrapped_methods = { @@ -472,6 +601,26 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), + self.create_member: self._wrap_method( + self.create_member, + default_timeout=60.0, + client_info=client_info, + ), + self.get_member: self._wrap_method( + self.get_member, + default_timeout=60.0, + client_info=client_info, + ), + self.list_members: self._wrap_method( + self.list_members, + default_timeout=60.0, + client_info=client_info, + ), + self.delete_member: self._wrap_method( + self.delete_member, + default_timeout=60.0, + client_info=client_info, + ), } def _wrap_method(self, func, *args, **kwargs): diff --git a/packages/google-apps-meet/google/apps/meet_v2beta/services/spaces_service/transports/rest.py b/packages/google-apps-meet/google/apps/meet_v2beta/services/spaces_service/transports/rest.py index 787194df51e0..4bd95f866db5 100644 --- a/packages/google-apps-meet/google/apps/meet_v2beta/services/spaces_service/transports/rest.py +++ b/packages/google-apps-meet/google/apps/meet_v2beta/services/spaces_service/transports/rest.py @@ -69,6 +69,14 @@ class SpacesServiceRestInterceptor: .. code-block:: python class MyCustomSpacesServiceInterceptor(SpacesServiceRestInterceptor): + def pre_create_member(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_member(self, response): + logging.log(f"Received response: {response}") + return response + def pre_create_space(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -77,10 +85,22 @@ def post_create_space(self, response): logging.log(f"Received response: {response}") return response + def pre_delete_member(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + def pre_end_active_conference(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata + def pre_get_member(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_member(self, response): + logging.log(f"Received response: {response}") + return response + def pre_get_space(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -89,6 +109,14 @@ def post_get_space(self, response): logging.log(f"Received response: {response}") return response + def pre_list_members(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_members(self, response): + logging.log(f"Received response: {response}") + return response + def pre_update_space(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -103,6 +131,50 @@ def post_update_space(self, response): """ + def pre_create_member( + self, + request: service.CreateMemberRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[service.CreateMemberRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for 
create_member + + Override in a subclass to manipulate the request or metadata + before they are sent to the SpacesService server. + """ + return request, metadata + + def post_create_member(self, response: resource.Member) -> resource.Member: + """Post-rpc interceptor for create_member + + DEPRECATED. Please use the `post_create_member_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the SpacesService server but before + it is returned to user code. This `post_create_member` interceptor runs + before the `post_create_member_with_metadata` interceptor. + """ + return response + + def post_create_member_with_metadata( + self, + response: resource.Member, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resource.Member, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_member + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SpacesService server but before it is returned to user code. + + We recommend only using this `post_create_member_with_metadata` + interceptor in new development instead of the `post_create_member` interceptor. + When both interceptors are used, this `post_create_member_with_metadata` interceptor runs after the + `post_create_member` interceptor. The (possibly modified) response returned by + `post_create_member` will be passed to + `post_create_member_with_metadata`. + """ + return response, metadata + def pre_create_space( self, request: service.CreateSpaceRequest, @@ -118,12 +190,47 @@ def pre_create_space( def post_create_space(self, response: resource.Space) -> resource.Space: """Post-rpc interceptor for create_space - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_space_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the SpacesService server but before - it is returned to user code. + it is returned to user code. This `post_create_space` interceptor runs + before the `post_create_space_with_metadata` interceptor. """ return response + def post_create_space_with_metadata( + self, + response: resource.Space, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resource.Space, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_space + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SpacesService server but before it is returned to user code. + + We recommend only using this `post_create_space_with_metadata` + interceptor in new development instead of the `post_create_space` interceptor. + When both interceptors are used, this `post_create_space_with_metadata` interceptor runs after the + `post_create_space` interceptor. The (possibly modified) response returned by + `post_create_space` will be passed to + `post_create_space_with_metadata`. + """ + return response, metadata + + def pre_delete_member( + self, + request: service.DeleteMemberRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[service.DeleteMemberRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for delete_member + + Override in a subclass to manipulate the request or metadata + before they are sent to the SpacesService server. 
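A hedged sketch of wiring the new member hooks into a custom REST interceptor, following the pattern shown in the interceptor docstring above; the logging is illustrative only.

import logging

from google.apps import meet_v2beta
from google.apps.meet_v2beta.services.spaces_service.transports.rest import (
    SpacesServiceRestInterceptor,
    SpacesServiceRestTransport,
)


class LoggingInterceptor(SpacesServiceRestInterceptor):
    def pre_create_member(self, request, metadata):
        logging.info("CreateMember request: %s", request)
        return request, metadata

    def post_create_member_with_metadata(self, response, metadata):
        # Preferred over post_create_member for new code, per the docstrings above.
        logging.info("CreateMember response metadata: %s", metadata)
        return response, metadata


transport = SpacesServiceRestTransport(interceptor=LoggingInterceptor())
client = meet_v2beta.SpacesServiceClient(transport=transport)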
+ """ + return request, metadata + def pre_end_active_conference( self, request: service.EndActiveConferenceRequest, @@ -138,6 +245,50 @@ def pre_end_active_conference( """ return request, metadata + def pre_get_member( + self, + request: service.GetMemberRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[service.GetMemberRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for get_member + + Override in a subclass to manipulate the request or metadata + before they are sent to the SpacesService server. + """ + return request, metadata + + def post_get_member(self, response: resource.Member) -> resource.Member: + """Post-rpc interceptor for get_member + + DEPRECATED. Please use the `post_get_member_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the SpacesService server but before + it is returned to user code. This `post_get_member` interceptor runs + before the `post_get_member_with_metadata` interceptor. + """ + return response + + def post_get_member_with_metadata( + self, + response: resource.Member, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resource.Member, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_member + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SpacesService server but before it is returned to user code. + + We recommend only using this `post_get_member_with_metadata` + interceptor in new development instead of the `post_get_member` interceptor. + When both interceptors are used, this `post_get_member_with_metadata` interceptor runs after the + `post_get_member` interceptor. The (possibly modified) response returned by + `post_get_member` will be passed to + `post_get_member_with_metadata`. + """ + return response, metadata + def pre_get_space( self, request: service.GetSpaceRequest, @@ -153,12 +304,81 @@ def pre_get_space( def post_get_space(self, response: resource.Space) -> resource.Space: """Post-rpc interceptor for get_space - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_space_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the SpacesService server but before + it is returned to user code. This `post_get_space` interceptor runs + before the `post_get_space_with_metadata` interceptor. + """ + return response + + def post_get_space_with_metadata( + self, + response: resource.Space, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resource.Space, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_space + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SpacesService server but before it is returned to user code. + + We recommend only using this `post_get_space_with_metadata` + interceptor in new development instead of the `post_get_space` interceptor. + When both interceptors are used, this `post_get_space_with_metadata` interceptor runs after the + `post_get_space` interceptor. The (possibly modified) response returned by + `post_get_space` will be passed to + `post_get_space_with_metadata`. 
+ """ + return response, metadata + + def pre_list_members( + self, + request: service.ListMembersRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[service.ListMembersRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for list_members + + Override in a subclass to manipulate the request or metadata + before they are sent to the SpacesService server. + """ + return request, metadata + + def post_list_members( + self, response: service.ListMembersResponse + ) -> service.ListMembersResponse: + """Post-rpc interceptor for list_members + + DEPRECATED. Please use the `post_list_members_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the SpacesService server but before - it is returned to user code. + it is returned to user code. This `post_list_members` interceptor runs + before the `post_list_members_with_metadata` interceptor. """ return response + def post_list_members_with_metadata( + self, + response: service.ListMembersResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[service.ListMembersResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_members + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SpacesService server but before it is returned to user code. + + We recommend only using this `post_list_members_with_metadata` + interceptor in new development instead of the `post_list_members` interceptor. + When both interceptors are used, this `post_list_members_with_metadata` interceptor runs after the + `post_list_members` interceptor. The (possibly modified) response returned by + `post_list_members` will be passed to + `post_list_members_with_metadata`. + """ + return response, metadata + def pre_update_space( self, request: service.UpdateSpaceRequest, @@ -174,12 +394,35 @@ def pre_update_space( def post_update_space(self, response: resource.Space) -> resource.Space: """Post-rpc interceptor for update_space - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_space_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the SpacesService server but before - it is returned to user code. + it is returned to user code. This `post_update_space` interceptor runs + before the `post_update_space_with_metadata` interceptor. """ return response + def post_update_space_with_metadata( + self, + response: resource.Space, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resource.Space, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_space + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SpacesService server but before it is returned to user code. + + We recommend only using this `post_update_space_with_metadata` + interceptor in new development instead of the `post_update_space` interceptor. + When both interceptors are used, this `post_update_space_with_metadata` interceptor runs after the + `post_update_space` interceptor. The (possibly modified) response returned by + `post_update_space` will be passed to + `post_update_space_with_metadata`. + """ + return response, metadata + @dataclasses.dataclass class SpacesServiceRestStub: @@ -217,61 +460,594 @@ def __init__( ) -> None: """Instantiate the transport. 
- Args: - host (Optional[str]): - The hostname to connect to (default: 'meet.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. + Args: + host (Optional[str]): + The hostname to connect to (default: 'meet.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + url_scheme=url_scheme, + api_audience=api_audience, + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST + ) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or SpacesServiceRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _CreateMember( + _BaseSpacesServiceRestTransport._BaseCreateMember, SpacesServiceRestStub + ): + def __hash__(self): + return hash("SpacesServiceRestTransport.CreateMember") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: service.CreateMemberRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> resource.Member: + r"""Call the create member method over HTTP. 
+ + Args: + request (~.service.CreateMemberRequest): + The request object. Request to create a member for a + space. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.resource.Member: + Users who are configured to have a + role in the space. These users can join + the space without knocking. + + """ + + http_options = ( + _BaseSpacesServiceRestTransport._BaseCreateMember._get_http_options() + ) + + request, metadata = self._interceptor.pre_create_member(request, metadata) + transcoded_request = _BaseSpacesServiceRestTransport._BaseCreateMember._get_transcoded_request( + http_options, request + ) + + body = _BaseSpacesServiceRestTransport._BaseCreateMember._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseSpacesServiceRestTransport._BaseCreateMember._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.apps.meet_v2beta.SpacesServiceClient.CreateMember", + extra={ + "serviceName": "google.apps.meet.v2beta.SpacesService", + "rpcName": "CreateMember", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = SpacesServiceRestTransport._CreateMember._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = resource.Member() + pb_resp = resource.Member.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_create_member(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_member_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = resource.Member.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.apps.meet_v2beta.SpacesServiceClient.create_member", + extra={ + "serviceName": "google.apps.meet.v2beta.SpacesService", + "rpcName": "CreateMember", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _CreateSpace( + _BaseSpacesServiceRestTransport._BaseCreateSpace, SpacesServiceRestStub + ): + def __hash__(self): + return hash("SpacesServiceRestTransport.CreateSpace") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: service.CreateSpaceRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> resource.Space: + r"""Call the create space method over HTTP. + + Args: + request (~.service.CreateSpaceRequest): + The request object. Request to create a space. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.resource.Space: + Virtual place where conferences are + held. Only one active conference can be + held in one space at any given time. 
+ + """ + + http_options = ( + _BaseSpacesServiceRestTransport._BaseCreateSpace._get_http_options() + ) + + request, metadata = self._interceptor.pre_create_space(request, metadata) + transcoded_request = _BaseSpacesServiceRestTransport._BaseCreateSpace._get_transcoded_request( + http_options, request + ) + + body = ( + _BaseSpacesServiceRestTransport._BaseCreateSpace._get_request_body_json( + transcoded_request + ) + ) + + # Jsonify the query params + query_params = ( + _BaseSpacesServiceRestTransport._BaseCreateSpace._get_query_params_json( + transcoded_request + ) + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.apps.meet_v2beta.SpacesServiceClient.CreateSpace", + extra={ + "serviceName": "google.apps.meet.v2beta.SpacesService", + "rpcName": "CreateSpace", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = SpacesServiceRestTransport._CreateSpace._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = resource.Space() + pb_resp = resource.Space.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_create_space(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_space_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = resource.Space.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.apps.meet_v2beta.SpacesServiceClient.create_space", + extra={ + "serviceName": "google.apps.meet.v2beta.SpacesService", + "rpcName": "CreateSpace", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _DeleteMember( + _BaseSpacesServiceRestTransport._BaseDeleteMember, SpacesServiceRestStub + ): + def __hash__(self): + return hash("SpacesServiceRestTransport.DeleteMember") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: service.DeleteMemberRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: 
Sequence[Tuple[str, Union[str, bytes]]] = (), + ): + r"""Call the delete member method over HTTP. + + Args: + request (~.service.DeleteMemberRequest): + The request object. Request to delete a member from a + space. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + + http_options = ( + _BaseSpacesServiceRestTransport._BaseDeleteMember._get_http_options() + ) + + request, metadata = self._interceptor.pre_delete_member(request, metadata) + transcoded_request = _BaseSpacesServiceRestTransport._BaseDeleteMember._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseSpacesServiceRestTransport._BaseDeleteMember._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.apps.meet_v2beta.SpacesServiceClient.DeleteMember", + extra={ + "serviceName": "google.apps.meet.v2beta.SpacesService", + "rpcName": "DeleteMember", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = SpacesServiceRestTransport._DeleteMember._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _EndActiveConference( + _BaseSpacesServiceRestTransport._BaseEndActiveConference, SpacesServiceRestStub + ): + def __hash__(self): + return hash("SpacesServiceRestTransport.EndActiveConference") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: service.EndActiveConferenceRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ): + r"""Call the end active conference method over HTTP. + + Args: + request (~.service.EndActiveConferenceRequest): + The request object. Request to end an ongoing conference + of a space. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + + http_options = ( + _BaseSpacesServiceRestTransport._BaseEndActiveConference._get_http_options() + ) + + request, metadata = self._interceptor.pre_end_active_conference( + request, metadata + ) + transcoded_request = _BaseSpacesServiceRestTransport._BaseEndActiveConference._get_transcoded_request( + http_options, request + ) + + body = _BaseSpacesServiceRestTransport._BaseEndActiveConference._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseSpacesServiceRestTransport._BaseEndActiveConference._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.apps.meet_v2beta.SpacesServiceClient.EndActiveConference", + extra={ + "serviceName": "google.apps.meet.v2beta.SpacesService", + "rpcName": "EndActiveConference", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. - client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client - certificate to configure mutual TLS HTTP channel. It is ignored - if ``channel`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you are developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - url_scheme: the protocol scheme for the API endpoint. Normally - "https", but for testing or local servers, - "http" can be specified. - """ - # Run the base constructor - # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
- # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the - # credentials object - super().__init__( - host=host, - credentials=credentials, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - url_scheme=url_scheme, - api_audience=api_audience, - ) - self._session = AuthorizedSession( - self._credentials, default_host=self.DEFAULT_HOST - ) - if client_cert_source_for_mtls: - self._session.configure_mtls_channel(client_cert_source_for_mtls) - self._interceptor = interceptor or SpacesServiceRestInterceptor() - self._prep_wrapped_messages(client_info) + # Send the request + response = SpacesServiceRestTransport._EndActiveConference._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) - class _CreateSpace( - _BaseSpacesServiceRestTransport._BaseCreateSpace, SpacesServiceRestStub + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _GetMember( + _BaseSpacesServiceRestTransport._BaseGetMember, SpacesServiceRestStub ): def __hash__(self): - return hash("SpacesServiceRestTransport.CreateSpace") + return hash("SpacesServiceRestTransport.GetMember") @staticmethod def _get_response( @@ -292,23 +1068,22 @@ def _get_response( timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, ) return response def __call__( self, - request: service.CreateSpaceRequest, + request: service.GetMemberRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> resource.Space: - r"""Call the create space method over HTTP. + ) -> resource.Member: + r"""Call the get member method over HTTP. Args: - request (~.service.CreateSpaceRequest): - The request object. Request to create a space. + request (~.service.GetMemberRequest): + The request object. Request to get a member from a space. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -318,33 +1093,27 @@ def __call__( be of type `bytes`. Returns: - ~.resource.Space: - `Developer - Preview `__. - Virtual place where conferences are held. Only one - active conference can be held in one space at any given - time. + ~.resource.Member: + Users who are configured to have a + role in the space. These users can join + the space without knocking. 
""" http_options = ( - _BaseSpacesServiceRestTransport._BaseCreateSpace._get_http_options() + _BaseSpacesServiceRestTransport._BaseGetMember._get_http_options() ) - request, metadata = self._interceptor.pre_create_space(request, metadata) - transcoded_request = _BaseSpacesServiceRestTransport._BaseCreateSpace._get_transcoded_request( - http_options, request - ) - - body = ( - _BaseSpacesServiceRestTransport._BaseCreateSpace._get_request_body_json( - transcoded_request + request, metadata = self._interceptor.pre_get_member(request, metadata) + transcoded_request = ( + _BaseSpacesServiceRestTransport._BaseGetMember._get_transcoded_request( + http_options, request ) ) # Jsonify the query params query_params = ( - _BaseSpacesServiceRestTransport._BaseCreateSpace._get_query_params_json( + _BaseSpacesServiceRestTransport._BaseGetMember._get_query_params_json( transcoded_request ) ) @@ -367,24 +1136,23 @@ def __call__( "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.apps.meet_v2beta.SpacesServiceClient.CreateSpace", + f"Sending request for google.apps.meet_v2beta.SpacesServiceClient.GetMember", extra={ "serviceName": "google.apps.meet.v2beta.SpacesService", - "rpcName": "CreateSpace", + "rpcName": "GetMember", "httpRequest": http_request, "metadata": http_request["headers"], }, ) # Send the request - response = SpacesServiceRestTransport._CreateSpace._get_response( + response = SpacesServiceRestTransport._GetMember._get_response( self._host, metadata, query_params, self._session, timeout, transcoded_request, - body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -393,17 +1161,21 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = resource.Space() - pb_resp = resource.Space.pb(resp) + resp = resource.Member() + pb_resp = resource.Member.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_create_space(resp) + resp = self._interceptor.post_get_member(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_member_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER try: - response_payload = resource.Space.to_json(response) + response_payload = resource.Member.to_json(response) except: response_payload = None http_response = { @@ -412,21 +1184,21 @@ def __call__( "status": response.status_code, } _LOGGER.debug( - "Received response for google.apps.meet_v2beta.SpacesServiceClient.create_space", + "Received response for google.apps.meet_v2beta.SpacesServiceClient.get_member", extra={ "serviceName": "google.apps.meet.v2beta.SpacesService", - "rpcName": "CreateSpace", + "rpcName": "GetMember", "metadata": http_response["headers"], "httpResponse": http_response, }, ) return resp - class _EndActiveConference( - _BaseSpacesServiceRestTransport._BaseEndActiveConference, SpacesServiceRestStub + class _GetSpace( + _BaseSpacesServiceRestTransport._BaseGetSpace, SpacesServiceRestStub ): def __hash__(self): - return hash("SpacesServiceRestTransport.EndActiveConference") + return hash("SpacesServiceRestTransport.GetSpace") @staticmethod def _get_response( @@ -447,24 +1219,22 @@ def _get_response( timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, ) return response def __call__( self, - request: service.EndActiveConferenceRequest, + 
request: service.GetSpaceRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ): - r"""Call the end active conference method over HTTP. + ) -> resource.Space: + r"""Call the get space method over HTTP. Args: - request (~.service.EndActiveConferenceRequest): - The request object. Request to end an ongoing conference - of a space. + request (~.service.GetSpaceRequest): + The request object. Request to get a space. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -472,26 +1242,31 @@ def __call__( sent along with the request as metadata. Normally, each value must be of type `str`, but for metadata keys ending with the suffix `-bin`, the corresponding values must be of type `bytes`. + + Returns: + ~.resource.Space: + Virtual place where conferences are + held. Only one active conference can be + held in one space at any given time. + """ http_options = ( - _BaseSpacesServiceRestTransport._BaseEndActiveConference._get_http_options() - ) - - request, metadata = self._interceptor.pre_end_active_conference( - request, metadata - ) - transcoded_request = _BaseSpacesServiceRestTransport._BaseEndActiveConference._get_transcoded_request( - http_options, request + _BaseSpacesServiceRestTransport._BaseGetSpace._get_http_options() ) - body = _BaseSpacesServiceRestTransport._BaseEndActiveConference._get_request_body_json( - transcoded_request + request, metadata = self._interceptor.pre_get_space(request, metadata) + transcoded_request = ( + _BaseSpacesServiceRestTransport._BaseGetSpace._get_transcoded_request( + http_options, request + ) ) # Jsonify the query params - query_params = _BaseSpacesServiceRestTransport._BaseEndActiveConference._get_query_params_json( - transcoded_request + query_params = ( + _BaseSpacesServiceRestTransport._BaseGetSpace._get_query_params_json( + transcoded_request + ) ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( @@ -502,7 +1277,7 @@ def __call__( ) method = transcoded_request["method"] try: - request_payload = json_format.MessageToJson(request) + request_payload = type(request).to_json(request) except: request_payload = None http_request = { @@ -512,24 +1287,23 @@ def __call__( "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.apps.meet_v2beta.SpacesServiceClient.EndActiveConference", + f"Sending request for google.apps.meet_v2beta.SpacesServiceClient.GetSpace", extra={ "serviceName": "google.apps.meet.v2beta.SpacesService", - "rpcName": "EndActiveConference", + "rpcName": "GetSpace", "httpRequest": http_request, "metadata": http_request["headers"], }, ) # Send the request - response = SpacesServiceRestTransport._EndActiveConference._get_response( + response = SpacesServiceRestTransport._GetSpace._get_response( self._host, metadata, query_params, self._session, timeout, transcoded_request, - body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -537,11 +1311,45 @@ def __call__( if response.status_code >= 400: raise core_exceptions.from_http_response(response) - class _GetSpace( - _BaseSpacesServiceRestTransport._BaseGetSpace, SpacesServiceRestStub + # Return the response + resp = resource.Space() + pb_resp = resource.Space.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_get_space(resp) + response_metadata = [(k, str(v)) for k, v in 
response.headers.items()] + resp, _ = self._interceptor.post_get_space_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = resource.Space.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.apps.meet_v2beta.SpacesServiceClient.get_space", + extra={ + "serviceName": "google.apps.meet.v2beta.SpacesService", + "rpcName": "GetSpace", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _ListMembers( + _BaseSpacesServiceRestTransport._BaseListMembers, SpacesServiceRestStub ): def __hash__(self): - return hash("SpacesServiceRestTransport.GetSpace") + return hash("SpacesServiceRestTransport.ListMembers") @staticmethod def _get_response( @@ -567,17 +1375,18 @@ def _get_response( def __call__( self, - request: service.GetSpaceRequest, + request: service.ListMembersRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> resource.Space: - r"""Call the get space method over HTTP. + ) -> service.ListMembersResponse: + r"""Call the list members method over HTTP. Args: - request (~.service.GetSpaceRequest): - The request object. Request to get a space. + request (~.service.ListMembersRequest): + The request object. Request to list all members of a + space. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -587,29 +1396,22 @@ def __call__( be of type `bytes`. Returns: - ~.resource.Space: - `Developer - Preview `__. - Virtual place where conferences are held. Only one - active conference can be held in one space at any given - time. - + ~.service.ListMembersResponse: + Response of list members. 
""" http_options = ( - _BaseSpacesServiceRestTransport._BaseGetSpace._get_http_options() + _BaseSpacesServiceRestTransport._BaseListMembers._get_http_options() ) - request, metadata = self._interceptor.pre_get_space(request, metadata) - transcoded_request = ( - _BaseSpacesServiceRestTransport._BaseGetSpace._get_transcoded_request( - http_options, request - ) + request, metadata = self._interceptor.pre_list_members(request, metadata) + transcoded_request = _BaseSpacesServiceRestTransport._BaseListMembers._get_transcoded_request( + http_options, request ) # Jsonify the query params query_params = ( - _BaseSpacesServiceRestTransport._BaseGetSpace._get_query_params_json( + _BaseSpacesServiceRestTransport._BaseListMembers._get_query_params_json( transcoded_request ) ) @@ -632,17 +1434,17 @@ def __call__( "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.apps.meet_v2beta.SpacesServiceClient.GetSpace", + f"Sending request for google.apps.meet_v2beta.SpacesServiceClient.ListMembers", extra={ "serviceName": "google.apps.meet.v2beta.SpacesService", - "rpcName": "GetSpace", + "rpcName": "ListMembers", "httpRequest": http_request, "metadata": http_request["headers"], }, ) # Send the request - response = SpacesServiceRestTransport._GetSpace._get_response( + response = SpacesServiceRestTransport._ListMembers._get_response( self._host, metadata, query_params, @@ -657,17 +1459,21 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = resource.Space() - pb_resp = resource.Space.pb(resp) + resp = service.ListMembersResponse() + pb_resp = service.ListMembersResponse.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_get_space(resp) + resp = self._interceptor.post_list_members(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_members_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER try: - response_payload = resource.Space.to_json(response) + response_payload = service.ListMembersResponse.to_json(response) except: response_payload = None http_response = { @@ -676,10 +1482,10 @@ def __call__( "status": response.status_code, } _LOGGER.debug( - "Received response for google.apps.meet_v2beta.SpacesServiceClient.get_space", + "Received response for google.apps.meet_v2beta.SpacesServiceClient.list_members", extra={ "serviceName": "google.apps.meet.v2beta.SpacesService", - "rpcName": "GetSpace", + "rpcName": "ListMembers", "metadata": http_response["headers"], "httpResponse": http_response, }, @@ -738,11 +1544,9 @@ def __call__( Returns: ~.resource.Space: - `Developer - Preview `__. - Virtual place where conferences are held. Only one - active conference can be held in one space at any given - time. + Virtual place where conferences are + held. Only one active conference can be + held in one space at any given time. 
""" @@ -818,6 +1622,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_space(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_space_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -841,12 +1649,24 @@ def __call__( ) return resp + @property + def create_member(self) -> Callable[[service.CreateMemberRequest], resource.Member]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateMember(self._session, self._host, self._interceptor) # type: ignore + @property def create_space(self) -> Callable[[service.CreateSpaceRequest], resource.Space]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast return self._CreateSpace(self._session, self._host, self._interceptor) # type: ignore + @property + def delete_member(self) -> Callable[[service.DeleteMemberRequest], empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteMember(self._session, self._host, self._interceptor) # type: ignore + @property def end_active_conference( self, @@ -855,12 +1675,26 @@ def end_active_conference( # In C++ this would require a dynamic_cast return self._EndActiveConference(self._session, self._host, self._interceptor) # type: ignore + @property + def get_member(self) -> Callable[[service.GetMemberRequest], resource.Member]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetMember(self._session, self._host, self._interceptor) # type: ignore + @property def get_space(self) -> Callable[[service.GetSpaceRequest], resource.Space]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast return self._GetSpace(self._session, self._host, self._interceptor) # type: ignore + @property + def list_members( + self, + ) -> Callable[[service.ListMembersRequest], service.ListMembersResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListMembers(self._session, self._host, self._interceptor) # type: ignore + @property def update_space(self) -> Callable[[service.UpdateSpaceRequest], resource.Space]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
diff --git a/packages/google-apps-meet/google/apps/meet_v2beta/services/spaces_service/transports/rest_base.py b/packages/google-apps-meet/google/apps/meet_v2beta/services/spaces_service/transports/rest_base.py index 8a8a7ad467be..27b5c9fd86b1 100644 --- a/packages/google-apps-meet/google/apps/meet_v2beta/services/spaces_service/transports/rest_base.py +++ b/packages/google-apps-meet/google/apps/meet_v2beta/services/spaces_service/transports/rest_base.py @@ -88,6 +88,63 @@ def __init__( api_audience=api_audience, ) + class _BaseCreateMember: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v2beta/{parent=spaces/*}/members", + "body": "member", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = service.CreateMemberRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseSpacesServiceRestTransport._BaseCreateMember._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + class _BaseCreateSpace: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") @@ -130,6 +187,53 @@ def _get_query_params_json(transcoded_request): query_params["$alt"] = "json;enum-encoding=int" return query_params + class _BaseDeleteMember: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v2beta/{name=spaces/*/members/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = service.DeleteMemberRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseSpacesServiceRestTransport._BaseDeleteMember._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + class _BaseEndActiveConference: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") @@ -187,6 +291,53 @@ def 
_get_query_params_json(transcoded_request): query_params["$alt"] = "json;enum-encoding=int" return query_params + class _BaseGetMember: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v2beta/{name=spaces/*/members/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = service.GetMemberRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseSpacesServiceRestTransport._BaseGetMember._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + class _BaseGetSpace: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") @@ -234,6 +385,53 @@ def _get_query_params_json(transcoded_request): query_params["$alt"] = "json;enum-encoding=int" return query_params + class _BaseListMembers: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v2beta/{parent=spaces/*}/members", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = service.ListMembersRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseSpacesServiceRestTransport._BaseListMembers._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + class _BaseUpdateSpace: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") diff --git a/packages/google-apps-meet/google/apps/meet_v2beta/types/__init__.py b/packages/google-apps-meet/google/apps/meet_v2beta/types/__init__.py index 18f59a904328..ffe987e9b960 100644 --- a/packages/google-apps-meet/google/apps/meet_v2beta/types/__init__.py +++ b/packages/google-apps-meet/google/apps/meet_v2beta/types/__init__.py @@ -19,6 +19,7 @@ ConferenceRecord, DocsDestination, DriveDestination, + Member, Participant, ParticipantSession, PhoneUser, @@ -30,9 +31,12 @@ TranscriptEntry, ) from .service import ( + CreateMemberRequest, CreateSpaceRequest, + DeleteMemberRequest, EndActiveConferenceRequest, GetConferenceRecordRequest, + GetMemberRequest, GetParticipantRequest, 
GetParticipantSessionRequest, GetRecordingRequest, @@ -41,6 +45,8 @@ GetTranscriptRequest, ListConferenceRecordsRequest, ListConferenceRecordsResponse, + ListMembersRequest, + ListMembersResponse, ListParticipantSessionsRequest, ListParticipantSessionsResponse, ListParticipantsRequest, @@ -60,6 +66,7 @@ "ConferenceRecord", "DocsDestination", "DriveDestination", + "Member", "Participant", "ParticipantSession", "PhoneUser", @@ -69,9 +76,12 @@ "SpaceConfig", "Transcript", "TranscriptEntry", + "CreateMemberRequest", "CreateSpaceRequest", + "DeleteMemberRequest", "EndActiveConferenceRequest", "GetConferenceRecordRequest", + "GetMemberRequest", "GetParticipantRequest", "GetParticipantSessionRequest", "GetRecordingRequest", @@ -80,6 +90,8 @@ "GetTranscriptRequest", "ListConferenceRecordsRequest", "ListConferenceRecordsResponse", + "ListMembersRequest", + "ListMembersResponse", "ListParticipantSessionsRequest", "ListParticipantSessionsResponse", "ListParticipantsRequest", diff --git a/packages/google-apps-meet/google/apps/meet_v2beta/types/resource.py b/packages/google-apps-meet/google/apps/meet_v2beta/types/resource.py index 32c5c6475313..9a9c4db43153 100644 --- a/packages/google-apps-meet/google/apps/meet_v2beta/types/resource.py +++ b/packages/google-apps-meet/google/apps/meet_v2beta/types/resource.py @@ -26,6 +26,7 @@ "Space", "ActiveConference", "SpaceConfig", + "Member", "ConferenceRecord", "Participant", "ParticipantSession", @@ -42,28 +43,42 @@ class Space(proto.Message): - r"""`Developer - Preview `__. - Virtual place where conferences are held. Only one active conference - can be held in one space at any given time. + r"""Virtual place where conferences are held. Only one active + conference can be held in one space at any given time. Attributes: name (str): - Immutable. Resource name of the space. Format: - ``spaces/{space}`` + Immutable. Resource name of the space. + + Format: ``spaces/{space}``. + + ``{space}`` is the resource identifier for the space. It's a + unique, server-generated ID and is case sensitive. For + example, ``jQCFfuBOdN5z``. + + For more information, see `How Meet identifies a meeting + space `__. meeting_uri (str): - Output only. URI used to join meeting, such as + Output only. URI used to join meetings consisting of + ``https://meet.google.com/`` followed by the + ``meeting_code``. For example, ``https://meet.google.com/abc-mnop-xyz``. meeting_code (str): - Output only. Type friendly code to join the meeting. Format: - ``[a-z]+-[a-z]+-[a-z]+`` such as ``abc-mnop-xyz``. The - maximum length is 128 characters. Can ONLY be used as alias - of the space ID to get the space. + Output only. Type friendly unique string used to join the + meeting. + + Format: ``[a-z]+-[a-z]+-[a-z]+``. For example, + ``abc-mnop-xyz``. + + The maximum length is 128 characters. + + Can only be used as an alias of the space name to get the + space. config (google.apps.meet_v2beta.types.SpaceConfig): Configuration pertaining to the meeting space. active_conference (google.apps.meet_v2beta.types.ActiveConference): - Active conference if it exists. + Active conference, if it exists. """ name: str = proto.Field( @@ -97,7 +112,7 @@ class ActiveConference(proto.Message): conference_record (str): Output only. Reference to 'ConferenceRecord' resource. Format: ``conferenceRecords/{conference_record}`` where - ``{conference_record}`` is a unique id for each instance of + ``{conference_record}`` is a unique ID for each instance of a call within a space. 
""" @@ -121,6 +136,26 @@ class SpaceConfig(proto.Message): Defines the entry points that can be used to join meetings hosted in this meeting space. Default: EntryPointAccess.ALL + moderation (google.apps.meet_v2beta.types.SpaceConfig.Moderation): + `Developer + Preview `__: + The pre-configured moderation mode for the Meeting. Default: + Controlled by the user's policies. + moderation_restrictions (google.apps.meet_v2beta.types.SpaceConfig.ModerationRestrictions): + `Developer + Preview `__: + When moderation.ON, these restrictions go into effect for + the meeting. When moderation.OFF, will be reset to default + ModerationRestrictions. + attendance_report_generation_type (google.apps.meet_v2beta.types.SpaceConfig.AttendanceReportGenerationType): + `Developer + Preview `__: + Whether attendance report is enabled for the meeting space. + artifact_config (google.apps.meet_v2beta.types.SpaceConfig.ArtifactConfig): + `Developer + Preview `__: + Configuration pertaining to the auto-generated artifacts + that the meeting supports. """ class AccessType(proto.Enum): @@ -150,7 +185,8 @@ class AccessType(proto.Enum): class EntryPointAccess(proto.Enum): r"""Entry points that can be used to join a meeting. Example: - ``meet.google.com``, the Embed SDK Web, or a mobile application. + ``meet.google.com``, the Meet Embed SDK Web, or a mobile + application. Values: ENTRY_POINT_ACCESS_UNSPECIFIED (0): @@ -160,14 +196,236 @@ class EntryPointAccess(proto.Enum): CREATOR_APP_ONLY (2): Only entry points owned by the Google Cloud project that created the space can be used to - join meetings in this space. Apps can use the - Embed SDK Web or mobile Meet SDKs to create + join meetings in this space. Apps can use the + Meet Embed SDK Web or mobile Meet SDKs to create owned entry points. """ ENTRY_POINT_ACCESS_UNSPECIFIED = 0 ALL = 1 CREATOR_APP_ONLY = 2 + class Moderation(proto.Enum): + r"""The moderation mode for a meeting. When the moderation mode + is on, the meeting owner has more control over the meeting with + features such as co-host management (see message Member) and + feature restrictions (see message ModerationRestrictions). + + Values: + MODERATION_UNSPECIFIED (0): + Moderation type is not specified. This is + used to indicate the user hasn't specified any + value as the user does not intend to update the + state. Users are not allowed to set the value as + unspecified. + OFF (1): + Moderation is off. + ON (2): + Moderation is on. + """ + MODERATION_UNSPECIFIED = 0 + OFF = 1 + ON = 2 + + class AttendanceReportGenerationType(proto.Enum): + r"""Possible states of whether attendance report is enabled for + the meeting space. + + Values: + ATTENDANCE_REPORT_GENERATION_TYPE_UNSPECIFIED (0): + Default value specified by user policy. + This should never be returned. + GENERATE_REPORT (1): + Attendance report will be generated and sent + to drive/email. + DO_NOT_GENERATE (2): + Attendance report will not be generated. + """ + ATTENDANCE_REPORT_GENERATION_TYPE_UNSPECIFIED = 0 + GENERATE_REPORT = 1 + DO_NOT_GENERATE = 2 + + class ModerationRestrictions(proto.Message): + r"""Defines restrictions for features when the meeting is + moderated. + + Attributes: + chat_restriction (google.apps.meet_v2beta.types.SpaceConfig.ModerationRestrictions.RestrictionType): + Defines who has permission to send chat + messages in the meeting space. + reaction_restriction (google.apps.meet_v2beta.types.SpaceConfig.ModerationRestrictions.RestrictionType): + Defines who has permission to send reactions + in the meeting space. 
+ present_restriction (google.apps.meet_v2beta.types.SpaceConfig.ModerationRestrictions.RestrictionType): + Defines who has permission to share their + screen in the meeting space. + default_join_as_viewer_type (google.apps.meet_v2beta.types.SpaceConfig.ModerationRestrictions.DefaultJoinAsViewerType): + Defines whether to restrict the default role + assigned to users as viewer. + """ + + class RestrictionType(proto.Enum): + r"""Determines who has permission to use a particular feature. + + Values: + RESTRICTION_TYPE_UNSPECIFIED (0): + Default value specified by user policy. + This should never be returned. + HOSTS_ONLY (1): + Meeting owner and co-host have the + permission. + NO_RESTRICTION (2): + All Participants have permissions. + """ + RESTRICTION_TYPE_UNSPECIFIED = 0 + HOSTS_ONLY = 1 + NO_RESTRICTION = 2 + + class DefaultJoinAsViewerType(proto.Enum): + r"""By default users will join as contributors. Hosts can + restrict users to join as viewers. + Note: If an explicit role is set for a users in the Member + resource, the user will join as that role. + + Values: + DEFAULT_JOIN_AS_VIEWER_TYPE_UNSPECIFIED (0): + Default value specified by user policy. + This should never be returned. + ON (1): + Users will by default join as viewers. + OFF (2): + Users will by default join as contributors. + """ + DEFAULT_JOIN_AS_VIEWER_TYPE_UNSPECIFIED = 0 + ON = 1 + OFF = 2 + + chat_restriction: "SpaceConfig.ModerationRestrictions.RestrictionType" = ( + proto.Field( + proto.ENUM, + number=1, + enum="SpaceConfig.ModerationRestrictions.RestrictionType", + ) + ) + reaction_restriction: "SpaceConfig.ModerationRestrictions.RestrictionType" = ( + proto.Field( + proto.ENUM, + number=2, + enum="SpaceConfig.ModerationRestrictions.RestrictionType", + ) + ) + present_restriction: "SpaceConfig.ModerationRestrictions.RestrictionType" = ( + proto.Field( + proto.ENUM, + number=3, + enum="SpaceConfig.ModerationRestrictions.RestrictionType", + ) + ) + default_join_as_viewer_type: "SpaceConfig.ModerationRestrictions.DefaultJoinAsViewerType" = proto.Field( + proto.ENUM, + number=4, + enum="SpaceConfig.ModerationRestrictions.DefaultJoinAsViewerType", + ) + + class ArtifactConfig(proto.Message): + r"""Configuration related to meeting artifacts potentially + generated by this meeting space. + + Attributes: + recording_config (google.apps.meet_v2beta.types.SpaceConfig.ArtifactConfig.RecordingConfig): + Configuration for recording. + transcription_config (google.apps.meet_v2beta.types.SpaceConfig.ArtifactConfig.TranscriptionConfig): + Configuration for auto-transcript. + smart_notes_config (google.apps.meet_v2beta.types.SpaceConfig.ArtifactConfig.SmartNotesConfig): + Configuration for auto-smart-notes. + """ + + class AutoGenerationType(proto.Enum): + r"""Determines whether an artifact can be automatically generated + in the meeting space. + + Values: + AUTO_GENERATION_TYPE_UNSPECIFIED (0): + Default value specified by user policy. + This should never be returned. + ON (1): + The artifact is generated automatically. + OFF (2): + The artifact is not generated automatically. + """ + AUTO_GENERATION_TYPE_UNSPECIFIED = 0 + ON = 1 + OFF = 2 + + class RecordingConfig(proto.Message): + r"""Configuration related to recording in a meeting space. + + Attributes: + auto_recording_generation (google.apps.meet_v2beta.types.SpaceConfig.ArtifactConfig.AutoGenerationType): + Defines whether a meeting space is + automatically recorded when someone with the + privilege to record joins the meeting. 
+ """ + + auto_recording_generation: "SpaceConfig.ArtifactConfig.AutoGenerationType" = proto.Field( + proto.ENUM, + number=2, + enum="SpaceConfig.ArtifactConfig.AutoGenerationType", + ) + + class TranscriptionConfig(proto.Message): + r"""Configuration related to transcription in a meeting space. + + Attributes: + auto_transcription_generation (google.apps.meet_v2beta.types.SpaceConfig.ArtifactConfig.AutoGenerationType): + Defines whether the content of a meeting is + automatically transcribed when someone with the + privilege to transcribe joins the meeting. + """ + + auto_transcription_generation: "SpaceConfig.ArtifactConfig.AutoGenerationType" = proto.Field( + proto.ENUM, + number=2, + enum="SpaceConfig.ArtifactConfig.AutoGenerationType", + ) + + class SmartNotesConfig(proto.Message): + r"""Configuration related to smart notes in a meeting space. More + details about smart notes + https://support.google.com/meet/answer/14754931?hl=en. + + Attributes: + auto_smart_notes_generation (google.apps.meet_v2beta.types.SpaceConfig.ArtifactConfig.AutoGenerationType): + Defines whether to automatically generate a + summary and recap of the meeting for all + invitees in the organization when someone with + the privilege to enable smart notes joins the + meeting. + """ + + auto_smart_notes_generation: "SpaceConfig.ArtifactConfig.AutoGenerationType" = proto.Field( + proto.ENUM, + number=2, + enum="SpaceConfig.ArtifactConfig.AutoGenerationType", + ) + + recording_config: "SpaceConfig.ArtifactConfig.RecordingConfig" = proto.Field( + proto.MESSAGE, + number=1, + message="SpaceConfig.ArtifactConfig.RecordingConfig", + ) + transcription_config: "SpaceConfig.ArtifactConfig.TranscriptionConfig" = ( + proto.Field( + proto.MESSAGE, + number=2, + message="SpaceConfig.ArtifactConfig.TranscriptionConfig", + ) + ) + smart_notes_config: "SpaceConfig.ArtifactConfig.SmartNotesConfig" = proto.Field( + proto.MESSAGE, + number=3, + message="SpaceConfig.ArtifactConfig.SmartNotesConfig", + ) + access_type: AccessType = proto.Field( proto.ENUM, number=1, @@ -178,31 +436,108 @@ class EntryPointAccess(proto.Enum): number=2, enum=EntryPointAccess, ) + moderation: Moderation = proto.Field( + proto.ENUM, + number=3, + enum=Moderation, + ) + moderation_restrictions: ModerationRestrictions = proto.Field( + proto.MESSAGE, + number=4, + message=ModerationRestrictions, + ) + attendance_report_generation_type: AttendanceReportGenerationType = proto.Field( + proto.ENUM, + number=6, + enum=AttendanceReportGenerationType, + ) + artifact_config: ArtifactConfig = proto.Field( + proto.MESSAGE, + number=7, + message=ArtifactConfig, + ) + + +class Member(proto.Message): + r"""Users who are configured to have a role in the space. These + users can join the space without knocking. + + Attributes: + name (str): + Identifier. Resource name of the member. + Format: spaces/{space}/members/{member} + email (str): + Email for the member. This is required for + creating the member. + role (google.apps.meet_v2beta.types.Member.Role): + The meeting role assigned to the member. + user (str): + `Developer + Preview `__: + Unique name for the user. Interoperable with Admin SDK API + and People API. This will be empty for non google users. + Setting both user and email in request will result in error. + Format: ``users/{user}`` + """ + + class Role(proto.Enum): + r"""Role of this member in the space. 
+ + Values: + ROLE_UNSPECIFIED (0): + This is used to indicate the user hasn't + specified any value and the user’s role will be + determined upon joining the meetings between + 'contributor' and 'viewer' role depending on + meeting configuration. More details about viewer + role + https://support.google.com/meet/answer/13658394?hl=en. + COHOST (1): + Co-host role. + """ + ROLE_UNSPECIFIED = 0 + COHOST = 1 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + email: str = proto.Field( + proto.STRING, + number=2, + ) + role: Role = proto.Field( + proto.ENUM, + number=3, + enum=Role, + ) + user: str = proto.Field( + proto.STRING, + number=4, + ) class ConferenceRecord(proto.Message): - r"""`Developer - Preview `__. Single - instance of a meeting held in a space. + r"""Single instance of a meeting held in a space. Attributes: name (str): Identifier. Resource name of the conference record. Format: ``conferenceRecords/{conference_record}`` where - ``{conference_record}`` is a unique id for each instance of + ``{conference_record}`` is a unique ID for each instance of a call within a space. start_time (google.protobuf.timestamp_pb2.Timestamp): Output only. Timestamp when the conference - started, always set. + started. Always set. end_time (google.protobuf.timestamp_pb2.Timestamp): Output only. Timestamp when the conference ended. Set for past conferences. Unset if the conference is ongoing. expire_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. Server enforced expire time for - when this conference record resource is deleted. - The resource is deleted 30 days after the - conference ends. + Output only. Server enforced expiration time + for when this conference record resource is + deleted. The resource is deleted 30 days after + the conference ends. space (str): Output only. The space where the conference was held. @@ -234,9 +569,7 @@ class ConferenceRecord(proto.Message): class Participant(proto.Message): - r"""`Developer - Preview `__. User - who attended or is attending a conference. + r"""User who attended or is attending a conference. This message has `oneof`_ fields (mutually exclusive fields). For each oneof, at most one member field can be set at the same time. @@ -255,19 +588,19 @@ class Participant(proto.Message): This field is a member of `oneof`_ ``user``. phone_user (google.apps.meet_v2beta.types.PhoneUser): - User who calls in from their phone. + User calling from their phone. This field is a member of `oneof`_ ``user``. name (str): Output only. Resource name of the participant. Format: ``conferenceRecords/{conference_record}/participants/{participant}`` earliest_start_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. Time when the participant joined - the meeting for the first time. + Output only. Time when the participant first + joined the meeting. latest_end_time (google.protobuf.timestamp_pb2.Timestamp): Output only. Time when the participant left the meeting for the last time. This can be null - if it is an active meeting. + if it's an active meeting. """ signedin_user: "SignedinUser" = proto.Field( @@ -305,23 +638,22 @@ class Participant(proto.Message): class ParticipantSession(proto.Message): - r"""`Developer - Preview `__. Refers - to each unique join/leave session when a user joins a conference - from a device. Note that any time a user joins the conference a new - unique ID is assigned. 
That means if a user joins a space multiple - times from the same device, they're assigned different IDs, and are - also be treated as different participant sessions. + r"""Refers to each unique join or leave session when a user joins + a conference from a device. Note that any time a user joins the + conference a new unique ID is assigned. That means if a user + joins a space multiple times from the same device, they're + assigned different IDs, and are also be treated as different + participant sessions. Attributes: name (str): Identifier. Session id. start_time (google.protobuf.timestamp_pb2.Timestamp): Output only. Timestamp when the user session - started. + starts. end_time (google.protobuf.timestamp_pb2.Timestamp): Output only. Timestamp when the user session - ended. Unset if the user session hasn’t ended. + ends. Unset if the user session hasn’t ended. """ name: str = proto.Field( @@ -353,9 +685,9 @@ class SignedinUser(proto.Message): Admin SDK API and People API. Format: ``users/{user}`` display_name (str): Output only. For a personal device, it's the - user's first and last name. For a robot account, - it's the admin specified device name. For - example, "Altostrat Room". + user's first name and last name. For a robot + account, it's the administrator-specified device + name. For example, "Altostrat Room". """ user: str = proto.Field( @@ -391,7 +723,7 @@ class PhoneUser(proto.Message): Attributes: display_name (str): Output only. Partially redacted user's phone - number when they call in. + number when calling. """ display_name: str = proto.Field( @@ -401,16 +733,13 @@ class PhoneUser(proto.Message): class Recording(proto.Message): - r"""`Developer - Preview `__. - Metadata about a recording created during a conference. - + r"""Metadata about a recording created during a conference. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields Attributes: drive_destination (google.apps.meet_v2beta.types.DriveDestination): - Output only. Recording is saved to Google Drive as an mp4 + Output only. Recording is saved to Google Drive as an MP4 file. The ``drive_destination`` includes the Drive ``fileId`` that can be used to download the file using the ``files.get`` method of the Drive API. @@ -506,11 +835,9 @@ class DriveDestination(proto.Message): class Transcript(proto.Message): - r"""`Developer - Preview `__. - Metadata for a transcript generated from a conference. It refers to - the ASR (Automatic Speech Recognition) result of user's speech - during the conference. + r"""Metadata for a transcript generated from a conference. It + refers to the ASR (Automatic Speech Recognition) result of + user's speech during the conference. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -611,16 +938,15 @@ class DocsDestination(proto.Message): class TranscriptEntry(proto.Message): - r"""`Developer - Preview `__. Single - entry for one user’s speech during a transcript session. + r"""Single entry for one user’s speech during a transcript + session. Attributes: name (str): Output only. Resource name of the entry. Format: "conferenceRecords/{conference_record}/transcripts/{transcript}/entries/{entry}". participant (str): - Output only. Refer to the participant who + Output only. Refers to the participant who speaks. text (str): Output only. 
The transcribed text of the diff --git a/packages/google-apps-meet/google/apps/meet_v2beta/types/service.py b/packages/google-apps-meet/google/apps/meet_v2beta/types/service.py index 9fee3426ad28..db59c5172554 100644 --- a/packages/google-apps-meet/google/apps/meet_v2beta/types/service.py +++ b/packages/google-apps-meet/google/apps/meet_v2beta/types/service.py @@ -29,6 +29,11 @@ "GetSpaceRequest", "UpdateSpaceRequest", "EndActiveConferenceRequest", + "CreateMemberRequest", + "GetMemberRequest", + "ListMembersRequest", + "ListMembersResponse", + "DeleteMemberRequest", "GetConferenceRecordRequest", "ListConferenceRecordsRequest", "ListConferenceRecordsResponse", @@ -75,6 +80,27 @@ class GetSpaceRequest(proto.Message): Attributes: name (str): Required. Resource name of the space. + + Format: ``spaces/{space}`` or ``spaces/{meetingCode}``. + + ``{space}`` is the resource identifier for the space. It's a + unique, server-generated ID and is case sensitive. For + example, ``jQCFfuBOdN5z``. + + ``{meetingCode}`` is an alias for the space. It's a + typeable, unique character string and is non-case sensitive. + For example, ``abc-mnop-xyz``. The maximum length is 128 + characters. + + A ``meetingCode`` shouldn't be stored long term as it can + become dissociated from a meeting space and can be reused + for different meeting spaces in the future. Generally, a + ``meetingCode`` expires 365 days after last use. For more + information, see `Learn about meeting codes in Google + Meet `__. + + For more information, see `How Meet identifies a meeting + space `__. """ name: str = proto.Field( @@ -91,9 +117,11 @@ class UpdateSpaceRequest(proto.Message): Required. Space to be updated. update_mask (google.protobuf.field_mask_pb2.FieldMask): Optional. Field mask used to specify the fields to be - updated in the space. If update_mask isn't provided, it - defaults to '*' and updates all fields provided in the - request, including deleting fields not set in the request. + updated in the space. If update_mask isn't provided(not set, + set with empty paths, or only has "" as paths), it defaults + to update all fields provided with values in the request. + Using "*" as update_mask will update all fields, including + deleting fields not set in the request. """ space: resource.Space = proto.Field( @@ -114,6 +142,125 @@ class EndActiveConferenceRequest(proto.Message): Attributes: name (str): Required. Resource name of the space. + + Format: ``spaces/{space}``. + + ``{space}`` is the resource identifier for the space. It's a + unique, server-generated ID and is case sensitive. For + example, ``jQCFfuBOdN5z``. + + For more information, see `How Meet identifies a meeting + space `__. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class CreateMemberRequest(proto.Message): + r"""Request to create a member for a space. + + Attributes: + parent (str): + Required. Format: spaces/{space} + member (google.apps.meet_v2beta.types.Member): + Required. The member to be created. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + member: resource.Member = proto.Field( + proto.MESSAGE, + number=2, + message=resource.Member, + ) + + +class GetMemberRequest(proto.Message): + r"""Request to get a member from a space. + + Attributes: + name (str): + Required. Format: + “spaces/{space}/members/{member}” + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListMembersRequest(proto.Message): + r"""Request to list all members of a space. 
+ + Attributes: + parent (str): + Required. Format: spaces/{space} + page_size (int): + Optional. Maximum number of members to + return. The service might return fewer than this + value. If unspecified, at most 25 members are + returned. The maximum value is 100; values above + 100 are coerced to 100. Maximum might change in + the future. + page_token (str): + Optional. Page token returned from previous + List Call. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ListMembersResponse(proto.Message): + r"""Response of list members. + + Attributes: + members (MutableSequence[google.apps.meet_v2beta.types.Member]): + The list of members for the current page. + next_page_token (str): + Token to be circulated back for further list + call if current list doesn't include all the + members. Unset if all members are returned. + """ + + @property + def raw_page(self): + return self + + members: MutableSequence[resource.Member] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=resource.Member, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class DeleteMemberRequest(proto.Message): + r"""Request to delete a member from a space. + + Attributes: + name (str): + Required. Format: + “spaces/{space}/members/{member}” """ name: str = proto.Field( @@ -151,7 +298,8 @@ class ListConferenceRecordsRequest(proto.Message): Optional. Page token returned from previous List Call. filter (str): - Optional. User specified filtering condition in EBNF format. + Optional. User specified filtering condition in `EBNF + format `__. The following are the filterable fields: - ``space.meeting_code`` @@ -159,7 +307,12 @@ class ListConferenceRecordsRequest(proto.Message): - ``start_time`` - ``end_time`` - For example, ``space.meeting_code = "abc-mnop-xyz"``. + For example, consider the following filters: + + - ``space.name = "spaces/NAME"`` + - ``space.meeting_code = "abc-mnop-xyz"`` + - ``start_time>="2024-01-01T00:00:00.000Z" AND start_time<="2024-01-02T00:00:00.000Z"`` + - ``end_time IS NULL`` """ page_size: int = proto.Field( @@ -207,7 +360,7 @@ def raw_page(self): class GetParticipantRequest(proto.Message): - r"""Request to get a Participant. + r"""Request to get a participant. Attributes: name (str): @@ -221,7 +374,7 @@ class GetParticipantRequest(proto.Message): class ListParticipantsRequest(proto.Message): - r"""Request to fetch list of participant per conference. + r"""Request to fetch list of participants per conference. Attributes: parent (str): @@ -236,7 +389,8 @@ class ListParticipantsRequest(proto.Message): page_token (str): Page token returned from previous List Call. filter (str): - Optional. User specified filtering condition in EBNF format. + Optional. User specified filtering condition in `EBNF + format `__. The following are the filterable fields: - ``earliest_start_time`` @@ -317,7 +471,7 @@ class GetParticipantSessionRequest(proto.Message): class ListParticipantSessionsRequest(proto.Message): r"""Request to fetch list of participant sessions per conference - record per participant. + record, per participant. Attributes: parent (str): @@ -334,7 +488,8 @@ class ListParticipantSessionsRequest(proto.Message): Optional. Page token returned from previous List Call. filter (str): - Optional. User specified filtering condition in EBNF format. + Optional. User specified filtering condition in `EBNF + format `__. 
The following are the filterable fields: - ``start_time`` @@ -585,7 +740,7 @@ class ListTranscriptEntriesRequest(proto.Message): class ListTranscriptEntriesResponse(proto.Message): - r"""Response for ListTranscriptEntries method + r"""Response for ListTranscriptEntries method. Attributes: transcript_entries (MutableSequence[google.apps.meet_v2beta.types.TranscriptEntry]): diff --git a/packages/google-apps-meet/samples/generated_samples/meet_v2beta_generated_spaces_service_create_member_async.py b/packages/google-apps-meet/samples/generated_samples/meet_v2beta_generated_spaces_service_create_member_async.py new file mode 100644 index 000000000000..b5f29bde0108 --- /dev/null +++ b/packages/google-apps-meet/samples/generated_samples/meet_v2beta_generated_spaces_service_create_member_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateMember +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-apps-meet + + +# [START meet_v2beta_generated_SpacesService_CreateMember_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.apps import meet_v2beta + + +async def sample_create_member(): + # Create a client + client = meet_v2beta.SpacesServiceAsyncClient() + + # Initialize request argument(s) + request = meet_v2beta.CreateMemberRequest( + parent="parent_value", + ) + + # Make the request + response = await client.create_member(request=request) + + # Handle the response + print(response) + +# [END meet_v2beta_generated_SpacesService_CreateMember_async] diff --git a/packages/google-apps-meet/samples/generated_samples/meet_v2beta_generated_spaces_service_create_member_sync.py b/packages/google-apps-meet/samples/generated_samples/meet_v2beta_generated_spaces_service_create_member_sync.py new file mode 100644 index 000000000000..f2555e57edba --- /dev/null +++ b/packages/google-apps-meet/samples/generated_samples/meet_v2beta_generated_spaces_service_create_member_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateMember +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-apps-meet + + +# [START meet_v2beta_generated_SpacesService_CreateMember_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.apps import meet_v2beta + + +def sample_create_member(): + # Create a client + client = meet_v2beta.SpacesServiceClient() + + # Initialize request argument(s) + request = meet_v2beta.CreateMemberRequest( + parent="parent_value", + ) + + # Make the request + response = client.create_member(request=request) + + # Handle the response + print(response) + +# [END meet_v2beta_generated_SpacesService_CreateMember_sync] diff --git a/packages/google-apps-meet/samples/generated_samples/meet_v2beta_generated_spaces_service_delete_member_async.py b/packages/google-apps-meet/samples/generated_samples/meet_v2beta_generated_spaces_service_delete_member_async.py new file mode 100644 index 000000000000..ac56a869f58a --- /dev/null +++ b/packages/google-apps-meet/samples/generated_samples/meet_v2beta_generated_spaces_service_delete_member_async.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteMember +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-apps-meet + + +# [START meet_v2beta_generated_SpacesService_DeleteMember_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.apps import meet_v2beta + + +async def sample_delete_member(): + # Create a client + client = meet_v2beta.SpacesServiceAsyncClient() + + # Initialize request argument(s) + request = meet_v2beta.DeleteMemberRequest( + name="name_value", + ) + + # Make the request + await client.delete_member(request=request) + + +# [END meet_v2beta_generated_SpacesService_DeleteMember_async] diff --git a/packages/google-apps-meet/samples/generated_samples/meet_v2beta_generated_spaces_service_delete_member_sync.py b/packages/google-apps-meet/samples/generated_samples/meet_v2beta_generated_spaces_service_delete_member_sync.py new file mode 100644 index 000000000000..18904c938f20 --- /dev/null +++ b/packages/google-apps-meet/samples/generated_samples/meet_v2beta_generated_spaces_service_delete_member_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteMember +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-apps-meet + + +# [START meet_v2beta_generated_SpacesService_DeleteMember_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.apps import meet_v2beta + + +def sample_delete_member(): + # Create a client + client = meet_v2beta.SpacesServiceClient() + + # Initialize request argument(s) + request = meet_v2beta.DeleteMemberRequest( + name="name_value", + ) + + # Make the request + client.delete_member(request=request) + + +# [END meet_v2beta_generated_SpacesService_DeleteMember_sync] diff --git a/packages/google-apps-meet/samples/generated_samples/meet_v2beta_generated_spaces_service_get_member_async.py b/packages/google-apps-meet/samples/generated_samples/meet_v2beta_generated_spaces_service_get_member_async.py new file mode 100644 index 000000000000..7af04f8f0e77 --- /dev/null +++ b/packages/google-apps-meet/samples/generated_samples/meet_v2beta_generated_spaces_service_get_member_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetMember +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-apps-meet + + +# [START meet_v2beta_generated_SpacesService_GetMember_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.apps import meet_v2beta + + +async def sample_get_member(): + # Create a client + client = meet_v2beta.SpacesServiceAsyncClient() + + # Initialize request argument(s) + request = meet_v2beta.GetMemberRequest( + name="name_value", + ) + + # Make the request + response = await client.get_member(request=request) + + # Handle the response + print(response) + +# [END meet_v2beta_generated_SpacesService_GetMember_async] diff --git a/packages/google-apps-meet/samples/generated_samples/meet_v2beta_generated_spaces_service_get_member_sync.py b/packages/google-apps-meet/samples/generated_samples/meet_v2beta_generated_spaces_service_get_member_sync.py new file mode 100644 index 000000000000..d0dd69435a71 --- /dev/null +++ b/packages/google-apps-meet/samples/generated_samples/meet_v2beta_generated_spaces_service_get_member_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetMember +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-apps-meet + + +# [START meet_v2beta_generated_SpacesService_GetMember_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.apps import meet_v2beta + + +def sample_get_member(): + # Create a client + client = meet_v2beta.SpacesServiceClient() + + # Initialize request argument(s) + request = meet_v2beta.GetMemberRequest( + name="name_value", + ) + + # Make the request + response = client.get_member(request=request) + + # Handle the response + print(response) + +# [END meet_v2beta_generated_SpacesService_GetMember_sync] diff --git a/packages/google-apps-meet/samples/generated_samples/meet_v2beta_generated_spaces_service_list_members_async.py b/packages/google-apps-meet/samples/generated_samples/meet_v2beta_generated_spaces_service_list_members_async.py new file mode 100644 index 000000000000..ed0de3e6cb8c --- /dev/null +++ b/packages/google-apps-meet/samples/generated_samples/meet_v2beta_generated_spaces_service_list_members_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListMembers +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-apps-meet + + +# [START meet_v2beta_generated_SpacesService_ListMembers_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.apps import meet_v2beta + + +async def sample_list_members(): + # Create a client + client = meet_v2beta.SpacesServiceAsyncClient() + + # Initialize request argument(s) + request = meet_v2beta.ListMembersRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_members(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END meet_v2beta_generated_SpacesService_ListMembers_async] diff --git a/packages/google-apps-meet/samples/generated_samples/meet_v2beta_generated_spaces_service_list_members_sync.py b/packages/google-apps-meet/samples/generated_samples/meet_v2beta_generated_spaces_service_list_members_sync.py new file mode 100644 index 000000000000..91c534b156c3 --- /dev/null +++ b/packages/google-apps-meet/samples/generated_samples/meet_v2beta_generated_spaces_service_list_members_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListMembers +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-apps-meet + + +# [START meet_v2beta_generated_SpacesService_ListMembers_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.apps import meet_v2beta + + +def sample_list_members(): + # Create a client + client = meet_v2beta.SpacesServiceClient() + + # Initialize request argument(s) + request = meet_v2beta.ListMembersRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_members(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END meet_v2beta_generated_SpacesService_ListMembers_sync] diff --git a/packages/google-apps-meet/samples/generated_samples/snippet_metadata_google.apps.meet.v2.json b/packages/google-apps-meet/samples/generated_samples/snippet_metadata_google.apps.meet.v2.json index a0690b563972..d6514628195e 100644 --- a/packages/google-apps-meet/samples/generated_samples/snippet_metadata_google.apps.meet.v2.json +++ b/packages/google-apps-meet/samples/generated_samples/snippet_metadata_google.apps.meet.v2.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-apps-meet", - "version": "0.1.11" + "version": "0.1.13" }, "snippets": [ { diff --git a/packages/google-apps-meet/samples/generated_samples/snippet_metadata_google.apps.meet.v2beta.json b/packages/google-apps-meet/samples/generated_samples/snippet_metadata_google.apps.meet.v2beta.json index e40b3c920125..4b01ed7641f1 100644 --- a/packages/google-apps-meet/samples/generated_samples/snippet_metadata_google.apps.meet.v2beta.json +++ b/packages/google-apps-meet/samples/generated_samples/snippet_metadata_google.apps.meet.v2beta.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-apps-meet", - "version": "0.1.11" + "version": "0.1.13" }, "snippets": [ { @@ -1935,6 +1935,175 @@ ], "title": "meet_v2beta_generated_conference_records_service_list_transcripts_sync.py" }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.apps.meet_v2beta.SpacesServiceAsyncClient", + "shortName": "SpacesServiceAsyncClient" + }, + "fullName": "google.apps.meet_v2beta.SpacesServiceAsyncClient.create_member", + "method": { + "fullName": "google.apps.meet.v2beta.SpacesService.CreateMember", + "service": { + "fullName": "google.apps.meet.v2beta.SpacesService", + "shortName": "SpacesService" + }, + "shortName": "CreateMember" + }, + "parameters": [ + { + "name": "request", + "type": "google.apps.meet_v2beta.types.CreateMemberRequest" + }, + { 
+ "name": "parent", + "type": "str" + }, + { + "name": "member", + "type": "google.apps.meet_v2beta.types.Member" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.apps.meet_v2beta.types.Member", + "shortName": "create_member" + }, + "description": "Sample for CreateMember", + "file": "meet_v2beta_generated_spaces_service_create_member_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "meet_v2beta_generated_SpacesService_CreateMember_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "meet_v2beta_generated_spaces_service_create_member_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.apps.meet_v2beta.SpacesServiceClient", + "shortName": "SpacesServiceClient" + }, + "fullName": "google.apps.meet_v2beta.SpacesServiceClient.create_member", + "method": { + "fullName": "google.apps.meet.v2beta.SpacesService.CreateMember", + "service": { + "fullName": "google.apps.meet.v2beta.SpacesService", + "shortName": "SpacesService" + }, + "shortName": "CreateMember" + }, + "parameters": [ + { + "name": "request", + "type": "google.apps.meet_v2beta.types.CreateMemberRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "member", + "type": "google.apps.meet_v2beta.types.Member" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.apps.meet_v2beta.types.Member", + "shortName": "create_member" + }, + "description": "Sample for CreateMember", + "file": "meet_v2beta_generated_spaces_service_create_member_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "meet_v2beta_generated_SpacesService_CreateMember_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "meet_v2beta_generated_spaces_service_create_member_sync.py" + }, { "canonical": true, "clientMethod": { @@ -2104,19 +2273,19 @@ "fullName": "google.apps.meet_v2beta.SpacesServiceAsyncClient", "shortName": "SpacesServiceAsyncClient" }, - "fullName": "google.apps.meet_v2beta.SpacesServiceAsyncClient.end_active_conference", + "fullName": "google.apps.meet_v2beta.SpacesServiceAsyncClient.delete_member", "method": { - "fullName": "google.apps.meet.v2beta.SpacesService.EndActiveConference", + "fullName": "google.apps.meet.v2beta.SpacesService.DeleteMember", "service": { "fullName": "google.apps.meet.v2beta.SpacesService", "shortName": "SpacesService" }, - "shortName": "EndActiveConference" + "shortName": "DeleteMember" }, "parameters": [ { "name": "request", - 
"type": "google.apps.meet_v2beta.types.EndActiveConferenceRequest" + "type": "google.apps.meet_v2beta.types.DeleteMemberRequest" }, { "name": "name", @@ -2135,13 +2304,13 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "shortName": "end_active_conference" + "shortName": "delete_member" }, - "description": "Sample for EndActiveConference", - "file": "meet_v2beta_generated_spaces_service_end_active_conference_async.py", + "description": "Sample for DeleteMember", + "file": "meet_v2beta_generated_spaces_service_delete_member_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "meet_v2beta_generated_SpacesService_EndActiveConference_async", + "regionTag": "meet_v2beta_generated_SpacesService_DeleteMember_async", "segments": [ { "end": 49, @@ -2172,7 +2341,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "meet_v2beta_generated_spaces_service_end_active_conference_async.py" + "title": "meet_v2beta_generated_spaces_service_delete_member_async.py" }, { "canonical": true, @@ -2181,19 +2350,19 @@ "fullName": "google.apps.meet_v2beta.SpacesServiceClient", "shortName": "SpacesServiceClient" }, - "fullName": "google.apps.meet_v2beta.SpacesServiceClient.end_active_conference", + "fullName": "google.apps.meet_v2beta.SpacesServiceClient.delete_member", "method": { - "fullName": "google.apps.meet.v2beta.SpacesService.EndActiveConference", + "fullName": "google.apps.meet.v2beta.SpacesService.DeleteMember", "service": { "fullName": "google.apps.meet.v2beta.SpacesService", "shortName": "SpacesService" }, - "shortName": "EndActiveConference" + "shortName": "DeleteMember" }, "parameters": [ { "name": "request", - "type": "google.apps.meet_v2beta.types.EndActiveConferenceRequest" + "type": "google.apps.meet_v2beta.types.DeleteMemberRequest" }, { "name": "name", @@ -2212,13 +2381,13 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "shortName": "end_active_conference" + "shortName": "delete_member" }, - "description": "Sample for EndActiveConference", - "file": "meet_v2beta_generated_spaces_service_end_active_conference_sync.py", + "description": "Sample for DeleteMember", + "file": "meet_v2beta_generated_spaces_service_delete_member_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "meet_v2beta_generated_SpacesService_EndActiveConference_sync", + "regionTag": "meet_v2beta_generated_SpacesService_DeleteMember_sync", "segments": [ { "end": 49, @@ -2249,7 +2418,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "meet_v2beta_generated_spaces_service_end_active_conference_sync.py" + "title": "meet_v2beta_generated_spaces_service_delete_member_sync.py" }, { "canonical": true, @@ -2259,19 +2428,19 @@ "fullName": "google.apps.meet_v2beta.SpacesServiceAsyncClient", "shortName": "SpacesServiceAsyncClient" }, - "fullName": "google.apps.meet_v2beta.SpacesServiceAsyncClient.get_space", + "fullName": "google.apps.meet_v2beta.SpacesServiceAsyncClient.end_active_conference", "method": { - "fullName": "google.apps.meet.v2beta.SpacesService.GetSpace", + "fullName": "google.apps.meet.v2beta.SpacesService.EndActiveConference", "service": { "fullName": "google.apps.meet.v2beta.SpacesService", "shortName": "SpacesService" }, - "shortName": "GetSpace" + "shortName": "EndActiveConference" }, "parameters": [ { "name": "request", - "type": "google.apps.meet_v2beta.types.GetSpaceRequest" + "type": "google.apps.meet_v2beta.types.EndActiveConferenceRequest" }, { "name": "name", @@ -2290,22 +2459,21 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": 
"google.apps.meet_v2beta.types.Space", - "shortName": "get_space" + "shortName": "end_active_conference" }, - "description": "Sample for GetSpace", - "file": "meet_v2beta_generated_spaces_service_get_space_async.py", + "description": "Sample for EndActiveConference", + "file": "meet_v2beta_generated_spaces_service_end_active_conference_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "meet_v2beta_generated_SpacesService_GetSpace_async", + "regionTag": "meet_v2beta_generated_SpacesService_EndActiveConference_async", "segments": [ { - "end": 51, + "end": 49, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 49, "start": 27, "type": "SHORT" }, @@ -2320,17 +2488,15 @@ "type": "REQUEST_INITIALIZATION" }, { - "end": 48, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 49, + "end": 50, "type": "RESPONSE_HANDLING" } ], - "title": "meet_v2beta_generated_spaces_service_get_space_async.py" + "title": "meet_v2beta_generated_spaces_service_end_active_conference_async.py" }, { "canonical": true, @@ -2339,19 +2505,19 @@ "fullName": "google.apps.meet_v2beta.SpacesServiceClient", "shortName": "SpacesServiceClient" }, - "fullName": "google.apps.meet_v2beta.SpacesServiceClient.get_space", + "fullName": "google.apps.meet_v2beta.SpacesServiceClient.end_active_conference", "method": { - "fullName": "google.apps.meet.v2beta.SpacesService.GetSpace", + "fullName": "google.apps.meet.v2beta.SpacesService.EndActiveConference", "service": { "fullName": "google.apps.meet.v2beta.SpacesService", "shortName": "SpacesService" }, - "shortName": "GetSpace" + "shortName": "EndActiveConference" }, "parameters": [ { "name": "request", - "type": "google.apps.meet_v2beta.types.GetSpaceRequest" + "type": "google.apps.meet_v2beta.types.EndActiveConferenceRequest" }, { "name": "name", @@ -2370,22 +2536,21 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.apps.meet_v2beta.types.Space", - "shortName": "get_space" + "shortName": "end_active_conference" }, - "description": "Sample for GetSpace", - "file": "meet_v2beta_generated_spaces_service_get_space_sync.py", + "description": "Sample for EndActiveConference", + "file": "meet_v2beta_generated_spaces_service_end_active_conference_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "meet_v2beta_generated_SpacesService_GetSpace_sync", + "regionTag": "meet_v2beta_generated_SpacesService_EndActiveConference_sync", "segments": [ { - "end": 51, + "end": 49, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 49, "start": 27, "type": "SHORT" }, @@ -2400,17 +2565,498 @@ "type": "REQUEST_INITIALIZATION" }, { - "end": 48, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 49, + "end": 50, "type": "RESPONSE_HANDLING" } ], - "title": "meet_v2beta_generated_spaces_service_get_space_sync.py" + "title": "meet_v2beta_generated_spaces_service_end_active_conference_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.apps.meet_v2beta.SpacesServiceAsyncClient", + "shortName": "SpacesServiceAsyncClient" + }, + "fullName": "google.apps.meet_v2beta.SpacesServiceAsyncClient.get_member", + "method": { + "fullName": "google.apps.meet.v2beta.SpacesService.GetMember", + "service": { + "fullName": "google.apps.meet.v2beta.SpacesService", + "shortName": "SpacesService" + }, + "shortName": "GetMember" + }, + "parameters": [ + { + "name": "request", + "type": "google.apps.meet_v2beta.types.GetMemberRequest" + }, + { + "name": 
"name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.apps.meet_v2beta.types.Member", + "shortName": "get_member" + }, + "description": "Sample for GetMember", + "file": "meet_v2beta_generated_spaces_service_get_member_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "meet_v2beta_generated_SpacesService_GetMember_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "meet_v2beta_generated_spaces_service_get_member_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.apps.meet_v2beta.SpacesServiceClient", + "shortName": "SpacesServiceClient" + }, + "fullName": "google.apps.meet_v2beta.SpacesServiceClient.get_member", + "method": { + "fullName": "google.apps.meet.v2beta.SpacesService.GetMember", + "service": { + "fullName": "google.apps.meet.v2beta.SpacesService", + "shortName": "SpacesService" + }, + "shortName": "GetMember" + }, + "parameters": [ + { + "name": "request", + "type": "google.apps.meet_v2beta.types.GetMemberRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.apps.meet_v2beta.types.Member", + "shortName": "get_member" + }, + "description": "Sample for GetMember", + "file": "meet_v2beta_generated_spaces_service_get_member_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "meet_v2beta_generated_SpacesService_GetMember_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "meet_v2beta_generated_spaces_service_get_member_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.apps.meet_v2beta.SpacesServiceAsyncClient", + "shortName": "SpacesServiceAsyncClient" + }, + "fullName": "google.apps.meet_v2beta.SpacesServiceAsyncClient.get_space", + "method": { + "fullName": "google.apps.meet.v2beta.SpacesService.GetSpace", + "service": { + "fullName": "google.apps.meet.v2beta.SpacesService", + "shortName": "SpacesService" + }, + "shortName": "GetSpace" + }, + "parameters": [ + { + "name": "request", + "type": "google.apps.meet_v2beta.types.GetSpaceRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.apps.meet_v2beta.types.Space", + "shortName": 
"get_space" + }, + "description": "Sample for GetSpace", + "file": "meet_v2beta_generated_spaces_service_get_space_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "meet_v2beta_generated_SpacesService_GetSpace_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "meet_v2beta_generated_spaces_service_get_space_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.apps.meet_v2beta.SpacesServiceClient", + "shortName": "SpacesServiceClient" + }, + "fullName": "google.apps.meet_v2beta.SpacesServiceClient.get_space", + "method": { + "fullName": "google.apps.meet.v2beta.SpacesService.GetSpace", + "service": { + "fullName": "google.apps.meet.v2beta.SpacesService", + "shortName": "SpacesService" + }, + "shortName": "GetSpace" + }, + "parameters": [ + { + "name": "request", + "type": "google.apps.meet_v2beta.types.GetSpaceRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.apps.meet_v2beta.types.Space", + "shortName": "get_space" + }, + "description": "Sample for GetSpace", + "file": "meet_v2beta_generated_spaces_service_get_space_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "meet_v2beta_generated_SpacesService_GetSpace_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "meet_v2beta_generated_spaces_service_get_space_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.apps.meet_v2beta.SpacesServiceAsyncClient", + "shortName": "SpacesServiceAsyncClient" + }, + "fullName": "google.apps.meet_v2beta.SpacesServiceAsyncClient.list_members", + "method": { + "fullName": "google.apps.meet.v2beta.SpacesService.ListMembers", + "service": { + "fullName": "google.apps.meet.v2beta.SpacesService", + "shortName": "SpacesService" + }, + "shortName": "ListMembers" + }, + "parameters": [ + { + "name": "request", + "type": "google.apps.meet_v2beta.types.ListMembersRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.apps.meet_v2beta.services.spaces_service.pagers.ListMembersAsyncPager", + "shortName": "list_members" + }, + "description": "Sample for ListMembers", + "file": "meet_v2beta_generated_spaces_service_list_members_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "meet_v2beta_generated_SpacesService_ListMembers_async", + 
"segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "meet_v2beta_generated_spaces_service_list_members_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.apps.meet_v2beta.SpacesServiceClient", + "shortName": "SpacesServiceClient" + }, + "fullName": "google.apps.meet_v2beta.SpacesServiceClient.list_members", + "method": { + "fullName": "google.apps.meet.v2beta.SpacesService.ListMembers", + "service": { + "fullName": "google.apps.meet.v2beta.SpacesService", + "shortName": "SpacesService" + }, + "shortName": "ListMembers" + }, + "parameters": [ + { + "name": "request", + "type": "google.apps.meet_v2beta.types.ListMembersRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.apps.meet_v2beta.services.spaces_service.pagers.ListMembersPager", + "shortName": "list_members" + }, + "description": "Sample for ListMembers", + "file": "meet_v2beta_generated_spaces_service_list_members_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "meet_v2beta_generated_SpacesService_ListMembers_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "meet_v2beta_generated_spaces_service_list_members_sync.py" }, { "canonical": true, diff --git a/packages/google-apps-meet/scripts/fixup_meet_v2beta_keywords.py b/packages/google-apps-meet/scripts/fixup_meet_v2beta_keywords.py index f3a99cab82ab..ebdaee60c5d7 100644 --- a/packages/google-apps-meet/scripts/fixup_meet_v2beta_keywords.py +++ b/packages/google-apps-meet/scripts/fixup_meet_v2beta_keywords.py @@ -39,9 +39,12 @@ def partition( class meetCallTransformer(cst.CSTTransformer): CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { + 'create_member': ('parent', 'member', ), 'create_space': ('space', ), + 'delete_member': ('name', ), 'end_active_conference': ('name', ), 'get_conference_record': ('name', ), + 'get_member': ('name', ), 'get_participant': ('name', ), 'get_participant_session': ('name', ), 'get_recording': ('name', ), @@ -49,6 +52,7 @@ class meetCallTransformer(cst.CSTTransformer): 'get_transcript': ('name', ), 'get_transcript_entry': ('name', ), 'list_conference_records': ('page_size', 'page_token', 'filter', ), + 'list_members': ('parent', 'page_size', 'page_token', ), 'list_participants': ('parent', 'page_size', 'page_token', 'filter', ), 'list_participant_sessions': ('parent', 'page_size', 'page_token', 'filter', ), 'list_recordings': ('parent', 'page_size', 'page_token', ), diff --git a/packages/google-apps-meet/tests/unit/gapic/meet_v2/test_conference_records_service.py 
b/packages/google-apps-meet/tests/unit/gapic/meet_v2/test_conference_records_service.py index 664601f75754..1aaaf0f8a9ad 100644 --- a/packages/google-apps-meet/tests/unit/gapic/meet_v2/test_conference_records_service.py +++ b/packages/google-apps-meet/tests/unit/gapic/meet_v2/test_conference_records_service.py @@ -61,6 +61,13 @@ ) from google.apps.meet_v2.types import resource, service +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -336,6 +343,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = ConferenceRecordsServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = ConferenceRecordsServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -9513,10 +9563,14 @@ def test_get_conference_record_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ConferenceRecordsServiceRestInterceptor, "post_get_conference_record" ) as post, mock.patch.object( + transports.ConferenceRecordsServiceRestInterceptor, + "post_get_conference_record_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ConferenceRecordsServiceRestInterceptor, "pre_get_conference_record" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.GetConferenceRecordRequest.pb( service.GetConferenceRecordRequest() ) @@ -9540,6 +9594,7 @@ def test_get_conference_record_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resource.ConferenceRecord() + post_with_metadata.return_value = resource.ConferenceRecord(), metadata client.get_conference_record( request, @@ -9551,6 +9606,7 @@ def test_get_conference_record_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_conference_records_rest_bad_request( @@ -9636,11 +9692,15 @@ def test_list_conference_records_rest_interceptors(null_interceptor): transports.ConferenceRecordsServiceRestInterceptor, 
"post_list_conference_records", ) as post, mock.patch.object( + transports.ConferenceRecordsServiceRestInterceptor, + "post_list_conference_records_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ConferenceRecordsServiceRestInterceptor, "pre_list_conference_records", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.ListConferenceRecordsRequest.pb( service.ListConferenceRecordsRequest() ) @@ -9666,6 +9726,10 @@ def test_list_conference_records_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = service.ListConferenceRecordsResponse() + post_with_metadata.return_value = ( + service.ListConferenceRecordsResponse(), + metadata, + ) client.list_conference_records( request, @@ -9677,6 +9741,7 @@ def test_list_conference_records_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_participant_rest_bad_request(request_type=service.GetParticipantRequest): @@ -9759,10 +9824,14 @@ def test_get_participant_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ConferenceRecordsServiceRestInterceptor, "post_get_participant" ) as post, mock.patch.object( + transports.ConferenceRecordsServiceRestInterceptor, + "post_get_participant_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ConferenceRecordsServiceRestInterceptor, "pre_get_participant" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.GetParticipantRequest.pb(service.GetParticipantRequest()) transcode.return_value = { "method": "post", @@ -9784,6 +9853,7 @@ def test_get_participant_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resource.Participant() + post_with_metadata.return_value = resource.Participant(), metadata client.get_participant( request, @@ -9795,6 +9865,7 @@ def test_get_participant_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_participants_rest_bad_request( @@ -9881,10 +9952,14 @@ def test_list_participants_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ConferenceRecordsServiceRestInterceptor, "post_list_participants" ) as post, mock.patch.object( + transports.ConferenceRecordsServiceRestInterceptor, + "post_list_participants_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ConferenceRecordsServiceRestInterceptor, "pre_list_participants" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.ListParticipantsRequest.pb( service.ListParticipantsRequest() ) @@ -9910,6 +9985,7 @@ def test_list_participants_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = service.ListParticipantsResponse() + post_with_metadata.return_value = service.ListParticipantsResponse(), metadata client.list_participants( request, @@ -9921,6 +9997,7 @@ def test_list_participants_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_participant_session_rest_bad_request( @@ -10010,11 +10087,15 @@ def test_get_participant_session_rest_interceptors(null_interceptor): transports.ConferenceRecordsServiceRestInterceptor, 
"post_get_participant_session", ) as post, mock.patch.object( + transports.ConferenceRecordsServiceRestInterceptor, + "post_get_participant_session_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ConferenceRecordsServiceRestInterceptor, "pre_get_participant_session", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.GetParticipantSessionRequest.pb( service.GetParticipantSessionRequest() ) @@ -10040,6 +10121,7 @@ def test_get_participant_session_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resource.ParticipantSession() + post_with_metadata.return_value = resource.ParticipantSession(), metadata client.get_participant_session( request, @@ -10051,6 +10133,7 @@ def test_get_participant_session_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_participant_sessions_rest_bad_request( @@ -10136,11 +10219,15 @@ def test_list_participant_sessions_rest_interceptors(null_interceptor): transports.ConferenceRecordsServiceRestInterceptor, "post_list_participant_sessions", ) as post, mock.patch.object( + transports.ConferenceRecordsServiceRestInterceptor, + "post_list_participant_sessions_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ConferenceRecordsServiceRestInterceptor, "pre_list_participant_sessions", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.ListParticipantSessionsRequest.pb( service.ListParticipantSessionsRequest() ) @@ -10166,6 +10253,10 @@ def test_list_participant_sessions_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = service.ListParticipantSessionsResponse() + post_with_metadata.return_value = ( + service.ListParticipantSessionsResponse(), + metadata, + ) client.list_participant_sessions( request, @@ -10177,6 +10268,7 @@ def test_list_participant_sessions_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_recording_rest_bad_request(request_type=service.GetRecordingRequest): @@ -10261,10 +10353,14 @@ def test_get_recording_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ConferenceRecordsServiceRestInterceptor, "post_get_recording" ) as post, mock.patch.object( + transports.ConferenceRecordsServiceRestInterceptor, + "post_get_recording_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ConferenceRecordsServiceRestInterceptor, "pre_get_recording" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.GetRecordingRequest.pb(service.GetRecordingRequest()) transcode.return_value = { "method": "post", @@ -10286,6 +10382,7 @@ def test_get_recording_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resource.Recording() + post_with_metadata.return_value = resource.Recording(), metadata client.get_recording( request, @@ -10297,6 +10394,7 @@ def test_get_recording_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_recordings_rest_bad_request(request_type=service.ListRecordingsRequest): @@ -10379,10 +10477,14 @@ def 
test_list_recordings_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ConferenceRecordsServiceRestInterceptor, "post_list_recordings" ) as post, mock.patch.object( + transports.ConferenceRecordsServiceRestInterceptor, + "post_list_recordings_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ConferenceRecordsServiceRestInterceptor, "pre_list_recordings" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.ListRecordingsRequest.pb(service.ListRecordingsRequest()) transcode.return_value = { "method": "post", @@ -10406,6 +10508,7 @@ def test_list_recordings_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = service.ListRecordingsResponse() + post_with_metadata.return_value = service.ListRecordingsResponse(), metadata client.list_recordings( request, @@ -10417,6 +10520,7 @@ def test_list_recordings_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_transcript_rest_bad_request(request_type=service.GetTranscriptRequest): @@ -10501,10 +10605,14 @@ def test_get_transcript_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ConferenceRecordsServiceRestInterceptor, "post_get_transcript" ) as post, mock.patch.object( + transports.ConferenceRecordsServiceRestInterceptor, + "post_get_transcript_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ConferenceRecordsServiceRestInterceptor, "pre_get_transcript" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.GetTranscriptRequest.pb(service.GetTranscriptRequest()) transcode.return_value = { "method": "post", @@ -10526,6 +10634,7 @@ def test_get_transcript_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resource.Transcript() + post_with_metadata.return_value = resource.Transcript(), metadata client.get_transcript( request, @@ -10537,6 +10646,7 @@ def test_get_transcript_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_transcripts_rest_bad_request(request_type=service.ListTranscriptsRequest): @@ -10619,10 +10729,14 @@ def test_list_transcripts_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ConferenceRecordsServiceRestInterceptor, "post_list_transcripts" ) as post, mock.patch.object( + transports.ConferenceRecordsServiceRestInterceptor, + "post_list_transcripts_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ConferenceRecordsServiceRestInterceptor, "pre_list_transcripts" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.ListTranscriptsRequest.pb(service.ListTranscriptsRequest()) transcode.return_value = { "method": "post", @@ -10646,6 +10760,7 @@ def test_list_transcripts_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = service.ListTranscriptsResponse() + post_with_metadata.return_value = service.ListTranscriptsResponse(), metadata client.list_transcripts( request, @@ -10657,6 +10772,7 @@ def test_list_transcripts_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def 
test_get_transcript_entry_rest_bad_request( @@ -10751,10 +10867,14 @@ def test_get_transcript_entry_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ConferenceRecordsServiceRestInterceptor, "post_get_transcript_entry" ) as post, mock.patch.object( + transports.ConferenceRecordsServiceRestInterceptor, + "post_get_transcript_entry_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ConferenceRecordsServiceRestInterceptor, "pre_get_transcript_entry" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.GetTranscriptEntryRequest.pb( service.GetTranscriptEntryRequest() ) @@ -10778,6 +10898,7 @@ def test_get_transcript_entry_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resource.TranscriptEntry() + post_with_metadata.return_value = resource.TranscriptEntry(), metadata client.get_transcript_entry( request, @@ -10789,6 +10910,7 @@ def test_get_transcript_entry_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_transcript_entries_rest_bad_request( @@ -10874,11 +10996,15 @@ def test_list_transcript_entries_rest_interceptors(null_interceptor): transports.ConferenceRecordsServiceRestInterceptor, "post_list_transcript_entries", ) as post, mock.patch.object( + transports.ConferenceRecordsServiceRestInterceptor, + "post_list_transcript_entries_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ConferenceRecordsServiceRestInterceptor, "pre_list_transcript_entries", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.ListTranscriptEntriesRequest.pb( service.ListTranscriptEntriesRequest() ) @@ -10904,6 +11030,10 @@ def test_list_transcript_entries_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = service.ListTranscriptEntriesResponse() + post_with_metadata.return_value = ( + service.ListTranscriptEntriesResponse(), + metadata, + ) client.list_transcript_entries( request, @@ -10915,6 +11045,7 @@ def test_list_transcript_entries_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_initialize_client_w_rest(): diff --git a/packages/google-apps-meet/tests/unit/gapic/meet_v2/test_spaces_service.py b/packages/google-apps-meet/tests/unit/gapic/meet_v2/test_spaces_service.py index 98c5ae2f6c7d..2754e46b20e9 100644 --- a/packages/google-apps-meet/tests/unit/gapic/meet_v2/test_spaces_service.py +++ b/packages/google-apps-meet/tests/unit/gapic/meet_v2/test_spaces_service.py @@ -60,6 +60,13 @@ ) from google.apps.meet_v2.types import resource, service +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -314,6 +321,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
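For orientation: the recurring `post_<rpc>_with_metadata` patches and assertions added throughout the interceptor tests above exercise a newer REST interceptor hook that receives the response together with its trailing metadata. Below is a minimal, hedged sketch of a custom interceptor overriding one of those hooks; the (response, metadata) in/out shape is inferred only from how the tests stub it (e.g. `post_with_metadata.return_value = resource.Recording(), metadata`), and the import path is assumed to mirror the spaces_service import shown later in this diff, so treat both as assumptions rather than the generated API's documented contract.

# Sketch only: assumed hook shape, not the generated library's documented contract.
from google.apps.meet_v2.services.conference_records_service import transports


class LoggingConferenceRecordsInterceptor(
    transports.ConferenceRecordsServiceRestInterceptor
):
    def post_get_recording_with_metadata(self, response, metadata):
        # Inspect (or replace) the response and its metadata before the client
        # hands them back to the caller.
        print("get_recording metadata:", metadata)
        return response, metadata

Such an interceptor would typically be passed to the service's REST transport through its interceptor argument, as the generated tests' fixture setup (not visible in this hunk) appears to do.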
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = SpacesServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = SpacesServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -1048,6 +1098,7 @@ def test_spaces_service_client_create_channel_credentials_file( default_scopes=( "https://www.googleapis.com/auth/meetings.space.created", "https://www.googleapis.com/auth/meetings.space.readonly", + "https://www.googleapis.com/auth/meetings.space.settings", ), scopes=None, default_host="meet.googleapis.com", @@ -3410,10 +3461,13 @@ def test_create_space_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.SpacesServiceRestInterceptor, "post_create_space" ) as post, mock.patch.object( + transports.SpacesServiceRestInterceptor, "post_create_space_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.SpacesServiceRestInterceptor, "pre_create_space" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.CreateSpaceRequest.pb(service.CreateSpaceRequest()) transcode.return_value = { "method": "post", @@ -3435,6 +3489,7 @@ def test_create_space_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resource.Space() + post_with_metadata.return_value = resource.Space(), metadata client.create_space( request, @@ -3446,6 +3501,7 @@ def test_create_space_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_space_rest_bad_request(request_type=service.GetSpaceRequest): @@ -3532,10 +3588,13 @@ def test_get_space_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.SpacesServiceRestInterceptor, "post_get_space" ) as post, mock.patch.object( + transports.SpacesServiceRestInterceptor, "post_get_space_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.SpacesServiceRestInterceptor, "pre_get_space" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.GetSpaceRequest.pb(service.GetSpaceRequest()) transcode.return_value = { "method": "post", @@ -3557,6 +3616,7 @@ def test_get_space_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = 
resource.Space() + post_with_metadata.return_value = resource.Space(), metadata client.get_space( request, @@ -3568,6 +3628,7 @@ def test_get_space_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_space_rest_bad_request(request_type=service.UpdateSpaceRequest): @@ -3728,10 +3789,13 @@ def test_update_space_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.SpacesServiceRestInterceptor, "post_update_space" ) as post, mock.patch.object( + transports.SpacesServiceRestInterceptor, "post_update_space_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.SpacesServiceRestInterceptor, "pre_update_space" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.UpdateSpaceRequest.pb(service.UpdateSpaceRequest()) transcode.return_value = { "method": "post", @@ -3753,6 +3817,7 @@ def test_update_space_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resource.Space() + post_with_metadata.return_value = resource.Space(), metadata client.update_space( request, @@ -3764,6 +3829,7 @@ def test_update_space_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_end_active_conference_rest_bad_request( @@ -4037,6 +4103,7 @@ def test_spaces_service_base_transport_with_credentials_file(): default_scopes=( "https://www.googleapis.com/auth/meetings.space.created", "https://www.googleapis.com/auth/meetings.space.readonly", + "https://www.googleapis.com/auth/meetings.space.settings", ), quota_project_id="octopus", ) @@ -4063,6 +4130,7 @@ def test_spaces_service_auth_adc(): default_scopes=( "https://www.googleapis.com/auth/meetings.space.created", "https://www.googleapis.com/auth/meetings.space.readonly", + "https://www.googleapis.com/auth/meetings.space.settings", ), quota_project_id=None, ) @@ -4086,6 +4154,7 @@ def test_spaces_service_transport_auth_adc(transport_class): default_scopes=( "https://www.googleapis.com/auth/meetings.space.created", "https://www.googleapis.com/auth/meetings.space.readonly", + "https://www.googleapis.com/auth/meetings.space.settings", ), quota_project_id="octopus", ) @@ -4141,6 +4210,7 @@ def test_spaces_service_transport_create_channel(transport_class, grpc_helpers): default_scopes=( "https://www.googleapis.com/auth/meetings.space.created", "https://www.googleapis.com/auth/meetings.space.readonly", + "https://www.googleapis.com/auth/meetings.space.settings", ), scopes=["1", "2"], default_host="meet.googleapis.com", diff --git a/packages/google-apps-meet/tests/unit/gapic/meet_v2beta/test_conference_records_service.py b/packages/google-apps-meet/tests/unit/gapic/meet_v2beta/test_conference_records_service.py index ce682e867b8b..5a1ef4c3d853 100644 --- a/packages/google-apps-meet/tests/unit/gapic/meet_v2beta/test_conference_records_service.py +++ b/packages/google-apps-meet/tests/unit/gapic/meet_v2beta/test_conference_records_service.py @@ -61,6 +61,13 @@ ) from google.apps.meet_v2beta.types import resource, service +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -336,6 +343,49 @@ def 
test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = ConferenceRecordsServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = ConferenceRecordsServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -1120,7 +1170,10 @@ def test_conference_records_service_client_create_channel_credentials_file( credentials=file_creds, credentials_file=None, quota_project_id=None, - default_scopes=(), + default_scopes=( + "https://www.googleapis.com/auth/meetings.space.created", + "https://www.googleapis.com/auth/meetings.space.readonly", + ), scopes=None, default_host="meet.googleapis.com", ssl_credentials=None, @@ -9516,10 +9569,14 @@ def test_get_conference_record_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ConferenceRecordsServiceRestInterceptor, "post_get_conference_record" ) as post, mock.patch.object( + transports.ConferenceRecordsServiceRestInterceptor, + "post_get_conference_record_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ConferenceRecordsServiceRestInterceptor, "pre_get_conference_record" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.GetConferenceRecordRequest.pb( service.GetConferenceRecordRequest() ) @@ -9543,6 +9600,7 @@ def test_get_conference_record_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resource.ConferenceRecord() + post_with_metadata.return_value = resource.ConferenceRecord(), metadata client.get_conference_record( request, @@ -9554,6 +9612,7 @@ def test_get_conference_record_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_conference_records_rest_bad_request( @@ -9639,11 +9698,15 @@ def test_list_conference_records_rest_interceptors(null_interceptor): transports.ConferenceRecordsServiceRestInterceptor, "post_list_conference_records", ) as post, mock.patch.object( + transports.ConferenceRecordsServiceRestInterceptor, + "post_list_conference_records_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ConferenceRecordsServiceRestInterceptor, "pre_list_conference_records", ) as pre: pre.assert_not_called() 
post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.ListConferenceRecordsRequest.pb( service.ListConferenceRecordsRequest() ) @@ -9669,6 +9732,10 @@ def test_list_conference_records_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = service.ListConferenceRecordsResponse() + post_with_metadata.return_value = ( + service.ListConferenceRecordsResponse(), + metadata, + ) client.list_conference_records( request, @@ -9680,6 +9747,7 @@ def test_list_conference_records_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_participant_rest_bad_request(request_type=service.GetParticipantRequest): @@ -9762,10 +9830,14 @@ def test_get_participant_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ConferenceRecordsServiceRestInterceptor, "post_get_participant" ) as post, mock.patch.object( + transports.ConferenceRecordsServiceRestInterceptor, + "post_get_participant_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ConferenceRecordsServiceRestInterceptor, "pre_get_participant" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.GetParticipantRequest.pb(service.GetParticipantRequest()) transcode.return_value = { "method": "post", @@ -9787,6 +9859,7 @@ def test_get_participant_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resource.Participant() + post_with_metadata.return_value = resource.Participant(), metadata client.get_participant( request, @@ -9798,6 +9871,7 @@ def test_get_participant_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_participants_rest_bad_request( @@ -9884,10 +9958,14 @@ def test_list_participants_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ConferenceRecordsServiceRestInterceptor, "post_list_participants" ) as post, mock.patch.object( + transports.ConferenceRecordsServiceRestInterceptor, + "post_list_participants_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ConferenceRecordsServiceRestInterceptor, "pre_list_participants" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.ListParticipantsRequest.pb( service.ListParticipantsRequest() ) @@ -9913,6 +9991,7 @@ def test_list_participants_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = service.ListParticipantsResponse() + post_with_metadata.return_value = service.ListParticipantsResponse(), metadata client.list_participants( request, @@ -9924,6 +10003,7 @@ def test_list_participants_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_participant_session_rest_bad_request( @@ -10013,11 +10093,15 @@ def test_get_participant_session_rest_interceptors(null_interceptor): transports.ConferenceRecordsServiceRestInterceptor, "post_get_participant_session", ) as post, mock.patch.object( + transports.ConferenceRecordsServiceRestInterceptor, + "post_get_participant_session_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ConferenceRecordsServiceRestInterceptor, "pre_get_participant_session", ) as pre: pre.assert_not_called() 
post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.GetParticipantSessionRequest.pb( service.GetParticipantSessionRequest() ) @@ -10043,6 +10127,7 @@ def test_get_participant_session_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resource.ParticipantSession() + post_with_metadata.return_value = resource.ParticipantSession(), metadata client.get_participant_session( request, @@ -10054,6 +10139,7 @@ def test_get_participant_session_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_participant_sessions_rest_bad_request( @@ -10139,11 +10225,15 @@ def test_list_participant_sessions_rest_interceptors(null_interceptor): transports.ConferenceRecordsServiceRestInterceptor, "post_list_participant_sessions", ) as post, mock.patch.object( + transports.ConferenceRecordsServiceRestInterceptor, + "post_list_participant_sessions_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ConferenceRecordsServiceRestInterceptor, "pre_list_participant_sessions", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.ListParticipantSessionsRequest.pb( service.ListParticipantSessionsRequest() ) @@ -10169,6 +10259,10 @@ def test_list_participant_sessions_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = service.ListParticipantSessionsResponse() + post_with_metadata.return_value = ( + service.ListParticipantSessionsResponse(), + metadata, + ) client.list_participant_sessions( request, @@ -10180,6 +10274,7 @@ def test_list_participant_sessions_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_recording_rest_bad_request(request_type=service.GetRecordingRequest): @@ -10264,10 +10359,14 @@ def test_get_recording_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ConferenceRecordsServiceRestInterceptor, "post_get_recording" ) as post, mock.patch.object( + transports.ConferenceRecordsServiceRestInterceptor, + "post_get_recording_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ConferenceRecordsServiceRestInterceptor, "pre_get_recording" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.GetRecordingRequest.pb(service.GetRecordingRequest()) transcode.return_value = { "method": "post", @@ -10289,6 +10388,7 @@ def test_get_recording_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resource.Recording() + post_with_metadata.return_value = resource.Recording(), metadata client.get_recording( request, @@ -10300,6 +10400,7 @@ def test_get_recording_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_recordings_rest_bad_request(request_type=service.ListRecordingsRequest): @@ -10382,10 +10483,14 @@ def test_list_recordings_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ConferenceRecordsServiceRestInterceptor, "post_list_recordings" ) as post, mock.patch.object( + transports.ConferenceRecordsServiceRestInterceptor, + "post_list_recordings_with_metadata", + ) as post_with_metadata, mock.patch.object( 
transports.ConferenceRecordsServiceRestInterceptor, "pre_list_recordings" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.ListRecordingsRequest.pb(service.ListRecordingsRequest()) transcode.return_value = { "method": "post", @@ -10409,6 +10514,7 @@ def test_list_recordings_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = service.ListRecordingsResponse() + post_with_metadata.return_value = service.ListRecordingsResponse(), metadata client.list_recordings( request, @@ -10420,6 +10526,7 @@ def test_list_recordings_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_transcript_rest_bad_request(request_type=service.GetTranscriptRequest): @@ -10504,10 +10611,14 @@ def test_get_transcript_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ConferenceRecordsServiceRestInterceptor, "post_get_transcript" ) as post, mock.patch.object( + transports.ConferenceRecordsServiceRestInterceptor, + "post_get_transcript_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ConferenceRecordsServiceRestInterceptor, "pre_get_transcript" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.GetTranscriptRequest.pb(service.GetTranscriptRequest()) transcode.return_value = { "method": "post", @@ -10529,6 +10640,7 @@ def test_get_transcript_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resource.Transcript() + post_with_metadata.return_value = resource.Transcript(), metadata client.get_transcript( request, @@ -10540,6 +10652,7 @@ def test_get_transcript_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_transcripts_rest_bad_request(request_type=service.ListTranscriptsRequest): @@ -10622,10 +10735,14 @@ def test_list_transcripts_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ConferenceRecordsServiceRestInterceptor, "post_list_transcripts" ) as post, mock.patch.object( + transports.ConferenceRecordsServiceRestInterceptor, + "post_list_transcripts_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ConferenceRecordsServiceRestInterceptor, "pre_list_transcripts" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.ListTranscriptsRequest.pb(service.ListTranscriptsRequest()) transcode.return_value = { "method": "post", @@ -10649,6 +10766,7 @@ def test_list_transcripts_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = service.ListTranscriptsResponse() + post_with_metadata.return_value = service.ListTranscriptsResponse(), metadata client.list_transcripts( request, @@ -10660,6 +10778,7 @@ def test_list_transcripts_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_transcript_entry_rest_bad_request( @@ -10754,10 +10873,14 @@ def test_get_transcript_entry_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ConferenceRecordsServiceRestInterceptor, "post_get_transcript_entry" ) as post, mock.patch.object( + transports.ConferenceRecordsServiceRestInterceptor, + 
"post_get_transcript_entry_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ConferenceRecordsServiceRestInterceptor, "pre_get_transcript_entry" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.GetTranscriptEntryRequest.pb( service.GetTranscriptEntryRequest() ) @@ -10781,6 +10904,7 @@ def test_get_transcript_entry_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resource.TranscriptEntry() + post_with_metadata.return_value = resource.TranscriptEntry(), metadata client.get_transcript_entry( request, @@ -10792,6 +10916,7 @@ def test_get_transcript_entry_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_transcript_entries_rest_bad_request( @@ -10877,11 +11002,15 @@ def test_list_transcript_entries_rest_interceptors(null_interceptor): transports.ConferenceRecordsServiceRestInterceptor, "post_list_transcript_entries", ) as post, mock.patch.object( + transports.ConferenceRecordsServiceRestInterceptor, + "post_list_transcript_entries_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ConferenceRecordsServiceRestInterceptor, "pre_list_transcript_entries", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.ListTranscriptEntriesRequest.pb( service.ListTranscriptEntriesRequest() ) @@ -10907,6 +11036,10 @@ def test_list_transcript_entries_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = service.ListTranscriptEntriesResponse() + post_with_metadata.return_value = ( + service.ListTranscriptEntriesResponse(), + metadata, + ) client.list_transcript_entries( request, @@ -10918,6 +11051,7 @@ def test_list_transcript_entries_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_initialize_client_w_rest(): @@ -11259,7 +11393,10 @@ def test_conference_records_service_base_transport_with_credentials_file(): load_creds.assert_called_once_with( "credentials.json", scopes=None, - default_scopes=(), + default_scopes=( + "https://www.googleapis.com/auth/meetings.space.created", + "https://www.googleapis.com/auth/meetings.space.readonly", + ), quota_project_id="octopus", ) @@ -11282,7 +11419,10 @@ def test_conference_records_service_auth_adc(): ConferenceRecordsServiceClient() adc.assert_called_once_with( scopes=None, - default_scopes=(), + default_scopes=( + "https://www.googleapis.com/auth/meetings.space.created", + "https://www.googleapis.com/auth/meetings.space.readonly", + ), quota_project_id=None, ) @@ -11302,7 +11442,10 @@ def test_conference_records_service_transport_auth_adc(transport_class): transport_class(quota_project_id="octopus", scopes=["1", "2"]) adc.assert_called_once_with( scopes=["1", "2"], - default_scopes=(), + default_scopes=( + "https://www.googleapis.com/auth/meetings.space.created", + "https://www.googleapis.com/auth/meetings.space.readonly", + ), quota_project_id="octopus", ) @@ -11356,7 +11499,10 @@ def test_conference_records_service_transport_create_channel( credentials=creds, credentials_file=None, quota_project_id="octopus", - default_scopes=(), + default_scopes=( + "https://www.googleapis.com/auth/meetings.space.created", + "https://www.googleapis.com/auth/meetings.space.readonly", + ), scopes=["1", "2"], 
default_host="meet.googleapis.com", ssl_credentials=None, diff --git a/packages/google-apps-meet/tests/unit/gapic/meet_v2beta/test_spaces_service.py b/packages/google-apps-meet/tests/unit/gapic/meet_v2beta/test_spaces_service.py index b4775ce136ba..93101b467265 100644 --- a/packages/google-apps-meet/tests/unit/gapic/meet_v2beta/test_spaces_service.py +++ b/packages/google-apps-meet/tests/unit/gapic/meet_v2beta/test_spaces_service.py @@ -56,10 +56,18 @@ from google.apps.meet_v2beta.services.spaces_service import ( SpacesServiceAsyncClient, SpacesServiceClient, + pagers, transports, ) from google.apps.meet_v2beta.types import resource, service +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -314,6 +322,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = SpacesServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = SpacesServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -1045,7 +1096,11 @@ def test_spaces_service_client_create_channel_credentials_file( credentials=file_creds, credentials_file=None, quota_project_id=None, - default_scopes=(), + default_scopes=( + "https://www.googleapis.com/auth/meetings.space.created", + "https://www.googleapis.com/auth/meetings.space.readonly", + "https://www.googleapis.com/auth/meetings.space.settings", + ), scopes=None, default_host="meet.googleapis.com", ssl_credentials=None, @@ -2308,13 +2363,83 @@ async def test_end_active_conference_flattened_error_async(): ) -def test_create_space_rest_use_cached_wrapped_rpc(): +@pytest.mark.parametrize( + "request_type", + [ + service.CreateMemberRequest, + dict, + ], +) +def test_create_member(request_type, transport: str = "grpc"): + client = SpacesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_member), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = resource.Member( + name="name_value", + email="email_value", + role=resource.Member.Role.COHOST, + user="user_value", + ) + response = client.create_member(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = service.CreateMemberRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, resource.Member) + assert response.name == "name_value" + assert response.email == "email_value" + assert response.role == resource.Member.Role.COHOST + assert response.user == "user_value" + + +def test_create_member_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = SpacesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = service.CreateMemberRequest( + parent="parent_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_member), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.create_member(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.CreateMemberRequest( + parent="parent_value", + ) + + +def test_create_member_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = SpacesServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport="grpc", ) # Should wrap all calls on client creation @@ -2322,91 +2447,37 @@ def test_create_space_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.create_space in client._transport._wrapped_methods + assert client._transport.create_member in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.create_space] = mock_rpc - + client._transport._wrapped_methods[client._transport.create_member] = mock_rpc request = {} - client.create_space(request) + client.create_member(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.create_space(request) + client.create_member(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_create_space_rest_flattened(): - client = SpacesServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = resource.Space() - - # get arguments that satisfy an http rule for this method - sample_request = {} - - # get truthy value for each flattened field - mock_args = dict( - space=resource.Space(name="name_value"), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = resource.Space.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.create_space(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v2beta/spaces" % client.transport._host, args[1] - ) - - -def test_create_space_rest_flattened_error(transport: str = "rest"): - client = SpacesServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_space( - service.CreateSpaceRequest(), - space=resource.Space(name="name_value"), - ) - - -def test_get_space_rest_use_cached_wrapped_rpc(): +@pytest.mark.asyncio +async def test_create_member_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = SpacesServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = SpacesServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, ) # Should wrap all calls on client creation @@ -2414,173 +2485,302 @@ def test_get_space_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_space in client._transport._wrapped_methods + assert ( + client._client._transport.create_member + in client._client._transport._wrapped_methods + ) # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
- ) - client._transport._wrapped_methods[client._transport.get_space] = mock_rpc + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.create_member + ] = mock_rpc request = {} - client.get_space(request) + await client.create_member(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.get_space(request) + await client.create_member(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_space_rest_required_fields(request_type=service.GetSpaceRequest): - transport_class = transports.SpacesServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) +@pytest.mark.asyncio +async def test_create_member_async( + transport: str = "grpc_asyncio", request_type=service.CreateMemberRequest +): + client = SpacesServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, ) - # verify fields with default values are dropped + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_space._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_member), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resource.Member( + name="name_value", + email="email_value", + role=resource.Member.Role.COHOST, + user="user_value", + ) + ) + response = await client.create_member(request) - # verify required fields with default values are now present + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = service.CreateMemberRequest() + assert args[0] == request - jsonified_request["name"] = "name_value" + # Establish that the response is the type that we expect. + assert isinstance(response, resource.Member) + assert response.name == "name_value" + assert response.email == "email_value" + assert response.role == resource.Member.Role.COHOST + assert response.user == "user_value" - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_space._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" +@pytest.mark.asyncio +async def test_create_member_async_from_dict(): + await test_create_member_async(request_type=dict) + +def test_create_member_field_headers(): client = SpacesServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = resource.Space() - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result - response_value = Response() - response_value.status_code = 200 + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.CreateMemberRequest() - # Convert return value to protobuf type - return_value = resource.Space.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + request.parent = "parent_value" - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_member), "__call__") as call: + call.return_value = resource.Member() + client.create_member(request) - response = client.get_space(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] -def test_get_space_rest_unset_required_fields(): - transport = transports.SpacesServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials +@pytest.mark.asyncio +async def test_create_member_field_headers_async(): + client = SpacesServiceAsyncClient( + credentials=async_anonymous_credentials(), ) - unset_fields = transport.get_space._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.CreateMemberRequest() + request.parent = "parent_value" -def test_get_space_rest_flattened(): - client = SpacesServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_member), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resource.Member()) + await client.create_member(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_member_flattened(): + client = SpacesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), ) - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = resource.Space() + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_member), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = resource.Member() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_member( + parent="parent_value", + member=resource.Member(name="name_value"), + ) - # get arguments that satisfy an http rule for this method - sample_request = {"name": "spaces/sample1"} + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].member + mock_val = resource.Member(name="name_value") + assert arg == mock_val - # get truthy value for each flattened field - mock_args = dict( - name="name_value", + +def test_create_member_flattened_error(): + client = SpacesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_member( + service.CreateMemberRequest(), + parent="parent_value", + member=resource.Member(name="name_value"), ) - mock_args.update(sample_request) - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = resource.Space.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_space(**mock_args) +@pytest.mark.asyncio +async def test_create_member_flattened_async(): + client = SpacesServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_member), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = resource.Member() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resource.Member()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_member( + parent="parent_value", + member=resource.Member(name="name_value"), + ) # Establish that the underlying call was made with the expected # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v2beta/{name=spaces/*}" % client.transport._host, args[1] + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].member + mock_val = resource.Member(name="name_value") + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_member_flattened_error_async(): + client = SpacesServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.create_member( + service.CreateMemberRequest(), + parent="parent_value", + member=resource.Member(name="name_value"), ) -def test_get_space_rest_flattened_error(transport: str = "rest"): +@pytest.mark.parametrize( + "request_type", + [ + service.GetMemberRequest, + dict, + ], +) +def test_get_member(request_type, transport: str = "grpc"): client = SpacesServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_space( - service.GetSpaceRequest(), + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_member), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = resource.Member( name="name_value", + email="email_value", + role=resource.Member.Role.COHOST, + user="user_value", ) + response = client.get_member(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = service.GetMemberRequest() + assert args[0] == request + # Establish that the response is the type that we expect. + assert isinstance(response, resource.Member) + assert response.name == "name_value" + assert response.email == "email_value" + assert response.role == resource.Member.Role.COHOST + assert response.user == "user_value" -def test_update_space_rest_use_cached_wrapped_rpc(): + +def test_get_member_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = SpacesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = service.GetMemberRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_member), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.get_member(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.GetMemberRequest( + name="name_value", + ) + + +def test_get_member_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = SpacesServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport="grpc", ) # Should wrap all calls on client creation @@ -2588,173 +2788,324 @@ def test_update_space_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.update_space in client._transport._wrapped_methods + assert client._transport.get_member in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.update_space] = mock_rpc - + client._transport._wrapped_methods[client._transport.get_member] = mock_rpc request = {} - client.update_space(request) + client.get_member(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.update_space(request) + client.get_member(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_update_space_rest_required_fields(request_type=service.UpdateSpaceRequest): - transport_class = transports.SpacesServiceRestTransport - - request_init = {} - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) +@pytest.mark.asyncio +async def test_get_member_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = SpacesServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) - # verify fields with default values are dropped + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).update_space._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) + # Ensure method has been cached + assert ( + client._client._transport.get_member + in client._client._transport._wrapped_methods + ) - # verify required fields with default values are now present + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.get_member + ] = mock_rpc - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).update_space._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("update_mask",)) - jsonified_request.update(unset_fields) + request = {} + await client.get_member(request) - # verify required fields with non-default values are left alone + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 - client = SpacesServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) + await client.get_member(request) - # Designate an appropriate value for the returned response. - return_value = resource.Space() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "patch", - "query_params": pb_request, - } - transcode_result["body"] = pb_request - transcode.return_value = transcode_result + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = resource.Space.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) +@pytest.mark.asyncio +async def test_get_member_async( + transport: str = "grpc_asyncio", request_type=service.GetMemberRequest +): + client = SpacesServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() - response = client.update_space(request) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_member), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resource.Member( + name="name_value", + email="email_value", + role=resource.Member.Role.COHOST, + user="user_value", + ) + ) + response = await client.get_member(request) - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = service.GetMemberRequest() + assert args[0] == request + # Establish that the response is the type that we expect. 
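+    # The awaited FakeUnaryUnaryCall resolves to the Member designated above, so
+    # the async client should surface the same field values checked in the sync test.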
+ assert isinstance(response, resource.Member) + assert response.name == "name_value" + assert response.email == "email_value" + assert response.role == resource.Member.Role.COHOST + assert response.user == "user_value" -def test_update_space_rest_unset_required_fields(): - transport = transports.SpacesServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - unset_fields = transport.update_space._get_unset_required_fields({}) - assert set(unset_fields) == (set(("updateMask",)) & set(("space",))) +@pytest.mark.asyncio +async def test_get_member_async_from_dict(): + await test_get_member_async(request_type=dict) -def test_update_space_rest_flattened(): +def test_get_member_field_headers(): client = SpacesServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = resource.Space() + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.GetMemberRequest() - # get arguments that satisfy an http rule for this method - sample_request = {"space": {"name": "spaces/sample1"}} + request.name = "name_value" - # get truthy value for each flattened field - mock_args = dict( - space=resource.Space(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - ) - mock_args.update(sample_request) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_member), "__call__") as call: + call.return_value = resource.Member() + client.get_member(request) - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = resource.Space.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request - client.update_space(**mock_args) + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_member_field_headers_async(): + client = SpacesServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.GetMemberRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_member), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resource.Member()) + await client.get_member(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
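+    # The client derives x-goog-request-params from request.name, so the header
+    # asserted below should reflect the routing value set on the request.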
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_member_flattened(): + client = SpacesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_member), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = resource.Member() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_member( + name="name_value", + ) # Establish that the underlying call was made with the expected # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v2beta/{space.name=spaces/*}" % client.transport._host, args[1] - ) + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val -def test_update_space_rest_flattened_error(transport: str = "rest"): +def test_get_member_flattened_error(): client = SpacesServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.update_space( - service.UpdateSpaceRequest(), - space=resource.Space(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.get_member( + service.GetMemberRequest(), + name="name_value", ) -def test_end_active_conference_rest_use_cached_wrapped_rpc(): +@pytest.mark.asyncio +async def test_get_member_flattened_async(): + client = SpacesServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_member), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = resource.Member() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resource.Member()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_member( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_member_flattened_error_async(): + client = SpacesServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_member( + service.GetMemberRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + service.ListMembersRequest, + dict, + ], +) +def test_list_members(request_type, transport: str = "grpc"): + client = SpacesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
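+    # Patching the transport's callable intercepts the RPC at the stub boundary,
+    # so no request leaves the process and the canned response below is returned.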
+ with mock.patch.object(type(client.transport.list_members), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = service.ListMembersResponse( + next_page_token="next_page_token_value", + ) + response = client.list_members(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = service.ListMembersRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListMembersPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_members_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = SpacesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = service.ListMembersRequest( + parent="parent_value", + page_token="page_token_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_members), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_members(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.ListMembersRequest( + parent="parent_value", + page_token="page_token_value", + ) + + +def test_list_members_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = SpacesServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport="grpc", ) # Should wrap all calls on client creation @@ -2762,501 +3113,3347 @@ def test_end_active_conference_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.end_active_conference - in client._transport._wrapped_methods - ) + assert client._transport.list_members in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.end_active_conference - ] = mock_rpc - + client._transport._wrapped_methods[client._transport.list_members] = mock_rpc request = {} - client.end_active_conference(request) + client.list_members(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.end_active_conference(request) + client.list_members(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_end_active_conference_rest_required_fields( - request_type=service.EndActiveConferenceRequest, +@pytest.mark.asyncio +async def test_list_members_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", ): - transport_class = transports.SpacesServiceRestTransport + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = SpacesServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - # verify fields with default values are dropped + # Ensure method has been cached + assert ( + client._client._transport.list_members + in client._client._transport._wrapped_methods + ) - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).end_active_conference._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.list_members + ] = mock_rpc - # verify required fields with default values are now present + request = {} + await client.list_members(request) - jsonified_request["name"] = "name_value" + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).end_active_conference._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) + await client.list_members(request) - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 - client = SpacesServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + +@pytest.mark.asyncio +async def test_list_members_async( + transport: str = "grpc_asyncio", request_type=service.ListMembersRequest +): + client = SpacesServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, ) - request = request_type(**request_init) - # Designate an appropriate value for the returned response. - return_value = None - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "post", - "query_params": pb_request, - } - transcode_result["body"] = pb_request - transcode.return_value = transcode_result + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() - response_value = Response() - response_value.status_code = 200 - json_return_value = "" + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_members), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.ListMembersResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_members(request) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = service.ListMembersRequest() + assert args[0] == request - response = client.end_active_conference(request) + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListMembersAsyncPager) + assert response.next_page_token == "next_page_token_value" - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params +@pytest.mark.asyncio +async def test_list_members_async_from_dict(): + await test_list_members_async(request_type=dict) -def test_end_active_conference_rest_unset_required_fields(): - transport = transports.SpacesServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials + +def test_list_members_field_headers(): + client = SpacesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), ) - unset_fields = transport.end_active_conference._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.ListMembersRequest() + request.parent = "parent_value" -def test_end_active_conference_rest_flattened(): - client = SpacesServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_members), "__call__") as call: + call.return_value = service.ListMembersResponse() + client.list_members(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_members_field_headers_async(): + client = SpacesServiceAsyncClient( + credentials=async_anonymous_credentials(), ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = None + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.ListMembersRequest() - # get arguments that satisfy an http rule for this method - sample_request = {"name": "spaces/sample1"} + request.parent = "parent_value" - # get truthy value for each flattened field - mock_args = dict( - name="name_value", + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_members), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.ListMembersResponse() ) - mock_args.update(sample_request) + await client.list_members(request) - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = "" - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request - client.end_active_conference(**mock_args) + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_members_flattened(): + client = SpacesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_members), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = service.ListMembersResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_members( + parent="parent_value", + ) # Establish that the underlying call was made with the expected # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v2beta/{name=spaces/*}:endActiveConference" % client.transport._host, - args[1], - ) + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val -def test_end_active_conference_rest_flattened_error(transport: str = "rest"): +def test_list_members_flattened_error(): client = SpacesServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.end_active_conference( - service.EndActiveConferenceRequest(), - name="name_value", + client.list_members( + service.ListMembersRequest(), + parent="parent_value", ) -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. - transport = transports.SpacesServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), +@pytest.mark.asyncio +async def test_list_members_flattened_async(): + client = SpacesServiceAsyncClient( + credentials=async_anonymous_credentials(), ) - with pytest.raises(ValueError): - client = SpacesServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.list_members), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = service.ListMembersResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.ListMembersResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_members( + parent="parent_value", ) - # It is an error to provide a credentials file and a transport instance. - transport = transports.SpacesServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_members_flattened_error_async(): + client = SpacesServiceAsyncClient( + credentials=async_anonymous_credentials(), ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. with pytest.raises(ValueError): - client = SpacesServiceClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, + await client.list_members( + service.ListMembersRequest(), + parent="parent_value", ) - # It is an error to provide an api_key and a transport instance. - transport = transports.SpacesServiceGrpcTransport( + +def test_list_members_pager(transport_name: str = "grpc"): + client = SpacesServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, ) - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = SpacesServiceClient( - client_options=options, - transport=transport, + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_members), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + service.ListMembersResponse( + members=[ + resource.Member(), + resource.Member(), + resource.Member(), + ], + next_page_token="abc", + ), + service.ListMembersResponse( + members=[], + next_page_token="def", + ), + service.ListMembersResponse( + members=[ + resource.Member(), + ], + next_page_token="ghi", + ), + service.ListMembersResponse( + members=[ + resource.Member(), + resource.Member(), + ], + ), + RuntimeError, ) - # It is an error to provide an api_key and a credential. - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = SpacesServiceClient( - client_options=options, credentials=ga_credentials.AnonymousCredentials() + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) + pager = client.list_members(request={}, retry=retry, timeout=timeout) - # It is an error to provide scopes and a transport instance. 
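+        # The pager retains the metadata, retry, and timeout passed to the first
+        # call so that later page fetches can reuse them.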
- transport = transports.SpacesServiceGrpcTransport( + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, resource.Member) for i in results) + + +def test_list_members_pages(transport_name: str = "grpc"): + client = SpacesServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, ) - with pytest.raises(ValueError): - client = SpacesServiceClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_members), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + service.ListMembersResponse( + members=[ + resource.Member(), + resource.Member(), + resource.Member(), + ], + next_page_token="abc", + ), + service.ListMembersResponse( + members=[], + next_page_token="def", + ), + service.ListMembersResponse( + members=[ + resource.Member(), + ], + next_page_token="ghi", + ), + service.ListMembersResponse( + members=[ + resource.Member(), + resource.Member(), + ], + ), + RuntimeError, ) + pages = list(client.list_members(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. - transport = transports.SpacesServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), +@pytest.mark.asyncio +async def test_list_members_async_pager(): + client = SpacesServiceAsyncClient( + credentials=async_anonymous_credentials(), ) - client = SpacesServiceClient(transport=transport) - assert client.transport is transport + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_members), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + service.ListMembersResponse( + members=[ + resource.Member(), + resource.Member(), + resource.Member(), + ], + next_page_token="abc", + ), + service.ListMembersResponse( + members=[], + next_page_token="def", + ), + service.ListMembersResponse( + members=[ + resource.Member(), + ], + next_page_token="ghi", + ), + service.ListMembersResponse( + members=[ + resource.Member(), + resource.Member(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_members( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. - transport = transports.SpacesServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel + assert len(responses) == 6 + assert all(isinstance(i, resource.Member) for i in responses) - transport = transports.SpacesServiceGrpcAsyncIOTransport( - credentials=ga_credentials.AnonymousCredentials(), + +@pytest.mark.asyncio +async def test_list_members_async_pages(): + client = SpacesServiceAsyncClient( + credentials=async_anonymous_credentials(), ) - channel = transport.grpc_channel - assert channel + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_members), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + service.ListMembersResponse( + members=[ + resource.Member(), + resource.Member(), + resource.Member(), + ], + next_page_token="abc", + ), + service.ListMembersResponse( + members=[], + next_page_token="def", + ), + service.ListMembersResponse( + members=[ + resource.Member(), + ], + next_page_token="ghi", + ), + service.ListMembersResponse( + members=[ + resource.Member(), + resource.Member(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_members(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token @pytest.mark.parametrize( - "transport_class", + "request_type", [ - transports.SpacesServiceGrpcTransport, - transports.SpacesServiceGrpcAsyncIOTransport, - transports.SpacesServiceRestTransport, + service.DeleteMemberRequest, + dict, ], ) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. - with mock.patch.object(google.auth, "default") as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() - - -def test_transport_kind_grpc(): - transport = SpacesServiceClient.get_transport_class("grpc")( - credentials=ga_credentials.AnonymousCredentials() - ) - assert transport.kind == "grpc" - - -def test_initialize_client_w_grpc(): - client = SpacesServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc" - ) - assert client is not None - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_space_empty_call_grpc(): +def test_delete_member(request_type, transport: str = "grpc"): client = SpacesServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport=transport, ) - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.create_space), "__call__") as call: - call.return_value = resource.Space() - client.create_space(request=None) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() - # Establish that the underlying stub method was called. - call.assert_called() + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_member), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_member(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request_msg = service.CreateSpaceRequest() + request = service.DeleteMemberRequest() + assert args[0] == request - assert args[0] == request_msg + # Establish that the response is the type that we expect. + assert response is None -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-def test_get_space_empty_call_grpc(): +def test_delete_member_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. client = SpacesServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.get_space), "__call__") as call: - call.return_value = resource.Space() - client.get_space(request=None) + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = service.DeleteMemberRequest( + name="name_value", + ) - # Establish that the underlying stub method was called. + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_member), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.delete_member(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - request_msg = service.GetSpaceRequest() + assert args[0] == service.DeleteMemberRequest( + name="name_value", + ) - assert args[0] == request_msg +def test_delete_member_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SpacesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_space_empty_call_grpc(): - client = SpacesServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.update_space), "__call__") as call: - call.return_value = resource.Space() - client.update_space(request=None) + # Ensure method has been cached + assert client._transport.delete_member in client._transport._wrapped_methods - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.UpdateSpaceRequest() + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete_member] = mock_rpc + request = {} + client.delete_member(request) - assert args[0] == request_msg + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + client.delete_member(request) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_end_active_conference_empty_call_grpc(): - client = SpacesServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 - # Mock the actual call, and fake the request. 
- with mock.patch.object( - type(client.transport.end_active_conference), "__call__" - ) as call: - call.return_value = None - client.end_active_conference(request=None) - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.EndActiveConferenceRequest() +@pytest.mark.asyncio +async def test_delete_member_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = SpacesServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) - assert args[0] == request_msg + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + # Ensure method has been cached + assert ( + client._client._transport.delete_member + in client._client._transport._wrapped_methods + ) -def test_transport_kind_grpc_asyncio(): - transport = SpacesServiceAsyncClient.get_transport_class("grpc_asyncio")( - credentials=async_anonymous_credentials() - ) - assert transport.kind == "grpc_asyncio" + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.delete_member + ] = mock_rpc + request = {} + await client.delete_member(request) -def test_initialize_client_w_grpc_asyncio(): - client = SpacesServiceAsyncClient( - credentials=async_anonymous_credentials(), transport="grpc_asyncio" - ) - assert client is not None + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.delete_member(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. @pytest.mark.asyncio -async def test_create_space_empty_call_grpc_asyncio(): +async def test_delete_member_async( + transport: str = "grpc_asyncio", request_type=service.DeleteMemberRequest +): client = SpacesServiceAsyncClient( credentials=async_anonymous_credentials(), - transport="grpc_asyncio", + transport=transport, ) - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.create_space), "__call__") as call: + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_member), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resource.Space( - name="name_value", - meeting_uri="meeting_uri_value", - meeting_code="meeting_code_value", - ) - ) - await client.create_space(request=None) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_member(request) - # Establish that the underlying stub method was called. - call.assert_called() + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request_msg = service.CreateSpaceRequest() + request = service.DeleteMemberRequest() + assert args[0] == request - assert args[0] == request_msg + # Establish that the response is the type that we expect. + assert response is None -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. @pytest.mark.asyncio -async def test_get_space_empty_call_grpc_asyncio(): - client = SpacesServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", +async def test_delete_member_async_from_dict(): + await test_delete_member_async(request_type=dict) + + +def test_delete_member_field_headers(): + client = SpacesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), ) - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.get_space), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resource.Space( - name="name_value", - meeting_uri="meeting_uri_value", - meeting_code="meeting_code_value", - ) - ) - await client.get_space(request=None) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.DeleteMemberRequest() - # Establish that the underlying stub method was called. - call.assert_called() + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_member), "__call__") as call: + call.return_value = None + client.delete_member(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request_msg = service.GetSpaceRequest() + assert args[0] == request - assert args[0] == request_msg + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. @pytest.mark.asyncio -async def test_update_space_empty_call_grpc_asyncio(): +async def test_delete_member_field_headers_async(): client = SpacesServiceAsyncClient( credentials=async_anonymous_credentials(), - transport="grpc_asyncio", ) - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.update_space), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resource.Space( - name="name_value", - meeting_uri="meeting_uri_value", - meeting_code="meeting_code_value", - ) - ) - await client.update_space(request=None) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.DeleteMemberRequest() - # Establish that the underlying stub method was called. - call.assert_called() + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_member), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_member(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request_msg = service.UpdateSpaceRequest() + assert args[0] == request - assert args[0] == request_msg + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_member_flattened(): + client = SpacesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_member), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_member( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_member_flattened_error(): + client = SpacesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_member( + service.DeleteMemberRequest(), + name="name_value", + ) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. @pytest.mark.asyncio -async def test_end_active_conference_empty_call_grpc_asyncio(): +async def test_delete_member_flattened_async(): client = SpacesServiceAsyncClient( credentials=async_anonymous_credentials(), - transport="grpc_asyncio", ) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.end_active_conference), "__call__" - ) as call: + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_member), "__call__") as call: # Designate an appropriate return value for the call. + call.return_value = None + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.end_active_conference(request=None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_member( + name="name_value", + ) - # Establish that the underlying stub method was called. - call.assert_called() + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request_msg = service.EndActiveConferenceRequest() + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val - assert args[0] == request_msg +@pytest.mark.asyncio +async def test_delete_member_flattened_error_async(): + client = SpacesServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) -def test_transport_kind_rest(): - transport = SpacesServiceClient.get_transport_class("rest")( + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.delete_member( + service.DeleteMemberRequest(), + name="name_value", + ) + + +def test_create_space_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SpacesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_space in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.create_space] = mock_rpc + + request = {} + client.create_space(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.create_space(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_space_rest_flattened(): + client = SpacesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = resource.Space() + + # get arguments that satisfy an http rule for this method + sample_request = {} + + # get truthy value for each flattened field + mock_args = dict( + space=resource.Space(name="name_value"), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = resource.Space.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.create_space(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2beta/spaces" % client.transport._host, args[1] + ) + + +def test_create_space_rest_flattened_error(transport: str = "rest"): + client = SpacesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_space( + service.CreateSpaceRequest(), + space=resource.Space(name="name_value"), + ) + + +def test_get_space_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SpacesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_space in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_space] = mock_rpc + + request = {} + client.get_space(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_space(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_space_rest_required_fields(request_type=service.GetSpaceRequest): + transport_class = transports.SpacesServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_space._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() + ).get_space._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = SpacesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - assert transport.kind == "rest" + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = resource.Space() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = resource.Space.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_space(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_space_rest_unset_required_fields(): + transport = transports.SpacesServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_space._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +def test_get_space_rest_flattened(): + client = SpacesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = resource.Space() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "spaces/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = resource.Space.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.get_space(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2beta/{name=spaces/*}" % client.transport._host, args[1] + ) + + +def test_get_space_rest_flattened_error(transport: str = "rest"): + client = SpacesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_space( + service.GetSpaceRequest(), + name="name_value", + ) + + +def test_update_space_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SpacesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_space in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.update_space] = mock_rpc + + request = {} + client.update_space(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.update_space(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_space_rest_required_fields(request_type=service.UpdateSpaceRequest): + transport_class = transports.SpacesServiceRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_space._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_space._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("update_mask",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = SpacesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = resource.Space() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
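+            # The stubbed result pins the URI and HTTP method and echoes the request
+            # into both query_params and body, standing in for the http_options
+            # expansion that the real transcode() would perform.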
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = resource.Space.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.update_space(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_space_rest_unset_required_fields(): + transport = transports.SpacesServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_space._get_unset_required_fields({}) + assert set(unset_fields) == (set(("updateMask",)) & set(("space",))) + + +def test_update_space_rest_flattened(): + client = SpacesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = resource.Space() + + # get arguments that satisfy an http rule for this method + sample_request = {"space": {"name": "spaces/sample1"}} + + # get truthy value for each flattened field + mock_args = dict( + space=resource.Space(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = resource.Space.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.update_space(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2beta/{space.name=spaces/*}" % client.transport._host, args[1] + ) + + +def test_update_space_rest_flattened_error(transport: str = "rest"): + client = SpacesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_space( + service.UpdateSpaceRequest(), + space=resource.Space(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_end_active_conference_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SpacesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.end_active_conference + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.end_active_conference + ] = mock_rpc + + request = {} + client.end_active_conference(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.end_active_conference(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_end_active_conference_rest_required_fields( + request_type=service.EndActiveConferenceRequest, +): + transport_class = transports.SpacesServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).end_active_conference._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).end_active_conference._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = SpacesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.end_active_conference(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_end_active_conference_rest_unset_required_fields(): + transport = transports.SpacesServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.end_active_conference._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +def test_end_active_conference_rest_flattened(): + client = SpacesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "spaces/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.end_active_conference(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2beta/{name=spaces/*}:endActiveConference" % client.transport._host, + args[1], + ) + + +def test_end_active_conference_rest_flattened_error(transport: str = "rest"): + client = SpacesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.end_active_conference( + service.EndActiveConferenceRequest(), + name="name_value", + ) + + +def test_create_member_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SpacesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_member in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[client._transport.create_member] = mock_rpc + + request = {} + client.create_member(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.create_member(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_member_rest_required_fields(request_type=service.CreateMemberRequest): + transport_class = transports.SpacesServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_member._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_member._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = SpacesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = resource.Member() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = resource.Member.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.create_member(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_member_rest_unset_required_fields(): + transport = transports.SpacesServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_member._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "parent", + "member", + ) + ) + ) + + +def test_create_member_rest_flattened(): + client = SpacesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = resource.Member() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "spaces/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + member=resource.Member(name="name_value"), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = resource.Member.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.create_member(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2beta/{parent=spaces/*}/members" % client.transport._host, args[1] + ) + + +def test_create_member_rest_flattened_error(transport: str = "rest"): + client = SpacesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_member( + service.CreateMemberRequest(), + parent="parent_value", + member=resource.Member(name="name_value"), + ) + + +def test_get_member_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SpacesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_member in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_member] = mock_rpc + + request = {} + client.get_member(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_member(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_member_rest_required_fields(request_type=service.GetMemberRequest): + transport_class = transports.SpacesServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_member._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_member._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = SpacesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = resource.Member() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = resource.Member.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_member(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_member_rest_unset_required_fields(): + transport = transports.SpacesServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_member._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +def test_get_member_rest_flattened(): + client = SpacesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = resource.Member() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "spaces/sample1/members/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = resource.Member.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.get_member(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2beta/{name=spaces/*/members/*}" % client.transport._host, args[1] + ) + + +def test_get_member_rest_flattened_error(transport: str = "rest"): + client = SpacesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_member( + service.GetMemberRequest(), + name="name_value", + ) + + +def test_list_members_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SpacesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_members in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_members] = mock_rpc + + request = {} + client.list_members(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_members(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_members_rest_required_fields(request_type=service.ListMembersRequest): + transport_class = transports.SpacesServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_members._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_members._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = SpacesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = service.ListMembersResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = service.ListMembersResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_members(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_members_rest_unset_required_fields(): + transport = transports.SpacesServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_members._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +def test_list_members_rest_flattened(): + client = SpacesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = service.ListMembersResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "spaces/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = service.ListMembersResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.list_members(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2beta/{parent=spaces/*}/members" % client.transport._host, args[1] + ) + + +def test_list_members_rest_flattened_error(transport: str = "rest"): + client = SpacesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_members( + service.ListMembersRequest(), + parent="parent_value", + ) + + +def test_list_members_rest_pager(transport: str = "rest"): + client = SpacesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + service.ListMembersResponse( + members=[ + resource.Member(), + resource.Member(), + resource.Member(), + ], + next_page_token="abc", + ), + service.ListMembersResponse( + members=[], + next_page_token="def", + ), + service.ListMembersResponse( + members=[ + resource.Member(), + ], + next_page_token="ghi", + ), + service.ListMembersResponse( + members=[ + resource.Member(), + resource.Member(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(service.ListMembersResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "spaces/sample1"} + + pager = client.list_members(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, resource.Member) for i in results) + + pages = list(client.list_members(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_delete_member_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SpacesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_member in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete_member] = mock_rpc + + request = {} + client.delete_member(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.delete_member(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_member_rest_required_fields(request_type=service.DeleteMemberRequest): + transport_class = transports.SpacesServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_member._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_member._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = SpacesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.delete_member(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_member_rest_unset_required_fields(): + transport = transports.SpacesServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_member._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +def test_delete_member_rest_flattened(): + client = SpacesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "spaces/sample1/members/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.delete_member(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2beta/{name=spaces/*/members/*}" % client.transport._host, args[1] + ) + + +def test_delete_member_rest_flattened_error(transport: str = "rest"): + client = SpacesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_member( + service.DeleteMemberRequest(), + name="name_value", + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.SpacesServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = SpacesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.SpacesServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = SpacesServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.SpacesServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = SpacesServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = SpacesServiceClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.SpacesServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = SpacesServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.SpacesServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = SpacesServiceClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. 
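+    # (both the sync gRPC transport and the asyncio gRPC transport are expected to expose a usable grpc_channel; checked below)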
+ transport = transports.SpacesServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.SpacesServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.SpacesServiceGrpcTransport, + transports.SpacesServiceGrpcAsyncIOTransport, + transports.SpacesServiceRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +def test_transport_kind_grpc(): + transport = SpacesServiceClient.get_transport_class("grpc")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "grpc" + + +def test_initialize_client_w_grpc(): + client = SpacesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_space_empty_call_grpc(): + client = SpacesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.create_space), "__call__") as call: + call.return_value = resource.Space() + client.create_space(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.CreateSpaceRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_space_empty_call_grpc(): + client = SpacesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_space), "__call__") as call: + call.return_value = resource.Space() + client.get_space(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.GetSpaceRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_space_empty_call_grpc(): + client = SpacesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.update_space), "__call__") as call: + call.return_value = resource.Space() + client.update_space(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.UpdateSpaceRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
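+# (passing request=None should make the client construct a default EndActiveConferenceRequest before invoking the stub, as asserted below)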
+def test_end_active_conference_empty_call_grpc(): + client = SpacesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.end_active_conference), "__call__" + ) as call: + call.return_value = None + client.end_active_conference(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.EndActiveConferenceRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_member_empty_call_grpc(): + client = SpacesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.create_member), "__call__") as call: + call.return_value = resource.Member() + client.create_member(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.CreateMemberRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_member_empty_call_grpc(): + client = SpacesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_member), "__call__") as call: + call.return_value = resource.Member() + client.get_member(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.GetMemberRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_members_empty_call_grpc(): + client = SpacesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.list_members), "__call__") as call: + call.return_value = service.ListMembersResponse() + client.list_members(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.ListMembersRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_member_empty_call_grpc(): + client = SpacesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.delete_member), "__call__") as call: + call.return_value = None + client.delete_member(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.DeleteMemberRequest() + + assert args[0] == request_msg + + +def test_transport_kind_grpc_asyncio(): + transport = SpacesServiceAsyncClient.get_transport_class("grpc_asyncio")( + credentials=async_anonymous_credentials() + ) + assert transport.kind == "grpc_asyncio" + + +def test_initialize_client_w_grpc_asyncio(): + client = SpacesServiceAsyncClient( + credentials=async_anonymous_credentials(), transport="grpc_asyncio" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_space_empty_call_grpc_asyncio(): + client = SpacesServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.create_space), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resource.Space( + name="name_value", + meeting_uri="meeting_uri_value", + meeting_code="meeting_code_value", + ) + ) + await client.create_space(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.CreateSpaceRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_space_empty_call_grpc_asyncio(): + client = SpacesServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_space), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resource.Space( + name="name_value", + meeting_uri="meeting_uri_value", + meeting_code="meeting_code_value", + ) + ) + await client.get_space(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.GetSpaceRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_space_empty_call_grpc_asyncio(): + client = SpacesServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.update_space), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resource.Space( + name="name_value", + meeting_uri="meeting_uri_value", + meeting_code="meeting_code_value", + ) + ) + await client.update_space(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.UpdateSpaceRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+@pytest.mark.asyncio +async def test_end_active_conference_empty_call_grpc_asyncio(): + client = SpacesServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.end_active_conference), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.end_active_conference(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.EndActiveConferenceRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_member_empty_call_grpc_asyncio(): + client = SpacesServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.create_member), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resource.Member( + name="name_value", + email="email_value", + role=resource.Member.Role.COHOST, + user="user_value", + ) + ) + await client.create_member(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.CreateMemberRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_member_empty_call_grpc_asyncio(): + client = SpacesServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_member), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resource.Member( + name="name_value", + email="email_value", + role=resource.Member.Role.COHOST, + user="user_value", + ) + ) + await client.get_member(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.GetMemberRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_members_empty_call_grpc_asyncio(): + client = SpacesServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.list_members), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.ListMembersResponse( + next_page_token="next_page_token_value", + ) + ) + await client.list_members(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.ListMembersRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_member_empty_call_grpc_asyncio(): + client = SpacesServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.delete_member), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_member(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.DeleteMemberRequest() + + assert args[0] == request_msg + + +def test_transport_kind_rest(): + transport = SpacesServiceClient.get_transport_class("rest")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "rest" + + +def test_create_space_rest_bad_request(request_type=service.CreateSpaceRequest): + client = SpacesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.create_space(request) + + +@pytest.mark.parametrize( + "request_type", + [ + service.CreateSpaceRequest, + dict, + ], +) +def test_create_space_rest_call_success(request_type): + client = SpacesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {} + request_init["space"] = { + "name": "name_value", + "meeting_uri": "meeting_uri_value", + "meeting_code": "meeting_code_value", + "config": { + "access_type": 1, + "entry_point_access": 1, + "moderation": 1, + "moderation_restrictions": { + "chat_restriction": 1, + "reaction_restriction": 1, + "present_restriction": 1, + "default_join_as_viewer_type": 1, + }, + "attendance_report_generation_type": 1, + "artifact_config": { + "recording_config": {"auto_recording_generation": 1}, + "transcription_config": {"auto_transcription_generation": 1}, + "smart_notes_config": {"auto_smart_notes_generation": 1}, + }, + }, + "active_conference": {"conference_record": "conference_record_value"}, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
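+    # (for example, the installed proto-plus / protobuf message definitions may not yet contain every subfield used in this sample request)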
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = service.CreateSpaceRequest.meta.fields["space"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["space"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["space"][field])): + del request_init["space"][field][i][subfield] + else: + del request_init["space"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = resource.Space( + name="name_value", + meeting_uri="meeting_uri_value", + meeting_code="meeting_code_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = resource.Space.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.create_space(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, resource.Space) + assert response.name == "name_value" + assert response.meeting_uri == "meeting_uri_value" + assert response.meeting_code == "meeting_code_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_space_rest_interceptors(null_interceptor): + transport = transports.SpacesServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.SpacesServiceRestInterceptor(), + ) + client = SpacesServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.SpacesServiceRestInterceptor, "post_create_space" + ) as post, mock.patch.object( + transports.SpacesServiceRestInterceptor, "post_create_space_with_metadata" + ) as post_with_metadata, mock.patch.object( + transports.SpacesServiceRestInterceptor, "pre_create_space" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = service.CreateSpaceRequest.pb(service.CreateSpaceRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = resource.Space.to_json(resource.Space()) + req.return_value.content = return_value + + request = service.CreateSpaceRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = resource.Space() + post_with_metadata.return_value = resource.Space(), metadata + + client.create_space( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_get_space_rest_bad_request(request_type=service.GetSpaceRequest): + client = SpacesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"name": "spaces/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_space(request) + + +@pytest.mark.parametrize( + "request_type", + [ + service.GetSpaceRequest, + dict, + ], +) +def test_get_space_rest_call_success(request_type): + client = SpacesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"name": "spaces/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = resource.Space( + name="name_value", + meeting_uri="meeting_uri_value", + meeting_code="meeting_code_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = resource.Space.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.get_space(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, resource.Space) + assert response.name == "name_value" + assert response.meeting_uri == "meeting_uri_value" + assert response.meeting_code == "meeting_code_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_space_rest_interceptors(null_interceptor): + transport = transports.SpacesServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.SpacesServiceRestInterceptor(), + ) + client = SpacesServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.SpacesServiceRestInterceptor, "post_get_space" + ) as post, mock.patch.object( + transports.SpacesServiceRestInterceptor, "post_get_space_with_metadata" + ) as post_with_metadata, mock.patch.object( + transports.SpacesServiceRestInterceptor, "pre_get_space" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = service.GetSpaceRequest.pb(service.GetSpaceRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = resource.Space.to_json(resource.Space()) + req.return_value.content = return_value + + request = service.GetSpaceRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = resource.Space() + post_with_metadata.return_value = resource.Space(), metadata + + client.get_space( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_update_space_rest_bad_request(request_type=service.UpdateSpaceRequest): + client = SpacesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"space": {"name": "spaces/sample1"}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.update_space(request) + + +@pytest.mark.parametrize( + "request_type", + [ + service.UpdateSpaceRequest, + dict, + ], +) +def test_update_space_rest_call_success(request_type): + client = SpacesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"space": {"name": "spaces/sample1"}} + request_init["space"] = { + "name": "spaces/sample1", + "meeting_uri": "meeting_uri_value", + "meeting_code": "meeting_code_value", + "config": { + "access_type": 1, + "entry_point_access": 1, + "moderation": 1, + "moderation_restrictions": { + "chat_restriction": 1, + "reaction_restriction": 1, + "present_restriction": 1, + "default_join_as_viewer_type": 1, + }, + "attendance_report_generation_type": 1, + "artifact_config": { + "recording_config": {"auto_recording_generation": 1}, + "transcription_config": {"auto_transcription_generation": 1}, + "smart_notes_config": {"auto_smart_notes_generation": 1}, + }, + }, + "active_conference": {"conference_record": "conference_record_value"}, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = service.UpdateSpaceRequest.meta.fields["space"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["space"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["space"][field])): + del request_init["space"][field][i][subfield] + else: + del request_init["space"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = resource.Space( + name="name_value", + meeting_uri="meeting_uri_value", + meeting_code="meeting_code_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = resource.Space.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.update_space(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, resource.Space) + assert response.name == "name_value" + assert response.meeting_uri == "meeting_uri_value" + assert response.meeting_code == "meeting_code_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_space_rest_interceptors(null_interceptor): + transport = transports.SpacesServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.SpacesServiceRestInterceptor(), + ) + client = SpacesServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.SpacesServiceRestInterceptor, "post_update_space" + ) as post, mock.patch.object( + transports.SpacesServiceRestInterceptor, "post_update_space_with_metadata" + ) as post_with_metadata, mock.patch.object( + transports.SpacesServiceRestInterceptor, "pre_update_space" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = service.UpdateSpaceRequest.pb(service.UpdateSpaceRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = resource.Space.to_json(resource.Space()) + req.return_value.content = return_value + + request = service.UpdateSpaceRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = resource.Space() + post_with_metadata.return_value = resource.Space(), metadata + + client.update_space( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_end_active_conference_rest_bad_request( + request_type=service.EndActiveConferenceRequest, +): + client = SpacesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"name": "spaces/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.end_active_conference(request) + + +@pytest.mark.parametrize( + "request_type", + [ + service.EndActiveConferenceRequest, + dict, + ], +) +def test_end_active_conference_rest_call_success(request_type): + client = SpacesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"name": "spaces/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = "" + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.end_active_conference(request) + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_end_active_conference_rest_interceptors(null_interceptor): + transport = transports.SpacesServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.SpacesServiceRestInterceptor(), + ) + client = SpacesServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.SpacesServiceRestInterceptor, "pre_end_active_conference" + ) as pre: + pre.assert_not_called() + pb_message = service.EndActiveConferenceRequest.pb( + service.EndActiveConferenceRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + request = service.EndActiveConferenceRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.end_active_conference( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() -def test_create_space_rest_bad_request(request_type=service.CreateSpaceRequest): +def test_create_member_rest_bad_request(request_type=service.CreateMemberRequest): client = SpacesServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {} + request_init = {"parent": "spaces/sample1"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
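[Editor's note, not part of the diff] The interceptor tests above mock the `pre_*`, `post_*`, and newly added `post_*_with_metadata` hooks on `SpacesServiceRestInterceptor`. For orientation only, here is a minimal sketch of a custom interceptor exercising those hooks, assuming the signatures implied by the tests (pre hooks return `(request, metadata)`, `*_with_metadata` post hooks return `(response, metadata)`); the class and method names mirror the ones mocked above, but this code is illustrative and not part of this change.

# Illustrative sketch only; names and signatures mirror the hooks mocked in the tests above.
class LoggingInterceptor(transports.SpacesServiceRestInterceptor):
    def pre_create_space(self, request, metadata):
        # Inspect or amend the request and metadata before the HTTP call is made.
        return request, list(metadata) + [("x-debug", "1")]

    def post_create_space_with_metadata(self, response, metadata):
        # Read the response together with the returned metadata after the call.
        return response, metadata

client = SpacesServiceClient(
    transport=transports.SpacesServiceRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
        interceptor=LoggingInterceptor(),
    )
)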
@@ -3271,36 +6468,35 @@ def test_create_space_rest_bad_request(request_type=service.CreateSpaceRequest): response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.create_space(request) + client.create_member(request) @pytest.mark.parametrize( "request_type", [ - service.CreateSpaceRequest, + service.CreateMemberRequest, dict, ], ) -def test_create_space_rest_call_success(request_type): +def test_create_member_rest_call_success(request_type): client = SpacesServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {} - request_init["space"] = { + request_init = {"parent": "spaces/sample1"} + request_init["member"] = { "name": "name_value", - "meeting_uri": "meeting_uri_value", - "meeting_code": "meeting_code_value", - "config": {"access_type": 1, "entry_point_access": 1}, - "active_conference": {"conference_record": "conference_record_value"}, + "email": "email_value", + "role": 1, + "user": "user_value", } # The version of a generated dependency at test runtime may differ from the version used during generation. # Delete any fields which are not present in the current runtime dependency # See https://github.com/googleapis/gapic-generator-python/issues/1748 # Determine if the message type is proto-plus or protobuf - test_field = service.CreateSpaceRequest.meta.fields["space"] + test_field = service.CreateMemberRequest.meta.fields["member"] def get_message_fields(field): # Given a field which is a message (composite type), return a list with @@ -3328,7 +6524,7 @@ def get_message_fields(field): # For each item in the sample request, create a list of sub fields which are not present at runtime # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["space"].items(): # pragma: NO COVER + for field, value in request_init["member"].items(): # pragma: NO COVER result = None is_repeated = False # For repeated fields @@ -3358,19 +6554,20 @@ def get_message_fields(field): subfield = subfield_to_delete.get("subfield") if subfield: if field_repeated: - for i in range(0, len(request_init["space"][field])): - del request_init["space"][field][i][subfield] + for i in range(0, len(request_init["member"][field])): + del request_init["member"][field][i][subfield] else: - del request_init["space"][field][subfield] + del request_init["member"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = resource.Space( + return_value = resource.Member( name="name_value", - meeting_uri="meeting_uri_value", - meeting_code="meeting_code_value", + email="email_value", + role=resource.Member.Role.COHOST, + user="user_value", ) # Wrap the value into a proper Response obj @@ -3378,22 +6575,23 @@ def get_message_fields(field): response_value.status_code = 200 # Convert return value to protobuf type - return_value = resource.Space.pb(return_value) + return_value = resource.Member.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.create_space(request) + response = client.create_member(request) # Establish that the response is the type that we expect. - assert isinstance(response, resource.Space) + assert isinstance(response, resource.Member) assert response.name == "name_value" - assert response.meeting_uri == "meeting_uri_value" - assert response.meeting_code == "meeting_code_value" + assert response.email == "email_value" + assert response.role == resource.Member.Role.COHOST + assert response.user == "user_value" @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_space_rest_interceptors(null_interceptor): +def test_create_member_rest_interceptors(null_interceptor): transport = transports.SpacesServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -3407,13 +6605,16 @@ def test_create_space_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.SpacesServiceRestInterceptor, "post_create_space" + transports.SpacesServiceRestInterceptor, "post_create_member" ) as post, mock.patch.object( - transports.SpacesServiceRestInterceptor, "pre_create_space" + transports.SpacesServiceRestInterceptor, "post_create_member_with_metadata" + ) as post_with_metadata, mock.patch.object( + transports.SpacesServiceRestInterceptor, "pre_create_member" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = service.CreateSpaceRequest.pb(service.CreateSpaceRequest()) + post_with_metadata.assert_not_called() + pb_message = service.CreateMemberRequest.pb(service.CreateMemberRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -3424,18 +6625,19 @@ def test_create_space_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = resource.Space.to_json(resource.Space()) + return_value = resource.Member.to_json(resource.Member()) req.return_value.content = return_value - request = service.CreateSpaceRequest() + request = service.CreateMemberRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = resource.Space() + post.return_value = resource.Member() + post_with_metadata.return_value = resource.Member(), metadata - client.create_space( + client.create_member( request, metadata=[ ("key", "val"), @@ -3445,14 +6647,15 @@ def test_create_space_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() -def test_get_space_rest_bad_request(request_type=service.GetSpaceRequest): +def 
test_get_member_rest_bad_request(request_type=service.GetMemberRequest): client = SpacesServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"name": "spaces/sample1"} + request_init = {"name": "spaces/sample1/members/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -3467,32 +6670,33 @@ def test_get_space_rest_bad_request(request_type=service.GetSpaceRequest): response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_space(request) + client.get_member(request) @pytest.mark.parametrize( "request_type", [ - service.GetSpaceRequest, + service.GetMemberRequest, dict, ], ) -def test_get_space_rest_call_success(request_type): +def test_get_member_rest_call_success(request_type): client = SpacesServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"name": "spaces/sample1"} + request_init = {"name": "spaces/sample1/members/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = resource.Space( + return_value = resource.Member( name="name_value", - meeting_uri="meeting_uri_value", - meeting_code="meeting_code_value", + email="email_value", + role=resource.Member.Role.COHOST, + user="user_value", ) # Wrap the value into a proper Response obj @@ -3500,22 +6704,23 @@ def test_get_space_rest_call_success(request_type): response_value.status_code = 200 # Convert return value to protobuf type - return_value = resource.Space.pb(return_value) + return_value = resource.Member.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_space(request) + response = client.get_member(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, resource.Space) + assert isinstance(response, resource.Member) assert response.name == "name_value" - assert response.meeting_uri == "meeting_uri_value" - assert response.meeting_code == "meeting_code_value" + assert response.email == "email_value" + assert response.role == resource.Member.Role.COHOST + assert response.user == "user_value" @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_space_rest_interceptors(null_interceptor): +def test_get_member_rest_interceptors(null_interceptor): transport = transports.SpacesServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -3529,13 +6734,16 @@ def test_get_space_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.SpacesServiceRestInterceptor, "post_get_space" + transports.SpacesServiceRestInterceptor, "post_get_member" ) as post, mock.patch.object( - transports.SpacesServiceRestInterceptor, "pre_get_space" + transports.SpacesServiceRestInterceptor, "post_get_member_with_metadata" + ) as post_with_metadata, mock.patch.object( + transports.SpacesServiceRestInterceptor, "pre_get_member" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = service.GetSpaceRequest.pb(service.GetSpaceRequest()) + post_with_metadata.assert_not_called() + pb_message = service.GetMemberRequest.pb(service.GetMemberRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -3546,18 +6754,19 @@ def test_get_space_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = resource.Space.to_json(resource.Space()) + return_value = resource.Member.to_json(resource.Member()) req.return_value.content = return_value - request = service.GetSpaceRequest() + request = service.GetMemberRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = resource.Space() + post.return_value = resource.Member() + post_with_metadata.return_value = resource.Member(), metadata - client.get_space( + client.get_member( request, metadata=[ ("key", "val"), @@ -3567,14 +6776,15 @@ def test_get_space_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() -def test_update_space_rest_bad_request(request_type=service.UpdateSpaceRequest): +def test_list_members_rest_bad_request(request_type=service.ListMembersRequest): client = SpacesServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"space": {"name": "spaces/sample1"}} + request_init = {"parent": "spaces/sample1"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
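[Editor's note, not part of the diff] The next hunk replaces the `update_space` call-success test with a `list_members` test whose success path asserts a `pagers.ListMembersPager`. As a point of reference only, a minimal usage sketch of how such a pager is typically consumed, assuming standard GAPIC pager iteration and reusing the names from the tests; this is illustrative and not part of this change.

# Illustrative sketch only; assumes standard GAPIC pager behavior.
client = SpacesServiceClient(credentials=ga_credentials.AnonymousCredentials())

# list_members returns a ListMembersPager; iterating it lazily fetches
# additional pages using next_page_token behind the scenes.
for member in client.list_members(parent="spaces/sample1"):
    print(member.name, member.role)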
@@ -3589,106 +6799,30 @@ def test_update_space_rest_bad_request(request_type=service.UpdateSpaceRequest): response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.update_space(request) + client.list_members(request) @pytest.mark.parametrize( "request_type", [ - service.UpdateSpaceRequest, + service.ListMembersRequest, dict, ], ) -def test_update_space_rest_call_success(request_type): +def test_list_members_rest_call_success(request_type): client = SpacesServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"space": {"name": "spaces/sample1"}} - request_init["space"] = { - "name": "spaces/sample1", - "meeting_uri": "meeting_uri_value", - "meeting_code": "meeting_code_value", - "config": {"access_type": 1, "entry_point_access": 1}, - "active_conference": {"conference_record": "conference_record_value"}, - } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = service.UpdateSpaceRequest.meta.fields["space"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. - message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["space"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["space"][field])): - del 
request_init["space"][field][i][subfield] - else: - del request_init["space"][field][subfield] + request_init = {"parent": "spaces/sample1"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = resource.Space( - name="name_value", - meeting_uri="meeting_uri_value", - meeting_code="meeting_code_value", + return_value = service.ListMembersResponse( + next_page_token="next_page_token_value", ) # Wrap the value into a proper Response obj @@ -3696,22 +6830,20 @@ def get_message_fields(field): response_value.status_code = 200 # Convert return value to protobuf type - return_value = resource.Space.pb(return_value) + return_value = service.ListMembersResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.update_space(request) + response = client.list_members(request) # Establish that the response is the type that we expect. - assert isinstance(response, resource.Space) - assert response.name == "name_value" - assert response.meeting_uri == "meeting_uri_value" - assert response.meeting_code == "meeting_code_value" + assert isinstance(response, pagers.ListMembersPager) + assert response.next_page_token == "next_page_token_value" @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_space_rest_interceptors(null_interceptor): +def test_list_members_rest_interceptors(null_interceptor): transport = transports.SpacesServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -3725,13 +6857,16 @@ def test_update_space_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.SpacesServiceRestInterceptor, "post_update_space" + transports.SpacesServiceRestInterceptor, "post_list_members" ) as post, mock.patch.object( - transports.SpacesServiceRestInterceptor, "pre_update_space" + transports.SpacesServiceRestInterceptor, "post_list_members_with_metadata" + ) as post_with_metadata, mock.patch.object( + transports.SpacesServiceRestInterceptor, "pre_list_members" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = service.UpdateSpaceRequest.pb(service.UpdateSpaceRequest()) + post_with_metadata.assert_not_called() + pb_message = service.ListMembersRequest.pb(service.ListMembersRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -3742,18 +6877,21 @@ def test_update_space_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = resource.Space.to_json(resource.Space()) + return_value = service.ListMembersResponse.to_json( + service.ListMembersResponse() + ) req.return_value.content = return_value - request = service.UpdateSpaceRequest() + request = service.ListMembersRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = resource.Space() + post.return_value = service.ListMembersResponse() + post_with_metadata.return_value = service.ListMembersResponse(), metadata - client.update_space( + client.list_members( 
request, metadata=[ ("key", "val"), @@ -3763,16 +6901,15 @@ def test_update_space_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() -def test_end_active_conference_rest_bad_request( - request_type=service.EndActiveConferenceRequest, -): +def test_delete_member_rest_bad_request(request_type=service.DeleteMemberRequest): client = SpacesServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"name": "spaces/sample1"} + request_init = {"name": "spaces/sample1/members/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -3787,23 +6924,23 @@ def test_end_active_conference_rest_bad_request( response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.end_active_conference(request) + client.delete_member(request) @pytest.mark.parametrize( "request_type", [ - service.EndActiveConferenceRequest, + service.DeleteMemberRequest, dict, ], ) -def test_end_active_conference_rest_call_success(request_type): +def test_delete_member_rest_call_success(request_type): client = SpacesServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"name": "spaces/sample1"} + request_init = {"name": "spaces/sample1/members/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -3818,14 +6955,14 @@ def test_end_active_conference_rest_call_success(request_type): response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.end_active_conference(request) + response = client.delete_member(request) # Establish that the response is the type that we expect. 
assert response is None @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_end_active_conference_rest_interceptors(null_interceptor): +def test_delete_member_rest_interceptors(null_interceptor): transport = transports.SpacesServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -3839,12 +6976,10 @@ def test_end_active_conference_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.SpacesServiceRestInterceptor, "pre_end_active_conference" + transports.SpacesServiceRestInterceptor, "pre_delete_member" ) as pre: pre.assert_not_called() - pb_message = service.EndActiveConferenceRequest.pb( - service.EndActiveConferenceRequest() - ) + pb_message = service.DeleteMemberRequest.pb(service.DeleteMemberRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -3856,14 +6991,14 @@ def test_end_active_conference_rest_interceptors(null_interceptor): req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - request = service.EndActiveConferenceRequest() + request = service.DeleteMemberRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - client.end_active_conference( + client.delete_member( request, metadata=[ ("key", "val"), @@ -3963,6 +7098,86 @@ def test_end_active_conference_empty_call_rest(): assert args[0] == request_msg +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_member_empty_call_rest(): + client = SpacesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.create_member), "__call__") as call: + client.create_member(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.CreateMemberRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_member_empty_call_rest(): + client = SpacesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_member), "__call__") as call: + client.get_member(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.GetMemberRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_members_empty_call_rest(): + client = SpacesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.list_members), "__call__") as call: + client.list_members(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.ListMembersRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. 
request == None and no flattened fields passed, work. +def test_delete_member_empty_call_rest(): + client = SpacesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.delete_member), "__call__") as call: + client.delete_member(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.DeleteMemberRequest() + + assert args[0] == request_msg + + def test_transport_grpc_default(): # A client should use the gRPC transport by default. client = SpacesServiceClient( @@ -4000,6 +7215,10 @@ def test_spaces_service_base_transport(): "get_space", "update_space", "end_active_conference", + "create_member", + "get_member", + "list_members", + "delete_member", ) for method in methods: with pytest.raises(NotImplementedError): @@ -4033,7 +7252,11 @@ def test_spaces_service_base_transport_with_credentials_file(): load_creds.assert_called_once_with( "credentials.json", scopes=None, - default_scopes=(), + default_scopes=( + "https://www.googleapis.com/auth/meetings.space.created", + "https://www.googleapis.com/auth/meetings.space.readonly", + "https://www.googleapis.com/auth/meetings.space.settings", + ), quota_project_id="octopus", ) @@ -4056,7 +7279,11 @@ def test_spaces_service_auth_adc(): SpacesServiceClient() adc.assert_called_once_with( scopes=None, - default_scopes=(), + default_scopes=( + "https://www.googleapis.com/auth/meetings.space.created", + "https://www.googleapis.com/auth/meetings.space.readonly", + "https://www.googleapis.com/auth/meetings.space.settings", + ), quota_project_id=None, ) @@ -4076,7 +7303,11 @@ def test_spaces_service_transport_auth_adc(transport_class): transport_class(quota_project_id="octopus", scopes=["1", "2"]) adc.assert_called_once_with( scopes=["1", "2"], - default_scopes=(), + default_scopes=( + "https://www.googleapis.com/auth/meetings.space.created", + "https://www.googleapis.com/auth/meetings.space.readonly", + "https://www.googleapis.com/auth/meetings.space.settings", + ), quota_project_id="octopus", ) @@ -4128,7 +7359,11 @@ def test_spaces_service_transport_create_channel(transport_class, grpc_helpers): credentials=creds, credentials_file=None, quota_project_id="octopus", - default_scopes=(), + default_scopes=( + "https://www.googleapis.com/auth/meetings.space.created", + "https://www.googleapis.com/auth/meetings.space.readonly", + "https://www.googleapis.com/auth/meetings.space.settings", + ), scopes=["1", "2"], default_host="meet.googleapis.com", ssl_credentials=None, @@ -4268,6 +7503,18 @@ def test_spaces_service_client_transport_session_collision(transport_name): session1 = client1.transport.end_active_conference._session session2 = client2.transport.end_active_conference._session assert session1 != session2 + session1 = client1.transport.create_member._session + session2 = client2.transport.create_member._session + assert session1 != session2 + session1 = client1.transport.get_member._session + session2 = client2.transport.get_member._session + assert session1 != session2 + session1 = client1.transport.list_members._session + session2 = client2.transport.list_members._session + assert session1 != session2 + session1 = client1.transport.delete_member._session + session2 = client2.transport.delete_member._session + assert session1 != session2 def test_spaces_service_grpc_transport_channel(): @@ -4414,8 +7661,31 @@ def 
test_parse_conference_record_path(): assert expected == actual -def test_space_path(): +def test_member_path(): space = "whelk" + member = "octopus" + expected = "spaces/{space}/members/{member}".format( + space=space, + member=member, + ) + actual = SpacesServiceClient.member_path(space, member) + assert expected == actual + + +def test_parse_member_path(): + expected = { + "space": "oyster", + "member": "nudibranch", + } + path = SpacesServiceClient.member_path(**expected) + + # Check that the path construction is reversible. + actual = SpacesServiceClient.parse_member_path(path) + assert expected == actual + + +def test_space_path(): + space = "cuttlefish" expected = "spaces/{space}".format( space=space, ) @@ -4425,7 +7695,7 @@ def test_space_path(): def test_parse_space_path(): expected = { - "space": "octopus", + "space": "mussel", } path = SpacesServiceClient.space_path(**expected) @@ -4435,7 +7705,7 @@ def test_parse_space_path(): def test_common_billing_account_path(): - billing_account = "oyster" + billing_account = "winkle" expected = "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -4445,7 +7715,7 @@ def test_common_billing_account_path(): def test_parse_common_billing_account_path(): expected = { - "billing_account": "nudibranch", + "billing_account": "nautilus", } path = SpacesServiceClient.common_billing_account_path(**expected) @@ -4455,7 +7725,7 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): - folder = "cuttlefish" + folder = "scallop" expected = "folders/{folder}".format( folder=folder, ) @@ -4465,7 +7735,7 @@ def test_common_folder_path(): def test_parse_common_folder_path(): expected = { - "folder": "mussel", + "folder": "abalone", } path = SpacesServiceClient.common_folder_path(**expected) @@ -4475,7 +7745,7 @@ def test_parse_common_folder_path(): def test_common_organization_path(): - organization = "winkle" + organization = "squid" expected = "organizations/{organization}".format( organization=organization, ) @@ -4485,7 +7755,7 @@ def test_common_organization_path(): def test_parse_common_organization_path(): expected = { - "organization": "nautilus", + "organization": "clam", } path = SpacesServiceClient.common_organization_path(**expected) @@ -4495,7 +7765,7 @@ def test_parse_common_organization_path(): def test_common_project_path(): - project = "scallop" + project = "whelk" expected = "projects/{project}".format( project=project, ) @@ -4505,7 +7775,7 @@ def test_common_project_path(): def test_parse_common_project_path(): expected = { - "project": "abalone", + "project": "octopus", } path = SpacesServiceClient.common_project_path(**expected) @@ -4515,8 +7785,8 @@ def test_parse_common_project_path(): def test_common_location_path(): - project = "squid" - location = "clam" + project = "oyster" + location = "nudibranch" expected = "projects/{project}/locations/{location}".format( project=project, location=location, @@ -4527,8 +7797,8 @@ def test_common_location_path(): def test_parse_common_location_path(): expected = { - "project": "whelk", - "location": "octopus", + "project": "cuttlefish", + "location": "mussel", } path = SpacesServiceClient.common_location_path(**expected) diff --git a/packages/google-apps-script-type/CHANGELOG.md b/packages/google-apps-script-type/CHANGELOG.md index 17d4adb4933b..aa7a06a63a52 100644 --- a/packages/google-apps-script-type/CHANGELOG.md +++ b/packages/google-apps-script-type/CHANGELOG.md @@ -1,5 +1,20 @@ # Changelog +## 
[0.3.13](https://github.com/googleapis/google-cloud-python/compare/google-apps-script-type-v0.3.12...google-apps-script-type-v0.3.13) (2025-02-12) + + +### Features + +* Add REST Interceptors which support reading metadata ([a961bc0](https://github.com/googleapis/google-cloud-python/commit/a961bc029201b72fc4923490aeb3d82781853e6a)) +* Add support for reading selective GAPIC generation methods from service YAML ([a961bc0](https://github.com/googleapis/google-cloud-python/commit/a961bc029201b72fc4923490aeb3d82781853e6a)) + +## [0.3.12](https://github.com/googleapis/google-cloud-python/compare/google-apps-script-type-v0.3.11...google-apps-script-type-v0.3.12) (2025-01-27) + + +### Documentation + +* [google-apps-script-type] Minor documentation edits ([#13464](https://github.com/googleapis/google-cloud-python/issues/13464)) ([c45e8e9](https://github.com/googleapis/google-cloud-python/commit/c45e8e9bb4efbeeb2e1ffb1b4e9847364c33d76a)) + ## [0.3.11](https://github.com/googleapis/google-cloud-python/compare/google-apps-script-type-v0.3.10...google-apps-script-type-v0.3.11) (2024-10-24) diff --git a/packages/google-apps-script-type/google/apps/script/type/calendar/gapic_version.py b/packages/google-apps-script-type/google/apps/script/type/calendar/gapic_version.py index 075108786e34..fb3463bbb3c2 100644 --- a/packages/google-apps-script-type/google/apps/script/type/calendar/gapic_version.py +++ b/packages/google-apps-script-type/google/apps/script/type/calendar/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.3.11" # {x-release-please-version} +__version__ = "0.3.13" # {x-release-please-version} diff --git a/packages/google-apps-script-type/google/apps/script/type/docs/gapic_version.py b/packages/google-apps-script-type/google/apps/script/type/docs/gapic_version.py index 075108786e34..fb3463bbb3c2 100644 --- a/packages/google-apps-script-type/google/apps/script/type/docs/gapic_version.py +++ b/packages/google-apps-script-type/google/apps/script/type/docs/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.3.11" # {x-release-please-version} +__version__ = "0.3.13" # {x-release-please-version} diff --git a/packages/google-apps-script-type/google/apps/script/type/drive/gapic_version.py b/packages/google-apps-script-type/google/apps/script/type/drive/gapic_version.py index 075108786e34..fb3463bbb3c2 100644 --- a/packages/google-apps-script-type/google/apps/script/type/drive/gapic_version.py +++ b/packages/google-apps-script-type/google/apps/script/type/drive/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.3.11" # {x-release-please-version} +__version__ = "0.3.13" # {x-release-please-version} diff --git a/packages/google-apps-script-type/google/apps/script/type/gapic_version.py b/packages/google-apps-script-type/google/apps/script/type/gapic_version.py index 075108786e34..fb3463bbb3c2 100644 --- a/packages/google-apps-script-type/google/apps/script/type/gapic_version.py +++ b/packages/google-apps-script-type/google/apps/script/type/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.3.11" # {x-release-please-version} +__version__ = "0.3.13" # {x-release-please-version} diff --git a/packages/google-apps-script-type/google/apps/script/type/gmail/gapic_version.py b/packages/google-apps-script-type/google/apps/script/type/gmail/gapic_version.py index 075108786e34..fb3463bbb3c2 100644 --- a/packages/google-apps-script-type/google/apps/script/type/gmail/gapic_version.py +++ b/packages/google-apps-script-type/google/apps/script/type/gmail/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.3.11" # {x-release-please-version} +__version__ = "0.3.13" # {x-release-please-version} diff --git a/packages/google-apps-script-type/google/apps/script/type/sheets/gapic_version.py b/packages/google-apps-script-type/google/apps/script/type/sheets/gapic_version.py index 075108786e34..fb3463bbb3c2 100644 --- a/packages/google-apps-script-type/google/apps/script/type/sheets/gapic_version.py +++ b/packages/google-apps-script-type/google/apps/script/type/sheets/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.3.11" # {x-release-please-version} +__version__ = "0.3.13" # {x-release-please-version} diff --git a/packages/google-apps-script-type/google/apps/script/type/slides/gapic_version.py b/packages/google-apps-script-type/google/apps/script/type/slides/gapic_version.py index 075108786e34..fb3463bbb3c2 100644 --- a/packages/google-apps-script-type/google/apps/script/type/slides/gapic_version.py +++ b/packages/google-apps-script-type/google/apps/script/type/slides/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.3.11" # {x-release-please-version} +__version__ = "0.3.13" # {x-release-please-version} diff --git a/packages/google-apps-script-type/google/apps/script/type/types/script_manifest.py b/packages/google-apps-script-type/google/apps/script/type/types/script_manifest.py index 26012eb12fad..18f81ba5d09c 100644 --- a/packages/google-apps-script-type/google/apps/script/type/types/script_manifest.py +++ b/packages/google-apps-script-type/google/apps/script/type/types/script_manifest.py @@ -41,7 +41,7 @@ class HttpAuthorizationHeader(proto.Enum): Default value, equivalent to ``SYSTEM_ID_TOKEN`` SYSTEM_ID_TOKEN (1): Send an ID token for the project-specific - Google Workspace Add-ons system service account + Google Workspace add-ons system service account (default) USER_ID_TOKEN (2): Send an ID token for the end user diff --git a/packages/google-area120-tables/CHANGELOG.md b/packages/google-area120-tables/CHANGELOG.md index e102aacbcade..8e0f5965ff04 100644 --- a/packages/google-area120-tables/CHANGELOG.md +++ b/packages/google-area120-tables/CHANGELOG.md @@ -1,5 +1,13 @@ # Changelog +## [0.11.15](https://github.com/googleapis/google-cloud-python/compare/google-area120-tables-v0.11.14...google-area120-tables-v0.11.15) (2025-02-12) + + +### Features + +* Add REST Interceptors which support reading metadata ([a961bc0](https://github.com/googleapis/google-cloud-python/commit/a961bc029201b72fc4923490aeb3d82781853e6a)) +* Add support for reading selective GAPIC generation methods from service YAML ([a961bc0](https://github.com/googleapis/google-cloud-python/commit/a961bc029201b72fc4923490aeb3d82781853e6a)) + ## [0.11.14](https://github.com/googleapis/google-cloud-python/compare/google-area120-tables-v0.11.13...google-area120-tables-v0.11.14) (2024-12-12) diff --git a/packages/google-area120-tables/google/area120/tables/gapic_version.py b/packages/google-area120-tables/google/area120/tables/gapic_version.py index 9d5fd39a4c03..53cb05e18cb5 100644 --- a/packages/google-area120-tables/google/area120/tables/gapic_version.py +++ b/packages/google-area120-tables/google/area120/tables/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.11.14" # {x-release-please-version} +__version__ = "0.11.15" # {x-release-please-version} diff --git a/packages/google-area120-tables/google/area120/tables_v1alpha1/gapic_version.py b/packages/google-area120-tables/google/area120/tables_v1alpha1/gapic_version.py index 9d5fd39a4c03..53cb05e18cb5 100644 --- a/packages/google-area120-tables/google/area120/tables_v1alpha1/gapic_version.py +++ b/packages/google-area120-tables/google/area120/tables_v1alpha1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.11.14" # {x-release-please-version} +__version__ = "0.11.15" # {x-release-please-version} diff --git a/packages/google-area120-tables/google/area120/tables_v1alpha1/services/tables_service/client.py b/packages/google-area120-tables/google/area120/tables_v1alpha1/services/tables_service/client.py index 7bd8ef7f2863..7ffe8fdd3ff9 100644 --- a/packages/google-area120-tables/google/area120/tables_v1alpha1/services/tables_service/client.py +++ b/packages/google-area120-tables/google/area120/tables_v1alpha1/services/tables_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. 
# from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -523,6 +525,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. diff --git a/packages/google-area120-tables/google/area120/tables_v1alpha1/services/tables_service/transports/rest.py b/packages/google-area120-tables/google/area120/tables_v1alpha1/services/tables_service/transports/rest.py index deb037e7c0af..5037e83a8806 100644 --- a/packages/google-area120-tables/google/area120/tables_v1alpha1/services/tables_service/transports/rest.py +++ b/packages/google-area120-tables/google/area120/tables_v1alpha1/services/tables_service/transports/rest.py @@ -180,12 +180,35 @@ def post_batch_create_rows( ) -> tables.BatchCreateRowsResponse: """Post-rpc interceptor for batch_create_rows - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_batch_create_rows_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the TablesService server but before - it is returned to user code. + it is returned to user code. This `post_batch_create_rows` interceptor runs + before the `post_batch_create_rows_with_metadata` interceptor. """ return response + def post_batch_create_rows_with_metadata( + self, + response: tables.BatchCreateRowsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[tables.BatchCreateRowsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for batch_create_rows + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the TablesService server but before it is returned to user code. + + We recommend only using this `post_batch_create_rows_with_metadata` + interceptor in new development instead of the `post_batch_create_rows` interceptor. + When both interceptors are used, this `post_batch_create_rows_with_metadata` interceptor runs after the + `post_batch_create_rows` interceptor. The (possibly modified) response returned by + `post_batch_create_rows` will be passed to + `post_batch_create_rows_with_metadata`. + """ + return response, metadata + def pre_batch_delete_rows( self, request: tables.BatchDeleteRowsRequest, @@ -215,12 +238,35 @@ def post_batch_update_rows( ) -> tables.BatchUpdateRowsResponse: """Post-rpc interceptor for batch_update_rows - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_batch_update_rows_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response after it is returned by the TablesService server but before - it is returned to user code. + it is returned to user code. This `post_batch_update_rows` interceptor runs + before the `post_batch_update_rows_with_metadata` interceptor. """ return response + def post_batch_update_rows_with_metadata( + self, + response: tables.BatchUpdateRowsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[tables.BatchUpdateRowsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for batch_update_rows + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the TablesService server but before it is returned to user code. + + We recommend only using this `post_batch_update_rows_with_metadata` + interceptor in new development instead of the `post_batch_update_rows` interceptor. + When both interceptors are used, this `post_batch_update_rows_with_metadata` interceptor runs after the + `post_batch_update_rows` interceptor. The (possibly modified) response returned by + `post_batch_update_rows` will be passed to + `post_batch_update_rows_with_metadata`. + """ + return response, metadata + def pre_create_row( self, request: tables.CreateRowRequest, @@ -236,12 +282,33 @@ def pre_create_row( def post_create_row(self, response: tables.Row) -> tables.Row: """Post-rpc interceptor for create_row - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_row_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the TablesService server but before - it is returned to user code. + it is returned to user code. This `post_create_row` interceptor runs + before the `post_create_row_with_metadata` interceptor. """ return response + def post_create_row_with_metadata( + self, response: tables.Row, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[tables.Row, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_row + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the TablesService server but before it is returned to user code. + + We recommend only using this `post_create_row_with_metadata` + interceptor in new development instead of the `post_create_row` interceptor. + When both interceptors are used, this `post_create_row_with_metadata` interceptor runs after the + `post_create_row` interceptor. The (possibly modified) response returned by + `post_create_row` will be passed to + `post_create_row_with_metadata`. + """ + return response, metadata + def pre_delete_row( self, request: tables.DeleteRowRequest, @@ -269,12 +336,33 @@ def pre_get_row( def post_get_row(self, response: tables.Row) -> tables.Row: """Post-rpc interceptor for get_row - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_row_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the TablesService server but before - it is returned to user code. + it is returned to user code. This `post_get_row` interceptor runs + before the `post_get_row_with_metadata` interceptor. 
""" return response + def post_get_row_with_metadata( + self, response: tables.Row, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[tables.Row, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_row + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the TablesService server but before it is returned to user code. + + We recommend only using this `post_get_row_with_metadata` + interceptor in new development instead of the `post_get_row` interceptor. + When both interceptors are used, this `post_get_row_with_metadata` interceptor runs after the + `post_get_row` interceptor. The (possibly modified) response returned by + `post_get_row` will be passed to + `post_get_row_with_metadata`. + """ + return response, metadata + def pre_get_table( self, request: tables.GetTableRequest, @@ -290,12 +378,33 @@ def pre_get_table( def post_get_table(self, response: tables.Table) -> tables.Table: """Post-rpc interceptor for get_table - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_table_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the TablesService server but before - it is returned to user code. + it is returned to user code. This `post_get_table` interceptor runs + before the `post_get_table_with_metadata` interceptor. """ return response + def post_get_table_with_metadata( + self, response: tables.Table, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[tables.Table, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_table + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the TablesService server but before it is returned to user code. + + We recommend only using this `post_get_table_with_metadata` + interceptor in new development instead of the `post_get_table` interceptor. + When both interceptors are used, this `post_get_table_with_metadata` interceptor runs after the + `post_get_table` interceptor. The (possibly modified) response returned by + `post_get_table` will be passed to + `post_get_table_with_metadata`. + """ + return response, metadata + def pre_get_workspace( self, request: tables.GetWorkspaceRequest, @@ -311,12 +420,35 @@ def pre_get_workspace( def post_get_workspace(self, response: tables.Workspace) -> tables.Workspace: """Post-rpc interceptor for get_workspace - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_workspace_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the TablesService server but before - it is returned to user code. + it is returned to user code. This `post_get_workspace` interceptor runs + before the `post_get_workspace_with_metadata` interceptor. """ return response + def post_get_workspace_with_metadata( + self, + response: tables.Workspace, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[tables.Workspace, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_workspace + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the TablesService server but before it is returned to user code. + + We recommend only using this `post_get_workspace_with_metadata` + interceptor in new development instead of the `post_get_workspace` interceptor. 
+ When both interceptors are used, this `post_get_workspace_with_metadata` interceptor runs after the + `post_get_workspace` interceptor. The (possibly modified) response returned by + `post_get_workspace` will be passed to + `post_get_workspace_with_metadata`. + """ + return response, metadata + def pre_list_rows( self, request: tables.ListRowsRequest, @@ -334,12 +466,35 @@ def post_list_rows( ) -> tables.ListRowsResponse: """Post-rpc interceptor for list_rows - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_rows_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the TablesService server but before - it is returned to user code. + it is returned to user code. This `post_list_rows` interceptor runs + before the `post_list_rows_with_metadata` interceptor. """ return response + def post_list_rows_with_metadata( + self, + response: tables.ListRowsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[tables.ListRowsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_rows + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the TablesService server but before it is returned to user code. + + We recommend only using this `post_list_rows_with_metadata` + interceptor in new development instead of the `post_list_rows` interceptor. + When both interceptors are used, this `post_list_rows_with_metadata` interceptor runs after the + `post_list_rows` interceptor. The (possibly modified) response returned by + `post_list_rows` will be passed to + `post_list_rows_with_metadata`. + """ + return response, metadata + def pre_list_tables( self, request: tables.ListTablesRequest, @@ -357,12 +512,35 @@ def post_list_tables( ) -> tables.ListTablesResponse: """Post-rpc interceptor for list_tables - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_tables_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the TablesService server but before - it is returned to user code. + it is returned to user code. This `post_list_tables` interceptor runs + before the `post_list_tables_with_metadata` interceptor. """ return response + def post_list_tables_with_metadata( + self, + response: tables.ListTablesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[tables.ListTablesResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_tables + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the TablesService server but before it is returned to user code. + + We recommend only using this `post_list_tables_with_metadata` + interceptor in new development instead of the `post_list_tables` interceptor. + When both interceptors are used, this `post_list_tables_with_metadata` interceptor runs after the + `post_list_tables` interceptor. The (possibly modified) response returned by + `post_list_tables` will be passed to + `post_list_tables_with_metadata`. + """ + return response, metadata + def pre_list_workspaces( self, request: tables.ListWorkspacesRequest, @@ -380,12 +558,35 @@ def post_list_workspaces( ) -> tables.ListWorkspacesResponse: """Post-rpc interceptor for list_workspaces - Override in a subclass to manipulate the response + DEPRECATED. 
Please use the `post_list_workspaces_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the TablesService server but before - it is returned to user code. + it is returned to user code. This `post_list_workspaces` interceptor runs + before the `post_list_workspaces_with_metadata` interceptor. """ return response + def post_list_workspaces_with_metadata( + self, + response: tables.ListWorkspacesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[tables.ListWorkspacesResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_workspaces + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the TablesService server but before it is returned to user code. + + We recommend only using this `post_list_workspaces_with_metadata` + interceptor in new development instead of the `post_list_workspaces` interceptor. + When both interceptors are used, this `post_list_workspaces_with_metadata` interceptor runs after the + `post_list_workspaces` interceptor. The (possibly modified) response returned by + `post_list_workspaces` will be passed to + `post_list_workspaces_with_metadata`. + """ + return response, metadata + def pre_update_row( self, request: tables.UpdateRowRequest, @@ -401,12 +602,33 @@ def pre_update_row( def post_update_row(self, response: tables.Row) -> tables.Row: """Post-rpc interceptor for update_row - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_row_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the TablesService server but before - it is returned to user code. + it is returned to user code. This `post_update_row` interceptor runs + before the `post_update_row_with_metadata` interceptor. """ return response + def post_update_row_with_metadata( + self, response: tables.Row, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[tables.Row, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_row + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the TablesService server but before it is returned to user code. + + We recommend only using this `post_update_row_with_metadata` + interceptor in new development instead of the `post_update_row` interceptor. + When both interceptors are used, this `post_update_row_with_metadata` interceptor runs after the + `post_update_row` interceptor. The (possibly modified) response returned by + `post_update_row` will be passed to + `post_update_row_with_metadata`. 
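# A hedged sketch (not part of the generated transport) of how the new
# *_with_metadata hooks documented above might be used from application code: a
# TablesServiceRestInterceptor subclass overrides post_list_rows_with_metadata to
# inspect the HTTP response headers that the transport now forwards. The logging
# behaviour and interceptor name are illustrative only.
import logging

from google.area120.tables_v1alpha1.services.tables_service.transports.rest import (
    TablesServiceRestInterceptor,
)


class HeaderLoggingInterceptor(TablesServiceRestInterceptor):
    def post_list_rows_with_metadata(self, response, metadata):
        # metadata is a sequence of (header name, value) pairs built from the
        # HTTP response; both values may be returned modified.
        for key, value in metadata:
            logging.debug("list_rows response header %s=%s", key, value)
        return response, metadata

# The subclass would typically be wired in through the REST transport's `interceptor`
# argument (see TablesServiceRestTransport in this file), with the older
# post_list_rows hook left untouched so existing overrides keep working.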
+ """ + return response, metadata + @dataclasses.dataclass class TablesServiceRestStub: @@ -635,6 +857,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_batch_create_rows(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_batch_create_rows_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -901,6 +1127,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_batch_update_rows(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_batch_update_rows_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1054,6 +1284,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_row(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_row_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1308,6 +1542,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_row(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_row_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1453,6 +1691,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_table(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_table_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1594,6 +1836,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_workspace(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_workspace_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1741,6 +1987,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_rows(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_rows_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1888,6 +2138,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_tables(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_tables_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2031,6 +2285,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = 
self._interceptor.post_list_workspaces(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_workspaces_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2184,6 +2442,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_row(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_row_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-area120-tables/samples/generated_samples/snippet_metadata_google.area120.tables.v1alpha1.json b/packages/google-area120-tables/samples/generated_samples/snippet_metadata_google.area120.tables.v1alpha1.json index c10d607058a5..e0a7d3776f47 100644 --- a/packages/google-area120-tables/samples/generated_samples/snippet_metadata_google.area120.tables.v1alpha1.json +++ b/packages/google-area120-tables/samples/generated_samples/snippet_metadata_google.area120.tables.v1alpha1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-area120-tables", - "version": "0.11.14" + "version": "0.11.15" }, "snippets": [ { diff --git a/packages/google-area120-tables/tests/unit/gapic/tables_v1alpha1/test_tables_service.py b/packages/google-area120-tables/tests/unit/gapic/tables_v1alpha1/test_tables_service.py index 42cb49517258..baab63d7465b 100644 --- a/packages/google-area120-tables/tests/unit/gapic/tables_v1alpha1/test_tables_service.py +++ b/packages/google-area120-tables/tests/unit/gapic/tables_v1alpha1/test_tables_service.py @@ -62,6 +62,13 @@ ) from google.area120.tables_v1alpha1.types import tables +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -316,6 +323,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
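# A hypothetical caller-side sketch of what the CRED_INFO_JSON tests below verify:
# when an auth-related call fails, the JSON string appended by
# _add_cred_info_for_auth_errors can be recovered from the exception's details.
# The table name and print statements are placeholders, not part of the test suite.
import json

from google.api_core import exceptions as core_exceptions
from google.area120.tables_v1alpha1 import TablesServiceClient


def explain_auth_failure(client: TablesServiceClient, table_name: str) -> None:
    try:
        client.get_table(name=table_name)
    except core_exceptions.GoogleAPICallError as exc:
        for detail in exc.details:
            # Credential info is appended as a JSON-encoded string; other details
            # (including any original ones) are left untouched.
            try:
                info = json.loads(detail)
            except (TypeError, ValueError):
                continue
            print("failed as principal:", info.get("principal"))
            print("credential type:", info.get("credential_type"))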
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = TablesServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = TablesServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -7679,10 +7729,13 @@ def test_get_table_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.TablesServiceRestInterceptor, "post_get_table" ) as post, mock.patch.object( + transports.TablesServiceRestInterceptor, "post_get_table_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.TablesServiceRestInterceptor, "pre_get_table" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = tables.GetTableRequest.pb(tables.GetTableRequest()) transcode.return_value = { "method": "post", @@ -7704,6 +7757,7 @@ def test_get_table_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = tables.Table() + post_with_metadata.return_value = tables.Table(), metadata client.get_table( request, @@ -7715,6 +7769,7 @@ def test_get_table_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_tables_rest_bad_request(request_type=tables.ListTablesRequest): @@ -7797,10 +7852,13 @@ def test_list_tables_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.TablesServiceRestInterceptor, "post_list_tables" ) as post, mock.patch.object( + transports.TablesServiceRestInterceptor, "post_list_tables_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.TablesServiceRestInterceptor, "pre_list_tables" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = tables.ListTablesRequest.pb(tables.ListTablesRequest()) transcode.return_value = { "method": "post", @@ -7822,6 +7880,7 @@ def test_list_tables_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = tables.ListTablesResponse() + post_with_metadata.return_value = tables.ListTablesResponse(), metadata client.list_tables( request, @@ -7833,6 +7892,7 @@ def test_list_tables_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def 
test_get_workspace_rest_bad_request(request_type=tables.GetWorkspaceRequest): @@ -7917,10 +7977,13 @@ def test_get_workspace_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.TablesServiceRestInterceptor, "post_get_workspace" ) as post, mock.patch.object( + transports.TablesServiceRestInterceptor, "post_get_workspace_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.TablesServiceRestInterceptor, "pre_get_workspace" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = tables.GetWorkspaceRequest.pb(tables.GetWorkspaceRequest()) transcode.return_value = { "method": "post", @@ -7942,6 +8005,7 @@ def test_get_workspace_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = tables.Workspace() + post_with_metadata.return_value = tables.Workspace(), metadata client.get_workspace( request, @@ -7953,6 +8017,7 @@ def test_get_workspace_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_workspaces_rest_bad_request(request_type=tables.ListWorkspacesRequest): @@ -8035,10 +8100,13 @@ def test_list_workspaces_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.TablesServiceRestInterceptor, "post_list_workspaces" ) as post, mock.patch.object( + transports.TablesServiceRestInterceptor, "post_list_workspaces_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.TablesServiceRestInterceptor, "pre_list_workspaces" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = tables.ListWorkspacesRequest.pb(tables.ListWorkspacesRequest()) transcode.return_value = { "method": "post", @@ -8062,6 +8130,7 @@ def test_list_workspaces_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = tables.ListWorkspacesResponse() + post_with_metadata.return_value = tables.ListWorkspacesResponse(), metadata client.list_workspaces( request, @@ -8073,6 +8142,7 @@ def test_list_workspaces_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_row_rest_bad_request(request_type=tables.GetRowRequest): @@ -8155,10 +8225,13 @@ def test_get_row_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.TablesServiceRestInterceptor, "post_get_row" ) as post, mock.patch.object( + transports.TablesServiceRestInterceptor, "post_get_row_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.TablesServiceRestInterceptor, "pre_get_row" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = tables.GetRowRequest.pb(tables.GetRowRequest()) transcode.return_value = { "method": "post", @@ -8180,6 +8253,7 @@ def test_get_row_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = tables.Row() + post_with_metadata.return_value = tables.Row(), metadata client.get_row( request, @@ -8191,6 +8265,7 @@ def test_get_row_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_rows_rest_bad_request(request_type=tables.ListRowsRequest): @@ -8273,10 +8348,13 @@ def test_list_rows_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( 
transports.TablesServiceRestInterceptor, "post_list_rows" ) as post, mock.patch.object( + transports.TablesServiceRestInterceptor, "post_list_rows_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.TablesServiceRestInterceptor, "pre_list_rows" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = tables.ListRowsRequest.pb(tables.ListRowsRequest()) transcode.return_value = { "method": "post", @@ -8298,6 +8376,7 @@ def test_list_rows_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = tables.ListRowsResponse() + post_with_metadata.return_value = tables.ListRowsResponse(), metadata client.list_rows( request, @@ -8309,6 +8388,7 @@ def test_list_rows_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_row_rest_bad_request(request_type=tables.CreateRowRequest): @@ -8459,10 +8539,13 @@ def test_create_row_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.TablesServiceRestInterceptor, "post_create_row" ) as post, mock.patch.object( + transports.TablesServiceRestInterceptor, "post_create_row_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.TablesServiceRestInterceptor, "pre_create_row" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = tables.CreateRowRequest.pb(tables.CreateRowRequest()) transcode.return_value = { "method": "post", @@ -8484,6 +8567,7 @@ def test_create_row_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = tables.Row() + post_with_metadata.return_value = tables.Row(), metadata client.create_row( request, @@ -8495,6 +8579,7 @@ def test_create_row_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_batch_create_rows_rest_bad_request(request_type=tables.BatchCreateRowsRequest): @@ -8574,10 +8659,13 @@ def test_batch_create_rows_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.TablesServiceRestInterceptor, "post_batch_create_rows" ) as post, mock.patch.object( + transports.TablesServiceRestInterceptor, "post_batch_create_rows_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.TablesServiceRestInterceptor, "pre_batch_create_rows" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = tables.BatchCreateRowsRequest.pb(tables.BatchCreateRowsRequest()) transcode.return_value = { "method": "post", @@ -8601,6 +8689,7 @@ def test_batch_create_rows_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = tables.BatchCreateRowsResponse() + post_with_metadata.return_value = tables.BatchCreateRowsResponse(), metadata client.batch_create_rows( request, @@ -8612,6 +8701,7 @@ def test_batch_create_rows_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_row_rest_bad_request(request_type=tables.UpdateRowRequest): @@ -8762,10 +8852,13 @@ def test_update_row_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.TablesServiceRestInterceptor, "post_update_row" ) as post, mock.patch.object( + transports.TablesServiceRestInterceptor, "post_update_row_with_metadata" + ) as 
post_with_metadata, mock.patch.object( transports.TablesServiceRestInterceptor, "pre_update_row" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = tables.UpdateRowRequest.pb(tables.UpdateRowRequest()) transcode.return_value = { "method": "post", @@ -8787,6 +8880,7 @@ def test_update_row_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = tables.Row() + post_with_metadata.return_value = tables.Row(), metadata client.update_row( request, @@ -8798,6 +8892,7 @@ def test_update_row_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_batch_update_rows_rest_bad_request(request_type=tables.BatchUpdateRowsRequest): @@ -8877,10 +8972,13 @@ def test_batch_update_rows_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.TablesServiceRestInterceptor, "post_batch_update_rows" ) as post, mock.patch.object( + transports.TablesServiceRestInterceptor, "post_batch_update_rows_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.TablesServiceRestInterceptor, "pre_batch_update_rows" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = tables.BatchUpdateRowsRequest.pb(tables.BatchUpdateRowsRequest()) transcode.return_value = { "method": "post", @@ -8904,6 +9002,7 @@ def test_batch_update_rows_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = tables.BatchUpdateRowsResponse() + post_with_metadata.return_value = tables.BatchUpdateRowsResponse(), metadata client.batch_update_rows( request, @@ -8915,6 +9014,7 @@ def test_batch_update_rows_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_row_rest_bad_request(request_type=tables.DeleteRowRequest): diff --git a/packages/google-cloud-access-approval/CHANGELOG.md b/packages/google-cloud-access-approval/CHANGELOG.md index df099bd013ee..c5fc14fef7a8 100644 --- a/packages/google-cloud-access-approval/CHANGELOG.md +++ b/packages/google-cloud-access-approval/CHANGELOG.md @@ -1,5 +1,13 @@ # Changelog +## [1.16.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-access-approval-v1.15.0...google-cloud-access-approval-v1.16.0) (2025-02-12) + + +### Features + +* Add REST Interceptors which support reading metadata ([a961bc0](https://github.com/googleapis/google-cloud-python/commit/a961bc029201b72fc4923490aeb3d82781853e6a)) +* Add support for reading selective GAPIC generation methods from service YAML ([a961bc0](https://github.com/googleapis/google-cloud-python/commit/a961bc029201b72fc4923490aeb3d82781853e6a)) + ## [1.15.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-access-approval-v1.14.1...google-cloud-access-approval-v1.15.0) (2024-12-12) diff --git a/packages/google-cloud-access-approval/google/cloud/accessapproval/gapic_version.py b/packages/google-cloud-access-approval/google/cloud/accessapproval/gapic_version.py index cf18a472a8a2..3e0ea3b28f0a 100644 --- a/packages/google-cloud-access-approval/google/cloud/accessapproval/gapic_version.py +++ b/packages/google-cloud-access-approval/google/cloud/accessapproval/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
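# A small illustrative sketch (not from the generated sources) of the metadata the
# REST transports in this diff hand to the *_with_metadata interceptors: the HTTP
# response headers converted into (name, str(value)) tuples, mirroring the
# `[(k, str(v)) for k, v in response.headers.items()]` wiring in each __call__.
# The header values here are made up for the example.
headers = {"content-type": "application/json", "x-goog-request-id": "abc123"}
response_metadata = [(k, str(v)) for k, v in headers.items()]
assert response_metadata == [
    ("content-type", "application/json"),
    ("x-goog-request-id", "abc123"),
]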
# -__version__ = "1.15.0" # {x-release-please-version} +__version__ = "1.16.0" # {x-release-please-version} diff --git a/packages/google-cloud-access-approval/google/cloud/accessapproval_v1/gapic_version.py b/packages/google-cloud-access-approval/google/cloud/accessapproval_v1/gapic_version.py index cf18a472a8a2..3e0ea3b28f0a 100644 --- a/packages/google-cloud-access-approval/google/cloud/accessapproval_v1/gapic_version.py +++ b/packages/google-cloud-access-approval/google/cloud/accessapproval_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.15.0" # {x-release-please-version} +__version__ = "1.16.0" # {x-release-please-version} diff --git a/packages/google-cloud-access-approval/google/cloud/accessapproval_v1/services/access_approval/client.py b/packages/google-cloud-access-approval/google/cloud/accessapproval_v1/services/access_approval/client.py index 118cb667f0e3..daa3028e3f36 100644 --- a/packages/google-cloud-access-approval/google/cloud/accessapproval_v1/services/access_approval/client.py +++ b/packages/google-cloud-access-approval/google/cloud/accessapproval_v1/services/access_approval/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -550,6 +552,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. diff --git a/packages/google-cloud-access-approval/google/cloud/accessapproval_v1/services/access_approval/transports/rest.py b/packages/google-cloud-access-approval/google/cloud/accessapproval_v1/services/access_approval/transports/rest.py index 3b84b1ddbf03..8aa6456a71db 100644 --- a/packages/google-cloud-access-approval/google/cloud/accessapproval_v1/services/access_approval/transports/rest.py +++ b/packages/google-cloud-access-approval/google/cloud/accessapproval_v1/services/access_approval/transports/rest.py @@ -163,12 +163,35 @@ def post_approve_approval_request( ) -> accessapproval.ApprovalRequest: """Post-rpc interceptor for approve_approval_request - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_approve_approval_request_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AccessApproval server but before - it is returned to user code. + it is returned to user code. 
This `post_approve_approval_request` interceptor runs + before the `post_approve_approval_request_with_metadata` interceptor. """ return response + def post_approve_approval_request_with_metadata( + self, + response: accessapproval.ApprovalRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[accessapproval.ApprovalRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for approve_approval_request + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AccessApproval server but before it is returned to user code. + + We recommend only using this `post_approve_approval_request_with_metadata` + interceptor in new development instead of the `post_approve_approval_request` interceptor. + When both interceptors are used, this `post_approve_approval_request_with_metadata` interceptor runs after the + `post_approve_approval_request` interceptor. The (possibly modified) response returned by + `post_approve_approval_request` will be passed to + `post_approve_approval_request_with_metadata`. + """ + return response, metadata + def pre_delete_access_approval_settings( self, request: accessapproval.DeleteAccessApprovalSettingsMessage, @@ -204,12 +227,35 @@ def post_dismiss_approval_request( ) -> accessapproval.ApprovalRequest: """Post-rpc interceptor for dismiss_approval_request - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_dismiss_approval_request_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AccessApproval server but before - it is returned to user code. + it is returned to user code. This `post_dismiss_approval_request` interceptor runs + before the `post_dismiss_approval_request_with_metadata` interceptor. """ return response + def post_dismiss_approval_request_with_metadata( + self, + response: accessapproval.ApprovalRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[accessapproval.ApprovalRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for dismiss_approval_request + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AccessApproval server but before it is returned to user code. + + We recommend only using this `post_dismiss_approval_request_with_metadata` + interceptor in new development instead of the `post_dismiss_approval_request` interceptor. + When both interceptors are used, this `post_dismiss_approval_request_with_metadata` interceptor runs after the + `post_dismiss_approval_request` interceptor. The (possibly modified) response returned by + `post_dismiss_approval_request` will be passed to + `post_dismiss_approval_request_with_metadata`. + """ + return response, metadata + def pre_get_access_approval_service_account( self, request: accessapproval.GetAccessApprovalServiceAccountMessage, @@ -230,12 +276,38 @@ def post_get_access_approval_service_account( ) -> accessapproval.AccessApprovalServiceAccount: """Post-rpc interceptor for get_access_approval_service_account - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_access_approval_service_account_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AccessApproval server but before - it is returned to user code. + it is returned to user code. 
This `post_get_access_approval_service_account` interceptor runs + before the `post_get_access_approval_service_account_with_metadata` interceptor. """ return response + def post_get_access_approval_service_account_with_metadata( + self, + response: accessapproval.AccessApprovalServiceAccount, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + accessapproval.AccessApprovalServiceAccount, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for get_access_approval_service_account + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AccessApproval server but before it is returned to user code. + + We recommend only using this `post_get_access_approval_service_account_with_metadata` + interceptor in new development instead of the `post_get_access_approval_service_account` interceptor. + When both interceptors are used, this `post_get_access_approval_service_account_with_metadata` interceptor runs after the + `post_get_access_approval_service_account` interceptor. The (possibly modified) response returned by + `post_get_access_approval_service_account` will be passed to + `post_get_access_approval_service_account_with_metadata`. + """ + return response, metadata + def pre_get_access_approval_settings( self, request: accessapproval.GetAccessApprovalSettingsMessage, @@ -256,12 +328,37 @@ def post_get_access_approval_settings( ) -> accessapproval.AccessApprovalSettings: """Post-rpc interceptor for get_access_approval_settings - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_access_approval_settings_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AccessApproval server but before - it is returned to user code. + it is returned to user code. This `post_get_access_approval_settings` interceptor runs + before the `post_get_access_approval_settings_with_metadata` interceptor. """ return response + def post_get_access_approval_settings_with_metadata( + self, + response: accessapproval.AccessApprovalSettings, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + accessapproval.AccessApprovalSettings, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for get_access_approval_settings + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AccessApproval server but before it is returned to user code. + + We recommend only using this `post_get_access_approval_settings_with_metadata` + interceptor in new development instead of the `post_get_access_approval_settings` interceptor. + When both interceptors are used, this `post_get_access_approval_settings_with_metadata` interceptor runs after the + `post_get_access_approval_settings` interceptor. The (possibly modified) response returned by + `post_get_access_approval_settings` will be passed to + `post_get_access_approval_settings_with_metadata`. + """ + return response, metadata + def pre_get_approval_request( self, request: accessapproval.GetApprovalRequestMessage, @@ -282,12 +379,35 @@ def post_get_approval_request( ) -> accessapproval.ApprovalRequest: """Post-rpc interceptor for get_approval_request - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_approval_request_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response after it is returned by the AccessApproval server but before - it is returned to user code. + it is returned to user code. This `post_get_approval_request` interceptor runs + before the `post_get_approval_request_with_metadata` interceptor. """ return response + def post_get_approval_request_with_metadata( + self, + response: accessapproval.ApprovalRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[accessapproval.ApprovalRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_approval_request + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AccessApproval server but before it is returned to user code. + + We recommend only using this `post_get_approval_request_with_metadata` + interceptor in new development instead of the `post_get_approval_request` interceptor. + When both interceptors are used, this `post_get_approval_request_with_metadata` interceptor runs after the + `post_get_approval_request` interceptor. The (possibly modified) response returned by + `post_get_approval_request` will be passed to + `post_get_approval_request_with_metadata`. + """ + return response, metadata + def pre_invalidate_approval_request( self, request: accessapproval.InvalidateApprovalRequestMessage, @@ -308,12 +428,35 @@ def post_invalidate_approval_request( ) -> accessapproval.ApprovalRequest: """Post-rpc interceptor for invalidate_approval_request - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_invalidate_approval_request_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AccessApproval server but before - it is returned to user code. + it is returned to user code. This `post_invalidate_approval_request` interceptor runs + before the `post_invalidate_approval_request_with_metadata` interceptor. """ return response + def post_invalidate_approval_request_with_metadata( + self, + response: accessapproval.ApprovalRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[accessapproval.ApprovalRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for invalidate_approval_request + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AccessApproval server but before it is returned to user code. + + We recommend only using this `post_invalidate_approval_request_with_metadata` + interceptor in new development instead of the `post_invalidate_approval_request` interceptor. + When both interceptors are used, this `post_invalidate_approval_request_with_metadata` interceptor runs after the + `post_invalidate_approval_request` interceptor. The (possibly modified) response returned by + `post_invalidate_approval_request` will be passed to + `post_invalidate_approval_request_with_metadata`. + """ + return response, metadata + def pre_list_approval_requests( self, request: accessapproval.ListApprovalRequestsMessage, @@ -334,12 +477,38 @@ def post_list_approval_requests( ) -> accessapproval.ListApprovalRequestsResponse: """Post-rpc interceptor for list_approval_requests - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_approval_requests_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response after it is returned by the AccessApproval server but before - it is returned to user code. + it is returned to user code. This `post_list_approval_requests` interceptor runs + before the `post_list_approval_requests_with_metadata` interceptor. """ return response + def post_list_approval_requests_with_metadata( + self, + response: accessapproval.ListApprovalRequestsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + accessapproval.ListApprovalRequestsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_approval_requests + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AccessApproval server but before it is returned to user code. + + We recommend only using this `post_list_approval_requests_with_metadata` + interceptor in new development instead of the `post_list_approval_requests` interceptor. + When both interceptors are used, this `post_list_approval_requests_with_metadata` interceptor runs after the + `post_list_approval_requests` interceptor. The (possibly modified) response returned by + `post_list_approval_requests` will be passed to + `post_list_approval_requests_with_metadata`. + """ + return response, metadata + def pre_update_access_approval_settings( self, request: accessapproval.UpdateAccessApprovalSettingsMessage, @@ -360,12 +529,37 @@ def post_update_access_approval_settings( ) -> accessapproval.AccessApprovalSettings: """Post-rpc interceptor for update_access_approval_settings - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_access_approval_settings_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AccessApproval server but before - it is returned to user code. + it is returned to user code. This `post_update_access_approval_settings` interceptor runs + before the `post_update_access_approval_settings_with_metadata` interceptor. """ return response + def post_update_access_approval_settings_with_metadata( + self, + response: accessapproval.AccessApprovalSettings, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + accessapproval.AccessApprovalSettings, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for update_access_approval_settings + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AccessApproval server but before it is returned to user code. + + We recommend only using this `post_update_access_approval_settings_with_metadata` + interceptor in new development instead of the `post_update_access_approval_settings` interceptor. + When both interceptors are used, this `post_update_access_approval_settings_with_metadata` interceptor runs after the + `post_update_access_approval_settings` interceptor. The (possibly modified) response returned by + `post_update_access_approval_settings` will be passed to + `post_update_access_approval_settings_with_metadata`. 
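# A minimal sketch (not the generated transport code) of the ordering contract the
# docstrings above describe: when both hooks are overridden, post_<rpc> runs first
# and whatever it returns is what post_<rpc>_with_metadata receives. Plain strings
# stand in for the proto responses purely for illustration.
class OrderingDemoInterceptor:
    def post_get_approval_request(self, response):
        return response + " [post]"

    def post_get_approval_request_with_metadata(self, response, metadata):
        return response + " [post_with_metadata]", metadata


demo = OrderingDemoInterceptor()
resp = demo.post_get_approval_request("approval-request")
resp, _ = demo.post_get_approval_request_with_metadata(resp, [("x-demo", "1")])
print(resp)  # approval-request [post] [post_with_metadata]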
+ """ + return response, metadata + @dataclasses.dataclass class AccessApprovalRestStub: @@ -618,6 +812,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_approve_approval_request(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_approve_approval_request_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -885,6 +1083,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_dismiss_approval_request(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_dismiss_approval_request_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1036,6 +1238,13 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_access_approval_service_account(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_get_access_approval_service_account_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1188,6 +1397,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_access_approval_settings(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_access_approval_settings_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1334,6 +1547,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_approval_request(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_approval_request_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1489,6 +1706,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_invalidate_approval_request(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_invalidate_approval_request_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1634,6 +1855,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_approval_requests(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_approval_requests_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1792,6 +2017,13 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_access_approval_settings(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_update_access_approval_settings_with_metadata( + resp, 
response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-access-approval/samples/generated_samples/snippet_metadata_google.cloud.accessapproval.v1.json b/packages/google-cloud-access-approval/samples/generated_samples/snippet_metadata_google.cloud.accessapproval.v1.json index b5873343671b..5de0669a30c2 100644 --- a/packages/google-cloud-access-approval/samples/generated_samples/snippet_metadata_google.cloud.accessapproval.v1.json +++ b/packages/google-cloud-access-approval/samples/generated_samples/snippet_metadata_google.cloud.accessapproval.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-access-approval", - "version": "1.15.0" + "version": "1.16.0" }, "snippets": [ { diff --git a/packages/google-cloud-access-approval/tests/unit/gapic/accessapproval_v1/test_access_approval.py b/packages/google-cloud-access-approval/tests/unit/gapic/accessapproval_v1/test_access_approval.py index f81e6a845b87..25a31f555255 100644 --- a/packages/google-cloud-access-approval/tests/unit/gapic/accessapproval_v1/test_access_approval.py +++ b/packages/google-cloud-access-approval/tests/unit/gapic/accessapproval_v1/test_access_approval.py @@ -62,6 +62,13 @@ ) from google.cloud.accessapproval_v1.types import accessapproval +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -320,6 +327,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = AccessApprovalClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = AccessApprovalClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -5615,10 +5665,14 @@ def test_list_approval_requests_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AccessApprovalRestInterceptor, "post_list_approval_requests" ) as post, mock.patch.object( + transports.AccessApprovalRestInterceptor, + "post_list_approval_requests_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AccessApprovalRestInterceptor, 
"pre_list_approval_requests" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = accessapproval.ListApprovalRequestsMessage.pb( accessapproval.ListApprovalRequestsMessage() ) @@ -5644,6 +5698,10 @@ def test_list_approval_requests_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = accessapproval.ListApprovalRequestsResponse() + post_with_metadata.return_value = ( + accessapproval.ListApprovalRequestsResponse(), + metadata, + ) client.list_approval_requests( request, @@ -5655,6 +5713,7 @@ def test_list_approval_requests_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_approval_request_rest_bad_request( @@ -5741,10 +5800,14 @@ def test_get_approval_request_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AccessApprovalRestInterceptor, "post_get_approval_request" ) as post, mock.patch.object( + transports.AccessApprovalRestInterceptor, + "post_get_approval_request_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AccessApprovalRestInterceptor, "pre_get_approval_request" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = accessapproval.GetApprovalRequestMessage.pb( accessapproval.GetApprovalRequestMessage() ) @@ -5770,6 +5833,7 @@ def test_get_approval_request_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = accessapproval.ApprovalRequest() + post_with_metadata.return_value = accessapproval.ApprovalRequest(), metadata client.get_approval_request( request, @@ -5781,6 +5845,7 @@ def test_get_approval_request_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_approve_approval_request_rest_bad_request( @@ -5867,10 +5932,14 @@ def test_approve_approval_request_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AccessApprovalRestInterceptor, "post_approve_approval_request" ) as post, mock.patch.object( + transports.AccessApprovalRestInterceptor, + "post_approve_approval_request_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AccessApprovalRestInterceptor, "pre_approve_approval_request" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = accessapproval.ApproveApprovalRequestMessage.pb( accessapproval.ApproveApprovalRequestMessage() ) @@ -5896,6 +5965,7 @@ def test_approve_approval_request_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = accessapproval.ApprovalRequest() + post_with_metadata.return_value = accessapproval.ApprovalRequest(), metadata client.approve_approval_request( request, @@ -5907,6 +5977,7 @@ def test_approve_approval_request_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_dismiss_approval_request_rest_bad_request( @@ -5993,10 +6064,14 @@ def test_dismiss_approval_request_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AccessApprovalRestInterceptor, "post_dismiss_approval_request" ) as post, mock.patch.object( + transports.AccessApprovalRestInterceptor, + "post_dismiss_approval_request_with_metadata", + ) as post_with_metadata, mock.patch.object( 
transports.AccessApprovalRestInterceptor, "pre_dismiss_approval_request" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = accessapproval.DismissApprovalRequestMessage.pb( accessapproval.DismissApprovalRequestMessage() ) @@ -6022,6 +6097,7 @@ def test_dismiss_approval_request_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = accessapproval.ApprovalRequest() + post_with_metadata.return_value = accessapproval.ApprovalRequest(), metadata client.dismiss_approval_request( request, @@ -6033,6 +6109,7 @@ def test_dismiss_approval_request_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_invalidate_approval_request_rest_bad_request( @@ -6119,10 +6196,14 @@ def test_invalidate_approval_request_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AccessApprovalRestInterceptor, "post_invalidate_approval_request" ) as post, mock.patch.object( + transports.AccessApprovalRestInterceptor, + "post_invalidate_approval_request_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AccessApprovalRestInterceptor, "pre_invalidate_approval_request" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = accessapproval.InvalidateApprovalRequestMessage.pb( accessapproval.InvalidateApprovalRequestMessage() ) @@ -6148,6 +6229,7 @@ def test_invalidate_approval_request_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = accessapproval.ApprovalRequest() + post_with_metadata.return_value = accessapproval.ApprovalRequest(), metadata client.invalidate_approval_request( request, @@ -6159,6 +6241,7 @@ def test_invalidate_approval_request_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_access_approval_settings_rest_bad_request( @@ -6253,10 +6336,14 @@ def test_get_access_approval_settings_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AccessApprovalRestInterceptor, "post_get_access_approval_settings" ) as post, mock.patch.object( + transports.AccessApprovalRestInterceptor, + "post_get_access_approval_settings_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AccessApprovalRestInterceptor, "pre_get_access_approval_settings" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = accessapproval.GetAccessApprovalSettingsMessage.pb( accessapproval.GetAccessApprovalSettingsMessage() ) @@ -6282,6 +6369,10 @@ def test_get_access_approval_settings_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = accessapproval.AccessApprovalSettings() + post_with_metadata.return_value = ( + accessapproval.AccessApprovalSettings(), + metadata, + ) client.get_access_approval_settings( request, @@ -6293,6 +6384,7 @@ def test_get_access_approval_settings_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_access_approval_settings_rest_bad_request( @@ -6470,10 +6562,14 @@ def test_update_access_approval_settings_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AccessApprovalRestInterceptor, "post_update_access_approval_settings" ) as 
post, mock.patch.object( + transports.AccessApprovalRestInterceptor, + "post_update_access_approval_settings_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AccessApprovalRestInterceptor, "pre_update_access_approval_settings" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = accessapproval.UpdateAccessApprovalSettingsMessage.pb( accessapproval.UpdateAccessApprovalSettingsMessage() ) @@ -6499,6 +6595,10 @@ def test_update_access_approval_settings_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = accessapproval.AccessApprovalSettings() + post_with_metadata.return_value = ( + accessapproval.AccessApprovalSettings(), + metadata, + ) client.update_access_approval_settings( request, @@ -6510,6 +6610,7 @@ def test_update_access_approval_settings_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_access_approval_settings_rest_bad_request( @@ -6706,11 +6807,15 @@ def test_get_access_approval_service_account_rest_interceptors(null_interceptor) transports.AccessApprovalRestInterceptor, "post_get_access_approval_service_account", ) as post, mock.patch.object( + transports.AccessApprovalRestInterceptor, + "post_get_access_approval_service_account_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AccessApprovalRestInterceptor, "pre_get_access_approval_service_account", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = accessapproval.GetAccessApprovalServiceAccountMessage.pb( accessapproval.GetAccessApprovalServiceAccountMessage() ) @@ -6736,6 +6841,10 @@ def test_get_access_approval_service_account_rest_interceptors(null_interceptor) ] pre.return_value = request, metadata post.return_value = accessapproval.AccessApprovalServiceAccount() + post_with_metadata.return_value = ( + accessapproval.AccessApprovalServiceAccount(), + metadata, + ) client.get_access_approval_service_account( request, @@ -6747,6 +6856,7 @@ def test_get_access_approval_service_account_rest_interceptors(null_interceptor) pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_initialize_client_w_rest(): diff --git a/packages/google-cloud-access-context-manager/.OwlBot.yaml b/packages/google-cloud-access-context-manager/.OwlBot.yaml new file mode 100644 index 000000000000..783efc8a3bc0 --- /dev/null +++ b/packages/google-cloud-access-context-manager/.OwlBot.yaml @@ -0,0 +1,28 @@ +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
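(Aside, not part of the generated changes in this diff.) The transport and test hunks earlier in this diff add a `post_<rpc>_with_metadata` hook next to each existing `post_<rpc>` interceptor hook: the transport builds `response_metadata` from the HTTP response headers and calls the hook with `(response, metadata)`, expecting the same pair back. A minimal sketch of how a user-written interceptor could take advantage of that hook is shown below; the import paths follow the usual GAPIC package layout and the transport wiring mirrors the unit-test pattern above, both of which are assumptions rather than text taken from this diff, and `LoggingInterceptor` is a hypothetical name.

# Minimal sketch (assumptions: standard GAPIC module layout, interceptor wired
# via the transport as in the unit tests above).
from google.auth.credentials import AnonymousCredentials

from google.cloud.accessapproval_v1 import AccessApprovalClient
from google.cloud.accessapproval_v1.services.access_approval.transports.rest import (
    AccessApprovalRestInterceptor,
    AccessApprovalRestTransport,
)


class LoggingInterceptor(AccessApprovalRestInterceptor):
    # Hypothetical interceptor: inspect the response headers surfaced by the
    # new *_with_metadata hook and return the response/metadata pair unchanged.
    def post_approve_approval_request_with_metadata(self, response, metadata):
        # `metadata` is a list of (header, value) tuples built from the HTTP
        # response headers, as in the generated transport code above.
        for key, value in metadata:
            print(f"response header {key}: {value}")
        return response, metadata


# AnonymousCredentials keeps the sketch self-contained; real calls need
# proper credentials and a valid resource name.
client = AccessApprovalClient(
    transport=AccessApprovalRestTransport(
        interceptor=LoggingInterceptor(),
        credentials=AnonymousCredentials(),
    )
)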
+ + +deep-remove-regex: + - /owl-bot-staging + +deep-copy-regex: + - source: /google/identity/accesscontextmanager/type/(accesscontextmanager-type-py)/(device_resources.*) + dest: /owl-bot-staging/google-cloud-access-context-manager/$1/google/identity/accesscontextmanager/type/$2 + - source: /google/identity/accesscontextmanager/v1/(identity-accesscontextmanager-v1-py)/(.*access.*) + dest: /owl-bot-staging/google-cloud-access-context-manager/$1/google/identity/accesscontextmanager/v1/$2 + - source: /google/identity/accesscontextmanager/v1/(identity-accesscontextmanager-v1-py)/(service_perimeter.*) + dest: /owl-bot-staging/google-cloud-access-context-manager/$1/google/identity/accesscontextmanager/v1/$2 + +begin-after-commit-hash: d7c95df3ab1ea1b4c22a4542bad4924cc46d1388 + diff --git a/packages/google-cloud-access-context-manager/.flake8 b/packages/google-cloud-access-context-manager/.flake8 new file mode 100644 index 000000000000..32986c79287a --- /dev/null +++ b/packages/google-cloud-access-context-manager/.flake8 @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! +[flake8] +ignore = E203, E231, E266, E501, W503 +exclude = + # Exclude generated code. + **/proto/** + **/gapic/** + **/services/** + **/types/** + *_pb2.py + + # Standard linting exemptions. + **/.nox/** + __pycache__, + .git, + *.pyc, + conf.py diff --git a/packages/google-cloud-access-context-manager/.gitignore b/packages/google-cloud-access-context-manager/.gitignore new file mode 100644 index 000000000000..b4243ced74e4 --- /dev/null +++ b/packages/google-cloud-access-context-manager/.gitignore @@ -0,0 +1,63 @@ +*.py[cod] +*.sw[op] + +# C extensions +*.so + +# Packages +*.egg +*.egg-info +dist +build +eggs +.eggs +parts +bin +var +sdist +develop-eggs +.installed.cfg +lib +lib64 +__pycache__ + +# Installer logs +pip-log.txt + +# Unit test / coverage reports +.coverage +.nox +.cache +.pytest_cache + + +# Mac +.DS_Store + +# JetBrains +.idea + +# VS Code +.vscode + +# emacs +*~ + +# Built documentation +docs/_build +bigquery/docs/generated +docs.metadata + +# Virtual environment +env/ + +# Test logs +coverage.xml +*sponge_log.xml + +# System test environment variables. +system_tests/local_test_setup + +# Make sure a generated file isn't accidentally committed. +pylintrc +pylintrc.test diff --git a/packages/google-cloud-access-context-manager/.pre-commit-config.yaml b/packages/google-cloud-access-context-manager/.pre-commit-config.yaml new file mode 100644 index 000000000000..1d74695f70b6 --- /dev/null +++ b/packages/google-cloud-access-context-manager/.pre-commit-config.yaml @@ -0,0 +1,31 @@ +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# See https://pre-commit.com for more information +# See https://pre-commit.com/hooks.html for more hooks +repos: +- repo: https://github.com/pre-commit/pre-commit-hooks + rev: v4.0.1 + hooks: + - id: trailing-whitespace + - id: end-of-file-fixer + - id: check-yaml +- repo: https://github.com/psf/black + rev: 23.7.0 + hooks: + - id: black +- repo: https://github.com/pycqa/flake8 + rev: 6.1.0 + hooks: + - id: flake8 diff --git a/packages/google-cloud-access-context-manager/.repo-metadata.json b/packages/google-cloud-access-context-manager/.repo-metadata.json new file mode 100644 index 000000000000..dbad94d6f701 --- /dev/null +++ b/packages/google-cloud-access-context-manager/.repo-metadata.json @@ -0,0 +1,15 @@ +{ + "name": "accesscontextmanager", + "name_pretty": "Access Context Manager", + "client_documentation": "https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-access-context-manager", + "product_documentation": "https://cloud.google.com/access-context-manager/docs/overview", + "issue_tracker": "https://github.com/googleapis/google-cloud-python/issues", + "release_level": "preview", + "language": "python", + "library_type": "GAPIC_AUTO", + "repo": "googleapis/google-cloud-python", + "distribution_name": "google-cloud-access-context-manager", + "default_version": "apiVersion", + "codeowner_team": "", + "api_shortname": "accesscontextmanager" +} diff --git a/packages/google-cloud-access-context-manager/CHANGELOG.md b/packages/google-cloud-access-context-manager/CHANGELOG.md new file mode 100644 index 000000000000..e85daa5abc46 --- /dev/null +++ b/packages/google-cloud-access-context-manager/CHANGELOG.md @@ -0,0 +1,157 @@ +# Changelog + +## [0.2.1](https://github.com/googleapis/python-access-context-manager/compare/v0.2.0...v0.2.1) (2024-08-14) + + +### Bug Fixes + +* **deps:** Require protobuf>=3.20.2, protobuf<6 ([ef6827b](https://github.com/googleapis/python-access-context-manager/commit/ef6827b2aa9519abf760ddd20cd9ea0f228c4272)) +* Regenerate pb2 files for compatibility with protobuf 5.x ([ef6827b](https://github.com/googleapis/python-access-context-manager/commit/ef6827b2aa9519abf760ddd20cd9ea0f228c4272)) + +## [0.2.0](https://github.com/googleapis/python-access-context-manager/compare/v0.1.16...v0.2.0) (2024-02-15) + + +### Features + +* Add `egress_policies` and `ingress_policies` fields to `ServicePerimeter` [f31ec7d](https://github.com/googleapis/googleapis/commit/f31ec7d4d1f27fd76594165ae41a344465e9f228) ([eabf473](https://github.com/googleapis/python-access-context-manager/commit/eabf4737124074f8107a0dac716cb68134edf721)) +* Add `google/identity/accesscontextmanager/v1/access_context_manager_pb2.py` [0b261de](https://github.com/googleapis/googleapis/commit/0b261def1cb4c61a9ddbb8bf14b103ce6add1bce) ([eabf473](https://github.com/googleapis/python-access-context-manager/commit/eabf4737124074f8107a0dac716cb68134edf721)) +* Add support for Python 3.12 ([#188](https://github.com/googleapis/python-access-context-manager/issues/188)) 
([c379c57](https://github.com/googleapis/python-access-context-manager/commit/c379c573573207ef6e6f355aa569e19149ef2824)) +* Introduce compatibility with native namespace packages ([#187](https://github.com/googleapis/python-access-context-manager/issues/187)) ([8ffdcd3](https://github.com/googleapis/python-access-context-manager/commit/8ffdcd31ecc4a4b8e18b8534257ff02c391339f7)) + + +### Bug Fixes + +* **deps:** Require google-api-core >= 1.34.1 ([eabf473](https://github.com/googleapis/python-access-context-manager/commit/eabf4737124074f8107a0dac716cb68134edf721)) +* Migrate to native namespace packages ([#193](https://github.com/googleapis/python-access-context-manager/issues/193)) ([5840132](https://github.com/googleapis/python-access-context-manager/commit/58401328a92f418cb43492459971a7f2b7d712f2)) + +## [0.1.16](https://github.com/googleapis/python-access-context-manager/compare/v0.1.15...v0.1.16) (2023-02-27) + + +### Bug Fixes + +* **deps:** Require google-api-core>=1.34.0,>=2.11.0 ([a835c8e](https://github.com/googleapis/python-access-context-manager/commit/a835c8ef404c56b06fd446ee177bcb59daf0353a)) + +## [0.1.15](https://github.com/googleapis/python-access-context-manager/compare/v0.1.14...v0.1.15) (2022-12-15) + + +### Bug Fixes + +* **deps:** Require protobuf >=3.19.5 ([09d0f03](https://github.com/googleapis/python-access-context-manager/commit/09d0f0342ef60981a57458228435626d69129f70)) + +## [0.1.14](https://github.com/googleapis/python-access-context-manager/compare/v0.1.13...v0.1.14) (2022-08-12) + + +### Bug Fixes + +* **deps:** allow protobuf < 5.0.0 ([#138](https://github.com/googleapis/python-access-context-manager/issues/138)) ([3aec32e](https://github.com/googleapis/python-access-context-manager/commit/3aec32e555659b0581afdc2c2d2ed67c7de26c0d)) + +## [0.1.13](https://github.com/googleapis/python-access-context-manager/compare/v0.1.12...v0.1.13) (2022-07-18) + + +### Bug Fixes + +* require python 3.7+ ([#134](https://github.com/googleapis/python-access-context-manager/issues/134)) ([27a1467](https://github.com/googleapis/python-access-context-manager/commit/27a1467e68d440b7c2a73a6b5dbcfe6bd199dd05)) +* require google-api-core >=1.32.0, >=2.8.0 + + +## [0.1.12](https://github.com/googleapis/python-access-context-manager/compare/v0.1.11...v0.1.12) (2022-06-06) + + +### Documentation + +* fix changelog header to consistent size ([#128](https://github.com/googleapis/python-access-context-manager/issues/128)) ([e295ae6](https://github.com/googleapis/python-access-context-manager/commit/e295ae6b031d86543c54562f1011560272753898)) + +## [0.1.11](https://github.com/googleapis/python-access-context-manager/compare/v0.1.10...v0.1.11) (2022-05-26) + + +### Bug Fixes + +* **deps:** require protobuf>= 3.12.0, <4.0.0dev ([#124](https://github.com/googleapis/python-access-context-manager/issues/124)) ([5c7837e](https://github.com/googleapis/python-access-context-manager/commit/5c7837eca6e49f465df306275f96ebece076364b)) + +## [0.1.10](https://github.com/googleapis/python-access-context-manager/compare/v0.1.9...v0.1.10) (2022-03-04) + + +### Bug Fixes + +* **deps:** require google-api-core>=1.31.5, >=2.3.2 ([#110](https://github.com/googleapis/python-access-context-manager/issues/110)) ([386dc8d](https://github.com/googleapis/python-access-context-manager/commit/386dc8dccbfa4ffee275ae92543b83e9dfc6f05e)) +* regenerate pb2 files ([#108](https://github.com/googleapis/python-access-context-manager/issues/108)) 
([f3ae216](https://github.com/googleapis/python-access-context-manager/commit/f3ae216524db604166447ccec2d646fb038ce3bb)) + +## [0.1.9](https://www.github.com/googleapis/python-access-context-manager/compare/v0.1.8...v0.1.9) (2021-11-12) + + +### Bug Fixes + +* **deps:** require google-api-core >= 1.28.0 ([8845855](https://www.github.com/googleapis/python-access-context-manager/commit/8845855497454dbf62edd65dee958057a959db41)) + +## [0.1.8](https://www.github.com/googleapis/python-access-context-manager/compare/v0.1.7...v0.1.8) (2021-10-04) + + +### Bug Fixes + +* update pin on 'google-api-core' to allow current versions ([#89](https://www.github.com/googleapis/python-access-context-manager/issues/89)) ([1f7b73b](https://www.github.com/googleapis/python-access-context-manager/commit/1f7b73b947011999b82976027ade8218d58ac788)) + +## [0.1.7](https://www.github.com/googleapis/python-access-context-manager/compare/v0.1.6...v0.1.7) (2021-08-23) + + +### Documentation + +* migrate to main branch ([#76](https://www.github.com/googleapis/python-access-context-manager/issues/76)) ([4f36a1d](https://www.github.com/googleapis/python-access-context-manager/commit/4f36a1dad07554ef676c7b01a9f1bd0e132bdb01)) + +## [0.1.6](https://www.github.com/googleapis/python-access-context-manager/compare/v0.1.5...v0.1.6) (2021-07-27) + + +### Documentation + +* add Samples section to CONTRIBUTING.rst ([#65](https://www.github.com/googleapis/python-access-context-manager/issues/65)) ([6091f99](https://www.github.com/googleapis/python-access-context-manager/commit/6091f999347e91f24842030bf1fb2e528cd4a6b5)) + + +### Miscellaneous Chores + +* release as 0.1.6 ([#68](https://www.github.com/googleapis/python-access-context-manager/issues/68)) ([647e651](https://www.github.com/googleapis/python-access-context-manager/commit/647e6513cef26eabb593c6f3e7a41780bc20648c)) + +## [0.1.5](https://www.github.com/googleapis/python-access-context-manager/compare/v0.1.4...v0.1.5) (2021-07-07) + + +### Bug Fixes + +* require google-api-core >= 1.26.0 ([#57](https://www.github.com/googleapis/python-access-context-manager/issues/57)) ([12ddfa5](https://www.github.com/googleapis/python-access-context-manager/commit/12ddfa58a5c4951da5753858701a83b297d38be2)) + +## [0.1.4](https://www.github.com/googleapis/python-access-context-manager/compare/v0.1.3...v0.1.4) (2021-06-22) + + +### Bug Fixes + +* **deps:** require python 3.6 ([#46](https://www.github.com/googleapis/python-access-context-manager/issues/46)) ([3a4c5de](https://www.github.com/googleapis/python-access-context-manager/commit/3a4c5def322acc5bd16bdbeafef6d3235b5eadab)) + + +### Documentation + +* omit mention of Python 2.7 in 'CONTRIBUTING.rst' ([#1127](https://www.github.com/googleapis/python-access-context-manager/issues/1127)) ([#44](https://www.github.com/googleapis/python-access-context-manager/issues/44)) ([5bd362e](https://www.github.com/googleapis/python-access-context-manager/commit/5bd362e10d1fd84f31bca28345560dbb9f71437f)) + +## [0.1.3](https://www.github.com/googleapis/python-access-context-manager/compare/v0.1.2...v0.1.3) (2021-04-14) + + +### Bug Fixes + +* add create_key to protos ([#25](https://www.github.com/googleapis/python-access-context-manager/issues/25)) ([166c54c](https://www.github.com/googleapis/python-access-context-manager/commit/166c54cd73d2cfac6d45df2a676389f252fd73e3)) + +## [0.1.2](https://www.github.com/googleapis/python-access-context-manager/compare/v0.1.1...v0.1.2) (2020-05-08) + + +### Bug Fixes + +* add missing __init__.py 
([5b0036f](https://www.github.com/googleapis/python-access-context-manager/commit/5b0036f6155ea90a7501076487cb048ce1640e0e)) + +## [0.1.1](https://www.github.com/googleapis/python-access-context-manager/compare/v0.1.0...v0.1.1) (2020-05-08) + + +### Bug Fixes + +* fix setup.py ([b2facea](https://www.github.com/googleapis/python-access-context-manager/commit/b2faceabc0aab7a3e1a590d71fef3ede1113a08b)) + +## 0.1.0 (2020-05-07) + + +### Features + +* generate v1 ([a13e3de](https://www.github.com/googleapis/python-access-context-manager/commit/a13e3de91ee249d6d0640977315881f3bef0c844)) diff --git a/packages/google-cloud-access-context-manager/CODE_OF_CONDUCT.md b/packages/google-cloud-access-context-manager/CODE_OF_CONDUCT.md new file mode 100644 index 000000000000..039f43681204 --- /dev/null +++ b/packages/google-cloud-access-context-manager/CODE_OF_CONDUCT.md @@ -0,0 +1,95 @@ + +# Code of Conduct + +## Our Pledge + +In the interest of fostering an open and welcoming environment, we as +contributors and maintainers pledge to making participation in our project and +our community a harassment-free experience for everyone, regardless of age, body +size, disability, ethnicity, gender identity and expression, level of +experience, education, socio-economic status, nationality, personal appearance, +race, religion, or sexual identity and orientation. + +## Our Standards + +Examples of behavior that contributes to creating a positive environment +include: + +* Using welcoming and inclusive language +* Being respectful of differing viewpoints and experiences +* Gracefully accepting constructive criticism +* Focusing on what is best for the community +* Showing empathy towards other community members + +Examples of unacceptable behavior by participants include: + +* The use of sexualized language or imagery and unwelcome sexual attention or + advances +* Trolling, insulting/derogatory comments, and personal or political attacks +* Public or private harassment +* Publishing others' private information, such as a physical or electronic + address, without explicit permission +* Other conduct which could reasonably be considered inappropriate in a + professional setting + +## Our Responsibilities + +Project maintainers are responsible for clarifying the standards of acceptable +behavior and are expected to take appropriate and fair corrective action in +response to any instances of unacceptable behavior. + +Project maintainers have the right and responsibility to remove, edit, or reject +comments, commits, code, wiki edits, issues, and other contributions that are +not aligned to this Code of Conduct, or to ban temporarily or permanently any +contributor for other behaviors that they deem inappropriate, threatening, +offensive, or harmful. + +## Scope + +This Code of Conduct applies both within project spaces and in public spaces +when an individual is representing the project or its community. Examples of +representing a project or community include using an official project e-mail +address, posting via an official social media account, or acting as an appointed +representative at an online or offline event. Representation of a project may be +further defined and clarified by project maintainers. + +This Code of Conduct also applies outside the project spaces when the Project +Steward has a reasonable belief that an individual's behavior may have a +negative impact on the project or its community. 
+ +## Conflict Resolution + +We do not believe that all conflict is bad; healthy debate and disagreement +often yield positive results. However, it is never okay to be disrespectful or +to engage in behavior that violates the project’s code of conduct. + +If you see someone violating the code of conduct, you are encouraged to address +the behavior directly with those involved. Many issues can be resolved quickly +and easily, and this gives people more control over the outcome of their +dispute. If you are unable to resolve the matter for any reason, or if the +behavior is threatening or harassing, report it. We are dedicated to providing +an environment where participants feel welcome and safe. + + +Reports should be directed to *googleapis-stewards@google.com*, the +Project Steward(s) for *Google Cloud Client Libraries*. It is the Project Steward’s duty to +receive and address reported violations of the code of conduct. They will then +work with a committee consisting of representatives from the Open Source +Programs Office and the Google Open Source Strategy team. If for any reason you +are uncomfortable reaching out to the Project Steward, please email +opensource@google.com. + +We will investigate every complaint, but you may not receive a direct response. +We will use our discretion in determining when and how to follow up on reported +incidents, which may range from not taking action to permanent expulsion from +the project and project-sponsored spaces. We will notify the accused of the +report and provide them an opportunity to discuss it before any action is taken. +The identity of the reporter will be omitted from the details of the report +supplied to the accused. In potentially harmful situations, such as ongoing +harassment or threats to anyone's safety, we may take action without notice. + +## Attribution + +This Code of Conduct is adapted from the Contributor Covenant, version 1.4, +available at +https://www.contributor-covenant.org/version/1/4/code-of-conduct.html \ No newline at end of file diff --git a/packages/google-cloud-access-context-manager/CONTRIBUTING.rst b/packages/google-cloud-access-context-manager/CONTRIBUTING.rst new file mode 100644 index 000000000000..a5cea26033a8 --- /dev/null +++ b/packages/google-cloud-access-context-manager/CONTRIBUTING.rst @@ -0,0 +1,273 @@ +.. Generated by synthtool. DO NOT EDIT! +############ +Contributing +############ + +#. **Please sign one of the contributor license agreements below.** +#. Fork the repo, develop and test your code changes, add docs. +#. Make sure that your commit messages clearly describe the changes. +#. Send a pull request. (Please Read: `Faster Pull Request Reviews`_) + +.. _Faster Pull Request Reviews: https://github.com/kubernetes/community/blob/master/contributors/guide/pull-requests.md#best-practices-for-faster-reviews + +.. contents:: Here are some guidelines for hacking on the Google Cloud Client libraries. + +*************** +Adding Features +*************** + +In order to add a feature: + +- The feature must be documented in both the API and narrative + documentation. + +- The feature must work fully on the following CPython versions: + 3.7, 3.8, 3.9, 3.10, 3.11, 3.12 and 3.13 on both UNIX and Windows. + +- The feature must not add unnecessary dependencies (where + "unnecessary" is of course subjective, but new dependencies should + be discussed). 
+ +**************************** +Using a Development Checkout +**************************** + +You'll have to create a development environment using a Git checkout: + +- While logged into your GitHub account, navigate to the + ``google-cloud-python`` `repo`_ on GitHub. + +- Fork and clone the ``google-cloud-python`` repository to your GitHub account by + clicking the "Fork" button. + +- Clone your fork of ``google-cloud-python`` from your GitHub account to your local + computer, substituting your account username and specifying the destination + as ``hack-on-google-cloud-python``. E.g.:: + + $ cd ${HOME} + $ git clone git@github.com:USERNAME/google-cloud-python.git hack-on-google-cloud-python + $ cd hack-on-google-cloud-python + # Configure remotes such that you can pull changes from the googleapis/google-cloud-python + # repository into your local repository. + $ git remote add upstream git@github.com:googleapis/google-cloud-python.git + # fetch and merge changes from upstream into main + $ git fetch upstream + $ git merge upstream/main + +Now your local repo is set up such that you will push changes to your GitHub +repo, from which you can submit a pull request. + +To work on the codebase and run the tests, we recommend using ``nox``, +but you can also use a ``virtualenv`` of your own creation. + +.. _repo: https://github.com/googleapis/google-cloud-python + +Using ``nox`` +============= + +We use `nox `__ to instrument our tests. + +- To test your changes, run unit tests with ``nox``:: + $ nox -s unit + +- To run a single unit test:: + + $ nox -s unit-3.13 -- -k + + + .. note:: + + The unit tests and system tests are described in the + ``noxfile.py`` files in each directory. + +.. nox: https://pypi.org/project/nox/ + +***************************************** +I'm getting weird errors... Can you help? +***************************************** + +If the error mentions ``Python.h`` not being found, +install ``python-dev`` and try again. +On Debian/Ubuntu:: + + $ sudo apt-get install python-dev + +************ +Coding Style +************ +- We use the automatic code formatter ``black``. You can run it using + the nox session ``blacken``. This will eliminate many lint errors. Run via:: + + $ nox -s blacken + +- PEP8 compliance is required, with exceptions defined in the linter configuration. + If you have ``nox`` installed, you can test that you have not introduced + any non-compliant code via:: + + $ nox -s lint + +- In order to make ``nox -s lint`` run faster, you can set some environment + variables:: + + export GOOGLE_CLOUD_TESTING_REMOTE="upstream" + export GOOGLE_CLOUD_TESTING_BRANCH="main" + + By doing this, you are specifying the location of the most up-to-date + version of ``google-cloud-python``. The + remote name ``upstream`` should point to the official ``googleapis`` + checkout and the branch should be the default branch on that remote (``main``). + +- This repository contains configuration for the + `pre-commit `__ tool, which automates checking + our linters during a commit. If you have it installed on your ``$PATH``, + you can enable enforcing those checks via: + +.. code-block:: bash + + $ pre-commit install + pre-commit installed at .git/hooks/pre-commit + +Exceptions to PEP8: + +- Many unit tests use a helper method, ``_call_fut`` ("FUT" is short for + "Function-Under-Test"), which is PEP8-incompliant, but more readable. + Some also use a local variable, ``MUT`` (short for "Module-Under-Test"). 
+ +******************** +Running System Tests +******************** + +- To run system tests, you can execute:: + + # Run all system tests + $ nox -s system + + # Run a single system test + $ nox -s system-3.13 -- -k + + + .. note:: + + System tests are only configured to run under Python 3.8, 3.9, 3.10, 3.11, 3.12 and 3.13. + For expediency, we do not run them in older versions of Python 3. + + This alone will not run the tests. You'll need to change some local + auth settings and change some configuration in your project to + run all the tests. + +- System tests will be run against an actual project. You should use local credentials from gcloud when possible. See `Best practices for application authentication `__. Some tests require a service account. For those tests see `Authenticating as a service account `__. + +************* +Test Coverage +************* + +- The codebase *must* have 100% test statement coverage after each commit. + You can test coverage via ``nox -s cover``. + +****************************************************** +Documentation Coverage and Building HTML Documentation +****************************************************** + +If you fix a bug, and the bug requires an API or behavior modification, all +documentation in this package which references that API or behavior must be +changed to reflect the bug fix, ideally in the same commit that fixes the bug +or adds the feature. + +Build the docs via: + + $ nox -s docs + +************************* +Samples and code snippets +************************* + +Code samples and snippets live in the `samples/` catalogue. Feel free to +provide more examples, but make sure to write tests for those examples. +Each folder containing example code requires its own `noxfile.py` script +which automates testing. If you decide to create a new folder, you can +base it on the `samples/snippets` folder (providing `noxfile.py` and +the requirements files). + +The tests will run against a real Google Cloud Project, so you should +configure them just like the System Tests. + +- To run sample tests, you can execute:: + + # Run all tests in a folder + $ cd samples/snippets + $ nox -s py-3.8 + + # Run a single sample test + $ cd samples/snippets + $ nox -s py-3.8 -- -k + +******************************************** +Note About ``README`` as it pertains to PyPI +******************************************** + +The `description on PyPI`_ for the project comes directly from the +``README``. Due to the reStructuredText (``rst``) parser used by +PyPI, relative links which will work on GitHub (e.g. ``CONTRIBUTING.rst`` +instead of +``https://github.com/googleapis/google-cloud-python/blob/main/CONTRIBUTING.rst``) +may cause problems creating links or rendering the description. + +.. _description on PyPI: https://pypi.org/project/google-cloud-access-context-manager + + +************************* +Supported Python Versions +************************* + +We support: + +- `Python 3.7`_ +- `Python 3.8`_ +- `Python 3.9`_ +- `Python 3.10`_ +- `Python 3.11`_ +- `Python 3.12`_ +- `Python 3.13`_ + +.. _Python 3.7: https://docs.python.org/3.7/ +.. _Python 3.8: https://docs.python.org/3.8/ +.. _Python 3.9: https://docs.python.org/3.9/ +.. _Python 3.10: https://docs.python.org/3.10/ +.. _Python 3.11: https://docs.python.org/3.11/ +.. _Python 3.12: https://docs.python.org/3.12/ +.. _Python 3.13: https://docs.python.org/3.13/ + + +Supported versions can be found in our ``noxfile.py`` `config`_. + +.. 
_config: https://github.com/googleapis/google-cloud-python/blob/main/packages/google-cloud-access-context-manager/noxfile.py + + +********** +Versioning +********** + +This library follows `Semantic Versioning`_. + +.. _Semantic Versioning: http://semver.org/ + +Some packages are currently in major version zero (``0.y.z``), which means that +anything may change at any time and the public API should not be considered +stable. + +****************************** +Contributor License Agreements +****************************** + +Before we can accept your pull requests you'll need to sign a Contributor +License Agreement (CLA): + +- **If you are an individual writing original source code** and **you own the + intellectual property**, then you'll need to sign an + `individual CLA `__. +- **If you work for a company that wants to allow you to contribute your work**, + then you'll need to sign a + `corporate CLA `__. + +You can sign these electronically (just scroll to the bottom). After that, +we'll be able to accept your pull requests. diff --git a/packages/google-cloud-access-context-manager/LICENSE b/packages/google-cloud-access-context-manager/LICENSE new file mode 100644 index 000000000000..d64569567334 --- /dev/null +++ b/packages/google-cloud-access-context-manager/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/packages/google-cloud-access-context-manager/MANIFEST.in b/packages/google-cloud-access-context-manager/MANIFEST.in new file mode 100644 index 000000000000..d6814cd60037 --- /dev/null +++ b/packages/google-cloud-access-context-manager/MANIFEST.in @@ -0,0 +1,25 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! 
+include README.rst LICENSE +recursive-include google *.json *.proto py.typed +recursive-include tests * +global-exclude *.py[co] +global-exclude __pycache__ + +# Exclude scripts for samples readmegen +prune scripts/readme-gen diff --git a/packages/google-cloud-access-context-manager/README.rst b/packages/google-cloud-access-context-manager/README.rst new file mode 100644 index 000000000000..beca9e68fdc2 --- /dev/null +++ b/packages/google-cloud-access-context-manager/README.rst @@ -0,0 +1,108 @@ +Python Client for Access Context Manager +======================================== + +|preview| |pypi| |versions| + +`Access Context Manager`_: + +- `Client Library Documentation`_ +- `Product Documentation`_ + +.. |preview| image:: https://img.shields.io/badge/support-preview-orange.svg + :target: https://github.com/googleapis/google-cloud-python/blob/main/README.rst#stability-levels +.. |pypi| image:: https://img.shields.io/pypi/v/google-cloud-access-context-manager.svg + :target: https://pypi.org/project/google-cloud-access-context-manager/ +.. |versions| image:: https://img.shields.io/pypi/pyversions/google-cloud-access-context-manager.svg + :target: https://pypi.org/project/google-cloud-access-context-manager/ +.. _Access Context Manager: https://cloud.google.com/access-context-manager/docs/overview +.. _Client Library Documentation: https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-access-context-manager +.. _Product Documentation: https://cloud.google.com/access-context-manager/docs/overview + +Quick Start +----------- + +In order to use this library, you first need to go through the following steps: + +1. `Select or create a Cloud Platform project.`_ +2. `Enable billing for your project.`_ +3. `Enable the Access Context Manager.`_ +4. `Setup Authentication.`_ + +.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project +.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project +.. _Enable the Access Context Manager.: https://cloud.google.com/access-context-manager/docs/overview +.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html + +Installation +~~~~~~~~~~~~ + +Install this library in a virtual environment using `venv`_. `venv`_ is a tool that +creates isolated Python environments. These isolated environments can have separate +versions of Python packages, which allows you to isolate one project's dependencies +from the dependencies of other projects. + +With `venv`_, it's possible to install this library without needing system +install permissions, and without clashing with the installed system +dependencies. + +.. _`venv`: https://docs.python.org/3/library/venv.html + + +Code samples and snippets +~~~~~~~~~~~~~~~~~~~~~~~~~ + +Code samples and snippets live in the `samples/`_ folder. + +.. _samples/: https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-access-context-manager/samples + + +Supported Python Versions +^^^^^^^^^^^^^^^^^^^^^^^^^ +Our client libraries are compatible with all current `active`_ and `maintenance`_ versions of +Python. + +Python >= 3.7 + +.. _active: https://devguide.python.org/devcycle/#in-development-main-branch +.. 
_maintenance: https://devguide.python.org/devcycle/#maintenance-branches + +Unsupported Python Versions +^^^^^^^^^^^^^^^^^^^^^^^^^^^ +Python <= 3.6 + +If you are using an `end-of-life`_ +version of Python, we recommend that you update as soon as possible to an actively supported version. + +.. _end-of-life: https://devguide.python.org/devcycle/#end-of-life-branches + +Mac/Linux +^^^^^^^^^ + +.. code-block:: console + + python3 -m venv + source /bin/activate + pip install google-cloud-access-context-manager + + +Windows +^^^^^^^ + +.. code-block:: console + + py -m venv + .\\Scripts\activate + pip install google-cloud-access-context-manager + +Next Steps +~~~~~~~~~~ + +- Read the `Client Library Documentation`_ for Access Context Manager + to see other available methods on the client. +- Read the `Access Context Manager Product documentation`_ to learn + more about the product and see How-to Guides. +- View this `README`_ to see the full list of Cloud + APIs that we cover. + +.. _Access Context Manager Product documentation: https://cloud.google.com/access-context-manager/docs/overview +.. _README: https://github.com/googleapis/google-cloud-python/blob/main/README.rst diff --git a/packages/google-cloud-access-context-manager/SECURITY.md b/packages/google-cloud-access-context-manager/SECURITY.md new file mode 100644 index 000000000000..8b58ae9c01ae --- /dev/null +++ b/packages/google-cloud-access-context-manager/SECURITY.md @@ -0,0 +1,7 @@ +# Security Policy + +To report a security issue, please use [g.co/vulnz](https://g.co/vulnz). + +The Google Security Team will respond within 5 working days of your report on g.co/vulnz. + +We use g.co/vulnz for our intake, and do coordination and disclosure here using GitHub Security Advisory to privately discuss and fix the issue. 
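(Aside, not part of the generated changes in this diff.) The README above covers installing google-cloud-access-context-manager into a virtual environment; the package itself is proto-only. A small usage sketch follows, assuming the protobuf modules are published under `google.identity.accesscontextmanager.v1` as the OwlBot copy rules earlier in this diff imply; the field values are illustrative only.

# Minimal sketch (assumption: access_level_pb2 is shipped under
# google.identity.accesscontextmanager.v1 in this proto-only package).
from google.identity.accesscontextmanager.v1 import access_level_pb2

# Build a basic access level with a single IP-subnetwork condition.
level = access_level_pb2.AccessLevel(
    name="accessPolicies/123/accessLevels/corp_network",
    title="Corp network",
    basic=access_level_pb2.BasicLevel(
        conditions=[
            access_level_pb2.Condition(ip_subnetworks=["203.0.113.0/24"])
        ]
    ),
)
print(level)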
diff --git a/packages/google-cloud-access-context-manager/docs/CHANGELOG.md b/packages/google-cloud-access-context-manager/docs/CHANGELOG.md new file mode 120000 index 000000000000..04c99a55caae --- /dev/null +++ b/packages/google-cloud-access-context-manager/docs/CHANGELOG.md @@ -0,0 +1 @@ +../CHANGELOG.md \ No newline at end of file diff --git a/packages/google-cloud-access-context-manager/docs/README.rst b/packages/google-cloud-access-context-manager/docs/README.rst new file mode 120000 index 000000000000..89a0106941ff --- /dev/null +++ b/packages/google-cloud-access-context-manager/docs/README.rst @@ -0,0 +1 @@ +../README.rst \ No newline at end of file diff --git a/packages/google-cloud-access-context-manager/docs/_static/custom.css b/packages/google-cloud-access-context-manager/docs/_static/custom.css new file mode 100644 index 000000000000..b0a295464b23 --- /dev/null +++ b/packages/google-cloud-access-context-manager/docs/_static/custom.css @@ -0,0 +1,20 @@ +div#python2-eol { + border-color: red; + border-width: medium; +} + +/* Ensure minimum width for 'Parameters' / 'Returns' column */ +dl.field-list > dt { + min-width: 100px +} + +/* Insert space between methods for readability */ +dl.method { + padding-top: 10px; + padding-bottom: 10px +} + +/* Insert empty space between classes */ +dl.class { + padding-bottom: 50px +} diff --git a/packages/google-cloud-access-context-manager/docs/_templates/layout.html b/packages/google-cloud-access-context-manager/docs/_templates/layout.html new file mode 100644 index 000000000000..6316a537f72b --- /dev/null +++ b/packages/google-cloud-access-context-manager/docs/_templates/layout.html @@ -0,0 +1,50 @@ + +{% extends "!layout.html" %} +{%- block content %} +{%- if theme_fixed_sidebar|lower == 'true' %} +
+ {{ sidebar() }} + {%- block document %} +
+ {%- if render_sidebar %} +
+ {%- endif %} + + {%- block relbar_top %} + {%- if theme_show_relbar_top|tobool %} + + {%- endif %} + {% endblock %} + +
+
+ As of January 1, 2020 this library no longer supports Python 2 on the latest released version. + Library versions released prior to that date will continue to be available. For more information please + visit <a href="https://cloud.google.com/python/docs/python2-sunset/">Python 2 support on Google Cloud</a>.
+ {% block body %} {% endblock %} +
+ + {%- block relbar_bottom %} + {%- if theme_show_relbar_bottom|tobool %} + + {%- endif %} + {% endblock %} + + {%- if render_sidebar %} +
+ {%- endif %} +
+ {%- endblock %} +
+
+{%- else %} +{{ super() }} +{%- endif %} +{%- endblock %} diff --git a/packages/google-cloud-access-context-manager/docs/conf.py b/packages/google-cloud-access-context-manager/docs/conf.py new file mode 100644 index 000000000000..54626f1a0a76 --- /dev/null +++ b/packages/google-cloud-access-context-manager/docs/conf.py @@ -0,0 +1,384 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# google-cloud-access-context-manager documentation build configuration file +# +# This file is execfile()d with the current directory set to its +# containing dir. +# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. + +import os +import shlex +import sys + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +sys.path.insert(0, os.path.abspath("..")) + +# For plugins that can not read conf.py. +# See also: https://github.com/docascode/sphinx-docfx-yaml/issues/85 +sys.path.insert(0, os.path.abspath(".")) + +__version__ = "" + +# -- General configuration ------------------------------------------------ + +# If your documentation needs a minimal Sphinx version, state it here. +needs_sphinx = "1.5.5" + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = [ + "sphinx.ext.autodoc", + "sphinx.ext.autosummary", + "sphinx.ext.intersphinx", + "sphinx.ext.coverage", + "sphinx.ext.doctest", + "sphinx.ext.napoleon", + "sphinx.ext.todo", + "sphinx.ext.viewcode", + "recommonmark", +] + +# autodoc/autosummary flags +autoclass_content = "both" +autodoc_default_options = {"members": True} +autosummary_generate = True + + +# Add any paths that contain templates here, relative to this directory. +templates_path = ["_templates"] + +# The suffix(es) of source filenames. +# You can specify multiple suffix as a list of string: +# source_suffix = ['.rst', '.md'] +source_suffix = [".rst", ".md"] + +# The encoding of source files. +# source_encoding = 'utf-8-sig' + +# The root toctree document. +root_doc = "index" + +# General information about the project. +project = "google-cloud-access-context-manager" +copyright = "2019, Google" +author = "Google APIs" + +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. +# +# The full version, including alpha/beta/rc tags. +release = __version__ +# The short X.Y version. +version = ".".join(release.split(".")[0:2]) + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. 
+# +# This is also used if you do content translation via gettext catalogs. +# Usually you set "language" from the command line for these cases. +language = None + +# There are two options for replacing |today|: either, you set today to some +# non-false value, then it is used: +# today = '' +# Else, today_fmt is used as the format for a strftime call. +# today_fmt = '%B %d, %Y' + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +exclude_patterns = [ + "_build", + "**/.nox/**/*", + "samples/AUTHORING_GUIDE.md", + "samples/CONTRIBUTING.md", + "samples/snippets/README.rst", +] + +# The reST default role (used for this markup: `text`) to use for all +# documents. +# default_role = None + +# If true, '()' will be appended to :func: etc. cross-reference text. +# add_function_parentheses = True + +# If true, the current module name will be prepended to all description +# unit titles (such as .. function::). +# add_module_names = True + +# If true, sectionauthor and moduleauthor directives will be shown in the +# output. They are ignored by default. +# show_authors = False + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = "sphinx" + +# A list of ignored prefixes for module index sorting. +# modindex_common_prefix = [] + +# If true, keep warnings as "system message" paragraphs in the built documents. +# keep_warnings = False + +# If true, `todo` and `todoList` produce output, else they produce nothing. +todo_include_todos = True + + +# -- Options for HTML output ---------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +html_theme = "alabaster" + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. +html_theme_options = { + "description": "Google Cloud Client Libraries for google-cloud-access-context-manager", + "github_user": "googleapis", + "github_repo": "google-cloud-python", + "github_banner": True, + "font_family": "'Roboto', Georgia, sans", + "head_font_family": "'Roboto', Georgia, serif", + "code_font_family": "'Roboto Mono', 'Consolas', monospace", +} + +# Add any paths that contain custom themes here, relative to this directory. +# html_theme_path = [] + +# The name for this set of Sphinx documents. If None, it defaults to +# " v documentation". +# html_title = None + +# A shorter title for the navigation bar. Default is the same as html_title. +# html_short_title = None + +# The name of an image file (relative to this directory) to place at the top +# of the sidebar. +# html_logo = None + +# The name of an image file (within the static path) to use as favicon of the +# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 +# pixels large. +# html_favicon = None + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ["_static"] + +# Add any extra paths that contain custom files (such as robots.txt or +# .htaccess) here, relative to this directory. These files are copied +# directly to the root of the documentation. +# html_extra_path = [] + +# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, +# using the given strftime format. 
+# html_last_updated_fmt = '%b %d, %Y' + +# If true, SmartyPants will be used to convert quotes and dashes to +# typographically correct entities. +# html_use_smartypants = True + +# Custom sidebar templates, maps document names to template names. +# html_sidebars = {} + +# Additional templates that should be rendered to pages, maps page names to +# template names. +# html_additional_pages = {} + +# If false, no module index is generated. +# html_domain_indices = True + +# If false, no index is generated. +# html_use_index = True + +# If true, the index is split into individual pages for each letter. +# html_split_index = False + +# If true, links to the reST sources are added to the pages. +# html_show_sourcelink = True + +# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. +# html_show_sphinx = True + +# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. +# html_show_copyright = True + +# If true, an OpenSearch description file will be output, and all pages will +# contain a tag referring to it. The value of this option must be the +# base URL from which the finished HTML is served. +# html_use_opensearch = '' + +# This is the file name suffix for HTML files (e.g. ".xhtml"). +# html_file_suffix = None + +# Language to be used for generating the HTML full-text search index. +# Sphinx supports the following languages: +# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' +# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' +# html_search_language = 'en' + +# A dictionary with options for the search language support, empty by default. +# Now only 'ja' uses this config value +# html_search_options = {'type': 'default'} + +# The name of a javascript file (relative to the configuration directory) that +# implements a search results scorer. If empty, the default will be used. +# html_search_scorer = 'scorer.js' + +# Output file base name for HTML help builder. +htmlhelp_basename = "google-cloud-access-context-manager-doc" + +# -- Options for warnings ------------------------------------------------------ + + +suppress_warnings = [ + # Temporarily suppress this to avoid "more than one target found for + # cross-reference" warning, which are intractable for us to avoid while in + # a mono-repo. + # See https://github.com/sphinx-doc/sphinx/blob + # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 + "ref.python" +] + +# -- Options for LaTeX output --------------------------------------------- + +latex_elements = { + # The paper size ('letterpaper' or 'a4paper'). + #'papersize': 'letterpaper', + # The font size ('10pt', '11pt' or '12pt'). + #'pointsize': '10pt', + # Additional stuff for the LaTeX preamble. + #'preamble': '', + # Latex figure (float) alignment + #'figure_align': 'htbp', +} + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). +latex_documents = [ + ( + root_doc, + "google-cloud-access-context-manager.tex", + "google-cloud-access-context-manager Documentation", + author, + "manual", + ) +] + +# The name of an image file (relative to this directory) to place at the top of +# the title page. +# latex_logo = None + +# For "manual" documents, if this is true, then toplevel headings are parts, +# not chapters. +# latex_use_parts = False + +# If true, show page references after internal links. +# latex_show_pagerefs = False + +# If true, show URL addresses after external links. 
+# latex_show_urls = False + +# Documents to append as an appendix to all manuals. +# latex_appendices = [] + +# If false, no module index is generated. +# latex_domain_indices = True + + +# -- Options for manual page output --------------------------------------- + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). +man_pages = [ + ( + root_doc, + "google-cloud-access-context-manager", + "google-cloud-access-context-manager Documentation", + [author], + 1, + ) +] + +# If true, show URL addresses after external links. +# man_show_urls = False + + +# -- Options for Texinfo output ------------------------------------------- + +# Grouping the document tree into Texinfo files. List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + ( + root_doc, + "google-cloud-access-context-manager", + "google-cloud-access-context-manager Documentation", + author, + "google-cloud-access-context-manager", + "google-cloud-access-context-manager Library", + "APIs", + ) +] + +# Documents to append as an appendix to all manuals. +# texinfo_appendices = [] + +# If false, no module index is generated. +# texinfo_domain_indices = True + +# How to display URL addresses: 'footnote', 'no', or 'inline'. +# texinfo_show_urls = 'footnote' + +# If true, do not generate a @detailmenu in the "Top" node's menu. +# texinfo_no_detailmenu = False + + +# Example configuration for intersphinx: refer to the Python standard library. +intersphinx_mapping = { + "python": ("https://python.readthedocs.org/en/latest/", None), + "google-auth": ("https://googleapis.dev/python/google-auth/latest/", None), + "google.api_core": ( + "https://googleapis.dev/python/google-api-core/latest/", + None, + ), + "grpc": ("https://grpc.github.io/grpc/python/", None), + "proto-plus": ("https://proto-plus-python.readthedocs.io/en/latest/", None), + "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), +} + + +# Napoleon settings +napoleon_google_docstring = True +napoleon_numpy_docstring = True +napoleon_include_private_with_doc = False +napoleon_include_special_with_doc = True +napoleon_use_admonition_for_examples = False +napoleon_use_admonition_for_notes = False +napoleon_use_admonition_for_references = False +napoleon_use_ivar = False +napoleon_use_param = True +napoleon_use_rtype = True diff --git a/packages/google-cloud-access-context-manager/docs/index.rst b/packages/google-cloud-access-context-manager/docs/index.rst new file mode 100644 index 000000000000..a739bfeaea26 --- /dev/null +++ b/packages/google-cloud-access-context-manager/docs/index.rst @@ -0,0 +1,23 @@ +.. include:: README.rst + +.. include:: multiprocessing.rst + + +API Reference +------------- +.. toctree:: + :maxdepth: 2 + + type + v1 + + +Changelog +--------- + +For a list of all ``google-cloud-access-context-manager`` releases: + +.. toctree:: + :maxdepth: 2 + + CHANGELOG diff --git a/packages/google-cloud-access-context-manager/docs/multiprocessing.rst b/packages/google-cloud-access-context-manager/docs/multiprocessing.rst new file mode 100644 index 000000000000..536d17b2ea65 --- /dev/null +++ b/packages/google-cloud-access-context-manager/docs/multiprocessing.rst @@ -0,0 +1,7 @@ +.. note:: + + Because this client uses :mod:`grpc` library, it is safe to + share instances across threads. 
In multiprocessing scenarios, the best + practice is to create client instances *after* the invocation of + :func:`os.fork` by :class:`multiprocessing.pool.Pool` or + :class:`multiprocessing.Process`. diff --git a/packages/google-cloud-access-context-manager/docs/type.rst b/packages/google-cloud-access-context-manager/docs/type.rst new file mode 100644 index 000000000000..ff90e2ca782d --- /dev/null +++ b/packages/google-cloud-access-context-manager/docs/type.rst @@ -0,0 +1,6 @@ +google.identity.accesscontextmanager.type +========================================= + +.. automodule:: google.identity.accesscontextmanager.type + :members: + :inherited-members: diff --git a/packages/google-cloud-access-context-manager/docs/v1.rst b/packages/google-cloud-access-context-manager/docs/v1.rst new file mode 100644 index 000000000000..23a51630d79a --- /dev/null +++ b/packages/google-cloud-access-context-manager/docs/v1.rst @@ -0,0 +1,7 @@ +google.identity.accesscontextmanager.v1 +======================================== + +.. automodule:: google.identity.accesscontextmanager.v1 + :members: + :inherited-members: + diff --git a/packages/google-cloud-access-context-manager/google/identity/accesscontextmanager/type/__init__.py b/packages/google-cloud-access-context-manager/google/identity/accesscontextmanager/type/__init__.py new file mode 100644 index 000000000000..436b8cc575a5 --- /dev/null +++ b/packages/google-cloud-access-context-manager/google/identity/accesscontextmanager/type/__init__.py @@ -0,0 +1,30 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +from __future__ import absolute_import + +from google.api_core.protobuf_helpers import get_messages + +from google.identity.accesscontextmanager.type import device_resources_pb2 + +_modules = [device_resources_pb2] + +names = [] + +for module in _modules: + for name, message in get_messages(module).items(): + message.__module__ = module.__name__ diff --git a/packages/google-cloud-access-context-manager/google/identity/accesscontextmanager/type/device_resources_pb2.py b/packages/google-cloud-access-context-manager/google/identity/accesscontextmanager/type/device_resources_pb2.py new file mode 100644 index 000000000000..594894e11a6d --- /dev/null +++ b/packages/google-cloud-access-context-manager/google/identity/accesscontextmanager/type/device_resources_pb2.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- + +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: google/identity/accesscontextmanager/type/device_resources.proto +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder + +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile( + b"\n@google/identity/accesscontextmanager/type/device_resources.proto\x12)google.identity.accesscontextmanager.type*p\n\x16\x44\x65viceEncryptionStatus\x12\x1a\n\x16\x45NCRYPTION_UNSPECIFIED\x10\x00\x12\x1a\n\x16\x45NCRYPTION_UNSUPPORTED\x10\x01\x12\x0f\n\x0bUNENCRYPTED\x10\x02\x12\r\n\tENCRYPTED\x10\x03*\x82\x01\n\x06OsType\x12\x12\n\x0eOS_UNSPECIFIED\x10\x00\x12\x0f\n\x0b\x44\x45SKTOP_MAC\x10\x01\x12\x13\n\x0f\x44\x45SKTOP_WINDOWS\x10\x02\x12\x11\n\rDESKTOP_LINUX\x10\x03\x12\x15\n\x11\x44\x45SKTOP_CHROME_OS\x10\x06\x12\x0b\n\x07\x41NDROID\x10\x04\x12\x07\n\x03IOS\x10\x05*V\n\x15\x44\x65viceManagementLevel\x12\x1a\n\x16MANAGEMENT_UNSPECIFIED\x10\x00\x12\x08\n\x04NONE\x10\x01\x12\t\n\x05\x42\x41SIC\x10\x02\x12\x0c\n\x08\x43OMPLETE\x10\x03\x42\x8d\x02\n-com.google.identity.accesscontextmanager.typeB\tTypeProtoP\x01ZHgoogle.golang.org/genproto/googleapis/identity/accesscontextmanager/type\xaa\x02)Google.Identity.AccessContextManager.Type\xca\x02)Google\\Identity\\AccessContextManager\\Type\xea\x02,Google::Identity::AccessContextManager::Typeb\x06proto3" +) + +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages( + DESCRIPTOR, + "google.identity.accesscontextmanager.type.device_resources_pb2", + _globals, +) +if _descriptor._USE_C_DESCRIPTORS == False: + DESCRIPTOR._options = None + DESCRIPTOR._serialized_options = b"\n-com.google.identity.accesscontextmanager.typeB\tTypeProtoP\001ZHgoogle.golang.org/genproto/googleapis/identity/accesscontextmanager/type\252\002)Google.Identity.AccessContextManager.Type\312\002)Google\\Identity\\AccessContextManager\\Type\352\002,Google::Identity::AccessContextManager::Type" + _globals["_DEVICEENCRYPTIONSTATUS"]._serialized_start = 111 + _globals["_DEVICEENCRYPTIONSTATUS"]._serialized_end = 223 + _globals["_OSTYPE"]._serialized_start = 226 + _globals["_OSTYPE"]._serialized_end = 356 + _globals["_DEVICEMANAGEMENTLEVEL"]._serialized_start = 358 + _globals["_DEVICEMANAGEMENTLEVEL"]._serialized_end = 444 +# @@protoc_insertion_point(module_scope) diff --git a/packages/google-cloud-access-context-manager/google/identity/accesscontextmanager/v1/__init__.py b/packages/google-cloud-access-context-manager/google/identity/accesscontextmanager/v1/__init__.py new file mode 100644 index 000000000000..90c8729b3e6c --- /dev/null +++ b/packages/google-cloud-access-context-manager/google/identity/accesscontextmanager/v1/__init__.py @@ -0,0 +1,34 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + + +from __future__ import absolute_import + +from google.api_core.protobuf_helpers import get_messages + +from google.identity.accesscontextmanager.v1 import ( + access_level_pb2, + access_policy_pb2, + service_perimeter_pb2, +) + +_modules = [access_level_pb2, access_policy_pb2, service_perimeter_pb2] + +names = [] + +for module in _modules: + for name, message in get_messages(module).items(): + message.__module__ = module.__name__ diff --git a/packages/google-cloud-access-context-manager/google/identity/accesscontextmanager/v1/access_context_manager_pb2.py b/packages/google-cloud-access-context-manager/google/identity/accesscontextmanager/v1/access_context_manager_pb2.py new file mode 100644 index 000000000000..a8fc8ebd2d3b --- /dev/null +++ b/packages/google-cloud-access-context-manager/google/identity/accesscontextmanager/v1/access_context_manager_pb2.py @@ -0,0 +1,420 @@ +# -*- coding: utf-8 -*- + +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: google/identity/accesscontextmanager/v1/access_context_manager.proto +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder + +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 +from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 +from google.api import client_pb2 as google_dot_api_dot_client__pb2 +from google.api import resource_pb2 as google_dot_api_dot_resource__pb2 +from google.iam.v1 import iam_policy_pb2 as google_dot_iam_dot_v1_dot_iam__policy__pb2 +from google.iam.v1 import policy_pb2 as google_dot_iam_dot_v1_dot_policy__pb2 +from google.longrunning import ( + operations_pb2 as google_dot_longrunning_dot_operations__pb2, +) +from google.protobuf import field_mask_pb2 as google_dot_protobuf_dot_field__mask__pb2 + +from google.identity.accesscontextmanager.v1 import ( + access_level_pb2 as google_dot_identity_dot_accesscontextmanager_dot_v1_dot_access__level__pb2, +) +from google.identity.accesscontextmanager.v1 import ( + access_policy_pb2 as google_dot_identity_dot_accesscontextmanager_dot_v1_dot_access__policy__pb2, +) +from google.identity.accesscontextmanager.v1 import ( + gcp_user_access_binding_pb2 as google_dot_identity_dot_accesscontextmanager_dot_v1_dot_gcp__user__access__binding__pb2, +) +from google.identity.accesscontextmanager.v1 import ( + service_perimeter_pb2 as google_dot_identity_dot_accesscontextmanager_dot_v1_dot_service__perimeter__pb2, +) + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile( + 
b'\nDgoogle/identity/accesscontextmanager/v1/access_context_manager.proto\x12\'google.identity.accesscontextmanager.v1\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a\x19google/api/resource.proto\x1a\x1egoogle/iam/v1/iam_policy.proto\x1a\x1agoogle/iam/v1/policy.proto\x1a:google/identity/accesscontextmanager/v1/access_level.proto\x1a;google/identity/accesscontextmanager/v1/access_policy.proto\x1a\x45google/identity/accesscontextmanager/v1/gcp_user_access_binding.proto\x1a?google/identity/accesscontextmanager/v1/service_perimeter.proto\x1a#google/longrunning/operations.proto\x1a google/protobuf/field_mask.proto"\x8c\x01\n\x19ListAccessPoliciesRequest\x12H\n\x06parent\x18\x01 \x01(\tB8\xe0\x41\x02\xfa\x41\x32\n0cloudresourcemanager.googleapis.com/Organization\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"\x85\x01\n\x1aListAccessPoliciesResponse\x12N\n\x0f\x61\x63\x63\x65ss_policies\x18\x01 \x03(\x0b\x32\x35.google.identity.accesscontextmanager.v1.AccessPolicy\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"`\n\x16GetAccessPolicyRequest\x12\x46\n\x04name\x18\x01 \x01(\tB8\xe0\x41\x02\xfa\x41\x32\n0accesscontextmanager.googleapis.com/AccessPolicy"\x9d\x01\n\x19UpdateAccessPolicyRequest\x12J\n\x06policy\x18\x01 \x01(\x0b\x32\x35.google.identity.accesscontextmanager.v1.AccessPolicyB\x03\xe0\x41\x02\x12\x34\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMaskB\x03\xe0\x41\x02"c\n\x19\x44\x65leteAccessPolicyRequest\x12\x46\n\x04name\x18\x01 \x01(\tB8\xe0\x41\x02\xfa\x41\x32\n0accesscontextmanager.googleapis.com/AccessPolicy"\xdc\x01\n\x17ListAccessLevelsRequest\x12G\n\x06parent\x18\x01 \x01(\tB7\xe0\x41\x02\xfa\x41\x31\x12/accesscontextmanager.googleapis.com/AccessLevel\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t\x12Q\n\x13\x61\x63\x63\x65ss_level_format\x18\x04 \x01(\x0e\x32\x34.google.identity.accesscontextmanager.v1.LevelFormat"\x80\x01\n\x18ListAccessLevelsResponse\x12K\n\raccess_levels\x18\x01 \x03(\x0b\x32\x34.google.identity.accesscontextmanager.v1.AccessLevel\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"\xb1\x01\n\x15GetAccessLevelRequest\x12\x45\n\x04name\x18\x01 \x01(\tB7\xe0\x41\x02\xfa\x41\x31\n/accesscontextmanager.googleapis.com/AccessLevel\x12Q\n\x13\x61\x63\x63\x65ss_level_format\x18\x02 \x01(\x0e\x32\x34.google.identity.accesscontextmanager.v1.LevelFormat"\xb4\x01\n\x18\x43reateAccessLevelRequest\x12G\n\x06parent\x18\x01 \x01(\tB7\xe0\x41\x02\xfa\x41\x31\x12/accesscontextmanager.googleapis.com/AccessLevel\x12O\n\x0c\x61\x63\x63\x65ss_level\x18\x02 \x01(\x0b\x32\x34.google.identity.accesscontextmanager.v1.AccessLevelB\x03\xe0\x41\x02"\xa1\x01\n\x18UpdateAccessLevelRequest\x12O\n\x0c\x61\x63\x63\x65ss_level\x18\x01 \x01(\x0b\x32\x34.google.identity.accesscontextmanager.v1.AccessLevelB\x03\xe0\x41\x02\x12\x34\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMaskB\x03\xe0\x41\x02"a\n\x18\x44\x65leteAccessLevelRequest\x12\x45\n\x04name\x18\x01 \x01(\tB7\xe0\x41\x02\xfa\x41\x31\n/accesscontextmanager.googleapis.com/AccessLevel"\xc5\x01\n\x1aReplaceAccessLevelsRequest\x12G\n\x06parent\x18\x01 \x01(\tB7\xe0\x41\x02\xfa\x41\x31\x12/accesscontextmanager.googleapis.com/AccessLevel\x12P\n\raccess_levels\x18\x02 \x03(\x0b\x32\x34.google.identity.accesscontextmanager.v1.AccessLevelB\x03\xe0\x41\x02\x12\x0c\n\x04\x65tag\x18\x04 \x01(\t"j\n\x1bReplaceAccessLevelsResponse\x12K\n\raccess_levels\x18\x01 
\x03(\x0b\x32\x34.google.identity.accesscontextmanager.v1.AccessLevel"\x93\x01\n\x1cListServicePerimetersRequest\x12L\n\x06parent\x18\x01 \x01(\tB<\xe0\x41\x02\xfa\x41\x36\x12\x34\x61\x63\x63\x65sscontextmanager.googleapis.com/ServicePerimeter\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"\x8f\x01\n\x1dListServicePerimetersResponse\x12U\n\x12service_perimeters\x18\x01 \x03(\x0b\x32\x39.google.identity.accesscontextmanager.v1.ServicePerimeter\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"h\n\x1aGetServicePerimeterRequest\x12J\n\x04name\x18\x01 \x01(\tB<\xe0\x41\x02\xfa\x41\x36\n4accesscontextmanager.googleapis.com/ServicePerimeter"\xc8\x01\n\x1d\x43reateServicePerimeterRequest\x12L\n\x06parent\x18\x01 \x01(\tB<\xe0\x41\x02\xfa\x41\x36\x12\x34\x61\x63\x63\x65sscontextmanager.googleapis.com/ServicePerimeter\x12Y\n\x11service_perimeter\x18\x02 \x01(\x0b\x32\x39.google.identity.accesscontextmanager.v1.ServicePerimeterB\x03\xe0\x41\x02"\xb0\x01\n\x1dUpdateServicePerimeterRequest\x12Y\n\x11service_perimeter\x18\x01 \x01(\x0b\x32\x39.google.identity.accesscontextmanager.v1.ServicePerimeterB\x03\xe0\x41\x02\x12\x34\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMaskB\x03\xe0\x41\x02"k\n\x1d\x44\x65leteServicePerimeterRequest\x12J\n\x04name\x18\x01 \x01(\tB<\xe0\x41\x02\xfa\x41\x36\n4accesscontextmanager.googleapis.com/ServicePerimeter"\xd9\x01\n\x1fReplaceServicePerimetersRequest\x12L\n\x06parent\x18\x01 \x01(\tB<\xe0\x41\x02\xfa\x41\x36\x12\x34\x61\x63\x63\x65sscontextmanager.googleapis.com/ServicePerimeter\x12Z\n\x12service_perimeters\x18\x02 \x03(\x0b\x32\x39.google.identity.accesscontextmanager.v1.ServicePerimeterB\x03\xe0\x41\x02\x12\x0c\n\x04\x65tag\x18\x03 \x01(\t"y\n ReplaceServicePerimetersResponse\x12U\n\x12service_perimeters\x18\x01 \x03(\x0b\x32\x39.google.identity.accesscontextmanager.v1.ServicePerimeter"|\n\x1e\x43ommitServicePerimetersRequest\x12L\n\x06parent\x18\x01 \x01(\tB<\xe0\x41\x02\xfa\x41\x36\x12\x34\x61\x63\x63\x65sscontextmanager.googleapis.com/ServicePerimeter\x12\x0c\n\x04\x65tag\x18\x02 \x01(\t"x\n\x1f\x43ommitServicePerimetersResponse\x12U\n\x12service_perimeters\x18\x01 \x03(\x0b\x32\x39.google.identity.accesscontextmanager.v1.ServicePerimeter"\x9d\x01\n ListGcpUserAccessBindingsRequest\x12H\n\x06parent\x18\x01 \x01(\tB8\xe0\x41\x02\xfa\x41\x32\n0cloudresourcemanager.googleapis.com/Organization\x12\x16\n\tpage_size\x18\x02 \x01(\x05\x42\x03\xe0\x41\x01\x12\x17\n\npage_token\x18\x03 \x01(\tB\x03\xe0\x41\x01"\x9d\x01\n!ListGcpUserAccessBindingsResponse\x12_\n\x18gcp_user_access_bindings\x18\x01 \x03(\x0b\x32=.google.identity.accesscontextmanager.v1.GcpUserAccessBinding\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"p\n\x1eGetGcpUserAccessBindingRequest\x12N\n\x04name\x18\x01 \x01(\tB@\xe0\x41\x02\xfa\x41:\n8accesscontextmanager.googleapis.com/GcpUserAccessBinding"\xd2\x01\n!CreateGcpUserAccessBindingRequest\x12H\n\x06parent\x18\x01 \x01(\tB8\xe0\x41\x02\xfa\x41\x32\n0cloudresourcemanager.googleapis.com/Organization\x12\x63\n\x17gcp_user_access_binding\x18\x02 \x01(\x0b\x32=.google.identity.accesscontextmanager.v1.GcpUserAccessBindingB\x03\xe0\x41\x02"\xbe\x01\n!UpdateGcpUserAccessBindingRequest\x12\x63\n\x17gcp_user_access_binding\x18\x01 \x01(\x0b\x32=.google.identity.accesscontextmanager.v1.GcpUserAccessBindingB\x03\xe0\x41\x02\x12\x34\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMaskB\x03\xe0\x41\x02"s\n!DeleteGcpUserAccessBindingRequest\x12N\n\x04name\x18\x01 
\x01(\tB@\xe0\x41\x02\xfa\x41:\n8accesscontextmanager.googleapis.com/GcpUserAccessBinding"\'\n%GcpUserAccessBindingOperationMetadata"\'\n%AccessContextManagerOperationMetadata*D\n\x0bLevelFormat\x12\x1c\n\x18LEVEL_FORMAT_UNSPECIFIED\x10\x00\x12\x0e\n\nAS_DEFINED\x10\x01\x12\x07\n\x03\x43\x45L\x10\x02\x32\xf1\x32\n\x14\x41\x63\x63\x65ssContextManager\x12\xb9\x01\n\x12ListAccessPolicies\x12\x42.google.identity.accesscontextmanager.v1.ListAccessPoliciesRequest\x1a\x43.google.identity.accesscontextmanager.v1.ListAccessPoliciesResponse"\x1a\x82\xd3\xe4\x93\x02\x14\x12\x12/v1/accessPolicies\x12\xb5\x01\n\x0fGetAccessPolicy\x12?.google.identity.accesscontextmanager.v1.GetAccessPolicyRequest\x1a\x35.google.identity.accesscontextmanager.v1.AccessPolicy"*\xda\x41\x04name\x82\xd3\xe4\x93\x02\x1d\x12\x1b/v1/{name=accessPolicies/*}\x12\xc1\x01\n\x12\x43reateAccessPolicy\x12\x35.google.identity.accesscontextmanager.v1.AccessPolicy\x1a\x1d.google.longrunning.Operation"U\xca\x41\x35\n\x0c\x41\x63\x63\x65ssPolicy\x12%AccessContextManagerOperationMetadata\x82\xd3\xe4\x93\x02\x17"\x12/v1/accessPolicies:\x01*\x12\xf8\x01\n\x12UpdateAccessPolicy\x12\x42.google.identity.accesscontextmanager.v1.UpdateAccessPolicyRequest\x1a\x1d.google.longrunning.Operation"\x7f\xca\x41\x35\n\x0c\x41\x63\x63\x65ssPolicy\x12%AccessContextManagerOperationMetadata\xda\x41\x12policy,update_mask\x82\xd3\xe4\x93\x02,2"/v1/{policy.name=accessPolicies/*}:\x06policy\x12\xe4\x01\n\x12\x44\x65leteAccessPolicy\x12\x42.google.identity.accesscontextmanager.v1.DeleteAccessPolicyRequest\x1a\x1d.google.longrunning.Operation"k\xca\x41>\n\x15google.protobuf.Empty\x12%AccessContextManagerOperationMetadata\xda\x41\x04name\x82\xd3\xe4\x93\x02\x1d*\x1b/v1/{name=accessPolicies/*}\x12\xd4\x01\n\x10ListAccessLevels\x12@.google.identity.accesscontextmanager.v1.ListAccessLevelsRequest\x1a\x41.google.identity.accesscontextmanager.v1.ListAccessLevelsResponse";\xda\x41\x06parent\x82\xd3\xe4\x93\x02,\x12*/v1/{parent=accessPolicies/*}/accessLevels\x12\xc1\x01\n\x0eGetAccessLevel\x12>.google.identity.accesscontextmanager.v1.GetAccessLevelRequest\x1a\x34.google.identity.accesscontextmanager.v1.AccessLevel"9\xda\x41\x04name\x82\xd3\xe4\x93\x02,\x12*/v1/{name=accessPolicies/*/accessLevels/*}\x12\x85\x02\n\x11\x43reateAccessLevel\x12\x41.google.identity.accesscontextmanager.v1.CreateAccessLevelRequest\x1a\x1d.google.longrunning.Operation"\x8d\x01\xca\x41\x34\n\x0b\x41\x63\x63\x65ssLevel\x12%AccessContextManagerOperationMetadata\xda\x41\x13parent,access_level\x82\xd3\xe4\x93\x02:"*/v1/{parent=accessPolicies/*}/accessLevels:\x0c\x61\x63\x63\x65ss_level\x12\x97\x02\n\x11UpdateAccessLevel\x12\x41.google.identity.accesscontextmanager.v1.UpdateAccessLevelRequest\x1a\x1d.google.longrunning.Operation"\x9f\x01\xca\x41\x34\n\x0b\x41\x63\x63\x65ssLevel\x12%AccessContextManagerOperationMetadata\xda\x41\x18\x61\x63\x63\x65ss_level,update_mask\x82\xd3\xe4\x93\x02G27/v1/{access_level.name=accessPolicies/*/accessLevels/*}:\x0c\x61\x63\x63\x65ss_level\x12\xf1\x01\n\x11\x44\x65leteAccessLevel\x12\x41.google.identity.accesscontextmanager.v1.DeleteAccessLevelRequest\x1a\x1d.google.longrunning.Operation"z\xca\x41>\n\x15google.protobuf.Empty\x12%AccessContextManagerOperationMetadata\xda\x41\x04name\x82\xd3\xe4\x93\x02,**/v1/{name=accessPolicies/*/accessLevels/*}\x12\x83\x02\n\x13ReplaceAccessLevels\x12\x43.google.identity.accesscontextmanager.v1.ReplaceAccessLevelsRequest\x1a\x1d.google.longrunning.Operation"\x87\x01\xca\x41\x44\n\x1bReplaceAccessLevelsResponse\x12%AccessContextManagerOper
ationMetadata\x82\xd3\xe4\x93\x02:"5/v1/{parent=accessPolicies/*}/accessLevels:replaceAll:\x01*\x12\xe8\x01\n\x15ListServicePerimeters\x12\x45.google.identity.accesscontextmanager.v1.ListServicePerimetersRequest\x1a\x46.google.identity.accesscontextmanager.v1.ListServicePerimetersResponse"@\xda\x41\x06parent\x82\xd3\xe4\x93\x02\x31\x12//v1/{parent=accessPolicies/*}/servicePerimeters\x12\xd5\x01\n\x13GetServicePerimeter\x12\x43.google.identity.accesscontextmanager.v1.GetServicePerimeterRequest\x1a\x39.google.identity.accesscontextmanager.v1.ServicePerimeter">\xda\x41\x04name\x82\xd3\xe4\x93\x02\x31\x12//v1/{name=accessPolicies/*/servicePerimeters/*}\x12\xa3\x02\n\x16\x43reateServicePerimeter\x12\x46.google.identity.accesscontextmanager.v1.CreateServicePerimeterRequest\x1a\x1d.google.longrunning.Operation"\xa1\x01\xca\x41\x39\n\x10ServicePerimeter\x12%AccessContextManagerOperationMetadata\xda\x41\x18parent,service_perimeter\x82\xd3\xe4\x93\x02\x44"//v1/{parent=accessPolicies/*}/servicePerimeters:\x11service_perimeter\x12\xba\x02\n\x16UpdateServicePerimeter\x12\x46.google.identity.accesscontextmanager.v1.UpdateServicePerimeterRequest\x1a\x1d.google.longrunning.Operation"\xb8\x01\xca\x41\x39\n\x10ServicePerimeter\x12%AccessContextManagerOperationMetadata\xda\x41\x1dservice_perimeter,update_mask\x82\xd3\xe4\x93\x02V2A/v1/{service_perimeter.name=accessPolicies/*/servicePerimeters/*}:\x11service_perimeter\x12\x80\x02\n\x16\x44\x65leteServicePerimeter\x12\x46.google.identity.accesscontextmanager.v1.DeleteServicePerimeterRequest\x1a\x1d.google.longrunning.Operation"\x7f\xca\x41>\n\x15google.protobuf.Empty\x12%AccessContextManagerOperationMetadata\xda\x41\x04name\x82\xd3\xe4\x93\x02\x31*//v1/{name=accessPolicies/*/servicePerimeters/*}\x12\x97\x02\n\x18ReplaceServicePerimeters\x12H.google.identity.accesscontextmanager.v1.ReplaceServicePerimetersRequest\x1a\x1d.google.longrunning.Operation"\x91\x01\xca\x41I\n 
ReplaceServicePerimetersResponse\x12%AccessContextManagerOperationMetadata\x82\xd3\xe4\x93\x02?":/v1/{parent=accessPolicies/*}/servicePerimeters:replaceAll:\x01*\x12\x90\x02\n\x17\x43ommitServicePerimeters\x12G.google.identity.accesscontextmanager.v1.CommitServicePerimetersRequest\x1a\x1d.google.longrunning.Operation"\x8c\x01\xca\x41H\n\x1f\x43ommitServicePerimetersResponse\x12%AccessContextManagerOperationMetadata\x82\xd3\xe4\x93\x02;"6/v1/{parent=accessPolicies/*}/servicePerimeters:commit:\x01*\x12\xf7\x01\n\x19ListGcpUserAccessBindings\x12I.google.identity.accesscontextmanager.v1.ListGcpUserAccessBindingsRequest\x1aJ.google.identity.accesscontextmanager.v1.ListGcpUserAccessBindingsResponse"C\xda\x41\x06parent\x82\xd3\xe4\x93\x02\x34\x12\x32/v1/{parent=organizations/*}/gcpUserAccessBindings\x12\xe4\x01\n\x17GetGcpUserAccessBinding\x12G.google.identity.accesscontextmanager.v1.GetGcpUserAccessBindingRequest\x1a=.google.identity.accesscontextmanager.v1.GcpUserAccessBinding"A\xda\x41\x04name\x82\xd3\xe4\x93\x02\x34\x12\x32/v1/{name=organizations/*/gcpUserAccessBindings/*}\x12\xbe\x02\n\x1a\x43reateGcpUserAccessBinding\x12J.google.identity.accesscontextmanager.v1.CreateGcpUserAccessBindingRequest\x1a\x1d.google.longrunning.Operation"\xb4\x01\xca\x41=\n\x14GcpUserAccessBinding\x12%GcpUserAccessBindingOperationMetadata\xda\x41\x1eparent,gcp_user_access_binding\x82\xd3\xe4\x93\x02M"2/v1/{parent=organizations/*}/gcpUserAccessBindings:\x17gcp_user_access_binding\x12\xdb\x02\n\x1aUpdateGcpUserAccessBinding\x12J.google.identity.accesscontextmanager.v1.UpdateGcpUserAccessBindingRequest\x1a\x1d.google.longrunning.Operation"\xd1\x01\xca\x41=\n\x14GcpUserAccessBinding\x12%GcpUserAccessBindingOperationMetadata\xda\x41#gcp_user_access_binding,update_mask\x82\xd3\xe4\x93\x02\x65\x32J/v1/{gcp_user_access_binding.name=organizations/*/gcpUserAccessBindings/*}:\x17gcp_user_access_binding\x12\x8c\x02\n\x1a\x44\x65leteGcpUserAccessBinding\x12J.google.identity.accesscontextmanager.v1.DeleteGcpUserAccessBindingRequest\x1a\x1d.google.longrunning.Operation"\x82\x01\xca\x41>\n\x15google.protobuf.Empty\x12%GcpUserAccessBindingOperationMetadata\xda\x41\x04name\x82\xd3\xe4\x93\x02\x34*2/v1/{name=organizations/*/gcpUserAccessBindings/*}\x12\x82\x01\n\x0cSetIamPolicy\x12".google.iam.v1.SetIamPolicyRequest\x1a\x15.google.iam.v1.Policy"7\x82\xd3\xe4\x93\x02\x31",/v1/{resource=accessPolicies/*}:setIamPolicy:\x01*\x12\x82\x01\n\x0cGetIamPolicy\x12".google.iam.v1.GetIamPolicyRequest\x1a\x15.google.iam.v1.Policy"7\x82\xd3\xe4\x93\x02\x31",/v1/{resource=accessPolicies/*}:getIamPolicy:\x01*\x12\xbf\x02\n\x12TestIamPermissions\x12(.google.iam.v1.TestIamPermissionsRequest\x1a).google.iam.v1.TestIamPermissionsResponse"\xd3\x01\x82\xd3\xe4\x93\x02\xcc\x01"2/v1/{resource=accessPolicies/*}:testIamPermissions:\x01*ZF"A/v1/{resource=accessPolicies/*/accessLevels/*}:testIamPermissions:\x01*ZK"F/v1/{resource=accessPolicies/*/servicePerimeters/*}:testIamPermissions:\x01*\x1aW\xca\x41#accesscontextmanager.googleapis.com\xd2\x41.https://www.googleapis.com/auth/cloud-platformB\xb0\x02\n+com.google.identity.accesscontextmanager.v1B\x19\x41\x63\x63\x65ssContextManagerProtoP\x01Z\\cloud.google.com/go/accesscontextmanager/apiv1/accesscontextmanagerpb;accesscontextmanagerpb\xa2\x02\x04GACM\xaa\x02\'Google.Identity.AccessContextManager.V1\xca\x02\'Google\\Identity\\AccessContextManager\\V1\xea\x02*Google::Identity::AccessContextManager::V1b\x06proto3' +) + +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) 
+_builder.BuildTopDescriptorsAndMessages( + DESCRIPTOR, + "google.identity.accesscontextmanager.v1.access_context_manager_pb2", + _globals, +) +if _descriptor._USE_C_DESCRIPTORS == False: + DESCRIPTOR._options = None + DESCRIPTOR._serialized_options = b"\n+com.google.identity.accesscontextmanager.v1B\031AccessContextManagerProtoP\001Z\\cloud.google.com/go/accesscontextmanager/apiv1/accesscontextmanagerpb;accesscontextmanagerpb\242\002\004GACM\252\002'Google.Identity.AccessContextManager.V1\312\002'Google\\Identity\\AccessContextManager\\V1\352\002*Google::Identity::AccessContextManager::V1" + _LISTACCESSPOLICIESREQUEST.fields_by_name["parent"]._options = None + _LISTACCESSPOLICIESREQUEST.fields_by_name[ + "parent" + ]._serialized_options = ( + b"\340A\002\372A2\n0cloudresourcemanager.googleapis.com/Organization" + ) + _GETACCESSPOLICYREQUEST.fields_by_name["name"]._options = None + _GETACCESSPOLICYREQUEST.fields_by_name[ + "name" + ]._serialized_options = ( + b"\340A\002\372A2\n0accesscontextmanager.googleapis.com/AccessPolicy" + ) + _UPDATEACCESSPOLICYREQUEST.fields_by_name["policy"]._options = None + _UPDATEACCESSPOLICYREQUEST.fields_by_name[ + "policy" + ]._serialized_options = b"\340A\002" + _UPDATEACCESSPOLICYREQUEST.fields_by_name["update_mask"]._options = None + _UPDATEACCESSPOLICYREQUEST.fields_by_name[ + "update_mask" + ]._serialized_options = b"\340A\002" + _DELETEACCESSPOLICYREQUEST.fields_by_name["name"]._options = None + _DELETEACCESSPOLICYREQUEST.fields_by_name[ + "name" + ]._serialized_options = ( + b"\340A\002\372A2\n0accesscontextmanager.googleapis.com/AccessPolicy" + ) + _LISTACCESSLEVELSREQUEST.fields_by_name["parent"]._options = None + _LISTACCESSLEVELSREQUEST.fields_by_name[ + "parent" + ]._serialized_options = ( + b"\340A\002\372A1\022/accesscontextmanager.googleapis.com/AccessLevel" + ) + _GETACCESSLEVELREQUEST.fields_by_name["name"]._options = None + _GETACCESSLEVELREQUEST.fields_by_name[ + "name" + ]._serialized_options = ( + b"\340A\002\372A1\n/accesscontextmanager.googleapis.com/AccessLevel" + ) + _CREATEACCESSLEVELREQUEST.fields_by_name["parent"]._options = None + _CREATEACCESSLEVELREQUEST.fields_by_name[ + "parent" + ]._serialized_options = ( + b"\340A\002\372A1\022/accesscontextmanager.googleapis.com/AccessLevel" + ) + _CREATEACCESSLEVELREQUEST.fields_by_name["access_level"]._options = None + _CREATEACCESSLEVELREQUEST.fields_by_name[ + "access_level" + ]._serialized_options = b"\340A\002" + _UPDATEACCESSLEVELREQUEST.fields_by_name["access_level"]._options = None + _UPDATEACCESSLEVELREQUEST.fields_by_name[ + "access_level" + ]._serialized_options = b"\340A\002" + _UPDATEACCESSLEVELREQUEST.fields_by_name["update_mask"]._options = None + _UPDATEACCESSLEVELREQUEST.fields_by_name[ + "update_mask" + ]._serialized_options = b"\340A\002" + _DELETEACCESSLEVELREQUEST.fields_by_name["name"]._options = None + _DELETEACCESSLEVELREQUEST.fields_by_name[ + "name" + ]._serialized_options = ( + b"\340A\002\372A1\n/accesscontextmanager.googleapis.com/AccessLevel" + ) + _REPLACEACCESSLEVELSREQUEST.fields_by_name["parent"]._options = None + _REPLACEACCESSLEVELSREQUEST.fields_by_name[ + "parent" + ]._serialized_options = ( + b"\340A\002\372A1\022/accesscontextmanager.googleapis.com/AccessLevel" + ) + _REPLACEACCESSLEVELSREQUEST.fields_by_name["access_levels"]._options = None + _REPLACEACCESSLEVELSREQUEST.fields_by_name[ + "access_levels" + ]._serialized_options = b"\340A\002" + _LISTSERVICEPERIMETERSREQUEST.fields_by_name["parent"]._options = None + 
_LISTSERVICEPERIMETERSREQUEST.fields_by_name[ + "parent" + ]._serialized_options = ( + b"\340A\002\372A6\0224accesscontextmanager.googleapis.com/ServicePerimeter" + ) + _GETSERVICEPERIMETERREQUEST.fields_by_name["name"]._options = None + _GETSERVICEPERIMETERREQUEST.fields_by_name[ + "name" + ]._serialized_options = ( + b"\340A\002\372A6\n4accesscontextmanager.googleapis.com/ServicePerimeter" + ) + _CREATESERVICEPERIMETERREQUEST.fields_by_name["parent"]._options = None + _CREATESERVICEPERIMETERREQUEST.fields_by_name[ + "parent" + ]._serialized_options = ( + b"\340A\002\372A6\0224accesscontextmanager.googleapis.com/ServicePerimeter" + ) + _CREATESERVICEPERIMETERREQUEST.fields_by_name["service_perimeter"]._options = None + _CREATESERVICEPERIMETERREQUEST.fields_by_name[ + "service_perimeter" + ]._serialized_options = b"\340A\002" + _UPDATESERVICEPERIMETERREQUEST.fields_by_name["service_perimeter"]._options = None + _UPDATESERVICEPERIMETERREQUEST.fields_by_name[ + "service_perimeter" + ]._serialized_options = b"\340A\002" + _UPDATESERVICEPERIMETERREQUEST.fields_by_name["update_mask"]._options = None + _UPDATESERVICEPERIMETERREQUEST.fields_by_name[ + "update_mask" + ]._serialized_options = b"\340A\002" + _DELETESERVICEPERIMETERREQUEST.fields_by_name["name"]._options = None + _DELETESERVICEPERIMETERREQUEST.fields_by_name[ + "name" + ]._serialized_options = ( + b"\340A\002\372A6\n4accesscontextmanager.googleapis.com/ServicePerimeter" + ) + _REPLACESERVICEPERIMETERSREQUEST.fields_by_name["parent"]._options = None + _REPLACESERVICEPERIMETERSREQUEST.fields_by_name[ + "parent" + ]._serialized_options = ( + b"\340A\002\372A6\0224accesscontextmanager.googleapis.com/ServicePerimeter" + ) + _REPLACESERVICEPERIMETERSREQUEST.fields_by_name[ + "service_perimeters" + ]._options = None + _REPLACESERVICEPERIMETERSREQUEST.fields_by_name[ + "service_perimeters" + ]._serialized_options = b"\340A\002" + _COMMITSERVICEPERIMETERSREQUEST.fields_by_name["parent"]._options = None + _COMMITSERVICEPERIMETERSREQUEST.fields_by_name[ + "parent" + ]._serialized_options = ( + b"\340A\002\372A6\0224accesscontextmanager.googleapis.com/ServicePerimeter" + ) + _LISTGCPUSERACCESSBINDINGSREQUEST.fields_by_name["parent"]._options = None + _LISTGCPUSERACCESSBINDINGSREQUEST.fields_by_name[ + "parent" + ]._serialized_options = ( + b"\340A\002\372A2\n0cloudresourcemanager.googleapis.com/Organization" + ) + _LISTGCPUSERACCESSBINDINGSREQUEST.fields_by_name["page_size"]._options = None + _LISTGCPUSERACCESSBINDINGSREQUEST.fields_by_name[ + "page_size" + ]._serialized_options = b"\340A\001" + _LISTGCPUSERACCESSBINDINGSREQUEST.fields_by_name["page_token"]._options = None + _LISTGCPUSERACCESSBINDINGSREQUEST.fields_by_name[ + "page_token" + ]._serialized_options = b"\340A\001" + _GETGCPUSERACCESSBINDINGREQUEST.fields_by_name["name"]._options = None + _GETGCPUSERACCESSBINDINGREQUEST.fields_by_name[ + "name" + ]._serialized_options = ( + b"\340A\002\372A:\n8accesscontextmanager.googleapis.com/GcpUserAccessBinding" + ) + _CREATEGCPUSERACCESSBINDINGREQUEST.fields_by_name["parent"]._options = None + _CREATEGCPUSERACCESSBINDINGREQUEST.fields_by_name[ + "parent" + ]._serialized_options = ( + b"\340A\002\372A2\n0cloudresourcemanager.googleapis.com/Organization" + ) + _CREATEGCPUSERACCESSBINDINGREQUEST.fields_by_name[ + "gcp_user_access_binding" + ]._options = None + _CREATEGCPUSERACCESSBINDINGREQUEST.fields_by_name[ + "gcp_user_access_binding" + ]._serialized_options = b"\340A\002" + _UPDATEGCPUSERACCESSBINDINGREQUEST.fields_by_name[ + 
"gcp_user_access_binding" + ]._options = None + _UPDATEGCPUSERACCESSBINDINGREQUEST.fields_by_name[ + "gcp_user_access_binding" + ]._serialized_options = b"\340A\002" + _UPDATEGCPUSERACCESSBINDINGREQUEST.fields_by_name["update_mask"]._options = None + _UPDATEGCPUSERACCESSBINDINGREQUEST.fields_by_name[ + "update_mask" + ]._serialized_options = b"\340A\002" + _DELETEGCPUSERACCESSBINDINGREQUEST.fields_by_name["name"]._options = None + _DELETEGCPUSERACCESSBINDINGREQUEST.fields_by_name[ + "name" + ]._serialized_options = ( + b"\340A\002\372A:\n8accesscontextmanager.googleapis.com/GcpUserAccessBinding" + ) + _ACCESSCONTEXTMANAGER._options = None + _ACCESSCONTEXTMANAGER._serialized_options = b"\312A#accesscontextmanager.googleapis.com\322A.https://www.googleapis.com/auth/cloud-platform" + _ACCESSCONTEXTMANAGER.methods_by_name["ListAccessPolicies"]._options = None + _ACCESSCONTEXTMANAGER.methods_by_name[ + "ListAccessPolicies" + ]._serialized_options = b"\202\323\344\223\002\024\022\022/v1/accessPolicies" + _ACCESSCONTEXTMANAGER.methods_by_name["GetAccessPolicy"]._options = None + _ACCESSCONTEXTMANAGER.methods_by_name[ + "GetAccessPolicy" + ]._serialized_options = ( + b"\332A\004name\202\323\344\223\002\035\022\033/v1/{name=accessPolicies/*}" + ) + _ACCESSCONTEXTMANAGER.methods_by_name["CreateAccessPolicy"]._options = None + _ACCESSCONTEXTMANAGER.methods_by_name[ + "CreateAccessPolicy" + ]._serialized_options = b'\312A5\n\014AccessPolicy\022%AccessContextManagerOperationMetadata\202\323\344\223\002\027"\022/v1/accessPolicies:\001*' + _ACCESSCONTEXTMANAGER.methods_by_name["UpdateAccessPolicy"]._options = None + _ACCESSCONTEXTMANAGER.methods_by_name[ + "UpdateAccessPolicy" + ]._serialized_options = b'\312A5\n\014AccessPolicy\022%AccessContextManagerOperationMetadata\332A\022policy,update_mask\202\323\344\223\002,2"/v1/{policy.name=accessPolicies/*}:\006policy' + _ACCESSCONTEXTMANAGER.methods_by_name["DeleteAccessPolicy"]._options = None + _ACCESSCONTEXTMANAGER.methods_by_name[ + "DeleteAccessPolicy" + ]._serialized_options = b"\312A>\n\025google.protobuf.Empty\022%AccessContextManagerOperationMetadata\332A\004name\202\323\344\223\002\035*\033/v1/{name=accessPolicies/*}" + _ACCESSCONTEXTMANAGER.methods_by_name["ListAccessLevels"]._options = None + _ACCESSCONTEXTMANAGER.methods_by_name[ + "ListAccessLevels" + ]._serialized_options = b"\332A\006parent\202\323\344\223\002,\022*/v1/{parent=accessPolicies/*}/accessLevels" + _ACCESSCONTEXTMANAGER.methods_by_name["GetAccessLevel"]._options = None + _ACCESSCONTEXTMANAGER.methods_by_name[ + "GetAccessLevel" + ]._serialized_options = b"\332A\004name\202\323\344\223\002,\022*/v1/{name=accessPolicies/*/accessLevels/*}" + _ACCESSCONTEXTMANAGER.methods_by_name["CreateAccessLevel"]._options = None + _ACCESSCONTEXTMANAGER.methods_by_name[ + "CreateAccessLevel" + ]._serialized_options = b'\312A4\n\013AccessLevel\022%AccessContextManagerOperationMetadata\332A\023parent,access_level\202\323\344\223\002:"*/v1/{parent=accessPolicies/*}/accessLevels:\014access_level' + _ACCESSCONTEXTMANAGER.methods_by_name["UpdateAccessLevel"]._options = None + _ACCESSCONTEXTMANAGER.methods_by_name[ + "UpdateAccessLevel" + ]._serialized_options = b"\312A4\n\013AccessLevel\022%AccessContextManagerOperationMetadata\332A\030access_level,update_mask\202\323\344\223\002G27/v1/{access_level.name=accessPolicies/*/accessLevels/*}:\014access_level" + _ACCESSCONTEXTMANAGER.methods_by_name["DeleteAccessLevel"]._options = None + _ACCESSCONTEXTMANAGER.methods_by_name[ + "DeleteAccessLevel" + 
]._serialized_options = b"\312A>\n\025google.protobuf.Empty\022%AccessContextManagerOperationMetadata\332A\004name\202\323\344\223\002,**/v1/{name=accessPolicies/*/accessLevels/*}" + _ACCESSCONTEXTMANAGER.methods_by_name["ReplaceAccessLevels"]._options = None + _ACCESSCONTEXTMANAGER.methods_by_name[ + "ReplaceAccessLevels" + ]._serialized_options = b'\312AD\n\033ReplaceAccessLevelsResponse\022%AccessContextManagerOperationMetadata\202\323\344\223\002:"5/v1/{parent=accessPolicies/*}/accessLevels:replaceAll:\001*' + _ACCESSCONTEXTMANAGER.methods_by_name["ListServicePerimeters"]._options = None + _ACCESSCONTEXTMANAGER.methods_by_name[ + "ListServicePerimeters" + ]._serialized_options = b"\332A\006parent\202\323\344\223\0021\022//v1/{parent=accessPolicies/*}/servicePerimeters" + _ACCESSCONTEXTMANAGER.methods_by_name["GetServicePerimeter"]._options = None + _ACCESSCONTEXTMANAGER.methods_by_name[ + "GetServicePerimeter" + ]._serialized_options = b"\332A\004name\202\323\344\223\0021\022//v1/{name=accessPolicies/*/servicePerimeters/*}" + _ACCESSCONTEXTMANAGER.methods_by_name["CreateServicePerimeter"]._options = None + _ACCESSCONTEXTMANAGER.methods_by_name[ + "CreateServicePerimeter" + ]._serialized_options = b'\312A9\n\020ServicePerimeter\022%AccessContextManagerOperationMetadata\332A\030parent,service_perimeter\202\323\344\223\002D"//v1/{parent=accessPolicies/*}/servicePerimeters:\021service_perimeter' + _ACCESSCONTEXTMANAGER.methods_by_name["UpdateServicePerimeter"]._options = None + _ACCESSCONTEXTMANAGER.methods_by_name[ + "UpdateServicePerimeter" + ]._serialized_options = b"\312A9\n\020ServicePerimeter\022%AccessContextManagerOperationMetadata\332A\035service_perimeter,update_mask\202\323\344\223\002V2A/v1/{service_perimeter.name=accessPolicies/*/servicePerimeters/*}:\021service_perimeter" + _ACCESSCONTEXTMANAGER.methods_by_name["DeleteServicePerimeter"]._options = None + _ACCESSCONTEXTMANAGER.methods_by_name[ + "DeleteServicePerimeter" + ]._serialized_options = b"\312A>\n\025google.protobuf.Empty\022%AccessContextManagerOperationMetadata\332A\004name\202\323\344\223\0021*//v1/{name=accessPolicies/*/servicePerimeters/*}" + _ACCESSCONTEXTMANAGER.methods_by_name["ReplaceServicePerimeters"]._options = None + _ACCESSCONTEXTMANAGER.methods_by_name[ + "ReplaceServicePerimeters" + ]._serialized_options = b'\312AI\n ReplaceServicePerimetersResponse\022%AccessContextManagerOperationMetadata\202\323\344\223\002?":/v1/{parent=accessPolicies/*}/servicePerimeters:replaceAll:\001*' + _ACCESSCONTEXTMANAGER.methods_by_name["CommitServicePerimeters"]._options = None + _ACCESSCONTEXTMANAGER.methods_by_name[ + "CommitServicePerimeters" + ]._serialized_options = b'\312AH\n\037CommitServicePerimetersResponse\022%AccessContextManagerOperationMetadata\202\323\344\223\002;"6/v1/{parent=accessPolicies/*}/servicePerimeters:commit:\001*' + _ACCESSCONTEXTMANAGER.methods_by_name["ListGcpUserAccessBindings"]._options = None + _ACCESSCONTEXTMANAGER.methods_by_name[ + "ListGcpUserAccessBindings" + ]._serialized_options = b"\332A\006parent\202\323\344\223\0024\0222/v1/{parent=organizations/*}/gcpUserAccessBindings" + _ACCESSCONTEXTMANAGER.methods_by_name["GetGcpUserAccessBinding"]._options = None + _ACCESSCONTEXTMANAGER.methods_by_name[ + "GetGcpUserAccessBinding" + ]._serialized_options = b"\332A\004name\202\323\344\223\0024\0222/v1/{name=organizations/*/gcpUserAccessBindings/*}" + _ACCESSCONTEXTMANAGER.methods_by_name["CreateGcpUserAccessBinding"]._options = None + _ACCESSCONTEXTMANAGER.methods_by_name[ + 
"CreateGcpUserAccessBinding" + ]._serialized_options = b'\312A=\n\024GcpUserAccessBinding\022%GcpUserAccessBindingOperationMetadata\332A\036parent,gcp_user_access_binding\202\323\344\223\002M"2/v1/{parent=organizations/*}/gcpUserAccessBindings:\027gcp_user_access_binding' + _ACCESSCONTEXTMANAGER.methods_by_name["UpdateGcpUserAccessBinding"]._options = None + _ACCESSCONTEXTMANAGER.methods_by_name[ + "UpdateGcpUserAccessBinding" + ]._serialized_options = b"\312A=\n\024GcpUserAccessBinding\022%GcpUserAccessBindingOperationMetadata\332A#gcp_user_access_binding,update_mask\202\323\344\223\002e2J/v1/{gcp_user_access_binding.name=organizations/*/gcpUserAccessBindings/*}:\027gcp_user_access_binding" + _ACCESSCONTEXTMANAGER.methods_by_name["DeleteGcpUserAccessBinding"]._options = None + _ACCESSCONTEXTMANAGER.methods_by_name[ + "DeleteGcpUserAccessBinding" + ]._serialized_options = b"\312A>\n\025google.protobuf.Empty\022%GcpUserAccessBindingOperationMetadata\332A\004name\202\323\344\223\0024*2/v1/{name=organizations/*/gcpUserAccessBindings/*}" + _ACCESSCONTEXTMANAGER.methods_by_name["SetIamPolicy"]._options = None + _ACCESSCONTEXTMANAGER.methods_by_name[ + "SetIamPolicy" + ]._serialized_options = ( + b'\202\323\344\223\0021",/v1/{resource=accessPolicies/*}:setIamPolicy:\001*' + ) + _ACCESSCONTEXTMANAGER.methods_by_name["GetIamPolicy"]._options = None + _ACCESSCONTEXTMANAGER.methods_by_name[ + "GetIamPolicy" + ]._serialized_options = ( + b'\202\323\344\223\0021",/v1/{resource=accessPolicies/*}:getIamPolicy:\001*' + ) + _ACCESSCONTEXTMANAGER.methods_by_name["TestIamPermissions"]._options = None + _ACCESSCONTEXTMANAGER.methods_by_name[ + "TestIamPermissions" + ]._serialized_options = b'\202\323\344\223\002\314\001"2/v1/{resource=accessPolicies/*}:testIamPermissions:\001*ZF"A/v1/{resource=accessPolicies/*/accessLevels/*}:testIamPermissions:\001*ZK"F/v1/{resource=accessPolicies/*/servicePerimeters/*}:testIamPermissions:\001*' + _globals["_LEVELFORMAT"]._serialized_start = 5065 + _globals["_LEVELFORMAT"]._serialized_end = 5133 + _globals["_LISTACCESSPOLICIESREQUEST"]._serialized_start = 617 + _globals["_LISTACCESSPOLICIESREQUEST"]._serialized_end = 757 + _globals["_LISTACCESSPOLICIESRESPONSE"]._serialized_start = 760 + _globals["_LISTACCESSPOLICIESRESPONSE"]._serialized_end = 893 + _globals["_GETACCESSPOLICYREQUEST"]._serialized_start = 895 + _globals["_GETACCESSPOLICYREQUEST"]._serialized_end = 991 + _globals["_UPDATEACCESSPOLICYREQUEST"]._serialized_start = 994 + _globals["_UPDATEACCESSPOLICYREQUEST"]._serialized_end = 1151 + _globals["_DELETEACCESSPOLICYREQUEST"]._serialized_start = 1153 + _globals["_DELETEACCESSPOLICYREQUEST"]._serialized_end = 1252 + _globals["_LISTACCESSLEVELSREQUEST"]._serialized_start = 1255 + _globals["_LISTACCESSLEVELSREQUEST"]._serialized_end = 1475 + _globals["_LISTACCESSLEVELSRESPONSE"]._serialized_start = 1478 + _globals["_LISTACCESSLEVELSRESPONSE"]._serialized_end = 1606 + _globals["_GETACCESSLEVELREQUEST"]._serialized_start = 1609 + _globals["_GETACCESSLEVELREQUEST"]._serialized_end = 1786 + _globals["_CREATEACCESSLEVELREQUEST"]._serialized_start = 1789 + _globals["_CREATEACCESSLEVELREQUEST"]._serialized_end = 1969 + _globals["_UPDATEACCESSLEVELREQUEST"]._serialized_start = 1972 + _globals["_UPDATEACCESSLEVELREQUEST"]._serialized_end = 2133 + _globals["_DELETEACCESSLEVELREQUEST"]._serialized_start = 2135 + _globals["_DELETEACCESSLEVELREQUEST"]._serialized_end = 2232 + _globals["_REPLACEACCESSLEVELSREQUEST"]._serialized_start = 2235 + 
_globals["_REPLACEACCESSLEVELSREQUEST"]._serialized_end = 2432 + _globals["_REPLACEACCESSLEVELSRESPONSE"]._serialized_start = 2434 + _globals["_REPLACEACCESSLEVELSRESPONSE"]._serialized_end = 2540 + _globals["_LISTSERVICEPERIMETERSREQUEST"]._serialized_start = 2543 + _globals["_LISTSERVICEPERIMETERSREQUEST"]._serialized_end = 2690 + _globals["_LISTSERVICEPERIMETERSRESPONSE"]._serialized_start = 2693 + _globals["_LISTSERVICEPERIMETERSRESPONSE"]._serialized_end = 2836 + _globals["_GETSERVICEPERIMETERREQUEST"]._serialized_start = 2838 + _globals["_GETSERVICEPERIMETERREQUEST"]._serialized_end = 2942 + _globals["_CREATESERVICEPERIMETERREQUEST"]._serialized_start = 2945 + _globals["_CREATESERVICEPERIMETERREQUEST"]._serialized_end = 3145 + _globals["_UPDATESERVICEPERIMETERREQUEST"]._serialized_start = 3148 + _globals["_UPDATESERVICEPERIMETERREQUEST"]._serialized_end = 3324 + _globals["_DELETESERVICEPERIMETERREQUEST"]._serialized_start = 3326 + _globals["_DELETESERVICEPERIMETERREQUEST"]._serialized_end = 3433 + _globals["_REPLACESERVICEPERIMETERSREQUEST"]._serialized_start = 3436 + _globals["_REPLACESERVICEPERIMETERSREQUEST"]._serialized_end = 3653 + _globals["_REPLACESERVICEPERIMETERSRESPONSE"]._serialized_start = 3655 + _globals["_REPLACESERVICEPERIMETERSRESPONSE"]._serialized_end = 3776 + _globals["_COMMITSERVICEPERIMETERSREQUEST"]._serialized_start = 3778 + _globals["_COMMITSERVICEPERIMETERSREQUEST"]._serialized_end = 3902 + _globals["_COMMITSERVICEPERIMETERSRESPONSE"]._serialized_start = 3904 + _globals["_COMMITSERVICEPERIMETERSRESPONSE"]._serialized_end = 4024 + _globals["_LISTGCPUSERACCESSBINDINGSREQUEST"]._serialized_start = 4027 + _globals["_LISTGCPUSERACCESSBINDINGSREQUEST"]._serialized_end = 4184 + _globals["_LISTGCPUSERACCESSBINDINGSRESPONSE"]._serialized_start = 4187 + _globals["_LISTGCPUSERACCESSBINDINGSRESPONSE"]._serialized_end = 4344 + _globals["_GETGCPUSERACCESSBINDINGREQUEST"]._serialized_start = 4346 + _globals["_GETGCPUSERACCESSBINDINGREQUEST"]._serialized_end = 4458 + _globals["_CREATEGCPUSERACCESSBINDINGREQUEST"]._serialized_start = 4461 + _globals["_CREATEGCPUSERACCESSBINDINGREQUEST"]._serialized_end = 4671 + _globals["_UPDATEGCPUSERACCESSBINDINGREQUEST"]._serialized_start = 4674 + _globals["_UPDATEGCPUSERACCESSBINDINGREQUEST"]._serialized_end = 4864 + _globals["_DELETEGCPUSERACCESSBINDINGREQUEST"]._serialized_start = 4866 + _globals["_DELETEGCPUSERACCESSBINDINGREQUEST"]._serialized_end = 4981 + _globals["_GCPUSERACCESSBINDINGOPERATIONMETADATA"]._serialized_start = 4983 + _globals["_GCPUSERACCESSBINDINGOPERATIONMETADATA"]._serialized_end = 5022 + _globals["_ACCESSCONTEXTMANAGEROPERATIONMETADATA"]._serialized_start = 5024 + _globals["_ACCESSCONTEXTMANAGEROPERATIONMETADATA"]._serialized_end = 5063 + _globals["_ACCESSCONTEXTMANAGER"]._serialized_start = 5136 + _globals["_ACCESSCONTEXTMANAGER"]._serialized_end = 11649 +# @@protoc_insertion_point(module_scope) diff --git a/packages/google-cloud-access-context-manager/google/identity/accesscontextmanager/v1/access_level_pb2.py b/packages/google-cloud-access-context-manager/google/identity/accesscontextmanager/v1/access_level_pb2.py new file mode 100644 index 000000000000..68b139c81757 --- /dev/null +++ b/packages/google-cloud-access-context-manager/google/identity/accesscontextmanager/v1/access_level_pb2.py @@ -0,0 +1,66 @@ +# -*- coding: utf-8 -*- + +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: google/identity/accesscontextmanager/v1/access_level.proto +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder + +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.api import resource_pb2 as google_dot_api_dot_resource__pb2 +from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 +from google.type import expr_pb2 as google_dot_type_dot_expr__pb2 + +from google.identity.accesscontextmanager.type import ( + device_resources_pb2 as google_dot_identity_dot_accesscontextmanager_dot_type_dot_device__resources__pb2, +) + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile( + b'\n:google/identity/accesscontextmanager/v1/access_level.proto\x12\'google.identity.accesscontextmanager.v1\x1a\x19google/api/resource.proto\x1a@google/identity/accesscontextmanager/type/device_resources.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x16google/type/expr.proto"\xaa\x03\n\x0b\x41\x63\x63\x65ssLevel\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\r\n\x05title\x18\x02 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x03 \x01(\t\x12\x44\n\x05\x62\x61sic\x18\x04 \x01(\x0b\x32\x33.google.identity.accesscontextmanager.v1.BasicLevelH\x00\x12\x46\n\x06\x63ustom\x18\x05 \x01(\x0b\x32\x34.google.identity.accesscontextmanager.v1.CustomLevelH\x00\x12/\n\x0b\x63reate_time\x18\x06 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12/\n\x0bupdate_time\x18\x07 \x01(\x0b\x32\x1a.google.protobuf.Timestamp:p\xea\x41m\n/accesscontextmanager.googleapis.com/AccessLevel\x12:accessPolicies/{access_policy}/accessLevels/{access_level}B\x07\n\x05level"\xef\x01\n\nBasicLevel\x12\x46\n\nconditions\x18\x01 \x03(\x0b\x32\x32.google.identity.accesscontextmanager.v1.Condition\x12j\n\x12\x63ombining_function\x18\x02 \x01(\x0e\x32N.google.identity.accesscontextmanager.v1.BasicLevel.ConditionCombiningFunction"-\n\x1a\x43onditionCombiningFunction\x12\x07\n\x03\x41ND\x10\x00\x12\x06\n\x02OR\x10\x01"\xc3\x01\n\tCondition\x12\x16\n\x0eip_subnetworks\x18\x01 \x03(\t\x12L\n\rdevice_policy\x18\x02 \x01(\x0b\x32\x35.google.identity.accesscontextmanager.v1.DevicePolicy\x12\x1e\n\x16required_access_levels\x18\x03 \x03(\t\x12\x0e\n\x06negate\x18\x05 \x01(\x08\x12\x0f\n\x07members\x18\x06 \x03(\t\x12\x0f\n\x07regions\x18\x07 \x03(\t".\n\x0b\x43ustomLevel\x12\x1f\n\x04\x65xpr\x18\x01 \x01(\x0b\x32\x11.google.type.Expr"\x89\x03\n\x0c\x44\x65vicePolicy\x12\x1a\n\x12require_screenlock\x18\x01 \x01(\x08\x12\x66\n\x1b\x61llowed_encryption_statuses\x18\x02 \x03(\x0e\x32\x41.google.identity.accesscontextmanager.type.DeviceEncryptionStatus\x12M\n\x0eos_constraints\x18\x03 \x03(\x0b\x32\x35.google.identity.accesscontextmanager.v1.OsConstraint\x12j\n allowed_device_management_levels\x18\x06 
\x03(\x0e\x32@.google.identity.accesscontextmanager.type.DeviceManagementLevel\x12\x1e\n\x16require_admin_approval\x18\x07 \x01(\x08\x12\x1a\n\x12require_corp_owned\x18\x08 \x01(\x08"\x8f\x01\n\x0cOsConstraint\x12\x42\n\x07os_type\x18\x01 \x01(\x0e\x32\x31.google.identity.accesscontextmanager.type.OsType\x12\x17\n\x0fminimum_version\x18\x02 \x01(\t\x12"\n\x1arequire_verified_chrome_os\x18\x03 \x01(\x08\x42\xa7\x02\n+com.google.identity.accesscontextmanager.v1B\x10\x41\x63\x63\x65ssLevelProtoP\x01Z\\cloud.google.com/go/accesscontextmanager/apiv1/accesscontextmanagerpb;accesscontextmanagerpb\xa2\x02\x04GACM\xaa\x02\'Google.Identity.AccessContextManager.V1\xca\x02\'Google\\Identity\\AccessContextManager\\V1\xea\x02*Google::Identity::AccessContextManager::V1b\x06proto3' +) + +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages( + DESCRIPTOR, "google.identity.accesscontextmanager.v1.access_level_pb2", _globals +) +if _descriptor._USE_C_DESCRIPTORS == False: + DESCRIPTOR._options = None + DESCRIPTOR._serialized_options = b"\n+com.google.identity.accesscontextmanager.v1B\020AccessLevelProtoP\001Z\\cloud.google.com/go/accesscontextmanager/apiv1/accesscontextmanagerpb;accesscontextmanagerpb\242\002\004GACM\252\002'Google.Identity.AccessContextManager.V1\312\002'Google\\Identity\\AccessContextManager\\V1\352\002*Google::Identity::AccessContextManager::V1" + _ACCESSLEVEL._options = None + _ACCESSLEVEL._serialized_options = b"\352Am\n/accesscontextmanager.googleapis.com/AccessLevel\022:accessPolicies/{access_policy}/accessLevels/{access_level}" + _globals["_ACCESSLEVEL"]._serialized_start = 254 + _globals["_ACCESSLEVEL"]._serialized_end = 680 + _globals["_BASICLEVEL"]._serialized_start = 683 + _globals["_BASICLEVEL"]._serialized_end = 922 + _globals["_BASICLEVEL_CONDITIONCOMBININGFUNCTION"]._serialized_start = 877 + _globals["_BASICLEVEL_CONDITIONCOMBININGFUNCTION"]._serialized_end = 922 + _globals["_CONDITION"]._serialized_start = 925 + _globals["_CONDITION"]._serialized_end = 1120 + _globals["_CUSTOMLEVEL"]._serialized_start = 1122 + _globals["_CUSTOMLEVEL"]._serialized_end = 1168 + _globals["_DEVICEPOLICY"]._serialized_start = 1171 + _globals["_DEVICEPOLICY"]._serialized_end = 1564 + _globals["_OSCONSTRAINT"]._serialized_start = 1567 + _globals["_OSCONSTRAINT"]._serialized_end = 1710 +# @@protoc_insertion_point(module_scope) diff --git a/packages/google-cloud-access-context-manager/google/identity/accesscontextmanager/v1/access_policy_pb2.py b/packages/google-cloud-access-context-manager/google/identity/accesscontextmanager/v1/access_policy_pb2.py new file mode 100644 index 000000000000..5867beec87b4 --- /dev/null +++ b/packages/google-cloud-access-context-manager/google/identity/accesscontextmanager/v1/access_policy_pb2.py @@ -0,0 +1,49 @@ +# -*- coding: utf-8 -*- + +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: google/identity/accesscontextmanager/v1/access_policy.proto +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder + +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.api import resource_pb2 as google_dot_api_dot_resource__pb2 +from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile( + b"\n;google/identity/accesscontextmanager/v1/access_policy.proto\x12'google.identity.accesscontextmanager.v1\x1a\x19google/api/resource.proto\x1a\x1fgoogle/protobuf/timestamp.proto\"\x92\x02\n\x0c\x41\x63\x63\x65ssPolicy\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0e\n\x06parent\x18\x02 \x01(\t\x12\r\n\x05title\x18\x03 \x01(\t\x12\x0e\n\x06scopes\x18\x07 \x03(\t\x12/\n\x0b\x63reate_time\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12/\n\x0bupdate_time\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x0c\n\x04\x65tag\x18\x06 \x01(\t:U\xea\x41R\n0accesscontextmanager.googleapis.com/AccessPolicy\x12\x1e\x61\x63\x63\x65ssPolicies/{access_policy}B\xa2\x02\n+com.google.identity.accesscontextmanager.v1B\x0bPolicyProtoP\x01Z\\cloud.google.com/go/accesscontextmanager/apiv1/accesscontextmanagerpb;accesscontextmanagerpb\xa2\x02\x04GACM\xaa\x02'Google.Identity.AccessContextManager.V1\xca\x02'Google\\Identity\\AccessContextManager\\V1\xea\x02*Google::Identity::AccessContextManager::V1b\x06proto3" +) + +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages( + DESCRIPTOR, "google.identity.accesscontextmanager.v1.access_policy_pb2", _globals +) +if _descriptor._USE_C_DESCRIPTORS == False: + DESCRIPTOR._options = None + DESCRIPTOR._serialized_options = b"\n+com.google.identity.accesscontextmanager.v1B\013PolicyProtoP\001Z\\cloud.google.com/go/accesscontextmanager/apiv1/accesscontextmanagerpb;accesscontextmanagerpb\242\002\004GACM\252\002'Google.Identity.AccessContextManager.V1\312\002'Google\\Identity\\AccessContextManager\\V1\352\002*Google::Identity::AccessContextManager::V1" + _ACCESSPOLICY._options = None + _ACCESSPOLICY._serialized_options = b"\352AR\n0accesscontextmanager.googleapis.com/AccessPolicy\022\036accessPolicies/{access_policy}" + _globals["_ACCESSPOLICY"]._serialized_start = 165 + _globals["_ACCESSPOLICY"]._serialized_end = 439 +# @@protoc_insertion_point(module_scope) diff --git a/packages/google-cloud-access-context-manager/google/identity/accesscontextmanager/v1/gcp_user_access_binding_pb2.py b/packages/google-cloud-access-context-manager/google/identity/accesscontextmanager/v1/gcp_user_access_binding_pb2.py new file mode 100644 index 000000000000..203b0b30aead --- /dev/null +++ b/packages/google-cloud-access-context-manager/google/identity/accesscontextmanager/v1/gcp_user_access_binding_pb2.py @@ -0,0 +1,63 @@ +# -*- coding: utf-8 -*- + +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: google/identity/accesscontextmanager/v1/gcp_user_access_binding.proto +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder + +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 +from google.api import resource_pb2 as google_dot_api_dot_resource__pb2 + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile( + b"\nEgoogle/identity/accesscontextmanager/v1/gcp_user_access_binding.proto\x12'google.identity.accesscontextmanager.v1\x1a\x1fgoogle/api/field_behavior.proto\x1a\x19google/api/resource.proto\"\xa3\x02\n\x14GcpUserAccessBinding\x12\x11\n\x04name\x18\x01 \x01(\tB\x03\xe0\x41\x05\x12\x19\n\tgroup_key\x18\x02 \x01(\tB\x06\xe0\x41\x02\xe0\x41\x05\x12N\n\raccess_levels\x18\x03 \x03(\tB7\xe0\x41\x02\xfa\x41\x31\n/accesscontextmanager.googleapis.com/AccessLevel:\x8c\x01\xea\x41\x88\x01\n8accesscontextmanager.googleapis.com/GcpUserAccessBinding\x12Lorganizations/{organization}/gcpUserAccessBindings/{gcp_user_access_binding}B\xb0\x02\n+com.google.identity.accesscontextmanager.v1B\x19GcpUserAccessBindingProtoP\x01Z\\cloud.google.com/go/accesscontextmanager/apiv1/accesscontextmanagerpb;accesscontextmanagerpb\xa2\x02\x04GACM\xaa\x02'Google.Identity.AccessContextManager.V1\xca\x02'Google\\Identity\\AccessContextManager\\V1\xea\x02*Google::Identity::AccessContextManager::V1b\x06proto3" +) + +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages( + DESCRIPTOR, + "google.identity.accesscontextmanager.v1.gcp_user_access_binding_pb2", + _globals, +) +if _descriptor._USE_C_DESCRIPTORS == False: + DESCRIPTOR._options = None + DESCRIPTOR._serialized_options = b"\n+com.google.identity.accesscontextmanager.v1B\031GcpUserAccessBindingProtoP\001Z\\cloud.google.com/go/accesscontextmanager/apiv1/accesscontextmanagerpb;accesscontextmanagerpb\242\002\004GACM\252\002'Google.Identity.AccessContextManager.V1\312\002'Google\\Identity\\AccessContextManager\\V1\352\002*Google::Identity::AccessContextManager::V1" + _GCPUSERACCESSBINDING.fields_by_name["name"]._options = None + _GCPUSERACCESSBINDING.fields_by_name["name"]._serialized_options = b"\340A\005" + _GCPUSERACCESSBINDING.fields_by_name["group_key"]._options = None + _GCPUSERACCESSBINDING.fields_by_name[ + "group_key" + ]._serialized_options = b"\340A\002\340A\005" + _GCPUSERACCESSBINDING.fields_by_name["access_levels"]._options = None + _GCPUSERACCESSBINDING.fields_by_name[ + "access_levels" + ]._serialized_options = ( + b"\340A\002\372A1\n/accesscontextmanager.googleapis.com/AccessLevel" + ) + _GCPUSERACCESSBINDING._options = None + _GCPUSERACCESSBINDING._serialized_options = 
b"\352A\210\001\n8accesscontextmanager.googleapis.com/GcpUserAccessBinding\022Lorganizations/{organization}/gcpUserAccessBindings/{gcp_user_access_binding}" + _globals["_GCPUSERACCESSBINDING"]._serialized_start = 175 + _globals["_GCPUSERACCESSBINDING"]._serialized_end = 466 +# @@protoc_insertion_point(module_scope) diff --git a/packages/google-cloud-access-context-manager/google/identity/accesscontextmanager/v1/service_perimeter_pb2.py b/packages/google-cloud-access-context-manager/google/identity/accesscontextmanager/v1/service_perimeter_pb2.py new file mode 100644 index 000000000000..d0aac31c2f4b --- /dev/null +++ b/packages/google-cloud-access-context-manager/google/identity/accesscontextmanager/v1/service_perimeter_pb2.py @@ -0,0 +1,77 @@ +# -*- coding: utf-8 -*- + +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: google/identity/accesscontextmanager/v1/service_perimeter.proto +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder + +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.api import resource_pb2 as google_dot_api_dot_resource__pb2 +from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile( + b'\n?google/identity/accesscontextmanager/v1/service_perimeter.proto\x12\'google.identity.accesscontextmanager.v1\x1a\x19google/api/resource.proto\x1a\x1fgoogle/protobuf/timestamp.proto"\x93\x05\n\x10ServicePerimeter\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\r\n\x05title\x18\x02 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x03 \x01(\t\x12/\n\x0b\x63reate_time\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12/\n\x0bupdate_time\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12_\n\x0eperimeter_type\x18\x06 \x01(\x0e\x32G.google.identity.accesscontextmanager.v1.ServicePerimeter.PerimeterType\x12O\n\x06status\x18\x07 \x01(\x0b\x32?.google.identity.accesscontextmanager.v1.ServicePerimeterConfig\x12M\n\x04spec\x18\x08 \x01(\x0b\x32?.google.identity.accesscontextmanager.v1.ServicePerimeterConfig\x12!\n\x19use_explicit_dry_run_spec\x18\t \x01(\x08"F\n\rPerimeterType\x12\x1a\n\x16PERIMETER_TYPE_REGULAR\x10\x00\x12\x19\n\x15PERIMETER_TYPE_BRIDGE\x10\x01:\x7f\xea\x41|\n4accesscontextmanager.googleapis.com/ServicePerimeter\x12\x44\x61\x63\x63\x65ssPolicies/{access_policy}/servicePerimeters/{service_perimeter}"\xb5\x0f\n\x16ServicePerimeterConfig\x12\x11\n\tresources\x18\x01 \x03(\t\x12\x15\n\raccess_levels\x18\x02 \x03(\t\x12\x1b\n\x13restricted_services\x18\x04 \x03(\t\x12v\n\x17vpc_accessible_services\x18\n \x01(\x0b\x32U.google.identity.accesscontextmanager.v1.ServicePerimeterConfig.VpcAccessibleServices\x12g\n\x10ingress_policies\x18\x08 
\x03(\x0b\x32M.google.identity.accesscontextmanager.v1.ServicePerimeterConfig.IngressPolicy\x12\x65\n\x0f\x65gress_policies\x18\t \x03(\x0b\x32L.google.identity.accesscontextmanager.v1.ServicePerimeterConfig.EgressPolicy\x1aM\n\x15VpcAccessibleServices\x12\x1a\n\x12\x65nable_restriction\x18\x01 \x01(\x08\x12\x18\n\x10\x61llowed_services\x18\x02 \x03(\t\x1a@\n\x0eMethodSelector\x12\x10\n\x06method\x18\x01 \x01(\tH\x00\x12\x14\n\npermission\x18\x02 \x01(\tH\x00\x42\x06\n\x04kind\x1a\x8e\x01\n\x0c\x41piOperation\x12\x14\n\x0cservice_name\x18\x01 \x01(\t\x12h\n\x10method_selectors\x18\x02 \x03(\x0b\x32N.google.identity.accesscontextmanager.v1.ServicePerimeterConfig.MethodSelector\x1a\x45\n\rIngressSource\x12\x16\n\x0c\x61\x63\x63\x65ss_level\x18\x01 \x01(\tH\x00\x12\x12\n\x08resource\x18\x02 \x01(\tH\x00\x42\x08\n\x06source\x1a\xe6\x01\n\x0bIngressFrom\x12^\n\x07sources\x18\x01 \x03(\x0b\x32M.google.identity.accesscontextmanager.v1.ServicePerimeterConfig.IngressSource\x12\x12\n\nidentities\x18\x02 \x03(\t\x12\x63\n\ridentity_type\x18\x03 \x01(\x0e\x32L.google.identity.accesscontextmanager.v1.ServicePerimeterConfig.IdentityType\x1a\x80\x01\n\tIngressTo\x12`\n\noperations\x18\x01 \x03(\x0b\x32L.google.identity.accesscontextmanager.v1.ServicePerimeterConfig.ApiOperation\x12\x11\n\tresources\x18\x02 \x03(\t\x1a\xd1\x01\n\rIngressPolicy\x12\x61\n\x0cingress_from\x18\x01 \x01(\x0b\x32K.google.identity.accesscontextmanager.v1.ServicePerimeterConfig.IngressFrom\x12]\n\ningress_to\x18\x02 \x01(\x0b\x32I.google.identity.accesscontextmanager.v1.ServicePerimeterConfig.IngressTo\x1a\x85\x01\n\nEgressFrom\x12\x12\n\nidentities\x18\x01 \x03(\t\x12\x63\n\ridentity_type\x18\x02 \x01(\x0e\x32L.google.identity.accesscontextmanager.v1.ServicePerimeterConfig.IdentityType\x1a\x9b\x01\n\x08\x45gressTo\x12\x11\n\tresources\x18\x01 \x03(\t\x12`\n\noperations\x18\x02 \x03(\x0b\x32L.google.identity.accesscontextmanager.v1.ServicePerimeterConfig.ApiOperation\x12\x1a\n\x12\x65xternal_resources\x18\x03 \x03(\t\x1a\xcc\x01\n\x0c\x45gressPolicy\x12_\n\x0b\x65gress_from\x18\x01 \x01(\x0b\x32J.google.identity.accesscontextmanager.v1.ServicePerimeterConfig.EgressFrom\x12[\n\tegress_to\x18\x02 \x01(\x0b\x32H.google.identity.accesscontextmanager.v1.ServicePerimeterConfig.EgressTo"n\n\x0cIdentityType\x12\x1d\n\x19IDENTITY_TYPE_UNSPECIFIED\x10\x00\x12\x10\n\x0c\x41NY_IDENTITY\x10\x01\x12\x14\n\x10\x41NY_USER_ACCOUNT\x10\x02\x12\x17\n\x13\x41NY_SERVICE_ACCOUNT\x10\x03\x42\xac\x02\n+com.google.identity.accesscontextmanager.v1B\x15ServicePerimeterProtoP\x01Z\\cloud.google.com/go/accesscontextmanager/apiv1/accesscontextmanagerpb;accesscontextmanagerpb\xa2\x02\x04GACM\xaa\x02\'Google.Identity.AccessContextManager.V1\xca\x02\'Google\\Identity\\AccessContextManager\\V1\xea\x02*Google::Identity::AccessContextManager::V1b\x06proto3' +) + +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages( + DESCRIPTOR, + "google.identity.accesscontextmanager.v1.service_perimeter_pb2", + _globals, +) +if _descriptor._USE_C_DESCRIPTORS == False: + DESCRIPTOR._options = None + DESCRIPTOR._serialized_options = b"\n+com.google.identity.accesscontextmanager.v1B\025ServicePerimeterProtoP\001Z\\cloud.google.com/go/accesscontextmanager/apiv1/accesscontextmanagerpb;accesscontextmanagerpb\242\002\004GACM\252\002'Google.Identity.AccessContextManager.V1\312\002'Google\\Identity\\AccessContextManager\\V1\352\002*Google::Identity::AccessContextManager::V1" + _SERVICEPERIMETER._options = None + 
_SERVICEPERIMETER._serialized_options = b"\352A|\n4accesscontextmanager.googleapis.com/ServicePerimeter\022DaccessPolicies/{access_policy}/servicePerimeters/{service_perimeter}" + _globals["_SERVICEPERIMETER"]._serialized_start = 169 + _globals["_SERVICEPERIMETER"]._serialized_end = 828 + _globals["_SERVICEPERIMETER_PERIMETERTYPE"]._serialized_start = 629 + _globals["_SERVICEPERIMETER_PERIMETERTYPE"]._serialized_end = 699 + _globals["_SERVICEPERIMETERCONFIG"]._serialized_start = 831 + _globals["_SERVICEPERIMETERCONFIG"]._serialized_end = 2804 + _globals["_SERVICEPERIMETERCONFIG_VPCACCESSIBLESERVICES"]._serialized_start = 1256 + _globals["_SERVICEPERIMETERCONFIG_VPCACCESSIBLESERVICES"]._serialized_end = 1333 + _globals["_SERVICEPERIMETERCONFIG_METHODSELECTOR"]._serialized_start = 1335 + _globals["_SERVICEPERIMETERCONFIG_METHODSELECTOR"]._serialized_end = 1399 + _globals["_SERVICEPERIMETERCONFIG_APIOPERATION"]._serialized_start = 1402 + _globals["_SERVICEPERIMETERCONFIG_APIOPERATION"]._serialized_end = 1544 + _globals["_SERVICEPERIMETERCONFIG_INGRESSSOURCE"]._serialized_start = 1546 + _globals["_SERVICEPERIMETERCONFIG_INGRESSSOURCE"]._serialized_end = 1615 + _globals["_SERVICEPERIMETERCONFIG_INGRESSFROM"]._serialized_start = 1618 + _globals["_SERVICEPERIMETERCONFIG_INGRESSFROM"]._serialized_end = 1848 + _globals["_SERVICEPERIMETERCONFIG_INGRESSTO"]._serialized_start = 1851 + _globals["_SERVICEPERIMETERCONFIG_INGRESSTO"]._serialized_end = 1979 + _globals["_SERVICEPERIMETERCONFIG_INGRESSPOLICY"]._serialized_start = 1982 + _globals["_SERVICEPERIMETERCONFIG_INGRESSPOLICY"]._serialized_end = 2191 + _globals["_SERVICEPERIMETERCONFIG_EGRESSFROM"]._serialized_start = 2194 + _globals["_SERVICEPERIMETERCONFIG_EGRESSFROM"]._serialized_end = 2327 + _globals["_SERVICEPERIMETERCONFIG_EGRESSTO"]._serialized_start = 2330 + _globals["_SERVICEPERIMETERCONFIG_EGRESSTO"]._serialized_end = 2485 + _globals["_SERVICEPERIMETERCONFIG_EGRESSPOLICY"]._serialized_start = 2488 + _globals["_SERVICEPERIMETERCONFIG_EGRESSPOLICY"]._serialized_end = 2692 + _globals["_SERVICEPERIMETERCONFIG_IDENTITYTYPE"]._serialized_start = 2694 + _globals["_SERVICEPERIMETERCONFIG_IDENTITYTYPE"]._serialized_end = 2804 +# @@protoc_insertion_point(module_scope) diff --git a/packages/google-cloud-access-context-manager/noxfile.py b/packages/google-cloud-access-context-manager/noxfile.py new file mode 100644 index 000000000000..a9ceef47133c --- /dev/null +++ b/packages/google-cloud-access-context-manager/noxfile.py @@ -0,0 +1,460 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! 
+ +from __future__ import absolute_import + +import os +import pathlib +import re +import shutil +from typing import Dict, List +import warnings + +import nox + +BLACK_VERSION = "black[jupyter]==23.7.0" +ISORT_VERSION = "isort==5.11.0" + +LINT_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] + + +DEFAULT_PYTHON_VERSION = "3.10" + +UNIT_TEST_PYTHON_VERSIONS: List[str] = [ + "3.7", + "3.8", + "3.9", + "3.10", + "3.11", + "3.12", + "3.13", +] +UNIT_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "asyncmock", + "pytest", + "pytest-cov", + "pytest-asyncio", +] +UNIT_TEST_EXTERNAL_DEPENDENCIES: List[str] = [] +UNIT_TEST_LOCAL_DEPENDENCIES: List[str] = [] +UNIT_TEST_DEPENDENCIES: List[str] = [] +UNIT_TEST_EXTRAS: List[str] = [] +UNIT_TEST_EXTRAS_BY_PYTHON: Dict[str, List[str]] = {} + +SYSTEM_TEST_PYTHON_VERSIONS: List[str] = ["3.8", "3.9", "3.10", "3.11", "3.12", "3.13"] +SYSTEM_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "pytest", + "google-cloud-testutils", +] +SYSTEM_TEST_EXTERNAL_DEPENDENCIES: List[str] = [] +SYSTEM_TEST_LOCAL_DEPENDENCIES: List[str] = [] +SYSTEM_TEST_DEPENDENCIES: List[str] = [] +SYSTEM_TEST_EXTRAS: List[str] = [] +SYSTEM_TEST_EXTRAS_BY_PYTHON: Dict[str, List[str]] = {} + +CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() + +# 'docfx' is excluded since it only needs to run in 'docs-presubmit' +nox.options.sessions = [ + "unit", + "system", + "cover", + "lint", + "lint_setup_py", + "blacken", + "docs", +] + +# Error if a python version is missing +nox.options.error_on_missing_interpreters = True + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint(session): + """Run linters. + + Returns a failure if the linters find linting errors or sufficiently + serious code quality issues. + """ + session.install("flake8", BLACK_VERSION) + session.run( + "black", + "--check", + *LINT_PATHS, + ) + + session.run("flake8", "google", "tests") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def blacken(session): + """Run black. Format code to uniform standard.""" + session.install(BLACK_VERSION) + session.run( + "black", + *LINT_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def format(session): + """ + Run isort to sort imports. Then run black + to format code to uniform standard. + """ + session.install(BLACK_VERSION, ISORT_VERSION) + # Use the --fss option to sort imports using strict alphabetical order. + # See https://pycqa.github.io/isort/docs/configuration/options.html#force-sort-within-sections + session.run( + "isort", + "--fss", + *LINT_PATHS, + ) + session.run( + "black", + *LINT_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint_setup_py(session): + """Verify that setup.py is valid (including RST check).""" + session.install("docutils", "pygments") + session.run("python", "setup.py", "check", "--restructuredtext", "--strict") + + +def install_unittest_dependencies(session, *constraints): + standard_deps = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_DEPENDENCIES + session.install(*standard_deps, *constraints) + + if UNIT_TEST_EXTERNAL_DEPENDENCIES: + warnings.warn( + "'unit_test_external_dependencies' is deprecated. 
Instead, please " + "use 'unit_test_dependencies' or 'unit_test_local_dependencies'.", + DeprecationWarning, + ) + session.install(*UNIT_TEST_EXTERNAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_LOCAL_DEPENDENCIES: + session.install(*UNIT_TEST_LOCAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_EXTRAS_BY_PYTHON: + extras = UNIT_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif UNIT_TEST_EXTRAS: + extras = UNIT_TEST_EXTRAS + else: + extras = [] + + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) + + +@nox.session(python=UNIT_TEST_PYTHON_VERSIONS) +@nox.parametrize( + "protobuf_implementation", + ["python", "upb", "cpp"], +) +def unit(session, protobuf_implementation): + # Install all test dependencies, then install this package in-place. + + if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): + session.skip("cpp implementation is not supported in python 3.11+") + + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) + install_unittest_dependencies(session, "-c", constraints_path) + + # TODO(https://github.com/googleapis/synthtool/issues/1976): + # Remove the 'cpp' implementation once support for Protobuf 3.x is dropped. + # The 'cpp' implementation requires Protobuf<4. + if protobuf_implementation == "cpp": + session.install("protobuf<4") + + # Run py.test against the unit tests. + session.run( + "py.test", + "--quiet", + f"--junitxml=unit_{session.python}_sponge_log.xml", + "--cov=google", + "--cov=tests/unit", + "--cov-append", + "--cov-config=.coveragerc", + "--cov-report=", + "--cov-fail-under=0", + os.path.join("tests", "unit"), + *session.posargs, + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, + ) + + +def install_systemtest_dependencies(session, *constraints): + # Use pre-release gRPC for system tests. + # Exclude version 1.52.0rc1 which has a known issue. + # See https://github.com/grpc/grpc/issues/32163 + session.install("--pre", "grpcio!=1.52.0rc1") + + session.install(*SYSTEM_TEST_STANDARD_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_EXTERNAL_DEPENDENCIES: + session.install(*SYSTEM_TEST_EXTERNAL_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_LOCAL_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_LOCAL_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_EXTRAS_BY_PYTHON: + extras = SYSTEM_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif SYSTEM_TEST_EXTRAS: + extras = SYSTEM_TEST_EXTRAS + else: + extras = [] + + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) + + +@nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS) +def system(session): + """Run the system test suite.""" + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) + system_test_path = os.path.join("tests", "system.py") + system_test_folder_path = os.path.join("tests", "system") + + # Check the value of `RUN_SYSTEM_TESTS` env var. It defaults to true. + if os.environ.get("RUN_SYSTEM_TESTS", "true") == "false": + session.skip("RUN_SYSTEM_TESTS is set to false, skipping") + # Install pyopenssl for mTLS testing. 
+ if os.environ.get("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true": + session.install("pyopenssl") + + system_test_exists = os.path.exists(system_test_path) + system_test_folder_exists = os.path.exists(system_test_folder_path) + # Sanity check: only run tests if found. + if not system_test_exists and not system_test_folder_exists: + session.skip("System tests were not found") + + install_systemtest_dependencies(session, "-c", constraints_path) + + # Run py.test against the system tests. + if system_test_exists: + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_path, + *session.posargs, + ) + if system_test_folder_exists: + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_folder_path, + *session.posargs, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def cover(session): + """Run the final coverage report. + + This outputs the coverage report aggregating coverage from the unit + test runs (not system test runs), and then erases coverage data. + """ + session.install("coverage", "pytest-cov") + session.run("coverage", "report", "--show-missing", "--fail-under=100") + + session.run("coverage", "erase") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def docs(session): + """Build the docs for this library.""" + + session.install("-e", ".") + session.install( + # We need to pin to specific versions of the `sphinxcontrib-*` packages + # which still support sphinx 4.x. + # See https://github.com/googleapis/sphinx-docfx-yaml/issues/344 + # and https://github.com/googleapis/sphinx-docfx-yaml/issues/345. + "sphinxcontrib-applehelp==1.0.4", + "sphinxcontrib-devhelp==1.0.2", + "sphinxcontrib-htmlhelp==2.0.1", + "sphinxcontrib-qthelp==1.0.3", + "sphinxcontrib-serializinghtml==1.1.5", + "sphinx==4.5.0", + "alabaster", + "recommonmark", + ) + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-W", # warnings as errors + "-T", # show full traceback on exception + "-N", # no colors + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def docfx(session): + """Build the docfx yaml files for this library.""" + + session.install("-e", ".") + session.install( + # We need to pin to specific versions of the `sphinxcontrib-*` packages + # which still support sphinx 4.x. + # See https://github.com/googleapis/sphinx-docfx-yaml/issues/344 + # and https://github.com/googleapis/sphinx-docfx-yaml/issues/345. 
+ "sphinxcontrib-applehelp==1.0.4", + "sphinxcontrib-devhelp==1.0.2", + "sphinxcontrib-htmlhelp==2.0.1", + "sphinxcontrib-qthelp==1.0.3", + "sphinxcontrib-serializinghtml==1.1.5", + "gcp-sphinx-docfx-yaml", + "alabaster", + "recommonmark", + ) + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-T", # show full traceback on exception + "-N", # no colors + "-D", + ( + "extensions=sphinx.ext.autodoc," + "sphinx.ext.autosummary," + "docfx_yaml.extension," + "sphinx.ext.intersphinx," + "sphinx.ext.coverage," + "sphinx.ext.napoleon," + "sphinx.ext.todo," + "sphinx.ext.viewcode," + "recommonmark" + ), + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +@nox.session(python="3.13") +@nox.parametrize( + "protobuf_implementation", + ["python", "upb", "cpp"], +) +def prerelease_deps(session, protobuf_implementation): + """Run all tests with prerelease versions of dependencies installed.""" + + if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): + session.skip("cpp implementation is not supported in python 3.11+") + + # Install all dependencies + session.install("-e", ".[all, tests, tracing]") + unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES + session.install(*unit_deps_all) + system_deps_all = ( + SYSTEM_TEST_STANDARD_DEPENDENCIES + + SYSTEM_TEST_EXTERNAL_DEPENDENCIES + + SYSTEM_TEST_EXTRAS + ) + session.install(*system_deps_all) + + # Because we test minimum dependency versions on the minimum Python + # version, the first version we test with in the unit tests sessions has a + # constraints file containing all dependencies and extras. + with open( + CURRENT_DIRECTORY + / "testing" + / f"constraints-{UNIT_TEST_PYTHON_VERSIONS[0]}.txt", + encoding="utf-8", + ) as constraints_file: + constraints_text = constraints_file.read() + + # Ignore leading whitespace and comment lines. 
+ constraints_deps = [ + match.group(1) + for match in re.finditer( + r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE + ) + ] + + session.install(*constraints_deps) + + prerel_deps = [ + "protobuf", + # dependency of grpc + "six", + "grpc-google-iam-v1", + "googleapis-common-protos", + "grpcio", + "grpcio-status", + "google-api-core", + "google-auth", + "proto-plus", + "google-cloud-testutils", + # dependencies of google-cloud-testutils" + "click", + ] + + for dep in prerel_deps: + session.install("--pre", "--no-deps", "--upgrade", dep) + + # Remaining dependencies + other_deps = [ + "requests", + ] + session.install(*other_deps) + + # Print out prerelease package versions + session.run( + "python", "-c", "import google.protobuf; print(google.protobuf.__version__)" + ) + session.run("python", "-c", "import grpc; print(grpc.__version__)") + session.run("python", "-c", "import google.auth; print(google.auth.__version__)") + + session.run( + "py.test", + "tests/unit", + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, + ) diff --git a/packages/google-cloud-access-context-manager/renovate.json b/packages/google-cloud-access-context-manager/renovate.json new file mode 100644 index 000000000000..39b2a0ec9296 --- /dev/null +++ b/packages/google-cloud-access-context-manager/renovate.json @@ -0,0 +1,12 @@ +{ + "extends": [ + "config:base", + "group:all", + ":preserveSemverRanges", + ":disableDependencyDashboard" + ], + "ignorePaths": [".pre-commit-config.yaml", ".kokoro/requirements.txt", "setup.py"], + "pip_requirements": { + "fileMatch": ["requirements-test.txt", "samples/[\\S/]*constraints.txt", "samples/[\\S/]*constraints-test.txt"] + } +} diff --git a/packages/google-cloud-access-context-manager/scripts/decrypt-secrets.sh b/packages/google-cloud-access-context-manager/scripts/decrypt-secrets.sh new file mode 100755 index 000000000000..120b0ddc4364 --- /dev/null +++ b/packages/google-cloud-access-context-manager/scripts/decrypt-secrets.sh @@ -0,0 +1,46 @@ +#!/bin/bash + +# Copyright 2024 Google LLC All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" +ROOT=$( dirname "$DIR" ) + +# Work from the project root. +cd $ROOT + +# Prevent it from overriding files. +# We recommend that sample authors use their own service account files and cloud project. +# In that case, they are supposed to prepare these files by themselves. +if [[ -f "testing/test-env.sh" ]] || \ + [[ -f "testing/service-account.json" ]] || \ + [[ -f "testing/client-secrets.json" ]]; then + echo "One or more target files exist, aborting." + exit 1 +fi + +# Use SECRET_MANAGER_PROJECT if set, fallback to cloud-devrel-kokoro-resources. 
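+# The ":-" expansion below falls back to the default project when
+# SECRET_MANAGER_PROJECT is unset or empty.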
+PROJECT_ID="${SECRET_MANAGER_PROJECT:-cloud-devrel-kokoro-resources}" + +gcloud secrets versions access latest --secret="python-docs-samples-test-env" \ + --project="${PROJECT_ID}" \ + > testing/test-env.sh +gcloud secrets versions access latest \ + --secret="python-docs-samples-service-account" \ + --project="${PROJECT_ID}" \ + > testing/service-account.json +gcloud secrets versions access latest \ + --secret="python-docs-samples-client-secrets" \ + --project="${PROJECT_ID}" \ + > testing/client-secrets.json diff --git a/packages/google-cloud-access-context-manager/scripts/readme-gen/readme_gen.py b/packages/google-cloud-access-context-manager/scripts/readme-gen/readme_gen.py new file mode 100644 index 000000000000..8f5e248a0da1 --- /dev/null +++ b/packages/google-cloud-access-context-manager/scripts/readme-gen/readme_gen.py @@ -0,0 +1,69 @@ +#!/usr/bin/env python + +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Generates READMEs using configuration defined in yaml.""" + +import argparse +import io +import os +import subprocess + +import jinja2 +import yaml + + +jinja_env = jinja2.Environment( + trim_blocks=True, + loader=jinja2.FileSystemLoader( + os.path.abspath(os.path.join(os.path.dirname(__file__), "templates")) + ), + autoescape=True, +) + +README_TMPL = jinja_env.get_template("README.tmpl.rst") + + +def get_help(file): + return subprocess.check_output(["python", file, "--help"]).decode() + + +def main(): + parser = argparse.ArgumentParser() + parser.add_argument("source") + parser.add_argument("--destination", default="README.rst") + + args = parser.parse_args() + + source = os.path.abspath(args.source) + root = os.path.dirname(source) + destination = os.path.join(root, args.destination) + + jinja_env.globals["get_help"] = get_help + + with io.open(source, "r") as f: + config = yaml.load(f) + + # This allows get_help to execute in the right directory. + os.chdir(root) + + output = README_TMPL.render(config) + + with io.open(destination, "w") as f: + f.write(output) + + +if __name__ == "__main__": + main() diff --git a/packages/google-cloud-access-context-manager/scripts/readme-gen/templates/README.tmpl.rst b/packages/google-cloud-access-context-manager/scripts/readme-gen/templates/README.tmpl.rst new file mode 100644 index 000000000000..4fd239765b0a --- /dev/null +++ b/packages/google-cloud-access-context-manager/scripts/readme-gen/templates/README.tmpl.rst @@ -0,0 +1,87 @@ +{# The following line is a lie. BUT! Once jinja2 is done with it, it will + become truth! #} +.. This file is automatically generated. Do not edit this file directly. + +{{product.name}} Python Samples +=============================================================================== + +.. image:: https://gstatic.com/cloudssh/images/open-btn.png + :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor={{folder}}/README.rst + + +This directory contains samples for {{product.name}}. 
{{product.description}} + +{{description}} + +.. _{{product.name}}: {{product.url}} + +{% if required_api_url %} +To run the sample, you need to enable the API at: {{required_api_url}} +{% endif %} + +{% if required_role %} +To run the sample, you need to have `{{required_role}}` role. +{% endif %} + +{{other_required_steps}} + +{% if setup %} +Setup +------------------------------------------------------------------------------- + +{% for section in setup %} + +{% include section + '.tmpl.rst' %} + +{% endfor %} +{% endif %} + +{% if samples %} +Samples +------------------------------------------------------------------------------- + +{% for sample in samples %} +{{sample.name}} ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ + +{% if not sample.hide_cloudshell_button %} +.. image:: https://gstatic.com/cloudssh/images/open-btn.png + :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor={{folder}}/{{sample.file}},{{folder}}/README.rst +{% endif %} + + +{{sample.description}} + +To run this sample: + +.. code-block:: bash + + $ python {{sample.file}} +{% if sample.show_help %} + + {{get_help(sample.file)|indent}} +{% endif %} + + +{% endfor %} +{% endif %} + +{% if cloud_client_library %} + +The client library +------------------------------------------------------------------------------- + +This sample uses the `Google Cloud Client Library for Python`_. +You can read the documentation for more details on API usage and use GitHub +to `browse the source`_ and `report issues`_. + +.. _Google Cloud Client Library for Python: + https://googlecloudplatform.github.io/google-cloud-python/ +.. _browse the source: + https://github.com/GoogleCloudPlatform/google-cloud-python +.. _report issues: + https://github.com/GoogleCloudPlatform/google-cloud-python/issues + +{% endif %} + +.. _Google Cloud SDK: https://cloud.google.com/sdk/ \ No newline at end of file diff --git a/packages/google-cloud-access-context-manager/scripts/readme-gen/templates/auth.tmpl.rst b/packages/google-cloud-access-context-manager/scripts/readme-gen/templates/auth.tmpl.rst new file mode 100644 index 000000000000..1446b94a5e3a --- /dev/null +++ b/packages/google-cloud-access-context-manager/scripts/readme-gen/templates/auth.tmpl.rst @@ -0,0 +1,9 @@ +Authentication +++++++++++++++ + +This sample requires you to have authentication setup. Refer to the +`Authentication Getting Started Guide`_ for instructions on setting up +credentials for applications. + +.. _Authentication Getting Started Guide: + https://cloud.google.com/docs/authentication/getting-started diff --git a/packages/google-cloud-access-context-manager/scripts/readme-gen/templates/auth_api_key.tmpl.rst b/packages/google-cloud-access-context-manager/scripts/readme-gen/templates/auth_api_key.tmpl.rst new file mode 100644 index 000000000000..11957ce2714a --- /dev/null +++ b/packages/google-cloud-access-context-manager/scripts/readme-gen/templates/auth_api_key.tmpl.rst @@ -0,0 +1,14 @@ +Authentication +++++++++++++++ + +Authentication for this service is done via an `API Key`_. To obtain an API +Key: + +1. Open the `Cloud Platform Console`_ +2. Make sure that billing is enabled for your project. +3. From the **Credentials** page, create a new **API Key** or use an existing + one for your project. + +.. _API Key: + https://developers.google.com/api-client-library/python/guide/aaa_apikeys +.. 
_Cloud Console: https://console.cloud.google.com/project?_ diff --git a/packages/google-cloud-access-context-manager/scripts/readme-gen/templates/install_deps.tmpl.rst b/packages/google-cloud-access-context-manager/scripts/readme-gen/templates/install_deps.tmpl.rst new file mode 100644 index 000000000000..6f069c6c87a5 --- /dev/null +++ b/packages/google-cloud-access-context-manager/scripts/readme-gen/templates/install_deps.tmpl.rst @@ -0,0 +1,29 @@ +Install Dependencies +++++++++++++++++++++ + +#. Clone python-docs-samples and change directory to the sample directory you want to use. + + .. code-block:: bash + + $ git clone https://github.com/GoogleCloudPlatform/python-docs-samples.git + +#. Install `pip`_ and `virtualenv`_ if you do not already have them. You may want to refer to the `Python Development Environment Setup Guide`_ for Google Cloud Platform for instructions. + + .. _Python Development Environment Setup Guide: + https://cloud.google.com/python/setup + +#. Create a virtualenv. Samples are compatible with Python 3.7+. + + .. code-block:: bash + + $ virtualenv env + $ source env/bin/activate + +#. Install the dependencies needed to run the samples. + + .. code-block:: bash + + $ pip install -r requirements.txt + +.. _pip: https://pip.pypa.io/ +.. _virtualenv: https://virtualenv.pypa.io/ diff --git a/packages/google-cloud-access-context-manager/scripts/readme-gen/templates/install_portaudio.tmpl.rst b/packages/google-cloud-access-context-manager/scripts/readme-gen/templates/install_portaudio.tmpl.rst new file mode 100644 index 000000000000..5ea33d18c00c --- /dev/null +++ b/packages/google-cloud-access-context-manager/scripts/readme-gen/templates/install_portaudio.tmpl.rst @@ -0,0 +1,35 @@ +Install PortAudio ++++++++++++++++++ + +Install `PortAudio`_. This is required by the `PyAudio`_ library to stream +audio from your computer's microphone. PyAudio depends on PortAudio for cross-platform compatibility, and is installed differently depending on the +platform. + +* For Mac OS X, you can use `Homebrew`_:: + + brew install portaudio + + **Note**: if you encounter an error when running `pip install` that indicates + it can't find `portaudio.h`, try running `pip install` with the following + flags:: + + pip install --global-option='build_ext' \ + --global-option='-I/usr/local/include' \ + --global-option='-L/usr/local/lib' \ + pyaudio + +* For Debian / Ubuntu Linux:: + + apt-get install portaudio19-dev python-all-dev + +* Windows may work without having to install PortAudio explicitly (it will get + installed with PyAudio). + +For more details, see the `PyAudio installation`_ page. + + +.. _PyAudio: https://people.csail.mit.edu/hubert/pyaudio/ +.. _PortAudio: http://www.portaudio.com/ +.. _PyAudio installation: + https://people.csail.mit.edu/hubert/pyaudio/#downloads +.. _Homebrew: http://brew.sh diff --git a/packages/google-cloud-access-context-manager/setup.cfg b/packages/google-cloud-access-context-manager/setup.cfg new file mode 100644 index 000000000000..052350089505 --- /dev/null +++ b/packages/google-cloud-access-context-manager/setup.cfg @@ -0,0 +1,19 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! +[bdist_wheel] +universal = 1 diff --git a/packages/google-cloud-access-context-manager/setup.py b/packages/google-cloud-access-context-manager/setup.py new file mode 100644 index 000000000000..8784ec630c69 --- /dev/null +++ b/packages/google-cloud-access-context-manager/setup.py @@ -0,0 +1,74 @@ +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import io +import os + +import setuptools +from setuptools import find_namespace_packages + +# Package metadata. + +name = "google-cloud-access-context-manager" +description = "Google Cloud Access Context Manager Protobufs" +version = "0.2.1" +# Should be one of: +# 'Development Status :: 3 - Alpha' +# 'Development Status :: 4 - Beta' +# 'Development Status :: 5 - Production/Stable' +release_status = "Development Status :: 4 - Beta" +dependencies = [ + "google-api-core[grpc] >= 1.34.1, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", + "protobuf>=3.20.2,<6.0.0dev,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", +] + +# Setup boilerplate below this line. 
+package_root = os.path.abspath(os.path.dirname(__file__)) + +readme_filename = os.path.join(package_root, "README.rst") +with io.open(readme_filename, encoding="utf-8") as readme_file: + readme = readme_file.read() + +setuptools.setup( + name=name, + version=version, + description=description, + long_description=readme, + author="Google LLC", + author_email="googleapis-packages@google.com", + license="Apache 2.0", + url="https://github.com/googleapis/python-access-context-manager", + classifiers=[ + release_status, + "Intended Audience :: Developers", + "License :: OSI Approved :: Apache Software License", + "Programming Language :: Python", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13", + "Operating System :: OS Independent", + "Topic :: Internet", + ], + platforms="Posix; MacOS X; Windows", + packages=find_namespace_packages(exclude=("tests*", "testing*")), + install_requires=dependencies, + python_requires=">=3.7", + include_package_data=True, + zip_safe=False, +) diff --git a/packages/google-cloud-access-context-manager/testing/.gitignore b/packages/google-cloud-access-context-manager/testing/.gitignore new file mode 100644 index 000000000000..b05fbd630881 --- /dev/null +++ b/packages/google-cloud-access-context-manager/testing/.gitignore @@ -0,0 +1,3 @@ +test-env.sh +service-account.json +client-secrets.json \ No newline at end of file diff --git a/packages/google-cloud-access-context-manager/testing/constraints-3.10.txt b/packages/google-cloud-access-context-manager/testing/constraints-3.10.txt new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/packages/google-cloud-access-context-manager/testing/constraints-3.11.txt b/packages/google-cloud-access-context-manager/testing/constraints-3.11.txt new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/packages/google-cloud-access-context-manager/testing/constraints-3.12.txt b/packages/google-cloud-access-context-manager/testing/constraints-3.12.txt new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/packages/google-cloud-access-context-manager/testing/constraints-3.13.txt b/packages/google-cloud-access-context-manager/testing/constraints-3.13.txt new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/packages/google-cloud-access-context-manager/testing/constraints-3.7.txt b/packages/google-cloud-access-context-manager/testing/constraints-3.7.txt new file mode 100644 index 000000000000..5dccad8acbae --- /dev/null +++ b/packages/google-cloud-access-context-manager/testing/constraints-3.7.txt @@ -0,0 +1,9 @@ +# This constraints file is used to check that lower bounds +# are correct in setup.py +# List *all* library dependencies and extras in this file. +# Pin the version to the lower bound. 
+# +# e.g., if setup.py has "foo >= 1.14.0, < 2.0.0dev", +# then this file should have foo==1.14.0 +protobuf==3.20.2 +google-api-core==1.34.1 diff --git a/packages/google-cloud-access-context-manager/testing/constraints-3.8.txt b/packages/google-cloud-access-context-manager/testing/constraints-3.8.txt new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/packages/google-cloud-access-context-manager/testing/constraints-3.9.txt b/packages/google-cloud-access-context-manager/testing/constraints-3.9.txt new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/packages/google-cloud-access-context-manager/tests/unit/test_import.py b/packages/google-cloud-access-context-manager/tests/unit/test_import.py new file mode 100644 index 000000000000..d32d192c26ac --- /dev/null +++ b/packages/google-cloud-access-context-manager/tests/unit/test_import.py @@ -0,0 +1,24 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +from google.identity.accesscontextmanager.v1 import access_level_pb2 + + +def test_create_basic_level(): + # just check that the import works + # and that a BasicLevel instance can be instantiated + access_level_pb2.BasicLevel() diff --git a/packages/google-cloud-access-context-manager/tests/unit/test_packaging.py b/packages/google-cloud-access-context-manager/tests/unit/test_packaging.py new file mode 100644 index 000000000000..01905d7c9bfb --- /dev/null +++ b/packages/google-cloud-access-context-manager/tests/unit/test_packaging.py @@ -0,0 +1,48 @@ +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import os +import subprocess +import sys + + +def test_namespace_package_compat(tmp_path): + # The ``google`` namespace package should not be masked + # by the presence of ``google-cloud-access-context-manager``. + google = tmp_path / "google" + google.mkdir() + google.joinpath("othermod.py").write_text("") + env = dict(os.environ, PYTHONPATH=str(tmp_path)) + cmd = [sys.executable, "-m", "google.othermod"] + subprocess.check_call(cmd, env=env) + + # The ``google.identity`` namespace package should not be masked + # by the presence of ``google-cloud-access-context-manager``. 
+ google_identity = tmp_path / "google" / "identity" + google_identity.mkdir() + google_identity.joinpath("othermod.py").write_text("") + env = dict(os.environ, PYTHONPATH=str(tmp_path)) + cmd = [sys.executable, "-m", "google.identity.othermod"] + subprocess.check_call(cmd, env=env) + + # The ``google.identity.accesscontextmanager`` namespace package should not be masked + # by the presence of ``google-cloud-access-context-manager``. + google_identity_accesscontextmanager = ( + tmp_path / "google" / "identity" / "accesscontextmanager" + ) + google_identity_accesscontextmanager.mkdir() + google_identity_accesscontextmanager.joinpath("othermod.py").write_text("") + env = dict(os.environ, PYTHONPATH=str(tmp_path)) + cmd = [sys.executable, "-m", "google.identity.accesscontextmanager.othermod"] + subprocess.check_call(cmd, env=env) diff --git a/packages/google-cloud-advisorynotifications/CHANGELOG.md b/packages/google-cloud-advisorynotifications/CHANGELOG.md index af9759a01267..4a8a8cea6166 100644 --- a/packages/google-cloud-advisorynotifications/CHANGELOG.md +++ b/packages/google-cloud-advisorynotifications/CHANGELOG.md @@ -1,5 +1,13 @@ # Changelog +## [0.3.14](https://github.com/googleapis/google-cloud-python/compare/google-cloud-advisorynotifications-v0.3.13...google-cloud-advisorynotifications-v0.3.14) (2025-02-12) + + +### Features + +* Add REST Interceptors which support reading metadata ([a961bc0](https://github.com/googleapis/google-cloud-python/commit/a961bc029201b72fc4923490aeb3d82781853e6a)) +* Add support for reading selective GAPIC generation methods from service YAML ([a961bc0](https://github.com/googleapis/google-cloud-python/commit/a961bc029201b72fc4923490aeb3d82781853e6a)) + ## [0.3.13](https://github.com/googleapis/google-cloud-python/compare/google-cloud-advisorynotifications-v0.3.12...google-cloud-advisorynotifications-v0.3.13) (2024-12-12) diff --git a/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications/gapic_version.py b/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications/gapic_version.py index fb3463bbb3c2..0106eadcd8d9 100644 --- a/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications/gapic_version.py +++ b/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.3.13" # {x-release-please-version} +__version__ = "0.3.14" # {x-release-please-version} diff --git a/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications_v1/gapic_version.py b/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications_v1/gapic_version.py index fb3463bbb3c2..0106eadcd8d9 100644 --- a/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications_v1/gapic_version.py +++ b/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.3.13" # {x-release-please-version} +__version__ = "0.3.14" # {x-release-please-version} diff --git a/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications_v1/services/advisory_notifications_service/client.py b/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications_v1/services/advisory_notifications_service/client.py index d07a0bfcddd5..e9196e1357c7 100644 --- a/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications_v1/services/advisory_notifications_service/client.py +++ b/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications_v1/services/advisory_notifications_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -514,6 +516,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. diff --git a/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications_v1/services/advisory_notifications_service/transports/rest.py b/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications_v1/services/advisory_notifications_service/transports/rest.py index 6cb2747c75b4..e2e3b223030d 100644 --- a/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications_v1/services/advisory_notifications_service/transports/rest.py +++ b/packages/google-cloud-advisorynotifications/google/cloud/advisorynotifications_v1/services/advisory_notifications_service/transports/rest.py @@ -123,12 +123,35 @@ def post_get_notification( ) -> service.Notification: """Post-rpc interceptor for get_notification - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_notification_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AdvisoryNotificationsService server but before - it is returned to user code. + it is returned to user code. This `post_get_notification` interceptor runs + before the `post_get_notification_with_metadata` interceptor. 
""" return response + def post_get_notification_with_metadata( + self, + response: service.Notification, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[service.Notification, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_notification + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AdvisoryNotificationsService server but before it is returned to user code. + + We recommend only using this `post_get_notification_with_metadata` + interceptor in new development instead of the `post_get_notification` interceptor. + When both interceptors are used, this `post_get_notification_with_metadata` interceptor runs after the + `post_get_notification` interceptor. The (possibly modified) response returned by + `post_get_notification` will be passed to + `post_get_notification_with_metadata`. + """ + return response, metadata + def pre_get_settings( self, request: service.GetSettingsRequest, @@ -144,12 +167,35 @@ def pre_get_settings( def post_get_settings(self, response: service.Settings) -> service.Settings: """Post-rpc interceptor for get_settings - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_settings_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AdvisoryNotificationsService server but before - it is returned to user code. + it is returned to user code. This `post_get_settings` interceptor runs + before the `post_get_settings_with_metadata` interceptor. """ return response + def post_get_settings_with_metadata( + self, + response: service.Settings, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[service.Settings, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_settings + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AdvisoryNotificationsService server but before it is returned to user code. + + We recommend only using this `post_get_settings_with_metadata` + interceptor in new development instead of the `post_get_settings` interceptor. + When both interceptors are used, this `post_get_settings_with_metadata` interceptor runs after the + `post_get_settings` interceptor. The (possibly modified) response returned by + `post_get_settings` will be passed to + `post_get_settings_with_metadata`. + """ + return response, metadata + def pre_list_notifications( self, request: service.ListNotificationsRequest, @@ -169,12 +215,37 @@ def post_list_notifications( ) -> service.ListNotificationsResponse: """Post-rpc interceptor for list_notifications - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_notifications_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AdvisoryNotificationsService server but before - it is returned to user code. + it is returned to user code. This `post_list_notifications` interceptor runs + before the `post_list_notifications_with_metadata` interceptor. 
""" return response + def post_list_notifications_with_metadata( + self, + response: service.ListNotificationsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + service.ListNotificationsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_notifications + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AdvisoryNotificationsService server but before it is returned to user code. + + We recommend only using this `post_list_notifications_with_metadata` + interceptor in new development instead of the `post_list_notifications` interceptor. + When both interceptors are used, this `post_list_notifications_with_metadata` interceptor runs after the + `post_list_notifications` interceptor. The (possibly modified) response returned by + `post_list_notifications` will be passed to + `post_list_notifications_with_metadata`. + """ + return response, metadata + def pre_update_settings( self, request: service.UpdateSettingsRequest, @@ -190,12 +261,35 @@ def pre_update_settings( def post_update_settings(self, response: service.Settings) -> service.Settings: """Post-rpc interceptor for update_settings - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_settings_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AdvisoryNotificationsService server but before - it is returned to user code. + it is returned to user code. This `post_update_settings` interceptor runs + before the `post_update_settings_with_metadata` interceptor. """ return response + def post_update_settings_with_metadata( + self, + response: service.Settings, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[service.Settings, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_settings + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AdvisoryNotificationsService server but before it is returned to user code. + + We recommend only using this `post_update_settings_with_metadata` + interceptor in new development instead of the `post_update_settings` interceptor. + When both interceptors are used, this `post_update_settings_with_metadata` interceptor runs after the + `post_update_settings` interceptor. The (possibly modified) response returned by + `post_update_settings` will be passed to + `post_update_settings_with_metadata`. 
+ """ + return response, metadata + @dataclasses.dataclass class AdvisoryNotificationsServiceRestStub: @@ -408,6 +502,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_notification(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_notification_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -551,6 +649,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_settings(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_settings_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -697,6 +799,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_notifications(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_notifications_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -848,6 +954,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_settings(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_settings_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-advisorynotifications/samples/generated_samples/snippet_metadata_google.cloud.advisorynotifications.v1.json b/packages/google-cloud-advisorynotifications/samples/generated_samples/snippet_metadata_google.cloud.advisorynotifications.v1.json index 92ef859c8101..d4905f0be2a0 100644 --- a/packages/google-cloud-advisorynotifications/samples/generated_samples/snippet_metadata_google.cloud.advisorynotifications.v1.json +++ b/packages/google-cloud-advisorynotifications/samples/generated_samples/snippet_metadata_google.cloud.advisorynotifications.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-advisorynotifications", - "version": "0.3.13" + "version": "0.3.14" }, "snippets": [ { diff --git a/packages/google-cloud-advisorynotifications/tests/unit/gapic/advisorynotifications_v1/test_advisory_notifications_service.py b/packages/google-cloud-advisorynotifications/tests/unit/gapic/advisorynotifications_v1/test_advisory_notifications_service.py index f910f0508058..bbbf908f3343 100644 --- a/packages/google-cloud-advisorynotifications/tests/unit/gapic/advisorynotifications_v1/test_advisory_notifications_service.py +++ b/packages/google-cloud-advisorynotifications/tests/unit/gapic/advisorynotifications_v1/test_advisory_notifications_service.py @@ -61,6 +61,13 @@ ) from google.cloud.advisorynotifications_v1.types import service +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -346,6 +353,49 @@ def test__get_universe_domain(): assert str(excinfo.value) 
== "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = AdvisoryNotificationsServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = AdvisoryNotificationsServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -3885,10 +3935,14 @@ def test_list_notifications_rest_interceptors(null_interceptor): transports.AdvisoryNotificationsServiceRestInterceptor, "post_list_notifications", ) as post, mock.patch.object( + transports.AdvisoryNotificationsServiceRestInterceptor, + "post_list_notifications_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AdvisoryNotificationsServiceRestInterceptor, "pre_list_notifications" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.ListNotificationsRequest.pb( service.ListNotificationsRequest() ) @@ -3914,6 +3968,7 @@ def test_list_notifications_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = service.ListNotificationsResponse() + post_with_metadata.return_value = service.ListNotificationsResponse(), metadata client.list_notifications( request, @@ -3925,6 +3980,7 @@ def test_list_notifications_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_notification_rest_bad_request(request_type=service.GetNotificationRequest): @@ -4016,10 +4072,14 @@ def test_get_notification_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AdvisoryNotificationsServiceRestInterceptor, "post_get_notification" ) as post, mock.patch.object( + transports.AdvisoryNotificationsServiceRestInterceptor, + "post_get_notification_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AdvisoryNotificationsServiceRestInterceptor, "pre_get_notification" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.GetNotificationRequest.pb(service.GetNotificationRequest()) transcode.return_value = { "method": "post", @@ -4041,6 +4101,7 @@ def test_get_notification_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = service.Notification() + post_with_metadata.return_value = service.Notification(), metadata 
client.get_notification( request, @@ -4052,6 +4113,7 @@ def test_get_notification_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_settings_rest_bad_request(request_type=service.GetSettingsRequest): @@ -4136,10 +4198,14 @@ def test_get_settings_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AdvisoryNotificationsServiceRestInterceptor, "post_get_settings" ) as post, mock.patch.object( + transports.AdvisoryNotificationsServiceRestInterceptor, + "post_get_settings_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AdvisoryNotificationsServiceRestInterceptor, "pre_get_settings" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.GetSettingsRequest.pb(service.GetSettingsRequest()) transcode.return_value = { "method": "post", @@ -4161,6 +4227,7 @@ def test_get_settings_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = service.Settings() + post_with_metadata.return_value = service.Settings(), metadata client.get_settings( request, @@ -4172,6 +4239,7 @@ def test_get_settings_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_settings_rest_bad_request(request_type=service.UpdateSettingsRequest): @@ -4332,10 +4400,14 @@ def test_update_settings_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AdvisoryNotificationsServiceRestInterceptor, "post_update_settings" ) as post, mock.patch.object( + transports.AdvisoryNotificationsServiceRestInterceptor, + "post_update_settings_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AdvisoryNotificationsServiceRestInterceptor, "pre_update_settings" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.UpdateSettingsRequest.pb(service.UpdateSettingsRequest()) transcode.return_value = { "method": "post", @@ -4357,6 +4429,7 @@ def test_update_settings_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = service.Settings() + post_with_metadata.return_value = service.Settings(), metadata client.update_settings( request, @@ -4368,6 +4441,7 @@ def test_update_settings_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_initialize_client_w_rest(): diff --git a/packages/google-cloud-alloydb-connectors/CHANGELOG.md b/packages/google-cloud-alloydb-connectors/CHANGELOG.md index b8a5a8130780..696cc1784e8d 100644 --- a/packages/google-cloud-alloydb-connectors/CHANGELOG.md +++ b/packages/google-cloud-alloydb-connectors/CHANGELOG.md @@ -1,5 +1,13 @@ # Changelog +## [0.1.8](https://github.com/googleapis/google-cloud-python/compare/google-cloud-alloydb-connectors-v0.1.7...google-cloud-alloydb-connectors-v0.1.8) (2025-02-12) + + +### Features + +* Add REST Interceptors which support reading metadata ([a961bc0](https://github.com/googleapis/google-cloud-python/commit/a961bc029201b72fc4923490aeb3d82781853e6a)) +* Add support for reading selective GAPIC generation methods from service YAML ([a961bc0](https://github.com/googleapis/google-cloud-python/commit/a961bc029201b72fc4923490aeb3d82781853e6a)) + ## 
[0.1.7](https://github.com/googleapis/google-cloud-python/compare/google-cloud-alloydb-connectors-v0.1.6...google-cloud-alloydb-connectors-v0.1.7) (2024-10-24) diff --git a/packages/google-cloud-alloydb-connectors/google/cloud/alloydb/connectors/gapic_version.py b/packages/google-cloud-alloydb-connectors/google/cloud/alloydb/connectors/gapic_version.py index cf5493b86bbc..ec8d212c9160 100644 --- a/packages/google-cloud-alloydb-connectors/google/cloud/alloydb/connectors/gapic_version.py +++ b/packages/google-cloud-alloydb-connectors/google/cloud/alloydb/connectors/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.1.7" # {x-release-please-version} +__version__ = "0.1.8" # {x-release-please-version} diff --git a/packages/google-cloud-alloydb-connectors/google/cloud/alloydb/connectors_v1/gapic_version.py b/packages/google-cloud-alloydb-connectors/google/cloud/alloydb/connectors_v1/gapic_version.py index cf5493b86bbc..ec8d212c9160 100644 --- a/packages/google-cloud-alloydb-connectors/google/cloud/alloydb/connectors_v1/gapic_version.py +++ b/packages/google-cloud-alloydb-connectors/google/cloud/alloydb/connectors_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.1.7" # {x-release-please-version} +__version__ = "0.1.8" # {x-release-please-version} diff --git a/packages/google-cloud-alloydb/CHANGELOG.md b/packages/google-cloud-alloydb/CHANGELOG.md index cca7c7794ba6..8b23f1de8325 100644 --- a/packages/google-cloud-alloydb/CHANGELOG.md +++ b/packages/google-cloud-alloydb/CHANGELOG.md @@ -1,5 +1,13 @@ # Changelog +## [0.4.2](https://github.com/googleapis/google-cloud-python/compare/google-cloud-alloydb-v0.4.1...google-cloud-alloydb-v0.4.2) (2025-02-12) + + +### Features + +* Add REST Interceptors which support reading metadata ([a961bc0](https://github.com/googleapis/google-cloud-python/commit/a961bc029201b72fc4923490aeb3d82781853e6a)) +* Add support for reading selective GAPIC generation methods from service YAML ([a961bc0](https://github.com/googleapis/google-cloud-python/commit/a961bc029201b72fc4923490aeb3d82781853e6a)) + ## [0.4.1](https://github.com/googleapis/google-cloud-python/compare/google-cloud-alloydb-v0.4.0...google-cloud-alloydb-v0.4.1) (2024-12-12) diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb/gapic_version.py b/packages/google-cloud-alloydb/google/cloud/alloydb/gapic_version.py index 72f01aec6400..ee41ffcc0a1d 100644 --- a/packages/google-cloud-alloydb/google/cloud/alloydb/gapic_version.py +++ b/packages/google-cloud-alloydb/google/cloud/alloydb/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.4.1" # {x-release-please-version} +__version__ = "0.4.2" # {x-release-please-version} diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1/gapic_version.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1/gapic_version.py index 72f01aec6400..ee41ffcc0a1d 100644 --- a/packages/google-cloud-alloydb/google/cloud/alloydb_v1/gapic_version.py +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.4.1" # {x-release-please-version} +__version__ = "0.4.2" # {x-release-please-version} diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1/services/alloy_db_admin/client.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1/services/alloy_db_admin/client.py index 9af3a0a32780..b9f974b9ddc1 100644 --- a/packages/google-cloud-alloydb/google/cloud/alloydb_v1/services/alloy_db_admin/client.py +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1/services/alloy_db_admin/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -677,6 +679,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -5161,16 +5190,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -5216,16 +5249,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def delete_operation( self, @@ -5382,16 +5419,20 @@ def get_location( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def list_locations( self, @@ -5437,16 +5478,20 @@ def list_locations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. 
- response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1/services/alloy_db_admin/transports/rest.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1/services/alloy_db_admin/transports/rest.py index faa1b840a61b..83eaa4d7b61e 100644 --- a/packages/google-cloud-alloydb/google/cloud/alloydb_v1/services/alloy_db_admin/transports/rest.py +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1/services/alloy_db_admin/transports/rest.py @@ -366,12 +366,35 @@ def post_batch_create_instances( ) -> operations_pb2.Operation: """Post-rpc interceptor for batch_create_instances - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_batch_create_instances_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AlloyDBAdmin server but before - it is returned to user code. + it is returned to user code. This `post_batch_create_instances` interceptor runs + before the `post_batch_create_instances_with_metadata` interceptor. """ return response + def post_batch_create_instances_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for batch_create_instances + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AlloyDBAdmin server but before it is returned to user code. + + We recommend only using this `post_batch_create_instances_with_metadata` + interceptor in new development instead of the `post_batch_create_instances` interceptor. + When both interceptors are used, this `post_batch_create_instances_with_metadata` interceptor runs after the + `post_batch_create_instances` interceptor. The (possibly modified) response returned by + `post_batch_create_instances` will be passed to + `post_batch_create_instances_with_metadata`. + """ + return response, metadata + def pre_create_backup( self, request: service.CreateBackupRequest, @@ -389,12 +412,35 @@ def post_create_backup( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_backup - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_backup_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AlloyDBAdmin server but before - it is returned to user code. + it is returned to user code. This `post_create_backup` interceptor runs + before the `post_create_backup_with_metadata` interceptor. """ return response + def post_create_backup_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_backup + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AlloyDBAdmin server but before it is returned to user code. 
+ + We recommend only using this `post_create_backup_with_metadata` + interceptor in new development instead of the `post_create_backup` interceptor. + When both interceptors are used, this `post_create_backup_with_metadata` interceptor runs after the + `post_create_backup` interceptor. The (possibly modified) response returned by + `post_create_backup` will be passed to + `post_create_backup_with_metadata`. + """ + return response, metadata + def pre_create_cluster( self, request: service.CreateClusterRequest, @@ -412,12 +458,35 @@ def post_create_cluster( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_cluster - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_cluster_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AlloyDBAdmin server but before - it is returned to user code. + it is returned to user code. This `post_create_cluster` interceptor runs + before the `post_create_cluster_with_metadata` interceptor. """ return response + def post_create_cluster_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_cluster + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AlloyDBAdmin server but before it is returned to user code. + + We recommend only using this `post_create_cluster_with_metadata` + interceptor in new development instead of the `post_create_cluster` interceptor. + When both interceptors are used, this `post_create_cluster_with_metadata` interceptor runs after the + `post_create_cluster` interceptor. The (possibly modified) response returned by + `post_create_cluster` will be passed to + `post_create_cluster_with_metadata`. + """ + return response, metadata + def pre_create_instance( self, request: service.CreateInstanceRequest, @@ -435,12 +504,35 @@ def post_create_instance( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_instance - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_instance_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AlloyDBAdmin server but before - it is returned to user code. + it is returned to user code. This `post_create_instance` interceptor runs + before the `post_create_instance_with_metadata` interceptor. """ return response + def post_create_instance_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_instance + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AlloyDBAdmin server but before it is returned to user code. + + We recommend only using this `post_create_instance_with_metadata` + interceptor in new development instead of the `post_create_instance` interceptor. + When both interceptors are used, this `post_create_instance_with_metadata` interceptor runs after the + `post_create_instance` interceptor. The (possibly modified) response returned by + `post_create_instance` will be passed to + `post_create_instance_with_metadata`. 
+ """ + return response, metadata + def pre_create_secondary_cluster( self, request: service.CreateSecondaryClusterRequest, @@ -460,12 +552,35 @@ def post_create_secondary_cluster( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_secondary_cluster - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_secondary_cluster_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AlloyDBAdmin server but before - it is returned to user code. + it is returned to user code. This `post_create_secondary_cluster` interceptor runs + before the `post_create_secondary_cluster_with_metadata` interceptor. """ return response + def post_create_secondary_cluster_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_secondary_cluster + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AlloyDBAdmin server but before it is returned to user code. + + We recommend only using this `post_create_secondary_cluster_with_metadata` + interceptor in new development instead of the `post_create_secondary_cluster` interceptor. + When both interceptors are used, this `post_create_secondary_cluster_with_metadata` interceptor runs after the + `post_create_secondary_cluster` interceptor. The (possibly modified) response returned by + `post_create_secondary_cluster` will be passed to + `post_create_secondary_cluster_with_metadata`. + """ + return response, metadata + def pre_create_secondary_instance( self, request: service.CreateSecondaryInstanceRequest, @@ -485,12 +600,35 @@ def post_create_secondary_instance( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_secondary_instance - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_secondary_instance_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AlloyDBAdmin server but before - it is returned to user code. + it is returned to user code. This `post_create_secondary_instance` interceptor runs + before the `post_create_secondary_instance_with_metadata` interceptor. """ return response + def post_create_secondary_instance_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_secondary_instance + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AlloyDBAdmin server but before it is returned to user code. + + We recommend only using this `post_create_secondary_instance_with_metadata` + interceptor in new development instead of the `post_create_secondary_instance` interceptor. + When both interceptors are used, this `post_create_secondary_instance_with_metadata` interceptor runs after the + `post_create_secondary_instance` interceptor. The (possibly modified) response returned by + `post_create_secondary_instance` will be passed to + `post_create_secondary_instance_with_metadata`. 
+ """ + return response, metadata + def pre_create_user( self, request: service.CreateUserRequest, @@ -506,12 +644,35 @@ def pre_create_user( def post_create_user(self, response: resources.User) -> resources.User: """Post-rpc interceptor for create_user - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_user_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AlloyDBAdmin server but before - it is returned to user code. + it is returned to user code. This `post_create_user` interceptor runs + before the `post_create_user_with_metadata` interceptor. """ return response + def post_create_user_with_metadata( + self, + response: resources.User, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.User, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_user + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AlloyDBAdmin server but before it is returned to user code. + + We recommend only using this `post_create_user_with_metadata` + interceptor in new development instead of the `post_create_user` interceptor. + When both interceptors are used, this `post_create_user_with_metadata` interceptor runs after the + `post_create_user` interceptor. The (possibly modified) response returned by + `post_create_user` will be passed to + `post_create_user_with_metadata`. + """ + return response, metadata + def pre_delete_backup( self, request: service.DeleteBackupRequest, @@ -529,12 +690,35 @@ def post_delete_backup( ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_backup - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_backup_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AlloyDBAdmin server but before - it is returned to user code. + it is returned to user code. This `post_delete_backup` interceptor runs + before the `post_delete_backup_with_metadata` interceptor. """ return response + def post_delete_backup_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_backup + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AlloyDBAdmin server but before it is returned to user code. + + We recommend only using this `post_delete_backup_with_metadata` + interceptor in new development instead of the `post_delete_backup` interceptor. + When both interceptors are used, this `post_delete_backup_with_metadata` interceptor runs after the + `post_delete_backup` interceptor. The (possibly modified) response returned by + `post_delete_backup` will be passed to + `post_delete_backup_with_metadata`. + """ + return response, metadata + def pre_delete_cluster( self, request: service.DeleteClusterRequest, @@ -552,12 +736,35 @@ def post_delete_cluster( ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_cluster - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_cluster_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AlloyDBAdmin server but before - it is returned to user code. + it is returned to user code. 
This `post_delete_cluster` interceptor runs + before the `post_delete_cluster_with_metadata` interceptor. """ return response + def post_delete_cluster_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_cluster + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AlloyDBAdmin server but before it is returned to user code. + + We recommend only using this `post_delete_cluster_with_metadata` + interceptor in new development instead of the `post_delete_cluster` interceptor. + When both interceptors are used, this `post_delete_cluster_with_metadata` interceptor runs after the + `post_delete_cluster` interceptor. The (possibly modified) response returned by + `post_delete_cluster` will be passed to + `post_delete_cluster_with_metadata`. + """ + return response, metadata + def pre_delete_instance( self, request: service.DeleteInstanceRequest, @@ -575,12 +782,35 @@ def post_delete_instance( ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_instance - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_instance_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AlloyDBAdmin server but before - it is returned to user code. + it is returned to user code. This `post_delete_instance` interceptor runs + before the `post_delete_instance_with_metadata` interceptor. """ return response + def post_delete_instance_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_instance + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AlloyDBAdmin server but before it is returned to user code. + + We recommend only using this `post_delete_instance_with_metadata` + interceptor in new development instead of the `post_delete_instance` interceptor. + When both interceptors are used, this `post_delete_instance_with_metadata` interceptor runs after the + `post_delete_instance` interceptor. The (possibly modified) response returned by + `post_delete_instance` will be passed to + `post_delete_instance_with_metadata`. + """ + return response, metadata + def pre_delete_user( self, request: service.DeleteUserRequest, @@ -610,12 +840,35 @@ def post_execute_sql( ) -> service.ExecuteSqlResponse: """Post-rpc interceptor for execute_sql - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_execute_sql_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AlloyDBAdmin server but before - it is returned to user code. + it is returned to user code. This `post_execute_sql` interceptor runs + before the `post_execute_sql_with_metadata` interceptor. 
""" return response + def post_execute_sql_with_metadata( + self, + response: service.ExecuteSqlResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[service.ExecuteSqlResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for execute_sql + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AlloyDBAdmin server but before it is returned to user code. + + We recommend only using this `post_execute_sql_with_metadata` + interceptor in new development instead of the `post_execute_sql` interceptor. + When both interceptors are used, this `post_execute_sql_with_metadata` interceptor runs after the + `post_execute_sql` interceptor. The (possibly modified) response returned by + `post_execute_sql` will be passed to + `post_execute_sql_with_metadata`. + """ + return response, metadata + def pre_failover_instance( self, request: service.FailoverInstanceRequest, @@ -635,12 +888,35 @@ def post_failover_instance( ) -> operations_pb2.Operation: """Post-rpc interceptor for failover_instance - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_failover_instance_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AlloyDBAdmin server but before - it is returned to user code. + it is returned to user code. This `post_failover_instance` interceptor runs + before the `post_failover_instance_with_metadata` interceptor. """ return response + def post_failover_instance_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for failover_instance + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AlloyDBAdmin server but before it is returned to user code. + + We recommend only using this `post_failover_instance_with_metadata` + interceptor in new development instead of the `post_failover_instance` interceptor. + When both interceptors are used, this `post_failover_instance_with_metadata` interceptor runs after the + `post_failover_instance` interceptor. The (possibly modified) response returned by + `post_failover_instance` will be passed to + `post_failover_instance_with_metadata`. + """ + return response, metadata + def pre_generate_client_certificate( self, request: service.GenerateClientCertificateRequest, @@ -661,12 +937,38 @@ def post_generate_client_certificate( ) -> service.GenerateClientCertificateResponse: """Post-rpc interceptor for generate_client_certificate - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_generate_client_certificate_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AlloyDBAdmin server but before - it is returned to user code. + it is returned to user code. This `post_generate_client_certificate` interceptor runs + before the `post_generate_client_certificate_with_metadata` interceptor. 
""" return response + def post_generate_client_certificate_with_metadata( + self, + response: service.GenerateClientCertificateResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + service.GenerateClientCertificateResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for generate_client_certificate + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AlloyDBAdmin server but before it is returned to user code. + + We recommend only using this `post_generate_client_certificate_with_metadata` + interceptor in new development instead of the `post_generate_client_certificate` interceptor. + When both interceptors are used, this `post_generate_client_certificate_with_metadata` interceptor runs after the + `post_generate_client_certificate` interceptor. The (possibly modified) response returned by + `post_generate_client_certificate` will be passed to + `post_generate_client_certificate_with_metadata`. + """ + return response, metadata + def pre_get_backup( self, request: service.GetBackupRequest, @@ -682,12 +984,35 @@ def pre_get_backup( def post_get_backup(self, response: resources.Backup) -> resources.Backup: """Post-rpc interceptor for get_backup - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_backup_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AlloyDBAdmin server but before - it is returned to user code. + it is returned to user code. This `post_get_backup` interceptor runs + before the `post_get_backup_with_metadata` interceptor. """ return response + def post_get_backup_with_metadata( + self, + response: resources.Backup, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.Backup, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_backup + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AlloyDBAdmin server but before it is returned to user code. + + We recommend only using this `post_get_backup_with_metadata` + interceptor in new development instead of the `post_get_backup` interceptor. + When both interceptors are used, this `post_get_backup_with_metadata` interceptor runs after the + `post_get_backup` interceptor. The (possibly modified) response returned by + `post_get_backup` will be passed to + `post_get_backup_with_metadata`. + """ + return response, metadata + def pre_get_cluster( self, request: service.GetClusterRequest, @@ -703,12 +1028,35 @@ def pre_get_cluster( def post_get_cluster(self, response: resources.Cluster) -> resources.Cluster: """Post-rpc interceptor for get_cluster - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_cluster_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AlloyDBAdmin server but before - it is returned to user code. + it is returned to user code. This `post_get_cluster` interceptor runs + before the `post_get_cluster_with_metadata` interceptor. 
""" return response + def post_get_cluster_with_metadata( + self, + response: resources.Cluster, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.Cluster, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_cluster + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AlloyDBAdmin server but before it is returned to user code. + + We recommend only using this `post_get_cluster_with_metadata` + interceptor in new development instead of the `post_get_cluster` interceptor. + When both interceptors are used, this `post_get_cluster_with_metadata` interceptor runs after the + `post_get_cluster` interceptor. The (possibly modified) response returned by + `post_get_cluster` will be passed to + `post_get_cluster_with_metadata`. + """ + return response, metadata + def pre_get_connection_info( self, request: service.GetConnectionInfoRequest, @@ -728,12 +1076,35 @@ def post_get_connection_info( ) -> resources.ConnectionInfo: """Post-rpc interceptor for get_connection_info - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_connection_info_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AlloyDBAdmin server but before - it is returned to user code. + it is returned to user code. This `post_get_connection_info` interceptor runs + before the `post_get_connection_info_with_metadata` interceptor. """ return response + def post_get_connection_info_with_metadata( + self, + response: resources.ConnectionInfo, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.ConnectionInfo, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_connection_info + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AlloyDBAdmin server but before it is returned to user code. + + We recommend only using this `post_get_connection_info_with_metadata` + interceptor in new development instead of the `post_get_connection_info` interceptor. + When both interceptors are used, this `post_get_connection_info_with_metadata` interceptor runs after the + `post_get_connection_info` interceptor. The (possibly modified) response returned by + `post_get_connection_info` will be passed to + `post_get_connection_info_with_metadata`. + """ + return response, metadata + def pre_get_instance( self, request: service.GetInstanceRequest, @@ -749,12 +1120,35 @@ def pre_get_instance( def post_get_instance(self, response: resources.Instance) -> resources.Instance: """Post-rpc interceptor for get_instance - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_instance_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AlloyDBAdmin server but before - it is returned to user code. + it is returned to user code. This `post_get_instance` interceptor runs + before the `post_get_instance_with_metadata` interceptor. """ return response + def post_get_instance_with_metadata( + self, + response: resources.Instance, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.Instance, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_instance + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AlloyDBAdmin server but before it is returned to user code. 
+ + We recommend only using this `post_get_instance_with_metadata` + interceptor in new development instead of the `post_get_instance` interceptor. + When both interceptors are used, this `post_get_instance_with_metadata` interceptor runs after the + `post_get_instance` interceptor. The (possibly modified) response returned by + `post_get_instance` will be passed to + `post_get_instance_with_metadata`. + """ + return response, metadata + def pre_get_user( self, request: service.GetUserRequest, @@ -770,12 +1164,35 @@ def pre_get_user( def post_get_user(self, response: resources.User) -> resources.User: """Post-rpc interceptor for get_user - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_user_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AlloyDBAdmin server but before - it is returned to user code. + it is returned to user code. This `post_get_user` interceptor runs + before the `post_get_user_with_metadata` interceptor. """ return response + def post_get_user_with_metadata( + self, + response: resources.User, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.User, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_user + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AlloyDBAdmin server but before it is returned to user code. + + We recommend only using this `post_get_user_with_metadata` + interceptor in new development instead of the `post_get_user` interceptor. + When both interceptors are used, this `post_get_user_with_metadata` interceptor runs after the + `post_get_user` interceptor. The (possibly modified) response returned by + `post_get_user` will be passed to + `post_get_user_with_metadata`. + """ + return response, metadata + def pre_inject_fault( self, request: service.InjectFaultRequest, @@ -793,12 +1210,35 @@ def post_inject_fault( ) -> operations_pb2.Operation: """Post-rpc interceptor for inject_fault - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_inject_fault_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AlloyDBAdmin server but before - it is returned to user code. + it is returned to user code. This `post_inject_fault` interceptor runs + before the `post_inject_fault_with_metadata` interceptor. """ return response + def post_inject_fault_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for inject_fault + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AlloyDBAdmin server but before it is returned to user code. + + We recommend only using this `post_inject_fault_with_metadata` + interceptor in new development instead of the `post_inject_fault` interceptor. + When both interceptors are used, this `post_inject_fault_with_metadata` interceptor runs after the + `post_inject_fault` interceptor. The (possibly modified) response returned by + `post_inject_fault` will be passed to + `post_inject_fault_with_metadata`. 
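Where the metadata handed to these hooks comes from: the transport changes further down in this diff capture the HTTP response headers as (key, str(value)) pairs before invoking the `_with_metadata` interceptor. A tiny illustration with stand-in header values:

    # Stand-in for `response.headers` on the underlying HTTP response.
    response_headers = {"content-type": "application/json", "x-debug-id": "abc123"}

    # Same construction the generated __call__ methods use below.
    response_metadata = [(k, str(v)) for k, v in response_headers.items()]
    # -> [('content-type', 'application/json'), ('x-debug-id', 'abc123')]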
+ """ + return response, metadata + def pre_list_backups( self, request: service.ListBackupsRequest, @@ -816,12 +1256,35 @@ def post_list_backups( ) -> service.ListBackupsResponse: """Post-rpc interceptor for list_backups - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_backups_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AlloyDBAdmin server but before - it is returned to user code. + it is returned to user code. This `post_list_backups` interceptor runs + before the `post_list_backups_with_metadata` interceptor. """ return response + def post_list_backups_with_metadata( + self, + response: service.ListBackupsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[service.ListBackupsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_backups + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AlloyDBAdmin server but before it is returned to user code. + + We recommend only using this `post_list_backups_with_metadata` + interceptor in new development instead of the `post_list_backups` interceptor. + When both interceptors are used, this `post_list_backups_with_metadata` interceptor runs after the + `post_list_backups` interceptor. The (possibly modified) response returned by + `post_list_backups` will be passed to + `post_list_backups_with_metadata`. + """ + return response, metadata + def pre_list_clusters( self, request: service.ListClustersRequest, @@ -839,12 +1302,35 @@ def post_list_clusters( ) -> service.ListClustersResponse: """Post-rpc interceptor for list_clusters - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_clusters_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AlloyDBAdmin server but before - it is returned to user code. + it is returned to user code. This `post_list_clusters` interceptor runs + before the `post_list_clusters_with_metadata` interceptor. """ return response + def post_list_clusters_with_metadata( + self, + response: service.ListClustersResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[service.ListClustersResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_clusters + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AlloyDBAdmin server but before it is returned to user code. + + We recommend only using this `post_list_clusters_with_metadata` + interceptor in new development instead of the `post_list_clusters` interceptor. + When both interceptors are used, this `post_list_clusters_with_metadata` interceptor runs after the + `post_list_clusters` interceptor. The (possibly modified) response returned by + `post_list_clusters` will be passed to + `post_list_clusters_with_metadata`. + """ + return response, metadata + def pre_list_databases( self, request: service.ListDatabasesRequest, @@ -862,12 +1348,35 @@ def post_list_databases( ) -> service.ListDatabasesResponse: """Post-rpc interceptor for list_databases - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_databases_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AlloyDBAdmin server but before - it is returned to user code. 
+ it is returned to user code. This `post_list_databases` interceptor runs + before the `post_list_databases_with_metadata` interceptor. """ return response + def post_list_databases_with_metadata( + self, + response: service.ListDatabasesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[service.ListDatabasesResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_databases + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AlloyDBAdmin server but before it is returned to user code. + + We recommend only using this `post_list_databases_with_metadata` + interceptor in new development instead of the `post_list_databases` interceptor. + When both interceptors are used, this `post_list_databases_with_metadata` interceptor runs after the + `post_list_databases` interceptor. The (possibly modified) response returned by + `post_list_databases` will be passed to + `post_list_databases_with_metadata`. + """ + return response, metadata + def pre_list_instances( self, request: service.ListInstancesRequest, @@ -885,12 +1394,35 @@ def post_list_instances( ) -> service.ListInstancesResponse: """Post-rpc interceptor for list_instances - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_instances_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AlloyDBAdmin server but before - it is returned to user code. + it is returned to user code. This `post_list_instances` interceptor runs + before the `post_list_instances_with_metadata` interceptor. """ return response + def post_list_instances_with_metadata( + self, + response: service.ListInstancesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[service.ListInstancesResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_instances + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AlloyDBAdmin server but before it is returned to user code. + + We recommend only using this `post_list_instances_with_metadata` + interceptor in new development instead of the `post_list_instances` interceptor. + When both interceptors are used, this `post_list_instances_with_metadata` interceptor runs after the + `post_list_instances` interceptor. The (possibly modified) response returned by + `post_list_instances` will be passed to + `post_list_instances_with_metadata`. + """ + return response, metadata + def pre_list_supported_database_flags( self, request: service.ListSupportedDatabaseFlagsRequest, @@ -911,12 +1443,38 @@ def post_list_supported_database_flags( ) -> service.ListSupportedDatabaseFlagsResponse: """Post-rpc interceptor for list_supported_database_flags - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_supported_database_flags_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AlloyDBAdmin server but before - it is returned to user code. + it is returned to user code. This `post_list_supported_database_flags` interceptor runs + before the `post_list_supported_database_flags_with_metadata` interceptor. 
""" return response + def post_list_supported_database_flags_with_metadata( + self, + response: service.ListSupportedDatabaseFlagsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + service.ListSupportedDatabaseFlagsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_supported_database_flags + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AlloyDBAdmin server but before it is returned to user code. + + We recommend only using this `post_list_supported_database_flags_with_metadata` + interceptor in new development instead of the `post_list_supported_database_flags` interceptor. + When both interceptors are used, this `post_list_supported_database_flags_with_metadata` interceptor runs after the + `post_list_supported_database_flags` interceptor. The (possibly modified) response returned by + `post_list_supported_database_flags` will be passed to + `post_list_supported_database_flags_with_metadata`. + """ + return response, metadata + def pre_list_users( self, request: service.ListUsersRequest, @@ -934,12 +1492,35 @@ def post_list_users( ) -> service.ListUsersResponse: """Post-rpc interceptor for list_users - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_users_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AlloyDBAdmin server but before - it is returned to user code. + it is returned to user code. This `post_list_users` interceptor runs + before the `post_list_users_with_metadata` interceptor. """ return response + def post_list_users_with_metadata( + self, + response: service.ListUsersResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[service.ListUsersResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_users + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AlloyDBAdmin server but before it is returned to user code. + + We recommend only using this `post_list_users_with_metadata` + interceptor in new development instead of the `post_list_users` interceptor. + When both interceptors are used, this `post_list_users_with_metadata` interceptor runs after the + `post_list_users` interceptor. The (possibly modified) response returned by + `post_list_users` will be passed to + `post_list_users_with_metadata`. + """ + return response, metadata + def pre_promote_cluster( self, request: service.PromoteClusterRequest, @@ -957,12 +1538,35 @@ def post_promote_cluster( ) -> operations_pb2.Operation: """Post-rpc interceptor for promote_cluster - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_promote_cluster_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AlloyDBAdmin server but before - it is returned to user code. + it is returned to user code. This `post_promote_cluster` interceptor runs + before the `post_promote_cluster_with_metadata` interceptor. 
""" return response + def post_promote_cluster_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for promote_cluster + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AlloyDBAdmin server but before it is returned to user code. + + We recommend only using this `post_promote_cluster_with_metadata` + interceptor in new development instead of the `post_promote_cluster` interceptor. + When both interceptors are used, this `post_promote_cluster_with_metadata` interceptor runs after the + `post_promote_cluster` interceptor. The (possibly modified) response returned by + `post_promote_cluster` will be passed to + `post_promote_cluster_with_metadata`. + """ + return response, metadata + def pre_restart_instance( self, request: service.RestartInstanceRequest, @@ -980,12 +1584,35 @@ def post_restart_instance( ) -> operations_pb2.Operation: """Post-rpc interceptor for restart_instance - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_restart_instance_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AlloyDBAdmin server but before - it is returned to user code. + it is returned to user code. This `post_restart_instance` interceptor runs + before the `post_restart_instance_with_metadata` interceptor. """ return response + def post_restart_instance_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for restart_instance + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AlloyDBAdmin server but before it is returned to user code. + + We recommend only using this `post_restart_instance_with_metadata` + interceptor in new development instead of the `post_restart_instance` interceptor. + When both interceptors are used, this `post_restart_instance_with_metadata` interceptor runs after the + `post_restart_instance` interceptor. The (possibly modified) response returned by + `post_restart_instance` will be passed to + `post_restart_instance_with_metadata`. + """ + return response, metadata + def pre_restore_cluster( self, request: service.RestoreClusterRequest, @@ -1003,12 +1630,35 @@ def post_restore_cluster( ) -> operations_pb2.Operation: """Post-rpc interceptor for restore_cluster - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_restore_cluster_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AlloyDBAdmin server but before - it is returned to user code. + it is returned to user code. This `post_restore_cluster` interceptor runs + before the `post_restore_cluster_with_metadata` interceptor. """ return response + def post_restore_cluster_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for restore_cluster + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AlloyDBAdmin server but before it is returned to user code. 
+ + We recommend only using this `post_restore_cluster_with_metadata` + interceptor in new development instead of the `post_restore_cluster` interceptor. + When both interceptors are used, this `post_restore_cluster_with_metadata` interceptor runs after the + `post_restore_cluster` interceptor. The (possibly modified) response returned by + `post_restore_cluster` will be passed to + `post_restore_cluster_with_metadata`. + """ + return response, metadata + def pre_switchover_cluster( self, request: service.SwitchoverClusterRequest, @@ -1028,12 +1678,35 @@ def post_switchover_cluster( ) -> operations_pb2.Operation: """Post-rpc interceptor for switchover_cluster - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_switchover_cluster_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AlloyDBAdmin server but before - it is returned to user code. + it is returned to user code. This `post_switchover_cluster` interceptor runs + before the `post_switchover_cluster_with_metadata` interceptor. """ return response + def post_switchover_cluster_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for switchover_cluster + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AlloyDBAdmin server but before it is returned to user code. + + We recommend only using this `post_switchover_cluster_with_metadata` + interceptor in new development instead of the `post_switchover_cluster` interceptor. + When both interceptors are used, this `post_switchover_cluster_with_metadata` interceptor runs after the + `post_switchover_cluster` interceptor. The (possibly modified) response returned by + `post_switchover_cluster` will be passed to + `post_switchover_cluster_with_metadata`. + """ + return response, metadata + def pre_update_backup( self, request: service.UpdateBackupRequest, @@ -1051,12 +1724,35 @@ def post_update_backup( ) -> operations_pb2.Operation: """Post-rpc interceptor for update_backup - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_backup_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AlloyDBAdmin server but before - it is returned to user code. + it is returned to user code. This `post_update_backup` interceptor runs + before the `post_update_backup_with_metadata` interceptor. """ return response + def post_update_backup_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_backup + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AlloyDBAdmin server but before it is returned to user code. + + We recommend only using this `post_update_backup_with_metadata` + interceptor in new development instead of the `post_update_backup` interceptor. + When both interceptors are used, this `post_update_backup_with_metadata` interceptor runs after the + `post_update_backup` interceptor. The (possibly modified) response returned by + `post_update_backup` will be passed to + `post_update_backup_with_metadata`. 
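When both hooks are overridden, the deprecated hook runs first and its return value feeds the new one, as the docstrings above state. A sketch assuming the same AlloyDBAdminRestInterceptor base class:

    from google.cloud.alloydb_v1alpha.services.alloy_db_admin.transports.rest import (
        AlloyDBAdminRestInterceptor,
    )

    class AuditInterceptor(AlloyDBAdminRestInterceptor):
        def post_update_backup(self, response):
            # Runs first (deprecated hook); may return a modified response.
            return response

        def post_update_backup_with_metadata(self, response, metadata):
            # Runs second; receives whatever post_update_backup returned,
            # plus the header-derived metadata, and must return both.
            return response, metadata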
+ """ + return response, metadata + def pre_update_cluster( self, request: service.UpdateClusterRequest, @@ -1074,12 +1770,35 @@ def post_update_cluster( ) -> operations_pb2.Operation: """Post-rpc interceptor for update_cluster - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_cluster_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AlloyDBAdmin server but before - it is returned to user code. + it is returned to user code. This `post_update_cluster` interceptor runs + before the `post_update_cluster_with_metadata` interceptor. """ return response + def post_update_cluster_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_cluster + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AlloyDBAdmin server but before it is returned to user code. + + We recommend only using this `post_update_cluster_with_metadata` + interceptor in new development instead of the `post_update_cluster` interceptor. + When both interceptors are used, this `post_update_cluster_with_metadata` interceptor runs after the + `post_update_cluster` interceptor. The (possibly modified) response returned by + `post_update_cluster` will be passed to + `post_update_cluster_with_metadata`. + """ + return response, metadata + def pre_update_instance( self, request: service.UpdateInstanceRequest, @@ -1097,12 +1816,35 @@ def post_update_instance( ) -> operations_pb2.Operation: """Post-rpc interceptor for update_instance - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_instance_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AlloyDBAdmin server but before - it is returned to user code. + it is returned to user code. This `post_update_instance` interceptor runs + before the `post_update_instance_with_metadata` interceptor. """ return response + def post_update_instance_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_instance + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AlloyDBAdmin server but before it is returned to user code. + + We recommend only using this `post_update_instance_with_metadata` + interceptor in new development instead of the `post_update_instance` interceptor. + When both interceptors are used, this `post_update_instance_with_metadata` interceptor runs after the + `post_update_instance` interceptor. The (possibly modified) response returned by + `post_update_instance` will be passed to + `post_update_instance_with_metadata`. + """ + return response, metadata + def pre_update_user( self, request: service.UpdateUserRequest, @@ -1118,12 +1860,35 @@ def pre_update_user( def post_update_user(self, response: resources.User) -> resources.User: """Post-rpc interceptor for update_user - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_user_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response after it is returned by the AlloyDBAdmin server but before - it is returned to user code. + it is returned to user code. This `post_update_user` interceptor runs + before the `post_update_user_with_metadata` interceptor. """ return response + def post_update_user_with_metadata( + self, + response: resources.User, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.User, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_user + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AlloyDBAdmin server but before it is returned to user code. + + We recommend only using this `post_update_user_with_metadata` + interceptor in new development instead of the `post_update_user` interceptor. + When both interceptors are used, this `post_update_user_with_metadata` interceptor runs after the + `post_update_user` interceptor. The (possibly modified) response returned by + `post_update_user` will be passed to + `post_update_user_with_metadata`. + """ + return response, metadata + def pre_get_location( self, request: locations_pb2.GetLocationRequest, @@ -1538,6 +2303,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_batch_create_instances(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_batch_create_instances_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1689,6 +2458,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_backup(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_backup_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1836,6 +2609,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_cluster(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_cluster_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1983,6 +2760,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_instance(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_instance_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2132,6 +2913,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_secondary_cluster(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_secondary_cluster_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2283,6 +3068,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_secondary_instance(resp) + response_metadata = [(k, str(v)) for k, v in 
response.headers.items()] + resp, _ = self._interceptor.post_create_secondary_instance_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2435,6 +3224,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_user(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_user_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2578,6 +3371,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_backup(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_backup_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2719,6 +3516,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_cluster(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_cluster_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2860,6 +3661,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_instance(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_instance_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3122,6 +3927,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_execute_sql(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_execute_sql_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3272,6 +4081,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_failover_instance(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_failover_instance_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3428,6 +4241,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_generate_client_certificate(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_generate_client_certificate_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3574,6 +4391,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_backup(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_backup_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): 
# pragma: NO COVER @@ -3724,6 +4545,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_cluster(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_cluster_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3869,6 +4694,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_connection_info(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_connection_info_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4017,6 +4846,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_instance(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_instance_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4159,6 +4992,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_user(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_user_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4313,6 +5150,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_inject_fault(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_inject_fault_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4460,6 +5301,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_backups(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_backups_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4605,6 +5450,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_clusters(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_clusters_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4748,6 +5597,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_databases(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_databases_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4891,6 +5744,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_instances(resp) + response_metadata = [(k, str(v)) for k, v in 
response.headers.items()] + resp, _ = self._interceptor.post_list_instances_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -5040,6 +5897,13 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_supported_database_flags(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_list_supported_database_flags_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -5186,6 +6050,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_users(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_users_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -5333,6 +6201,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_promote_cluster(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_promote_cluster_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -5482,6 +6354,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_restart_instance(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_restart_instance_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -5631,6 +6507,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_restore_cluster(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_restore_cluster_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -5781,6 +6661,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_switchover_cluster(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_switchover_cluster_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -5932,6 +6816,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_backup(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_backup_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -6079,6 +6967,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_cluster(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_cluster_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and 
_LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -6226,6 +7118,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_instance(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_instance_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -6378,6 +7274,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_user(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_user_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/gapic_version.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/gapic_version.py index 72f01aec6400..ee41ffcc0a1d 100644 --- a/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/gapic_version.py +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.4.1" # {x-release-please-version} +__version__ = "0.4.2" # {x-release-please-version} diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/services/alloy_db_admin/client.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/services/alloy_db_admin/client.py index 8be23dd20736..2022b8966e0a 100644 --- a/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/services/alloy_db_admin/client.py +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/services/alloy_db_admin/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -683,6 +685,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -5299,16 +5328,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. 
+ return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -5354,16 +5387,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def delete_operation( self, @@ -5520,16 +5557,20 @@ def get_location( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def list_locations( self, @@ -5575,16 +5616,20 @@ def list_locations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/services/alloy_db_admin/transports/rest.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/services/alloy_db_admin/transports/rest.py index 7a0bbcd53a09..d8a83afe7f05 100644 --- a/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/services/alloy_db_admin/transports/rest.py +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/services/alloy_db_admin/transports/rest.py @@ -374,12 +374,35 @@ def post_batch_create_instances( ) -> operations_pb2.Operation: """Post-rpc interceptor for batch_create_instances - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_batch_create_instances_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AlloyDBAdmin server but before - it is returned to user code. + it is returned to user code. This `post_batch_create_instances` interceptor runs + before the `post_batch_create_instances_with_metadata` interceptor. """ return response + def post_batch_create_instances_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for batch_create_instances + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AlloyDBAdmin server but before it is returned to user code. + + We recommend only using this `post_batch_create_instances_with_metadata` + interceptor in new development instead of the `post_batch_create_instances` interceptor. 
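The client changes above (list_operations, get_operation, get_location, list_locations) now wrap the RPC in try/except and, for 401/403/404 responses, append credential info to the exception's details when google-auth >= 2.35.0 exposes get_cred_info. A sketch of what a caller would observe, reusing the `client` from the earlier sketch and a hypothetical project name:

    from google.api_core import exceptions as core_exceptions

    try:
        client.get_location(request={"name": "projects/my-project/locations/us-central1"})
    except core_exceptions.GoogleAPICallError as e:
        # For UNAUTHORIZED / FORBIDDEN / NOT_FOUND, e.details may now also
        # carry a JSON string describing the credential that was used.
        print(e.code, e.details)
        raise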
+ When both interceptors are used, this `post_batch_create_instances_with_metadata` interceptor runs after the + `post_batch_create_instances` interceptor. The (possibly modified) response returned by + `post_batch_create_instances` will be passed to + `post_batch_create_instances_with_metadata`. + """ + return response, metadata + def pre_create_backup( self, request: service.CreateBackupRequest, @@ -397,12 +420,35 @@ def post_create_backup( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_backup - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_backup_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AlloyDBAdmin server but before - it is returned to user code. + it is returned to user code. This `post_create_backup` interceptor runs + before the `post_create_backup_with_metadata` interceptor. """ return response + def post_create_backup_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_backup + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AlloyDBAdmin server but before it is returned to user code. + + We recommend only using this `post_create_backup_with_metadata` + interceptor in new development instead of the `post_create_backup` interceptor. + When both interceptors are used, this `post_create_backup_with_metadata` interceptor runs after the + `post_create_backup` interceptor. The (possibly modified) response returned by + `post_create_backup` will be passed to + `post_create_backup_with_metadata`. + """ + return response, metadata + def pre_create_cluster( self, request: service.CreateClusterRequest, @@ -420,12 +466,35 @@ def post_create_cluster( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_cluster - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_cluster_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AlloyDBAdmin server but before - it is returned to user code. + it is returned to user code. This `post_create_cluster` interceptor runs + before the `post_create_cluster_with_metadata` interceptor. """ return response + def post_create_cluster_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_cluster + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AlloyDBAdmin server but before it is returned to user code. + + We recommend only using this `post_create_cluster_with_metadata` + interceptor in new development instead of the `post_create_cluster` interceptor. + When both interceptors are used, this `post_create_cluster_with_metadata` interceptor runs after the + `post_create_cluster` interceptor. The (possibly modified) response returned by + `post_create_cluster` will be passed to + `post_create_cluster_with_metadata`. 
+ """ + return response, metadata + def pre_create_instance( self, request: service.CreateInstanceRequest, @@ -443,12 +512,35 @@ def post_create_instance( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_instance - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_instance_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AlloyDBAdmin server but before - it is returned to user code. + it is returned to user code. This `post_create_instance` interceptor runs + before the `post_create_instance_with_metadata` interceptor. """ return response + def post_create_instance_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_instance + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AlloyDBAdmin server but before it is returned to user code. + + We recommend only using this `post_create_instance_with_metadata` + interceptor in new development instead of the `post_create_instance` interceptor. + When both interceptors are used, this `post_create_instance_with_metadata` interceptor runs after the + `post_create_instance` interceptor. The (possibly modified) response returned by + `post_create_instance` will be passed to + `post_create_instance_with_metadata`. + """ + return response, metadata + def pre_create_secondary_cluster( self, request: service.CreateSecondaryClusterRequest, @@ -468,12 +560,35 @@ def post_create_secondary_cluster( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_secondary_cluster - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_secondary_cluster_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AlloyDBAdmin server but before - it is returned to user code. + it is returned to user code. This `post_create_secondary_cluster` interceptor runs + before the `post_create_secondary_cluster_with_metadata` interceptor. """ return response + def post_create_secondary_cluster_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_secondary_cluster + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AlloyDBAdmin server but before it is returned to user code. + + We recommend only using this `post_create_secondary_cluster_with_metadata` + interceptor in new development instead of the `post_create_secondary_cluster` interceptor. + When both interceptors are used, this `post_create_secondary_cluster_with_metadata` interceptor runs after the + `post_create_secondary_cluster` interceptor. The (possibly modified) response returned by + `post_create_secondary_cluster` will be passed to + `post_create_secondary_cluster_with_metadata`. + """ + return response, metadata + def pre_create_secondary_instance( self, request: service.CreateSecondaryInstanceRequest, @@ -493,12 +608,35 @@ def post_create_secondary_instance( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_secondary_instance - Override in a subclass to manipulate the response + DEPRECATED. 
Please use the `post_create_secondary_instance_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AlloyDBAdmin server but before - it is returned to user code. + it is returned to user code. This `post_create_secondary_instance` interceptor runs + before the `post_create_secondary_instance_with_metadata` interceptor. """ return response + def post_create_secondary_instance_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_secondary_instance + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AlloyDBAdmin server but before it is returned to user code. + + We recommend only using this `post_create_secondary_instance_with_metadata` + interceptor in new development instead of the `post_create_secondary_instance` interceptor. + When both interceptors are used, this `post_create_secondary_instance_with_metadata` interceptor runs after the + `post_create_secondary_instance` interceptor. The (possibly modified) response returned by + `post_create_secondary_instance` will be passed to + `post_create_secondary_instance_with_metadata`. + """ + return response, metadata + def pre_create_user( self, request: service.CreateUserRequest, @@ -514,12 +652,35 @@ def pre_create_user( def post_create_user(self, response: resources.User) -> resources.User: """Post-rpc interceptor for create_user - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_user_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AlloyDBAdmin server but before - it is returned to user code. + it is returned to user code. This `post_create_user` interceptor runs + before the `post_create_user_with_metadata` interceptor. """ return response + def post_create_user_with_metadata( + self, + response: resources.User, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.User, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_user + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AlloyDBAdmin server but before it is returned to user code. + + We recommend only using this `post_create_user_with_metadata` + interceptor in new development instead of the `post_create_user` interceptor. + When both interceptors are used, this `post_create_user_with_metadata` interceptor runs after the + `post_create_user` interceptor. The (possibly modified) response returned by + `post_create_user` will be passed to + `post_create_user_with_metadata`. + """ + return response, metadata + def pre_delete_backup( self, request: service.DeleteBackupRequest, @@ -537,12 +698,35 @@ def post_delete_backup( ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_backup - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_backup_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AlloyDBAdmin server but before - it is returned to user code. + it is returned to user code. This `post_delete_backup` interceptor runs + before the `post_delete_backup_with_metadata` interceptor. 
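Migrating an existing override from the deprecated hook to the metadata-aware one is mechanical: the hook point is the same, but the new method also receives the metadata and must return a (response, metadata) tuple. A sketch for delete_backup, whose response type is a long-running operation; the base-class import path follows the v1alpha module in this diff:

    from google.longrunning import operations_pb2
    from google.cloud.alloydb_v1alpha.services.alloy_db_admin.transports.rest import (
        AlloyDBAdminRestInterceptor,
    )

    class MyInterceptor(AlloyDBAdminRestInterceptor):
        # Before: def post_delete_backup(self, response): return response
        def post_delete_backup_with_metadata(self, response, metadata):
            # The delete_backup response surfaced here is an Operation proto.
            assert isinstance(response, operations_pb2.Operation)
            return response, metadata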
""" return response + def post_delete_backup_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_backup + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AlloyDBAdmin server but before it is returned to user code. + + We recommend only using this `post_delete_backup_with_metadata` + interceptor in new development instead of the `post_delete_backup` interceptor. + When both interceptors are used, this `post_delete_backup_with_metadata` interceptor runs after the + `post_delete_backup` interceptor. The (possibly modified) response returned by + `post_delete_backup` will be passed to + `post_delete_backup_with_metadata`. + """ + return response, metadata + def pre_delete_cluster( self, request: service.DeleteClusterRequest, @@ -560,12 +744,35 @@ def post_delete_cluster( ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_cluster - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_cluster_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AlloyDBAdmin server but before - it is returned to user code. + it is returned to user code. This `post_delete_cluster` interceptor runs + before the `post_delete_cluster_with_metadata` interceptor. """ return response + def post_delete_cluster_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_cluster + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AlloyDBAdmin server but before it is returned to user code. + + We recommend only using this `post_delete_cluster_with_metadata` + interceptor in new development instead of the `post_delete_cluster` interceptor. + When both interceptors are used, this `post_delete_cluster_with_metadata` interceptor runs after the + `post_delete_cluster` interceptor. The (possibly modified) response returned by + `post_delete_cluster` will be passed to + `post_delete_cluster_with_metadata`. + """ + return response, metadata + def pre_delete_instance( self, request: service.DeleteInstanceRequest, @@ -583,12 +790,35 @@ def post_delete_instance( ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_instance - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_instance_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AlloyDBAdmin server but before - it is returned to user code. + it is returned to user code. This `post_delete_instance` interceptor runs + before the `post_delete_instance_with_metadata` interceptor. """ return response + def post_delete_instance_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_instance + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AlloyDBAdmin server but before it is returned to user code. 
+ + We recommend only using this `post_delete_instance_with_metadata` + interceptor in new development instead of the `post_delete_instance` interceptor. + When both interceptors are used, this `post_delete_instance_with_metadata` interceptor runs after the + `post_delete_instance` interceptor. The (possibly modified) response returned by + `post_delete_instance` will be passed to + `post_delete_instance_with_metadata`. + """ + return response, metadata + def pre_delete_user( self, request: service.DeleteUserRequest, @@ -618,12 +848,35 @@ def post_execute_sql( ) -> service.ExecuteSqlResponse: """Post-rpc interceptor for execute_sql - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_execute_sql_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AlloyDBAdmin server but before - it is returned to user code. + it is returned to user code. This `post_execute_sql` interceptor runs + before the `post_execute_sql_with_metadata` interceptor. """ return response + def post_execute_sql_with_metadata( + self, + response: service.ExecuteSqlResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[service.ExecuteSqlResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for execute_sql + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AlloyDBAdmin server but before it is returned to user code. + + We recommend only using this `post_execute_sql_with_metadata` + interceptor in new development instead of the `post_execute_sql` interceptor. + When both interceptors are used, this `post_execute_sql_with_metadata` interceptor runs after the + `post_execute_sql` interceptor. The (possibly modified) response returned by + `post_execute_sql` will be passed to + `post_execute_sql_with_metadata`. + """ + return response, metadata + def pre_failover_instance( self, request: service.FailoverInstanceRequest, @@ -643,12 +896,35 @@ def post_failover_instance( ) -> operations_pb2.Operation: """Post-rpc interceptor for failover_instance - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_failover_instance_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AlloyDBAdmin server but before - it is returned to user code. + it is returned to user code. This `post_failover_instance` interceptor runs + before the `post_failover_instance_with_metadata` interceptor. """ return response + def post_failover_instance_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for failover_instance + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AlloyDBAdmin server but before it is returned to user code. + + We recommend only using this `post_failover_instance_with_metadata` + interceptor in new development instead of the `post_failover_instance` interceptor. + When both interceptors are used, this `post_failover_instance_with_metadata` interceptor runs after the + `post_failover_instance` interceptor. The (possibly modified) response returned by + `post_failover_instance` will be passed to + `post_failover_instance_with_metadata`. 
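The `_with_metadata` hooks added above all share one shape. The sketch below is not part of the diff: it shows how an application might override one of them, assuming the module path and the `AlloyDBAdminRestInterceptor` class name follow the standard generated layout (the same layout as the v1beta `rest.py` shown later in this change). The override only reads the response headers and returns everything unchanged.

from typing import Sequence, Tuple, Union

from google.cloud.alloydb_v1beta.services.alloy_db_admin.transports.rest import (
    AlloyDBAdminRestInterceptor,
)
from google.longrunning import operations_pb2


class HeaderLoggingInterceptor(AlloyDBAdminRestInterceptor):
    """Reads the response headers for failover_instance and passes everything through."""

    def post_failover_instance_with_metadata(
        self,
        response: operations_pb2.Operation,
        metadata: Sequence[Tuple[str, Union[str, bytes]]],
    ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]:
        # `metadata` holds the HTTP response headers the transport collected for this call.
        for key, value in metadata:
            print(f"failover_instance response header: {key}={value!r}")
        # Returning the pair unchanged preserves the default behaviour.
        return response, metadata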
+ """ + return response, metadata + def pre_generate_client_certificate( self, request: service.GenerateClientCertificateRequest, @@ -669,12 +945,38 @@ def post_generate_client_certificate( ) -> service.GenerateClientCertificateResponse: """Post-rpc interceptor for generate_client_certificate - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_generate_client_certificate_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AlloyDBAdmin server but before - it is returned to user code. + it is returned to user code. This `post_generate_client_certificate` interceptor runs + before the `post_generate_client_certificate_with_metadata` interceptor. """ return response + def post_generate_client_certificate_with_metadata( + self, + response: service.GenerateClientCertificateResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + service.GenerateClientCertificateResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for generate_client_certificate + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AlloyDBAdmin server but before it is returned to user code. + + We recommend only using this `post_generate_client_certificate_with_metadata` + interceptor in new development instead of the `post_generate_client_certificate` interceptor. + When both interceptors are used, this `post_generate_client_certificate_with_metadata` interceptor runs after the + `post_generate_client_certificate` interceptor. The (possibly modified) response returned by + `post_generate_client_certificate` will be passed to + `post_generate_client_certificate_with_metadata`. + """ + return response, metadata + def pre_get_backup( self, request: service.GetBackupRequest, @@ -690,12 +992,35 @@ def pre_get_backup( def post_get_backup(self, response: resources.Backup) -> resources.Backup: """Post-rpc interceptor for get_backup - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_backup_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AlloyDBAdmin server but before - it is returned to user code. + it is returned to user code. This `post_get_backup` interceptor runs + before the `post_get_backup_with_metadata` interceptor. """ return response + def post_get_backup_with_metadata( + self, + response: resources.Backup, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.Backup, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_backup + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AlloyDBAdmin server but before it is returned to user code. + + We recommend only using this `post_get_backup_with_metadata` + interceptor in new development instead of the `post_get_backup` interceptor. + When both interceptors are used, this `post_get_backup_with_metadata` interceptor runs after the + `post_get_backup` interceptor. The (possibly modified) response returned by + `post_get_backup` will be passed to + `post_get_backup_with_metadata`. 
+ """ + return response, metadata + def pre_get_cluster( self, request: service.GetClusterRequest, @@ -711,12 +1036,35 @@ def pre_get_cluster( def post_get_cluster(self, response: resources.Cluster) -> resources.Cluster: """Post-rpc interceptor for get_cluster - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_cluster_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AlloyDBAdmin server but before - it is returned to user code. + it is returned to user code. This `post_get_cluster` interceptor runs + before the `post_get_cluster_with_metadata` interceptor. """ return response + def post_get_cluster_with_metadata( + self, + response: resources.Cluster, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.Cluster, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_cluster + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AlloyDBAdmin server but before it is returned to user code. + + We recommend only using this `post_get_cluster_with_metadata` + interceptor in new development instead of the `post_get_cluster` interceptor. + When both interceptors are used, this `post_get_cluster_with_metadata` interceptor runs after the + `post_get_cluster` interceptor. The (possibly modified) response returned by + `post_get_cluster` will be passed to + `post_get_cluster_with_metadata`. + """ + return response, metadata + def pre_get_connection_info( self, request: service.GetConnectionInfoRequest, @@ -736,12 +1084,35 @@ def post_get_connection_info( ) -> resources.ConnectionInfo: """Post-rpc interceptor for get_connection_info - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_connection_info_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AlloyDBAdmin server but before - it is returned to user code. + it is returned to user code. This `post_get_connection_info` interceptor runs + before the `post_get_connection_info_with_metadata` interceptor. """ return response + def post_get_connection_info_with_metadata( + self, + response: resources.ConnectionInfo, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.ConnectionInfo, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_connection_info + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AlloyDBAdmin server but before it is returned to user code. + + We recommend only using this `post_get_connection_info_with_metadata` + interceptor in new development instead of the `post_get_connection_info` interceptor. + When both interceptors are used, this `post_get_connection_info_with_metadata` interceptor runs after the + `post_get_connection_info` interceptor. The (possibly modified) response returned by + `post_get_connection_info` will be passed to + `post_get_connection_info_with_metadata`. + """ + return response, metadata + def pre_get_instance( self, request: service.GetInstanceRequest, @@ -757,12 +1128,35 @@ def pre_get_instance( def post_get_instance(self, response: resources.Instance) -> resources.Instance: """Post-rpc interceptor for get_instance - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_instance_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response after it is returned by the AlloyDBAdmin server but before - it is returned to user code. + it is returned to user code. This `post_get_instance` interceptor runs + before the `post_get_instance_with_metadata` interceptor. """ return response + def post_get_instance_with_metadata( + self, + response: resources.Instance, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.Instance, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_instance + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AlloyDBAdmin server but before it is returned to user code. + + We recommend only using this `post_get_instance_with_metadata` + interceptor in new development instead of the `post_get_instance` interceptor. + When both interceptors are used, this `post_get_instance_with_metadata` interceptor runs after the + `post_get_instance` interceptor. The (possibly modified) response returned by + `post_get_instance` will be passed to + `post_get_instance_with_metadata`. + """ + return response, metadata + def pre_get_user( self, request: service.GetUserRequest, @@ -778,12 +1172,35 @@ def pre_get_user( def post_get_user(self, response: resources.User) -> resources.User: """Post-rpc interceptor for get_user - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_user_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AlloyDBAdmin server but before - it is returned to user code. + it is returned to user code. This `post_get_user` interceptor runs + before the `post_get_user_with_metadata` interceptor. """ return response + def post_get_user_with_metadata( + self, + response: resources.User, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.User, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_user + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AlloyDBAdmin server but before it is returned to user code. + + We recommend only using this `post_get_user_with_metadata` + interceptor in new development instead of the `post_get_user` interceptor. + When both interceptors are used, this `post_get_user_with_metadata` interceptor runs after the + `post_get_user` interceptor. The (possibly modified) response returned by + `post_get_user` will be passed to + `post_get_user_with_metadata`. + """ + return response, metadata + def pre_inject_fault( self, request: service.InjectFaultRequest, @@ -801,12 +1218,35 @@ def post_inject_fault( ) -> operations_pb2.Operation: """Post-rpc interceptor for inject_fault - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_inject_fault_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AlloyDBAdmin server but before - it is returned to user code. + it is returned to user code. This `post_inject_fault` interceptor runs + before the `post_inject_fault_with_metadata` interceptor. 
""" return response + def post_inject_fault_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for inject_fault + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AlloyDBAdmin server but before it is returned to user code. + + We recommend only using this `post_inject_fault_with_metadata` + interceptor in new development instead of the `post_inject_fault` interceptor. + When both interceptors are used, this `post_inject_fault_with_metadata` interceptor runs after the + `post_inject_fault` interceptor. The (possibly modified) response returned by + `post_inject_fault` will be passed to + `post_inject_fault_with_metadata`. + """ + return response, metadata + def pre_list_backups( self, request: service.ListBackupsRequest, @@ -824,12 +1264,35 @@ def post_list_backups( ) -> service.ListBackupsResponse: """Post-rpc interceptor for list_backups - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_backups_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AlloyDBAdmin server but before - it is returned to user code. + it is returned to user code. This `post_list_backups` interceptor runs + before the `post_list_backups_with_metadata` interceptor. """ return response + def post_list_backups_with_metadata( + self, + response: service.ListBackupsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[service.ListBackupsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_backups + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AlloyDBAdmin server but before it is returned to user code. + + We recommend only using this `post_list_backups_with_metadata` + interceptor in new development instead of the `post_list_backups` interceptor. + When both interceptors are used, this `post_list_backups_with_metadata` interceptor runs after the + `post_list_backups` interceptor. The (possibly modified) response returned by + `post_list_backups` will be passed to + `post_list_backups_with_metadata`. + """ + return response, metadata + def pre_list_clusters( self, request: service.ListClustersRequest, @@ -847,12 +1310,35 @@ def post_list_clusters( ) -> service.ListClustersResponse: """Post-rpc interceptor for list_clusters - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_clusters_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AlloyDBAdmin server but before - it is returned to user code. + it is returned to user code. This `post_list_clusters` interceptor runs + before the `post_list_clusters_with_metadata` interceptor. """ return response + def post_list_clusters_with_metadata( + self, + response: service.ListClustersResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[service.ListClustersResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_clusters + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AlloyDBAdmin server but before it is returned to user code. 
+ + We recommend only using this `post_list_clusters_with_metadata` + interceptor in new development instead of the `post_list_clusters` interceptor. + When both interceptors are used, this `post_list_clusters_with_metadata` interceptor runs after the + `post_list_clusters` interceptor. The (possibly modified) response returned by + `post_list_clusters` will be passed to + `post_list_clusters_with_metadata`. + """ + return response, metadata + def pre_list_databases( self, request: service.ListDatabasesRequest, @@ -870,12 +1356,35 @@ def post_list_databases( ) -> service.ListDatabasesResponse: """Post-rpc interceptor for list_databases - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_databases_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AlloyDBAdmin server but before - it is returned to user code. + it is returned to user code. This `post_list_databases` interceptor runs + before the `post_list_databases_with_metadata` interceptor. """ return response + def post_list_databases_with_metadata( + self, + response: service.ListDatabasesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[service.ListDatabasesResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_databases + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AlloyDBAdmin server but before it is returned to user code. + + We recommend only using this `post_list_databases_with_metadata` + interceptor in new development instead of the `post_list_databases` interceptor. + When both interceptors are used, this `post_list_databases_with_metadata` interceptor runs after the + `post_list_databases` interceptor. The (possibly modified) response returned by + `post_list_databases` will be passed to + `post_list_databases_with_metadata`. + """ + return response, metadata + def pre_list_instances( self, request: service.ListInstancesRequest, @@ -893,12 +1402,35 @@ def post_list_instances( ) -> service.ListInstancesResponse: """Post-rpc interceptor for list_instances - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_instances_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AlloyDBAdmin server but before - it is returned to user code. + it is returned to user code. This `post_list_instances` interceptor runs + before the `post_list_instances_with_metadata` interceptor. """ return response + def post_list_instances_with_metadata( + self, + response: service.ListInstancesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[service.ListInstancesResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_instances + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AlloyDBAdmin server but before it is returned to user code. + + We recommend only using this `post_list_instances_with_metadata` + interceptor in new development instead of the `post_list_instances` interceptor. + When both interceptors are used, this `post_list_instances_with_metadata` interceptor runs after the + `post_list_instances` interceptor. The (possibly modified) response returned by + `post_list_instances` will be passed to + `post_list_instances_with_metadata`. 
+ """ + return response, metadata + def pre_list_supported_database_flags( self, request: service.ListSupportedDatabaseFlagsRequest, @@ -919,12 +1451,38 @@ def post_list_supported_database_flags( ) -> service.ListSupportedDatabaseFlagsResponse: """Post-rpc interceptor for list_supported_database_flags - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_supported_database_flags_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AlloyDBAdmin server but before - it is returned to user code. + it is returned to user code. This `post_list_supported_database_flags` interceptor runs + before the `post_list_supported_database_flags_with_metadata` interceptor. """ return response + def post_list_supported_database_flags_with_metadata( + self, + response: service.ListSupportedDatabaseFlagsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + service.ListSupportedDatabaseFlagsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_supported_database_flags + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AlloyDBAdmin server but before it is returned to user code. + + We recommend only using this `post_list_supported_database_flags_with_metadata` + interceptor in new development instead of the `post_list_supported_database_flags` interceptor. + When both interceptors are used, this `post_list_supported_database_flags_with_metadata` interceptor runs after the + `post_list_supported_database_flags` interceptor. The (possibly modified) response returned by + `post_list_supported_database_flags` will be passed to + `post_list_supported_database_flags_with_metadata`. + """ + return response, metadata + def pre_list_users( self, request: service.ListUsersRequest, @@ -942,12 +1500,35 @@ def post_list_users( ) -> service.ListUsersResponse: """Post-rpc interceptor for list_users - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_users_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AlloyDBAdmin server but before - it is returned to user code. + it is returned to user code. This `post_list_users` interceptor runs + before the `post_list_users_with_metadata` interceptor. """ return response + def post_list_users_with_metadata( + self, + response: service.ListUsersResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[service.ListUsersResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_users + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AlloyDBAdmin server but before it is returned to user code. + + We recommend only using this `post_list_users_with_metadata` + interceptor in new development instead of the `post_list_users` interceptor. + When both interceptors are used, this `post_list_users_with_metadata` interceptor runs after the + `post_list_users` interceptor. The (possibly modified) response returned by + `post_list_users` will be passed to + `post_list_users_with_metadata`. 
+ """ + return response, metadata + def pre_promote_cluster( self, request: service.PromoteClusterRequest, @@ -965,12 +1546,35 @@ def post_promote_cluster( ) -> operations_pb2.Operation: """Post-rpc interceptor for promote_cluster - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_promote_cluster_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AlloyDBAdmin server but before - it is returned to user code. + it is returned to user code. This `post_promote_cluster` interceptor runs + before the `post_promote_cluster_with_metadata` interceptor. """ return response + def post_promote_cluster_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for promote_cluster + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AlloyDBAdmin server but before it is returned to user code. + + We recommend only using this `post_promote_cluster_with_metadata` + interceptor in new development instead of the `post_promote_cluster` interceptor. + When both interceptors are used, this `post_promote_cluster_with_metadata` interceptor runs after the + `post_promote_cluster` interceptor. The (possibly modified) response returned by + `post_promote_cluster` will be passed to + `post_promote_cluster_with_metadata`. + """ + return response, metadata + def pre_restart_instance( self, request: service.RestartInstanceRequest, @@ -988,12 +1592,35 @@ def post_restart_instance( ) -> operations_pb2.Operation: """Post-rpc interceptor for restart_instance - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_restart_instance_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AlloyDBAdmin server but before - it is returned to user code. + it is returned to user code. This `post_restart_instance` interceptor runs + before the `post_restart_instance_with_metadata` interceptor. """ return response + def post_restart_instance_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for restart_instance + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AlloyDBAdmin server but before it is returned to user code. + + We recommend only using this `post_restart_instance_with_metadata` + interceptor in new development instead of the `post_restart_instance` interceptor. + When both interceptors are used, this `post_restart_instance_with_metadata` interceptor runs after the + `post_restart_instance` interceptor. The (possibly modified) response returned by + `post_restart_instance` will be passed to + `post_restart_instance_with_metadata`. + """ + return response, metadata + def pre_restore_cluster( self, request: service.RestoreClusterRequest, @@ -1011,12 +1638,35 @@ def post_restore_cluster( ) -> operations_pb2.Operation: """Post-rpc interceptor for restore_cluster - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_restore_cluster_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response after it is returned by the AlloyDBAdmin server but before - it is returned to user code. + it is returned to user code. This `post_restore_cluster` interceptor runs + before the `post_restore_cluster_with_metadata` interceptor. """ return response + def post_restore_cluster_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for restore_cluster + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AlloyDBAdmin server but before it is returned to user code. + + We recommend only using this `post_restore_cluster_with_metadata` + interceptor in new development instead of the `post_restore_cluster` interceptor. + When both interceptors are used, this `post_restore_cluster_with_metadata` interceptor runs after the + `post_restore_cluster` interceptor. The (possibly modified) response returned by + `post_restore_cluster` will be passed to + `post_restore_cluster_with_metadata`. + """ + return response, metadata + def pre_switchover_cluster( self, request: service.SwitchoverClusterRequest, @@ -1036,12 +1686,35 @@ def post_switchover_cluster( ) -> operations_pb2.Operation: """Post-rpc interceptor for switchover_cluster - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_switchover_cluster_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AlloyDBAdmin server but before - it is returned to user code. + it is returned to user code. This `post_switchover_cluster` interceptor runs + before the `post_switchover_cluster_with_metadata` interceptor. """ return response + def post_switchover_cluster_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for switchover_cluster + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AlloyDBAdmin server but before it is returned to user code. + + We recommend only using this `post_switchover_cluster_with_metadata` + interceptor in new development instead of the `post_switchover_cluster` interceptor. + When both interceptors are used, this `post_switchover_cluster_with_metadata` interceptor runs after the + `post_switchover_cluster` interceptor. The (possibly modified) response returned by + `post_switchover_cluster` will be passed to + `post_switchover_cluster_with_metadata`. + """ + return response, metadata + def pre_update_backup( self, request: service.UpdateBackupRequest, @@ -1059,12 +1732,35 @@ def post_update_backup( ) -> operations_pb2.Operation: """Post-rpc interceptor for update_backup - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_backup_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AlloyDBAdmin server but before - it is returned to user code. + it is returned to user code. This `post_update_backup` interceptor runs + before the `post_update_backup_with_metadata` interceptor. 
""" return response + def post_update_backup_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_backup + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AlloyDBAdmin server but before it is returned to user code. + + We recommend only using this `post_update_backup_with_metadata` + interceptor in new development instead of the `post_update_backup` interceptor. + When both interceptors are used, this `post_update_backup_with_metadata` interceptor runs after the + `post_update_backup` interceptor. The (possibly modified) response returned by + `post_update_backup` will be passed to + `post_update_backup_with_metadata`. + """ + return response, metadata + def pre_update_cluster( self, request: service.UpdateClusterRequest, @@ -1082,12 +1778,35 @@ def post_update_cluster( ) -> operations_pb2.Operation: """Post-rpc interceptor for update_cluster - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_cluster_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AlloyDBAdmin server but before - it is returned to user code. + it is returned to user code. This `post_update_cluster` interceptor runs + before the `post_update_cluster_with_metadata` interceptor. """ return response + def post_update_cluster_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_cluster + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AlloyDBAdmin server but before it is returned to user code. + + We recommend only using this `post_update_cluster_with_metadata` + interceptor in new development instead of the `post_update_cluster` interceptor. + When both interceptors are used, this `post_update_cluster_with_metadata` interceptor runs after the + `post_update_cluster` interceptor. The (possibly modified) response returned by + `post_update_cluster` will be passed to + `post_update_cluster_with_metadata`. + """ + return response, metadata + def pre_update_instance( self, request: service.UpdateInstanceRequest, @@ -1105,12 +1824,35 @@ def post_update_instance( ) -> operations_pb2.Operation: """Post-rpc interceptor for update_instance - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_instance_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AlloyDBAdmin server but before - it is returned to user code. + it is returned to user code. This `post_update_instance` interceptor runs + before the `post_update_instance_with_metadata` interceptor. """ return response + def post_update_instance_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_instance + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AlloyDBAdmin server but before it is returned to user code. 
+ + We recommend only using this `post_update_instance_with_metadata` + interceptor in new development instead of the `post_update_instance` interceptor. + When both interceptors are used, this `post_update_instance_with_metadata` interceptor runs after the + `post_update_instance` interceptor. The (possibly modified) response returned by + `post_update_instance` will be passed to + `post_update_instance_with_metadata`. + """ + return response, metadata + def pre_update_user( self, request: service.UpdateUserRequest, @@ -1126,12 +1868,35 @@ def pre_update_user( def post_update_user(self, response: resources.User) -> resources.User: """Post-rpc interceptor for update_user - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_user_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AlloyDBAdmin server but before - it is returned to user code. + it is returned to user code. This `post_update_user` interceptor runs + before the `post_update_user_with_metadata` interceptor. """ return response + def post_update_user_with_metadata( + self, + response: resources.User, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.User, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_user + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AlloyDBAdmin server but before it is returned to user code. + + We recommend only using this `post_update_user_with_metadata` + interceptor in new development instead of the `post_update_user` interceptor. + When both interceptors are used, this `post_update_user_with_metadata` interceptor runs after the + `post_update_user` interceptor. The (possibly modified) response returned by + `post_update_user` will be passed to + `post_update_user_with_metadata`. + """ + return response, metadata + def pre_upgrade_cluster( self, request: service.UpgradeClusterRequest, @@ -1149,12 +1914,35 @@ def post_upgrade_cluster( ) -> operations_pb2.Operation: """Post-rpc interceptor for upgrade_cluster - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_upgrade_cluster_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AlloyDBAdmin server but before - it is returned to user code. + it is returned to user code. This `post_upgrade_cluster` interceptor runs + before the `post_upgrade_cluster_with_metadata` interceptor. """ return response + def post_upgrade_cluster_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for upgrade_cluster + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AlloyDBAdmin server but before it is returned to user code. + + We recommend only using this `post_upgrade_cluster_with_metadata` + interceptor in new development instead of the `post_upgrade_cluster` interceptor. + When both interceptors are used, this `post_upgrade_cluster_with_metadata` interceptor runs after the + `post_upgrade_cluster` interceptor. The (possibly modified) response returned by + `post_upgrade_cluster` will be passed to + `post_upgrade_cluster_with_metadata`. 
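Several of the docstrings above state the same ordering guarantee. A minimal sketch, assuming the same generated class names as above, makes that order concrete: when both hooks are overridden, the deprecated `post_upgrade_cluster` runs first and whatever it returns is what `post_upgrade_cluster_with_metadata` receives.

from typing import Sequence, Tuple, Union

from google.cloud.alloydb_v1beta.services.alloy_db_admin.transports.rest import (
    AlloyDBAdminRestInterceptor,
)
from google.longrunning import operations_pb2


class OrderingDemoInterceptor(AlloyDBAdminRestInterceptor):
    def post_upgrade_cluster(
        self, response: operations_pb2.Operation
    ) -> operations_pb2.Operation:
        # Deprecated hook: still invoked first for backwards compatibility.
        print("post_upgrade_cluster saw operation:", response.name)
        return response

    def post_upgrade_cluster_with_metadata(
        self,
        response: operations_pb2.Operation,
        metadata: Sequence[Tuple[str, Union[str, bytes]]],
    ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]:
        # Receives whatever post_upgrade_cluster returned, plus the response headers.
        print("post_upgrade_cluster_with_metadata got", len(metadata), "header(s)")
        return response, metadata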
+ """ + return response, metadata + def pre_get_location( self, request: locations_pb2.GetLocationRequest, @@ -1569,6 +2357,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_batch_create_instances(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_batch_create_instances_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1720,6 +2512,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_backup(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_backup_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1867,6 +2663,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_cluster(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_cluster_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2014,6 +2814,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_instance(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_instance_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2163,6 +2967,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_secondary_cluster(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_secondary_cluster_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2314,6 +3122,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_secondary_instance(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_secondary_instance_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2466,6 +3278,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_user(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_user_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2609,6 +3425,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_backup(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_backup_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2750,6 +3570,10 @@ def __call__( 
json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_cluster(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_cluster_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2891,6 +3715,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_instance(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_instance_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3153,6 +3981,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_execute_sql(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_execute_sql_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3303,6 +4135,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_failover_instance(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_failover_instance_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3459,6 +4295,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_generate_client_certificate(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_generate_client_certificate_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3605,6 +4445,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_backup(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_backup_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3755,6 +4599,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_cluster(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_cluster_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3900,6 +4748,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_connection_info(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_connection_info_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4048,6 +4900,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_instance(resp) + response_metadata = [(k, str(v)) for k, v in 
response.headers.items()] + resp, _ = self._interceptor.post_get_instance_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4190,6 +5046,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_user(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_user_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4344,6 +5204,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_inject_fault(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_inject_fault_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4491,6 +5355,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_backups(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_backups_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4636,6 +5504,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_clusters(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_clusters_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4779,6 +5651,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_databases(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_databases_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4922,6 +5798,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_instances(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_instances_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -5071,6 +5951,13 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_supported_database_flags(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_list_supported_database_flags_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -5217,6 +6104,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_users(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_users_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # 
pragma: NO COVER @@ -5364,6 +6255,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_promote_cluster(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_promote_cluster_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -5513,6 +6408,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_restart_instance(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_restart_instance_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -5662,6 +6561,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_restore_cluster(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_restore_cluster_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -5812,6 +6715,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_switchover_cluster(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_switchover_cluster_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -5963,6 +6870,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_backup(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_backup_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -6110,6 +7021,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_cluster(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_cluster_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -6257,6 +7172,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_instance(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_instance_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -6409,6 +7328,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_user(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_user_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -6556,6 +7479,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_upgrade_cluster(resp) + response_metadata = [(k, str(v)) 
for k, v in response.headers.items()] + resp, _ = self._interceptor.post_upgrade_cluster_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/gapic_version.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/gapic_version.py index 72f01aec6400..ee41ffcc0a1d 100644 --- a/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/gapic_version.py +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.4.1" # {x-release-please-version} +__version__ = "0.4.2" # {x-release-please-version} diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/services/alloy_db_admin/client.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/services/alloy_db_admin/client.py index 53e3e679b97b..341e35916a19 100644 --- a/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/services/alloy_db_admin/client.py +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/services/alloy_db_admin/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -683,6 +685,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -5299,16 +5328,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -5354,16 +5387,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. 
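For reference, the wiring the generated `__call__` methods gain above boils down to the following standalone sketch (illustrative only; `interceptor` stands for any object exposing the two hooks): the HTTP response headers are flattened into `(key, str(value))` pairs, the deprecated hook runs first, and the metadata returned by the newer hook is discarded.

from typing import Mapping, Sequence, Tuple


def run_post_hooks(interceptor, resp, headers: Mapping[str, str]):
    # The deprecated single-argument hook still runs first.
    resp = interceptor.post_update_backup(resp)
    # Headers are flattened into (key, str(value)) pairs, exactly as in the diff above.
    response_metadata: Sequence[Tuple[str, str]] = [
        (k, str(v)) for k, v in headers.items()
    ]
    # The hook may rewrite both values; the generated code keeps only the response.
    resp, _ = interceptor.post_update_backup_with_metadata(resp, response_metadata)
    return resp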
+ return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def delete_operation( self, @@ -5520,16 +5557,20 @@ def get_location( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def list_locations( self, @@ -5575,16 +5616,20 @@ def list_locations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/services/alloy_db_admin/transports/rest.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/services/alloy_db_admin/transports/rest.py index aca2fbca3004..4f08d54c1462 100644 --- a/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/services/alloy_db_admin/transports/rest.py +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/services/alloy_db_admin/transports/rest.py @@ -374,12 +374,35 @@ def post_batch_create_instances( ) -> operations_pb2.Operation: """Post-rpc interceptor for batch_create_instances - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_batch_create_instances_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AlloyDBAdmin server but before - it is returned to user code. + it is returned to user code. This `post_batch_create_instances` interceptor runs + before the `post_batch_create_instances_with_metadata` interceptor. """ return response + def post_batch_create_instances_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for batch_create_instances + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AlloyDBAdmin server but before it is returned to user code. + + We recommend only using this `post_batch_create_instances_with_metadata` + interceptor in new development instead of the `post_batch_create_instances` interceptor. + When both interceptors are used, this `post_batch_create_instances_with_metadata` interceptor runs after the + `post_batch_create_instances` interceptor. The (possibly modified) response returned by + `post_batch_create_instances` will be passed to + `post_batch_create_instances_with_metadata`. 
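On the client side, the net effect of `_add_cred_info_for_auth_errors` and the new try/except blocks above is that 401/403/404 errors may carry an extra JSON detail describing the active credential (only when google-auth>=2.35.0 exposes `get_cred_info`). A hedged sketch of how a caller might surface it; the cluster name below is made up.

from google.api_core import exceptions as core_exceptions


def describe_auth_failure(client) -> None:
    try:
        client.get_cluster(
            name="projects/example-project/locations/us-central1/clusters/missing"
        )
    except core_exceptions.GoogleAPICallError as e:
        # For 401/403/404 responses the client may have appended a json.dumps(...)
        # string describing the active credential to the error details.
        for detail in e.details or []:
            print(detail)
        raise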
+ """ + return response, metadata + def pre_create_backup( self, request: service.CreateBackupRequest, @@ -397,12 +420,35 @@ def post_create_backup( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_backup - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_backup_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AlloyDBAdmin server but before - it is returned to user code. + it is returned to user code. This `post_create_backup` interceptor runs + before the `post_create_backup_with_metadata` interceptor. """ return response + def post_create_backup_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_backup + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AlloyDBAdmin server but before it is returned to user code. + + We recommend only using this `post_create_backup_with_metadata` + interceptor in new development instead of the `post_create_backup` interceptor. + When both interceptors are used, this `post_create_backup_with_metadata` interceptor runs after the + `post_create_backup` interceptor. The (possibly modified) response returned by + `post_create_backup` will be passed to + `post_create_backup_with_metadata`. + """ + return response, metadata + def pre_create_cluster( self, request: service.CreateClusterRequest, @@ -420,12 +466,35 @@ def post_create_cluster( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_cluster - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_cluster_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AlloyDBAdmin server but before - it is returned to user code. + it is returned to user code. This `post_create_cluster` interceptor runs + before the `post_create_cluster_with_metadata` interceptor. """ return response + def post_create_cluster_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_cluster + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AlloyDBAdmin server but before it is returned to user code. + + We recommend only using this `post_create_cluster_with_metadata` + interceptor in new development instead of the `post_create_cluster` interceptor. + When both interceptors are used, this `post_create_cluster_with_metadata` interceptor runs after the + `post_create_cluster` interceptor. The (possibly modified) response returned by + `post_create_cluster` will be passed to + `post_create_cluster_with_metadata`. + """ + return response, metadata + def pre_create_instance( self, request: service.CreateInstanceRequest, @@ -443,12 +512,35 @@ def post_create_instance( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_instance - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_instance_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AlloyDBAdmin server but before - it is returned to user code. 
+ it is returned to user code. This `post_create_instance` interceptor runs + before the `post_create_instance_with_metadata` interceptor. """ return response + def post_create_instance_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_instance + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AlloyDBAdmin server but before it is returned to user code. + + We recommend only using this `post_create_instance_with_metadata` + interceptor in new development instead of the `post_create_instance` interceptor. + When both interceptors are used, this `post_create_instance_with_metadata` interceptor runs after the + `post_create_instance` interceptor. The (possibly modified) response returned by + `post_create_instance` will be passed to + `post_create_instance_with_metadata`. + """ + return response, metadata + def pre_create_secondary_cluster( self, request: service.CreateSecondaryClusterRequest, @@ -468,12 +560,35 @@ def post_create_secondary_cluster( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_secondary_cluster - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_secondary_cluster_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AlloyDBAdmin server but before - it is returned to user code. + it is returned to user code. This `post_create_secondary_cluster` interceptor runs + before the `post_create_secondary_cluster_with_metadata` interceptor. """ return response + def post_create_secondary_cluster_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_secondary_cluster + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AlloyDBAdmin server but before it is returned to user code. + + We recommend only using this `post_create_secondary_cluster_with_metadata` + interceptor in new development instead of the `post_create_secondary_cluster` interceptor. + When both interceptors are used, this `post_create_secondary_cluster_with_metadata` interceptor runs after the + `post_create_secondary_cluster` interceptor. The (possibly modified) response returned by + `post_create_secondary_cluster` will be passed to + `post_create_secondary_cluster_with_metadata`. + """ + return response, metadata + def pre_create_secondary_instance( self, request: service.CreateSecondaryInstanceRequest, @@ -493,12 +608,35 @@ def post_create_secondary_instance( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_secondary_instance - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_secondary_instance_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AlloyDBAdmin server but before - it is returned to user code. + it is returned to user code. This `post_create_secondary_instance` interceptor runs + before the `post_create_secondary_instance_with_metadata` interceptor. 
""" return response + def post_create_secondary_instance_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_secondary_instance + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AlloyDBAdmin server but before it is returned to user code. + + We recommend only using this `post_create_secondary_instance_with_metadata` + interceptor in new development instead of the `post_create_secondary_instance` interceptor. + When both interceptors are used, this `post_create_secondary_instance_with_metadata` interceptor runs after the + `post_create_secondary_instance` interceptor. The (possibly modified) response returned by + `post_create_secondary_instance` will be passed to + `post_create_secondary_instance_with_metadata`. + """ + return response, metadata + def pre_create_user( self, request: service.CreateUserRequest, @@ -514,12 +652,35 @@ def pre_create_user( def post_create_user(self, response: resources.User) -> resources.User: """Post-rpc interceptor for create_user - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_user_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AlloyDBAdmin server but before - it is returned to user code. + it is returned to user code. This `post_create_user` interceptor runs + before the `post_create_user_with_metadata` interceptor. """ return response + def post_create_user_with_metadata( + self, + response: resources.User, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.User, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_user + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AlloyDBAdmin server but before it is returned to user code. + + We recommend only using this `post_create_user_with_metadata` + interceptor in new development instead of the `post_create_user` interceptor. + When both interceptors are used, this `post_create_user_with_metadata` interceptor runs after the + `post_create_user` interceptor. The (possibly modified) response returned by + `post_create_user` will be passed to + `post_create_user_with_metadata`. + """ + return response, metadata + def pre_delete_backup( self, request: service.DeleteBackupRequest, @@ -537,12 +698,35 @@ def post_delete_backup( ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_backup - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_backup_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AlloyDBAdmin server but before - it is returned to user code. + it is returned to user code. This `post_delete_backup` interceptor runs + before the `post_delete_backup_with_metadata` interceptor. """ return response + def post_delete_backup_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_backup + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AlloyDBAdmin server but before it is returned to user code. 
+ + We recommend only using this `post_delete_backup_with_metadata` + interceptor in new development instead of the `post_delete_backup` interceptor. + When both interceptors are used, this `post_delete_backup_with_metadata` interceptor runs after the + `post_delete_backup` interceptor. The (possibly modified) response returned by + `post_delete_backup` will be passed to + `post_delete_backup_with_metadata`. + """ + return response, metadata + def pre_delete_cluster( self, request: service.DeleteClusterRequest, @@ -560,12 +744,35 @@ def post_delete_cluster( ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_cluster - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_cluster_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AlloyDBAdmin server but before - it is returned to user code. + it is returned to user code. This `post_delete_cluster` interceptor runs + before the `post_delete_cluster_with_metadata` interceptor. """ return response + def post_delete_cluster_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_cluster + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AlloyDBAdmin server but before it is returned to user code. + + We recommend only using this `post_delete_cluster_with_metadata` + interceptor in new development instead of the `post_delete_cluster` interceptor. + When both interceptors are used, this `post_delete_cluster_with_metadata` interceptor runs after the + `post_delete_cluster` interceptor. The (possibly modified) response returned by + `post_delete_cluster` will be passed to + `post_delete_cluster_with_metadata`. + """ + return response, metadata + def pre_delete_instance( self, request: service.DeleteInstanceRequest, @@ -583,12 +790,35 @@ def post_delete_instance( ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_instance - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_instance_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AlloyDBAdmin server but before - it is returned to user code. + it is returned to user code. This `post_delete_instance` interceptor runs + before the `post_delete_instance_with_metadata` interceptor. """ return response + def post_delete_instance_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_instance + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AlloyDBAdmin server but before it is returned to user code. + + We recommend only using this `post_delete_instance_with_metadata` + interceptor in new development instead of the `post_delete_instance` interceptor. + When both interceptors are used, this `post_delete_instance_with_metadata` interceptor runs after the + `post_delete_instance` interceptor. The (possibly modified) response returned by + `post_delete_instance` will be passed to + `post_delete_instance_with_metadata`. 
+ """ + return response, metadata + def pre_delete_user( self, request: service.DeleteUserRequest, @@ -618,12 +848,35 @@ def post_execute_sql( ) -> service.ExecuteSqlResponse: """Post-rpc interceptor for execute_sql - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_execute_sql_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AlloyDBAdmin server but before - it is returned to user code. + it is returned to user code. This `post_execute_sql` interceptor runs + before the `post_execute_sql_with_metadata` interceptor. """ return response + def post_execute_sql_with_metadata( + self, + response: service.ExecuteSqlResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[service.ExecuteSqlResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for execute_sql + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AlloyDBAdmin server but before it is returned to user code. + + We recommend only using this `post_execute_sql_with_metadata` + interceptor in new development instead of the `post_execute_sql` interceptor. + When both interceptors are used, this `post_execute_sql_with_metadata` interceptor runs after the + `post_execute_sql` interceptor. The (possibly modified) response returned by + `post_execute_sql` will be passed to + `post_execute_sql_with_metadata`. + """ + return response, metadata + def pre_failover_instance( self, request: service.FailoverInstanceRequest, @@ -643,12 +896,35 @@ def post_failover_instance( ) -> operations_pb2.Operation: """Post-rpc interceptor for failover_instance - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_failover_instance_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AlloyDBAdmin server but before - it is returned to user code. + it is returned to user code. This `post_failover_instance` interceptor runs + before the `post_failover_instance_with_metadata` interceptor. """ return response + def post_failover_instance_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for failover_instance + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AlloyDBAdmin server but before it is returned to user code. + + We recommend only using this `post_failover_instance_with_metadata` + interceptor in new development instead of the `post_failover_instance` interceptor. + When both interceptors are used, this `post_failover_instance_with_metadata` interceptor runs after the + `post_failover_instance` interceptor. The (possibly modified) response returned by + `post_failover_instance` will be passed to + `post_failover_instance_with_metadata`. + """ + return response, metadata + def pre_generate_client_certificate( self, request: service.GenerateClientCertificateRequest, @@ -669,12 +945,38 @@ def post_generate_client_certificate( ) -> service.GenerateClientCertificateResponse: """Post-rpc interceptor for generate_client_certificate - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_generate_client_certificate_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response after it is returned by the AlloyDBAdmin server but before - it is returned to user code. + it is returned to user code. This `post_generate_client_certificate` interceptor runs + before the `post_generate_client_certificate_with_metadata` interceptor. """ return response + def post_generate_client_certificate_with_metadata( + self, + response: service.GenerateClientCertificateResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + service.GenerateClientCertificateResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for generate_client_certificate + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AlloyDBAdmin server but before it is returned to user code. + + We recommend only using this `post_generate_client_certificate_with_metadata` + interceptor in new development instead of the `post_generate_client_certificate` interceptor. + When both interceptors are used, this `post_generate_client_certificate_with_metadata` interceptor runs after the + `post_generate_client_certificate` interceptor. The (possibly modified) response returned by + `post_generate_client_certificate` will be passed to + `post_generate_client_certificate_with_metadata`. + """ + return response, metadata + def pre_get_backup( self, request: service.GetBackupRequest, @@ -690,12 +992,35 @@ def pre_get_backup( def post_get_backup(self, response: resources.Backup) -> resources.Backup: """Post-rpc interceptor for get_backup - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_backup_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AlloyDBAdmin server but before - it is returned to user code. + it is returned to user code. This `post_get_backup` interceptor runs + before the `post_get_backup_with_metadata` interceptor. """ return response + def post_get_backup_with_metadata( + self, + response: resources.Backup, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.Backup, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_backup + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AlloyDBAdmin server but before it is returned to user code. + + We recommend only using this `post_get_backup_with_metadata` + interceptor in new development instead of the `post_get_backup` interceptor. + When both interceptors are used, this `post_get_backup_with_metadata` interceptor runs after the + `post_get_backup` interceptor. The (possibly modified) response returned by + `post_get_backup` will be passed to + `post_get_backup_with_metadata`. + """ + return response, metadata + def pre_get_cluster( self, request: service.GetClusterRequest, @@ -711,12 +1036,35 @@ def pre_get_cluster( def post_get_cluster(self, response: resources.Cluster) -> resources.Cluster: """Post-rpc interceptor for get_cluster - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_cluster_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AlloyDBAdmin server but before - it is returned to user code. + it is returned to user code. This `post_get_cluster` interceptor runs + before the `post_get_cluster_with_metadata` interceptor. 
""" return response + def post_get_cluster_with_metadata( + self, + response: resources.Cluster, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.Cluster, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_cluster + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AlloyDBAdmin server but before it is returned to user code. + + We recommend only using this `post_get_cluster_with_metadata` + interceptor in new development instead of the `post_get_cluster` interceptor. + When both interceptors are used, this `post_get_cluster_with_metadata` interceptor runs after the + `post_get_cluster` interceptor. The (possibly modified) response returned by + `post_get_cluster` will be passed to + `post_get_cluster_with_metadata`. + """ + return response, metadata + def pre_get_connection_info( self, request: service.GetConnectionInfoRequest, @@ -736,12 +1084,35 @@ def post_get_connection_info( ) -> resources.ConnectionInfo: """Post-rpc interceptor for get_connection_info - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_connection_info_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AlloyDBAdmin server but before - it is returned to user code. + it is returned to user code. This `post_get_connection_info` interceptor runs + before the `post_get_connection_info_with_metadata` interceptor. """ return response + def post_get_connection_info_with_metadata( + self, + response: resources.ConnectionInfo, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.ConnectionInfo, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_connection_info + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AlloyDBAdmin server but before it is returned to user code. + + We recommend only using this `post_get_connection_info_with_metadata` + interceptor in new development instead of the `post_get_connection_info` interceptor. + When both interceptors are used, this `post_get_connection_info_with_metadata` interceptor runs after the + `post_get_connection_info` interceptor. The (possibly modified) response returned by + `post_get_connection_info` will be passed to + `post_get_connection_info_with_metadata`. + """ + return response, metadata + def pre_get_instance( self, request: service.GetInstanceRequest, @@ -757,12 +1128,35 @@ def pre_get_instance( def post_get_instance(self, response: resources.Instance) -> resources.Instance: """Post-rpc interceptor for get_instance - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_instance_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AlloyDBAdmin server but before - it is returned to user code. + it is returned to user code. This `post_get_instance` interceptor runs + before the `post_get_instance_with_metadata` interceptor. """ return response + def post_get_instance_with_metadata( + self, + response: resources.Instance, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.Instance, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_instance + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AlloyDBAdmin server but before it is returned to user code. 
+ + We recommend only using this `post_get_instance_with_metadata` + interceptor in new development instead of the `post_get_instance` interceptor. + When both interceptors are used, this `post_get_instance_with_metadata` interceptor runs after the + `post_get_instance` interceptor. The (possibly modified) response returned by + `post_get_instance` will be passed to + `post_get_instance_with_metadata`. + """ + return response, metadata + def pre_get_user( self, request: service.GetUserRequest, @@ -778,12 +1172,35 @@ def pre_get_user( def post_get_user(self, response: resources.User) -> resources.User: """Post-rpc interceptor for get_user - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_user_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AlloyDBAdmin server but before - it is returned to user code. + it is returned to user code. This `post_get_user` interceptor runs + before the `post_get_user_with_metadata` interceptor. """ return response + def post_get_user_with_metadata( + self, + response: resources.User, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.User, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_user + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AlloyDBAdmin server but before it is returned to user code. + + We recommend only using this `post_get_user_with_metadata` + interceptor in new development instead of the `post_get_user` interceptor. + When both interceptors are used, this `post_get_user_with_metadata` interceptor runs after the + `post_get_user` interceptor. The (possibly modified) response returned by + `post_get_user` will be passed to + `post_get_user_with_metadata`. + """ + return response, metadata + def pre_inject_fault( self, request: service.InjectFaultRequest, @@ -801,12 +1218,35 @@ def post_inject_fault( ) -> operations_pb2.Operation: """Post-rpc interceptor for inject_fault - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_inject_fault_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AlloyDBAdmin server but before - it is returned to user code. + it is returned to user code. This `post_inject_fault` interceptor runs + before the `post_inject_fault_with_metadata` interceptor. """ return response + def post_inject_fault_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for inject_fault + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AlloyDBAdmin server but before it is returned to user code. + + We recommend only using this `post_inject_fault_with_metadata` + interceptor in new development instead of the `post_inject_fault` interceptor. + When both interceptors are used, this `post_inject_fault_with_metadata` interceptor runs after the + `post_inject_fault` interceptor. The (possibly modified) response returned by + `post_inject_fault` will be passed to + `post_inject_fault_with_metadata`. 
+ """ + return response, metadata + def pre_list_backups( self, request: service.ListBackupsRequest, @@ -824,12 +1264,35 @@ def post_list_backups( ) -> service.ListBackupsResponse: """Post-rpc interceptor for list_backups - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_backups_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AlloyDBAdmin server but before - it is returned to user code. + it is returned to user code. This `post_list_backups` interceptor runs + before the `post_list_backups_with_metadata` interceptor. """ return response + def post_list_backups_with_metadata( + self, + response: service.ListBackupsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[service.ListBackupsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_backups + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AlloyDBAdmin server but before it is returned to user code. + + We recommend only using this `post_list_backups_with_metadata` + interceptor in new development instead of the `post_list_backups` interceptor. + When both interceptors are used, this `post_list_backups_with_metadata` interceptor runs after the + `post_list_backups` interceptor. The (possibly modified) response returned by + `post_list_backups` will be passed to + `post_list_backups_with_metadata`. + """ + return response, metadata + def pre_list_clusters( self, request: service.ListClustersRequest, @@ -847,12 +1310,35 @@ def post_list_clusters( ) -> service.ListClustersResponse: """Post-rpc interceptor for list_clusters - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_clusters_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AlloyDBAdmin server but before - it is returned to user code. + it is returned to user code. This `post_list_clusters` interceptor runs + before the `post_list_clusters_with_metadata` interceptor. """ return response + def post_list_clusters_with_metadata( + self, + response: service.ListClustersResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[service.ListClustersResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_clusters + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AlloyDBAdmin server but before it is returned to user code. + + We recommend only using this `post_list_clusters_with_metadata` + interceptor in new development instead of the `post_list_clusters` interceptor. + When both interceptors are used, this `post_list_clusters_with_metadata` interceptor runs after the + `post_list_clusters` interceptor. The (possibly modified) response returned by + `post_list_clusters` will be passed to + `post_list_clusters_with_metadata`. + """ + return response, metadata + def pre_list_databases( self, request: service.ListDatabasesRequest, @@ -870,12 +1356,35 @@ def post_list_databases( ) -> service.ListDatabasesResponse: """Post-rpc interceptor for list_databases - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_databases_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AlloyDBAdmin server but before - it is returned to user code. 
+ it is returned to user code. This `post_list_databases` interceptor runs + before the `post_list_databases_with_metadata` interceptor. """ return response + def post_list_databases_with_metadata( + self, + response: service.ListDatabasesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[service.ListDatabasesResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_databases + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AlloyDBAdmin server but before it is returned to user code. + + We recommend only using this `post_list_databases_with_metadata` + interceptor in new development instead of the `post_list_databases` interceptor. + When both interceptors are used, this `post_list_databases_with_metadata` interceptor runs after the + `post_list_databases` interceptor. The (possibly modified) response returned by + `post_list_databases` will be passed to + `post_list_databases_with_metadata`. + """ + return response, metadata + def pre_list_instances( self, request: service.ListInstancesRequest, @@ -893,12 +1402,35 @@ def post_list_instances( ) -> service.ListInstancesResponse: """Post-rpc interceptor for list_instances - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_instances_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AlloyDBAdmin server but before - it is returned to user code. + it is returned to user code. This `post_list_instances` interceptor runs + before the `post_list_instances_with_metadata` interceptor. """ return response + def post_list_instances_with_metadata( + self, + response: service.ListInstancesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[service.ListInstancesResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_instances + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AlloyDBAdmin server but before it is returned to user code. + + We recommend only using this `post_list_instances_with_metadata` + interceptor in new development instead of the `post_list_instances` interceptor. + When both interceptors are used, this `post_list_instances_with_metadata` interceptor runs after the + `post_list_instances` interceptor. The (possibly modified) response returned by + `post_list_instances` will be passed to + `post_list_instances_with_metadata`. + """ + return response, metadata + def pre_list_supported_database_flags( self, request: service.ListSupportedDatabaseFlagsRequest, @@ -919,12 +1451,38 @@ def post_list_supported_database_flags( ) -> service.ListSupportedDatabaseFlagsResponse: """Post-rpc interceptor for list_supported_database_flags - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_supported_database_flags_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AlloyDBAdmin server but before - it is returned to user code. + it is returned to user code. This `post_list_supported_database_flags` interceptor runs + before the `post_list_supported_database_flags_with_metadata` interceptor. 
""" return response + def post_list_supported_database_flags_with_metadata( + self, + response: service.ListSupportedDatabaseFlagsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + service.ListSupportedDatabaseFlagsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_supported_database_flags + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AlloyDBAdmin server but before it is returned to user code. + + We recommend only using this `post_list_supported_database_flags_with_metadata` + interceptor in new development instead of the `post_list_supported_database_flags` interceptor. + When both interceptors are used, this `post_list_supported_database_flags_with_metadata` interceptor runs after the + `post_list_supported_database_flags` interceptor. The (possibly modified) response returned by + `post_list_supported_database_flags` will be passed to + `post_list_supported_database_flags_with_metadata`. + """ + return response, metadata + def pre_list_users( self, request: service.ListUsersRequest, @@ -942,12 +1500,35 @@ def post_list_users( ) -> service.ListUsersResponse: """Post-rpc interceptor for list_users - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_users_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AlloyDBAdmin server but before - it is returned to user code. + it is returned to user code. This `post_list_users` interceptor runs + before the `post_list_users_with_metadata` interceptor. """ return response + def post_list_users_with_metadata( + self, + response: service.ListUsersResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[service.ListUsersResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_users + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AlloyDBAdmin server but before it is returned to user code. + + We recommend only using this `post_list_users_with_metadata` + interceptor in new development instead of the `post_list_users` interceptor. + When both interceptors are used, this `post_list_users_with_metadata` interceptor runs after the + `post_list_users` interceptor. The (possibly modified) response returned by + `post_list_users` will be passed to + `post_list_users_with_metadata`. + """ + return response, metadata + def pre_promote_cluster( self, request: service.PromoteClusterRequest, @@ -965,12 +1546,35 @@ def post_promote_cluster( ) -> operations_pb2.Operation: """Post-rpc interceptor for promote_cluster - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_promote_cluster_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AlloyDBAdmin server but before - it is returned to user code. + it is returned to user code. This `post_promote_cluster` interceptor runs + before the `post_promote_cluster_with_metadata` interceptor. 
""" return response + def post_promote_cluster_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for promote_cluster + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AlloyDBAdmin server but before it is returned to user code. + + We recommend only using this `post_promote_cluster_with_metadata` + interceptor in new development instead of the `post_promote_cluster` interceptor. + When both interceptors are used, this `post_promote_cluster_with_metadata` interceptor runs after the + `post_promote_cluster` interceptor. The (possibly modified) response returned by + `post_promote_cluster` will be passed to + `post_promote_cluster_with_metadata`. + """ + return response, metadata + def pre_restart_instance( self, request: service.RestartInstanceRequest, @@ -988,12 +1592,35 @@ def post_restart_instance( ) -> operations_pb2.Operation: """Post-rpc interceptor for restart_instance - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_restart_instance_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AlloyDBAdmin server but before - it is returned to user code. + it is returned to user code. This `post_restart_instance` interceptor runs + before the `post_restart_instance_with_metadata` interceptor. """ return response + def post_restart_instance_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for restart_instance + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AlloyDBAdmin server but before it is returned to user code. + + We recommend only using this `post_restart_instance_with_metadata` + interceptor in new development instead of the `post_restart_instance` interceptor. + When both interceptors are used, this `post_restart_instance_with_metadata` interceptor runs after the + `post_restart_instance` interceptor. The (possibly modified) response returned by + `post_restart_instance` will be passed to + `post_restart_instance_with_metadata`. + """ + return response, metadata + def pre_restore_cluster( self, request: service.RestoreClusterRequest, @@ -1011,12 +1638,35 @@ def post_restore_cluster( ) -> operations_pb2.Operation: """Post-rpc interceptor for restore_cluster - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_restore_cluster_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AlloyDBAdmin server but before - it is returned to user code. + it is returned to user code. This `post_restore_cluster` interceptor runs + before the `post_restore_cluster_with_metadata` interceptor. """ return response + def post_restore_cluster_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for restore_cluster + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AlloyDBAdmin server but before it is returned to user code. 
+ + We recommend only using this `post_restore_cluster_with_metadata` + interceptor in new development instead of the `post_restore_cluster` interceptor. + When both interceptors are used, this `post_restore_cluster_with_metadata` interceptor runs after the + `post_restore_cluster` interceptor. The (possibly modified) response returned by + `post_restore_cluster` will be passed to + `post_restore_cluster_with_metadata`. + """ + return response, metadata + def pre_switchover_cluster( self, request: service.SwitchoverClusterRequest, @@ -1036,12 +1686,35 @@ def post_switchover_cluster( ) -> operations_pb2.Operation: """Post-rpc interceptor for switchover_cluster - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_switchover_cluster_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AlloyDBAdmin server but before - it is returned to user code. + it is returned to user code. This `post_switchover_cluster` interceptor runs + before the `post_switchover_cluster_with_metadata` interceptor. """ return response + def post_switchover_cluster_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for switchover_cluster + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AlloyDBAdmin server but before it is returned to user code. + + We recommend only using this `post_switchover_cluster_with_metadata` + interceptor in new development instead of the `post_switchover_cluster` interceptor. + When both interceptors are used, this `post_switchover_cluster_with_metadata` interceptor runs after the + `post_switchover_cluster` interceptor. The (possibly modified) response returned by + `post_switchover_cluster` will be passed to + `post_switchover_cluster_with_metadata`. + """ + return response, metadata + def pre_update_backup( self, request: service.UpdateBackupRequest, @@ -1059,12 +1732,35 @@ def post_update_backup( ) -> operations_pb2.Operation: """Post-rpc interceptor for update_backup - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_backup_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AlloyDBAdmin server but before - it is returned to user code. + it is returned to user code. This `post_update_backup` interceptor runs + before the `post_update_backup_with_metadata` interceptor. """ return response + def post_update_backup_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_backup + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AlloyDBAdmin server but before it is returned to user code. + + We recommend only using this `post_update_backup_with_metadata` + interceptor in new development instead of the `post_update_backup` interceptor. + When both interceptors are used, this `post_update_backup_with_metadata` interceptor runs after the + `post_update_backup` interceptor. The (possibly modified) response returned by + `post_update_backup` will be passed to + `post_update_backup_with_metadata`. 
+ """ + return response, metadata + def pre_update_cluster( self, request: service.UpdateClusterRequest, @@ -1082,12 +1778,35 @@ def post_update_cluster( ) -> operations_pb2.Operation: """Post-rpc interceptor for update_cluster - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_cluster_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AlloyDBAdmin server but before - it is returned to user code. + it is returned to user code. This `post_update_cluster` interceptor runs + before the `post_update_cluster_with_metadata` interceptor. """ return response + def post_update_cluster_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_cluster + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AlloyDBAdmin server but before it is returned to user code. + + We recommend only using this `post_update_cluster_with_metadata` + interceptor in new development instead of the `post_update_cluster` interceptor. + When both interceptors are used, this `post_update_cluster_with_metadata` interceptor runs after the + `post_update_cluster` interceptor. The (possibly modified) response returned by + `post_update_cluster` will be passed to + `post_update_cluster_with_metadata`. + """ + return response, metadata + def pre_update_instance( self, request: service.UpdateInstanceRequest, @@ -1105,12 +1824,35 @@ def post_update_instance( ) -> operations_pb2.Operation: """Post-rpc interceptor for update_instance - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_instance_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AlloyDBAdmin server but before - it is returned to user code. + it is returned to user code. This `post_update_instance` interceptor runs + before the `post_update_instance_with_metadata` interceptor. """ return response + def post_update_instance_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_instance + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AlloyDBAdmin server but before it is returned to user code. + + We recommend only using this `post_update_instance_with_metadata` + interceptor in new development instead of the `post_update_instance` interceptor. + When both interceptors are used, this `post_update_instance_with_metadata` interceptor runs after the + `post_update_instance` interceptor. The (possibly modified) response returned by + `post_update_instance` will be passed to + `post_update_instance_with_metadata`. + """ + return response, metadata + def pre_update_user( self, request: service.UpdateUserRequest, @@ -1126,12 +1868,35 @@ def pre_update_user( def post_update_user(self, response: resources.User) -> resources.User: """Post-rpc interceptor for update_user - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_user_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response after it is returned by the AlloyDBAdmin server but before - it is returned to user code. + it is returned to user code. This `post_update_user` interceptor runs + before the `post_update_user_with_metadata` interceptor. """ return response + def post_update_user_with_metadata( + self, + response: resources.User, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.User, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_user + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AlloyDBAdmin server but before it is returned to user code. + + We recommend only using this `post_update_user_with_metadata` + interceptor in new development instead of the `post_update_user` interceptor. + When both interceptors are used, this `post_update_user_with_metadata` interceptor runs after the + `post_update_user` interceptor. The (possibly modified) response returned by + `post_update_user` will be passed to + `post_update_user_with_metadata`. + """ + return response, metadata + def pre_upgrade_cluster( self, request: service.UpgradeClusterRequest, @@ -1149,12 +1914,35 @@ def post_upgrade_cluster( ) -> operations_pb2.Operation: """Post-rpc interceptor for upgrade_cluster - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_upgrade_cluster_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AlloyDBAdmin server but before - it is returned to user code. + it is returned to user code. This `post_upgrade_cluster` interceptor runs + before the `post_upgrade_cluster_with_metadata` interceptor. """ return response + def post_upgrade_cluster_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for upgrade_cluster + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AlloyDBAdmin server but before it is returned to user code. + + We recommend only using this `post_upgrade_cluster_with_metadata` + interceptor in new development instead of the `post_upgrade_cluster` interceptor. + When both interceptors are used, this `post_upgrade_cluster_with_metadata` interceptor runs after the + `post_upgrade_cluster` interceptor. The (possibly modified) response returned by + `post_upgrade_cluster` will be passed to + `post_upgrade_cluster_with_metadata`. 
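Editor's note: the transport-side hunks further below all apply the same pattern after parsing a response body: build a `(header, value)` list from the HTTP response headers, pass it together with the parsed message through the matching `*_with_metadata` interceptor, and keep only the (possibly modified) response. A standalone sketch with hypothetical stand-ins for the HTTP response object and parsed message:

```python
from typing import Sequence, Tuple


class FakeHTTPResponse:
    # Stand-in for the requests/httpx response object the transport holds.
    headers = {"content-type": "application/json", "x-goog-request-id": "abc123"}


def post_upgrade_cluster_with_metadata(resp, metadata: Sequence[Tuple[str, str]]):
    # Default interceptor behavior: return both values unchanged.
    return resp, metadata


response = FakeHTTPResponse()
resp = {"name": "operations/12345"}  # stands in for the parsed Operation message

# Mirrors the lines added to each __call__ handler in the hunks below:
response_metadata = [(k, str(v)) for k, v in response.headers.items()]
resp, _ = post_upgrade_cluster_with_metadata(resp, response_metadata)
# Only `resp` continues on to logging and user code; the metadata returned by
# the interceptor is discarded by the transport (hence the `_`).
```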
+ """ + return response, metadata + def pre_get_location( self, request: locations_pb2.GetLocationRequest, @@ -1568,6 +2356,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_batch_create_instances(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_batch_create_instances_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1719,6 +2511,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_backup(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_backup_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1866,6 +2662,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_cluster(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_cluster_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2013,6 +2813,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_instance(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_instance_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2162,6 +2966,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_secondary_cluster(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_secondary_cluster_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2313,6 +3121,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_secondary_instance(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_secondary_instance_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2465,6 +3277,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_user(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_user_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2608,6 +3424,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_backup(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_backup_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2749,6 +3569,10 @@ def __call__( 
json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_cluster(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_cluster_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2890,6 +3714,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_instance(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_instance_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3152,6 +3980,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_execute_sql(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_execute_sql_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3302,6 +4134,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_failover_instance(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_failover_instance_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3458,6 +4294,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_generate_client_certificate(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_generate_client_certificate_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3604,6 +4444,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_backup(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_backup_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3754,6 +4598,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_cluster(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_cluster_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3899,6 +4747,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_connection_info(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_connection_info_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4047,6 +4899,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_instance(resp) + response_metadata = [(k, str(v)) for k, v in 
response.headers.items()] + resp, _ = self._interceptor.post_get_instance_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4189,6 +5045,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_user(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_user_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4343,6 +5203,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_inject_fault(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_inject_fault_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4490,6 +5354,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_backups(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_backups_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4635,6 +5503,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_clusters(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_clusters_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4778,6 +5650,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_databases(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_databases_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4921,6 +5797,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_instances(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_instances_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -5070,6 +5950,13 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_supported_database_flags(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_list_supported_database_flags_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -5216,6 +6103,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_users(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_users_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # 
pragma: NO COVER @@ -5363,6 +6254,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_promote_cluster(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_promote_cluster_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -5512,6 +6407,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_restart_instance(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_restart_instance_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -5661,6 +6560,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_restore_cluster(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_restore_cluster_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -5811,6 +6714,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_switchover_cluster(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_switchover_cluster_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -5962,6 +6869,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_backup(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_backup_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -6109,6 +7020,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_cluster(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_cluster_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -6256,6 +7171,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_instance(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_instance_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -6408,6 +7327,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_user(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_user_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -6555,6 +7478,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_upgrade_cluster(resp) + response_metadata = [(k, str(v)) 
for k, v in response.headers.items()] + resp, _ = self._interceptor.post_upgrade_cluster_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-alloydb/samples/generated_samples/snippet_metadata_google.cloud.alloydb.v1.json b/packages/google-cloud-alloydb/samples/generated_samples/snippet_metadata_google.cloud.alloydb.v1.json index 770170613b87..bafdc61433dc 100644 --- a/packages/google-cloud-alloydb/samples/generated_samples/snippet_metadata_google.cloud.alloydb.v1.json +++ b/packages/google-cloud-alloydb/samples/generated_samples/snippet_metadata_google.cloud.alloydb.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-alloydb", - "version": "0.4.1" + "version": "0.4.2" }, "snippets": [ { diff --git a/packages/google-cloud-alloydb/samples/generated_samples/snippet_metadata_google.cloud.alloydb.v1alpha.json b/packages/google-cloud-alloydb/samples/generated_samples/snippet_metadata_google.cloud.alloydb.v1alpha.json index 16fcd7cdfe58..50eb941ff80e 100644 --- a/packages/google-cloud-alloydb/samples/generated_samples/snippet_metadata_google.cloud.alloydb.v1alpha.json +++ b/packages/google-cloud-alloydb/samples/generated_samples/snippet_metadata_google.cloud.alloydb.v1alpha.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-alloydb", - "version": "0.4.1" + "version": "0.4.2" }, "snippets": [ { diff --git a/packages/google-cloud-alloydb/samples/generated_samples/snippet_metadata_google.cloud.alloydb.v1beta.json b/packages/google-cloud-alloydb/samples/generated_samples/snippet_metadata_google.cloud.alloydb.v1beta.json index c90c9998fad0..d63c8b069fce 100644 --- a/packages/google-cloud-alloydb/samples/generated_samples/snippet_metadata_google.cloud.alloydb.v1beta.json +++ b/packages/google-cloud-alloydb/samples/generated_samples/snippet_metadata_google.cloud.alloydb.v1beta.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-alloydb", - "version": "0.4.1" + "version": "0.4.2" }, "snippets": [ { diff --git a/packages/google-cloud-alloydb/tests/unit/gapic/alloydb_v1/test_alloy_db_admin.py b/packages/google-cloud-alloydb/tests/unit/gapic/alloydb_v1/test_alloy_db_admin.py index d7254314ed4f..c628c224db14 100644 --- a/packages/google-cloud-alloydb/tests/unit/gapic/alloydb_v1/test_alloy_db_admin.py +++ b/packages/google-cloud-alloydb/tests/unit/gapic/alloydb_v1/test_alloy_db_admin.py @@ -80,6 +80,13 @@ ) from google.cloud.alloydb_v1.types import data_model, resources, service +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -323,6 +330,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
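For context on the generated transport changes above: after each existing post_<rpc> hook runs, the __call__ methods now build response_metadata as a list of (header, str(value)) pairs from response.headers.items() and pass the response together with that list through a new post_<rpc>_with_metadata hook, keeping only the returned response. Below is a minimal sketch of a caller-defined interceptor that overrides one of these hooks; the hook name and the (response, metadata) tuple contract come from the generated code in this diff, while the import path and the logging behaviour are assumptions for illustration only, not part of this change.

import logging
from typing import Sequence, Tuple

# Assumed import path for the generated REST interceptor base class.
from google.cloud.alloydb_v1.services.alloy_db_admin.transports.rest import (
    AlloyDBAdminRestInterceptor,
)


class HeaderLoggingInterceptor(AlloyDBAdminRestInterceptor):
    """Logs response headers surfaced by the new *_with_metadata hooks."""

    def post_get_cluster_with_metadata(
        self, response, metadata: Sequence[Tuple[str, str]]
    ):
        # `metadata` is the [(header, str(value)), ...] list the transport
        # builds from response.headers.items().
        logging.getLogger(__name__).debug(
            "get_cluster response headers: %s", metadata
        )
        # Returning the pair unchanged mirrors the default base-class behaviour.
        return response, metadata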
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = AlloyDBAdminClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = AlloyDBAdminClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -22572,10 +22622,13 @@ def test_list_clusters_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AlloyDBAdminRestInterceptor, "post_list_clusters" ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "post_list_clusters_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AlloyDBAdminRestInterceptor, "pre_list_clusters" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.ListClustersRequest.pb(service.ListClustersRequest()) transcode.return_value = { "method": "post", @@ -22599,6 +22652,7 @@ def test_list_clusters_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = service.ListClustersResponse() + post_with_metadata.return_value = service.ListClustersResponse(), metadata client.list_clusters( request, @@ -22610,6 +22664,7 @@ def test_list_clusters_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_cluster_rest_bad_request(request_type=service.GetClusterRequest): @@ -22712,10 +22767,13 @@ def test_get_cluster_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AlloyDBAdminRestInterceptor, "post_get_cluster" ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "post_get_cluster_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AlloyDBAdminRestInterceptor, "pre_get_cluster" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.GetClusterRequest.pb(service.GetClusterRequest()) transcode.return_value = { "method": "post", @@ -22737,6 +22795,7 @@ def test_get_cluster_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.Cluster() + post_with_metadata.return_value = resources.Cluster(), metadata client.get_cluster( request, @@ -22748,6 +22807,7 @@ def test_get_cluster_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() 
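The parametrized tests above pin down when credential info is attached to API errors: only for 401/403/404 responses, and only when the credentials object exposes get_cred_info() returning a non-empty payload, which is then JSON-serialized and appended to error.details. The following standalone sketch only makes that asserted contract concrete; it is not the client's actual _add_cred_info_for_auth_errors implementation, and the free-function name used here is hypothetical.

import json

AUTH_ERROR_CODES = (401, 403, 404)


def add_cred_info_for_auth_errors(error, credentials) -> None:
    # Non-auth status codes are left untouched, matching the 500 cases above.
    if error.code not in AUTH_ERROR_CODES:
        return
    get_cred_info = getattr(credentials, "get_cred_info", None)
    if get_cred_info is None:
        return
    cred_info = get_cred_info()
    if cred_info:
        # The tests expect the JSON-dumped dict appended to error.details.
        error.details.append(json.dumps(cred_info))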
def test_create_cluster_rest_bad_request(request_type=service.CreateClusterRequest): @@ -22978,10 +23038,13 @@ def test_create_cluster_rest_interceptors(null_interceptor): ), mock.patch.object( transports.AlloyDBAdminRestInterceptor, "post_create_cluster" ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "post_create_cluster_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AlloyDBAdminRestInterceptor, "pre_create_cluster" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.CreateClusterRequest.pb(service.CreateClusterRequest()) transcode.return_value = { "method": "post", @@ -23003,6 +23066,7 @@ def test_create_cluster_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_cluster( request, @@ -23014,6 +23078,7 @@ def test_create_cluster_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_cluster_rest_bad_request(request_type=service.UpdateClusterRequest): @@ -23248,10 +23313,13 @@ def test_update_cluster_rest_interceptors(null_interceptor): ), mock.patch.object( transports.AlloyDBAdminRestInterceptor, "post_update_cluster" ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "post_update_cluster_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AlloyDBAdminRestInterceptor, "pre_update_cluster" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.UpdateClusterRequest.pb(service.UpdateClusterRequest()) transcode.return_value = { "method": "post", @@ -23273,6 +23341,7 @@ def test_update_cluster_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.update_cluster( request, @@ -23284,6 +23353,7 @@ def test_update_cluster_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_cluster_rest_bad_request(request_type=service.DeleteClusterRequest): @@ -23362,10 +23432,13 @@ def test_delete_cluster_rest_interceptors(null_interceptor): ), mock.patch.object( transports.AlloyDBAdminRestInterceptor, "post_delete_cluster" ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "post_delete_cluster_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AlloyDBAdminRestInterceptor, "pre_delete_cluster" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.DeleteClusterRequest.pb(service.DeleteClusterRequest()) transcode.return_value = { "method": "post", @@ -23387,6 +23460,7 @@ def test_delete_cluster_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.delete_cluster( request, @@ -23398,6 +23472,7 @@ def test_delete_cluster_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_promote_cluster_rest_bad_request(request_type=service.PromoteClusterRequest): @@ -23476,10 +23551,13 @@ 
def test_promote_cluster_rest_interceptors(null_interceptor): ), mock.patch.object( transports.AlloyDBAdminRestInterceptor, "post_promote_cluster" ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "post_promote_cluster_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AlloyDBAdminRestInterceptor, "pre_promote_cluster" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.PromoteClusterRequest.pb(service.PromoteClusterRequest()) transcode.return_value = { "method": "post", @@ -23501,6 +23579,7 @@ def test_promote_cluster_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.promote_cluster( request, @@ -23512,6 +23591,7 @@ def test_promote_cluster_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_switchover_cluster_rest_bad_request( @@ -23592,10 +23672,13 @@ def test_switchover_cluster_rest_interceptors(null_interceptor): ), mock.patch.object( transports.AlloyDBAdminRestInterceptor, "post_switchover_cluster" ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "post_switchover_cluster_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AlloyDBAdminRestInterceptor, "pre_switchover_cluster" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.SwitchoverClusterRequest.pb( service.SwitchoverClusterRequest() ) @@ -23619,6 +23702,7 @@ def test_switchover_cluster_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.switchover_cluster( request, @@ -23630,6 +23714,7 @@ def test_switchover_cluster_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_restore_cluster_rest_bad_request(request_type=service.RestoreClusterRequest): @@ -23708,10 +23793,13 @@ def test_restore_cluster_rest_interceptors(null_interceptor): ), mock.patch.object( transports.AlloyDBAdminRestInterceptor, "post_restore_cluster" ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "post_restore_cluster_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AlloyDBAdminRestInterceptor, "pre_restore_cluster" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.RestoreClusterRequest.pb(service.RestoreClusterRequest()) transcode.return_value = { "method": "post", @@ -23733,6 +23821,7 @@ def test_restore_cluster_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.restore_cluster( request, @@ -23744,6 +23833,7 @@ def test_restore_cluster_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_secondary_cluster_rest_bad_request( @@ -23976,10 +24066,14 @@ def test_create_secondary_cluster_rest_interceptors(null_interceptor): ), mock.patch.object( transports.AlloyDBAdminRestInterceptor, "post_create_secondary_cluster" ) as 
post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, + "post_create_secondary_cluster_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AlloyDBAdminRestInterceptor, "pre_create_secondary_cluster" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.CreateSecondaryClusterRequest.pb( service.CreateSecondaryClusterRequest() ) @@ -24003,6 +24097,7 @@ def test_create_secondary_cluster_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_secondary_cluster( request, @@ -24014,6 +24109,7 @@ def test_create_secondary_cluster_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_instances_rest_bad_request(request_type=service.ListInstancesRequest): @@ -24098,10 +24194,13 @@ def test_list_instances_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AlloyDBAdminRestInterceptor, "post_list_instances" ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "post_list_instances_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AlloyDBAdminRestInterceptor, "pre_list_instances" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.ListInstancesRequest.pb(service.ListInstancesRequest()) transcode.return_value = { "method": "post", @@ -24125,6 +24224,7 @@ def test_list_instances_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = service.ListInstancesResponse() + post_with_metadata.return_value = service.ListInstancesResponse(), metadata client.list_instances( request, @@ -24136,6 +24236,7 @@ def test_list_instances_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_instance_rest_bad_request(request_type=service.GetInstanceRequest): @@ -24248,10 +24349,13 @@ def test_get_instance_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AlloyDBAdminRestInterceptor, "post_get_instance" ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "post_get_instance_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AlloyDBAdminRestInterceptor, "pre_get_instance" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.GetInstanceRequest.pb(service.GetInstanceRequest()) transcode.return_value = { "method": "post", @@ -24273,6 +24377,7 @@ def test_get_instance_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.Instance() + post_with_metadata.return_value = resources.Instance(), metadata client.get_instance( request, @@ -24284,6 +24389,7 @@ def test_get_instance_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_instance_rest_bad_request(request_type=service.CreateInstanceRequest): @@ -24485,10 +24591,13 @@ def test_create_instance_rest_interceptors(null_interceptor): ), mock.patch.object( transports.AlloyDBAdminRestInterceptor, "post_create_instance" ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, 
"post_create_instance_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AlloyDBAdminRestInterceptor, "pre_create_instance" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.CreateInstanceRequest.pb(service.CreateInstanceRequest()) transcode.return_value = { "method": "post", @@ -24510,6 +24619,7 @@ def test_create_instance_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_instance( request, @@ -24521,6 +24631,7 @@ def test_create_instance_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_secondary_instance_rest_bad_request( @@ -24724,10 +24835,14 @@ def test_create_secondary_instance_rest_interceptors(null_interceptor): ), mock.patch.object( transports.AlloyDBAdminRestInterceptor, "post_create_secondary_instance" ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, + "post_create_secondary_instance_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AlloyDBAdminRestInterceptor, "pre_create_secondary_instance" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.CreateSecondaryInstanceRequest.pb( service.CreateSecondaryInstanceRequest() ) @@ -24751,6 +24866,7 @@ def test_create_secondary_instance_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_secondary_instance( request, @@ -24762,6 +24878,7 @@ def test_create_secondary_instance_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_batch_create_instances_rest_bad_request( @@ -24977,10 +25094,14 @@ def test_batch_create_instances_rest_interceptors(null_interceptor): ), mock.patch.object( transports.AlloyDBAdminRestInterceptor, "post_batch_create_instances" ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, + "post_batch_create_instances_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AlloyDBAdminRestInterceptor, "pre_batch_create_instances" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.BatchCreateInstancesRequest.pb( service.BatchCreateInstancesRequest() ) @@ -25004,6 +25125,7 @@ def test_batch_create_instances_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.batch_create_instances( request, @@ -25015,6 +25137,7 @@ def test_batch_create_instances_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_instance_rest_bad_request(request_type=service.UpdateInstanceRequest): @@ -25224,10 +25347,13 @@ def test_update_instance_rest_interceptors(null_interceptor): ), mock.patch.object( transports.AlloyDBAdminRestInterceptor, "post_update_instance" ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "post_update_instance_with_metadata" + ) as post_with_metadata, mock.patch.object( 
transports.AlloyDBAdminRestInterceptor, "pre_update_instance" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.UpdateInstanceRequest.pb(service.UpdateInstanceRequest()) transcode.return_value = { "method": "post", @@ -25249,6 +25375,7 @@ def test_update_instance_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.update_instance( request, @@ -25260,6 +25387,7 @@ def test_update_instance_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_instance_rest_bad_request(request_type=service.DeleteInstanceRequest): @@ -25342,10 +25470,13 @@ def test_delete_instance_rest_interceptors(null_interceptor): ), mock.patch.object( transports.AlloyDBAdminRestInterceptor, "post_delete_instance" ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "post_delete_instance_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AlloyDBAdminRestInterceptor, "pre_delete_instance" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.DeleteInstanceRequest.pb(service.DeleteInstanceRequest()) transcode.return_value = { "method": "post", @@ -25367,6 +25498,7 @@ def test_delete_instance_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.delete_instance( request, @@ -25378,6 +25510,7 @@ def test_delete_instance_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_failover_instance_rest_bad_request( @@ -25462,10 +25595,13 @@ def test_failover_instance_rest_interceptors(null_interceptor): ), mock.patch.object( transports.AlloyDBAdminRestInterceptor, "post_failover_instance" ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "post_failover_instance_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AlloyDBAdminRestInterceptor, "pre_failover_instance" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.FailoverInstanceRequest.pb( service.FailoverInstanceRequest() ) @@ -25489,6 +25625,7 @@ def test_failover_instance_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.failover_instance( request, @@ -25500,6 +25637,7 @@ def test_failover_instance_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_inject_fault_rest_bad_request(request_type=service.InjectFaultRequest): @@ -25582,10 +25720,13 @@ def test_inject_fault_rest_interceptors(null_interceptor): ), mock.patch.object( transports.AlloyDBAdminRestInterceptor, "post_inject_fault" ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "post_inject_fault_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AlloyDBAdminRestInterceptor, "pre_inject_fault" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() 
pb_message = service.InjectFaultRequest.pb(service.InjectFaultRequest()) transcode.return_value = { "method": "post", @@ -25607,6 +25748,7 @@ def test_inject_fault_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.inject_fault( request, @@ -25618,6 +25760,7 @@ def test_inject_fault_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_restart_instance_rest_bad_request(request_type=service.RestartInstanceRequest): @@ -25700,10 +25843,13 @@ def test_restart_instance_rest_interceptors(null_interceptor): ), mock.patch.object( transports.AlloyDBAdminRestInterceptor, "post_restart_instance" ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "post_restart_instance_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AlloyDBAdminRestInterceptor, "pre_restart_instance" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.RestartInstanceRequest.pb(service.RestartInstanceRequest()) transcode.return_value = { "method": "post", @@ -25725,6 +25871,7 @@ def test_restart_instance_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.restart_instance( request, @@ -25736,6 +25883,7 @@ def test_restart_instance_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_execute_sql_rest_bad_request(request_type=service.ExecuteSqlRequest): @@ -25819,10 +25967,13 @@ def test_execute_sql_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AlloyDBAdminRestInterceptor, "post_execute_sql" ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "post_execute_sql_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AlloyDBAdminRestInterceptor, "pre_execute_sql" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.ExecuteSqlRequest.pb(service.ExecuteSqlRequest()) transcode.return_value = { "method": "post", @@ -25844,6 +25995,7 @@ def test_execute_sql_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = service.ExecuteSqlResponse() + post_with_metadata.return_value = service.ExecuteSqlResponse(), metadata client.execute_sql( request, @@ -25855,6 +26007,7 @@ def test_execute_sql_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_backups_rest_bad_request(request_type=service.ListBackupsRequest): @@ -25939,10 +26092,13 @@ def test_list_backups_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AlloyDBAdminRestInterceptor, "post_list_backups" ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "post_list_backups_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AlloyDBAdminRestInterceptor, "pre_list_backups" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.ListBackupsRequest.pb(service.ListBackupsRequest()) transcode.return_value = { "method": 
"post", @@ -25966,6 +26122,7 @@ def test_list_backups_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = service.ListBackupsResponse() + post_with_metadata.return_value = service.ListBackupsResponse(), metadata client.list_backups( request, @@ -25977,6 +26134,7 @@ def test_list_backups_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_backup_rest_bad_request(request_type=service.GetBackupRequest): @@ -26083,10 +26241,13 @@ def test_get_backup_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AlloyDBAdminRestInterceptor, "post_get_backup" ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "post_get_backup_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AlloyDBAdminRestInterceptor, "pre_get_backup" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.GetBackupRequest.pb(service.GetBackupRequest()) transcode.return_value = { "method": "post", @@ -26108,6 +26269,7 @@ def test_get_backup_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.Backup() + post_with_metadata.return_value = resources.Backup(), metadata client.get_backup( request, @@ -26119,6 +26281,7 @@ def test_get_backup_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_backup_rest_bad_request(request_type=service.CreateBackupRequest): @@ -26292,10 +26455,13 @@ def test_create_backup_rest_interceptors(null_interceptor): ), mock.patch.object( transports.AlloyDBAdminRestInterceptor, "post_create_backup" ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "post_create_backup_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AlloyDBAdminRestInterceptor, "pre_create_backup" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.CreateBackupRequest.pb(service.CreateBackupRequest()) transcode.return_value = { "method": "post", @@ -26317,6 +26483,7 @@ def test_create_backup_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_backup( request, @@ -26328,6 +26495,7 @@ def test_create_backup_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_backup_rest_bad_request(request_type=service.UpdateBackupRequest): @@ -26505,10 +26673,13 @@ def test_update_backup_rest_interceptors(null_interceptor): ), mock.patch.object( transports.AlloyDBAdminRestInterceptor, "post_update_backup" ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "post_update_backup_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AlloyDBAdminRestInterceptor, "pre_update_backup" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.UpdateBackupRequest.pb(service.UpdateBackupRequest()) transcode.return_value = { "method": "post", @@ -26530,6 +26701,7 @@ def test_update_backup_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + 
post_with_metadata.return_value = operations_pb2.Operation(), metadata client.update_backup( request, @@ -26541,6 +26713,7 @@ def test_update_backup_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_backup_rest_bad_request(request_type=service.DeleteBackupRequest): @@ -26619,10 +26792,13 @@ def test_delete_backup_rest_interceptors(null_interceptor): ), mock.patch.object( transports.AlloyDBAdminRestInterceptor, "post_delete_backup" ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "post_delete_backup_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AlloyDBAdminRestInterceptor, "pre_delete_backup" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.DeleteBackupRequest.pb(service.DeleteBackupRequest()) transcode.return_value = { "method": "post", @@ -26644,6 +26820,7 @@ def test_delete_backup_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.delete_backup( request, @@ -26655,6 +26832,7 @@ def test_delete_backup_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_supported_database_flags_rest_bad_request( @@ -26739,10 +26917,14 @@ def test_list_supported_database_flags_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AlloyDBAdminRestInterceptor, "post_list_supported_database_flags" ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, + "post_list_supported_database_flags_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AlloyDBAdminRestInterceptor, "pre_list_supported_database_flags" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.ListSupportedDatabaseFlagsRequest.pb( service.ListSupportedDatabaseFlagsRequest() ) @@ -26768,6 +26950,10 @@ def test_list_supported_database_flags_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = service.ListSupportedDatabaseFlagsResponse() + post_with_metadata.return_value = ( + service.ListSupportedDatabaseFlagsResponse(), + metadata, + ) client.list_supported_database_flags( request, @@ -26779,6 +26965,7 @@ def test_list_supported_database_flags_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_generate_client_certificate_rest_bad_request( @@ -26865,10 +27052,14 @@ def test_generate_client_certificate_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AlloyDBAdminRestInterceptor, "post_generate_client_certificate" ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, + "post_generate_client_certificate_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AlloyDBAdminRestInterceptor, "pre_generate_client_certificate" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.GenerateClientCertificateRequest.pb( service.GenerateClientCertificateRequest() ) @@ -26894,6 +27085,10 @@ def test_generate_client_certificate_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value 
= service.GenerateClientCertificateResponse() + post_with_metadata.return_value = ( + service.GenerateClientCertificateResponse(), + metadata, + ) client.generate_client_certificate( request, @@ -26905,6 +27100,7 @@ def test_generate_client_certificate_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_connection_info_rest_bad_request( @@ -26999,10 +27195,13 @@ def test_get_connection_info_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AlloyDBAdminRestInterceptor, "post_get_connection_info" ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "post_get_connection_info_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AlloyDBAdminRestInterceptor, "pre_get_connection_info" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.GetConnectionInfoRequest.pb( service.GetConnectionInfoRequest() ) @@ -27026,6 +27225,7 @@ def test_get_connection_info_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.ConnectionInfo() + post_with_metadata.return_value = resources.ConnectionInfo(), metadata client.get_connection_info( request, @@ -27037,6 +27237,7 @@ def test_get_connection_info_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_users_rest_bad_request(request_type=service.ListUsersRequest): @@ -27121,10 +27322,13 @@ def test_list_users_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AlloyDBAdminRestInterceptor, "post_list_users" ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "post_list_users_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AlloyDBAdminRestInterceptor, "pre_list_users" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.ListUsersRequest.pb(service.ListUsersRequest()) transcode.return_value = { "method": "post", @@ -27146,6 +27350,7 @@ def test_list_users_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = service.ListUsersResponse() + post_with_metadata.return_value = service.ListUsersResponse(), metadata client.list_users( request, @@ -27157,6 +27362,7 @@ def test_list_users_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_user_rest_bad_request(request_type=service.GetUserRequest): @@ -27251,10 +27457,13 @@ def test_get_user_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AlloyDBAdminRestInterceptor, "post_get_user" ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "post_get_user_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AlloyDBAdminRestInterceptor, "pre_get_user" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.GetUserRequest.pb(service.GetUserRequest()) transcode.return_value = { "method": "post", @@ -27276,6 +27485,7 @@ def test_get_user_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.User() + post_with_metadata.return_value = resources.User(), metadata client.get_user( request, @@ -27287,6 +27497,7 @@ def 
test_get_user_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_user_rest_bad_request(request_type=service.CreateUserRequest): @@ -27451,10 +27662,13 @@ def test_create_user_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AlloyDBAdminRestInterceptor, "post_create_user" ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "post_create_user_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AlloyDBAdminRestInterceptor, "pre_create_user" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.CreateUserRequest.pb(service.CreateUserRequest()) transcode.return_value = { "method": "post", @@ -27476,6 +27690,7 @@ def test_create_user_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.User() + post_with_metadata.return_value = resources.User(), metadata client.create_user( request, @@ -27487,6 +27702,7 @@ def test_create_user_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_user_rest_bad_request(request_type=service.UpdateUserRequest): @@ -27659,10 +27875,13 @@ def test_update_user_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AlloyDBAdminRestInterceptor, "post_update_user" ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "post_update_user_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AlloyDBAdminRestInterceptor, "pre_update_user" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.UpdateUserRequest.pb(service.UpdateUserRequest()) transcode.return_value = { "method": "post", @@ -27684,6 +27903,7 @@ def test_update_user_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.User() + post_with_metadata.return_value = resources.User(), metadata client.update_user( request, @@ -27695,6 +27915,7 @@ def test_update_user_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_user_rest_bad_request(request_type=service.DeleteUserRequest): @@ -27886,10 +28107,13 @@ def test_list_databases_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AlloyDBAdminRestInterceptor, "post_list_databases" ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "post_list_databases_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AlloyDBAdminRestInterceptor, "pre_list_databases" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.ListDatabasesRequest.pb(service.ListDatabasesRequest()) transcode.return_value = { "method": "post", @@ -27913,6 +28137,7 @@ def test_list_databases_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = service.ListDatabasesResponse() + post_with_metadata.return_value = service.ListDatabasesResponse(), metadata client.list_databases( request, @@ -27924,6 +28149,7 @@ def test_list_databases_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def 
test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationRequest): diff --git a/packages/google-cloud-alloydb/tests/unit/gapic/alloydb_v1alpha/test_alloy_db_admin.py b/packages/google-cloud-alloydb/tests/unit/gapic/alloydb_v1alpha/test_alloy_db_admin.py index 5935432e7a1f..40e03d35d80b 100644 --- a/packages/google-cloud-alloydb/tests/unit/gapic/alloydb_v1alpha/test_alloy_db_admin.py +++ b/packages/google-cloud-alloydb/tests/unit/gapic/alloydb_v1alpha/test_alloy_db_admin.py @@ -86,6 +86,13 @@ service, ) +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -329,6 +336,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = AlloyDBAdminClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = AlloyDBAdminClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -23191,10 +23241,13 @@ def test_list_clusters_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AlloyDBAdminRestInterceptor, "post_list_clusters" ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "post_list_clusters_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AlloyDBAdminRestInterceptor, "pre_list_clusters" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.ListClustersRequest.pb(service.ListClustersRequest()) transcode.return_value = { "method": "post", @@ -23218,6 +23271,7 @@ def test_list_clusters_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = service.ListClustersResponse() + post_with_metadata.return_value = service.ListClustersResponse(), metadata client.list_clusters( request, @@ -23229,6 +23283,7 @@ def test_list_clusters_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_cluster_rest_bad_request(request_type=service.GetClusterRequest): @@ -23333,10 +23388,13 @@ def 
test_get_cluster_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AlloyDBAdminRestInterceptor, "post_get_cluster" ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "post_get_cluster_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AlloyDBAdminRestInterceptor, "pre_get_cluster" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.GetClusterRequest.pb(service.GetClusterRequest()) transcode.return_value = { "method": "post", @@ -23358,6 +23416,7 @@ def test_get_cluster_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.Cluster() + post_with_metadata.return_value = resources.Cluster(), metadata client.get_cluster( request, @@ -23369,6 +23428,7 @@ def test_get_cluster_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_cluster_rest_bad_request(request_type=service.CreateClusterRequest): @@ -23606,10 +23666,13 @@ def test_create_cluster_rest_interceptors(null_interceptor): ), mock.patch.object( transports.AlloyDBAdminRestInterceptor, "post_create_cluster" ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "post_create_cluster_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AlloyDBAdminRestInterceptor, "pre_create_cluster" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.CreateClusterRequest.pb(service.CreateClusterRequest()) transcode.return_value = { "method": "post", @@ -23631,6 +23694,7 @@ def test_create_cluster_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_cluster( request, @@ -23642,6 +23706,7 @@ def test_create_cluster_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_cluster_rest_bad_request(request_type=service.UpdateClusterRequest): @@ -23883,10 +23948,13 @@ def test_update_cluster_rest_interceptors(null_interceptor): ), mock.patch.object( transports.AlloyDBAdminRestInterceptor, "post_update_cluster" ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "post_update_cluster_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AlloyDBAdminRestInterceptor, "pre_update_cluster" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.UpdateClusterRequest.pb(service.UpdateClusterRequest()) transcode.return_value = { "method": "post", @@ -23908,6 +23976,7 @@ def test_update_cluster_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.update_cluster( request, @@ -23919,6 +23988,7 @@ def test_update_cluster_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_upgrade_cluster_rest_bad_request(request_type=service.UpgradeClusterRequest): @@ -23997,10 +24067,13 @@ def test_upgrade_cluster_rest_interceptors(null_interceptor): ), mock.patch.object( transports.AlloyDBAdminRestInterceptor, 
"post_upgrade_cluster" ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "post_upgrade_cluster_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AlloyDBAdminRestInterceptor, "pre_upgrade_cluster" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.UpgradeClusterRequest.pb(service.UpgradeClusterRequest()) transcode.return_value = { "method": "post", @@ -24022,6 +24095,7 @@ def test_upgrade_cluster_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.upgrade_cluster( request, @@ -24033,6 +24107,7 @@ def test_upgrade_cluster_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_cluster_rest_bad_request(request_type=service.DeleteClusterRequest): @@ -24111,10 +24186,13 @@ def test_delete_cluster_rest_interceptors(null_interceptor): ), mock.patch.object( transports.AlloyDBAdminRestInterceptor, "post_delete_cluster" ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "post_delete_cluster_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AlloyDBAdminRestInterceptor, "pre_delete_cluster" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.DeleteClusterRequest.pb(service.DeleteClusterRequest()) transcode.return_value = { "method": "post", @@ -24136,6 +24214,7 @@ def test_delete_cluster_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.delete_cluster( request, @@ -24147,6 +24226,7 @@ def test_delete_cluster_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_promote_cluster_rest_bad_request(request_type=service.PromoteClusterRequest): @@ -24225,10 +24305,13 @@ def test_promote_cluster_rest_interceptors(null_interceptor): ), mock.patch.object( transports.AlloyDBAdminRestInterceptor, "post_promote_cluster" ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "post_promote_cluster_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AlloyDBAdminRestInterceptor, "pre_promote_cluster" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.PromoteClusterRequest.pb(service.PromoteClusterRequest()) transcode.return_value = { "method": "post", @@ -24250,6 +24333,7 @@ def test_promote_cluster_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.promote_cluster( request, @@ -24261,6 +24345,7 @@ def test_promote_cluster_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_switchover_cluster_rest_bad_request( @@ -24341,10 +24426,13 @@ def test_switchover_cluster_rest_interceptors(null_interceptor): ), mock.patch.object( transports.AlloyDBAdminRestInterceptor, "post_switchover_cluster" ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, 
"post_switchover_cluster_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AlloyDBAdminRestInterceptor, "pre_switchover_cluster" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.SwitchoverClusterRequest.pb( service.SwitchoverClusterRequest() ) @@ -24368,6 +24456,7 @@ def test_switchover_cluster_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.switchover_cluster( request, @@ -24379,6 +24468,7 @@ def test_switchover_cluster_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_restore_cluster_rest_bad_request(request_type=service.RestoreClusterRequest): @@ -24457,10 +24547,13 @@ def test_restore_cluster_rest_interceptors(null_interceptor): ), mock.patch.object( transports.AlloyDBAdminRestInterceptor, "post_restore_cluster" ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "post_restore_cluster_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AlloyDBAdminRestInterceptor, "pre_restore_cluster" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.RestoreClusterRequest.pb(service.RestoreClusterRequest()) transcode.return_value = { "method": "post", @@ -24482,6 +24575,7 @@ def test_restore_cluster_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.restore_cluster( request, @@ -24493,6 +24587,7 @@ def test_restore_cluster_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_secondary_cluster_rest_bad_request( @@ -24732,10 +24827,14 @@ def test_create_secondary_cluster_rest_interceptors(null_interceptor): ), mock.patch.object( transports.AlloyDBAdminRestInterceptor, "post_create_secondary_cluster" ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, + "post_create_secondary_cluster_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AlloyDBAdminRestInterceptor, "pre_create_secondary_cluster" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.CreateSecondaryClusterRequest.pb( service.CreateSecondaryClusterRequest() ) @@ -24759,6 +24858,7 @@ def test_create_secondary_cluster_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_secondary_cluster( request, @@ -24770,6 +24870,7 @@ def test_create_secondary_cluster_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_instances_rest_bad_request(request_type=service.ListInstancesRequest): @@ -24854,10 +24955,13 @@ def test_list_instances_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AlloyDBAdminRestInterceptor, "post_list_instances" ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "post_list_instances_with_metadata" + ) as post_with_metadata, mock.patch.object( 
transports.AlloyDBAdminRestInterceptor, "pre_list_instances" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.ListInstancesRequest.pb(service.ListInstancesRequest()) transcode.return_value = { "method": "post", @@ -24881,6 +24985,7 @@ def test_list_instances_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = service.ListInstancesResponse() + post_with_metadata.return_value = service.ListInstancesResponse(), metadata client.list_instances( request, @@ -24892,6 +24997,7 @@ def test_list_instances_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_instance_rest_bad_request(request_type=service.GetInstanceRequest): @@ -25006,10 +25112,13 @@ def test_get_instance_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AlloyDBAdminRestInterceptor, "post_get_instance" ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "post_get_instance_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AlloyDBAdminRestInterceptor, "pre_get_instance" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.GetInstanceRequest.pb(service.GetInstanceRequest()) transcode.return_value = { "method": "post", @@ -25031,6 +25140,7 @@ def test_get_instance_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.Instance() + post_with_metadata.return_value = resources.Instance(), metadata client.get_instance( request, @@ -25042,6 +25152,7 @@ def test_get_instance_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_instance_rest_bad_request(request_type=service.CreateInstanceRequest): @@ -25260,10 +25371,13 @@ def test_create_instance_rest_interceptors(null_interceptor): ), mock.patch.object( transports.AlloyDBAdminRestInterceptor, "post_create_instance" ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "post_create_instance_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AlloyDBAdminRestInterceptor, "pre_create_instance" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.CreateInstanceRequest.pb(service.CreateInstanceRequest()) transcode.return_value = { "method": "post", @@ -25285,6 +25399,7 @@ def test_create_instance_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_instance( request, @@ -25296,6 +25411,7 @@ def test_create_instance_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_secondary_instance_rest_bad_request( @@ -25516,10 +25632,14 @@ def test_create_secondary_instance_rest_interceptors(null_interceptor): ), mock.patch.object( transports.AlloyDBAdminRestInterceptor, "post_create_secondary_instance" ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, + "post_create_secondary_instance_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AlloyDBAdminRestInterceptor, "pre_create_secondary_instance" ) as pre: pre.assert_not_called() 
post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.CreateSecondaryInstanceRequest.pb( service.CreateSecondaryInstanceRequest() ) @@ -25543,6 +25663,7 @@ def test_create_secondary_instance_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_secondary_instance( request, @@ -25554,6 +25675,7 @@ def test_create_secondary_instance_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_batch_create_instances_rest_bad_request( @@ -25788,10 +25910,14 @@ def test_batch_create_instances_rest_interceptors(null_interceptor): ), mock.patch.object( transports.AlloyDBAdminRestInterceptor, "post_batch_create_instances" ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, + "post_batch_create_instances_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AlloyDBAdminRestInterceptor, "pre_batch_create_instances" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.BatchCreateInstancesRequest.pb( service.BatchCreateInstancesRequest() ) @@ -25815,6 +25941,7 @@ def test_batch_create_instances_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.batch_create_instances( request, @@ -25826,6 +25953,7 @@ def test_batch_create_instances_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_instance_rest_bad_request(request_type=service.UpdateInstanceRequest): @@ -26052,10 +26180,13 @@ def test_update_instance_rest_interceptors(null_interceptor): ), mock.patch.object( transports.AlloyDBAdminRestInterceptor, "post_update_instance" ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "post_update_instance_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AlloyDBAdminRestInterceptor, "pre_update_instance" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.UpdateInstanceRequest.pb(service.UpdateInstanceRequest()) transcode.return_value = { "method": "post", @@ -26077,6 +26208,7 @@ def test_update_instance_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.update_instance( request, @@ -26088,6 +26220,7 @@ def test_update_instance_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_instance_rest_bad_request(request_type=service.DeleteInstanceRequest): @@ -26170,10 +26303,13 @@ def test_delete_instance_rest_interceptors(null_interceptor): ), mock.patch.object( transports.AlloyDBAdminRestInterceptor, "post_delete_instance" ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "post_delete_instance_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AlloyDBAdminRestInterceptor, "pre_delete_instance" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = 
service.DeleteInstanceRequest.pb(service.DeleteInstanceRequest()) transcode.return_value = { "method": "post", @@ -26195,6 +26331,7 @@ def test_delete_instance_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.delete_instance( request, @@ -26206,6 +26343,7 @@ def test_delete_instance_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_failover_instance_rest_bad_request( @@ -26290,10 +26428,13 @@ def test_failover_instance_rest_interceptors(null_interceptor): ), mock.patch.object( transports.AlloyDBAdminRestInterceptor, "post_failover_instance" ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "post_failover_instance_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AlloyDBAdminRestInterceptor, "pre_failover_instance" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.FailoverInstanceRequest.pb( service.FailoverInstanceRequest() ) @@ -26317,6 +26458,7 @@ def test_failover_instance_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.failover_instance( request, @@ -26328,6 +26470,7 @@ def test_failover_instance_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_inject_fault_rest_bad_request(request_type=service.InjectFaultRequest): @@ -26410,10 +26553,13 @@ def test_inject_fault_rest_interceptors(null_interceptor): ), mock.patch.object( transports.AlloyDBAdminRestInterceptor, "post_inject_fault" ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "post_inject_fault_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AlloyDBAdminRestInterceptor, "pre_inject_fault" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.InjectFaultRequest.pb(service.InjectFaultRequest()) transcode.return_value = { "method": "post", @@ -26435,6 +26581,7 @@ def test_inject_fault_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.inject_fault( request, @@ -26446,6 +26593,7 @@ def test_inject_fault_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_restart_instance_rest_bad_request(request_type=service.RestartInstanceRequest): @@ -26528,10 +26676,13 @@ def test_restart_instance_rest_interceptors(null_interceptor): ), mock.patch.object( transports.AlloyDBAdminRestInterceptor, "post_restart_instance" ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "post_restart_instance_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AlloyDBAdminRestInterceptor, "pre_restart_instance" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.RestartInstanceRequest.pb(service.RestartInstanceRequest()) transcode.return_value = { "method": "post", @@ -26553,6 +26704,7 @@ def 
test_restart_instance_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.restart_instance( request, @@ -26564,6 +26716,7 @@ def test_restart_instance_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_execute_sql_rest_bad_request(request_type=service.ExecuteSqlRequest): @@ -26647,10 +26800,13 @@ def test_execute_sql_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AlloyDBAdminRestInterceptor, "post_execute_sql" ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "post_execute_sql_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AlloyDBAdminRestInterceptor, "pre_execute_sql" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.ExecuteSqlRequest.pb(service.ExecuteSqlRequest()) transcode.return_value = { "method": "post", @@ -26672,6 +26828,7 @@ def test_execute_sql_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = service.ExecuteSqlResponse() + post_with_metadata.return_value = service.ExecuteSqlResponse(), metadata client.execute_sql( request, @@ -26683,6 +26840,7 @@ def test_execute_sql_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_backups_rest_bad_request(request_type=service.ListBackupsRequest): @@ -26767,10 +26925,13 @@ def test_list_backups_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AlloyDBAdminRestInterceptor, "post_list_backups" ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "post_list_backups_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AlloyDBAdminRestInterceptor, "pre_list_backups" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.ListBackupsRequest.pb(service.ListBackupsRequest()) transcode.return_value = { "method": "post", @@ -26794,6 +26955,7 @@ def test_list_backups_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = service.ListBackupsResponse() + post_with_metadata.return_value = service.ListBackupsResponse(), metadata client.list_backups( request, @@ -26805,6 +26967,7 @@ def test_list_backups_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_backup_rest_bad_request(request_type=service.GetBackupRequest): @@ -26913,10 +27076,13 @@ def test_get_backup_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AlloyDBAdminRestInterceptor, "post_get_backup" ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "post_get_backup_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AlloyDBAdminRestInterceptor, "pre_get_backup" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.GetBackupRequest.pb(service.GetBackupRequest()) transcode.return_value = { "method": "post", @@ -26938,6 +27104,7 @@ def test_get_backup_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.Backup() + 
post_with_metadata.return_value = resources.Backup(), metadata client.get_backup( request, @@ -26949,6 +27116,7 @@ def test_get_backup_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_backup_rest_bad_request(request_type=service.CreateBackupRequest): @@ -27123,10 +27291,13 @@ def test_create_backup_rest_interceptors(null_interceptor): ), mock.patch.object( transports.AlloyDBAdminRestInterceptor, "post_create_backup" ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "post_create_backup_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AlloyDBAdminRestInterceptor, "pre_create_backup" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.CreateBackupRequest.pb(service.CreateBackupRequest()) transcode.return_value = { "method": "post", @@ -27148,6 +27319,7 @@ def test_create_backup_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_backup( request, @@ -27159,6 +27331,7 @@ def test_create_backup_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_backup_rest_bad_request(request_type=service.UpdateBackupRequest): @@ -27337,10 +27510,13 @@ def test_update_backup_rest_interceptors(null_interceptor): ), mock.patch.object( transports.AlloyDBAdminRestInterceptor, "post_update_backup" ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "post_update_backup_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AlloyDBAdminRestInterceptor, "pre_update_backup" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.UpdateBackupRequest.pb(service.UpdateBackupRequest()) transcode.return_value = { "method": "post", @@ -27362,6 +27538,7 @@ def test_update_backup_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.update_backup( request, @@ -27373,6 +27550,7 @@ def test_update_backup_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_backup_rest_bad_request(request_type=service.DeleteBackupRequest): @@ -27451,10 +27629,13 @@ def test_delete_backup_rest_interceptors(null_interceptor): ), mock.patch.object( transports.AlloyDBAdminRestInterceptor, "post_delete_backup" ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "post_delete_backup_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AlloyDBAdminRestInterceptor, "pre_delete_backup" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.DeleteBackupRequest.pb(service.DeleteBackupRequest()) transcode.return_value = { "method": "post", @@ -27476,6 +27657,7 @@ def test_delete_backup_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.delete_backup( request, @@ -27487,6 +27669,7 @@ def 
test_delete_backup_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_supported_database_flags_rest_bad_request( @@ -27571,10 +27754,14 @@ def test_list_supported_database_flags_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AlloyDBAdminRestInterceptor, "post_list_supported_database_flags" ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, + "post_list_supported_database_flags_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AlloyDBAdminRestInterceptor, "pre_list_supported_database_flags" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.ListSupportedDatabaseFlagsRequest.pb( service.ListSupportedDatabaseFlagsRequest() ) @@ -27600,6 +27787,10 @@ def test_list_supported_database_flags_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = service.ListSupportedDatabaseFlagsResponse() + post_with_metadata.return_value = ( + service.ListSupportedDatabaseFlagsResponse(), + metadata, + ) client.list_supported_database_flags( request, @@ -27611,6 +27802,7 @@ def test_list_supported_database_flags_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_generate_client_certificate_rest_bad_request( @@ -27699,10 +27891,14 @@ def test_generate_client_certificate_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AlloyDBAdminRestInterceptor, "post_generate_client_certificate" ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, + "post_generate_client_certificate_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AlloyDBAdminRestInterceptor, "pre_generate_client_certificate" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.GenerateClientCertificateRequest.pb( service.GenerateClientCertificateRequest() ) @@ -27728,6 +27924,10 @@ def test_generate_client_certificate_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = service.GenerateClientCertificateResponse() + post_with_metadata.return_value = ( + service.GenerateClientCertificateResponse(), + metadata, + ) client.generate_client_certificate( request, @@ -27739,6 +27939,7 @@ def test_generate_client_certificate_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_connection_info_rest_bad_request( @@ -27837,10 +28038,13 @@ def test_get_connection_info_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AlloyDBAdminRestInterceptor, "post_get_connection_info" ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "post_get_connection_info_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AlloyDBAdminRestInterceptor, "pre_get_connection_info" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.GetConnectionInfoRequest.pb( service.GetConnectionInfoRequest() ) @@ -27864,6 +28068,7 @@ def test_get_connection_info_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.ConnectionInfo() + post_with_metadata.return_value = 
resources.ConnectionInfo(), metadata client.get_connection_info( request, @@ -27875,6 +28080,7 @@ def test_get_connection_info_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_users_rest_bad_request(request_type=service.ListUsersRequest): @@ -27959,10 +28165,13 @@ def test_list_users_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AlloyDBAdminRestInterceptor, "post_list_users" ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "post_list_users_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AlloyDBAdminRestInterceptor, "pre_list_users" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.ListUsersRequest.pb(service.ListUsersRequest()) transcode.return_value = { "method": "post", @@ -27984,6 +28193,7 @@ def test_list_users_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = service.ListUsersResponse() + post_with_metadata.return_value = service.ListUsersResponse(), metadata client.list_users( request, @@ -27995,6 +28205,7 @@ def test_list_users_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_user_rest_bad_request(request_type=service.GetUserRequest): @@ -28089,10 +28300,13 @@ def test_get_user_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AlloyDBAdminRestInterceptor, "post_get_user" ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "post_get_user_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AlloyDBAdminRestInterceptor, "pre_get_user" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.GetUserRequest.pb(service.GetUserRequest()) transcode.return_value = { "method": "post", @@ -28114,6 +28328,7 @@ def test_get_user_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.User() + post_with_metadata.return_value = resources.User(), metadata client.get_user( request, @@ -28125,6 +28340,7 @@ def test_get_user_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_user_rest_bad_request(request_type=service.CreateUserRequest): @@ -28289,10 +28505,13 @@ def test_create_user_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AlloyDBAdminRestInterceptor, "post_create_user" ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "post_create_user_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AlloyDBAdminRestInterceptor, "pre_create_user" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.CreateUserRequest.pb(service.CreateUserRequest()) transcode.return_value = { "method": "post", @@ -28314,6 +28533,7 @@ def test_create_user_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.User() + post_with_metadata.return_value = resources.User(), metadata client.create_user( request, @@ -28325,6 +28545,7 @@ def test_create_user_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + 
post_with_metadata.assert_called_once() def test_update_user_rest_bad_request(request_type=service.UpdateUserRequest): @@ -28497,10 +28718,13 @@ def test_update_user_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AlloyDBAdminRestInterceptor, "post_update_user" ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "post_update_user_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AlloyDBAdminRestInterceptor, "pre_update_user" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.UpdateUserRequest.pb(service.UpdateUserRequest()) transcode.return_value = { "method": "post", @@ -28522,6 +28746,7 @@ def test_update_user_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.User() + post_with_metadata.return_value = resources.User(), metadata client.update_user( request, @@ -28533,6 +28758,7 @@ def test_update_user_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_user_rest_bad_request(request_type=service.DeleteUserRequest): @@ -28724,10 +28950,13 @@ def test_list_databases_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AlloyDBAdminRestInterceptor, "post_list_databases" ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "post_list_databases_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AlloyDBAdminRestInterceptor, "pre_list_databases" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.ListDatabasesRequest.pb(service.ListDatabasesRequest()) transcode.return_value = { "method": "post", @@ -28751,6 +28980,7 @@ def test_list_databases_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = service.ListDatabasesResponse() + post_with_metadata.return_value = service.ListDatabasesResponse(), metadata client.list_databases( request, @@ -28762,6 +28992,7 @@ def test_list_databases_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationRequest): diff --git a/packages/google-cloud-alloydb/tests/unit/gapic/alloydb_v1beta/test_alloy_db_admin.py b/packages/google-cloud-alloydb/tests/unit/gapic/alloydb_v1beta/test_alloy_db_admin.py index c5907e0a6acf..7a55dff3383d 100644 --- a/packages/google-cloud-alloydb/tests/unit/gapic/alloydb_v1beta/test_alloy_db_admin.py +++ b/packages/google-cloud-alloydb/tests/unit/gapic/alloydb_v1beta/test_alloy_db_admin.py @@ -86,6 +86,13 @@ service, ) +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -329,6 +336,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = AlloyDBAdminClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = AlloyDBAdminClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -23176,10 +23226,13 @@ def test_list_clusters_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AlloyDBAdminRestInterceptor, "post_list_clusters" ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "post_list_clusters_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AlloyDBAdminRestInterceptor, "pre_list_clusters" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.ListClustersRequest.pb(service.ListClustersRequest()) transcode.return_value = { "method": "post", @@ -23203,6 +23256,7 @@ def test_list_clusters_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = service.ListClustersResponse() + post_with_metadata.return_value = service.ListClustersResponse(), metadata client.list_clusters( request, @@ -23214,6 +23268,7 @@ def test_list_clusters_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_cluster_rest_bad_request(request_type=service.GetClusterRequest): @@ -23316,10 +23371,13 @@ def test_get_cluster_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AlloyDBAdminRestInterceptor, "post_get_cluster" ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "post_get_cluster_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AlloyDBAdminRestInterceptor, "pre_get_cluster" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.GetClusterRequest.pb(service.GetClusterRequest()) transcode.return_value = { "method": "post", @@ -23341,6 +23399,7 @@ def test_get_cluster_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.Cluster() + post_with_metadata.return_value = resources.Cluster(), metadata client.get_cluster( request, @@ -23352,6 +23411,7 @@ def test_get_cluster_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() 
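# --- Editor's note (not part of the generated patch) -----------------------
# The new test__add_cred_info_for_auth_errors cases added above pin down the
# behaviour of the client helper _add_cred_info_for_auth_errors: for auth
# errors (HTTP 401/403/404) whose credentials expose get_cred_info(), the
# serialized credential info is appended to error.details; all other errors,
# and credentials without get_cred_info(), leave the details untouched.
# A minimal standalone sketch of that contract follows; the names and
# structure are illustrative only, not the generated client source.
import json


def add_cred_info_for_auth_errors(error, credentials):
    """Append credential info to an auth error's details, mirroring the tested contract."""
    if error.code not in (401, 403, 404):
        return  # only auth-related errors get the extra context
    get_cred_info = getattr(credentials, "get_cred_info", None)
    if get_cred_info is None:
        return  # credentials that cannot report info are skipped
    cred_info = get_cred_info()
    if cred_info:
        error.details.append(json.dumps(cred_info))
# ----------------------------------------------------------------------------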
def test_create_cluster_rest_bad_request(request_type=service.CreateClusterRequest): @@ -23588,10 +23648,13 @@ def test_create_cluster_rest_interceptors(null_interceptor): ), mock.patch.object( transports.AlloyDBAdminRestInterceptor, "post_create_cluster" ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "post_create_cluster_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AlloyDBAdminRestInterceptor, "pre_create_cluster" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.CreateClusterRequest.pb(service.CreateClusterRequest()) transcode.return_value = { "method": "post", @@ -23613,6 +23676,7 @@ def test_create_cluster_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_cluster( request, @@ -23624,6 +23688,7 @@ def test_create_cluster_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_cluster_rest_bad_request(request_type=service.UpdateClusterRequest): @@ -23864,10 +23929,13 @@ def test_update_cluster_rest_interceptors(null_interceptor): ), mock.patch.object( transports.AlloyDBAdminRestInterceptor, "post_update_cluster" ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "post_update_cluster_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AlloyDBAdminRestInterceptor, "pre_update_cluster" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.UpdateClusterRequest.pb(service.UpdateClusterRequest()) transcode.return_value = { "method": "post", @@ -23889,6 +23957,7 @@ def test_update_cluster_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.update_cluster( request, @@ -23900,6 +23969,7 @@ def test_update_cluster_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_upgrade_cluster_rest_bad_request(request_type=service.UpgradeClusterRequest): @@ -23978,10 +24048,13 @@ def test_upgrade_cluster_rest_interceptors(null_interceptor): ), mock.patch.object( transports.AlloyDBAdminRestInterceptor, "post_upgrade_cluster" ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "post_upgrade_cluster_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AlloyDBAdminRestInterceptor, "pre_upgrade_cluster" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.UpgradeClusterRequest.pb(service.UpgradeClusterRequest()) transcode.return_value = { "method": "post", @@ -24003,6 +24076,7 @@ def test_upgrade_cluster_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.upgrade_cluster( request, @@ -24014,6 +24088,7 @@ def test_upgrade_cluster_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_cluster_rest_bad_request(request_type=service.DeleteClusterRequest): @@ -24092,10 
+24167,13 @@ def test_delete_cluster_rest_interceptors(null_interceptor): ), mock.patch.object( transports.AlloyDBAdminRestInterceptor, "post_delete_cluster" ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "post_delete_cluster_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AlloyDBAdminRestInterceptor, "pre_delete_cluster" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.DeleteClusterRequest.pb(service.DeleteClusterRequest()) transcode.return_value = { "method": "post", @@ -24117,6 +24195,7 @@ def test_delete_cluster_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.delete_cluster( request, @@ -24128,6 +24207,7 @@ def test_delete_cluster_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_promote_cluster_rest_bad_request(request_type=service.PromoteClusterRequest): @@ -24206,10 +24286,13 @@ def test_promote_cluster_rest_interceptors(null_interceptor): ), mock.patch.object( transports.AlloyDBAdminRestInterceptor, "post_promote_cluster" ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "post_promote_cluster_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AlloyDBAdminRestInterceptor, "pre_promote_cluster" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.PromoteClusterRequest.pb(service.PromoteClusterRequest()) transcode.return_value = { "method": "post", @@ -24231,6 +24314,7 @@ def test_promote_cluster_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.promote_cluster( request, @@ -24242,6 +24326,7 @@ def test_promote_cluster_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_switchover_cluster_rest_bad_request( @@ -24322,10 +24407,13 @@ def test_switchover_cluster_rest_interceptors(null_interceptor): ), mock.patch.object( transports.AlloyDBAdminRestInterceptor, "post_switchover_cluster" ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "post_switchover_cluster_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AlloyDBAdminRestInterceptor, "pre_switchover_cluster" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.SwitchoverClusterRequest.pb( service.SwitchoverClusterRequest() ) @@ -24349,6 +24437,7 @@ def test_switchover_cluster_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.switchover_cluster( request, @@ -24360,6 +24449,7 @@ def test_switchover_cluster_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_restore_cluster_rest_bad_request(request_type=service.RestoreClusterRequest): @@ -24438,10 +24528,13 @@ def test_restore_cluster_rest_interceptors(null_interceptor): ), mock.patch.object( transports.AlloyDBAdminRestInterceptor, 
"post_restore_cluster" ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "post_restore_cluster_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AlloyDBAdminRestInterceptor, "pre_restore_cluster" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.RestoreClusterRequest.pb(service.RestoreClusterRequest()) transcode.return_value = { "method": "post", @@ -24463,6 +24556,7 @@ def test_restore_cluster_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.restore_cluster( request, @@ -24474,6 +24568,7 @@ def test_restore_cluster_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_secondary_cluster_rest_bad_request( @@ -24712,10 +24807,14 @@ def test_create_secondary_cluster_rest_interceptors(null_interceptor): ), mock.patch.object( transports.AlloyDBAdminRestInterceptor, "post_create_secondary_cluster" ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, + "post_create_secondary_cluster_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AlloyDBAdminRestInterceptor, "pre_create_secondary_cluster" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.CreateSecondaryClusterRequest.pb( service.CreateSecondaryClusterRequest() ) @@ -24739,6 +24838,7 @@ def test_create_secondary_cluster_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_secondary_cluster( request, @@ -24750,6 +24850,7 @@ def test_create_secondary_cluster_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_instances_rest_bad_request(request_type=service.ListInstancesRequest): @@ -24834,10 +24935,13 @@ def test_list_instances_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AlloyDBAdminRestInterceptor, "post_list_instances" ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "post_list_instances_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AlloyDBAdminRestInterceptor, "pre_list_instances" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.ListInstancesRequest.pb(service.ListInstancesRequest()) transcode.return_value = { "method": "post", @@ -24861,6 +24965,7 @@ def test_list_instances_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = service.ListInstancesResponse() + post_with_metadata.return_value = service.ListInstancesResponse(), metadata client.list_instances( request, @@ -24872,6 +24977,7 @@ def test_list_instances_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_instance_rest_bad_request(request_type=service.GetInstanceRequest): @@ -24984,10 +25090,13 @@ def test_get_instance_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AlloyDBAdminRestInterceptor, "post_get_instance" ) as post, mock.patch.object( + 
transports.AlloyDBAdminRestInterceptor, "post_get_instance_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AlloyDBAdminRestInterceptor, "pre_get_instance" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.GetInstanceRequest.pb(service.GetInstanceRequest()) transcode.return_value = { "method": "post", @@ -25009,6 +25118,7 @@ def test_get_instance_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.Instance() + post_with_metadata.return_value = resources.Instance(), metadata client.get_instance( request, @@ -25020,6 +25130,7 @@ def test_get_instance_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_instance_rest_bad_request(request_type=service.CreateInstanceRequest): @@ -25234,10 +25345,13 @@ def test_create_instance_rest_interceptors(null_interceptor): ), mock.patch.object( transports.AlloyDBAdminRestInterceptor, "post_create_instance" ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "post_create_instance_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AlloyDBAdminRestInterceptor, "pre_create_instance" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.CreateInstanceRequest.pb(service.CreateInstanceRequest()) transcode.return_value = { "method": "post", @@ -25259,6 +25373,7 @@ def test_create_instance_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_instance( request, @@ -25270,6 +25385,7 @@ def test_create_instance_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_secondary_instance_rest_bad_request( @@ -25486,10 +25602,14 @@ def test_create_secondary_instance_rest_interceptors(null_interceptor): ), mock.patch.object( transports.AlloyDBAdminRestInterceptor, "post_create_secondary_instance" ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, + "post_create_secondary_instance_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AlloyDBAdminRestInterceptor, "pre_create_secondary_instance" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.CreateSecondaryInstanceRequest.pb( service.CreateSecondaryInstanceRequest() ) @@ -25513,6 +25633,7 @@ def test_create_secondary_instance_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_secondary_instance( request, @@ -25524,6 +25645,7 @@ def test_create_secondary_instance_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_batch_create_instances_rest_bad_request( @@ -25752,10 +25874,14 @@ def test_batch_create_instances_rest_interceptors(null_interceptor): ), mock.patch.object( transports.AlloyDBAdminRestInterceptor, "post_batch_create_instances" ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, + "post_batch_create_instances_with_metadata", + ) as post_with_metadata, 
mock.patch.object( transports.AlloyDBAdminRestInterceptor, "pre_batch_create_instances" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.BatchCreateInstancesRequest.pb( service.BatchCreateInstancesRequest() ) @@ -25779,6 +25905,7 @@ def test_batch_create_instances_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.batch_create_instances( request, @@ -25790,6 +25917,7 @@ def test_batch_create_instances_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_instance_rest_bad_request(request_type=service.UpdateInstanceRequest): @@ -26012,10 +26140,13 @@ def test_update_instance_rest_interceptors(null_interceptor): ), mock.patch.object( transports.AlloyDBAdminRestInterceptor, "post_update_instance" ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "post_update_instance_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AlloyDBAdminRestInterceptor, "pre_update_instance" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.UpdateInstanceRequest.pb(service.UpdateInstanceRequest()) transcode.return_value = { "method": "post", @@ -26037,6 +26168,7 @@ def test_update_instance_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.update_instance( request, @@ -26048,6 +26180,7 @@ def test_update_instance_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_instance_rest_bad_request(request_type=service.DeleteInstanceRequest): @@ -26130,10 +26263,13 @@ def test_delete_instance_rest_interceptors(null_interceptor): ), mock.patch.object( transports.AlloyDBAdminRestInterceptor, "post_delete_instance" ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "post_delete_instance_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AlloyDBAdminRestInterceptor, "pre_delete_instance" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.DeleteInstanceRequest.pb(service.DeleteInstanceRequest()) transcode.return_value = { "method": "post", @@ -26155,6 +26291,7 @@ def test_delete_instance_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.delete_instance( request, @@ -26166,6 +26303,7 @@ def test_delete_instance_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_failover_instance_rest_bad_request( @@ -26250,10 +26388,13 @@ def test_failover_instance_rest_interceptors(null_interceptor): ), mock.patch.object( transports.AlloyDBAdminRestInterceptor, "post_failover_instance" ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "post_failover_instance_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AlloyDBAdminRestInterceptor, "pre_failover_instance" ) as pre: pre.assert_not_called() 
post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.FailoverInstanceRequest.pb( service.FailoverInstanceRequest() ) @@ -26277,6 +26418,7 @@ def test_failover_instance_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.failover_instance( request, @@ -26288,6 +26430,7 @@ def test_failover_instance_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_inject_fault_rest_bad_request(request_type=service.InjectFaultRequest): @@ -26370,10 +26513,13 @@ def test_inject_fault_rest_interceptors(null_interceptor): ), mock.patch.object( transports.AlloyDBAdminRestInterceptor, "post_inject_fault" ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "post_inject_fault_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AlloyDBAdminRestInterceptor, "pre_inject_fault" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.InjectFaultRequest.pb(service.InjectFaultRequest()) transcode.return_value = { "method": "post", @@ -26395,6 +26541,7 @@ def test_inject_fault_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.inject_fault( request, @@ -26406,6 +26553,7 @@ def test_inject_fault_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_restart_instance_rest_bad_request(request_type=service.RestartInstanceRequest): @@ -26488,10 +26636,13 @@ def test_restart_instance_rest_interceptors(null_interceptor): ), mock.patch.object( transports.AlloyDBAdminRestInterceptor, "post_restart_instance" ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "post_restart_instance_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AlloyDBAdminRestInterceptor, "pre_restart_instance" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.RestartInstanceRequest.pb(service.RestartInstanceRequest()) transcode.return_value = { "method": "post", @@ -26513,6 +26664,7 @@ def test_restart_instance_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.restart_instance( request, @@ -26524,6 +26676,7 @@ def test_restart_instance_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_execute_sql_rest_bad_request(request_type=service.ExecuteSqlRequest): @@ -26607,10 +26760,13 @@ def test_execute_sql_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AlloyDBAdminRestInterceptor, "post_execute_sql" ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "post_execute_sql_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AlloyDBAdminRestInterceptor, "pre_execute_sql" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.ExecuteSqlRequest.pb(service.ExecuteSqlRequest()) 
transcode.return_value = { "method": "post", @@ -26632,6 +26788,7 @@ def test_execute_sql_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = service.ExecuteSqlResponse() + post_with_metadata.return_value = service.ExecuteSqlResponse(), metadata client.execute_sql( request, @@ -26643,6 +26800,7 @@ def test_execute_sql_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_backups_rest_bad_request(request_type=service.ListBackupsRequest): @@ -26727,10 +26885,13 @@ def test_list_backups_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AlloyDBAdminRestInterceptor, "post_list_backups" ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "post_list_backups_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AlloyDBAdminRestInterceptor, "pre_list_backups" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.ListBackupsRequest.pb(service.ListBackupsRequest()) transcode.return_value = { "method": "post", @@ -26754,6 +26915,7 @@ def test_list_backups_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = service.ListBackupsResponse() + post_with_metadata.return_value = service.ListBackupsResponse(), metadata client.list_backups( request, @@ -26765,6 +26927,7 @@ def test_list_backups_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_backup_rest_bad_request(request_type=service.GetBackupRequest): @@ -26871,10 +27034,13 @@ def test_get_backup_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AlloyDBAdminRestInterceptor, "post_get_backup" ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "post_get_backup_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AlloyDBAdminRestInterceptor, "pre_get_backup" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.GetBackupRequest.pb(service.GetBackupRequest()) transcode.return_value = { "method": "post", @@ -26896,6 +27062,7 @@ def test_get_backup_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.Backup() + post_with_metadata.return_value = resources.Backup(), metadata client.get_backup( request, @@ -26907,6 +27074,7 @@ def test_get_backup_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_backup_rest_bad_request(request_type=service.CreateBackupRequest): @@ -27080,10 +27248,13 @@ def test_create_backup_rest_interceptors(null_interceptor): ), mock.patch.object( transports.AlloyDBAdminRestInterceptor, "post_create_backup" ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "post_create_backup_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AlloyDBAdminRestInterceptor, "pre_create_backup" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.CreateBackupRequest.pb(service.CreateBackupRequest()) transcode.return_value = { "method": "post", @@ -27105,6 +27276,7 @@ def test_create_backup_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata 
post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_backup( request, @@ -27116,6 +27288,7 @@ def test_create_backup_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_backup_rest_bad_request(request_type=service.UpdateBackupRequest): @@ -27293,10 +27466,13 @@ def test_update_backup_rest_interceptors(null_interceptor): ), mock.patch.object( transports.AlloyDBAdminRestInterceptor, "post_update_backup" ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "post_update_backup_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AlloyDBAdminRestInterceptor, "pre_update_backup" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.UpdateBackupRequest.pb(service.UpdateBackupRequest()) transcode.return_value = { "method": "post", @@ -27318,6 +27494,7 @@ def test_update_backup_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.update_backup( request, @@ -27329,6 +27506,7 @@ def test_update_backup_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_backup_rest_bad_request(request_type=service.DeleteBackupRequest): @@ -27407,10 +27585,13 @@ def test_delete_backup_rest_interceptors(null_interceptor): ), mock.patch.object( transports.AlloyDBAdminRestInterceptor, "post_delete_backup" ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "post_delete_backup_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AlloyDBAdminRestInterceptor, "pre_delete_backup" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.DeleteBackupRequest.pb(service.DeleteBackupRequest()) transcode.return_value = { "method": "post", @@ -27432,6 +27613,7 @@ def test_delete_backup_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.delete_backup( request, @@ -27443,6 +27625,7 @@ def test_delete_backup_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_supported_database_flags_rest_bad_request( @@ -27527,10 +27710,14 @@ def test_list_supported_database_flags_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AlloyDBAdminRestInterceptor, "post_list_supported_database_flags" ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, + "post_list_supported_database_flags_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AlloyDBAdminRestInterceptor, "pre_list_supported_database_flags" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.ListSupportedDatabaseFlagsRequest.pb( service.ListSupportedDatabaseFlagsRequest() ) @@ -27556,6 +27743,10 @@ def test_list_supported_database_flags_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = service.ListSupportedDatabaseFlagsResponse() + 
post_with_metadata.return_value = ( + service.ListSupportedDatabaseFlagsResponse(), + metadata, + ) client.list_supported_database_flags( request, @@ -27567,6 +27758,7 @@ def test_list_supported_database_flags_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_generate_client_certificate_rest_bad_request( @@ -27655,10 +27847,14 @@ def test_generate_client_certificate_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AlloyDBAdminRestInterceptor, "post_generate_client_certificate" ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, + "post_generate_client_certificate_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AlloyDBAdminRestInterceptor, "pre_generate_client_certificate" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.GenerateClientCertificateRequest.pb( service.GenerateClientCertificateRequest() ) @@ -27684,6 +27880,10 @@ def test_generate_client_certificate_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = service.GenerateClientCertificateResponse() + post_with_metadata.return_value = ( + service.GenerateClientCertificateResponse(), + metadata, + ) client.generate_client_certificate( request, @@ -27695,6 +27895,7 @@ def test_generate_client_certificate_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_connection_info_rest_bad_request( @@ -27793,10 +27994,13 @@ def test_get_connection_info_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AlloyDBAdminRestInterceptor, "post_get_connection_info" ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "post_get_connection_info_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AlloyDBAdminRestInterceptor, "pre_get_connection_info" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.GetConnectionInfoRequest.pb( service.GetConnectionInfoRequest() ) @@ -27820,6 +28024,7 @@ def test_get_connection_info_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.ConnectionInfo() + post_with_metadata.return_value = resources.ConnectionInfo(), metadata client.get_connection_info( request, @@ -27831,6 +28036,7 @@ def test_get_connection_info_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_users_rest_bad_request(request_type=service.ListUsersRequest): @@ -27915,10 +28121,13 @@ def test_list_users_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AlloyDBAdminRestInterceptor, "post_list_users" ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "post_list_users_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AlloyDBAdminRestInterceptor, "pre_list_users" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.ListUsersRequest.pb(service.ListUsersRequest()) transcode.return_value = { "method": "post", @@ -27940,6 +28149,7 @@ def test_list_users_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = service.ListUsersResponse() 
+ post_with_metadata.return_value = service.ListUsersResponse(), metadata client.list_users( request, @@ -27951,6 +28161,7 @@ def test_list_users_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_user_rest_bad_request(request_type=service.GetUserRequest): @@ -28045,10 +28256,13 @@ def test_get_user_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AlloyDBAdminRestInterceptor, "post_get_user" ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "post_get_user_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AlloyDBAdminRestInterceptor, "pre_get_user" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.GetUserRequest.pb(service.GetUserRequest()) transcode.return_value = { "method": "post", @@ -28070,6 +28284,7 @@ def test_get_user_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.User() + post_with_metadata.return_value = resources.User(), metadata client.get_user( request, @@ -28081,6 +28296,7 @@ def test_get_user_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_user_rest_bad_request(request_type=service.CreateUserRequest): @@ -28245,10 +28461,13 @@ def test_create_user_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AlloyDBAdminRestInterceptor, "post_create_user" ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "post_create_user_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AlloyDBAdminRestInterceptor, "pre_create_user" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.CreateUserRequest.pb(service.CreateUserRequest()) transcode.return_value = { "method": "post", @@ -28270,6 +28489,7 @@ def test_create_user_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.User() + post_with_metadata.return_value = resources.User(), metadata client.create_user( request, @@ -28281,6 +28501,7 @@ def test_create_user_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_user_rest_bad_request(request_type=service.UpdateUserRequest): @@ -28453,10 +28674,13 @@ def test_update_user_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AlloyDBAdminRestInterceptor, "post_update_user" ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "post_update_user_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AlloyDBAdminRestInterceptor, "pre_update_user" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.UpdateUserRequest.pb(service.UpdateUserRequest()) transcode.return_value = { "method": "post", @@ -28478,6 +28702,7 @@ def test_update_user_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.User() + post_with_metadata.return_value = resources.User(), metadata client.update_user( request, @@ -28489,6 +28714,7 @@ def test_update_user_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + 
post_with_metadata.assert_called_once() def test_delete_user_rest_bad_request(request_type=service.DeleteUserRequest): @@ -28680,10 +28906,13 @@ def test_list_databases_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AlloyDBAdminRestInterceptor, "post_list_databases" ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "post_list_databases_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AlloyDBAdminRestInterceptor, "pre_list_databases" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.ListDatabasesRequest.pb(service.ListDatabasesRequest()) transcode.return_value = { "method": "post", @@ -28707,6 +28936,7 @@ def test_list_databases_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = service.ListDatabasesResponse() + post_with_metadata.return_value = service.ListDatabasesResponse(), metadata client.list_databases( request, @@ -28718,6 +28948,7 @@ def test_list_databases_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationRequest): diff --git a/packages/google-cloud-api-gateway/CHANGELOG.md b/packages/google-cloud-api-gateway/CHANGELOG.md index 7ffa45c202b4..5092f703c161 100644 --- a/packages/google-cloud-api-gateway/CHANGELOG.md +++ b/packages/google-cloud-api-gateway/CHANGELOG.md @@ -1,5 +1,13 @@ # Changelog +## [1.12.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-api-gateway-v1.11.0...google-cloud-api-gateway-v1.12.0) (2025-02-12) + + +### Features + +* Add REST Interceptors which support reading metadata ([a961bc0](https://github.com/googleapis/google-cloud-python/commit/a961bc029201b72fc4923490aeb3d82781853e6a)) +* Add support for reading selective GAPIC generation methods from service YAML ([a961bc0](https://github.com/googleapis/google-cloud-python/commit/a961bc029201b72fc4923490aeb3d82781853e6a)) + ## [1.11.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-api-gateway-v1.10.1...google-cloud-api-gateway-v1.11.0) (2024-12-12) diff --git a/packages/google-cloud-api-gateway/google/cloud/apigateway/gapic_version.py b/packages/google-cloud-api-gateway/google/cloud/apigateway/gapic_version.py index 50d842f376d0..739fdfae141c 100644 --- a/packages/google-cloud-api-gateway/google/cloud/apigateway/gapic_version.py +++ b/packages/google-cloud-api-gateway/google/cloud/apigateway/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.11.0" # {x-release-please-version} +__version__ = "1.12.0" # {x-release-please-version} diff --git a/packages/google-cloud-api-gateway/google/cloud/apigateway_v1/gapic_version.py b/packages/google-cloud-api-gateway/google/cloud/apigateway_v1/gapic_version.py index 50d842f376d0..739fdfae141c 100644 --- a/packages/google-cloud-api-gateway/google/cloud/apigateway_v1/gapic_version.py +++ b/packages/google-cloud-api-gateway/google/cloud/apigateway_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "1.11.0" # {x-release-please-version} +__version__ = "1.12.0" # {x-release-please-version} diff --git a/packages/google-cloud-api-gateway/google/cloud/apigateway_v1/services/api_gateway_service/client.py b/packages/google-cloud-api-gateway/google/cloud/apigateway_v1/services/api_gateway_service/client.py index d32a2005b0a7..25cedbc25fc3 100644 --- a/packages/google-cloud-api-gateway/google/cloud/apigateway_v1/services/api_gateway_service/client.py +++ b/packages/google-cloud-api-gateway/google/cloud/apigateway_v1/services/api_gateway_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -585,6 +587,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. diff --git a/packages/google-cloud-api-gateway/google/cloud/apigateway_v1/services/api_gateway_service/transports/rest.py b/packages/google-cloud-api-gateway/google/cloud/apigateway_v1/services/api_gateway_service/transports/rest.py index ca7f2fcc2254..8c83e8c7c341 100644 --- a/packages/google-cloud-api-gateway/google/cloud/apigateway_v1/services/api_gateway_service/transports/rest.py +++ b/packages/google-cloud-api-gateway/google/cloud/apigateway_v1/services/api_gateway_service/transports/rest.py @@ -212,12 +212,35 @@ def post_create_api( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_api - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_api_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ApiGatewayService server but before - it is returned to user code. + it is returned to user code. This `post_create_api` interceptor runs + before the `post_create_api_with_metadata` interceptor. """ return response + def post_create_api_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_api + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ApiGatewayService server but before it is returned to user code. + + We recommend only using this `post_create_api_with_metadata` + interceptor in new development instead of the `post_create_api` interceptor. + When both interceptors are used, this `post_create_api_with_metadata` interceptor runs after the + `post_create_api` interceptor. 
The (possibly modified) response returned by + `post_create_api` will be passed to + `post_create_api_with_metadata`. + """ + return response, metadata + def pre_create_api_config( self, request: apigateway.CreateApiConfigRequest, @@ -237,12 +260,35 @@ def post_create_api_config( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_api_config - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_api_config_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ApiGatewayService server but before - it is returned to user code. + it is returned to user code. This `post_create_api_config` interceptor runs + before the `post_create_api_config_with_metadata` interceptor. """ return response + def post_create_api_config_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_api_config + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ApiGatewayService server but before it is returned to user code. + + We recommend only using this `post_create_api_config_with_metadata` + interceptor in new development instead of the `post_create_api_config` interceptor. + When both interceptors are used, this `post_create_api_config_with_metadata` interceptor runs after the + `post_create_api_config` interceptor. The (possibly modified) response returned by + `post_create_api_config` will be passed to + `post_create_api_config_with_metadata`. + """ + return response, metadata + def pre_create_gateway( self, request: apigateway.CreateGatewayRequest, @@ -262,12 +308,35 @@ def post_create_gateway( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_gateway - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_gateway_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ApiGatewayService server but before - it is returned to user code. + it is returned to user code. This `post_create_gateway` interceptor runs + before the `post_create_gateway_with_metadata` interceptor. """ return response + def post_create_gateway_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_gateway + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ApiGatewayService server but before it is returned to user code. + + We recommend only using this `post_create_gateway_with_metadata` + interceptor in new development instead of the `post_create_gateway` interceptor. + When both interceptors are used, this `post_create_gateway_with_metadata` interceptor runs after the + `post_create_gateway` interceptor. The (possibly modified) response returned by + `post_create_gateway` will be passed to + `post_create_gateway_with_metadata`. + """ + return response, metadata + def pre_delete_api( self, request: apigateway.DeleteApiRequest, @@ -285,12 +354,35 @@ def post_delete_api( ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_api - Override in a subclass to manipulate the response + DEPRECATED. 
Please use the `post_delete_api_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ApiGatewayService server but before - it is returned to user code. + it is returned to user code. This `post_delete_api` interceptor runs + before the `post_delete_api_with_metadata` interceptor. """ return response + def post_delete_api_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_api + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ApiGatewayService server but before it is returned to user code. + + We recommend only using this `post_delete_api_with_metadata` + interceptor in new development instead of the `post_delete_api` interceptor. + When both interceptors are used, this `post_delete_api_with_metadata` interceptor runs after the + `post_delete_api` interceptor. The (possibly modified) response returned by + `post_delete_api` will be passed to + `post_delete_api_with_metadata`. + """ + return response, metadata + def pre_delete_api_config( self, request: apigateway.DeleteApiConfigRequest, @@ -310,12 +402,35 @@ def post_delete_api_config( ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_api_config - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_api_config_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ApiGatewayService server but before - it is returned to user code. + it is returned to user code. This `post_delete_api_config` interceptor runs + before the `post_delete_api_config_with_metadata` interceptor. """ return response + def post_delete_api_config_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_api_config + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ApiGatewayService server but before it is returned to user code. + + We recommend only using this `post_delete_api_config_with_metadata` + interceptor in new development instead of the `post_delete_api_config` interceptor. + When both interceptors are used, this `post_delete_api_config_with_metadata` interceptor runs after the + `post_delete_api_config` interceptor. The (possibly modified) response returned by + `post_delete_api_config` will be passed to + `post_delete_api_config_with_metadata`. + """ + return response, metadata + def pre_delete_gateway( self, request: apigateway.DeleteGatewayRequest, @@ -335,12 +450,35 @@ def post_delete_gateway( ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_gateway - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_gateway_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ApiGatewayService server but before - it is returned to user code. + it is returned to user code. This `post_delete_gateway` interceptor runs + before the `post_delete_gateway_with_metadata` interceptor. 
""" return response + def post_delete_gateway_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_gateway + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ApiGatewayService server but before it is returned to user code. + + We recommend only using this `post_delete_gateway_with_metadata` + interceptor in new development instead of the `post_delete_gateway` interceptor. + When both interceptors are used, this `post_delete_gateway_with_metadata` interceptor runs after the + `post_delete_gateway` interceptor. The (possibly modified) response returned by + `post_delete_gateway` will be passed to + `post_delete_gateway_with_metadata`. + """ + return response, metadata + def pre_get_api( self, request: apigateway.GetApiRequest, @@ -356,12 +494,35 @@ def pre_get_api( def post_get_api(self, response: apigateway.Api) -> apigateway.Api: """Post-rpc interceptor for get_api - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_api_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ApiGatewayService server but before - it is returned to user code. + it is returned to user code. This `post_get_api` interceptor runs + before the `post_get_api_with_metadata` interceptor. """ return response + def post_get_api_with_metadata( + self, + response: apigateway.Api, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[apigateway.Api, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_api + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ApiGatewayService server but before it is returned to user code. + + We recommend only using this `post_get_api_with_metadata` + interceptor in new development instead of the `post_get_api` interceptor. + When both interceptors are used, this `post_get_api_with_metadata` interceptor runs after the + `post_get_api` interceptor. The (possibly modified) response returned by + `post_get_api` will be passed to + `post_get_api_with_metadata`. + """ + return response, metadata + def pre_get_api_config( self, request: apigateway.GetApiConfigRequest, @@ -379,12 +540,35 @@ def post_get_api_config( ) -> apigateway.ApiConfig: """Post-rpc interceptor for get_api_config - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_api_config_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ApiGatewayService server but before - it is returned to user code. + it is returned to user code. This `post_get_api_config` interceptor runs + before the `post_get_api_config_with_metadata` interceptor. """ return response + def post_get_api_config_with_metadata( + self, + response: apigateway.ApiConfig, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[apigateway.ApiConfig, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_api_config + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ApiGatewayService server but before it is returned to user code. 
+ + We recommend only using this `post_get_api_config_with_metadata` + interceptor in new development instead of the `post_get_api_config` interceptor. + When both interceptors are used, this `post_get_api_config_with_metadata` interceptor runs after the + `post_get_api_config` interceptor. The (possibly modified) response returned by + `post_get_api_config` will be passed to + `post_get_api_config_with_metadata`. + """ + return response, metadata + def pre_get_gateway( self, request: apigateway.GetGatewayRequest, @@ -400,12 +584,35 @@ def pre_get_gateway( def post_get_gateway(self, response: apigateway.Gateway) -> apigateway.Gateway: """Post-rpc interceptor for get_gateway - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_gateway_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ApiGatewayService server but before - it is returned to user code. + it is returned to user code. This `post_get_gateway` interceptor runs + before the `post_get_gateway_with_metadata` interceptor. """ return response + def post_get_gateway_with_metadata( + self, + response: apigateway.Gateway, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[apigateway.Gateway, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_gateway + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ApiGatewayService server but before it is returned to user code. + + We recommend only using this `post_get_gateway_with_metadata` + interceptor in new development instead of the `post_get_gateway` interceptor. + When both interceptors are used, this `post_get_gateway_with_metadata` interceptor runs after the + `post_get_gateway` interceptor. The (possibly modified) response returned by + `post_get_gateway` will be passed to + `post_get_gateway_with_metadata`. + """ + return response, metadata + def pre_list_api_configs( self, request: apigateway.ListApiConfigsRequest, @@ -425,12 +632,37 @@ def post_list_api_configs( ) -> apigateway.ListApiConfigsResponse: """Post-rpc interceptor for list_api_configs - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_api_configs_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ApiGatewayService server but before - it is returned to user code. + it is returned to user code. This `post_list_api_configs` interceptor runs + before the `post_list_api_configs_with_metadata` interceptor. """ return response + def post_list_api_configs_with_metadata( + self, + response: apigateway.ListApiConfigsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + apigateway.ListApiConfigsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_api_configs + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ApiGatewayService server but before it is returned to user code. + + We recommend only using this `post_list_api_configs_with_metadata` + interceptor in new development instead of the `post_list_api_configs` interceptor. + When both interceptors are used, this `post_list_api_configs_with_metadata` interceptor runs after the + `post_list_api_configs` interceptor. The (possibly modified) response returned by + `post_list_api_configs` will be passed to + `post_list_api_configs_with_metadata`. 
+ """ + return response, metadata + def pre_list_apis( self, request: apigateway.ListApisRequest, @@ -448,12 +680,35 @@ def post_list_apis( ) -> apigateway.ListApisResponse: """Post-rpc interceptor for list_apis - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_apis_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ApiGatewayService server but before - it is returned to user code. + it is returned to user code. This `post_list_apis` interceptor runs + before the `post_list_apis_with_metadata` interceptor. """ return response + def post_list_apis_with_metadata( + self, + response: apigateway.ListApisResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[apigateway.ListApisResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_apis + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ApiGatewayService server but before it is returned to user code. + + We recommend only using this `post_list_apis_with_metadata` + interceptor in new development instead of the `post_list_apis` interceptor. + When both interceptors are used, this `post_list_apis_with_metadata` interceptor runs after the + `post_list_apis` interceptor. The (possibly modified) response returned by + `post_list_apis` will be passed to + `post_list_apis_with_metadata`. + """ + return response, metadata + def pre_list_gateways( self, request: apigateway.ListGatewaysRequest, @@ -471,12 +726,37 @@ def post_list_gateways( ) -> apigateway.ListGatewaysResponse: """Post-rpc interceptor for list_gateways - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_gateways_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ApiGatewayService server but before - it is returned to user code. + it is returned to user code. This `post_list_gateways` interceptor runs + before the `post_list_gateways_with_metadata` interceptor. """ return response + def post_list_gateways_with_metadata( + self, + response: apigateway.ListGatewaysResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + apigateway.ListGatewaysResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_gateways + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ApiGatewayService server but before it is returned to user code. + + We recommend only using this `post_list_gateways_with_metadata` + interceptor in new development instead of the `post_list_gateways` interceptor. + When both interceptors are used, this `post_list_gateways_with_metadata` interceptor runs after the + `post_list_gateways` interceptor. The (possibly modified) response returned by + `post_list_gateways` will be passed to + `post_list_gateways_with_metadata`. + """ + return response, metadata + def pre_update_api( self, request: apigateway.UpdateApiRequest, @@ -494,12 +774,35 @@ def post_update_api( ) -> operations_pb2.Operation: """Post-rpc interceptor for update_api - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_api_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ApiGatewayService server but before - it is returned to user code. + it is returned to user code. 
This `post_update_api` interceptor runs + before the `post_update_api_with_metadata` interceptor. """ return response + def post_update_api_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_api + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ApiGatewayService server but before it is returned to user code. + + We recommend only using this `post_update_api_with_metadata` + interceptor in new development instead of the `post_update_api` interceptor. + When both interceptors are used, this `post_update_api_with_metadata` interceptor runs after the + `post_update_api` interceptor. The (possibly modified) response returned by + `post_update_api` will be passed to + `post_update_api_with_metadata`. + """ + return response, metadata + def pre_update_api_config( self, request: apigateway.UpdateApiConfigRequest, @@ -519,12 +822,35 @@ def post_update_api_config( ) -> operations_pb2.Operation: """Post-rpc interceptor for update_api_config - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_api_config_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ApiGatewayService server but before - it is returned to user code. + it is returned to user code. This `post_update_api_config` interceptor runs + before the `post_update_api_config_with_metadata` interceptor. """ return response + def post_update_api_config_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_api_config + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ApiGatewayService server but before it is returned to user code. + + We recommend only using this `post_update_api_config_with_metadata` + interceptor in new development instead of the `post_update_api_config` interceptor. + When both interceptors are used, this `post_update_api_config_with_metadata` interceptor runs after the + `post_update_api_config` interceptor. The (possibly modified) response returned by + `post_update_api_config` will be passed to + `post_update_api_config_with_metadata`. + """ + return response, metadata + def pre_update_gateway( self, request: apigateway.UpdateGatewayRequest, @@ -544,12 +870,35 @@ def post_update_gateway( ) -> operations_pb2.Operation: """Post-rpc interceptor for update_gateway - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_gateway_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ApiGatewayService server but before - it is returned to user code. + it is returned to user code. This `post_update_gateway` interceptor runs + before the `post_update_gateway_with_metadata` interceptor. 
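# --- Illustrative sketch (not part of the generated patch) ---
# The `post_*_with_metadata` hooks added in this patch receive both the decoded
# response and the trailing HTTP response headers, so a subclass can read
# metadata that the older `post_*` interceptors never exposed. A minimal sketch,
# assuming the generated `ApiGatewayServiceRestInterceptor` and
# `apigateway.Gateway` types shown above; the logger name and import paths
# follow the usual generated package layout and are assumptions here.
import logging
from typing import Sequence, Tuple, Union

from google.cloud.apigateway_v1.services.api_gateway_service import transports
from google.cloud.apigateway_v1.types import apigateway


class HeaderLoggingInterceptor(transports.ApiGatewayServiceRestInterceptor):
    def post_get_gateway_with_metadata(
        self,
        response: apigateway.Gateway,
        metadata: Sequence[Tuple[str, Union[str, bytes]]],
    ) -> Tuple[apigateway.Gateway, Sequence[Tuple[str, Union[str, bytes]]]]:
        # `metadata` is built from the HTTP response headers, e.g.
        # [("content-type", "application/json"), ...].
        logging.getLogger(__name__).debug("get_gateway headers: %s", dict(metadata))
        # Returning the pair unchanged preserves the default behaviour.
        return response, metadata

# The interceptor would typically be passed to the REST transport when the
# client is constructed; that wiring is not shown in this hunk.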
""" return response + def post_update_gateway_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_gateway + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ApiGatewayService server but before it is returned to user code. + + We recommend only using this `post_update_gateway_with_metadata` + interceptor in new development instead of the `post_update_gateway` interceptor. + When both interceptors are used, this `post_update_gateway_with_metadata` interceptor runs after the + `post_update_gateway` interceptor. The (possibly modified) response returned by + `post_update_gateway` will be passed to + `post_update_gateway_with_metadata`. + """ + return response, metadata + @dataclasses.dataclass class ApiGatewayServiceRestStub: @@ -817,6 +1166,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_api(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_api_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -968,6 +1321,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_api_config(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_api_config_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1117,6 +1474,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_gateway(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_gateway_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1259,6 +1620,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_api(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_api_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1404,6 +1769,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_api_config(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_api_config_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1547,6 +1916,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_gateway(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_gateway_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1694,6 +2067,10 @@ def __call__( json_format.Parse(response.content, pb_resp, 
ignore_unknown_fields=True) resp = self._interceptor.post_get_api(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_api_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1838,6 +2215,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_api_config(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_api_config_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1985,6 +2366,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_gateway(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_gateway_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2131,6 +2516,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_api_configs(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_api_configs_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2276,6 +2665,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_apis(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_apis_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2419,6 +2812,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_gateways(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_gateways_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2567,6 +2964,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_api(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_api_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2718,6 +3119,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_api_config(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_api_config_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2867,6 +3272,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_gateway(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_gateway_with_metadata( + resp, 
response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-api-gateway/samples/generated_samples/snippet_metadata_google.cloud.apigateway.v1.json b/packages/google-cloud-api-gateway/samples/generated_samples/snippet_metadata_google.cloud.apigateway.v1.json index b6f087c63c0a..c93ee26a5f89 100644 --- a/packages/google-cloud-api-gateway/samples/generated_samples/snippet_metadata_google.cloud.apigateway.v1.json +++ b/packages/google-cloud-api-gateway/samples/generated_samples/snippet_metadata_google.cloud.apigateway.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-api-gateway", - "version": "1.11.0" + "version": "1.12.0" }, "snippets": [ { diff --git a/packages/google-cloud-api-gateway/tests/unit/gapic/apigateway_v1/test_api_gateway_service.py b/packages/google-cloud-api-gateway/tests/unit/gapic/apigateway_v1/test_api_gateway_service.py index a64e1d0615a4..54bd4fd6bac9 100644 --- a/packages/google-cloud-api-gateway/tests/unit/gapic/apigateway_v1/test_api_gateway_service.py +++ b/packages/google-cloud-api-gateway/tests/unit/gapic/apigateway_v1/test_api_gateway_service.py @@ -73,6 +73,13 @@ ) from google.cloud.apigateway_v1.types import apigateway +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -340,6 +347,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = ApiGatewayServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = ApiGatewayServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -10690,10 +10740,13 @@ def test_list_gateways_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ApiGatewayServiceRestInterceptor, "post_list_gateways" ) as post, mock.patch.object( + transports.ApiGatewayServiceRestInterceptor, "post_list_gateways_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ApiGatewayServiceRestInterceptor, "pre_list_gateways" ) as pre: pre.assert_not_called() post.assert_not_called() + 
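# --- Illustrative sketch (not part of the generated patch) ---
# The `_add_cred_info_for_auth_errors` helper added to the client only acts on
# 401/403/404 responses, and only when the credential exposes `get_cred_info()`
# (available in google-auth>=2.35.0); it then appends the JSON-serialized
# credential info to the error's details. A minimal sketch mirroring the
# parametrized unit test above; the mock credential value is hypothetical.
import json
from unittest import mock

from google.api_core import exceptions as core_exceptions
from google.cloud.apigateway_v1 import ApiGatewayServiceClient

cred = mock.Mock(["get_cred_info"])
cred.get_cred_info = mock.Mock(
    return_value={"credential_type": "service account credentials"}
)
client = ApiGatewayServiceClient(credentials=cred)
client._transport._credentials = cred

error = core_exceptions.GoogleAPICallError("message", details=["foo"])
error.code = 403  # FORBIDDEN: one of the codes the helper handles

client._add_cred_info_for_auth_errors(error)
# The details list now carries the serialized credential info after the
# original entry; a 500 error or a credential without get_cred_info() would
# leave it untouched.
assert error.details == [
    "foo",
    json.dumps({"credential_type": "service account credentials"}),
]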
post_with_metadata.assert_not_called() pb_message = apigateway.ListGatewaysRequest.pb(apigateway.ListGatewaysRequest()) transcode.return_value = { "method": "post", @@ -10717,6 +10770,7 @@ def test_list_gateways_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = apigateway.ListGatewaysResponse() + post_with_metadata.return_value = apigateway.ListGatewaysResponse(), metadata client.list_gateways( request, @@ -10728,6 +10782,7 @@ def test_list_gateways_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_gateway_rest_bad_request(request_type=apigateway.GetGatewayRequest): @@ -10818,10 +10873,13 @@ def test_get_gateway_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ApiGatewayServiceRestInterceptor, "post_get_gateway" ) as post, mock.patch.object( + transports.ApiGatewayServiceRestInterceptor, "post_get_gateway_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ApiGatewayServiceRestInterceptor, "pre_get_gateway" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = apigateway.GetGatewayRequest.pb(apigateway.GetGatewayRequest()) transcode.return_value = { "method": "post", @@ -10843,6 +10901,7 @@ def test_get_gateway_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = apigateway.Gateway() + post_with_metadata.return_value = apigateway.Gateway(), metadata client.get_gateway( request, @@ -10854,6 +10913,7 @@ def test_get_gateway_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_gateway_rest_bad_request(request_type=apigateway.CreateGatewayRequest): @@ -11009,10 +11069,13 @@ def test_create_gateway_rest_interceptors(null_interceptor): ), mock.patch.object( transports.ApiGatewayServiceRestInterceptor, "post_create_gateway" ) as post, mock.patch.object( + transports.ApiGatewayServiceRestInterceptor, "post_create_gateway_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ApiGatewayServiceRestInterceptor, "pre_create_gateway" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = apigateway.CreateGatewayRequest.pb( apigateway.CreateGatewayRequest() ) @@ -11036,6 +11099,7 @@ def test_create_gateway_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_gateway( request, @@ -11047,6 +11111,7 @@ def test_create_gateway_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_gateway_rest_bad_request(request_type=apigateway.UpdateGatewayRequest): @@ -11206,10 +11271,13 @@ def test_update_gateway_rest_interceptors(null_interceptor): ), mock.patch.object( transports.ApiGatewayServiceRestInterceptor, "post_update_gateway" ) as post, mock.patch.object( + transports.ApiGatewayServiceRestInterceptor, "post_update_gateway_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ApiGatewayServiceRestInterceptor, "pre_update_gateway" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = apigateway.UpdateGatewayRequest.pb( 
apigateway.UpdateGatewayRequest() ) @@ -11233,6 +11301,7 @@ def test_update_gateway_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.update_gateway( request, @@ -11244,6 +11313,7 @@ def test_update_gateway_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_gateway_rest_bad_request(request_type=apigateway.DeleteGatewayRequest): @@ -11322,10 +11392,13 @@ def test_delete_gateway_rest_interceptors(null_interceptor): ), mock.patch.object( transports.ApiGatewayServiceRestInterceptor, "post_delete_gateway" ) as post, mock.patch.object( + transports.ApiGatewayServiceRestInterceptor, "post_delete_gateway_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ApiGatewayServiceRestInterceptor, "pre_delete_gateway" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = apigateway.DeleteGatewayRequest.pb( apigateway.DeleteGatewayRequest() ) @@ -11349,6 +11422,7 @@ def test_delete_gateway_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.delete_gateway( request, @@ -11360,6 +11434,7 @@ def test_delete_gateway_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_apis_rest_bad_request(request_type=apigateway.ListApisRequest): @@ -11444,10 +11519,13 @@ def test_list_apis_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ApiGatewayServiceRestInterceptor, "post_list_apis" ) as post, mock.patch.object( + transports.ApiGatewayServiceRestInterceptor, "post_list_apis_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ApiGatewayServiceRestInterceptor, "pre_list_apis" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = apigateway.ListApisRequest.pb(apigateway.ListApisRequest()) transcode.return_value = { "method": "post", @@ -11471,6 +11549,7 @@ def test_list_apis_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = apigateway.ListApisResponse() + post_with_metadata.return_value = apigateway.ListApisResponse(), metadata client.list_apis( request, @@ -11482,6 +11561,7 @@ def test_list_apis_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_api_rest_bad_request(request_type=apigateway.GetApiRequest): @@ -11570,10 +11650,13 @@ def test_get_api_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ApiGatewayServiceRestInterceptor, "post_get_api" ) as post, mock.patch.object( + transports.ApiGatewayServiceRestInterceptor, "post_get_api_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ApiGatewayServiceRestInterceptor, "pre_get_api" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = apigateway.GetApiRequest.pb(apigateway.GetApiRequest()) transcode.return_value = { "method": "post", @@ -11595,6 +11678,7 @@ def test_get_api_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata 
post.return_value = apigateway.Api() + post_with_metadata.return_value = apigateway.Api(), metadata client.get_api( request, @@ -11606,6 +11690,7 @@ def test_get_api_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_api_rest_bad_request(request_type=apigateway.CreateApiRequest): @@ -11760,10 +11845,13 @@ def test_create_api_rest_interceptors(null_interceptor): ), mock.patch.object( transports.ApiGatewayServiceRestInterceptor, "post_create_api" ) as post, mock.patch.object( + transports.ApiGatewayServiceRestInterceptor, "post_create_api_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ApiGatewayServiceRestInterceptor, "pre_create_api" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = apigateway.CreateApiRequest.pb(apigateway.CreateApiRequest()) transcode.return_value = { "method": "post", @@ -11785,6 +11873,7 @@ def test_create_api_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_api( request, @@ -11796,6 +11885,7 @@ def test_create_api_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_api_rest_bad_request(request_type=apigateway.UpdateApiRequest): @@ -11950,10 +12040,13 @@ def test_update_api_rest_interceptors(null_interceptor): ), mock.patch.object( transports.ApiGatewayServiceRestInterceptor, "post_update_api" ) as post, mock.patch.object( + transports.ApiGatewayServiceRestInterceptor, "post_update_api_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ApiGatewayServiceRestInterceptor, "pre_update_api" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = apigateway.UpdateApiRequest.pb(apigateway.UpdateApiRequest()) transcode.return_value = { "method": "post", @@ -11975,6 +12068,7 @@ def test_update_api_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.update_api( request, @@ -11986,6 +12080,7 @@ def test_update_api_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_api_rest_bad_request(request_type=apigateway.DeleteApiRequest): @@ -12064,10 +12159,13 @@ def test_delete_api_rest_interceptors(null_interceptor): ), mock.patch.object( transports.ApiGatewayServiceRestInterceptor, "post_delete_api" ) as post, mock.patch.object( + transports.ApiGatewayServiceRestInterceptor, "post_delete_api_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ApiGatewayServiceRestInterceptor, "pre_delete_api" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = apigateway.DeleteApiRequest.pb(apigateway.DeleteApiRequest()) transcode.return_value = { "method": "post", @@ -12089,6 +12187,7 @@ def test_delete_api_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.delete_api( request, @@ -12100,6 +12199,7 @@ def 
test_delete_api_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_api_configs_rest_bad_request( @@ -12186,10 +12286,14 @@ def test_list_api_configs_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ApiGatewayServiceRestInterceptor, "post_list_api_configs" ) as post, mock.patch.object( + transports.ApiGatewayServiceRestInterceptor, + "post_list_api_configs_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ApiGatewayServiceRestInterceptor, "pre_list_api_configs" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = apigateway.ListApiConfigsRequest.pb( apigateway.ListApiConfigsRequest() ) @@ -12215,6 +12319,7 @@ def test_list_api_configs_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = apigateway.ListApiConfigsResponse() + post_with_metadata.return_value = apigateway.ListApiConfigsResponse(), metadata client.list_api_configs( request, @@ -12226,6 +12331,7 @@ def test_list_api_configs_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_api_config_rest_bad_request(request_type=apigateway.GetApiConfigRequest): @@ -12320,10 +12426,13 @@ def test_get_api_config_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ApiGatewayServiceRestInterceptor, "post_get_api_config" ) as post, mock.patch.object( + transports.ApiGatewayServiceRestInterceptor, "post_get_api_config_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ApiGatewayServiceRestInterceptor, "pre_get_api_config" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = apigateway.GetApiConfigRequest.pb(apigateway.GetApiConfigRequest()) transcode.return_value = { "method": "post", @@ -12345,6 +12454,7 @@ def test_get_api_config_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = apigateway.ApiConfig() + post_with_metadata.return_value = apigateway.ApiConfig(), metadata client.get_api_config( request, @@ -12356,6 +12466,7 @@ def test_get_api_config_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_api_config_rest_bad_request( @@ -12518,10 +12629,14 @@ def test_create_api_config_rest_interceptors(null_interceptor): ), mock.patch.object( transports.ApiGatewayServiceRestInterceptor, "post_create_api_config" ) as post, mock.patch.object( + transports.ApiGatewayServiceRestInterceptor, + "post_create_api_config_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ApiGatewayServiceRestInterceptor, "pre_create_api_config" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = apigateway.CreateApiConfigRequest.pb( apigateway.CreateApiConfigRequest() ) @@ -12545,6 +12660,7 @@ def test_create_api_config_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_api_config( request, @@ -12556,6 +12672,7 @@ def test_create_api_config_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + 
post_with_metadata.assert_called_once() def test_update_api_config_rest_bad_request( @@ -12726,10 +12843,14 @@ def test_update_api_config_rest_interceptors(null_interceptor): ), mock.patch.object( transports.ApiGatewayServiceRestInterceptor, "post_update_api_config" ) as post, mock.patch.object( + transports.ApiGatewayServiceRestInterceptor, + "post_update_api_config_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ApiGatewayServiceRestInterceptor, "pre_update_api_config" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = apigateway.UpdateApiConfigRequest.pb( apigateway.UpdateApiConfigRequest() ) @@ -12753,6 +12874,7 @@ def test_update_api_config_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.update_api_config( request, @@ -12764,6 +12886,7 @@ def test_update_api_config_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_api_config_rest_bad_request( @@ -12848,10 +12971,14 @@ def test_delete_api_config_rest_interceptors(null_interceptor): ), mock.patch.object( transports.ApiGatewayServiceRestInterceptor, "post_delete_api_config" ) as post, mock.patch.object( + transports.ApiGatewayServiceRestInterceptor, + "post_delete_api_config_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ApiGatewayServiceRestInterceptor, "pre_delete_api_config" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = apigateway.DeleteApiConfigRequest.pb( apigateway.DeleteApiConfigRequest() ) @@ -12875,6 +13002,7 @@ def test_delete_api_config_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.delete_api_config( request, @@ -12886,6 +13014,7 @@ def test_delete_api_config_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_initialize_client_w_rest(): diff --git a/packages/google-cloud-api-keys/CHANGELOG.md b/packages/google-cloud-api-keys/CHANGELOG.md index f49c6c045fd9..f536ffee569b 100644 --- a/packages/google-cloud-api-keys/CHANGELOG.md +++ b/packages/google-cloud-api-keys/CHANGELOG.md @@ -1,5 +1,13 @@ # Changelog +## [0.5.15](https://github.com/googleapis/google-cloud-python/compare/google-cloud-api-keys-v0.5.14...google-cloud-api-keys-v0.5.15) (2025-02-12) + + +### Features + +* Add REST Interceptors which support reading metadata ([a961bc0](https://github.com/googleapis/google-cloud-python/commit/a961bc029201b72fc4923490aeb3d82781853e6a)) +* Add support for reading selective GAPIC generation methods from service YAML ([a961bc0](https://github.com/googleapis/google-cloud-python/commit/a961bc029201b72fc4923490aeb3d82781853e6a)) + ## [0.5.14](https://github.com/googleapis/google-cloud-python/compare/google-cloud-api-keys-v0.5.13...google-cloud-api-keys-v0.5.14) (2024-12-12) diff --git a/packages/google-cloud-api-keys/google/cloud/api_keys/gapic_version.py b/packages/google-cloud-api-keys/google/cloud/api_keys/gapic_version.py index 0f3dcb10f73a..35c9af734238 100644 --- a/packages/google-cloud-api-keys/google/cloud/api_keys/gapic_version.py +++ 
b/packages/google-cloud-api-keys/google/cloud/api_keys/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.5.14" # {x-release-please-version} +__version__ = "0.5.15" # {x-release-please-version} diff --git a/packages/google-cloud-api-keys/google/cloud/api_keys_v2/gapic_version.py b/packages/google-cloud-api-keys/google/cloud/api_keys_v2/gapic_version.py index 0f3dcb10f73a..35c9af734238 100644 --- a/packages/google-cloud-api-keys/google/cloud/api_keys_v2/gapic_version.py +++ b/packages/google-cloud-api-keys/google/cloud/api_keys_v2/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.5.14" # {x-release-please-version} +__version__ = "0.5.15" # {x-release-please-version} diff --git a/packages/google-cloud-api-keys/google/cloud/api_keys_v2/services/api_keys/client.py b/packages/google-cloud-api-keys/google/cloud/api_keys_v2/services/api_keys/client.py index a526124aaff9..6513930b3813 100644 --- a/packages/google-cloud-api-keys/google/cloud/api_keys_v2/services/api_keys/client.py +++ b/packages/google-cloud-api-keys/google/cloud/api_keys_v2/services/api_keys/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -489,6 +491,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -1668,16 +1697,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. 
+ return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-api-keys/google/cloud/api_keys_v2/services/api_keys/transports/rest.py b/packages/google-cloud-api-keys/google/cloud/api_keys_v2/services/api_keys/transports/rest.py index bb2a6dbffd8a..fb4e931065ab 100644 --- a/packages/google-cloud-api-keys/google/cloud/api_keys_v2/services/api_keys/transports/rest.py +++ b/packages/google-cloud-api-keys/google/cloud/api_keys_v2/services/api_keys/transports/rest.py @@ -156,12 +156,35 @@ def post_create_key( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_key - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_key_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ApiKeys server but before - it is returned to user code. + it is returned to user code. This `post_create_key` interceptor runs + before the `post_create_key_with_metadata` interceptor. """ return response + def post_create_key_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_key + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ApiKeys server but before it is returned to user code. + + We recommend only using this `post_create_key_with_metadata` + interceptor in new development instead of the `post_create_key` interceptor. + When both interceptors are used, this `post_create_key_with_metadata` interceptor runs after the + `post_create_key` interceptor. The (possibly modified) response returned by + `post_create_key` will be passed to + `post_create_key_with_metadata`. + """ + return response, metadata + def pre_delete_key( self, request: apikeys.DeleteKeyRequest, @@ -179,12 +202,35 @@ def post_delete_key( ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_key - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_key_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ApiKeys server but before - it is returned to user code. + it is returned to user code. This `post_delete_key` interceptor runs + before the `post_delete_key_with_metadata` interceptor. """ return response + def post_delete_key_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_key + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ApiKeys server but before it is returned to user code. + + We recommend only using this `post_delete_key_with_metadata` + interceptor in new development instead of the `post_delete_key` interceptor. + When both interceptors are used, this `post_delete_key_with_metadata` interceptor runs after the + `post_delete_key` interceptor. The (possibly modified) response returned by + `post_delete_key` will be passed to + `post_delete_key_with_metadata`. 
+ """ + return response, metadata + def pre_get_key( self, request: apikeys.GetKeyRequest, @@ -200,12 +246,33 @@ def pre_get_key( def post_get_key(self, response: resources.Key) -> resources.Key: """Post-rpc interceptor for get_key - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_key_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ApiKeys server but before - it is returned to user code. + it is returned to user code. This `post_get_key` interceptor runs + before the `post_get_key_with_metadata` interceptor. """ return response + def post_get_key_with_metadata( + self, response: resources.Key, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[resources.Key, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_key + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ApiKeys server but before it is returned to user code. + + We recommend only using this `post_get_key_with_metadata` + interceptor in new development instead of the `post_get_key` interceptor. + When both interceptors are used, this `post_get_key_with_metadata` interceptor runs after the + `post_get_key` interceptor. The (possibly modified) response returned by + `post_get_key` will be passed to + `post_get_key_with_metadata`. + """ + return response, metadata + def pre_get_key_string( self, request: apikeys.GetKeyStringRequest, @@ -223,12 +290,35 @@ def post_get_key_string( ) -> apikeys.GetKeyStringResponse: """Post-rpc interceptor for get_key_string - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_key_string_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ApiKeys server but before - it is returned to user code. + it is returned to user code. This `post_get_key_string` interceptor runs + before the `post_get_key_string_with_metadata` interceptor. """ return response + def post_get_key_string_with_metadata( + self, + response: apikeys.GetKeyStringResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[apikeys.GetKeyStringResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_key_string + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ApiKeys server but before it is returned to user code. + + We recommend only using this `post_get_key_string_with_metadata` + interceptor in new development instead of the `post_get_key_string` interceptor. + When both interceptors are used, this `post_get_key_string_with_metadata` interceptor runs after the + `post_get_key_string` interceptor. The (possibly modified) response returned by + `post_get_key_string` will be passed to + `post_get_key_string_with_metadata`. + """ + return response, metadata + def pre_list_keys( self, request: apikeys.ListKeysRequest, @@ -246,12 +336,35 @@ def post_list_keys( ) -> apikeys.ListKeysResponse: """Post-rpc interceptor for list_keys - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_keys_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ApiKeys server but before - it is returned to user code. + it is returned to user code. This `post_list_keys` interceptor runs + before the `post_list_keys_with_metadata` interceptor. 
""" return response + def post_list_keys_with_metadata( + self, + response: apikeys.ListKeysResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[apikeys.ListKeysResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_keys + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ApiKeys server but before it is returned to user code. + + We recommend only using this `post_list_keys_with_metadata` + interceptor in new development instead of the `post_list_keys` interceptor. + When both interceptors are used, this `post_list_keys_with_metadata` interceptor runs after the + `post_list_keys` interceptor. The (possibly modified) response returned by + `post_list_keys` will be passed to + `post_list_keys_with_metadata`. + """ + return response, metadata + def pre_lookup_key( self, request: apikeys.LookupKeyRequest, @@ -269,12 +382,35 @@ def post_lookup_key( ) -> apikeys.LookupKeyResponse: """Post-rpc interceptor for lookup_key - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_lookup_key_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ApiKeys server but before - it is returned to user code. + it is returned to user code. This `post_lookup_key` interceptor runs + before the `post_lookup_key_with_metadata` interceptor. """ return response + def post_lookup_key_with_metadata( + self, + response: apikeys.LookupKeyResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[apikeys.LookupKeyResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for lookup_key + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ApiKeys server but before it is returned to user code. + + We recommend only using this `post_lookup_key_with_metadata` + interceptor in new development instead of the `post_lookup_key` interceptor. + When both interceptors are used, this `post_lookup_key_with_metadata` interceptor runs after the + `post_lookup_key` interceptor. The (possibly modified) response returned by + `post_lookup_key` will be passed to + `post_lookup_key_with_metadata`. + """ + return response, metadata + def pre_undelete_key( self, request: apikeys.UndeleteKeyRequest, @@ -292,12 +428,35 @@ def post_undelete_key( ) -> operations_pb2.Operation: """Post-rpc interceptor for undelete_key - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_undelete_key_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ApiKeys server but before - it is returned to user code. + it is returned to user code. This `post_undelete_key` interceptor runs + before the `post_undelete_key_with_metadata` interceptor. """ return response + def post_undelete_key_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for undelete_key + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ApiKeys server but before it is returned to user code. + + We recommend only using this `post_undelete_key_with_metadata` + interceptor in new development instead of the `post_undelete_key` interceptor. 
+ When both interceptors are used, this `post_undelete_key_with_metadata` interceptor runs after the + `post_undelete_key` interceptor. The (possibly modified) response returned by + `post_undelete_key` will be passed to + `post_undelete_key_with_metadata`. + """ + return response, metadata + def pre_update_key( self, request: apikeys.UpdateKeyRequest, @@ -315,12 +474,35 @@ def post_update_key( ) -> operations_pb2.Operation: """Post-rpc interceptor for update_key - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_key_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ApiKeys server but before - it is returned to user code. + it is returned to user code. This `post_update_key` interceptor runs + before the `post_update_key_with_metadata` interceptor. """ return response + def post_update_key_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_key + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ApiKeys server but before it is returned to user code. + + We recommend only using this `post_update_key_with_metadata` + interceptor in new development instead of the `post_update_key` interceptor. + When both interceptors are used, this `post_update_key_with_metadata` interceptor runs after the + `post_update_key` interceptor. The (possibly modified) response returned by + `post_update_key` will be passed to + `post_update_key_with_metadata`. + """ + return response, metadata + def pre_get_operation( self, request: operations_pb2.GetOperationRequest, @@ -592,6 +774,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_key(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_key_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -733,6 +919,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_key(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_key_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -873,6 +1063,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_key(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_key_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1015,6 +1209,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_key_string(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_key_string_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1155,6 +1353,10 @@ def __call__( json_format.Parse(response.content, pb_resp, 
ignore_unknown_fields=True) resp = self._interceptor.post_list_keys(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_keys_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1295,6 +1497,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_lookup_key(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_lookup_key_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1444,6 +1650,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_undelete_key(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_undelete_key_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1591,6 +1801,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_key(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_key_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-api-keys/samples/generated_samples/snippet_metadata_google.api.apikeys.v2.json b/packages/google-cloud-api-keys/samples/generated_samples/snippet_metadata_google.api.apikeys.v2.json index 0fcb55ecaf4b..03f4da6af6bc 100644 --- a/packages/google-cloud-api-keys/samples/generated_samples/snippet_metadata_google.api.apikeys.v2.json +++ b/packages/google-cloud-api-keys/samples/generated_samples/snippet_metadata_google.api.apikeys.v2.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-api-keys", - "version": "0.5.14" + "version": "0.5.15" }, "snippets": [ { diff --git a/packages/google-cloud-api-keys/tests/unit/gapic/api_keys_v2/test_api_keys.py b/packages/google-cloud-api-keys/tests/unit/gapic/api_keys_v2/test_api_keys.py index 5c44698b7fe6..2f3316e590a2 100644 --- a/packages/google-cloud-api-keys/tests/unit/gapic/api_keys_v2/test_api_keys.py +++ b/packages/google-cloud-api-keys/tests/unit/gapic/api_keys_v2/test_api_keys.py @@ -73,6 +73,13 @@ ) from google.cloud.api_keys_v2.types import apikeys, resources +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -296,6 +303,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
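Illustrative sketch, not part of the generated diff above: the rest.py hunks add metadata-aware `post_*_with_metadata` hooks to `ApiKeysRestInterceptor` and deprecate the plain `post_*` hooks. A minimal subclass below shows the shape of the new hook and how it could be wired into a client; the class and method names and the `interceptor=` transport argument come from this change, while the logging behaviour and the use of Application Default Credentials are assumptions made for illustration only.

import logging
from typing import Sequence, Tuple, Union

from google.cloud import api_keys_v2
from google.cloud.api_keys_v2.services.api_keys.transports.rest import (
    ApiKeysRestInterceptor,
    ApiKeysRestTransport,
)
from google.longrunning import operations_pb2


class LoggingApiKeysInterceptor(ApiKeysRestInterceptor):
    """Reads response metadata via the new hook instead of the deprecated post_create_key."""

    def post_create_key_with_metadata(
        self,
        response: operations_pb2.Operation,
        metadata: Sequence[Tuple[str, Union[str, bytes]]],
    ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]:
        # On the REST transport, `metadata` is built from the HTTP response
        # headers, one (name, value) pair per header.
        for name, value in metadata:
            logging.debug("create_key response header %s=%s", name, value)
        return response, metadata


def make_client() -> api_keys_v2.ApiKeysClient:
    # Assumes Application Default Credentials are available at call time.
    return api_keys_v2.ApiKeysClient(
        transport=ApiKeysRestTransport(interceptor=LoggingApiKeysInterceptor())
    )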
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = ApiKeysClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = ApiKeysClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -5701,10 +5751,13 @@ def test_create_key_rest_interceptors(null_interceptor): ), mock.patch.object( transports.ApiKeysRestInterceptor, "post_create_key" ) as post, mock.patch.object( + transports.ApiKeysRestInterceptor, "post_create_key_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ApiKeysRestInterceptor, "pre_create_key" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = apikeys.CreateKeyRequest.pb(apikeys.CreateKeyRequest()) transcode.return_value = { "method": "post", @@ -5726,6 +5779,7 @@ def test_create_key_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_key( request, @@ -5737,6 +5791,7 @@ def test_create_key_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_keys_rest_bad_request(request_type=apikeys.ListKeysRequest): @@ -5817,10 +5872,13 @@ def test_list_keys_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ApiKeysRestInterceptor, "post_list_keys" ) as post, mock.patch.object( + transports.ApiKeysRestInterceptor, "post_list_keys_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ApiKeysRestInterceptor, "pre_list_keys" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = apikeys.ListKeysRequest.pb(apikeys.ListKeysRequest()) transcode.return_value = { "method": "post", @@ -5842,6 +5900,7 @@ def test_list_keys_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = apikeys.ListKeysResponse() + post_with_metadata.return_value = apikeys.ListKeysResponse(), metadata client.list_keys( request, @@ -5853,6 +5912,7 @@ def test_list_keys_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_key_rest_bad_request(request_type=apikeys.GetKeyRequest): @@ -5941,10 +6001,13 @@ def 
test_get_key_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ApiKeysRestInterceptor, "post_get_key" ) as post, mock.patch.object( + transports.ApiKeysRestInterceptor, "post_get_key_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ApiKeysRestInterceptor, "pre_get_key" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = apikeys.GetKeyRequest.pb(apikeys.GetKeyRequest()) transcode.return_value = { "method": "post", @@ -5966,6 +6029,7 @@ def test_get_key_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.Key() + post_with_metadata.return_value = resources.Key(), metadata client.get_key( request, @@ -5977,6 +6041,7 @@ def test_get_key_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_key_string_rest_bad_request(request_type=apikeys.GetKeyStringRequest): @@ -6057,10 +6122,13 @@ def test_get_key_string_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ApiKeysRestInterceptor, "post_get_key_string" ) as post, mock.patch.object( + transports.ApiKeysRestInterceptor, "post_get_key_string_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ApiKeysRestInterceptor, "pre_get_key_string" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = apikeys.GetKeyStringRequest.pb(apikeys.GetKeyStringRequest()) transcode.return_value = { "method": "post", @@ -6084,6 +6152,7 @@ def test_get_key_string_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = apikeys.GetKeyStringResponse() + post_with_metadata.return_value = apikeys.GetKeyStringResponse(), metadata client.get_key_string( request, @@ -6095,6 +6164,7 @@ def test_get_key_string_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_key_rest_bad_request(request_type=apikeys.UpdateKeyRequest): @@ -6280,10 +6350,13 @@ def test_update_key_rest_interceptors(null_interceptor): ), mock.patch.object( transports.ApiKeysRestInterceptor, "post_update_key" ) as post, mock.patch.object( + transports.ApiKeysRestInterceptor, "post_update_key_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ApiKeysRestInterceptor, "pre_update_key" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = apikeys.UpdateKeyRequest.pb(apikeys.UpdateKeyRequest()) transcode.return_value = { "method": "post", @@ -6305,6 +6378,7 @@ def test_update_key_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.update_key( request, @@ -6316,6 +6390,7 @@ def test_update_key_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_key_rest_bad_request(request_type=apikeys.DeleteKeyRequest): @@ -6392,10 +6467,13 @@ def test_delete_key_rest_interceptors(null_interceptor): ), mock.patch.object( transports.ApiKeysRestInterceptor, "post_delete_key" ) as post, mock.patch.object( + transports.ApiKeysRestInterceptor, "post_delete_key_with_metadata" + ) as post_with_metadata, mock.patch.object( 
transports.ApiKeysRestInterceptor, "pre_delete_key" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = apikeys.DeleteKeyRequest.pb(apikeys.DeleteKeyRequest()) transcode.return_value = { "method": "post", @@ -6417,6 +6495,7 @@ def test_delete_key_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.delete_key( request, @@ -6428,6 +6507,7 @@ def test_delete_key_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_undelete_key_rest_bad_request(request_type=apikeys.UndeleteKeyRequest): @@ -6504,10 +6584,13 @@ def test_undelete_key_rest_interceptors(null_interceptor): ), mock.patch.object( transports.ApiKeysRestInterceptor, "post_undelete_key" ) as post, mock.patch.object( + transports.ApiKeysRestInterceptor, "post_undelete_key_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ApiKeysRestInterceptor, "pre_undelete_key" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = apikeys.UndeleteKeyRequest.pb(apikeys.UndeleteKeyRequest()) transcode.return_value = { "method": "post", @@ -6529,6 +6612,7 @@ def test_undelete_key_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.undelete_key( request, @@ -6540,6 +6624,7 @@ def test_undelete_key_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_lookup_key_rest_bad_request(request_type=apikeys.LookupKeyRequest): @@ -6622,10 +6707,13 @@ def test_lookup_key_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ApiKeysRestInterceptor, "post_lookup_key" ) as post, mock.patch.object( + transports.ApiKeysRestInterceptor, "post_lookup_key_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ApiKeysRestInterceptor, "pre_lookup_key" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = apikeys.LookupKeyRequest.pb(apikeys.LookupKeyRequest()) transcode.return_value = { "method": "post", @@ -6647,6 +6735,7 @@ def test_lookup_key_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = apikeys.LookupKeyResponse() + post_with_metadata.return_value = apikeys.LookupKeyResponse(), metadata client.lookup_key( request, @@ -6658,6 +6747,7 @@ def test_lookup_key_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_operation_rest_bad_request( diff --git a/packages/google-cloud-apigee-connect/CHANGELOG.md b/packages/google-cloud-apigee-connect/CHANGELOG.md index b68f04b80d7f..c31b0f653605 100644 --- a/packages/google-cloud-apigee-connect/CHANGELOG.md +++ b/packages/google-cloud-apigee-connect/CHANGELOG.md @@ -1,5 +1,13 @@ # Changelog +## [1.12.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-apigee-connect-v1.11.0...google-cloud-apigee-connect-v1.12.0) (2025-02-12) + + +### Features + +* Add REST Interceptors which support reading metadata 
([a961bc0](https://github.com/googleapis/google-cloud-python/commit/a961bc029201b72fc4923490aeb3d82781853e6a)) +* Add support for reading selective GAPIC generation methods from service YAML ([a961bc0](https://github.com/googleapis/google-cloud-python/commit/a961bc029201b72fc4923490aeb3d82781853e6a)) + ## [1.11.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-apigee-connect-v1.10.1...google-cloud-apigee-connect-v1.11.0) (2024-12-12) diff --git a/packages/google-cloud-apigee-connect/google/cloud/apigeeconnect/gapic_version.py b/packages/google-cloud-apigee-connect/google/cloud/apigeeconnect/gapic_version.py index 50d842f376d0..739fdfae141c 100644 --- a/packages/google-cloud-apigee-connect/google/cloud/apigeeconnect/gapic_version.py +++ b/packages/google-cloud-apigee-connect/google/cloud/apigeeconnect/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.11.0" # {x-release-please-version} +__version__ = "1.12.0" # {x-release-please-version} diff --git a/packages/google-cloud-apigee-connect/google/cloud/apigeeconnect_v1/gapic_version.py b/packages/google-cloud-apigee-connect/google/cloud/apigeeconnect_v1/gapic_version.py index 50d842f376d0..739fdfae141c 100644 --- a/packages/google-cloud-apigee-connect/google/cloud/apigeeconnect_v1/gapic_version.py +++ b/packages/google-cloud-apigee-connect/google/cloud/apigeeconnect_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.11.0" # {x-release-please-version} +__version__ = "1.12.0" # {x-release-please-version} diff --git a/packages/google-cloud-apigee-connect/google/cloud/apigeeconnect_v1/services/connection_service/client.py b/packages/google-cloud-apigee-connect/google/cloud/apigeeconnect_v1/services/connection_service/client.py index b57059b8dea1..2f838a29e0f9 100644 --- a/packages/google-cloud-apigee-connect/google/cloud/apigeeconnect_v1/services/connection_service/client.py +++ b/packages/google-cloud-apigee-connect/google/cloud/apigeeconnect_v1/services/connection_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -479,6 +481,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. 
diff --git a/packages/google-cloud-apigee-connect/google/cloud/apigeeconnect_v1/services/tether/client.py b/packages/google-cloud-apigee-connect/google/cloud/apigeeconnect_v1/services/tether/client.py index 4c037bad3e35..c1037a88e617 100644 --- a/packages/google-cloud-apigee-connect/google/cloud/apigeeconnect_v1/services/tether/client.py +++ b/packages/google-cloud-apigee-connect/google/cloud/apigeeconnect_v1/services/tether/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -465,6 +467,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. diff --git a/packages/google-cloud-apigee-connect/samples/generated_samples/snippet_metadata_google.cloud.apigeeconnect.v1.json b/packages/google-cloud-apigee-connect/samples/generated_samples/snippet_metadata_google.cloud.apigeeconnect.v1.json index d5e0b1dcce51..9a387c22e351 100644 --- a/packages/google-cloud-apigee-connect/samples/generated_samples/snippet_metadata_google.cloud.apigeeconnect.v1.json +++ b/packages/google-cloud-apigee-connect/samples/generated_samples/snippet_metadata_google.cloud.apigeeconnect.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-apigee-connect", - "version": "1.11.0" + "version": "1.12.0" }, "snippets": [ { diff --git a/packages/google-cloud-apigee-connect/tests/unit/gapic/apigeeconnect_v1/test_connection_service.py b/packages/google-cloud-apigee-connect/tests/unit/gapic/apigeeconnect_v1/test_connection_service.py index e9418bd984a8..19c40db4e952 100644 --- a/packages/google-cloud-apigee-connect/tests/unit/gapic/apigeeconnect_v1/test_connection_service.py +++ b/packages/google-cloud-apigee-connect/tests/unit/gapic/apigeeconnect_v1/test_connection_service.py @@ -22,6 +22,7 @@ except ImportError: # pragma: NO COVER import mock +import json import math from google.api_core import api_core_version @@ -55,6 +56,13 @@ ) from google.cloud.apigeeconnect_v1.types import connection +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -322,6 +330,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
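Illustrative sketch, not part of the generated diff: the client hunks in this change append a JSON-encoded credential summary to the details of 401/403/404 errors whenever the credentials expose `get_cred_info()` (google-auth >= 2.35.0), and the unit tests that follow exercise that helper directly. The snippet sketches what a caller would observe; the specific RPC, the exception handling, and the printing are assumptions for illustration only.

from google.api_core import exceptions as core_exceptions
from google.cloud import apigeeconnect_v1


def list_connections_with_diagnostics(parent: str):
    client = apigeeconnect_v1.ConnectionServiceClient()
    try:
        return list(client.list_connections(parent=parent))
    except core_exceptions.PermissionDenied as exc:  # HTTP 403
        # When present, the trailing detail is the JSON string appended by
        # _add_cred_info_for_auth_errors, e.g.
        # {"credential_source": ..., "credential_type": ..., "principal": ...}.
        for detail in exc.details:
            print("auth detail:", detail)
        raise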
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = ConnectionServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = ConnectionServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-apigee-connect/tests/unit/gapic/apigeeconnect_v1/test_tether.py b/packages/google-cloud-apigee-connect/tests/unit/gapic/apigeeconnect_v1/test_tether.py index 9018fd010c76..7bf3a4bb46bd 100644 --- a/packages/google-cloud-apigee-connect/tests/unit/gapic/apigeeconnect_v1/test_tether.py +++ b/packages/google-cloud-apigee-connect/tests/unit/gapic/apigeeconnect_v1/test_tether.py @@ -22,6 +22,7 @@ except ImportError: # pragma: NO COVER import mock +import json import math from google.api_core import api_core_version @@ -57,6 +58,13 @@ ) from google.cloud.apigeeconnect_v1.types import tether +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -279,6 +287,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
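Illustrative note, not part of the generated diff: the cred-info tests above and below construct credentials with `mock.Mock(["get_cred_info"])` and `mock.Mock([])`. Passing a list as the first positional argument sets the mock's spec, which is what makes the `hasattr()` check inside `_add_cred_info_for_auth_errors` see or miss the method. A standalone sketch of that behaviour:

from unittest import mock

cred_with_info = mock.Mock(["get_cred_info"])  # spec limited to this one attribute
cred_without_info = mock.Mock([])              # empty spec: no attributes at all

assert hasattr(cred_with_info, "get_cred_info")
assert not hasattr(cred_without_info, "get_cred_info")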
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = TetherClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = TetherClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-apigee-registry/CHANGELOG.md b/packages/google-cloud-apigee-registry/CHANGELOG.md index bc08b6b0f6c6..cd8794a29972 100644 --- a/packages/google-cloud-apigee-registry/CHANGELOG.md +++ b/packages/google-cloud-apigee-registry/CHANGELOG.md @@ -1,5 +1,13 @@ # Changelog +## [0.6.15](https://github.com/googleapis/google-cloud-python/compare/google-cloud-apigee-registry-v0.6.14...google-cloud-apigee-registry-v0.6.15) (2025-02-12) + + +### Features + +* Add REST Interceptors which support reading metadata ([a961bc0](https://github.com/googleapis/google-cloud-python/commit/a961bc029201b72fc4923490aeb3d82781853e6a)) +* Add support for reading selective GAPIC generation methods from service YAML ([a961bc0](https://github.com/googleapis/google-cloud-python/commit/a961bc029201b72fc4923490aeb3d82781853e6a)) + ## [0.6.14](https://github.com/googleapis/google-cloud-python/compare/google-cloud-apigee-registry-v0.6.13...google-cloud-apigee-registry-v0.6.14) (2024-12-12) diff --git a/packages/google-cloud-apigee-registry/google/cloud/apigee_registry/gapic_version.py b/packages/google-cloud-apigee-registry/google/cloud/apigee_registry/gapic_version.py index 0b6dbde2b051..e51340f75942 100644 --- a/packages/google-cloud-apigee-registry/google/cloud/apigee_registry/gapic_version.py +++ b/packages/google-cloud-apigee-registry/google/cloud/apigee_registry/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.6.14" # {x-release-please-version} +__version__ = "0.6.15" # {x-release-please-version} diff --git a/packages/google-cloud-apigee-registry/google/cloud/apigee_registry_v1/gapic_version.py b/packages/google-cloud-apigee-registry/google/cloud/apigee_registry_v1/gapic_version.py index 0b6dbde2b051..e51340f75942 100644 --- a/packages/google-cloud-apigee-registry/google/cloud/apigee_registry_v1/gapic_version.py +++ b/packages/google-cloud-apigee-registry/google/cloud/apigee_registry_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.6.14" # {x-release-please-version} +__version__ = "0.6.15" # {x-release-please-version} diff --git a/packages/google-cloud-apigee-registry/google/cloud/apigee_registry_v1/services/provisioning/client.py b/packages/google-cloud-apigee-registry/google/cloud/apigee_registry_v1/services/provisioning/client.py index a7110deae9eb..beaeaaa6a4bf 100644 --- a/packages/google-cloud-apigee-registry/google/cloud/apigee_registry_v1/services/provisioning/client.py +++ b/packages/google-cloud-apigee-registry/google/cloud/apigee_registry_v1/services/provisioning/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -492,6 +494,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -1121,16 +1150,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -1176,16 +1209,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def delete_operation( self, @@ -1408,16 +1445,20 @@ def set_iam_policy( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_iam_policy( self, @@ -1530,16 +1571,20 @@ def get_iam_policy( # Validate the universe domain. 
self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def test_iam_permissions( self, @@ -1590,16 +1635,20 @@ def test_iam_permissions( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_location( self, @@ -1645,16 +1694,20 @@ def get_location( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def list_locations( self, @@ -1700,16 +1753,20 @@ def list_locations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-apigee-registry/google/cloud/apigee_registry_v1/services/provisioning/transports/rest.py b/packages/google-cloud-apigee-registry/google/cloud/apigee_registry_v1/services/provisioning/transports/rest.py index 4e00f6d48821..9a32fe004c08 100644 --- a/packages/google-cloud-apigee-registry/google/cloud/apigee_registry_v1/services/provisioning/transports/rest.py +++ b/packages/google-cloud-apigee-registry/google/cloud/apigee_registry_v1/services/provisioning/transports/rest.py @@ -122,12 +122,35 @@ def post_create_instance( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_instance - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_instance_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Provisioning server but before - it is returned to user code. + it is returned to user code. This `post_create_instance` interceptor runs + before the `post_create_instance_with_metadata` interceptor. 
""" return response + def post_create_instance_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_instance + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Provisioning server but before it is returned to user code. + + We recommend only using this `post_create_instance_with_metadata` + interceptor in new development instead of the `post_create_instance` interceptor. + When both interceptors are used, this `post_create_instance_with_metadata` interceptor runs after the + `post_create_instance` interceptor. The (possibly modified) response returned by + `post_create_instance` will be passed to + `post_create_instance_with_metadata`. + """ + return response, metadata + def pre_delete_instance( self, request: provisioning_service.DeleteInstanceRequest, @@ -148,12 +171,35 @@ def post_delete_instance( ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_instance - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_instance_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Provisioning server but before - it is returned to user code. + it is returned to user code. This `post_delete_instance` interceptor runs + before the `post_delete_instance_with_metadata` interceptor. """ return response + def post_delete_instance_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_instance + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Provisioning server but before it is returned to user code. + + We recommend only using this `post_delete_instance_with_metadata` + interceptor in new development instead of the `post_delete_instance` interceptor. + When both interceptors are used, this `post_delete_instance_with_metadata` interceptor runs after the + `post_delete_instance` interceptor. The (possibly modified) response returned by + `post_delete_instance` will be passed to + `post_delete_instance_with_metadata`. + """ + return response, metadata + def pre_get_instance( self, request: provisioning_service.GetInstanceRequest, @@ -173,12 +219,35 @@ def post_get_instance( ) -> provisioning_service.Instance: """Post-rpc interceptor for get_instance - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_instance_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Provisioning server but before - it is returned to user code. + it is returned to user code. This `post_get_instance` interceptor runs + before the `post_get_instance_with_metadata` interceptor. """ return response + def post_get_instance_with_metadata( + self, + response: provisioning_service.Instance, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[provisioning_service.Instance, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_instance + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Provisioning server but before it is returned to user code. 
+ + We recommend only using this `post_get_instance_with_metadata` + interceptor in new development instead of the `post_get_instance` interceptor. + When both interceptors are used, this `post_get_instance_with_metadata` interceptor runs after the + `post_get_instance` interceptor. The (possibly modified) response returned by + `post_get_instance` will be passed to + `post_get_instance_with_metadata`. + """ + return response, metadata + def pre_get_location( self, request: locations_pb2.GetLocationRequest, @@ -663,6 +732,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_instance(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_instance_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -804,6 +877,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_instance(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_instance_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -952,6 +1029,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_instance(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_instance_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-apigee-registry/google/cloud/apigee_registry_v1/services/registry/client.py b/packages/google-cloud-apigee-registry/google/cloud/apigee_registry_v1/services/registry/client.py index b9fdde2a8a2b..ec0f6b96537b 100644 --- a/packages/google-cloud-apigee-registry/google/cloud/apigee_registry_v1/services/registry/client.py +++ b/packages/google-cloud-apigee-registry/google/cloud/apigee_registry_v1/services/registry/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -589,6 +591,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -4901,16 +4930,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. 
- response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -4956,16 +4989,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def delete_operation( self, @@ -5188,16 +5225,20 @@ def set_iam_policy( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_iam_policy( self, @@ -5310,16 +5351,20 @@ def get_iam_policy( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def test_iam_permissions( self, @@ -5370,16 +5415,20 @@ def test_iam_permissions( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_location( self, @@ -5425,16 +5474,20 @@ def get_location( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def list_locations( self, @@ -5480,16 +5533,20 @@ def list_locations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-apigee-registry/google/cloud/apigee_registry_v1/services/registry/transports/rest.py b/packages/google-cloud-apigee-registry/google/cloud/apigee_registry_v1/services/registry/transports/rest.py index 5a42b2c8b1a9..453047dd6733 100644 --- a/packages/google-cloud-apigee-registry/google/cloud/apigee_registry_v1/services/registry/transports/rest.py +++ b/packages/google-cloud-apigee-registry/google/cloud/apigee_registry_v1/services/registry/transports/rest.py @@ -357,12 +357,35 @@ def pre_create_api( def post_create_api(self, response: registry_models.Api) -> registry_models.Api: """Post-rpc interceptor for create_api - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_api_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Registry server but before - it is returned to user code. + it is returned to user code. This `post_create_api` interceptor runs + before the `post_create_api_with_metadata` interceptor. """ return response + def post_create_api_with_metadata( + self, + response: registry_models.Api, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[registry_models.Api, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_api + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Registry server but before it is returned to user code. + + We recommend only using this `post_create_api_with_metadata` + interceptor in new development instead of the `post_create_api` interceptor. + When both interceptors are used, this `post_create_api_with_metadata` interceptor runs after the + `post_create_api` interceptor. The (possibly modified) response returned by + `post_create_api` will be passed to + `post_create_api_with_metadata`. + """ + return response, metadata + def pre_create_api_deployment( self, request: registry_service.CreateApiDeploymentRequest, @@ -383,12 +406,35 @@ def post_create_api_deployment( ) -> registry_models.ApiDeployment: """Post-rpc interceptor for create_api_deployment - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_api_deployment_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Registry server but before - it is returned to user code. + it is returned to user code. This `post_create_api_deployment` interceptor runs + before the `post_create_api_deployment_with_metadata` interceptor. """ return response + def post_create_api_deployment_with_metadata( + self, + response: registry_models.ApiDeployment, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[registry_models.ApiDeployment, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_api_deployment + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Registry server but before it is returned to user code. 
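(Editor's aside, not part of the generated patch: the client.py hunks above wrap every rpc call in try/except so that `_add_cred_info_for_auth_errors` can decorate auth-related failures. A standalone sketch of that logic follows; the credentials object is a stand-in, since the real google-auth types are not needed to see the behavior.)

import json
from http import HTTPStatus


class FakeCredentials:
    """Stand-in for a google-auth credential exposing get_cred_info (>= 2.35.0)."""

    def get_cred_info(self):
        return {"credential_source": "illustrative-service-account.json"}


def add_cred_info_for_auth_errors(error_code, error_details, credentials):
    # Only decorate 401 / 403 / 404 responses, mirroring the generated helper.
    if error_code not in (
        HTTPStatus.UNAUTHORIZED,
        HTTPStatus.FORBIDDEN,
        HTTPStatus.NOT_FOUND,
    ):
        return
    # Older google-auth releases do not expose get_cred_info at all.
    if not hasattr(credentials, "get_cred_info"):
        return
    cred_info = credentials.get_cred_info()
    if cred_info and hasattr(error_details, "append"):
        error_details.append(json.dumps(cred_info))


details = []
add_cred_info_for_auth_errors(HTTPStatus.FORBIDDEN, details, FakeCredentials())
print(details)  # one JSON string describing the credential has been appended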
+ + We recommend only using this `post_create_api_deployment_with_metadata` + interceptor in new development instead of the `post_create_api_deployment` interceptor. + When both interceptors are used, this `post_create_api_deployment_with_metadata` interceptor runs after the + `post_create_api_deployment` interceptor. The (possibly modified) response returned by + `post_create_api_deployment` will be passed to + `post_create_api_deployment_with_metadata`. + """ + return response, metadata + def pre_create_api_spec( self, request: registry_service.CreateApiSpecRequest, @@ -408,12 +454,35 @@ def post_create_api_spec( ) -> registry_models.ApiSpec: """Post-rpc interceptor for create_api_spec - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_api_spec_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Registry server but before - it is returned to user code. + it is returned to user code. This `post_create_api_spec` interceptor runs + before the `post_create_api_spec_with_metadata` interceptor. """ return response + def post_create_api_spec_with_metadata( + self, + response: registry_models.ApiSpec, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[registry_models.ApiSpec, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_api_spec + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Registry server but before it is returned to user code. + + We recommend only using this `post_create_api_spec_with_metadata` + interceptor in new development instead of the `post_create_api_spec` interceptor. + When both interceptors are used, this `post_create_api_spec_with_metadata` interceptor runs after the + `post_create_api_spec` interceptor. The (possibly modified) response returned by + `post_create_api_spec` will be passed to + `post_create_api_spec_with_metadata`. + """ + return response, metadata + def pre_create_api_version( self, request: registry_service.CreateApiVersionRequest, @@ -434,12 +503,35 @@ def post_create_api_version( ) -> registry_models.ApiVersion: """Post-rpc interceptor for create_api_version - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_api_version_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Registry server but before - it is returned to user code. + it is returned to user code. This `post_create_api_version` interceptor runs + before the `post_create_api_version_with_metadata` interceptor. """ return response + def post_create_api_version_with_metadata( + self, + response: registry_models.ApiVersion, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[registry_models.ApiVersion, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_api_version + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Registry server but before it is returned to user code. + + We recommend only using this `post_create_api_version_with_metadata` + interceptor in new development instead of the `post_create_api_version` interceptor. + When both interceptors are used, this `post_create_api_version_with_metadata` interceptor runs after the + `post_create_api_version` interceptor. 
The (possibly modified) response returned by + `post_create_api_version` will be passed to + `post_create_api_version_with_metadata`. + """ + return response, metadata + def pre_create_artifact( self, request: registry_service.CreateArtifactRequest, @@ -459,12 +551,35 @@ def post_create_artifact( ) -> registry_models.Artifact: """Post-rpc interceptor for create_artifact - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_artifact_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Registry server but before - it is returned to user code. + it is returned to user code. This `post_create_artifact` interceptor runs + before the `post_create_artifact_with_metadata` interceptor. """ return response + def post_create_artifact_with_metadata( + self, + response: registry_models.Artifact, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[registry_models.Artifact, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_artifact + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Registry server but before it is returned to user code. + + We recommend only using this `post_create_artifact_with_metadata` + interceptor in new development instead of the `post_create_artifact` interceptor. + When both interceptors are used, this `post_create_artifact_with_metadata` interceptor runs after the + `post_create_artifact` interceptor. The (possibly modified) response returned by + `post_create_artifact` will be passed to + `post_create_artifact_with_metadata`. + """ + return response, metadata + def pre_delete_api( self, request: registry_service.DeleteApiRequest, @@ -514,12 +629,35 @@ def post_delete_api_deployment_revision( ) -> registry_models.ApiDeployment: """Post-rpc interceptor for delete_api_deployment_revision - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_api_deployment_revision_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Registry server but before - it is returned to user code. + it is returned to user code. This `post_delete_api_deployment_revision` interceptor runs + before the `post_delete_api_deployment_revision_with_metadata` interceptor. """ return response + def post_delete_api_deployment_revision_with_metadata( + self, + response: registry_models.ApiDeployment, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[registry_models.ApiDeployment, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_api_deployment_revision + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Registry server but before it is returned to user code. + + We recommend only using this `post_delete_api_deployment_revision_with_metadata` + interceptor in new development instead of the `post_delete_api_deployment_revision` interceptor. + When both interceptors are used, this `post_delete_api_deployment_revision_with_metadata` interceptor runs after the + `post_delete_api_deployment_revision` interceptor. The (possibly modified) response returned by + `post_delete_api_deployment_revision` will be passed to + `post_delete_api_deployment_revision_with_metadata`. 
+ """ + return response, metadata + def pre_delete_api_spec( self, request: registry_service.DeleteApiSpecRequest, @@ -554,12 +692,35 @@ def post_delete_api_spec_revision( ) -> registry_models.ApiSpec: """Post-rpc interceptor for delete_api_spec_revision - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_api_spec_revision_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Registry server but before - it is returned to user code. + it is returned to user code. This `post_delete_api_spec_revision` interceptor runs + before the `post_delete_api_spec_revision_with_metadata` interceptor. """ return response + def post_delete_api_spec_revision_with_metadata( + self, + response: registry_models.ApiSpec, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[registry_models.ApiSpec, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_api_spec_revision + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Registry server but before it is returned to user code. + + We recommend only using this `post_delete_api_spec_revision_with_metadata` + interceptor in new development instead of the `post_delete_api_spec_revision` interceptor. + When both interceptors are used, this `post_delete_api_spec_revision_with_metadata` interceptor runs after the + `post_delete_api_spec_revision` interceptor. The (possibly modified) response returned by + `post_delete_api_spec_revision` will be passed to + `post_delete_api_spec_revision_with_metadata`. + """ + return response, metadata + def pre_delete_api_version( self, request: registry_service.DeleteApiVersionRequest, @@ -604,12 +765,35 @@ def pre_get_api( def post_get_api(self, response: registry_models.Api) -> registry_models.Api: """Post-rpc interceptor for get_api - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_api_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Registry server but before - it is returned to user code. + it is returned to user code. This `post_get_api` interceptor runs + before the `post_get_api_with_metadata` interceptor. """ return response + def post_get_api_with_metadata( + self, + response: registry_models.Api, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[registry_models.Api, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_api + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Registry server but before it is returned to user code. + + We recommend only using this `post_get_api_with_metadata` + interceptor in new development instead of the `post_get_api` interceptor. + When both interceptors are used, this `post_get_api_with_metadata` interceptor runs after the + `post_get_api` interceptor. The (possibly modified) response returned by + `post_get_api` will be passed to + `post_get_api_with_metadata`. + """ + return response, metadata + def pre_get_api_deployment( self, request: registry_service.GetApiDeploymentRequest, @@ -630,12 +814,35 @@ def post_get_api_deployment( ) -> registry_models.ApiDeployment: """Post-rpc interceptor for get_api_deployment - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_api_deployment_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response after it is returned by the Registry server but before - it is returned to user code. + it is returned to user code. This `post_get_api_deployment` interceptor runs + before the `post_get_api_deployment_with_metadata` interceptor. """ return response + def post_get_api_deployment_with_metadata( + self, + response: registry_models.ApiDeployment, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[registry_models.ApiDeployment, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_api_deployment + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Registry server but before it is returned to user code. + + We recommend only using this `post_get_api_deployment_with_metadata` + interceptor in new development instead of the `post_get_api_deployment` interceptor. + When both interceptors are used, this `post_get_api_deployment_with_metadata` interceptor runs after the + `post_get_api_deployment` interceptor. The (possibly modified) response returned by + `post_get_api_deployment` will be passed to + `post_get_api_deployment_with_metadata`. + """ + return response, metadata + def pre_get_api_spec( self, request: registry_service.GetApiSpecRequest, @@ -655,12 +862,35 @@ def post_get_api_spec( ) -> registry_models.ApiSpec: """Post-rpc interceptor for get_api_spec - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_api_spec_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Registry server but before - it is returned to user code. + it is returned to user code. This `post_get_api_spec` interceptor runs + before the `post_get_api_spec_with_metadata` interceptor. """ return response + def post_get_api_spec_with_metadata( + self, + response: registry_models.ApiSpec, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[registry_models.ApiSpec, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_api_spec + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Registry server but before it is returned to user code. + + We recommend only using this `post_get_api_spec_with_metadata` + interceptor in new development instead of the `post_get_api_spec` interceptor. + When both interceptors are used, this `post_get_api_spec_with_metadata` interceptor runs after the + `post_get_api_spec` interceptor. The (possibly modified) response returned by + `post_get_api_spec` will be passed to + `post_get_api_spec_with_metadata`. + """ + return response, metadata + def pre_get_api_spec_contents( self, request: registry_service.GetApiSpecContentsRequest, @@ -681,12 +911,35 @@ def post_get_api_spec_contents( ) -> httpbody_pb2.HttpBody: """Post-rpc interceptor for get_api_spec_contents - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_api_spec_contents_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Registry server but before - it is returned to user code. + it is returned to user code. This `post_get_api_spec_contents` interceptor runs + before the `post_get_api_spec_contents_with_metadata` interceptor. 
""" return response + def post_get_api_spec_contents_with_metadata( + self, + response: httpbody_pb2.HttpBody, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[httpbody_pb2.HttpBody, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_api_spec_contents + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Registry server but before it is returned to user code. + + We recommend only using this `post_get_api_spec_contents_with_metadata` + interceptor in new development instead of the `post_get_api_spec_contents` interceptor. + When both interceptors are used, this `post_get_api_spec_contents_with_metadata` interceptor runs after the + `post_get_api_spec_contents` interceptor. The (possibly modified) response returned by + `post_get_api_spec_contents` will be passed to + `post_get_api_spec_contents_with_metadata`. + """ + return response, metadata + def pre_get_api_version( self, request: registry_service.GetApiVersionRequest, @@ -706,12 +959,35 @@ def post_get_api_version( ) -> registry_models.ApiVersion: """Post-rpc interceptor for get_api_version - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_api_version_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Registry server but before - it is returned to user code. + it is returned to user code. This `post_get_api_version` interceptor runs + before the `post_get_api_version_with_metadata` interceptor. """ return response + def post_get_api_version_with_metadata( + self, + response: registry_models.ApiVersion, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[registry_models.ApiVersion, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_api_version + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Registry server but before it is returned to user code. + + We recommend only using this `post_get_api_version_with_metadata` + interceptor in new development instead of the `post_get_api_version` interceptor. + When both interceptors are used, this `post_get_api_version_with_metadata` interceptor runs after the + `post_get_api_version` interceptor. The (possibly modified) response returned by + `post_get_api_version` will be passed to + `post_get_api_version_with_metadata`. + """ + return response, metadata + def pre_get_artifact( self, request: registry_service.GetArtifactRequest, @@ -731,12 +1007,35 @@ def post_get_artifact( ) -> registry_models.Artifact: """Post-rpc interceptor for get_artifact - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_artifact_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Registry server but before - it is returned to user code. + it is returned to user code. This `post_get_artifact` interceptor runs + before the `post_get_artifact_with_metadata` interceptor. """ return response + def post_get_artifact_with_metadata( + self, + response: registry_models.Artifact, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[registry_models.Artifact, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_artifact + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Registry server but before it is returned to user code. 
+ + We recommend only using this `post_get_artifact_with_metadata` + interceptor in new development instead of the `post_get_artifact` interceptor. + When both interceptors are used, this `post_get_artifact_with_metadata` interceptor runs after the + `post_get_artifact` interceptor. The (possibly modified) response returned by + `post_get_artifact` will be passed to + `post_get_artifact_with_metadata`. + """ + return response, metadata + def pre_get_artifact_contents( self, request: registry_service.GetArtifactContentsRequest, @@ -757,12 +1056,35 @@ def post_get_artifact_contents( ) -> httpbody_pb2.HttpBody: """Post-rpc interceptor for get_artifact_contents - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_artifact_contents_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Registry server but before - it is returned to user code. + it is returned to user code. This `post_get_artifact_contents` interceptor runs + before the `post_get_artifact_contents_with_metadata` interceptor. """ return response + def post_get_artifact_contents_with_metadata( + self, + response: httpbody_pb2.HttpBody, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[httpbody_pb2.HttpBody, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_artifact_contents + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Registry server but before it is returned to user code. + + We recommend only using this `post_get_artifact_contents_with_metadata` + interceptor in new development instead of the `post_get_artifact_contents` interceptor. + When both interceptors are used, this `post_get_artifact_contents_with_metadata` interceptor runs after the + `post_get_artifact_contents` interceptor. The (possibly modified) response returned by + `post_get_artifact_contents` will be passed to + `post_get_artifact_contents_with_metadata`. + """ + return response, metadata + def pre_list_api_deployment_revisions( self, request: registry_service.ListApiDeploymentRevisionsRequest, @@ -783,12 +1105,38 @@ def post_list_api_deployment_revisions( ) -> registry_service.ListApiDeploymentRevisionsResponse: """Post-rpc interceptor for list_api_deployment_revisions - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_api_deployment_revisions_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Registry server but before - it is returned to user code. + it is returned to user code. This `post_list_api_deployment_revisions` interceptor runs + before the `post_list_api_deployment_revisions_with_metadata` interceptor. """ return response + def post_list_api_deployment_revisions_with_metadata( + self, + response: registry_service.ListApiDeploymentRevisionsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + registry_service.ListApiDeploymentRevisionsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_api_deployment_revisions + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Registry server but before it is returned to user code. + + We recommend only using this `post_list_api_deployment_revisions_with_metadata` + interceptor in new development instead of the `post_list_api_deployment_revisions` interceptor. 
+ When both interceptors are used, this `post_list_api_deployment_revisions_with_metadata` interceptor runs after the + `post_list_api_deployment_revisions` interceptor. The (possibly modified) response returned by + `post_list_api_deployment_revisions` will be passed to + `post_list_api_deployment_revisions_with_metadata`. + """ + return response, metadata + def pre_list_api_deployments( self, request: registry_service.ListApiDeploymentsRequest, @@ -809,12 +1157,38 @@ def post_list_api_deployments( ) -> registry_service.ListApiDeploymentsResponse: """Post-rpc interceptor for list_api_deployments - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_api_deployments_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Registry server but before - it is returned to user code. + it is returned to user code. This `post_list_api_deployments` interceptor runs + before the `post_list_api_deployments_with_metadata` interceptor. """ return response + def post_list_api_deployments_with_metadata( + self, + response: registry_service.ListApiDeploymentsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + registry_service.ListApiDeploymentsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_api_deployments + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Registry server but before it is returned to user code. + + We recommend only using this `post_list_api_deployments_with_metadata` + interceptor in new development instead of the `post_list_api_deployments` interceptor. + When both interceptors are used, this `post_list_api_deployments_with_metadata` interceptor runs after the + `post_list_api_deployments` interceptor. The (possibly modified) response returned by + `post_list_api_deployments` will be passed to + `post_list_api_deployments_with_metadata`. + """ + return response, metadata + def pre_list_apis( self, request: registry_service.ListApisRequest, @@ -834,12 +1208,37 @@ def post_list_apis( ) -> registry_service.ListApisResponse: """Post-rpc interceptor for list_apis - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_apis_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Registry server but before - it is returned to user code. + it is returned to user code. This `post_list_apis` interceptor runs + before the `post_list_apis_with_metadata` interceptor. """ return response + def post_list_apis_with_metadata( + self, + response: registry_service.ListApisResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + registry_service.ListApisResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_apis + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Registry server but before it is returned to user code. + + We recommend only using this `post_list_apis_with_metadata` + interceptor in new development instead of the `post_list_apis` interceptor. + When both interceptors are used, this `post_list_apis_with_metadata` interceptor runs after the + `post_list_apis` interceptor. The (possibly modified) response returned by + `post_list_apis` will be passed to + `post_list_apis_with_metadata`. 
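(Editor's aside, not part of the generated patch: the ordering described above, sketched with both hooks overridden. The `RegistryRestInterceptor` class name and import path are assumed from the file being patched here; the hook signatures are taken from this diff.)

from typing import Sequence, Tuple, Union

from google.cloud.apigee_registry_v1.services.registry.transports.rest import (
    RegistryRestInterceptor,
)
from google.cloud.apigee_registry_v1.types import registry_service


class OrderingInterceptor(RegistryRestInterceptor):
    def post_list_apis(
        self, response: registry_service.ListApisResponse
    ) -> registry_service.ListApisResponse:
        # Runs first (legacy hook); whatever it returns is what the next hook sees.
        print("post_list_apis ran")
        return response

    def post_list_apis_with_metadata(
        self,
        response: registry_service.ListApisResponse,
        metadata: Sequence[Tuple[str, Union[str, bytes]]],
    ) -> Tuple[
        registry_service.ListApisResponse, Sequence[Tuple[str, Union[str, bytes]]]
    ]:
        # Runs second, receiving the (possibly modified) response plus HTTP headers.
        print("post_list_apis_with_metadata ran")
        return response, metadata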
+ """ + return response, metadata + def pre_list_api_spec_revisions( self, request: registry_service.ListApiSpecRevisionsRequest, @@ -860,12 +1259,38 @@ def post_list_api_spec_revisions( ) -> registry_service.ListApiSpecRevisionsResponse: """Post-rpc interceptor for list_api_spec_revisions - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_api_spec_revisions_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Registry server but before - it is returned to user code. + it is returned to user code. This `post_list_api_spec_revisions` interceptor runs + before the `post_list_api_spec_revisions_with_metadata` interceptor. """ return response + def post_list_api_spec_revisions_with_metadata( + self, + response: registry_service.ListApiSpecRevisionsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + registry_service.ListApiSpecRevisionsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_api_spec_revisions + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Registry server but before it is returned to user code. + + We recommend only using this `post_list_api_spec_revisions_with_metadata` + interceptor in new development instead of the `post_list_api_spec_revisions` interceptor. + When both interceptors are used, this `post_list_api_spec_revisions_with_metadata` interceptor runs after the + `post_list_api_spec_revisions` interceptor. The (possibly modified) response returned by + `post_list_api_spec_revisions` will be passed to + `post_list_api_spec_revisions_with_metadata`. + """ + return response, metadata + def pre_list_api_specs( self, request: registry_service.ListApiSpecsRequest, @@ -885,12 +1310,37 @@ def post_list_api_specs( ) -> registry_service.ListApiSpecsResponse: """Post-rpc interceptor for list_api_specs - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_api_specs_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Registry server but before - it is returned to user code. + it is returned to user code. This `post_list_api_specs` interceptor runs + before the `post_list_api_specs_with_metadata` interceptor. """ return response + def post_list_api_specs_with_metadata( + self, + response: registry_service.ListApiSpecsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + registry_service.ListApiSpecsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_api_specs + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Registry server but before it is returned to user code. + + We recommend only using this `post_list_api_specs_with_metadata` + interceptor in new development instead of the `post_list_api_specs` interceptor. + When both interceptors are used, this `post_list_api_specs_with_metadata` interceptor runs after the + `post_list_api_specs` interceptor. The (possibly modified) response returned by + `post_list_api_specs` will be passed to + `post_list_api_specs_with_metadata`. 
+ """ + return response, metadata + def pre_list_api_versions( self, request: registry_service.ListApiVersionsRequest, @@ -910,12 +1360,38 @@ def post_list_api_versions( ) -> registry_service.ListApiVersionsResponse: """Post-rpc interceptor for list_api_versions - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_api_versions_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Registry server but before - it is returned to user code. + it is returned to user code. This `post_list_api_versions` interceptor runs + before the `post_list_api_versions_with_metadata` interceptor. """ return response + def post_list_api_versions_with_metadata( + self, + response: registry_service.ListApiVersionsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + registry_service.ListApiVersionsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_api_versions + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Registry server but before it is returned to user code. + + We recommend only using this `post_list_api_versions_with_metadata` + interceptor in new development instead of the `post_list_api_versions` interceptor. + When both interceptors are used, this `post_list_api_versions_with_metadata` interceptor runs after the + `post_list_api_versions` interceptor. The (possibly modified) response returned by + `post_list_api_versions` will be passed to + `post_list_api_versions_with_metadata`. + """ + return response, metadata + def pre_list_artifacts( self, request: registry_service.ListArtifactsRequest, @@ -935,12 +1411,37 @@ def post_list_artifacts( ) -> registry_service.ListArtifactsResponse: """Post-rpc interceptor for list_artifacts - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_artifacts_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Registry server but before - it is returned to user code. + it is returned to user code. This `post_list_artifacts` interceptor runs + before the `post_list_artifacts_with_metadata` interceptor. """ return response + def post_list_artifacts_with_metadata( + self, + response: registry_service.ListArtifactsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + registry_service.ListArtifactsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_artifacts + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Registry server but before it is returned to user code. + + We recommend only using this `post_list_artifacts_with_metadata` + interceptor in new development instead of the `post_list_artifacts` interceptor. + When both interceptors are used, this `post_list_artifacts_with_metadata` interceptor runs after the + `post_list_artifacts` interceptor. The (possibly modified) response returned by + `post_list_artifacts` will be passed to + `post_list_artifacts_with_metadata`. + """ + return response, metadata + def pre_replace_artifact( self, request: registry_service.ReplaceArtifactRequest, @@ -960,12 +1461,35 @@ def post_replace_artifact( ) -> registry_models.Artifact: """Post-rpc interceptor for replace_artifact - Override in a subclass to manipulate the response + DEPRECATED. 
Please use the `post_replace_artifact_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Registry server but before - it is returned to user code. + it is returned to user code. This `post_replace_artifact` interceptor runs + before the `post_replace_artifact_with_metadata` interceptor. """ return response + def post_replace_artifact_with_metadata( + self, + response: registry_models.Artifact, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[registry_models.Artifact, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for replace_artifact + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Registry server but before it is returned to user code. + + We recommend only using this `post_replace_artifact_with_metadata` + interceptor in new development instead of the `post_replace_artifact` interceptor. + When both interceptors are used, this `post_replace_artifact_with_metadata` interceptor runs after the + `post_replace_artifact` interceptor. The (possibly modified) response returned by + `post_replace_artifact` will be passed to + `post_replace_artifact_with_metadata`. + """ + return response, metadata + def pre_rollback_api_deployment( self, request: registry_service.RollbackApiDeploymentRequest, @@ -986,12 +1510,35 @@ def post_rollback_api_deployment( ) -> registry_models.ApiDeployment: """Post-rpc interceptor for rollback_api_deployment - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_rollback_api_deployment_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Registry server but before - it is returned to user code. + it is returned to user code. This `post_rollback_api_deployment` interceptor runs + before the `post_rollback_api_deployment_with_metadata` interceptor. """ return response + def post_rollback_api_deployment_with_metadata( + self, + response: registry_models.ApiDeployment, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[registry_models.ApiDeployment, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for rollback_api_deployment + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Registry server but before it is returned to user code. + + We recommend only using this `post_rollback_api_deployment_with_metadata` + interceptor in new development instead of the `post_rollback_api_deployment` interceptor. + When both interceptors are used, this `post_rollback_api_deployment_with_metadata` interceptor runs after the + `post_rollback_api_deployment` interceptor. The (possibly modified) response returned by + `post_rollback_api_deployment` will be passed to + `post_rollback_api_deployment_with_metadata`. + """ + return response, metadata + def pre_rollback_api_spec( self, request: registry_service.RollbackApiSpecRequest, @@ -1011,12 +1558,35 @@ def post_rollback_api_spec( ) -> registry_models.ApiSpec: """Post-rpc interceptor for rollback_api_spec - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_rollback_api_spec_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Registry server but before - it is returned to user code. + it is returned to user code. 
This `post_rollback_api_spec` interceptor runs + before the `post_rollback_api_spec_with_metadata` interceptor. """ return response + def post_rollback_api_spec_with_metadata( + self, + response: registry_models.ApiSpec, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[registry_models.ApiSpec, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for rollback_api_spec + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Registry server but before it is returned to user code. + + We recommend only using this `post_rollback_api_spec_with_metadata` + interceptor in new development instead of the `post_rollback_api_spec` interceptor. + When both interceptors are used, this `post_rollback_api_spec_with_metadata` interceptor runs after the + `post_rollback_api_spec` interceptor. The (possibly modified) response returned by + `post_rollback_api_spec` will be passed to + `post_rollback_api_spec_with_metadata`. + """ + return response, metadata + def pre_tag_api_deployment_revision( self, request: registry_service.TagApiDeploymentRevisionRequest, @@ -1037,12 +1607,35 @@ def post_tag_api_deployment_revision( ) -> registry_models.ApiDeployment: """Post-rpc interceptor for tag_api_deployment_revision - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_tag_api_deployment_revision_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Registry server but before - it is returned to user code. + it is returned to user code. This `post_tag_api_deployment_revision` interceptor runs + before the `post_tag_api_deployment_revision_with_metadata` interceptor. """ return response + def post_tag_api_deployment_revision_with_metadata( + self, + response: registry_models.ApiDeployment, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[registry_models.ApiDeployment, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for tag_api_deployment_revision + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Registry server but before it is returned to user code. + + We recommend only using this `post_tag_api_deployment_revision_with_metadata` + interceptor in new development instead of the `post_tag_api_deployment_revision` interceptor. + When both interceptors are used, this `post_tag_api_deployment_revision_with_metadata` interceptor runs after the + `post_tag_api_deployment_revision` interceptor. The (possibly modified) response returned by + `post_tag_api_deployment_revision` will be passed to + `post_tag_api_deployment_revision_with_metadata`. + """ + return response, metadata + def pre_tag_api_spec_revision( self, request: registry_service.TagApiSpecRevisionRequest, @@ -1063,12 +1656,35 @@ def post_tag_api_spec_revision( ) -> registry_models.ApiSpec: """Post-rpc interceptor for tag_api_spec_revision - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_tag_api_spec_revision_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Registry server but before - it is returned to user code. + it is returned to user code. This `post_tag_api_spec_revision` interceptor runs + before the `post_tag_api_spec_revision_with_metadata` interceptor. 
""" return response + def post_tag_api_spec_revision_with_metadata( + self, + response: registry_models.ApiSpec, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[registry_models.ApiSpec, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for tag_api_spec_revision + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Registry server but before it is returned to user code. + + We recommend only using this `post_tag_api_spec_revision_with_metadata` + interceptor in new development instead of the `post_tag_api_spec_revision` interceptor. + When both interceptors are used, this `post_tag_api_spec_revision_with_metadata` interceptor runs after the + `post_tag_api_spec_revision` interceptor. The (possibly modified) response returned by + `post_tag_api_spec_revision` will be passed to + `post_tag_api_spec_revision_with_metadata`. + """ + return response, metadata + def pre_update_api( self, request: registry_service.UpdateApiRequest, @@ -1086,12 +1702,35 @@ def pre_update_api( def post_update_api(self, response: registry_models.Api) -> registry_models.Api: """Post-rpc interceptor for update_api - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_api_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Registry server but before - it is returned to user code. + it is returned to user code. This `post_update_api` interceptor runs + before the `post_update_api_with_metadata` interceptor. """ return response + def post_update_api_with_metadata( + self, + response: registry_models.Api, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[registry_models.Api, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_api + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Registry server but before it is returned to user code. + + We recommend only using this `post_update_api_with_metadata` + interceptor in new development instead of the `post_update_api` interceptor. + When both interceptors are used, this `post_update_api_with_metadata` interceptor runs after the + `post_update_api` interceptor. The (possibly modified) response returned by + `post_update_api` will be passed to + `post_update_api_with_metadata`. + """ + return response, metadata + def pre_update_api_deployment( self, request: registry_service.UpdateApiDeploymentRequest, @@ -1112,12 +1751,35 @@ def post_update_api_deployment( ) -> registry_models.ApiDeployment: """Post-rpc interceptor for update_api_deployment - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_api_deployment_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Registry server but before - it is returned to user code. + it is returned to user code. This `post_update_api_deployment` interceptor runs + before the `post_update_api_deployment_with_metadata` interceptor. 
""" return response + def post_update_api_deployment_with_metadata( + self, + response: registry_models.ApiDeployment, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[registry_models.ApiDeployment, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_api_deployment + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Registry server but before it is returned to user code. + + We recommend only using this `post_update_api_deployment_with_metadata` + interceptor in new development instead of the `post_update_api_deployment` interceptor. + When both interceptors are used, this `post_update_api_deployment_with_metadata` interceptor runs after the + `post_update_api_deployment` interceptor. The (possibly modified) response returned by + `post_update_api_deployment` will be passed to + `post_update_api_deployment_with_metadata`. + """ + return response, metadata + def pre_update_api_spec( self, request: registry_service.UpdateApiSpecRequest, @@ -1137,12 +1799,35 @@ def post_update_api_spec( ) -> registry_models.ApiSpec: """Post-rpc interceptor for update_api_spec - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_api_spec_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Registry server but before - it is returned to user code. + it is returned to user code. This `post_update_api_spec` interceptor runs + before the `post_update_api_spec_with_metadata` interceptor. """ return response + def post_update_api_spec_with_metadata( + self, + response: registry_models.ApiSpec, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[registry_models.ApiSpec, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_api_spec + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Registry server but before it is returned to user code. + + We recommend only using this `post_update_api_spec_with_metadata` + interceptor in new development instead of the `post_update_api_spec` interceptor. + When both interceptors are used, this `post_update_api_spec_with_metadata` interceptor runs after the + `post_update_api_spec` interceptor. The (possibly modified) response returned by + `post_update_api_spec` will be passed to + `post_update_api_spec_with_metadata`. + """ + return response, metadata + def pre_update_api_version( self, request: registry_service.UpdateApiVersionRequest, @@ -1163,12 +1848,35 @@ def post_update_api_version( ) -> registry_models.ApiVersion: """Post-rpc interceptor for update_api_version - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_api_version_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Registry server but before - it is returned to user code. + it is returned to user code. This `post_update_api_version` interceptor runs + before the `post_update_api_version_with_metadata` interceptor. 
""" return response + def post_update_api_version_with_metadata( + self, + response: registry_models.ApiVersion, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[registry_models.ApiVersion, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_api_version + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Registry server but before it is returned to user code. + + We recommend only using this `post_update_api_version_with_metadata` + interceptor in new development instead of the `post_update_api_version` interceptor. + When both interceptors are used, this `post_update_api_version_with_metadata` interceptor runs after the + `post_update_api_version` interceptor. The (possibly modified) response returned by + `post_update_api_version` will be passed to + `post_update_api_version_with_metadata`. + """ + return response, metadata + def pre_get_location( self, request: locations_pb2.GetLocationRequest, @@ -1601,6 +2309,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_api(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_api_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1759,6 +2471,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_api_deployment(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_api_deployment_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1923,6 +2639,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_api_spec(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_api_spec_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2078,6 +2798,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_api_version(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_api_version_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2241,6 +2965,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_artifact(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_artifact_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2609,6 +3337,13 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_api_deployment_revision(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_delete_api_deployment_revision_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( 
logging.DEBUG ): # pragma: NO COVER @@ -2876,6 +3611,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_api_spec_revision(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_api_spec_revision_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3239,6 +3978,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_api(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_api_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3392,6 +4135,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_api_deployment(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_api_deployment_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3548,6 +4295,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_api_spec(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_api_spec_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3740,6 +4491,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_api_spec_contents(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_api_spec_contents_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3887,6 +4642,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_api_version(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_api_version_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4040,6 +4799,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_artifact(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_artifact_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4232,6 +4995,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_artifact_contents(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_artifact_contents_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4378,6 +5145,13 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = 
self._interceptor.post_list_api_deployment_revisions(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_list_api_deployment_revisions_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4527,6 +5301,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_api_deployments(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_api_deployments_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4669,6 +5447,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_apis(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_apis_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4816,6 +5598,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_api_spec_revisions(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_api_spec_revisions_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4960,6 +5746,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_api_specs(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_api_specs_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -5108,6 +5898,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_api_versions(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_api_versions_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -5254,6 +6048,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_artifacts(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_artifacts_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -5421,6 +6219,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_replace_artifact(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_replace_artifact_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -5579,6 +6381,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_rollback_api_deployment(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + 
resp, _ = self._interceptor.post_rollback_api_deployment_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -5747,6 +6553,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_rollback_api_spec(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_rollback_api_spec_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -5906,6 +6716,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_tag_api_deployment_revision(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_tag_api_deployment_revision_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -6069,6 +6883,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_tag_api_spec_revision(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_tag_api_spec_revision_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -6218,6 +7036,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_api(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_api_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -6376,6 +7198,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_api_deployment(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_api_deployment_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -6540,6 +7366,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_api_spec(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_api_spec_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -6695,6 +7525,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_api_version(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_api_version_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-apigee-registry/samples/generated_samples/snippet_metadata_google.cloud.apigeeregistry.v1.json b/packages/google-cloud-apigee-registry/samples/generated_samples/snippet_metadata_google.cloud.apigeeregistry.v1.json index 30dc0e5f152e..0df5d9cb9934 100644 --- 
a/packages/google-cloud-apigee-registry/samples/generated_samples/snippet_metadata_google.cloud.apigeeregistry.v1.json +++ b/packages/google-cloud-apigee-registry/samples/generated_samples/snippet_metadata_google.cloud.apigeeregistry.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-apigee-registry", - "version": "0.6.14" + "version": "0.6.15" }, "snippets": [ { diff --git a/packages/google-cloud-apigee-registry/tests/unit/gapic/apigee_registry_v1/test_provisioning.py b/packages/google-cloud-apigee-registry/tests/unit/gapic/apigee_registry_v1/test_provisioning.py index 084db6f89135..31ade30c1d78 100644 --- a/packages/google-cloud-apigee-registry/tests/unit/gapic/apigee_registry_v1/test_provisioning.py +++ b/packages/google-cloud-apigee-registry/tests/unit/gapic/apigee_registry_v1/test_provisioning.py @@ -75,6 +75,13 @@ ) from google.cloud.apigee_registry_v1.types import provisioning_service +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -318,6 +325,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = ProvisioningClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = ProvisioningClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -3049,10 +3099,13 @@ def test_create_instance_rest_interceptors(null_interceptor): ), mock.patch.object( transports.ProvisioningRestInterceptor, "post_create_instance" ) as post, mock.patch.object( + transports.ProvisioningRestInterceptor, "post_create_instance_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ProvisioningRestInterceptor, "pre_create_instance" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = provisioning_service.CreateInstanceRequest.pb( provisioning_service.CreateInstanceRequest() ) @@ -3076,6 +3129,7 @@ def test_create_instance_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), 
metadata client.create_instance( request, @@ -3087,6 +3141,7 @@ def test_create_instance_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_instance_rest_bad_request( @@ -3167,10 +3222,13 @@ def test_delete_instance_rest_interceptors(null_interceptor): ), mock.patch.object( transports.ProvisioningRestInterceptor, "post_delete_instance" ) as post, mock.patch.object( + transports.ProvisioningRestInterceptor, "post_delete_instance_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ProvisioningRestInterceptor, "pre_delete_instance" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = provisioning_service.DeleteInstanceRequest.pb( provisioning_service.DeleteInstanceRequest() ) @@ -3194,6 +3252,7 @@ def test_delete_instance_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.delete_instance( request, @@ -3205,6 +3264,7 @@ def test_delete_instance_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_instance_rest_bad_request( @@ -3293,10 +3353,13 @@ def test_get_instance_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ProvisioningRestInterceptor, "post_get_instance" ) as post, mock.patch.object( + transports.ProvisioningRestInterceptor, "post_get_instance_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ProvisioningRestInterceptor, "pre_get_instance" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = provisioning_service.GetInstanceRequest.pb( provisioning_service.GetInstanceRequest() ) @@ -3322,6 +3385,7 @@ def test_get_instance_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = provisioning_service.Instance() + post_with_metadata.return_value = provisioning_service.Instance(), metadata client.get_instance( request, @@ -3333,6 +3397,7 @@ def test_get_instance_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationRequest): diff --git a/packages/google-cloud-apigee-registry/tests/unit/gapic/apigee_registry_v1/test_registry.py b/packages/google-cloud-apigee-registry/tests/unit/gapic/apigee_registry_v1/test_registry.py index c41ab4b6268a..a70b2936f9d8 100644 --- a/packages/google-cloud-apigee-registry/tests/unit/gapic/apigee_registry_v1/test_registry.py +++ b/packages/google-cloud-apigee-registry/tests/unit/gapic/apigee_registry_v1/test_registry.py @@ -69,6 +69,13 @@ ) from google.cloud.apigee_registry_v1.types import registry_models, registry_service +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -299,6 +306,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = RegistryClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = RegistryClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -23045,10 +23095,13 @@ def test_list_apis_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegistryRestInterceptor, "post_list_apis" ) as post, mock.patch.object( + transports.RegistryRestInterceptor, "post_list_apis_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.RegistryRestInterceptor, "pre_list_apis" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = registry_service.ListApisRequest.pb( registry_service.ListApisRequest() ) @@ -23074,6 +23127,7 @@ def test_list_apis_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = registry_service.ListApisResponse() + post_with_metadata.return_value = registry_service.ListApisResponse(), metadata client.list_apis( request, @@ -23085,6 +23139,7 @@ def test_list_apis_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_api_rest_bad_request(request_type=registry_service.GetApiRequest): @@ -23175,10 +23230,13 @@ def test_get_api_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegistryRestInterceptor, "post_get_api" ) as post, mock.patch.object( + transports.RegistryRestInterceptor, "post_get_api_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.RegistryRestInterceptor, "pre_get_api" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = registry_service.GetApiRequest.pb(registry_service.GetApiRequest()) transcode.return_value = { "method": "post", @@ -23200,6 +23258,7 @@ def test_get_api_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = registry_models.Api() + post_with_metadata.return_value = registry_models.Api(), metadata client.get_api( request, @@ -23211,6 +23270,7 @@ def test_get_api_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_api_rest_bad_request(request_type=registry_service.CreateApiRequest): @@ 
-23380,10 +23440,13 @@ def test_create_api_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegistryRestInterceptor, "post_create_api" ) as post, mock.patch.object( + transports.RegistryRestInterceptor, "post_create_api_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.RegistryRestInterceptor, "pre_create_api" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = registry_service.CreateApiRequest.pb( registry_service.CreateApiRequest() ) @@ -23407,6 +23470,7 @@ def test_create_api_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = registry_models.Api() + post_with_metadata.return_value = registry_models.Api(), metadata client.create_api( request, @@ -23418,6 +23482,7 @@ def test_create_api_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_api_rest_bad_request(request_type=registry_service.UpdateApiRequest): @@ -23587,10 +23652,13 @@ def test_update_api_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegistryRestInterceptor, "post_update_api" ) as post, mock.patch.object( + transports.RegistryRestInterceptor, "post_update_api_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.RegistryRestInterceptor, "pre_update_api" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = registry_service.UpdateApiRequest.pb( registry_service.UpdateApiRequest() ) @@ -23614,6 +23682,7 @@ def test_update_api_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = registry_models.Api() + post_with_metadata.return_value = registry_models.Api(), metadata client.update_api( request, @@ -23625,6 +23694,7 @@ def test_update_api_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_api_rest_bad_request(request_type=registry_service.DeleteApiRequest): @@ -23812,10 +23882,13 @@ def test_list_api_versions_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegistryRestInterceptor, "post_list_api_versions" ) as post, mock.patch.object( + transports.RegistryRestInterceptor, "post_list_api_versions_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.RegistryRestInterceptor, "pre_list_api_versions" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = registry_service.ListApiVersionsRequest.pb( registry_service.ListApiVersionsRequest() ) @@ -23841,6 +23914,10 @@ def test_list_api_versions_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = registry_service.ListApiVersionsResponse() + post_with_metadata.return_value = ( + registry_service.ListApiVersionsResponse(), + metadata, + ) client.list_api_versions( request, @@ -23852,6 +23929,7 @@ def test_list_api_versions_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_api_version_rest_bad_request( @@ -23944,10 +24022,13 @@ def test_get_api_version_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegistryRestInterceptor, "post_get_api_version" ) as post, mock.patch.object( + transports.RegistryRestInterceptor, 
"post_get_api_version_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.RegistryRestInterceptor, "pre_get_api_version" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = registry_service.GetApiVersionRequest.pb( registry_service.GetApiVersionRequest() ) @@ -23971,6 +24052,7 @@ def test_get_api_version_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = registry_models.ApiVersion() + post_with_metadata.return_value = registry_models.ApiVersion(), metadata client.get_api_version( request, @@ -23982,6 +24064,7 @@ def test_get_api_version_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_api_version_rest_bad_request( @@ -24147,10 +24230,13 @@ def test_create_api_version_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegistryRestInterceptor, "post_create_api_version" ) as post, mock.patch.object( + transports.RegistryRestInterceptor, "post_create_api_version_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.RegistryRestInterceptor, "pre_create_api_version" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = registry_service.CreateApiVersionRequest.pb( registry_service.CreateApiVersionRequest() ) @@ -24174,6 +24260,7 @@ def test_create_api_version_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = registry_models.ApiVersion() + post_with_metadata.return_value = registry_models.ApiVersion(), metadata client.create_api_version( request, @@ -24185,6 +24272,7 @@ def test_create_api_version_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_api_version_rest_bad_request( @@ -24358,10 +24446,13 @@ def test_update_api_version_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegistryRestInterceptor, "post_update_api_version" ) as post, mock.patch.object( + transports.RegistryRestInterceptor, "post_update_api_version_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.RegistryRestInterceptor, "pre_update_api_version" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = registry_service.UpdateApiVersionRequest.pb( registry_service.UpdateApiVersionRequest() ) @@ -24385,6 +24476,7 @@ def test_update_api_version_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = registry_models.ApiVersion() + post_with_metadata.return_value = registry_models.ApiVersion(), metadata client.update_api_version( request, @@ -24396,6 +24488,7 @@ def test_update_api_version_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_api_version_rest_bad_request( @@ -24593,10 +24686,13 @@ def test_list_api_specs_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegistryRestInterceptor, "post_list_api_specs" ) as post, mock.patch.object( + transports.RegistryRestInterceptor, "post_list_api_specs_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.RegistryRestInterceptor, "pre_list_api_specs" ) as pre: pre.assert_not_called() post.assert_not_called() + 
post_with_metadata.assert_not_called() pb_message = registry_service.ListApiSpecsRequest.pb( registry_service.ListApiSpecsRequest() ) @@ -24622,6 +24718,10 @@ def test_list_api_specs_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = registry_service.ListApiSpecsResponse() + post_with_metadata.return_value = ( + registry_service.ListApiSpecsResponse(), + metadata, + ) client.list_api_specs( request, @@ -24633,6 +24733,7 @@ def test_list_api_specs_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_api_spec_rest_bad_request(request_type=registry_service.GetApiSpecRequest): @@ -24733,10 +24834,13 @@ def test_get_api_spec_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegistryRestInterceptor, "post_get_api_spec" ) as post, mock.patch.object( + transports.RegistryRestInterceptor, "post_get_api_spec_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.RegistryRestInterceptor, "pre_get_api_spec" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = registry_service.GetApiSpecRequest.pb( registry_service.GetApiSpecRequest() ) @@ -24760,6 +24864,7 @@ def test_get_api_spec_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = registry_models.ApiSpec() + post_with_metadata.return_value = registry_models.ApiSpec(), metadata client.get_api_spec( request, @@ -24771,6 +24876,7 @@ def test_get_api_spec_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_api_spec_contents_rest_bad_request( @@ -24856,10 +24962,13 @@ def test_get_api_spec_contents_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegistryRestInterceptor, "post_get_api_spec_contents" ) as post, mock.patch.object( + transports.RegistryRestInterceptor, "post_get_api_spec_contents_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.RegistryRestInterceptor, "pre_get_api_spec_contents" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = registry_service.GetApiSpecContentsRequest.pb( registry_service.GetApiSpecContentsRequest() ) @@ -24883,6 +24992,7 @@ def test_get_api_spec_contents_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = httpbody_pb2.HttpBody() + post_with_metadata.return_value = httpbody_pb2.HttpBody(), metadata client.get_api_spec_contents( request, @@ -24894,6 +25004,7 @@ def test_get_api_spec_contents_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_api_spec_rest_bad_request( @@ -25079,10 +25190,13 @@ def test_create_api_spec_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegistryRestInterceptor, "post_create_api_spec" ) as post, mock.patch.object( + transports.RegistryRestInterceptor, "post_create_api_spec_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.RegistryRestInterceptor, "pre_create_api_spec" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = registry_service.CreateApiSpecRequest.pb( registry_service.CreateApiSpecRequest() ) @@ -25106,6 +25220,7 @@ def 
test_create_api_spec_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = registry_models.ApiSpec() + post_with_metadata.return_value = registry_models.ApiSpec(), metadata client.create_api_spec( request, @@ -25117,6 +25232,7 @@ def test_create_api_spec_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_api_spec_rest_bad_request( @@ -25306,10 +25422,13 @@ def test_update_api_spec_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegistryRestInterceptor, "post_update_api_spec" ) as post, mock.patch.object( + transports.RegistryRestInterceptor, "post_update_api_spec_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.RegistryRestInterceptor, "pre_update_api_spec" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = registry_service.UpdateApiSpecRequest.pb( registry_service.UpdateApiSpecRequest() ) @@ -25333,6 +25452,7 @@ def test_update_api_spec_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = registry_models.ApiSpec() + post_with_metadata.return_value = registry_models.ApiSpec(), metadata client.update_api_spec( request, @@ -25344,6 +25464,7 @@ def test_update_api_spec_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_api_spec_rest_bad_request( @@ -25557,10 +25678,13 @@ def test_tag_api_spec_revision_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegistryRestInterceptor, "post_tag_api_spec_revision" ) as post, mock.patch.object( + transports.RegistryRestInterceptor, "post_tag_api_spec_revision_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.RegistryRestInterceptor, "pre_tag_api_spec_revision" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = registry_service.TagApiSpecRevisionRequest.pb( registry_service.TagApiSpecRevisionRequest() ) @@ -25584,6 +25708,7 @@ def test_tag_api_spec_revision_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = registry_models.ApiSpec() + post_with_metadata.return_value = registry_models.ApiSpec(), metadata client.tag_api_spec_revision( request, @@ -25595,6 +25720,7 @@ def test_tag_api_spec_revision_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_api_spec_revisions_rest_bad_request( @@ -25681,10 +25807,13 @@ def test_list_api_spec_revisions_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegistryRestInterceptor, "post_list_api_spec_revisions" ) as post, mock.patch.object( + transports.RegistryRestInterceptor, "post_list_api_spec_revisions_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.RegistryRestInterceptor, "pre_list_api_spec_revisions" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = registry_service.ListApiSpecRevisionsRequest.pb( registry_service.ListApiSpecRevisionsRequest() ) @@ -25710,6 +25839,10 @@ def test_list_api_spec_revisions_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = registry_service.ListApiSpecRevisionsResponse() + 
post_with_metadata.return_value = ( + registry_service.ListApiSpecRevisionsResponse(), + metadata, + ) client.list_api_spec_revisions( request, @@ -25721,6 +25854,7 @@ def test_list_api_spec_revisions_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_rollback_api_spec_rest_bad_request( @@ -25823,10 +25957,13 @@ def test_rollback_api_spec_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegistryRestInterceptor, "post_rollback_api_spec" ) as post, mock.patch.object( + transports.RegistryRestInterceptor, "post_rollback_api_spec_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.RegistryRestInterceptor, "pre_rollback_api_spec" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = registry_service.RollbackApiSpecRequest.pb( registry_service.RollbackApiSpecRequest() ) @@ -25850,6 +25987,7 @@ def test_rollback_api_spec_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = registry_models.ApiSpec() + post_with_metadata.return_value = registry_models.ApiSpec(), metadata client.rollback_api_spec( request, @@ -25861,6 +25999,7 @@ def test_rollback_api_spec_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_api_spec_revision_rest_bad_request( @@ -25963,10 +26102,14 @@ def test_delete_api_spec_revision_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegistryRestInterceptor, "post_delete_api_spec_revision" ) as post, mock.patch.object( + transports.RegistryRestInterceptor, + "post_delete_api_spec_revision_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.RegistryRestInterceptor, "pre_delete_api_spec_revision" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = registry_service.DeleteApiSpecRevisionRequest.pb( registry_service.DeleteApiSpecRevisionRequest() ) @@ -25990,6 +26133,7 @@ def test_delete_api_spec_revision_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = registry_models.ApiSpec() + post_with_metadata.return_value = registry_models.ApiSpec(), metadata client.delete_api_spec_revision( request, @@ -26001,6 +26145,7 @@ def test_delete_api_spec_revision_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_api_deployments_rest_bad_request( @@ -26083,10 +26228,13 @@ def test_list_api_deployments_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegistryRestInterceptor, "post_list_api_deployments" ) as post, mock.patch.object( + transports.RegistryRestInterceptor, "post_list_api_deployments_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.RegistryRestInterceptor, "pre_list_api_deployments" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = registry_service.ListApiDeploymentsRequest.pb( registry_service.ListApiDeploymentsRequest() ) @@ -26112,6 +26260,10 @@ def test_list_api_deployments_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = registry_service.ListApiDeploymentsResponse() + post_with_metadata.return_value = ( + 
registry_service.ListApiDeploymentsResponse(), + metadata, + ) client.list_api_deployments( request, @@ -26123,6 +26275,7 @@ def test_list_api_deployments_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_api_deployment_rest_bad_request( @@ -26225,10 +26378,13 @@ def test_get_api_deployment_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegistryRestInterceptor, "post_get_api_deployment" ) as post, mock.patch.object( + transports.RegistryRestInterceptor, "post_get_api_deployment_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.RegistryRestInterceptor, "pre_get_api_deployment" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = registry_service.GetApiDeploymentRequest.pb( registry_service.GetApiDeploymentRequest() ) @@ -26254,6 +26410,7 @@ def test_get_api_deployment_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = registry_models.ApiDeployment() + post_with_metadata.return_value = registry_models.ApiDeployment(), metadata client.get_api_deployment( request, @@ -26265,6 +26422,7 @@ def test_get_api_deployment_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_api_deployment_rest_bad_request( @@ -26448,10 +26606,13 @@ def test_create_api_deployment_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegistryRestInterceptor, "post_create_api_deployment" ) as post, mock.patch.object( + transports.RegistryRestInterceptor, "post_create_api_deployment_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.RegistryRestInterceptor, "pre_create_api_deployment" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = registry_service.CreateApiDeploymentRequest.pb( registry_service.CreateApiDeploymentRequest() ) @@ -26477,6 +26638,7 @@ def test_create_api_deployment_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = registry_models.ApiDeployment() + post_with_metadata.return_value = registry_models.ApiDeployment(), metadata client.create_api_deployment( request, @@ -26488,6 +26650,7 @@ def test_create_api_deployment_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_api_deployment_rest_bad_request( @@ -26679,10 +26842,13 @@ def test_update_api_deployment_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegistryRestInterceptor, "post_update_api_deployment" ) as post, mock.patch.object( + transports.RegistryRestInterceptor, "post_update_api_deployment_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.RegistryRestInterceptor, "pre_update_api_deployment" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = registry_service.UpdateApiDeploymentRequest.pb( registry_service.UpdateApiDeploymentRequest() ) @@ -26708,6 +26874,7 @@ def test_update_api_deployment_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = registry_models.ApiDeployment() + post_with_metadata.return_value = registry_models.ApiDeployment(), metadata client.update_api_deployment( request, @@ 
-26719,6 +26886,7 @@ def test_update_api_deployment_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_api_deployment_rest_bad_request( @@ -26932,10 +27100,14 @@ def test_tag_api_deployment_revision_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegistryRestInterceptor, "post_tag_api_deployment_revision" ) as post, mock.patch.object( + transports.RegistryRestInterceptor, + "post_tag_api_deployment_revision_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.RegistryRestInterceptor, "pre_tag_api_deployment_revision" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = registry_service.TagApiDeploymentRevisionRequest.pb( registry_service.TagApiDeploymentRevisionRequest() ) @@ -26961,6 +27133,7 @@ def test_tag_api_deployment_revision_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = registry_models.ApiDeployment() + post_with_metadata.return_value = registry_models.ApiDeployment(), metadata client.tag_api_deployment_revision( request, @@ -26972,6 +27145,7 @@ def test_tag_api_deployment_revision_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_api_deployment_revisions_rest_bad_request( @@ -27060,10 +27234,14 @@ def test_list_api_deployment_revisions_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegistryRestInterceptor, "post_list_api_deployment_revisions" ) as post, mock.patch.object( + transports.RegistryRestInterceptor, + "post_list_api_deployment_revisions_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.RegistryRestInterceptor, "pre_list_api_deployment_revisions" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = registry_service.ListApiDeploymentRevisionsRequest.pb( registry_service.ListApiDeploymentRevisionsRequest() ) @@ -27089,6 +27267,10 @@ def test_list_api_deployment_revisions_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = registry_service.ListApiDeploymentRevisionsResponse() + post_with_metadata.return_value = ( + registry_service.ListApiDeploymentRevisionsResponse(), + metadata, + ) client.list_api_deployment_revisions( request, @@ -27100,6 +27282,7 @@ def test_list_api_deployment_revisions_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_rollback_api_deployment_rest_bad_request( @@ -27202,10 +27385,13 @@ def test_rollback_api_deployment_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegistryRestInterceptor, "post_rollback_api_deployment" ) as post, mock.patch.object( + transports.RegistryRestInterceptor, "post_rollback_api_deployment_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.RegistryRestInterceptor, "pre_rollback_api_deployment" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = registry_service.RollbackApiDeploymentRequest.pb( registry_service.RollbackApiDeploymentRequest() ) @@ -27231,6 +27417,7 @@ def test_rollback_api_deployment_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = 
registry_models.ApiDeployment() + post_with_metadata.return_value = registry_models.ApiDeployment(), metadata client.rollback_api_deployment( request, @@ -27242,6 +27429,7 @@ def test_rollback_api_deployment_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_api_deployment_revision_rest_bad_request( @@ -27344,10 +27532,14 @@ def test_delete_api_deployment_revision_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegistryRestInterceptor, "post_delete_api_deployment_revision" ) as post, mock.patch.object( + transports.RegistryRestInterceptor, + "post_delete_api_deployment_revision_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.RegistryRestInterceptor, "pre_delete_api_deployment_revision" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = registry_service.DeleteApiDeploymentRevisionRequest.pb( registry_service.DeleteApiDeploymentRevisionRequest() ) @@ -27373,6 +27565,7 @@ def test_delete_api_deployment_revision_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = registry_models.ApiDeployment() + post_with_metadata.return_value = registry_models.ApiDeployment(), metadata client.delete_api_deployment_revision( request, @@ -27384,6 +27577,7 @@ def test_delete_api_deployment_revision_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_artifacts_rest_bad_request( @@ -27466,10 +27660,13 @@ def test_list_artifacts_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegistryRestInterceptor, "post_list_artifacts" ) as post, mock.patch.object( + transports.RegistryRestInterceptor, "post_list_artifacts_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.RegistryRestInterceptor, "pre_list_artifacts" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = registry_service.ListArtifactsRequest.pb( registry_service.ListArtifactsRequest() ) @@ -27495,6 +27692,10 @@ def test_list_artifacts_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = registry_service.ListArtifactsResponse() + post_with_metadata.return_value = ( + registry_service.ListArtifactsResponse(), + metadata, + ) client.list_artifacts( request, @@ -27506,6 +27707,7 @@ def test_list_artifacts_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_artifact_rest_bad_request( @@ -27596,10 +27798,13 @@ def test_get_artifact_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegistryRestInterceptor, "post_get_artifact" ) as post, mock.patch.object( + transports.RegistryRestInterceptor, "post_get_artifact_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.RegistryRestInterceptor, "pre_get_artifact" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = registry_service.GetArtifactRequest.pb( registry_service.GetArtifactRequest() ) @@ -27623,6 +27828,7 @@ def test_get_artifact_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = registry_models.Artifact() + post_with_metadata.return_value = registry_models.Artifact(), 
metadata client.get_artifact( request, @@ -27634,6 +27840,7 @@ def test_get_artifact_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_artifact_contents_rest_bad_request( @@ -27715,10 +27922,13 @@ def test_get_artifact_contents_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegistryRestInterceptor, "post_get_artifact_contents" ) as post, mock.patch.object( + transports.RegistryRestInterceptor, "post_get_artifact_contents_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.RegistryRestInterceptor, "pre_get_artifact_contents" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = registry_service.GetArtifactContentsRequest.pb( registry_service.GetArtifactContentsRequest() ) @@ -27742,6 +27952,7 @@ def test_get_artifact_contents_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = httpbody_pb2.HttpBody() + post_with_metadata.return_value = httpbody_pb2.HttpBody(), metadata client.get_artifact_contents( request, @@ -27753,6 +27964,7 @@ def test_get_artifact_contents_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_artifact_rest_bad_request( @@ -27919,10 +28131,13 @@ def test_create_artifact_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegistryRestInterceptor, "post_create_artifact" ) as post, mock.patch.object( + transports.RegistryRestInterceptor, "post_create_artifact_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.RegistryRestInterceptor, "pre_create_artifact" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = registry_service.CreateArtifactRequest.pb( registry_service.CreateArtifactRequest() ) @@ -27946,6 +28161,7 @@ def test_create_artifact_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = registry_models.Artifact() + post_with_metadata.return_value = registry_models.Artifact(), metadata client.create_artifact( request, @@ -27957,6 +28173,7 @@ def test_create_artifact_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_replace_artifact_rest_bad_request( @@ -28127,10 +28344,13 @@ def test_replace_artifact_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegistryRestInterceptor, "post_replace_artifact" ) as post, mock.patch.object( + transports.RegistryRestInterceptor, "post_replace_artifact_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.RegistryRestInterceptor, "pre_replace_artifact" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = registry_service.ReplaceArtifactRequest.pb( registry_service.ReplaceArtifactRequest() ) @@ -28154,6 +28374,7 @@ def test_replace_artifact_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = registry_models.Artifact() + post_with_metadata.return_value = registry_models.Artifact(), metadata client.replace_artifact( request, @@ -28165,6 +28386,7 @@ def test_replace_artifact_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def 
test_delete_artifact_rest_bad_request( diff --git a/packages/google-cloud-apihub/CHANGELOG.md b/packages/google-cloud-apihub/CHANGELOG.md index b0465aa80d28..d7a824c86e26 100644 --- a/packages/google-cloud-apihub/CHANGELOG.md +++ b/packages/google-cloud-apihub/CHANGELOG.md @@ -1,5 +1,13 @@ # Changelog +## [0.2.4](https://github.com/googleapis/google-cloud-python/compare/google-cloud-apihub-v0.2.3...google-cloud-apihub-v0.2.4) (2025-02-12) + + +### Features + +* Add REST Interceptors which support reading metadata ([a961bc0](https://github.com/googleapis/google-cloud-python/commit/a961bc029201b72fc4923490aeb3d82781853e6a)) +* Add support for reading selective GAPIC generation methods from service YAML ([a961bc0](https://github.com/googleapis/google-cloud-python/commit/a961bc029201b72fc4923490aeb3d82781853e6a)) + ## [0.2.3](https://github.com/googleapis/google-cloud-python/compare/google-cloud-apihub-v0.2.2...google-cloud-apihub-v0.2.3) (2024-12-12) diff --git a/packages/google-cloud-apihub/google/cloud/apihub/gapic_version.py b/packages/google-cloud-apihub/google/cloud/apihub/gapic_version.py index 355df6b536f8..668eac0d72ce 100644 --- a/packages/google-cloud-apihub/google/cloud/apihub/gapic_version.py +++ b/packages/google-cloud-apihub/google/cloud/apihub/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.2.3" # {x-release-please-version} +__version__ = "0.2.4" # {x-release-please-version} diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/gapic_version.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/gapic_version.py index 355df6b536f8..668eac0d72ce 100644 --- a/packages/google-cloud-apihub/google/cloud/apihub_v1/gapic_version.py +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.2.3" # {x-release-please-version} +__version__ = "0.2.4" # {x-release-please-version} diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub/client.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub/client.py index 9796f05470ca..02924683eff5 100644 --- a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub/client.py +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -653,6 +655,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. 
+ """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -5215,16 +5244,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -5270,16 +5303,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def delete_operation( self, @@ -5436,16 +5473,20 @@ def get_location( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def list_locations( self, @@ -5491,16 +5532,20 @@ def list_locations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub/transports/rest.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub/transports/rest.py index f1623721d63a..87af2246f375 100644 --- a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub/transports/rest.py +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub/transports/rest.py @@ -350,12 +350,35 @@ def pre_create_api( def post_create_api(self, response: common_fields.Api) -> common_fields.Api: """Post-rpc interceptor for create_api - Override in a subclass to manipulate the response + DEPRECATED. 
Please use the `post_create_api_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ApiHub server but before - it is returned to user code. + it is returned to user code. This `post_create_api` interceptor runs + before the `post_create_api_with_metadata` interceptor. """ return response + def post_create_api_with_metadata( + self, + response: common_fields.Api, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[common_fields.Api, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_api + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ApiHub server but before it is returned to user code. + + We recommend only using this `post_create_api_with_metadata` + interceptor in new development instead of the `post_create_api` interceptor. + When both interceptors are used, this `post_create_api_with_metadata` interceptor runs after the + `post_create_api` interceptor. The (possibly modified) response returned by + `post_create_api` will be passed to + `post_create_api_with_metadata`. + """ + return response, metadata + def pre_create_attribute( self, request: apihub_service.CreateAttributeRequest, @@ -375,12 +398,35 @@ def post_create_attribute( ) -> common_fields.Attribute: """Post-rpc interceptor for create_attribute - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_attribute_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ApiHub server but before - it is returned to user code. + it is returned to user code. This `post_create_attribute` interceptor runs + before the `post_create_attribute_with_metadata` interceptor. """ return response + def post_create_attribute_with_metadata( + self, + response: common_fields.Attribute, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[common_fields.Attribute, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_attribute + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ApiHub server but before it is returned to user code. + + We recommend only using this `post_create_attribute_with_metadata` + interceptor in new development instead of the `post_create_attribute` interceptor. + When both interceptors are used, this `post_create_attribute_with_metadata` interceptor runs after the + `post_create_attribute` interceptor. The (possibly modified) response returned by + `post_create_attribute` will be passed to + `post_create_attribute_with_metadata`. + """ + return response, metadata + def pre_create_deployment( self, request: apihub_service.CreateDeploymentRequest, @@ -400,12 +446,35 @@ def post_create_deployment( ) -> common_fields.Deployment: """Post-rpc interceptor for create_deployment - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_deployment_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ApiHub server but before - it is returned to user code. + it is returned to user code. This `post_create_deployment` interceptor runs + before the `post_create_deployment_with_metadata` interceptor. 
""" return response + def post_create_deployment_with_metadata( + self, + response: common_fields.Deployment, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[common_fields.Deployment, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_deployment + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ApiHub server but before it is returned to user code. + + We recommend only using this `post_create_deployment_with_metadata` + interceptor in new development instead of the `post_create_deployment` interceptor. + When both interceptors are used, this `post_create_deployment_with_metadata` interceptor runs after the + `post_create_deployment` interceptor. The (possibly modified) response returned by + `post_create_deployment` will be passed to + `post_create_deployment_with_metadata`. + """ + return response, metadata + def pre_create_external_api( self, request: apihub_service.CreateExternalApiRequest, @@ -425,12 +494,35 @@ def post_create_external_api( ) -> common_fields.ExternalApi: """Post-rpc interceptor for create_external_api - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_external_api_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ApiHub server but before - it is returned to user code. + it is returned to user code. This `post_create_external_api` interceptor runs + before the `post_create_external_api_with_metadata` interceptor. """ return response + def post_create_external_api_with_metadata( + self, + response: common_fields.ExternalApi, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[common_fields.ExternalApi, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_external_api + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ApiHub server but before it is returned to user code. + + We recommend only using this `post_create_external_api_with_metadata` + interceptor in new development instead of the `post_create_external_api` interceptor. + When both interceptors are used, this `post_create_external_api_with_metadata` interceptor runs after the + `post_create_external_api` interceptor. The (possibly modified) response returned by + `post_create_external_api` will be passed to + `post_create_external_api_with_metadata`. + """ + return response, metadata + def pre_create_spec( self, request: apihub_service.CreateSpecRequest, @@ -448,12 +540,35 @@ def pre_create_spec( def post_create_spec(self, response: common_fields.Spec) -> common_fields.Spec: """Post-rpc interceptor for create_spec - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_spec_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ApiHub server but before - it is returned to user code. + it is returned to user code. This `post_create_spec` interceptor runs + before the `post_create_spec_with_metadata` interceptor. 
""" return response + def post_create_spec_with_metadata( + self, + response: common_fields.Spec, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[common_fields.Spec, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_spec + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ApiHub server but before it is returned to user code. + + We recommend only using this `post_create_spec_with_metadata` + interceptor in new development instead of the `post_create_spec` interceptor. + When both interceptors are used, this `post_create_spec_with_metadata` interceptor runs after the + `post_create_spec` interceptor. The (possibly modified) response returned by + `post_create_spec` will be passed to + `post_create_spec_with_metadata`. + """ + return response, metadata + def pre_create_version( self, request: apihub_service.CreateVersionRequest, @@ -473,12 +588,35 @@ def post_create_version( ) -> common_fields.Version: """Post-rpc interceptor for create_version - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_version_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ApiHub server but before - it is returned to user code. + it is returned to user code. This `post_create_version` interceptor runs + before the `post_create_version_with_metadata` interceptor. """ return response + def post_create_version_with_metadata( + self, + response: common_fields.Version, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[common_fields.Version, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_version + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ApiHub server but before it is returned to user code. + + We recommend only using this `post_create_version_with_metadata` + interceptor in new development instead of the `post_create_version` interceptor. + When both interceptors are used, this `post_create_version_with_metadata` interceptor runs after the + `post_create_version` interceptor. The (possibly modified) response returned by + `post_create_version` will be passed to + `post_create_version_with_metadata`. + """ + return response, metadata + def pre_delete_api( self, request: apihub_service.DeleteApiRequest, @@ -578,12 +716,35 @@ def pre_get_api( def post_get_api(self, response: common_fields.Api) -> common_fields.Api: """Post-rpc interceptor for get_api - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_api_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ApiHub server but before - it is returned to user code. + it is returned to user code. This `post_get_api` interceptor runs + before the `post_get_api_with_metadata` interceptor. """ return response + def post_get_api_with_metadata( + self, + response: common_fields.Api, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[common_fields.Api, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_api + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ApiHub server but before it is returned to user code. + + We recommend only using this `post_get_api_with_metadata` + interceptor in new development instead of the `post_get_api` interceptor. 
+ When both interceptors are used, this `post_get_api_with_metadata` interceptor runs after the + `post_get_api` interceptor. The (possibly modified) response returned by + `post_get_api` will be passed to + `post_get_api_with_metadata`. + """ + return response, metadata + def pre_get_api_operation( self, request: apihub_service.GetApiOperationRequest, @@ -603,12 +764,35 @@ def post_get_api_operation( ) -> common_fields.ApiOperation: """Post-rpc interceptor for get_api_operation - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_api_operation_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ApiHub server but before - it is returned to user code. + it is returned to user code. This `post_get_api_operation` interceptor runs + before the `post_get_api_operation_with_metadata` interceptor. """ return response + def post_get_api_operation_with_metadata( + self, + response: common_fields.ApiOperation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[common_fields.ApiOperation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_api_operation + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ApiHub server but before it is returned to user code. + + We recommend only using this `post_get_api_operation_with_metadata` + interceptor in new development instead of the `post_get_api_operation` interceptor. + When both interceptors are used, this `post_get_api_operation_with_metadata` interceptor runs after the + `post_get_api_operation` interceptor. The (possibly modified) response returned by + `post_get_api_operation` will be passed to + `post_get_api_operation_with_metadata`. + """ + return response, metadata + def pre_get_attribute( self, request: apihub_service.GetAttributeRequest, @@ -628,12 +812,35 @@ def post_get_attribute( ) -> common_fields.Attribute: """Post-rpc interceptor for get_attribute - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_attribute_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ApiHub server but before - it is returned to user code. + it is returned to user code. This `post_get_attribute` interceptor runs + before the `post_get_attribute_with_metadata` interceptor. """ return response + def post_get_attribute_with_metadata( + self, + response: common_fields.Attribute, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[common_fields.Attribute, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_attribute + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ApiHub server but before it is returned to user code. + + We recommend only using this `post_get_attribute_with_metadata` + interceptor in new development instead of the `post_get_attribute` interceptor. + When both interceptors are used, this `post_get_attribute_with_metadata` interceptor runs after the + `post_get_attribute` interceptor. The (possibly modified) response returned by + `post_get_attribute` will be passed to + `post_get_attribute_with_metadata`. 
+ """ + return response, metadata + def pre_get_definition( self, request: apihub_service.GetDefinitionRequest, @@ -653,12 +860,35 @@ def post_get_definition( ) -> common_fields.Definition: """Post-rpc interceptor for get_definition - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_definition_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ApiHub server but before - it is returned to user code. + it is returned to user code. This `post_get_definition` interceptor runs + before the `post_get_definition_with_metadata` interceptor. """ return response + def post_get_definition_with_metadata( + self, + response: common_fields.Definition, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[common_fields.Definition, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_definition + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ApiHub server but before it is returned to user code. + + We recommend only using this `post_get_definition_with_metadata` + interceptor in new development instead of the `post_get_definition` interceptor. + When both interceptors are used, this `post_get_definition_with_metadata` interceptor runs after the + `post_get_definition` interceptor. The (possibly modified) response returned by + `post_get_definition` will be passed to + `post_get_definition_with_metadata`. + """ + return response, metadata + def pre_get_deployment( self, request: apihub_service.GetDeploymentRequest, @@ -678,12 +908,35 @@ def post_get_deployment( ) -> common_fields.Deployment: """Post-rpc interceptor for get_deployment - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_deployment_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ApiHub server but before - it is returned to user code. + it is returned to user code. This `post_get_deployment` interceptor runs + before the `post_get_deployment_with_metadata` interceptor. """ return response + def post_get_deployment_with_metadata( + self, + response: common_fields.Deployment, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[common_fields.Deployment, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_deployment + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ApiHub server but before it is returned to user code. + + We recommend only using this `post_get_deployment_with_metadata` + interceptor in new development instead of the `post_get_deployment` interceptor. + When both interceptors are used, this `post_get_deployment_with_metadata` interceptor runs after the + `post_get_deployment` interceptor. The (possibly modified) response returned by + `post_get_deployment` will be passed to + `post_get_deployment_with_metadata`. + """ + return response, metadata + def pre_get_external_api( self, request: apihub_service.GetExternalApiRequest, @@ -703,12 +956,35 @@ def post_get_external_api( ) -> common_fields.ExternalApi: """Post-rpc interceptor for get_external_api - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_external_api_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response after it is returned by the ApiHub server but before - it is returned to user code. + it is returned to user code. This `post_get_external_api` interceptor runs + before the `post_get_external_api_with_metadata` interceptor. """ return response + def post_get_external_api_with_metadata( + self, + response: common_fields.ExternalApi, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[common_fields.ExternalApi, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_external_api + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ApiHub server but before it is returned to user code. + + We recommend only using this `post_get_external_api_with_metadata` + interceptor in new development instead of the `post_get_external_api` interceptor. + When both interceptors are used, this `post_get_external_api_with_metadata` interceptor runs after the + `post_get_external_api` interceptor. The (possibly modified) response returned by + `post_get_external_api` will be passed to + `post_get_external_api_with_metadata`. + """ + return response, metadata + def pre_get_spec( self, request: apihub_service.GetSpecRequest, @@ -724,12 +1000,35 @@ def pre_get_spec( def post_get_spec(self, response: common_fields.Spec) -> common_fields.Spec: """Post-rpc interceptor for get_spec - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_spec_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ApiHub server but before - it is returned to user code. + it is returned to user code. This `post_get_spec` interceptor runs + before the `post_get_spec_with_metadata` interceptor. """ return response + def post_get_spec_with_metadata( + self, + response: common_fields.Spec, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[common_fields.Spec, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_spec + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ApiHub server but before it is returned to user code. + + We recommend only using this `post_get_spec_with_metadata` + interceptor in new development instead of the `post_get_spec` interceptor. + When both interceptors are used, this `post_get_spec_with_metadata` interceptor runs after the + `post_get_spec` interceptor. The (possibly modified) response returned by + `post_get_spec` will be passed to + `post_get_spec_with_metadata`. + """ + return response, metadata + def pre_get_spec_contents( self, request: apihub_service.GetSpecContentsRequest, @@ -749,12 +1048,35 @@ def post_get_spec_contents( ) -> common_fields.SpecContents: """Post-rpc interceptor for get_spec_contents - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_spec_contents_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ApiHub server but before - it is returned to user code. + it is returned to user code. This `post_get_spec_contents` interceptor runs + before the `post_get_spec_contents_with_metadata` interceptor. 
""" return response + def post_get_spec_contents_with_metadata( + self, + response: common_fields.SpecContents, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[common_fields.SpecContents, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_spec_contents + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ApiHub server but before it is returned to user code. + + We recommend only using this `post_get_spec_contents_with_metadata` + interceptor in new development instead of the `post_get_spec_contents` interceptor. + When both interceptors are used, this `post_get_spec_contents_with_metadata` interceptor runs after the + `post_get_spec_contents` interceptor. The (possibly modified) response returned by + `post_get_spec_contents` will be passed to + `post_get_spec_contents_with_metadata`. + """ + return response, metadata + def pre_get_version( self, request: apihub_service.GetVersionRequest, @@ -774,12 +1096,35 @@ def post_get_version( ) -> common_fields.Version: """Post-rpc interceptor for get_version - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_version_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ApiHub server but before - it is returned to user code. + it is returned to user code. This `post_get_version` interceptor runs + before the `post_get_version_with_metadata` interceptor. """ return response + def post_get_version_with_metadata( + self, + response: common_fields.Version, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[common_fields.Version, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_version + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ApiHub server but before it is returned to user code. + + We recommend only using this `post_get_version_with_metadata` + interceptor in new development instead of the `post_get_version` interceptor. + When both interceptors are used, this `post_get_version_with_metadata` interceptor runs after the + `post_get_version` interceptor. The (possibly modified) response returned by + `post_get_version` will be passed to + `post_get_version_with_metadata`. + """ + return response, metadata + def pre_list_api_operations( self, request: apihub_service.ListApiOperationsRequest, @@ -799,12 +1144,38 @@ def post_list_api_operations( ) -> apihub_service.ListApiOperationsResponse: """Post-rpc interceptor for list_api_operations - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_api_operations_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ApiHub server but before - it is returned to user code. + it is returned to user code. This `post_list_api_operations` interceptor runs + before the `post_list_api_operations_with_metadata` interceptor. 
""" return response + def post_list_api_operations_with_metadata( + self, + response: apihub_service.ListApiOperationsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + apihub_service.ListApiOperationsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_api_operations + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ApiHub server but before it is returned to user code. + + We recommend only using this `post_list_api_operations_with_metadata` + interceptor in new development instead of the `post_list_api_operations` interceptor. + When both interceptors are used, this `post_list_api_operations_with_metadata` interceptor runs after the + `post_list_api_operations` interceptor. The (possibly modified) response returned by + `post_list_api_operations` will be passed to + `post_list_api_operations_with_metadata`. + """ + return response, metadata + def pre_list_apis( self, request: apihub_service.ListApisRequest, @@ -822,12 +1193,37 @@ def post_list_apis( ) -> apihub_service.ListApisResponse: """Post-rpc interceptor for list_apis - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_apis_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ApiHub server but before - it is returned to user code. + it is returned to user code. This `post_list_apis` interceptor runs + before the `post_list_apis_with_metadata` interceptor. """ return response + def post_list_apis_with_metadata( + self, + response: apihub_service.ListApisResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + apihub_service.ListApisResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_apis + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ApiHub server but before it is returned to user code. + + We recommend only using this `post_list_apis_with_metadata` + interceptor in new development instead of the `post_list_apis` interceptor. + When both interceptors are used, this `post_list_apis_with_metadata` interceptor runs after the + `post_list_apis` interceptor. The (possibly modified) response returned by + `post_list_apis` will be passed to + `post_list_apis_with_metadata`. + """ + return response, metadata + def pre_list_attributes( self, request: apihub_service.ListAttributesRequest, @@ -847,12 +1243,37 @@ def post_list_attributes( ) -> apihub_service.ListAttributesResponse: """Post-rpc interceptor for list_attributes - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_attributes_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ApiHub server but before - it is returned to user code. + it is returned to user code. This `post_list_attributes` interceptor runs + before the `post_list_attributes_with_metadata` interceptor. 
""" return response + def post_list_attributes_with_metadata( + self, + response: apihub_service.ListAttributesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + apihub_service.ListAttributesResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_attributes + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ApiHub server but before it is returned to user code. + + We recommend only using this `post_list_attributes_with_metadata` + interceptor in new development instead of the `post_list_attributes` interceptor. + When both interceptors are used, this `post_list_attributes_with_metadata` interceptor runs after the + `post_list_attributes` interceptor. The (possibly modified) response returned by + `post_list_attributes` will be passed to + `post_list_attributes_with_metadata`. + """ + return response, metadata + def pre_list_deployments( self, request: apihub_service.ListDeploymentsRequest, @@ -872,12 +1293,37 @@ def post_list_deployments( ) -> apihub_service.ListDeploymentsResponse: """Post-rpc interceptor for list_deployments - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_deployments_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ApiHub server but before - it is returned to user code. + it is returned to user code. This `post_list_deployments` interceptor runs + before the `post_list_deployments_with_metadata` interceptor. """ return response + def post_list_deployments_with_metadata( + self, + response: apihub_service.ListDeploymentsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + apihub_service.ListDeploymentsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_deployments + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ApiHub server but before it is returned to user code. + + We recommend only using this `post_list_deployments_with_metadata` + interceptor in new development instead of the `post_list_deployments` interceptor. + When both interceptors are used, this `post_list_deployments_with_metadata` interceptor runs after the + `post_list_deployments` interceptor. The (possibly modified) response returned by + `post_list_deployments` will be passed to + `post_list_deployments_with_metadata`. + """ + return response, metadata + def pre_list_external_apis( self, request: apihub_service.ListExternalApisRequest, @@ -897,12 +1343,37 @@ def post_list_external_apis( ) -> apihub_service.ListExternalApisResponse: """Post-rpc interceptor for list_external_apis - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_external_apis_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ApiHub server but before - it is returned to user code. + it is returned to user code. This `post_list_external_apis` interceptor runs + before the `post_list_external_apis_with_metadata` interceptor. 
""" return response + def post_list_external_apis_with_metadata( + self, + response: apihub_service.ListExternalApisResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + apihub_service.ListExternalApisResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_external_apis + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ApiHub server but before it is returned to user code. + + We recommend only using this `post_list_external_apis_with_metadata` + interceptor in new development instead of the `post_list_external_apis` interceptor. + When both interceptors are used, this `post_list_external_apis_with_metadata` interceptor runs after the + `post_list_external_apis` interceptor. The (possibly modified) response returned by + `post_list_external_apis` will be passed to + `post_list_external_apis_with_metadata`. + """ + return response, metadata + def pre_list_specs( self, request: apihub_service.ListSpecsRequest, @@ -922,12 +1393,37 @@ def post_list_specs( ) -> apihub_service.ListSpecsResponse: """Post-rpc interceptor for list_specs - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_specs_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ApiHub server but before - it is returned to user code. + it is returned to user code. This `post_list_specs` interceptor runs + before the `post_list_specs_with_metadata` interceptor. """ return response + def post_list_specs_with_metadata( + self, + response: apihub_service.ListSpecsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + apihub_service.ListSpecsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_specs + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ApiHub server but before it is returned to user code. + + We recommend only using this `post_list_specs_with_metadata` + interceptor in new development instead of the `post_list_specs` interceptor. + When both interceptors are used, this `post_list_specs_with_metadata` interceptor runs after the + `post_list_specs` interceptor. The (possibly modified) response returned by + `post_list_specs` will be passed to + `post_list_specs_with_metadata`. + """ + return response, metadata + def pre_list_versions( self, request: apihub_service.ListVersionsRequest, @@ -947,12 +1443,37 @@ def post_list_versions( ) -> apihub_service.ListVersionsResponse: """Post-rpc interceptor for list_versions - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_versions_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ApiHub server but before - it is returned to user code. + it is returned to user code. This `post_list_versions` interceptor runs + before the `post_list_versions_with_metadata` interceptor. """ return response + def post_list_versions_with_metadata( + self, + response: apihub_service.ListVersionsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + apihub_service.ListVersionsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_versions + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ApiHub server but before it is returned to user code. 
+ + We recommend only using this `post_list_versions_with_metadata` + interceptor in new development instead of the `post_list_versions` interceptor. + When both interceptors are used, this `post_list_versions_with_metadata` interceptor runs after the + `post_list_versions` interceptor. The (possibly modified) response returned by + `post_list_versions` will be passed to + `post_list_versions_with_metadata`. + """ + return response, metadata + def pre_search_resources( self, request: apihub_service.SearchResourcesRequest, @@ -972,12 +1493,37 @@ def post_search_resources( ) -> apihub_service.SearchResourcesResponse: """Post-rpc interceptor for search_resources - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_search_resources_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ApiHub server but before - it is returned to user code. + it is returned to user code. This `post_search_resources` interceptor runs + before the `post_search_resources_with_metadata` interceptor. """ return response + def post_search_resources_with_metadata( + self, + response: apihub_service.SearchResourcesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + apihub_service.SearchResourcesResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for search_resources + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ApiHub server but before it is returned to user code. + + We recommend only using this `post_search_resources_with_metadata` + interceptor in new development instead of the `post_search_resources` interceptor. + When both interceptors are used, this `post_search_resources_with_metadata` interceptor runs after the + `post_search_resources` interceptor. The (possibly modified) response returned by + `post_search_resources` will be passed to + `post_search_resources_with_metadata`. + """ + return response, metadata + def pre_update_api( self, request: apihub_service.UpdateApiRequest, @@ -995,12 +1541,35 @@ def pre_update_api( def post_update_api(self, response: common_fields.Api) -> common_fields.Api: """Post-rpc interceptor for update_api - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_api_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ApiHub server but before - it is returned to user code. + it is returned to user code. This `post_update_api` interceptor runs + before the `post_update_api_with_metadata` interceptor. """ return response + def post_update_api_with_metadata( + self, + response: common_fields.Api, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[common_fields.Api, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_api + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ApiHub server but before it is returned to user code. + + We recommend only using this `post_update_api_with_metadata` + interceptor in new development instead of the `post_update_api` interceptor. + When both interceptors are used, this `post_update_api_with_metadata` interceptor runs after the + `post_update_api` interceptor. The (possibly modified) response returned by + `post_update_api` will be passed to + `post_update_api_with_metadata`. 
+ """ + return response, metadata + def pre_update_attribute( self, request: apihub_service.UpdateAttributeRequest, @@ -1020,12 +1589,35 @@ def post_update_attribute( ) -> common_fields.Attribute: """Post-rpc interceptor for update_attribute - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_attribute_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ApiHub server but before - it is returned to user code. + it is returned to user code. This `post_update_attribute` interceptor runs + before the `post_update_attribute_with_metadata` interceptor. """ return response + def post_update_attribute_with_metadata( + self, + response: common_fields.Attribute, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[common_fields.Attribute, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_attribute + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ApiHub server but before it is returned to user code. + + We recommend only using this `post_update_attribute_with_metadata` + interceptor in new development instead of the `post_update_attribute` interceptor. + When both interceptors are used, this `post_update_attribute_with_metadata` interceptor runs after the + `post_update_attribute` interceptor. The (possibly modified) response returned by + `post_update_attribute` will be passed to + `post_update_attribute_with_metadata`. + """ + return response, metadata + def pre_update_deployment( self, request: apihub_service.UpdateDeploymentRequest, @@ -1045,12 +1637,35 @@ def post_update_deployment( ) -> common_fields.Deployment: """Post-rpc interceptor for update_deployment - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_deployment_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ApiHub server but before - it is returned to user code. + it is returned to user code. This `post_update_deployment` interceptor runs + before the `post_update_deployment_with_metadata` interceptor. """ return response + def post_update_deployment_with_metadata( + self, + response: common_fields.Deployment, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[common_fields.Deployment, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_deployment + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ApiHub server but before it is returned to user code. + + We recommend only using this `post_update_deployment_with_metadata` + interceptor in new development instead of the `post_update_deployment` interceptor. + When both interceptors are used, this `post_update_deployment_with_metadata` interceptor runs after the + `post_update_deployment` interceptor. The (possibly modified) response returned by + `post_update_deployment` will be passed to + `post_update_deployment_with_metadata`. + """ + return response, metadata + def pre_update_external_api( self, request: apihub_service.UpdateExternalApiRequest, @@ -1070,12 +1685,35 @@ def post_update_external_api( ) -> common_fields.ExternalApi: """Post-rpc interceptor for update_external_api - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_external_api_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response after it is returned by the ApiHub server but before - it is returned to user code. + it is returned to user code. This `post_update_external_api` interceptor runs + before the `post_update_external_api_with_metadata` interceptor. """ return response + def post_update_external_api_with_metadata( + self, + response: common_fields.ExternalApi, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[common_fields.ExternalApi, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_external_api + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ApiHub server but before it is returned to user code. + + We recommend only using this `post_update_external_api_with_metadata` + interceptor in new development instead of the `post_update_external_api` interceptor. + When both interceptors are used, this `post_update_external_api_with_metadata` interceptor runs after the + `post_update_external_api` interceptor. The (possibly modified) response returned by + `post_update_external_api` will be passed to + `post_update_external_api_with_metadata`. + """ + return response, metadata + def pre_update_spec( self, request: apihub_service.UpdateSpecRequest, @@ -1093,12 +1731,35 @@ def pre_update_spec( def post_update_spec(self, response: common_fields.Spec) -> common_fields.Spec: """Post-rpc interceptor for update_spec - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_spec_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ApiHub server but before - it is returned to user code. + it is returned to user code. This `post_update_spec` interceptor runs + before the `post_update_spec_with_metadata` interceptor. """ return response + def post_update_spec_with_metadata( + self, + response: common_fields.Spec, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[common_fields.Spec, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_spec + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ApiHub server but before it is returned to user code. + + We recommend only using this `post_update_spec_with_metadata` + interceptor in new development instead of the `post_update_spec` interceptor. + When both interceptors are used, this `post_update_spec_with_metadata` interceptor runs after the + `post_update_spec` interceptor. The (possibly modified) response returned by + `post_update_spec` will be passed to + `post_update_spec_with_metadata`. + """ + return response, metadata + def pre_update_version( self, request: apihub_service.UpdateVersionRequest, @@ -1118,12 +1779,35 @@ def post_update_version( ) -> common_fields.Version: """Post-rpc interceptor for update_version - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_version_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ApiHub server but before - it is returned to user code. + it is returned to user code. This `post_update_version` interceptor runs + before the `post_update_version_with_metadata` interceptor. 
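# --- Illustrative sketch (editor's example, not part of the generated diff) ---
# Demonstrates the ordering contract described in these docstrings: when both
# hooks are overridden, the legacy post_* hook runs first and its return value
# is what the *_with_metadata hook receives. The class name follows the earlier
# assumption that the generated interceptor is ApiHubRestInterceptor.
from google.cloud.apihub_v1.services.api_hub.transports.rest import ApiHubRestInterceptor


class OrderingDemoInterceptor(ApiHubRestInterceptor):
    def post_update_version(self, response):
        # Runs first; whatever is returned here is handed to the hook below.
        print("post_update_version saw:", type(response).__name__)
        return response

    def post_update_version_with_metadata(self, response, metadata):
        # Runs second, with the (possibly modified) response plus header metadata.
        print("post_update_version_with_metadata saw", len(metadata), "metadata entries")
        return response, metadata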
""" return response + def post_update_version_with_metadata( + self, + response: common_fields.Version, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[common_fields.Version, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_version + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ApiHub server but before it is returned to user code. + + We recommend only using this `post_update_version_with_metadata` + interceptor in new development instead of the `post_update_version` interceptor. + When both interceptors are used, this `post_update_version_with_metadata` interceptor runs after the + `post_update_version` interceptor. The (possibly modified) response returned by + `post_update_version` will be passed to + `post_update_version_with_metadata`. + """ + return response, metadata + def pre_get_location( self, request: locations_pb2.GetLocationRequest, @@ -1481,6 +2165,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_api(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_api_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1642,6 +2330,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_attribute(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_attribute_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1805,6 +2497,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_deployment(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_deployment_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1965,6 +2661,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_external_api(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_external_api_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2118,6 +2818,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_spec(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_spec_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2271,6 +2975,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_version(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_version_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3077,6 +3785,10 @@ def __call__( 
json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_api(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_api_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3234,6 +3946,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_api_operation(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_api_operation_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3385,6 +4101,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_attribute(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_attribute_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3537,6 +4257,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_definition(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_definition_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3688,6 +4412,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_deployment(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_deployment_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3838,6 +4566,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_external_api(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_external_api_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3982,6 +4714,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_spec(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_spec_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4130,6 +4866,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_spec_contents(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_spec_contents_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4275,6 +5015,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_version(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = 
self._interceptor.post_get_version_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4426,6 +5170,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_api_operations(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_api_operations_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4571,6 +5319,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_apis(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_apis_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4718,6 +5470,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_attributes(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_attributes_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4871,6 +5627,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_deployments(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_deployments_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -5024,6 +5784,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_external_apis(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_external_apis_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -5168,6 +5932,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_specs(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_specs_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -5317,6 +6085,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_versions(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_versions_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -5476,6 +6248,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_search_resources(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_search_resources_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -5625,6 +6401,10 @@ 
def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_api(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_api_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -5786,6 +6566,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_attribute(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_attribute_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -5949,6 +6733,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_deployment(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_deployment_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -6109,6 +6897,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_external_api(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_external_api_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -6262,6 +7054,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_spec(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_spec_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -6415,6 +7211,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_version(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_version_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub_dependencies/client.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub_dependencies/client.py index ed70b2364c1d..013bba7770b3 100644 --- a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub_dependencies/client.py +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub_dependencies/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -511,6 +513,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. 
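As a rough illustration of what this helper buys the caller (not part of the patch): with google-auth >= 2.35.0, a 401/403/404 raised by any of the wrapped calls should carry an extra JSON-encoded detail describing the credential that was used. A minimal sketch, assuming the standard generated surface (`ApiHubDependenciesClient`, `get_dependency(name=...)`) and a placeholder resource name:

from google.api_core import exceptions as core_exceptions
from google.cloud import apihub_v1

client = apihub_v1.ApiHubDependenciesClient()
try:
    # Placeholder resource name, for illustration only.
    client.get_dependency(
        name="projects/my-project/locations/us-central1/dependencies/my-dep"
    )
except core_exceptions.GoogleAPICallError as exc:
    # For UNAUTHORIZED/FORBIDDEN/NOT_FOUND responses, the client appends
    # json.dumps(cred_info) to the error details before re-raising.
    for detail in exc.details:
        print(detail)
    raise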
+ """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -1384,16 +1413,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -1439,16 +1472,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def delete_operation( self, @@ -1605,16 +1642,20 @@ def get_location( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def list_locations( self, @@ -1660,16 +1701,20 @@ def list_locations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub_dependencies/transports/rest.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub_dependencies/transports/rest.py index d283dbab1a25..17714ce6df09 100644 --- a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub_dependencies/transports/rest.py +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub_dependencies/transports/rest.py @@ -132,12 +132,35 @@ def post_create_dependency( ) -> common_fields.Dependency: """Post-rpc interceptor for create_dependency - Override in a subclass to manipulate the response + DEPRECATED. 
Please use the `post_create_dependency_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ApiHubDependencies server but before - it is returned to user code. + it is returned to user code. This `post_create_dependency` interceptor runs + before the `post_create_dependency_with_metadata` interceptor. """ return response + def post_create_dependency_with_metadata( + self, + response: common_fields.Dependency, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[common_fields.Dependency, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_dependency + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ApiHubDependencies server but before it is returned to user code. + + We recommend only using this `post_create_dependency_with_metadata` + interceptor in new development instead of the `post_create_dependency` interceptor. + When both interceptors are used, this `post_create_dependency_with_metadata` interceptor runs after the + `post_create_dependency` interceptor. The (possibly modified) response returned by + `post_create_dependency` will be passed to + `post_create_dependency_with_metadata`. + """ + return response, metadata + def pre_delete_dependency( self, request: apihub_service.DeleteDependencyRequest, @@ -171,12 +194,35 @@ def post_get_dependency( ) -> common_fields.Dependency: """Post-rpc interceptor for get_dependency - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_dependency_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ApiHubDependencies server but before - it is returned to user code. + it is returned to user code. This `post_get_dependency` interceptor runs + before the `post_get_dependency_with_metadata` interceptor. """ return response + def post_get_dependency_with_metadata( + self, + response: common_fields.Dependency, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[common_fields.Dependency, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_dependency + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ApiHubDependencies server but before it is returned to user code. + + We recommend only using this `post_get_dependency_with_metadata` + interceptor in new development instead of the `post_get_dependency` interceptor. + When both interceptors are used, this `post_get_dependency_with_metadata` interceptor runs after the + `post_get_dependency` interceptor. The (possibly modified) response returned by + `post_get_dependency` will be passed to + `post_get_dependency_with_metadata`. + """ + return response, metadata + def pre_list_dependencies( self, request: apihub_service.ListDependenciesRequest, @@ -196,12 +242,37 @@ def post_list_dependencies( ) -> apihub_service.ListDependenciesResponse: """Post-rpc interceptor for list_dependencies - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_dependencies_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ApiHubDependencies server but before - it is returned to user code. + it is returned to user code. This `post_list_dependencies` interceptor runs + before the `post_list_dependencies_with_metadata` interceptor. 
""" return response + def post_list_dependencies_with_metadata( + self, + response: apihub_service.ListDependenciesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + apihub_service.ListDependenciesResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_dependencies + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ApiHubDependencies server but before it is returned to user code. + + We recommend only using this `post_list_dependencies_with_metadata` + interceptor in new development instead of the `post_list_dependencies` interceptor. + When both interceptors are used, this `post_list_dependencies_with_metadata` interceptor runs after the + `post_list_dependencies` interceptor. The (possibly modified) response returned by + `post_list_dependencies` will be passed to + `post_list_dependencies_with_metadata`. + """ + return response, metadata + def pre_update_dependency( self, request: apihub_service.UpdateDependencyRequest, @@ -221,12 +292,35 @@ def post_update_dependency( ) -> common_fields.Dependency: """Post-rpc interceptor for update_dependency - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_dependency_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ApiHubDependencies server but before - it is returned to user code. + it is returned to user code. This `post_update_dependency` interceptor runs + before the `post_update_dependency_with_metadata` interceptor. """ return response + def post_update_dependency_with_metadata( + self, + response: common_fields.Dependency, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[common_fields.Dependency, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_dependency + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ApiHubDependencies server but before it is returned to user code. + + We recommend only using this `post_update_dependency_with_metadata` + interceptor in new development instead of the `post_update_dependency` interceptor. + When both interceptors are used, this `post_update_dependency_with_metadata` interceptor runs after the + `post_update_dependency` interceptor. The (possibly modified) response returned by + `post_update_dependency` will be passed to + `post_update_dependency_with_metadata`. 
+ """ + return response, metadata + def pre_get_location( self, request: locations_pb2.GetLocationRequest, @@ -595,6 +689,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_dependency(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_dependency_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -854,6 +952,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_dependency(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_dependency_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1002,6 +1104,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_dependencies(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_dependencies_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1161,6 +1267,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_dependency(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_dependency_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub_plugin/client.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub_plugin/client.py index 4d6b0b55a5af..30aa8d7e5514 100644 --- a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub_plugin/client.py +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub_plugin/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -502,6 +504,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -1066,16 +1095,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. 
- response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -1121,16 +1154,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def delete_operation( self, @@ -1287,16 +1324,20 @@ def get_location( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def list_locations( self, @@ -1342,16 +1383,20 @@ def list_locations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub_plugin/transports/rest.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub_plugin/transports/rest.py index 3e6fcd8fed38..8e2ff4a9d4fb 100644 --- a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub_plugin/transports/rest.py +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/api_hub_plugin/transports/rest.py @@ -119,12 +119,35 @@ def post_disable_plugin( ) -> plugin_service.Plugin: """Post-rpc interceptor for disable_plugin - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_disable_plugin_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ApiHubPlugin server but before - it is returned to user code. + it is returned to user code. This `post_disable_plugin` interceptor runs + before the `post_disable_plugin_with_metadata` interceptor. 
""" return response + def post_disable_plugin_with_metadata( + self, + response: plugin_service.Plugin, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[plugin_service.Plugin, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for disable_plugin + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ApiHubPlugin server but before it is returned to user code. + + We recommend only using this `post_disable_plugin_with_metadata` + interceptor in new development instead of the `post_disable_plugin` interceptor. + When both interceptors are used, this `post_disable_plugin_with_metadata` interceptor runs after the + `post_disable_plugin` interceptor. The (possibly modified) response returned by + `post_disable_plugin` will be passed to + `post_disable_plugin_with_metadata`. + """ + return response, metadata + def pre_enable_plugin( self, request: plugin_service.EnablePluginRequest, @@ -144,12 +167,35 @@ def post_enable_plugin( ) -> plugin_service.Plugin: """Post-rpc interceptor for enable_plugin - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_enable_plugin_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ApiHubPlugin server but before - it is returned to user code. + it is returned to user code. This `post_enable_plugin` interceptor runs + before the `post_enable_plugin_with_metadata` interceptor. """ return response + def post_enable_plugin_with_metadata( + self, + response: plugin_service.Plugin, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[plugin_service.Plugin, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for enable_plugin + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ApiHubPlugin server but before it is returned to user code. + + We recommend only using this `post_enable_plugin_with_metadata` + interceptor in new development instead of the `post_enable_plugin` interceptor. + When both interceptors are used, this `post_enable_plugin_with_metadata` interceptor runs after the + `post_enable_plugin` interceptor. The (possibly modified) response returned by + `post_enable_plugin` will be passed to + `post_enable_plugin_with_metadata`. + """ + return response, metadata + def pre_get_plugin( self, request: plugin_service.GetPluginRequest, @@ -167,12 +213,35 @@ def pre_get_plugin( def post_get_plugin(self, response: plugin_service.Plugin) -> plugin_service.Plugin: """Post-rpc interceptor for get_plugin - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_plugin_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ApiHubPlugin server but before - it is returned to user code. + it is returned to user code. This `post_get_plugin` interceptor runs + before the `post_get_plugin_with_metadata` interceptor. """ return response + def post_get_plugin_with_metadata( + self, + response: plugin_service.Plugin, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[plugin_service.Plugin, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_plugin + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ApiHubPlugin server but before it is returned to user code. 
+ + We recommend only using this `post_get_plugin_with_metadata` + interceptor in new development instead of the `post_get_plugin` interceptor. + When both interceptors are used, this `post_get_plugin_with_metadata` interceptor runs after the + `post_get_plugin` interceptor. The (possibly modified) response returned by + `post_get_plugin` will be passed to + `post_get_plugin_with_metadata`. + """ + return response, metadata + def pre_get_location( self, request: locations_pb2.GetLocationRequest, @@ -531,6 +600,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_disable_plugin(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_disable_plugin_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -683,6 +756,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_enable_plugin(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_enable_plugin_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -829,6 +906,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_plugin(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_plugin_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/host_project_registration_service/client.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/host_project_registration_service/client.py index 285335177055..a2b1763f1ef7 100644 --- a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/host_project_registration_service/client.py +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/host_project_registration_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -493,6 +495,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -1187,16 +1216,20 @@ def list_operations( # Validate the universe domain. 
self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -1242,16 +1275,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def delete_operation( self, @@ -1408,16 +1445,20 @@ def get_location( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def list_locations( self, @@ -1463,16 +1504,20 @@ def list_locations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/host_project_registration_service/transports/rest.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/host_project_registration_service/transports/rest.py index 437f5503445b..c64b62e8d03f 100644 --- a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/host_project_registration_service/transports/rest.py +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/host_project_registration_service/transports/rest.py @@ -120,12 +120,38 @@ def post_create_host_project_registration( ) -> host_project_registration_service.HostProjectRegistration: """Post-rpc interceptor for create_host_project_registration - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_host_project_registration_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the HostProjectRegistrationService server but before - it is returned to user code. + it is returned to user code. This `post_create_host_project_registration` interceptor runs + before the `post_create_host_project_registration_with_metadata` interceptor. 
""" return response + def post_create_host_project_registration_with_metadata( + self, + response: host_project_registration_service.HostProjectRegistration, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + host_project_registration_service.HostProjectRegistration, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for create_host_project_registration + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the HostProjectRegistrationService server but before it is returned to user code. + + We recommend only using this `post_create_host_project_registration_with_metadata` + interceptor in new development instead of the `post_create_host_project_registration` interceptor. + When both interceptors are used, this `post_create_host_project_registration_with_metadata` interceptor runs after the + `post_create_host_project_registration` interceptor. The (possibly modified) response returned by + `post_create_host_project_registration` will be passed to + `post_create_host_project_registration_with_metadata`. + """ + return response, metadata + def pre_get_host_project_registration( self, request: host_project_registration_service.GetHostProjectRegistrationRequest, @@ -146,12 +172,38 @@ def post_get_host_project_registration( ) -> host_project_registration_service.HostProjectRegistration: """Post-rpc interceptor for get_host_project_registration - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_host_project_registration_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the HostProjectRegistrationService server but before - it is returned to user code. + it is returned to user code. This `post_get_host_project_registration` interceptor runs + before the `post_get_host_project_registration_with_metadata` interceptor. """ return response + def post_get_host_project_registration_with_metadata( + self, + response: host_project_registration_service.HostProjectRegistration, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + host_project_registration_service.HostProjectRegistration, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for get_host_project_registration + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the HostProjectRegistrationService server but before it is returned to user code. + + We recommend only using this `post_get_host_project_registration_with_metadata` + interceptor in new development instead of the `post_get_host_project_registration` interceptor. + When both interceptors are used, this `post_get_host_project_registration_with_metadata` interceptor runs after the + `post_get_host_project_registration` interceptor. The (possibly modified) response returned by + `post_get_host_project_registration` will be passed to + `post_get_host_project_registration_with_metadata`. + """ + return response, metadata + def pre_list_host_project_registrations( self, request: host_project_registration_service.ListHostProjectRegistrationsRequest, @@ -173,12 +225,38 @@ def post_list_host_project_registrations( ) -> host_project_registration_service.ListHostProjectRegistrationsResponse: """Post-rpc interceptor for list_host_project_registrations - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_host_project_registrations_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response after it is returned by the HostProjectRegistrationService server but before - it is returned to user code. + it is returned to user code. This `post_list_host_project_registrations` interceptor runs + before the `post_list_host_project_registrations_with_metadata` interceptor. """ return response + def post_list_host_project_registrations_with_metadata( + self, + response: host_project_registration_service.ListHostProjectRegistrationsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + host_project_registration_service.ListHostProjectRegistrationsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_host_project_registrations + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the HostProjectRegistrationService server but before it is returned to user code. + + We recommend only using this `post_list_host_project_registrations_with_metadata` + interceptor in new development instead of the `post_list_host_project_registrations` interceptor. + When both interceptors are used, this `post_list_host_project_registrations_with_metadata` interceptor runs after the + `post_list_host_project_registrations` interceptor. The (possibly modified) response returned by + `post_list_host_project_registrations` will be passed to + `post_list_host_project_registrations_with_metadata`. + """ + return response, metadata + def pre_get_location( self, request: locations_pb2.GetLocationRequest, @@ -558,6 +636,13 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_host_project_registration(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_create_host_project_registration_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -718,6 +803,13 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_host_project_registration(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_get_host_project_registration_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -875,6 +967,13 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_host_project_registrations(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_list_host_project_registrations_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/linting_service/client.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/linting_service/client.py index 0754a3fee0d0..ea50a3d4a2a3 100644 --- a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/linting_service/client.py +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/linting_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. 
# from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -509,6 +511,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -1175,16 +1204,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -1230,16 +1263,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def delete_operation( self, @@ -1396,16 +1433,20 @@ def get_location( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def list_locations( self, @@ -1451,16 +1492,20 @@ def list_locations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. 
+ return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/linting_service/transports/rest.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/linting_service/transports/rest.py index 4f2819108c83..402e2bf2f4e7 100644 --- a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/linting_service/transports/rest.py +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/linting_service/transports/rest.py @@ -124,12 +124,35 @@ def post_get_style_guide( ) -> linting_service.StyleGuide: """Post-rpc interceptor for get_style_guide - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_style_guide_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the LintingService server but before - it is returned to user code. + it is returned to user code. This `post_get_style_guide` interceptor runs + before the `post_get_style_guide_with_metadata` interceptor. """ return response + def post_get_style_guide_with_metadata( + self, + response: linting_service.StyleGuide, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[linting_service.StyleGuide, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_style_guide + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the LintingService server but before it is returned to user code. + + We recommend only using this `post_get_style_guide_with_metadata` + interceptor in new development instead of the `post_get_style_guide` interceptor. + When both interceptors are used, this `post_get_style_guide_with_metadata` interceptor runs after the + `post_get_style_guide` interceptor. The (possibly modified) response returned by + `post_get_style_guide` will be passed to + `post_get_style_guide_with_metadata`. + """ + return response, metadata + def pre_get_style_guide_contents( self, request: linting_service.GetStyleGuideContentsRequest, @@ -150,12 +173,37 @@ def post_get_style_guide_contents( ) -> linting_service.StyleGuideContents: """Post-rpc interceptor for get_style_guide_contents - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_style_guide_contents_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the LintingService server but before - it is returned to user code. + it is returned to user code. This `post_get_style_guide_contents` interceptor runs + before the `post_get_style_guide_contents_with_metadata` interceptor. """ return response + def post_get_style_guide_contents_with_metadata( + self, + response: linting_service.StyleGuideContents, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + linting_service.StyleGuideContents, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for get_style_guide_contents + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the LintingService server but before it is returned to user code. + + We recommend only using this `post_get_style_guide_contents_with_metadata` + interceptor in new development instead of the `post_get_style_guide_contents` interceptor. 
+ When both interceptors are used, this `post_get_style_guide_contents_with_metadata` interceptor runs after the + `post_get_style_guide_contents` interceptor. The (possibly modified) response returned by + `post_get_style_guide_contents` will be passed to + `post_get_style_guide_contents_with_metadata`. + """ + return response, metadata + def pre_lint_spec( self, request: linting_service.LintSpecRequest, @@ -189,12 +237,35 @@ def post_update_style_guide( ) -> linting_service.StyleGuide: """Post-rpc interceptor for update_style_guide - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_style_guide_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the LintingService server but before - it is returned to user code. + it is returned to user code. This `post_update_style_guide` interceptor runs + before the `post_update_style_guide_with_metadata` interceptor. """ return response + def post_update_style_guide_with_metadata( + self, + response: linting_service.StyleGuide, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[linting_service.StyleGuide, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_style_guide + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the LintingService server but before it is returned to user code. + + We recommend only using this `post_update_style_guide_with_metadata` + interceptor in new development instead of the `post_update_style_guide` interceptor. + When both interceptors are used, this `post_update_style_guide_with_metadata` interceptor runs after the + `post_update_style_guide` interceptor. The (possibly modified) response returned by + `post_update_style_guide` will be passed to + `post_update_style_guide_with_metadata`. 
+ """ + return response, metadata + def pre_get_location( self, request: locations_pb2.GetLocationRequest, @@ -549,6 +620,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_style_guide(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_style_guide_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -694,6 +769,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_style_guide_contents(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_style_guide_contents_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -966,6 +1045,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_style_guide(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_style_guide_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/provisioning/client.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/provisioning/client.py index 0782e35b0559..b8332bf9edd9 100644 --- a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/provisioning/client.py +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/provisioning/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -485,6 +487,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -1110,16 +1139,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. 
+ return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -1165,16 +1198,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def delete_operation( self, @@ -1331,16 +1368,20 @@ def get_location( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def list_locations( self, @@ -1386,16 +1427,20 @@ def list_locations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/provisioning/transports/rest.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/provisioning/transports/rest.py index d10fa7b8c6a6..d1c285a2421d 100644 --- a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/provisioning/transports/rest.py +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/provisioning/transports/rest.py @@ -120,12 +120,35 @@ def post_create_api_hub_instance( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_api_hub_instance - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_api_hub_instance_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Provisioning server but before - it is returned to user code. + it is returned to user code. This `post_create_api_hub_instance` interceptor runs + before the `post_create_api_hub_instance_with_metadata` interceptor. """ return response + def post_create_api_hub_instance_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_api_hub_instance + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Provisioning server but before it is returned to user code. + + We recommend only using this `post_create_api_hub_instance_with_metadata` + interceptor in new development instead of the `post_create_api_hub_instance` interceptor. 
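Note that for long-running methods such as create_api_hub_instance the new hook sees the raw `operations_pb2.Operation`, not the unwrapped result. A hedged sketch, assuming the interceptor class is named `ProvisioningRestInterceptor` per the usual GAPIC convention:

from google.longrunning import operations_pb2

from google.cloud.apihub_v1.services.provisioning.transports.rest import (
    ProvisioningRestInterceptor,
)


class OperationInspector(ProvisioningRestInterceptor):
    def post_create_api_hub_instance_with_metadata(self, response, metadata):
        # The LRO wrapper, not the eventual ApiHubInstance.
        assert isinstance(response, operations_pb2.Operation)
        print(response.name, response.done)
        return response, metadata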
+ When both interceptors are used, this `post_create_api_hub_instance_with_metadata` interceptor runs after the + `post_create_api_hub_instance` interceptor. The (possibly modified) response returned by + `post_create_api_hub_instance` will be passed to + `post_create_api_hub_instance_with_metadata`. + """ + return response, metadata + def pre_get_api_hub_instance( self, request: provisioning_service.GetApiHubInstanceRequest, @@ -146,12 +169,35 @@ def post_get_api_hub_instance( ) -> common_fields.ApiHubInstance: """Post-rpc interceptor for get_api_hub_instance - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_api_hub_instance_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Provisioning server but before - it is returned to user code. + it is returned to user code. This `post_get_api_hub_instance` interceptor runs + before the `post_get_api_hub_instance_with_metadata` interceptor. """ return response + def post_get_api_hub_instance_with_metadata( + self, + response: common_fields.ApiHubInstance, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[common_fields.ApiHubInstance, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_api_hub_instance + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Provisioning server but before it is returned to user code. + + We recommend only using this `post_get_api_hub_instance_with_metadata` + interceptor in new development instead of the `post_get_api_hub_instance` interceptor. + When both interceptors are used, this `post_get_api_hub_instance_with_metadata` interceptor runs after the + `post_get_api_hub_instance` interceptor. The (possibly modified) response returned by + `post_get_api_hub_instance` will be passed to + `post_get_api_hub_instance_with_metadata`. + """ + return response, metadata + def pre_lookup_api_hub_instance( self, request: provisioning_service.LookupApiHubInstanceRequest, @@ -172,12 +218,38 @@ def post_lookup_api_hub_instance( ) -> provisioning_service.LookupApiHubInstanceResponse: """Post-rpc interceptor for lookup_api_hub_instance - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_lookup_api_hub_instance_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Provisioning server but before - it is returned to user code. + it is returned to user code. This `post_lookup_api_hub_instance` interceptor runs + before the `post_lookup_api_hub_instance_with_metadata` interceptor. """ return response + def post_lookup_api_hub_instance_with_metadata( + self, + response: provisioning_service.LookupApiHubInstanceResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + provisioning_service.LookupApiHubInstanceResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for lookup_api_hub_instance + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Provisioning server but before it is returned to user code. + + We recommend only using this `post_lookup_api_hub_instance_with_metadata` + interceptor in new development instead of the `post_lookup_api_hub_instance` interceptor. + When both interceptors are used, this `post_lookup_api_hub_instance_with_metadata` interceptor runs after the + `post_lookup_api_hub_instance` interceptor. 
The (possibly modified) response returned by + `post_lookup_api_hub_instance` will be passed to + `post_lookup_api_hub_instance_with_metadata`. + """ + return response, metadata + def pre_get_location( self, request: locations_pb2.GetLocationRequest, @@ -594,6 +666,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_api_hub_instance(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_api_hub_instance_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -742,6 +818,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_api_hub_instance(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_api_hub_instance_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -889,6 +969,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_lookup_api_hub_instance(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_lookup_api_hub_instance_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/runtime_project_attachment_service/client.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/runtime_project_attachment_service/client.py index 0ca48901fd15..5710d3963e17 100644 --- a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/runtime_project_attachment_service/client.py +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/runtime_project_attachment_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -493,6 +495,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -1410,16 +1439,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -1465,16 +1498,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def delete_operation( self, @@ -1631,16 +1668,20 @@ def get_location( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def list_locations( self, @@ -1686,16 +1727,20 @@ def list_locations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/runtime_project_attachment_service/transports/rest.py b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/runtime_project_attachment_service/transports/rest.py index 1b3597fec0b4..a96822070361 100644 --- a/packages/google-cloud-apihub/google/cloud/apihub_v1/services/runtime_project_attachment_service/transports/rest.py +++ b/packages/google-cloud-apihub/google/cloud/apihub_v1/services/runtime_project_attachment_service/transports/rest.py @@ -133,12 +133,38 @@ def post_create_runtime_project_attachment( ) -> runtime_project_attachment_service.RuntimeProjectAttachment: """Post-rpc interceptor for create_runtime_project_attachment - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_runtime_project_attachment_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RuntimeProjectAttachmentService server but before - it is returned to user code. + it is returned to user code. This `post_create_runtime_project_attachment` interceptor runs + before the `post_create_runtime_project_attachment_with_metadata` interceptor. 
""" return response + def post_create_runtime_project_attachment_with_metadata( + self, + response: runtime_project_attachment_service.RuntimeProjectAttachment, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + runtime_project_attachment_service.RuntimeProjectAttachment, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for create_runtime_project_attachment + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RuntimeProjectAttachmentService server but before it is returned to user code. + + We recommend only using this `post_create_runtime_project_attachment_with_metadata` + interceptor in new development instead of the `post_create_runtime_project_attachment` interceptor. + When both interceptors are used, this `post_create_runtime_project_attachment_with_metadata` interceptor runs after the + `post_create_runtime_project_attachment` interceptor. The (possibly modified) response returned by + `post_create_runtime_project_attachment` will be passed to + `post_create_runtime_project_attachment_with_metadata`. + """ + return response, metadata + def pre_delete_runtime_project_attachment( self, request: runtime_project_attachment_service.DeleteRuntimeProjectAttachmentRequest, @@ -174,12 +200,38 @@ def post_get_runtime_project_attachment( ) -> runtime_project_attachment_service.RuntimeProjectAttachment: """Post-rpc interceptor for get_runtime_project_attachment - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_runtime_project_attachment_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RuntimeProjectAttachmentService server but before - it is returned to user code. + it is returned to user code. This `post_get_runtime_project_attachment` interceptor runs + before the `post_get_runtime_project_attachment_with_metadata` interceptor. """ return response + def post_get_runtime_project_attachment_with_metadata( + self, + response: runtime_project_attachment_service.RuntimeProjectAttachment, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + runtime_project_attachment_service.RuntimeProjectAttachment, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for get_runtime_project_attachment + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RuntimeProjectAttachmentService server but before it is returned to user code. + + We recommend only using this `post_get_runtime_project_attachment_with_metadata` + interceptor in new development instead of the `post_get_runtime_project_attachment` interceptor. + When both interceptors are used, this `post_get_runtime_project_attachment_with_metadata` interceptor runs after the + `post_get_runtime_project_attachment` interceptor. The (possibly modified) response returned by + `post_get_runtime_project_attachment` will be passed to + `post_get_runtime_project_attachment_with_metadata`. + """ + return response, metadata + def pre_list_runtime_project_attachments( self, request: runtime_project_attachment_service.ListRuntimeProjectAttachmentsRequest, @@ -201,12 +253,38 @@ def post_list_runtime_project_attachments( ) -> runtime_project_attachment_service.ListRuntimeProjectAttachmentsResponse: """Post-rpc interceptor for list_runtime_project_attachments - Override in a subclass to manipulate the response + DEPRECATED. 
Please use the `post_list_runtime_project_attachments_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RuntimeProjectAttachmentService server but before - it is returned to user code. + it is returned to user code. This `post_list_runtime_project_attachments` interceptor runs + before the `post_list_runtime_project_attachments_with_metadata` interceptor. """ return response + def post_list_runtime_project_attachments_with_metadata( + self, + response: runtime_project_attachment_service.ListRuntimeProjectAttachmentsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + runtime_project_attachment_service.ListRuntimeProjectAttachmentsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_runtime_project_attachments + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RuntimeProjectAttachmentService server but before it is returned to user code. + + We recommend only using this `post_list_runtime_project_attachments_with_metadata` + interceptor in new development instead of the `post_list_runtime_project_attachments` interceptor. + When both interceptors are used, this `post_list_runtime_project_attachments_with_metadata` interceptor runs after the + `post_list_runtime_project_attachments` interceptor. The (possibly modified) response returned by + `post_list_runtime_project_attachments` will be passed to + `post_list_runtime_project_attachments_with_metadata`. + """ + return response, metadata + def pre_lookup_runtime_project_attachment( self, request: runtime_project_attachment_service.LookupRuntimeProjectAttachmentRequest, @@ -228,12 +306,38 @@ def post_lookup_runtime_project_attachment( ) -> runtime_project_attachment_service.LookupRuntimeProjectAttachmentResponse: """Post-rpc interceptor for lookup_runtime_project_attachment - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_lookup_runtime_project_attachment_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RuntimeProjectAttachmentService server but before - it is returned to user code. + it is returned to user code. This `post_lookup_runtime_project_attachment` interceptor runs + before the `post_lookup_runtime_project_attachment_with_metadata` interceptor. """ return response + def post_lookup_runtime_project_attachment_with_metadata( + self, + response: runtime_project_attachment_service.LookupRuntimeProjectAttachmentResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + runtime_project_attachment_service.LookupRuntimeProjectAttachmentResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for lookup_runtime_project_attachment + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RuntimeProjectAttachmentService server but before it is returned to user code. + + We recommend only using this `post_lookup_runtime_project_attachment_with_metadata` + interceptor in new development instead of the `post_lookup_runtime_project_attachment` interceptor. + When both interceptors are used, this `post_lookup_runtime_project_attachment_with_metadata` interceptor runs after the + `post_lookup_runtime_project_attachment` interceptor. 
The (possibly modified) response returned by + `post_lookup_runtime_project_attachment` will be passed to + `post_lookup_runtime_project_attachment_with_metadata`. + """ + return response, metadata + def pre_get_location( self, request: locations_pb2.GetLocationRequest, @@ -612,6 +716,13 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_runtime_project_attachment(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_create_runtime_project_attachment_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -885,6 +996,13 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_runtime_project_attachment(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_get_runtime_project_attachment_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1042,6 +1160,13 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_runtime_project_attachments(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_list_runtime_project_attachments_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1199,6 +1324,13 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_lookup_runtime_project_attachment(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_lookup_runtime_project_attachment_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-apihub/samples/generated_samples/snippet_metadata_google.cloud.apihub.v1.json b/packages/google-cloud-apihub/samples/generated_samples/snippet_metadata_google.cloud.apihub.v1.json index b8256fb0854f..d55f2e461314 100644 --- a/packages/google-cloud-apihub/samples/generated_samples/snippet_metadata_google.cloud.apihub.v1.json +++ b/packages/google-cloud-apihub/samples/generated_samples/snippet_metadata_google.cloud.apihub.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-apihub", - "version": "0.2.3" + "version": "0.2.4" }, "snippets": [ { diff --git a/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_api_hub.py b/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_api_hub.py index 5bfa4b5cc4e7..47e24a257b5f 100644 --- a/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_api_hub.py +++ b/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_api_hub.py @@ -59,6 +59,13 @@ from google.cloud.apihub_v1.services.api_hub import ApiHubClient, pagers, transports from google.cloud.apihub_v1.types import apihub_service, common_fields +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # 
pragma: NO COVER @@ -276,6 +283,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = ApiHubClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = ApiHubClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -8170,10 +8220,13 @@ def test_create_api_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ApiHubRestInterceptor, "post_create_api" ) as post, mock.patch.object( + transports.ApiHubRestInterceptor, "post_create_api_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ApiHubRestInterceptor, "pre_create_api" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = apihub_service.CreateApiRequest.pb( apihub_service.CreateApiRequest() ) @@ -8197,6 +8250,7 @@ def test_create_api_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = common_fields.Api() + post_with_metadata.return_value = common_fields.Api(), metadata client.create_api( request, @@ -8208,6 +8262,7 @@ def test_create_api_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_api_rest_bad_request(request_type=apihub_service.GetApiRequest): @@ -8296,10 +8351,13 @@ def test_get_api_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ApiHubRestInterceptor, "post_get_api" ) as post, mock.patch.object( + transports.ApiHubRestInterceptor, "post_get_api_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ApiHubRestInterceptor, "pre_get_api" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = apihub_service.GetApiRequest.pb(apihub_service.GetApiRequest()) transcode.return_value = { "method": "post", @@ -8321,6 +8379,7 @@ def test_get_api_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = common_fields.Api() + post_with_metadata.return_value = common_fields.Api(), metadata client.get_api( request, @@ -8332,6 +8391,7 @@ def test_get_api_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def 
test_list_apis_rest_bad_request(request_type=apihub_service.ListApisRequest): @@ -8412,10 +8472,13 @@ def test_list_apis_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ApiHubRestInterceptor, "post_list_apis" ) as post, mock.patch.object( + transports.ApiHubRestInterceptor, "post_list_apis_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ApiHubRestInterceptor, "pre_list_apis" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = apihub_service.ListApisRequest.pb(apihub_service.ListApisRequest()) transcode.return_value = { "method": "post", @@ -8439,6 +8502,7 @@ def test_list_apis_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = apihub_service.ListApisResponse() + post_with_metadata.return_value = apihub_service.ListApisResponse(), metadata client.list_apis( request, @@ -8450,6 +8514,7 @@ def test_list_apis_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_api_rest_bad_request(request_type=apihub_service.UpdateApiRequest): @@ -8636,10 +8701,13 @@ def test_update_api_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ApiHubRestInterceptor, "post_update_api" ) as post, mock.patch.object( + transports.ApiHubRestInterceptor, "post_update_api_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ApiHubRestInterceptor, "pre_update_api" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = apihub_service.UpdateApiRequest.pb( apihub_service.UpdateApiRequest() ) @@ -8663,6 +8731,7 @@ def test_update_api_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = common_fields.Api() + post_with_metadata.return_value = common_fields.Api(), metadata client.update_api( request, @@ -8674,6 +8743,7 @@ def test_update_api_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_api_rest_bad_request(request_type=apihub_service.DeleteApiRequest): @@ -8973,10 +9043,13 @@ def test_create_version_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ApiHubRestInterceptor, "post_create_version" ) as post, mock.patch.object( + transports.ApiHubRestInterceptor, "post_create_version_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ApiHubRestInterceptor, "pre_create_version" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = apihub_service.CreateVersionRequest.pb( apihub_service.CreateVersionRequest() ) @@ -9000,6 +9073,7 @@ def test_create_version_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = common_fields.Version() + post_with_metadata.return_value = common_fields.Version(), metadata client.create_version( request, @@ -9011,6 +9085,7 @@ def test_create_version_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_version_rest_bad_request(request_type=apihub_service.GetVersionRequest): @@ -9109,10 +9184,13 @@ def test_get_version_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ApiHubRestInterceptor, "post_get_version" ) as post, mock.patch.object( 
+ transports.ApiHubRestInterceptor, "post_get_version_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ApiHubRestInterceptor, "pre_get_version" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = apihub_service.GetVersionRequest.pb( apihub_service.GetVersionRequest() ) @@ -9136,6 +9214,7 @@ def test_get_version_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = common_fields.Version() + post_with_metadata.return_value = common_fields.Version(), metadata client.get_version( request, @@ -9147,6 +9226,7 @@ def test_get_version_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_versions_rest_bad_request( @@ -9229,10 +9309,13 @@ def test_list_versions_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ApiHubRestInterceptor, "post_list_versions" ) as post, mock.patch.object( + transports.ApiHubRestInterceptor, "post_list_versions_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ApiHubRestInterceptor, "pre_list_versions" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = apihub_service.ListVersionsRequest.pb( apihub_service.ListVersionsRequest() ) @@ -9258,6 +9341,10 @@ def test_list_versions_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = apihub_service.ListVersionsResponse() + post_with_metadata.return_value = ( + apihub_service.ListVersionsResponse(), + metadata, + ) client.list_versions( request, @@ -9269,6 +9356,7 @@ def test_list_versions_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_version_rest_bad_request( @@ -9471,10 +9559,13 @@ def test_update_version_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ApiHubRestInterceptor, "post_update_version" ) as post, mock.patch.object( + transports.ApiHubRestInterceptor, "post_update_version_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ApiHubRestInterceptor, "pre_update_version" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = apihub_service.UpdateVersionRequest.pb( apihub_service.UpdateVersionRequest() ) @@ -9498,6 +9589,7 @@ def test_update_version_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = common_fields.Version() + post_with_metadata.return_value = common_fields.Version(), metadata client.update_version( request, @@ -9509,6 +9601,7 @@ def test_update_version_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_version_rest_bad_request( @@ -9827,10 +9920,13 @@ def test_create_spec_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ApiHubRestInterceptor, "post_create_spec" ) as post, mock.patch.object( + transports.ApiHubRestInterceptor, "post_create_spec_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ApiHubRestInterceptor, "pre_create_spec" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = apihub_service.CreateSpecRequest.pb( apihub_service.CreateSpecRequest() ) @@ -9854,6 +9950,7 @@ 
def test_create_spec_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = common_fields.Spec() + post_with_metadata.return_value = common_fields.Spec(), metadata client.create_spec( request, @@ -9865,6 +9962,7 @@ def test_create_spec_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_spec_rest_bad_request(request_type=apihub_service.GetSpecRequest): @@ -9955,10 +10053,13 @@ def test_get_spec_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ApiHubRestInterceptor, "post_get_spec" ) as post, mock.patch.object( + transports.ApiHubRestInterceptor, "post_get_spec_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ApiHubRestInterceptor, "pre_get_spec" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = apihub_service.GetSpecRequest.pb(apihub_service.GetSpecRequest()) transcode.return_value = { "method": "post", @@ -9980,6 +10081,7 @@ def test_get_spec_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = common_fields.Spec() + post_with_metadata.return_value = common_fields.Spec(), metadata client.get_spec( request, @@ -9991,6 +10093,7 @@ def test_get_spec_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_spec_contents_rest_bad_request( @@ -10079,10 +10182,13 @@ def test_get_spec_contents_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ApiHubRestInterceptor, "post_get_spec_contents" ) as post, mock.patch.object( + transports.ApiHubRestInterceptor, "post_get_spec_contents_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ApiHubRestInterceptor, "pre_get_spec_contents" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = apihub_service.GetSpecContentsRequest.pb( apihub_service.GetSpecContentsRequest() ) @@ -10106,6 +10212,7 @@ def test_get_spec_contents_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = common_fields.SpecContents() + post_with_metadata.return_value = common_fields.SpecContents(), metadata client.get_spec_contents( request, @@ -10117,6 +10224,7 @@ def test_get_spec_contents_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_specs_rest_bad_request(request_type=apihub_service.ListSpecsRequest): @@ -10201,10 +10309,13 @@ def test_list_specs_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ApiHubRestInterceptor, "post_list_specs" ) as post, mock.patch.object( + transports.ApiHubRestInterceptor, "post_list_specs_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ApiHubRestInterceptor, "pre_list_specs" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = apihub_service.ListSpecsRequest.pb( apihub_service.ListSpecsRequest() ) @@ -10230,6 +10341,7 @@ def test_list_specs_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = apihub_service.ListSpecsResponse() + post_with_metadata.return_value = apihub_service.ListSpecsResponse(), metadata client.list_specs( request, @@ -10241,6 +10353,7 @@ def 
test_list_specs_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_spec_rest_bad_request(request_type=apihub_service.UpdateSpecRequest): @@ -10452,10 +10565,13 @@ def test_update_spec_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ApiHubRestInterceptor, "post_update_spec" ) as post, mock.patch.object( + transports.ApiHubRestInterceptor, "post_update_spec_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ApiHubRestInterceptor, "pre_update_spec" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = apihub_service.UpdateSpecRequest.pb( apihub_service.UpdateSpecRequest() ) @@ -10479,6 +10595,7 @@ def test_update_spec_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = common_fields.Spec() + post_with_metadata.return_value = common_fields.Spec(), metadata client.update_spec( request, @@ -10490,6 +10607,7 @@ def test_update_spec_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_spec_rest_bad_request(request_type=apihub_service.DeleteSpecRequest): @@ -10687,10 +10805,13 @@ def test_get_api_operation_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ApiHubRestInterceptor, "post_get_api_operation" ) as post, mock.patch.object( + transports.ApiHubRestInterceptor, "post_get_api_operation_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ApiHubRestInterceptor, "pre_get_api_operation" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = apihub_service.GetApiOperationRequest.pb( apihub_service.GetApiOperationRequest() ) @@ -10714,6 +10835,7 @@ def test_get_api_operation_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = common_fields.ApiOperation() + post_with_metadata.return_value = common_fields.ApiOperation(), metadata client.get_api_operation( request, @@ -10725,6 +10847,7 @@ def test_get_api_operation_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_api_operations_rest_bad_request( @@ -10811,10 +10934,13 @@ def test_list_api_operations_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ApiHubRestInterceptor, "post_list_api_operations" ) as post, mock.patch.object( + transports.ApiHubRestInterceptor, "post_list_api_operations_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ApiHubRestInterceptor, "pre_list_api_operations" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = apihub_service.ListApiOperationsRequest.pb( apihub_service.ListApiOperationsRequest() ) @@ -10840,6 +10966,10 @@ def test_list_api_operations_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = apihub_service.ListApiOperationsResponse() + post_with_metadata.return_value = ( + apihub_service.ListApiOperationsResponse(), + metadata, + ) client.list_api_operations( request, @@ -10851,6 +10981,7 @@ def test_list_api_operations_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def 
test_get_definition_rest_bad_request( @@ -10941,10 +11072,13 @@ def test_get_definition_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ApiHubRestInterceptor, "post_get_definition" ) as post, mock.patch.object( + transports.ApiHubRestInterceptor, "post_get_definition_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ApiHubRestInterceptor, "pre_get_definition" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = apihub_service.GetDefinitionRequest.pb( apihub_service.GetDefinitionRequest() ) @@ -10968,6 +11102,7 @@ def test_get_definition_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = common_fields.Definition() + post_with_metadata.return_value = common_fields.Definition(), metadata client.get_definition( request, @@ -10979,6 +11114,7 @@ def test_get_definition_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_deployment_rest_bad_request( @@ -11167,10 +11303,13 @@ def test_create_deployment_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ApiHubRestInterceptor, "post_create_deployment" ) as post, mock.patch.object( + transports.ApiHubRestInterceptor, "post_create_deployment_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ApiHubRestInterceptor, "pre_create_deployment" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = apihub_service.CreateDeploymentRequest.pb( apihub_service.CreateDeploymentRequest() ) @@ -11194,6 +11333,7 @@ def test_create_deployment_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = common_fields.Deployment() + post_with_metadata.return_value = common_fields.Deployment(), metadata client.create_deployment( request, @@ -11205,6 +11345,7 @@ def test_create_deployment_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_deployment_rest_bad_request( @@ -11297,10 +11438,13 @@ def test_get_deployment_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ApiHubRestInterceptor, "post_get_deployment" ) as post, mock.patch.object( + transports.ApiHubRestInterceptor, "post_get_deployment_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ApiHubRestInterceptor, "pre_get_deployment" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = apihub_service.GetDeploymentRequest.pb( apihub_service.GetDeploymentRequest() ) @@ -11324,6 +11468,7 @@ def test_get_deployment_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = common_fields.Deployment() + post_with_metadata.return_value = common_fields.Deployment(), metadata client.get_deployment( request, @@ -11335,6 +11480,7 @@ def test_get_deployment_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_deployments_rest_bad_request( @@ -11417,10 +11563,13 @@ def test_list_deployments_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ApiHubRestInterceptor, "post_list_deployments" ) as post, mock.patch.object( + transports.ApiHubRestInterceptor, 
"post_list_deployments_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ApiHubRestInterceptor, "pre_list_deployments" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = apihub_service.ListDeploymentsRequest.pb( apihub_service.ListDeploymentsRequest() ) @@ -11446,6 +11595,10 @@ def test_list_deployments_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = apihub_service.ListDeploymentsResponse() + post_with_metadata.return_value = ( + apihub_service.ListDeploymentsResponse(), + metadata, + ) client.list_deployments( request, @@ -11457,6 +11610,7 @@ def test_list_deployments_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_deployment_rest_bad_request( @@ -11649,10 +11803,13 @@ def test_update_deployment_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ApiHubRestInterceptor, "post_update_deployment" ) as post, mock.patch.object( + transports.ApiHubRestInterceptor, "post_update_deployment_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ApiHubRestInterceptor, "pre_update_deployment" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = apihub_service.UpdateDeploymentRequest.pb( apihub_service.UpdateDeploymentRequest() ) @@ -11676,6 +11833,7 @@ def test_update_deployment_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = common_fields.Deployment() + post_with_metadata.return_value = common_fields.Deployment(), metadata client.update_deployment( request, @@ -11687,6 +11845,7 @@ def test_update_deployment_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_deployment_rest_bad_request( @@ -11980,10 +12139,13 @@ def test_create_attribute_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ApiHubRestInterceptor, "post_create_attribute" ) as post, mock.patch.object( + transports.ApiHubRestInterceptor, "post_create_attribute_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ApiHubRestInterceptor, "pre_create_attribute" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = apihub_service.CreateAttributeRequest.pb( apihub_service.CreateAttributeRequest() ) @@ -12007,6 +12169,7 @@ def test_create_attribute_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = common_fields.Attribute() + post_with_metadata.return_value = common_fields.Attribute(), metadata client.create_attribute( request, @@ -12018,6 +12181,7 @@ def test_create_attribute_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_attribute_rest_bad_request( @@ -12117,10 +12281,13 @@ def test_get_attribute_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ApiHubRestInterceptor, "post_get_attribute" ) as post, mock.patch.object( + transports.ApiHubRestInterceptor, "post_get_attribute_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ApiHubRestInterceptor, "pre_get_attribute" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() 
pb_message = apihub_service.GetAttributeRequest.pb( apihub_service.GetAttributeRequest() ) @@ -12144,6 +12311,7 @@ def test_get_attribute_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = common_fields.Attribute() + post_with_metadata.return_value = common_fields.Attribute(), metadata client.get_attribute( request, @@ -12155,6 +12323,7 @@ def test_get_attribute_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_attribute_rest_bad_request( @@ -12345,10 +12514,13 @@ def test_update_attribute_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ApiHubRestInterceptor, "post_update_attribute" ) as post, mock.patch.object( + transports.ApiHubRestInterceptor, "post_update_attribute_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ApiHubRestInterceptor, "pre_update_attribute" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = apihub_service.UpdateAttributeRequest.pb( apihub_service.UpdateAttributeRequest() ) @@ -12372,6 +12544,7 @@ def test_update_attribute_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = common_fields.Attribute() + post_with_metadata.return_value = common_fields.Attribute(), metadata client.update_attribute( request, @@ -12383,6 +12556,7 @@ def test_update_attribute_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_attribute_rest_bad_request( @@ -12572,10 +12746,13 @@ def test_list_attributes_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ApiHubRestInterceptor, "post_list_attributes" ) as post, mock.patch.object( + transports.ApiHubRestInterceptor, "post_list_attributes_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ApiHubRestInterceptor, "pre_list_attributes" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = apihub_service.ListAttributesRequest.pb( apihub_service.ListAttributesRequest() ) @@ -12601,6 +12778,10 @@ def test_list_attributes_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = apihub_service.ListAttributesResponse() + post_with_metadata.return_value = ( + apihub_service.ListAttributesResponse(), + metadata, + ) client.list_attributes( request, @@ -12612,6 +12793,7 @@ def test_list_attributes_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_search_resources_rest_bad_request( @@ -12694,10 +12876,13 @@ def test_search_resources_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ApiHubRestInterceptor, "post_search_resources" ) as post, mock.patch.object( + transports.ApiHubRestInterceptor, "post_search_resources_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ApiHubRestInterceptor, "pre_search_resources" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = apihub_service.SearchResourcesRequest.pb( apihub_service.SearchResourcesRequest() ) @@ -12723,6 +12908,10 @@ def test_search_resources_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = 
apihub_service.SearchResourcesResponse() + post_with_metadata.return_value = ( + apihub_service.SearchResourcesResponse(), + metadata, + ) client.search_resources( request, @@ -12734,6 +12923,7 @@ def test_search_resources_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_external_api_rest_bad_request( @@ -12902,10 +13092,13 @@ def test_create_external_api_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ApiHubRestInterceptor, "post_create_external_api" ) as post, mock.patch.object( + transports.ApiHubRestInterceptor, "post_create_external_api_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ApiHubRestInterceptor, "pre_create_external_api" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = apihub_service.CreateExternalApiRequest.pb( apihub_service.CreateExternalApiRequest() ) @@ -12929,6 +13122,7 @@ def test_create_external_api_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = common_fields.ExternalApi() + post_with_metadata.return_value = common_fields.ExternalApi(), metadata client.create_external_api( request, @@ -12940,6 +13134,7 @@ def test_create_external_api_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_external_api_rest_bad_request( @@ -13030,10 +13225,13 @@ def test_get_external_api_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ApiHubRestInterceptor, "post_get_external_api" ) as post, mock.patch.object( + transports.ApiHubRestInterceptor, "post_get_external_api_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ApiHubRestInterceptor, "pre_get_external_api" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = apihub_service.GetExternalApiRequest.pb( apihub_service.GetExternalApiRequest() ) @@ -13057,6 +13255,7 @@ def test_get_external_api_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = common_fields.ExternalApi() + post_with_metadata.return_value = common_fields.ExternalApi(), metadata client.get_external_api( request, @@ -13068,6 +13267,7 @@ def test_get_external_api_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_external_api_rest_bad_request( @@ -13244,10 +13444,13 @@ def test_update_external_api_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ApiHubRestInterceptor, "post_update_external_api" ) as post, mock.patch.object( + transports.ApiHubRestInterceptor, "post_update_external_api_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ApiHubRestInterceptor, "pre_update_external_api" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = apihub_service.UpdateExternalApiRequest.pb( apihub_service.UpdateExternalApiRequest() ) @@ -13271,6 +13474,7 @@ def test_update_external_api_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = common_fields.ExternalApi() + post_with_metadata.return_value = common_fields.ExternalApi(), metadata client.update_external_api( request, @@ -13282,6 +13486,7 @@ def 
test_update_external_api_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_external_api_rest_bad_request( @@ -13471,10 +13676,13 @@ def test_list_external_apis_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ApiHubRestInterceptor, "post_list_external_apis" ) as post, mock.patch.object( + transports.ApiHubRestInterceptor, "post_list_external_apis_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ApiHubRestInterceptor, "pre_list_external_apis" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = apihub_service.ListExternalApisRequest.pb( apihub_service.ListExternalApisRequest() ) @@ -13500,6 +13708,10 @@ def test_list_external_apis_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = apihub_service.ListExternalApisResponse() + post_with_metadata.return_value = ( + apihub_service.ListExternalApisResponse(), + metadata, + ) client.list_external_apis( request, @@ -13511,6 +13723,7 @@ def test_list_external_apis_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationRequest): diff --git a/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_api_hub_dependencies.py b/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_api_hub_dependencies.py index 0f2b7a7b4cbf..40f5450f62c0 100644 --- a/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_api_hub_dependencies.py +++ b/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_api_hub_dependencies.py @@ -63,6 +63,13 @@ ) from google.cloud.apihub_v1.types import apihub_service, common_fields +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -329,6 +336,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
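The `test__add_cred_info_for_auth_errors` cases added to each test module above and below exercise the new client-side error enrichment. A minimal caller-side sketch of that behavior follows; the project, location, and resource names are illustrative assumptions, and application default credentials with google-auth>=2.35.0 (which provides `get_cred_info`) are assumed.

from google.api_core import exceptions as core_exceptions
from google.cloud import apihub_v1

client = apihub_v1.ApiHubClient()  # assumes application default credentials

try:
    client.get_api(name="projects/my-project/locations/us-central1/apis/my-api")
except core_exceptions.GoogleAPICallError as e:
    # For 401/403/404 responses, the client now appends a JSON string describing
    # the active credential (source, type, principal) to the error details,
    # mirroring CRED_INFO_STRING in the tests; other status codes are untouched.
    print(e.details)
    raise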
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = ApiHubDependenciesClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = ApiHubDependenciesClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -2188,10 +2238,14 @@ def test_create_dependency_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ApiHubDependenciesRestInterceptor, "post_create_dependency" ) as post, mock.patch.object( + transports.ApiHubDependenciesRestInterceptor, + "post_create_dependency_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ApiHubDependenciesRestInterceptor, "pre_create_dependency" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = apihub_service.CreateDependencyRequest.pb( apihub_service.CreateDependencyRequest() ) @@ -2215,6 +2269,7 @@ def test_create_dependency_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = common_fields.Dependency() + post_with_metadata.return_value = common_fields.Dependency(), metadata client.create_dependency( request, @@ -2226,6 +2281,7 @@ def test_create_dependency_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_dependency_rest_bad_request( @@ -2316,10 +2372,14 @@ def test_get_dependency_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ApiHubDependenciesRestInterceptor, "post_get_dependency" ) as post, mock.patch.object( + transports.ApiHubDependenciesRestInterceptor, + "post_get_dependency_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ApiHubDependenciesRestInterceptor, "pre_get_dependency" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = apihub_service.GetDependencyRequest.pb( apihub_service.GetDependencyRequest() ) @@ -2343,6 +2403,7 @@ def test_get_dependency_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = common_fields.Dependency() + post_with_metadata.return_value = common_fields.Dependency(), metadata client.get_dependency( request, @@ -2354,6 +2415,7 @@ def test_get_dependency_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + 
post_with_metadata.assert_called_once() def test_update_dependency_rest_bad_request( @@ -2535,10 +2597,14 @@ def test_update_dependency_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ApiHubDependenciesRestInterceptor, "post_update_dependency" ) as post, mock.patch.object( + transports.ApiHubDependenciesRestInterceptor, + "post_update_dependency_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ApiHubDependenciesRestInterceptor, "pre_update_dependency" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = apihub_service.UpdateDependencyRequest.pb( apihub_service.UpdateDependencyRequest() ) @@ -2562,6 +2628,7 @@ def test_update_dependency_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = common_fields.Dependency() + post_with_metadata.return_value = common_fields.Dependency(), metadata client.update_dependency( request, @@ -2573,6 +2640,7 @@ def test_update_dependency_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_dependency_rest_bad_request( @@ -2766,10 +2834,14 @@ def test_list_dependencies_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ApiHubDependenciesRestInterceptor, "post_list_dependencies" ) as post, mock.patch.object( + transports.ApiHubDependenciesRestInterceptor, + "post_list_dependencies_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ApiHubDependenciesRestInterceptor, "pre_list_dependencies" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = apihub_service.ListDependenciesRequest.pb( apihub_service.ListDependenciesRequest() ) @@ -2795,6 +2867,10 @@ def test_list_dependencies_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = apihub_service.ListDependenciesResponse() + post_with_metadata.return_value = ( + apihub_service.ListDependenciesResponse(), + metadata, + ) client.list_dependencies( request, @@ -2806,6 +2882,7 @@ def test_list_dependencies_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationRequest): diff --git a/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_api_hub_plugin.py b/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_api_hub_plugin.py index d90cab162dae..81ac46692326 100644 --- a/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_api_hub_plugin.py +++ b/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_api_hub_plugin.py @@ -60,6 +60,13 @@ ) from google.cloud.apihub_v1.types import common_fields, plugin_service +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -298,6 +305,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
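The plugin-service tests that follow patch the new `post_*_with_metadata` interceptor hooks. A minimal sketch of how a user might opt into one of these hooks is shown below; constructing the REST transport directly and the header-logging behavior are illustrative assumptions, not something shown in this diff.

from google.cloud import apihub_v1
from google.cloud.apihub_v1.services.api_hub_plugin import transports


class HeaderLoggingInterceptor(transports.ApiHubPluginRestInterceptor):
    def post_get_plugin_with_metadata(self, response, metadata):
        # metadata is the HTTP response headers as (key, value) tuples, built by
        # the transport from response.headers before this hook is invoked.
        print(dict(metadata))
        return response, metadata


# Hypothetical wiring; host and credential setup omitted (application default
# credentials assumed).
transport = transports.ApiHubPluginRestTransport(interceptor=HeaderLoggingInterceptor())
client = apihub_v1.ApiHubPluginClient(transport=transport)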
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = ApiHubPluginClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = ApiHubPluginClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -1580,10 +1630,13 @@ def test_get_plugin_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ApiHubPluginRestInterceptor, "post_get_plugin" ) as post, mock.patch.object( + transports.ApiHubPluginRestInterceptor, "post_get_plugin_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ApiHubPluginRestInterceptor, "pre_get_plugin" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = plugin_service.GetPluginRequest.pb( plugin_service.GetPluginRequest() ) @@ -1607,6 +1660,7 @@ def test_get_plugin_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = plugin_service.Plugin() + post_with_metadata.return_value = plugin_service.Plugin(), metadata client.get_plugin( request, @@ -1618,6 +1672,7 @@ def test_get_plugin_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_enable_plugin_rest_bad_request( @@ -1708,10 +1763,13 @@ def test_enable_plugin_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ApiHubPluginRestInterceptor, "post_enable_plugin" ) as post, mock.patch.object( + transports.ApiHubPluginRestInterceptor, "post_enable_plugin_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ApiHubPluginRestInterceptor, "pre_enable_plugin" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = plugin_service.EnablePluginRequest.pb( plugin_service.EnablePluginRequest() ) @@ -1735,6 +1793,7 @@ def test_enable_plugin_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = plugin_service.Plugin() + post_with_metadata.return_value = plugin_service.Plugin(), metadata client.enable_plugin( request, @@ -1746,6 +1805,7 @@ def test_enable_plugin_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_disable_plugin_rest_bad_request( @@ -1836,10 +1896,13 @@ def 
test_disable_plugin_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ApiHubPluginRestInterceptor, "post_disable_plugin" ) as post, mock.patch.object( + transports.ApiHubPluginRestInterceptor, "post_disable_plugin_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ApiHubPluginRestInterceptor, "pre_disable_plugin" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = plugin_service.DisablePluginRequest.pb( plugin_service.DisablePluginRequest() ) @@ -1863,6 +1926,7 @@ def test_disable_plugin_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = plugin_service.Plugin() + post_with_metadata.return_value = plugin_service.Plugin(), metadata client.disable_plugin( request, @@ -1874,6 +1938,7 @@ def test_disable_plugin_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationRequest): diff --git a/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_host_project_registration_service.py b/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_host_project_registration_service.py index 645a06d5663f..098a53c808bd 100644 --- a/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_host_project_registration_service.py +++ b/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_host_project_registration_service.py @@ -62,6 +62,13 @@ ) from google.cloud.apihub_v1.types import host_project_registration_service +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -347,6 +354,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
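
The parametrized `test__add_cred_info_for_auth_errors` cases that follow exercise the new `_add_cred_info_for_auth_errors` client helper, whose generated implementation appears further down in this diff (see the appengine_admin client.py hunks). As an illustration of the behaviour being verified, here is the same logic rewritten as a standalone free function, with a small usage check mirroring the test fixture:

```python
import json
from http import HTTPStatus

from google.api_core import exceptions as core_exceptions


def add_cred_info_for_auth_errors(error, credentials) -> None:
    """Illustrative free-function version of the client helper under test."""
    # Credential info is only attached for 401/403/404 responses.
    if error.code not in (
        HTTPStatus.UNAUTHORIZED,
        HTTPStatus.FORBIDDEN,
        HTTPStatus.NOT_FOUND,
    ):
        return
    # get_cred_info is only available in google-auth>=2.35.0.
    if not hasattr(credentials, "get_cred_info"):
        return
    cred_info = credentials.get_cred_info()
    if cred_info and hasattr(error._details, "append"):
        error._details.append(json.dumps(cred_info))


class _FakeCred:
    def get_cred_info(self):
        return {"principal": "service-account@example.com"}


error = core_exceptions.GoogleAPICallError("message", details=["foo"])
error.code = 403
add_cred_info_for_auth_errors(error, _FakeCred())
assert error.details == [
    "foo",
    json.dumps({"principal": "service-account@example.com"}),
]
```
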
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = HostProjectRegistrationServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = HostProjectRegistrationServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -1922,11 +1972,15 @@ def test_create_host_project_registration_rest_interceptors(null_interceptor): transports.HostProjectRegistrationServiceRestInterceptor, "post_create_host_project_registration", ) as post, mock.patch.object( + transports.HostProjectRegistrationServiceRestInterceptor, + "post_create_host_project_registration_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.HostProjectRegistrationServiceRestInterceptor, "pre_create_host_project_registration", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = ( host_project_registration_service.CreateHostProjectRegistrationRequest.pb( host_project_registration_service.CreateHostProjectRegistrationRequest() @@ -1958,6 +2012,10 @@ def test_create_host_project_registration_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = host_project_registration_service.HostProjectRegistration() + post_with_metadata.return_value = ( + host_project_registration_service.HostProjectRegistration(), + metadata, + ) client.create_host_project_registration( request, @@ -1969,6 +2027,7 @@ def test_create_host_project_registration_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_host_project_registration_rest_bad_request( @@ -2064,11 +2123,15 @@ def test_get_host_project_registration_rest_interceptors(null_interceptor): transports.HostProjectRegistrationServiceRestInterceptor, "post_get_host_project_registration", ) as post, mock.patch.object( + transports.HostProjectRegistrationServiceRestInterceptor, + "post_get_host_project_registration_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.HostProjectRegistrationServiceRestInterceptor, "pre_get_host_project_registration", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = ( host_project_registration_service.GetHostProjectRegistrationRequest.pb( host_project_registration_service.GetHostProjectRegistrationRequest() @@ -2098,6 +2161,10 
@@ def test_get_host_project_registration_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = host_project_registration_service.HostProjectRegistration() + post_with_metadata.return_value = ( + host_project_registration_service.HostProjectRegistration(), + metadata, + ) client.get_host_project_registration( request, @@ -2109,6 +2176,7 @@ def test_get_host_project_registration_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_host_project_registrations_rest_bad_request( @@ -2200,11 +2268,15 @@ def test_list_host_project_registrations_rest_interceptors(null_interceptor): transports.HostProjectRegistrationServiceRestInterceptor, "post_list_host_project_registrations", ) as post, mock.patch.object( + transports.HostProjectRegistrationServiceRestInterceptor, + "post_list_host_project_registrations_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.HostProjectRegistrationServiceRestInterceptor, "pre_list_host_project_registrations", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = ( host_project_registration_service.ListHostProjectRegistrationsRequest.pb( host_project_registration_service.ListHostProjectRegistrationsRequest() @@ -2236,6 +2308,10 @@ def test_list_host_project_registrations_rest_interceptors(null_interceptor): post.return_value = ( host_project_registration_service.ListHostProjectRegistrationsResponse() ) + post_with_metadata.return_value = ( + host_project_registration_service.ListHostProjectRegistrationsResponse(), + metadata, + ) client.list_host_project_registrations( request, @@ -2247,6 +2323,7 @@ def test_list_host_project_registrations_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationRequest): diff --git a/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_linting_service.py b/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_linting_service.py index 0971bf3da968..b394a274a688 100644 --- a/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_linting_service.py +++ b/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_linting_service.py @@ -61,6 +61,13 @@ ) from google.cloud.apihub_v1.types import common_fields, linting_service +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -314,6 +321,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
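
The transports/rest.py hunks later in this diff show how each generated `__call__` now chains the two post hooks: the legacy hook runs first, then the metadata-aware hook receives the HTTP response headers. A simplified, self-contained sketch of that chaining, using `get_style_guide` as a stand-in method name (the real code lives inside each generated REST stub's `__call__`):

```python
from typing import Any, Mapping, Sequence, Tuple


def run_post_hooks(interceptor: Any, resp: Any, headers: Mapping[str, Any]) -> Any:
    """Simplified sketch of the post-hook chaining added to each REST stub."""
    # 1. The legacy post hook runs first and may replace the response object.
    resp = interceptor.post_get_style_guide(resp)
    # 2. The response headers are converted to (key, str(value)) pairs and
    #    passed to the metadata-aware hook; the metadata it returns is
    #    currently discarded by the generated code (`resp, _ = ...`).
    response_metadata: Sequence[Tuple[str, str]] = [
        (k, str(v)) for k, v in headers.items()
    ]
    resp, _ = interceptor.post_get_style_guide_with_metadata(resp, response_metadata)
    return resp
```
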
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = LintingServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = LintingServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -1734,10 +1784,13 @@ def test_get_style_guide_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.LintingServiceRestInterceptor, "post_get_style_guide" ) as post, mock.patch.object( + transports.LintingServiceRestInterceptor, "post_get_style_guide_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.LintingServiceRestInterceptor, "pre_get_style_guide" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = linting_service.GetStyleGuideRequest.pb( linting_service.GetStyleGuideRequest() ) @@ -1761,6 +1814,7 @@ def test_get_style_guide_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = linting_service.StyleGuide() + post_with_metadata.return_value = linting_service.StyleGuide(), metadata client.get_style_guide( request, @@ -1772,6 +1826,7 @@ def test_get_style_guide_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_style_guide_rest_bad_request( @@ -1938,10 +1993,14 @@ def test_update_style_guide_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.LintingServiceRestInterceptor, "post_update_style_guide" ) as post, mock.patch.object( + transports.LintingServiceRestInterceptor, + "post_update_style_guide_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.LintingServiceRestInterceptor, "pre_update_style_guide" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = linting_service.UpdateStyleGuideRequest.pb( linting_service.UpdateStyleGuideRequest() ) @@ -1965,6 +2024,7 @@ def test_update_style_guide_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = linting_service.StyleGuide() + post_with_metadata.return_value = linting_service.StyleGuide(), metadata client.update_style_guide( request, @@ -1976,6 +2036,7 @@ def test_update_style_guide_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + 
post_with_metadata.assert_called_once() def test_get_style_guide_contents_rest_bad_request( @@ -2066,10 +2127,14 @@ def test_get_style_guide_contents_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.LintingServiceRestInterceptor, "post_get_style_guide_contents" ) as post, mock.patch.object( + transports.LintingServiceRestInterceptor, + "post_get_style_guide_contents_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.LintingServiceRestInterceptor, "pre_get_style_guide_contents" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = linting_service.GetStyleGuideContentsRequest.pb( linting_service.GetStyleGuideContentsRequest() ) @@ -2095,6 +2160,7 @@ def test_get_style_guide_contents_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = linting_service.StyleGuideContents() + post_with_metadata.return_value = linting_service.StyleGuideContents(), metadata client.get_style_guide_contents( request, @@ -2106,6 +2172,7 @@ def test_get_style_guide_contents_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_lint_spec_rest_bad_request(request_type=linting_service.LintSpecRequest): diff --git a/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_provisioning.py b/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_provisioning.py index d1d2ff5a7526..fd8034bca2e5 100644 --- a/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_provisioning.py +++ b/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_provisioning.py @@ -67,6 +67,13 @@ from google.cloud.apihub_v1.services.provisioning import ProvisioningClient, transports from google.cloud.apihub_v1.types import common_fields, provisioning_service +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -305,6 +312,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
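
The credential-info feature is also user-visible: when a call fails with 401/403/404 and the credentials expose `get_cred_info()`, the raised exception's `details` gain a JSON description of the credential, shaped like the `CRED_INFO_JSON` fixture above. A hedged consumption sketch follows; the resource name is a placeholder, the `transport="rest"` client construction is an assumption, and anonymous credentials will not actually carry credential info — the example only shows where the detail would surface:

```python
import json

from google.api_core import exceptions as core_exceptions
from google.auth.credentials import AnonymousCredentials
from google.cloud.apihub_v1.services.provisioning import ProvisioningClient

client = ProvisioningClient(credentials=AnonymousCredentials(), transport="rest")

try:
    # Placeholder resource name; this call is expected to fail.
    client.get_api_hub_instance(
        name="projects/my-project/locations/us-central1/apiHubInstances/my-instance"
    )
except core_exceptions.GoogleAPICallError as exc:
    for detail in exc.details:
        # Credential info appended by the client arrives as a JSON string.
        try:
            info = json.loads(detail)
            print(info.get("credential_type"), info.get("principal"))
        except (TypeError, ValueError):
            print(detail)
```
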
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = ProvisioningClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = ProvisioningClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -1693,10 +1743,14 @@ def test_create_api_hub_instance_rest_interceptors(null_interceptor): ), mock.patch.object( transports.ProvisioningRestInterceptor, "post_create_api_hub_instance" ) as post, mock.patch.object( + transports.ProvisioningRestInterceptor, + "post_create_api_hub_instance_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ProvisioningRestInterceptor, "pre_create_api_hub_instance" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = provisioning_service.CreateApiHubInstanceRequest.pb( provisioning_service.CreateApiHubInstanceRequest() ) @@ -1720,6 +1774,7 @@ def test_create_api_hub_instance_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_api_hub_instance( request, @@ -1731,6 +1786,7 @@ def test_create_api_hub_instance_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_api_hub_instance_rest_bad_request( @@ -1825,10 +1881,14 @@ def test_get_api_hub_instance_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ProvisioningRestInterceptor, "post_get_api_hub_instance" ) as post, mock.patch.object( + transports.ProvisioningRestInterceptor, + "post_get_api_hub_instance_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ProvisioningRestInterceptor, "pre_get_api_hub_instance" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = provisioning_service.GetApiHubInstanceRequest.pb( provisioning_service.GetApiHubInstanceRequest() ) @@ -1854,6 +1914,7 @@ def test_get_api_hub_instance_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = common_fields.ApiHubInstance() + post_with_metadata.return_value = common_fields.ApiHubInstance(), metadata client.get_api_hub_instance( request, @@ -1865,6 +1926,7 @@ def test_get_api_hub_instance_rest_interceptors(null_interceptor): 
pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_lookup_api_hub_instance_rest_bad_request( @@ -1948,10 +2010,14 @@ def test_lookup_api_hub_instance_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ProvisioningRestInterceptor, "post_lookup_api_hub_instance" ) as post, mock.patch.object( + transports.ProvisioningRestInterceptor, + "post_lookup_api_hub_instance_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ProvisioningRestInterceptor, "pre_lookup_api_hub_instance" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = provisioning_service.LookupApiHubInstanceRequest.pb( provisioning_service.LookupApiHubInstanceRequest() ) @@ -1977,6 +2043,10 @@ def test_lookup_api_hub_instance_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = provisioning_service.LookupApiHubInstanceResponse() + post_with_metadata.return_value = ( + provisioning_service.LookupApiHubInstanceResponse(), + metadata, + ) client.lookup_api_hub_instance( request, @@ -1988,6 +2058,7 @@ def test_lookup_api_hub_instance_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationRequest): diff --git a/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_runtime_project_attachment_service.py b/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_runtime_project_attachment_service.py index 895ab6aec87d..83a647839f9a 100644 --- a/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_runtime_project_attachment_service.py +++ b/packages/google-cloud-apihub/tests/unit/gapic/apihub_v1/test_runtime_project_attachment_service.py @@ -62,6 +62,13 @@ ) from google.cloud.apihub_v1.types import runtime_project_attachment_service +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -351,6 +358,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
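
The `test__add_cred_info_for_auth_errors*` tests that follow rely on `mock.Mock` with a list spec so that `hasattr()` checks on the fake credentials behave deterministically: only attribute names included in the list exist on the mock. A small self-contained illustration of that testing trick:

```python
from unittest import mock

# Passing a list of names as the spec limits which attributes the mock
# exposes, which is what lets the production hasattr(cred, "get_cred_info")
# check take both branches in the tests above and below.
cred_with_info = mock.Mock(["get_cred_info"])
cred_with_info.get_cred_info = mock.Mock(
    return_value={"principal": "service-account@example.com"}
)
assert hasattr(cred_with_info, "get_cred_info")
assert cred_with_info.get_cred_info() == {
    "principal": "service-account@example.com"
}

cred_without_info = mock.Mock([])
assert not hasattr(cred_without_info, "get_cred_info")
```
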
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = RuntimeProjectAttachmentServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = RuntimeProjectAttachmentServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -2311,11 +2361,15 @@ def test_create_runtime_project_attachment_rest_interceptors(null_interceptor): transports.RuntimeProjectAttachmentServiceRestInterceptor, "post_create_runtime_project_attachment", ) as post, mock.patch.object( + transports.RuntimeProjectAttachmentServiceRestInterceptor, + "post_create_runtime_project_attachment_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.RuntimeProjectAttachmentServiceRestInterceptor, "pre_create_runtime_project_attachment", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = runtime_project_attachment_service.CreateRuntimeProjectAttachmentRequest.pb( runtime_project_attachment_service.CreateRuntimeProjectAttachmentRequest() ) @@ -2347,6 +2401,10 @@ def test_create_runtime_project_attachment_rest_interceptors(null_interceptor): post.return_value = ( runtime_project_attachment_service.RuntimeProjectAttachment() ) + post_with_metadata.return_value = ( + runtime_project_attachment_service.RuntimeProjectAttachment(), + metadata, + ) client.create_runtime_project_attachment( request, @@ -2358,6 +2416,7 @@ def test_create_runtime_project_attachment_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_runtime_project_attachment_rest_bad_request( @@ -2453,11 +2512,15 @@ def test_get_runtime_project_attachment_rest_interceptors(null_interceptor): transports.RuntimeProjectAttachmentServiceRestInterceptor, "post_get_runtime_project_attachment", ) as post, mock.patch.object( + transports.RuntimeProjectAttachmentServiceRestInterceptor, + "post_get_runtime_project_attachment_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.RuntimeProjectAttachmentServiceRestInterceptor, "pre_get_runtime_project_attachment", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = ( runtime_project_attachment_service.GetRuntimeProjectAttachmentRequest.pb( runtime_project_attachment_service.GetRuntimeProjectAttachmentRequest() @@ -2491,6 +2554,10 @@ 
def test_get_runtime_project_attachment_rest_interceptors(null_interceptor): post.return_value = ( runtime_project_attachment_service.RuntimeProjectAttachment() ) + post_with_metadata.return_value = ( + runtime_project_attachment_service.RuntimeProjectAttachment(), + metadata, + ) client.get_runtime_project_attachment( request, @@ -2502,6 +2569,7 @@ def test_get_runtime_project_attachment_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_runtime_project_attachments_rest_bad_request( @@ -2593,11 +2661,15 @@ def test_list_runtime_project_attachments_rest_interceptors(null_interceptor): transports.RuntimeProjectAttachmentServiceRestInterceptor, "post_list_runtime_project_attachments", ) as post, mock.patch.object( + transports.RuntimeProjectAttachmentServiceRestInterceptor, + "post_list_runtime_project_attachments_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.RuntimeProjectAttachmentServiceRestInterceptor, "pre_list_runtime_project_attachments", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = runtime_project_attachment_service.ListRuntimeProjectAttachmentsRequest.pb( runtime_project_attachment_service.ListRuntimeProjectAttachmentsRequest() ) @@ -2627,6 +2699,10 @@ def test_list_runtime_project_attachments_rest_interceptors(null_interceptor): post.return_value = ( runtime_project_attachment_service.ListRuntimeProjectAttachmentsResponse() ) + post_with_metadata.return_value = ( + runtime_project_attachment_service.ListRuntimeProjectAttachmentsResponse(), + metadata, + ) client.list_runtime_project_attachments( request, @@ -2638,6 +2714,7 @@ def test_list_runtime_project_attachments_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_runtime_project_attachment_rest_bad_request( @@ -2843,11 +2920,15 @@ def test_lookup_runtime_project_attachment_rest_interceptors(null_interceptor): transports.RuntimeProjectAttachmentServiceRestInterceptor, "post_lookup_runtime_project_attachment", ) as post, mock.patch.object( + transports.RuntimeProjectAttachmentServiceRestInterceptor, + "post_lookup_runtime_project_attachment_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.RuntimeProjectAttachmentServiceRestInterceptor, "pre_lookup_runtime_project_attachment", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = runtime_project_attachment_service.LookupRuntimeProjectAttachmentRequest.pb( runtime_project_attachment_service.LookupRuntimeProjectAttachmentRequest() ) @@ -2877,6 +2958,10 @@ def test_lookup_runtime_project_attachment_rest_interceptors(null_interceptor): post.return_value = ( runtime_project_attachment_service.LookupRuntimeProjectAttachmentResponse() ) + post_with_metadata.return_value = ( + runtime_project_attachment_service.LookupRuntimeProjectAttachmentResponse(), + metadata, + ) client.lookup_runtime_project_attachment( request, @@ -2888,6 +2973,7 @@ def test_lookup_runtime_project_attachment_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationRequest): diff --git a/packages/google-cloud-appengine-admin/CHANGELOG.md b/packages/google-cloud-appengine-admin/CHANGELOG.md index 
8bd03642a603..7af4028caced 100644 --- a/packages/google-cloud-appengine-admin/CHANGELOG.md +++ b/packages/google-cloud-appengine-admin/CHANGELOG.md @@ -1,5 +1,13 @@ # Changelog +## [1.14.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-appengine-admin-v1.13.0...google-cloud-appengine-admin-v1.14.0) (2025-02-12) + + +### Features + +* Add REST Interceptors which support reading metadata ([a961bc0](https://github.com/googleapis/google-cloud-python/commit/a961bc029201b72fc4923490aeb3d82781853e6a)) +* Add support for reading selective GAPIC generation methods from service YAML ([a961bc0](https://github.com/googleapis/google-cloud-python/commit/a961bc029201b72fc4923490aeb3d82781853e6a)) + ## [1.13.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-appengine-admin-v1.12.1...google-cloud-appengine-admin-v1.13.0) (2024-12-12) diff --git a/packages/google-cloud-appengine-admin/google/cloud/appengine_admin/gapic_version.py b/packages/google-cloud-appengine-admin/google/cloud/appengine_admin/gapic_version.py index 43155ded0db3..2159c8af6f8e 100644 --- a/packages/google-cloud-appengine-admin/google/cloud/appengine_admin/gapic_version.py +++ b/packages/google-cloud-appengine-admin/google/cloud/appengine_admin/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.13.0" # {x-release-please-version} +__version__ = "1.14.0" # {x-release-please-version} diff --git a/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/gapic_version.py b/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/gapic_version.py index 43155ded0db3..2159c8af6f8e 100644 --- a/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/gapic_version.py +++ b/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.13.0" # {x-release-please-version} +__version__ = "1.14.0" # {x-release-please-version} diff --git a/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/applications/client.py b/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/applications/client.py index 7897b1713042..170669d29ecc 100644 --- a/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/applications/client.py +++ b/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/applications/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -464,6 +466,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. 
+ """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. diff --git a/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/applications/transports/rest.py b/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/applications/transports/rest.py index 9d60835f0d57..e6bd11f7cf32 100644 --- a/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/applications/transports/rest.py +++ b/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/applications/transports/rest.py @@ -126,12 +126,35 @@ def post_create_application( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_application - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_application_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Applications server but before - it is returned to user code. + it is returned to user code. This `post_create_application` interceptor runs + before the `post_create_application_with_metadata` interceptor. """ return response + def post_create_application_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_application + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Applications server but before it is returned to user code. + + We recommend only using this `post_create_application_with_metadata` + interceptor in new development instead of the `post_create_application` interceptor. + When both interceptors are used, this `post_create_application_with_metadata` interceptor runs after the + `post_create_application` interceptor. The (possibly modified) response returned by + `post_create_application` will be passed to + `post_create_application_with_metadata`. + """ + return response, metadata + def pre_get_application( self, request: appengine.GetApplicationRequest, @@ -151,12 +174,35 @@ def post_get_application( ) -> application.Application: """Post-rpc interceptor for get_application - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_application_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Applications server but before - it is returned to user code. + it is returned to user code. This `post_get_application` interceptor runs + before the `post_get_application_with_metadata` interceptor. 
""" return response + def post_get_application_with_metadata( + self, + response: application.Application, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[application.Application, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_application + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Applications server but before it is returned to user code. + + We recommend only using this `post_get_application_with_metadata` + interceptor in new development instead of the `post_get_application` interceptor. + When both interceptors are used, this `post_get_application_with_metadata` interceptor runs after the + `post_get_application` interceptor. The (possibly modified) response returned by + `post_get_application` will be passed to + `post_get_application_with_metadata`. + """ + return response, metadata + def pre_repair_application( self, request: appengine.RepairApplicationRequest, @@ -176,12 +222,35 @@ def post_repair_application( ) -> operations_pb2.Operation: """Post-rpc interceptor for repair_application - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_repair_application_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Applications server but before - it is returned to user code. + it is returned to user code. This `post_repair_application` interceptor runs + before the `post_repair_application_with_metadata` interceptor. """ return response + def post_repair_application_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for repair_application + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Applications server but before it is returned to user code. + + We recommend only using this `post_repair_application_with_metadata` + interceptor in new development instead of the `post_repair_application` interceptor. + When both interceptors are used, this `post_repair_application_with_metadata` interceptor runs after the + `post_repair_application` interceptor. The (possibly modified) response returned by + `post_repair_application` will be passed to + `post_repair_application_with_metadata`. + """ + return response, metadata + def pre_update_application( self, request: appengine.UpdateApplicationRequest, @@ -201,12 +270,35 @@ def post_update_application( ) -> operations_pb2.Operation: """Post-rpc interceptor for update_application - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_application_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Applications server but before - it is returned to user code. + it is returned to user code. This `post_update_application` interceptor runs + before the `post_update_application_with_metadata` interceptor. 
""" return response + def post_update_application_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_application + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Applications server but before it is returned to user code. + + We recommend only using this `post_update_application_with_metadata` + interceptor in new development instead of the `post_update_application` interceptor. + When both interceptors are used, this `post_update_application_with_metadata` interceptor runs after the + `post_update_application` interceptor. The (possibly modified) response returned by + `post_update_application` will be passed to + `post_update_application_with_metadata`. + """ + return response, metadata + @dataclasses.dataclass class ApplicationsRestStub: @@ -461,6 +553,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_application(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_application_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -604,6 +700,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_application(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_application_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -754,6 +854,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_repair_application(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_repair_application_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -903,6 +1007,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_application(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_application_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/authorized_certificates/client.py b/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/authorized_certificates/client.py index 3ad8ad264dc2..0ca7ef3c1b76 100644 --- a/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/authorized_certificates/client.py +++ b/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/authorized_certificates/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -469,6 +471,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. 
return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. diff --git a/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/authorized_certificates/transports/rest.py b/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/authorized_certificates/transports/rest.py index 5710cbbe06f0..f365434ad67c 100644 --- a/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/authorized_certificates/transports/rest.py +++ b/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/authorized_certificates/transports/rest.py @@ -131,12 +131,37 @@ def post_create_authorized_certificate( ) -> certificate.AuthorizedCertificate: """Post-rpc interceptor for create_authorized_certificate - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_authorized_certificate_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AuthorizedCertificates server but before - it is returned to user code. + it is returned to user code. This `post_create_authorized_certificate` interceptor runs + before the `post_create_authorized_certificate_with_metadata` interceptor. """ return response + def post_create_authorized_certificate_with_metadata( + self, + response: certificate.AuthorizedCertificate, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + certificate.AuthorizedCertificate, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for create_authorized_certificate + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AuthorizedCertificates server but before it is returned to user code. + + We recommend only using this `post_create_authorized_certificate_with_metadata` + interceptor in new development instead of the `post_create_authorized_certificate` interceptor. + When both interceptors are used, this `post_create_authorized_certificate_with_metadata` interceptor runs after the + `post_create_authorized_certificate` interceptor. The (possibly modified) response returned by + `post_create_authorized_certificate` will be passed to + `post_create_authorized_certificate_with_metadata`. + """ + return response, metadata + def pre_delete_authorized_certificate( self, request: appengine.DeleteAuthorizedCertificateRequest, @@ -172,12 +197,37 @@ def post_get_authorized_certificate( ) -> certificate.AuthorizedCertificate: """Post-rpc interceptor for get_authorized_certificate - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_authorized_certificate_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response after it is returned by the AuthorizedCertificates server but before - it is returned to user code. + it is returned to user code. This `post_get_authorized_certificate` interceptor runs + before the `post_get_authorized_certificate_with_metadata` interceptor. """ return response + def post_get_authorized_certificate_with_metadata( + self, + response: certificate.AuthorizedCertificate, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + certificate.AuthorizedCertificate, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for get_authorized_certificate + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AuthorizedCertificates server but before it is returned to user code. + + We recommend only using this `post_get_authorized_certificate_with_metadata` + interceptor in new development instead of the `post_get_authorized_certificate` interceptor. + When both interceptors are used, this `post_get_authorized_certificate_with_metadata` interceptor runs after the + `post_get_authorized_certificate` interceptor. The (possibly modified) response returned by + `post_get_authorized_certificate` will be passed to + `post_get_authorized_certificate_with_metadata`. + """ + return response, metadata + def pre_list_authorized_certificates( self, request: appengine.ListAuthorizedCertificatesRequest, @@ -198,12 +248,38 @@ def post_list_authorized_certificates( ) -> appengine.ListAuthorizedCertificatesResponse: """Post-rpc interceptor for list_authorized_certificates - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_authorized_certificates_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AuthorizedCertificates server but before - it is returned to user code. + it is returned to user code. This `post_list_authorized_certificates` interceptor runs + before the `post_list_authorized_certificates_with_metadata` interceptor. """ return response + def post_list_authorized_certificates_with_metadata( + self, + response: appengine.ListAuthorizedCertificatesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + appengine.ListAuthorizedCertificatesResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_authorized_certificates + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AuthorizedCertificates server but before it is returned to user code. + + We recommend only using this `post_list_authorized_certificates_with_metadata` + interceptor in new development instead of the `post_list_authorized_certificates` interceptor. + When both interceptors are used, this `post_list_authorized_certificates_with_metadata` interceptor runs after the + `post_list_authorized_certificates` interceptor. The (possibly modified) response returned by + `post_list_authorized_certificates` will be passed to + `post_list_authorized_certificates_with_metadata`. + """ + return response, metadata + def pre_update_authorized_certificate( self, request: appengine.UpdateAuthorizedCertificateRequest, @@ -224,12 +300,37 @@ def post_update_authorized_certificate( ) -> certificate.AuthorizedCertificate: """Post-rpc interceptor for update_authorized_certificate - Override in a subclass to manipulate the response + DEPRECATED. 
Please use the `post_update_authorized_certificate_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AuthorizedCertificates server but before - it is returned to user code. + it is returned to user code. This `post_update_authorized_certificate` interceptor runs + before the `post_update_authorized_certificate_with_metadata` interceptor. """ return response + def post_update_authorized_certificate_with_metadata( + self, + response: certificate.AuthorizedCertificate, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + certificate.AuthorizedCertificate, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for update_authorized_certificate + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AuthorizedCertificates server but before it is returned to user code. + + We recommend only using this `post_update_authorized_certificate_with_metadata` + interceptor in new development instead of the `post_update_authorized_certificate` interceptor. + When both interceptors are used, this `post_update_authorized_certificate_with_metadata` interceptor runs after the + `post_update_authorized_certificate` interceptor. The (possibly modified) response returned by + `post_update_authorized_certificate` will be passed to + `post_update_authorized_certificate_with_metadata`. + """ + return response, metadata + @dataclasses.dataclass class AuthorizedCertificatesRestStub: @@ -454,6 +555,13 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_authorized_certificate(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_create_authorized_certificate_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -719,6 +827,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_authorized_certificate(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_authorized_certificate_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -870,6 +982,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_authorized_certificates(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_authorized_certificates_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1030,6 +1146,13 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_authorized_certificate(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_update_authorized_certificate_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/authorized_domains/client.py 
b/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/authorized_domains/client.py index 9483909886d3..6c91e010a627 100644 --- a/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/authorized_domains/client.py +++ b/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/authorized_domains/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -465,6 +467,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. diff --git a/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/authorized_domains/transports/rest.py b/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/authorized_domains/transports/rest.py index 0441ac22e8a9..16c1350a16b3 100644 --- a/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/authorized_domains/transports/rest.py +++ b/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/authorized_domains/transports/rest.py @@ -101,12 +101,37 @@ def post_list_authorized_domains( ) -> appengine.ListAuthorizedDomainsResponse: """Post-rpc interceptor for list_authorized_domains - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_authorized_domains_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AuthorizedDomains server but before - it is returned to user code. + it is returned to user code. This `post_list_authorized_domains` interceptor runs + before the `post_list_authorized_domains_with_metadata` interceptor. """ return response + def post_list_authorized_domains_with_metadata( + self, + response: appengine.ListAuthorizedDomainsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + appengine.ListAuthorizedDomainsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_authorized_domains + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AuthorizedDomains server but before it is returned to user code. + + We recommend only using this `post_list_authorized_domains_with_metadata` + interceptor in new development instead of the `post_list_authorized_domains` interceptor. 
+ When both interceptors are used, this `post_list_authorized_domains_with_metadata` interceptor runs after the + `post_list_authorized_domains` interceptor. The (possibly modified) response returned by + `post_list_authorized_domains` will be passed to + `post_list_authorized_domains_with_metadata`. + """ + return response, metadata + @dataclasses.dataclass class AuthorizedDomainsRestStub: @@ -321,6 +346,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_authorized_domains(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_authorized_domains_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/domain_mappings/client.py b/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/domain_mappings/client.py index 8fae3ea02743..43e3bcf28a90 100644 --- a/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/domain_mappings/client.py +++ b/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/domain_mappings/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -467,6 +469,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. diff --git a/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/domain_mappings/transports/rest.py b/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/domain_mappings/transports/rest.py index a57146f94322..c119e7e9aa9e 100644 --- a/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/domain_mappings/transports/rest.py +++ b/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/domain_mappings/transports/rest.py @@ -134,12 +134,35 @@ def post_create_domain_mapping( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_domain_mapping - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_domain_mapping_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DomainMappings server but before - it is returned to user code. + it is returned to user code. 
This `post_create_domain_mapping` interceptor runs + before the `post_create_domain_mapping_with_metadata` interceptor. """ return response + def post_create_domain_mapping_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_domain_mapping + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DomainMappings server but before it is returned to user code. + + We recommend only using this `post_create_domain_mapping_with_metadata` + interceptor in new development instead of the `post_create_domain_mapping` interceptor. + When both interceptors are used, this `post_create_domain_mapping_with_metadata` interceptor runs after the + `post_create_domain_mapping` interceptor. The (possibly modified) response returned by + `post_create_domain_mapping` will be passed to + `post_create_domain_mapping_with_metadata`. + """ + return response, metadata + def pre_delete_domain_mapping( self, request: appengine.DeleteDomainMappingRequest, @@ -159,12 +182,35 @@ def post_delete_domain_mapping( ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_domain_mapping - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_domain_mapping_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DomainMappings server but before - it is returned to user code. + it is returned to user code. This `post_delete_domain_mapping` interceptor runs + before the `post_delete_domain_mapping_with_metadata` interceptor. """ return response + def post_delete_domain_mapping_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_domain_mapping + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DomainMappings server but before it is returned to user code. + + We recommend only using this `post_delete_domain_mapping_with_metadata` + interceptor in new development instead of the `post_delete_domain_mapping` interceptor. + When both interceptors are used, this `post_delete_domain_mapping_with_metadata` interceptor runs after the + `post_delete_domain_mapping` interceptor. The (possibly modified) response returned by + `post_delete_domain_mapping` will be passed to + `post_delete_domain_mapping_with_metadata`. + """ + return response, metadata + def pre_get_domain_mapping( self, request: appengine.GetDomainMappingRequest, @@ -184,12 +230,35 @@ def post_get_domain_mapping( ) -> domain_mapping.DomainMapping: """Post-rpc interceptor for get_domain_mapping - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_domain_mapping_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DomainMappings server but before - it is returned to user code. + it is returned to user code. This `post_get_domain_mapping` interceptor runs + before the `post_get_domain_mapping_with_metadata` interceptor. 
""" return response + def post_get_domain_mapping_with_metadata( + self, + response: domain_mapping.DomainMapping, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[domain_mapping.DomainMapping, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_domain_mapping + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DomainMappings server but before it is returned to user code. + + We recommend only using this `post_get_domain_mapping_with_metadata` + interceptor in new development instead of the `post_get_domain_mapping` interceptor. + When both interceptors are used, this `post_get_domain_mapping_with_metadata` interceptor runs after the + `post_get_domain_mapping` interceptor. The (possibly modified) response returned by + `post_get_domain_mapping` will be passed to + `post_get_domain_mapping_with_metadata`. + """ + return response, metadata + def pre_list_domain_mappings( self, request: appengine.ListDomainMappingsRequest, @@ -209,12 +278,37 @@ def post_list_domain_mappings( ) -> appengine.ListDomainMappingsResponse: """Post-rpc interceptor for list_domain_mappings - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_domain_mappings_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DomainMappings server but before - it is returned to user code. + it is returned to user code. This `post_list_domain_mappings` interceptor runs + before the `post_list_domain_mappings_with_metadata` interceptor. """ return response + def post_list_domain_mappings_with_metadata( + self, + response: appengine.ListDomainMappingsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + appengine.ListDomainMappingsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_domain_mappings + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DomainMappings server but before it is returned to user code. + + We recommend only using this `post_list_domain_mappings_with_metadata` + interceptor in new development instead of the `post_list_domain_mappings` interceptor. + When both interceptors are used, this `post_list_domain_mappings_with_metadata` interceptor runs after the + `post_list_domain_mappings` interceptor. The (possibly modified) response returned by + `post_list_domain_mappings` will be passed to + `post_list_domain_mappings_with_metadata`. + """ + return response, metadata + def pre_update_domain_mapping( self, request: appengine.UpdateDomainMappingRequest, @@ -234,12 +328,35 @@ def post_update_domain_mapping( ) -> operations_pb2.Operation: """Post-rpc interceptor for update_domain_mapping - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_domain_mapping_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DomainMappings server but before - it is returned to user code. + it is returned to user code. This `post_update_domain_mapping` interceptor runs + before the `post_update_domain_mapping_with_metadata` interceptor. 
""" return response + def post_update_domain_mapping_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_domain_mapping + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DomainMappings server but before it is returned to user code. + + We recommend only using this `post_update_domain_mapping_with_metadata` + interceptor in new development instead of the `post_update_domain_mapping` interceptor. + When both interceptors are used, this `post_update_domain_mapping_with_metadata` interceptor runs after the + `post_update_domain_mapping` interceptor. The (possibly modified) response returned by + `post_update_domain_mapping` will be passed to + `post_update_domain_mapping_with_metadata`. + """ + return response, metadata + @dataclasses.dataclass class DomainMappingsRestStub: @@ -496,6 +613,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_domain_mapping(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_domain_mapping_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -641,6 +762,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_domain_mapping(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_domain_mapping_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -785,6 +910,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_domain_mapping(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_domain_mapping_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -930,6 +1059,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_domain_mappings(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_domain_mappings_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1083,6 +1216,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_domain_mapping(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_domain_mapping_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/firewall/client.py b/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/firewall/client.py index 448017e11fff..0ab29153b93d 100644 --- a/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/firewall/client.py +++ 
b/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/firewall/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -472,6 +474,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. diff --git a/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/firewall/transports/rest.py b/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/firewall/transports/rest.py index c4fd9f51f6aa..a21bfa2fc4da 100644 --- a/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/firewall/transports/rest.py +++ b/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/firewall/transports/rest.py @@ -139,12 +139,38 @@ def post_batch_update_ingress_rules( ) -> appengine.BatchUpdateIngressRulesResponse: """Post-rpc interceptor for batch_update_ingress_rules - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_batch_update_ingress_rules_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Firewall server but before - it is returned to user code. + it is returned to user code. This `post_batch_update_ingress_rules` interceptor runs + before the `post_batch_update_ingress_rules_with_metadata` interceptor. """ return response + def post_batch_update_ingress_rules_with_metadata( + self, + response: appengine.BatchUpdateIngressRulesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + appengine.BatchUpdateIngressRulesResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for batch_update_ingress_rules + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Firewall server but before it is returned to user code. + + We recommend only using this `post_batch_update_ingress_rules_with_metadata` + interceptor in new development instead of the `post_batch_update_ingress_rules` interceptor. + When both interceptors are used, this `post_batch_update_ingress_rules_with_metadata` interceptor runs after the + `post_batch_update_ingress_rules` interceptor. The (possibly modified) response returned by + `post_batch_update_ingress_rules` will be passed to + `post_batch_update_ingress_rules_with_metadata`. 
+ """ + return response, metadata + def pre_create_ingress_rule( self, request: appengine.CreateIngressRuleRequest, @@ -164,12 +190,35 @@ def post_create_ingress_rule( ) -> firewall.FirewallRule: """Post-rpc interceptor for create_ingress_rule - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_ingress_rule_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Firewall server but before - it is returned to user code. + it is returned to user code. This `post_create_ingress_rule` interceptor runs + before the `post_create_ingress_rule_with_metadata` interceptor. """ return response + def post_create_ingress_rule_with_metadata( + self, + response: firewall.FirewallRule, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[firewall.FirewallRule, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_ingress_rule + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Firewall server but before it is returned to user code. + + We recommend only using this `post_create_ingress_rule_with_metadata` + interceptor in new development instead of the `post_create_ingress_rule` interceptor. + When both interceptors are used, this `post_create_ingress_rule_with_metadata` interceptor runs after the + `post_create_ingress_rule` interceptor. The (possibly modified) response returned by + `post_create_ingress_rule` will be passed to + `post_create_ingress_rule_with_metadata`. + """ + return response, metadata + def pre_delete_ingress_rule( self, request: appengine.DeleteIngressRuleRequest, @@ -203,12 +252,35 @@ def post_get_ingress_rule( ) -> firewall.FirewallRule: """Post-rpc interceptor for get_ingress_rule - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_ingress_rule_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Firewall server but before - it is returned to user code. + it is returned to user code. This `post_get_ingress_rule` interceptor runs + before the `post_get_ingress_rule_with_metadata` interceptor. """ return response + def post_get_ingress_rule_with_metadata( + self, + response: firewall.FirewallRule, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[firewall.FirewallRule, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_ingress_rule + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Firewall server but before it is returned to user code. + + We recommend only using this `post_get_ingress_rule_with_metadata` + interceptor in new development instead of the `post_get_ingress_rule` interceptor. + When both interceptors are used, this `post_get_ingress_rule_with_metadata` interceptor runs after the + `post_get_ingress_rule` interceptor. The (possibly modified) response returned by + `post_get_ingress_rule` will be passed to + `post_get_ingress_rule_with_metadata`. + """ + return response, metadata + def pre_list_ingress_rules( self, request: appengine.ListIngressRulesRequest, @@ -228,12 +300,37 @@ def post_list_ingress_rules( ) -> appengine.ListIngressRulesResponse: """Post-rpc interceptor for list_ingress_rules - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_ingress_rules_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response after it is returned by the Firewall server but before - it is returned to user code. + it is returned to user code. This `post_list_ingress_rules` interceptor runs + before the `post_list_ingress_rules_with_metadata` interceptor. """ return response + def post_list_ingress_rules_with_metadata( + self, + response: appengine.ListIngressRulesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + appengine.ListIngressRulesResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_ingress_rules + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Firewall server but before it is returned to user code. + + We recommend only using this `post_list_ingress_rules_with_metadata` + interceptor in new development instead of the `post_list_ingress_rules` interceptor. + When both interceptors are used, this `post_list_ingress_rules_with_metadata` interceptor runs after the + `post_list_ingress_rules` interceptor. The (possibly modified) response returned by + `post_list_ingress_rules` will be passed to + `post_list_ingress_rules_with_metadata`. + """ + return response, metadata + def pre_update_ingress_rule( self, request: appengine.UpdateIngressRuleRequest, @@ -253,12 +350,35 @@ def post_update_ingress_rule( ) -> firewall.FirewallRule: """Post-rpc interceptor for update_ingress_rule - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_ingress_rule_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Firewall server but before - it is returned to user code. + it is returned to user code. This `post_update_ingress_rule` interceptor runs + before the `post_update_ingress_rule_with_metadata` interceptor. """ return response + def post_update_ingress_rule_with_metadata( + self, + response: firewall.FirewallRule, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[firewall.FirewallRule, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_ingress_rule + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Firewall server but before it is returned to user code. + + We recommend only using this `post_update_ingress_rule_with_metadata` + interceptor in new development instead of the `post_update_ingress_rule` interceptor. + When both interceptors are used, this `post_update_ingress_rule_with_metadata` interceptor runs after the + `post_update_ingress_rule` interceptor. The (possibly modified) response returned by + `post_update_ingress_rule` will be passed to + `post_update_ingress_rule_with_metadata`. 
+ """ + return response, metadata + @dataclasses.dataclass class FirewallRestStub: @@ -484,6 +604,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_batch_update_ingress_rules(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_batch_update_ingress_rules_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -638,6 +762,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_ingress_rule(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_ingress_rule_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -896,6 +1024,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_ingress_rule(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_ingress_rule_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1040,6 +1172,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_ingress_rules(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_ingress_rules_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1194,6 +1330,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_ingress_rule(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_ingress_rule_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/instances/client.py b/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/instances/client.py index 518ad67b46a3..db07539db10a 100644 --- a/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/instances/client.py +++ b/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/instances/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -490,6 +492,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. 
+ """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. diff --git a/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/instances/transports/rest.py b/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/instances/transports/rest.py index 5171380a9056..d6798dee3df3 100644 --- a/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/instances/transports/rest.py +++ b/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/instances/transports/rest.py @@ -124,12 +124,35 @@ def post_debug_instance( ) -> operations_pb2.Operation: """Post-rpc interceptor for debug_instance - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_debug_instance_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Instances server but before - it is returned to user code. + it is returned to user code. This `post_debug_instance` interceptor runs + before the `post_debug_instance_with_metadata` interceptor. """ return response + def post_debug_instance_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for debug_instance + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Instances server but before it is returned to user code. + + We recommend only using this `post_debug_instance_with_metadata` + interceptor in new development instead of the `post_debug_instance` interceptor. + When both interceptors are used, this `post_debug_instance_with_metadata` interceptor runs after the + `post_debug_instance` interceptor. The (possibly modified) response returned by + `post_debug_instance` will be passed to + `post_debug_instance_with_metadata`. + """ + return response, metadata + def pre_delete_instance( self, request: appengine.DeleteInstanceRequest, @@ -149,12 +172,35 @@ def post_delete_instance( ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_instance - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_instance_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Instances server but before - it is returned to user code. + it is returned to user code. This `post_delete_instance` interceptor runs + before the `post_delete_instance_with_metadata` interceptor. 
""" return response + def post_delete_instance_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_instance + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Instances server but before it is returned to user code. + + We recommend only using this `post_delete_instance_with_metadata` + interceptor in new development instead of the `post_delete_instance` interceptor. + When both interceptors are used, this `post_delete_instance_with_metadata` interceptor runs after the + `post_delete_instance` interceptor. The (possibly modified) response returned by + `post_delete_instance` will be passed to + `post_delete_instance_with_metadata`. + """ + return response, metadata + def pre_get_instance( self, request: appengine.GetInstanceRequest, @@ -170,12 +216,35 @@ def pre_get_instance( def post_get_instance(self, response: instance.Instance) -> instance.Instance: """Post-rpc interceptor for get_instance - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_instance_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Instances server but before - it is returned to user code. + it is returned to user code. This `post_get_instance` interceptor runs + before the `post_get_instance_with_metadata` interceptor. """ return response + def post_get_instance_with_metadata( + self, + response: instance.Instance, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[instance.Instance, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_instance + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Instances server but before it is returned to user code. + + We recommend only using this `post_get_instance_with_metadata` + interceptor in new development instead of the `post_get_instance` interceptor. + When both interceptors are used, this `post_get_instance_with_metadata` interceptor runs after the + `post_get_instance` interceptor. The (possibly modified) response returned by + `post_get_instance` will be passed to + `post_get_instance_with_metadata`. + """ + return response, metadata + def pre_list_instances( self, request: appengine.ListInstancesRequest, @@ -193,12 +262,37 @@ def post_list_instances( ) -> appengine.ListInstancesResponse: """Post-rpc interceptor for list_instances - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_instances_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Instances server but before - it is returned to user code. + it is returned to user code. This `post_list_instances` interceptor runs + before the `post_list_instances_with_metadata` interceptor. """ return response + def post_list_instances_with_metadata( + self, + response: appengine.ListInstancesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + appengine.ListInstancesResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_instances + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Instances server but before it is returned to user code. 
+ + We recommend only using this `post_list_instances_with_metadata` + interceptor in new development instead of the `post_list_instances` interceptor. + When both interceptors are used, this `post_list_instances_with_metadata` interceptor runs after the + `post_list_instances` interceptor. The (possibly modified) response returned by + `post_list_instances` will be passed to + `post_list_instances_with_metadata`. + """ + return response, metadata + @dataclasses.dataclass class InstancesRestStub: @@ -457,6 +551,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_debug_instance(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_debug_instance_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -602,6 +700,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_instance(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_instance_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -747,6 +849,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_instance(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_instance_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -891,6 +997,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_instances(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_instances_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/services/client.py b/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/services/client.py index ec46847cac94..6329e0cb666f 100644 --- a/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/services/client.py +++ b/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/services/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -466,6 +468,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. 
+ """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. diff --git a/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/services/transports/rest.py b/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/services/transports/rest.py index 6912599a409d..b47917bda3b6 100644 --- a/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/services/transports/rest.py +++ b/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/services/transports/rest.py @@ -124,12 +124,35 @@ def post_delete_service( ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_service - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_service_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Services server but before - it is returned to user code. + it is returned to user code. This `post_delete_service` interceptor runs + before the `post_delete_service_with_metadata` interceptor. """ return response + def post_delete_service_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_service + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Services server but before it is returned to user code. + + We recommend only using this `post_delete_service_with_metadata` + interceptor in new development instead of the `post_delete_service` interceptor. + When both interceptors are used, this `post_delete_service_with_metadata` interceptor runs after the + `post_delete_service` interceptor. The (possibly modified) response returned by + `post_delete_service` will be passed to + `post_delete_service_with_metadata`. + """ + return response, metadata + def pre_get_service( self, request: appengine.GetServiceRequest, @@ -145,12 +168,35 @@ def pre_get_service( def post_get_service(self, response: service.Service) -> service.Service: """Post-rpc interceptor for get_service - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_service_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Services server but before - it is returned to user code. + it is returned to user code. This `post_get_service` interceptor runs + before the `post_get_service_with_metadata` interceptor. 
""" return response + def post_get_service_with_metadata( + self, + response: service.Service, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[service.Service, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_service + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Services server but before it is returned to user code. + + We recommend only using this `post_get_service_with_metadata` + interceptor in new development instead of the `post_get_service` interceptor. + When both interceptors are used, this `post_get_service_with_metadata` interceptor runs after the + `post_get_service` interceptor. The (possibly modified) response returned by + `post_get_service` will be passed to + `post_get_service_with_metadata`. + """ + return response, metadata + def pre_list_services( self, request: appengine.ListServicesRequest, @@ -168,12 +214,35 @@ def post_list_services( ) -> appengine.ListServicesResponse: """Post-rpc interceptor for list_services - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_services_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Services server but before - it is returned to user code. + it is returned to user code. This `post_list_services` interceptor runs + before the `post_list_services_with_metadata` interceptor. """ return response + def post_list_services_with_metadata( + self, + response: appengine.ListServicesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[appengine.ListServicesResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_services + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Services server but before it is returned to user code. + + We recommend only using this `post_list_services_with_metadata` + interceptor in new development instead of the `post_list_services` interceptor. + When both interceptors are used, this `post_list_services_with_metadata` interceptor runs after the + `post_list_services` interceptor. The (possibly modified) response returned by + `post_list_services` will be passed to + `post_list_services_with_metadata`. + """ + return response, metadata + def pre_update_service( self, request: appengine.UpdateServiceRequest, @@ -191,12 +260,35 @@ def post_update_service( ) -> operations_pb2.Operation: """Post-rpc interceptor for update_service - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_service_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Services server but before - it is returned to user code. + it is returned to user code. This `post_update_service` interceptor runs + before the `post_update_service_with_metadata` interceptor. """ return response + def post_update_service_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_service + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Services server but before it is returned to user code. 
+ + We recommend only using this `post_update_service_with_metadata` + interceptor in new development instead of the `post_update_service` interceptor. + When both interceptors are used, this `post_update_service_with_metadata` interceptor runs after the + `post_update_service` interceptor. The (possibly modified) response returned by + `post_update_service` will be passed to + `post_update_service_with_metadata`. + """ + return response, metadata + @dataclasses.dataclass class ServicesRestStub: @@ -447,6 +539,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_service(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_service_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -601,6 +697,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_service(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_service_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -743,6 +843,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_services(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_services_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -894,6 +998,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_service(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_service_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/versions/client.py b/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/versions/client.py index f94be581ee94..b1362d9aea5e 100644 --- a/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/versions/client.py +++ b/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/versions/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -468,6 +470,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. 
+ """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. diff --git a/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/versions/transports/rest.py b/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/versions/transports/rest.py index 4c560816e40f..91d82041faa8 100644 --- a/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/versions/transports/rest.py +++ b/packages/google-cloud-appengine-admin/google/cloud/appengine_admin_v1/services/versions/transports/rest.py @@ -132,12 +132,35 @@ def post_create_version( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_version - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_version_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Versions server but before - it is returned to user code. + it is returned to user code. This `post_create_version` interceptor runs + before the `post_create_version_with_metadata` interceptor. """ return response + def post_create_version_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_version + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Versions server but before it is returned to user code. + + We recommend only using this `post_create_version_with_metadata` + interceptor in new development instead of the `post_create_version` interceptor. + When both interceptors are used, this `post_create_version_with_metadata` interceptor runs after the + `post_create_version` interceptor. The (possibly modified) response returned by + `post_create_version` will be passed to + `post_create_version_with_metadata`. + """ + return response, metadata + def pre_delete_version( self, request: appengine.DeleteVersionRequest, @@ -155,12 +178,35 @@ def post_delete_version( ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_version - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_version_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Versions server but before - it is returned to user code. + it is returned to user code. This `post_delete_version` interceptor runs + before the `post_delete_version_with_metadata` interceptor. 
""" return response + def post_delete_version_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_version + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Versions server but before it is returned to user code. + + We recommend only using this `post_delete_version_with_metadata` + interceptor in new development instead of the `post_delete_version` interceptor. + When both interceptors are used, this `post_delete_version_with_metadata` interceptor runs after the + `post_delete_version` interceptor. The (possibly modified) response returned by + `post_delete_version` will be passed to + `post_delete_version_with_metadata`. + """ + return response, metadata + def pre_get_version( self, request: appengine.GetVersionRequest, @@ -176,12 +222,35 @@ def pre_get_version( def post_get_version(self, response: version.Version) -> version.Version: """Post-rpc interceptor for get_version - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_version_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Versions server but before - it is returned to user code. + it is returned to user code. This `post_get_version` interceptor runs + before the `post_get_version_with_metadata` interceptor. """ return response + def post_get_version_with_metadata( + self, + response: version.Version, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[version.Version, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_version + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Versions server but before it is returned to user code. + + We recommend only using this `post_get_version_with_metadata` + interceptor in new development instead of the `post_get_version` interceptor. + When both interceptors are used, this `post_get_version_with_metadata` interceptor runs after the + `post_get_version` interceptor. The (possibly modified) response returned by + `post_get_version` will be passed to + `post_get_version_with_metadata`. + """ + return response, metadata + def pre_list_versions( self, request: appengine.ListVersionsRequest, @@ -199,12 +268,35 @@ def post_list_versions( ) -> appengine.ListVersionsResponse: """Post-rpc interceptor for list_versions - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_versions_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Versions server but before - it is returned to user code. + it is returned to user code. This `post_list_versions` interceptor runs + before the `post_list_versions_with_metadata` interceptor. """ return response + def post_list_versions_with_metadata( + self, + response: appengine.ListVersionsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[appengine.ListVersionsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_versions + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Versions server but before it is returned to user code. 
+ + We recommend only using this `post_list_versions_with_metadata` + interceptor in new development instead of the `post_list_versions` interceptor. + When both interceptors are used, this `post_list_versions_with_metadata` interceptor runs after the + `post_list_versions` interceptor. The (possibly modified) response returned by + `post_list_versions` will be passed to + `post_list_versions_with_metadata`. + """ + return response, metadata + def pre_update_version( self, request: appengine.UpdateVersionRequest, @@ -222,12 +314,35 @@ def post_update_version( ) -> operations_pb2.Operation: """Post-rpc interceptor for update_version - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_version_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Versions server but before - it is returned to user code. + it is returned to user code. This `post_update_version` interceptor runs + before the `post_update_version_with_metadata` interceptor. """ return response + def post_update_version_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_version + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Versions server but before it is returned to user code. + + We recommend only using this `post_update_version_with_metadata` + interceptor in new development instead of the `post_update_version` interceptor. + When both interceptors are used, this `post_update_version_with_metadata` interceptor runs after the + `post_update_version` interceptor. The (possibly modified) response returned by + `post_update_version` will be passed to + `post_update_version_with_metadata`. 
+ """ + return response, metadata + @dataclasses.dataclass class VersionsRestStub: @@ -484,6 +599,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_version(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_version_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -629,6 +748,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_version(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_version_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -774,6 +897,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_version(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_version_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -916,6 +1043,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_versions(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_versions_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1067,6 +1198,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_version(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_version_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-appengine-admin/samples/generated_samples/snippet_metadata_google.appengine.v1.json b/packages/google-cloud-appengine-admin/samples/generated_samples/snippet_metadata_google.appengine.v1.json index 91e976ba6059..8bb3f0d6498d 100644 --- a/packages/google-cloud-appengine-admin/samples/generated_samples/snippet_metadata_google.appengine.v1.json +++ b/packages/google-cloud-appengine-admin/samples/generated_samples/snippet_metadata_google.appengine.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-appengine-admin", - "version": "1.13.0" + "version": "1.14.0" }, "snippets": [ { diff --git a/packages/google-cloud-appengine-admin/tests/unit/gapic/appengine_admin_v1/test_applications.py b/packages/google-cloud-appengine-admin/tests/unit/gapic/appengine_admin_v1/test_applications.py index 1525ff0d0f9f..9d770acc2966 100644 --- a/packages/google-cloud-appengine-admin/tests/unit/gapic/appengine_admin_v1/test_applications.py +++ b/packages/google-cloud-appengine-admin/tests/unit/gapic/appengine_admin_v1/test_applications.py @@ -72,6 +72,13 @@ from google.cloud.appengine_admin_v1.types import appengine, application from google.cloud.appengine_admin_v1.types import operation as ga_operation +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": 
"service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -315,6 +322,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = ApplicationsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = ApplicationsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -2789,10 +2839,13 @@ def test_get_application_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ApplicationsRestInterceptor, "post_get_application" ) as post, mock.patch.object( + transports.ApplicationsRestInterceptor, "post_get_application_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ApplicationsRestInterceptor, "pre_get_application" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = appengine.GetApplicationRequest.pb( appengine.GetApplicationRequest() ) @@ -2816,6 +2869,7 @@ def test_get_application_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = application.Application() + post_with_metadata.return_value = application.Application(), metadata client.get_application( request, @@ -2827,6 +2881,7 @@ def test_get_application_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_application_rest_bad_request( @@ -3001,10 +3056,13 @@ def test_create_application_rest_interceptors(null_interceptor): ), mock.patch.object( transports.ApplicationsRestInterceptor, "post_create_application" ) as post, mock.patch.object( + transports.ApplicationsRestInterceptor, "post_create_application_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ApplicationsRestInterceptor, "pre_create_application" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = appengine.CreateApplicationRequest.pb( appengine.CreateApplicationRequest() ) @@ -3028,6 +3086,7 @@ def test_create_application_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = 
operations_pb2.Operation(), metadata client.create_application( request, @@ -3039,6 +3098,7 @@ def test_create_application_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_application_rest_bad_request( @@ -3213,10 +3273,13 @@ def test_update_application_rest_interceptors(null_interceptor): ), mock.patch.object( transports.ApplicationsRestInterceptor, "post_update_application" ) as post, mock.patch.object( + transports.ApplicationsRestInterceptor, "post_update_application_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ApplicationsRestInterceptor, "pre_update_application" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = appengine.UpdateApplicationRequest.pb( appengine.UpdateApplicationRequest() ) @@ -3240,6 +3303,7 @@ def test_update_application_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.update_application( request, @@ -3251,6 +3315,7 @@ def test_update_application_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_repair_application_rest_bad_request( @@ -3331,10 +3396,13 @@ def test_repair_application_rest_interceptors(null_interceptor): ), mock.patch.object( transports.ApplicationsRestInterceptor, "post_repair_application" ) as post, mock.patch.object( + transports.ApplicationsRestInterceptor, "post_repair_application_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ApplicationsRestInterceptor, "pre_repair_application" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = appengine.RepairApplicationRequest.pb( appengine.RepairApplicationRequest() ) @@ -3358,6 +3426,7 @@ def test_repair_application_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.repair_application( request, @@ -3369,6 +3438,7 @@ def test_repair_application_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_initialize_client_w_rest(): diff --git a/packages/google-cloud-appengine-admin/tests/unit/gapic/appengine_admin_v1/test_authorized_certificates.py b/packages/google-cloud-appengine-admin/tests/unit/gapic/appengine_admin_v1/test_authorized_certificates.py index 8e695cca905e..b0f78a77fa36 100644 --- a/packages/google-cloud-appengine-admin/tests/unit/gapic/appengine_admin_v1/test_authorized_certificates.py +++ b/packages/google-cloud-appengine-admin/tests/unit/gapic/appengine_admin_v1/test_authorized_certificates.py @@ -62,6 +62,13 @@ ) from google.cloud.appengine_admin_v1.types import appengine, certificate +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -337,6 +344,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
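The pair of _add_cred_info_for_auth_errors tests added above is repeated for every client touched by this patch: they pin down that JSON-serialized credential info is appended to error.details only for 401/403/404 responses, and only when the credential object exposes get_cred_info(); a 500, or a credential without that hook, leaves the details unchanged. A minimal standalone sketch of that behavior, mirroring the generated helper shown in the apphub client.py hunk later in this patch (the stand-in error and credential objects here are illustrative only):

import json
from http import HTTPStatus
from types import SimpleNamespace


def add_cred_info_for_auth_errors(error, credentials):
    """Append JSON-serialized credential info to 401/403/404 error details."""
    if error.code not in (
        HTTPStatus.UNAUTHORIZED,
        HTTPStatus.FORBIDDEN,
        HTTPStatus.NOT_FOUND,
    ):
        return  # other codes (e.g. 500) are left untouched
    # get_cred_info is only available in google-auth>=2.35.0
    if not hasattr(credentials, "get_cred_info"):
        return
    cred_info = credentials.get_cred_info()
    if cred_info:
        error.details.append(json.dumps(cred_info))


# Tiny usage example with stand-in objects:
error = SimpleNamespace(code=401, details=["foo"])
creds = SimpleNamespace(get_cred_info=lambda: {"principal": "sa@example.com"})
add_cred_info_for_auth_errors(error, creds)
assert error.details == ["foo", json.dumps({"principal": "sa@example.com"})]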
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = AuthorizedCertificatesClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = AuthorizedCertificatesClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -3418,11 +3468,15 @@ def test_list_authorized_certificates_rest_interceptors(null_interceptor): transports.AuthorizedCertificatesRestInterceptor, "post_list_authorized_certificates", ) as post, mock.patch.object( + transports.AuthorizedCertificatesRestInterceptor, + "post_list_authorized_certificates_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AuthorizedCertificatesRestInterceptor, "pre_list_authorized_certificates", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = appengine.ListAuthorizedCertificatesRequest.pb( appengine.ListAuthorizedCertificatesRequest() ) @@ -3448,6 +3502,10 @@ def test_list_authorized_certificates_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = appengine.ListAuthorizedCertificatesResponse() + post_with_metadata.return_value = ( + appengine.ListAuthorizedCertificatesResponse(), + metadata, + ) client.list_authorized_certificates( request, @@ -3459,6 +3517,7 @@ def test_list_authorized_certificates_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_authorized_certificate_rest_bad_request( @@ -3554,11 +3613,15 @@ def test_get_authorized_certificate_rest_interceptors(null_interceptor): transports.AuthorizedCertificatesRestInterceptor, "post_get_authorized_certificate", ) as post, mock.patch.object( + transports.AuthorizedCertificatesRestInterceptor, + "post_get_authorized_certificate_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AuthorizedCertificatesRestInterceptor, "pre_get_authorized_certificate", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = appengine.GetAuthorizedCertificateRequest.pb( appengine.GetAuthorizedCertificateRequest() ) @@ -3584,6 +3647,7 @@ def test_get_authorized_certificate_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = certificate.AuthorizedCertificate() + post_with_metadata.return_value = 
certificate.AuthorizedCertificate(), metadata client.get_authorized_certificate( request, @@ -3595,6 +3659,7 @@ def test_get_authorized_certificate_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_authorized_certificate_rest_bad_request( @@ -3774,11 +3839,15 @@ def test_create_authorized_certificate_rest_interceptors(null_interceptor): transports.AuthorizedCertificatesRestInterceptor, "post_create_authorized_certificate", ) as post, mock.patch.object( + transports.AuthorizedCertificatesRestInterceptor, + "post_create_authorized_certificate_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AuthorizedCertificatesRestInterceptor, "pre_create_authorized_certificate", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = appengine.CreateAuthorizedCertificateRequest.pb( appengine.CreateAuthorizedCertificateRequest() ) @@ -3804,6 +3873,7 @@ def test_create_authorized_certificate_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = certificate.AuthorizedCertificate() + post_with_metadata.return_value = certificate.AuthorizedCertificate(), metadata client.create_authorized_certificate( request, @@ -3815,6 +3885,7 @@ def test_create_authorized_certificate_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_authorized_certificate_rest_bad_request( @@ -3994,11 +4065,15 @@ def test_update_authorized_certificate_rest_interceptors(null_interceptor): transports.AuthorizedCertificatesRestInterceptor, "post_update_authorized_certificate", ) as post, mock.patch.object( + transports.AuthorizedCertificatesRestInterceptor, + "post_update_authorized_certificate_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AuthorizedCertificatesRestInterceptor, "pre_update_authorized_certificate", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = appengine.UpdateAuthorizedCertificateRequest.pb( appengine.UpdateAuthorizedCertificateRequest() ) @@ -4024,6 +4099,7 @@ def test_update_authorized_certificate_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = certificate.AuthorizedCertificate() + post_with_metadata.return_value = certificate.AuthorizedCertificate(), metadata client.update_authorized_certificate( request, @@ -4035,6 +4111,7 @@ def test_update_authorized_certificate_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_authorized_certificate_rest_bad_request( diff --git a/packages/google-cloud-appengine-admin/tests/unit/gapic/appengine_admin_v1/test_authorized_domains.py b/packages/google-cloud-appengine-admin/tests/unit/gapic/appengine_admin_v1/test_authorized_domains.py index 133b383b497a..1b7cf0233f44 100644 --- a/packages/google-cloud-appengine-admin/tests/unit/gapic/appengine_admin_v1/test_authorized_domains.py +++ b/packages/google-cloud-appengine-admin/tests/unit/gapic/appengine_admin_v1/test_authorized_domains.py @@ -60,6 +60,13 @@ ) from google.cloud.appengine_admin_v1.types import appengine, domain +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = 
json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -327,6 +334,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = AuthorizedDomainsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = AuthorizedDomainsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -1927,10 +1977,14 @@ def test_list_authorized_domains_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AuthorizedDomainsRestInterceptor, "post_list_authorized_domains" ) as post, mock.patch.object( + transports.AuthorizedDomainsRestInterceptor, + "post_list_authorized_domains_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AuthorizedDomainsRestInterceptor, "pre_list_authorized_domains" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = appengine.ListAuthorizedDomainsRequest.pb( appengine.ListAuthorizedDomainsRequest() ) @@ -1956,6 +2010,10 @@ def test_list_authorized_domains_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = appengine.ListAuthorizedDomainsResponse() + post_with_metadata.return_value = ( + appengine.ListAuthorizedDomainsResponse(), + metadata, + ) client.list_authorized_domains( request, @@ -1967,6 +2025,7 @@ def test_list_authorized_domains_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_initialize_client_w_rest(): diff --git a/packages/google-cloud-appengine-admin/tests/unit/gapic/appengine_admin_v1/test_domain_mappings.py b/packages/google-cloud-appengine-admin/tests/unit/gapic/appengine_admin_v1/test_domain_mappings.py index f870a31de214..e5f8ba8b3f11 100644 --- a/packages/google-cloud-appengine-admin/tests/unit/gapic/appengine_admin_v1/test_domain_mappings.py +++ b/packages/google-cloud-appengine-admin/tests/unit/gapic/appengine_admin_v1/test_domain_mappings.py @@ -73,6 +73,13 @@ from google.cloud.appengine_admin_v1.types import appengine, domain_mapping from google.cloud.appengine_admin_v1.types import operation as ga_operation +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service 
account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -331,6 +338,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = DomainMappingsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = DomainMappingsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -3317,10 +3367,14 @@ def test_list_domain_mappings_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DomainMappingsRestInterceptor, "post_list_domain_mappings" ) as post, mock.patch.object( + transports.DomainMappingsRestInterceptor, + "post_list_domain_mappings_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DomainMappingsRestInterceptor, "pre_list_domain_mappings" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = appengine.ListDomainMappingsRequest.pb( appengine.ListDomainMappingsRequest() ) @@ -3346,6 +3400,10 @@ def test_list_domain_mappings_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = appengine.ListDomainMappingsResponse() + post_with_metadata.return_value = ( + appengine.ListDomainMappingsResponse(), + metadata, + ) client.list_domain_mappings( request, @@ -3357,6 +3415,7 @@ def test_list_domain_mappings_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_domain_mapping_rest_bad_request( @@ -3443,10 +3502,14 @@ def test_get_domain_mapping_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DomainMappingsRestInterceptor, "post_get_domain_mapping" ) as post, mock.patch.object( + transports.DomainMappingsRestInterceptor, + "post_get_domain_mapping_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DomainMappingsRestInterceptor, "pre_get_domain_mapping" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = appengine.GetDomainMappingRequest.pb( appengine.GetDomainMappingRequest() ) @@ -3472,6 +3535,7 @@ def 
test_get_domain_mapping_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = domain_mapping.DomainMapping() + post_with_metadata.return_value = domain_mapping.DomainMapping(), metadata client.get_domain_mapping( request, @@ -3483,6 +3547,7 @@ def test_get_domain_mapping_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_domain_mapping_rest_bad_request( @@ -3642,10 +3707,14 @@ def test_create_domain_mapping_rest_interceptors(null_interceptor): ), mock.patch.object( transports.DomainMappingsRestInterceptor, "post_create_domain_mapping" ) as post, mock.patch.object( + transports.DomainMappingsRestInterceptor, + "post_create_domain_mapping_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DomainMappingsRestInterceptor, "pre_create_domain_mapping" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = appengine.CreateDomainMappingRequest.pb( appengine.CreateDomainMappingRequest() ) @@ -3669,6 +3738,7 @@ def test_create_domain_mapping_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_domain_mapping( request, @@ -3680,6 +3750,7 @@ def test_create_domain_mapping_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_domain_mapping_rest_bad_request( @@ -3839,10 +3910,14 @@ def test_update_domain_mapping_rest_interceptors(null_interceptor): ), mock.patch.object( transports.DomainMappingsRestInterceptor, "post_update_domain_mapping" ) as post, mock.patch.object( + transports.DomainMappingsRestInterceptor, + "post_update_domain_mapping_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DomainMappingsRestInterceptor, "pre_update_domain_mapping" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = appengine.UpdateDomainMappingRequest.pb( appengine.UpdateDomainMappingRequest() ) @@ -3866,6 +3941,7 @@ def test_update_domain_mapping_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.update_domain_mapping( request, @@ -3877,6 +3953,7 @@ def test_update_domain_mapping_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_domain_mapping_rest_bad_request( @@ -3957,10 +4034,14 @@ def test_delete_domain_mapping_rest_interceptors(null_interceptor): ), mock.patch.object( transports.DomainMappingsRestInterceptor, "post_delete_domain_mapping" ) as post, mock.patch.object( + transports.DomainMappingsRestInterceptor, + "post_delete_domain_mapping_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DomainMappingsRestInterceptor, "pre_delete_domain_mapping" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = appengine.DeleteDomainMappingRequest.pb( appengine.DeleteDomainMappingRequest() ) @@ -3984,6 +4065,7 @@ def test_delete_domain_mapping_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = 
operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.delete_domain_mapping( request, @@ -3995,6 +4077,7 @@ def test_delete_domain_mapping_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_initialize_client_w_rest(): diff --git a/packages/google-cloud-appengine-admin/tests/unit/gapic/appengine_admin_v1/test_firewall.py b/packages/google-cloud-appengine-admin/tests/unit/gapic/appengine_admin_v1/test_firewall.py index e2c8f934a630..0c913b40a62b 100644 --- a/packages/google-cloud-appengine-admin/tests/unit/gapic/appengine_admin_v1/test_firewall.py +++ b/packages/google-cloud-appengine-admin/tests/unit/gapic/appengine_admin_v1/test_firewall.py @@ -61,6 +61,13 @@ ) from google.cloud.appengine_admin_v1.types import appengine, firewall +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -291,6 +298,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = FirewallClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = FirewallClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -3575,10 +3625,13 @@ def test_list_ingress_rules_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.FirewallRestInterceptor, "post_list_ingress_rules" ) as post, mock.patch.object( + transports.FirewallRestInterceptor, "post_list_ingress_rules_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.FirewallRestInterceptor, "pre_list_ingress_rules" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = appengine.ListIngressRulesRequest.pb( appengine.ListIngressRulesRequest() ) @@ -3604,6 +3657,7 @@ def test_list_ingress_rules_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = appengine.ListIngressRulesResponse() + post_with_metadata.return_value = appengine.ListIngressRulesResponse(), metadata client.list_ingress_rules( 
request, @@ -3615,6 +3669,7 @@ def test_list_ingress_rules_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_batch_update_ingress_rules_rest_bad_request( @@ -3694,10 +3749,14 @@ def test_batch_update_ingress_rules_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.FirewallRestInterceptor, "post_batch_update_ingress_rules" ) as post, mock.patch.object( + transports.FirewallRestInterceptor, + "post_batch_update_ingress_rules_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.FirewallRestInterceptor, "pre_batch_update_ingress_rules" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = appengine.BatchUpdateIngressRulesRequest.pb( appengine.BatchUpdateIngressRulesRequest() ) @@ -3723,6 +3782,10 @@ def test_batch_update_ingress_rules_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = appengine.BatchUpdateIngressRulesResponse() + post_with_metadata.return_value = ( + appengine.BatchUpdateIngressRulesResponse(), + metadata, + ) client.batch_update_ingress_rules( request, @@ -3734,6 +3797,7 @@ def test_batch_update_ingress_rules_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_ingress_rule_rest_bad_request( @@ -3895,10 +3959,13 @@ def test_create_ingress_rule_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.FirewallRestInterceptor, "post_create_ingress_rule" ) as post, mock.patch.object( + transports.FirewallRestInterceptor, "post_create_ingress_rule_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.FirewallRestInterceptor, "pre_create_ingress_rule" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = appengine.CreateIngressRuleRequest.pb( appengine.CreateIngressRuleRequest() ) @@ -3922,6 +3989,7 @@ def test_create_ingress_rule_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = firewall.FirewallRule() + post_with_metadata.return_value = firewall.FirewallRule(), metadata client.create_ingress_rule( request, @@ -3933,6 +4001,7 @@ def test_create_ingress_rule_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_ingress_rule_rest_bad_request( @@ -4021,10 +4090,13 @@ def test_get_ingress_rule_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.FirewallRestInterceptor, "post_get_ingress_rule" ) as post, mock.patch.object( + transports.FirewallRestInterceptor, "post_get_ingress_rule_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.FirewallRestInterceptor, "pre_get_ingress_rule" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = appengine.GetIngressRuleRequest.pb( appengine.GetIngressRuleRequest() ) @@ -4048,6 +4120,7 @@ def test_get_ingress_rule_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = firewall.FirewallRule() + post_with_metadata.return_value = firewall.FirewallRule(), metadata client.get_ingress_rule( request, @@ -4059,6 +4132,7 @@ def test_get_ingress_rule_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + 
post_with_metadata.assert_called_once() def test_update_ingress_rule_rest_bad_request( @@ -4220,10 +4294,13 @@ def test_update_ingress_rule_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.FirewallRestInterceptor, "post_update_ingress_rule" ) as post, mock.patch.object( + transports.FirewallRestInterceptor, "post_update_ingress_rule_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.FirewallRestInterceptor, "pre_update_ingress_rule" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = appengine.UpdateIngressRuleRequest.pb( appengine.UpdateIngressRuleRequest() ) @@ -4247,6 +4324,7 @@ def test_update_ingress_rule_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = firewall.FirewallRule() + post_with_metadata.return_value = firewall.FirewallRule(), metadata client.update_ingress_rule( request, @@ -4258,6 +4336,7 @@ def test_update_ingress_rule_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_ingress_rule_rest_bad_request( diff --git a/packages/google-cloud-appengine-admin/tests/unit/gapic/appengine_admin_v1/test_instances.py b/packages/google-cloud-appengine-admin/tests/unit/gapic/appengine_admin_v1/test_instances.py index 152bd013e85f..8eec63285926 100644 --- a/packages/google-cloud-appengine-admin/tests/unit/gapic/appengine_admin_v1/test_instances.py +++ b/packages/google-cloud-appengine-admin/tests/unit/gapic/appengine_admin_v1/test_instances.py @@ -73,6 +73,13 @@ from google.cloud.appengine_admin_v1.types import appengine, instance from google.cloud.appengine_admin_v1.types import operation as ga_operation +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -306,6 +313,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
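These suites rely on mock.Mock's spec-list form to simulate credentials with and without the get_cred_info hook: passing a list of attribute names restricts which attributes the mock will fabricate, so the hasattr checks in the client behave exactly as they would on a real credentials object. For instance:

from unittest import mock

cred_with_hook = mock.Mock(["get_cred_info"])  # spec limited to one attribute
cred_without_hook = mock.Mock([])              # empty spec: no attributes at all

assert hasattr(cred_with_hook, "get_cred_info")
assert not hasattr(cred_without_hook, "get_cred_info")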
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = InstancesClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = InstancesClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -2881,10 +2931,13 @@ def test_list_instances_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.InstancesRestInterceptor, "post_list_instances" ) as post, mock.patch.object( + transports.InstancesRestInterceptor, "post_list_instances_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.InstancesRestInterceptor, "pre_list_instances" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = appengine.ListInstancesRequest.pb(appengine.ListInstancesRequest()) transcode.return_value = { "method": "post", @@ -2908,6 +2961,7 @@ def test_list_instances_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = appengine.ListInstancesResponse() + post_with_metadata.return_value = appengine.ListInstancesResponse(), metadata client.list_instances( request, @@ -2919,6 +2973,7 @@ def test_list_instances_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_instance_rest_bad_request(request_type=appengine.GetInstanceRequest): @@ -3033,10 +3088,13 @@ def test_get_instance_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.InstancesRestInterceptor, "post_get_instance" ) as post, mock.patch.object( + transports.InstancesRestInterceptor, "post_get_instance_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.InstancesRestInterceptor, "pre_get_instance" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = appengine.GetInstanceRequest.pb(appengine.GetInstanceRequest()) transcode.return_value = { "method": "post", @@ -3058,6 +3116,7 @@ def test_get_instance_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = instance.Instance() + post_with_metadata.return_value = instance.Instance(), metadata client.get_instance( request, @@ -3069,6 +3128,7 @@ def test_get_instance_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() 
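Every REST interceptor test in this patch now patches the new post_<rpc>_with_metadata hook alongside the existing post_<rpc> hook, primes it with a (response, metadata) tuple, and asserts it is invoked exactly once per call. As a rough illustration of what these hooks enable, a user-supplied interceptor might override one of them to inspect response metadata; the sketch below assumes the standard generated module layout and a (response, metadata) -> (response, metadata) signature inferred from the test expectations above, so treat it as illustrative rather than as the generated API:

# Sketch of a custom interceptor overriding one of the new metadata hooks.
# The import path and hook signature are assumptions, not a verbatim copy
# of the generated surface.
from google.cloud.appengine_admin_v1.services.applications import transports


class MetadataLoggingInterceptor(transports.ApplicationsRestInterceptor):
    def post_get_application_with_metadata(self, response, metadata):
        # Observe the response metadata before the client hands the
        # response back to the caller.
        print(f"get_application metadata: {metadata}")
        return response, metadata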
def test_delete_instance_rest_bad_request(request_type=appengine.DeleteInstanceRequest): @@ -3149,10 +3209,13 @@ def test_delete_instance_rest_interceptors(null_interceptor): ), mock.patch.object( transports.InstancesRestInterceptor, "post_delete_instance" ) as post, mock.patch.object( + transports.InstancesRestInterceptor, "post_delete_instance_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.InstancesRestInterceptor, "pre_delete_instance" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = appengine.DeleteInstanceRequest.pb( appengine.DeleteInstanceRequest() ) @@ -3176,6 +3239,7 @@ def test_delete_instance_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.delete_instance( request, @@ -3187,6 +3251,7 @@ def test_delete_instance_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_debug_instance_rest_bad_request(request_type=appengine.DebugInstanceRequest): @@ -3267,10 +3332,13 @@ def test_debug_instance_rest_interceptors(null_interceptor): ), mock.patch.object( transports.InstancesRestInterceptor, "post_debug_instance" ) as post, mock.patch.object( + transports.InstancesRestInterceptor, "post_debug_instance_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.InstancesRestInterceptor, "pre_debug_instance" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = appengine.DebugInstanceRequest.pb(appengine.DebugInstanceRequest()) transcode.return_value = { "method": "post", @@ -3292,6 +3360,7 @@ def test_debug_instance_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.debug_instance( request, @@ -3303,6 +3372,7 @@ def test_debug_instance_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_initialize_client_w_rest(): diff --git a/packages/google-cloud-appengine-admin/tests/unit/gapic/appengine_admin_v1/test_services.py b/packages/google-cloud-appengine-admin/tests/unit/gapic/appengine_admin_v1/test_services.py index 46a64bd956af..8dcdcf7fdebe 100644 --- a/packages/google-cloud-appengine-admin/tests/unit/gapic/appengine_admin_v1/test_services.py +++ b/packages/google-cloud-appengine-admin/tests/unit/gapic/appengine_admin_v1/test_services.py @@ -74,6 +74,13 @@ from google.cloud.appengine_admin_v1.types import operation as ga_operation from google.cloud.appengine_admin_v1.types import service +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -304,6 +311,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = ServicesClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = ServicesClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -2799,10 +2849,13 @@ def test_list_services_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ServicesRestInterceptor, "post_list_services" ) as post, mock.patch.object( + transports.ServicesRestInterceptor, "post_list_services_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ServicesRestInterceptor, "pre_list_services" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = appengine.ListServicesRequest.pb(appengine.ListServicesRequest()) transcode.return_value = { "method": "post", @@ -2826,6 +2879,7 @@ def test_list_services_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = appengine.ListServicesResponse() + post_with_metadata.return_value = appengine.ListServicesResponse(), metadata client.list_services( request, @@ -2837,6 +2891,7 @@ def test_list_services_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_service_rest_bad_request(request_type=appengine.GetServiceRequest): @@ -2919,10 +2974,13 @@ def test_get_service_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ServicesRestInterceptor, "post_get_service" ) as post, mock.patch.object( + transports.ServicesRestInterceptor, "post_get_service_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ServicesRestInterceptor, "pre_get_service" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = appengine.GetServiceRequest.pb(appengine.GetServiceRequest()) transcode.return_value = { "method": "post", @@ -2944,6 +3002,7 @@ def test_get_service_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = service.Service() + post_with_metadata.return_value = service.Service(), metadata client.get_service( request, @@ -2955,6 +3014,7 @@ def test_get_service_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def 
test_update_service_rest_bad_request(request_type=appengine.UpdateServiceRequest): @@ -3105,10 +3165,13 @@ def test_update_service_rest_interceptors(null_interceptor): ), mock.patch.object( transports.ServicesRestInterceptor, "post_update_service" ) as post, mock.patch.object( + transports.ServicesRestInterceptor, "post_update_service_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ServicesRestInterceptor, "pre_update_service" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = appengine.UpdateServiceRequest.pb(appengine.UpdateServiceRequest()) transcode.return_value = { "method": "post", @@ -3130,6 +3193,7 @@ def test_update_service_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.update_service( request, @@ -3141,6 +3205,7 @@ def test_update_service_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_service_rest_bad_request(request_type=appengine.DeleteServiceRequest): @@ -3217,10 +3282,13 @@ def test_delete_service_rest_interceptors(null_interceptor): ), mock.patch.object( transports.ServicesRestInterceptor, "post_delete_service" ) as post, mock.patch.object( + transports.ServicesRestInterceptor, "post_delete_service_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ServicesRestInterceptor, "pre_delete_service" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = appengine.DeleteServiceRequest.pb(appengine.DeleteServiceRequest()) transcode.return_value = { "method": "post", @@ -3242,6 +3310,7 @@ def test_delete_service_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.delete_service( request, @@ -3253,6 +3322,7 @@ def test_delete_service_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_initialize_client_w_rest(): diff --git a/packages/google-cloud-appengine-admin/tests/unit/gapic/appengine_admin_v1/test_versions.py b/packages/google-cloud-appengine-admin/tests/unit/gapic/appengine_admin_v1/test_versions.py index 0fa06d1169d6..6ee7a1591067 100644 --- a/packages/google-cloud-appengine-admin/tests/unit/gapic/appengine_admin_v1/test_versions.py +++ b/packages/google-cloud-appengine-admin/tests/unit/gapic/appengine_admin_v1/test_versions.py @@ -76,6 +76,13 @@ from google.cloud.appengine_admin_v1.types import operation as ga_operation from google.cloud.appengine_admin_v1.types import version +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -306,6 +313,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = VersionsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = VersionsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -3221,10 +3271,13 @@ def test_list_versions_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.VersionsRestInterceptor, "post_list_versions" ) as post, mock.patch.object( + transports.VersionsRestInterceptor, "post_list_versions_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.VersionsRestInterceptor, "pre_list_versions" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = appengine.ListVersionsRequest.pb(appengine.ListVersionsRequest()) transcode.return_value = { "method": "post", @@ -3248,6 +3301,7 @@ def test_list_versions_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = appengine.ListVersionsResponse() + post_with_metadata.return_value = appengine.ListVersionsResponse(), metadata client.list_versions( request, @@ -3259,6 +3313,7 @@ def test_list_versions_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_version_rest_bad_request(request_type=appengine.GetVersionRequest): @@ -3377,10 +3432,13 @@ def test_get_version_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.VersionsRestInterceptor, "post_get_version" ) as post, mock.patch.object( + transports.VersionsRestInterceptor, "post_get_version_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.VersionsRestInterceptor, "pre_get_version" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = appengine.GetVersionRequest.pb(appengine.GetVersionRequest()) transcode.return_value = { "method": "post", @@ -3402,6 +3460,7 @@ def test_get_version_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = version.Version() + post_with_metadata.return_value = version.Version(), metadata client.get_version( request, @@ -3413,6 +3472,7 @@ def test_get_version_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def 
test_create_version_rest_bad_request(request_type=appengine.CreateVersionRequest): @@ -3719,10 +3779,13 @@ def test_create_version_rest_interceptors(null_interceptor): ), mock.patch.object( transports.VersionsRestInterceptor, "post_create_version" ) as post, mock.patch.object( + transports.VersionsRestInterceptor, "post_create_version_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.VersionsRestInterceptor, "pre_create_version" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = appengine.CreateVersionRequest.pb(appengine.CreateVersionRequest()) transcode.return_value = { "method": "post", @@ -3744,6 +3807,7 @@ def test_create_version_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_version( request, @@ -3755,6 +3819,7 @@ def test_create_version_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_version_rest_bad_request(request_type=appengine.UpdateVersionRequest): @@ -4061,10 +4126,13 @@ def test_update_version_rest_interceptors(null_interceptor): ), mock.patch.object( transports.VersionsRestInterceptor, "post_update_version" ) as post, mock.patch.object( + transports.VersionsRestInterceptor, "post_update_version_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.VersionsRestInterceptor, "pre_update_version" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = appengine.UpdateVersionRequest.pb(appengine.UpdateVersionRequest()) transcode.return_value = { "method": "post", @@ -4086,6 +4154,7 @@ def test_update_version_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.update_version( request, @@ -4097,6 +4166,7 @@ def test_update_version_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_version_rest_bad_request(request_type=appengine.DeleteVersionRequest): @@ -4173,10 +4243,13 @@ def test_delete_version_rest_interceptors(null_interceptor): ), mock.patch.object( transports.VersionsRestInterceptor, "post_delete_version" ) as post, mock.patch.object( + transports.VersionsRestInterceptor, "post_delete_version_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.VersionsRestInterceptor, "pre_delete_version" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = appengine.DeleteVersionRequest.pb(appengine.DeleteVersionRequest()) transcode.return_value = { "method": "post", @@ -4198,6 +4271,7 @@ def test_delete_version_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.delete_version( request, @@ -4209,6 +4283,7 @@ def test_delete_version_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_initialize_client_w_rest(): diff --git a/packages/google-cloud-appengine-logging/CHANGELOG.md 
b/packages/google-cloud-appengine-logging/CHANGELOG.md index 84cc20dad045..bff434c03597 100644 --- a/packages/google-cloud-appengine-logging/CHANGELOG.md +++ b/packages/google-cloud-appengine-logging/CHANGELOG.md @@ -1,5 +1,13 @@ # Changelog +## [1.6.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-appengine-logging-v1.5.0...google-cloud-appengine-logging-v1.6.0) (2025-02-12) + + +### Features + +* Add REST Interceptors which support reading metadata ([b1c3ce8](https://github.com/googleapis/google-cloud-python/commit/b1c3ce8b271e9d22afabcde054e81dcedae6b0ef)) +* Add support for reading selective GAPIC generation methods from service YAML ([b1c3ce8](https://github.com/googleapis/google-cloud-python/commit/b1c3ce8b271e9d22afabcde054e81dcedae6b0ef)) + ## [1.5.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-appengine-logging-v1.4.5...google-cloud-appengine-logging-v1.5.0) (2024-10-24) diff --git a/packages/google-cloud-appengine-logging/README.rst b/packages/google-cloud-appengine-logging/README.rst index 7be5f375b895..2ee14eb44584 100644 --- a/packages/google-cloud-appengine-logging/README.rst +++ b/packages/google-cloud-appengine-logging/README.rst @@ -26,12 +26,12 @@ In order to use this library, you first need to go through the following steps: 1. `Select or create a Cloud Platform project.`_ 2. `Enable billing for your project.`_ 3. `Enable the App Engine Logging Protos.`_ -4. `Setup Authentication.`_ +4. `Set up Authentication.`_ .. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project .. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project .. _Enable the App Engine Logging Protos.: https://cloud.google.com/logging/docs/reference/v2/rpc/google.appengine.logging.v1 -.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html +.. _Set up Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html Installation ~~~~~~~~~~~~ diff --git a/packages/google-cloud-appengine-logging/google/cloud/appengine_logging/gapic_version.py b/packages/google-cloud-appengine-logging/google/cloud/appengine_logging/gapic_version.py index de27578dd493..186dbb3596a5 100644 --- a/packages/google-cloud-appengine-logging/google/cloud/appengine_logging/gapic_version.py +++ b/packages/google-cloud-appengine-logging/google/cloud/appengine_logging/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.5.0" # {x-release-please-version} +__version__ = "1.6.0" # {x-release-please-version} diff --git a/packages/google-cloud-appengine-logging/google/cloud/appengine_logging_v1/gapic_version.py b/packages/google-cloud-appengine-logging/google/cloud/appengine_logging_v1/gapic_version.py index de27578dd493..186dbb3596a5 100644 --- a/packages/google-cloud-appengine-logging/google/cloud/appengine_logging_v1/gapic_version.py +++ b/packages/google-cloud-appengine-logging/google/cloud/appengine_logging_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "1.5.0" # {x-release-please-version} +__version__ = "1.6.0" # {x-release-please-version} diff --git a/packages/google-cloud-appengine-logging/noxfile.py b/packages/google-cloud-appengine-logging/noxfile.py index a9ceef47133c..0acc836b384e 100644 --- a/packages/google-cloud-appengine-logging/noxfile.py +++ b/packages/google-cloud-appengine-logging/noxfile.py @@ -382,20 +382,29 @@ def docfx(session): ["python", "upb", "cpp"], ) def prerelease_deps(session, protobuf_implementation): - """Run all tests with prerelease versions of dependencies installed.""" + """ + Run all tests with pre-release versions of dependencies installed + rather than the standard non pre-release versions. + Pre-releases versions can be installed using + `pip install --pre `. + """ if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): session.skip("cpp implementation is not supported in python 3.11+") # Install all dependencies - session.install("-e", ".[all, tests, tracing]") + session.install("-e", ".") + unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES + # Install dependencies for the unit test environment session.install(*unit_deps_all) + system_deps_all = ( SYSTEM_TEST_STANDARD_DEPENDENCIES + SYSTEM_TEST_EXTERNAL_DEPENDENCIES + SYSTEM_TEST_EXTRAS ) + # Install dependencies for the system test environment session.install(*system_deps_all) # Because we test minimum dependency versions on the minimum Python @@ -417,6 +426,7 @@ def prerelease_deps(session, protobuf_implementation): ) ] + # Install dependencies specified in `testing/constraints-X.txt`. session.install(*constraints_deps) prerel_deps = [ @@ -458,3 +468,70 @@ def prerelease_deps(session, protobuf_implementation): "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, }, ) + + +@nox.session(python="3.13") +@nox.parametrize( + "protobuf_implementation", + ["python", "upb"], +) +def core_deps_from_source(session, protobuf_implementation): + """Run all tests with local versions of core dependencies installed, + rather than pulling core dependencies from PyPI. + """ + + # Install all dependencies + session.install(".") + + # Install dependencies for the unit test environment + unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES + session.install(*unit_deps_all) + + # Install dependencies for the system test environment + system_deps_all = ( + SYSTEM_TEST_STANDARD_DEPENDENCIES + + SYSTEM_TEST_EXTERNAL_DEPENDENCIES + + SYSTEM_TEST_EXTRAS + ) + session.install(*system_deps_all) + + # Because we test minimum dependency versions on the minimum Python + # version, the first version we test with in the unit tests sessions has a + # constraints file containing all dependencies and extras that should be installed. + with open( + CURRENT_DIRECTORY + / "testing" + / f"constraints-{UNIT_TEST_PYTHON_VERSIONS[0]}.txt", + encoding="utf-8", + ) as constraints_file: + constraints_text = constraints_file.read() + + # Ignore leading whitespace and comment lines. + constraints_deps = [ + match.group(1) + for match in re.finditer( + r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE + ) + ] + + # Install dependencies specified in `testing/constraints-X.txt`. 
+ session.install(*constraints_deps) + + core_dependencies_from_source = [ + "google-api-core @ git+https://github.com/googleapis/python-api-core.git", + "google-auth @ git+https://github.com/googleapis/google-auth-library-python.git", + f"{CURRENT_DIRECTORY}/../googleapis-common-protos", + f"{CURRENT_DIRECTORY}/../grpc-google-iam-v1", + "proto-plus @ git+https://github.com/googleapis/proto-plus-python.git", + ] + + for dep in core_dependencies_from_source: + session.install(dep, "--ignore-installed", "--no-deps") + + session.run( + "py.test", + "tests/unit", + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, + ) diff --git a/packages/google-cloud-apphub/CHANGELOG.md b/packages/google-cloud-apphub/CHANGELOG.md index 6aeaae54915e..d60e8728a9b4 100644 --- a/packages/google-cloud-apphub/CHANGELOG.md +++ b/packages/google-cloud-apphub/CHANGELOG.md @@ -1,5 +1,13 @@ # Changelog +## [0.1.6](https://github.com/googleapis/google-cloud-python/compare/google-cloud-apphub-v0.1.5...google-cloud-apphub-v0.1.6) (2025-02-12) + + +### Features + +* Add REST Interceptors which support reading metadata ([b1c3ce8](https://github.com/googleapis/google-cloud-python/commit/b1c3ce8b271e9d22afabcde054e81dcedae6b0ef)) +* Add support for reading selective GAPIC generation methods from service YAML ([b1c3ce8](https://github.com/googleapis/google-cloud-python/commit/b1c3ce8b271e9d22afabcde054e81dcedae6b0ef)) + ## [0.1.5](https://github.com/googleapis/google-cloud-python/compare/google-cloud-apphub-v0.1.4...google-cloud-apphub-v0.1.5) (2024-12-12) diff --git a/packages/google-cloud-apphub/README.rst b/packages/google-cloud-apphub/README.rst index b9be0e332436..5c861c95c098 100644 --- a/packages/google-cloud-apphub/README.rst +++ b/packages/google-cloud-apphub/README.rst @@ -26,12 +26,12 @@ In order to use this library, you first need to go through the following steps: 1. `Select or create a Cloud Platform project.`_ 2. `Enable billing for your project.`_ 3. `Enable the App Hub API.`_ -4. `Setup Authentication.`_ +4. `Set up Authentication.`_ .. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project .. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project .. _Enable the App Hub API.: https://cloud.google.com/app-hub/docs/overview -.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html +.. _Set up Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html Installation ~~~~~~~~~~~~ diff --git a/packages/google-cloud-apphub/google/cloud/apphub/gapic_version.py b/packages/google-cloud-apphub/google/cloud/apphub/gapic_version.py index e9c4bb5650f3..51d2795b9d6b 100644 --- a/packages/google-cloud-apphub/google/cloud/apphub/gapic_version.py +++ b/packages/google-cloud-apphub/google/cloud/apphub/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
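The noxfile changes above replace the single `prerelease_deps` flow with two conditional sessions: `prerelease_deps` installs the package plus the unit/system test dependencies and then upgrades to pre-release versions, while the new `core_deps_from_source` session keeps the same setup but reinstalls google-api-core, google-auth, proto-plus and the local common-protos/IAM packages with `--no-deps`. Assuming nox's usual naming for parametrized sessions (not shown verbatim in this diff), they would typically be run locally with something like `nox -s "prerelease_deps-3.13(protobuf_implementation='upb')"` or `nox -s "core_deps_from_source-3.13(protobuf_implementation='upb')"`.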
# -__version__ = "0.1.5" # {x-release-please-version} +__version__ = "0.1.6" # {x-release-please-version} diff --git a/packages/google-cloud-apphub/google/cloud/apphub_v1/gapic_version.py b/packages/google-cloud-apphub/google/cloud/apphub_v1/gapic_version.py index e9c4bb5650f3..51d2795b9d6b 100644 --- a/packages/google-cloud-apphub/google/cloud/apphub_v1/gapic_version.py +++ b/packages/google-cloud-apphub/google/cloud/apphub_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.1.5" # {x-release-please-version} +__version__ = "0.1.6" # {x-release-please-version} diff --git a/packages/google-cloud-apphub/google/cloud/apphub_v1/services/app_hub/client.py b/packages/google-cloud-apphub/google/cloud/apphub_v1/services/app_hub/client.py index bf107f464860..fae9c9b94be5 100644 --- a/packages/google-cloud-apphub/google/cloud/apphub_v1/services/app_hub/client.py +++ b/packages/google-cloud-apphub/google/cloud/apphub_v1/services/app_hub/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -619,6 +621,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -4291,16 +4320,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -4346,16 +4379,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def delete_operation( self, @@ -4578,16 +4615,20 @@ def set_iam_policy( # Validate the universe domain. self._validate_universe_domain() - # Send the request. 
- response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_iam_policy( self, @@ -4700,16 +4741,20 @@ def get_iam_policy( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def test_iam_permissions( self, @@ -4760,16 +4805,20 @@ def test_iam_permissions( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_location( self, @@ -4815,16 +4864,20 @@ def get_location( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def list_locations( self, @@ -4870,16 +4923,20 @@ def list_locations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-apphub/google/cloud/apphub_v1/services/app_hub/transports/rest.py b/packages/google-cloud-apphub/google/cloud/apphub_v1/services/app_hub/transports/rest.py index 1edd42cae957..e05a14801d28 100644 --- a/packages/google-cloud-apphub/google/cloud/apphub_v1/services/app_hub/transports/rest.py +++ b/packages/google-cloud-apphub/google/cloud/apphub_v1/services/app_hub/transports/rest.py @@ -319,12 +319,35 @@ def post_create_application( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_application - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_application_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AppHub server but before - it is returned to user code. 
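For context on the `_add_cred_info_for_auth_errors` change in client.py above, the following is a rough caller-side sketch of what it enables. The client class name, import path, and resource name are assumptions for illustration (the generated client is presumably `apphub_v1.AppHubClient`); per the diff, the extra detail is only appended for 401/403/404 responses and only when google-auth >= 2.35.0 exposes `get_cred_info`.

    from google.api_core import exceptions as core_exceptions
    from google.cloud import apphub_v1  # assumed import path for the generated client

    client = apphub_v1.AppHubClient()  # assumed client class name

    try:
        client.get_application(
            # hypothetical resource name
            name="projects/my-project/locations/us-central1/applications/my-app"
        )
    except core_exceptions.GoogleAPICallError as err:
        # For UNAUTHORIZED/FORBIDDEN/NOT_FOUND responses, the client now appends a
        # JSON blob from credentials.get_cred_info() to the error details, which can
        # help diagnose which credentials were actually used for the failed call.
        print(err.code, err.details)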
+ it is returned to user code. This `post_create_application` interceptor runs + before the `post_create_application_with_metadata` interceptor. """ return response + def post_create_application_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_application + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AppHub server but before it is returned to user code. + + We recommend only using this `post_create_application_with_metadata` + interceptor in new development instead of the `post_create_application` interceptor. + When both interceptors are used, this `post_create_application_with_metadata` interceptor runs after the + `post_create_application` interceptor. The (possibly modified) response returned by + `post_create_application` will be passed to + `post_create_application_with_metadata`. + """ + return response, metadata + def pre_create_service( self, request: apphub_service.CreateServiceRequest, @@ -344,12 +367,35 @@ def post_create_service( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_service - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_service_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AppHub server but before - it is returned to user code. + it is returned to user code. This `post_create_service` interceptor runs + before the `post_create_service_with_metadata` interceptor. """ return response + def post_create_service_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_service + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AppHub server but before it is returned to user code. + + We recommend only using this `post_create_service_with_metadata` + interceptor in new development instead of the `post_create_service` interceptor. + When both interceptors are used, this `post_create_service_with_metadata` interceptor runs after the + `post_create_service` interceptor. The (possibly modified) response returned by + `post_create_service` will be passed to + `post_create_service_with_metadata`. + """ + return response, metadata + def pre_create_service_project_attachment( self, request: apphub_service.CreateServiceProjectAttachmentRequest, @@ -370,12 +416,35 @@ def post_create_service_project_attachment( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_service_project_attachment - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_service_project_attachment_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AppHub server but before - it is returned to user code. + it is returned to user code. This `post_create_service_project_attachment` interceptor runs + before the `post_create_service_project_attachment_with_metadata` interceptor. 
""" return response + def post_create_service_project_attachment_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_service_project_attachment + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AppHub server but before it is returned to user code. + + We recommend only using this `post_create_service_project_attachment_with_metadata` + interceptor in new development instead of the `post_create_service_project_attachment` interceptor. + When both interceptors are used, this `post_create_service_project_attachment_with_metadata` interceptor runs after the + `post_create_service_project_attachment` interceptor. The (possibly modified) response returned by + `post_create_service_project_attachment` will be passed to + `post_create_service_project_attachment_with_metadata`. + """ + return response, metadata + def pre_create_workload( self, request: apphub_service.CreateWorkloadRequest, @@ -395,12 +464,35 @@ def post_create_workload( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_workload - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_workload_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AppHub server but before - it is returned to user code. + it is returned to user code. This `post_create_workload` interceptor runs + before the `post_create_workload_with_metadata` interceptor. """ return response + def post_create_workload_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_workload + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AppHub server but before it is returned to user code. + + We recommend only using this `post_create_workload_with_metadata` + interceptor in new development instead of the `post_create_workload` interceptor. + When both interceptors are used, this `post_create_workload_with_metadata` interceptor runs after the + `post_create_workload` interceptor. The (possibly modified) response returned by + `post_create_workload` will be passed to + `post_create_workload_with_metadata`. + """ + return response, metadata + def pre_delete_application( self, request: apphub_service.DeleteApplicationRequest, @@ -420,12 +512,35 @@ def post_delete_application( ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_application - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_application_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AppHub server but before - it is returned to user code. + it is returned to user code. This `post_delete_application` interceptor runs + before the `post_delete_application_with_metadata` interceptor. 
""" return response + def post_delete_application_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_application + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AppHub server but before it is returned to user code. + + We recommend only using this `post_delete_application_with_metadata` + interceptor in new development instead of the `post_delete_application` interceptor. + When both interceptors are used, this `post_delete_application_with_metadata` interceptor runs after the + `post_delete_application` interceptor. The (possibly modified) response returned by + `post_delete_application` will be passed to + `post_delete_application_with_metadata`. + """ + return response, metadata + def pre_delete_service( self, request: apphub_service.DeleteServiceRequest, @@ -445,12 +560,35 @@ def post_delete_service( ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_service - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_service_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AppHub server but before - it is returned to user code. + it is returned to user code. This `post_delete_service` interceptor runs + before the `post_delete_service_with_metadata` interceptor. """ return response + def post_delete_service_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_service + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AppHub server but before it is returned to user code. + + We recommend only using this `post_delete_service_with_metadata` + interceptor in new development instead of the `post_delete_service` interceptor. + When both interceptors are used, this `post_delete_service_with_metadata` interceptor runs after the + `post_delete_service` interceptor. The (possibly modified) response returned by + `post_delete_service` will be passed to + `post_delete_service_with_metadata`. + """ + return response, metadata + def pre_delete_service_project_attachment( self, request: apphub_service.DeleteServiceProjectAttachmentRequest, @@ -471,12 +609,35 @@ def post_delete_service_project_attachment( ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_service_project_attachment - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_service_project_attachment_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AppHub server but before - it is returned to user code. + it is returned to user code. This `post_delete_service_project_attachment` interceptor runs + before the `post_delete_service_project_attachment_with_metadata` interceptor. 
""" return response + def post_delete_service_project_attachment_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_service_project_attachment + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AppHub server but before it is returned to user code. + + We recommend only using this `post_delete_service_project_attachment_with_metadata` + interceptor in new development instead of the `post_delete_service_project_attachment` interceptor. + When both interceptors are used, this `post_delete_service_project_attachment_with_metadata` interceptor runs after the + `post_delete_service_project_attachment` interceptor. The (possibly modified) response returned by + `post_delete_service_project_attachment` will be passed to + `post_delete_service_project_attachment_with_metadata`. + """ + return response, metadata + def pre_delete_workload( self, request: apphub_service.DeleteWorkloadRequest, @@ -496,12 +657,35 @@ def post_delete_workload( ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_workload - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_workload_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AppHub server but before - it is returned to user code. + it is returned to user code. This `post_delete_workload` interceptor runs + before the `post_delete_workload_with_metadata` interceptor. """ return response + def post_delete_workload_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_workload + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AppHub server but before it is returned to user code. + + We recommend only using this `post_delete_workload_with_metadata` + interceptor in new development instead of the `post_delete_workload` interceptor. + When both interceptors are used, this `post_delete_workload_with_metadata` interceptor runs after the + `post_delete_workload` interceptor. The (possibly modified) response returned by + `post_delete_workload` will be passed to + `post_delete_workload_with_metadata`. + """ + return response, metadata + def pre_detach_service_project_attachment( self, request: apphub_service.DetachServiceProjectAttachmentRequest, @@ -522,12 +706,38 @@ def post_detach_service_project_attachment( ) -> apphub_service.DetachServiceProjectAttachmentResponse: """Post-rpc interceptor for detach_service_project_attachment - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_detach_service_project_attachment_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AppHub server but before - it is returned to user code. + it is returned to user code. This `post_detach_service_project_attachment` interceptor runs + before the `post_detach_service_project_attachment_with_metadata` interceptor. 
""" return response + def post_detach_service_project_attachment_with_metadata( + self, + response: apphub_service.DetachServiceProjectAttachmentResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + apphub_service.DetachServiceProjectAttachmentResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for detach_service_project_attachment + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AppHub server but before it is returned to user code. + + We recommend only using this `post_detach_service_project_attachment_with_metadata` + interceptor in new development instead of the `post_detach_service_project_attachment` interceptor. + When both interceptors are used, this `post_detach_service_project_attachment_with_metadata` interceptor runs after the + `post_detach_service_project_attachment` interceptor. The (possibly modified) response returned by + `post_detach_service_project_attachment` will be passed to + `post_detach_service_project_attachment_with_metadata`. + """ + return response, metadata + def pre_get_application( self, request: apphub_service.GetApplicationRequest, @@ -547,12 +757,35 @@ def post_get_application( ) -> application.Application: """Post-rpc interceptor for get_application - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_application_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AppHub server but before - it is returned to user code. + it is returned to user code. This `post_get_application` interceptor runs + before the `post_get_application_with_metadata` interceptor. """ return response + def post_get_application_with_metadata( + self, + response: application.Application, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[application.Application, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_application + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AppHub server but before it is returned to user code. + + We recommend only using this `post_get_application_with_metadata` + interceptor in new development instead of the `post_get_application` interceptor. + When both interceptors are used, this `post_get_application_with_metadata` interceptor runs after the + `post_get_application` interceptor. The (possibly modified) response returned by + `post_get_application` will be passed to + `post_get_application_with_metadata`. + """ + return response, metadata + def pre_get_discovered_service( self, request: apphub_service.GetDiscoveredServiceRequest, @@ -573,12 +806,35 @@ def post_get_discovered_service( ) -> service.DiscoveredService: """Post-rpc interceptor for get_discovered_service - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_discovered_service_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AppHub server but before - it is returned to user code. + it is returned to user code. This `post_get_discovered_service` interceptor runs + before the `post_get_discovered_service_with_metadata` interceptor. 
""" return response + def post_get_discovered_service_with_metadata( + self, + response: service.DiscoveredService, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[service.DiscoveredService, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_discovered_service + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AppHub server but before it is returned to user code. + + We recommend only using this `post_get_discovered_service_with_metadata` + interceptor in new development instead of the `post_get_discovered_service` interceptor. + When both interceptors are used, this `post_get_discovered_service_with_metadata` interceptor runs after the + `post_get_discovered_service` interceptor. The (possibly modified) response returned by + `post_get_discovered_service` will be passed to + `post_get_discovered_service_with_metadata`. + """ + return response, metadata + def pre_get_discovered_workload( self, request: apphub_service.GetDiscoveredWorkloadRequest, @@ -599,12 +855,35 @@ def post_get_discovered_workload( ) -> workload.DiscoveredWorkload: """Post-rpc interceptor for get_discovered_workload - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_discovered_workload_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AppHub server but before - it is returned to user code. + it is returned to user code. This `post_get_discovered_workload` interceptor runs + before the `post_get_discovered_workload_with_metadata` interceptor. """ return response + def post_get_discovered_workload_with_metadata( + self, + response: workload.DiscoveredWorkload, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[workload.DiscoveredWorkload, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_discovered_workload + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AppHub server but before it is returned to user code. + + We recommend only using this `post_get_discovered_workload_with_metadata` + interceptor in new development instead of the `post_get_discovered_workload` interceptor. + When both interceptors are used, this `post_get_discovered_workload_with_metadata` interceptor runs after the + `post_get_discovered_workload` interceptor. The (possibly modified) response returned by + `post_get_discovered_workload` will be passed to + `post_get_discovered_workload_with_metadata`. + """ + return response, metadata + def pre_get_service( self, request: apphub_service.GetServiceRequest, @@ -622,12 +901,35 @@ def pre_get_service( def post_get_service(self, response: service.Service) -> service.Service: """Post-rpc interceptor for get_service - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_service_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AppHub server but before - it is returned to user code. + it is returned to user code. This `post_get_service` interceptor runs + before the `post_get_service_with_metadata` interceptor. 
""" return response + def post_get_service_with_metadata( + self, + response: service.Service, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[service.Service, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_service + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AppHub server but before it is returned to user code. + + We recommend only using this `post_get_service_with_metadata` + interceptor in new development instead of the `post_get_service` interceptor. + When both interceptors are used, this `post_get_service_with_metadata` interceptor runs after the + `post_get_service` interceptor. The (possibly modified) response returned by + `post_get_service` will be passed to + `post_get_service_with_metadata`. + """ + return response, metadata + def pre_get_service_project_attachment( self, request: apphub_service.GetServiceProjectAttachmentRequest, @@ -648,12 +950,38 @@ def post_get_service_project_attachment( ) -> service_project_attachment.ServiceProjectAttachment: """Post-rpc interceptor for get_service_project_attachment - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_service_project_attachment_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AppHub server but before - it is returned to user code. + it is returned to user code. This `post_get_service_project_attachment` interceptor runs + before the `post_get_service_project_attachment_with_metadata` interceptor. """ return response + def post_get_service_project_attachment_with_metadata( + self, + response: service_project_attachment.ServiceProjectAttachment, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + service_project_attachment.ServiceProjectAttachment, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for get_service_project_attachment + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AppHub server but before it is returned to user code. + + We recommend only using this `post_get_service_project_attachment_with_metadata` + interceptor in new development instead of the `post_get_service_project_attachment` interceptor. + When both interceptors are used, this `post_get_service_project_attachment_with_metadata` interceptor runs after the + `post_get_service_project_attachment` interceptor. The (possibly modified) response returned by + `post_get_service_project_attachment` will be passed to + `post_get_service_project_attachment_with_metadata`. + """ + return response, metadata + def pre_get_workload( self, request: apphub_service.GetWorkloadRequest, @@ -671,12 +999,35 @@ def pre_get_workload( def post_get_workload(self, response: workload.Workload) -> workload.Workload: """Post-rpc interceptor for get_workload - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_workload_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AppHub server but before - it is returned to user code. + it is returned to user code. This `post_get_workload` interceptor runs + before the `post_get_workload_with_metadata` interceptor. 
""" return response + def post_get_workload_with_metadata( + self, + response: workload.Workload, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[workload.Workload, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_workload + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AppHub server but before it is returned to user code. + + We recommend only using this `post_get_workload_with_metadata` + interceptor in new development instead of the `post_get_workload` interceptor. + When both interceptors are used, this `post_get_workload_with_metadata` interceptor runs after the + `post_get_workload` interceptor. The (possibly modified) response returned by + `post_get_workload` will be passed to + `post_get_workload_with_metadata`. + """ + return response, metadata + def pre_list_applications( self, request: apphub_service.ListApplicationsRequest, @@ -696,12 +1047,37 @@ def post_list_applications( ) -> apphub_service.ListApplicationsResponse: """Post-rpc interceptor for list_applications - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_applications_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AppHub server but before - it is returned to user code. + it is returned to user code. This `post_list_applications` interceptor runs + before the `post_list_applications_with_metadata` interceptor. """ return response + def post_list_applications_with_metadata( + self, + response: apphub_service.ListApplicationsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + apphub_service.ListApplicationsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_applications + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AppHub server but before it is returned to user code. + + We recommend only using this `post_list_applications_with_metadata` + interceptor in new development instead of the `post_list_applications` interceptor. + When both interceptors are used, this `post_list_applications_with_metadata` interceptor runs after the + `post_list_applications` interceptor. The (possibly modified) response returned by + `post_list_applications` will be passed to + `post_list_applications_with_metadata`. + """ + return response, metadata + def pre_list_discovered_services( self, request: apphub_service.ListDiscoveredServicesRequest, @@ -722,12 +1098,38 @@ def post_list_discovered_services( ) -> apphub_service.ListDiscoveredServicesResponse: """Post-rpc interceptor for list_discovered_services - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_discovered_services_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AppHub server but before - it is returned to user code. + it is returned to user code. This `post_list_discovered_services` interceptor runs + before the `post_list_discovered_services_with_metadata` interceptor. 
""" return response + def post_list_discovered_services_with_metadata( + self, + response: apphub_service.ListDiscoveredServicesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + apphub_service.ListDiscoveredServicesResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_discovered_services + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AppHub server but before it is returned to user code. + + We recommend only using this `post_list_discovered_services_with_metadata` + interceptor in new development instead of the `post_list_discovered_services` interceptor. + When both interceptors are used, this `post_list_discovered_services_with_metadata` interceptor runs after the + `post_list_discovered_services` interceptor. The (possibly modified) response returned by + `post_list_discovered_services` will be passed to + `post_list_discovered_services_with_metadata`. + """ + return response, metadata + def pre_list_discovered_workloads( self, request: apphub_service.ListDiscoveredWorkloadsRequest, @@ -748,12 +1150,38 @@ def post_list_discovered_workloads( ) -> apphub_service.ListDiscoveredWorkloadsResponse: """Post-rpc interceptor for list_discovered_workloads - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_discovered_workloads_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AppHub server but before - it is returned to user code. + it is returned to user code. This `post_list_discovered_workloads` interceptor runs + before the `post_list_discovered_workloads_with_metadata` interceptor. """ return response + def post_list_discovered_workloads_with_metadata( + self, + response: apphub_service.ListDiscoveredWorkloadsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + apphub_service.ListDiscoveredWorkloadsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_discovered_workloads + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AppHub server but before it is returned to user code. + + We recommend only using this `post_list_discovered_workloads_with_metadata` + interceptor in new development instead of the `post_list_discovered_workloads` interceptor. + When both interceptors are used, this `post_list_discovered_workloads_with_metadata` interceptor runs after the + `post_list_discovered_workloads` interceptor. The (possibly modified) response returned by + `post_list_discovered_workloads` will be passed to + `post_list_discovered_workloads_with_metadata`. + """ + return response, metadata + def pre_list_service_project_attachments( self, request: apphub_service.ListServiceProjectAttachmentsRequest, @@ -774,12 +1202,38 @@ def post_list_service_project_attachments( ) -> apphub_service.ListServiceProjectAttachmentsResponse: """Post-rpc interceptor for list_service_project_attachments - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_service_project_attachments_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AppHub server but before - it is returned to user code. + it is returned to user code. 
This `post_list_service_project_attachments` interceptor runs + before the `post_list_service_project_attachments_with_metadata` interceptor. """ return response + def post_list_service_project_attachments_with_metadata( + self, + response: apphub_service.ListServiceProjectAttachmentsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + apphub_service.ListServiceProjectAttachmentsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_service_project_attachments + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AppHub server but before it is returned to user code. + + We recommend only using this `post_list_service_project_attachments_with_metadata` + interceptor in new development instead of the `post_list_service_project_attachments` interceptor. + When both interceptors are used, this `post_list_service_project_attachments_with_metadata` interceptor runs after the + `post_list_service_project_attachments` interceptor. The (possibly modified) response returned by + `post_list_service_project_attachments` will be passed to + `post_list_service_project_attachments_with_metadata`. + """ + return response, metadata + def pre_list_services( self, request: apphub_service.ListServicesRequest, @@ -799,12 +1253,37 @@ def post_list_services( ) -> apphub_service.ListServicesResponse: """Post-rpc interceptor for list_services - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_services_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AppHub server but before - it is returned to user code. + it is returned to user code. This `post_list_services` interceptor runs + before the `post_list_services_with_metadata` interceptor. """ return response + def post_list_services_with_metadata( + self, + response: apphub_service.ListServicesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + apphub_service.ListServicesResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_services + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AppHub server but before it is returned to user code. + + We recommend only using this `post_list_services_with_metadata` + interceptor in new development instead of the `post_list_services` interceptor. + When both interceptors are used, this `post_list_services_with_metadata` interceptor runs after the + `post_list_services` interceptor. The (possibly modified) response returned by + `post_list_services` will be passed to + `post_list_services_with_metadata`. + """ + return response, metadata + def pre_list_workloads( self, request: apphub_service.ListWorkloadsRequest, @@ -824,12 +1303,37 @@ def post_list_workloads( ) -> apphub_service.ListWorkloadsResponse: """Post-rpc interceptor for list_workloads - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_workloads_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AppHub server but before - it is returned to user code. + it is returned to user code. This `post_list_workloads` interceptor runs + before the `post_list_workloads_with_metadata` interceptor. 
""" return response + def post_list_workloads_with_metadata( + self, + response: apphub_service.ListWorkloadsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + apphub_service.ListWorkloadsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_workloads + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AppHub server but before it is returned to user code. + + We recommend only using this `post_list_workloads_with_metadata` + interceptor in new development instead of the `post_list_workloads` interceptor. + When both interceptors are used, this `post_list_workloads_with_metadata` interceptor runs after the + `post_list_workloads` interceptor. The (possibly modified) response returned by + `post_list_workloads` will be passed to + `post_list_workloads_with_metadata`. + """ + return response, metadata + def pre_lookup_discovered_service( self, request: apphub_service.LookupDiscoveredServiceRequest, @@ -850,12 +1354,38 @@ def post_lookup_discovered_service( ) -> apphub_service.LookupDiscoveredServiceResponse: """Post-rpc interceptor for lookup_discovered_service - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_lookup_discovered_service_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AppHub server but before - it is returned to user code. + it is returned to user code. This `post_lookup_discovered_service` interceptor runs + before the `post_lookup_discovered_service_with_metadata` interceptor. """ return response + def post_lookup_discovered_service_with_metadata( + self, + response: apphub_service.LookupDiscoveredServiceResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + apphub_service.LookupDiscoveredServiceResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for lookup_discovered_service + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AppHub server but before it is returned to user code. + + We recommend only using this `post_lookup_discovered_service_with_metadata` + interceptor in new development instead of the `post_lookup_discovered_service` interceptor. + When both interceptors are used, this `post_lookup_discovered_service_with_metadata` interceptor runs after the + `post_lookup_discovered_service` interceptor. The (possibly modified) response returned by + `post_lookup_discovered_service` will be passed to + `post_lookup_discovered_service_with_metadata`. + """ + return response, metadata + def pre_lookup_discovered_workload( self, request: apphub_service.LookupDiscoveredWorkloadRequest, @@ -876,12 +1406,38 @@ def post_lookup_discovered_workload( ) -> apphub_service.LookupDiscoveredWorkloadResponse: """Post-rpc interceptor for lookup_discovered_workload - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_lookup_discovered_workload_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AppHub server but before - it is returned to user code. + it is returned to user code. This `post_lookup_discovered_workload` interceptor runs + before the `post_lookup_discovered_workload_with_metadata` interceptor. 
""" return response + def post_lookup_discovered_workload_with_metadata( + self, + response: apphub_service.LookupDiscoveredWorkloadResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + apphub_service.LookupDiscoveredWorkloadResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for lookup_discovered_workload + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AppHub server but before it is returned to user code. + + We recommend only using this `post_lookup_discovered_workload_with_metadata` + interceptor in new development instead of the `post_lookup_discovered_workload` interceptor. + When both interceptors are used, this `post_lookup_discovered_workload_with_metadata` interceptor runs after the + `post_lookup_discovered_workload` interceptor. The (possibly modified) response returned by + `post_lookup_discovered_workload` will be passed to + `post_lookup_discovered_workload_with_metadata`. + """ + return response, metadata + def pre_lookup_service_project_attachment( self, request: apphub_service.LookupServiceProjectAttachmentRequest, @@ -902,12 +1458,38 @@ def post_lookup_service_project_attachment( ) -> apphub_service.LookupServiceProjectAttachmentResponse: """Post-rpc interceptor for lookup_service_project_attachment - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_lookup_service_project_attachment_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AppHub server but before - it is returned to user code. + it is returned to user code. This `post_lookup_service_project_attachment` interceptor runs + before the `post_lookup_service_project_attachment_with_metadata` interceptor. """ return response + def post_lookup_service_project_attachment_with_metadata( + self, + response: apphub_service.LookupServiceProjectAttachmentResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + apphub_service.LookupServiceProjectAttachmentResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for lookup_service_project_attachment + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AppHub server but before it is returned to user code. + + We recommend only using this `post_lookup_service_project_attachment_with_metadata` + interceptor in new development instead of the `post_lookup_service_project_attachment` interceptor. + When both interceptors are used, this `post_lookup_service_project_attachment_with_metadata` interceptor runs after the + `post_lookup_service_project_attachment` interceptor. The (possibly modified) response returned by + `post_lookup_service_project_attachment` will be passed to + `post_lookup_service_project_attachment_with_metadata`. + """ + return response, metadata + def pre_update_application( self, request: apphub_service.UpdateApplicationRequest, @@ -927,12 +1509,35 @@ def post_update_application( ) -> operations_pb2.Operation: """Post-rpc interceptor for update_application - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_application_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AppHub server but before - it is returned to user code. + it is returned to user code. 
This `post_update_application` interceptor runs + before the `post_update_application_with_metadata` interceptor. """ return response + def post_update_application_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_application + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AppHub server but before it is returned to user code. + + We recommend only using this `post_update_application_with_metadata` + interceptor in new development instead of the `post_update_application` interceptor. + When both interceptors are used, this `post_update_application_with_metadata` interceptor runs after the + `post_update_application` interceptor. The (possibly modified) response returned by + `post_update_application` will be passed to + `post_update_application_with_metadata`. + """ + return response, metadata + def pre_update_service( self, request: apphub_service.UpdateServiceRequest, @@ -952,12 +1557,35 @@ def post_update_service( ) -> operations_pb2.Operation: """Post-rpc interceptor for update_service - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_service_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AppHub server but before - it is returned to user code. + it is returned to user code. This `post_update_service` interceptor runs + before the `post_update_service_with_metadata` interceptor. """ return response + def post_update_service_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_service + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AppHub server but before it is returned to user code. + + We recommend only using this `post_update_service_with_metadata` + interceptor in new development instead of the `post_update_service` interceptor. + When both interceptors are used, this `post_update_service_with_metadata` interceptor runs after the + `post_update_service` interceptor. The (possibly modified) response returned by + `post_update_service` will be passed to + `post_update_service_with_metadata`. + """ + return response, metadata + def pre_update_workload( self, request: apphub_service.UpdateWorkloadRequest, @@ -977,12 +1605,35 @@ def post_update_workload( ) -> operations_pb2.Operation: """Post-rpc interceptor for update_workload - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_workload_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AppHub server but before - it is returned to user code. + it is returned to user code. This `post_update_workload` interceptor runs + before the `post_update_workload_with_metadata` interceptor. 
""" return response + def post_update_workload_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_workload + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AppHub server but before it is returned to user code. + + We recommend only using this `post_update_workload_with_metadata` + interceptor in new development instead of the `post_update_workload` interceptor. + When both interceptors are used, this `post_update_workload_with_metadata` interceptor runs after the + `post_update_workload` interceptor. The (possibly modified) response returned by + `post_update_workload` will be passed to + `post_update_workload_with_metadata`. + """ + return response, metadata + def pre_get_location( self, request: locations_pb2.GetLocationRequest, @@ -1474,6 +2125,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_application(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_application_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1623,6 +2278,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_service(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_service_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1776,6 +2435,13 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_service_project_attachment(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_create_service_project_attachment_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1925,6 +2591,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_workload(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_workload_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2072,6 +2742,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_application(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_application_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2215,6 +2889,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_service(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_service_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2362,6 
+3040,13 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_service_project_attachment(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_delete_service_project_attachment_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2505,6 +3190,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_workload(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_workload_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2659,6 +3348,13 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_detach_service_project_attachment(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_detach_service_project_attachment_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2811,6 +3507,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_application(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_application_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2958,6 +3658,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_discovered_service(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_discovered_service_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3107,6 +3811,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_discovered_workload(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_discovered_workload_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3252,6 +3960,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_service(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_service_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3406,6 +4118,13 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_service_project_attachment(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_get_service_project_attachment_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3557,6 +4276,10 @@ def __call__( 
json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_workload(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_workload_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3703,6 +4426,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_applications(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_applications_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3847,6 +4574,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_discovered_services(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_discovered_services_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3991,6 +4722,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_discovered_workloads(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_discovered_workloads_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4139,6 +4874,13 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_service_project_attachments(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_list_service_project_attachments_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4285,6 +5027,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_services(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_services_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4429,6 +5175,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_workloads(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_workloads_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4573,6 +5323,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_lookup_discovered_service(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_lookup_discovered_service_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4720,6 +5474,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = 
self._interceptor.post_lookup_discovered_workload(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_lookup_discovered_workload_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4872,6 +5630,13 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_lookup_service_project_attachment(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_lookup_service_project_attachment_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -5031,6 +5796,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_application(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_application_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -5180,6 +5949,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_service(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_service_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -5329,6 +6102,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_workload(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_workload_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-apphub/noxfile.py b/packages/google-cloud-apphub/noxfile.py index a9ceef47133c..0acc836b384e 100644 --- a/packages/google-cloud-apphub/noxfile.py +++ b/packages/google-cloud-apphub/noxfile.py @@ -382,20 +382,29 @@ def docfx(session): ["python", "upb", "cpp"], ) def prerelease_deps(session, protobuf_implementation): - """Run all tests with prerelease versions of dependencies installed.""" + """ + Run all tests with pre-release versions of dependencies installed + rather than the standard non-pre-release versions. + Pre-release versions can be installed using + `pip install --pre `. + """ if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): session.skip("cpp implementation is not supported in python 3.11+") # Install all dependencies - session.install("-e", ".[all, tests, tracing]") + session.install("-e", ".") + unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES + # Install dependencies for the unit test environment session.install(*unit_deps_all) + system_deps_all = ( SYSTEM_TEST_STANDARD_DEPENDENCIES + SYSTEM_TEST_EXTERNAL_DEPENDENCIES + SYSTEM_TEST_EXTRAS ) + # Install dependencies for the system test environment session.install(*system_deps_all) # Because we test minimum dependency versions on the minimum Python @@ -417,6 +426,7 @@ def prerelease_deps(session, protobuf_implementation): ) ] + # Install dependencies specified in `testing/constraints-X.txt`.
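# Editor's sketch (illustration only, not part of the generated diff): what the
# constraints regex a few lines above extracts from a constraints file. The file
# contents and package pins below are made up for illustration.
import re

_example_constraints = (
    "google-api-core==1.34.0\n"
    "proto-plus==1.22.3\n"
    "# comment lines have no '==' and are skipped\n"
)
_example_deps = [
    match.group(1)
    for match in re.finditer(
        r"^\s*(\S+)(?===\S+)", _example_constraints, flags=re.MULTILINE
    )
]
assert _example_deps == ["google-api-core", "proto-plus"]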
session.install(*constraints_deps) prerel_deps = [ @@ -458,3 +468,70 @@ def prerelease_deps(session, protobuf_implementation): "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, }, ) + + +@nox.session(python="3.13") +@nox.parametrize( + "protobuf_implementation", + ["python", "upb"], +) +def core_deps_from_source(session, protobuf_implementation): + """Run all tests with local versions of core dependencies installed, + rather than pulling core dependencies from PyPI. + """ + + # Install all dependencies + session.install(".") + + # Install dependencies for the unit test environment + unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES + session.install(*unit_deps_all) + + # Install dependencies for the system test environment + system_deps_all = ( + SYSTEM_TEST_STANDARD_DEPENDENCIES + + SYSTEM_TEST_EXTERNAL_DEPENDENCIES + + SYSTEM_TEST_EXTRAS + ) + session.install(*system_deps_all) + + # Because we test minimum dependency versions on the minimum Python + # version, the first version we test with in the unit tests sessions has a + # constraints file containing all dependencies and extras that should be installed. + with open( + CURRENT_DIRECTORY + / "testing" + / f"constraints-{UNIT_TEST_PYTHON_VERSIONS[0]}.txt", + encoding="utf-8", + ) as constraints_file: + constraints_text = constraints_file.read() + + # Ignore leading whitespace and comment lines. + constraints_deps = [ + match.group(1) + for match in re.finditer( + r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE + ) + ] + + # Install dependencies specified in `testing/constraints-X.txt`. + session.install(*constraints_deps) + + core_dependencies_from_source = [ + "google-api-core @ git+https://github.com/googleapis/python-api-core.git", + "google-auth @ git+https://github.com/googleapis/google-auth-library-python.git", + f"{CURRENT_DIRECTORY}/../googleapis-common-protos", + f"{CURRENT_DIRECTORY}/../grpc-google-iam-v1", + "proto-plus @ git+https://github.com/googleapis/proto-plus-python.git", + ] + + for dep in core_dependencies_from_source: + session.install(dep, "--ignore-installed", "--no-deps") + + session.run( + "py.test", + "tests/unit", + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, + ) diff --git a/packages/google-cloud-apphub/samples/generated_samples/snippet_metadata_google.cloud.apphub.v1.json b/packages/google-cloud-apphub/samples/generated_samples/snippet_metadata_google.cloud.apphub.v1.json index 4f2385e79d1a..f447f3094ffa 100644 --- a/packages/google-cloud-apphub/samples/generated_samples/snippet_metadata_google.cloud.apphub.v1.json +++ b/packages/google-cloud-apphub/samples/generated_samples/snippet_metadata_google.cloud.apphub.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-apphub", - "version": "0.1.5" + "version": "0.1.6" }, "snippets": [ { diff --git a/packages/google-cloud-apphub/tests/unit/gapic/apphub_v1/test_app_hub.py b/packages/google-cloud-apphub/tests/unit/gapic/apphub_v1/test_app_hub.py index 650272d23554..a4d31b972eaa 100644 --- a/packages/google-cloud-apphub/tests/unit/gapic/apphub_v1/test_app_hub.py +++ b/packages/google-cloud-apphub/tests/unit/gapic/apphub_v1/test_app_hub.py @@ -88,6 +88,13 @@ from google.cloud.apphub_v1.types import workload from google.cloud.apphub_v1.types import workload as gca_workload +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = 
json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -310,6 +317,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = AppHubClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = AppHubClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -18873,10 +18923,14 @@ def test_lookup_service_project_attachment_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AppHubRestInterceptor, "post_lookup_service_project_attachment" ) as post, mock.patch.object( + transports.AppHubRestInterceptor, + "post_lookup_service_project_attachment_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AppHubRestInterceptor, "pre_lookup_service_project_attachment" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = apphub_service.LookupServiceProjectAttachmentRequest.pb( apphub_service.LookupServiceProjectAttachmentRequest() ) @@ -18902,6 +18956,10 @@ def test_lookup_service_project_attachment_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = apphub_service.LookupServiceProjectAttachmentResponse() + post_with_metadata.return_value = ( + apphub_service.LookupServiceProjectAttachmentResponse(), + metadata, + ) client.lookup_service_project_attachment( request, @@ -18913,6 +18971,7 @@ def test_lookup_service_project_attachment_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_service_project_attachments_rest_bad_request( @@ -18999,10 +19058,14 @@ def test_list_service_project_attachments_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AppHubRestInterceptor, "post_list_service_project_attachments" ) as post, mock.patch.object( + transports.AppHubRestInterceptor, + "post_list_service_project_attachments_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AppHubRestInterceptor, "pre_list_service_project_attachments" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = 
apphub_service.ListServiceProjectAttachmentsRequest.pb( apphub_service.ListServiceProjectAttachmentsRequest() ) @@ -19028,6 +19091,10 @@ def test_list_service_project_attachments_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = apphub_service.ListServiceProjectAttachmentsResponse() + post_with_metadata.return_value = ( + apphub_service.ListServiceProjectAttachmentsResponse(), + metadata, + ) client.list_service_project_attachments( request, @@ -19039,6 +19106,7 @@ def test_list_service_project_attachments_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_service_project_attachment_rest_bad_request( @@ -19197,10 +19265,14 @@ def test_create_service_project_attachment_rest_interceptors(null_interceptor): ), mock.patch.object( transports.AppHubRestInterceptor, "post_create_service_project_attachment" ) as post, mock.patch.object( + transports.AppHubRestInterceptor, + "post_create_service_project_attachment_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AppHubRestInterceptor, "pre_create_service_project_attachment" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = apphub_service.CreateServiceProjectAttachmentRequest.pb( apphub_service.CreateServiceProjectAttachmentRequest() ) @@ -19224,6 +19296,7 @@ def test_create_service_project_attachment_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_service_project_attachment( request, @@ -19235,6 +19308,7 @@ def test_create_service_project_attachment_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_service_project_attachment_rest_bad_request( @@ -19332,10 +19406,14 @@ def test_get_service_project_attachment_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AppHubRestInterceptor, "post_get_service_project_attachment" ) as post, mock.patch.object( + transports.AppHubRestInterceptor, + "post_get_service_project_attachment_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AppHubRestInterceptor, "pre_get_service_project_attachment" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = apphub_service.GetServiceProjectAttachmentRequest.pb( apphub_service.GetServiceProjectAttachmentRequest() ) @@ -19361,6 +19439,10 @@ def test_get_service_project_attachment_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = service_project_attachment.ServiceProjectAttachment() + post_with_metadata.return_value = ( + service_project_attachment.ServiceProjectAttachment(), + metadata, + ) client.get_service_project_attachment( request, @@ -19372,6 +19454,7 @@ def test_get_service_project_attachment_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_service_project_attachment_rest_bad_request( @@ -19454,10 +19537,14 @@ def test_delete_service_project_attachment_rest_interceptors(null_interceptor): ), mock.patch.object( transports.AppHubRestInterceptor, "post_delete_service_project_attachment" ) as post, mock.patch.object( + 
transports.AppHubRestInterceptor, + "post_delete_service_project_attachment_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AppHubRestInterceptor, "pre_delete_service_project_attachment" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = apphub_service.DeleteServiceProjectAttachmentRequest.pb( apphub_service.DeleteServiceProjectAttachmentRequest() ) @@ -19481,6 +19568,7 @@ def test_delete_service_project_attachment_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.delete_service_project_attachment( request, @@ -19492,6 +19580,7 @@ def test_delete_service_project_attachment_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_detach_service_project_attachment_rest_bad_request( @@ -19573,10 +19662,14 @@ def test_detach_service_project_attachment_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AppHubRestInterceptor, "post_detach_service_project_attachment" ) as post, mock.patch.object( + transports.AppHubRestInterceptor, + "post_detach_service_project_attachment_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AppHubRestInterceptor, "pre_detach_service_project_attachment" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = apphub_service.DetachServiceProjectAttachmentRequest.pb( apphub_service.DetachServiceProjectAttachmentRequest() ) @@ -19602,6 +19695,10 @@ def test_detach_service_project_attachment_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = apphub_service.DetachServiceProjectAttachmentResponse() + post_with_metadata.return_value = ( + apphub_service.DetachServiceProjectAttachmentResponse(), + metadata, + ) client.detach_service_project_attachment( request, @@ -19613,6 +19710,7 @@ def test_detach_service_project_attachment_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_discovered_services_rest_bad_request( @@ -19697,10 +19795,13 @@ def test_list_discovered_services_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AppHubRestInterceptor, "post_list_discovered_services" ) as post, mock.patch.object( + transports.AppHubRestInterceptor, "post_list_discovered_services_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AppHubRestInterceptor, "pre_list_discovered_services" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = apphub_service.ListDiscoveredServicesRequest.pb( apphub_service.ListDiscoveredServicesRequest() ) @@ -19726,6 +19827,10 @@ def test_list_discovered_services_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = apphub_service.ListDiscoveredServicesResponse() + post_with_metadata.return_value = ( + apphub_service.ListDiscoveredServicesResponse(), + metadata, + ) client.list_discovered_services( request, @@ -19737,6 +19842,7 @@ def test_list_discovered_services_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_discovered_service_rest_bad_request( @@ 
-19823,10 +19929,13 @@ def test_get_discovered_service_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AppHubRestInterceptor, "post_get_discovered_service" ) as post, mock.patch.object( + transports.AppHubRestInterceptor, "post_get_discovered_service_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AppHubRestInterceptor, "pre_get_discovered_service" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = apphub_service.GetDiscoveredServiceRequest.pb( apphub_service.GetDiscoveredServiceRequest() ) @@ -19850,6 +19959,7 @@ def test_get_discovered_service_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = service.DiscoveredService() + post_with_metadata.return_value = service.DiscoveredService(), metadata client.get_discovered_service( request, @@ -19861,6 +19971,7 @@ def test_get_discovered_service_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_lookup_discovered_service_rest_bad_request( @@ -19940,10 +20051,13 @@ def test_lookup_discovered_service_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AppHubRestInterceptor, "post_lookup_discovered_service" ) as post, mock.patch.object( + transports.AppHubRestInterceptor, "post_lookup_discovered_service_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AppHubRestInterceptor, "pre_lookup_discovered_service" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = apphub_service.LookupDiscoveredServiceRequest.pb( apphub_service.LookupDiscoveredServiceRequest() ) @@ -19969,6 +20083,10 @@ def test_lookup_discovered_service_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = apphub_service.LookupDiscoveredServiceResponse() + post_with_metadata.return_value = ( + apphub_service.LookupDiscoveredServiceResponse(), + metadata, + ) client.lookup_discovered_service( request, @@ -19980,6 +20098,7 @@ def test_lookup_discovered_service_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_services_rest_bad_request( @@ -20064,10 +20183,13 @@ def test_list_services_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AppHubRestInterceptor, "post_list_services" ) as post, mock.patch.object( + transports.AppHubRestInterceptor, "post_list_services_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AppHubRestInterceptor, "pre_list_services" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = apphub_service.ListServicesRequest.pb( apphub_service.ListServicesRequest() ) @@ -20093,6 +20215,10 @@ def test_list_services_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = apphub_service.ListServicesResponse() + post_with_metadata.return_value = ( + apphub_service.ListServicesResponse(), + metadata, + ) client.list_services( request, @@ -20104,6 +20230,7 @@ def test_list_services_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_service_rest_bad_request( @@ -20274,10 +20401,13 @@ def test_create_service_rest_interceptors(null_interceptor): 
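# Editor's note (condensed sketch of the test pattern repeated throughout this file;
# not part of the generated diff): each *_rest_interceptors test now patches the new
# post_<rpc>_with_metadata hook next to the existing pre/post hooks, primes it with a
# (response, metadata) tuple, invokes the RPC once, and asserts all three mocks ran:
#
#     with mock.patch.object(
#         transports.AppHubRestInterceptor, "post_create_service"
#     ) as post, mock.patch.object(
#         transports.AppHubRestInterceptor, "post_create_service_with_metadata"
#     ) as post_with_metadata, mock.patch.object(
#         transports.AppHubRestInterceptor, "pre_create_service"
#     ) as pre:
#         pre.return_value = request, metadata
#         post.return_value = operations_pb2.Operation()
#         post_with_metadata.return_value = operations_pb2.Operation(), metadata
#         client.create_service(request)
#         pre.assert_called_once()
#         post.assert_called_once()
#         post_with_metadata.assert_called_once()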
), mock.patch.object( transports.AppHubRestInterceptor, "post_create_service" ) as post, mock.patch.object( + transports.AppHubRestInterceptor, "post_create_service_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AppHubRestInterceptor, "pre_create_service" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = apphub_service.CreateServiceRequest.pb( apphub_service.CreateServiceRequest() ) @@ -20301,6 +20431,7 @@ def test_create_service_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_service( request, @@ -20312,6 +20443,7 @@ def test_create_service_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_service_rest_bad_request(request_type=apphub_service.GetServiceRequest): @@ -20406,10 +20538,13 @@ def test_get_service_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AppHubRestInterceptor, "post_get_service" ) as post, mock.patch.object( + transports.AppHubRestInterceptor, "post_get_service_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AppHubRestInterceptor, "pre_get_service" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = apphub_service.GetServiceRequest.pb( apphub_service.GetServiceRequest() ) @@ -20433,6 +20568,7 @@ def test_get_service_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = service.Service() + post_with_metadata.return_value = service.Service(), metadata client.get_service( request, @@ -20444,6 +20580,7 @@ def test_get_service_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_service_rest_bad_request( @@ -20622,10 +20759,13 @@ def test_update_service_rest_interceptors(null_interceptor): ), mock.patch.object( transports.AppHubRestInterceptor, "post_update_service" ) as post, mock.patch.object( + transports.AppHubRestInterceptor, "post_update_service_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AppHubRestInterceptor, "pre_update_service" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = apphub_service.UpdateServiceRequest.pb( apphub_service.UpdateServiceRequest() ) @@ -20649,6 +20789,7 @@ def test_update_service_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.update_service( request, @@ -20660,6 +20801,7 @@ def test_update_service_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_service_rest_bad_request( @@ -20742,10 +20884,13 @@ def test_delete_service_rest_interceptors(null_interceptor): ), mock.patch.object( transports.AppHubRestInterceptor, "post_delete_service" ) as post, mock.patch.object( + transports.AppHubRestInterceptor, "post_delete_service_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AppHubRestInterceptor, "pre_delete_service" ) as pre: pre.assert_not_called() post.assert_not_called() + 
post_with_metadata.assert_not_called() pb_message = apphub_service.DeleteServiceRequest.pb( apphub_service.DeleteServiceRequest() ) @@ -20769,6 +20914,7 @@ def test_delete_service_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.delete_service( request, @@ -20780,6 +20926,7 @@ def test_delete_service_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_discovered_workloads_rest_bad_request( @@ -20864,10 +21011,13 @@ def test_list_discovered_workloads_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AppHubRestInterceptor, "post_list_discovered_workloads" ) as post, mock.patch.object( + transports.AppHubRestInterceptor, "post_list_discovered_workloads_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AppHubRestInterceptor, "pre_list_discovered_workloads" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = apphub_service.ListDiscoveredWorkloadsRequest.pb( apphub_service.ListDiscoveredWorkloadsRequest() ) @@ -20893,6 +21043,10 @@ def test_list_discovered_workloads_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = apphub_service.ListDiscoveredWorkloadsResponse() + post_with_metadata.return_value = ( + apphub_service.ListDiscoveredWorkloadsResponse(), + metadata, + ) client.list_discovered_workloads( request, @@ -20904,6 +21058,7 @@ def test_list_discovered_workloads_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_discovered_workload_rest_bad_request( @@ -20990,10 +21145,13 @@ def test_get_discovered_workload_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AppHubRestInterceptor, "post_get_discovered_workload" ) as post, mock.patch.object( + transports.AppHubRestInterceptor, "post_get_discovered_workload_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AppHubRestInterceptor, "pre_get_discovered_workload" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = apphub_service.GetDiscoveredWorkloadRequest.pb( apphub_service.GetDiscoveredWorkloadRequest() ) @@ -21019,6 +21177,7 @@ def test_get_discovered_workload_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = workload.DiscoveredWorkload() + post_with_metadata.return_value = workload.DiscoveredWorkload(), metadata client.get_discovered_workload( request, @@ -21030,6 +21189,7 @@ def test_get_discovered_workload_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_lookup_discovered_workload_rest_bad_request( @@ -21109,10 +21269,14 @@ def test_lookup_discovered_workload_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AppHubRestInterceptor, "post_lookup_discovered_workload" ) as post, mock.patch.object( + transports.AppHubRestInterceptor, + "post_lookup_discovered_workload_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AppHubRestInterceptor, "pre_lookup_discovered_workload" ) as pre: pre.assert_not_called() post.assert_not_called() + 
post_with_metadata.assert_not_called() pb_message = apphub_service.LookupDiscoveredWorkloadRequest.pb( apphub_service.LookupDiscoveredWorkloadRequest() ) @@ -21138,6 +21302,10 @@ def test_lookup_discovered_workload_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = apphub_service.LookupDiscoveredWorkloadResponse() + post_with_metadata.return_value = ( + apphub_service.LookupDiscoveredWorkloadResponse(), + metadata, + ) client.lookup_discovered_workload( request, @@ -21149,6 +21317,7 @@ def test_lookup_discovered_workload_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_workloads_rest_bad_request( @@ -21233,10 +21402,13 @@ def test_list_workloads_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AppHubRestInterceptor, "post_list_workloads" ) as post, mock.patch.object( + transports.AppHubRestInterceptor, "post_list_workloads_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AppHubRestInterceptor, "pre_list_workloads" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = apphub_service.ListWorkloadsRequest.pb( apphub_service.ListWorkloadsRequest() ) @@ -21262,6 +21434,10 @@ def test_list_workloads_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = apphub_service.ListWorkloadsResponse() + post_with_metadata.return_value = ( + apphub_service.ListWorkloadsResponse(), + metadata, + ) client.list_workloads( request, @@ -21273,6 +21449,7 @@ def test_list_workloads_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_workload_rest_bad_request( @@ -21443,10 +21620,13 @@ def test_create_workload_rest_interceptors(null_interceptor): ), mock.patch.object( transports.AppHubRestInterceptor, "post_create_workload" ) as post, mock.patch.object( + transports.AppHubRestInterceptor, "post_create_workload_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AppHubRestInterceptor, "pre_create_workload" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = apphub_service.CreateWorkloadRequest.pb( apphub_service.CreateWorkloadRequest() ) @@ -21470,6 +21650,7 @@ def test_create_workload_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_workload( request, @@ -21481,6 +21662,7 @@ def test_create_workload_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_workload_rest_bad_request(request_type=apphub_service.GetWorkloadRequest): @@ -21575,10 +21757,13 @@ def test_get_workload_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AppHubRestInterceptor, "post_get_workload" ) as post, mock.patch.object( + transports.AppHubRestInterceptor, "post_get_workload_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AppHubRestInterceptor, "pre_get_workload" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = apphub_service.GetWorkloadRequest.pb( apphub_service.GetWorkloadRequest() ) @@ -21602,6 +21787,7 @@ def 
test_get_workload_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = workload.Workload() + post_with_metadata.return_value = workload.Workload(), metadata client.get_workload( request, @@ -21613,6 +21799,7 @@ def test_get_workload_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_workload_rest_bad_request( @@ -21791,10 +21978,13 @@ def test_update_workload_rest_interceptors(null_interceptor): ), mock.patch.object( transports.AppHubRestInterceptor, "post_update_workload" ) as post, mock.patch.object( + transports.AppHubRestInterceptor, "post_update_workload_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AppHubRestInterceptor, "pre_update_workload" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = apphub_service.UpdateWorkloadRequest.pb( apphub_service.UpdateWorkloadRequest() ) @@ -21818,6 +22008,7 @@ def test_update_workload_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.update_workload( request, @@ -21829,6 +22020,7 @@ def test_update_workload_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_workload_rest_bad_request( @@ -21911,10 +22103,13 @@ def test_delete_workload_rest_interceptors(null_interceptor): ), mock.patch.object( transports.AppHubRestInterceptor, "post_delete_workload" ) as post, mock.patch.object( + transports.AppHubRestInterceptor, "post_delete_workload_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AppHubRestInterceptor, "pre_delete_workload" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = apphub_service.DeleteWorkloadRequest.pb( apphub_service.DeleteWorkloadRequest() ) @@ -21938,6 +22133,7 @@ def test_delete_workload_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.delete_workload( request, @@ -21949,6 +22145,7 @@ def test_delete_workload_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_applications_rest_bad_request( @@ -22033,10 +22230,13 @@ def test_list_applications_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AppHubRestInterceptor, "post_list_applications" ) as post, mock.patch.object( + transports.AppHubRestInterceptor, "post_list_applications_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AppHubRestInterceptor, "pre_list_applications" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = apphub_service.ListApplicationsRequest.pb( apphub_service.ListApplicationsRequest() ) @@ -22062,6 +22262,10 @@ def test_list_applications_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = apphub_service.ListApplicationsResponse() + post_with_metadata.return_value = ( + apphub_service.ListApplicationsResponse(), + metadata, + ) client.list_applications( request, @@ -22073,6 +22277,7 @@ def 
test_list_applications_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_application_rest_bad_request( @@ -22237,10 +22442,13 @@ def test_create_application_rest_interceptors(null_interceptor): ), mock.patch.object( transports.AppHubRestInterceptor, "post_create_application" ) as post, mock.patch.object( + transports.AppHubRestInterceptor, "post_create_application_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AppHubRestInterceptor, "pre_create_application" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = apphub_service.CreateApplicationRequest.pb( apphub_service.CreateApplicationRequest() ) @@ -22264,6 +22472,7 @@ def test_create_application_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_application( request, @@ -22275,6 +22484,7 @@ def test_create_application_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_application_rest_bad_request( @@ -22365,10 +22575,13 @@ def test_get_application_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AppHubRestInterceptor, "post_get_application" ) as post, mock.patch.object( + transports.AppHubRestInterceptor, "post_get_application_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AppHubRestInterceptor, "pre_get_application" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = apphub_service.GetApplicationRequest.pb( apphub_service.GetApplicationRequest() ) @@ -22392,6 +22605,7 @@ def test_get_application_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = application.Application() + post_with_metadata.return_value = application.Application(), metadata client.get_application( request, @@ -22403,6 +22617,7 @@ def test_get_application_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_application_rest_bad_request( @@ -22575,10 +22790,13 @@ def test_update_application_rest_interceptors(null_interceptor): ), mock.patch.object( transports.AppHubRestInterceptor, "post_update_application" ) as post, mock.patch.object( + transports.AppHubRestInterceptor, "post_update_application_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AppHubRestInterceptor, "pre_update_application" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = apphub_service.UpdateApplicationRequest.pb( apphub_service.UpdateApplicationRequest() ) @@ -22602,6 +22820,7 @@ def test_update_application_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.update_application( request, @@ -22613,6 +22832,7 @@ def test_update_application_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_application_rest_bad_request( @@ -22691,10 +22911,13 @@ def 
test_delete_application_rest_interceptors(null_interceptor): ), mock.patch.object( transports.AppHubRestInterceptor, "post_delete_application" ) as post, mock.patch.object( + transports.AppHubRestInterceptor, "post_delete_application_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AppHubRestInterceptor, "pre_delete_application" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = apphub_service.DeleteApplicationRequest.pb( apphub_service.DeleteApplicationRequest() ) @@ -22718,6 +22941,7 @@ def test_delete_application_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.delete_application( request, @@ -22729,6 +22953,7 @@ def test_delete_application_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationRequest): diff --git a/packages/google-cloud-artifact-registry/CHANGELOG.md b/packages/google-cloud-artifact-registry/CHANGELOG.md index 0a7ce1eafc4c..e920da3d5d43 100644 --- a/packages/google-cloud-artifact-registry/CHANGELOG.md +++ b/packages/google-cloud-artifact-registry/CHANGELOG.md @@ -1,5 +1,13 @@ # Changelog +## [1.15.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-artifact-registry-v1.14.0...google-cloud-artifact-registry-v1.15.0) (2025-02-12) + + +### Features + +* Add REST Interceptors which support reading metadata ([b1c3ce8](https://github.com/googleapis/google-cloud-python/commit/b1c3ce8b271e9d22afabcde054e81dcedae6b0ef)) +* Add support for reading selective GAPIC generation methods from service YAML ([b1c3ce8](https://github.com/googleapis/google-cloud-python/commit/b1c3ce8b271e9d22afabcde054e81dcedae6b0ef)) + ## [1.14.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-artifact-registry-v1.13.1...google-cloud-artifact-registry-v1.14.0) (2024-12-12) diff --git a/packages/google-cloud-artifact-registry/README.rst b/packages/google-cloud-artifact-registry/README.rst index b2513a0873a4..76a39bb2fdfc 100644 --- a/packages/google-cloud-artifact-registry/README.rst +++ b/packages/google-cloud-artifact-registry/README.rst @@ -26,12 +26,12 @@ In order to use this library, you first need to go through the following steps: 1. `Select or create a Cloud Platform project.`_ 2. `Enable billing for your project.`_ 3. `Enable the Artifact Registry.`_ -4. `Setup Authentication.`_ +4. `Set up Authentication.`_ .. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project .. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project .. _Enable the Artifact Registry.: https://cloud.google.com/artifact-registry -.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html +.. 
_Set up Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html Installation ~~~~~~~~~~~~ diff --git a/packages/google-cloud-artifact-registry/google/cloud/artifactregistry/gapic_version.py b/packages/google-cloud-artifact-registry/google/cloud/artifactregistry/gapic_version.py index 2159c8af6f8e..cf18a472a8a2 100644 --- a/packages/google-cloud-artifact-registry/google/cloud/artifactregistry/gapic_version.py +++ b/packages/google-cloud-artifact-registry/google/cloud/artifactregistry/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.14.0" # {x-release-please-version} +__version__ = "1.15.0" # {x-release-please-version} diff --git a/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1/gapic_version.py b/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1/gapic_version.py index 2159c8af6f8e..cf18a472a8a2 100644 --- a/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1/gapic_version.py +++ b/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.14.0" # {x-release-please-version} +__version__ = "1.15.0" # {x-release-please-version} diff --git a/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1/services/artifact_registry/client.py b/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1/services/artifact_registry/client.py index e4bdc840f07d..833490c8c91a 100644 --- a/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1/services/artifact_registry/client.py +++ b/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1/services/artifact_registry/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -877,6 +879,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -6766,16 +6795,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. 
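# Editor's aside (illustration only, not part of the generated diff): with the
# try/except wrappers added in this client, a 401/403/404 raised by a wrapped RPC is
# re-raised with the caller's credential info appended to its details, provided the
# credential exposes get_cred_info() (google-auth>=2.35.0). Roughly, assuming such a
# credential and a hypothetical request:
#
#     try:
#         client.get_location(request)  # any wrapped call
#     except core_exceptions.PermissionDenied as err:  # HTTP 403
#         print(err.details[-1])  # JSON string produced from cred.get_cred_info()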
+ return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_location( self, @@ -6821,16 +6854,20 @@ def get_location( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def list_locations( self, @@ -6876,16 +6913,20 @@ def list_locations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1/services/artifact_registry/transports/rest.py b/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1/services/artifact_registry/transports/rest.py index c6f825e20696..587f7a53b760 100644 --- a/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1/services/artifact_registry/transports/rest.py +++ b/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1/services/artifact_registry/transports/rest.py @@ -500,12 +500,35 @@ def post_batch_delete_versions( ) -> operations_pb2.Operation: """Post-rpc interceptor for batch_delete_versions - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_batch_delete_versions_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ArtifactRegistry server but before - it is returned to user code. + it is returned to user code. This `post_batch_delete_versions` interceptor runs + before the `post_batch_delete_versions_with_metadata` interceptor. """ return response + def post_batch_delete_versions_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for batch_delete_versions + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ArtifactRegistry server but before it is returned to user code. + + We recommend only using this `post_batch_delete_versions_with_metadata` + interceptor in new development instead of the `post_batch_delete_versions` interceptor. + When both interceptors are used, this `post_batch_delete_versions_with_metadata` interceptor runs after the + `post_batch_delete_versions` interceptor. The (possibly modified) response returned by + `post_batch_delete_versions` will be passed to + `post_batch_delete_versions_with_metadata`. 
+ """ + return response, metadata + def pre_create_attachment( self, request: gda_attachment.CreateAttachmentRequest, @@ -525,12 +548,35 @@ def post_create_attachment( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_attachment - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_attachment_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ArtifactRegistry server but before - it is returned to user code. + it is returned to user code. This `post_create_attachment` interceptor runs + before the `post_create_attachment_with_metadata` interceptor. """ return response + def post_create_attachment_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_attachment + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ArtifactRegistry server but before it is returned to user code. + + We recommend only using this `post_create_attachment_with_metadata` + interceptor in new development instead of the `post_create_attachment` interceptor. + When both interceptors are used, this `post_create_attachment_with_metadata` interceptor runs after the + `post_create_attachment` interceptor. The (possibly modified) response returned by + `post_create_attachment` will be passed to + `post_create_attachment_with_metadata`. + """ + return response, metadata + def pre_create_repository( self, request: gda_repository.CreateRepositoryRequest, @@ -550,12 +596,35 @@ def post_create_repository( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_repository - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_repository_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ArtifactRegistry server but before - it is returned to user code. + it is returned to user code. This `post_create_repository` interceptor runs + before the `post_create_repository_with_metadata` interceptor. """ return response + def post_create_repository_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_repository + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ArtifactRegistry server but before it is returned to user code. + + We recommend only using this `post_create_repository_with_metadata` + interceptor in new development instead of the `post_create_repository` interceptor. + When both interceptors are used, this `post_create_repository_with_metadata` interceptor runs after the + `post_create_repository` interceptor. The (possibly modified) response returned by + `post_create_repository` will be passed to + `post_create_repository_with_metadata`. + """ + return response, metadata + def pre_create_rule( self, request: gda_rule.CreateRuleRequest, @@ -571,12 +640,33 @@ def pre_create_rule( def post_create_rule(self, response: gda_rule.Rule) -> gda_rule.Rule: """Post-rpc interceptor for create_rule - Override in a subclass to manipulate the response + DEPRECATED. 
Please use the `post_create_rule_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ArtifactRegistry server but before - it is returned to user code. + it is returned to user code. This `post_create_rule` interceptor runs + before the `post_create_rule_with_metadata` interceptor. """ return response + def post_create_rule_with_metadata( + self, response: gda_rule.Rule, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[gda_rule.Rule, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_rule + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ArtifactRegistry server but before it is returned to user code. + + We recommend only using this `post_create_rule_with_metadata` + interceptor in new development instead of the `post_create_rule` interceptor. + When both interceptors are used, this `post_create_rule_with_metadata` interceptor runs after the + `post_create_rule` interceptor. The (possibly modified) response returned by + `post_create_rule` will be passed to + `post_create_rule_with_metadata`. + """ + return response, metadata + def pre_create_tag( self, request: gda_tag.CreateTagRequest, @@ -592,12 +682,33 @@ def pre_create_tag( def post_create_tag(self, response: gda_tag.Tag) -> gda_tag.Tag: """Post-rpc interceptor for create_tag - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_tag_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ArtifactRegistry server but before - it is returned to user code. + it is returned to user code. This `post_create_tag` interceptor runs + before the `post_create_tag_with_metadata` interceptor. """ return response + def post_create_tag_with_metadata( + self, response: gda_tag.Tag, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[gda_tag.Tag, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_tag + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ArtifactRegistry server but before it is returned to user code. + + We recommend only using this `post_create_tag_with_metadata` + interceptor in new development instead of the `post_create_tag` interceptor. + When both interceptors are used, this `post_create_tag_with_metadata` interceptor runs after the + `post_create_tag` interceptor. The (possibly modified) response returned by + `post_create_tag` will be passed to + `post_create_tag_with_metadata`. + """ + return response, metadata + def pre_delete_attachment( self, request: attachment.DeleteAttachmentRequest, @@ -617,12 +728,35 @@ def post_delete_attachment( ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_attachment - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_attachment_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ArtifactRegistry server but before - it is returned to user code. + it is returned to user code. This `post_delete_attachment` interceptor runs + before the `post_delete_attachment_with_metadata` interceptor. 
""" return response + def post_delete_attachment_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_attachment + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ArtifactRegistry server but before it is returned to user code. + + We recommend only using this `post_delete_attachment_with_metadata` + interceptor in new development instead of the `post_delete_attachment` interceptor. + When both interceptors are used, this `post_delete_attachment_with_metadata` interceptor runs after the + `post_delete_attachment` interceptor. The (possibly modified) response returned by + `post_delete_attachment` will be passed to + `post_delete_attachment_with_metadata`. + """ + return response, metadata + def pre_delete_file( self, request: file.DeleteFileRequest, @@ -640,12 +774,35 @@ def post_delete_file( ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_file - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_file_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ArtifactRegistry server but before - it is returned to user code. + it is returned to user code. This `post_delete_file` interceptor runs + before the `post_delete_file_with_metadata` interceptor. """ return response + def post_delete_file_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_file + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ArtifactRegistry server but before it is returned to user code. + + We recommend only using this `post_delete_file_with_metadata` + interceptor in new development instead of the `post_delete_file` interceptor. + When both interceptors are used, this `post_delete_file_with_metadata` interceptor runs after the + `post_delete_file` interceptor. The (possibly modified) response returned by + `post_delete_file` will be passed to + `post_delete_file_with_metadata`. + """ + return response, metadata + def pre_delete_package( self, request: package.DeletePackageRequest, @@ -663,12 +820,35 @@ def post_delete_package( ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_package - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_package_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ArtifactRegistry server but before - it is returned to user code. + it is returned to user code. This `post_delete_package` interceptor runs + before the `post_delete_package_with_metadata` interceptor. """ return response + def post_delete_package_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_package + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ArtifactRegistry server but before it is returned to user code. 
+ + We recommend only using this `post_delete_package_with_metadata` + interceptor in new development instead of the `post_delete_package` interceptor. + When both interceptors are used, this `post_delete_package_with_metadata` interceptor runs after the + `post_delete_package` interceptor. The (possibly modified) response returned by + `post_delete_package` will be passed to + `post_delete_package_with_metadata`. + """ + return response, metadata + def pre_delete_repository( self, request: repository.DeleteRepositoryRequest, @@ -688,12 +868,35 @@ def post_delete_repository( ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_repository - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_repository_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ArtifactRegistry server but before - it is returned to user code. + it is returned to user code. This `post_delete_repository` interceptor runs + before the `post_delete_repository_with_metadata` interceptor. """ return response + def post_delete_repository_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_repository + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ArtifactRegistry server but before it is returned to user code. + + We recommend only using this `post_delete_repository_with_metadata` + interceptor in new development instead of the `post_delete_repository` interceptor. + When both interceptors are used, this `post_delete_repository_with_metadata` interceptor runs after the + `post_delete_repository` interceptor. The (possibly modified) response returned by + `post_delete_repository` will be passed to + `post_delete_repository_with_metadata`. + """ + return response, metadata + def pre_delete_rule( self, request: rule.DeleteRuleRequest, @@ -735,12 +938,35 @@ def post_delete_version( ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_version - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_version_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ArtifactRegistry server but before - it is returned to user code. + it is returned to user code. This `post_delete_version` interceptor runs + before the `post_delete_version_with_metadata` interceptor. """ return response + def post_delete_version_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_version + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ArtifactRegistry server but before it is returned to user code. + + We recommend only using this `post_delete_version_with_metadata` + interceptor in new development instead of the `post_delete_version` interceptor. + When both interceptors are used, this `post_delete_version_with_metadata` interceptor runs after the + `post_delete_version` interceptor. The (possibly modified) response returned by + `post_delete_version` will be passed to + `post_delete_version_with_metadata`. 
+ """ + return response, metadata + def pre_get_attachment( self, request: attachment.GetAttachmentRequest, @@ -760,12 +986,35 @@ def post_get_attachment( ) -> attachment.Attachment: """Post-rpc interceptor for get_attachment - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_attachment_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ArtifactRegistry server but before - it is returned to user code. + it is returned to user code. This `post_get_attachment` interceptor runs + before the `post_get_attachment_with_metadata` interceptor. """ return response + def post_get_attachment_with_metadata( + self, + response: attachment.Attachment, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[attachment.Attachment, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_attachment + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ArtifactRegistry server but before it is returned to user code. + + We recommend only using this `post_get_attachment_with_metadata` + interceptor in new development instead of the `post_get_attachment` interceptor. + When both interceptors are used, this `post_get_attachment_with_metadata` interceptor runs after the + `post_get_attachment` interceptor. The (possibly modified) response returned by + `post_get_attachment` will be passed to + `post_get_attachment_with_metadata`. + """ + return response, metadata + def pre_get_docker_image( self, request: artifact.GetDockerImageRequest, @@ -783,12 +1032,35 @@ def post_get_docker_image( ) -> artifact.DockerImage: """Post-rpc interceptor for get_docker_image - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_docker_image_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ArtifactRegistry server but before - it is returned to user code. + it is returned to user code. This `post_get_docker_image` interceptor runs + before the `post_get_docker_image_with_metadata` interceptor. """ return response + def post_get_docker_image_with_metadata( + self, + response: artifact.DockerImage, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[artifact.DockerImage, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_docker_image + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ArtifactRegistry server but before it is returned to user code. + + We recommend only using this `post_get_docker_image_with_metadata` + interceptor in new development instead of the `post_get_docker_image` interceptor. + When both interceptors are used, this `post_get_docker_image_with_metadata` interceptor runs after the + `post_get_docker_image` interceptor. The (possibly modified) response returned by + `post_get_docker_image` will be passed to + `post_get_docker_image_with_metadata`. + """ + return response, metadata + def pre_get_file( self, request: file.GetFileRequest, @@ -804,12 +1076,33 @@ def pre_get_file( def post_get_file(self, response: file.File) -> file.File: """Post-rpc interceptor for get_file - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_file_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response after it is returned by the ArtifactRegistry server but before - it is returned to user code. + it is returned to user code. This `post_get_file` interceptor runs + before the `post_get_file_with_metadata` interceptor. """ return response + def post_get_file_with_metadata( + self, response: file.File, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[file.File, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_file + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ArtifactRegistry server but before it is returned to user code. + + We recommend only using this `post_get_file_with_metadata` + interceptor in new development instead of the `post_get_file` interceptor. + When both interceptors are used, this `post_get_file_with_metadata` interceptor runs after the + `post_get_file` interceptor. The (possibly modified) response returned by + `post_get_file` will be passed to + `post_get_file_with_metadata`. + """ + return response, metadata + def pre_get_iam_policy( self, request: iam_policy_pb2.GetIamPolicyRequest, @@ -827,12 +1120,35 @@ def pre_get_iam_policy( def post_get_iam_policy(self, response: policy_pb2.Policy) -> policy_pb2.Policy: """Post-rpc interceptor for get_iam_policy - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_iam_policy_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ArtifactRegistry server but before - it is returned to user code. + it is returned to user code. This `post_get_iam_policy` interceptor runs + before the `post_get_iam_policy_with_metadata` interceptor. """ return response + def post_get_iam_policy_with_metadata( + self, + response: policy_pb2.Policy, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[policy_pb2.Policy, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_iam_policy + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ArtifactRegistry server but before it is returned to user code. + + We recommend only using this `post_get_iam_policy_with_metadata` + interceptor in new development instead of the `post_get_iam_policy` interceptor. + When both interceptors are used, this `post_get_iam_policy_with_metadata` interceptor runs after the + `post_get_iam_policy` interceptor. The (possibly modified) response returned by + `post_get_iam_policy` will be passed to + `post_get_iam_policy_with_metadata`. + """ + return response, metadata + def pre_get_maven_artifact( self, request: artifact.GetMavenArtifactRequest, @@ -852,12 +1168,35 @@ def post_get_maven_artifact( ) -> artifact.MavenArtifact: """Post-rpc interceptor for get_maven_artifact - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_maven_artifact_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ArtifactRegistry server but before - it is returned to user code. + it is returned to user code. This `post_get_maven_artifact` interceptor runs + before the `post_get_maven_artifact_with_metadata` interceptor. 
""" return response + def post_get_maven_artifact_with_metadata( + self, + response: artifact.MavenArtifact, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[artifact.MavenArtifact, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_maven_artifact + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ArtifactRegistry server but before it is returned to user code. + + We recommend only using this `post_get_maven_artifact_with_metadata` + interceptor in new development instead of the `post_get_maven_artifact` interceptor. + When both interceptors are used, this `post_get_maven_artifact_with_metadata` interceptor runs after the + `post_get_maven_artifact` interceptor. The (possibly modified) response returned by + `post_get_maven_artifact` will be passed to + `post_get_maven_artifact_with_metadata`. + """ + return response, metadata + def pre_get_npm_package( self, request: artifact.GetNpmPackageRequest, @@ -875,12 +1214,35 @@ def post_get_npm_package( ) -> artifact.NpmPackage: """Post-rpc interceptor for get_npm_package - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_npm_package_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ArtifactRegistry server but before - it is returned to user code. + it is returned to user code. This `post_get_npm_package` interceptor runs + before the `post_get_npm_package_with_metadata` interceptor. """ return response + def post_get_npm_package_with_metadata( + self, + response: artifact.NpmPackage, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[artifact.NpmPackage, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_npm_package + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ArtifactRegistry server but before it is returned to user code. + + We recommend only using this `post_get_npm_package_with_metadata` + interceptor in new development instead of the `post_get_npm_package` interceptor. + When both interceptors are used, this `post_get_npm_package_with_metadata` interceptor runs after the + `post_get_npm_package` interceptor. The (possibly modified) response returned by + `post_get_npm_package` will be passed to + `post_get_npm_package_with_metadata`. + """ + return response, metadata + def pre_get_package( self, request: package.GetPackageRequest, @@ -896,12 +1258,35 @@ def pre_get_package( def post_get_package(self, response: package.Package) -> package.Package: """Post-rpc interceptor for get_package - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_package_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ArtifactRegistry server but before - it is returned to user code. + it is returned to user code. This `post_get_package` interceptor runs + before the `post_get_package_with_metadata` interceptor. """ return response + def post_get_package_with_metadata( + self, + response: package.Package, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[package.Package, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_package + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ArtifactRegistry server but before it is returned to user code. 
+ + We recommend only using this `post_get_package_with_metadata` + interceptor in new development instead of the `post_get_package` interceptor. + When both interceptors are used, this `post_get_package_with_metadata` interceptor runs after the + `post_get_package` interceptor. The (possibly modified) response returned by + `post_get_package` will be passed to + `post_get_package_with_metadata`. + """ + return response, metadata + def pre_get_project_settings( self, request: settings.GetProjectSettingsRequest, @@ -921,12 +1306,35 @@ def post_get_project_settings( ) -> settings.ProjectSettings: """Post-rpc interceptor for get_project_settings - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_project_settings_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ArtifactRegistry server but before - it is returned to user code. + it is returned to user code. This `post_get_project_settings` interceptor runs + before the `post_get_project_settings_with_metadata` interceptor. """ return response + def post_get_project_settings_with_metadata( + self, + response: settings.ProjectSettings, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[settings.ProjectSettings, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_project_settings + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ArtifactRegistry server but before it is returned to user code. + + We recommend only using this `post_get_project_settings_with_metadata` + interceptor in new development instead of the `post_get_project_settings` interceptor. + When both interceptors are used, this `post_get_project_settings_with_metadata` interceptor runs after the + `post_get_project_settings` interceptor. The (possibly modified) response returned by + `post_get_project_settings` will be passed to + `post_get_project_settings_with_metadata`. + """ + return response, metadata + def pre_get_python_package( self, request: artifact.GetPythonPackageRequest, @@ -946,12 +1354,35 @@ def post_get_python_package( ) -> artifact.PythonPackage: """Post-rpc interceptor for get_python_package - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_python_package_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ArtifactRegistry server but before - it is returned to user code. + it is returned to user code. This `post_get_python_package` interceptor runs + before the `post_get_python_package_with_metadata` interceptor. """ return response + def post_get_python_package_with_metadata( + self, + response: artifact.PythonPackage, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[artifact.PythonPackage, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_python_package + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ArtifactRegistry server but before it is returned to user code. + + We recommend only using this `post_get_python_package_with_metadata` + interceptor in new development instead of the `post_get_python_package` interceptor. + When both interceptors are used, this `post_get_python_package_with_metadata` interceptor runs after the + `post_get_python_package` interceptor. 
The (possibly modified) response returned by + `post_get_python_package` will be passed to + `post_get_python_package_with_metadata`. + """ + return response, metadata + def pre_get_repository( self, request: repository.GetRepositoryRequest, @@ -971,12 +1402,35 @@ def post_get_repository( ) -> repository.Repository: """Post-rpc interceptor for get_repository - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_repository_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ArtifactRegistry server but before - it is returned to user code. + it is returned to user code. This `post_get_repository` interceptor runs + before the `post_get_repository_with_metadata` interceptor. """ return response + def post_get_repository_with_metadata( + self, + response: repository.Repository, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[repository.Repository, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_repository + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ArtifactRegistry server but before it is returned to user code. + + We recommend only using this `post_get_repository_with_metadata` + interceptor in new development instead of the `post_get_repository` interceptor. + When both interceptors are used, this `post_get_repository_with_metadata` interceptor runs after the + `post_get_repository` interceptor. The (possibly modified) response returned by + `post_get_repository` will be passed to + `post_get_repository_with_metadata`. + """ + return response, metadata + def pre_get_rule( self, request: rule.GetRuleRequest, @@ -992,12 +1446,33 @@ def pre_get_rule( def post_get_rule(self, response: rule.Rule) -> rule.Rule: """Post-rpc interceptor for get_rule - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_rule_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ArtifactRegistry server but before - it is returned to user code. + it is returned to user code. This `post_get_rule` interceptor runs + before the `post_get_rule_with_metadata` interceptor. """ return response + def post_get_rule_with_metadata( + self, response: rule.Rule, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[rule.Rule, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_rule + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ArtifactRegistry server but before it is returned to user code. + + We recommend only using this `post_get_rule_with_metadata` + interceptor in new development instead of the `post_get_rule` interceptor. + When both interceptors are used, this `post_get_rule_with_metadata` interceptor runs after the + `post_get_rule` interceptor. The (possibly modified) response returned by + `post_get_rule` will be passed to + `post_get_rule_with_metadata`. + """ + return response, metadata + def pre_get_tag( self, request: tag.GetTagRequest, @@ -1013,12 +1488,33 @@ def pre_get_tag( def post_get_tag(self, response: tag.Tag) -> tag.Tag: """Post-rpc interceptor for get_tag - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_tag_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response after it is returned by the ArtifactRegistry server but before - it is returned to user code. + it is returned to user code. This `post_get_tag` interceptor runs + before the `post_get_tag_with_metadata` interceptor. """ return response + def post_get_tag_with_metadata( + self, response: tag.Tag, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[tag.Tag, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_tag + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ArtifactRegistry server but before it is returned to user code. + + We recommend only using this `post_get_tag_with_metadata` + interceptor in new development instead of the `post_get_tag` interceptor. + When both interceptors are used, this `post_get_tag_with_metadata` interceptor runs after the + `post_get_tag` interceptor. The (possibly modified) response returned by + `post_get_tag` will be passed to + `post_get_tag_with_metadata`. + """ + return response, metadata + def pre_get_version( self, request: version.GetVersionRequest, @@ -1034,12 +1530,35 @@ def pre_get_version( def post_get_version(self, response: version.Version) -> version.Version: """Post-rpc interceptor for get_version - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_version_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ArtifactRegistry server but before - it is returned to user code. + it is returned to user code. This `post_get_version` interceptor runs + before the `post_get_version_with_metadata` interceptor. """ return response + def post_get_version_with_metadata( + self, + response: version.Version, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[version.Version, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_version + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ArtifactRegistry server but before it is returned to user code. + + We recommend only using this `post_get_version_with_metadata` + interceptor in new development instead of the `post_get_version` interceptor. + When both interceptors are used, this `post_get_version_with_metadata` interceptor runs after the + `post_get_version` interceptor. The (possibly modified) response returned by + `post_get_version` will be passed to + `post_get_version_with_metadata`. + """ + return response, metadata + def pre_get_vpcsc_config( self, request: vpcsc_config.GetVPCSCConfigRequest, @@ -1059,12 +1578,35 @@ def post_get_vpcsc_config( ) -> vpcsc_config.VPCSCConfig: """Post-rpc interceptor for get_vpcsc_config - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_vpcsc_config_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ArtifactRegistry server but before - it is returned to user code. + it is returned to user code. This `post_get_vpcsc_config` interceptor runs + before the `post_get_vpcsc_config_with_metadata` interceptor. 
""" return response + def post_get_vpcsc_config_with_metadata( + self, + response: vpcsc_config.VPCSCConfig, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[vpcsc_config.VPCSCConfig, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_vpcsc_config + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ArtifactRegistry server but before it is returned to user code. + + We recommend only using this `post_get_vpcsc_config_with_metadata` + interceptor in new development instead of the `post_get_vpcsc_config` interceptor. + When both interceptors are used, this `post_get_vpcsc_config_with_metadata` interceptor runs after the + `post_get_vpcsc_config` interceptor. The (possibly modified) response returned by + `post_get_vpcsc_config` will be passed to + `post_get_vpcsc_config_with_metadata`. + """ + return response, metadata + def pre_import_apt_artifacts( self, request: apt_artifact.ImportAptArtifactsRequest, @@ -1084,12 +1626,35 @@ def post_import_apt_artifacts( ) -> operations_pb2.Operation: """Post-rpc interceptor for import_apt_artifacts - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_import_apt_artifacts_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ArtifactRegistry server but before - it is returned to user code. + it is returned to user code. This `post_import_apt_artifacts` interceptor runs + before the `post_import_apt_artifacts_with_metadata` interceptor. """ return response + def post_import_apt_artifacts_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for import_apt_artifacts + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ArtifactRegistry server but before it is returned to user code. + + We recommend only using this `post_import_apt_artifacts_with_metadata` + interceptor in new development instead of the `post_import_apt_artifacts` interceptor. + When both interceptors are used, this `post_import_apt_artifacts_with_metadata` interceptor runs after the + `post_import_apt_artifacts` interceptor. The (possibly modified) response returned by + `post_import_apt_artifacts` will be passed to + `post_import_apt_artifacts_with_metadata`. + """ + return response, metadata + def pre_import_yum_artifacts( self, request: yum_artifact.ImportYumArtifactsRequest, @@ -1109,12 +1674,35 @@ def post_import_yum_artifacts( ) -> operations_pb2.Operation: """Post-rpc interceptor for import_yum_artifacts - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_import_yum_artifacts_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ArtifactRegistry server but before - it is returned to user code. + it is returned to user code. This `post_import_yum_artifacts` interceptor runs + before the `post_import_yum_artifacts_with_metadata` interceptor. 
""" return response + def post_import_yum_artifacts_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for import_yum_artifacts + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ArtifactRegistry server but before it is returned to user code. + + We recommend only using this `post_import_yum_artifacts_with_metadata` + interceptor in new development instead of the `post_import_yum_artifacts` interceptor. + When both interceptors are used, this `post_import_yum_artifacts_with_metadata` interceptor runs after the + `post_import_yum_artifacts` interceptor. The (possibly modified) response returned by + `post_import_yum_artifacts` will be passed to + `post_import_yum_artifacts_with_metadata`. + """ + return response, metadata + def pre_list_attachments( self, request: attachment.ListAttachmentsRequest, @@ -1134,11 +1722,36 @@ def post_list_attachments( ) -> attachment.ListAttachmentsResponse: """Post-rpc interceptor for list_attachments - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_attachments_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ArtifactRegistry server but before - it is returned to user code. + it is returned to user code. This `post_list_attachments` interceptor runs + before the `post_list_attachments_with_metadata` interceptor. + """ + return response + + def post_list_attachments_with_metadata( + self, + response: attachment.ListAttachmentsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + attachment.ListAttachmentsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_attachments + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ArtifactRegistry server but before it is returned to user code. + + We recommend only using this `post_list_attachments_with_metadata` + interceptor in new development instead of the `post_list_attachments` interceptor. + When both interceptors are used, this `post_list_attachments_with_metadata` interceptor runs after the + `post_list_attachments` interceptor. The (possibly modified) response returned by + `post_list_attachments` will be passed to + `post_list_attachments_with_metadata`. """ - return response + return response, metadata def pre_list_docker_images( self, @@ -1159,12 +1772,37 @@ def post_list_docker_images( ) -> artifact.ListDockerImagesResponse: """Post-rpc interceptor for list_docker_images - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_docker_images_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ArtifactRegistry server but before - it is returned to user code. + it is returned to user code. This `post_list_docker_images` interceptor runs + before the `post_list_docker_images_with_metadata` interceptor. 
""" return response + def post_list_docker_images_with_metadata( + self, + response: artifact.ListDockerImagesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + artifact.ListDockerImagesResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_docker_images + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ArtifactRegistry server but before it is returned to user code. + + We recommend only using this `post_list_docker_images_with_metadata` + interceptor in new development instead of the `post_list_docker_images` interceptor. + When both interceptors are used, this `post_list_docker_images_with_metadata` interceptor runs after the + `post_list_docker_images` interceptor. The (possibly modified) response returned by + `post_list_docker_images` will be passed to + `post_list_docker_images_with_metadata`. + """ + return response, metadata + def pre_list_files( self, request: file.ListFilesRequest, @@ -1182,12 +1820,35 @@ def post_list_files( ) -> file.ListFilesResponse: """Post-rpc interceptor for list_files - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_files_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ArtifactRegistry server but before - it is returned to user code. + it is returned to user code. This `post_list_files` interceptor runs + before the `post_list_files_with_metadata` interceptor. """ return response + def post_list_files_with_metadata( + self, + response: file.ListFilesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[file.ListFilesResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_files + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ArtifactRegistry server but before it is returned to user code. + + We recommend only using this `post_list_files_with_metadata` + interceptor in new development instead of the `post_list_files` interceptor. + When both interceptors are used, this `post_list_files_with_metadata` interceptor runs after the + `post_list_files` interceptor. The (possibly modified) response returned by + `post_list_files` will be passed to + `post_list_files_with_metadata`. + """ + return response, metadata + def pre_list_maven_artifacts( self, request: artifact.ListMavenArtifactsRequest, @@ -1207,12 +1868,37 @@ def post_list_maven_artifacts( ) -> artifact.ListMavenArtifactsResponse: """Post-rpc interceptor for list_maven_artifacts - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_maven_artifacts_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ArtifactRegistry server but before - it is returned to user code. + it is returned to user code. This `post_list_maven_artifacts` interceptor runs + before the `post_list_maven_artifacts_with_metadata` interceptor. 
""" return response + def post_list_maven_artifacts_with_metadata( + self, + response: artifact.ListMavenArtifactsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + artifact.ListMavenArtifactsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_maven_artifacts + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ArtifactRegistry server but before it is returned to user code. + + We recommend only using this `post_list_maven_artifacts_with_metadata` + interceptor in new development instead of the `post_list_maven_artifacts` interceptor. + When both interceptors are used, this `post_list_maven_artifacts_with_metadata` interceptor runs after the + `post_list_maven_artifacts` interceptor. The (possibly modified) response returned by + `post_list_maven_artifacts` will be passed to + `post_list_maven_artifacts_with_metadata`. + """ + return response, metadata + def pre_list_npm_packages( self, request: artifact.ListNpmPackagesRequest, @@ -1232,12 +1918,37 @@ def post_list_npm_packages( ) -> artifact.ListNpmPackagesResponse: """Post-rpc interceptor for list_npm_packages - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_npm_packages_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ArtifactRegistry server but before - it is returned to user code. + it is returned to user code. This `post_list_npm_packages` interceptor runs + before the `post_list_npm_packages_with_metadata` interceptor. """ return response + def post_list_npm_packages_with_metadata( + self, + response: artifact.ListNpmPackagesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + artifact.ListNpmPackagesResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_npm_packages + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ArtifactRegistry server but before it is returned to user code. + + We recommend only using this `post_list_npm_packages_with_metadata` + interceptor in new development instead of the `post_list_npm_packages` interceptor. + When both interceptors are used, this `post_list_npm_packages_with_metadata` interceptor runs after the + `post_list_npm_packages` interceptor. The (possibly modified) response returned by + `post_list_npm_packages` will be passed to + `post_list_npm_packages_with_metadata`. + """ + return response, metadata + def pre_list_packages( self, request: package.ListPackagesRequest, @@ -1255,12 +1966,35 @@ def post_list_packages( ) -> package.ListPackagesResponse: """Post-rpc interceptor for list_packages - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_packages_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ArtifactRegistry server but before - it is returned to user code. + it is returned to user code. This `post_list_packages` interceptor runs + before the `post_list_packages_with_metadata` interceptor. 
""" return response + def post_list_packages_with_metadata( + self, + response: package.ListPackagesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[package.ListPackagesResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_packages + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ArtifactRegistry server but before it is returned to user code. + + We recommend only using this `post_list_packages_with_metadata` + interceptor in new development instead of the `post_list_packages` interceptor. + When both interceptors are used, this `post_list_packages_with_metadata` interceptor runs after the + `post_list_packages` interceptor. The (possibly modified) response returned by + `post_list_packages` will be passed to + `post_list_packages_with_metadata`. + """ + return response, metadata + def pre_list_python_packages( self, request: artifact.ListPythonPackagesRequest, @@ -1280,12 +2014,37 @@ def post_list_python_packages( ) -> artifact.ListPythonPackagesResponse: """Post-rpc interceptor for list_python_packages - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_python_packages_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ArtifactRegistry server but before - it is returned to user code. + it is returned to user code. This `post_list_python_packages` interceptor runs + before the `post_list_python_packages_with_metadata` interceptor. """ return response + def post_list_python_packages_with_metadata( + self, + response: artifact.ListPythonPackagesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + artifact.ListPythonPackagesResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_python_packages + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ArtifactRegistry server but before it is returned to user code. + + We recommend only using this `post_list_python_packages_with_metadata` + interceptor in new development instead of the `post_list_python_packages` interceptor. + When both interceptors are used, this `post_list_python_packages_with_metadata` interceptor runs after the + `post_list_python_packages` interceptor. The (possibly modified) response returned by + `post_list_python_packages` will be passed to + `post_list_python_packages_with_metadata`. + """ + return response, metadata + def pre_list_repositories( self, request: repository.ListRepositoriesRequest, @@ -1305,12 +2064,37 @@ def post_list_repositories( ) -> repository.ListRepositoriesResponse: """Post-rpc interceptor for list_repositories - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_repositories_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ArtifactRegistry server but before - it is returned to user code. + it is returned to user code. This `post_list_repositories` interceptor runs + before the `post_list_repositories_with_metadata` interceptor. 
""" return response + def post_list_repositories_with_metadata( + self, + response: repository.ListRepositoriesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + repository.ListRepositoriesResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_repositories + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ArtifactRegistry server but before it is returned to user code. + + We recommend only using this `post_list_repositories_with_metadata` + interceptor in new development instead of the `post_list_repositories` interceptor. + When both interceptors are used, this `post_list_repositories_with_metadata` interceptor runs after the + `post_list_repositories` interceptor. The (possibly modified) response returned by + `post_list_repositories` will be passed to + `post_list_repositories_with_metadata`. + """ + return response, metadata + def pre_list_rules( self, request: rule.ListRulesRequest, @@ -1328,12 +2112,35 @@ def post_list_rules( ) -> rule.ListRulesResponse: """Post-rpc interceptor for list_rules - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_rules_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ArtifactRegistry server but before - it is returned to user code. + it is returned to user code. This `post_list_rules` interceptor runs + before the `post_list_rules_with_metadata` interceptor. """ return response + def post_list_rules_with_metadata( + self, + response: rule.ListRulesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[rule.ListRulesResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_rules + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ArtifactRegistry server but before it is returned to user code. + + We recommend only using this `post_list_rules_with_metadata` + interceptor in new development instead of the `post_list_rules` interceptor. + When both interceptors are used, this `post_list_rules_with_metadata` interceptor runs after the + `post_list_rules` interceptor. The (possibly modified) response returned by + `post_list_rules` will be passed to + `post_list_rules_with_metadata`. + """ + return response, metadata + def pre_list_tags( self, request: tag.ListTagsRequest, @@ -1349,12 +2156,35 @@ def pre_list_tags( def post_list_tags(self, response: tag.ListTagsResponse) -> tag.ListTagsResponse: """Post-rpc interceptor for list_tags - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_tags_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ArtifactRegistry server but before - it is returned to user code. + it is returned to user code. This `post_list_tags` interceptor runs + before the `post_list_tags_with_metadata` interceptor. """ return response + def post_list_tags_with_metadata( + self, + response: tag.ListTagsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[tag.ListTagsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_tags + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ArtifactRegistry server but before it is returned to user code. 
+ + We recommend only using this `post_list_tags_with_metadata` + interceptor in new development instead of the `post_list_tags` interceptor. + When both interceptors are used, this `post_list_tags_with_metadata` interceptor runs after the + `post_list_tags` interceptor. The (possibly modified) response returned by + `post_list_tags` will be passed to + `post_list_tags_with_metadata`. + """ + return response, metadata + def pre_list_versions( self, request: version.ListVersionsRequest, @@ -1372,12 +2202,35 @@ def post_list_versions( ) -> version.ListVersionsResponse: """Post-rpc interceptor for list_versions - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_versions_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ArtifactRegistry server but before - it is returned to user code. + it is returned to user code. This `post_list_versions` interceptor runs + before the `post_list_versions_with_metadata` interceptor. """ return response + def post_list_versions_with_metadata( + self, + response: version.ListVersionsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[version.ListVersionsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_versions + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ArtifactRegistry server but before it is returned to user code. + + We recommend only using this `post_list_versions_with_metadata` + interceptor in new development instead of the `post_list_versions` interceptor. + When both interceptors are used, this `post_list_versions_with_metadata` interceptor runs after the + `post_list_versions` interceptor. The (possibly modified) response returned by + `post_list_versions` will be passed to + `post_list_versions_with_metadata`. + """ + return response, metadata + def pre_set_iam_policy( self, request: iam_policy_pb2.SetIamPolicyRequest, @@ -1395,12 +2248,35 @@ def pre_set_iam_policy( def post_set_iam_policy(self, response: policy_pb2.Policy) -> policy_pb2.Policy: """Post-rpc interceptor for set_iam_policy - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_set_iam_policy_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ArtifactRegistry server but before - it is returned to user code. + it is returned to user code. This `post_set_iam_policy` interceptor runs + before the `post_set_iam_policy_with_metadata` interceptor. """ return response + def post_set_iam_policy_with_metadata( + self, + response: policy_pb2.Policy, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[policy_pb2.Policy, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for set_iam_policy + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ArtifactRegistry server but before it is returned to user code. + + We recommend only using this `post_set_iam_policy_with_metadata` + interceptor in new development instead of the `post_set_iam_policy` interceptor. + When both interceptors are used, this `post_set_iam_policy_with_metadata` interceptor runs after the + `post_set_iam_policy` interceptor. The (possibly modified) response returned by + `post_set_iam_policy` will be passed to + `post_set_iam_policy_with_metadata`. 
+ """ + return response, metadata + def pre_test_iam_permissions( self, request: iam_policy_pb2.TestIamPermissionsRequest, @@ -1421,12 +2297,38 @@ def post_test_iam_permissions( ) -> iam_policy_pb2.TestIamPermissionsResponse: """Post-rpc interceptor for test_iam_permissions - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_test_iam_permissions_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ArtifactRegistry server but before - it is returned to user code. + it is returned to user code. This `post_test_iam_permissions` interceptor runs + before the `post_test_iam_permissions_with_metadata` interceptor. """ return response + def post_test_iam_permissions_with_metadata( + self, + response: iam_policy_pb2.TestIamPermissionsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + iam_policy_pb2.TestIamPermissionsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for test_iam_permissions + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ArtifactRegistry server but before it is returned to user code. + + We recommend only using this `post_test_iam_permissions_with_metadata` + interceptor in new development instead of the `post_test_iam_permissions` interceptor. + When both interceptors are used, this `post_test_iam_permissions_with_metadata` interceptor runs after the + `post_test_iam_permissions` interceptor. The (possibly modified) response returned by + `post_test_iam_permissions` will be passed to + `post_test_iam_permissions_with_metadata`. + """ + return response, metadata + def pre_update_file( self, request: gda_file.UpdateFileRequest, @@ -1442,12 +2344,33 @@ def pre_update_file( def post_update_file(self, response: gda_file.File) -> gda_file.File: """Post-rpc interceptor for update_file - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_file_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ArtifactRegistry server but before - it is returned to user code. + it is returned to user code. This `post_update_file` interceptor runs + before the `post_update_file_with_metadata` interceptor. """ return response + def post_update_file_with_metadata( + self, response: gda_file.File, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[gda_file.File, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_file + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ArtifactRegistry server but before it is returned to user code. + + We recommend only using this `post_update_file_with_metadata` + interceptor in new development instead of the `post_update_file` interceptor. + When both interceptors are used, this `post_update_file_with_metadata` interceptor runs after the + `post_update_file` interceptor. The (possibly modified) response returned by + `post_update_file` will be passed to + `post_update_file_with_metadata`. + """ + return response, metadata + def pre_update_package( self, request: gda_package.UpdatePackageRequest, @@ -1465,12 +2388,35 @@ def pre_update_package( def post_update_package(self, response: gda_package.Package) -> gda_package.Package: """Post-rpc interceptor for update_package - Override in a subclass to manipulate the response + DEPRECATED. 
Please use the `post_update_package_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ArtifactRegistry server but before - it is returned to user code. + it is returned to user code. This `post_update_package` interceptor runs + before the `post_update_package_with_metadata` interceptor. """ return response + def post_update_package_with_metadata( + self, + response: gda_package.Package, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gda_package.Package, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_package + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ArtifactRegistry server but before it is returned to user code. + + We recommend only using this `post_update_package_with_metadata` + interceptor in new development instead of the `post_update_package` interceptor. + When both interceptors are used, this `post_update_package_with_metadata` interceptor runs after the + `post_update_package` interceptor. The (possibly modified) response returned by + `post_update_package` will be passed to + `post_update_package_with_metadata`. + """ + return response, metadata + def pre_update_project_settings( self, request: settings.UpdateProjectSettingsRequest, @@ -1490,12 +2436,35 @@ def post_update_project_settings( ) -> settings.ProjectSettings: """Post-rpc interceptor for update_project_settings - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_project_settings_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ArtifactRegistry server but before - it is returned to user code. + it is returned to user code. This `post_update_project_settings` interceptor runs + before the `post_update_project_settings_with_metadata` interceptor. """ return response + def post_update_project_settings_with_metadata( + self, + response: settings.ProjectSettings, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[settings.ProjectSettings, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_project_settings + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ArtifactRegistry server but before it is returned to user code. + + We recommend only using this `post_update_project_settings_with_metadata` + interceptor in new development instead of the `post_update_project_settings` interceptor. + When both interceptors are used, this `post_update_project_settings_with_metadata` interceptor runs after the + `post_update_project_settings` interceptor. The (possibly modified) response returned by + `post_update_project_settings` will be passed to + `post_update_project_settings_with_metadata`. + """ + return response, metadata + def pre_update_repository( self, request: gda_repository.UpdateRepositoryRequest, @@ -1515,12 +2484,35 @@ def post_update_repository( ) -> gda_repository.Repository: """Post-rpc interceptor for update_repository - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_repository_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ArtifactRegistry server but before - it is returned to user code. + it is returned to user code. 
This `post_update_repository` interceptor runs + before the `post_update_repository_with_metadata` interceptor. """ return response + def post_update_repository_with_metadata( + self, + response: gda_repository.Repository, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gda_repository.Repository, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_repository + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ArtifactRegistry server but before it is returned to user code. + + We recommend only using this `post_update_repository_with_metadata` + interceptor in new development instead of the `post_update_repository` interceptor. + When both interceptors are used, this `post_update_repository_with_metadata` interceptor runs after the + `post_update_repository` interceptor. The (possibly modified) response returned by + `post_update_repository` will be passed to + `post_update_repository_with_metadata`. + """ + return response, metadata + def pre_update_rule( self, request: gda_rule.UpdateRuleRequest, @@ -1536,12 +2528,33 @@ def pre_update_rule( def post_update_rule(self, response: gda_rule.Rule) -> gda_rule.Rule: """Post-rpc interceptor for update_rule - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_rule_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ArtifactRegistry server but before - it is returned to user code. + it is returned to user code. This `post_update_rule` interceptor runs + before the `post_update_rule_with_metadata` interceptor. """ return response + def post_update_rule_with_metadata( + self, response: gda_rule.Rule, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[gda_rule.Rule, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_rule + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ArtifactRegistry server but before it is returned to user code. + + We recommend only using this `post_update_rule_with_metadata` + interceptor in new development instead of the `post_update_rule` interceptor. + When both interceptors are used, this `post_update_rule_with_metadata` interceptor runs after the + `post_update_rule` interceptor. The (possibly modified) response returned by + `post_update_rule` will be passed to + `post_update_rule_with_metadata`. + """ + return response, metadata + def pre_update_tag( self, request: gda_tag.UpdateTagRequest, @@ -1557,12 +2570,33 @@ def pre_update_tag( def post_update_tag(self, response: gda_tag.Tag) -> gda_tag.Tag: """Post-rpc interceptor for update_tag - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_tag_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ArtifactRegistry server but before - it is returned to user code. + it is returned to user code. This `post_update_tag` interceptor runs + before the `post_update_tag_with_metadata` interceptor. 
""" return response + def post_update_tag_with_metadata( + self, response: gda_tag.Tag, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[gda_tag.Tag, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_tag + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ArtifactRegistry server but before it is returned to user code. + + We recommend only using this `post_update_tag_with_metadata` + interceptor in new development instead of the `post_update_tag` interceptor. + When both interceptors are used, this `post_update_tag_with_metadata` interceptor runs after the + `post_update_tag` interceptor. The (possibly modified) response returned by + `post_update_tag` will be passed to + `post_update_tag_with_metadata`. + """ + return response, metadata + def pre_update_version( self, request: gda_version.UpdateVersionRequest, @@ -1580,12 +2614,35 @@ def pre_update_version( def post_update_version(self, response: gda_version.Version) -> gda_version.Version: """Post-rpc interceptor for update_version - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_version_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ArtifactRegistry server but before - it is returned to user code. + it is returned to user code. This `post_update_version` interceptor runs + before the `post_update_version_with_metadata` interceptor. """ return response + def post_update_version_with_metadata( + self, + response: gda_version.Version, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gda_version.Version, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_version + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ArtifactRegistry server but before it is returned to user code. + + We recommend only using this `post_update_version_with_metadata` + interceptor in new development instead of the `post_update_version` interceptor. + When both interceptors are used, this `post_update_version_with_metadata` interceptor runs after the + `post_update_version` interceptor. The (possibly modified) response returned by + `post_update_version` will be passed to + `post_update_version_with_metadata`. + """ + return response, metadata + def pre_update_vpcsc_config( self, request: gda_vpcsc_config.UpdateVPCSCConfigRequest, @@ -1606,12 +2663,35 @@ def post_update_vpcsc_config( ) -> gda_vpcsc_config.VPCSCConfig: """Post-rpc interceptor for update_vpcsc_config - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_vpcsc_config_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ArtifactRegistry server but before - it is returned to user code. + it is returned to user code. This `post_update_vpcsc_config` interceptor runs + before the `post_update_vpcsc_config_with_metadata` interceptor. 
""" return response + def post_update_vpcsc_config_with_metadata( + self, + response: gda_vpcsc_config.VPCSCConfig, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gda_vpcsc_config.VPCSCConfig, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_vpcsc_config + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ArtifactRegistry server but before it is returned to user code. + + We recommend only using this `post_update_vpcsc_config_with_metadata` + interceptor in new development instead of the `post_update_vpcsc_config` interceptor. + When both interceptors are used, this `post_update_vpcsc_config_with_metadata` interceptor runs after the + `post_update_vpcsc_config` interceptor. The (possibly modified) response returned by + `post_update_vpcsc_config` will be passed to + `post_update_vpcsc_config_with_metadata`. + """ + return response, metadata + def pre_get_location( self, request: locations_pb2.GetLocationRequest, @@ -1949,6 +3029,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_batch_delete_versions(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_batch_delete_versions_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2100,6 +3184,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_attachment(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_attachment_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2251,6 +3339,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_repository(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_repository_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2403,6 +3495,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_rule(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_rule_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2552,6 +3648,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_tag(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_tag_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2696,6 +3796,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_attachment(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_attachment_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ 
-2837,6 +3941,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_file(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_file_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2978,6 +4086,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_package(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_package_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3122,6 +4234,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_repository(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_repository_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3475,6 +4591,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_version(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_version_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3621,6 +4741,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_attachment(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_attachment_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3771,6 +4895,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_docker_image(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_docker_image_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3918,6 +5046,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_file(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_file_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4133,6 +5265,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_iam_policy(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_iam_policy_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4278,6 +5414,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_maven_artifact(resp) + response_metadata = [(k, str(v)) for k, v in 
response.headers.items()] + resp, _ = self._interceptor.post_get_maven_artifact_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4420,6 +5560,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_npm_package(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_npm_package_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4562,6 +5706,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_package(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_package_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4708,6 +5856,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_project_settings(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_project_settings_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4853,6 +6005,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_python_package(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_python_package_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4995,6 +6151,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_repository(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_repository_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -5145,6 +6305,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_rule(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_rule_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -5292,6 +6456,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_tag(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_tag_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -5437,6 +6605,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_version(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_version_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ 
-5581,6 +6753,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_vpcsc_config(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_vpcsc_config_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -5732,6 +6908,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_import_apt_artifacts(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_import_apt_artifacts_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -5883,6 +7063,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_import_yum_artifacts(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_import_yum_artifacts_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -6028,6 +7212,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_attachments(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_attachments_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -6175,6 +7363,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_docker_images(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_docker_images_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -6317,6 +7509,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_files(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_files_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -6462,6 +7658,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_maven_artifacts(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_maven_artifacts_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -6609,6 +7809,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_npm_packages(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_npm_packages_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -6751,6 +7955,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_packages(resp) + 
response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_packages_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -6896,6 +8104,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_python_packages(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_python_packages_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -7043,6 +8255,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_repositories(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_repositories_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -7185,6 +8401,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_rules(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_rules_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -7327,6 +8547,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_tags(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_tags_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -7467,6 +8691,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_versions(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_versions_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -7688,6 +8916,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_set_iam_policy(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_set_iam_policy_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -7837,6 +9069,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_test_iam_permissions(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_test_iam_permissions_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -7986,6 +9222,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_file(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_file_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and 
_LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -8134,6 +9374,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_package(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_package_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -8287,6 +9531,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_project_settings(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_project_settings_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -8438,6 +9686,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_repository(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_repository_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -8590,6 +9842,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_rule(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_rule_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -8740,6 +9996,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_tag(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_tag_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -8891,6 +10151,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_version(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_version_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -9042,6 +10306,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_vpcsc_config(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_vpcsc_config_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1beta2/gapic_version.py b/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1beta2/gapic_version.py index 2159c8af6f8e..cf18a472a8a2 100644 --- a/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1beta2/gapic_version.py +++ b/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1beta2/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # 
limitations under the License. # -__version__ = "1.14.0" # {x-release-please-version} +__version__ = "1.15.0" # {x-release-please-version} diff --git a/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1beta2/services/artifact_registry/client.py b/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1beta2/services/artifact_registry/client.py index c31d94a1615a..42a6bee73e45 100644 --- a/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1beta2/services/artifact_registry/client.py +++ b/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1beta2/services/artifact_registry/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -654,6 +656,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -3719,16 +3748,20 @@ def get_location( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def list_locations( self, @@ -3774,16 +3807,20 @@ def list_locations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. 
+ return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1beta2/services/artifact_registry/transports/rest.py b/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1beta2/services/artifact_registry/transports/rest.py index 0c13932b472f..a0fc644d8dd8 100644 --- a/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1beta2/services/artifact_registry/transports/rest.py +++ b/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1beta2/services/artifact_registry/transports/rest.py @@ -300,12 +300,35 @@ def post_create_repository( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_repository - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_repository_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ArtifactRegistry server but before - it is returned to user code. + it is returned to user code. This `post_create_repository` interceptor runs + before the `post_create_repository_with_metadata` interceptor. """ return response + def post_create_repository_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_repository + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ArtifactRegistry server but before it is returned to user code. + + We recommend only using this `post_create_repository_with_metadata` + interceptor in new development instead of the `post_create_repository` interceptor. + When both interceptors are used, this `post_create_repository_with_metadata` interceptor runs after the + `post_create_repository` interceptor. The (possibly modified) response returned by + `post_create_repository` will be passed to + `post_create_repository_with_metadata`. + """ + return response, metadata + def pre_create_tag( self, request: gda_tag.CreateTagRequest, @@ -321,12 +344,33 @@ def pre_create_tag( def post_create_tag(self, response: gda_tag.Tag) -> gda_tag.Tag: """Post-rpc interceptor for create_tag - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_tag_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ArtifactRegistry server but before - it is returned to user code. + it is returned to user code. This `post_create_tag` interceptor runs + before the `post_create_tag_with_metadata` interceptor. """ return response + def post_create_tag_with_metadata( + self, response: gda_tag.Tag, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[gda_tag.Tag, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_tag + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ArtifactRegistry server but before it is returned to user code. + + We recommend only using this `post_create_tag_with_metadata` + interceptor in new development instead of the `post_create_tag` interceptor. 
+ When both interceptors are used, this `post_create_tag_with_metadata` interceptor runs after the + `post_create_tag` interceptor. The (possibly modified) response returned by + `post_create_tag` will be passed to + `post_create_tag_with_metadata`. + """ + return response, metadata + def pre_delete_package( self, request: package.DeletePackageRequest, @@ -344,12 +388,35 @@ def post_delete_package( ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_package - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_package_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ArtifactRegistry server but before - it is returned to user code. + it is returned to user code. This `post_delete_package` interceptor runs + before the `post_delete_package_with_metadata` interceptor. """ return response + def post_delete_package_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_package + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ArtifactRegistry server but before it is returned to user code. + + We recommend only using this `post_delete_package_with_metadata` + interceptor in new development instead of the `post_delete_package` interceptor. + When both interceptors are used, this `post_delete_package_with_metadata` interceptor runs after the + `post_delete_package` interceptor. The (possibly modified) response returned by + `post_delete_package` will be passed to + `post_delete_package_with_metadata`. + """ + return response, metadata + def pre_delete_repository( self, request: repository.DeleteRepositoryRequest, @@ -369,12 +436,35 @@ def post_delete_repository( ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_repository - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_repository_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ArtifactRegistry server but before - it is returned to user code. + it is returned to user code. This `post_delete_repository` interceptor runs + before the `post_delete_repository_with_metadata` interceptor. """ return response + def post_delete_repository_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_repository + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ArtifactRegistry server but before it is returned to user code. + + We recommend only using this `post_delete_repository_with_metadata` + interceptor in new development instead of the `post_delete_repository` interceptor. + When both interceptors are used, this `post_delete_repository_with_metadata` interceptor runs after the + `post_delete_repository` interceptor. The (possibly modified) response returned by + `post_delete_repository` will be passed to + `post_delete_repository_with_metadata`. 
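A simplified sketch of the call order the updated `__call__` methods implement for an RPC such as delete_repository, paraphrased from the hunks later in this diff (the helper name is illustrative): the legacy `post_*` hook runs first, and its return value, together with metadata derived from the HTTP response headers, is then fed to `post_*_with_metadata`.

def _finish_delete_repository(interceptor, resp, http_headers):
    # Legacy hook first; it may replace the response object.
    resp = interceptor.post_delete_repository(resp)
    # Header map flattened into (key, str(value)) pairs, as in the generated code.
    response_metadata = [(k, str(v)) for k, v in http_headers.items()]
    # New hook second; the metadata it returns is currently discarded by the transport.
    resp, _ = interceptor.post_delete_repository_with_metadata(resp, response_metadata)
    return resp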
+ """ + return response, metadata + def pre_delete_tag( self, request: tag.DeleteTagRequest, @@ -404,12 +494,35 @@ def post_delete_version( ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_version - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_version_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ArtifactRegistry server but before - it is returned to user code. + it is returned to user code. This `post_delete_version` interceptor runs + before the `post_delete_version_with_metadata` interceptor. """ return response + def post_delete_version_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_version + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ArtifactRegistry server but before it is returned to user code. + + We recommend only using this `post_delete_version_with_metadata` + interceptor in new development instead of the `post_delete_version` interceptor. + When both interceptors are used, this `post_delete_version_with_metadata` interceptor runs after the + `post_delete_version` interceptor. The (possibly modified) response returned by + `post_delete_version` will be passed to + `post_delete_version_with_metadata`. + """ + return response, metadata + def pre_get_file( self, request: file.GetFileRequest, @@ -425,12 +538,33 @@ def pre_get_file( def post_get_file(self, response: file.File) -> file.File: """Post-rpc interceptor for get_file - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_file_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ArtifactRegistry server but before - it is returned to user code. + it is returned to user code. This `post_get_file` interceptor runs + before the `post_get_file_with_metadata` interceptor. """ return response + def post_get_file_with_metadata( + self, response: file.File, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[file.File, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_file + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ArtifactRegistry server but before it is returned to user code. + + We recommend only using this `post_get_file_with_metadata` + interceptor in new development instead of the `post_get_file` interceptor. + When both interceptors are used, this `post_get_file_with_metadata` interceptor runs after the + `post_get_file` interceptor. The (possibly modified) response returned by + `post_get_file` will be passed to + `post_get_file_with_metadata`. + """ + return response, metadata + def pre_get_iam_policy( self, request: iam_policy_pb2.GetIamPolicyRequest, @@ -448,12 +582,35 @@ def pre_get_iam_policy( def post_get_iam_policy(self, response: policy_pb2.Policy) -> policy_pb2.Policy: """Post-rpc interceptor for get_iam_policy - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_iam_policy_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ArtifactRegistry server but before - it is returned to user code. 
+ it is returned to user code. This `post_get_iam_policy` interceptor runs + before the `post_get_iam_policy_with_metadata` interceptor. """ return response + def post_get_iam_policy_with_metadata( + self, + response: policy_pb2.Policy, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[policy_pb2.Policy, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_iam_policy + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ArtifactRegistry server but before it is returned to user code. + + We recommend only using this `post_get_iam_policy_with_metadata` + interceptor in new development instead of the `post_get_iam_policy` interceptor. + When both interceptors are used, this `post_get_iam_policy_with_metadata` interceptor runs after the + `post_get_iam_policy` interceptor. The (possibly modified) response returned by + `post_get_iam_policy` will be passed to + `post_get_iam_policy_with_metadata`. + """ + return response, metadata + def pre_get_package( self, request: package.GetPackageRequest, @@ -469,12 +626,35 @@ def pre_get_package( def post_get_package(self, response: package.Package) -> package.Package: """Post-rpc interceptor for get_package - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_package_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ArtifactRegistry server but before - it is returned to user code. + it is returned to user code. This `post_get_package` interceptor runs + before the `post_get_package_with_metadata` interceptor. """ return response + def post_get_package_with_metadata( + self, + response: package.Package, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[package.Package, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_package + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ArtifactRegistry server but before it is returned to user code. + + We recommend only using this `post_get_package_with_metadata` + interceptor in new development instead of the `post_get_package` interceptor. + When both interceptors are used, this `post_get_package_with_metadata` interceptor runs after the + `post_get_package` interceptor. The (possibly modified) response returned by + `post_get_package` will be passed to + `post_get_package_with_metadata`. + """ + return response, metadata + def pre_get_project_settings( self, request: settings.GetProjectSettingsRequest, @@ -494,12 +674,35 @@ def post_get_project_settings( ) -> settings.ProjectSettings: """Post-rpc interceptor for get_project_settings - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_project_settings_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ArtifactRegistry server but before - it is returned to user code. + it is returned to user code. This `post_get_project_settings` interceptor runs + before the `post_get_project_settings_with_metadata` interceptor. 
""" return response + def post_get_project_settings_with_metadata( + self, + response: settings.ProjectSettings, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[settings.ProjectSettings, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_project_settings + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ArtifactRegistry server but before it is returned to user code. + + We recommend only using this `post_get_project_settings_with_metadata` + interceptor in new development instead of the `post_get_project_settings` interceptor. + When both interceptors are used, this `post_get_project_settings_with_metadata` interceptor runs after the + `post_get_project_settings` interceptor. The (possibly modified) response returned by + `post_get_project_settings` will be passed to + `post_get_project_settings_with_metadata`. + """ + return response, metadata + def pre_get_repository( self, request: repository.GetRepositoryRequest, @@ -519,12 +722,35 @@ def post_get_repository( ) -> repository.Repository: """Post-rpc interceptor for get_repository - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_repository_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ArtifactRegistry server but before - it is returned to user code. + it is returned to user code. This `post_get_repository` interceptor runs + before the `post_get_repository_with_metadata` interceptor. """ return response + def post_get_repository_with_metadata( + self, + response: repository.Repository, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[repository.Repository, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_repository + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ArtifactRegistry server but before it is returned to user code. + + We recommend only using this `post_get_repository_with_metadata` + interceptor in new development instead of the `post_get_repository` interceptor. + When both interceptors are used, this `post_get_repository_with_metadata` interceptor runs after the + `post_get_repository` interceptor. The (possibly modified) response returned by + `post_get_repository` will be passed to + `post_get_repository_with_metadata`. + """ + return response, metadata + def pre_get_tag( self, request: tag.GetTagRequest, @@ -540,12 +766,33 @@ def pre_get_tag( def post_get_tag(self, response: tag.Tag) -> tag.Tag: """Post-rpc interceptor for get_tag - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_tag_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ArtifactRegistry server but before - it is returned to user code. + it is returned to user code. This `post_get_tag` interceptor runs + before the `post_get_tag_with_metadata` interceptor. """ return response + def post_get_tag_with_metadata( + self, response: tag.Tag, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[tag.Tag, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_tag + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ArtifactRegistry server but before it is returned to user code. 
+ + We recommend only using this `post_get_tag_with_metadata` + interceptor in new development instead of the `post_get_tag` interceptor. + When both interceptors are used, this `post_get_tag_with_metadata` interceptor runs after the + `post_get_tag` interceptor. The (possibly modified) response returned by + `post_get_tag` will be passed to + `post_get_tag_with_metadata`. + """ + return response, metadata + def pre_get_version( self, request: version.GetVersionRequest, @@ -561,12 +808,35 @@ def pre_get_version( def post_get_version(self, response: version.Version) -> version.Version: """Post-rpc interceptor for get_version - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_version_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ArtifactRegistry server but before - it is returned to user code. + it is returned to user code. This `post_get_version` interceptor runs + before the `post_get_version_with_metadata` interceptor. """ return response + def post_get_version_with_metadata( + self, + response: version.Version, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[version.Version, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_version + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ArtifactRegistry server but before it is returned to user code. + + We recommend only using this `post_get_version_with_metadata` + interceptor in new development instead of the `post_get_version` interceptor. + When both interceptors are used, this `post_get_version_with_metadata` interceptor runs after the + `post_get_version` interceptor. The (possibly modified) response returned by + `post_get_version` will be passed to + `post_get_version_with_metadata`. + """ + return response, metadata + def pre_import_apt_artifacts( self, request: apt_artifact.ImportAptArtifactsRequest, @@ -586,12 +856,35 @@ def post_import_apt_artifacts( ) -> operations_pb2.Operation: """Post-rpc interceptor for import_apt_artifacts - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_import_apt_artifacts_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ArtifactRegistry server but before - it is returned to user code. + it is returned to user code. This `post_import_apt_artifacts` interceptor runs + before the `post_import_apt_artifacts_with_metadata` interceptor. """ return response + def post_import_apt_artifacts_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for import_apt_artifacts + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ArtifactRegistry server but before it is returned to user code. + + We recommend only using this `post_import_apt_artifacts_with_metadata` + interceptor in new development instead of the `post_import_apt_artifacts` interceptor. + When both interceptors are used, this `post_import_apt_artifacts_with_metadata` interceptor runs after the + `post_import_apt_artifacts` interceptor. The (possibly modified) response returned by + `post_import_apt_artifacts` will be passed to + `post_import_apt_artifacts_with_metadata`. 
+ """ + return response, metadata + def pre_import_yum_artifacts( self, request: yum_artifact.ImportYumArtifactsRequest, @@ -611,12 +904,35 @@ def post_import_yum_artifacts( ) -> operations_pb2.Operation: """Post-rpc interceptor for import_yum_artifacts - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_import_yum_artifacts_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ArtifactRegistry server but before - it is returned to user code. + it is returned to user code. This `post_import_yum_artifacts` interceptor runs + before the `post_import_yum_artifacts_with_metadata` interceptor. """ return response + def post_import_yum_artifacts_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for import_yum_artifacts + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ArtifactRegistry server but before it is returned to user code. + + We recommend only using this `post_import_yum_artifacts_with_metadata` + interceptor in new development instead of the `post_import_yum_artifacts` interceptor. + When both interceptors are used, this `post_import_yum_artifacts_with_metadata` interceptor runs after the + `post_import_yum_artifacts` interceptor. The (possibly modified) response returned by + `post_import_yum_artifacts` will be passed to + `post_import_yum_artifacts_with_metadata`. + """ + return response, metadata + def pre_list_files( self, request: file.ListFilesRequest, @@ -634,12 +950,35 @@ def post_list_files( ) -> file.ListFilesResponse: """Post-rpc interceptor for list_files - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_files_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ArtifactRegistry server but before - it is returned to user code. + it is returned to user code. This `post_list_files` interceptor runs + before the `post_list_files_with_metadata` interceptor. """ return response + def post_list_files_with_metadata( + self, + response: file.ListFilesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[file.ListFilesResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_files + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ArtifactRegistry server but before it is returned to user code. + + We recommend only using this `post_list_files_with_metadata` + interceptor in new development instead of the `post_list_files` interceptor. + When both interceptors are used, this `post_list_files_with_metadata` interceptor runs after the + `post_list_files` interceptor. The (possibly modified) response returned by + `post_list_files` will be passed to + `post_list_files_with_metadata`. + """ + return response, metadata + def pre_list_packages( self, request: package.ListPackagesRequest, @@ -657,12 +996,35 @@ def post_list_packages( ) -> package.ListPackagesResponse: """Post-rpc interceptor for list_packages - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_packages_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response after it is returned by the ArtifactRegistry server but before - it is returned to user code. + it is returned to user code. This `post_list_packages` interceptor runs + before the `post_list_packages_with_metadata` interceptor. """ return response + def post_list_packages_with_metadata( + self, + response: package.ListPackagesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[package.ListPackagesResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_packages + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ArtifactRegistry server but before it is returned to user code. + + We recommend only using this `post_list_packages_with_metadata` + interceptor in new development instead of the `post_list_packages` interceptor. + When both interceptors are used, this `post_list_packages_with_metadata` interceptor runs after the + `post_list_packages` interceptor. The (possibly modified) response returned by + `post_list_packages` will be passed to + `post_list_packages_with_metadata`. + """ + return response, metadata + def pre_list_repositories( self, request: repository.ListRepositoriesRequest, @@ -682,12 +1044,37 @@ def post_list_repositories( ) -> repository.ListRepositoriesResponse: """Post-rpc interceptor for list_repositories - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_repositories_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ArtifactRegistry server but before - it is returned to user code. + it is returned to user code. This `post_list_repositories` interceptor runs + before the `post_list_repositories_with_metadata` interceptor. """ return response + def post_list_repositories_with_metadata( + self, + response: repository.ListRepositoriesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + repository.ListRepositoriesResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_repositories + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ArtifactRegistry server but before it is returned to user code. + + We recommend only using this `post_list_repositories_with_metadata` + interceptor in new development instead of the `post_list_repositories` interceptor. + When both interceptors are used, this `post_list_repositories_with_metadata` interceptor runs after the + `post_list_repositories` interceptor. The (possibly modified) response returned by + `post_list_repositories` will be passed to + `post_list_repositories_with_metadata`. + """ + return response, metadata + def pre_list_tags( self, request: tag.ListTagsRequest, @@ -703,12 +1090,35 @@ def pre_list_tags( def post_list_tags(self, response: tag.ListTagsResponse) -> tag.ListTagsResponse: """Post-rpc interceptor for list_tags - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_tags_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ArtifactRegistry server but before - it is returned to user code. + it is returned to user code. This `post_list_tags` interceptor runs + before the `post_list_tags_with_metadata` interceptor. 
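Another hedged sketch, this time against the v1beta2 surface covered in this file: an interceptor that only reads the new metadata argument, stashing a response header of interest while leaving the response and metadata untouched. The header name and import path are assumptions used for illustration.

from google.cloud.artifactregistry_v1beta2.services.artifact_registry.transports.rest import (
    ArtifactRegistryRestInterceptor,
)


class RequestIdCapturingInterceptor(ArtifactRegistryRestInterceptor):
    def __init__(self):
        super().__init__()
        self.last_request_id = None

    def post_list_repositories_with_metadata(self, response, metadata):
        # `metadata` holds the (header, value) pairs built from the HTTP
        # response headers; "x-goog-request-id" is only an example header.
        for key, value in metadata:
            if key.lower() == "x-goog-request-id":
                self.last_request_id = value
        return response, metadata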
""" return response + def post_list_tags_with_metadata( + self, + response: tag.ListTagsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[tag.ListTagsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_tags + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ArtifactRegistry server but before it is returned to user code. + + We recommend only using this `post_list_tags_with_metadata` + interceptor in new development instead of the `post_list_tags` interceptor. + When both interceptors are used, this `post_list_tags_with_metadata` interceptor runs after the + `post_list_tags` interceptor. The (possibly modified) response returned by + `post_list_tags` will be passed to + `post_list_tags_with_metadata`. + """ + return response, metadata + def pre_list_versions( self, request: version.ListVersionsRequest, @@ -726,12 +1136,35 @@ def post_list_versions( ) -> version.ListVersionsResponse: """Post-rpc interceptor for list_versions - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_versions_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ArtifactRegistry server but before - it is returned to user code. + it is returned to user code. This `post_list_versions` interceptor runs + before the `post_list_versions_with_metadata` interceptor. """ return response + def post_list_versions_with_metadata( + self, + response: version.ListVersionsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[version.ListVersionsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_versions + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ArtifactRegistry server but before it is returned to user code. + + We recommend only using this `post_list_versions_with_metadata` + interceptor in new development instead of the `post_list_versions` interceptor. + When both interceptors are used, this `post_list_versions_with_metadata` interceptor runs after the + `post_list_versions` interceptor. The (possibly modified) response returned by + `post_list_versions` will be passed to + `post_list_versions_with_metadata`. + """ + return response, metadata + def pre_set_iam_policy( self, request: iam_policy_pb2.SetIamPolicyRequest, @@ -749,12 +1182,35 @@ def pre_set_iam_policy( def post_set_iam_policy(self, response: policy_pb2.Policy) -> policy_pb2.Policy: """Post-rpc interceptor for set_iam_policy - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_set_iam_policy_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ArtifactRegistry server but before - it is returned to user code. + it is returned to user code. This `post_set_iam_policy` interceptor runs + before the `post_set_iam_policy_with_metadata` interceptor. """ return response + def post_set_iam_policy_with_metadata( + self, + response: policy_pb2.Policy, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[policy_pb2.Policy, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for set_iam_policy + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ArtifactRegistry server but before it is returned to user code. 
+ + We recommend only using this `post_set_iam_policy_with_metadata` + interceptor in new development instead of the `post_set_iam_policy` interceptor. + When both interceptors are used, this `post_set_iam_policy_with_metadata` interceptor runs after the + `post_set_iam_policy` interceptor. The (possibly modified) response returned by + `post_set_iam_policy` will be passed to + `post_set_iam_policy_with_metadata`. + """ + return response, metadata + def pre_test_iam_permissions( self, request: iam_policy_pb2.TestIamPermissionsRequest, @@ -775,12 +1231,38 @@ def post_test_iam_permissions( ) -> iam_policy_pb2.TestIamPermissionsResponse: """Post-rpc interceptor for test_iam_permissions - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_test_iam_permissions_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ArtifactRegistry server but before - it is returned to user code. + it is returned to user code. This `post_test_iam_permissions` interceptor runs + before the `post_test_iam_permissions_with_metadata` interceptor. """ return response + def post_test_iam_permissions_with_metadata( + self, + response: iam_policy_pb2.TestIamPermissionsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + iam_policy_pb2.TestIamPermissionsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for test_iam_permissions + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ArtifactRegistry server but before it is returned to user code. + + We recommend only using this `post_test_iam_permissions_with_metadata` + interceptor in new development instead of the `post_test_iam_permissions` interceptor. + When both interceptors are used, this `post_test_iam_permissions_with_metadata` interceptor runs after the + `post_test_iam_permissions` interceptor. The (possibly modified) response returned by + `post_test_iam_permissions` will be passed to + `post_test_iam_permissions_with_metadata`. + """ + return response, metadata + def pre_update_project_settings( self, request: settings.UpdateProjectSettingsRequest, @@ -800,12 +1282,35 @@ def post_update_project_settings( ) -> settings.ProjectSettings: """Post-rpc interceptor for update_project_settings - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_project_settings_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ArtifactRegistry server but before - it is returned to user code. + it is returned to user code. This `post_update_project_settings` interceptor runs + before the `post_update_project_settings_with_metadata` interceptor. """ return response + def post_update_project_settings_with_metadata( + self, + response: settings.ProjectSettings, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[settings.ProjectSettings, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_project_settings + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ArtifactRegistry server but before it is returned to user code. + + We recommend only using this `post_update_project_settings_with_metadata` + interceptor in new development instead of the `post_update_project_settings` interceptor. 
+ When both interceptors are used, this `post_update_project_settings_with_metadata` interceptor runs after the + `post_update_project_settings` interceptor. The (possibly modified) response returned by + `post_update_project_settings` will be passed to + `post_update_project_settings_with_metadata`. + """ + return response, metadata + def pre_update_repository( self, request: gda_repository.UpdateRepositoryRequest, @@ -825,12 +1330,35 @@ def post_update_repository( ) -> gda_repository.Repository: """Post-rpc interceptor for update_repository - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_repository_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ArtifactRegistry server but before - it is returned to user code. + it is returned to user code. This `post_update_repository` interceptor runs + before the `post_update_repository_with_metadata` interceptor. """ return response + def post_update_repository_with_metadata( + self, + response: gda_repository.Repository, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gda_repository.Repository, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_repository + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ArtifactRegistry server but before it is returned to user code. + + We recommend only using this `post_update_repository_with_metadata` + interceptor in new development instead of the `post_update_repository` interceptor. + When both interceptors are used, this `post_update_repository_with_metadata` interceptor runs after the + `post_update_repository` interceptor. The (possibly modified) response returned by + `post_update_repository` will be passed to + `post_update_repository_with_metadata`. + """ + return response, metadata + def pre_update_tag( self, request: gda_tag.UpdateTagRequest, @@ -846,12 +1374,33 @@ def pre_update_tag( def post_update_tag(self, response: gda_tag.Tag) -> gda_tag.Tag: """Post-rpc interceptor for update_tag - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_tag_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ArtifactRegistry server but before - it is returned to user code. + it is returned to user code. This `post_update_tag` interceptor runs + before the `post_update_tag_with_metadata` interceptor. """ return response + def post_update_tag_with_metadata( + self, response: gda_tag.Tag, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[gda_tag.Tag, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_tag + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ArtifactRegistry server but before it is returned to user code. + + We recommend only using this `post_update_tag_with_metadata` + interceptor in new development instead of the `post_update_tag` interceptor. + When both interceptors are used, this `post_update_tag_with_metadata` interceptor runs after the + `post_update_tag` interceptor. The (possibly modified) response returned by + `post_update_tag` will be passed to + `post_update_tag_with_metadata`. 
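        For illustration only (the subclass name, the logging behaviour, and the import of
        `transports` are assumptions, not generated code), a minimal sketch of a subclass
        that overrides this hook to inspect the response headers surfaced as `metadata`:

            import logging

            from google.cloud.artifactregistry_v1.services.artifact_registry import transports

            class MetadataLoggingInterceptor(transports.ArtifactRegistryRestInterceptor):
                def post_update_tag_with_metadata(self, response, metadata):
                    # metadata is a sequence of (key, value) tuples built from the
                    # HTTP response headers by the REST transport.
                    for key, value in metadata:
                        logging.getLogger(__name__).debug("update_tag header %s=%s", key, value)
                    # Return both values unchanged so callers still receive the
                    # original response object.
                    return response, metadata

        Such an interceptor would typically be passed to the REST transport (for example
        via its `interceptor` argument) when the client transport is constructed.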
+ """ + return response, metadata + def pre_get_location( self, request: locations_pb2.GetLocationRequest, @@ -1164,6 +1713,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_repository(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_repository_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1313,6 +1866,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_tag(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_tag_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1454,6 +2011,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_package(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_package_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1598,6 +2159,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_repository(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_repository_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1845,6 +2410,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_version(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_version_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1992,6 +2561,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_file(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_file_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2207,6 +2780,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_iam_policy(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_iam_policy_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2349,6 +2926,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_package(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_package_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2495,6 +3076,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = 
self._interceptor.post_get_project_settings(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_project_settings_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2637,6 +3222,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_repository(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_repository_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2784,6 +3373,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_tag(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_tag_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2929,6 +3522,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_version(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_version_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3080,6 +3677,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_import_apt_artifacts(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_import_apt_artifacts_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3231,6 +3832,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_import_yum_artifacts(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_import_yum_artifacts_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3371,6 +3976,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_files(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_files_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3511,6 +4120,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_packages(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_packages_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3656,6 +4269,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_repositories(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_repositories_with_metadata( + resp, 
response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3800,6 +4417,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_tags(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_tags_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3940,6 +4561,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_versions(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_versions_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4161,6 +4786,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_set_iam_policy(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_set_iam_policy_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4310,6 +4939,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_test_iam_permissions(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_test_iam_permissions_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4463,6 +5096,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_project_settings(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_project_settings_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4614,6 +5251,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_repository(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_repository_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4764,6 +5405,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_tag(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_tag_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-artifact-registry/noxfile.py b/packages/google-cloud-artifact-registry/noxfile.py index a9ceef47133c..0acc836b384e 100644 --- a/packages/google-cloud-artifact-registry/noxfile.py +++ b/packages/google-cloud-artifact-registry/noxfile.py @@ -382,20 +382,29 @@ def docfx(session): ["python", "upb", "cpp"], ) def prerelease_deps(session, protobuf_implementation): - """Run all tests with prerelease versions of dependencies installed.""" + """ + Run all tests 
with pre-release versions of dependencies installed + rather than the standard non pre-release versions. + Pre-release versions can be installed using + `pip install --pre <package>`. + """ if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): session.skip("cpp implementation is not supported in python 3.11+") # Install all dependencies - session.install("-e", ".[all, tests, tracing]") + session.install("-e", ".") + unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES + # Install dependencies for the unit test environment session.install(*unit_deps_all) + system_deps_all = ( SYSTEM_TEST_STANDARD_DEPENDENCIES + SYSTEM_TEST_EXTERNAL_DEPENDENCIES + SYSTEM_TEST_EXTRAS ) + # Install dependencies for the system test environment session.install(*system_deps_all) # Because we test minimum dependency versions on the minimum Python @@ -417,6 +426,7 @@ def prerelease_deps(session, protobuf_implementation): ) ] + # Install dependencies specified in `testing/constraints-X.txt`. session.install(*constraints_deps) prerel_deps = [ @@ -458,3 +468,70 @@ def prerelease_deps(session, protobuf_implementation): "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, }, ) + + +@nox.session(python="3.13") +@nox.parametrize( + "protobuf_implementation", + ["python", "upb"], +) +def core_deps_from_source(session, protobuf_implementation): + """Run all tests with local versions of core dependencies installed, + rather than pulling core dependencies from PyPI. + """ + + # Install all dependencies + session.install(".") + + # Install dependencies for the unit test environment + unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES + session.install(*unit_deps_all) + + # Install dependencies for the system test environment + system_deps_all = ( + SYSTEM_TEST_STANDARD_DEPENDENCIES + + SYSTEM_TEST_EXTERNAL_DEPENDENCIES + + SYSTEM_TEST_EXTRAS + ) + session.install(*system_deps_all) + + # Because we test minimum dependency versions on the minimum Python + # version, the first version we test with in the unit tests sessions has a + # constraints file containing all dependencies and extras that should be installed. + with open( + CURRENT_DIRECTORY + / "testing" + / f"constraints-{UNIT_TEST_PYTHON_VERSIONS[0]}.txt", + encoding="utf-8", + ) as constraints_file: + constraints_text = constraints_file.read() + + # Ignore leading whitespace and comment lines. + constraints_deps = [ + match.group(1) + for match in re.finditer( + r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE + ) + ] + + # Install dependencies specified in `testing/constraints-X.txt`.
+ session.install(*constraints_deps) + + core_dependencies_from_source = [ + "google-api-core @ git+https://github.com/googleapis/python-api-core.git", + "google-auth @ git+https://github.com/googleapis/google-auth-library-python.git", + f"{CURRENT_DIRECTORY}/../googleapis-common-protos", + f"{CURRENT_DIRECTORY}/../grpc-google-iam-v1", + "proto-plus @ git+https://github.com/googleapis/proto-plus-python.git", + ] + + for dep in core_dependencies_from_source: + session.install(dep, "--ignore-installed", "--no-deps") + + session.run( + "py.test", + "tests/unit", + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, + ) diff --git a/packages/google-cloud-artifact-registry/samples/generated_samples/snippet_metadata_google.devtools.artifactregistry.v1.json b/packages/google-cloud-artifact-registry/samples/generated_samples/snippet_metadata_google.devtools.artifactregistry.v1.json index 204d24e22bdc..15b695d1cc53 100644 --- a/packages/google-cloud-artifact-registry/samples/generated_samples/snippet_metadata_google.devtools.artifactregistry.v1.json +++ b/packages/google-cloud-artifact-registry/samples/generated_samples/snippet_metadata_google.devtools.artifactregistry.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-artifact-registry", - "version": "1.14.0" + "version": "1.15.0" }, "snippets": [ { diff --git a/packages/google-cloud-artifact-registry/samples/generated_samples/snippet_metadata_google.devtools.artifactregistry.v1beta2.json b/packages/google-cloud-artifact-registry/samples/generated_samples/snippet_metadata_google.devtools.artifactregistry.v1beta2.json index 6c2031e5589f..8d5ddafa8752 100644 --- a/packages/google-cloud-artifact-registry/samples/generated_samples/snippet_metadata_google.devtools.artifactregistry.v1beta2.json +++ b/packages/google-cloud-artifact-registry/samples/generated_samples/snippet_metadata_google.devtools.artifactregistry.v1beta2.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-artifact-registry", - "version": "1.14.0" + "version": "1.15.0" }, "snippets": [ { diff --git a/packages/google-cloud-artifact-registry/tests/unit/gapic/artifactregistry_v1/test_artifact_registry.py b/packages/google-cloud-artifact-registry/tests/unit/gapic/artifactregistry_v1/test_artifact_registry.py index 01fef8477a4a..0a26f71dc2b3 100644 --- a/packages/google-cloud-artifact-registry/tests/unit/gapic/artifactregistry_v1/test_artifact_registry.py +++ b/packages/google-cloud-artifact-registry/tests/unit/gapic/artifactregistry_v1/test_artifact_registry.py @@ -98,6 +98,13 @@ from google.cloud.artifactregistry_v1.types import vpcsc_config from google.cloud.artifactregistry_v1.types import yum_artifact +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -357,6 +364,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
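# Illustrative sketch of the constraints parsing used by the `prerelease_deps` and
# `core_deps_from_source` nox sessions above: only `name==version` pins are kept, while
# comment lines and unpinned names are skipped. The sample constraints text and the
# expected result below are assumptions chosen for demonstration, not project data.
import re

_sample_constraints = (
    "# a comment line\n"
    "google-api-core==2.24.0\n"
    "proto-plus==1.25.0\n"
    "some-unpinned-dependency\n"
)
_pinned_deps = [
    match.group(1)
    for match in re.finditer(
        r"^\s*(\S+)(?===\S+)", _sample_constraints, flags=re.MULTILINE
    )
]
assert _pinned_deps == ["google-api-core", "proto-plus"]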
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = ArtifactRegistryClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = ArtifactRegistryClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -30509,10 +30559,14 @@ def test_list_docker_images_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ArtifactRegistryRestInterceptor, "post_list_docker_images" ) as post, mock.patch.object( + transports.ArtifactRegistryRestInterceptor, + "post_list_docker_images_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ArtifactRegistryRestInterceptor, "pre_list_docker_images" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = artifact.ListDockerImagesRequest.pb( artifact.ListDockerImagesRequest() ) @@ -30538,6 +30592,7 @@ def test_list_docker_images_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = artifact.ListDockerImagesResponse() + post_with_metadata.return_value = artifact.ListDockerImagesResponse(), metadata client.list_docker_images( request, @@ -30549,6 +30604,7 @@ def test_list_docker_images_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_docker_image_rest_bad_request(request_type=artifact.GetDockerImageRequest): @@ -30643,10 +30699,14 @@ def test_get_docker_image_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ArtifactRegistryRestInterceptor, "post_get_docker_image" ) as post, mock.patch.object( + transports.ArtifactRegistryRestInterceptor, + "post_get_docker_image_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ArtifactRegistryRestInterceptor, "pre_get_docker_image" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = artifact.GetDockerImageRequest.pb(artifact.GetDockerImageRequest()) transcode.return_value = { "method": "post", @@ -30668,6 +30728,7 @@ def test_get_docker_image_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = artifact.DockerImage() + post_with_metadata.return_value = artifact.DockerImage(), metadata client.get_docker_image( request, @@ -30679,6 +30740,7 @@ def 
test_get_docker_image_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_maven_artifacts_rest_bad_request( @@ -30763,10 +30825,14 @@ def test_list_maven_artifacts_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ArtifactRegistryRestInterceptor, "post_list_maven_artifacts" ) as post, mock.patch.object( + transports.ArtifactRegistryRestInterceptor, + "post_list_maven_artifacts_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ArtifactRegistryRestInterceptor, "pre_list_maven_artifacts" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = artifact.ListMavenArtifactsRequest.pb( artifact.ListMavenArtifactsRequest() ) @@ -30792,6 +30858,10 @@ def test_list_maven_artifacts_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = artifact.ListMavenArtifactsResponse() + post_with_metadata.return_value = ( + artifact.ListMavenArtifactsResponse(), + metadata, + ) client.list_maven_artifacts( request, @@ -30803,6 +30873,7 @@ def test_list_maven_artifacts_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_maven_artifact_rest_bad_request( @@ -30899,10 +30970,14 @@ def test_get_maven_artifact_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ArtifactRegistryRestInterceptor, "post_get_maven_artifact" ) as post, mock.patch.object( + transports.ArtifactRegistryRestInterceptor, + "post_get_maven_artifact_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ArtifactRegistryRestInterceptor, "pre_get_maven_artifact" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = artifact.GetMavenArtifactRequest.pb( artifact.GetMavenArtifactRequest() ) @@ -30926,6 +31001,7 @@ def test_get_maven_artifact_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = artifact.MavenArtifact() + post_with_metadata.return_value = artifact.MavenArtifact(), metadata client.get_maven_artifact( request, @@ -30937,6 +31013,7 @@ def test_get_maven_artifact_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_npm_packages_rest_bad_request( @@ -31021,10 +31098,14 @@ def test_list_npm_packages_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ArtifactRegistryRestInterceptor, "post_list_npm_packages" ) as post, mock.patch.object( + transports.ArtifactRegistryRestInterceptor, + "post_list_npm_packages_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ArtifactRegistryRestInterceptor, "pre_list_npm_packages" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = artifact.ListNpmPackagesRequest.pb( artifact.ListNpmPackagesRequest() ) @@ -31050,6 +31131,7 @@ def test_list_npm_packages_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = artifact.ListNpmPackagesResponse() + post_with_metadata.return_value = artifact.ListNpmPackagesResponse(), metadata client.list_npm_packages( request, @@ -31061,6 +31143,7 @@ def test_list_npm_packages_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + 
post_with_metadata.assert_called_once() def test_get_npm_package_rest_bad_request(request_type=artifact.GetNpmPackageRequest): @@ -31153,10 +31236,13 @@ def test_get_npm_package_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ArtifactRegistryRestInterceptor, "post_get_npm_package" ) as post, mock.patch.object( + transports.ArtifactRegistryRestInterceptor, "post_get_npm_package_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ArtifactRegistryRestInterceptor, "pre_get_npm_package" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = artifact.GetNpmPackageRequest.pb(artifact.GetNpmPackageRequest()) transcode.return_value = { "method": "post", @@ -31178,6 +31264,7 @@ def test_get_npm_package_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = artifact.NpmPackage() + post_with_metadata.return_value = artifact.NpmPackage(), metadata client.get_npm_package( request, @@ -31189,6 +31276,7 @@ def test_get_npm_package_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_python_packages_rest_bad_request( @@ -31273,10 +31361,14 @@ def test_list_python_packages_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ArtifactRegistryRestInterceptor, "post_list_python_packages" ) as post, mock.patch.object( + transports.ArtifactRegistryRestInterceptor, + "post_list_python_packages_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ArtifactRegistryRestInterceptor, "pre_list_python_packages" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = artifact.ListPythonPackagesRequest.pb( artifact.ListPythonPackagesRequest() ) @@ -31302,6 +31394,10 @@ def test_list_python_packages_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = artifact.ListPythonPackagesResponse() + post_with_metadata.return_value = ( + artifact.ListPythonPackagesResponse(), + metadata, + ) client.list_python_packages( request, @@ -31313,6 +31409,7 @@ def test_list_python_packages_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_python_package_rest_bad_request( @@ -31407,10 +31504,14 @@ def test_get_python_package_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ArtifactRegistryRestInterceptor, "post_get_python_package" ) as post, mock.patch.object( + transports.ArtifactRegistryRestInterceptor, + "post_get_python_package_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ArtifactRegistryRestInterceptor, "pre_get_python_package" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = artifact.GetPythonPackageRequest.pb( artifact.GetPythonPackageRequest() ) @@ -31434,6 +31535,7 @@ def test_get_python_package_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = artifact.PythonPackage() + post_with_metadata.return_value = artifact.PythonPackage(), metadata client.get_python_package( request, @@ -31445,6 +31547,7 @@ def test_get_python_package_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def 
test_import_apt_artifacts_rest_bad_request( @@ -31525,10 +31628,14 @@ def test_import_apt_artifacts_rest_interceptors(null_interceptor): ), mock.patch.object( transports.ArtifactRegistryRestInterceptor, "post_import_apt_artifacts" ) as post, mock.patch.object( + transports.ArtifactRegistryRestInterceptor, + "post_import_apt_artifacts_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ArtifactRegistryRestInterceptor, "pre_import_apt_artifacts" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = apt_artifact.ImportAptArtifactsRequest.pb( apt_artifact.ImportAptArtifactsRequest() ) @@ -31552,6 +31659,7 @@ def test_import_apt_artifacts_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.import_apt_artifacts( request, @@ -31563,6 +31671,7 @@ def test_import_apt_artifacts_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_import_yum_artifacts_rest_bad_request( @@ -31643,10 +31752,14 @@ def test_import_yum_artifacts_rest_interceptors(null_interceptor): ), mock.patch.object( transports.ArtifactRegistryRestInterceptor, "post_import_yum_artifacts" ) as post, mock.patch.object( + transports.ArtifactRegistryRestInterceptor, + "post_import_yum_artifacts_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ArtifactRegistryRestInterceptor, "pre_import_yum_artifacts" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = yum_artifact.ImportYumArtifactsRequest.pb( yum_artifact.ImportYumArtifactsRequest() ) @@ -31670,6 +31783,7 @@ def test_import_yum_artifacts_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.import_yum_artifacts( request, @@ -31681,6 +31795,7 @@ def test_import_yum_artifacts_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_repositories_rest_bad_request( @@ -31765,10 +31880,14 @@ def test_list_repositories_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ArtifactRegistryRestInterceptor, "post_list_repositories" ) as post, mock.patch.object( + transports.ArtifactRegistryRestInterceptor, + "post_list_repositories_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ArtifactRegistryRestInterceptor, "pre_list_repositories" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = repository.ListRepositoriesRequest.pb( repository.ListRepositoriesRequest() ) @@ -31794,6 +31913,10 @@ def test_list_repositories_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = repository.ListRepositoriesResponse() + post_with_metadata.return_value = ( + repository.ListRepositoriesResponse(), + metadata, + ) client.list_repositories( request, @@ -31805,6 +31928,7 @@ def test_list_repositories_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_repository_rest_bad_request(request_type=repository.GetRepositoryRequest): @@ -31905,10 +32029,13 
@@ def test_get_repository_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ArtifactRegistryRestInterceptor, "post_get_repository" ) as post, mock.patch.object( + transports.ArtifactRegistryRestInterceptor, "post_get_repository_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ArtifactRegistryRestInterceptor, "pre_get_repository" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = repository.GetRepositoryRequest.pb( repository.GetRepositoryRequest() ) @@ -31932,6 +32059,7 @@ def test_get_repository_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = repository.Repository() + post_with_metadata.return_value = repository.Repository(), metadata client.get_repository( request, @@ -31943,6 +32071,7 @@ def test_get_repository_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_repository_rest_bad_request( @@ -32160,10 +32289,14 @@ def test_create_repository_rest_interceptors(null_interceptor): ), mock.patch.object( transports.ArtifactRegistryRestInterceptor, "post_create_repository" ) as post, mock.patch.object( + transports.ArtifactRegistryRestInterceptor, + "post_create_repository_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ArtifactRegistryRestInterceptor, "pre_create_repository" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gda_repository.CreateRepositoryRequest.pb( gda_repository.CreateRepositoryRequest() ) @@ -32187,6 +32320,7 @@ def test_create_repository_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_repository( request, @@ -32198,6 +32332,7 @@ def test_create_repository_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_repository_rest_bad_request( @@ -32445,10 +32580,14 @@ def test_update_repository_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ArtifactRegistryRestInterceptor, "post_update_repository" ) as post, mock.patch.object( + transports.ArtifactRegistryRestInterceptor, + "post_update_repository_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ArtifactRegistryRestInterceptor, "pre_update_repository" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gda_repository.UpdateRepositoryRequest.pb( gda_repository.UpdateRepositoryRequest() ) @@ -32472,6 +32611,7 @@ def test_update_repository_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gda_repository.Repository() + post_with_metadata.return_value = gda_repository.Repository(), metadata client.update_repository( request, @@ -32483,6 +32623,7 @@ def test_update_repository_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_repository_rest_bad_request( @@ -32563,10 +32704,14 @@ def test_delete_repository_rest_interceptors(null_interceptor): ), mock.patch.object( transports.ArtifactRegistryRestInterceptor, "post_delete_repository" ) as post, mock.patch.object( + 
transports.ArtifactRegistryRestInterceptor, + "post_delete_repository_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ArtifactRegistryRestInterceptor, "pre_delete_repository" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = repository.DeleteRepositoryRequest.pb( repository.DeleteRepositoryRequest() ) @@ -32590,6 +32735,7 @@ def test_delete_repository_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.delete_repository( request, @@ -32601,6 +32747,7 @@ def test_delete_repository_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_packages_rest_bad_request(request_type=package.ListPackagesRequest): @@ -32683,10 +32830,13 @@ def test_list_packages_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ArtifactRegistryRestInterceptor, "post_list_packages" ) as post, mock.patch.object( + transports.ArtifactRegistryRestInterceptor, "post_list_packages_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ArtifactRegistryRestInterceptor, "pre_list_packages" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = package.ListPackagesRequest.pb(package.ListPackagesRequest()) transcode.return_value = { "method": "post", @@ -32710,6 +32860,7 @@ def test_list_packages_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = package.ListPackagesResponse() + post_with_metadata.return_value = package.ListPackagesResponse(), metadata client.list_packages( request, @@ -32721,6 +32872,7 @@ def test_list_packages_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_package_rest_bad_request(request_type=package.GetPackageRequest): @@ -32809,10 +32961,13 @@ def test_get_package_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ArtifactRegistryRestInterceptor, "post_get_package" ) as post, mock.patch.object( + transports.ArtifactRegistryRestInterceptor, "post_get_package_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ArtifactRegistryRestInterceptor, "pre_get_package" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = package.GetPackageRequest.pb(package.GetPackageRequest()) transcode.return_value = { "method": "post", @@ -32834,6 +32989,7 @@ def test_get_package_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = package.Package() + post_with_metadata.return_value = package.Package(), metadata client.get_package( request, @@ -32845,6 +33001,7 @@ def test_get_package_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_package_rest_bad_request(request_type=package.DeletePackageRequest): @@ -32927,10 +33084,13 @@ def test_delete_package_rest_interceptors(null_interceptor): ), mock.patch.object( transports.ArtifactRegistryRestInterceptor, "post_delete_package" ) as post, mock.patch.object( + transports.ArtifactRegistryRestInterceptor, "post_delete_package_with_metadata" + ) as post_with_metadata, 
mock.patch.object( transports.ArtifactRegistryRestInterceptor, "pre_delete_package" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = package.DeletePackageRequest.pb(package.DeletePackageRequest()) transcode.return_value = { "method": "post", @@ -32952,6 +33112,7 @@ def test_delete_package_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.delete_package( request, @@ -32963,6 +33124,7 @@ def test_delete_package_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_versions_rest_bad_request(request_type=version.ListVersionsRequest): @@ -33049,10 +33211,13 @@ def test_list_versions_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ArtifactRegistryRestInterceptor, "post_list_versions" ) as post, mock.patch.object( + transports.ArtifactRegistryRestInterceptor, "post_list_versions_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ArtifactRegistryRestInterceptor, "pre_list_versions" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = version.ListVersionsRequest.pb(version.ListVersionsRequest()) transcode.return_value = { "method": "post", @@ -33076,6 +33241,7 @@ def test_list_versions_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = version.ListVersionsResponse() + post_with_metadata.return_value = version.ListVersionsResponse(), metadata client.list_versions( request, @@ -33087,6 +33253,7 @@ def test_list_versions_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_version_rest_bad_request(request_type=version.GetVersionRequest): @@ -33175,10 +33342,13 @@ def test_get_version_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ArtifactRegistryRestInterceptor, "post_get_version" ) as post, mock.patch.object( + transports.ArtifactRegistryRestInterceptor, "post_get_version_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ArtifactRegistryRestInterceptor, "pre_get_version" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = version.GetVersionRequest.pb(version.GetVersionRequest()) transcode.return_value = { "method": "post", @@ -33200,6 +33370,7 @@ def test_get_version_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = version.Version() + post_with_metadata.return_value = version.Version(), metadata client.get_version( request, @@ -33211,6 +33382,7 @@ def test_get_version_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_version_rest_bad_request(request_type=version.DeleteVersionRequest): @@ -33293,10 +33465,13 @@ def test_delete_version_rest_interceptors(null_interceptor): ), mock.patch.object( transports.ArtifactRegistryRestInterceptor, "post_delete_version" ) as post, mock.patch.object( + transports.ArtifactRegistryRestInterceptor, "post_delete_version_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ArtifactRegistryRestInterceptor, "pre_delete_version" ) as pre: 
pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = version.DeleteVersionRequest.pb(version.DeleteVersionRequest()) transcode.return_value = { "method": "post", @@ -33318,6 +33493,7 @@ def test_delete_version_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.delete_version( request, @@ -33329,6 +33505,7 @@ def test_delete_version_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_batch_delete_versions_rest_bad_request( @@ -33413,10 +33590,14 @@ def test_batch_delete_versions_rest_interceptors(null_interceptor): ), mock.patch.object( transports.ArtifactRegistryRestInterceptor, "post_batch_delete_versions" ) as post, mock.patch.object( + transports.ArtifactRegistryRestInterceptor, + "post_batch_delete_versions_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ArtifactRegistryRestInterceptor, "pre_batch_delete_versions" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = version.BatchDeleteVersionsRequest.pb( version.BatchDeleteVersionsRequest() ) @@ -33440,6 +33621,7 @@ def test_batch_delete_versions_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.batch_delete_versions( request, @@ -33451,6 +33633,7 @@ def test_batch_delete_versions_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_version_rest_bad_request(request_type=gda_version.UpdateVersionRequest): @@ -33619,10 +33802,13 @@ def test_update_version_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ArtifactRegistryRestInterceptor, "post_update_version" ) as post, mock.patch.object( + transports.ArtifactRegistryRestInterceptor, "post_update_version_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ArtifactRegistryRestInterceptor, "pre_update_version" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gda_version.UpdateVersionRequest.pb( gda_version.UpdateVersionRequest() ) @@ -33646,6 +33832,7 @@ def test_update_version_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gda_version.Version() + post_with_metadata.return_value = gda_version.Version(), metadata client.update_version( request, @@ -33657,6 +33844,7 @@ def test_update_version_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_files_rest_bad_request(request_type=file.ListFilesRequest): @@ -33739,10 +33927,13 @@ def test_list_files_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ArtifactRegistryRestInterceptor, "post_list_files" ) as post, mock.patch.object( + transports.ArtifactRegistryRestInterceptor, "post_list_files_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ArtifactRegistryRestInterceptor, "pre_list_files" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = 
file.ListFilesRequest.pb(file.ListFilesRequest()) transcode.return_value = { "method": "post", @@ -33764,6 +33955,7 @@ def test_list_files_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = file.ListFilesResponse() + post_with_metadata.return_value = file.ListFilesResponse(), metadata client.list_files( request, @@ -33775,6 +33967,7 @@ def test_list_files_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_file_rest_bad_request(request_type=file.GetFileRequest): @@ -33865,10 +34058,13 @@ def test_get_file_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ArtifactRegistryRestInterceptor, "post_get_file" ) as post, mock.patch.object( + transports.ArtifactRegistryRestInterceptor, "post_get_file_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ArtifactRegistryRestInterceptor, "pre_get_file" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = file.GetFileRequest.pb(file.GetFileRequest()) transcode.return_value = { "method": "post", @@ -33890,6 +34086,7 @@ def test_get_file_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = file.File() + post_with_metadata.return_value = file.File(), metadata client.get_file( request, @@ -33901,6 +34098,7 @@ def test_get_file_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_file_rest_bad_request(request_type=file.DeleteFileRequest): @@ -33983,10 +34181,13 @@ def test_delete_file_rest_interceptors(null_interceptor): ), mock.patch.object( transports.ArtifactRegistryRestInterceptor, "post_delete_file" ) as post, mock.patch.object( + transports.ArtifactRegistryRestInterceptor, "post_delete_file_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ArtifactRegistryRestInterceptor, "pre_delete_file" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = file.DeleteFileRequest.pb(file.DeleteFileRequest()) transcode.return_value = { "method": "post", @@ -34008,6 +34209,7 @@ def test_delete_file_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.delete_file( request, @@ -34019,6 +34221,7 @@ def test_delete_file_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_file_rest_bad_request(request_type=gda_file.UpdateFileRequest): @@ -34190,10 +34393,13 @@ def test_update_file_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ArtifactRegistryRestInterceptor, "post_update_file" ) as post, mock.patch.object( + transports.ArtifactRegistryRestInterceptor, "post_update_file_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ArtifactRegistryRestInterceptor, "pre_update_file" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gda_file.UpdateFileRequest.pb(gda_file.UpdateFileRequest()) transcode.return_value = { "method": "post", @@ -34215,6 +34421,7 @@ def test_update_file_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = 
gda_file.File() + post_with_metadata.return_value = gda_file.File(), metadata client.update_file( request, @@ -34226,6 +34433,7 @@ def test_update_file_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_tags_rest_bad_request(request_type=tag.ListTagsRequest): @@ -34312,10 +34520,13 @@ def test_list_tags_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ArtifactRegistryRestInterceptor, "post_list_tags" ) as post, mock.patch.object( + transports.ArtifactRegistryRestInterceptor, "post_list_tags_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ArtifactRegistryRestInterceptor, "pre_list_tags" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = tag.ListTagsRequest.pb(tag.ListTagsRequest()) transcode.return_value = { "method": "post", @@ -34337,6 +34548,7 @@ def test_list_tags_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = tag.ListTagsResponse() + post_with_metadata.return_value = tag.ListTagsResponse(), metadata client.list_tags( request, @@ -34348,6 +34560,7 @@ def test_list_tags_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_tag_rest_bad_request(request_type=tag.GetTagRequest): @@ -34436,10 +34649,13 @@ def test_get_tag_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ArtifactRegistryRestInterceptor, "post_get_tag" ) as post, mock.patch.object( + transports.ArtifactRegistryRestInterceptor, "post_get_tag_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ArtifactRegistryRestInterceptor, "pre_get_tag" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = tag.GetTagRequest.pb(tag.GetTagRequest()) transcode.return_value = { "method": "post", @@ -34461,6 +34677,7 @@ def test_get_tag_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = tag.Tag() + post_with_metadata.return_value = tag.Tag(), metadata client.get_tag( request, @@ -34472,6 +34689,7 @@ def test_get_tag_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_tag_rest_bad_request(request_type=gda_tag.CreateTagRequest): @@ -34628,10 +34846,13 @@ def test_create_tag_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ArtifactRegistryRestInterceptor, "post_create_tag" ) as post, mock.patch.object( + transports.ArtifactRegistryRestInterceptor, "post_create_tag_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ArtifactRegistryRestInterceptor, "pre_create_tag" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gda_tag.CreateTagRequest.pb(gda_tag.CreateTagRequest()) transcode.return_value = { "method": "post", @@ -34653,6 +34874,7 @@ def test_create_tag_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gda_tag.Tag() + post_with_metadata.return_value = gda_tag.Tag(), metadata client.create_tag( request, @@ -34664,6 +34886,7 @@ def test_create_tag_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def 
test_update_tag_rest_bad_request(request_type=gda_tag.UpdateTagRequest): @@ -34827,10 +35050,13 @@ def test_update_tag_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ArtifactRegistryRestInterceptor, "post_update_tag" ) as post, mock.patch.object( + transports.ArtifactRegistryRestInterceptor, "post_update_tag_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ArtifactRegistryRestInterceptor, "pre_update_tag" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gda_tag.UpdateTagRequest.pb(gda_tag.UpdateTagRequest()) transcode.return_value = { "method": "post", @@ -34852,6 +35078,7 @@ def test_update_tag_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gda_tag.Tag() + post_with_metadata.return_value = gda_tag.Tag(), metadata client.update_tag( request, @@ -34863,6 +35090,7 @@ def test_update_tag_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_tag_rest_bad_request(request_type=tag.DeleteTagRequest): @@ -35139,10 +35367,13 @@ def test_create_rule_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ArtifactRegistryRestInterceptor, "post_create_rule" ) as post, mock.patch.object( + transports.ArtifactRegistryRestInterceptor, "post_create_rule_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ArtifactRegistryRestInterceptor, "pre_create_rule" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gda_rule.CreateRuleRequest.pb(gda_rule.CreateRuleRequest()) transcode.return_value = { "method": "post", @@ -35164,6 +35395,7 @@ def test_create_rule_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gda_rule.Rule() + post_with_metadata.return_value = gda_rule.Rule(), metadata client.create_rule( request, @@ -35175,6 +35407,7 @@ def test_create_rule_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_rules_rest_bad_request(request_type=rule.ListRulesRequest): @@ -35257,10 +35490,13 @@ def test_list_rules_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ArtifactRegistryRestInterceptor, "post_list_rules" ) as post, mock.patch.object( + transports.ArtifactRegistryRestInterceptor, "post_list_rules_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ArtifactRegistryRestInterceptor, "pre_list_rules" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = rule.ListRulesRequest.pb(rule.ListRulesRequest()) transcode.return_value = { "method": "post", @@ -35282,6 +35518,7 @@ def test_list_rules_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = rule.ListRulesResponse() + post_with_metadata.return_value = rule.ListRulesResponse(), metadata client.list_rules( request, @@ -35293,6 +35530,7 @@ def test_list_rules_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_rule_rest_bad_request(request_type=rule.GetRuleRequest): @@ -35385,10 +35623,13 @@ def test_get_rule_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( 
transports.ArtifactRegistryRestInterceptor, "post_get_rule" ) as post, mock.patch.object( + transports.ArtifactRegistryRestInterceptor, "post_get_rule_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ArtifactRegistryRestInterceptor, "pre_get_rule" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = rule.GetRuleRequest.pb(rule.GetRuleRequest()) transcode.return_value = { "method": "post", @@ -35410,6 +35651,7 @@ def test_get_rule_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = rule.Rule() + post_with_metadata.return_value = rule.Rule(), metadata client.get_rule( request, @@ -35421,6 +35663,7 @@ def test_get_rule_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_rule_rest_bad_request(request_type=gda_rule.UpdateRuleRequest): @@ -35596,10 +35839,13 @@ def test_update_rule_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ArtifactRegistryRestInterceptor, "post_update_rule" ) as post, mock.patch.object( + transports.ArtifactRegistryRestInterceptor, "post_update_rule_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ArtifactRegistryRestInterceptor, "pre_update_rule" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gda_rule.UpdateRuleRequest.pb(gda_rule.UpdateRuleRequest()) transcode.return_value = { "method": "post", @@ -35621,6 +35867,7 @@ def test_update_rule_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gda_rule.Rule() + post_with_metadata.return_value = gda_rule.Rule(), metadata client.update_rule( request, @@ -35632,6 +35879,7 @@ def test_update_rule_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_rule_rest_bad_request(request_type=rule.DeleteRuleRequest): @@ -35828,10 +36076,13 @@ def test_set_iam_policy_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ArtifactRegistryRestInterceptor, "post_set_iam_policy" ) as post, mock.patch.object( + transports.ArtifactRegistryRestInterceptor, "post_set_iam_policy_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ArtifactRegistryRestInterceptor, "pre_set_iam_policy" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = iam_policy_pb2.SetIamPolicyRequest() transcode.return_value = { "method": "post", @@ -35853,6 +36104,7 @@ def test_set_iam_policy_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = policy_pb2.Policy() + post_with_metadata.return_value = policy_pb2.Policy(), metadata client.set_iam_policy( request, @@ -35864,6 +36116,7 @@ def test_set_iam_policy_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_iam_policy_rest_bad_request( @@ -35951,10 +36204,13 @@ def test_get_iam_policy_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ArtifactRegistryRestInterceptor, "post_get_iam_policy" ) as post, mock.patch.object( + transports.ArtifactRegistryRestInterceptor, "post_get_iam_policy_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ArtifactRegistryRestInterceptor, 
"pre_get_iam_policy" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = iam_policy_pb2.GetIamPolicyRequest() transcode.return_value = { "method": "post", @@ -35976,6 +36232,7 @@ def test_get_iam_policy_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = policy_pb2.Policy() + post_with_metadata.return_value = policy_pb2.Policy(), metadata client.get_iam_policy( request, @@ -35987,6 +36244,7 @@ def test_get_iam_policy_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_test_iam_permissions_rest_bad_request( @@ -36072,10 +36330,14 @@ def test_test_iam_permissions_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ArtifactRegistryRestInterceptor, "post_test_iam_permissions" ) as post, mock.patch.object( + transports.ArtifactRegistryRestInterceptor, + "post_test_iam_permissions_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ArtifactRegistryRestInterceptor, "pre_test_iam_permissions" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = iam_policy_pb2.TestIamPermissionsRequest() transcode.return_value = { "method": "post", @@ -36099,6 +36361,10 @@ def test_test_iam_permissions_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = iam_policy_pb2.TestIamPermissionsResponse() + post_with_metadata.return_value = ( + iam_policy_pb2.TestIamPermissionsResponse(), + metadata, + ) client.test_iam_permissions( request, @@ -36110,6 +36376,7 @@ def test_test_iam_permissions_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_project_settings_rest_bad_request( @@ -36201,10 +36468,14 @@ def test_get_project_settings_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ArtifactRegistryRestInterceptor, "post_get_project_settings" ) as post, mock.patch.object( + transports.ArtifactRegistryRestInterceptor, + "post_get_project_settings_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ArtifactRegistryRestInterceptor, "pre_get_project_settings" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = settings.GetProjectSettingsRequest.pb( settings.GetProjectSettingsRequest() ) @@ -36228,6 +36499,7 @@ def test_get_project_settings_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = settings.ProjectSettings() + post_with_metadata.return_value = settings.ProjectSettings(), metadata client.get_project_settings( request, @@ -36239,6 +36511,7 @@ def test_get_project_settings_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_project_settings_rest_bad_request( @@ -36402,10 +36675,14 @@ def test_update_project_settings_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ArtifactRegistryRestInterceptor, "post_update_project_settings" ) as post, mock.patch.object( + transports.ArtifactRegistryRestInterceptor, + "post_update_project_settings_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ArtifactRegistryRestInterceptor, "pre_update_project_settings" ) as pre: pre.assert_not_called() 
post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = settings.UpdateProjectSettingsRequest.pb( settings.UpdateProjectSettingsRequest() ) @@ -36429,6 +36706,7 @@ def test_update_project_settings_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = settings.ProjectSettings() + post_with_metadata.return_value = settings.ProjectSettings(), metadata client.update_project_settings( request, @@ -36440,6 +36718,7 @@ def test_update_project_settings_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_vpcsc_config_rest_bad_request( @@ -36526,10 +36805,14 @@ def test_get_vpcsc_config_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ArtifactRegistryRestInterceptor, "post_get_vpcsc_config" ) as post, mock.patch.object( + transports.ArtifactRegistryRestInterceptor, + "post_get_vpcsc_config_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ArtifactRegistryRestInterceptor, "pre_get_vpcsc_config" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = vpcsc_config.GetVPCSCConfigRequest.pb( vpcsc_config.GetVPCSCConfigRequest() ) @@ -36553,6 +36836,7 @@ def test_get_vpcsc_config_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = vpcsc_config.VPCSCConfig() + post_with_metadata.return_value = vpcsc_config.VPCSCConfig(), metadata client.get_vpcsc_config( request, @@ -36564,6 +36848,7 @@ def test_get_vpcsc_config_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_vpcsc_config_rest_bad_request( @@ -36725,10 +37010,14 @@ def test_update_vpcsc_config_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ArtifactRegistryRestInterceptor, "post_update_vpcsc_config" ) as post, mock.patch.object( + transports.ArtifactRegistryRestInterceptor, + "post_update_vpcsc_config_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ArtifactRegistryRestInterceptor, "pre_update_vpcsc_config" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gda_vpcsc_config.UpdateVPCSCConfigRequest.pb( gda_vpcsc_config.UpdateVPCSCConfigRequest() ) @@ -36754,6 +37043,7 @@ def test_update_vpcsc_config_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gda_vpcsc_config.VPCSCConfig() + post_with_metadata.return_value = gda_vpcsc_config.VPCSCConfig(), metadata client.update_vpcsc_config( request, @@ -36765,6 +37055,7 @@ def test_update_vpcsc_config_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_package_rest_bad_request(request_type=gda_package.UpdatePackageRequest): @@ -36931,10 +37222,13 @@ def test_update_package_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ArtifactRegistryRestInterceptor, "post_update_package" ) as post, mock.patch.object( + transports.ArtifactRegistryRestInterceptor, "post_update_package_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ArtifactRegistryRestInterceptor, "pre_update_package" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = 
gda_package.UpdatePackageRequest.pb( gda_package.UpdatePackageRequest() ) @@ -36958,6 +37252,7 @@ def test_update_package_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gda_package.Package() + post_with_metadata.return_value = gda_package.Package(), metadata client.update_package( request, @@ -36969,6 +37264,7 @@ def test_update_package_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_attachments_rest_bad_request( @@ -37053,10 +37349,14 @@ def test_list_attachments_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ArtifactRegistryRestInterceptor, "post_list_attachments" ) as post, mock.patch.object( + transports.ArtifactRegistryRestInterceptor, + "post_list_attachments_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ArtifactRegistryRestInterceptor, "pre_list_attachments" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = attachment.ListAttachmentsRequest.pb( attachment.ListAttachmentsRequest() ) @@ -37082,6 +37382,7 @@ def test_list_attachments_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = attachment.ListAttachmentsResponse() + post_with_metadata.return_value = attachment.ListAttachmentsResponse(), metadata client.list_attachments( request, @@ -37093,6 +37394,7 @@ def test_list_attachments_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_attachment_rest_bad_request(request_type=attachment.GetAttachmentRequest): @@ -37189,10 +37491,13 @@ def test_get_attachment_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ArtifactRegistryRestInterceptor, "post_get_attachment" ) as post, mock.patch.object( + transports.ArtifactRegistryRestInterceptor, "post_get_attachment_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ArtifactRegistryRestInterceptor, "pre_get_attachment" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = attachment.GetAttachmentRequest.pb( attachment.GetAttachmentRequest() ) @@ -37216,6 +37521,7 @@ def test_get_attachment_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = attachment.Attachment() + post_with_metadata.return_value = attachment.Attachment(), metadata client.get_attachment( request, @@ -37227,6 +37533,7 @@ def test_get_attachment_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_attachment_rest_bad_request( @@ -37385,10 +37692,14 @@ def test_create_attachment_rest_interceptors(null_interceptor): ), mock.patch.object( transports.ArtifactRegistryRestInterceptor, "post_create_attachment" ) as post, mock.patch.object( + transports.ArtifactRegistryRestInterceptor, + "post_create_attachment_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ArtifactRegistryRestInterceptor, "pre_create_attachment" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gda_attachment.CreateAttachmentRequest.pb( gda_attachment.CreateAttachmentRequest() ) @@ -37412,6 +37723,7 @@ def test_create_attachment_rest_interceptors(null_interceptor): ] pre.return_value = 
request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_attachment( request, @@ -37423,6 +37735,7 @@ def test_create_attachment_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_attachment_rest_bad_request( @@ -37507,10 +37820,14 @@ def test_delete_attachment_rest_interceptors(null_interceptor): ), mock.patch.object( transports.ArtifactRegistryRestInterceptor, "post_delete_attachment" ) as post, mock.patch.object( + transports.ArtifactRegistryRestInterceptor, + "post_delete_attachment_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ArtifactRegistryRestInterceptor, "pre_delete_attachment" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = attachment.DeleteAttachmentRequest.pb( attachment.DeleteAttachmentRequest() ) @@ -37534,6 +37851,7 @@ def test_delete_attachment_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.delete_attachment( request, @@ -37545,6 +37863,7 @@ def test_delete_attachment_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationRequest): diff --git a/packages/google-cloud-artifact-registry/tests/unit/gapic/artifactregistry_v1beta2/test_artifact_registry.py b/packages/google-cloud-artifact-registry/tests/unit/gapic/artifactregistry_v1beta2/test_artifact_registry.py index e0648ecd9971..c054b07805c9 100644 --- a/packages/google-cloud-artifact-registry/tests/unit/gapic/artifactregistry_v1beta2/test_artifact_registry.py +++ b/packages/google-cloud-artifact-registry/tests/unit/gapic/artifactregistry_v1beta2/test_artifact_registry.py @@ -85,6 +85,13 @@ from google.cloud.artifactregistry_v1beta2.types import tag as gda_tag from google.cloud.artifactregistry_v1beta2.types import version, yum_artifact +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -344,6 +351,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
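# NOTE (illustrative sketch, not part of the recorded change): the parametrized tests
# added below exercise the client's new credential-info error enrichment. The behavior
# they assert mirrors the _add_cred_info_for_auth_errors helper added to the generated
# clients elsewhere in this change (see the asset_v1 client.py hunk later in this diff);
# a condensed standalone version looks roughly like this:
import json
from http import HTTPStatus


def add_cred_info_for_auth_errors(error, credentials) -> None:
    """Append a credential-info JSON string to error details for 401/403/404 errors."""
    if error.code not in (
        HTTPStatus.UNAUTHORIZED,
        HTTPStatus.FORBIDDEN,
        HTTPStatus.NOT_FOUND,
    ):
        return
    # get_cred_info is only available in google-auth>=2.35.0
    if not hasattr(credentials, "get_cred_info"):
        return
    cred_info = credentials.get_cred_info()
    # The generated helper appends to the error's private _details list; shown here only
    # to match the asserted behavior (error.details gains the serialized cred info).
    if cred_info and hasattr(error._details, "append"):
        error._details.append(json.dumps(cred_info))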
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = ArtifactRegistryClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = ArtifactRegistryClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -14764,10 +14814,14 @@ def test_import_apt_artifacts_rest_interceptors(null_interceptor): ), mock.patch.object( transports.ArtifactRegistryRestInterceptor, "post_import_apt_artifacts" ) as post, mock.patch.object( + transports.ArtifactRegistryRestInterceptor, + "post_import_apt_artifacts_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ArtifactRegistryRestInterceptor, "pre_import_apt_artifacts" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = apt_artifact.ImportAptArtifactsRequest.pb( apt_artifact.ImportAptArtifactsRequest() ) @@ -14791,6 +14845,7 @@ def test_import_apt_artifacts_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.import_apt_artifacts( request, @@ -14802,6 +14857,7 @@ def test_import_apt_artifacts_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_import_yum_artifacts_rest_bad_request( @@ -14882,10 +14938,14 @@ def test_import_yum_artifacts_rest_interceptors(null_interceptor): ), mock.patch.object( transports.ArtifactRegistryRestInterceptor, "post_import_yum_artifacts" ) as post, mock.patch.object( + transports.ArtifactRegistryRestInterceptor, + "post_import_yum_artifacts_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ArtifactRegistryRestInterceptor, "pre_import_yum_artifacts" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = yum_artifact.ImportYumArtifactsRequest.pb( yum_artifact.ImportYumArtifactsRequest() ) @@ -14909,6 +14969,7 @@ def test_import_yum_artifacts_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.import_yum_artifacts( request, @@ -14920,6 +14981,7 @@ def test_import_yum_artifacts_rest_interceptors(null_interceptor): pre.assert_called_once() 
post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_repositories_rest_bad_request( @@ -15004,10 +15066,14 @@ def test_list_repositories_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ArtifactRegistryRestInterceptor, "post_list_repositories" ) as post, mock.patch.object( + transports.ArtifactRegistryRestInterceptor, + "post_list_repositories_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ArtifactRegistryRestInterceptor, "pre_list_repositories" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = repository.ListRepositoriesRequest.pb( repository.ListRepositoriesRequest() ) @@ -15033,6 +15099,10 @@ def test_list_repositories_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = repository.ListRepositoriesResponse() + post_with_metadata.return_value = ( + repository.ListRepositoriesResponse(), + metadata, + ) client.list_repositories( request, @@ -15044,6 +15114,7 @@ def test_list_repositories_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_repository_rest_bad_request(request_type=repository.GetRepositoryRequest): @@ -15132,10 +15203,13 @@ def test_get_repository_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ArtifactRegistryRestInterceptor, "post_get_repository" ) as post, mock.patch.object( + transports.ArtifactRegistryRestInterceptor, "post_get_repository_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ArtifactRegistryRestInterceptor, "pre_get_repository" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = repository.GetRepositoryRequest.pb( repository.GetRepositoryRequest() ) @@ -15159,6 +15233,7 @@ def test_get_repository_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = repository.Repository() + post_with_metadata.return_value = repository.Repository(), metadata client.get_repository( request, @@ -15170,6 +15245,7 @@ def test_get_repository_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_repository_rest_bad_request( @@ -15327,10 +15403,14 @@ def test_create_repository_rest_interceptors(null_interceptor): ), mock.patch.object( transports.ArtifactRegistryRestInterceptor, "post_create_repository" ) as post, mock.patch.object( + transports.ArtifactRegistryRestInterceptor, + "post_create_repository_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ArtifactRegistryRestInterceptor, "pre_create_repository" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gda_repository.CreateRepositoryRequest.pb( gda_repository.CreateRepositoryRequest() ) @@ -15354,6 +15434,7 @@ def test_create_repository_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_repository( request, @@ -15365,6 +15446,7 @@ def test_create_repository_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_repository_rest_bad_request( @@ -15540,10 +15622,14 @@ def 
test_update_repository_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ArtifactRegistryRestInterceptor, "post_update_repository" ) as post, mock.patch.object( + transports.ArtifactRegistryRestInterceptor, + "post_update_repository_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ArtifactRegistryRestInterceptor, "pre_update_repository" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gda_repository.UpdateRepositoryRequest.pb( gda_repository.UpdateRepositoryRequest() ) @@ -15567,6 +15653,7 @@ def test_update_repository_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gda_repository.Repository() + post_with_metadata.return_value = gda_repository.Repository(), metadata client.update_repository( request, @@ -15578,6 +15665,7 @@ def test_update_repository_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_repository_rest_bad_request( @@ -15658,10 +15746,14 @@ def test_delete_repository_rest_interceptors(null_interceptor): ), mock.patch.object( transports.ArtifactRegistryRestInterceptor, "post_delete_repository" ) as post, mock.patch.object( + transports.ArtifactRegistryRestInterceptor, + "post_delete_repository_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ArtifactRegistryRestInterceptor, "pre_delete_repository" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = repository.DeleteRepositoryRequest.pb( repository.DeleteRepositoryRequest() ) @@ -15685,6 +15777,7 @@ def test_delete_repository_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.delete_repository( request, @@ -15696,6 +15789,7 @@ def test_delete_repository_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_packages_rest_bad_request(request_type=package.ListPackagesRequest): @@ -15778,10 +15872,13 @@ def test_list_packages_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ArtifactRegistryRestInterceptor, "post_list_packages" ) as post, mock.patch.object( + transports.ArtifactRegistryRestInterceptor, "post_list_packages_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ArtifactRegistryRestInterceptor, "pre_list_packages" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = package.ListPackagesRequest.pb(package.ListPackagesRequest()) transcode.return_value = { "method": "post", @@ -15805,6 +15902,7 @@ def test_list_packages_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = package.ListPackagesResponse() + post_with_metadata.return_value = package.ListPackagesResponse(), metadata client.list_packages( request, @@ -15816,6 +15914,7 @@ def test_list_packages_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_package_rest_bad_request(request_type=package.GetPackageRequest): @@ -15904,10 +16003,13 @@ def test_get_package_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( 
transports.ArtifactRegistryRestInterceptor, "post_get_package" ) as post, mock.patch.object( + transports.ArtifactRegistryRestInterceptor, "post_get_package_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ArtifactRegistryRestInterceptor, "pre_get_package" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = package.GetPackageRequest.pb(package.GetPackageRequest()) transcode.return_value = { "method": "post", @@ -15929,6 +16031,7 @@ def test_get_package_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = package.Package() + post_with_metadata.return_value = package.Package(), metadata client.get_package( request, @@ -15940,6 +16043,7 @@ def test_get_package_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_package_rest_bad_request(request_type=package.DeletePackageRequest): @@ -16022,10 +16126,13 @@ def test_delete_package_rest_interceptors(null_interceptor): ), mock.patch.object( transports.ArtifactRegistryRestInterceptor, "post_delete_package" ) as post, mock.patch.object( + transports.ArtifactRegistryRestInterceptor, "post_delete_package_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ArtifactRegistryRestInterceptor, "pre_delete_package" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = package.DeletePackageRequest.pb(package.DeletePackageRequest()) transcode.return_value = { "method": "post", @@ -16047,6 +16154,7 @@ def test_delete_package_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.delete_package( request, @@ -16058,6 +16166,7 @@ def test_delete_package_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_versions_rest_bad_request(request_type=version.ListVersionsRequest): @@ -16144,10 +16253,13 @@ def test_list_versions_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ArtifactRegistryRestInterceptor, "post_list_versions" ) as post, mock.patch.object( + transports.ArtifactRegistryRestInterceptor, "post_list_versions_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ArtifactRegistryRestInterceptor, "pre_list_versions" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = version.ListVersionsRequest.pb(version.ListVersionsRequest()) transcode.return_value = { "method": "post", @@ -16171,6 +16283,7 @@ def test_list_versions_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = version.ListVersionsResponse() + post_with_metadata.return_value = version.ListVersionsResponse(), metadata client.list_versions( request, @@ -16182,6 +16295,7 @@ def test_list_versions_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_version_rest_bad_request(request_type=version.GetVersionRequest): @@ -16270,10 +16384,13 @@ def test_get_version_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ArtifactRegistryRestInterceptor, "post_get_version" ) as post, mock.patch.object( + 
transports.ArtifactRegistryRestInterceptor, "post_get_version_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ArtifactRegistryRestInterceptor, "pre_get_version" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = version.GetVersionRequest.pb(version.GetVersionRequest()) transcode.return_value = { "method": "post", @@ -16295,6 +16412,7 @@ def test_get_version_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = version.Version() + post_with_metadata.return_value = version.Version(), metadata client.get_version( request, @@ -16306,6 +16424,7 @@ def test_get_version_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_version_rest_bad_request(request_type=version.DeleteVersionRequest): @@ -16388,10 +16507,13 @@ def test_delete_version_rest_interceptors(null_interceptor): ), mock.patch.object( transports.ArtifactRegistryRestInterceptor, "post_delete_version" ) as post, mock.patch.object( + transports.ArtifactRegistryRestInterceptor, "post_delete_version_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ArtifactRegistryRestInterceptor, "pre_delete_version" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = version.DeleteVersionRequest.pb(version.DeleteVersionRequest()) transcode.return_value = { "method": "post", @@ -16413,6 +16535,7 @@ def test_delete_version_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.delete_version( request, @@ -16424,6 +16547,7 @@ def test_delete_version_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_files_rest_bad_request(request_type=file.ListFilesRequest): @@ -16506,10 +16630,13 @@ def test_list_files_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ArtifactRegistryRestInterceptor, "post_list_files" ) as post, mock.patch.object( + transports.ArtifactRegistryRestInterceptor, "post_list_files_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ArtifactRegistryRestInterceptor, "pre_list_files" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = file.ListFilesRequest.pb(file.ListFilesRequest()) transcode.return_value = { "method": "post", @@ -16531,6 +16658,7 @@ def test_list_files_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = file.ListFilesResponse() + post_with_metadata.return_value = file.ListFilesResponse(), metadata client.list_files( request, @@ -16542,6 +16670,7 @@ def test_list_files_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_file_rest_bad_request(request_type=file.GetFileRequest): @@ -16632,10 +16761,13 @@ def test_get_file_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ArtifactRegistryRestInterceptor, "post_get_file" ) as post, mock.patch.object( + transports.ArtifactRegistryRestInterceptor, "post_get_file_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ArtifactRegistryRestInterceptor, 
"pre_get_file" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = file.GetFileRequest.pb(file.GetFileRequest()) transcode.return_value = { "method": "post", @@ -16657,6 +16789,7 @@ def test_get_file_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = file.File() + post_with_metadata.return_value = file.File(), metadata client.get_file( request, @@ -16668,6 +16801,7 @@ def test_get_file_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_tags_rest_bad_request(request_type=tag.ListTagsRequest): @@ -16754,10 +16888,13 @@ def test_list_tags_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ArtifactRegistryRestInterceptor, "post_list_tags" ) as post, mock.patch.object( + transports.ArtifactRegistryRestInterceptor, "post_list_tags_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ArtifactRegistryRestInterceptor, "pre_list_tags" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = tag.ListTagsRequest.pb(tag.ListTagsRequest()) transcode.return_value = { "method": "post", @@ -16779,6 +16916,7 @@ def test_list_tags_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = tag.ListTagsResponse() + post_with_metadata.return_value = tag.ListTagsResponse(), metadata client.list_tags( request, @@ -16790,6 +16928,7 @@ def test_list_tags_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_tag_rest_bad_request(request_type=tag.GetTagRequest): @@ -16878,10 +17017,13 @@ def test_get_tag_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ArtifactRegistryRestInterceptor, "post_get_tag" ) as post, mock.patch.object( + transports.ArtifactRegistryRestInterceptor, "post_get_tag_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ArtifactRegistryRestInterceptor, "pre_get_tag" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = tag.GetTagRequest.pb(tag.GetTagRequest()) transcode.return_value = { "method": "post", @@ -16903,6 +17045,7 @@ def test_get_tag_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = tag.Tag() + post_with_metadata.return_value = tag.Tag(), metadata client.get_tag( request, @@ -16914,6 +17057,7 @@ def test_get_tag_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_tag_rest_bad_request(request_type=gda_tag.CreateTagRequest): @@ -17070,10 +17214,13 @@ def test_create_tag_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ArtifactRegistryRestInterceptor, "post_create_tag" ) as post, mock.patch.object( + transports.ArtifactRegistryRestInterceptor, "post_create_tag_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ArtifactRegistryRestInterceptor, "pre_create_tag" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gda_tag.CreateTagRequest.pb(gda_tag.CreateTagRequest()) transcode.return_value = { "method": "post", @@ -17095,6 +17242,7 @@ def test_create_tag_rest_interceptors(null_interceptor): ] 
pre.return_value = request, metadata post.return_value = gda_tag.Tag() + post_with_metadata.return_value = gda_tag.Tag(), metadata client.create_tag( request, @@ -17106,6 +17254,7 @@ def test_create_tag_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_tag_rest_bad_request(request_type=gda_tag.UpdateTagRequest): @@ -17269,10 +17418,13 @@ def test_update_tag_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ArtifactRegistryRestInterceptor, "post_update_tag" ) as post, mock.patch.object( + transports.ArtifactRegistryRestInterceptor, "post_update_tag_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ArtifactRegistryRestInterceptor, "pre_update_tag" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gda_tag.UpdateTagRequest.pb(gda_tag.UpdateTagRequest()) transcode.return_value = { "method": "post", @@ -17294,6 +17446,7 @@ def test_update_tag_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gda_tag.Tag() + post_with_metadata.return_value = gda_tag.Tag(), metadata client.update_tag( request, @@ -17305,6 +17458,7 @@ def test_update_tag_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_tag_rest_bad_request(request_type=tag.DeleteTagRequest): @@ -17501,10 +17655,13 @@ def test_set_iam_policy_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ArtifactRegistryRestInterceptor, "post_set_iam_policy" ) as post, mock.patch.object( + transports.ArtifactRegistryRestInterceptor, "post_set_iam_policy_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ArtifactRegistryRestInterceptor, "pre_set_iam_policy" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = iam_policy_pb2.SetIamPolicyRequest() transcode.return_value = { "method": "post", @@ -17526,6 +17683,7 @@ def test_set_iam_policy_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = policy_pb2.Policy() + post_with_metadata.return_value = policy_pb2.Policy(), metadata client.set_iam_policy( request, @@ -17537,6 +17695,7 @@ def test_set_iam_policy_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_iam_policy_rest_bad_request( @@ -17624,10 +17783,13 @@ def test_get_iam_policy_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ArtifactRegistryRestInterceptor, "post_get_iam_policy" ) as post, mock.patch.object( + transports.ArtifactRegistryRestInterceptor, "post_get_iam_policy_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ArtifactRegistryRestInterceptor, "pre_get_iam_policy" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = iam_policy_pb2.GetIamPolicyRequest() transcode.return_value = { "method": "post", @@ -17649,6 +17811,7 @@ def test_get_iam_policy_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = policy_pb2.Policy() + post_with_metadata.return_value = policy_pb2.Policy(), metadata client.get_iam_policy( request, @@ -17660,6 +17823,7 @@ def test_get_iam_policy_rest_interceptors(null_interceptor): 
pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_test_iam_permissions_rest_bad_request( @@ -17745,10 +17909,14 @@ def test_test_iam_permissions_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ArtifactRegistryRestInterceptor, "post_test_iam_permissions" ) as post, mock.patch.object( + transports.ArtifactRegistryRestInterceptor, + "post_test_iam_permissions_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ArtifactRegistryRestInterceptor, "pre_test_iam_permissions" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = iam_policy_pb2.TestIamPermissionsRequest() transcode.return_value = { "method": "post", @@ -17772,6 +17940,10 @@ def test_test_iam_permissions_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = iam_policy_pb2.TestIamPermissionsResponse() + post_with_metadata.return_value = ( + iam_policy_pb2.TestIamPermissionsResponse(), + metadata, + ) client.test_iam_permissions( request, @@ -17783,6 +17955,7 @@ def test_test_iam_permissions_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_project_settings_rest_bad_request( @@ -17872,10 +18045,14 @@ def test_get_project_settings_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ArtifactRegistryRestInterceptor, "post_get_project_settings" ) as post, mock.patch.object( + transports.ArtifactRegistryRestInterceptor, + "post_get_project_settings_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ArtifactRegistryRestInterceptor, "pre_get_project_settings" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = settings.GetProjectSettingsRequest.pb( settings.GetProjectSettingsRequest() ) @@ -17899,6 +18076,7 @@ def test_get_project_settings_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = settings.ProjectSettings() + post_with_metadata.return_value = settings.ProjectSettings(), metadata client.get_project_settings( request, @@ -17910,6 +18088,7 @@ def test_get_project_settings_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_project_settings_rest_bad_request( @@ -18070,10 +18249,14 @@ def test_update_project_settings_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ArtifactRegistryRestInterceptor, "post_update_project_settings" ) as post, mock.patch.object( + transports.ArtifactRegistryRestInterceptor, + "post_update_project_settings_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ArtifactRegistryRestInterceptor, "pre_update_project_settings" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = settings.UpdateProjectSettingsRequest.pb( settings.UpdateProjectSettingsRequest() ) @@ -18097,6 +18280,7 @@ def test_update_project_settings_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = settings.ProjectSettings() + post_with_metadata.return_value = settings.ProjectSettings(), metadata client.update_project_settings( request, @@ -18108,6 +18292,7 @@ def test_update_project_settings_rest_interceptors(null_interceptor): 
pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationRequest): diff --git a/packages/google-cloud-asset/CHANGELOG.md b/packages/google-cloud-asset/CHANGELOG.md index f15b1daab02b..408eb5815da4 100644 --- a/packages/google-cloud-asset/CHANGELOG.md +++ b/packages/google-cloud-asset/CHANGELOG.md @@ -4,6 +4,14 @@ [1]: https://pypi.org/project/google-cloud-asset/#history +## [3.29.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-asset-v3.28.0...google-cloud-asset-v3.29.0) (2025-02-12) + + +### Features + +* Add REST Interceptors which support reading metadata ([b1c3ce8](https://github.com/googleapis/google-cloud-python/commit/b1c3ce8b271e9d22afabcde054e81dcedae6b0ef)) +* Add support for reading selective GAPIC generation methods from service YAML ([b1c3ce8](https://github.com/googleapis/google-cloud-python/commit/b1c3ce8b271e9d22afabcde054e81dcedae6b0ef)) + ## [3.28.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-asset-v3.27.1...google-cloud-asset-v3.28.0) (2024-12-12) diff --git a/packages/google-cloud-asset/README.rst b/packages/google-cloud-asset/README.rst index a9eb11117e78..c225453b620d 100644 --- a/packages/google-cloud-asset/README.rst +++ b/packages/google-cloud-asset/README.rst @@ -26,12 +26,12 @@ In order to use this library, you first need to go through the following steps: 1. `Select or create a Cloud Platform project.`_ 2. `Enable billing for your project.`_ 3. `Enable the Cloud Asset Inventory.`_ -4. `Setup Authentication.`_ +4. `Set up Authentication.`_ .. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project .. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project .. _Enable the Cloud Asset Inventory.: https://cloud.google.com/resource-manager/docs/cloud-asset-inventory/overview -.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html +.. _Set up Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html Installation ~~~~~~~~~~~~ diff --git a/packages/google-cloud-asset/google/cloud/asset/gapic_version.py b/packages/google-cloud-asset/google/cloud/asset/gapic_version.py index 32cacc95a964..856a3fbea2a6 100644 --- a/packages/google-cloud-asset/google/cloud/asset/gapic_version.py +++ b/packages/google-cloud-asset/google/cloud/asset/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "3.28.0" # {x-release-please-version} +__version__ = "3.29.0" # {x-release-please-version} diff --git a/packages/google-cloud-asset/google/cloud/asset_v1/gapic_version.py b/packages/google-cloud-asset/google/cloud/asset_v1/gapic_version.py index 32cacc95a964..856a3fbea2a6 100644 --- a/packages/google-cloud-asset/google/cloud/asset_v1/gapic_version.py +++ b/packages/google-cloud-asset/google/cloud/asset_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "3.28.0" # {x-release-please-version} +__version__ = "3.29.0" # {x-release-please-version} diff --git a/packages/google-cloud-asset/google/cloud/asset_v1/services/asset_service/client.py b/packages/google-cloud-asset/google/cloud/asset_v1/services/asset_service/client.py index d165632ed4f1..5cea5d80a980 100644 --- a/packages/google-cloud-asset/google/cloud/asset_v1/services/asset_service/client.py +++ b/packages/google-cloud-asset/google/cloud/asset_v1/services/asset_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -592,6 +594,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -3824,16 +3853,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-asset/google/cloud/asset_v1/services/asset_service/transports/rest.py b/packages/google-cloud-asset/google/cloud/asset_v1/services/asset_service/transports/rest.py index ebdff51d6ef8..74ddca041a08 100644 --- a/packages/google-cloud-asset/google/cloud/asset_v1/services/asset_service/transports/rest.py +++ b/packages/google-cloud-asset/google/cloud/asset_v1/services/asset_service/transports/rest.py @@ -271,12 +271,37 @@ def post_analyze_iam_policy( ) -> asset_service.AnalyzeIamPolicyResponse: """Post-rpc interceptor for analyze_iam_policy - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_analyze_iam_policy_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AssetService server but before - it is returned to user code. + it is returned to user code. This `post_analyze_iam_policy` interceptor runs + before the `post_analyze_iam_policy_with_metadata` interceptor. 
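As the updated docstrings spell out, the deprecated post_* hook still runs first and its (possibly modified) response is then handed to the new post_*_with_metadata hook together with the per-call metadata. A minimal sketch of that ordering, with the wrapper function name invented for illustration (the generated transport performs the equivalent steps internally):

def run_post_hooks(interceptor, response, metadata):
    # Deprecated hook runs first and may replace the response.
    response = interceptor.post_analyze_iam_policy(response)
    # New hook then sees the possibly-modified response plus the per-call metadata.
    response, metadata = interceptor.post_analyze_iam_policy_with_metadata(
        response, metadata
    )
    return response, metadata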
""" return response + def post_analyze_iam_policy_with_metadata( + self, + response: asset_service.AnalyzeIamPolicyResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + asset_service.AnalyzeIamPolicyResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for analyze_iam_policy + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AssetService server but before it is returned to user code. + + We recommend only using this `post_analyze_iam_policy_with_metadata` + interceptor in new development instead of the `post_analyze_iam_policy` interceptor. + When both interceptors are used, this `post_analyze_iam_policy_with_metadata` interceptor runs after the + `post_analyze_iam_policy` interceptor. The (possibly modified) response returned by + `post_analyze_iam_policy` will be passed to + `post_analyze_iam_policy_with_metadata`. + """ + return response, metadata + def pre_analyze_iam_policy_longrunning( self, request: asset_service.AnalyzeIamPolicyLongrunningRequest, @@ -297,12 +322,35 @@ def post_analyze_iam_policy_longrunning( ) -> operations_pb2.Operation: """Post-rpc interceptor for analyze_iam_policy_longrunning - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_analyze_iam_policy_longrunning_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AssetService server but before - it is returned to user code. + it is returned to user code. This `post_analyze_iam_policy_longrunning` interceptor runs + before the `post_analyze_iam_policy_longrunning_with_metadata` interceptor. """ return response + def post_analyze_iam_policy_longrunning_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for analyze_iam_policy_longrunning + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AssetService server but before it is returned to user code. + + We recommend only using this `post_analyze_iam_policy_longrunning_with_metadata` + interceptor in new development instead of the `post_analyze_iam_policy_longrunning` interceptor. + When both interceptors are used, this `post_analyze_iam_policy_longrunning_with_metadata` interceptor runs after the + `post_analyze_iam_policy_longrunning` interceptor. The (possibly modified) response returned by + `post_analyze_iam_policy_longrunning` will be passed to + `post_analyze_iam_policy_longrunning_with_metadata`. + """ + return response, metadata + def pre_analyze_move( self, request: asset_service.AnalyzeMoveRequest, @@ -322,12 +370,37 @@ def post_analyze_move( ) -> asset_service.AnalyzeMoveResponse: """Post-rpc interceptor for analyze_move - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_analyze_move_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AssetService server but before - it is returned to user code. + it is returned to user code. This `post_analyze_move` interceptor runs + before the `post_analyze_move_with_metadata` interceptor. 
""" return response + def post_analyze_move_with_metadata( + self, + response: asset_service.AnalyzeMoveResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + asset_service.AnalyzeMoveResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for analyze_move + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AssetService server but before it is returned to user code. + + We recommend only using this `post_analyze_move_with_metadata` + interceptor in new development instead of the `post_analyze_move` interceptor. + When both interceptors are used, this `post_analyze_move_with_metadata` interceptor runs after the + `post_analyze_move` interceptor. The (possibly modified) response returned by + `post_analyze_move` will be passed to + `post_analyze_move_with_metadata`. + """ + return response, metadata + def pre_analyze_org_policies( self, request: asset_service.AnalyzeOrgPoliciesRequest, @@ -347,12 +420,38 @@ def post_analyze_org_policies( ) -> asset_service.AnalyzeOrgPoliciesResponse: """Post-rpc interceptor for analyze_org_policies - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_analyze_org_policies_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AssetService server but before - it is returned to user code. + it is returned to user code. This `post_analyze_org_policies` interceptor runs + before the `post_analyze_org_policies_with_metadata` interceptor. """ return response + def post_analyze_org_policies_with_metadata( + self, + response: asset_service.AnalyzeOrgPoliciesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + asset_service.AnalyzeOrgPoliciesResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for analyze_org_policies + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AssetService server but before it is returned to user code. + + We recommend only using this `post_analyze_org_policies_with_metadata` + interceptor in new development instead of the `post_analyze_org_policies` interceptor. + When both interceptors are used, this `post_analyze_org_policies_with_metadata` interceptor runs after the + `post_analyze_org_policies` interceptor. The (possibly modified) response returned by + `post_analyze_org_policies` will be passed to + `post_analyze_org_policies_with_metadata`. + """ + return response, metadata + def pre_analyze_org_policy_governed_assets( self, request: asset_service.AnalyzeOrgPolicyGovernedAssetsRequest, @@ -373,12 +472,38 @@ def post_analyze_org_policy_governed_assets( ) -> asset_service.AnalyzeOrgPolicyGovernedAssetsResponse: """Post-rpc interceptor for analyze_org_policy_governed_assets - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_analyze_org_policy_governed_assets_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AssetService server but before - it is returned to user code. + it is returned to user code. This `post_analyze_org_policy_governed_assets` interceptor runs + before the `post_analyze_org_policy_governed_assets_with_metadata` interceptor. 
""" return response + def post_analyze_org_policy_governed_assets_with_metadata( + self, + response: asset_service.AnalyzeOrgPolicyGovernedAssetsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + asset_service.AnalyzeOrgPolicyGovernedAssetsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for analyze_org_policy_governed_assets + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AssetService server but before it is returned to user code. + + We recommend only using this `post_analyze_org_policy_governed_assets_with_metadata` + interceptor in new development instead of the `post_analyze_org_policy_governed_assets` interceptor. + When both interceptors are used, this `post_analyze_org_policy_governed_assets_with_metadata` interceptor runs after the + `post_analyze_org_policy_governed_assets` interceptor. The (possibly modified) response returned by + `post_analyze_org_policy_governed_assets` will be passed to + `post_analyze_org_policy_governed_assets_with_metadata`. + """ + return response, metadata + def pre_analyze_org_policy_governed_containers( self, request: asset_service.AnalyzeOrgPolicyGovernedContainersRequest, @@ -399,12 +524,38 @@ def post_analyze_org_policy_governed_containers( ) -> asset_service.AnalyzeOrgPolicyGovernedContainersResponse: """Post-rpc interceptor for analyze_org_policy_governed_containers - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_analyze_org_policy_governed_containers_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AssetService server but before - it is returned to user code. + it is returned to user code. This `post_analyze_org_policy_governed_containers` interceptor runs + before the `post_analyze_org_policy_governed_containers_with_metadata` interceptor. """ return response + def post_analyze_org_policy_governed_containers_with_metadata( + self, + response: asset_service.AnalyzeOrgPolicyGovernedContainersResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + asset_service.AnalyzeOrgPolicyGovernedContainersResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for analyze_org_policy_governed_containers + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AssetService server but before it is returned to user code. + + We recommend only using this `post_analyze_org_policy_governed_containers_with_metadata` + interceptor in new development instead of the `post_analyze_org_policy_governed_containers` interceptor. + When both interceptors are used, this `post_analyze_org_policy_governed_containers_with_metadata` interceptor runs after the + `post_analyze_org_policy_governed_containers` interceptor. The (possibly modified) response returned by + `post_analyze_org_policy_governed_containers` will be passed to + `post_analyze_org_policy_governed_containers_with_metadata`. + """ + return response, metadata + def pre_batch_get_assets_history( self, request: asset_service.BatchGetAssetsHistoryRequest, @@ -425,12 +576,38 @@ def post_batch_get_assets_history( ) -> asset_service.BatchGetAssetsHistoryResponse: """Post-rpc interceptor for batch_get_assets_history - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_batch_get_assets_history_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response after it is returned by the AssetService server but before - it is returned to user code. + it is returned to user code. This `post_batch_get_assets_history` interceptor runs + before the `post_batch_get_assets_history_with_metadata` interceptor. """ return response + def post_batch_get_assets_history_with_metadata( + self, + response: asset_service.BatchGetAssetsHistoryResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + asset_service.BatchGetAssetsHistoryResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for batch_get_assets_history + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AssetService server but before it is returned to user code. + + We recommend only using this `post_batch_get_assets_history_with_metadata` + interceptor in new development instead of the `post_batch_get_assets_history` interceptor. + When both interceptors are used, this `post_batch_get_assets_history_with_metadata` interceptor runs after the + `post_batch_get_assets_history` interceptor. The (possibly modified) response returned by + `post_batch_get_assets_history` will be passed to + `post_batch_get_assets_history_with_metadata`. + """ + return response, metadata + def pre_batch_get_effective_iam_policies( self, request: asset_service.BatchGetEffectiveIamPoliciesRequest, @@ -451,12 +628,38 @@ def post_batch_get_effective_iam_policies( ) -> asset_service.BatchGetEffectiveIamPoliciesResponse: """Post-rpc interceptor for batch_get_effective_iam_policies - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_batch_get_effective_iam_policies_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AssetService server but before - it is returned to user code. + it is returned to user code. This `post_batch_get_effective_iam_policies` interceptor runs + before the `post_batch_get_effective_iam_policies_with_metadata` interceptor. """ return response + def post_batch_get_effective_iam_policies_with_metadata( + self, + response: asset_service.BatchGetEffectiveIamPoliciesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + asset_service.BatchGetEffectiveIamPoliciesResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for batch_get_effective_iam_policies + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AssetService server but before it is returned to user code. + + We recommend only using this `post_batch_get_effective_iam_policies_with_metadata` + interceptor in new development instead of the `post_batch_get_effective_iam_policies` interceptor. + When both interceptors are used, this `post_batch_get_effective_iam_policies_with_metadata` interceptor runs after the + `post_batch_get_effective_iam_policies` interceptor. The (possibly modified) response returned by + `post_batch_get_effective_iam_policies` will be passed to + `post_batch_get_effective_iam_policies_with_metadata`. + """ + return response, metadata + def pre_create_feed( self, request: asset_service.CreateFeedRequest, @@ -474,12 +677,35 @@ def pre_create_feed( def post_create_feed(self, response: asset_service.Feed) -> asset_service.Feed: """Post-rpc interceptor for create_feed - Override in a subclass to manipulate the response + DEPRECATED. 
Please use the `post_create_feed_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AssetService server but before - it is returned to user code. + it is returned to user code. This `post_create_feed` interceptor runs + before the `post_create_feed_with_metadata` interceptor. """ return response + def post_create_feed_with_metadata( + self, + response: asset_service.Feed, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[asset_service.Feed, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_feed + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AssetService server but before it is returned to user code. + + We recommend only using this `post_create_feed_with_metadata` + interceptor in new development instead of the `post_create_feed` interceptor. + When both interceptors are used, this `post_create_feed_with_metadata` interceptor runs after the + `post_create_feed` interceptor. The (possibly modified) response returned by + `post_create_feed` will be passed to + `post_create_feed_with_metadata`. + """ + return response, metadata + def pre_create_saved_query( self, request: asset_service.CreateSavedQueryRequest, @@ -499,12 +725,35 @@ def post_create_saved_query( ) -> asset_service.SavedQuery: """Post-rpc interceptor for create_saved_query - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_saved_query_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AssetService server but before - it is returned to user code. + it is returned to user code. This `post_create_saved_query` interceptor runs + before the `post_create_saved_query_with_metadata` interceptor. """ return response + def post_create_saved_query_with_metadata( + self, + response: asset_service.SavedQuery, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[asset_service.SavedQuery, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_saved_query + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AssetService server but before it is returned to user code. + + We recommend only using this `post_create_saved_query_with_metadata` + interceptor in new development instead of the `post_create_saved_query` interceptor. + When both interceptors are used, this `post_create_saved_query_with_metadata` interceptor runs after the + `post_create_saved_query` interceptor. The (possibly modified) response returned by + `post_create_saved_query` will be passed to + `post_create_saved_query_with_metadata`. + """ + return response, metadata + def pre_delete_feed( self, request: asset_service.DeleteFeedRequest, @@ -552,12 +801,35 @@ def post_export_assets( ) -> operations_pb2.Operation: """Post-rpc interceptor for export_assets - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_export_assets_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AssetService server but before - it is returned to user code. + it is returned to user code. This `post_export_assets` interceptor runs + before the `post_export_assets_with_metadata` interceptor. 
""" return response + def post_export_assets_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for export_assets + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AssetService server but before it is returned to user code. + + We recommend only using this `post_export_assets_with_metadata` + interceptor in new development instead of the `post_export_assets` interceptor. + When both interceptors are used, this `post_export_assets_with_metadata` interceptor runs after the + `post_export_assets` interceptor. The (possibly modified) response returned by + `post_export_assets` will be passed to + `post_export_assets_with_metadata`. + """ + return response, metadata + def pre_get_feed( self, request: asset_service.GetFeedRequest, @@ -573,12 +845,35 @@ def pre_get_feed( def post_get_feed(self, response: asset_service.Feed) -> asset_service.Feed: """Post-rpc interceptor for get_feed - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_feed_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AssetService server but before - it is returned to user code. + it is returned to user code. This `post_get_feed` interceptor runs + before the `post_get_feed_with_metadata` interceptor. """ return response + def post_get_feed_with_metadata( + self, + response: asset_service.Feed, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[asset_service.Feed, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_feed + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AssetService server but before it is returned to user code. + + We recommend only using this `post_get_feed_with_metadata` + interceptor in new development instead of the `post_get_feed` interceptor. + When both interceptors are used, this `post_get_feed_with_metadata` interceptor runs after the + `post_get_feed` interceptor. The (possibly modified) response returned by + `post_get_feed` will be passed to + `post_get_feed_with_metadata`. + """ + return response, metadata + def pre_get_saved_query( self, request: asset_service.GetSavedQueryRequest, @@ -598,12 +893,35 @@ def post_get_saved_query( ) -> asset_service.SavedQuery: """Post-rpc interceptor for get_saved_query - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_saved_query_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AssetService server but before - it is returned to user code. + it is returned to user code. This `post_get_saved_query` interceptor runs + before the `post_get_saved_query_with_metadata` interceptor. """ return response + def post_get_saved_query_with_metadata( + self, + response: asset_service.SavedQuery, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[asset_service.SavedQuery, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_saved_query + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AssetService server but before it is returned to user code. 
+ + We recommend only using this `post_get_saved_query_with_metadata` + interceptor in new development instead of the `post_get_saved_query` interceptor. + When both interceptors are used, this `post_get_saved_query_with_metadata` interceptor runs after the + `post_get_saved_query` interceptor. The (possibly modified) response returned by + `post_get_saved_query` will be passed to + `post_get_saved_query_with_metadata`. + """ + return response, metadata + def pre_list_assets( self, request: asset_service.ListAssetsRequest, @@ -623,12 +941,37 @@ def post_list_assets( ) -> asset_service.ListAssetsResponse: """Post-rpc interceptor for list_assets - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_assets_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AssetService server but before - it is returned to user code. + it is returned to user code. This `post_list_assets` interceptor runs + before the `post_list_assets_with_metadata` interceptor. """ return response + def post_list_assets_with_metadata( + self, + response: asset_service.ListAssetsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + asset_service.ListAssetsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_assets + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AssetService server but before it is returned to user code. + + We recommend only using this `post_list_assets_with_metadata` + interceptor in new development instead of the `post_list_assets` interceptor. + When both interceptors are used, this `post_list_assets_with_metadata` interceptor runs after the + `post_list_assets` interceptor. The (possibly modified) response returned by + `post_list_assets` will be passed to + `post_list_assets_with_metadata`. + """ + return response, metadata + def pre_list_feeds( self, request: asset_service.ListFeedsRequest, @@ -646,12 +989,37 @@ def post_list_feeds( ) -> asset_service.ListFeedsResponse: """Post-rpc interceptor for list_feeds - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_feeds_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AssetService server but before - it is returned to user code. + it is returned to user code. This `post_list_feeds` interceptor runs + before the `post_list_feeds_with_metadata` interceptor. """ return response + def post_list_feeds_with_metadata( + self, + response: asset_service.ListFeedsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + asset_service.ListFeedsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_feeds + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AssetService server but before it is returned to user code. + + We recommend only using this `post_list_feeds_with_metadata` + interceptor in new development instead of the `post_list_feeds` interceptor. + When both interceptors are used, this `post_list_feeds_with_metadata` interceptor runs after the + `post_list_feeds` interceptor. The (possibly modified) response returned by + `post_list_feeds` will be passed to + `post_list_feeds_with_metadata`. 
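To make the new hooks concrete, here is a minimal sketch, not part of this change, of a custom interceptor that overrides one of the generated `*_with_metadata` methods and is attached to a client through the REST transport. The header name it checks and the printing it does are illustrative assumptions only; the import paths mirror the files touched in this diff.

from typing import Sequence, Tuple, Union

from google.cloud import asset_v1
from google.cloud.asset_v1.services.asset_service.transports.rest import (
    AssetServiceRestInterceptor,
    AssetServiceRestTransport,
)


class HeaderLoggingInterceptor(AssetServiceRestInterceptor):
    """Hypothetical subclass that reads response metadata for list_assets."""

    def post_list_assets_with_metadata(
        self,
        response: asset_v1.ListAssetsResponse,
        metadata: Sequence[Tuple[str, Union[str, bytes]]],
    ):
        # `metadata` carries the HTTP response headers as (name, value) pairs.
        for name, value in metadata:
            if name.lower() == "x-goog-request-id":  # header name is an assumption
                print("request id:", value)
        # Return both values unchanged so the normal flow continues.
        return response, metadata


# Wiring the interceptor into a client; relies on Application Default Credentials.
transport = AssetServiceRestTransport(interceptor=HeaderLoggingInterceptor())
client = asset_v1.AssetServiceClient(transport=transport)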
+ """ + return response, metadata + def pre_list_saved_queries( self, request: asset_service.ListSavedQueriesRequest, @@ -671,12 +1039,37 @@ def post_list_saved_queries( ) -> asset_service.ListSavedQueriesResponse: """Post-rpc interceptor for list_saved_queries - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_saved_queries_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AssetService server but before - it is returned to user code. + it is returned to user code. This `post_list_saved_queries` interceptor runs + before the `post_list_saved_queries_with_metadata` interceptor. """ return response + def post_list_saved_queries_with_metadata( + self, + response: asset_service.ListSavedQueriesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + asset_service.ListSavedQueriesResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_saved_queries + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AssetService server but before it is returned to user code. + + We recommend only using this `post_list_saved_queries_with_metadata` + interceptor in new development instead of the `post_list_saved_queries` interceptor. + When both interceptors are used, this `post_list_saved_queries_with_metadata` interceptor runs after the + `post_list_saved_queries` interceptor. The (possibly modified) response returned by + `post_list_saved_queries` will be passed to + `post_list_saved_queries_with_metadata`. + """ + return response, metadata + def pre_query_assets( self, request: asset_service.QueryAssetsRequest, @@ -696,12 +1089,37 @@ def post_query_assets( ) -> asset_service.QueryAssetsResponse: """Post-rpc interceptor for query_assets - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_query_assets_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AssetService server but before - it is returned to user code. + it is returned to user code. This `post_query_assets` interceptor runs + before the `post_query_assets_with_metadata` interceptor. """ return response + def post_query_assets_with_metadata( + self, + response: asset_service.QueryAssetsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + asset_service.QueryAssetsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for query_assets + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AssetService server but before it is returned to user code. + + We recommend only using this `post_query_assets_with_metadata` + interceptor in new development instead of the `post_query_assets` interceptor. + When both interceptors are used, this `post_query_assets_with_metadata` interceptor runs after the + `post_query_assets` interceptor. The (possibly modified) response returned by + `post_query_assets` will be passed to + `post_query_assets_with_metadata`. + """ + return response, metadata + def pre_search_all_iam_policies( self, request: asset_service.SearchAllIamPoliciesRequest, @@ -722,12 +1140,38 @@ def post_search_all_iam_policies( ) -> asset_service.SearchAllIamPoliciesResponse: """Post-rpc interceptor for search_all_iam_policies - Override in a subclass to manipulate the response + DEPRECATED. 
Please use the `post_search_all_iam_policies_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AssetService server but before - it is returned to user code. + it is returned to user code. This `post_search_all_iam_policies` interceptor runs + before the `post_search_all_iam_policies_with_metadata` interceptor. """ return response + def post_search_all_iam_policies_with_metadata( + self, + response: asset_service.SearchAllIamPoliciesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + asset_service.SearchAllIamPoliciesResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for search_all_iam_policies + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AssetService server but before it is returned to user code. + + We recommend only using this `post_search_all_iam_policies_with_metadata` + interceptor in new development instead of the `post_search_all_iam_policies` interceptor. + When both interceptors are used, this `post_search_all_iam_policies_with_metadata` interceptor runs after the + `post_search_all_iam_policies` interceptor. The (possibly modified) response returned by + `post_search_all_iam_policies` will be passed to + `post_search_all_iam_policies_with_metadata`. + """ + return response, metadata + def pre_search_all_resources( self, request: asset_service.SearchAllResourcesRequest, @@ -747,12 +1191,38 @@ def post_search_all_resources( ) -> asset_service.SearchAllResourcesResponse: """Post-rpc interceptor for search_all_resources - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_search_all_resources_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AssetService server but before - it is returned to user code. + it is returned to user code. This `post_search_all_resources` interceptor runs + before the `post_search_all_resources_with_metadata` interceptor. """ return response + def post_search_all_resources_with_metadata( + self, + response: asset_service.SearchAllResourcesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + asset_service.SearchAllResourcesResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for search_all_resources + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AssetService server but before it is returned to user code. + + We recommend only using this `post_search_all_resources_with_metadata` + interceptor in new development instead of the `post_search_all_resources` interceptor. + When both interceptors are used, this `post_search_all_resources_with_metadata` interceptor runs after the + `post_search_all_resources` interceptor. The (possibly modified) response returned by + `post_search_all_resources` will be passed to + `post_search_all_resources_with_metadata`. + """ + return response, metadata + def pre_update_feed( self, request: asset_service.UpdateFeedRequest, @@ -770,12 +1240,35 @@ def pre_update_feed( def post_update_feed(self, response: asset_service.Feed) -> asset_service.Feed: """Post-rpc interceptor for update_feed - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_feed_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response after it is returned by the AssetService server but before - it is returned to user code. + it is returned to user code. This `post_update_feed` interceptor runs + before the `post_update_feed_with_metadata` interceptor. """ return response + def post_update_feed_with_metadata( + self, + response: asset_service.Feed, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[asset_service.Feed, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_feed + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AssetService server but before it is returned to user code. + + We recommend only using this `post_update_feed_with_metadata` + interceptor in new development instead of the `post_update_feed` interceptor. + When both interceptors are used, this `post_update_feed_with_metadata` interceptor runs after the + `post_update_feed` interceptor. The (possibly modified) response returned by + `post_update_feed` will be passed to + `post_update_feed_with_metadata`. + """ + return response, metadata + def pre_update_saved_query( self, request: asset_service.UpdateSavedQueryRequest, @@ -795,12 +1288,35 @@ def post_update_saved_query( ) -> asset_service.SavedQuery: """Post-rpc interceptor for update_saved_query - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_saved_query_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AssetService server but before - it is returned to user code. + it is returned to user code. This `post_update_saved_query` interceptor runs + before the `post_update_saved_query_with_metadata` interceptor. """ return response + def post_update_saved_query_with_metadata( + self, + response: asset_service.SavedQuery, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[asset_service.SavedQuery, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_saved_query + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AssetService server but before it is returned to user code. + + We recommend only using this `post_update_saved_query_with_metadata` + interceptor in new development instead of the `post_update_saved_query` interceptor. + When both interceptors are used, this `post_update_saved_query_with_metadata` interceptor runs after the + `post_update_saved_query` interceptor. The (possibly modified) response returned by + `post_update_saved_query` will be passed to + `post_update_saved_query_with_metadata`. 
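The metadata handed to these hooks is simply the HTTP response headers flattened into string pairs, as the `__call__` changes further down build them. A rough illustration of the shape, with made-up header names and values:

# Illustration only; the header names and values below are invented.
headers = {"content-type": "application/json", "x-goog-request-id": "abc123"}
response_metadata = [(k, str(v)) for k, v in headers.items()]
# response_metadata == [("content-type", "application/json"), ("x-goog-request-id", "abc123")]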
+ """ + return response, metadata + def pre_get_operation( self, request: operations_pb2.GetOperationRequest, @@ -1070,6 +1586,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_analyze_iam_policy(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_analyze_iam_policy_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1226,6 +1746,13 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_analyze_iam_policy_longrunning(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_analyze_iam_policy_longrunning_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1373,6 +1900,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_analyze_move(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_analyze_move_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1520,6 +2051,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_analyze_org_policies(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_analyze_org_policies_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1674,6 +2209,13 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_analyze_org_policy_governed_assets(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_analyze_org_policy_governed_assets_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1828,6 +2370,13 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_analyze_org_policy_governed_containers(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_analyze_org_policy_governed_containers_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1972,6 +2521,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_batch_get_assets_history(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_batch_get_assets_history_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2123,6 +2676,13 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_batch_get_effective_iam_policies(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = 
self._interceptor.post_batch_get_effective_iam_policies_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2287,6 +2847,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_feed(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_feed_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2437,6 +3001,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_saved_query(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_saved_query_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2806,6 +3374,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_export_assets(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_export_assets_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2956,6 +3528,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_feed(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_feed_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3098,6 +3674,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_saved_query(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_saved_query_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3242,6 +3822,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_assets(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_assets_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3388,6 +3972,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_feeds(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_feeds_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3530,6 +4118,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_saved_queries(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_saved_queries_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3684,6 +4276,10 @@ def 
__call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_query_assets(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_query_assets_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3828,6 +4424,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_search_all_iam_policies(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_search_all_iam_policies_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3972,6 +4572,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_search_all_resources(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_search_all_resources_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4134,6 +4738,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_feed(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_feed_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4284,6 +4892,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_saved_query(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_saved_query_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-asset/google/cloud/asset_v1p1beta1/gapic_version.py b/packages/google-cloud-asset/google/cloud/asset_v1p1beta1/gapic_version.py index 32cacc95a964..856a3fbea2a6 100644 --- a/packages/google-cloud-asset/google/cloud/asset_v1p1beta1/gapic_version.py +++ b/packages/google-cloud-asset/google/cloud/asset_v1p1beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "3.28.0" # {x-release-please-version} +__version__ = "3.29.0" # {x-release-please-version} diff --git a/packages/google-cloud-asset/google/cloud/asset_v1p1beta1/services/asset_service/client.py b/packages/google-cloud-asset/google/cloud/asset_v1p1beta1/services/asset_service/client.py index 6b3bcdb97929..ccbacd7ce6cb 100644 --- a/packages/google-cloud-asset/google/cloud/asset_v1p1beta1/services/asset_service/client.py +++ b/packages/google-cloud-asset/google/cloud/asset_v1p1beta1/services/asset_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -462,6 +464,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. 
return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. diff --git a/packages/google-cloud-asset/google/cloud/asset_v1p1beta1/services/asset_service/transports/rest.py b/packages/google-cloud-asset/google/cloud/asset_v1p1beta1/services/asset_service/transports/rest.py index 75b7f92f1c1d..64bc4efb08a1 100644 --- a/packages/google-cloud-asset/google/cloud/asset_v1p1beta1/services/asset_service/transports/rest.py +++ b/packages/google-cloud-asset/google/cloud/asset_v1p1beta1/services/asset_service/transports/rest.py @@ -111,12 +111,38 @@ def post_search_all_iam_policies( ) -> asset_service.SearchAllIamPoliciesResponse: """Post-rpc interceptor for search_all_iam_policies - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_search_all_iam_policies_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AssetService server but before - it is returned to user code. + it is returned to user code. This `post_search_all_iam_policies` interceptor runs + before the `post_search_all_iam_policies_with_metadata` interceptor. """ return response + def post_search_all_iam_policies_with_metadata( + self, + response: asset_service.SearchAllIamPoliciesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + asset_service.SearchAllIamPoliciesResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for search_all_iam_policies + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AssetService server but before it is returned to user code. + + We recommend only using this `post_search_all_iam_policies_with_metadata` + interceptor in new development instead of the `post_search_all_iam_policies` interceptor. + When both interceptors are used, this `post_search_all_iam_policies_with_metadata` interceptor runs after the + `post_search_all_iam_policies` interceptor. The (possibly modified) response returned by + `post_search_all_iam_policies` will be passed to + `post_search_all_iam_policies_with_metadata`. + """ + return response, metadata + def pre_search_all_resources( self, request: asset_service.SearchAllResourcesRequest, @@ -136,12 +162,38 @@ def post_search_all_resources( ) -> asset_service.SearchAllResourcesResponse: """Post-rpc interceptor for search_all_resources - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_search_all_resources_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AssetService server but before - it is returned to user code. + it is returned to user code. 
This `post_search_all_resources` interceptor runs + before the `post_search_all_resources_with_metadata` interceptor. """ return response + def post_search_all_resources_with_metadata( + self, + response: asset_service.SearchAllResourcesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + asset_service.SearchAllResourcesResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for search_all_resources + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AssetService server but before it is returned to user code. + + We recommend only using this `post_search_all_resources_with_metadata` + interceptor in new development instead of the `post_search_all_resources` interceptor. + When both interceptors are used, this `post_search_all_resources_with_metadata` interceptor runs after the + `post_search_all_resources` interceptor. The (possibly modified) response returned by + `post_search_all_resources` will be passed to + `post_search_all_resources_with_metadata`. + """ + return response, metadata + @dataclasses.dataclass class AssetServiceRestStub: @@ -348,6 +400,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_search_all_iam_policies(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_search_all_iam_policies_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -492,6 +548,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_search_all_resources(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_search_all_resources_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-asset/google/cloud/asset_v1p2beta1/gapic_version.py b/packages/google-cloud-asset/google/cloud/asset_v1p2beta1/gapic_version.py index 32cacc95a964..856a3fbea2a6 100644 --- a/packages/google-cloud-asset/google/cloud/asset_v1p2beta1/gapic_version.py +++ b/packages/google-cloud-asset/google/cloud/asset_v1p2beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "3.28.0" # {x-release-please-version} +__version__ = "3.29.0" # {x-release-please-version} diff --git a/packages/google-cloud-asset/google/cloud/asset_v1p2beta1/services/asset_service/client.py b/packages/google-cloud-asset/google/cloud/asset_v1p2beta1/services/asset_service/client.py index bb244420c734..1dcd8fa26f7b 100644 --- a/packages/google-cloud-asset/google/cloud/asset_v1p2beta1/services/asset_service/client.py +++ b/packages/google-cloud-asset/google/cloud/asset_v1p2beta1/services/asset_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -478,6 +480,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. 
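From the caller's side, this helper means an authentication-related failure can carry a JSON description of the credential that was used. A rough usage sketch, assuming google-auth >= 2.35.0, a hypothetical parent value, and that the called method is wrapped in the same try/except shown for `get_operation` above:

from google.api_core import exceptions as core_exceptions
from google.cloud import asset_v1

client = asset_v1.AssetServiceClient()
try:
    client.list_assets(request={"parent": "projects/my-project"})  # hypothetical parent
except core_exceptions.PermissionDenied as exc:  # HTTP 403
    # If the credential exposes get_cred_info(), its JSON dump is appended
    # to the error details before the exception is re-raised.
    print(exc.details)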
+ + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -1283,16 +1312,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-asset/google/cloud/asset_v1p2beta1/services/asset_service/transports/rest.py b/packages/google-cloud-asset/google/cloud/asset_v1p2beta1/services/asset_service/transports/rest.py index 732090cba91f..9cfd65acb4d1 100644 --- a/packages/google-cloud-asset/google/cloud/asset_v1p2beta1/services/asset_service/transports/rest.py +++ b/packages/google-cloud-asset/google/cloud/asset_v1p2beta1/services/asset_service/transports/rest.py @@ -129,12 +129,35 @@ def pre_create_feed( def post_create_feed(self, response: asset_service.Feed) -> asset_service.Feed: """Post-rpc interceptor for create_feed - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_feed_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AssetService server but before - it is returned to user code. + it is returned to user code. This `post_create_feed` interceptor runs + before the `post_create_feed_with_metadata` interceptor. """ return response + def post_create_feed_with_metadata( + self, + response: asset_service.Feed, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[asset_service.Feed, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_feed + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AssetService server but before it is returned to user code. + + We recommend only using this `post_create_feed_with_metadata` + interceptor in new development instead of the `post_create_feed` interceptor. + When both interceptors are used, this `post_create_feed_with_metadata` interceptor runs after the + `post_create_feed` interceptor. The (possibly modified) response returned by + `post_create_feed` will be passed to + `post_create_feed_with_metadata`. + """ + return response, metadata + def pre_delete_feed( self, request: asset_service.DeleteFeedRequest, @@ -164,12 +187,35 @@ def pre_get_feed( def post_get_feed(self, response: asset_service.Feed) -> asset_service.Feed: """Post-rpc interceptor for get_feed - Override in a subclass to manipulate the response + DEPRECATED. 
Please use the `post_get_feed_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AssetService server but before - it is returned to user code. + it is returned to user code. This `post_get_feed` interceptor runs + before the `post_get_feed_with_metadata` interceptor. """ return response + def post_get_feed_with_metadata( + self, + response: asset_service.Feed, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[asset_service.Feed, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_feed + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AssetService server but before it is returned to user code. + + We recommend only using this `post_get_feed_with_metadata` + interceptor in new development instead of the `post_get_feed` interceptor. + When both interceptors are used, this `post_get_feed_with_metadata` interceptor runs after the + `post_get_feed` interceptor. The (possibly modified) response returned by + `post_get_feed` will be passed to + `post_get_feed_with_metadata`. + """ + return response, metadata + def pre_list_feeds( self, request: asset_service.ListFeedsRequest, @@ -187,12 +233,37 @@ def post_list_feeds( ) -> asset_service.ListFeedsResponse: """Post-rpc interceptor for list_feeds - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_feeds_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AssetService server but before - it is returned to user code. + it is returned to user code. This `post_list_feeds` interceptor runs + before the `post_list_feeds_with_metadata` interceptor. """ return response + def post_list_feeds_with_metadata( + self, + response: asset_service.ListFeedsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + asset_service.ListFeedsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_feeds + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AssetService server but before it is returned to user code. + + We recommend only using this `post_list_feeds_with_metadata` + interceptor in new development instead of the `post_list_feeds` interceptor. + When both interceptors are used, this `post_list_feeds_with_metadata` interceptor runs after the + `post_list_feeds` interceptor. The (possibly modified) response returned by + `post_list_feeds` will be passed to + `post_list_feeds_with_metadata`. + """ + return response, metadata + def pre_update_feed( self, request: asset_service.UpdateFeedRequest, @@ -210,12 +281,35 @@ def pre_update_feed( def post_update_feed(self, response: asset_service.Feed) -> asset_service.Feed: """Post-rpc interceptor for update_feed - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_feed_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AssetService server but before - it is returned to user code. + it is returned to user code. This `post_update_feed` interceptor runs + before the `post_update_feed_with_metadata` interceptor. 
""" return response + def post_update_feed_with_metadata( + self, + response: asset_service.Feed, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[asset_service.Feed, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_feed + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AssetService server but before it is returned to user code. + + We recommend only using this `post_update_feed_with_metadata` + interceptor in new development instead of the `post_update_feed` interceptor. + When both interceptors are used, this `post_update_feed_with_metadata` interceptor runs after the + `post_update_feed` interceptor. The (possibly modified) response returned by + `post_update_feed` will be passed to + `post_update_feed_with_metadata`. + """ + return response, metadata + def pre_get_operation( self, request: operations_pb2.GetOperationRequest, @@ -465,6 +559,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_feed(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_feed_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -725,6 +823,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_feed(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_feed_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -869,6 +971,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_feeds(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_feeds_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1029,6 +1135,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_feed(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_feed_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-asset/google/cloud/asset_v1p4beta1/gapic_version.py b/packages/google-cloud-asset/google/cloud/asset_v1p4beta1/gapic_version.py index f2fd25602f80..16c2618143bb 100644 --- a/packages/google-cloud-asset/google/cloud/asset_v1p4beta1/gapic_version.py +++ b/packages/google-cloud-asset/google/cloud/asset_v1p4beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "3.28.0" # {x-release-please-version} +__version__ = "3.29.0" # {x-release-please-version} diff --git a/packages/google-cloud-asset/google/cloud/asset_v1p5beta1/gapic_version.py b/packages/google-cloud-asset/google/cloud/asset_v1p5beta1/gapic_version.py index 32cacc95a964..856a3fbea2a6 100644 --- a/packages/google-cloud-asset/google/cloud/asset_v1p5beta1/gapic_version.py +++ b/packages/google-cloud-asset/google/cloud/asset_v1p5beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "3.28.0" # {x-release-please-version} +__version__ = "3.29.0" # {x-release-please-version} diff --git a/packages/google-cloud-asset/google/cloud/asset_v1p5beta1/services/asset_service/client.py b/packages/google-cloud-asset/google/cloud/asset_v1p5beta1/services/asset_service/client.py index d2ba26ca61ef..f89848ece101 100644 --- a/packages/google-cloud-asset/google/cloud/asset_v1p5beta1/services/asset_service/client.py +++ b/packages/google-cloud-asset/google/cloud/asset_v1p5beta1/services/asset_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -528,6 +530,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. diff --git a/packages/google-cloud-asset/google/cloud/asset_v1p5beta1/services/asset_service/transports/rest.py b/packages/google-cloud-asset/google/cloud/asset_v1p5beta1/services/asset_service/transports/rest.py index 0f27d605b35c..2919c7b180f2 100644 --- a/packages/google-cloud-asset/google/cloud/asset_v1p5beta1/services/asset_service/transports/rest.py +++ b/packages/google-cloud-asset/google/cloud/asset_v1p5beta1/services/asset_service/transports/rest.py @@ -102,12 +102,37 @@ def post_list_assets( ) -> asset_service.ListAssetsResponse: """Post-rpc interceptor for list_assets - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_assets_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AssetService server but before - it is returned to user code. + it is returned to user code. This `post_list_assets` interceptor runs + before the `post_list_assets_with_metadata` interceptor. 
""" return response + def post_list_assets_with_metadata( + self, + response: asset_service.ListAssetsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + asset_service.ListAssetsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_assets + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AssetService server but before it is returned to user code. + + We recommend only using this `post_list_assets_with_metadata` + interceptor in new development instead of the `post_list_assets` interceptor. + When both interceptors are used, this `post_list_assets_with_metadata` interceptor runs after the + `post_list_assets` interceptor. The (possibly modified) response returned by + `post_list_assets` will be passed to + `post_list_assets_with_metadata`. + """ + return response, metadata + @dataclasses.dataclass class AssetServiceRestStub: @@ -316,6 +341,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_assets(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_assets_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-asset/noxfile.py b/packages/google-cloud-asset/noxfile.py index a9ceef47133c..0acc836b384e 100644 --- a/packages/google-cloud-asset/noxfile.py +++ b/packages/google-cloud-asset/noxfile.py @@ -382,20 +382,29 @@ def docfx(session): ["python", "upb", "cpp"], ) def prerelease_deps(session, protobuf_implementation): - """Run all tests with prerelease versions of dependencies installed.""" + """ + Run all tests with pre-release versions of dependencies installed + rather than the standard non pre-release versions. + Pre-releases versions can be installed using + `pip install --pre `. + """ if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): session.skip("cpp implementation is not supported in python 3.11+") # Install all dependencies - session.install("-e", ".[all, tests, tracing]") + session.install("-e", ".") + unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES + # Install dependencies for the unit test environment session.install(*unit_deps_all) + system_deps_all = ( SYSTEM_TEST_STANDARD_DEPENDENCIES + SYSTEM_TEST_EXTERNAL_DEPENDENCIES + SYSTEM_TEST_EXTRAS ) + # Install dependencies for the system test environment session.install(*system_deps_all) # Because we test minimum dependency versions on the minimum Python @@ -417,6 +426,7 @@ def prerelease_deps(session, protobuf_implementation): ) ] + # Install dependencies specified in `testing/constraints-X.txt`. session.install(*constraints_deps) prerel_deps = [ @@ -458,3 +468,70 @@ def prerelease_deps(session, protobuf_implementation): "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, }, ) + + +@nox.session(python="3.13") +@nox.parametrize( + "protobuf_implementation", + ["python", "upb"], +) +def core_deps_from_source(session, protobuf_implementation): + """Run all tests with local versions of core dependencies installed, + rather than pulling core dependencies from PyPI. 
+ """ + + # Install all dependencies + session.install(".") + + # Install dependencies for the unit test environment + unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES + session.install(*unit_deps_all) + + # Install dependencies for the system test environment + system_deps_all = ( + SYSTEM_TEST_STANDARD_DEPENDENCIES + + SYSTEM_TEST_EXTERNAL_DEPENDENCIES + + SYSTEM_TEST_EXTRAS + ) + session.install(*system_deps_all) + + # Because we test minimum dependency versions on the minimum Python + # version, the first version we test with in the unit tests sessions has a + # constraints file containing all dependencies and extras that should be installed. + with open( + CURRENT_DIRECTORY + / "testing" + / f"constraints-{UNIT_TEST_PYTHON_VERSIONS[0]}.txt", + encoding="utf-8", + ) as constraints_file: + constraints_text = constraints_file.read() + + # Ignore leading whitespace and comment lines. + constraints_deps = [ + match.group(1) + for match in re.finditer( + r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE + ) + ] + + # Install dependencies specified in `testing/constraints-X.txt`. + session.install(*constraints_deps) + + core_dependencies_from_source = [ + "google-api-core @ git+https://github.com/googleapis/python-api-core.git", + "google-auth @ git+https://github.com/googleapis/google-auth-library-python.git", + f"{CURRENT_DIRECTORY}/../googleapis-common-protos", + f"{CURRENT_DIRECTORY}/../grpc-google-iam-v1", + "proto-plus @ git+https://github.com/googleapis/proto-plus-python.git", + ] + + for dep in core_dependencies_from_source: + session.install(dep, "--ignore-installed", "--no-deps") + + session.run( + "py.test", + "tests/unit", + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, + ) diff --git a/packages/google-cloud-asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1.json b/packages/google-cloud-asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1.json index 0bf3fb1c54e9..e6936177e858 100644 --- a/packages/google-cloud-asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1.json +++ b/packages/google-cloud-asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-asset", - "version": "3.28.0" + "version": "3.29.0" }, "snippets": [ { diff --git a/packages/google-cloud-asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1p1beta1.json b/packages/google-cloud-asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1p1beta1.json index 931c1565a2c7..36476848d744 100644 --- a/packages/google-cloud-asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1p1beta1.json +++ b/packages/google-cloud-asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1p1beta1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-asset", - "version": "3.28.0" + "version": "3.29.0" }, "snippets": [ { diff --git a/packages/google-cloud-asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1p2beta1.json b/packages/google-cloud-asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1p2beta1.json index 81c7f38381ca..751348e6ccf6 100644 --- a/packages/google-cloud-asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1p2beta1.json +++ b/packages/google-cloud-asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1p2beta1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": 
"google-cloud-asset", - "version": "3.28.0" + "version": "3.29.0" }, "snippets": [ { diff --git a/packages/google-cloud-asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1p5beta1.json b/packages/google-cloud-asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1p5beta1.json index 898a8afb3ac8..aa980dbc968e 100644 --- a/packages/google-cloud-asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1p5beta1.json +++ b/packages/google-cloud-asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1p5beta1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-asset", - "version": "3.28.0" + "version": "3.29.0" }, "snippets": [ { diff --git a/packages/google-cloud-asset/tests/unit/gapic/asset_v1/test_asset_service.py b/packages/google-cloud-asset/tests/unit/gapic/asset_v1/test_asset_service.py index 819d95e9e05f..740d78460245 100644 --- a/packages/google-cloud-asset/tests/unit/gapic/asset_v1/test_asset_service.py +++ b/packages/google-cloud-asset/tests/unit/gapic/asset_v1/test_asset_service.py @@ -75,6 +75,13 @@ ) from google.cloud.asset_v1.types import asset_service, assets +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -318,6 +325,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = AssetServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = AssetServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -15684,10 +15734,13 @@ def test_export_assets_rest_interceptors(null_interceptor): ), mock.patch.object( transports.AssetServiceRestInterceptor, "post_export_assets" ) as post, mock.patch.object( + transports.AssetServiceRestInterceptor, "post_export_assets_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AssetServiceRestInterceptor, "pre_export_assets" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = asset_service.ExportAssetsRequest.pb( asset_service.ExportAssetsRequest() ) @@ -15711,6 +15764,7 @@ 
def test_export_assets_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.export_assets( request, @@ -15722,6 +15776,7 @@ def test_export_assets_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_assets_rest_bad_request(request_type=asset_service.ListAssetsRequest): @@ -15804,10 +15859,13 @@ def test_list_assets_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AssetServiceRestInterceptor, "post_list_assets" ) as post, mock.patch.object( + transports.AssetServiceRestInterceptor, "post_list_assets_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AssetServiceRestInterceptor, "pre_list_assets" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = asset_service.ListAssetsRequest.pb( asset_service.ListAssetsRequest() ) @@ -15833,6 +15891,7 @@ def test_list_assets_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = asset_service.ListAssetsResponse() + post_with_metadata.return_value = asset_service.ListAssetsResponse(), metadata client.list_assets( request, @@ -15844,6 +15903,7 @@ def test_list_assets_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_batch_get_assets_history_rest_bad_request( @@ -15925,10 +15985,14 @@ def test_batch_get_assets_history_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AssetServiceRestInterceptor, "post_batch_get_assets_history" ) as post, mock.patch.object( + transports.AssetServiceRestInterceptor, + "post_batch_get_assets_history_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AssetServiceRestInterceptor, "pre_batch_get_assets_history" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = asset_service.BatchGetAssetsHistoryRequest.pb( asset_service.BatchGetAssetsHistoryRequest() ) @@ -15954,6 +16018,10 @@ def test_batch_get_assets_history_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = asset_service.BatchGetAssetsHistoryResponse() + post_with_metadata.return_value = ( + asset_service.BatchGetAssetsHistoryResponse(), + metadata, + ) client.batch_get_assets_history( request, @@ -15965,6 +16033,7 @@ def test_batch_get_assets_history_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_feed_rest_bad_request(request_type=asset_service.CreateFeedRequest): @@ -16055,10 +16124,13 @@ def test_create_feed_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AssetServiceRestInterceptor, "post_create_feed" ) as post, mock.patch.object( + transports.AssetServiceRestInterceptor, "post_create_feed_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AssetServiceRestInterceptor, "pre_create_feed" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = asset_service.CreateFeedRequest.pb( asset_service.CreateFeedRequest() ) @@ -16082,6 +16154,7 @@ def test_create_feed_rest_interceptors(null_interceptor): ] pre.return_value = request, 
metadata post.return_value = asset_service.Feed() + post_with_metadata.return_value = asset_service.Feed(), metadata client.create_feed( request, @@ -16093,6 +16166,7 @@ def test_create_feed_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_feed_rest_bad_request(request_type=asset_service.GetFeedRequest): @@ -16183,10 +16257,13 @@ def test_get_feed_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AssetServiceRestInterceptor, "post_get_feed" ) as post, mock.patch.object( + transports.AssetServiceRestInterceptor, "post_get_feed_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AssetServiceRestInterceptor, "pre_get_feed" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = asset_service.GetFeedRequest.pb(asset_service.GetFeedRequest()) transcode.return_value = { "method": "post", @@ -16208,6 +16285,7 @@ def test_get_feed_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = asset_service.Feed() + post_with_metadata.return_value = asset_service.Feed(), metadata client.get_feed( request, @@ -16219,6 +16297,7 @@ def test_get_feed_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_feeds_rest_bad_request(request_type=asset_service.ListFeedsRequest): @@ -16298,10 +16377,13 @@ def test_list_feeds_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AssetServiceRestInterceptor, "post_list_feeds" ) as post, mock.patch.object( + transports.AssetServiceRestInterceptor, "post_list_feeds_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AssetServiceRestInterceptor, "pre_list_feeds" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = asset_service.ListFeedsRequest.pb(asset_service.ListFeedsRequest()) transcode.return_value = { "method": "post", @@ -16325,6 +16407,7 @@ def test_list_feeds_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = asset_service.ListFeedsResponse() + post_with_metadata.return_value = asset_service.ListFeedsResponse(), metadata client.list_feeds( request, @@ -16336,6 +16419,7 @@ def test_list_feeds_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_feed_rest_bad_request(request_type=asset_service.UpdateFeedRequest): @@ -16426,10 +16510,13 @@ def test_update_feed_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AssetServiceRestInterceptor, "post_update_feed" ) as post, mock.patch.object( + transports.AssetServiceRestInterceptor, "post_update_feed_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AssetServiceRestInterceptor, "pre_update_feed" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = asset_service.UpdateFeedRequest.pb( asset_service.UpdateFeedRequest() ) @@ -16453,6 +16540,7 @@ def test_update_feed_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = asset_service.Feed() + post_with_metadata.return_value = asset_service.Feed(), metadata client.update_feed( request, @@ -16464,6 +16552,7 @@ def 
test_update_feed_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_feed_rest_bad_request(request_type=asset_service.DeleteFeedRequest): @@ -16655,10 +16744,14 @@ def test_search_all_resources_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AssetServiceRestInterceptor, "post_search_all_resources" ) as post, mock.patch.object( + transports.AssetServiceRestInterceptor, + "post_search_all_resources_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AssetServiceRestInterceptor, "pre_search_all_resources" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = asset_service.SearchAllResourcesRequest.pb( asset_service.SearchAllResourcesRequest() ) @@ -16684,6 +16777,10 @@ def test_search_all_resources_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = asset_service.SearchAllResourcesResponse() + post_with_metadata.return_value = ( + asset_service.SearchAllResourcesResponse(), + metadata, + ) client.search_all_resources( request, @@ -16695,6 +16792,7 @@ def test_search_all_resources_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_search_all_iam_policies_rest_bad_request( @@ -16779,10 +16877,14 @@ def test_search_all_iam_policies_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AssetServiceRestInterceptor, "post_search_all_iam_policies" ) as post, mock.patch.object( + transports.AssetServiceRestInterceptor, + "post_search_all_iam_policies_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AssetServiceRestInterceptor, "pre_search_all_iam_policies" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = asset_service.SearchAllIamPoliciesRequest.pb( asset_service.SearchAllIamPoliciesRequest() ) @@ -16808,6 +16910,10 @@ def test_search_all_iam_policies_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = asset_service.SearchAllIamPoliciesResponse() + post_with_metadata.return_value = ( + asset_service.SearchAllIamPoliciesResponse(), + metadata, + ) client.search_all_iam_policies( request, @@ -16819,6 +16925,7 @@ def test_search_all_iam_policies_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_analyze_iam_policy_rest_bad_request( @@ -16903,10 +17010,13 @@ def test_analyze_iam_policy_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AssetServiceRestInterceptor, "post_analyze_iam_policy" ) as post, mock.patch.object( + transports.AssetServiceRestInterceptor, "post_analyze_iam_policy_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AssetServiceRestInterceptor, "pre_analyze_iam_policy" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = asset_service.AnalyzeIamPolicyRequest.pb( asset_service.AnalyzeIamPolicyRequest() ) @@ -16932,6 +17042,10 @@ def test_analyze_iam_policy_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = asset_service.AnalyzeIamPolicyResponse() + post_with_metadata.return_value = ( + asset_service.AnalyzeIamPolicyResponse(), + metadata, + ) 
client.analyze_iam_policy( request, @@ -16943,6 +17057,7 @@ def test_analyze_iam_policy_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_analyze_iam_policy_longrunning_rest_bad_request( @@ -17023,10 +17138,14 @@ def test_analyze_iam_policy_longrunning_rest_interceptors(null_interceptor): ), mock.patch.object( transports.AssetServiceRestInterceptor, "post_analyze_iam_policy_longrunning" ) as post, mock.patch.object( + transports.AssetServiceRestInterceptor, + "post_analyze_iam_policy_longrunning_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AssetServiceRestInterceptor, "pre_analyze_iam_policy_longrunning" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = asset_service.AnalyzeIamPolicyLongrunningRequest.pb( asset_service.AnalyzeIamPolicyLongrunningRequest() ) @@ -17050,6 +17169,7 @@ def test_analyze_iam_policy_longrunning_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.analyze_iam_policy_longrunning( request, @@ -17061,6 +17181,7 @@ def test_analyze_iam_policy_longrunning_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_analyze_move_rest_bad_request(request_type=asset_service.AnalyzeMoveRequest): @@ -17140,10 +17261,13 @@ def test_analyze_move_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AssetServiceRestInterceptor, "post_analyze_move" ) as post, mock.patch.object( + transports.AssetServiceRestInterceptor, "post_analyze_move_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AssetServiceRestInterceptor, "pre_analyze_move" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = asset_service.AnalyzeMoveRequest.pb( asset_service.AnalyzeMoveRequest() ) @@ -17169,6 +17293,7 @@ def test_analyze_move_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = asset_service.AnalyzeMoveResponse() + post_with_metadata.return_value = asset_service.AnalyzeMoveResponse(), metadata client.analyze_move( request, @@ -17180,6 +17305,7 @@ def test_analyze_move_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_query_assets_rest_bad_request(request_type=asset_service.QueryAssetsRequest): @@ -17264,10 +17390,13 @@ def test_query_assets_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AssetServiceRestInterceptor, "post_query_assets" ) as post, mock.patch.object( + transports.AssetServiceRestInterceptor, "post_query_assets_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AssetServiceRestInterceptor, "pre_query_assets" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = asset_service.QueryAssetsRequest.pb( asset_service.QueryAssetsRequest() ) @@ -17293,6 +17422,7 @@ def test_query_assets_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = asset_service.QueryAssetsResponse() + post_with_metadata.return_value = asset_service.QueryAssetsResponse(), metadata client.query_assets( request, @@ -17304,6 
+17434,7 @@ def test_query_assets_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_saved_query_rest_bad_request( @@ -17490,10 +17621,13 @@ def test_create_saved_query_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AssetServiceRestInterceptor, "post_create_saved_query" ) as post, mock.patch.object( + transports.AssetServiceRestInterceptor, "post_create_saved_query_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AssetServiceRestInterceptor, "pre_create_saved_query" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = asset_service.CreateSavedQueryRequest.pb( asset_service.CreateSavedQueryRequest() ) @@ -17517,6 +17651,7 @@ def test_create_saved_query_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = asset_service.SavedQuery() + post_with_metadata.return_value = asset_service.SavedQuery(), metadata client.create_saved_query( request, @@ -17528,6 +17663,7 @@ def test_create_saved_query_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_saved_query_rest_bad_request( @@ -17618,10 +17754,13 @@ def test_get_saved_query_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AssetServiceRestInterceptor, "post_get_saved_query" ) as post, mock.patch.object( + transports.AssetServiceRestInterceptor, "post_get_saved_query_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AssetServiceRestInterceptor, "pre_get_saved_query" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = asset_service.GetSavedQueryRequest.pb( asset_service.GetSavedQueryRequest() ) @@ -17645,6 +17784,7 @@ def test_get_saved_query_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = asset_service.SavedQuery() + post_with_metadata.return_value = asset_service.SavedQuery(), metadata client.get_saved_query( request, @@ -17656,6 +17796,7 @@ def test_get_saved_query_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_saved_queries_rest_bad_request( @@ -17740,10 +17881,13 @@ def test_list_saved_queries_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AssetServiceRestInterceptor, "post_list_saved_queries" ) as post, mock.patch.object( + transports.AssetServiceRestInterceptor, "post_list_saved_queries_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AssetServiceRestInterceptor, "pre_list_saved_queries" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = asset_service.ListSavedQueriesRequest.pb( asset_service.ListSavedQueriesRequest() ) @@ -17769,6 +17913,10 @@ def test_list_saved_queries_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = asset_service.ListSavedQueriesResponse() + post_with_metadata.return_value = ( + asset_service.ListSavedQueriesResponse(), + metadata, + ) client.list_saved_queries( request, @@ -17780,6 +17928,7 @@ def test_list_saved_queries_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def 
test_update_saved_query_rest_bad_request( @@ -17966,10 +18115,13 @@ def test_update_saved_query_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AssetServiceRestInterceptor, "post_update_saved_query" ) as post, mock.patch.object( + transports.AssetServiceRestInterceptor, "post_update_saved_query_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AssetServiceRestInterceptor, "pre_update_saved_query" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = asset_service.UpdateSavedQueryRequest.pb( asset_service.UpdateSavedQueryRequest() ) @@ -17993,6 +18145,7 @@ def test_update_saved_query_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = asset_service.SavedQuery() + post_with_metadata.return_value = asset_service.SavedQuery(), metadata client.update_saved_query( request, @@ -18004,6 +18157,7 @@ def test_update_saved_query_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_saved_query_rest_bad_request( @@ -18196,10 +18350,14 @@ def test_batch_get_effective_iam_policies_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AssetServiceRestInterceptor, "post_batch_get_effective_iam_policies" ) as post, mock.patch.object( + transports.AssetServiceRestInterceptor, + "post_batch_get_effective_iam_policies_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AssetServiceRestInterceptor, "pre_batch_get_effective_iam_policies" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = asset_service.BatchGetEffectiveIamPoliciesRequest.pb( asset_service.BatchGetEffectiveIamPoliciesRequest() ) @@ -18225,6 +18383,10 @@ def test_batch_get_effective_iam_policies_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = asset_service.BatchGetEffectiveIamPoliciesResponse() + post_with_metadata.return_value = ( + asset_service.BatchGetEffectiveIamPoliciesResponse(), + metadata, + ) client.batch_get_effective_iam_policies( request, @@ -18236,6 +18398,7 @@ def test_batch_get_effective_iam_policies_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_analyze_org_policies_rest_bad_request( @@ -18320,10 +18483,14 @@ def test_analyze_org_policies_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AssetServiceRestInterceptor, "post_analyze_org_policies" ) as post, mock.patch.object( + transports.AssetServiceRestInterceptor, + "post_analyze_org_policies_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AssetServiceRestInterceptor, "pre_analyze_org_policies" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = asset_service.AnalyzeOrgPoliciesRequest.pb( asset_service.AnalyzeOrgPoliciesRequest() ) @@ -18349,6 +18516,10 @@ def test_analyze_org_policies_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = asset_service.AnalyzeOrgPoliciesResponse() + post_with_metadata.return_value = ( + asset_service.AnalyzeOrgPoliciesResponse(), + metadata, + ) client.analyze_org_policies( request, @@ -18360,6 +18531,7 @@ def test_analyze_org_policies_rest_interceptors(null_interceptor): 
pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_analyze_org_policy_governed_containers_rest_bad_request( @@ -18447,11 +18619,15 @@ def test_analyze_org_policy_governed_containers_rest_interceptors(null_intercept transports.AssetServiceRestInterceptor, "post_analyze_org_policy_governed_containers", ) as post, mock.patch.object( + transports.AssetServiceRestInterceptor, + "post_analyze_org_policy_governed_containers_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AssetServiceRestInterceptor, "pre_analyze_org_policy_governed_containers", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = asset_service.AnalyzeOrgPolicyGovernedContainersRequest.pb( asset_service.AnalyzeOrgPolicyGovernedContainersRequest() ) @@ -18477,6 +18653,10 @@ def test_analyze_org_policy_governed_containers_rest_interceptors(null_intercept ] pre.return_value = request, metadata post.return_value = asset_service.AnalyzeOrgPolicyGovernedContainersResponse() + post_with_metadata.return_value = ( + asset_service.AnalyzeOrgPolicyGovernedContainersResponse(), + metadata, + ) client.analyze_org_policy_governed_containers( request, @@ -18488,6 +18668,7 @@ def test_analyze_org_policy_governed_containers_rest_interceptors(null_intercept pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_analyze_org_policy_governed_assets_rest_bad_request( @@ -18575,10 +18756,14 @@ def test_analyze_org_policy_governed_assets_rest_interceptors(null_interceptor): transports.AssetServiceRestInterceptor, "post_analyze_org_policy_governed_assets", ) as post, mock.patch.object( + transports.AssetServiceRestInterceptor, + "post_analyze_org_policy_governed_assets_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AssetServiceRestInterceptor, "pre_analyze_org_policy_governed_assets" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = asset_service.AnalyzeOrgPolicyGovernedAssetsRequest.pb( asset_service.AnalyzeOrgPolicyGovernedAssetsRequest() ) @@ -18604,6 +18789,10 @@ def test_analyze_org_policy_governed_assets_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = asset_service.AnalyzeOrgPolicyGovernedAssetsResponse() + post_with_metadata.return_value = ( + asset_service.AnalyzeOrgPolicyGovernedAssetsResponse(), + metadata, + ) client.analyze_org_policy_governed_assets( request, @@ -18615,6 +18804,7 @@ def test_analyze_org_policy_governed_assets_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_operation_rest_bad_request( diff --git a/packages/google-cloud-asset/tests/unit/gapic/asset_v1p1beta1/test_asset_service.py b/packages/google-cloud-asset/tests/unit/gapic/asset_v1p1beta1/test_asset_service.py index a56eea16909f..a1405e454be1 100644 --- a/packages/google-cloud-asset/tests/unit/gapic/asset_v1p1beta1/test_asset_service.py +++ b/packages/google-cloud-asset/tests/unit/gapic/asset_v1p1beta1/test_asset_service.py @@ -61,6 +61,13 @@ ) from google.cloud.asset_v1p1beta1.types import asset_service, assets +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for 
i in range(0, len(data)): # pragma: NO COVER @@ -304,6 +311,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = AssetServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = AssetServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -3013,10 +3063,14 @@ def test_search_all_resources_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AssetServiceRestInterceptor, "post_search_all_resources" ) as post, mock.patch.object( + transports.AssetServiceRestInterceptor, + "post_search_all_resources_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AssetServiceRestInterceptor, "pre_search_all_resources" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = asset_service.SearchAllResourcesRequest.pb( asset_service.SearchAllResourcesRequest() ) @@ -3042,6 +3096,10 @@ def test_search_all_resources_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = asset_service.SearchAllResourcesResponse() + post_with_metadata.return_value = ( + asset_service.SearchAllResourcesResponse(), + metadata, + ) client.search_all_resources( request, @@ -3053,6 +3111,7 @@ def test_search_all_resources_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_search_all_iam_policies_rest_bad_request( @@ -3137,10 +3196,14 @@ def test_search_all_iam_policies_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AssetServiceRestInterceptor, "post_search_all_iam_policies" ) as post, mock.patch.object( + transports.AssetServiceRestInterceptor, + "post_search_all_iam_policies_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AssetServiceRestInterceptor, "pre_search_all_iam_policies" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = asset_service.SearchAllIamPoliciesRequest.pb( asset_service.SearchAllIamPoliciesRequest() ) @@ -3166,6 +3229,10 @@ def test_search_all_iam_policies_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = 
asset_service.SearchAllIamPoliciesResponse() + post_with_metadata.return_value = ( + asset_service.SearchAllIamPoliciesResponse(), + metadata, + ) client.search_all_iam_policies( request, @@ -3177,6 +3244,7 @@ def test_search_all_iam_policies_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_initialize_client_w_rest(): diff --git a/packages/google-cloud-asset/tests/unit/gapic/asset_v1p2beta1/test_asset_service.py b/packages/google-cloud-asset/tests/unit/gapic/asset_v1p2beta1/test_asset_service.py index fabc04732970..359ac3b6df79 100644 --- a/packages/google-cloud-asset/tests/unit/gapic/asset_v1p2beta1/test_asset_service.py +++ b/packages/google-cloud-asset/tests/unit/gapic/asset_v1p2beta1/test_asset_service.py @@ -61,6 +61,13 @@ ) from google.cloud.asset_v1p2beta1.types import asset_service +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -304,6 +311,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = AssetServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = AssetServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -3992,10 +4042,13 @@ def test_create_feed_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AssetServiceRestInterceptor, "post_create_feed" ) as post, mock.patch.object( + transports.AssetServiceRestInterceptor, "post_create_feed_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AssetServiceRestInterceptor, "pre_create_feed" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = asset_service.CreateFeedRequest.pb( asset_service.CreateFeedRequest() ) @@ -4019,6 +4072,7 @@ def test_create_feed_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = asset_service.Feed() + post_with_metadata.return_value = asset_service.Feed(), metadata client.create_feed( request, @@ -4030,6 +4084,7 @@ def 
test_create_feed_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_feed_rest_bad_request(request_type=asset_service.GetFeedRequest): @@ -4118,10 +4173,13 @@ def test_get_feed_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AssetServiceRestInterceptor, "post_get_feed" ) as post, mock.patch.object( + transports.AssetServiceRestInterceptor, "post_get_feed_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AssetServiceRestInterceptor, "pre_get_feed" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = asset_service.GetFeedRequest.pb(asset_service.GetFeedRequest()) transcode.return_value = { "method": "post", @@ -4143,6 +4201,7 @@ def test_get_feed_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = asset_service.Feed() + post_with_metadata.return_value = asset_service.Feed(), metadata client.get_feed( request, @@ -4154,6 +4213,7 @@ def test_get_feed_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_feeds_rest_bad_request(request_type=asset_service.ListFeedsRequest): @@ -4233,10 +4293,13 @@ def test_list_feeds_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AssetServiceRestInterceptor, "post_list_feeds" ) as post, mock.patch.object( + transports.AssetServiceRestInterceptor, "post_list_feeds_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AssetServiceRestInterceptor, "pre_list_feeds" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = asset_service.ListFeedsRequest.pb(asset_service.ListFeedsRequest()) transcode.return_value = { "method": "post", @@ -4260,6 +4323,7 @@ def test_list_feeds_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = asset_service.ListFeedsResponse() + post_with_metadata.return_value = asset_service.ListFeedsResponse(), metadata client.list_feeds( request, @@ -4271,6 +4335,7 @@ def test_list_feeds_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_feed_rest_bad_request(request_type=asset_service.UpdateFeedRequest): @@ -4359,10 +4424,13 @@ def test_update_feed_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AssetServiceRestInterceptor, "post_update_feed" ) as post, mock.patch.object( + transports.AssetServiceRestInterceptor, "post_update_feed_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AssetServiceRestInterceptor, "pre_update_feed" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = asset_service.UpdateFeedRequest.pb( asset_service.UpdateFeedRequest() ) @@ -4386,6 +4454,7 @@ def test_update_feed_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = asset_service.Feed() + post_with_metadata.return_value = asset_service.Feed(), metadata client.update_feed( request, @@ -4397,6 +4466,7 @@ def test_update_feed_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_feed_rest_bad_request(request_type=asset_service.DeleteFeedRequest): 
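Note (editorial, not part of the generated diff): the `post_*_with_metadata` interceptor hooks added throughout this change all follow the same pattern — they receive the decoded response plus the HTTP response headers as `(key, value)` metadata pairs and must return both. As an illustrative sketch only, a user-defined interceptor for the v1p2beta1 asset service might look like the following; the class name `LoggingAssetInterceptor` is hypothetical, and the imports mirror the module layout and test usage shown in this diff.

# Illustrative sketch only -- not generated code from this change.
# Assumes the module layout shown above; LoggingAssetInterceptor is a made-up name.
from typing import Sequence, Tuple, Union

from google.cloud.asset_v1p2beta1.services.asset_service import transports
from google.cloud.asset_v1p2beta1.types import asset_service


class LoggingAssetInterceptor(transports.AssetServiceRestInterceptor):
    """Reads response headers via the new *_with_metadata hook for get_feed."""

    def post_get_feed_with_metadata(
        self,
        response: asset_service.Feed,
        metadata: Sequence[Tuple[str, Union[str, bytes]]],
    ) -> Tuple[asset_service.Feed, Sequence[Tuple[str, Union[str, bytes]]]]:
        # `metadata` carries the HTTP response headers as (key, value) pairs,
        # built in the transport as [(k, str(v)) for k, v in response.headers.items()].
        for key, value in metadata:
            print(f"get_feed response header: {key}={value}")
        # The (possibly modified) response and the metadata must both be returned.
        return response, metadata

How the interceptor is wired into a client goes through the REST transport; the exact constructor arguments are not shown in this section of the diff, so they are omitted here. Separately, the other feature in this change, `_add_cred_info_for_auth_errors`, means that a caller catching `core_exceptions.GoogleAPICallError` on a 401/403/404 response may find a JSON string describing the credential (source, type, principal) appended to `error.details`, provided the installed google-auth (>=2.35.0) exposes `get_cred_info`.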
diff --git a/packages/google-cloud-asset/tests/unit/gapic/asset_v1p5beta1/test_asset_service.py b/packages/google-cloud-asset/tests/unit/gapic/asset_v1p5beta1/test_asset_service.py index 80019a17117e..c5f28ec9e21b 100644 --- a/packages/google-cloud-asset/tests/unit/gapic/asset_v1p5beta1/test_asset_service.py +++ b/packages/google-cloud-asset/tests/unit/gapic/asset_v1p5beta1/test_asset_service.py @@ -62,6 +62,13 @@ ) from google.cloud.asset_v1p5beta1.types import asset_service, assets +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -305,6 +312,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = AssetServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = AssetServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -2069,10 +2119,13 @@ def test_list_assets_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AssetServiceRestInterceptor, "post_list_assets" ) as post, mock.patch.object( + transports.AssetServiceRestInterceptor, "post_list_assets_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AssetServiceRestInterceptor, "pre_list_assets" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = asset_service.ListAssetsRequest.pb( asset_service.ListAssetsRequest() ) @@ -2098,6 +2151,7 @@ def test_list_assets_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = asset_service.ListAssetsResponse() + post_with_metadata.return_value = asset_service.ListAssetsResponse(), metadata client.list_assets( request, @@ -2109,6 +2163,7 @@ def test_list_assets_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_initialize_client_w_rest(): diff --git a/packages/google-cloud-assured-workloads/CHANGELOG.md b/packages/google-cloud-assured-workloads/CHANGELOG.md index bd7991e56401..674204fb7dbc 100644 --- 
a/packages/google-cloud-assured-workloads/CHANGELOG.md +++ b/packages/google-cloud-assured-workloads/CHANGELOG.md @@ -1,5 +1,13 @@ # Changelog +## [1.15.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-assured-workloads-v1.14.0...google-cloud-assured-workloads-v1.15.0) (2025-02-12) + + +### Features + +* Add REST Interceptors which support reading metadata ([b1c3ce8](https://github.com/googleapis/google-cloud-python/commit/b1c3ce8b271e9d22afabcde054e81dcedae6b0ef)) +* Add support for reading selective GAPIC generation methods from service YAML ([b1c3ce8](https://github.com/googleapis/google-cloud-python/commit/b1c3ce8b271e9d22afabcde054e81dcedae6b0ef)) + ## [1.14.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-assured-workloads-v1.13.1...google-cloud-assured-workloads-v1.14.0) (2024-12-12) diff --git a/packages/google-cloud-assured-workloads/README.rst b/packages/google-cloud-assured-workloads/README.rst index bff2d3f544c4..2e5ffa9e187c 100644 --- a/packages/google-cloud-assured-workloads/README.rst +++ b/packages/google-cloud-assured-workloads/README.rst @@ -26,12 +26,12 @@ In order to use this library, you first need to go through the following steps: 1. `Select or create a Cloud Platform project.`_ 2. `Enable billing for your project.`_ 3. `Enable the Assured Workloads for Government.`_ -4. `Setup Authentication.`_ +4. `Set up Authentication.`_ .. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project .. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project .. _Enable the Assured Workloads for Government.: https://cloud.google.com/assured-workloads/ -.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html +.. _Set up Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html Installation ~~~~~~~~~~~~ diff --git a/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads/gapic_version.py b/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads/gapic_version.py index 2159c8af6f8e..cf18a472a8a2 100644 --- a/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads/gapic_version.py +++ b/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.14.0" # {x-release-please-version} +__version__ = "1.15.0" # {x-release-please-version} diff --git a/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1/gapic_version.py b/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1/gapic_version.py index 2159c8af6f8e..cf18a472a8a2 100644 --- a/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1/gapic_version.py +++ b/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "1.14.0" # {x-release-please-version} +__version__ = "1.15.0" # {x-release-please-version} diff --git a/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1/services/assured_workloads_service/client.py b/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1/services/assured_workloads_service/client.py index 37bbd45cf1ac..c71f0f9296ef 100644 --- a/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1/services/assured_workloads_service/client.py +++ b/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1/services/assured_workloads_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -516,6 +518,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -1759,16 +1788,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -1814,16 +1847,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. 
+ return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1/services/assured_workloads_service/transports/rest.py b/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1/services/assured_workloads_service/transports/rest.py index 37fba2a15379..27fa546c5548 100644 --- a/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1/services/assured_workloads_service/transports/rest.py +++ b/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1/services/assured_workloads_service/transports/rest.py @@ -163,12 +163,35 @@ def post_create_workload( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_workload - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_workload_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AssuredWorkloadsService server but before - it is returned to user code. + it is returned to user code. This `post_create_workload` interceptor runs + before the `post_create_workload_with_metadata` interceptor. """ return response + def post_create_workload_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_workload + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AssuredWorkloadsService server but before it is returned to user code. + + We recommend only using this `post_create_workload_with_metadata` + interceptor in new development instead of the `post_create_workload` interceptor. + When both interceptors are used, this `post_create_workload_with_metadata` interceptor runs after the + `post_create_workload` interceptor. The (possibly modified) response returned by + `post_create_workload` will be passed to + `post_create_workload_with_metadata`. + """ + return response, metadata + def pre_delete_workload( self, request: assuredworkloads.DeleteWorkloadRequest, @@ -202,12 +225,35 @@ def post_get_workload( ) -> assuredworkloads.Workload: """Post-rpc interceptor for get_workload - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_workload_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AssuredWorkloadsService server but before - it is returned to user code. + it is returned to user code. This `post_get_workload` interceptor runs + before the `post_get_workload_with_metadata` interceptor. """ return response + def post_get_workload_with_metadata( + self, + response: assuredworkloads.Workload, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[assuredworkloads.Workload, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_workload + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AssuredWorkloadsService server but before it is returned to user code. + + We recommend only using this `post_get_workload_with_metadata` + interceptor in new development instead of the `post_get_workload` interceptor. 
+ When both interceptors are used, this `post_get_workload_with_metadata` interceptor runs after the + `post_get_workload` interceptor. The (possibly modified) response returned by + `post_get_workload` will be passed to + `post_get_workload_with_metadata`. + """ + return response, metadata + def pre_list_workloads( self, request: assuredworkloads.ListWorkloadsRequest, @@ -227,12 +273,37 @@ def post_list_workloads( ) -> assuredworkloads.ListWorkloadsResponse: """Post-rpc interceptor for list_workloads - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_workloads_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AssuredWorkloadsService server but before - it is returned to user code. + it is returned to user code. This `post_list_workloads` interceptor runs + before the `post_list_workloads_with_metadata` interceptor. """ return response + def post_list_workloads_with_metadata( + self, + response: assuredworkloads.ListWorkloadsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + assuredworkloads.ListWorkloadsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_workloads + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AssuredWorkloadsService server but before it is returned to user code. + + We recommend only using this `post_list_workloads_with_metadata` + interceptor in new development instead of the `post_list_workloads` interceptor. + When both interceptors are used, this `post_list_workloads_with_metadata` interceptor runs after the + `post_list_workloads` interceptor. The (possibly modified) response returned by + `post_list_workloads` will be passed to + `post_list_workloads_with_metadata`. + """ + return response, metadata + def pre_restrict_allowed_resources( self, request: assuredworkloads.RestrictAllowedResourcesRequest, @@ -253,12 +324,38 @@ def post_restrict_allowed_resources( ) -> assuredworkloads.RestrictAllowedResourcesResponse: """Post-rpc interceptor for restrict_allowed_resources - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_restrict_allowed_resources_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AssuredWorkloadsService server but before - it is returned to user code. + it is returned to user code. This `post_restrict_allowed_resources` interceptor runs + before the `post_restrict_allowed_resources_with_metadata` interceptor. """ return response + def post_restrict_allowed_resources_with_metadata( + self, + response: assuredworkloads.RestrictAllowedResourcesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + assuredworkloads.RestrictAllowedResourcesResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for restrict_allowed_resources + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AssuredWorkloadsService server but before it is returned to user code. + + We recommend only using this `post_restrict_allowed_resources_with_metadata` + interceptor in new development instead of the `post_restrict_allowed_resources` interceptor. + When both interceptors are used, this `post_restrict_allowed_resources_with_metadata` interceptor runs after the + `post_restrict_allowed_resources` interceptor. 
The (possibly modified) response returned by + `post_restrict_allowed_resources` will be passed to + `post_restrict_allowed_resources_with_metadata`. + """ + return response, metadata + def pre_update_workload( self, request: assuredworkloads.UpdateWorkloadRequest, @@ -278,12 +375,35 @@ def post_update_workload( ) -> assuredworkloads.Workload: """Post-rpc interceptor for update_workload - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_workload_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AssuredWorkloadsService server but before - it is returned to user code. + it is returned to user code. This `post_update_workload` interceptor runs + before the `post_update_workload_with_metadata` interceptor. """ return response + def post_update_workload_with_metadata( + self, + response: assuredworkloads.Workload, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[assuredworkloads.Workload, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_workload + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AssuredWorkloadsService server but before it is returned to user code. + + We recommend only using this `post_update_workload_with_metadata` + interceptor in new development instead of the `post_update_workload` interceptor. + When both interceptors are used, this `post_update_workload_with_metadata` interceptor runs after the + `post_update_workload` interceptor. The (possibly modified) response returned by + `post_update_workload` will be passed to + `post_update_workload_with_metadata`. + """ + return response, metadata + def pre_get_operation( self, request: operations_pb2.GetOperationRequest, @@ -608,6 +728,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_workload(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_workload_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -879,6 +1003,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_workload(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_workload_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1042,6 +1170,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_workloads(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_workloads_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1198,6 +1330,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_restrict_allowed_resources(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_restrict_allowed_resources_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1353,6 +1489,10 @@ def 
__call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_workload(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_workload_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1beta1/gapic_version.py b/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1beta1/gapic_version.py index 2159c8af6f8e..cf18a472a8a2 100644 --- a/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1beta1/gapic_version.py +++ b/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.14.0" # {x-release-please-version} +__version__ = "1.15.0" # {x-release-please-version} diff --git a/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1beta1/services/assured_workloads_service/client.py b/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1beta1/services/assured_workloads_service/client.py index 22d4c1f3af26..34fcc666eeb2 100644 --- a/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1beta1/services/assured_workloads_service/client.py +++ b/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1beta1/services/assured_workloads_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -494,6 +496,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -1542,16 +1571,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -1597,16 +1630,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. 
- response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1beta1/services/assured_workloads_service/transports/rest.py b/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1beta1/services/assured_workloads_service/transports/rest.py index ae5506592ef0..76f4eeb954fa 100644 --- a/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1beta1/services/assured_workloads_service/transports/rest.py +++ b/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1beta1/services/assured_workloads_service/transports/rest.py @@ -147,12 +147,35 @@ def post_create_workload( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_workload - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_workload_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AssuredWorkloadsService server but before - it is returned to user code. + it is returned to user code. This `post_create_workload` interceptor runs + before the `post_create_workload_with_metadata` interceptor. """ return response + def post_create_workload_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_workload + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AssuredWorkloadsService server but before it is returned to user code. + + We recommend only using this `post_create_workload_with_metadata` + interceptor in new development instead of the `post_create_workload` interceptor. + When both interceptors are used, this `post_create_workload_with_metadata` interceptor runs after the + `post_create_workload` interceptor. The (possibly modified) response returned by + `post_create_workload` will be passed to + `post_create_workload_with_metadata`. + """ + return response, metadata + def pre_delete_workload( self, request: assuredworkloads.DeleteWorkloadRequest, @@ -187,12 +210,38 @@ def post_restrict_allowed_resources( ) -> assuredworkloads.RestrictAllowedResourcesResponse: """Post-rpc interceptor for restrict_allowed_resources - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_restrict_allowed_resources_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AssuredWorkloadsService server but before - it is returned to user code. + it is returned to user code. This `post_restrict_allowed_resources` interceptor runs + before the `post_restrict_allowed_resources_with_metadata` interceptor. 
""" return response + def post_restrict_allowed_resources_with_metadata( + self, + response: assuredworkloads.RestrictAllowedResourcesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + assuredworkloads.RestrictAllowedResourcesResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for restrict_allowed_resources + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AssuredWorkloadsService server but before it is returned to user code. + + We recommend only using this `post_restrict_allowed_resources_with_metadata` + interceptor in new development instead of the `post_restrict_allowed_resources` interceptor. + When both interceptors are used, this `post_restrict_allowed_resources_with_metadata` interceptor runs after the + `post_restrict_allowed_resources` interceptor. The (possibly modified) response returned by + `post_restrict_allowed_resources` will be passed to + `post_restrict_allowed_resources_with_metadata`. + """ + return response, metadata + def pre_get_operation( self, request: operations_pb2.GetOperationRequest, @@ -517,6 +566,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_workload(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_workload_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -818,6 +871,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_restrict_allowed_resources(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_restrict_allowed_resources_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-assured-workloads/noxfile.py b/packages/google-cloud-assured-workloads/noxfile.py index a9ceef47133c..0acc836b384e 100644 --- a/packages/google-cloud-assured-workloads/noxfile.py +++ b/packages/google-cloud-assured-workloads/noxfile.py @@ -382,20 +382,29 @@ def docfx(session): ["python", "upb", "cpp"], ) def prerelease_deps(session, protobuf_implementation): - """Run all tests with prerelease versions of dependencies installed.""" + """ + Run all tests with pre-release versions of dependencies installed + rather than the standard non pre-release versions. + Pre-releases versions can be installed using + `pip install --pre `. + """ if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): session.skip("cpp implementation is not supported in python 3.11+") # Install all dependencies - session.install("-e", ".[all, tests, tracing]") + session.install("-e", ".") + unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES + # Install dependencies for the unit test environment session.install(*unit_deps_all) + system_deps_all = ( SYSTEM_TEST_STANDARD_DEPENDENCIES + SYSTEM_TEST_EXTERNAL_DEPENDENCIES + SYSTEM_TEST_EXTRAS ) + # Install dependencies for the system test environment session.install(*system_deps_all) # Because we test minimum dependency versions on the minimum Python @@ -417,6 +426,7 @@ def prerelease_deps(session, protobuf_implementation): ) ] + # Install dependencies specified in `testing/constraints-X.txt`. 
session.install(*constraints_deps) prerel_deps = [ @@ -458,3 +468,70 @@ def prerelease_deps(session, protobuf_implementation): "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, }, ) + + +@nox.session(python="3.13") +@nox.parametrize( + "protobuf_implementation", + ["python", "upb"], +) +def core_deps_from_source(session, protobuf_implementation): + """Run all tests with local versions of core dependencies installed, + rather than pulling core dependencies from PyPI. + """ + + # Install all dependencies + session.install(".") + + # Install dependencies for the unit test environment + unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES + session.install(*unit_deps_all) + + # Install dependencies for the system test environment + system_deps_all = ( + SYSTEM_TEST_STANDARD_DEPENDENCIES + + SYSTEM_TEST_EXTERNAL_DEPENDENCIES + + SYSTEM_TEST_EXTRAS + ) + session.install(*system_deps_all) + + # Because we test minimum dependency versions on the minimum Python + # version, the first version we test with in the unit tests sessions has a + # constraints file containing all dependencies and extras that should be installed. + with open( + CURRENT_DIRECTORY + / "testing" + / f"constraints-{UNIT_TEST_PYTHON_VERSIONS[0]}.txt", + encoding="utf-8", + ) as constraints_file: + constraints_text = constraints_file.read() + + # Ignore leading whitespace and comment lines. + constraints_deps = [ + match.group(1) + for match in re.finditer( + r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE + ) + ] + + # Install dependencies specified in `testing/constraints-X.txt`. + session.install(*constraints_deps) + + core_dependencies_from_source = [ + "google-api-core @ git+https://github.com/googleapis/python-api-core.git", + "google-auth @ git+https://github.com/googleapis/google-auth-library-python.git", + f"{CURRENT_DIRECTORY}/../googleapis-common-protos", + f"{CURRENT_DIRECTORY}/../grpc-google-iam-v1", + "proto-plus @ git+https://github.com/googleapis/proto-plus-python.git", + ] + + for dep in core_dependencies_from_source: + session.install(dep, "--ignore-installed", "--no-deps") + + session.run( + "py.test", + "tests/unit", + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, + ) diff --git a/packages/google-cloud-assured-workloads/samples/generated_samples/snippet_metadata_google.cloud.assuredworkloads.v1.json b/packages/google-cloud-assured-workloads/samples/generated_samples/snippet_metadata_google.cloud.assuredworkloads.v1.json index 7aea6b7e0ea0..6ef4c5d83ad4 100644 --- a/packages/google-cloud-assured-workloads/samples/generated_samples/snippet_metadata_google.cloud.assuredworkloads.v1.json +++ b/packages/google-cloud-assured-workloads/samples/generated_samples/snippet_metadata_google.cloud.assuredworkloads.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-assured-workloads", - "version": "1.14.0" + "version": "1.15.0" }, "snippets": [ { diff --git a/packages/google-cloud-assured-workloads/samples/generated_samples/snippet_metadata_google.cloud.assuredworkloads.v1beta1.json b/packages/google-cloud-assured-workloads/samples/generated_samples/snippet_metadata_google.cloud.assuredworkloads.v1beta1.json index ef100c1cce17..5c2437be53c9 100644 --- a/packages/google-cloud-assured-workloads/samples/generated_samples/snippet_metadata_google.cloud.assuredworkloads.v1beta1.json +++ b/packages/google-cloud-assured-workloads/samples/generated_samples/snippet_metadata_google.cloud.assuredworkloads.v1beta1.json @@ -8,7 
+8,7 @@ ], "language": "PYTHON", "name": "google-cloud-assured-workloads", - "version": "1.14.0" + "version": "1.15.0" }, "snippets": [ { diff --git a/packages/google-cloud-assured-workloads/tests/unit/gapic/assuredworkloads_v1/test_assured_workloads_service.py b/packages/google-cloud-assured-workloads/tests/unit/gapic/assuredworkloads_v1/test_assured_workloads_service.py index 8d3f85d46859..acc3b330b373 100644 --- a/packages/google-cloud-assured-workloads/tests/unit/gapic/assuredworkloads_v1/test_assured_workloads_service.py +++ b/packages/google-cloud-assured-workloads/tests/unit/gapic/assuredworkloads_v1/test_assured_workloads_service.py @@ -73,6 +73,13 @@ ) from google.cloud.assuredworkloads_v1.types import assuredworkloads +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -348,6 +355,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = AssuredWorkloadsServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = AssuredWorkloadsServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -6216,10 +6266,14 @@ def test_create_workload_rest_interceptors(null_interceptor): ), mock.patch.object( transports.AssuredWorkloadsServiceRestInterceptor, "post_create_workload" ) as post, mock.patch.object( + transports.AssuredWorkloadsServiceRestInterceptor, + "post_create_workload_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AssuredWorkloadsServiceRestInterceptor, "pre_create_workload" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = assuredworkloads.CreateWorkloadRequest.pb( assuredworkloads.CreateWorkloadRequest() ) @@ -6243,6 +6297,7 @@ def test_create_workload_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_workload( request, @@ -6254,6 +6309,7 @@ def test_create_workload_rest_interceptors(null_interceptor): 
pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_workload_rest_bad_request( @@ -6468,10 +6524,14 @@ def test_update_workload_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AssuredWorkloadsServiceRestInterceptor, "post_update_workload" ) as post, mock.patch.object( + transports.AssuredWorkloadsServiceRestInterceptor, + "post_update_workload_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AssuredWorkloadsServiceRestInterceptor, "pre_update_workload" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = assuredworkloads.UpdateWorkloadRequest.pb( assuredworkloads.UpdateWorkloadRequest() ) @@ -6495,6 +6555,7 @@ def test_update_workload_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = assuredworkloads.Workload() + post_with_metadata.return_value = assuredworkloads.Workload(), metadata client.update_workload( request, @@ -6506,6 +6567,7 @@ def test_update_workload_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_restrict_allowed_resources_rest_bad_request( @@ -6590,11 +6652,15 @@ def test_restrict_allowed_resources_rest_interceptors(null_interceptor): transports.AssuredWorkloadsServiceRestInterceptor, "post_restrict_allowed_resources", ) as post, mock.patch.object( + transports.AssuredWorkloadsServiceRestInterceptor, + "post_restrict_allowed_resources_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AssuredWorkloadsServiceRestInterceptor, "pre_restrict_allowed_resources", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = assuredworkloads.RestrictAllowedResourcesRequest.pb( assuredworkloads.RestrictAllowedResourcesRequest() ) @@ -6620,6 +6686,10 @@ def test_restrict_allowed_resources_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = assuredworkloads.RestrictAllowedResourcesResponse() + post_with_metadata.return_value = ( + assuredworkloads.RestrictAllowedResourcesResponse(), + metadata, + ) client.restrict_allowed_resources( request, @@ -6631,6 +6701,7 @@ def test_restrict_allowed_resources_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_workload_rest_bad_request( @@ -6849,10 +6920,14 @@ def test_get_workload_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AssuredWorkloadsServiceRestInterceptor, "post_get_workload" ) as post, mock.patch.object( + transports.AssuredWorkloadsServiceRestInterceptor, + "post_get_workload_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AssuredWorkloadsServiceRestInterceptor, "pre_get_workload" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = assuredworkloads.GetWorkloadRequest.pb( assuredworkloads.GetWorkloadRequest() ) @@ -6876,6 +6951,7 @@ def test_get_workload_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = assuredworkloads.Workload() + post_with_metadata.return_value = assuredworkloads.Workload(), metadata client.get_workload( request, @@ -6887,6 +6963,7 @@ def test_get_workload_rest_interceptors(null_interceptor): pre.assert_called_once() 
post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_workloads_rest_bad_request( @@ -6971,10 +7048,14 @@ def test_list_workloads_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AssuredWorkloadsServiceRestInterceptor, "post_list_workloads" ) as post, mock.patch.object( + transports.AssuredWorkloadsServiceRestInterceptor, + "post_list_workloads_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AssuredWorkloadsServiceRestInterceptor, "pre_list_workloads" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = assuredworkloads.ListWorkloadsRequest.pb( assuredworkloads.ListWorkloadsRequest() ) @@ -7000,6 +7081,10 @@ def test_list_workloads_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = assuredworkloads.ListWorkloadsResponse() + post_with_metadata.return_value = ( + assuredworkloads.ListWorkloadsResponse(), + metadata, + ) client.list_workloads( request, @@ -7011,6 +7096,7 @@ def test_list_workloads_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_violations_rest_error(): diff --git a/packages/google-cloud-assured-workloads/tests/unit/gapic/assuredworkloads_v1beta1/test_assured_workloads_service.py b/packages/google-cloud-assured-workloads/tests/unit/gapic/assuredworkloads_v1beta1/test_assured_workloads_service.py index e26d44ea87c0..d8dc888e6a54 100644 --- a/packages/google-cloud-assured-workloads/tests/unit/gapic/assuredworkloads_v1beta1/test_assured_workloads_service.py +++ b/packages/google-cloud-assured-workloads/tests/unit/gapic/assuredworkloads_v1beta1/test_assured_workloads_service.py @@ -73,6 +73,13 @@ ) from google.cloud.assuredworkloads_v1beta1.types import assuredworkloads +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -348,6 +355,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = AssuredWorkloadsServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = AssuredWorkloadsServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -4665,10 +4715,14 @@ def test_create_workload_rest_interceptors(null_interceptor): ), mock.patch.object( transports.AssuredWorkloadsServiceRestInterceptor, "post_create_workload" ) as post, mock.patch.object( + transports.AssuredWorkloadsServiceRestInterceptor, + "post_create_workload_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AssuredWorkloadsServiceRestInterceptor, "pre_create_workload" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = assuredworkloads.CreateWorkloadRequest.pb( assuredworkloads.CreateWorkloadRequest() ) @@ -4692,6 +4746,7 @@ def test_create_workload_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_workload( request, @@ -4703,6 +4758,7 @@ def test_create_workload_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_workload_rest_error(): @@ -4799,11 +4855,15 @@ def test_restrict_allowed_resources_rest_interceptors(null_interceptor): transports.AssuredWorkloadsServiceRestInterceptor, "post_restrict_allowed_resources", ) as post, mock.patch.object( + transports.AssuredWorkloadsServiceRestInterceptor, + "post_restrict_allowed_resources_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AssuredWorkloadsServiceRestInterceptor, "pre_restrict_allowed_resources", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = assuredworkloads.RestrictAllowedResourcesRequest.pb( assuredworkloads.RestrictAllowedResourcesRequest() ) @@ -4829,6 +4889,10 @@ def test_restrict_allowed_resources_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = assuredworkloads.RestrictAllowedResourcesResponse() + post_with_metadata.return_value = ( + assuredworkloads.RestrictAllowedResourcesResponse(), + metadata, + ) client.restrict_allowed_resources( request, @@ -4840,6 +4904,7 @@ 
def test_restrict_allowed_resources_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_workload_rest_bad_request( diff --git a/packages/google-cloud-audit-log/.OwlBot.yaml b/packages/google-cloud-audit-log/.OwlBot.yaml new file mode 100644 index 000000000000..847b49962131 --- /dev/null +++ b/packages/google-cloud-audit-log/.OwlBot.yaml @@ -0,0 +1,24 @@ +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +deep-remove-regex: + - /owl-bot-staging + +deep-copy-regex: + - source: /google/cloud/audit/(audit-py)/(.*audit.*) + dest: /owl-bot-staging/google-cloud-audit-log/$1/google/cloud/audit/$2 + +begin-after-commit-hash: 7af2cb8b2b725641ac0d07e2f256d453682802e6 + diff --git a/packages/google-cloud-audit-log/.flake8 b/packages/google-cloud-audit-log/.flake8 new file mode 100644 index 000000000000..32986c79287a --- /dev/null +++ b/packages/google-cloud-audit-log/.flake8 @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! +[flake8] +ignore = E203, E231, E266, E501, W503 +exclude = + # Exclude generated code. + **/proto/** + **/gapic/** + **/services/** + **/types/** + *_pb2.py + + # Standard linting exemptions. + **/.nox/** + __pycache__, + .git, + *.pyc, + conf.py diff --git a/packages/google-cloud-audit-log/.gitignore b/packages/google-cloud-audit-log/.gitignore new file mode 100644 index 000000000000..b4243ced74e4 --- /dev/null +++ b/packages/google-cloud-audit-log/.gitignore @@ -0,0 +1,63 @@ +*.py[cod] +*.sw[op] + +# C extensions +*.so + +# Packages +*.egg +*.egg-info +dist +build +eggs +.eggs +parts +bin +var +sdist +develop-eggs +.installed.cfg +lib +lib64 +__pycache__ + +# Installer logs +pip-log.txt + +# Unit test / coverage reports +.coverage +.nox +.cache +.pytest_cache + + +# Mac +.DS_Store + +# JetBrains +.idea + +# VS Code +.vscode + +# emacs +*~ + +# Built documentation +docs/_build +bigquery/docs/generated +docs.metadata + +# Virtual environment +env/ + +# Test logs +coverage.xml +*sponge_log.xml + +# System test environment variables. +system_tests/local_test_setup + +# Make sure a generated file isn't accidentally committed. 
+pylintrc +pylintrc.test diff --git a/packages/google-cloud-audit-log/.repo-metadata.json b/packages/google-cloud-audit-log/.repo-metadata.json new file mode 100644 index 000000000000..b722a1070d1d --- /dev/null +++ b/packages/google-cloud-audit-log/.repo-metadata.json @@ -0,0 +1,16 @@ +{ + "name": "auditlog", + "name_pretty": "Audit Log API", + "product_documentation": "https://cloud.google.com/logging/docs/audit", + "client_documentation": "https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-audit-log", + "issue_tracker": "", + "release_level": "preview", + "language": "python", + "library_type": "OTHER", + "repo": "googleapis/google-cloud-python", + "distribution_name": "google-cloud-audit-log", + "api_id": "", + "default_version": "apiVersion", + "codeowner_team": "", + "api_shortname": "auditlog" +} diff --git a/packages/google-cloud-audit-log/CHANGELOG.md b/packages/google-cloud-audit-log/CHANGELOG.md new file mode 100644 index 000000000000..f6ce8e51e7e1 --- /dev/null +++ b/packages/google-cloud-audit-log/CHANGELOG.md @@ -0,0 +1,75 @@ +# Changelog + +## [0.3.0](https://github.com/googleapis/python-audit-log/compare/v0.2.5...v0.3.0) (2024-08-15) + + +### Features + +* Add `google/cloud/audit/bigquery_audit_metadata_pb2.py` ([c6efc56](https://github.com/googleapis/python-audit-log/commit/c6efc56eec9627ecf1e139cc33d5815937f04dc6)) +* Add PolicyViolation. this will only be present when access is denied due to Organization Policy [fc5be6f](https://github.com/googleapis/googleapis/commit/fc5be6f850e7989e912b40c6b79306c6dc9655bd) ([c6efc56](https://github.com/googleapis/python-audit-log/commit/c6efc56eec9627ecf1e139cc33d5815937f04dc6)) +* Add support for Python 3.12 ([#113](https://github.com/googleapis/python-audit-log/issues/113)) ([08b0bca](https://github.com/googleapis/python-audit-log/commit/08b0bca0ee634d65bba18c7de102063be17d0958)) +* Add the principal field to the ServiceAccountDelegationInfo [ba89dac](https://github.com/googleapis/googleapis/commit/ba89dace27923254d96ab8339b831dc996e2112f) ([c6efc56](https://github.com/googleapis/python-audit-log/commit/c6efc56eec9627ecf1e139cc33d5815937f04dc6)) +* Introduce compatibility with native namespace packages ([#117](https://github.com/googleapis/python-audit-log/issues/117)) ([9007e8a](https://github.com/googleapis/python-audit-log/commit/9007e8af7f5300f866f42035c36a9d3fe36ef117)) +* Update AuditLog proto to include all new changes in Audit Logging [40292fc](https://github.com/googleapis/googleapis/commit/40292fc8f271f3b8708f9c91c85d7240200893a6) ([c6efc56](https://github.com/googleapis/python-audit-log/commit/c6efc56eec9627ecf1e139cc33d5815937f04dc6)) + + +### Bug Fixes + +* **deps:** Require protobuf>=3.20.2, protobuf<6 ([bf1434a](https://github.com/googleapis/python-audit-log/commit/bf1434a7f4c0d03767c6f943de898d5562e874b1)) +* Regenerate pb2 files for compatibility with protobuf 5.x ([bf1434a](https://github.com/googleapis/python-audit-log/commit/bf1434a7f4c0d03767c6f943de898d5562e874b1)) + +## [0.2.5](https://github.com/googleapis/python-audit-log/compare/v0.2.4...v0.2.5) (2023-01-07) + + +### Bug Fixes + +* **deps:** Require protobuf >= 3.19.5 ([#79](https://github.com/googleapis/python-audit-log/issues/79)) ([94e7044](https://github.com/googleapis/python-audit-log/commit/94e7044c66050e6a419bf694e25e677827aa6c13)) + +## [0.2.4](https://github.com/googleapis/python-audit-log/compare/v0.2.3...v0.2.4) (2022-08-11) + + +### Bug Fixes + +* **deps:** allow protobuf < 5.0.0 
([#67](https://github.com/googleapis/python-audit-log/issues/67)) ([e337e78](https://github.com/googleapis/python-audit-log/commit/e337e781951dea0fbbb6ef9c4ff9896fa3fce86a)) + +## [0.2.3](https://github.com/googleapis/python-audit-log/compare/v0.2.2...v0.2.3) (2022-07-16) + + +### Bug Fixes + +* require python 3.7+ ([#63](https://github.com/googleapis/python-audit-log/issues/63)) ([c7d33f4](https://github.com/googleapis/python-audit-log/commit/c7d33f463e6dda2d24cc884f4049cfd437876812)) + +### [0.2.2](https://github.com/googleapis/python-audit-log/compare/v0.2.1...v0.2.2) (2022-05-26) + + +### Bug Fixes + +* regenerate pb2 files using grpcio-tools ([#57](https://github.com/googleapis/python-audit-log/issues/57)) ([7058ada](https://github.com/googleapis/python-audit-log/commit/7058ada0cc89cac453b6d55d6a1529d7274784fd)) + +### [0.2.1](https://github.com/googleapis/python-audit-log/compare/v0.2.0...v0.2.1) (2022-05-26) + + +### Bug Fixes + +* **deps:** require protobuf>= 3.6.0, <4.0.0dev ([#55](https://github.com/googleapis/python-audit-log/issues/55)) ([e84a2a9](https://github.com/googleapis/python-audit-log/commit/e84a2a9bb8efa13e53a9941580307dbaabec72b1)) + +## [0.2.0](https://www.github.com/googleapis/python-audit-log/compare/v0.1.1...v0.2.0) (2021-10-13) + + +### Features + +* add trove classifier for python 3.10 ([#38](https://www.github.com/googleapis/python-audit-log/issues/38)) ([355cbf1](https://www.github.com/googleapis/python-audit-log/commit/355cbf14dbe67879395c068ff0192b9d21410c51)) + +### [0.1.1](https://www.github.com/googleapis/python-audit-log/compare/v0.1.0...v0.1.1) (2021-08-31) + + +### Bug Fixes + +* remove deprecated call to Descriptor() ([#29](https://www.github.com/googleapis/python-audit-log/issues/29)) ([26f15be](https://www.github.com/googleapis/python-audit-log/commit/26f15be30432e61a6555c2cfe6643a83bf60def0)) + +## 0.1.0 (2020-07-30) + + +### Features + +* publish audit_log.proto ([4ca63a0](https://www.github.com/googleapis/python-audit-log/commit/4ca63a097e68bbae3e0094f071b9ef122c0db696)) diff --git a/packages/google-cloud-audit-log/CODE_OF_CONDUCT.md b/packages/google-cloud-audit-log/CODE_OF_CONDUCT.md new file mode 100644 index 000000000000..039f43681204 --- /dev/null +++ b/packages/google-cloud-audit-log/CODE_OF_CONDUCT.md @@ -0,0 +1,95 @@ + +# Code of Conduct + +## Our Pledge + +In the interest of fostering an open and welcoming environment, we as +contributors and maintainers pledge to making participation in our project and +our community a harassment-free experience for everyone, regardless of age, body +size, disability, ethnicity, gender identity and expression, level of +experience, education, socio-economic status, nationality, personal appearance, +race, religion, or sexual identity and orientation. 
+ +## Our Standards + +Examples of behavior that contributes to creating a positive environment +include: + +* Using welcoming and inclusive language +* Being respectful of differing viewpoints and experiences +* Gracefully accepting constructive criticism +* Focusing on what is best for the community +* Showing empathy towards other community members + +Examples of unacceptable behavior by participants include: + +* The use of sexualized language or imagery and unwelcome sexual attention or + advances +* Trolling, insulting/derogatory comments, and personal or political attacks +* Public or private harassment +* Publishing others' private information, such as a physical or electronic + address, without explicit permission +* Other conduct which could reasonably be considered inappropriate in a + professional setting + +## Our Responsibilities + +Project maintainers are responsible for clarifying the standards of acceptable +behavior and are expected to take appropriate and fair corrective action in +response to any instances of unacceptable behavior. + +Project maintainers have the right and responsibility to remove, edit, or reject +comments, commits, code, wiki edits, issues, and other contributions that are +not aligned to this Code of Conduct, or to ban temporarily or permanently any +contributor for other behaviors that they deem inappropriate, threatening, +offensive, or harmful. + +## Scope + +This Code of Conduct applies both within project spaces and in public spaces +when an individual is representing the project or its community. Examples of +representing a project or community include using an official project e-mail +address, posting via an official social media account, or acting as an appointed +representative at an online or offline event. Representation of a project may be +further defined and clarified by project maintainers. + +This Code of Conduct also applies outside the project spaces when the Project +Steward has a reasonable belief that an individual's behavior may have a +negative impact on the project or its community. + +## Conflict Resolution + +We do not believe that all conflict is bad; healthy debate and disagreement +often yield positive results. However, it is never okay to be disrespectful or +to engage in behavior that violates the project’s code of conduct. + +If you see someone violating the code of conduct, you are encouraged to address +the behavior directly with those involved. Many issues can be resolved quickly +and easily, and this gives people more control over the outcome of their +dispute. If you are unable to resolve the matter for any reason, or if the +behavior is threatening or harassing, report it. We are dedicated to providing +an environment where participants feel welcome and safe. + + +Reports should be directed to *googleapis-stewards@google.com*, the +Project Steward(s) for *Google Cloud Client Libraries*. It is the Project Steward’s duty to +receive and address reported violations of the code of conduct. They will then +work with a committee consisting of representatives from the Open Source +Programs Office and the Google Open Source Strategy team. If for any reason you +are uncomfortable reaching out to the Project Steward, please email +opensource@google.com. + +We will investigate every complaint, but you may not receive a direct response. 
+We will use our discretion in determining when and how to follow up on reported +incidents, which may range from not taking action to permanent expulsion from +the project and project-sponsored spaces. We will notify the accused of the +report and provide them an opportunity to discuss it before any action is taken. +The identity of the reporter will be omitted from the details of the report +supplied to the accused. In potentially harmful situations, such as ongoing +harassment or threats to anyone's safety, we may take action without notice. + +## Attribution + +This Code of Conduct is adapted from the Contributor Covenant, version 1.4, +available at +https://www.contributor-covenant.org/version/1/4/code-of-conduct.html \ No newline at end of file diff --git a/packages/google-cloud-audit-log/CONTRIBUTING.rst b/packages/google-cloud-audit-log/CONTRIBUTING.rst new file mode 100644 index 000000000000..64a6383d34f3 --- /dev/null +++ b/packages/google-cloud-audit-log/CONTRIBUTING.rst @@ -0,0 +1,273 @@ +.. Generated by synthtool. DO NOT EDIT! +############ +Contributing +############ + +#. **Please sign one of the contributor license agreements below.** +#. Fork the repo, develop and test your code changes, add docs. +#. Make sure that your commit messages clearly describe the changes. +#. Send a pull request. (Please Read: `Faster Pull Request Reviews`_) + +.. _Faster Pull Request Reviews: https://github.com/kubernetes/community/blob/master/contributors/guide/pull-requests.md#best-practices-for-faster-reviews + +.. contents:: Here are some guidelines for hacking on the Google Cloud Client libraries. + +*************** +Adding Features +*************** + +In order to add a feature: + +- The feature must be documented in both the API and narrative + documentation. + +- The feature must work fully on the following CPython versions: + 3.7, 3.8, 3.9, 3.10, 3.11, 3.12 and 3.13 on both UNIX and Windows. + +- The feature must not add unnecessary dependencies (where + "unnecessary" is of course subjective, but new dependencies should + be discussed). + +**************************** +Using a Development Checkout +**************************** + +You'll have to create a development environment using a Git checkout: + +- While logged into your GitHub account, navigate to the + ``google-cloud-python`` `repo`_ on GitHub. + +- Fork and clone the ``google-cloud-python`` repository to your GitHub account by + clicking the "Fork" button. + +- Clone your fork of ``google-cloud-python`` from your GitHub account to your local + computer, substituting your account username and specifying the destination + as ``hack-on-google-cloud-python``. E.g.:: + + $ cd ${HOME} + $ git clone git@github.com:USERNAME/google-cloud-python.git hack-on-google-cloud-python + $ cd hack-on-google-cloud-python + # Configure remotes such that you can pull changes from the googleapis/google-cloud-python + # repository into your local repository. + $ git remote add upstream git@github.com:googleapis/google-cloud-python.git + # fetch and merge changes from upstream into main + $ git fetch upstream + $ git merge upstream/main + +Now your local repo is set up such that you will push changes to your GitHub +repo, from which you can submit a pull request. + +To work on the codebase and run the tests, we recommend using ``nox``, +but you can also use a ``virtualenv`` of your own creation. + +.. _repo: https://github.com/googleapis/google-cloud-python + +Using ``nox`` +============= + +We use `nox `__ to instrument our tests. 
+ +- To test your changes, run unit tests with ``nox``:: + $ nox -s unit + +- To run a single unit test:: + + $ nox -s unit-3.13 -- -k + + + .. note:: + + The unit tests and system tests are described in the + ``noxfile.py`` files in each directory. + +.. nox: https://pypi.org/project/nox/ + +***************************************** +I'm getting weird errors... Can you help? +***************************************** + +If the error mentions ``Python.h`` not being found, +install ``python-dev`` and try again. +On Debian/Ubuntu:: + + $ sudo apt-get install python-dev + +************ +Coding Style +************ +- We use the automatic code formatter ``black``. You can run it using + the nox session ``blacken``. This will eliminate many lint errors. Run via:: + + $ nox -s blacken + +- PEP8 compliance is required, with exceptions defined in the linter configuration. + If you have ``nox`` installed, you can test that you have not introduced + any non-compliant code via:: + + $ nox -s lint + +- In order to make ``nox -s lint`` run faster, you can set some environment + variables:: + + export GOOGLE_CLOUD_TESTING_REMOTE="upstream" + export GOOGLE_CLOUD_TESTING_BRANCH="main" + + By doing this, you are specifying the location of the most up-to-date + version of ``google-cloud-python``. The + remote name ``upstream`` should point to the official ``googleapis`` + checkout and the branch should be the default branch on that remote (``main``). + +- This repository contains configuration for the + `pre-commit `__ tool, which automates checking + our linters during a commit. If you have it installed on your ``$PATH``, + you can enable enforcing those checks via: + +.. code-block:: bash + + $ pre-commit install + pre-commit installed at .git/hooks/pre-commit + +Exceptions to PEP8: + +- Many unit tests use a helper method, ``_call_fut`` ("FUT" is short for + "Function-Under-Test"), which is PEP8-incompliant, but more readable. + Some also use a local variable, ``MUT`` (short for "Module-Under-Test"). + +******************** +Running System Tests +******************** + +- To run system tests, you can execute:: + + # Run all system tests + $ nox -s system + + # Run a single system test + $ nox -s system-3.13 -- -k + + + .. note:: + + System tests are only configured to run under Python 3.8, 3.9, 3.10, 3.11, 3.12 and 3.13. + For expediency, we do not run them in older versions of Python 3. + + This alone will not run the tests. You'll need to change some local + auth settings and change some configuration in your project to + run all the tests. + +- System tests will be run against an actual project. You should use local credentials from gcloud when possible. See `Best practices for application authentication `__. Some tests require a service account. For those tests see `Authenticating as a service account `__. + +************* +Test Coverage +************* + +- The codebase *must* have 100% test statement coverage after each commit. + You can test coverage via ``nox -s cover``. + +****************************************************** +Documentation Coverage and Building HTML Documentation +****************************************************** + +If you fix a bug, and the bug requires an API or behavior modification, all +documentation in this package which references that API or behavior must be +changed to reflect the bug fix, ideally in the same commit that fixes the bug +or adds the feature. 
+ +Build the docs via: + + $ nox -s docs + +************************* +Samples and code snippets +************************* + +Code samples and snippets live in the `samples/` catalogue. Feel free to +provide more examples, but make sure to write tests for those examples. +Each folder containing example code requires its own `noxfile.py` script +which automates testing. If you decide to create a new folder, you can +base it on the `samples/snippets` folder (providing `noxfile.py` and +the requirements files). + +The tests will run against a real Google Cloud Project, so you should +configure them just like the System Tests. + +- To run sample tests, you can execute:: + + # Run all tests in a folder + $ cd samples/snippets + $ nox -s py-3.8 + + # Run a single sample test + $ cd samples/snippets + $ nox -s py-3.8 -- -k + +******************************************** +Note About ``README`` as it pertains to PyPI +******************************************** + +The `description on PyPI`_ for the project comes directly from the +``README``. Due to the reStructuredText (``rst``) parser used by +PyPI, relative links which will work on GitHub (e.g. ``CONTRIBUTING.rst`` +instead of +``https://github.com/googleapis/google-cloud-python/blob/main/CONTRIBUTING.rst``) +may cause problems creating links or rendering the description. + +.. _description on PyPI: https://pypi.org/project/google-cloud-audit-log + + +************************* +Supported Python Versions +************************* + +We support: + +- `Python 3.7`_ +- `Python 3.8`_ +- `Python 3.9`_ +- `Python 3.10`_ +- `Python 3.11`_ +- `Python 3.12`_ +- `Python 3.13`_ + +.. _Python 3.7: https://docs.python.org/3.7/ +.. _Python 3.8: https://docs.python.org/3.8/ +.. _Python 3.9: https://docs.python.org/3.9/ +.. _Python 3.10: https://docs.python.org/3.10/ +.. _Python 3.11: https://docs.python.org/3.11/ +.. _Python 3.12: https://docs.python.org/3.12/ +.. _Python 3.13: https://docs.python.org/3.13/ + + +Supported versions can be found in our ``noxfile.py`` `config`_. + +.. _config: https://github.com/googleapis/google-cloud-python/blob/main/packages/google-cloud-audit-log/noxfile.py + + +********** +Versioning +********** + +This library follows `Semantic Versioning`_. + +.. _Semantic Versioning: http://semver.org/ + +Some packages are currently in major version zero (``0.y.z``), which means that +anything may change at any time and the public API should not be considered +stable. + +****************************** +Contributor License Agreements +****************************** + +Before we can accept your pull requests you'll need to sign a Contributor +License Agreement (CLA): + +- **If you are an individual writing original source code** and **you own the + intellectual property**, then you'll need to sign an + `individual CLA `__. +- **If you work for a company that wants to allow you to contribute your work**, + then you'll need to sign a + `corporate CLA `__. + +You can sign these electronically (just scroll to the bottom). After that, +we'll be able to accept your pull requests. diff --git a/packages/google-cloud-audit-log/LICENSE b/packages/google-cloud-audit-log/LICENSE new file mode 100644 index 000000000000..d64569567334 --- /dev/null +++ b/packages/google-cloud-audit-log/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. 
+ + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/packages/google-cloud-audit-log/MANIFEST.in b/packages/google-cloud-audit-log/MANIFEST.in new file mode 100644 index 000000000000..d6814cd60037 --- /dev/null +++ b/packages/google-cloud-audit-log/MANIFEST.in @@ -0,0 +1,25 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! +include README.rst LICENSE +recursive-include google *.json *.proto py.typed +recursive-include tests * +global-exclude *.py[co] +global-exclude __pycache__ + +# Exclude scripts for samples readmegen +prune scripts/readme-gen diff --git a/packages/google-cloud-audit-log/README.rst b/packages/google-cloud-audit-log/README.rst new file mode 100644 index 000000000000..e86f447b0621 --- /dev/null +++ b/packages/google-cloud-audit-log/README.rst @@ -0,0 +1,108 @@ +Python Client for Audit Log API +=============================== + +|preview| |pypi| |versions| + +`Audit Log API`_: + +- `Client Library Documentation`_ +- `Product Documentation`_ + +.. |preview| image:: https://img.shields.io/badge/support-preview-orange.svg + :target: https://github.com/googleapis/google-cloud-python/blob/main/README.rst#stability-levels +.. |pypi| image:: https://img.shields.io/pypi/v/google-cloud-audit-log.svg + :target: https://pypi.org/project/google-cloud-audit-log/ +.. |versions| image:: https://img.shields.io/pypi/pyversions/google-cloud-audit-log.svg + :target: https://pypi.org/project/google-cloud-audit-log/ +.. _Audit Log API: https://cloud.google.com/logging/docs/audit +.. _Client Library Documentation: https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-audit-log/summary_overview +.. _Product Documentation: https://cloud.google.com/logging/docs/audit + +Quick Start +----------- + +In order to use this library, you first need to go through the following steps: + +1. `Select or create a Cloud Platform project.`_ +2. `Enable billing for your project.`_ +3. `Enable the Audit Log API.`_ +4. `Setup Authentication.`_ + +.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project +.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project +.. _Enable the Audit Log API.: https://cloud.google.com/logging/docs/audit +.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html + +Installation +~~~~~~~~~~~~ + +Install this library in a virtual environment using `venv`_. `venv`_ is a tool that +creates isolated Python environments. These isolated environments can have separate +versions of Python packages, which allows you to isolate one project's dependencies +from the dependencies of other projects. + +With `venv`_, it's possible to install this library without needing system +install permissions, and without clashing with the installed system +dependencies. + +.. 
_`venv`: https://docs.python.org/3/library/venv.html + + +Code samples and snippets +~~~~~~~~~~~~~~~~~~~~~~~~~ + +Code samples and snippets live in the `samples/`_ folder. + +.. _samples/: https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-audit-log/samples + + +Supported Python Versions +^^^^^^^^^^^^^^^^^^^^^^^^^ +Our client libraries are compatible with all current `active`_ and `maintenance`_ versions of +Python. + +Python >= 3.7 + +.. _active: https://devguide.python.org/devcycle/#in-development-main-branch +.. _maintenance: https://devguide.python.org/devcycle/#maintenance-branches + +Unsupported Python Versions +^^^^^^^^^^^^^^^^^^^^^^^^^^^ +Python <= 3.6 + +If you are using an `end-of-life`_ +version of Python, we recommend that you update as soon as possible to an actively supported version. + +.. _end-of-life: https://devguide.python.org/devcycle/#end-of-life-branches + +Mac/Linux +^^^^^^^^^ + +.. code-block:: console + + python3 -m venv <your-env> + source <your-env>/bin/activate + pip install google-cloud-audit-log + + +Windows +^^^^^^^ + +.. code-block:: console + + py -m venv <your-env> + .\<your-env>\Scripts\activate + pip install google-cloud-audit-log + +Next Steps +~~~~~~~~~~ + +- Read the `Client Library Documentation`_ for Audit Log API + to see other available methods on the client. +- Read the `Audit Log API Product documentation`_ to learn + more about the product and see How-to Guides. +- View this `README`_ to see the full list of Cloud + APIs that we cover. + +.. _Audit Log API Product documentation: https://cloud.google.com/logging/docs/audit +.. _README: https://github.com/googleapis/google-cloud-python/blob/main/README.rst diff --git a/packages/google-cloud-audit-log/SECURITY.md b/packages/google-cloud-audit-log/SECURITY.md new file mode 100644 index 000000000000..8b58ae9c01ae --- /dev/null +++ b/packages/google-cloud-audit-log/SECURITY.md @@ -0,0 +1,7 @@ +# Security Policy + +To report a security issue, please use [g.co/vulnz](https://g.co/vulnz). + +The Google Security Team will respond within 5 working days of your report on g.co/vulnz. + +We use g.co/vulnz for our intake, and do coordination and disclosure here using GitHub Security Advisory to privately discuss and fix the issue.
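For orientation alongside the README above, here is a minimal usage sketch of the generated bindings this package ships; it is illustrative only and not one of the files added by this change. The message and field names come from ``google/cloud/audit/audit_log.proto`` (added further below); the concrete values are hypothetical.

.. code-block:: python

    # Minimal sketch, assuming the package is installed (pip install google-cloud-audit-log).
    # AuditLog and its fields are defined in google/cloud/audit/audit_log.proto.
    from google.cloud.audit import audit_log_pb2

    log = audit_log_pb2.AuditLog(
        service_name="storage.googleapis.com",  # hypothetical example values
        method_name="storage.objects.get",
        resource_name="projects/_/buckets/example-bucket/objects/example.txt",
    )
    log.authentication_info.principal_email = "user@example.com"

    # The generated classes behave like any other protobuf message and
    # round-trip through the standard serialization APIs.
    payload = log.SerializeToString()
    parsed = audit_log_pb2.AuditLog.FromString(payload)
    assert parsed.method_name == "storage.objects.get"

In practice these messages typically arrive as the ``protoPayload`` of a Cloud Logging entry rather than being built by hand; the sketch only demonstrates that the bindings are ordinary protobuf messages.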
diff --git a/packages/google-cloud-audit-log/docs/CHANGELOG.md b/packages/google-cloud-audit-log/docs/CHANGELOG.md new file mode 120000 index 000000000000..04c99a55caae --- /dev/null +++ b/packages/google-cloud-audit-log/docs/CHANGELOG.md @@ -0,0 +1 @@ +../CHANGELOG.md \ No newline at end of file diff --git a/packages/google-cloud-audit-log/docs/README.rst b/packages/google-cloud-audit-log/docs/README.rst new file mode 120000 index 000000000000..89a0106941ff --- /dev/null +++ b/packages/google-cloud-audit-log/docs/README.rst @@ -0,0 +1 @@ +../README.rst \ No newline at end of file diff --git a/packages/google-cloud-audit-log/docs/_static/custom.css b/packages/google-cloud-audit-log/docs/_static/custom.css new file mode 100644 index 000000000000..b0a295464b23 --- /dev/null +++ b/packages/google-cloud-audit-log/docs/_static/custom.css @@ -0,0 +1,20 @@ +div#python2-eol { + border-color: red; + border-width: medium; +} + +/* Ensure minimum width for 'Parameters' / 'Returns' column */ +dl.field-list > dt { + min-width: 100px +} + +/* Insert space between methods for readability */ +dl.method { + padding-top: 10px; + padding-bottom: 10px +} + +/* Insert empty space between classes */ +dl.class { + padding-bottom: 50px +} diff --git a/packages/google-cloud-audit-log/docs/_templates/layout.html b/packages/google-cloud-audit-log/docs/_templates/layout.html new file mode 100644 index 000000000000..6316a537f72b --- /dev/null +++ b/packages/google-cloud-audit-log/docs/_templates/layout.html @@ -0,0 +1,50 @@ + +{% extends "!layout.html" %} +{%- block content %} +{%- if theme_fixed_sidebar|lower == 'true' %} +
+ {{ sidebar() }} + {%- block document %} +
+ {%- if render_sidebar %} +
+ {%- endif %} + + {%- block relbar_top %} + {%- if theme_show_relbar_top|tobool %} + + {%- endif %} + {% endblock %} + +
+
+ As of January 1, 2020 this library no longer supports Python 2 on the latest released version. + Library versions released prior to that date will continue to be available. For more information please + visit Python 2 support on Google Cloud. +
+ {% block body %} {% endblock %} +
+ + {%- block relbar_bottom %} + {%- if theme_show_relbar_bottom|tobool %} + + {%- endif %} + {% endblock %} + + {%- if render_sidebar %} +
+ {%- endif %} +
+ {%- endblock %} +
+
+{%- else %} +{{ super() }} +{%- endif %} +{%- endblock %} diff --git a/packages/google-cloud-audit-log/docs/audit.rst b/packages/google-cloud-audit-log/docs/audit.rst new file mode 100644 index 000000000000..7f629bdfbc93 --- /dev/null +++ b/packages/google-cloud-audit-log/docs/audit.rst @@ -0,0 +1,6 @@ +Types for google.cloud.audit +================================== + +.. automodule:: google.cloud.audit + :members: + :show-inheritance: diff --git a/packages/google-cloud-audit-log/docs/conf.py b/packages/google-cloud-audit-log/docs/conf.py new file mode 100644 index 000000000000..c8d88ce279b8 --- /dev/null +++ b/packages/google-cloud-audit-log/docs/conf.py @@ -0,0 +1,384 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# google-cloud-audit-log documentation build configuration file +# +# This file is execfile()d with the current directory set to its +# containing dir. +# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. + +import os +import shlex +import sys + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +sys.path.insert(0, os.path.abspath("..")) + +# For plugins that can not read conf.py. +# See also: https://github.com/docascode/sphinx-docfx-yaml/issues/85 +sys.path.insert(0, os.path.abspath(".")) + +__version__ = "" + +# -- General configuration ------------------------------------------------ + +# If your documentation needs a minimal Sphinx version, state it here. +needs_sphinx = "1.5.5" + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = [ + "sphinx.ext.autodoc", + "sphinx.ext.autosummary", + "sphinx.ext.intersphinx", + "sphinx.ext.coverage", + "sphinx.ext.doctest", + "sphinx.ext.napoleon", + "sphinx.ext.todo", + "sphinx.ext.viewcode", + "recommonmark", +] + +# autodoc/autosummary flags +autoclass_content = "both" +autodoc_default_options = {"members": True} +autosummary_generate = True + + +# Add any paths that contain templates here, relative to this directory. +templates_path = ["_templates"] + +# The suffix(es) of source filenames. +# You can specify multiple suffix as a list of string: +# source_suffix = ['.rst', '.md'] +source_suffix = [".rst", ".md"] + +# The encoding of source files. +# source_encoding = 'utf-8-sig' + +# The root toctree document. +root_doc = "index" + +# General information about the project. 
+project = "google-cloud-audit-log" +copyright = "2019, Google" +author = "Google APIs" + +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. +# +# The full version, including alpha/beta/rc tags. +release = __version__ +# The short X.Y version. +version = ".".join(release.split(".")[0:2]) + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. +# +# This is also used if you do content translation via gettext catalogs. +# Usually you set "language" from the command line for these cases. +language = None + +# There are two options for replacing |today|: either, you set today to some +# non-false value, then it is used: +# today = '' +# Else, today_fmt is used as the format for a strftime call. +# today_fmt = '%B %d, %Y' + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +exclude_patterns = [ + "_build", + "**/.nox/**/*", + "samples/AUTHORING_GUIDE.md", + "samples/CONTRIBUTING.md", + "samples/snippets/README.rst", +] + +# The reST default role (used for this markup: `text`) to use for all +# documents. +# default_role = None + +# If true, '()' will be appended to :func: etc. cross-reference text. +# add_function_parentheses = True + +# If true, the current module name will be prepended to all description +# unit titles (such as .. function::). +# add_module_names = True + +# If true, sectionauthor and moduleauthor directives will be shown in the +# output. They are ignored by default. +# show_authors = False + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = "sphinx" + +# A list of ignored prefixes for module index sorting. +# modindex_common_prefix = [] + +# If true, keep warnings as "system message" paragraphs in the built documents. +# keep_warnings = False + +# If true, `todo` and `todoList` produce output, else they produce nothing. +todo_include_todos = True + + +# -- Options for HTML output ---------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +html_theme = "alabaster" + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. +html_theme_options = { + "description": "Google Cloud Client Libraries for google-cloud-audit-log", + "github_user": "googleapis", + "github_repo": "google-cloud-python", + "github_banner": True, + "font_family": "'Roboto', Georgia, sans", + "head_font_family": "'Roboto', Georgia, serif", + "code_font_family": "'Roboto Mono', 'Consolas', monospace", +} + +# Add any paths that contain custom themes here, relative to this directory. +# html_theme_path = [] + +# The name for this set of Sphinx documents. If None, it defaults to +# " v documentation". +# html_title = None + +# A shorter title for the navigation bar. Default is the same as html_title. +# html_short_title = None + +# The name of an image file (relative to this directory) to place at the top +# of the sidebar. +# html_logo = None + +# The name of an image file (within the static path) to use as favicon of the +# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 +# pixels large. 
+# html_favicon = None + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ["_static"] + +# Add any extra paths that contain custom files (such as robots.txt or +# .htaccess) here, relative to this directory. These files are copied +# directly to the root of the documentation. +# html_extra_path = [] + +# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, +# using the given strftime format. +# html_last_updated_fmt = '%b %d, %Y' + +# If true, SmartyPants will be used to convert quotes and dashes to +# typographically correct entities. +# html_use_smartypants = True + +# Custom sidebar templates, maps document names to template names. +# html_sidebars = {} + +# Additional templates that should be rendered to pages, maps page names to +# template names. +# html_additional_pages = {} + +# If false, no module index is generated. +# html_domain_indices = True + +# If false, no index is generated. +# html_use_index = True + +# If true, the index is split into individual pages for each letter. +# html_split_index = False + +# If true, links to the reST sources are added to the pages. +# html_show_sourcelink = True + +# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. +# html_show_sphinx = True + +# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. +# html_show_copyright = True + +# If true, an OpenSearch description file will be output, and all pages will +# contain a tag referring to it. The value of this option must be the +# base URL from which the finished HTML is served. +# html_use_opensearch = '' + +# This is the file name suffix for HTML files (e.g. ".xhtml"). +# html_file_suffix = None + +# Language to be used for generating the HTML full-text search index. +# Sphinx supports the following languages: +# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' +# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' +# html_search_language = 'en' + +# A dictionary with options for the search language support, empty by default. +# Now only 'ja' uses this config value +# html_search_options = {'type': 'default'} + +# The name of a javascript file (relative to the configuration directory) that +# implements a search results scorer. If empty, the default will be used. +# html_search_scorer = 'scorer.js' + +# Output file base name for HTML help builder. +htmlhelp_basename = "google-cloud-audit-log-doc" + +# -- Options for warnings ------------------------------------------------------ + + +suppress_warnings = [ + # Temporarily suppress this to avoid "more than one target found for + # cross-reference" warning, which are intractable for us to avoid while in + # a mono-repo. + # See https://github.com/sphinx-doc/sphinx/blob + # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 + "ref.python" +] + +# -- Options for LaTeX output --------------------------------------------- + +latex_elements = { + # The paper size ('letterpaper' or 'a4paper'). + #'papersize': 'letterpaper', + # The font size ('10pt', '11pt' or '12pt'). + #'pointsize': '10pt', + # Additional stuff for the LaTeX preamble. + #'preamble': '', + # Latex figure (float) alignment + #'figure_align': 'htbp', +} + +# Grouping the document tree into LaTeX files. 
List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). +latex_documents = [ + ( + root_doc, + "google-cloud-audit-log.tex", + "google-cloud-audit-log Documentation", + author, + "manual", + ) +] + +# The name of an image file (relative to this directory) to place at the top of +# the title page. +# latex_logo = None + +# For "manual" documents, if this is true, then toplevel headings are parts, +# not chapters. +# latex_use_parts = False + +# If true, show page references after internal links. +# latex_show_pagerefs = False + +# If true, show URL addresses after external links. +# latex_show_urls = False + +# Documents to append as an appendix to all manuals. +# latex_appendices = [] + +# If false, no module index is generated. +# latex_domain_indices = True + + +# -- Options for manual page output --------------------------------------- + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). +man_pages = [ + ( + root_doc, + "google-cloud-audit-log", + "google-cloud-audit-log Documentation", + [author], + 1, + ) +] + +# If true, show URL addresses after external links. +# man_show_urls = False + + +# -- Options for Texinfo output ------------------------------------------- + +# Grouping the document tree into Texinfo files. List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + ( + root_doc, + "google-cloud-audit-log", + "google-cloud-audit-log Documentation", + author, + "google-cloud-audit-log", + "google-cloud-audit-log Library", + "APIs", + ) +] + +# Documents to append as an appendix to all manuals. +# texinfo_appendices = [] + +# If false, no module index is generated. +# texinfo_domain_indices = True + +# How to display URL addresses: 'footnote', 'no', or 'inline'. +# texinfo_show_urls = 'footnote' + +# If true, do not generate a @detailmenu in the "Top" node's menu. +# texinfo_no_detailmenu = False + + +# Example configuration for intersphinx: refer to the Python standard library. +intersphinx_mapping = { + "python": ("https://python.readthedocs.org/en/latest/", None), + "google-auth": ("https://googleapis.dev/python/google-auth/latest/", None), + "google.api_core": ( + "https://googleapis.dev/python/google-api-core/latest/", + None, + ), + "grpc": ("https://grpc.github.io/grpc/python/", None), + "proto-plus": ("https://proto-plus-python.readthedocs.io/en/latest/", None), + "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), +} + + +# Napoleon settings +napoleon_google_docstring = True +napoleon_numpy_docstring = True +napoleon_include_private_with_doc = False +napoleon_include_special_with_doc = True +napoleon_use_admonition_for_examples = False +napoleon_use_admonition_for_notes = False +napoleon_use_admonition_for_references = False +napoleon_use_ivar = False +napoleon_use_param = True +napoleon_use_rtype = True diff --git a/packages/google-cloud-audit-log/docs/index.rst b/packages/google-cloud-audit-log/docs/index.rst new file mode 100644 index 000000000000..37afd3a39ae2 --- /dev/null +++ b/packages/google-cloud-audit-log/docs/index.rst @@ -0,0 +1,22 @@ +.. include:: README.rst + +.. include:: multiprocessing.rst + +API Reference +------------- +.. toctree:: + :maxdepth: 2 + + audit + +Changelog +--------- + +For a list of all ``google-cloud-audit-log`` releases: + +.. 
toctree:: + :maxdepth: 2 + + CHANGELOG + + summary_overview.md diff --git a/packages/google-cloud-audit-log/docs/multiprocessing.rst b/packages/google-cloud-audit-log/docs/multiprocessing.rst new file mode 100644 index 000000000000..536d17b2ea65 --- /dev/null +++ b/packages/google-cloud-audit-log/docs/multiprocessing.rst @@ -0,0 +1,7 @@ +.. note:: + + Because this client uses :mod:`grpc` library, it is safe to + share instances across threads. In multiprocessing scenarios, the best + practice is to create client instances *after* the invocation of + :func:`os.fork` by :class:`multiprocessing.pool.Pool` or + :class:`multiprocessing.Process`. diff --git a/packages/google-cloud-audit-log/docs/summary_overview.md b/packages/google-cloud-audit-log/docs/summary_overview.md new file mode 100644 index 000000000000..bc57774b12a7 --- /dev/null +++ b/packages/google-cloud-audit-log/docs/summary_overview.md @@ -0,0 +1,22 @@ +[ +This is a templated file. Adding content to this file may result in it being +reverted. Instead, if you want to place additional content, create an +"overview_content.md" file in `docs/` directory. The Sphinx tool will +pick up on the content and merge the content. +]: # + +# Audit Log API API + +Overview of the APIs available for Audit Log API API. + +## All entries + +Classes, methods and properties & attributes for +Audit Log API API. + +[classes](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-audit-log/summary_class.html) + +[methods](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-audit-log/summary_method.html) + +[properties and +attributes](https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-audit-log/summary_property.html) diff --git a/packages/google-cloud-audit-log/google/cloud/audit/__init__.py b/packages/google-cloud-audit-log/google/cloud/audit/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/packages/google-cloud-audit-log/google/cloud/audit/audit_log_pb2.py b/packages/google-cloud-audit-log/google/cloud/audit/audit_log_pb2.py new file mode 100644 index 000000000000..770084f5ff1d --- /dev/null +++ b/packages/google-cloud-audit-log/google/cloud/audit/audit_log_pb2.py @@ -0,0 +1,106 @@ +# -*- coding: utf-8 -*- + +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: google/cloud/audit/audit_log.proto +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder + +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 +from google.protobuf import any_pb2 as google_dot_protobuf_dot_any__pb2 +from google.protobuf import struct_pb2 as google_dot_protobuf_dot_struct__pb2 +from google.rpc import status_pb2 as google_dot_rpc_dot_status__pb2 +from google.rpc.context import ( + attribute_context_pb2 as google_dot_rpc_dot_context_dot_attribute__context__pb2, +) + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile( + b'\n"google/cloud/audit/audit_log.proto\x12\x12google.cloud.audit\x1a\x1fgoogle/api/field_behavior.proto\x1a\x19google/protobuf/any.proto\x1a\x1cgoogle/protobuf/struct.proto\x1a*google/rpc/context/attribute_context.proto\x1a\x17google/rpc/status.proto"\xc6\x05\n\x08\x41uditLog\x12\x14\n\x0cservice_name\x18\x07 \x01(\t\x12\x13\n\x0bmethod_name\x18\x08 \x01(\t\x12\x15\n\rresource_name\x18\x0b \x01(\t\x12?\n\x11resource_location\x18\x14 \x01(\x0b\x32$.google.cloud.audit.ResourceLocation\x12\x38\n\x17resource_original_state\x18\x13 \x01(\x0b\x32\x17.google.protobuf.Struct\x12\x1a\n\x12num_response_items\x18\x0c \x01(\x03\x12"\n\x06status\x18\x02 \x01(\x0b\x32\x12.google.rpc.Status\x12\x43\n\x13\x61uthentication_info\x18\x03 \x01(\x0b\x32&.google.cloud.audit.AuthenticationInfo\x12\x41\n\x12\x61uthorization_info\x18\t \x03(\x0b\x32%.google.cloud.audit.AuthorizationInfo\x12\x46\n\x15policy_violation_info\x18\x19 \x01(\x0b\x32\'.google.cloud.audit.PolicyViolationInfo\x12=\n\x10request_metadata\x18\x04 \x01(\x0b\x32#.google.cloud.audit.RequestMetadata\x12(\n\x07request\x18\x10 \x01(\x0b\x32\x17.google.protobuf.Struct\x12)\n\x08response\x18\x11 \x01(\x0b\x32\x17.google.protobuf.Struct\x12)\n\x08metadata\x18\x12 \x01(\x0b\x32\x17.google.protobuf.Struct\x12.\n\x0cservice_data\x18\x0f \x01(\x0b\x32\x14.google.protobuf.AnyB\x02\x18\x01"\x99\x02\n\x12\x41uthenticationInfo\x12\x17\n\x0fprincipal_email\x18\x01 \x01(\t\x12\x1a\n\x12\x61uthority_selector\x18\x02 \x01(\t\x12\x36\n\x15third_party_principal\x18\x04 \x01(\x0b\x32\x17.google.protobuf.Struct\x12 \n\x18service_account_key_name\x18\x05 \x01(\t\x12Y\n\x1fservice_account_delegation_info\x18\x06 \x03(\x0b\x32\x30.google.cloud.audit.ServiceAccountDelegationInfo\x12\x19\n\x11principal_subject\x18\x08 \x01(\t"\x96\x01\n\x11\x41uthorizationInfo\x12\x10\n\x08resource\x18\x01 \x01(\t\x12\x12\n\npermission\x18\x02 \x01(\t\x12\x0f\n\x07granted\x18\x03 \x01(\x08\x12J\n\x13resource_attributes\x18\x05 \x01(\x0b\x32-.google.rpc.context.AttributeContext.Resource"\xf5\x01\n\x0fRequestMetadata\x12\x11\n\tcaller_ip\x18\x01 \x01(\t\x12"\n\x1a\x63\x61ller_supplied_user_agent\x18\x02 \x01(\t\x12\x16\n\x0e\x63\x61ller_network\x18\x03 \x01(\t\x12H\n\x12request_attributes\x18\x07 \x01(\x0b\x32,.google.rpc.context.AttributeContext.Request\x12I\n\x16\x64\x65stination_attributes\x18\x08 \x01(\x0b\x32).google.rpc.context.AttributeContext.Peer"I\n\x10ResourceLocation\x12\x19\n\x11\x63urrent_locations\x18\x01 \x03(\t\x12\x1a\n\x12original_locations\x18\x02 \x03(\t"\xc3\x03\n\x1cServiceAccountDelegationInfo\x12\x19\n\x11principal_subject\x18\x03 
\x01(\t\x12\x65\n\x15\x66irst_party_principal\x18\x01 \x01(\x0b\x32\x44.google.cloud.audit.ServiceAccountDelegationInfo.FirstPartyPrincipalH\x00\x12\x65\n\x15third_party_principal\x18\x02 \x01(\x0b\x32\x44.google.cloud.audit.ServiceAccountDelegationInfo.ThirdPartyPrincipalH\x00\x1a\x61\n\x13\x46irstPartyPrincipal\x12\x17\n\x0fprincipal_email\x18\x01 \x01(\t\x12\x31\n\x10service_metadata\x18\x02 \x01(\x0b\x32\x17.google.protobuf.Struct\x1aJ\n\x13ThirdPartyPrincipal\x12\x33\n\x12third_party_claims\x18\x01 \x01(\x0b\x32\x17.google.protobuf.StructB\x0b\n\tAuthority"d\n\x13PolicyViolationInfo\x12M\n\x19org_policy_violation_info\x18\x01 \x01(\x0b\x32*.google.cloud.audit.OrgPolicyViolationInfo"\xb2\x02\n\x16OrgPolicyViolationInfo\x12-\n\x07payload\x18\x01 \x01(\x0b\x32\x17.google.protobuf.StructB\x03\xe0\x41\x01\x12\x1a\n\rresource_type\x18\x02 \x01(\tB\x03\xe0\x41\x01\x12X\n\rresource_tags\x18\x03 \x03(\x0b\x32<.google.cloud.audit.OrgPolicyViolationInfo.ResourceTagsEntryB\x03\xe0\x41\x01\x12>\n\x0eviolation_info\x18\x04 \x03(\x0b\x32!.google.cloud.audit.ViolationInfoB\x03\xe0\x41\x01\x1a\x33\n\x11ResourceTagsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\x97\x02\n\rViolationInfo\x12\x17\n\nconstraint\x18\x01 \x01(\tB\x03\xe0\x41\x01\x12\x1a\n\rerror_message\x18\x02 \x01(\tB\x03\xe0\x41\x01\x12\x1a\n\rchecked_value\x18\x03 \x01(\tB\x03\xe0\x41\x01\x12\x46\n\x0bpolicy_type\x18\x04 \x01(\x0e\x32,.google.cloud.audit.ViolationInfo.PolicyTypeB\x03\xe0\x41\x01"m\n\nPolicyType\x12\x1b\n\x17POLICY_TYPE_UNSPECIFIED\x10\x00\x12\x16\n\x12\x42OOLEAN_CONSTRAINT\x10\x01\x12\x13\n\x0fLIST_CONSTRAINT\x10\x02\x12\x15\n\x11\x43USTOM_CONSTRAINT\x10\x03\x42\x65\n\x16\x63om.google.cloud.auditB\rAuditLogProtoP\x01Z7google.golang.org/genproto/googleapis/cloud/audit;audit\xf8\x01\x01\x62\x06proto3' +) + +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages( + DESCRIPTOR, "google.cloud.audit.audit_log_pb2", _globals +) +if _descriptor._USE_C_DESCRIPTORS == False: + DESCRIPTOR._options = None + DESCRIPTOR._serialized_options = b"\n\026com.google.cloud.auditB\rAuditLogProtoP\001Z7google.golang.org/genproto/googleapis/cloud/audit;audit\370\001\001" + _AUDITLOG.fields_by_name["service_data"]._options = None + _AUDITLOG.fields_by_name["service_data"]._serialized_options = b"\030\001" + _ORGPOLICYVIOLATIONINFO_RESOURCETAGSENTRY._options = None + _ORGPOLICYVIOLATIONINFO_RESOURCETAGSENTRY._serialized_options = b"8\001" + _ORGPOLICYVIOLATIONINFO.fields_by_name["payload"]._options = None + _ORGPOLICYVIOLATIONINFO.fields_by_name["payload"]._serialized_options = b"\340A\001" + _ORGPOLICYVIOLATIONINFO.fields_by_name["resource_type"]._options = None + _ORGPOLICYVIOLATIONINFO.fields_by_name[ + "resource_type" + ]._serialized_options = b"\340A\001" + _ORGPOLICYVIOLATIONINFO.fields_by_name["resource_tags"]._options = None + _ORGPOLICYVIOLATIONINFO.fields_by_name[ + "resource_tags" + ]._serialized_options = b"\340A\001" + _ORGPOLICYVIOLATIONINFO.fields_by_name["violation_info"]._options = None + _ORGPOLICYVIOLATIONINFO.fields_by_name[ + "violation_info" + ]._serialized_options = b"\340A\001" + _VIOLATIONINFO.fields_by_name["constraint"]._options = None + _VIOLATIONINFO.fields_by_name["constraint"]._serialized_options = b"\340A\001" + _VIOLATIONINFO.fields_by_name["error_message"]._options = None + _VIOLATIONINFO.fields_by_name["error_message"]._serialized_options = b"\340A\001" + 
_VIOLATIONINFO.fields_by_name["checked_value"]._options = None + _VIOLATIONINFO.fields_by_name["checked_value"]._serialized_options = b"\340A\001" + _VIOLATIONINFO.fields_by_name["policy_type"]._options = None + _VIOLATIONINFO.fields_by_name["policy_type"]._serialized_options = b"\340A\001" + _globals["_AUDITLOG"]._serialized_start = 218 + _globals["_AUDITLOG"]._serialized_end = 928 + _globals["_AUTHENTICATIONINFO"]._serialized_start = 931 + _globals["_AUTHENTICATIONINFO"]._serialized_end = 1212 + _globals["_AUTHORIZATIONINFO"]._serialized_start = 1215 + _globals["_AUTHORIZATIONINFO"]._serialized_end = 1365 + _globals["_REQUESTMETADATA"]._serialized_start = 1368 + _globals["_REQUESTMETADATA"]._serialized_end = 1613 + _globals["_RESOURCELOCATION"]._serialized_start = 1615 + _globals["_RESOURCELOCATION"]._serialized_end = 1688 + _globals["_SERVICEACCOUNTDELEGATIONINFO"]._serialized_start = 1691 + _globals["_SERVICEACCOUNTDELEGATIONINFO"]._serialized_end = 2142 + _globals[ + "_SERVICEACCOUNTDELEGATIONINFO_FIRSTPARTYPRINCIPAL" + ]._serialized_start = 1956 + _globals["_SERVICEACCOUNTDELEGATIONINFO_FIRSTPARTYPRINCIPAL"]._serialized_end = 2053 + _globals[ + "_SERVICEACCOUNTDELEGATIONINFO_THIRDPARTYPRINCIPAL" + ]._serialized_start = 2055 + _globals["_SERVICEACCOUNTDELEGATIONINFO_THIRDPARTYPRINCIPAL"]._serialized_end = 2129 + _globals["_POLICYVIOLATIONINFO"]._serialized_start = 2144 + _globals["_POLICYVIOLATIONINFO"]._serialized_end = 2244 + _globals["_ORGPOLICYVIOLATIONINFO"]._serialized_start = 2247 + _globals["_ORGPOLICYVIOLATIONINFO"]._serialized_end = 2553 + _globals["_ORGPOLICYVIOLATIONINFO_RESOURCETAGSENTRY"]._serialized_start = 2502 + _globals["_ORGPOLICYVIOLATIONINFO_RESOURCETAGSENTRY"]._serialized_end = 2553 + _globals["_VIOLATIONINFO"]._serialized_start = 2556 + _globals["_VIOLATIONINFO"]._serialized_end = 2835 + _globals["_VIOLATIONINFO_POLICYTYPE"]._serialized_start = 2726 + _globals["_VIOLATIONINFO_POLICYTYPE"]._serialized_end = 2835 +# @@protoc_insertion_point(module_scope) diff --git a/packages/google-cloud-audit-log/google/cloud/audit/bigquery_audit_metadata_pb2.py b/packages/google-cloud-audit-log/google/cloud/audit/bigquery_audit_metadata_pb2.py new file mode 100644 index 000000000000..813dd61cdb3c --- /dev/null +++ b/packages/google-cloud-audit-log/google/cloud/audit/bigquery_audit_metadata_pb2.py @@ -0,0 +1,213 @@ +# -*- coding: utf-8 -*- + +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: google/cloud/audit/bigquery_audit_metadata.proto +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder + +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.iam.v1 import policy_pb2 as google_dot_iam_dot_v1_dot_policy__pb2 +from google.protobuf import duration_pb2 as google_dot_protobuf_dot_duration__pb2 +from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 +from google.rpc import status_pb2 as google_dot_rpc_dot_status__pb2 + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile( + b'\n0google/cloud/audit/bigquery_audit_metadata.proto\x12\x12google.cloud.audit\x1a\x1agoogle/iam/v1/policy.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x17google/rpc/status.proto"\xabo\n\x15\x42igQueryAuditMetadata\x12O\n\rjob_insertion\x18\x01 \x01(\x0b\x32\x36.google.cloud.audit.BigQueryAuditMetadata.JobInsertionH\x00\x12I\n\njob_change\x18\x02 \x01(\x0b\x32\x33.google.cloud.audit.BigQueryAuditMetadata.JobChangeH\x00\x12M\n\x0cjob_deletion\x18\x17 \x01(\x0b\x32\x35.google.cloud.audit.BigQueryAuditMetadata.JobDeletionH\x00\x12U\n\x10\x64\x61taset_creation\x18\x03 \x01(\x0b\x32\x39.google.cloud.audit.BigQueryAuditMetadata.DatasetCreationH\x00\x12Q\n\x0e\x64\x61taset_change\x18\x04 \x01(\x0b\x32\x37.google.cloud.audit.BigQueryAuditMetadata.DatasetChangeH\x00\x12U\n\x10\x64\x61taset_deletion\x18\x05 \x01(\x0b\x32\x39.google.cloud.audit.BigQueryAuditMetadata.DatasetDeletionH\x00\x12Q\n\x0etable_creation\x18\x06 \x01(\x0b\x32\x37.google.cloud.audit.BigQueryAuditMetadata.TableCreationH\x00\x12M\n\x0ctable_change\x18\x08 \x01(\x0b\x32\x35.google.cloud.audit.BigQueryAuditMetadata.TableChangeH\x00\x12Q\n\x0etable_deletion\x18\t \x01(\x0b\x32\x37.google.cloud.audit.BigQueryAuditMetadata.TableDeletionH\x00\x12R\n\x0ftable_data_read\x18\n \x01(\x0b\x32\x37.google.cloud.audit.BigQueryAuditMetadata.TableDataReadH\x00\x12V\n\x11table_data_change\x18\x0b \x01(\x0b\x32\x39.google.cloud.audit.BigQueryAuditMetadata.TableDataChangeH\x00\x12Q\n\x0emodel_deletion\x18\x0c \x01(\x0b\x32\x37.google.cloud.audit.BigQueryAuditMetadata.ModelDeletionH\x00\x12Q\n\x0emodel_creation\x18\r \x01(\x0b\x32\x37.google.cloud.audit.BigQueryAuditMetadata.ModelCreationH\x00\x12^\n\x15model_metadata_change\x18\x0e \x01(\x0b\x32=.google.cloud.audit.BigQueryAuditMetadata.ModelMetadataChangeH\x00\x12V\n\x11model_data_change\x18\x0f \x01(\x0b\x32\x39.google.cloud.audit.BigQueryAuditMetadata.ModelDataChangeH\x00\x12R\n\x0fmodel_data_read\x18\x13 \x01(\x0b\x32\x37.google.cloud.audit.BigQueryAuditMetadata.ModelDataReadH\x00\x12U\n\x10routine_creation\x18\x10 \x01(\x0b\x32\x39.google.cloud.audit.BigQueryAuditMetadata.RoutineCreationH\x00\x12Q\n\x0eroutine_change\x18\x11 \x01(\x0b\x32\x37.google.cloud.audit.BigQueryAuditMetadata.RoutineChangeH\x00\x12U\n\x10routine_deletion\x18\x12 \x01(\x0b\x32\x39.google.cloud.audit.BigQueryAuditMetadata.RoutineDeletionH\x00\x12g\n\x1arow_access_policy_creation\x18\x14 \x01(\x0b\x32\x41.google.cloud.audit.BigQueryAuditMetadata.RowAccessPolicyCreationH\x00\x12\x63\n\x18row_access_policy_change\x18\x15 \x01(\x0b\x32?.google.cloud.audit.BigQueryAuditMetadata.RowAccessPolicyChangeH\x00\x12g\n\x1arow_access_policy_deletion\x18\x16 
\x01(\x0b\x32\x41.google.cloud.audit.BigQueryAuditMetadata.RowAccessPolicyDeletionH\x00\x12Q\n\x0eunlink_dataset\x18\x19 \x01(\x0b\x32\x37.google.cloud.audit.BigQueryAuditMetadata.UnlinkDatasetH\x00\x12\x61\n\x18\x66irst_party_app_metadata\x18\x18 \x01(\x0b\x32?.google.cloud.audit.BigQueryAuditMetadata.FirstPartyAppMetadata\x1a\xe6\x01\n\x0cJobInsertion\x12:\n\x03job\x18\x01 \x01(\x0b\x32-.google.cloud.audit.BigQueryAuditMetadata.Job\x12M\n\x06reason\x18\x02 \x01(\x0e\x32=.google.cloud.audit.BigQueryAuditMetadata.JobInsertion.Reason"K\n\x06Reason\x12\x16\n\x12REASON_UNSPECIFIED\x10\x00\x12\x16\n\x12JOB_INSERT_REQUEST\x10\x01\x12\x11\n\rQUERY_REQUEST\x10\x02\x1a\xce\x01\n\tJobChange\x12\x42\n\x06\x62\x65\x66ore\x18\x01 \x01(\x0e\x32\x32.google.cloud.audit.BigQueryAuditMetadata.JobState\x12\x41\n\x05\x61\x66ter\x18\x02 \x01(\x0e\x32\x32.google.cloud.audit.BigQueryAuditMetadata.JobState\x12:\n\x03job\x18\x03 \x01(\x0b\x32-.google.cloud.audit.BigQueryAuditMetadata.Job\x1a\xa7\x01\n\x0bJobDeletion\x12\x10\n\x08job_name\x18\x01 \x01(\t\x12L\n\x06reason\x18\x02 \x01(\x0e\x32<.google.cloud.audit.BigQueryAuditMetadata.JobDeletion.Reason"8\n\x06Reason\x12\x16\n\x12REASON_UNSPECIFIED\x10\x00\x12\x16\n\x12JOB_DELETE_REQUEST\x10\x01\x1a\xf2\x01\n\x0f\x44\x61tasetCreation\x12\x42\n\x07\x64\x61taset\x18\x01 \x01(\x0b\x32\x31.google.cloud.audit.BigQueryAuditMetadata.Dataset\x12P\n\x06reason\x18\x02 \x01(\x0e\x32@.google.cloud.audit.BigQueryAuditMetadata.DatasetCreation.Reason\x12\x10\n\x08job_name\x18\x03 \x01(\t"7\n\x06Reason\x12\x16\n\x12REASON_UNSPECIFIED\x10\x00\x12\n\n\x06\x43REATE\x10\x01\x12\t\n\x05QUERY\x10\x02\x1a\x82\x02\n\rDatasetChange\x12\x42\n\x07\x64\x61taset\x18\x01 \x01(\x0b\x32\x31.google.cloud.audit.BigQueryAuditMetadata.Dataset\x12N\n\x06reason\x18\x02 \x01(\x0e\x32>.google.cloud.audit.BigQueryAuditMetadata.DatasetChange.Reason\x12\x10\n\x08job_name\x18\x03 \x01(\t"K\n\x06Reason\x12\x16\n\x12REASON_UNSPECIFIED\x10\x00\x12\n\n\x06UPDATE\x10\x01\x12\x12\n\x0eSET_IAM_POLICY\x10\x02\x12\t\n\x05QUERY\x10\x03\x1a\xae\x01\n\x0f\x44\x61tasetDeletion\x12P\n\x06reason\x18\x01 \x01(\x0e\x32@.google.cloud.audit.BigQueryAuditMetadata.DatasetDeletion.Reason\x12\x10\n\x08job_name\x18\x02 \x01(\t"7\n\x06Reason\x12\x16\n\x12REASON_UNSPECIFIED\x10\x00\x12\n\n\x06\x44\x45LETE\x10\x01\x12\t\n\x05QUERY\x10\x02\x1a\x81\x02\n\rTableCreation\x12>\n\x05table\x18\x01 \x01(\x0b\x32/.google.cloud.audit.BigQueryAuditMetadata.Table\x12N\n\x06reason\x18\x03 \x01(\x0e\x32>.google.cloud.audit.BigQueryAuditMetadata.TableCreation.Reason\x12\x10\n\x08job_name\x18\x04 \x01(\t"N\n\x06Reason\x12\x16\n\x12REASON_UNSPECIFIED\x10\x00\x12\x07\n\x03JOB\x10\x01\x12\t\n\x05QUERY\x10\x02\x12\x18\n\x14TABLE_INSERT_REQUEST\x10\x03\x1a\xde\x01\n\rModelCreation\x12>\n\x05model\x18\x01 \x01(\x0b\x32/.google.cloud.audit.BigQueryAuditMetadata.Model\x12N\n\x06reason\x18\x03 \x01(\x0e\x32>.google.cloud.audit.BigQueryAuditMetadata.ModelCreation.Reason\x12\x10\n\x08job_name\x18\x04 \x01(\t"+\n\x06Reason\x12\x16\n\x12REASON_UNSPECIFIED\x10\x00\x12\t\n\x05QUERY\x10\x02\x1a\x82\x02\n\x0fRoutineCreation\x12\x42\n\x07routine\x18\x01 \x01(\x0b\x32\x31.google.cloud.audit.BigQueryAuditMetadata.Routine\x12P\n\x06reason\x18\x03 \x01(\x0e\x32@.google.cloud.audit.BigQueryAuditMetadata.RoutineCreation.Reason\x12\x10\n\x08job_name\x18\x04 \x01(\t"G\n\x06Reason\x12\x16\n\x12REASON_UNSPECIFIED\x10\x00\x12\t\n\x05QUERY\x10\x01\x12\x1a\n\x16ROUTINE_INSERT_REQUEST\x10\x02\x1a\x97\x03\n\rTableDataRead\x12\x0e\n\x06\x66ields\x18\x02 
\x03(\t\x12\x18\n\x10\x66ields_truncated\x18\x08 \x01(\x08\x12\x13\n\x0bpolicy_tags\x18\t \x03(\t\x12\x1d\n\x15policy_tags_truncated\x18\n \x01(\x08\x12N\n\x06reason\x18\x03 \x01(\x0e\x32>.google.cloud.audit.BigQueryAuditMetadata.TableDataRead.Reason\x12\x10\n\x08job_name\x18\x04 \x01(\t\x12\x14\n\x0csession_name\x18\x05 \x01(\t"\xaf\x01\n\x06Reason\x12\x16\n\x12REASON_UNSPECIFIED\x10\x00\x12\x07\n\x03JOB\x10\x01\x12\x1a\n\x16TABLEDATA_LIST_REQUEST\x10\x02\x12\x1d\n\x19GET_QUERY_RESULTS_REQUEST\x10\x03\x12\x11\n\rQUERY_REQUEST\x10\x04\x12\x17\n\x13\x43REATE_READ_SESSION\x10\x05\x12\x1d\n\x19MATERIALIZED_VIEW_REFRESH\x10\x06\x1a\x90\x02\n\x0bTableChange\x12>\n\x05table\x18\x01 \x01(\x0b\x32/.google.cloud.audit.BigQueryAuditMetadata.Table\x12\x11\n\ttruncated\x18\x04 \x01(\x08\x12L\n\x06reason\x18\x05 \x01(\x0e\x32<.google.cloud.audit.BigQueryAuditMetadata.TableChange.Reason\x12\x10\n\x08job_name\x18\x06 \x01(\t"N\n\x06Reason\x12\x16\n\x12REASON_UNSPECIFIED\x10\x00\x12\x18\n\x14TABLE_UPDATE_REQUEST\x10\x01\x12\x07\n\x03JOB\x10\x02\x12\t\n\x05QUERY\x10\x03\x1a\x83\x02\n\x13ModelMetadataChange\x12>\n\x05model\x18\x01 \x01(\x0b\x32/.google.cloud.audit.BigQueryAuditMetadata.Model\x12T\n\x06reason\x18\x02 \x01(\x0e\x32\x44.google.cloud.audit.BigQueryAuditMetadata.ModelMetadataChange.Reason\x12\x10\n\x08job_name\x18\x03 \x01(\t"D\n\x06Reason\x12\x16\n\x12REASON_UNSPECIFIED\x10\x00\x12\x17\n\x13MODEL_PATCH_REQUEST\x10\x01\x12\t\n\x05QUERY\x10\x02\x1a\xfe\x01\n\rRoutineChange\x12\x42\n\x07routine\x18\x01 \x01(\x0b\x32\x31.google.cloud.audit.BigQueryAuditMetadata.Routine\x12N\n\x06reason\x18\x03 \x01(\x0e\x32>.google.cloud.audit.BigQueryAuditMetadata.RoutineChange.Reason\x12\x10\n\x08job_name\x18\x04 \x01(\t"G\n\x06Reason\x12\x16\n\x12REASON_UNSPECIFIED\x10\x00\x12\t\n\x05QUERY\x10\x01\x12\x1a\n\x16ROUTINE_UPDATE_REQUEST\x10\x02\x1a\xba\x02\n\x0fTableDataChange\x12\x1a\n\x12\x64\x65leted_rows_count\x18\x01 \x01(\x03\x12\x1b\n\x13inserted_rows_count\x18\x02 \x01(\x03\x12\x11\n\ttruncated\x18\x03 \x01(\x08\x12P\n\x06reason\x18\x04 \x01(\x0e\x32@.google.cloud.audit.BigQueryAuditMetadata.TableDataChange.Reason\x12\x10\n\x08job_name\x18\x05 \x01(\t\x12\x13\n\x0bstream_name\x18\x06 \x01(\t"b\n\x06Reason\x12\x16\n\x12REASON_UNSPECIFIED\x10\x00\x12\x07\n\x03JOB\x10\x01\x12\t\n\x05QUERY\x10\x02\x12\x1d\n\x19MATERIALIZED_VIEW_REFRESH\x10\x03\x12\r\n\tWRITE_API\x10\x04\x1a\xa2\x01\n\x0fModelDataChange\x12P\n\x06reason\x18\x01 \x01(\x0e\x32@.google.cloud.audit.BigQueryAuditMetadata.ModelDataChange.Reason\x12\x10\n\x08job_name\x18\x02 \x01(\t"+\n\x06Reason\x12\x16\n\x12REASON_UNSPECIFIED\x10\x00\x12\t\n\x05QUERY\x10\x01\x1a\x9c\x01\n\rModelDataRead\x12N\n\x06reason\x18\x01 \x01(\x0e\x32>.google.cloud.audit.BigQueryAuditMetadata.ModelDataRead.Reason\x12\x10\n\x08job_name\x18\x02 \x01(\t")\n\x06Reason\x12\x16\n\x12REASON_UNSPECIFIED\x10\x00\x12\x07\n\x03JOB\x10\x01\x1a\xc5\x01\n\rTableDeletion\x12N\n\x06reason\x18\x01 \x01(\x0e\x32>.google.cloud.audit.BigQueryAuditMetadata.TableDeletion.Reason\x12\x10\n\x08job_name\x18\x02 \x01(\t"R\n\x06Reason\x12\x16\n\x12REASON_UNSPECIFIED\x10\x00\x12\x18\n\x14TABLE_DELETE_REQUEST\x10\x02\x12\x0b\n\x07\x45XPIRED\x10\x03\x12\t\n\x05QUERY\x10\x04\x1a\xc5\x01\n\rModelDeletion\x12N\n\x06reason\x18\x01 \x01(\x0e\x32>.google.cloud.audit.BigQueryAuditMetadata.ModelDeletion.Reason\x12\x10\n\x08job_name\x18\x02 
\x01(\t"R\n\x06Reason\x12\x16\n\x12REASON_UNSPECIFIED\x10\x00\x12\x18\n\x14MODEL_DELETE_REQUEST\x10\x01\x12\x0b\n\x07\x45XPIRED\x10\x02\x12\t\n\x05QUERY\x10\x03\x1a\x82\x02\n\x0fRoutineDeletion\x12\x42\n\x07routine\x18\x01 \x01(\x0b\x32\x31.google.cloud.audit.BigQueryAuditMetadata.Routine\x12P\n\x06reason\x18\x03 \x01(\x0e\x32@.google.cloud.audit.BigQueryAuditMetadata.RoutineDeletion.Reason\x12\x10\n\x08job_name\x18\x04 \x01(\t"G\n\x06Reason\x12\x16\n\x12REASON_UNSPECIFIED\x10\x00\x12\t\n\x05QUERY\x10\x01\x12\x1a\n\x16ROUTINE_DELETE_REQUEST\x10\x02\x1a\x81\x01\n\x17RowAccessPolicyCreation\x12T\n\x11row_access_policy\x18\x01 \x01(\x0b\x32\x39.google.cloud.audit.BigQueryAuditMetadata.RowAccessPolicy\x12\x10\n\x08job_name\x18\x02 \x01(\t\x1a\x7f\n\x15RowAccessPolicyChange\x12T\n\x11row_access_policy\x18\x01 \x01(\x0b\x32\x39.google.cloud.audit.BigQueryAuditMetadata.RowAccessPolicy\x12\x10\n\x08job_name\x18\x02 \x01(\t\x1a\xac\x01\n\x17RowAccessPolicyDeletion\x12V\n\x13row_access_policies\x18\x01 \x03(\x0b\x32\x39.google.cloud.audit.BigQueryAuditMetadata.RowAccessPolicy\x12\x10\n\x08job_name\x18\x02 \x01(\t\x12\'\n\x1f\x61ll_row_access_policies_dropped\x18\x03 \x01(\x08\x1a\xc1\x01\n\rUnlinkDataset\x12\x16\n\x0elinked_dataset\x18\x01 \x01(\t\x12\x16\n\x0esource_dataset\x18\x02 \x01(\t\x12N\n\x06reason\x18\x03 \x01(\x0e\x32>.google.cloud.audit.BigQueryAuditMetadata.UnlinkDataset.Reason"0\n\x06Reason\x12\x16\n\x12REASON_UNSPECIFIED\x10\x00\x12\x0e\n\nUNLINK_API\x10\x01\x1a\xf0\x01\n\x03Job\x12\x10\n\x08job_name\x18\x01 \x01(\t\x12G\n\njob_config\x18\x02 \x01(\x0b\x32\x33.google.cloud.audit.BigQueryAuditMetadata.JobConfig\x12G\n\njob_status\x18\x03 \x01(\x0b\x32\x33.google.cloud.audit.BigQueryAuditMetadata.JobStatus\x12\x45\n\tjob_stats\x18\x04 \x01(\x0b\x32\x32.google.cloud.audit.BigQueryAuditMetadata.JobStats\x1a\xe8\x12\n\tJobConfig\x12\x46\n\x04type\x18\x01 \x01(\x0e\x32\x38.google.cloud.audit.BigQueryAuditMetadata.JobConfig.Type\x12Q\n\x0cquery_config\x18\x02 \x01(\x0b\x32\x39.google.cloud.audit.BigQueryAuditMetadata.JobConfig.QueryH\x00\x12O\n\x0bload_config\x18\x03 \x01(\x0b\x32\x38.google.cloud.audit.BigQueryAuditMetadata.JobConfig.LoadH\x00\x12U\n\x0e\x65xtract_config\x18\x04 \x01(\x0b\x32;.google.cloud.audit.BigQueryAuditMetadata.JobConfig.ExtractH\x00\x12Z\n\x11table_copy_config\x18\x05 \x01(\x0b\x32=.google.cloud.audit.BigQueryAuditMetadata.JobConfig.TableCopyH\x00\x12O\n\x06labels\x18\x06 \x03(\x0b\x32?.google.cloud.audit.BigQueryAuditMetadata.JobConfig.LabelsEntry\x1a\xc3\x05\n\x05Query\x12\r\n\x05query\x18\x01 \x01(\t\x12\x17\n\x0fquery_truncated\x18\n \x01(\x08\x12\x19\n\x11\x64\x65stination_table\x18\x02 \x01(\t\x12W\n\x12\x63reate_disposition\x18\x03 \x01(\x0e\x32;.google.cloud.audit.BigQueryAuditMetadata.CreateDisposition\x12U\n\x11write_disposition\x18\x04 \x01(\x0e\x32:.google.cloud.audit.BigQueryAuditMetadata.WriteDisposition\x12\x17\n\x0f\x64\x65\x66\x61ult_dataset\x18\x05 \x01(\t\x12T\n\x11table_definitions\x18\x06 \x03(\x0b\x32\x39.google.cloud.audit.BigQueryAuditMetadata.TableDefinition\x12T\n\x08priority\x18\x07 \x01(\x0e\x32\x42.google.cloud.audit.BigQueryAuditMetadata.JobConfig.Query.Priority\x12^\n\x1c\x64\x65stination_table_encryption\x18\x08 \x01(\x0b\x32\x38.google.cloud.audit.BigQueryAuditMetadata.EncryptionInfo\x12T\n\x0estatement_type\x18\t 
\x01(\x0e\x32<.google.cloud.audit.BigQueryAuditMetadata.QueryStatementType"L\n\x08Priority\x12\x18\n\x14PRIORITY_UNSPECIFIED\x10\x00\x12\x15\n\x11QUERY_INTERACTIVE\x10\x01\x12\x0f\n\x0bQUERY_BATCH\x10\x02\x1a\x99\x03\n\x04Load\x12\x13\n\x0bsource_uris\x18\x01 \x03(\t\x12\x1d\n\x15source_uris_truncated\x18\x07 \x01(\x08\x12\x13\n\x0bschema_json\x18\x02 \x01(\t\x12\x1d\n\x15schema_json_truncated\x18\x08 \x01(\x08\x12\x19\n\x11\x64\x65stination_table\x18\x03 \x01(\t\x12W\n\x12\x63reate_disposition\x18\x04 \x01(\x0e\x32;.google.cloud.audit.BigQueryAuditMetadata.CreateDisposition\x12U\n\x11write_disposition\x18\x05 \x01(\x0e\x32:.google.cloud.audit.BigQueryAuditMetadata.WriteDisposition\x12^\n\x1c\x64\x65stination_table_encryption\x18\x06 \x01(\x0b\x32\x38.google.cloud.audit.BigQueryAuditMetadata.EncryptionInfo\x1a\x81\x01\n\x07\x45xtract\x12\x18\n\x10\x64\x65stination_uris\x18\x01 \x03(\t\x12"\n\x1a\x64\x65stination_uris_truncated\x18\x03 \x01(\x08\x12\x16\n\x0csource_table\x18\x02 \x01(\tH\x00\x12\x16\n\x0csource_model\x18\x04 \x01(\tH\x00\x42\x08\n\x06source\x1a\x80\x04\n\tTableCopy\x12\x15\n\rsource_tables\x18\x01 \x03(\t\x12\x1f\n\x17source_tables_truncated\x18\x06 \x01(\x08\x12\x19\n\x11\x64\x65stination_table\x18\x02 \x01(\t\x12W\n\x12\x63reate_disposition\x18\x03 \x01(\x0e\x32;.google.cloud.audit.BigQueryAuditMetadata.CreateDisposition\x12U\n\x11write_disposition\x18\x04 \x01(\x0e\x32:.google.cloud.audit.BigQueryAuditMetadata.WriteDisposition\x12^\n\x1c\x64\x65stination_table_encryption\x18\x05 \x01(\x0b\x32\x38.google.cloud.audit.BigQueryAuditMetadata.EncryptionInfo\x12O\n\x0eoperation_type\x18\x07 \x01(\x0e\x32\x37.google.cloud.audit.BigQueryAuditMetadata.OperationType\x12?\n\x1b\x64\x65stination_expiration_time\x18\x08 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"I\n\x04Type\x12\x14\n\x10TYPE_UNSPECIFIED\x10\x00\x12\t\n\x05QUERY\x10\x01\x12\x08\n\x04\x43OPY\x10\x02\x12\n\n\x06\x45XPORT\x10\x03\x12\n\n\x06IMPORT\x10\x04\x42\x08\n\x06\x63onfig\x1a\x34\n\x0fTableDefinition\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x13\n\x0bsource_uris\x18\x02 \x03(\t\x1a\xa0\x01\n\tJobStatus\x12\x45\n\tjob_state\x18\x01 \x01(\x0e\x32\x32.google.cloud.audit.BigQueryAuditMetadata.JobState\x12(\n\x0c\x65rror_result\x18\x02 \x01(\x0b\x32\x12.google.rpc.Status\x12"\n\x06\x65rrors\x18\x03 \x03(\x0b\x32\x12.google.rpc.Status\x1a\xaa\x07\n\x08JobStats\x12/\n\x0b\x63reate_time\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12.\n\nstart_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12,\n\x08\x65nd_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12O\n\x0bquery_stats\x18\x08 \x01(\x0b\x32\x38.google.cloud.audit.BigQueryAuditMetadata.JobStats.QueryH\x00\x12M\n\nload_stats\x18\t \x01(\x0b\x32\x37.google.cloud.audit.BigQueryAuditMetadata.JobStats.LoadH\x00\x12S\n\rextract_stats\x18\r \x01(\x0b\x32:.google.cloud.audit.BigQueryAuditMetadata.JobStats.ExtractH\x00\x12\x15\n\rtotal_slot_ms\x18\n \x01(\x03\x12j\n\x11reservation_usage\x18\x0b \x03(\x0b\x32K.google.cloud.audit.BigQueryAuditMetadata.JobStats.ReservationResourceUsageB\x02\x18\x01\x12\x13\n\x0breservation\x18\x0e \x01(\t\x12\x17\n\x0fparent_job_name\x18\x0c \x01(\t\x1a\xd7\x01\n\x05Query\x12\x1d\n\x15total_processed_bytes\x18\x01 \x01(\x03\x12\x1a\n\x12total_billed_bytes\x18\x02 \x01(\x03\x12\x14\n\x0c\x62illing_tier\x18\x03 \x01(\x05\x12\x19\n\x11referenced_tables\x18\x06 \x03(\t\x12\x18\n\x10referenced_views\x18\x07 
\x03(\t\x12\x1b\n\x13referenced_routines\x18\n \x03(\t\x12\x18\n\x10output_row_count\x18\x08 \x01(\x03\x12\x11\n\tcache_hit\x18\t \x01(\x08\x1a"\n\x04Load\x12\x1a\n\x12total_output_bytes\x18\x01 \x01(\x03\x1a$\n\x07\x45xtract\x12\x19\n\x11total_input_bytes\x18\x01 \x01(\x03\x1a\x39\n\x18ReservationResourceUsage\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0f\n\x07slot_ms\x18\x02 \x01(\x03\x42\n\n\x08\x65xtended\x1a\xfa\x03\n\x05Table\x12\x12\n\ntable_name\x18\x01 \x01(\t\x12H\n\ntable_info\x18\n \x01(\x0b\x32\x34.google.cloud.audit.BigQueryAuditMetadata.EntityInfo\x12\x13\n\x0bschema_json\x18\x03 \x01(\t\x12\x1d\n\x15schema_json_truncated\x18\x0b \x01(\x08\x12K\n\x04view\x18\x04 \x01(\x0b\x32=.google.cloud.audit.BigQueryAuditMetadata.TableViewDefinition\x12/\n\x0b\x65xpire_time\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12/\n\x0b\x63reate_time\x18\x06 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12/\n\x0bupdate_time\x18\x07 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x31\n\rtruncate_time\x18\x08 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12L\n\nencryption\x18\t \x01(\x0b\x32\x38.google.cloud.audit.BigQueryAuditMetadata.EncryptionInfo\x1a\xc6\x02\n\x05Model\x12\x12\n\nmodel_name\x18\x01 \x01(\t\x12H\n\nmodel_info\x18\x02 \x01(\x0b\x32\x34.google.cloud.audit.BigQueryAuditMetadata.EntityInfo\x12/\n\x0b\x65xpire_time\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12/\n\x0b\x63reate_time\x18\x06 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12/\n\x0bupdate_time\x18\x07 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12L\n\nencryption\x18\x08 \x01(\x0b\x32\x38.google.cloud.audit.BigQueryAuditMetadata.EncryptionInfo\x1a\x81\x01\n\x07Routine\x12\x14\n\x0croutine_name\x18\x01 \x01(\t\x12/\n\x0b\x63reate_time\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12/\n\x0bupdate_time\x18\x06 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x1a\xb9\x01\n\nEntityInfo\x12\x15\n\rfriendly_name\x18\x01 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x02 \x01(\t\x12P\n\x06labels\x18\x03 \x03(\x0b\x32@.google.cloud.audit.BigQueryAuditMetadata.EntityInfo.LabelsEntry\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x1a=\n\x13TableViewDefinition\x12\r\n\x05query\x18\x01 \x01(\t\x12\x17\n\x0fquery_truncated\x18\x02 \x01(\x08\x1a\xc4\x03\n\x07\x44\x61taset\x12\x14\n\x0c\x64\x61taset_name\x18\x01 \x01(\t\x12J\n\x0c\x64\x61taset_info\x18\x07 \x01(\x0b\x32\x34.google.cloud.audit.BigQueryAuditMetadata.EntityInfo\x12/\n\x0b\x63reate_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12/\n\x0bupdate_time\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x42\n\x03\x61\x63l\x18\x05 \x01(\x0b\x32\x35.google.cloud.audit.BigQueryAuditMetadata.BigQueryAcl\x12@\n\x1d\x64\x65\x66\x61ult_table_expire_duration\x18\x06 \x01(\x0b\x32\x19.google.protobuf.Duration\x12T\n\x12\x64\x65\x66\x61ult_encryption\x18\x08 \x01(\x0b\x32\x38.google.cloud.audit.BigQueryAuditMetadata.EncryptionInfo\x12\x19\n\x11\x64\x65\x66\x61ult_collation\x18\t \x01(\t\x1aN\n\x0b\x42igQueryAcl\x12%\n\x06policy\x18\x01 \x01(\x0b\x32\x15.google.iam.v1.Policy\x12\x18\n\x10\x61uthorized_views\x18\x02 \x03(\t\x1a&\n\x0e\x45ncryptionInfo\x12\x14\n\x0ckms_key_name\x18\x01 \x01(\t\x1a\x31\n\x0fRowAccessPolicy\x12\x1e\n\x16row_access_policy_name\x18\x01 \x01(\t\x1ax\n\x15\x46irstPartyAppMetadata\x12S\n\x0fsheets_metadata\x18\x01 \x01(\x0b\x32\x38.google.cloud.audit.BigQueryAuditMetadata.SheetsMetadataH\x00\x42\n\n\x08metadata\x1a \n\x0eSheetsMetadata\x12\x0e\n\x06\x64oc_id\x18\x01 
\x01(\t"_\n\x11\x43reateDisposition\x12"\n\x1e\x43REATE_DISPOSITION_UNSPECIFIED\x10\x00\x12\x10\n\x0c\x43REATE_NEVER\x10\x01\x12\x14\n\x10\x43REATE_IF_NEEDED\x10\x02"l\n\x10WriteDisposition\x12!\n\x1dWRITE_DISPOSITION_UNSPECIFIED\x10\x00\x12\x0f\n\x0bWRITE_EMPTY\x10\x01\x12\x12\n\x0eWRITE_TRUNCATE\x10\x02\x12\x10\n\x0cWRITE_APPEND\x10\x03"T\n\rOperationType\x12\x1e\n\x1aOPERATION_TYPE_UNSPECIFIED\x10\x00\x12\x08\n\x04\x43OPY\x10\x01\x12\x0c\n\x08SNAPSHOT\x10\x02\x12\x0b\n\x07RESTORE\x10\x03"I\n\x08JobState\x12\x19\n\x15JOB_STATE_UNSPECIFIED\x10\x00\x12\x0b\n\x07PENDING\x10\x01\x12\x0b\n\x07RUNNING\x10\x02\x12\x08\n\x04\x44ONE\x10\x03"\xf9\x05\n\x12QueryStatementType\x12$\n QUERY_STATEMENT_TYPE_UNSPECIFIED\x10\x00\x12\n\n\x06SELECT\x10\x01\x12\n\n\x06\x41SSERT\x10\x17\x12\n\n\x06INSERT\x10\x02\x12\n\n\x06UPDATE\x10\x03\x12\n\n\x06\x44\x45LETE\x10\x04\x12\t\n\x05MERGE\x10\x05\x12\x10\n\x0c\x43REATE_TABLE\x10\x06\x12\x1a\n\x16\x43REATE_TABLE_AS_SELECT\x10\x07\x12\x0f\n\x0b\x43REATE_VIEW\x10\x08\x12\x10\n\x0c\x43REATE_MODEL\x10\t\x12\x1c\n\x18\x43REATE_MATERIALIZED_VIEW\x10\r\x12\x13\n\x0f\x43REATE_FUNCTION\x10\x0e\x12\x19\n\x15\x43REATE_TABLE_FUNCTION\x10\x38\x12\x14\n\x10\x43REATE_PROCEDURE\x10\x14\x12\x1c\n\x18\x43REATE_ROW_ACCESS_POLICY\x10\x18\x12\x11\n\rCREATE_SCHEMA\x10\x35\x12\x19\n\x15\x43REATE_SNAPSHOT_TABLE\x10;\x12\x0e\n\nDROP_TABLE\x10\n\x12\x17\n\x13\x44ROP_EXTERNAL_TABLE\x10!\x12\r\n\tDROP_VIEW\x10\x0b\x12\x0e\n\nDROP_MODEL\x10\x0c\x12\x1a\n\x16\x44ROP_MATERIALIZED_VIEW\x10\x0f\x12\x11\n\rDROP_FUNCTION\x10\x10\x12\x12\n\x0e\x44ROP_PROCEDURE\x10\x15\x12\x0f\n\x0b\x44ROP_SCHEMA\x10\x36\x12\x1a\n\x16\x44ROP_ROW_ACCESS_POLICY\x10\x19\x12\x17\n\x13\x44ROP_SNAPSHOT_TABLE\x10>\x12\x0f\n\x0b\x41LTER_TABLE\x10\x11\x12\x0e\n\nALTER_VIEW\x10\x12\x12\x1b\n\x17\x41LTER_MATERIALIZED_VIEW\x10\x16\x12\x10\n\x0c\x41LTER_SCHEMA\x10\x37\x12\n\n\x06SCRIPT\x10\x13\x12\x12\n\x0eTRUNCATE_TABLE\x10\x1a\x12\x19\n\x15\x43REATE_EXTERNAL_TABLE\x10\x1b\x12\x0f\n\x0b\x45XPORT_DATA\x10\x1c\x12\x08\n\x04\x43\x41LL\x10\x1d\x42\x07\n\x05\x65ventB\x9f\x01\n\x16\x63om.google.cloud.auditB\x1a\x42igQueryAuditMetadataProtoP\x01Z7google.golang.org/genproto/googleapis/cloud/audit;audit\xa2\x02\x03GCA\xaa\x02\x12Google.Cloud.Audit\xca\x02\x12Google\\Cloud\\Auditb\x06proto3' +) + +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages( + DESCRIPTOR, "google.cloud.audit.bigquery_audit_metadata_pb2", _globals +) +if _descriptor._USE_C_DESCRIPTORS == False: + DESCRIPTOR._options = None + DESCRIPTOR._serialized_options = b"\n\026com.google.cloud.auditB\032BigQueryAuditMetadataProtoP\001Z7google.golang.org/genproto/googleapis/cloud/audit;audit\242\002\003GCA\252\002\022Google.Cloud.Audit\312\002\022Google\\Cloud\\Audit" + _BIGQUERYAUDITMETADATA_JOBCONFIG_LABELSENTRY._options = None + _BIGQUERYAUDITMETADATA_JOBCONFIG_LABELSENTRY._serialized_options = b"8\001" + _BIGQUERYAUDITMETADATA_JOBSTATS.fields_by_name["reservation_usage"]._options = None + _BIGQUERYAUDITMETADATA_JOBSTATS.fields_by_name[ + "reservation_usage" + ]._serialized_options = b"\030\001" + _BIGQUERYAUDITMETADATA_ENTITYINFO_LABELSENTRY._options = None + _BIGQUERYAUDITMETADATA_ENTITYINFO_LABELSENTRY._serialized_options = b"8\001" + _globals["_BIGQUERYAUDITMETADATA"]._serialized_start = 191 + _globals["_BIGQUERYAUDITMETADATA"]._serialized_end = 14442 + _globals["_BIGQUERYAUDITMETADATA_JOBINSERTION"]._serialized_start = 2310 + _globals["_BIGQUERYAUDITMETADATA_JOBINSERTION"]._serialized_end = 2540 + 
_globals["_BIGQUERYAUDITMETADATA_JOBINSERTION_REASON"]._serialized_start = 2465 + _globals["_BIGQUERYAUDITMETADATA_JOBINSERTION_REASON"]._serialized_end = 2540 + _globals["_BIGQUERYAUDITMETADATA_JOBCHANGE"]._serialized_start = 2543 + _globals["_BIGQUERYAUDITMETADATA_JOBCHANGE"]._serialized_end = 2749 + _globals["_BIGQUERYAUDITMETADATA_JOBDELETION"]._serialized_start = 2752 + _globals["_BIGQUERYAUDITMETADATA_JOBDELETION"]._serialized_end = 2919 + _globals["_BIGQUERYAUDITMETADATA_JOBDELETION_REASON"]._serialized_start = 2863 + _globals["_BIGQUERYAUDITMETADATA_JOBDELETION_REASON"]._serialized_end = 2919 + _globals["_BIGQUERYAUDITMETADATA_DATASETCREATION"]._serialized_start = 2922 + _globals["_BIGQUERYAUDITMETADATA_DATASETCREATION"]._serialized_end = 3164 + _globals["_BIGQUERYAUDITMETADATA_DATASETCREATION_REASON"]._serialized_start = 3109 + _globals["_BIGQUERYAUDITMETADATA_DATASETCREATION_REASON"]._serialized_end = 3164 + _globals["_BIGQUERYAUDITMETADATA_DATASETCHANGE"]._serialized_start = 3167 + _globals["_BIGQUERYAUDITMETADATA_DATASETCHANGE"]._serialized_end = 3425 + _globals["_BIGQUERYAUDITMETADATA_DATASETCHANGE_REASON"]._serialized_start = 3350 + _globals["_BIGQUERYAUDITMETADATA_DATASETCHANGE_REASON"]._serialized_end = 3425 + _globals["_BIGQUERYAUDITMETADATA_DATASETDELETION"]._serialized_start = 3428 + _globals["_BIGQUERYAUDITMETADATA_DATASETDELETION"]._serialized_end = 3602 + _globals["_BIGQUERYAUDITMETADATA_DATASETDELETION_REASON"]._serialized_start = 3547 + _globals["_BIGQUERYAUDITMETADATA_DATASETDELETION_REASON"]._serialized_end = 3602 + _globals["_BIGQUERYAUDITMETADATA_TABLECREATION"]._serialized_start = 3605 + _globals["_BIGQUERYAUDITMETADATA_TABLECREATION"]._serialized_end = 3862 + _globals["_BIGQUERYAUDITMETADATA_TABLECREATION_REASON"]._serialized_start = 3784 + _globals["_BIGQUERYAUDITMETADATA_TABLECREATION_REASON"]._serialized_end = 3862 + _globals["_BIGQUERYAUDITMETADATA_MODELCREATION"]._serialized_start = 3865 + _globals["_BIGQUERYAUDITMETADATA_MODELCREATION"]._serialized_end = 4087 + _globals["_BIGQUERYAUDITMETADATA_MODELCREATION_REASON"]._serialized_start = 4044 + _globals["_BIGQUERYAUDITMETADATA_MODELCREATION_REASON"]._serialized_end = 4087 + _globals["_BIGQUERYAUDITMETADATA_ROUTINECREATION"]._serialized_start = 4090 + _globals["_BIGQUERYAUDITMETADATA_ROUTINECREATION"]._serialized_end = 4348 + _globals["_BIGQUERYAUDITMETADATA_ROUTINECREATION_REASON"]._serialized_start = 4277 + _globals["_BIGQUERYAUDITMETADATA_ROUTINECREATION_REASON"]._serialized_end = 4348 + _globals["_BIGQUERYAUDITMETADATA_TABLEDATAREAD"]._serialized_start = 4351 + _globals["_BIGQUERYAUDITMETADATA_TABLEDATAREAD"]._serialized_end = 4758 + _globals["_BIGQUERYAUDITMETADATA_TABLEDATAREAD_REASON"]._serialized_start = 4583 + _globals["_BIGQUERYAUDITMETADATA_TABLEDATAREAD_REASON"]._serialized_end = 4758 + _globals["_BIGQUERYAUDITMETADATA_TABLECHANGE"]._serialized_start = 4761 + _globals["_BIGQUERYAUDITMETADATA_TABLECHANGE"]._serialized_end = 5033 + _globals["_BIGQUERYAUDITMETADATA_TABLECHANGE_REASON"]._serialized_start = 4955 + _globals["_BIGQUERYAUDITMETADATA_TABLECHANGE_REASON"]._serialized_end = 5033 + _globals["_BIGQUERYAUDITMETADATA_MODELMETADATACHANGE"]._serialized_start = 5036 + _globals["_BIGQUERYAUDITMETADATA_MODELMETADATACHANGE"]._serialized_end = 5295 + _globals[ + "_BIGQUERYAUDITMETADATA_MODELMETADATACHANGE_REASON" + ]._serialized_start = 5227 + _globals["_BIGQUERYAUDITMETADATA_MODELMETADATACHANGE_REASON"]._serialized_end = 5295 + _globals["_BIGQUERYAUDITMETADATA_ROUTINECHANGE"]._serialized_start = 
5298 + _globals["_BIGQUERYAUDITMETADATA_ROUTINECHANGE"]._serialized_end = 5552 + _globals["_BIGQUERYAUDITMETADATA_ROUTINECHANGE_REASON"]._serialized_start = 5481 + _globals["_BIGQUERYAUDITMETADATA_ROUTINECHANGE_REASON"]._serialized_end = 5552 + _globals["_BIGQUERYAUDITMETADATA_TABLEDATACHANGE"]._serialized_start = 5555 + _globals["_BIGQUERYAUDITMETADATA_TABLEDATACHANGE"]._serialized_end = 5869 + _globals["_BIGQUERYAUDITMETADATA_TABLEDATACHANGE_REASON"]._serialized_start = 5771 + _globals["_BIGQUERYAUDITMETADATA_TABLEDATACHANGE_REASON"]._serialized_end = 5869 + _globals["_BIGQUERYAUDITMETADATA_MODELDATACHANGE"]._serialized_start = 5872 + _globals["_BIGQUERYAUDITMETADATA_MODELDATACHANGE"]._serialized_end = 6034 + _globals["_BIGQUERYAUDITMETADATA_MODELDATACHANGE_REASON"]._serialized_start = 4277 + _globals["_BIGQUERYAUDITMETADATA_MODELDATACHANGE_REASON"]._serialized_end = 4320 + _globals["_BIGQUERYAUDITMETADATA_MODELDATAREAD"]._serialized_start = 6037 + _globals["_BIGQUERYAUDITMETADATA_MODELDATAREAD"]._serialized_end = 6193 + _globals["_BIGQUERYAUDITMETADATA_MODELDATAREAD_REASON"]._serialized_start = 3784 + _globals["_BIGQUERYAUDITMETADATA_MODELDATAREAD_REASON"]._serialized_end = 3825 + _globals["_BIGQUERYAUDITMETADATA_TABLEDELETION"]._serialized_start = 6196 + _globals["_BIGQUERYAUDITMETADATA_TABLEDELETION"]._serialized_end = 6393 + _globals["_BIGQUERYAUDITMETADATA_TABLEDELETION_REASON"]._serialized_start = 6311 + _globals["_BIGQUERYAUDITMETADATA_TABLEDELETION_REASON"]._serialized_end = 6393 + _globals["_BIGQUERYAUDITMETADATA_MODELDELETION"]._serialized_start = 6396 + _globals["_BIGQUERYAUDITMETADATA_MODELDELETION"]._serialized_end = 6593 + _globals["_BIGQUERYAUDITMETADATA_MODELDELETION_REASON"]._serialized_start = 6511 + _globals["_BIGQUERYAUDITMETADATA_MODELDELETION_REASON"]._serialized_end = 6593 + _globals["_BIGQUERYAUDITMETADATA_ROUTINEDELETION"]._serialized_start = 6596 + _globals["_BIGQUERYAUDITMETADATA_ROUTINEDELETION"]._serialized_end = 6854 + _globals["_BIGQUERYAUDITMETADATA_ROUTINEDELETION_REASON"]._serialized_start = 6783 + _globals["_BIGQUERYAUDITMETADATA_ROUTINEDELETION_REASON"]._serialized_end = 6854 + _globals["_BIGQUERYAUDITMETADATA_ROWACCESSPOLICYCREATION"]._serialized_start = 6857 + _globals["_BIGQUERYAUDITMETADATA_ROWACCESSPOLICYCREATION"]._serialized_end = 6986 + _globals["_BIGQUERYAUDITMETADATA_ROWACCESSPOLICYCHANGE"]._serialized_start = 6988 + _globals["_BIGQUERYAUDITMETADATA_ROWACCESSPOLICYCHANGE"]._serialized_end = 7115 + _globals["_BIGQUERYAUDITMETADATA_ROWACCESSPOLICYDELETION"]._serialized_start = 7118 + _globals["_BIGQUERYAUDITMETADATA_ROWACCESSPOLICYDELETION"]._serialized_end = 7290 + _globals["_BIGQUERYAUDITMETADATA_UNLINKDATASET"]._serialized_start = 7293 + _globals["_BIGQUERYAUDITMETADATA_UNLINKDATASET"]._serialized_end = 7486 + _globals["_BIGQUERYAUDITMETADATA_UNLINKDATASET_REASON"]._serialized_start = 7438 + _globals["_BIGQUERYAUDITMETADATA_UNLINKDATASET_REASON"]._serialized_end = 7486 + _globals["_BIGQUERYAUDITMETADATA_JOB"]._serialized_start = 7489 + _globals["_BIGQUERYAUDITMETADATA_JOB"]._serialized_end = 7729 + _globals["_BIGQUERYAUDITMETADATA_JOBCONFIG"]._serialized_start = 7732 + _globals["_BIGQUERYAUDITMETADATA_JOBCONFIG"]._serialized_end = 10140 + _globals["_BIGQUERYAUDITMETADATA_JOBCONFIG_QUERY"]._serialized_start = 8242 + _globals["_BIGQUERYAUDITMETADATA_JOBCONFIG_QUERY"]._serialized_end = 8949 + _globals["_BIGQUERYAUDITMETADATA_JOBCONFIG_QUERY_PRIORITY"]._serialized_start = 8873 + 
_globals["_BIGQUERYAUDITMETADATA_JOBCONFIG_QUERY_PRIORITY"]._serialized_end = 8949 + _globals["_BIGQUERYAUDITMETADATA_JOBCONFIG_LOAD"]._serialized_start = 8952 + _globals["_BIGQUERYAUDITMETADATA_JOBCONFIG_LOAD"]._serialized_end = 9361 + _globals["_BIGQUERYAUDITMETADATA_JOBCONFIG_EXTRACT"]._serialized_start = 9364 + _globals["_BIGQUERYAUDITMETADATA_JOBCONFIG_EXTRACT"]._serialized_end = 9493 + _globals["_BIGQUERYAUDITMETADATA_JOBCONFIG_TABLECOPY"]._serialized_start = 9496 + _globals["_BIGQUERYAUDITMETADATA_JOBCONFIG_TABLECOPY"]._serialized_end = 10008 + _globals["_BIGQUERYAUDITMETADATA_JOBCONFIG_LABELSENTRY"]._serialized_start = 10010 + _globals["_BIGQUERYAUDITMETADATA_JOBCONFIG_LABELSENTRY"]._serialized_end = 10055 + _globals["_BIGQUERYAUDITMETADATA_JOBCONFIG_TYPE"]._serialized_start = 10057 + _globals["_BIGQUERYAUDITMETADATA_JOBCONFIG_TYPE"]._serialized_end = 10130 + _globals["_BIGQUERYAUDITMETADATA_TABLEDEFINITION"]._serialized_start = 10142 + _globals["_BIGQUERYAUDITMETADATA_TABLEDEFINITION"]._serialized_end = 10194 + _globals["_BIGQUERYAUDITMETADATA_JOBSTATUS"]._serialized_start = 10197 + _globals["_BIGQUERYAUDITMETADATA_JOBSTATUS"]._serialized_end = 10357 + _globals["_BIGQUERYAUDITMETADATA_JOBSTATS"]._serialized_start = 10360 + _globals["_BIGQUERYAUDITMETADATA_JOBSTATS"]._serialized_end = 11298 + _globals["_BIGQUERYAUDITMETADATA_JOBSTATS_QUERY"]._serialized_start = 10938 + _globals["_BIGQUERYAUDITMETADATA_JOBSTATS_QUERY"]._serialized_end = 11153 + _globals["_BIGQUERYAUDITMETADATA_JOBSTATS_LOAD"]._serialized_start = 11155 + _globals["_BIGQUERYAUDITMETADATA_JOBSTATS_LOAD"]._serialized_end = 11189 + _globals["_BIGQUERYAUDITMETADATA_JOBSTATS_EXTRACT"]._serialized_start = 11191 + _globals["_BIGQUERYAUDITMETADATA_JOBSTATS_EXTRACT"]._serialized_end = 11227 + _globals[ + "_BIGQUERYAUDITMETADATA_JOBSTATS_RESERVATIONRESOURCEUSAGE" + ]._serialized_start = 11229 + _globals[ + "_BIGQUERYAUDITMETADATA_JOBSTATS_RESERVATIONRESOURCEUSAGE" + ]._serialized_end = 11286 + _globals["_BIGQUERYAUDITMETADATA_TABLE"]._serialized_start = 11301 + _globals["_BIGQUERYAUDITMETADATA_TABLE"]._serialized_end = 11807 + _globals["_BIGQUERYAUDITMETADATA_MODEL"]._serialized_start = 11810 + _globals["_BIGQUERYAUDITMETADATA_MODEL"]._serialized_end = 12136 + _globals["_BIGQUERYAUDITMETADATA_ROUTINE"]._serialized_start = 12139 + _globals["_BIGQUERYAUDITMETADATA_ROUTINE"]._serialized_end = 12268 + _globals["_BIGQUERYAUDITMETADATA_ENTITYINFO"]._serialized_start = 12271 + _globals["_BIGQUERYAUDITMETADATA_ENTITYINFO"]._serialized_end = 12456 + _globals["_BIGQUERYAUDITMETADATA_ENTITYINFO_LABELSENTRY"]._serialized_start = 10010 + _globals["_BIGQUERYAUDITMETADATA_ENTITYINFO_LABELSENTRY"]._serialized_end = 10055 + _globals["_BIGQUERYAUDITMETADATA_TABLEVIEWDEFINITION"]._serialized_start = 12458 + _globals["_BIGQUERYAUDITMETADATA_TABLEVIEWDEFINITION"]._serialized_end = 12519 + _globals["_BIGQUERYAUDITMETADATA_DATASET"]._serialized_start = 12522 + _globals["_BIGQUERYAUDITMETADATA_DATASET"]._serialized_end = 12974 + _globals["_BIGQUERYAUDITMETADATA_BIGQUERYACL"]._serialized_start = 12976 + _globals["_BIGQUERYAUDITMETADATA_BIGQUERYACL"]._serialized_end = 13054 + _globals["_BIGQUERYAUDITMETADATA_ENCRYPTIONINFO"]._serialized_start = 13056 + _globals["_BIGQUERYAUDITMETADATA_ENCRYPTIONINFO"]._serialized_end = 13094 + _globals["_BIGQUERYAUDITMETADATA_ROWACCESSPOLICY"]._serialized_start = 13096 + _globals["_BIGQUERYAUDITMETADATA_ROWACCESSPOLICY"]._serialized_end = 13145 + _globals["_BIGQUERYAUDITMETADATA_FIRSTPARTYAPPMETADATA"]._serialized_start 
= 13147 + _globals["_BIGQUERYAUDITMETADATA_FIRSTPARTYAPPMETADATA"]._serialized_end = 13267 + _globals["_BIGQUERYAUDITMETADATA_SHEETSMETADATA"]._serialized_start = 13269 + _globals["_BIGQUERYAUDITMETADATA_SHEETSMETADATA"]._serialized_end = 13301 + _globals["_BIGQUERYAUDITMETADATA_CREATEDISPOSITION"]._serialized_start = 13303 + _globals["_BIGQUERYAUDITMETADATA_CREATEDISPOSITION"]._serialized_end = 13398 + _globals["_BIGQUERYAUDITMETADATA_WRITEDISPOSITION"]._serialized_start = 13400 + _globals["_BIGQUERYAUDITMETADATA_WRITEDISPOSITION"]._serialized_end = 13508 + _globals["_BIGQUERYAUDITMETADATA_OPERATIONTYPE"]._serialized_start = 13510 + _globals["_BIGQUERYAUDITMETADATA_OPERATIONTYPE"]._serialized_end = 13594 + _globals["_BIGQUERYAUDITMETADATA_JOBSTATE"]._serialized_start = 13596 + _globals["_BIGQUERYAUDITMETADATA_JOBSTATE"]._serialized_end = 13669 + _globals["_BIGQUERYAUDITMETADATA_QUERYSTATEMENTTYPE"]._serialized_start = 13672 + _globals["_BIGQUERYAUDITMETADATA_QUERYSTATEMENTTYPE"]._serialized_end = 14433 +# @@protoc_insertion_point(module_scope) diff --git a/packages/google-cloud-audit-log/noxfile.py b/packages/google-cloud-audit-log/noxfile.py new file mode 100644 index 000000000000..a9ceef47133c --- /dev/null +++ b/packages/google-cloud-audit-log/noxfile.py @@ -0,0 +1,460 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! 
+ +from __future__ import absolute_import + +import os +import pathlib +import re +import shutil +from typing import Dict, List +import warnings + +import nox + +BLACK_VERSION = "black[jupyter]==23.7.0" +ISORT_VERSION = "isort==5.11.0" + +LINT_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] + + +DEFAULT_PYTHON_VERSION = "3.10" + +UNIT_TEST_PYTHON_VERSIONS: List[str] = [ + "3.7", + "3.8", + "3.9", + "3.10", + "3.11", + "3.12", + "3.13", +] +UNIT_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "asyncmock", + "pytest", + "pytest-cov", + "pytest-asyncio", +] +UNIT_TEST_EXTERNAL_DEPENDENCIES: List[str] = [] +UNIT_TEST_LOCAL_DEPENDENCIES: List[str] = [] +UNIT_TEST_DEPENDENCIES: List[str] = [] +UNIT_TEST_EXTRAS: List[str] = [] +UNIT_TEST_EXTRAS_BY_PYTHON: Dict[str, List[str]] = {} + +SYSTEM_TEST_PYTHON_VERSIONS: List[str] = ["3.8", "3.9", "3.10", "3.11", "3.12", "3.13"] +SYSTEM_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "pytest", + "google-cloud-testutils", +] +SYSTEM_TEST_EXTERNAL_DEPENDENCIES: List[str] = [] +SYSTEM_TEST_LOCAL_DEPENDENCIES: List[str] = [] +SYSTEM_TEST_DEPENDENCIES: List[str] = [] +SYSTEM_TEST_EXTRAS: List[str] = [] +SYSTEM_TEST_EXTRAS_BY_PYTHON: Dict[str, List[str]] = {} + +CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() + +# 'docfx' is excluded since it only needs to run in 'docs-presubmit' +nox.options.sessions = [ + "unit", + "system", + "cover", + "lint", + "lint_setup_py", + "blacken", + "docs", +] + +# Error if a python version is missing +nox.options.error_on_missing_interpreters = True + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint(session): + """Run linters. + + Returns a failure if the linters find linting errors or sufficiently + serious code quality issues. + """ + session.install("flake8", BLACK_VERSION) + session.run( + "black", + "--check", + *LINT_PATHS, + ) + + session.run("flake8", "google", "tests") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def blacken(session): + """Run black. Format code to uniform standard.""" + session.install(BLACK_VERSION) + session.run( + "black", + *LINT_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def format(session): + """ + Run isort to sort imports. Then run black + to format code to uniform standard. + """ + session.install(BLACK_VERSION, ISORT_VERSION) + # Use the --fss option to sort imports using strict alphabetical order. + # See https://pycqa.github.io/isort/docs/configuration/options.html#force-sort-within-sections + session.run( + "isort", + "--fss", + *LINT_PATHS, + ) + session.run( + "black", + *LINT_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint_setup_py(session): + """Verify that setup.py is valid (including RST check).""" + session.install("docutils", "pygments") + session.run("python", "setup.py", "check", "--restructuredtext", "--strict") + + +def install_unittest_dependencies(session, *constraints): + standard_deps = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_DEPENDENCIES + session.install(*standard_deps, *constraints) + + if UNIT_TEST_EXTERNAL_DEPENDENCIES: + warnings.warn( + "'unit_test_external_dependencies' is deprecated. 
Instead, please " + "use 'unit_test_dependencies' or 'unit_test_local_dependencies'.", + DeprecationWarning, + ) + session.install(*UNIT_TEST_EXTERNAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_LOCAL_DEPENDENCIES: + session.install(*UNIT_TEST_LOCAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_EXTRAS_BY_PYTHON: + extras = UNIT_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif UNIT_TEST_EXTRAS: + extras = UNIT_TEST_EXTRAS + else: + extras = [] + + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) + + +@nox.session(python=UNIT_TEST_PYTHON_VERSIONS) +@nox.parametrize( + "protobuf_implementation", + ["python", "upb", "cpp"], +) +def unit(session, protobuf_implementation): + # Install all test dependencies, then install this package in-place. + + if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): + session.skip("cpp implementation is not supported in python 3.11+") + + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) + install_unittest_dependencies(session, "-c", constraints_path) + + # TODO(https://github.com/googleapis/synthtool/issues/1976): + # Remove the 'cpp' implementation once support for Protobuf 3.x is dropped. + # The 'cpp' implementation requires Protobuf<4. + if protobuf_implementation == "cpp": + session.install("protobuf<4") + + # Run py.test against the unit tests. + session.run( + "py.test", + "--quiet", + f"--junitxml=unit_{session.python}_sponge_log.xml", + "--cov=google", + "--cov=tests/unit", + "--cov-append", + "--cov-config=.coveragerc", + "--cov-report=", + "--cov-fail-under=0", + os.path.join("tests", "unit"), + *session.posargs, + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, + ) + + +def install_systemtest_dependencies(session, *constraints): + # Use pre-release gRPC for system tests. + # Exclude version 1.52.0rc1 which has a known issue. + # See https://github.com/grpc/grpc/issues/32163 + session.install("--pre", "grpcio!=1.52.0rc1") + + session.install(*SYSTEM_TEST_STANDARD_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_EXTERNAL_DEPENDENCIES: + session.install(*SYSTEM_TEST_EXTERNAL_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_LOCAL_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_LOCAL_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_EXTRAS_BY_PYTHON: + extras = SYSTEM_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif SYSTEM_TEST_EXTRAS: + extras = SYSTEM_TEST_EXTRAS + else: + extras = [] + + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) + + +@nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS) +def system(session): + """Run the system test suite.""" + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) + system_test_path = os.path.join("tests", "system.py") + system_test_folder_path = os.path.join("tests", "system") + + # Check the value of `RUN_SYSTEM_TESTS` env var. It defaults to true. + if os.environ.get("RUN_SYSTEM_TESTS", "true") == "false": + session.skip("RUN_SYSTEM_TESTS is set to false, skipping") + # Install pyopenssl for mTLS testing. 
+ if os.environ.get("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true": + session.install("pyopenssl") + + system_test_exists = os.path.exists(system_test_path) + system_test_folder_exists = os.path.exists(system_test_folder_path) + # Sanity check: only run tests if found. + if not system_test_exists and not system_test_folder_exists: + session.skip("System tests were not found") + + install_systemtest_dependencies(session, "-c", constraints_path) + + # Run py.test against the system tests. + if system_test_exists: + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_path, + *session.posargs, + ) + if system_test_folder_exists: + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_folder_path, + *session.posargs, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def cover(session): + """Run the final coverage report. + + This outputs the coverage report aggregating coverage from the unit + test runs (not system test runs), and then erases coverage data. + """ + session.install("coverage", "pytest-cov") + session.run("coverage", "report", "--show-missing", "--fail-under=100") + + session.run("coverage", "erase") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def docs(session): + """Build the docs for this library.""" + + session.install("-e", ".") + session.install( + # We need to pin to specific versions of the `sphinxcontrib-*` packages + # which still support sphinx 4.x. + # See https://github.com/googleapis/sphinx-docfx-yaml/issues/344 + # and https://github.com/googleapis/sphinx-docfx-yaml/issues/345. + "sphinxcontrib-applehelp==1.0.4", + "sphinxcontrib-devhelp==1.0.2", + "sphinxcontrib-htmlhelp==2.0.1", + "sphinxcontrib-qthelp==1.0.3", + "sphinxcontrib-serializinghtml==1.1.5", + "sphinx==4.5.0", + "alabaster", + "recommonmark", + ) + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-W", # warnings as errors + "-T", # show full traceback on exception + "-N", # no colors + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def docfx(session): + """Build the docfx yaml files for this library.""" + + session.install("-e", ".") + session.install( + # We need to pin to specific versions of the `sphinxcontrib-*` packages + # which still support sphinx 4.x. + # See https://github.com/googleapis/sphinx-docfx-yaml/issues/344 + # and https://github.com/googleapis/sphinx-docfx-yaml/issues/345. 
+ "sphinxcontrib-applehelp==1.0.4", + "sphinxcontrib-devhelp==1.0.2", + "sphinxcontrib-htmlhelp==2.0.1", + "sphinxcontrib-qthelp==1.0.3", + "sphinxcontrib-serializinghtml==1.1.5", + "gcp-sphinx-docfx-yaml", + "alabaster", + "recommonmark", + ) + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-T", # show full traceback on exception + "-N", # no colors + "-D", + ( + "extensions=sphinx.ext.autodoc," + "sphinx.ext.autosummary," + "docfx_yaml.extension," + "sphinx.ext.intersphinx," + "sphinx.ext.coverage," + "sphinx.ext.napoleon," + "sphinx.ext.todo," + "sphinx.ext.viewcode," + "recommonmark" + ), + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +@nox.session(python="3.13") +@nox.parametrize( + "protobuf_implementation", + ["python", "upb", "cpp"], +) +def prerelease_deps(session, protobuf_implementation): + """Run all tests with prerelease versions of dependencies installed.""" + + if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): + session.skip("cpp implementation is not supported in python 3.11+") + + # Install all dependencies + session.install("-e", ".[all, tests, tracing]") + unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES + session.install(*unit_deps_all) + system_deps_all = ( + SYSTEM_TEST_STANDARD_DEPENDENCIES + + SYSTEM_TEST_EXTERNAL_DEPENDENCIES + + SYSTEM_TEST_EXTRAS + ) + session.install(*system_deps_all) + + # Because we test minimum dependency versions on the minimum Python + # version, the first version we test with in the unit tests sessions has a + # constraints file containing all dependencies and extras. + with open( + CURRENT_DIRECTORY + / "testing" + / f"constraints-{UNIT_TEST_PYTHON_VERSIONS[0]}.txt", + encoding="utf-8", + ) as constraints_file: + constraints_text = constraints_file.read() + + # Ignore leading whitespace and comment lines. 
+ constraints_deps = [ + match.group(1) + for match in re.finditer( + r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE + ) + ] + + session.install(*constraints_deps) + + prerel_deps = [ + "protobuf", + # dependency of grpc + "six", + "grpc-google-iam-v1", + "googleapis-common-protos", + "grpcio", + "grpcio-status", + "google-api-core", + "google-auth", + "proto-plus", + "google-cloud-testutils", + # dependencies of google-cloud-testutils" + "click", + ] + + for dep in prerel_deps: + session.install("--pre", "--no-deps", "--upgrade", dep) + + # Remaining dependencies + other_deps = [ + "requests", + ] + session.install(*other_deps) + + # Print out prerelease package versions + session.run( + "python", "-c", "import google.protobuf; print(google.protobuf.__version__)" + ) + session.run("python", "-c", "import grpc; print(grpc.__version__)") + session.run("python", "-c", "import google.auth; print(google.auth.__version__)") + + session.run( + "py.test", + "tests/unit", + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, + ) diff --git a/packages/google-cloud-audit-log/renovate.json b/packages/google-cloud-audit-log/renovate.json new file mode 100644 index 000000000000..39b2a0ec9296 --- /dev/null +++ b/packages/google-cloud-audit-log/renovate.json @@ -0,0 +1,12 @@ +{ + "extends": [ + "config:base", + "group:all", + ":preserveSemverRanges", + ":disableDependencyDashboard" + ], + "ignorePaths": [".pre-commit-config.yaml", ".kokoro/requirements.txt", "setup.py"], + "pip_requirements": { + "fileMatch": ["requirements-test.txt", "samples/[\\S/]*constraints.txt", "samples/[\\S/]*constraints-test.txt"] + } +} diff --git a/packages/google-cloud-audit-log/scripts/decrypt-secrets.sh b/packages/google-cloud-audit-log/scripts/decrypt-secrets.sh new file mode 100755 index 000000000000..120b0ddc4364 --- /dev/null +++ b/packages/google-cloud-audit-log/scripts/decrypt-secrets.sh @@ -0,0 +1,46 @@ +#!/bin/bash + +# Copyright 2024 Google LLC All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" +ROOT=$( dirname "$DIR" ) + +# Work from the project root. +cd $ROOT + +# Prevent it from overriding files. +# We recommend that sample authors use their own service account files and cloud project. +# In that case, they are supposed to prepare these files by themselves. +if [[ -f "testing/test-env.sh" ]] || \ + [[ -f "testing/service-account.json" ]] || \ + [[ -f "testing/client-secrets.json" ]]; then + echo "One or more target files exist, aborting." + exit 1 +fi + +# Use SECRET_MANAGER_PROJECT if set, fallback to cloud-devrel-kokoro-resources. 
+PROJECT_ID="${SECRET_MANAGER_PROJECT:-cloud-devrel-kokoro-resources}" + +gcloud secrets versions access latest --secret="python-docs-samples-test-env" \ + --project="${PROJECT_ID}" \ + > testing/test-env.sh +gcloud secrets versions access latest \ + --secret="python-docs-samples-service-account" \ + --project="${PROJECT_ID}" \ + > testing/service-account.json +gcloud secrets versions access latest \ + --secret="python-docs-samples-client-secrets" \ + --project="${PROJECT_ID}" \ + > testing/client-secrets.json diff --git a/packages/google-cloud-audit-log/setup.cfg b/packages/google-cloud-audit-log/setup.cfg new file mode 100644 index 000000000000..052350089505 --- /dev/null +++ b/packages/google-cloud-audit-log/setup.cfg @@ -0,0 +1,19 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! +[bdist_wheel] +universal = 1 diff --git a/packages/google-cloud-audit-log/setup.py b/packages/google-cloud-audit-log/setup.py new file mode 100644 index 000000000000..91eff134b162 --- /dev/null +++ b/packages/google-cloud-audit-log/setup.py @@ -0,0 +1,66 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import io +import os + +import setuptools +from setuptools import find_namespace_packages + +name = "google-cloud-audit-log" +description = "Google Cloud Audit Protos" +version = "0.3.0" +release_status = "Development Status :: 4 - Beta" +dependencies = [ + "protobuf>=3.20.2,<6.0.0dev,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", + "googleapis-common-protos >= 1.56.2, < 2.0dev", +] + +package_root = os.path.abspath(os.path.dirname(__file__)) + +readme_filename = os.path.join(package_root, "README.rst") +with io.open(readme_filename, encoding="utf-8") as readme_file: + readme = readme_file.read() + + +setuptools.setup( + name=name, + version=version, + author="Google LLC", + author_email="googleapis-packages@google.com", + classifiers=[ + release_status, + "Intended Audience :: Developers", + "License :: OSI Approved :: Apache Software License", + "Programming Language :: Python", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13", + "Programming Language :: Python :: Implementation :: CPython", + ], + description=description, + long_description=readme, + long_description_content_type="text/markdown", + install_requires=dependencies, + license="Apache-2.0", + packages=find_namespace_packages(exclude=("tests*", "testing*")), + python_requires=">=3.7", + url="https://github.com/googleapis/python-audit-log", + include_package_data=True, +) diff --git a/packages/google-cloud-audit-log/testing/.gitignore b/packages/google-cloud-audit-log/testing/.gitignore new file mode 100644 index 000000000000..b05fbd630881 --- /dev/null +++ b/packages/google-cloud-audit-log/testing/.gitignore @@ -0,0 +1,3 @@ +test-env.sh +service-account.json +client-secrets.json \ No newline at end of file diff --git a/packages/google-cloud-audit-log/testing/constraints-3.10.txt b/packages/google-cloud-audit-log/testing/constraints-3.10.txt new file mode 100644 index 000000000000..6d5e14bcf4a0 --- /dev/null +++ b/packages/google-cloud-audit-log/testing/constraints-3.10.txt @@ -0,0 +1,13 @@ +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. diff --git a/packages/google-cloud-audit-log/testing/constraints-3.11.txt b/packages/google-cloud-audit-log/testing/constraints-3.11.txt new file mode 100644 index 000000000000..6d5e14bcf4a0 --- /dev/null +++ b/packages/google-cloud-audit-log/testing/constraints-3.11.txt @@ -0,0 +1,13 @@ +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. diff --git a/packages/google-cloud-audit-log/testing/constraints-3.12.txt b/packages/google-cloud-audit-log/testing/constraints-3.12.txt new file mode 100644 index 000000000000..6d5e14bcf4a0 --- /dev/null +++ b/packages/google-cloud-audit-log/testing/constraints-3.12.txt @@ -0,0 +1,13 @@ +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. diff --git a/packages/google-cloud-audit-log/testing/constraints-3.13.txt b/packages/google-cloud-audit-log/testing/constraints-3.13.txt new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/packages/google-cloud-audit-log/testing/constraints-3.7.txt b/packages/google-cloud-audit-log/testing/constraints-3.7.txt new file mode 100644 index 000000000000..815739a17b8a --- /dev/null +++ b/packages/google-cloud-audit-log/testing/constraints-3.7.txt @@ -0,0 +1,22 @@ +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# This constraints file is used to check that lower bounds +# are correct in setup.py +# List all library dependencies and extras in this file. +# Pin the version to the lower bound. +# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", +# Then this file should have google-cloud-foo==1.14.0 +protobuf==3.20.2 +googleapis-common-protos==1.56.2 diff --git a/packages/google-cloud-audit-log/testing/constraints-3.8.txt b/packages/google-cloud-audit-log/testing/constraints-3.8.txt new file mode 100644 index 000000000000..6d5e14bcf4a0 --- /dev/null +++ b/packages/google-cloud-audit-log/testing/constraints-3.8.txt @@ -0,0 +1,13 @@ +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. diff --git a/packages/google-cloud-audit-log/testing/constraints-3.9.txt b/packages/google-cloud-audit-log/testing/constraints-3.9.txt new file mode 100644 index 000000000000..6d5e14bcf4a0 --- /dev/null +++ b/packages/google-cloud-audit-log/testing/constraints-3.9.txt @@ -0,0 +1,13 @@ +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. diff --git a/packages/google-cloud-audit-log/tests/unit/test_audit_log.py b/packages/google-cloud-audit-log/tests/unit/test_audit_log.py new file mode 100644 index 000000000000..9c680af4d500 --- /dev/null +++ b/packages/google-cloud-audit-log/tests/unit/test_audit_log.py @@ -0,0 +1,24 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +from google.cloud.audit import audit_log_pb2 + + +def test_create_audit_log(): + # just check that the import works + # and that an AuditLog instance can be instantiated + audit_log_pb2.AuditLog() diff --git a/packages/google-cloud-audit-log/tests/unit/test_packaging.py b/packages/google-cloud-audit-log/tests/unit/test_packaging.py new file mode 100644 index 000000000000..80b0aa0d7a61 --- /dev/null +++ b/packages/google-cloud-audit-log/tests/unit/test_packaging.py @@ -0,0 +1,39 @@ +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import os +import subprocess +import sys + + +# See https://docs.pytest.org/en/stable/how-to/tmp_path.html#the-tmp-path-fixture +# for more information on the `tmp_path` fixture of pytest +def test_namespace_package_compat(tmp_path): + # The ``google`` namespace package should not be masked + # by the presence of ``google-cloud-audit-log``. 
+ google = tmp_path / "google" + google.mkdir() + google.joinpath("othermod.py").write_text("") + env = dict(os.environ, PYTHONPATH=str(tmp_path)) + cmd = [sys.executable, "-m", "google.othermod"] + subprocess.check_call(cmd, env=env) + + # The ``google.cloud`` namespace package should not be masked + # by the presence of ``google-cloud-audit-log`. + google_cloud = tmp_path / "google" / "cloud" + google_cloud.mkdir() + google_cloud.joinpath("othermod.py").write_text("") + env = dict(os.environ, PYTHONPATH=str(tmp_path)) + cmd = [sys.executable, "-m", "google.cloud.othermod"] + subprocess.check_call(cmd, env=env) diff --git a/packages/google-cloud-automl/CHANGELOG.md b/packages/google-cloud-automl/CHANGELOG.md index 25873d41d1c2..a931c2c61a47 100644 --- a/packages/google-cloud-automl/CHANGELOG.md +++ b/packages/google-cloud-automl/CHANGELOG.md @@ -4,6 +4,14 @@ [1]: https://pypi.org/project/google-cloud-automl/#history +## [2.16.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-automl-v2.15.0...google-cloud-automl-v2.16.0) (2025-02-12) + + +### Features + +* Add REST Interceptors which support reading metadata ([b1c3ce8](https://github.com/googleapis/google-cloud-python/commit/b1c3ce8b271e9d22afabcde054e81dcedae6b0ef)) +* Add support for reading selective GAPIC generation methods from service YAML ([b1c3ce8](https://github.com/googleapis/google-cloud-python/commit/b1c3ce8b271e9d22afabcde054e81dcedae6b0ef)) + ## [2.15.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-automl-v2.14.1...google-cloud-automl-v2.15.0) (2024-12-12) diff --git a/packages/google-cloud-automl/README.rst b/packages/google-cloud-automl/README.rst index 396b29aa0149..a3c8bf944fbf 100644 --- a/packages/google-cloud-automl/README.rst +++ b/packages/google-cloud-automl/README.rst @@ -26,12 +26,12 @@ In order to use this library, you first need to go through the following steps: 1. `Select or create a Cloud Platform project.`_ 2. `Enable billing for your project.`_ 3. `Enable the Cloud AutoML.`_ -4. `Setup Authentication.`_ +4. `Set up Authentication.`_ .. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project .. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project .. _Enable the Cloud AutoML.: https://cloud.google.com/automl/docs/ -.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html +.. _Set up Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html Installation ~~~~~~~~~~~~ diff --git a/packages/google-cloud-automl/google/cloud/automl/gapic_version.py b/packages/google-cloud-automl/google/cloud/automl/gapic_version.py index 60165d54bf8a..e154065d8da8 100644 --- a/packages/google-cloud-automl/google/cloud/automl/gapic_version.py +++ b/packages/google-cloud-automl/google/cloud/automl/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "2.15.0" # {x-release-please-version} +__version__ = "2.16.0" # {x-release-please-version} diff --git a/packages/google-cloud-automl/google/cloud/automl_v1/gapic_version.py b/packages/google-cloud-automl/google/cloud/automl_v1/gapic_version.py index 60165d54bf8a..e154065d8da8 100644 --- a/packages/google-cloud-automl/google/cloud/automl_v1/gapic_version.py +++ b/packages/google-cloud-automl/google/cloud/automl_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.15.0" # {x-release-please-version} +__version__ = "2.16.0" # {x-release-please-version} diff --git a/packages/google-cloud-automl/google/cloud/automl_v1/services/auto_ml/client.py b/packages/google-cloud-automl/google/cloud/automl_v1/services/auto_ml/client.py index 0883ef0ea2e5..dbd972ee5916 100644 --- a/packages/google-cloud-automl/google/cloud/automl_v1/services/auto_ml/client.py +++ b/packages/google-cloud-automl/google/cloud/automl_v1/services/auto_ml/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -587,6 +589,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. diff --git a/packages/google-cloud-automl/google/cloud/automl_v1/services/auto_ml/transports/rest.py b/packages/google-cloud-automl/google/cloud/automl_v1/services/auto_ml/transports/rest.py index e94c4df2a09e..fc2ae0d3a8bb 100644 --- a/packages/google-cloud-automl/google/cloud/automl_v1/services/auto_ml/transports/rest.py +++ b/packages/google-cloud-automl/google/cloud/automl_v1/services/auto_ml/transports/rest.py @@ -241,12 +241,35 @@ def post_create_dataset( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_dataset - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_dataset_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AutoMl server but before - it is returned to user code. + it is returned to user code. This `post_create_dataset` interceptor runs + before the `post_create_dataset_with_metadata` interceptor. 
""" return response + def post_create_dataset_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_dataset + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AutoMl server but before it is returned to user code. + + We recommend only using this `post_create_dataset_with_metadata` + interceptor in new development instead of the `post_create_dataset` interceptor. + When both interceptors are used, this `post_create_dataset_with_metadata` interceptor runs after the + `post_create_dataset` interceptor. The (possibly modified) response returned by + `post_create_dataset` will be passed to + `post_create_dataset_with_metadata`. + """ + return response, metadata + def pre_create_model( self, request: service.CreateModelRequest, @@ -264,12 +287,35 @@ def post_create_model( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_model - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_model_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AutoMl server but before - it is returned to user code. + it is returned to user code. This `post_create_model` interceptor runs + before the `post_create_model_with_metadata` interceptor. """ return response + def post_create_model_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_model + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AutoMl server but before it is returned to user code. + + We recommend only using this `post_create_model_with_metadata` + interceptor in new development instead of the `post_create_model` interceptor. + When both interceptors are used, this `post_create_model_with_metadata` interceptor runs after the + `post_create_model` interceptor. The (possibly modified) response returned by + `post_create_model` will be passed to + `post_create_model_with_metadata`. + """ + return response, metadata + def pre_delete_dataset( self, request: service.DeleteDatasetRequest, @@ -287,12 +333,35 @@ def post_delete_dataset( ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_dataset - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_dataset_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AutoMl server but before - it is returned to user code. + it is returned to user code. This `post_delete_dataset` interceptor runs + before the `post_delete_dataset_with_metadata` interceptor. """ return response + def post_delete_dataset_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_dataset + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AutoMl server but before it is returned to user code. 
+ + We recommend only using this `post_delete_dataset_with_metadata` + interceptor in new development instead of the `post_delete_dataset` interceptor. + When both interceptors are used, this `post_delete_dataset_with_metadata` interceptor runs after the + `post_delete_dataset` interceptor. The (possibly modified) response returned by + `post_delete_dataset` will be passed to + `post_delete_dataset_with_metadata`. + """ + return response, metadata + def pre_delete_model( self, request: service.DeleteModelRequest, @@ -310,12 +379,35 @@ def post_delete_model( ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_model - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_model_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AutoMl server but before - it is returned to user code. + it is returned to user code. This `post_delete_model` interceptor runs + before the `post_delete_model_with_metadata` interceptor. """ return response + def post_delete_model_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_model + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AutoMl server but before it is returned to user code. + + We recommend only using this `post_delete_model_with_metadata` + interceptor in new development instead of the `post_delete_model` interceptor. + When both interceptors are used, this `post_delete_model_with_metadata` interceptor runs after the + `post_delete_model` interceptor. The (possibly modified) response returned by + `post_delete_model` will be passed to + `post_delete_model_with_metadata`. + """ + return response, metadata + def pre_deploy_model( self, request: service.DeployModelRequest, @@ -333,12 +425,35 @@ def post_deploy_model( ) -> operations_pb2.Operation: """Post-rpc interceptor for deploy_model - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_deploy_model_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AutoMl server but before - it is returned to user code. + it is returned to user code. This `post_deploy_model` interceptor runs + before the `post_deploy_model_with_metadata` interceptor. """ return response + def post_deploy_model_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for deploy_model + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AutoMl server but before it is returned to user code. + + We recommend only using this `post_deploy_model_with_metadata` + interceptor in new development instead of the `post_deploy_model` interceptor. + When both interceptors are used, this `post_deploy_model_with_metadata` interceptor runs after the + `post_deploy_model` interceptor. The (possibly modified) response returned by + `post_deploy_model` will be passed to + `post_deploy_model_with_metadata`. 
+ """ + return response, metadata + def pre_export_data( self, request: service.ExportDataRequest, @@ -356,12 +471,35 @@ def post_export_data( ) -> operations_pb2.Operation: """Post-rpc interceptor for export_data - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_export_data_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AutoMl server but before - it is returned to user code. + it is returned to user code. This `post_export_data` interceptor runs + before the `post_export_data_with_metadata` interceptor. """ return response + def post_export_data_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for export_data + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AutoMl server but before it is returned to user code. + + We recommend only using this `post_export_data_with_metadata` + interceptor in new development instead of the `post_export_data` interceptor. + When both interceptors are used, this `post_export_data_with_metadata` interceptor runs after the + `post_export_data` interceptor. The (possibly modified) response returned by + `post_export_data` will be passed to + `post_export_data_with_metadata`. + """ + return response, metadata + def pre_export_model( self, request: service.ExportModelRequest, @@ -379,12 +517,35 @@ def post_export_model( ) -> operations_pb2.Operation: """Post-rpc interceptor for export_model - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_export_model_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AutoMl server but before - it is returned to user code. + it is returned to user code. This `post_export_model` interceptor runs + before the `post_export_model_with_metadata` interceptor. """ return response + def post_export_model_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for export_model + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AutoMl server but before it is returned to user code. + + We recommend only using this `post_export_model_with_metadata` + interceptor in new development instead of the `post_export_model` interceptor. + When both interceptors are used, this `post_export_model_with_metadata` interceptor runs after the + `post_export_model` interceptor. The (possibly modified) response returned by + `post_export_model` will be passed to + `post_export_model_with_metadata`. + """ + return response, metadata + def pre_get_annotation_spec( self, request: service.GetAnnotationSpecRequest, @@ -404,12 +565,35 @@ def post_get_annotation_spec( ) -> annotation_spec.AnnotationSpec: """Post-rpc interceptor for get_annotation_spec - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_annotation_spec_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AutoMl server but before - it is returned to user code. + it is returned to user code. 
This `post_get_annotation_spec` interceptor runs + before the `post_get_annotation_spec_with_metadata` interceptor. """ return response + def post_get_annotation_spec_with_metadata( + self, + response: annotation_spec.AnnotationSpec, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[annotation_spec.AnnotationSpec, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_annotation_spec + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AutoMl server but before it is returned to user code. + + We recommend only using this `post_get_annotation_spec_with_metadata` + interceptor in new development instead of the `post_get_annotation_spec` interceptor. + When both interceptors are used, this `post_get_annotation_spec_with_metadata` interceptor runs after the + `post_get_annotation_spec` interceptor. The (possibly modified) response returned by + `post_get_annotation_spec` will be passed to + `post_get_annotation_spec_with_metadata`. + """ + return response, metadata + def pre_get_dataset( self, request: service.GetDatasetRequest, @@ -425,12 +609,35 @@ def pre_get_dataset( def post_get_dataset(self, response: dataset.Dataset) -> dataset.Dataset: """Post-rpc interceptor for get_dataset - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_dataset_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AutoMl server but before - it is returned to user code. + it is returned to user code. This `post_get_dataset` interceptor runs + before the `post_get_dataset_with_metadata` interceptor. """ return response + def post_get_dataset_with_metadata( + self, + response: dataset.Dataset, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[dataset.Dataset, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_dataset + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AutoMl server but before it is returned to user code. + + We recommend only using this `post_get_dataset_with_metadata` + interceptor in new development instead of the `post_get_dataset` interceptor. + When both interceptors are used, this `post_get_dataset_with_metadata` interceptor runs after the + `post_get_dataset` interceptor. The (possibly modified) response returned by + `post_get_dataset` will be passed to + `post_get_dataset_with_metadata`. + """ + return response, metadata + def pre_get_model( self, request: service.GetModelRequest, @@ -446,12 +653,33 @@ def pre_get_model( def post_get_model(self, response: model.Model) -> model.Model: """Post-rpc interceptor for get_model - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_model_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AutoMl server but before - it is returned to user code. + it is returned to user code. This `post_get_model` interceptor runs + before the `post_get_model_with_metadata` interceptor. 
""" return response + def post_get_model_with_metadata( + self, response: model.Model, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[model.Model, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_model + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AutoMl server but before it is returned to user code. + + We recommend only using this `post_get_model_with_metadata` + interceptor in new development instead of the `post_get_model` interceptor. + When both interceptors are used, this `post_get_model_with_metadata` interceptor runs after the + `post_get_model` interceptor. The (possibly modified) response returned by + `post_get_model` will be passed to + `post_get_model_with_metadata`. + """ + return response, metadata + def pre_get_model_evaluation( self, request: service.GetModelEvaluationRequest, @@ -471,12 +699,37 @@ def post_get_model_evaluation( ) -> model_evaluation.ModelEvaluation: """Post-rpc interceptor for get_model_evaluation - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_model_evaluation_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AutoMl server but before - it is returned to user code. + it is returned to user code. This `post_get_model_evaluation` interceptor runs + before the `post_get_model_evaluation_with_metadata` interceptor. """ return response + def post_get_model_evaluation_with_metadata( + self, + response: model_evaluation.ModelEvaluation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + model_evaluation.ModelEvaluation, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for get_model_evaluation + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AutoMl server but before it is returned to user code. + + We recommend only using this `post_get_model_evaluation_with_metadata` + interceptor in new development instead of the `post_get_model_evaluation` interceptor. + When both interceptors are used, this `post_get_model_evaluation_with_metadata` interceptor runs after the + `post_get_model_evaluation` interceptor. The (possibly modified) response returned by + `post_get_model_evaluation` will be passed to + `post_get_model_evaluation_with_metadata`. + """ + return response, metadata + def pre_import_data( self, request: service.ImportDataRequest, @@ -494,12 +747,35 @@ def post_import_data( ) -> operations_pb2.Operation: """Post-rpc interceptor for import_data - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_import_data_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AutoMl server but before - it is returned to user code. + it is returned to user code. This `post_import_data` interceptor runs + before the `post_import_data_with_metadata` interceptor. """ return response + def post_import_data_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for import_data + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AutoMl server but before it is returned to user code. 
+ + We recommend only using this `post_import_data_with_metadata` + interceptor in new development instead of the `post_import_data` interceptor. + When both interceptors are used, this `post_import_data_with_metadata` interceptor runs after the + `post_import_data` interceptor. The (possibly modified) response returned by + `post_import_data` will be passed to + `post_import_data_with_metadata`. + """ + return response, metadata + def pre_list_datasets( self, request: service.ListDatasetsRequest, @@ -517,12 +793,35 @@ def post_list_datasets( ) -> service.ListDatasetsResponse: """Post-rpc interceptor for list_datasets - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_datasets_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AutoMl server but before - it is returned to user code. + it is returned to user code. This `post_list_datasets` interceptor runs + before the `post_list_datasets_with_metadata` interceptor. """ return response + def post_list_datasets_with_metadata( + self, + response: service.ListDatasetsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[service.ListDatasetsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_datasets + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AutoMl server but before it is returned to user code. + + We recommend only using this `post_list_datasets_with_metadata` + interceptor in new development instead of the `post_list_datasets` interceptor. + When both interceptors are used, this `post_list_datasets_with_metadata` interceptor runs after the + `post_list_datasets` interceptor. The (possibly modified) response returned by + `post_list_datasets` will be passed to + `post_list_datasets_with_metadata`. + """ + return response, metadata + def pre_list_model_evaluations( self, request: service.ListModelEvaluationsRequest, @@ -542,12 +841,37 @@ def post_list_model_evaluations( ) -> service.ListModelEvaluationsResponse: """Post-rpc interceptor for list_model_evaluations - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_model_evaluations_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AutoMl server but before - it is returned to user code. + it is returned to user code. This `post_list_model_evaluations` interceptor runs + before the `post_list_model_evaluations_with_metadata` interceptor. """ return response + def post_list_model_evaluations_with_metadata( + self, + response: service.ListModelEvaluationsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + service.ListModelEvaluationsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_model_evaluations + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AutoMl server but before it is returned to user code. + + We recommend only using this `post_list_model_evaluations_with_metadata` + interceptor in new development instead of the `post_list_model_evaluations` interceptor. + When both interceptors are used, this `post_list_model_evaluations_with_metadata` interceptor runs after the + `post_list_model_evaluations` interceptor. 
The (possibly modified) response returned by + `post_list_model_evaluations` will be passed to + `post_list_model_evaluations_with_metadata`. + """ + return response, metadata + def pre_list_models( self, request: service.ListModelsRequest, @@ -565,12 +889,35 @@ def post_list_models( ) -> service.ListModelsResponse: """Post-rpc interceptor for list_models - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_models_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AutoMl server but before - it is returned to user code. + it is returned to user code. This `post_list_models` interceptor runs + before the `post_list_models_with_metadata` interceptor. """ return response + def post_list_models_with_metadata( + self, + response: service.ListModelsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[service.ListModelsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_models + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AutoMl server but before it is returned to user code. + + We recommend only using this `post_list_models_with_metadata` + interceptor in new development instead of the `post_list_models` interceptor. + When both interceptors are used, this `post_list_models_with_metadata` interceptor runs after the + `post_list_models` interceptor. The (possibly modified) response returned by + `post_list_models` will be passed to + `post_list_models_with_metadata`. + """ + return response, metadata + def pre_undeploy_model( self, request: service.UndeployModelRequest, @@ -588,12 +935,35 @@ def post_undeploy_model( ) -> operations_pb2.Operation: """Post-rpc interceptor for undeploy_model - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_undeploy_model_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AutoMl server but before - it is returned to user code. + it is returned to user code. This `post_undeploy_model` interceptor runs + before the `post_undeploy_model_with_metadata` interceptor. """ return response + def post_undeploy_model_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for undeploy_model + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AutoMl server but before it is returned to user code. + + We recommend only using this `post_undeploy_model_with_metadata` + interceptor in new development instead of the `post_undeploy_model` interceptor. + When both interceptors are used, this `post_undeploy_model_with_metadata` interceptor runs after the + `post_undeploy_model` interceptor. The (possibly modified) response returned by + `post_undeploy_model` will be passed to + `post_undeploy_model_with_metadata`. + """ + return response, metadata + def pre_update_dataset( self, request: service.UpdateDatasetRequest, @@ -609,12 +979,35 @@ def pre_update_dataset( def post_update_dataset(self, response: gca_dataset.Dataset) -> gca_dataset.Dataset: """Post-rpc interceptor for update_dataset - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_dataset_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response after it is returned by the AutoMl server but before - it is returned to user code. + it is returned to user code. This `post_update_dataset` interceptor runs + before the `post_update_dataset_with_metadata` interceptor. """ return response + def post_update_dataset_with_metadata( + self, + response: gca_dataset.Dataset, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gca_dataset.Dataset, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_dataset + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AutoMl server but before it is returned to user code. + + We recommend only using this `post_update_dataset_with_metadata` + interceptor in new development instead of the `post_update_dataset` interceptor. + When both interceptors are used, this `post_update_dataset_with_metadata` interceptor runs after the + `post_update_dataset` interceptor. The (possibly modified) response returned by + `post_update_dataset` will be passed to + `post_update_dataset_with_metadata`. + """ + return response, metadata + def pre_update_model( self, request: service.UpdateModelRequest, @@ -630,12 +1023,35 @@ def pre_update_model( def post_update_model(self, response: gca_model.Model) -> gca_model.Model: """Post-rpc interceptor for update_model - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_model_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AutoMl server but before - it is returned to user code. + it is returned to user code. This `post_update_model` interceptor runs + before the `post_update_model_with_metadata` interceptor. """ return response + def post_update_model_with_metadata( + self, + response: gca_model.Model, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gca_model.Model, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_model + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AutoMl server but before it is returned to user code. + + We recommend only using this `post_update_model_with_metadata` + interceptor in new development instead of the `post_update_model` interceptor. + When both interceptors are used, this `post_update_model_with_metadata` interceptor runs after the + `post_update_model` interceptor. The (possibly modified) response returned by + `post_update_model` will be passed to + `post_update_model_with_metadata`. 
+ """ + return response, metadata + @dataclasses.dataclass class AutoMlRestStub: @@ -925,6 +1341,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_dataset(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_dataset_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1073,6 +1493,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_model(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_model_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1217,6 +1641,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_dataset(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_dataset_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1359,6 +1787,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_model(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_model_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1507,6 +1939,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_deploy_model(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_deploy_model_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1655,6 +2091,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_export_data(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_export_data_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1805,6 +2245,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_export_model(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_export_model_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1952,6 +2396,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_annotation_spec(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_annotation_spec_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2097,6 +2545,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = 
self._interceptor.post_get_dataset(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_dataset_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2240,6 +2692,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_model(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_model_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2385,6 +2841,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_model_evaluation(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_model_evaluation_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2535,6 +2995,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_import_data(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_import_data_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2680,6 +3144,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_datasets(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_datasets_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2825,6 +3293,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_model_evaluations(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_model_evaluations_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2970,6 +3442,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_models(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_models_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3120,6 +3596,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_undeploy_model(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_undeploy_model_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3273,6 +3753,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_dataset(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_dataset_with_metadata( + resp, response_metadata + 
) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3422,6 +3906,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_model(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_model_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-automl/google/cloud/automl_v1/services/prediction_service/client.py b/packages/google-cloud-automl/google/cloud/automl_v1/services/prediction_service/client.py index 0ca9e7b01946..cf45f7ea3c5f 100644 --- a/packages/google-cloud-automl/google/cloud/automl_v1/services/prediction_service/client.py +++ b/packages/google-cloud-automl/google/cloud/automl_v1/services/prediction_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -496,6 +498,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. diff --git a/packages/google-cloud-automl/google/cloud/automl_v1/services/prediction_service/transports/rest.py b/packages/google-cloud-automl/google/cloud/automl_v1/services/prediction_service/transports/rest.py index 5b8981acfe35..0f19943d7c45 100644 --- a/packages/google-cloud-automl/google/cloud/automl_v1/services/prediction_service/transports/rest.py +++ b/packages/google-cloud-automl/google/cloud/automl_v1/services/prediction_service/transports/rest.py @@ -110,12 +110,35 @@ def post_batch_predict( ) -> operations_pb2.Operation: """Post-rpc interceptor for batch_predict - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_batch_predict_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the PredictionService server but before - it is returned to user code. + it is returned to user code. This `post_batch_predict` interceptor runs + before the `post_batch_predict_with_metadata` interceptor. 
""" return response + def post_batch_predict_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for batch_predict + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the PredictionService server but before it is returned to user code. + + We recommend only using this `post_batch_predict_with_metadata` + interceptor in new development instead of the `post_batch_predict` interceptor. + When both interceptors are used, this `post_batch_predict_with_metadata` interceptor runs after the + `post_batch_predict` interceptor. The (possibly modified) response returned by + `post_batch_predict` will be passed to + `post_batch_predict_with_metadata`. + """ + return response, metadata + def pre_predict( self, request: prediction_service.PredictRequest, @@ -135,12 +158,37 @@ def post_predict( ) -> prediction_service.PredictResponse: """Post-rpc interceptor for predict - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_predict_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the PredictionService server but before - it is returned to user code. + it is returned to user code. This `post_predict` interceptor runs + before the `post_predict_with_metadata` interceptor. """ return response + def post_predict_with_metadata( + self, + response: prediction_service.PredictResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + prediction_service.PredictResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for predict + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the PredictionService server but before it is returned to user code. + + We recommend only using this `post_predict_with_metadata` + interceptor in new development instead of the `post_predict` interceptor. + When both interceptors are used, this `post_predict_with_metadata` interceptor runs after the + `post_predict` interceptor. The (possibly modified) response returned by + `post_predict` will be passed to + `post_predict_with_metadata`. 
+ """ + return response, metadata + @dataclasses.dataclass class PredictionServiceRestStub: @@ -417,6 +465,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_batch_predict(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_batch_predict_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -570,6 +622,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_predict(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_predict_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-automl/google/cloud/automl_v1beta1/gapic_version.py b/packages/google-cloud-automl/google/cloud/automl_v1beta1/gapic_version.py index 60165d54bf8a..e154065d8da8 100644 --- a/packages/google-cloud-automl/google/cloud/automl_v1beta1/gapic_version.py +++ b/packages/google-cloud-automl/google/cloud/automl_v1beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.15.0" # {x-release-please-version} +__version__ = "2.16.0" # {x-release-please-version} diff --git a/packages/google-cloud-automl/google/cloud/automl_v1beta1/services/auto_ml/client.py b/packages/google-cloud-automl/google/cloud/automl_v1beta1/services/auto_ml/client.py index 779ea8ffc620..e4718068c738 100644 --- a/packages/google-cloud-automl/google/cloud/automl_v1beta1/services/auto_ml/client.py +++ b/packages/google-cloud-automl/google/cloud/automl_v1beta1/services/auto_ml/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -646,6 +648,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. 
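# The *_with_metadata interceptor hooks introduced throughout this change hand
# subclasses the HTTP response headers (as gRPC-style metadata tuples) together with
# the response object, while the older post_* hooks stay supported but deprecated.
# Below is a minimal sketch of how they could be consumed. The names
# AutoMlRestInterceptor, AutoMlRestTransport and AutoMlClient are assumed from the
# usual GAPIC REST naming and are not defined in this hunk, so treat the wiring as
# illustrative rather than definitive.
from typing import Sequence, Tuple, Union

from google.cloud import automl_v1
from google.cloud.automl_v1.services.auto_ml.transports.rest import (
    AutoMlRestInterceptor,
    AutoMlRestTransport,
)
from google.longrunning import operations_pb2


class HeaderLoggingInterceptor(AutoMlRestInterceptor):
    """Reads create_dataset response headers without modifying the response."""

    def post_create_dataset_with_metadata(
        self,
        response: operations_pb2.Operation,
        metadata: Sequence[Tuple[str, Union[str, bytes]]],
    ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]:
        # `metadata` is built by the transport from response.headers.items(); the
        # (response, metadata) pair returned here is what the client keeps using.
        print("create_dataset response headers:", dict(metadata))
        return response, metadata


# The interceptor is attached through the REST transport; application default
# credentials are picked up when none are passed explicitly.
client = automl_v1.AutoMlClient(
    transport=AutoMlRestTransport(interceptor=HeaderLoggingInterceptor())
)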
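# The _add_cred_info_for_auth_errors helper added to the clients above appends a JSON
# description of the active credential to the error details of 401/403/404 failures,
# provided the installed google-auth (>= 2.35.0) exposes get_cred_info(). A small
# sketch of what that could look like from the caller's side, assuming the client's
# error-handling paths (not shown in this hunk) route auth errors through the helper;
# the project/location/dataset IDs below are placeholders.
from google.api_core import exceptions as core_exceptions
from google.cloud import automl_v1

client = automl_v1.AutoMlClient()

try:
    client.get_dataset(
        name="projects/my-project/locations/us-central1/datasets/placeholder-id"
    )
except core_exceptions.PermissionDenied as exc:  # HTTP 403
    # With this change, the credential info string (when available) is appended to
    # the error details, which helps answer "which identity made this call?".
    print(exc.details)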
diff --git a/packages/google-cloud-automl/google/cloud/automl_v1beta1/services/auto_ml/transports/rest.py b/packages/google-cloud-automl/google/cloud/automl_v1beta1/services/auto_ml/transports/rest.py index 5724fc0eed1b..64c20f2211d1 100644 --- a/packages/google-cloud-automl/google/cloud/automl_v1beta1/services/auto_ml/transports/rest.py +++ b/packages/google-cloud-automl/google/cloud/automl_v1beta1/services/auto_ml/transports/rest.py @@ -289,12 +289,35 @@ def pre_create_dataset( def post_create_dataset(self, response: gca_dataset.Dataset) -> gca_dataset.Dataset: """Post-rpc interceptor for create_dataset - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_dataset_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AutoMl server but before - it is returned to user code. + it is returned to user code. This `post_create_dataset` interceptor runs + before the `post_create_dataset_with_metadata` interceptor. """ return response + def post_create_dataset_with_metadata( + self, + response: gca_dataset.Dataset, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gca_dataset.Dataset, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_dataset + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AutoMl server but before it is returned to user code. + + We recommend only using this `post_create_dataset_with_metadata` + interceptor in new development instead of the `post_create_dataset` interceptor. + When both interceptors are used, this `post_create_dataset_with_metadata` interceptor runs after the + `post_create_dataset` interceptor. The (possibly modified) response returned by + `post_create_dataset` will be passed to + `post_create_dataset_with_metadata`. + """ + return response, metadata + def pre_create_model( self, request: service.CreateModelRequest, @@ -312,12 +335,35 @@ def post_create_model( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_model - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_model_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AutoMl server but before - it is returned to user code. + it is returned to user code. This `post_create_model` interceptor runs + before the `post_create_model_with_metadata` interceptor. """ return response + def post_create_model_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_model + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AutoMl server but before it is returned to user code. + + We recommend only using this `post_create_model_with_metadata` + interceptor in new development instead of the `post_create_model` interceptor. + When both interceptors are used, this `post_create_model_with_metadata` interceptor runs after the + `post_create_model` interceptor. The (possibly modified) response returned by + `post_create_model` will be passed to + `post_create_model_with_metadata`. 
+ """ + return response, metadata + def pre_delete_dataset( self, request: service.DeleteDatasetRequest, @@ -335,12 +381,35 @@ def post_delete_dataset( ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_dataset - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_dataset_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AutoMl server but before - it is returned to user code. + it is returned to user code. This `post_delete_dataset` interceptor runs + before the `post_delete_dataset_with_metadata` interceptor. """ return response + def post_delete_dataset_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_dataset + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AutoMl server but before it is returned to user code. + + We recommend only using this `post_delete_dataset_with_metadata` + interceptor in new development instead of the `post_delete_dataset` interceptor. + When both interceptors are used, this `post_delete_dataset_with_metadata` interceptor runs after the + `post_delete_dataset` interceptor. The (possibly modified) response returned by + `post_delete_dataset` will be passed to + `post_delete_dataset_with_metadata`. + """ + return response, metadata + def pre_delete_model( self, request: service.DeleteModelRequest, @@ -358,12 +427,35 @@ def post_delete_model( ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_model - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_model_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AutoMl server but before - it is returned to user code. + it is returned to user code. This `post_delete_model` interceptor runs + before the `post_delete_model_with_metadata` interceptor. """ return response + def post_delete_model_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_model + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AutoMl server but before it is returned to user code. + + We recommend only using this `post_delete_model_with_metadata` + interceptor in new development instead of the `post_delete_model` interceptor. + When both interceptors are used, this `post_delete_model_with_metadata` interceptor runs after the + `post_delete_model` interceptor. The (possibly modified) response returned by + `post_delete_model` will be passed to + `post_delete_model_with_metadata`. + """ + return response, metadata + def pre_deploy_model( self, request: service.DeployModelRequest, @@ -381,12 +473,35 @@ def post_deploy_model( ) -> operations_pb2.Operation: """Post-rpc interceptor for deploy_model - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_deploy_model_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AutoMl server but before - it is returned to user code. + it is returned to user code. 
This `post_deploy_model` interceptor runs + before the `post_deploy_model_with_metadata` interceptor. """ return response + def post_deploy_model_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for deploy_model + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AutoMl server but before it is returned to user code. + + We recommend only using this `post_deploy_model_with_metadata` + interceptor in new development instead of the `post_deploy_model` interceptor. + When both interceptors are used, this `post_deploy_model_with_metadata` interceptor runs after the + `post_deploy_model` interceptor. The (possibly modified) response returned by + `post_deploy_model` will be passed to + `post_deploy_model_with_metadata`. + """ + return response, metadata + def pre_export_data( self, request: service.ExportDataRequest, @@ -404,12 +519,35 @@ def post_export_data( ) -> operations_pb2.Operation: """Post-rpc interceptor for export_data - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_export_data_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AutoMl server but before - it is returned to user code. + it is returned to user code. This `post_export_data` interceptor runs + before the `post_export_data_with_metadata` interceptor. """ return response + def post_export_data_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for export_data + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AutoMl server but before it is returned to user code. + + We recommend only using this `post_export_data_with_metadata` + interceptor in new development instead of the `post_export_data` interceptor. + When both interceptors are used, this `post_export_data_with_metadata` interceptor runs after the + `post_export_data` interceptor. The (possibly modified) response returned by + `post_export_data` will be passed to + `post_export_data_with_metadata`. + """ + return response, metadata + def pre_export_evaluated_examples( self, request: service.ExportEvaluatedExamplesRequest, @@ -429,12 +567,35 @@ def post_export_evaluated_examples( ) -> operations_pb2.Operation: """Post-rpc interceptor for export_evaluated_examples - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_export_evaluated_examples_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AutoMl server but before - it is returned to user code. + it is returned to user code. This `post_export_evaluated_examples` interceptor runs + before the `post_export_evaluated_examples_with_metadata` interceptor. 
""" return response + def post_export_evaluated_examples_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for export_evaluated_examples + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AutoMl server but before it is returned to user code. + + We recommend only using this `post_export_evaluated_examples_with_metadata` + interceptor in new development instead of the `post_export_evaluated_examples` interceptor. + When both interceptors are used, this `post_export_evaluated_examples_with_metadata` interceptor runs after the + `post_export_evaluated_examples` interceptor. The (possibly modified) response returned by + `post_export_evaluated_examples` will be passed to + `post_export_evaluated_examples_with_metadata`. + """ + return response, metadata + def pre_export_model( self, request: service.ExportModelRequest, @@ -452,12 +613,35 @@ def post_export_model( ) -> operations_pb2.Operation: """Post-rpc interceptor for export_model - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_export_model_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AutoMl server but before - it is returned to user code. + it is returned to user code. This `post_export_model` interceptor runs + before the `post_export_model_with_metadata` interceptor. """ return response + def post_export_model_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for export_model + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AutoMl server but before it is returned to user code. + + We recommend only using this `post_export_model_with_metadata` + interceptor in new development instead of the `post_export_model` interceptor. + When both interceptors are used, this `post_export_model_with_metadata` interceptor runs after the + `post_export_model` interceptor. The (possibly modified) response returned by + `post_export_model` will be passed to + `post_export_model_with_metadata`. + """ + return response, metadata + def pre_get_annotation_spec( self, request: service.GetAnnotationSpecRequest, @@ -477,12 +661,35 @@ def post_get_annotation_spec( ) -> annotation_spec.AnnotationSpec: """Post-rpc interceptor for get_annotation_spec - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_annotation_spec_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AutoMl server but before - it is returned to user code. + it is returned to user code. This `post_get_annotation_spec` interceptor runs + before the `post_get_annotation_spec_with_metadata` interceptor. 
""" return response + def post_get_annotation_spec_with_metadata( + self, + response: annotation_spec.AnnotationSpec, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[annotation_spec.AnnotationSpec, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_annotation_spec + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AutoMl server but before it is returned to user code. + + We recommend only using this `post_get_annotation_spec_with_metadata` + interceptor in new development instead of the `post_get_annotation_spec` interceptor. + When both interceptors are used, this `post_get_annotation_spec_with_metadata` interceptor runs after the + `post_get_annotation_spec` interceptor. The (possibly modified) response returned by + `post_get_annotation_spec` will be passed to + `post_get_annotation_spec_with_metadata`. + """ + return response, metadata + def pre_get_column_spec( self, request: service.GetColumnSpecRequest, @@ -500,12 +707,35 @@ def post_get_column_spec( ) -> column_spec.ColumnSpec: """Post-rpc interceptor for get_column_spec - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_column_spec_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AutoMl server but before - it is returned to user code. + it is returned to user code. This `post_get_column_spec` interceptor runs + before the `post_get_column_spec_with_metadata` interceptor. """ return response + def post_get_column_spec_with_metadata( + self, + response: column_spec.ColumnSpec, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[column_spec.ColumnSpec, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_column_spec + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AutoMl server but before it is returned to user code. + + We recommend only using this `post_get_column_spec_with_metadata` + interceptor in new development instead of the `post_get_column_spec` interceptor. + When both interceptors are used, this `post_get_column_spec_with_metadata` interceptor runs after the + `post_get_column_spec` interceptor. The (possibly modified) response returned by + `post_get_column_spec` will be passed to + `post_get_column_spec_with_metadata`. + """ + return response, metadata + def pre_get_dataset( self, request: service.GetDatasetRequest, @@ -521,12 +751,35 @@ def pre_get_dataset( def post_get_dataset(self, response: dataset.Dataset) -> dataset.Dataset: """Post-rpc interceptor for get_dataset - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_dataset_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AutoMl server but before - it is returned to user code. + it is returned to user code. This `post_get_dataset` interceptor runs + before the `post_get_dataset_with_metadata` interceptor. """ return response + def post_get_dataset_with_metadata( + self, + response: dataset.Dataset, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[dataset.Dataset, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_dataset + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AutoMl server but before it is returned to user code. 
+ + We recommend only using this `post_get_dataset_with_metadata` + interceptor in new development instead of the `post_get_dataset` interceptor. + When both interceptors are used, this `post_get_dataset_with_metadata` interceptor runs after the + `post_get_dataset` interceptor. The (possibly modified) response returned by + `post_get_dataset` will be passed to + `post_get_dataset_with_metadata`. + """ + return response, metadata + def pre_get_model( self, request: service.GetModelRequest, @@ -542,12 +795,33 @@ def pre_get_model( def post_get_model(self, response: model.Model) -> model.Model: """Post-rpc interceptor for get_model - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_model_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AutoMl server but before - it is returned to user code. + it is returned to user code. This `post_get_model` interceptor runs + before the `post_get_model_with_metadata` interceptor. """ return response + def post_get_model_with_metadata( + self, response: model.Model, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[model.Model, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_model + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AutoMl server but before it is returned to user code. + + We recommend only using this `post_get_model_with_metadata` + interceptor in new development instead of the `post_get_model` interceptor. + When both interceptors are used, this `post_get_model_with_metadata` interceptor runs after the + `post_get_model` interceptor. The (possibly modified) response returned by + `post_get_model` will be passed to + `post_get_model_with_metadata`. + """ + return response, metadata + def pre_get_model_evaluation( self, request: service.GetModelEvaluationRequest, @@ -567,12 +841,37 @@ def post_get_model_evaluation( ) -> model_evaluation.ModelEvaluation: """Post-rpc interceptor for get_model_evaluation - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_model_evaluation_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AutoMl server but before - it is returned to user code. + it is returned to user code. This `post_get_model_evaluation` interceptor runs + before the `post_get_model_evaluation_with_metadata` interceptor. """ return response + def post_get_model_evaluation_with_metadata( + self, + response: model_evaluation.ModelEvaluation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + model_evaluation.ModelEvaluation, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for get_model_evaluation + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AutoMl server but before it is returned to user code. + + We recommend only using this `post_get_model_evaluation_with_metadata` + interceptor in new development instead of the `post_get_model_evaluation` interceptor. + When both interceptors are used, this `post_get_model_evaluation_with_metadata` interceptor runs after the + `post_get_model_evaluation` interceptor. The (possibly modified) response returned by + `post_get_model_evaluation` will be passed to + `post_get_model_evaluation_with_metadata`. 
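# --- Illustrative sketch (not part of this diff) ----------------------------
# How a caller might adopt the new *_with_metadata hooks documented above: a
# subclass of the generated AutoMlRestInterceptor that inspects the HTTP
# response headers surfaced as `metadata`. The hook signatures mirror the
# generated code in this change; the import paths are assumed to follow the
# automl_v1beta1 package layout, and the class name and logging behaviour are
# illustrative only.
from typing import Sequence, Tuple, Union

from google.cloud.automl_v1beta1.services.auto_ml.transports.rest import (
    AutoMlRestInterceptor,
)
from google.cloud.automl_v1beta1.types import dataset


class HeaderLoggingInterceptor(AutoMlRestInterceptor):
    def post_get_dataset(self, response: dataset.Dataset) -> dataset.Dataset:
        # Deprecated hook: it still runs first, and its (possibly modified)
        # return value is what the *_with_metadata hook receives.
        return response

    def post_get_dataset_with_metadata(
        self,
        response: dataset.Dataset,
        metadata: Sequence[Tuple[str, Union[str, bytes]]],
    ) -> Tuple[dataset.Dataset, Sequence[Tuple[str, Union[str, bytes]]]]:
        # `metadata` is the list of (header, value) pairs the REST transport
        # builds from the HTTP response headers.
        for key, value in metadata:
            print(f"GetDataset response header {key}={value}")
        # Return both values; the generated transport unpacks the tuple and
        # currently keeps only the response.
        return response, metadata
# -----------------------------------------------------------------------------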
+ """ + return response, metadata + def pre_get_table_spec( self, request: service.GetTableSpecRequest, @@ -590,12 +889,35 @@ def post_get_table_spec( ) -> table_spec.TableSpec: """Post-rpc interceptor for get_table_spec - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_table_spec_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AutoMl server but before - it is returned to user code. + it is returned to user code. This `post_get_table_spec` interceptor runs + before the `post_get_table_spec_with_metadata` interceptor. """ return response + def post_get_table_spec_with_metadata( + self, + response: table_spec.TableSpec, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[table_spec.TableSpec, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_table_spec + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AutoMl server but before it is returned to user code. + + We recommend only using this `post_get_table_spec_with_metadata` + interceptor in new development instead of the `post_get_table_spec` interceptor. + When both interceptors are used, this `post_get_table_spec_with_metadata` interceptor runs after the + `post_get_table_spec` interceptor. The (possibly modified) response returned by + `post_get_table_spec` will be passed to + `post_get_table_spec_with_metadata`. + """ + return response, metadata + def pre_import_data( self, request: service.ImportDataRequest, @@ -613,12 +935,35 @@ def post_import_data( ) -> operations_pb2.Operation: """Post-rpc interceptor for import_data - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_import_data_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AutoMl server but before - it is returned to user code. + it is returned to user code. This `post_import_data` interceptor runs + before the `post_import_data_with_metadata` interceptor. """ return response + def post_import_data_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for import_data + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AutoMl server but before it is returned to user code. + + We recommend only using this `post_import_data_with_metadata` + interceptor in new development instead of the `post_import_data` interceptor. + When both interceptors are used, this `post_import_data_with_metadata` interceptor runs after the + `post_import_data` interceptor. The (possibly modified) response returned by + `post_import_data` will be passed to + `post_import_data_with_metadata`. + """ + return response, metadata + def pre_list_column_specs( self, request: service.ListColumnSpecsRequest, @@ -636,12 +981,37 @@ def post_list_column_specs( ) -> service.ListColumnSpecsResponse: """Post-rpc interceptor for list_column_specs - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_column_specs_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AutoMl server but before - it is returned to user code. + it is returned to user code. 
This `post_list_column_specs` interceptor runs + before the `post_list_column_specs_with_metadata` interceptor. """ return response + def post_list_column_specs_with_metadata( + self, + response: service.ListColumnSpecsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + service.ListColumnSpecsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_column_specs + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AutoMl server but before it is returned to user code. + + We recommend only using this `post_list_column_specs_with_metadata` + interceptor in new development instead of the `post_list_column_specs` interceptor. + When both interceptors are used, this `post_list_column_specs_with_metadata` interceptor runs after the + `post_list_column_specs` interceptor. The (possibly modified) response returned by + `post_list_column_specs` will be passed to + `post_list_column_specs_with_metadata`. + """ + return response, metadata + def pre_list_datasets( self, request: service.ListDatasetsRequest, @@ -659,12 +1029,35 @@ def post_list_datasets( ) -> service.ListDatasetsResponse: """Post-rpc interceptor for list_datasets - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_datasets_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AutoMl server but before - it is returned to user code. + it is returned to user code. This `post_list_datasets` interceptor runs + before the `post_list_datasets_with_metadata` interceptor. """ return response + def post_list_datasets_with_metadata( + self, + response: service.ListDatasetsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[service.ListDatasetsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_datasets + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AutoMl server but before it is returned to user code. + + We recommend only using this `post_list_datasets_with_metadata` + interceptor in new development instead of the `post_list_datasets` interceptor. + When both interceptors are used, this `post_list_datasets_with_metadata` interceptor runs after the + `post_list_datasets` interceptor. The (possibly modified) response returned by + `post_list_datasets` will be passed to + `post_list_datasets_with_metadata`. + """ + return response, metadata + def pre_list_model_evaluations( self, request: service.ListModelEvaluationsRequest, @@ -684,12 +1077,37 @@ def post_list_model_evaluations( ) -> service.ListModelEvaluationsResponse: """Post-rpc interceptor for list_model_evaluations - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_model_evaluations_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AutoMl server but before - it is returned to user code. + it is returned to user code. This `post_list_model_evaluations` interceptor runs + before the `post_list_model_evaluations_with_metadata` interceptor. 
""" return response + def post_list_model_evaluations_with_metadata( + self, + response: service.ListModelEvaluationsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + service.ListModelEvaluationsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_model_evaluations + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AutoMl server but before it is returned to user code. + + We recommend only using this `post_list_model_evaluations_with_metadata` + interceptor in new development instead of the `post_list_model_evaluations` interceptor. + When both interceptors are used, this `post_list_model_evaluations_with_metadata` interceptor runs after the + `post_list_model_evaluations` interceptor. The (possibly modified) response returned by + `post_list_model_evaluations` will be passed to + `post_list_model_evaluations_with_metadata`. + """ + return response, metadata + def pre_list_models( self, request: service.ListModelsRequest, @@ -707,12 +1125,35 @@ def post_list_models( ) -> service.ListModelsResponse: """Post-rpc interceptor for list_models - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_models_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AutoMl server but before - it is returned to user code. + it is returned to user code. This `post_list_models` interceptor runs + before the `post_list_models_with_metadata` interceptor. """ return response + def post_list_models_with_metadata( + self, + response: service.ListModelsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[service.ListModelsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_models + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AutoMl server but before it is returned to user code. + + We recommend only using this `post_list_models_with_metadata` + interceptor in new development instead of the `post_list_models` interceptor. + When both interceptors are used, this `post_list_models_with_metadata` interceptor runs after the + `post_list_models` interceptor. The (possibly modified) response returned by + `post_list_models` will be passed to + `post_list_models_with_metadata`. + """ + return response, metadata + def pre_list_table_specs( self, request: service.ListTableSpecsRequest, @@ -730,12 +1171,35 @@ def post_list_table_specs( ) -> service.ListTableSpecsResponse: """Post-rpc interceptor for list_table_specs - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_table_specs_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AutoMl server but before - it is returned to user code. + it is returned to user code. This `post_list_table_specs` interceptor runs + before the `post_list_table_specs_with_metadata` interceptor. 
""" return response + def post_list_table_specs_with_metadata( + self, + response: service.ListTableSpecsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[service.ListTableSpecsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_table_specs + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AutoMl server but before it is returned to user code. + + We recommend only using this `post_list_table_specs_with_metadata` + interceptor in new development instead of the `post_list_table_specs` interceptor. + When both interceptors are used, this `post_list_table_specs_with_metadata` interceptor runs after the + `post_list_table_specs` interceptor. The (possibly modified) response returned by + `post_list_table_specs` will be passed to + `post_list_table_specs_with_metadata`. + """ + return response, metadata + def pre_undeploy_model( self, request: service.UndeployModelRequest, @@ -753,12 +1217,35 @@ def post_undeploy_model( ) -> operations_pb2.Operation: """Post-rpc interceptor for undeploy_model - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_undeploy_model_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AutoMl server but before - it is returned to user code. + it is returned to user code. This `post_undeploy_model` interceptor runs + before the `post_undeploy_model_with_metadata` interceptor. """ return response + def post_undeploy_model_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for undeploy_model + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AutoMl server but before it is returned to user code. + + We recommend only using this `post_undeploy_model_with_metadata` + interceptor in new development instead of the `post_undeploy_model` interceptor. + When both interceptors are used, this `post_undeploy_model_with_metadata` interceptor runs after the + `post_undeploy_model` interceptor. The (possibly modified) response returned by + `post_undeploy_model` will be passed to + `post_undeploy_model_with_metadata`. + """ + return response, metadata + def pre_update_column_spec( self, request: service.UpdateColumnSpecRequest, @@ -778,12 +1265,35 @@ def post_update_column_spec( ) -> gca_column_spec.ColumnSpec: """Post-rpc interceptor for update_column_spec - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_column_spec_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AutoMl server but before - it is returned to user code. + it is returned to user code. This `post_update_column_spec` interceptor runs + before the `post_update_column_spec_with_metadata` interceptor. """ return response + def post_update_column_spec_with_metadata( + self, + response: gca_column_spec.ColumnSpec, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gca_column_spec.ColumnSpec, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_column_spec + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AutoMl server but before it is returned to user code. 
+ + We recommend only using this `post_update_column_spec_with_metadata` + interceptor in new development instead of the `post_update_column_spec` interceptor. + When both interceptors are used, this `post_update_column_spec_with_metadata` interceptor runs after the + `post_update_column_spec` interceptor. The (possibly modified) response returned by + `post_update_column_spec` will be passed to + `post_update_column_spec_with_metadata`. + """ + return response, metadata + def pre_update_dataset( self, request: service.UpdateDatasetRequest, @@ -799,12 +1309,35 @@ def pre_update_dataset( def post_update_dataset(self, response: gca_dataset.Dataset) -> gca_dataset.Dataset: """Post-rpc interceptor for update_dataset - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_dataset_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AutoMl server but before - it is returned to user code. + it is returned to user code. This `post_update_dataset` interceptor runs + before the `post_update_dataset_with_metadata` interceptor. """ return response + def post_update_dataset_with_metadata( + self, + response: gca_dataset.Dataset, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gca_dataset.Dataset, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_dataset + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AutoMl server but before it is returned to user code. + + We recommend only using this `post_update_dataset_with_metadata` + interceptor in new development instead of the `post_update_dataset` interceptor. + When both interceptors are used, this `post_update_dataset_with_metadata` interceptor runs after the + `post_update_dataset` interceptor. The (possibly modified) response returned by + `post_update_dataset` will be passed to + `post_update_dataset_with_metadata`. + """ + return response, metadata + def pre_update_table_spec( self, request: service.UpdateTableSpecRequest, @@ -822,12 +1355,35 @@ def post_update_table_spec( ) -> gca_table_spec.TableSpec: """Post-rpc interceptor for update_table_spec - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_table_spec_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AutoMl server but before - it is returned to user code. + it is returned to user code. This `post_update_table_spec` interceptor runs + before the `post_update_table_spec_with_metadata` interceptor. """ return response + def post_update_table_spec_with_metadata( + self, + response: gca_table_spec.TableSpec, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gca_table_spec.TableSpec, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_table_spec + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AutoMl server but before it is returned to user code. + + We recommend only using this `post_update_table_spec_with_metadata` + interceptor in new development instead of the `post_update_table_spec` interceptor. + When both interceptors are used, this `post_update_table_spec_with_metadata` interceptor runs after the + `post_update_table_spec` interceptor. The (possibly modified) response returned by + `post_update_table_spec` will be passed to + `post_update_table_spec_with_metadata`. 
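# --- Illustrative sketch (not part of this diff) ----------------------------
# Wiring a custom interceptor (such as the HeaderLoggingInterceptor sketched
# earlier) into the client. The `interceptor=` argument on the generated REST
# transport and the `transport=` argument on the client are standard GAPIC
# surfaces, but treat this wiring, and the placeholder resource name, as an
# assumption rather than an excerpt from this change.
from google.cloud import automl_v1beta1
from google.cloud.automl_v1beta1.services.auto_ml.transports.rest import (
    AutoMlRestTransport,
)

transport = AutoMlRestTransport(interceptor=HeaderLoggingInterceptor())
client = automl_v1beta1.AutoMlClient(transport=transport)

# Both the deprecated post_get_dataset hook and the new
# post_get_dataset_with_metadata hook run on this call, in that order.
result = client.get_dataset(
    name="projects/my-project/locations/us-central1/datasets/TBL123"  # placeholder
)
# -----------------------------------------------------------------------------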
+ """ + return response, metadata + @dataclasses.dataclass class AutoMlRestStub: @@ -1120,6 +1676,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_dataset(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_dataset_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1268,6 +1828,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_model(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_model_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1412,6 +1976,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_dataset(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_dataset_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1554,6 +2122,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_model(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_model_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1702,6 +2274,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_deploy_model(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_deploy_model_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1850,6 +2426,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_export_data(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_export_data_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2000,6 +2580,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_export_evaluated_examples(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_export_evaluated_examples_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2150,6 +2734,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_export_model(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_export_model_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2297,6 +2885,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = 
self._interceptor.post_get_annotation_spec(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_annotation_spec_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2445,6 +3037,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_column_spec(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_column_spec_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2590,6 +3186,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_dataset(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_dataset_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2733,6 +3333,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_model(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_model_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2878,6 +3482,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_model_evaluation(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_model_evaluation_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3034,6 +3642,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_table_spec(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_table_spec_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3182,6 +3794,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_import_data(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_import_data_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3331,6 +3947,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_column_specs(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_column_specs_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3476,6 +4096,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_datasets(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_datasets_with_metadata( + resp, 
response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3621,6 +4245,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_model_evaluations(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_model_evaluations_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3766,6 +4394,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_models(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_models_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3913,6 +4545,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_table_specs(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_table_specs_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4063,6 +4699,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_undeploy_model(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_undeploy_model_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4223,6 +4863,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_column_spec(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_column_spec_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4376,6 +5020,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_dataset(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_dataset_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4540,6 +5188,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_table_spec(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_table_spec_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-automl/google/cloud/automl_v1beta1/services/prediction_service/client.py b/packages/google-cloud-automl/google/cloud/automl_v1beta1/services/prediction_service/client.py index ae7e7884258f..57c510136c4c 100644 --- a/packages/google-cloud-automl/google/cloud/automl_v1beta1/services/prediction_service/client.py +++ b/packages/google-cloud-automl/google/cloud/automl_v1beta1/services/prediction_service/client.py @@ -14,6 +14,8 @@ # limitations under 
the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -496,6 +498,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. diff --git a/packages/google-cloud-automl/google/cloud/automl_v1beta1/services/prediction_service/transports/rest.py b/packages/google-cloud-automl/google/cloud/automl_v1beta1/services/prediction_service/transports/rest.py index 342dd9cbcfa6..0bd7ff1de9e9 100644 --- a/packages/google-cloud-automl/google/cloud/automl_v1beta1/services/prediction_service/transports/rest.py +++ b/packages/google-cloud-automl/google/cloud/automl_v1beta1/services/prediction_service/transports/rest.py @@ -110,12 +110,35 @@ def post_batch_predict( ) -> operations_pb2.Operation: """Post-rpc interceptor for batch_predict - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_batch_predict_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the PredictionService server but before - it is returned to user code. + it is returned to user code. This `post_batch_predict` interceptor runs + before the `post_batch_predict_with_metadata` interceptor. """ return response + def post_batch_predict_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for batch_predict + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the PredictionService server but before it is returned to user code. + + We recommend only using this `post_batch_predict_with_metadata` + interceptor in new development instead of the `post_batch_predict` interceptor. + When both interceptors are used, this `post_batch_predict_with_metadata` interceptor runs after the + `post_batch_predict` interceptor. The (possibly modified) response returned by + `post_batch_predict` will be passed to + `post_batch_predict_with_metadata`. + """ + return response, metadata + def pre_predict( self, request: prediction_service.PredictRequest, @@ -135,12 +158,37 @@ def post_predict( ) -> prediction_service.PredictResponse: """Post-rpc interceptor for predict - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_predict_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response after it is returned by the PredictionService server but before - it is returned to user code. + it is returned to user code. This `post_predict` interceptor runs + before the `post_predict_with_metadata` interceptor. """ return response + def post_predict_with_metadata( + self, + response: prediction_service.PredictResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + prediction_service.PredictResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for predict + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the PredictionService server but before it is returned to user code. + + We recommend only using this `post_predict_with_metadata` + interceptor in new development instead of the `post_predict` interceptor. + When both interceptors are used, this `post_predict_with_metadata` interceptor runs after the + `post_predict` interceptor. The (possibly modified) response returned by + `post_predict` will be passed to + `post_predict_with_metadata`. + """ + return response, metadata + @dataclasses.dataclass class PredictionServiceRestStub: @@ -417,6 +465,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_batch_predict(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_batch_predict_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -570,6 +622,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_predict(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_predict_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-automl/noxfile.py b/packages/google-cloud-automl/noxfile.py index 0c1bfbb552b1..a24c0f907c6a 100644 --- a/packages/google-cloud-automl/noxfile.py +++ b/packages/google-cloud-automl/noxfile.py @@ -382,20 +382,29 @@ def docfx(session): ["python", "upb", "cpp"], ) def prerelease_deps(session, protobuf_implementation): - """Run all tests with prerelease versions of dependencies installed.""" + """ + Run all tests with pre-release versions of dependencies installed + rather than the standard non pre-release versions. + Pre-releases versions can be installed using + `pip install --pre `. + """ if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): session.skip("cpp implementation is not supported in python 3.11+") # Install all dependencies - session.install("-e", ".[all, tests, tracing]") + session.install("-e", ".") + unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES + # Install dependencies for the unit test environment session.install(*unit_deps_all) + system_deps_all = ( SYSTEM_TEST_STANDARD_DEPENDENCIES + SYSTEM_TEST_EXTERNAL_DEPENDENCIES + SYSTEM_TEST_EXTRAS ) + # Install dependencies for the system test environment session.install(*system_deps_all) # Because we test minimum dependency versions on the minimum Python @@ -417,6 +426,7 @@ def prerelease_deps(session, protobuf_implementation): ) ] + # Install dependencies specified in `testing/constraints-X.txt`. 
session.install(*constraints_deps) prerel_deps = [ @@ -458,3 +468,70 @@ def prerelease_deps(session, protobuf_implementation): "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, }, ) + + +@nox.session(python="3.13") +@nox.parametrize( + "protobuf_implementation", + ["python", "upb"], +) +def core_deps_from_source(session, protobuf_implementation): + """Run all tests with local versions of core dependencies installed, + rather than pulling core dependencies from PyPI. + """ + + # Install all dependencies + session.install(".") + + # Install dependencies for the unit test environment + unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES + session.install(*unit_deps_all) + + # Install dependencies for the system test environment + system_deps_all = ( + SYSTEM_TEST_STANDARD_DEPENDENCIES + + SYSTEM_TEST_EXTERNAL_DEPENDENCIES + + SYSTEM_TEST_EXTRAS + ) + session.install(*system_deps_all) + + # Because we test minimum dependency versions on the minimum Python + # version, the first version we test with in the unit tests sessions has a + # constraints file containing all dependencies and extras that should be installed. + with open( + CURRENT_DIRECTORY + / "testing" + / f"constraints-{UNIT_TEST_PYTHON_VERSIONS[0]}.txt", + encoding="utf-8", + ) as constraints_file: + constraints_text = constraints_file.read() + + # Ignore leading whitespace and comment lines. + constraints_deps = [ + match.group(1) + for match in re.finditer( + r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE + ) + ] + + # Install dependencies specified in `testing/constraints-X.txt`. + session.install(*constraints_deps) + + core_dependencies_from_source = [ + "google-api-core @ git+https://github.com/googleapis/python-api-core.git", + "google-auth @ git+https://github.com/googleapis/google-auth-library-python.git", + f"{CURRENT_DIRECTORY}/../googleapis-common-protos", + f"{CURRENT_DIRECTORY}/../grpc-google-iam-v1", + "proto-plus @ git+https://github.com/googleapis/proto-plus-python.git", + ] + + for dep in core_dependencies_from_source: + session.install(dep, "--ignore-installed", "--no-deps") + + session.run( + "py.test", + "tests/unit", + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, + ) diff --git a/packages/google-cloud-automl/samples/generated_samples/snippet_metadata_google.cloud.automl.v1.json b/packages/google-cloud-automl/samples/generated_samples/snippet_metadata_google.cloud.automl.v1.json index 34d77771f1fe..87eb6ca88463 100644 --- a/packages/google-cloud-automl/samples/generated_samples/snippet_metadata_google.cloud.automl.v1.json +++ b/packages/google-cloud-automl/samples/generated_samples/snippet_metadata_google.cloud.automl.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-automl", - "version": "2.15.0" + "version": "2.16.0" }, "snippets": [ { diff --git a/packages/google-cloud-automl/samples/generated_samples/snippet_metadata_google.cloud.automl.v1beta1.json b/packages/google-cloud-automl/samples/generated_samples/snippet_metadata_google.cloud.automl.v1beta1.json index 7551b7fb524e..65deff55ce68 100644 --- a/packages/google-cloud-automl/samples/generated_samples/snippet_metadata_google.cloud.automl.v1beta1.json +++ b/packages/google-cloud-automl/samples/generated_samples/snippet_metadata_google.cloud.automl.v1beta1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-automl", - "version": "2.15.0" + "version": "2.16.0" }, "snippets": [ { diff --git 
a/packages/google-cloud-automl/tests/unit/gapic/automl_v1/test_auto_ml.py b/packages/google-cloud-automl/tests/unit/gapic/automl_v1/test_auto_ml.py index bed20847bf91..a353c0885cbc 100644 --- a/packages/google-cloud-automl/tests/unit/gapic/automl_v1/test_auto_ml.py +++ b/packages/google-cloud-automl/tests/unit/gapic/automl_v1/test_auto_ml.py @@ -87,6 +87,13 @@ from google.cloud.automl_v1.types import model from google.cloud.automl_v1.types import model as gca_model +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -309,6 +316,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = AutoMlClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = AutoMlClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -12544,10 +12594,13 @@ def test_create_dataset_rest_interceptors(null_interceptor): ), mock.patch.object( transports.AutoMlRestInterceptor, "post_create_dataset" ) as post, mock.patch.object( + transports.AutoMlRestInterceptor, "post_create_dataset_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AutoMlRestInterceptor, "pre_create_dataset" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.CreateDatasetRequest.pb(service.CreateDatasetRequest()) transcode.return_value = { "method": "post", @@ -12569,6 +12622,7 @@ def test_create_dataset_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_dataset( request, @@ -12580,6 +12634,7 @@ def test_create_dataset_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_dataset_rest_bad_request(request_type=service.GetDatasetRequest): @@ -12668,10 +12723,13 @@ def test_get_dataset_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AutoMlRestInterceptor, "post_get_dataset" ) 
as post, mock.patch.object( + transports.AutoMlRestInterceptor, "post_get_dataset_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AutoMlRestInterceptor, "pre_get_dataset" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.GetDatasetRequest.pb(service.GetDatasetRequest()) transcode.return_value = { "method": "post", @@ -12693,6 +12751,7 @@ def test_get_dataset_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = dataset.Dataset() + post_with_metadata.return_value = dataset.Dataset(), metadata client.get_dataset( request, @@ -12704,6 +12763,7 @@ def test_get_dataset_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_datasets_rest_bad_request(request_type=service.ListDatasetsRequest): @@ -12784,10 +12844,13 @@ def test_list_datasets_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AutoMlRestInterceptor, "post_list_datasets" ) as post, mock.patch.object( + transports.AutoMlRestInterceptor, "post_list_datasets_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AutoMlRestInterceptor, "pre_list_datasets" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.ListDatasetsRequest.pb(service.ListDatasetsRequest()) transcode.return_value = { "method": "post", @@ -12811,6 +12874,7 @@ def test_list_datasets_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = service.ListDatasetsResponse() + post_with_metadata.return_value = service.ListDatasetsResponse(), metadata client.list_datasets( request, @@ -12822,6 +12886,7 @@ def test_list_datasets_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_dataset_rest_bad_request(request_type=service.UpdateDatasetRequest): @@ -12999,10 +13064,13 @@ def test_update_dataset_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AutoMlRestInterceptor, "post_update_dataset" ) as post, mock.patch.object( + transports.AutoMlRestInterceptor, "post_update_dataset_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AutoMlRestInterceptor, "pre_update_dataset" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.UpdateDatasetRequest.pb(service.UpdateDatasetRequest()) transcode.return_value = { "method": "post", @@ -13024,6 +13092,7 @@ def test_update_dataset_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gca_dataset.Dataset() + post_with_metadata.return_value = gca_dataset.Dataset(), metadata client.update_dataset( request, @@ -13035,6 +13104,7 @@ def test_update_dataset_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_dataset_rest_bad_request(request_type=service.DeleteDatasetRequest): @@ -13111,10 +13181,13 @@ def test_delete_dataset_rest_interceptors(null_interceptor): ), mock.patch.object( transports.AutoMlRestInterceptor, "post_delete_dataset" ) as post, mock.patch.object( + transports.AutoMlRestInterceptor, "post_delete_dataset_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AutoMlRestInterceptor, 
"pre_delete_dataset" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.DeleteDatasetRequest.pb(service.DeleteDatasetRequest()) transcode.return_value = { "method": "post", @@ -13136,6 +13209,7 @@ def test_delete_dataset_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.delete_dataset( request, @@ -13147,6 +13221,7 @@ def test_delete_dataset_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_import_data_rest_bad_request(request_type=service.ImportDataRequest): @@ -13223,10 +13298,13 @@ def test_import_data_rest_interceptors(null_interceptor): ), mock.patch.object( transports.AutoMlRestInterceptor, "post_import_data" ) as post, mock.patch.object( + transports.AutoMlRestInterceptor, "post_import_data_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AutoMlRestInterceptor, "pre_import_data" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.ImportDataRequest.pb(service.ImportDataRequest()) transcode.return_value = { "method": "post", @@ -13248,6 +13326,7 @@ def test_import_data_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.import_data( request, @@ -13259,6 +13338,7 @@ def test_import_data_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_export_data_rest_bad_request(request_type=service.ExportDataRequest): @@ -13335,10 +13415,13 @@ def test_export_data_rest_interceptors(null_interceptor): ), mock.patch.object( transports.AutoMlRestInterceptor, "post_export_data" ) as post, mock.patch.object( + transports.AutoMlRestInterceptor, "post_export_data_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AutoMlRestInterceptor, "pre_export_data" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.ExportDataRequest.pb(service.ExportDataRequest()) transcode.return_value = { "method": "post", @@ -13360,6 +13443,7 @@ def test_export_data_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.export_data( request, @@ -13371,6 +13455,7 @@ def test_export_data_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_annotation_spec_rest_bad_request( @@ -13461,10 +13546,13 @@ def test_get_annotation_spec_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AutoMlRestInterceptor, "post_get_annotation_spec" ) as post, mock.patch.object( + transports.AutoMlRestInterceptor, "post_get_annotation_spec_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AutoMlRestInterceptor, "pre_get_annotation_spec" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.GetAnnotationSpecRequest.pb( service.GetAnnotationSpecRequest() ) @@ -13490,6 +13578,7 @@ def 
test_get_annotation_spec_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = annotation_spec.AnnotationSpec() + post_with_metadata.return_value = annotation_spec.AnnotationSpec(), metadata client.get_annotation_spec( request, @@ -13501,6 +13590,7 @@ def test_get_annotation_spec_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_model_rest_bad_request(request_type=service.CreateModelRequest): @@ -13679,10 +13769,13 @@ def test_create_model_rest_interceptors(null_interceptor): ), mock.patch.object( transports.AutoMlRestInterceptor, "post_create_model" ) as post, mock.patch.object( + transports.AutoMlRestInterceptor, "post_create_model_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AutoMlRestInterceptor, "pre_create_model" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.CreateModelRequest.pb(service.CreateModelRequest()) transcode.return_value = { "method": "post", @@ -13704,6 +13797,7 @@ def test_create_model_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_model( request, @@ -13715,6 +13809,7 @@ def test_create_model_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_model_rest_bad_request(request_type=service.GetModelRequest): @@ -13803,10 +13898,13 @@ def test_get_model_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AutoMlRestInterceptor, "post_get_model" ) as post, mock.patch.object( + transports.AutoMlRestInterceptor, "post_get_model_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AutoMlRestInterceptor, "pre_get_model" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.GetModelRequest.pb(service.GetModelRequest()) transcode.return_value = { "method": "post", @@ -13828,6 +13926,7 @@ def test_get_model_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = model.Model() + post_with_metadata.return_value = model.Model(), metadata client.get_model( request, @@ -13839,6 +13938,7 @@ def test_get_model_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_models_rest_bad_request(request_type=service.ListModelsRequest): @@ -13919,10 +14019,13 @@ def test_list_models_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AutoMlRestInterceptor, "post_list_models" ) as post, mock.patch.object( + transports.AutoMlRestInterceptor, "post_list_models_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AutoMlRestInterceptor, "pre_list_models" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.ListModelsRequest.pb(service.ListModelsRequest()) transcode.return_value = { "method": "post", @@ -13944,6 +14047,7 @@ def test_list_models_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = service.ListModelsResponse() + post_with_metadata.return_value = service.ListModelsResponse(), metadata client.list_models( 
request, @@ -13955,6 +14059,7 @@ def test_list_models_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_model_rest_bad_request(request_type=service.DeleteModelRequest): @@ -14031,10 +14136,13 @@ def test_delete_model_rest_interceptors(null_interceptor): ), mock.patch.object( transports.AutoMlRestInterceptor, "post_delete_model" ) as post, mock.patch.object( + transports.AutoMlRestInterceptor, "post_delete_model_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AutoMlRestInterceptor, "pre_delete_model" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.DeleteModelRequest.pb(service.DeleteModelRequest()) transcode.return_value = { "method": "post", @@ -14056,6 +14164,7 @@ def test_delete_model_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.delete_model( request, @@ -14067,6 +14176,7 @@ def test_delete_model_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_model_rest_bad_request(request_type=service.UpdateModelRequest): @@ -14261,10 +14371,13 @@ def test_update_model_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AutoMlRestInterceptor, "post_update_model" ) as post, mock.patch.object( + transports.AutoMlRestInterceptor, "post_update_model_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AutoMlRestInterceptor, "pre_update_model" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.UpdateModelRequest.pb(service.UpdateModelRequest()) transcode.return_value = { "method": "post", @@ -14286,6 +14399,7 @@ def test_update_model_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gca_model.Model() + post_with_metadata.return_value = gca_model.Model(), metadata client.update_model( request, @@ -14297,6 +14411,7 @@ def test_update_model_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_deploy_model_rest_bad_request(request_type=service.DeployModelRequest): @@ -14373,10 +14488,13 @@ def test_deploy_model_rest_interceptors(null_interceptor): ), mock.patch.object( transports.AutoMlRestInterceptor, "post_deploy_model" ) as post, mock.patch.object( + transports.AutoMlRestInterceptor, "post_deploy_model_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AutoMlRestInterceptor, "pre_deploy_model" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.DeployModelRequest.pb(service.DeployModelRequest()) transcode.return_value = { "method": "post", @@ -14398,6 +14516,7 @@ def test_deploy_model_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.deploy_model( request, @@ -14409,6 +14528,7 @@ def test_deploy_model_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def 
test_undeploy_model_rest_bad_request(request_type=service.UndeployModelRequest): @@ -14485,10 +14605,13 @@ def test_undeploy_model_rest_interceptors(null_interceptor): ), mock.patch.object( transports.AutoMlRestInterceptor, "post_undeploy_model" ) as post, mock.patch.object( + transports.AutoMlRestInterceptor, "post_undeploy_model_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AutoMlRestInterceptor, "pre_undeploy_model" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.UndeployModelRequest.pb(service.UndeployModelRequest()) transcode.return_value = { "method": "post", @@ -14510,6 +14633,7 @@ def test_undeploy_model_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.undeploy_model( request, @@ -14521,6 +14645,7 @@ def test_undeploy_model_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_export_model_rest_bad_request(request_type=service.ExportModelRequest): @@ -14597,10 +14722,13 @@ def test_export_model_rest_interceptors(null_interceptor): ), mock.patch.object( transports.AutoMlRestInterceptor, "post_export_model" ) as post, mock.patch.object( + transports.AutoMlRestInterceptor, "post_export_model_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AutoMlRestInterceptor, "pre_export_model" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.ExportModelRequest.pb(service.ExportModelRequest()) transcode.return_value = { "method": "post", @@ -14622,6 +14750,7 @@ def test_export_model_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.export_model( request, @@ -14633,6 +14762,7 @@ def test_export_model_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_model_evaluation_rest_bad_request( @@ -14725,10 +14855,13 @@ def test_get_model_evaluation_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AutoMlRestInterceptor, "post_get_model_evaluation" ) as post, mock.patch.object( + transports.AutoMlRestInterceptor, "post_get_model_evaluation_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AutoMlRestInterceptor, "pre_get_model_evaluation" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.GetModelEvaluationRequest.pb( service.GetModelEvaluationRequest() ) @@ -14754,6 +14887,7 @@ def test_get_model_evaluation_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = model_evaluation.ModelEvaluation() + post_with_metadata.return_value = model_evaluation.ModelEvaluation(), metadata client.get_model_evaluation( request, @@ -14765,6 +14899,7 @@ def test_get_model_evaluation_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_model_evaluations_rest_bad_request( @@ -14847,10 +14982,13 @@ def test_list_model_evaluations_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( 
transports.AutoMlRestInterceptor, "post_list_model_evaluations" ) as post, mock.patch.object( + transports.AutoMlRestInterceptor, "post_list_model_evaluations_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AutoMlRestInterceptor, "pre_list_model_evaluations" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.ListModelEvaluationsRequest.pb( service.ListModelEvaluationsRequest() ) @@ -14876,6 +15014,10 @@ def test_list_model_evaluations_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = service.ListModelEvaluationsResponse() + post_with_metadata.return_value = ( + service.ListModelEvaluationsResponse(), + metadata, + ) client.list_model_evaluations( request, @@ -14887,6 +15029,7 @@ def test_list_model_evaluations_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_initialize_client_w_rest(): diff --git a/packages/google-cloud-automl/tests/unit/gapic/automl_v1/test_prediction_service.py b/packages/google-cloud-automl/tests/unit/gapic/automl_v1/test_prediction_service.py index 25ca1ff6bc95..82c2df245ddb 100644 --- a/packages/google-cloud-automl/tests/unit/gapic/automl_v1/test_prediction_service.py +++ b/packages/google-cloud-automl/tests/unit/gapic/automl_v1/test_prediction_service.py @@ -77,6 +77,13 @@ text_segment, ) +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -344,6 +351,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = PredictionServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = PredictionServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -2549,10 +2599,13 @@ def test_predict_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.PredictionServiceRestInterceptor, "post_predict" ) as post, mock.patch.object( + transports.PredictionServiceRestInterceptor, "post_predict_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.PredictionServiceRestInterceptor, "pre_predict" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = prediction_service.PredictRequest.pb( prediction_service.PredictRequest() ) @@ -2578,6 +2631,7 @@ def test_predict_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = prediction_service.PredictResponse() + post_with_metadata.return_value = prediction_service.PredictResponse(), metadata client.predict( request, @@ -2589,6 +2643,7 @@ def test_predict_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_batch_predict_rest_bad_request( @@ -2669,10 +2724,13 @@ def test_batch_predict_rest_interceptors(null_interceptor): ), mock.patch.object( transports.PredictionServiceRestInterceptor, "post_batch_predict" ) as post, mock.patch.object( + transports.PredictionServiceRestInterceptor, "post_batch_predict_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.PredictionServiceRestInterceptor, "pre_batch_predict" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = prediction_service.BatchPredictRequest.pb( prediction_service.BatchPredictRequest() ) @@ -2696,6 +2754,7 @@ def test_batch_predict_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.batch_predict( request, @@ -2707,6 +2766,7 @@ def test_batch_predict_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_initialize_client_w_rest(): diff --git 
a/packages/google-cloud-automl/tests/unit/gapic/automl_v1beta1/test_auto_ml.py b/packages/google-cloud-automl/tests/unit/gapic/automl_v1beta1/test_auto_ml.py index 0b3cbcce8160..a5d9886264c0 100644 --- a/packages/google-cloud-automl/tests/unit/gapic/automl_v1beta1/test_auto_ml.py +++ b/packages/google-cloud-automl/tests/unit/gapic/automl_v1beta1/test_auto_ml.py @@ -97,6 +97,13 @@ from google.cloud.automl_v1beta1.types import table_spec from google.cloud.automl_v1beta1.types import table_spec as gca_table_spec +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -319,6 +326,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = AutoMlClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = AutoMlClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -16571,10 +16621,13 @@ def test_create_dataset_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AutoMlRestInterceptor, "post_create_dataset" ) as post, mock.patch.object( + transports.AutoMlRestInterceptor, "post_create_dataset_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AutoMlRestInterceptor, "pre_create_dataset" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.CreateDatasetRequest.pb(service.CreateDatasetRequest()) transcode.return_value = { "method": "post", @@ -16596,6 +16649,7 @@ def test_create_dataset_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gca_dataset.Dataset() + post_with_metadata.return_value = gca_dataset.Dataset(), metadata client.create_dataset( request, @@ -16607,6 +16661,7 @@ def test_create_dataset_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_dataset_rest_bad_request(request_type=service.GetDatasetRequest): @@ -16695,10 +16750,13 @@ def test_get_dataset_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( 
transports.AutoMlRestInterceptor, "post_get_dataset" ) as post, mock.patch.object( + transports.AutoMlRestInterceptor, "post_get_dataset_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AutoMlRestInterceptor, "pre_get_dataset" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.GetDatasetRequest.pb(service.GetDatasetRequest()) transcode.return_value = { "method": "post", @@ -16720,6 +16778,7 @@ def test_get_dataset_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = dataset.Dataset() + post_with_metadata.return_value = dataset.Dataset(), metadata client.get_dataset( request, @@ -16731,6 +16790,7 @@ def test_get_dataset_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_datasets_rest_bad_request(request_type=service.ListDatasetsRequest): @@ -16811,10 +16871,13 @@ def test_list_datasets_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AutoMlRestInterceptor, "post_list_datasets" ) as post, mock.patch.object( + transports.AutoMlRestInterceptor, "post_list_datasets_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AutoMlRestInterceptor, "pre_list_datasets" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.ListDatasetsRequest.pb(service.ListDatasetsRequest()) transcode.return_value = { "method": "post", @@ -16838,6 +16901,7 @@ def test_list_datasets_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = service.ListDatasetsResponse() + post_with_metadata.return_value = service.ListDatasetsResponse(), metadata client.list_datasets( request, @@ -16849,6 +16913,7 @@ def test_list_datasets_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_dataset_rest_bad_request(request_type=service.UpdateDatasetRequest): @@ -17035,10 +17100,13 @@ def test_update_dataset_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AutoMlRestInterceptor, "post_update_dataset" ) as post, mock.patch.object( + transports.AutoMlRestInterceptor, "post_update_dataset_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AutoMlRestInterceptor, "pre_update_dataset" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.UpdateDatasetRequest.pb(service.UpdateDatasetRequest()) transcode.return_value = { "method": "post", @@ -17060,6 +17128,7 @@ def test_update_dataset_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gca_dataset.Dataset() + post_with_metadata.return_value = gca_dataset.Dataset(), metadata client.update_dataset( request, @@ -17071,6 +17140,7 @@ def test_update_dataset_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_dataset_rest_bad_request(request_type=service.DeleteDatasetRequest): @@ -17147,10 +17217,13 @@ def test_delete_dataset_rest_interceptors(null_interceptor): ), mock.patch.object( transports.AutoMlRestInterceptor, "post_delete_dataset" ) as post, mock.patch.object( + transports.AutoMlRestInterceptor, "post_delete_dataset_with_metadata" + ) as post_with_metadata, 
mock.patch.object( transports.AutoMlRestInterceptor, "pre_delete_dataset" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.DeleteDatasetRequest.pb(service.DeleteDatasetRequest()) transcode.return_value = { "method": "post", @@ -17172,6 +17245,7 @@ def test_delete_dataset_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.delete_dataset( request, @@ -17183,6 +17257,7 @@ def test_delete_dataset_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_import_data_rest_bad_request(request_type=service.ImportDataRequest): @@ -17259,10 +17334,13 @@ def test_import_data_rest_interceptors(null_interceptor): ), mock.patch.object( transports.AutoMlRestInterceptor, "post_import_data" ) as post, mock.patch.object( + transports.AutoMlRestInterceptor, "post_import_data_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AutoMlRestInterceptor, "pre_import_data" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.ImportDataRequest.pb(service.ImportDataRequest()) transcode.return_value = { "method": "post", @@ -17284,6 +17362,7 @@ def test_import_data_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.import_data( request, @@ -17295,6 +17374,7 @@ def test_import_data_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_export_data_rest_bad_request(request_type=service.ExportDataRequest): @@ -17371,10 +17451,13 @@ def test_export_data_rest_interceptors(null_interceptor): ), mock.patch.object( transports.AutoMlRestInterceptor, "post_export_data" ) as post, mock.patch.object( + transports.AutoMlRestInterceptor, "post_export_data_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AutoMlRestInterceptor, "pre_export_data" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.ExportDataRequest.pb(service.ExportDataRequest()) transcode.return_value = { "method": "post", @@ -17396,6 +17479,7 @@ def test_export_data_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.export_data( request, @@ -17407,6 +17491,7 @@ def test_export_data_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_annotation_spec_rest_bad_request( @@ -17497,10 +17582,13 @@ def test_get_annotation_spec_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AutoMlRestInterceptor, "post_get_annotation_spec" ) as post, mock.patch.object( + transports.AutoMlRestInterceptor, "post_get_annotation_spec_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AutoMlRestInterceptor, "pre_get_annotation_spec" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.GetAnnotationSpecRequest.pb( 
service.GetAnnotationSpecRequest() ) @@ -17526,6 +17614,7 @@ def test_get_annotation_spec_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = annotation_spec.AnnotationSpec() + post_with_metadata.return_value = annotation_spec.AnnotationSpec(), metadata client.get_annotation_spec( request, @@ -17537,6 +17626,7 @@ def test_get_annotation_spec_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_table_spec_rest_bad_request(request_type=service.GetTableSpecRequest): @@ -17631,10 +17721,13 @@ def test_get_table_spec_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AutoMlRestInterceptor, "post_get_table_spec" ) as post, mock.patch.object( + transports.AutoMlRestInterceptor, "post_get_table_spec_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AutoMlRestInterceptor, "pre_get_table_spec" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.GetTableSpecRequest.pb(service.GetTableSpecRequest()) transcode.return_value = { "method": "post", @@ -17656,6 +17749,7 @@ def test_get_table_spec_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = table_spec.TableSpec() + post_with_metadata.return_value = table_spec.TableSpec(), metadata client.get_table_spec( request, @@ -17667,6 +17761,7 @@ def test_get_table_spec_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_table_specs_rest_bad_request(request_type=service.ListTableSpecsRequest): @@ -17747,10 +17842,13 @@ def test_list_table_specs_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AutoMlRestInterceptor, "post_list_table_specs" ) as post, mock.patch.object( + transports.AutoMlRestInterceptor, "post_list_table_specs_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AutoMlRestInterceptor, "pre_list_table_specs" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.ListTableSpecsRequest.pb(service.ListTableSpecsRequest()) transcode.return_value = { "method": "post", @@ -17774,6 +17872,7 @@ def test_list_table_specs_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = service.ListTableSpecsResponse() + post_with_metadata.return_value = service.ListTableSpecsResponse(), metadata client.list_table_specs( request, @@ -17785,6 +17884,7 @@ def test_list_table_specs_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_table_spec_rest_bad_request( @@ -17969,10 +18069,13 @@ def test_update_table_spec_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AutoMlRestInterceptor, "post_update_table_spec" ) as post, mock.patch.object( + transports.AutoMlRestInterceptor, "post_update_table_spec_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AutoMlRestInterceptor, "pre_update_table_spec" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.UpdateTableSpecRequest.pb(service.UpdateTableSpecRequest()) transcode.return_value = { "method": "post", @@ -17994,6 +18097,7 @@ def 
test_update_table_spec_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gca_table_spec.TableSpec() + post_with_metadata.return_value = gca_table_spec.TableSpec(), metadata client.update_table_spec( request, @@ -18005,6 +18109,7 @@ def test_update_table_spec_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_column_spec_rest_bad_request(request_type=service.GetColumnSpecRequest): @@ -18093,10 +18198,13 @@ def test_get_column_spec_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AutoMlRestInterceptor, "post_get_column_spec" ) as post, mock.patch.object( + transports.AutoMlRestInterceptor, "post_get_column_spec_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AutoMlRestInterceptor, "pre_get_column_spec" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.GetColumnSpecRequest.pb(service.GetColumnSpecRequest()) transcode.return_value = { "method": "post", @@ -18118,6 +18226,7 @@ def test_get_column_spec_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = column_spec.ColumnSpec() + post_with_metadata.return_value = column_spec.ColumnSpec(), metadata client.get_column_spec( request, @@ -18129,6 +18238,7 @@ def test_get_column_spec_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_column_specs_rest_bad_request( @@ -18215,10 +18325,13 @@ def test_list_column_specs_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AutoMlRestInterceptor, "post_list_column_specs" ) as post, mock.patch.object( + transports.AutoMlRestInterceptor, "post_list_column_specs_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AutoMlRestInterceptor, "pre_list_column_specs" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.ListColumnSpecsRequest.pb(service.ListColumnSpecsRequest()) transcode.return_value = { "method": "post", @@ -18242,6 +18355,7 @@ def test_list_column_specs_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = service.ListColumnSpecsResponse() + post_with_metadata.return_value = service.ListColumnSpecsResponse(), metadata client.list_column_specs( request, @@ -18253,6 +18367,7 @@ def test_list_column_specs_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_column_spec_rest_bad_request( @@ -18452,10 +18567,13 @@ def test_update_column_spec_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AutoMlRestInterceptor, "post_update_column_spec" ) as post, mock.patch.object( + transports.AutoMlRestInterceptor, "post_update_column_spec_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AutoMlRestInterceptor, "pre_update_column_spec" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.UpdateColumnSpecRequest.pb( service.UpdateColumnSpecRequest() ) @@ -18479,6 +18597,7 @@ def test_update_column_spec_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gca_column_spec.ColumnSpec() + 
post_with_metadata.return_value = gca_column_spec.ColumnSpec(), metadata client.update_column_spec( request, @@ -18490,6 +18609,7 @@ def test_update_column_spec_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_model_rest_bad_request(request_type=service.CreateModelRequest): @@ -18724,10 +18844,13 @@ def test_create_model_rest_interceptors(null_interceptor): ), mock.patch.object( transports.AutoMlRestInterceptor, "post_create_model" ) as post, mock.patch.object( + transports.AutoMlRestInterceptor, "post_create_model_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AutoMlRestInterceptor, "pre_create_model" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.CreateModelRequest.pb(service.CreateModelRequest()) transcode.return_value = { "method": "post", @@ -18749,6 +18872,7 @@ def test_create_model_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_model( request, @@ -18760,6 +18884,7 @@ def test_create_model_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_model_rest_bad_request(request_type=service.GetModelRequest): @@ -18846,10 +18971,13 @@ def test_get_model_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AutoMlRestInterceptor, "post_get_model" ) as post, mock.patch.object( + transports.AutoMlRestInterceptor, "post_get_model_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AutoMlRestInterceptor, "pre_get_model" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.GetModelRequest.pb(service.GetModelRequest()) transcode.return_value = { "method": "post", @@ -18871,6 +18999,7 @@ def test_get_model_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = model.Model() + post_with_metadata.return_value = model.Model(), metadata client.get_model( request, @@ -18882,6 +19011,7 @@ def test_get_model_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_models_rest_bad_request(request_type=service.ListModelsRequest): @@ -18962,10 +19092,13 @@ def test_list_models_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AutoMlRestInterceptor, "post_list_models" ) as post, mock.patch.object( + transports.AutoMlRestInterceptor, "post_list_models_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AutoMlRestInterceptor, "pre_list_models" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.ListModelsRequest.pb(service.ListModelsRequest()) transcode.return_value = { "method": "post", @@ -18987,6 +19120,7 @@ def test_list_models_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = service.ListModelsResponse() + post_with_metadata.return_value = service.ListModelsResponse(), metadata client.list_models( request, @@ -18998,6 +19132,7 @@ def test_list_models_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + 
post_with_metadata.assert_called_once() def test_delete_model_rest_bad_request(request_type=service.DeleteModelRequest): @@ -19074,10 +19209,13 @@ def test_delete_model_rest_interceptors(null_interceptor): ), mock.patch.object( transports.AutoMlRestInterceptor, "post_delete_model" ) as post, mock.patch.object( + transports.AutoMlRestInterceptor, "post_delete_model_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AutoMlRestInterceptor, "pre_delete_model" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.DeleteModelRequest.pb(service.DeleteModelRequest()) transcode.return_value = { "method": "post", @@ -19099,6 +19237,7 @@ def test_delete_model_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.delete_model( request, @@ -19110,6 +19249,7 @@ def test_delete_model_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_deploy_model_rest_bad_request(request_type=service.DeployModelRequest): @@ -19186,10 +19326,13 @@ def test_deploy_model_rest_interceptors(null_interceptor): ), mock.patch.object( transports.AutoMlRestInterceptor, "post_deploy_model" ) as post, mock.patch.object( + transports.AutoMlRestInterceptor, "post_deploy_model_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AutoMlRestInterceptor, "pre_deploy_model" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.DeployModelRequest.pb(service.DeployModelRequest()) transcode.return_value = { "method": "post", @@ -19211,6 +19354,7 @@ def test_deploy_model_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.deploy_model( request, @@ -19222,6 +19366,7 @@ def test_deploy_model_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_undeploy_model_rest_bad_request(request_type=service.UndeployModelRequest): @@ -19298,10 +19443,13 @@ def test_undeploy_model_rest_interceptors(null_interceptor): ), mock.patch.object( transports.AutoMlRestInterceptor, "post_undeploy_model" ) as post, mock.patch.object( + transports.AutoMlRestInterceptor, "post_undeploy_model_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AutoMlRestInterceptor, "pre_undeploy_model" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.UndeployModelRequest.pb(service.UndeployModelRequest()) transcode.return_value = { "method": "post", @@ -19323,6 +19471,7 @@ def test_undeploy_model_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.undeploy_model( request, @@ -19334,6 +19483,7 @@ def test_undeploy_model_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_export_model_rest_bad_request(request_type=service.ExportModelRequest): @@ -19410,10 +19560,13 @@ def test_export_model_rest_interceptors(null_interceptor): ), 
mock.patch.object( transports.AutoMlRestInterceptor, "post_export_model" ) as post, mock.patch.object( + transports.AutoMlRestInterceptor, "post_export_model_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AutoMlRestInterceptor, "pre_export_model" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.ExportModelRequest.pb(service.ExportModelRequest()) transcode.return_value = { "method": "post", @@ -19435,6 +19588,7 @@ def test_export_model_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.export_model( request, @@ -19446,6 +19600,7 @@ def test_export_model_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_export_evaluated_examples_rest_bad_request( @@ -19524,10 +19679,13 @@ def test_export_evaluated_examples_rest_interceptors(null_interceptor): ), mock.patch.object( transports.AutoMlRestInterceptor, "post_export_evaluated_examples" ) as post, mock.patch.object( + transports.AutoMlRestInterceptor, "post_export_evaluated_examples_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AutoMlRestInterceptor, "pre_export_evaluated_examples" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.ExportEvaluatedExamplesRequest.pb( service.ExportEvaluatedExamplesRequest() ) @@ -19551,6 +19709,7 @@ def test_export_evaluated_examples_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.export_evaluated_examples( request, @@ -19562,6 +19721,7 @@ def test_export_evaluated_examples_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_model_evaluation_rest_bad_request( @@ -19654,10 +19814,13 @@ def test_get_model_evaluation_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AutoMlRestInterceptor, "post_get_model_evaluation" ) as post, mock.patch.object( + transports.AutoMlRestInterceptor, "post_get_model_evaluation_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AutoMlRestInterceptor, "pre_get_model_evaluation" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.GetModelEvaluationRequest.pb( service.GetModelEvaluationRequest() ) @@ -19683,6 +19846,7 @@ def test_get_model_evaluation_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = model_evaluation.ModelEvaluation() + post_with_metadata.return_value = model_evaluation.ModelEvaluation(), metadata client.get_model_evaluation( request, @@ -19694,6 +19858,7 @@ def test_get_model_evaluation_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_model_evaluations_rest_bad_request( @@ -19776,10 +19941,13 @@ def test_list_model_evaluations_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AutoMlRestInterceptor, "post_list_model_evaluations" ) as post, mock.patch.object( + transports.AutoMlRestInterceptor, 
"post_list_model_evaluations_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AutoMlRestInterceptor, "pre_list_model_evaluations" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.ListModelEvaluationsRequest.pb( service.ListModelEvaluationsRequest() ) @@ -19805,6 +19973,10 @@ def test_list_model_evaluations_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = service.ListModelEvaluationsResponse() + post_with_metadata.return_value = ( + service.ListModelEvaluationsResponse(), + metadata, + ) client.list_model_evaluations( request, @@ -19816,6 +19988,7 @@ def test_list_model_evaluations_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_initialize_client_w_rest(): diff --git a/packages/google-cloud-automl/tests/unit/gapic/automl_v1beta1/test_prediction_service.py b/packages/google-cloud-automl/tests/unit/gapic/automl_v1beta1/test_prediction_service.py index 7b578a53cecd..90e6ed63e197 100644 --- a/packages/google-cloud-automl/tests/unit/gapic/automl_v1beta1/test_prediction_service.py +++ b/packages/google-cloud-automl/tests/unit/gapic/automl_v1beta1/test_prediction_service.py @@ -78,6 +78,13 @@ text_segment, ) +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -345,6 +352,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = PredictionServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = PredictionServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -2551,10 +2601,13 @@ def test_predict_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.PredictionServiceRestInterceptor, "post_predict" ) as post, mock.patch.object( + transports.PredictionServiceRestInterceptor, "post_predict_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.PredictionServiceRestInterceptor, "pre_predict" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = prediction_service.PredictRequest.pb( prediction_service.PredictRequest() ) @@ -2580,6 +2633,7 @@ def test_predict_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = prediction_service.PredictResponse() + post_with_metadata.return_value = prediction_service.PredictResponse(), metadata client.predict( request, @@ -2591,6 +2645,7 @@ def test_predict_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_batch_predict_rest_bad_request( @@ -2671,10 +2726,13 @@ def test_batch_predict_rest_interceptors(null_interceptor): ), mock.patch.object( transports.PredictionServiceRestInterceptor, "post_batch_predict" ) as post, mock.patch.object( + transports.PredictionServiceRestInterceptor, "post_batch_predict_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.PredictionServiceRestInterceptor, "pre_batch_predict" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = prediction_service.BatchPredictRequest.pb( prediction_service.BatchPredictRequest() ) @@ -2698,6 +2756,7 @@ def test_batch_predict_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.batch_predict( request, @@ -2709,6 +2768,7 @@ def test_batch_predict_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_initialize_client_w_rest(): diff --git 
a/packages/google-cloud-backupdr/CHANGELOG.md b/packages/google-cloud-backupdr/CHANGELOG.md index 25f61222d273..8f77993ddef8 100644 --- a/packages/google-cloud-backupdr/CHANGELOG.md +++ b/packages/google-cloud-backupdr/CHANGELOG.md @@ -1,5 +1,47 @@ # Changelog +## [0.2.1](https://github.com/googleapis/google-cloud-python/compare/google-cloud-backupdr-v0.2.0...google-cloud-backupdr-v0.2.1) (2025-02-12) + + +### Features + +* Add REST Interceptors which support reading metadata ([b1c3ce8](https://github.com/googleapis/google-cloud-python/commit/b1c3ce8b271e9d22afabcde054e81dcedae6b0ef)) +* Add support for reading selective GAPIC generation methods from service YAML ([b1c3ce8](https://github.com/googleapis/google-cloud-python/commit/b1c3ce8b271e9d22afabcde054e81dcedae6b0ef)) + +## [0.2.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-backupdr-v0.1.7...google-cloud-backupdr-v0.2.0) (2025-01-13) + + +### ⚠ BREAKING CHANGES + +* Update field behavior of `resource_type` field in message `BackupPlanAssociation` to `REQUIRED` + +### Features + +* `ignore_backup_plan_references` added to the DeleteBackupVaultRequest ([b5cdea3](https://github.com/googleapis/google-cloud-python/commit/b5cdea3f1d59f67ff0bd01d1891abf948a4f5582)) +* add enum to Backup Vault Access Restriction field ([b5cdea3](https://github.com/googleapis/google-cloud-python/commit/b5cdea3f1d59f67ff0bd01d1891abf948a4f5582)) +* add InitializeServiceAPI ([b5cdea3](https://github.com/googleapis/google-cloud-python/commit/b5cdea3f1d59f67ff0bd01d1891abf948a4f5582)) +* Update field behavior of `networks` field in message `ManagementServer` to `OPTIONAL` ([b5cdea3](https://github.com/googleapis/google-cloud-python/commit/b5cdea3f1d59f67ff0bd01d1891abf948a4f5582)) + + +### Bug Fixes + +* Update field behavior of `resource_type` field in message `BackupPlanAssociation` to `REQUIRED` ([b5cdea3](https://github.com/googleapis/google-cloud-python/commit/b5cdea3f1d59f67ff0bd01d1891abf948a4f5582)) + + +### Documentation + +* A comment for enum value `ACCESS_RESTRICTION_UNSPECIFIED` in enum `AccessRestriction` is changed ([b5cdea3](https://github.com/googleapis/google-cloud-python/commit/b5cdea3f1d59f67ff0bd01d1891abf948a4f5582)) +* A comment for field `access_restriction` in message `.google.cloud.backupdr.v1.BackupVault` is changed ([b5cdea3](https://github.com/googleapis/google-cloud-python/commit/b5cdea3f1d59f67ff0bd01d1891abf948a4f5582)) +* A comment for field `backup_retention_days` in message `.google.cloud.backupdr.v1.BackupRule` is changed ([b5cdea3](https://github.com/googleapis/google-cloud-python/commit/b5cdea3f1d59f67ff0bd01d1891abf948a4f5582)) +* A comment for field `data_source` in message `.google.cloud.backupdr.v1.BackupPlanAssociation` is changed ([b5cdea3](https://github.com/googleapis/google-cloud-python/commit/b5cdea3f1d59f67ff0bd01d1891abf948a4f5582)) +* A comment for field `last_backup_error` in message `.google.cloud.backupdr.v1.RuleConfigInfo` is changed ([b5cdea3](https://github.com/googleapis/google-cloud-python/commit/b5cdea3f1d59f67ff0bd01d1891abf948a4f5582)) +* A comment for field `networks` in message `.google.cloud.backupdr.v1.ManagementServer` is changed ([b5cdea3](https://github.com/googleapis/google-cloud-python/commit/b5cdea3f1d59f67ff0bd01d1891abf948a4f5582)) +* A comment for field `requested_cancellation` in message `.google.cloud.backupdr.v1.OperationMetadata` is changed ([b5cdea3](https://github.com/googleapis/google-cloud-python/commit/b5cdea3f1d59f67ff0bd01d1891abf948a4f5582)) +* A 
comment for field `resource_type` in message `.google.cloud.backupdr.v1.BackupPlan` is changed ([b5cdea3](https://github.com/googleapis/google-cloud-python/commit/b5cdea3f1d59f67ff0bd01d1891abf948a4f5582)) +* A comment for field `resource_type` in message `.google.cloud.backupdr.v1.BackupPlanAssociation` is changed ([b5cdea3](https://github.com/googleapis/google-cloud-python/commit/b5cdea3f1d59f67ff0bd01d1891abf948a4f5582)) +* A comment for field `rule_id` in message `.google.cloud.backupdr.v1.RuleConfigInfo` is changed ([b5cdea3](https://github.com/googleapis/google-cloud-python/commit/b5cdea3f1d59f67ff0bd01d1891abf948a4f5582)) +* A comment for field `uid` in message `.google.cloud.backupdr.v1.BackupVault` is changed ([b5cdea3](https://github.com/googleapis/google-cloud-python/commit/b5cdea3f1d59f67ff0bd01d1891abf948a4f5582)) + ## [0.1.7](https://github.com/googleapis/google-cloud-python/compare/google-cloud-backupdr-v0.1.6...google-cloud-backupdr-v0.1.7) (2024-12-12) diff --git a/packages/google-cloud-backupdr/README.rst b/packages/google-cloud-backupdr/README.rst index bca44a4c97c1..245aece9a373 100644 --- a/packages/google-cloud-backupdr/README.rst +++ b/packages/google-cloud-backupdr/README.rst @@ -26,12 +26,12 @@ In order to use this library, you first need to go through the following steps: 1. `Select or create a Cloud Platform project.`_ 2. `Enable billing for your project.`_ 3. `Enable the Backup and DR Service API.`_ -4. `Setup Authentication.`_ +4. `Set up Authentication.`_ .. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project .. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project .. _Enable the Backup and DR Service API.: https://cloud.google.com/backup-disaster-recovery/docs/concepts/backup-dr -.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html +.. _Set up Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html Installation ~~~~~~~~~~~~ diff --git a/packages/google-cloud-backupdr/google/cloud/backupdr/__init__.py b/packages/google-cloud-backupdr/google/cloud/backupdr/__init__.py index 5ab4e805a40b..346133baf2bf 100644 --- a/packages/google-cloud-backupdr/google/cloud/backupdr/__init__.py +++ b/packages/google-cloud-backupdr/google/cloud/backupdr/__init__.py @@ -24,6 +24,8 @@ CreateManagementServerRequest, DeleteManagementServerRequest, GetManagementServerRequest, + InitializeServiceRequest, + InitializeServiceResponse, ListManagementServersRequest, ListManagementServersResponse, ManagementServer, @@ -128,6 +130,8 @@ "CreateManagementServerRequest", "DeleteManagementServerRequest", "GetManagementServerRequest", + "InitializeServiceRequest", + "InitializeServiceResponse", "ListManagementServersRequest", "ListManagementServersResponse", "ManagementServer", diff --git a/packages/google-cloud-backupdr/google/cloud/backupdr/gapic_version.py b/packages/google-cloud-backupdr/google/cloud/backupdr/gapic_version.py index cf5493b86bbc..6d8247638d59 100644 --- a/packages/google-cloud-backupdr/google/cloud/backupdr/gapic_version.py +++ b/packages/google-cloud-backupdr/google/cloud/backupdr/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.1.7" # {x-release-please-version} +__version__ = "0.2.1" # {x-release-please-version} diff --git a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/__init__.py b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/__init__.py index eddcfa53658f..35f5bc1adad8 100644 --- a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/__init__.py +++ b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/__init__.py @@ -23,6 +23,8 @@ CreateManagementServerRequest, DeleteManagementServerRequest, GetManagementServerRequest, + InitializeServiceRequest, + InitializeServiceResponse, ListManagementServersRequest, ListManagementServersResponse, ManagementServer, @@ -173,6 +175,8 @@ "GetDataSourceRequest", "GetManagementServerRequest", "GuestOsFeature", + "InitializeServiceRequest", + "InitializeServiceResponse", "InstanceParams", "KeyRevocationActionType", "ListBackupPlanAssociationsRequest", diff --git a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/gapic_metadata.json b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/gapic_metadata.json index 902530688c39..5534a346d83c 100644 --- a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/gapic_metadata.json +++ b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/gapic_metadata.json @@ -90,6 +90,11 @@ "get_management_server" ] }, + "InitializeService": { + "methods": [ + "initialize_service" + ] + }, "ListBackupPlanAssociations": { "methods": [ "list_backup_plan_associations" @@ -230,6 +235,11 @@ "get_management_server" ] }, + "InitializeService": { + "methods": [ + "initialize_service" + ] + }, "ListBackupPlanAssociations": { "methods": [ "list_backup_plan_associations" @@ -370,6 +380,11 @@ "get_management_server" ] }, + "InitializeService": { + "methods": [ + "initialize_service" + ] + }, "ListBackupPlanAssociations": { "methods": [ "list_backup_plan_associations" diff --git a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/gapic_version.py b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/gapic_version.py index cf5493b86bbc..6d8247638d59 100644 --- a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/gapic_version.py +++ b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.1.7" # {x-release-please-version} +__version__ = "0.2.1" # {x-release-please-version} diff --git a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/async_client.py b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/async_client.py index c8870919dd24..c19c93260a4c 100644 --- a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/async_client.py +++ b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/async_client.py @@ -3153,6 +3153,7 @@ async def sample_create_backup_plan_association(): # Initialize request argument(s) backup_plan_association = backupdr_v1.BackupPlanAssociation() + backup_plan_association.resource_type = "resource_type_value" backup_plan_association.resource = "resource_value" backup_plan_association.backup_plan = "backup_plan_value" @@ -3796,6 +3797,108 @@ async def sample_trigger_backup(): # Done; return the response. 
return response + async def initialize_service( + self, + request: Optional[Union[backupdr.InitializeServiceRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Initializes the service related config for a project. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + async def sample_initialize_service(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + request = backupdr_v1.InitializeServiceRequest( + name="name_value", + resource_type="resource_type_value", + ) + + # Make the request + operation = client.initialize_service(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.backupdr_v1.types.InitializeServiceRequest, dict]]): + The request object. Request message for initializing the + service. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.backupdr_v1.types.InitializeServiceResponse` + Response message for initializing the service. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, backupdr.InitializeServiceRequest): + request = backupdr.InitializeServiceRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.initialize_service + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + backupdr.InitializeServiceResponse, + metadata_type=backupdr.OperationMetadata, + ) + + # Done; return the response. 
+ return response + async def list_operations( self, request: Optional[operations_pb2.ListOperationsRequest] = None, diff --git a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/client.py b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/client.py index fa774032bfe4..063f960859f8 100644 --- a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/client.py +++ b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -621,6 +623,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -3590,6 +3619,7 @@ def sample_create_backup_plan_association(): # Initialize request argument(s) backup_plan_association = backupdr_v1.BackupPlanAssociation() + backup_plan_association.resource_type = "resource_type_value" backup_plan_association.resource = "resource_value" backup_plan_association.backup_plan = "backup_plan_value" @@ -4226,6 +4256,106 @@ def sample_trigger_backup(): # Done; return the response. return response + def initialize_service( + self, + request: Optional[Union[backupdr.InitializeServiceRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: + r"""Initializes the service related config for a project. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + def sample_initialize_service(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + request = backupdr_v1.InitializeServiceRequest( + name="name_value", + resource_type="resource_type_value", + ) + + # Make the request + operation = client.initialize_service(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.backupdr_v1.types.InitializeServiceRequest, dict]): + The request object. Request message for initializing the + service. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.backupdr_v1.types.InitializeServiceResponse` + Response message for initializing the service. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, backupdr.InitializeServiceRequest): + request = backupdr.InitializeServiceRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.initialize_service] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + backupdr.InitializeServiceResponse, + metadata_type=backupdr.OperationMetadata, + ) + + # Done; return the response. + return response + def __enter__(self) -> "BackupDRClient": return self @@ -4283,16 +4413,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -4338,16 +4472,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def delete_operation( self, @@ -4570,16 +4708,20 @@ def set_iam_policy( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_iam_policy( self, @@ -4692,16 +4834,20 @@ def get_iam_policy( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def test_iam_permissions( self, @@ -4752,16 +4898,20 @@ def test_iam_permissions( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_location( self, @@ -4807,16 +4957,20 @@ def get_location( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def list_locations( self, @@ -4862,16 +5016,20 @@ def list_locations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. 
+ return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/transports/base.py b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/transports/base.py index 9b2382573ee9..776ac8fe0b13 100644 --- a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/transports/base.py +++ b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/transports/base.py @@ -371,6 +371,20 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.initialize_service: gapic_v1.method.wrap_method( + self.initialize_service, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), self.get_location: gapic_v1.method.wrap_method( self.get_location, default_timeout=None, @@ -698,6 +712,15 @@ def trigger_backup( ]: raise NotImplementedError() + @property + def initialize_service( + self, + ) -> Callable[ + [backupdr.InitializeServiceRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + @property def list_operations( self, diff --git a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/transports/grpc.py b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/transports/grpc.py index e2e13a00be74..e1ba2336e745 100644 --- a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/transports/grpc.py +++ b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/transports/grpc.py @@ -1089,6 +1089,32 @@ def trigger_backup( ) return self._stubs["trigger_backup"] + @property + def initialize_service( + self, + ) -> Callable[[backupdr.InitializeServiceRequest], operations_pb2.Operation]: + r"""Return a callable for the initialize service method over gRPC. + + Initializes the service related config for a project. + + Returns: + Callable[[~.InitializeServiceRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "initialize_service" not in self._stubs: + self._stubs["initialize_service"] = self._logged_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/InitializeService", + request_serializer=backupdr.InitializeServiceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["initialize_service"] + def close(self): self._logged_channel.close() diff --git a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/transports/grpc_asyncio.py b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/transports/grpc_asyncio.py index 80a10bbe29be..8f1fb5520184 100644 --- a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/transports/grpc_asyncio.py +++ b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/transports/grpc_asyncio.py @@ -1131,6 +1131,34 @@ def trigger_backup( ) return self._stubs["trigger_backup"] + @property + def initialize_service( + self, + ) -> Callable[ + [backupdr.InitializeServiceRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the initialize service method over gRPC. + + Initializes the service related config for a project. + + Returns: + Callable[[~.InitializeServiceRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "initialize_service" not in self._stubs: + self._stubs["initialize_service"] = self._logged_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/InitializeService", + request_serializer=backupdr.InitializeServiceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["initialize_service"] + def _prep_wrapped_messages(self, client_info): """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" self._wrapped_methods = { @@ -1368,6 +1396,20 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.initialize_service: self._wrap_method( + self.initialize_service, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), self.get_location: self._wrap_method( self.get_location, default_timeout=None, diff --git a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/transports/rest.py b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/transports/rest.py index e4db8c8a1cd0..e8ebc4f2a209 100644 --- a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/transports/rest.py +++ b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/transports/rest.py @@ -205,6 +205,14 @@ def post_get_management_server(self, response): logging.log(f"Received response: {response}") return response + def pre_initialize_service(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_initialize_service(self, response): + logging.log(f"Received response: {response}") + return response + def pre_list_backup_plan_associations(self, request, metadata): logging.log(f"Received request: {request}") return 
request, metadata @@ -318,12 +326,35 @@ def post_create_backup_plan( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_backup_plan - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_backup_plan_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the BackupDR server but before - it is returned to user code. + it is returned to user code. This `post_create_backup_plan` interceptor runs + before the `post_create_backup_plan_with_metadata` interceptor. """ return response + def post_create_backup_plan_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_backup_plan + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BackupDR server but before it is returned to user code. + + We recommend only using this `post_create_backup_plan_with_metadata` + interceptor in new development instead of the `post_create_backup_plan` interceptor. + When both interceptors are used, this `post_create_backup_plan_with_metadata` interceptor runs after the + `post_create_backup_plan` interceptor. The (possibly modified) response returned by + `post_create_backup_plan` will be passed to + `post_create_backup_plan_with_metadata`. + """ + return response, metadata + def pre_create_backup_plan_association( self, request: backupplanassociation.CreateBackupPlanAssociationRequest, @@ -344,12 +375,35 @@ def post_create_backup_plan_association( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_backup_plan_association - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_backup_plan_association_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the BackupDR server but before - it is returned to user code. + it is returned to user code. This `post_create_backup_plan_association` interceptor runs + before the `post_create_backup_plan_association_with_metadata` interceptor. """ return response + def post_create_backup_plan_association_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_backup_plan_association + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BackupDR server but before it is returned to user code. + + We recommend only using this `post_create_backup_plan_association_with_metadata` + interceptor in new development instead of the `post_create_backup_plan_association` interceptor. + When both interceptors are used, this `post_create_backup_plan_association_with_metadata` interceptor runs after the + `post_create_backup_plan_association` interceptor. The (possibly modified) response returned by + `post_create_backup_plan_association` will be passed to + `post_create_backup_plan_association_with_metadata`. 
+ """ + return response, metadata + def pre_create_backup_vault( self, request: backupvault.CreateBackupVaultRequest, @@ -369,12 +423,35 @@ def post_create_backup_vault( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_backup_vault - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_backup_vault_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the BackupDR server but before - it is returned to user code. + it is returned to user code. This `post_create_backup_vault` interceptor runs + before the `post_create_backup_vault_with_metadata` interceptor. """ return response + def post_create_backup_vault_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_backup_vault + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BackupDR server but before it is returned to user code. + + We recommend only using this `post_create_backup_vault_with_metadata` + interceptor in new development instead of the `post_create_backup_vault` interceptor. + When both interceptors are used, this `post_create_backup_vault_with_metadata` interceptor runs after the + `post_create_backup_vault` interceptor. The (possibly modified) response returned by + `post_create_backup_vault` will be passed to + `post_create_backup_vault_with_metadata`. + """ + return response, metadata + def pre_create_management_server( self, request: backupdr.CreateManagementServerRequest, @@ -394,12 +471,35 @@ def post_create_management_server( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_management_server - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_management_server_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the BackupDR server but before - it is returned to user code. + it is returned to user code. This `post_create_management_server` interceptor runs + before the `post_create_management_server_with_metadata` interceptor. """ return response + def post_create_management_server_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_management_server + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BackupDR server but before it is returned to user code. + + We recommend only using this `post_create_management_server_with_metadata` + interceptor in new development instead of the `post_create_management_server` interceptor. + When both interceptors are used, this `post_create_management_server_with_metadata` interceptor runs after the + `post_create_management_server` interceptor. The (possibly modified) response returned by + `post_create_management_server` will be passed to + `post_create_management_server_with_metadata`. + """ + return response, metadata + def pre_delete_backup( self, request: backupvault.DeleteBackupRequest, @@ -419,12 +519,35 @@ def post_delete_backup( ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_backup - Override in a subclass to manipulate the response + DEPRECATED. 
Please use the `post_delete_backup_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the BackupDR server but before - it is returned to user code. + it is returned to user code. This `post_delete_backup` interceptor runs + before the `post_delete_backup_with_metadata` interceptor. """ return response + def post_delete_backup_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_backup + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BackupDR server but before it is returned to user code. + + We recommend only using this `post_delete_backup_with_metadata` + interceptor in new development instead of the `post_delete_backup` interceptor. + When both interceptors are used, this `post_delete_backup_with_metadata` interceptor runs after the + `post_delete_backup` interceptor. The (possibly modified) response returned by + `post_delete_backup` will be passed to + `post_delete_backup_with_metadata`. + """ + return response, metadata + def pre_delete_backup_plan( self, request: backupplan.DeleteBackupPlanRequest, @@ -444,12 +567,35 @@ def post_delete_backup_plan( ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_backup_plan - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_backup_plan_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the BackupDR server but before - it is returned to user code. + it is returned to user code. This `post_delete_backup_plan` interceptor runs + before the `post_delete_backup_plan_with_metadata` interceptor. """ return response + def post_delete_backup_plan_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_backup_plan + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BackupDR server but before it is returned to user code. + + We recommend only using this `post_delete_backup_plan_with_metadata` + interceptor in new development instead of the `post_delete_backup_plan` interceptor. + When both interceptors are used, this `post_delete_backup_plan_with_metadata` interceptor runs after the + `post_delete_backup_plan` interceptor. The (possibly modified) response returned by + `post_delete_backup_plan` will be passed to + `post_delete_backup_plan_with_metadata`. + """ + return response, metadata + def pre_delete_backup_plan_association( self, request: backupplanassociation.DeleteBackupPlanAssociationRequest, @@ -470,12 +616,35 @@ def post_delete_backup_plan_association( ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_backup_plan_association - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_backup_plan_association_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the BackupDR server but before - it is returned to user code. + it is returned to user code. 
This `post_delete_backup_plan_association` interceptor runs + before the `post_delete_backup_plan_association_with_metadata` interceptor. """ return response + def post_delete_backup_plan_association_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_backup_plan_association + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BackupDR server but before it is returned to user code. + + We recommend only using this `post_delete_backup_plan_association_with_metadata` + interceptor in new development instead of the `post_delete_backup_plan_association` interceptor. + When both interceptors are used, this `post_delete_backup_plan_association_with_metadata` interceptor runs after the + `post_delete_backup_plan_association` interceptor. The (possibly modified) response returned by + `post_delete_backup_plan_association` will be passed to + `post_delete_backup_plan_association_with_metadata`. + """ + return response, metadata + def pre_delete_backup_vault( self, request: backupvault.DeleteBackupVaultRequest, @@ -495,12 +664,35 @@ def post_delete_backup_vault( ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_backup_vault - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_backup_vault_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the BackupDR server but before - it is returned to user code. + it is returned to user code. This `post_delete_backup_vault` interceptor runs + before the `post_delete_backup_vault_with_metadata` interceptor. """ return response + def post_delete_backup_vault_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_backup_vault + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BackupDR server but before it is returned to user code. + + We recommend only using this `post_delete_backup_vault_with_metadata` + interceptor in new development instead of the `post_delete_backup_vault` interceptor. + When both interceptors are used, this `post_delete_backup_vault_with_metadata` interceptor runs after the + `post_delete_backup_vault` interceptor. The (possibly modified) response returned by + `post_delete_backup_vault` will be passed to + `post_delete_backup_vault_with_metadata`. + """ + return response, metadata + def pre_delete_management_server( self, request: backupdr.DeleteManagementServerRequest, @@ -520,12 +712,35 @@ def post_delete_management_server( ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_management_server - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_management_server_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the BackupDR server but before - it is returned to user code. + it is returned to user code. This `post_delete_management_server` interceptor runs + before the `post_delete_management_server_with_metadata` interceptor. 
""" return response + def post_delete_management_server_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_management_server + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BackupDR server but before it is returned to user code. + + We recommend only using this `post_delete_management_server_with_metadata` + interceptor in new development instead of the `post_delete_management_server` interceptor. + When both interceptors are used, this `post_delete_management_server_with_metadata` interceptor runs after the + `post_delete_management_server` interceptor. The (possibly modified) response returned by + `post_delete_management_server` will be passed to + `post_delete_management_server_with_metadata`. + """ + return response, metadata + def pre_fetch_usable_backup_vaults( self, request: backupvault.FetchUsableBackupVaultsRequest, @@ -546,12 +761,38 @@ def post_fetch_usable_backup_vaults( ) -> backupvault.FetchUsableBackupVaultsResponse: """Post-rpc interceptor for fetch_usable_backup_vaults - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_fetch_usable_backup_vaults_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the BackupDR server but before - it is returned to user code. + it is returned to user code. This `post_fetch_usable_backup_vaults` interceptor runs + before the `post_fetch_usable_backup_vaults_with_metadata` interceptor. """ return response + def post_fetch_usable_backup_vaults_with_metadata( + self, + response: backupvault.FetchUsableBackupVaultsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + backupvault.FetchUsableBackupVaultsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for fetch_usable_backup_vaults + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BackupDR server but before it is returned to user code. + + We recommend only using this `post_fetch_usable_backup_vaults_with_metadata` + interceptor in new development instead of the `post_fetch_usable_backup_vaults` interceptor. + When both interceptors are used, this `post_fetch_usable_backup_vaults_with_metadata` interceptor runs after the + `post_fetch_usable_backup_vaults` interceptor. The (possibly modified) response returned by + `post_fetch_usable_backup_vaults` will be passed to + `post_fetch_usable_backup_vaults_with_metadata`. + """ + return response, metadata + def pre_get_backup( self, request: backupvault.GetBackupRequest, @@ -567,12 +808,35 @@ def pre_get_backup( def post_get_backup(self, response: backupvault.Backup) -> backupvault.Backup: """Post-rpc interceptor for get_backup - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_backup_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the BackupDR server but before - it is returned to user code. + it is returned to user code. This `post_get_backup` interceptor runs + before the `post_get_backup_with_metadata` interceptor. 
""" return response + def post_get_backup_with_metadata( + self, + response: backupvault.Backup, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[backupvault.Backup, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_backup + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BackupDR server but before it is returned to user code. + + We recommend only using this `post_get_backup_with_metadata` + interceptor in new development instead of the `post_get_backup` interceptor. + When both interceptors are used, this `post_get_backup_with_metadata` interceptor runs after the + `post_get_backup` interceptor. The (possibly modified) response returned by + `post_get_backup` will be passed to + `post_get_backup_with_metadata`. + """ + return response, metadata + def pre_get_backup_plan( self, request: backupplan.GetBackupPlanRequest, @@ -592,12 +856,35 @@ def post_get_backup_plan( ) -> backupplan.BackupPlan: """Post-rpc interceptor for get_backup_plan - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_backup_plan_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the BackupDR server but before - it is returned to user code. + it is returned to user code. This `post_get_backup_plan` interceptor runs + before the `post_get_backup_plan_with_metadata` interceptor. """ return response + def post_get_backup_plan_with_metadata( + self, + response: backupplan.BackupPlan, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[backupplan.BackupPlan, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_backup_plan + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BackupDR server but before it is returned to user code. + + We recommend only using this `post_get_backup_plan_with_metadata` + interceptor in new development instead of the `post_get_backup_plan` interceptor. + When both interceptors are used, this `post_get_backup_plan_with_metadata` interceptor runs after the + `post_get_backup_plan` interceptor. The (possibly modified) response returned by + `post_get_backup_plan` will be passed to + `post_get_backup_plan_with_metadata`. + """ + return response, metadata + def pre_get_backup_plan_association( self, request: backupplanassociation.GetBackupPlanAssociationRequest, @@ -618,12 +905,38 @@ def post_get_backup_plan_association( ) -> backupplanassociation.BackupPlanAssociation: """Post-rpc interceptor for get_backup_plan_association - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_backup_plan_association_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the BackupDR server but before - it is returned to user code. + it is returned to user code. This `post_get_backup_plan_association` interceptor runs + before the `post_get_backup_plan_association_with_metadata` interceptor. 
""" return response + def post_get_backup_plan_association_with_metadata( + self, + response: backupplanassociation.BackupPlanAssociation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + backupplanassociation.BackupPlanAssociation, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for get_backup_plan_association + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BackupDR server but before it is returned to user code. + + We recommend only using this `post_get_backup_plan_association_with_metadata` + interceptor in new development instead of the `post_get_backup_plan_association` interceptor. + When both interceptors are used, this `post_get_backup_plan_association_with_metadata` interceptor runs after the + `post_get_backup_plan_association` interceptor. The (possibly modified) response returned by + `post_get_backup_plan_association` will be passed to + `post_get_backup_plan_association_with_metadata`. + """ + return response, metadata + def pre_get_backup_vault( self, request: backupvault.GetBackupVaultRequest, @@ -643,12 +956,35 @@ def post_get_backup_vault( ) -> backupvault.BackupVault: """Post-rpc interceptor for get_backup_vault - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_backup_vault_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the BackupDR server but before - it is returned to user code. + it is returned to user code. This `post_get_backup_vault` interceptor runs + before the `post_get_backup_vault_with_metadata` interceptor. """ return response + def post_get_backup_vault_with_metadata( + self, + response: backupvault.BackupVault, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[backupvault.BackupVault, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_backup_vault + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BackupDR server but before it is returned to user code. + + We recommend only using this `post_get_backup_vault_with_metadata` + interceptor in new development instead of the `post_get_backup_vault` interceptor. + When both interceptors are used, this `post_get_backup_vault_with_metadata` interceptor runs after the + `post_get_backup_vault` interceptor. The (possibly modified) response returned by + `post_get_backup_vault` will be passed to + `post_get_backup_vault_with_metadata`. + """ + return response, metadata + def pre_get_data_source( self, request: backupvault.GetDataSourceRequest, @@ -668,12 +1004,35 @@ def post_get_data_source( ) -> backupvault.DataSource: """Post-rpc interceptor for get_data_source - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_data_source_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the BackupDR server but before - it is returned to user code. + it is returned to user code. This `post_get_data_source` interceptor runs + before the `post_get_data_source_with_metadata` interceptor. 
""" return response + def post_get_data_source_with_metadata( + self, + response: backupvault.DataSource, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[backupvault.DataSource, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_data_source + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BackupDR server but before it is returned to user code. + + We recommend only using this `post_get_data_source_with_metadata` + interceptor in new development instead of the `post_get_data_source` interceptor. + When both interceptors are used, this `post_get_data_source_with_metadata` interceptor runs after the + `post_get_data_source` interceptor. The (possibly modified) response returned by + `post_get_data_source` will be passed to + `post_get_data_source_with_metadata`. + """ + return response, metadata + def pre_get_management_server( self, request: backupdr.GetManagementServerRequest, @@ -693,12 +1052,83 @@ def post_get_management_server( ) -> backupdr.ManagementServer: """Post-rpc interceptor for get_management_server - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_management_server_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the BackupDR server but before - it is returned to user code. + it is returned to user code. This `post_get_management_server` interceptor runs + before the `post_get_management_server_with_metadata` interceptor. """ return response + def post_get_management_server_with_metadata( + self, + response: backupdr.ManagementServer, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[backupdr.ManagementServer, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_management_server + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BackupDR server but before it is returned to user code. + + We recommend only using this `post_get_management_server_with_metadata` + interceptor in new development instead of the `post_get_management_server` interceptor. + When both interceptors are used, this `post_get_management_server_with_metadata` interceptor runs after the + `post_get_management_server` interceptor. The (possibly modified) response returned by + `post_get_management_server` will be passed to + `post_get_management_server_with_metadata`. + """ + return response, metadata + + def pre_initialize_service( + self, + request: backupdr.InitializeServiceRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + backupdr.InitializeServiceRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for initialize_service + + Override in a subclass to manipulate the request or metadata + before they are sent to the BackupDR server. + """ + return request, metadata + + def post_initialize_service( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for initialize_service + + DEPRECATED. Please use the `post_initialize_service_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the BackupDR server but before + it is returned to user code. This `post_initialize_service` interceptor runs + before the `post_initialize_service_with_metadata` interceptor. 
+ """ + return response + + def post_initialize_service_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for initialize_service + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BackupDR server but before it is returned to user code. + + We recommend only using this `post_initialize_service_with_metadata` + interceptor in new development instead of the `post_initialize_service` interceptor. + When both interceptors are used, this `post_initialize_service_with_metadata` interceptor runs after the + `post_initialize_service` interceptor. The (possibly modified) response returned by + `post_initialize_service` will be passed to + `post_initialize_service_with_metadata`. + """ + return response, metadata + def pre_list_backup_plan_associations( self, request: backupplanassociation.ListBackupPlanAssociationsRequest, @@ -719,12 +1149,38 @@ def post_list_backup_plan_associations( ) -> backupplanassociation.ListBackupPlanAssociationsResponse: """Post-rpc interceptor for list_backup_plan_associations - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_backup_plan_associations_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the BackupDR server but before - it is returned to user code. + it is returned to user code. This `post_list_backup_plan_associations` interceptor runs + before the `post_list_backup_plan_associations_with_metadata` interceptor. """ return response + def post_list_backup_plan_associations_with_metadata( + self, + response: backupplanassociation.ListBackupPlanAssociationsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + backupplanassociation.ListBackupPlanAssociationsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_backup_plan_associations + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BackupDR server but before it is returned to user code. + + We recommend only using this `post_list_backup_plan_associations_with_metadata` + interceptor in new development instead of the `post_list_backup_plan_associations` interceptor. + When both interceptors are used, this `post_list_backup_plan_associations_with_metadata` interceptor runs after the + `post_list_backup_plan_associations` interceptor. The (possibly modified) response returned by + `post_list_backup_plan_associations` will be passed to + `post_list_backup_plan_associations_with_metadata`. + """ + return response, metadata + def pre_list_backup_plans( self, request: backupplan.ListBackupPlansRequest, @@ -744,12 +1200,37 @@ def post_list_backup_plans( ) -> backupplan.ListBackupPlansResponse: """Post-rpc interceptor for list_backup_plans - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_backup_plans_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the BackupDR server but before - it is returned to user code. + it is returned to user code. This `post_list_backup_plans` interceptor runs + before the `post_list_backup_plans_with_metadata` interceptor. 
""" return response + def post_list_backup_plans_with_metadata( + self, + response: backupplan.ListBackupPlansResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + backupplan.ListBackupPlansResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_backup_plans + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BackupDR server but before it is returned to user code. + + We recommend only using this `post_list_backup_plans_with_metadata` + interceptor in new development instead of the `post_list_backup_plans` interceptor. + When both interceptors are used, this `post_list_backup_plans_with_metadata` interceptor runs after the + `post_list_backup_plans` interceptor. The (possibly modified) response returned by + `post_list_backup_plans` will be passed to + `post_list_backup_plans_with_metadata`. + """ + return response, metadata + def pre_list_backups( self, request: backupvault.ListBackupsRequest, @@ -767,12 +1248,37 @@ def post_list_backups( ) -> backupvault.ListBackupsResponse: """Post-rpc interceptor for list_backups - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_backups_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the BackupDR server but before - it is returned to user code. + it is returned to user code. This `post_list_backups` interceptor runs + before the `post_list_backups_with_metadata` interceptor. """ return response + def post_list_backups_with_metadata( + self, + response: backupvault.ListBackupsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + backupvault.ListBackupsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_backups + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BackupDR server but before it is returned to user code. + + We recommend only using this `post_list_backups_with_metadata` + interceptor in new development instead of the `post_list_backups` interceptor. + When both interceptors are used, this `post_list_backups_with_metadata` interceptor runs after the + `post_list_backups` interceptor. The (possibly modified) response returned by + `post_list_backups` will be passed to + `post_list_backups_with_metadata`. + """ + return response, metadata + def pre_list_backup_vaults( self, request: backupvault.ListBackupVaultsRequest, @@ -792,12 +1298,37 @@ def post_list_backup_vaults( ) -> backupvault.ListBackupVaultsResponse: """Post-rpc interceptor for list_backup_vaults - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_backup_vaults_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the BackupDR server but before - it is returned to user code. + it is returned to user code. This `post_list_backup_vaults` interceptor runs + before the `post_list_backup_vaults_with_metadata` interceptor. 
""" return response + def post_list_backup_vaults_with_metadata( + self, + response: backupvault.ListBackupVaultsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + backupvault.ListBackupVaultsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_backup_vaults + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BackupDR server but before it is returned to user code. + + We recommend only using this `post_list_backup_vaults_with_metadata` + interceptor in new development instead of the `post_list_backup_vaults` interceptor. + When both interceptors are used, this `post_list_backup_vaults_with_metadata` interceptor runs after the + `post_list_backup_vaults` interceptor. The (possibly modified) response returned by + `post_list_backup_vaults` will be passed to + `post_list_backup_vaults_with_metadata`. + """ + return response, metadata + def pre_list_data_sources( self, request: backupvault.ListDataSourcesRequest, @@ -817,12 +1348,37 @@ def post_list_data_sources( ) -> backupvault.ListDataSourcesResponse: """Post-rpc interceptor for list_data_sources - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_data_sources_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the BackupDR server but before - it is returned to user code. + it is returned to user code. This `post_list_data_sources` interceptor runs + before the `post_list_data_sources_with_metadata` interceptor. """ return response + def post_list_data_sources_with_metadata( + self, + response: backupvault.ListDataSourcesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + backupvault.ListDataSourcesResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_data_sources + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BackupDR server but before it is returned to user code. + + We recommend only using this `post_list_data_sources_with_metadata` + interceptor in new development instead of the `post_list_data_sources` interceptor. + When both interceptors are used, this `post_list_data_sources_with_metadata` interceptor runs after the + `post_list_data_sources` interceptor. The (possibly modified) response returned by + `post_list_data_sources` will be passed to + `post_list_data_sources_with_metadata`. + """ + return response, metadata + def pre_list_management_servers( self, request: backupdr.ListManagementServersRequest, @@ -842,12 +1398,37 @@ def post_list_management_servers( ) -> backupdr.ListManagementServersResponse: """Post-rpc interceptor for list_management_servers - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_management_servers_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the BackupDR server but before - it is returned to user code. + it is returned to user code. This `post_list_management_servers` interceptor runs + before the `post_list_management_servers_with_metadata` interceptor. 
""" return response + def post_list_management_servers_with_metadata( + self, + response: backupdr.ListManagementServersResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + backupdr.ListManagementServersResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_management_servers + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BackupDR server but before it is returned to user code. + + We recommend only using this `post_list_management_servers_with_metadata` + interceptor in new development instead of the `post_list_management_servers` interceptor. + When both interceptors are used, this `post_list_management_servers_with_metadata` interceptor runs after the + `post_list_management_servers` interceptor. The (possibly modified) response returned by + `post_list_management_servers` will be passed to + `post_list_management_servers_with_metadata`. + """ + return response, metadata + def pre_restore_backup( self, request: backupvault.RestoreBackupRequest, @@ -867,12 +1448,35 @@ def post_restore_backup( ) -> operations_pb2.Operation: """Post-rpc interceptor for restore_backup - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_restore_backup_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the BackupDR server but before - it is returned to user code. + it is returned to user code. This `post_restore_backup` interceptor runs + before the `post_restore_backup_with_metadata` interceptor. """ return response + def post_restore_backup_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for restore_backup + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BackupDR server but before it is returned to user code. + + We recommend only using this `post_restore_backup_with_metadata` + interceptor in new development instead of the `post_restore_backup` interceptor. + When both interceptors are used, this `post_restore_backup_with_metadata` interceptor runs after the + `post_restore_backup` interceptor. The (possibly modified) response returned by + `post_restore_backup` will be passed to + `post_restore_backup_with_metadata`. + """ + return response, metadata + def pre_trigger_backup( self, request: backupplanassociation.TriggerBackupRequest, @@ -893,12 +1497,35 @@ def post_trigger_backup( ) -> operations_pb2.Operation: """Post-rpc interceptor for trigger_backup - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_trigger_backup_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the BackupDR server but before - it is returned to user code. + it is returned to user code. This `post_trigger_backup` interceptor runs + before the `post_trigger_backup_with_metadata` interceptor. 
""" return response + def post_trigger_backup_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for trigger_backup + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BackupDR server but before it is returned to user code. + + We recommend only using this `post_trigger_backup_with_metadata` + interceptor in new development instead of the `post_trigger_backup` interceptor. + When both interceptors are used, this `post_trigger_backup_with_metadata` interceptor runs after the + `post_trigger_backup` interceptor. The (possibly modified) response returned by + `post_trigger_backup` will be passed to + `post_trigger_backup_with_metadata`. + """ + return response, metadata + def pre_update_backup( self, request: backupvault.UpdateBackupRequest, @@ -918,12 +1545,35 @@ def post_update_backup( ) -> operations_pb2.Operation: """Post-rpc interceptor for update_backup - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_backup_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the BackupDR server but before - it is returned to user code. + it is returned to user code. This `post_update_backup` interceptor runs + before the `post_update_backup_with_metadata` interceptor. """ return response + def post_update_backup_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_backup + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BackupDR server but before it is returned to user code. + + We recommend only using this `post_update_backup_with_metadata` + interceptor in new development instead of the `post_update_backup` interceptor. + When both interceptors are used, this `post_update_backup_with_metadata` interceptor runs after the + `post_update_backup` interceptor. The (possibly modified) response returned by + `post_update_backup` will be passed to + `post_update_backup_with_metadata`. + """ + return response, metadata + def pre_update_backup_vault( self, request: backupvault.UpdateBackupVaultRequest, @@ -943,12 +1593,35 @@ def post_update_backup_vault( ) -> operations_pb2.Operation: """Post-rpc interceptor for update_backup_vault - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_backup_vault_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the BackupDR server but before - it is returned to user code. + it is returned to user code. This `post_update_backup_vault` interceptor runs + before the `post_update_backup_vault_with_metadata` interceptor. """ return response + def post_update_backup_vault_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_backup_vault + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BackupDR server but before it is returned to user code. 
+ + We recommend only using this `post_update_backup_vault_with_metadata` + interceptor in new development instead of the `post_update_backup_vault` interceptor. + When both interceptors are used, this `post_update_backup_vault_with_metadata` interceptor runs after the + `post_update_backup_vault` interceptor. The (possibly modified) response returned by + `post_update_backup_vault` will be passed to + `post_update_backup_vault_with_metadata`. + """ + return response, metadata + def pre_update_data_source( self, request: backupvault.UpdateDataSourceRequest, @@ -968,12 +1641,35 @@ def post_update_data_source( ) -> operations_pb2.Operation: """Post-rpc interceptor for update_data_source - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_data_source_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the BackupDR server but before - it is returned to user code. + it is returned to user code. This `post_update_data_source` interceptor runs + before the `post_update_data_source_with_metadata` interceptor. """ return response + def post_update_data_source_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_data_source + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BackupDR server but before it is returned to user code. + + We recommend only using this `post_update_data_source_with_metadata` + interceptor in new development instead of the `post_update_data_source` interceptor. + When both interceptors are used, this `post_update_data_source_with_metadata` interceptor runs after the + `post_update_data_source` interceptor. The (possibly modified) response returned by + `post_update_data_source` will be passed to + `post_update_data_source_with_metadata`. 
+ """ + return response, metadata + def pre_get_location( self, request: locations_pb2.GetLocationRequest, @@ -1463,6 +2159,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_backup_plan(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_backup_plan_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1614,6 +2314,13 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_backup_plan_association(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_create_backup_plan_association_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1763,6 +2470,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_backup_vault(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_backup_vault_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1913,6 +2624,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_management_server(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_management_server_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2056,6 +2771,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_backup(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_backup_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2201,6 +2920,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_backup_plan(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_backup_plan_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2346,6 +3069,13 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_backup_plan_association(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_delete_backup_plan_association_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2489,6 +3219,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_backup_vault(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_backup_vault_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG 
): # pragma: NO COVER @@ -2633,6 +3367,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_management_server(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_management_server_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2779,6 +3517,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_fetch_usable_backup_vaults(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_fetch_usable_backup_vaults_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2921,6 +3663,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_backup(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_backup_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3070,6 +3816,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_backup_plan(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_backup_plan_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3218,6 +3968,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_backup_plan_association(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_backup_plan_association_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3369,6 +4123,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_backup_vault(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_backup_vault_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3518,6 +4276,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_data_source(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_data_source_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3663,6 +4425,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_management_server(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_management_server_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3686,6 +4452,160 @@ def __call__( ) return resp + class _InitializeService( + 
_BaseBackupDRRestTransport._BaseInitializeService, BackupDRRestStub + ): + def __hash__(self): + return hash("BackupDRRestTransport.InitializeService") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: backupdr.InitializeServiceRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Call the initialize service method over HTTP. + + Args: + request (~.backupdr.InitializeServiceRequest): + The request object. Request message for initializing the + service. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options = ( + _BaseBackupDRRestTransport._BaseInitializeService._get_http_options() + ) + + request, metadata = self._interceptor.pre_initialize_service( + request, metadata + ) + transcoded_request = _BaseBackupDRRestTransport._BaseInitializeService._get_transcoded_request( + http_options, request + ) + + body = _BaseBackupDRRestTransport._BaseInitializeService._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseBackupDRRestTransport._BaseInitializeService._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.backupdr_v1.BackupDRClient.InitializeService", + extra={ + "serviceName": "google.cloud.backupdr.v1.BackupDR", + "rpcName": "InitializeService", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = BackupDRRestTransport._InitializeService._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
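As the comment above notes, non-2xx responses are raised as `core_exceptions.GoogleAPICallError` subclasses via `core_exceptions.from_http_response`. A hedged sketch of invoking the new `InitializeService` RPC over REST and catching such an error; the project and location in the resource name are placeholders.

```python
from google.api_core import exceptions as core_exceptions
from google.cloud import backupdr_v1

client = backupdr_v1.BackupDRClient(transport="rest")

request = backupdr_v1.InitializeServiceRequest(
    name="projects/my-project/locations/us-central1/serviceConfig",  # placeholder
    resource_type="compute.googleapis.com/Instance",
)

try:
    operation = client.initialize_service(request=request)
    # The long-running operation resolves to an InitializeServiceResponse.
    response = operation.result()
    print(response.backup_vault_name, response.backup_plan_name)
except core_exceptions.GoogleAPICallError as exc:
    # Raised for 4xx/5xx HTTP responses surfaced by the REST transport.
    print(f"InitializeService failed: {exc.code} {exc.message}")
```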
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_initialize_service(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_initialize_service_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.backupdr_v1.BackupDRClient.initialize_service", + extra={ + "serviceName": "google.cloud.backupdr.v1.BackupDR", + "rpcName": "InitializeService", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + class _ListBackupPlanAssociations( _BaseBackupDRRestTransport._BaseListBackupPlanAssociations, BackupDRRestStub ): @@ -3809,6 +4729,13 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_backup_plan_associations(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_list_backup_plan_associations_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3959,6 +4886,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_backup_plans(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_backup_plans_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4103,6 +5034,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_backups(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_backups_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4250,6 +5185,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_backup_vaults(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_backup_vaults_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4401,6 +5340,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_data_sources(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_data_sources_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4548,6 +5491,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_management_servers(resp) + response_metadata = [(k, str(v)) for k, v in 
response.headers.items()] + resp, _ = self._interceptor.post_list_management_servers_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4702,6 +5649,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_restore_backup(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_restore_backup_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4854,6 +5805,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_trigger_backup(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_trigger_backup_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -5004,6 +5959,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_backup(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_backup_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -5154,6 +6113,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_backup_vault(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_backup_vault_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -5308,6 +6271,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_data_source(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_data_source_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -5471,6 +6438,14 @@ def get_management_server( # In C++ this would require a dynamic_cast return self._GetManagementServer(self._session, self._host, self._interceptor) # type: ignore + @property + def initialize_service( + self, + ) -> Callable[[backupdr.InitializeServiceRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._InitializeService(self._session, self._host, self._interceptor) # type: ignore + @property def list_backup_plan_associations( self, diff --git a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/transports/rest_base.py b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/transports/rest_base.py index d8e87f3e754d..358a1995e71e 100644 --- a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/transports/rest_base.py +++ b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/transports/rest_base.py @@ -902,6 +902,63 @@ def _get_query_params_json(transcoded_request): query_params["$alt"] = "json;enum-encoding=int" return query_params + class _BaseInitializeService: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/serviceConfig}:initialize", + "body": "*", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = backupdr.InitializeServiceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseBackupDRRestTransport._BaseInitializeService._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + class _BaseListBackupPlanAssociations: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") diff --git a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/__init__.py b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/__init__.py index 951186d655ee..8bd452cf0248 100644 --- a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/__init__.py +++ b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/__init__.py @@ -17,6 +17,8 @@ CreateManagementServerRequest, DeleteManagementServerRequest, GetManagementServerRequest, + InitializeServiceRequest, + InitializeServiceResponse, ListManagementServersRequest, ListManagementServersResponse, ManagementServer, @@ -117,6 +119,8 @@ "CreateManagementServerRequest", "DeleteManagementServerRequest", "GetManagementServerRequest", + "InitializeServiceRequest", + "InitializeServiceResponse", "ListManagementServersRequest", "ListManagementServersResponse", "ManagementServer", diff --git a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/backupdr.py b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/backupdr.py index 2cbce001d7d5..4eac794dffeb 100644 --- a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/backupdr.py +++ 
b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/backupdr.py @@ -34,6 +34,8 @@ "GetManagementServerRequest", "CreateManagementServerRequest", "DeleteManagementServerRequest", + "InitializeServiceRequest", + "InitializeServiceResponse", "OperationMetadata", }, ) @@ -185,9 +187,11 @@ class ManagementServer(proto.Message): state (google.cloud.backupdr_v1.types.ManagementServer.InstanceState): Output only. The ManagementServer state. networks (MutableSequence[google.cloud.backupdr_v1.types.NetworkConfig]): - Required. VPC networks to which the + Optional. VPC networks to which the ManagementServer instance is connected. For this version, only a single network is supported. + This field is optional if MS is created without + PSA etag (str): Optional. Server specified ETag for the ManagementServer resource to prevent @@ -548,6 +552,79 @@ class DeleteManagementServerRequest(proto.Message): ) +class InitializeServiceRequest(proto.Message): + r"""Request message for initializing the service. + + Attributes: + name (str): + Required. The resource name of the serviceConfig used to + initialize the service. Format: + ``projects/{project_id}/locations/{location}/serviceConfig``. + resource_type (str): + Required. The resource type to which the + default service config will be applied. Examples + include, "compute.googleapis.com/Instance" and + "storage.googleapis.com/Bucket". + request_id (str): + Optional. An optional request ID to identify + requests. Specify a unique request ID so that if + you must retry your request, the server will + know to ignore the request if it has already + been completed. The server will guarantee that + for at least 60 minutes since the first request. + + For example, consider a situation where you make + an initial request and t he request times out. + If you make the request again with the same + request ID, the server can check if original + operation with the same request ID was received, + and if so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + resource_type: str = proto.Field( + proto.STRING, + number=2, + ) + request_id: str = proto.Field( + proto.STRING, + number=3, + ) + + +class InitializeServiceResponse(proto.Message): + r"""Response message for initializing the service. + + Attributes: + backup_vault_name (str): + The resource name of the default ``BackupVault`` created. + Format: + ``projects/{project_id}/locations/{location}/backupVaults/{backup_vault_id}``. + backup_plan_name (str): + The resource name of the default ``BackupPlan`` created. + Format: + ``projects/{project_id}/locations/{location}/backupPlans/{backup_plan_id}``. + """ + + backup_vault_name: str = proto.Field( + proto.STRING, + number=1, + ) + backup_plan_name: str = proto.Field( + proto.STRING, + number=2, + ) + + class OperationMetadata(proto.Message): r"""Represents the metadata of the long-running operation. @@ -570,9 +647,11 @@ class OperationMetadata(proto.Message): requested_cancellation (bool): Output only. Identifies whether the user has requested cancellation of the operation. Operations that have - successfully been cancelled have [Operation.error][] value - with a [google.rpc.Status.code][google.rpc.Status.code] of - 1, corresponding to 'Code.CANCELLED'. 
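The `request_id` field documented above lets the server deduplicate retried calls for at least 60 minutes. A small sketch of supplying a client-generated UUID (the resource name is a placeholder):

```python
import uuid

from google.cloud import backupdr_v1

# Reuse the same UUID when retrying so the server can recognize and ignore
# a duplicate InitializeService request, per the field documentation above.
retry_safe_request = backupdr_v1.InitializeServiceRequest(
    name="projects/my-project/locations/us-central1/serviceConfig",  # placeholder
    resource_type="compute.googleapis.com/Instance",
    request_id=str(uuid.uuid4()),
)
```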
+ successfully been cancelled have + [google.longrunning.Operation.error][google.longrunning.Operation.error] + value with a + [google.rpc.Status.code][google.rpc.Status.code] of 1, + corresponding to 'Code.CANCELLED'. api_version (str): Output only. API version used to start the operation. diff --git a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/backupplan.py b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/backupplan.py index ca0eabb6d9f1..afbd52562729 100644 --- a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/backupplan.py +++ b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/backupplan.py @@ -76,8 +76,9 @@ class BackupPlan(proto.Message): resource_type (str): Required. The resource type to which the ``BackupPlan`` will be applied. Examples include, - "compute.googleapis.com/Instance" and - "storage.googleapis.com/Bucket". + "compute.googleapis.com/Instance", + "sqladmin.googleapis.com/Instance", or + "alloydb.googleapis.com/Cluster". etag (str): Optional. ``etag`` is returned from the service in the response. As a user of the service, you may provide an etag @@ -187,6 +188,14 @@ class BackupRule(proto.Message): “days”. The value should be greater than or equal to minimum enforced retention of the backup vault. + + Minimum value is 1 and maximum value is 90 for + hourly backups. Minimum value is 1 and maximum + value is 90 for daily backups. Minimum value is + 7 and maximum value is 186 for weekly backups. + Minimum value is 30 and maximum value is 732 for + monthly backups. Minimum value is 365 and + maximum value is 36159 for yearly backups. standard_schedule (google.cloud.backupdr_v1.types.StandardSchedule): Required. Defines a schedule that runs within the confines of a defined window of time. diff --git a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/backupplanassociation.py b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/backupplanassociation.py index 23a4309a3fd8..0ba462095b1d 100644 --- a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/backupplanassociation.py +++ b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/backupplanassociation.py @@ -48,8 +48,8 @@ class BackupPlanAssociation(proto.Message): projects/{project}/locations/{location}/backupPlanAssociations/{backupPlanAssociationId} resource_type (str): - Optional. Resource type of workload on which - backupplan is applied + Required. Immutable. Resource type of + workload on which backupplan is applied resource (str): Required. Immutable. Resource name of workload on which backupplan is applied @@ -71,11 +71,9 @@ class BackupPlanAssociation(proto.Message): Output only. The config info related to backup rules. data_source (str): - Output only. Output Only. - - Resource name of data source which will be used - as storage location for backups taken. - Format : + Output only. Resource name of data source + which will be used as storage location for + backups taken. Format : projects/{project}/locations/{location}/backupVaults/{backupvault}/dataSources/{datasource} """ @@ -150,16 +148,13 @@ class RuleConfigInfo(proto.Message): Attributes: rule_id (str): - Output only. Output Only. - - Backup Rule id fetched from backup plan. + Output only. Backup Rule id fetched from + backup plan. last_backup_state (google.cloud.backupdr_v1.types.RuleConfigInfo.LastBackupState): Output only. The last backup state for rule. last_backup_error (google.rpc.status_pb2.Status): - Output only. Output Only. 
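The `backup_retention_days` limits quoted above differ by backup recurrence. A purely illustrative helper (not part of the library) that encodes those documented ranges:

```python
# Illustrative only: (min, max) retention-day ranges quoted in the
# BackupRule.backup_retention_days documentation above, keyed by recurrence.
RETENTION_LIMITS = {
    "HOURLY": (1, 90),
    "DAILY": (1, 90),
    "WEEKLY": (7, 186),
    "MONTHLY": (30, 732),
    "YEARLY": (365, 36159),
}


def retention_in_range(recurrence: str, days: int) -> bool:
    """Return True if `days` falls inside the documented range for `recurrence`."""
    low, high = RETENTION_LIMITS[recurrence]
    return low <= days <= high
```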
- - google.rpc.Status object to store the last - backup error. + Output only. google.rpc.Status object to + store the last backup error. last_successful_backup_consistency_time (google.protobuf.timestamp_pb2.Timestamp): Output only. The point in time when the last successful backup was captured from the source. diff --git a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/backupvault.py b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/backupvault.py index ced3cd195702..5e953af015f2 100644 --- a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/backupvault.py +++ b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/backupvault.py @@ -199,9 +199,8 @@ class BackupVault(proto.Message): Output only. Total size of the storage used by all backup resources. uid (str): - Output only. Output only - Immutable after resource creation until resource - deletion. + Output only. Immutable after resource + creation until resource deletion. annotations (MutableMapping[str, str]): Optional. User annotations. See https://google.aip.dev/128#annotations Stores @@ -210,8 +209,6 @@ class BackupVault(proto.Message): Optional. Note: This field is added for future use case and will not be supported in the current release. - Optional. - Access restriction for the backup vault. Default value is WITHIN_ORGANIZATION if not provided during creation. """ @@ -244,7 +241,9 @@ class AccessRestriction(proto.Enum): Values: ACCESS_RESTRICTION_UNSPECIFIED (0): - Access restriction not set. + Access restriction not set. If user does not provide any + value or pass this value, it will be changed to + WITHIN_ORGANIZATION. WITHIN_PROJECT (1): Access to or from resources outside your current project will be denied. @@ -253,11 +252,16 @@ class AccessRestriction(proto.Enum): current organization will be denied. UNRESTRICTED (3): No access restriction. + WITHIN_ORG_BUT_UNRESTRICTED_FOR_BA (4): + Access to or from resources outside your + current organization will be denied except for + backup appliance. """ ACCESS_RESTRICTION_UNSPECIFIED = 0 WITHIN_PROJECT = 1 WITHIN_ORGANIZATION = 2 UNRESTRICTED = 3 + WITHIN_ORG_BUT_UNRESTRICTED_FOR_BA = 4 name: str = proto.Field( proto.STRING, @@ -1532,6 +1536,11 @@ class DeleteBackupVaultRequest(proto.Message): Optional. If true and the BackupVault is not found, the request will succeed but no action will be taken. + ignore_backup_plan_references (bool): + Optional. If set to true, backupvault + deletion will proceed even if there are backup + plans referencing the backupvault. The default + is 'false'. """ name: str = proto.Field( @@ -1558,6 +1567,10 @@ class DeleteBackupVaultRequest(proto.Message): proto.BOOL, number=6, ) + ignore_backup_plan_references: bool = proto.Field( + proto.BOOL, + number=7, + ) class ListDataSourcesRequest(proto.Message): diff --git a/packages/google-cloud-backupdr/noxfile.py b/packages/google-cloud-backupdr/noxfile.py index a9ceef47133c..0acc836b384e 100644 --- a/packages/google-cloud-backupdr/noxfile.py +++ b/packages/google-cloud-backupdr/noxfile.py @@ -382,20 +382,29 @@ def docfx(session): ["python", "upb", "cpp"], ) def prerelease_deps(session, protobuf_implementation): - """Run all tests with prerelease versions of dependencies installed.""" + """ + Run all tests with pre-release versions of dependencies installed + rather than the standard non pre-release versions. + Pre-releases versions can be installed using + `pip install --pre `. 
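Relating to the new `DeleteBackupVaultRequest.ignore_backup_plan_references` field added above: a short sketch of a delete call that opts in to removing a vault that is still referenced by backup plans (the resource name is a placeholder, and credentials are assumed to come from the environment).

```python
from google.cloud import backupdr_v1

client = backupdr_v1.BackupDRClient()

request = backupdr_v1.DeleteBackupVaultRequest(
    name="projects/my-project/locations/us-central1/backupVaults/my-vault",  # placeholder
    ignore_backup_plan_references=True,  # proceed even if backup plans reference the vault
)

operation = client.delete_backup_vault(request=request)
operation.result()  # block until the long-running delete completes
```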
+ """ if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): session.skip("cpp implementation is not supported in python 3.11+") # Install all dependencies - session.install("-e", ".[all, tests, tracing]") + session.install("-e", ".") + unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES + # Install dependencies for the unit test environment session.install(*unit_deps_all) + system_deps_all = ( SYSTEM_TEST_STANDARD_DEPENDENCIES + SYSTEM_TEST_EXTERNAL_DEPENDENCIES + SYSTEM_TEST_EXTRAS ) + # Install dependencies for the system test environment session.install(*system_deps_all) # Because we test minimum dependency versions on the minimum Python @@ -417,6 +426,7 @@ def prerelease_deps(session, protobuf_implementation): ) ] + # Install dependencies specified in `testing/constraints-X.txt`. session.install(*constraints_deps) prerel_deps = [ @@ -458,3 +468,70 @@ def prerelease_deps(session, protobuf_implementation): "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, }, ) + + +@nox.session(python="3.13") +@nox.parametrize( + "protobuf_implementation", + ["python", "upb"], +) +def core_deps_from_source(session, protobuf_implementation): + """Run all tests with local versions of core dependencies installed, + rather than pulling core dependencies from PyPI. + """ + + # Install all dependencies + session.install(".") + + # Install dependencies for the unit test environment + unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES + session.install(*unit_deps_all) + + # Install dependencies for the system test environment + system_deps_all = ( + SYSTEM_TEST_STANDARD_DEPENDENCIES + + SYSTEM_TEST_EXTERNAL_DEPENDENCIES + + SYSTEM_TEST_EXTRAS + ) + session.install(*system_deps_all) + + # Because we test minimum dependency versions on the minimum Python + # version, the first version we test with in the unit tests sessions has a + # constraints file containing all dependencies and extras that should be installed. + with open( + CURRENT_DIRECTORY + / "testing" + / f"constraints-{UNIT_TEST_PYTHON_VERSIONS[0]}.txt", + encoding="utf-8", + ) as constraints_file: + constraints_text = constraints_file.read() + + # Ignore leading whitespace and comment lines. + constraints_deps = [ + match.group(1) + for match in re.finditer( + r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE + ) + ] + + # Install dependencies specified in `testing/constraints-X.txt`. 
+ session.install(*constraints_deps) + + core_dependencies_from_source = [ + "google-api-core @ git+https://github.com/googleapis/python-api-core.git", + "google-auth @ git+https://github.com/googleapis/google-auth-library-python.git", + f"{CURRENT_DIRECTORY}/../googleapis-common-protos", + f"{CURRENT_DIRECTORY}/../grpc-google-iam-v1", + "proto-plus @ git+https://github.com/googleapis/proto-plus-python.git", + ] + + for dep in core_dependencies_from_source: + session.install(dep, "--ignore-installed", "--no-deps") + + session.run( + "py.test", + "tests/unit", + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, + ) diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_create_backup_plan_association_async.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_create_backup_plan_association_async.py index 25dbf9cca081..d60bb539a629 100644 --- a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_create_backup_plan_association_async.py +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_create_backup_plan_association_async.py @@ -40,6 +40,7 @@ async def sample_create_backup_plan_association(): # Initialize request argument(s) backup_plan_association = backupdr_v1.BackupPlanAssociation() + backup_plan_association.resource_type = "resource_type_value" backup_plan_association.resource = "resource_value" backup_plan_association.backup_plan = "backup_plan_value" diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_create_backup_plan_association_sync.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_create_backup_plan_association_sync.py index fc82ca77f706..2c439946c9a7 100644 --- a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_create_backup_plan_association_sync.py +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_create_backup_plan_association_sync.py @@ -40,6 +40,7 @@ def sample_create_backup_plan_association(): # Initialize request argument(s) backup_plan_association = backupdr_v1.BackupPlanAssociation() + backup_plan_association.resource_type = "resource_type_value" backup_plan_association.resource = "resource_value" backup_plan_association.backup_plan = "backup_plan_value" diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_initialize_service_async.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_initialize_service_async.py new file mode 100644 index 000000000000..c938b4515018 --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_initialize_service_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +# Generated code. DO NOT EDIT! +# +# Snippet for InitializeService +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_InitializeService_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +async def sample_initialize_service(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + request = backupdr_v1.InitializeServiceRequest( + name="name_value", + resource_type="resource_type_value", + ) + + # Make the request + operation = client.initialize_service(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END backupdr_v1_generated_BackupDR_InitializeService_async] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_initialize_service_sync.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_initialize_service_sync.py new file mode 100644 index 000000000000..8bd114b067c2 --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_initialize_service_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for InitializeService +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_InitializeService_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +def sample_initialize_service(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + request = backupdr_v1.InitializeServiceRequest( + name="name_value", + resource_type="resource_type_value", + ) + + # Make the request + operation = client.initialize_service(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END backupdr_v1_generated_BackupDR_InitializeService_sync] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/snippet_metadata_google.cloud.backupdr.v1.json b/packages/google-cloud-backupdr/samples/generated_samples/snippet_metadata_google.cloud.backupdr.v1.json index 8963d463b241..b4fb12439766 100644 --- a/packages/google-cloud-backupdr/samples/generated_samples/snippet_metadata_google.cloud.backupdr.v1.json +++ b/packages/google-cloud-backupdr/samples/generated_samples/snippet_metadata_google.cloud.backupdr.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-backupdr", - "version": "0.1.7" + "version": "0.2.1" }, "snippets": [ { @@ -68,12 +68,12 @@ "regionTag": "backupdr_v1_generated_BackupDR_CreateBackupPlanAssociation_async", "segments": [ { - "end": 61, + "end": 62, "start": 27, "type": "FULL" }, { - "end": 61, + "end": 62, "start": 27, "type": "SHORT" }, @@ -83,18 +83,18 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 51, + "end": 52, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 58, - "start": 52, + "end": 59, + "start": 53, "type": "REQUEST_EXECUTION" }, { - "end": 62, - "start": 59, + "end": 63, + "start": 60, "type": "RESPONSE_HANDLING" } ], @@ -156,12 +156,12 @@ "regionTag": "backupdr_v1_generated_BackupDR_CreateBackupPlanAssociation_sync", "segments": [ { - "end": 61, + "end": 62, "start": 27, "type": "FULL" }, { - "end": 61, + "end": 62, "start": 27, "type": "SHORT" }, @@ -171,18 +171,18 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 51, + "end": 52, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 58, - "start": 52, + "end": 59, + "start": 53, "type": "REQUEST_EXECUTION" }, { - "end": 62, - "start": 59, + "end": 63, + "start": 60, "type": "RESPONSE_HANDLING" } ], @@ -2651,6 +2651,159 @@ ], "title": "backupdr_v1_generated_backup_dr_get_management_server_sync.py" }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient", + "shortName": "BackupDRAsyncClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.initialize_service", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.InitializeService", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "InitializeService" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.InitializeServiceRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "initialize_service" + }, + "description": "Sample for InitializeService", + "file": 
"backupdr_v1_generated_backup_dr_initialize_service_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_InitializeService_async", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_initialize_service_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRClient", + "shortName": "BackupDRClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRClient.initialize_service", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.InitializeService", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "InitializeService" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.InitializeServiceRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "initialize_service" + }, + "description": "Sample for InitializeService", + "file": "backupdr_v1_generated_backup_dr_initialize_service_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_InitializeService_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_initialize_service_sync.py" + }, { "canonical": true, "clientMethod": { diff --git a/packages/google-cloud-backupdr/scripts/fixup_backupdr_v1_keywords.py b/packages/google-cloud-backupdr/scripts/fixup_backupdr_v1_keywords.py index c0dd15568f46..a3de29cabf96 100644 --- a/packages/google-cloud-backupdr/scripts/fixup_backupdr_v1_keywords.py +++ b/packages/google-cloud-backupdr/scripts/fixup_backupdr_v1_keywords.py @@ -46,7 +46,7 @@ class backupdrCallTransformer(cst.CSTTransformer): 'delete_backup': ('name', 'request_id', ), 'delete_backup_plan': ('name', 'request_id', ), 'delete_backup_plan_association': ('name', 'request_id', ), - 'delete_backup_vault': ('name', 'request_id', 'force', 'etag', 'validate_only', 'allow_missing', ), + 'delete_backup_vault': ('name', 'request_id', 'force', 'etag', 'validate_only', 'allow_missing', 'ignore_backup_plan_references', ), 'delete_management_server': ('name', 'request_id', ), 'fetch_usable_backup_vaults': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), 'get_backup': ('name', 'view', ), @@ -55,6 +55,7 @@ class backupdrCallTransformer(cst.CSTTransformer): 'get_backup_vault': ('name', 'view', ), 'get_data_source': ('name', ), 'get_management_server': ('name', ), + 'initialize_service': ('name', 'resource_type', 'request_id', ), 
'list_backup_plan_associations': ('parent', 'page_size', 'page_token', 'filter', ), 'list_backup_plans': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), 'list_backups': ('parent', 'page_size', 'page_token', 'filter', 'order_by', 'view', ), diff --git a/packages/google-cloud-backupdr/tests/unit/gapic/backupdr_v1/test_backup_dr.py b/packages/google-cloud-backupdr/tests/unit/gapic/backupdr_v1/test_backup_dr.py index 80493a625975..cf9dc3302b77 100644 --- a/packages/google-cloud-backupdr/tests/unit/gapic/backupdr_v1/test_backup_dr.py +++ b/packages/google-cloud-backupdr/tests/unit/gapic/backupdr_v1/test_backup_dr.py @@ -90,6 +90,13 @@ backupvault_gce, ) +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -320,6 +327,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = BackupDRClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = BackupDRClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -11911,6 +11961,267 @@ async def test_trigger_backup_flattened_error_async(): ) +@pytest.mark.parametrize( + "request_type", + [ + backupdr.InitializeServiceRequest, + dict, + ], +) +def test_initialize_service(request_type, transport: str = "grpc"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.initialize_service), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.initialize_service(request) + + # Establish that the underlying gRPC stub method was called. 
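The `_add_cred_info_for_auth_errors` tests above suggest that, when the active credentials expose `get_cred_info()`, the client appends a JSON blob describing the credential (source, type, principal) to the `details` of 401/403/404 errors. A hedged sketch of where a caller would observe this; the resource name is a placeholder and the behavior is inferred from the tests, not from separate documentation.

```python
from google.api_core import exceptions as core_exceptions
from google.cloud import backupdr_v1

client = backupdr_v1.BackupDRClient()

try:
    client.get_backup_vault(
        name="projects/my-project/locations/us-central1/backupVaults/my-vault"  # placeholder
    )
except core_exceptions.GoogleAPICallError as exc:
    # For 401/403/404 responses, exc.details may end with a JSON string such as
    # {"credential_source": ..., "credential_type": ..., "principal": ...}
    # when the credentials object supports get_cred_info().
    print(exc.code, exc.details)
```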
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = backupdr.InitializeServiceRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_initialize_service_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = backupdr.InitializeServiceRequest( + name="name_value", + resource_type="resource_type_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.initialize_service), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.initialize_service(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupdr.InitializeServiceRequest( + name="name_value", + resource_type="resource_type_value", + ) + + +def test_initialize_service_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.initialize_service in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.initialize_service + ] = mock_rpc + request = {} + client.initialize_service(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.initialize_service(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_initialize_service_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BackupDRAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.initialize_service + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.initialize_service + ] = mock_rpc + + request = {} + await client.initialize_service(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.initialize_service(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_initialize_service_async( + transport: str = "grpc_asyncio", request_type=backupdr.InitializeServiceRequest +): + client = BackupDRAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.initialize_service), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.initialize_service(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = backupdr.InitializeServiceRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_initialize_service_async_from_dict(): + await test_initialize_service_async(request_type=dict) + + +def test_initialize_service_field_headers(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupdr.InitializeServiceRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.initialize_service), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.initialize_service(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_initialize_service_field_headers_async(): + client = BackupDRAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupdr.InitializeServiceRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.initialize_service), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.initialize_service(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + def test_list_management_servers_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -13991,6 +14302,7 @@ def test_delete_backup_vault_rest_required_fields( "allow_missing", "etag", "force", + "ignore_backup_plan_references", "request_id", "validate_only", ) @@ -14052,6 +14364,7 @@ def test_delete_backup_vault_rest_unset_required_fields(): "allowMissing", "etag", "force", + "ignoreBackupPlanReferences", "requestId", "validateOnly", ) @@ -17690,25 +18003,163 @@ def test_trigger_backup_rest_flattened_error(transport: str = "rest"): ) -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. - transport = transports.BackupDRGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): +def test_initialize_service_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # It is an error to provide a credentials file and a transport instance. - transport = transports.BackupDRGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = BackupDRClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.initialize_service in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[ + client._transport.initialize_service + ] = mock_rpc + + request = {} + client.initialize_service(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.initialize_service(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_initialize_service_rest_required_fields( + request_type=backupdr.InitializeServiceRequest, +): + transport_class = transports.BackupDRRestTransport + + request_init = {} + request_init["name"] = "" + request_init["resource_type"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).initialize_service._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + jsonified_request["resourceType"] = "resource_type_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).initialize_service._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + assert "resourceType" in jsonified_request + assert jsonified_request["resourceType"] == "resource_type_value" + + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.initialize_service(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_initialize_service_rest_unset_required_fields(): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.initialize_service._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "name", + "resourceType", + ) + ) + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.BackupDRGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.BackupDRGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = BackupDRClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, ) # It is an error to provide an api_key and a transport instance. @@ -18399,6 +18850,29 @@ def test_trigger_backup_empty_call_grpc(): assert args[0] == request_msg +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_initialize_service_empty_call_grpc(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.initialize_service), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.initialize_service(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = backupdr.InitializeServiceRequest() + + assert args[0] == request_msg + + def test_transport_kind_grpc_asyncio(): transport = BackupDRAsyncClient.get_transport_class("grpc_asyncio")( credentials=async_anonymous_credentials() @@ -19194,6 +19668,33 @@ async def test_trigger_backup_empty_call_grpc_asyncio(): assert args[0] == request_msg +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_initialize_service_empty_call_grpc_asyncio(): + client = BackupDRAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.initialize_service), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
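+        # FakeUnaryUnaryCall wraps the Operation so the awaited stub call resolves like a real async gRPC response.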
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.initialize_service(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = backupdr.InitializeServiceRequest() + + assert args[0] == request_msg + + def test_transport_kind_rest(): transport = BackupDRClient.get_transport_class("rest")( credentials=ga_credentials.AnonymousCredentials() @@ -19283,10 +19784,13 @@ def test_list_management_servers_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.BackupDRRestInterceptor, "post_list_management_servers" ) as post, mock.patch.object( + transports.BackupDRRestInterceptor, "post_list_management_servers_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.BackupDRRestInterceptor, "pre_list_management_servers" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = backupdr.ListManagementServersRequest.pb( backupdr.ListManagementServersRequest() ) @@ -19312,6 +19816,10 @@ def test_list_management_servers_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = backupdr.ListManagementServersResponse() + post_with_metadata.return_value = ( + backupdr.ListManagementServersResponse(), + metadata, + ) client.list_management_servers( request, @@ -19323,6 +19831,7 @@ def test_list_management_servers_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_management_server_rest_bad_request( @@ -19423,10 +19932,13 @@ def test_get_management_server_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.BackupDRRestInterceptor, "post_get_management_server" ) as post, mock.patch.object( + transports.BackupDRRestInterceptor, "post_get_management_server_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.BackupDRRestInterceptor, "pre_get_management_server" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = backupdr.GetManagementServerRequest.pb( backupdr.GetManagementServerRequest() ) @@ -19450,6 +19962,7 @@ def test_get_management_server_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = backupdr.ManagementServer() + post_with_metadata.return_value = backupdr.ManagementServer(), metadata client.get_management_server( request, @@ -19461,6 +19974,7 @@ def test_get_management_server_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_management_server_rest_bad_request( @@ -19630,10 +20144,14 @@ def test_create_management_server_rest_interceptors(null_interceptor): ), mock.patch.object( transports.BackupDRRestInterceptor, "post_create_management_server" ) as post, mock.patch.object( + transports.BackupDRRestInterceptor, + "post_create_management_server_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.BackupDRRestInterceptor, "pre_create_management_server" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = backupdr.CreateManagementServerRequest.pb( backupdr.CreateManagementServerRequest() ) @@ -19657,6 +20175,7 @@ def test_create_management_server_rest_interceptors(null_interceptor): ] 
pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_management_server( request, @@ -19668,6 +20187,7 @@ def test_create_management_server_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_management_server_rest_bad_request( @@ -19750,10 +20270,14 @@ def test_delete_management_server_rest_interceptors(null_interceptor): ), mock.patch.object( transports.BackupDRRestInterceptor, "post_delete_management_server" ) as post, mock.patch.object( + transports.BackupDRRestInterceptor, + "post_delete_management_server_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.BackupDRRestInterceptor, "pre_delete_management_server" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = backupdr.DeleteManagementServerRequest.pb( backupdr.DeleteManagementServerRequest() ) @@ -19777,6 +20301,7 @@ def test_delete_management_server_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.delete_management_server( request, @@ -19788,6 +20313,7 @@ def test_delete_management_server_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_backup_vault_rest_bad_request( @@ -19951,10 +20477,13 @@ def test_create_backup_vault_rest_interceptors(null_interceptor): ), mock.patch.object( transports.BackupDRRestInterceptor, "post_create_backup_vault" ) as post, mock.patch.object( + transports.BackupDRRestInterceptor, "post_create_backup_vault_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.BackupDRRestInterceptor, "pre_create_backup_vault" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = backupvault.CreateBackupVaultRequest.pb( backupvault.CreateBackupVaultRequest() ) @@ -19978,6 +20507,7 @@ def test_create_backup_vault_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_backup_vault( request, @@ -19989,6 +20519,7 @@ def test_create_backup_vault_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_backup_vaults_rest_bad_request( @@ -20073,10 +20604,13 @@ def test_list_backup_vaults_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.BackupDRRestInterceptor, "post_list_backup_vaults" ) as post, mock.patch.object( + transports.BackupDRRestInterceptor, "post_list_backup_vaults_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.BackupDRRestInterceptor, "pre_list_backup_vaults" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = backupvault.ListBackupVaultsRequest.pb( backupvault.ListBackupVaultsRequest() ) @@ -20102,6 +20636,10 @@ def test_list_backup_vaults_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = backupvault.ListBackupVaultsResponse() + post_with_metadata.return_value = ( + backupvault.ListBackupVaultsResponse(), 
+ metadata, + ) client.list_backup_vaults( request, @@ -20113,6 +20651,7 @@ def test_list_backup_vaults_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_fetch_usable_backup_vaults_rest_bad_request( @@ -20197,10 +20736,14 @@ def test_fetch_usable_backup_vaults_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.BackupDRRestInterceptor, "post_fetch_usable_backup_vaults" ) as post, mock.patch.object( + transports.BackupDRRestInterceptor, + "post_fetch_usable_backup_vaults_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.BackupDRRestInterceptor, "pre_fetch_usable_backup_vaults" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = backupvault.FetchUsableBackupVaultsRequest.pb( backupvault.FetchUsableBackupVaultsRequest() ) @@ -20226,6 +20769,10 @@ def test_fetch_usable_backup_vaults_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = backupvault.FetchUsableBackupVaultsResponse() + post_with_metadata.return_value = ( + backupvault.FetchUsableBackupVaultsResponse(), + metadata, + ) client.fetch_usable_backup_vaults( request, @@ -20237,6 +20784,7 @@ def test_fetch_usable_backup_vaults_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_backup_vault_rest_bad_request( @@ -20340,10 +20888,13 @@ def test_get_backup_vault_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.BackupDRRestInterceptor, "post_get_backup_vault" ) as post, mock.patch.object( + transports.BackupDRRestInterceptor, "post_get_backup_vault_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.BackupDRRestInterceptor, "pre_get_backup_vault" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = backupvault.GetBackupVaultRequest.pb( backupvault.GetBackupVaultRequest() ) @@ -20367,6 +20918,7 @@ def test_get_backup_vault_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = backupvault.BackupVault() + post_with_metadata.return_value = backupvault.BackupVault(), metadata client.get_backup_vault( request, @@ -20378,6 +20930,7 @@ def test_get_backup_vault_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_backup_vault_rest_bad_request( @@ -20549,10 +21102,13 @@ def test_update_backup_vault_rest_interceptors(null_interceptor): ), mock.patch.object( transports.BackupDRRestInterceptor, "post_update_backup_vault" ) as post, mock.patch.object( + transports.BackupDRRestInterceptor, "post_update_backup_vault_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.BackupDRRestInterceptor, "pre_update_backup_vault" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = backupvault.UpdateBackupVaultRequest.pb( backupvault.UpdateBackupVaultRequest() ) @@ -20576,6 +21132,7 @@ def test_update_backup_vault_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.update_backup_vault( request, @@ -20587,6 +21144,7 @@ def 
test_update_backup_vault_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_backup_vault_rest_bad_request( @@ -20665,10 +21223,13 @@ def test_delete_backup_vault_rest_interceptors(null_interceptor): ), mock.patch.object( transports.BackupDRRestInterceptor, "post_delete_backup_vault" ) as post, mock.patch.object( + transports.BackupDRRestInterceptor, "post_delete_backup_vault_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.BackupDRRestInterceptor, "pre_delete_backup_vault" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = backupvault.DeleteBackupVaultRequest.pb( backupvault.DeleteBackupVaultRequest() ) @@ -20692,6 +21253,7 @@ def test_delete_backup_vault_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.delete_backup_vault( request, @@ -20703,6 +21265,7 @@ def test_delete_backup_vault_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_data_sources_rest_bad_request( @@ -20787,10 +21350,13 @@ def test_list_data_sources_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.BackupDRRestInterceptor, "post_list_data_sources" ) as post, mock.patch.object( + transports.BackupDRRestInterceptor, "post_list_data_sources_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.BackupDRRestInterceptor, "pre_list_data_sources" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = backupvault.ListDataSourcesRequest.pb( backupvault.ListDataSourcesRequest() ) @@ -20816,6 +21382,10 @@ def test_list_data_sources_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = backupvault.ListDataSourcesResponse() + post_with_metadata.return_value = ( + backupvault.ListDataSourcesResponse(), + metadata, + ) client.list_data_sources( request, @@ -20827,6 +21397,7 @@ def test_list_data_sources_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_data_source_rest_bad_request( @@ -20923,10 +21494,13 @@ def test_get_data_source_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.BackupDRRestInterceptor, "post_get_data_source" ) as post, mock.patch.object( + transports.BackupDRRestInterceptor, "post_get_data_source_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.BackupDRRestInterceptor, "pre_get_data_source" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = backupvault.GetDataSourceRequest.pb( backupvault.GetDataSourceRequest() ) @@ -20950,6 +21524,7 @@ def test_get_data_source_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = backupvault.DataSource() + post_with_metadata.return_value = backupvault.DataSource(), metadata client.get_data_source( request, @@ -20961,6 +21536,7 @@ def test_get_data_source_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_data_source_rest_bad_request( @@ -21178,10 +21754,13 @@ def 
test_update_data_source_rest_interceptors(null_interceptor): ), mock.patch.object( transports.BackupDRRestInterceptor, "post_update_data_source" ) as post, mock.patch.object( + transports.BackupDRRestInterceptor, "post_update_data_source_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.BackupDRRestInterceptor, "pre_update_data_source" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = backupvault.UpdateDataSourceRequest.pb( backupvault.UpdateDataSourceRequest() ) @@ -21205,6 +21784,7 @@ def test_update_data_source_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.update_data_source( request, @@ -21216,6 +21796,7 @@ def test_update_data_source_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_backups_rest_bad_request(request_type=backupvault.ListBackupsRequest): @@ -21302,10 +21883,13 @@ def test_list_backups_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.BackupDRRestInterceptor, "post_list_backups" ) as post, mock.patch.object( + transports.BackupDRRestInterceptor, "post_list_backups_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.BackupDRRestInterceptor, "pre_list_backups" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = backupvault.ListBackupsRequest.pb(backupvault.ListBackupsRequest()) transcode.return_value = { "method": "post", @@ -21329,6 +21913,7 @@ def test_list_backups_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = backupvault.ListBackupsResponse() + post_with_metadata.return_value = backupvault.ListBackupsResponse(), metadata client.list_backups( request, @@ -21340,6 +21925,7 @@ def test_list_backups_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_backup_rest_bad_request(request_type=backupvault.GetBackupRequest): @@ -21434,10 +22020,13 @@ def test_get_backup_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.BackupDRRestInterceptor, "post_get_backup" ) as post, mock.patch.object( + transports.BackupDRRestInterceptor, "post_get_backup_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.BackupDRRestInterceptor, "pre_get_backup" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = backupvault.GetBackupRequest.pb(backupvault.GetBackupRequest()) transcode.return_value = { "method": "post", @@ -21459,6 +22048,7 @@ def test_get_backup_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = backupvault.Backup() + post_with_metadata.return_value = backupvault.Backup(), metadata client.get_backup( request, @@ -21470,6 +22060,7 @@ def test_get_backup_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_backup_rest_bad_request(request_type=backupvault.UpdateBackupRequest): @@ -21762,10 +22353,13 @@ def test_update_backup_rest_interceptors(null_interceptor): ), mock.patch.object( transports.BackupDRRestInterceptor, "post_update_backup" ) as post, 
mock.patch.object( + transports.BackupDRRestInterceptor, "post_update_backup_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.BackupDRRestInterceptor, "pre_update_backup" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = backupvault.UpdateBackupRequest.pb( backupvault.UpdateBackupRequest() ) @@ -21789,6 +22383,7 @@ def test_update_backup_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.update_backup( request, @@ -21800,6 +22395,7 @@ def test_update_backup_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_backup_rest_bad_request(request_type=backupvault.DeleteBackupRequest): @@ -21880,10 +22476,13 @@ def test_delete_backup_rest_interceptors(null_interceptor): ), mock.patch.object( transports.BackupDRRestInterceptor, "post_delete_backup" ) as post, mock.patch.object( + transports.BackupDRRestInterceptor, "post_delete_backup_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.BackupDRRestInterceptor, "pre_delete_backup" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = backupvault.DeleteBackupRequest.pb( backupvault.DeleteBackupRequest() ) @@ -21907,6 +22506,7 @@ def test_delete_backup_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.delete_backup( request, @@ -21918,6 +22518,7 @@ def test_delete_backup_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_restore_backup_rest_bad_request(request_type=backupvault.RestoreBackupRequest): @@ -21998,10 +22599,13 @@ def test_restore_backup_rest_interceptors(null_interceptor): ), mock.patch.object( transports.BackupDRRestInterceptor, "post_restore_backup" ) as post, mock.patch.object( + transports.BackupDRRestInterceptor, "post_restore_backup_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.BackupDRRestInterceptor, "pre_restore_backup" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = backupvault.RestoreBackupRequest.pb( backupvault.RestoreBackupRequest() ) @@ -22025,6 +22629,7 @@ def test_restore_backup_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.restore_backup( request, @@ -22036,6 +22641,7 @@ def test_restore_backup_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_backup_plan_rest_bad_request( @@ -22212,10 +22818,13 @@ def test_create_backup_plan_rest_interceptors(null_interceptor): ), mock.patch.object( transports.BackupDRRestInterceptor, "post_create_backup_plan" ) as post, mock.patch.object( + transports.BackupDRRestInterceptor, "post_create_backup_plan_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.BackupDRRestInterceptor, "pre_create_backup_plan" ) as pre: pre.assert_not_called() post.assert_not_called() + 
post_with_metadata.assert_not_called() pb_message = backupplan.CreateBackupPlanRequest.pb( backupplan.CreateBackupPlanRequest() ) @@ -22239,6 +22848,7 @@ def test_create_backup_plan_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_backup_plan( request, @@ -22250,6 +22860,7 @@ def test_create_backup_plan_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_backup_plan_rest_bad_request(request_type=backupplan.GetBackupPlanRequest): @@ -22342,10 +22953,13 @@ def test_get_backup_plan_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.BackupDRRestInterceptor, "post_get_backup_plan" ) as post, mock.patch.object( + transports.BackupDRRestInterceptor, "post_get_backup_plan_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.BackupDRRestInterceptor, "pre_get_backup_plan" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = backupplan.GetBackupPlanRequest.pb( backupplan.GetBackupPlanRequest() ) @@ -22369,6 +22983,7 @@ def test_get_backup_plan_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = backupplan.BackupPlan() + post_with_metadata.return_value = backupplan.BackupPlan(), metadata client.get_backup_plan( request, @@ -22380,6 +22995,7 @@ def test_get_backup_plan_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_backup_plans_rest_bad_request( @@ -22464,10 +23080,13 @@ def test_list_backup_plans_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.BackupDRRestInterceptor, "post_list_backup_plans" ) as post, mock.patch.object( + transports.BackupDRRestInterceptor, "post_list_backup_plans_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.BackupDRRestInterceptor, "pre_list_backup_plans" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = backupplan.ListBackupPlansRequest.pb( backupplan.ListBackupPlansRequest() ) @@ -22493,6 +23112,7 @@ def test_list_backup_plans_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = backupplan.ListBackupPlansResponse() + post_with_metadata.return_value = backupplan.ListBackupPlansResponse(), metadata client.list_backup_plans( request, @@ -22504,6 +23124,7 @@ def test_list_backup_plans_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_backup_plan_rest_bad_request( @@ -22582,10 +23203,13 @@ def test_delete_backup_plan_rest_interceptors(null_interceptor): ), mock.patch.object( transports.BackupDRRestInterceptor, "post_delete_backup_plan" ) as post, mock.patch.object( + transports.BackupDRRestInterceptor, "post_delete_backup_plan_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.BackupDRRestInterceptor, "pre_delete_backup_plan" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = backupplan.DeleteBackupPlanRequest.pb( backupplan.DeleteBackupPlanRequest() ) @@ -22609,6 +23233,7 @@ def test_delete_backup_plan_rest_interceptors(null_interceptor): ] 
pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.delete_backup_plan( request, @@ -22620,6 +23245,7 @@ def test_delete_backup_plan_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_backup_plan_association_rest_bad_request( @@ -22796,10 +23422,14 @@ def test_create_backup_plan_association_rest_interceptors(null_interceptor): ), mock.patch.object( transports.BackupDRRestInterceptor, "post_create_backup_plan_association" ) as post, mock.patch.object( + transports.BackupDRRestInterceptor, + "post_create_backup_plan_association_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.BackupDRRestInterceptor, "pre_create_backup_plan_association" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = backupplanassociation.CreateBackupPlanAssociationRequest.pb( backupplanassociation.CreateBackupPlanAssociationRequest() ) @@ -22823,6 +23453,7 @@ def test_create_backup_plan_association_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_backup_plan_association( request, @@ -22834,6 +23465,7 @@ def test_create_backup_plan_association_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_backup_plan_association_rest_bad_request( @@ -22930,10 +23562,14 @@ def test_get_backup_plan_association_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.BackupDRRestInterceptor, "post_get_backup_plan_association" ) as post, mock.patch.object( + transports.BackupDRRestInterceptor, + "post_get_backup_plan_association_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.BackupDRRestInterceptor, "pre_get_backup_plan_association" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = backupplanassociation.GetBackupPlanAssociationRequest.pb( backupplanassociation.GetBackupPlanAssociationRequest() ) @@ -22959,6 +23595,10 @@ def test_get_backup_plan_association_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = backupplanassociation.BackupPlanAssociation() + post_with_metadata.return_value = ( + backupplanassociation.BackupPlanAssociation(), + metadata, + ) client.get_backup_plan_association( request, @@ -22970,6 +23610,7 @@ def test_get_backup_plan_association_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_backup_plan_associations_rest_bad_request( @@ -23056,10 +23697,14 @@ def test_list_backup_plan_associations_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.BackupDRRestInterceptor, "post_list_backup_plan_associations" ) as post, mock.patch.object( + transports.BackupDRRestInterceptor, + "post_list_backup_plan_associations_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.BackupDRRestInterceptor, "pre_list_backup_plan_associations" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = 
backupplanassociation.ListBackupPlanAssociationsRequest.pb( backupplanassociation.ListBackupPlanAssociationsRequest() ) @@ -23085,6 +23730,10 @@ def test_list_backup_plan_associations_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = backupplanassociation.ListBackupPlanAssociationsResponse() + post_with_metadata.return_value = ( + backupplanassociation.ListBackupPlanAssociationsResponse(), + metadata, + ) client.list_backup_plan_associations( request, @@ -23096,6 +23745,7 @@ def test_list_backup_plan_associations_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_backup_plan_association_rest_bad_request( @@ -23178,10 +23828,14 @@ def test_delete_backup_plan_association_rest_interceptors(null_interceptor): ), mock.patch.object( transports.BackupDRRestInterceptor, "post_delete_backup_plan_association" ) as post, mock.patch.object( + transports.BackupDRRestInterceptor, + "post_delete_backup_plan_association_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.BackupDRRestInterceptor, "pre_delete_backup_plan_association" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = backupplanassociation.DeleteBackupPlanAssociationRequest.pb( backupplanassociation.DeleteBackupPlanAssociationRequest() ) @@ -23205,6 +23859,7 @@ def test_delete_backup_plan_association_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.delete_backup_plan_association( request, @@ -23216,6 +23871,7 @@ def test_delete_backup_plan_association_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_trigger_backup_rest_bad_request( @@ -23298,10 +23954,13 @@ def test_trigger_backup_rest_interceptors(null_interceptor): ), mock.patch.object( transports.BackupDRRestInterceptor, "post_trigger_backup" ) as post, mock.patch.object( + transports.BackupDRRestInterceptor, "post_trigger_backup_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.BackupDRRestInterceptor, "pre_trigger_backup" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = backupplanassociation.TriggerBackupRequest.pb( backupplanassociation.TriggerBackupRequest() ) @@ -23325,6 +23984,7 @@ def test_trigger_backup_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.trigger_backup( request, @@ -23336,6 +23996,128 @@ def test_trigger_backup_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_initialize_service_rest_bad_request( + request_type=backupdr.InitializeServiceRequest, +): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/serviceConfig"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
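+    # A mocked 400 response from Session.request should surface to the caller as core_exceptions.BadRequest.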
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.initialize_service(request) + + +@pytest.mark.parametrize( + "request_type", + [ + backupdr.InitializeServiceRequest, + dict, + ], +) +def test_initialize_service_rest_call_success(request_type): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/serviceConfig"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.initialize_service(request) + + # Establish that the response is the type that we expect. + json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_initialize_service_rest_interceptors(null_interceptor): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(), + ) + client = BackupDRClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.BackupDRRestInterceptor, "post_initialize_service" + ) as post, mock.patch.object( + transports.BackupDRRestInterceptor, "post_initialize_service_with_metadata" + ) as post_with_metadata, mock.patch.object( + transports.BackupDRRestInterceptor, "pre_initialize_service" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = backupdr.InitializeServiceRequest.pb( + backupdr.InitializeServiceRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = backupdr.InitializeServiceRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata + + client.initialize_service( + request, + metadata=[ + ("key", "val"), + 
("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationRequest): @@ -24484,6 +25266,28 @@ def test_trigger_backup_empty_call_rest(): assert args[0] == request_msg +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_initialize_service_empty_call_rest(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.initialize_service), "__call__" + ) as call: + client.initialize_service(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = backupdr.InitializeServiceRequest() + + assert args[0] == request_msg + + def test_backup_dr_rest_lro_client(): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), @@ -24561,6 +25365,7 @@ def test_backup_dr_base_transport(): "list_backup_plan_associations", "delete_backup_plan_association", "trigger_backup", + "initialize_service", "set_iam_policy", "get_iam_policy", "test_iam_permissions", @@ -24911,6 +25716,9 @@ def test_backup_dr_client_transport_session_collision(transport_name): session1 = client1.transport.trigger_backup._session session2 = client2.transport.trigger_backup._session assert session1 != session2 + session1 = client1.transport.initialize_service._session + session2 = client2.transport.initialize_service._session + assert session1 != session2 def test_backup_dr_grpc_transport_channel(): diff --git a/packages/google-cloud-bare-metal-solution/CHANGELOG.md b/packages/google-cloud-bare-metal-solution/CHANGELOG.md index 343674d93cad..8e3e2ff8e942 100644 --- a/packages/google-cloud-bare-metal-solution/CHANGELOG.md +++ b/packages/google-cloud-bare-metal-solution/CHANGELOG.md @@ -1,5 +1,13 @@ # Changelog +## [1.10.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-bare-metal-solution-v1.9.0...google-cloud-bare-metal-solution-v1.10.0) (2025-02-12) + + +### Features + +* Add REST Interceptors which support reading metadata ([b1c3ce8](https://github.com/googleapis/google-cloud-python/commit/b1c3ce8b271e9d22afabcde054e81dcedae6b0ef)) +* Add support for reading selective GAPIC generation methods from service YAML ([b1c3ce8](https://github.com/googleapis/google-cloud-python/commit/b1c3ce8b271e9d22afabcde054e81dcedae6b0ef)) + ## [1.9.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-bare-metal-solution-v1.8.1...google-cloud-bare-metal-solution-v1.9.0) (2024-12-12) diff --git a/packages/google-cloud-bare-metal-solution/README.rst b/packages/google-cloud-bare-metal-solution/README.rst index 6d7b0c92ef0a..1c42ac8c58d8 100644 --- a/packages/google-cloud-bare-metal-solution/README.rst +++ b/packages/google-cloud-bare-metal-solution/README.rst @@ -26,12 +26,12 @@ In order to use this library, you first need to go through the following steps: 1. `Select or create a Cloud Platform project.`_ 2. `Enable billing for your project.`_ 3. `Enable the Bare Metal Solution.`_ -4. `Setup Authentication.`_ +4. `Set up Authentication.`_ .. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project .. 
_Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project .. _Enable the Bare Metal Solution.: https://cloud.google.com/bare-metal/docs -.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html +.. _Set up Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html Installation ~~~~~~~~~~~~ diff --git a/packages/google-cloud-bare-metal-solution/google/cloud/bare_metal_solution/gapic_version.py b/packages/google-cloud-bare-metal-solution/google/cloud/bare_metal_solution/gapic_version.py index 1c08bcbd1569..d1d2a9e60a97 100644 --- a/packages/google-cloud-bare-metal-solution/google/cloud/bare_metal_solution/gapic_version.py +++ b/packages/google-cloud-bare-metal-solution/google/cloud/bare_metal_solution/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.9.0" # {x-release-please-version} +__version__ = "1.10.0" # {x-release-please-version} diff --git a/packages/google-cloud-bare-metal-solution/google/cloud/bare_metal_solution_v2/gapic_version.py b/packages/google-cloud-bare-metal-solution/google/cloud/bare_metal_solution_v2/gapic_version.py index 1c08bcbd1569..d1d2a9e60a97 100644 --- a/packages/google-cloud-bare-metal-solution/google/cloud/bare_metal_solution_v2/gapic_version.py +++ b/packages/google-cloud-bare-metal-solution/google/cloud/bare_metal_solution_v2/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.9.0" # {x-release-please-version} +__version__ = "1.10.0" # {x-release-please-version} diff --git a/packages/google-cloud-bare-metal-solution/google/cloud/bare_metal_solution_v2/services/bare_metal_solution/client.py b/packages/google-cloud-bare-metal-solution/google/cloud/bare_metal_solution_v2/services/bare_metal_solution/client.py index 8c0b957907e4..a36d016965c9 100644 --- a/packages/google-cloud-bare-metal-solution/google/cloud/bare_metal_solution_v2/services/bare_metal_solution/client.py +++ b/packages/google-cloud-bare-metal-solution/google/cloud/bare_metal_solution_v2/services/bare_metal_solution/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -853,6 +855,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. 
@@ -6292,16 +6321,20 @@ def get_location( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def list_locations( self, @@ -6347,16 +6380,20 @@ def list_locations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-bare-metal-solution/google/cloud/bare_metal_solution_v2/services/bare_metal_solution/transports/rest.py b/packages/google-cloud-bare-metal-solution/google/cloud/bare_metal_solution_v2/services/bare_metal_solution/transports/rest.py index 9f1bd00eefd2..b09bfde6b4e0 100644 --- a/packages/google-cloud-bare-metal-solution/google/cloud/bare_metal_solution_v2/services/bare_metal_solution/transports/rest.py +++ b/packages/google-cloud-bare-metal-solution/google/cloud/bare_metal_solution_v2/services/bare_metal_solution/transports/rest.py @@ -457,12 +457,35 @@ def post_create_nfs_share( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_nfs_share - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_nfs_share_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the BareMetalSolution server but before - it is returned to user code. + it is returned to user code. This `post_create_nfs_share` interceptor runs + before the `post_create_nfs_share_with_metadata` interceptor. """ return response + def post_create_nfs_share_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_nfs_share + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BareMetalSolution server but before it is returned to user code. + + We recommend only using this `post_create_nfs_share_with_metadata` + interceptor in new development instead of the `post_create_nfs_share` interceptor. + When both interceptors are used, this `post_create_nfs_share_with_metadata` interceptor runs after the + `post_create_nfs_share` interceptor. The (possibly modified) response returned by + `post_create_nfs_share` will be passed to + `post_create_nfs_share_with_metadata`. + """ + return response, metadata + def pre_create_provisioning_config( self, request: provisioning.CreateProvisioningConfigRequest, @@ -483,12 +506,37 @@ def post_create_provisioning_config( ) -> provisioning.ProvisioningConfig: """Post-rpc interceptor for create_provisioning_config - Override in a subclass to manipulate the response + DEPRECATED. 
Please use the `post_create_provisioning_config_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the BareMetalSolution server but before - it is returned to user code. + it is returned to user code. This `post_create_provisioning_config` interceptor runs + before the `post_create_provisioning_config_with_metadata` interceptor. """ return response + def post_create_provisioning_config_with_metadata( + self, + response: provisioning.ProvisioningConfig, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + provisioning.ProvisioningConfig, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for create_provisioning_config + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BareMetalSolution server but before it is returned to user code. + + We recommend only using this `post_create_provisioning_config_with_metadata` + interceptor in new development instead of the `post_create_provisioning_config` interceptor. + When both interceptors are used, this `post_create_provisioning_config_with_metadata` interceptor runs after the + `post_create_provisioning_config` interceptor. The (possibly modified) response returned by + `post_create_provisioning_config` will be passed to + `post_create_provisioning_config_with_metadata`. + """ + return response, metadata + def pre_create_ssh_key( self, request: gcb_ssh_key.CreateSSHKeyRequest, @@ -506,12 +554,35 @@ def pre_create_ssh_key( def post_create_ssh_key(self, response: gcb_ssh_key.SSHKey) -> gcb_ssh_key.SSHKey: """Post-rpc interceptor for create_ssh_key - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_ssh_key_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the BareMetalSolution server but before - it is returned to user code. + it is returned to user code. This `post_create_ssh_key` interceptor runs + before the `post_create_ssh_key_with_metadata` interceptor. """ return response + def post_create_ssh_key_with_metadata( + self, + response: gcb_ssh_key.SSHKey, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gcb_ssh_key.SSHKey, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_ssh_key + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BareMetalSolution server but before it is returned to user code. + + We recommend only using this `post_create_ssh_key_with_metadata` + interceptor in new development instead of the `post_create_ssh_key` interceptor. + When both interceptors are used, this `post_create_ssh_key_with_metadata` interceptor runs after the + `post_create_ssh_key` interceptor. The (possibly modified) response returned by + `post_create_ssh_key` will be passed to + `post_create_ssh_key_with_metadata`. + """ + return response, metadata + def pre_create_volume_snapshot( self, request: gcb_volume_snapshot.CreateVolumeSnapshotRequest, @@ -532,12 +603,37 @@ def post_create_volume_snapshot( ) -> gcb_volume_snapshot.VolumeSnapshot: """Post-rpc interceptor for create_volume_snapshot - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_volume_snapshot_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the BareMetalSolution server but before - it is returned to user code. 
+ it is returned to user code. This `post_create_volume_snapshot` interceptor runs + before the `post_create_volume_snapshot_with_metadata` interceptor. """ return response + def post_create_volume_snapshot_with_metadata( + self, + response: gcb_volume_snapshot.VolumeSnapshot, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + gcb_volume_snapshot.VolumeSnapshot, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for create_volume_snapshot + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BareMetalSolution server but before it is returned to user code. + + We recommend only using this `post_create_volume_snapshot_with_metadata` + interceptor in new development instead of the `post_create_volume_snapshot` interceptor. + When both interceptors are used, this `post_create_volume_snapshot_with_metadata` interceptor runs after the + `post_create_volume_snapshot` interceptor. The (possibly modified) response returned by + `post_create_volume_snapshot` will be passed to + `post_create_volume_snapshot_with_metadata`. + """ + return response, metadata + def pre_delete_nfs_share( self, request: nfs_share.DeleteNfsShareRequest, @@ -557,12 +653,35 @@ def post_delete_nfs_share( ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_nfs_share - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_nfs_share_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the BareMetalSolution server but before - it is returned to user code. + it is returned to user code. This `post_delete_nfs_share` interceptor runs + before the `post_delete_nfs_share_with_metadata` interceptor. """ return response + def post_delete_nfs_share_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_nfs_share + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BareMetalSolution server but before it is returned to user code. + + We recommend only using this `post_delete_nfs_share_with_metadata` + interceptor in new development instead of the `post_delete_nfs_share` interceptor. + When both interceptors are used, this `post_delete_nfs_share_with_metadata` interceptor runs after the + `post_delete_nfs_share` interceptor. The (possibly modified) response returned by + `post_delete_nfs_share` will be passed to + `post_delete_nfs_share_with_metadata`. + """ + return response, metadata + def pre_delete_ssh_key( self, request: ssh_key.DeleteSSHKeyRequest, @@ -607,12 +726,35 @@ def post_detach_lun( ) -> operations_pb2.Operation: """Post-rpc interceptor for detach_lun - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_detach_lun_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the BareMetalSolution server but before - it is returned to user code. + it is returned to user code. This `post_detach_lun` interceptor runs + before the `post_detach_lun_with_metadata` interceptor. 
""" return response + def post_detach_lun_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for detach_lun + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BareMetalSolution server but before it is returned to user code. + + We recommend only using this `post_detach_lun_with_metadata` + interceptor in new development instead of the `post_detach_lun` interceptor. + When both interceptors are used, this `post_detach_lun_with_metadata` interceptor runs after the + `post_detach_lun` interceptor. The (possibly modified) response returned by + `post_detach_lun` will be passed to + `post_detach_lun_with_metadata`. + """ + return response, metadata + def pre_disable_interactive_serial_console( self, request: instance.DisableInteractiveSerialConsoleRequest, @@ -633,12 +775,35 @@ def post_disable_interactive_serial_console( ) -> operations_pb2.Operation: """Post-rpc interceptor for disable_interactive_serial_console - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_disable_interactive_serial_console_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the BareMetalSolution server but before - it is returned to user code. + it is returned to user code. This `post_disable_interactive_serial_console` interceptor runs + before the `post_disable_interactive_serial_console_with_metadata` interceptor. """ return response + def post_disable_interactive_serial_console_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for disable_interactive_serial_console + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BareMetalSolution server but before it is returned to user code. + + We recommend only using this `post_disable_interactive_serial_console_with_metadata` + interceptor in new development instead of the `post_disable_interactive_serial_console` interceptor. + When both interceptors are used, this `post_disable_interactive_serial_console_with_metadata` interceptor runs after the + `post_disable_interactive_serial_console` interceptor. The (possibly modified) response returned by + `post_disable_interactive_serial_console` will be passed to + `post_disable_interactive_serial_console_with_metadata`. + """ + return response, metadata + def pre_enable_interactive_serial_console( self, request: instance.EnableInteractiveSerialConsoleRequest, @@ -659,12 +824,35 @@ def post_enable_interactive_serial_console( ) -> operations_pb2.Operation: """Post-rpc interceptor for enable_interactive_serial_console - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_enable_interactive_serial_console_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the BareMetalSolution server but before - it is returned to user code. + it is returned to user code. This `post_enable_interactive_serial_console` interceptor runs + before the `post_enable_interactive_serial_console_with_metadata` interceptor. 
""" return response + def post_enable_interactive_serial_console_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for enable_interactive_serial_console + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BareMetalSolution server but before it is returned to user code. + + We recommend only using this `post_enable_interactive_serial_console_with_metadata` + interceptor in new development instead of the `post_enable_interactive_serial_console` interceptor. + When both interceptors are used, this `post_enable_interactive_serial_console_with_metadata` interceptor runs after the + `post_enable_interactive_serial_console` interceptor. The (possibly modified) response returned by + `post_enable_interactive_serial_console` will be passed to + `post_enable_interactive_serial_console_with_metadata`. + """ + return response, metadata + def pre_evict_lun( self, request: lun.EvictLunRequest, @@ -682,12 +870,35 @@ def post_evict_lun( ) -> operations_pb2.Operation: """Post-rpc interceptor for evict_lun - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_evict_lun_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the BareMetalSolution server but before - it is returned to user code. + it is returned to user code. This `post_evict_lun` interceptor runs + before the `post_evict_lun_with_metadata` interceptor. """ return response + def post_evict_lun_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for evict_lun + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BareMetalSolution server but before it is returned to user code. + + We recommend only using this `post_evict_lun_with_metadata` + interceptor in new development instead of the `post_evict_lun` interceptor. + When both interceptors are used, this `post_evict_lun_with_metadata` interceptor runs after the + `post_evict_lun` interceptor. The (possibly modified) response returned by + `post_evict_lun` will be passed to + `post_evict_lun_with_metadata`. + """ + return response, metadata + def pre_evict_volume( self, request: volume.EvictVolumeRequest, @@ -705,12 +916,35 @@ def post_evict_volume( ) -> operations_pb2.Operation: """Post-rpc interceptor for evict_volume - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_evict_volume_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the BareMetalSolution server but before - it is returned to user code. + it is returned to user code. This `post_evict_volume` interceptor runs + before the `post_evict_volume_with_metadata` interceptor. 
""" return response + def post_evict_volume_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for evict_volume + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BareMetalSolution server but before it is returned to user code. + + We recommend only using this `post_evict_volume_with_metadata` + interceptor in new development instead of the `post_evict_volume` interceptor. + When both interceptors are used, this `post_evict_volume_with_metadata` interceptor runs after the + `post_evict_volume` interceptor. The (possibly modified) response returned by + `post_evict_volume` will be passed to + `post_evict_volume_with_metadata`. + """ + return response, metadata + def pre_get_instance( self, request: instance.GetInstanceRequest, @@ -726,12 +960,35 @@ def pre_get_instance( def post_get_instance(self, response: instance.Instance) -> instance.Instance: """Post-rpc interceptor for get_instance - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_instance_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the BareMetalSolution server but before - it is returned to user code. + it is returned to user code. This `post_get_instance` interceptor runs + before the `post_get_instance_with_metadata` interceptor. """ return response + def post_get_instance_with_metadata( + self, + response: instance.Instance, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[instance.Instance, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_instance + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BareMetalSolution server but before it is returned to user code. + + We recommend only using this `post_get_instance_with_metadata` + interceptor in new development instead of the `post_get_instance` interceptor. + When both interceptors are used, this `post_get_instance_with_metadata` interceptor runs after the + `post_get_instance` interceptor. The (possibly modified) response returned by + `post_get_instance` will be passed to + `post_get_instance_with_metadata`. + """ + return response, metadata + def pre_get_lun( self, request: lun.GetLunRequest, @@ -747,12 +1004,33 @@ def pre_get_lun( def post_get_lun(self, response: lun.Lun) -> lun.Lun: """Post-rpc interceptor for get_lun - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_lun_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the BareMetalSolution server but before - it is returned to user code. + it is returned to user code. This `post_get_lun` interceptor runs + before the `post_get_lun_with_metadata` interceptor. """ return response + def post_get_lun_with_metadata( + self, response: lun.Lun, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[lun.Lun, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_lun + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BareMetalSolution server but before it is returned to user code. 
+ + We recommend only using this `post_get_lun_with_metadata` + interceptor in new development instead of the `post_get_lun` interceptor. + When both interceptors are used, this `post_get_lun_with_metadata` interceptor runs after the + `post_get_lun` interceptor. The (possibly modified) response returned by + `post_get_lun` will be passed to + `post_get_lun_with_metadata`. + """ + return response, metadata + def pre_get_network( self, request: network.GetNetworkRequest, @@ -768,12 +1046,35 @@ def pre_get_network( def post_get_network(self, response: network.Network) -> network.Network: """Post-rpc interceptor for get_network - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_network_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the BareMetalSolution server but before - it is returned to user code. + it is returned to user code. This `post_get_network` interceptor runs + before the `post_get_network_with_metadata` interceptor. """ return response + def post_get_network_with_metadata( + self, + response: network.Network, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[network.Network, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_network + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BareMetalSolution server but before it is returned to user code. + + We recommend only using this `post_get_network_with_metadata` + interceptor in new development instead of the `post_get_network` interceptor. + When both interceptors are used, this `post_get_network_with_metadata` interceptor runs after the + `post_get_network` interceptor. The (possibly modified) response returned by + `post_get_network` will be passed to + `post_get_network_with_metadata`. + """ + return response, metadata + def pre_get_nfs_share( self, request: nfs_share.GetNfsShareRequest, @@ -789,12 +1090,35 @@ def pre_get_nfs_share( def post_get_nfs_share(self, response: nfs_share.NfsShare) -> nfs_share.NfsShare: """Post-rpc interceptor for get_nfs_share - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_nfs_share_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the BareMetalSolution server but before - it is returned to user code. + it is returned to user code. This `post_get_nfs_share` interceptor runs + before the `post_get_nfs_share_with_metadata` interceptor. """ return response + def post_get_nfs_share_with_metadata( + self, + response: nfs_share.NfsShare, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[nfs_share.NfsShare, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_nfs_share + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BareMetalSolution server but before it is returned to user code. + + We recommend only using this `post_get_nfs_share_with_metadata` + interceptor in new development instead of the `post_get_nfs_share` interceptor. + When both interceptors are used, this `post_get_nfs_share_with_metadata` interceptor runs after the + `post_get_nfs_share` interceptor. The (possibly modified) response returned by + `post_get_nfs_share` will be passed to + `post_get_nfs_share_with_metadata`. 
+ """ + return response, metadata + def pre_get_provisioning_config( self, request: provisioning.GetProvisioningConfigRequest, @@ -815,12 +1139,37 @@ def post_get_provisioning_config( ) -> provisioning.ProvisioningConfig: """Post-rpc interceptor for get_provisioning_config - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_provisioning_config_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the BareMetalSolution server but before - it is returned to user code. + it is returned to user code. This `post_get_provisioning_config` interceptor runs + before the `post_get_provisioning_config_with_metadata` interceptor. """ return response + def post_get_provisioning_config_with_metadata( + self, + response: provisioning.ProvisioningConfig, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + provisioning.ProvisioningConfig, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for get_provisioning_config + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BareMetalSolution server but before it is returned to user code. + + We recommend only using this `post_get_provisioning_config_with_metadata` + interceptor in new development instead of the `post_get_provisioning_config` interceptor. + When both interceptors are used, this `post_get_provisioning_config_with_metadata` interceptor runs after the + `post_get_provisioning_config` interceptor. The (possibly modified) response returned by + `post_get_provisioning_config` will be passed to + `post_get_provisioning_config_with_metadata`. + """ + return response, metadata + def pre_get_volume( self, request: volume.GetVolumeRequest, @@ -836,12 +1185,33 @@ def pre_get_volume( def post_get_volume(self, response: volume.Volume) -> volume.Volume: """Post-rpc interceptor for get_volume - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_volume_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the BareMetalSolution server but before - it is returned to user code. + it is returned to user code. This `post_get_volume` interceptor runs + before the `post_get_volume_with_metadata` interceptor. """ return response + def post_get_volume_with_metadata( + self, response: volume.Volume, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[volume.Volume, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_volume + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BareMetalSolution server but before it is returned to user code. + + We recommend only using this `post_get_volume_with_metadata` + interceptor in new development instead of the `post_get_volume` interceptor. + When both interceptors are used, this `post_get_volume_with_metadata` interceptor runs after the + `post_get_volume` interceptor. The (possibly modified) response returned by + `post_get_volume` will be passed to + `post_get_volume_with_metadata`. + """ + return response, metadata + def pre_get_volume_snapshot( self, request: volume_snapshot.GetVolumeSnapshotRequest, @@ -862,12 +1232,35 @@ def post_get_volume_snapshot( ) -> volume_snapshot.VolumeSnapshot: """Post-rpc interceptor for get_volume_snapshot - Override in a subclass to manipulate the response + DEPRECATED. 
Please use the `post_get_volume_snapshot_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the BareMetalSolution server but before - it is returned to user code. + it is returned to user code. This `post_get_volume_snapshot` interceptor runs + before the `post_get_volume_snapshot_with_metadata` interceptor. """ return response + def post_get_volume_snapshot_with_metadata( + self, + response: volume_snapshot.VolumeSnapshot, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[volume_snapshot.VolumeSnapshot, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_volume_snapshot + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BareMetalSolution server but before it is returned to user code. + + We recommend only using this `post_get_volume_snapshot_with_metadata` + interceptor in new development instead of the `post_get_volume_snapshot` interceptor. + When both interceptors are used, this `post_get_volume_snapshot_with_metadata` interceptor runs after the + `post_get_volume_snapshot` interceptor. The (possibly modified) response returned by + `post_get_volume_snapshot` will be passed to + `post_get_volume_snapshot_with_metadata`. + """ + return response, metadata + def pre_list_instances( self, request: instance.ListInstancesRequest, @@ -885,12 +1278,35 @@ def post_list_instances( ) -> instance.ListInstancesResponse: """Post-rpc interceptor for list_instances - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_instances_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the BareMetalSolution server but before - it is returned to user code. + it is returned to user code. This `post_list_instances` interceptor runs + before the `post_list_instances_with_metadata` interceptor. """ return response + def post_list_instances_with_metadata( + self, + response: instance.ListInstancesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[instance.ListInstancesResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_instances + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BareMetalSolution server but before it is returned to user code. + + We recommend only using this `post_list_instances_with_metadata` + interceptor in new development instead of the `post_list_instances` interceptor. + When both interceptors are used, this `post_list_instances_with_metadata` interceptor runs after the + `post_list_instances` interceptor. The (possibly modified) response returned by + `post_list_instances` will be passed to + `post_list_instances_with_metadata`. + """ + return response, metadata + def pre_list_luns( self, request: lun.ListLunsRequest, @@ -906,12 +1322,35 @@ def pre_list_luns( def post_list_luns(self, response: lun.ListLunsResponse) -> lun.ListLunsResponse: """Post-rpc interceptor for list_luns - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_luns_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the BareMetalSolution server but before - it is returned to user code. + it is returned to user code. This `post_list_luns` interceptor runs + before the `post_list_luns_with_metadata` interceptor. 
""" return response + def post_list_luns_with_metadata( + self, + response: lun.ListLunsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[lun.ListLunsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_luns + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BareMetalSolution server but before it is returned to user code. + + We recommend only using this `post_list_luns_with_metadata` + interceptor in new development instead of the `post_list_luns` interceptor. + When both interceptors are used, this `post_list_luns_with_metadata` interceptor runs after the + `post_list_luns` interceptor. The (possibly modified) response returned by + `post_list_luns` will be passed to + `post_list_luns_with_metadata`. + """ + return response, metadata + def pre_list_networks( self, request: network.ListNetworksRequest, @@ -929,12 +1368,35 @@ def post_list_networks( ) -> network.ListNetworksResponse: """Post-rpc interceptor for list_networks - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_networks_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the BareMetalSolution server but before - it is returned to user code. + it is returned to user code. This `post_list_networks` interceptor runs + before the `post_list_networks_with_metadata` interceptor. """ return response + def post_list_networks_with_metadata( + self, + response: network.ListNetworksResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[network.ListNetworksResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_networks + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BareMetalSolution server but before it is returned to user code. + + We recommend only using this `post_list_networks_with_metadata` + interceptor in new development instead of the `post_list_networks` interceptor. + When both interceptors are used, this `post_list_networks_with_metadata` interceptor runs after the + `post_list_networks` interceptor. The (possibly modified) response returned by + `post_list_networks` will be passed to + `post_list_networks_with_metadata`. + """ + return response, metadata + def pre_list_network_usage( self, request: network.ListNetworkUsageRequest, @@ -954,12 +1416,37 @@ def post_list_network_usage( ) -> network.ListNetworkUsageResponse: """Post-rpc interceptor for list_network_usage - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_network_usage_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the BareMetalSolution server but before - it is returned to user code. + it is returned to user code. This `post_list_network_usage` interceptor runs + before the `post_list_network_usage_with_metadata` interceptor. """ return response + def post_list_network_usage_with_metadata( + self, + response: network.ListNetworkUsageResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + network.ListNetworkUsageResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_network_usage + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BareMetalSolution server but before it is returned to user code. 
+ + We recommend only using this `post_list_network_usage_with_metadata` + interceptor in new development instead of the `post_list_network_usage` interceptor. + When both interceptors are used, this `post_list_network_usage_with_metadata` interceptor runs after the + `post_list_network_usage` interceptor. The (possibly modified) response returned by + `post_list_network_usage` will be passed to + `post_list_network_usage_with_metadata`. + """ + return response, metadata + def pre_list_nfs_shares( self, request: nfs_share.ListNfsSharesRequest, @@ -977,12 +1464,37 @@ def post_list_nfs_shares( ) -> nfs_share.ListNfsSharesResponse: """Post-rpc interceptor for list_nfs_shares - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_nfs_shares_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the BareMetalSolution server but before - it is returned to user code. + it is returned to user code. This `post_list_nfs_shares` interceptor runs + before the `post_list_nfs_shares_with_metadata` interceptor. """ return response + def post_list_nfs_shares_with_metadata( + self, + response: nfs_share.ListNfsSharesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + nfs_share.ListNfsSharesResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_nfs_shares + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BareMetalSolution server but before it is returned to user code. + + We recommend only using this `post_list_nfs_shares_with_metadata` + interceptor in new development instead of the `post_list_nfs_shares` interceptor. + When both interceptors are used, this `post_list_nfs_shares_with_metadata` interceptor runs after the + `post_list_nfs_shares` interceptor. The (possibly modified) response returned by + `post_list_nfs_shares` will be passed to + `post_list_nfs_shares_with_metadata`. + """ + return response, metadata + def pre_list_os_images( self, request: osimage.ListOSImagesRequest, @@ -1000,12 +1512,35 @@ def post_list_os_images( ) -> osimage.ListOSImagesResponse: """Post-rpc interceptor for list_os_images - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_os_images_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the BareMetalSolution server but before - it is returned to user code. + it is returned to user code. This `post_list_os_images` interceptor runs + before the `post_list_os_images_with_metadata` interceptor. """ return response + def post_list_os_images_with_metadata( + self, + response: osimage.ListOSImagesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[osimage.ListOSImagesResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_os_images + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BareMetalSolution server but before it is returned to user code. + + We recommend only using this `post_list_os_images_with_metadata` + interceptor in new development instead of the `post_list_os_images` interceptor. + When both interceptors are used, this `post_list_os_images_with_metadata` interceptor runs after the + `post_list_os_images` interceptor. 
The (possibly modified) response returned by + `post_list_os_images` will be passed to + `post_list_os_images_with_metadata`. + """ + return response, metadata + def pre_list_provisioning_quotas( self, request: provisioning.ListProvisioningQuotasRequest, @@ -1026,12 +1561,38 @@ def post_list_provisioning_quotas( ) -> provisioning.ListProvisioningQuotasResponse: """Post-rpc interceptor for list_provisioning_quotas - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_provisioning_quotas_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the BareMetalSolution server but before - it is returned to user code. + it is returned to user code. This `post_list_provisioning_quotas` interceptor runs + before the `post_list_provisioning_quotas_with_metadata` interceptor. """ return response + def post_list_provisioning_quotas_with_metadata( + self, + response: provisioning.ListProvisioningQuotasResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + provisioning.ListProvisioningQuotasResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_provisioning_quotas + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BareMetalSolution server but before it is returned to user code. + + We recommend only using this `post_list_provisioning_quotas_with_metadata` + interceptor in new development instead of the `post_list_provisioning_quotas` interceptor. + When both interceptors are used, this `post_list_provisioning_quotas_with_metadata` interceptor runs after the + `post_list_provisioning_quotas` interceptor. The (possibly modified) response returned by + `post_list_provisioning_quotas` will be passed to + `post_list_provisioning_quotas_with_metadata`. + """ + return response, metadata + def pre_list_ssh_keys( self, request: ssh_key.ListSSHKeysRequest, @@ -1049,12 +1610,35 @@ def post_list_ssh_keys( ) -> ssh_key.ListSSHKeysResponse: """Post-rpc interceptor for list_ssh_keys - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_ssh_keys_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the BareMetalSolution server but before - it is returned to user code. + it is returned to user code. This `post_list_ssh_keys` interceptor runs + before the `post_list_ssh_keys_with_metadata` interceptor. """ return response + def post_list_ssh_keys_with_metadata( + self, + response: ssh_key.ListSSHKeysResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ssh_key.ListSSHKeysResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_ssh_keys + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BareMetalSolution server but before it is returned to user code. + + We recommend only using this `post_list_ssh_keys_with_metadata` + interceptor in new development instead of the `post_list_ssh_keys` interceptor. + When both interceptors are used, this `post_list_ssh_keys_with_metadata` interceptor runs after the + `post_list_ssh_keys` interceptor. The (possibly modified) response returned by + `post_list_ssh_keys` will be passed to + `post_list_ssh_keys_with_metadata`. 
+ """ + return response, metadata + def pre_list_volumes( self, request: volume.ListVolumesRequest, @@ -1072,12 +1656,35 @@ def post_list_volumes( ) -> volume.ListVolumesResponse: """Post-rpc interceptor for list_volumes - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_volumes_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the BareMetalSolution server but before - it is returned to user code. + it is returned to user code. This `post_list_volumes` interceptor runs + before the `post_list_volumes_with_metadata` interceptor. """ return response + def post_list_volumes_with_metadata( + self, + response: volume.ListVolumesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[volume.ListVolumesResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_volumes + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BareMetalSolution server but before it is returned to user code. + + We recommend only using this `post_list_volumes_with_metadata` + interceptor in new development instead of the `post_list_volumes` interceptor. + When both interceptors are used, this `post_list_volumes_with_metadata` interceptor runs after the + `post_list_volumes` interceptor. The (possibly modified) response returned by + `post_list_volumes` will be passed to + `post_list_volumes_with_metadata`. + """ + return response, metadata + def pre_list_volume_snapshots( self, request: volume_snapshot.ListVolumeSnapshotsRequest, @@ -1098,12 +1705,38 @@ def post_list_volume_snapshots( ) -> volume_snapshot.ListVolumeSnapshotsResponse: """Post-rpc interceptor for list_volume_snapshots - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_volume_snapshots_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the BareMetalSolution server but before - it is returned to user code. + it is returned to user code. This `post_list_volume_snapshots` interceptor runs + before the `post_list_volume_snapshots_with_metadata` interceptor. """ return response + def post_list_volume_snapshots_with_metadata( + self, + response: volume_snapshot.ListVolumeSnapshotsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + volume_snapshot.ListVolumeSnapshotsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_volume_snapshots + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BareMetalSolution server but before it is returned to user code. + + We recommend only using this `post_list_volume_snapshots_with_metadata` + interceptor in new development instead of the `post_list_volume_snapshots` interceptor. + When both interceptors are used, this `post_list_volume_snapshots_with_metadata` interceptor runs after the + `post_list_volume_snapshots` interceptor. The (possibly modified) response returned by + `post_list_volume_snapshots` will be passed to + `post_list_volume_snapshots_with_metadata`. 
+ """ + return response, metadata + def pre_rename_instance( self, request: instance.RenameInstanceRequest, @@ -1119,12 +1752,35 @@ def pre_rename_instance( def post_rename_instance(self, response: instance.Instance) -> instance.Instance: """Post-rpc interceptor for rename_instance - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_rename_instance_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the BareMetalSolution server but before - it is returned to user code. + it is returned to user code. This `post_rename_instance` interceptor runs + before the `post_rename_instance_with_metadata` interceptor. """ return response + def post_rename_instance_with_metadata( + self, + response: instance.Instance, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[instance.Instance, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for rename_instance + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BareMetalSolution server but before it is returned to user code. + + We recommend only using this `post_rename_instance_with_metadata` + interceptor in new development instead of the `post_rename_instance` interceptor. + When both interceptors are used, this `post_rename_instance_with_metadata` interceptor runs after the + `post_rename_instance` interceptor. The (possibly modified) response returned by + `post_rename_instance` will be passed to + `post_rename_instance_with_metadata`. + """ + return response, metadata + def pre_rename_network( self, request: network.RenameNetworkRequest, @@ -1140,12 +1796,35 @@ def pre_rename_network( def post_rename_network(self, response: network.Network) -> network.Network: """Post-rpc interceptor for rename_network - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_rename_network_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the BareMetalSolution server but before - it is returned to user code. + it is returned to user code. This `post_rename_network` interceptor runs + before the `post_rename_network_with_metadata` interceptor. """ return response + def post_rename_network_with_metadata( + self, + response: network.Network, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[network.Network, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for rename_network + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BareMetalSolution server but before it is returned to user code. + + We recommend only using this `post_rename_network_with_metadata` + interceptor in new development instead of the `post_rename_network` interceptor. + When both interceptors are used, this `post_rename_network_with_metadata` interceptor runs after the + `post_rename_network` interceptor. The (possibly modified) response returned by + `post_rename_network` will be passed to + `post_rename_network_with_metadata`. + """ + return response, metadata + def pre_rename_nfs_share( self, request: nfs_share.RenameNfsShareRequest, @@ -1163,12 +1842,35 @@ def pre_rename_nfs_share( def post_rename_nfs_share(self, response: nfs_share.NfsShare) -> nfs_share.NfsShare: """Post-rpc interceptor for rename_nfs_share - Override in a subclass to manipulate the response + DEPRECATED. 
Please use the `post_rename_nfs_share_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the BareMetalSolution server but before - it is returned to user code. + it is returned to user code. This `post_rename_nfs_share` interceptor runs + before the `post_rename_nfs_share_with_metadata` interceptor. """ return response + def post_rename_nfs_share_with_metadata( + self, + response: nfs_share.NfsShare, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[nfs_share.NfsShare, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for rename_nfs_share + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BareMetalSolution server but before it is returned to user code. + + We recommend only using this `post_rename_nfs_share_with_metadata` + interceptor in new development instead of the `post_rename_nfs_share` interceptor. + When both interceptors are used, this `post_rename_nfs_share_with_metadata` interceptor runs after the + `post_rename_nfs_share` interceptor. The (possibly modified) response returned by + `post_rename_nfs_share` will be passed to + `post_rename_nfs_share_with_metadata`. + """ + return response, metadata + def pre_rename_volume( self, request: volume.RenameVolumeRequest, @@ -1184,12 +1886,33 @@ def pre_rename_volume( def post_rename_volume(self, response: volume.Volume) -> volume.Volume: """Post-rpc interceptor for rename_volume - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_rename_volume_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the BareMetalSolution server but before - it is returned to user code. + it is returned to user code. This `post_rename_volume` interceptor runs + before the `post_rename_volume_with_metadata` interceptor. """ return response + def post_rename_volume_with_metadata( + self, response: volume.Volume, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[volume.Volume, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for rename_volume + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BareMetalSolution server but before it is returned to user code. + + We recommend only using this `post_rename_volume_with_metadata` + interceptor in new development instead of the `post_rename_volume` interceptor. + When both interceptors are used, this `post_rename_volume_with_metadata` interceptor runs after the + `post_rename_volume` interceptor. The (possibly modified) response returned by + `post_rename_volume` will be passed to + `post_rename_volume_with_metadata`. + """ + return response, metadata + def pre_reset_instance( self, request: instance.ResetInstanceRequest, @@ -1207,12 +1930,35 @@ def post_reset_instance( ) -> operations_pb2.Operation: """Post-rpc interceptor for reset_instance - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_reset_instance_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the BareMetalSolution server but before - it is returned to user code. + it is returned to user code. This `post_reset_instance` interceptor runs + before the `post_reset_instance_with_metadata` interceptor. 
""" return response + def post_reset_instance_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for reset_instance + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BareMetalSolution server but before it is returned to user code. + + We recommend only using this `post_reset_instance_with_metadata` + interceptor in new development instead of the `post_reset_instance` interceptor. + When both interceptors are used, this `post_reset_instance_with_metadata` interceptor runs after the + `post_reset_instance` interceptor. The (possibly modified) response returned by + `post_reset_instance` will be passed to + `post_reset_instance_with_metadata`. + """ + return response, metadata + def pre_resize_volume( self, request: gcb_volume.ResizeVolumeRequest, @@ -1230,12 +1976,35 @@ def post_resize_volume( ) -> operations_pb2.Operation: """Post-rpc interceptor for resize_volume - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_resize_volume_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the BareMetalSolution server but before - it is returned to user code. + it is returned to user code. This `post_resize_volume` interceptor runs + before the `post_resize_volume_with_metadata` interceptor. """ return response + def post_resize_volume_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for resize_volume + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BareMetalSolution server but before it is returned to user code. + + We recommend only using this `post_resize_volume_with_metadata` + interceptor in new development instead of the `post_resize_volume` interceptor. + When both interceptors are used, this `post_resize_volume_with_metadata` interceptor runs after the + `post_resize_volume` interceptor. The (possibly modified) response returned by + `post_resize_volume` will be passed to + `post_resize_volume_with_metadata`. + """ + return response, metadata + def pre_restore_volume_snapshot( self, request: gcb_volume_snapshot.RestoreVolumeSnapshotRequest, @@ -1256,12 +2025,35 @@ def post_restore_volume_snapshot( ) -> operations_pb2.Operation: """Post-rpc interceptor for restore_volume_snapshot - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_restore_volume_snapshot_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the BareMetalSolution server but before - it is returned to user code. + it is returned to user code. This `post_restore_volume_snapshot` interceptor runs + before the `post_restore_volume_snapshot_with_metadata` interceptor. 
""" return response + def post_restore_volume_snapshot_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for restore_volume_snapshot + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BareMetalSolution server but before it is returned to user code. + + We recommend only using this `post_restore_volume_snapshot_with_metadata` + interceptor in new development instead of the `post_restore_volume_snapshot` interceptor. + When both interceptors are used, this `post_restore_volume_snapshot_with_metadata` interceptor runs after the + `post_restore_volume_snapshot` interceptor. The (possibly modified) response returned by + `post_restore_volume_snapshot` will be passed to + `post_restore_volume_snapshot_with_metadata`. + """ + return response, metadata + def pre_start_instance( self, request: instance.StartInstanceRequest, @@ -1279,12 +2071,35 @@ def post_start_instance( ) -> operations_pb2.Operation: """Post-rpc interceptor for start_instance - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_start_instance_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the BareMetalSolution server but before - it is returned to user code. + it is returned to user code. This `post_start_instance` interceptor runs + before the `post_start_instance_with_metadata` interceptor. """ return response + def post_start_instance_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for start_instance + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BareMetalSolution server but before it is returned to user code. + + We recommend only using this `post_start_instance_with_metadata` + interceptor in new development instead of the `post_start_instance` interceptor. + When both interceptors are used, this `post_start_instance_with_metadata` interceptor runs after the + `post_start_instance` interceptor. The (possibly modified) response returned by + `post_start_instance` will be passed to + `post_start_instance_with_metadata`. + """ + return response, metadata + def pre_stop_instance( self, request: instance.StopInstanceRequest, @@ -1302,12 +2117,35 @@ def post_stop_instance( ) -> operations_pb2.Operation: """Post-rpc interceptor for stop_instance - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_stop_instance_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the BareMetalSolution server but before - it is returned to user code. + it is returned to user code. This `post_stop_instance` interceptor runs + before the `post_stop_instance_with_metadata` interceptor. 
""" return response + def post_stop_instance_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for stop_instance + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BareMetalSolution server but before it is returned to user code. + + We recommend only using this `post_stop_instance_with_metadata` + interceptor in new development instead of the `post_stop_instance` interceptor. + When both interceptors are used, this `post_stop_instance_with_metadata` interceptor runs after the + `post_stop_instance` interceptor. The (possibly modified) response returned by + `post_stop_instance` will be passed to + `post_stop_instance_with_metadata`. + """ + return response, metadata + def pre_submit_provisioning_config( self, request: provisioning.SubmitProvisioningConfigRequest, @@ -1328,12 +2166,38 @@ def post_submit_provisioning_config( ) -> provisioning.SubmitProvisioningConfigResponse: """Post-rpc interceptor for submit_provisioning_config - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_submit_provisioning_config_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the BareMetalSolution server but before - it is returned to user code. + it is returned to user code. This `post_submit_provisioning_config` interceptor runs + before the `post_submit_provisioning_config_with_metadata` interceptor. """ return response + def post_submit_provisioning_config_with_metadata( + self, + response: provisioning.SubmitProvisioningConfigResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + provisioning.SubmitProvisioningConfigResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for submit_provisioning_config + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BareMetalSolution server but before it is returned to user code. + + We recommend only using this `post_submit_provisioning_config_with_metadata` + interceptor in new development instead of the `post_submit_provisioning_config` interceptor. + When both interceptors are used, this `post_submit_provisioning_config_with_metadata` interceptor runs after the + `post_submit_provisioning_config` interceptor. The (possibly modified) response returned by + `post_submit_provisioning_config` will be passed to + `post_submit_provisioning_config_with_metadata`. + """ + return response, metadata + def pre_update_instance( self, request: gcb_instance.UpdateInstanceRequest, @@ -1353,12 +2217,35 @@ def post_update_instance( ) -> operations_pb2.Operation: """Post-rpc interceptor for update_instance - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_instance_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the BareMetalSolution server but before - it is returned to user code. + it is returned to user code. This `post_update_instance` interceptor runs + before the `post_update_instance_with_metadata` interceptor. 
""" return response + def post_update_instance_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_instance + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BareMetalSolution server but before it is returned to user code. + + We recommend only using this `post_update_instance_with_metadata` + interceptor in new development instead of the `post_update_instance` interceptor. + When both interceptors are used, this `post_update_instance_with_metadata` interceptor runs after the + `post_update_instance` interceptor. The (possibly modified) response returned by + `post_update_instance` will be passed to + `post_update_instance_with_metadata`. + """ + return response, metadata + def pre_update_network( self, request: gcb_network.UpdateNetworkRequest, @@ -1378,12 +2265,35 @@ def post_update_network( ) -> operations_pb2.Operation: """Post-rpc interceptor for update_network - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_network_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the BareMetalSolution server but before - it is returned to user code. + it is returned to user code. This `post_update_network` interceptor runs + before the `post_update_network_with_metadata` interceptor. """ return response + def post_update_network_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_network + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BareMetalSolution server but before it is returned to user code. + + We recommend only using this `post_update_network_with_metadata` + interceptor in new development instead of the `post_update_network` interceptor. + When both interceptors are used, this `post_update_network_with_metadata` interceptor runs after the + `post_update_network` interceptor. The (possibly modified) response returned by + `post_update_network` will be passed to + `post_update_network_with_metadata`. + """ + return response, metadata + def pre_update_nfs_share( self, request: gcb_nfs_share.UpdateNfsShareRequest, @@ -1403,12 +2313,35 @@ def post_update_nfs_share( ) -> operations_pb2.Operation: """Post-rpc interceptor for update_nfs_share - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_nfs_share_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the BareMetalSolution server but before - it is returned to user code. + it is returned to user code. This `post_update_nfs_share` interceptor runs + before the `post_update_nfs_share_with_metadata` interceptor. 
""" return response + def post_update_nfs_share_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_nfs_share + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BareMetalSolution server but before it is returned to user code. + + We recommend only using this `post_update_nfs_share_with_metadata` + interceptor in new development instead of the `post_update_nfs_share` interceptor. + When both interceptors are used, this `post_update_nfs_share_with_metadata` interceptor runs after the + `post_update_nfs_share` interceptor. The (possibly modified) response returned by + `post_update_nfs_share` will be passed to + `post_update_nfs_share_with_metadata`. + """ + return response, metadata + def pre_update_provisioning_config( self, request: provisioning.UpdateProvisioningConfigRequest, @@ -1429,12 +2362,37 @@ def post_update_provisioning_config( ) -> provisioning.ProvisioningConfig: """Post-rpc interceptor for update_provisioning_config - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_provisioning_config_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the BareMetalSolution server but before - it is returned to user code. + it is returned to user code. This `post_update_provisioning_config` interceptor runs + before the `post_update_provisioning_config_with_metadata` interceptor. """ return response + def post_update_provisioning_config_with_metadata( + self, + response: provisioning.ProvisioningConfig, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + provisioning.ProvisioningConfig, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for update_provisioning_config + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BareMetalSolution server but before it is returned to user code. + + We recommend only using this `post_update_provisioning_config_with_metadata` + interceptor in new development instead of the `post_update_provisioning_config` interceptor. + When both interceptors are used, this `post_update_provisioning_config_with_metadata` interceptor runs after the + `post_update_provisioning_config` interceptor. The (possibly modified) response returned by + `post_update_provisioning_config` will be passed to + `post_update_provisioning_config_with_metadata`. + """ + return response, metadata + def pre_update_volume( self, request: gcb_volume.UpdateVolumeRequest, @@ -1452,12 +2410,35 @@ def post_update_volume( ) -> operations_pb2.Operation: """Post-rpc interceptor for update_volume - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_volume_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the BareMetalSolution server but before - it is returned to user code. + it is returned to user code. This `post_update_volume` interceptor runs + before the `post_update_volume_with_metadata` interceptor. 
""" return response + def post_update_volume_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_volume + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BareMetalSolution server but before it is returned to user code. + + We recommend only using this `post_update_volume_with_metadata` + interceptor in new development instead of the `post_update_volume` interceptor. + When both interceptors are used, this `post_update_volume_with_metadata` interceptor runs after the + `post_update_volume` interceptor. The (possibly modified) response returned by + `post_update_volume` will be passed to + `post_update_volume_with_metadata`. + """ + return response, metadata + def pre_get_location( self, request: locations_pb2.GetLocationRequest, @@ -1758,6 +2739,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_nfs_share(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_nfs_share_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1910,6 +2895,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_provisioning_config(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_provisioning_config_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2059,6 +3048,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_ssh_key(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_ssh_key_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2213,6 +3206,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_volume_snapshot(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_volume_snapshot_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2359,6 +3356,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_nfs_share(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_nfs_share_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2726,6 +3727,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_detach_lun(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_detach_lun_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2883,6 +3888,13 @@ def __call__( 
json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_disable_interactive_serial_console(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_disable_interactive_serial_console_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3035,6 +4047,13 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_enable_interactive_serial_console(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_enable_interactive_serial_console_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3183,6 +4202,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_evict_lun(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_evict_lun_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3331,6 +4354,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_evict_volume(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_evict_volume_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3472,6 +4499,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_instance(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_instance_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3619,6 +4650,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_lun(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_lun_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3760,6 +4795,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_network(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_network_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3901,6 +4940,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_nfs_share(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_nfs_share_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4046,6 +5089,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_provisioning_config(resp) + response_metadata = 
[(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_provisioning_config_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4187,6 +5234,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_volume(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_volume_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4333,6 +5384,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_volume_snapshot(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_volume_snapshot_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4477,6 +5532,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_instances(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_instances_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4620,6 +5679,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_luns(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_luns_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4763,6 +5826,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_networks(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_networks_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4906,6 +5973,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_network_usage(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_network_usage_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -5052,6 +6123,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_nfs_shares(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_nfs_shares_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -5195,6 +6270,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_os_images(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_os_images_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and 
_LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -5343,6 +6422,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_provisioning_quotas(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_provisioning_quotas_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -5486,6 +6569,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_ssh_keys(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_ssh_keys_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -5629,6 +6716,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_volumes(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_volumes_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -5777,6 +6868,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_volume_snapshots(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_volume_snapshots_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -5927,6 +7022,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_rename_instance(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_rename_instance_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -6075,6 +7174,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_rename_network(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_rename_network_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -6225,6 +7328,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_rename_nfs_share(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_rename_nfs_share_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -6372,6 +7479,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_rename_volume(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_rename_volume_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -6520,6 +7631,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp 
= self._interceptor.post_reset_instance(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_reset_instance_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -6667,6 +7782,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_resize_volume(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_resize_volume_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -6820,6 +7939,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_restore_volume_snapshot(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_restore_volume_snapshot_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -6968,6 +8091,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_start_instance(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_start_instance_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -7115,6 +8242,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_stop_instance(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_stop_instance_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -7269,6 +8400,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_submit_provisioning_config(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_submit_provisioning_config_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -7420,6 +8555,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_instance(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_instance_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -7569,6 +8708,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_network(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_network_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -7720,6 +8863,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_nfs_share(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = 
self._interceptor.post_update_nfs_share_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -7873,6 +9020,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_provisioning_config(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_provisioning_config_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -8020,6 +9171,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_volume(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_volume_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-bare-metal-solution/noxfile.py b/packages/google-cloud-bare-metal-solution/noxfile.py index a9ceef47133c..0acc836b384e 100644 --- a/packages/google-cloud-bare-metal-solution/noxfile.py +++ b/packages/google-cloud-bare-metal-solution/noxfile.py @@ -382,20 +382,29 @@ def docfx(session): ["python", "upb", "cpp"], ) def prerelease_deps(session, protobuf_implementation): - """Run all tests with prerelease versions of dependencies installed.""" + """ + Run all tests with pre-release versions of dependencies installed + rather than the standard non pre-release versions. + Pre-releases versions can be installed using + `pip install --pre `. + """ if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): session.skip("cpp implementation is not supported in python 3.11+") # Install all dependencies - session.install("-e", ".[all, tests, tracing]") + session.install("-e", ".") + unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES + # Install dependencies for the unit test environment session.install(*unit_deps_all) + system_deps_all = ( SYSTEM_TEST_STANDARD_DEPENDENCIES + SYSTEM_TEST_EXTERNAL_DEPENDENCIES + SYSTEM_TEST_EXTRAS ) + # Install dependencies for the system test environment session.install(*system_deps_all) # Because we test minimum dependency versions on the minimum Python @@ -417,6 +426,7 @@ def prerelease_deps(session, protobuf_implementation): ) ] + # Install dependencies specified in `testing/constraints-X.txt`. session.install(*constraints_deps) prerel_deps = [ @@ -458,3 +468,70 @@ def prerelease_deps(session, protobuf_implementation): "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, }, ) + + +@nox.session(python="3.13") +@nox.parametrize( + "protobuf_implementation", + ["python", "upb"], +) +def core_deps_from_source(session, protobuf_implementation): + """Run all tests with local versions of core dependencies installed, + rather than pulling core dependencies from PyPI. 
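# --- Editor's illustrative aside (not part of the generated diff) ---
# The nox sessions in this file read testing/constraints-<python>.txt and pull
# out the pinned package names with re.finditer(r"^\s*(\S+)(?===\S+)", ...).
# A small self-contained demonstration using hypothetical constraints text:
import re

constraints_text = """\
google-api-core==1.34.0
proto-plus==1.22.3
# comment lines and unpinned entries are skipped
grpcio
"""
constraints_deps = [
    m.group(1)
    for m in re.finditer(r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE)
]
# constraints_deps == ["google-api-core", "proto-plus"]
# --- end aside ---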
+ """ + + # Install all dependencies + session.install(".") + + # Install dependencies for the unit test environment + unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES + session.install(*unit_deps_all) + + # Install dependencies for the system test environment + system_deps_all = ( + SYSTEM_TEST_STANDARD_DEPENDENCIES + + SYSTEM_TEST_EXTERNAL_DEPENDENCIES + + SYSTEM_TEST_EXTRAS + ) + session.install(*system_deps_all) + + # Because we test minimum dependency versions on the minimum Python + # version, the first version we test with in the unit tests sessions has a + # constraints file containing all dependencies and extras that should be installed. + with open( + CURRENT_DIRECTORY + / "testing" + / f"constraints-{UNIT_TEST_PYTHON_VERSIONS[0]}.txt", + encoding="utf-8", + ) as constraints_file: + constraints_text = constraints_file.read() + + # Ignore leading whitespace and comment lines. + constraints_deps = [ + match.group(1) + for match in re.finditer( + r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE + ) + ] + + # Install dependencies specified in `testing/constraints-X.txt`. + session.install(*constraints_deps) + + core_dependencies_from_source = [ + "google-api-core @ git+https://github.com/googleapis/python-api-core.git", + "google-auth @ git+https://github.com/googleapis/google-auth-library-python.git", + f"{CURRENT_DIRECTORY}/../googleapis-common-protos", + f"{CURRENT_DIRECTORY}/../grpc-google-iam-v1", + "proto-plus @ git+https://github.com/googleapis/proto-plus-python.git", + ] + + for dep in core_dependencies_from_source: + session.install(dep, "--ignore-installed", "--no-deps") + + session.run( + "py.test", + "tests/unit", + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, + ) diff --git a/packages/google-cloud-bare-metal-solution/samples/generated_samples/snippet_metadata_google.cloud.baremetalsolution.v2.json b/packages/google-cloud-bare-metal-solution/samples/generated_samples/snippet_metadata_google.cloud.baremetalsolution.v2.json index 5bdb9139016c..38629334665b 100644 --- a/packages/google-cloud-bare-metal-solution/samples/generated_samples/snippet_metadata_google.cloud.baremetalsolution.v2.json +++ b/packages/google-cloud-bare-metal-solution/samples/generated_samples/snippet_metadata_google.cloud.baremetalsolution.v2.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-bare-metal-solution", - "version": "1.9.0" + "version": "1.10.0" }, "snippets": [ { diff --git a/packages/google-cloud-bare-metal-solution/tests/unit/gapic/bare_metal_solution_v2/test_bare_metal_solution.py b/packages/google-cloud-bare-metal-solution/tests/unit/gapic/bare_metal_solution_v2/test_bare_metal_solution.py index 58c31864500f..e92b9b4511fe 100644 --- a/packages/google-cloud-bare-metal-solution/tests/unit/gapic/bare_metal_solution_v2/test_bare_metal_solution.py +++ b/packages/google-cloud-bare-metal-solution/tests/unit/gapic/bare_metal_solution_v2/test_bare_metal_solution.py @@ -93,6 +93,13 @@ from google.cloud.bare_metal_solution_v2.types import volume as gcb_volume from google.cloud.bare_metal_solution_v2.types import volume_snapshot +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -360,6 +367,49 @@ def test__get_universe_domain(): assert 
str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = BareMetalSolutionClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = BareMetalSolutionClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -29413,10 +29463,13 @@ def test_list_instances_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.BareMetalSolutionRestInterceptor, "post_list_instances" ) as post, mock.patch.object( + transports.BareMetalSolutionRestInterceptor, "post_list_instances_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.BareMetalSolutionRestInterceptor, "pre_list_instances" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = instance.ListInstancesRequest.pb(instance.ListInstancesRequest()) transcode.return_value = { "method": "post", @@ -29440,6 +29493,7 @@ def test_list_instances_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = instance.ListInstancesResponse() + post_with_metadata.return_value = instance.ListInstancesResponse(), metadata client.list_instances( request, @@ -29451,6 +29505,7 @@ def test_list_instances_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_instance_rest_bad_request(request_type=instance.GetInstanceRequest): @@ -29555,10 +29610,13 @@ def test_get_instance_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.BareMetalSolutionRestInterceptor, "post_get_instance" ) as post, mock.patch.object( + transports.BareMetalSolutionRestInterceptor, "post_get_instance_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.BareMetalSolutionRestInterceptor, "pre_get_instance" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = instance.GetInstanceRequest.pb(instance.GetInstanceRequest()) transcode.return_value = { "method": "post", @@ -29580,6 +29638,7 @@ def test_get_instance_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = instance.Instance() + post_with_metadata.return_value = instance.Instance(), metadata client.get_instance( request, @@ -29591,6 +29650,7 @@ def 
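# --- Editor's illustrative aside (not part of the generated diff) ---
# What the new _add_cred_info_for_auth_errors tests above exercise: when the
# transport credentials expose get_cred_info() and a call fails with an
# auth-related status (401/403/404), the client appends the credential info
# JSON to the error's details; other status codes are left untouched. Sketch
# reusing the fixtures defined in the tests (client, CRED_INFO_STRING):
error = core_exceptions.GoogleAPICallError("message", details=["foo"])
error.code = 403
client._add_cred_info_for_auth_errors(error)
assert error.details == ["foo", CRED_INFO_STRING]
# --- end aside ---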
test_get_instance_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_instance_rest_bad_request( @@ -29871,10 +29931,14 @@ def test_update_instance_rest_interceptors(null_interceptor): ), mock.patch.object( transports.BareMetalSolutionRestInterceptor, "post_update_instance" ) as post, mock.patch.object( + transports.BareMetalSolutionRestInterceptor, + "post_update_instance_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.BareMetalSolutionRestInterceptor, "pre_update_instance" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gcb_instance.UpdateInstanceRequest.pb( gcb_instance.UpdateInstanceRequest() ) @@ -29898,6 +29962,7 @@ def test_update_instance_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.update_instance( request, @@ -29909,6 +29974,7 @@ def test_update_instance_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_rename_instance_rest_bad_request(request_type=instance.RenameInstanceRequest): @@ -30013,10 +30079,14 @@ def test_rename_instance_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.BareMetalSolutionRestInterceptor, "post_rename_instance" ) as post, mock.patch.object( + transports.BareMetalSolutionRestInterceptor, + "post_rename_instance_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.BareMetalSolutionRestInterceptor, "pre_rename_instance" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = instance.RenameInstanceRequest.pb(instance.RenameInstanceRequest()) transcode.return_value = { "method": "post", @@ -30038,6 +30108,7 @@ def test_rename_instance_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = instance.Instance() + post_with_metadata.return_value = instance.Instance(), metadata client.rename_instance( request, @@ -30049,6 +30120,7 @@ def test_rename_instance_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_reset_instance_rest_bad_request(request_type=instance.ResetInstanceRequest): @@ -30127,10 +30199,13 @@ def test_reset_instance_rest_interceptors(null_interceptor): ), mock.patch.object( transports.BareMetalSolutionRestInterceptor, "post_reset_instance" ) as post, mock.patch.object( + transports.BareMetalSolutionRestInterceptor, "post_reset_instance_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.BareMetalSolutionRestInterceptor, "pre_reset_instance" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = instance.ResetInstanceRequest.pb(instance.ResetInstanceRequest()) transcode.return_value = { "method": "post", @@ -30152,6 +30227,7 @@ def test_reset_instance_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.reset_instance( request, @@ -30163,6 +30239,7 @@ def test_reset_instance_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + 
post_with_metadata.assert_called_once() def test_start_instance_rest_bad_request(request_type=instance.StartInstanceRequest): @@ -30241,10 +30318,13 @@ def test_start_instance_rest_interceptors(null_interceptor): ), mock.patch.object( transports.BareMetalSolutionRestInterceptor, "post_start_instance" ) as post, mock.patch.object( + transports.BareMetalSolutionRestInterceptor, "post_start_instance_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.BareMetalSolutionRestInterceptor, "pre_start_instance" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = instance.StartInstanceRequest.pb(instance.StartInstanceRequest()) transcode.return_value = { "method": "post", @@ -30266,6 +30346,7 @@ def test_start_instance_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.start_instance( request, @@ -30277,6 +30358,7 @@ def test_start_instance_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_stop_instance_rest_bad_request(request_type=instance.StopInstanceRequest): @@ -30355,10 +30437,13 @@ def test_stop_instance_rest_interceptors(null_interceptor): ), mock.patch.object( transports.BareMetalSolutionRestInterceptor, "post_stop_instance" ) as post, mock.patch.object( + transports.BareMetalSolutionRestInterceptor, "post_stop_instance_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.BareMetalSolutionRestInterceptor, "pre_stop_instance" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = instance.StopInstanceRequest.pb(instance.StopInstanceRequest()) transcode.return_value = { "method": "post", @@ -30380,6 +30465,7 @@ def test_stop_instance_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.stop_instance( request, @@ -30391,6 +30477,7 @@ def test_stop_instance_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_enable_interactive_serial_console_rest_bad_request( @@ -30472,11 +30559,15 @@ def test_enable_interactive_serial_console_rest_interceptors(null_interceptor): transports.BareMetalSolutionRestInterceptor, "post_enable_interactive_serial_console", ) as post, mock.patch.object( + transports.BareMetalSolutionRestInterceptor, + "post_enable_interactive_serial_console_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.BareMetalSolutionRestInterceptor, "pre_enable_interactive_serial_console", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = instance.EnableInteractiveSerialConsoleRequest.pb( instance.EnableInteractiveSerialConsoleRequest() ) @@ -30500,6 +30591,7 @@ def test_enable_interactive_serial_console_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.enable_interactive_serial_console( request, @@ -30511,6 +30603,7 @@ def test_enable_interactive_serial_console_rest_interceptors(null_interceptor): pre.assert_called_once() 
post.assert_called_once() + post_with_metadata.assert_called_once() def test_disable_interactive_serial_console_rest_bad_request( @@ -30592,11 +30685,15 @@ def test_disable_interactive_serial_console_rest_interceptors(null_interceptor): transports.BareMetalSolutionRestInterceptor, "post_disable_interactive_serial_console", ) as post, mock.patch.object( + transports.BareMetalSolutionRestInterceptor, + "post_disable_interactive_serial_console_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.BareMetalSolutionRestInterceptor, "pre_disable_interactive_serial_console", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = instance.DisableInteractiveSerialConsoleRequest.pb( instance.DisableInteractiveSerialConsoleRequest() ) @@ -30620,6 +30717,7 @@ def test_disable_interactive_serial_console_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.disable_interactive_serial_console( request, @@ -30631,6 +30729,7 @@ def test_disable_interactive_serial_console_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_detach_lun_rest_bad_request(request_type=gcb_instance.DetachLunRequest): @@ -30709,10 +30808,13 @@ def test_detach_lun_rest_interceptors(null_interceptor): ), mock.patch.object( transports.BareMetalSolutionRestInterceptor, "post_detach_lun" ) as post, mock.patch.object( + transports.BareMetalSolutionRestInterceptor, "post_detach_lun_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.BareMetalSolutionRestInterceptor, "pre_detach_lun" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gcb_instance.DetachLunRequest.pb(gcb_instance.DetachLunRequest()) transcode.return_value = { "method": "post", @@ -30734,6 +30836,7 @@ def test_detach_lun_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.detach_lun( request, @@ -30745,6 +30848,7 @@ def test_detach_lun_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_ssh_keys_rest_bad_request(request_type=ssh_key.ListSSHKeysRequest): @@ -30827,10 +30931,13 @@ def test_list_ssh_keys_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.BareMetalSolutionRestInterceptor, "post_list_ssh_keys" ) as post, mock.patch.object( + transports.BareMetalSolutionRestInterceptor, "post_list_ssh_keys_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.BareMetalSolutionRestInterceptor, "pre_list_ssh_keys" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = ssh_key.ListSSHKeysRequest.pb(ssh_key.ListSSHKeysRequest()) transcode.return_value = { "method": "post", @@ -30854,6 +30961,7 @@ def test_list_ssh_keys_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = ssh_key.ListSSHKeysResponse() + post_with_metadata.return_value = ssh_key.ListSSHKeysResponse(), metadata client.list_ssh_keys( request, @@ -30865,6 +30973,7 @@ def test_list_ssh_keys_rest_interceptors(null_interceptor): 
pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_ssh_key_rest_bad_request(request_type=gcb_ssh_key.CreateSSHKeyRequest): @@ -31017,10 +31126,13 @@ def test_create_ssh_key_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.BareMetalSolutionRestInterceptor, "post_create_ssh_key" ) as post, mock.patch.object( + transports.BareMetalSolutionRestInterceptor, "post_create_ssh_key_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.BareMetalSolutionRestInterceptor, "pre_create_ssh_key" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gcb_ssh_key.CreateSSHKeyRequest.pb( gcb_ssh_key.CreateSSHKeyRequest() ) @@ -31044,6 +31156,7 @@ def test_create_ssh_key_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gcb_ssh_key.SSHKey() + post_with_metadata.return_value = gcb_ssh_key.SSHKey(), metadata client.create_ssh_key( request, @@ -31055,6 +31168,7 @@ def test_create_ssh_key_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_ssh_key_rest_bad_request(request_type=ssh_key.DeleteSSHKeyRequest): @@ -31244,10 +31358,13 @@ def test_list_volumes_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.BareMetalSolutionRestInterceptor, "post_list_volumes" ) as post, mock.patch.object( + transports.BareMetalSolutionRestInterceptor, "post_list_volumes_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.BareMetalSolutionRestInterceptor, "pre_list_volumes" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = volume.ListVolumesRequest.pb(volume.ListVolumesRequest()) transcode.return_value = { "method": "post", @@ -31269,6 +31386,7 @@ def test_list_volumes_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = volume.ListVolumesResponse() + post_with_metadata.return_value = volume.ListVolumesResponse(), metadata client.list_volumes( request, @@ -31280,6 +31398,7 @@ def test_list_volumes_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_volume_rest_bad_request(request_type=volume.GetVolumeRequest): @@ -31408,10 +31527,13 @@ def test_get_volume_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.BareMetalSolutionRestInterceptor, "post_get_volume" ) as post, mock.patch.object( + transports.BareMetalSolutionRestInterceptor, "post_get_volume_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.BareMetalSolutionRestInterceptor, "pre_get_volume" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = volume.GetVolumeRequest.pb(volume.GetVolumeRequest()) transcode.return_value = { "method": "post", @@ -31433,6 +31555,7 @@ def test_get_volume_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = volume.Volume() + post_with_metadata.return_value = volume.Volume(), metadata client.get_volume( request, @@ -31444,6 +31567,7 @@ def test_get_volume_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def 
test_update_volume_rest_bad_request(request_type=gcb_volume.UpdateVolumeRequest): @@ -31624,10 +31748,13 @@ def test_update_volume_rest_interceptors(null_interceptor): ), mock.patch.object( transports.BareMetalSolutionRestInterceptor, "post_update_volume" ) as post, mock.patch.object( + transports.BareMetalSolutionRestInterceptor, "post_update_volume_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.BareMetalSolutionRestInterceptor, "pre_update_volume" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gcb_volume.UpdateVolumeRequest.pb(gcb_volume.UpdateVolumeRequest()) transcode.return_value = { "method": "post", @@ -31649,6 +31776,7 @@ def test_update_volume_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.update_volume( request, @@ -31660,6 +31788,7 @@ def test_update_volume_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_rename_volume_rest_bad_request(request_type=volume.RenameVolumeRequest): @@ -31788,10 +31917,13 @@ def test_rename_volume_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.BareMetalSolutionRestInterceptor, "post_rename_volume" ) as post, mock.patch.object( + transports.BareMetalSolutionRestInterceptor, "post_rename_volume_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.BareMetalSolutionRestInterceptor, "pre_rename_volume" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = volume.RenameVolumeRequest.pb(volume.RenameVolumeRequest()) transcode.return_value = { "method": "post", @@ -31813,6 +31945,7 @@ def test_rename_volume_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = volume.Volume() + post_with_metadata.return_value = volume.Volume(), metadata client.rename_volume( request, @@ -31824,6 +31957,7 @@ def test_rename_volume_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_evict_volume_rest_bad_request(request_type=volume.EvictVolumeRequest): @@ -31902,10 +32036,13 @@ def test_evict_volume_rest_interceptors(null_interceptor): ), mock.patch.object( transports.BareMetalSolutionRestInterceptor, "post_evict_volume" ) as post, mock.patch.object( + transports.BareMetalSolutionRestInterceptor, "post_evict_volume_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.BareMetalSolutionRestInterceptor, "pre_evict_volume" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = volume.EvictVolumeRequest.pb(volume.EvictVolumeRequest()) transcode.return_value = { "method": "post", @@ -31927,6 +32064,7 @@ def test_evict_volume_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.evict_volume( request, @@ -31938,6 +32076,7 @@ def test_evict_volume_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_resize_volume_rest_bad_request(request_type=gcb_volume.ResizeVolumeRequest): @@ -32016,10 +32155,13 @@ def 
test_resize_volume_rest_interceptors(null_interceptor): ), mock.patch.object( transports.BareMetalSolutionRestInterceptor, "post_resize_volume" ) as post, mock.patch.object( + transports.BareMetalSolutionRestInterceptor, "post_resize_volume_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.BareMetalSolutionRestInterceptor, "pre_resize_volume" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gcb_volume.ResizeVolumeRequest.pb(gcb_volume.ResizeVolumeRequest()) transcode.return_value = { "method": "post", @@ -32041,6 +32183,7 @@ def test_resize_volume_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.resize_volume( request, @@ -32052,6 +32195,7 @@ def test_resize_volume_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_networks_rest_bad_request(request_type=network.ListNetworksRequest): @@ -32136,10 +32280,13 @@ def test_list_networks_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.BareMetalSolutionRestInterceptor, "post_list_networks" ) as post, mock.patch.object( + transports.BareMetalSolutionRestInterceptor, "post_list_networks_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.BareMetalSolutionRestInterceptor, "pre_list_networks" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = network.ListNetworksRequest.pb(network.ListNetworksRequest()) transcode.return_value = { "method": "post", @@ -32163,6 +32310,7 @@ def test_list_networks_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = network.ListNetworksResponse() + post_with_metadata.return_value = network.ListNetworksResponse(), metadata client.list_networks( request, @@ -32174,6 +32322,7 @@ def test_list_networks_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_network_usage_rest_bad_request( @@ -32255,10 +32404,14 @@ def test_list_network_usage_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.BareMetalSolutionRestInterceptor, "post_list_network_usage" ) as post, mock.patch.object( + transports.BareMetalSolutionRestInterceptor, + "post_list_network_usage_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.BareMetalSolutionRestInterceptor, "pre_list_network_usage" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = network.ListNetworkUsageRequest.pb( network.ListNetworkUsageRequest() ) @@ -32284,6 +32437,7 @@ def test_list_network_usage_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = network.ListNetworkUsageResponse() + post_with_metadata.return_value = network.ListNetworkUsageResponse(), metadata client.list_network_usage( request, @@ -32295,6 +32449,7 @@ def test_list_network_usage_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_network_rest_bad_request(request_type=network.GetNetworkRequest): @@ -32399,10 +32554,13 @@ def test_get_network_rest_interceptors(null_interceptor): ) as transcode, 
mock.patch.object( transports.BareMetalSolutionRestInterceptor, "post_get_network" ) as post, mock.patch.object( + transports.BareMetalSolutionRestInterceptor, "post_get_network_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.BareMetalSolutionRestInterceptor, "pre_get_network" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = network.GetNetworkRequest.pb(network.GetNetworkRequest()) transcode.return_value = { "method": "post", @@ -32424,6 +32582,7 @@ def test_get_network_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = network.Network() + post_with_metadata.return_value = network.Network(), metadata client.get_network( request, @@ -32435,6 +32594,7 @@ def test_get_network_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_network_rest_bad_request(request_type=gcb_network.UpdateNetworkRequest): @@ -32630,10 +32790,13 @@ def test_update_network_rest_interceptors(null_interceptor): ), mock.patch.object( transports.BareMetalSolutionRestInterceptor, "post_update_network" ) as post, mock.patch.object( + transports.BareMetalSolutionRestInterceptor, "post_update_network_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.BareMetalSolutionRestInterceptor, "pre_update_network" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gcb_network.UpdateNetworkRequest.pb( gcb_network.UpdateNetworkRequest() ) @@ -32657,6 +32820,7 @@ def test_update_network_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.update_network( request, @@ -32668,6 +32832,7 @@ def test_update_network_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_volume_snapshot_rest_bad_request( @@ -32837,10 +33002,14 @@ def test_create_volume_snapshot_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.BareMetalSolutionRestInterceptor, "post_create_volume_snapshot" ) as post, mock.patch.object( + transports.BareMetalSolutionRestInterceptor, + "post_create_volume_snapshot_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.BareMetalSolutionRestInterceptor, "pre_create_volume_snapshot" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gcb_volume_snapshot.CreateVolumeSnapshotRequest.pb( gcb_volume_snapshot.CreateVolumeSnapshotRequest() ) @@ -32866,6 +33035,7 @@ def test_create_volume_snapshot_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gcb_volume_snapshot.VolumeSnapshot() + post_with_metadata.return_value = gcb_volume_snapshot.VolumeSnapshot(), metadata client.create_volume_snapshot( request, @@ -32877,6 +33047,7 @@ def test_create_volume_snapshot_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_restore_volume_snapshot_rest_bad_request( @@ -32961,10 +33132,14 @@ def test_restore_volume_snapshot_rest_interceptors(null_interceptor): ), mock.patch.object( transports.BareMetalSolutionRestInterceptor, "post_restore_volume_snapshot" ) as post, 
mock.patch.object( + transports.BareMetalSolutionRestInterceptor, + "post_restore_volume_snapshot_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.BareMetalSolutionRestInterceptor, "pre_restore_volume_snapshot" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gcb_volume_snapshot.RestoreVolumeSnapshotRequest.pb( gcb_volume_snapshot.RestoreVolumeSnapshotRequest() ) @@ -32988,6 +33163,7 @@ def test_restore_volume_snapshot_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.restore_volume_snapshot( request, @@ -32999,6 +33175,7 @@ def test_restore_volume_snapshot_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_volume_snapshot_rest_bad_request( @@ -33208,10 +33385,14 @@ def test_get_volume_snapshot_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.BareMetalSolutionRestInterceptor, "post_get_volume_snapshot" ) as post, mock.patch.object( + transports.BareMetalSolutionRestInterceptor, + "post_get_volume_snapshot_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.BareMetalSolutionRestInterceptor, "pre_get_volume_snapshot" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = volume_snapshot.GetVolumeSnapshotRequest.pb( volume_snapshot.GetVolumeSnapshotRequest() ) @@ -33237,6 +33418,7 @@ def test_get_volume_snapshot_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = volume_snapshot.VolumeSnapshot() + post_with_metadata.return_value = volume_snapshot.VolumeSnapshot(), metadata client.get_volume_snapshot( request, @@ -33248,6 +33430,7 @@ def test_get_volume_snapshot_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_volume_snapshots_rest_bad_request( @@ -33334,10 +33517,14 @@ def test_list_volume_snapshots_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.BareMetalSolutionRestInterceptor, "post_list_volume_snapshots" ) as post, mock.patch.object( + transports.BareMetalSolutionRestInterceptor, + "post_list_volume_snapshots_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.BareMetalSolutionRestInterceptor, "pre_list_volume_snapshots" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = volume_snapshot.ListVolumeSnapshotsRequest.pb( volume_snapshot.ListVolumeSnapshotsRequest() ) @@ -33363,6 +33550,10 @@ def test_list_volume_snapshots_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = volume_snapshot.ListVolumeSnapshotsResponse() + post_with_metadata.return_value = ( + volume_snapshot.ListVolumeSnapshotsResponse(), + metadata, + ) client.list_volume_snapshots( request, @@ -33374,6 +33565,7 @@ def test_list_volume_snapshots_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_lun_rest_bad_request(request_type=lun.GetLunRequest): @@ -33480,10 +33672,13 @@ def test_get_lun_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( 
transports.BareMetalSolutionRestInterceptor, "post_get_lun" ) as post, mock.patch.object( + transports.BareMetalSolutionRestInterceptor, "post_get_lun_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.BareMetalSolutionRestInterceptor, "pre_get_lun" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = lun.GetLunRequest.pb(lun.GetLunRequest()) transcode.return_value = { "method": "post", @@ -33505,6 +33700,7 @@ def test_get_lun_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = lun.Lun() + post_with_metadata.return_value = lun.Lun(), metadata client.get_lun( request, @@ -33516,6 +33712,7 @@ def test_get_lun_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_luns_rest_bad_request(request_type=lun.ListLunsRequest): @@ -33600,10 +33797,13 @@ def test_list_luns_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.BareMetalSolutionRestInterceptor, "post_list_luns" ) as post, mock.patch.object( + transports.BareMetalSolutionRestInterceptor, "post_list_luns_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.BareMetalSolutionRestInterceptor, "pre_list_luns" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = lun.ListLunsRequest.pb(lun.ListLunsRequest()) transcode.return_value = { "method": "post", @@ -33625,6 +33825,7 @@ def test_list_luns_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = lun.ListLunsResponse() + post_with_metadata.return_value = lun.ListLunsResponse(), metadata client.list_luns( request, @@ -33636,6 +33837,7 @@ def test_list_luns_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_evict_lun_rest_bad_request(request_type=lun.EvictLunRequest): @@ -33718,10 +33920,13 @@ def test_evict_lun_rest_interceptors(null_interceptor): ), mock.patch.object( transports.BareMetalSolutionRestInterceptor, "post_evict_lun" ) as post, mock.patch.object( + transports.BareMetalSolutionRestInterceptor, "post_evict_lun_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.BareMetalSolutionRestInterceptor, "pre_evict_lun" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = lun.EvictLunRequest.pb(lun.EvictLunRequest()) transcode.return_value = { "method": "post", @@ -33743,6 +33948,7 @@ def test_evict_lun_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.evict_lun( request, @@ -33754,6 +33960,7 @@ def test_evict_lun_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_nfs_share_rest_bad_request(request_type=nfs_share.GetNfsShareRequest): @@ -33848,10 +34055,13 @@ def test_get_nfs_share_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.BareMetalSolutionRestInterceptor, "post_get_nfs_share" ) as post, mock.patch.object( + transports.BareMetalSolutionRestInterceptor, "post_get_nfs_share_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.BareMetalSolutionRestInterceptor, 
"pre_get_nfs_share" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = nfs_share.GetNfsShareRequest.pb(nfs_share.GetNfsShareRequest()) transcode.return_value = { "method": "post", @@ -33873,6 +34083,7 @@ def test_get_nfs_share_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = nfs_share.NfsShare() + post_with_metadata.return_value = nfs_share.NfsShare(), metadata client.get_nfs_share( request, @@ -33884,6 +34095,7 @@ def test_get_nfs_share_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_nfs_shares_rest_bad_request(request_type=nfs_share.ListNfsSharesRequest): @@ -33968,10 +34180,14 @@ def test_list_nfs_shares_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.BareMetalSolutionRestInterceptor, "post_list_nfs_shares" ) as post, mock.patch.object( + transports.BareMetalSolutionRestInterceptor, + "post_list_nfs_shares_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.BareMetalSolutionRestInterceptor, "pre_list_nfs_shares" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = nfs_share.ListNfsSharesRequest.pb(nfs_share.ListNfsSharesRequest()) transcode.return_value = { "method": "post", @@ -33995,6 +34211,7 @@ def test_list_nfs_shares_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = nfs_share.ListNfsSharesResponse() + post_with_metadata.return_value = nfs_share.ListNfsSharesResponse(), metadata client.list_nfs_shares( request, @@ -34006,6 +34223,7 @@ def test_list_nfs_shares_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_nfs_share_rest_bad_request( @@ -34179,10 +34397,14 @@ def test_update_nfs_share_rest_interceptors(null_interceptor): ), mock.patch.object( transports.BareMetalSolutionRestInterceptor, "post_update_nfs_share" ) as post, mock.patch.object( + transports.BareMetalSolutionRestInterceptor, + "post_update_nfs_share_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.BareMetalSolutionRestInterceptor, "pre_update_nfs_share" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gcb_nfs_share.UpdateNfsShareRequest.pb( gcb_nfs_share.UpdateNfsShareRequest() ) @@ -34206,6 +34428,7 @@ def test_update_nfs_share_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.update_nfs_share( request, @@ -34217,6 +34440,7 @@ def test_update_nfs_share_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_nfs_share_rest_bad_request( @@ -34386,10 +34610,14 @@ def test_create_nfs_share_rest_interceptors(null_interceptor): ), mock.patch.object( transports.BareMetalSolutionRestInterceptor, "post_create_nfs_share" ) as post, mock.patch.object( + transports.BareMetalSolutionRestInterceptor, + "post_create_nfs_share_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.BareMetalSolutionRestInterceptor, "pre_create_nfs_share" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() 
pb_message = gcb_nfs_share.CreateNfsShareRequest.pb( gcb_nfs_share.CreateNfsShareRequest() ) @@ -34413,6 +34641,7 @@ def test_create_nfs_share_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_nfs_share( request, @@ -34424,6 +34653,7 @@ def test_create_nfs_share_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_rename_nfs_share_rest_bad_request( @@ -34520,10 +34750,14 @@ def test_rename_nfs_share_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.BareMetalSolutionRestInterceptor, "post_rename_nfs_share" ) as post, mock.patch.object( + transports.BareMetalSolutionRestInterceptor, + "post_rename_nfs_share_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.BareMetalSolutionRestInterceptor, "pre_rename_nfs_share" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = nfs_share.RenameNfsShareRequest.pb( nfs_share.RenameNfsShareRequest() ) @@ -34547,6 +34781,7 @@ def test_rename_nfs_share_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = nfs_share.NfsShare() + post_with_metadata.return_value = nfs_share.NfsShare(), metadata client.rename_nfs_share( request, @@ -34558,6 +34793,7 @@ def test_rename_nfs_share_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_nfs_share_rest_bad_request( @@ -34638,10 +34874,14 @@ def test_delete_nfs_share_rest_interceptors(null_interceptor): ), mock.patch.object( transports.BareMetalSolutionRestInterceptor, "post_delete_nfs_share" ) as post, mock.patch.object( + transports.BareMetalSolutionRestInterceptor, + "post_delete_nfs_share_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.BareMetalSolutionRestInterceptor, "pre_delete_nfs_share" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = nfs_share.DeleteNfsShareRequest.pb( nfs_share.DeleteNfsShareRequest() ) @@ -34665,6 +34905,7 @@ def test_delete_nfs_share_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.delete_nfs_share( request, @@ -34676,6 +34917,7 @@ def test_delete_nfs_share_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_provisioning_quotas_rest_bad_request( @@ -34760,10 +35002,14 @@ def test_list_provisioning_quotas_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.BareMetalSolutionRestInterceptor, "post_list_provisioning_quotas" ) as post, mock.patch.object( + transports.BareMetalSolutionRestInterceptor, + "post_list_provisioning_quotas_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.BareMetalSolutionRestInterceptor, "pre_list_provisioning_quotas" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = provisioning.ListProvisioningQuotasRequest.pb( provisioning.ListProvisioningQuotasRequest() ) @@ -34789,6 +35035,10 @@ def 
test_list_provisioning_quotas_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = provisioning.ListProvisioningQuotasResponse() + post_with_metadata.return_value = ( + provisioning.ListProvisioningQuotasResponse(), + metadata, + ) client.list_provisioning_quotas( request, @@ -34800,6 +35050,7 @@ def test_list_provisioning_quotas_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_submit_provisioning_config_rest_bad_request( @@ -34881,10 +35132,14 @@ def test_submit_provisioning_config_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.BareMetalSolutionRestInterceptor, "post_submit_provisioning_config" ) as post, mock.patch.object( + transports.BareMetalSolutionRestInterceptor, + "post_submit_provisioning_config_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.BareMetalSolutionRestInterceptor, "pre_submit_provisioning_config" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = provisioning.SubmitProvisioningConfigRequest.pb( provisioning.SubmitProvisioningConfigRequest() ) @@ -34910,6 +35165,10 @@ def test_submit_provisioning_config_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = provisioning.SubmitProvisioningConfigResponse() + post_with_metadata.return_value = ( + provisioning.SubmitProvisioningConfigResponse(), + metadata, + ) client.submit_provisioning_config( request, @@ -34921,6 +35180,7 @@ def test_submit_provisioning_config_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_provisioning_config_rest_bad_request( @@ -35027,10 +35287,14 @@ def test_get_provisioning_config_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.BareMetalSolutionRestInterceptor, "post_get_provisioning_config" ) as post, mock.patch.object( + transports.BareMetalSolutionRestInterceptor, + "post_get_provisioning_config_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.BareMetalSolutionRestInterceptor, "pre_get_provisioning_config" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = provisioning.GetProvisioningConfigRequest.pb( provisioning.GetProvisioningConfigRequest() ) @@ -35056,6 +35320,7 @@ def test_get_provisioning_config_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = provisioning.ProvisioningConfig() + post_with_metadata.return_value = provisioning.ProvisioningConfig(), metadata client.get_provisioning_config( request, @@ -35067,6 +35332,7 @@ def test_get_provisioning_config_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_provisioning_config_rest_bad_request( @@ -35329,10 +35595,14 @@ def test_create_provisioning_config_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.BareMetalSolutionRestInterceptor, "post_create_provisioning_config" ) as post, mock.patch.object( + transports.BareMetalSolutionRestInterceptor, + "post_create_provisioning_config_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.BareMetalSolutionRestInterceptor, "pre_create_provisioning_config" ) as pre: pre.assert_not_called() 
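# A minimal sketch of how the hook exercised by this test could be used in
# application code, assuming the (response, metadata) signature documented for
# the BatchService *_with_metadata interceptors later in this diff:
#
#     class LoggingInterceptor(transports.BareMetalSolutionRestInterceptor):
#         def post_create_provisioning_config_with_metadata(self, response, metadata):
#             # metadata carries the HTTP response headers as (key, value) pairs
#             return response, metadata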
post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = provisioning.CreateProvisioningConfigRequest.pb( provisioning.CreateProvisioningConfigRequest() ) @@ -35358,6 +35628,7 @@ def test_create_provisioning_config_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = provisioning.ProvisioningConfig() + post_with_metadata.return_value = provisioning.ProvisioningConfig(), metadata client.create_provisioning_config( request, @@ -35369,6 +35640,7 @@ def test_create_provisioning_config_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_provisioning_config_rest_bad_request( @@ -35639,10 +35911,14 @@ def test_update_provisioning_config_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.BareMetalSolutionRestInterceptor, "post_update_provisioning_config" ) as post, mock.patch.object( + transports.BareMetalSolutionRestInterceptor, + "post_update_provisioning_config_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.BareMetalSolutionRestInterceptor, "pre_update_provisioning_config" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = provisioning.UpdateProvisioningConfigRequest.pb( provisioning.UpdateProvisioningConfigRequest() ) @@ -35668,6 +35944,7 @@ def test_update_provisioning_config_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = provisioning.ProvisioningConfig() + post_with_metadata.return_value = provisioning.ProvisioningConfig(), metadata client.update_provisioning_config( request, @@ -35679,6 +35956,7 @@ def test_update_provisioning_config_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_rename_network_rest_bad_request(request_type=network.RenameNetworkRequest): @@ -35783,10 +36061,13 @@ def test_rename_network_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.BareMetalSolutionRestInterceptor, "post_rename_network" ) as post, mock.patch.object( + transports.BareMetalSolutionRestInterceptor, "post_rename_network_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.BareMetalSolutionRestInterceptor, "pre_rename_network" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = network.RenameNetworkRequest.pb(network.RenameNetworkRequest()) transcode.return_value = { "method": "post", @@ -35808,6 +36089,7 @@ def test_rename_network_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = network.Network() + post_with_metadata.return_value = network.Network(), metadata client.rename_network( request, @@ -35819,6 +36101,7 @@ def test_rename_network_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_os_images_rest_bad_request(request_type=osimage.ListOSImagesRequest): @@ -35901,10 +36184,13 @@ def test_list_os_images_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.BareMetalSolutionRestInterceptor, "post_list_os_images" ) as post, mock.patch.object( + transports.BareMetalSolutionRestInterceptor, "post_list_os_images_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.BareMetalSolutionRestInterceptor, 
"pre_list_os_images" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = osimage.ListOSImagesRequest.pb(osimage.ListOSImagesRequest()) transcode.return_value = { "method": "post", @@ -35928,6 +36214,7 @@ def test_list_os_images_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = osimage.ListOSImagesResponse() + post_with_metadata.return_value = osimage.ListOSImagesResponse(), metadata client.list_os_images( request, @@ -35939,6 +36226,7 @@ def test_list_os_images_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationRequest): diff --git a/packages/google-cloud-batch/CHANGELOG.md b/packages/google-cloud-batch/CHANGELOG.md index 791b71c1b2a2..82a099c36194 100644 --- a/packages/google-cloud-batch/CHANGELOG.md +++ b/packages/google-cloud-batch/CHANGELOG.md @@ -1,5 +1,22 @@ # Changelog +## [0.17.34](https://github.com/googleapis/google-cloud-python/compare/google-cloud-batch-v0.17.33...google-cloud-batch-v0.17.34) (2025-02-12) + + +### Features + +* Add REST Interceptors which support reading metadata ([b1c3ce8](https://github.com/googleapis/google-cloud-python/commit/b1c3ce8b271e9d22afabcde054e81dcedae6b0ef)) +* Add support for reading selective GAPIC generation methods from service YAML ([b1c3ce8](https://github.com/googleapis/google-cloud-python/commit/b1c3ce8b271e9d22afabcde054e81dcedae6b0ef)) +* promote cancel job API to GA ([559dea7](https://github.com/googleapis/google-cloud-python/commit/559dea77a99dcd314df941be54ed204aa65c33c7)) + +## [0.17.33](https://github.com/googleapis/google-cloud-python/compare/google-cloud-batch-v0.17.32...google-cloud-batch-v0.17.33) (2025-01-02) + + +### Documentation + +* [google-cloud-batch] fix a few broken references in documentation ([651dcb6](https://github.com/googleapis/google-cloud-python/commit/651dcb611ee0ff3327b67aee2fbe1e53d20d89ee)) +* [google-cloud-batch] fix broken references in comments ([#13390](https://github.com/googleapis/google-cloud-python/issues/13390)) ([651dcb6](https://github.com/googleapis/google-cloud-python/commit/651dcb611ee0ff3327b67aee2fbe1e53d20d89ee)) + ## [0.17.32](https://github.com/googleapis/google-cloud-python/compare/google-cloud-batch-v0.17.31...google-cloud-batch-v0.17.32) (2024-12-12) diff --git a/packages/google-cloud-batch/README.rst b/packages/google-cloud-batch/README.rst index 9104cf250e84..8245cbb181e8 100644 --- a/packages/google-cloud-batch/README.rst +++ b/packages/google-cloud-batch/README.rst @@ -26,12 +26,12 @@ In order to use this library, you first need to go through the following steps: 1. `Select or create a Cloud Platform project.`_ 2. `Enable billing for your project.`_ 3. `Enable the Cloud Batch.`_ -4. `Setup Authentication.`_ +4. `Set up Authentication.`_ .. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project .. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project .. _Enable the Cloud Batch.: https://cloud.google.com/batch/docs -.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html +.. 
_Set up Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html Installation ~~~~~~~~~~~~ diff --git a/packages/google-cloud-batch/google/cloud/batch/__init__.py b/packages/google-cloud-batch/google/cloud/batch/__init__.py index 0717452a2b26..462bd44dd290 100644 --- a/packages/google-cloud-batch/google/cloud/batch/__init__.py +++ b/packages/google-cloud-batch/google/cloud/batch/__init__.py @@ -23,6 +23,8 @@ ) from google.cloud.batch_v1.services.batch_service.client import BatchServiceClient from google.cloud.batch_v1.types.batch import ( + CancelJobRequest, + CancelJobResponse, CreateJobRequest, DeleteJobRequest, GetJobRequest, @@ -58,6 +60,8 @@ __all__ = ( "BatchServiceClient", "BatchServiceAsyncClient", + "CancelJobRequest", + "CancelJobResponse", "CreateJobRequest", "DeleteJobRequest", "GetJobRequest", diff --git a/packages/google-cloud-batch/google/cloud/batch/gapic_version.py b/packages/google-cloud-batch/google/cloud/batch/gapic_version.py index 41ebf29e4e3e..f7da358ad5fc 100644 --- a/packages/google-cloud-batch/google/cloud/batch/gapic_version.py +++ b/packages/google-cloud-batch/google/cloud/batch/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.17.32" # {x-release-please-version} +__version__ = "0.17.34" # {x-release-please-version} diff --git a/packages/google-cloud-batch/google/cloud/batch_v1/__init__.py b/packages/google-cloud-batch/google/cloud/batch_v1/__init__.py index 72b01949621c..cfb5aeedf399 100644 --- a/packages/google-cloud-batch/google/cloud/batch_v1/__init__.py +++ b/packages/google-cloud-batch/google/cloud/batch_v1/__init__.py @@ -20,6 +20,8 @@ from .services.batch_service import BatchServiceAsyncClient, BatchServiceClient from .types.batch import ( + CancelJobRequest, + CancelJobResponse, CreateJobRequest, DeleteJobRequest, GetJobRequest, @@ -56,6 +58,8 @@ "BatchServiceAsyncClient", "AllocationPolicy", "BatchServiceClient", + "CancelJobRequest", + "CancelJobResponse", "ComputeResource", "CreateJobRequest", "DeleteJobRequest", diff --git a/packages/google-cloud-batch/google/cloud/batch_v1/gapic_metadata.json b/packages/google-cloud-batch/google/cloud/batch_v1/gapic_metadata.json index 299381217fbe..d4c30aa61b47 100644 --- a/packages/google-cloud-batch/google/cloud/batch_v1/gapic_metadata.json +++ b/packages/google-cloud-batch/google/cloud/batch_v1/gapic_metadata.json @@ -10,6 +10,11 @@ "grpc": { "libraryClient": "BatchServiceClient", "rpcs": { + "CancelJob": { + "methods": [ + "cancel_job" + ] + }, "CreateJob": { "methods": [ "create_job" @@ -45,6 +50,11 @@ "grpc-async": { "libraryClient": "BatchServiceAsyncClient", "rpcs": { + "CancelJob": { + "methods": [ + "cancel_job" + ] + }, "CreateJob": { "methods": [ "create_job" @@ -80,6 +90,11 @@ "rest": { "libraryClient": "BatchServiceClient", "rpcs": { + "CancelJob": { + "methods": [ + "cancel_job" + ] + }, "CreateJob": { "methods": [ "create_job" diff --git a/packages/google-cloud-batch/google/cloud/batch_v1/gapic_version.py b/packages/google-cloud-batch/google/cloud/batch_v1/gapic_version.py index 41ebf29e4e3e..f7da358ad5fc 100644 --- a/packages/google-cloud-batch/google/cloud/batch_v1/gapic_version.py +++ b/packages/google-cloud-batch/google/cloud/batch_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.17.32" # {x-release-please-version} +__version__ = "0.17.34" # {x-release-please-version} diff --git a/packages/google-cloud-batch/google/cloud/batch_v1/services/batch_service/async_client.py b/packages/google-cloud-batch/google/cloud/batch_v1/services/batch_service/async_client.py index dbcff5d80053..8df5a7e7bcf7 100644 --- a/packages/google-cloud-batch/google/cloud/batch_v1/services/batch_service/async_client.py +++ b/packages/google-cloud-batch/google/cloud/batch_v1/services/batch_service/async_client.py @@ -658,6 +658,126 @@ async def sample_delete_job(): # Done; return the response. return response + async def cancel_job( + self, + request: Optional[Union[batch.CancelJobRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Cancel a Job. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import batch_v1 + + async def sample_cancel_job(): + # Create a client + client = batch_v1.BatchServiceAsyncClient() + + # Initialize request argument(s) + request = batch_v1.CancelJobRequest( + name="name_value", + ) + + # Make the request + operation = client.cancel_job(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.batch_v1.types.CancelJobRequest, dict]]): + The request object. CancelJob Request. + name (:class:`str`): + Required. Job name. + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.batch_v1.types.CancelJobResponse` + Response to the CancelJob request. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
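+        # A plain dict is accepted as well and is coerced just below; as a minimal
+        # sketch (job_name is assumed to be defined by the caller, and request_id is
+        # the optional idempotency token on CancelJobRequest defined later in this diff):
+        #
+        #     import uuid
+        #     request = {"name": job_name, "request_id": str(uuid.uuid4())}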
+ if not isinstance(request, batch.CancelJobRequest): + request = batch.CancelJobRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.cancel_job + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + batch.CancelJobResponse, + metadata_type=batch.OperationMetadata, + ) + + # Done; return the response. + return response + async def list_jobs( self, request: Optional[Union[batch.ListJobsRequest, dict]] = None, diff --git a/packages/google-cloud-batch/google/cloud/batch_v1/services/batch_service/client.py b/packages/google-cloud-batch/google/cloud/batch_v1/services/batch_service/client.py index ebc654ce031a..37cfc2a957c4 100644 --- a/packages/google-cloud-batch/google/cloud/batch_v1/services/batch_service/client.py +++ b/packages/google-cloud-batch/google/cloud/batch_v1/services/batch_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -545,6 +547,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -1090,6 +1119,123 @@ def sample_delete_job(): # Done; return the response. return response + def cancel_job( + self, + request: Optional[Union[batch.CancelJobRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: + r"""Cancel a Job. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import batch_v1 + + def sample_cancel_job(): + # Create a client + client = batch_v1.BatchServiceClient() + + # Initialize request argument(s) + request = batch_v1.CancelJobRequest( + name="name_value", + ) + + # Make the request + operation = client.cancel_job(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.batch_v1.types.CancelJobRequest, dict]): + The request object. CancelJob Request. + name (str): + Required. Job name. + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.batch_v1.types.CancelJobResponse` + Response to the CancelJob request. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, batch.CancelJobRequest): + request = batch.CancelJobRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.cancel_job] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + batch.CancelJobResponse, + metadata_type=batch.OperationMetadata, + ) + + # Done; return the response. + return response + def list_jobs( self, request: Optional[Union[batch.ListJobsRequest, dict]] = None, @@ -1483,16 +1629,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -1538,16 +1688,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def delete_operation( self, @@ -1704,16 +1858,20 @@ def get_location( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def list_locations( self, @@ -1759,16 +1917,20 @@ def list_locations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. 
+ return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-batch/google/cloud/batch_v1/services/batch_service/transports/base.py b/packages/google-cloud-batch/google/cloud/batch_v1/services/batch_service/transports/base.py index cdaba55884ea..5603055c8190 100644 --- a/packages/google-cloud-batch/google/cloud/batch_v1/services/batch_service/transports/base.py +++ b/packages/google-cloud-batch/google/cloud/batch_v1/services/batch_service/transports/base.py @@ -157,6 +157,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), + self.cancel_job: gapic_v1.method.wrap_method( + self.cancel_job, + default_timeout=60.0, + client_info=client_info, + ), self.list_jobs: gapic_v1.method.wrap_method( self.list_jobs, default_retry=retries.Retry( @@ -266,6 +271,15 @@ def delete_job( ]: raise NotImplementedError() + @property + def cancel_job( + self, + ) -> Callable[ + [batch.CancelJobRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + @property def list_jobs( self, diff --git a/packages/google-cloud-batch/google/cloud/batch_v1/services/batch_service/transports/grpc.py b/packages/google-cloud-batch/google/cloud/batch_v1/services/batch_service/transports/grpc.py index 394aaf603b5d..2ca5736d429d 100644 --- a/packages/google-cloud-batch/google/cloud/batch_v1/services/batch_service/transports/grpc.py +++ b/packages/google-cloud-batch/google/cloud/batch_v1/services/batch_service/transports/grpc.py @@ -418,6 +418,32 @@ def delete_job( ) return self._stubs["delete_job"] + @property + def cancel_job( + self, + ) -> Callable[[batch.CancelJobRequest], operations_pb2.Operation]: + r"""Return a callable for the cancel job method over gRPC. + + Cancel a Job. + + Returns: + Callable[[~.CancelJobRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_job" not in self._stubs: + self._stubs["cancel_job"] = self._logged_channel.unary_unary( + "/google.cloud.batch.v1.BatchService/CancelJob", + request_serializer=batch.CancelJobRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["cancel_job"] + @property def list_jobs(self) -> Callable[[batch.ListJobsRequest], batch.ListJobsResponse]: r"""Return a callable for the list jobs method over gRPC. diff --git a/packages/google-cloud-batch/google/cloud/batch_v1/services/batch_service/transports/grpc_asyncio.py b/packages/google-cloud-batch/google/cloud/batch_v1/services/batch_service/transports/grpc_asyncio.py index e684e74c794e..97da47c10203 100644 --- a/packages/google-cloud-batch/google/cloud/batch_v1/services/batch_service/transports/grpc_asyncio.py +++ b/packages/google-cloud-batch/google/cloud/batch_v1/services/batch_service/transports/grpc_asyncio.py @@ -425,6 +425,32 @@ def delete_job( ) return self._stubs["delete_job"] + @property + def cancel_job( + self, + ) -> Callable[[batch.CancelJobRequest], Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the cancel job method over gRPC. + + Cancel a Job. 
+ + Returns: + Callable[[~.CancelJobRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_job" not in self._stubs: + self._stubs["cancel_job"] = self._logged_channel.unary_unary( + "/google.cloud.batch.v1.BatchService/CancelJob", + request_serializer=batch.CancelJobRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["cancel_job"] + @property def list_jobs( self, @@ -528,6 +554,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), + self.cancel_job: self._wrap_method( + self.cancel_job, + default_timeout=60.0, + client_info=client_info, + ), self.list_jobs: self._wrap_method( self.list_jobs, default_retry=retries.AsyncRetry( diff --git a/packages/google-cloud-batch/google/cloud/batch_v1/services/batch_service/transports/rest.py b/packages/google-cloud-batch/google/cloud/batch_v1/services/batch_service/transports/rest.py index 28e9964eee32..e029472faf3f 100644 --- a/packages/google-cloud-batch/google/cloud/batch_v1/services/batch_service/transports/rest.py +++ b/packages/google-cloud-batch/google/cloud/batch_v1/services/batch_service/transports/rest.py @@ -73,6 +73,14 @@ class BatchServiceRestInterceptor: .. code-block:: python class MyCustomBatchServiceInterceptor(BatchServiceRestInterceptor): + def pre_cancel_job(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_cancel_job(self, response): + logging.log(f"Received response: {response}") + return response + def pre_create_job(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -127,6 +135,52 @@ def post_list_tasks(self, response): """ + def pre_cancel_job( + self, + request: batch.CancelJobRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[batch.CancelJobRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for cancel_job + + Override in a subclass to manipulate the request or metadata + before they are sent to the BatchService server. + """ + return request, metadata + + def post_cancel_job( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for cancel_job + + DEPRECATED. Please use the `post_cancel_job_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the BatchService server but before + it is returned to user code. This `post_cancel_job` interceptor runs + before the `post_cancel_job_with_metadata` interceptor. + """ + return response + + def post_cancel_job_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for cancel_job + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BatchService server but before it is returned to user code. + + We recommend only using this `post_cancel_job_with_metadata` + interceptor in new development instead of the `post_cancel_job` interceptor. 
+ When both interceptors are used, this `post_cancel_job_with_metadata` interceptor runs after the + `post_cancel_job` interceptor. The (possibly modified) response returned by + `post_cancel_job` will be passed to + `post_cancel_job_with_metadata`. + """ + return response, metadata + def pre_create_job( self, request: batch.CreateJobRequest, @@ -142,12 +196,33 @@ def pre_create_job( def post_create_job(self, response: gcb_job.Job) -> gcb_job.Job: """Post-rpc interceptor for create_job - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_job_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the BatchService server but before - it is returned to user code. + it is returned to user code. This `post_create_job` interceptor runs + before the `post_create_job_with_metadata` interceptor. """ return response + def post_create_job_with_metadata( + self, response: gcb_job.Job, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[gcb_job.Job, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_job + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BatchService server but before it is returned to user code. + + We recommend only using this `post_create_job_with_metadata` + interceptor in new development instead of the `post_create_job` interceptor. + When both interceptors are used, this `post_create_job_with_metadata` interceptor runs after the + `post_create_job` interceptor. The (possibly modified) response returned by + `post_create_job` will be passed to + `post_create_job_with_metadata`. + """ + return response, metadata + def pre_delete_job( self, request: batch.DeleteJobRequest, @@ -165,12 +240,35 @@ def post_delete_job( ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_job - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_job_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the BatchService server but before - it is returned to user code. + it is returned to user code. This `post_delete_job` interceptor runs + before the `post_delete_job_with_metadata` interceptor. """ return response + def post_delete_job_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_job + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BatchService server but before it is returned to user code. + + We recommend only using this `post_delete_job_with_metadata` + interceptor in new development instead of the `post_delete_job` interceptor. + When both interceptors are used, this `post_delete_job_with_metadata` interceptor runs after the + `post_delete_job` interceptor. The (possibly modified) response returned by + `post_delete_job` will be passed to + `post_delete_job_with_metadata`. + """ + return response, metadata + def pre_get_job( self, request: batch.GetJobRequest, @@ -186,12 +284,33 @@ def pre_get_job( def post_get_job(self, response: job.Job) -> job.Job: """Post-rpc interceptor for get_job - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_job_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response after it is returned by the BatchService server but before - it is returned to user code. + it is returned to user code. This `post_get_job` interceptor runs + before the `post_get_job_with_metadata` interceptor. """ return response + def post_get_job_with_metadata( + self, response: job.Job, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[job.Job, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_job + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BatchService server but before it is returned to user code. + + We recommend only using this `post_get_job_with_metadata` + interceptor in new development instead of the `post_get_job` interceptor. + When both interceptors are used, this `post_get_job_with_metadata` interceptor runs after the + `post_get_job` interceptor. The (possibly modified) response returned by + `post_get_job` will be passed to + `post_get_job_with_metadata`. + """ + return response, metadata + def pre_get_task( self, request: batch.GetTaskRequest, @@ -207,12 +326,33 @@ def pre_get_task( def post_get_task(self, response: task.Task) -> task.Task: """Post-rpc interceptor for get_task - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_task_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the BatchService server but before - it is returned to user code. + it is returned to user code. This `post_get_task` interceptor runs + before the `post_get_task_with_metadata` interceptor. """ return response + def post_get_task_with_metadata( + self, response: task.Task, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[task.Task, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_task + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BatchService server but before it is returned to user code. + + We recommend only using this `post_get_task_with_metadata` + interceptor in new development instead of the `post_get_task` interceptor. + When both interceptors are used, this `post_get_task_with_metadata` interceptor runs after the + `post_get_task` interceptor. The (possibly modified) response returned by + `post_get_task` will be passed to + `post_get_task_with_metadata`. + """ + return response, metadata + def pre_list_jobs( self, request: batch.ListJobsRequest, @@ -230,12 +370,35 @@ def post_list_jobs( ) -> batch.ListJobsResponse: """Post-rpc interceptor for list_jobs - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_jobs_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the BatchService server but before - it is returned to user code. + it is returned to user code. This `post_list_jobs` interceptor runs + before the `post_list_jobs_with_metadata` interceptor. """ return response + def post_list_jobs_with_metadata( + self, + response: batch.ListJobsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[batch.ListJobsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_jobs + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BatchService server but before it is returned to user code. 
+ + We recommend only using this `post_list_jobs_with_metadata` + interceptor in new development instead of the `post_list_jobs` interceptor. + When both interceptors are used, this `post_list_jobs_with_metadata` interceptor runs after the + `post_list_jobs` interceptor. The (possibly modified) response returned by + `post_list_jobs` will be passed to + `post_list_jobs_with_metadata`. + """ + return response, metadata + def pre_list_tasks( self, request: batch.ListTasksRequest, @@ -253,12 +416,35 @@ def post_list_tasks( ) -> batch.ListTasksResponse: """Post-rpc interceptor for list_tasks - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_tasks_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the BatchService server but before - it is returned to user code. + it is returned to user code. This `post_list_tasks` interceptor runs + before the `post_list_tasks_with_metadata` interceptor. """ return response + def post_list_tasks_with_metadata( + self, + response: batch.ListTasksResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[batch.ListTasksResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_tasks + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BatchService server but before it is returned to user code. + + We recommend only using this `post_list_tasks_with_metadata` + interceptor in new development instead of the `post_list_tasks` interceptor. + When both interceptors are used, this `post_list_tasks_with_metadata` interceptor runs after the + `post_list_tasks` interceptor. The (possibly modified) response returned by + `post_list_tasks` will be passed to + `post_list_tasks_with_metadata`. + """ + return response, metadata + def pre_get_location( self, request: locations_pb2.GetLocationRequest, @@ -548,6 +734,161 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: # Return the client from cache. return self._operations_client + class _CancelJob( + _BaseBatchServiceRestTransport._BaseCancelJob, BatchServiceRestStub + ): + def __hash__(self): + return hash("BatchServiceRestTransport.CancelJob") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: batch.CancelJobRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Call the cancel job method over HTTP. + + Args: + request (~.batch.CancelJobRequest): + The request object. CancelJob Request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options = ( + _BaseBatchServiceRestTransport._BaseCancelJob._get_http_options() + ) + + request, metadata = self._interceptor.pre_cancel_job(request, metadata) + transcoded_request = ( + _BaseBatchServiceRestTransport._BaseCancelJob._get_transcoded_request( + http_options, request + ) + ) + + body = _BaseBatchServiceRestTransport._BaseCancelJob._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = ( + _BaseBatchServiceRestTransport._BaseCancelJob._get_query_params_json( + transcoded_request + ) + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.batch_v1.BatchServiceClient.CancelJob", + extra={ + "serviceName": "google.cloud.batch.v1.BatchService", + "rpcName": "CancelJob", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = BatchServiceRestTransport._CancelJob._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
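+            # These are the same GoogleAPICallError subclasses that the new
+            # _add_cred_info_for_auth_errors helper (added to client.py earlier in
+            # this diff) inspects for 401/403/404 codes to append credential info.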
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_cancel_job(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_cancel_job_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.batch_v1.BatchServiceClient.cancel_job", + extra={ + "serviceName": "google.cloud.batch.v1.BatchService", + "rpcName": "CancelJob", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + class _CreateJob( _BaseBatchServiceRestTransport._BaseCreateJob, BatchServiceRestStub ): @@ -675,6 +1016,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_job(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_job_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -820,6 +1165,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_job(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_job_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -962,6 +1311,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_job(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_job_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1104,6 +1457,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_task(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_task_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1246,6 +1603,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_jobs(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_jobs_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1390,6 +1751,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_tasks(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_tasks_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG 
): # pragma: NO COVER @@ -1413,6 +1778,14 @@ def __call__( ) return resp + @property + def cancel_job( + self, + ) -> Callable[[batch.CancelJobRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CancelJob(self._session, self._host, self._interceptor) # type: ignore + @property def create_job(self) -> Callable[[batch.CreateJobRequest], gcb_job.Job]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. diff --git a/packages/google-cloud-batch/google/cloud/batch_v1/services/batch_service/transports/rest_base.py b/packages/google-cloud-batch/google/cloud/batch_v1/services/batch_service/transports/rest_base.py index b1d074089788..f60c5ee4961d 100644 --- a/packages/google-cloud-batch/google/cloud/batch_v1/services/batch_service/transports/rest_base.py +++ b/packages/google-cloud-batch/google/cloud/batch_v1/services/batch_service/transports/rest_base.py @@ -92,6 +92,63 @@ def __init__( api_audience=api_audience, ) + class _BaseCancelJob: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/jobs/*}:cancel", + "body": "*", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = batch.CancelJobRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseBatchServiceRestTransport._BaseCancelJob._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + class _BaseCreateJob: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") diff --git a/packages/google-cloud-batch/google/cloud/batch_v1/types/__init__.py b/packages/google-cloud-batch/google/cloud/batch_v1/types/__init__.py index 14da21770305..489e19eacc7d 100644 --- a/packages/google-cloud-batch/google/cloud/batch_v1/types/__init__.py +++ b/packages/google-cloud-batch/google/cloud/batch_v1/types/__init__.py @@ -14,6 +14,8 @@ # limitations under the License. 
# from .batch import ( + CancelJobRequest, + CancelJobResponse, CreateJobRequest, DeleteJobRequest, GetJobRequest, @@ -47,6 +49,8 @@ from .volume import GCS, NFS, Volume __all__ = ( + "CancelJobRequest", + "CancelJobResponse", "CreateJobRequest", "DeleteJobRequest", "GetJobRequest", diff --git a/packages/google-cloud-batch/google/cloud/batch_v1/types/batch.py b/packages/google-cloud-batch/google/cloud/batch_v1/types/batch.py index e9f92175d7c6..5cb8b0ac696a 100644 --- a/packages/google-cloud-batch/google/cloud/batch_v1/types/batch.py +++ b/packages/google-cloud-batch/google/cloud/batch_v1/types/batch.py @@ -29,6 +29,8 @@ "CreateJobRequest", "GetJobRequest", "DeleteJobRequest", + "CancelJobRequest", + "CancelJobResponse", "ListJobsRequest", "ListJobsResponse", "ListTasksRequest", @@ -159,6 +161,48 @@ class DeleteJobRequest(proto.Message): ) +class CancelJobRequest(proto.Message): + r"""CancelJob Request. + + Attributes: + name (str): + Required. Job name. + request_id (str): + Optional. An optional request ID to identify + requests. Specify a unique request ID so that if + you must retry your request, the server will + know to ignore the request if it has already + been completed. The server will guarantee that + for at least 60 minutes after the first request. + + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + request_id: str = proto.Field( + proto.STRING, + number=4, + ) + + +class CancelJobResponse(proto.Message): + r"""Response to the CancelJob request.""" + + class ListJobsRequest(proto.Message): r"""ListJob Request. @@ -332,9 +376,11 @@ class OperationMetadata(proto.Message): requested_cancellation (bool): Output only. Identifies whether the user has requested cancellation of the operation. Operations that have - successfully been cancelled have [Operation.error][] value - with a [google.rpc.Status.code][google.rpc.Status.code] of - 1, corresponding to ``Code.CANCELLED``. + successfully been cancelled have + [google.longrunning.Operation.error][google.longrunning.Operation.error] + value with a + [google.rpc.Status.code][google.rpc.Status.code] of 1, + corresponding to ``Code.CANCELLED``. api_version (str): Output only. API version used to start the operation. diff --git a/packages/google-cloud-batch/google/cloud/batch_v1/types/job.py b/packages/google-cloud-batch/google/cloud/batch_v1/types/job.py index 64df7c9ddcc1..2dd746a33e88 100644 --- a/packages/google-cloud-batch/google/cloud/batch_v1/types/job.py +++ b/packages/google-cloud-batch/google/cloud/batch_v1/types/job.py @@ -263,6 +263,14 @@ class State(proto.Enum): The Job will be deleted, but has not been deleted yet. Typically this is because resources used by the Job are still being cleaned up. + CANCELLATION_IN_PROGRESS (7): + The Job cancellation is in progress, this is + because the resources used by the Job are still + being cleaned up. + CANCELLED (8): + The Job has been cancelled, the task + executions were stopped and the resources were + cleaned up. 
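# A minimal end-to-end sketch of the new surface described above (placeholder
# project, location and job names; assumes application default credentials):
import uuid

from google.cloud import batch_v1

client = batch_v1.BatchServiceClient()
job_name = "projects/my-project/locations/us-central1/jobs/my-job"

operation = client.cancel_job(
    request=batch_v1.CancelJobRequest(
        name=job_name,
        # Reusing the same UUID on a retry lets the server recognize the
        # duplicate for at least 60 minutes after the first attempt.
        request_id=str(uuid.uuid4()),
    )
)
operation.result()  # waits for the long-running operation; yields CancelJobResponse

state = client.get_job(name=job_name).status.state
if state == batch_v1.JobStatus.State.CANCELLATION_IN_PROGRESS:
    print("cancelled; resources are still being cleaned up")
elif state == batch_v1.JobStatus.State.CANCELLED:
    print("cancelled; task executions stopped and resources cleaned up")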
""" STATE_UNSPECIFIED = 0 QUEUED = 1 @@ -271,6 +279,8 @@ class State(proto.Enum): SUCCEEDED = 4 FAILED = 5 DELETION_IN_PROGRESS = 6 + CANCELLATION_IN_PROGRESS = 7 + CANCELLED = 8 class InstanceStatus(proto.Message): r"""VM instance status. diff --git a/packages/google-cloud-batch/google/cloud/batch_v1alpha/gapic_version.py b/packages/google-cloud-batch/google/cloud/batch_v1alpha/gapic_version.py index 41ebf29e4e3e..f7da358ad5fc 100644 --- a/packages/google-cloud-batch/google/cloud/batch_v1alpha/gapic_version.py +++ b/packages/google-cloud-batch/google/cloud/batch_v1alpha/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.17.32" # {x-release-please-version} +__version__ = "0.17.34" # {x-release-please-version} diff --git a/packages/google-cloud-batch/google/cloud/batch_v1alpha/services/batch_service/client.py b/packages/google-cloud-batch/google/cloud/batch_v1alpha/services/batch_service/client.py index 00e0a1b408c5..b4e2b0f2247e 100644 --- a/packages/google-cloud-batch/google/cloud/batch_v1alpha/services/batch_service/client.py +++ b/packages/google-cloud-batch/google/cloud/batch_v1alpha/services/batch_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -573,6 +575,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -2387,16 +2416,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -2442,16 +2475,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. 
+ return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def delete_operation( self, @@ -2608,16 +2645,20 @@ def get_location( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def list_locations( self, @@ -2663,16 +2704,20 @@ def list_locations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-batch/google/cloud/batch_v1alpha/services/batch_service/transports/rest.py b/packages/google-cloud-batch/google/cloud/batch_v1alpha/services/batch_service/transports/rest.py index 14751deec0bf..5fe3497a6b05 100644 --- a/packages/google-cloud-batch/google/cloud/batch_v1alpha/services/batch_service/transports/rest.py +++ b/packages/google-cloud-batch/google/cloud/batch_v1alpha/services/batch_service/transports/rest.py @@ -204,12 +204,35 @@ def post_cancel_job( ) -> operations_pb2.Operation: """Post-rpc interceptor for cancel_job - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_cancel_job_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the BatchService server but before - it is returned to user code. + it is returned to user code. This `post_cancel_job` interceptor runs + before the `post_cancel_job_with_metadata` interceptor. """ return response + def post_cancel_job_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for cancel_job + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BatchService server but before it is returned to user code. + + We recommend only using this `post_cancel_job_with_metadata` + interceptor in new development instead of the `post_cancel_job` interceptor. + When both interceptors are used, this `post_cancel_job_with_metadata` interceptor runs after the + `post_cancel_job` interceptor. The (possibly modified) response returned by + `post_cancel_job` will be passed to + `post_cancel_job_with_metadata`. + """ + return response, metadata + def pre_create_job( self, request: batch.CreateJobRequest, @@ -225,12 +248,33 @@ def pre_create_job( def post_create_job(self, response: gcb_job.Job) -> gcb_job.Job: """Post-rpc interceptor for create_job - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_job_with_metadata` + interceptor instead. 
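# A minimal sketch of the recommended replacement described above: subclass the
# REST interceptor, override the *_with_metadata hook, and attach the instance
# to the transport (this assumes the REST transport accepts an `interceptor`
# argument, as generated REST transports typically do; names are illustrative).
from google.cloud import batch_v1alpha
from google.cloud.batch_v1alpha.services.batch_service.transports import rest as batch_rest


class HeaderLoggingInterceptor(batch_rest.BatchServiceRestInterceptor):
    def post_create_job_with_metadata(self, response, metadata):
        # `metadata` is the HTTP response headers flattened into (key, value) pairs.
        print("CreateJob returned", response.name, "with headers", dict(metadata))
        return response, metadata


transport = batch_rest.BatchServiceRestTransport(interceptor=HeaderLoggingInterceptor())
client = batch_v1alpha.BatchServiceClient(transport=transport)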
+ + Override in a subclass to read or manipulate the response after it is returned by the BatchService server but before - it is returned to user code. + it is returned to user code. This `post_create_job` interceptor runs + before the `post_create_job_with_metadata` interceptor. """ return response + def post_create_job_with_metadata( + self, response: gcb_job.Job, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[gcb_job.Job, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_job + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BatchService server but before it is returned to user code. + + We recommend only using this `post_create_job_with_metadata` + interceptor in new development instead of the `post_create_job` interceptor. + When both interceptors are used, this `post_create_job_with_metadata` interceptor runs after the + `post_create_job` interceptor. The (possibly modified) response returned by + `post_create_job` will be passed to + `post_create_job_with_metadata`. + """ + return response, metadata + def pre_create_resource_allowance( self, request: batch.CreateResourceAllowanceRequest, @@ -250,12 +294,38 @@ def post_create_resource_allowance( ) -> gcb_resource_allowance.ResourceAllowance: """Post-rpc interceptor for create_resource_allowance - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_resource_allowance_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the BatchService server but before - it is returned to user code. + it is returned to user code. This `post_create_resource_allowance` interceptor runs + before the `post_create_resource_allowance_with_metadata` interceptor. """ return response + def post_create_resource_allowance_with_metadata( + self, + response: gcb_resource_allowance.ResourceAllowance, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + gcb_resource_allowance.ResourceAllowance, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for create_resource_allowance + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BatchService server but before it is returned to user code. + + We recommend only using this `post_create_resource_allowance_with_metadata` + interceptor in new development instead of the `post_create_resource_allowance` interceptor. + When both interceptors are used, this `post_create_resource_allowance_with_metadata` interceptor runs after the + `post_create_resource_allowance` interceptor. The (possibly modified) response returned by + `post_create_resource_allowance` will be passed to + `post_create_resource_allowance_with_metadata`. + """ + return response, metadata + def pre_delete_job( self, request: batch.DeleteJobRequest, @@ -273,12 +343,35 @@ def post_delete_job( ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_job - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_job_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the BatchService server but before - it is returned to user code. + it is returned to user code. This `post_delete_job` interceptor runs + before the `post_delete_job_with_metadata` interceptor. 
""" return response + def post_delete_job_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_job + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BatchService server but before it is returned to user code. + + We recommend only using this `post_delete_job_with_metadata` + interceptor in new development instead of the `post_delete_job` interceptor. + When both interceptors are used, this `post_delete_job_with_metadata` interceptor runs after the + `post_delete_job` interceptor. The (possibly modified) response returned by + `post_delete_job` will be passed to + `post_delete_job_with_metadata`. + """ + return response, metadata + def pre_delete_resource_allowance( self, request: batch.DeleteResourceAllowanceRequest, @@ -298,12 +391,35 @@ def post_delete_resource_allowance( ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_resource_allowance - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_resource_allowance_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the BatchService server but before - it is returned to user code. + it is returned to user code. This `post_delete_resource_allowance` interceptor runs + before the `post_delete_resource_allowance_with_metadata` interceptor. """ return response + def post_delete_resource_allowance_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_resource_allowance + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BatchService server but before it is returned to user code. + + We recommend only using this `post_delete_resource_allowance_with_metadata` + interceptor in new development instead of the `post_delete_resource_allowance` interceptor. + When both interceptors are used, this `post_delete_resource_allowance_with_metadata` interceptor runs after the + `post_delete_resource_allowance` interceptor. The (possibly modified) response returned by + `post_delete_resource_allowance` will be passed to + `post_delete_resource_allowance_with_metadata`. + """ + return response, metadata + def pre_get_job( self, request: batch.GetJobRequest, @@ -319,12 +435,33 @@ def pre_get_job( def post_get_job(self, response: job.Job) -> job.Job: """Post-rpc interceptor for get_job - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_job_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the BatchService server but before - it is returned to user code. + it is returned to user code. This `post_get_job` interceptor runs + before the `post_get_job_with_metadata` interceptor. """ return response + def post_get_job_with_metadata( + self, response: job.Job, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[job.Job, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_job + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BatchService server but before it is returned to user code. 
+ + We recommend only using this `post_get_job_with_metadata` + interceptor in new development instead of the `post_get_job` interceptor. + When both interceptors are used, this `post_get_job_with_metadata` interceptor runs after the + `post_get_job` interceptor. The (possibly modified) response returned by + `post_get_job` will be passed to + `post_get_job_with_metadata`. + """ + return response, metadata + def pre_get_resource_allowance( self, request: batch.GetResourceAllowanceRequest, @@ -344,12 +481,37 @@ def post_get_resource_allowance( ) -> resource_allowance.ResourceAllowance: """Post-rpc interceptor for get_resource_allowance - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_resource_allowance_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the BatchService server but before - it is returned to user code. + it is returned to user code. This `post_get_resource_allowance` interceptor runs + before the `post_get_resource_allowance_with_metadata` interceptor. """ return response + def post_get_resource_allowance_with_metadata( + self, + response: resource_allowance.ResourceAllowance, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + resource_allowance.ResourceAllowance, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for get_resource_allowance + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BatchService server but before it is returned to user code. + + We recommend only using this `post_get_resource_allowance_with_metadata` + interceptor in new development instead of the `post_get_resource_allowance` interceptor. + When both interceptors are used, this `post_get_resource_allowance_with_metadata` interceptor runs after the + `post_get_resource_allowance` interceptor. The (possibly modified) response returned by + `post_get_resource_allowance` will be passed to + `post_get_resource_allowance_with_metadata`. + """ + return response, metadata + def pre_get_task( self, request: batch.GetTaskRequest, @@ -365,12 +527,33 @@ def pre_get_task( def post_get_task(self, response: task.Task) -> task.Task: """Post-rpc interceptor for get_task - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_task_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the BatchService server but before - it is returned to user code. + it is returned to user code. This `post_get_task` interceptor runs + before the `post_get_task_with_metadata` interceptor. """ return response + def post_get_task_with_metadata( + self, response: task.Task, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[task.Task, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_task + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BatchService server but before it is returned to user code. + + We recommend only using this `post_get_task_with_metadata` + interceptor in new development instead of the `post_get_task` interceptor. + When both interceptors are used, this `post_get_task_with_metadata` interceptor runs after the + `post_get_task` interceptor. The (possibly modified) response returned by + `post_get_task` will be passed to + `post_get_task_with_metadata`. 
+ """ + return response, metadata + def pre_list_jobs( self, request: batch.ListJobsRequest, @@ -388,12 +571,35 @@ def post_list_jobs( ) -> batch.ListJobsResponse: """Post-rpc interceptor for list_jobs - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_jobs_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the BatchService server but before - it is returned to user code. + it is returned to user code. This `post_list_jobs` interceptor runs + before the `post_list_jobs_with_metadata` interceptor. """ return response + def post_list_jobs_with_metadata( + self, + response: batch.ListJobsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[batch.ListJobsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_jobs + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BatchService server but before it is returned to user code. + + We recommend only using this `post_list_jobs_with_metadata` + interceptor in new development instead of the `post_list_jobs` interceptor. + When both interceptors are used, this `post_list_jobs_with_metadata` interceptor runs after the + `post_list_jobs` interceptor. The (possibly modified) response returned by + `post_list_jobs` will be passed to + `post_list_jobs_with_metadata`. + """ + return response, metadata + def pre_list_resource_allowances( self, request: batch.ListResourceAllowancesRequest, @@ -413,12 +619,37 @@ def post_list_resource_allowances( ) -> batch.ListResourceAllowancesResponse: """Post-rpc interceptor for list_resource_allowances - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_resource_allowances_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the BatchService server but before - it is returned to user code. + it is returned to user code. This `post_list_resource_allowances` interceptor runs + before the `post_list_resource_allowances_with_metadata` interceptor. """ return response + def post_list_resource_allowances_with_metadata( + self, + response: batch.ListResourceAllowancesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + batch.ListResourceAllowancesResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_resource_allowances + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BatchService server but before it is returned to user code. + + We recommend only using this `post_list_resource_allowances_with_metadata` + interceptor in new development instead of the `post_list_resource_allowances` interceptor. + When both interceptors are used, this `post_list_resource_allowances_with_metadata` interceptor runs after the + `post_list_resource_allowances` interceptor. The (possibly modified) response returned by + `post_list_resource_allowances` will be passed to + `post_list_resource_allowances_with_metadata`. + """ + return response, metadata + def pre_list_tasks( self, request: batch.ListTasksRequest, @@ -436,12 +667,35 @@ def post_list_tasks( ) -> batch.ListTasksResponse: """Post-rpc interceptor for list_tasks - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_tasks_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response after it is returned by the BatchService server but before - it is returned to user code. + it is returned to user code. This `post_list_tasks` interceptor runs + before the `post_list_tasks_with_metadata` interceptor. """ return response + def post_list_tasks_with_metadata( + self, + response: batch.ListTasksResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[batch.ListTasksResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_tasks + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BatchService server but before it is returned to user code. + + We recommend only using this `post_list_tasks_with_metadata` + interceptor in new development instead of the `post_list_tasks` interceptor. + When both interceptors are used, this `post_list_tasks_with_metadata` interceptor runs after the + `post_list_tasks` interceptor. The (possibly modified) response returned by + `post_list_tasks` will be passed to + `post_list_tasks_with_metadata`. + """ + return response, metadata + def pre_update_job( self, request: batch.UpdateJobRequest, @@ -457,12 +711,33 @@ def pre_update_job( def post_update_job(self, response: gcb_job.Job) -> gcb_job.Job: """Post-rpc interceptor for update_job - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_job_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the BatchService server but before - it is returned to user code. + it is returned to user code. This `post_update_job` interceptor runs + before the `post_update_job_with_metadata` interceptor. """ return response + def post_update_job_with_metadata( + self, response: gcb_job.Job, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[gcb_job.Job, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_job + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BatchService server but before it is returned to user code. + + We recommend only using this `post_update_job_with_metadata` + interceptor in new development instead of the `post_update_job` interceptor. + When both interceptors are used, this `post_update_job_with_metadata` interceptor runs after the + `post_update_job` interceptor. The (possibly modified) response returned by + `post_update_job` will be passed to + `post_update_job_with_metadata`. + """ + return response, metadata + def pre_update_resource_allowance( self, request: batch.UpdateResourceAllowanceRequest, @@ -482,12 +757,38 @@ def post_update_resource_allowance( ) -> gcb_resource_allowance.ResourceAllowance: """Post-rpc interceptor for update_resource_allowance - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_resource_allowance_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the BatchService server but before - it is returned to user code. + it is returned to user code. This `post_update_resource_allowance` interceptor runs + before the `post_update_resource_allowance_with_metadata` interceptor. 
""" return response + def post_update_resource_allowance_with_metadata( + self, + response: gcb_resource_allowance.ResourceAllowance, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + gcb_resource_allowance.ResourceAllowance, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for update_resource_allowance + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BatchService server but before it is returned to user code. + + We recommend only using this `post_update_resource_allowance_with_metadata` + interceptor in new development instead of the `post_update_resource_allowance` interceptor. + When both interceptors are used, this `post_update_resource_allowance_with_metadata` interceptor runs after the + `post_update_resource_allowance` interceptor. The (possibly modified) response returned by + `post_update_resource_allowance` will be passed to + `post_update_resource_allowance_with_metadata`. + """ + return response, metadata + def pre_get_location( self, request: locations_pb2.GetLocationRequest, @@ -905,6 +1206,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_cancel_job(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_cancel_job_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1055,6 +1360,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_job(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_job_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1208,6 +1517,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_resource_allowance(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_resource_allowance_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1355,6 +1668,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_job(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_job_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1499,6 +1816,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_resource_allowance(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_resource_allowance_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1641,6 +1962,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_job(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_job_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED 
and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1787,6 +2112,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_resource_allowance(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_resource_allowance_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1931,6 +2260,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_task(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_task_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2073,6 +2406,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_jobs(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_jobs_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2215,6 +2552,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_resource_allowances(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_resource_allowances_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2361,6 +2702,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_tasks(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_tasks_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2511,6 +2856,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_job(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_job_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2664,6 +3013,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_resource_allowance(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_resource_allowance_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-batch/google/cloud/batch_v1alpha/types/batch.py b/packages/google-cloud-batch/google/cloud/batch_v1alpha/types/batch.py index a1f56648f3a4..3a6d03f097af 100644 --- a/packages/google-cloud-batch/google/cloud/batch_v1alpha/types/batch.py +++ b/packages/google-cloud-batch/google/cloud/batch_v1alpha/types/batch.py @@ -698,9 +698,11 @@ class OperationMetadata(proto.Message): requested_cancellation (bool): Output only. Identifies whether the user has requested cancellation of the operation. 
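# For reference, the `response_metadata` handed to the *_with_metadata hooks
# above is simply the HTTP response headers flattened into (str, str) pairs.
# The header names and values below are placeholders.
headers = {"Content-Type": "application/json; charset=UTF-8", "X-Debug-Id": "abc123"}
response_metadata = [(k, str(v)) for k, v in headers.items()]
# -> [('Content-Type', 'application/json; charset=UTF-8'), ('X-Debug-Id', 'abc123')]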
Operations that have - successfully been cancelled have [Operation.error][] value - with a [google.rpc.Status.code][google.rpc.Status.code] of - 1, corresponding to ``Code.CANCELLED``. + successfully been cancelled have + [google.longrunning.Operation.error][google.longrunning.Operation.error] + value with a + [google.rpc.Status.code][google.rpc.Status.code] of 1, + corresponding to ``Code.CANCELLED``. api_version (str): Output only. API version used to start the operation. diff --git a/packages/google-cloud-batch/noxfile.py b/packages/google-cloud-batch/noxfile.py index a9ceef47133c..0acc836b384e 100644 --- a/packages/google-cloud-batch/noxfile.py +++ b/packages/google-cloud-batch/noxfile.py @@ -382,20 +382,29 @@ def docfx(session): ["python", "upb", "cpp"], ) def prerelease_deps(session, protobuf_implementation): - """Run all tests with prerelease versions of dependencies installed.""" + """ + Run all tests with pre-release versions of dependencies installed + rather than the standard non pre-release versions. + Pre-releases versions can be installed using + `pip install --pre `. + """ if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): session.skip("cpp implementation is not supported in python 3.11+") # Install all dependencies - session.install("-e", ".[all, tests, tracing]") + session.install("-e", ".") + unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES + # Install dependencies for the unit test environment session.install(*unit_deps_all) + system_deps_all = ( SYSTEM_TEST_STANDARD_DEPENDENCIES + SYSTEM_TEST_EXTERNAL_DEPENDENCIES + SYSTEM_TEST_EXTRAS ) + # Install dependencies for the system test environment session.install(*system_deps_all) # Because we test minimum dependency versions on the minimum Python @@ -417,6 +426,7 @@ def prerelease_deps(session, protobuf_implementation): ) ] + # Install dependencies specified in `testing/constraints-X.txt`. session.install(*constraints_deps) prerel_deps = [ @@ -458,3 +468,70 @@ def prerelease_deps(session, protobuf_implementation): "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, }, ) + + +@nox.session(python="3.13") +@nox.parametrize( + "protobuf_implementation", + ["python", "upb"], +) +def core_deps_from_source(session, protobuf_implementation): + """Run all tests with local versions of core dependencies installed, + rather than pulling core dependencies from PyPI. + """ + + # Install all dependencies + session.install(".") + + # Install dependencies for the unit test environment + unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES + session.install(*unit_deps_all) + + # Install dependencies for the system test environment + system_deps_all = ( + SYSTEM_TEST_STANDARD_DEPENDENCIES + + SYSTEM_TEST_EXTERNAL_DEPENDENCIES + + SYSTEM_TEST_EXTRAS + ) + session.install(*system_deps_all) + + # Because we test minimum dependency versions on the minimum Python + # version, the first version we test with in the unit tests sessions has a + # constraints file containing all dependencies and extras that should be installed. + with open( + CURRENT_DIRECTORY + / "testing" + / f"constraints-{UNIT_TEST_PYTHON_VERSIONS[0]}.txt", + encoding="utf-8", + ) as constraints_file: + constraints_text = constraints_file.read() + + # Ignore leading whitespace and comment lines. 
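# A small illustration of the constraint-parsing pattern used above and reused
# below: only pinned "name==version" lines are collected; comments, blank lines
# and unpinned entries are skipped. The package names and versions are made up.
import re

sample_constraints = """\
# a comment
google-api-core==1.34.0
proto-plus==1.22.3

some-unpinned-dep
"""
pinned = [
    m.group(1)
    for m in re.finditer(r"^\s*(\S+)(?===\S+)", sample_constraints, flags=re.MULTILINE)
]
assert pinned == ["google-api-core", "proto-plus"]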
+ constraints_deps = [ + match.group(1) + for match in re.finditer( + r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE + ) + ] + + # Install dependencies specified in `testing/constraints-X.txt`. + session.install(*constraints_deps) + + core_dependencies_from_source = [ + "google-api-core @ git+https://github.com/googleapis/python-api-core.git", + "google-auth @ git+https://github.com/googleapis/google-auth-library-python.git", + f"{CURRENT_DIRECTORY}/../googleapis-common-protos", + f"{CURRENT_DIRECTORY}/../grpc-google-iam-v1", + "proto-plus @ git+https://github.com/googleapis/proto-plus-python.git", + ] + + for dep in core_dependencies_from_source: + session.install(dep, "--ignore-installed", "--no-deps") + + session.run( + "py.test", + "tests/unit", + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, + ) diff --git a/packages/google-cloud-batch/samples/generated_samples/batch_v1_generated_batch_service_cancel_job_async.py b/packages/google-cloud-batch/samples/generated_samples/batch_v1_generated_batch_service_cancel_job_async.py new file mode 100644 index 000000000000..87a4679ccdb9 --- /dev/null +++ b/packages/google-cloud-batch/samples/generated_samples/batch_v1_generated_batch_service_cancel_job_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CancelJob +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-batch + + +# [START batch_v1_generated_BatchService_CancelJob_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import batch_v1 + + +async def sample_cancel_job(): + # Create a client + client = batch_v1.BatchServiceAsyncClient() + + # Initialize request argument(s) + request = batch_v1.CancelJobRequest( + name="name_value", + ) + + # Make the request + operation = client.cancel_job(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END batch_v1_generated_BatchService_CancelJob_async] diff --git a/packages/google-cloud-batch/samples/generated_samples/batch_v1_generated_batch_service_cancel_job_sync.py b/packages/google-cloud-batch/samples/generated_samples/batch_v1_generated_batch_service_cancel_job_sync.py new file mode 100644 index 000000000000..20acc49f5fde --- /dev/null +++ b/packages/google-cloud-batch/samples/generated_samples/batch_v1_generated_batch_service_cancel_job_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CancelJob +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-batch + + +# [START batch_v1_generated_BatchService_CancelJob_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import batch_v1 + + +def sample_cancel_job(): + # Create a client + client = batch_v1.BatchServiceClient() + + # Initialize request argument(s) + request = batch_v1.CancelJobRequest( + name="name_value", + ) + + # Make the request + operation = client.cancel_job(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END batch_v1_generated_BatchService_CancelJob_sync] diff --git a/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1.json b/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1.json index 57b6b8df7022..e9a7f2a3ba58 100644 --- a/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1.json +++ b/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1.json @@ -8,9 +8,170 @@ ], "language": "PYTHON", "name": "google-cloud-batch", - "version": "0.17.32" + "version": "0.17.34" }, "snippets": [ + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.batch_v1.BatchServiceAsyncClient", + "shortName": "BatchServiceAsyncClient" + }, + "fullName": "google.cloud.batch_v1.BatchServiceAsyncClient.cancel_job", + "method": { + "fullName": "google.cloud.batch.v1.BatchService.CancelJob", + "service": { + "fullName": "google.cloud.batch.v1.BatchService", + "shortName": "BatchService" + }, + "shortName": "CancelJob" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.batch_v1.types.CancelJobRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "cancel_job" + }, + "description": "Sample for CancelJob", + "file": "batch_v1_generated_batch_service_cancel_job_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "batch_v1_generated_BatchService_CancelJob_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "batch_v1_generated_batch_service_cancel_job_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.batch_v1.BatchServiceClient", + "shortName": "BatchServiceClient" + }, + "fullName": "google.cloud.batch_v1.BatchServiceClient.cancel_job", + "method": { + "fullName": "google.cloud.batch.v1.BatchService.CancelJob", + "service": { + "fullName": "google.cloud.batch.v1.BatchService", + "shortName": "BatchService" + }, + "shortName": "CancelJob" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.batch_v1.types.CancelJobRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": 
"timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "cancel_job" + }, + "description": "Sample for CancelJob", + "file": "batch_v1_generated_batch_service_cancel_job_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "batch_v1_generated_BatchService_CancelJob_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "batch_v1_generated_batch_service_cancel_job_sync.py" + }, { "canonical": true, "clientMethod": { diff --git a/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1alpha.json b/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1alpha.json index e831046026d5..0fac004369d3 100644 --- a/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1alpha.json +++ b/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1alpha.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-batch", - "version": "0.17.32" + "version": "0.17.34" }, "snippets": [ { diff --git a/packages/google-cloud-batch/scripts/fixup_batch_v1_keywords.py b/packages/google-cloud-batch/scripts/fixup_batch_v1_keywords.py index 0114807e8932..4b275b189c8e 100644 --- a/packages/google-cloud-batch/scripts/fixup_batch_v1_keywords.py +++ b/packages/google-cloud-batch/scripts/fixup_batch_v1_keywords.py @@ -39,6 +39,7 @@ def partition( class batchCallTransformer(cst.CSTTransformer): CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { + 'cancel_job': ('name', 'request_id', ), 'create_job': ('parent', 'job', 'job_id', 'request_id', ), 'delete_job': ('name', 'reason', 'request_id', ), 'get_job': ('name', ), diff --git a/packages/google-cloud-batch/tests/unit/gapic/batch_v1/test_batch_service.py b/packages/google-cloud-batch/tests/unit/gapic/batch_v1/test_batch_service.py index e39ed8710d54..9d2f573ed5b5 100644 --- a/packages/google-cloud-batch/tests/unit/gapic/batch_v1/test_batch_service.py +++ b/packages/google-cloud-batch/tests/unit/gapic/batch_v1/test_batch_service.py @@ -77,6 +77,13 @@ from google.cloud.batch_v1.types import job as gcb_job from google.cloud.batch_v1.types import task, volume +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -320,6 +327,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = BatchServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = BatchServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -2057,6 +2107,331 @@ async def test_delete_job_flattened_error_async(): ) +@pytest.mark.parametrize( + "request_type", + [ + batch.CancelJobRequest, + dict, + ], +) +def test_cancel_job(request_type, transport: str = "grpc"): + client = BatchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_job), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.cancel_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = batch.CancelJobRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_cancel_job_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BatchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = batch.CancelJobRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_job), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.cancel_job(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == batch.CancelJobRequest( + name="name_value", + ) + + +def test_cancel_job_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BatchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.cancel_job in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.cancel_job] = mock_rpc + request = {} + client.cancel_job(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.cancel_job(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_cancel_job_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BatchServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.cancel_job + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.cancel_job + ] = mock_rpc + + request = {} + await client.cancel_job(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.cancel_job(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_cancel_job_async( + transport: str = "grpc_asyncio", request_type=batch.CancelJobRequest +): + client = BatchServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
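# Hedged context for the *_use_cached_wrapped_rpc tests above: the client wraps
# every RPC exactly once at construction time (applying default retry and
# timeout settings via gapic_v1.method.wrap_method) and keeps the wrappers in
# client._transport._wrapped_methods, so later calls reuse the cached wrapper
# rather than re-wrapping the method on every invocation.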
+ with mock.patch.object(type(client.transport.cancel_job), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.cancel_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = batch.CancelJobRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_cancel_job_async_from_dict(): + await test_cancel_job_async(request_type=dict) + + +def test_cancel_job_field_headers(): + client = BatchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = batch.CancelJobRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_job), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.cancel_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_cancel_job_field_headers_async(): + client = BatchServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = batch.CancelJobRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_job), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.cancel_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_cancel_job_flattened(): + client = BatchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_job), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.cancel_job( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
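# The flattened-argument tests above and below boil down to this calling
# convention (placeholder job name; both forms send the same CancelJobRequest,
# and mixing them raises ValueError):
from google.cloud import batch_v1

client = batch_v1.BatchServiceClient()
name = "projects/my-project/locations/us-central1/jobs/my-job"

client.cancel_job(name=name)                                     # flattened field
client.cancel_job(request=batch_v1.CancelJobRequest(name=name))  # request object
# client.cancel_job(batch_v1.CancelJobRequest(name=name), name=name)  # ValueError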
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_cancel_job_flattened_error(): + client = BatchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.cancel_job( + batch.CancelJobRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_cancel_job_flattened_async(): + client = BatchServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_job), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.cancel_job( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_cancel_job_flattened_error_async(): + client = BatchServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.cancel_job( + batch.CancelJobRequest(), + name="name_value", + ) + + @pytest.mark.parametrize( "request_type", [ @@ -3749,7 +4124,104 @@ def test_get_job_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = job.Job() + return_value = job.Job() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/locations/sample2/jobs/sample3"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = job.Job.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.get_job(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/jobs/*}" % client.transport._host, + args[1], + ) + + +def test_get_job_rest_flattened_error(transport: str = "rest"): + client = BatchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
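Taken together, the flattened tests encode the calling convention for the new CancelJob RPC: pass either a request object or flattened keyword fields, never both. A usage sketch, assuming default application credentials and that CancelJobRequest is re-exported at the batch_v1 package level like the existing request types:

    from google.cloud import batch_v1

    client = batch_v1.BatchServiceClient()

    # Request-object form.
    operation = client.cancel_job(
        request=batch_v1.CancelJobRequest(
            name="projects/my-project/locations/us-central1/jobs/my-job"
        )
    )

    # Flattened form; combining both styles raises ValueError.
    operation = client.cancel_job(
        name="projects/my-project/locations/us-central1/jobs/my-job"
    )
    operation.result()  # CancelJob returns a long-running operation.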
+ with pytest.raises(ValueError): + client.get_job( + batch.GetJobRequest(), + name="name_value", + ) + + +def test_delete_job_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BatchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_job in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete_job] = mock_rpc + + request = {} + client.delete_job(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_job(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_job_rest_flattened(): + client = BatchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method sample_request = {"name": "projects/sample1/locations/sample2/jobs/sample3"} @@ -3763,14 +4235,12 @@ def test_get_job_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = job.Job.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_job(**mock_args) + client.delete_job(**mock_args) # Establish that the underlying call was made with the expected # request object values. @@ -3782,7 +4252,7 @@ def test_get_job_rest_flattened(): ) -def test_get_job_rest_flattened_error(transport: str = "rest"): +def test_delete_job_rest_flattened_error(transport: str = "rest"): client = BatchServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -3791,13 +4261,13 @@ def test_get_job_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
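The *_use_cached_wrapped_rpc tests assert that each RPC is wrapped exactly once, in _prep_wrapped_messages, and that later calls reuse the cached callable from _transport._wrapped_methods. A simplified sketch of the pattern being verified, not the transport's actual code:

    from google.api_core import gapic_v1

    def _bare_cancel_job(request, retry=None, timeout=None, metadata=()):
        ...  # the raw stub call issued by the transport

    # Wrapped once at client construction and reused for every call, so the
    # default retry/timeout/client-info handling is not rebuilt per request.
    wrapped_cancel_job = gapic_v1.method.wrap_method(
        _bare_cancel_job,
        default_timeout=60.0,
        client_info=gapic_v1.client_info.ClientInfo(),
    )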
with pytest.raises(ValueError): - client.get_job( - batch.GetJobRequest(), + client.delete_job( + batch.DeleteJobRequest(), name="name_value", ) -def test_delete_job_rest_use_cached_wrapped_rpc(): +def test_cancel_job_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -3811,17 +4281,17 @@ def test_delete_job_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.delete_job in client._transport._wrapped_methods + assert client._transport.cancel_job in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.delete_job] = mock_rpc + client._transport._wrapped_methods[client._transport.cancel_job] = mock_rpc request = {} - client.delete_job(request) + client.cancel_job(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 @@ -3830,14 +4300,94 @@ def test_delete_job_rest_use_cached_wrapped_rpc(): # subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.delete_job(request) + client.cancel_job(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_delete_job_rest_flattened(): +def test_cancel_job_rest_required_fields(request_type=batch.CancelJobRequest): + transport_class = transports.BatchServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).cancel_job._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).cancel_job._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = BatchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.cancel_job(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_cancel_job_rest_unset_required_fields(): + transport = transports.BatchServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.cancel_job._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +def test_cancel_job_rest_flattened(): client = BatchServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -3865,19 +4415,20 @@ def test_delete_job_rest_flattened(): req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.delete_job(**mock_args) + client.cancel_job(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/jobs/*}" % client.transport._host, + "%s/v1/{name=projects/*/locations/*/jobs/*}:cancel" + % client.transport._host, args[1], ) -def test_delete_job_rest_flattened_error(transport: str = "rest"): +def test_cancel_job_rest_flattened_error(transport: str = "rest"): client = BatchServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -3886,8 +4437,8 @@ def test_delete_job_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.delete_job( - batch.DeleteJobRequest(), + client.cancel_job( + batch.CancelJobRequest(), name="name_value", ) @@ -4651,6 +5202,27 @@ def test_delete_job_empty_call_grpc(): assert args[0] == request_msg +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_cancel_job_empty_call_grpc(): + client = BatchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.cancel_job), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.cancel_job(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = batch.CancelJobRequest() + + assert args[0] == request_msg + + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. def test_list_jobs_empty_call_grpc(): @@ -4811,6 +5383,31 @@ async def test_delete_job_empty_call_grpc_asyncio(): assert args[0] == request_msg +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
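The path_template assertion above pins down the HTTP binding for the new RPC: CancelJob is posted to the job resource name with a ":cancel" verb suffix. Illustrative only, assuming the service's default endpoint:

    name = "projects/my-project/locations/us-central1/jobs/my-job"
    url = f"https://batch.googleapis.com/v1/{name}:cancel"  # issued as an HTTP POST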
+@pytest.mark.asyncio +async def test_cancel_job_empty_call_grpc_asyncio(): + client = BatchServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.cancel_job), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.cancel_job(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = batch.CancelJobRequest() + + assert args[0] == request_msg + + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @pytest.mark.asyncio @@ -5218,10 +5815,13 @@ def test_create_job_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.BatchServiceRestInterceptor, "post_create_job" ) as post, mock.patch.object( + transports.BatchServiceRestInterceptor, "post_create_job_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.BatchServiceRestInterceptor, "pre_create_job" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = batch.CreateJobRequest.pb(batch.CreateJobRequest()) transcode.return_value = { "method": "post", @@ -5243,6 +5843,7 @@ def test_create_job_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gcb_job.Job() + post_with_metadata.return_value = gcb_job.Job(), metadata client.create_job( request, @@ -5254,6 +5855,7 @@ def test_create_job_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_job_rest_bad_request(request_type=batch.GetJobRequest): @@ -5340,10 +5942,13 @@ def test_get_job_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.BatchServiceRestInterceptor, "post_get_job" ) as post, mock.patch.object( + transports.BatchServiceRestInterceptor, "post_get_job_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.BatchServiceRestInterceptor, "pre_get_job" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = batch.GetJobRequest.pb(batch.GetJobRequest()) transcode.return_value = { "method": "post", @@ -5365,6 +5970,7 @@ def test_get_job_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = job.Job() + post_with_metadata.return_value = job.Job(), metadata client.get_job( request, @@ -5376,6 +5982,7 @@ def test_get_job_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_job_rest_bad_request(request_type=batch.DeleteJobRequest): @@ -5454,10 +6061,13 @@ def test_delete_job_rest_interceptors(null_interceptor): ), mock.patch.object( transports.BatchServiceRestInterceptor, "post_delete_job" ) as post, mock.patch.object( + transports.BatchServiceRestInterceptor, "post_delete_job_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.BatchServiceRestInterceptor, "pre_delete_job" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = batch.DeleteJobRequest.pb(batch.DeleteJobRequest()) transcode.return_value 
= { "method": "post", @@ -5479,6 +6089,7 @@ def test_delete_job_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.delete_job( request, @@ -5490,6 +6101,126 @@ def test_delete_job_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_cancel_job_rest_bad_request(request_type=batch.CancelJobRequest): + client = BatchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/jobs/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.cancel_job(request) + + +@pytest.mark.parametrize( + "request_type", + [ + batch.CancelJobRequest, + dict, + ], +) +def test_cancel_job_rest_call_success(request_type): + client = BatchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/jobs/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.cancel_job(request) + + # Establish that the response is the type that we expect. 
+ json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_cancel_job_rest_interceptors(null_interceptor): + transport = transports.BatchServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.BatchServiceRestInterceptor(), + ) + client = BatchServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.BatchServiceRestInterceptor, "post_cancel_job" + ) as post, mock.patch.object( + transports.BatchServiceRestInterceptor, "post_cancel_job_with_metadata" + ) as post_with_metadata, mock.patch.object( + transports.BatchServiceRestInterceptor, "pre_cancel_job" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = batch.CancelJobRequest.pb(batch.CancelJobRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = batch.CancelJobRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata + + client.cancel_job( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_jobs_rest_bad_request(request_type=batch.ListJobsRequest): @@ -5574,10 +6305,13 @@ def test_list_jobs_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.BatchServiceRestInterceptor, "post_list_jobs" ) as post, mock.patch.object( + transports.BatchServiceRestInterceptor, "post_list_jobs_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.BatchServiceRestInterceptor, "pre_list_jobs" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = batch.ListJobsRequest.pb(batch.ListJobsRequest()) transcode.return_value = { "method": "post", @@ -5599,6 +6333,7 @@ def test_list_jobs_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = batch.ListJobsResponse() + post_with_metadata.return_value = batch.ListJobsResponse(), metadata client.list_jobs( request, @@ -5610,6 +6345,7 @@ def test_list_jobs_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_task_rest_bad_request(request_type=batch.GetTaskRequest): @@ -5696,10 +6432,13 @@ def test_get_task_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.BatchServiceRestInterceptor, "post_get_task" ) as post, mock.patch.object( + transports.BatchServiceRestInterceptor, "post_get_task_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.BatchServiceRestInterceptor, "pre_get_task" ) as pre: pre.assert_not_called() 
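These interceptor tests exercise the new post_cancel_job_with_metadata hook alongside the existing pre/post hooks. A minimal sketch of wiring a custom interceptor into a client, mirroring the transport construction used in the test above (anonymous credentials stand in for real ones):

    from google.auth import credentials as ga_credentials
    from google.cloud.batch_v1.services.batch_service import BatchServiceClient, transports

    class LoggingInterceptor(transports.BatchServiceRestInterceptor):
        def post_cancel_job_with_metadata(self, response, metadata):
            # Runs after post_cancel_job; the returned pair is what user code sees.
            return response, metadata

    transport = transports.BatchServiceRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
        interceptor=LoggingInterceptor(),
    )
    client = BatchServiceClient(transport=transport)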
post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = batch.GetTaskRequest.pb(batch.GetTaskRequest()) transcode.return_value = { "method": "post", @@ -5721,6 +6460,7 @@ def test_get_task_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = task.Task() + post_with_metadata.return_value = task.Task(), metadata client.get_task( request, @@ -5732,6 +6472,7 @@ def test_get_task_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_tasks_rest_bad_request(request_type=batch.ListTasksRequest): @@ -5820,10 +6561,13 @@ def test_list_tasks_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.BatchServiceRestInterceptor, "post_list_tasks" ) as post, mock.patch.object( + transports.BatchServiceRestInterceptor, "post_list_tasks_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.BatchServiceRestInterceptor, "pre_list_tasks" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = batch.ListTasksRequest.pb(batch.ListTasksRequest()) transcode.return_value = { "method": "post", @@ -5845,6 +6589,7 @@ def test_list_tasks_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = batch.ListTasksResponse() + post_with_metadata.return_value = batch.ListTasksResponse(), metadata client.list_tasks( request, @@ -5856,6 +6601,7 @@ def test_list_tasks_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationRequest): @@ -6293,6 +7039,26 @@ def test_delete_job_empty_call_rest(): assert args[0] == request_msg +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_cancel_job_empty_call_rest(): + client = BatchServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.cancel_job), "__call__") as call: + client.cancel_job(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = batch.CancelJobRequest() + + assert args[0] == request_msg + + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
def test_list_jobs_empty_call_rest(): @@ -6406,6 +7172,7 @@ def test_batch_service_base_transport(): "create_job", "get_job", "delete_job", + "cancel_job", "list_jobs", "get_task", "list_tasks", @@ -6684,6 +7451,9 @@ def test_batch_service_client_transport_session_collision(transport_name): session1 = client1.transport.delete_job._session session2 = client2.transport.delete_job._session assert session1 != session2 + session1 = client1.transport.cancel_job._session + session2 = client2.transport.cancel_job._session + assert session1 != session2 session1 = client1.transport.list_jobs._session session2 = client2.transport.list_jobs._session assert session1 != session2 diff --git a/packages/google-cloud-batch/tests/unit/gapic/batch_v1alpha/test_batch_service.py b/packages/google-cloud-batch/tests/unit/gapic/batch_v1alpha/test_batch_service.py index 204a7506d590..f482cae6cf40 100644 --- a/packages/google-cloud-batch/tests/unit/gapic/batch_v1alpha/test_batch_service.py +++ b/packages/google-cloud-batch/tests/unit/gapic/batch_v1alpha/test_batch_service.py @@ -84,6 +84,13 @@ from google.cloud.batch_v1alpha.types import resource_allowance from google.cloud.batch_v1alpha.types import task, volume +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -327,6 +334,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = BatchServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = BatchServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -9796,10 +9846,13 @@ def test_create_job_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.BatchServiceRestInterceptor, "post_create_job" ) as post, mock.patch.object( + transports.BatchServiceRestInterceptor, "post_create_job_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.BatchServiceRestInterceptor, "pre_create_job" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = batch.CreateJobRequest.pb(batch.CreateJobRequest()) 
transcode.return_value = { "method": "post", @@ -9821,6 +9874,7 @@ def test_create_job_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gcb_job.Job() + post_with_metadata.return_value = gcb_job.Job(), metadata client.create_job( request, @@ -9832,6 +9886,7 @@ def test_create_job_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_job_rest_bad_request(request_type=batch.GetJobRequest): @@ -9920,10 +9975,13 @@ def test_get_job_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.BatchServiceRestInterceptor, "post_get_job" ) as post, mock.patch.object( + transports.BatchServiceRestInterceptor, "post_get_job_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.BatchServiceRestInterceptor, "pre_get_job" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = batch.GetJobRequest.pb(batch.GetJobRequest()) transcode.return_value = { "method": "post", @@ -9945,6 +10003,7 @@ def test_get_job_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = job.Job() + post_with_metadata.return_value = job.Job(), metadata client.get_job( request, @@ -9956,6 +10015,7 @@ def test_get_job_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_job_rest_bad_request(request_type=batch.DeleteJobRequest): @@ -10034,10 +10094,13 @@ def test_delete_job_rest_interceptors(null_interceptor): ), mock.patch.object( transports.BatchServiceRestInterceptor, "post_delete_job" ) as post, mock.patch.object( + transports.BatchServiceRestInterceptor, "post_delete_job_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.BatchServiceRestInterceptor, "pre_delete_job" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = batch.DeleteJobRequest.pb(batch.DeleteJobRequest()) transcode.return_value = { "method": "post", @@ -10059,6 +10122,7 @@ def test_delete_job_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.delete_job( request, @@ -10070,6 +10134,7 @@ def test_delete_job_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_cancel_job_rest_bad_request(request_type=batch.CancelJobRequest): @@ -10148,10 +10213,13 @@ def test_cancel_job_rest_interceptors(null_interceptor): ), mock.patch.object( transports.BatchServiceRestInterceptor, "post_cancel_job" ) as post, mock.patch.object( + transports.BatchServiceRestInterceptor, "post_cancel_job_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.BatchServiceRestInterceptor, "pre_cancel_job" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = batch.CancelJobRequest.pb(batch.CancelJobRequest()) transcode.return_value = { "method": "post", @@ -10173,6 +10241,7 @@ def test_cancel_job_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.cancel_job( request, @@ -10184,6 +10253,7 @@ def 
test_cancel_job_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_job_rest_bad_request(request_type=batch.UpdateJobRequest): @@ -10539,10 +10609,13 @@ def test_update_job_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.BatchServiceRestInterceptor, "post_update_job" ) as post, mock.patch.object( + transports.BatchServiceRestInterceptor, "post_update_job_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.BatchServiceRestInterceptor, "pre_update_job" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = batch.UpdateJobRequest.pb(batch.UpdateJobRequest()) transcode.return_value = { "method": "post", @@ -10564,6 +10637,7 @@ def test_update_job_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gcb_job.Job() + post_with_metadata.return_value = gcb_job.Job(), metadata client.update_job( request, @@ -10575,6 +10649,7 @@ def test_update_job_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_jobs_rest_bad_request(request_type=batch.ListJobsRequest): @@ -10659,10 +10734,13 @@ def test_list_jobs_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.BatchServiceRestInterceptor, "post_list_jobs" ) as post, mock.patch.object( + transports.BatchServiceRestInterceptor, "post_list_jobs_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.BatchServiceRestInterceptor, "pre_list_jobs" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = batch.ListJobsRequest.pb(batch.ListJobsRequest()) transcode.return_value = { "method": "post", @@ -10684,6 +10762,7 @@ def test_list_jobs_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = batch.ListJobsResponse() + post_with_metadata.return_value = batch.ListJobsResponse(), metadata client.list_jobs( request, @@ -10695,6 +10774,7 @@ def test_list_jobs_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_task_rest_bad_request(request_type=batch.GetTaskRequest): @@ -10781,10 +10861,13 @@ def test_get_task_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.BatchServiceRestInterceptor, "post_get_task" ) as post, mock.patch.object( + transports.BatchServiceRestInterceptor, "post_get_task_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.BatchServiceRestInterceptor, "pre_get_task" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = batch.GetTaskRequest.pb(batch.GetTaskRequest()) transcode.return_value = { "method": "post", @@ -10806,6 +10889,7 @@ def test_get_task_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = task.Task() + post_with_metadata.return_value = task.Task(), metadata client.get_task( request, @@ -10817,6 +10901,7 @@ def test_get_task_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_tasks_rest_bad_request(request_type=batch.ListTasksRequest): @@ -10905,10 +10990,13 @@ def test_list_tasks_rest_interceptors(null_interceptor): ) as 
transcode, mock.patch.object( transports.BatchServiceRestInterceptor, "post_list_tasks" ) as post, mock.patch.object( + transports.BatchServiceRestInterceptor, "post_list_tasks_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.BatchServiceRestInterceptor, "pre_list_tasks" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = batch.ListTasksRequest.pb(batch.ListTasksRequest()) transcode.return_value = { "method": "post", @@ -10930,6 +11018,7 @@ def test_list_tasks_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = batch.ListTasksResponse() + post_with_metadata.return_value = batch.ListTasksResponse(), metadata client.list_tasks( request, @@ -10941,6 +11030,7 @@ def test_list_tasks_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_resource_allowance_rest_bad_request( @@ -11119,10 +11209,14 @@ def test_create_resource_allowance_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.BatchServiceRestInterceptor, "post_create_resource_allowance" ) as post, mock.patch.object( + transports.BatchServiceRestInterceptor, + "post_create_resource_allowance_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.BatchServiceRestInterceptor, "pre_create_resource_allowance" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = batch.CreateResourceAllowanceRequest.pb( batch.CreateResourceAllowanceRequest() ) @@ -11148,6 +11242,10 @@ def test_create_resource_allowance_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gcb_resource_allowance.ResourceAllowance() + post_with_metadata.return_value = ( + gcb_resource_allowance.ResourceAllowance(), + metadata, + ) client.create_resource_allowance( request, @@ -11159,6 +11257,7 @@ def test_create_resource_allowance_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_resource_allowance_rest_bad_request( @@ -11249,10 +11348,14 @@ def test_get_resource_allowance_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.BatchServiceRestInterceptor, "post_get_resource_allowance" ) as post, mock.patch.object( + transports.BatchServiceRestInterceptor, + "post_get_resource_allowance_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.BatchServiceRestInterceptor, "pre_get_resource_allowance" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = batch.GetResourceAllowanceRequest.pb( batch.GetResourceAllowanceRequest() ) @@ -11278,6 +11381,10 @@ def test_get_resource_allowance_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resource_allowance.ResourceAllowance() + post_with_metadata.return_value = ( + resource_allowance.ResourceAllowance(), + metadata, + ) client.get_resource_allowance( request, @@ -11289,6 +11396,7 @@ def test_get_resource_allowance_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_resource_allowance_rest_bad_request( @@ -11373,10 +11481,14 @@ def test_delete_resource_allowance_rest_interceptors(null_interceptor): ), mock.patch.object( 
transports.BatchServiceRestInterceptor, "post_delete_resource_allowance" ) as post, mock.patch.object( + transports.BatchServiceRestInterceptor, + "post_delete_resource_allowance_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.BatchServiceRestInterceptor, "pre_delete_resource_allowance" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = batch.DeleteResourceAllowanceRequest.pb( batch.DeleteResourceAllowanceRequest() ) @@ -11400,6 +11512,7 @@ def test_delete_resource_allowance_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.delete_resource_allowance( request, @@ -11411,6 +11524,7 @@ def test_delete_resource_allowance_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_resource_allowances_rest_bad_request( @@ -11497,10 +11611,14 @@ def test_list_resource_allowances_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.BatchServiceRestInterceptor, "post_list_resource_allowances" ) as post, mock.patch.object( + transports.BatchServiceRestInterceptor, + "post_list_resource_allowances_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.BatchServiceRestInterceptor, "pre_list_resource_allowances" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = batch.ListResourceAllowancesRequest.pb( batch.ListResourceAllowancesRequest() ) @@ -11526,6 +11644,10 @@ def test_list_resource_allowances_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = batch.ListResourceAllowancesResponse() + post_with_metadata.return_value = ( + batch.ListResourceAllowancesResponse(), + metadata, + ) client.list_resource_allowances( request, @@ -11537,6 +11659,7 @@ def test_list_resource_allowances_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_resource_allowance_rest_bad_request( @@ -11723,10 +11846,14 @@ def test_update_resource_allowance_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.BatchServiceRestInterceptor, "post_update_resource_allowance" ) as post, mock.patch.object( + transports.BatchServiceRestInterceptor, + "post_update_resource_allowance_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.BatchServiceRestInterceptor, "pre_update_resource_allowance" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = batch.UpdateResourceAllowanceRequest.pb( batch.UpdateResourceAllowanceRequest() ) @@ -11752,6 +11879,10 @@ def test_update_resource_allowance_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gcb_resource_allowance.ResourceAllowance() + post_with_metadata.return_value = ( + gcb_resource_allowance.ResourceAllowance(), + metadata, + ) client.update_resource_allowance( request, @@ -11763,6 +11894,7 @@ def test_update_resource_allowance_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationRequest): diff --git 
a/packages/google-cloud-beyondcorp-appconnections/CHANGELOG.md b/packages/google-cloud-beyondcorp-appconnections/CHANGELOG.md index 7b4dc15bcd0b..006b73838987 100644 --- a/packages/google-cloud-beyondcorp-appconnections/CHANGELOG.md +++ b/packages/google-cloud-beyondcorp-appconnections/CHANGELOG.md @@ -1,5 +1,13 @@ # Changelog +## [0.4.15](https://github.com/googleapis/google-cloud-python/compare/google-cloud-beyondcorp-appconnections-v0.4.14...google-cloud-beyondcorp-appconnections-v0.4.15) (2025-02-12) + + +### Features + +* Add REST Interceptors which support reading metadata ([b1c3ce8](https://github.com/googleapis/google-cloud-python/commit/b1c3ce8b271e9d22afabcde054e81dcedae6b0ef)) +* Add support for reading selective GAPIC generation methods from service YAML ([b1c3ce8](https://github.com/googleapis/google-cloud-python/commit/b1c3ce8b271e9d22afabcde054e81dcedae6b0ef)) + ## [0.4.14](https://github.com/googleapis/google-cloud-python/compare/google-cloud-beyondcorp-appconnections-v0.4.13...google-cloud-beyondcorp-appconnections-v0.4.14) (2024-12-12) diff --git a/packages/google-cloud-beyondcorp-appconnections/README.rst b/packages/google-cloud-beyondcorp-appconnections/README.rst index 522af2a7382f..1a28080d74f3 100644 --- a/packages/google-cloud-beyondcorp-appconnections/README.rst +++ b/packages/google-cloud-beyondcorp-appconnections/README.rst @@ -26,12 +26,12 @@ In order to use this library, you first need to go through the following steps: 1. `Select or create a Cloud Platform project.`_ 2. `Enable billing for your project.`_ 3. `Enable the BeyondCorp AppConnections.`_ -4. `Setup Authentication.`_ +4. `Set up Authentication.`_ .. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project .. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project .. _Enable the BeyondCorp AppConnections.: https://cloud.google.com/beyondcorp/ -.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html +.. _Set up Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html Installation ~~~~~~~~~~~~ diff --git a/packages/google-cloud-beyondcorp-appconnections/google/cloud/beyondcorp_appconnections/gapic_version.py b/packages/google-cloud-beyondcorp-appconnections/google/cloud/beyondcorp_appconnections/gapic_version.py index 3106ac663ac7..49a0d50535a0 100644 --- a/packages/google-cloud-beyondcorp-appconnections/google/cloud/beyondcorp_appconnections/gapic_version.py +++ b/packages/google-cloud-beyondcorp-appconnections/google/cloud/beyondcorp_appconnections/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.4.14" # {x-release-please-version} +__version__ = "0.4.15" # {x-release-please-version} diff --git a/packages/google-cloud-beyondcorp-appconnections/google/cloud/beyondcorp_appconnections_v1/gapic_version.py b/packages/google-cloud-beyondcorp-appconnections/google/cloud/beyondcorp_appconnections_v1/gapic_version.py index 3106ac663ac7..49a0d50535a0 100644 --- a/packages/google-cloud-beyondcorp-appconnections/google/cloud/beyondcorp_appconnections_v1/gapic_version.py +++ b/packages/google-cloud-beyondcorp-appconnections/google/cloud/beyondcorp_appconnections_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.4.14" # {x-release-please-version} +__version__ = "0.4.15" # {x-release-please-version} diff --git a/packages/google-cloud-beyondcorp-appconnections/google/cloud/beyondcorp_appconnections_v1/services/app_connections_service/client.py b/packages/google-cloud-beyondcorp-appconnections/google/cloud/beyondcorp_appconnections_v1/services/app_connections_service/client.py index cd3d48686fe8..b018b8b9e8ec 100644 --- a/packages/google-cloud-beyondcorp-appconnections/google/cloud/beyondcorp_appconnections_v1/services/app_connections_service/client.py +++ b/packages/google-cloud-beyondcorp-appconnections/google/cloud/beyondcorp_appconnections_v1/services/app_connections_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -558,6 +560,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -1618,16 +1647,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -1673,16 +1706,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def delete_operation( self, @@ -1905,16 +1942,20 @@ def set_iam_policy( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. 
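With the client.py change above, call sites route through a try/except that invokes _add_cred_info_for_auth_errors before re-raising, so 401/403/404 failures can carry a JSON credential summary in their details. A hedged sketch of what a caller observes, assuming google-auth >= 2.35.0 credentials that implement get_cred_info:

    from google.api_core import exceptions as core_exceptions
    from google.cloud import beyondcorp_appconnections_v1

    client = beyondcorp_appconnections_v1.AppConnectionsServiceClient()
    try:
        client.get_app_connection(
            name="projects/my-project/locations/us-central1/appConnections/my-conn"
        )
    except core_exceptions.GoogleAPICallError as exc:
        # On a 401/403/404 the final entry of exc.details is a JSON string
        # describing the credential in use; other codes are left untouched.
        print(exc.details)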
+ return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_iam_policy( self, @@ -2027,16 +2068,20 @@ def get_iam_policy( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def test_iam_permissions( self, @@ -2087,16 +2132,20 @@ def test_iam_permissions( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_location( self, @@ -2142,16 +2191,20 @@ def get_location( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def list_locations( self, @@ -2197,16 +2250,20 @@ def list_locations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-beyondcorp-appconnections/google/cloud/beyondcorp_appconnections_v1/services/app_connections_service/transports/rest.py b/packages/google-cloud-beyondcorp-appconnections/google/cloud/beyondcorp_appconnections_v1/services/app_connections_service/transports/rest.py index f592e2460673..bb6268b2c954 100644 --- a/packages/google-cloud-beyondcorp-appconnections/google/cloud/beyondcorp_appconnections_v1/services/app_connections_service/transports/rest.py +++ b/packages/google-cloud-beyondcorp-appconnections/google/cloud/beyondcorp_appconnections_v1/services/app_connections_service/transports/rest.py @@ -146,12 +146,35 @@ def post_create_app_connection( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_app_connection - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_app_connection_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AppConnectionsService server but before - it is returned to user code. + it is returned to user code. 
This `post_create_app_connection` interceptor runs + before the `post_create_app_connection_with_metadata` interceptor. """ return response + def post_create_app_connection_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_app_connection + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AppConnectionsService server but before it is returned to user code. + + We recommend only using this `post_create_app_connection_with_metadata` + interceptor in new development instead of the `post_create_app_connection` interceptor. + When both interceptors are used, this `post_create_app_connection_with_metadata` interceptor runs after the + `post_create_app_connection` interceptor. The (possibly modified) response returned by + `post_create_app_connection` will be passed to + `post_create_app_connection_with_metadata`. + """ + return response, metadata + def pre_delete_app_connection( self, request: app_connections_service.DeleteAppConnectionRequest, @@ -172,12 +195,35 @@ def post_delete_app_connection( ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_app_connection - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_app_connection_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AppConnectionsService server but before - it is returned to user code. + it is returned to user code. This `post_delete_app_connection` interceptor runs + before the `post_delete_app_connection_with_metadata` interceptor. """ return response + def post_delete_app_connection_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_app_connection + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AppConnectionsService server but before it is returned to user code. + + We recommend only using this `post_delete_app_connection_with_metadata` + interceptor in new development instead of the `post_delete_app_connection` interceptor. + When both interceptors are used, this `post_delete_app_connection_with_metadata` interceptor runs after the + `post_delete_app_connection` interceptor. The (possibly modified) response returned by + `post_delete_app_connection` will be passed to + `post_delete_app_connection_with_metadata`. + """ + return response, metadata + def pre_get_app_connection( self, request: app_connections_service.GetAppConnectionRequest, @@ -198,12 +244,37 @@ def post_get_app_connection( ) -> app_connections_service.AppConnection: """Post-rpc interceptor for get_app_connection - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_app_connection_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AppConnectionsService server but before - it is returned to user code. + it is returned to user code. This `post_get_app_connection` interceptor runs + before the `post_get_app_connection_with_metadata` interceptor. 
""" return response + def post_get_app_connection_with_metadata( + self, + response: app_connections_service.AppConnection, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + app_connections_service.AppConnection, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for get_app_connection + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AppConnectionsService server but before it is returned to user code. + + We recommend only using this `post_get_app_connection_with_metadata` + interceptor in new development instead of the `post_get_app_connection` interceptor. + When both interceptors are used, this `post_get_app_connection_with_metadata` interceptor runs after the + `post_get_app_connection` interceptor. The (possibly modified) response returned by + `post_get_app_connection` will be passed to + `post_get_app_connection_with_metadata`. + """ + return response, metadata + def pre_list_app_connections( self, request: app_connections_service.ListAppConnectionsRequest, @@ -224,12 +295,38 @@ def post_list_app_connections( ) -> app_connections_service.ListAppConnectionsResponse: """Post-rpc interceptor for list_app_connections - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_app_connections_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AppConnectionsService server but before - it is returned to user code. + it is returned to user code. This `post_list_app_connections` interceptor runs + before the `post_list_app_connections_with_metadata` interceptor. """ return response + def post_list_app_connections_with_metadata( + self, + response: app_connections_service.ListAppConnectionsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + app_connections_service.ListAppConnectionsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_app_connections + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AppConnectionsService server but before it is returned to user code. + + We recommend only using this `post_list_app_connections_with_metadata` + interceptor in new development instead of the `post_list_app_connections` interceptor. + When both interceptors are used, this `post_list_app_connections_with_metadata` interceptor runs after the + `post_list_app_connections` interceptor. The (possibly modified) response returned by + `post_list_app_connections` will be passed to + `post_list_app_connections_with_metadata`. + """ + return response, metadata + def pre_resolve_app_connections( self, request: app_connections_service.ResolveAppConnectionsRequest, @@ -250,12 +347,38 @@ def post_resolve_app_connections( ) -> app_connections_service.ResolveAppConnectionsResponse: """Post-rpc interceptor for resolve_app_connections - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_resolve_app_connections_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AppConnectionsService server but before - it is returned to user code. + it is returned to user code. This `post_resolve_app_connections` interceptor runs + before the `post_resolve_app_connections_with_metadata` interceptor. 
""" return response + def post_resolve_app_connections_with_metadata( + self, + response: app_connections_service.ResolveAppConnectionsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + app_connections_service.ResolveAppConnectionsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for resolve_app_connections + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AppConnectionsService server but before it is returned to user code. + + We recommend only using this `post_resolve_app_connections_with_metadata` + interceptor in new development instead of the `post_resolve_app_connections` interceptor. + When both interceptors are used, this `post_resolve_app_connections_with_metadata` interceptor runs after the + `post_resolve_app_connections` interceptor. The (possibly modified) response returned by + `post_resolve_app_connections` will be passed to + `post_resolve_app_connections_with_metadata`. + """ + return response, metadata + def pre_update_app_connection( self, request: app_connections_service.UpdateAppConnectionRequest, @@ -276,12 +399,35 @@ def post_update_app_connection( ) -> operations_pb2.Operation: """Post-rpc interceptor for update_app_connection - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_app_connection_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AppConnectionsService server but before - it is returned to user code. + it is returned to user code. This `post_update_app_connection` interceptor runs + before the `post_update_app_connection_with_metadata` interceptor. """ return response + def post_update_app_connection_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_app_connection + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AppConnectionsService server but before it is returned to user code. + + We recommend only using this `post_update_app_connection_with_metadata` + interceptor in new development instead of the `post_update_app_connection` interceptor. + When both interceptors are used, this `post_update_app_connection_with_metadata` interceptor runs after the + `post_update_app_connection` interceptor. The (possibly modified) response returned by + `post_update_app_connection` will be passed to + `post_update_app_connection_with_metadata`. 
+ """ + return response, metadata + def pre_get_location( self, request: locations_pb2.GetLocationRequest, @@ -784,6 +930,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_app_connection(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_app_connection_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -931,6 +1081,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_app_connection(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_app_connection_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1085,6 +1239,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_app_connection(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_app_connection_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1235,6 +1393,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_app_connections(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_app_connections_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1387,6 +1549,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_resolve_app_connections(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_resolve_app_connections_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1544,6 +1710,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_app_connection(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_app_connection_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-beyondcorp-appconnections/noxfile.py b/packages/google-cloud-beyondcorp-appconnections/noxfile.py index a9ceef47133c..0acc836b384e 100644 --- a/packages/google-cloud-beyondcorp-appconnections/noxfile.py +++ b/packages/google-cloud-beyondcorp-appconnections/noxfile.py @@ -382,20 +382,29 @@ def docfx(session): ["python", "upb", "cpp"], ) def prerelease_deps(session, protobuf_implementation): - """Run all tests with prerelease versions of dependencies installed.""" + """ + Run all tests with pre-release versions of dependencies installed + rather than the standard non pre-release versions. + Pre-releases versions can be installed using + `pip install --pre `. 
+ """ if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): session.skip("cpp implementation is not supported in python 3.11+") # Install all dependencies - session.install("-e", ".[all, tests, tracing]") + session.install("-e", ".") + unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES + # Install dependencies for the unit test environment session.install(*unit_deps_all) + system_deps_all = ( SYSTEM_TEST_STANDARD_DEPENDENCIES + SYSTEM_TEST_EXTERNAL_DEPENDENCIES + SYSTEM_TEST_EXTRAS ) + # Install dependencies for the system test environment session.install(*system_deps_all) # Because we test minimum dependency versions on the minimum Python @@ -417,6 +426,7 @@ def prerelease_deps(session, protobuf_implementation): ) ] + # Install dependencies specified in `testing/constraints-X.txt`. session.install(*constraints_deps) prerel_deps = [ @@ -458,3 +468,70 @@ def prerelease_deps(session, protobuf_implementation): "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, }, ) + + +@nox.session(python="3.13") +@nox.parametrize( + "protobuf_implementation", + ["python", "upb"], +) +def core_deps_from_source(session, protobuf_implementation): + """Run all tests with local versions of core dependencies installed, + rather than pulling core dependencies from PyPI. + """ + + # Install all dependencies + session.install(".") + + # Install dependencies for the unit test environment + unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES + session.install(*unit_deps_all) + + # Install dependencies for the system test environment + system_deps_all = ( + SYSTEM_TEST_STANDARD_DEPENDENCIES + + SYSTEM_TEST_EXTERNAL_DEPENDENCIES + + SYSTEM_TEST_EXTRAS + ) + session.install(*system_deps_all) + + # Because we test minimum dependency versions on the minimum Python + # version, the first version we test with in the unit tests sessions has a + # constraints file containing all dependencies and extras that should be installed. + with open( + CURRENT_DIRECTORY + / "testing" + / f"constraints-{UNIT_TEST_PYTHON_VERSIONS[0]}.txt", + encoding="utf-8", + ) as constraints_file: + constraints_text = constraints_file.read() + + # Ignore leading whitespace and comment lines. + constraints_deps = [ + match.group(1) + for match in re.finditer( + r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE + ) + ] + + # Install dependencies specified in `testing/constraints-X.txt`. 
+ session.install(*constraints_deps) + + core_dependencies_from_source = [ + "google-api-core @ git+https://github.com/googleapis/python-api-core.git", + "google-auth @ git+https://github.com/googleapis/google-auth-library-python.git", + f"{CURRENT_DIRECTORY}/../googleapis-common-protos", + f"{CURRENT_DIRECTORY}/../grpc-google-iam-v1", + "proto-plus @ git+https://github.com/googleapis/proto-plus-python.git", + ] + + for dep in core_dependencies_from_source: + session.install(dep, "--ignore-installed", "--no-deps") + + session.run( + "py.test", + "tests/unit", + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, + ) diff --git a/packages/google-cloud-beyondcorp-appconnections/samples/generated_samples/snippet_metadata_google.cloud.beyondcorp.appconnections.v1.json b/packages/google-cloud-beyondcorp-appconnections/samples/generated_samples/snippet_metadata_google.cloud.beyondcorp.appconnections.v1.json index 880e405ecd9b..87a7ecbfb05b 100644 --- a/packages/google-cloud-beyondcorp-appconnections/samples/generated_samples/snippet_metadata_google.cloud.beyondcorp.appconnections.v1.json +++ b/packages/google-cloud-beyondcorp-appconnections/samples/generated_samples/snippet_metadata_google.cloud.beyondcorp.appconnections.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-beyondcorp-appconnections", - "version": "0.4.14" + "version": "0.4.15" }, "snippets": [ { diff --git a/packages/google-cloud-beyondcorp-appconnections/tests/unit/gapic/beyondcorp_appconnections_v1/test_app_connections_service.py b/packages/google-cloud-beyondcorp-appconnections/tests/unit/gapic/beyondcorp_appconnections_v1/test_app_connections_service.py index 7b90d9885d57..5e650024f5bc 100644 --- a/packages/google-cloud-beyondcorp-appconnections/tests/unit/gapic/beyondcorp_appconnections_v1/test_app_connections_service.py +++ b/packages/google-cloud-beyondcorp-appconnections/tests/unit/gapic/beyondcorp_appconnections_v1/test_app_connections_service.py @@ -77,6 +77,13 @@ ) from google.cloud.beyondcorp_appconnections_v1.types import app_connections_service +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -352,6 +359,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
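For context on the interceptor changes above: each new `post_*_with_metadata` hook receives the decoded response together with the HTTP response headers (converted by the transport to `(key, str(value))` pairs) and must return both. A minimal sketch of a subclass that reads those headers, assuming the usual generated constructor signatures (`interceptor=` on the REST transport, `transport=` on the client); the class name and print logic are illustrative, not part of this change:

```python
from typing import Sequence, Tuple, Union

from google.auth import credentials as ga_credentials
from google.cloud.beyondcorp_appconnections_v1.services.app_connections_service import (
    AppConnectionsServiceClient,
)
from google.cloud.beyondcorp_appconnections_v1.services.app_connections_service.transports.rest import (
    AppConnectionsServiceRestInterceptor,
    AppConnectionsServiceRestTransport,
)
from google.cloud.beyondcorp_appconnections_v1.types import app_connections_service


class HeaderReadingInterceptor(AppConnectionsServiceRestInterceptor):
    """Illustrative interceptor that inspects response metadata (headers)."""

    def post_list_app_connections_with_metadata(
        self,
        response: app_connections_service.ListAppConnectionsResponse,
        metadata: Sequence[Tuple[str, Union[str, bytes]]],
    ) -> Tuple[
        app_connections_service.ListAppConnectionsResponse,
        Sequence[Tuple[str, Union[str, bytes]]],
    ]:
        # `metadata` is built by the transport from the HTTP response headers,
        # e.g. [("content-type", "application/json"), ...].
        for key, value in metadata:
            print(f"response header {key}: {value}")
        # Return both so the (possibly modified) response keeps flowing to user code.
        return response, metadata


# Hypothetical wiring; credentials/endpoint are placeholders for illustration only.
transport = AppConnectionsServiceRestTransport(
    credentials=ga_credentials.AnonymousCredentials(),
    interceptor=HeaderReadingInterceptor(),
)
client = AppConnectionsServiceClient(transport=transport)
```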
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = AppConnectionsServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = AppConnectionsServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -5590,10 +5640,14 @@ def test_list_app_connections_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AppConnectionsServiceRestInterceptor, "post_list_app_connections" ) as post, mock.patch.object( + transports.AppConnectionsServiceRestInterceptor, + "post_list_app_connections_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AppConnectionsServiceRestInterceptor, "pre_list_app_connections" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = app_connections_service.ListAppConnectionsRequest.pb( app_connections_service.ListAppConnectionsRequest() ) @@ -5619,6 +5673,10 @@ def test_list_app_connections_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = app_connections_service.ListAppConnectionsResponse() + post_with_metadata.return_value = ( + app_connections_service.ListAppConnectionsResponse(), + metadata, + ) client.list_app_connections( request, @@ -5630,6 +5688,7 @@ def test_list_app_connections_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_app_connection_rest_bad_request( @@ -5724,10 +5783,14 @@ def test_get_app_connection_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AppConnectionsServiceRestInterceptor, "post_get_app_connection" ) as post, mock.patch.object( + transports.AppConnectionsServiceRestInterceptor, + "post_get_app_connection_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AppConnectionsServiceRestInterceptor, "pre_get_app_connection" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = app_connections_service.GetAppConnectionRequest.pb( app_connections_service.GetAppConnectionRequest() ) @@ -5753,6 +5816,10 @@ def test_get_app_connection_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = app_connections_service.AppConnection() + post_with_metadata.return_value = ( + 
app_connections_service.AppConnection(), + metadata, + ) client.get_app_connection( request, @@ -5764,6 +5831,7 @@ def test_get_app_connection_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_app_connection_rest_bad_request( @@ -5931,10 +5999,14 @@ def test_create_app_connection_rest_interceptors(null_interceptor): ), mock.patch.object( transports.AppConnectionsServiceRestInterceptor, "post_create_app_connection" ) as post, mock.patch.object( + transports.AppConnectionsServiceRestInterceptor, + "post_create_app_connection_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AppConnectionsServiceRestInterceptor, "pre_create_app_connection" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = app_connections_service.CreateAppConnectionRequest.pb( app_connections_service.CreateAppConnectionRequest() ) @@ -5958,6 +6030,7 @@ def test_create_app_connection_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_app_connection( request, @@ -5969,6 +6042,7 @@ def test_create_app_connection_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_app_connection_rest_bad_request( @@ -6144,10 +6218,14 @@ def test_update_app_connection_rest_interceptors(null_interceptor): ), mock.patch.object( transports.AppConnectionsServiceRestInterceptor, "post_update_app_connection" ) as post, mock.patch.object( + transports.AppConnectionsServiceRestInterceptor, + "post_update_app_connection_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AppConnectionsServiceRestInterceptor, "pre_update_app_connection" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = app_connections_service.UpdateAppConnectionRequest.pb( app_connections_service.UpdateAppConnectionRequest() ) @@ -6171,6 +6249,7 @@ def test_update_app_connection_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.update_app_connection( request, @@ -6182,6 +6261,7 @@ def test_update_app_connection_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_app_connection_rest_bad_request( @@ -6262,10 +6342,14 @@ def test_delete_app_connection_rest_interceptors(null_interceptor): ), mock.patch.object( transports.AppConnectionsServiceRestInterceptor, "post_delete_app_connection" ) as post, mock.patch.object( + transports.AppConnectionsServiceRestInterceptor, + "post_delete_app_connection_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AppConnectionsServiceRestInterceptor, "pre_delete_app_connection" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = app_connections_service.DeleteAppConnectionRequest.pb( app_connections_service.DeleteAppConnectionRequest() ) @@ -6289,6 +6373,7 @@ def test_delete_app_connection_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + 
post_with_metadata.return_value = operations_pb2.Operation(), metadata client.delete_app_connection( request, @@ -6300,6 +6385,7 @@ def test_delete_app_connection_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_resolve_app_connections_rest_bad_request( @@ -6388,10 +6474,14 @@ def test_resolve_app_connections_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AppConnectionsServiceRestInterceptor, "post_resolve_app_connections" ) as post, mock.patch.object( + transports.AppConnectionsServiceRestInterceptor, + "post_resolve_app_connections_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AppConnectionsServiceRestInterceptor, "pre_resolve_app_connections" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = app_connections_service.ResolveAppConnectionsRequest.pb( app_connections_service.ResolveAppConnectionsRequest() ) @@ -6417,6 +6507,10 @@ def test_resolve_app_connections_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = app_connections_service.ResolveAppConnectionsResponse() + post_with_metadata.return_value = ( + app_connections_service.ResolveAppConnectionsResponse(), + metadata, + ) client.resolve_app_connections( request, @@ -6428,6 +6522,7 @@ def test_resolve_app_connections_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationRequest): diff --git a/packages/google-cloud-beyondcorp-appconnectors/CHANGELOG.md b/packages/google-cloud-beyondcorp-appconnectors/CHANGELOG.md index 0d939f1d4e97..7963fd5684c2 100644 --- a/packages/google-cloud-beyondcorp-appconnectors/CHANGELOG.md +++ b/packages/google-cloud-beyondcorp-appconnectors/CHANGELOG.md @@ -1,5 +1,13 @@ # Changelog +## [0.4.15](https://github.com/googleapis/google-cloud-python/compare/google-cloud-beyondcorp-appconnectors-v0.4.14...google-cloud-beyondcorp-appconnectors-v0.4.15) (2025-02-12) + + +### Features + +* Add REST Interceptors which support reading metadata ([b1c3ce8](https://github.com/googleapis/google-cloud-python/commit/b1c3ce8b271e9d22afabcde054e81dcedae6b0ef)) +* Add support for reading selective GAPIC generation methods from service YAML ([b1c3ce8](https://github.com/googleapis/google-cloud-python/commit/b1c3ce8b271e9d22afabcde054e81dcedae6b0ef)) + ## [0.4.14](https://github.com/googleapis/google-cloud-python/compare/google-cloud-beyondcorp-appconnectors-v0.4.13...google-cloud-beyondcorp-appconnectors-v0.4.14) (2024-12-12) diff --git a/packages/google-cloud-beyondcorp-appconnectors/README.rst b/packages/google-cloud-beyondcorp-appconnectors/README.rst index 86fa8a817c33..bacd36ef54e5 100644 --- a/packages/google-cloud-beyondcorp-appconnectors/README.rst +++ b/packages/google-cloud-beyondcorp-appconnectors/README.rst @@ -26,12 +26,12 @@ In order to use this library, you first need to go through the following steps: 1. `Select or create a Cloud Platform project.`_ 2. `Enable billing for your project.`_ 3. `Enable the BeyondCorp AppConnectors.`_ -4. `Setup Authentication.`_ +4. `Set up Authentication.`_ .. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project .. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project .. 
_Enable the BeyondCorp AppConnectors.: https://cloud.google.com/beyondcorp/ -.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html +.. _Set up Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html Installation ~~~~~~~~~~~~ diff --git a/packages/google-cloud-beyondcorp-appconnectors/google/cloud/beyondcorp_appconnectors/gapic_version.py b/packages/google-cloud-beyondcorp-appconnectors/google/cloud/beyondcorp_appconnectors/gapic_version.py index 3106ac663ac7..49a0d50535a0 100644 --- a/packages/google-cloud-beyondcorp-appconnectors/google/cloud/beyondcorp_appconnectors/gapic_version.py +++ b/packages/google-cloud-beyondcorp-appconnectors/google/cloud/beyondcorp_appconnectors/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.4.14" # {x-release-please-version} +__version__ = "0.4.15" # {x-release-please-version} diff --git a/packages/google-cloud-beyondcorp-appconnectors/google/cloud/beyondcorp_appconnectors_v1/gapic_version.py b/packages/google-cloud-beyondcorp-appconnectors/google/cloud/beyondcorp_appconnectors_v1/gapic_version.py index 3106ac663ac7..49a0d50535a0 100644 --- a/packages/google-cloud-beyondcorp-appconnectors/google/cloud/beyondcorp_appconnectors_v1/gapic_version.py +++ b/packages/google-cloud-beyondcorp-appconnectors/google/cloud/beyondcorp_appconnectors_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.4.14" # {x-release-please-version} +__version__ = "0.4.15" # {x-release-please-version} diff --git a/packages/google-cloud-beyondcorp-appconnectors/google/cloud/beyondcorp_appconnectors_v1/services/app_connectors_service/client.py b/packages/google-cloud-beyondcorp-appconnectors/google/cloud/beyondcorp_appconnectors_v1/services/app_connectors_service/client.py index b5329c0c274e..31355c0d2fce 100644 --- a/packages/google-cloud-beyondcorp-appconnectors/google/cloud/beyondcorp_appconnectors_v1/services/app_connectors_service/client.py +++ b/packages/google-cloud-beyondcorp-appconnectors/google/cloud/beyondcorp_appconnectors_v1/services/app_connectors_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -514,6 +516,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. 
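The `_add_cred_info_for_auth_errors` helper added above only acts on 401/403/404 errors, and only when the credentials object exposes `get_cred_info()` (available in google-auth >= 2.35.0); it appends the JSON-serialized credential info to the error's details. A minimal sketch of that behavior with a mocked credential, mirroring the pattern used by the new unit tests later in this change; the payload values are illustrative:

```python
import json
from unittest import mock

from google.api_core import exceptions as core_exceptions
from google.cloud.beyondcorp_appconnectors_v1.services.app_connectors_service import (
    AppConnectorsServiceClient,
)

# Illustrative credential info; real values come from google-auth's get_cred_info().
cred_info = {
    "credential_source": "/path/to/file",
    "credential_type": "service account credentials",
    "principal": "service-account@example.com",
}

cred = mock.Mock(["get_cred_info"])
cred.get_cred_info = mock.Mock(return_value=cred_info)

client = AppConnectorsServiceClient(credentials=cred)
client._transport._credentials = cred

error = core_exceptions.GoogleAPICallError("permission denied", details=["foo"])
error.code = 403  # 401/403/404 are enriched; other codes are left unchanged.

client._add_cred_info_for_auth_errors(error)
assert error.details == ["foo", json.dumps(cred_info)]
```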
@@ -1582,16 +1611,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -1637,16 +1670,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def delete_operation( self, @@ -1869,16 +1906,20 @@ def set_iam_policy( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_iam_policy( self, @@ -1991,16 +2032,20 @@ def get_iam_policy( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def test_iam_permissions( self, @@ -2051,16 +2096,20 @@ def test_iam_permissions( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_location( self, @@ -2106,16 +2155,20 @@ def get_location( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def list_locations( self, @@ -2161,16 +2214,20 @@ def list_locations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. 
- response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-beyondcorp-appconnectors/google/cloud/beyondcorp_appconnectors_v1/services/app_connectors_service/transports/rest.py b/packages/google-cloud-beyondcorp-appconnectors/google/cloud/beyondcorp_appconnectors_v1/services/app_connectors_service/transports/rest.py index 90f909703fd7..072e5e864a3e 100644 --- a/packages/google-cloud-beyondcorp-appconnectors/google/cloud/beyondcorp_appconnectors_v1/services/app_connectors_service/transports/rest.py +++ b/packages/google-cloud-beyondcorp-appconnectors/google/cloud/beyondcorp_appconnectors_v1/services/app_connectors_service/transports/rest.py @@ -146,12 +146,35 @@ def post_create_app_connector( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_app_connector - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_app_connector_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AppConnectorsService server but before - it is returned to user code. + it is returned to user code. This `post_create_app_connector` interceptor runs + before the `post_create_app_connector_with_metadata` interceptor. """ return response + def post_create_app_connector_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_app_connector + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AppConnectorsService server but before it is returned to user code. + + We recommend only using this `post_create_app_connector_with_metadata` + interceptor in new development instead of the `post_create_app_connector` interceptor. + When both interceptors are used, this `post_create_app_connector_with_metadata` interceptor runs after the + `post_create_app_connector` interceptor. The (possibly modified) response returned by + `post_create_app_connector` will be passed to + `post_create_app_connector_with_metadata`. + """ + return response, metadata + def pre_delete_app_connector( self, request: app_connectors_service.DeleteAppConnectorRequest, @@ -172,12 +195,35 @@ def post_delete_app_connector( ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_app_connector - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_app_connector_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AppConnectorsService server but before - it is returned to user code. + it is returned to user code. This `post_delete_app_connector` interceptor runs + before the `post_delete_app_connector_with_metadata` interceptor. 
""" return response + def post_delete_app_connector_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_app_connector + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AppConnectorsService server but before it is returned to user code. + + We recommend only using this `post_delete_app_connector_with_metadata` + interceptor in new development instead of the `post_delete_app_connector` interceptor. + When both interceptors are used, this `post_delete_app_connector_with_metadata` interceptor runs after the + `post_delete_app_connector` interceptor. The (possibly modified) response returned by + `post_delete_app_connector` will be passed to + `post_delete_app_connector_with_metadata`. + """ + return response, metadata + def pre_get_app_connector( self, request: app_connectors_service.GetAppConnectorRequest, @@ -198,12 +244,37 @@ def post_get_app_connector( ) -> app_connectors_service.AppConnector: """Post-rpc interceptor for get_app_connector - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_app_connector_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AppConnectorsService server but before - it is returned to user code. + it is returned to user code. This `post_get_app_connector` interceptor runs + before the `post_get_app_connector_with_metadata` interceptor. """ return response + def post_get_app_connector_with_metadata( + self, + response: app_connectors_service.AppConnector, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + app_connectors_service.AppConnector, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for get_app_connector + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AppConnectorsService server but before it is returned to user code. + + We recommend only using this `post_get_app_connector_with_metadata` + interceptor in new development instead of the `post_get_app_connector` interceptor. + When both interceptors are used, this `post_get_app_connector_with_metadata` interceptor runs after the + `post_get_app_connector` interceptor. The (possibly modified) response returned by + `post_get_app_connector` will be passed to + `post_get_app_connector_with_metadata`. + """ + return response, metadata + def pre_list_app_connectors( self, request: app_connectors_service.ListAppConnectorsRequest, @@ -224,12 +295,38 @@ def post_list_app_connectors( ) -> app_connectors_service.ListAppConnectorsResponse: """Post-rpc interceptor for list_app_connectors - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_app_connectors_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AppConnectorsService server but before - it is returned to user code. + it is returned to user code. This `post_list_app_connectors` interceptor runs + before the `post_list_app_connectors_with_metadata` interceptor. 
""" return response + def post_list_app_connectors_with_metadata( + self, + response: app_connectors_service.ListAppConnectorsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + app_connectors_service.ListAppConnectorsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_app_connectors + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AppConnectorsService server but before it is returned to user code. + + We recommend only using this `post_list_app_connectors_with_metadata` + interceptor in new development instead of the `post_list_app_connectors` interceptor. + When both interceptors are used, this `post_list_app_connectors_with_metadata` interceptor runs after the + `post_list_app_connectors` interceptor. The (possibly modified) response returned by + `post_list_app_connectors` will be passed to + `post_list_app_connectors_with_metadata`. + """ + return response, metadata + def pre_report_status( self, request: app_connectors_service.ReportStatusRequest, @@ -250,12 +347,35 @@ def post_report_status( ) -> operations_pb2.Operation: """Post-rpc interceptor for report_status - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_report_status_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AppConnectorsService server but before - it is returned to user code. + it is returned to user code. This `post_report_status` interceptor runs + before the `post_report_status_with_metadata` interceptor. """ return response + def post_report_status_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for report_status + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AppConnectorsService server but before it is returned to user code. + + We recommend only using this `post_report_status_with_metadata` + interceptor in new development instead of the `post_report_status` interceptor. + When both interceptors are used, this `post_report_status_with_metadata` interceptor runs after the + `post_report_status` interceptor. The (possibly modified) response returned by + `post_report_status` will be passed to + `post_report_status_with_metadata`. + """ + return response, metadata + def pre_update_app_connector( self, request: app_connectors_service.UpdateAppConnectorRequest, @@ -276,12 +396,35 @@ def post_update_app_connector( ) -> operations_pb2.Operation: """Post-rpc interceptor for update_app_connector - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_app_connector_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AppConnectorsService server but before - it is returned to user code. + it is returned to user code. This `post_update_app_connector` interceptor runs + before the `post_update_app_connector_with_metadata` interceptor. 
""" return response + def post_update_app_connector_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_app_connector + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AppConnectorsService server but before it is returned to user code. + + We recommend only using this `post_update_app_connector_with_metadata` + interceptor in new development instead of the `post_update_app_connector` interceptor. + When both interceptors are used, this `post_update_app_connector_with_metadata` interceptor runs after the + `post_update_app_connector` interceptor. The (possibly modified) response returned by + `post_update_app_connector` will be passed to + `post_update_app_connector_with_metadata`. + """ + return response, metadata + def pre_get_location( self, request: locations_pb2.GetLocationRequest, @@ -784,6 +927,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_app_connector(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_app_connector_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -931,6 +1078,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_app_connector(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_app_connector_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1085,6 +1236,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_app_connector(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_app_connector_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1235,6 +1390,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_app_connectors(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_app_connectors_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1387,6 +1546,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_report_status(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_report_status_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1540,6 +1703,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_app_connector(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_app_connector_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( 
logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-beyondcorp-appconnectors/noxfile.py b/packages/google-cloud-beyondcorp-appconnectors/noxfile.py index a9ceef47133c..0acc836b384e 100644 --- a/packages/google-cloud-beyondcorp-appconnectors/noxfile.py +++ b/packages/google-cloud-beyondcorp-appconnectors/noxfile.py @@ -382,20 +382,29 @@ def docfx(session): ["python", "upb", "cpp"], ) def prerelease_deps(session, protobuf_implementation): - """Run all tests with prerelease versions of dependencies installed.""" + """ + Run all tests with pre-release versions of dependencies installed + rather than the standard non pre-release versions. + Pre-releases versions can be installed using + `pip install --pre `. + """ if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): session.skip("cpp implementation is not supported in python 3.11+") # Install all dependencies - session.install("-e", ".[all, tests, tracing]") + session.install("-e", ".") + unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES + # Install dependencies for the unit test environment session.install(*unit_deps_all) + system_deps_all = ( SYSTEM_TEST_STANDARD_DEPENDENCIES + SYSTEM_TEST_EXTERNAL_DEPENDENCIES + SYSTEM_TEST_EXTRAS ) + # Install dependencies for the system test environment session.install(*system_deps_all) # Because we test minimum dependency versions on the minimum Python @@ -417,6 +426,7 @@ def prerelease_deps(session, protobuf_implementation): ) ] + # Install dependencies specified in `testing/constraints-X.txt`. session.install(*constraints_deps) prerel_deps = [ @@ -458,3 +468,70 @@ def prerelease_deps(session, protobuf_implementation): "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, }, ) + + +@nox.session(python="3.13") +@nox.parametrize( + "protobuf_implementation", + ["python", "upb"], +) +def core_deps_from_source(session, protobuf_implementation): + """Run all tests with local versions of core dependencies installed, + rather than pulling core dependencies from PyPI. + """ + + # Install all dependencies + session.install(".") + + # Install dependencies for the unit test environment + unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES + session.install(*unit_deps_all) + + # Install dependencies for the system test environment + system_deps_all = ( + SYSTEM_TEST_STANDARD_DEPENDENCIES + + SYSTEM_TEST_EXTERNAL_DEPENDENCIES + + SYSTEM_TEST_EXTRAS + ) + session.install(*system_deps_all) + + # Because we test minimum dependency versions on the minimum Python + # version, the first version we test with in the unit tests sessions has a + # constraints file containing all dependencies and extras that should be installed. + with open( + CURRENT_DIRECTORY + / "testing" + / f"constraints-{UNIT_TEST_PYTHON_VERSIONS[0]}.txt", + encoding="utf-8", + ) as constraints_file: + constraints_text = constraints_file.read() + + # Ignore leading whitespace and comment lines. + constraints_deps = [ + match.group(1) + for match in re.finditer( + r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE + ) + ] + + # Install dependencies specified in `testing/constraints-X.txt`. 
+ session.install(*constraints_deps) + + core_dependencies_from_source = [ + "google-api-core @ git+https://github.com/googleapis/python-api-core.git", + "google-auth @ git+https://github.com/googleapis/google-auth-library-python.git", + f"{CURRENT_DIRECTORY}/../googleapis-common-protos", + f"{CURRENT_DIRECTORY}/../grpc-google-iam-v1", + "proto-plus @ git+https://github.com/googleapis/proto-plus-python.git", + ] + + for dep in core_dependencies_from_source: + session.install(dep, "--ignore-installed", "--no-deps") + + session.run( + "py.test", + "tests/unit", + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, + ) diff --git a/packages/google-cloud-beyondcorp-appconnectors/samples/generated_samples/snippet_metadata_google.cloud.beyondcorp.appconnectors.v1.json b/packages/google-cloud-beyondcorp-appconnectors/samples/generated_samples/snippet_metadata_google.cloud.beyondcorp.appconnectors.v1.json index cbfad1945278..68a1ece7312b 100644 --- a/packages/google-cloud-beyondcorp-appconnectors/samples/generated_samples/snippet_metadata_google.cloud.beyondcorp.appconnectors.v1.json +++ b/packages/google-cloud-beyondcorp-appconnectors/samples/generated_samples/snippet_metadata_google.cloud.beyondcorp.appconnectors.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-beyondcorp-appconnectors", - "version": "0.4.14" + "version": "0.4.15" }, "snippets": [ { diff --git a/packages/google-cloud-beyondcorp-appconnectors/tests/unit/gapic/beyondcorp_appconnectors_v1/test_app_connectors_service.py b/packages/google-cloud-beyondcorp-appconnectors/tests/unit/gapic/beyondcorp_appconnectors_v1/test_app_connectors_service.py index 61c3b29bd2c4..16912a48b321 100644 --- a/packages/google-cloud-beyondcorp-appconnectors/tests/unit/gapic/beyondcorp_appconnectors_v1/test_app_connectors_service.py +++ b/packages/google-cloud-beyondcorp-appconnectors/tests/unit/gapic/beyondcorp_appconnectors_v1/test_app_connectors_service.py @@ -82,6 +82,13 @@ ) from google.cloud.beyondcorp_appconnectors_v1.types import resource_info +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -355,6 +362,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
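As an aside on the constraints handling in the `core_deps_from_source` and `prerelease_deps` sessions above: the regex keeps only requirement names pinned with `==`, skipping comments, blank lines, and any other version specifiers. A small self-contained sketch with made-up constraint lines:

```python
import re

# Hypothetical constraints file content; only `==`-pinned entries are extracted.
constraints_text = """\
# comments and blank lines are ignored
google-api-core==1.34.1
proto-plus==1.22.3
protobuf>=3.20.2
"""

constraints_deps = [
    match.group(1)
    for match in re.finditer(r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE)
]

assert constraints_deps == ["google-api-core", "proto-plus"]
```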
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = AppConnectorsServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = AppConnectorsServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -5229,10 +5279,14 @@ def test_list_app_connectors_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AppConnectorsServiceRestInterceptor, "post_list_app_connectors" ) as post, mock.patch.object( + transports.AppConnectorsServiceRestInterceptor, + "post_list_app_connectors_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AppConnectorsServiceRestInterceptor, "pre_list_app_connectors" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = app_connectors_service.ListAppConnectorsRequest.pb( app_connectors_service.ListAppConnectorsRequest() ) @@ -5258,6 +5312,10 @@ def test_list_app_connectors_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = app_connectors_service.ListAppConnectorsResponse() + post_with_metadata.return_value = ( + app_connectors_service.ListAppConnectorsResponse(), + metadata, + ) client.list_app_connectors( request, @@ -5269,6 +5327,7 @@ def test_list_app_connectors_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_app_connector_rest_bad_request( @@ -5359,10 +5418,14 @@ def test_get_app_connector_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AppConnectorsServiceRestInterceptor, "post_get_app_connector" ) as post, mock.patch.object( + transports.AppConnectorsServiceRestInterceptor, + "post_get_app_connector_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AppConnectorsServiceRestInterceptor, "pre_get_app_connector" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = app_connectors_service.GetAppConnectorRequest.pb( app_connectors_service.GetAppConnectorRequest() ) @@ -5388,6 +5451,10 @@ def test_get_app_connector_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = app_connectors_service.AppConnector() + post_with_metadata.return_value = ( + app_connectors_service.AppConnector(), + metadata, + ) 
client.get_app_connector( request, @@ -5399,6 +5466,7 @@ def test_get_app_connector_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_app_connector_rest_bad_request( @@ -5568,10 +5636,14 @@ def test_create_app_connector_rest_interceptors(null_interceptor): ), mock.patch.object( transports.AppConnectorsServiceRestInterceptor, "post_create_app_connector" ) as post, mock.patch.object( + transports.AppConnectorsServiceRestInterceptor, + "post_create_app_connector_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AppConnectorsServiceRestInterceptor, "pre_create_app_connector" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = app_connectors_service.CreateAppConnectorRequest.pb( app_connectors_service.CreateAppConnectorRequest() ) @@ -5595,6 +5667,7 @@ def test_create_app_connector_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_app_connector( request, @@ -5606,6 +5679,7 @@ def test_create_app_connector_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_app_connector_rest_bad_request( @@ -5783,10 +5857,14 @@ def test_update_app_connector_rest_interceptors(null_interceptor): ), mock.patch.object( transports.AppConnectorsServiceRestInterceptor, "post_update_app_connector" ) as post, mock.patch.object( + transports.AppConnectorsServiceRestInterceptor, + "post_update_app_connector_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AppConnectorsServiceRestInterceptor, "pre_update_app_connector" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = app_connectors_service.UpdateAppConnectorRequest.pb( app_connectors_service.UpdateAppConnectorRequest() ) @@ -5810,6 +5888,7 @@ def test_update_app_connector_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.update_app_connector( request, @@ -5821,6 +5900,7 @@ def test_update_app_connector_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_app_connector_rest_bad_request( @@ -5901,10 +5981,14 @@ def test_delete_app_connector_rest_interceptors(null_interceptor): ), mock.patch.object( transports.AppConnectorsServiceRestInterceptor, "post_delete_app_connector" ) as post, mock.patch.object( + transports.AppConnectorsServiceRestInterceptor, + "post_delete_app_connector_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AppConnectorsServiceRestInterceptor, "pre_delete_app_connector" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = app_connectors_service.DeleteAppConnectorRequest.pb( app_connectors_service.DeleteAppConnectorRequest() ) @@ -5928,6 +6012,7 @@ def test_delete_app_connector_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.delete_app_connector( request, @@ -5939,6 
+6024,7 @@ def test_delete_app_connector_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_report_status_rest_bad_request( @@ -6023,10 +6109,14 @@ def test_report_status_rest_interceptors(null_interceptor): ), mock.patch.object( transports.AppConnectorsServiceRestInterceptor, "post_report_status" ) as post, mock.patch.object( + transports.AppConnectorsServiceRestInterceptor, + "post_report_status_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AppConnectorsServiceRestInterceptor, "pre_report_status" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = app_connectors_service.ReportStatusRequest.pb( app_connectors_service.ReportStatusRequest() ) @@ -6050,6 +6140,7 @@ def test_report_status_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.report_status( request, @@ -6061,6 +6152,7 @@ def test_report_status_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationRequest): diff --git a/packages/google-cloud-beyondcorp-appgateways/CHANGELOG.md b/packages/google-cloud-beyondcorp-appgateways/CHANGELOG.md index 576c7d4bf6e4..e59a4b64d269 100644 --- a/packages/google-cloud-beyondcorp-appgateways/CHANGELOG.md +++ b/packages/google-cloud-beyondcorp-appgateways/CHANGELOG.md @@ -1,5 +1,13 @@ # Changelog +## [0.4.15](https://github.com/googleapis/google-cloud-python/compare/google-cloud-beyondcorp-appgateways-v0.4.14...google-cloud-beyondcorp-appgateways-v0.4.15) (2025-02-12) + + +### Features + +* Add REST Interceptors which support reading metadata ([b1c3ce8](https://github.com/googleapis/google-cloud-python/commit/b1c3ce8b271e9d22afabcde054e81dcedae6b0ef)) +* Add support for reading selective GAPIC generation methods from service YAML ([b1c3ce8](https://github.com/googleapis/google-cloud-python/commit/b1c3ce8b271e9d22afabcde054e81dcedae6b0ef)) + ## [0.4.14](https://github.com/googleapis/google-cloud-python/compare/google-cloud-beyondcorp-appgateways-v0.4.13...google-cloud-beyondcorp-appgateways-v0.4.14) (2024-12-12) diff --git a/packages/google-cloud-beyondcorp-appgateways/README.rst b/packages/google-cloud-beyondcorp-appgateways/README.rst index afbca38efd1f..0d3240cb3cbb 100644 --- a/packages/google-cloud-beyondcorp-appgateways/README.rst +++ b/packages/google-cloud-beyondcorp-appgateways/README.rst @@ -26,12 +26,12 @@ In order to use this library, you first need to go through the following steps: 1. `Select or create a Cloud Platform project.`_ 2. `Enable billing for your project.`_ 3. `Enable the BeyondCorp AppGateways.`_ -4. `Setup Authentication.`_ +4. `Set up Authentication.`_ .. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project .. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project .. _Enable the BeyondCorp AppGateways.: https://cloud.google.com/beyondcorp/ -.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html +.. 
_Set up Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html Installation ~~~~~~~~~~~~ diff --git a/packages/google-cloud-beyondcorp-appgateways/google/cloud/beyondcorp_appgateways/gapic_version.py b/packages/google-cloud-beyondcorp-appgateways/google/cloud/beyondcorp_appgateways/gapic_version.py index 3106ac663ac7..49a0d50535a0 100644 --- a/packages/google-cloud-beyondcorp-appgateways/google/cloud/beyondcorp_appgateways/gapic_version.py +++ b/packages/google-cloud-beyondcorp-appgateways/google/cloud/beyondcorp_appgateways/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.4.14" # {x-release-please-version} +__version__ = "0.4.15" # {x-release-please-version} diff --git a/packages/google-cloud-beyondcorp-appgateways/google/cloud/beyondcorp_appgateways_v1/gapic_version.py b/packages/google-cloud-beyondcorp-appgateways/google/cloud/beyondcorp_appgateways_v1/gapic_version.py index 3106ac663ac7..49a0d50535a0 100644 --- a/packages/google-cloud-beyondcorp-appgateways/google/cloud/beyondcorp_appgateways_v1/gapic_version.py +++ b/packages/google-cloud-beyondcorp-appgateways/google/cloud/beyondcorp_appgateways_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.4.14" # {x-release-please-version} +__version__ = "0.4.15" # {x-release-please-version} diff --git a/packages/google-cloud-beyondcorp-appgateways/google/cloud/beyondcorp_appgateways_v1/services/app_gateways_service/client.py b/packages/google-cloud-beyondcorp-appgateways/google/cloud/beyondcorp_appgateways_v1/services/app_gateways_service/client.py index 9cd3540fb6ae..7a58c56db9e8 100644 --- a/packages/google-cloud-beyondcorp-appgateways/google/cloud/beyondcorp_appgateways_v1/services/app_gateways_service/client.py +++ b/packages/google-cloud-beyondcorp-appgateways/google/cloud/beyondcorp_appgateways_v1/services/app_gateways_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -509,6 +511,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -1288,16 +1317,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -1343,16 +1376,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def delete_operation( self, @@ -1575,16 +1612,20 @@ def set_iam_policy( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_iam_policy( self, @@ -1697,16 +1738,20 @@ def get_iam_policy( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def test_iam_permissions( self, @@ -1757,16 +1802,20 @@ def test_iam_permissions( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_location( self, @@ -1812,16 +1861,20 @@ def get_location( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def list_locations( self, @@ -1867,16 +1920,20 @@ def list_locations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. 
+ return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-beyondcorp-appgateways/google/cloud/beyondcorp_appgateways_v1/services/app_gateways_service/transports/rest.py b/packages/google-cloud-beyondcorp-appgateways/google/cloud/beyondcorp_appgateways_v1/services/app_gateways_service/transports/rest.py index 205bfe62be7c..77885c491ee0 100644 --- a/packages/google-cloud-beyondcorp-appgateways/google/cloud/beyondcorp_appgateways_v1/services/app_gateways_service/transports/rest.py +++ b/packages/google-cloud-beyondcorp-appgateways/google/cloud/beyondcorp_appgateways_v1/services/app_gateways_service/transports/rest.py @@ -130,12 +130,35 @@ def post_create_app_gateway( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_app_gateway - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_app_gateway_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AppGatewaysService server but before - it is returned to user code. + it is returned to user code. This `post_create_app_gateway` interceptor runs + before the `post_create_app_gateway_with_metadata` interceptor. """ return response + def post_create_app_gateway_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_app_gateway + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AppGatewaysService server but before it is returned to user code. + + We recommend only using this `post_create_app_gateway_with_metadata` + interceptor in new development instead of the `post_create_app_gateway` interceptor. + When both interceptors are used, this `post_create_app_gateway_with_metadata` interceptor runs after the + `post_create_app_gateway` interceptor. The (possibly modified) response returned by + `post_create_app_gateway` will be passed to + `post_create_app_gateway_with_metadata`. + """ + return response, metadata + def pre_delete_app_gateway( self, request: app_gateways_service.DeleteAppGatewayRequest, @@ -156,12 +179,35 @@ def post_delete_app_gateway( ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_app_gateway - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_app_gateway_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AppGatewaysService server but before - it is returned to user code. + it is returned to user code. This `post_delete_app_gateway` interceptor runs + before the `post_delete_app_gateway_with_metadata` interceptor. """ return response + def post_delete_app_gateway_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_app_gateway + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AppGatewaysService server but before it is returned to user code. 
+ + We recommend only using this `post_delete_app_gateway_with_metadata` + interceptor in new development instead of the `post_delete_app_gateway` interceptor. + When both interceptors are used, this `post_delete_app_gateway_with_metadata` interceptor runs after the + `post_delete_app_gateway` interceptor. The (possibly modified) response returned by + `post_delete_app_gateway` will be passed to + `post_delete_app_gateway_with_metadata`. + """ + return response, metadata + def pre_get_app_gateway( self, request: app_gateways_service.GetAppGatewayRequest, @@ -182,12 +228,37 @@ def post_get_app_gateway( ) -> app_gateways_service.AppGateway: """Post-rpc interceptor for get_app_gateway - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_app_gateway_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AppGatewaysService server but before - it is returned to user code. + it is returned to user code. This `post_get_app_gateway` interceptor runs + before the `post_get_app_gateway_with_metadata` interceptor. """ return response + def post_get_app_gateway_with_metadata( + self, + response: app_gateways_service.AppGateway, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + app_gateways_service.AppGateway, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for get_app_gateway + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AppGatewaysService server but before it is returned to user code. + + We recommend only using this `post_get_app_gateway_with_metadata` + interceptor in new development instead of the `post_get_app_gateway` interceptor. + When both interceptors are used, this `post_get_app_gateway_with_metadata` interceptor runs after the + `post_get_app_gateway` interceptor. The (possibly modified) response returned by + `post_get_app_gateway` will be passed to + `post_get_app_gateway_with_metadata`. + """ + return response, metadata + def pre_list_app_gateways( self, request: app_gateways_service.ListAppGatewaysRequest, @@ -208,12 +279,38 @@ def post_list_app_gateways( ) -> app_gateways_service.ListAppGatewaysResponse: """Post-rpc interceptor for list_app_gateways - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_app_gateways_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AppGatewaysService server but before - it is returned to user code. + it is returned to user code. This `post_list_app_gateways` interceptor runs + before the `post_list_app_gateways_with_metadata` interceptor. """ return response + def post_list_app_gateways_with_metadata( + self, + response: app_gateways_service.ListAppGatewaysResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + app_gateways_service.ListAppGatewaysResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_app_gateways + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AppGatewaysService server but before it is returned to user code. + + We recommend only using this `post_list_app_gateways_with_metadata` + interceptor in new development instead of the `post_list_app_gateways` interceptor. 
+ When both interceptors are used, this `post_list_app_gateways_with_metadata` interceptor runs after the + `post_list_app_gateways` interceptor. The (possibly modified) response returned by + `post_list_app_gateways` will be passed to + `post_list_app_gateways_with_metadata`. + """ + return response, metadata + def pre_get_location( self, request: locations_pb2.GetLocationRequest, @@ -714,6 +811,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_app_gateway(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_app_gateway_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -859,6 +960,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_app_gateway(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_app_gateway_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1009,6 +1114,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_app_gateway(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_app_gateway_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1155,6 +1264,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_app_gateways(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_app_gateways_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-beyondcorp-appgateways/noxfile.py b/packages/google-cloud-beyondcorp-appgateways/noxfile.py index a9ceef47133c..0acc836b384e 100644 --- a/packages/google-cloud-beyondcorp-appgateways/noxfile.py +++ b/packages/google-cloud-beyondcorp-appgateways/noxfile.py @@ -382,20 +382,29 @@ def docfx(session): ["python", "upb", "cpp"], ) def prerelease_deps(session, protobuf_implementation): - """Run all tests with prerelease versions of dependencies installed.""" + """ + Run all tests with pre-release versions of dependencies installed + rather than the standard non pre-release versions. + Pre-releases versions can be installed using + `pip install --pre `. 
+ """ if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): session.skip("cpp implementation is not supported in python 3.11+") # Install all dependencies - session.install("-e", ".[all, tests, tracing]") + session.install("-e", ".") + unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES + # Install dependencies for the unit test environment session.install(*unit_deps_all) + system_deps_all = ( SYSTEM_TEST_STANDARD_DEPENDENCIES + SYSTEM_TEST_EXTERNAL_DEPENDENCIES + SYSTEM_TEST_EXTRAS ) + # Install dependencies for the system test environment session.install(*system_deps_all) # Because we test minimum dependency versions on the minimum Python @@ -417,6 +426,7 @@ def prerelease_deps(session, protobuf_implementation): ) ] + # Install dependencies specified in `testing/constraints-X.txt`. session.install(*constraints_deps) prerel_deps = [ @@ -458,3 +468,70 @@ def prerelease_deps(session, protobuf_implementation): "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, }, ) + + +@nox.session(python="3.13") +@nox.parametrize( + "protobuf_implementation", + ["python", "upb"], +) +def core_deps_from_source(session, protobuf_implementation): + """Run all tests with local versions of core dependencies installed, + rather than pulling core dependencies from PyPI. + """ + + # Install all dependencies + session.install(".") + + # Install dependencies for the unit test environment + unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES + session.install(*unit_deps_all) + + # Install dependencies for the system test environment + system_deps_all = ( + SYSTEM_TEST_STANDARD_DEPENDENCIES + + SYSTEM_TEST_EXTERNAL_DEPENDENCIES + + SYSTEM_TEST_EXTRAS + ) + session.install(*system_deps_all) + + # Because we test minimum dependency versions on the minimum Python + # version, the first version we test with in the unit tests sessions has a + # constraints file containing all dependencies and extras that should be installed. + with open( + CURRENT_DIRECTORY + / "testing" + / f"constraints-{UNIT_TEST_PYTHON_VERSIONS[0]}.txt", + encoding="utf-8", + ) as constraints_file: + constraints_text = constraints_file.read() + + # Ignore leading whitespace and comment lines. + constraints_deps = [ + match.group(1) + for match in re.finditer( + r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE + ) + ] + + # Install dependencies specified in `testing/constraints-X.txt`. 
+ session.install(*constraints_deps) + + core_dependencies_from_source = [ + "google-api-core @ git+https://github.com/googleapis/python-api-core.git", + "google-auth @ git+https://github.com/googleapis/google-auth-library-python.git", + f"{CURRENT_DIRECTORY}/../googleapis-common-protos", + f"{CURRENT_DIRECTORY}/../grpc-google-iam-v1", + "proto-plus @ git+https://github.com/googleapis/proto-plus-python.git", + ] + + for dep in core_dependencies_from_source: + session.install(dep, "--ignore-installed", "--no-deps") + + session.run( + "py.test", + "tests/unit", + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, + ) diff --git a/packages/google-cloud-beyondcorp-appgateways/samples/generated_samples/snippet_metadata_google.cloud.beyondcorp.appgateways.v1.json b/packages/google-cloud-beyondcorp-appgateways/samples/generated_samples/snippet_metadata_google.cloud.beyondcorp.appgateways.v1.json index e2071e4db336..59e696470f03 100644 --- a/packages/google-cloud-beyondcorp-appgateways/samples/generated_samples/snippet_metadata_google.cloud.beyondcorp.appgateways.v1.json +++ b/packages/google-cloud-beyondcorp-appgateways/samples/generated_samples/snippet_metadata_google.cloud.beyondcorp.appgateways.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-beyondcorp-appgateways", - "version": "0.4.14" + "version": "0.4.15" }, "snippets": [ { diff --git a/packages/google-cloud-beyondcorp-appgateways/tests/unit/gapic/beyondcorp_appgateways_v1/test_app_gateways_service.py b/packages/google-cloud-beyondcorp-appgateways/tests/unit/gapic/beyondcorp_appgateways_v1/test_app_gateways_service.py index f7561524c54c..43d923ece3a2 100644 --- a/packages/google-cloud-beyondcorp-appgateways/tests/unit/gapic/beyondcorp_appgateways_v1/test_app_gateways_service.py +++ b/packages/google-cloud-beyondcorp-appgateways/tests/unit/gapic/beyondcorp_appgateways_v1/test_app_gateways_service.py @@ -76,6 +76,13 @@ ) from google.cloud.beyondcorp_appgateways_v1.types import app_gateways_service +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -347,6 +354,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
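Editor's note: the parametrized tests that follow exercise the `_add_cred_info_for_auth_errors` helper added to the client earlier in this diff. As a minimal sketch of the behavior from a caller's point of view — assuming application default credentials whose implementation exposes `get_cred_info()` (google-auth >= 2.35.0), and an illustrative resource name — an authorization failure surfaces the credential info as an extra JSON string in the error details:

from google.api_core import exceptions as core_exceptions
from google.cloud import beyondcorp_appgateways_v1

client = beyondcorp_appgateways_v1.AppGatewaysServiceClient()

try:
    client.get_app_gateway(
        # Illustrative resource name.
        name="projects/my-project/locations/us-central1/appGateways/my-gateway"
    )
except core_exceptions.PermissionDenied as exc:  # HTTP 403
    # For 401/403/404 errors, and only when the credential implements
    # get_cred_info(), the client appends a JSON string describing the
    # credential source, type, and principal before re-raising.
    for detail in exc.details:
        print(detail)

Errors with other status codes (the 500 rows in the parametrization below) are re-raised with their details unchanged, which is exactly what these tests assert.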
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = AppGatewaysServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = AppGatewaysServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -4006,10 +4056,14 @@ def test_list_app_gateways_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AppGatewaysServiceRestInterceptor, "post_list_app_gateways" ) as post, mock.patch.object( + transports.AppGatewaysServiceRestInterceptor, + "post_list_app_gateways_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AppGatewaysServiceRestInterceptor, "pre_list_app_gateways" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = app_gateways_service.ListAppGatewaysRequest.pb( app_gateways_service.ListAppGatewaysRequest() ) @@ -4035,6 +4089,10 @@ def test_list_app_gateways_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = app_gateways_service.ListAppGatewaysResponse() + post_with_metadata.return_value = ( + app_gateways_service.ListAppGatewaysResponse(), + metadata, + ) client.list_app_gateways( request, @@ -4046,6 +4104,7 @@ def test_list_app_gateways_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_app_gateway_rest_bad_request( @@ -4144,10 +4203,14 @@ def test_get_app_gateway_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AppGatewaysServiceRestInterceptor, "post_get_app_gateway" ) as post, mock.patch.object( + transports.AppGatewaysServiceRestInterceptor, + "post_get_app_gateway_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AppGatewaysServiceRestInterceptor, "pre_get_app_gateway" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = app_gateways_service.GetAppGatewayRequest.pb( app_gateways_service.GetAppGatewayRequest() ) @@ -4173,6 +4236,7 @@ def test_get_app_gateway_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = app_gateways_service.AppGateway() + post_with_metadata.return_value = app_gateways_service.AppGateway(), metadata client.get_app_gateway( request, @@ -4184,6 +4248,7 @@ def 
test_get_app_gateway_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_app_gateway_rest_bad_request( @@ -4344,10 +4409,14 @@ def test_create_app_gateway_rest_interceptors(null_interceptor): ), mock.patch.object( transports.AppGatewaysServiceRestInterceptor, "post_create_app_gateway" ) as post, mock.patch.object( + transports.AppGatewaysServiceRestInterceptor, + "post_create_app_gateway_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AppGatewaysServiceRestInterceptor, "pre_create_app_gateway" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = app_gateways_service.CreateAppGatewayRequest.pb( app_gateways_service.CreateAppGatewayRequest() ) @@ -4371,6 +4440,7 @@ def test_create_app_gateway_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_app_gateway( request, @@ -4382,6 +4452,7 @@ def test_create_app_gateway_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_app_gateway_rest_bad_request( @@ -4462,10 +4533,14 @@ def test_delete_app_gateway_rest_interceptors(null_interceptor): ), mock.patch.object( transports.AppGatewaysServiceRestInterceptor, "post_delete_app_gateway" ) as post, mock.patch.object( + transports.AppGatewaysServiceRestInterceptor, + "post_delete_app_gateway_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AppGatewaysServiceRestInterceptor, "pre_delete_app_gateway" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = app_gateways_service.DeleteAppGatewayRequest.pb( app_gateways_service.DeleteAppGatewayRequest() ) @@ -4489,6 +4564,7 @@ def test_delete_app_gateway_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.delete_app_gateway( request, @@ -4500,6 +4576,7 @@ def test_delete_app_gateway_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationRequest): diff --git a/packages/google-cloud-beyondcorp-clientconnectorservices/CHANGELOG.md b/packages/google-cloud-beyondcorp-clientconnectorservices/CHANGELOG.md index b8a3ceaac7e1..26bf9391cb96 100644 --- a/packages/google-cloud-beyondcorp-clientconnectorservices/CHANGELOG.md +++ b/packages/google-cloud-beyondcorp-clientconnectorservices/CHANGELOG.md @@ -1,5 +1,13 @@ # Changelog +## [0.4.15](https://github.com/googleapis/google-cloud-python/compare/google-cloud-beyondcorp-clientconnectorservices-v0.4.14...google-cloud-beyondcorp-clientconnectorservices-v0.4.15) (2025-02-12) + + +### Features + +* Add REST Interceptors which support reading metadata ([b1c3ce8](https://github.com/googleapis/google-cloud-python/commit/b1c3ce8b271e9d22afabcde054e81dcedae6b0ef)) +* Add support for reading selective GAPIC generation methods from service YAML ([b1c3ce8](https://github.com/googleapis/google-cloud-python/commit/b1c3ce8b271e9d22afabcde054e81dcedae6b0ef)) + ## 
[0.4.14](https://github.com/googleapis/google-cloud-python/compare/google-cloud-beyondcorp-clientconnectorservices-v0.4.13...google-cloud-beyondcorp-clientconnectorservices-v0.4.14) (2024-12-12) diff --git a/packages/google-cloud-beyondcorp-clientconnectorservices/README.rst b/packages/google-cloud-beyondcorp-clientconnectorservices/README.rst index dd3e71ab6238..2202edd5c25c 100644 --- a/packages/google-cloud-beyondcorp-clientconnectorservices/README.rst +++ b/packages/google-cloud-beyondcorp-clientconnectorservices/README.rst @@ -26,12 +26,12 @@ In order to use this library, you first need to go through the following steps: 1. `Select or create a Cloud Platform project.`_ 2. `Enable billing for your project.`_ 3. `Enable the BeyondCorp ClientConnectorServices.`_ -4. `Setup Authentication.`_ +4. `Set up Authentication.`_ .. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project .. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project .. _Enable the BeyondCorp ClientConnectorServices.: https://cloud.google.com/beyondcorp/ -.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html +.. _Set up Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html Installation ~~~~~~~~~~~~ diff --git a/packages/google-cloud-beyondcorp-clientconnectorservices/google/cloud/beyondcorp_clientconnectorservices/gapic_version.py b/packages/google-cloud-beyondcorp-clientconnectorservices/google/cloud/beyondcorp_clientconnectorservices/gapic_version.py index 3106ac663ac7..49a0d50535a0 100644 --- a/packages/google-cloud-beyondcorp-clientconnectorservices/google/cloud/beyondcorp_clientconnectorservices/gapic_version.py +++ b/packages/google-cloud-beyondcorp-clientconnectorservices/google/cloud/beyondcorp_clientconnectorservices/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.4.14" # {x-release-please-version} +__version__ = "0.4.15" # {x-release-please-version} diff --git a/packages/google-cloud-beyondcorp-clientconnectorservices/google/cloud/beyondcorp_clientconnectorservices_v1/gapic_version.py b/packages/google-cloud-beyondcorp-clientconnectorservices/google/cloud/beyondcorp_clientconnectorservices_v1/gapic_version.py index 3106ac663ac7..49a0d50535a0 100644 --- a/packages/google-cloud-beyondcorp-clientconnectorservices/google/cloud/beyondcorp_clientconnectorservices_v1/gapic_version.py +++ b/packages/google-cloud-beyondcorp-clientconnectorservices/google/cloud/beyondcorp_clientconnectorservices_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.4.14" # {x-release-please-version} +__version__ = "0.4.15" # {x-release-please-version} diff --git a/packages/google-cloud-beyondcorp-clientconnectorservices/google/cloud/beyondcorp_clientconnectorservices_v1/services/client_connector_services_service/client.py b/packages/google-cloud-beyondcorp-clientconnectorservices/google/cloud/beyondcorp_clientconnectorservices_v1/services/client_connector_services_service/client.py index e9c5bdcdd8ef..f70d636c349a 100644 --- a/packages/google-cloud-beyondcorp-clientconnectorservices/google/cloud/beyondcorp_clientconnectorservices_v1/services/client_connector_services_service/client.py +++ b/packages/google-cloud-beyondcorp-clientconnectorservices/google/cloud/beyondcorp_clientconnectorservices_v1/services/client_connector_services_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -520,6 +522,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -1512,16 +1541,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -1567,16 +1600,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def delete_operation( self, @@ -1799,16 +1836,20 @@ def set_iam_policy( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. 
- return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_iam_policy( self, @@ -1921,16 +1962,20 @@ def get_iam_policy( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def test_iam_permissions( self, @@ -1981,16 +2026,20 @@ def test_iam_permissions( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_location( self, @@ -2036,16 +2085,20 @@ def get_location( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def list_locations( self, @@ -2091,16 +2144,20 @@ def list_locations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-beyondcorp-clientconnectorservices/google/cloud/beyondcorp_clientconnectorservices_v1/services/client_connector_services_service/transports/rest.py b/packages/google-cloud-beyondcorp-clientconnectorservices/google/cloud/beyondcorp_clientconnectorservices_v1/services/client_connector_services_service/transports/rest.py index faeb3df59682..7374e19aac15 100644 --- a/packages/google-cloud-beyondcorp-clientconnectorservices/google/cloud/beyondcorp_clientconnectorservices_v1/services/client_connector_services_service/transports/rest.py +++ b/packages/google-cloud-beyondcorp-clientconnectorservices/google/cloud/beyondcorp_clientconnectorservices_v1/services/client_connector_services_service/transports/rest.py @@ -140,12 +140,35 @@ def post_create_client_connector_service( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_client_connector_service - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_client_connector_service_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response after it is returned by the ClientConnectorServicesService server but before - it is returned to user code. + it is returned to user code. This `post_create_client_connector_service` interceptor runs + before the `post_create_client_connector_service_with_metadata` interceptor. """ return response + def post_create_client_connector_service_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_client_connector_service + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ClientConnectorServicesService server but before it is returned to user code. + + We recommend only using this `post_create_client_connector_service_with_metadata` + interceptor in new development instead of the `post_create_client_connector_service` interceptor. + When both interceptors are used, this `post_create_client_connector_service_with_metadata` interceptor runs after the + `post_create_client_connector_service` interceptor. The (possibly modified) response returned by + `post_create_client_connector_service` will be passed to + `post_create_client_connector_service_with_metadata`. + """ + return response, metadata + def pre_delete_client_connector_service( self, request: client_connector_services_service.DeleteClientConnectorServiceRequest, @@ -166,12 +189,35 @@ def post_delete_client_connector_service( ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_client_connector_service - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_client_connector_service_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ClientConnectorServicesService server but before - it is returned to user code. + it is returned to user code. This `post_delete_client_connector_service` interceptor runs + before the `post_delete_client_connector_service_with_metadata` interceptor. """ return response + def post_delete_client_connector_service_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_client_connector_service + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ClientConnectorServicesService server but before it is returned to user code. + + We recommend only using this `post_delete_client_connector_service_with_metadata` + interceptor in new development instead of the `post_delete_client_connector_service` interceptor. + When both interceptors are used, this `post_delete_client_connector_service_with_metadata` interceptor runs after the + `post_delete_client_connector_service` interceptor. The (possibly modified) response returned by + `post_delete_client_connector_service` will be passed to + `post_delete_client_connector_service_with_metadata`. 
+ """ + return response, metadata + def pre_get_client_connector_service( self, request: client_connector_services_service.GetClientConnectorServiceRequest, @@ -192,12 +238,38 @@ def post_get_client_connector_service( ) -> client_connector_services_service.ClientConnectorService: """Post-rpc interceptor for get_client_connector_service - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_client_connector_service_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ClientConnectorServicesService server but before - it is returned to user code. + it is returned to user code. This `post_get_client_connector_service` interceptor runs + before the `post_get_client_connector_service_with_metadata` interceptor. """ return response + def post_get_client_connector_service_with_metadata( + self, + response: client_connector_services_service.ClientConnectorService, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + client_connector_services_service.ClientConnectorService, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for get_client_connector_service + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ClientConnectorServicesService server but before it is returned to user code. + + We recommend only using this `post_get_client_connector_service_with_metadata` + interceptor in new development instead of the `post_get_client_connector_service` interceptor. + When both interceptors are used, this `post_get_client_connector_service_with_metadata` interceptor runs after the + `post_get_client_connector_service` interceptor. The (possibly modified) response returned by + `post_get_client_connector_service` will be passed to + `post_get_client_connector_service_with_metadata`. + """ + return response, metadata + def pre_list_client_connector_services( self, request: client_connector_services_service.ListClientConnectorServicesRequest, @@ -219,12 +291,38 @@ def post_list_client_connector_services( ) -> client_connector_services_service.ListClientConnectorServicesResponse: """Post-rpc interceptor for list_client_connector_services - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_client_connector_services_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ClientConnectorServicesService server but before - it is returned to user code. + it is returned to user code. This `post_list_client_connector_services` interceptor runs + before the `post_list_client_connector_services_with_metadata` interceptor. """ return response + def post_list_client_connector_services_with_metadata( + self, + response: client_connector_services_service.ListClientConnectorServicesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + client_connector_services_service.ListClientConnectorServicesResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_client_connector_services + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ClientConnectorServicesService server but before it is returned to user code. + + We recommend only using this `post_list_client_connector_services_with_metadata` + interceptor in new development instead of the `post_list_client_connector_services` interceptor. 
+ When both interceptors are used, this `post_list_client_connector_services_with_metadata` interceptor runs after the + `post_list_client_connector_services` interceptor. The (possibly modified) response returned by + `post_list_client_connector_services` will be passed to + `post_list_client_connector_services_with_metadata`. + """ + return response, metadata + def pre_update_client_connector_service( self, request: client_connector_services_service.UpdateClientConnectorServiceRequest, @@ -245,12 +343,35 @@ def post_update_client_connector_service( ) -> operations_pb2.Operation: """Post-rpc interceptor for update_client_connector_service - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_client_connector_service_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ClientConnectorServicesService server but before - it is returned to user code. + it is returned to user code. This `post_update_client_connector_service` interceptor runs + before the `post_update_client_connector_service_with_metadata` interceptor. """ return response + def post_update_client_connector_service_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_client_connector_service + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ClientConnectorServicesService server but before it is returned to user code. + + We recommend only using this `post_update_client_connector_service_with_metadata` + interceptor in new development instead of the `post_update_client_connector_service` interceptor. + When both interceptors are used, this `post_update_client_connector_service_with_metadata` interceptor runs after the + `post_update_client_connector_service` interceptor. The (possibly modified) response returned by + `post_update_client_connector_service` will be passed to + `post_update_client_connector_service_with_metadata`. 
+ """ + return response, metadata + def pre_get_location( self, request: locations_pb2.GetLocationRequest, @@ -755,6 +876,13 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_client_connector_service(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_create_client_connector_service_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -903,6 +1031,13 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_client_connector_service(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_delete_client_connector_service_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1052,6 +1187,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_client_connector_service(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_client_connector_service_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1207,6 +1346,13 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_client_connector_services(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_list_client_connector_services_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1363,6 +1509,13 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_client_connector_service(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_update_client_connector_service_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-beyondcorp-clientconnectorservices/noxfile.py b/packages/google-cloud-beyondcorp-clientconnectorservices/noxfile.py index a9ceef47133c..0acc836b384e 100644 --- a/packages/google-cloud-beyondcorp-clientconnectorservices/noxfile.py +++ b/packages/google-cloud-beyondcorp-clientconnectorservices/noxfile.py @@ -382,20 +382,29 @@ def docfx(session): ["python", "upb", "cpp"], ) def prerelease_deps(session, protobuf_implementation): - """Run all tests with prerelease versions of dependencies installed.""" + """ + Run all tests with pre-release versions of dependencies installed + rather than the standard non pre-release versions. + Pre-releases versions can be installed using + `pip install --pre `. 
+ """ if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): session.skip("cpp implementation is not supported in python 3.11+") # Install all dependencies - session.install("-e", ".[all, tests, tracing]") + session.install("-e", ".") + unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES + # Install dependencies for the unit test environment session.install(*unit_deps_all) + system_deps_all = ( SYSTEM_TEST_STANDARD_DEPENDENCIES + SYSTEM_TEST_EXTERNAL_DEPENDENCIES + SYSTEM_TEST_EXTRAS ) + # Install dependencies for the system test environment session.install(*system_deps_all) # Because we test minimum dependency versions on the minimum Python @@ -417,6 +426,7 @@ def prerelease_deps(session, protobuf_implementation): ) ] + # Install dependencies specified in `testing/constraints-X.txt`. session.install(*constraints_deps) prerel_deps = [ @@ -458,3 +468,70 @@ def prerelease_deps(session, protobuf_implementation): "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, }, ) + + +@nox.session(python="3.13") +@nox.parametrize( + "protobuf_implementation", + ["python", "upb"], +) +def core_deps_from_source(session, protobuf_implementation): + """Run all tests with local versions of core dependencies installed, + rather than pulling core dependencies from PyPI. + """ + + # Install all dependencies + session.install(".") + + # Install dependencies for the unit test environment + unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES + session.install(*unit_deps_all) + + # Install dependencies for the system test environment + system_deps_all = ( + SYSTEM_TEST_STANDARD_DEPENDENCIES + + SYSTEM_TEST_EXTERNAL_DEPENDENCIES + + SYSTEM_TEST_EXTRAS + ) + session.install(*system_deps_all) + + # Because we test minimum dependency versions on the minimum Python + # version, the first version we test with in the unit tests sessions has a + # constraints file containing all dependencies and extras that should be installed. + with open( + CURRENT_DIRECTORY + / "testing" + / f"constraints-{UNIT_TEST_PYTHON_VERSIONS[0]}.txt", + encoding="utf-8", + ) as constraints_file: + constraints_text = constraints_file.read() + + # Ignore leading whitespace and comment lines. + constraints_deps = [ + match.group(1) + for match in re.finditer( + r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE + ) + ] + + # Install dependencies specified in `testing/constraints-X.txt`. 
+ session.install(*constraints_deps) + + core_dependencies_from_source = [ + "google-api-core @ git+https://github.com/googleapis/python-api-core.git", + "google-auth @ git+https://github.com/googleapis/google-auth-library-python.git", + f"{CURRENT_DIRECTORY}/../googleapis-common-protos", + f"{CURRENT_DIRECTORY}/../grpc-google-iam-v1", + "proto-plus @ git+https://github.com/googleapis/proto-plus-python.git", + ] + + for dep in core_dependencies_from_source: + session.install(dep, "--ignore-installed", "--no-deps") + + session.run( + "py.test", + "tests/unit", + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, + ) diff --git a/packages/google-cloud-beyondcorp-clientconnectorservices/samples/generated_samples/snippet_metadata_google.cloud.beyondcorp.clientconnectorservices.v1.json b/packages/google-cloud-beyondcorp-clientconnectorservices/samples/generated_samples/snippet_metadata_google.cloud.beyondcorp.clientconnectorservices.v1.json index cab20703e281..f5db07f151ea 100644 --- a/packages/google-cloud-beyondcorp-clientconnectorservices/samples/generated_samples/snippet_metadata_google.cloud.beyondcorp.clientconnectorservices.v1.json +++ b/packages/google-cloud-beyondcorp-clientconnectorservices/samples/generated_samples/snippet_metadata_google.cloud.beyondcorp.clientconnectorservices.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-beyondcorp-clientconnectorservices", - "version": "0.4.14" + "version": "0.4.15" }, "snippets": [ { diff --git a/packages/google-cloud-beyondcorp-clientconnectorservices/tests/unit/gapic/beyondcorp_clientconnectorservices_v1/test_client_connector_services_service.py b/packages/google-cloud-beyondcorp-clientconnectorservices/tests/unit/gapic/beyondcorp_clientconnectorservices_v1/test_client_connector_services_service.py index 648e3254fba7..684cee355b38 100644 --- a/packages/google-cloud-beyondcorp-clientconnectorservices/tests/unit/gapic/beyondcorp_clientconnectorservices_v1/test_client_connector_services_service.py +++ b/packages/google-cloud-beyondcorp-clientconnectorservices/tests/unit/gapic/beyondcorp_clientconnectorservices_v1/test_client_connector_services_service.py @@ -79,6 +79,13 @@ client_connector_services_service, ) +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -369,6 +376,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
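Editor's note: the rest.py docstrings above recommend the new `post_*_with_metadata` hooks over the older post-RPC interceptors for new development. A minimal sketch of a subclass using one of these hooks follows; the class name and the header it inspects are purely illustrative, and the import paths follow the generated layout shown in this diff:

from typing import Sequence, Tuple, Union

from google.cloud.beyondcorp_clientconnectorservices_v1.services.client_connector_services_service.transports.rest import (
    ClientConnectorServicesServiceRestInterceptor,
)
from google.cloud.beyondcorp_clientconnectorservices_v1.types import (
    client_connector_services_service,
)


class HeaderLoggingInterceptor(ClientConnectorServicesServiceRestInterceptor):
    """Illustrative interceptor that reads response headers via the new hook."""

    def post_get_client_connector_service_with_metadata(
        self,
        response: client_connector_services_service.ClientConnectorService,
        metadata: Sequence[Tuple[str, Union[str, bytes]]],
    ) -> Tuple[
        client_connector_services_service.ClientConnectorService,
        Sequence[Tuple[str, Union[str, bytes]]],
    ]:
        # `metadata` is the list of (header, value) pairs built from the HTTP
        # response headers; the header name checked here is illustrative only.
        for key, value in metadata:
            if key.lower() == "x-request-id":
                print(f"get_client_connector_service request id: {value}")
        # Return both values so the (possibly modified) response and metadata
        # continue through the normal call path.
        return response, metadata

Wiring is not shown above: in generated clients an instance like this is normally passed to the REST transport's `interceptor` argument and the transport to the client constructor, but confirm the exact constructor signature against the generated `rest.py` for this package.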
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = ClientConnectorServicesServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = ClientConnectorServicesServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -4824,11 +4874,15 @@ def test_list_client_connector_services_rest_interceptors(null_interceptor): transports.ClientConnectorServicesServiceRestInterceptor, "post_list_client_connector_services", ) as post, mock.patch.object( + transports.ClientConnectorServicesServiceRestInterceptor, + "post_list_client_connector_services_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ClientConnectorServicesServiceRestInterceptor, "pre_list_client_connector_services", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = ( client_connector_services_service.ListClientConnectorServicesRequest.pb( client_connector_services_service.ListClientConnectorServicesRequest() @@ -4858,6 +4912,10 @@ def test_list_client_connector_services_rest_interceptors(null_interceptor): post.return_value = ( client_connector_services_service.ListClientConnectorServicesResponse() ) + post_with_metadata.return_value = ( + client_connector_services_service.ListClientConnectorServicesResponse(), + metadata, + ) client.list_client_connector_services( request, @@ -4869,6 +4927,7 @@ def test_list_client_connector_services_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_client_connector_service_rest_bad_request( @@ -4969,11 +5028,15 @@ def test_get_client_connector_service_rest_interceptors(null_interceptor): transports.ClientConnectorServicesServiceRestInterceptor, "post_get_client_connector_service", ) as post, mock.patch.object( + transports.ClientConnectorServicesServiceRestInterceptor, + "post_get_client_connector_service_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ClientConnectorServicesServiceRestInterceptor, "pre_get_client_connector_service", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = ( client_connector_services_service.GetClientConnectorServiceRequest.pb( client_connector_services_service.GetClientConnectorServiceRequest() @@ -5001,6 +5064,10 @@ def 
test_get_client_connector_service_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = client_connector_services_service.ClientConnectorService() + post_with_metadata.return_value = ( + client_connector_services_service.ClientConnectorService(), + metadata, + ) client.get_client_connector_service( request, @@ -5012,6 +5079,7 @@ def test_get_client_connector_service_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_client_connector_service_rest_bad_request( @@ -5180,11 +5248,15 @@ def test_create_client_connector_service_rest_interceptors(null_interceptor): transports.ClientConnectorServicesServiceRestInterceptor, "post_create_client_connector_service", ) as post, mock.patch.object( + transports.ClientConnectorServicesServiceRestInterceptor, + "post_create_client_connector_service_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ClientConnectorServicesServiceRestInterceptor, "pre_create_client_connector_service", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = ( client_connector_services_service.CreateClientConnectorServiceRequest.pb( client_connector_services_service.CreateClientConnectorServiceRequest() @@ -5212,6 +5284,7 @@ def test_create_client_connector_service_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_client_connector_service( request, @@ -5223,6 +5296,7 @@ def test_create_client_connector_service_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_client_connector_service_rest_bad_request( @@ -5399,11 +5473,15 @@ def test_update_client_connector_service_rest_interceptors(null_interceptor): transports.ClientConnectorServicesServiceRestInterceptor, "post_update_client_connector_service", ) as post, mock.patch.object( + transports.ClientConnectorServicesServiceRestInterceptor, + "post_update_client_connector_service_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ClientConnectorServicesServiceRestInterceptor, "pre_update_client_connector_service", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = ( client_connector_services_service.UpdateClientConnectorServiceRequest.pb( client_connector_services_service.UpdateClientConnectorServiceRequest() @@ -5431,6 +5509,7 @@ def test_update_client_connector_service_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.update_client_connector_service( request, @@ -5442,6 +5521,7 @@ def test_update_client_connector_service_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_client_connector_service_rest_bad_request( @@ -5527,11 +5607,15 @@ def test_delete_client_connector_service_rest_interceptors(null_interceptor): transports.ClientConnectorServicesServiceRestInterceptor, "post_delete_client_connector_service", ) as post, mock.patch.object( + transports.ClientConnectorServicesServiceRestInterceptor, + 
"post_delete_client_connector_service_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ClientConnectorServicesServiceRestInterceptor, "pre_delete_client_connector_service", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = ( client_connector_services_service.DeleteClientConnectorServiceRequest.pb( client_connector_services_service.DeleteClientConnectorServiceRequest() @@ -5559,6 +5643,7 @@ def test_delete_client_connector_service_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.delete_client_connector_service( request, @@ -5570,6 +5655,7 @@ def test_delete_client_connector_service_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationRequest): diff --git a/packages/google-cloud-beyondcorp-clientgateways/CHANGELOG.md b/packages/google-cloud-beyondcorp-clientgateways/CHANGELOG.md index f08827df94de..f44507c56df6 100644 --- a/packages/google-cloud-beyondcorp-clientgateways/CHANGELOG.md +++ b/packages/google-cloud-beyondcorp-clientgateways/CHANGELOG.md @@ -1,5 +1,13 @@ # Changelog +## [0.4.14](https://github.com/googleapis/google-cloud-python/compare/google-cloud-beyondcorp-clientgateways-v0.4.13...google-cloud-beyondcorp-clientgateways-v0.4.14) (2025-02-12) + + +### Features + +* Add REST Interceptors which support reading metadata ([b1c3ce8](https://github.com/googleapis/google-cloud-python/commit/b1c3ce8b271e9d22afabcde054e81dcedae6b0ef)) +* Add support for reading selective GAPIC generation methods from service YAML ([b1c3ce8](https://github.com/googleapis/google-cloud-python/commit/b1c3ce8b271e9d22afabcde054e81dcedae6b0ef)) + ## [0.4.13](https://github.com/googleapis/google-cloud-python/compare/google-cloud-beyondcorp-clientgateways-v0.4.12...google-cloud-beyondcorp-clientgateways-v0.4.13) (2024-12-12) diff --git a/packages/google-cloud-beyondcorp-clientgateways/README.rst b/packages/google-cloud-beyondcorp-clientgateways/README.rst index 84a1ec37e100..3031b467c631 100644 --- a/packages/google-cloud-beyondcorp-clientgateways/README.rst +++ b/packages/google-cloud-beyondcorp-clientgateways/README.rst @@ -26,12 +26,12 @@ In order to use this library, you first need to go through the following steps: 1. `Select or create a Cloud Platform project.`_ 2. `Enable billing for your project.`_ 3. `Enable the BeyondCorp ClientGateways.`_ -4. `Setup Authentication.`_ +4. `Set up Authentication.`_ .. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project .. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project .. _Enable the BeyondCorp ClientGateways.: https://cloud.google.com/beyondcorp/ -.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html +.. 
_Set up Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html Installation ~~~~~~~~~~~~ diff --git a/packages/google-cloud-beyondcorp-clientgateways/google/cloud/beyondcorp_clientgateways/gapic_version.py b/packages/google-cloud-beyondcorp-clientgateways/google/cloud/beyondcorp_clientgateways/gapic_version.py index 9b19e5f10e00..3106ac663ac7 100644 --- a/packages/google-cloud-beyondcorp-clientgateways/google/cloud/beyondcorp_clientgateways/gapic_version.py +++ b/packages/google-cloud-beyondcorp-clientgateways/google/cloud/beyondcorp_clientgateways/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.4.13" # {x-release-please-version} +__version__ = "0.4.14" # {x-release-please-version} diff --git a/packages/google-cloud-beyondcorp-clientgateways/google/cloud/beyondcorp_clientgateways_v1/gapic_version.py b/packages/google-cloud-beyondcorp-clientgateways/google/cloud/beyondcorp_clientgateways_v1/gapic_version.py index 9b19e5f10e00..3106ac663ac7 100644 --- a/packages/google-cloud-beyondcorp-clientgateways/google/cloud/beyondcorp_clientgateways_v1/gapic_version.py +++ b/packages/google-cloud-beyondcorp-clientgateways/google/cloud/beyondcorp_clientgateways_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.4.13" # {x-release-please-version} +__version__ = "0.4.14" # {x-release-please-version} diff --git a/packages/google-cloud-beyondcorp-clientgateways/google/cloud/beyondcorp_clientgateways_v1/services/client_gateways_service/client.py b/packages/google-cloud-beyondcorp-clientgateways/google/cloud/beyondcorp_clientgateways_v1/services/client_gateways_service/client.py index 77f599cd2635..cb5d86fbf810 100644 --- a/packages/google-cloud-beyondcorp-clientgateways/google/cloud/beyondcorp_clientgateways_v1/services/client_gateways_service/client.py +++ b/packages/google-cloud-beyondcorp-clientgateways/google/cloud/beyondcorp_clientgateways_v1/services/client_gateways_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -508,6 +510,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -1264,16 +1293,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. 
- response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -1319,16 +1352,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def delete_operation( self, @@ -1551,16 +1588,20 @@ def set_iam_policy( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_iam_policy( self, @@ -1673,16 +1714,20 @@ def get_iam_policy( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def test_iam_permissions( self, @@ -1733,16 +1778,20 @@ def test_iam_permissions( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_location( self, @@ -1788,16 +1837,20 @@ def get_location( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def list_locations( self, @@ -1843,16 +1896,20 @@ def list_locations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-beyondcorp-clientgateways/google/cloud/beyondcorp_clientgateways_v1/services/client_gateways_service/transports/rest.py b/packages/google-cloud-beyondcorp-clientgateways/google/cloud/beyondcorp_clientgateways_v1/services/client_gateways_service/transports/rest.py index c343432dc4f9..c9e58795dd0f 100644 --- a/packages/google-cloud-beyondcorp-clientgateways/google/cloud/beyondcorp_clientgateways_v1/services/client_gateways_service/transports/rest.py +++ b/packages/google-cloud-beyondcorp-clientgateways/google/cloud/beyondcorp_clientgateways_v1/services/client_gateways_service/transports/rest.py @@ -130,12 +130,35 @@ def post_create_client_gateway( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_client_gateway - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_client_gateway_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ClientGatewaysService server but before - it is returned to user code. + it is returned to user code. This `post_create_client_gateway` interceptor runs + before the `post_create_client_gateway_with_metadata` interceptor. """ return response + def post_create_client_gateway_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_client_gateway + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ClientGatewaysService server but before it is returned to user code. + + We recommend only using this `post_create_client_gateway_with_metadata` + interceptor in new development instead of the `post_create_client_gateway` interceptor. + When both interceptors are used, this `post_create_client_gateway_with_metadata` interceptor runs after the + `post_create_client_gateway` interceptor. The (possibly modified) response returned by + `post_create_client_gateway` will be passed to + `post_create_client_gateway_with_metadata`. + """ + return response, metadata + def pre_delete_client_gateway( self, request: client_gateways_service.DeleteClientGatewayRequest, @@ -156,12 +179,35 @@ def post_delete_client_gateway( ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_client_gateway - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_client_gateway_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ClientGatewaysService server but before - it is returned to user code. + it is returned to user code. This `post_delete_client_gateway` interceptor runs + before the `post_delete_client_gateway_with_metadata` interceptor. 
""" return response + def post_delete_client_gateway_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_client_gateway + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ClientGatewaysService server but before it is returned to user code. + + We recommend only using this `post_delete_client_gateway_with_metadata` + interceptor in new development instead of the `post_delete_client_gateway` interceptor. + When both interceptors are used, this `post_delete_client_gateway_with_metadata` interceptor runs after the + `post_delete_client_gateway` interceptor. The (possibly modified) response returned by + `post_delete_client_gateway` will be passed to + `post_delete_client_gateway_with_metadata`. + """ + return response, metadata + def pre_get_client_gateway( self, request: client_gateways_service.GetClientGatewayRequest, @@ -182,12 +228,37 @@ def post_get_client_gateway( ) -> client_gateways_service.ClientGateway: """Post-rpc interceptor for get_client_gateway - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_client_gateway_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ClientGatewaysService server but before - it is returned to user code. + it is returned to user code. This `post_get_client_gateway` interceptor runs + before the `post_get_client_gateway_with_metadata` interceptor. """ return response + def post_get_client_gateway_with_metadata( + self, + response: client_gateways_service.ClientGateway, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + client_gateways_service.ClientGateway, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for get_client_gateway + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ClientGatewaysService server but before it is returned to user code. + + We recommend only using this `post_get_client_gateway_with_metadata` + interceptor in new development instead of the `post_get_client_gateway` interceptor. + When both interceptors are used, this `post_get_client_gateway_with_metadata` interceptor runs after the + `post_get_client_gateway` interceptor. The (possibly modified) response returned by + `post_get_client_gateway` will be passed to + `post_get_client_gateway_with_metadata`. + """ + return response, metadata + def pre_list_client_gateways( self, request: client_gateways_service.ListClientGatewaysRequest, @@ -208,12 +279,38 @@ def post_list_client_gateways( ) -> client_gateways_service.ListClientGatewaysResponse: """Post-rpc interceptor for list_client_gateways - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_client_gateways_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ClientGatewaysService server but before - it is returned to user code. + it is returned to user code. This `post_list_client_gateways` interceptor runs + before the `post_list_client_gateways_with_metadata` interceptor. 
""" return response + def post_list_client_gateways_with_metadata( + self, + response: client_gateways_service.ListClientGatewaysResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + client_gateways_service.ListClientGatewaysResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_client_gateways + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ClientGatewaysService server but before it is returned to user code. + + We recommend only using this `post_list_client_gateways_with_metadata` + interceptor in new development instead of the `post_list_client_gateways` interceptor. + When both interceptors are used, this `post_list_client_gateways_with_metadata` interceptor runs after the + `post_list_client_gateways` interceptor. The (possibly modified) response returned by + `post_list_client_gateways` will be passed to + `post_list_client_gateways_with_metadata`. + """ + return response, metadata + def pre_get_location( self, request: locations_pb2.GetLocationRequest, @@ -712,6 +809,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_client_gateway(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_client_gateway_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -858,6 +959,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_client_gateway(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_client_gateway_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1005,6 +1110,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_client_gateway(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_client_gateway_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1155,6 +1264,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_client_gateways(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_client_gateways_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-beyondcorp-clientgateways/noxfile.py b/packages/google-cloud-beyondcorp-clientgateways/noxfile.py index a9ceef47133c..0acc836b384e 100644 --- a/packages/google-cloud-beyondcorp-clientgateways/noxfile.py +++ b/packages/google-cloud-beyondcorp-clientgateways/noxfile.py @@ -382,20 +382,29 @@ def docfx(session): ["python", "upb", "cpp"], ) def prerelease_deps(session, protobuf_implementation): - """Run all tests with prerelease versions of dependencies installed.""" + """ + Run all tests with pre-release versions of dependencies installed + rather than the standard non pre-release versions. + Pre-releases versions can be installed using + `pip install --pre `. 
+ """ if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): session.skip("cpp implementation is not supported in python 3.11+") # Install all dependencies - session.install("-e", ".[all, tests, tracing]") + session.install("-e", ".") + unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES + # Install dependencies for the unit test environment session.install(*unit_deps_all) + system_deps_all = ( SYSTEM_TEST_STANDARD_DEPENDENCIES + SYSTEM_TEST_EXTERNAL_DEPENDENCIES + SYSTEM_TEST_EXTRAS ) + # Install dependencies for the system test environment session.install(*system_deps_all) # Because we test minimum dependency versions on the minimum Python @@ -417,6 +426,7 @@ def prerelease_deps(session, protobuf_implementation): ) ] + # Install dependencies specified in `testing/constraints-X.txt`. session.install(*constraints_deps) prerel_deps = [ @@ -458,3 +468,70 @@ def prerelease_deps(session, protobuf_implementation): "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, }, ) + + +@nox.session(python="3.13") +@nox.parametrize( + "protobuf_implementation", + ["python", "upb"], +) +def core_deps_from_source(session, protobuf_implementation): + """Run all tests with local versions of core dependencies installed, + rather than pulling core dependencies from PyPI. + """ + + # Install all dependencies + session.install(".") + + # Install dependencies for the unit test environment + unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES + session.install(*unit_deps_all) + + # Install dependencies for the system test environment + system_deps_all = ( + SYSTEM_TEST_STANDARD_DEPENDENCIES + + SYSTEM_TEST_EXTERNAL_DEPENDENCIES + + SYSTEM_TEST_EXTRAS + ) + session.install(*system_deps_all) + + # Because we test minimum dependency versions on the minimum Python + # version, the first version we test with in the unit tests sessions has a + # constraints file containing all dependencies and extras that should be installed. + with open( + CURRENT_DIRECTORY + / "testing" + / f"constraints-{UNIT_TEST_PYTHON_VERSIONS[0]}.txt", + encoding="utf-8", + ) as constraints_file: + constraints_text = constraints_file.read() + + # Ignore leading whitespace and comment lines. + constraints_deps = [ + match.group(1) + for match in re.finditer( + r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE + ) + ] + + # Install dependencies specified in `testing/constraints-X.txt`. 
+ session.install(*constraints_deps) + + core_dependencies_from_source = [ + "google-api-core @ git+https://github.com/googleapis/python-api-core.git", + "google-auth @ git+https://github.com/googleapis/google-auth-library-python.git", + f"{CURRENT_DIRECTORY}/../googleapis-common-protos", + f"{CURRENT_DIRECTORY}/../grpc-google-iam-v1", + "proto-plus @ git+https://github.com/googleapis/proto-plus-python.git", + ] + + for dep in core_dependencies_from_source: + session.install(dep, "--ignore-installed", "--no-deps") + + session.run( + "py.test", + "tests/unit", + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, + ) diff --git a/packages/google-cloud-beyondcorp-clientgateways/samples/generated_samples/snippet_metadata_google.cloud.beyondcorp.clientgateways.v1.json b/packages/google-cloud-beyondcorp-clientgateways/samples/generated_samples/snippet_metadata_google.cloud.beyondcorp.clientgateways.v1.json index 000a3892d5a9..38c198d87520 100644 --- a/packages/google-cloud-beyondcorp-clientgateways/samples/generated_samples/snippet_metadata_google.cloud.beyondcorp.clientgateways.v1.json +++ b/packages/google-cloud-beyondcorp-clientgateways/samples/generated_samples/snippet_metadata_google.cloud.beyondcorp.clientgateways.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-beyondcorp-clientgateways", - "version": "0.4.13" + "version": "0.4.14" }, "snippets": [ { diff --git a/packages/google-cloud-beyondcorp-clientgateways/tests/unit/gapic/beyondcorp_clientgateways_v1/test_client_gateways_service.py b/packages/google-cloud-beyondcorp-clientgateways/tests/unit/gapic/beyondcorp_clientgateways_v1/test_client_gateways_service.py index fb82008584b3..eee84f4ef40f 100644 --- a/packages/google-cloud-beyondcorp-clientgateways/tests/unit/gapic/beyondcorp_clientgateways_v1/test_client_gateways_service.py +++ b/packages/google-cloud-beyondcorp-clientgateways/tests/unit/gapic/beyondcorp_clientgateways_v1/test_client_gateways_service.py @@ -76,6 +76,13 @@ ) from google.cloud.beyondcorp_clientgateways_v1.types import client_gateways_service +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -351,6 +358,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = ClientGatewaysServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = ClientGatewaysServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -4054,10 +4104,14 @@ def test_list_client_gateways_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ClientGatewaysServiceRestInterceptor, "post_list_client_gateways" ) as post, mock.patch.object( + transports.ClientGatewaysServiceRestInterceptor, + "post_list_client_gateways_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ClientGatewaysServiceRestInterceptor, "pre_list_client_gateways" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = client_gateways_service.ListClientGatewaysRequest.pb( client_gateways_service.ListClientGatewaysRequest() ) @@ -4083,6 +4137,10 @@ def test_list_client_gateways_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = client_gateways_service.ListClientGatewaysResponse() + post_with_metadata.return_value = ( + client_gateways_service.ListClientGatewaysResponse(), + metadata, + ) client.list_client_gateways( request, @@ -4094,6 +4152,7 @@ def test_list_client_gateways_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_client_gateway_rest_bad_request( @@ -4184,10 +4243,14 @@ def test_get_client_gateway_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ClientGatewaysServiceRestInterceptor, "post_get_client_gateway" ) as post, mock.patch.object( + transports.ClientGatewaysServiceRestInterceptor, + "post_get_client_gateway_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ClientGatewaysServiceRestInterceptor, "pre_get_client_gateway" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = client_gateways_service.GetClientGatewayRequest.pb( client_gateways_service.GetClientGatewayRequest() ) @@ -4213,6 +4276,10 @@ def test_get_client_gateway_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = client_gateways_service.ClientGateway() + post_with_metadata.return_value = ( + 
client_gateways_service.ClientGateway(), + metadata, + ) client.get_client_gateway( request, @@ -4224,6 +4291,7 @@ def test_get_client_gateway_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_client_gateway_rest_bad_request( @@ -4381,10 +4449,14 @@ def test_create_client_gateway_rest_interceptors(null_interceptor): ), mock.patch.object( transports.ClientGatewaysServiceRestInterceptor, "post_create_client_gateway" ) as post, mock.patch.object( + transports.ClientGatewaysServiceRestInterceptor, + "post_create_client_gateway_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ClientGatewaysServiceRestInterceptor, "pre_create_client_gateway" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = client_gateways_service.CreateClientGatewayRequest.pb( client_gateways_service.CreateClientGatewayRequest() ) @@ -4408,6 +4480,7 @@ def test_create_client_gateway_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_client_gateway( request, @@ -4419,6 +4492,7 @@ def test_create_client_gateway_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_client_gateway_rest_bad_request( @@ -4499,10 +4573,14 @@ def test_delete_client_gateway_rest_interceptors(null_interceptor): ), mock.patch.object( transports.ClientGatewaysServiceRestInterceptor, "post_delete_client_gateway" ) as post, mock.patch.object( + transports.ClientGatewaysServiceRestInterceptor, + "post_delete_client_gateway_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ClientGatewaysServiceRestInterceptor, "pre_delete_client_gateway" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = client_gateways_service.DeleteClientGatewayRequest.pb( client_gateways_service.DeleteClientGatewayRequest() ) @@ -4526,6 +4604,7 @@ def test_delete_client_gateway_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.delete_client_gateway( request, @@ -4537,6 +4616,7 @@ def test_delete_client_gateway_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationRequest): diff --git a/packages/google-cloud-bigquery-analyticshub/CHANGELOG.md b/packages/google-cloud-bigquery-analyticshub/CHANGELOG.md index c712761a42c0..e14d3d60d361 100644 --- a/packages/google-cloud-bigquery-analyticshub/CHANGELOG.md +++ b/packages/google-cloud-bigquery-analyticshub/CHANGELOG.md @@ -1,5 +1,13 @@ # Changelog +## [0.4.15](https://github.com/googleapis/google-cloud-python/compare/google-cloud-bigquery-analyticshub-v0.4.14...google-cloud-bigquery-analyticshub-v0.4.15) (2025-02-12) + + +### Features + +* Add REST Interceptors which support reading metadata ([b1c3ce8](https://github.com/googleapis/google-cloud-python/commit/b1c3ce8b271e9d22afabcde054e81dcedae6b0ef)) +* Add support for reading selective GAPIC generation methods from service YAML 
([b1c3ce8](https://github.com/googleapis/google-cloud-python/commit/b1c3ce8b271e9d22afabcde054e81dcedae6b0ef)) + ## [0.4.14](https://github.com/googleapis/google-cloud-python/compare/google-cloud-bigquery-analyticshub-v0.4.13...google-cloud-bigquery-analyticshub-v0.4.14) (2024-12-12) diff --git a/packages/google-cloud-bigquery-analyticshub/README.rst b/packages/google-cloud-bigquery-analyticshub/README.rst index 5d28c6aee488..ff91a9c0ea58 100644 --- a/packages/google-cloud-bigquery-analyticshub/README.rst +++ b/packages/google-cloud-bigquery-analyticshub/README.rst @@ -26,12 +26,12 @@ In order to use this library, you first need to go through the following steps: 1. `Select or create a Cloud Platform project.`_ 2. `Enable billing for your project.`_ 3. `Enable the BigQuery Analytics Hub.`_ -4. `Setup Authentication.`_ +4. `Set up Authentication.`_ .. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project .. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project .. _Enable the BigQuery Analytics Hub.: https://cloud.google.com/analytics-hub -.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html +.. _Set up Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html Installation ~~~~~~~~~~~~ diff --git a/packages/google-cloud-bigquery-analyticshub/google/cloud/bigquery_analyticshub/gapic_version.py b/packages/google-cloud-bigquery-analyticshub/google/cloud/bigquery_analyticshub/gapic_version.py index 3106ac663ac7..49a0d50535a0 100644 --- a/packages/google-cloud-bigquery-analyticshub/google/cloud/bigquery_analyticshub/gapic_version.py +++ b/packages/google-cloud-bigquery-analyticshub/google/cloud/bigquery_analyticshub/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.4.14" # {x-release-please-version} +__version__ = "0.4.15" # {x-release-please-version} diff --git a/packages/google-cloud-bigquery-analyticshub/google/cloud/bigquery_analyticshub_v1/gapic_version.py b/packages/google-cloud-bigquery-analyticshub/google/cloud/bigquery_analyticshub_v1/gapic_version.py index 3106ac663ac7..49a0d50535a0 100644 --- a/packages/google-cloud-bigquery-analyticshub/google/cloud/bigquery_analyticshub_v1/gapic_version.py +++ b/packages/google-cloud-bigquery-analyticshub/google/cloud/bigquery_analyticshub_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.4.14" # {x-release-please-version} +__version__ = "0.4.15" # {x-release-please-version} diff --git a/packages/google-cloud-bigquery-analyticshub/google/cloud/bigquery_analyticshub_v1/services/analytics_hub_service/client.py b/packages/google-cloud-bigquery-analyticshub/google/cloud/bigquery_analyticshub_v1/services/analytics_hub_service/client.py index 461824a6bd4f..9a7ebf8a012c 100644 --- a/packages/google-cloud-bigquery-analyticshub/google/cloud/bigquery_analyticshub_v1/services/analytics_hub_service/client.py +++ b/packages/google-cloud-bigquery-analyticshub/google/cloud/bigquery_analyticshub_v1/services/analytics_hub_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. 
# from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -583,6 +585,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. diff --git a/packages/google-cloud-bigquery-analyticshub/noxfile.py b/packages/google-cloud-bigquery-analyticshub/noxfile.py index a9ceef47133c..0acc836b384e 100644 --- a/packages/google-cloud-bigquery-analyticshub/noxfile.py +++ b/packages/google-cloud-bigquery-analyticshub/noxfile.py @@ -382,20 +382,29 @@ def docfx(session): ["python", "upb", "cpp"], ) def prerelease_deps(session, protobuf_implementation): - """Run all tests with prerelease versions of dependencies installed.""" + """ + Run all tests with pre-release versions of dependencies installed + rather than the standard non pre-release versions. + Pre-releases versions can be installed using + `pip install --pre `. + """ if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): session.skip("cpp implementation is not supported in python 3.11+") # Install all dependencies - session.install("-e", ".[all, tests, tracing]") + session.install("-e", ".") + unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES + # Install dependencies for the unit test environment session.install(*unit_deps_all) + system_deps_all = ( SYSTEM_TEST_STANDARD_DEPENDENCIES + SYSTEM_TEST_EXTERNAL_DEPENDENCIES + SYSTEM_TEST_EXTRAS ) + # Install dependencies for the system test environment session.install(*system_deps_all) # Because we test minimum dependency versions on the minimum Python @@ -417,6 +426,7 @@ def prerelease_deps(session, protobuf_implementation): ) ] + # Install dependencies specified in `testing/constraints-X.txt`. session.install(*constraints_deps) prerel_deps = [ @@ -458,3 +468,70 @@ def prerelease_deps(session, protobuf_implementation): "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, }, ) + + +@nox.session(python="3.13") +@nox.parametrize( + "protobuf_implementation", + ["python", "upb"], +) +def core_deps_from_source(session, protobuf_implementation): + """Run all tests with local versions of core dependencies installed, + rather than pulling core dependencies from PyPI. 
+ """ + + # Install all dependencies + session.install(".") + + # Install dependencies for the unit test environment + unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES + session.install(*unit_deps_all) + + # Install dependencies for the system test environment + system_deps_all = ( + SYSTEM_TEST_STANDARD_DEPENDENCIES + + SYSTEM_TEST_EXTERNAL_DEPENDENCIES + + SYSTEM_TEST_EXTRAS + ) + session.install(*system_deps_all) + + # Because we test minimum dependency versions on the minimum Python + # version, the first version we test with in the unit tests sessions has a + # constraints file containing all dependencies and extras that should be installed. + with open( + CURRENT_DIRECTORY + / "testing" + / f"constraints-{UNIT_TEST_PYTHON_VERSIONS[0]}.txt", + encoding="utf-8", + ) as constraints_file: + constraints_text = constraints_file.read() + + # Ignore leading whitespace and comment lines. + constraints_deps = [ + match.group(1) + for match in re.finditer( + r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE + ) + ] + + # Install dependencies specified in `testing/constraints-X.txt`. + session.install(*constraints_deps) + + core_dependencies_from_source = [ + "google-api-core @ git+https://github.com/googleapis/python-api-core.git", + "google-auth @ git+https://github.com/googleapis/google-auth-library-python.git", + f"{CURRENT_DIRECTORY}/../googleapis-common-protos", + f"{CURRENT_DIRECTORY}/../grpc-google-iam-v1", + "proto-plus @ git+https://github.com/googleapis/proto-plus-python.git", + ] + + for dep in core_dependencies_from_source: + session.install(dep, "--ignore-installed", "--no-deps") + + session.run( + "py.test", + "tests/unit", + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, + ) diff --git a/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/snippet_metadata_google.cloud.bigquery.analyticshub.v1.json b/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/snippet_metadata_google.cloud.bigquery.analyticshub.v1.json index 90a844ccdb66..80eab354d271 100644 --- a/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/snippet_metadata_google.cloud.bigquery.analyticshub.v1.json +++ b/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/snippet_metadata_google.cloud.bigquery.analyticshub.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-bigquery-analyticshub", - "version": "0.4.14" + "version": "0.4.15" }, "snippets": [ { diff --git a/packages/google-cloud-bigquery-analyticshub/tests/unit/gapic/bigquery_analyticshub_v1/test_analytics_hub_service.py b/packages/google-cloud-bigquery-analyticshub/tests/unit/gapic/bigquery_analyticshub_v1/test_analytics_hub_service.py index 98c4565af87b..11b80abe28e7 100644 --- a/packages/google-cloud-bigquery-analyticshub/tests/unit/gapic/bigquery_analyticshub_v1/test_analytics_hub_service.py +++ b/packages/google-cloud-bigquery-analyticshub/tests/unit/gapic/bigquery_analyticshub_v1/test_analytics_hub_service.py @@ -22,6 +22,7 @@ except ImportError: # pragma: NO COVER import mock +import json import math from google.api_core import api_core_version @@ -73,6 +74,13 @@ ) from google.cloud.bigquery_analyticshub_v1.types import analyticshub +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in 
range(0, len(data)): # pragma: NO COVER @@ -346,6 +354,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = AnalyticsHubServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = AnalyticsHubServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-bigquery-biglake/CHANGELOG.md b/packages/google-cloud-bigquery-biglake/CHANGELOG.md index 0e9ab68d9218..871baded3081 100644 --- a/packages/google-cloud-bigquery-biglake/CHANGELOG.md +++ b/packages/google-cloud-bigquery-biglake/CHANGELOG.md @@ -1,5 +1,13 @@ # Changelog +## [0.4.13](https://github.com/googleapis/google-cloud-python/compare/google-cloud-bigquery-biglake-v0.4.12...google-cloud-bigquery-biglake-v0.4.13) (2025-02-12) + + +### Features + +* Add REST Interceptors which support reading metadata ([b1c3ce8](https://github.com/googleapis/google-cloud-python/commit/b1c3ce8b271e9d22afabcde054e81dcedae6b0ef)) +* Add support for reading selective GAPIC generation methods from service YAML ([b1c3ce8](https://github.com/googleapis/google-cloud-python/commit/b1c3ce8b271e9d22afabcde054e81dcedae6b0ef)) + ## [0.4.12](https://github.com/googleapis/google-cloud-python/compare/google-cloud-bigquery-biglake-v0.4.11...google-cloud-bigquery-biglake-v0.4.12) (2024-12-12) diff --git a/packages/google-cloud-bigquery-biglake/README.rst b/packages/google-cloud-bigquery-biglake/README.rst index 3a372d6ee079..8624eb4dd970 100644 --- a/packages/google-cloud-bigquery-biglake/README.rst +++ b/packages/google-cloud-bigquery-biglake/README.rst @@ -26,12 +26,12 @@ In order to use this library, you first need to go through the following steps: 1. `Select or create a Cloud Platform project.`_ 2. `Enable billing for your project.`_ 3. `Enable the BigLake API.`_ -4. `Setup Authentication.`_ +4. `Set up Authentication.`_ .. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project .. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project .. _Enable the BigLake API.: https://cloud.google.com/bigquery/docs/iceberg-tables#create-using-biglake-metastore -.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html +.. 
_Set up Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html Installation ~~~~~~~~~~~~ diff --git a/packages/google-cloud-bigquery-biglake/google/cloud/bigquery_biglake/gapic_version.py b/packages/google-cloud-bigquery-biglake/google/cloud/bigquery_biglake/gapic_version.py index db2ef16a95a4..9b19e5f10e00 100644 --- a/packages/google-cloud-bigquery-biglake/google/cloud/bigquery_biglake/gapic_version.py +++ b/packages/google-cloud-bigquery-biglake/google/cloud/bigquery_biglake/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.4.12" # {x-release-please-version} +__version__ = "0.4.13" # {x-release-please-version} diff --git a/packages/google-cloud-bigquery-biglake/google/cloud/bigquery_biglake_v1/gapic_version.py b/packages/google-cloud-bigquery-biglake/google/cloud/bigquery_biglake_v1/gapic_version.py index db2ef16a95a4..9b19e5f10e00 100644 --- a/packages/google-cloud-bigquery-biglake/google/cloud/bigquery_biglake_v1/gapic_version.py +++ b/packages/google-cloud-bigquery-biglake/google/cloud/bigquery_biglake_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.4.12" # {x-release-please-version} +__version__ = "0.4.13" # {x-release-please-version} diff --git a/packages/google-cloud-bigquery-biglake/google/cloud/bigquery_biglake_v1/services/metastore_service/client.py b/packages/google-cloud-bigquery-biglake/google/cloud/bigquery_biglake_v1/services/metastore_service/client.py index 6d2a4f2dfda5..f9c06970195b 100644 --- a/packages/google-cloud-bigquery-biglake/google/cloud/bigquery_biglake_v1/services/metastore_service/client.py +++ b/packages/google-cloud-bigquery-biglake/google/cloud/bigquery_biglake_v1/services/metastore_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -548,6 +550,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. 
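The `_add_cred_info_for_auth_errors` helper added to each client in this change only reacts to HTTP 401, 403 and 404 responses, and only when the credential object exposes `get_cred_info` (available in google-auth >= 2.35.0); every other error passes through untouched. A minimal sketch of that behaviour, mirroring the new unit tests in this change, follows; the top-level `ClientGatewaysServiceClient` import is an assumption about the package's exports, and the mock credential is purely illustrative.

    import json
    from unittest import mock

    from google.api_core import exceptions as core_exceptions
    # Assumed top-level export of the generated client.
    from google.cloud.beyondcorp_clientgateways_v1 import ClientGatewaysServiceClient

    # Same credential-info fixture the new tests use.
    CRED_INFO_JSON = {
        "credential_source": "/path/to/file",
        "credential_type": "service account credentials",
        "principal": "service-account@example.com",
    }

    cred = mock.Mock(["get_cred_info"])
    cred.get_cred_info = mock.Mock(return_value=CRED_INFO_JSON)

    client = ClientGatewaysServiceClient(credentials=cred)
    client._transport._credentials = cred

    # 403 is one of the auth error codes the helper reacts to: the
    # JSON-serialized credential info is appended to the error details.
    error = core_exceptions.GoogleAPICallError("message", details=["foo"])
    error.code = 403
    client._add_cred_info_for_auth_errors(error)
    assert error.details == ["foo", json.dumps(CRED_INFO_JSON)]

    # A non-auth code such as 500 leaves the details unchanged.
    error = core_exceptions.GoogleAPICallError("message", details=["foo"])
    error.code = 500
    client._add_cred_info_for_auth_errors(error)
    assert error.details == ["foo"]
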
diff --git a/packages/google-cloud-bigquery-biglake/google/cloud/bigquery_biglake_v1/services/metastore_service/transports/rest.py b/packages/google-cloud-bigquery-biglake/google/cloud/bigquery_biglake_v1/services/metastore_service/transports/rest.py index 699d40de7fa9..75f2e3a16901 100644 --- a/packages/google-cloud-bigquery-biglake/google/cloud/bigquery_biglake_v1/services/metastore_service/transports/rest.py +++ b/packages/google-cloud-bigquery-biglake/google/cloud/bigquery_biglake_v1/services/metastore_service/transports/rest.py @@ -209,12 +209,35 @@ def pre_create_catalog( def post_create_catalog(self, response: metastore.Catalog) -> metastore.Catalog: """Post-rpc interceptor for create_catalog - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_catalog_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the MetastoreService server but before - it is returned to user code. + it is returned to user code. This `post_create_catalog` interceptor runs + before the `post_create_catalog_with_metadata` interceptor. """ return response + def post_create_catalog_with_metadata( + self, + response: metastore.Catalog, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[metastore.Catalog, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_catalog + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the MetastoreService server but before it is returned to user code. + + We recommend only using this `post_create_catalog_with_metadata` + interceptor in new development instead of the `post_create_catalog` interceptor. + When both interceptors are used, this `post_create_catalog_with_metadata` interceptor runs after the + `post_create_catalog` interceptor. The (possibly modified) response returned by + `post_create_catalog` will be passed to + `post_create_catalog_with_metadata`. + """ + return response, metadata + def pre_create_database( self, request: metastore.CreateDatabaseRequest, @@ -232,12 +255,35 @@ def pre_create_database( def post_create_database(self, response: metastore.Database) -> metastore.Database: """Post-rpc interceptor for create_database - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_database_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the MetastoreService server but before - it is returned to user code. + it is returned to user code. This `post_create_database` interceptor runs + before the `post_create_database_with_metadata` interceptor. """ return response + def post_create_database_with_metadata( + self, + response: metastore.Database, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[metastore.Database, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_database + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the MetastoreService server but before it is returned to user code. + + We recommend only using this `post_create_database_with_metadata` + interceptor in new development instead of the `post_create_database` interceptor. + When both interceptors are used, this `post_create_database_with_metadata` interceptor runs after the + `post_create_database` interceptor. 
The (possibly modified) response returned by + `post_create_database` will be passed to + `post_create_database_with_metadata`. + """ + return response, metadata + def pre_create_table( self, request: metastore.CreateTableRequest, @@ -253,12 +299,35 @@ def pre_create_table( def post_create_table(self, response: metastore.Table) -> metastore.Table: """Post-rpc interceptor for create_table - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_table_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the MetastoreService server but before - it is returned to user code. + it is returned to user code. This `post_create_table` interceptor runs + before the `post_create_table_with_metadata` interceptor. """ return response + def post_create_table_with_metadata( + self, + response: metastore.Table, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[metastore.Table, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_table + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the MetastoreService server but before it is returned to user code. + + We recommend only using this `post_create_table_with_metadata` + interceptor in new development instead of the `post_create_table` interceptor. + When both interceptors are used, this `post_create_table_with_metadata` interceptor runs after the + `post_create_table` interceptor. The (possibly modified) response returned by + `post_create_table` will be passed to + `post_create_table_with_metadata`. + """ + return response, metadata + def pre_delete_catalog( self, request: metastore.DeleteCatalogRequest, @@ -274,12 +343,35 @@ def pre_delete_catalog( def post_delete_catalog(self, response: metastore.Catalog) -> metastore.Catalog: """Post-rpc interceptor for delete_catalog - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_catalog_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the MetastoreService server but before - it is returned to user code. + it is returned to user code. This `post_delete_catalog` interceptor runs + before the `post_delete_catalog_with_metadata` interceptor. """ return response + def post_delete_catalog_with_metadata( + self, + response: metastore.Catalog, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[metastore.Catalog, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_catalog + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the MetastoreService server but before it is returned to user code. + + We recommend only using this `post_delete_catalog_with_metadata` + interceptor in new development instead of the `post_delete_catalog` interceptor. + When both interceptors are used, this `post_delete_catalog_with_metadata` interceptor runs after the + `post_delete_catalog` interceptor. The (possibly modified) response returned by + `post_delete_catalog` will be passed to + `post_delete_catalog_with_metadata`. 
+ """ + return response, metadata + def pre_delete_database( self, request: metastore.DeleteDatabaseRequest, @@ -297,12 +389,35 @@ def pre_delete_database( def post_delete_database(self, response: metastore.Database) -> metastore.Database: """Post-rpc interceptor for delete_database - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_database_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the MetastoreService server but before - it is returned to user code. + it is returned to user code. This `post_delete_database` interceptor runs + before the `post_delete_database_with_metadata` interceptor. """ return response + def post_delete_database_with_metadata( + self, + response: metastore.Database, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[metastore.Database, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_database + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the MetastoreService server but before it is returned to user code. + + We recommend only using this `post_delete_database_with_metadata` + interceptor in new development instead of the `post_delete_database` interceptor. + When both interceptors are used, this `post_delete_database_with_metadata` interceptor runs after the + `post_delete_database` interceptor. The (possibly modified) response returned by + `post_delete_database` will be passed to + `post_delete_database_with_metadata`. + """ + return response, metadata + def pre_delete_table( self, request: metastore.DeleteTableRequest, @@ -318,12 +433,35 @@ def pre_delete_table( def post_delete_table(self, response: metastore.Table) -> metastore.Table: """Post-rpc interceptor for delete_table - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_table_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the MetastoreService server but before - it is returned to user code. + it is returned to user code. This `post_delete_table` interceptor runs + before the `post_delete_table_with_metadata` interceptor. """ return response + def post_delete_table_with_metadata( + self, + response: metastore.Table, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[metastore.Table, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_table + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the MetastoreService server but before it is returned to user code. + + We recommend only using this `post_delete_table_with_metadata` + interceptor in new development instead of the `post_delete_table` interceptor. + When both interceptors are used, this `post_delete_table_with_metadata` interceptor runs after the + `post_delete_table` interceptor. The (possibly modified) response returned by + `post_delete_table` will be passed to + `post_delete_table_with_metadata`. + """ + return response, metadata + def pre_get_catalog( self, request: metastore.GetCatalogRequest, @@ -339,12 +477,35 @@ def pre_get_catalog( def post_get_catalog(self, response: metastore.Catalog) -> metastore.Catalog: """Post-rpc interceptor for get_catalog - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_catalog_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response after it is returned by the MetastoreService server but before - it is returned to user code. + it is returned to user code. This `post_get_catalog` interceptor runs + before the `post_get_catalog_with_metadata` interceptor. """ return response + def post_get_catalog_with_metadata( + self, + response: metastore.Catalog, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[metastore.Catalog, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_catalog + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the MetastoreService server but before it is returned to user code. + + We recommend only using this `post_get_catalog_with_metadata` + interceptor in new development instead of the `post_get_catalog` interceptor. + When both interceptors are used, this `post_get_catalog_with_metadata` interceptor runs after the + `post_get_catalog` interceptor. The (possibly modified) response returned by + `post_get_catalog` will be passed to + `post_get_catalog_with_metadata`. + """ + return response, metadata + def pre_get_database( self, request: metastore.GetDatabaseRequest, @@ -360,12 +521,35 @@ def pre_get_database( def post_get_database(self, response: metastore.Database) -> metastore.Database: """Post-rpc interceptor for get_database - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_database_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the MetastoreService server but before - it is returned to user code. + it is returned to user code. This `post_get_database` interceptor runs + before the `post_get_database_with_metadata` interceptor. """ return response + def post_get_database_with_metadata( + self, + response: metastore.Database, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[metastore.Database, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_database + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the MetastoreService server but before it is returned to user code. + + We recommend only using this `post_get_database_with_metadata` + interceptor in new development instead of the `post_get_database` interceptor. + When both interceptors are used, this `post_get_database_with_metadata` interceptor runs after the + `post_get_database` interceptor. The (possibly modified) response returned by + `post_get_database` will be passed to + `post_get_database_with_metadata`. + """ + return response, metadata + def pre_get_table( self, request: metastore.GetTableRequest, @@ -381,12 +565,35 @@ def pre_get_table( def post_get_table(self, response: metastore.Table) -> metastore.Table: """Post-rpc interceptor for get_table - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_table_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the MetastoreService server but before - it is returned to user code. + it is returned to user code. This `post_get_table` interceptor runs + before the `post_get_table_with_metadata` interceptor. 
""" return response + def post_get_table_with_metadata( + self, + response: metastore.Table, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[metastore.Table, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_table + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the MetastoreService server but before it is returned to user code. + + We recommend only using this `post_get_table_with_metadata` + interceptor in new development instead of the `post_get_table` interceptor. + When both interceptors are used, this `post_get_table_with_metadata` interceptor runs after the + `post_get_table` interceptor. The (possibly modified) response returned by + `post_get_table` will be passed to + `post_get_table_with_metadata`. + """ + return response, metadata + def pre_list_catalogs( self, request: metastore.ListCatalogsRequest, @@ -404,12 +611,35 @@ def post_list_catalogs( ) -> metastore.ListCatalogsResponse: """Post-rpc interceptor for list_catalogs - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_catalogs_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the MetastoreService server but before - it is returned to user code. + it is returned to user code. This `post_list_catalogs` interceptor runs + before the `post_list_catalogs_with_metadata` interceptor. """ return response + def post_list_catalogs_with_metadata( + self, + response: metastore.ListCatalogsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[metastore.ListCatalogsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_catalogs + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the MetastoreService server but before it is returned to user code. + + We recommend only using this `post_list_catalogs_with_metadata` + interceptor in new development instead of the `post_list_catalogs` interceptor. + When both interceptors are used, this `post_list_catalogs_with_metadata` interceptor runs after the + `post_list_catalogs` interceptor. The (possibly modified) response returned by + `post_list_catalogs` will be passed to + `post_list_catalogs_with_metadata`. + """ + return response, metadata + def pre_list_databases( self, request: metastore.ListDatabasesRequest, @@ -427,12 +657,37 @@ def post_list_databases( ) -> metastore.ListDatabasesResponse: """Post-rpc interceptor for list_databases - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_databases_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the MetastoreService server but before - it is returned to user code. + it is returned to user code. This `post_list_databases` interceptor runs + before the `post_list_databases_with_metadata` interceptor. """ return response + def post_list_databases_with_metadata( + self, + response: metastore.ListDatabasesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + metastore.ListDatabasesResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_databases + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the MetastoreService server but before it is returned to user code. 
+ + We recommend only using this `post_list_databases_with_metadata` + interceptor in new development instead of the `post_list_databases` interceptor. + When both interceptors are used, this `post_list_databases_with_metadata` interceptor runs after the + `post_list_databases` interceptor. The (possibly modified) response returned by + `post_list_databases` will be passed to + `post_list_databases_with_metadata`. + """ + return response, metadata + def pre_list_tables( self, request: metastore.ListTablesRequest, @@ -450,12 +705,35 @@ def post_list_tables( ) -> metastore.ListTablesResponse: """Post-rpc interceptor for list_tables - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_tables_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the MetastoreService server but before - it is returned to user code. + it is returned to user code. This `post_list_tables` interceptor runs + before the `post_list_tables_with_metadata` interceptor. """ return response + def post_list_tables_with_metadata( + self, + response: metastore.ListTablesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[metastore.ListTablesResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_tables + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the MetastoreService server but before it is returned to user code. + + We recommend only using this `post_list_tables_with_metadata` + interceptor in new development instead of the `post_list_tables` interceptor. + When both interceptors are used, this `post_list_tables_with_metadata` interceptor runs after the + `post_list_tables` interceptor. The (possibly modified) response returned by + `post_list_tables` will be passed to + `post_list_tables_with_metadata`. + """ + return response, metadata + def pre_rename_table( self, request: metastore.RenameTableRequest, @@ -471,12 +749,35 @@ def pre_rename_table( def post_rename_table(self, response: metastore.Table) -> metastore.Table: """Post-rpc interceptor for rename_table - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_rename_table_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the MetastoreService server but before - it is returned to user code. + it is returned to user code. This `post_rename_table` interceptor runs + before the `post_rename_table_with_metadata` interceptor. """ return response + def post_rename_table_with_metadata( + self, + response: metastore.Table, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[metastore.Table, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for rename_table + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the MetastoreService server but before it is returned to user code. + + We recommend only using this `post_rename_table_with_metadata` + interceptor in new development instead of the `post_rename_table` interceptor. + When both interceptors are used, this `post_rename_table_with_metadata` interceptor runs after the + `post_rename_table` interceptor. The (possibly modified) response returned by + `post_rename_table` will be passed to + `post_rename_table_with_metadata`. 
+ """ + return response, metadata + def pre_update_database( self, request: metastore.UpdateDatabaseRequest, @@ -494,12 +795,35 @@ def pre_update_database( def post_update_database(self, response: metastore.Database) -> metastore.Database: """Post-rpc interceptor for update_database - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_database_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the MetastoreService server but before - it is returned to user code. + it is returned to user code. This `post_update_database` interceptor runs + before the `post_update_database_with_metadata` interceptor. """ return response + def post_update_database_with_metadata( + self, + response: metastore.Database, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[metastore.Database, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_database + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the MetastoreService server but before it is returned to user code. + + We recommend only using this `post_update_database_with_metadata` + interceptor in new development instead of the `post_update_database` interceptor. + When both interceptors are used, this `post_update_database_with_metadata` interceptor runs after the + `post_update_database` interceptor. The (possibly modified) response returned by + `post_update_database` will be passed to + `post_update_database_with_metadata`. + """ + return response, metadata + def pre_update_table( self, request: metastore.UpdateTableRequest, @@ -515,12 +839,35 @@ def pre_update_table( def post_update_table(self, response: metastore.Table) -> metastore.Table: """Post-rpc interceptor for update_table - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_table_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the MetastoreService server but before - it is returned to user code. + it is returned to user code. This `post_update_table` interceptor runs + before the `post_update_table_with_metadata` interceptor. """ return response + def post_update_table_with_metadata( + self, + response: metastore.Table, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[metastore.Table, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_table + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the MetastoreService server but before it is returned to user code. + + We recommend only using this `post_update_table_with_metadata` + interceptor in new development instead of the `post_update_table` interceptor. + When both interceptors are used, this `post_update_table_with_metadata` interceptor runs after the + `post_update_table` interceptor. The (possibly modified) response returned by + `post_update_table` will be passed to + `post_update_table_with_metadata`. 
+ """ + return response, metadata + @dataclasses.dataclass class MetastoreServiceRestStub: @@ -744,6 +1091,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_catalog(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_catalog_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -891,6 +1242,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_database(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_database_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1038,6 +1393,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_table(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_table_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1181,6 +1540,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_catalog(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_catalog_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1322,6 +1685,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_database(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_database_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1463,6 +1830,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_table(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_table_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1606,6 +1977,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_catalog(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_catalog_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1747,6 +2122,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_database(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_database_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1890,6 +2269,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = 
self._interceptor.post_get_table(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_table_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2033,6 +2416,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_catalogs(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_catalogs_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2176,6 +2563,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_databases(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_databases_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2319,6 +2710,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_tables(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_tables_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2466,6 +2861,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_rename_table(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_rename_table_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2613,6 +3012,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_database(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_database_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2760,6 +3163,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_table(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_table_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-bigquery-biglake/google/cloud/bigquery_biglake_v1alpha1/gapic_version.py b/packages/google-cloud-bigquery-biglake/google/cloud/bigquery_biglake_v1alpha1/gapic_version.py index db2ef16a95a4..9b19e5f10e00 100644 --- a/packages/google-cloud-bigquery-biglake/google/cloud/bigquery_biglake_v1alpha1/gapic_version.py +++ b/packages/google-cloud-bigquery-biglake/google/cloud/bigquery_biglake_v1alpha1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.4.12" # {x-release-please-version} +__version__ = "0.4.13" # {x-release-please-version} diff --git a/packages/google-cloud-bigquery-biglake/google/cloud/bigquery_biglake_v1alpha1/services/metastore_service/client.py b/packages/google-cloud-bigquery-biglake/google/cloud/bigquery_biglake_v1alpha1/services/metastore_service/client.py index 6e86c94fde80..f463ed5ae52a 100644 --- a/packages/google-cloud-bigquery-biglake/google/cloud/bigquery_biglake_v1alpha1/services/metastore_service/client.py +++ b/packages/google-cloud-bigquery-biglake/google/cloud/bigquery_biglake_v1alpha1/services/metastore_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -574,6 +576,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. diff --git a/packages/google-cloud-bigquery-biglake/google/cloud/bigquery_biglake_v1alpha1/services/metastore_service/transports/rest.py b/packages/google-cloud-bigquery-biglake/google/cloud/bigquery_biglake_v1alpha1/services/metastore_service/transports/rest.py index ec8ae6129f57..39c89fb90c57 100644 --- a/packages/google-cloud-bigquery-biglake/google/cloud/bigquery_biglake_v1alpha1/services/metastore_service/transports/rest.py +++ b/packages/google-cloud-bigquery-biglake/google/cloud/bigquery_biglake_v1alpha1/services/metastore_service/transports/rest.py @@ -238,12 +238,35 @@ def pre_check_lock( def post_check_lock(self, response: metastore.Lock) -> metastore.Lock: """Post-rpc interceptor for check_lock - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_check_lock_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the MetastoreService server but before - it is returned to user code. + it is returned to user code. This `post_check_lock` interceptor runs + before the `post_check_lock_with_metadata` interceptor. """ return response + def post_check_lock_with_metadata( + self, + response: metastore.Lock, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[metastore.Lock, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for check_lock + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the MetastoreService server but before it is returned to user code. + + We recommend only using this `post_check_lock_with_metadata` + interceptor in new development instead of the `post_check_lock` interceptor. 
+ When both interceptors are used, this `post_check_lock_with_metadata` interceptor runs after the + `post_check_lock` interceptor. The (possibly modified) response returned by + `post_check_lock` will be passed to + `post_check_lock_with_metadata`. + """ + return response, metadata + def pre_create_catalog( self, request: metastore.CreateCatalogRequest, @@ -259,12 +282,35 @@ def pre_create_catalog( def post_create_catalog(self, response: metastore.Catalog) -> metastore.Catalog: """Post-rpc interceptor for create_catalog - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_catalog_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the MetastoreService server but before - it is returned to user code. + it is returned to user code. This `post_create_catalog` interceptor runs + before the `post_create_catalog_with_metadata` interceptor. """ return response + def post_create_catalog_with_metadata( + self, + response: metastore.Catalog, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[metastore.Catalog, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_catalog + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the MetastoreService server but before it is returned to user code. + + We recommend only using this `post_create_catalog_with_metadata` + interceptor in new development instead of the `post_create_catalog` interceptor. + When both interceptors are used, this `post_create_catalog_with_metadata` interceptor runs after the + `post_create_catalog` interceptor. The (possibly modified) response returned by + `post_create_catalog` will be passed to + `post_create_catalog_with_metadata`. + """ + return response, metadata + def pre_create_database( self, request: metastore.CreateDatabaseRequest, @@ -282,12 +328,35 @@ def pre_create_database( def post_create_database(self, response: metastore.Database) -> metastore.Database: """Post-rpc interceptor for create_database - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_database_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the MetastoreService server but before - it is returned to user code. + it is returned to user code. This `post_create_database` interceptor runs + before the `post_create_database_with_metadata` interceptor. """ return response + def post_create_database_with_metadata( + self, + response: metastore.Database, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[metastore.Database, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_database + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the MetastoreService server but before it is returned to user code. + + We recommend only using this `post_create_database_with_metadata` + interceptor in new development instead of the `post_create_database` interceptor. + When both interceptors are used, this `post_create_database_with_metadata` interceptor runs after the + `post_create_database` interceptor. The (possibly modified) response returned by + `post_create_database` will be passed to + `post_create_database_with_metadata`. 
+ """ + return response, metadata + def pre_create_lock( self, request: metastore.CreateLockRequest, @@ -303,12 +372,35 @@ def pre_create_lock( def post_create_lock(self, response: metastore.Lock) -> metastore.Lock: """Post-rpc interceptor for create_lock - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_lock_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the MetastoreService server but before - it is returned to user code. + it is returned to user code. This `post_create_lock` interceptor runs + before the `post_create_lock_with_metadata` interceptor. """ return response + def post_create_lock_with_metadata( + self, + response: metastore.Lock, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[metastore.Lock, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_lock + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the MetastoreService server but before it is returned to user code. + + We recommend only using this `post_create_lock_with_metadata` + interceptor in new development instead of the `post_create_lock` interceptor. + When both interceptors are used, this `post_create_lock_with_metadata` interceptor runs after the + `post_create_lock` interceptor. The (possibly modified) response returned by + `post_create_lock` will be passed to + `post_create_lock_with_metadata`. + """ + return response, metadata + def pre_create_table( self, request: metastore.CreateTableRequest, @@ -324,12 +416,35 @@ def pre_create_table( def post_create_table(self, response: metastore.Table) -> metastore.Table: """Post-rpc interceptor for create_table - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_table_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the MetastoreService server but before - it is returned to user code. + it is returned to user code. This `post_create_table` interceptor runs + before the `post_create_table_with_metadata` interceptor. """ return response + def post_create_table_with_metadata( + self, + response: metastore.Table, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[metastore.Table, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_table + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the MetastoreService server but before it is returned to user code. + + We recommend only using this `post_create_table_with_metadata` + interceptor in new development instead of the `post_create_table` interceptor. + When both interceptors are used, this `post_create_table_with_metadata` interceptor runs after the + `post_create_table` interceptor. The (possibly modified) response returned by + `post_create_table` will be passed to + `post_create_table_with_metadata`. + """ + return response, metadata + def pre_delete_catalog( self, request: metastore.DeleteCatalogRequest, @@ -345,12 +460,35 @@ def pre_delete_catalog( def post_delete_catalog(self, response: metastore.Catalog) -> metastore.Catalog: """Post-rpc interceptor for delete_catalog - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_catalog_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response after it is returned by the MetastoreService server but before - it is returned to user code. + it is returned to user code. This `post_delete_catalog` interceptor runs + before the `post_delete_catalog_with_metadata` interceptor. """ return response + def post_delete_catalog_with_metadata( + self, + response: metastore.Catalog, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[metastore.Catalog, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_catalog + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the MetastoreService server but before it is returned to user code. + + We recommend only using this `post_delete_catalog_with_metadata` + interceptor in new development instead of the `post_delete_catalog` interceptor. + When both interceptors are used, this `post_delete_catalog_with_metadata` interceptor runs after the + `post_delete_catalog` interceptor. The (possibly modified) response returned by + `post_delete_catalog` will be passed to + `post_delete_catalog_with_metadata`. + """ + return response, metadata + def pre_delete_database( self, request: metastore.DeleteDatabaseRequest, @@ -368,12 +506,35 @@ def pre_delete_database( def post_delete_database(self, response: metastore.Database) -> metastore.Database: """Post-rpc interceptor for delete_database - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_database_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the MetastoreService server but before - it is returned to user code. + it is returned to user code. This `post_delete_database` interceptor runs + before the `post_delete_database_with_metadata` interceptor. """ return response + def post_delete_database_with_metadata( + self, + response: metastore.Database, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[metastore.Database, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_database + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the MetastoreService server but before it is returned to user code. + + We recommend only using this `post_delete_database_with_metadata` + interceptor in new development instead of the `post_delete_database` interceptor. + When both interceptors are used, this `post_delete_database_with_metadata` interceptor runs after the + `post_delete_database` interceptor. The (possibly modified) response returned by + `post_delete_database` will be passed to + `post_delete_database_with_metadata`. + """ + return response, metadata + def pre_delete_lock( self, request: metastore.DeleteLockRequest, @@ -401,12 +562,35 @@ def pre_delete_table( def post_delete_table(self, response: metastore.Table) -> metastore.Table: """Post-rpc interceptor for delete_table - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_table_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the MetastoreService server but before - it is returned to user code. + it is returned to user code. This `post_delete_table` interceptor runs + before the `post_delete_table_with_metadata` interceptor. 
""" return response + def post_delete_table_with_metadata( + self, + response: metastore.Table, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[metastore.Table, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_table + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the MetastoreService server but before it is returned to user code. + + We recommend only using this `post_delete_table_with_metadata` + interceptor in new development instead of the `post_delete_table` interceptor. + When both interceptors are used, this `post_delete_table_with_metadata` interceptor runs after the + `post_delete_table` interceptor. The (possibly modified) response returned by + `post_delete_table` will be passed to + `post_delete_table_with_metadata`. + """ + return response, metadata + def pre_get_catalog( self, request: metastore.GetCatalogRequest, @@ -422,12 +606,35 @@ def pre_get_catalog( def post_get_catalog(self, response: metastore.Catalog) -> metastore.Catalog: """Post-rpc interceptor for get_catalog - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_catalog_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the MetastoreService server but before - it is returned to user code. + it is returned to user code. This `post_get_catalog` interceptor runs + before the `post_get_catalog_with_metadata` interceptor. """ return response + def post_get_catalog_with_metadata( + self, + response: metastore.Catalog, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[metastore.Catalog, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_catalog + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the MetastoreService server but before it is returned to user code. + + We recommend only using this `post_get_catalog_with_metadata` + interceptor in new development instead of the `post_get_catalog` interceptor. + When both interceptors are used, this `post_get_catalog_with_metadata` interceptor runs after the + `post_get_catalog` interceptor. The (possibly modified) response returned by + `post_get_catalog` will be passed to + `post_get_catalog_with_metadata`. + """ + return response, metadata + def pre_get_database( self, request: metastore.GetDatabaseRequest, @@ -443,12 +650,35 @@ def pre_get_database( def post_get_database(self, response: metastore.Database) -> metastore.Database: """Post-rpc interceptor for get_database - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_database_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the MetastoreService server but before - it is returned to user code. + it is returned to user code. This `post_get_database` interceptor runs + before the `post_get_database_with_metadata` interceptor. """ return response + def post_get_database_with_metadata( + self, + response: metastore.Database, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[metastore.Database, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_database + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the MetastoreService server but before it is returned to user code. 
+ + We recommend only using this `post_get_database_with_metadata` + interceptor in new development instead of the `post_get_database` interceptor. + When both interceptors are used, this `post_get_database_with_metadata` interceptor runs after the + `post_get_database` interceptor. The (possibly modified) response returned by + `post_get_database` will be passed to + `post_get_database_with_metadata`. + """ + return response, metadata + def pre_get_table( self, request: metastore.GetTableRequest, @@ -464,12 +694,35 @@ def pre_get_table( def post_get_table(self, response: metastore.Table) -> metastore.Table: """Post-rpc interceptor for get_table - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_table_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the MetastoreService server but before - it is returned to user code. + it is returned to user code. This `post_get_table` interceptor runs + before the `post_get_table_with_metadata` interceptor. """ return response + def post_get_table_with_metadata( + self, + response: metastore.Table, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[metastore.Table, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_table + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the MetastoreService server but before it is returned to user code. + + We recommend only using this `post_get_table_with_metadata` + interceptor in new development instead of the `post_get_table` interceptor. + When both interceptors are used, this `post_get_table_with_metadata` interceptor runs after the + `post_get_table` interceptor. The (possibly modified) response returned by + `post_get_table` will be passed to + `post_get_table_with_metadata`. + """ + return response, metadata + def pre_list_catalogs( self, request: metastore.ListCatalogsRequest, @@ -487,12 +740,35 @@ def post_list_catalogs( ) -> metastore.ListCatalogsResponse: """Post-rpc interceptor for list_catalogs - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_catalogs_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the MetastoreService server but before - it is returned to user code. + it is returned to user code. This `post_list_catalogs` interceptor runs + before the `post_list_catalogs_with_metadata` interceptor. """ return response + def post_list_catalogs_with_metadata( + self, + response: metastore.ListCatalogsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[metastore.ListCatalogsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_catalogs + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the MetastoreService server but before it is returned to user code. + + We recommend only using this `post_list_catalogs_with_metadata` + interceptor in new development instead of the `post_list_catalogs` interceptor. + When both interceptors are used, this `post_list_catalogs_with_metadata` interceptor runs after the + `post_list_catalogs` interceptor. The (possibly modified) response returned by + `post_list_catalogs` will be passed to + `post_list_catalogs_with_metadata`. 
+ """ + return response, metadata + def pre_list_databases( self, request: metastore.ListDatabasesRequest, @@ -510,12 +786,37 @@ def post_list_databases( ) -> metastore.ListDatabasesResponse: """Post-rpc interceptor for list_databases - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_databases_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the MetastoreService server but before - it is returned to user code. + it is returned to user code. This `post_list_databases` interceptor runs + before the `post_list_databases_with_metadata` interceptor. """ return response + def post_list_databases_with_metadata( + self, + response: metastore.ListDatabasesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + metastore.ListDatabasesResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_databases + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the MetastoreService server but before it is returned to user code. + + We recommend only using this `post_list_databases_with_metadata` + interceptor in new development instead of the `post_list_databases` interceptor. + When both interceptors are used, this `post_list_databases_with_metadata` interceptor runs after the + `post_list_databases` interceptor. The (possibly modified) response returned by + `post_list_databases` will be passed to + `post_list_databases_with_metadata`. + """ + return response, metadata + def pre_list_locks( self, request: metastore.ListLocksRequest, @@ -533,12 +834,35 @@ def post_list_locks( ) -> metastore.ListLocksResponse: """Post-rpc interceptor for list_locks - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_locks_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the MetastoreService server but before - it is returned to user code. + it is returned to user code. This `post_list_locks` interceptor runs + before the `post_list_locks_with_metadata` interceptor. """ return response + def post_list_locks_with_metadata( + self, + response: metastore.ListLocksResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[metastore.ListLocksResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_locks + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the MetastoreService server but before it is returned to user code. + + We recommend only using this `post_list_locks_with_metadata` + interceptor in new development instead of the `post_list_locks` interceptor. + When both interceptors are used, this `post_list_locks_with_metadata` interceptor runs after the + `post_list_locks` interceptor. The (possibly modified) response returned by + `post_list_locks` will be passed to + `post_list_locks_with_metadata`. + """ + return response, metadata + def pre_list_tables( self, request: metastore.ListTablesRequest, @@ -556,12 +880,35 @@ def post_list_tables( ) -> metastore.ListTablesResponse: """Post-rpc interceptor for list_tables - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_tables_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the MetastoreService server but before - it is returned to user code. 
+ it is returned to user code. This `post_list_tables` interceptor runs + before the `post_list_tables_with_metadata` interceptor. """ return response + def post_list_tables_with_metadata( + self, + response: metastore.ListTablesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[metastore.ListTablesResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_tables + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the MetastoreService server but before it is returned to user code. + + We recommend only using this `post_list_tables_with_metadata` + interceptor in new development instead of the `post_list_tables` interceptor. + When both interceptors are used, this `post_list_tables_with_metadata` interceptor runs after the + `post_list_tables` interceptor. The (possibly modified) response returned by + `post_list_tables` will be passed to + `post_list_tables_with_metadata`. + """ + return response, metadata + def pre_rename_table( self, request: metastore.RenameTableRequest, @@ -577,12 +924,35 @@ def pre_rename_table( def post_rename_table(self, response: metastore.Table) -> metastore.Table: """Post-rpc interceptor for rename_table - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_rename_table_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the MetastoreService server but before - it is returned to user code. + it is returned to user code. This `post_rename_table` interceptor runs + before the `post_rename_table_with_metadata` interceptor. """ return response + def post_rename_table_with_metadata( + self, + response: metastore.Table, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[metastore.Table, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for rename_table + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the MetastoreService server but before it is returned to user code. + + We recommend only using this `post_rename_table_with_metadata` + interceptor in new development instead of the `post_rename_table` interceptor. + When both interceptors are used, this `post_rename_table_with_metadata` interceptor runs after the + `post_rename_table` interceptor. The (possibly modified) response returned by + `post_rename_table` will be passed to + `post_rename_table_with_metadata`. + """ + return response, metadata + def pre_update_database( self, request: metastore.UpdateDatabaseRequest, @@ -600,12 +970,35 @@ def pre_update_database( def post_update_database(self, response: metastore.Database) -> metastore.Database: """Post-rpc interceptor for update_database - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_database_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the MetastoreService server but before - it is returned to user code. + it is returned to user code. This `post_update_database` interceptor runs + before the `post_update_database_with_metadata` interceptor. 
""" return response + def post_update_database_with_metadata( + self, + response: metastore.Database, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[metastore.Database, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_database + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the MetastoreService server but before it is returned to user code. + + We recommend only using this `post_update_database_with_metadata` + interceptor in new development instead of the `post_update_database` interceptor. + When both interceptors are used, this `post_update_database_with_metadata` interceptor runs after the + `post_update_database` interceptor. The (possibly modified) response returned by + `post_update_database` will be passed to + `post_update_database_with_metadata`. + """ + return response, metadata + def pre_update_table( self, request: metastore.UpdateTableRequest, @@ -621,12 +1014,35 @@ def pre_update_table( def post_update_table(self, response: metastore.Table) -> metastore.Table: """Post-rpc interceptor for update_table - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_table_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the MetastoreService server but before - it is returned to user code. + it is returned to user code. This `post_update_table` interceptor runs + before the `post_update_table_with_metadata` interceptor. """ return response + def post_update_table_with_metadata( + self, + response: metastore.Table, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[metastore.Table, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_table + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the MetastoreService server but before it is returned to user code. + + We recommend only using this `post_update_table_with_metadata` + interceptor in new development instead of the `post_update_table` interceptor. + When both interceptors are used, this `post_update_table_with_metadata` interceptor runs after the + `post_update_table` interceptor. The (possibly modified) response returned by + `post_update_table` will be passed to + `post_update_table_with_metadata`. 
+ """ + return response, metadata + @dataclasses.dataclass class MetastoreServiceRestStub: @@ -848,6 +1264,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_check_lock(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_check_lock_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -997,6 +1417,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_catalog(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_catalog_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1144,6 +1568,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_database(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_database_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1291,6 +1719,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_lock(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_lock_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1438,6 +1870,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_table(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_table_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1581,6 +2017,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_catalog(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_catalog_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1722,6 +2162,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_database(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_database_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1970,6 +2414,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_table(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_table_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2113,6 +2561,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = 
self._interceptor.post_get_catalog(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_catalog_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2254,6 +2706,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_database(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_database_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2397,6 +2853,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_table(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_table_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2540,6 +3000,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_catalogs(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_catalogs_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2683,6 +3147,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_databases(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_databases_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2826,6 +3294,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_locks(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_locks_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2969,6 +3441,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_tables(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_tables_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3116,6 +3592,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_rename_table(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_rename_table_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3263,6 +3743,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_database(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_database_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED 
and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3410,6 +3894,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_table(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_table_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-bigquery-biglake/noxfile.py b/packages/google-cloud-bigquery-biglake/noxfile.py index a9ceef47133c..0acc836b384e 100644 --- a/packages/google-cloud-bigquery-biglake/noxfile.py +++ b/packages/google-cloud-bigquery-biglake/noxfile.py @@ -382,20 +382,29 @@ def docfx(session): ["python", "upb", "cpp"], ) def prerelease_deps(session, protobuf_implementation): - """Run all tests with prerelease versions of dependencies installed.""" + """ + Run all tests with pre-release versions of dependencies installed + rather than the standard non-pre-release versions. + Pre-release versions can be installed using + `pip install --pre `. + """ if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): session.skip("cpp implementation is not supported in python 3.11+") # Install all dependencies - session.install("-e", ".[all, tests, tracing]") + session.install("-e", ".") + unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES + # Install dependencies for the unit test environment session.install(*unit_deps_all) + system_deps_all = ( SYSTEM_TEST_STANDARD_DEPENDENCIES + SYSTEM_TEST_EXTERNAL_DEPENDENCIES + SYSTEM_TEST_EXTRAS ) + # Install dependencies for the system test environment session.install(*system_deps_all) # Because we test minimum dependency versions on the minimum Python @@ -417,6 +426,7 @@ def prerelease_deps(session, protobuf_implementation): ) ] + # Install dependencies specified in `testing/constraints-X.txt`. session.install(*constraints_deps) prerel_deps = [ @@ -458,3 +468,70 @@ def prerelease_deps(session, protobuf_implementation): "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, }, ) + + +@nox.session(python="3.13") +@nox.parametrize( + "protobuf_implementation", + ["python", "upb"], +) +def core_deps_from_source(session, protobuf_implementation): + """Run all tests with local versions of core dependencies installed, + rather than pulling core dependencies from PyPI. + """ + + # Install all dependencies + session.install(".") + + # Install dependencies for the unit test environment + unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES + session.install(*unit_deps_all) + + # Install dependencies for the system test environment + system_deps_all = ( + SYSTEM_TEST_STANDARD_DEPENDENCIES + + SYSTEM_TEST_EXTERNAL_DEPENDENCIES + + SYSTEM_TEST_EXTRAS + ) + session.install(*system_deps_all) + + # Because we test minimum dependency versions on the minimum Python + # version, the first version we test with in the unit test sessions has a + # constraints file containing all dependencies and extras that should be installed. + with open( + CURRENT_DIRECTORY + / "testing" + / f"constraints-{UNIT_TEST_PYTHON_VERSIONS[0]}.txt", + encoding="utf-8", + ) as constraints_file: + constraints_text = constraints_file.read() + + # Ignore leading whitespace and comment lines.
+ constraints_deps = [ + match.group(1) + for match in re.finditer( + r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE + ) + ] + + # Install dependencies specified in `testing/constraints-X.txt`. + session.install(*constraints_deps) + + core_dependencies_from_source = [ + "google-api-core @ git+https://github.com/googleapis/python-api-core.git", + "google-auth @ git+https://github.com/googleapis/google-auth-library-python.git", + f"{CURRENT_DIRECTORY}/../googleapis-common-protos", + f"{CURRENT_DIRECTORY}/../grpc-google-iam-v1", + "proto-plus @ git+https://github.com/googleapis/proto-plus-python.git", + ] + + for dep in core_dependencies_from_source: + session.install(dep, "--ignore-installed", "--no-deps") + + session.run( + "py.test", + "tests/unit", + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, + ) diff --git a/packages/google-cloud-bigquery-biglake/samples/generated_samples/snippet_metadata_google.cloud.bigquery.biglake.v1.json b/packages/google-cloud-bigquery-biglake/samples/generated_samples/snippet_metadata_google.cloud.bigquery.biglake.v1.json index 4a2eb396e18f..5bc6d5da5ed5 100644 --- a/packages/google-cloud-bigquery-biglake/samples/generated_samples/snippet_metadata_google.cloud.bigquery.biglake.v1.json +++ b/packages/google-cloud-bigquery-biglake/samples/generated_samples/snippet_metadata_google.cloud.bigquery.biglake.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-bigquery-biglake", - "version": "0.4.12" + "version": "0.4.13" }, "snippets": [ { diff --git a/packages/google-cloud-bigquery-biglake/samples/generated_samples/snippet_metadata_google.cloud.bigquery.biglake.v1alpha1.json b/packages/google-cloud-bigquery-biglake/samples/generated_samples/snippet_metadata_google.cloud.bigquery.biglake.v1alpha1.json index 586f4d04788e..2fdf41e2dd19 100644 --- a/packages/google-cloud-bigquery-biglake/samples/generated_samples/snippet_metadata_google.cloud.bigquery.biglake.v1alpha1.json +++ b/packages/google-cloud-bigquery-biglake/samples/generated_samples/snippet_metadata_google.cloud.bigquery.biglake.v1alpha1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-bigquery-biglake", - "version": "0.4.12" + "version": "0.4.13" }, "snippets": [ { diff --git a/packages/google-cloud-bigquery-biglake/tests/unit/gapic/bigquery_biglake_v1/test_metastore_service.py b/packages/google-cloud-bigquery-biglake/tests/unit/gapic/bigquery_biglake_v1/test_metastore_service.py index 798b51c04874..fe1a6a6b194a 100644 --- a/packages/google-cloud-bigquery-biglake/tests/unit/gapic/bigquery_biglake_v1/test_metastore_service.py +++ b/packages/google-cloud-bigquery-biglake/tests/unit/gapic/bigquery_biglake_v1/test_metastore_service.py @@ -62,6 +62,13 @@ ) from google.cloud.bigquery_biglake_v1.types import metastore +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -321,6 +328,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
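The parametrized tests that follow exercise the new `_add_cred_info_for_auth_errors` client helper; its implementation appears verbatim later in this diff for the connection service client. The sketch below restates that behavior in isolation, assuming only that the credentials object may expose `get_cred_info()` (available in google-auth>=2.35.0). The free function and the explicit `credentials` parameter are illustrative; the generated client reads credentials from `self._transport._credentials`.

import json
from http import HTTPStatus

from google.api_core import exceptions as core_exceptions


def add_cred_info_for_auth_errors(
    error: core_exceptions.GoogleAPICallError, credentials
) -> None:
    """Append JSON-serialized credential info to 401/403/404 error details."""
    # Only 401/403/404 responses get the extra credential detail.
    if error.code not in (
        HTTPStatus.UNAUTHORIZED,
        HTTPStatus.FORBIDDEN,
        HTTPStatus.NOT_FOUND,
    ):
        return
    # get_cred_info is only available in google-auth>=2.35.0.
    if not hasattr(credentials, "get_cred_info"):
        return
    cred_info = credentials.get_cred_info()
    if cred_info and hasattr(error._details, "append"):
        error._details.append(json.dumps(cred_info))

For any other status code, or when `get_cred_info()` is absent or returns nothing, the error details are left unchanged, which is what the 500 and missing-`get_cred_info` test cases below assert.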
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = MetastoreServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = MetastoreServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -10777,10 +10827,13 @@ def test_create_catalog_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.MetastoreServiceRestInterceptor, "post_create_catalog" ) as post, mock.patch.object( + transports.MetastoreServiceRestInterceptor, "post_create_catalog_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.MetastoreServiceRestInterceptor, "pre_create_catalog" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = metastore.CreateCatalogRequest.pb(metastore.CreateCatalogRequest()) transcode.return_value = { "method": "post", @@ -10802,6 +10855,7 @@ def test_create_catalog_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = metastore.Catalog() + post_with_metadata.return_value = metastore.Catalog(), metadata client.create_catalog( request, @@ -10813,6 +10867,7 @@ def test_create_catalog_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_catalog_rest_bad_request(request_type=metastore.DeleteCatalogRequest): @@ -10895,10 +10950,13 @@ def test_delete_catalog_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.MetastoreServiceRestInterceptor, "post_delete_catalog" ) as post, mock.patch.object( + transports.MetastoreServiceRestInterceptor, "post_delete_catalog_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.MetastoreServiceRestInterceptor, "pre_delete_catalog" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = metastore.DeleteCatalogRequest.pb(metastore.DeleteCatalogRequest()) transcode.return_value = { "method": "post", @@ -10920,6 +10978,7 @@ def test_delete_catalog_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = metastore.Catalog() + post_with_metadata.return_value = metastore.Catalog(), metadata client.delete_catalog( request, @@ -10931,6 +10990,7 @@ def test_delete_catalog_rest_interceptors(null_interceptor): pre.assert_called_once() 
post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_catalog_rest_bad_request(request_type=metastore.GetCatalogRequest): @@ -11013,10 +11073,13 @@ def test_get_catalog_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.MetastoreServiceRestInterceptor, "post_get_catalog" ) as post, mock.patch.object( + transports.MetastoreServiceRestInterceptor, "post_get_catalog_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.MetastoreServiceRestInterceptor, "pre_get_catalog" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = metastore.GetCatalogRequest.pb(metastore.GetCatalogRequest()) transcode.return_value = { "method": "post", @@ -11038,6 +11101,7 @@ def test_get_catalog_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = metastore.Catalog() + post_with_metadata.return_value = metastore.Catalog(), metadata client.get_catalog( request, @@ -11049,6 +11113,7 @@ def test_get_catalog_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_catalogs_rest_bad_request(request_type=metastore.ListCatalogsRequest): @@ -11131,10 +11196,13 @@ def test_list_catalogs_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.MetastoreServiceRestInterceptor, "post_list_catalogs" ) as post, mock.patch.object( + transports.MetastoreServiceRestInterceptor, "post_list_catalogs_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.MetastoreServiceRestInterceptor, "pre_list_catalogs" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = metastore.ListCatalogsRequest.pb(metastore.ListCatalogsRequest()) transcode.return_value = { "method": "post", @@ -11158,6 +11226,7 @@ def test_list_catalogs_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = metastore.ListCatalogsResponse() + post_with_metadata.return_value = metastore.ListCatalogsResponse(), metadata client.list_catalogs( request, @@ -11169,6 +11238,7 @@ def test_list_catalogs_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_database_rest_bad_request(request_type=metastore.CreateDatabaseRequest): @@ -11329,10 +11399,13 @@ def test_create_database_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.MetastoreServiceRestInterceptor, "post_create_database" ) as post, mock.patch.object( + transports.MetastoreServiceRestInterceptor, "post_create_database_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.MetastoreServiceRestInterceptor, "pre_create_database" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = metastore.CreateDatabaseRequest.pb( metastore.CreateDatabaseRequest() ) @@ -11356,6 +11429,7 @@ def test_create_database_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = metastore.Database() + post_with_metadata.return_value = metastore.Database(), metadata client.create_database( request, @@ -11367,6 +11441,7 @@ def test_create_database_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def 
test_delete_database_rest_bad_request(request_type=metastore.DeleteDatabaseRequest): @@ -11455,10 +11530,13 @@ def test_delete_database_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.MetastoreServiceRestInterceptor, "post_delete_database" ) as post, mock.patch.object( + transports.MetastoreServiceRestInterceptor, "post_delete_database_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.MetastoreServiceRestInterceptor, "pre_delete_database" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = metastore.DeleteDatabaseRequest.pb( metastore.DeleteDatabaseRequest() ) @@ -11482,6 +11560,7 @@ def test_delete_database_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = metastore.Database() + post_with_metadata.return_value = metastore.Database(), metadata client.delete_database( request, @@ -11493,6 +11572,7 @@ def test_delete_database_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_database_rest_bad_request(request_type=metastore.UpdateDatabaseRequest): @@ -11661,10 +11741,13 @@ def test_update_database_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.MetastoreServiceRestInterceptor, "post_update_database" ) as post, mock.patch.object( + transports.MetastoreServiceRestInterceptor, "post_update_database_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.MetastoreServiceRestInterceptor, "pre_update_database" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = metastore.UpdateDatabaseRequest.pb( metastore.UpdateDatabaseRequest() ) @@ -11688,6 +11771,7 @@ def test_update_database_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = metastore.Database() + post_with_metadata.return_value = metastore.Database(), metadata client.update_database( request, @@ -11699,6 +11783,7 @@ def test_update_database_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_database_rest_bad_request(request_type=metastore.GetDatabaseRequest): @@ -11787,10 +11872,13 @@ def test_get_database_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.MetastoreServiceRestInterceptor, "post_get_database" ) as post, mock.patch.object( + transports.MetastoreServiceRestInterceptor, "post_get_database_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.MetastoreServiceRestInterceptor, "pre_get_database" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = metastore.GetDatabaseRequest.pb(metastore.GetDatabaseRequest()) transcode.return_value = { "method": "post", @@ -11812,6 +11900,7 @@ def test_get_database_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = metastore.Database() + post_with_metadata.return_value = metastore.Database(), metadata client.get_database( request, @@ -11823,6 +11912,7 @@ def test_get_database_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_databases_rest_bad_request(request_type=metastore.ListDatabasesRequest): @@ -11905,10 +11995,13 @@ def 
test_list_databases_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.MetastoreServiceRestInterceptor, "post_list_databases" ) as post, mock.patch.object( + transports.MetastoreServiceRestInterceptor, "post_list_databases_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.MetastoreServiceRestInterceptor, "pre_list_databases" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = metastore.ListDatabasesRequest.pb(metastore.ListDatabasesRequest()) transcode.return_value = { "method": "post", @@ -11932,6 +12025,7 @@ def test_list_databases_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = metastore.ListDatabasesResponse() + post_with_metadata.return_value = metastore.ListDatabasesResponse(), metadata client.list_databases( request, @@ -11943,6 +12037,7 @@ def test_list_databases_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_table_rest_bad_request(request_type=metastore.CreateTableRequest): @@ -12119,10 +12214,13 @@ def test_create_table_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.MetastoreServiceRestInterceptor, "post_create_table" ) as post, mock.patch.object( + transports.MetastoreServiceRestInterceptor, "post_create_table_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.MetastoreServiceRestInterceptor, "pre_create_table" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = metastore.CreateTableRequest.pb(metastore.CreateTableRequest()) transcode.return_value = { "method": "post", @@ -12144,6 +12242,7 @@ def test_create_table_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = metastore.Table() + post_with_metadata.return_value = metastore.Table(), metadata client.create_table( request, @@ -12155,6 +12254,7 @@ def test_create_table_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_table_rest_bad_request(request_type=metastore.DeleteTableRequest): @@ -12245,10 +12345,13 @@ def test_delete_table_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.MetastoreServiceRestInterceptor, "post_delete_table" ) as post, mock.patch.object( + transports.MetastoreServiceRestInterceptor, "post_delete_table_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.MetastoreServiceRestInterceptor, "pre_delete_table" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = metastore.DeleteTableRequest.pb(metastore.DeleteTableRequest()) transcode.return_value = { "method": "post", @@ -12270,6 +12373,7 @@ def test_delete_table_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = metastore.Table() + post_with_metadata.return_value = metastore.Table(), metadata client.delete_table( request, @@ -12281,6 +12385,7 @@ def test_delete_table_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_table_rest_bad_request(request_type=metastore.UpdateTableRequest): @@ -12461,10 +12566,13 @@ def test_update_table_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( 
transports.MetastoreServiceRestInterceptor, "post_update_table" ) as post, mock.patch.object( + transports.MetastoreServiceRestInterceptor, "post_update_table_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.MetastoreServiceRestInterceptor, "pre_update_table" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = metastore.UpdateTableRequest.pb(metastore.UpdateTableRequest()) transcode.return_value = { "method": "post", @@ -12486,6 +12594,7 @@ def test_update_table_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = metastore.Table() + post_with_metadata.return_value = metastore.Table(), metadata client.update_table( request, @@ -12497,6 +12606,7 @@ def test_update_table_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_rename_table_rest_bad_request(request_type=metastore.RenameTableRequest): @@ -12587,10 +12697,13 @@ def test_rename_table_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.MetastoreServiceRestInterceptor, "post_rename_table" ) as post, mock.patch.object( + transports.MetastoreServiceRestInterceptor, "post_rename_table_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.MetastoreServiceRestInterceptor, "pre_rename_table" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = metastore.RenameTableRequest.pb(metastore.RenameTableRequest()) transcode.return_value = { "method": "post", @@ -12612,6 +12725,7 @@ def test_rename_table_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = metastore.Table() + post_with_metadata.return_value = metastore.Table(), metadata client.rename_table( request, @@ -12623,6 +12737,7 @@ def test_rename_table_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_table_rest_bad_request(request_type=metastore.GetTableRequest): @@ -12713,10 +12828,13 @@ def test_get_table_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.MetastoreServiceRestInterceptor, "post_get_table" ) as post, mock.patch.object( + transports.MetastoreServiceRestInterceptor, "post_get_table_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.MetastoreServiceRestInterceptor, "pre_get_table" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = metastore.GetTableRequest.pb(metastore.GetTableRequest()) transcode.return_value = { "method": "post", @@ -12738,6 +12856,7 @@ def test_get_table_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = metastore.Table() + post_with_metadata.return_value = metastore.Table(), metadata client.get_table( request, @@ -12749,6 +12868,7 @@ def test_get_table_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_tables_rest_bad_request(request_type=metastore.ListTablesRequest): @@ -12835,10 +12955,13 @@ def test_list_tables_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.MetastoreServiceRestInterceptor, "post_list_tables" ) as post, mock.patch.object( + transports.MetastoreServiceRestInterceptor, "post_list_tables_with_metadata" + ) 
as post_with_metadata, mock.patch.object( transports.MetastoreServiceRestInterceptor, "pre_list_tables" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = metastore.ListTablesRequest.pb(metastore.ListTablesRequest()) transcode.return_value = { "method": "post", @@ -12862,6 +12985,7 @@ def test_list_tables_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = metastore.ListTablesResponse() + post_with_metadata.return_value = metastore.ListTablesResponse(), metadata client.list_tables( request, @@ -12873,6 +12997,7 @@ def test_list_tables_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_initialize_client_w_rest(): diff --git a/packages/google-cloud-bigquery-biglake/tests/unit/gapic/bigquery_biglake_v1alpha1/test_metastore_service.py b/packages/google-cloud-bigquery-biglake/tests/unit/gapic/bigquery_biglake_v1alpha1/test_metastore_service.py index d65fbaf8c79f..35645d2bd512 100644 --- a/packages/google-cloud-bigquery-biglake/tests/unit/gapic/bigquery_biglake_v1alpha1/test_metastore_service.py +++ b/packages/google-cloud-bigquery-biglake/tests/unit/gapic/bigquery_biglake_v1alpha1/test_metastore_service.py @@ -62,6 +62,13 @@ ) from google.cloud.bigquery_biglake_v1alpha1.types import metastore +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -321,6 +328,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
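The `*_rest_interceptors` tests above patch both the legacy `post_*` hook and the new `post_*_with_metadata` hook for each RPC and assert that each is invoked once per call. The sketch below shows how a subclass would consume the new hook to read the HTTP response headers that the REST stubs now forward as metadata; the transport and client wiring follows the usual generated GAPIC pattern and is an assumption here, since only the interceptor and test changes appear in this diff.

from typing import Sequence, Tuple, Union

from google.cloud.bigquery_biglake_v1.services.metastore_service import (
    MetastoreServiceClient,
    transports,
)
from google.cloud.bigquery_biglake_v1.types import metastore


class HeaderReadingInterceptor(transports.MetastoreServiceRestInterceptor):
    def post_get_table_with_metadata(
        self,
        response: metastore.Table,
        metadata: Sequence[Tuple[str, Union[str, bytes]]],
    ) -> Tuple[metastore.Table, Sequence[Tuple[str, Union[str, bytes]]]]:
        # `metadata` carries the HTTP response headers as (key, value) pairs,
        # built by the transport from `response.headers`.
        for key, value in metadata:
            print(f"{key}: {value}")
        # Returning the pair unchanged preserves the default behavior.
        return response, metadata


# Assumed wiring (standard GAPIC REST pattern, not shown in this diff):
transport = transports.MetastoreServiceRestTransport(
    interceptor=HeaderReadingInterceptor()
)
client = MetastoreServiceClient(transport=transport)

Because `post_get_table` still runs first and its return value feeds `post_get_table_with_metadata`, existing interceptor subclasses keep working unchanged; the deprecation notes in the docstrings only steer new code toward the metadata-aware hook.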
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = MetastoreServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = MetastoreServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -13262,10 +13312,13 @@ def test_create_catalog_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.MetastoreServiceRestInterceptor, "post_create_catalog" ) as post, mock.patch.object( + transports.MetastoreServiceRestInterceptor, "post_create_catalog_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.MetastoreServiceRestInterceptor, "pre_create_catalog" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = metastore.CreateCatalogRequest.pb(metastore.CreateCatalogRequest()) transcode.return_value = { "method": "post", @@ -13287,6 +13340,7 @@ def test_create_catalog_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = metastore.Catalog() + post_with_metadata.return_value = metastore.Catalog(), metadata client.create_catalog( request, @@ -13298,6 +13352,7 @@ def test_create_catalog_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_catalog_rest_bad_request(request_type=metastore.DeleteCatalogRequest): @@ -13380,10 +13435,13 @@ def test_delete_catalog_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.MetastoreServiceRestInterceptor, "post_delete_catalog" ) as post, mock.patch.object( + transports.MetastoreServiceRestInterceptor, "post_delete_catalog_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.MetastoreServiceRestInterceptor, "pre_delete_catalog" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = metastore.DeleteCatalogRequest.pb(metastore.DeleteCatalogRequest()) transcode.return_value = { "method": "post", @@ -13405,6 +13463,7 @@ def test_delete_catalog_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = metastore.Catalog() + post_with_metadata.return_value = metastore.Catalog(), metadata client.delete_catalog( request, @@ -13416,6 +13475,7 @@ def test_delete_catalog_rest_interceptors(null_interceptor): pre.assert_called_once() 
post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_catalog_rest_bad_request(request_type=metastore.GetCatalogRequest): @@ -13498,10 +13558,13 @@ def test_get_catalog_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.MetastoreServiceRestInterceptor, "post_get_catalog" ) as post, mock.patch.object( + transports.MetastoreServiceRestInterceptor, "post_get_catalog_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.MetastoreServiceRestInterceptor, "pre_get_catalog" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = metastore.GetCatalogRequest.pb(metastore.GetCatalogRequest()) transcode.return_value = { "method": "post", @@ -13523,6 +13586,7 @@ def test_get_catalog_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = metastore.Catalog() + post_with_metadata.return_value = metastore.Catalog(), metadata client.get_catalog( request, @@ -13534,6 +13598,7 @@ def test_get_catalog_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_catalogs_rest_bad_request(request_type=metastore.ListCatalogsRequest): @@ -13616,10 +13681,13 @@ def test_list_catalogs_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.MetastoreServiceRestInterceptor, "post_list_catalogs" ) as post, mock.patch.object( + transports.MetastoreServiceRestInterceptor, "post_list_catalogs_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.MetastoreServiceRestInterceptor, "pre_list_catalogs" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = metastore.ListCatalogsRequest.pb(metastore.ListCatalogsRequest()) transcode.return_value = { "method": "post", @@ -13643,6 +13711,7 @@ def test_list_catalogs_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = metastore.ListCatalogsResponse() + post_with_metadata.return_value = metastore.ListCatalogsResponse(), metadata client.list_catalogs( request, @@ -13654,6 +13723,7 @@ def test_list_catalogs_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_database_rest_bad_request(request_type=metastore.CreateDatabaseRequest): @@ -13814,10 +13884,13 @@ def test_create_database_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.MetastoreServiceRestInterceptor, "post_create_database" ) as post, mock.patch.object( + transports.MetastoreServiceRestInterceptor, "post_create_database_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.MetastoreServiceRestInterceptor, "pre_create_database" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = metastore.CreateDatabaseRequest.pb( metastore.CreateDatabaseRequest() ) @@ -13841,6 +13914,7 @@ def test_create_database_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = metastore.Database() + post_with_metadata.return_value = metastore.Database(), metadata client.create_database( request, @@ -13852,6 +13926,7 @@ def test_create_database_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def 
test_delete_database_rest_bad_request(request_type=metastore.DeleteDatabaseRequest): @@ -13940,10 +14015,13 @@ def test_delete_database_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.MetastoreServiceRestInterceptor, "post_delete_database" ) as post, mock.patch.object( + transports.MetastoreServiceRestInterceptor, "post_delete_database_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.MetastoreServiceRestInterceptor, "pre_delete_database" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = metastore.DeleteDatabaseRequest.pb( metastore.DeleteDatabaseRequest() ) @@ -13967,6 +14045,7 @@ def test_delete_database_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = metastore.Database() + post_with_metadata.return_value = metastore.Database(), metadata client.delete_database( request, @@ -13978,6 +14057,7 @@ def test_delete_database_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_database_rest_bad_request(request_type=metastore.UpdateDatabaseRequest): @@ -14146,10 +14226,13 @@ def test_update_database_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.MetastoreServiceRestInterceptor, "post_update_database" ) as post, mock.patch.object( + transports.MetastoreServiceRestInterceptor, "post_update_database_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.MetastoreServiceRestInterceptor, "pre_update_database" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = metastore.UpdateDatabaseRequest.pb( metastore.UpdateDatabaseRequest() ) @@ -14173,6 +14256,7 @@ def test_update_database_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = metastore.Database() + post_with_metadata.return_value = metastore.Database(), metadata client.update_database( request, @@ -14184,6 +14268,7 @@ def test_update_database_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_database_rest_bad_request(request_type=metastore.GetDatabaseRequest): @@ -14272,10 +14357,13 @@ def test_get_database_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.MetastoreServiceRestInterceptor, "post_get_database" ) as post, mock.patch.object( + transports.MetastoreServiceRestInterceptor, "post_get_database_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.MetastoreServiceRestInterceptor, "pre_get_database" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = metastore.GetDatabaseRequest.pb(metastore.GetDatabaseRequest()) transcode.return_value = { "method": "post", @@ -14297,6 +14385,7 @@ def test_get_database_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = metastore.Database() + post_with_metadata.return_value = metastore.Database(), metadata client.get_database( request, @@ -14308,6 +14397,7 @@ def test_get_database_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_databases_rest_bad_request(request_type=metastore.ListDatabasesRequest): @@ -14390,10 +14480,13 @@ def 
test_list_databases_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.MetastoreServiceRestInterceptor, "post_list_databases" ) as post, mock.patch.object( + transports.MetastoreServiceRestInterceptor, "post_list_databases_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.MetastoreServiceRestInterceptor, "pre_list_databases" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = metastore.ListDatabasesRequest.pb(metastore.ListDatabasesRequest()) transcode.return_value = { "method": "post", @@ -14417,6 +14510,7 @@ def test_list_databases_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = metastore.ListDatabasesResponse() + post_with_metadata.return_value = metastore.ListDatabasesResponse(), metadata client.list_databases( request, @@ -14428,6 +14522,7 @@ def test_list_databases_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_table_rest_bad_request(request_type=metastore.CreateTableRequest): @@ -14604,10 +14699,13 @@ def test_create_table_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.MetastoreServiceRestInterceptor, "post_create_table" ) as post, mock.patch.object( + transports.MetastoreServiceRestInterceptor, "post_create_table_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.MetastoreServiceRestInterceptor, "pre_create_table" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = metastore.CreateTableRequest.pb(metastore.CreateTableRequest()) transcode.return_value = { "method": "post", @@ -14629,6 +14727,7 @@ def test_create_table_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = metastore.Table() + post_with_metadata.return_value = metastore.Table(), metadata client.create_table( request, @@ -14640,6 +14739,7 @@ def test_create_table_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_table_rest_bad_request(request_type=metastore.DeleteTableRequest): @@ -14730,10 +14830,13 @@ def test_delete_table_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.MetastoreServiceRestInterceptor, "post_delete_table" ) as post, mock.patch.object( + transports.MetastoreServiceRestInterceptor, "post_delete_table_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.MetastoreServiceRestInterceptor, "pre_delete_table" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = metastore.DeleteTableRequest.pb(metastore.DeleteTableRequest()) transcode.return_value = { "method": "post", @@ -14755,6 +14858,7 @@ def test_delete_table_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = metastore.Table() + post_with_metadata.return_value = metastore.Table(), metadata client.delete_table( request, @@ -14766,6 +14870,7 @@ def test_delete_table_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_table_rest_bad_request(request_type=metastore.UpdateTableRequest): @@ -14946,10 +15051,13 @@ def test_update_table_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( 
transports.MetastoreServiceRestInterceptor, "post_update_table" ) as post, mock.patch.object( + transports.MetastoreServiceRestInterceptor, "post_update_table_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.MetastoreServiceRestInterceptor, "pre_update_table" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = metastore.UpdateTableRequest.pb(metastore.UpdateTableRequest()) transcode.return_value = { "method": "post", @@ -14971,6 +15079,7 @@ def test_update_table_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = metastore.Table() + post_with_metadata.return_value = metastore.Table(), metadata client.update_table( request, @@ -14982,6 +15091,7 @@ def test_update_table_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_rename_table_rest_bad_request(request_type=metastore.RenameTableRequest): @@ -15072,10 +15182,13 @@ def test_rename_table_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.MetastoreServiceRestInterceptor, "post_rename_table" ) as post, mock.patch.object( + transports.MetastoreServiceRestInterceptor, "post_rename_table_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.MetastoreServiceRestInterceptor, "pre_rename_table" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = metastore.RenameTableRequest.pb(metastore.RenameTableRequest()) transcode.return_value = { "method": "post", @@ -15097,6 +15210,7 @@ def test_rename_table_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = metastore.Table() + post_with_metadata.return_value = metastore.Table(), metadata client.rename_table( request, @@ -15108,6 +15222,7 @@ def test_rename_table_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_table_rest_bad_request(request_type=metastore.GetTableRequest): @@ -15198,10 +15313,13 @@ def test_get_table_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.MetastoreServiceRestInterceptor, "post_get_table" ) as post, mock.patch.object( + transports.MetastoreServiceRestInterceptor, "post_get_table_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.MetastoreServiceRestInterceptor, "pre_get_table" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = metastore.GetTableRequest.pb(metastore.GetTableRequest()) transcode.return_value = { "method": "post", @@ -15223,6 +15341,7 @@ def test_get_table_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = metastore.Table() + post_with_metadata.return_value = metastore.Table(), metadata client.get_table( request, @@ -15234,6 +15353,7 @@ def test_get_table_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_tables_rest_bad_request(request_type=metastore.ListTablesRequest): @@ -15320,10 +15440,13 @@ def test_list_tables_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.MetastoreServiceRestInterceptor, "post_list_tables" ) as post, mock.patch.object( + transports.MetastoreServiceRestInterceptor, "post_list_tables_with_metadata" + ) 
as post_with_metadata, mock.patch.object( transports.MetastoreServiceRestInterceptor, "pre_list_tables" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = metastore.ListTablesRequest.pb(metastore.ListTablesRequest()) transcode.return_value = { "method": "post", @@ -15347,6 +15470,7 @@ def test_list_tables_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = metastore.ListTablesResponse() + post_with_metadata.return_value = metastore.ListTablesResponse(), metadata client.list_tables( request, @@ -15358,6 +15482,7 @@ def test_list_tables_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_lock_rest_bad_request(request_type=metastore.CreateLockRequest): @@ -15523,10 +15648,13 @@ def test_create_lock_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.MetastoreServiceRestInterceptor, "post_create_lock" ) as post, mock.patch.object( + transports.MetastoreServiceRestInterceptor, "post_create_lock_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.MetastoreServiceRestInterceptor, "pre_create_lock" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = metastore.CreateLockRequest.pb(metastore.CreateLockRequest()) transcode.return_value = { "method": "post", @@ -15548,6 +15676,7 @@ def test_create_lock_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = metastore.Lock() + post_with_metadata.return_value = metastore.Lock(), metadata client.create_lock( request, @@ -15559,6 +15688,7 @@ def test_create_lock_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_lock_rest_bad_request(request_type=metastore.DeleteLockRequest): @@ -15759,10 +15889,13 @@ def test_check_lock_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.MetastoreServiceRestInterceptor, "post_check_lock" ) as post, mock.patch.object( + transports.MetastoreServiceRestInterceptor, "post_check_lock_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.MetastoreServiceRestInterceptor, "pre_check_lock" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = metastore.CheckLockRequest.pb(metastore.CheckLockRequest()) transcode.return_value = { "method": "post", @@ -15784,6 +15917,7 @@ def test_check_lock_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = metastore.Lock() + post_with_metadata.return_value = metastore.Lock(), metadata client.check_lock( request, @@ -15795,6 +15929,7 @@ def test_check_lock_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_locks_rest_bad_request(request_type=metastore.ListLocksRequest): @@ -15881,10 +16016,13 @@ def test_list_locks_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.MetastoreServiceRestInterceptor, "post_list_locks" ) as post, mock.patch.object( + transports.MetastoreServiceRestInterceptor, "post_list_locks_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.MetastoreServiceRestInterceptor, "pre_list_locks" ) as pre: pre.assert_not_called() post.assert_not_called() + 
post_with_metadata.assert_not_called() pb_message = metastore.ListLocksRequest.pb(metastore.ListLocksRequest()) transcode.return_value = { "method": "post", @@ -15908,6 +16046,7 @@ def test_list_locks_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = metastore.ListLocksResponse() + post_with_metadata.return_value = metastore.ListLocksResponse(), metadata client.list_locks( request, @@ -15919,6 +16058,7 @@ def test_list_locks_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_initialize_client_w_rest(): diff --git a/packages/google-cloud-bigquery-connection/CHANGELOG.md b/packages/google-cloud-bigquery-connection/CHANGELOG.md index 7aa4ebccdc28..13beee3c5abc 100644 --- a/packages/google-cloud-bigquery-connection/CHANGELOG.md +++ b/packages/google-cloud-bigquery-connection/CHANGELOG.md @@ -1,5 +1,13 @@ # Changelog +## [1.18.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-bigquery-connection-v1.17.0...google-cloud-bigquery-connection-v1.18.0) (2025-02-12) + + +### Features + +* Add REST Interceptors which support reading metadata ([b1c3ce8](https://github.com/googleapis/google-cloud-python/commit/b1c3ce8b271e9d22afabcde054e81dcedae6b0ef)) +* Add support for reading selective GAPIC generation methods from service YAML ([b1c3ce8](https://github.com/googleapis/google-cloud-python/commit/b1c3ce8b271e9d22afabcde054e81dcedae6b0ef)) + ## [1.17.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-bigquery-connection-v1.16.1...google-cloud-bigquery-connection-v1.17.0) (2024-12-12) diff --git a/packages/google-cloud-bigquery-connection/README.rst b/packages/google-cloud-bigquery-connection/README.rst index 6f5be94eeb47..8fe5ff94b6c5 100644 --- a/packages/google-cloud-bigquery-connection/README.rst +++ b/packages/google-cloud-bigquery-connection/README.rst @@ -26,12 +26,12 @@ In order to use this library, you first need to go through the following steps: 1. `Select or create a Cloud Platform project.`_ 2. `Enable billing for your project.`_ 3. `Enable the BigQuery Connection.`_ -4. `Setup Authentication.`_ +4. `Set up Authentication.`_ .. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project .. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project .. _Enable the BigQuery Connection.: https://cloud.google.com/bigquery/docs/reference/bigqueryconnection -.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html +.. _Set up Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html Installation ~~~~~~~~~~~~ diff --git a/packages/google-cloud-bigquery-connection/google/cloud/bigquery_connection/gapic_version.py b/packages/google-cloud-bigquery-connection/google/cloud/bigquery_connection/gapic_version.py index 007d6040cbe0..8099b154e9b6 100644 --- a/packages/google-cloud-bigquery-connection/google/cloud/bigquery_connection/gapic_version.py +++ b/packages/google-cloud-bigquery-connection/google/cloud/bigquery_connection/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "1.17.0" # {x-release-please-version} +__version__ = "1.18.0" # {x-release-please-version} diff --git a/packages/google-cloud-bigquery-connection/google/cloud/bigquery_connection_v1/gapic_version.py b/packages/google-cloud-bigquery-connection/google/cloud/bigquery_connection_v1/gapic_version.py index 007d6040cbe0..8099b154e9b6 100644 --- a/packages/google-cloud-bigquery-connection/google/cloud/bigquery_connection_v1/gapic_version.py +++ b/packages/google-cloud-bigquery-connection/google/cloud/bigquery_connection_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.17.0" # {x-release-please-version} +__version__ = "1.18.0" # {x-release-please-version} diff --git a/packages/google-cloud-bigquery-connection/google/cloud/bigquery_connection_v1/services/connection_service/client.py b/packages/google-cloud-bigquery-connection/google/cloud/bigquery_connection_v1/services/connection_service/client.py index 950ac661c269..69adc52ae137 100644 --- a/packages/google-cloud-bigquery-connection/google/cloud/bigquery_connection_v1/services/connection_service/client.py +++ b/packages/google-cloud-bigquery-connection/google/cloud/bigquery_connection_v1/services/connection_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -535,6 +537,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. diff --git a/packages/google-cloud-bigquery-connection/google/cloud/bigquery_connection_v1/services/connection_service/transports/rest.py b/packages/google-cloud-bigquery-connection/google/cloud/bigquery_connection_v1/services/connection_service/transports/rest.py index 2653641a1c2d..d6078e6b3896 100644 --- a/packages/google-cloud-bigquery-connection/google/cloud/bigquery_connection_v1/services/connection_service/transports/rest.py +++ b/packages/google-cloud-bigquery-connection/google/cloud/bigquery_connection_v1/services/connection_service/transports/rest.py @@ -157,12 +157,35 @@ def post_create_connection( ) -> gcbc_connection.Connection: """Post-rpc interceptor for create_connection - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_connection_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ConnectionService server but before - it is returned to user code. + it is returned to user code. 
This `post_create_connection` interceptor runs + before the `post_create_connection_with_metadata` interceptor. """ return response + def post_create_connection_with_metadata( + self, + response: gcbc_connection.Connection, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gcbc_connection.Connection, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_connection + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ConnectionService server but before it is returned to user code. + + We recommend only using this `post_create_connection_with_metadata` + interceptor in new development instead of the `post_create_connection` interceptor. + When both interceptors are used, this `post_create_connection_with_metadata` interceptor runs after the + `post_create_connection` interceptor. The (possibly modified) response returned by + `post_create_connection` will be passed to + `post_create_connection_with_metadata`. + """ + return response, metadata + def pre_delete_connection( self, request: connection.DeleteConnectionRequest, @@ -196,12 +219,35 @@ def post_get_connection( ) -> connection.Connection: """Post-rpc interceptor for get_connection - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_connection_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ConnectionService server but before - it is returned to user code. + it is returned to user code. This `post_get_connection` interceptor runs + before the `post_get_connection_with_metadata` interceptor. """ return response + def post_get_connection_with_metadata( + self, + response: connection.Connection, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[connection.Connection, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_connection + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ConnectionService server but before it is returned to user code. + + We recommend only using this `post_get_connection_with_metadata` + interceptor in new development instead of the `post_get_connection` interceptor. + When both interceptors are used, this `post_get_connection_with_metadata` interceptor runs after the + `post_get_connection` interceptor. The (possibly modified) response returned by + `post_get_connection` will be passed to + `post_get_connection_with_metadata`. + """ + return response, metadata + def pre_get_iam_policy( self, request: iam_policy_pb2.GetIamPolicyRequest, @@ -219,12 +265,35 @@ def pre_get_iam_policy( def post_get_iam_policy(self, response: policy_pb2.Policy) -> policy_pb2.Policy: """Post-rpc interceptor for get_iam_policy - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_iam_policy_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ConnectionService server but before - it is returned to user code. + it is returned to user code. This `post_get_iam_policy` interceptor runs + before the `post_get_iam_policy_with_metadata` interceptor. 
""" return response + def post_get_iam_policy_with_metadata( + self, + response: policy_pb2.Policy, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[policy_pb2.Policy, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_iam_policy + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ConnectionService server but before it is returned to user code. + + We recommend only using this `post_get_iam_policy_with_metadata` + interceptor in new development instead of the `post_get_iam_policy` interceptor. + When both interceptors are used, this `post_get_iam_policy_with_metadata` interceptor runs after the + `post_get_iam_policy` interceptor. The (possibly modified) response returned by + `post_get_iam_policy` will be passed to + `post_get_iam_policy_with_metadata`. + """ + return response, metadata + def pre_list_connections( self, request: connection.ListConnectionsRequest, @@ -244,12 +313,37 @@ def post_list_connections( ) -> connection.ListConnectionsResponse: """Post-rpc interceptor for list_connections - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_connections_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ConnectionService server but before - it is returned to user code. + it is returned to user code. This `post_list_connections` interceptor runs + before the `post_list_connections_with_metadata` interceptor. """ return response + def post_list_connections_with_metadata( + self, + response: connection.ListConnectionsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + connection.ListConnectionsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_connections + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ConnectionService server but before it is returned to user code. + + We recommend only using this `post_list_connections_with_metadata` + interceptor in new development instead of the `post_list_connections` interceptor. + When both interceptors are used, this `post_list_connections_with_metadata` interceptor runs after the + `post_list_connections` interceptor. The (possibly modified) response returned by + `post_list_connections` will be passed to + `post_list_connections_with_metadata`. + """ + return response, metadata + def pre_set_iam_policy( self, request: iam_policy_pb2.SetIamPolicyRequest, @@ -267,12 +361,35 @@ def pre_set_iam_policy( def post_set_iam_policy(self, response: policy_pb2.Policy) -> policy_pb2.Policy: """Post-rpc interceptor for set_iam_policy - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_set_iam_policy_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ConnectionService server but before - it is returned to user code. + it is returned to user code. This `post_set_iam_policy` interceptor runs + before the `post_set_iam_policy_with_metadata` interceptor. 
""" return response + def post_set_iam_policy_with_metadata( + self, + response: policy_pb2.Policy, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[policy_pb2.Policy, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for set_iam_policy + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ConnectionService server but before it is returned to user code. + + We recommend only using this `post_set_iam_policy_with_metadata` + interceptor in new development instead of the `post_set_iam_policy` interceptor. + When both interceptors are used, this `post_set_iam_policy_with_metadata` interceptor runs after the + `post_set_iam_policy` interceptor. The (possibly modified) response returned by + `post_set_iam_policy` will be passed to + `post_set_iam_policy_with_metadata`. + """ + return response, metadata + def pre_test_iam_permissions( self, request: iam_policy_pb2.TestIamPermissionsRequest, @@ -293,12 +410,38 @@ def post_test_iam_permissions( ) -> iam_policy_pb2.TestIamPermissionsResponse: """Post-rpc interceptor for test_iam_permissions - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_test_iam_permissions_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ConnectionService server but before - it is returned to user code. + it is returned to user code. This `post_test_iam_permissions` interceptor runs + before the `post_test_iam_permissions_with_metadata` interceptor. """ return response + def post_test_iam_permissions_with_metadata( + self, + response: iam_policy_pb2.TestIamPermissionsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + iam_policy_pb2.TestIamPermissionsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for test_iam_permissions + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ConnectionService server but before it is returned to user code. + + We recommend only using this `post_test_iam_permissions_with_metadata` + interceptor in new development instead of the `post_test_iam_permissions` interceptor. + When both interceptors are used, this `post_test_iam_permissions_with_metadata` interceptor runs after the + `post_test_iam_permissions` interceptor. The (possibly modified) response returned by + `post_test_iam_permissions` will be passed to + `post_test_iam_permissions_with_metadata`. + """ + return response, metadata + def pre_update_connection( self, request: gcbc_connection.UpdateConnectionRequest, @@ -318,12 +461,35 @@ def post_update_connection( ) -> gcbc_connection.Connection: """Post-rpc interceptor for update_connection - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_connection_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ConnectionService server but before - it is returned to user code. + it is returned to user code. This `post_update_connection` interceptor runs + before the `post_update_connection_with_metadata` interceptor. 
""" return response + def post_update_connection_with_metadata( + self, + response: gcbc_connection.Connection, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gcbc_connection.Connection, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_connection + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ConnectionService server but before it is returned to user code. + + We recommend only using this `post_update_connection_with_metadata` + interceptor in new development instead of the `post_update_connection` interceptor. + When both interceptors are used, this `post_update_connection_with_metadata` interceptor runs after the + `post_update_connection` interceptor. The (possibly modified) response returned by + `post_update_connection` will be passed to + `post_update_connection_with_metadata`. + """ + return response, metadata + @dataclasses.dataclass class ConnectionServiceRestStub: @@ -541,6 +707,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_connection(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_connection_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -796,6 +966,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_connection(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_connection_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1017,6 +1191,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_iam_policy(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_iam_policy_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1163,6 +1341,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_connections(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_connections_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1386,6 +1568,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_set_iam_policy(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_set_iam_policy_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1535,6 +1721,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_test_iam_permissions(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_test_iam_permissions_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1688,6 +1878,10 @@ def 
__call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_connection(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_connection_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-bigquery-connection/noxfile.py b/packages/google-cloud-bigquery-connection/noxfile.py index a9ceef47133c..0acc836b384e 100644 --- a/packages/google-cloud-bigquery-connection/noxfile.py +++ b/packages/google-cloud-bigquery-connection/noxfile.py @@ -382,20 +382,29 @@ def docfx(session): ["python", "upb", "cpp"], ) def prerelease_deps(session, protobuf_implementation): - """Run all tests with prerelease versions of dependencies installed.""" + """ + Run all tests with pre-release versions of dependencies installed + rather than the standard non pre-release versions. + Pre-releases versions can be installed using + `pip install --pre `. + """ if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): session.skip("cpp implementation is not supported in python 3.11+") # Install all dependencies - session.install("-e", ".[all, tests, tracing]") + session.install("-e", ".") + unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES + # Install dependencies for the unit test environment session.install(*unit_deps_all) + system_deps_all = ( SYSTEM_TEST_STANDARD_DEPENDENCIES + SYSTEM_TEST_EXTERNAL_DEPENDENCIES + SYSTEM_TEST_EXTRAS ) + # Install dependencies for the system test environment session.install(*system_deps_all) # Because we test minimum dependency versions on the minimum Python @@ -417,6 +426,7 @@ def prerelease_deps(session, protobuf_implementation): ) ] + # Install dependencies specified in `testing/constraints-X.txt`. session.install(*constraints_deps) prerel_deps = [ @@ -458,3 +468,70 @@ def prerelease_deps(session, protobuf_implementation): "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, }, ) + + +@nox.session(python="3.13") +@nox.parametrize( + "protobuf_implementation", + ["python", "upb"], +) +def core_deps_from_source(session, protobuf_implementation): + """Run all tests with local versions of core dependencies installed, + rather than pulling core dependencies from PyPI. + """ + + # Install all dependencies + session.install(".") + + # Install dependencies for the unit test environment + unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES + session.install(*unit_deps_all) + + # Install dependencies for the system test environment + system_deps_all = ( + SYSTEM_TEST_STANDARD_DEPENDENCIES + + SYSTEM_TEST_EXTERNAL_DEPENDENCIES + + SYSTEM_TEST_EXTRAS + ) + session.install(*system_deps_all) + + # Because we test minimum dependency versions on the minimum Python + # version, the first version we test with in the unit tests sessions has a + # constraints file containing all dependencies and extras that should be installed. + with open( + CURRENT_DIRECTORY + / "testing" + / f"constraints-{UNIT_TEST_PYTHON_VERSIONS[0]}.txt", + encoding="utf-8", + ) as constraints_file: + constraints_text = constraints_file.read() + + # Ignore leading whitespace and comment lines. + constraints_deps = [ + match.group(1) + for match in re.finditer( + r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE + ) + ] + + # Install dependencies specified in `testing/constraints-X.txt`. 
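The dependency names are pulled out of the constraints file with a lookahead regex so that only entries pinned as `name==version` are collected, while comments and unpinned lines are skipped. A standalone illustration of that pattern follows; the sample constraints text is made up and is not part of the noxfile itself.

import re

# Sample text (made up) in the format of testing/constraints-X.txt.
constraints_text = """
# a comment line
google-api-core==1.34.0
protobuf==3.20.2
some-unpinned-dep
"""

# Same pattern as in the nox sessions: capture a token only when it is
# immediately followed by "==<version>".
constraints_deps = [
    match.group(1)
    for match in re.finditer(
        r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE
    )
]

print(constraints_deps)  # ['google-api-core', 'protobuf']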
+ session.install(*constraints_deps) + + core_dependencies_from_source = [ + "google-api-core @ git+https://github.com/googleapis/python-api-core.git", + "google-auth @ git+https://github.com/googleapis/google-auth-library-python.git", + f"{CURRENT_DIRECTORY}/../googleapis-common-protos", + f"{CURRENT_DIRECTORY}/../grpc-google-iam-v1", + "proto-plus @ git+https://github.com/googleapis/proto-plus-python.git", + ] + + for dep in core_dependencies_from_source: + session.install(dep, "--ignore-installed", "--no-deps") + + session.run( + "py.test", + "tests/unit", + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, + ) diff --git a/packages/google-cloud-bigquery-connection/samples/generated_samples/snippet_metadata_google.cloud.bigquery.connection.v1.json b/packages/google-cloud-bigquery-connection/samples/generated_samples/snippet_metadata_google.cloud.bigquery.connection.v1.json index 9577a8066ad5..539b0c69eb30 100644 --- a/packages/google-cloud-bigquery-connection/samples/generated_samples/snippet_metadata_google.cloud.bigquery.connection.v1.json +++ b/packages/google-cloud-bigquery-connection/samples/generated_samples/snippet_metadata_google.cloud.bigquery.connection.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-bigquery-connection", - "version": "1.17.0" + "version": "1.18.0" }, "snippets": [ { diff --git a/packages/google-cloud-bigquery-connection/tests/unit/gapic/bigquery_connection_v1/test_connection_service.py b/packages/google-cloud-bigquery-connection/tests/unit/gapic/bigquery_connection_v1/test_connection_service.py index 32ae31cdab79..0162486705e3 100644 --- a/packages/google-cloud-bigquery-connection/tests/unit/gapic/bigquery_connection_v1/test_connection_service.py +++ b/packages/google-cloud-bigquery-connection/tests/unit/gapic/bigquery_connection_v1/test_connection_service.py @@ -66,6 +66,13 @@ from google.cloud.bigquery_connection_v1.types import connection as gcbc_connection from google.cloud.bigquery_connection_v1.types import connection +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -333,6 +340,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
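The practical effect of `_add_cred_info_for_auth_errors`, which the tests below exercise, is that when a call fails with 401/403/404 and google-auth >= 2.35.0 supplies `get_cred_info`, a JSON description of the active credentials is appended to the exception's `details`. A minimal sketch of what a caller might observe; the client construction and resource name are illustrative assumptions, and the per-RPC wiring that invokes the helper is not shown in this hunk.

from google.api_core import exceptions as core_exceptions
from google.cloud import bigquery_connection_v1

client = bigquery_connection_v1.ConnectionServiceClient()

try:
    client.get_connection(
        name="projects/my-project/locations/us/connections/my-connection"
    )
except core_exceptions.PermissionDenied as exc:  # HTTP 403
    # With google-auth>=2.35.0, the last entry in `exc.details` may be a JSON
    # string in the shape of CRED_INFO_STRING above, describing the
    # credential source, credential type, and principal that were used.
    for detail in exc.details:
        print(detail)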
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = ConnectionServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = ConnectionServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -6452,10 +6502,14 @@ def test_create_connection_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ConnectionServiceRestInterceptor, "post_create_connection" ) as post, mock.patch.object( + transports.ConnectionServiceRestInterceptor, + "post_create_connection_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ConnectionServiceRestInterceptor, "pre_create_connection" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gcbc_connection.CreateConnectionRequest.pb( gcbc_connection.CreateConnectionRequest() ) @@ -6479,6 +6533,7 @@ def test_create_connection_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gcbc_connection.Connection() + post_with_metadata.return_value = gcbc_connection.Connection(), metadata client.create_connection( request, @@ -6490,6 +6545,7 @@ def test_create_connection_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_connection_rest_bad_request(request_type=connection.GetConnectionRequest): @@ -6582,10 +6638,13 @@ def test_get_connection_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ConnectionServiceRestInterceptor, "post_get_connection" ) as post, mock.patch.object( + transports.ConnectionServiceRestInterceptor, "post_get_connection_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ConnectionServiceRestInterceptor, "pre_get_connection" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = connection.GetConnectionRequest.pb( connection.GetConnectionRequest() ) @@ -6609,6 +6668,7 @@ def test_get_connection_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = connection.Connection() + post_with_metadata.return_value = connection.Connection(), metadata client.get_connection( request, @@ -6620,6 +6680,7 @@ def test_get_connection_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() 
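For context on what these interceptor tests exercise: a user-supplied interceptor can override the new `*_with_metadata` hooks to observe both the decoded response and the HTTP response headers. A minimal sketch, assuming the standard generated REST transport wiring; the subclass name and print logic are illustrative.

from google.cloud.bigquery_connection_v1 import ConnectionServiceClient
from google.cloud.bigquery_connection_v1.services.connection_service import transports


class HeaderLoggingInterceptor(transports.ConnectionServiceRestInterceptor):
    def post_get_connection_with_metadata(self, response, metadata):
        # `metadata` is built from the HTTP response headers as (key, value) pairs.
        for key, value in metadata:
            print(f"{key}: {value}")
        # Return both values; they are what the client ultimately sees.
        return response, metadata


transport = transports.ConnectionServiceRestTransport(
    interceptor=HeaderLoggingInterceptor()
)
client = ConnectionServiceClient(transport=transport)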
+ post_with_metadata.assert_called_once() def test_list_connections_rest_bad_request( @@ -6704,10 +6765,14 @@ def test_list_connections_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ConnectionServiceRestInterceptor, "post_list_connections" ) as post, mock.patch.object( + transports.ConnectionServiceRestInterceptor, + "post_list_connections_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ConnectionServiceRestInterceptor, "pre_list_connections" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = connection.ListConnectionsRequest.pb( connection.ListConnectionsRequest() ) @@ -6733,6 +6798,7 @@ def test_list_connections_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = connection.ListConnectionsResponse() + post_with_metadata.return_value = connection.ListConnectionsResponse(), metadata client.list_connections( request, @@ -6744,6 +6810,7 @@ def test_list_connections_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_connection_rest_bad_request( @@ -6963,10 +7030,14 @@ def test_update_connection_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ConnectionServiceRestInterceptor, "post_update_connection" ) as post, mock.patch.object( + transports.ConnectionServiceRestInterceptor, + "post_update_connection_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ConnectionServiceRestInterceptor, "pre_update_connection" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gcbc_connection.UpdateConnectionRequest.pb( gcbc_connection.UpdateConnectionRequest() ) @@ -6990,6 +7061,7 @@ def test_update_connection_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gcbc_connection.Connection() + post_with_metadata.return_value = gcbc_connection.Connection(), metadata client.update_connection( request, @@ -7001,6 +7073,7 @@ def test_update_connection_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_connection_rest_bad_request( @@ -7197,10 +7270,13 @@ def test_get_iam_policy_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ConnectionServiceRestInterceptor, "post_get_iam_policy" ) as post, mock.patch.object( + transports.ConnectionServiceRestInterceptor, "post_get_iam_policy_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ConnectionServiceRestInterceptor, "pre_get_iam_policy" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = iam_policy_pb2.GetIamPolicyRequest() transcode.return_value = { "method": "post", @@ -7222,6 +7298,7 @@ def test_get_iam_policy_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = policy_pb2.Policy() + post_with_metadata.return_value = policy_pb2.Policy(), metadata client.get_iam_policy( request, @@ -7233,6 +7310,7 @@ def test_get_iam_policy_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_set_iam_policy_rest_bad_request( @@ -7320,10 +7398,13 @@ def test_set_iam_policy_rest_interceptors(null_interceptor): ) as transcode, 
mock.patch.object( transports.ConnectionServiceRestInterceptor, "post_set_iam_policy" ) as post, mock.patch.object( + transports.ConnectionServiceRestInterceptor, "post_set_iam_policy_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ConnectionServiceRestInterceptor, "pre_set_iam_policy" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = iam_policy_pb2.SetIamPolicyRequest() transcode.return_value = { "method": "post", @@ -7345,6 +7426,7 @@ def test_set_iam_policy_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = policy_pb2.Policy() + post_with_metadata.return_value = policy_pb2.Policy(), metadata client.set_iam_policy( request, @@ -7356,6 +7438,7 @@ def test_set_iam_policy_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_test_iam_permissions_rest_bad_request( @@ -7441,10 +7524,14 @@ def test_test_iam_permissions_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ConnectionServiceRestInterceptor, "post_test_iam_permissions" ) as post, mock.patch.object( + transports.ConnectionServiceRestInterceptor, + "post_test_iam_permissions_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ConnectionServiceRestInterceptor, "pre_test_iam_permissions" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = iam_policy_pb2.TestIamPermissionsRequest() transcode.return_value = { "method": "post", @@ -7468,6 +7555,10 @@ def test_test_iam_permissions_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = iam_policy_pb2.TestIamPermissionsResponse() + post_with_metadata.return_value = ( + iam_policy_pb2.TestIamPermissionsResponse(), + metadata, + ) client.test_iam_permissions( request, @@ -7479,6 +7570,7 @@ def test_test_iam_permissions_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_initialize_client_w_rest(): diff --git a/packages/google-cloud-bigquery-data-exchange/CHANGELOG.md b/packages/google-cloud-bigquery-data-exchange/CHANGELOG.md index 2ad3c00ee7a7..89484aa9d294 100644 --- a/packages/google-cloud-bigquery-data-exchange/CHANGELOG.md +++ b/packages/google-cloud-bigquery-data-exchange/CHANGELOG.md @@ -1,5 +1,13 @@ # Changelog +## [0.5.17](https://github.com/googleapis/google-cloud-python/compare/google-cloud-bigquery-data-exchange-v0.5.16...google-cloud-bigquery-data-exchange-v0.5.17) (2025-02-12) + + +### Features + +* Add REST Interceptors which support reading metadata ([b1c3ce8](https://github.com/googleapis/google-cloud-python/commit/b1c3ce8b271e9d22afabcde054e81dcedae6b0ef)) +* Add support for reading selective GAPIC generation methods from service YAML ([b1c3ce8](https://github.com/googleapis/google-cloud-python/commit/b1c3ce8b271e9d22afabcde054e81dcedae6b0ef)) + ## [0.5.16](https://github.com/googleapis/google-cloud-python/compare/google-cloud-bigquery-data-exchange-v0.5.15...google-cloud-bigquery-data-exchange-v0.5.16) (2024-12-12) diff --git a/packages/google-cloud-bigquery-data-exchange/README.rst b/packages/google-cloud-bigquery-data-exchange/README.rst index 182e9eed534b..32185088d5b4 100644 --- a/packages/google-cloud-bigquery-data-exchange/README.rst +++ b/packages/google-cloud-bigquery-data-exchange/README.rst @@ -26,12 +26,12 @@ In 
order to use this library, you first need to go through the following steps: 1. `Select or create a Cloud Platform project.`_ 2. `Enable billing for your project.`_ 3. `Enable the BigQuery Analytics Hub.`_ -4. `Setup Authentication.`_ +4. `Set up Authentication.`_ .. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project .. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project .. _Enable the BigQuery Analytics Hub.: https://cloud.google.com/bigquery/docs/analytics-hub-introduction -.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html +.. _Set up Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html Installation ~~~~~~~~~~~~ diff --git a/packages/google-cloud-bigquery-data-exchange/google/cloud/bigquery_data_exchange/gapic_version.py b/packages/google-cloud-bigquery-data-exchange/google/cloud/bigquery_data_exchange/gapic_version.py index 02b0cbec08ac..6dccd9f0e979 100644 --- a/packages/google-cloud-bigquery-data-exchange/google/cloud/bigquery_data_exchange/gapic_version.py +++ b/packages/google-cloud-bigquery-data-exchange/google/cloud/bigquery_data_exchange/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.5.16" # {x-release-please-version} +__version__ = "0.5.17" # {x-release-please-version} diff --git a/packages/google-cloud-bigquery-data-exchange/google/cloud/bigquery_data_exchange_v1beta1/gapic_version.py b/packages/google-cloud-bigquery-data-exchange/google/cloud/bigquery_data_exchange_v1beta1/gapic_version.py index 02b0cbec08ac..6dccd9f0e979 100644 --- a/packages/google-cloud-bigquery-data-exchange/google/cloud/bigquery_data_exchange_v1beta1/gapic_version.py +++ b/packages/google-cloud-bigquery-data-exchange/google/cloud/bigquery_data_exchange_v1beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.5.16" # {x-release-please-version} +__version__ = "0.5.17" # {x-release-please-version} diff --git a/packages/google-cloud-bigquery-data-exchange/google/cloud/bigquery_data_exchange_v1beta1/services/analytics_hub_service/client.py b/packages/google-cloud-bigquery-data-exchange/google/cloud/bigquery_data_exchange_v1beta1/services/analytics_hub_service/client.py index e235ef3e16da..7fbfff458083 100644 --- a/packages/google-cloud-bigquery-data-exchange/google/cloud/bigquery_data_exchange_v1beta1/services/analytics_hub_service/client.py +++ b/packages/google-cloud-bigquery-data-exchange/google/cloud/bigquery_data_exchange_v1beta1/services/analytics_hub_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -537,6 +539,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. 
+ """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -2486,16 +2515,20 @@ def get_location( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def list_locations( self, @@ -2541,16 +2574,20 @@ def list_locations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-bigquery-data-exchange/noxfile.py b/packages/google-cloud-bigquery-data-exchange/noxfile.py index a9ceef47133c..0acc836b384e 100644 --- a/packages/google-cloud-bigquery-data-exchange/noxfile.py +++ b/packages/google-cloud-bigquery-data-exchange/noxfile.py @@ -382,20 +382,29 @@ def docfx(session): ["python", "upb", "cpp"], ) def prerelease_deps(session, protobuf_implementation): - """Run all tests with prerelease versions of dependencies installed.""" + """ + Run all tests with pre-release versions of dependencies installed + rather than the standard non pre-release versions. + Pre-releases versions can be installed using + `pip install --pre `. + """ if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): session.skip("cpp implementation is not supported in python 3.11+") # Install all dependencies - session.install("-e", ".[all, tests, tracing]") + session.install("-e", ".") + unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES + # Install dependencies for the unit test environment session.install(*unit_deps_all) + system_deps_all = ( SYSTEM_TEST_STANDARD_DEPENDENCIES + SYSTEM_TEST_EXTERNAL_DEPENDENCIES + SYSTEM_TEST_EXTRAS ) + # Install dependencies for the system test environment session.install(*system_deps_all) # Because we test minimum dependency versions on the minimum Python @@ -417,6 +426,7 @@ def prerelease_deps(session, protobuf_implementation): ) ] + # Install dependencies specified in `testing/constraints-X.txt`. 
session.install(*constraints_deps) prerel_deps = [ @@ -458,3 +468,70 @@ def prerelease_deps(session, protobuf_implementation): "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, }, ) + + +@nox.session(python="3.13") +@nox.parametrize( + "protobuf_implementation", + ["python", "upb"], +) +def core_deps_from_source(session, protobuf_implementation): + """Run all tests with local versions of core dependencies installed, + rather than pulling core dependencies from PyPI. + """ + + # Install all dependencies + session.install(".") + + # Install dependencies for the unit test environment + unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES + session.install(*unit_deps_all) + + # Install dependencies for the system test environment + system_deps_all = ( + SYSTEM_TEST_STANDARD_DEPENDENCIES + + SYSTEM_TEST_EXTERNAL_DEPENDENCIES + + SYSTEM_TEST_EXTRAS + ) + session.install(*system_deps_all) + + # Because we test minimum dependency versions on the minimum Python + # version, the first version we test with in the unit tests sessions has a + # constraints file containing all dependencies and extras that should be installed. + with open( + CURRENT_DIRECTORY + / "testing" + / f"constraints-{UNIT_TEST_PYTHON_VERSIONS[0]}.txt", + encoding="utf-8", + ) as constraints_file: + constraints_text = constraints_file.read() + + # Ignore leading whitespace and comment lines. + constraints_deps = [ + match.group(1) + for match in re.finditer( + r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE + ) + ] + + # Install dependencies specified in `testing/constraints-X.txt`. + session.install(*constraints_deps) + + core_dependencies_from_source = [ + "google-api-core @ git+https://github.com/googleapis/python-api-core.git", + "google-auth @ git+https://github.com/googleapis/google-auth-library-python.git", + f"{CURRENT_DIRECTORY}/../googleapis-common-protos", + f"{CURRENT_DIRECTORY}/../grpc-google-iam-v1", + "proto-plus @ git+https://github.com/googleapis/proto-plus-python.git", + ] + + for dep in core_dependencies_from_source: + session.install(dep, "--ignore-installed", "--no-deps") + + session.run( + "py.test", + "tests/unit", + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, + ) diff --git a/packages/google-cloud-bigquery-data-exchange/samples/generated_samples/snippet_metadata_google.cloud.bigquery.dataexchange.v1beta1.json b/packages/google-cloud-bigquery-data-exchange/samples/generated_samples/snippet_metadata_google.cloud.bigquery.dataexchange.v1beta1.json index 7af9a67ae3df..f9600737979c 100644 --- a/packages/google-cloud-bigquery-data-exchange/samples/generated_samples/snippet_metadata_google.cloud.bigquery.dataexchange.v1beta1.json +++ b/packages/google-cloud-bigquery-data-exchange/samples/generated_samples/snippet_metadata_google.cloud.bigquery.dataexchange.v1beta1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-bigquery-data-exchange", - "version": "0.5.16" + "version": "0.5.17" }, "snippets": [ { diff --git a/packages/google-cloud-bigquery-data-exchange/tests/unit/gapic/bigquery_data_exchange_v1beta1/test_analytics_hub_service.py b/packages/google-cloud-bigquery-data-exchange/tests/unit/gapic/bigquery_data_exchange_v1beta1/test_analytics_hub_service.py index 2840816a64cc..88de6e5163ca 100644 --- a/packages/google-cloud-bigquery-data-exchange/tests/unit/gapic/bigquery_data_exchange_v1beta1/test_analytics_hub_service.py +++ 
b/packages/google-cloud-bigquery-data-exchange/tests/unit/gapic/bigquery_data_exchange_v1beta1/test_analytics_hub_service.py @@ -22,6 +22,7 @@ except ImportError: # pragma: NO COVER import mock +import json import math from google.api_core import api_core_version @@ -62,6 +63,13 @@ ) from google.cloud.bigquery_data_exchange_v1beta1.types import dataexchange +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -335,6 +343,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = AnalyticsHubServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = AnalyticsHubServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-bigquery-datapolicies/CHANGELOG.md b/packages/google-cloud-bigquery-datapolicies/CHANGELOG.md index e158806b69a7..b121035ff45a 100644 --- a/packages/google-cloud-bigquery-datapolicies/CHANGELOG.md +++ b/packages/google-cloud-bigquery-datapolicies/CHANGELOG.md @@ -1,5 +1,13 @@ # Changelog +## [0.6.12](https://github.com/googleapis/google-cloud-python/compare/google-cloud-bigquery-datapolicies-v0.6.11...google-cloud-bigquery-datapolicies-v0.6.12) (2025-02-12) + + +### Features + +* Add REST Interceptors which support reading metadata ([b1c3ce8](https://github.com/googleapis/google-cloud-python/commit/b1c3ce8b271e9d22afabcde054e81dcedae6b0ef)) +* Add support for reading selective GAPIC generation methods from service YAML ([b1c3ce8](https://github.com/googleapis/google-cloud-python/commit/b1c3ce8b271e9d22afabcde054e81dcedae6b0ef)) + ## [0.6.11](https://github.com/googleapis/google-cloud-python/compare/google-cloud-bigquery-datapolicies-v0.6.10...google-cloud-bigquery-datapolicies-v0.6.11) (2024-12-12) diff --git a/packages/google-cloud-bigquery-datapolicies/README.rst b/packages/google-cloud-bigquery-datapolicies/README.rst index fef5dc499b72..84d8b443ffa1 100644 --- a/packages/google-cloud-bigquery-datapolicies/README.rst +++ b/packages/google-cloud-bigquery-datapolicies/README.rst @@ -26,12 +26,12 @@ In order to use this 
library, you first need to go through the following steps: 1. `Select or create a Cloud Platform project.`_ 2. `Enable billing for your project.`_ 3. `Enable the BigQuery Data Policy.`_ -4. `Setup Authentication.`_ +4. `Set up Authentication.`_ .. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project .. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project .. _Enable the BigQuery Data Policy.: https://cloud.google.com/bigquery/docs/reference/bigquerydatapolicy/rest -.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html +.. _Set up Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html Installation ~~~~~~~~~~~~ diff --git a/packages/google-cloud-bigquery-datapolicies/google/cloud/bigquery_datapolicies/gapic_version.py b/packages/google-cloud-bigquery-datapolicies/google/cloud/bigquery_datapolicies/gapic_version.py index 02b228845902..44e5c049e336 100644 --- a/packages/google-cloud-bigquery-datapolicies/google/cloud/bigquery_datapolicies/gapic_version.py +++ b/packages/google-cloud-bigquery-datapolicies/google/cloud/bigquery_datapolicies/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.6.11" # {x-release-please-version} +__version__ = "0.6.12" # {x-release-please-version} diff --git a/packages/google-cloud-bigquery-datapolicies/google/cloud/bigquery_datapolicies_v1/gapic_version.py b/packages/google-cloud-bigquery-datapolicies/google/cloud/bigquery_datapolicies_v1/gapic_version.py index 02b228845902..44e5c049e336 100644 --- a/packages/google-cloud-bigquery-datapolicies/google/cloud/bigquery_datapolicies_v1/gapic_version.py +++ b/packages/google-cloud-bigquery-datapolicies/google/cloud/bigquery_datapolicies_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.6.11" # {x-release-please-version} +__version__ = "0.6.12" # {x-release-please-version} diff --git a/packages/google-cloud-bigquery-datapolicies/google/cloud/bigquery_datapolicies_v1/services/data_policy_service/client.py b/packages/google-cloud-bigquery-datapolicies/google/cloud/bigquery_datapolicies_v1/services/data_policy_service/client.py index ab428db9b0ed..c9fbb1e3ae49 100644 --- a/packages/google-cloud-bigquery-datapolicies/google/cloud/bigquery_datapolicies_v1/services/data_policy_service/client.py +++ b/packages/google-cloud-bigquery-datapolicies/google/cloud/bigquery_datapolicies_v1/services/data_policy_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -492,6 +494,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. 
+ """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. diff --git a/packages/google-cloud-bigquery-datapolicies/google/cloud/bigquery_datapolicies_v1/services/data_policy_service/transports/rest.py b/packages/google-cloud-bigquery-datapolicies/google/cloud/bigquery_datapolicies_v1/services/data_policy_service/transports/rest.py index 896742a4fce4..f3ce323626c9 100644 --- a/packages/google-cloud-bigquery-datapolicies/google/cloud/bigquery_datapolicies_v1/services/data_policy_service/transports/rest.py +++ b/packages/google-cloud-bigquery-datapolicies/google/cloud/bigquery_datapolicies_v1/services/data_policy_service/transports/rest.py @@ -164,12 +164,35 @@ def post_create_data_policy( ) -> datapolicy.DataPolicy: """Post-rpc interceptor for create_data_policy - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_data_policy_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DataPolicyService server but before - it is returned to user code. + it is returned to user code. This `post_create_data_policy` interceptor runs + before the `post_create_data_policy_with_metadata` interceptor. """ return response + def post_create_data_policy_with_metadata( + self, + response: datapolicy.DataPolicy, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[datapolicy.DataPolicy, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_data_policy + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataPolicyService server but before it is returned to user code. + + We recommend only using this `post_create_data_policy_with_metadata` + interceptor in new development instead of the `post_create_data_policy` interceptor. + When both interceptors are used, this `post_create_data_policy_with_metadata` interceptor runs after the + `post_create_data_policy` interceptor. The (possibly modified) response returned by + `post_create_data_policy` will be passed to + `post_create_data_policy_with_metadata`. + """ + return response, metadata + def pre_delete_data_policy( self, request: datapolicy.DeleteDataPolicyRequest, @@ -203,12 +226,35 @@ def post_get_data_policy( ) -> datapolicy.DataPolicy: """Post-rpc interceptor for get_data_policy - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_data_policy_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DataPolicyService server but before - it is returned to user code. + it is returned to user code. This `post_get_data_policy` interceptor runs + before the `post_get_data_policy_with_metadata` interceptor. 
""" return response + def post_get_data_policy_with_metadata( + self, + response: datapolicy.DataPolicy, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[datapolicy.DataPolicy, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_data_policy + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataPolicyService server but before it is returned to user code. + + We recommend only using this `post_get_data_policy_with_metadata` + interceptor in new development instead of the `post_get_data_policy` interceptor. + When both interceptors are used, this `post_get_data_policy_with_metadata` interceptor runs after the + `post_get_data_policy` interceptor. The (possibly modified) response returned by + `post_get_data_policy` will be passed to + `post_get_data_policy_with_metadata`. + """ + return response, metadata + def pre_get_iam_policy( self, request: iam_policy_pb2.GetIamPolicyRequest, @@ -226,12 +272,35 @@ def pre_get_iam_policy( def post_get_iam_policy(self, response: policy_pb2.Policy) -> policy_pb2.Policy: """Post-rpc interceptor for get_iam_policy - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_iam_policy_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DataPolicyService server but before - it is returned to user code. + it is returned to user code. This `post_get_iam_policy` interceptor runs + before the `post_get_iam_policy_with_metadata` interceptor. """ return response + def post_get_iam_policy_with_metadata( + self, + response: policy_pb2.Policy, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[policy_pb2.Policy, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_iam_policy + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataPolicyService server but before it is returned to user code. + + We recommend only using this `post_get_iam_policy_with_metadata` + interceptor in new development instead of the `post_get_iam_policy` interceptor. + When both interceptors are used, this `post_get_iam_policy_with_metadata` interceptor runs after the + `post_get_iam_policy` interceptor. The (possibly modified) response returned by + `post_get_iam_policy` will be passed to + `post_get_iam_policy_with_metadata`. + """ + return response, metadata + def pre_list_data_policies( self, request: datapolicy.ListDataPoliciesRequest, @@ -251,12 +320,37 @@ def post_list_data_policies( ) -> datapolicy.ListDataPoliciesResponse: """Post-rpc interceptor for list_data_policies - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_data_policies_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DataPolicyService server but before - it is returned to user code. + it is returned to user code. This `post_list_data_policies` interceptor runs + before the `post_list_data_policies_with_metadata` interceptor. 
""" return response + def post_list_data_policies_with_metadata( + self, + response: datapolicy.ListDataPoliciesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + datapolicy.ListDataPoliciesResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_data_policies + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataPolicyService server but before it is returned to user code. + + We recommend only using this `post_list_data_policies_with_metadata` + interceptor in new development instead of the `post_list_data_policies` interceptor. + When both interceptors are used, this `post_list_data_policies_with_metadata` interceptor runs after the + `post_list_data_policies` interceptor. The (possibly modified) response returned by + `post_list_data_policies` will be passed to + `post_list_data_policies_with_metadata`. + """ + return response, metadata + def pre_rename_data_policy( self, request: datapolicy.RenameDataPolicyRequest, @@ -276,12 +370,35 @@ def post_rename_data_policy( ) -> datapolicy.DataPolicy: """Post-rpc interceptor for rename_data_policy - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_rename_data_policy_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DataPolicyService server but before - it is returned to user code. + it is returned to user code. This `post_rename_data_policy` interceptor runs + before the `post_rename_data_policy_with_metadata` interceptor. """ return response + def post_rename_data_policy_with_metadata( + self, + response: datapolicy.DataPolicy, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[datapolicy.DataPolicy, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for rename_data_policy + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataPolicyService server but before it is returned to user code. + + We recommend only using this `post_rename_data_policy_with_metadata` + interceptor in new development instead of the `post_rename_data_policy` interceptor. + When both interceptors are used, this `post_rename_data_policy_with_metadata` interceptor runs after the + `post_rename_data_policy` interceptor. The (possibly modified) response returned by + `post_rename_data_policy` will be passed to + `post_rename_data_policy_with_metadata`. + """ + return response, metadata + def pre_set_iam_policy( self, request: iam_policy_pb2.SetIamPolicyRequest, @@ -299,12 +416,35 @@ def pre_set_iam_policy( def post_set_iam_policy(self, response: policy_pb2.Policy) -> policy_pb2.Policy: """Post-rpc interceptor for set_iam_policy - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_set_iam_policy_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DataPolicyService server but before - it is returned to user code. + it is returned to user code. This `post_set_iam_policy` interceptor runs + before the `post_set_iam_policy_with_metadata` interceptor. 
""" return response + def post_set_iam_policy_with_metadata( + self, + response: policy_pb2.Policy, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[policy_pb2.Policy, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for set_iam_policy + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataPolicyService server but before it is returned to user code. + + We recommend only using this `post_set_iam_policy_with_metadata` + interceptor in new development instead of the `post_set_iam_policy` interceptor. + When both interceptors are used, this `post_set_iam_policy_with_metadata` interceptor runs after the + `post_set_iam_policy` interceptor. The (possibly modified) response returned by + `post_set_iam_policy` will be passed to + `post_set_iam_policy_with_metadata`. + """ + return response, metadata + def pre_test_iam_permissions( self, request: iam_policy_pb2.TestIamPermissionsRequest, @@ -325,12 +465,38 @@ def post_test_iam_permissions( ) -> iam_policy_pb2.TestIamPermissionsResponse: """Post-rpc interceptor for test_iam_permissions - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_test_iam_permissions_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DataPolicyService server but before - it is returned to user code. + it is returned to user code. This `post_test_iam_permissions` interceptor runs + before the `post_test_iam_permissions_with_metadata` interceptor. """ return response + def post_test_iam_permissions_with_metadata( + self, + response: iam_policy_pb2.TestIamPermissionsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + iam_policy_pb2.TestIamPermissionsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for test_iam_permissions + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataPolicyService server but before it is returned to user code. + + We recommend only using this `post_test_iam_permissions_with_metadata` + interceptor in new development instead of the `post_test_iam_permissions` interceptor. + When both interceptors are used, this `post_test_iam_permissions_with_metadata` interceptor runs after the + `post_test_iam_permissions` interceptor. The (possibly modified) response returned by + `post_test_iam_permissions` will be passed to + `post_test_iam_permissions_with_metadata`. + """ + return response, metadata + def pre_update_data_policy( self, request: datapolicy.UpdateDataPolicyRequest, @@ -350,12 +516,35 @@ def post_update_data_policy( ) -> datapolicy.DataPolicy: """Post-rpc interceptor for update_data_policy - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_data_policy_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DataPolicyService server but before - it is returned to user code. + it is returned to user code. This `post_update_data_policy` interceptor runs + before the `post_update_data_policy_with_metadata` interceptor. 
""" return response + def post_update_data_policy_with_metadata( + self, + response: datapolicy.DataPolicy, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[datapolicy.DataPolicy, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_data_policy + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataPolicyService server but before it is returned to user code. + + We recommend only using this `post_update_data_policy_with_metadata` + interceptor in new development instead of the `post_update_data_policy` interceptor. + When both interceptors are used, this `post_update_data_policy_with_metadata` interceptor runs after the + `post_update_data_policy` interceptor. The (possibly modified) response returned by + `post_update_data_policy` will be passed to + `post_update_data_policy_with_metadata`. + """ + return response, metadata + @dataclasses.dataclass class DataPolicyServiceRestStub: @@ -571,6 +760,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_data_policy(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_data_policy_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -823,6 +1016,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_data_policy(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_data_policy_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1044,6 +1241,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_iam_policy(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_iam_policy_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1190,6 +1391,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_data_policies(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_data_policies_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1342,6 +1547,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_rename_data_policy(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_rename_data_policy_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1563,6 +1772,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_set_iam_policy(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_set_iam_policy_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1712,6 +1925,10 @@ def 
__call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_test_iam_permissions(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_test_iam_permissions_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1862,6 +2079,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_data_policy(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_data_policy_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-bigquery-datapolicies/google/cloud/bigquery_datapolicies_v1beta1/gapic_version.py b/packages/google-cloud-bigquery-datapolicies/google/cloud/bigquery_datapolicies_v1beta1/gapic_version.py index 02b228845902..44e5c049e336 100644 --- a/packages/google-cloud-bigquery-datapolicies/google/cloud/bigquery_datapolicies_v1beta1/gapic_version.py +++ b/packages/google-cloud-bigquery-datapolicies/google/cloud/bigquery_datapolicies_v1beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.6.11" # {x-release-please-version} +__version__ = "0.6.12" # {x-release-please-version} diff --git a/packages/google-cloud-bigquery-datapolicies/google/cloud/bigquery_datapolicies_v1beta1/services/data_policy_service/client.py b/packages/google-cloud-bigquery-datapolicies/google/cloud/bigquery_datapolicies_v1beta1/services/data_policy_service/client.py index c74037a18619..57fa270142d9 100644 --- a/packages/google-cloud-bigquery-datapolicies/google/cloud/bigquery_datapolicies_v1beta1/services/data_policy_service/client.py +++ b/packages/google-cloud-bigquery-datapolicies/google/cloud/bigquery_datapolicies_v1beta1/services/data_policy_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -492,6 +494,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. 
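For context on the new `*_with_metadata` hooks added above, here is a minimal sketch of how a caller could consume them. The override signature is taken directly from this diff; the transport wiring at the end assumes the conventional GAPIC REST pattern (a `DataPolicyServiceRestTransport` class accepting an `interceptor` argument), which is not itself shown in this change, and it needs application default credentials at runtime.

from typing import Sequence, Tuple, Union

from google.cloud.bigquery_datapolicies_v1.services.data_policy_service import (
    DataPolicyServiceClient,
)
from google.cloud.bigquery_datapolicies_v1.services.data_policy_service.transports.rest import (
    DataPolicyServiceRestInterceptor,
    DataPolicyServiceRestTransport,  # assumed name, following the usual GAPIC convention
)
from google.cloud.bigquery_datapolicies_v1.types import datapolicy


class HeaderReadingInterceptor(DataPolicyServiceRestInterceptor):
    """Reads per-call response headers via the new metadata-aware hook."""

    def post_create_data_policy_with_metadata(
        self,
        response: datapolicy.DataPolicy,
        metadata: Sequence[Tuple[str, Union[str, bytes]]],
    ) -> Tuple[datapolicy.DataPolicy, Sequence[Tuple[str, Union[str, bytes]]]]:
        # `metadata` is the [(header, value), ...] list the REST transport now
        # builds from response.headers; `response` has already passed through
        # the legacy post_create_data_policy interceptor.
        for key, value in metadata:
            print(f"create_data_policy response header {key}: {value}")
        return response, metadata


# Assumed wiring (not part of this diff): pass the interceptor to the generated
# REST transport, and the prebuilt transport to the client.
transport = DataPolicyServiceRestTransport(interceptor=HeaderReadingInterceptor())
client = DataPolicyServiceClient(transport=transport)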
diff --git a/packages/google-cloud-bigquery-datapolicies/noxfile.py b/packages/google-cloud-bigquery-datapolicies/noxfile.py index a9ceef47133c..0acc836b384e 100644 --- a/packages/google-cloud-bigquery-datapolicies/noxfile.py +++ b/packages/google-cloud-bigquery-datapolicies/noxfile.py @@ -382,20 +382,29 @@ def docfx(session): ["python", "upb", "cpp"], ) def prerelease_deps(session, protobuf_implementation): - """Run all tests with prerelease versions of dependencies installed.""" + """ + Run all tests with pre-release versions of dependencies installed + rather than the standard non pre-release versions. + Pre-releases versions can be installed using + `pip install --pre `. + """ if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): session.skip("cpp implementation is not supported in python 3.11+") # Install all dependencies - session.install("-e", ".[all, tests, tracing]") + session.install("-e", ".") + unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES + # Install dependencies for the unit test environment session.install(*unit_deps_all) + system_deps_all = ( SYSTEM_TEST_STANDARD_DEPENDENCIES + SYSTEM_TEST_EXTERNAL_DEPENDENCIES + SYSTEM_TEST_EXTRAS ) + # Install dependencies for the system test environment session.install(*system_deps_all) # Because we test minimum dependency versions on the minimum Python @@ -417,6 +426,7 @@ def prerelease_deps(session, protobuf_implementation): ) ] + # Install dependencies specified in `testing/constraints-X.txt`. session.install(*constraints_deps) prerel_deps = [ @@ -458,3 +468,70 @@ def prerelease_deps(session, protobuf_implementation): "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, }, ) + + +@nox.session(python="3.13") +@nox.parametrize( + "protobuf_implementation", + ["python", "upb"], +) +def core_deps_from_source(session, protobuf_implementation): + """Run all tests with local versions of core dependencies installed, + rather than pulling core dependencies from PyPI. + """ + + # Install all dependencies + session.install(".") + + # Install dependencies for the unit test environment + unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES + session.install(*unit_deps_all) + + # Install dependencies for the system test environment + system_deps_all = ( + SYSTEM_TEST_STANDARD_DEPENDENCIES + + SYSTEM_TEST_EXTERNAL_DEPENDENCIES + + SYSTEM_TEST_EXTRAS + ) + session.install(*system_deps_all) + + # Because we test minimum dependency versions on the minimum Python + # version, the first version we test with in the unit tests sessions has a + # constraints file containing all dependencies and extras that should be installed. + with open( + CURRENT_DIRECTORY + / "testing" + / f"constraints-{UNIT_TEST_PYTHON_VERSIONS[0]}.txt", + encoding="utf-8", + ) as constraints_file: + constraints_text = constraints_file.read() + + # Ignore leading whitespace and comment lines. + constraints_deps = [ + match.group(1) + for match in re.finditer( + r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE + ) + ] + + # Install dependencies specified in `testing/constraints-X.txt`. 
+ session.install(*constraints_deps) + + core_dependencies_from_source = [ + "google-api-core @ git+https://github.com/googleapis/python-api-core.git", + "google-auth @ git+https://github.com/googleapis/google-auth-library-python.git", + f"{CURRENT_DIRECTORY}/../googleapis-common-protos", + f"{CURRENT_DIRECTORY}/../grpc-google-iam-v1", + "proto-plus @ git+https://github.com/googleapis/proto-plus-python.git", + ] + + for dep in core_dependencies_from_source: + session.install(dep, "--ignore-installed", "--no-deps") + + session.run( + "py.test", + "tests/unit", + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, + ) diff --git a/packages/google-cloud-bigquery-datapolicies/samples/generated_samples/snippet_metadata_google.cloud.bigquery.datapolicies.v1.json b/packages/google-cloud-bigquery-datapolicies/samples/generated_samples/snippet_metadata_google.cloud.bigquery.datapolicies.v1.json index 444a432a20be..390eaf7b1f8d 100644 --- a/packages/google-cloud-bigquery-datapolicies/samples/generated_samples/snippet_metadata_google.cloud.bigquery.datapolicies.v1.json +++ b/packages/google-cloud-bigquery-datapolicies/samples/generated_samples/snippet_metadata_google.cloud.bigquery.datapolicies.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-bigquery-datapolicies", - "version": "0.6.11" + "version": "0.6.12" }, "snippets": [ { diff --git a/packages/google-cloud-bigquery-datapolicies/samples/generated_samples/snippet_metadata_google.cloud.bigquery.datapolicies.v1beta1.json b/packages/google-cloud-bigquery-datapolicies/samples/generated_samples/snippet_metadata_google.cloud.bigquery.datapolicies.v1beta1.json index 1ac454cdf135..5a7576fe63ad 100644 --- a/packages/google-cloud-bigquery-datapolicies/samples/generated_samples/snippet_metadata_google.cloud.bigquery.datapolicies.v1beta1.json +++ b/packages/google-cloud-bigquery-datapolicies/samples/generated_samples/snippet_metadata_google.cloud.bigquery.datapolicies.v1beta1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-bigquery-datapolicies", - "version": "0.6.11" + "version": "0.6.12" }, "snippets": [ { diff --git a/packages/google-cloud-bigquery-datapolicies/tests/unit/gapic/bigquery_datapolicies_v1/test_data_policy_service.py b/packages/google-cloud-bigquery-datapolicies/tests/unit/gapic/bigquery_datapolicies_v1/test_data_policy_service.py index c38e40f4af88..84017e4c3b0c 100644 --- a/packages/google-cloud-bigquery-datapolicies/tests/unit/gapic/bigquery_datapolicies_v1/test_data_policy_service.py +++ b/packages/google-cloud-bigquery-datapolicies/tests/unit/gapic/bigquery_datapolicies_v1/test_data_policy_service.py @@ -65,6 +65,13 @@ ) from google.cloud.bigquery_datapolicies_v1.types import datapolicy +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -332,6 +339,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = DataPolicyServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = DataPolicyServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -6545,10 +6595,14 @@ def test_create_data_policy_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DataPolicyServiceRestInterceptor, "post_create_data_policy" ) as post, mock.patch.object( + transports.DataPolicyServiceRestInterceptor, + "post_create_data_policy_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DataPolicyServiceRestInterceptor, "pre_create_data_policy" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = datapolicy.CreateDataPolicyRequest.pb( datapolicy.CreateDataPolicyRequest() ) @@ -6572,6 +6626,7 @@ def test_create_data_policy_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = datapolicy.DataPolicy() + post_with_metadata.return_value = datapolicy.DataPolicy(), metadata client.create_data_policy( request, @@ -6583,6 +6638,7 @@ def test_create_data_policy_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_data_policy_rest_bad_request( @@ -6757,10 +6813,14 @@ def test_update_data_policy_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DataPolicyServiceRestInterceptor, "post_update_data_policy" ) as post, mock.patch.object( + transports.DataPolicyServiceRestInterceptor, + "post_update_data_policy_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DataPolicyServiceRestInterceptor, "pre_update_data_policy" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = datapolicy.UpdateDataPolicyRequest.pb( datapolicy.UpdateDataPolicyRequest() ) @@ -6784,6 +6844,7 @@ def test_update_data_policy_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = datapolicy.DataPolicy() + post_with_metadata.return_value = datapolicy.DataPolicy(), metadata client.update_data_policy( request, @@ -6795,6 +6856,7 @@ def test_update_data_policy_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + 
post_with_metadata.assert_called_once() def test_rename_data_policy_rest_bad_request( @@ -6887,10 +6949,14 @@ def test_rename_data_policy_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DataPolicyServiceRestInterceptor, "post_rename_data_policy" ) as post, mock.patch.object( + transports.DataPolicyServiceRestInterceptor, + "post_rename_data_policy_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DataPolicyServiceRestInterceptor, "pre_rename_data_policy" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = datapolicy.RenameDataPolicyRequest.pb( datapolicy.RenameDataPolicyRequest() ) @@ -6914,6 +6980,7 @@ def test_rename_data_policy_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = datapolicy.DataPolicy() + post_with_metadata.return_value = datapolicy.DataPolicy(), metadata client.rename_data_policy( request, @@ -6925,6 +6992,7 @@ def test_rename_data_policy_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_data_policy_rest_bad_request( @@ -7124,10 +7192,14 @@ def test_get_data_policy_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DataPolicyServiceRestInterceptor, "post_get_data_policy" ) as post, mock.patch.object( + transports.DataPolicyServiceRestInterceptor, + "post_get_data_policy_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DataPolicyServiceRestInterceptor, "pre_get_data_policy" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = datapolicy.GetDataPolicyRequest.pb( datapolicy.GetDataPolicyRequest() ) @@ -7151,6 +7223,7 @@ def test_get_data_policy_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = datapolicy.DataPolicy() + post_with_metadata.return_value = datapolicy.DataPolicy(), metadata client.get_data_policy( request, @@ -7162,6 +7235,7 @@ def test_get_data_policy_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_data_policies_rest_bad_request( @@ -7246,10 +7320,14 @@ def test_list_data_policies_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DataPolicyServiceRestInterceptor, "post_list_data_policies" ) as post, mock.patch.object( + transports.DataPolicyServiceRestInterceptor, + "post_list_data_policies_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DataPolicyServiceRestInterceptor, "pre_list_data_policies" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = datapolicy.ListDataPoliciesRequest.pb( datapolicy.ListDataPoliciesRequest() ) @@ -7275,6 +7353,10 @@ def test_list_data_policies_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = datapolicy.ListDataPoliciesResponse() + post_with_metadata.return_value = ( + datapolicy.ListDataPoliciesResponse(), + metadata, + ) client.list_data_policies( request, @@ -7286,6 +7368,7 @@ def test_list_data_policies_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_iam_policy_rest_bad_request( @@ -7373,10 +7456,13 @@ def test_get_iam_policy_rest_interceptors(null_interceptor): ) 
as transcode, mock.patch.object( transports.DataPolicyServiceRestInterceptor, "post_get_iam_policy" ) as post, mock.patch.object( + transports.DataPolicyServiceRestInterceptor, "post_get_iam_policy_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DataPolicyServiceRestInterceptor, "pre_get_iam_policy" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = iam_policy_pb2.GetIamPolicyRequest() transcode.return_value = { "method": "post", @@ -7398,6 +7484,7 @@ def test_get_iam_policy_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = policy_pb2.Policy() + post_with_metadata.return_value = policy_pb2.Policy(), metadata client.get_iam_policy( request, @@ -7409,6 +7496,7 @@ def test_get_iam_policy_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_set_iam_policy_rest_bad_request( @@ -7496,10 +7584,13 @@ def test_set_iam_policy_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DataPolicyServiceRestInterceptor, "post_set_iam_policy" ) as post, mock.patch.object( + transports.DataPolicyServiceRestInterceptor, "post_set_iam_policy_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DataPolicyServiceRestInterceptor, "pre_set_iam_policy" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = iam_policy_pb2.SetIamPolicyRequest() transcode.return_value = { "method": "post", @@ -7521,6 +7612,7 @@ def test_set_iam_policy_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = policy_pb2.Policy() + post_with_metadata.return_value = policy_pb2.Policy(), metadata client.set_iam_policy( request, @@ -7532,6 +7624,7 @@ def test_set_iam_policy_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_test_iam_permissions_rest_bad_request( @@ -7617,10 +7710,14 @@ def test_test_iam_permissions_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DataPolicyServiceRestInterceptor, "post_test_iam_permissions" ) as post, mock.patch.object( + transports.DataPolicyServiceRestInterceptor, + "post_test_iam_permissions_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DataPolicyServiceRestInterceptor, "pre_test_iam_permissions" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = iam_policy_pb2.TestIamPermissionsRequest() transcode.return_value = { "method": "post", @@ -7644,6 +7741,10 @@ def test_test_iam_permissions_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = iam_policy_pb2.TestIamPermissionsResponse() + post_with_metadata.return_value = ( + iam_policy_pb2.TestIamPermissionsResponse(), + metadata, + ) client.test_iam_permissions( request, @@ -7655,6 +7756,7 @@ def test_test_iam_permissions_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_initialize_client_w_rest(): diff --git a/packages/google-cloud-bigquery-datapolicies/tests/unit/gapic/bigquery_datapolicies_v1beta1/test_data_policy_service.py b/packages/google-cloud-bigquery-datapolicies/tests/unit/gapic/bigquery_datapolicies_v1beta1/test_data_policy_service.py index 
a9ac78315857..5c62400f2bf0 100644 --- a/packages/google-cloud-bigquery-datapolicies/tests/unit/gapic/bigquery_datapolicies_v1beta1/test_data_policy_service.py +++ b/packages/google-cloud-bigquery-datapolicies/tests/unit/gapic/bigquery_datapolicies_v1beta1/test_data_policy_service.py @@ -22,6 +22,7 @@ except ImportError: # pragma: NO COVER import mock +import json import math from google.api_core import api_core_version @@ -60,6 +61,13 @@ ) from google.cloud.bigquery_datapolicies_v1beta1.types import datapolicy +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -327,6 +335,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = DataPolicyServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = DataPolicyServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-bigquery-datatransfer/CHANGELOG.md b/packages/google-cloud-bigquery-datatransfer/CHANGELOG.md index 0157eeea1235..04b8f153f94f 100644 --- a/packages/google-cloud-bigquery-datatransfer/CHANGELOG.md +++ b/packages/google-cloud-bigquery-datatransfer/CHANGELOG.md @@ -4,6 +4,14 @@ [1]: https://pypi.org/project/google-cloud-bigquery-datatransfer/#history +## [3.19.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-bigquery-datatransfer-v3.18.0...google-cloud-bigquery-datatransfer-v3.19.0) (2025-02-12) + + +### Features + +* Add REST Interceptors which support reading metadata ([b1c3ce8](https://github.com/googleapis/google-cloud-python/commit/b1c3ce8b271e9d22afabcde054e81dcedae6b0ef)) +* Add support for reading selective GAPIC generation methods from service YAML ([b1c3ce8](https://github.com/googleapis/google-cloud-python/commit/b1c3ce8b271e9d22afabcde054e81dcedae6b0ef)) + ## [3.18.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-bigquery-datatransfer-v3.17.1...google-cloud-bigquery-datatransfer-v3.18.0) (2024-12-12) diff --git a/packages/google-cloud-bigquery-datatransfer/README.rst 
b/packages/google-cloud-bigquery-datatransfer/README.rst index 331dcfc575bc..c23b84ed42e4 100644 --- a/packages/google-cloud-bigquery-datatransfer/README.rst +++ b/packages/google-cloud-bigquery-datatransfer/README.rst @@ -26,12 +26,12 @@ In order to use this library, you first need to go through the following steps: 1. `Select or create a Cloud Platform project.`_ 2. `Enable billing for your project.`_ 3. `Enable the BigQuery Data Transfer.`_ -4. `Setup Authentication.`_ +4. `Set up Authentication.`_ .. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project .. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project .. _Enable the BigQuery Data Transfer.: https://cloud.google.com/bigquery/transfer/ -.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html +.. _Set up Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html Installation ~~~~~~~~~~~~ diff --git a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer/gapic_version.py b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer/gapic_version.py index 4138c894ea3a..420469d05091 100644 --- a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer/gapic_version.py +++ b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "3.18.0" # {x-release-please-version} +__version__ = "3.19.0" # {x-release-please-version} diff --git a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/gapic_version.py b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/gapic_version.py index 4138c894ea3a..420469d05091 100644 --- a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/gapic_version.py +++ b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "3.18.0" # {x-release-please-version} +__version__ = "3.19.0" # {x-release-please-version} diff --git a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/client.py b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/client.py index 21289026f52e..dcfbd3f143a5 100644 --- a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/client.py +++ b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -532,6 +534,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. 
+ """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -2580,16 +2609,20 @@ def get_location( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def list_locations( self, @@ -2635,16 +2668,20 @@ def list_locations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/transports/rest.py b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/transports/rest.py index 8eab3de2412b..454b8caf5d28 100644 --- a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/transports/rest.py +++ b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/transports/rest.py @@ -207,12 +207,37 @@ def post_check_valid_creds( ) -> datatransfer.CheckValidCredsResponse: """Post-rpc interceptor for check_valid_creds - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_check_valid_creds_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DataTransferService server but before - it is returned to user code. + it is returned to user code. This `post_check_valid_creds` interceptor runs + before the `post_check_valid_creds_with_metadata` interceptor. """ return response + def post_check_valid_creds_with_metadata( + self, + response: datatransfer.CheckValidCredsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + datatransfer.CheckValidCredsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for check_valid_creds + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataTransferService server but before it is returned to user code. 
+ + We recommend only using this `post_check_valid_creds_with_metadata` + interceptor in new development instead of the `post_check_valid_creds` interceptor. + When both interceptors are used, this `post_check_valid_creds_with_metadata` interceptor runs after the + `post_check_valid_creds` interceptor. The (possibly modified) response returned by + `post_check_valid_creds` will be passed to + `post_check_valid_creds_with_metadata`. + """ + return response, metadata + def pre_create_transfer_config( self, request: datatransfer.CreateTransferConfigRequest, @@ -233,12 +258,35 @@ def post_create_transfer_config( ) -> transfer.TransferConfig: """Post-rpc interceptor for create_transfer_config - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_transfer_config_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DataTransferService server but before - it is returned to user code. + it is returned to user code. This `post_create_transfer_config` interceptor runs + before the `post_create_transfer_config_with_metadata` interceptor. """ return response + def post_create_transfer_config_with_metadata( + self, + response: transfer.TransferConfig, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[transfer.TransferConfig, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_transfer_config + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataTransferService server but before it is returned to user code. + + We recommend only using this `post_create_transfer_config_with_metadata` + interceptor in new development instead of the `post_create_transfer_config` interceptor. + When both interceptors are used, this `post_create_transfer_config_with_metadata` interceptor runs after the + `post_create_transfer_config` interceptor. The (possibly modified) response returned by + `post_create_transfer_config` will be passed to + `post_create_transfer_config_with_metadata`. + """ + return response, metadata + def pre_delete_transfer_config( self, request: datatransfer.DeleteTransferConfigRequest, @@ -301,12 +349,35 @@ def post_get_data_source( ) -> datatransfer.DataSource: """Post-rpc interceptor for get_data_source - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_data_source_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DataTransferService server but before - it is returned to user code. + it is returned to user code. This `post_get_data_source` interceptor runs + before the `post_get_data_source_with_metadata` interceptor. """ return response + def post_get_data_source_with_metadata( + self, + response: datatransfer.DataSource, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[datatransfer.DataSource, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_data_source + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataTransferService server but before it is returned to user code. + + We recommend only using this `post_get_data_source_with_metadata` + interceptor in new development instead of the `post_get_data_source` interceptor. + When both interceptors are used, this `post_get_data_source_with_metadata` interceptor runs after the + `post_get_data_source` interceptor. 
The (possibly modified) response returned by + `post_get_data_source` will be passed to + `post_get_data_source_with_metadata`. + """ + return response, metadata + def pre_get_transfer_config( self, request: datatransfer.GetTransferConfigRequest, @@ -326,12 +397,35 @@ def post_get_transfer_config( ) -> transfer.TransferConfig: """Post-rpc interceptor for get_transfer_config - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_transfer_config_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DataTransferService server but before - it is returned to user code. + it is returned to user code. This `post_get_transfer_config` interceptor runs + before the `post_get_transfer_config_with_metadata` interceptor. """ return response + def post_get_transfer_config_with_metadata( + self, + response: transfer.TransferConfig, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[transfer.TransferConfig, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_transfer_config + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataTransferService server but before it is returned to user code. + + We recommend only using this `post_get_transfer_config_with_metadata` + interceptor in new development instead of the `post_get_transfer_config` interceptor. + When both interceptors are used, this `post_get_transfer_config_with_metadata` interceptor runs after the + `post_get_transfer_config` interceptor. The (possibly modified) response returned by + `post_get_transfer_config` will be passed to + `post_get_transfer_config_with_metadata`. + """ + return response, metadata + def pre_get_transfer_run( self, request: datatransfer.GetTransferRunRequest, @@ -351,12 +445,35 @@ def post_get_transfer_run( ) -> transfer.TransferRun: """Post-rpc interceptor for get_transfer_run - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_transfer_run_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DataTransferService server but before - it is returned to user code. + it is returned to user code. This `post_get_transfer_run` interceptor runs + before the `post_get_transfer_run_with_metadata` interceptor. """ return response + def post_get_transfer_run_with_metadata( + self, + response: transfer.TransferRun, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[transfer.TransferRun, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_transfer_run + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataTransferService server but before it is returned to user code. + + We recommend only using this `post_get_transfer_run_with_metadata` + interceptor in new development instead of the `post_get_transfer_run` interceptor. + When both interceptors are used, this `post_get_transfer_run_with_metadata` interceptor runs after the + `post_get_transfer_run` interceptor. The (possibly modified) response returned by + `post_get_transfer_run` will be passed to + `post_get_transfer_run_with_metadata`. 
+ """ + return response, metadata + def pre_list_data_sources( self, request: datatransfer.ListDataSourcesRequest, @@ -376,12 +493,37 @@ def post_list_data_sources( ) -> datatransfer.ListDataSourcesResponse: """Post-rpc interceptor for list_data_sources - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_data_sources_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DataTransferService server but before - it is returned to user code. + it is returned to user code. This `post_list_data_sources` interceptor runs + before the `post_list_data_sources_with_metadata` interceptor. """ return response + def post_list_data_sources_with_metadata( + self, + response: datatransfer.ListDataSourcesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + datatransfer.ListDataSourcesResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_data_sources + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataTransferService server but before it is returned to user code. + + We recommend only using this `post_list_data_sources_with_metadata` + interceptor in new development instead of the `post_list_data_sources` interceptor. + When both interceptors are used, this `post_list_data_sources_with_metadata` interceptor runs after the + `post_list_data_sources` interceptor. The (possibly modified) response returned by + `post_list_data_sources` will be passed to + `post_list_data_sources_with_metadata`. + """ + return response, metadata + def pre_list_transfer_configs( self, request: datatransfer.ListTransferConfigsRequest, @@ -401,12 +543,38 @@ def post_list_transfer_configs( ) -> datatransfer.ListTransferConfigsResponse: """Post-rpc interceptor for list_transfer_configs - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_transfer_configs_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DataTransferService server but before - it is returned to user code. + it is returned to user code. This `post_list_transfer_configs` interceptor runs + before the `post_list_transfer_configs_with_metadata` interceptor. """ return response + def post_list_transfer_configs_with_metadata( + self, + response: datatransfer.ListTransferConfigsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + datatransfer.ListTransferConfigsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_transfer_configs + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataTransferService server but before it is returned to user code. + + We recommend only using this `post_list_transfer_configs_with_metadata` + interceptor in new development instead of the `post_list_transfer_configs` interceptor. + When both interceptors are used, this `post_list_transfer_configs_with_metadata` interceptor runs after the + `post_list_transfer_configs` interceptor. The (possibly modified) response returned by + `post_list_transfer_configs` will be passed to + `post_list_transfer_configs_with_metadata`. 
+ """ + return response, metadata + def pre_list_transfer_logs( self, request: datatransfer.ListTransferLogsRequest, @@ -426,12 +594,37 @@ def post_list_transfer_logs( ) -> datatransfer.ListTransferLogsResponse: """Post-rpc interceptor for list_transfer_logs - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_transfer_logs_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DataTransferService server but before - it is returned to user code. + it is returned to user code. This `post_list_transfer_logs` interceptor runs + before the `post_list_transfer_logs_with_metadata` interceptor. """ return response + def post_list_transfer_logs_with_metadata( + self, + response: datatransfer.ListTransferLogsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + datatransfer.ListTransferLogsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_transfer_logs + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataTransferService server but before it is returned to user code. + + We recommend only using this `post_list_transfer_logs_with_metadata` + interceptor in new development instead of the `post_list_transfer_logs` interceptor. + When both interceptors are used, this `post_list_transfer_logs_with_metadata` interceptor runs after the + `post_list_transfer_logs` interceptor. The (possibly modified) response returned by + `post_list_transfer_logs` will be passed to + `post_list_transfer_logs_with_metadata`. + """ + return response, metadata + def pre_list_transfer_runs( self, request: datatransfer.ListTransferRunsRequest, @@ -451,12 +644,37 @@ def post_list_transfer_runs( ) -> datatransfer.ListTransferRunsResponse: """Post-rpc interceptor for list_transfer_runs - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_transfer_runs_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DataTransferService server but before - it is returned to user code. + it is returned to user code. This `post_list_transfer_runs` interceptor runs + before the `post_list_transfer_runs_with_metadata` interceptor. """ return response + def post_list_transfer_runs_with_metadata( + self, + response: datatransfer.ListTransferRunsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + datatransfer.ListTransferRunsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_transfer_runs + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataTransferService server but before it is returned to user code. + + We recommend only using this `post_list_transfer_runs_with_metadata` + interceptor in new development instead of the `post_list_transfer_runs` interceptor. + When both interceptors are used, this `post_list_transfer_runs_with_metadata` interceptor runs after the + `post_list_transfer_runs` interceptor. The (possibly modified) response returned by + `post_list_transfer_runs` will be passed to + `post_list_transfer_runs_with_metadata`. 
+ """ + return response, metadata + def pre_schedule_transfer_runs( self, request: datatransfer.ScheduleTransferRunsRequest, @@ -477,12 +695,38 @@ def post_schedule_transfer_runs( ) -> datatransfer.ScheduleTransferRunsResponse: """Post-rpc interceptor for schedule_transfer_runs - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_schedule_transfer_runs_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DataTransferService server but before - it is returned to user code. + it is returned to user code. This `post_schedule_transfer_runs` interceptor runs + before the `post_schedule_transfer_runs_with_metadata` interceptor. """ return response + def post_schedule_transfer_runs_with_metadata( + self, + response: datatransfer.ScheduleTransferRunsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + datatransfer.ScheduleTransferRunsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for schedule_transfer_runs + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataTransferService server but before it is returned to user code. + + We recommend only using this `post_schedule_transfer_runs_with_metadata` + interceptor in new development instead of the `post_schedule_transfer_runs` interceptor. + When both interceptors are used, this `post_schedule_transfer_runs_with_metadata` interceptor runs after the + `post_schedule_transfer_runs` interceptor. The (possibly modified) response returned by + `post_schedule_transfer_runs` will be passed to + `post_schedule_transfer_runs_with_metadata`. + """ + return response, metadata + def pre_start_manual_transfer_runs( self, request: datatransfer.StartManualTransferRunsRequest, @@ -503,12 +747,38 @@ def post_start_manual_transfer_runs( ) -> datatransfer.StartManualTransferRunsResponse: """Post-rpc interceptor for start_manual_transfer_runs - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_start_manual_transfer_runs_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DataTransferService server but before - it is returned to user code. + it is returned to user code. This `post_start_manual_transfer_runs` interceptor runs + before the `post_start_manual_transfer_runs_with_metadata` interceptor. """ return response + def post_start_manual_transfer_runs_with_metadata( + self, + response: datatransfer.StartManualTransferRunsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + datatransfer.StartManualTransferRunsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for start_manual_transfer_runs + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataTransferService server but before it is returned to user code. + + We recommend only using this `post_start_manual_transfer_runs_with_metadata` + interceptor in new development instead of the `post_start_manual_transfer_runs` interceptor. + When both interceptors are used, this `post_start_manual_transfer_runs_with_metadata` interceptor runs after the + `post_start_manual_transfer_runs` interceptor. The (possibly modified) response returned by + `post_start_manual_transfer_runs` will be passed to + `post_start_manual_transfer_runs_with_metadata`. 
+ """ + return response, metadata + def pre_unenroll_data_sources( self, request: datatransfer.UnenrollDataSourcesRequest, @@ -543,12 +813,35 @@ def post_update_transfer_config( ) -> transfer.TransferConfig: """Post-rpc interceptor for update_transfer_config - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_transfer_config_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DataTransferService server but before - it is returned to user code. + it is returned to user code. This `post_update_transfer_config` interceptor runs + before the `post_update_transfer_config_with_metadata` interceptor. """ return response + def post_update_transfer_config_with_metadata( + self, + response: transfer.TransferConfig, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[transfer.TransferConfig, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_transfer_config + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataTransferService server but before it is returned to user code. + + We recommend only using this `post_update_transfer_config_with_metadata` + interceptor in new development instead of the `post_update_transfer_config` interceptor. + When both interceptors are used, this `post_update_transfer_config_with_metadata` interceptor runs after the + `post_update_transfer_config` interceptor. The (possibly modified) response returned by + `post_update_transfer_config` will be passed to + `post_update_transfer_config_with_metadata`. + """ + return response, metadata + def pre_get_location( self, request: locations_pb2.GetLocationRequest, @@ -825,6 +1118,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_check_valid_creds(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_check_valid_creds_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -998,6 +1295,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_transfer_config(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_transfer_config_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1485,6 +1786,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_data_source(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_data_source_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1638,6 +1943,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_transfer_config(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_transfer_config_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1782,6 +2091,10 @@ def __call__( json_format.Parse(response.content, pb_resp, 
ignore_unknown_fields=True) resp = self._interceptor.post_get_transfer_run(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_transfer_run_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1929,6 +2242,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_data_sources(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_data_sources_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2079,6 +2396,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_transfer_configs(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_transfer_configs_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2228,6 +2549,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_transfer_logs(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_transfer_logs_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2375,6 +2700,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_transfer_runs(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_transfer_runs_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2531,6 +2860,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_schedule_transfer_runs(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_schedule_transfer_runs_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2688,6 +3021,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_start_manual_transfer_runs(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_start_manual_transfer_runs_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2974,6 +3311,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_transfer_config(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_transfer_config_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-bigquery-datatransfer/noxfile.py b/packages/google-cloud-bigquery-datatransfer/noxfile.py index a9ceef47133c..0acc836b384e 100644 --- 
a/packages/google-cloud-bigquery-datatransfer/noxfile.py +++ b/packages/google-cloud-bigquery-datatransfer/noxfile.py @@ -382,20 +382,29 @@ def docfx(session): ["python", "upb", "cpp"], ) def prerelease_deps(session, protobuf_implementation): - """Run all tests with prerelease versions of dependencies installed.""" + """ + Run all tests with pre-release versions of dependencies installed + rather than the standard non pre-release versions. + Pre-releases versions can be installed using + `pip install --pre `. + """ if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): session.skip("cpp implementation is not supported in python 3.11+") # Install all dependencies - session.install("-e", ".[all, tests, tracing]") + session.install("-e", ".") + unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES + # Install dependencies for the unit test environment session.install(*unit_deps_all) + system_deps_all = ( SYSTEM_TEST_STANDARD_DEPENDENCIES + SYSTEM_TEST_EXTERNAL_DEPENDENCIES + SYSTEM_TEST_EXTRAS ) + # Install dependencies for the system test environment session.install(*system_deps_all) # Because we test minimum dependency versions on the minimum Python @@ -417,6 +426,7 @@ def prerelease_deps(session, protobuf_implementation): ) ] + # Install dependencies specified in `testing/constraints-X.txt`. session.install(*constraints_deps) prerel_deps = [ @@ -458,3 +468,70 @@ def prerelease_deps(session, protobuf_implementation): "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, }, ) + + +@nox.session(python="3.13") +@nox.parametrize( + "protobuf_implementation", + ["python", "upb"], +) +def core_deps_from_source(session, protobuf_implementation): + """Run all tests with local versions of core dependencies installed, + rather than pulling core dependencies from PyPI. + """ + + # Install all dependencies + session.install(".") + + # Install dependencies for the unit test environment + unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES + session.install(*unit_deps_all) + + # Install dependencies for the system test environment + system_deps_all = ( + SYSTEM_TEST_STANDARD_DEPENDENCIES + + SYSTEM_TEST_EXTERNAL_DEPENDENCIES + + SYSTEM_TEST_EXTRAS + ) + session.install(*system_deps_all) + + # Because we test minimum dependency versions on the minimum Python + # version, the first version we test with in the unit tests sessions has a + # constraints file containing all dependencies and extras that should be installed. + with open( + CURRENT_DIRECTORY + / "testing" + / f"constraints-{UNIT_TEST_PYTHON_VERSIONS[0]}.txt", + encoding="utf-8", + ) as constraints_file: + constraints_text = constraints_file.read() + + # Ignore leading whitespace and comment lines. + constraints_deps = [ + match.group(1) + for match in re.finditer( + r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE + ) + ] + + # Install dependencies specified in `testing/constraints-X.txt`. 
+ session.install(*constraints_deps) + + core_dependencies_from_source = [ + "google-api-core @ git+https://github.com/googleapis/python-api-core.git", + "google-auth @ git+https://github.com/googleapis/google-auth-library-python.git", + f"{CURRENT_DIRECTORY}/../googleapis-common-protos", + f"{CURRENT_DIRECTORY}/../grpc-google-iam-v1", + "proto-plus @ git+https://github.com/googleapis/proto-plus-python.git", + ] + + for dep in core_dependencies_from_source: + session.install(dep, "--ignore-installed", "--no-deps") + + session.run( + "py.test", + "tests/unit", + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, + ) diff --git a/packages/google-cloud-bigquery-datatransfer/samples/generated_samples/snippet_metadata_google.cloud.bigquery.datatransfer.v1.json b/packages/google-cloud-bigquery-datatransfer/samples/generated_samples/snippet_metadata_google.cloud.bigquery.datatransfer.v1.json index 2f9fd9267c67..b139e56d64cc 100644 --- a/packages/google-cloud-bigquery-datatransfer/samples/generated_samples/snippet_metadata_google.cloud.bigquery.datatransfer.v1.json +++ b/packages/google-cloud-bigquery-datatransfer/samples/generated_samples/snippet_metadata_google.cloud.bigquery.datatransfer.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-bigquery-datatransfer", - "version": "3.18.0" + "version": "3.19.0" }, "snippets": [ { diff --git a/packages/google-cloud-bigquery-datatransfer/tests/unit/gapic/bigquery_datatransfer_v1/test_data_transfer_service.py b/packages/google-cloud-bigquery-datatransfer/tests/unit/gapic/bigquery_datatransfer_v1/test_data_transfer_service.py index add441b9f214..77af061ccaaf 100644 --- a/packages/google-cloud-bigquery-datatransfer/tests/unit/gapic/bigquery_datatransfer_v1/test_data_transfer_service.py +++ b/packages/google-cloud-bigquery-datatransfer/tests/unit/gapic/bigquery_datatransfer_v1/test_data_transfer_service.py @@ -68,6 +68,13 @@ ) from google.cloud.bigquery_datatransfer_v1.types import datatransfer, transfer +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -341,6 +348,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
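
The `post_*_with_metadata` interceptors introduced in the rest.py hunks above expose the HTTP response headers to subclasses alongside the decoded response, and the transport `__call__` changes show the headers being collected and passed through. The sketch below illustrates how an application might consume the new hook. The interceptor class and method names come from this diff; the wiring of a custom interceptor into `DataTransferServiceRestTransport` and the use of anonymous credentials are assumptions for illustration, based on the standard generated GAPIC REST transport surface, not part of this change.

    # Hedged sketch: reading response metadata via the new *_with_metadata hook.
    # The interceptor class and method names appear in this diff; the transport
    # wiring (interceptor= argument, AnonymousCredentials) is assumed for
    # illustration only.
    from typing import Sequence, Tuple, Union

    from google.auth.credentials import AnonymousCredentials
    from google.cloud import bigquery_datatransfer_v1
    from google.cloud.bigquery_datatransfer_v1.services.data_transfer_service.transports import (
        DataTransferServiceRestInterceptor,
        DataTransferServiceRestTransport,
    )
    from google.cloud.bigquery_datatransfer_v1.types import datatransfer


    class HeaderLoggingInterceptor(DataTransferServiceRestInterceptor):
        def post_list_data_sources_with_metadata(
            self,
            response: datatransfer.ListDataSourcesResponse,
            metadata: Sequence[Tuple[str, Union[str, bytes]]],
        ):
            # `metadata` carries the HTTP response headers gathered by the
            # transport; the response may still be modified before it is
            # returned to user code.
            for key, value in metadata:
                print(f"response header: {key}={value}")
            return response, metadata


    # Assumed wiring: pass the interceptor to the REST transport explicitly.
    transport = DataTransferServiceRestTransport(
        credentials=AnonymousCredentials(), interceptor=HeaderLoggingInterceptor()
    )
    client = bigquery_datatransfer_v1.DataTransferServiceClient(transport=transport)

When both interceptors are overridden, the plain `post_list_data_sources` hook runs first and its (possibly modified) response is what the `_with_metadata` hook receives, matching the ordering described in the docstrings above.
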
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = DataTransferServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = DataTransferServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -11542,10 +11592,14 @@ def test_get_data_source_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DataTransferServiceRestInterceptor, "post_get_data_source" ) as post, mock.patch.object( + transports.DataTransferServiceRestInterceptor, + "post_get_data_source_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DataTransferServiceRestInterceptor, "pre_get_data_source" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = datatransfer.GetDataSourceRequest.pb( datatransfer.GetDataSourceRequest() ) @@ -11569,6 +11623,7 @@ def test_get_data_source_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = datatransfer.DataSource() + post_with_metadata.return_value = datatransfer.DataSource(), metadata client.get_data_source( request, @@ -11580,6 +11635,7 @@ def test_get_data_source_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_data_sources_rest_bad_request( @@ -11664,10 +11720,14 @@ def test_list_data_sources_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DataTransferServiceRestInterceptor, "post_list_data_sources" ) as post, mock.patch.object( + transports.DataTransferServiceRestInterceptor, + "post_list_data_sources_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DataTransferServiceRestInterceptor, "pre_list_data_sources" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = datatransfer.ListDataSourcesRequest.pb( datatransfer.ListDataSourcesRequest() ) @@ -11693,6 +11753,10 @@ def test_list_data_sources_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = datatransfer.ListDataSourcesResponse() + post_with_metadata.return_value = ( + datatransfer.ListDataSourcesResponse(), + metadata, + ) client.list_data_sources( request, @@ -11704,6 +11768,7 @@ def test_list_data_sources_rest_interceptors(null_interceptor): pre.assert_called_once() 
post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_transfer_config_rest_bad_request( @@ -11919,10 +11984,14 @@ def test_create_transfer_config_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DataTransferServiceRestInterceptor, "post_create_transfer_config" ) as post, mock.patch.object( + transports.DataTransferServiceRestInterceptor, + "post_create_transfer_config_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DataTransferServiceRestInterceptor, "pre_create_transfer_config" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = datatransfer.CreateTransferConfigRequest.pb( datatransfer.CreateTransferConfigRequest() ) @@ -11946,6 +12015,7 @@ def test_create_transfer_config_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = transfer.TransferConfig() + post_with_metadata.return_value = transfer.TransferConfig(), metadata client.create_transfer_config( request, @@ -11957,6 +12027,7 @@ def test_create_transfer_config_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_transfer_config_rest_bad_request( @@ -12180,10 +12251,14 @@ def test_update_transfer_config_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DataTransferServiceRestInterceptor, "post_update_transfer_config" ) as post, mock.patch.object( + transports.DataTransferServiceRestInterceptor, + "post_update_transfer_config_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DataTransferServiceRestInterceptor, "pre_update_transfer_config" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = datatransfer.UpdateTransferConfigRequest.pb( datatransfer.UpdateTransferConfigRequest() ) @@ -12207,6 +12282,7 @@ def test_update_transfer_config_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = transfer.TransferConfig() + post_with_metadata.return_value = transfer.TransferConfig(), metadata client.update_transfer_config( request, @@ -12218,6 +12294,7 @@ def test_update_transfer_config_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_transfer_config_rest_bad_request( @@ -12438,10 +12515,14 @@ def test_get_transfer_config_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DataTransferServiceRestInterceptor, "post_get_transfer_config" ) as post, mock.patch.object( + transports.DataTransferServiceRestInterceptor, + "post_get_transfer_config_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DataTransferServiceRestInterceptor, "pre_get_transfer_config" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = datatransfer.GetTransferConfigRequest.pb( datatransfer.GetTransferConfigRequest() ) @@ -12465,6 +12546,7 @@ def test_get_transfer_config_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = transfer.TransferConfig() + post_with_metadata.return_value = transfer.TransferConfig(), metadata client.get_transfer_config( request, @@ -12476,6 +12558,7 @@ def test_get_transfer_config_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + 
post_with_metadata.assert_called_once() def test_list_transfer_configs_rest_bad_request( @@ -12560,10 +12643,14 @@ def test_list_transfer_configs_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DataTransferServiceRestInterceptor, "post_list_transfer_configs" ) as post, mock.patch.object( + transports.DataTransferServiceRestInterceptor, + "post_list_transfer_configs_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DataTransferServiceRestInterceptor, "pre_list_transfer_configs" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = datatransfer.ListTransferConfigsRequest.pb( datatransfer.ListTransferConfigsRequest() ) @@ -12589,6 +12676,10 @@ def test_list_transfer_configs_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = datatransfer.ListTransferConfigsResponse() + post_with_metadata.return_value = ( + datatransfer.ListTransferConfigsResponse(), + metadata, + ) client.list_transfer_configs( request, @@ -12600,6 +12691,7 @@ def test_list_transfer_configs_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_schedule_transfer_runs_rest_bad_request( @@ -12685,10 +12777,14 @@ def test_schedule_transfer_runs_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DataTransferServiceRestInterceptor, "post_schedule_transfer_runs" ) as post, mock.patch.object( + transports.DataTransferServiceRestInterceptor, + "post_schedule_transfer_runs_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DataTransferServiceRestInterceptor, "pre_schedule_transfer_runs" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = datatransfer.ScheduleTransferRunsRequest.pb( datatransfer.ScheduleTransferRunsRequest() ) @@ -12714,6 +12810,10 @@ def test_schedule_transfer_runs_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = datatransfer.ScheduleTransferRunsResponse() + post_with_metadata.return_value = ( + datatransfer.ScheduleTransferRunsResponse(), + metadata, + ) client.schedule_transfer_runs( request, @@ -12725,6 +12825,7 @@ def test_schedule_transfer_runs_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_start_manual_transfer_runs_rest_bad_request( @@ -12810,10 +12911,14 @@ def test_start_manual_transfer_runs_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DataTransferServiceRestInterceptor, "post_start_manual_transfer_runs" ) as post, mock.patch.object( + transports.DataTransferServiceRestInterceptor, + "post_start_manual_transfer_runs_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DataTransferServiceRestInterceptor, "pre_start_manual_transfer_runs" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = datatransfer.StartManualTransferRunsRequest.pb( datatransfer.StartManualTransferRunsRequest() ) @@ -12839,6 +12944,10 @@ def test_start_manual_transfer_runs_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = datatransfer.StartManualTransferRunsResponse() + post_with_metadata.return_value = ( + datatransfer.StartManualTransferRunsResponse(), + metadata, + ) 
client.start_manual_transfer_runs( request, @@ -12850,6 +12959,7 @@ def test_start_manual_transfer_runs_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_transfer_run_rest_bad_request( @@ -12949,10 +13059,14 @@ def test_get_transfer_run_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DataTransferServiceRestInterceptor, "post_get_transfer_run" ) as post, mock.patch.object( + transports.DataTransferServiceRestInterceptor, + "post_get_transfer_run_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DataTransferServiceRestInterceptor, "pre_get_transfer_run" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = datatransfer.GetTransferRunRequest.pb( datatransfer.GetTransferRunRequest() ) @@ -12976,6 +13090,7 @@ def test_get_transfer_run_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = transfer.TransferRun() + post_with_metadata.return_value = transfer.TransferRun(), metadata client.get_transfer_run( request, @@ -12987,6 +13102,7 @@ def test_get_transfer_run_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_transfer_run_rest_bad_request( @@ -13188,10 +13304,14 @@ def test_list_transfer_runs_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DataTransferServiceRestInterceptor, "post_list_transfer_runs" ) as post, mock.patch.object( + transports.DataTransferServiceRestInterceptor, + "post_list_transfer_runs_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DataTransferServiceRestInterceptor, "pre_list_transfer_runs" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = datatransfer.ListTransferRunsRequest.pb( datatransfer.ListTransferRunsRequest() ) @@ -13217,6 +13337,10 @@ def test_list_transfer_runs_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = datatransfer.ListTransferRunsResponse() + post_with_metadata.return_value = ( + datatransfer.ListTransferRunsResponse(), + metadata, + ) client.list_transfer_runs( request, @@ -13228,6 +13352,7 @@ def test_list_transfer_runs_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_transfer_logs_rest_bad_request( @@ -13316,10 +13441,14 @@ def test_list_transfer_logs_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DataTransferServiceRestInterceptor, "post_list_transfer_logs" ) as post, mock.patch.object( + transports.DataTransferServiceRestInterceptor, + "post_list_transfer_logs_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DataTransferServiceRestInterceptor, "pre_list_transfer_logs" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = datatransfer.ListTransferLogsRequest.pb( datatransfer.ListTransferLogsRequest() ) @@ -13345,6 +13474,10 @@ def test_list_transfer_logs_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = datatransfer.ListTransferLogsResponse() + post_with_metadata.return_value = ( + datatransfer.ListTransferLogsResponse(), + metadata, + ) client.list_transfer_logs( request, @@ -13356,6 
+13489,7 @@ def test_list_transfer_logs_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_check_valid_creds_rest_bad_request( @@ -13440,10 +13574,14 @@ def test_check_valid_creds_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DataTransferServiceRestInterceptor, "post_check_valid_creds" ) as post, mock.patch.object( + transports.DataTransferServiceRestInterceptor, + "post_check_valid_creds_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DataTransferServiceRestInterceptor, "pre_check_valid_creds" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = datatransfer.CheckValidCredsRequest.pb( datatransfer.CheckValidCredsRequest() ) @@ -13469,6 +13607,10 @@ def test_check_valid_creds_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = datatransfer.CheckValidCredsResponse() + post_with_metadata.return_value = ( + datatransfer.CheckValidCredsResponse(), + metadata, + ) client.check_valid_creds( request, @@ -13480,6 +13622,7 @@ def test_check_valid_creds_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_enroll_data_sources_rest_bad_request( diff --git a/packages/google-cloud-bigquery-logging/CHANGELOG.md b/packages/google-cloud-bigquery-logging/CHANGELOG.md index 31b5e330af54..86f4c3b0ed6f 100644 --- a/packages/google-cloud-bigquery-logging/CHANGELOG.md +++ b/packages/google-cloud-bigquery-logging/CHANGELOG.md @@ -1,5 +1,13 @@ # Changelog +## [1.6.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-bigquery-logging-v1.5.0...google-cloud-bigquery-logging-v1.6.0) (2025-02-12) + + +### Features + +* Add REST Interceptors which support reading metadata ([e22e2bd](https://github.com/googleapis/google-cloud-python/commit/e22e2bde55d11d2f85e9d2caf1d152a4027f88cf)) +* Add support for reading selective GAPIC generation methods from service YAML ([e22e2bd](https://github.com/googleapis/google-cloud-python/commit/e22e2bde55d11d2f85e9d2caf1d152a4027f88cf)) + ## [1.5.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-bigquery-logging-v1.4.5...google-cloud-bigquery-logging-v1.5.0) (2024-10-24) diff --git a/packages/google-cloud-bigquery-logging/google/cloud/bigquery_logging/gapic_version.py b/packages/google-cloud-bigquery-logging/google/cloud/bigquery_logging/gapic_version.py index de27578dd493..186dbb3596a5 100644 --- a/packages/google-cloud-bigquery-logging/google/cloud/bigquery_logging/gapic_version.py +++ b/packages/google-cloud-bigquery-logging/google/cloud/bigquery_logging/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "1.5.0" # {x-release-please-version} +__version__ = "1.6.0" # {x-release-please-version} diff --git a/packages/google-cloud-bigquery-logging/google/cloud/bigquery_logging_v1/gapic_version.py b/packages/google-cloud-bigquery-logging/google/cloud/bigquery_logging_v1/gapic_version.py index de27578dd493..186dbb3596a5 100644 --- a/packages/google-cloud-bigquery-logging/google/cloud/bigquery_logging_v1/gapic_version.py +++ b/packages/google-cloud-bigquery-logging/google/cloud/bigquery_logging_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.5.0" # {x-release-please-version} +__version__ = "1.6.0" # {x-release-please-version} diff --git a/packages/google-cloud-bigquery-migration/CHANGELOG.md b/packages/google-cloud-bigquery-migration/CHANGELOG.md index a4c09783017c..d4f2ac8571fa 100644 --- a/packages/google-cloud-bigquery-migration/CHANGELOG.md +++ b/packages/google-cloud-bigquery-migration/CHANGELOG.md @@ -1,5 +1,13 @@ # Changelog +## [0.11.13](https://github.com/googleapis/google-cloud-python/compare/google-cloud-bigquery-migration-v0.11.12...google-cloud-bigquery-migration-v0.11.13) (2025-02-12) + + +### Features + +* Add REST Interceptors which support reading metadata ([e22e2bd](https://github.com/googleapis/google-cloud-python/commit/e22e2bde55d11d2f85e9d2caf1d152a4027f88cf)) +* Add support for reading selective GAPIC generation methods from service YAML ([e22e2bd](https://github.com/googleapis/google-cloud-python/commit/e22e2bde55d11d2f85e9d2caf1d152a4027f88cf)) + ## [0.11.12](https://github.com/googleapis/google-cloud-python/compare/google-cloud-bigquery-migration-v0.11.11...google-cloud-bigquery-migration-v0.11.12) (2024-12-12) diff --git a/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration/gapic_version.py b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration/gapic_version.py index 2566b8be8361..082d8f13abe1 100644 --- a/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration/gapic_version.py +++ b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.11.12" # {x-release-please-version} +__version__ = "0.11.13" # {x-release-please-version} diff --git a/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2/gapic_version.py b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2/gapic_version.py index 2566b8be8361..082d8f13abe1 100644 --- a/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2/gapic_version.py +++ b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.11.12" # {x-release-please-version} +__version__ = "0.11.13" # {x-release-please-version} diff --git a/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2/services/migration_service/client.py b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2/services/migration_service/client.py index 00c0357c7680..6a20be7f6df2 100644 --- a/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2/services/migration_service/client.py +++ b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2/services/migration_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -514,6 +516,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. diff --git a/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2alpha/gapic_version.py b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2alpha/gapic_version.py index 2566b8be8361..082d8f13abe1 100644 --- a/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2alpha/gapic_version.py +++ b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2alpha/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.11.12" # {x-release-please-version} +__version__ = "0.11.13" # {x-release-please-version} diff --git a/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2alpha/services/migration_service/client.py b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2alpha/services/migration_service/client.py index f25c72cba44f..7c3f57c69688 100644 --- a/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2alpha/services/migration_service/client.py +++ b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2alpha/services/migration_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -514,6 +516,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. 
return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. diff --git a/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2alpha/services/sql_translation_service/client.py b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2alpha/services/sql_translation_service/client.py index f474b7a3e006..339907fd733b 100644 --- a/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2alpha/services/sql_translation_service/client.py +++ b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2alpha/services/sql_translation_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -463,6 +465,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. 
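
The `_add_cred_info_for_auth_errors` helper added to each client above serializes the credential info reported by google-auth and appends it to the error's details for 401/403/404 responses; the unit tests later in this diff exercise exactly that behavior. The sketch below mirrors those tests. It calls a private helper and uses a mock credential purely for demonstration; this is not a supported public workflow.

    # Illustrative sketch only: drives the private helper the same way the new
    # unit tests do. The mock credential and direct call to a private method
    # are assumptions for demonstration, not public API.
    import json
    from unittest import mock

    from google.api_core import exceptions as core_exceptions
    from google.cloud import bigquery_migration_v2

    cred_info = {
        "credential_source": "/path/to/file",
        "credential_type": "service account credentials",
        "principal": "service-account@example.com",
    }

    cred = mock.Mock(["get_cred_info"])
    cred.get_cred_info = mock.Mock(return_value=cred_info)

    client = bigquery_migration_v2.MigrationServiceClient(credentials=cred)
    client._transport._credentials = cred

    error = core_exceptions.GoogleAPICallError("permission denied", details=["foo"])
    error.code = 403  # only 401/403/404 trigger the credential hint

    client._add_cred_info_for_auth_errors(error)
    assert error.details == ["foo", json.dumps(cred_info)]

For status codes outside 401/403/404, or when the credential object does not implement `get_cred_info` (google-auth < 2.35.0), the helper returns without touching `error.details`, which is what the parametrized tests below verify.
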
diff --git a/packages/google-cloud-bigquery-migration/samples/generated_samples/snippet_metadata_google.cloud.bigquery.migration.v2.json b/packages/google-cloud-bigquery-migration/samples/generated_samples/snippet_metadata_google.cloud.bigquery.migration.v2.json index ce2e706a2d89..ccb2a563ff8d 100644 --- a/packages/google-cloud-bigquery-migration/samples/generated_samples/snippet_metadata_google.cloud.bigquery.migration.v2.json +++ b/packages/google-cloud-bigquery-migration/samples/generated_samples/snippet_metadata_google.cloud.bigquery.migration.v2.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-bigquery-migration", - "version": "0.11.12" + "version": "0.11.13" }, "snippets": [ { diff --git a/packages/google-cloud-bigquery-migration/samples/generated_samples/snippet_metadata_google.cloud.bigquery.migration.v2alpha.json b/packages/google-cloud-bigquery-migration/samples/generated_samples/snippet_metadata_google.cloud.bigquery.migration.v2alpha.json index cb9fde2bd57e..827cb3774f2a 100644 --- a/packages/google-cloud-bigquery-migration/samples/generated_samples/snippet_metadata_google.cloud.bigquery.migration.v2alpha.json +++ b/packages/google-cloud-bigquery-migration/samples/generated_samples/snippet_metadata_google.cloud.bigquery.migration.v2alpha.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-bigquery-migration", - "version": "0.11.12" + "version": "0.11.13" }, "snippets": [ { diff --git a/packages/google-cloud-bigquery-migration/tests/unit/gapic/bigquery_migration_v2/test_migration_service.py b/packages/google-cloud-bigquery-migration/tests/unit/gapic/bigquery_migration_v2/test_migration_service.py index 52e18ce616db..5a5dda640d55 100644 --- a/packages/google-cloud-bigquery-migration/tests/unit/gapic/bigquery_migration_v2/test_migration_service.py +++ b/packages/google-cloud-bigquery-migration/tests/unit/gapic/bigquery_migration_v2/test_migration_service.py @@ -22,6 +22,7 @@ except ImportError: # pragma: NO COVER import mock +import json import math from google.api_core import api_core_version @@ -69,6 +70,13 @@ translation_usability, ) +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -328,6 +336,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = MigrationServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = MigrationServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-bigquery-migration/tests/unit/gapic/bigquery_migration_v2alpha/test_migration_service.py b/packages/google-cloud-bigquery-migration/tests/unit/gapic/bigquery_migration_v2alpha/test_migration_service.py index ec6917df7b1c..9f0f1531dfdc 100644 --- a/packages/google-cloud-bigquery-migration/tests/unit/gapic/bigquery_migration_v2alpha/test_migration_service.py +++ b/packages/google-cloud-bigquery-migration/tests/unit/gapic/bigquery_migration_v2alpha/test_migration_service.py @@ -22,6 +22,7 @@ except ImportError: # pragma: NO COVER import mock +import json import math from google.api_core import api_core_version @@ -66,6 +67,13 @@ translation_task, ) +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -325,6 +333,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = MigrationServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = MigrationServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-bigquery-migration/tests/unit/gapic/bigquery_migration_v2alpha/test_sql_translation_service.py b/packages/google-cloud-bigquery-migration/tests/unit/gapic/bigquery_migration_v2alpha/test_sql_translation_service.py index b4d046a3b632..c5b5cea11e35 100644 --- a/packages/google-cloud-bigquery-migration/tests/unit/gapic/bigquery_migration_v2alpha/test_sql_translation_service.py +++ b/packages/google-cloud-bigquery-migration/tests/unit/gapic/bigquery_migration_v2alpha/test_sql_translation_service.py @@ -22,6 +22,7 @@ except ImportError: # pragma: NO COVER import mock +import json import math from google.api_core import api_core_version @@ -54,6 +55,13 @@ ) from google.cloud.bigquery_migration_v2alpha.types import translation_service +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -329,6 +337,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = SqlTranslationServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = SqlTranslationServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-bigquery-reservation/CHANGELOG.md b/packages/google-cloud-bigquery-reservation/CHANGELOG.md index f749f2a4c241..b21e882ebe96 100644 --- a/packages/google-cloud-bigquery-reservation/CHANGELOG.md +++ b/packages/google-cloud-bigquery-reservation/CHANGELOG.md @@ -1,5 +1,13 @@ # Changelog +## [1.16.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-bigquery-reservation-v1.15.0...google-cloud-bigquery-reservation-v1.16.0) (2025-02-12) + + +### Features + +* Add REST Interceptors which support reading metadata ([e22e2bd](https://github.com/googleapis/google-cloud-python/commit/e22e2bde55d11d2f85e9d2caf1d152a4027f88cf)) +* Add support for reading selective GAPIC generation methods from service YAML ([e22e2bd](https://github.com/googleapis/google-cloud-python/commit/e22e2bde55d11d2f85e9d2caf1d152a4027f88cf)) + ## [1.15.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-bigquery-reservation-v1.14.1...google-cloud-bigquery-reservation-v1.15.0) (2024-12-12) diff --git a/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation/gapic_version.py b/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation/gapic_version.py index cf18a472a8a2..3e0ea3b28f0a 100644 --- a/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation/gapic_version.py +++ b/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "1.15.0" # {x-release-please-version} +__version__ = "1.16.0" # {x-release-please-version} diff --git a/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation_v1/gapic_version.py b/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation_v1/gapic_version.py index cf18a472a8a2..3e0ea3b28f0a 100644 --- a/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation_v1/gapic_version.py +++ b/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.15.0" # {x-release-please-version} +__version__ = "1.16.0" # {x-release-please-version} diff --git a/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation_v1/services/reservation_service/client.py b/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation_v1/services/reservation_service/client.py index a4d2d1aaf5b8..4e74d1b430e8 100644 --- a/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation_v1/services/reservation_service/client.py +++ b/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation_v1/services/reservation_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -573,6 +575,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. diff --git a/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation_v1/services/reservation_service/transports/rest.py b/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation_v1/services/reservation_service/transports/rest.py index a62771c411d0..093e65abb526 100644 --- a/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation_v1/services/reservation_service/transports/rest.py +++ b/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation_v1/services/reservation_service/transports/rest.py @@ -259,12 +259,35 @@ def post_create_assignment( ) -> reservation.Assignment: """Post-rpc interceptor for create_assignment - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_assignment_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ReservationService server but before - it is returned to user code. 
+ it is returned to user code. This `post_create_assignment` interceptor runs + before the `post_create_assignment_with_metadata` interceptor. """ return response + def post_create_assignment_with_metadata( + self, + response: reservation.Assignment, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[reservation.Assignment, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_assignment + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ReservationService server but before it is returned to user code. + + We recommend only using this `post_create_assignment_with_metadata` + interceptor in new development instead of the `post_create_assignment` interceptor. + When both interceptors are used, this `post_create_assignment_with_metadata` interceptor runs after the + `post_create_assignment` interceptor. The (possibly modified) response returned by + `post_create_assignment` will be passed to + `post_create_assignment_with_metadata`. + """ + return response, metadata + def pre_create_capacity_commitment( self, request: reservation.CreateCapacityCommitmentRequest, @@ -285,12 +308,35 @@ def post_create_capacity_commitment( ) -> reservation.CapacityCommitment: """Post-rpc interceptor for create_capacity_commitment - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_capacity_commitment_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ReservationService server but before - it is returned to user code. + it is returned to user code. This `post_create_capacity_commitment` interceptor runs + before the `post_create_capacity_commitment_with_metadata` interceptor. """ return response + def post_create_capacity_commitment_with_metadata( + self, + response: reservation.CapacityCommitment, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[reservation.CapacityCommitment, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_capacity_commitment + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ReservationService server but before it is returned to user code. + + We recommend only using this `post_create_capacity_commitment_with_metadata` + interceptor in new development instead of the `post_create_capacity_commitment` interceptor. + When both interceptors are used, this `post_create_capacity_commitment_with_metadata` interceptor runs after the + `post_create_capacity_commitment` interceptor. The (possibly modified) response returned by + `post_create_capacity_commitment` will be passed to + `post_create_capacity_commitment_with_metadata`. + """ + return response, metadata + def pre_create_reservation( self, request: gcbr_reservation.CreateReservationRequest, @@ -311,12 +357,35 @@ def post_create_reservation( ) -> gcbr_reservation.Reservation: """Post-rpc interceptor for create_reservation - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_reservation_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ReservationService server but before - it is returned to user code. + it is returned to user code. This `post_create_reservation` interceptor runs + before the `post_create_reservation_with_metadata` interceptor. 
""" return response + def post_create_reservation_with_metadata( + self, + response: gcbr_reservation.Reservation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gcbr_reservation.Reservation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_reservation + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ReservationService server but before it is returned to user code. + + We recommend only using this `post_create_reservation_with_metadata` + interceptor in new development instead of the `post_create_reservation` interceptor. + When both interceptors are used, this `post_create_reservation_with_metadata` interceptor runs after the + `post_create_reservation` interceptor. The (possibly modified) response returned by + `post_create_reservation` will be passed to + `post_create_reservation_with_metadata`. + """ + return response, metadata + def pre_delete_assignment( self, request: reservation.DeleteAssignmentRequest, @@ -379,12 +448,35 @@ def post_failover_reservation( ) -> reservation.Reservation: """Post-rpc interceptor for failover_reservation - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_failover_reservation_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ReservationService server but before - it is returned to user code. + it is returned to user code. This `post_failover_reservation` interceptor runs + before the `post_failover_reservation_with_metadata` interceptor. """ return response + def post_failover_reservation_with_metadata( + self, + response: reservation.Reservation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[reservation.Reservation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for failover_reservation + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ReservationService server but before it is returned to user code. + + We recommend only using this `post_failover_reservation_with_metadata` + interceptor in new development instead of the `post_failover_reservation` interceptor. + When both interceptors are used, this `post_failover_reservation_with_metadata` interceptor runs after the + `post_failover_reservation` interceptor. The (possibly modified) response returned by + `post_failover_reservation` will be passed to + `post_failover_reservation_with_metadata`. + """ + return response, metadata + def pre_get_bi_reservation( self, request: reservation.GetBiReservationRequest, @@ -404,12 +496,35 @@ def post_get_bi_reservation( ) -> reservation.BiReservation: """Post-rpc interceptor for get_bi_reservation - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_bi_reservation_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ReservationService server but before - it is returned to user code. + it is returned to user code. This `post_get_bi_reservation` interceptor runs + before the `post_get_bi_reservation_with_metadata` interceptor. 
""" return response + def post_get_bi_reservation_with_metadata( + self, + response: reservation.BiReservation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[reservation.BiReservation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_bi_reservation + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ReservationService server but before it is returned to user code. + + We recommend only using this `post_get_bi_reservation_with_metadata` + interceptor in new development instead of the `post_get_bi_reservation` interceptor. + When both interceptors are used, this `post_get_bi_reservation_with_metadata` interceptor runs after the + `post_get_bi_reservation` interceptor. The (possibly modified) response returned by + `post_get_bi_reservation` will be passed to + `post_get_bi_reservation_with_metadata`. + """ + return response, metadata + def pre_get_capacity_commitment( self, request: reservation.GetCapacityCommitmentRequest, @@ -430,12 +545,35 @@ def post_get_capacity_commitment( ) -> reservation.CapacityCommitment: """Post-rpc interceptor for get_capacity_commitment - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_capacity_commitment_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ReservationService server but before - it is returned to user code. + it is returned to user code. This `post_get_capacity_commitment` interceptor runs + before the `post_get_capacity_commitment_with_metadata` interceptor. """ return response + def post_get_capacity_commitment_with_metadata( + self, + response: reservation.CapacityCommitment, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[reservation.CapacityCommitment, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_capacity_commitment + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ReservationService server but before it is returned to user code. + + We recommend only using this `post_get_capacity_commitment_with_metadata` + interceptor in new development instead of the `post_get_capacity_commitment` interceptor. + When both interceptors are used, this `post_get_capacity_commitment_with_metadata` interceptor runs after the + `post_get_capacity_commitment` interceptor. The (possibly modified) response returned by + `post_get_capacity_commitment` will be passed to + `post_get_capacity_commitment_with_metadata`. + """ + return response, metadata + def pre_get_reservation( self, request: reservation.GetReservationRequest, @@ -455,12 +593,35 @@ def post_get_reservation( ) -> reservation.Reservation: """Post-rpc interceptor for get_reservation - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_reservation_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ReservationService server but before - it is returned to user code. + it is returned to user code. This `post_get_reservation` interceptor runs + before the `post_get_reservation_with_metadata` interceptor. 
""" return response + def post_get_reservation_with_metadata( + self, + response: reservation.Reservation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[reservation.Reservation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_reservation + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ReservationService server but before it is returned to user code. + + We recommend only using this `post_get_reservation_with_metadata` + interceptor in new development instead of the `post_get_reservation` interceptor. + When both interceptors are used, this `post_get_reservation_with_metadata` interceptor runs after the + `post_get_reservation` interceptor. The (possibly modified) response returned by + `post_get_reservation` will be passed to + `post_get_reservation_with_metadata`. + """ + return response, metadata + def pre_list_assignments( self, request: reservation.ListAssignmentsRequest, @@ -480,12 +641,37 @@ def post_list_assignments( ) -> reservation.ListAssignmentsResponse: """Post-rpc interceptor for list_assignments - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_assignments_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ReservationService server but before - it is returned to user code. + it is returned to user code. This `post_list_assignments` interceptor runs + before the `post_list_assignments_with_metadata` interceptor. """ return response + def post_list_assignments_with_metadata( + self, + response: reservation.ListAssignmentsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + reservation.ListAssignmentsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_assignments + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ReservationService server but before it is returned to user code. + + We recommend only using this `post_list_assignments_with_metadata` + interceptor in new development instead of the `post_list_assignments` interceptor. + When both interceptors are used, this `post_list_assignments_with_metadata` interceptor runs after the + `post_list_assignments` interceptor. The (possibly modified) response returned by + `post_list_assignments` will be passed to + `post_list_assignments_with_metadata`. + """ + return response, metadata + def pre_list_capacity_commitments( self, request: reservation.ListCapacityCommitmentsRequest, @@ -506,12 +692,38 @@ def post_list_capacity_commitments( ) -> reservation.ListCapacityCommitmentsResponse: """Post-rpc interceptor for list_capacity_commitments - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_capacity_commitments_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ReservationService server but before - it is returned to user code. + it is returned to user code. This `post_list_capacity_commitments` interceptor runs + before the `post_list_capacity_commitments_with_metadata` interceptor. 
""" return response + def post_list_capacity_commitments_with_metadata( + self, + response: reservation.ListCapacityCommitmentsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + reservation.ListCapacityCommitmentsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_capacity_commitments + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ReservationService server but before it is returned to user code. + + We recommend only using this `post_list_capacity_commitments_with_metadata` + interceptor in new development instead of the `post_list_capacity_commitments` interceptor. + When both interceptors are used, this `post_list_capacity_commitments_with_metadata` interceptor runs after the + `post_list_capacity_commitments` interceptor. The (possibly modified) response returned by + `post_list_capacity_commitments` will be passed to + `post_list_capacity_commitments_with_metadata`. + """ + return response, metadata + def pre_list_reservations( self, request: reservation.ListReservationsRequest, @@ -531,12 +743,37 @@ def post_list_reservations( ) -> reservation.ListReservationsResponse: """Post-rpc interceptor for list_reservations - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_reservations_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ReservationService server but before - it is returned to user code. + it is returned to user code. This `post_list_reservations` interceptor runs + before the `post_list_reservations_with_metadata` interceptor. """ return response + def post_list_reservations_with_metadata( + self, + response: reservation.ListReservationsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + reservation.ListReservationsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_reservations + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ReservationService server but before it is returned to user code. + + We recommend only using this `post_list_reservations_with_metadata` + interceptor in new development instead of the `post_list_reservations` interceptor. + When both interceptors are used, this `post_list_reservations_with_metadata` interceptor runs after the + `post_list_reservations` interceptor. The (possibly modified) response returned by + `post_list_reservations` will be passed to + `post_list_reservations_with_metadata`. + """ + return response, metadata + def pre_merge_capacity_commitments( self, request: reservation.MergeCapacityCommitmentsRequest, @@ -557,12 +794,35 @@ def post_merge_capacity_commitments( ) -> reservation.CapacityCommitment: """Post-rpc interceptor for merge_capacity_commitments - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_merge_capacity_commitments_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ReservationService server but before - it is returned to user code. + it is returned to user code. This `post_merge_capacity_commitments` interceptor runs + before the `post_merge_capacity_commitments_with_metadata` interceptor. 
""" return response + def post_merge_capacity_commitments_with_metadata( + self, + response: reservation.CapacityCommitment, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[reservation.CapacityCommitment, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for merge_capacity_commitments + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ReservationService server but before it is returned to user code. + + We recommend only using this `post_merge_capacity_commitments_with_metadata` + interceptor in new development instead of the `post_merge_capacity_commitments` interceptor. + When both interceptors are used, this `post_merge_capacity_commitments_with_metadata` interceptor runs after the + `post_merge_capacity_commitments` interceptor. The (possibly modified) response returned by + `post_merge_capacity_commitments` will be passed to + `post_merge_capacity_commitments_with_metadata`. + """ + return response, metadata + def pre_move_assignment( self, request: reservation.MoveAssignmentRequest, @@ -582,12 +842,35 @@ def post_move_assignment( ) -> reservation.Assignment: """Post-rpc interceptor for move_assignment - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_move_assignment_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ReservationService server but before - it is returned to user code. + it is returned to user code. This `post_move_assignment` interceptor runs + before the `post_move_assignment_with_metadata` interceptor. """ return response + def post_move_assignment_with_metadata( + self, + response: reservation.Assignment, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[reservation.Assignment, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for move_assignment + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ReservationService server but before it is returned to user code. + + We recommend only using this `post_move_assignment_with_metadata` + interceptor in new development instead of the `post_move_assignment` interceptor. + When both interceptors are used, this `post_move_assignment_with_metadata` interceptor runs after the + `post_move_assignment` interceptor. The (possibly modified) response returned by + `post_move_assignment` will be passed to + `post_move_assignment_with_metadata`. + """ + return response, metadata + def pre_search_all_assignments( self, request: reservation.SearchAllAssignmentsRequest, @@ -607,12 +890,38 @@ def post_search_all_assignments( ) -> reservation.SearchAllAssignmentsResponse: """Post-rpc interceptor for search_all_assignments - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_search_all_assignments_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ReservationService server but before - it is returned to user code. + it is returned to user code. This `post_search_all_assignments` interceptor runs + before the `post_search_all_assignments_with_metadata` interceptor. 
""" return response + def post_search_all_assignments_with_metadata( + self, + response: reservation.SearchAllAssignmentsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + reservation.SearchAllAssignmentsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for search_all_assignments + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ReservationService server but before it is returned to user code. + + We recommend only using this `post_search_all_assignments_with_metadata` + interceptor in new development instead of the `post_search_all_assignments` interceptor. + When both interceptors are used, this `post_search_all_assignments_with_metadata` interceptor runs after the + `post_search_all_assignments` interceptor. The (possibly modified) response returned by + `post_search_all_assignments` will be passed to + `post_search_all_assignments_with_metadata`. + """ + return response, metadata + def pre_search_assignments( self, request: reservation.SearchAssignmentsRequest, @@ -632,12 +941,37 @@ def post_search_assignments( ) -> reservation.SearchAssignmentsResponse: """Post-rpc interceptor for search_assignments - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_search_assignments_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ReservationService server but before - it is returned to user code. + it is returned to user code. This `post_search_assignments` interceptor runs + before the `post_search_assignments_with_metadata` interceptor. """ return response + def post_search_assignments_with_metadata( + self, + response: reservation.SearchAssignmentsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + reservation.SearchAssignmentsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for search_assignments + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ReservationService server but before it is returned to user code. + + We recommend only using this `post_search_assignments_with_metadata` + interceptor in new development instead of the `post_search_assignments` interceptor. + When both interceptors are used, this `post_search_assignments_with_metadata` interceptor runs after the + `post_search_assignments` interceptor. The (possibly modified) response returned by + `post_search_assignments` will be passed to + `post_search_assignments_with_metadata`. + """ + return response, metadata + def pre_split_capacity_commitment( self, request: reservation.SplitCapacityCommitmentRequest, @@ -658,12 +992,38 @@ def post_split_capacity_commitment( ) -> reservation.SplitCapacityCommitmentResponse: """Post-rpc interceptor for split_capacity_commitment - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_split_capacity_commitment_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ReservationService server but before - it is returned to user code. + it is returned to user code. This `post_split_capacity_commitment` interceptor runs + before the `post_split_capacity_commitment_with_metadata` interceptor. 
""" return response + def post_split_capacity_commitment_with_metadata( + self, + response: reservation.SplitCapacityCommitmentResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + reservation.SplitCapacityCommitmentResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for split_capacity_commitment + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ReservationService server but before it is returned to user code. + + We recommend only using this `post_split_capacity_commitment_with_metadata` + interceptor in new development instead of the `post_split_capacity_commitment` interceptor. + When both interceptors are used, this `post_split_capacity_commitment_with_metadata` interceptor runs after the + `post_split_capacity_commitment` interceptor. The (possibly modified) response returned by + `post_split_capacity_commitment` will be passed to + `post_split_capacity_commitment_with_metadata`. + """ + return response, metadata + def pre_update_assignment( self, request: reservation.UpdateAssignmentRequest, @@ -683,12 +1043,35 @@ def post_update_assignment( ) -> reservation.Assignment: """Post-rpc interceptor for update_assignment - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_assignment_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ReservationService server but before - it is returned to user code. + it is returned to user code. This `post_update_assignment` interceptor runs + before the `post_update_assignment_with_metadata` interceptor. """ return response + def post_update_assignment_with_metadata( + self, + response: reservation.Assignment, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[reservation.Assignment, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_assignment + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ReservationService server but before it is returned to user code. + + We recommend only using this `post_update_assignment_with_metadata` + interceptor in new development instead of the `post_update_assignment` interceptor. + When both interceptors are used, this `post_update_assignment_with_metadata` interceptor runs after the + `post_update_assignment` interceptor. The (possibly modified) response returned by + `post_update_assignment` will be passed to + `post_update_assignment_with_metadata`. + """ + return response, metadata + def pre_update_bi_reservation( self, request: reservation.UpdateBiReservationRequest, @@ -708,12 +1091,35 @@ def post_update_bi_reservation( ) -> reservation.BiReservation: """Post-rpc interceptor for update_bi_reservation - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_bi_reservation_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ReservationService server but before - it is returned to user code. + it is returned to user code. This `post_update_bi_reservation` interceptor runs + before the `post_update_bi_reservation_with_metadata` interceptor. 
""" return response + def post_update_bi_reservation_with_metadata( + self, + response: reservation.BiReservation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[reservation.BiReservation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_bi_reservation + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ReservationService server but before it is returned to user code. + + We recommend only using this `post_update_bi_reservation_with_metadata` + interceptor in new development instead of the `post_update_bi_reservation` interceptor. + When both interceptors are used, this `post_update_bi_reservation_with_metadata` interceptor runs after the + `post_update_bi_reservation` interceptor. The (possibly modified) response returned by + `post_update_bi_reservation` will be passed to + `post_update_bi_reservation_with_metadata`. + """ + return response, metadata + def pre_update_capacity_commitment( self, request: reservation.UpdateCapacityCommitmentRequest, @@ -734,12 +1140,35 @@ def post_update_capacity_commitment( ) -> reservation.CapacityCommitment: """Post-rpc interceptor for update_capacity_commitment - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_capacity_commitment_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ReservationService server but before - it is returned to user code. + it is returned to user code. This `post_update_capacity_commitment` interceptor runs + before the `post_update_capacity_commitment_with_metadata` interceptor. """ return response + def post_update_capacity_commitment_with_metadata( + self, + response: reservation.CapacityCommitment, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[reservation.CapacityCommitment, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_capacity_commitment + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ReservationService server but before it is returned to user code. + + We recommend only using this `post_update_capacity_commitment_with_metadata` + interceptor in new development instead of the `post_update_capacity_commitment` interceptor. + When both interceptors are used, this `post_update_capacity_commitment_with_metadata` interceptor runs after the + `post_update_capacity_commitment` interceptor. The (possibly modified) response returned by + `post_update_capacity_commitment` will be passed to + `post_update_capacity_commitment_with_metadata`. + """ + return response, metadata + def pre_update_reservation( self, request: gcbr_reservation.UpdateReservationRequest, @@ -760,12 +1189,35 @@ def post_update_reservation( ) -> gcbr_reservation.Reservation: """Post-rpc interceptor for update_reservation - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_reservation_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ReservationService server but before - it is returned to user code. + it is returned to user code. This `post_update_reservation` interceptor runs + before the `post_update_reservation_with_metadata` interceptor. 
""" return response + def post_update_reservation_with_metadata( + self, + response: gcbr_reservation.Reservation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gcbr_reservation.Reservation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_reservation + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ReservationService server but before it is returned to user code. + + We recommend only using this `post_update_reservation_with_metadata` + interceptor in new development instead of the `post_update_reservation` interceptor. + When both interceptors are used, this `post_update_reservation_with_metadata` interceptor runs after the + `post_update_reservation` interceptor. The (possibly modified) response returned by + `post_update_reservation` will be passed to + `post_update_reservation_with_metadata`. + """ + return response, metadata + @dataclasses.dataclass class ReservationServiceRestStub: @@ -1000,6 +1452,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_assignment(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_assignment_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1167,6 +1623,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_capacity_commitment(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_capacity_commitment_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1319,6 +1779,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_reservation(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_reservation_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1808,6 +2272,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_failover_reservation(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_failover_reservation_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1952,6 +2420,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_bi_reservation(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_bi_reservation_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2112,6 +2584,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_capacity_commitment(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_capacity_commitment_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and 
_LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2256,6 +2732,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_reservation(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_reservation_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2402,6 +2882,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_assignments(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_assignments_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2552,6 +3036,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_capacity_commitments(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_capacity_commitments_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2700,6 +3188,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_reservations(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_reservations_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2869,6 +3361,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_merge_capacity_commitments(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_merge_capacity_commitments_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3027,6 +3523,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_move_assignment(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_move_assignment_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3177,6 +3677,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_search_all_assignments(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_search_all_assignments_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3327,6 +3831,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_search_assignments(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_search_assignments_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3483,6 +3991,10 @@ def __call__( 
json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_split_capacity_commitment(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_split_capacity_commitment_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3638,6 +4150,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_assignment(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_assignment_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3789,6 +4305,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_bi_reservation(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_bi_reservation_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3956,6 +4476,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_capacity_commitment(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_capacity_commitment_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4108,6 +4632,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_reservation(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_reservation_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-bigquery-reservation/samples/generated_samples/snippet_metadata_google.cloud.bigquery.reservation.v1.json b/packages/google-cloud-bigquery-reservation/samples/generated_samples/snippet_metadata_google.cloud.bigquery.reservation.v1.json index ed01a737c315..6909922aad10 100644 --- a/packages/google-cloud-bigquery-reservation/samples/generated_samples/snippet_metadata_google.cloud.bigquery.reservation.v1.json +++ b/packages/google-cloud-bigquery-reservation/samples/generated_samples/snippet_metadata_google.cloud.bigquery.reservation.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-bigquery-reservation", - "version": "1.15.0" + "version": "1.16.0" }, "snippets": [ { diff --git a/packages/google-cloud-bigquery-reservation/tests/unit/gapic/bigquery_reservation_v1/test_reservation_service.py b/packages/google-cloud-bigquery-reservation/tests/unit/gapic/bigquery_reservation_v1/test_reservation_service.py index 970a335958c8..ef279d853b76 100644 --- a/packages/google-cloud-bigquery-reservation/tests/unit/gapic/bigquery_reservation_v1/test_reservation_service.py +++ b/packages/google-cloud-bigquery-reservation/tests/unit/gapic/bigquery_reservation_v1/test_reservation_service.py @@ -65,6 +65,13 @@ from google.cloud.bigquery_reservation_v1.types import reservation as gcbr_reservation from google.cloud.bigquery_reservation_v1.types import reservation 
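
The interceptor and transport changes above introduce `post_*_with_metadata` hooks that receive the HTTP response headers as a sequence of `(header, value)` pairs, alongside the existing (now deprecated) `post_*` hooks. The following is a minimal sketch, not generated code, of how a subclass might consume one of the new hooks; the `x-goog-request-id` header, the logger, and the class name are illustrative assumptions, while the method signature matches the interceptor definitions in this diff:

    import logging
    from typing import Sequence, Tuple, Union

    from google.cloud.bigquery_reservation_v1.services.reservation_service import transports
    from google.cloud.bigquery_reservation_v1.types import reservation

    _LOGGER = logging.getLogger(__name__)


    class MetadataLoggingInterceptor(transports.ReservationServiceRestInterceptor):
        """Example subclass that reads response metadata via a new hook."""

        def post_get_reservation_with_metadata(
            self,
            response: reservation.Reservation,
            metadata: Sequence[Tuple[str, Union[str, bytes]]],
        ) -> Tuple[reservation.Reservation, Sequence[Tuple[str, Union[str, bytes]]]]:
            # The REST transport builds `metadata` from the HTTP response
            # headers, so each entry is a (header-name, value) pair.
            for key, value in metadata:
                if key.lower() == "x-goog-request-id":  # hypothetical header of interest
                    _LOGGER.debug("get_reservation request id: %s", value)
            # Return the pair unchanged to keep the default behaviour.
            return response, metadata


    # An instance would be attached the same way as the pre-existing hooks,
    # via the REST transport's `interceptor` argument, for example:
    # client = ReservationServiceClient(
    #     transport=transports.ReservationServiceRestTransport(
    #         interceptor=MetadataLoggingInterceptor()
    #     )
    # )

Returning `response, metadata` unchanged, as above, preserves the default behaviour; the transport discards the returned metadata today (it unpacks only the response), so the hook is primarily a read-side extension point.
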
+CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -336,6 +343,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = ReservationServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = ReservationServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -15349,10 +15399,14 @@ def test_create_reservation_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ReservationServiceRestInterceptor, "post_create_reservation" ) as post, mock.patch.object( + transports.ReservationServiceRestInterceptor, + "post_create_reservation_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ReservationServiceRestInterceptor, "pre_create_reservation" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gcbr_reservation.CreateReservationRequest.pb( gcbr_reservation.CreateReservationRequest() ) @@ -15378,6 +15432,7 @@ def test_create_reservation_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gcbr_reservation.Reservation() + post_with_metadata.return_value = gcbr_reservation.Reservation(), metadata client.create_reservation( request, @@ -15389,6 +15444,7 @@ def test_create_reservation_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_reservations_rest_bad_request( @@ -15473,10 +15529,14 @@ def test_list_reservations_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ReservationServiceRestInterceptor, "post_list_reservations" ) as post, mock.patch.object( + transports.ReservationServiceRestInterceptor, + "post_list_reservations_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ReservationServiceRestInterceptor, "pre_list_reservations" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = 
reservation.ListReservationsRequest.pb( reservation.ListReservationsRequest() ) @@ -15502,6 +15562,10 @@ def test_list_reservations_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = reservation.ListReservationsResponse() + post_with_metadata.return_value = ( + reservation.ListReservationsResponse(), + metadata, + ) client.list_reservations( request, @@ -15513,6 +15577,7 @@ def test_list_reservations_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_reservation_rest_bad_request( @@ -15613,10 +15678,14 @@ def test_get_reservation_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ReservationServiceRestInterceptor, "post_get_reservation" ) as post, mock.patch.object( + transports.ReservationServiceRestInterceptor, + "post_get_reservation_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ReservationServiceRestInterceptor, "pre_get_reservation" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = reservation.GetReservationRequest.pb( reservation.GetReservationRequest() ) @@ -15640,6 +15709,7 @@ def test_get_reservation_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = reservation.Reservation() + post_with_metadata.return_value = reservation.Reservation(), metadata client.get_reservation( request, @@ -15651,6 +15721,7 @@ def test_get_reservation_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_reservation_rest_bad_request( @@ -15949,10 +16020,14 @@ def test_update_reservation_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ReservationServiceRestInterceptor, "post_update_reservation" ) as post, mock.patch.object( + transports.ReservationServiceRestInterceptor, + "post_update_reservation_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ReservationServiceRestInterceptor, "pre_update_reservation" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gcbr_reservation.UpdateReservationRequest.pb( gcbr_reservation.UpdateReservationRequest() ) @@ -15978,6 +16053,7 @@ def test_update_reservation_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gcbr_reservation.Reservation() + post_with_metadata.return_value = gcbr_reservation.Reservation(), metadata client.update_reservation( request, @@ -15989,6 +16065,7 @@ def test_update_reservation_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_failover_reservation_rest_bad_request( @@ -16089,10 +16166,14 @@ def test_failover_reservation_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ReservationServiceRestInterceptor, "post_failover_reservation" ) as post, mock.patch.object( + transports.ReservationServiceRestInterceptor, + "post_failover_reservation_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ReservationServiceRestInterceptor, "pre_failover_reservation" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = reservation.FailoverReservationRequest.pb( reservation.FailoverReservationRequest() ) @@ -16116,6 
+16197,7 @@ def test_failover_reservation_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = reservation.Reservation() + post_with_metadata.return_value = reservation.Reservation(), metadata client.failover_reservation( request, @@ -16127,6 +16209,7 @@ def test_failover_reservation_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_capacity_commitment_rest_bad_request( @@ -16316,10 +16399,14 @@ def test_create_capacity_commitment_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ReservationServiceRestInterceptor, "post_create_capacity_commitment" ) as post, mock.patch.object( + transports.ReservationServiceRestInterceptor, + "post_create_capacity_commitment_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ReservationServiceRestInterceptor, "pre_create_capacity_commitment" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = reservation.CreateCapacityCommitmentRequest.pb( reservation.CreateCapacityCommitmentRequest() ) @@ -16345,6 +16432,7 @@ def test_create_capacity_commitment_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = reservation.CapacityCommitment() + post_with_metadata.return_value = reservation.CapacityCommitment(), metadata client.create_capacity_commitment( request, @@ -16356,6 +16444,7 @@ def test_create_capacity_commitment_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_capacity_commitments_rest_bad_request( @@ -16440,10 +16529,14 @@ def test_list_capacity_commitments_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ReservationServiceRestInterceptor, "post_list_capacity_commitments" ) as post, mock.patch.object( + transports.ReservationServiceRestInterceptor, + "post_list_capacity_commitments_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ReservationServiceRestInterceptor, "pre_list_capacity_commitments" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = reservation.ListCapacityCommitmentsRequest.pb( reservation.ListCapacityCommitmentsRequest() ) @@ -16469,6 +16562,10 @@ def test_list_capacity_commitments_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = reservation.ListCapacityCommitmentsResponse() + post_with_metadata.return_value = ( + reservation.ListCapacityCommitmentsResponse(), + metadata, + ) client.list_capacity_commitments( request, @@ -16480,6 +16577,7 @@ def test_list_capacity_commitments_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_capacity_commitment_rest_bad_request( @@ -16582,10 +16680,14 @@ def test_get_capacity_commitment_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ReservationServiceRestInterceptor, "post_get_capacity_commitment" ) as post, mock.patch.object( + transports.ReservationServiceRestInterceptor, + "post_get_capacity_commitment_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ReservationServiceRestInterceptor, "pre_get_capacity_commitment" ) as pre: pre.assert_not_called() post.assert_not_called() + 
post_with_metadata.assert_not_called() pb_message = reservation.GetCapacityCommitmentRequest.pb( reservation.GetCapacityCommitmentRequest() ) @@ -16611,6 +16713,7 @@ def test_get_capacity_commitment_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = reservation.CapacityCommitment() + post_with_metadata.return_value = reservation.CapacityCommitment(), metadata client.get_capacity_commitment( request, @@ -16622,6 +16725,7 @@ def test_get_capacity_commitment_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_capacity_commitment_rest_bad_request( @@ -16932,10 +17036,14 @@ def test_update_capacity_commitment_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ReservationServiceRestInterceptor, "post_update_capacity_commitment" ) as post, mock.patch.object( + transports.ReservationServiceRestInterceptor, + "post_update_capacity_commitment_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ReservationServiceRestInterceptor, "pre_update_capacity_commitment" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = reservation.UpdateCapacityCommitmentRequest.pb( reservation.UpdateCapacityCommitmentRequest() ) @@ -16961,6 +17069,7 @@ def test_update_capacity_commitment_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = reservation.CapacityCommitment() + post_with_metadata.return_value = reservation.CapacityCommitment(), metadata client.update_capacity_commitment( request, @@ -16972,6 +17081,7 @@ def test_update_capacity_commitment_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_split_capacity_commitment_rest_bad_request( @@ -17057,10 +17167,14 @@ def test_split_capacity_commitment_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ReservationServiceRestInterceptor, "post_split_capacity_commitment" ) as post, mock.patch.object( + transports.ReservationServiceRestInterceptor, + "post_split_capacity_commitment_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ReservationServiceRestInterceptor, "pre_split_capacity_commitment" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = reservation.SplitCapacityCommitmentRequest.pb( reservation.SplitCapacityCommitmentRequest() ) @@ -17086,6 +17200,10 @@ def test_split_capacity_commitment_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = reservation.SplitCapacityCommitmentResponse() + post_with_metadata.return_value = ( + reservation.SplitCapacityCommitmentResponse(), + metadata, + ) client.split_capacity_commitment( request, @@ -17097,6 +17215,7 @@ def test_split_capacity_commitment_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_merge_capacity_commitments_rest_bad_request( @@ -17195,10 +17314,14 @@ def test_merge_capacity_commitments_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ReservationServiceRestInterceptor, "post_merge_capacity_commitments" ) as post, mock.patch.object( + transports.ReservationServiceRestInterceptor, + "post_merge_capacity_commitments_with_metadata", + ) as post_with_metadata, 
mock.patch.object( transports.ReservationServiceRestInterceptor, "pre_merge_capacity_commitments" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = reservation.MergeCapacityCommitmentsRequest.pb( reservation.MergeCapacityCommitmentsRequest() ) @@ -17224,6 +17347,7 @@ def test_merge_capacity_commitments_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = reservation.CapacityCommitment() + post_with_metadata.return_value = reservation.CapacityCommitment(), metadata client.merge_capacity_commitments( request, @@ -17235,6 +17359,7 @@ def test_merge_capacity_commitments_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_assignment_rest_bad_request( @@ -17398,10 +17523,14 @@ def test_create_assignment_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ReservationServiceRestInterceptor, "post_create_assignment" ) as post, mock.patch.object( + transports.ReservationServiceRestInterceptor, + "post_create_assignment_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ReservationServiceRestInterceptor, "pre_create_assignment" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = reservation.CreateAssignmentRequest.pb( reservation.CreateAssignmentRequest() ) @@ -17425,6 +17554,7 @@ def test_create_assignment_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = reservation.Assignment() + post_with_metadata.return_value = reservation.Assignment(), metadata client.create_assignment( request, @@ -17436,6 +17566,7 @@ def test_create_assignment_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_assignments_rest_bad_request( @@ -17520,10 +17651,14 @@ def test_list_assignments_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ReservationServiceRestInterceptor, "post_list_assignments" ) as post, mock.patch.object( + transports.ReservationServiceRestInterceptor, + "post_list_assignments_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ReservationServiceRestInterceptor, "pre_list_assignments" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = reservation.ListAssignmentsRequest.pb( reservation.ListAssignmentsRequest() ) @@ -17549,6 +17684,10 @@ def test_list_assignments_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = reservation.ListAssignmentsResponse() + post_with_metadata.return_value = ( + reservation.ListAssignmentsResponse(), + metadata, + ) client.list_assignments( request, @@ -17560,6 +17699,7 @@ def test_list_assignments_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_assignment_rest_bad_request( @@ -17757,10 +17897,14 @@ def test_search_assignments_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ReservationServiceRestInterceptor, "post_search_assignments" ) as post, mock.patch.object( + transports.ReservationServiceRestInterceptor, + "post_search_assignments_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ReservationServiceRestInterceptor, 
"pre_search_assignments" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = reservation.SearchAssignmentsRequest.pb( reservation.SearchAssignmentsRequest() ) @@ -17786,6 +17930,10 @@ def test_search_assignments_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = reservation.SearchAssignmentsResponse() + post_with_metadata.return_value = ( + reservation.SearchAssignmentsResponse(), + metadata, + ) client.search_assignments( request, @@ -17797,6 +17945,7 @@ def test_search_assignments_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_search_all_assignments_rest_bad_request( @@ -17881,10 +18030,14 @@ def test_search_all_assignments_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ReservationServiceRestInterceptor, "post_search_all_assignments" ) as post, mock.patch.object( + transports.ReservationServiceRestInterceptor, + "post_search_all_assignments_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ReservationServiceRestInterceptor, "pre_search_all_assignments" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = reservation.SearchAllAssignmentsRequest.pb( reservation.SearchAllAssignmentsRequest() ) @@ -17910,6 +18063,10 @@ def test_search_all_assignments_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = reservation.SearchAllAssignmentsResponse() + post_with_metadata.return_value = ( + reservation.SearchAllAssignmentsResponse(), + metadata, + ) client.search_all_assignments( request, @@ -17921,6 +18078,7 @@ def test_search_all_assignments_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_move_assignment_rest_bad_request( @@ -18015,10 +18173,14 @@ def test_move_assignment_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ReservationServiceRestInterceptor, "post_move_assignment" ) as post, mock.patch.object( + transports.ReservationServiceRestInterceptor, + "post_move_assignment_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ReservationServiceRestInterceptor, "pre_move_assignment" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = reservation.MoveAssignmentRequest.pb( reservation.MoveAssignmentRequest() ) @@ -18042,6 +18204,7 @@ def test_move_assignment_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = reservation.Assignment() + post_with_metadata.return_value = reservation.Assignment(), metadata client.move_assignment( request, @@ -18053,6 +18216,7 @@ def test_move_assignment_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_assignment_rest_bad_request( @@ -18224,10 +18388,14 @@ def test_update_assignment_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ReservationServiceRestInterceptor, "post_update_assignment" ) as post, mock.patch.object( + transports.ReservationServiceRestInterceptor, + "post_update_assignment_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ReservationServiceRestInterceptor, "pre_update_assignment" ) as pre: 
pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = reservation.UpdateAssignmentRequest.pb( reservation.UpdateAssignmentRequest() ) @@ -18251,6 +18419,7 @@ def test_update_assignment_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = reservation.Assignment() + post_with_metadata.return_value = reservation.Assignment(), metadata client.update_assignment( request, @@ -18262,6 +18431,7 @@ def test_update_assignment_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_bi_reservation_rest_bad_request( @@ -18348,10 +18518,14 @@ def test_get_bi_reservation_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ReservationServiceRestInterceptor, "post_get_bi_reservation" ) as post, mock.patch.object( + transports.ReservationServiceRestInterceptor, + "post_get_bi_reservation_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ReservationServiceRestInterceptor, "pre_get_bi_reservation" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = reservation.GetBiReservationRequest.pb( reservation.GetBiReservationRequest() ) @@ -18375,6 +18549,7 @@ def test_get_bi_reservation_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = reservation.BiReservation() + post_with_metadata.return_value = reservation.BiReservation(), metadata client.get_bi_reservation( request, @@ -18386,6 +18561,7 @@ def test_get_bi_reservation_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_bi_reservation_rest_bad_request( @@ -18555,10 +18731,14 @@ def test_update_bi_reservation_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ReservationServiceRestInterceptor, "post_update_bi_reservation" ) as post, mock.patch.object( + transports.ReservationServiceRestInterceptor, + "post_update_bi_reservation_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ReservationServiceRestInterceptor, "pre_update_bi_reservation" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = reservation.UpdateBiReservationRequest.pb( reservation.UpdateBiReservationRequest() ) @@ -18582,6 +18762,7 @@ def test_update_bi_reservation_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = reservation.BiReservation() + post_with_metadata.return_value = reservation.BiReservation(), metadata client.update_bi_reservation( request, @@ -18593,6 +18774,7 @@ def test_update_bi_reservation_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_initialize_client_w_rest(): diff --git a/packages/google-cloud-billing-budgets/CHANGELOG.md b/packages/google-cloud-billing-budgets/CHANGELOG.md index f78b3d9bc5d5..92d1a6cd6373 100644 --- a/packages/google-cloud-billing-budgets/CHANGELOG.md +++ b/packages/google-cloud-billing-budgets/CHANGELOG.md @@ -1,4 +1,12 @@ # Changelog +## [1.17.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-billing-budgets-v1.16.0...google-cloud-billing-budgets-v1.17.0) (2025-02-12) + + +### Features + +* Add REST Interceptors which support reading metadata 
([e22e2bd](https://github.com/googleapis/google-cloud-python/commit/e22e2bde55d11d2f85e9d2caf1d152a4027f88cf)) +* Add support for reading selective GAPIC generation methods from service YAML ([e22e2bd](https://github.com/googleapis/google-cloud-python/commit/e22e2bde55d11d2f85e9d2caf1d152a4027f88cf)) + ## [1.16.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-billing-budgets-v1.15.1...google-cloud-billing-budgets-v1.16.0) (2024-12-12) diff --git a/packages/google-cloud-billing-budgets/google/cloud/billing/budgets/gapic_version.py b/packages/google-cloud-billing-budgets/google/cloud/billing/budgets/gapic_version.py index 3e0ea3b28f0a..007d6040cbe0 100644 --- a/packages/google-cloud-billing-budgets/google/cloud/billing/budgets/gapic_version.py +++ b/packages/google-cloud-billing-budgets/google/cloud/billing/budgets/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.16.0" # {x-release-please-version} +__version__ = "1.17.0" # {x-release-please-version} diff --git a/packages/google-cloud-billing-budgets/google/cloud/billing/budgets_v1/gapic_version.py b/packages/google-cloud-billing-budgets/google/cloud/billing/budgets_v1/gapic_version.py index 3e0ea3b28f0a..007d6040cbe0 100644 --- a/packages/google-cloud-billing-budgets/google/cloud/billing/budgets_v1/gapic_version.py +++ b/packages/google-cloud-billing-budgets/google/cloud/billing/budgets_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.16.0" # {x-release-please-version} +__version__ = "1.17.0" # {x-release-please-version} diff --git a/packages/google-cloud-billing-budgets/google/cloud/billing/budgets_v1/proto/budget_model.proto b/packages/google-cloud-billing-budgets/google/cloud/billing/budgets_v1/proto/budget_model.proto deleted file mode 100644 index eefe3db44582..000000000000 --- a/packages/google-cloud-billing-budgets/google/cloud/billing/budgets_v1/proto/budget_model.proto +++ /dev/null @@ -1,236 +0,0 @@ -// Copyright 2021 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -syntax = "proto3"; - -package google.cloud.billing.budgets.v1; - -import "google/api/field_behavior.proto"; -import "google/api/resource.proto"; -import "google/protobuf/struct.proto"; -import "google/type/money.proto"; - -option go_package = "google.golang.org/genproto/googleapis/cloud/billing/budgets/v1;budgets"; -option java_multiple_files = true; -option java_outer_classname = "BudgetModelProto"; -option java_package = "com.google.cloud.billing.budgets.v1"; - -// A budget is a plan that describes what you expect to spend on Cloud -// projects, plus the rules to execute as spend is tracked against that plan, -// (for example, send an alert when 90% of the target spend is met). 
-// Currently all plans are monthly budgets so the usage period(s) tracked are -// implied (calendar months of usage back-to-back). -message Budget { - option (google.api.resource) = { - type: "billingbudgets.googleapis.com/Budget" - pattern: "billingAccounts/{billing_account}/budgets/{budget}" - }; - - // Output only. Resource name of the budget. - // The resource name implies the scope of a budget. Values are of the form - // `billingAccounts/{billingAccountId}/budgets/{budgetId}`. - string name = 1 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // User data for display name in UI. The name must be less than or equal to 60 - // characters. - string display_name = 2; - - // Optional. Filters that define which resources are used to compute - // the actual spend against the budget. - Filter budget_filter = 3 [(google.api.field_behavior) = OPTIONAL]; - - // Required. Budgeted amount. - BudgetAmount amount = 4 [(google.api.field_behavior) = REQUIRED]; - - // Optional. Rules that trigger alerts (notifications of thresholds - // being crossed) when spend exceeds the specified percentages of the budget. - repeated ThresholdRule threshold_rules = 5 - [(google.api.field_behavior) = OPTIONAL]; - - // Optional. Rules to apply to notifications sent based on budget spend and - // thresholds. - NotificationsRule notifications_rule = 6 - [(google.api.field_behavior) = OPTIONAL]; - - // Optional. Etag to validate that the object is unchanged for a - // read-modify-write operation. - // An empty etag will cause an update to overwrite other changes. - string etag = 7 [(google.api.field_behavior) = OPTIONAL]; -} - -// The budgeted amount for each usage period. -message BudgetAmount { - // Specification for what amount to use as the budget. - oneof budget_amount { - // A specified amount to use as the budget. - // `currency_code` is optional. If specified when creating a budget, it must - // match the currency of the billing account. If specified when updating a - // budget, it must match the currency_code of the existing budget. - // The `currency_code` is provided on output. - google.type.Money specified_amount = 1; - - // Use the last period's actual spend as the budget for the present period. - LastPeriodAmount last_period_amount = 2; - } -} - -// Describes a budget amount targeted to last period's spend. -// At this time, the amount is automatically 100% of last period's spend; -// that is, there are no other options yet. -// Future configuration will be described here (for example, configuring a -// percentage of last period's spend). -message LastPeriodAmount {} - -// ThresholdRule contains a definition of a threshold which triggers -// an alert (a notification of a threshold being crossed) to be sent when -// spend goes above the specified amount. -// Alerts are automatically e-mailed to users with the Billing Account -// Administrator role or the Billing Account User role. -// The thresholds here have no effect on notifications sent to anything -// configured under `Budget.all_updates_rule`. -message ThresholdRule { - // The type of basis used to determine if spend has passed the threshold. - enum Basis { - // Unspecified threshold basis. - BASIS_UNSPECIFIED = 0; - - // Use current spend as the basis for comparison against the threshold. - CURRENT_SPEND = 1; - - // Use forecasted spend for the period as the basis for comparison against - // the threshold. - FORECASTED_SPEND = 2; - } - - // Required. Send an alert when this threshold is exceeded. - // This is a 1.0-based percentage, so 0.5 = 50%. 
- // Validation: non-negative number. - double threshold_percent = 1 [(google.api.field_behavior) = REQUIRED]; - - // Optional. The type of basis used to determine if spend has passed the - // threshold. Behavior defaults to CURRENT_SPEND if not set. - Basis spend_basis = 2 [(google.api.field_behavior) = OPTIONAL]; -} - -// NotificationsRule defines notifications that are sent based on budget spend -// and thresholds. -message NotificationsRule { - // Optional. The name of the Pub/Sub topic where budget related messages will - // be published, in the form `projects/{project_id}/topics/{topic_id}`. - // Updates are sent at regular intervals to the topic. The topic needs to be - // created before the budget is created; see - // https://cloud.google.com/billing/docs/how-to/budgets#manage-notifications - // for more details. - // Caller is expected to have - // `pubsub.topics.setIamPolicy` permission on the topic when it's set for a - // budget, otherwise, the API call will fail with PERMISSION_DENIED. See - // https://cloud.google.com/billing/docs/how-to/budgets-programmatic-notifications - // for more details on Pub/Sub roles and permissions. - string pubsub_topic = 1 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. Required when - // [NotificationsRule.pubsub_topic][google.cloud.billing.budgets.v1.NotificationsRule.pubsub_topic] - // is set. The schema version of the notification sent to - // [NotificationsRule.pubsub_topic][google.cloud.billing.budgets.v1.NotificationsRule.pubsub_topic]. - // Only "1.0" is accepted. It represents the JSON schema as defined in - // https://cloud.google.com/billing/docs/how-to/budgets-programmatic-notifications#notification_format. - string schema_version = 2 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. Targets to send notifications to when a threshold is exceeded. - // This is in addition to default recipients who have billing account IAM - // roles. The value is the full REST resource name of a monitoring - // notification channel with the form - // `projects/{project_id}/notificationChannels/{channel_id}`. A maximum of 5 - // channels are allowed. See - // https://cloud.google.com/billing/docs/how-to/budgets-notification-recipients - // for more details. - repeated string monitoring_notification_channels = 3 - [(google.api.field_behavior) = OPTIONAL]; - - // Optional. When set to true, disables default notifications sent when a - // threshold is exceeded. Default notifications are sent to those with Billing - // Account Administrator and Billing Account User IAM roles for the target - // account. - bool disable_default_iam_recipients = 4 - [(google.api.field_behavior) = OPTIONAL]; -} - -// A filter for a budget, limiting the scope of the cost to calculate. -message Filter { - // Specifies how credits should be treated when determining spend for - // threshold calculations. - enum CreditTypesTreatment { - // This is an invalid value. - CREDIT_TYPES_TREATMENT_UNSPECIFIED = 0; - - // All types of credit are subtracted from the gross cost to determine the - // spend for threshold calculations. - INCLUDE_ALL_CREDITS = 1; - - // All types of credit are added to the net cost to determine the spend for - // threshold calculations. - EXCLUDE_ALL_CREDITS = 2; - - // Credit types specified in the credit_types field are subtracted from the - // gross cost to determine the spend for threshold calculations. - INCLUDE_SPECIFIED_CREDITS = 3; - } - - // Optional. 
A set of projects of the form `projects/{project}`, - // specifying that usage from only this set of projects should be - // included in the budget. If omitted, the report will include all usage for - // the billing account, regardless of which project the usage occurred on. - // Only zero or one project can be specified currently. - repeated string projects = 1 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. If - // [Filter.credit_types_treatment][google.cloud.billing.budgets.v1.Filter.credit_types_treatment] - // is INCLUDE_SPECIFIED_CREDITS, this is a list of credit types to be - // subtracted from gross cost to determine the spend for threshold - // calculations. - // - // If - // [Filter.credit_types_treatment][google.cloud.billing.budgets.v1.Filter.credit_types_treatment] - // is **not** INCLUDE_SPECIFIED_CREDITS, this field must be empty. See [a list - // of acceptable credit type - // values](https://cloud.google.com/billing/docs/how-to/export-data-bigquery-tables#credits-type). - repeated string credit_types = 7 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. If not set, default behavior is `INCLUDE_ALL_CREDITS`. - CreditTypesTreatment credit_types_treatment = 4 - [(google.api.field_behavior) = OPTIONAL]; - - // Optional. A set of services of the form `services/{service_id}`, - // specifying that usage from only this set of services should be - // included in the budget. If omitted, the report will include usage for - // all the services. - // The service names are available through the Catalog API: - // https://cloud.google.com/billing/v1/how-tos/catalog-api. - repeated string services = 3 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. A set of subaccounts of the form `billingAccounts/{account_id}`, - // specifying that usage from only this set of subaccounts should be included - // in the budget. If a subaccount is set to the name of the parent account, - // usage from the parent account will be included. If the field is omitted, - // the report will include usage from the parent account and all subaccounts, - // if they exist. - repeated string subaccounts = 5 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. A single label and value pair specifying that usage from only - // this set of labeled resources should be included in the budget. Currently, - // multiple entries or multiple values per entry are not allowed. If omitted, - // the report will include all labeled and unlabeled usage. - map labels = 6 - [(google.api.field_behavior) = OPTIONAL]; -} diff --git a/packages/google-cloud-billing-budgets/google/cloud/billing/budgets_v1/proto/budget_service.proto b/packages/google-cloud-billing-budgets/google/cloud/billing/budgets_v1/proto/budget_service.proto deleted file mode 100644 index a4b512b8a6fa..000000000000 --- a/packages/google-cloud-billing-budgets/google/cloud/billing/budgets_v1/proto/budget_service.proto +++ /dev/null @@ -1,183 +0,0 @@ -// Copyright 2021 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
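The budget_model.proto deletion above removes the checked-in proto source; the Budget shape it documents remains available through the generated budgets_v1 types. A minimal sketch, assuming only the public google-cloud-billing-budgets surface, with placeholder project and billing-account IDs:

from google.cloud.billing import budgets_v1
from google.type import money_pb2

# Illustrative only: mirrors the removed proto's Budget/BudgetAmount/
# ThresholdRule/Filter messages using the generated proto-plus types.
budget = budgets_v1.Budget(
    display_name="monthly-infra-budget",
    budget_filter=budgets_v1.Filter(
        projects=["projects/example-project"],  # placeholder project
        credit_types_treatment=budgets_v1.Filter.CreditTypesTreatment.INCLUDE_ALL_CREDITS,
    ),
    amount=budgets_v1.BudgetAmount(
        specified_amount=money_pb2.Money(currency_code="USD", units=1000)
    ),
    threshold_rules=[
        budgets_v1.ThresholdRule(
            threshold_percent=0.5,  # 1.0-based percentage, so 0.5 = 50%
            spend_basis=budgets_v1.ThresholdRule.Basis.CURRENT_SPEND,
        )
    ],
)

client = budgets_v1.BudgetServiceClient()
created = client.create_budget(
    request={"parent": "billingAccounts/000000-000000-000000", "budget": budget}
)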
- -syntax = "proto3"; - -package google.cloud.billing.budgets.v1; - -import "google/api/annotations.proto"; -import "google/api/client.proto"; -import "google/api/field_behavior.proto"; -import "google/api/resource.proto"; -import "google/cloud/billing/budgets/v1/budget_model.proto"; -import "google/protobuf/empty.proto"; -import "google/protobuf/field_mask.proto"; - -option go_package = "google.golang.org/genproto/googleapis/cloud/billing/budgets/v1;budgets"; -option java_multiple_files = true; -option java_outer_classname = "BudgetServiceProto"; -option java_package = "com.google.cloud.billing.budgets.v1"; - -// BudgetService stores Cloud Billing budgets, which define a -// budget plan and rules to execute as we track spend against that plan. -service BudgetService { - option (google.api.default_host) = "billingbudgets.googleapis.com"; - option (google.api.oauth_scopes) = - "https://www.googleapis.com/auth/cloud-billing," - "https://www.googleapis.com/auth/cloud-platform"; - - // Creates a new budget. See - // [Quotas and limits](https://cloud.google.com/billing/quotas) - // for more information on the limits of the number of budgets you can create. - rpc CreateBudget(CreateBudgetRequest) returns (Budget) { - option (google.api.http) = { - post: "/v1/{parent=billingAccounts/*}/budgets" - body: "budget" - }; - option (google.api.method_signature) = "parent,budget"; - } - - // Updates a budget and returns the updated budget. - // - // WARNING: There are some fields exposed on the Google Cloud Console that - // aren't available on this API. Budget fields that are not exposed in - // this API will not be changed by this method. - rpc UpdateBudget(UpdateBudgetRequest) returns (Budget) { - option (google.api.http) = { - patch: "/v1/{budget.name=billingAccounts/*/budgets/*}" - body: "budget" - }; - option (google.api.method_signature) = "budget,update_mask"; - } - - // Returns a budget. - // - // WARNING: There are some fields exposed on the Google Cloud Console that - // aren't available on this API. When reading from the API, you will not - // see these fields in the return value, though they may have been set - // in the Cloud Console. - rpc GetBudget(GetBudgetRequest) returns (Budget) { - option (google.api.http) = { - get: "/v1/{name=billingAccounts/*/budgets/*}" - }; - option (google.api.method_signature) = "name"; - } - - // Returns a list of budgets for a billing account. - // - // WARNING: There are some fields exposed on the Google Cloud Console that - // aren't available on this API. When reading from the API, you will not - // see these fields in the return value, though they may have been set - // in the Cloud Console. - rpc ListBudgets(ListBudgetsRequest) returns (ListBudgetsResponse) { - option (google.api.http) = { - get: "/v1/{parent=billingAccounts/*}/budgets" - }; - option (google.api.method_signature) = "parent"; - } - - // Deletes a budget. Returns successfully if already deleted. - rpc DeleteBudget(DeleteBudgetRequest) returns (google.protobuf.Empty) { - option (google.api.http) = { - delete: "/v1/{name=billingAccounts/*/budgets/*}" - }; - option (google.api.method_signature) = "name"; - } -} - -// Request for CreateBudget -message CreateBudgetRequest { - // Required. The name of the billing account to create the budget in. Values - // are of the form `billingAccounts/{billingAccountId}`. - string parent = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - child_type: "billingbudgets.googleapis.com/Budget" - } - ]; - - // Required. 
Budget to create. - Budget budget = 2 [(google.api.field_behavior) = REQUIRED]; -} - -// Request for UpdateBudget -message UpdateBudgetRequest { - // Required. The updated budget object. - // The budget to update is specified by the budget name in the budget. - Budget budget = 1 [(google.api.field_behavior) = REQUIRED]; - - // Optional. Indicates which fields in the provided budget to update. - // Read-only fields (such as `name`) cannot be changed. If this is not - // provided, then only fields with non-default values from the request are - // updated. See - // https://developers.google.com/protocol-buffers/docs/proto3#default for more - // details about default values. - google.protobuf.FieldMask update_mask = 2 - [(google.api.field_behavior) = OPTIONAL]; -} - -// Request for GetBudget -message GetBudgetRequest { - // Required. Name of budget to get. Values are of the form - // `billingAccounts/{billingAccountId}/budgets/{budgetId}`. - string name = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - type: "billingbudgets.googleapis.com/Budget" - } - ]; -} - -// Request for ListBudgets -message ListBudgetsRequest { - // Required. Name of billing account to list budgets under. Values - // are of the form `billingAccounts/{billingAccountId}`. - string parent = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - child_type: "billingbudgets.googleapis.com/Budget" - } - ]; - - // Optional. The maximum number of budgets to return per page. - // The default and maximum value are 100. - int32 page_size = 2 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. The value returned by the last `ListBudgetsResponse` which - // indicates that this is a continuation of a prior `ListBudgets` call, - // and that the system should return the next page of data. - string page_token = 3 [(google.api.field_behavior) = OPTIONAL]; -} - -// Response for ListBudgets -message ListBudgetsResponse { - // List of the budgets owned by the requested billing account. - repeated Budget budgets = 1; - - // If not empty, indicates that there may be more budgets that match the - // request; this value should be passed in a new `ListBudgetsRequest`. - string next_page_token = 2; -} - -// Request for DeleteBudget -message DeleteBudgetRequest { - // Required. Name of the budget to delete. Values are of the form - // `billingAccounts/{billingAccountId}/budgets/{budgetId}`. - string name = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - type: "billingbudgets.googleapis.com/Budget" - } - ]; -} diff --git a/packages/google-cloud-billing-budgets/google/cloud/billing/budgets_v1/services/budget_service/client.py b/packages/google-cloud-billing-budgets/google/cloud/billing/budgets_v1/services/budget_service/client.py index 31004c0485da..e32eb475ffeb 100644 --- a/packages/google-cloud-billing-budgets/google/cloud/billing/budgets_v1/services/budget_service/client.py +++ b/packages/google-cloud-billing-budgets/google/cloud/billing/budgets_v1/services/budget_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -484,6 +486,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. 
return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. diff --git a/packages/google-cloud-billing-budgets/google/cloud/billing/budgets_v1/services/budget_service/transports/rest.py b/packages/google-cloud-billing-budgets/google/cloud/billing/budgets_v1/services/budget_service/transports/rest.py index 890821cd16a1..27b3f775b32d 100644 --- a/packages/google-cloud-billing-budgets/google/cloud/billing/budgets_v1/services/budget_service/transports/rest.py +++ b/packages/google-cloud-billing-budgets/google/cloud/billing/budgets_v1/services/budget_service/transports/rest.py @@ -128,12 +128,35 @@ def pre_create_budget( def post_create_budget(self, response: budget_model.Budget) -> budget_model.Budget: """Post-rpc interceptor for create_budget - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_budget_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the BudgetService server but before - it is returned to user code. + it is returned to user code. This `post_create_budget` interceptor runs + before the `post_create_budget_with_metadata` interceptor. """ return response + def post_create_budget_with_metadata( + self, + response: budget_model.Budget, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[budget_model.Budget, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_budget + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BudgetService server but before it is returned to user code. + + We recommend only using this `post_create_budget_with_metadata` + interceptor in new development instead of the `post_create_budget` interceptor. + When both interceptors are used, this `post_create_budget_with_metadata` interceptor runs after the + `post_create_budget` interceptor. The (possibly modified) response returned by + `post_create_budget` will be passed to + `post_create_budget_with_metadata`. + """ + return response, metadata + def pre_delete_budget( self, request: budget_service.DeleteBudgetRequest, @@ -165,12 +188,35 @@ def pre_get_budget( def post_get_budget(self, response: budget_model.Budget) -> budget_model.Budget: """Post-rpc interceptor for get_budget - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_budget_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the BudgetService server but before - it is returned to user code. + it is returned to user code. This `post_get_budget` interceptor runs + before the `post_get_budget_with_metadata` interceptor. 
""" return response + def post_get_budget_with_metadata( + self, + response: budget_model.Budget, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[budget_model.Budget, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_budget + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BudgetService server but before it is returned to user code. + + We recommend only using this `post_get_budget_with_metadata` + interceptor in new development instead of the `post_get_budget` interceptor. + When both interceptors are used, this `post_get_budget_with_metadata` interceptor runs after the + `post_get_budget` interceptor. The (possibly modified) response returned by + `post_get_budget` will be passed to + `post_get_budget_with_metadata`. + """ + return response, metadata + def pre_list_budgets( self, request: budget_service.ListBudgetsRequest, @@ -190,12 +236,37 @@ def post_list_budgets( ) -> budget_service.ListBudgetsResponse: """Post-rpc interceptor for list_budgets - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_budgets_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the BudgetService server but before - it is returned to user code. + it is returned to user code. This `post_list_budgets` interceptor runs + before the `post_list_budgets_with_metadata` interceptor. """ return response + def post_list_budgets_with_metadata( + self, + response: budget_service.ListBudgetsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + budget_service.ListBudgetsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_budgets + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BudgetService server but before it is returned to user code. + + We recommend only using this `post_list_budgets_with_metadata` + interceptor in new development instead of the `post_list_budgets` interceptor. + When both interceptors are used, this `post_list_budgets_with_metadata` interceptor runs after the + `post_list_budgets` interceptor. The (possibly modified) response returned by + `post_list_budgets` will be passed to + `post_list_budgets_with_metadata`. + """ + return response, metadata + def pre_update_budget( self, request: budget_service.UpdateBudgetRequest, @@ -213,12 +284,35 @@ def pre_update_budget( def post_update_budget(self, response: budget_model.Budget) -> budget_model.Budget: """Post-rpc interceptor for update_budget - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_budget_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the BudgetService server but before - it is returned to user code. + it is returned to user code. This `post_update_budget` interceptor runs + before the `post_update_budget_with_metadata` interceptor. """ return response + def post_update_budget_with_metadata( + self, + response: budget_model.Budget, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[budget_model.Budget, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_budget + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BudgetService server but before it is returned to user code. 
+ + We recommend only using this `post_update_budget_with_metadata` + interceptor in new development instead of the `post_update_budget` interceptor. + When both interceptors are used, this `post_update_budget_with_metadata` interceptor runs after the + `post_update_budget` interceptor. The (possibly modified) response returned by + `post_update_budget` will be passed to + `post_update_budget_with_metadata`. + """ + return response, metadata + @dataclasses.dataclass class BudgetServiceRestStub: @@ -440,6 +534,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_budget(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_budget_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -699,6 +797,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_budget(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_budget_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -841,6 +943,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_budgets(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_budgets_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -998,6 +1104,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_budget(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_budget_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-billing-budgets/google/cloud/billing/budgets_v1beta1/gapic_version.py b/packages/google-cloud-billing-budgets/google/cloud/billing/budgets_v1beta1/gapic_version.py index 3e0ea3b28f0a..007d6040cbe0 100644 --- a/packages/google-cloud-billing-budgets/google/cloud/billing/budgets_v1beta1/gapic_version.py +++ b/packages/google-cloud-billing-budgets/google/cloud/billing/budgets_v1beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.16.0" # {x-release-please-version} +__version__ = "1.17.0" # {x-release-please-version} diff --git a/packages/google-cloud-billing-budgets/google/cloud/billing/budgets_v1beta1/proto/budget_model.proto b/packages/google-cloud-billing-budgets/google/cloud/billing/budgets_v1beta1/proto/budget_model.proto deleted file mode 100644 index 636f32903458..000000000000 --- a/packages/google-cloud-billing-budgets/google/cloud/billing/budgets_v1beta1/proto/budget_model.proto +++ /dev/null @@ -1,233 +0,0 @@ -// Copyright 2021 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -syntax = "proto3"; - -package google.cloud.billing.budgets.v1beta1; - -import "google/api/field_behavior.proto"; -import "google/api/resource.proto"; -import "google/protobuf/struct.proto"; -import "google/type/money.proto"; - -option go_package = "google.golang.org/genproto/googleapis/cloud/billing/budgets/v1beta1;budgets"; -option java_multiple_files = true; -option java_package = "com.google.cloud.billing.budgets.v1beta1"; - -// A budget is a plan that describes what you expect to spend on Cloud -// projects, plus the rules to execute as spend is tracked against that plan, -// (for example, send an alert when 90% of the target spend is met). -// Currently all plans are monthly budgets so the usage period(s) tracked are -// implied (calendar months of usage back-to-back). -message Budget { - option (google.api.resource) = { - type: "billingbudgets.googleapis.com/Budget" - pattern: "billingAccounts/{billing_account}/budgets/{budget}" - }; - - // Output only. Resource name of the budget. - // The resource name implies the scope of a budget. Values are of the form - // `billingAccounts/{billingAccountId}/budgets/{budgetId}`. - string name = 1 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // User data for display name in UI. - // Validation: <= 60 chars. - string display_name = 2; - - // Optional. Filters that define which resources are used to compute - // the actual spend against the budget. - Filter budget_filter = 3 [(google.api.field_behavior) = OPTIONAL]; - - // Required. Budgeted amount. - BudgetAmount amount = 4 [(google.api.field_behavior) = REQUIRED]; - - // Optional. Rules that trigger alerts (notifications of thresholds - // being crossed) when spend exceeds the specified percentages of the budget. - repeated ThresholdRule threshold_rules = 5 - [(google.api.field_behavior) = OPTIONAL]; - - // Optional. Rules to apply to notifications sent based on budget spend and - // thresholds. - AllUpdatesRule all_updates_rule = 6 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. Etag to validate that the object is unchanged for a - // read-modify-write operation. - // An empty etag will cause an update to overwrite other changes. - string etag = 7 [(google.api.field_behavior) = OPTIONAL]; -} - -// The budgeted amount for each usage period. -message BudgetAmount { - // Specification for what amount to use as the budget. - oneof budget_amount { - // A specified amount to use as the budget. - // `currency_code` is optional. If specified when creating a budget, it must - // match the currency of the billing account. If specified when updating a - // budget, it must match the existing budget currency_code. - // The `currency_code` is provided on output. - google.type.Money specified_amount = 1; - - // Use the last period's actual spend as the budget for the present period. - LastPeriodAmount last_period_amount = 2; - } -} - -// Describes a budget amount targeted to last period's spend. -// At this time, the amount is automatically 100% of last period's spend; -// that is, there are no other options yet. 
-// Future configuration will be described here (for example, configuring a -// percentage of last period's spend). -message LastPeriodAmount {} - -// ThresholdRule contains a definition of a threshold which triggers -// an alert (a notification of a threshold being crossed) to be sent when -// spend goes above the specified amount. -// Alerts are automatically e-mailed to users with the Billing Account -// Administrator role or the Billing Account User role. -// The thresholds here have no effect on notifications sent to anything -// configured under `Budget.all_updates_rule`. -message ThresholdRule { - // The type of basis used to determine if spend has passed the threshold. - enum Basis { - // Unspecified threshold basis. - BASIS_UNSPECIFIED = 0; - - // Use current spend as the basis for comparison against the threshold. - CURRENT_SPEND = 1; - - // Use forecasted spend for the period as the basis for comparison against - // the threshold. - FORECASTED_SPEND = 2; - } - - // Required. Send an alert when this threshold is exceeded. - // This is a 1.0-based percentage, so 0.5 = 50%. - // Validation: non-negative number. - double threshold_percent = 1 [(google.api.field_behavior) = REQUIRED]; - - // Optional. The type of basis used to determine if spend has passed the - // threshold. Behavior defaults to CURRENT_SPEND if not set. - Basis spend_basis = 2 [(google.api.field_behavior) = OPTIONAL]; -} - -// AllUpdatesRule defines notifications that are sent based on budget spend -// and thresholds. -message AllUpdatesRule { - // Optional. The name of the Pub/Sub topic where budget related messages will - // be published, in the form `projects/{project_id}/topics/{topic_id}`. - // Updates are sent at regular intervals to the topic. The topic needs to be - // created before the budget is created; see - // https://cloud.google.com/billing/docs/how-to/budgets#manage-notifications - // for more details. - // Caller is expected to have - // `pubsub.topics.setIamPolicy` permission on the topic when it's set for a - // budget, otherwise, the API call will fail with PERMISSION_DENIED. See - // https://cloud.google.com/billing/docs/how-to/budgets-programmatic-notifications - // for more details on Pub/Sub roles and permissions. - string pubsub_topic = 1 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. Required when - // [AllUpdatesRule.pubsub_topic][google.cloud.billing.budgets.v1beta1.AllUpdatesRule.pubsub_topic] - // is set. The schema version of the notification sent to - // [AllUpdatesRule.pubsub_topic][google.cloud.billing.budgets.v1beta1.AllUpdatesRule.pubsub_topic]. - // Only "1.0" is accepted. It represents the JSON schema as defined in - // https://cloud.google.com/billing/docs/how-to/budgets-programmatic-notifications#notification_format. - string schema_version = 2 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. Targets to send notifications to when a threshold is exceeded. - // This is in addition to default recipients who have billing account IAM - // roles. The value is the full REST resource name of a monitoring - // notification channel with the form - // `projects/{project_id}/notificationChannels/{channel_id}`. A maximum of 5 - // channels are allowed. See - // https://cloud.google.com/billing/docs/how-to/budgets-notification-recipients - // for more details. - repeated string monitoring_notification_channels = 3 - [(google.api.field_behavior) = OPTIONAL]; - - // Optional. When set to true, disables default notifications sent when a - // threshold is exceeded. 
Default notifications are sent to those with Billing - // Account Administrator and Billing Account User IAM roles for the target - // account. - bool disable_default_iam_recipients = 4 - [(google.api.field_behavior) = OPTIONAL]; -} - -// A filter for a budget, limiting the scope of the cost to calculate. -message Filter { - // Specifies how credits should be treated when determining spend for - // threshold calculations. - enum CreditTypesTreatment { - CREDIT_TYPES_TREATMENT_UNSPECIFIED = 0; - - // All types of credit are subtracted from the gross cost to determine the - // spend for threshold calculations. - INCLUDE_ALL_CREDITS = 1; - - // All types of credit are added to the net cost to determine the spend for - // threshold calculations. - EXCLUDE_ALL_CREDITS = 2; - - // Credit types specified in the credit_types field are subtracted from the - // gross cost to determine the spend for threshold calculations. - INCLUDE_SPECIFIED_CREDITS = 3; - } - - // Optional. A set of projects of the form `projects/{project}`, - // specifying that usage from only this set of projects should be - // included in the budget. If omitted, the report will include all usage for - // the billing account, regardless of which project the usage occurred on. - // Only zero or one project can be specified currently. - repeated string projects = 1 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. If - // [Filter.credit_types_treatment][google.cloud.billing.budgets.v1beta1.Filter.credit_types_treatment] - // is INCLUDE_SPECIFIED_CREDITS, this is a list of credit types to be - // subtracted from gross cost to determine the spend for threshold - // calculations. - // - // If - // [Filter.credit_types_treatment][google.cloud.billing.budgets.v1beta1.Filter.credit_types_treatment] - // is **not** INCLUDE_SPECIFIED_CREDITS, this field must be empty. See [a list - // of acceptable credit type - // values](https://cloud.google.com/billing/docs/how-to/export-data-bigquery-tables#credits-type). - repeated string credit_types = 7 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. If not set, default behavior is `INCLUDE_ALL_CREDITS`. - CreditTypesTreatment credit_types_treatment = 4 - [(google.api.field_behavior) = OPTIONAL]; - - // Optional. A set of services of the form `services/{service_id}`, - // specifying that usage from only this set of services should be - // included in the budget. If omitted, the report will include usage for - // all the services. - // The service names are available through the Catalog API: - // https://cloud.google.com/billing/v1/how-tos/catalog-api. - repeated string services = 3 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. A set of subaccounts of the form `billingAccounts/{account_id}`, - // specifying that usage from only this set of subaccounts should be included - // in the budget. If a subaccount is set to the name of the parent account, - // usage from the parent account will be included. If omitted, the - // report will include usage from the parent account and all - // subaccounts, if they exist. - repeated string subaccounts = 5 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. A single label and value pair specifying that usage from only - // this set of labeled resources should be included in the budget. Currently, - // multiple entries or multiple values per entry are not allowed. If omitted, - // the report will include all labeled and unlabeled usage. 
- map labels = 6 - [(google.api.field_behavior) = OPTIONAL]; -} diff --git a/packages/google-cloud-billing-budgets/google/cloud/billing/budgets_v1beta1/proto/budget_service.proto b/packages/google-cloud-billing-budgets/google/cloud/billing/budgets_v1beta1/proto/budget_service.proto deleted file mode 100644 index d97934704831..000000000000 --- a/packages/google-cloud-billing-budgets/google/cloud/billing/budgets_v1beta1/proto/budget_service.proto +++ /dev/null @@ -1,177 +0,0 @@ -// Copyright 2021 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -syntax = "proto3"; - -package google.cloud.billing.budgets.v1beta1; - -import "google/api/annotations.proto"; -import "google/api/client.proto"; -import "google/api/field_behavior.proto"; -import "google/api/resource.proto"; -import "google/cloud/billing/budgets/v1beta1/budget_model.proto"; -import "google/protobuf/empty.proto"; -import "google/protobuf/field_mask.proto"; - -option go_package = "google.golang.org/genproto/googleapis/cloud/billing/budgets/v1beta1;budgets"; -option java_multiple_files = true; -option java_package = "com.google.cloud.billing.budgets.v1beta1"; - -// BudgetService stores Cloud Billing budgets, which define a -// budget plan and rules to execute as we track spend against that plan. -service BudgetService { - option (google.api.default_host) = "billingbudgets.googleapis.com"; - option (google.api.oauth_scopes) = - "https://www.googleapis.com/auth/cloud-billing," - "https://www.googleapis.com/auth/cloud-platform"; - - // Creates a new budget. See - // Quotas and limits - // for more information on the limits of the number of budgets you can create. - rpc CreateBudget(CreateBudgetRequest) returns (Budget) { - option (google.api.http) = { - post: "/v1beta1/{parent=billingAccounts/*}/budgets" - body: "*" - }; - } - - // Updates a budget and returns the updated budget. - // - // WARNING: There are some fields exposed on the Google Cloud Console that - // aren't available on this API. Budget fields that are not exposed in - // this API will not be changed by this method. - rpc UpdateBudget(UpdateBudgetRequest) returns (Budget) { - option (google.api.http) = { - patch: "/v1beta1/{budget.name=billingAccounts/*/budgets/*}" - body: "*" - }; - } - - // Returns a budget. - // - // WARNING: There are some fields exposed on the Google Cloud Console that - // aren't available on this API. When reading from the API, you will not - // see these fields in the return value, though they may have been set - // in the Cloud Console. - rpc GetBudget(GetBudgetRequest) returns (Budget) { - option (google.api.http) = { - get: "/v1beta1/{name=billingAccounts/*/budgets/*}" - }; - } - - // Returns a list of budgets for a billing account. - // - // WARNING: There are some fields exposed on the Google Cloud Console that - // aren't available on this API. When reading from the API, you will not - // see these fields in the return value, though they may have been set - // in the Cloud Console. 
- rpc ListBudgets(ListBudgetsRequest) returns (ListBudgetsResponse) { - option (google.api.http) = { - get: "/v1beta1/{parent=billingAccounts/*}/budgets" - }; - } - - // Deletes a budget. Returns successfully if already deleted. - rpc DeleteBudget(DeleteBudgetRequest) returns (google.protobuf.Empty) { - option (google.api.http) = { - delete: "/v1beta1/{name=billingAccounts/*/budgets/*}" - }; - } -} - -// Request for CreateBudget -message CreateBudgetRequest { - // Required. The name of the billing account to create the budget in. Values - // are of the form `billingAccounts/{billingAccountId}`. - string parent = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - child_type: "billingbudgets.googleapis.com/Budget" - } - ]; - - // Required. Budget to create. - Budget budget = 2 [(google.api.field_behavior) = REQUIRED]; -} - -// Request for UpdateBudget -message UpdateBudgetRequest { - // Required. The updated budget object. - // The budget to update is specified by the budget name in the budget. - Budget budget = 1 [(google.api.field_behavior) = REQUIRED]; - - // Optional. Indicates which fields in the provided budget to update. - // Read-only fields (such as `name`) cannot be changed. If this is not - // provided, then only fields with non-default values from the request are - // updated. See - // https://developers.google.com/protocol-buffers/docs/proto3#default for more - // details about default values. - google.protobuf.FieldMask update_mask = 2 - [(google.api.field_behavior) = OPTIONAL]; -} - -// Request for GetBudget -message GetBudgetRequest { - // Required. Name of budget to get. Values are of the form - // `billingAccounts/{billingAccountId}/budgets/{budgetId}`. - string name = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - type: "billingbudgets.googleapis.com/Budget" - } - ]; -} - -// Request for ListBudgets -message ListBudgetsRequest { - // Required. Name of billing account to list budgets under. Values - // are of the form `billingAccounts/{billingAccountId}`. - string parent = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - child_type: "billingbudgets.googleapis.com/Budget" - } - ]; - - // Optional. The maximum number of budgets to return per page. - // The default and maximum value are 100. - int32 page_size = 2 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. The value returned by the last `ListBudgetsResponse` which - // indicates that this is a continuation of a prior `ListBudgets` call, - // and that the system should return the next page of data. - string page_token = 3 [(google.api.field_behavior) = OPTIONAL]; -} - -// Response for ListBudgets -message ListBudgetsResponse { - // List of the budgets owned by the requested billing account. - repeated Budget budgets = 1; - - // If not empty, indicates that there may be more budgets that match the - // request; this value should be passed in a new `ListBudgetsRequest`. - string next_page_token = 2; -} - -// Request for DeleteBudget -message DeleteBudgetRequest { - // Required. Name of the budget to delete. Values are of the form - // `billingAccounts/{billingAccountId}/budgets/{budgetId}`. 
- string name = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - type: "billingbudgets.googleapis.com/Budget" - } - ]; -} diff --git a/packages/google-cloud-billing-budgets/google/cloud/billing/budgets_v1beta1/services/budget_service/client.py b/packages/google-cloud-billing-budgets/google/cloud/billing/budgets_v1beta1/services/budget_service/client.py index 0513d45e3a6d..6b3b39bc93a6 100644 --- a/packages/google-cloud-billing-budgets/google/cloud/billing/budgets_v1beta1/services/budget_service/client.py +++ b/packages/google-cloud-billing-budgets/google/cloud/billing/budgets_v1beta1/services/budget_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -480,6 +482,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. 
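For calling code, the effect of the `_add_cred_info_for_auth_errors` helper added above is that 401/403/404 errors may carry one extra JSON detail describing the credential in use (only when google-auth>=2.35.0 exposes `get_cred_info`). A hedged sketch of reading it, assuming the generated methods pass API-call errors through this helper before re-raising (those call sites are outside this diff) and using a placeholder budget name:

from google.api_core import exceptions as core_exceptions
from google.cloud.billing import budgets_v1beta1

client = budgets_v1beta1.BudgetServiceClient()
try:
    client.get_budget(
        request={"name": "billingAccounts/000000-000000-000000/budgets/example-budget"}
    )
except core_exceptions.PermissionDenied as exc:
    # On 401/403/404, the client may append a JSON string such as
    # {"credential_source": ..., "credential_type": ..., "principal": ...}
    # to the error details, which helps identify which identity was used.
    for detail in exc.details:
        print(detail)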
diff --git a/packages/google-cloud-billing-budgets/samples/generated_samples/snippet_metadata_google.cloud.billing.budgets.v1.json b/packages/google-cloud-billing-budgets/samples/generated_samples/snippet_metadata_google.cloud.billing.budgets.v1.json index 34b1ce5af4f0..c6ea803043ca 100644 --- a/packages/google-cloud-billing-budgets/samples/generated_samples/snippet_metadata_google.cloud.billing.budgets.v1.json +++ b/packages/google-cloud-billing-budgets/samples/generated_samples/snippet_metadata_google.cloud.billing.budgets.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-billing-budgets", - "version": "1.16.0" + "version": "1.17.0" }, "snippets": [ { diff --git a/packages/google-cloud-billing-budgets/samples/generated_samples/snippet_metadata_google.cloud.billing.budgets.v1beta1.json b/packages/google-cloud-billing-budgets/samples/generated_samples/snippet_metadata_google.cloud.billing.budgets.v1beta1.json index e9159d2a85ba..343e03f0a576 100644 --- a/packages/google-cloud-billing-budgets/samples/generated_samples/snippet_metadata_google.cloud.billing.budgets.v1beta1.json +++ b/packages/google-cloud-billing-budgets/samples/generated_samples/snippet_metadata_google.cloud.billing.budgets.v1beta1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-billing-budgets", - "version": "1.16.0" + "version": "1.17.0" }, "snippets": [ { diff --git a/packages/google-cloud-billing-budgets/tests/unit/gapic/budgets_v1/test_budget_service.py b/packages/google-cloud-billing-budgets/tests/unit/gapic/budgets_v1/test_budget_service.py index 3ecaef5f9e5a..66dbd136fb0f 100644 --- a/packages/google-cloud-billing-budgets/tests/unit/gapic/budgets_v1/test_budget_service.py +++ b/packages/google-cloud-billing-budgets/tests/unit/gapic/budgets_v1/test_budget_service.py @@ -64,6 +64,13 @@ ) from google.cloud.billing.budgets_v1.types import budget_model, budget_service +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -318,6 +325,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
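The tests added below patch the new `post_*_with_metadata` hooks on the REST interceptor; in application code the same hooks are used by subclassing the interceptor and handing it to the REST transport. A minimal sketch, assuming the budgets_v1 REST transport and treating the inspected header name as illustrative:

from typing import Sequence, Tuple, Union

from google.cloud.billing.budgets_v1 import BudgetServiceClient
from google.cloud.billing.budgets_v1.services.budget_service import transports
from google.cloud.billing.budgets_v1.types import budget_model


class LoggingInterceptor(transports.BudgetServiceRestInterceptor):
    def post_get_budget_with_metadata(
        self,
        response: budget_model.Budget,
        metadata: Sequence[Tuple[str, Union[str, bytes]]],
    ) -> Tuple[budget_model.Budget, Sequence[Tuple[str, Union[str, bytes]]]]:
        # metadata is built from the HTTP response headers, so individual
        # headers can be inspected (or the sequence modified) here.
        for key, value in metadata:
            if key.lower() == "x-request-id":  # illustrative header name
                print(f"get_budget request id: {value}")
        return response, metadata


transport = transports.BudgetServiceRestTransport(interceptor=LoggingInterceptor())
client = BudgetServiceClient(transport=transport)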
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = BudgetServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = BudgetServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -4418,10 +4468,13 @@ def test_create_budget_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.BudgetServiceRestInterceptor, "post_create_budget" ) as post, mock.patch.object( + transports.BudgetServiceRestInterceptor, "post_create_budget_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.BudgetServiceRestInterceptor, "pre_create_budget" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = budget_service.CreateBudgetRequest.pb( budget_service.CreateBudgetRequest() ) @@ -4445,6 +4498,7 @@ def test_create_budget_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = budget_model.Budget() + post_with_metadata.return_value = budget_model.Budget(), metadata client.create_budget( request, @@ -4456,6 +4510,7 @@ def test_create_budget_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_budget_rest_bad_request( @@ -4652,10 +4707,13 @@ def test_update_budget_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.BudgetServiceRestInterceptor, "post_update_budget" ) as post, mock.patch.object( + transports.BudgetServiceRestInterceptor, "post_update_budget_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.BudgetServiceRestInterceptor, "pre_update_budget" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = budget_service.UpdateBudgetRequest.pb( budget_service.UpdateBudgetRequest() ) @@ -4679,6 +4737,7 @@ def test_update_budget_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = budget_model.Budget() + post_with_metadata.return_value = budget_model.Budget(), metadata client.update_budget( request, @@ -4690,6 +4749,7 @@ def test_update_budget_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_budget_rest_bad_request(request_type=budget_service.GetBudgetRequest): @@ -4776,10 
+4836,13 @@ def test_get_budget_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.BudgetServiceRestInterceptor, "post_get_budget" ) as post, mock.patch.object( + transports.BudgetServiceRestInterceptor, "post_get_budget_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.BudgetServiceRestInterceptor, "pre_get_budget" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = budget_service.GetBudgetRequest.pb( budget_service.GetBudgetRequest() ) @@ -4803,6 +4866,7 @@ def test_get_budget_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = budget_model.Budget() + post_with_metadata.return_value = budget_model.Budget(), metadata client.get_budget( request, @@ -4814,6 +4878,7 @@ def test_get_budget_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_budgets_rest_bad_request(request_type=budget_service.ListBudgetsRequest): @@ -4896,10 +4961,13 @@ def test_list_budgets_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.BudgetServiceRestInterceptor, "post_list_budgets" ) as post, mock.patch.object( + transports.BudgetServiceRestInterceptor, "post_list_budgets_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.BudgetServiceRestInterceptor, "pre_list_budgets" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = budget_service.ListBudgetsRequest.pb( budget_service.ListBudgetsRequest() ) @@ -4925,6 +4993,7 @@ def test_list_budgets_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = budget_service.ListBudgetsResponse() + post_with_metadata.return_value = budget_service.ListBudgetsResponse(), metadata client.list_budgets( request, @@ -4936,6 +5005,7 @@ def test_list_budgets_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_budget_rest_bad_request( diff --git a/packages/google-cloud-billing-budgets/tests/unit/gapic/budgets_v1beta1/test_budget_service.py b/packages/google-cloud-billing-budgets/tests/unit/gapic/budgets_v1beta1/test_budget_service.py index 5b19669c64b4..80d130df32a4 100644 --- a/packages/google-cloud-billing-budgets/tests/unit/gapic/budgets_v1beta1/test_budget_service.py +++ b/packages/google-cloud-billing-budgets/tests/unit/gapic/budgets_v1beta1/test_budget_service.py @@ -22,6 +22,7 @@ except ImportError: # pragma: NO COVER import mock +import json import math from google.api_core import api_core_version @@ -59,6 +60,13 @@ ) from google.cloud.billing.budgets_v1beta1.types import budget_model, budget_service +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -313,6 +321,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
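The credential mocks in the tests below depend on unittest.mock's spec-by-list behavior: passing a list of attribute names as the first argument restricts the mock to exactly those attributes, which is what makes the `hasattr(cred, "get_cred_info")` check in the client helper meaningful. A small illustrative sketch:

from unittest import mock

# Spec'd to a list of names: only those attributes exist on the mock.
cred_with_info = mock.Mock(["get_cred_info"])
assert hasattr(cred_with_info, "get_cred_info")

# Spec'd to an empty list: hasattr() is False for everything, so the
# client helper returns early without ever calling get_cred_info.
cred_without_info = mock.Mock([])
assert not hasattr(cred_without_info, "get_cred_info")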
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = BudgetServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = BudgetServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-billing/CHANGELOG.md b/packages/google-cloud-billing/CHANGELOG.md index d412a3a9931a..70c741e12d23 100644 --- a/packages/google-cloud-billing/CHANGELOG.md +++ b/packages/google-cloud-billing/CHANGELOG.md @@ -1,5 +1,13 @@ # Changelog +## [1.16.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-billing-v1.15.0...google-cloud-billing-v1.16.0) (2025-02-12) + + +### Features + +* Add REST Interceptors which support reading metadata ([e22e2bd](https://github.com/googleapis/google-cloud-python/commit/e22e2bde55d11d2f85e9d2caf1d152a4027f88cf)) +* Add support for reading selective GAPIC generation methods from service YAML ([e22e2bd](https://github.com/googleapis/google-cloud-python/commit/e22e2bde55d11d2f85e9d2caf1d152a4027f88cf)) + ## [1.15.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-billing-v1.14.1...google-cloud-billing-v1.15.0) (2024-12-12) diff --git a/packages/google-cloud-billing/google/cloud/billing/gapic_version.py b/packages/google-cloud-billing/google/cloud/billing/gapic_version.py index cf18a472a8a2..3e0ea3b28f0a 100644 --- a/packages/google-cloud-billing/google/cloud/billing/gapic_version.py +++ b/packages/google-cloud-billing/google/cloud/billing/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.15.0" # {x-release-please-version} +__version__ = "1.16.0" # {x-release-please-version} diff --git a/packages/google-cloud-billing/google/cloud/billing_v1/gapic_version.py b/packages/google-cloud-billing/google/cloud/billing_v1/gapic_version.py index cf18a472a8a2..3e0ea3b28f0a 100644 --- a/packages/google-cloud-billing/google/cloud/billing_v1/gapic_version.py +++ b/packages/google-cloud-billing/google/cloud/billing_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "1.15.0" # {x-release-please-version} +__version__ = "1.16.0" # {x-release-please-version} diff --git a/packages/google-cloud-billing/google/cloud/billing_v1/services/cloud_billing/client.py b/packages/google-cloud-billing/google/cloud/billing_v1/services/cloud_billing/client.py index 14c05607a3b1..bab5c4ffbde3 100644 --- a/packages/google-cloud-billing/google/cloud/billing_v1/services/cloud_billing/client.py +++ b/packages/google-cloud-billing/google/cloud/billing_v1/services/cloud_billing/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -525,6 +527,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. diff --git a/packages/google-cloud-billing/google/cloud/billing_v1/services/cloud_billing/transports/rest.py b/packages/google-cloud-billing/google/cloud/billing_v1/services/cloud_billing/transports/rest.py index 025dccaacb6d..0219fb12369a 100644 --- a/packages/google-cloud-billing/google/cloud/billing_v1/services/cloud_billing/transports/rest.py +++ b/packages/google-cloud-billing/google/cloud/billing_v1/services/cloud_billing/transports/rest.py @@ -184,12 +184,35 @@ def post_create_billing_account( ) -> cloud_billing.BillingAccount: """Post-rpc interceptor for create_billing_account - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_billing_account_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CloudBilling server but before - it is returned to user code. + it is returned to user code. This `post_create_billing_account` interceptor runs + before the `post_create_billing_account_with_metadata` interceptor. """ return response + def post_create_billing_account_with_metadata( + self, + response: cloud_billing.BillingAccount, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[cloud_billing.BillingAccount, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_billing_account + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudBilling server but before it is returned to user code. + + We recommend only using this `post_create_billing_account_with_metadata` + interceptor in new development instead of the `post_create_billing_account` interceptor. 
+ When both interceptors are used, this `post_create_billing_account_with_metadata` interceptor runs after the + `post_create_billing_account` interceptor. The (possibly modified) response returned by + `post_create_billing_account` will be passed to + `post_create_billing_account_with_metadata`. + """ + return response, metadata + def pre_get_billing_account( self, request: cloud_billing.GetBillingAccountRequest, @@ -209,12 +232,35 @@ def post_get_billing_account( ) -> cloud_billing.BillingAccount: """Post-rpc interceptor for get_billing_account - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_billing_account_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CloudBilling server but before - it is returned to user code. + it is returned to user code. This `post_get_billing_account` interceptor runs + before the `post_get_billing_account_with_metadata` interceptor. """ return response + def post_get_billing_account_with_metadata( + self, + response: cloud_billing.BillingAccount, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[cloud_billing.BillingAccount, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_billing_account + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudBilling server but before it is returned to user code. + + We recommend only using this `post_get_billing_account_with_metadata` + interceptor in new development instead of the `post_get_billing_account` interceptor. + When both interceptors are used, this `post_get_billing_account_with_metadata` interceptor runs after the + `post_get_billing_account` interceptor. The (possibly modified) response returned by + `post_get_billing_account` will be passed to + `post_get_billing_account_with_metadata`. + """ + return response, metadata + def pre_get_iam_policy( self, request: iam_policy_pb2.GetIamPolicyRequest, @@ -232,12 +278,35 @@ def pre_get_iam_policy( def post_get_iam_policy(self, response: policy_pb2.Policy) -> policy_pb2.Policy: """Post-rpc interceptor for get_iam_policy - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_iam_policy_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CloudBilling server but before - it is returned to user code. + it is returned to user code. This `post_get_iam_policy` interceptor runs + before the `post_get_iam_policy_with_metadata` interceptor. """ return response + def post_get_iam_policy_with_metadata( + self, + response: policy_pb2.Policy, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[policy_pb2.Policy, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_iam_policy + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudBilling server but before it is returned to user code. + + We recommend only using this `post_get_iam_policy_with_metadata` + interceptor in new development instead of the `post_get_iam_policy` interceptor. + When both interceptors are used, this `post_get_iam_policy_with_metadata` interceptor runs after the + `post_get_iam_policy` interceptor. The (possibly modified) response returned by + `post_get_iam_policy` will be passed to + `post_get_iam_policy_with_metadata`. 
+ """ + return response, metadata + def pre_get_project_billing_info( self, request: cloud_billing.GetProjectBillingInfoRequest, @@ -258,12 +327,37 @@ def post_get_project_billing_info( ) -> cloud_billing.ProjectBillingInfo: """Post-rpc interceptor for get_project_billing_info - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_project_billing_info_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CloudBilling server but before - it is returned to user code. + it is returned to user code. This `post_get_project_billing_info` interceptor runs + before the `post_get_project_billing_info_with_metadata` interceptor. """ return response + def post_get_project_billing_info_with_metadata( + self, + response: cloud_billing.ProjectBillingInfo, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + cloud_billing.ProjectBillingInfo, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for get_project_billing_info + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudBilling server but before it is returned to user code. + + We recommend only using this `post_get_project_billing_info_with_metadata` + interceptor in new development instead of the `post_get_project_billing_info` interceptor. + When both interceptors are used, this `post_get_project_billing_info_with_metadata` interceptor runs after the + `post_get_project_billing_info` interceptor. The (possibly modified) response returned by + `post_get_project_billing_info` will be passed to + `post_get_project_billing_info_with_metadata`. + """ + return response, metadata + def pre_list_billing_accounts( self, request: cloud_billing.ListBillingAccountsRequest, @@ -284,12 +378,38 @@ def post_list_billing_accounts( ) -> cloud_billing.ListBillingAccountsResponse: """Post-rpc interceptor for list_billing_accounts - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_billing_accounts_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CloudBilling server but before - it is returned to user code. + it is returned to user code. This `post_list_billing_accounts` interceptor runs + before the `post_list_billing_accounts_with_metadata` interceptor. """ return response + def post_list_billing_accounts_with_metadata( + self, + response: cloud_billing.ListBillingAccountsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + cloud_billing.ListBillingAccountsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_billing_accounts + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudBilling server but before it is returned to user code. + + We recommend only using this `post_list_billing_accounts_with_metadata` + interceptor in new development instead of the `post_list_billing_accounts` interceptor. + When both interceptors are used, this `post_list_billing_accounts_with_metadata` interceptor runs after the + `post_list_billing_accounts` interceptor. The (possibly modified) response returned by + `post_list_billing_accounts` will be passed to + `post_list_billing_accounts_with_metadata`. 
+ """ + return response, metadata + def pre_list_project_billing_info( self, request: cloud_billing.ListProjectBillingInfoRequest, @@ -310,12 +430,38 @@ def post_list_project_billing_info( ) -> cloud_billing.ListProjectBillingInfoResponse: """Post-rpc interceptor for list_project_billing_info - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_project_billing_info_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CloudBilling server but before - it is returned to user code. + it is returned to user code. This `post_list_project_billing_info` interceptor runs + before the `post_list_project_billing_info_with_metadata` interceptor. """ return response + def post_list_project_billing_info_with_metadata( + self, + response: cloud_billing.ListProjectBillingInfoResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + cloud_billing.ListProjectBillingInfoResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_project_billing_info + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudBilling server but before it is returned to user code. + + We recommend only using this `post_list_project_billing_info_with_metadata` + interceptor in new development instead of the `post_list_project_billing_info` interceptor. + When both interceptors are used, this `post_list_project_billing_info_with_metadata` interceptor runs after the + `post_list_project_billing_info` interceptor. The (possibly modified) response returned by + `post_list_project_billing_info` will be passed to + `post_list_project_billing_info_with_metadata`. + """ + return response, metadata + def pre_move_billing_account( self, request: cloud_billing.MoveBillingAccountRequest, @@ -335,12 +481,35 @@ def post_move_billing_account( ) -> cloud_billing.BillingAccount: """Post-rpc interceptor for move_billing_account - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_move_billing_account_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CloudBilling server but before - it is returned to user code. + it is returned to user code. This `post_move_billing_account` interceptor runs + before the `post_move_billing_account_with_metadata` interceptor. """ return response + def post_move_billing_account_with_metadata( + self, + response: cloud_billing.BillingAccount, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[cloud_billing.BillingAccount, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for move_billing_account + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudBilling server but before it is returned to user code. + + We recommend only using this `post_move_billing_account_with_metadata` + interceptor in new development instead of the `post_move_billing_account` interceptor. + When both interceptors are used, this `post_move_billing_account_with_metadata` interceptor runs after the + `post_move_billing_account` interceptor. The (possibly modified) response returned by + `post_move_billing_account` will be passed to + `post_move_billing_account_with_metadata`. 
+ """ + return response, metadata + def pre_set_iam_policy( self, request: iam_policy_pb2.SetIamPolicyRequest, @@ -358,12 +527,35 @@ def pre_set_iam_policy( def post_set_iam_policy(self, response: policy_pb2.Policy) -> policy_pb2.Policy: """Post-rpc interceptor for set_iam_policy - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_set_iam_policy_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CloudBilling server but before - it is returned to user code. + it is returned to user code. This `post_set_iam_policy` interceptor runs + before the `post_set_iam_policy_with_metadata` interceptor. """ return response + def post_set_iam_policy_with_metadata( + self, + response: policy_pb2.Policy, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[policy_pb2.Policy, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for set_iam_policy + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudBilling server but before it is returned to user code. + + We recommend only using this `post_set_iam_policy_with_metadata` + interceptor in new development instead of the `post_set_iam_policy` interceptor. + When both interceptors are used, this `post_set_iam_policy_with_metadata` interceptor runs after the + `post_set_iam_policy` interceptor. The (possibly modified) response returned by + `post_set_iam_policy` will be passed to + `post_set_iam_policy_with_metadata`. + """ + return response, metadata + def pre_test_iam_permissions( self, request: iam_policy_pb2.TestIamPermissionsRequest, @@ -384,12 +576,38 @@ def post_test_iam_permissions( ) -> iam_policy_pb2.TestIamPermissionsResponse: """Post-rpc interceptor for test_iam_permissions - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_test_iam_permissions_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CloudBilling server but before - it is returned to user code. + it is returned to user code. This `post_test_iam_permissions` interceptor runs + before the `post_test_iam_permissions_with_metadata` interceptor. """ return response + def post_test_iam_permissions_with_metadata( + self, + response: iam_policy_pb2.TestIamPermissionsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + iam_policy_pb2.TestIamPermissionsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for test_iam_permissions + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudBilling server but before it is returned to user code. + + We recommend only using this `post_test_iam_permissions_with_metadata` + interceptor in new development instead of the `post_test_iam_permissions` interceptor. + When both interceptors are used, this `post_test_iam_permissions_with_metadata` interceptor runs after the + `post_test_iam_permissions` interceptor. The (possibly modified) response returned by + `post_test_iam_permissions` will be passed to + `post_test_iam_permissions_with_metadata`. 
+ """ + return response, metadata + def pre_update_billing_account( self, request: cloud_billing.UpdateBillingAccountRequest, @@ -410,12 +628,35 @@ def post_update_billing_account( ) -> cloud_billing.BillingAccount: """Post-rpc interceptor for update_billing_account - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_billing_account_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CloudBilling server but before - it is returned to user code. + it is returned to user code. This `post_update_billing_account` interceptor runs + before the `post_update_billing_account_with_metadata` interceptor. """ return response + def post_update_billing_account_with_metadata( + self, + response: cloud_billing.BillingAccount, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[cloud_billing.BillingAccount, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_billing_account + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudBilling server but before it is returned to user code. + + We recommend only using this `post_update_billing_account_with_metadata` + interceptor in new development instead of the `post_update_billing_account` interceptor. + When both interceptors are used, this `post_update_billing_account_with_metadata` interceptor runs after the + `post_update_billing_account` interceptor. The (possibly modified) response returned by + `post_update_billing_account` will be passed to + `post_update_billing_account_with_metadata`. + """ + return response, metadata + def pre_update_project_billing_info( self, request: cloud_billing.UpdateProjectBillingInfoRequest, @@ -436,12 +677,37 @@ def post_update_project_billing_info( ) -> cloud_billing.ProjectBillingInfo: """Post-rpc interceptor for update_project_billing_info - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_project_billing_info_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CloudBilling server but before - it is returned to user code. + it is returned to user code. This `post_update_project_billing_info` interceptor runs + before the `post_update_project_billing_info_with_metadata` interceptor. """ return response + def post_update_project_billing_info_with_metadata( + self, + response: cloud_billing.ProjectBillingInfo, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + cloud_billing.ProjectBillingInfo, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for update_project_billing_info + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudBilling server but before it is returned to user code. + + We recommend only using this `post_update_project_billing_info_with_metadata` + interceptor in new development instead of the `post_update_project_billing_info` interceptor. + When both interceptors are used, this `post_update_project_billing_info_with_metadata` interceptor runs after the + `post_update_project_billing_info` interceptor. The (possibly modified) response returned by + `post_update_project_billing_info` will be passed to + `post_update_project_billing_info_with_metadata`. 
+ """ + return response, metadata + @dataclasses.dataclass class CloudBillingRestStub: @@ -658,6 +924,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_billing_account(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_billing_account_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -803,6 +1073,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_billing_account(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_billing_account_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1020,6 +1294,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_iam_policy(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_iam_policy_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1168,6 +1446,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_project_billing_info(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_project_billing_info_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1312,6 +1594,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_billing_accounts(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_billing_accounts_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1456,6 +1742,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_project_billing_info(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_project_billing_info_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1609,6 +1899,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_move_billing_account(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_move_billing_account_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1834,6 +2128,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_set_iam_policy(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_set_iam_policy_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1982,6 +2280,10 @@ 
def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_test_iam_permissions(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_test_iam_permissions_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2133,6 +2435,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_billing_account(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_billing_account_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2291,6 +2597,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_project_billing_info(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_project_billing_info_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-billing/google/cloud/billing_v1/services/cloud_catalog/client.py b/packages/google-cloud-billing/google/cloud/billing_v1/services/cloud_catalog/client.py index f24bba9587a8..18f1fc5eb383 100644 --- a/packages/google-cloud-billing/google/cloud/billing_v1/services/cloud_catalog/client.py +++ b/packages/google-cloud-billing/google/cloud/billing_v1/services/cloud_catalog/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -495,6 +497,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. diff --git a/packages/google-cloud-billing/google/cloud/billing_v1/services/cloud_catalog/transports/rest.py b/packages/google-cloud-billing/google/cloud/billing_v1/services/cloud_catalog/transports/rest.py index 81482d2bce28..e31bcc04aa1b 100644 --- a/packages/google-cloud-billing/google/cloud/billing_v1/services/cloud_catalog/transports/rest.py +++ b/packages/google-cloud-billing/google/cloud/billing_v1/services/cloud_catalog/transports/rest.py @@ -109,12 +109,37 @@ def post_list_services( ) -> cloud_catalog.ListServicesResponse: """Post-rpc interceptor for list_services - Override in a subclass to manipulate the response + DEPRECATED. 
Please use the `post_list_services_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CloudCatalog server but before - it is returned to user code. + it is returned to user code. This `post_list_services` interceptor runs + before the `post_list_services_with_metadata` interceptor. """ return response + def post_list_services_with_metadata( + self, + response: cloud_catalog.ListServicesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + cloud_catalog.ListServicesResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_services + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudCatalog server but before it is returned to user code. + + We recommend only using this `post_list_services_with_metadata` + interceptor in new development instead of the `post_list_services` interceptor. + When both interceptors are used, this `post_list_services_with_metadata` interceptor runs after the + `post_list_services` interceptor. The (possibly modified) response returned by + `post_list_services` will be passed to + `post_list_services_with_metadata`. + """ + return response, metadata + def pre_list_skus( self, request: cloud_catalog.ListSkusRequest, @@ -132,12 +157,35 @@ def post_list_skus( ) -> cloud_catalog.ListSkusResponse: """Post-rpc interceptor for list_skus - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_skus_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CloudCatalog server but before - it is returned to user code. + it is returned to user code. This `post_list_skus` interceptor runs + before the `post_list_skus_with_metadata` interceptor. """ return response + def post_list_skus_with_metadata( + self, + response: cloud_catalog.ListSkusResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[cloud_catalog.ListSkusResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_skus + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudCatalog server but before it is returned to user code. + + We recommend only using this `post_list_skus_with_metadata` + interceptor in new development instead of the `post_list_skus` interceptor. + When both interceptors are used, this `post_list_skus_with_metadata` interceptor runs after the + `post_list_skus` interceptor. The (possibly modified) response returned by + `post_list_skus` will be passed to + `post_list_skus_with_metadata`. 
+ """ + return response, metadata + @dataclasses.dataclass class CloudCatalogRestStub: @@ -346,6 +394,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_services(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_services_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -490,6 +542,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_skus(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_skus_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-billing/samples/generated_samples/snippet_metadata_google.cloud.billing.v1.json b/packages/google-cloud-billing/samples/generated_samples/snippet_metadata_google.cloud.billing.v1.json index 9299cad59cc3..cb2b9e5078c2 100644 --- a/packages/google-cloud-billing/samples/generated_samples/snippet_metadata_google.cloud.billing.v1.json +++ b/packages/google-cloud-billing/samples/generated_samples/snippet_metadata_google.cloud.billing.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-billing", - "version": "1.15.0" + "version": "1.16.0" }, "snippets": [ { diff --git a/packages/google-cloud-billing/tests/unit/gapic/billing_v1/test_cloud_billing.py b/packages/google-cloud-billing/tests/unit/gapic/billing_v1/test_cloud_billing.py index cfb8fc6b73b5..5628f20a03e3 100644 --- a/packages/google-cloud-billing/tests/unit/gapic/billing_v1/test_cloud_billing.py +++ b/packages/google-cloud-billing/tests/unit/gapic/billing_v1/test_cloud_billing.py @@ -65,6 +65,13 @@ ) from google.cloud.billing_v1.types import cloud_billing +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -308,6 +315,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = CloudBillingClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = CloudBillingClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -8005,10 +8055,13 @@ def test_get_billing_account_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.CloudBillingRestInterceptor, "post_get_billing_account" ) as post, mock.patch.object( + transports.CloudBillingRestInterceptor, "post_get_billing_account_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.CloudBillingRestInterceptor, "pre_get_billing_account" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = cloud_billing.GetBillingAccountRequest.pb( cloud_billing.GetBillingAccountRequest() ) @@ -8034,6 +8087,7 @@ def test_get_billing_account_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = cloud_billing.BillingAccount() + post_with_metadata.return_value = cloud_billing.BillingAccount(), metadata client.get_billing_account( request, @@ -8045,6 +8099,7 @@ def test_get_billing_account_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_billing_accounts_rest_bad_request( @@ -8129,10 +8184,14 @@ def test_list_billing_accounts_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.CloudBillingRestInterceptor, "post_list_billing_accounts" ) as post, mock.patch.object( + transports.CloudBillingRestInterceptor, + "post_list_billing_accounts_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.CloudBillingRestInterceptor, "pre_list_billing_accounts" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = cloud_billing.ListBillingAccountsRequest.pb( cloud_billing.ListBillingAccountsRequest() ) @@ -8158,6 +8217,10 @@ def test_list_billing_accounts_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = cloud_billing.ListBillingAccountsResponse() + post_with_metadata.return_value = ( + cloud_billing.ListBillingAccountsResponse(), + metadata, + ) client.list_billing_accounts( request, @@ -8169,6 +8232,7 @@ def 
test_list_billing_accounts_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_billing_account_rest_bad_request( @@ -8338,10 +8402,14 @@ def test_update_billing_account_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.CloudBillingRestInterceptor, "post_update_billing_account" ) as post, mock.patch.object( + transports.CloudBillingRestInterceptor, + "post_update_billing_account_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.CloudBillingRestInterceptor, "pre_update_billing_account" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = cloud_billing.UpdateBillingAccountRequest.pb( cloud_billing.UpdateBillingAccountRequest() ) @@ -8367,6 +8435,7 @@ def test_update_billing_account_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = cloud_billing.BillingAccount() + post_with_metadata.return_value = cloud_billing.BillingAccount(), metadata client.update_billing_account( request, @@ -8378,6 +8447,7 @@ def test_update_billing_account_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_billing_account_rest_bad_request( @@ -8549,10 +8619,14 @@ def test_create_billing_account_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.CloudBillingRestInterceptor, "post_create_billing_account" ) as post, mock.patch.object( + transports.CloudBillingRestInterceptor, + "post_create_billing_account_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.CloudBillingRestInterceptor, "pre_create_billing_account" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = cloud_billing.CreateBillingAccountRequest.pb( cloud_billing.CreateBillingAccountRequest() ) @@ -8578,6 +8652,7 @@ def test_create_billing_account_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = cloud_billing.BillingAccount() + post_with_metadata.return_value = cloud_billing.BillingAccount(), metadata client.create_billing_account( request, @@ -8589,6 +8664,7 @@ def test_create_billing_account_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_project_billing_info_rest_bad_request( @@ -8673,10 +8749,14 @@ def test_list_project_billing_info_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.CloudBillingRestInterceptor, "post_list_project_billing_info" ) as post, mock.patch.object( + transports.CloudBillingRestInterceptor, + "post_list_project_billing_info_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.CloudBillingRestInterceptor, "pre_list_project_billing_info" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = cloud_billing.ListProjectBillingInfoRequest.pb( cloud_billing.ListProjectBillingInfoRequest() ) @@ -8702,6 +8782,10 @@ def test_list_project_billing_info_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = cloud_billing.ListProjectBillingInfoResponse() + post_with_metadata.return_value = ( + cloud_billing.ListProjectBillingInfoResponse(), + metadata, + ) client.list_project_billing_info( request, @@ 
-8713,6 +8797,7 @@ def test_list_project_billing_info_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_project_billing_info_rest_bad_request( @@ -8803,10 +8888,14 @@ def test_get_project_billing_info_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.CloudBillingRestInterceptor, "post_get_project_billing_info" ) as post, mock.patch.object( + transports.CloudBillingRestInterceptor, + "post_get_project_billing_info_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.CloudBillingRestInterceptor, "pre_get_project_billing_info" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = cloud_billing.GetProjectBillingInfoRequest.pb( cloud_billing.GetProjectBillingInfoRequest() ) @@ -8832,6 +8921,7 @@ def test_get_project_billing_info_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = cloud_billing.ProjectBillingInfo() + post_with_metadata.return_value = cloud_billing.ProjectBillingInfo(), metadata client.get_project_billing_info( request, @@ -8843,6 +8933,7 @@ def test_get_project_billing_info_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_project_billing_info_rest_bad_request( @@ -9010,10 +9101,14 @@ def test_update_project_billing_info_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.CloudBillingRestInterceptor, "post_update_project_billing_info" ) as post, mock.patch.object( + transports.CloudBillingRestInterceptor, + "post_update_project_billing_info_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.CloudBillingRestInterceptor, "pre_update_project_billing_info" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = cloud_billing.UpdateProjectBillingInfoRequest.pb( cloud_billing.UpdateProjectBillingInfoRequest() ) @@ -9039,6 +9134,7 @@ def test_update_project_billing_info_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = cloud_billing.ProjectBillingInfo() + post_with_metadata.return_value = cloud_billing.ProjectBillingInfo(), metadata client.update_project_billing_info( request, @@ -9050,6 +9146,7 @@ def test_update_project_billing_info_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_iam_policy_rest_bad_request( @@ -9133,10 +9230,13 @@ def test_get_iam_policy_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.CloudBillingRestInterceptor, "post_get_iam_policy" ) as post, mock.patch.object( + transports.CloudBillingRestInterceptor, "post_get_iam_policy_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.CloudBillingRestInterceptor, "pre_get_iam_policy" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = iam_policy_pb2.GetIamPolicyRequest() transcode.return_value = { "method": "post", @@ -9158,6 +9258,7 @@ def test_get_iam_policy_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = policy_pb2.Policy() + post_with_metadata.return_value = policy_pb2.Policy(), metadata client.get_iam_policy( request, @@ -9169,6 +9270,7 @@ def 
test_get_iam_policy_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_set_iam_policy_rest_bad_request( @@ -9252,10 +9354,13 @@ def test_set_iam_policy_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.CloudBillingRestInterceptor, "post_set_iam_policy" ) as post, mock.patch.object( + transports.CloudBillingRestInterceptor, "post_set_iam_policy_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.CloudBillingRestInterceptor, "pre_set_iam_policy" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = iam_policy_pb2.SetIamPolicyRequest() transcode.return_value = { "method": "post", @@ -9277,6 +9382,7 @@ def test_set_iam_policy_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = policy_pb2.Policy() + post_with_metadata.return_value = policy_pb2.Policy(), metadata client.set_iam_policy( request, @@ -9288,6 +9394,7 @@ def test_set_iam_policy_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_test_iam_permissions_rest_bad_request( @@ -9369,10 +9476,14 @@ def test_test_iam_permissions_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.CloudBillingRestInterceptor, "post_test_iam_permissions" ) as post, mock.patch.object( + transports.CloudBillingRestInterceptor, + "post_test_iam_permissions_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.CloudBillingRestInterceptor, "pre_test_iam_permissions" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = iam_policy_pb2.TestIamPermissionsRequest() transcode.return_value = { "method": "post", @@ -9396,6 +9507,10 @@ def test_test_iam_permissions_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = iam_policy_pb2.TestIamPermissionsResponse() + post_with_metadata.return_value = ( + iam_policy_pb2.TestIamPermissionsResponse(), + metadata, + ) client.test_iam_permissions( request, @@ -9407,6 +9522,7 @@ def test_test_iam_permissions_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_move_billing_account_rest_bad_request( @@ -9501,10 +9617,14 @@ def test_move_billing_account_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.CloudBillingRestInterceptor, "post_move_billing_account" ) as post, mock.patch.object( + transports.CloudBillingRestInterceptor, + "post_move_billing_account_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.CloudBillingRestInterceptor, "pre_move_billing_account" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = cloud_billing.MoveBillingAccountRequest.pb( cloud_billing.MoveBillingAccountRequest() ) @@ -9530,6 +9650,7 @@ def test_move_billing_account_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = cloud_billing.BillingAccount() + post_with_metadata.return_value = cloud_billing.BillingAccount(), metadata client.move_billing_account( request, @@ -9541,6 +9662,7 @@ def test_move_billing_account_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + 
post_with_metadata.assert_called_once() def test_initialize_client_w_rest(): diff --git a/packages/google-cloud-billing/tests/unit/gapic/billing_v1/test_cloud_catalog.py b/packages/google-cloud-billing/tests/unit/gapic/billing_v1/test_cloud_catalog.py index e49ec3f945d9..331715137ed5 100644 --- a/packages/google-cloud-billing/tests/unit/gapic/billing_v1/test_cloud_catalog.py +++ b/packages/google-cloud-billing/tests/unit/gapic/billing_v1/test_cloud_catalog.py @@ -61,6 +61,13 @@ ) from google.cloud.billing_v1.types import cloud_catalog +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -304,6 +311,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = CloudCatalogClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = CloudCatalogClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -2590,10 +2640,13 @@ def test_list_services_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.CloudCatalogRestInterceptor, "post_list_services" ) as post, mock.patch.object( + transports.CloudCatalogRestInterceptor, "post_list_services_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.CloudCatalogRestInterceptor, "pre_list_services" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = cloud_catalog.ListServicesRequest.pb( cloud_catalog.ListServicesRequest() ) @@ -2619,6 +2672,7 @@ def test_list_services_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = cloud_catalog.ListServicesResponse() + post_with_metadata.return_value = cloud_catalog.ListServicesResponse(), metadata client.list_services( request, @@ -2630,6 +2684,7 @@ def test_list_services_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_skus_rest_bad_request(request_type=cloud_catalog.ListSkusRequest): @@ -2712,10 +2767,13 @@ def test_list_skus_rest_interceptors(null_interceptor): ) as 
transcode, mock.patch.object( transports.CloudCatalogRestInterceptor, "post_list_skus" ) as post, mock.patch.object( + transports.CloudCatalogRestInterceptor, "post_list_skus_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.CloudCatalogRestInterceptor, "pre_list_skus" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = cloud_catalog.ListSkusRequest.pb(cloud_catalog.ListSkusRequest()) transcode.return_value = { "method": "post", @@ -2739,6 +2797,7 @@ def test_list_skus_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = cloud_catalog.ListSkusResponse() + post_with_metadata.return_value = cloud_catalog.ListSkusResponse(), metadata client.list_skus( request, @@ -2750,6 +2809,7 @@ def test_list_skus_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_initialize_client_w_rest(): diff --git a/packages/google-cloud-binary-authorization/CHANGELOG.md b/packages/google-cloud-binary-authorization/CHANGELOG.md index 44ae5ffd3797..6b2ff831577a 100644 --- a/packages/google-cloud-binary-authorization/CHANGELOG.md +++ b/packages/google-cloud-binary-authorization/CHANGELOG.md @@ -1,5 +1,13 @@ # Changelog +## [1.13.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-binary-authorization-v1.12.0...google-cloud-binary-authorization-v1.13.0) (2025-02-12) + + +### Features + +* Add REST Interceptors which support reading metadata ([e22e2bd](https://github.com/googleapis/google-cloud-python/commit/e22e2bde55d11d2f85e9d2caf1d152a4027f88cf)) +* Add support for reading selective GAPIC generation methods from service YAML ([e22e2bd](https://github.com/googleapis/google-cloud-python/commit/e22e2bde55d11d2f85e9d2caf1d152a4027f88cf)) + ## [1.12.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-binary-authorization-v1.11.1...google-cloud-binary-authorization-v1.12.0) (2024-12-12) diff --git a/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization/gapic_version.py b/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization/gapic_version.py index 739fdfae141c..43155ded0db3 100644 --- a/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization/gapic_version.py +++ b/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.12.0" # {x-release-please-version} +__version__ = "1.13.0" # {x-release-please-version} diff --git a/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization_v1/gapic_version.py b/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization_v1/gapic_version.py index 739fdfae141c..43155ded0db3 100644 --- a/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization_v1/gapic_version.py +++ b/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
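The Binary Authorization interceptors follow the same pattern. A sketch of overriding both the deprecated hook and its `_with_metadata` counterpart to illustrate the documented ordering; the class and module names follow the generated layout shown in this diff and may need adjusting to the installed version.

from typing import Sequence, Tuple, Union

from google.cloud.binaryauthorization_v1.services.binauthz_management_service_v1 import (
    transports,
)
from google.cloud.binaryauthorization_v1.types import resources


class AttestorInterceptor(transports.BinauthzManagementServiceV1RestInterceptor):
    def post_get_attestor(self, response: resources.Attestor) -> resources.Attestor:
        # Deprecated hook: when both are overridden, it still runs first.
        response.description = "seen by post_get_attestor"
        return response

    def post_get_attestor_with_metadata(
        self,
        response: resources.Attestor,
        metadata: Sequence[Tuple[str, Union[str, bytes]]],
    ) -> Tuple[resources.Attestor, Sequence[Tuple[str, Union[str, bytes]]]]:
        # Receives the (possibly modified) response returned by post_get_attestor,
        # plus the response headers as metadata.
        assert response.description == "seen by post_get_attestor"
        return response, metadata

# An instance would be passed via the REST transport's `interceptor` argument,
# as in the CloudBilling sketch earlier.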
# -__version__ = "1.12.0" # {x-release-please-version} +__version__ = "1.13.0" # {x-release-please-version} diff --git a/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization_v1/services/binauthz_management_service_v1/client.py b/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization_v1/services/binauthz_management_service_v1/client.py index 83e9804a7776..8270d9c8d042 100644 --- a/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization_v1/services/binauthz_management_service_v1/client.py +++ b/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization_v1/services/binauthz_management_service_v1/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -511,6 +513,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. diff --git a/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization_v1/services/binauthz_management_service_v1/transports/rest.py b/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization_v1/services/binauthz_management_service_v1/transports/rest.py index a2ecdb27bba1..fa2ff854ba2f 100644 --- a/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization_v1/services/binauthz_management_service_v1/transports/rest.py +++ b/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization_v1/services/binauthz_management_service_v1/transports/rest.py @@ -142,12 +142,35 @@ def pre_create_attestor( def post_create_attestor(self, response: resources.Attestor) -> resources.Attestor: """Post-rpc interceptor for create_attestor - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_attestor_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the BinauthzManagementServiceV1 server but before - it is returned to user code. + it is returned to user code. This `post_create_attestor` interceptor runs + before the `post_create_attestor_with_metadata` interceptor. 
""" return response + def post_create_attestor_with_metadata( + self, + response: resources.Attestor, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.Attestor, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_attestor + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BinauthzManagementServiceV1 server but before it is returned to user code. + + We recommend only using this `post_create_attestor_with_metadata` + interceptor in new development instead of the `post_create_attestor` interceptor. + When both interceptors are used, this `post_create_attestor_with_metadata` interceptor runs after the + `post_create_attestor` interceptor. The (possibly modified) response returned by + `post_create_attestor` will be passed to + `post_create_attestor_with_metadata`. + """ + return response, metadata + def pre_delete_attestor( self, request: service.DeleteAttestorRequest, @@ -175,12 +198,35 @@ def pre_get_attestor( def post_get_attestor(self, response: resources.Attestor) -> resources.Attestor: """Post-rpc interceptor for get_attestor - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_attestor_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the BinauthzManagementServiceV1 server but before - it is returned to user code. + it is returned to user code. This `post_get_attestor` interceptor runs + before the `post_get_attestor_with_metadata` interceptor. """ return response + def post_get_attestor_with_metadata( + self, + response: resources.Attestor, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.Attestor, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_attestor + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BinauthzManagementServiceV1 server but before it is returned to user code. + + We recommend only using this `post_get_attestor_with_metadata` + interceptor in new development instead of the `post_get_attestor` interceptor. + When both interceptors are used, this `post_get_attestor_with_metadata` interceptor runs after the + `post_get_attestor` interceptor. The (possibly modified) response returned by + `post_get_attestor` will be passed to + `post_get_attestor_with_metadata`. + """ + return response, metadata + def pre_get_policy( self, request: service.GetPolicyRequest, @@ -196,12 +242,35 @@ def pre_get_policy( def post_get_policy(self, response: resources.Policy) -> resources.Policy: """Post-rpc interceptor for get_policy - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_policy_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the BinauthzManagementServiceV1 server but before - it is returned to user code. + it is returned to user code. This `post_get_policy` interceptor runs + before the `post_get_policy_with_metadata` interceptor. 
""" return response + def post_get_policy_with_metadata( + self, + response: resources.Policy, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.Policy, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_policy + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BinauthzManagementServiceV1 server but before it is returned to user code. + + We recommend only using this `post_get_policy_with_metadata` + interceptor in new development instead of the `post_get_policy` interceptor. + When both interceptors are used, this `post_get_policy_with_metadata` interceptor runs after the + `post_get_policy` interceptor. The (possibly modified) response returned by + `post_get_policy` will be passed to + `post_get_policy_with_metadata`. + """ + return response, metadata + def pre_list_attestors( self, request: service.ListAttestorsRequest, @@ -219,12 +288,35 @@ def post_list_attestors( ) -> service.ListAttestorsResponse: """Post-rpc interceptor for list_attestors - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_attestors_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the BinauthzManagementServiceV1 server but before - it is returned to user code. + it is returned to user code. This `post_list_attestors` interceptor runs + before the `post_list_attestors_with_metadata` interceptor. """ return response + def post_list_attestors_with_metadata( + self, + response: service.ListAttestorsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[service.ListAttestorsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_attestors + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BinauthzManagementServiceV1 server but before it is returned to user code. + + We recommend only using this `post_list_attestors_with_metadata` + interceptor in new development instead of the `post_list_attestors` interceptor. + When both interceptors are used, this `post_list_attestors_with_metadata` interceptor runs after the + `post_list_attestors` interceptor. The (possibly modified) response returned by + `post_list_attestors` will be passed to + `post_list_attestors_with_metadata`. + """ + return response, metadata + def pre_update_attestor( self, request: service.UpdateAttestorRequest, @@ -240,12 +332,35 @@ def pre_update_attestor( def post_update_attestor(self, response: resources.Attestor) -> resources.Attestor: """Post-rpc interceptor for update_attestor - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_attestor_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the BinauthzManagementServiceV1 server but before - it is returned to user code. + it is returned to user code. This `post_update_attestor` interceptor runs + before the `post_update_attestor_with_metadata` interceptor. 
""" return response + def post_update_attestor_with_metadata( + self, + response: resources.Attestor, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.Attestor, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_attestor + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BinauthzManagementServiceV1 server but before it is returned to user code. + + We recommend only using this `post_update_attestor_with_metadata` + interceptor in new development instead of the `post_update_attestor` interceptor. + When both interceptors are used, this `post_update_attestor_with_metadata` interceptor runs after the + `post_update_attestor` interceptor. The (possibly modified) response returned by + `post_update_attestor` will be passed to + `post_update_attestor_with_metadata`. + """ + return response, metadata + def pre_update_policy( self, request: service.UpdatePolicyRequest, @@ -261,12 +376,35 @@ def pre_update_policy( def post_update_policy(self, response: resources.Policy) -> resources.Policy: """Post-rpc interceptor for update_policy - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_policy_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the BinauthzManagementServiceV1 server but before - it is returned to user code. + it is returned to user code. This `post_update_policy` interceptor runs + before the `post_update_policy_with_metadata` interceptor. """ return response + def post_update_policy_with_metadata( + self, + response: resources.Policy, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.Policy, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_policy + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BinauthzManagementServiceV1 server but before it is returned to user code. + + We recommend only using this `post_update_policy_with_metadata` + interceptor in new development instead of the `post_update_policy` interceptor. + When both interceptors are used, this `post_update_policy_with_metadata` interceptor runs after the + `post_update_policy` interceptor. The (possibly modified) response returned by + `post_update_policy` will be passed to + `post_update_policy_with_metadata`. 
+ """ + return response, metadata + @dataclasses.dataclass class BinauthzManagementServiceV1RestStub: @@ -493,6 +631,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_attestor(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_attestor_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -751,6 +893,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_attestor(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_attestor_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -897,6 +1043,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_policy(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_policy_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1043,6 +1193,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_attestors(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_attestors_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1197,6 +1351,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_attestor(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_attestor_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1349,6 +1507,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_policy(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_policy_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization_v1/services/system_policy_v1/client.py b/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization_v1/services/system_policy_v1/client.py index d4c7c39ef0e7..ac0ed835626a 100644 --- a/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization_v1/services/system_policy_v1/client.py +++ b/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization_v1/services/system_policy_v1/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -478,6 +480,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. 
return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. diff --git a/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization_v1/services/system_policy_v1/transports/rest.py b/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization_v1/services/system_policy_v1/transports/rest.py index c1b06a8b6c5b..23b5f9eda1ae 100644 --- a/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization_v1/services/system_policy_v1/transports/rest.py +++ b/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization_v1/services/system_policy_v1/transports/rest.py @@ -97,12 +97,35 @@ def pre_get_system_policy( def post_get_system_policy(self, response: resources.Policy) -> resources.Policy: """Post-rpc interceptor for get_system_policy - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_system_policy_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the SystemPolicyV1 server but before - it is returned to user code. + it is returned to user code. This `post_get_system_policy` interceptor runs + before the `post_get_system_policy_with_metadata` interceptor. """ return response + def post_get_system_policy_with_metadata( + self, + response: resources.Policy, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.Policy, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_system_policy + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SystemPolicyV1 server but before it is returned to user code. + + We recommend only using this `post_get_system_policy_with_metadata` + interceptor in new development instead of the `post_get_system_policy` interceptor. + When both interceptors are used, this `post_get_system_policy_with_metadata` interceptor runs after the + `post_get_system_policy` interceptor. The (possibly modified) response returned by + `post_get_system_policy` will be passed to + `post_get_system_policy_with_metadata`. 
+ """ + return response, metadata + @dataclasses.dataclass class SystemPolicyV1RestStub: @@ -312,6 +335,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_system_policy(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_system_policy_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization_v1/services/validation_helper_v1/client.py b/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization_v1/services/validation_helper_v1/client.py index c2ea5ae38197..96d98733d5bf 100644 --- a/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization_v1/services/validation_helper_v1/client.py +++ b/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization_v1/services/validation_helper_v1/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -461,6 +463,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. diff --git a/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization_v1/services/validation_helper_v1/transports/rest.py b/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization_v1/services/validation_helper_v1/transports/rest.py index 150b136a97d6..1e2241c81f0c 100644 --- a/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization_v1/services/validation_helper_v1/transports/rest.py +++ b/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization_v1/services/validation_helper_v1/transports/rest.py @@ -102,12 +102,38 @@ def post_validate_attestation_occurrence( ) -> service.ValidateAttestationOccurrenceResponse: """Post-rpc interceptor for validate_attestation_occurrence - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_validate_attestation_occurrence_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ValidationHelperV1 server but before - it is returned to user code. + it is returned to user code. This `post_validate_attestation_occurrence` interceptor runs + before the `post_validate_attestation_occurrence_with_metadata` interceptor. 
""" return response + def post_validate_attestation_occurrence_with_metadata( + self, + response: service.ValidateAttestationOccurrenceResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + service.ValidateAttestationOccurrenceResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for validate_attestation_occurrence + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ValidationHelperV1 server but before it is returned to user code. + + We recommend only using this `post_validate_attestation_occurrence_with_metadata` + interceptor in new development instead of the `post_validate_attestation_occurrence` interceptor. + When both interceptors are used, this `post_validate_attestation_occurrence_with_metadata` interceptor runs after the + `post_validate_attestation_occurrence` interceptor. The (possibly modified) response returned by + `post_validate_attestation_occurrence` will be passed to + `post_validate_attestation_occurrence_with_metadata`. + """ + return response, metadata + @dataclasses.dataclass class ValidationHelperV1RestStub: @@ -325,6 +351,13 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_validate_attestation_occurrence(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_validate_attestation_occurrence_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization_v1beta1/gapic_version.py b/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization_v1beta1/gapic_version.py index 739fdfae141c..43155ded0db3 100644 --- a/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization_v1beta1/gapic_version.py +++ b/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization_v1beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.12.0" # {x-release-please-version} +__version__ = "1.13.0" # {x-release-please-version} diff --git a/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization_v1beta1/services/binauthz_management_service_v1_beta1/client.py b/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization_v1beta1/services/binauthz_management_service_v1_beta1/client.py index 3c9049d1f08f..d7073b528575 100644 --- a/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization_v1beta1/services/binauthz_management_service_v1_beta1/client.py +++ b/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization_v1beta1/services/binauthz_management_service_v1_beta1/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -514,6 +516,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. 
+ """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. diff --git a/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization_v1beta1/services/binauthz_management_service_v1_beta1/transports/rest.py b/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization_v1beta1/services/binauthz_management_service_v1_beta1/transports/rest.py index 9232ba96aeaf..66167ed2ba53 100644 --- a/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization_v1beta1/services/binauthz_management_service_v1_beta1/transports/rest.py +++ b/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization_v1beta1/services/binauthz_management_service_v1_beta1/transports/rest.py @@ -142,12 +142,35 @@ def pre_create_attestor( def post_create_attestor(self, response: resources.Attestor) -> resources.Attestor: """Post-rpc interceptor for create_attestor - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_attestor_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the BinauthzManagementServiceV1Beta1 server but before - it is returned to user code. + it is returned to user code. This `post_create_attestor` interceptor runs + before the `post_create_attestor_with_metadata` interceptor. """ return response + def post_create_attestor_with_metadata( + self, + response: resources.Attestor, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.Attestor, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_attestor + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BinauthzManagementServiceV1Beta1 server but before it is returned to user code. + + We recommend only using this `post_create_attestor_with_metadata` + interceptor in new development instead of the `post_create_attestor` interceptor. + When both interceptors are used, this `post_create_attestor_with_metadata` interceptor runs after the + `post_create_attestor` interceptor. The (possibly modified) response returned by + `post_create_attestor` will be passed to + `post_create_attestor_with_metadata`. + """ + return response, metadata + def pre_delete_attestor( self, request: service.DeleteAttestorRequest, @@ -175,12 +198,35 @@ def pre_get_attestor( def post_get_attestor(self, response: resources.Attestor) -> resources.Attestor: """Post-rpc interceptor for get_attestor - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_attestor_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the BinauthzManagementServiceV1Beta1 server but before - it is returned to user code. + it is returned to user code. This `post_get_attestor` interceptor runs + before the `post_get_attestor_with_metadata` interceptor. 
""" return response + def post_get_attestor_with_metadata( + self, + response: resources.Attestor, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.Attestor, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_attestor + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BinauthzManagementServiceV1Beta1 server but before it is returned to user code. + + We recommend only using this `post_get_attestor_with_metadata` + interceptor in new development instead of the `post_get_attestor` interceptor. + When both interceptors are used, this `post_get_attestor_with_metadata` interceptor runs after the + `post_get_attestor` interceptor. The (possibly modified) response returned by + `post_get_attestor` will be passed to + `post_get_attestor_with_metadata`. + """ + return response, metadata + def pre_get_policy( self, request: service.GetPolicyRequest, @@ -196,12 +242,35 @@ def pre_get_policy( def post_get_policy(self, response: resources.Policy) -> resources.Policy: """Post-rpc interceptor for get_policy - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_policy_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the BinauthzManagementServiceV1Beta1 server but before - it is returned to user code. + it is returned to user code. This `post_get_policy` interceptor runs + before the `post_get_policy_with_metadata` interceptor. """ return response + def post_get_policy_with_metadata( + self, + response: resources.Policy, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.Policy, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_policy + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BinauthzManagementServiceV1Beta1 server but before it is returned to user code. + + We recommend only using this `post_get_policy_with_metadata` + interceptor in new development instead of the `post_get_policy` interceptor. + When both interceptors are used, this `post_get_policy_with_metadata` interceptor runs after the + `post_get_policy` interceptor. The (possibly modified) response returned by + `post_get_policy` will be passed to + `post_get_policy_with_metadata`. + """ + return response, metadata + def pre_list_attestors( self, request: service.ListAttestorsRequest, @@ -219,12 +288,35 @@ def post_list_attestors( ) -> service.ListAttestorsResponse: """Post-rpc interceptor for list_attestors - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_attestors_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the BinauthzManagementServiceV1Beta1 server but before - it is returned to user code. + it is returned to user code. This `post_list_attestors` interceptor runs + before the `post_list_attestors_with_metadata` interceptor. """ return response + def post_list_attestors_with_metadata( + self, + response: service.ListAttestorsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[service.ListAttestorsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_attestors + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BinauthzManagementServiceV1Beta1 server but before it is returned to user code. 
+ + We recommend only using this `post_list_attestors_with_metadata` + interceptor in new development instead of the `post_list_attestors` interceptor. + When both interceptors are used, this `post_list_attestors_with_metadata` interceptor runs after the + `post_list_attestors` interceptor. The (possibly modified) response returned by + `post_list_attestors` will be passed to + `post_list_attestors_with_metadata`. + """ + return response, metadata + def pre_update_attestor( self, request: service.UpdateAttestorRequest, @@ -240,12 +332,35 @@ def pre_update_attestor( def post_update_attestor(self, response: resources.Attestor) -> resources.Attestor: """Post-rpc interceptor for update_attestor - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_attestor_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the BinauthzManagementServiceV1Beta1 server but before - it is returned to user code. + it is returned to user code. This `post_update_attestor` interceptor runs + before the `post_update_attestor_with_metadata` interceptor. """ return response + def post_update_attestor_with_metadata( + self, + response: resources.Attestor, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.Attestor, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_attestor + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BinauthzManagementServiceV1Beta1 server but before it is returned to user code. + + We recommend only using this `post_update_attestor_with_metadata` + interceptor in new development instead of the `post_update_attestor` interceptor. + When both interceptors are used, this `post_update_attestor_with_metadata` interceptor runs after the + `post_update_attestor` interceptor. The (possibly modified) response returned by + `post_update_attestor` will be passed to + `post_update_attestor_with_metadata`. + """ + return response, metadata + def pre_update_policy( self, request: service.UpdatePolicyRequest, @@ -261,12 +376,35 @@ def pre_update_policy( def post_update_policy(self, response: resources.Policy) -> resources.Policy: """Post-rpc interceptor for update_policy - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_policy_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the BinauthzManagementServiceV1Beta1 server but before - it is returned to user code. + it is returned to user code. This `post_update_policy` interceptor runs + before the `post_update_policy_with_metadata` interceptor. """ return response + def post_update_policy_with_metadata( + self, + response: resources.Policy, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.Policy, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_policy + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BinauthzManagementServiceV1Beta1 server but before it is returned to user code. + + We recommend only using this `post_update_policy_with_metadata` + interceptor in new development instead of the `post_update_policy` interceptor. + When both interceptors are used, this `post_update_policy_with_metadata` interceptor runs after the + `post_update_policy` interceptor. 
The (possibly modified) response returned by + `post_update_policy` will be passed to + `post_update_policy_with_metadata`. + """ + return response, metadata + @dataclasses.dataclass class BinauthzManagementServiceV1Beta1RestStub: @@ -493,6 +631,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_attestor(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_attestor_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -747,6 +889,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_attestor(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_attestor_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -894,6 +1040,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_policy(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_policy_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1038,6 +1188,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_attestors(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_attestors_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1190,6 +1344,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_attestor(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_attestor_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1341,6 +1499,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_policy(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_policy_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization_v1beta1/services/system_policy_v1_beta1/client.py b/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization_v1beta1/services/system_policy_v1_beta1/client.py index 43b7d624fc96..130cef79b3ae 100644 --- a/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization_v1beta1/services/system_policy_v1_beta1/client.py +++ b/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization_v1beta1/services/system_policy_v1_beta1/client.py @@ -14,6 +14,8 @@ # limitations under the License. 
# from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -478,6 +480,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. diff --git a/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization_v1beta1/services/system_policy_v1_beta1/transports/rest.py b/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization_v1beta1/services/system_policy_v1_beta1/transports/rest.py index c060c2bf08f3..5831544664fd 100644 --- a/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization_v1beta1/services/system_policy_v1_beta1/transports/rest.py +++ b/packages/google-cloud-binary-authorization/google/cloud/binaryauthorization_v1beta1/services/system_policy_v1_beta1/transports/rest.py @@ -97,12 +97,35 @@ def pre_get_system_policy( def post_get_system_policy(self, response: resources.Policy) -> resources.Policy: """Post-rpc interceptor for get_system_policy - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_system_policy_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the SystemPolicyV1Beta1 server but before - it is returned to user code. + it is returned to user code. This `post_get_system_policy` interceptor runs + before the `post_get_system_policy_with_metadata` interceptor. """ return response + def post_get_system_policy_with_metadata( + self, + response: resources.Policy, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.Policy, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_system_policy + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SystemPolicyV1Beta1 server but before it is returned to user code. + + We recommend only using this `post_get_system_policy_with_metadata` + interceptor in new development instead of the `post_get_system_policy` interceptor. + When both interceptors are used, this `post_get_system_policy_with_metadata` interceptor runs after the + `post_get_system_policy` interceptor. The (possibly modified) response returned by + `post_get_system_policy` will be passed to + `post_get_system_policy_with_metadata`. 
+ """ + return response, metadata + @dataclasses.dataclass class SystemPolicyV1Beta1RestStub: @@ -314,6 +337,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_system_policy(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_system_policy_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-binary-authorization/samples/generated_samples/snippet_metadata_google.cloud.binaryauthorization.v1.json b/packages/google-cloud-binary-authorization/samples/generated_samples/snippet_metadata_google.cloud.binaryauthorization.v1.json index 4836a62dd96f..00ed67977f5b 100644 --- a/packages/google-cloud-binary-authorization/samples/generated_samples/snippet_metadata_google.cloud.binaryauthorization.v1.json +++ b/packages/google-cloud-binary-authorization/samples/generated_samples/snippet_metadata_google.cloud.binaryauthorization.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-binary-authorization", - "version": "1.12.0" + "version": "1.13.0" }, "snippets": [ { diff --git a/packages/google-cloud-binary-authorization/samples/generated_samples/snippet_metadata_google.cloud.binaryauthorization.v1beta1.json b/packages/google-cloud-binary-authorization/samples/generated_samples/snippet_metadata_google.cloud.binaryauthorization.v1beta1.json index 44122e40fd65..b61f45a1ad47 100644 --- a/packages/google-cloud-binary-authorization/samples/generated_samples/snippet_metadata_google.cloud.binaryauthorization.v1beta1.json +++ b/packages/google-cloud-binary-authorization/samples/generated_samples/snippet_metadata_google.cloud.binaryauthorization.v1beta1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-binary-authorization", - "version": "1.12.0" + "version": "1.13.0" }, "snippets": [ { diff --git a/packages/google-cloud-binary-authorization/tests/unit/gapic/binaryauthorization_v1/test_binauthz_management_service_v1.py b/packages/google-cloud-binary-authorization/tests/unit/gapic/binaryauthorization_v1/test_binauthz_management_service_v1.py index db66ba0e415f..ab12fcc039b0 100644 --- a/packages/google-cloud-binary-authorization/tests/unit/gapic/binaryauthorization_v1/test_binauthz_management_service_v1.py +++ b/packages/google-cloud-binary-authorization/tests/unit/gapic/binaryauthorization_v1/test_binauthz_management_service_v1.py @@ -61,6 +61,13 @@ ) from google.cloud.binaryauthorization_v1.types import resources, service +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -344,6 +351,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = BinauthzManagementServiceV1Client(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = BinauthzManagementServiceV1Client(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -5488,10 +5538,14 @@ def test_get_policy_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.BinauthzManagementServiceV1RestInterceptor, "post_get_policy" ) as post, mock.patch.object( + transports.BinauthzManagementServiceV1RestInterceptor, + "post_get_policy_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.BinauthzManagementServiceV1RestInterceptor, "pre_get_policy" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.GetPolicyRequest.pb(service.GetPolicyRequest()) transcode.return_value = { "method": "post", @@ -5513,6 +5567,7 @@ def test_get_policy_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.Policy() + post_with_metadata.return_value = resources.Policy(), metadata client.get_policy( request, @@ -5524,6 +5579,7 @@ def test_get_policy_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_policy_rest_bad_request(request_type=service.UpdatePolicyRequest): @@ -5699,10 +5755,14 @@ def test_update_policy_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.BinauthzManagementServiceV1RestInterceptor, "post_update_policy" ) as post, mock.patch.object( + transports.BinauthzManagementServiceV1RestInterceptor, + "post_update_policy_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.BinauthzManagementServiceV1RestInterceptor, "pre_update_policy" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.UpdatePolicyRequest.pb(service.UpdatePolicyRequest()) transcode.return_value = { "method": "post", @@ -5724,6 +5784,7 @@ def test_update_policy_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.Policy() + post_with_metadata.return_value = resources.Policy(), metadata client.update_policy( request, @@ -5735,6 +5796,7 @@ def test_update_policy_rest_interceptors(null_interceptor): 
pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_attestor_rest_bad_request(request_type=service.CreateAttestorRequest): @@ -5906,10 +5968,14 @@ def test_create_attestor_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.BinauthzManagementServiceV1RestInterceptor, "post_create_attestor" ) as post, mock.patch.object( + transports.BinauthzManagementServiceV1RestInterceptor, + "post_create_attestor_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.BinauthzManagementServiceV1RestInterceptor, "pre_create_attestor" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.CreateAttestorRequest.pb(service.CreateAttestorRequest()) transcode.return_value = { "method": "post", @@ -5931,6 +5997,7 @@ def test_create_attestor_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.Attestor() + post_with_metadata.return_value = resources.Attestor(), metadata client.create_attestor( request, @@ -5942,6 +6009,7 @@ def test_create_attestor_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_attestor_rest_bad_request(request_type=service.GetAttestorRequest): @@ -6026,10 +6094,14 @@ def test_get_attestor_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.BinauthzManagementServiceV1RestInterceptor, "post_get_attestor" ) as post, mock.patch.object( + transports.BinauthzManagementServiceV1RestInterceptor, + "post_get_attestor_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.BinauthzManagementServiceV1RestInterceptor, "pre_get_attestor" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.GetAttestorRequest.pb(service.GetAttestorRequest()) transcode.return_value = { "method": "post", @@ -6051,6 +6123,7 @@ def test_get_attestor_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.Attestor() + post_with_metadata.return_value = resources.Attestor(), metadata client.get_attestor( request, @@ -6062,6 +6135,7 @@ def test_get_attestor_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_attestor_rest_bad_request(request_type=service.UpdateAttestorRequest): @@ -6233,10 +6307,14 @@ def test_update_attestor_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.BinauthzManagementServiceV1RestInterceptor, "post_update_attestor" ) as post, mock.patch.object( + transports.BinauthzManagementServiceV1RestInterceptor, + "post_update_attestor_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.BinauthzManagementServiceV1RestInterceptor, "pre_update_attestor" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.UpdateAttestorRequest.pb(service.UpdateAttestorRequest()) transcode.return_value = { "method": "post", @@ -6258,6 +6336,7 @@ def test_update_attestor_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.Attestor() + post_with_metadata.return_value = resources.Attestor(), metadata client.update_attestor( request, @@ -6269,6 +6348,7 @@ def 
test_update_attestor_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_attestors_rest_bad_request(request_type=service.ListAttestorsRequest): @@ -6351,10 +6431,14 @@ def test_list_attestors_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.BinauthzManagementServiceV1RestInterceptor, "post_list_attestors" ) as post, mock.patch.object( + transports.BinauthzManagementServiceV1RestInterceptor, + "post_list_attestors_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.BinauthzManagementServiceV1RestInterceptor, "pre_list_attestors" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.ListAttestorsRequest.pb(service.ListAttestorsRequest()) transcode.return_value = { "method": "post", @@ -6378,6 +6462,7 @@ def test_list_attestors_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = service.ListAttestorsResponse() + post_with_metadata.return_value = service.ListAttestorsResponse(), metadata client.list_attestors( request, @@ -6389,6 +6474,7 @@ def test_list_attestors_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_attestor_rest_bad_request(request_type=service.DeleteAttestorRequest): diff --git a/packages/google-cloud-binary-authorization/tests/unit/gapic/binaryauthorization_v1/test_system_policy_v1.py b/packages/google-cloud-binary-authorization/tests/unit/gapic/binaryauthorization_v1/test_system_policy_v1.py index 938fc36413b1..6a99b249472c 100644 --- a/packages/google-cloud-binary-authorization/tests/unit/gapic/binaryauthorization_v1/test_system_policy_v1.py +++ b/packages/google-cloud-binary-authorization/tests/unit/gapic/binaryauthorization_v1/test_system_policy_v1.py @@ -60,6 +60,13 @@ ) from google.cloud.binaryauthorization_v1.types import resources, service +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -318,6 +325,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
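The `_add_cred_info_for_auth_errors` helper introduced above appends a JSON description of the active credential to the error details of 401/403/404 failures, provided the credential exposes `get_cred_info` (google-auth >= 2.35.0). A hedged sketch of what this can look like from the caller's side; the placeholder resource name, the use of application default credentials, and the assumption that the client invokes the helper on failed calls are illustrative:

```python
import json

from google.api_core import exceptions as core_exceptions
from google.cloud.binaryauthorization_v1.services.system_policy_v1 import (
    SystemPolicyV1Client,
)

client = SystemPolicyV1Client()  # application default credentials assumed
policy_name = "..."  # placeholder system-policy resource name

try:
    client.get_system_policy(name=policy_name)
except core_exceptions.GoogleAPICallError as err:
    # On 401/403/404 responses, and with google-auth >= 2.35.0, the client may
    # have appended a JSON string describing the active credential
    # (credential_source, credential_type, principal) to the error details.
    for detail in err.details:
        if isinstance(detail, str) and detail.startswith("{"):
            print(json.loads(detail))
    raise
```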
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = SystemPolicyV1Client(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = SystemPolicyV1Client(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -1861,10 +1911,13 @@ def test_get_system_policy_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.SystemPolicyV1RestInterceptor, "post_get_system_policy" ) as post, mock.patch.object( + transports.SystemPolicyV1RestInterceptor, "post_get_system_policy_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.SystemPolicyV1RestInterceptor, "pre_get_system_policy" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.GetSystemPolicyRequest.pb(service.GetSystemPolicyRequest()) transcode.return_value = { "method": "post", @@ -1886,6 +1939,7 @@ def test_get_system_policy_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.Policy() + post_with_metadata.return_value = resources.Policy(), metadata client.get_system_policy( request, @@ -1897,6 +1951,7 @@ def test_get_system_policy_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_initialize_client_w_rest(): diff --git a/packages/google-cloud-binary-authorization/tests/unit/gapic/binaryauthorization_v1/test_validation_helper_v1.py b/packages/google-cloud-binary-authorization/tests/unit/gapic/binaryauthorization_v1/test_validation_helper_v1.py index 614c6fc6b159..e9fe91565768 100644 --- a/packages/google-cloud-binary-authorization/tests/unit/gapic/binaryauthorization_v1/test_validation_helper_v1.py +++ b/packages/google-cloud-binary-authorization/tests/unit/gapic/binaryauthorization_v1/test_validation_helper_v1.py @@ -59,6 +59,13 @@ ) from google.cloud.binaryauthorization_v1.types import service +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -330,6 +337,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
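The docstrings above spell out the ordering between the deprecated `post_*` hooks and the new `post_*_with_metadata` hooks: the deprecated hook runs first, and its (possibly modified) return value is what the metadata variant receives. A minimal sketch overriding both on the ValidationHelper interceptor, with import paths following the file layout shown in this diff:

```python
from typing import Sequence, Tuple, Union

from google.cloud.binaryauthorization_v1.services.validation_helper_v1 import transports
from google.cloud.binaryauthorization_v1.types import service


class OrderingDemoInterceptor(transports.ValidationHelperV1RestInterceptor):
    def post_validate_attestation_occurrence(
        self, response: service.ValidateAttestationOccurrenceResponse
    ) -> service.ValidateAttestationOccurrenceResponse:
        # Deprecated hook: runs first and may still modify the response.
        print("legacy post_* hook ran")
        return response

    def post_validate_attestation_occurrence_with_metadata(
        self,
        response: service.ValidateAttestationOccurrenceResponse,
        metadata: Sequence[Tuple[str, Union[str, bytes]]],
    ) -> Tuple[
        service.ValidateAttestationOccurrenceResponse,
        Sequence[Tuple[str, Union[str, bytes]]],
    ]:
        # Runs second; `response` is whatever the legacy hook returned above.
        print(f"with_metadata hook ran with {len(metadata)} header pairs")
        return response, metadata
```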
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = ValidationHelperV1Client(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = ValidationHelperV1Client(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -1785,11 +1835,15 @@ def test_validate_attestation_occurrence_rest_interceptors(null_interceptor): transports.ValidationHelperV1RestInterceptor, "post_validate_attestation_occurrence", ) as post, mock.patch.object( + transports.ValidationHelperV1RestInterceptor, + "post_validate_attestation_occurrence_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ValidationHelperV1RestInterceptor, "pre_validate_attestation_occurrence", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.ValidateAttestationOccurrenceRequest.pb( service.ValidateAttestationOccurrenceRequest() ) @@ -1815,6 +1869,10 @@ def test_validate_attestation_occurrence_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = service.ValidateAttestationOccurrenceResponse() + post_with_metadata.return_value = ( + service.ValidateAttestationOccurrenceResponse(), + metadata, + ) client.validate_attestation_occurrence( request, @@ -1826,6 +1884,7 @@ def test_validate_attestation_occurrence_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_initialize_client_w_rest(): diff --git a/packages/google-cloud-binary-authorization/tests/unit/gapic/binaryauthorization_v1beta1/test_binauthz_management_service_v1_beta1.py b/packages/google-cloud-binary-authorization/tests/unit/gapic/binaryauthorization_v1beta1/test_binauthz_management_service_v1_beta1.py index caacaa039933..36e754f926c4 100644 --- a/packages/google-cloud-binary-authorization/tests/unit/gapic/binaryauthorization_v1beta1/test_binauthz_management_service_v1_beta1.py +++ b/packages/google-cloud-binary-authorization/tests/unit/gapic/binaryauthorization_v1beta1/test_binauthz_management_service_v1_beta1.py @@ -61,6 +61,13 @@ ) from google.cloud.binaryauthorization_v1beta1.types import resources, service +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def 
mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -355,6 +362,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = BinauthzManagementServiceV1Beta1Client(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = BinauthzManagementServiceV1Beta1Client(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -5512,10 +5562,14 @@ def test_get_policy_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.BinauthzManagementServiceV1Beta1RestInterceptor, "post_get_policy" ) as post, mock.patch.object( + transports.BinauthzManagementServiceV1Beta1RestInterceptor, + "post_get_policy_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.BinauthzManagementServiceV1Beta1RestInterceptor, "pre_get_policy" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.GetPolicyRequest.pb(service.GetPolicyRequest()) transcode.return_value = { "method": "post", @@ -5537,6 +5591,7 @@ def test_get_policy_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.Policy() + post_with_metadata.return_value = resources.Policy(), metadata client.get_policy( request, @@ -5548,6 +5603,7 @@ def test_get_policy_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_policy_rest_bad_request(request_type=service.UpdatePolicyRequest): @@ -5723,10 +5779,14 @@ def test_update_policy_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.BinauthzManagementServiceV1Beta1RestInterceptor, "post_update_policy" ) as post, mock.patch.object( + transports.BinauthzManagementServiceV1Beta1RestInterceptor, + "post_update_policy_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.BinauthzManagementServiceV1Beta1RestInterceptor, "pre_update_policy" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.UpdatePolicyRequest.pb(service.UpdatePolicyRequest()) transcode.return_value = { "method": "post", @@ -5748,6 +5808,7 @@ def test_update_policy_rest_interceptors(null_interceptor): ] 
pre.return_value = request, metadata post.return_value = resources.Policy() + post_with_metadata.return_value = resources.Policy(), metadata client.update_policy( request, @@ -5759,6 +5820,7 @@ def test_update_policy_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_attestor_rest_bad_request(request_type=service.CreateAttestorRequest): @@ -5931,11 +5993,15 @@ def test_create_attestor_rest_interceptors(null_interceptor): transports.BinauthzManagementServiceV1Beta1RestInterceptor, "post_create_attestor", ) as post, mock.patch.object( + transports.BinauthzManagementServiceV1Beta1RestInterceptor, + "post_create_attestor_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.BinauthzManagementServiceV1Beta1RestInterceptor, "pre_create_attestor", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.CreateAttestorRequest.pb(service.CreateAttestorRequest()) transcode.return_value = { "method": "post", @@ -5957,6 +6023,7 @@ def test_create_attestor_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.Attestor() + post_with_metadata.return_value = resources.Attestor(), metadata client.create_attestor( request, @@ -5968,6 +6035,7 @@ def test_create_attestor_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_attestor_rest_bad_request(request_type=service.GetAttestorRequest): @@ -6052,10 +6120,14 @@ def test_get_attestor_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.BinauthzManagementServiceV1Beta1RestInterceptor, "post_get_attestor" ) as post, mock.patch.object( + transports.BinauthzManagementServiceV1Beta1RestInterceptor, + "post_get_attestor_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.BinauthzManagementServiceV1Beta1RestInterceptor, "pre_get_attestor" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.GetAttestorRequest.pb(service.GetAttestorRequest()) transcode.return_value = { "method": "post", @@ -6077,6 +6149,7 @@ def test_get_attestor_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.Attestor() + post_with_metadata.return_value = resources.Attestor(), metadata client.get_attestor( request, @@ -6088,6 +6161,7 @@ def test_get_attestor_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_attestor_rest_bad_request(request_type=service.UpdateAttestorRequest): @@ -6260,11 +6334,15 @@ def test_update_attestor_rest_interceptors(null_interceptor): transports.BinauthzManagementServiceV1Beta1RestInterceptor, "post_update_attestor", ) as post, mock.patch.object( + transports.BinauthzManagementServiceV1Beta1RestInterceptor, + "post_update_attestor_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.BinauthzManagementServiceV1Beta1RestInterceptor, "pre_update_attestor", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.UpdateAttestorRequest.pb(service.UpdateAttestorRequest()) transcode.return_value = { "method": "post", @@ -6286,6 +6364,7 @@ def test_update_attestor_rest_interceptors(null_interceptor): ] 
pre.return_value = request, metadata post.return_value = resources.Attestor() + post_with_metadata.return_value = resources.Attestor(), metadata client.update_attestor( request, @@ -6297,6 +6376,7 @@ def test_update_attestor_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_attestors_rest_bad_request(request_type=service.ListAttestorsRequest): @@ -6380,10 +6460,14 @@ def test_list_attestors_rest_interceptors(null_interceptor): transports.BinauthzManagementServiceV1Beta1RestInterceptor, "post_list_attestors", ) as post, mock.patch.object( + transports.BinauthzManagementServiceV1Beta1RestInterceptor, + "post_list_attestors_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.BinauthzManagementServiceV1Beta1RestInterceptor, "pre_list_attestors" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.ListAttestorsRequest.pb(service.ListAttestorsRequest()) transcode.return_value = { "method": "post", @@ -6407,6 +6491,7 @@ def test_list_attestors_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = service.ListAttestorsResponse() + post_with_metadata.return_value = service.ListAttestorsResponse(), metadata client.list_attestors( request, @@ -6418,6 +6503,7 @@ def test_list_attestors_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_attestor_rest_bad_request(request_type=service.DeleteAttestorRequest): diff --git a/packages/google-cloud-binary-authorization/tests/unit/gapic/binaryauthorization_v1beta1/test_system_policy_v1_beta1.py b/packages/google-cloud-binary-authorization/tests/unit/gapic/binaryauthorization_v1beta1/test_system_policy_v1_beta1.py index a087ff6756ca..da5fa9ed9973 100644 --- a/packages/google-cloud-binary-authorization/tests/unit/gapic/binaryauthorization_v1beta1/test_system_policy_v1_beta1.py +++ b/packages/google-cloud-binary-authorization/tests/unit/gapic/binaryauthorization_v1beta1/test_system_policy_v1_beta1.py @@ -60,6 +60,13 @@ ) from google.cloud.binaryauthorization_v1beta1.types import resources, service +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -333,6 +340,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = SystemPolicyV1Beta1Client(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = SystemPolicyV1Beta1Client(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -1917,10 +1967,14 @@ def test_get_system_policy_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.SystemPolicyV1Beta1RestInterceptor, "post_get_system_policy" ) as post, mock.patch.object( + transports.SystemPolicyV1Beta1RestInterceptor, + "post_get_system_policy_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.SystemPolicyV1Beta1RestInterceptor, "pre_get_system_policy" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.GetSystemPolicyRequest.pb(service.GetSystemPolicyRequest()) transcode.return_value = { "method": "post", @@ -1942,6 +1996,7 @@ def test_get_system_policy_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.Policy() + post_with_metadata.return_value = resources.Policy(), metadata client.get_system_policy( request, @@ -1953,6 +2008,7 @@ def test_get_system_policy_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_initialize_client_w_rest(): diff --git a/packages/google-cloud-build/CHANGELOG.md b/packages/google-cloud-build/CHANGELOG.md index d8546e89e8a9..2388460d8ef3 100644 --- a/packages/google-cloud-build/CHANGELOG.md +++ b/packages/google-cloud-build/CHANGELOG.md @@ -4,6 +4,22 @@ [1]: https://pypi.org/project/google-cloud-build/#history +## [3.30.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-build-v3.29.0...google-cloud-build-v3.30.0) (2025-02-12) + + +### Features + +* Add REST Interceptors which support reading metadata ([e22e2bd](https://github.com/googleapis/google-cloud-python/commit/e22e2bde55d11d2f85e9d2caf1d152a4027f88cf)) +* Add support for reading selective GAPIC generation methods from service YAML ([e22e2bd](https://github.com/googleapis/google-cloud-python/commit/e22e2bde55d11d2f85e9d2caf1d152a4027f88cf)) + +## [3.29.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-build-v3.28.0...google-cloud-build-v3.29.0) (2025-01-13) + + +### Features + +* [google-cloud-build] Add GoModule to 
Artifact and Results messages and new GO_MODULE_H1 hash type ([#13416](https://github.com/googleapis/google-cloud-python/issues/13416)) ([8a3a6d6](https://github.com/googleapis/google-cloud-python/commit/8a3a6d61b63ce4321e8c9e94511010a7245e3d40)) +* [google-cloud-build] Add option to enable structured logging ([#13430](https://github.com/googleapis/google-cloud-python/issues/13430)) ([dc7d7f2](https://github.com/googleapis/google-cloud-python/commit/dc7d7f21bad125dd98967a3e840e91c34d6a8a10)) + ## [3.28.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-build-v3.27.1...google-cloud-build-v3.28.0) (2024-12-12) diff --git a/packages/google-cloud-build/google/cloud/devtools/cloudbuild/__init__.py b/packages/google-cloud-build/google/cloud/devtools/cloudbuild/__init__.py index 8dc4cefb7d4c..ec010bed1b50 100644 --- a/packages/google-cloud-build/google/cloud/devtools/cloudbuild/__init__.py +++ b/packages/google-cloud-build/google/cloud/devtools/cloudbuild/__init__.py @@ -85,6 +85,7 @@ UpdateBuildTriggerRequest, UpdateWorkerPoolOperationMetadata, UpdateWorkerPoolRequest, + UploadedGoModule, UploadedMavenArtifact, UploadedNpmPackage, UploadedPythonPackage, @@ -156,6 +157,7 @@ "UpdateBuildTriggerRequest", "UpdateWorkerPoolOperationMetadata", "UpdateWorkerPoolRequest", + "UploadedGoModule", "UploadedMavenArtifact", "UploadedNpmPackage", "UploadedPythonPackage", diff --git a/packages/google-cloud-build/google/cloud/devtools/cloudbuild/gapic_version.py b/packages/google-cloud-build/google/cloud/devtools/cloudbuild/gapic_version.py index 32cacc95a964..b154d635e33c 100644 --- a/packages/google-cloud-build/google/cloud/devtools/cloudbuild/gapic_version.py +++ b/packages/google-cloud-build/google/cloud/devtools/cloudbuild/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "3.28.0" # {x-release-please-version} +__version__ = "3.30.0" # {x-release-please-version} diff --git a/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v1/__init__.py b/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v1/__init__.py index af6a17f24651..ba562f8613dd 100644 --- a/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v1/__init__.py +++ b/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v1/__init__.py @@ -80,6 +80,7 @@ UpdateBuildTriggerRequest, UpdateWorkerPoolOperationMetadata, UpdateWorkerPoolRequest, + UploadedGoModule, UploadedMavenArtifact, UploadedNpmPackage, UploadedPythonPackage, @@ -151,6 +152,7 @@ "UpdateBuildTriggerRequest", "UpdateWorkerPoolOperationMetadata", "UpdateWorkerPoolRequest", + "UploadedGoModule", "UploadedMavenArtifact", "UploadedNpmPackage", "UploadedPythonPackage", diff --git a/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v1/gapic_version.py b/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v1/gapic_version.py index 32cacc95a964..b154d635e33c 100644 --- a/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v1/gapic_version.py +++ b/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "3.28.0" # {x-release-please-version} +__version__ = "3.30.0" # {x-release-please-version} diff --git a/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v1/services/cloud_build/client.py b/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v1/services/cloud_build/client.py index 348efb88cb69..ce824b0d6df8 100644 --- a/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v1/services/cloud_build/client.py +++ b/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v1/services/cloud_build/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -719,6 +721,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. diff --git a/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/rest.py b/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/rest.py index f3b4ff8c54ec..1993e6e0b275 100644 --- a/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/rest.py +++ b/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/rest.py @@ -233,12 +233,35 @@ def post_approve_build( ) -> operations_pb2.Operation: """Post-rpc interceptor for approve_build - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_approve_build_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CloudBuild server but before - it is returned to user code. + it is returned to user code. This `post_approve_build` interceptor runs + before the `post_approve_build_with_metadata` interceptor. """ return response + def post_approve_build_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for approve_build + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudBuild server but before it is returned to user code. + + We recommend only using this `post_approve_build_with_metadata` + interceptor in new development instead of the `post_approve_build` interceptor. + When both interceptors are used, this `post_approve_build_with_metadata` interceptor runs after the + `post_approve_build` interceptor. 
The (possibly modified) response returned by + `post_approve_build` will be passed to + `post_approve_build_with_metadata`. + """ + return response, metadata + def pre_cancel_build( self, request: cloudbuild.CancelBuildRequest, @@ -254,12 +277,35 @@ def pre_cancel_build( def post_cancel_build(self, response: cloudbuild.Build) -> cloudbuild.Build: """Post-rpc interceptor for cancel_build - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_cancel_build_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CloudBuild server but before - it is returned to user code. + it is returned to user code. This `post_cancel_build` interceptor runs + before the `post_cancel_build_with_metadata` interceptor. """ return response + def post_cancel_build_with_metadata( + self, + response: cloudbuild.Build, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[cloudbuild.Build, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for cancel_build + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudBuild server but before it is returned to user code. + + We recommend only using this `post_cancel_build_with_metadata` + interceptor in new development instead of the `post_cancel_build` interceptor. + When both interceptors are used, this `post_cancel_build_with_metadata` interceptor runs after the + `post_cancel_build` interceptor. The (possibly modified) response returned by + `post_cancel_build` will be passed to + `post_cancel_build_with_metadata`. + """ + return response, metadata + def pre_create_build( self, request: cloudbuild.CreateBuildRequest, @@ -277,12 +323,35 @@ def post_create_build( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_build - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_build_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CloudBuild server but before - it is returned to user code. + it is returned to user code. This `post_create_build` interceptor runs + before the `post_create_build_with_metadata` interceptor. """ return response + def post_create_build_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_build + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudBuild server but before it is returned to user code. + + We recommend only using this `post_create_build_with_metadata` + interceptor in new development instead of the `post_create_build` interceptor. + When both interceptors are used, this `post_create_build_with_metadata` interceptor runs after the + `post_create_build` interceptor. The (possibly modified) response returned by + `post_create_build` will be passed to + `post_create_build_with_metadata`. + """ + return response, metadata + def pre_create_build_trigger( self, request: cloudbuild.CreateBuildTriggerRequest, @@ -302,12 +371,35 @@ def post_create_build_trigger( ) -> cloudbuild.BuildTrigger: """Post-rpc interceptor for create_build_trigger - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_build_trigger_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response after it is returned by the CloudBuild server but before - it is returned to user code. + it is returned to user code. This `post_create_build_trigger` interceptor runs + before the `post_create_build_trigger_with_metadata` interceptor. """ return response + def post_create_build_trigger_with_metadata( + self, + response: cloudbuild.BuildTrigger, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[cloudbuild.BuildTrigger, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_build_trigger + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudBuild server but before it is returned to user code. + + We recommend only using this `post_create_build_trigger_with_metadata` + interceptor in new development instead of the `post_create_build_trigger` interceptor. + When both interceptors are used, this `post_create_build_trigger_with_metadata` interceptor runs after the + `post_create_build_trigger` interceptor. The (possibly modified) response returned by + `post_create_build_trigger` will be passed to + `post_create_build_trigger_with_metadata`. + """ + return response, metadata + def pre_create_worker_pool( self, request: cloudbuild.CreateWorkerPoolRequest, @@ -327,12 +419,35 @@ def post_create_worker_pool( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_worker_pool - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_worker_pool_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CloudBuild server but before - it is returned to user code. + it is returned to user code. This `post_create_worker_pool` interceptor runs + before the `post_create_worker_pool_with_metadata` interceptor. """ return response + def post_create_worker_pool_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_worker_pool + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudBuild server but before it is returned to user code. + + We recommend only using this `post_create_worker_pool_with_metadata` + interceptor in new development instead of the `post_create_worker_pool` interceptor. + When both interceptors are used, this `post_create_worker_pool_with_metadata` interceptor runs after the + `post_create_worker_pool` interceptor. The (possibly modified) response returned by + `post_create_worker_pool` will be passed to + `post_create_worker_pool_with_metadata`. + """ + return response, metadata + def pre_delete_build_trigger( self, request: cloudbuild.DeleteBuildTriggerRequest, @@ -366,12 +481,35 @@ def post_delete_worker_pool( ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_worker_pool - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_worker_pool_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CloudBuild server but before - it is returned to user code. + it is returned to user code. This `post_delete_worker_pool` interceptor runs + before the `post_delete_worker_pool_with_metadata` interceptor. 
""" return response + def post_delete_worker_pool_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_worker_pool + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudBuild server but before it is returned to user code. + + We recommend only using this `post_delete_worker_pool_with_metadata` + interceptor in new development instead of the `post_delete_worker_pool` interceptor. + When both interceptors are used, this `post_delete_worker_pool_with_metadata` interceptor runs after the + `post_delete_worker_pool` interceptor. The (possibly modified) response returned by + `post_delete_worker_pool` will be passed to + `post_delete_worker_pool_with_metadata`. + """ + return response, metadata + def pre_get_build( self, request: cloudbuild.GetBuildRequest, @@ -387,12 +525,35 @@ def pre_get_build( def post_get_build(self, response: cloudbuild.Build) -> cloudbuild.Build: """Post-rpc interceptor for get_build - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_build_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CloudBuild server but before - it is returned to user code. + it is returned to user code. This `post_get_build` interceptor runs + before the `post_get_build_with_metadata` interceptor. """ return response + def post_get_build_with_metadata( + self, + response: cloudbuild.Build, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[cloudbuild.Build, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_build + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudBuild server but before it is returned to user code. + + We recommend only using this `post_get_build_with_metadata` + interceptor in new development instead of the `post_get_build` interceptor. + When both interceptors are used, this `post_get_build_with_metadata` interceptor runs after the + `post_get_build` interceptor. The (possibly modified) response returned by + `post_get_build` will be passed to + `post_get_build_with_metadata`. + """ + return response, metadata + def pre_get_build_trigger( self, request: cloudbuild.GetBuildTriggerRequest, @@ -412,12 +573,35 @@ def post_get_build_trigger( ) -> cloudbuild.BuildTrigger: """Post-rpc interceptor for get_build_trigger - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_build_trigger_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CloudBuild server but before - it is returned to user code. + it is returned to user code. This `post_get_build_trigger` interceptor runs + before the `post_get_build_trigger_with_metadata` interceptor. """ return response + def post_get_build_trigger_with_metadata( + self, + response: cloudbuild.BuildTrigger, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[cloudbuild.BuildTrigger, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_build_trigger + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudBuild server but before it is returned to user code. 
+ + We recommend only using this `post_get_build_trigger_with_metadata` + interceptor in new development instead of the `post_get_build_trigger` interceptor. + When both interceptors are used, this `post_get_build_trigger_with_metadata` interceptor runs after the + `post_get_build_trigger` interceptor. The (possibly modified) response returned by + `post_get_build_trigger` will be passed to + `post_get_build_trigger_with_metadata`. + """ + return response, metadata + def pre_get_worker_pool( self, request: cloudbuild.GetWorkerPoolRequest, @@ -437,12 +621,35 @@ def post_get_worker_pool( ) -> cloudbuild.WorkerPool: """Post-rpc interceptor for get_worker_pool - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_worker_pool_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CloudBuild server but before - it is returned to user code. + it is returned to user code. This `post_get_worker_pool` interceptor runs + before the `post_get_worker_pool_with_metadata` interceptor. """ return response + def post_get_worker_pool_with_metadata( + self, + response: cloudbuild.WorkerPool, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[cloudbuild.WorkerPool, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_worker_pool + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudBuild server but before it is returned to user code. + + We recommend only using this `post_get_worker_pool_with_metadata` + interceptor in new development instead of the `post_get_worker_pool` interceptor. + When both interceptors are used, this `post_get_worker_pool_with_metadata` interceptor runs after the + `post_get_worker_pool` interceptor. The (possibly modified) response returned by + `post_get_worker_pool` will be passed to + `post_get_worker_pool_with_metadata`. + """ + return response, metadata + def pre_list_builds( self, request: cloudbuild.ListBuildsRequest, @@ -460,12 +667,35 @@ def post_list_builds( ) -> cloudbuild.ListBuildsResponse: """Post-rpc interceptor for list_builds - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_builds_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CloudBuild server but before - it is returned to user code. + it is returned to user code. This `post_list_builds` interceptor runs + before the `post_list_builds_with_metadata` interceptor. """ return response + def post_list_builds_with_metadata( + self, + response: cloudbuild.ListBuildsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[cloudbuild.ListBuildsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_builds + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudBuild server but before it is returned to user code. + + We recommend only using this `post_list_builds_with_metadata` + interceptor in new development instead of the `post_list_builds` interceptor. + When both interceptors are used, this `post_list_builds_with_metadata` interceptor runs after the + `post_list_builds` interceptor. The (possibly modified) response returned by + `post_list_builds` will be passed to + `post_list_builds_with_metadata`. 
+ """ + return response, metadata + def pre_list_build_triggers( self, request: cloudbuild.ListBuildTriggersRequest, @@ -485,12 +715,37 @@ def post_list_build_triggers( ) -> cloudbuild.ListBuildTriggersResponse: """Post-rpc interceptor for list_build_triggers - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_build_triggers_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CloudBuild server but before - it is returned to user code. + it is returned to user code. This `post_list_build_triggers` interceptor runs + before the `post_list_build_triggers_with_metadata` interceptor. """ return response + def post_list_build_triggers_with_metadata( + self, + response: cloudbuild.ListBuildTriggersResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + cloudbuild.ListBuildTriggersResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_build_triggers + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudBuild server but before it is returned to user code. + + We recommend only using this `post_list_build_triggers_with_metadata` + interceptor in new development instead of the `post_list_build_triggers` interceptor. + When both interceptors are used, this `post_list_build_triggers_with_metadata` interceptor runs after the + `post_list_build_triggers` interceptor. The (possibly modified) response returned by + `post_list_build_triggers` will be passed to + `post_list_build_triggers_with_metadata`. + """ + return response, metadata + def pre_list_worker_pools( self, request: cloudbuild.ListWorkerPoolsRequest, @@ -510,12 +765,37 @@ def post_list_worker_pools( ) -> cloudbuild.ListWorkerPoolsResponse: """Post-rpc interceptor for list_worker_pools - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_worker_pools_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CloudBuild server but before - it is returned to user code. + it is returned to user code. This `post_list_worker_pools` interceptor runs + before the `post_list_worker_pools_with_metadata` interceptor. """ return response + def post_list_worker_pools_with_metadata( + self, + response: cloudbuild.ListWorkerPoolsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + cloudbuild.ListWorkerPoolsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_worker_pools + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudBuild server but before it is returned to user code. + + We recommend only using this `post_list_worker_pools_with_metadata` + interceptor in new development instead of the `post_list_worker_pools` interceptor. + When both interceptors are used, this `post_list_worker_pools_with_metadata` interceptor runs after the + `post_list_worker_pools` interceptor. The (possibly modified) response returned by + `post_list_worker_pools` will be passed to + `post_list_worker_pools_with_metadata`. 
+ """ + return response, metadata + def pre_receive_trigger_webhook( self, request: cloudbuild.ReceiveTriggerWebhookRequest, @@ -535,12 +815,38 @@ def post_receive_trigger_webhook( ) -> cloudbuild.ReceiveTriggerWebhookResponse: """Post-rpc interceptor for receive_trigger_webhook - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_receive_trigger_webhook_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CloudBuild server but before - it is returned to user code. + it is returned to user code. This `post_receive_trigger_webhook` interceptor runs + before the `post_receive_trigger_webhook_with_metadata` interceptor. """ return response + def post_receive_trigger_webhook_with_metadata( + self, + response: cloudbuild.ReceiveTriggerWebhookResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + cloudbuild.ReceiveTriggerWebhookResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for receive_trigger_webhook + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudBuild server but before it is returned to user code. + + We recommend only using this `post_receive_trigger_webhook_with_metadata` + interceptor in new development instead of the `post_receive_trigger_webhook` interceptor. + When both interceptors are used, this `post_receive_trigger_webhook_with_metadata` interceptor runs after the + `post_receive_trigger_webhook` interceptor. The (possibly modified) response returned by + `post_receive_trigger_webhook` will be passed to + `post_receive_trigger_webhook_with_metadata`. + """ + return response, metadata + def pre_retry_build( self, request: cloudbuild.RetryBuildRequest, @@ -558,12 +864,35 @@ def post_retry_build( ) -> operations_pb2.Operation: """Post-rpc interceptor for retry_build - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_retry_build_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CloudBuild server but before - it is returned to user code. + it is returned to user code. This `post_retry_build` interceptor runs + before the `post_retry_build_with_metadata` interceptor. """ return response + def post_retry_build_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for retry_build + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudBuild server but before it is returned to user code. + + We recommend only using this `post_retry_build_with_metadata` + interceptor in new development instead of the `post_retry_build` interceptor. + When both interceptors are used, this `post_retry_build_with_metadata` interceptor runs after the + `post_retry_build` interceptor. The (possibly modified) response returned by + `post_retry_build` will be passed to + `post_retry_build_with_metadata`. + """ + return response, metadata + def pre_run_build_trigger( self, request: cloudbuild.RunBuildTriggerRequest, @@ -583,12 +912,35 @@ def post_run_build_trigger( ) -> operations_pb2.Operation: """Post-rpc interceptor for run_build_trigger - Override in a subclass to manipulate the response + DEPRECATED. 
Please use the `post_run_build_trigger_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CloudBuild server but before - it is returned to user code. + it is returned to user code. This `post_run_build_trigger` interceptor runs + before the `post_run_build_trigger_with_metadata` interceptor. """ return response + def post_run_build_trigger_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for run_build_trigger + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudBuild server but before it is returned to user code. + + We recommend only using this `post_run_build_trigger_with_metadata` + interceptor in new development instead of the `post_run_build_trigger` interceptor. + When both interceptors are used, this `post_run_build_trigger_with_metadata` interceptor runs after the + `post_run_build_trigger` interceptor. The (possibly modified) response returned by + `post_run_build_trigger` will be passed to + `post_run_build_trigger_with_metadata`. + """ + return response, metadata + def pre_update_build_trigger( self, request: cloudbuild.UpdateBuildTriggerRequest, @@ -608,12 +960,35 @@ def post_update_build_trigger( ) -> cloudbuild.BuildTrigger: """Post-rpc interceptor for update_build_trigger - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_build_trigger_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CloudBuild server but before - it is returned to user code. + it is returned to user code. This `post_update_build_trigger` interceptor runs + before the `post_update_build_trigger_with_metadata` interceptor. """ return response + def post_update_build_trigger_with_metadata( + self, + response: cloudbuild.BuildTrigger, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[cloudbuild.BuildTrigger, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_build_trigger + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudBuild server but before it is returned to user code. + + We recommend only using this `post_update_build_trigger_with_metadata` + interceptor in new development instead of the `post_update_build_trigger` interceptor. + When both interceptors are used, this `post_update_build_trigger_with_metadata` interceptor runs after the + `post_update_build_trigger` interceptor. The (possibly modified) response returned by + `post_update_build_trigger` will be passed to + `post_update_build_trigger_with_metadata`. + """ + return response, metadata + def pre_update_worker_pool( self, request: cloudbuild.UpdateWorkerPoolRequest, @@ -633,12 +1008,35 @@ def post_update_worker_pool( ) -> operations_pb2.Operation: """Post-rpc interceptor for update_worker_pool - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_worker_pool_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CloudBuild server but before - it is returned to user code. + it is returned to user code. This `post_update_worker_pool` interceptor runs + before the `post_update_worker_pool_with_metadata` interceptor. 
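Each RPC on this transport gains the same pair of hooks: the existing `post_<rpc>` interceptor, now documented as deprecated, and a `post_<rpc>_with_metadata` companion that also receives the response headers. A usage sketch, not part of this diff, assuming the generated interceptor class follows the usual `CloudBuildRestInterceptor` naming; the print loop is illustrative only.

```python
from typing import Sequence, Tuple, Union

from google.cloud.devtools.cloudbuild_v1.services.cloud_build.transports.rest import (
    CloudBuildRestInterceptor,
)
from google.cloud.devtools.cloudbuild_v1.types import cloudbuild


class HeaderLoggingInterceptor(CloudBuildRestInterceptor):
    """Prefers the new *_with_metadata hooks added in this change."""

    def post_get_build_with_metadata(
        self,
        response: cloudbuild.Build,
        metadata: Sequence[Tuple[str, Union[str, bytes]]],
    ) -> Tuple[cloudbuild.Build, Sequence[Tuple[str, Union[str, bytes]]]]:
        # metadata is built from the HTTP response headers, as the __call__
        # changes further down show: [(k, str(v)) for k, v in response.headers.items()]
        for key, value in metadata:
            print(f"{key}: {value}")
        return response, metadata
```

The generated REST transport takes such an interceptor through its optional `interceptor=` constructor argument. Note that in the `__call__` hunks later in this file the returned metadata is discarded (`resp, _ = ...`), so the new hook can read headers but modifying them has no effect on what the caller receives.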
""" return response + def post_update_worker_pool_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_worker_pool + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudBuild server but before it is returned to user code. + + We recommend only using this `post_update_worker_pool_with_metadata` + interceptor in new development instead of the `post_update_worker_pool` interceptor. + When both interceptors are used, this `post_update_worker_pool_with_metadata` interceptor runs after the + `post_update_worker_pool` interceptor. The (possibly modified) response returned by + `post_update_worker_pool` will be passed to + `post_update_worker_pool_with_metadata`. + """ + return response, metadata + @dataclasses.dataclass class CloudBuildRestStub: @@ -915,6 +1313,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_approve_build(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_approve_build_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1089,6 +1491,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_cancel_build(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_cancel_build_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1240,6 +1646,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_build(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_build_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1391,6 +1801,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_build_trigger(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_build_trigger_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1540,6 +1954,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_worker_pool(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_worker_pool_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1791,6 +2209,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_worker_pool(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_worker_pool_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1957,6 +2379,10 @@ def __call__( 
json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_build(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_build_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2102,6 +2528,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_build_trigger(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_build_trigger_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2261,6 +2691,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_worker_pool(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_worker_pool_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2403,6 +2837,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_builds(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_builds_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2545,6 +2983,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_build_triggers(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_build_triggers_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2689,6 +3131,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_worker_pools(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_worker_pools_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2843,6 +3289,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_receive_trigger_webhook(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_receive_trigger_webhook_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2994,6 +3444,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_retry_build(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_retry_build_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3144,6 +3598,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_run_build_trigger(resp) + response_metadata = [(k, str(v)) for k, v in 
response.headers.items()] + resp, _ = self._interceptor.post_run_build_trigger_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3295,6 +3753,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_build_trigger(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_build_trigger_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3444,6 +3906,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_worker_pool(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_worker_pool_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v1/types/__init__.py b/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v1/types/__init__.py index ae459a63007c..bbe931345c88 100644 --- a/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v1/types/__init__.py +++ b/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v1/types/__init__.py @@ -74,6 +74,7 @@ UpdateBuildTriggerRequest, UpdateWorkerPoolOperationMetadata, UpdateWorkerPoolRequest, + UploadedGoModule, UploadedMavenArtifact, UploadedNpmPackage, UploadedPythonPackage, @@ -143,6 +144,7 @@ "UpdateBuildTriggerRequest", "UpdateWorkerPoolOperationMetadata", "UpdateWorkerPoolRequest", + "UploadedGoModule", "UploadedMavenArtifact", "UploadedNpmPackage", "UploadedPythonPackage", diff --git a/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v1/types/cloudbuild.py b/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v1/types/cloudbuild.py index f2c8afd90148..11d51f57d8ab 100644 --- a/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v1/types/cloudbuild.py +++ b/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v1/types/cloudbuild.py @@ -36,6 +36,7 @@ "BuiltImage", "UploadedPythonPackage", "UploadedMavenArtifact", + "UploadedGoModule", "UploadedNpmPackage", "BuildStep", "Volume", @@ -530,6 +531,37 @@ class UploadedMavenArtifact(proto.Message): ) +class UploadedGoModule(proto.Message): + r"""A Go module artifact uploaded to Artifact Registry using the + GoModule directive. + + Attributes: + uri (str): + URI of the uploaded artifact. + file_hashes (google.cloud.devtools.cloudbuild_v1.types.FileHashes): + Hash types and values of the Go Module + Artifact. + push_timing (google.cloud.devtools.cloudbuild_v1.types.TimeSpan): + Output only. Stores timing information for + pushing the specified artifact. + """ + + uri: str = proto.Field( + proto.STRING, + number=1, + ) + file_hashes: "FileHashes" = proto.Field( + proto.MESSAGE, + number=2, + message="FileHashes", + ) + push_timing: "TimeSpan" = proto.Field( + proto.MESSAGE, + number=3, + message="TimeSpan", + ) + + class UploadedNpmPackage(proto.Message): r"""An npm package uploaded to Artifact Registry using the NpmPackage directive. @@ -835,6 +867,9 @@ class Results(proto.Message): maven_artifacts (MutableSequence[google.cloud.devtools.cloudbuild_v1.types.UploadedMavenArtifact]): Maven artifacts uploaded to Artifact Registry at the end of the build. 
+ go_modules (MutableSequence[google.cloud.devtools.cloudbuild_v1.types.UploadedGoModule]): + Optional. Go module artifacts uploaded to + Artifact Registry at the end of the build. npm_packages (MutableSequence[google.cloud.devtools.cloudbuild_v1.types.UploadedNpmPackage]): Npm packages uploaded to Artifact Registry at the end of the build. @@ -876,6 +911,11 @@ class Results(proto.Message): number=9, message="UploadedMavenArtifact", ) + go_modules: MutableSequence["UploadedGoModule"] = proto.RepeatedField( + proto.MESSAGE, + number=10, + message="UploadedGoModule", + ) npm_packages: MutableSequence["UploadedNpmPackage"] = proto.RepeatedField( proto.MESSAGE, number=12, @@ -1368,6 +1408,13 @@ class Artifacts(proto.Message): If any artifacts fail to be pushed, the build is marked FAILURE. + go_modules (MutableSequence[google.cloud.devtools.cloudbuild_v1.types.Artifacts.GoModule]): + Optional. A list of Go modules to be uploaded + to Artifact Registry upon successful completion + of all build steps. + + If any objects fail to be pushed, the build is + marked FAILURE. python_packages (MutableSequence[google.cloud.devtools.cloudbuild_v1.types.Artifacts.PythonPackage]): A list of Python packages to be uploaded to Artifact Registry upon successful completion of @@ -1478,6 +1525,68 @@ class MavenArtifact(proto.Message): number=5, ) + class GoModule(proto.Message): + r"""Go module to upload to Artifact Registry upon successful + completion of all build steps. A module refers to all + dependencies in a go.mod file. + + Attributes: + repository_name (str): + Optional. Artifact Registry repository name. + + Specified Go modules will be zipped and uploaded + to Artifact Registry with this location as a + prefix. + e.g. my-go-repo + repository_location (str): + Optional. Location of the Artifact Registry + repository. i.e. us-east1 Defaults to the + build’s location. + repository_project_id (str): + Optional. Project ID of the Artifact Registry + repository. Defaults to the build project. + source_path (str): + Optional. Source path of the go.mod file in + the build's workspace. If not specified, this + will default to the current directory. e.g. + ~/code/go/mypackage + module_path (str): + Optional. The Go module's "module path". + e.g. example.com/foo/v2 + module_version (str): + Optional. The Go module's semantic version in + the form vX.Y.Z. e.g. v0.1.1 Pre-release + identifiers can also be added by appending a + dash and dot separated ASCII alphanumeric + characters and hyphens. e.g. + v0.2.3-alpha.x.12m.5 + """ + + repository_name: str = proto.Field( + proto.STRING, + number=1, + ) + repository_location: str = proto.Field( + proto.STRING, + number=2, + ) + repository_project_id: str = proto.Field( + proto.STRING, + number=3, + ) + source_path: str = proto.Field( + proto.STRING, + number=4, + ) + module_path: str = proto.Field( + proto.STRING, + number=5, + ) + module_version: str = proto.Field( + proto.STRING, + number=6, + ) + class PythonPackage(proto.Message): r"""Python package to upload to Artifact Registry upon successful completion of all build steps. A package can encapsulate @@ -1546,6 +1655,11 @@ class NpmPackage(proto.Message): number=3, message=MavenArtifact, ) + go_modules: MutableSequence[GoModule] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message=GoModule, + ) python_packages: MutableSequence[PythonPackage] = proto.RepeatedField( proto.MESSAGE, number=5, @@ -1685,12 +1799,16 @@ class HashType(proto.Enum): Use a sha256 hash. MD5 (2): Use a md5 hash. 
+ GO_MODULE_H1 (3): + Dirhash of a Go module's source code which is + then hex-encoded. SHA512 (4): Use a sha512 hash. """ NONE = 0 SHA256 = 1 MD5 = 2 + GO_MODULE_H1 = 3 SHA512 = 4 type_: HashType = proto.Field( @@ -3160,6 +3278,11 @@ class BuildOptions(proto.Message): default_logs_bucket_behavior (google.cloud.devtools.cloudbuild_v1.types.BuildOptions.DefaultLogsBucketBehavior): Optional. Option to specify how default logs buckets are setup. + enable_structured_logging (bool): + Optional. Option to specify whether + structured logging is enabled. + If true, JSON-formatted logs are parsed as + structured logs. """ class VerifyOption(proto.Enum): @@ -3388,6 +3511,10 @@ class PoolOption(proto.Message): number=21, enum=DefaultLogsBucketBehavior, ) + enable_structured_logging: bool = proto.Field( + proto.BOOL, + number=23, + ) class ReceiveTriggerWebhookRequest(proto.Message): diff --git a/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v2/gapic_version.py b/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v2/gapic_version.py index 32cacc95a964..b154d635e33c 100644 --- a/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v2/gapic_version.py +++ b/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v2/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "3.28.0" # {x-release-please-version} +__version__ = "3.30.0" # {x-release-please-version} diff --git a/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v2/services/repository_manager/client.py b/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v2/services/repository_manager/client.py index af342d03425e..e6a62178a8be 100644 --- a/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v2/services/repository_manager/client.py +++ b/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v2/services/repository_manager/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -566,6 +568,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -2514,16 +2543,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. 
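The Go module support added above has two halves: `Artifacts.GoModule` describes where Cloud Build should push a module, while `Results.go_modules` / `UploadedGoModule` (hashed with the new `GO_MODULE_H1` dirhash type) describe what was actually uploaded. As a rough sketch of how the new fields might be combined once this change ships, with placeholder repository and module names that are not taken from the diff:

from google.cloud.devtools import cloudbuild_v1

def create_build_request_with_go_module(project_id: str) -> cloudbuild_v1.CreateBuildRequest:
    # Where the zipped module should land in Artifact Registry (illustrative values).
    go_module = cloudbuild_v1.Artifacts.GoModule(
        repository_name="my-go-repo",
        repository_location="us-east1",
        repository_project_id=project_id,
        source_path=".",                      # go.mod at the workspace root
        module_path="example.com/foo/v2",
        module_version="v0.1.1",
    )
    build = cloudbuild_v1.Build(
        steps=[
            cloudbuild_v1.BuildStep(name="golang", entrypoint="go", args=["test", "./..."]),
        ],
        artifacts=cloudbuild_v1.Artifacts(go_modules=[go_module]),
        # New flag from this change: parse JSON-formatted logs as structured logs.
        options=cloudbuild_v1.BuildOptions(enable_structured_logging=True),
    )
    return cloudbuild_v1.CreateBuildRequest(project_id=project_id, build=build)

On success, `Build.results.go_modules` would then carry `UploadedGoModule` entries with the artifact URI, `GO_MODULE_H1` file hashes, and push timing.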
- return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def cancel_operation( self, @@ -2690,16 +2723,20 @@ def set_iam_policy( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_iam_policy( self, @@ -2812,16 +2849,20 @@ def get_iam_policy( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def test_iam_permissions( self, @@ -2872,16 +2913,20 @@ def test_iam_permissions( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v2/services/repository_manager/transports/rest.py b/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v2/services/repository_manager/transports/rest.py index 423e06e151e9..919c70ccefda 100644 --- a/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v2/services/repository_manager/transports/rest.py +++ b/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v2/services/repository_manager/transports/rest.py @@ -210,12 +210,35 @@ def post_batch_create_repositories( ) -> operations_pb2.Operation: """Post-rpc interceptor for batch_create_repositories - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_batch_create_repositories_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RepositoryManager server but before - it is returned to user code. + it is returned to user code. This `post_batch_create_repositories` interceptor runs + before the `post_batch_create_repositories_with_metadata` interceptor. """ return response + def post_batch_create_repositories_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for batch_create_repositories + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RepositoryManager server but before it is returned to user code. 
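The `*_with_metadata` hooks being introduced throughout this file let interceptor subclasses see the REST response headers next to the decoded response, instead of the response alone. A minimal sketch of a custom interceptor using one of the new hooks, assuming only the method names and signatures shown in this diff; the logging behaviour is illustrative, not generated code:

import logging
from typing import Sequence, Tuple, Union

from google.cloud.devtools.cloudbuild_v2.services.repository_manager.transports import (
    RepositoryManagerRestInterceptor,
)
from google.cloud.devtools.cloudbuild_v2.types import repositories


class HeaderLoggingInterceptor(RepositoryManagerRestInterceptor):
    """Reads response headers via the new post-RPC-with-metadata hook."""

    def post_get_connection_with_metadata(
        self,
        response: repositories.Connection,
        metadata: Sequence[Tuple[str, Union[str, bytes]]],
    ) -> Tuple[repositories.Connection, Sequence[Tuple[str, Union[str, bytes]]]]:
        # `metadata` is the list of (header, value) pairs built from the HTTP
        # response, as the __call__ changes later in this diff show.
        logging.getLogger(__name__).debug("get_connection headers: %s", dict(metadata))
        return response, metadata

When both hooks are overridden, the deprecated `post_get_connection` still runs first and its return value is what `post_get_connection_with_metadata` receives.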
+ + We recommend only using this `post_batch_create_repositories_with_metadata` + interceptor in new development instead of the `post_batch_create_repositories` interceptor. + When both interceptors are used, this `post_batch_create_repositories_with_metadata` interceptor runs after the + `post_batch_create_repositories` interceptor. The (possibly modified) response returned by + `post_batch_create_repositories` will be passed to + `post_batch_create_repositories_with_metadata`. + """ + return response, metadata + def pre_create_connection( self, request: repositories.CreateConnectionRequest, @@ -235,12 +258,35 @@ def post_create_connection( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_connection - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_connection_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RepositoryManager server but before - it is returned to user code. + it is returned to user code. This `post_create_connection` interceptor runs + before the `post_create_connection_with_metadata` interceptor. """ return response + def post_create_connection_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_connection + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RepositoryManager server but before it is returned to user code. + + We recommend only using this `post_create_connection_with_metadata` + interceptor in new development instead of the `post_create_connection` interceptor. + When both interceptors are used, this `post_create_connection_with_metadata` interceptor runs after the + `post_create_connection` interceptor. The (possibly modified) response returned by + `post_create_connection` will be passed to + `post_create_connection_with_metadata`. + """ + return response, metadata + def pre_create_repository( self, request: repositories.CreateRepositoryRequest, @@ -260,12 +306,35 @@ def post_create_repository( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_repository - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_repository_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RepositoryManager server but before - it is returned to user code. + it is returned to user code. This `post_create_repository` interceptor runs + before the `post_create_repository_with_metadata` interceptor. """ return response + def post_create_repository_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_repository + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RepositoryManager server but before it is returned to user code. + + We recommend only using this `post_create_repository_with_metadata` + interceptor in new development instead of the `post_create_repository` interceptor. + When both interceptors are used, this `post_create_repository_with_metadata` interceptor runs after the + `post_create_repository` interceptor. 
The (possibly modified) response returned by + `post_create_repository` will be passed to + `post_create_repository_with_metadata`. + """ + return response, metadata + def pre_delete_connection( self, request: repositories.DeleteConnectionRequest, @@ -285,12 +354,35 @@ def post_delete_connection( ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_connection - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_connection_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RepositoryManager server but before - it is returned to user code. + it is returned to user code. This `post_delete_connection` interceptor runs + before the `post_delete_connection_with_metadata` interceptor. """ return response + def post_delete_connection_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_connection + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RepositoryManager server but before it is returned to user code. + + We recommend only using this `post_delete_connection_with_metadata` + interceptor in new development instead of the `post_delete_connection` interceptor. + When both interceptors are used, this `post_delete_connection_with_metadata` interceptor runs after the + `post_delete_connection` interceptor. The (possibly modified) response returned by + `post_delete_connection` will be passed to + `post_delete_connection_with_metadata`. + """ + return response, metadata + def pre_delete_repository( self, request: repositories.DeleteRepositoryRequest, @@ -310,12 +402,35 @@ def post_delete_repository( ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_repository - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_repository_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RepositoryManager server but before - it is returned to user code. + it is returned to user code. This `post_delete_repository` interceptor runs + before the `post_delete_repository_with_metadata` interceptor. """ return response + def post_delete_repository_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_repository + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RepositoryManager server but before it is returned to user code. + + We recommend only using this `post_delete_repository_with_metadata` + interceptor in new development instead of the `post_delete_repository` interceptor. + When both interceptors are used, this `post_delete_repository_with_metadata` interceptor runs after the + `post_delete_repository` interceptor. The (possibly modified) response returned by + `post_delete_repository` will be passed to + `post_delete_repository_with_metadata`. 
+ """ + return response, metadata + def pre_fetch_git_refs( self, request: repositories.FetchGitRefsRequest, @@ -335,12 +450,37 @@ def post_fetch_git_refs( ) -> repositories.FetchGitRefsResponse: """Post-rpc interceptor for fetch_git_refs - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_fetch_git_refs_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RepositoryManager server but before - it is returned to user code. + it is returned to user code. This `post_fetch_git_refs` interceptor runs + before the `post_fetch_git_refs_with_metadata` interceptor. """ return response + def post_fetch_git_refs_with_metadata( + self, + response: repositories.FetchGitRefsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + repositories.FetchGitRefsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for fetch_git_refs + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RepositoryManager server but before it is returned to user code. + + We recommend only using this `post_fetch_git_refs_with_metadata` + interceptor in new development instead of the `post_fetch_git_refs` interceptor. + When both interceptors are used, this `post_fetch_git_refs_with_metadata` interceptor runs after the + `post_fetch_git_refs` interceptor. The (possibly modified) response returned by + `post_fetch_git_refs` will be passed to + `post_fetch_git_refs_with_metadata`. + """ + return response, metadata + def pre_fetch_linkable_repositories( self, request: repositories.FetchLinkableRepositoriesRequest, @@ -361,12 +501,38 @@ def post_fetch_linkable_repositories( ) -> repositories.FetchLinkableRepositoriesResponse: """Post-rpc interceptor for fetch_linkable_repositories - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_fetch_linkable_repositories_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RepositoryManager server but before - it is returned to user code. + it is returned to user code. This `post_fetch_linkable_repositories` interceptor runs + before the `post_fetch_linkable_repositories_with_metadata` interceptor. """ return response + def post_fetch_linkable_repositories_with_metadata( + self, + response: repositories.FetchLinkableRepositoriesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + repositories.FetchLinkableRepositoriesResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for fetch_linkable_repositories + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RepositoryManager server but before it is returned to user code. + + We recommend only using this `post_fetch_linkable_repositories_with_metadata` + interceptor in new development instead of the `post_fetch_linkable_repositories` interceptor. + When both interceptors are used, this `post_fetch_linkable_repositories_with_metadata` interceptor runs after the + `post_fetch_linkable_repositories` interceptor. The (possibly modified) response returned by + `post_fetch_linkable_repositories` will be passed to + `post_fetch_linkable_repositories_with_metadata`. 
+ """ + return response, metadata + def pre_fetch_read_token( self, request: repositories.FetchReadTokenRequest, @@ -386,12 +552,37 @@ def post_fetch_read_token( ) -> repositories.FetchReadTokenResponse: """Post-rpc interceptor for fetch_read_token - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_fetch_read_token_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RepositoryManager server but before - it is returned to user code. + it is returned to user code. This `post_fetch_read_token` interceptor runs + before the `post_fetch_read_token_with_metadata` interceptor. """ return response + def post_fetch_read_token_with_metadata( + self, + response: repositories.FetchReadTokenResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + repositories.FetchReadTokenResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for fetch_read_token + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RepositoryManager server but before it is returned to user code. + + We recommend only using this `post_fetch_read_token_with_metadata` + interceptor in new development instead of the `post_fetch_read_token` interceptor. + When both interceptors are used, this `post_fetch_read_token_with_metadata` interceptor runs after the + `post_fetch_read_token` interceptor. The (possibly modified) response returned by + `post_fetch_read_token` will be passed to + `post_fetch_read_token_with_metadata`. + """ + return response, metadata + def pre_fetch_read_write_token( self, request: repositories.FetchReadWriteTokenRequest, @@ -411,12 +602,38 @@ def post_fetch_read_write_token( ) -> repositories.FetchReadWriteTokenResponse: """Post-rpc interceptor for fetch_read_write_token - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_fetch_read_write_token_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RepositoryManager server but before - it is returned to user code. + it is returned to user code. This `post_fetch_read_write_token` interceptor runs + before the `post_fetch_read_write_token_with_metadata` interceptor. """ return response + def post_fetch_read_write_token_with_metadata( + self, + response: repositories.FetchReadWriteTokenResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + repositories.FetchReadWriteTokenResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for fetch_read_write_token + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RepositoryManager server but before it is returned to user code. + + We recommend only using this `post_fetch_read_write_token_with_metadata` + interceptor in new development instead of the `post_fetch_read_write_token` interceptor. + When both interceptors are used, this `post_fetch_read_write_token_with_metadata` interceptor runs after the + `post_fetch_read_write_token` interceptor. The (possibly modified) response returned by + `post_fetch_read_write_token` will be passed to + `post_fetch_read_write_token_with_metadata`. 
+ """ + return response, metadata + def pre_get_connection( self, request: repositories.GetConnectionRequest, @@ -436,12 +653,35 @@ def post_get_connection( ) -> repositories.Connection: """Post-rpc interceptor for get_connection - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_connection_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RepositoryManager server but before - it is returned to user code. + it is returned to user code. This `post_get_connection` interceptor runs + before the `post_get_connection_with_metadata` interceptor. """ return response + def post_get_connection_with_metadata( + self, + response: repositories.Connection, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[repositories.Connection, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_connection + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RepositoryManager server but before it is returned to user code. + + We recommend only using this `post_get_connection_with_metadata` + interceptor in new development instead of the `post_get_connection` interceptor. + When both interceptors are used, this `post_get_connection_with_metadata` interceptor runs after the + `post_get_connection` interceptor. The (possibly modified) response returned by + `post_get_connection` will be passed to + `post_get_connection_with_metadata`. + """ + return response, metadata + def pre_get_repository( self, request: repositories.GetRepositoryRequest, @@ -461,12 +701,35 @@ def post_get_repository( ) -> repositories.Repository: """Post-rpc interceptor for get_repository - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_repository_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RepositoryManager server but before - it is returned to user code. + it is returned to user code. This `post_get_repository` interceptor runs + before the `post_get_repository_with_metadata` interceptor. """ return response + def post_get_repository_with_metadata( + self, + response: repositories.Repository, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[repositories.Repository, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_repository + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RepositoryManager server but before it is returned to user code. + + We recommend only using this `post_get_repository_with_metadata` + interceptor in new development instead of the `post_get_repository` interceptor. + When both interceptors are used, this `post_get_repository_with_metadata` interceptor runs after the + `post_get_repository` interceptor. The (possibly modified) response returned by + `post_get_repository` will be passed to + `post_get_repository_with_metadata`. + """ + return response, metadata + def pre_list_connections( self, request: repositories.ListConnectionsRequest, @@ -486,12 +749,37 @@ def post_list_connections( ) -> repositories.ListConnectionsResponse: """Post-rpc interceptor for list_connections - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_connections_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response after it is returned by the RepositoryManager server but before - it is returned to user code. + it is returned to user code. This `post_list_connections` interceptor runs + before the `post_list_connections_with_metadata` interceptor. """ return response + def post_list_connections_with_metadata( + self, + response: repositories.ListConnectionsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + repositories.ListConnectionsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_connections + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RepositoryManager server but before it is returned to user code. + + We recommend only using this `post_list_connections_with_metadata` + interceptor in new development instead of the `post_list_connections` interceptor. + When both interceptors are used, this `post_list_connections_with_metadata` interceptor runs after the + `post_list_connections` interceptor. The (possibly modified) response returned by + `post_list_connections` will be passed to + `post_list_connections_with_metadata`. + """ + return response, metadata + def pre_list_repositories( self, request: repositories.ListRepositoriesRequest, @@ -511,12 +799,37 @@ def post_list_repositories( ) -> repositories.ListRepositoriesResponse: """Post-rpc interceptor for list_repositories - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_repositories_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RepositoryManager server but before - it is returned to user code. + it is returned to user code. This `post_list_repositories` interceptor runs + before the `post_list_repositories_with_metadata` interceptor. """ return response + def post_list_repositories_with_metadata( + self, + response: repositories.ListRepositoriesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + repositories.ListRepositoriesResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_repositories + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RepositoryManager server but before it is returned to user code. + + We recommend only using this `post_list_repositories_with_metadata` + interceptor in new development instead of the `post_list_repositories` interceptor. + When both interceptors are used, this `post_list_repositories_with_metadata` interceptor runs after the + `post_list_repositories` interceptor. The (possibly modified) response returned by + `post_list_repositories` will be passed to + `post_list_repositories_with_metadata`. + """ + return response, metadata + def pre_update_connection( self, request: repositories.UpdateConnectionRequest, @@ -536,12 +849,35 @@ def post_update_connection( ) -> operations_pb2.Operation: """Post-rpc interceptor for update_connection - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_connection_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RepositoryManager server but before - it is returned to user code. + it is returned to user code. This `post_update_connection` interceptor runs + before the `post_update_connection_with_metadata` interceptor. 
""" return response + def post_update_connection_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_connection + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RepositoryManager server but before it is returned to user code. + + We recommend only using this `post_update_connection_with_metadata` + interceptor in new development instead of the `post_update_connection` interceptor. + When both interceptors are used, this `post_update_connection_with_metadata` interceptor runs after the + `post_update_connection` interceptor. The (possibly modified) response returned by + `post_update_connection` will be passed to + `post_update_connection_with_metadata`. + """ + return response, metadata + def pre_get_iam_policy( self, request: iam_policy_pb2.GetIamPolicyRequest, @@ -921,6 +1257,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_batch_create_repositories(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_batch_create_repositories_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1071,6 +1411,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_connection(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_connection_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1221,6 +1565,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_repository(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_repository_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1365,6 +1713,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_connection(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_connection_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1509,6 +1861,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_repository(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_repository_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1649,6 +2005,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_fetch_git_refs(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_fetch_git_refs_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ 
-1800,6 +2160,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_fetch_linkable_repositories(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_fetch_linkable_repositories_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1953,6 +2317,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_fetch_read_token(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_fetch_read_token_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2109,6 +2477,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_fetch_read_write_token(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_fetch_read_write_token_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2256,6 +2628,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_connection(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_connection_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2400,6 +2776,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_repository(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_repository_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2546,6 +2926,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_connections(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_connections_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2694,6 +3078,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_repositories(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_repositories_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2846,6 +3234,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_connection(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_connection_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-build/samples/generated_samples/snippet_metadata_google.devtools.cloudbuild.v1.json 
b/packages/google-cloud-build/samples/generated_samples/snippet_metadata_google.devtools.cloudbuild.v1.json index 4bb604454cc6..33a3513a412e 100644 --- a/packages/google-cloud-build/samples/generated_samples/snippet_metadata_google.devtools.cloudbuild.v1.json +++ b/packages/google-cloud-build/samples/generated_samples/snippet_metadata_google.devtools.cloudbuild.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-build", - "version": "3.28.0" + "version": "3.30.0" }, "snippets": [ { diff --git a/packages/google-cloud-build/samples/generated_samples/snippet_metadata_google.devtools.cloudbuild.v2.json b/packages/google-cloud-build/samples/generated_samples/snippet_metadata_google.devtools.cloudbuild.v2.json index 972c4df91f39..f364c6f47609 100644 --- a/packages/google-cloud-build/samples/generated_samples/snippet_metadata_google.devtools.cloudbuild.v2.json +++ b/packages/google-cloud-build/samples/generated_samples/snippet_metadata_google.devtools.cloudbuild.v2.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-build", - "version": "3.28.0" + "version": "3.30.0" }, "snippets": [ { diff --git a/packages/google-cloud-build/tests/unit/gapic/cloudbuild_v1/test_cloud_build.py b/packages/google-cloud-build/tests/unit/gapic/cloudbuild_v1/test_cloud_build.py index 71c4b3109603..aad2528059e7 100644 --- a/packages/google-cloud-build/tests/unit/gapic/cloudbuild_v1/test_cloud_build.py +++ b/packages/google-cloud-build/tests/unit/gapic/cloudbuild_v1/test_cloud_build.py @@ -76,6 +76,13 @@ ) from google.cloud.devtools.cloudbuild_v1.types import cloudbuild +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -311,6 +318,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
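The tests added below cover the `_add_cred_info_for_auth_errors` helper shown earlier in this diff: only for 401, 403, and 404 errors, and only when the credentials object exposes `get_cred_info()` (google-auth 2.35.0 and later), the serialized credential info is appended to the error's details. A hedged sketch of how that surfaces to a caller; the project and trigger IDs are placeholders, and whether a given method appends the info depends on the generated try/except wrapping:

from google.api_core import exceptions as core_exceptions
from google.cloud.devtools import cloudbuild_v1

client = cloudbuild_v1.CloudBuildClient()  # uses Application Default Credentials

try:
    client.get_build_trigger(project_id="my-project", trigger_id="my-trigger")
except core_exceptions.PermissionDenied as exc:  # HTTP 403
    # With credentials that implement get_cred_info(), the last detail entry is a
    # JSON string resembling:
    # {"credential_source": "...", "credential_type": "...", "principal": "..."}
    for detail in exc.details:
        print(detail)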
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = CloudBuildClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = CloudBuildClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -12776,6 +12826,7 @@ def test_create_build_rest_call_success(request_type): "maven_artifacts": [ {"uri": "uri_value", "file_hashes": {}, "push_timing": {}} ], + "go_modules": [{"uri": "uri_value", "file_hashes": {}, "push_timing": {}}], "npm_packages": [ {"uri": "uri_value", "file_hashes": {}, "push_timing": {}} ], @@ -12802,6 +12853,16 @@ def test_create_build_rest_call_success(request_type): "version": "version_value", } ], + "go_modules": [ + { + "repository_name": "repository_name_value", + "repository_location": "repository_location_value", + "repository_project_id": "repository_project_id_value", + "source_path": "source_path_value", + "module_path": "module_path_value", + "module_version": "module_version_value", + } + ], "python_packages": [ { "repository": "repository_value", @@ -12836,6 +12897,7 @@ def test_create_build_rest_call_success(request_type): "secret_env": ["secret_env_value1", "secret_env_value2"], "volumes": {}, "default_logs_bucket_behavior": 1, + "enable_structured_logging": True, }, "log_url": "log_url_value", "substitutions": {}, @@ -12969,10 +13031,13 @@ def test_create_build_rest_interceptors(null_interceptor): ), mock.patch.object( transports.CloudBuildRestInterceptor, "post_create_build" ) as post, mock.patch.object( + transports.CloudBuildRestInterceptor, "post_create_build_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.CloudBuildRestInterceptor, "pre_create_build" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = cloudbuild.CreateBuildRequest.pb(cloudbuild.CreateBuildRequest()) transcode.return_value = { "method": "post", @@ -12994,6 +13059,7 @@ def test_create_build_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_build( request, @@ -13005,6 +13071,7 @@ def test_create_build_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def 
test_get_build_rest_bad_request(request_type=cloudbuild.GetBuildRequest): @@ -13107,10 +13174,13 @@ def test_get_build_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.CloudBuildRestInterceptor, "post_get_build" ) as post, mock.patch.object( + transports.CloudBuildRestInterceptor, "post_get_build_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.CloudBuildRestInterceptor, "pre_get_build" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = cloudbuild.GetBuildRequest.pb(cloudbuild.GetBuildRequest()) transcode.return_value = { "method": "post", @@ -13132,6 +13202,7 @@ def test_get_build_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = cloudbuild.Build() + post_with_metadata.return_value = cloudbuild.Build(), metadata client.get_build( request, @@ -13143,6 +13214,7 @@ def test_get_build_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_builds_rest_bad_request(request_type=cloudbuild.ListBuildsRequest): @@ -13225,10 +13297,13 @@ def test_list_builds_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.CloudBuildRestInterceptor, "post_list_builds" ) as post, mock.patch.object( + transports.CloudBuildRestInterceptor, "post_list_builds_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.CloudBuildRestInterceptor, "pre_list_builds" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = cloudbuild.ListBuildsRequest.pb(cloudbuild.ListBuildsRequest()) transcode.return_value = { "method": "post", @@ -13252,6 +13327,7 @@ def test_list_builds_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = cloudbuild.ListBuildsResponse() + post_with_metadata.return_value = cloudbuild.ListBuildsResponse(), metadata client.list_builds( request, @@ -13263,6 +13339,7 @@ def test_list_builds_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_cancel_build_rest_bad_request(request_type=cloudbuild.CancelBuildRequest): @@ -13365,10 +13442,13 @@ def test_cancel_build_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.CloudBuildRestInterceptor, "post_cancel_build" ) as post, mock.patch.object( + transports.CloudBuildRestInterceptor, "post_cancel_build_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.CloudBuildRestInterceptor, "pre_cancel_build" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = cloudbuild.CancelBuildRequest.pb(cloudbuild.CancelBuildRequest()) transcode.return_value = { "method": "post", @@ -13390,6 +13470,7 @@ def test_cancel_build_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = cloudbuild.Build() + post_with_metadata.return_value = cloudbuild.Build(), metadata client.cancel_build( request, @@ -13401,6 +13482,7 @@ def test_cancel_build_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_retry_build_rest_bad_request(request_type=cloudbuild.RetryBuildRequest): @@ -13479,10 +13561,13 @@ def test_retry_build_rest_interceptors(null_interceptor): ), mock.patch.object( 
transports.CloudBuildRestInterceptor, "post_retry_build" ) as post, mock.patch.object( + transports.CloudBuildRestInterceptor, "post_retry_build_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.CloudBuildRestInterceptor, "pre_retry_build" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = cloudbuild.RetryBuildRequest.pb(cloudbuild.RetryBuildRequest()) transcode.return_value = { "method": "post", @@ -13504,6 +13589,7 @@ def test_retry_build_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.retry_build( request, @@ -13515,6 +13601,7 @@ def test_retry_build_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_approve_build_rest_bad_request(request_type=cloudbuild.ApproveBuildRequest): @@ -13593,10 +13680,13 @@ def test_approve_build_rest_interceptors(null_interceptor): ), mock.patch.object( transports.CloudBuildRestInterceptor, "post_approve_build" ) as post, mock.patch.object( + transports.CloudBuildRestInterceptor, "post_approve_build_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.CloudBuildRestInterceptor, "pre_approve_build" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = cloudbuild.ApproveBuildRequest.pb(cloudbuild.ApproveBuildRequest()) transcode.return_value = { "method": "post", @@ -13618,6 +13708,7 @@ def test_approve_build_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.approve_build( request, @@ -13629,6 +13720,7 @@ def test_approve_build_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_build_trigger_rest_bad_request( @@ -13786,6 +13878,9 @@ def test_create_build_trigger_rest_call_success(request_type): "maven_artifacts": [ {"uri": "uri_value", "file_hashes": {}, "push_timing": {}} ], + "go_modules": [ + {"uri": "uri_value", "file_hashes": {}, "push_timing": {}} + ], "npm_packages": [ {"uri": "uri_value", "file_hashes": {}, "push_timing": {}} ], @@ -13812,6 +13907,16 @@ def test_create_build_trigger_rest_call_success(request_type): "version": "version_value", } ], + "go_modules": [ + { + "repository_name": "repository_name_value", + "repository_location": "repository_location_value", + "repository_project_id": "repository_project_id_value", + "source_path": "source_path_value", + "module_path": "module_path_value", + "module_version": "module_version_value", + } + ], "python_packages": [ { "repository": "repository_value", @@ -13849,6 +13954,7 @@ def test_create_build_trigger_rest_call_success(request_type): "secret_env": ["secret_env_value1", "secret_env_value2"], "volumes": {}, "default_logs_bucket_behavior": 1, + "enable_structured_logging": True, }, "log_url": "log_url_value", "substitutions": {}, @@ -14035,10 +14141,13 @@ def test_create_build_trigger_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.CloudBuildRestInterceptor, "post_create_build_trigger" ) as post, mock.patch.object( + transports.CloudBuildRestInterceptor, "post_create_build_trigger_with_metadata" + ) as post_with_metadata, 
mock.patch.object( transports.CloudBuildRestInterceptor, "pre_create_build_trigger" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = cloudbuild.CreateBuildTriggerRequest.pb( cloudbuild.CreateBuildTriggerRequest() ) @@ -14062,6 +14171,7 @@ def test_create_build_trigger_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = cloudbuild.BuildTrigger() + post_with_metadata.return_value = cloudbuild.BuildTrigger(), metadata client.create_build_trigger( request, @@ -14073,6 +14183,7 @@ def test_create_build_trigger_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_build_trigger_rest_bad_request( @@ -14176,10 +14287,13 @@ def test_get_build_trigger_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.CloudBuildRestInterceptor, "post_get_build_trigger" ) as post, mock.patch.object( + transports.CloudBuildRestInterceptor, "post_get_build_trigger_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.CloudBuildRestInterceptor, "pre_get_build_trigger" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = cloudbuild.GetBuildTriggerRequest.pb( cloudbuild.GetBuildTriggerRequest() ) @@ -14203,6 +14317,7 @@ def test_get_build_trigger_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = cloudbuild.BuildTrigger() + post_with_metadata.return_value = cloudbuild.BuildTrigger(), metadata client.get_build_trigger( request, @@ -14214,6 +14329,7 @@ def test_get_build_trigger_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_build_triggers_rest_bad_request( @@ -14298,10 +14414,13 @@ def test_list_build_triggers_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.CloudBuildRestInterceptor, "post_list_build_triggers" ) as post, mock.patch.object( + transports.CloudBuildRestInterceptor, "post_list_build_triggers_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.CloudBuildRestInterceptor, "pre_list_build_triggers" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = cloudbuild.ListBuildTriggersRequest.pb( cloudbuild.ListBuildTriggersRequest() ) @@ -14327,6 +14446,10 @@ def test_list_build_triggers_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = cloudbuild.ListBuildTriggersResponse() + post_with_metadata.return_value = ( + cloudbuild.ListBuildTriggersResponse(), + metadata, + ) client.list_build_triggers( request, @@ -14338,6 +14461,7 @@ def test_list_build_triggers_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_build_trigger_rest_bad_request( @@ -14604,6 +14728,9 @@ def test_update_build_trigger_rest_call_success(request_type): "maven_artifacts": [ {"uri": "uri_value", "file_hashes": {}, "push_timing": {}} ], + "go_modules": [ + {"uri": "uri_value", "file_hashes": {}, "push_timing": {}} + ], "npm_packages": [ {"uri": "uri_value", "file_hashes": {}, "push_timing": {}} ], @@ -14630,6 +14757,16 @@ def test_update_build_trigger_rest_call_success(request_type): "version": "version_value", } ], + "go_modules": [ + { + "repository_name": 
"repository_name_value", + "repository_location": "repository_location_value", + "repository_project_id": "repository_project_id_value", + "source_path": "source_path_value", + "module_path": "module_path_value", + "module_version": "module_version_value", + } + ], "python_packages": [ { "repository": "repository_value", @@ -14667,6 +14804,7 @@ def test_update_build_trigger_rest_call_success(request_type): "secret_env": ["secret_env_value1", "secret_env_value2"], "volumes": {}, "default_logs_bucket_behavior": 1, + "enable_structured_logging": True, }, "log_url": "log_url_value", "substitutions": {}, @@ -14853,10 +14991,13 @@ def test_update_build_trigger_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.CloudBuildRestInterceptor, "post_update_build_trigger" ) as post, mock.patch.object( + transports.CloudBuildRestInterceptor, "post_update_build_trigger_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.CloudBuildRestInterceptor, "pre_update_build_trigger" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = cloudbuild.UpdateBuildTriggerRequest.pb( cloudbuild.UpdateBuildTriggerRequest() ) @@ -14880,6 +15021,7 @@ def test_update_build_trigger_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = cloudbuild.BuildTrigger() + post_with_metadata.return_value = cloudbuild.BuildTrigger(), metadata client.update_build_trigger( request, @@ -14891,6 +15033,7 @@ def test_update_build_trigger_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_run_build_trigger_rest_bad_request( @@ -15048,10 +15191,13 @@ def test_run_build_trigger_rest_interceptors(null_interceptor): ), mock.patch.object( transports.CloudBuildRestInterceptor, "post_run_build_trigger" ) as post, mock.patch.object( + transports.CloudBuildRestInterceptor, "post_run_build_trigger_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.CloudBuildRestInterceptor, "pre_run_build_trigger" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = cloudbuild.RunBuildTriggerRequest.pb( cloudbuild.RunBuildTriggerRequest() ) @@ -15075,6 +15221,7 @@ def test_run_build_trigger_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.run_build_trigger( request, @@ -15086,6 +15233,7 @@ def test_run_build_trigger_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_receive_trigger_webhook_rest_bad_request( @@ -15244,10 +15392,14 @@ def test_receive_trigger_webhook_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.CloudBuildRestInterceptor, "post_receive_trigger_webhook" ) as post, mock.patch.object( + transports.CloudBuildRestInterceptor, + "post_receive_trigger_webhook_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.CloudBuildRestInterceptor, "pre_receive_trigger_webhook" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = cloudbuild.ReceiveTriggerWebhookRequest.pb( cloudbuild.ReceiveTriggerWebhookRequest() ) @@ -15273,6 +15425,10 @@ def 
test_receive_trigger_webhook_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = cloudbuild.ReceiveTriggerWebhookResponse() + post_with_metadata.return_value = ( + cloudbuild.ReceiveTriggerWebhookResponse(), + metadata, + ) client.receive_trigger_webhook( request, @@ -15284,6 +15440,7 @@ def test_receive_trigger_webhook_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_worker_pool_rest_bad_request( @@ -15458,10 +15615,13 @@ def test_create_worker_pool_rest_interceptors(null_interceptor): ), mock.patch.object( transports.CloudBuildRestInterceptor, "post_create_worker_pool" ) as post, mock.patch.object( + transports.CloudBuildRestInterceptor, "post_create_worker_pool_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.CloudBuildRestInterceptor, "pre_create_worker_pool" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = cloudbuild.CreateWorkerPoolRequest.pb( cloudbuild.CreateWorkerPoolRequest() ) @@ -15485,6 +15645,7 @@ def test_create_worker_pool_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_worker_pool( request, @@ -15496,6 +15657,7 @@ def test_create_worker_pool_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_worker_pool_rest_bad_request(request_type=cloudbuild.GetWorkerPoolRequest): @@ -15586,10 +15748,13 @@ def test_get_worker_pool_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.CloudBuildRestInterceptor, "post_get_worker_pool" ) as post, mock.patch.object( + transports.CloudBuildRestInterceptor, "post_get_worker_pool_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.CloudBuildRestInterceptor, "pre_get_worker_pool" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = cloudbuild.GetWorkerPoolRequest.pb( cloudbuild.GetWorkerPoolRequest() ) @@ -15613,6 +15778,7 @@ def test_get_worker_pool_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = cloudbuild.WorkerPool() + post_with_metadata.return_value = cloudbuild.WorkerPool(), metadata client.get_worker_pool( request, @@ -15624,6 +15790,7 @@ def test_get_worker_pool_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_worker_pool_rest_bad_request( @@ -15704,10 +15871,13 @@ def test_delete_worker_pool_rest_interceptors(null_interceptor): ), mock.patch.object( transports.CloudBuildRestInterceptor, "post_delete_worker_pool" ) as post, mock.patch.object( + transports.CloudBuildRestInterceptor, "post_delete_worker_pool_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.CloudBuildRestInterceptor, "pre_delete_worker_pool" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = cloudbuild.DeleteWorkerPoolRequest.pb( cloudbuild.DeleteWorkerPoolRequest() ) @@ -15731,6 +15901,7 @@ def test_delete_worker_pool_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + 
post_with_metadata.return_value = operations_pb2.Operation(), metadata client.delete_worker_pool( request, @@ -15742,6 +15913,7 @@ def test_delete_worker_pool_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_worker_pool_rest_bad_request( @@ -15924,10 +16096,13 @@ def test_update_worker_pool_rest_interceptors(null_interceptor): ), mock.patch.object( transports.CloudBuildRestInterceptor, "post_update_worker_pool" ) as post, mock.patch.object( + transports.CloudBuildRestInterceptor, "post_update_worker_pool_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.CloudBuildRestInterceptor, "pre_update_worker_pool" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = cloudbuild.UpdateWorkerPoolRequest.pb( cloudbuild.UpdateWorkerPoolRequest() ) @@ -15951,6 +16126,7 @@ def test_update_worker_pool_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.update_worker_pool( request, @@ -15962,6 +16138,7 @@ def test_update_worker_pool_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_worker_pools_rest_bad_request( @@ -16046,10 +16223,13 @@ def test_list_worker_pools_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.CloudBuildRestInterceptor, "post_list_worker_pools" ) as post, mock.patch.object( + transports.CloudBuildRestInterceptor, "post_list_worker_pools_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.CloudBuildRestInterceptor, "pre_list_worker_pools" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = cloudbuild.ListWorkerPoolsRequest.pb( cloudbuild.ListWorkerPoolsRequest() ) @@ -16075,6 +16255,7 @@ def test_list_worker_pools_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = cloudbuild.ListWorkerPoolsResponse() + post_with_metadata.return_value = cloudbuild.ListWorkerPoolsResponse(), metadata client.list_worker_pools( request, @@ -16086,6 +16267,7 @@ def test_list_worker_pools_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_initialize_client_w_rest(): diff --git a/packages/google-cloud-build/tests/unit/gapic/cloudbuild_v2/test_repository_manager.py b/packages/google-cloud-build/tests/unit/gapic/cloudbuild_v2/test_repository_manager.py index fb4f00e50466..f2f0807497c8 100644 --- a/packages/google-cloud-build/tests/unit/gapic/cloudbuild_v2/test_repository_manager.py +++ b/packages/google-cloud-build/tests/unit/gapic/cloudbuild_v2/test_repository_manager.py @@ -77,6 +77,13 @@ ) from google.cloud.devtools.cloudbuild_v2.types import cloudbuild, repositories +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -344,6 +351,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = RepositoryManagerClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = RepositoryManagerClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -10312,10 +10362,14 @@ def test_create_connection_rest_interceptors(null_interceptor): ), mock.patch.object( transports.RepositoryManagerRestInterceptor, "post_create_connection" ) as post, mock.patch.object( + transports.RepositoryManagerRestInterceptor, + "post_create_connection_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.RepositoryManagerRestInterceptor, "pre_create_connection" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = repositories.CreateConnectionRequest.pb( repositories.CreateConnectionRequest() ) @@ -10339,6 +10393,7 @@ def test_create_connection_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_connection( request, @@ -10350,6 +10405,7 @@ def test_create_connection_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_connection_rest_bad_request( @@ -10440,10 +10496,13 @@ def test_get_connection_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RepositoryManagerRestInterceptor, "post_get_connection" ) as post, mock.patch.object( + transports.RepositoryManagerRestInterceptor, "post_get_connection_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.RepositoryManagerRestInterceptor, "pre_get_connection" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = repositories.GetConnectionRequest.pb( repositories.GetConnectionRequest() ) @@ -10467,6 +10526,7 @@ def test_get_connection_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = repositories.Connection() + post_with_metadata.return_value = repositories.Connection(), metadata client.get_connection( request, @@ -10478,6 +10538,7 @@ def test_get_connection_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def 
test_list_connections_rest_bad_request( @@ -10562,10 +10623,14 @@ def test_list_connections_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RepositoryManagerRestInterceptor, "post_list_connections" ) as post, mock.patch.object( + transports.RepositoryManagerRestInterceptor, + "post_list_connections_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.RepositoryManagerRestInterceptor, "pre_list_connections" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = repositories.ListConnectionsRequest.pb( repositories.ListConnectionsRequest() ) @@ -10591,6 +10656,10 @@ def test_list_connections_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = repositories.ListConnectionsResponse() + post_with_metadata.return_value = ( + repositories.ListConnectionsResponse(), + metadata, + ) client.list_connections( request, @@ -10602,6 +10671,7 @@ def test_list_connections_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_connection_rest_bad_request( @@ -10813,10 +10883,14 @@ def test_update_connection_rest_interceptors(null_interceptor): ), mock.patch.object( transports.RepositoryManagerRestInterceptor, "post_update_connection" ) as post, mock.patch.object( + transports.RepositoryManagerRestInterceptor, + "post_update_connection_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.RepositoryManagerRestInterceptor, "pre_update_connection" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = repositories.UpdateConnectionRequest.pb( repositories.UpdateConnectionRequest() ) @@ -10840,6 +10914,7 @@ def test_update_connection_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.update_connection( request, @@ -10851,6 +10926,7 @@ def test_update_connection_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_connection_rest_bad_request( @@ -10931,10 +11007,14 @@ def test_delete_connection_rest_interceptors(null_interceptor): ), mock.patch.object( transports.RepositoryManagerRestInterceptor, "post_delete_connection" ) as post, mock.patch.object( + transports.RepositoryManagerRestInterceptor, + "post_delete_connection_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.RepositoryManagerRestInterceptor, "pre_delete_connection" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = repositories.DeleteConnectionRequest.pb( repositories.DeleteConnectionRequest() ) @@ -10958,6 +11038,7 @@ def test_delete_connection_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.delete_connection( request, @@ -10969,6 +11050,7 @@ def test_delete_connection_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_repository_rest_bad_request( @@ -11125,10 +11207,14 @@ def test_create_repository_rest_interceptors(null_interceptor): ), mock.patch.object( 
transports.RepositoryManagerRestInterceptor, "post_create_repository" ) as post, mock.patch.object( + transports.RepositoryManagerRestInterceptor, + "post_create_repository_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.RepositoryManagerRestInterceptor, "pre_create_repository" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = repositories.CreateRepositoryRequest.pb( repositories.CreateRepositoryRequest() ) @@ -11152,6 +11238,7 @@ def test_create_repository_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_repository( request, @@ -11163,6 +11250,7 @@ def test_create_repository_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_batch_create_repositories_rest_bad_request( @@ -11243,10 +11331,14 @@ def test_batch_create_repositories_rest_interceptors(null_interceptor): ), mock.patch.object( transports.RepositoryManagerRestInterceptor, "post_batch_create_repositories" ) as post, mock.patch.object( + transports.RepositoryManagerRestInterceptor, + "post_batch_create_repositories_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.RepositoryManagerRestInterceptor, "pre_batch_create_repositories" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = repositories.BatchCreateRepositoriesRequest.pb( repositories.BatchCreateRepositoriesRequest() ) @@ -11270,6 +11362,7 @@ def test_batch_create_repositories_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.batch_create_repositories( request, @@ -11281,6 +11374,7 @@ def test_batch_create_repositories_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_repository_rest_bad_request( @@ -11375,10 +11469,13 @@ def test_get_repository_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RepositoryManagerRestInterceptor, "post_get_repository" ) as post, mock.patch.object( + transports.RepositoryManagerRestInterceptor, "post_get_repository_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.RepositoryManagerRestInterceptor, "pre_get_repository" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = repositories.GetRepositoryRequest.pb( repositories.GetRepositoryRequest() ) @@ -11402,6 +11499,7 @@ def test_get_repository_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = repositories.Repository() + post_with_metadata.return_value = repositories.Repository(), metadata client.get_repository( request, @@ -11413,6 +11511,7 @@ def test_get_repository_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_repositories_rest_bad_request( @@ -11497,10 +11596,14 @@ def test_list_repositories_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RepositoryManagerRestInterceptor, "post_list_repositories" ) as post, mock.patch.object( + 
transports.RepositoryManagerRestInterceptor, + "post_list_repositories_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.RepositoryManagerRestInterceptor, "pre_list_repositories" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = repositories.ListRepositoriesRequest.pb( repositories.ListRepositoriesRequest() ) @@ -11526,6 +11629,10 @@ def test_list_repositories_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = repositories.ListRepositoriesResponse() + post_with_metadata.return_value = ( + repositories.ListRepositoriesResponse(), + metadata, + ) client.list_repositories( request, @@ -11537,6 +11644,7 @@ def test_list_repositories_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_repository_rest_bad_request( @@ -11621,10 +11729,14 @@ def test_delete_repository_rest_interceptors(null_interceptor): ), mock.patch.object( transports.RepositoryManagerRestInterceptor, "post_delete_repository" ) as post, mock.patch.object( + transports.RepositoryManagerRestInterceptor, + "post_delete_repository_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.RepositoryManagerRestInterceptor, "pre_delete_repository" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = repositories.DeleteRepositoryRequest.pb( repositories.DeleteRepositoryRequest() ) @@ -11648,6 +11760,7 @@ def test_delete_repository_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.delete_repository( request, @@ -11659,6 +11772,7 @@ def test_delete_repository_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_fetch_read_write_token_rest_bad_request( @@ -11747,10 +11861,14 @@ def test_fetch_read_write_token_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RepositoryManagerRestInterceptor, "post_fetch_read_write_token" ) as post, mock.patch.object( + transports.RepositoryManagerRestInterceptor, + "post_fetch_read_write_token_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.RepositoryManagerRestInterceptor, "pre_fetch_read_write_token" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = repositories.FetchReadWriteTokenRequest.pb( repositories.FetchReadWriteTokenRequest() ) @@ -11776,6 +11894,10 @@ def test_fetch_read_write_token_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = repositories.FetchReadWriteTokenResponse() + post_with_metadata.return_value = ( + repositories.FetchReadWriteTokenResponse(), + metadata, + ) client.fetch_read_write_token( request, @@ -11787,6 +11909,7 @@ def test_fetch_read_write_token_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_fetch_read_token_rest_bad_request( @@ -11875,10 +11998,14 @@ def test_fetch_read_token_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RepositoryManagerRestInterceptor, "post_fetch_read_token" ) as post, mock.patch.object( + transports.RepositoryManagerRestInterceptor, + 
"post_fetch_read_token_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.RepositoryManagerRestInterceptor, "pre_fetch_read_token" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = repositories.FetchReadTokenRequest.pb( repositories.FetchReadTokenRequest() ) @@ -11904,6 +12031,10 @@ def test_fetch_read_token_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = repositories.FetchReadTokenResponse() + post_with_metadata.return_value = ( + repositories.FetchReadTokenResponse(), + metadata, + ) client.fetch_read_token( request, @@ -11915,6 +12046,7 @@ def test_fetch_read_token_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_fetch_linkable_repositories_rest_bad_request( @@ -12003,10 +12135,14 @@ def test_fetch_linkable_repositories_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RepositoryManagerRestInterceptor, "post_fetch_linkable_repositories" ) as post, mock.patch.object( + transports.RepositoryManagerRestInterceptor, + "post_fetch_linkable_repositories_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.RepositoryManagerRestInterceptor, "pre_fetch_linkable_repositories" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = repositories.FetchLinkableRepositoriesRequest.pb( repositories.FetchLinkableRepositoriesRequest() ) @@ -12032,6 +12168,10 @@ def test_fetch_linkable_repositories_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = repositories.FetchLinkableRepositoriesResponse() + post_with_metadata.return_value = ( + repositories.FetchLinkableRepositoriesResponse(), + metadata, + ) client.fetch_linkable_repositories( request, @@ -12043,6 +12183,7 @@ def test_fetch_linkable_repositories_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_fetch_git_refs_rest_bad_request(request_type=repositories.FetchGitRefsRequest): @@ -12129,10 +12270,13 @@ def test_fetch_git_refs_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RepositoryManagerRestInterceptor, "post_fetch_git_refs" ) as post, mock.patch.object( + transports.RepositoryManagerRestInterceptor, "post_fetch_git_refs_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.RepositoryManagerRestInterceptor, "pre_fetch_git_refs" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = repositories.FetchGitRefsRequest.pb( repositories.FetchGitRefsRequest() ) @@ -12158,6 +12302,7 @@ def test_fetch_git_refs_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = repositories.FetchGitRefsResponse() + post_with_metadata.return_value = repositories.FetchGitRefsResponse(), metadata client.fetch_git_refs( request, @@ -12169,6 +12314,7 @@ def test_fetch_git_refs_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_iam_policy_rest_bad_request( diff --git a/packages/google-cloud-certificate-manager/CHANGELOG.md b/packages/google-cloud-certificate-manager/CHANGELOG.md index 484b6c3393d1..9983619883cc 100644 --- a/packages/google-cloud-certificate-manager/CHANGELOG.md 
+++ b/packages/google-cloud-certificate-manager/CHANGELOG.md @@ -1,5 +1,13 @@ # Changelog +## [1.10.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-certificate-manager-v1.9.0...google-cloud-certificate-manager-v1.10.0) (2025-02-12) + + +### Features + +* Add REST Interceptors which support reading metadata ([e22e2bd](https://github.com/googleapis/google-cloud-python/commit/e22e2bde55d11d2f85e9d2caf1d152a4027f88cf)) +* Add support for reading selective GAPIC generation methods from service YAML ([e22e2bd](https://github.com/googleapis/google-cloud-python/commit/e22e2bde55d11d2f85e9d2caf1d152a4027f88cf)) + ## [1.9.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-certificate-manager-v1.8.1...google-cloud-certificate-manager-v1.9.0) (2024-12-12) diff --git a/packages/google-cloud-certificate-manager/google/cloud/certificate_manager/gapic_version.py b/packages/google-cloud-certificate-manager/google/cloud/certificate_manager/gapic_version.py index 1c08bcbd1569..d1d2a9e60a97 100644 --- a/packages/google-cloud-certificate-manager/google/cloud/certificate_manager/gapic_version.py +++ b/packages/google-cloud-certificate-manager/google/cloud/certificate_manager/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.9.0" # {x-release-please-version} +__version__ = "1.10.0" # {x-release-please-version} diff --git a/packages/google-cloud-certificate-manager/google/cloud/certificate_manager_v1/gapic_version.py b/packages/google-cloud-certificate-manager/google/cloud/certificate_manager_v1/gapic_version.py index 1c08bcbd1569..d1d2a9e60a97 100644 --- a/packages/google-cloud-certificate-manager/google/cloud/certificate_manager_v1/gapic_version.py +++ b/packages/google-cloud-certificate-manager/google/cloud/certificate_manager_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.9.0" # {x-release-please-version} +__version__ = "1.10.0" # {x-release-please-version} diff --git a/packages/google-cloud-certificate-manager/google/cloud/certificate_manager_v1/services/certificate_manager/client.py b/packages/google-cloud-certificate-manager/google/cloud/certificate_manager_v1/services/certificate_manager/client.py index e894cc32e8f3..61581a9ddcaf 100644 --- a/packages/google-cloud-certificate-manager/google/cloud/certificate_manager_v1/services/certificate_manager/client.py +++ b/packages/google-cloud-certificate-manager/google/cloud/certificate_manager_v1/services/certificate_manager/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -663,6 +665,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. 
+ """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -4666,16 +4695,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -4721,16 +4754,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def delete_operation( self, @@ -4887,16 +4924,20 @@ def get_location( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def list_locations( self, @@ -4942,16 +4983,20 @@ def list_locations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. 
+ return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-certificate-manager/google/cloud/certificate_manager_v1/services/certificate_manager/transports/rest.py b/packages/google-cloud-certificate-manager/google/cloud/certificate_manager_v1/services/certificate_manager/transports/rest.py index dd7ddc945d4c..87b4dcc376ea 100644 --- a/packages/google-cloud-certificate-manager/google/cloud/certificate_manager_v1/services/certificate_manager/transports/rest.py +++ b/packages/google-cloud-certificate-manager/google/cloud/certificate_manager_v1/services/certificate_manager/transports/rest.py @@ -334,12 +334,35 @@ def post_create_certificate( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_certificate - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_certificate_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CertificateManager server but before - it is returned to user code. + it is returned to user code. This `post_create_certificate` interceptor runs + before the `post_create_certificate_with_metadata` interceptor. """ return response + def post_create_certificate_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_certificate + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CertificateManager server but before it is returned to user code. + + We recommend only using this `post_create_certificate_with_metadata` + interceptor in new development instead of the `post_create_certificate` interceptor. + When both interceptors are used, this `post_create_certificate_with_metadata` interceptor runs after the + `post_create_certificate` interceptor. The (possibly modified) response returned by + `post_create_certificate` will be passed to + `post_create_certificate_with_metadata`. + """ + return response, metadata + def pre_create_certificate_issuance_config( self, request: gcc_certificate_issuance_config.CreateCertificateIssuanceConfigRequest, @@ -360,12 +383,35 @@ def post_create_certificate_issuance_config( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_certificate_issuance_config - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_certificate_issuance_config_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CertificateManager server but before - it is returned to user code. + it is returned to user code. This `post_create_certificate_issuance_config` interceptor runs + before the `post_create_certificate_issuance_config_with_metadata` interceptor. """ return response + def post_create_certificate_issuance_config_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_certificate_issuance_config + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CertificateManager server but before it is returned to user code. 
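# Editor's note: the client.py hunks above add `_add_cred_info_for_auth_errors`, which
# appends a JSON credential-info string (from google-auth >= 2.35.0 credentials that
# expose `get_cred_info`) to the details of 401/403/404 errors before re-raising.
# The sketch below is a minimal, hypothetical illustration of how a caller might
# observe that detail; the method name `get_certificate` and the error handling are
# illustrative assumptions, not part of this diff.
import json

from google.api_core import exceptions as core_exceptions
from google.cloud import certificate_manager_v1


def fetch_certificate(name: str) -> None:
    client = certificate_manager_v1.CertificateManagerClient()
    try:
        client.get_certificate(name=name)
    except core_exceptions.GoogleAPICallError as e:
        # For 401/403/404 responses the client appends a JSON string such as
        # {"credential_source": ..., "credential_type": ..., "principal": ...}
        # to the error details before the exception propagates.
        for detail in e.details or []:
            if isinstance(detail, str):
                try:
                    print("credential info:", json.loads(detail))
                except ValueError:
                    pass
        raise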
+ + We recommend only using this `post_create_certificate_issuance_config_with_metadata` + interceptor in new development instead of the `post_create_certificate_issuance_config` interceptor. + When both interceptors are used, this `post_create_certificate_issuance_config_with_metadata` interceptor runs after the + `post_create_certificate_issuance_config` interceptor. The (possibly modified) response returned by + `post_create_certificate_issuance_config` will be passed to + `post_create_certificate_issuance_config_with_metadata`. + """ + return response, metadata + def pre_create_certificate_map( self, request: certificate_manager.CreateCertificateMapRequest, @@ -386,12 +432,35 @@ def post_create_certificate_map( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_certificate_map - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_certificate_map_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CertificateManager server but before - it is returned to user code. + it is returned to user code. This `post_create_certificate_map` interceptor runs + before the `post_create_certificate_map_with_metadata` interceptor. """ return response + def post_create_certificate_map_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_certificate_map + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CertificateManager server but before it is returned to user code. + + We recommend only using this `post_create_certificate_map_with_metadata` + interceptor in new development instead of the `post_create_certificate_map` interceptor. + When both interceptors are used, this `post_create_certificate_map_with_metadata` interceptor runs after the + `post_create_certificate_map` interceptor. The (possibly modified) response returned by + `post_create_certificate_map` will be passed to + `post_create_certificate_map_with_metadata`. + """ + return response, metadata + def pre_create_certificate_map_entry( self, request: certificate_manager.CreateCertificateMapEntryRequest, @@ -412,12 +481,35 @@ def post_create_certificate_map_entry( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_certificate_map_entry - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_certificate_map_entry_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CertificateManager server but before - it is returned to user code. + it is returned to user code. This `post_create_certificate_map_entry` interceptor runs + before the `post_create_certificate_map_entry_with_metadata` interceptor. """ return response + def post_create_certificate_map_entry_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_certificate_map_entry + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CertificateManager server but before it is returned to user code. 
+ + We recommend only using this `post_create_certificate_map_entry_with_metadata` + interceptor in new development instead of the `post_create_certificate_map_entry` interceptor. + When both interceptors are used, this `post_create_certificate_map_entry_with_metadata` interceptor runs after the + `post_create_certificate_map_entry` interceptor. The (possibly modified) response returned by + `post_create_certificate_map_entry` will be passed to + `post_create_certificate_map_entry_with_metadata`. + """ + return response, metadata + def pre_create_dns_authorization( self, request: certificate_manager.CreateDnsAuthorizationRequest, @@ -438,12 +530,35 @@ def post_create_dns_authorization( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_dns_authorization - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_dns_authorization_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CertificateManager server but before - it is returned to user code. + it is returned to user code. This `post_create_dns_authorization` interceptor runs + before the `post_create_dns_authorization_with_metadata` interceptor. """ return response + def post_create_dns_authorization_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_dns_authorization + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CertificateManager server but before it is returned to user code. + + We recommend only using this `post_create_dns_authorization_with_metadata` + interceptor in new development instead of the `post_create_dns_authorization` interceptor. + When both interceptors are used, this `post_create_dns_authorization_with_metadata` interceptor runs after the + `post_create_dns_authorization` interceptor. The (possibly modified) response returned by + `post_create_dns_authorization` will be passed to + `post_create_dns_authorization_with_metadata`. + """ + return response, metadata + def pre_create_trust_config( self, request: gcc_trust_config.CreateTrustConfigRequest, @@ -464,12 +579,35 @@ def post_create_trust_config( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_trust_config - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_trust_config_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CertificateManager server but before - it is returned to user code. + it is returned to user code. This `post_create_trust_config` interceptor runs + before the `post_create_trust_config_with_metadata` interceptor. """ return response + def post_create_trust_config_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_trust_config + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CertificateManager server but before it is returned to user code. + + We recommend only using this `post_create_trust_config_with_metadata` + interceptor in new development instead of the `post_create_trust_config` interceptor. 
+ When both interceptors are used, this `post_create_trust_config_with_metadata` interceptor runs after the + `post_create_trust_config` interceptor. The (possibly modified) response returned by + `post_create_trust_config` will be passed to + `post_create_trust_config_with_metadata`. + """ + return response, metadata + def pre_delete_certificate( self, request: certificate_manager.DeleteCertificateRequest, @@ -490,12 +628,35 @@ def post_delete_certificate( ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_certificate - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_certificate_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CertificateManager server but before - it is returned to user code. + it is returned to user code. This `post_delete_certificate` interceptor runs + before the `post_delete_certificate_with_metadata` interceptor. """ return response + def post_delete_certificate_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_certificate + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CertificateManager server but before it is returned to user code. + + We recommend only using this `post_delete_certificate_with_metadata` + interceptor in new development instead of the `post_delete_certificate` interceptor. + When both interceptors are used, this `post_delete_certificate_with_metadata` interceptor runs after the + `post_delete_certificate` interceptor. The (possibly modified) response returned by + `post_delete_certificate` will be passed to + `post_delete_certificate_with_metadata`. + """ + return response, metadata + def pre_delete_certificate_issuance_config( self, request: certificate_issuance_config.DeleteCertificateIssuanceConfigRequest, @@ -516,12 +677,35 @@ def post_delete_certificate_issuance_config( ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_certificate_issuance_config - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_certificate_issuance_config_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CertificateManager server but before - it is returned to user code. + it is returned to user code. This `post_delete_certificate_issuance_config` interceptor runs + before the `post_delete_certificate_issuance_config_with_metadata` interceptor. """ return response + def post_delete_certificate_issuance_config_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_certificate_issuance_config + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CertificateManager server but before it is returned to user code. + + We recommend only using this `post_delete_certificate_issuance_config_with_metadata` + interceptor in new development instead of the `post_delete_certificate_issuance_config` interceptor. 
+ When both interceptors are used, this `post_delete_certificate_issuance_config_with_metadata` interceptor runs after the + `post_delete_certificate_issuance_config` interceptor. The (possibly modified) response returned by + `post_delete_certificate_issuance_config` will be passed to + `post_delete_certificate_issuance_config_with_metadata`. + """ + return response, metadata + def pre_delete_certificate_map( self, request: certificate_manager.DeleteCertificateMapRequest, @@ -542,12 +726,35 @@ def post_delete_certificate_map( ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_certificate_map - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_certificate_map_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CertificateManager server but before - it is returned to user code. + it is returned to user code. This `post_delete_certificate_map` interceptor runs + before the `post_delete_certificate_map_with_metadata` interceptor. """ return response + def post_delete_certificate_map_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_certificate_map + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CertificateManager server but before it is returned to user code. + + We recommend only using this `post_delete_certificate_map_with_metadata` + interceptor in new development instead of the `post_delete_certificate_map` interceptor. + When both interceptors are used, this `post_delete_certificate_map_with_metadata` interceptor runs after the + `post_delete_certificate_map` interceptor. The (possibly modified) response returned by + `post_delete_certificate_map` will be passed to + `post_delete_certificate_map_with_metadata`. + """ + return response, metadata + def pre_delete_certificate_map_entry( self, request: certificate_manager.DeleteCertificateMapEntryRequest, @@ -568,12 +775,35 @@ def post_delete_certificate_map_entry( ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_certificate_map_entry - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_certificate_map_entry_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CertificateManager server but before - it is returned to user code. + it is returned to user code. This `post_delete_certificate_map_entry` interceptor runs + before the `post_delete_certificate_map_entry_with_metadata` interceptor. """ return response + def post_delete_certificate_map_entry_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_certificate_map_entry + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CertificateManager server but before it is returned to user code. + + We recommend only using this `post_delete_certificate_map_entry_with_metadata` + interceptor in new development instead of the `post_delete_certificate_map_entry` interceptor. 
+ When both interceptors are used, this `post_delete_certificate_map_entry_with_metadata` interceptor runs after the + `post_delete_certificate_map_entry` interceptor. The (possibly modified) response returned by + `post_delete_certificate_map_entry` will be passed to + `post_delete_certificate_map_entry_with_metadata`. + """ + return response, metadata + def pre_delete_dns_authorization( self, request: certificate_manager.DeleteDnsAuthorizationRequest, @@ -594,12 +824,35 @@ def post_delete_dns_authorization( ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_dns_authorization - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_dns_authorization_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CertificateManager server but before - it is returned to user code. + it is returned to user code. This `post_delete_dns_authorization` interceptor runs + before the `post_delete_dns_authorization_with_metadata` interceptor. """ return response + def post_delete_dns_authorization_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_dns_authorization + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CertificateManager server but before it is returned to user code. + + We recommend only using this `post_delete_dns_authorization_with_metadata` + interceptor in new development instead of the `post_delete_dns_authorization` interceptor. + When both interceptors are used, this `post_delete_dns_authorization_with_metadata` interceptor runs after the + `post_delete_dns_authorization` interceptor. The (possibly modified) response returned by + `post_delete_dns_authorization` will be passed to + `post_delete_dns_authorization_with_metadata`. + """ + return response, metadata + def pre_delete_trust_config( self, request: trust_config.DeleteTrustConfigRequest, @@ -619,12 +872,35 @@ def post_delete_trust_config( ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_trust_config - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_trust_config_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CertificateManager server but before - it is returned to user code. + it is returned to user code. This `post_delete_trust_config` interceptor runs + before the `post_delete_trust_config_with_metadata` interceptor. """ return response + def post_delete_trust_config_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_trust_config + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CertificateManager server but before it is returned to user code. + + We recommend only using this `post_delete_trust_config_with_metadata` + interceptor in new development instead of the `post_delete_trust_config` interceptor. + When both interceptors are used, this `post_delete_trust_config_with_metadata` interceptor runs after the + `post_delete_trust_config` interceptor. 
The (possibly modified) response returned by + `post_delete_trust_config` will be passed to + `post_delete_trust_config_with_metadata`. + """ + return response, metadata + def pre_get_certificate( self, request: certificate_manager.GetCertificateRequest, @@ -645,12 +921,37 @@ def post_get_certificate( ) -> certificate_manager.Certificate: """Post-rpc interceptor for get_certificate - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_certificate_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CertificateManager server but before - it is returned to user code. + it is returned to user code. This `post_get_certificate` interceptor runs + before the `post_get_certificate_with_metadata` interceptor. """ return response + def post_get_certificate_with_metadata( + self, + response: certificate_manager.Certificate, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + certificate_manager.Certificate, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for get_certificate + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CertificateManager server but before it is returned to user code. + + We recommend only using this `post_get_certificate_with_metadata` + interceptor in new development instead of the `post_get_certificate` interceptor. + When both interceptors are used, this `post_get_certificate_with_metadata` interceptor runs after the + `post_get_certificate` interceptor. The (possibly modified) response returned by + `post_get_certificate` will be passed to + `post_get_certificate_with_metadata`. + """ + return response, metadata + def pre_get_certificate_issuance_config( self, request: certificate_issuance_config.GetCertificateIssuanceConfigRequest, @@ -671,12 +972,38 @@ def post_get_certificate_issuance_config( ) -> certificate_issuance_config.CertificateIssuanceConfig: """Post-rpc interceptor for get_certificate_issuance_config - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_certificate_issuance_config_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CertificateManager server but before - it is returned to user code. + it is returned to user code. This `post_get_certificate_issuance_config` interceptor runs + before the `post_get_certificate_issuance_config_with_metadata` interceptor. """ return response + def post_get_certificate_issuance_config_with_metadata( + self, + response: certificate_issuance_config.CertificateIssuanceConfig, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + certificate_issuance_config.CertificateIssuanceConfig, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for get_certificate_issuance_config + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CertificateManager server but before it is returned to user code. + + We recommend only using this `post_get_certificate_issuance_config_with_metadata` + interceptor in new development instead of the `post_get_certificate_issuance_config` interceptor. + When both interceptors are used, this `post_get_certificate_issuance_config_with_metadata` interceptor runs after the + `post_get_certificate_issuance_config` interceptor. 
The (possibly modified) response returned by + `post_get_certificate_issuance_config` will be passed to + `post_get_certificate_issuance_config_with_metadata`. + """ + return response, metadata + def pre_get_certificate_map( self, request: certificate_manager.GetCertificateMapRequest, @@ -697,12 +1024,37 @@ def post_get_certificate_map( ) -> certificate_manager.CertificateMap: """Post-rpc interceptor for get_certificate_map - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_certificate_map_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CertificateManager server but before - it is returned to user code. + it is returned to user code. This `post_get_certificate_map` interceptor runs + before the `post_get_certificate_map_with_metadata` interceptor. """ return response + def post_get_certificate_map_with_metadata( + self, + response: certificate_manager.CertificateMap, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + certificate_manager.CertificateMap, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for get_certificate_map + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CertificateManager server but before it is returned to user code. + + We recommend only using this `post_get_certificate_map_with_metadata` + interceptor in new development instead of the `post_get_certificate_map` interceptor. + When both interceptors are used, this `post_get_certificate_map_with_metadata` interceptor runs after the + `post_get_certificate_map` interceptor. The (possibly modified) response returned by + `post_get_certificate_map` will be passed to + `post_get_certificate_map_with_metadata`. + """ + return response, metadata + def pre_get_certificate_map_entry( self, request: certificate_manager.GetCertificateMapEntryRequest, @@ -723,12 +1075,37 @@ def post_get_certificate_map_entry( ) -> certificate_manager.CertificateMapEntry: """Post-rpc interceptor for get_certificate_map_entry - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_certificate_map_entry_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CertificateManager server but before - it is returned to user code. + it is returned to user code. This `post_get_certificate_map_entry` interceptor runs + before the `post_get_certificate_map_entry_with_metadata` interceptor. """ return response + def post_get_certificate_map_entry_with_metadata( + self, + response: certificate_manager.CertificateMapEntry, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + certificate_manager.CertificateMapEntry, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for get_certificate_map_entry + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CertificateManager server but before it is returned to user code. + + We recommend only using this `post_get_certificate_map_entry_with_metadata` + interceptor in new development instead of the `post_get_certificate_map_entry` interceptor. + When both interceptors are used, this `post_get_certificate_map_entry_with_metadata` interceptor runs after the + `post_get_certificate_map_entry` interceptor. 
The (possibly modified) response returned by + `post_get_certificate_map_entry` will be passed to + `post_get_certificate_map_entry_with_metadata`. + """ + return response, metadata + def pre_get_dns_authorization( self, request: certificate_manager.GetDnsAuthorizationRequest, @@ -749,12 +1126,37 @@ def post_get_dns_authorization( ) -> certificate_manager.DnsAuthorization: """Post-rpc interceptor for get_dns_authorization - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_dns_authorization_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CertificateManager server but before - it is returned to user code. + it is returned to user code. This `post_get_dns_authorization` interceptor runs + before the `post_get_dns_authorization_with_metadata` interceptor. """ return response + def post_get_dns_authorization_with_metadata( + self, + response: certificate_manager.DnsAuthorization, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + certificate_manager.DnsAuthorization, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for get_dns_authorization + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CertificateManager server but before it is returned to user code. + + We recommend only using this `post_get_dns_authorization_with_metadata` + interceptor in new development instead of the `post_get_dns_authorization` interceptor. + When both interceptors are used, this `post_get_dns_authorization_with_metadata` interceptor runs after the + `post_get_dns_authorization` interceptor. The (possibly modified) response returned by + `post_get_dns_authorization` will be passed to + `post_get_dns_authorization_with_metadata`. + """ + return response, metadata + def pre_get_trust_config( self, request: trust_config.GetTrustConfigRequest, @@ -774,12 +1176,35 @@ def post_get_trust_config( ) -> trust_config.TrustConfig: """Post-rpc interceptor for get_trust_config - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_trust_config_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CertificateManager server but before - it is returned to user code. + it is returned to user code. This `post_get_trust_config` interceptor runs + before the `post_get_trust_config_with_metadata` interceptor. """ return response + def post_get_trust_config_with_metadata( + self, + response: trust_config.TrustConfig, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[trust_config.TrustConfig, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_trust_config + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CertificateManager server but before it is returned to user code. + + We recommend only using this `post_get_trust_config_with_metadata` + interceptor in new development instead of the `post_get_trust_config` interceptor. + When both interceptors are used, this `post_get_trust_config_with_metadata` interceptor runs after the + `post_get_trust_config` interceptor. The (possibly modified) response returned by + `post_get_trust_config` will be passed to + `post_get_trust_config_with_metadata`. 
+ """ + return response, metadata + def pre_list_certificate_issuance_configs( self, request: certificate_issuance_config.ListCertificateIssuanceConfigsRequest, @@ -801,12 +1226,38 @@ def post_list_certificate_issuance_configs( ) -> certificate_issuance_config.ListCertificateIssuanceConfigsResponse: """Post-rpc interceptor for list_certificate_issuance_configs - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_certificate_issuance_configs_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CertificateManager server but before - it is returned to user code. + it is returned to user code. This `post_list_certificate_issuance_configs` interceptor runs + before the `post_list_certificate_issuance_configs_with_metadata` interceptor. """ return response + def post_list_certificate_issuance_configs_with_metadata( + self, + response: certificate_issuance_config.ListCertificateIssuanceConfigsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + certificate_issuance_config.ListCertificateIssuanceConfigsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_certificate_issuance_configs + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CertificateManager server but before it is returned to user code. + + We recommend only using this `post_list_certificate_issuance_configs_with_metadata` + interceptor in new development instead of the `post_list_certificate_issuance_configs` interceptor. + When both interceptors are used, this `post_list_certificate_issuance_configs_with_metadata` interceptor runs after the + `post_list_certificate_issuance_configs` interceptor. The (possibly modified) response returned by + `post_list_certificate_issuance_configs` will be passed to + `post_list_certificate_issuance_configs_with_metadata`. + """ + return response, metadata + def pre_list_certificate_map_entries( self, request: certificate_manager.ListCertificateMapEntriesRequest, @@ -827,12 +1278,38 @@ def post_list_certificate_map_entries( ) -> certificate_manager.ListCertificateMapEntriesResponse: """Post-rpc interceptor for list_certificate_map_entries - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_certificate_map_entries_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CertificateManager server but before - it is returned to user code. + it is returned to user code. This `post_list_certificate_map_entries` interceptor runs + before the `post_list_certificate_map_entries_with_metadata` interceptor. """ return response + def post_list_certificate_map_entries_with_metadata( + self, + response: certificate_manager.ListCertificateMapEntriesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + certificate_manager.ListCertificateMapEntriesResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_certificate_map_entries + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CertificateManager server but before it is returned to user code. + + We recommend only using this `post_list_certificate_map_entries_with_metadata` + interceptor in new development instead of the `post_list_certificate_map_entries` interceptor. 
+ When both interceptors are used, this `post_list_certificate_map_entries_with_metadata` interceptor runs after the + `post_list_certificate_map_entries` interceptor. The (possibly modified) response returned by + `post_list_certificate_map_entries` will be passed to + `post_list_certificate_map_entries_with_metadata`. + """ + return response, metadata + def pre_list_certificate_maps( self, request: certificate_manager.ListCertificateMapsRequest, @@ -853,12 +1330,38 @@ def post_list_certificate_maps( ) -> certificate_manager.ListCertificateMapsResponse: """Post-rpc interceptor for list_certificate_maps - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_certificate_maps_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CertificateManager server but before - it is returned to user code. + it is returned to user code. This `post_list_certificate_maps` interceptor runs + before the `post_list_certificate_maps_with_metadata` interceptor. """ return response + def post_list_certificate_maps_with_metadata( + self, + response: certificate_manager.ListCertificateMapsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + certificate_manager.ListCertificateMapsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_certificate_maps + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CertificateManager server but before it is returned to user code. + + We recommend only using this `post_list_certificate_maps_with_metadata` + interceptor in new development instead of the `post_list_certificate_maps` interceptor. + When both interceptors are used, this `post_list_certificate_maps_with_metadata` interceptor runs after the + `post_list_certificate_maps` interceptor. The (possibly modified) response returned by + `post_list_certificate_maps` will be passed to + `post_list_certificate_maps_with_metadata`. + """ + return response, metadata + def pre_list_certificates( self, request: certificate_manager.ListCertificatesRequest, @@ -879,12 +1382,38 @@ def post_list_certificates( ) -> certificate_manager.ListCertificatesResponse: """Post-rpc interceptor for list_certificates - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_certificates_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CertificateManager server but before - it is returned to user code. + it is returned to user code. This `post_list_certificates` interceptor runs + before the `post_list_certificates_with_metadata` interceptor. """ return response + def post_list_certificates_with_metadata( + self, + response: certificate_manager.ListCertificatesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + certificate_manager.ListCertificatesResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_certificates + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CertificateManager server but before it is returned to user code. + + We recommend only using this `post_list_certificates_with_metadata` + interceptor in new development instead of the `post_list_certificates` interceptor. 
+ When both interceptors are used, this `post_list_certificates_with_metadata` interceptor runs after the + `post_list_certificates` interceptor. The (possibly modified) response returned by + `post_list_certificates` will be passed to + `post_list_certificates_with_metadata`. + """ + return response, metadata + def pre_list_dns_authorizations( self, request: certificate_manager.ListDnsAuthorizationsRequest, @@ -905,12 +1434,38 @@ def post_list_dns_authorizations( ) -> certificate_manager.ListDnsAuthorizationsResponse: """Post-rpc interceptor for list_dns_authorizations - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_dns_authorizations_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CertificateManager server but before - it is returned to user code. + it is returned to user code. This `post_list_dns_authorizations` interceptor runs + before the `post_list_dns_authorizations_with_metadata` interceptor. """ return response + def post_list_dns_authorizations_with_metadata( + self, + response: certificate_manager.ListDnsAuthorizationsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + certificate_manager.ListDnsAuthorizationsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_dns_authorizations + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CertificateManager server but before it is returned to user code. + + We recommend only using this `post_list_dns_authorizations_with_metadata` + interceptor in new development instead of the `post_list_dns_authorizations` interceptor. + When both interceptors are used, this `post_list_dns_authorizations_with_metadata` interceptor runs after the + `post_list_dns_authorizations` interceptor. The (possibly modified) response returned by + `post_list_dns_authorizations` will be passed to + `post_list_dns_authorizations_with_metadata`. + """ + return response, metadata + def pre_list_trust_configs( self, request: trust_config.ListTrustConfigsRequest, @@ -930,12 +1485,37 @@ def post_list_trust_configs( ) -> trust_config.ListTrustConfigsResponse: """Post-rpc interceptor for list_trust_configs - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_trust_configs_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CertificateManager server but before - it is returned to user code. + it is returned to user code. This `post_list_trust_configs` interceptor runs + before the `post_list_trust_configs_with_metadata` interceptor. """ return response + def post_list_trust_configs_with_metadata( + self, + response: trust_config.ListTrustConfigsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + trust_config.ListTrustConfigsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_trust_configs + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CertificateManager server but before it is returned to user code. + + We recommend only using this `post_list_trust_configs_with_metadata` + interceptor in new development instead of the `post_list_trust_configs` interceptor. + When both interceptors are used, this `post_list_trust_configs_with_metadata` interceptor runs after the + `post_list_trust_configs` interceptor. 
The (possibly modified) response returned by + `post_list_trust_configs` will be passed to + `post_list_trust_configs_with_metadata`. + """ + return response, metadata + def pre_update_certificate( self, request: certificate_manager.UpdateCertificateRequest, @@ -956,12 +1536,35 @@ def post_update_certificate( ) -> operations_pb2.Operation: """Post-rpc interceptor for update_certificate - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_certificate_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CertificateManager server but before - it is returned to user code. + it is returned to user code. This `post_update_certificate` interceptor runs + before the `post_update_certificate_with_metadata` interceptor. """ return response + def post_update_certificate_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_certificate + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CertificateManager server but before it is returned to user code. + + We recommend only using this `post_update_certificate_with_metadata` + interceptor in new development instead of the `post_update_certificate` interceptor. + When both interceptors are used, this `post_update_certificate_with_metadata` interceptor runs after the + `post_update_certificate` interceptor. The (possibly modified) response returned by + `post_update_certificate` will be passed to + `post_update_certificate_with_metadata`. + """ + return response, metadata + def pre_update_certificate_map( self, request: certificate_manager.UpdateCertificateMapRequest, @@ -982,12 +1585,35 @@ def post_update_certificate_map( ) -> operations_pb2.Operation: """Post-rpc interceptor for update_certificate_map - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_certificate_map_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CertificateManager server but before - it is returned to user code. + it is returned to user code. This `post_update_certificate_map` interceptor runs + before the `post_update_certificate_map_with_metadata` interceptor. """ return response + def post_update_certificate_map_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_certificate_map + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CertificateManager server but before it is returned to user code. + + We recommend only using this `post_update_certificate_map_with_metadata` + interceptor in new development instead of the `post_update_certificate_map` interceptor. + When both interceptors are used, this `post_update_certificate_map_with_metadata` interceptor runs after the + `post_update_certificate_map` interceptor. The (possibly modified) response returned by + `post_update_certificate_map` will be passed to + `post_update_certificate_map_with_metadata`. 
+ """ + return response, metadata + def pre_update_certificate_map_entry( self, request: certificate_manager.UpdateCertificateMapEntryRequest, @@ -1008,12 +1634,35 @@ def post_update_certificate_map_entry( ) -> operations_pb2.Operation: """Post-rpc interceptor for update_certificate_map_entry - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_certificate_map_entry_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CertificateManager server but before - it is returned to user code. + it is returned to user code. This `post_update_certificate_map_entry` interceptor runs + before the `post_update_certificate_map_entry_with_metadata` interceptor. """ return response + def post_update_certificate_map_entry_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_certificate_map_entry + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CertificateManager server but before it is returned to user code. + + We recommend only using this `post_update_certificate_map_entry_with_metadata` + interceptor in new development instead of the `post_update_certificate_map_entry` interceptor. + When both interceptors are used, this `post_update_certificate_map_entry_with_metadata` interceptor runs after the + `post_update_certificate_map_entry` interceptor. The (possibly modified) response returned by + `post_update_certificate_map_entry` will be passed to + `post_update_certificate_map_entry_with_metadata`. + """ + return response, metadata + def pre_update_dns_authorization( self, request: certificate_manager.UpdateDnsAuthorizationRequest, @@ -1034,12 +1683,35 @@ def post_update_dns_authorization( ) -> operations_pb2.Operation: """Post-rpc interceptor for update_dns_authorization - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_dns_authorization_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CertificateManager server but before - it is returned to user code. + it is returned to user code. This `post_update_dns_authorization` interceptor runs + before the `post_update_dns_authorization_with_metadata` interceptor. """ return response + def post_update_dns_authorization_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_dns_authorization + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CertificateManager server but before it is returned to user code. + + We recommend only using this `post_update_dns_authorization_with_metadata` + interceptor in new development instead of the `post_update_dns_authorization` interceptor. + When both interceptors are used, this `post_update_dns_authorization_with_metadata` interceptor runs after the + `post_update_dns_authorization` interceptor. The (possibly modified) response returned by + `post_update_dns_authorization` will be passed to + `post_update_dns_authorization_with_metadata`. 
+ """ + return response, metadata + def pre_update_trust_config( self, request: gcc_trust_config.UpdateTrustConfigRequest, @@ -1060,12 +1732,35 @@ def post_update_trust_config( ) -> operations_pb2.Operation: """Post-rpc interceptor for update_trust_config - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_trust_config_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CertificateManager server but before - it is returned to user code. + it is returned to user code. This `post_update_trust_config` interceptor runs + before the `post_update_trust_config_with_metadata` interceptor. """ return response + def post_update_trust_config_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_trust_config + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CertificateManager server but before it is returned to user code. + + We recommend only using this `post_update_trust_config_with_metadata` + interceptor in new development instead of the `post_update_trust_config` interceptor. + When both interceptors are used, this `post_update_trust_config_with_metadata` interceptor runs after the + `post_update_trust_config` interceptor. The (possibly modified) response returned by + `post_update_trust_config` will be passed to + `post_update_trust_config_with_metadata`. + """ + return response, metadata + def pre_get_location( self, request: locations_pb2.GetLocationRequest, @@ -1507,6 +2202,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_certificate(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_certificate_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1664,6 +2363,13 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_certificate_issuance_config(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_create_certificate_issuance_config_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1816,6 +2522,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_certificate_map(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_certificate_map_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1967,6 +2677,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_certificate_map_entry(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_certificate_map_entry_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2119,6 +2833,10 @@ def __call__( 
json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_dns_authorization(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_dns_authorization_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2269,6 +2987,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_trust_config(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_trust_config_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2413,6 +3135,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_certificate(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_certificate_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2564,6 +3290,13 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_certificate_issuance_config(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_delete_certificate_issuance_config_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2710,6 +3443,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_certificate_map(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_certificate_map_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2855,6 +3592,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_certificate_map_entry(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_certificate_map_entry_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3001,6 +3742,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_dns_authorization(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_dns_authorization_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3145,6 +3890,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_trust_config(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_trust_config_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3286,6 +4035,10 @@ def __call__( json_format.Parse(response.content, pb_resp, 
ignore_unknown_fields=True) resp = self._interceptor.post_get_certificate(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_certificate_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3432,6 +4185,13 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_certificate_issuance_config(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_get_certificate_issuance_config_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3581,6 +4341,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_certificate_map(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_certificate_map_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3728,6 +4492,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_certificate_map_entry(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_certificate_map_entry_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3878,6 +4646,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_dns_authorization(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_dns_authorization_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4023,6 +4795,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_trust_config(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_trust_config_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4176,6 +4952,13 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_certificate_issuance_configs(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_list_certificate_issuance_configs_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4322,6 +5105,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_certificate_map_entries(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_certificate_map_entries_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4471,6 +5258,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) 
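Every `__call__` gains the same two-step addition: build a metadata list from the HTTP response headers, then pass it through the `_with_metadata` hook while keeping only the returned response. Here is a small standalone sketch of that conversion; the `FakeResponse` class and header values are made up for illustration.

from typing import Dict, List, Tuple


class FakeResponse:
    """Stand-in for the HTTP response object used by the REST transport."""

    def __init__(self, headers: Dict[str, str]) -> None:
        self.headers = headers


def build_response_metadata(response: FakeResponse) -> List[Tuple[str, str]]:
    # Mirrors the generated wiring: [(k, str(v)) for k, v in response.headers.items()]
    return [(k, str(v)) for k, v in response.headers.items()]


metadata = build_response_metadata(
    FakeResponse({"content-type": "application/json", "x-request-id": "abc123"})
)
assert metadata == [("content-type", "application/json"), ("x-request-id", "abc123")]
# The transport then calls, e.g.:
#   resp, _ = self._interceptor.post_get_certificate_with_metadata(resp, metadata)
# Note the `_`: any metadata the hook returns is discarded; only the
# (possibly modified) response continues through the logging and return path.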
resp = self._interceptor.post_list_certificate_maps(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_certificate_maps_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4618,6 +5409,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_certificates(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_certificates_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4765,6 +5560,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_dns_authorizations(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_dns_authorizations_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4912,6 +5711,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_trust_configs(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_trust_configs_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -5064,6 +5867,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_certificate(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_certificate_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -5216,6 +6023,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_certificate_map(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_certificate_map_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -5367,6 +6178,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_certificate_map_entry(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_certificate_map_entry_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -5519,6 +6334,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_dns_authorization(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_dns_authorization_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -5669,6 +6488,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_trust_config(resp) + response_metadata = [(k, str(v)) for k, v 
in response.headers.items()] + resp, _ = self._interceptor.post_update_trust_config_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-certificate-manager/samples/generated_samples/snippet_metadata_google.cloud.certificatemanager.v1.json b/packages/google-cloud-certificate-manager/samples/generated_samples/snippet_metadata_google.cloud.certificatemanager.v1.json index 2b92fd19d3ea..38bba2d83f89 100644 --- a/packages/google-cloud-certificate-manager/samples/generated_samples/snippet_metadata_google.cloud.certificatemanager.v1.json +++ b/packages/google-cloud-certificate-manager/samples/generated_samples/snippet_metadata_google.cloud.certificatemanager.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-certificate-manager", - "version": "1.9.0" + "version": "1.10.0" }, "snippets": [ { diff --git a/packages/google-cloud-certificate-manager/tests/unit/gapic/certificate_manager_v1/test_certificate_manager.py b/packages/google-cloud-certificate-manager/tests/unit/gapic/certificate_manager_v1/test_certificate_manager.py index 15cfd1ac8f70..a2c229b0619c 100644 --- a/packages/google-cloud-certificate-manager/tests/unit/gapic/certificate_manager_v1/test_certificate_manager.py +++ b/packages/google-cloud-certificate-manager/tests/unit/gapic/certificate_manager_v1/test_certificate_manager.py @@ -81,6 +81,13 @@ from google.cloud.certificate_manager_v1.types import certificate_manager from google.cloud.certificate_manager_v1.types import trust_config +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -352,6 +359,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
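The parametrized tests added below pin down when credential info is attached to an error's details: only for 401/403/404 responses, and only when the credentials object exposes `get_cred_info()` returning something non-empty. As a quick reference, here is a hedged re-statement of that asserted behavior as a standalone helper; the function name is illustrative, and the real logic lives on the client as `_add_cred_info_for_auth_errors`.

import json
from typing import List, Optional


def add_cred_info_sketch(
    details: List[str], error_code: int, cred_info_json: Optional[dict]
) -> List[str]:
    # Append the JSON-encoded credential info only for auth-related codes.
    if cred_info_json is not None and error_code in (401, 403, 404):
        return details + [json.dumps(cred_info_json)]
    return details


cred_info = {"credential_type": "service account credentials"}
assert add_cred_info_sketch(["foo"], 403, cred_info) == ["foo", json.dumps(cred_info)]
assert add_cred_info_sketch(["foo"], 500, cred_info) == ["foo"]
assert add_cred_info_sketch(["foo"], 401, None) == ["foo"]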
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = CertificateManagerClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = CertificateManagerClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -20478,10 +20528,14 @@ def test_list_certificates_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.CertificateManagerRestInterceptor, "post_list_certificates" ) as post, mock.patch.object( + transports.CertificateManagerRestInterceptor, + "post_list_certificates_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.CertificateManagerRestInterceptor, "pre_list_certificates" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = certificate_manager.ListCertificatesRequest.pb( certificate_manager.ListCertificatesRequest() ) @@ -20507,6 +20561,10 @@ def test_list_certificates_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = certificate_manager.ListCertificatesResponse() + post_with_metadata.return_value = ( + certificate_manager.ListCertificatesResponse(), + metadata, + ) client.list_certificates( request, @@ -20518,6 +20576,7 @@ def test_list_certificates_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_certificate_rest_bad_request( @@ -20610,10 +20669,14 @@ def test_get_certificate_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.CertificateManagerRestInterceptor, "post_get_certificate" ) as post, mock.patch.object( + transports.CertificateManagerRestInterceptor, + "post_get_certificate_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.CertificateManagerRestInterceptor, "pre_get_certificate" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = certificate_manager.GetCertificateRequest.pb( certificate_manager.GetCertificateRequest() ) @@ -20639,6 +20702,7 @@ def test_get_certificate_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = certificate_manager.Certificate() + post_with_metadata.return_value = certificate_manager.Certificate(), metadata client.get_certificate( request, @@ -20650,6 +20714,7 @@ def 
test_get_certificate_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_certificate_rest_bad_request( @@ -20830,10 +20895,14 @@ def test_create_certificate_rest_interceptors(null_interceptor): ), mock.patch.object( transports.CertificateManagerRestInterceptor, "post_create_certificate" ) as post, mock.patch.object( + transports.CertificateManagerRestInterceptor, + "post_create_certificate_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.CertificateManagerRestInterceptor, "pre_create_certificate" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = certificate_manager.CreateCertificateRequest.pb( certificate_manager.CreateCertificateRequest() ) @@ -20857,6 +20926,7 @@ def test_create_certificate_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_certificate( request, @@ -20868,6 +20938,7 @@ def test_create_certificate_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_certificate_rest_bad_request( @@ -21056,10 +21127,14 @@ def test_update_certificate_rest_interceptors(null_interceptor): ), mock.patch.object( transports.CertificateManagerRestInterceptor, "post_update_certificate" ) as post, mock.patch.object( + transports.CertificateManagerRestInterceptor, + "post_update_certificate_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.CertificateManagerRestInterceptor, "pre_update_certificate" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = certificate_manager.UpdateCertificateRequest.pb( certificate_manager.UpdateCertificateRequest() ) @@ -21083,6 +21158,7 @@ def test_update_certificate_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.update_certificate( request, @@ -21094,6 +21170,7 @@ def test_update_certificate_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_certificate_rest_bad_request( @@ -21174,10 +21251,14 @@ def test_delete_certificate_rest_interceptors(null_interceptor): ), mock.patch.object( transports.CertificateManagerRestInterceptor, "post_delete_certificate" ) as post, mock.patch.object( + transports.CertificateManagerRestInterceptor, + "post_delete_certificate_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.CertificateManagerRestInterceptor, "pre_delete_certificate" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = certificate_manager.DeleteCertificateRequest.pb( certificate_manager.DeleteCertificateRequest() ) @@ -21201,6 +21282,7 @@ def test_delete_certificate_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.delete_certificate( request, @@ -21212,6 +21294,7 @@ def test_delete_certificate_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + 
post_with_metadata.assert_called_once() def test_list_certificate_maps_rest_bad_request( @@ -21298,10 +21381,14 @@ def test_list_certificate_maps_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.CertificateManagerRestInterceptor, "post_list_certificate_maps" ) as post, mock.patch.object( + transports.CertificateManagerRestInterceptor, + "post_list_certificate_maps_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.CertificateManagerRestInterceptor, "pre_list_certificate_maps" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = certificate_manager.ListCertificateMapsRequest.pb( certificate_manager.ListCertificateMapsRequest() ) @@ -21327,6 +21414,10 @@ def test_list_certificate_maps_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = certificate_manager.ListCertificateMapsResponse() + post_with_metadata.return_value = ( + certificate_manager.ListCertificateMapsResponse(), + metadata, + ) client.list_certificate_maps( request, @@ -21338,6 +21429,7 @@ def test_list_certificate_maps_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_certificate_map_rest_bad_request( @@ -21428,10 +21520,14 @@ def test_get_certificate_map_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.CertificateManagerRestInterceptor, "post_get_certificate_map" ) as post, mock.patch.object( + transports.CertificateManagerRestInterceptor, + "post_get_certificate_map_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.CertificateManagerRestInterceptor, "pre_get_certificate_map" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = certificate_manager.GetCertificateMapRequest.pb( certificate_manager.GetCertificateMapRequest() ) @@ -21457,6 +21553,7 @@ def test_get_certificate_map_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = certificate_manager.CertificateMap() + post_with_metadata.return_value = certificate_manager.CertificateMap(), metadata client.get_certificate_map( request, @@ -21468,6 +21565,7 @@ def test_get_certificate_map_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_certificate_map_rest_bad_request( @@ -21631,10 +21729,14 @@ def test_create_certificate_map_rest_interceptors(null_interceptor): ), mock.patch.object( transports.CertificateManagerRestInterceptor, "post_create_certificate_map" ) as post, mock.patch.object( + transports.CertificateManagerRestInterceptor, + "post_create_certificate_map_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.CertificateManagerRestInterceptor, "pre_create_certificate_map" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = certificate_manager.CreateCertificateMapRequest.pb( certificate_manager.CreateCertificateMapRequest() ) @@ -21658,6 +21760,7 @@ def test_create_certificate_map_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_certificate_map( request, @@ -21669,6 +21772,7 @@ def 
test_create_certificate_map_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_certificate_map_rest_bad_request( @@ -21840,10 +21944,14 @@ def test_update_certificate_map_rest_interceptors(null_interceptor): ), mock.patch.object( transports.CertificateManagerRestInterceptor, "post_update_certificate_map" ) as post, mock.patch.object( + transports.CertificateManagerRestInterceptor, + "post_update_certificate_map_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.CertificateManagerRestInterceptor, "pre_update_certificate_map" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = certificate_manager.UpdateCertificateMapRequest.pb( certificate_manager.UpdateCertificateMapRequest() ) @@ -21867,6 +21975,7 @@ def test_update_certificate_map_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.update_certificate_map( request, @@ -21878,6 +21987,7 @@ def test_update_certificate_map_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_certificate_map_rest_bad_request( @@ -21962,10 +22072,14 @@ def test_delete_certificate_map_rest_interceptors(null_interceptor): ), mock.patch.object( transports.CertificateManagerRestInterceptor, "post_delete_certificate_map" ) as post, mock.patch.object( + transports.CertificateManagerRestInterceptor, + "post_delete_certificate_map_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.CertificateManagerRestInterceptor, "pre_delete_certificate_map" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = certificate_manager.DeleteCertificateMapRequest.pb( certificate_manager.DeleteCertificateMapRequest() ) @@ -21989,6 +22103,7 @@ def test_delete_certificate_map_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.delete_certificate_map( request, @@ -22000,6 +22115,7 @@ def test_delete_certificate_map_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_certificate_map_entries_rest_bad_request( @@ -22093,10 +22209,14 @@ def test_list_certificate_map_entries_rest_interceptors(null_interceptor): transports.CertificateManagerRestInterceptor, "post_list_certificate_map_entries", ) as post, mock.patch.object( + transports.CertificateManagerRestInterceptor, + "post_list_certificate_map_entries_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.CertificateManagerRestInterceptor, "pre_list_certificate_map_entries" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = certificate_manager.ListCertificateMapEntriesRequest.pb( certificate_manager.ListCertificateMapEntriesRequest() ) @@ -22122,6 +22242,10 @@ def test_list_certificate_map_entries_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = certificate_manager.ListCertificateMapEntriesResponse() + post_with_metadata.return_value = ( + 
certificate_manager.ListCertificateMapEntriesResponse(), + metadata, + ) client.list_certificate_map_entries( request, @@ -22133,6 +22257,7 @@ def test_list_certificate_map_entries_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_certificate_map_entry_rest_bad_request( @@ -22228,10 +22353,14 @@ def test_get_certificate_map_entry_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.CertificateManagerRestInterceptor, "post_get_certificate_map_entry" ) as post, mock.patch.object( + transports.CertificateManagerRestInterceptor, + "post_get_certificate_map_entry_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.CertificateManagerRestInterceptor, "pre_get_certificate_map_entry" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = certificate_manager.GetCertificateMapEntryRequest.pb( certificate_manager.GetCertificateMapEntryRequest() ) @@ -22257,6 +22386,10 @@ def test_get_certificate_map_entry_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = certificate_manager.CertificateMapEntry() + post_with_metadata.return_value = ( + certificate_manager.CertificateMapEntry(), + metadata, + ) client.get_certificate_map_entry( request, @@ -22268,6 +22401,7 @@ def test_get_certificate_map_entry_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_certificate_map_entry_rest_bad_request( @@ -22435,10 +22569,14 @@ def test_create_certificate_map_entry_rest_interceptors(null_interceptor): transports.CertificateManagerRestInterceptor, "post_create_certificate_map_entry", ) as post, mock.patch.object( + transports.CertificateManagerRestInterceptor, + "post_create_certificate_map_entry_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.CertificateManagerRestInterceptor, "pre_create_certificate_map_entry" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = certificate_manager.CreateCertificateMapEntryRequest.pb( certificate_manager.CreateCertificateMapEntryRequest() ) @@ -22462,6 +22600,7 @@ def test_create_certificate_map_entry_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_certificate_map_entry( request, @@ -22473,6 +22612,7 @@ def test_create_certificate_map_entry_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_certificate_map_entry_rest_bad_request( @@ -22644,10 +22784,14 @@ def test_update_certificate_map_entry_rest_interceptors(null_interceptor): transports.CertificateManagerRestInterceptor, "post_update_certificate_map_entry", ) as post, mock.patch.object( + transports.CertificateManagerRestInterceptor, + "post_update_certificate_map_entry_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.CertificateManagerRestInterceptor, "pre_update_certificate_map_entry" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = certificate_manager.UpdateCertificateMapEntryRequest.pb( certificate_manager.UpdateCertificateMapEntryRequest() ) @@ -22671,6 +22815,7 @@ def 
test_update_certificate_map_entry_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.update_certificate_map_entry( request, @@ -22682,6 +22827,7 @@ def test_update_certificate_map_entry_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_certificate_map_entry_rest_bad_request( @@ -22767,10 +22913,14 @@ def test_delete_certificate_map_entry_rest_interceptors(null_interceptor): transports.CertificateManagerRestInterceptor, "post_delete_certificate_map_entry", ) as post, mock.patch.object( + transports.CertificateManagerRestInterceptor, + "post_delete_certificate_map_entry_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.CertificateManagerRestInterceptor, "pre_delete_certificate_map_entry" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = certificate_manager.DeleteCertificateMapEntryRequest.pb( certificate_manager.DeleteCertificateMapEntryRequest() ) @@ -22794,6 +22944,7 @@ def test_delete_certificate_map_entry_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.delete_certificate_map_entry( request, @@ -22805,6 +22956,7 @@ def test_delete_certificate_map_entry_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_dns_authorizations_rest_bad_request( @@ -22893,10 +23045,14 @@ def test_list_dns_authorizations_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.CertificateManagerRestInterceptor, "post_list_dns_authorizations" ) as post, mock.patch.object( + transports.CertificateManagerRestInterceptor, + "post_list_dns_authorizations_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.CertificateManagerRestInterceptor, "pre_list_dns_authorizations" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = certificate_manager.ListDnsAuthorizationsRequest.pb( certificate_manager.ListDnsAuthorizationsRequest() ) @@ -22922,6 +23078,10 @@ def test_list_dns_authorizations_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = certificate_manager.ListDnsAuthorizationsResponse() + post_with_metadata.return_value = ( + certificate_manager.ListDnsAuthorizationsResponse(), + metadata, + ) client.list_dns_authorizations( request, @@ -22933,6 +23093,7 @@ def test_list_dns_authorizations_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_dns_authorization_rest_bad_request( @@ -23027,10 +23188,14 @@ def test_get_dns_authorization_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.CertificateManagerRestInterceptor, "post_get_dns_authorization" ) as post, mock.patch.object( + transports.CertificateManagerRestInterceptor, + "post_get_dns_authorization_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.CertificateManagerRestInterceptor, "pre_get_dns_authorization" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() 
pb_message = certificate_manager.GetDnsAuthorizationRequest.pb( certificate_manager.GetDnsAuthorizationRequest() ) @@ -23056,6 +23221,10 @@ def test_get_dns_authorization_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = certificate_manager.DnsAuthorization() + post_with_metadata.return_value = ( + certificate_manager.DnsAuthorization(), + metadata, + ) client.get_dns_authorization( request, @@ -23067,6 +23236,7 @@ def test_get_dns_authorization_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_dns_authorization_rest_bad_request( @@ -23230,10 +23400,14 @@ def test_create_dns_authorization_rest_interceptors(null_interceptor): ), mock.patch.object( transports.CertificateManagerRestInterceptor, "post_create_dns_authorization" ) as post, mock.patch.object( + transports.CertificateManagerRestInterceptor, + "post_create_dns_authorization_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.CertificateManagerRestInterceptor, "pre_create_dns_authorization" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = certificate_manager.CreateDnsAuthorizationRequest.pb( certificate_manager.CreateDnsAuthorizationRequest() ) @@ -23257,6 +23431,7 @@ def test_create_dns_authorization_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_dns_authorization( request, @@ -23268,6 +23443,7 @@ def test_create_dns_authorization_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_dns_authorization_rest_bad_request( @@ -23439,10 +23615,14 @@ def test_update_dns_authorization_rest_interceptors(null_interceptor): ), mock.patch.object( transports.CertificateManagerRestInterceptor, "post_update_dns_authorization" ) as post, mock.patch.object( + transports.CertificateManagerRestInterceptor, + "post_update_dns_authorization_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.CertificateManagerRestInterceptor, "pre_update_dns_authorization" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = certificate_manager.UpdateDnsAuthorizationRequest.pb( certificate_manager.UpdateDnsAuthorizationRequest() ) @@ -23466,6 +23646,7 @@ def test_update_dns_authorization_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.update_dns_authorization( request, @@ -23477,6 +23658,7 @@ def test_update_dns_authorization_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_dns_authorization_rest_bad_request( @@ -23561,10 +23743,14 @@ def test_delete_dns_authorization_rest_interceptors(null_interceptor): ), mock.patch.object( transports.CertificateManagerRestInterceptor, "post_delete_dns_authorization" ) as post, mock.patch.object( + transports.CertificateManagerRestInterceptor, + "post_delete_dns_authorization_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.CertificateManagerRestInterceptor, "pre_delete_dns_authorization" ) as pre: 
pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = certificate_manager.DeleteDnsAuthorizationRequest.pb( certificate_manager.DeleteDnsAuthorizationRequest() ) @@ -23588,6 +23774,7 @@ def test_delete_dns_authorization_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.delete_dns_authorization( request, @@ -23599,6 +23786,7 @@ def test_delete_dns_authorization_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_certificate_issuance_configs_rest_bad_request( @@ -23692,11 +23880,15 @@ def test_list_certificate_issuance_configs_rest_interceptors(null_interceptor): transports.CertificateManagerRestInterceptor, "post_list_certificate_issuance_configs", ) as post, mock.patch.object( + transports.CertificateManagerRestInterceptor, + "post_list_certificate_issuance_configs_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.CertificateManagerRestInterceptor, "pre_list_certificate_issuance_configs", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = ( certificate_issuance_config.ListCertificateIssuanceConfigsRequest.pb( certificate_issuance_config.ListCertificateIssuanceConfigsRequest() @@ -23728,6 +23920,10 @@ def test_list_certificate_issuance_configs_rest_interceptors(null_interceptor): post.return_value = ( certificate_issuance_config.ListCertificateIssuanceConfigsResponse() ) + post_with_metadata.return_value = ( + certificate_issuance_config.ListCertificateIssuanceConfigsResponse(), + metadata, + ) client.list_certificate_issuance_configs( request, @@ -23739,6 +23935,7 @@ def test_list_certificate_issuance_configs_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_certificate_issuance_config_rest_bad_request( @@ -23839,11 +24036,15 @@ def test_get_certificate_issuance_config_rest_interceptors(null_interceptor): transports.CertificateManagerRestInterceptor, "post_get_certificate_issuance_config", ) as post, mock.patch.object( + transports.CertificateManagerRestInterceptor, + "post_get_certificate_issuance_config_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.CertificateManagerRestInterceptor, "pre_get_certificate_issuance_config", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = certificate_issuance_config.GetCertificateIssuanceConfigRequest.pb( certificate_issuance_config.GetCertificateIssuanceConfigRequest() ) @@ -23869,6 +24070,10 @@ def test_get_certificate_issuance_config_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = certificate_issuance_config.CertificateIssuanceConfig() + post_with_metadata.return_value = ( + certificate_issuance_config.CertificateIssuanceConfig(), + metadata, + ) client.get_certificate_issuance_config( request, @@ -23880,6 +24085,7 @@ def test_get_certificate_issuance_config_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_certificate_issuance_config_rest_bad_request( @@ -24047,11 +24253,15 @@ def test_create_certificate_issuance_config_rest_interceptors(null_interceptor): 
transports.CertificateManagerRestInterceptor, "post_create_certificate_issuance_config", ) as post, mock.patch.object( + transports.CertificateManagerRestInterceptor, + "post_create_certificate_issuance_config_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.CertificateManagerRestInterceptor, "pre_create_certificate_issuance_config", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = ( gcc_certificate_issuance_config.CreateCertificateIssuanceConfigRequest.pb( gcc_certificate_issuance_config.CreateCertificateIssuanceConfigRequest() @@ -24079,6 +24289,7 @@ def test_create_certificate_issuance_config_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_certificate_issuance_config( request, @@ -24090,6 +24301,7 @@ def test_create_certificate_issuance_config_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_certificate_issuance_config_rest_bad_request( @@ -24175,11 +24387,15 @@ def test_delete_certificate_issuance_config_rest_interceptors(null_interceptor): transports.CertificateManagerRestInterceptor, "post_delete_certificate_issuance_config", ) as post, mock.patch.object( + transports.CertificateManagerRestInterceptor, + "post_delete_certificate_issuance_config_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.CertificateManagerRestInterceptor, "pre_delete_certificate_issuance_config", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = ( certificate_issuance_config.DeleteCertificateIssuanceConfigRequest.pb( certificate_issuance_config.DeleteCertificateIssuanceConfigRequest() @@ -24205,6 +24421,7 @@ def test_delete_certificate_issuance_config_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.delete_certificate_issuance_config( request, @@ -24216,6 +24433,7 @@ def test_delete_certificate_issuance_config_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_trust_configs_rest_bad_request( @@ -24302,10 +24520,14 @@ def test_list_trust_configs_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.CertificateManagerRestInterceptor, "post_list_trust_configs" ) as post, mock.patch.object( + transports.CertificateManagerRestInterceptor, + "post_list_trust_configs_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.CertificateManagerRestInterceptor, "pre_list_trust_configs" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = trust_config.ListTrustConfigsRequest.pb( trust_config.ListTrustConfigsRequest() ) @@ -24331,6 +24553,10 @@ def test_list_trust_configs_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = trust_config.ListTrustConfigsResponse() + post_with_metadata.return_value = ( + trust_config.ListTrustConfigsResponse(), + metadata, + ) client.list_trust_configs( request, @@ -24342,6 +24568,7 @@ def test_list_trust_configs_rest_interceptors(null_interceptor): pre.assert_called_once() 
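All of these interceptor tests follow one mechanical recipe: patch the pre hook and both post hooks on the interceptor class, confirm none has fired, stub their return values, issue the RPC, and then assert that each hook ran exactly once. A dependency-free sketch of that `mock.patch.object` technique, using toy names rather than the generated ones:

from unittest import mock

class Interceptor:
    def post_call(self, response):
        return response

    def post_call_with_metadata(self, response, metadata):
        return response, metadata

def invoke(interceptor):
    # Stand-in for the generated transport: run the response through both hooks.
    response = interceptor.post_call("payload")
    response, _ = interceptor.post_call_with_metadata(response, [("x-test", "1")])
    return response

with mock.patch.object(Interceptor, "post_call") as post, mock.patch.object(
    Interceptor, "post_call_with_metadata"
) as post_with_metadata:
    post.assert_not_called()
    post_with_metadata.assert_not_called()
    post.return_value = "payload"
    post_with_metadata.return_value = ("payload", [("x-test", "1")])
    invoke(Interceptor())
    post.assert_called_once()
    post_with_metadata.assert_called_once()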
post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_trust_config_rest_bad_request( @@ -24430,10 +24657,14 @@ def test_get_trust_config_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.CertificateManagerRestInterceptor, "post_get_trust_config" ) as post, mock.patch.object( + transports.CertificateManagerRestInterceptor, + "post_get_trust_config_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.CertificateManagerRestInterceptor, "pre_get_trust_config" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = trust_config.GetTrustConfigRequest.pb( trust_config.GetTrustConfigRequest() ) @@ -24457,6 +24688,7 @@ def test_get_trust_config_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = trust_config.TrustConfig() + post_with_metadata.return_value = trust_config.TrustConfig(), metadata client.get_trust_config( request, @@ -24468,6 +24700,7 @@ def test_get_trust_config_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_trust_config_rest_bad_request( @@ -24629,10 +24862,14 @@ def test_create_trust_config_rest_interceptors(null_interceptor): ), mock.patch.object( transports.CertificateManagerRestInterceptor, "post_create_trust_config" ) as post, mock.patch.object( + transports.CertificateManagerRestInterceptor, + "post_create_trust_config_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.CertificateManagerRestInterceptor, "pre_create_trust_config" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gcc_trust_config.CreateTrustConfigRequest.pb( gcc_trust_config.CreateTrustConfigRequest() ) @@ -24656,6 +24893,7 @@ def test_create_trust_config_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_trust_config( request, @@ -24667,6 +24905,7 @@ def test_create_trust_config_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_trust_config_rest_bad_request( @@ -24836,10 +25075,14 @@ def test_update_trust_config_rest_interceptors(null_interceptor): ), mock.patch.object( transports.CertificateManagerRestInterceptor, "post_update_trust_config" ) as post, mock.patch.object( + transports.CertificateManagerRestInterceptor, + "post_update_trust_config_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.CertificateManagerRestInterceptor, "pre_update_trust_config" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gcc_trust_config.UpdateTrustConfigRequest.pb( gcc_trust_config.UpdateTrustConfigRequest() ) @@ -24863,6 +25106,7 @@ def test_update_trust_config_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.update_trust_config( request, @@ -24874,6 +25118,7 @@ def test_update_trust_config_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_trust_config_rest_bad_request( @@ -24954,10 
+25199,14 @@ def test_delete_trust_config_rest_interceptors(null_interceptor): ), mock.patch.object( transports.CertificateManagerRestInterceptor, "post_delete_trust_config" ) as post, mock.patch.object( + transports.CertificateManagerRestInterceptor, + "post_delete_trust_config_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.CertificateManagerRestInterceptor, "pre_delete_trust_config" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = trust_config.DeleteTrustConfigRequest.pb( trust_config.DeleteTrustConfigRequest() ) @@ -24981,6 +25230,7 @@ def test_delete_trust_config_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.delete_trust_config( request, @@ -24992,6 +25242,7 @@ def test_delete_trust_config_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationRequest): diff --git a/packages/google-cloud-channel/CHANGELOG.md b/packages/google-cloud-channel/CHANGELOG.md index 59989c93b8b2..1e23ed267904 100644 --- a/packages/google-cloud-channel/CHANGELOG.md +++ b/packages/google-cloud-channel/CHANGELOG.md @@ -1,5 +1,13 @@ # Changelog +## [1.22.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-channel-v1.21.0...google-cloud-channel-v1.22.0) (2025-02-12) + + +### Features + +* Add REST Interceptors which support reading metadata ([e22e2bd](https://github.com/googleapis/google-cloud-python/commit/e22e2bde55d11d2f85e9d2caf1d152a4027f88cf)) +* Add support for reading selective GAPIC generation methods from service YAML ([e22e2bd](https://github.com/googleapis/google-cloud-python/commit/e22e2bde55d11d2f85e9d2caf1d152a4027f88cf)) + ## [1.21.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-channel-v1.20.1...google-cloud-channel-v1.21.0) (2024-12-12) diff --git a/packages/google-cloud-channel/google/cloud/channel/gapic_version.py b/packages/google-cloud-channel/google/cloud/channel/gapic_version.py index 785067d93b3c..8b80cf328714 100644 --- a/packages/google-cloud-channel/google/cloud/channel/gapic_version.py +++ b/packages/google-cloud-channel/google/cloud/channel/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.21.0" # {x-release-please-version} +__version__ = "1.22.0" # {x-release-please-version} diff --git a/packages/google-cloud-channel/google/cloud/channel_v1/gapic_version.py b/packages/google-cloud-channel/google/cloud/channel_v1/gapic_version.py index 785067d93b3c..8b80cf328714 100644 --- a/packages/google-cloud-channel/google/cloud/channel_v1/gapic_version.py +++ b/packages/google-cloud-channel/google/cloud/channel_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "1.21.0" # {x-release-please-version} +__version__ = "1.22.0" # {x-release-please-version} diff --git a/packages/google-cloud-channel/google/cloud/channel_v1/services/cloud_channel_reports_service/client.py b/packages/google-cloud-channel/google/cloud/channel_v1/services/cloud_channel_reports_service/client.py index b55c36bc45a8..a98ed7ad601f 100644 --- a/packages/google-cloud-channel/google/cloud/channel_v1/services/cloud_channel_reports_service/client.py +++ b/packages/google-cloud-channel/google/cloud/channel_v1/services/cloud_channel_reports_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -511,6 +513,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -1173,16 +1202,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -1228,16 +1261,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def delete_operation( self, diff --git a/packages/google-cloud-channel/google/cloud/channel_v1/services/cloud_channel_service/client.py b/packages/google-cloud-channel/google/cloud/channel_v1/services/cloud_channel_service/client.py index 730a4113e20d..7870f33f4f0a 100644 --- a/packages/google-cloud-channel/google/cloud/channel_v1/services/cloud_channel_service/client.py +++ b/packages/google-cloud-channel/google/cloud/channel_v1/services/cloud_channel_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. 
# from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -692,6 +694,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -6984,16 +7013,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -7039,16 +7072,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. 
+ return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def delete_operation( self, diff --git a/packages/google-cloud-channel/samples/generated_samples/snippet_metadata_google.cloud.channel.v1.json b/packages/google-cloud-channel/samples/generated_samples/snippet_metadata_google.cloud.channel.v1.json index ee5ff40f5ba0..bf355600a904 100644 --- a/packages/google-cloud-channel/samples/generated_samples/snippet_metadata_google.cloud.channel.v1.json +++ b/packages/google-cloud-channel/samples/generated_samples/snippet_metadata_google.cloud.channel.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-channel", - "version": "1.21.0" + "version": "1.22.0" }, "snippets": [ { diff --git a/packages/google-cloud-channel/tests/unit/gapic/channel_v1/test_cloud_channel_reports_service.py b/packages/google-cloud-channel/tests/unit/gapic/channel_v1/test_cloud_channel_reports_service.py index e9c6f7cf56a8..b3a5a8719180 100644 --- a/packages/google-cloud-channel/tests/unit/gapic/channel_v1/test_cloud_channel_reports_service.py +++ b/packages/google-cloud-channel/tests/unit/gapic/channel_v1/test_cloud_channel_reports_service.py @@ -22,6 +22,7 @@ except ImportError: # pragma: NO COVER import mock +import json import math from google.api_core import api_core_version @@ -68,6 +69,13 @@ ) from google.cloud.channel_v1.types import operations, reports_service +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -347,6 +355,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
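The client changes above mean that, whenever the transport's credentials expose `get_cred_info()` (available in google-auth 2.35.0 and newer), a JSON description of the active credential is appended to the error details of 401/403/404 failures before the exception is re-raised. A hedged sketch of how calling code might surface that detail; the wrapper function is illustrative, not part of the library:

import json

from google.api_core import exceptions as core_exceptions

def explain_auth_failure(call):
    """Run `call` and, if it fails with an API error, print any credential info detail."""
    try:
        return call()
    except core_exceptions.GoogleAPICallError as exc:
        for detail in exc.details or []:
            # The appended detail is a JSON string such as
            # {"credential_source": ..., "credential_type": ..., "principal": ...}.
            if not isinstance(detail, str):
                continue
            try:
                info = json.loads(detail)
            except ValueError:
                continue
            print("request used credential principal:", info.get("principal"))
        raise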
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = CloudChannelReportsServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = CloudChannelReportsServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-channel/tests/unit/gapic/channel_v1/test_cloud_channel_service.py b/packages/google-cloud-channel/tests/unit/gapic/channel_v1/test_cloud_channel_service.py index 765870dc1078..9aebf2206afe 100644 --- a/packages/google-cloud-channel/tests/unit/gapic/channel_v1/test_cloud_channel_service.py +++ b/packages/google-cloud-channel/tests/unit/gapic/channel_v1/test_cloud_channel_service.py @@ -22,6 +22,7 @@ except ImportError: # pragma: NO COVER import mock +import json import math from google.api_core import api_core_version @@ -83,6 +84,13 @@ service, ) +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -356,6 +364,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
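These tests lean on the spec argument of `unittest.mock.Mock` to shape the fake credential: `mock.Mock(["get_cred_info"])` produces a mock whose attribute surface is limited to that one name, while `mock.Mock([])` exposes no attributes at all, so the client's `hasattr(cred, "get_cred_info")` guard takes the intended branch in each test. A small standalone illustration:

from unittest import mock

with_info = mock.Mock(["get_cred_info"])  # spec restricted to a single attribute
without_info = mock.Mock([])              # spec with no attributes

assert hasattr(with_info, "get_cred_info")
assert not hasattr(without_info, "get_cred_info")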
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = CloudChannelServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = CloudChannelServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-cloudcontrolspartner/CHANGELOG.md b/packages/google-cloud-cloudcontrolspartner/CHANGELOG.md index 409585589006..bd08e7fd2f2e 100644 --- a/packages/google-cloud-cloudcontrolspartner/CHANGELOG.md +++ b/packages/google-cloud-cloudcontrolspartner/CHANGELOG.md @@ -1,5 +1,32 @@ # Changelog +## [0.2.5](https://github.com/googleapis/google-cloud-python/compare/google-cloud-cloudcontrolspartner-v0.2.4...google-cloud-cloudcontrolspartner-v0.2.5) (2025-02-12) + + +### Features + +* Add REST Interceptors which support reading metadata ([e22e2bd](https://github.com/googleapis/google-cloud-python/commit/e22e2bde55d11d2f85e9d2caf1d152a4027f88cf)) +* Add support for reading selective GAPIC generation methods from service YAML ([e22e2bd](https://github.com/googleapis/google-cloud-python/commit/e22e2bde55d11d2f85e9d2caf1d152a4027f88cf)) + +## [0.2.4](https://github.com/googleapis/google-cloud-python/compare/google-cloud-cloudcontrolspartner-v0.2.3...google-cloud-cloudcontrolspartner-v0.2.4) (2025-01-27) + + +### Features + +* A new field `organization_domain` is added to message `.google.cloud.cloudcontrolspartner.v1beta.Customer` ([1913b6c](https://github.com/googleapis/google-cloud-python/commit/1913b6cc099c50650b2a35c2f05b7e0da1157791)) +* A new message `CreateCustomerRequest` is added ([1913b6c](https://github.com/googleapis/google-cloud-python/commit/1913b6cc099c50650b2a35c2f05b7e0da1157791)) +* A new message `DeleteCustomerRequest` is added ([1913b6c](https://github.com/googleapis/google-cloud-python/commit/1913b6cc099c50650b2a35c2f05b7e0da1157791)) +* A new message `UpdateCustomerRequest` is added ([1913b6c](https://github.com/googleapis/google-cloud-python/commit/1913b6cc099c50650b2a35c2f05b7e0da1157791)) +* A new method `CreateCustomer` is added to service `CloudControlsPartnerCore` ([1913b6c](https://github.com/googleapis/google-cloud-python/commit/1913b6cc099c50650b2a35c2f05b7e0da1157791)) +* A new method `DeleteCustomer` is added to service `CloudControlsPartnerCore` ([1913b6c](https://github.com/googleapis/google-cloud-python/commit/1913b6cc099c50650b2a35c2f05b7e0da1157791)) +* A new method `UpdateCustomer` 
is added to service `CloudControlsPartnerCore` ([1913b6c](https://github.com/googleapis/google-cloud-python/commit/1913b6cc099c50650b2a35c2f05b7e0da1157791)) + + +### Documentation + +* A comment for enum value `VIRTRU` in enum `EkmSolution` is changed ([1913b6c](https://github.com/googleapis/google-cloud-python/commit/1913b6cc099c50650b2a35c2f05b7e0da1157791)) +* A comment for field `requested_cancellation` in message `.google.cloud.cloudcontrolspartner.v1beta.OperationMetadata` is changed ([1913b6c](https://github.com/googleapis/google-cloud-python/commit/1913b6cc099c50650b2a35c2f05b7e0da1157791)) + ## [0.2.3](https://github.com/googleapis/google-cloud-python/compare/google-cloud-cloudcontrolspartner-v0.2.2...google-cloud-cloudcontrolspartner-v0.2.3) (2024-12-12) diff --git a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner/gapic_version.py b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner/gapic_version.py index 355df6b536f8..c1954d3635eb 100644 --- a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner/gapic_version.py +++ b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.2.3" # {x-release-please-version} +__version__ = "0.2.5" # {x-release-please-version} diff --git a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1/gapic_version.py b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1/gapic_version.py index 355df6b536f8..c1954d3635eb 100644 --- a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1/gapic_version.py +++ b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.2.3" # {x-release-please-version} +__version__ = "0.2.5" # {x-release-please-version} diff --git a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1/services/cloud_controls_partner_core/client.py b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1/services/cloud_controls_partner_core/client.py index 7af28d16e14b..f5c85d72c116 100644 --- a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1/services/cloud_controls_partner_core/client.py +++ b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1/services/cloud_controls_partner_core/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -615,6 +617,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. 
+ """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. diff --git a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1/services/cloud_controls_partner_core/transports/rest.py b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1/services/cloud_controls_partner_core/transports/rest.py index 699221af62ff..503cbbef0d5b 100644 --- a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1/services/cloud_controls_partner_core/transports/rest.py +++ b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1/services/cloud_controls_partner_core/transports/rest.py @@ -160,12 +160,35 @@ def pre_get_customer( def post_get_customer(self, response: customers.Customer) -> customers.Customer: """Post-rpc interceptor for get_customer - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_customer_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CloudControlsPartnerCore server but before - it is returned to user code. + it is returned to user code. This `post_get_customer` interceptor runs + before the `post_get_customer_with_metadata` interceptor. """ return response + def post_get_customer_with_metadata( + self, + response: customers.Customer, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[customers.Customer, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_customer + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudControlsPartnerCore server but before it is returned to user code. + + We recommend only using this `post_get_customer_with_metadata` + interceptor in new development instead of the `post_get_customer` interceptor. + When both interceptors are used, this `post_get_customer_with_metadata` interceptor runs after the + `post_get_customer` interceptor. The (possibly modified) response returned by + `post_get_customer` will be passed to + `post_get_customer_with_metadata`. + """ + return response, metadata + def pre_get_ekm_connections( self, request: ekm_connections.GetEkmConnectionsRequest, @@ -186,12 +209,35 @@ def post_get_ekm_connections( ) -> ekm_connections.EkmConnections: """Post-rpc interceptor for get_ekm_connections - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_ekm_connections_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CloudControlsPartnerCore server but before - it is returned to user code. + it is returned to user code. This `post_get_ekm_connections` interceptor runs + before the `post_get_ekm_connections_with_metadata` interceptor. 
""" return response + def post_get_ekm_connections_with_metadata( + self, + response: ekm_connections.EkmConnections, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ekm_connections.EkmConnections, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_ekm_connections + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudControlsPartnerCore server but before it is returned to user code. + + We recommend only using this `post_get_ekm_connections_with_metadata` + interceptor in new development instead of the `post_get_ekm_connections` interceptor. + When both interceptors are used, this `post_get_ekm_connections_with_metadata` interceptor runs after the + `post_get_ekm_connections` interceptor. The (possibly modified) response returned by + `post_get_ekm_connections` will be passed to + `post_get_ekm_connections_with_metadata`. + """ + return response, metadata + def pre_get_partner( self, request: partners.GetPartnerRequest, @@ -207,12 +253,35 @@ def pre_get_partner( def post_get_partner(self, response: partners.Partner) -> partners.Partner: """Post-rpc interceptor for get_partner - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_partner_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CloudControlsPartnerCore server but before - it is returned to user code. + it is returned to user code. This `post_get_partner` interceptor runs + before the `post_get_partner_with_metadata` interceptor. """ return response + def post_get_partner_with_metadata( + self, + response: partners.Partner, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[partners.Partner, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_partner + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudControlsPartnerCore server but before it is returned to user code. + + We recommend only using this `post_get_partner_with_metadata` + interceptor in new development instead of the `post_get_partner` interceptor. + When both interceptors are used, this `post_get_partner_with_metadata` interceptor runs after the + `post_get_partner` interceptor. The (possibly modified) response returned by + `post_get_partner` will be passed to + `post_get_partner_with_metadata`. + """ + return response, metadata + def pre_get_partner_permissions( self, request: partner_permissions.GetPartnerPermissionsRequest, @@ -233,12 +302,37 @@ def post_get_partner_permissions( ) -> partner_permissions.PartnerPermissions: """Post-rpc interceptor for get_partner_permissions - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_partner_permissions_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CloudControlsPartnerCore server but before - it is returned to user code. + it is returned to user code. This `post_get_partner_permissions` interceptor runs + before the `post_get_partner_permissions_with_metadata` interceptor. 
""" return response + def post_get_partner_permissions_with_metadata( + self, + response: partner_permissions.PartnerPermissions, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + partner_permissions.PartnerPermissions, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for get_partner_permissions + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudControlsPartnerCore server but before it is returned to user code. + + We recommend only using this `post_get_partner_permissions_with_metadata` + interceptor in new development instead of the `post_get_partner_permissions` interceptor. + When both interceptors are used, this `post_get_partner_permissions_with_metadata` interceptor runs after the + `post_get_partner_permissions` interceptor. The (possibly modified) response returned by + `post_get_partner_permissions` will be passed to + `post_get_partner_permissions_with_metadata`. + """ + return response, metadata + def pre_get_workload( self, request: customer_workloads.GetWorkloadRequest, @@ -258,12 +352,35 @@ def post_get_workload( ) -> customer_workloads.Workload: """Post-rpc interceptor for get_workload - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_workload_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CloudControlsPartnerCore server but before - it is returned to user code. + it is returned to user code. This `post_get_workload` interceptor runs + before the `post_get_workload_with_metadata` interceptor. """ return response + def post_get_workload_with_metadata( + self, + response: customer_workloads.Workload, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[customer_workloads.Workload, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_workload + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudControlsPartnerCore server but before it is returned to user code. + + We recommend only using this `post_get_workload_with_metadata` + interceptor in new development instead of the `post_get_workload` interceptor. + When both interceptors are used, this `post_get_workload_with_metadata` interceptor runs after the + `post_get_workload` interceptor. The (possibly modified) response returned by + `post_get_workload` will be passed to + `post_get_workload_with_metadata`. + """ + return response, metadata + def pre_list_access_approval_requests( self, request: access_approval_requests.ListAccessApprovalRequestsRequest, @@ -284,12 +401,38 @@ def post_list_access_approval_requests( ) -> access_approval_requests.ListAccessApprovalRequestsResponse: """Post-rpc interceptor for list_access_approval_requests - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_access_approval_requests_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CloudControlsPartnerCore server but before - it is returned to user code. + it is returned to user code. This `post_list_access_approval_requests` interceptor runs + before the `post_list_access_approval_requests_with_metadata` interceptor. 
""" return response + def post_list_access_approval_requests_with_metadata( + self, + response: access_approval_requests.ListAccessApprovalRequestsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + access_approval_requests.ListAccessApprovalRequestsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_access_approval_requests + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudControlsPartnerCore server but before it is returned to user code. + + We recommend only using this `post_list_access_approval_requests_with_metadata` + interceptor in new development instead of the `post_list_access_approval_requests` interceptor. + When both interceptors are used, this `post_list_access_approval_requests_with_metadata` interceptor runs after the + `post_list_access_approval_requests` interceptor. The (possibly modified) response returned by + `post_list_access_approval_requests` will be passed to + `post_list_access_approval_requests_with_metadata`. + """ + return response, metadata + def pre_list_customers( self, request: customers.ListCustomersRequest, @@ -307,12 +450,37 @@ def post_list_customers( ) -> customers.ListCustomersResponse: """Post-rpc interceptor for list_customers - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_customers_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CloudControlsPartnerCore server but before - it is returned to user code. + it is returned to user code. This `post_list_customers` interceptor runs + before the `post_list_customers_with_metadata` interceptor. """ return response + def post_list_customers_with_metadata( + self, + response: customers.ListCustomersResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + customers.ListCustomersResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_customers + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudControlsPartnerCore server but before it is returned to user code. + + We recommend only using this `post_list_customers_with_metadata` + interceptor in new development instead of the `post_list_customers` interceptor. + When both interceptors are used, this `post_list_customers_with_metadata` interceptor runs after the + `post_list_customers` interceptor. The (possibly modified) response returned by + `post_list_customers` will be passed to + `post_list_customers_with_metadata`. + """ + return response, metadata + def pre_list_workloads( self, request: customer_workloads.ListWorkloadsRequest, @@ -332,12 +500,38 @@ def post_list_workloads( ) -> customer_workloads.ListWorkloadsResponse: """Post-rpc interceptor for list_workloads - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_workloads_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CloudControlsPartnerCore server but before - it is returned to user code. + it is returned to user code. This `post_list_workloads` interceptor runs + before the `post_list_workloads_with_metadata` interceptor. 
""" return response + def post_list_workloads_with_metadata( + self, + response: customer_workloads.ListWorkloadsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + customer_workloads.ListWorkloadsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_workloads + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudControlsPartnerCore server but before it is returned to user code. + + We recommend only using this `post_list_workloads_with_metadata` + interceptor in new development instead of the `post_list_workloads` interceptor. + When both interceptors are used, this `post_list_workloads_with_metadata` interceptor runs after the + `post_list_workloads` interceptor. The (possibly modified) response returned by + `post_list_workloads` will be passed to + `post_list_workloads_with_metadata`. + """ + return response, metadata + @dataclasses.dataclass class CloudControlsPartnerCoreRestStub: @@ -545,6 +739,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_customer(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_customer_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -693,6 +891,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_ekm_connections(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_ekm_connections_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -834,6 +1036,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_partner(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_partner_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -980,6 +1186,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_partner_permissions(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_partner_permissions_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1127,6 +1337,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_workload(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_workload_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1278,6 +1492,13 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_access_approval_requests(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_list_access_approval_requests_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( 
logging.DEBUG ): # pragma: NO COVER @@ -1425,6 +1646,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_customers(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_customers_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1570,6 +1795,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_workloads(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_workloads_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1/services/cloud_controls_partner_monitoring/client.py b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1/services/cloud_controls_partner_monitoring/client.py index 9b5941972753..57138d2f8c9f 100644 --- a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1/services/cloud_controls_partner_monitoring/client.py +++ b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1/services/cloud_controls_partner_monitoring/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -501,6 +503,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. diff --git a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1/services/cloud_controls_partner_monitoring/transports/rest.py b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1/services/cloud_controls_partner_monitoring/transports/rest.py index ddf1bb4c5994..9114c7cecafd 100644 --- a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1/services/cloud_controls_partner_monitoring/transports/rest.py +++ b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1/services/cloud_controls_partner_monitoring/transports/rest.py @@ -107,12 +107,35 @@ def post_get_violation( ) -> violations.Violation: """Post-rpc interceptor for get_violation - Override in a subclass to manipulate the response + DEPRECATED. 
Please use the `post_get_violation_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CloudControlsPartnerMonitoring server but before - it is returned to user code. + it is returned to user code. This `post_get_violation` interceptor runs + before the `post_get_violation_with_metadata` interceptor. """ return response + def post_get_violation_with_metadata( + self, + response: violations.Violation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[violations.Violation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_violation + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudControlsPartnerMonitoring server but before it is returned to user code. + + We recommend only using this `post_get_violation_with_metadata` + interceptor in new development instead of the `post_get_violation` interceptor. + When both interceptors are used, this `post_get_violation_with_metadata` interceptor runs after the + `post_get_violation` interceptor. The (possibly modified) response returned by + `post_get_violation` will be passed to + `post_get_violation_with_metadata`. + """ + return response, metadata + def pre_list_violations( self, request: violations.ListViolationsRequest, @@ -132,12 +155,37 @@ def post_list_violations( ) -> violations.ListViolationsResponse: """Post-rpc interceptor for list_violations - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_violations_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CloudControlsPartnerMonitoring server but before - it is returned to user code. + it is returned to user code. This `post_list_violations` interceptor runs + before the `post_list_violations_with_metadata` interceptor. """ return response + def post_list_violations_with_metadata( + self, + response: violations.ListViolationsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + violations.ListViolationsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_violations + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudControlsPartnerMonitoring server but before it is returned to user code. + + We recommend only using this `post_list_violations_with_metadata` + interceptor in new development instead of the `post_list_violations` interceptor. + When both interceptors are used, this `post_list_violations_with_metadata` interceptor runs after the + `post_list_violations` interceptor. The (possibly modified) response returned by + `post_list_violations` will be passed to + `post_list_violations_with_metadata`. 
+ """ + return response, metadata + @dataclasses.dataclass class CloudControlsPartnerMonitoringRestStub: @@ -349,6 +397,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_violation(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_violation_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -493,6 +545,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_violations(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_violations_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/__init__.py b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/__init__.py index 2764cfb2053f..73a0bb72d236 100644 --- a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/__init__.py +++ b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/__init__.py @@ -43,12 +43,15 @@ WorkloadOnboardingStep, ) from .types.customers import ( + CreateCustomerRequest, Customer, CustomerOnboardingState, CustomerOnboardingStep, + DeleteCustomerRequest, GetCustomerRequest, ListCustomersRequest, ListCustomersResponse, + UpdateCustomerRequest, ) from .types.ekm_connections import ( EkmConnection, @@ -72,9 +75,11 @@ "CloudControlsPartnerCoreClient", "CloudControlsPartnerMonitoringClient", "CompletionState", + "CreateCustomerRequest", "Customer", "CustomerOnboardingState", "CustomerOnboardingStep", + "DeleteCustomerRequest", "EkmConnection", "EkmConnections", "EkmMetadata", @@ -96,6 +101,7 @@ "Partner", "PartnerPermissions", "Sku", + "UpdateCustomerRequest", "Violation", "Workload", "WorkloadOnboardingState", diff --git a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/gapic_metadata.json b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/gapic_metadata.json index 5e2cd4e90ca7..4e9eafffa5aa 100644 --- a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/gapic_metadata.json +++ b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/gapic_metadata.json @@ -10,6 +10,16 @@ "grpc": { "libraryClient": "CloudControlsPartnerCoreClient", "rpcs": { + "CreateCustomer": { + "methods": [ + "create_customer" + ] + }, + "DeleteCustomer": { + "methods": [ + "delete_customer" + ] + }, "GetCustomer": { "methods": [ "get_customer" @@ -49,12 +59,27 @@ "methods": [ "list_workloads" ] + }, + "UpdateCustomer": { + "methods": [ + "update_customer" + ] } } }, "grpc-async": { "libraryClient": "CloudControlsPartnerCoreAsyncClient", "rpcs": { + "CreateCustomer": { + "methods": [ + "create_customer" + ] + }, + "DeleteCustomer": { + "methods": [ + "delete_customer" + ] + }, "GetCustomer": { "methods": [ "get_customer" @@ -94,12 +119,27 @@ "methods": [ "list_workloads" ] + }, + "UpdateCustomer": { + "methods": [ + "update_customer" + ] } } }, "rest": { "libraryClient": "CloudControlsPartnerCoreClient", "rpcs": { + "CreateCustomer": { + "methods": [ + "create_customer" + ] + }, + 
"DeleteCustomer": { + "methods": [ + "delete_customer" + ] + }, "GetCustomer": { "methods": [ "get_customer" @@ -139,6 +179,11 @@ "methods": [ "list_workloads" ] + }, + "UpdateCustomer": { + "methods": [ + "update_customer" + ] } } } diff --git a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/gapic_version.py b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/gapic_version.py index 355df6b536f8..c1954d3635eb 100644 --- a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/gapic_version.py +++ b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.2.3" # {x-release-please-version} +__version__ = "0.2.5" # {x-release-please-version} diff --git a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/services/cloud_controls_partner_core/async_client.py b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/services/cloud_controls_partner_core/async_client.py index c4124a528cd8..b1bc8734b16b 100644 --- a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/services/cloud_controls_partner_core/async_client.py +++ b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/services/cloud_controls_partner_core/async_client.py @@ -44,6 +44,7 @@ except AttributeError: # pragma: NO COVER OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore from google.cloud.cloudcontrolspartner_v1beta.services.cloud_controls_partner_core import ( @@ -1253,6 +1254,356 @@ async def sample_get_partner(): # Done; return the response. return response + async def create_customer( + self, + request: Optional[Union[customers.CreateCustomerRequest, dict]] = None, + *, + parent: Optional[str] = None, + customer: Optional[customers.Customer] = None, + customer_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> customers.Customer: + r"""Creates a new customer. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import cloudcontrolspartner_v1beta + + async def sample_create_customer(): + # Create a client + client = cloudcontrolspartner_v1beta.CloudControlsPartnerCoreAsyncClient() + + # Initialize request argument(s) + customer = cloudcontrolspartner_v1beta.Customer() + customer.display_name = "display_name_value" + + request = cloudcontrolspartner_v1beta.CreateCustomerRequest( + parent="parent_value", + customer=customer, + customer_id="customer_id_value", + ) + + # Make the request + response = await client.create_customer(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.cloudcontrolspartner_v1beta.types.CreateCustomerRequest, dict]]): + The request object. Request to create a customer + parent (:class:`str`): + Required. Parent resource Format: + ``organizations/{organization}/locations/{location}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + customer (:class:`google.cloud.cloudcontrolspartner_v1beta.types.Customer`): + Required. The customer to create. + This corresponds to the ``customer`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + customer_id (:class:`str`): + Required. The customer id to use for + the customer, which will become the + final component of the customer's + resource name. The specified value must + be a valid Google cloud organization id. + + This corresponds to the ``customer_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.cloudcontrolspartner_v1beta.types.Customer: + Contains metadata around a Cloud + Controls Partner Customer + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, customer, customer_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, customers.CreateCustomerRequest): + request = customers.CreateCustomerRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if customer is not None: + request.customer = customer + if customer_id is not None: + request.customer_id = customer_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_customer + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def update_customer( + self, + request: Optional[Union[customers.UpdateCustomerRequest, dict]] = None, + *, + customer: Optional[customers.Customer] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> customers.Customer: + r"""Update details of a single customer + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import cloudcontrolspartner_v1beta + + async def sample_update_customer(): + # Create a client + client = cloudcontrolspartner_v1beta.CloudControlsPartnerCoreAsyncClient() + + # Initialize request argument(s) + customer = cloudcontrolspartner_v1beta.Customer() + customer.display_name = "display_name_value" + + request = cloudcontrolspartner_v1beta.UpdateCustomerRequest( + customer=customer, + ) + + # Make the request + response = await client.update_customer(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.cloudcontrolspartner_v1beta.types.UpdateCustomerRequest, dict]]): + The request object. Request to update a customer + customer (:class:`google.cloud.cloudcontrolspartner_v1beta.types.Customer`): + Required. The customer to update Format: + ``organizations/{organization}/locations/{location}/customers/{customer}`` + + This corresponds to the ``customer`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Optional. The list of fields to + update + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.cloudcontrolspartner_v1beta.types.Customer: + Contains metadata around a Cloud + Controls Partner Customer + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([customer, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, customers.UpdateCustomerRequest): + request = customers.UpdateCustomerRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if customer is not None: + request.customer = customer + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_customer + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("customer.name", request.customer.name),) + ), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_customer( + self, + request: Optional[Union[customers.DeleteCustomerRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Delete details of a single customer + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import cloudcontrolspartner_v1beta + + async def sample_delete_customer(): + # Create a client + client = cloudcontrolspartner_v1beta.CloudControlsPartnerCoreAsyncClient() + + # Initialize request argument(s) + request = cloudcontrolspartner_v1beta.DeleteCustomerRequest( + name="name_value", + ) + + # Make the request + await client.delete_customer(request=request) + + Args: + request (Optional[Union[google.cloud.cloudcontrolspartner_v1beta.types.DeleteCustomerRequest, dict]]): + The request object. Message for deleting customer + name (:class:`str`): + Required. name of the resource to be deleted format: + name=organizations/\ */locations/*/customers/\* + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + # Create or coerce a protobuf request object. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, customers.DeleteCustomerRequest): + request = customers.DeleteCustomerRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_customer + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + async def __aenter__(self) -> "CloudControlsPartnerCoreAsyncClient": return self diff --git a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/services/cloud_controls_partner_core/client.py b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/services/cloud_controls_partner_core/client.py index 9140a999b7d2..7554329c9c4f 100644 --- a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/services/cloud_controls_partner_core/client.py +++ b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/services/cloud_controls_partner_core/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -58,6 +60,7 @@ _LOGGER = std_logging.getLogger(__name__) +from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore from google.cloud.cloudcontrolspartner_v1beta.services.cloud_controls_partner_core import ( @@ -615,6 +618,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -1715,6 +1745,347 @@ def sample_get_partner(): # Done; return the response. 
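# A minimal sketch of how the credential-info behavior added by
# _add_cred_info_for_auth_errors can surface to callers. The resource name below is a
# placeholder, and the extra JSON detail only appears when google-auth >= 2.35.0
# provides get_cred_info on the active credential.
from google.api_core import exceptions as core_exceptions
from google.cloud import cloudcontrolspartner_v1beta

client = cloudcontrolspartner_v1beta.CloudControlsPartnerCoreClient()
try:
    client.get_customer(
        name="organizations/123/locations/us-central1/customers/456"
    )
except core_exceptions.PermissionDenied as exc:
    # For UNAUTHORIZED/FORBIDDEN/NOT_FOUND responses the client appends a JSON
    # string describing the active credential to the error details, which makes
    # permission problems easier to diagnose.
    print(exc.details)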
return response + def create_customer( + self, + request: Optional[Union[customers.CreateCustomerRequest, dict]] = None, + *, + parent: Optional[str] = None, + customer: Optional[customers.Customer] = None, + customer_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> customers.Customer: + r"""Creates a new customer. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import cloudcontrolspartner_v1beta + + def sample_create_customer(): + # Create a client + client = cloudcontrolspartner_v1beta.CloudControlsPartnerCoreClient() + + # Initialize request argument(s) + customer = cloudcontrolspartner_v1beta.Customer() + customer.display_name = "display_name_value" + + request = cloudcontrolspartner_v1beta.CreateCustomerRequest( + parent="parent_value", + customer=customer, + customer_id="customer_id_value", + ) + + # Make the request + response = client.create_customer(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.cloudcontrolspartner_v1beta.types.CreateCustomerRequest, dict]): + The request object. Request to create a customer + parent (str): + Required. Parent resource Format: + ``organizations/{organization}/locations/{location}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + customer (google.cloud.cloudcontrolspartner_v1beta.types.Customer): + Required. The customer to create. + This corresponds to the ``customer`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + customer_id (str): + Required. The customer id to use for + the customer, which will become the + final component of the customer's + resource name. The specified value must + be a valid Google cloud organization id. + + This corresponds to the ``customer_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.cloudcontrolspartner_v1beta.types.Customer: + Contains metadata around a Cloud + Controls Partner Customer + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, customer, customer_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, customers.CreateCustomerRequest): + request = customers.CreateCustomerRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if customer is not None: + request.customer = customer + if customer_id is not None: + request.customer_id = customer_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_customer] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_customer( + self, + request: Optional[Union[customers.UpdateCustomerRequest, dict]] = None, + *, + customer: Optional[customers.Customer] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> customers.Customer: + r"""Update details of a single customer + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import cloudcontrolspartner_v1beta + + def sample_update_customer(): + # Create a client + client = cloudcontrolspartner_v1beta.CloudControlsPartnerCoreClient() + + # Initialize request argument(s) + customer = cloudcontrolspartner_v1beta.Customer() + customer.display_name = "display_name_value" + + request = cloudcontrolspartner_v1beta.UpdateCustomerRequest( + customer=customer, + ) + + # Make the request + response = client.update_customer(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.cloudcontrolspartner_v1beta.types.UpdateCustomerRequest, dict]): + The request object. Request to update a customer + customer (google.cloud.cloudcontrolspartner_v1beta.types.Customer): + Required. The customer to update Format: + ``organizations/{organization}/locations/{location}/customers/{customer}`` + + This corresponds to the ``customer`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Optional. The list of fields to + update + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.cloudcontrolspartner_v1beta.types.Customer: + Contains metadata around a Cloud + Controls Partner Customer + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([customer, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, customers.UpdateCustomerRequest): + request = customers.UpdateCustomerRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if customer is not None: + request.customer = customer + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_customer] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("customer.name", request.customer.name),) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_customer( + self, + request: Optional[Union[customers.DeleteCustomerRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Delete details of a single customer + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import cloudcontrolspartner_v1beta + + def sample_delete_customer(): + # Create a client + client = cloudcontrolspartner_v1beta.CloudControlsPartnerCoreClient() + + # Initialize request argument(s) + request = cloudcontrolspartner_v1beta.DeleteCustomerRequest( + name="name_value", + ) + + # Make the request + client.delete_customer(request=request) + + Args: + request (Union[google.cloud.cloudcontrolspartner_v1beta.types.DeleteCustomerRequest, dict]): + The request object. Message for deleting customer + name (str): + Required. name of the resource to be deleted format: + name=organizations/\ */locations/*/customers/\* + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, customers.DeleteCustomerRequest): + request = customers.DeleteCustomerRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_customer] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + def __enter__(self) -> "CloudControlsPartnerCoreClient": return self diff --git a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/services/cloud_controls_partner_core/transports/base.py b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/services/cloud_controls_partner_core/transports/base.py index 2212f2ea1f2c..8b56222060f5 100644 --- a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/services/cloud_controls_partner_core/transports/base.py +++ b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/services/cloud_controls_partner_core/transports/base.py @@ -23,6 +23,7 @@ import google.auth # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore +from google.protobuf import empty_pb2 # type: ignore from google.cloud.cloudcontrolspartner_v1beta import gapic_version as package_version from google.cloud.cloudcontrolspartner_v1beta.types import ( @@ -238,6 +239,21 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.create_customer: gapic_v1.method.wrap_method( + self.create_customer, + default_timeout=None, + client_info=client_info, + ), + self.update_customer: gapic_v1.method.wrap_method( + self.update_customer, + default_timeout=None, + client_info=client_info, + ), + self.delete_customer: gapic_v1.method.wrap_method( + self.delete_customer, + default_timeout=None, + client_info=client_info, + ), } def close(self): @@ -334,6 +350,33 @@ def get_partner( ]: raise NotImplementedError() + @property + def create_customer( + self, + ) -> Callable[ + [customers.CreateCustomerRequest], + Union[customers.Customer, Awaitable[customers.Customer]], + ]: + raise NotImplementedError() + + @property + def update_customer( + self, + ) -> Callable[ + 
[customers.UpdateCustomerRequest], + Union[customers.Customer, Awaitable[customers.Customer]], + ]: + raise NotImplementedError() + + @property + def delete_customer( + self, + ) -> Callable[ + [customers.DeleteCustomerRequest], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], + ]: + raise NotImplementedError() + @property def kind(self) -> str: raise NotImplementedError() diff --git a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/services/cloud_controls_partner_core/transports/grpc.py b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/services/cloud_controls_partner_core/transports/grpc.py index 1f8acd3bd795..759851fa0f4d 100644 --- a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/services/cloud_controls_partner_core/transports/grpc.py +++ b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/services/cloud_controls_partner_core/transports/grpc.py @@ -23,6 +23,7 @@ import google.auth # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore +from google.protobuf import empty_pb2 # type: ignore from google.protobuf.json_format import MessageToJson import google.protobuf.message import grpc # type: ignore @@ -548,6 +549,84 @@ def get_partner(self) -> Callable[[partners.GetPartnerRequest], partners.Partner ) return self._stubs["get_partner"] + @property + def create_customer( + self, + ) -> Callable[[customers.CreateCustomerRequest], customers.Customer]: + r"""Return a callable for the create customer method over gRPC. + + Creates a new customer. + + Returns: + Callable[[~.CreateCustomerRequest], + ~.Customer]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_customer" not in self._stubs: + self._stubs["create_customer"] = self._logged_channel.unary_unary( + "/google.cloud.cloudcontrolspartner.v1beta.CloudControlsPartnerCore/CreateCustomer", + request_serializer=customers.CreateCustomerRequest.serialize, + response_deserializer=customers.Customer.deserialize, + ) + return self._stubs["create_customer"] + + @property + def update_customer( + self, + ) -> Callable[[customers.UpdateCustomerRequest], customers.Customer]: + r"""Return a callable for the update customer method over gRPC. + + Update details of a single customer + + Returns: + Callable[[~.UpdateCustomerRequest], + ~.Customer]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_customer" not in self._stubs: + self._stubs["update_customer"] = self._logged_channel.unary_unary( + "/google.cloud.cloudcontrolspartner.v1beta.CloudControlsPartnerCore/UpdateCustomer", + request_serializer=customers.UpdateCustomerRequest.serialize, + response_deserializer=customers.Customer.deserialize, + ) + return self._stubs["update_customer"] + + @property + def delete_customer( + self, + ) -> Callable[[customers.DeleteCustomerRequest], empty_pb2.Empty]: + r"""Return a callable for the delete customer method over gRPC. 
+ + Delete details of a single customer + + Returns: + Callable[[~.DeleteCustomerRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_customer" not in self._stubs: + self._stubs["delete_customer"] = self._logged_channel.unary_unary( + "/google.cloud.cloudcontrolspartner.v1beta.CloudControlsPartnerCore/DeleteCustomer", + request_serializer=customers.DeleteCustomerRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_customer"] + def close(self): self._logged_channel.close() diff --git a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/services/cloud_controls_partner_core/transports/grpc_asyncio.py b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/services/cloud_controls_partner_core/transports/grpc_asyncio.py index 4b57600956f6..5ab6c8c5d0d7 100644 --- a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/services/cloud_controls_partner_core/transports/grpc_asyncio.py +++ b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/services/cloud_controls_partner_core/transports/grpc_asyncio.py @@ -25,6 +25,7 @@ from google.api_core import retry_async as retries from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore +from google.protobuf import empty_pb2 # type: ignore from google.protobuf.json_format import MessageToJson import google.protobuf.message import grpc # type: ignore @@ -562,6 +563,84 @@ def get_partner( ) return self._stubs["get_partner"] + @property + def create_customer( + self, + ) -> Callable[[customers.CreateCustomerRequest], Awaitable[customers.Customer]]: + r"""Return a callable for the create customer method over gRPC. + + Creates a new customer. + + Returns: + Callable[[~.CreateCustomerRequest], + Awaitable[~.Customer]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_customer" not in self._stubs: + self._stubs["create_customer"] = self._logged_channel.unary_unary( + "/google.cloud.cloudcontrolspartner.v1beta.CloudControlsPartnerCore/CreateCustomer", + request_serializer=customers.CreateCustomerRequest.serialize, + response_deserializer=customers.Customer.deserialize, + ) + return self._stubs["create_customer"] + + @property + def update_customer( + self, + ) -> Callable[[customers.UpdateCustomerRequest], Awaitable[customers.Customer]]: + r"""Return a callable for the update customer method over gRPC. + + Update details of a single customer + + Returns: + Callable[[~.UpdateCustomerRequest], + Awaitable[~.Customer]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "update_customer" not in self._stubs: + self._stubs["update_customer"] = self._logged_channel.unary_unary( + "/google.cloud.cloudcontrolspartner.v1beta.CloudControlsPartnerCore/UpdateCustomer", + request_serializer=customers.UpdateCustomerRequest.serialize, + response_deserializer=customers.Customer.deserialize, + ) + return self._stubs["update_customer"] + + @property + def delete_customer( + self, + ) -> Callable[[customers.DeleteCustomerRequest], Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete customer method over gRPC. + + Delete details of a single customer + + Returns: + Callable[[~.DeleteCustomerRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_customer" not in self._stubs: + self._stubs["delete_customer"] = self._logged_channel.unary_unary( + "/google.cloud.cloudcontrolspartner.v1beta.CloudControlsPartnerCore/DeleteCustomer", + request_serializer=customers.DeleteCustomerRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_customer"] + def _prep_wrapped_messages(self, client_info): """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" self._wrapped_methods = { @@ -668,6 +747,21 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.create_customer: self._wrap_method( + self.create_customer, + default_timeout=None, + client_info=client_info, + ), + self.update_customer: self._wrap_method( + self.update_customer, + default_timeout=None, + client_info=client_info, + ), + self.delete_customer: self._wrap_method( + self.delete_customer, + default_timeout=None, + client_info=client_info, + ), } def _wrap_method(self, func, *args, **kwargs): diff --git a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/services/cloud_controls_partner_core/transports/rest.py b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/services/cloud_controls_partner_core/transports/rest.py index 2abfb01680ee..ab1b5962bdb7 100644 --- a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/services/cloud_controls_partner_core/transports/rest.py +++ b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/services/cloud_controls_partner_core/transports/rest.py @@ -24,6 +24,7 @@ from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.protobuf import empty_pb2 # type: ignore from google.protobuf import json_format from requests import __version__ as requests_version @@ -75,6 +76,18 @@ class CloudControlsPartnerCoreRestInterceptor: .. 
code-block:: python class MyCustomCloudControlsPartnerCoreInterceptor(CloudControlsPartnerCoreRestInterceptor): + def pre_create_customer(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_customer(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_customer(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + def pre_get_customer(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -139,12 +152,80 @@ def post_list_workloads(self, response): logging.log(f"Received response: {response}") return response + def pre_update_customer(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_customer(self, response): + logging.log(f"Received response: {response}") + return response + transport = CloudControlsPartnerCoreRestTransport(interceptor=MyCustomCloudControlsPartnerCoreInterceptor()) client = CloudControlsPartnerCoreClient(transport=transport) """ + def pre_create_customer( + self, + request: customers.CreateCustomerRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + customers.CreateCustomerRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for create_customer + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudControlsPartnerCore server. + """ + return request, metadata + + def post_create_customer(self, response: customers.Customer) -> customers.Customer: + """Post-rpc interceptor for create_customer + + DEPRECATED. Please use the `post_create_customer_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the CloudControlsPartnerCore server but before + it is returned to user code. This `post_create_customer` interceptor runs + before the `post_create_customer_with_metadata` interceptor. + """ + return response + + def post_create_customer_with_metadata( + self, + response: customers.Customer, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[customers.Customer, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_customer + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudControlsPartnerCore server but before it is returned to user code. + + We recommend only using this `post_create_customer_with_metadata` + interceptor in new development instead of the `post_create_customer` interceptor. + When both interceptors are used, this `post_create_customer_with_metadata` interceptor runs after the + `post_create_customer` interceptor. The (possibly modified) response returned by + `post_create_customer` will be passed to + `post_create_customer_with_metadata`. + """ + return response, metadata + + def pre_delete_customer( + self, + request: customers.DeleteCustomerRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + customers.DeleteCustomerRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for delete_customer + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudControlsPartnerCore server. 
+ """ + return request, metadata + def pre_get_customer( self, request: customers.GetCustomerRequest, @@ -160,12 +241,35 @@ def pre_get_customer( def post_get_customer(self, response: customers.Customer) -> customers.Customer: """Post-rpc interceptor for get_customer - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_customer_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CloudControlsPartnerCore server but before - it is returned to user code. + it is returned to user code. This `post_get_customer` interceptor runs + before the `post_get_customer_with_metadata` interceptor. """ return response + def post_get_customer_with_metadata( + self, + response: customers.Customer, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[customers.Customer, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_customer + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudControlsPartnerCore server but before it is returned to user code. + + We recommend only using this `post_get_customer_with_metadata` + interceptor in new development instead of the `post_get_customer` interceptor. + When both interceptors are used, this `post_get_customer_with_metadata` interceptor runs after the + `post_get_customer` interceptor. The (possibly modified) response returned by + `post_get_customer` will be passed to + `post_get_customer_with_metadata`. + """ + return response, metadata + def pre_get_ekm_connections( self, request: ekm_connections.GetEkmConnectionsRequest, @@ -186,12 +290,35 @@ def post_get_ekm_connections( ) -> ekm_connections.EkmConnections: """Post-rpc interceptor for get_ekm_connections - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_ekm_connections_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CloudControlsPartnerCore server but before - it is returned to user code. + it is returned to user code. This `post_get_ekm_connections` interceptor runs + before the `post_get_ekm_connections_with_metadata` interceptor. """ return response + def post_get_ekm_connections_with_metadata( + self, + response: ekm_connections.EkmConnections, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ekm_connections.EkmConnections, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_ekm_connections + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudControlsPartnerCore server but before it is returned to user code. + + We recommend only using this `post_get_ekm_connections_with_metadata` + interceptor in new development instead of the `post_get_ekm_connections` interceptor. + When both interceptors are used, this `post_get_ekm_connections_with_metadata` interceptor runs after the + `post_get_ekm_connections` interceptor. The (possibly modified) response returned by + `post_get_ekm_connections` will be passed to + `post_get_ekm_connections_with_metadata`. + """ + return response, metadata + def pre_get_partner( self, request: partners.GetPartnerRequest, @@ -207,12 +334,35 @@ def pre_get_partner( def post_get_partner(self, response: partners.Partner) -> partners.Partner: """Post-rpc interceptor for get_partner - Override in a subclass to manipulate the response + DEPRECATED. 
Please use the `post_get_partner_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CloudControlsPartnerCore server but before - it is returned to user code. + it is returned to user code. This `post_get_partner` interceptor runs + before the `post_get_partner_with_metadata` interceptor. """ return response + def post_get_partner_with_metadata( + self, + response: partners.Partner, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[partners.Partner, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_partner + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudControlsPartnerCore server but before it is returned to user code. + + We recommend only using this `post_get_partner_with_metadata` + interceptor in new development instead of the `post_get_partner` interceptor. + When both interceptors are used, this `post_get_partner_with_metadata` interceptor runs after the + `post_get_partner` interceptor. The (possibly modified) response returned by + `post_get_partner` will be passed to + `post_get_partner_with_metadata`. + """ + return response, metadata + def pre_get_partner_permissions( self, request: partner_permissions.GetPartnerPermissionsRequest, @@ -233,12 +383,37 @@ def post_get_partner_permissions( ) -> partner_permissions.PartnerPermissions: """Post-rpc interceptor for get_partner_permissions - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_partner_permissions_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CloudControlsPartnerCore server but before - it is returned to user code. + it is returned to user code. This `post_get_partner_permissions` interceptor runs + before the `post_get_partner_permissions_with_metadata` interceptor. """ return response + def post_get_partner_permissions_with_metadata( + self, + response: partner_permissions.PartnerPermissions, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + partner_permissions.PartnerPermissions, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for get_partner_permissions + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudControlsPartnerCore server but before it is returned to user code. + + We recommend only using this `post_get_partner_permissions_with_metadata` + interceptor in new development instead of the `post_get_partner_permissions` interceptor. + When both interceptors are used, this `post_get_partner_permissions_with_metadata` interceptor runs after the + `post_get_partner_permissions` interceptor. The (possibly modified) response returned by + `post_get_partner_permissions` will be passed to + `post_get_partner_permissions_with_metadata`. + """ + return response, metadata + def pre_get_workload( self, request: customer_workloads.GetWorkloadRequest, @@ -258,12 +433,35 @@ def post_get_workload( ) -> customer_workloads.Workload: """Post-rpc interceptor for get_workload - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_workload_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CloudControlsPartnerCore server but before - it is returned to user code. + it is returned to user code. 
This `post_get_workload` interceptor runs + before the `post_get_workload_with_metadata` interceptor. """ return response + def post_get_workload_with_metadata( + self, + response: customer_workloads.Workload, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[customer_workloads.Workload, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_workload + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudControlsPartnerCore server but before it is returned to user code. + + We recommend only using this `post_get_workload_with_metadata` + interceptor in new development instead of the `post_get_workload` interceptor. + When both interceptors are used, this `post_get_workload_with_metadata` interceptor runs after the + `post_get_workload` interceptor. The (possibly modified) response returned by + `post_get_workload` will be passed to + `post_get_workload_with_metadata`. + """ + return response, metadata + def pre_list_access_approval_requests( self, request: access_approval_requests.ListAccessApprovalRequestsRequest, @@ -284,12 +482,38 @@ def post_list_access_approval_requests( ) -> access_approval_requests.ListAccessApprovalRequestsResponse: """Post-rpc interceptor for list_access_approval_requests - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_access_approval_requests_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CloudControlsPartnerCore server but before - it is returned to user code. + it is returned to user code. This `post_list_access_approval_requests` interceptor runs + before the `post_list_access_approval_requests_with_metadata` interceptor. """ return response + def post_list_access_approval_requests_with_metadata( + self, + response: access_approval_requests.ListAccessApprovalRequestsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + access_approval_requests.ListAccessApprovalRequestsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_access_approval_requests + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudControlsPartnerCore server but before it is returned to user code. + + We recommend only using this `post_list_access_approval_requests_with_metadata` + interceptor in new development instead of the `post_list_access_approval_requests` interceptor. + When both interceptors are used, this `post_list_access_approval_requests_with_metadata` interceptor runs after the + `post_list_access_approval_requests` interceptor. The (possibly modified) response returned by + `post_list_access_approval_requests` will be passed to + `post_list_access_approval_requests_with_metadata`. + """ + return response, metadata + def pre_list_customers( self, request: customers.ListCustomersRequest, @@ -307,12 +531,37 @@ def post_list_customers( ) -> customers.ListCustomersResponse: """Post-rpc interceptor for list_customers - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_customers_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CloudControlsPartnerCore server but before - it is returned to user code. + it is returned to user code. This `post_list_customers` interceptor runs + before the `post_list_customers_with_metadata` interceptor. 
""" return response + def post_list_customers_with_metadata( + self, + response: customers.ListCustomersResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + customers.ListCustomersResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_customers + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudControlsPartnerCore server but before it is returned to user code. + + We recommend only using this `post_list_customers_with_metadata` + interceptor in new development instead of the `post_list_customers` interceptor. + When both interceptors are used, this `post_list_customers_with_metadata` interceptor runs after the + `post_list_customers` interceptor. The (possibly modified) response returned by + `post_list_customers` will be passed to + `post_list_customers_with_metadata`. + """ + return response, metadata + def pre_list_workloads( self, request: customer_workloads.ListWorkloadsRequest, @@ -332,12 +581,84 @@ def post_list_workloads( ) -> customer_workloads.ListWorkloadsResponse: """Post-rpc interceptor for list_workloads - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_workloads_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CloudControlsPartnerCore server but before - it is returned to user code. + it is returned to user code. This `post_list_workloads` interceptor runs + before the `post_list_workloads_with_metadata` interceptor. """ return response + def post_list_workloads_with_metadata( + self, + response: customer_workloads.ListWorkloadsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + customer_workloads.ListWorkloadsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_workloads + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudControlsPartnerCore server but before it is returned to user code. + + We recommend only using this `post_list_workloads_with_metadata` + interceptor in new development instead of the `post_list_workloads` interceptor. + When both interceptors are used, this `post_list_workloads_with_metadata` interceptor runs after the + `post_list_workloads` interceptor. The (possibly modified) response returned by + `post_list_workloads` will be passed to + `post_list_workloads_with_metadata`. + """ + return response, metadata + + def pre_update_customer( + self, + request: customers.UpdateCustomerRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + customers.UpdateCustomerRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for update_customer + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudControlsPartnerCore server. + """ + return request, metadata + + def post_update_customer(self, response: customers.Customer) -> customers.Customer: + """Post-rpc interceptor for update_customer + + DEPRECATED. Please use the `post_update_customer_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the CloudControlsPartnerCore server but before + it is returned to user code. This `post_update_customer` interceptor runs + before the `post_update_customer_with_metadata` interceptor. 
+ """ + return response + + def post_update_customer_with_metadata( + self, + response: customers.Customer, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[customers.Customer, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_customer + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudControlsPartnerCore server but before it is returned to user code. + + We recommend only using this `post_update_customer_with_metadata` + interceptor in new development instead of the `post_update_customer` interceptor. + When both interceptors are used, this `post_update_customer_with_metadata` interceptor runs after the + `post_update_customer` interceptor. The (possibly modified) response returned by + `post_update_customer` will be passed to + `post_update_customer_with_metadata`. + """ + return response, metadata + @dataclasses.dataclass class CloudControlsPartnerCoreRestStub: @@ -358,72 +679,336 @@ class CloudControlsPartnerCoreRestTransport(_BaseCloudControlsPartnerCoreRestTra It sends JSON representations of protocol buffers over HTTP/1.1 """ - def __init__( - self, - *, - host: str = "cloudcontrolspartner.googleapis.com", - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - url_scheme: str = "https", - interceptor: Optional[CloudControlsPartnerCoreRestInterceptor] = None, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. + def __init__( + self, + *, + host: str = "cloudcontrolspartner.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[CloudControlsPartnerCoreRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'cloudcontrolspartner.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. 
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + url_scheme=url_scheme, + api_audience=api_audience, + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST + ) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or CloudControlsPartnerCoreRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _CreateCustomer( + _BaseCloudControlsPartnerCoreRestTransport._BaseCreateCustomer, + CloudControlsPartnerCoreRestStub, + ): + def __hash__(self): + return hash("CloudControlsPartnerCoreRestTransport.CreateCustomer") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: customers.CreateCustomerRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> customers.Customer: + r"""Call the create customer method over HTTP. + + Args: + request (~.customers.CreateCustomerRequest): + The request object. Request to create a customer + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ + Returns: + ~.customers.Customer: + Contains metadata around a Cloud + Controls Partner Customer + + """ + + http_options = ( + _BaseCloudControlsPartnerCoreRestTransport._BaseCreateCustomer._get_http_options() + ) + + request, metadata = self._interceptor.pre_create_customer(request, metadata) + transcoded_request = _BaseCloudControlsPartnerCoreRestTransport._BaseCreateCustomer._get_transcoded_request( + http_options, request + ) + + body = _BaseCloudControlsPartnerCoreRestTransport._BaseCreateCustomer._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseCloudControlsPartnerCoreRestTransport._BaseCreateCustomer._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.cloudcontrolspartner_v1beta.CloudControlsPartnerCoreClient.CreateCustomer", + extra={ + "serviceName": "google.cloud.cloudcontrolspartner.v1beta.CloudControlsPartnerCore", + "rpcName": "CreateCustomer", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = ( + CloudControlsPartnerCoreRestTransport._CreateCustomer._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = customers.Customer() + pb_resp = customers.Customer.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_create_customer(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_customer_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = customers.Customer.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.cloudcontrolspartner_v1beta.CloudControlsPartnerCoreClient.create_customer", + extra={ + "serviceName": "google.cloud.cloudcontrolspartner.v1beta.CloudControlsPartnerCore", + "rpcName": "CreateCustomer", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _DeleteCustomer( + _BaseCloudControlsPartnerCoreRestTransport._BaseDeleteCustomer, + CloudControlsPartnerCoreRestStub, + ): + def __hash__(self): + return hash("CloudControlsPartnerCoreRestTransport.DeleteCustomer") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: customers.DeleteCustomerRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ): + r"""Call the delete customer method over HTTP. + + Args: + request (~.customers.DeleteCustomerRequest): + The request object. Message for deleting customer + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + + http_options = ( + _BaseCloudControlsPartnerCoreRestTransport._BaseDeleteCustomer._get_http_options() + ) + + request, metadata = self._interceptor.pre_delete_customer(request, metadata) + transcoded_request = _BaseCloudControlsPartnerCoreRestTransport._BaseDeleteCustomer._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseCloudControlsPartnerCoreRestTransport._BaseDeleteCustomer._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.cloudcontrolspartner_v1beta.CloudControlsPartnerCoreClient.DeleteCustomer", + extra={ + "serviceName": "google.cloud.cloudcontrolspartner.v1beta.CloudControlsPartnerCore", + "rpcName": "DeleteCustomer", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) - Args: - host (Optional[str]): - The hostname to connect to (default: 'cloudcontrolspartner.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. + # Send the request + response = ( + CloudControlsPartnerCoreRestTransport._DeleteCustomer._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + ) - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. - client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client - certificate to configure mutual TLS HTTP channel. It is ignored - if ``channel`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you are developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - url_scheme: the protocol scheme for the API endpoint. Normally - "https", but for testing or local servers, - "http" can be specified. - """ - # Run the base constructor - # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
- # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the - # credentials object - super().__init__( - host=host, - credentials=credentials, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - url_scheme=url_scheme, - api_audience=api_audience, - ) - self._session = AuthorizedSession( - self._credentials, default_host=self.DEFAULT_HOST - ) - if client_cert_source_for_mtls: - self._session.configure_mtls_channel(client_cert_source_for_mtls) - self._interceptor = interceptor or CloudControlsPartnerCoreRestInterceptor() - self._prep_wrapped_messages(client_info) + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) class _GetCustomer( _BaseCloudControlsPartnerCoreRestTransport._BaseGetCustomer, @@ -545,6 +1130,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_customer(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_customer_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -693,6 +1282,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_ekm_connections(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_ekm_connections_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -834,6 +1427,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_partner(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_partner_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -980,6 +1577,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_partner_permissions(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_partner_permissions_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1127,6 +1728,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_workload(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_workload_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1278,6 +1883,13 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_access_approval_requests(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_list_access_approval_requests_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1425,6 +2037,10 @@ def __call__( json_format.Parse(response.content, pb_resp, 
ignore_unknown_fields=True) resp = self._interceptor.post_list_customers(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_customers_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1570,6 +2186,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_workloads(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_workloads_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1595,6 +2215,177 @@ def __call__( ) return resp + class _UpdateCustomer( + _BaseCloudControlsPartnerCoreRestTransport._BaseUpdateCustomer, + CloudControlsPartnerCoreRestStub, + ): + def __hash__(self): + return hash("CloudControlsPartnerCoreRestTransport.UpdateCustomer") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: customers.UpdateCustomerRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> customers.Customer: + r"""Call the update customer method over HTTP. + + Args: + request (~.customers.UpdateCustomerRequest): + The request object. Request to update a customer + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ + Returns: + ~.customers.Customer: + Contains metadata around a Cloud + Controls Partner Customer + + """ + + http_options = ( + _BaseCloudControlsPartnerCoreRestTransport._BaseUpdateCustomer._get_http_options() + ) + + request, metadata = self._interceptor.pre_update_customer(request, metadata) + transcoded_request = _BaseCloudControlsPartnerCoreRestTransport._BaseUpdateCustomer._get_transcoded_request( + http_options, request + ) + + body = _BaseCloudControlsPartnerCoreRestTransport._BaseUpdateCustomer._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseCloudControlsPartnerCoreRestTransport._BaseUpdateCustomer._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.cloudcontrolspartner_v1beta.CloudControlsPartnerCoreClient.UpdateCustomer", + extra={ + "serviceName": "google.cloud.cloudcontrolspartner.v1beta.CloudControlsPartnerCore", + "rpcName": "UpdateCustomer", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = ( + CloudControlsPartnerCoreRestTransport._UpdateCustomer._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = customers.Customer() + pb_resp = customers.Customer.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_update_customer(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_customer_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = customers.Customer.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.cloudcontrolspartner_v1beta.CloudControlsPartnerCoreClient.update_customer", + extra={ + "serviceName": "google.cloud.cloudcontrolspartner.v1beta.CloudControlsPartnerCore", + "rpcName": "UpdateCustomer", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + @property + def create_customer( + self, + ) -> Callable[[customers.CreateCustomerRequest], customers.Customer]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._CreateCustomer(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_customer( + self, + ) -> Callable[[customers.DeleteCustomerRequest], empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteCustomer(self._session, self._host, self._interceptor) # type: ignore + @property def get_customer( self, @@ -1668,6 +2459,14 @@ def list_workloads( # In C++ this would require a dynamic_cast return self._ListWorkloads(self._session, self._host, self._interceptor) # type: ignore + @property + def update_customer( + self, + ) -> Callable[[customers.UpdateCustomerRequest], customers.Customer]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateCustomer(self._session, self._host, self._interceptor) # type: ignore + @property def kind(self) -> str: return "rest" diff --git a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/services/cloud_controls_partner_core/transports/rest_base.py b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/services/cloud_controls_partner_core/transports/rest_base.py index eff3ecbdaf52..4a14660c8b61 100644 --- a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/services/cloud_controls_partner_core/transports/rest_base.py +++ b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/services/cloud_controls_partner_core/transports/rest_base.py @@ -18,6 +18,7 @@ from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union from google.api_core import gapic_v1, path_template +from google.protobuf import empty_pb2 # type: ignore from google.protobuf import json_format from google.cloud.cloudcontrolspartner_v1beta.types import ( @@ -94,6 +95,112 @@ def __init__( api_audience=api_audience, ) + class _BaseCreateCustomer: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "customerId": "", + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1beta/{parent=organizations/*/locations/*}/customers", + "body": "customer", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = customers.CreateCustomerRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseCloudControlsPartnerCoreRestTransport._BaseCreateCustomer._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = 
"json;enum-encoding=int" + return query_params + + class _BaseDeleteCustomer: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1beta/{name=organizations/*/locations/*/customers/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = customers.DeleteCustomerRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseCloudControlsPartnerCoreRestTransport._BaseDeleteCustomer._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + class _BaseGetCustomer: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") @@ -472,5 +579,62 @@ def _get_query_params_json(transcoded_request): query_params["$alt"] = "json;enum-encoding=int" return query_params + class _BaseUpdateCustomer: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1beta/{customer.name=organizations/*/locations/*/customers/*}", + "body": "customer", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = customers.UpdateCustomerRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseCloudControlsPartnerCoreRestTransport._BaseUpdateCustomer._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + __all__ = ("_BaseCloudControlsPartnerCoreRestTransport",) diff --git a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/services/cloud_controls_partner_monitoring/client.py b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/services/cloud_controls_partner_monitoring/client.py index 67f7db23d20d..8879e496e673 100644 --- a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/services/cloud_controls_partner_monitoring/client.py +++ 
b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/services/cloud_controls_partner_monitoring/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -501,6 +503,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. diff --git a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/services/cloud_controls_partner_monitoring/transports/rest.py b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/services/cloud_controls_partner_monitoring/transports/rest.py index 1d6f57440b70..64064d2340db 100644 --- a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/services/cloud_controls_partner_monitoring/transports/rest.py +++ b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/services/cloud_controls_partner_monitoring/transports/rest.py @@ -107,12 +107,35 @@ def post_get_violation( ) -> violations.Violation: """Post-rpc interceptor for get_violation - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_violation_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CloudControlsPartnerMonitoring server but before - it is returned to user code. + it is returned to user code. This `post_get_violation` interceptor runs + before the `post_get_violation_with_metadata` interceptor. """ return response + def post_get_violation_with_metadata( + self, + response: violations.Violation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[violations.Violation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_violation + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudControlsPartnerMonitoring server but before it is returned to user code. + + We recommend only using this `post_get_violation_with_metadata` + interceptor in new development instead of the `post_get_violation` interceptor. + When both interceptors are used, this `post_get_violation_with_metadata` interceptor runs after the + `post_get_violation` interceptor. The (possibly modified) response returned by + `post_get_violation` will be passed to + `post_get_violation_with_metadata`. 
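For the 401/403/404 cases handled by `_add_cred_info_for_auth_errors`, the appended detail is a JSON string describing the credential in use (only when google-auth >= 2.35.0 exposes `get_cred_info`). Roughly how that surfaces to calling code, assuming the client wires this helper into its RPC error paths (the request value and the chosen exception type are placeholders):

import json

from google.api_core import exceptions as core_exceptions
from google.cloud import cloudcontrolspartner_v1beta

client = cloudcontrolspartner_v1beta.CloudControlsPartnerMonitoringClient()
try:
    client.get_violation(request={"name": "name_value"})
except core_exceptions.PermissionDenied as exc:  # HTTP 403
    # If credential info was attached, it appears as a JSON string in the details,
    # which helps diagnose which identity the failed call actually used.
    for detail in exc.details:
        if isinstance(detail, str):
            print(json.loads(detail))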
+ """ + return response, metadata + def pre_list_violations( self, request: violations.ListViolationsRequest, @@ -132,12 +155,37 @@ def post_list_violations( ) -> violations.ListViolationsResponse: """Post-rpc interceptor for list_violations - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_violations_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CloudControlsPartnerMonitoring server but before - it is returned to user code. + it is returned to user code. This `post_list_violations` interceptor runs + before the `post_list_violations_with_metadata` interceptor. """ return response + def post_list_violations_with_metadata( + self, + response: violations.ListViolationsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + violations.ListViolationsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_violations + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudControlsPartnerMonitoring server but before it is returned to user code. + + We recommend only using this `post_list_violations_with_metadata` + interceptor in new development instead of the `post_list_violations` interceptor. + When both interceptors are used, this `post_list_violations_with_metadata` interceptor runs after the + `post_list_violations` interceptor. The (possibly modified) response returned by + `post_list_violations` will be passed to + `post_list_violations_with_metadata`. + """ + return response, metadata + @dataclasses.dataclass class CloudControlsPartnerMonitoringRestStub: @@ -349,6 +397,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_violation(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_violation_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -493,6 +545,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_violations(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_violations_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/types/__init__.py b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/types/__init__.py index 38c13f856a8a..86299b7a0160 100644 --- a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/types/__init__.py +++ b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/types/__init__.py @@ -30,12 +30,15 @@ WorkloadOnboardingStep, ) from .customers import ( + CreateCustomerRequest, Customer, CustomerOnboardingState, CustomerOnboardingStep, + DeleteCustomerRequest, GetCustomerRequest, ListCustomersRequest, ListCustomersResponse, + UpdateCustomerRequest, ) from .ekm_connections import EkmConnection, EkmConnections, GetEkmConnectionsRequest from .partner_permissions import GetPartnerPermissionsRequest, PartnerPermissions @@ -60,12 +63,15 @@ "Workload", "WorkloadOnboardingState", "WorkloadOnboardingStep", + 
"CreateCustomerRequest", "Customer", "CustomerOnboardingState", "CustomerOnboardingStep", + "DeleteCustomerRequest", "GetCustomerRequest", "ListCustomersRequest", "ListCustomersResponse", + "UpdateCustomerRequest", "EkmConnection", "EkmConnections", "GetEkmConnectionsRequest", diff --git a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/types/core.py b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/types/core.py index acbd67d95f12..e271b011007f 100644 --- a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/types/core.py +++ b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/types/core.py @@ -50,9 +50,10 @@ class OperationMetadata(proto.Message): requested_cancellation (bool): Output only. Identifies whether the user has requested cancellation of the operation. Operations that have been - cancelled successfully have [Operation.error][] value with a - [google.rpc.Status.code][google.rpc.Status.code] of 1, - corresponding to ``Code.CANCELLED``. + cancelled successfully have + [Operation.error][google.longrunning.Operation.error] value + with a [google.rpc.Status.code][google.rpc.Status.code] of + 1, corresponding to ``Code.CANCELLED``. api_version (str): Output only. API version used to start the operation. diff --git a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/types/customers.py b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/types/customers.py index 2237867d884f..e3e023f339e5 100644 --- a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/types/customers.py +++ b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/types/customers.py @@ -17,6 +17,7 @@ from typing import MutableMapping, MutableSequence +from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore import proto # type: ignore @@ -30,9 +31,12 @@ "Customer", "ListCustomersRequest", "ListCustomersResponse", + "CreateCustomerRequest", "GetCustomerRequest", "CustomerOnboardingState", "CustomerOnboardingStep", + "UpdateCustomerRequest", + "DeleteCustomerRequest", }, ) @@ -52,6 +56,10 @@ class Customer(proto.Message): is_onboarded (bool): Output only. Indicates whether a customer is fully onboarded + organization_domain (str): + Output only. The customer organization domain, extracted + from CRM Organization’s display_name field. e.g. + "google.com". """ name: str = proto.Field( @@ -71,6 +79,10 @@ class Customer(proto.Message): proto.BOOL, number=4, ) + organization_domain: str = proto.Field( + proto.STRING, + number=5, + ) class ListCustomersRequest(proto.Message): @@ -149,6 +161,38 @@ def raw_page(self): ) +class CreateCustomerRequest(proto.Message): + r"""Request to create a customer + + Attributes: + parent (str): + Required. Parent resource Format: + ``organizations/{organization}/locations/{location}`` + customer (google.cloud.cloudcontrolspartner_v1beta.types.Customer): + Required. The customer to create. + customer_id (str): + Required. The customer id to use for the + customer, which will become the final component + of the customer's resource name. The specified + value must be a valid Google cloud organization + id. 
+ """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + customer: "Customer" = proto.Field( + proto.MESSAGE, + number=2, + message="Customer", + ) + customer_id: str = proto.Field( + proto.STRING, + number=3, + ) + + class GetCustomerRequest(proto.Message): r"""Message for getting a customer @@ -230,4 +274,42 @@ class Step(proto.Enum): ) +class UpdateCustomerRequest(proto.Message): + r"""Request to update a customer + + Attributes: + customer (google.cloud.cloudcontrolspartner_v1beta.types.Customer): + Required. The customer to update Format: + ``organizations/{organization}/locations/{location}/customers/{customer}`` + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Optional. The list of fields to update + """ + + customer: "Customer" = proto.Field( + proto.MESSAGE, + number=1, + message="Customer", + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + + +class DeleteCustomerRequest(proto.Message): + r"""Message for deleting customer + + Attributes: + name (str): + Required. name of the resource to be deleted format: + name=organizations/\ */locations/*/customers/\* + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/types/partners.py b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/types/partners.py index 60daac5eb67b..354c8333ea66 100644 --- a/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/types/partners.py +++ b/packages/google-cloud-cloudcontrolspartner/google/cloud/cloudcontrolspartner_v1beta/types/partners.py @@ -166,7 +166,7 @@ class EkmSolution(proto.Enum): THALES (3): EKM Partner Thales VIRTRU (4): - EKM Partner Virtu + This enum value is never used. """ EKM_SOLUTION_UNSPECIFIED = 0 FORTANIX = 1 diff --git a/packages/google-cloud-cloudcontrolspartner/samples/generated_samples/cloudcontrolspartner_v1beta_generated_cloud_controls_partner_core_create_customer_async.py b/packages/google-cloud-cloudcontrolspartner/samples/generated_samples/cloudcontrolspartner_v1beta_generated_cloud_controls_partner_core_create_customer_async.py new file mode 100644 index 000000000000..e3dce7370fa8 --- /dev/null +++ b/packages/google-cloud-cloudcontrolspartner/samples/generated_samples/cloudcontrolspartner_v1beta_generated_cloud_controls_partner_core_create_customer_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateCustomer +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-cloudcontrolspartner + + +# [START cloudcontrolspartner_v1beta_generated_CloudControlsPartnerCore_CreateCustomer_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import cloudcontrolspartner_v1beta + + +async def sample_create_customer(): + # Create a client + client = cloudcontrolspartner_v1beta.CloudControlsPartnerCoreAsyncClient() + + # Initialize request argument(s) + customer = cloudcontrolspartner_v1beta.Customer() + customer.display_name = "display_name_value" + + request = cloudcontrolspartner_v1beta.CreateCustomerRequest( + parent="parent_value", + customer=customer, + customer_id="customer_id_value", + ) + + # Make the request + response = await client.create_customer(request=request) + + # Handle the response + print(response) + +# [END cloudcontrolspartner_v1beta_generated_CloudControlsPartnerCore_CreateCustomer_async] diff --git a/packages/google-cloud-cloudcontrolspartner/samples/generated_samples/cloudcontrolspartner_v1beta_generated_cloud_controls_partner_core_create_customer_sync.py b/packages/google-cloud-cloudcontrolspartner/samples/generated_samples/cloudcontrolspartner_v1beta_generated_cloud_controls_partner_core_create_customer_sync.py new file mode 100644 index 000000000000..e9bfd3b602e6 --- /dev/null +++ b/packages/google-cloud-cloudcontrolspartner/samples/generated_samples/cloudcontrolspartner_v1beta_generated_cloud_controls_partner_core_create_customer_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateCustomer +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-cloudcontrolspartner + + +# [START cloudcontrolspartner_v1beta_generated_CloudControlsPartnerCore_CreateCustomer_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import cloudcontrolspartner_v1beta + + +def sample_create_customer(): + # Create a client + client = cloudcontrolspartner_v1beta.CloudControlsPartnerCoreClient() + + # Initialize request argument(s) + customer = cloudcontrolspartner_v1beta.Customer() + customer.display_name = "display_name_value" + + request = cloudcontrolspartner_v1beta.CreateCustomerRequest( + parent="parent_value", + customer=customer, + customer_id="customer_id_value", + ) + + # Make the request + response = client.create_customer(request=request) + + # Handle the response + print(response) + +# [END cloudcontrolspartner_v1beta_generated_CloudControlsPartnerCore_CreateCustomer_sync] diff --git a/packages/google-cloud-cloudcontrolspartner/samples/generated_samples/cloudcontrolspartner_v1beta_generated_cloud_controls_partner_core_delete_customer_async.py b/packages/google-cloud-cloudcontrolspartner/samples/generated_samples/cloudcontrolspartner_v1beta_generated_cloud_controls_partner_core_delete_customer_async.py new file mode 100644 index 000000000000..06c7b51a8eee --- /dev/null +++ b/packages/google-cloud-cloudcontrolspartner/samples/generated_samples/cloudcontrolspartner_v1beta_generated_cloud_controls_partner_core_delete_customer_async.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteCustomer +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-cloudcontrolspartner + + +# [START cloudcontrolspartner_v1beta_generated_CloudControlsPartnerCore_DeleteCustomer_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
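In addition to the request-object form shown in these samples, the snippet metadata below records flattened parameters for CreateCustomer (`parent`, `customer`, `customer_id`), so the call can also be written without constructing the request message explicitly. A brief sketch under the same assumptions as the generated samples (all values are placeholders):

from google.cloud import cloudcontrolspartner_v1beta

client = cloudcontrolspartner_v1beta.CloudControlsPartnerCoreClient()

customer = cloudcontrolspartner_v1beta.Customer(display_name="display_name_value")

# Flattened arguments build the CreateCustomerRequest internally.
response = client.create_customer(
    parent="organizations/123/locations/us-central1",  # placeholder parent
    customer=customer,
    customer_id="customer_id_value",
)
print(response)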
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import cloudcontrolspartner_v1beta + + +async def sample_delete_customer(): + # Create a client + client = cloudcontrolspartner_v1beta.CloudControlsPartnerCoreAsyncClient() + + # Initialize request argument(s) + request = cloudcontrolspartner_v1beta.DeleteCustomerRequest( + name="name_value", + ) + + # Make the request + await client.delete_customer(request=request) + + +# [END cloudcontrolspartner_v1beta_generated_CloudControlsPartnerCore_DeleteCustomer_async] diff --git a/packages/google-cloud-cloudcontrolspartner/samples/generated_samples/cloudcontrolspartner_v1beta_generated_cloud_controls_partner_core_delete_customer_sync.py b/packages/google-cloud-cloudcontrolspartner/samples/generated_samples/cloudcontrolspartner_v1beta_generated_cloud_controls_partner_core_delete_customer_sync.py new file mode 100644 index 000000000000..21b57014851f --- /dev/null +++ b/packages/google-cloud-cloudcontrolspartner/samples/generated_samples/cloudcontrolspartner_v1beta_generated_cloud_controls_partner_core_delete_customer_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteCustomer +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-cloudcontrolspartner + + +# [START cloudcontrolspartner_v1beta_generated_CloudControlsPartnerCore_DeleteCustomer_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import cloudcontrolspartner_v1beta + + +def sample_delete_customer(): + # Create a client + client = cloudcontrolspartner_v1beta.CloudControlsPartnerCoreClient() + + # Initialize request argument(s) + request = cloudcontrolspartner_v1beta.DeleteCustomerRequest( + name="name_value", + ) + + # Make the request + client.delete_customer(request=request) + + +# [END cloudcontrolspartner_v1beta_generated_CloudControlsPartnerCore_DeleteCustomer_sync] diff --git a/packages/google-cloud-cloudcontrolspartner/samples/generated_samples/cloudcontrolspartner_v1beta_generated_cloud_controls_partner_core_update_customer_async.py b/packages/google-cloud-cloudcontrolspartner/samples/generated_samples/cloudcontrolspartner_v1beta_generated_cloud_controls_partner_core_update_customer_async.py new file mode 100644 index 000000000000..8a28914d844b --- /dev/null +++ b/packages/google-cloud-cloudcontrolspartner/samples/generated_samples/cloudcontrolspartner_v1beta_generated_cloud_controls_partner_core_update_customer_async.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateCustomer +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-cloudcontrolspartner + + +# [START cloudcontrolspartner_v1beta_generated_CloudControlsPartnerCore_UpdateCustomer_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import cloudcontrolspartner_v1beta + + +async def sample_update_customer(): + # Create a client + client = cloudcontrolspartner_v1beta.CloudControlsPartnerCoreAsyncClient() + + # Initialize request argument(s) + customer = cloudcontrolspartner_v1beta.Customer() + customer.display_name = "display_name_value" + + request = cloudcontrolspartner_v1beta.UpdateCustomerRequest( + customer=customer, + ) + + # Make the request + response = await client.update_customer(request=request) + + # Handle the response + print(response) + +# [END cloudcontrolspartner_v1beta_generated_CloudControlsPartnerCore_UpdateCustomer_async] diff --git a/packages/google-cloud-cloudcontrolspartner/samples/generated_samples/cloudcontrolspartner_v1beta_generated_cloud_controls_partner_core_update_customer_sync.py b/packages/google-cloud-cloudcontrolspartner/samples/generated_samples/cloudcontrolspartner_v1beta_generated_cloud_controls_partner_core_update_customer_sync.py new file mode 100644 index 000000000000..61fc0231454a --- /dev/null +++ b/packages/google-cloud-cloudcontrolspartner/samples/generated_samples/cloudcontrolspartner_v1beta_generated_cloud_controls_partner_core_update_customer_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateCustomer +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-cloudcontrolspartner + + +# [START cloudcontrolspartner_v1beta_generated_CloudControlsPartnerCore_UpdateCustomer_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import cloudcontrolspartner_v1beta + + +def sample_update_customer(): + # Create a client + client = cloudcontrolspartner_v1beta.CloudControlsPartnerCoreClient() + + # Initialize request argument(s) + customer = cloudcontrolspartner_v1beta.Customer() + customer.display_name = "display_name_value" + + request = cloudcontrolspartner_v1beta.UpdateCustomerRequest( + customer=customer, + ) + + # Make the request + response = client.update_customer(request=request) + + # Handle the response + print(response) + +# [END cloudcontrolspartner_v1beta_generated_CloudControlsPartnerCore_UpdateCustomer_sync] diff --git a/packages/google-cloud-cloudcontrolspartner/samples/generated_samples/snippet_metadata_google.cloud.cloudcontrolspartner.v1.json b/packages/google-cloud-cloudcontrolspartner/samples/generated_samples/snippet_metadata_google.cloud.cloudcontrolspartner.v1.json index 1e2b897c1a41..1ee30d0f97d4 100644 --- a/packages/google-cloud-cloudcontrolspartner/samples/generated_samples/snippet_metadata_google.cloud.cloudcontrolspartner.v1.json +++ b/packages/google-cloud-cloudcontrolspartner/samples/generated_samples/snippet_metadata_google.cloud.cloudcontrolspartner.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-cloudcontrolspartner", - "version": "0.2.3" + "version": "0.2.5" }, "snippets": [ { diff --git a/packages/google-cloud-cloudcontrolspartner/samples/generated_samples/snippet_metadata_google.cloud.cloudcontrolspartner.v1beta.json b/packages/google-cloud-cloudcontrolspartner/samples/generated_samples/snippet_metadata_google.cloud.cloudcontrolspartner.v1beta.json index d565ece67fbe..66a1560ca353 100644 --- a/packages/google-cloud-cloudcontrolspartner/samples/generated_samples/snippet_metadata_google.cloud.cloudcontrolspartner.v1beta.json +++ b/packages/google-cloud-cloudcontrolspartner/samples/generated_samples/snippet_metadata_google.cloud.cloudcontrolspartner.v1beta.json @@ -8,9 +8,341 @@ ], "language": "PYTHON", "name": "google-cloud-cloudcontrolspartner", - "version": "0.2.3" + "version": "0.2.5" }, "snippets": [ + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.cloudcontrolspartner_v1beta.CloudControlsPartnerCoreAsyncClient", + "shortName": "CloudControlsPartnerCoreAsyncClient" + }, + "fullName": "google.cloud.cloudcontrolspartner_v1beta.CloudControlsPartnerCoreAsyncClient.create_customer", + "method": { + "fullName": "google.cloud.cloudcontrolspartner.v1beta.CloudControlsPartnerCore.CreateCustomer", + "service": { + "fullName": "google.cloud.cloudcontrolspartner.v1beta.CloudControlsPartnerCore", + "shortName": "CloudControlsPartnerCore" + }, + "shortName": "CreateCustomer" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.cloudcontrolspartner_v1beta.types.CreateCustomerRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "customer", + "type": "google.cloud.cloudcontrolspartner_v1beta.types.Customer" + }, + { + "name": "customer_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.cloudcontrolspartner_v1beta.types.Customer", + "shortName": "create_customer" + }, + "description": 
"Sample for CreateCustomer", + "file": "cloudcontrolspartner_v1beta_generated_cloud_controls_partner_core_create_customer_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudcontrolspartner_v1beta_generated_CloudControlsPartnerCore_CreateCustomer_async", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudcontrolspartner_v1beta_generated_cloud_controls_partner_core_create_customer_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.cloudcontrolspartner_v1beta.CloudControlsPartnerCoreClient", + "shortName": "CloudControlsPartnerCoreClient" + }, + "fullName": "google.cloud.cloudcontrolspartner_v1beta.CloudControlsPartnerCoreClient.create_customer", + "method": { + "fullName": "google.cloud.cloudcontrolspartner.v1beta.CloudControlsPartnerCore.CreateCustomer", + "service": { + "fullName": "google.cloud.cloudcontrolspartner.v1beta.CloudControlsPartnerCore", + "shortName": "CloudControlsPartnerCore" + }, + "shortName": "CreateCustomer" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.cloudcontrolspartner_v1beta.types.CreateCustomerRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "customer", + "type": "google.cloud.cloudcontrolspartner_v1beta.types.Customer" + }, + { + "name": "customer_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.cloudcontrolspartner_v1beta.types.Customer", + "shortName": "create_customer" + }, + "description": "Sample for CreateCustomer", + "file": "cloudcontrolspartner_v1beta_generated_cloud_controls_partner_core_create_customer_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudcontrolspartner_v1beta_generated_CloudControlsPartnerCore_CreateCustomer_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudcontrolspartner_v1beta_generated_cloud_controls_partner_core_create_customer_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.cloudcontrolspartner_v1beta.CloudControlsPartnerCoreAsyncClient", + "shortName": "CloudControlsPartnerCoreAsyncClient" + }, + "fullName": "google.cloud.cloudcontrolspartner_v1beta.CloudControlsPartnerCoreAsyncClient.delete_customer", + "method": { + "fullName": "google.cloud.cloudcontrolspartner.v1beta.CloudControlsPartnerCore.DeleteCustomer", + "service": { + "fullName": "google.cloud.cloudcontrolspartner.v1beta.CloudControlsPartnerCore", + "shortName": "CloudControlsPartnerCore" + }, + "shortName": "DeleteCustomer" + }, + "parameters": [ + { + "name": "request", + "type": 
"google.cloud.cloudcontrolspartner_v1beta.types.DeleteCustomerRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "shortName": "delete_customer" + }, + "description": "Sample for DeleteCustomer", + "file": "cloudcontrolspartner_v1beta_generated_cloud_controls_partner_core_delete_customer_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudcontrolspartner_v1beta_generated_CloudControlsPartnerCore_DeleteCustomer_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudcontrolspartner_v1beta_generated_cloud_controls_partner_core_delete_customer_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.cloudcontrolspartner_v1beta.CloudControlsPartnerCoreClient", + "shortName": "CloudControlsPartnerCoreClient" + }, + "fullName": "google.cloud.cloudcontrolspartner_v1beta.CloudControlsPartnerCoreClient.delete_customer", + "method": { + "fullName": "google.cloud.cloudcontrolspartner.v1beta.CloudControlsPartnerCore.DeleteCustomer", + "service": { + "fullName": "google.cloud.cloudcontrolspartner.v1beta.CloudControlsPartnerCore", + "shortName": "CloudControlsPartnerCore" + }, + "shortName": "DeleteCustomer" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.cloudcontrolspartner_v1beta.types.DeleteCustomerRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "shortName": "delete_customer" + }, + "description": "Sample for DeleteCustomer", + "file": "cloudcontrolspartner_v1beta_generated_cloud_controls_partner_core_delete_customer_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudcontrolspartner_v1beta_generated_CloudControlsPartnerCore_DeleteCustomer_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudcontrolspartner_v1beta_generated_cloud_controls_partner_core_delete_customer_sync.py" + }, { "canonical": true, "clientMethod": { @@ -1299,6 +1631,175 @@ ], "title": "cloudcontrolspartner_v1beta_generated_cloud_controls_partner_core_list_workloads_sync.py" }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.cloudcontrolspartner_v1beta.CloudControlsPartnerCoreAsyncClient", + "shortName": "CloudControlsPartnerCoreAsyncClient" + }, + "fullName": "google.cloud.cloudcontrolspartner_v1beta.CloudControlsPartnerCoreAsyncClient.update_customer", + "method": { + "fullName": 
"google.cloud.cloudcontrolspartner.v1beta.CloudControlsPartnerCore.UpdateCustomer", + "service": { + "fullName": "google.cloud.cloudcontrolspartner.v1beta.CloudControlsPartnerCore", + "shortName": "CloudControlsPartnerCore" + }, + "shortName": "UpdateCustomer" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.cloudcontrolspartner_v1beta.types.UpdateCustomerRequest" + }, + { + "name": "customer", + "type": "google.cloud.cloudcontrolspartner_v1beta.types.Customer" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.cloudcontrolspartner_v1beta.types.Customer", + "shortName": "update_customer" + }, + "description": "Sample for UpdateCustomer", + "file": "cloudcontrolspartner_v1beta_generated_cloud_controls_partner_core_update_customer_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudcontrolspartner_v1beta_generated_CloudControlsPartnerCore_UpdateCustomer_async", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudcontrolspartner_v1beta_generated_cloud_controls_partner_core_update_customer_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.cloudcontrolspartner_v1beta.CloudControlsPartnerCoreClient", + "shortName": "CloudControlsPartnerCoreClient" + }, + "fullName": "google.cloud.cloudcontrolspartner_v1beta.CloudControlsPartnerCoreClient.update_customer", + "method": { + "fullName": "google.cloud.cloudcontrolspartner.v1beta.CloudControlsPartnerCore.UpdateCustomer", + "service": { + "fullName": "google.cloud.cloudcontrolspartner.v1beta.CloudControlsPartnerCore", + "shortName": "CloudControlsPartnerCore" + }, + "shortName": "UpdateCustomer" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.cloudcontrolspartner_v1beta.types.UpdateCustomerRequest" + }, + { + "name": "customer", + "type": "google.cloud.cloudcontrolspartner_v1beta.types.Customer" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.cloudcontrolspartner_v1beta.types.Customer", + "shortName": "update_customer" + }, + "description": "Sample for UpdateCustomer", + "file": "cloudcontrolspartner_v1beta_generated_cloud_controls_partner_core_update_customer_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudcontrolspartner_v1beta_generated_CloudControlsPartnerCore_UpdateCustomer_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 49, + 
"type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudcontrolspartner_v1beta_generated_cloud_controls_partner_core_update_customer_sync.py" + }, { "canonical": true, "clientMethod": { diff --git a/packages/google-cloud-cloudcontrolspartner/scripts/fixup_cloudcontrolspartner_v1beta_keywords.py b/packages/google-cloud-cloudcontrolspartner/scripts/fixup_cloudcontrolspartner_v1beta_keywords.py index fd1193a7b0c7..64b1d9bdfb00 100644 --- a/packages/google-cloud-cloudcontrolspartner/scripts/fixup_cloudcontrolspartner_v1beta_keywords.py +++ b/packages/google-cloud-cloudcontrolspartner/scripts/fixup_cloudcontrolspartner_v1beta_keywords.py @@ -39,6 +39,8 @@ def partition( class cloudcontrolspartnerCallTransformer(cst.CSTTransformer): CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { + 'create_customer': ('parent', 'customer', 'customer_id', ), + 'delete_customer': ('name', ), 'get_customer': ('name', ), 'get_ekm_connections': ('name', ), 'get_partner': ('name', ), @@ -49,6 +51,7 @@ class cloudcontrolspartnerCallTransformer(cst.CSTTransformer): 'list_customers': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), 'list_violations': ('parent', 'page_size', 'page_token', 'filter', 'order_by', 'interval', ), 'list_workloads': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), + 'update_customer': ('customer', 'update_mask', ), } def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: diff --git a/packages/google-cloud-cloudcontrolspartner/tests/unit/gapic/cloudcontrolspartner_v1/test_cloud_controls_partner_core.py b/packages/google-cloud-cloudcontrolspartner/tests/unit/gapic/cloudcontrolspartner_v1/test_cloud_controls_partner_core.py index 673f4cb40d7c..4e558ad8b865 100644 --- a/packages/google-cloud-cloudcontrolspartner/tests/unit/gapic/cloudcontrolspartner_v1/test_cloud_controls_partner_core.py +++ b/packages/google-cloud-cloudcontrolspartner/tests/unit/gapic/cloudcontrolspartner_v1/test_cloud_controls_partner_core.py @@ -68,6 +68,13 @@ partners, ) +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -343,6 +350,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = CloudControlsPartnerCoreClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = CloudControlsPartnerCoreClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -6813,10 +6863,14 @@ def test_get_workload_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.CloudControlsPartnerCoreRestInterceptor, "post_get_workload" ) as post, mock.patch.object( + transports.CloudControlsPartnerCoreRestInterceptor, + "post_get_workload_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.CloudControlsPartnerCoreRestInterceptor, "pre_get_workload" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = customer_workloads.GetWorkloadRequest.pb( customer_workloads.GetWorkloadRequest() ) @@ -6842,6 +6896,7 @@ def test_get_workload_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = customer_workloads.Workload() + post_with_metadata.return_value = customer_workloads.Workload(), metadata client.get_workload( request, @@ -6853,6 +6908,7 @@ def test_get_workload_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_workloads_rest_bad_request( @@ -6943,10 +6999,14 @@ def test_list_workloads_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.CloudControlsPartnerCoreRestInterceptor, "post_list_workloads" ) as post, mock.patch.object( + transports.CloudControlsPartnerCoreRestInterceptor, + "post_list_workloads_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.CloudControlsPartnerCoreRestInterceptor, "pre_list_workloads" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = customer_workloads.ListWorkloadsRequest.pb( customer_workloads.ListWorkloadsRequest() ) @@ -6972,6 +7032,10 @@ def test_list_workloads_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = customer_workloads.ListWorkloadsResponse() + post_with_metadata.return_value = ( + customer_workloads.ListWorkloadsResponse(), + metadata, + ) client.list_workloads( request, @@ -6983,6 +7047,7 @@ def test_list_workloads_rest_interceptors(null_interceptor): 
pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_customer_rest_bad_request(request_type=customers.GetCustomerRequest): @@ -7069,10 +7134,14 @@ def test_get_customer_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.CloudControlsPartnerCoreRestInterceptor, "post_get_customer" ) as post, mock.patch.object( + transports.CloudControlsPartnerCoreRestInterceptor, + "post_get_customer_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.CloudControlsPartnerCoreRestInterceptor, "pre_get_customer" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = customers.GetCustomerRequest.pb(customers.GetCustomerRequest()) transcode.return_value = { "method": "post", @@ -7094,6 +7163,7 @@ def test_get_customer_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = customers.Customer() + post_with_metadata.return_value = customers.Customer(), metadata client.get_customer( request, @@ -7105,6 +7175,7 @@ def test_get_customer_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_customers_rest_bad_request(request_type=customers.ListCustomersRequest): @@ -7189,10 +7260,14 @@ def test_list_customers_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.CloudControlsPartnerCoreRestInterceptor, "post_list_customers" ) as post, mock.patch.object( + transports.CloudControlsPartnerCoreRestInterceptor, + "post_list_customers_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.CloudControlsPartnerCoreRestInterceptor, "pre_list_customers" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = customers.ListCustomersRequest.pb(customers.ListCustomersRequest()) transcode.return_value = { "method": "post", @@ -7216,6 +7291,7 @@ def test_list_customers_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = customers.ListCustomersResponse() + post_with_metadata.return_value = customers.ListCustomersResponse(), metadata client.list_customers( request, @@ -7227,6 +7303,7 @@ def test_list_customers_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_ekm_connections_rest_bad_request( @@ -7315,10 +7392,14 @@ def test_get_ekm_connections_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.CloudControlsPartnerCoreRestInterceptor, "post_get_ekm_connections" ) as post, mock.patch.object( + transports.CloudControlsPartnerCoreRestInterceptor, + "post_get_ekm_connections_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.CloudControlsPartnerCoreRestInterceptor, "pre_get_ekm_connections" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = ekm_connections.GetEkmConnectionsRequest.pb( ekm_connections.GetEkmConnectionsRequest() ) @@ -7344,6 +7425,7 @@ def test_get_ekm_connections_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = ekm_connections.EkmConnections() + post_with_metadata.return_value = ekm_connections.EkmConnections(), metadata client.get_ekm_connections( request, @@ -7355,6 +7437,7 @@ def 
test_get_ekm_connections_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_partner_permissions_rest_bad_request( @@ -7450,11 +7533,15 @@ def test_get_partner_permissions_rest_interceptors(null_interceptor): transports.CloudControlsPartnerCoreRestInterceptor, "post_get_partner_permissions", ) as post, mock.patch.object( + transports.CloudControlsPartnerCoreRestInterceptor, + "post_get_partner_permissions_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.CloudControlsPartnerCoreRestInterceptor, "pre_get_partner_permissions", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = partner_permissions.GetPartnerPermissionsRequest.pb( partner_permissions.GetPartnerPermissionsRequest() ) @@ -7480,6 +7567,10 @@ def test_get_partner_permissions_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = partner_permissions.PartnerPermissions() + post_with_metadata.return_value = ( + partner_permissions.PartnerPermissions(), + metadata, + ) client.get_partner_permissions( request, @@ -7491,6 +7582,7 @@ def test_get_partner_permissions_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_access_approval_requests_rest_bad_request( @@ -7584,11 +7676,15 @@ def test_list_access_approval_requests_rest_interceptors(null_interceptor): transports.CloudControlsPartnerCoreRestInterceptor, "post_list_access_approval_requests", ) as post, mock.patch.object( + transports.CloudControlsPartnerCoreRestInterceptor, + "post_list_access_approval_requests_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.CloudControlsPartnerCoreRestInterceptor, "pre_list_access_approval_requests", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = access_approval_requests.ListAccessApprovalRequestsRequest.pb( access_approval_requests.ListAccessApprovalRequestsRequest() ) @@ -7618,6 +7714,10 @@ def test_list_access_approval_requests_rest_interceptors(null_interceptor): post.return_value = ( access_approval_requests.ListAccessApprovalRequestsResponse() ) + post_with_metadata.return_value = ( + access_approval_requests.ListAccessApprovalRequestsResponse(), + metadata, + ) client.list_access_approval_requests( request, @@ -7629,6 +7729,7 @@ def test_list_access_approval_requests_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_partner_rest_bad_request(request_type=partners.GetPartnerRequest): @@ -7715,10 +7816,14 @@ def test_get_partner_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.CloudControlsPartnerCoreRestInterceptor, "post_get_partner" ) as post, mock.patch.object( + transports.CloudControlsPartnerCoreRestInterceptor, + "post_get_partner_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.CloudControlsPartnerCoreRestInterceptor, "pre_get_partner" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = partners.GetPartnerRequest.pb(partners.GetPartnerRequest()) transcode.return_value = { "method": "post", @@ -7740,6 +7845,7 @@ def test_get_partner_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = 
partners.Partner() + post_with_metadata.return_value = partners.Partner(), metadata client.get_partner( request, @@ -7751,6 +7857,7 @@ def test_get_partner_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_initialize_client_w_rest(): diff --git a/packages/google-cloud-cloudcontrolspartner/tests/unit/gapic/cloudcontrolspartner_v1/test_cloud_controls_partner_monitoring.py b/packages/google-cloud-cloudcontrolspartner/tests/unit/gapic/cloudcontrolspartner_v1/test_cloud_controls_partner_monitoring.py index 4ca06b66d61d..abd85374fb0d 100644 --- a/packages/google-cloud-cloudcontrolspartner/tests/unit/gapic/cloudcontrolspartner_v1/test_cloud_controls_partner_monitoring.py +++ b/packages/google-cloud-cloudcontrolspartner/tests/unit/gapic/cloudcontrolspartner_v1/test_cloud_controls_partner_monitoring.py @@ -62,6 +62,13 @@ ) from google.cloud.cloudcontrolspartner_v1.types import violations +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -352,6 +359,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = CloudControlsPartnerMonitoringClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = CloudControlsPartnerMonitoringClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -2782,10 +2832,14 @@ def test_list_violations_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.CloudControlsPartnerMonitoringRestInterceptor, "post_list_violations" ) as post, mock.patch.object( + transports.CloudControlsPartnerMonitoringRestInterceptor, + "post_list_violations_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.CloudControlsPartnerMonitoringRestInterceptor, "pre_list_violations" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = violations.ListViolationsRequest.pb( violations.ListViolationsRequest() ) @@ -2811,6 +2865,7 @@ def test_list_violations_rest_interceptors(null_interceptor): ] 
pre.return_value = request, metadata post.return_value = violations.ListViolationsResponse() + post_with_metadata.return_value = violations.ListViolationsResponse(), metadata client.list_violations( request, @@ -2822,6 +2877,7 @@ def test_list_violations_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_violation_rest_bad_request(request_type=violations.GetViolationRequest): @@ -2918,10 +2974,14 @@ def test_get_violation_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.CloudControlsPartnerMonitoringRestInterceptor, "post_get_violation" ) as post, mock.patch.object( + transports.CloudControlsPartnerMonitoringRestInterceptor, + "post_get_violation_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.CloudControlsPartnerMonitoringRestInterceptor, "pre_get_violation" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = violations.GetViolationRequest.pb(violations.GetViolationRequest()) transcode.return_value = { "method": "post", @@ -2943,6 +3003,7 @@ def test_get_violation_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = violations.Violation() + post_with_metadata.return_value = violations.Violation(), metadata client.get_violation( request, @@ -2954,6 +3015,7 @@ def test_get_violation_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_initialize_client_w_rest(): diff --git a/packages/google-cloud-cloudcontrolspartner/tests/unit/gapic/cloudcontrolspartner_v1beta/test_cloud_controls_partner_core.py b/packages/google-cloud-cloudcontrolspartner/tests/unit/gapic/cloudcontrolspartner_v1beta/test_cloud_controls_partner_core.py index 71bdcb7de06b..bc04f01bff51 100644 --- a/packages/google-cloud-cloudcontrolspartner/tests/unit/gapic/cloudcontrolspartner_v1beta/test_cloud_controls_partner_core.py +++ b/packages/google-cloud-cloudcontrolspartner/tests/unit/gapic/cloudcontrolspartner_v1beta/test_cloud_controls_partner_core.py @@ -51,6 +51,7 @@ from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.oauth2 import service_account +from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore from google.cloud.cloudcontrolspartner_v1beta.services.cloud_controls_partner_core import ( @@ -61,6 +62,7 @@ ) from google.cloud.cloudcontrolspartner_v1beta.types import ( access_approval_requests, + completion_state, customer_workloads, customers, ekm_connections, @@ -68,6 +70,13 @@ partners, ) +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -343,6 +352,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
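# Editorial note (annotation only, not part of the generated diff): the v1beta test
# additions below mirror the credential-info tests added to the v1 clients above and
# also cover the new Customer.organization_domain field plus the new CreateCustomer
# and UpdateCustomer RPCs. A minimal sketch of the flattened update_customer call the
# new tests exercise (values copied from the tests; the field_mask_pb2 import is added
# by this diff):
#
#   from google.protobuf import field_mask_pb2
#   client.update_customer(
#       customer=customers.Customer(name="name_value"),
#       update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
#   )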
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = CloudControlsPartnerCoreClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = CloudControlsPartnerCoreClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -2043,6 +2095,7 @@ def test_get_customer(request_type, transport: str = "grpc"): name="name_value", display_name="display_name_value", is_onboarded=True, + organization_domain="organization_domain_value", ) response = client.get_customer(request) @@ -2057,6 +2110,7 @@ def test_get_customer(request_type, transport: str = "grpc"): assert response.name == "name_value" assert response.display_name == "display_name_value" assert response.is_onboarded is True + assert response.organization_domain == "organization_domain_value" def test_get_customer_non_empty_request_with_auto_populated_field(): @@ -2185,6 +2239,7 @@ async def test_get_customer_async( name="name_value", display_name="display_name_value", is_onboarded=True, + organization_domain="organization_domain_value", ) ) response = await client.get_customer(request) @@ -2200,6 +2255,7 @@ async def test_get_customer_async( assert response.name == "name_value" assert response.display_name == "display_name_value" assert response.is_onboarded is True + assert response.organization_domain == "organization_domain_value" @pytest.mark.asyncio @@ -4466,13 +4522,85 @@ async def test_get_partner_flattened_error_async(): ) -def test_get_workload_rest_use_cached_wrapped_rpc(): +@pytest.mark.parametrize( + "request_type", + [ + customers.CreateCustomerRequest, + dict, + ], +) +def test_create_customer(request_type, transport: str = "grpc"): + client = CloudControlsPartnerCoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_customer), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = customers.Customer( + name="name_value", + display_name="display_name_value", + is_onboarded=True, + organization_domain="organization_domain_value", + ) + response = client.create_customer(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = customers.CreateCustomerRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, customers.Customer) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.is_onboarded is True + assert response.organization_domain == "organization_domain_value" + + +def test_create_customer_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = CloudControlsPartnerCoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = customers.CreateCustomerRequest( + parent="parent_value", + customer_id="customer_id_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_customer), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.create_customer(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == customers.CreateCustomerRequest( + parent="parent_value", + customer_id="customer_id_value", + ) + + +def test_create_customer_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = CloudControlsPartnerCoreClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport="grpc", ) # Should wrap all calls on client creation @@ -4480,179 +4608,346 @@ def test_get_workload_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_workload in client._transport._wrapped_methods + assert client._transport.create_customer in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.get_workload] = mock_rpc - + client._transport._wrapped_methods[client._transport.create_customer] = mock_rpc request = {} - client.get_workload(request) + client.create_customer(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.get_workload(request) + client.create_customer(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_workload_rest_required_fields( - request_type=customer_workloads.GetWorkloadRequest, +@pytest.mark.asyncio +async def test_create_customer_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", ): - transport_class = transports.CloudControlsPartnerCoreRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = CloudControlsPartnerCoreAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_workload._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - # verify required fields with default values are now present + # Ensure method has been cached + assert ( + client._client._transport.create_customer + in client._client._transport._wrapped_methods + ) - jsonified_request["name"] = "name_value" + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.create_customer + ] = mock_rpc - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_workload._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) + request = {} + await client.create_customer(request) - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 - client = CloudControlsPartnerCoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) + await client.create_customer(request) - # Designate an appropriate value for the returned response. - return_value = customer_workloads.Workload() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = customer_workloads.Workload.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) +@pytest.mark.asyncio +async def test_create_customer_async( + transport: str = "grpc_asyncio", request_type=customers.CreateCustomerRequest +): + client = CloudControlsPartnerCoreAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() - response = client.get_workload(request) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_customer), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + customers.Customer( + name="name_value", + display_name="display_name_value", + is_onboarded=True, + organization_domain="organization_domain_value", + ) + ) + response = await client.create_customer(request) - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = customers.CreateCustomerRequest() + assert args[0] == request + # Establish that the response is the type that we expect. + assert isinstance(response, customers.Customer) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.is_onboarded is True + assert response.organization_domain == "organization_domain_value" -def test_get_workload_rest_unset_required_fields(): - transport = transports.CloudControlsPartnerCoreRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - unset_fields = transport.get_workload._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) +@pytest.mark.asyncio +async def test_create_customer_async_from_dict(): + await test_create_customer_async(request_type=dict) -def test_get_workload_rest_flattened(): +def test_create_customer_field_headers(): client = CloudControlsPartnerCoreClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = customer_workloads.Workload() - - # get arguments that satisfy an http rule for this method - sample_request = { - "name": "organizations/sample1/locations/sample2/customers/sample3/workloads/sample4" - } + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = customers.CreateCustomerRequest() - # get truthy value for each flattened field - mock_args = dict( - name="name_value", - ) - mock_args.update(sample_request) + request.parent = "parent_value" - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = customer_workloads.Workload.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_customer), "__call__") as call: + call.return_value = customers.Customer() + client.create_customer(request) - client.get_workload(**mock_args) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1beta/{name=organizations/*/locations/*/customers/*/workloads/*}" - % client.transport._host, - args[1], - ) + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] -def test_get_workload_rest_flattened_error(transport: str = "rest"): - client = CloudControlsPartnerCoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, +@pytest.mark.asyncio +async def test_create_customer_field_headers_async(): + client = CloudControlsPartnerCoreAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = customers.CreateCustomerRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_customer), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(customers.Customer()) + await client.create_customer(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_customer_flattened(): + client = CloudControlsPartnerCoreClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_customer), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = customers.Customer() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_customer( + parent="parent_value", + customer=customers.Customer(name="name_value"), + customer_id="customer_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].customer + mock_val = customers.Customer(name="name_value") + assert arg == mock_val + arg = args[0].customer_id + mock_val = "customer_id_value" + assert arg == mock_val + + +def test_create_customer_flattened_error(): + client = CloudControlsPartnerCoreClient( + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_workload( - customer_workloads.GetWorkloadRequest(), + client.create_customer( + customers.CreateCustomerRequest(), + parent="parent_value", + customer=customers.Customer(name="name_value"), + customer_id="customer_id_value", + ) + + +@pytest.mark.asyncio +async def test_create_customer_flattened_async(): + client = CloudControlsPartnerCoreAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_customer), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = customers.Customer() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(customers.Customer()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_customer( + parent="parent_value", + customer=customers.Customer(name="name_value"), + customer_id="customer_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].customer + mock_val = customers.Customer(name="name_value") + assert arg == mock_val + arg = args[0].customer_id + mock_val = "customer_id_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_customer_flattened_error_async(): + client = CloudControlsPartnerCoreAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_customer( + customers.CreateCustomerRequest(), + parent="parent_value", + customer=customers.Customer(name="name_value"), + customer_id="customer_id_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + customers.UpdateCustomerRequest, + dict, + ], +) +def test_update_customer(request_type, transport: str = "grpc"): + client = CloudControlsPartnerCoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_customer), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = customers.Customer( name="name_value", + display_name="display_name_value", + is_onboarded=True, + organization_domain="organization_domain_value", ) + response = client.update_customer(request) + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = customers.UpdateCustomerRequest() + assert args[0] == request -def test_list_workloads_rest_use_cached_wrapped_rpc(): + # Establish that the response is the type that we expect. + assert isinstance(response, customers.Customer) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.is_onboarded is True + assert response.organization_domain == "organization_domain_value" + + +def test_update_customer_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = CloudControlsPartnerCoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = customers.UpdateCustomerRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_customer), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.update_customer(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == customers.UpdateCustomerRequest() + + +def test_update_customer_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = CloudControlsPartnerCoreClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport="grpc", ) # Should wrap all calls on client creation @@ -4660,263 +4955,331 @@ def test_list_workloads_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.list_workloads in client._transport._wrapped_methods + assert client._transport.update_customer in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.list_workloads] = mock_rpc - + client._transport._wrapped_methods[client._transport.update_customer] = mock_rpc request = {} - client.list_workloads(request) + client.update_customer(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.list_workloads(request) + client.update_customer(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_workloads_rest_required_fields( - request_type=customer_workloads.ListWorkloadsRequest, +@pytest.mark.asyncio +async def test_update_customer_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", ): - transport_class = transports.CloudControlsPartnerCoreRestTransport + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = CloudControlsPartnerCoreAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - # verify fields with default values are dropped + # Ensure method has been cached + assert ( + client._client._transport.update_customer + in client._client._transport._wrapped_methods + ) - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).list_workloads._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.update_customer + ] = mock_rpc - # verify required fields with default values are now present + request = {} + await client.update_customer(request) - jsonified_request["parent"] = "parent_value" + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).list_workloads._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "filter", - "order_by", - "page_size", - "page_token", - ) + await client.update_customer(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_customer_async( + transport: str = "grpc_asyncio", request_type=customers.UpdateCustomerRequest +): + client = CloudControlsPartnerCoreAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, ) - jsonified_request.update(unset_fields) - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_customer), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + customers.Customer( + name="name_value", + display_name="display_name_value", + is_onboarded=True, + organization_domain="organization_domain_value", + ) + ) + response = await client.update_customer(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = customers.UpdateCustomerRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, customers.Customer) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.is_onboarded is True + assert response.organization_domain == "organization_domain_value" + + +@pytest.mark.asyncio +async def test_update_customer_async_from_dict(): + await test_update_customer_async(request_type=dict) + +def test_update_customer_field_headers(): client = CloudControlsPartnerCoreClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) - request = request_type(**request_init) - # Designate an appropriate value for the returned response. - return_value = customer_workloads.ListWorkloadsResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = customers.UpdateCustomerRequest() - response_value = Response() - response_value.status_code = 200 + request.customer.name = "name_value" - # Convert return value to protobuf type - return_value = customer_workloads.ListWorkloadsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_customer), "__call__") as call: + call.return_value = customers.Customer() + client.update_customer(request) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request - response = client.list_workloads(request) + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "customer.name=name_value", + ) in kw["metadata"] - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params +@pytest.mark.asyncio +async def test_update_customer_field_headers_async(): + client = CloudControlsPartnerCoreAsyncClient( + credentials=async_anonymous_credentials(), + ) -def test_list_workloads_rest_unset_required_fields(): - transport = transports.CloudControlsPartnerCoreRestTransport( - credentials=ga_credentials.AnonymousCredentials + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = customers.UpdateCustomerRequest() + + request.customer.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_customer), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(customers.Customer()) + await client.update_customer(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "customer.name=name_value", + ) in kw["metadata"] + + +def test_update_customer_flattened(): + client = CloudControlsPartnerCoreClient( + credentials=ga_credentials.AnonymousCredentials(), ) - unset_fields = transport.list_workloads._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "filter", - "orderBy", - "pageSize", - "pageToken", - ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_customer), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = customers.Customer() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_customer( + customer=customers.Customer(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) - & set(("parent",)) - ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].customer + mock_val = customers.Customer(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val -def test_list_workloads_rest_flattened(): +def test_update_customer_flattened_error(): client = CloudControlsPartnerCoreClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = customer_workloads.ListWorkloadsResponse() + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_customer( + customers.UpdateCustomerRequest(), + customer=customers.Customer(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) - # get arguments that satisfy an http rule for this method - sample_request = { - "parent": "organizations/sample1/locations/sample2/customers/sample3" - } - # get truthy value for each flattened field - mock_args = dict( - parent="parent_value", - ) - mock_args.update(sample_request) +@pytest.mark.asyncio +async def test_update_customer_flattened_async(): + client = CloudControlsPartnerCoreAsyncClient( + credentials=async_anonymous_credentials(), + ) - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = customer_workloads.ListWorkloadsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_customer), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = customers.Customer() - client.list_workloads(**mock_args) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(customers.Customer()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_customer( + customer=customers.Customer(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) # Establish that the underlying call was made with the expected # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1beta/{parent=organizations/*/locations/*/customers/*}/workloads" - % client.transport._host, - args[1], - ) + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].customer + mock_val = customers.Customer(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val -def test_list_workloads_rest_flattened_error(transport: str = "rest"): - client = CloudControlsPartnerCoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, +@pytest.mark.asyncio +async def test_update_customer_flattened_error_async(): + client = CloudControlsPartnerCoreAsyncClient( + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.list_workloads( - customer_workloads.ListWorkloadsRequest(), - parent="parent_value", + await client.update_customer( + customers.UpdateCustomerRequest(), + customer=customers.Customer(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) -def test_list_workloads_rest_pager(transport: str = "rest"): +@pytest.mark.parametrize( + "request_type", + [ + customers.DeleteCustomerRequest, + dict, + ], +) +def test_delete_customer(request_type, transport: str = "grpc"): client = CloudControlsPartnerCoreClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - customer_workloads.ListWorkloadsResponse( - workloads=[ - customer_workloads.Workload(), - customer_workloads.Workload(), - customer_workloads.Workload(), - ], - next_page_token="abc", - ), - customer_workloads.ListWorkloadsResponse( - workloads=[], - next_page_token="def", - ), - customer_workloads.ListWorkloadsResponse( - workloads=[ - customer_workloads.Workload(), - ], - next_page_token="ghi", - ), - customer_workloads.ListWorkloadsResponse( - workloads=[ - customer_workloads.Workload(), - customer_workloads.Workload(), - ], - ), - ) - # Two responses for two calls - response = response + response + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() - # Wrap the values into proper Response objs - response = tuple( - customer_workloads.ListWorkloadsResponse.to_json(x) for x in response - ) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_customer), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_customer(request) - sample_request = { - "parent": "organizations/sample1/locations/sample2/customers/sample3" - } + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = customers.DeleteCustomerRequest() + assert args[0] == request - pager = client.list_workloads(request=sample_request) + # Establish that the response is the type that we expect. + assert response is None - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, customer_workloads.Workload) for i in results) - pages = list(client.list_workloads(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token +def test_delete_customer_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = CloudControlsPartnerCoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = customers.DeleteCustomerRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_customer), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.delete_customer(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == customers.DeleteCustomerRequest( + name="name_value", + ) -def test_get_customer_rest_use_cached_wrapped_rpc(): +def test_delete_customer_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = CloudControlsPartnerCoreClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport="grpc", ) # Should wrap all calls on client creation @@ -4924,32 +5287,284 @@ def test_get_customer_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_customer in client._transport._wrapped_methods + assert client._transport.delete_customer in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.get_customer] = mock_rpc - + client._transport._wrapped_methods[client._transport.delete_customer] = mock_rpc request = {} - client.get_customer(request) + client.delete_customer(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.get_customer(request) + client.delete_customer(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_customer_rest_required_fields(request_type=customers.GetCustomerRequest): - transport_class = transports.CloudControlsPartnerCoreRestTransport +@pytest.mark.asyncio +async def test_delete_customer_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = CloudControlsPartnerCoreAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) - request_init = {} + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.delete_customer + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.delete_customer + ] = mock_rpc + + request = {} + await client.delete_customer(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.delete_customer(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_customer_async( + transport: str = "grpc_asyncio", request_type=customers.DeleteCustomerRequest +): + client = CloudControlsPartnerCoreAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_customer), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_customer(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = customers.DeleteCustomerRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_customer_async_from_dict(): + await test_delete_customer_async(request_type=dict) + + +def test_delete_customer_field_headers(): + client = CloudControlsPartnerCoreClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = customers.DeleteCustomerRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_customer), "__call__") as call: + call.return_value = None + client.delete_customer(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_customer_field_headers_async(): + client = CloudControlsPartnerCoreAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = customers.DeleteCustomerRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_customer), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_customer(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_customer_flattened(): + client = CloudControlsPartnerCoreClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_customer), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_customer( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_customer_flattened_error(): + client = CloudControlsPartnerCoreClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_customer( + customers.DeleteCustomerRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_customer_flattened_async(): + client = CloudControlsPartnerCoreAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_customer), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_customer( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_customer_flattened_error_async(): + client = CloudControlsPartnerCoreAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_customer( + customers.DeleteCustomerRequest(), + name="name_value", + ) + + +def test_get_workload_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CloudControlsPartnerCoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_workload in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_workload] = mock_rpc + + request = {} + client.get_workload(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_workload(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_workload_rest_required_fields( + request_type=customer_workloads.GetWorkloadRequest, +): + transport_class = transports.CloudControlsPartnerCoreRestTransport + + request_init = {} request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) @@ -4961,7 +5576,7 @@ def test_get_customer_rest_required_fields(request_type=customers.GetCustomerReq unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_customer._get_unset_required_fields(jsonified_request) + ).get_workload._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -4970,7 +5585,7 @@ def test_get_customer_rest_required_fields(request_type=customers.GetCustomerReq unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_customer._get_unset_required_fields(jsonified_request) + ).get_workload._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -4984,7 +5599,7 @@ def test_get_customer_rest_required_fields(request_type=customers.GetCustomerReq request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = customers.Customer() + return_value = customer_workloads.Workload() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -5005,30 +5620,30 @@ def test_get_customer_rest_required_fields(request_type=customers.GetCustomerReq response_value.status_code = 200 # Convert return value to protobuf type - return_value = customers.Customer.pb(return_value) + return_value = customer_workloads.Workload.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_customer(request) + response = client.get_workload(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_customer_rest_unset_required_fields(): +def test_get_workload_rest_unset_required_fields(): transport = transports.CloudControlsPartnerCoreRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_customer._get_unset_required_fields({}) + unset_fields = transport.get_workload._get_unset_required_fields({}) assert set(unset_fields) == (set(()) & set(("name",))) -def test_get_customer_rest_flattened(): +def test_get_workload_rest_flattened(): client = CloudControlsPartnerCoreClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -5037,11 +5652,11 @@ def test_get_customer_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = customers.Customer() + return_value = customer_workloads.Workload() # get arguments that satisfy an http rule for this method sample_request = { - "name": "organizations/sample1/locations/sample2/customers/sample3" + "name": "organizations/sample1/locations/sample2/customers/sample3/workloads/sample4" } # get truthy value for each flattened field @@ -5054,26 +5669,26 @@ def test_get_customer_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = customers.Customer.pb(return_value) + return_value = customer_workloads.Workload.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_customer(**mock_args) + client.get_workload(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1beta/{name=organizations/*/locations/*/customers/*}" + "%s/v1beta/{name=organizations/*/locations/*/customers/*/workloads/*}" % client.transport._host, args[1], ) -def test_get_customer_rest_flattened_error(transport: str = "rest"): +def test_get_workload_rest_flattened_error(transport: str = "rest"): client = CloudControlsPartnerCoreClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -5082,13 +5697,13 @@ def test_get_customer_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_customer( - customers.GetCustomerRequest(), + client.get_workload( + customer_workloads.GetWorkloadRequest(), name="name_value", ) -def test_list_customers_rest_use_cached_wrapped_rpc(): +def test_list_workloads_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -5102,30 +5717,30 @@ def test_list_customers_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.list_customers in client._transport._wrapped_methods + assert client._transport.list_workloads in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.list_customers] = mock_rpc + client._transport._wrapped_methods[client._transport.list_workloads] = mock_rpc request = {} - client.list_customers(request) + client.list_workloads(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.list_customers(request) + client.list_workloads(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_customers_rest_required_fields( - request_type=customers.ListCustomersRequest, +def test_list_workloads_rest_required_fields( + request_type=customer_workloads.ListWorkloadsRequest, ): transport_class = transports.CloudControlsPartnerCoreRestTransport @@ -5141,7 +5756,7 @@ def test_list_customers_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_customers._get_unset_required_fields(jsonified_request) + ).list_workloads._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -5150,7 +5765,7 @@ def test_list_customers_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_customers._get_unset_required_fields(jsonified_request) + ).list_workloads._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. assert not set(unset_fields) - set( ( @@ -5173,7 +5788,7 @@ def test_list_customers_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = customers.ListCustomersResponse() + return_value = customer_workloads.ListWorkloadsResponse() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -5194,26 +5809,26 @@ def test_list_customers_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = customers.ListCustomersResponse.pb(return_value) + return_value = customer_workloads.ListWorkloadsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_customers(request) + response = client.list_workloads(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_customers_rest_unset_required_fields(): +def test_list_workloads_rest_unset_required_fields(): transport = transports.CloudControlsPartnerCoreRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_customers._get_unset_required_fields({}) + unset_fields = transport.list_workloads._get_unset_required_fields({}) assert set(unset_fields) == ( set( ( @@ -5227,7 +5842,7 @@ def test_list_customers_rest_unset_required_fields(): ) -def test_list_customers_rest_flattened(): +def test_list_workloads_rest_flattened(): client = CloudControlsPartnerCoreClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -5236,10 +5851,12 @@ def test_list_customers_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = customers.ListCustomersResponse() + return_value = customer_workloads.ListWorkloadsResponse() # get arguments that satisfy an http rule for this method - sample_request = {"parent": "organizations/sample1/locations/sample2"} + sample_request = { + "parent": "organizations/sample1/locations/sample2/customers/sample3" + } # get truthy value for each flattened field mock_args = dict( @@ -5251,26 +5868,26 @@ def test_list_customers_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = customers.ListCustomersResponse.pb(return_value) + return_value = customer_workloads.ListWorkloadsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_customers(**mock_args) + client.list_workloads(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1beta/{parent=organizations/*/locations/*}/customers" + "%s/v1beta/{parent=organizations/*/locations/*/customers/*}/workloads" % client.transport._host, args[1], ) -def test_list_customers_rest_flattened_error(transport: str = "rest"): +def test_list_workloads_rest_flattened_error(transport: str = "rest"): client = CloudControlsPartnerCoreClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -5279,13 +5896,13 @@ def test_list_customers_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.list_customers( - customers.ListCustomersRequest(), + client.list_workloads( + customer_workloads.ListWorkloadsRequest(), parent="parent_value", ) -def test_list_customers_rest_pager(transport: str = "rest"): +def test_list_workloads_rest_pager(transport: str = "rest"): client = CloudControlsPartnerCoreClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -5297,28 +5914,28 @@ def test_list_customers_rest_pager(transport: str = "rest"): # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( - customers.ListCustomersResponse( - customers=[ - customers.Customer(), - customers.Customer(), - customers.Customer(), + customer_workloads.ListWorkloadsResponse( + workloads=[ + customer_workloads.Workload(), + customer_workloads.Workload(), + customer_workloads.Workload(), ], next_page_token="abc", ), - customers.ListCustomersResponse( - customers=[], + customer_workloads.ListWorkloadsResponse( + workloads=[], next_page_token="def", ), - customers.ListCustomersResponse( - customers=[ - customers.Customer(), + customer_workloads.ListWorkloadsResponse( + workloads=[ + customer_workloads.Workload(), ], next_page_token="ghi", ), - customers.ListCustomersResponse( - customers=[ - customers.Customer(), - customers.Customer(), + customer_workloads.ListWorkloadsResponse( + workloads=[ + customer_workloads.Workload(), + customer_workloads.Workload(), ], ), ) @@ -5326,27 +5943,31 @@ def test_list_customers_rest_pager(transport: str = "rest"): response = response + response # Wrap the values into proper Response objs - response = tuple(customers.ListCustomersResponse.to_json(x) for x in response) + response = tuple( + customer_workloads.ListWorkloadsResponse.to_json(x) for x in response + ) return_values = tuple(Response() for i in response) for return_val, response_val in zip(return_values, response): return_val._content = response_val.encode("UTF-8") return_val.status_code = 200 req.side_effect = return_values - sample_request = {"parent": "organizations/sample1/locations/sample2"} + sample_request = { + "parent": "organizations/sample1/locations/sample2/customers/sample3" + } - pager = client.list_customers(request=sample_request) + pager = client.list_workloads(request=sample_request) results = list(pager) assert len(results) == 6 - assert all(isinstance(i, customers.Customer) for i in results) + assert all(isinstance(i, customer_workloads.Workload) for i in results) - pages = list(client.list_customers(request=sample_request).pages) + pages = list(client.list_workloads(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token -def test_get_ekm_connections_rest_use_cached_wrapped_rpc(): +def test_get_customer_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -5360,35 +5981,29 @@ def test_get_ekm_connections_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.get_ekm_connections in client._transport._wrapped_methods - ) + assert client._transport.get_customer in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect 
a string. ) - client._transport._wrapped_methods[ - client._transport.get_ekm_connections - ] = mock_rpc + client._transport._wrapped_methods[client._transport.get_customer] = mock_rpc request = {} - client.get_ekm_connections(request) + client.get_customer(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.get_ekm_connections(request) + client.get_customer(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_ekm_connections_rest_required_fields( - request_type=ekm_connections.GetEkmConnectionsRequest, -): +def test_get_customer_rest_required_fields(request_type=customers.GetCustomerRequest): transport_class = transports.CloudControlsPartnerCoreRestTransport request_init = {} @@ -5403,7 +6018,7 @@ def test_get_ekm_connections_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_ekm_connections._get_unset_required_fields(jsonified_request) + ).get_customer._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -5412,7 +6027,7 @@ def test_get_ekm_connections_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_ekm_connections._get_unset_required_fields(jsonified_request) + ).get_customer._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -5426,7 +6041,7 @@ def test_get_ekm_connections_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = ekm_connections.EkmConnections() + return_value = customers.Customer() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -5447,30 +6062,30 @@ def test_get_ekm_connections_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = ekm_connections.EkmConnections.pb(return_value) + return_value = customers.Customer.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_ekm_connections(request) + response = client.get_customer(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_ekm_connections_rest_unset_required_fields(): +def test_get_customer_rest_unset_required_fields(): transport = transports.CloudControlsPartnerCoreRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_ekm_connections._get_unset_required_fields({}) + unset_fields = transport.get_customer._get_unset_required_fields({}) assert set(unset_fields) == (set(()) & set(("name",))) -def test_get_ekm_connections_rest_flattened(): +def test_get_customer_rest_flattened(): client = CloudControlsPartnerCoreClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -5479,11 +6094,11 @@ def test_get_ekm_connections_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = ekm_connections.EkmConnections() + return_value = customers.Customer() # get arguments that satisfy an http rule for this method sample_request = { - "name": "organizations/sample1/locations/sample2/customers/sample3/workloads/sample4/ekmConnections" + "name": "organizations/sample1/locations/sample2/customers/sample3" } # get truthy value for each flattened field @@ -5496,26 +6111,26 @@ def test_get_ekm_connections_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = ekm_connections.EkmConnections.pb(return_value) + return_value = customers.Customer.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_ekm_connections(**mock_args) + client.get_customer(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1beta/{name=organizations/*/locations/*/customers/*/workloads/*/ekmConnections}" + "%s/v1beta/{name=organizations/*/locations/*/customers/*}" % client.transport._host, args[1], ) -def test_get_ekm_connections_rest_flattened_error(transport: str = "rest"): +def test_get_customer_rest_flattened_error(transport: str = "rest"): client = CloudControlsPartnerCoreClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -5524,13 +6139,13 @@ def test_get_ekm_connections_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.get_ekm_connections( - ekm_connections.GetEkmConnectionsRequest(), + client.get_customer( + customers.GetCustomerRequest(), name="name_value", ) -def test_get_partner_permissions_rest_use_cached_wrapped_rpc(): +def test_list_customers_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -5544,40 +6159,35 @@ def test_get_partner_permissions_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.get_partner_permissions - in client._transport._wrapped_methods - ) + assert client._transport.list_customers in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.get_partner_permissions - ] = mock_rpc + client._transport._wrapped_methods[client._transport.list_customers] = mock_rpc request = {} - client.get_partner_permissions(request) + client.list_customers(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.get_partner_permissions(request) + client.list_customers(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_partner_permissions_rest_required_fields( - request_type=partner_permissions.GetPartnerPermissionsRequest, +def test_list_customers_rest_required_fields( + request_type=customers.ListCustomersRequest, ): transport_class = transports.CloudControlsPartnerCoreRestTransport request_init = {} - request_init["name"] = "" + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -5588,21 +6198,30 @@ def test_get_partner_permissions_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_partner_permissions._get_unset_required_fields(jsonified_request) + ).list_customers._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" + jsonified_request["parent"] = "parent_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_partner_permissions._get_unset_required_fields(jsonified_request) + ).list_customers._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = CloudControlsPartnerCoreClient( credentials=ga_credentials.AnonymousCredentials(), @@ -5611,7 +6230,7 @@ def test_get_partner_permissions_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. 
- return_value = partner_permissions.PartnerPermissions() + return_value = customers.ListCustomersResponse() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -5632,30 +6251,40 @@ def test_get_partner_permissions_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = partner_permissions.PartnerPermissions.pb(return_value) + return_value = customers.ListCustomersResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_partner_permissions(request) + response = client.list_customers(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_partner_permissions_rest_unset_required_fields(): +def test_list_customers_rest_unset_required_fields(): transport = transports.CloudControlsPartnerCoreRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_partner_permissions._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = transport.list_customers._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) -def test_get_partner_permissions_rest_flattened(): +def test_list_customers_rest_flattened(): client = CloudControlsPartnerCoreClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -5664,16 +6293,14 @@ def test_get_partner_permissions_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = partner_permissions.PartnerPermissions() + return_value = customers.ListCustomersResponse() # get arguments that satisfy an http rule for this method - sample_request = { - "name": "organizations/sample1/locations/sample2/customers/sample3/workloads/sample4/partnerPermissions" - } + sample_request = {"parent": "organizations/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( - name="name_value", + parent="parent_value", ) mock_args.update(sample_request) @@ -5681,26 +6308,26 @@ def test_get_partner_permissions_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = partner_permissions.PartnerPermissions.pb(return_value) + return_value = customers.ListCustomersResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_partner_permissions(**mock_args) + client.list_customers(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1beta/{name=organizations/*/locations/*/customers/*/workloads/*/partnerPermissions}" + "%s/v1beta/{parent=organizations/*/locations/*}/customers" % client.transport._host, args[1], ) -def test_get_partner_permissions_rest_flattened_error(transport: str = "rest"): +def test_list_customers_rest_flattened_error(transport: str = "rest"): client = CloudControlsPartnerCoreClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -5709,19 +6336,80 @@ def test_get_partner_permissions_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_partner_permissions( - partner_permissions.GetPartnerPermissionsRequest(), - name="name_value", + client.list_customers( + customers.ListCustomersRequest(), + parent="parent_value", ) -def test_list_access_approval_requests_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = CloudControlsPartnerCoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", +def test_list_customers_rest_pager(transport: str = "rest"): + client = CloudControlsPartnerCoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + customers.ListCustomersResponse( + customers=[ + customers.Customer(), + customers.Customer(), + customers.Customer(), + ], + next_page_token="abc", + ), + customers.ListCustomersResponse( + customers=[], + next_page_token="def", + ), + customers.ListCustomersResponse( + customers=[ + customers.Customer(), + ], + next_page_token="ghi", + ), + customers.ListCustomersResponse( + customers=[ + customers.Customer(), + customers.Customer(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(customers.ListCustomersResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "organizations/sample1/locations/sample2"} + + pager = client.list_customers(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, customers.Customer) for i in results) + + pages = list(client.list_customers(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_get_ekm_connections_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CloudControlsPartnerCoreClient( + 
credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) # Should wrap all calls on client creation @@ -5730,8 +6418,7 @@ def test_list_access_approval_requests_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.list_access_approval_requests - in client._transport._wrapped_methods + client._transport.get_ekm_connections in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -5740,29 +6427,29 @@ def test_list_access_approval_requests_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.list_access_approval_requests + client._transport.get_ekm_connections ] = mock_rpc request = {} - client.list_access_approval_requests(request) + client.get_ekm_connections(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.list_access_approval_requests(request) + client.get_ekm_connections(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_access_approval_requests_rest_required_fields( - request_type=access_approval_requests.ListAccessApprovalRequestsRequest, +def test_get_ekm_connections_rest_required_fields( + request_type=ekm_connections.GetEkmConnectionsRequest, ): transport_class = transports.CloudControlsPartnerCoreRestTransport request_init = {} - request_init["parent"] = "" + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -5773,30 +6460,21 @@ def test_list_access_approval_requests_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_access_approval_requests._get_unset_required_fields(jsonified_request) + ).get_ekm_connections._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = "parent_value" + jsonified_request["name"] = "name_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_access_approval_requests._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "filter", - "order_by", - "page_size", - "page_token", - ) - ) + ).get_ekm_connections._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = CloudControlsPartnerCoreClient( credentials=ga_credentials.AnonymousCredentials(), @@ -5805,7 +6483,7 @@ def test_list_access_approval_requests_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = access_approval_requests.ListAccessApprovalRequestsResponse() + return_value = ekm_connections.EkmConnections() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -5826,46 +6504,30 @@ def test_list_access_approval_requests_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = ( - access_approval_requests.ListAccessApprovalRequestsResponse.pb( - return_value - ) - ) + return_value = ekm_connections.EkmConnections.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_access_approval_requests(request) + response = client.get_ekm_connections(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_access_approval_requests_rest_unset_required_fields(): +def test_get_ekm_connections_rest_unset_required_fields(): transport = transports.CloudControlsPartnerCoreRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_access_approval_requests._get_unset_required_fields( - {} - ) - assert set(unset_fields) == ( - set( - ( - "filter", - "orderBy", - "pageSize", - "pageToken", - ) - ) - & set(("parent",)) - ) + unset_fields = transport.get_ekm_connections._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) -def test_list_access_approval_requests_rest_flattened(): +def test_get_ekm_connections_rest_flattened(): client = CloudControlsPartnerCoreClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -5874,16 +6536,16 @@ def test_list_access_approval_requests_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = access_approval_requests.ListAccessApprovalRequestsResponse() + return_value = ekm_connections.EkmConnections() # get arguments that satisfy an http rule for this method sample_request = { - "parent": "organizations/sample1/locations/sample2/customers/sample3/workloads/sample4" + "name": "organizations/sample1/locations/sample2/customers/sample3/workloads/sample4/ekmConnections" } # get truthy value for each flattened field mock_args = dict( - parent="parent_value", + name="name_value", ) mock_args.update(sample_request) @@ -5891,28 +6553,26 @@ def test_list_access_approval_requests_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = access_approval_requests.ListAccessApprovalRequestsResponse.pb( - return_value - ) + return_value = ekm_connections.EkmConnections.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_access_approval_requests(**mock_args) + client.get_ekm_connections(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1beta/{parent=organizations/*/locations/*/customers/*/workloads/*}/accessApprovalRequests" + "%s/v1beta/{name=organizations/*/locations/*/customers/*/workloads/*/ekmConnections}" % client.transport._host, args[1], ) -def test_list_access_approval_requests_rest_flattened_error(transport: str = "rest"): +def test_get_ekm_connections_rest_flattened_error(transport: str = "rest"): client = CloudControlsPartnerCoreClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -5921,82 +6581,13 @@ def test_list_access_approval_requests_rest_flattened_error(transport: str = "re # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.list_access_approval_requests( - access_approval_requests.ListAccessApprovalRequestsRequest(), - parent="parent_value", - ) - - -def test_list_access_approval_requests_rest_pager(transport: str = "rest"): - client = CloudControlsPartnerCoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - access_approval_requests.ListAccessApprovalRequestsResponse( - access_approval_requests=[ - access_approval_requests.AccessApprovalRequest(), - access_approval_requests.AccessApprovalRequest(), - access_approval_requests.AccessApprovalRequest(), - ], - next_page_token="abc", - ), - access_approval_requests.ListAccessApprovalRequestsResponse( - access_approval_requests=[], - next_page_token="def", - ), - access_approval_requests.ListAccessApprovalRequestsResponse( - access_approval_requests=[ - access_approval_requests.AccessApprovalRequest(), - ], - next_page_token="ghi", - ), - access_approval_requests.ListAccessApprovalRequestsResponse( - access_approval_requests=[ - access_approval_requests.AccessApprovalRequest(), - access_approval_requests.AccessApprovalRequest(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple( - access_approval_requests.ListAccessApprovalRequestsResponse.to_json(x) - for x in response - ) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = { - "parent": "organizations/sample1/locations/sample2/customers/sample3/workloads/sample4" - } - - pager = client.list_access_approval_requests(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all( - isinstance(i, access_approval_requests.AccessApprovalRequest) - for i in results + client.get_ekm_connections( + ekm_connections.GetEkmConnectionsRequest(), + name="name_value", ) - pages = list(client.list_access_approval_requests(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - -def test_get_partner_rest_use_cached_wrapped_rpc(): +def test_get_partner_permissions_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create 
cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -6010,29 +6601,36 @@ def test_get_partner_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_partner in client._transport._wrapped_methods + assert ( + client._transport.get_partner_permissions + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.get_partner] = mock_rpc + client._transport._wrapped_methods[ + client._transport.get_partner_permissions + ] = mock_rpc request = {} - client.get_partner(request) + client.get_partner_permissions(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.get_partner(request) + client.get_partner_permissions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_partner_rest_required_fields(request_type=partners.GetPartnerRequest): +def test_get_partner_permissions_rest_required_fields( + request_type=partner_permissions.GetPartnerPermissionsRequest, +): transport_class = transports.CloudControlsPartnerCoreRestTransport request_init = {} @@ -6047,7 +6645,7 @@ def test_get_partner_rest_required_fields(request_type=partners.GetPartnerReques unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_partner._get_unset_required_fields(jsonified_request) + ).get_partner_permissions._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -6056,7 +6654,7 @@ def test_get_partner_rest_required_fields(request_type=partners.GetPartnerReques unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_partner._get_unset_required_fields(jsonified_request) + ).get_partner_permissions._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -6070,7 +6668,7 @@ def test_get_partner_rest_required_fields(request_type=partners.GetPartnerReques request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = partners.Partner() + return_value = partner_permissions.PartnerPermissions() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -6091,30 +6689,30 @@ def test_get_partner_rest_required_fields(request_type=partners.GetPartnerReques response_value.status_code = 200 # Convert return value to protobuf type - return_value = partners.Partner.pb(return_value) + return_value = partner_permissions.PartnerPermissions.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_partner(request) + response = client.get_partner_permissions(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_partner_rest_unset_required_fields(): +def test_get_partner_permissions_rest_unset_required_fields(): transport = transports.CloudControlsPartnerCoreRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_partner._get_unset_required_fields({}) + unset_fields = transport.get_partner_permissions._get_unset_required_fields({}) assert set(unset_fields) == (set(()) & set(("name",))) -def test_get_partner_rest_flattened(): +def test_get_partner_permissions_rest_flattened(): client = CloudControlsPartnerCoreClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -6123,10 +6721,12 @@ def test_get_partner_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = partners.Partner() + return_value = partner_permissions.PartnerPermissions() # get arguments that satisfy an http rule for this method - sample_request = {"name": "organizations/sample1/locations/sample2/partner"} + sample_request = { + "name": "organizations/sample1/locations/sample2/customers/sample3/workloads/sample4/partnerPermissions" + } # get truthy value for each flattened field mock_args = dict( @@ -6138,26 +6738,26 @@ def test_get_partner_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = partners.Partner.pb(return_value) + return_value = partner_permissions.PartnerPermissions.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_partner(**mock_args) + client.get_partner_permissions(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1beta/{name=organizations/*/locations/*/partner}" + "%s/v1beta/{name=organizations/*/locations/*/customers/*/workloads/*/partnerPermissions}" % client.transport._host, args[1], ) -def test_get_partner_rest_flattened_error(transport: str = "rest"): +def test_get_partner_permissions_rest_flattened_error(transport: str = "rest"): client = CloudControlsPartnerCoreClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -6166,563 +6766,2145 @@ def test_get_partner_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_partner( - partners.GetPartnerRequest(), + client.get_partner_permissions( + partner_permissions.GetPartnerPermissionsRequest(), name="name_value", ) -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. - transport = transports.CloudControlsPartnerCoreGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): +def test_list_access_approval_requests_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = CloudControlsPartnerCoreClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # It is an error to provide a credentials file and a transport instance. - transport = transports.CloudControlsPartnerCoreGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = CloudControlsPartnerCoreClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, - ) - - # It is an error to provide an api_key and a transport instance. - transport = transports.CloudControlsPartnerCoreGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = CloudControlsPartnerCoreClient( - client_options=options, - transport=transport, - ) + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - # It is an error to provide an api_key and a credential. - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = CloudControlsPartnerCoreClient( - client_options=options, credentials=ga_credentials.AnonymousCredentials() + # Ensure method has been cached + assert ( + client._transport.list_access_approval_requests + in client._transport._wrapped_methods ) - # It is an error to provide scopes and a transport instance. - transport = transports.CloudControlsPartnerCoreGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = CloudControlsPartnerCoreClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
) + client._transport._wrapped_methods[ + client._transport.list_access_approval_requests + ] = mock_rpc + request = {} + client.list_access_approval_requests(request) -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. - transport = transports.CloudControlsPartnerCoreGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - client = CloudControlsPartnerCoreClient(transport=transport) - assert client.transport is transport + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + client.list_access_approval_requests(request) -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. - transport = transports.CloudControlsPartnerCoreGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 - transport = transports.CloudControlsPartnerCoreGrpcAsyncIOTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel +def test_list_access_approval_requests_rest_required_fields( + request_type=access_approval_requests.ListAccessApprovalRequestsRequest, +): + transport_class = transports.CloudControlsPartnerCoreRestTransport -@pytest.mark.parametrize( - "transport_class", - [ - transports.CloudControlsPartnerCoreGrpcTransport, - transports.CloudControlsPartnerCoreGrpcAsyncIOTransport, - transports.CloudControlsPartnerCoreRestTransport, - ], -) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. - with mock.patch.object(google.auth, "default") as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + # verify fields with default values are dropped -def test_transport_kind_grpc(): - transport = CloudControlsPartnerCoreClient.get_transport_class("grpc")( + unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ) - assert transport.kind == "grpc" + ).list_access_approval_requests._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + jsonified_request["parent"] = "parent_value" -def test_initialize_client_w_grpc(): - client = CloudControlsPartnerCoreClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc" + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_access_approval_requests._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) ) - assert client is not None + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-def test_get_workload_empty_call_grpc(): client = CloudControlsPartnerCoreClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) + request = request_type(**request_init) - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.get_workload), "__call__") as call: - call.return_value = customer_workloads.Workload() - client.get_workload(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = customer_workloads.GetWorkloadRequest() - - assert args[0] == request_msg + # Designate an appropriate value for the returned response. + return_value = access_approval_requests.ListAccessApprovalRequestsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + response_value = Response() + response_value.status_code = 200 -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_workloads_empty_call_grpc(): - client = CloudControlsPartnerCoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) + # Convert return value to protobuf type + return_value = ( + access_approval_requests.ListAccessApprovalRequestsResponse.pb( + return_value + ) + ) + json_return_value = json_format.MessageToJson(return_value) - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.list_workloads), "__call__") as call: - call.return_value = customer_workloads.ListWorkloadsResponse() - client.list_workloads(request=None) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = customer_workloads.ListWorkloadsRequest() + response = client.list_access_approval_requests(request) - assert args[0] == request_msg + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_customer_empty_call_grpc(): - client = CloudControlsPartnerCoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", +def test_list_access_approval_requests_rest_unset_required_fields(): + transport = transports.CloudControlsPartnerCoreRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - # Mock the actual call, and fake the request. 
- with mock.patch.object(type(client.transport.get_customer), "__call__") as call: - call.return_value = customers.Customer() - client.get_customer(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = customers.GetCustomerRequest() - - assert args[0] == request_msg + unset_fields = transport.list_access_approval_requests._get_unset_required_fields( + {} + ) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_customers_empty_call_grpc(): +def test_list_access_approval_requests_rest_flattened(): client = CloudControlsPartnerCoreClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.list_customers), "__call__") as call: - call.return_value = customers.ListCustomersResponse() - client.list_customers(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = customers.ListCustomersRequest() + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = access_approval_requests.ListAccessApprovalRequestsResponse() - assert args[0] == request_msg + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "organizations/sample1/locations/sample2/customers/sample3/workloads/sample4" + } + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_ekm_connections_empty_call_grpc(): - client = CloudControlsPartnerCoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_ekm_connections), "__call__" - ) as call: - call.return_value = ekm_connections.EkmConnections() - client.get_ekm_connections(request=None) + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = access_approval_requests.ListAccessApprovalRequestsResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = ekm_connections.GetEkmConnectionsRequest() + client.list_access_approval_requests(**mock_args) - assert args[0] == request_msg + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta/{parent=organizations/*/locations/*/customers/*/workloads/*}/accessApprovalRequests" + % client.transport._host, + args[1], + ) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_partner_permissions_empty_call_grpc(): +def test_list_access_approval_requests_rest_flattened_error(transport: str = "rest"): client = CloudControlsPartnerCoreClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport=transport, ) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_partner_permissions), "__call__" - ) as call: - call.return_value = partner_permissions.PartnerPermissions() - client.get_partner_permissions(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = partner_permissions.GetPartnerPermissionsRequest() - - assert args[0] == request_msg + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_access_approval_requests( + access_approval_requests.ListAccessApprovalRequestsRequest(), + parent="parent_value", + ) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_access_approval_requests_empty_call_grpc(): +def test_list_access_approval_requests_rest_pager(transport: str = "rest"): client = CloudControlsPartnerCoreClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport=transport, ) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_access_approval_requests), "__call__" - ) as call: - call.return_value = ( - access_approval_requests.ListAccessApprovalRequestsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + access_approval_requests.ListAccessApprovalRequestsResponse( + access_approval_requests=[ + access_approval_requests.AccessApprovalRequest(), + access_approval_requests.AccessApprovalRequest(), + access_approval_requests.AccessApprovalRequest(), + ], + next_page_token="abc", + ), + access_approval_requests.ListAccessApprovalRequestsResponse( + access_approval_requests=[], + next_page_token="def", + ), + access_approval_requests.ListAccessApprovalRequestsResponse( + access_approval_requests=[ + access_approval_requests.AccessApprovalRequest(), + ], + next_page_token="ghi", + ), + access_approval_requests.ListAccessApprovalRequestsResponse( + access_approval_requests=[ + access_approval_requests.AccessApprovalRequest(), + access_approval_requests.AccessApprovalRequest(), + ], + ), ) - client.list_access_approval_requests(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = access_approval_requests.ListAccessApprovalRequestsRequest() + # Two responses for two calls + response = response + response - assert args[0] == request_msg + # Wrap the values into proper Response objs + response = tuple( + access_approval_requests.ListAccessApprovalRequestsResponse.to_json(x) + for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + sample_request = { + "parent": "organizations/sample1/locations/sample2/customers/sample3/workloads/sample4" + } -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_partner_empty_call_grpc(): - client = CloudControlsPartnerCoreClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) + pager = client.list_access_approval_requests(request=sample_request) - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.get_partner), "__call__") as call: - call.return_value = partners.Partner() - client.get_partner(request=None) + results = list(pager) + assert len(results) == 6 + assert all( + isinstance(i, access_approval_requests.AccessApprovalRequest) + for i in results + ) - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = partners.GetPartnerRequest() + pages = list(client.list_access_approval_requests(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token - assert args[0] == request_msg +def test_get_partner_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CloudControlsPartnerCoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) -def test_transport_kind_grpc_asyncio(): - transport = CloudControlsPartnerCoreAsyncClient.get_transport_class("grpc_asyncio")( - credentials=async_anonymous_credentials() - ) - assert transport.kind == "grpc_asyncio" + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + # Ensure method has been cached + assert client._transport.get_partner in client._transport._wrapped_methods -def test_initialize_client_w_grpc_asyncio(): - client = CloudControlsPartnerCoreAsyncClient( - credentials=async_anonymous_credentials(), transport="grpc_asyncio" - ) - assert client is not None + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_partner] = mock_rpc + request = {} + client.get_partner(request) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-@pytest.mark.asyncio -async def test_get_workload_empty_call_grpc_asyncio(): - client = CloudControlsPartnerCoreAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.get_workload), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - customer_workloads.Workload( - name="name_value", - folder_id=936, - folder="folder_value", - is_onboarded=True, - key_management_project_id="key_management_project_id_value", - location="location_value", - partner=customer_workloads.Workload.Partner.PARTNER_LOCAL_CONTROLS_BY_S3NS, - ) - ) - await client.get_workload(request=None) + client.get_partner(request) - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = customer_workloads.GetWorkloadRequest() + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 - assert args[0] == request_msg +def test_get_partner_rest_required_fields(request_type=partners.GetPartnerRequest): + transport_class = transports.CloudControlsPartnerCoreRestTransport -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_workloads_empty_call_grpc_asyncio(): - client = CloudControlsPartnerCoreAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) ) - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.list_workloads), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - customer_workloads.ListWorkloadsResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], - ) - ) - await client.list_workloads(request=None) + # verify fields with default values are dropped - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = customer_workloads.ListWorkloadsRequest() + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_partner._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - assert args[0] == request_msg + # verify required fields with default values are now present + jsonified_request["name"] = "name_value" -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-@pytest.mark.asyncio -async def test_get_customer_empty_call_grpc_asyncio(): - client = CloudControlsPartnerCoreAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_partner._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = CloudControlsPartnerCoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) + request = request_type(**request_init) - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.get_customer), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - customers.Customer( - name="name_value", - display_name="display_name_value", - is_onboarded=True, - ) - ) - await client.get_customer(request=None) + # Designate an appropriate value for the returned response. + return_value = partners.Partner() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = customers.GetCustomerRequest() + response_value = Response() + response_value.status_code = 200 - assert args[0] == request_msg + # Convert return value to protobuf type + return_value = partners.Partner.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_customers_empty_call_grpc_asyncio(): - client = CloudControlsPartnerCoreAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) + response = client.get_partner(request) - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.list_customers), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - customers.ListCustomersResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], - ) - ) - await client.list_customers(request=None) + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = customers.ListCustomersRequest() - assert args[0] == request_msg +def test_get_partner_rest_unset_required_fields(): + transport = transports.CloudControlsPartnerCoreRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + unset_fields = transport.get_partner._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_ekm_connections_empty_call_grpc_asyncio(): - client = CloudControlsPartnerCoreAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", + +def test_get_partner_rest_flattened(): + client = CloudControlsPartnerCoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_ekm_connections), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - ekm_connections.EkmConnections( - name="name_value", - ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = partners.Partner() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "organizations/sample1/locations/sample2/partner"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", ) - await client.get_ekm_connections(request=None) + mock_args.update(sample_request) - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = ekm_connections.GetEkmConnectionsRequest() + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = partners.Partner.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - assert args[0] == request_msg + client.get_partner(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta/{name=organizations/*/locations/*/partner}" + % client.transport._host, + args[1], + ) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_partner_permissions_empty_call_grpc_asyncio(): - client = CloudControlsPartnerCoreAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", +def test_get_partner_rest_flattened_error(transport: str = "rest"): + client = CloudControlsPartnerCoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_partner_permissions), "__call__" - ) as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - partner_permissions.PartnerPermissions( - name="name_value", - partner_permissions=[ - partner_permissions.PartnerPermissions.Permission.ACCESS_TRANSPARENCY_AND_EMERGENCY_ACCESS_LOGS - ], - ) + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_partner( + partners.GetPartnerRequest(), + name="name_value", ) - await client.get_partner_permissions(request=None) - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = partner_permissions.GetPartnerPermissionsRequest() - assert args[0] == request_msg +def test_create_customer_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CloudControlsPartnerCoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_access_approval_requests_empty_call_grpc_asyncio(): - client = CloudControlsPartnerCoreAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) + # Ensure method has been cached + assert client._transport.create_customer in client._transport._wrapped_methods - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_access_approval_requests), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - access_approval_requests.ListAccessApprovalRequestsResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], - ) + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. ) - await client.list_access_approval_requests(request=None) + client._transport._wrapped_methods[client._transport.create_customer] = mock_rpc - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = access_approval_requests.ListAccessApprovalRequestsRequest() + request = {} + client.create_customer(request) - assert args[0] == request_msg + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + client.create_customer(request) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-@pytest.mark.asyncio -async def test_get_partner_empty_call_grpc_asyncio(): - client = CloudControlsPartnerCoreAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_customer_rest_required_fields( + request_type=customers.CreateCustomerRequest, +): + transport_class = transports.CloudControlsPartnerCoreRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["customer_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) ) - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.get_partner), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - partners.Partner( - name="name_value", - operated_cloud_regions=["operated_cloud_regions_value"], - partner_project_id="partner_project_id_value", + # verify fields with default values are dropped + assert "customerId" not in jsonified_request + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_customer._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "customerId" in jsonified_request + assert jsonified_request["customerId"] == request_init["customer_id"] + + jsonified_request["parent"] = "parent_value" + jsonified_request["customerId"] = "customer_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_customer._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("customer_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "customerId" in jsonified_request + assert jsonified_request["customerId"] == "customer_id_value" + + client = CloudControlsPartnerCoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = customers.Customer() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = customers.Customer.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.create_customer(request) + + expected_params = [ + ( + "customerId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_customer_rest_unset_required_fields(): + transport = transports.CloudControlsPartnerCoreRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_customer._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("customerId",)) + & set( + ( + "parent", + "customer", + "customerId", ) ) - await client.get_partner(request=None) + ) + + +def test_create_customer_rest_flattened(): + client = CloudControlsPartnerCoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = customers.Customer() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "organizations/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + customer=customers.Customer(name="name_value"), + customer_id="customer_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = customers.Customer.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.create_customer(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta/{parent=organizations/*/locations/*}/customers" + % client.transport._host, + args[1], + ) + + +def test_create_customer_rest_flattened_error(transport: str = "rest"): + client = CloudControlsPartnerCoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_customer( + customers.CreateCustomerRequest(), + parent="parent_value", + customer=customers.Customer(name="name_value"), + customer_id="customer_id_value", + ) + + +def test_update_customer_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CloudControlsPartnerCoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_customer in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.update_customer] = mock_rpc + + request = {} + client.update_customer(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.update_customer(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_customer_rest_required_fields( + request_type=customers.UpdateCustomerRequest, +): + transport_class = transports.CloudControlsPartnerCoreRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_customer._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_customer._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("update_mask",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = CloudControlsPartnerCoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = customers.Customer() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = customers.Customer.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.update_customer(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_customer_rest_unset_required_fields(): + transport = transports.CloudControlsPartnerCoreRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_customer._get_unset_required_fields({}) + assert set(unset_fields) == (set(("updateMask",)) & set(("customer",))) + + +def test_update_customer_rest_flattened(): + client = CloudControlsPartnerCoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = customers.Customer() + + # get arguments that satisfy an http rule for this method + sample_request = { + "customer": { + "name": "organizations/sample1/locations/sample2/customers/sample3" + } + } + + # get truthy value for each flattened field + mock_args = dict( + customer=customers.Customer(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = customers.Customer.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.update_customer(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta/{customer.name=organizations/*/locations/*/customers/*}" + % client.transport._host, + args[1], + ) + + +def test_update_customer_rest_flattened_error(transport: str = "rest"): + client = CloudControlsPartnerCoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_customer( + customers.UpdateCustomerRequest(), + customer=customers.Customer(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_delete_customer_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CloudControlsPartnerCoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_customer in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete_customer] = mock_rpc + + request = {} + client.delete_customer(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.delete_customer(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_customer_rest_required_fields( + request_type=customers.DeleteCustomerRequest, +): + transport_class = transports.CloudControlsPartnerCoreRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_customer._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_customer._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = CloudControlsPartnerCoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.delete_customer(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_customer_rest_unset_required_fields(): + transport = transports.CloudControlsPartnerCoreRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_customer._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +def test_delete_customer_rest_flattened(): + client = CloudControlsPartnerCoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "organizations/sample1/locations/sample2/customers/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.delete_customer(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta/{name=organizations/*/locations/*/customers/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_customer_rest_flattened_error(transport: str = "rest"): + client = CloudControlsPartnerCoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_customer( + customers.DeleteCustomerRequest(), + name="name_value", + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.CloudControlsPartnerCoreGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = CloudControlsPartnerCoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.CloudControlsPartnerCoreGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = CloudControlsPartnerCoreClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. 
+ transport = transports.CloudControlsPartnerCoreGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = CloudControlsPartnerCoreClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = CloudControlsPartnerCoreClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.CloudControlsPartnerCoreGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = CloudControlsPartnerCoreClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.CloudControlsPartnerCoreGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = CloudControlsPartnerCoreClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.CloudControlsPartnerCoreGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.CloudControlsPartnerCoreGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.CloudControlsPartnerCoreGrpcTransport, + transports.CloudControlsPartnerCoreGrpcAsyncIOTransport, + transports.CloudControlsPartnerCoreRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +def test_transport_kind_grpc(): + transport = CloudControlsPartnerCoreClient.get_transport_class("grpc")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "grpc" + + +def test_initialize_client_w_grpc(): + client = CloudControlsPartnerCoreClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_workload_empty_call_grpc(): + client = CloudControlsPartnerCoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_workload), "__call__") as call: + call.return_value = customer_workloads.Workload() + client.get_workload(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = customer_workloads.GetWorkloadRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_list_workloads_empty_call_grpc(): + client = CloudControlsPartnerCoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.list_workloads), "__call__") as call: + call.return_value = customer_workloads.ListWorkloadsResponse() + client.list_workloads(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = customer_workloads.ListWorkloadsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_customer_empty_call_grpc(): + client = CloudControlsPartnerCoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_customer), "__call__") as call: + call.return_value = customers.Customer() + client.get_customer(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = customers.GetCustomerRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_customers_empty_call_grpc(): + client = CloudControlsPartnerCoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.list_customers), "__call__") as call: + call.return_value = customers.ListCustomersResponse() + client.list_customers(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = customers.ListCustomersRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_ekm_connections_empty_call_grpc(): + client = CloudControlsPartnerCoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_ekm_connections), "__call__" + ) as call: + call.return_value = ekm_connections.EkmConnections() + client.get_ekm_connections(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = ekm_connections.GetEkmConnectionsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_partner_permissions_empty_call_grpc(): + client = CloudControlsPartnerCoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_partner_permissions), "__call__" + ) as call: + call.return_value = partner_permissions.PartnerPermissions() + client.get_partner_permissions(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = partner_permissions.GetPartnerPermissionsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_access_approval_requests_empty_call_grpc(): + client = CloudControlsPartnerCoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_access_approval_requests), "__call__" + ) as call: + call.return_value = ( + access_approval_requests.ListAccessApprovalRequestsResponse() + ) + client.list_access_approval_requests(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = access_approval_requests.ListAccessApprovalRequestsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_partner_empty_call_grpc(): + client = CloudControlsPartnerCoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_partner), "__call__") as call: + call.return_value = partners.Partner() + client.get_partner(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = partners.GetPartnerRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_customer_empty_call_grpc(): + client = CloudControlsPartnerCoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.create_customer), "__call__") as call: + call.return_value = customers.Customer() + client.create_customer(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = customers.CreateCustomerRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_customer_empty_call_grpc(): + client = CloudControlsPartnerCoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.update_customer), "__call__") as call: + call.return_value = customers.Customer() + client.update_customer(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = customers.UpdateCustomerRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_customer_empty_call_grpc(): + client = CloudControlsPartnerCoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object(type(client.transport.delete_customer), "__call__") as call: + call.return_value = None + client.delete_customer(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = customers.DeleteCustomerRequest() + + assert args[0] == request_msg + + +def test_transport_kind_grpc_asyncio(): + transport = CloudControlsPartnerCoreAsyncClient.get_transport_class("grpc_asyncio")( + credentials=async_anonymous_credentials() + ) + assert transport.kind == "grpc_asyncio" + + +def test_initialize_client_w_grpc_asyncio(): + client = CloudControlsPartnerCoreAsyncClient( + credentials=async_anonymous_credentials(), transport="grpc_asyncio" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_workload_empty_call_grpc_asyncio(): + client = CloudControlsPartnerCoreAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_workload), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + customer_workloads.Workload( + name="name_value", + folder_id=936, + folder="folder_value", + is_onboarded=True, + key_management_project_id="key_management_project_id_value", + location="location_value", + partner=customer_workloads.Workload.Partner.PARTNER_LOCAL_CONTROLS_BY_S3NS, + ) + ) + await client.get_workload(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = customer_workloads.GetWorkloadRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_workloads_empty_call_grpc_asyncio(): + client = CloudControlsPartnerCoreAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.list_workloads), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + customer_workloads.ListWorkloadsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + await client.list_workloads(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = customer_workloads.ListWorkloadsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_customer_empty_call_grpc_asyncio(): + client = CloudControlsPartnerCoreAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_customer), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + customers.Customer( + name="name_value", + display_name="display_name_value", + is_onboarded=True, + organization_domain="organization_domain_value", + ) + ) + await client.get_customer(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = customers.GetCustomerRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_customers_empty_call_grpc_asyncio(): + client = CloudControlsPartnerCoreAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.list_customers), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + customers.ListCustomersResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + await client.list_customers(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = customers.ListCustomersRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_ekm_connections_empty_call_grpc_asyncio(): + client = CloudControlsPartnerCoreAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_ekm_connections), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + ekm_connections.EkmConnections( + name="name_value", + ) + ) + await client.get_ekm_connections(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = ekm_connections.GetEkmConnectionsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_partner_permissions_empty_call_grpc_asyncio(): + client = CloudControlsPartnerCoreAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_partner_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + partner_permissions.PartnerPermissions( + name="name_value", + partner_permissions=[ + partner_permissions.PartnerPermissions.Permission.ACCESS_TRANSPARENCY_AND_EMERGENCY_ACCESS_LOGS + ], + ) + ) + await client.get_partner_permissions(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = partner_permissions.GetPartnerPermissionsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. 
request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_access_approval_requests_empty_call_grpc_asyncio(): + client = CloudControlsPartnerCoreAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_access_approval_requests), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + access_approval_requests.ListAccessApprovalRequestsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + await client.list_access_approval_requests(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = access_approval_requests.ListAccessApprovalRequestsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_partner_empty_call_grpc_asyncio(): + client = CloudControlsPartnerCoreAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_partner), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + partners.Partner( + name="name_value", + operated_cloud_regions=["operated_cloud_regions_value"], + partner_project_id="partner_project_id_value", + ) + ) + await client.get_partner(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = partners.GetPartnerRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_customer_empty_call_grpc_asyncio(): + client = CloudControlsPartnerCoreAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.create_customer), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + customers.Customer( + name="name_value", + display_name="display_name_value", + is_onboarded=True, + organization_domain="organization_domain_value", + ) + ) + await client.create_customer(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = customers.CreateCustomerRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_customer_empty_call_grpc_asyncio(): + client = CloudControlsPartnerCoreAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.update_customer), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + customers.Customer( + name="name_value", + display_name="display_name_value", + is_onboarded=True, + organization_domain="organization_domain_value", + ) + ) + await client.update_customer(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = customers.UpdateCustomerRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_customer_empty_call_grpc_asyncio(): + client = CloudControlsPartnerCoreAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.delete_customer), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_customer(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = customers.DeleteCustomerRequest() + + assert args[0] == request_msg + + +def test_transport_kind_rest(): + transport = CloudControlsPartnerCoreClient.get_transport_class("rest")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "rest" + + +def test_get_workload_rest_bad_request( + request_type=customer_workloads.GetWorkloadRequest, +): + client = CloudControlsPartnerCoreClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = { + "name": "organizations/sample1/locations/sample2/customers/sample3/workloads/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_workload(request) + + +@pytest.mark.parametrize( + "request_type", + [ + customer_workloads.GetWorkloadRequest, + dict, + ], +) +def test_get_workload_rest_call_success(request_type): + client = CloudControlsPartnerCoreClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "organizations/sample1/locations/sample2/customers/sample3/workloads/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = customer_workloads.Workload( + name="name_value", + folder_id=936, + folder="folder_value", + is_onboarded=True, + key_management_project_id="key_management_project_id_value", + location="location_value", + partner=customer_workloads.Workload.Partner.PARTNER_LOCAL_CONTROLS_BY_S3NS, + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = customer_workloads.Workload.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.get_workload(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, customer_workloads.Workload) + assert response.name == "name_value" + assert response.folder_id == 936 + assert response.folder == "folder_value" + assert response.is_onboarded is True + assert response.key_management_project_id == "key_management_project_id_value" + assert response.location == "location_value" + assert ( + response.partner + == customer_workloads.Workload.Partner.PARTNER_LOCAL_CONTROLS_BY_S3NS + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_workload_rest_interceptors(null_interceptor): + transport = transports.CloudControlsPartnerCoreRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CloudControlsPartnerCoreRestInterceptor(), + ) + client = CloudControlsPartnerCoreClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.CloudControlsPartnerCoreRestInterceptor, "post_get_workload" + ) as post, mock.patch.object( + transports.CloudControlsPartnerCoreRestInterceptor, + "post_get_workload_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.CloudControlsPartnerCoreRestInterceptor, "pre_get_workload" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = customer_workloads.GetWorkloadRequest.pb( + customer_workloads.GetWorkloadRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = customer_workloads.Workload.to_json( + customer_workloads.Workload() + ) + req.return_value.content = return_value + + request = customer_workloads.GetWorkloadRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = customer_workloads.Workload() + post_with_metadata.return_value = customer_workloads.Workload(), metadata + + client.get_workload( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_list_workloads_rest_bad_request( + request_type=customer_workloads.ListWorkloadsRequest, +): + client = CloudControlsPartnerCoreClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request 
that will satisfy transcoding + request_init = { + "parent": "organizations/sample1/locations/sample2/customers/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_workloads(request) + + +@pytest.mark.parametrize( + "request_type", + [ + customer_workloads.ListWorkloadsRequest, + dict, + ], +) +def test_list_workloads_rest_call_success(request_type): + client = CloudControlsPartnerCoreClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "organizations/sample1/locations/sample2/customers/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = customer_workloads.ListWorkloadsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = customer_workloads.ListWorkloadsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.list_workloads(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListWorkloadsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_workloads_rest_interceptors(null_interceptor): + transport = transports.CloudControlsPartnerCoreRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CloudControlsPartnerCoreRestInterceptor(), + ) + client = CloudControlsPartnerCoreClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.CloudControlsPartnerCoreRestInterceptor, "post_list_workloads" + ) as post, mock.patch.object( + transports.CloudControlsPartnerCoreRestInterceptor, + "post_list_workloads_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.CloudControlsPartnerCoreRestInterceptor, "pre_list_workloads" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = customer_workloads.ListWorkloadsRequest.pb( + customer_workloads.ListWorkloadsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = customer_workloads.ListWorkloadsResponse.to_json( + customer_workloads.ListWorkloadsResponse() + ) + req.return_value.content = return_value + + request = customer_workloads.ListWorkloadsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = customer_workloads.ListWorkloadsResponse() + post_with_metadata.return_value = ( + customer_workloads.ListWorkloadsResponse(), + metadata, + ) + + client.list_workloads( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_get_customer_rest_bad_request(request_type=customers.GetCustomerRequest): + client = CloudControlsPartnerCoreClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"name": "organizations/sample1/locations/sample2/customers/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_customer(request) + + +@pytest.mark.parametrize( + "request_type", + [ + customers.GetCustomerRequest, + dict, + ], +) +def test_get_customer_rest_call_success(request_type): + client = CloudControlsPartnerCoreClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"name": "organizations/sample1/locations/sample2/customers/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = customers.Customer( + name="name_value", + display_name="display_name_value", + is_onboarded=True, + organization_domain="organization_domain_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = customers.Customer.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.get_customer(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, customers.Customer) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.is_onboarded is True + assert response.organization_domain == "organization_domain_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_customer_rest_interceptors(null_interceptor): + transport = transports.CloudControlsPartnerCoreRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CloudControlsPartnerCoreRestInterceptor(), + ) + client = CloudControlsPartnerCoreClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.CloudControlsPartnerCoreRestInterceptor, "post_get_customer" + ) as post, mock.patch.object( + transports.CloudControlsPartnerCoreRestInterceptor, + "post_get_customer_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.CloudControlsPartnerCoreRestInterceptor, "pre_get_customer" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = customers.GetCustomerRequest.pb(customers.GetCustomerRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = partners.GetPartnerRequest() + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = customers.Customer.to_json(customers.Customer()) + req.return_value.content = return_value - assert args[0] == request_msg + request = customers.GetCustomerRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = customers.Customer() + post_with_metadata.return_value = customers.Customer(), metadata + client.get_customer( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) -def test_transport_kind_rest(): - transport = CloudControlsPartnerCoreClient.get_transport_class("rest")( - credentials=ga_credentials.AnonymousCredentials() - ) - assert transport.kind == "rest" + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() -def test_get_workload_rest_bad_request( - request_type=customer_workloads.GetWorkloadRequest, -): +def test_list_customers_rest_bad_request(request_type=customers.ListCustomersRequest): client = CloudControlsPartnerCoreClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = { - "name": "organizations/sample1/locations/sample2/customers/sample3/workloads/sample4" - } + request_init = {"parent": "organizations/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -6737,38 +8919,31 @@ def test_get_workload_rest_bad_request( response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_workload(request) + client.list_customers(request) @pytest.mark.parametrize( "request_type", [ - customer_workloads.GetWorkloadRequest, + customers.ListCustomersRequest, dict, ], ) -def test_get_workload_rest_call_success(request_type): +def test_list_customers_rest_call_success(request_type): client = CloudControlsPartnerCoreClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = { - "name": "organizations/sample1/locations/sample2/customers/sample3/workloads/sample4" - } + request_init = {"parent": "organizations/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = customer_workloads.Workload( - name="name_value", - folder_id=936, - folder="folder_value", - is_onboarded=True, - key_management_project_id="key_management_project_id_value", - location="location_value", - partner=customer_workloads.Workload.Partner.PARTNER_LOCAL_CONTROLS_BY_S3NS, + return_value = customers.ListCustomersResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], ) # Wrap the value into a proper Response obj @@ -6776,29 +8951,21 @@ def test_get_workload_rest_call_success(request_type): response_value.status_code = 200 # Convert return value to protobuf type - return_value = customer_workloads.Workload.pb(return_value) + return_value = customers.ListCustomersResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_workload(request) + response = client.list_customers(request) # Establish that the response is the type that we expect. - assert isinstance(response, customer_workloads.Workload) - assert response.name == "name_value" - assert response.folder_id == 936 - assert response.folder == "folder_value" - assert response.is_onboarded is True - assert response.key_management_project_id == "key_management_project_id_value" - assert response.location == "location_value" - assert ( - response.partner - == customer_workloads.Workload.Partner.PARTNER_LOCAL_CONTROLS_BY_S3NS - ) + assert isinstance(response, pagers.ListCustomersPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_workload_rest_interceptors(null_interceptor): +def test_list_customers_rest_interceptors(null_interceptor): transport = transports.CloudControlsPartnerCoreRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -6812,15 +8979,17 @@ def test_get_workload_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.CloudControlsPartnerCoreRestInterceptor, "post_get_workload" + transports.CloudControlsPartnerCoreRestInterceptor, "post_list_customers" ) as post, mock.patch.object( - transports.CloudControlsPartnerCoreRestInterceptor, "pre_get_workload" + transports.CloudControlsPartnerCoreRestInterceptor, + "post_list_customers_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.CloudControlsPartnerCoreRestInterceptor, "pre_list_customers" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = customer_workloads.GetWorkloadRequest.pb( - customer_workloads.GetWorkloadRequest() - ) + post_with_metadata.assert_not_called() + pb_message = customers.ListCustomersRequest.pb(customers.ListCustomersRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -6831,20 +9000,21 @@ def test_get_workload_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = customer_workloads.Workload.to_json( - customer_workloads.Workload() + return_value = customers.ListCustomersResponse.to_json( + customers.ListCustomersResponse() ) req.return_value.content = return_value - request = customer_workloads.GetWorkloadRequest() + 
request = customers.ListCustomersRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = customer_workloads.Workload() + post.return_value = customers.ListCustomersResponse() + post_with_metadata.return_value = customers.ListCustomersResponse(), metadata - client.get_workload( + client.list_customers( request, metadata=[ ("key", "val"), @@ -6854,17 +9024,18 @@ def test_get_workload_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() -def test_list_workloads_rest_bad_request( - request_type=customer_workloads.ListWorkloadsRequest, +def test_get_ekm_connections_rest_bad_request( + request_type=ekm_connections.GetEkmConnectionsRequest, ): client = CloudControlsPartnerCoreClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding request_init = { - "parent": "organizations/sample1/locations/sample2/customers/sample3" + "name": "organizations/sample1/locations/sample2/customers/sample3/workloads/sample4/ekmConnections" } request = request_type(**request_init) @@ -6880,33 +9051,32 @@ def test_list_workloads_rest_bad_request( response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_workloads(request) + client.get_ekm_connections(request) @pytest.mark.parametrize( "request_type", [ - customer_workloads.ListWorkloadsRequest, + ekm_connections.GetEkmConnectionsRequest, dict, ], ) -def test_list_workloads_rest_call_success(request_type): +def test_get_ekm_connections_rest_call_success(request_type): client = CloudControlsPartnerCoreClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding request_init = { - "parent": "organizations/sample1/locations/sample2/customers/sample3" + "name": "organizations/sample1/locations/sample2/customers/sample3/workloads/sample4/ekmConnections" } request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = customer_workloads.ListWorkloadsResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], + return_value = ekm_connections.EkmConnections( + name="name_value", ) # Wrap the value into a proper Response obj @@ -6914,21 +9084,20 @@ def test_list_workloads_rest_call_success(request_type): response_value.status_code = 200 # Convert return value to protobuf type - return_value = customer_workloads.ListWorkloadsResponse.pb(return_value) + return_value = ekm_connections.EkmConnections.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_workloads(request) + response = client.get_ekm_connections(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListWorkloadsPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] + assert isinstance(response, ekm_connections.EkmConnections) + assert response.name == "name_value" @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_workloads_rest_interceptors(null_interceptor): +def test_get_ekm_connections_rest_interceptors(null_interceptor): transport = transports.CloudControlsPartnerCoreRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -6942,14 +9111,18 @@ def test_list_workloads_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.CloudControlsPartnerCoreRestInterceptor, "post_list_workloads" + transports.CloudControlsPartnerCoreRestInterceptor, "post_get_ekm_connections" ) as post, mock.patch.object( - transports.CloudControlsPartnerCoreRestInterceptor, "pre_list_workloads" + transports.CloudControlsPartnerCoreRestInterceptor, + "post_get_ekm_connections_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.CloudControlsPartnerCoreRestInterceptor, "pre_get_ekm_connections" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = customer_workloads.ListWorkloadsRequest.pb( - customer_workloads.ListWorkloadsRequest() + post_with_metadata.assert_not_called() + pb_message = ekm_connections.GetEkmConnectionsRequest.pb( + ekm_connections.GetEkmConnectionsRequest() ) transcode.return_value = { "method": "post", @@ -6961,20 +9134,21 @@ def test_list_workloads_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = customer_workloads.ListWorkloadsResponse.to_json( - customer_workloads.ListWorkloadsResponse() + return_value = ekm_connections.EkmConnections.to_json( + ekm_connections.EkmConnections() ) req.return_value.content = return_value - request = customer_workloads.ListWorkloadsRequest() + request = ekm_connections.GetEkmConnectionsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = customer_workloads.ListWorkloadsResponse() + post.return_value = ekm_connections.EkmConnections() + post_with_metadata.return_value = ekm_connections.EkmConnections(), metadata - client.list_workloads( + client.get_ekm_connections( request, metadata=[ ("key", "val"), @@ -6984,14 +9158,19 @@ def test_list_workloads_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() -def test_get_customer_rest_bad_request(request_type=customers.GetCustomerRequest): +def test_get_partner_permissions_rest_bad_request( + request_type=partner_permissions.GetPartnerPermissionsRequest, +): client = CloudControlsPartnerCoreClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"name": "organizations/sample1/locations/sample2/customers/sample3"} + request_init = { + "name": "organizations/sample1/locations/sample2/customers/sample3/workloads/sample4/partnerPermissions" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -7006,32 +9185,35 @@ def test_get_customer_rest_bad_request(request_type=customers.GetCustomerRequest response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_customer(request) + client.get_partner_permissions(request) @pytest.mark.parametrize( "request_type", [ - customers.GetCustomerRequest, + partner_permissions.GetPartnerPermissionsRequest, dict, ], ) -def test_get_customer_rest_call_success(request_type): +def test_get_partner_permissions_rest_call_success(request_type): client = CloudControlsPartnerCoreClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"name": "organizations/sample1/locations/sample2/customers/sample3"} + request_init = { + "name": "organizations/sample1/locations/sample2/customers/sample3/workloads/sample4/partnerPermissions" + } request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = customers.Customer( + return_value = partner_permissions.PartnerPermissions( name="name_value", - display_name="display_name_value", - is_onboarded=True, + partner_permissions=[ + partner_permissions.PartnerPermissions.Permission.ACCESS_TRANSPARENCY_AND_EMERGENCY_ACCESS_LOGS + ], ) # Wrap the value into a proper Response obj @@ -7039,22 +9221,23 @@ def test_get_customer_rest_call_success(request_type): response_value.status_code = 200 # Convert return value to protobuf type - return_value = customers.Customer.pb(return_value) + return_value = partner_permissions.PartnerPermissions.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_customer(request) + response = client.get_partner_permissions(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, customers.Customer) + assert isinstance(response, partner_permissions.PartnerPermissions) assert response.name == "name_value" - assert response.display_name == "display_name_value" - assert response.is_onboarded is True + assert response.partner_permissions == [ + partner_permissions.PartnerPermissions.Permission.ACCESS_TRANSPARENCY_AND_EMERGENCY_ACCESS_LOGS + ] @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_customer_rest_interceptors(null_interceptor): +def test_get_partner_permissions_rest_interceptors(null_interceptor): transport = transports.CloudControlsPartnerCoreRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -7068,13 +9251,21 @@ def test_get_customer_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.CloudControlsPartnerCoreRestInterceptor, "post_get_customer" + transports.CloudControlsPartnerCoreRestInterceptor, + "post_get_partner_permissions", ) as post, mock.patch.object( - transports.CloudControlsPartnerCoreRestInterceptor, "pre_get_customer" + transports.CloudControlsPartnerCoreRestInterceptor, + "post_get_partner_permissions_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.CloudControlsPartnerCoreRestInterceptor, + "pre_get_partner_permissions", ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = customers.GetCustomerRequest.pb(customers.GetCustomerRequest()) + post_with_metadata.assert_not_called() + pb_message = partner_permissions.GetPartnerPermissionsRequest.pb( + partner_permissions.GetPartnerPermissionsRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -7085,18 +9276,24 @@ def test_get_customer_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = customers.Customer.to_json(customers.Customer()) + return_value = partner_permissions.PartnerPermissions.to_json( + partner_permissions.PartnerPermissions() + ) req.return_value.content = return_value - request = customers.GetCustomerRequest() + request = partner_permissions.GetPartnerPermissionsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = customers.Customer() + post.return_value = partner_permissions.PartnerPermissions() + post_with_metadata.return_value = ( + partner_permissions.PartnerPermissions(), + metadata, + ) - client.get_customer( + client.get_partner_permissions( request, metadata=[ ("key", "val"), @@ -7106,14 +9303,19 @@ def test_get_customer_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() -def test_list_customers_rest_bad_request(request_type=customers.ListCustomersRequest): +def test_list_access_approval_requests_rest_bad_request( + request_type=access_approval_requests.ListAccessApprovalRequestsRequest, +): client = CloudControlsPartnerCoreClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"parent": "organizations/sample1/locations/sample2"} + request_init = { + "parent": "organizations/sample1/locations/sample2/customers/sample3/workloads/sample4" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -7128,29 +9330,31 @@ def test_list_customers_rest_bad_request(request_type=customers.ListCustomersReq response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_customers(request) + client.list_access_approval_requests(request) @pytest.mark.parametrize( "request_type", [ - customers.ListCustomersRequest, + access_approval_requests.ListAccessApprovalRequestsRequest, dict, ], ) -def test_list_customers_rest_call_success(request_type): +def test_list_access_approval_requests_rest_call_success(request_type): client = CloudControlsPartnerCoreClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"parent": "organizations/sample1/locations/sample2"} + request_init = { + "parent": "organizations/sample1/locations/sample2/customers/sample3/workloads/sample4" + } request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = customers.ListCustomersResponse( + return_value = access_approval_requests.ListAccessApprovalRequestsResponse( next_page_token="next_page_token_value", unreachable=["unreachable_value"], ) @@ -7160,21 +9364,23 @@ def test_list_customers_rest_call_success(request_type): response_value.status_code = 200 # Convert return value to protobuf type - return_value = customers.ListCustomersResponse.pb(return_value) + return_value = access_approval_requests.ListAccessApprovalRequestsResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_customers(request) + response = client.list_access_approval_requests(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListCustomersPager) + assert isinstance(response, pagers.ListAccessApprovalRequestsPager) assert response.next_page_token == "next_page_token_value" assert response.unreachable == ["unreachable_value"] @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_customers_rest_interceptors(null_interceptor): +def test_list_access_approval_requests_rest_interceptors(null_interceptor): transport = transports.CloudControlsPartnerCoreRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -7188,13 +9394,21 @@ def test_list_customers_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.CloudControlsPartnerCoreRestInterceptor, "post_list_customers" + transports.CloudControlsPartnerCoreRestInterceptor, + "post_list_access_approval_requests", ) as post, mock.patch.object( - transports.CloudControlsPartnerCoreRestInterceptor, "pre_list_customers" + transports.CloudControlsPartnerCoreRestInterceptor, + "post_list_access_approval_requests_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.CloudControlsPartnerCoreRestInterceptor, + "pre_list_access_approval_requests", ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = customers.ListCustomersRequest.pb(customers.ListCustomersRequest()) + post_with_metadata.assert_not_called() + pb_message = access_approval_requests.ListAccessApprovalRequestsRequest.pb( + access_approval_requests.ListAccessApprovalRequestsRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -7204,21 +9418,29 @@ def test_list_customers_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = customers.ListCustomersResponse.to_json( - customers.ListCustomersResponse() + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = ( + access_approval_requests.ListAccessApprovalRequestsResponse.to_json( + access_approval_requests.ListAccessApprovalRequestsResponse() + ) ) req.return_value.content = return_value - request = customers.ListCustomersRequest() + request = access_approval_requests.ListAccessApprovalRequestsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = customers.ListCustomersResponse() + post.return_value = ( + access_approval_requests.ListAccessApprovalRequestsResponse() + ) + post_with_metadata.return_value = ( + access_approval_requests.ListAccessApprovalRequestsResponse(), + metadata, + ) - client.list_customers( + client.list_access_approval_requests( request, metadata=[ ("key", "val"), @@ -7228,18 +9450,15 @@ def test_list_customers_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() -def test_get_ekm_connections_rest_bad_request( - request_type=ekm_connections.GetEkmConnectionsRequest, -): +def test_get_partner_rest_bad_request(request_type=partners.GetPartnerRequest): client = CloudControlsPartnerCoreClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = { - "name": "organizations/sample1/locations/sample2/customers/sample3/workloads/sample4/ekmConnections" - } + request_init = {"name": 
"organizations/sample1/locations/sample2/partner"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -7254,32 +9473,32 @@ def test_get_ekm_connections_rest_bad_request( response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_ekm_connections(request) + client.get_partner(request) @pytest.mark.parametrize( "request_type", [ - ekm_connections.GetEkmConnectionsRequest, + partners.GetPartnerRequest, dict, ], ) -def test_get_ekm_connections_rest_call_success(request_type): +def test_get_partner_rest_call_success(request_type): client = CloudControlsPartnerCoreClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = { - "name": "organizations/sample1/locations/sample2/customers/sample3/workloads/sample4/ekmConnections" - } + request_init = {"name": "organizations/sample1/locations/sample2/partner"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = ekm_connections.EkmConnections( + return_value = partners.Partner( name="name_value", + operated_cloud_regions=["operated_cloud_regions_value"], + partner_project_id="partner_project_id_value", ) # Wrap the value into a proper Response obj @@ -7287,20 +9506,22 @@ def test_get_ekm_connections_rest_call_success(request_type): response_value.status_code = 200 # Convert return value to protobuf type - return_value = ekm_connections.EkmConnections.pb(return_value) + return_value = partners.Partner.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_ekm_connections(request) + response = client.get_partner(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, ekm_connections.EkmConnections) + assert isinstance(response, partners.Partner) assert response.name == "name_value" + assert response.operated_cloud_regions == ["operated_cloud_regions_value"] + assert response.partner_project_id == "partner_project_id_value" @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_ekm_connections_rest_interceptors(null_interceptor): +def test_get_partner_rest_interceptors(null_interceptor): transport = transports.CloudControlsPartnerCoreRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -7314,15 +9535,17 @@ def test_get_ekm_connections_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.CloudControlsPartnerCoreRestInterceptor, "post_get_ekm_connections" + transports.CloudControlsPartnerCoreRestInterceptor, "post_get_partner" ) as post, mock.patch.object( - transports.CloudControlsPartnerCoreRestInterceptor, "pre_get_ekm_connections" + transports.CloudControlsPartnerCoreRestInterceptor, + "post_get_partner_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.CloudControlsPartnerCoreRestInterceptor, "pre_get_partner" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = ekm_connections.GetEkmConnectionsRequest.pb( - ekm_connections.GetEkmConnectionsRequest() - ) + post_with_metadata.assert_not_called() + pb_message = partners.GetPartnerRequest.pb(partners.GetPartnerRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -7333,20 +9556,19 @@ def test_get_ekm_connections_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = ekm_connections.EkmConnections.to_json( - ekm_connections.EkmConnections() - ) + return_value = partners.Partner.to_json(partners.Partner()) req.return_value.content = return_value - request = ekm_connections.GetEkmConnectionsRequest() + request = partners.GetPartnerRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = ekm_connections.EkmConnections() + post.return_value = partners.Partner() + post_with_metadata.return_value = partners.Partner(), metadata - client.get_ekm_connections( + client.get_partner( request, metadata=[ ("key", "val"), @@ -7356,18 +9578,15 @@ def test_get_ekm_connections_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() -def test_get_partner_permissions_rest_bad_request( - request_type=partner_permissions.GetPartnerPermissionsRequest, -): +def test_create_customer_rest_bad_request(request_type=customers.CreateCustomerRequest): client = CloudControlsPartnerCoreClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = { - "name": "organizations/sample1/locations/sample2/customers/sample3/workloads/sample4/partnerPermissions" - } + request_init = {"parent": "organizations/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -7382,35 +9601,116 @@ def test_get_partner_permissions_rest_bad_request( response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_partner_permissions(request) + client.create_customer(request) @pytest.mark.parametrize( "request_type", [ - partner_permissions.GetPartnerPermissionsRequest, + customers.CreateCustomerRequest, dict, ], ) -def test_get_partner_permissions_rest_call_success(request_type): +def test_create_customer_rest_call_success(request_type): client = CloudControlsPartnerCoreClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = { - "name": "organizations/sample1/locations/sample2/customers/sample3/workloads/sample4/partnerPermissions" + request_init = {"parent": "organizations/sample1/locations/sample2"} + request_init["customer"] = { + "name": "name_value", + "display_name": "display_name_value", + "customer_onboarding_state": { + "onboarding_steps": [ + { + "step": 1, + "start_time": {"seconds": 751, "nanos": 543}, + "completion_time": {}, + "completion_state": 1, + } + ] + }, + "is_onboarded": True, + "organization_domain": "organization_domain_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = customers.CreateCustomerRequest.meta.fields["customer"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["customer"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["customer"][field])): + del request_init["customer"][field][i][subfield] + else: + del request_init["customer"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = partner_permissions.PartnerPermissions( + return_value = customers.Customer( name="name_value", - partner_permissions=[ - partner_permissions.PartnerPermissions.Permission.ACCESS_TRANSPARENCY_AND_EMERGENCY_ACCESS_LOGS - ], + display_name="display_name_value", + is_onboarded=True, + organization_domain="organization_domain_value", ) # Wrap the value into a proper Response obj @@ -7418,23 +9718,23 @@ def test_get_partner_permissions_rest_call_success(request_type): response_value.status_code = 200 # Convert return value to protobuf type - return_value = partner_permissions.PartnerPermissions.pb(return_value) + return_value = customers.Customer.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_partner_permissions(request) + response = client.create_customer(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, partner_permissions.PartnerPermissions) + assert isinstance(response, customers.Customer) assert response.name == "name_value" - assert response.partner_permissions == [ - partner_permissions.PartnerPermissions.Permission.ACCESS_TRANSPARENCY_AND_EMERGENCY_ACCESS_LOGS - ] + assert response.display_name == "display_name_value" + assert response.is_onboarded is True + assert response.organization_domain == "organization_domain_value" @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_partner_permissions_rest_interceptors(null_interceptor): +def test_create_customer_rest_interceptors(null_interceptor): transport = transports.CloudControlsPartnerCoreRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -7448,16 +9748,18 @@ def test_get_partner_permissions_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.CloudControlsPartnerCoreRestInterceptor, - "post_get_partner_permissions", + transports.CloudControlsPartnerCoreRestInterceptor, "post_create_customer" ) as post, mock.patch.object( transports.CloudControlsPartnerCoreRestInterceptor, - "pre_get_partner_permissions", + "post_create_customer_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.CloudControlsPartnerCoreRestInterceptor, "pre_create_customer" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = partner_permissions.GetPartnerPermissionsRequest.pb( - partner_permissions.GetPartnerPermissionsRequest() + post_with_metadata.assert_not_called() + pb_message = customers.CreateCustomerRequest.pb( + customers.CreateCustomerRequest() ) transcode.return_value = { "method": "post", @@ -7469,20 +9771,19 @@ def test_get_partner_permissions_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = partner_permissions.PartnerPermissions.to_json( - partner_permissions.PartnerPermissions() - ) + return_value = customers.Customer.to_json(customers.Customer()) req.return_value.content = return_value - request = partner_permissions.GetPartnerPermissionsRequest() + request = customers.CreateCustomerRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = partner_permissions.PartnerPermissions() + post.return_value = customers.Customer() + post_with_metadata.return_value = customers.Customer(), metadata - client.get_partner_permissions( + client.create_customer( request, metadata=[ ("key", "val"), @@ -7492,17 +9793,18 @@ def test_get_partner_permissions_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() -def test_list_access_approval_requests_rest_bad_request( - request_type=access_approval_requests.ListAccessApprovalRequestsRequest, -): +def test_update_customer_rest_bad_request(request_type=customers.UpdateCustomerRequest): client = CloudControlsPartnerCoreClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding request_init = { - "parent": "organizations/sample1/locations/sample2/customers/sample3/workloads/sample4" + "customer": { + "name": "organizations/sample1/locations/sample2/customers/sample3" + } } request = request_type(**request_init) @@ -7518,33 +9820,120 @@ def 
test_list_access_approval_requests_rest_bad_request( response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_access_approval_requests(request) + client.update_customer(request) @pytest.mark.parametrize( "request_type", [ - access_approval_requests.ListAccessApprovalRequestsRequest, + customers.UpdateCustomerRequest, dict, ], ) -def test_list_access_approval_requests_rest_call_success(request_type): +def test_update_customer_rest_call_success(request_type): client = CloudControlsPartnerCoreClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding request_init = { - "parent": "organizations/sample1/locations/sample2/customers/sample3/workloads/sample4" + "customer": { + "name": "organizations/sample1/locations/sample2/customers/sample3" + } } + request_init["customer"] = { + "name": "organizations/sample1/locations/sample2/customers/sample3", + "display_name": "display_name_value", + "customer_onboarding_state": { + "onboarding_steps": [ + { + "step": 1, + "start_time": {"seconds": 751, "nanos": 543}, + "completion_time": {}, + "completion_state": 1, + } + ] + }, + "is_onboarded": True, + "organization_domain": "organization_domain_value", + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = customers.UpdateCustomerRequest.meta.fields["customer"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["customer"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["customer"][field])): + del request_init["customer"][field][i][subfield] + else: + del request_init["customer"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = access_approval_requests.ListAccessApprovalRequestsResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], + return_value = customers.Customer( + name="name_value", + display_name="display_name_value", + is_onboarded=True, + organization_domain="organization_domain_value", ) # Wrap the value into a proper Response obj @@ -7552,23 +9941,23 @@ def test_list_access_approval_requests_rest_call_success(request_type): response_value.status_code = 200 # Convert return value to protobuf type - return_value = access_approval_requests.ListAccessApprovalRequestsResponse.pb( - return_value - ) + return_value = customers.Customer.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_access_approval_requests(request) + response = client.update_customer(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListAccessApprovalRequestsPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] + assert isinstance(response, customers.Customer) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.is_onboarded is True + assert response.organization_domain == "organization_domain_value" @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_access_approval_requests_rest_interceptors(null_interceptor): +def test_update_customer_rest_interceptors(null_interceptor): transport = transports.CloudControlsPartnerCoreRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -7582,16 +9971,18 @@ def test_list_access_approval_requests_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.CloudControlsPartnerCoreRestInterceptor, - "post_list_access_approval_requests", + transports.CloudControlsPartnerCoreRestInterceptor, "post_update_customer" ) as post, mock.patch.object( transports.CloudControlsPartnerCoreRestInterceptor, - "pre_list_access_approval_requests", + "post_update_customer_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.CloudControlsPartnerCoreRestInterceptor, "pre_update_customer" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = access_approval_requests.ListAccessApprovalRequestsRequest.pb( - access_approval_requests.ListAccessApprovalRequestsRequest() + post_with_metadata.assert_not_called() + pb_message = customers.UpdateCustomerRequest.pb( + customers.UpdateCustomerRequest() ) transcode.return_value = { "method": "post", @@ -7603,24 +9994,19 @@ def test_list_access_approval_requests_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = ( - access_approval_requests.ListAccessApprovalRequestsResponse.to_json( - access_approval_requests.ListAccessApprovalRequestsResponse() - ) - ) + return_value = customers.Customer.to_json(customers.Customer()) req.return_value.content = return_value - request = access_approval_requests.ListAccessApprovalRequestsRequest() + request = customers.UpdateCustomerRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = ( - access_approval_requests.ListAccessApprovalRequestsResponse() - ) + post.return_value = customers.Customer() + post_with_metadata.return_value = customers.Customer(), metadata - client.list_access_approval_requests( + client.update_customer( request, metadata=[ ("key", "val"), @@ -7630,14 +10016,15 @@ def test_list_access_approval_requests_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() -def test_get_partner_rest_bad_request(request_type=partners.GetPartnerRequest): +def test_delete_customer_rest_bad_request(request_type=customers.DeleteCustomerRequest): client = CloudControlsPartnerCoreClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"name": "organizations/sample1/locations/sample2/partner"} + request_init = {"name": "organizations/sample1/locations/sample2/customers/sample3"} request = request_type(**request_init) # Mock the http 
request call within the method and fake a BadRequest error. @@ -7652,55 +10039,45 @@ def test_get_partner_rest_bad_request(request_type=partners.GetPartnerRequest): response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_partner(request) + client.delete_customer(request) @pytest.mark.parametrize( "request_type", [ - partners.GetPartnerRequest, + customers.DeleteCustomerRequest, dict, ], ) -def test_get_partner_rest_call_success(request_type): +def test_delete_customer_rest_call_success(request_type): client = CloudControlsPartnerCoreClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"name": "organizations/sample1/locations/sample2/partner"} + request_init = {"name": "organizations/sample1/locations/sample2/customers/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = partners.Partner( - name="name_value", - operated_cloud_regions=["operated_cloud_regions_value"], - partner_project_id="partner_project_id_value", - ) + return_value = None # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = partners.Partner.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_partner(request) + response = client.delete_customer(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, partners.Partner) - assert response.name == "name_value" - assert response.operated_cloud_regions == ["operated_cloud_regions_value"] - assert response.partner_project_id == "partner_project_id_value" + assert response is None @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_partner_rest_interceptors(null_interceptor): +def test_delete_customer_rest_interceptors(null_interceptor): transport = transports.CloudControlsPartnerCoreRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -7714,13 +10091,12 @@ def test_get_partner_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.CloudControlsPartnerCoreRestInterceptor, "post_get_partner" - ) as post, mock.patch.object( - transports.CloudControlsPartnerCoreRestInterceptor, "pre_get_partner" + transports.CloudControlsPartnerCoreRestInterceptor, "pre_delete_customer" ) as pre: pre.assert_not_called() - post.assert_not_called() - pb_message = partners.GetPartnerRequest.pb(partners.GetPartnerRequest()) + pb_message = customers.DeleteCustomerRequest.pb( + customers.DeleteCustomerRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -7731,18 +10107,15 @@ def test_get_partner_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = partners.Partner.to_json(partners.Partner()) - req.return_value.content = return_value - request = partners.GetPartnerRequest() + request = customers.DeleteCustomerRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = partners.Partner() - client.get_partner( + client.delete_customer( request, metadata=[ ("key", "val"), @@ -7751,7 +10124,6 @@ def test_get_partner_rest_interceptors(null_interceptor): ) pre.assert_called_once() - post.assert_called_once() def test_initialize_client_w_rest(): @@ -7927,6 +10299,66 @@ def test_get_partner_empty_call_rest(): assert args[0] == request_msg +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_customer_empty_call_rest(): + client = CloudControlsPartnerCoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.create_customer), "__call__") as call: + client.create_customer(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = customers.CreateCustomerRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_customer_empty_call_rest(): + client = CloudControlsPartnerCoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.update_customer), "__call__") as call: + client.update_customer(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = customers.UpdateCustomerRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_customer_empty_call_rest(): + client = CloudControlsPartnerCoreClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.delete_customer), "__call__") as call: + client.delete_customer(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = customers.DeleteCustomerRequest() + + assert args[0] == request_msg + + def test_transport_grpc_default(): # A client should use the gRPC transport by default. client = CloudControlsPartnerCoreClient( @@ -7968,6 +10400,9 @@ def test_cloud_controls_partner_core_base_transport(): "get_partner_permissions", "list_access_approval_requests", "get_partner", + "create_customer", + "update_customer", + "delete_customer", ) for method in methods: with pytest.raises(NotImplementedError): @@ -8254,6 +10689,15 @@ def test_cloud_controls_partner_core_client_transport_session_collision(transpor session1 = client1.transport.get_partner._session session2 = client2.transport.get_partner._session assert session1 != session2 + session1 = client1.transport.create_customer._session + session2 = client2.transport.create_customer._session + assert session1 != session2 + session1 = client1.transport.update_customer._session + session2 = client2.transport.update_customer._session + assert session1 != session2 + session1 = client1.transport.delete_customer._session + session2 = client2.transport.delete_customer._session + assert session1 != session2 def test_cloud_controls_partner_core_grpc_transport_channel(): diff --git a/packages/google-cloud-cloudcontrolspartner/tests/unit/gapic/cloudcontrolspartner_v1beta/test_cloud_controls_partner_monitoring.py b/packages/google-cloud-cloudcontrolspartner/tests/unit/gapic/cloudcontrolspartner_v1beta/test_cloud_controls_partner_monitoring.py index 8f9319b093ae..622ddcdb9bad 100644 --- a/packages/google-cloud-cloudcontrolspartner/tests/unit/gapic/cloudcontrolspartner_v1beta/test_cloud_controls_partner_monitoring.py +++ b/packages/google-cloud-cloudcontrolspartner/tests/unit/gapic/cloudcontrolspartner_v1beta/test_cloud_controls_partner_monitoring.py @@ -62,6 +62,13 @@ ) from google.cloud.cloudcontrolspartner_v1beta.types import violations +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -352,6 +359,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
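
The parametrized tests added below exercise the `_add_cred_info_for_auth_errors` helper that this patch adds to every generated client. As a reading aid only, here is a condensed, standalone sketch of that helper; the authoritative version is the method added to each generated `client.py` elsewhere in this patch, and the free-function form here is purely illustrative.

```python
from http import HTTPStatus
import json


def _add_cred_info_for_auth_errors(client, error):
    # Illustrative sketch: only 401/403/404 responses get credential
    # info appended to the error details.
    if error.code not in [
        HTTPStatus.UNAUTHORIZED,
        HTTPStatus.FORBIDDEN,
        HTTPStatus.NOT_FOUND,
    ]:
        return

    cred = client._transport._credentials

    # get_cred_info is only available in google-auth>=2.35.0
    if not hasattr(cred, "get_cred_info"):
        return

    cred_info = cred.get_cred_info()
    if cred_info and hasattr(error._details, "append"):
        error._details.append(json.dumps(cred_info))
```
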
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = CloudControlsPartnerMonitoringClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = CloudControlsPartnerMonitoringClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -2782,10 +2832,14 @@ def test_list_violations_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.CloudControlsPartnerMonitoringRestInterceptor, "post_list_violations" ) as post, mock.patch.object( + transports.CloudControlsPartnerMonitoringRestInterceptor, + "post_list_violations_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.CloudControlsPartnerMonitoringRestInterceptor, "pre_list_violations" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = violations.ListViolationsRequest.pb( violations.ListViolationsRequest() ) @@ -2811,6 +2865,7 @@ def test_list_violations_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = violations.ListViolationsResponse() + post_with_metadata.return_value = violations.ListViolationsResponse(), metadata client.list_violations( request, @@ -2822,6 +2877,7 @@ def test_list_violations_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_violation_rest_bad_request(request_type=violations.GetViolationRequest): @@ -2918,10 +2974,14 @@ def test_get_violation_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.CloudControlsPartnerMonitoringRestInterceptor, "post_get_violation" ) as post, mock.patch.object( + transports.CloudControlsPartnerMonitoringRestInterceptor, + "post_get_violation_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.CloudControlsPartnerMonitoringRestInterceptor, "pre_get_violation" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = violations.GetViolationRequest.pb(violations.GetViolationRequest()) transcode.return_value = { "method": "post", @@ -2943,6 +3003,7 @@ def test_get_violation_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = violations.Violation() + post_with_metadata.return_value = violations.Violation(), metadata client.get_violation( request, @@ -2954,6 
+3015,7 @@ def test_get_violation_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_initialize_client_w_rest(): diff --git a/packages/google-cloud-commerce-consumer-procurement/CHANGELOG.md b/packages/google-cloud-commerce-consumer-procurement/CHANGELOG.md index 152535f3a26d..d48165a6f1a6 100644 --- a/packages/google-cloud-commerce-consumer-procurement/CHANGELOG.md +++ b/packages/google-cloud-commerce-consumer-procurement/CHANGELOG.md @@ -1,5 +1,13 @@ # Changelog +## [0.1.12](https://github.com/googleapis/google-cloud-python/compare/google-cloud-commerce-consumer-procurement-v0.1.11...google-cloud-commerce-consumer-procurement-v0.1.12) (2025-02-12) + + +### Features + +* Add REST Interceptors which support reading metadata ([e22e2bd](https://github.com/googleapis/google-cloud-python/commit/e22e2bde55d11d2f85e9d2caf1d152a4027f88cf)) +* Add support for reading selective GAPIC generation methods from service YAML ([e22e2bd](https://github.com/googleapis/google-cloud-python/commit/e22e2bde55d11d2f85e9d2caf1d152a4027f88cf)) + ## [0.1.11](https://github.com/googleapis/google-cloud-python/compare/google-cloud-commerce-consumer-procurement-v0.1.10...google-cloud-commerce-consumer-procurement-v0.1.11) (2024-12-12) diff --git a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement/gapic_version.py b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement/gapic_version.py index 4b834789ba9e..17bbab4c1877 100644 --- a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement/gapic_version.py +++ b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.1.11" # {x-release-please-version} +__version__ = "0.1.12" # {x-release-please-version} diff --git a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/gapic_version.py b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/gapic_version.py index 4b834789ba9e..17bbab4c1877 100644 --- a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/gapic_version.py +++ b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.1.11" # {x-release-please-version} +__version__ = "0.1.12" # {x-release-please-version} diff --git a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/consumer_procurement_service/client.py b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/consumer_procurement_service/client.py index 5ade0f639a10..2866eb28b824 100644 --- a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/consumer_procurement_service/client.py +++ b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/consumer_procurement_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. 
# from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -520,6 +522,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -1372,16 +1401,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/consumer_procurement_service/transports/rest.py b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/consumer_procurement_service/transports/rest.py index f826ffcf49c1..429c901481d9 100644 --- a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/consumer_procurement_service/transports/rest.py +++ b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/consumer_procurement_service/transports/rest.py @@ -137,12 +137,35 @@ def post_cancel_order( ) -> operations_pb2.Operation: """Post-rpc interceptor for cancel_order - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_cancel_order_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ConsumerProcurementService server but before - it is returned to user code. + it is returned to user code. This `post_cancel_order` interceptor runs + before the `post_cancel_order_with_metadata` interceptor. """ return response + def post_cancel_order_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for cancel_order + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ConsumerProcurementService server but before it is returned to user code. 
+ + We recommend only using this `post_cancel_order_with_metadata` + interceptor in new development instead of the `post_cancel_order` interceptor. + When both interceptors are used, this `post_cancel_order_with_metadata` interceptor runs after the + `post_cancel_order` interceptor. The (possibly modified) response returned by + `post_cancel_order` will be passed to + `post_cancel_order_with_metadata`. + """ + return response, metadata + def pre_get_order( self, request: procurement_service.GetOrderRequest, @@ -160,12 +183,33 @@ def pre_get_order( def post_get_order(self, response: order.Order) -> order.Order: """Post-rpc interceptor for get_order - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_order_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ConsumerProcurementService server but before - it is returned to user code. + it is returned to user code. This `post_get_order` interceptor runs + before the `post_get_order_with_metadata` interceptor. """ return response + def post_get_order_with_metadata( + self, response: order.Order, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[order.Order, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_order + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ConsumerProcurementService server but before it is returned to user code. + + We recommend only using this `post_get_order_with_metadata` + interceptor in new development instead of the `post_get_order` interceptor. + When both interceptors are used, this `post_get_order_with_metadata` interceptor runs after the + `post_get_order` interceptor. The (possibly modified) response returned by + `post_get_order` will be passed to + `post_get_order_with_metadata`. + """ + return response, metadata + def pre_list_orders( self, request: procurement_service.ListOrdersRequest, @@ -185,12 +229,37 @@ def post_list_orders( ) -> procurement_service.ListOrdersResponse: """Post-rpc interceptor for list_orders - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_orders_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ConsumerProcurementService server but before - it is returned to user code. + it is returned to user code. This `post_list_orders` interceptor runs + before the `post_list_orders_with_metadata` interceptor. """ return response + def post_list_orders_with_metadata( + self, + response: procurement_service.ListOrdersResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + procurement_service.ListOrdersResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_orders + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ConsumerProcurementService server but before it is returned to user code. + + We recommend only using this `post_list_orders_with_metadata` + interceptor in new development instead of the `post_list_orders` interceptor. + When both interceptors are used, this `post_list_orders_with_metadata` interceptor runs after the + `post_list_orders` interceptor. The (possibly modified) response returned by + `post_list_orders` will be passed to + `post_list_orders_with_metadata`. 
+ """ + return response, metadata + def pre_modify_order( self, request: procurement_service.ModifyOrderRequest, @@ -210,12 +279,35 @@ def post_modify_order( ) -> operations_pb2.Operation: """Post-rpc interceptor for modify_order - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_modify_order_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ConsumerProcurementService server but before - it is returned to user code. + it is returned to user code. This `post_modify_order` interceptor runs + before the `post_modify_order_with_metadata` interceptor. """ return response + def post_modify_order_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for modify_order + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ConsumerProcurementService server but before it is returned to user code. + + We recommend only using this `post_modify_order_with_metadata` + interceptor in new development instead of the `post_modify_order` interceptor. + When both interceptors are used, this `post_modify_order_with_metadata` interceptor runs after the + `post_modify_order` interceptor. The (possibly modified) response returned by + `post_modify_order` will be passed to + `post_modify_order_with_metadata`. + """ + return response, metadata + def pre_place_order( self, request: procurement_service.PlaceOrderRequest, @@ -235,12 +327,35 @@ def post_place_order( ) -> operations_pb2.Operation: """Post-rpc interceptor for place_order - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_place_order_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ConsumerProcurementService server but before - it is returned to user code. + it is returned to user code. This `post_place_order` interceptor runs + before the `post_place_order_with_metadata` interceptor. """ return response + def post_place_order_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for place_order + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ConsumerProcurementService server but before it is returned to user code. + + We recommend only using this `post_place_order_with_metadata` + interceptor in new development instead of the `post_place_order` interceptor. + When both interceptors are used, this `post_place_order_with_metadata` interceptor runs after the + `post_place_order` interceptor. The (possibly modified) response returned by + `post_place_order` will be passed to + `post_place_order_with_metadata`. 
+ """ + return response, metadata + def pre_get_operation( self, request: operations_pb2.GetOperationRequest, @@ -525,6 +640,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_cancel_order(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_cancel_order_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -685,6 +804,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_order(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_order_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -831,6 +954,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_orders(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_orders_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -984,6 +1111,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_modify_order(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_modify_order_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1135,6 +1266,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_place_order(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_place_order_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/license_management_service/client.py b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/license_management_service/client.py index d8d488348037..2bcad5dac59a 100644 --- a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/license_management_service/client.py +++ b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/license_management_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -493,6 +495,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. 
+ """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -1330,16 +1359,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/license_management_service/transports/rest.py b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/license_management_service/transports/rest.py index 28c9b57e26ce..e48c416ba3bb 100644 --- a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/license_management_service/transports/rest.py +++ b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1/services/license_management_service/transports/rest.py @@ -137,12 +137,38 @@ def post_assign( ) -> license_management_service.AssignResponse: """Post-rpc interceptor for assign - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_assign_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the LicenseManagementService server but before - it is returned to user code. + it is returned to user code. This `post_assign` interceptor runs + before the `post_assign_with_metadata` interceptor. """ return response + def post_assign_with_metadata( + self, + response: license_management_service.AssignResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + license_management_service.AssignResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for assign + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the LicenseManagementService server but before it is returned to user code. + + We recommend only using this `post_assign_with_metadata` + interceptor in new development instead of the `post_assign` interceptor. + When both interceptors are used, this `post_assign_with_metadata` interceptor runs after the + `post_assign` interceptor. The (possibly modified) response returned by + `post_assign` will be passed to + `post_assign_with_metadata`. 
+ """ + return response, metadata + def pre_enumerate_licensed_users( self, request: license_management_service.EnumerateLicensedUsersRequest, @@ -163,12 +189,38 @@ def post_enumerate_licensed_users( ) -> license_management_service.EnumerateLicensedUsersResponse: """Post-rpc interceptor for enumerate_licensed_users - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_enumerate_licensed_users_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the LicenseManagementService server but before - it is returned to user code. + it is returned to user code. This `post_enumerate_licensed_users` interceptor runs + before the `post_enumerate_licensed_users_with_metadata` interceptor. """ return response + def post_enumerate_licensed_users_with_metadata( + self, + response: license_management_service.EnumerateLicensedUsersResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + license_management_service.EnumerateLicensedUsersResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for enumerate_licensed_users + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the LicenseManagementService server but before it is returned to user code. + + We recommend only using this `post_enumerate_licensed_users_with_metadata` + interceptor in new development instead of the `post_enumerate_licensed_users` interceptor. + When both interceptors are used, this `post_enumerate_licensed_users_with_metadata` interceptor runs after the + `post_enumerate_licensed_users` interceptor. The (possibly modified) response returned by + `post_enumerate_licensed_users` will be passed to + `post_enumerate_licensed_users_with_metadata`. + """ + return response, metadata + def pre_get_license_pool( self, request: license_management_service.GetLicensePoolRequest, @@ -189,12 +241,37 @@ def post_get_license_pool( ) -> license_management_service.LicensePool: """Post-rpc interceptor for get_license_pool - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_license_pool_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the LicenseManagementService server but before - it is returned to user code. + it is returned to user code. This `post_get_license_pool` interceptor runs + before the `post_get_license_pool_with_metadata` interceptor. """ return response + def post_get_license_pool_with_metadata( + self, + response: license_management_service.LicensePool, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + license_management_service.LicensePool, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for get_license_pool + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the LicenseManagementService server but before it is returned to user code. + + We recommend only using this `post_get_license_pool_with_metadata` + interceptor in new development instead of the `post_get_license_pool` interceptor. + When both interceptors are used, this `post_get_license_pool_with_metadata` interceptor runs after the + `post_get_license_pool` interceptor. The (possibly modified) response returned by + `post_get_license_pool` will be passed to + `post_get_license_pool_with_metadata`. 
+ """ + return response, metadata + def pre_unassign( self, request: license_management_service.UnassignRequest, @@ -215,12 +292,38 @@ def post_unassign( ) -> license_management_service.UnassignResponse: """Post-rpc interceptor for unassign - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_unassign_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the LicenseManagementService server but before - it is returned to user code. + it is returned to user code. This `post_unassign` interceptor runs + before the `post_unassign_with_metadata` interceptor. """ return response + def post_unassign_with_metadata( + self, + response: license_management_service.UnassignResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + license_management_service.UnassignResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for unassign + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the LicenseManagementService server but before it is returned to user code. + + We recommend only using this `post_unassign_with_metadata` + interceptor in new development instead of the `post_unassign` interceptor. + When both interceptors are used, this `post_unassign_with_metadata` interceptor runs after the + `post_unassign` interceptor. The (possibly modified) response returned by + `post_unassign` will be passed to + `post_unassign_with_metadata`. + """ + return response, metadata + def pre_update_license_pool( self, request: license_management_service.UpdateLicensePoolRequest, @@ -241,12 +344,37 @@ def post_update_license_pool( ) -> license_management_service.LicensePool: """Post-rpc interceptor for update_license_pool - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_license_pool_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the LicenseManagementService server but before - it is returned to user code. + it is returned to user code. This `post_update_license_pool` interceptor runs + before the `post_update_license_pool_with_metadata` interceptor. """ return response + def post_update_license_pool_with_metadata( + self, + response: license_management_service.LicensePool, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + license_management_service.LicensePool, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for update_license_pool + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the LicenseManagementService server but before it is returned to user code. + + We recommend only using this `post_update_license_pool_with_metadata` + interceptor in new development instead of the `post_update_license_pool` interceptor. + When both interceptors are used, this `post_update_license_pool_with_metadata` interceptor runs after the + `post_update_license_pool` interceptor. The (possibly modified) response returned by + `post_update_license_pool` will be passed to + `post_update_license_pool_with_metadata`. 
+ """ + return response, metadata + def pre_get_operation( self, request: operations_pb2.GetOperationRequest, @@ -486,6 +614,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_assign(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_assign_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -634,6 +766,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_enumerate_licensed_users(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_enumerate_licensed_users_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -784,6 +920,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_license_pool(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_license_pool_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -936,6 +1076,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_unassign(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_unassign_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1092,6 +1236,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_license_pool(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_license_pool_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1alpha1/gapic_version.py b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1alpha1/gapic_version.py index 4b834789ba9e..17bbab4c1877 100644 --- a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1alpha1/gapic_version.py +++ b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1alpha1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.1.11" # {x-release-please-version} +__version__ = "0.1.12" # {x-release-please-version} diff --git a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1alpha1/services/consumer_procurement_service/client.py b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1alpha1/services/consumer_procurement_service/client.py index 863d914b359d..c2707e33ddac 100644 --- a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1alpha1/services/consumer_procurement_service/client.py +++ b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1alpha1/services/consumer_procurement_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -520,6 +522,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -1148,16 +1177,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1alpha1/services/consumer_procurement_service/transports/rest.py b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1alpha1/services/consumer_procurement_service/transports/rest.py index 8ba9f7686121..804ef46f0bb1 100644 --- a/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1alpha1/services/consumer_procurement_service/transports/rest.py +++ b/packages/google-cloud-commerce-consumer-procurement/google/cloud/commerce_consumer_procurement_v1alpha1/services/consumer_procurement_service/transports/rest.py @@ -119,12 +119,33 @@ def pre_get_order( def post_get_order(self, response: order.Order) -> order.Order: """Post-rpc interceptor for get_order - Override in a subclass to manipulate the response + DEPRECATED. 
Please use the `post_get_order_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ConsumerProcurementService server but before - it is returned to user code. + it is returned to user code. This `post_get_order` interceptor runs + before the `post_get_order_with_metadata` interceptor. """ return response + def post_get_order_with_metadata( + self, response: order.Order, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[order.Order, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_order + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ConsumerProcurementService server but before it is returned to user code. + + We recommend only using this `post_get_order_with_metadata` + interceptor in new development instead of the `post_get_order` interceptor. + When both interceptors are used, this `post_get_order_with_metadata` interceptor runs after the + `post_get_order` interceptor. The (possibly modified) response returned by + `post_get_order` will be passed to + `post_get_order_with_metadata`. + """ + return response, metadata + def pre_list_orders( self, request: procurement_service.ListOrdersRequest, @@ -144,12 +165,37 @@ def post_list_orders( ) -> procurement_service.ListOrdersResponse: """Post-rpc interceptor for list_orders - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_orders_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ConsumerProcurementService server but before - it is returned to user code. + it is returned to user code. This `post_list_orders` interceptor runs + before the `post_list_orders_with_metadata` interceptor. """ return response + def post_list_orders_with_metadata( + self, + response: procurement_service.ListOrdersResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + procurement_service.ListOrdersResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_orders + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ConsumerProcurementService server but before it is returned to user code. + + We recommend only using this `post_list_orders_with_metadata` + interceptor in new development instead of the `post_list_orders` interceptor. + When both interceptors are used, this `post_list_orders_with_metadata` interceptor runs after the + `post_list_orders` interceptor. The (possibly modified) response returned by + `post_list_orders` will be passed to + `post_list_orders_with_metadata`. + """ + return response, metadata + def pre_place_order( self, request: procurement_service.PlaceOrderRequest, @@ -169,12 +215,35 @@ def post_place_order( ) -> operations_pb2.Operation: """Post-rpc interceptor for place_order - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_place_order_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ConsumerProcurementService server but before - it is returned to user code. + it is returned to user code. This `post_place_order` interceptor runs + before the `post_place_order_with_metadata` interceptor. 
""" return response + def post_place_order_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for place_order + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ConsumerProcurementService server but before it is returned to user code. + + We recommend only using this `post_place_order_with_metadata` + interceptor in new development instead of the `post_place_order` interceptor. + When both interceptors are used, this `post_place_order_with_metadata` interceptor runs after the + `post_place_order` interceptor. The (possibly modified) response returned by + `post_place_order` will be passed to + `post_place_order_with_metadata`. + """ + return response, metadata + def pre_get_operation( self, request: operations_pb2.GetOperationRequest, @@ -472,6 +541,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_order(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_order_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -618,6 +691,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_orders(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_orders_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -771,6 +848,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_place_order(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_place_order_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/snippet_metadata_google.cloud.commerce.consumer.procurement.v1.json b/packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/snippet_metadata_google.cloud.commerce.consumer.procurement.v1.json index 152d2dbcbb25..ff84799be5b5 100644 --- a/packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/snippet_metadata_google.cloud.commerce.consumer.procurement.v1.json +++ b/packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/snippet_metadata_google.cloud.commerce.consumer.procurement.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-commerce-consumer-procurement", - "version": "0.1.11" + "version": "0.1.12" }, "snippets": [ { diff --git a/packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/snippet_metadata_google.cloud.commerce.consumer.procurement.v1alpha1.json b/packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/snippet_metadata_google.cloud.commerce.consumer.procurement.v1alpha1.json index c82e7b22d7ee..7a442a48c03a 100644 --- a/packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/snippet_metadata_google.cloud.commerce.consumer.procurement.v1alpha1.json +++ 
b/packages/google-cloud-commerce-consumer-procurement/samples/generated_samples/snippet_metadata_google.cloud.commerce.consumer.procurement.v1alpha1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-commerce-consumer-procurement", - "version": "0.1.11" + "version": "0.1.12" }, "snippets": [ { diff --git a/packages/google-cloud-commerce-consumer-procurement/tests/unit/gapic/commerce_consumer_procurement_v1/test_consumer_procurement_service.py b/packages/google-cloud-commerce-consumer-procurement/tests/unit/gapic/commerce_consumer_procurement_v1/test_consumer_procurement_service.py index 181fa80f615b..9b84638e05cd 100644 --- a/packages/google-cloud-commerce-consumer-procurement/tests/unit/gapic/commerce_consumer_procurement_v1/test_consumer_procurement_service.py +++ b/packages/google-cloud-commerce-consumer-procurement/tests/unit/gapic/commerce_consumer_procurement_v1/test_consumer_procurement_service.py @@ -74,6 +74,13 @@ procurement_service, ) +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -353,6 +360,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = ConsumerProcurementServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = ConsumerProcurementServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -3992,10 +4042,14 @@ def test_place_order_rest_interceptors(null_interceptor): ), mock.patch.object( transports.ConsumerProcurementServiceRestInterceptor, "post_place_order" ) as post, mock.patch.object( + transports.ConsumerProcurementServiceRestInterceptor, + "post_place_order_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ConsumerProcurementServiceRestInterceptor, "pre_place_order" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = procurement_service.PlaceOrderRequest.pb( procurement_service.PlaceOrderRequest() ) @@ -4019,6 +4073,7 @@ def test_place_order_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value 
= operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.place_order( request, @@ -4030,6 +4085,7 @@ def test_place_order_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_order_rest_bad_request(request_type=procurement_service.GetOrderRequest): @@ -4116,10 +4172,14 @@ def test_get_order_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ConsumerProcurementServiceRestInterceptor, "post_get_order" ) as post, mock.patch.object( + transports.ConsumerProcurementServiceRestInterceptor, + "post_get_order_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ConsumerProcurementServiceRestInterceptor, "pre_get_order" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = procurement_service.GetOrderRequest.pb( procurement_service.GetOrderRequest() ) @@ -4143,6 +4203,7 @@ def test_get_order_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = order.Order() + post_with_metadata.return_value = order.Order(), metadata client.get_order( request, @@ -4154,6 +4215,7 @@ def test_get_order_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_orders_rest_bad_request( @@ -4238,10 +4300,14 @@ def test_list_orders_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ConsumerProcurementServiceRestInterceptor, "post_list_orders" ) as post, mock.patch.object( + transports.ConsumerProcurementServiceRestInterceptor, + "post_list_orders_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ConsumerProcurementServiceRestInterceptor, "pre_list_orders" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = procurement_service.ListOrdersRequest.pb( procurement_service.ListOrdersRequest() ) @@ -4267,6 +4333,10 @@ def test_list_orders_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = procurement_service.ListOrdersResponse() + post_with_metadata.return_value = ( + procurement_service.ListOrdersResponse(), + metadata, + ) client.list_orders( request, @@ -4278,6 +4348,7 @@ def test_list_orders_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_modify_order_rest_bad_request( @@ -4358,10 +4429,14 @@ def test_modify_order_rest_interceptors(null_interceptor): ), mock.patch.object( transports.ConsumerProcurementServiceRestInterceptor, "post_modify_order" ) as post, mock.patch.object( + transports.ConsumerProcurementServiceRestInterceptor, + "post_modify_order_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ConsumerProcurementServiceRestInterceptor, "pre_modify_order" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = procurement_service.ModifyOrderRequest.pb( procurement_service.ModifyOrderRequest() ) @@ -4385,6 +4460,7 @@ def test_modify_order_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.modify_order( request, @@ -4396,6 +4472,7 @@ def 
test_modify_order_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_cancel_order_rest_bad_request( @@ -4476,10 +4553,14 @@ def test_cancel_order_rest_interceptors(null_interceptor): ), mock.patch.object( transports.ConsumerProcurementServiceRestInterceptor, "post_cancel_order" ) as post, mock.patch.object( + transports.ConsumerProcurementServiceRestInterceptor, + "post_cancel_order_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ConsumerProcurementServiceRestInterceptor, "pre_cancel_order" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = procurement_service.CancelOrderRequest.pb( procurement_service.CancelOrderRequest() ) @@ -4503,6 +4584,7 @@ def test_cancel_order_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.cancel_order( request, @@ -4514,6 +4596,7 @@ def test_cancel_order_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_operation_rest_bad_request( diff --git a/packages/google-cloud-commerce-consumer-procurement/tests/unit/gapic/commerce_consumer_procurement_v1/test_license_management_service.py b/packages/google-cloud-commerce-consumer-procurement/tests/unit/gapic/commerce_consumer_procurement_v1/test_license_management_service.py index 5bad798cbb10..8b1317603f8f 100644 --- a/packages/google-cloud-commerce-consumer-procurement/tests/unit/gapic/commerce_consumer_procurement_v1/test_license_management_service.py +++ b/packages/google-cloud-commerce-consumer-procurement/tests/unit/gapic/commerce_consumer_procurement_v1/test_license_management_service.py @@ -65,6 +65,13 @@ license_management_service, ) +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -340,6 +347,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = LicenseManagementServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = LicenseManagementServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -4527,10 +4577,14 @@ def test_get_license_pool_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.LicenseManagementServiceRestInterceptor, "post_get_license_pool" ) as post, mock.patch.object( + transports.LicenseManagementServiceRestInterceptor, + "post_get_license_pool_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.LicenseManagementServiceRestInterceptor, "pre_get_license_pool" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = license_management_service.GetLicensePoolRequest.pb( license_management_service.GetLicensePoolRequest() ) @@ -4556,6 +4610,10 @@ def test_get_license_pool_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = license_management_service.LicensePool() + post_with_metadata.return_value = ( + license_management_service.LicensePool(), + metadata, + ) client.get_license_pool( request, @@ -4567,6 +4625,7 @@ def test_get_license_pool_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_license_pool_rest_bad_request( @@ -4743,10 +4802,14 @@ def test_update_license_pool_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.LicenseManagementServiceRestInterceptor, "post_update_license_pool" ) as post, mock.patch.object( + transports.LicenseManagementServiceRestInterceptor, + "post_update_license_pool_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.LicenseManagementServiceRestInterceptor, "pre_update_license_pool" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = license_management_service.UpdateLicensePoolRequest.pb( license_management_service.UpdateLicensePoolRequest() ) @@ -4772,6 +4835,10 @@ def test_update_license_pool_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = license_management_service.LicensePool() + post_with_metadata.return_value = ( + license_management_service.LicensePool(), + 
metadata, + ) client.update_license_pool( request, @@ -4783,6 +4850,7 @@ def test_update_license_pool_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_assign_rest_bad_request(request_type=license_management_service.AssignRequest): @@ -4862,10 +4930,13 @@ def test_assign_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.LicenseManagementServiceRestInterceptor, "post_assign" ) as post, mock.patch.object( + transports.LicenseManagementServiceRestInterceptor, "post_assign_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.LicenseManagementServiceRestInterceptor, "pre_assign" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = license_management_service.AssignRequest.pb( license_management_service.AssignRequest() ) @@ -4891,6 +4962,10 @@ def test_assign_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = license_management_service.AssignResponse() + post_with_metadata.return_value = ( + license_management_service.AssignResponse(), + metadata, + ) client.assign( request, @@ -4902,6 +4977,7 @@ def test_assign_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_unassign_rest_bad_request( @@ -4983,10 +5059,14 @@ def test_unassign_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.LicenseManagementServiceRestInterceptor, "post_unassign" ) as post, mock.patch.object( + transports.LicenseManagementServiceRestInterceptor, + "post_unassign_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.LicenseManagementServiceRestInterceptor, "pre_unassign" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = license_management_service.UnassignRequest.pb( license_management_service.UnassignRequest() ) @@ -5012,6 +5092,10 @@ def test_unassign_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = license_management_service.UnassignResponse() + post_with_metadata.return_value = ( + license_management_service.UnassignResponse(), + metadata, + ) client.unassign( request, @@ -5023,6 +5107,7 @@ def test_unassign_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_enumerate_licensed_users_rest_bad_request( @@ -5110,11 +5195,15 @@ def test_enumerate_licensed_users_rest_interceptors(null_interceptor): transports.LicenseManagementServiceRestInterceptor, "post_enumerate_licensed_users", ) as post, mock.patch.object( + transports.LicenseManagementServiceRestInterceptor, + "post_enumerate_licensed_users_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.LicenseManagementServiceRestInterceptor, "pre_enumerate_licensed_users", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = license_management_service.EnumerateLicensedUsersRequest.pb( license_management_service.EnumerateLicensedUsersRequest() ) @@ -5142,6 +5231,10 @@ def test_enumerate_licensed_users_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = license_management_service.EnumerateLicensedUsersResponse() + post_with_metadata.return_value = ( + 
license_management_service.EnumerateLicensedUsersResponse(), + metadata, + ) client.enumerate_licensed_users( request, @@ -5153,6 +5246,7 @@ def test_enumerate_licensed_users_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_operation_rest_bad_request( diff --git a/packages/google-cloud-commerce-consumer-procurement/tests/unit/gapic/commerce_consumer_procurement_v1alpha1/test_consumer_procurement_service.py b/packages/google-cloud-commerce-consumer-procurement/tests/unit/gapic/commerce_consumer_procurement_v1alpha1/test_consumer_procurement_service.py index 67024ddb75bb..809182f6d058 100644 --- a/packages/google-cloud-commerce-consumer-procurement/tests/unit/gapic/commerce_consumer_procurement_v1alpha1/test_consumer_procurement_service.py +++ b/packages/google-cloud-commerce-consumer-procurement/tests/unit/gapic/commerce_consumer_procurement_v1alpha1/test_consumer_procurement_service.py @@ -74,6 +74,13 @@ procurement_service, ) +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -353,6 +360,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = ConsumerProcurementServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = ConsumerProcurementServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -3162,10 +3212,14 @@ def test_place_order_rest_interceptors(null_interceptor): ), mock.patch.object( transports.ConsumerProcurementServiceRestInterceptor, "post_place_order" ) as post, mock.patch.object( + transports.ConsumerProcurementServiceRestInterceptor, + "post_place_order_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ConsumerProcurementServiceRestInterceptor, "pre_place_order" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = procurement_service.PlaceOrderRequest.pb( procurement_service.PlaceOrderRequest() ) @@ -3189,6 +3243,7 @@ def 
test_place_order_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.place_order( request, @@ -3200,6 +3255,7 @@ def test_place_order_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_order_rest_bad_request(request_type=procurement_service.GetOrderRequest): @@ -3286,10 +3342,14 @@ def test_get_order_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ConsumerProcurementServiceRestInterceptor, "post_get_order" ) as post, mock.patch.object( + transports.ConsumerProcurementServiceRestInterceptor, + "post_get_order_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ConsumerProcurementServiceRestInterceptor, "pre_get_order" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = procurement_service.GetOrderRequest.pb( procurement_service.GetOrderRequest() ) @@ -3313,6 +3373,7 @@ def test_get_order_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = order.Order() + post_with_metadata.return_value = order.Order(), metadata client.get_order( request, @@ -3324,6 +3385,7 @@ def test_get_order_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_orders_rest_bad_request( @@ -3408,10 +3470,14 @@ def test_list_orders_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ConsumerProcurementServiceRestInterceptor, "post_list_orders" ) as post, mock.patch.object( + transports.ConsumerProcurementServiceRestInterceptor, + "post_list_orders_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ConsumerProcurementServiceRestInterceptor, "pre_list_orders" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = procurement_service.ListOrdersRequest.pb( procurement_service.ListOrdersRequest() ) @@ -3437,6 +3503,10 @@ def test_list_orders_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = procurement_service.ListOrdersResponse() + post_with_metadata.return_value = ( + procurement_service.ListOrdersResponse(), + metadata, + ) client.list_orders( request, @@ -3448,6 +3518,7 @@ def test_list_orders_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_operation_rest_bad_request( diff --git a/packages/google-cloud-common/CHANGELOG.md b/packages/google-cloud-common/CHANGELOG.md index 9ba95bc902ba..eb6e77968fba 100644 --- a/packages/google-cloud-common/CHANGELOG.md +++ b/packages/google-cloud-common/CHANGELOG.md @@ -1,5 +1,13 @@ # Changelog +## [1.5.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-common-v1.4.0...google-cloud-common-v1.5.0) (2025-02-12) + + +### Features + +* Add REST Interceptors which support reading metadata ([e22e2bd](https://github.com/googleapis/google-cloud-python/commit/e22e2bde55d11d2f85e9d2caf1d152a4027f88cf)) +* Add support for reading selective GAPIC generation methods from service YAML ([e22e2bd](https://github.com/googleapis/google-cloud-python/commit/e22e2bde55d11d2f85e9d2caf1d152a4027f88cf)) + ## 
[1.4.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-common-v1.3.5...google-cloud-common-v1.4.0) (2024-10-24) diff --git a/packages/google-cloud-common/google/cloud/common/gapic_version.py b/packages/google-cloud-common/google/cloud/common/gapic_version.py index 4660de83cb4b..de27578dd493 100644 --- a/packages/google-cloud-common/google/cloud/common/gapic_version.py +++ b/packages/google-cloud-common/google/cloud/common/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.4.0" # {x-release-please-version} +__version__ = "1.5.0" # {x-release-please-version} diff --git a/packages/google-cloud-compute/CHANGELOG.md b/packages/google-cloud-compute/CHANGELOG.md index 39d527350c89..1d7a82be9546 100644 --- a/packages/google-cloud-compute/CHANGELOG.md +++ b/packages/google-cloud-compute/CHANGELOG.md @@ -1,5 +1,20 @@ # Changelog +## [1.25.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-compute-v1.24.0...google-cloud-compute-v1.25.0) (2025-02-12) + + +### Features + +* Add REST Interceptors which support reading metadata ([6142165](https://github.com/googleapis/google-cloud-python/commit/61421657aadf877cf55c7d55fdcd10be40569296)) +* Add support for reading selective GAPIC generation methods from service YAML ([6142165](https://github.com/googleapis/google-cloud-python/commit/61421657aadf877cf55c7d55fdcd10be40569296)) + +## [1.24.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-compute-v1.23.0...google-cloud-compute-v1.24.0) (2025-01-27) + + +### Features + +* [google-cloud-compute] Update Compute Engine API to revision 20250107 ([#975](https://github.com/googleapis/google-cloud-python/issues/975)) ([#13461](https://github.com/googleapis/google-cloud-python/issues/13461)) ([4c7730f](https://github.com/googleapis/google-cloud-python/commit/4c7730f600a64bd459d5645c992798c1ddb3b408)) + ## [1.23.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-compute-v1.22.0...google-cloud-compute-v1.23.0) (2024-12-12) diff --git a/packages/google-cloud-compute/google/cloud/compute/__init__.py b/packages/google-cloud-compute/google/cloud/compute/__init__.py index 406a6ba53398..17634defb96d 100644 --- a/packages/google-cloud-compute/google/cloud/compute/__init__.py +++ b/packages/google-cloud-compute/google/cloud/compute/__init__.py @@ -294,6 +294,7 @@ AggregatedListNetworkAttachmentsRequest, AggregatedListNetworkEdgeSecurityServicesRequest, AggregatedListNetworkEndpointGroupsRequest, + AggregatedListNetworkFirewallPoliciesRequest, AggregatedListNodeGroupsRequest, AggregatedListNodeTemplatesRequest, AggregatedListNodeTypesRequest, @@ -560,6 +561,7 @@ FirewallList, FirewallLogConfig, FirewallPoliciesListAssociationsResponse, + FirewallPoliciesScopedList, FirewallPolicy, FirewallPolicyAssociation, FirewallPolicyList, @@ -1137,6 +1139,7 @@ NetworkEndpointGroupsListNetworkEndpoints, NetworkEndpointGroupsScopedList, NetworkEndpointWithHealthStatus, + NetworkFirewallPolicyAggregatedList, NetworkInterface, NetworkList, NetworkPeering, @@ -1919,6 +1922,7 @@ "AggregatedListNetworkAttachmentsRequest", "AggregatedListNetworkEdgeSecurityServicesRequest", "AggregatedListNetworkEndpointGroupsRequest", + "AggregatedListNetworkFirewallPoliciesRequest", "AggregatedListNodeGroupsRequest", "AggregatedListNodeTemplatesRequest", "AggregatedListNodeTypesRequest", @@ -2185,6 +2189,7 @@ "FirewallList", "FirewallLogConfig", 
"FirewallPoliciesListAssociationsResponse", + "FirewallPoliciesScopedList", "FirewallPolicy", "FirewallPolicyAssociation", "FirewallPolicyList", @@ -2762,6 +2767,7 @@ "NetworkEndpointGroupsListNetworkEndpoints", "NetworkEndpointGroupsScopedList", "NetworkEndpointWithHealthStatus", + "NetworkFirewallPolicyAggregatedList", "NetworkInterface", "NetworkList", "NetworkPeering", diff --git a/packages/google-cloud-compute/google/cloud/compute/gapic_version.py b/packages/google-cloud-compute/google/cloud/compute/gapic_version.py index c8313abd74cb..547a38985bb7 100644 --- a/packages/google-cloud-compute/google/cloud/compute/gapic_version.py +++ b/packages/google-cloud-compute/google/cloud/compute/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.23.0" # {x-release-please-version} +__version__ = "1.25.0" # {x-release-please-version} diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/__init__.py b/packages/google-cloud-compute/google/cloud/compute_v1/__init__.py index de8581c432cf..cdd7b1d58cff 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/__init__.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/__init__.py @@ -176,6 +176,7 @@ AggregatedListNetworkAttachmentsRequest, AggregatedListNetworkEdgeSecurityServicesRequest, AggregatedListNetworkEndpointGroupsRequest, + AggregatedListNetworkFirewallPoliciesRequest, AggregatedListNodeGroupsRequest, AggregatedListNodeTemplatesRequest, AggregatedListNodeTypesRequest, @@ -442,6 +443,7 @@ FirewallList, FirewallLogConfig, FirewallPoliciesListAssociationsResponse, + FirewallPoliciesScopedList, FirewallPolicy, FirewallPolicyAssociation, FirewallPolicyList, @@ -1019,6 +1021,7 @@ NetworkEndpointGroupsListNetworkEndpoints, NetworkEndpointGroupsScopedList, NetworkEndpointWithHealthStatus, + NetworkFirewallPolicyAggregatedList, NetworkInterface, NetworkList, NetworkPeering, @@ -1705,6 +1708,7 @@ "AggregatedListNetworkAttachmentsRequest", "AggregatedListNetworkEdgeSecurityServicesRequest", "AggregatedListNetworkEndpointGroupsRequest", + "AggregatedListNetworkFirewallPoliciesRequest", "AggregatedListNodeGroupsRequest", "AggregatedListNodeTemplatesRequest", "AggregatedListNodeTypesRequest", @@ -1977,6 +1981,7 @@ "FirewallLogConfig", "FirewallPoliciesClient", "FirewallPoliciesListAssociationsResponse", + "FirewallPoliciesScopedList", "FirewallPolicy", "FirewallPolicyAssociation", "FirewallPolicyList", @@ -2584,6 +2589,7 @@ "NetworkEndpointGroupsScopedList", "NetworkEndpointWithHealthStatus", "NetworkFirewallPoliciesClient", + "NetworkFirewallPolicyAggregatedList", "NetworkInterface", "NetworkList", "NetworkPeering", diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/gapic_metadata.json b/packages/google-cloud-compute/google/cloud/compute_v1/gapic_metadata.json index 4cc069ff2947..dc47fbee26c3 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/gapic_metadata.json +++ b/packages/google-cloud-compute/google/cloud/compute_v1/gapic_metadata.json @@ -1972,6 +1972,11 @@ "add_rule" ] }, + "AggregatedList": { + "methods": [ + "aggregated_list" + ] + }, "CloneRules": { "methods": [ "clone_rules" diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/gapic_version.py b/packages/google-cloud-compute/google/cloud/compute_v1/gapic_version.py index c8313abd74cb..547a38985bb7 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/gapic_version.py +++ 
b/packages/google-cloud-compute/google/cloud/compute_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.23.0" # {x-release-please-version} +__version__ = "1.25.0" # {x-release-please-version} diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/accelerator_types/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/accelerator_types/client.py index ac106837c853..d39ffd72fb10 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/accelerator_types/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/accelerator_types/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -461,6 +463,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/accelerator_types/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/accelerator_types/transports/rest.py index f693a068f350..c0567852cacd 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/accelerator_types/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/accelerator_types/transports/rest.py @@ -118,12 +118,37 @@ def post_aggregated_list( ) -> compute.AcceleratorTypeAggregatedList: """Post-rpc interceptor for aggregated_list - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_aggregated_list_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AcceleratorTypes server but before - it is returned to user code. + it is returned to user code. This `post_aggregated_list` interceptor runs + before the `post_aggregated_list_with_metadata` interceptor. """ return response + def post_aggregated_list_with_metadata( + self, + response: compute.AcceleratorTypeAggregatedList, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.AcceleratorTypeAggregatedList, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for aggregated_list + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AcceleratorTypes server but before it is returned to user code. 
+ + We recommend only using this `post_aggregated_list_with_metadata` + interceptor in new development instead of the `post_aggregated_list` interceptor. + When both interceptors are used, this `post_aggregated_list_with_metadata` interceptor runs after the + `post_aggregated_list` interceptor. The (possibly modified) response returned by + `post_aggregated_list` will be passed to + `post_aggregated_list_with_metadata`. + """ + return response, metadata + def pre_get( self, request: compute.GetAcceleratorTypeRequest, @@ -141,12 +166,35 @@ def pre_get( def post_get(self, response: compute.AcceleratorType) -> compute.AcceleratorType: """Post-rpc interceptor for get - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AcceleratorTypes server but before - it is returned to user code. + it is returned to user code. This `post_get` interceptor runs + before the `post_get_with_metadata` interceptor. """ return response + def post_get_with_metadata( + self, + response: compute.AcceleratorType, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.AcceleratorType, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AcceleratorTypes server but before it is returned to user code. + + We recommend only using this `post_get_with_metadata` + interceptor in new development instead of the `post_get` interceptor. + When both interceptors are used, this `post_get_with_metadata` interceptor runs after the + `post_get` interceptor. The (possibly modified) response returned by + `post_get` will be passed to + `post_get_with_metadata`. + """ + return response, metadata + def pre_list( self, request: compute.ListAcceleratorTypesRequest, @@ -166,12 +214,35 @@ def post_list( ) -> compute.AcceleratorTypeList: """Post-rpc interceptor for list - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AcceleratorTypes server but before - it is returned to user code. + it is returned to user code. This `post_list` interceptor runs + before the `post_list_with_metadata` interceptor. """ return response + def post_list_with_metadata( + self, + response: compute.AcceleratorTypeList, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.AcceleratorTypeList, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AcceleratorTypes server but before it is returned to user code. + + We recommend only using this `post_list_with_metadata` + interceptor in new development instead of the `post_list` interceptor. + When both interceptors are used, this `post_list_with_metadata` interceptor runs after the + `post_list` interceptor. The (possibly modified) response returned by + `post_list` will be passed to + `post_list_with_metadata`. 
+ """ + return response, metadata + @dataclasses.dataclass class AcceleratorTypesRestStub: @@ -384,6 +455,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_aggregated_list(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_aggregated_list_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -538,6 +613,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -682,6 +759,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/addresses/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/addresses/client.py index d3d71b3a0315..e4a92f99f398 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/addresses/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/addresses/client.py @@ -15,6 +15,8 @@ # from collections import OrderedDict import functools +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -459,6 +461,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/addresses/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/addresses/transports/rest.py index 88cc27b706b7..c51883d07134 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/addresses/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/addresses/transports/rest.py @@ -149,12 +149,35 @@ def post_aggregated_list( ) -> compute.AddressAggregatedList: """Post-rpc interceptor for aggregated_list - Override in a subclass to manipulate the response + DEPRECATED. 
Please use the `post_aggregated_list_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Addresses server but before - it is returned to user code. + it is returned to user code. This `post_aggregated_list` interceptor runs + before the `post_aggregated_list_with_metadata` interceptor. """ return response + def post_aggregated_list_with_metadata( + self, + response: compute.AddressAggregatedList, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.AddressAggregatedList, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for aggregated_list + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Addresses server but before it is returned to user code. + + We recommend only using this `post_aggregated_list_with_metadata` + interceptor in new development instead of the `post_aggregated_list` interceptor. + When both interceptors are used, this `post_aggregated_list_with_metadata` interceptor runs after the + `post_aggregated_list` interceptor. The (possibly modified) response returned by + `post_aggregated_list` will be passed to + `post_aggregated_list_with_metadata`. + """ + return response, metadata + def pre_delete( self, request: compute.DeleteAddressRequest, @@ -170,12 +193,35 @@ def pre_delete( def post_delete(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for delete - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Addresses server but before - it is returned to user code. + it is returned to user code. This `post_delete` interceptor runs + before the `post_delete_with_metadata` interceptor. """ return response + def post_delete_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Addresses server but before it is returned to user code. + + We recommend only using this `post_delete_with_metadata` + interceptor in new development instead of the `post_delete` interceptor. + When both interceptors are used, this `post_delete_with_metadata` interceptor runs after the + `post_delete` interceptor. The (possibly modified) response returned by + `post_delete` will be passed to + `post_delete_with_metadata`. + """ + return response, metadata + def pre_get( self, request: compute.GetAddressRequest, @@ -191,12 +237,35 @@ def pre_get( def post_get(self, response: compute.Address) -> compute.Address: """Post-rpc interceptor for get - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Addresses server but before - it is returned to user code. + it is returned to user code. This `post_get` interceptor runs + before the `post_get_with_metadata` interceptor. 
""" return response + def post_get_with_metadata( + self, + response: compute.Address, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Address, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Addresses server but before it is returned to user code. + + We recommend only using this `post_get_with_metadata` + interceptor in new development instead of the `post_get` interceptor. + When both interceptors are used, this `post_get_with_metadata` interceptor runs after the + `post_get` interceptor. The (possibly modified) response returned by + `post_get` will be passed to + `post_get_with_metadata`. + """ + return response, metadata + def pre_insert( self, request: compute.InsertAddressRequest, @@ -212,12 +281,35 @@ def pre_insert( def post_insert(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for insert - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_insert_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Addresses server but before - it is returned to user code. + it is returned to user code. This `post_insert` interceptor runs + before the `post_insert_with_metadata` interceptor. """ return response + def post_insert_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for insert + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Addresses server but before it is returned to user code. + + We recommend only using this `post_insert_with_metadata` + interceptor in new development instead of the `post_insert` interceptor. + When both interceptors are used, this `post_insert_with_metadata` interceptor runs after the + `post_insert` interceptor. The (possibly modified) response returned by + `post_insert` will be passed to + `post_insert_with_metadata`. + """ + return response, metadata + def pre_list( self, request: compute.ListAddressesRequest, @@ -233,12 +325,35 @@ def pre_list( def post_list(self, response: compute.AddressList) -> compute.AddressList: """Post-rpc interceptor for list - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Addresses server but before - it is returned to user code. + it is returned to user code. This `post_list` interceptor runs + before the `post_list_with_metadata` interceptor. """ return response + def post_list_with_metadata( + self, + response: compute.AddressList, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.AddressList, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Addresses server but before it is returned to user code. + + We recommend only using this `post_list_with_metadata` + interceptor in new development instead of the `post_list` interceptor. + When both interceptors are used, this `post_list_with_metadata` interceptor runs after the + `post_list` interceptor. 
The (possibly modified) response returned by + `post_list` will be passed to + `post_list_with_metadata`. + """ + return response, metadata + def pre_move( self, request: compute.MoveAddressRequest, @@ -254,12 +369,35 @@ def pre_move( def post_move(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for move - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_move_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Addresses server but before - it is returned to user code. + it is returned to user code. This `post_move` interceptor runs + before the `post_move_with_metadata` interceptor. """ return response + def post_move_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for move + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Addresses server but before it is returned to user code. + + We recommend only using this `post_move_with_metadata` + interceptor in new development instead of the `post_move` interceptor. + When both interceptors are used, this `post_move_with_metadata` interceptor runs after the + `post_move` interceptor. The (possibly modified) response returned by + `post_move` will be passed to + `post_move_with_metadata`. + """ + return response, metadata + def pre_set_labels( self, request: compute.SetLabelsAddressRequest, @@ -277,12 +415,35 @@ def pre_set_labels( def post_set_labels(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for set_labels - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_set_labels_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Addresses server but before - it is returned to user code. + it is returned to user code. This `post_set_labels` interceptor runs + before the `post_set_labels_with_metadata` interceptor. """ return response + def post_set_labels_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for set_labels + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Addresses server but before it is returned to user code. + + We recommend only using this `post_set_labels_with_metadata` + interceptor in new development instead of the `post_set_labels` interceptor. + When both interceptors are used, this `post_set_labels_with_metadata` interceptor runs after the + `post_set_labels` interceptor. The (possibly modified) response returned by + `post_set_labels` will be passed to + `post_set_labels_with_metadata`. 
+ """ + return response, metadata + @dataclasses.dataclass class AddressesRestStub: @@ -497,6 +658,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_aggregated_list(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_aggregated_list_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -656,6 +821,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -802,6 +971,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -967,6 +1138,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_insert(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_insert_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1106,6 +1281,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1268,6 +1445,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_move(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_move_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1435,6 +1614,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_set_labels(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_set_labels_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/autoscalers/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/autoscalers/client.py index c25341b16d1c..3fc709e8bbd0 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/autoscalers/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/autoscalers/client.py @@ -15,6 +15,8 @@ # from collections import OrderedDict import functools +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -459,6 +461,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe 
validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/autoscalers/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/autoscalers/transports/rest.py index 932152c9df4b..1b05c7b7451e 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/autoscalers/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/autoscalers/transports/rest.py @@ -150,12 +150,37 @@ def post_aggregated_list( ) -> compute.AutoscalerAggregatedList: """Post-rpc interceptor for aggregated_list - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_aggregated_list_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Autoscalers server but before - it is returned to user code. + it is returned to user code. This `post_aggregated_list` interceptor runs + before the `post_aggregated_list_with_metadata` interceptor. """ return response + def post_aggregated_list_with_metadata( + self, + response: compute.AutoscalerAggregatedList, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.AutoscalerAggregatedList, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for aggregated_list + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Autoscalers server but before it is returned to user code. + + We recommend only using this `post_aggregated_list_with_metadata` + interceptor in new development instead of the `post_aggregated_list` interceptor. + When both interceptors are used, this `post_aggregated_list_with_metadata` interceptor runs after the + `post_aggregated_list` interceptor. The (possibly modified) response returned by + `post_aggregated_list` will be passed to + `post_aggregated_list_with_metadata`. + """ + return response, metadata + def pre_delete( self, request: compute.DeleteAutoscalerRequest, @@ -173,12 +198,35 @@ def pre_delete( def post_delete(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for delete - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Autoscalers server but before - it is returned to user code. + it is returned to user code. This `post_delete` interceptor runs + before the `post_delete_with_metadata` interceptor. 
""" return response + def post_delete_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Autoscalers server but before it is returned to user code. + + We recommend only using this `post_delete_with_metadata` + interceptor in new development instead of the `post_delete` interceptor. + When both interceptors are used, this `post_delete_with_metadata` interceptor runs after the + `post_delete` interceptor. The (possibly modified) response returned by + `post_delete` will be passed to + `post_delete_with_metadata`. + """ + return response, metadata + def pre_get( self, request: compute.GetAutoscalerRequest, @@ -194,12 +242,35 @@ def pre_get( def post_get(self, response: compute.Autoscaler) -> compute.Autoscaler: """Post-rpc interceptor for get - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Autoscalers server but before - it is returned to user code. + it is returned to user code. This `post_get` interceptor runs + before the `post_get_with_metadata` interceptor. """ return response + def post_get_with_metadata( + self, + response: compute.Autoscaler, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Autoscaler, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Autoscalers server but before it is returned to user code. + + We recommend only using this `post_get_with_metadata` + interceptor in new development instead of the `post_get` interceptor. + When both interceptors are used, this `post_get_with_metadata` interceptor runs after the + `post_get` interceptor. The (possibly modified) response returned by + `post_get` will be passed to + `post_get_with_metadata`. + """ + return response, metadata + def pre_insert( self, request: compute.InsertAutoscalerRequest, @@ -217,12 +288,35 @@ def pre_insert( def post_insert(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for insert - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_insert_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Autoscalers server but before - it is returned to user code. + it is returned to user code. This `post_insert` interceptor runs + before the `post_insert_with_metadata` interceptor. """ return response + def post_insert_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for insert + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Autoscalers server but before it is returned to user code. + + We recommend only using this `post_insert_with_metadata` + interceptor in new development instead of the `post_insert` interceptor. + When both interceptors are used, this `post_insert_with_metadata` interceptor runs after the + `post_insert` interceptor. 
The (possibly modified) response returned by + `post_insert` will be passed to + `post_insert_with_metadata`. + """ + return response, metadata + def pre_list( self, request: compute.ListAutoscalersRequest, @@ -238,12 +332,35 @@ def pre_list( def post_list(self, response: compute.AutoscalerList) -> compute.AutoscalerList: """Post-rpc interceptor for list - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Autoscalers server but before - it is returned to user code. + it is returned to user code. This `post_list` interceptor runs + before the `post_list_with_metadata` interceptor. """ return response + def post_list_with_metadata( + self, + response: compute.AutoscalerList, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.AutoscalerList, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Autoscalers server but before it is returned to user code. + + We recommend only using this `post_list_with_metadata` + interceptor in new development instead of the `post_list` interceptor. + When both interceptors are used, this `post_list_with_metadata` interceptor runs after the + `post_list` interceptor. The (possibly modified) response returned by + `post_list` will be passed to + `post_list_with_metadata`. + """ + return response, metadata + def pre_patch( self, request: compute.PatchAutoscalerRequest, @@ -259,12 +376,35 @@ def pre_patch( def post_patch(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for patch - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_patch_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Autoscalers server but before - it is returned to user code. + it is returned to user code. This `post_patch` interceptor runs + before the `post_patch_with_metadata` interceptor. """ return response + def post_patch_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for patch + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Autoscalers server but before it is returned to user code. + + We recommend only using this `post_patch_with_metadata` + interceptor in new development instead of the `post_patch` interceptor. + When both interceptors are used, this `post_patch_with_metadata` interceptor runs after the + `post_patch` interceptor. The (possibly modified) response returned by + `post_patch` will be passed to + `post_patch_with_metadata`. + """ + return response, metadata + def pre_update( self, request: compute.UpdateAutoscalerRequest, @@ -282,12 +422,35 @@ def pre_update( def post_update(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for update - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Autoscalers server but before - it is returned to user code. + it is returned to user code. 
This `post_update` interceptor runs + before the `post_update_with_metadata` interceptor. """ return response + def post_update_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Autoscalers server but before it is returned to user code. + + We recommend only using this `post_update_with_metadata` + interceptor in new development instead of the `post_update` interceptor. + When both interceptors are used, this `post_update_with_metadata` interceptor runs after the + `post_update` interceptor. The (possibly modified) response returned by + `post_update` will be passed to + `post_update_with_metadata`. + """ + return response, metadata + @dataclasses.dataclass class AutoscalersRestStub: @@ -498,6 +661,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_aggregated_list(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_aggregated_list_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -659,6 +826,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -813,6 +984,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -978,6 +1151,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_insert(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_insert_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1122,6 +1299,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1287,6 +1466,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_patch(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_patch_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1452,6 +1635,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update(resp) + response_metadata = [(k, 
str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/backend_buckets/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/backend_buckets/client.py index 9a8da0fe724f..a7c1a4e86d66 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/backend_buckets/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/backend_buckets/client.py @@ -15,6 +15,8 @@ # from collections import OrderedDict import functools +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -461,6 +463,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/backend_buckets/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/backend_buckets/transports/rest.py index 99593985513e..3f75b5ec3c39 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/backend_buckets/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/backend_buckets/transports/rest.py @@ -188,12 +188,35 @@ def pre_add_signed_url_key( def post_add_signed_url_key(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for add_signed_url_key - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_add_signed_url_key_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the BackendBuckets server but before - it is returned to user code. + it is returned to user code. This `post_add_signed_url_key` interceptor runs + before the `post_add_signed_url_key_with_metadata` interceptor. """ return response + def post_add_signed_url_key_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for add_signed_url_key + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BackendBuckets server but before it is returned to user code. + + We recommend only using this `post_add_signed_url_key_with_metadata` + interceptor in new development instead of the `post_add_signed_url_key` interceptor. 
+ When both interceptors are used, this `post_add_signed_url_key_with_metadata` interceptor runs after the + `post_add_signed_url_key` interceptor. The (possibly modified) response returned by + `post_add_signed_url_key` will be passed to + `post_add_signed_url_key_with_metadata`. + """ + return response, metadata + def pre_delete( self, request: compute.DeleteBackendBucketRequest, @@ -211,12 +234,35 @@ def pre_delete( def post_delete(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for delete - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the BackendBuckets server but before - it is returned to user code. + it is returned to user code. This `post_delete` interceptor runs + before the `post_delete_with_metadata` interceptor. """ return response + def post_delete_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BackendBuckets server but before it is returned to user code. + + We recommend only using this `post_delete_with_metadata` + interceptor in new development instead of the `post_delete` interceptor. + When both interceptors are used, this `post_delete_with_metadata` interceptor runs after the + `post_delete` interceptor. The (possibly modified) response returned by + `post_delete` will be passed to + `post_delete_with_metadata`. + """ + return response, metadata + def pre_delete_signed_url_key( self, request: compute.DeleteSignedUrlKeyBackendBucketRequest, @@ -237,12 +283,35 @@ def post_delete_signed_url_key( ) -> compute.Operation: """Post-rpc interceptor for delete_signed_url_key - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_signed_url_key_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the BackendBuckets server but before - it is returned to user code. + it is returned to user code. This `post_delete_signed_url_key` interceptor runs + before the `post_delete_signed_url_key_with_metadata` interceptor. """ return response + def post_delete_signed_url_key_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_signed_url_key + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BackendBuckets server but before it is returned to user code. + + We recommend only using this `post_delete_signed_url_key_with_metadata` + interceptor in new development instead of the `post_delete_signed_url_key` interceptor. + When both interceptors are used, this `post_delete_signed_url_key_with_metadata` interceptor runs after the + `post_delete_signed_url_key` interceptor. The (possibly modified) response returned by + `post_delete_signed_url_key` will be passed to + `post_delete_signed_url_key_with_metadata`. 
+ """ + return response, metadata + def pre_get( self, request: compute.GetBackendBucketRequest, @@ -260,12 +329,35 @@ def pre_get( def post_get(self, response: compute.BackendBucket) -> compute.BackendBucket: """Post-rpc interceptor for get - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the BackendBuckets server but before - it is returned to user code. + it is returned to user code. This `post_get` interceptor runs + before the `post_get_with_metadata` interceptor. """ return response + def post_get_with_metadata( + self, + response: compute.BackendBucket, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.BackendBucket, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BackendBuckets server but before it is returned to user code. + + We recommend only using this `post_get_with_metadata` + interceptor in new development instead of the `post_get` interceptor. + When both interceptors are used, this `post_get_with_metadata` interceptor runs after the + `post_get` interceptor. The (possibly modified) response returned by + `post_get` will be passed to + `post_get_with_metadata`. + """ + return response, metadata + def pre_get_iam_policy( self, request: compute.GetIamPolicyBackendBucketRequest, @@ -284,12 +376,35 @@ def pre_get_iam_policy( def post_get_iam_policy(self, response: compute.Policy) -> compute.Policy: """Post-rpc interceptor for get_iam_policy - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_iam_policy_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the BackendBuckets server but before - it is returned to user code. + it is returned to user code. This `post_get_iam_policy` interceptor runs + before the `post_get_iam_policy_with_metadata` interceptor. """ return response + def post_get_iam_policy_with_metadata( + self, + response: compute.Policy, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Policy, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_iam_policy + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BackendBuckets server but before it is returned to user code. + + We recommend only using this `post_get_iam_policy_with_metadata` + interceptor in new development instead of the `post_get_iam_policy` interceptor. + When both interceptors are used, this `post_get_iam_policy_with_metadata` interceptor runs after the + `post_get_iam_policy` interceptor. The (possibly modified) response returned by + `post_get_iam_policy` will be passed to + `post_get_iam_policy_with_metadata`. + """ + return response, metadata + def pre_insert( self, request: compute.InsertBackendBucketRequest, @@ -307,12 +422,35 @@ def pre_insert( def post_insert(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for insert - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_insert_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the BackendBuckets server but before - it is returned to user code. + it is returned to user code. 
This `post_insert` interceptor runs + before the `post_insert_with_metadata` interceptor. """ return response + def post_insert_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for insert + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BackendBuckets server but before it is returned to user code. + + We recommend only using this `post_insert_with_metadata` + interceptor in new development instead of the `post_insert` interceptor. + When both interceptors are used, this `post_insert_with_metadata` interceptor runs after the + `post_insert` interceptor. The (possibly modified) response returned by + `post_insert` will be passed to + `post_insert_with_metadata`. + """ + return response, metadata + def pre_list( self, request: compute.ListBackendBucketsRequest, @@ -332,12 +470,35 @@ def post_list( ) -> compute.BackendBucketList: """Post-rpc interceptor for list - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the BackendBuckets server but before - it is returned to user code. + it is returned to user code. This `post_list` interceptor runs + before the `post_list_with_metadata` interceptor. """ return response + def post_list_with_metadata( + self, + response: compute.BackendBucketList, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.BackendBucketList, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BackendBuckets server but before it is returned to user code. + + We recommend only using this `post_list_with_metadata` + interceptor in new development instead of the `post_list` interceptor. + When both interceptors are used, this `post_list_with_metadata` interceptor runs after the + `post_list` interceptor. The (possibly modified) response returned by + `post_list` will be passed to + `post_list_with_metadata`. + """ + return response, metadata + def pre_patch( self, request: compute.PatchBackendBucketRequest, @@ -355,12 +516,35 @@ def pre_patch( def post_patch(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for patch - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_patch_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the BackendBuckets server but before - it is returned to user code. + it is returned to user code. This `post_patch` interceptor runs + before the `post_patch_with_metadata` interceptor. """ return response + def post_patch_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for patch + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BackendBuckets server but before it is returned to user code. + + We recommend only using this `post_patch_with_metadata` + interceptor in new development instead of the `post_patch` interceptor. 
+ When both interceptors are used, this `post_patch_with_metadata` interceptor runs after the + `post_patch` interceptor. The (possibly modified) response returned by + `post_patch` will be passed to + `post_patch_with_metadata`. + """ + return response, metadata + def pre_set_edge_security_policy( self, request: compute.SetEdgeSecurityPolicyBackendBucketRequest, @@ -381,12 +565,35 @@ def post_set_edge_security_policy( ) -> compute.Operation: """Post-rpc interceptor for set_edge_security_policy - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_set_edge_security_policy_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the BackendBuckets server but before - it is returned to user code. + it is returned to user code. This `post_set_edge_security_policy` interceptor runs + before the `post_set_edge_security_policy_with_metadata` interceptor. """ return response + def post_set_edge_security_policy_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for set_edge_security_policy + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BackendBuckets server but before it is returned to user code. + + We recommend only using this `post_set_edge_security_policy_with_metadata` + interceptor in new development instead of the `post_set_edge_security_policy` interceptor. + When both interceptors are used, this `post_set_edge_security_policy_with_metadata` interceptor runs after the + `post_set_edge_security_policy` interceptor. The (possibly modified) response returned by + `post_set_edge_security_policy` will be passed to + `post_set_edge_security_policy_with_metadata`. + """ + return response, metadata + def pre_set_iam_policy( self, request: compute.SetIamPolicyBackendBucketRequest, @@ -405,12 +612,35 @@ def pre_set_iam_policy( def post_set_iam_policy(self, response: compute.Policy) -> compute.Policy: """Post-rpc interceptor for set_iam_policy - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_set_iam_policy_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the BackendBuckets server but before - it is returned to user code. + it is returned to user code. This `post_set_iam_policy` interceptor runs + before the `post_set_iam_policy_with_metadata` interceptor. """ return response + def post_set_iam_policy_with_metadata( + self, + response: compute.Policy, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Policy, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for set_iam_policy + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BackendBuckets server but before it is returned to user code. + + We recommend only using this `post_set_iam_policy_with_metadata` + interceptor in new development instead of the `post_set_iam_policy` interceptor. + When both interceptors are used, this `post_set_iam_policy_with_metadata` interceptor runs after the + `post_set_iam_policy` interceptor. The (possibly modified) response returned by + `post_set_iam_policy` will be passed to + `post_set_iam_policy_with_metadata`. 
+ """ + return response, metadata + def pre_test_iam_permissions( self, request: compute.TestIamPermissionsBackendBucketRequest, @@ -431,12 +661,37 @@ def post_test_iam_permissions( ) -> compute.TestPermissionsResponse: """Post-rpc interceptor for test_iam_permissions - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_test_iam_permissions_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the BackendBuckets server but before - it is returned to user code. + it is returned to user code. This `post_test_iam_permissions` interceptor runs + before the `post_test_iam_permissions_with_metadata` interceptor. """ return response + def post_test_iam_permissions_with_metadata( + self, + response: compute.TestPermissionsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.TestPermissionsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for test_iam_permissions + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BackendBuckets server but before it is returned to user code. + + We recommend only using this `post_test_iam_permissions_with_metadata` + interceptor in new development instead of the `post_test_iam_permissions` interceptor. + When both interceptors are used, this `post_test_iam_permissions_with_metadata` interceptor runs after the + `post_test_iam_permissions` interceptor. The (possibly modified) response returned by + `post_test_iam_permissions` will be passed to + `post_test_iam_permissions_with_metadata`. + """ + return response, metadata + def pre_update( self, request: compute.UpdateBackendBucketRequest, @@ -454,12 +709,35 @@ def pre_update( def post_update(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for update - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the BackendBuckets server but before - it is returned to user code. + it is returned to user code. This `post_update` interceptor runs + before the `post_update_with_metadata` interceptor. """ return response + def post_update_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BackendBuckets server but before it is returned to user code. + + We recommend only using this `post_update_with_metadata` + interceptor in new development instead of the `post_update` interceptor. + When both interceptors are used, this `post_update_with_metadata` interceptor runs after the + `post_update` interceptor. The (possibly modified) response returned by + `post_update` will be passed to + `post_update_with_metadata`. 
+ """ + return response, metadata + @dataclasses.dataclass class BackendBucketsRestStub: @@ -695,6 +973,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_add_signed_url_key(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_add_signed_url_key_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -856,6 +1138,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1017,6 +1303,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_signed_url_key(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_signed_url_key_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1164,6 +1454,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1329,6 +1621,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_iam_policy(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_iam_policy_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1496,6 +1792,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_insert(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_insert_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1642,6 +1942,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1809,6 +2111,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_patch(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_patch_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1977,6 +2283,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_set_edge_security_policy(resp) + response_metadata = [(k, 
str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_set_edge_security_policy_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2148,6 +2458,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_set_iam_policy(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_set_iam_policy_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2298,6 +2612,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_test_iam_permissions(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_test_iam_permissions_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2465,6 +2783,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/backend_services/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/backend_services/client.py index 893247fe95e3..181824efffae 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/backend_services/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/backend_services/client.py @@ -15,6 +15,8 @@ # from collections import OrderedDict import functools +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -461,6 +463,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. 
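For context, the new `post_*_with_metadata` hooks introduced throughout these transports can be consumed by subclassing the generated interceptor. A minimal sketch, assuming the standard GAPIC wiring of passing `interceptor=` to the REST transport and `transport=` to the client (that wiring is not part of this diff); the class and module names are the ones shown above:

from typing import Sequence, Tuple, Union

from google.cloud import compute_v1
from google.cloud.compute_v1.services.backend_buckets.transports.rest import (
    BackendBucketsRestInterceptor,
    BackendBucketsRestTransport,
)
from google.cloud.compute_v1.types import compute


class HeaderLoggingInterceptor(BackendBucketsRestInterceptor):
    """Reads the HTTP response headers surfaced as metadata by the new hooks."""

    def post_get_with_metadata(
        self,
        response: compute.BackendBucket,
        metadata: Sequence[Tuple[str, Union[str, bytes]]],
    ) -> Tuple[compute.BackendBucket, Sequence[Tuple[str, Union[str, bytes]]]]:
        # `metadata` is built from response.headers in the __call__ hunks above.
        for key, value in metadata:
            print(f"{key}: {value}")
        # Returning the pair unchanged preserves the default behavior.
        return response, metadata


# Assumed wiring (standard GAPIC pattern, not shown in this diff): hand the
# interceptor to the REST transport and the transport to the client.
transport = BackendBucketsRestTransport(interceptor=HeaderLoggingInterceptor())
client = compute_v1.BackendBucketsClient(transport=transport)

Because `post_get` still runs before `post_get_with_metadata`, existing overrides keep working; only code that needs the response headers has to move to the new hook.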
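Similarly, the `_add_cred_info_for_auth_errors` helper added to the clients above appends the output of `cred.get_cred_info()` (google-auth >= 2.35.0) as a JSON string to the error details of 401/403/404 responses. A rough sketch of how calling code might read that detail, assuming the client's error path invokes the helper; the project and resource names are placeholders:

from google.api_core import exceptions as core_exceptions
from google.cloud import compute_v1

client = compute_v1.BackendServicesClient()
try:
    client.get(project="my-project", backend_service="missing-service")
except core_exceptions.NotFound as exc:
    # For 401/403/404 the helper may have appended a JSON-encoded credential
    # info string as the last entry of the error details.
    for detail in exc.details:
        print(detail)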
diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/backend_services/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/backend_services/transports/rest.py index 6ea2df45b005..551770013065 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/backend_services/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/backend_services/transports/rest.py @@ -220,12 +220,35 @@ def pre_add_signed_url_key( def post_add_signed_url_key(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for add_signed_url_key - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_add_signed_url_key_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the BackendServices server but before - it is returned to user code. + it is returned to user code. This `post_add_signed_url_key` interceptor runs + before the `post_add_signed_url_key_with_metadata` interceptor. """ return response + def post_add_signed_url_key_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for add_signed_url_key + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BackendServices server but before it is returned to user code. + + We recommend only using this `post_add_signed_url_key_with_metadata` + interceptor in new development instead of the `post_add_signed_url_key` interceptor. + When both interceptors are used, this `post_add_signed_url_key_with_metadata` interceptor runs after the + `post_add_signed_url_key` interceptor. The (possibly modified) response returned by + `post_add_signed_url_key` will be passed to + `post_add_signed_url_key_with_metadata`. + """ + return response, metadata + def pre_aggregated_list( self, request: compute.AggregatedListBackendServicesRequest, @@ -246,12 +269,37 @@ def post_aggregated_list( ) -> compute.BackendServiceAggregatedList: """Post-rpc interceptor for aggregated_list - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_aggregated_list_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the BackendServices server but before - it is returned to user code. + it is returned to user code. This `post_aggregated_list` interceptor runs + before the `post_aggregated_list_with_metadata` interceptor. """ return response + def post_aggregated_list_with_metadata( + self, + response: compute.BackendServiceAggregatedList, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.BackendServiceAggregatedList, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for aggregated_list + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BackendServices server but before it is returned to user code. + + We recommend only using this `post_aggregated_list_with_metadata` + interceptor in new development instead of the `post_aggregated_list` interceptor. + When both interceptors are used, this `post_aggregated_list_with_metadata` interceptor runs after the + `post_aggregated_list` interceptor. 
The (possibly modified) response returned by + `post_aggregated_list` will be passed to + `post_aggregated_list_with_metadata`. + """ + return response, metadata + def pre_delete( self, request: compute.DeleteBackendServiceRequest, @@ -269,12 +317,35 @@ def pre_delete( def post_delete(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for delete - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the BackendServices server but before - it is returned to user code. + it is returned to user code. This `post_delete` interceptor runs + before the `post_delete_with_metadata` interceptor. """ return response + def post_delete_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BackendServices server but before it is returned to user code. + + We recommend only using this `post_delete_with_metadata` + interceptor in new development instead of the `post_delete` interceptor. + When both interceptors are used, this `post_delete_with_metadata` interceptor runs after the + `post_delete` interceptor. The (possibly modified) response returned by + `post_delete` will be passed to + `post_delete_with_metadata`. + """ + return response, metadata + def pre_delete_signed_url_key( self, request: compute.DeleteSignedUrlKeyBackendServiceRequest, @@ -295,12 +366,35 @@ def post_delete_signed_url_key( ) -> compute.Operation: """Post-rpc interceptor for delete_signed_url_key - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_signed_url_key_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the BackendServices server but before - it is returned to user code. + it is returned to user code. This `post_delete_signed_url_key` interceptor runs + before the `post_delete_signed_url_key_with_metadata` interceptor. """ return response + def post_delete_signed_url_key_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_signed_url_key + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BackendServices server but before it is returned to user code. + + We recommend only using this `post_delete_signed_url_key_with_metadata` + interceptor in new development instead of the `post_delete_signed_url_key` interceptor. + When both interceptors are used, this `post_delete_signed_url_key_with_metadata` interceptor runs after the + `post_delete_signed_url_key` interceptor. The (possibly modified) response returned by + `post_delete_signed_url_key` will be passed to + `post_delete_signed_url_key_with_metadata`. + """ + return response, metadata + def pre_get( self, request: compute.GetBackendServiceRequest, @@ -318,12 +412,35 @@ def pre_get( def post_get(self, response: compute.BackendService) -> compute.BackendService: """Post-rpc interceptor for get - Override in a subclass to manipulate the response + DEPRECATED. 
Please use the `post_get_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the BackendServices server but before - it is returned to user code. + it is returned to user code. This `post_get` interceptor runs + before the `post_get_with_metadata` interceptor. """ return response + def post_get_with_metadata( + self, + response: compute.BackendService, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.BackendService, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BackendServices server but before it is returned to user code. + + We recommend only using this `post_get_with_metadata` + interceptor in new development instead of the `post_get` interceptor. + When both interceptors are used, this `post_get_with_metadata` interceptor runs after the + `post_get` interceptor. The (possibly modified) response returned by + `post_get` will be passed to + `post_get_with_metadata`. + """ + return response, metadata + def pre_get_health( self, request: compute.GetHealthBackendServiceRequest, @@ -343,12 +460,37 @@ def post_get_health( ) -> compute.BackendServiceGroupHealth: """Post-rpc interceptor for get_health - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_health_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the BackendServices server but before - it is returned to user code. + it is returned to user code. This `post_get_health` interceptor runs + before the `post_get_health_with_metadata` interceptor. """ return response + def post_get_health_with_metadata( + self, + response: compute.BackendServiceGroupHealth, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.BackendServiceGroupHealth, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for get_health + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BackendServices server but before it is returned to user code. + + We recommend only using this `post_get_health_with_metadata` + interceptor in new development instead of the `post_get_health` interceptor. + When both interceptors are used, this `post_get_health_with_metadata` interceptor runs after the + `post_get_health` interceptor. The (possibly modified) response returned by + `post_get_health` will be passed to + `post_get_health_with_metadata`. + """ + return response, metadata + def pre_get_iam_policy( self, request: compute.GetIamPolicyBackendServiceRequest, @@ -367,12 +509,35 @@ def pre_get_iam_policy( def post_get_iam_policy(self, response: compute.Policy) -> compute.Policy: """Post-rpc interceptor for get_iam_policy - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_iam_policy_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the BackendServices server but before - it is returned to user code. + it is returned to user code. This `post_get_iam_policy` interceptor runs + before the `post_get_iam_policy_with_metadata` interceptor. 
""" return response + def post_get_iam_policy_with_metadata( + self, + response: compute.Policy, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Policy, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_iam_policy + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BackendServices server but before it is returned to user code. + + We recommend only using this `post_get_iam_policy_with_metadata` + interceptor in new development instead of the `post_get_iam_policy` interceptor. + When both interceptors are used, this `post_get_iam_policy_with_metadata` interceptor runs after the + `post_get_iam_policy` interceptor. The (possibly modified) response returned by + `post_get_iam_policy` will be passed to + `post_get_iam_policy_with_metadata`. + """ + return response, metadata + def pre_insert( self, request: compute.InsertBackendServiceRequest, @@ -390,12 +555,35 @@ def pre_insert( def post_insert(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for insert - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_insert_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the BackendServices server but before - it is returned to user code. + it is returned to user code. This `post_insert` interceptor runs + before the `post_insert_with_metadata` interceptor. """ return response + def post_insert_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for insert + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BackendServices server but before it is returned to user code. + + We recommend only using this `post_insert_with_metadata` + interceptor in new development instead of the `post_insert` interceptor. + When both interceptors are used, this `post_insert_with_metadata` interceptor runs after the + `post_insert` interceptor. The (possibly modified) response returned by + `post_insert` will be passed to + `post_insert_with_metadata`. + """ + return response, metadata + def pre_list( self, request: compute.ListBackendServicesRequest, @@ -415,12 +603,35 @@ def post_list( ) -> compute.BackendServiceList: """Post-rpc interceptor for list - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the BackendServices server but before - it is returned to user code. + it is returned to user code. This `post_list` interceptor runs + before the `post_list_with_metadata` interceptor. """ return response + def post_list_with_metadata( + self, + response: compute.BackendServiceList, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.BackendServiceList, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BackendServices server but before it is returned to user code. + + We recommend only using this `post_list_with_metadata` + interceptor in new development instead of the `post_list` interceptor. 
+ When both interceptors are used, this `post_list_with_metadata` interceptor runs after the + `post_list` interceptor. The (possibly modified) response returned by + `post_list` will be passed to + `post_list_with_metadata`. + """ + return response, metadata + def pre_list_usable( self, request: compute.ListUsableBackendServicesRequest, @@ -441,12 +652,37 @@ def post_list_usable( ) -> compute.BackendServiceListUsable: """Post-rpc interceptor for list_usable - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_usable_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the BackendServices server but before - it is returned to user code. + it is returned to user code. This `post_list_usable` interceptor runs + before the `post_list_usable_with_metadata` interceptor. """ return response + def post_list_usable_with_metadata( + self, + response: compute.BackendServiceListUsable, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.BackendServiceListUsable, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_usable + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BackendServices server but before it is returned to user code. + + We recommend only using this `post_list_usable_with_metadata` + interceptor in new development instead of the `post_list_usable` interceptor. + When both interceptors are used, this `post_list_usable_with_metadata` interceptor runs after the + `post_list_usable` interceptor. The (possibly modified) response returned by + `post_list_usable` will be passed to + `post_list_usable_with_metadata`. + """ + return response, metadata + def pre_patch( self, request: compute.PatchBackendServiceRequest, @@ -464,12 +700,35 @@ def pre_patch( def post_patch(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for patch - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_patch_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the BackendServices server but before - it is returned to user code. + it is returned to user code. This `post_patch` interceptor runs + before the `post_patch_with_metadata` interceptor. """ return response + def post_patch_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for patch + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BackendServices server but before it is returned to user code. + + We recommend only using this `post_patch_with_metadata` + interceptor in new development instead of the `post_patch` interceptor. + When both interceptors are used, this `post_patch_with_metadata` interceptor runs after the + `post_patch` interceptor. The (possibly modified) response returned by + `post_patch` will be passed to + `post_patch_with_metadata`. + """ + return response, metadata + def pre_set_edge_security_policy( self, request: compute.SetEdgeSecurityPolicyBackendServiceRequest, @@ -490,12 +749,35 @@ def post_set_edge_security_policy( ) -> compute.Operation: """Post-rpc interceptor for set_edge_security_policy - Override in a subclass to manipulate the response + DEPRECATED. 
Please use the `post_set_edge_security_policy_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the BackendServices server but before - it is returned to user code. + it is returned to user code. This `post_set_edge_security_policy` interceptor runs + before the `post_set_edge_security_policy_with_metadata` interceptor. """ return response + def post_set_edge_security_policy_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for set_edge_security_policy + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BackendServices server but before it is returned to user code. + + We recommend only using this `post_set_edge_security_policy_with_metadata` + interceptor in new development instead of the `post_set_edge_security_policy` interceptor. + When both interceptors are used, this `post_set_edge_security_policy_with_metadata` interceptor runs after the + `post_set_edge_security_policy` interceptor. The (possibly modified) response returned by + `post_set_edge_security_policy` will be passed to + `post_set_edge_security_policy_with_metadata`. + """ + return response, metadata + def pre_set_iam_policy( self, request: compute.SetIamPolicyBackendServiceRequest, @@ -514,12 +796,35 @@ def pre_set_iam_policy( def post_set_iam_policy(self, response: compute.Policy) -> compute.Policy: """Post-rpc interceptor for set_iam_policy - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_set_iam_policy_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the BackendServices server but before - it is returned to user code. + it is returned to user code. This `post_set_iam_policy` interceptor runs + before the `post_set_iam_policy_with_metadata` interceptor. """ return response + def post_set_iam_policy_with_metadata( + self, + response: compute.Policy, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Policy, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for set_iam_policy + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BackendServices server but before it is returned to user code. + + We recommend only using this `post_set_iam_policy_with_metadata` + interceptor in new development instead of the `post_set_iam_policy` interceptor. + When both interceptors are used, this `post_set_iam_policy_with_metadata` interceptor runs after the + `post_set_iam_policy` interceptor. The (possibly modified) response returned by + `post_set_iam_policy` will be passed to + `post_set_iam_policy_with_metadata`. + """ + return response, metadata + def pre_set_security_policy( self, request: compute.SetSecurityPolicyBackendServiceRequest, @@ -540,12 +845,35 @@ def post_set_security_policy( ) -> compute.Operation: """Post-rpc interceptor for set_security_policy - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_set_security_policy_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the BackendServices server but before - it is returned to user code. + it is returned to user code. 
This `post_set_security_policy` interceptor runs + before the `post_set_security_policy_with_metadata` interceptor. """ return response + def post_set_security_policy_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for set_security_policy + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BackendServices server but before it is returned to user code. + + We recommend only using this `post_set_security_policy_with_metadata` + interceptor in new development instead of the `post_set_security_policy` interceptor. + When both interceptors are used, this `post_set_security_policy_with_metadata` interceptor runs after the + `post_set_security_policy` interceptor. The (possibly modified) response returned by + `post_set_security_policy` will be passed to + `post_set_security_policy_with_metadata`. + """ + return response, metadata + def pre_test_iam_permissions( self, request: compute.TestIamPermissionsBackendServiceRequest, @@ -566,12 +894,37 @@ def post_test_iam_permissions( ) -> compute.TestPermissionsResponse: """Post-rpc interceptor for test_iam_permissions - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_test_iam_permissions_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the BackendServices server but before - it is returned to user code. + it is returned to user code. This `post_test_iam_permissions` interceptor runs + before the `post_test_iam_permissions_with_metadata` interceptor. """ return response + def post_test_iam_permissions_with_metadata( + self, + response: compute.TestPermissionsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.TestPermissionsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for test_iam_permissions + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BackendServices server but before it is returned to user code. + + We recommend only using this `post_test_iam_permissions_with_metadata` + interceptor in new development instead of the `post_test_iam_permissions` interceptor. + When both interceptors are used, this `post_test_iam_permissions_with_metadata` interceptor runs after the + `post_test_iam_permissions` interceptor. The (possibly modified) response returned by + `post_test_iam_permissions` will be passed to + `post_test_iam_permissions_with_metadata`. + """ + return response, metadata + def pre_update( self, request: compute.UpdateBackendServiceRequest, @@ -589,12 +942,35 @@ def pre_update( def post_update(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for update - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the BackendServices server but before - it is returned to user code. + it is returned to user code. This `post_update` interceptor runs + before the `post_update_with_metadata` interceptor. 
""" return response + def post_update_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BackendServices server but before it is returned to user code. + + We recommend only using this `post_update_with_metadata` + interceptor in new development instead of the `post_update` interceptor. + When both interceptors are used, this `post_update_with_metadata` interceptor runs after the + `post_update` interceptor. The (possibly modified) response returned by + `post_update` will be passed to + `post_update_with_metadata`. + """ + return response, metadata + @dataclasses.dataclass class BackendServicesRestStub: @@ -830,6 +1206,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_add_signed_url_key(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_add_signed_url_key_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -974,6 +1354,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_aggregated_list(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_aggregated_list_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1139,6 +1523,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1301,6 +1689,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_signed_url_key(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_signed_url_key_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1460,6 +1852,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1612,6 +2006,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_health(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_health_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1779,6 +2177,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_iam_policy(resp) + response_metadata = [(k, 
str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_iam_policy_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1948,6 +2350,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_insert(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_insert_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2094,6 +2500,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2238,6 +2646,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_usable(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_usable_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2407,6 +2819,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_patch(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_patch_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2577,6 +2993,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_set_edge_security_policy(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_set_edge_security_policy_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2748,6 +3168,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_set_iam_policy(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_set_iam_policy_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2916,6 +3340,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_set_security_policy(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_set_security_policy_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3067,6 +3495,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_test_iam_permissions(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_test_iam_permissions_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO 
COVER @@ -3236,6 +3668,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/disk_types/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/disk_types/client.py index 2fe1619a9f90..41a0405c13d0 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/disk_types/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/disk_types/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -456,6 +458,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/disk_types/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/disk_types/transports/rest.py index 53e57e51b7dd..1b580c0d105f 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/disk_types/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/disk_types/transports/rest.py @@ -117,12 +117,35 @@ def post_aggregated_list( ) -> compute.DiskTypeAggregatedList: """Post-rpc interceptor for aggregated_list - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_aggregated_list_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DiskTypes server but before - it is returned to user code. + it is returned to user code. This `post_aggregated_list` interceptor runs + before the `post_aggregated_list_with_metadata` interceptor. """ return response + def post_aggregated_list_with_metadata( + self, + response: compute.DiskTypeAggregatedList, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.DiskTypeAggregatedList, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for aggregated_list + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DiskTypes server but before it is returned to user code. 
+ + We recommend only using this `post_aggregated_list_with_metadata` + interceptor in new development instead of the `post_aggregated_list` interceptor. + When both interceptors are used, this `post_aggregated_list_with_metadata` interceptor runs after the + `post_aggregated_list` interceptor. The (possibly modified) response returned by + `post_aggregated_list` will be passed to + `post_aggregated_list_with_metadata`. + """ + return response, metadata + def pre_get( self, request: compute.GetDiskTypeRequest, @@ -138,12 +161,35 @@ def pre_get( def post_get(self, response: compute.DiskType) -> compute.DiskType: """Post-rpc interceptor for get - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DiskTypes server but before - it is returned to user code. + it is returned to user code. This `post_get` interceptor runs + before the `post_get_with_metadata` interceptor. """ return response + def post_get_with_metadata( + self, + response: compute.DiskType, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.DiskType, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DiskTypes server but before it is returned to user code. + + We recommend only using this `post_get_with_metadata` + interceptor in new development instead of the `post_get` interceptor. + When both interceptors are used, this `post_get_with_metadata` interceptor runs after the + `post_get` interceptor. The (possibly modified) response returned by + `post_get` will be passed to + `post_get_with_metadata`. + """ + return response, metadata + def pre_list( self, request: compute.ListDiskTypesRequest, @@ -159,12 +205,35 @@ def pre_list( def post_list(self, response: compute.DiskTypeList) -> compute.DiskTypeList: """Post-rpc interceptor for list - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DiskTypes server but before - it is returned to user code. + it is returned to user code. This `post_list` interceptor runs + before the `post_list_with_metadata` interceptor. """ return response + def post_list_with_metadata( + self, + response: compute.DiskTypeList, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.DiskTypeList, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DiskTypes server but before it is returned to user code. + + We recommend only using this `post_list_with_metadata` + interceptor in new development instead of the `post_list` interceptor. + When both interceptors are used, this `post_list_with_metadata` interceptor runs after the + `post_list` interceptor. The (possibly modified) response returned by + `post_list` will be passed to + `post_list_with_metadata`. 
+ """ + return response, metadata + @dataclasses.dataclass class DiskTypesRestStub: @@ -379,6 +448,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_aggregated_list(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_aggregated_list_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -529,6 +602,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -668,6 +743,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/disks/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/disks/client.py index 229f23d3c36c..71d282805f76 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/disks/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/disks/client.py @@ -15,6 +15,8 @@ # from collections import OrderedDict import functools +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -459,6 +461,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/disks/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/disks/transports/rest.py index 02d1acbc36a9..9e494bfdf2e2 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/disks/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/disks/transports/rest.py @@ -237,12 +237,35 @@ def post_add_resource_policies( ) -> compute.Operation: """Post-rpc interceptor for add_resource_policies - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_add_resource_policies_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response after it is returned by the Disks server but before - it is returned to user code. + it is returned to user code. This `post_add_resource_policies` interceptor runs + before the `post_add_resource_policies_with_metadata` interceptor. """ return response + def post_add_resource_policies_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for add_resource_policies + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Disks server but before it is returned to user code. + + We recommend only using this `post_add_resource_policies_with_metadata` + interceptor in new development instead of the `post_add_resource_policies` interceptor. + When both interceptors are used, this `post_add_resource_policies_with_metadata` interceptor runs after the + `post_add_resource_policies` interceptor. The (possibly modified) response returned by + `post_add_resource_policies` will be passed to + `post_add_resource_policies_with_metadata`. + """ + return response, metadata + def pre_aggregated_list( self, request: compute.AggregatedListDisksRequest, @@ -262,12 +285,35 @@ def post_aggregated_list( ) -> compute.DiskAggregatedList: """Post-rpc interceptor for aggregated_list - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_aggregated_list_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Disks server but before - it is returned to user code. + it is returned to user code. This `post_aggregated_list` interceptor runs + before the `post_aggregated_list_with_metadata` interceptor. """ return response + def post_aggregated_list_with_metadata( + self, + response: compute.DiskAggregatedList, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.DiskAggregatedList, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for aggregated_list + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Disks server but before it is returned to user code. + + We recommend only using this `post_aggregated_list_with_metadata` + interceptor in new development instead of the `post_aggregated_list` interceptor. + When both interceptors are used, this `post_aggregated_list_with_metadata` interceptor runs after the + `post_aggregated_list` interceptor. The (possibly modified) response returned by + `post_aggregated_list` will be passed to + `post_aggregated_list_with_metadata`. + """ + return response, metadata + def pre_bulk_insert( self, request: compute.BulkInsertDiskRequest, @@ -283,12 +329,35 @@ def pre_bulk_insert( def post_bulk_insert(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for bulk_insert - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_bulk_insert_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Disks server but before - it is returned to user code. + it is returned to user code. This `post_bulk_insert` interceptor runs + before the `post_bulk_insert_with_metadata` interceptor. 
""" return response + def post_bulk_insert_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for bulk_insert + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Disks server but before it is returned to user code. + + We recommend only using this `post_bulk_insert_with_metadata` + interceptor in new development instead of the `post_bulk_insert` interceptor. + When both interceptors are used, this `post_bulk_insert_with_metadata` interceptor runs after the + `post_bulk_insert` interceptor. The (possibly modified) response returned by + `post_bulk_insert` will be passed to + `post_bulk_insert_with_metadata`. + """ + return response, metadata + def pre_create_snapshot( self, request: compute.CreateSnapshotDiskRequest, @@ -306,12 +375,35 @@ def pre_create_snapshot( def post_create_snapshot(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for create_snapshot - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_snapshot_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Disks server but before - it is returned to user code. + it is returned to user code. This `post_create_snapshot` interceptor runs + before the `post_create_snapshot_with_metadata` interceptor. """ return response + def post_create_snapshot_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_snapshot + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Disks server but before it is returned to user code. + + We recommend only using this `post_create_snapshot_with_metadata` + interceptor in new development instead of the `post_create_snapshot` interceptor. + When both interceptors are used, this `post_create_snapshot_with_metadata` interceptor runs after the + `post_create_snapshot` interceptor. The (possibly modified) response returned by + `post_create_snapshot` will be passed to + `post_create_snapshot_with_metadata`. + """ + return response, metadata + def pre_delete( self, request: compute.DeleteDiskRequest, @@ -327,12 +419,35 @@ def pre_delete( def post_delete(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for delete - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Disks server but before - it is returned to user code. + it is returned to user code. This `post_delete` interceptor runs + before the `post_delete_with_metadata` interceptor. """ return response + def post_delete_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Disks server but before it is returned to user code. 
+ + We recommend only using this `post_delete_with_metadata` + interceptor in new development instead of the `post_delete` interceptor. + When both interceptors are used, this `post_delete_with_metadata` interceptor runs after the + `post_delete` interceptor. The (possibly modified) response returned by + `post_delete` will be passed to + `post_delete_with_metadata`. + """ + return response, metadata + def pre_get( self, request: compute.GetDiskRequest, @@ -348,12 +463,33 @@ def pre_get( def post_get(self, response: compute.Disk) -> compute.Disk: """Post-rpc interceptor for get - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Disks server but before - it is returned to user code. + it is returned to user code. This `post_get` interceptor runs + before the `post_get_with_metadata` interceptor. """ return response + def post_get_with_metadata( + self, response: compute.Disk, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[compute.Disk, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Disks server but before it is returned to user code. + + We recommend only using this `post_get_with_metadata` + interceptor in new development instead of the `post_get` interceptor. + When both interceptors are used, this `post_get_with_metadata` interceptor runs after the + `post_get` interceptor. The (possibly modified) response returned by + `post_get` will be passed to + `post_get_with_metadata`. + """ + return response, metadata + def pre_get_iam_policy( self, request: compute.GetIamPolicyDiskRequest, @@ -371,12 +507,35 @@ def pre_get_iam_policy( def post_get_iam_policy(self, response: compute.Policy) -> compute.Policy: """Post-rpc interceptor for get_iam_policy - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_iam_policy_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Disks server but before - it is returned to user code. + it is returned to user code. This `post_get_iam_policy` interceptor runs + before the `post_get_iam_policy_with_metadata` interceptor. """ return response + def post_get_iam_policy_with_metadata( + self, + response: compute.Policy, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Policy, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_iam_policy + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Disks server but before it is returned to user code. + + We recommend only using this `post_get_iam_policy_with_metadata` + interceptor in new development instead of the `post_get_iam_policy` interceptor. + When both interceptors are used, this `post_get_iam_policy_with_metadata` interceptor runs after the + `post_get_iam_policy` interceptor. The (possibly modified) response returned by + `post_get_iam_policy` will be passed to + `post_get_iam_policy_with_metadata`. 
+ """ + return response, metadata + def pre_insert( self, request: compute.InsertDiskRequest, @@ -392,12 +551,35 @@ def pre_insert( def post_insert(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for insert - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_insert_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Disks server but before - it is returned to user code. + it is returned to user code. This `post_insert` interceptor runs + before the `post_insert_with_metadata` interceptor. """ return response + def post_insert_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for insert + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Disks server but before it is returned to user code. + + We recommend only using this `post_insert_with_metadata` + interceptor in new development instead of the `post_insert` interceptor. + When both interceptors are used, this `post_insert_with_metadata` interceptor runs after the + `post_insert` interceptor. The (possibly modified) response returned by + `post_insert` will be passed to + `post_insert_with_metadata`. + """ + return response, metadata + def pre_list( self, request: compute.ListDisksRequest, @@ -413,12 +595,35 @@ def pre_list( def post_list(self, response: compute.DiskList) -> compute.DiskList: """Post-rpc interceptor for list - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Disks server but before - it is returned to user code. + it is returned to user code. This `post_list` interceptor runs + before the `post_list_with_metadata` interceptor. """ return response + def post_list_with_metadata( + self, + response: compute.DiskList, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.DiskList, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Disks server but before it is returned to user code. + + We recommend only using this `post_list_with_metadata` + interceptor in new development instead of the `post_list` interceptor. + When both interceptors are used, this `post_list_with_metadata` interceptor runs after the + `post_list` interceptor. The (possibly modified) response returned by + `post_list` will be passed to + `post_list_with_metadata`. + """ + return response, metadata + def pre_remove_resource_policies( self, request: compute.RemoveResourcePoliciesDiskRequest, @@ -439,12 +644,35 @@ def post_remove_resource_policies( ) -> compute.Operation: """Post-rpc interceptor for remove_resource_policies - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_remove_resource_policies_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Disks server but before - it is returned to user code. + it is returned to user code. This `post_remove_resource_policies` interceptor runs + before the `post_remove_resource_policies_with_metadata` interceptor. 
""" return response + def post_remove_resource_policies_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for remove_resource_policies + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Disks server but before it is returned to user code. + + We recommend only using this `post_remove_resource_policies_with_metadata` + interceptor in new development instead of the `post_remove_resource_policies` interceptor. + When both interceptors are used, this `post_remove_resource_policies_with_metadata` interceptor runs after the + `post_remove_resource_policies` interceptor. The (possibly modified) response returned by + `post_remove_resource_policies` will be passed to + `post_remove_resource_policies_with_metadata`. + """ + return response, metadata + def pre_resize( self, request: compute.ResizeDiskRequest, @@ -460,12 +688,35 @@ def pre_resize( def post_resize(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for resize - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_resize_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Disks server but before - it is returned to user code. + it is returned to user code. This `post_resize` interceptor runs + before the `post_resize_with_metadata` interceptor. """ return response + def post_resize_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for resize + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Disks server but before it is returned to user code. + + We recommend only using this `post_resize_with_metadata` + interceptor in new development instead of the `post_resize` interceptor. + When both interceptors are used, this `post_resize_with_metadata` interceptor runs after the + `post_resize` interceptor. The (possibly modified) response returned by + `post_resize` will be passed to + `post_resize_with_metadata`. + """ + return response, metadata + def pre_set_iam_policy( self, request: compute.SetIamPolicyDiskRequest, @@ -483,12 +734,35 @@ def pre_set_iam_policy( def post_set_iam_policy(self, response: compute.Policy) -> compute.Policy: """Post-rpc interceptor for set_iam_policy - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_set_iam_policy_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Disks server but before - it is returned to user code. + it is returned to user code. This `post_set_iam_policy` interceptor runs + before the `post_set_iam_policy_with_metadata` interceptor. """ return response + def post_set_iam_policy_with_metadata( + self, + response: compute.Policy, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Policy, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for set_iam_policy + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Disks server but before it is returned to user code. 
+ + We recommend only using this `post_set_iam_policy_with_metadata` + interceptor in new development instead of the `post_set_iam_policy` interceptor. + When both interceptors are used, this `post_set_iam_policy_with_metadata` interceptor runs after the + `post_set_iam_policy` interceptor. The (possibly modified) response returned by + `post_set_iam_policy` will be passed to + `post_set_iam_policy_with_metadata`. + """ + return response, metadata + def pre_set_labels( self, request: compute.SetLabelsDiskRequest, @@ -504,12 +778,35 @@ def pre_set_labels( def post_set_labels(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for set_labels - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_set_labels_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Disks server but before - it is returned to user code. + it is returned to user code. This `post_set_labels` interceptor runs + before the `post_set_labels_with_metadata` interceptor. """ return response + def post_set_labels_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for set_labels + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Disks server but before it is returned to user code. + + We recommend only using this `post_set_labels_with_metadata` + interceptor in new development instead of the `post_set_labels` interceptor. + When both interceptors are used, this `post_set_labels_with_metadata` interceptor runs after the + `post_set_labels` interceptor. The (possibly modified) response returned by + `post_set_labels` will be passed to + `post_set_labels_with_metadata`. + """ + return response, metadata + def pre_start_async_replication( self, request: compute.StartAsyncReplicationDiskRequest, @@ -530,12 +827,35 @@ def post_start_async_replication( ) -> compute.Operation: """Post-rpc interceptor for start_async_replication - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_start_async_replication_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Disks server but before - it is returned to user code. + it is returned to user code. This `post_start_async_replication` interceptor runs + before the `post_start_async_replication_with_metadata` interceptor. """ return response + def post_start_async_replication_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for start_async_replication + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Disks server but before it is returned to user code. + + We recommend only using this `post_start_async_replication_with_metadata` + interceptor in new development instead of the `post_start_async_replication` interceptor. + When both interceptors are used, this `post_start_async_replication_with_metadata` interceptor runs after the + `post_start_async_replication` interceptor. The (possibly modified) response returned by + `post_start_async_replication` will be passed to + `post_start_async_replication_with_metadata`. 
+ """ + return response, metadata + def pre_stop_async_replication( self, request: compute.StopAsyncReplicationDiskRequest, @@ -555,12 +875,35 @@ def post_stop_async_replication( ) -> compute.Operation: """Post-rpc interceptor for stop_async_replication - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_stop_async_replication_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Disks server but before - it is returned to user code. + it is returned to user code. This `post_stop_async_replication` interceptor runs + before the `post_stop_async_replication_with_metadata` interceptor. """ return response + def post_stop_async_replication_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for stop_async_replication + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Disks server but before it is returned to user code. + + We recommend only using this `post_stop_async_replication_with_metadata` + interceptor in new development instead of the `post_stop_async_replication` interceptor. + When both interceptors are used, this `post_stop_async_replication_with_metadata` interceptor runs after the + `post_stop_async_replication` interceptor. The (possibly modified) response returned by + `post_stop_async_replication` will be passed to + `post_stop_async_replication_with_metadata`. + """ + return response, metadata + def pre_stop_group_async_replication( self, request: compute.StopGroupAsyncReplicationDiskRequest, @@ -581,12 +924,35 @@ def post_stop_group_async_replication( ) -> compute.Operation: """Post-rpc interceptor for stop_group_async_replication - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_stop_group_async_replication_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Disks server but before - it is returned to user code. + it is returned to user code. This `post_stop_group_async_replication` interceptor runs + before the `post_stop_group_async_replication_with_metadata` interceptor. """ return response + def post_stop_group_async_replication_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for stop_group_async_replication + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Disks server but before it is returned to user code. + + We recommend only using this `post_stop_group_async_replication_with_metadata` + interceptor in new development instead of the `post_stop_group_async_replication` interceptor. + When both interceptors are used, this `post_stop_group_async_replication_with_metadata` interceptor runs after the + `post_stop_group_async_replication` interceptor. The (possibly modified) response returned by + `post_stop_group_async_replication` will be passed to + `post_stop_group_async_replication_with_metadata`. 
+ """ + return response, metadata + def pre_test_iam_permissions( self, request: compute.TestIamPermissionsDiskRequest, @@ -606,12 +972,37 @@ def post_test_iam_permissions( ) -> compute.TestPermissionsResponse: """Post-rpc interceptor for test_iam_permissions - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_test_iam_permissions_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Disks server but before - it is returned to user code. + it is returned to user code. This `post_test_iam_permissions` interceptor runs + before the `post_test_iam_permissions_with_metadata` interceptor. """ return response + def post_test_iam_permissions_with_metadata( + self, + response: compute.TestPermissionsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.TestPermissionsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for test_iam_permissions + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Disks server but before it is returned to user code. + + We recommend only using this `post_test_iam_permissions_with_metadata` + interceptor in new development instead of the `post_test_iam_permissions` interceptor. + When both interceptors are used, this `post_test_iam_permissions_with_metadata` interceptor runs after the + `post_test_iam_permissions` interceptor. The (possibly modified) response returned by + `post_test_iam_permissions` will be passed to + `post_test_iam_permissions_with_metadata`. + """ + return response, metadata + def pre_update( self, request: compute.UpdateDiskRequest, @@ -627,12 +1018,35 @@ def pre_update( def post_update(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for update - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Disks server but before - it is returned to user code. + it is returned to user code. This `post_update` interceptor runs + before the `post_update_with_metadata` interceptor. """ return response + def post_update_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Disks server but before it is returned to user code. + + We recommend only using this `post_update_with_metadata` + interceptor in new development instead of the `post_update` interceptor. + When both interceptors are used, this `post_update_with_metadata` interceptor runs after the + `post_update` interceptor. The (possibly modified) response returned by + `post_update` will be passed to + `post_update_with_metadata`. 
+ """ + return response, metadata + @dataclasses.dataclass class DisksRestStub: @@ -872,6 +1286,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_add_resource_policies(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_add_resource_policies_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1016,6 +1434,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_aggregated_list(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_aggregated_list_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1181,6 +1603,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_bulk_insert(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_bulk_insert_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1348,6 +1774,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_snapshot(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_snapshot_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1504,6 +1934,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1655,6 +2089,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1820,6 +2256,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_iam_policy(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_iam_policy_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1982,6 +2422,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_insert(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_insert_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2121,6 +2565,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list(resp) + response_metadata = 
[(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2288,6 +2734,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_remove_resource_policies(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_remove_resource_policies_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2450,6 +2900,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_resize(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_resize_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2621,6 +3075,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_set_iam_policy(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_set_iam_policy_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2786,6 +3244,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_set_labels(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_set_labels_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2953,6 +3415,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_start_async_replication(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_start_async_replication_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3114,6 +3580,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_stop_async_replication(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_stop_async_replication_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3282,6 +3752,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_stop_group_async_replication(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_stop_group_async_replication_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3438,6 +3912,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_test_iam_permissions(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_test_iam_permissions_with_metadata( + resp, response_metadata + ) if 
CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3600,6 +4078,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/external_vpn_gateways/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/external_vpn_gateways/client.py index fca2b661f1f1..2e20c7163db4 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/external_vpn_gateways/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/external_vpn_gateways/client.py @@ -15,6 +15,8 @@ # from collections import OrderedDict import functools +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -461,6 +463,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/external_vpn_gateways/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/external_vpn_gateways/transports/rest.py index b3daa948e7ae..16ab9a34ed50 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/external_vpn_gateways/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/external_vpn_gateways/transports/rest.py @@ -139,12 +139,35 @@ def pre_delete( def post_delete(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for delete - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ExternalVpnGateways server but before - it is returned to user code. + it is returned to user code. This `post_delete` interceptor runs + before the `post_delete_with_metadata` interceptor. 
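# --- Editor's illustration (not part of the generated change) -------------
# As the `__call__` changes above show, each transport now builds
# `response_metadata` from the HTTP response headers as (name, str(value))
# tuples and forwards it to the matching `post_*_with_metadata` hook; note
# that the transport keeps only the returned response (`resp, _ = ...`), so
# mutating the metadata has no effect downstream. A sketch against the
# ExternalVpnGateways interceptor defined just below (the header name used
# here is purely illustrative):
from google.cloud.compute_v1.services.external_vpn_gateways.transports.rest import (
    ExternalVpnGatewaysRestInterceptor,
)


class HeaderAwareInterceptor(ExternalVpnGatewaysRestInterceptor):
    def post_delete_with_metadata(self, response, metadata):
        headers = dict(metadata)  # convenient lookup of the flattened headers
        request_id = headers.get("x-example-request-id")  # hypothetical header
        if request_id:
            print("server request id:", request_id)
        return response, metadata  # leave both unchanged
# --------------------------------------------------------------------------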
""" return response + def post_delete_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ExternalVpnGateways server but before it is returned to user code. + + We recommend only using this `post_delete_with_metadata` + interceptor in new development instead of the `post_delete` interceptor. + When both interceptors are used, this `post_delete_with_metadata` interceptor runs after the + `post_delete` interceptor. The (possibly modified) response returned by + `post_delete` will be passed to + `post_delete_with_metadata`. + """ + return response, metadata + def pre_get( self, request: compute.GetExternalVpnGatewayRequest, @@ -164,12 +187,35 @@ def post_get( ) -> compute.ExternalVpnGateway: """Post-rpc interceptor for get - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ExternalVpnGateways server but before - it is returned to user code. + it is returned to user code. This `post_get` interceptor runs + before the `post_get_with_metadata` interceptor. """ return response + def post_get_with_metadata( + self, + response: compute.ExternalVpnGateway, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.ExternalVpnGateway, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ExternalVpnGateways server but before it is returned to user code. + + We recommend only using this `post_get_with_metadata` + interceptor in new development instead of the `post_get` interceptor. + When both interceptors are used, this `post_get_with_metadata` interceptor runs after the + `post_get` interceptor. The (possibly modified) response returned by + `post_get` will be passed to + `post_get_with_metadata`. + """ + return response, metadata + def pre_insert( self, request: compute.InsertExternalVpnGatewayRequest, @@ -187,12 +233,35 @@ def pre_insert( def post_insert(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for insert - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_insert_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ExternalVpnGateways server but before - it is returned to user code. + it is returned to user code. This `post_insert` interceptor runs + before the `post_insert_with_metadata` interceptor. """ return response + def post_insert_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for insert + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ExternalVpnGateways server but before it is returned to user code. + + We recommend only using this `post_insert_with_metadata` + interceptor in new development instead of the `post_insert` interceptor. + When both interceptors are used, this `post_insert_with_metadata` interceptor runs after the + `post_insert` interceptor. 
The (possibly modified) response returned by + `post_insert` will be passed to + `post_insert_with_metadata`. + """ + return response, metadata + def pre_list( self, request: compute.ListExternalVpnGatewaysRequest, @@ -212,12 +281,35 @@ def post_list( ) -> compute.ExternalVpnGatewayList: """Post-rpc interceptor for list - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ExternalVpnGateways server but before - it is returned to user code. + it is returned to user code. This `post_list` interceptor runs + before the `post_list_with_metadata` interceptor. """ return response + def post_list_with_metadata( + self, + response: compute.ExternalVpnGatewayList, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.ExternalVpnGatewayList, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ExternalVpnGateways server but before it is returned to user code. + + We recommend only using this `post_list_with_metadata` + interceptor in new development instead of the `post_list` interceptor. + When both interceptors are used, this `post_list_with_metadata` interceptor runs after the + `post_list` interceptor. The (possibly modified) response returned by + `post_list` will be passed to + `post_list_with_metadata`. + """ + return response, metadata + def pre_set_labels( self, request: compute.SetLabelsExternalVpnGatewayRequest, @@ -236,12 +328,35 @@ def pre_set_labels( def post_set_labels(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for set_labels - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_set_labels_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ExternalVpnGateways server but before - it is returned to user code. + it is returned to user code. This `post_set_labels` interceptor runs + before the `post_set_labels_with_metadata` interceptor. """ return response + def post_set_labels_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for set_labels + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ExternalVpnGateways server but before it is returned to user code. + + We recommend only using this `post_set_labels_with_metadata` + interceptor in new development instead of the `post_set_labels` interceptor. + When both interceptors are used, this `post_set_labels_with_metadata` interceptor runs after the + `post_set_labels` interceptor. The (possibly modified) response returned by + `post_set_labels` will be passed to + `post_set_labels_with_metadata`. + """ + return response, metadata + def pre_test_iam_permissions( self, request: compute.TestIamPermissionsExternalVpnGatewayRequest, @@ -262,12 +377,37 @@ def post_test_iam_permissions( ) -> compute.TestPermissionsResponse: """Post-rpc interceptor for test_iam_permissions - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_test_iam_permissions_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response after it is returned by the ExternalVpnGateways server but before - it is returned to user code. + it is returned to user code. This `post_test_iam_permissions` interceptor runs + before the `post_test_iam_permissions_with_metadata` interceptor. """ return response + def post_test_iam_permissions_with_metadata( + self, + response: compute.TestPermissionsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.TestPermissionsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for test_iam_permissions + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ExternalVpnGateways server but before it is returned to user code. + + We recommend only using this `post_test_iam_permissions_with_metadata` + interceptor in new development instead of the `post_test_iam_permissions` interceptor. + When both interceptors are used, this `post_test_iam_permissions_with_metadata` interceptor runs after the + `post_test_iam_permissions` interceptor. The (possibly modified) response returned by + `post_test_iam_permissions` will be passed to + `post_test_iam_permissions_with_metadata`. + """ + return response, metadata + @dataclasses.dataclass class ExternalVpnGatewaysRestStub: @@ -495,6 +635,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -655,6 +799,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -820,6 +966,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_insert(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_insert_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -968,6 +1118,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1134,6 +1286,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_set_labels(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_set_labels_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1287,6 +1443,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_test_iam_permissions(resp) + response_metadata = [(k, str(v)) for k, v in 
response.headers.items()] + resp, _ = self._interceptor.post_test_iam_permissions_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/firewall_policies/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/firewall_policies/client.py index 8da708b9a75a..7ebd4d73b93c 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/firewall_policies/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/firewall_policies/client.py @@ -15,6 +15,8 @@ # from collections import OrderedDict import functools +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -461,6 +463,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/firewall_policies/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/firewall_policies/transports/rest.py index 2ae3c7e45643..f001e71cd932 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/firewall_policies/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/firewall_policies/transports/rest.py @@ -236,12 +236,35 @@ def pre_add_association( def post_add_association(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for add_association - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_add_association_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the FirewallPolicies server but before - it is returned to user code. + it is returned to user code. This `post_add_association` interceptor runs + before the `post_add_association_with_metadata` interceptor. """ return response + def post_add_association_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for add_association + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the FirewallPolicies server but before it is returned to user code. + + We recommend only using this `post_add_association_with_metadata` + interceptor in new development instead of the `post_add_association` interceptor. 
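# --- Editor's illustration (not part of the generated change) -------------
# The `_add_cred_info_for_auth_errors` helper added to each client (see the
# firewall_policies client above) only annotates 401/403/404 failures, and
# only when google-auth >= 2.35.0 exposes `get_cred_info()`. From the
# caller's side the effect is an extra JSON string in the error details;
# a hedged sketch, assuming the client routes API errors through the helper
# as this change intends (the resource name below is hypothetical):
from google.api_core import exceptions as core_exceptions
from google.cloud import compute_v1

fp_client = compute_v1.FirewallPoliciesClient()
try:
    fp_client.get(firewall_policy="example-policy")
except core_exceptions.GoogleAPICallError as exc:
    for detail in exc.details:  # may now include the appended credential info JSON
        print(detail)
    raise
# --------------------------------------------------------------------------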
+ When both interceptors are used, this `post_add_association_with_metadata` interceptor runs after the + `post_add_association` interceptor. The (possibly modified) response returned by + `post_add_association` will be passed to + `post_add_association_with_metadata`. + """ + return response, metadata + def pre_add_rule( self, request: compute.AddRuleFirewallPolicyRequest, @@ -259,12 +282,35 @@ def pre_add_rule( def post_add_rule(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for add_rule - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_add_rule_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the FirewallPolicies server but before - it is returned to user code. + it is returned to user code. This `post_add_rule` interceptor runs + before the `post_add_rule_with_metadata` interceptor. """ return response + def post_add_rule_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for add_rule + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the FirewallPolicies server but before it is returned to user code. + + We recommend only using this `post_add_rule_with_metadata` + interceptor in new development instead of the `post_add_rule` interceptor. + When both interceptors are used, this `post_add_rule_with_metadata` interceptor runs after the + `post_add_rule` interceptor. The (possibly modified) response returned by + `post_add_rule` will be passed to + `post_add_rule_with_metadata`. + """ + return response, metadata + def pre_clone_rules( self, request: compute.CloneRulesFirewallPolicyRequest, @@ -282,12 +328,35 @@ def pre_clone_rules( def post_clone_rules(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for clone_rules - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_clone_rules_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the FirewallPolicies server but before - it is returned to user code. + it is returned to user code. This `post_clone_rules` interceptor runs + before the `post_clone_rules_with_metadata` interceptor. """ return response + def post_clone_rules_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for clone_rules + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the FirewallPolicies server but before it is returned to user code. + + We recommend only using this `post_clone_rules_with_metadata` + interceptor in new development instead of the `post_clone_rules` interceptor. + When both interceptors are used, this `post_clone_rules_with_metadata` interceptor runs after the + `post_clone_rules` interceptor. The (possibly modified) response returned by + `post_clone_rules` will be passed to + `post_clone_rules_with_metadata`. 
+ """ + return response, metadata + def pre_delete( self, request: compute.DeleteFirewallPolicyRequest, @@ -305,12 +374,35 @@ def pre_delete( def post_delete(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for delete - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the FirewallPolicies server but before - it is returned to user code. + it is returned to user code. This `post_delete` interceptor runs + before the `post_delete_with_metadata` interceptor. """ return response + def post_delete_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the FirewallPolicies server but before it is returned to user code. + + We recommend only using this `post_delete_with_metadata` + interceptor in new development instead of the `post_delete` interceptor. + When both interceptors are used, this `post_delete_with_metadata` interceptor runs after the + `post_delete` interceptor. The (possibly modified) response returned by + `post_delete` will be passed to + `post_delete_with_metadata`. + """ + return response, metadata + def pre_get( self, request: compute.GetFirewallPolicyRequest, @@ -328,12 +420,35 @@ def pre_get( def post_get(self, response: compute.FirewallPolicy) -> compute.FirewallPolicy: """Post-rpc interceptor for get - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the FirewallPolicies server but before - it is returned to user code. + it is returned to user code. This `post_get` interceptor runs + before the `post_get_with_metadata` interceptor. """ return response + def post_get_with_metadata( + self, + response: compute.FirewallPolicy, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.FirewallPolicy, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the FirewallPolicies server but before it is returned to user code. + + We recommend only using this `post_get_with_metadata` + interceptor in new development instead of the `post_get` interceptor. + When both interceptors are used, this `post_get_with_metadata` interceptor runs after the + `post_get` interceptor. The (possibly modified) response returned by + `post_get` will be passed to + `post_get_with_metadata`. + """ + return response, metadata + def pre_get_association( self, request: compute.GetAssociationFirewallPolicyRequest, @@ -354,12 +469,37 @@ def post_get_association( ) -> compute.FirewallPolicyAssociation: """Post-rpc interceptor for get_association - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_association_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the FirewallPolicies server but before - it is returned to user code. + it is returned to user code. 
This `post_get_association` interceptor runs + before the `post_get_association_with_metadata` interceptor. """ return response + def post_get_association_with_metadata( + self, + response: compute.FirewallPolicyAssociation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.FirewallPolicyAssociation, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for get_association + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the FirewallPolicies server but before it is returned to user code. + + We recommend only using this `post_get_association_with_metadata` + interceptor in new development instead of the `post_get_association` interceptor. + When both interceptors are used, this `post_get_association_with_metadata` interceptor runs after the + `post_get_association` interceptor. The (possibly modified) response returned by + `post_get_association` will be passed to + `post_get_association_with_metadata`. + """ + return response, metadata + def pre_get_iam_policy( self, request: compute.GetIamPolicyFirewallPolicyRequest, @@ -378,12 +518,35 @@ def pre_get_iam_policy( def post_get_iam_policy(self, response: compute.Policy) -> compute.Policy: """Post-rpc interceptor for get_iam_policy - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_iam_policy_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the FirewallPolicies server but before - it is returned to user code. + it is returned to user code. This `post_get_iam_policy` interceptor runs + before the `post_get_iam_policy_with_metadata` interceptor. """ return response + def post_get_iam_policy_with_metadata( + self, + response: compute.Policy, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Policy, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_iam_policy + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the FirewallPolicies server but before it is returned to user code. + + We recommend only using this `post_get_iam_policy_with_metadata` + interceptor in new development instead of the `post_get_iam_policy` interceptor. + When both interceptors are used, this `post_get_iam_policy_with_metadata` interceptor runs after the + `post_get_iam_policy` interceptor. The (possibly modified) response returned by + `post_get_iam_policy` will be passed to + `post_get_iam_policy_with_metadata`. + """ + return response, metadata + def pre_get_rule( self, request: compute.GetRuleFirewallPolicyRequest, @@ -403,12 +566,35 @@ def post_get_rule( ) -> compute.FirewallPolicyRule: """Post-rpc interceptor for get_rule - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_rule_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the FirewallPolicies server but before - it is returned to user code. + it is returned to user code. This `post_get_rule` interceptor runs + before the `post_get_rule_with_metadata` interceptor. 
""" return response + def post_get_rule_with_metadata( + self, + response: compute.FirewallPolicyRule, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.FirewallPolicyRule, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_rule + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the FirewallPolicies server but before it is returned to user code. + + We recommend only using this `post_get_rule_with_metadata` + interceptor in new development instead of the `post_get_rule` interceptor. + When both interceptors are used, this `post_get_rule_with_metadata` interceptor runs after the + `post_get_rule` interceptor. The (possibly modified) response returned by + `post_get_rule` will be passed to + `post_get_rule_with_metadata`. + """ + return response, metadata + def pre_insert( self, request: compute.InsertFirewallPolicyRequest, @@ -426,12 +612,35 @@ def pre_insert( def post_insert(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for insert - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_insert_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the FirewallPolicies server but before - it is returned to user code. + it is returned to user code. This `post_insert` interceptor runs + before the `post_insert_with_metadata` interceptor. """ return response + def post_insert_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for insert + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the FirewallPolicies server but before it is returned to user code. + + We recommend only using this `post_insert_with_metadata` + interceptor in new development instead of the `post_insert` interceptor. + When both interceptors are used, this `post_insert_with_metadata` interceptor runs after the + `post_insert` interceptor. The (possibly modified) response returned by + `post_insert` will be passed to + `post_insert_with_metadata`. + """ + return response, metadata + def pre_list( self, request: compute.ListFirewallPoliciesRequest, @@ -451,12 +660,35 @@ def post_list( ) -> compute.FirewallPolicyList: """Post-rpc interceptor for list - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the FirewallPolicies server but before - it is returned to user code. + it is returned to user code. This `post_list` interceptor runs + before the `post_list_with_metadata` interceptor. """ return response + def post_list_with_metadata( + self, + response: compute.FirewallPolicyList, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.FirewallPolicyList, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the FirewallPolicies server but before it is returned to user code. + + We recommend only using this `post_list_with_metadata` + interceptor in new development instead of the `post_list` interceptor. 
+ When both interceptors are used, this `post_list_with_metadata` interceptor runs after the + `post_list` interceptor. The (possibly modified) response returned by + `post_list` will be passed to + `post_list_with_metadata`. + """ + return response, metadata + def pre_list_associations( self, request: compute.ListAssociationsFirewallPolicyRequest, @@ -477,12 +709,38 @@ def post_list_associations( ) -> compute.FirewallPoliciesListAssociationsResponse: """Post-rpc interceptor for list_associations - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_associations_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the FirewallPolicies server but before - it is returned to user code. + it is returned to user code. This `post_list_associations` interceptor runs + before the `post_list_associations_with_metadata` interceptor. """ return response + def post_list_associations_with_metadata( + self, + response: compute.FirewallPoliciesListAssociationsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.FirewallPoliciesListAssociationsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_associations + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the FirewallPolicies server but before it is returned to user code. + + We recommend only using this `post_list_associations_with_metadata` + interceptor in new development instead of the `post_list_associations` interceptor. + When both interceptors are used, this `post_list_associations_with_metadata` interceptor runs after the + `post_list_associations` interceptor. The (possibly modified) response returned by + `post_list_associations` will be passed to + `post_list_associations_with_metadata`. + """ + return response, metadata + def pre_move( self, request: compute.MoveFirewallPolicyRequest, @@ -500,12 +758,35 @@ def pre_move( def post_move(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for move - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_move_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the FirewallPolicies server but before - it is returned to user code. + it is returned to user code. This `post_move` interceptor runs + before the `post_move_with_metadata` interceptor. """ return response + def post_move_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for move + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the FirewallPolicies server but before it is returned to user code. + + We recommend only using this `post_move_with_metadata` + interceptor in new development instead of the `post_move` interceptor. + When both interceptors are used, this `post_move_with_metadata` interceptor runs after the + `post_move` interceptor. The (possibly modified) response returned by + `post_move` will be passed to + `post_move_with_metadata`. 
+ """ + return response, metadata + def pre_patch( self, request: compute.PatchFirewallPolicyRequest, @@ -523,12 +804,35 @@ def pre_patch( def post_patch(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for patch - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_patch_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the FirewallPolicies server but before - it is returned to user code. + it is returned to user code. This `post_patch` interceptor runs + before the `post_patch_with_metadata` interceptor. """ return response + def post_patch_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for patch + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the FirewallPolicies server but before it is returned to user code. + + We recommend only using this `post_patch_with_metadata` + interceptor in new development instead of the `post_patch` interceptor. + When both interceptors are used, this `post_patch_with_metadata` interceptor runs after the + `post_patch` interceptor. The (possibly modified) response returned by + `post_patch` will be passed to + `post_patch_with_metadata`. + """ + return response, metadata + def pre_patch_rule( self, request: compute.PatchRuleFirewallPolicyRequest, @@ -546,12 +850,35 @@ def pre_patch_rule( def post_patch_rule(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for patch_rule - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_patch_rule_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the FirewallPolicies server but before - it is returned to user code. + it is returned to user code. This `post_patch_rule` interceptor runs + before the `post_patch_rule_with_metadata` interceptor. """ return response + def post_patch_rule_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for patch_rule + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the FirewallPolicies server but before it is returned to user code. + + We recommend only using this `post_patch_rule_with_metadata` + interceptor in new development instead of the `post_patch_rule` interceptor. + When both interceptors are used, this `post_patch_rule_with_metadata` interceptor runs after the + `post_patch_rule` interceptor. The (possibly modified) response returned by + `post_patch_rule` will be passed to + `post_patch_rule_with_metadata`. + """ + return response, metadata + def pre_remove_association( self, request: compute.RemoveAssociationFirewallPolicyRequest, @@ -570,12 +897,35 @@ def pre_remove_association( def post_remove_association(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for remove_association - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_remove_association_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response after it is returned by the FirewallPolicies server but before - it is returned to user code. + it is returned to user code. This `post_remove_association` interceptor runs + before the `post_remove_association_with_metadata` interceptor. """ return response + def post_remove_association_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for remove_association + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the FirewallPolicies server but before it is returned to user code. + + We recommend only using this `post_remove_association_with_metadata` + interceptor in new development instead of the `post_remove_association` interceptor. + When both interceptors are used, this `post_remove_association_with_metadata` interceptor runs after the + `post_remove_association` interceptor. The (possibly modified) response returned by + `post_remove_association` will be passed to + `post_remove_association_with_metadata`. + """ + return response, metadata + def pre_remove_rule( self, request: compute.RemoveRuleFirewallPolicyRequest, @@ -593,12 +943,35 @@ def pre_remove_rule( def post_remove_rule(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for remove_rule - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_remove_rule_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the FirewallPolicies server but before - it is returned to user code. + it is returned to user code. This `post_remove_rule` interceptor runs + before the `post_remove_rule_with_metadata` interceptor. """ return response + def post_remove_rule_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for remove_rule + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the FirewallPolicies server but before it is returned to user code. + + We recommend only using this `post_remove_rule_with_metadata` + interceptor in new development instead of the `post_remove_rule` interceptor. + When both interceptors are used, this `post_remove_rule_with_metadata` interceptor runs after the + `post_remove_rule` interceptor. The (possibly modified) response returned by + `post_remove_rule` will be passed to + `post_remove_rule_with_metadata`. + """ + return response, metadata + def pre_set_iam_policy( self, request: compute.SetIamPolicyFirewallPolicyRequest, @@ -617,12 +990,35 @@ def pre_set_iam_policy( def post_set_iam_policy(self, response: compute.Policy) -> compute.Policy: """Post-rpc interceptor for set_iam_policy - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_set_iam_policy_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the FirewallPolicies server but before - it is returned to user code. + it is returned to user code. This `post_set_iam_policy` interceptor runs + before the `post_set_iam_policy_with_metadata` interceptor. 
""" return response + def post_set_iam_policy_with_metadata( + self, + response: compute.Policy, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Policy, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for set_iam_policy + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the FirewallPolicies server but before it is returned to user code. + + We recommend only using this `post_set_iam_policy_with_metadata` + interceptor in new development instead of the `post_set_iam_policy` interceptor. + When both interceptors are used, this `post_set_iam_policy_with_metadata` interceptor runs after the + `post_set_iam_policy` interceptor. The (possibly modified) response returned by + `post_set_iam_policy` will be passed to + `post_set_iam_policy_with_metadata`. + """ + return response, metadata + def pre_test_iam_permissions( self, request: compute.TestIamPermissionsFirewallPolicyRequest, @@ -643,12 +1039,37 @@ def post_test_iam_permissions( ) -> compute.TestPermissionsResponse: """Post-rpc interceptor for test_iam_permissions - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_test_iam_permissions_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the FirewallPolicies server but before - it is returned to user code. + it is returned to user code. This `post_test_iam_permissions` interceptor runs + before the `post_test_iam_permissions_with_metadata` interceptor. """ return response + def post_test_iam_permissions_with_metadata( + self, + response: compute.TestPermissionsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.TestPermissionsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for test_iam_permissions + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the FirewallPolicies server but before it is returned to user code. + + We recommend only using this `post_test_iam_permissions_with_metadata` + interceptor in new development instead of the `post_test_iam_permissions` interceptor. + When both interceptors are used, this `post_test_iam_permissions_with_metadata` interceptor runs after the + `post_test_iam_permissions` interceptor. The (possibly modified) response returned by + `post_test_iam_permissions` will be passed to + `post_test_iam_permissions_with_metadata`. 
+ """ + return response, metadata + @dataclasses.dataclass class FirewallPoliciesRestStub: @@ -882,6 +1303,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_add_association(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_add_association_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1053,6 +1478,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_add_rule(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_add_rule_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1212,6 +1641,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_clone_rules(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_clone_rules_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1375,6 +1808,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1521,6 +1958,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1663,6 +2102,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_association(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_association_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1830,6 +2273,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_iam_policy(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_iam_policy_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1980,6 +2427,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_rule(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_rule_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2151,6 +2602,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_insert(resp) + response_metadata = [(k, 
str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_insert_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2295,6 +2750,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2440,6 +2897,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_associations(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_associations_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2605,6 +3066,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_move(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_move_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2774,6 +3237,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_patch(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_patch_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2939,6 +3406,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_patch_rule(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_patch_rule_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3101,6 +3572,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_remove_association(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_remove_association_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3260,6 +3735,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_remove_rule(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_remove_rule_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3431,6 +3910,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_set_iam_policy(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_set_iam_policy_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3582,6 +4065,10 @@ def __call__( 
json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_test_iam_permissions(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_test_iam_permissions_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/firewalls/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/firewalls/client.py index 0de4cdc1a12d..295677f5f50e 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/firewalls/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/firewalls/client.py @@ -15,6 +15,8 @@ # from collections import OrderedDict import functools +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -459,6 +461,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/firewalls/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/firewalls/transports/rest.py index 896cd8868490..7732be07c506 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/firewalls/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/firewalls/transports/rest.py @@ -137,12 +137,35 @@ def pre_delete( def post_delete(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for delete - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Firewalls server but before - it is returned to user code. + it is returned to user code. This `post_delete` interceptor runs + before the `post_delete_with_metadata` interceptor. """ return response + def post_delete_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Firewalls server but before it is returned to user code. + + We recommend only using this `post_delete_with_metadata` + interceptor in new development instead of the `post_delete` interceptor. 
+ When both interceptors are used, this `post_delete_with_metadata` interceptor runs after the + `post_delete` interceptor. The (possibly modified) response returned by + `post_delete` will be passed to + `post_delete_with_metadata`. + """ + return response, metadata + def pre_get( self, request: compute.GetFirewallRequest, @@ -158,12 +181,35 @@ def pre_get( def post_get(self, response: compute.Firewall) -> compute.Firewall: """Post-rpc interceptor for get - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Firewalls server but before - it is returned to user code. + it is returned to user code. This `post_get` interceptor runs + before the `post_get_with_metadata` interceptor. """ return response + def post_get_with_metadata( + self, + response: compute.Firewall, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Firewall, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Firewalls server but before it is returned to user code. + + We recommend only using this `post_get_with_metadata` + interceptor in new development instead of the `post_get` interceptor. + When both interceptors are used, this `post_get_with_metadata` interceptor runs after the + `post_get` interceptor. The (possibly modified) response returned by + `post_get` will be passed to + `post_get_with_metadata`. + """ + return response, metadata + def pre_insert( self, request: compute.InsertFirewallRequest, @@ -179,12 +225,35 @@ def pre_insert( def post_insert(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for insert - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_insert_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Firewalls server but before - it is returned to user code. + it is returned to user code. This `post_insert` interceptor runs + before the `post_insert_with_metadata` interceptor. """ return response + def post_insert_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for insert + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Firewalls server but before it is returned to user code. + + We recommend only using this `post_insert_with_metadata` + interceptor in new development instead of the `post_insert` interceptor. + When both interceptors are used, this `post_insert_with_metadata` interceptor runs after the + `post_insert` interceptor. The (possibly modified) response returned by + `post_insert` will be passed to + `post_insert_with_metadata`. + """ + return response, metadata + def pre_list( self, request: compute.ListFirewallsRequest, @@ -200,12 +269,35 @@ def pre_list( def post_list(self, response: compute.FirewallList) -> compute.FirewallList: """Post-rpc interceptor for list - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response after it is returned by the Firewalls server but before - it is returned to user code. + it is returned to user code. This `post_list` interceptor runs + before the `post_list_with_metadata` interceptor. """ return response + def post_list_with_metadata( + self, + response: compute.FirewallList, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.FirewallList, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Firewalls server but before it is returned to user code. + + We recommend only using this `post_list_with_metadata` + interceptor in new development instead of the `post_list` interceptor. + When both interceptors are used, this `post_list_with_metadata` interceptor runs after the + `post_list` interceptor. The (possibly modified) response returned by + `post_list` will be passed to + `post_list_with_metadata`. + """ + return response, metadata + def pre_patch( self, request: compute.PatchFirewallRequest, @@ -221,12 +313,35 @@ def pre_patch( def post_patch(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for patch - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_patch_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Firewalls server but before - it is returned to user code. + it is returned to user code. This `post_patch` interceptor runs + before the `post_patch_with_metadata` interceptor. """ return response + def post_patch_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for patch + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Firewalls server but before it is returned to user code. + + We recommend only using this `post_patch_with_metadata` + interceptor in new development instead of the `post_patch` interceptor. + When both interceptors are used, this `post_patch_with_metadata` interceptor runs after the + `post_patch` interceptor. The (possibly modified) response returned by + `post_patch` will be passed to + `post_patch_with_metadata`. + """ + return response, metadata + def pre_update( self, request: compute.UpdateFirewallRequest, @@ -242,12 +357,35 @@ def pre_update( def post_update(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for update - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Firewalls server but before - it is returned to user code. + it is returned to user code. This `post_update` interceptor runs + before the `post_update_with_metadata` interceptor. """ return response + def post_update_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Firewalls server but before it is returned to user code. 
+ + We recommend only using this `post_update_with_metadata` + interceptor in new development instead of the `post_update` interceptor. + When both interceptors are used, this `post_update_with_metadata` interceptor runs after the + `post_update` interceptor. The (possibly modified) response returned by + `post_update` will be passed to + `post_update_with_metadata`. + """ + return response, metadata + @dataclasses.dataclass class FirewallsRestStub: @@ -475,6 +613,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -619,6 +761,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -784,6 +928,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_insert(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_insert_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -923,6 +1071,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1088,6 +1238,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_patch(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_patch_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1253,6 +1407,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/forwarding_rules/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/forwarding_rules/client.py index bce45cef30fc..f8560124eafe 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/forwarding_rules/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/forwarding_rules/client.py @@ -15,6 +15,8 @@ # from collections import OrderedDict import functools +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -461,6 +463,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until 
further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/forwarding_rules/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/forwarding_rules/transports/rest.py index a7794aac8401..b29d4e0565d8 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/forwarding_rules/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/forwarding_rules/transports/rest.py @@ -158,12 +158,37 @@ def post_aggregated_list( ) -> compute.ForwardingRuleAggregatedList: """Post-rpc interceptor for aggregated_list - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_aggregated_list_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ForwardingRules server but before - it is returned to user code. + it is returned to user code. This `post_aggregated_list` interceptor runs + before the `post_aggregated_list_with_metadata` interceptor. """ return response + def post_aggregated_list_with_metadata( + self, + response: compute.ForwardingRuleAggregatedList, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.ForwardingRuleAggregatedList, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for aggregated_list + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ForwardingRules server but before it is returned to user code. + + We recommend only using this `post_aggregated_list_with_metadata` + interceptor in new development instead of the `post_aggregated_list` interceptor. + When both interceptors are used, this `post_aggregated_list_with_metadata` interceptor runs after the + `post_aggregated_list` interceptor. The (possibly modified) response returned by + `post_aggregated_list` will be passed to + `post_aggregated_list_with_metadata`. + """ + return response, metadata + def pre_delete( self, request: compute.DeleteForwardingRuleRequest, @@ -181,12 +206,35 @@ def pre_delete( def post_delete(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for delete - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ForwardingRules server but before - it is returned to user code. + it is returned to user code. This `post_delete` interceptor runs + before the `post_delete_with_metadata` interceptor. 
""" return response + def post_delete_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ForwardingRules server but before it is returned to user code. + + We recommend only using this `post_delete_with_metadata` + interceptor in new development instead of the `post_delete` interceptor. + When both interceptors are used, this `post_delete_with_metadata` interceptor runs after the + `post_delete` interceptor. The (possibly modified) response returned by + `post_delete` will be passed to + `post_delete_with_metadata`. + """ + return response, metadata + def pre_get( self, request: compute.GetForwardingRuleRequest, @@ -204,12 +252,35 @@ def pre_get( def post_get(self, response: compute.ForwardingRule) -> compute.ForwardingRule: """Post-rpc interceptor for get - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ForwardingRules server but before - it is returned to user code. + it is returned to user code. This `post_get` interceptor runs + before the `post_get_with_metadata` interceptor. """ return response + def post_get_with_metadata( + self, + response: compute.ForwardingRule, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.ForwardingRule, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ForwardingRules server but before it is returned to user code. + + We recommend only using this `post_get_with_metadata` + interceptor in new development instead of the `post_get` interceptor. + When both interceptors are used, this `post_get_with_metadata` interceptor runs after the + `post_get` interceptor. The (possibly modified) response returned by + `post_get` will be passed to + `post_get_with_metadata`. + """ + return response, metadata + def pre_insert( self, request: compute.InsertForwardingRuleRequest, @@ -227,12 +298,35 @@ def pre_insert( def post_insert(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for insert - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_insert_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ForwardingRules server but before - it is returned to user code. + it is returned to user code. This `post_insert` interceptor runs + before the `post_insert_with_metadata` interceptor. """ return response + def post_insert_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for insert + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ForwardingRules server but before it is returned to user code. + + We recommend only using this `post_insert_with_metadata` + interceptor in new development instead of the `post_insert` interceptor. 
+ When both interceptors are used, this `post_insert_with_metadata` interceptor runs after the + `post_insert` interceptor. The (possibly modified) response returned by + `post_insert` will be passed to + `post_insert_with_metadata`. + """ + return response, metadata + def pre_list( self, request: compute.ListForwardingRulesRequest, @@ -252,12 +346,35 @@ def post_list( ) -> compute.ForwardingRuleList: """Post-rpc interceptor for list - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ForwardingRules server but before - it is returned to user code. + it is returned to user code. This `post_list` interceptor runs + before the `post_list_with_metadata` interceptor. """ return response + def post_list_with_metadata( + self, + response: compute.ForwardingRuleList, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.ForwardingRuleList, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ForwardingRules server but before it is returned to user code. + + We recommend only using this `post_list_with_metadata` + interceptor in new development instead of the `post_list` interceptor. + When both interceptors are used, this `post_list_with_metadata` interceptor runs after the + `post_list` interceptor. The (possibly modified) response returned by + `post_list` will be passed to + `post_list_with_metadata`. + """ + return response, metadata + def pre_patch( self, request: compute.PatchForwardingRuleRequest, @@ -275,12 +392,35 @@ def pre_patch( def post_patch(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for patch - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_patch_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ForwardingRules server but before - it is returned to user code. + it is returned to user code. This `post_patch` interceptor runs + before the `post_patch_with_metadata` interceptor. """ return response + def post_patch_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for patch + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ForwardingRules server but before it is returned to user code. + + We recommend only using this `post_patch_with_metadata` + interceptor in new development instead of the `post_patch` interceptor. + When both interceptors are used, this `post_patch_with_metadata` interceptor runs after the + `post_patch` interceptor. The (possibly modified) response returned by + `post_patch` will be passed to + `post_patch_with_metadata`. + """ + return response, metadata + def pre_set_labels( self, request: compute.SetLabelsForwardingRuleRequest, @@ -298,12 +438,35 @@ def pre_set_labels( def post_set_labels(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for set_labels - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_set_labels_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response after it is returned by the ForwardingRules server but before - it is returned to user code. + it is returned to user code. This `post_set_labels` interceptor runs + before the `post_set_labels_with_metadata` interceptor. """ return response + def post_set_labels_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for set_labels + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ForwardingRules server but before it is returned to user code. + + We recommend only using this `post_set_labels_with_metadata` + interceptor in new development instead of the `post_set_labels` interceptor. + When both interceptors are used, this `post_set_labels_with_metadata` interceptor runs after the + `post_set_labels` interceptor. The (possibly modified) response returned by + `post_set_labels` will be passed to + `post_set_labels_with_metadata`. + """ + return response, metadata + def pre_set_target( self, request: compute.SetTargetForwardingRuleRequest, @@ -321,12 +484,35 @@ def pre_set_target( def post_set_target(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for set_target - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_set_target_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ForwardingRules server but before - it is returned to user code. + it is returned to user code. This `post_set_target` interceptor runs + before the `post_set_target_with_metadata` interceptor. """ return response + def post_set_target_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for set_target + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ForwardingRules server but before it is returned to user code. + + We recommend only using this `post_set_target_with_metadata` + interceptor in new development instead of the `post_set_target` interceptor. + When both interceptors are used, this `post_set_target_with_metadata` interceptor runs after the + `post_set_target` interceptor. The (possibly modified) response returned by + `post_set_target` will be passed to + `post_set_target_with_metadata`. 
+ """ + return response, metadata + @dataclasses.dataclass class ForwardingRulesRestStub: @@ -537,6 +723,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_aggregated_list(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_aggregated_list_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -702,6 +892,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -858,6 +1052,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1027,6 +1223,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_insert(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_insert_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1173,6 +1373,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1340,6 +1542,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_patch(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_patch_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1509,6 +1715,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_set_labels(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_set_labels_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1678,6 +1888,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_set_target(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_set_target_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/global_addresses/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/global_addresses/client.py index 2fcf027c8bec..b0cd83d9d1c6 100644 
--- a/packages/google-cloud-compute/google/cloud/compute_v1/services/global_addresses/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/global_addresses/client.py @@ -15,6 +15,8 @@ # from collections import OrderedDict import functools +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -461,6 +463,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/global_addresses/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/global_addresses/transports/rest.py index d2110ebc0102..08e7c76ac995 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/global_addresses/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/global_addresses/transports/rest.py @@ -139,12 +139,35 @@ def pre_delete( def post_delete(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for delete - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the GlobalAddresses server but before - it is returned to user code. + it is returned to user code. This `post_delete` interceptor runs + before the `post_delete_with_metadata` interceptor. """ return response + def post_delete_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the GlobalAddresses server but before it is returned to user code. + + We recommend only using this `post_delete_with_metadata` + interceptor in new development instead of the `post_delete` interceptor. + When both interceptors are used, this `post_delete_with_metadata` interceptor runs after the + `post_delete` interceptor. The (possibly modified) response returned by + `post_delete` will be passed to + `post_delete_with_metadata`. + """ + return response, metadata + def pre_get( self, request: compute.GetGlobalAddressRequest, @@ -162,12 +185,35 @@ def pre_get( def post_get(self, response: compute.Address) -> compute.Address: """Post-rpc interceptor for get - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response after it is returned by the GlobalAddresses server but before - it is returned to user code. + it is returned to user code. This `post_get` interceptor runs + before the `post_get_with_metadata` interceptor. """ return response + def post_get_with_metadata( + self, + response: compute.Address, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Address, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the GlobalAddresses server but before it is returned to user code. + + We recommend only using this `post_get_with_metadata` + interceptor in new development instead of the `post_get` interceptor. + When both interceptors are used, this `post_get_with_metadata` interceptor runs after the + `post_get` interceptor. The (possibly modified) response returned by + `post_get` will be passed to + `post_get_with_metadata`. + """ + return response, metadata + def pre_insert( self, request: compute.InsertGlobalAddressRequest, @@ -185,12 +231,35 @@ def pre_insert( def post_insert(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for insert - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_insert_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the GlobalAddresses server but before - it is returned to user code. + it is returned to user code. This `post_insert` interceptor runs + before the `post_insert_with_metadata` interceptor. """ return response + def post_insert_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for insert + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the GlobalAddresses server but before it is returned to user code. + + We recommend only using this `post_insert_with_metadata` + interceptor in new development instead of the `post_insert` interceptor. + When both interceptors are used, this `post_insert_with_metadata` interceptor runs after the + `post_insert` interceptor. The (possibly modified) response returned by + `post_insert` will be passed to + `post_insert_with_metadata`. + """ + return response, metadata + def pre_list( self, request: compute.ListGlobalAddressesRequest, @@ -208,12 +277,35 @@ def pre_list( def post_list(self, response: compute.AddressList) -> compute.AddressList: """Post-rpc interceptor for list - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the GlobalAddresses server but before - it is returned to user code. + it is returned to user code. This `post_list` interceptor runs + before the `post_list_with_metadata` interceptor. """ return response + def post_list_with_metadata( + self, + response: compute.AddressList, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.AddressList, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the GlobalAddresses server but before it is returned to user code. 
+ + We recommend only using this `post_list_with_metadata` + interceptor in new development instead of the `post_list` interceptor. + When both interceptors are used, this `post_list_with_metadata` interceptor runs after the + `post_list` interceptor. The (possibly modified) response returned by + `post_list` will be passed to + `post_list_with_metadata`. + """ + return response, metadata + def pre_move( self, request: compute.MoveGlobalAddressRequest, @@ -231,12 +323,35 @@ def pre_move( def post_move(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for move - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_move_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the GlobalAddresses server but before - it is returned to user code. + it is returned to user code. This `post_move` interceptor runs + before the `post_move_with_metadata` interceptor. """ return response + def post_move_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for move + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the GlobalAddresses server but before it is returned to user code. + + We recommend only using this `post_move_with_metadata` + interceptor in new development instead of the `post_move` interceptor. + When both interceptors are used, this `post_move_with_metadata` interceptor runs after the + `post_move` interceptor. The (possibly modified) response returned by + `post_move` will be passed to + `post_move_with_metadata`. + """ + return response, metadata + def pre_set_labels( self, request: compute.SetLabelsGlobalAddressRequest, @@ -254,12 +369,35 @@ def pre_set_labels( def post_set_labels(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for set_labels - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_set_labels_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the GlobalAddresses server but before - it is returned to user code. + it is returned to user code. This `post_set_labels` interceptor runs + before the `post_set_labels_with_metadata` interceptor. """ return response + def post_set_labels_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for set_labels + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the GlobalAddresses server but before it is returned to user code. + + We recommend only using this `post_set_labels_with_metadata` + interceptor in new development instead of the `post_set_labels` interceptor. + When both interceptors are used, this `post_set_labels_with_metadata` interceptor runs after the + `post_set_labels` interceptor. The (possibly modified) response returned by + `post_set_labels` will be passed to + `post_set_labels_with_metadata`. 
+ """ + return response, metadata + @dataclasses.dataclass class GlobalAddressesRestStub: @@ -491,6 +629,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -642,6 +784,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -811,6 +955,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_insert(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_insert_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -955,6 +1103,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1122,6 +1272,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_move(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_move_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1291,6 +1443,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_set_labels(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_set_labels_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/global_forwarding_rules/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/global_forwarding_rules/client.py index f950a5a25378..7fb8babb8113 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/global_forwarding_rules/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/global_forwarding_rules/client.py @@ -15,6 +15,8 @@ # from collections import OrderedDict import functools +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -463,6 +465,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. 
+ """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/global_forwarding_rules/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/global_forwarding_rules/transports/rest.py index ae74b87b2e59..090695c9737c 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/global_forwarding_rules/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/global_forwarding_rules/transports/rest.py @@ -148,12 +148,35 @@ def pre_delete( def post_delete(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for delete - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the GlobalForwardingRules server but before - it is returned to user code. + it is returned to user code. This `post_delete` interceptor runs + before the `post_delete_with_metadata` interceptor. """ return response + def post_delete_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the GlobalForwardingRules server but before it is returned to user code. + + We recommend only using this `post_delete_with_metadata` + interceptor in new development instead of the `post_delete` interceptor. + When both interceptors are used, this `post_delete_with_metadata` interceptor runs after the + `post_delete` interceptor. The (possibly modified) response returned by + `post_delete` will be passed to + `post_delete_with_metadata`. + """ + return response, metadata + def pre_get( self, request: compute.GetGlobalForwardingRuleRequest, @@ -171,12 +194,35 @@ def pre_get( def post_get(self, response: compute.ForwardingRule) -> compute.ForwardingRule: """Post-rpc interceptor for get - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the GlobalForwardingRules server but before - it is returned to user code. + it is returned to user code. This `post_get` interceptor runs + before the `post_get_with_metadata` interceptor. """ return response + def post_get_with_metadata( + self, + response: compute.ForwardingRule, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.ForwardingRule, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the GlobalForwardingRules server but before it is returned to user code. 
+ + We recommend only using this `post_get_with_metadata` + interceptor in new development instead of the `post_get` interceptor. + When both interceptors are used, this `post_get_with_metadata` interceptor runs after the + `post_get` interceptor. The (possibly modified) response returned by + `post_get` will be passed to + `post_get_with_metadata`. + """ + return response, metadata + def pre_insert( self, request: compute.InsertGlobalForwardingRuleRequest, @@ -195,12 +241,35 @@ def pre_insert( def post_insert(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for insert - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_insert_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the GlobalForwardingRules server but before - it is returned to user code. + it is returned to user code. This `post_insert` interceptor runs + before the `post_insert_with_metadata` interceptor. """ return response + def post_insert_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for insert + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the GlobalForwardingRules server but before it is returned to user code. + + We recommend only using this `post_insert_with_metadata` + interceptor in new development instead of the `post_insert` interceptor. + When both interceptors are used, this `post_insert_with_metadata` interceptor runs after the + `post_insert` interceptor. The (possibly modified) response returned by + `post_insert` will be passed to + `post_insert_with_metadata`. + """ + return response, metadata + def pre_list( self, request: compute.ListGlobalForwardingRulesRequest, @@ -221,12 +290,35 @@ def post_list( ) -> compute.ForwardingRuleList: """Post-rpc interceptor for list - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the GlobalForwardingRules server but before - it is returned to user code. + it is returned to user code. This `post_list` interceptor runs + before the `post_list_with_metadata` interceptor. """ return response + def post_list_with_metadata( + self, + response: compute.ForwardingRuleList, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.ForwardingRuleList, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the GlobalForwardingRules server but before it is returned to user code. + + We recommend only using this `post_list_with_metadata` + interceptor in new development instead of the `post_list` interceptor. + When both interceptors are used, this `post_list_with_metadata` interceptor runs after the + `post_list` interceptor. The (possibly modified) response returned by + `post_list` will be passed to + `post_list_with_metadata`. 
+ """ + return response, metadata + def pre_patch( self, request: compute.PatchGlobalForwardingRuleRequest, @@ -245,12 +337,35 @@ def pre_patch( def post_patch(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for patch - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_patch_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the GlobalForwardingRules server but before - it is returned to user code. + it is returned to user code. This `post_patch` interceptor runs + before the `post_patch_with_metadata` interceptor. """ return response + def post_patch_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for patch + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the GlobalForwardingRules server but before it is returned to user code. + + We recommend only using this `post_patch_with_metadata` + interceptor in new development instead of the `post_patch` interceptor. + When both interceptors are used, this `post_patch_with_metadata` interceptor runs after the + `post_patch` interceptor. The (possibly modified) response returned by + `post_patch` will be passed to + `post_patch_with_metadata`. + """ + return response, metadata + def pre_set_labels( self, request: compute.SetLabelsGlobalForwardingRuleRequest, @@ -269,12 +384,35 @@ def pre_set_labels( def post_set_labels(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for set_labels - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_set_labels_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the GlobalForwardingRules server but before - it is returned to user code. + it is returned to user code. This `post_set_labels` interceptor runs + before the `post_set_labels_with_metadata` interceptor. """ return response + def post_set_labels_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for set_labels + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the GlobalForwardingRules server but before it is returned to user code. + + We recommend only using this `post_set_labels_with_metadata` + interceptor in new development instead of the `post_set_labels` interceptor. + When both interceptors are used, this `post_set_labels_with_metadata` interceptor runs after the + `post_set_labels` interceptor. The (possibly modified) response returned by + `post_set_labels` will be passed to + `post_set_labels_with_metadata`. + """ + return response, metadata + def pre_set_target( self, request: compute.SetTargetGlobalForwardingRuleRequest, @@ -293,12 +431,35 @@ def pre_set_target( def post_set_target(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for set_target - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_set_target_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response after it is returned by the GlobalForwardingRules server but before - it is returned to user code. + it is returned to user code. This `post_set_target` interceptor runs + before the `post_set_target_with_metadata` interceptor. """ return response + def post_set_target_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for set_target + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the GlobalForwardingRules server but before it is returned to user code. + + We recommend only using this `post_set_target_with_metadata` + interceptor in new development instead of the `post_set_target` interceptor. + When both interceptors are used, this `post_set_target_with_metadata` interceptor runs after the + `post_set_target` interceptor. The (possibly modified) response returned by + `post_set_target` will be passed to + `post_set_target_with_metadata`. + """ + return response, metadata + @dataclasses.dataclass class GlobalForwardingRulesRestStub: @@ -527,6 +688,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -683,6 +848,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -849,6 +1016,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_insert(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_insert_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -993,6 +1164,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1159,6 +1332,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_patch(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_patch_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1325,6 +1502,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_set_labels(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_set_labels_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and 
_LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1491,6 +1672,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_set_target(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_set_target_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/global_network_endpoint_groups/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/global_network_endpoint_groups/client.py index 097c349490a8..544514932029 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/global_network_endpoint_groups/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/global_network_endpoint_groups/client.py @@ -15,6 +15,8 @@ # from collections import OrderedDict import functools +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -465,6 +467,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/global_network_endpoint_groups/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/global_network_endpoint_groups/transports/rest.py index e9b4dfd2770f..55e613cc1bab 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/global_network_endpoint_groups/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/global_network_endpoint_groups/transports/rest.py @@ -150,12 +150,35 @@ def post_attach_network_endpoints( ) -> compute.Operation: """Post-rpc interceptor for attach_network_endpoints - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_attach_network_endpoints_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the GlobalNetworkEndpointGroups server but before - it is returned to user code. + it is returned to user code. This `post_attach_network_endpoints` interceptor runs + before the `post_attach_network_endpoints_with_metadata` interceptor. 
""" return response + def post_attach_network_endpoints_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for attach_network_endpoints + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the GlobalNetworkEndpointGroups server but before it is returned to user code. + + We recommend only using this `post_attach_network_endpoints_with_metadata` + interceptor in new development instead of the `post_attach_network_endpoints` interceptor. + When both interceptors are used, this `post_attach_network_endpoints_with_metadata` interceptor runs after the + `post_attach_network_endpoints` interceptor. The (possibly modified) response returned by + `post_attach_network_endpoints` will be passed to + `post_attach_network_endpoints_with_metadata`. + """ + return response, metadata + def pre_delete( self, request: compute.DeleteGlobalNetworkEndpointGroupRequest, @@ -174,12 +197,35 @@ def pre_delete( def post_delete(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for delete - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the GlobalNetworkEndpointGroups server but before - it is returned to user code. + it is returned to user code. This `post_delete` interceptor runs + before the `post_delete_with_metadata` interceptor. """ return response + def post_delete_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the GlobalNetworkEndpointGroups server but before it is returned to user code. + + We recommend only using this `post_delete_with_metadata` + interceptor in new development instead of the `post_delete` interceptor. + When both interceptors are used, this `post_delete_with_metadata` interceptor runs after the + `post_delete` interceptor. The (possibly modified) response returned by + `post_delete` will be passed to + `post_delete_with_metadata`. + """ + return response, metadata + def pre_detach_network_endpoints( self, request: compute.DetachNetworkEndpointsGlobalNetworkEndpointGroupRequest, @@ -200,12 +246,35 @@ def post_detach_network_endpoints( ) -> compute.Operation: """Post-rpc interceptor for detach_network_endpoints - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_detach_network_endpoints_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the GlobalNetworkEndpointGroups server but before - it is returned to user code. + it is returned to user code. This `post_detach_network_endpoints` interceptor runs + before the `post_detach_network_endpoints_with_metadata` interceptor. 
""" return response + def post_detach_network_endpoints_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for detach_network_endpoints + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the GlobalNetworkEndpointGroups server but before it is returned to user code. + + We recommend only using this `post_detach_network_endpoints_with_metadata` + interceptor in new development instead of the `post_detach_network_endpoints` interceptor. + When both interceptors are used, this `post_detach_network_endpoints_with_metadata` interceptor runs after the + `post_detach_network_endpoints` interceptor. The (possibly modified) response returned by + `post_detach_network_endpoints` will be passed to + `post_detach_network_endpoints_with_metadata`. + """ + return response, metadata + def pre_get( self, request: compute.GetGlobalNetworkEndpointGroupRequest, @@ -226,12 +295,35 @@ def post_get( ) -> compute.NetworkEndpointGroup: """Post-rpc interceptor for get - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the GlobalNetworkEndpointGroups server but before - it is returned to user code. + it is returned to user code. This `post_get` interceptor runs + before the `post_get_with_metadata` interceptor. """ return response + def post_get_with_metadata( + self, + response: compute.NetworkEndpointGroup, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.NetworkEndpointGroup, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the GlobalNetworkEndpointGroups server but before it is returned to user code. + + We recommend only using this `post_get_with_metadata` + interceptor in new development instead of the `post_get` interceptor. + When both interceptors are used, this `post_get_with_metadata` interceptor runs after the + `post_get` interceptor. The (possibly modified) response returned by + `post_get` will be passed to + `post_get_with_metadata`. + """ + return response, metadata + def pre_insert( self, request: compute.InsertGlobalNetworkEndpointGroupRequest, @@ -250,12 +342,35 @@ def pre_insert( def post_insert(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for insert - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_insert_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the GlobalNetworkEndpointGroups server but before - it is returned to user code. + it is returned to user code. This `post_insert` interceptor runs + before the `post_insert_with_metadata` interceptor. """ return response + def post_insert_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for insert + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the GlobalNetworkEndpointGroups server but before it is returned to user code. 
+ + We recommend only using this `post_insert_with_metadata` + interceptor in new development instead of the `post_insert` interceptor. + When both interceptors are used, this `post_insert_with_metadata` interceptor runs after the + `post_insert` interceptor. The (possibly modified) response returned by + `post_insert` will be passed to + `post_insert_with_metadata`. + """ + return response, metadata + def pre_list( self, request: compute.ListGlobalNetworkEndpointGroupsRequest, @@ -276,12 +391,37 @@ def post_list( ) -> compute.NetworkEndpointGroupList: """Post-rpc interceptor for list - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the GlobalNetworkEndpointGroups server but before - it is returned to user code. + it is returned to user code. This `post_list` interceptor runs + before the `post_list_with_metadata` interceptor. """ return response + def post_list_with_metadata( + self, + response: compute.NetworkEndpointGroupList, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.NetworkEndpointGroupList, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the GlobalNetworkEndpointGroups server but before it is returned to user code. + + We recommend only using this `post_list_with_metadata` + interceptor in new development instead of the `post_list` interceptor. + When both interceptors are used, this `post_list_with_metadata` interceptor runs after the + `post_list` interceptor. The (possibly modified) response returned by + `post_list` will be passed to + `post_list_with_metadata`. + """ + return response, metadata + def pre_list_network_endpoints( self, request: compute.ListNetworkEndpointsGlobalNetworkEndpointGroupsRequest, @@ -302,12 +442,38 @@ def post_list_network_endpoints( ) -> compute.NetworkEndpointGroupsListNetworkEndpoints: """Post-rpc interceptor for list_network_endpoints - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_network_endpoints_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the GlobalNetworkEndpointGroups server but before - it is returned to user code. + it is returned to user code. This `post_list_network_endpoints` interceptor runs + before the `post_list_network_endpoints_with_metadata` interceptor. """ return response + def post_list_network_endpoints_with_metadata( + self, + response: compute.NetworkEndpointGroupsListNetworkEndpoints, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.NetworkEndpointGroupsListNetworkEndpoints, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_network_endpoints + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the GlobalNetworkEndpointGroups server but before it is returned to user code. + + We recommend only using this `post_list_network_endpoints_with_metadata` + interceptor in new development instead of the `post_list_network_endpoints` interceptor. + When both interceptors are used, this `post_list_network_endpoints_with_metadata` interceptor runs after the + `post_list_network_endpoints` interceptor. 
The (possibly modified) response returned by + `post_list_network_endpoints` will be passed to + `post_list_network_endpoints_with_metadata`. + """ + return response, metadata + @dataclasses.dataclass class GlobalNetworkEndpointGroupsRestStub: @@ -548,6 +714,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_attach_network_endpoints(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_attach_network_endpoints_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -708,6 +878,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -878,6 +1052,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_detach_network_endpoints(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_detach_network_endpoints_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1029,6 +1207,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1195,6 +1375,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_insert(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_insert_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1338,6 +1522,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1485,6 +1671,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_network_endpoints(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_network_endpoints_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/global_operations/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/global_operations/client.py index a1223f9a9418..37b2d5b9d914 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/global_operations/client.py +++ 
b/packages/google-cloud-compute/google/cloud/compute_v1/services/global_operations/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -458,6 +460,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/global_operations/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/global_operations/transports/rest.py index f7ca4bae9469..e029431ca2c7 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/global_operations/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/global_operations/transports/rest.py @@ -134,12 +134,37 @@ def post_aggregated_list( ) -> compute.OperationAggregatedList: """Post-rpc interceptor for aggregated_list - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_aggregated_list_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the GlobalOperations server but before - it is returned to user code. + it is returned to user code. This `post_aggregated_list` interceptor runs + before the `post_aggregated_list_with_metadata` interceptor. """ return response + def post_aggregated_list_with_metadata( + self, + response: compute.OperationAggregatedList, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.OperationAggregatedList, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for aggregated_list + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the GlobalOperations server but before it is returned to user code. + + We recommend only using this `post_aggregated_list_with_metadata` + interceptor in new development instead of the `post_aggregated_list` interceptor. + When both interceptors are used, this `post_aggregated_list_with_metadata` interceptor runs after the + `post_aggregated_list` interceptor. The (possibly modified) response returned by + `post_aggregated_list` will be passed to + `post_aggregated_list_with_metadata`. + """ + return response, metadata + def pre_delete( self, request: compute.DeleteGlobalOperationRequest, @@ -159,12 +184,37 @@ def post_delete( ) -> compute.DeleteGlobalOperationResponse: """Post-rpc interceptor for delete - Override in a subclass to manipulate the response + DEPRECATED. 
Please use the `post_delete_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the GlobalOperations server but before - it is returned to user code. + it is returned to user code. This `post_delete` interceptor runs + before the `post_delete_with_metadata` interceptor. """ return response + def post_delete_with_metadata( + self, + response: compute.DeleteGlobalOperationResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.DeleteGlobalOperationResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for delete + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the GlobalOperations server but before it is returned to user code. + + We recommend only using this `post_delete_with_metadata` + interceptor in new development instead of the `post_delete` interceptor. + When both interceptors are used, this `post_delete_with_metadata` interceptor runs after the + `post_delete` interceptor. The (possibly modified) response returned by + `post_delete` will be passed to + `post_delete_with_metadata`. + """ + return response, metadata + def pre_get( self, request: compute.GetGlobalOperationRequest, @@ -182,12 +232,35 @@ def pre_get( def post_get(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for get - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the GlobalOperations server but before - it is returned to user code. + it is returned to user code. This `post_get` interceptor runs + before the `post_get_with_metadata` interceptor. """ return response + def post_get_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the GlobalOperations server but before it is returned to user code. + + We recommend only using this `post_get_with_metadata` + interceptor in new development instead of the `post_get` interceptor. + When both interceptors are used, this `post_get_with_metadata` interceptor runs after the + `post_get` interceptor. The (possibly modified) response returned by + `post_get` will be passed to + `post_get_with_metadata`. + """ + return response, metadata + def pre_list( self, request: compute.ListGlobalOperationsRequest, @@ -205,12 +278,35 @@ def pre_list( def post_list(self, response: compute.OperationList) -> compute.OperationList: """Post-rpc interceptor for list - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the GlobalOperations server but before - it is returned to user code. + it is returned to user code. This `post_list` interceptor runs + before the `post_list_with_metadata` interceptor. 
""" return response + def post_list_with_metadata( + self, + response: compute.OperationList, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.OperationList, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the GlobalOperations server but before it is returned to user code. + + We recommend only using this `post_list_with_metadata` + interceptor in new development instead of the `post_list` interceptor. + When both interceptors are used, this `post_list_with_metadata` interceptor runs after the + `post_list` interceptor. The (possibly modified) response returned by + `post_list` will be passed to + `post_list_with_metadata`. + """ + return response, metadata + def pre_wait( self, request: compute.WaitGlobalOperationRequest, @@ -228,12 +324,35 @@ def pre_wait( def post_wait(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for wait - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_wait_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the GlobalOperations server but before - it is returned to user code. + it is returned to user code. This `post_wait` interceptor runs + before the `post_wait_with_metadata` interceptor. """ return response + def post_wait_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for wait + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the GlobalOperations server but before it is returned to user code. + + We recommend only using this `post_wait_with_metadata` + interceptor in new development instead of the `post_wait` interceptor. + When both interceptors are used, this `post_wait_with_metadata` interceptor runs after the + `post_wait` interceptor. The (possibly modified) response returned by + `post_wait` will be passed to + `post_wait_with_metadata`. 
+ """ + return response, metadata + @dataclasses.dataclass class GlobalOperationsRestStub: @@ -444,6 +563,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_aggregated_list(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_aggregated_list_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -593,6 +716,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -756,6 +883,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -902,6 +1031,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1063,6 +1194,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_wait(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_wait_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/global_organization_operations/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/global_organization_operations/client.py index 9713ec40bd5c..3a5a1431f091 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/global_organization_operations/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/global_organization_operations/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -462,6 +464,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. 
+ """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/global_organization_operations/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/global_organization_operations/transports/rest.py index f84931824737..7f3938ac0ac8 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/global_organization_operations/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/global_organization_operations/transports/rest.py @@ -118,12 +118,38 @@ def post_delete( ) -> compute.DeleteGlobalOrganizationOperationResponse: """Post-rpc interceptor for delete - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the GlobalOrganizationOperations server but before - it is returned to user code. + it is returned to user code. This `post_delete` interceptor runs + before the `post_delete_with_metadata` interceptor. """ return response + def post_delete_with_metadata( + self, + response: compute.DeleteGlobalOrganizationOperationResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.DeleteGlobalOrganizationOperationResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for delete + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the GlobalOrganizationOperations server but before it is returned to user code. + + We recommend only using this `post_delete_with_metadata` + interceptor in new development instead of the `post_delete` interceptor. + When both interceptors are used, this `post_delete_with_metadata` interceptor runs after the + `post_delete` interceptor. The (possibly modified) response returned by + `post_delete` will be passed to + `post_delete_with_metadata`. + """ + return response, metadata + def pre_get( self, request: compute.GetGlobalOrganizationOperationRequest, @@ -142,12 +168,35 @@ def pre_get( def post_get(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for get - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the GlobalOrganizationOperations server but before - it is returned to user code. + it is returned to user code. This `post_get` interceptor runs + before the `post_get_with_metadata` interceptor. 
""" return response + def post_get_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the GlobalOrganizationOperations server but before it is returned to user code. + + We recommend only using this `post_get_with_metadata` + interceptor in new development instead of the `post_get` interceptor. + When both interceptors are used, this `post_get_with_metadata` interceptor runs after the + `post_get` interceptor. The (possibly modified) response returned by + `post_get` will be passed to + `post_get_with_metadata`. + """ + return response, metadata + def pre_list( self, request: compute.ListGlobalOrganizationOperationsRequest, @@ -166,12 +215,35 @@ def pre_list( def post_list(self, response: compute.OperationList) -> compute.OperationList: """Post-rpc interceptor for list - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the GlobalOrganizationOperations server but before - it is returned to user code. + it is returned to user code. This `post_list` interceptor runs + before the `post_list_with_metadata` interceptor. """ return response + def post_list_with_metadata( + self, + response: compute.OperationList, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.OperationList, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the GlobalOrganizationOperations server but before it is returned to user code. + + We recommend only using this `post_list_with_metadata` + interceptor in new development instead of the `post_list` interceptor. + When both interceptors are used, this `post_list_with_metadata` interceptor runs after the + `post_list` interceptor. The (possibly modified) response returned by + `post_list` will be passed to + `post_list_with_metadata`. 
+ """ + return response, metadata + @dataclasses.dataclass class GlobalOrganizationOperationsRestStub: @@ -388,6 +460,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -552,6 +628,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -697,6 +775,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/global_public_delegated_prefixes/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/global_public_delegated_prefixes/client.py index a03981a443fb..e0a32a3b3a1d 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/global_public_delegated_prefixes/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/global_public_delegated_prefixes/client.py @@ -15,6 +15,8 @@ # from collections import OrderedDict import functools +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -465,6 +467,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. 
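The `_add_cred_info_for_auth_errors` helper added to each client above only acts on 401/403/404 failures, and only when the credentials object exposes `get_cred_info()` (available in google-auth >= 2.35.0); in that case a JSON-encoded description of the active credential is appended to the exception's details. A minimal sketch of how that can surface to calling code, assuming the generated client re-raises the enriched error and that the flattened `get(project=..., operation=...)` signature is available (the project and operation names are placeholders):

from google.api_core import exceptions as core_exceptions
from google.cloud import compute_v1

client = compute_v1.GlobalOperationsClient()

try:
    client.get(project="my-project", operation="operation-that-does-not-exist")
except core_exceptions.NotFound as exc:
    # With google-auth >= 2.35.0, a JSON string describing the credential is
    # appended to the error details for 401/403/404 responses.
    print(exc.details)

Nothing changes for callers that ignore the details; the extra entry only appears when the credential supports `get_cred_info()`.
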
diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/global_public_delegated_prefixes/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/global_public_delegated_prefixes/transports/rest.py index 12aa802cb36c..c628a54fb6ab 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/global_public_delegated_prefixes/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/global_public_delegated_prefixes/transports/rest.py @@ -132,12 +132,35 @@ def pre_delete( def post_delete(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for delete - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the GlobalPublicDelegatedPrefixes server but before - it is returned to user code. + it is returned to user code. This `post_delete` interceptor runs + before the `post_delete_with_metadata` interceptor. """ return response + def post_delete_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the GlobalPublicDelegatedPrefixes server but before it is returned to user code. + + We recommend only using this `post_delete_with_metadata` + interceptor in new development instead of the `post_delete` interceptor. + When both interceptors are used, this `post_delete_with_metadata` interceptor runs after the + `post_delete` interceptor. The (possibly modified) response returned by + `post_delete` will be passed to + `post_delete_with_metadata`. + """ + return response, metadata + def pre_get( self, request: compute.GetGlobalPublicDelegatedPrefixeRequest, @@ -158,12 +181,35 @@ def post_get( ) -> compute.PublicDelegatedPrefix: """Post-rpc interceptor for get - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the GlobalPublicDelegatedPrefixes server but before - it is returned to user code. + it is returned to user code. This `post_get` interceptor runs + before the `post_get_with_metadata` interceptor. """ return response + def post_get_with_metadata( + self, + response: compute.PublicDelegatedPrefix, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.PublicDelegatedPrefix, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the GlobalPublicDelegatedPrefixes server but before it is returned to user code. + + We recommend only using this `post_get_with_metadata` + interceptor in new development instead of the `post_get` interceptor. + When both interceptors are used, this `post_get_with_metadata` interceptor runs after the + `post_get` interceptor. The (possibly modified) response returned by + `post_get` will be passed to + `post_get_with_metadata`. 
+ """ + return response, metadata + def pre_insert( self, request: compute.InsertGlobalPublicDelegatedPrefixeRequest, @@ -182,12 +228,35 @@ def pre_insert( def post_insert(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for insert - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_insert_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the GlobalPublicDelegatedPrefixes server but before - it is returned to user code. + it is returned to user code. This `post_insert` interceptor runs + before the `post_insert_with_metadata` interceptor. """ return response + def post_insert_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for insert + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the GlobalPublicDelegatedPrefixes server but before it is returned to user code. + + We recommend only using this `post_insert_with_metadata` + interceptor in new development instead of the `post_insert` interceptor. + When both interceptors are used, this `post_insert_with_metadata` interceptor runs after the + `post_insert` interceptor. The (possibly modified) response returned by + `post_insert` will be passed to + `post_insert_with_metadata`. + """ + return response, metadata + def pre_list( self, request: compute.ListGlobalPublicDelegatedPrefixesRequest, @@ -208,12 +277,37 @@ def post_list( ) -> compute.PublicDelegatedPrefixList: """Post-rpc interceptor for list - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the GlobalPublicDelegatedPrefixes server but before - it is returned to user code. + it is returned to user code. This `post_list` interceptor runs + before the `post_list_with_metadata` interceptor. """ return response + def post_list_with_metadata( + self, + response: compute.PublicDelegatedPrefixList, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.PublicDelegatedPrefixList, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the GlobalPublicDelegatedPrefixes server but before it is returned to user code. + + We recommend only using this `post_list_with_metadata` + interceptor in new development instead of the `post_list` interceptor. + When both interceptors are used, this `post_list_with_metadata` interceptor runs after the + `post_list` interceptor. The (possibly modified) response returned by + `post_list` will be passed to + `post_list_with_metadata`. + """ + return response, metadata + def pre_patch( self, request: compute.PatchGlobalPublicDelegatedPrefixeRequest, @@ -232,12 +326,35 @@ def pre_patch( def post_patch(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for patch - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_patch_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the GlobalPublicDelegatedPrefixes server but before - it is returned to user code. + it is returned to user code. 
This `post_patch` interceptor runs + before the `post_patch_with_metadata` interceptor. """ return response + def post_patch_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for patch + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the GlobalPublicDelegatedPrefixes server but before it is returned to user code. + + We recommend only using this `post_patch_with_metadata` + interceptor in new development instead of the `post_patch` interceptor. + When both interceptors are used, this `post_patch_with_metadata` interceptor runs after the + `post_patch` interceptor. The (possibly modified) response returned by + `post_patch` will be passed to + `post_patch_with_metadata`. + """ + return response, metadata + @dataclasses.dataclass class GlobalPublicDelegatedPrefixesRestStub: @@ -470,6 +587,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -622,6 +743,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -788,6 +911,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_insert(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_insert_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -931,6 +1058,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1099,6 +1228,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_patch(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_patch_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/health_checks/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/health_checks/client.py index 6e2b31ffafba..e5bad572065c 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/health_checks/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/health_checks/client.py @@ -15,6 +15,8 @@ # from collections import OrderedDict import functools +from http import HTTPStatus +import json import logging as std_logging import os import re 
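Every generated `__call__` wires the two hooks the same way: the deprecated `post_*` interceptor runs first, then the HTTP response headers are converted to `(key, value)` metadata and passed to `post_*_with_metadata`, whose returned metadata is discarded. A stand-alone sketch of that ordering, using stand-in objects rather than the generated transport:

# Stand-alone sketch of the ordering used in the __call__ methods above;
# FakeResponse and BaseInterceptor are stand-ins, not part of the library.
from typing import Sequence, Tuple, Union


class FakeResponse:
    headers = {"content-type": "application/json", "x-request-id": "abc123"}


class BaseInterceptor:
    def post_get(self, resp):
        # Deprecated hook: runs first and may replace the response.
        return resp

    def post_get_with_metadata(
        self, resp, metadata: Sequence[Tuple[str, Union[str, bytes]]]
    ):
        # Newer hook: receives the (possibly modified) response plus the header metadata.
        return resp, metadata


interceptor = BaseInterceptor()
response = FakeResponse()
resp = {"name": "operation-123"}  # stand-in for the parsed proto message

resp = interceptor.post_get(resp)
response_metadata = [(k, str(v)) for k, v in response.headers.items()]
resp, _ = interceptor.post_get_with_metadata(resp, response_metadata)
print(resp, response_metadata)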
@@ -459,6 +461,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/health_checks/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/health_checks/transports/rest.py index c5f43ca900b5..8a9d050a700f 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/health_checks/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/health_checks/transports/rest.py @@ -150,12 +150,37 @@ def post_aggregated_list( ) -> compute.HealthChecksAggregatedList: """Post-rpc interceptor for aggregated_list - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_aggregated_list_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the HealthChecks server but before - it is returned to user code. + it is returned to user code. This `post_aggregated_list` interceptor runs + before the `post_aggregated_list_with_metadata` interceptor. """ return response + def post_aggregated_list_with_metadata( + self, + response: compute.HealthChecksAggregatedList, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.HealthChecksAggregatedList, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for aggregated_list + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the HealthChecks server but before it is returned to user code. + + We recommend only using this `post_aggregated_list_with_metadata` + interceptor in new development instead of the `post_aggregated_list` interceptor. + When both interceptors are used, this `post_aggregated_list_with_metadata` interceptor runs after the + `post_aggregated_list` interceptor. The (possibly modified) response returned by + `post_aggregated_list` will be passed to + `post_aggregated_list_with_metadata`. + """ + return response, metadata + def pre_delete( self, request: compute.DeleteHealthCheckRequest, @@ -173,12 +198,35 @@ def pre_delete( def post_delete(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for delete - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the HealthChecks server but before - it is returned to user code. + it is returned to user code. 
This `post_delete` interceptor runs + before the `post_delete_with_metadata` interceptor. """ return response + def post_delete_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the HealthChecks server but before it is returned to user code. + + We recommend only using this `post_delete_with_metadata` + interceptor in new development instead of the `post_delete` interceptor. + When both interceptors are used, this `post_delete_with_metadata` interceptor runs after the + `post_delete` interceptor. The (possibly modified) response returned by + `post_delete` will be passed to + `post_delete_with_metadata`. + """ + return response, metadata + def pre_get( self, request: compute.GetHealthCheckRequest, @@ -194,12 +242,35 @@ def pre_get( def post_get(self, response: compute.HealthCheck) -> compute.HealthCheck: """Post-rpc interceptor for get - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the HealthChecks server but before - it is returned to user code. + it is returned to user code. This `post_get` interceptor runs + before the `post_get_with_metadata` interceptor. """ return response + def post_get_with_metadata( + self, + response: compute.HealthCheck, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.HealthCheck, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the HealthChecks server but before it is returned to user code. + + We recommend only using this `post_get_with_metadata` + interceptor in new development instead of the `post_get` interceptor. + When both interceptors are used, this `post_get_with_metadata` interceptor runs after the + `post_get` interceptor. The (possibly modified) response returned by + `post_get` will be passed to + `post_get_with_metadata`. + """ + return response, metadata + def pre_insert( self, request: compute.InsertHealthCheckRequest, @@ -217,12 +288,35 @@ def pre_insert( def post_insert(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for insert - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_insert_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the HealthChecks server but before - it is returned to user code. + it is returned to user code. This `post_insert` interceptor runs + before the `post_insert_with_metadata` interceptor. """ return response + def post_insert_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for insert + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the HealthChecks server but before it is returned to user code. + + We recommend only using this `post_insert_with_metadata` + interceptor in new development instead of the `post_insert` interceptor. 
+ When both interceptors are used, this `post_insert_with_metadata` interceptor runs after the + `post_insert` interceptor. The (possibly modified) response returned by + `post_insert` will be passed to + `post_insert_with_metadata`. + """ + return response, metadata + def pre_list( self, request: compute.ListHealthChecksRequest, @@ -240,12 +334,35 @@ def pre_list( def post_list(self, response: compute.HealthCheckList) -> compute.HealthCheckList: """Post-rpc interceptor for list - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the HealthChecks server but before - it is returned to user code. + it is returned to user code. This `post_list` interceptor runs + before the `post_list_with_metadata` interceptor. """ return response + def post_list_with_metadata( + self, + response: compute.HealthCheckList, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.HealthCheckList, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the HealthChecks server but before it is returned to user code. + + We recommend only using this `post_list_with_metadata` + interceptor in new development instead of the `post_list` interceptor. + When both interceptors are used, this `post_list_with_metadata` interceptor runs after the + `post_list` interceptor. The (possibly modified) response returned by + `post_list` will be passed to + `post_list_with_metadata`. + """ + return response, metadata + def pre_patch( self, request: compute.PatchHealthCheckRequest, @@ -263,12 +380,35 @@ def pre_patch( def post_patch(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for patch - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_patch_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the HealthChecks server but before - it is returned to user code. + it is returned to user code. This `post_patch` interceptor runs + before the `post_patch_with_metadata` interceptor. """ return response + def post_patch_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for patch + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the HealthChecks server but before it is returned to user code. + + We recommend only using this `post_patch_with_metadata` + interceptor in new development instead of the `post_patch` interceptor. + When both interceptors are used, this `post_patch_with_metadata` interceptor runs after the + `post_patch` interceptor. The (possibly modified) response returned by + `post_patch` will be passed to + `post_patch_with_metadata`. + """ + return response, metadata + def pre_update( self, request: compute.UpdateHealthCheckRequest, @@ -286,12 +426,35 @@ def pre_update( def post_update(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for update - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response after it is returned by the HealthChecks server but before - it is returned to user code. + it is returned to user code. This `post_update` interceptor runs + before the `post_update_with_metadata` interceptor. """ return response + def post_update_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the HealthChecks server but before it is returned to user code. + + We recommend only using this `post_update_with_metadata` + interceptor in new development instead of the `post_update` interceptor. + When both interceptors are used, this `post_update_with_metadata` interceptor runs after the + `post_update` interceptor. The (possibly modified) response returned by + `post_update` will be passed to + `post_update_with_metadata`. + """ + return response, metadata + @dataclasses.dataclass class HealthChecksRestStub: @@ -502,6 +665,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_aggregated_list(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_aggregated_list_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -665,6 +832,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -824,6 +995,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -991,6 +1164,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_insert(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_insert_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1135,6 +1312,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1300,6 +1479,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_patch(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_patch_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER 
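The `_add_cred_info_for_auth_errors` helper added to each client only acts on 401/403/404 responses, and only when the credentials expose `get_cred_info()` (google-auth >= 2.35.0). A minimal stand-alone sketch of that decision flow, with stand-in error and credential objects instead of the real api_core and google-auth types:

# Stand-alone sketch of the logic in _add_cred_info_for_auth_errors;
# FakeCredentials and FakeError are stand-ins, and the cred-info payload
# below is illustrative only.
import json
from http import HTTPStatus


class FakeCredentials:
    def get_cred_info(self):
        # google-auth>=2.35.0 exposes get_cred_info(); the actual keys and
        # values depend on the credential type.
        return {"credential_type": "user credentials"}


class FakeError:
    def __init__(self):
        self.code = 403       # api_core exceptions expose the HTTP status as an int
        self._details = []    # details list the helper appends to


def add_cred_info_for_auth_errors(error, cred) -> None:
    # Only decorate 401/403/404 errors, mirroring the generated helper.
    if error.code not in (
        HTTPStatus.UNAUTHORIZED,
        HTTPStatus.FORBIDDEN,
        HTTPStatus.NOT_FOUND,
    ):
        return
    # Skip silently when the credentials predate get_cred_info().
    if not hasattr(cred, "get_cred_info"):
        return
    cred_info = cred.get_cred_info()
    if cred_info and hasattr(error._details, "append"):
        error._details.append(json.dumps(cred_info))


error = FakeError()
add_cred_info_for_auth_errors(error, FakeCredentials())
print(error._details)  # the JSON-encoded cred info is now part of the error details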
@@ -1467,6 +1650,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/image_family_views/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/image_family_views/client.py index 9d3f03c0d5ed..bef11367f804 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/image_family_views/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/image_family_views/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -457,6 +459,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/image_family_views/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/image_family_views/transports/rest.py index f7ea98547660..3145d87c883a 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/image_family_views/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/image_family_views/transports/rest.py @@ -99,12 +99,35 @@ def pre_get( def post_get(self, response: compute.ImageFamilyView) -> compute.ImageFamilyView: """Post-rpc interceptor for get - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ImageFamilyViews server but before - it is returned to user code. + it is returned to user code. This `post_get` interceptor runs + before the `post_get_with_metadata` interceptor. """ return response + def post_get_with_metadata( + self, + response: compute.ImageFamilyView, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.ImageFamilyView, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ImageFamilyViews server but before it is returned to user code. 
+ + We recommend only using this `post_get_with_metadata` + interceptor in new development instead of the `post_get` interceptor. + When both interceptors are used, this `post_get_with_metadata` interceptor runs after the + `post_get` interceptor. The (possibly modified) response returned by + `post_get` will be passed to + `post_get_with_metadata`. + """ + return response, metadata + @dataclasses.dataclass class ImageFamilyViewsRestStub: @@ -317,6 +340,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/images/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/images/client.py index 9bc48fc557f0..8dddd5b5d633 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/images/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/images/client.py @@ -15,6 +15,8 @@ # from collections import OrderedDict import functools +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -459,6 +461,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/images/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/images/transports/rest.py index 98f84f48aa20..51b78e430adb 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/images/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/images/transports/rest.py @@ -177,12 +177,35 @@ def pre_delete( def post_delete(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for delete - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Images server but before - it is returned to user code. + it is returned to user code. This `post_delete` interceptor runs + before the `post_delete_with_metadata` interceptor. 
""" return response + def post_delete_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Images server but before it is returned to user code. + + We recommend only using this `post_delete_with_metadata` + interceptor in new development instead of the `post_delete` interceptor. + When both interceptors are used, this `post_delete_with_metadata` interceptor runs after the + `post_delete` interceptor. The (possibly modified) response returned by + `post_delete` will be passed to + `post_delete_with_metadata`. + """ + return response, metadata + def pre_deprecate( self, request: compute.DeprecateImageRequest, @@ -198,12 +221,35 @@ def pre_deprecate( def post_deprecate(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for deprecate - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_deprecate_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Images server but before - it is returned to user code. + it is returned to user code. This `post_deprecate` interceptor runs + before the `post_deprecate_with_metadata` interceptor. """ return response + def post_deprecate_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for deprecate + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Images server but before it is returned to user code. + + We recommend only using this `post_deprecate_with_metadata` + interceptor in new development instead of the `post_deprecate` interceptor. + When both interceptors are used, this `post_deprecate_with_metadata` interceptor runs after the + `post_deprecate` interceptor. The (possibly modified) response returned by + `post_deprecate` will be passed to + `post_deprecate_with_metadata`. + """ + return response, metadata + def pre_get( self, request: compute.GetImageRequest, @@ -219,12 +265,33 @@ def pre_get( def post_get(self, response: compute.Image) -> compute.Image: """Post-rpc interceptor for get - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Images server but before - it is returned to user code. + it is returned to user code. This `post_get` interceptor runs + before the `post_get_with_metadata` interceptor. """ return response + def post_get_with_metadata( + self, response: compute.Image, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[compute.Image, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Images server but before it is returned to user code. + + We recommend only using this `post_get_with_metadata` + interceptor in new development instead of the `post_get` interceptor. + When both interceptors are used, this `post_get_with_metadata` interceptor runs after the + `post_get` interceptor. 
The (possibly modified) response returned by + `post_get` will be passed to + `post_get_with_metadata`. + """ + return response, metadata + def pre_get_from_family( self, request: compute.GetFromFamilyImageRequest, @@ -242,12 +309,33 @@ def pre_get_from_family( def post_get_from_family(self, response: compute.Image) -> compute.Image: """Post-rpc interceptor for get_from_family - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_from_family_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Images server but before - it is returned to user code. + it is returned to user code. This `post_get_from_family` interceptor runs + before the `post_get_from_family_with_metadata` interceptor. """ return response + def post_get_from_family_with_metadata( + self, response: compute.Image, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[compute.Image, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_from_family + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Images server but before it is returned to user code. + + We recommend only using this `post_get_from_family_with_metadata` + interceptor in new development instead of the `post_get_from_family` interceptor. + When both interceptors are used, this `post_get_from_family_with_metadata` interceptor runs after the + `post_get_from_family` interceptor. The (possibly modified) response returned by + `post_get_from_family` will be passed to + `post_get_from_family_with_metadata`. + """ + return response, metadata + def pre_get_iam_policy( self, request: compute.GetIamPolicyImageRequest, @@ -265,12 +353,35 @@ def pre_get_iam_policy( def post_get_iam_policy(self, response: compute.Policy) -> compute.Policy: """Post-rpc interceptor for get_iam_policy - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_iam_policy_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Images server but before - it is returned to user code. + it is returned to user code. This `post_get_iam_policy` interceptor runs + before the `post_get_iam_policy_with_metadata` interceptor. """ return response + def post_get_iam_policy_with_metadata( + self, + response: compute.Policy, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Policy, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_iam_policy + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Images server but before it is returned to user code. + + We recommend only using this `post_get_iam_policy_with_metadata` + interceptor in new development instead of the `post_get_iam_policy` interceptor. + When both interceptors are used, this `post_get_iam_policy_with_metadata` interceptor runs after the + `post_get_iam_policy` interceptor. The (possibly modified) response returned by + `post_get_iam_policy` will be passed to + `post_get_iam_policy_with_metadata`. + """ + return response, metadata + def pre_insert( self, request: compute.InsertImageRequest, @@ -286,12 +397,35 @@ def pre_insert( def post_insert(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for insert - Override in a subclass to manipulate the response + DEPRECATED. 
Please use the `post_insert_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Images server but before - it is returned to user code. + it is returned to user code. This `post_insert` interceptor runs + before the `post_insert_with_metadata` interceptor. """ return response + def post_insert_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for insert + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Images server but before it is returned to user code. + + We recommend only using this `post_insert_with_metadata` + interceptor in new development instead of the `post_insert` interceptor. + When both interceptors are used, this `post_insert_with_metadata` interceptor runs after the + `post_insert` interceptor. The (possibly modified) response returned by + `post_insert` will be passed to + `post_insert_with_metadata`. + """ + return response, metadata + def pre_list( self, request: compute.ListImagesRequest, @@ -307,12 +441,35 @@ def pre_list( def post_list(self, response: compute.ImageList) -> compute.ImageList: """Post-rpc interceptor for list - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Images server but before - it is returned to user code. + it is returned to user code. This `post_list` interceptor runs + before the `post_list_with_metadata` interceptor. """ return response + def post_list_with_metadata( + self, + response: compute.ImageList, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.ImageList, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Images server but before it is returned to user code. + + We recommend only using this `post_list_with_metadata` + interceptor in new development instead of the `post_list` interceptor. + When both interceptors are used, this `post_list_with_metadata` interceptor runs after the + `post_list` interceptor. The (possibly modified) response returned by + `post_list` will be passed to + `post_list_with_metadata`. + """ + return response, metadata + def pre_patch( self, request: compute.PatchImageRequest, @@ -328,12 +485,35 @@ def pre_patch( def post_patch(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for patch - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_patch_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Images server but before - it is returned to user code. + it is returned to user code. This `post_patch` interceptor runs + before the `post_patch_with_metadata` interceptor. """ return response + def post_patch_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for patch + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Images server but before it is returned to user code. 
+ + We recommend only using this `post_patch_with_metadata` + interceptor in new development instead of the `post_patch` interceptor. + When both interceptors are used, this `post_patch_with_metadata` interceptor runs after the + `post_patch` interceptor. The (possibly modified) response returned by + `post_patch` will be passed to + `post_patch_with_metadata`. + """ + return response, metadata + def pre_set_iam_policy( self, request: compute.SetIamPolicyImageRequest, @@ -351,12 +531,35 @@ def pre_set_iam_policy( def post_set_iam_policy(self, response: compute.Policy) -> compute.Policy: """Post-rpc interceptor for set_iam_policy - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_set_iam_policy_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Images server but before - it is returned to user code. + it is returned to user code. This `post_set_iam_policy` interceptor runs + before the `post_set_iam_policy_with_metadata` interceptor. """ return response + def post_set_iam_policy_with_metadata( + self, + response: compute.Policy, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Policy, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for set_iam_policy + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Images server but before it is returned to user code. + + We recommend only using this `post_set_iam_policy_with_metadata` + interceptor in new development instead of the `post_set_iam_policy` interceptor. + When both interceptors are used, this `post_set_iam_policy_with_metadata` interceptor runs after the + `post_set_iam_policy` interceptor. The (possibly modified) response returned by + `post_set_iam_policy` will be passed to + `post_set_iam_policy_with_metadata`. + """ + return response, metadata + def pre_set_labels( self, request: compute.SetLabelsImageRequest, @@ -372,12 +575,35 @@ def pre_set_labels( def post_set_labels(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for set_labels - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_set_labels_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Images server but before - it is returned to user code. + it is returned to user code. This `post_set_labels` interceptor runs + before the `post_set_labels_with_metadata` interceptor. """ return response + def post_set_labels_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for set_labels + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Images server but before it is returned to user code. + + We recommend only using this `post_set_labels_with_metadata` + interceptor in new development instead of the `post_set_labels` interceptor. + When both interceptors are used, this `post_set_labels_with_metadata` interceptor runs after the + `post_set_labels` interceptor. The (possibly modified) response returned by + `post_set_labels` will be passed to + `post_set_labels_with_metadata`. 
+ """ + return response, metadata + def pre_test_iam_permissions( self, request: compute.TestIamPermissionsImageRequest, @@ -397,12 +623,37 @@ def post_test_iam_permissions( ) -> compute.TestPermissionsResponse: """Post-rpc interceptor for test_iam_permissions - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_test_iam_permissions_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Images server but before - it is returned to user code. + it is returned to user code. This `post_test_iam_permissions` interceptor runs + before the `post_test_iam_permissions_with_metadata` interceptor. """ return response + def post_test_iam_permissions_with_metadata( + self, + response: compute.TestPermissionsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.TestPermissionsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for test_iam_permissions + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Images server but before it is returned to user code. + + We recommend only using this `post_test_iam_permissions_with_metadata` + interceptor in new development instead of the `post_test_iam_permissions` interceptor. + When both interceptors are used, this `post_test_iam_permissions_with_metadata` interceptor runs after the + `post_test_iam_permissions` interceptor. The (possibly modified) response returned by + `post_test_iam_permissions` will be passed to + `post_test_iam_permissions_with_metadata`. + """ + return response, metadata + @dataclasses.dataclass class ImagesRestStub: @@ -627,6 +878,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -792,6 +1047,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_deprecate(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_deprecate_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -935,6 +1194,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1083,6 +1344,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_from_family(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_from_family_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1250,6 +1515,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_iam_policy(resp) + response_metadata = [(k, str(v)) for k, v in 
response.headers.items()] + resp, _ = self._interceptor.post_get_iam_policy_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1412,6 +1681,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_insert(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_insert_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1551,6 +1824,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1713,6 +1988,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_patch(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_patch_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1886,6 +2165,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_set_iam_policy(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_set_iam_policy_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2051,6 +2334,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_set_labels(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_set_labels_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2205,6 +2492,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_test_iam_permissions(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_test_iam_permissions_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/instance_group_manager_resize_requests/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/instance_group_manager_resize_requests/client.py index c4ae0c53dcdd..6fc9eeef1b30 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/instance_group_manager_resize_requests/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/instance_group_manager_resize_requests/client.py @@ -15,6 +15,8 @@ # from collections import OrderedDict import functools +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -472,6 +474,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. 
return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/instance_group_manager_resize_requests/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/instance_group_manager_resize_requests/transports/rest.py index e1d4ea5d3afc..ee2244239e80 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/instance_group_manager_resize_requests/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/instance_group_manager_resize_requests/transports/rest.py @@ -132,12 +132,35 @@ def pre_cancel( def post_cancel(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for cancel - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_cancel_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the InstanceGroupManagerResizeRequests server but before - it is returned to user code. + it is returned to user code. This `post_cancel` interceptor runs + before the `post_cancel_with_metadata` interceptor. """ return response + def post_cancel_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for cancel + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the InstanceGroupManagerResizeRequests server but before it is returned to user code. + + We recommend only using this `post_cancel_with_metadata` + interceptor in new development instead of the `post_cancel` interceptor. + When both interceptors are used, this `post_cancel_with_metadata` interceptor runs after the + `post_cancel` interceptor. The (possibly modified) response returned by + `post_cancel` will be passed to + `post_cancel_with_metadata`. + """ + return response, metadata + def pre_delete( self, request: compute.DeleteInstanceGroupManagerResizeRequestRequest, @@ -156,12 +179,35 @@ def pre_delete( def post_delete(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for delete - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the InstanceGroupManagerResizeRequests server but before - it is returned to user code. + it is returned to user code. This `post_delete` interceptor runs + before the `post_delete_with_metadata` interceptor. 
""" return response + def post_delete_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the InstanceGroupManagerResizeRequests server but before it is returned to user code. + + We recommend only using this `post_delete_with_metadata` + interceptor in new development instead of the `post_delete` interceptor. + When both interceptors are used, this `post_delete_with_metadata` interceptor runs after the + `post_delete` interceptor. The (possibly modified) response returned by + `post_delete` will be passed to + `post_delete_with_metadata`. + """ + return response, metadata + def pre_get( self, request: compute.GetInstanceGroupManagerResizeRequestRequest, @@ -182,12 +228,38 @@ def post_get( ) -> compute.InstanceGroupManagerResizeRequest: """Post-rpc interceptor for get - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the InstanceGroupManagerResizeRequests server but before - it is returned to user code. + it is returned to user code. This `post_get` interceptor runs + before the `post_get_with_metadata` interceptor. """ return response + def post_get_with_metadata( + self, + response: compute.InstanceGroupManagerResizeRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.InstanceGroupManagerResizeRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for get + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the InstanceGroupManagerResizeRequests server but before it is returned to user code. + + We recommend only using this `post_get_with_metadata` + interceptor in new development instead of the `post_get` interceptor. + When both interceptors are used, this `post_get_with_metadata` interceptor runs after the + `post_get` interceptor. The (possibly modified) response returned by + `post_get` will be passed to + `post_get_with_metadata`. + """ + return response, metadata + def pre_insert( self, request: compute.InsertInstanceGroupManagerResizeRequestRequest, @@ -206,12 +278,35 @@ def pre_insert( def post_insert(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for insert - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_insert_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the InstanceGroupManagerResizeRequests server but before - it is returned to user code. + it is returned to user code. This `post_insert` interceptor runs + before the `post_insert_with_metadata` interceptor. """ return response + def post_insert_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for insert + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the InstanceGroupManagerResizeRequests server but before it is returned to user code. 
+ + We recommend only using this `post_insert_with_metadata` + interceptor in new development instead of the `post_insert` interceptor. + When both interceptors are used, this `post_insert_with_metadata` interceptor runs after the + `post_insert` interceptor. The (possibly modified) response returned by + `post_insert` will be passed to + `post_insert_with_metadata`. + """ + return response, metadata + def pre_list( self, request: compute.ListInstanceGroupManagerResizeRequestsRequest, @@ -232,12 +327,38 @@ def post_list( ) -> compute.InstanceGroupManagerResizeRequestsListResponse: """Post-rpc interceptor for list - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the InstanceGroupManagerResizeRequests server but before - it is returned to user code. + it is returned to user code. This `post_list` interceptor runs + before the `post_list_with_metadata` interceptor. """ return response + def post_list_with_metadata( + self, + response: compute.InstanceGroupManagerResizeRequestsListResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.InstanceGroupManagerResizeRequestsListResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the InstanceGroupManagerResizeRequests server but before it is returned to user code. + + We recommend only using this `post_list_with_metadata` + interceptor in new development instead of the `post_list` interceptor. + When both interceptors are used, this `post_list_with_metadata` interceptor runs after the + `post_list` interceptor. The (possibly modified) response returned by + `post_list` will be passed to + `post_list_with_metadata`. 
+ """ + return response, metadata + @dataclasses.dataclass class InstanceGroupManagerResizeRequestsRestStub: @@ -472,6 +593,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_cancel(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_cancel_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -634,6 +759,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -787,6 +916,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -957,6 +1088,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_insert(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_insert_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1102,6 +1237,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/instance_group_managers/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/instance_group_managers/client.py index f06aca9cd640..202f0edccb47 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/instance_group_managers/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/instance_group_managers/client.py @@ -15,6 +15,8 @@ # from collections import OrderedDict import functools +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -463,6 +465,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. 
+ """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/instance_group_managers/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/instance_group_managers/transports/rest.py index edcde7634684..8a4013387d9f 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/instance_group_managers/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/instance_group_managers/transports/rest.py @@ -284,12 +284,35 @@ def pre_abandon_instances( def post_abandon_instances(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for abandon_instances - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_abandon_instances_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the InstanceGroupManagers server but before - it is returned to user code. + it is returned to user code. This `post_abandon_instances` interceptor runs + before the `post_abandon_instances_with_metadata` interceptor. """ return response + def post_abandon_instances_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for abandon_instances + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the InstanceGroupManagers server but before it is returned to user code. + + We recommend only using this `post_abandon_instances_with_metadata` + interceptor in new development instead of the `post_abandon_instances` interceptor. + When both interceptors are used, this `post_abandon_instances_with_metadata` interceptor runs after the + `post_abandon_instances` interceptor. The (possibly modified) response returned by + `post_abandon_instances` will be passed to + `post_abandon_instances_with_metadata`. + """ + return response, metadata + def pre_aggregated_list( self, request: compute.AggregatedListInstanceGroupManagersRequest, @@ -310,12 +333,38 @@ def post_aggregated_list( ) -> compute.InstanceGroupManagerAggregatedList: """Post-rpc interceptor for aggregated_list - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_aggregated_list_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the InstanceGroupManagers server but before - it is returned to user code. + it is returned to user code. This `post_aggregated_list` interceptor runs + before the `post_aggregated_list_with_metadata` interceptor. 
""" return response + def post_aggregated_list_with_metadata( + self, + response: compute.InstanceGroupManagerAggregatedList, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.InstanceGroupManagerAggregatedList, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for aggregated_list + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the InstanceGroupManagers server but before it is returned to user code. + + We recommend only using this `post_aggregated_list_with_metadata` + interceptor in new development instead of the `post_aggregated_list` interceptor. + When both interceptors are used, this `post_aggregated_list_with_metadata` interceptor runs after the + `post_aggregated_list` interceptor. The (possibly modified) response returned by + `post_aggregated_list` will be passed to + `post_aggregated_list_with_metadata`. + """ + return response, metadata + def pre_apply_updates_to_instances( self, request: compute.ApplyUpdatesToInstancesInstanceGroupManagerRequest, @@ -336,12 +385,35 @@ def post_apply_updates_to_instances( ) -> compute.Operation: """Post-rpc interceptor for apply_updates_to_instances - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_apply_updates_to_instances_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the InstanceGroupManagers server but before - it is returned to user code. + it is returned to user code. This `post_apply_updates_to_instances` interceptor runs + before the `post_apply_updates_to_instances_with_metadata` interceptor. """ return response + def post_apply_updates_to_instances_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for apply_updates_to_instances + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the InstanceGroupManagers server but before it is returned to user code. + + We recommend only using this `post_apply_updates_to_instances_with_metadata` + interceptor in new development instead of the `post_apply_updates_to_instances` interceptor. + When both interceptors are used, this `post_apply_updates_to_instances_with_metadata` interceptor runs after the + `post_apply_updates_to_instances` interceptor. The (possibly modified) response returned by + `post_apply_updates_to_instances` will be passed to + `post_apply_updates_to_instances_with_metadata`. + """ + return response, metadata + def pre_create_instances( self, request: compute.CreateInstancesInstanceGroupManagerRequest, @@ -360,12 +432,35 @@ def pre_create_instances( def post_create_instances(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for create_instances - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_instances_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the InstanceGroupManagers server but before - it is returned to user code. + it is returned to user code. This `post_create_instances` interceptor runs + before the `post_create_instances_with_metadata` interceptor. 
""" return response + def post_create_instances_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_instances + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the InstanceGroupManagers server but before it is returned to user code. + + We recommend only using this `post_create_instances_with_metadata` + interceptor in new development instead of the `post_create_instances` interceptor. + When both interceptors are used, this `post_create_instances_with_metadata` interceptor runs after the + `post_create_instances` interceptor. The (possibly modified) response returned by + `post_create_instances` will be passed to + `post_create_instances_with_metadata`. + """ + return response, metadata + def pre_delete( self, request: compute.DeleteInstanceGroupManagerRequest, @@ -384,12 +479,35 @@ def pre_delete( def post_delete(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for delete - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the InstanceGroupManagers server but before - it is returned to user code. + it is returned to user code. This `post_delete` interceptor runs + before the `post_delete_with_metadata` interceptor. """ return response + def post_delete_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the InstanceGroupManagers server but before it is returned to user code. + + We recommend only using this `post_delete_with_metadata` + interceptor in new development instead of the `post_delete` interceptor. + When both interceptors are used, this `post_delete_with_metadata` interceptor runs after the + `post_delete` interceptor. The (possibly modified) response returned by + `post_delete` will be passed to + `post_delete_with_metadata`. + """ + return response, metadata + def pre_delete_instances( self, request: compute.DeleteInstancesInstanceGroupManagerRequest, @@ -408,12 +526,35 @@ def pre_delete_instances( def post_delete_instances(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for delete_instances - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_instances_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the InstanceGroupManagers server but before - it is returned to user code. + it is returned to user code. This `post_delete_instances` interceptor runs + before the `post_delete_instances_with_metadata` interceptor. """ return response + def post_delete_instances_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_instances + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the InstanceGroupManagers server but before it is returned to user code. 
+ + We recommend only using this `post_delete_instances_with_metadata` + interceptor in new development instead of the `post_delete_instances` interceptor. + When both interceptors are used, this `post_delete_instances_with_metadata` interceptor runs after the + `post_delete_instances` interceptor. The (possibly modified) response returned by + `post_delete_instances` will be passed to + `post_delete_instances_with_metadata`. + """ + return response, metadata + def pre_delete_per_instance_configs( self, request: compute.DeletePerInstanceConfigsInstanceGroupManagerRequest, @@ -434,12 +575,35 @@ def post_delete_per_instance_configs( ) -> compute.Operation: """Post-rpc interceptor for delete_per_instance_configs - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_per_instance_configs_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the InstanceGroupManagers server but before - it is returned to user code. + it is returned to user code. This `post_delete_per_instance_configs` interceptor runs + before the `post_delete_per_instance_configs_with_metadata` interceptor. """ return response + def post_delete_per_instance_configs_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_per_instance_configs + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the InstanceGroupManagers server but before it is returned to user code. + + We recommend only using this `post_delete_per_instance_configs_with_metadata` + interceptor in new development instead of the `post_delete_per_instance_configs` interceptor. + When both interceptors are used, this `post_delete_per_instance_configs_with_metadata` interceptor runs after the + `post_delete_per_instance_configs` interceptor. The (possibly modified) response returned by + `post_delete_per_instance_configs` will be passed to + `post_delete_per_instance_configs_with_metadata`. + """ + return response, metadata + def pre_get( self, request: compute.GetInstanceGroupManagerRequest, @@ -459,12 +623,35 @@ def post_get( ) -> compute.InstanceGroupManager: """Post-rpc interceptor for get - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the InstanceGroupManagers server but before - it is returned to user code. + it is returned to user code. This `post_get` interceptor runs + before the `post_get_with_metadata` interceptor. """ return response + def post_get_with_metadata( + self, + response: compute.InstanceGroupManager, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.InstanceGroupManager, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the InstanceGroupManagers server but before it is returned to user code. + + We recommend only using this `post_get_with_metadata` + interceptor in new development instead of the `post_get` interceptor. + When both interceptors are used, this `post_get_with_metadata` interceptor runs after the + `post_get` interceptor. 
The (possibly modified) response returned by + `post_get` will be passed to + `post_get_with_metadata`. + """ + return response, metadata + def pre_insert( self, request: compute.InsertInstanceGroupManagerRequest, @@ -483,12 +670,35 @@ def pre_insert( def post_insert(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for insert - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_insert_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the InstanceGroupManagers server but before - it is returned to user code. + it is returned to user code. This `post_insert` interceptor runs + before the `post_insert_with_metadata` interceptor. """ return response + def post_insert_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for insert + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the InstanceGroupManagers server but before it is returned to user code. + + We recommend only using this `post_insert_with_metadata` + interceptor in new development instead of the `post_insert` interceptor. + When both interceptors are used, this `post_insert_with_metadata` interceptor runs after the + `post_insert` interceptor. The (possibly modified) response returned by + `post_insert` will be passed to + `post_insert_with_metadata`. + """ + return response, metadata + def pre_list( self, request: compute.ListInstanceGroupManagersRequest, @@ -509,12 +719,37 @@ def post_list( ) -> compute.InstanceGroupManagerList: """Post-rpc interceptor for list - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the InstanceGroupManagers server but before - it is returned to user code. + it is returned to user code. This `post_list` interceptor runs + before the `post_list_with_metadata` interceptor. """ return response + def post_list_with_metadata( + self, + response: compute.InstanceGroupManagerList, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.InstanceGroupManagerList, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the InstanceGroupManagers server but before it is returned to user code. + + We recommend only using this `post_list_with_metadata` + interceptor in new development instead of the `post_list` interceptor. + When both interceptors are used, this `post_list_with_metadata` interceptor runs after the + `post_list` interceptor. The (possibly modified) response returned by + `post_list` will be passed to + `post_list_with_metadata`. + """ + return response, metadata + def pre_list_errors( self, request: compute.ListErrorsInstanceGroupManagersRequest, @@ -535,12 +770,38 @@ def post_list_errors( ) -> compute.InstanceGroupManagersListErrorsResponse: """Post-rpc interceptor for list_errors - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_errors_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response after it is returned by the InstanceGroupManagers server but before - it is returned to user code. + it is returned to user code. This `post_list_errors` interceptor runs + before the `post_list_errors_with_metadata` interceptor. """ return response + def post_list_errors_with_metadata( + self, + response: compute.InstanceGroupManagersListErrorsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.InstanceGroupManagersListErrorsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_errors + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the InstanceGroupManagers server but before it is returned to user code. + + We recommend only using this `post_list_errors_with_metadata` + interceptor in new development instead of the `post_list_errors` interceptor. + When both interceptors are used, this `post_list_errors_with_metadata` interceptor runs after the + `post_list_errors` interceptor. The (possibly modified) response returned by + `post_list_errors` will be passed to + `post_list_errors_with_metadata`. + """ + return response, metadata + def pre_list_managed_instances( self, request: compute.ListManagedInstancesInstanceGroupManagersRequest, @@ -561,12 +822,38 @@ def post_list_managed_instances( ) -> compute.InstanceGroupManagersListManagedInstancesResponse: """Post-rpc interceptor for list_managed_instances - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_managed_instances_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the InstanceGroupManagers server but before - it is returned to user code. + it is returned to user code. This `post_list_managed_instances` interceptor runs + before the `post_list_managed_instances_with_metadata` interceptor. """ return response + def post_list_managed_instances_with_metadata( + self, + response: compute.InstanceGroupManagersListManagedInstancesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.InstanceGroupManagersListManagedInstancesResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_managed_instances + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the InstanceGroupManagers server but before it is returned to user code. + + We recommend only using this `post_list_managed_instances_with_metadata` + interceptor in new development instead of the `post_list_managed_instances` interceptor. + When both interceptors are used, this `post_list_managed_instances_with_metadata` interceptor runs after the + `post_list_managed_instances` interceptor. The (possibly modified) response returned by + `post_list_managed_instances` will be passed to + `post_list_managed_instances_with_metadata`. + """ + return response, metadata + def pre_list_per_instance_configs( self, request: compute.ListPerInstanceConfigsInstanceGroupManagersRequest, @@ -587,12 +874,38 @@ def post_list_per_instance_configs( ) -> compute.InstanceGroupManagersListPerInstanceConfigsResp: """Post-rpc interceptor for list_per_instance_configs - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_per_instance_configs_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response after it is returned by the InstanceGroupManagers server but before - it is returned to user code. + it is returned to user code. This `post_list_per_instance_configs` interceptor runs + before the `post_list_per_instance_configs_with_metadata` interceptor. """ return response + def post_list_per_instance_configs_with_metadata( + self, + response: compute.InstanceGroupManagersListPerInstanceConfigsResp, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.InstanceGroupManagersListPerInstanceConfigsResp, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_per_instance_configs + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the InstanceGroupManagers server but before it is returned to user code. + + We recommend only using this `post_list_per_instance_configs_with_metadata` + interceptor in new development instead of the `post_list_per_instance_configs` interceptor. + When both interceptors are used, this `post_list_per_instance_configs_with_metadata` interceptor runs after the + `post_list_per_instance_configs` interceptor. The (possibly modified) response returned by + `post_list_per_instance_configs` will be passed to + `post_list_per_instance_configs_with_metadata`. + """ + return response, metadata + def pre_patch( self, request: compute.PatchInstanceGroupManagerRequest, @@ -611,12 +924,35 @@ def pre_patch( def post_patch(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for patch - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_patch_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the InstanceGroupManagers server but before - it is returned to user code. + it is returned to user code. This `post_patch` interceptor runs + before the `post_patch_with_metadata` interceptor. """ return response + def post_patch_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for patch + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the InstanceGroupManagers server but before it is returned to user code. + + We recommend only using this `post_patch_with_metadata` + interceptor in new development instead of the `post_patch` interceptor. + When both interceptors are used, this `post_patch_with_metadata` interceptor runs after the + `post_patch` interceptor. The (possibly modified) response returned by + `post_patch` will be passed to + `post_patch_with_metadata`. + """ + return response, metadata + def pre_patch_per_instance_configs( self, request: compute.PatchPerInstanceConfigsInstanceGroupManagerRequest, @@ -637,12 +973,35 @@ def post_patch_per_instance_configs( ) -> compute.Operation: """Post-rpc interceptor for patch_per_instance_configs - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_patch_per_instance_configs_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the InstanceGroupManagers server but before - it is returned to user code. + it is returned to user code. 
This `post_patch_per_instance_configs` interceptor runs + before the `post_patch_per_instance_configs_with_metadata` interceptor. """ return response + def post_patch_per_instance_configs_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for patch_per_instance_configs + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the InstanceGroupManagers server but before it is returned to user code. + + We recommend only using this `post_patch_per_instance_configs_with_metadata` + interceptor in new development instead of the `post_patch_per_instance_configs` interceptor. + When both interceptors are used, this `post_patch_per_instance_configs_with_metadata` interceptor runs after the + `post_patch_per_instance_configs` interceptor. The (possibly modified) response returned by + `post_patch_per_instance_configs` will be passed to + `post_patch_per_instance_configs_with_metadata`. + """ + return response, metadata + def pre_recreate_instances( self, request: compute.RecreateInstancesInstanceGroupManagerRequest, @@ -661,12 +1020,35 @@ def pre_recreate_instances( def post_recreate_instances(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for recreate_instances - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_recreate_instances_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the InstanceGroupManagers server but before - it is returned to user code. + it is returned to user code. This `post_recreate_instances` interceptor runs + before the `post_recreate_instances_with_metadata` interceptor. """ return response + def post_recreate_instances_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for recreate_instances + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the InstanceGroupManagers server but before it is returned to user code. + + We recommend only using this `post_recreate_instances_with_metadata` + interceptor in new development instead of the `post_recreate_instances` interceptor. + When both interceptors are used, this `post_recreate_instances_with_metadata` interceptor runs after the + `post_recreate_instances` interceptor. The (possibly modified) response returned by + `post_recreate_instances` will be passed to + `post_recreate_instances_with_metadata`. + """ + return response, metadata + def pre_resize( self, request: compute.ResizeInstanceGroupManagerRequest, @@ -685,12 +1067,35 @@ def pre_resize( def post_resize(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for resize - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_resize_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the InstanceGroupManagers server but before - it is returned to user code. + it is returned to user code. This `post_resize` interceptor runs + before the `post_resize_with_metadata` interceptor. 
""" return response + def post_resize_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for resize + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the InstanceGroupManagers server but before it is returned to user code. + + We recommend only using this `post_resize_with_metadata` + interceptor in new development instead of the `post_resize` interceptor. + When both interceptors are used, this `post_resize_with_metadata` interceptor runs after the + `post_resize` interceptor. The (possibly modified) response returned by + `post_resize` will be passed to + `post_resize_with_metadata`. + """ + return response, metadata + def pre_resume_instances( self, request: compute.ResumeInstancesInstanceGroupManagerRequest, @@ -709,12 +1114,35 @@ def pre_resume_instances( def post_resume_instances(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for resume_instances - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_resume_instances_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the InstanceGroupManagers server but before - it is returned to user code. + it is returned to user code. This `post_resume_instances` interceptor runs + before the `post_resume_instances_with_metadata` interceptor. """ return response + def post_resume_instances_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for resume_instances + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the InstanceGroupManagers server but before it is returned to user code. + + We recommend only using this `post_resume_instances_with_metadata` + interceptor in new development instead of the `post_resume_instances` interceptor. + When both interceptors are used, this `post_resume_instances_with_metadata` interceptor runs after the + `post_resume_instances` interceptor. The (possibly modified) response returned by + `post_resume_instances` will be passed to + `post_resume_instances_with_metadata`. + """ + return response, metadata + def pre_set_instance_template( self, request: compute.SetInstanceTemplateInstanceGroupManagerRequest, @@ -735,12 +1163,35 @@ def post_set_instance_template( ) -> compute.Operation: """Post-rpc interceptor for set_instance_template - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_set_instance_template_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the InstanceGroupManagers server but before - it is returned to user code. + it is returned to user code. This `post_set_instance_template` interceptor runs + before the `post_set_instance_template_with_metadata` interceptor. 
""" return response + def post_set_instance_template_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for set_instance_template + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the InstanceGroupManagers server but before it is returned to user code. + + We recommend only using this `post_set_instance_template_with_metadata` + interceptor in new development instead of the `post_set_instance_template` interceptor. + When both interceptors are used, this `post_set_instance_template_with_metadata` interceptor runs after the + `post_set_instance_template` interceptor. The (possibly modified) response returned by + `post_set_instance_template` will be passed to + `post_set_instance_template_with_metadata`. + """ + return response, metadata + def pre_set_target_pools( self, request: compute.SetTargetPoolsInstanceGroupManagerRequest, @@ -759,12 +1210,35 @@ def pre_set_target_pools( def post_set_target_pools(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for set_target_pools - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_set_target_pools_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the InstanceGroupManagers server but before - it is returned to user code. + it is returned to user code. This `post_set_target_pools` interceptor runs + before the `post_set_target_pools_with_metadata` interceptor. """ return response + def post_set_target_pools_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for set_target_pools + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the InstanceGroupManagers server but before it is returned to user code. + + We recommend only using this `post_set_target_pools_with_metadata` + interceptor in new development instead of the `post_set_target_pools` interceptor. + When both interceptors are used, this `post_set_target_pools_with_metadata` interceptor runs after the + `post_set_target_pools` interceptor. The (possibly modified) response returned by + `post_set_target_pools` will be passed to + `post_set_target_pools_with_metadata`. + """ + return response, metadata + def pre_start_instances( self, request: compute.StartInstancesInstanceGroupManagerRequest, @@ -783,12 +1257,35 @@ def pre_start_instances( def post_start_instances(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for start_instances - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_start_instances_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the InstanceGroupManagers server but before - it is returned to user code. + it is returned to user code. This `post_start_instances` interceptor runs + before the `post_start_instances_with_metadata` interceptor. 
""" return response + def post_start_instances_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for start_instances + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the InstanceGroupManagers server but before it is returned to user code. + + We recommend only using this `post_start_instances_with_metadata` + interceptor in new development instead of the `post_start_instances` interceptor. + When both interceptors are used, this `post_start_instances_with_metadata` interceptor runs after the + `post_start_instances` interceptor. The (possibly modified) response returned by + `post_start_instances` will be passed to + `post_start_instances_with_metadata`. + """ + return response, metadata + def pre_stop_instances( self, request: compute.StopInstancesInstanceGroupManagerRequest, @@ -807,12 +1304,35 @@ def pre_stop_instances( def post_stop_instances(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for stop_instances - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_stop_instances_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the InstanceGroupManagers server but before - it is returned to user code. + it is returned to user code. This `post_stop_instances` interceptor runs + before the `post_stop_instances_with_metadata` interceptor. """ return response + def post_stop_instances_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for stop_instances + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the InstanceGroupManagers server but before it is returned to user code. + + We recommend only using this `post_stop_instances_with_metadata` + interceptor in new development instead of the `post_stop_instances` interceptor. + When both interceptors are used, this `post_stop_instances_with_metadata` interceptor runs after the + `post_stop_instances` interceptor. The (possibly modified) response returned by + `post_stop_instances` will be passed to + `post_stop_instances_with_metadata`. + """ + return response, metadata + def pre_suspend_instances( self, request: compute.SuspendInstancesInstanceGroupManagerRequest, @@ -831,12 +1351,35 @@ def pre_suspend_instances( def post_suspend_instances(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for suspend_instances - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_suspend_instances_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the InstanceGroupManagers server but before - it is returned to user code. + it is returned to user code. This `post_suspend_instances` interceptor runs + before the `post_suspend_instances_with_metadata` interceptor. 
""" return response + def post_suspend_instances_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for suspend_instances + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the InstanceGroupManagers server but before it is returned to user code. + + We recommend only using this `post_suspend_instances_with_metadata` + interceptor in new development instead of the `post_suspend_instances` interceptor. + When both interceptors are used, this `post_suspend_instances_with_metadata` interceptor runs after the + `post_suspend_instances` interceptor. The (possibly modified) response returned by + `post_suspend_instances` will be passed to + `post_suspend_instances_with_metadata`. + """ + return response, metadata + def pre_update_per_instance_configs( self, request: compute.UpdatePerInstanceConfigsInstanceGroupManagerRequest, @@ -857,12 +1400,35 @@ def post_update_per_instance_configs( ) -> compute.Operation: """Post-rpc interceptor for update_per_instance_configs - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_per_instance_configs_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the InstanceGroupManagers server but before - it is returned to user code. + it is returned to user code. This `post_update_per_instance_configs` interceptor runs + before the `post_update_per_instance_configs_with_metadata` interceptor. """ return response + def post_update_per_instance_configs_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_per_instance_configs + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the InstanceGroupManagers server but before it is returned to user code. + + We recommend only using this `post_update_per_instance_configs_with_metadata` + interceptor in new development instead of the `post_update_per_instance_configs` interceptor. + When both interceptors are used, this `post_update_per_instance_configs_with_metadata` interceptor runs after the + `post_update_per_instance_configs` interceptor. The (possibly modified) response returned by + `post_update_per_instance_configs` will be passed to + `post_update_per_instance_configs_with_metadata`. 
+ """ + return response, metadata + @dataclasses.dataclass class InstanceGroupManagersRestStub: @@ -1101,6 +1667,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_abandon_instances(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_abandon_instances_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1244,6 +1814,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_aggregated_list(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_aggregated_list_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1415,6 +1989,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_apply_updates_to_instances(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_apply_updates_to_instances_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1585,6 +2163,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_instances(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_instances_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1745,6 +2327,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1915,6 +2501,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_instances(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_instances_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2084,6 +2674,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_per_instance_configs(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_per_instance_configs_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2237,6 +2831,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2403,6 +2999,10 @@ def __call__( json_format.Parse(response.content, pb_resp, 
ignore_unknown_fields=True) resp = self._interceptor.post_insert(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_insert_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2545,6 +3145,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2690,6 +3292,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_errors(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_errors_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2841,6 +3447,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_managed_instances(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_managed_instances_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2988,6 +3598,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_per_instance_configs(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_per_instance_configs_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3158,6 +3772,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_patch(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_patch_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3327,6 +3945,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_patch_per_instance_configs(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_patch_per_instance_configs_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3497,6 +4119,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_recreate_instances(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_recreate_instances_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3657,6 +4283,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_resize(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = 
self._interceptor.post_resize_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3827,6 +4457,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_resume_instances(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_resume_instances_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3997,6 +4631,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_set_instance_template(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_set_instance_template_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4165,6 +4803,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_set_target_pools(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_set_target_pools_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4331,6 +4973,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_start_instances(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_start_instances_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4497,6 +5143,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_stop_instances(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_stop_instances_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4667,6 +5317,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_suspend_instances(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_suspend_instances_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4836,6 +5490,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_per_instance_configs(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_per_instance_configs_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/instance_groups/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/instance_groups/client.py index 90b1be153830..641c4cca271c 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/instance_groups/client.py +++ 
b/packages/google-cloud-compute/google/cloud/compute_v1/services/instance_groups/client.py @@ -15,6 +15,8 @@ # from collections import OrderedDict import functools +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -461,6 +463,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/instance_groups/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/instance_groups/transports/rest.py index aa161a56217c..87af5b318878 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/instance_groups/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/instance_groups/transports/rest.py @@ -164,12 +164,35 @@ def pre_add_instances( def post_add_instances(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for add_instances - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_add_instances_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the InstanceGroups server but before - it is returned to user code. + it is returned to user code. This `post_add_instances` interceptor runs + before the `post_add_instances_with_metadata` interceptor. """ return response + def post_add_instances_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for add_instances + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the InstanceGroups server but before it is returned to user code. + + We recommend only using this `post_add_instances_with_metadata` + interceptor in new development instead of the `post_add_instances` interceptor. + When both interceptors are used, this `post_add_instances_with_metadata` interceptor runs after the + `post_add_instances` interceptor. The (possibly modified) response returned by + `post_add_instances` will be passed to + `post_add_instances_with_metadata`. + """ + return response, metadata + def pre_aggregated_list( self, request: compute.AggregatedListInstanceGroupsRequest, @@ -190,12 +213,37 @@ def post_aggregated_list( ) -> compute.InstanceGroupAggregatedList: """Post-rpc interceptor for aggregated_list - Override in a subclass to manipulate the response + DEPRECATED. 
Please use the `post_aggregated_list_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the InstanceGroups server but before - it is returned to user code. + it is returned to user code. This `post_aggregated_list` interceptor runs + before the `post_aggregated_list_with_metadata` interceptor. """ return response + def post_aggregated_list_with_metadata( + self, + response: compute.InstanceGroupAggregatedList, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.InstanceGroupAggregatedList, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for aggregated_list + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the InstanceGroups server but before it is returned to user code. + + We recommend only using this `post_aggregated_list_with_metadata` + interceptor in new development instead of the `post_aggregated_list` interceptor. + When both interceptors are used, this `post_aggregated_list_with_metadata` interceptor runs after the + `post_aggregated_list` interceptor. The (possibly modified) response returned by + `post_aggregated_list` will be passed to + `post_aggregated_list_with_metadata`. + """ + return response, metadata + def pre_delete( self, request: compute.DeleteInstanceGroupRequest, @@ -213,12 +261,35 @@ def pre_delete( def post_delete(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for delete - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the InstanceGroups server but before - it is returned to user code. + it is returned to user code. This `post_delete` interceptor runs + before the `post_delete_with_metadata` interceptor. """ return response + def post_delete_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the InstanceGroups server but before it is returned to user code. + + We recommend only using this `post_delete_with_metadata` + interceptor in new development instead of the `post_delete` interceptor. + When both interceptors are used, this `post_delete_with_metadata` interceptor runs after the + `post_delete` interceptor. The (possibly modified) response returned by + `post_delete` will be passed to + `post_delete_with_metadata`. + """ + return response, metadata + def pre_get( self, request: compute.GetInstanceGroupRequest, @@ -236,12 +307,35 @@ def pre_get( def post_get(self, response: compute.InstanceGroup) -> compute.InstanceGroup: """Post-rpc interceptor for get - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the InstanceGroups server but before - it is returned to user code. + it is returned to user code. This `post_get` interceptor runs + before the `post_get_with_metadata` interceptor. 
""" return response + def post_get_with_metadata( + self, + response: compute.InstanceGroup, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.InstanceGroup, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the InstanceGroups server but before it is returned to user code. + + We recommend only using this `post_get_with_metadata` + interceptor in new development instead of the `post_get` interceptor. + When both interceptors are used, this `post_get_with_metadata` interceptor runs after the + `post_get` interceptor. The (possibly modified) response returned by + `post_get` will be passed to + `post_get_with_metadata`. + """ + return response, metadata + def pre_insert( self, request: compute.InsertInstanceGroupRequest, @@ -259,12 +353,35 @@ def pre_insert( def post_insert(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for insert - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_insert_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the InstanceGroups server but before - it is returned to user code. + it is returned to user code. This `post_insert` interceptor runs + before the `post_insert_with_metadata` interceptor. """ return response + def post_insert_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for insert + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the InstanceGroups server but before it is returned to user code. + + We recommend only using this `post_insert_with_metadata` + interceptor in new development instead of the `post_insert` interceptor. + When both interceptors are used, this `post_insert_with_metadata` interceptor runs after the + `post_insert` interceptor. The (possibly modified) response returned by + `post_insert` will be passed to + `post_insert_with_metadata`. + """ + return response, metadata + def pre_list( self, request: compute.ListInstanceGroupsRequest, @@ -284,12 +401,35 @@ def post_list( ) -> compute.InstanceGroupList: """Post-rpc interceptor for list - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the InstanceGroups server but before - it is returned to user code. + it is returned to user code. This `post_list` interceptor runs + before the `post_list_with_metadata` interceptor. """ return response + def post_list_with_metadata( + self, + response: compute.InstanceGroupList, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.InstanceGroupList, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the InstanceGroups server but before it is returned to user code. + + We recommend only using this `post_list_with_metadata` + interceptor in new development instead of the `post_list` interceptor. + When both interceptors are used, this `post_list_with_metadata` interceptor runs after the + `post_list` interceptor. 
The (possibly modified) response returned by + `post_list` will be passed to + `post_list_with_metadata`. + """ + return response, metadata + def pre_list_instances( self, request: compute.ListInstancesInstanceGroupsRequest, @@ -310,12 +450,37 @@ def post_list_instances( ) -> compute.InstanceGroupsListInstances: """Post-rpc interceptor for list_instances - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_instances_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the InstanceGroups server but before - it is returned to user code. + it is returned to user code. This `post_list_instances` interceptor runs + before the `post_list_instances_with_metadata` interceptor. """ return response + def post_list_instances_with_metadata( + self, + response: compute.InstanceGroupsListInstances, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.InstanceGroupsListInstances, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_instances + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the InstanceGroups server but before it is returned to user code. + + We recommend only using this `post_list_instances_with_metadata` + interceptor in new development instead of the `post_list_instances` interceptor. + When both interceptors are used, this `post_list_instances_with_metadata` interceptor runs after the + `post_list_instances` interceptor. The (possibly modified) response returned by + `post_list_instances` will be passed to + `post_list_instances_with_metadata`. + """ + return response, metadata + def pre_remove_instances( self, request: compute.RemoveInstancesInstanceGroupRequest, @@ -334,12 +499,35 @@ def pre_remove_instances( def post_remove_instances(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for remove_instances - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_remove_instances_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the InstanceGroups server but before - it is returned to user code. + it is returned to user code. This `post_remove_instances` interceptor runs + before the `post_remove_instances_with_metadata` interceptor. """ return response + def post_remove_instances_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for remove_instances + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the InstanceGroups server but before it is returned to user code. + + We recommend only using this `post_remove_instances_with_metadata` + interceptor in new development instead of the `post_remove_instances` interceptor. + When both interceptors are used, this `post_remove_instances_with_metadata` interceptor runs after the + `post_remove_instances` interceptor. The (possibly modified) response returned by + `post_remove_instances` will be passed to + `post_remove_instances_with_metadata`. 
+ """ + return response, metadata + def pre_set_named_ports( self, request: compute.SetNamedPortsInstanceGroupRequest, @@ -358,12 +546,35 @@ def pre_set_named_ports( def post_set_named_ports(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for set_named_ports - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_set_named_ports_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the InstanceGroups server but before - it is returned to user code. + it is returned to user code. This `post_set_named_ports` interceptor runs + before the `post_set_named_ports_with_metadata` interceptor. """ return response + def post_set_named_ports_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for set_named_ports + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the InstanceGroups server but before it is returned to user code. + + We recommend only using this `post_set_named_ports_with_metadata` + interceptor in new development instead of the `post_set_named_ports` interceptor. + When both interceptors are used, this `post_set_named_ports_with_metadata` interceptor runs after the + `post_set_named_ports` interceptor. The (possibly modified) response returned by + `post_set_named_ports` will be passed to + `post_set_named_ports_with_metadata`. + """ + return response, metadata + @dataclasses.dataclass class InstanceGroupsRestStub: @@ -597,6 +808,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_add_instances(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_add_instances_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -739,6 +954,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_aggregated_list(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_aggregated_list_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -902,6 +1121,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1058,6 +1281,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1225,6 +1450,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_insert(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = 
self._interceptor.post_insert_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1369,6 +1598,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1517,6 +1748,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_instances(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_instances_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1686,6 +1921,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_remove_instances(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_remove_instances_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1851,6 +2090,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_set_named_ports(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_set_named_ports_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/instance_settings_service/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/instance_settings_service/client.py index fbdce52b0ada..8291cb4642ad 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/instance_settings_service/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/instance_settings_service/client.py @@ -15,6 +15,8 @@ # from collections import OrderedDict import functools +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -462,6 +464,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. 
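
The `post_*_with_metadata` interceptors added throughout these REST transports all follow the same pattern: the generated `__call__` method converts the HTTP response headers into a list of `(key, value)` tuples and passes them, together with the already-deserialized response, through the new hook after the legacy `post_*` hook has run. Below is a minimal sketch of how a caller might use one of these hooks, assuming the usual GAPIC naming for classes not spelled out in this diff (`InstanceGroupsRestInterceptor`, `InstanceGroupsRestTransport`, `InstanceGroupsClient`, and an `interceptor=` constructor argument on the transport) and Application Default Credentials; treat those names as assumptions if the generated module differs.

    # Sketch only: reads response headers via the new metadata-aware hook.
    from typing import Sequence, Tuple, Union

    from google.cloud import compute_v1
    # Module path matches the file changed in this diff; class names assumed
    # from standard GAPIC REST naming conventions.
    from google.cloud.compute_v1.services.instance_groups.transports.rest import (
        InstanceGroupsRestInterceptor,
        InstanceGroupsRestTransport,
    )


    class HeaderLoggingInterceptor(InstanceGroupsRestInterceptor):
        """Logs response headers surfaced by post_get_with_metadata."""

        def post_get_with_metadata(
            self,
            response: compute_v1.InstanceGroup,
            metadata: Sequence[Tuple[str, Union[str, bytes]]],
        ) -> Tuple[compute_v1.InstanceGroup, Sequence[Tuple[str, Union[str, bytes]]]]:
            # `metadata` carries the HTTP response headers as (key, value) pairs.
            for key, value in metadata:
                print(f"response header: {key}={value}")
            # Return both values; the response may be modified before user code sees it.
            return response, metadata


    # Wiring the interceptor into a client (transport kwargs and ADC assumed).
    transport = InstanceGroupsRestTransport(interceptor=HeaderLoggingInterceptor())
    client = compute_v1.InstanceGroupsClient(transport=transport)

Returning the `(response, metadata)` tuple unchanged preserves the default behavior; note that the generated `__call__` methods discard the returned metadata (`resp, _ = ...`), so only the response portion of the tuple propagates back to user code.
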
diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/instance_settings_service/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/instance_settings_service/transports/rest.py index 959a34cac6a4..6f125f992b7f 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/instance_settings_service/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/instance_settings_service/transports/rest.py @@ -107,12 +107,35 @@ def pre_get( def post_get(self, response: compute.InstanceSettings) -> compute.InstanceSettings: """Post-rpc interceptor for get - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the InstanceSettingsService server but before - it is returned to user code. + it is returned to user code. This `post_get` interceptor runs + before the `post_get_with_metadata` interceptor. """ return response + def post_get_with_metadata( + self, + response: compute.InstanceSettings, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.InstanceSettings, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the InstanceSettingsService server but before it is returned to user code. + + We recommend only using this `post_get_with_metadata` + interceptor in new development instead of the `post_get` interceptor. + When both interceptors are used, this `post_get_with_metadata` interceptor runs after the + `post_get` interceptor. The (possibly modified) response returned by + `post_get` will be passed to + `post_get_with_metadata`. + """ + return response, metadata + def pre_patch( self, request: compute.PatchInstanceSettingRequest, @@ -130,12 +153,35 @@ def pre_patch( def post_patch(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for patch - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_patch_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the InstanceSettingsService server but before - it is returned to user code. + it is returned to user code. This `post_patch` interceptor runs + before the `post_patch_with_metadata` interceptor. """ return response + def post_patch_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for patch + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the InstanceSettingsService server but before it is returned to user code. + + We recommend only using this `post_patch_with_metadata` + interceptor in new development instead of the `post_patch` interceptor. + When both interceptors are used, this `post_patch_with_metadata` interceptor runs after the + `post_patch` interceptor. The (possibly modified) response returned by + `post_patch` will be passed to + `post_patch_with_metadata`. 
+ """ + return response, metadata + @dataclasses.dataclass class InstanceSettingsServiceRestStub: @@ -354,6 +400,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -520,6 +568,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_patch(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_patch_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/instance_templates/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/instance_templates/client.py index cc5f086155c4..0bef30795e52 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/instance_templates/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/instance_templates/client.py @@ -15,6 +15,8 @@ # from collections import OrderedDict import functools +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -461,6 +463,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/instance_templates/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/instance_templates/transports/rest.py index 1d63419650cd..e7828267b6f1 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/instance_templates/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/instance_templates/transports/rest.py @@ -158,12 +158,37 @@ def post_aggregated_list( ) -> compute.InstanceTemplateAggregatedList: """Post-rpc interceptor for aggregated_list - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_aggregated_list_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the InstanceTemplates server but before - it is returned to user code. + it is returned to user code. This `post_aggregated_list` interceptor runs + before the `post_aggregated_list_with_metadata` interceptor. 
""" return response + def post_aggregated_list_with_metadata( + self, + response: compute.InstanceTemplateAggregatedList, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.InstanceTemplateAggregatedList, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for aggregated_list + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the InstanceTemplates server but before it is returned to user code. + + We recommend only using this `post_aggregated_list_with_metadata` + interceptor in new development instead of the `post_aggregated_list` interceptor. + When both interceptors are used, this `post_aggregated_list_with_metadata` interceptor runs after the + `post_aggregated_list` interceptor. The (possibly modified) response returned by + `post_aggregated_list` will be passed to + `post_aggregated_list_with_metadata`. + """ + return response, metadata + def pre_delete( self, request: compute.DeleteInstanceTemplateRequest, @@ -181,12 +206,35 @@ def pre_delete( def post_delete(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for delete - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the InstanceTemplates server but before - it is returned to user code. + it is returned to user code. This `post_delete` interceptor runs + before the `post_delete_with_metadata` interceptor. """ return response + def post_delete_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the InstanceTemplates server but before it is returned to user code. + + We recommend only using this `post_delete_with_metadata` + interceptor in new development instead of the `post_delete` interceptor. + When both interceptors are used, this `post_delete_with_metadata` interceptor runs after the + `post_delete` interceptor. The (possibly modified) response returned by + `post_delete` will be passed to + `post_delete_with_metadata`. + """ + return response, metadata + def pre_get( self, request: compute.GetInstanceTemplateRequest, @@ -204,12 +252,35 @@ def pre_get( def post_get(self, response: compute.InstanceTemplate) -> compute.InstanceTemplate: """Post-rpc interceptor for get - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the InstanceTemplates server but before - it is returned to user code. + it is returned to user code. This `post_get` interceptor runs + before the `post_get_with_metadata` interceptor. """ return response + def post_get_with_metadata( + self, + response: compute.InstanceTemplate, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.InstanceTemplate, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the InstanceTemplates server but before it is returned to user code. 
+ + We recommend only using this `post_get_with_metadata` + interceptor in new development instead of the `post_get` interceptor. + When both interceptors are used, this `post_get_with_metadata` interceptor runs after the + `post_get` interceptor. The (possibly modified) response returned by + `post_get` will be passed to + `post_get_with_metadata`. + """ + return response, metadata + def pre_get_iam_policy( self, request: compute.GetIamPolicyInstanceTemplateRequest, @@ -228,12 +299,35 @@ def pre_get_iam_policy( def post_get_iam_policy(self, response: compute.Policy) -> compute.Policy: """Post-rpc interceptor for get_iam_policy - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_iam_policy_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the InstanceTemplates server but before - it is returned to user code. + it is returned to user code. This `post_get_iam_policy` interceptor runs + before the `post_get_iam_policy_with_metadata` interceptor. """ return response + def post_get_iam_policy_with_metadata( + self, + response: compute.Policy, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Policy, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_iam_policy + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the InstanceTemplates server but before it is returned to user code. + + We recommend only using this `post_get_iam_policy_with_metadata` + interceptor in new development instead of the `post_get_iam_policy` interceptor. + When both interceptors are used, this `post_get_iam_policy_with_metadata` interceptor runs after the + `post_get_iam_policy` interceptor. The (possibly modified) response returned by + `post_get_iam_policy` will be passed to + `post_get_iam_policy_with_metadata`. + """ + return response, metadata + def pre_insert( self, request: compute.InsertInstanceTemplateRequest, @@ -251,12 +345,35 @@ def pre_insert( def post_insert(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for insert - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_insert_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the InstanceTemplates server but before - it is returned to user code. + it is returned to user code. This `post_insert` interceptor runs + before the `post_insert_with_metadata` interceptor. """ return response + def post_insert_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for insert + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the InstanceTemplates server but before it is returned to user code. + + We recommend only using this `post_insert_with_metadata` + interceptor in new development instead of the `post_insert` interceptor. + When both interceptors are used, this `post_insert_with_metadata` interceptor runs after the + `post_insert` interceptor. The (possibly modified) response returned by + `post_insert` will be passed to + `post_insert_with_metadata`. 
+ """ + return response, metadata + def pre_list( self, request: compute.ListInstanceTemplatesRequest, @@ -276,12 +393,35 @@ def post_list( ) -> compute.InstanceTemplateList: """Post-rpc interceptor for list - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the InstanceTemplates server but before - it is returned to user code. + it is returned to user code. This `post_list` interceptor runs + before the `post_list_with_metadata` interceptor. """ return response + def post_list_with_metadata( + self, + response: compute.InstanceTemplateList, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.InstanceTemplateList, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the InstanceTemplates server but before it is returned to user code. + + We recommend only using this `post_list_with_metadata` + interceptor in new development instead of the `post_list` interceptor. + When both interceptors are used, this `post_list_with_metadata` interceptor runs after the + `post_list` interceptor. The (possibly modified) response returned by + `post_list` will be passed to + `post_list_with_metadata`. + """ + return response, metadata + def pre_set_iam_policy( self, request: compute.SetIamPolicyInstanceTemplateRequest, @@ -300,12 +440,35 @@ def pre_set_iam_policy( def post_set_iam_policy(self, response: compute.Policy) -> compute.Policy: """Post-rpc interceptor for set_iam_policy - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_set_iam_policy_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the InstanceTemplates server but before - it is returned to user code. + it is returned to user code. This `post_set_iam_policy` interceptor runs + before the `post_set_iam_policy_with_metadata` interceptor. """ return response + def post_set_iam_policy_with_metadata( + self, + response: compute.Policy, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Policy, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for set_iam_policy + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the InstanceTemplates server but before it is returned to user code. + + We recommend only using this `post_set_iam_policy_with_metadata` + interceptor in new development instead of the `post_set_iam_policy` interceptor. + When both interceptors are used, this `post_set_iam_policy_with_metadata` interceptor runs after the + `post_set_iam_policy` interceptor. The (possibly modified) response returned by + `post_set_iam_policy` will be passed to + `post_set_iam_policy_with_metadata`. + """ + return response, metadata + def pre_test_iam_permissions( self, request: compute.TestIamPermissionsInstanceTemplateRequest, @@ -326,12 +489,37 @@ def post_test_iam_permissions( ) -> compute.TestPermissionsResponse: """Post-rpc interceptor for test_iam_permissions - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_test_iam_permissions_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response after it is returned by the InstanceTemplates server but before - it is returned to user code. + it is returned to user code. This `post_test_iam_permissions` interceptor runs + before the `post_test_iam_permissions_with_metadata` interceptor. """ return response + def post_test_iam_permissions_with_metadata( + self, + response: compute.TestPermissionsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.TestPermissionsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for test_iam_permissions + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the InstanceTemplates server but before it is returned to user code. + + We recommend only using this `post_test_iam_permissions_with_metadata` + interceptor in new development instead of the `post_test_iam_permissions` interceptor. + When both interceptors are used, this `post_test_iam_permissions_with_metadata` interceptor runs after the + `post_test_iam_permissions` interceptor. The (possibly modified) response returned by + `post_test_iam_permissions` will be passed to + `post_test_iam_permissions_with_metadata`. + """ + return response, metadata + @dataclasses.dataclass class InstanceTemplatesRestStub: @@ -545,6 +733,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_aggregated_list(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_aggregated_list_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -710,6 +902,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -867,6 +1063,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1032,6 +1230,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_iam_policy(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_iam_policy_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1203,6 +1405,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_insert(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_insert_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1349,6 +1555,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list(resp) + response_metadata = [(k, str(v)) 
for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1520,6 +1728,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_set_iam_policy(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_set_iam_policy_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1671,6 +1883,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_test_iam_permissions(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_test_iam_permissions_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/instances/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/instances/client.py index 73fb9fb7f43f..5df2a088c47e 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/instances/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/instances/client.py @@ -15,6 +15,8 @@ # from collections import OrderedDict import functools +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -459,6 +461,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/instances/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/instances/transports/rest.py index 0dda8127c528..112f97691957 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/instances/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/instances/transports/rest.py @@ -475,12 +475,35 @@ def pre_add_access_config( def post_add_access_config(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for add_access_config - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_add_access_config_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Instances server but before - it is returned to user code. 
+ it is returned to user code. This `post_add_access_config` interceptor runs + before the `post_add_access_config_with_metadata` interceptor. """ return response + def post_add_access_config_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for add_access_config + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Instances server but before it is returned to user code. + + We recommend only using this `post_add_access_config_with_metadata` + interceptor in new development instead of the `post_add_access_config` interceptor. + When both interceptors are used, this `post_add_access_config_with_metadata` interceptor runs after the + `post_add_access_config` interceptor. The (possibly modified) response returned by + `post_add_access_config` will be passed to + `post_add_access_config_with_metadata`. + """ + return response, metadata + def pre_add_resource_policies( self, request: compute.AddResourcePoliciesInstanceRequest, @@ -501,12 +524,35 @@ def post_add_resource_policies( ) -> compute.Operation: """Post-rpc interceptor for add_resource_policies - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_add_resource_policies_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Instances server but before - it is returned to user code. + it is returned to user code. This `post_add_resource_policies` interceptor runs + before the `post_add_resource_policies_with_metadata` interceptor. """ return response + def post_add_resource_policies_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for add_resource_policies + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Instances server but before it is returned to user code. + + We recommend only using this `post_add_resource_policies_with_metadata` + interceptor in new development instead of the `post_add_resource_policies` interceptor. + When both interceptors are used, this `post_add_resource_policies_with_metadata` interceptor runs after the + `post_add_resource_policies` interceptor. The (possibly modified) response returned by + `post_add_resource_policies` will be passed to + `post_add_resource_policies_with_metadata`. + """ + return response, metadata + def pre_aggregated_list( self, request: compute.AggregatedListInstancesRequest, @@ -526,12 +572,35 @@ def post_aggregated_list( ) -> compute.InstanceAggregatedList: """Post-rpc interceptor for aggregated_list - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_aggregated_list_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Instances server but before - it is returned to user code. + it is returned to user code. This `post_aggregated_list` interceptor runs + before the `post_aggregated_list_with_metadata` interceptor. 
""" return response + def post_aggregated_list_with_metadata( + self, + response: compute.InstanceAggregatedList, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.InstanceAggregatedList, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for aggregated_list + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Instances server but before it is returned to user code. + + We recommend only using this `post_aggregated_list_with_metadata` + interceptor in new development instead of the `post_aggregated_list` interceptor. + When both interceptors are used, this `post_aggregated_list_with_metadata` interceptor runs after the + `post_aggregated_list` interceptor. The (possibly modified) response returned by + `post_aggregated_list` will be passed to + `post_aggregated_list_with_metadata`. + """ + return response, metadata + def pre_attach_disk( self, request: compute.AttachDiskInstanceRequest, @@ -549,12 +618,35 @@ def pre_attach_disk( def post_attach_disk(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for attach_disk - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_attach_disk_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Instances server but before - it is returned to user code. + it is returned to user code. This `post_attach_disk` interceptor runs + before the `post_attach_disk_with_metadata` interceptor. """ return response + def post_attach_disk_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for attach_disk + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Instances server but before it is returned to user code. + + We recommend only using this `post_attach_disk_with_metadata` + interceptor in new development instead of the `post_attach_disk` interceptor. + When both interceptors are used, this `post_attach_disk_with_metadata` interceptor runs after the + `post_attach_disk` interceptor. The (possibly modified) response returned by + `post_attach_disk` will be passed to + `post_attach_disk_with_metadata`. + """ + return response, metadata + def pre_bulk_insert( self, request: compute.BulkInsertInstanceRequest, @@ -572,12 +664,35 @@ def pre_bulk_insert( def post_bulk_insert(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for bulk_insert - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_bulk_insert_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Instances server but before - it is returned to user code. + it is returned to user code. This `post_bulk_insert` interceptor runs + before the `post_bulk_insert_with_metadata` interceptor. """ return response + def post_bulk_insert_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for bulk_insert + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Instances server but before it is returned to user code. 
+ + We recommend only using this `post_bulk_insert_with_metadata` + interceptor in new development instead of the `post_bulk_insert` interceptor. + When both interceptors are used, this `post_bulk_insert_with_metadata` interceptor runs after the + `post_bulk_insert` interceptor. The (possibly modified) response returned by + `post_bulk_insert` will be passed to + `post_bulk_insert_with_metadata`. + """ + return response, metadata + def pre_delete( self, request: compute.DeleteInstanceRequest, @@ -593,12 +708,35 @@ def pre_delete( def post_delete(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for delete - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Instances server but before - it is returned to user code. + it is returned to user code. This `post_delete` interceptor runs + before the `post_delete_with_metadata` interceptor. """ return response + def post_delete_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Instances server but before it is returned to user code. + + We recommend only using this `post_delete_with_metadata` + interceptor in new development instead of the `post_delete` interceptor. + When both interceptors are used, this `post_delete_with_metadata` interceptor runs after the + `post_delete` interceptor. The (possibly modified) response returned by + `post_delete` will be passed to + `post_delete_with_metadata`. + """ + return response, metadata + def pre_delete_access_config( self, request: compute.DeleteAccessConfigInstanceRequest, @@ -619,12 +757,35 @@ def post_delete_access_config( ) -> compute.Operation: """Post-rpc interceptor for delete_access_config - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_access_config_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Instances server but before - it is returned to user code. + it is returned to user code. This `post_delete_access_config` interceptor runs + before the `post_delete_access_config_with_metadata` interceptor. """ return response + def post_delete_access_config_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_access_config + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Instances server but before it is returned to user code. + + We recommend only using this `post_delete_access_config_with_metadata` + interceptor in new development instead of the `post_delete_access_config` interceptor. + When both interceptors are used, this `post_delete_access_config_with_metadata` interceptor runs after the + `post_delete_access_config` interceptor. The (possibly modified) response returned by + `post_delete_access_config` will be passed to + `post_delete_access_config_with_metadata`. 
+ """ + return response, metadata + def pre_detach_disk( self, request: compute.DetachDiskInstanceRequest, @@ -642,12 +803,35 @@ def pre_detach_disk( def post_detach_disk(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for detach_disk - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_detach_disk_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Instances server but before - it is returned to user code. + it is returned to user code. This `post_detach_disk` interceptor runs + before the `post_detach_disk_with_metadata` interceptor. """ return response + def post_detach_disk_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for detach_disk + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Instances server but before it is returned to user code. + + We recommend only using this `post_detach_disk_with_metadata` + interceptor in new development instead of the `post_detach_disk` interceptor. + When both interceptors are used, this `post_detach_disk_with_metadata` interceptor runs after the + `post_detach_disk` interceptor. The (possibly modified) response returned by + `post_detach_disk` will be passed to + `post_detach_disk_with_metadata`. + """ + return response, metadata + def pre_get( self, request: compute.GetInstanceRequest, @@ -663,12 +847,35 @@ def pre_get( def post_get(self, response: compute.Instance) -> compute.Instance: """Post-rpc interceptor for get - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Instances server but before - it is returned to user code. + it is returned to user code. This `post_get` interceptor runs + before the `post_get_with_metadata` interceptor. """ return response + def post_get_with_metadata( + self, + response: compute.Instance, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Instance, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Instances server but before it is returned to user code. + + We recommend only using this `post_get_with_metadata` + interceptor in new development instead of the `post_get` interceptor. + When both interceptors are used, this `post_get_with_metadata` interceptor runs after the + `post_get` interceptor. The (possibly modified) response returned by + `post_get` will be passed to + `post_get_with_metadata`. + """ + return response, metadata + def pre_get_effective_firewalls( self, request: compute.GetEffectiveFirewallsInstanceRequest, @@ -689,12 +896,38 @@ def post_get_effective_firewalls( ) -> compute.InstancesGetEffectiveFirewallsResponse: """Post-rpc interceptor for get_effective_firewalls - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_effective_firewalls_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Instances server but before - it is returned to user code. + it is returned to user code. 
This `post_get_effective_firewalls` interceptor runs + before the `post_get_effective_firewalls_with_metadata` interceptor. """ return response + def post_get_effective_firewalls_with_metadata( + self, + response: compute.InstancesGetEffectiveFirewallsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.InstancesGetEffectiveFirewallsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for get_effective_firewalls + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Instances server but before it is returned to user code. + + We recommend only using this `post_get_effective_firewalls_with_metadata` + interceptor in new development instead of the `post_get_effective_firewalls` interceptor. + When both interceptors are used, this `post_get_effective_firewalls_with_metadata` interceptor runs after the + `post_get_effective_firewalls` interceptor. The (possibly modified) response returned by + `post_get_effective_firewalls` will be passed to + `post_get_effective_firewalls_with_metadata`. + """ + return response, metadata + def pre_get_guest_attributes( self, request: compute.GetGuestAttributesInstanceRequest, @@ -715,12 +948,35 @@ def post_get_guest_attributes( ) -> compute.GuestAttributes: """Post-rpc interceptor for get_guest_attributes - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_guest_attributes_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Instances server but before - it is returned to user code. + it is returned to user code. This `post_get_guest_attributes` interceptor runs + before the `post_get_guest_attributes_with_metadata` interceptor. """ return response + def post_get_guest_attributes_with_metadata( + self, + response: compute.GuestAttributes, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.GuestAttributes, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_guest_attributes + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Instances server but before it is returned to user code. + + We recommend only using this `post_get_guest_attributes_with_metadata` + interceptor in new development instead of the `post_get_guest_attributes` interceptor. + When both interceptors are used, this `post_get_guest_attributes_with_metadata` interceptor runs after the + `post_get_guest_attributes` interceptor. The (possibly modified) response returned by + `post_get_guest_attributes` will be passed to + `post_get_guest_attributes_with_metadata`. + """ + return response, metadata + def pre_get_iam_policy( self, request: compute.GetIamPolicyInstanceRequest, @@ -738,12 +994,35 @@ def pre_get_iam_policy( def post_get_iam_policy(self, response: compute.Policy) -> compute.Policy: """Post-rpc interceptor for get_iam_policy - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_iam_policy_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Instances server but before - it is returned to user code. + it is returned to user code. This `post_get_iam_policy` interceptor runs + before the `post_get_iam_policy_with_metadata` interceptor. 
""" return response + def post_get_iam_policy_with_metadata( + self, + response: compute.Policy, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Policy, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_iam_policy + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Instances server but before it is returned to user code. + + We recommend only using this `post_get_iam_policy_with_metadata` + interceptor in new development instead of the `post_get_iam_policy` interceptor. + When both interceptors are used, this `post_get_iam_policy_with_metadata` interceptor runs after the + `post_get_iam_policy` interceptor. The (possibly modified) response returned by + `post_get_iam_policy` will be passed to + `post_get_iam_policy_with_metadata`. + """ + return response, metadata + def pre_get_screenshot( self, request: compute.GetScreenshotInstanceRequest, @@ -761,12 +1040,35 @@ def pre_get_screenshot( def post_get_screenshot(self, response: compute.Screenshot) -> compute.Screenshot: """Post-rpc interceptor for get_screenshot - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_screenshot_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Instances server but before - it is returned to user code. + it is returned to user code. This `post_get_screenshot` interceptor runs + before the `post_get_screenshot_with_metadata` interceptor. """ return response + def post_get_screenshot_with_metadata( + self, + response: compute.Screenshot, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Screenshot, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_screenshot + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Instances server but before it is returned to user code. + + We recommend only using this `post_get_screenshot_with_metadata` + interceptor in new development instead of the `post_get_screenshot` interceptor. + When both interceptors are used, this `post_get_screenshot_with_metadata` interceptor runs after the + `post_get_screenshot` interceptor. The (possibly modified) response returned by + `post_get_screenshot` will be passed to + `post_get_screenshot_with_metadata`. + """ + return response, metadata + def pre_get_serial_port_output( self, request: compute.GetSerialPortOutputInstanceRequest, @@ -787,12 +1089,35 @@ def post_get_serial_port_output( ) -> compute.SerialPortOutput: """Post-rpc interceptor for get_serial_port_output - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_serial_port_output_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Instances server but before - it is returned to user code. + it is returned to user code. This `post_get_serial_port_output` interceptor runs + before the `post_get_serial_port_output_with_metadata` interceptor. 
""" return response + def post_get_serial_port_output_with_metadata( + self, + response: compute.SerialPortOutput, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.SerialPortOutput, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_serial_port_output + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Instances server but before it is returned to user code. + + We recommend only using this `post_get_serial_port_output_with_metadata` + interceptor in new development instead of the `post_get_serial_port_output` interceptor. + When both interceptors are used, this `post_get_serial_port_output_with_metadata` interceptor runs after the + `post_get_serial_port_output` interceptor. The (possibly modified) response returned by + `post_get_serial_port_output` will be passed to + `post_get_serial_port_output_with_metadata`. + """ + return response, metadata + def pre_get_shielded_instance_identity( self, request: compute.GetShieldedInstanceIdentityInstanceRequest, @@ -813,12 +1138,37 @@ def post_get_shielded_instance_identity( ) -> compute.ShieldedInstanceIdentity: """Post-rpc interceptor for get_shielded_instance_identity - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_shielded_instance_identity_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Instances server but before - it is returned to user code. + it is returned to user code. This `post_get_shielded_instance_identity` interceptor runs + before the `post_get_shielded_instance_identity_with_metadata` interceptor. """ return response + def post_get_shielded_instance_identity_with_metadata( + self, + response: compute.ShieldedInstanceIdentity, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.ShieldedInstanceIdentity, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for get_shielded_instance_identity + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Instances server but before it is returned to user code. + + We recommend only using this `post_get_shielded_instance_identity_with_metadata` + interceptor in new development instead of the `post_get_shielded_instance_identity` interceptor. + When both interceptors are used, this `post_get_shielded_instance_identity_with_metadata` interceptor runs after the + `post_get_shielded_instance_identity` interceptor. The (possibly modified) response returned by + `post_get_shielded_instance_identity` will be passed to + `post_get_shielded_instance_identity_with_metadata`. + """ + return response, metadata + def pre_insert( self, request: compute.InsertInstanceRequest, @@ -834,12 +1184,35 @@ def pre_insert( def post_insert(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for insert - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_insert_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Instances server but before - it is returned to user code. + it is returned to user code. This `post_insert` interceptor runs + before the `post_insert_with_metadata` interceptor. 
""" return response + def post_insert_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for insert + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Instances server but before it is returned to user code. + + We recommend only using this `post_insert_with_metadata` + interceptor in new development instead of the `post_insert` interceptor. + When both interceptors are used, this `post_insert_with_metadata` interceptor runs after the + `post_insert` interceptor. The (possibly modified) response returned by + `post_insert` will be passed to + `post_insert_with_metadata`. + """ + return response, metadata + def pre_list( self, request: compute.ListInstancesRequest, @@ -855,12 +1228,35 @@ def pre_list( def post_list(self, response: compute.InstanceList) -> compute.InstanceList: """Post-rpc interceptor for list - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Instances server but before - it is returned to user code. + it is returned to user code. This `post_list` interceptor runs + before the `post_list_with_metadata` interceptor. """ return response + def post_list_with_metadata( + self, + response: compute.InstanceList, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.InstanceList, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Instances server but before it is returned to user code. + + We recommend only using this `post_list_with_metadata` + interceptor in new development instead of the `post_list` interceptor. + When both interceptors are used, this `post_list_with_metadata` interceptor runs after the + `post_list` interceptor. The (possibly modified) response returned by + `post_list` will be passed to + `post_list_with_metadata`. + """ + return response, metadata + def pre_list_referrers( self, request: compute.ListReferrersInstancesRequest, @@ -880,12 +1276,35 @@ def post_list_referrers( ) -> compute.InstanceListReferrers: """Post-rpc interceptor for list_referrers - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_referrers_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Instances server but before - it is returned to user code. + it is returned to user code. This `post_list_referrers` interceptor runs + before the `post_list_referrers_with_metadata` interceptor. """ return response + def post_list_referrers_with_metadata( + self, + response: compute.InstanceListReferrers, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.InstanceListReferrers, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_referrers + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Instances server but before it is returned to user code. + + We recommend only using this `post_list_referrers_with_metadata` + interceptor in new development instead of the `post_list_referrers` interceptor. 
+ When both interceptors are used, this `post_list_referrers_with_metadata` interceptor runs after the + `post_list_referrers` interceptor. The (possibly modified) response returned by + `post_list_referrers` will be passed to + `post_list_referrers_with_metadata`. + """ + return response, metadata + def pre_perform_maintenance( self, request: compute.PerformMaintenanceInstanceRequest, @@ -906,12 +1325,35 @@ def post_perform_maintenance( ) -> compute.Operation: """Post-rpc interceptor for perform_maintenance - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_perform_maintenance_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Instances server but before - it is returned to user code. + it is returned to user code. This `post_perform_maintenance` interceptor runs + before the `post_perform_maintenance_with_metadata` interceptor. """ return response + def post_perform_maintenance_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for perform_maintenance + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Instances server but before it is returned to user code. + + We recommend only using this `post_perform_maintenance_with_metadata` + interceptor in new development instead of the `post_perform_maintenance` interceptor. + When both interceptors are used, this `post_perform_maintenance_with_metadata` interceptor runs after the + `post_perform_maintenance` interceptor. The (possibly modified) response returned by + `post_perform_maintenance` will be passed to + `post_perform_maintenance_with_metadata`. + """ + return response, metadata + def pre_remove_resource_policies( self, request: compute.RemoveResourcePoliciesInstanceRequest, @@ -932,12 +1374,35 @@ def post_remove_resource_policies( ) -> compute.Operation: """Post-rpc interceptor for remove_resource_policies - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_remove_resource_policies_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Instances server but before - it is returned to user code. + it is returned to user code. This `post_remove_resource_policies` interceptor runs + before the `post_remove_resource_policies_with_metadata` interceptor. """ return response + def post_remove_resource_policies_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for remove_resource_policies + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Instances server but before it is returned to user code. + + We recommend only using this `post_remove_resource_policies_with_metadata` + interceptor in new development instead of the `post_remove_resource_policies` interceptor. + When both interceptors are used, this `post_remove_resource_policies_with_metadata` interceptor runs after the + `post_remove_resource_policies` interceptor. The (possibly modified) response returned by + `post_remove_resource_policies` will be passed to + `post_remove_resource_policies_with_metadata`. 
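For illustration only, and not part of this diff: a minimal sketch of how a caller might opt into one of the new *_with_metadata post interceptors to read the HTTP response headers. The names below (InstancesRestInterceptor, InstancesRestTransport, their import path, and the interceptor= constructor argument) are assumptions inferred from the surrounding generated code rather than anything confirmed by this diff; adjust them to the actual generated surface.

    from typing import Sequence, Tuple, Union

    from google.cloud import compute_v1
    from google.cloud.compute_v1.types import compute
    from google.cloud.compute_v1.services.instances.transports.rest import (
        InstancesRestInterceptor,  # assumed class name
        InstancesRestTransport,    # assumed class name
    )

    class HeaderLoggingInterceptor(InstancesRestInterceptor):
        """Reads the response metadata for the insert RPC."""

        def post_insert_with_metadata(
            self,
            response: compute.Operation,
            metadata: Sequence[Tuple[str, Union[str, bytes]]],
        ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]:
            # `metadata` carries the HTTP response headers as (key, value) pairs.
            for key, value in metadata:
                print(f"response header {key}: {value}")
            # Return both values unchanged so the client sees the normal response.
            return response, metadata

    # Hypothetical wiring; assumes Application Default Credentials are available.
    client = compute_v1.InstancesClient(
        transport=InstancesRestTransport(interceptor=HeaderLoggingInterceptor())
    )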
+ """ + return response, metadata + def pre_reset( self, request: compute.ResetInstanceRequest, @@ -953,12 +1418,35 @@ def pre_reset( def post_reset(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for reset - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_reset_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Instances server but before - it is returned to user code. + it is returned to user code. This `post_reset` interceptor runs + before the `post_reset_with_metadata` interceptor. """ return response + def post_reset_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for reset + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Instances server but before it is returned to user code. + + We recommend only using this `post_reset_with_metadata` + interceptor in new development instead of the `post_reset` interceptor. + When both interceptors are used, this `post_reset_with_metadata` interceptor runs after the + `post_reset` interceptor. The (possibly modified) response returned by + `post_reset` will be passed to + `post_reset_with_metadata`. + """ + return response, metadata + def pre_resume( self, request: compute.ResumeInstanceRequest, @@ -974,12 +1462,35 @@ def pre_resume( def post_resume(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for resume - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_resume_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Instances server but before - it is returned to user code. + it is returned to user code. This `post_resume` interceptor runs + before the `post_resume_with_metadata` interceptor. """ return response + def post_resume_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for resume + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Instances server but before it is returned to user code. + + We recommend only using this `post_resume_with_metadata` + interceptor in new development instead of the `post_resume` interceptor. + When both interceptors are used, this `post_resume_with_metadata` interceptor runs after the + `post_resume` interceptor. The (possibly modified) response returned by + `post_resume` will be passed to + `post_resume_with_metadata`. + """ + return response, metadata + def pre_send_diagnostic_interrupt( self, request: compute.SendDiagnosticInterruptInstanceRequest, @@ -1000,12 +1511,38 @@ def post_send_diagnostic_interrupt( ) -> compute.SendDiagnosticInterruptInstanceResponse: """Post-rpc interceptor for send_diagnostic_interrupt - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_send_diagnostic_interrupt_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Instances server but before - it is returned to user code. + it is returned to user code. 
This `post_send_diagnostic_interrupt` interceptor runs + before the `post_send_diagnostic_interrupt_with_metadata` interceptor. """ return response + def post_send_diagnostic_interrupt_with_metadata( + self, + response: compute.SendDiagnosticInterruptInstanceResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.SendDiagnosticInterruptInstanceResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for send_diagnostic_interrupt + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Instances server but before it is returned to user code. + + We recommend only using this `post_send_diagnostic_interrupt_with_metadata` + interceptor in new development instead of the `post_send_diagnostic_interrupt` interceptor. + When both interceptors are used, this `post_send_diagnostic_interrupt_with_metadata` interceptor runs after the + `post_send_diagnostic_interrupt` interceptor. The (possibly modified) response returned by + `post_send_diagnostic_interrupt` will be passed to + `post_send_diagnostic_interrupt_with_metadata`. + """ + return response, metadata + def pre_set_deletion_protection( self, request: compute.SetDeletionProtectionInstanceRequest, @@ -1026,12 +1563,35 @@ def post_set_deletion_protection( ) -> compute.Operation: """Post-rpc interceptor for set_deletion_protection - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_set_deletion_protection_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Instances server but before - it is returned to user code. + it is returned to user code. This `post_set_deletion_protection` interceptor runs + before the `post_set_deletion_protection_with_metadata` interceptor. """ return response + def post_set_deletion_protection_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for set_deletion_protection + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Instances server but before it is returned to user code. + + We recommend only using this `post_set_deletion_protection_with_metadata` + interceptor in new development instead of the `post_set_deletion_protection` interceptor. + When both interceptors are used, this `post_set_deletion_protection_with_metadata` interceptor runs after the + `post_set_deletion_protection` interceptor. The (possibly modified) response returned by + `post_set_deletion_protection` will be passed to + `post_set_deletion_protection_with_metadata`. + """ + return response, metadata + def pre_set_disk_auto_delete( self, request: compute.SetDiskAutoDeleteInstanceRequest, @@ -1052,12 +1612,35 @@ def post_set_disk_auto_delete( ) -> compute.Operation: """Post-rpc interceptor for set_disk_auto_delete - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_set_disk_auto_delete_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Instances server but before - it is returned to user code. + it is returned to user code. This `post_set_disk_auto_delete` interceptor runs + before the `post_set_disk_auto_delete_with_metadata` interceptor. 
""" return response + def post_set_disk_auto_delete_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for set_disk_auto_delete + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Instances server but before it is returned to user code. + + We recommend only using this `post_set_disk_auto_delete_with_metadata` + interceptor in new development instead of the `post_set_disk_auto_delete` interceptor. + When both interceptors are used, this `post_set_disk_auto_delete_with_metadata` interceptor runs after the + `post_set_disk_auto_delete` interceptor. The (possibly modified) response returned by + `post_set_disk_auto_delete` will be passed to + `post_set_disk_auto_delete_with_metadata`. + """ + return response, metadata + def pre_set_iam_policy( self, request: compute.SetIamPolicyInstanceRequest, @@ -1075,12 +1658,35 @@ def pre_set_iam_policy( def post_set_iam_policy(self, response: compute.Policy) -> compute.Policy: """Post-rpc interceptor for set_iam_policy - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_set_iam_policy_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Instances server but before - it is returned to user code. + it is returned to user code. This `post_set_iam_policy` interceptor runs + before the `post_set_iam_policy_with_metadata` interceptor. """ return response + def post_set_iam_policy_with_metadata( + self, + response: compute.Policy, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Policy, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for set_iam_policy + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Instances server but before it is returned to user code. + + We recommend only using this `post_set_iam_policy_with_metadata` + interceptor in new development instead of the `post_set_iam_policy` interceptor. + When both interceptors are used, this `post_set_iam_policy_with_metadata` interceptor runs after the + `post_set_iam_policy` interceptor. The (possibly modified) response returned by + `post_set_iam_policy` will be passed to + `post_set_iam_policy_with_metadata`. + """ + return response, metadata + def pre_set_labels( self, request: compute.SetLabelsInstanceRequest, @@ -1098,12 +1704,35 @@ def pre_set_labels( def post_set_labels(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for set_labels - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_set_labels_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Instances server but before - it is returned to user code. + it is returned to user code. This `post_set_labels` interceptor runs + before the `post_set_labels_with_metadata` interceptor. """ return response + def post_set_labels_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for set_labels + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Instances server but before it is returned to user code. 
+ + We recommend only using this `post_set_labels_with_metadata` + interceptor in new development instead of the `post_set_labels` interceptor. + When both interceptors are used, this `post_set_labels_with_metadata` interceptor runs after the + `post_set_labels` interceptor. The (possibly modified) response returned by + `post_set_labels` will be passed to + `post_set_labels_with_metadata`. + """ + return response, metadata + def pre_set_machine_resources( self, request: compute.SetMachineResourcesInstanceRequest, @@ -1124,12 +1753,35 @@ def post_set_machine_resources( ) -> compute.Operation: """Post-rpc interceptor for set_machine_resources - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_set_machine_resources_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Instances server but before - it is returned to user code. + it is returned to user code. This `post_set_machine_resources` interceptor runs + before the `post_set_machine_resources_with_metadata` interceptor. """ return response + def post_set_machine_resources_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for set_machine_resources + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Instances server but before it is returned to user code. + + We recommend only using this `post_set_machine_resources_with_metadata` + interceptor in new development instead of the `post_set_machine_resources` interceptor. + When both interceptors are used, this `post_set_machine_resources_with_metadata` interceptor runs after the + `post_set_machine_resources` interceptor. The (possibly modified) response returned by + `post_set_machine_resources` will be passed to + `post_set_machine_resources_with_metadata`. + """ + return response, metadata + def pre_set_machine_type( self, request: compute.SetMachineTypeInstanceRequest, @@ -1147,12 +1799,35 @@ def pre_set_machine_type( def post_set_machine_type(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for set_machine_type - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_set_machine_type_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Instances server but before - it is returned to user code. + it is returned to user code. This `post_set_machine_type` interceptor runs + before the `post_set_machine_type_with_metadata` interceptor. """ return response + def post_set_machine_type_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for set_machine_type + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Instances server but before it is returned to user code. + + We recommend only using this `post_set_machine_type_with_metadata` + interceptor in new development instead of the `post_set_machine_type` interceptor. + When both interceptors are used, this `post_set_machine_type_with_metadata` interceptor runs after the + `post_set_machine_type` interceptor. 
The (possibly modified) response returned by + `post_set_machine_type` will be passed to + `post_set_machine_type_with_metadata`. + """ + return response, metadata + def pre_set_metadata( self, request: compute.SetMetadataInstanceRequest, @@ -1170,12 +1845,35 @@ def pre_set_metadata( def post_set_metadata(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for set_metadata - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_set_metadata_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Instances server but before - it is returned to user code. + it is returned to user code. This `post_set_metadata` interceptor runs + before the `post_set_metadata_with_metadata` interceptor. """ return response + def post_set_metadata_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for set_metadata + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Instances server but before it is returned to user code. + + We recommend only using this `post_set_metadata_with_metadata` + interceptor in new development instead of the `post_set_metadata` interceptor. + When both interceptors are used, this `post_set_metadata_with_metadata` interceptor runs after the + `post_set_metadata` interceptor. The (possibly modified) response returned by + `post_set_metadata` will be passed to + `post_set_metadata_with_metadata`. + """ + return response, metadata + def pre_set_min_cpu_platform( self, request: compute.SetMinCpuPlatformInstanceRequest, @@ -1196,12 +1894,35 @@ def post_set_min_cpu_platform( ) -> compute.Operation: """Post-rpc interceptor for set_min_cpu_platform - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_set_min_cpu_platform_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Instances server but before - it is returned to user code. + it is returned to user code. This `post_set_min_cpu_platform` interceptor runs + before the `post_set_min_cpu_platform_with_metadata` interceptor. """ return response + def post_set_min_cpu_platform_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for set_min_cpu_platform + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Instances server but before it is returned to user code. + + We recommend only using this `post_set_min_cpu_platform_with_metadata` + interceptor in new development instead of the `post_set_min_cpu_platform` interceptor. + When both interceptors are used, this `post_set_min_cpu_platform_with_metadata` interceptor runs after the + `post_set_min_cpu_platform` interceptor. The (possibly modified) response returned by + `post_set_min_cpu_platform` will be passed to + `post_set_min_cpu_platform_with_metadata`. 
+ """ + return response, metadata + def pre_set_name( self, request: compute.SetNameInstanceRequest, @@ -1217,12 +1938,35 @@ def pre_set_name( def post_set_name(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for set_name - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_set_name_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Instances server but before - it is returned to user code. + it is returned to user code. This `post_set_name` interceptor runs + before the `post_set_name_with_metadata` interceptor. """ return response + def post_set_name_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for set_name + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Instances server but before it is returned to user code. + + We recommend only using this `post_set_name_with_metadata` + interceptor in new development instead of the `post_set_name` interceptor. + When both interceptors are used, this `post_set_name_with_metadata` interceptor runs after the + `post_set_name` interceptor. The (possibly modified) response returned by + `post_set_name` will be passed to + `post_set_name_with_metadata`. + """ + return response, metadata + def pre_set_scheduling( self, request: compute.SetSchedulingInstanceRequest, @@ -1240,12 +1984,35 @@ def pre_set_scheduling( def post_set_scheduling(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for set_scheduling - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_set_scheduling_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Instances server but before - it is returned to user code. + it is returned to user code. This `post_set_scheduling` interceptor runs + before the `post_set_scheduling_with_metadata` interceptor. """ return response + def post_set_scheduling_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for set_scheduling + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Instances server but before it is returned to user code. + + We recommend only using this `post_set_scheduling_with_metadata` + interceptor in new development instead of the `post_set_scheduling` interceptor. + When both interceptors are used, this `post_set_scheduling_with_metadata` interceptor runs after the + `post_set_scheduling` interceptor. The (possibly modified) response returned by + `post_set_scheduling` will be passed to + `post_set_scheduling_with_metadata`. + """ + return response, metadata + def pre_set_security_policy( self, request: compute.SetSecurityPolicyInstanceRequest, @@ -1266,12 +2033,35 @@ def post_set_security_policy( ) -> compute.Operation: """Post-rpc interceptor for set_security_policy - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_set_security_policy_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response after it is returned by the Instances server but before - it is returned to user code. + it is returned to user code. This `post_set_security_policy` interceptor runs + before the `post_set_security_policy_with_metadata` interceptor. """ return response + def post_set_security_policy_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for set_security_policy + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Instances server but before it is returned to user code. + + We recommend only using this `post_set_security_policy_with_metadata` + interceptor in new development instead of the `post_set_security_policy` interceptor. + When both interceptors are used, this `post_set_security_policy_with_metadata` interceptor runs after the + `post_set_security_policy` interceptor. The (possibly modified) response returned by + `post_set_security_policy` will be passed to + `post_set_security_policy_with_metadata`. + """ + return response, metadata + def pre_set_service_account( self, request: compute.SetServiceAccountInstanceRequest, @@ -1292,12 +2082,35 @@ def post_set_service_account( ) -> compute.Operation: """Post-rpc interceptor for set_service_account - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_set_service_account_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Instances server but before - it is returned to user code. + it is returned to user code. This `post_set_service_account` interceptor runs + before the `post_set_service_account_with_metadata` interceptor. """ return response + def post_set_service_account_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for set_service_account + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Instances server but before it is returned to user code. + + We recommend only using this `post_set_service_account_with_metadata` + interceptor in new development instead of the `post_set_service_account` interceptor. + When both interceptors are used, this `post_set_service_account_with_metadata` interceptor runs after the + `post_set_service_account` interceptor. The (possibly modified) response returned by + `post_set_service_account` will be passed to + `post_set_service_account_with_metadata`. + """ + return response, metadata + def pre_set_shielded_instance_integrity_policy( self, request: compute.SetShieldedInstanceIntegrityPolicyInstanceRequest, @@ -1318,12 +2131,35 @@ def post_set_shielded_instance_integrity_policy( ) -> compute.Operation: """Post-rpc interceptor for set_shielded_instance_integrity_policy - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_set_shielded_instance_integrity_policy_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Instances server but before - it is returned to user code. + it is returned to user code. 
This `post_set_shielded_instance_integrity_policy` interceptor runs + before the `post_set_shielded_instance_integrity_policy_with_metadata` interceptor. """ return response + def post_set_shielded_instance_integrity_policy_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for set_shielded_instance_integrity_policy + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Instances server but before it is returned to user code. + + We recommend only using this `post_set_shielded_instance_integrity_policy_with_metadata` + interceptor in new development instead of the `post_set_shielded_instance_integrity_policy` interceptor. + When both interceptors are used, this `post_set_shielded_instance_integrity_policy_with_metadata` interceptor runs after the + `post_set_shielded_instance_integrity_policy` interceptor. The (possibly modified) response returned by + `post_set_shielded_instance_integrity_policy` will be passed to + `post_set_shielded_instance_integrity_policy_with_metadata`. + """ + return response, metadata + def pre_set_tags( self, request: compute.SetTagsInstanceRequest, @@ -1339,12 +2175,35 @@ def pre_set_tags( def post_set_tags(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for set_tags - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_set_tags_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Instances server but before - it is returned to user code. + it is returned to user code. This `post_set_tags` interceptor runs + before the `post_set_tags_with_metadata` interceptor. """ return response + def post_set_tags_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for set_tags + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Instances server but before it is returned to user code. + + We recommend only using this `post_set_tags_with_metadata` + interceptor in new development instead of the `post_set_tags` interceptor. + When both interceptors are used, this `post_set_tags_with_metadata` interceptor runs after the + `post_set_tags` interceptor. The (possibly modified) response returned by + `post_set_tags` will be passed to + `post_set_tags_with_metadata`. + """ + return response, metadata + def pre_simulate_maintenance_event( self, request: compute.SimulateMaintenanceEventInstanceRequest, @@ -1365,12 +2224,35 @@ def post_simulate_maintenance_event( ) -> compute.Operation: """Post-rpc interceptor for simulate_maintenance_event - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_simulate_maintenance_event_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Instances server but before - it is returned to user code. + it is returned to user code. This `post_simulate_maintenance_event` interceptor runs + before the `post_simulate_maintenance_event_with_metadata` interceptor. 
""" return response + def post_simulate_maintenance_event_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for simulate_maintenance_event + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Instances server but before it is returned to user code. + + We recommend only using this `post_simulate_maintenance_event_with_metadata` + interceptor in new development instead of the `post_simulate_maintenance_event` interceptor. + When both interceptors are used, this `post_simulate_maintenance_event_with_metadata` interceptor runs after the + `post_simulate_maintenance_event` interceptor. The (possibly modified) response returned by + `post_simulate_maintenance_event` will be passed to + `post_simulate_maintenance_event_with_metadata`. + """ + return response, metadata + def pre_start( self, request: compute.StartInstanceRequest, @@ -1386,12 +2268,35 @@ def pre_start( def post_start(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for start - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_start_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Instances server but before - it is returned to user code. + it is returned to user code. This `post_start` interceptor runs + before the `post_start_with_metadata` interceptor. """ return response + def post_start_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for start + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Instances server but before it is returned to user code. + + We recommend only using this `post_start_with_metadata` + interceptor in new development instead of the `post_start` interceptor. + When both interceptors are used, this `post_start_with_metadata` interceptor runs after the + `post_start` interceptor. The (possibly modified) response returned by + `post_start` will be passed to + `post_start_with_metadata`. + """ + return response, metadata + def pre_start_with_encryption_key( self, request: compute.StartWithEncryptionKeyInstanceRequest, @@ -1412,12 +2317,35 @@ def post_start_with_encryption_key( ) -> compute.Operation: """Post-rpc interceptor for start_with_encryption_key - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_start_with_encryption_key_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Instances server but before - it is returned to user code. + it is returned to user code. This `post_start_with_encryption_key` interceptor runs + before the `post_start_with_encryption_key_with_metadata` interceptor. """ return response + def post_start_with_encryption_key_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for start_with_encryption_key + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Instances server but before it is returned to user code. 
+ + We recommend only using this `post_start_with_encryption_key_with_metadata` + interceptor in new development instead of the `post_start_with_encryption_key` interceptor. + When both interceptors are used, this `post_start_with_encryption_key_with_metadata` interceptor runs after the + `post_start_with_encryption_key` interceptor. The (possibly modified) response returned by + `post_start_with_encryption_key` will be passed to + `post_start_with_encryption_key_with_metadata`. + """ + return response, metadata + def pre_stop( self, request: compute.StopInstanceRequest, @@ -1433,12 +2361,35 @@ def pre_stop( def post_stop(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for stop - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_stop_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Instances server but before - it is returned to user code. + it is returned to user code. This `post_stop` interceptor runs + before the `post_stop_with_metadata` interceptor. """ return response + def post_stop_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for stop + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Instances server but before it is returned to user code. + + We recommend only using this `post_stop_with_metadata` + interceptor in new development instead of the `post_stop` interceptor. + When both interceptors are used, this `post_stop_with_metadata` interceptor runs after the + `post_stop` interceptor. The (possibly modified) response returned by + `post_stop` will be passed to + `post_stop_with_metadata`. + """ + return response, metadata + def pre_suspend( self, request: compute.SuspendInstanceRequest, @@ -1454,12 +2405,35 @@ def pre_suspend( def post_suspend(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for suspend - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_suspend_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Instances server but before - it is returned to user code. + it is returned to user code. This `post_suspend` interceptor runs + before the `post_suspend_with_metadata` interceptor. """ return response + def post_suspend_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for suspend + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Instances server but before it is returned to user code. + + We recommend only using this `post_suspend_with_metadata` + interceptor in new development instead of the `post_suspend` interceptor. + When both interceptors are used, this `post_suspend_with_metadata` interceptor runs after the + `post_suspend` interceptor. The (possibly modified) response returned by + `post_suspend` will be passed to + `post_suspend_with_metadata`. 
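Also for illustration and not part of this diff: the docstrings above state that when both the deprecated post interceptor and its *_with_metadata counterpart are overridden, the plain interceptor runs first and whatever it returns is what the *_with_metadata interceptor receives. A minimal sketch of that ordering, again assuming the interceptor class is named InstancesRestInterceptor:

    class ChainedInterceptor(InstancesRestInterceptor):  # assumed class name
        def __init__(self):
            super().__init__()
            self.calls = []

        def post_suspend(self, response):
            # Runs first; its return value is handed to the method below.
            self.calls.append("post_suspend")
            return response

        def post_suspend_with_metadata(self, response, metadata):
            # Runs second; `response` is whatever post_suspend returned and
            # `metadata` holds the HTTP response headers as (key, value) pairs.
            self.calls.append("post_suspend_with_metadata")
            return response, metadata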
+ """ + return response, metadata + def pre_test_iam_permissions( self, request: compute.TestIamPermissionsInstanceRequest, @@ -1480,12 +2454,37 @@ def post_test_iam_permissions( ) -> compute.TestPermissionsResponse: """Post-rpc interceptor for test_iam_permissions - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_test_iam_permissions_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Instances server but before - it is returned to user code. + it is returned to user code. This `post_test_iam_permissions` interceptor runs + before the `post_test_iam_permissions_with_metadata` interceptor. """ return response + def post_test_iam_permissions_with_metadata( + self, + response: compute.TestPermissionsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.TestPermissionsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for test_iam_permissions + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Instances server but before it is returned to user code. + + We recommend only using this `post_test_iam_permissions_with_metadata` + interceptor in new development instead of the `post_test_iam_permissions` interceptor. + When both interceptors are used, this `post_test_iam_permissions_with_metadata` interceptor runs after the + `post_test_iam_permissions` interceptor. The (possibly modified) response returned by + `post_test_iam_permissions` will be passed to + `post_test_iam_permissions_with_metadata`. + """ + return response, metadata + def pre_update( self, request: compute.UpdateInstanceRequest, @@ -1501,12 +2500,35 @@ def pre_update( def post_update(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for update - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Instances server but before - it is returned to user code. + it is returned to user code. This `post_update` interceptor runs + before the `post_update_with_metadata` interceptor. """ return response + def post_update_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Instances server but before it is returned to user code. + + We recommend only using this `post_update_with_metadata` + interceptor in new development instead of the `post_update` interceptor. + When both interceptors are used, this `post_update_with_metadata` interceptor runs after the + `post_update` interceptor. The (possibly modified) response returned by + `post_update` will be passed to + `post_update_with_metadata`. + """ + return response, metadata + def pre_update_access_config( self, request: compute.UpdateAccessConfigInstanceRequest, @@ -1527,12 +2549,35 @@ def post_update_access_config( ) -> compute.Operation: """Post-rpc interceptor for update_access_config - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_access_config_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response after it is returned by the Instances server but before - it is returned to user code. + it is returned to user code. This `post_update_access_config` interceptor runs + before the `post_update_access_config_with_metadata` interceptor. """ return response + def post_update_access_config_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_access_config + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Instances server but before it is returned to user code. + + We recommend only using this `post_update_access_config_with_metadata` + interceptor in new development instead of the `post_update_access_config` interceptor. + When both interceptors are used, this `post_update_access_config_with_metadata` interceptor runs after the + `post_update_access_config` interceptor. The (possibly modified) response returned by + `post_update_access_config` will be passed to + `post_update_access_config_with_metadata`. + """ + return response, metadata + def pre_update_display_device( self, request: compute.UpdateDisplayDeviceInstanceRequest, @@ -1553,12 +2598,35 @@ def post_update_display_device( ) -> compute.Operation: """Post-rpc interceptor for update_display_device - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_display_device_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Instances server but before - it is returned to user code. + it is returned to user code. This `post_update_display_device` interceptor runs + before the `post_update_display_device_with_metadata` interceptor. """ return response + def post_update_display_device_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_display_device + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Instances server but before it is returned to user code. + + We recommend only using this `post_update_display_device_with_metadata` + interceptor in new development instead of the `post_update_display_device` interceptor. + When both interceptors are used, this `post_update_display_device_with_metadata` interceptor runs after the + `post_update_display_device` interceptor. The (possibly modified) response returned by + `post_update_display_device` will be passed to + `post_update_display_device_with_metadata`. + """ + return response, metadata + def pre_update_network_interface( self, request: compute.UpdateNetworkInterfaceInstanceRequest, @@ -1579,12 +2647,35 @@ def post_update_network_interface( ) -> compute.Operation: """Post-rpc interceptor for update_network_interface - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_network_interface_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Instances server but before - it is returned to user code. + it is returned to user code. This `post_update_network_interface` interceptor runs + before the `post_update_network_interface_with_metadata` interceptor. 
""" return response + def post_update_network_interface_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_network_interface + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Instances server but before it is returned to user code. + + We recommend only using this `post_update_network_interface_with_metadata` + interceptor in new development instead of the `post_update_network_interface` interceptor. + When both interceptors are used, this `post_update_network_interface_with_metadata` interceptor runs after the + `post_update_network_interface` interceptor. The (possibly modified) response returned by + `post_update_network_interface` will be passed to + `post_update_network_interface_with_metadata`. + """ + return response, metadata + def pre_update_shielded_instance_config( self, request: compute.UpdateShieldedInstanceConfigInstanceRequest, @@ -1605,12 +2696,35 @@ def post_update_shielded_instance_config( ) -> compute.Operation: """Post-rpc interceptor for update_shielded_instance_config - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_shielded_instance_config_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Instances server but before - it is returned to user code. + it is returned to user code. This `post_update_shielded_instance_config` interceptor runs + before the `post_update_shielded_instance_config_with_metadata` interceptor. """ return response + def post_update_shielded_instance_config_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_shielded_instance_config + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Instances server but before it is returned to user code. + + We recommend only using this `post_update_shielded_instance_config_with_metadata` + interceptor in new development instead of the `post_update_shielded_instance_config` interceptor. + When both interceptors are used, this `post_update_shielded_instance_config_with_metadata` interceptor runs after the + `post_update_shielded_instance_config` interceptor. The (possibly modified) response returned by + `post_update_shielded_instance_config` will be passed to + `post_update_shielded_instance_config_with_metadata`. 
+ """ + return response, metadata + @dataclasses.dataclass class InstancesRestStub: @@ -1850,6 +2964,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_add_access_config(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_add_access_config_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2017,6 +3135,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_add_resource_policies(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_add_resource_policies_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2163,6 +3285,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_aggregated_list(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_aggregated_list_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2330,6 +3456,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_attach_disk(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_attach_disk_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2497,6 +3627,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_bulk_insert(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_bulk_insert_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2656,6 +3790,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2817,6 +3955,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_access_config(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_access_config_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2978,6 +4120,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_detach_disk(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_detach_disk_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3122,6 +4268,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = 
self._interceptor.post_get(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3266,6 +4414,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_effective_firewalls(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_effective_firewalls_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3412,6 +4564,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_guest_attributes(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_guest_attributes_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3581,6 +4737,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_iam_policy(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_iam_policy_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3727,6 +4887,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_screenshot(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_screenshot_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3871,6 +5035,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_serial_port_output(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_serial_port_output_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4018,6 +5186,13 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_shielded_instance_identity(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_get_shielded_instance_identity_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4185,6 +5360,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_insert(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_insert_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4324,6 +5503,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = 
self._interceptor.post_list_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4472,6 +5653,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_referrers(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_referrers_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4633,6 +5818,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_perform_maintenance(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_perform_maintenance_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4800,6 +5989,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_remove_resource_policies(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_remove_resource_policies_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4959,6 +6152,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_reset(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_reset_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -5118,6 +6315,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_resume(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_resume_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -5265,6 +6466,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_send_diagnostic_interrupt(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_send_diagnostic_interrupt_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -5430,6 +6635,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_set_deletion_protection(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_set_deletion_protection_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -5591,6 +6800,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_set_disk_auto_delete(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_set_disk_auto_delete_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO 
COVER @@ -5766,6 +6979,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_set_iam_policy(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_set_iam_policy_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -5933,6 +7150,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_set_labels(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_set_labels_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -6100,6 +7321,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_set_machine_resources(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_set_machine_resources_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -6273,6 +7498,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_set_machine_type(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_set_machine_type_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -6440,6 +7669,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_set_metadata(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_set_metadata_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -6607,6 +7840,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_set_min_cpu_platform(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_set_min_cpu_platform_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -6772,6 +8009,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_set_name(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_set_name_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -6943,6 +8184,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_set_scheduling(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_set_scheduling_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -7110,6 +8355,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_set_security_policy(resp) + response_metadata = 
[(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_set_security_policy_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -7277,6 +8526,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_set_service_account(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_set_service_account_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -7449,6 +8702,13 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_set_shielded_instance_integrity_policy(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_set_shielded_instance_integrity_policy_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -7614,6 +8874,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_set_tags(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_set_tags_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -7776,6 +9040,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_simulate_maintenance_event(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_simulate_maintenance_event_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -7935,6 +9203,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_start(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_start_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -8102,6 +9374,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_start_with_encryption_key(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_start_with_encryption_key_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -8258,6 +9534,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_stop(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_stop_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -8417,6 +9695,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_suspend(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_suspend_with_metadata( + resp, response_metadata + ) if 
CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -8567,6 +9849,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_test_iam_permissions(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_test_iam_permissions_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -8732,6 +10018,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -8899,6 +10189,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_access_config(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_access_config_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -9066,6 +10360,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_display_device(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_display_device_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -9233,6 +10531,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_network_interface(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_network_interface_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -9403,6 +10705,13 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_shielded_instance_config(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_update_shielded_instance_config_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/instant_snapshots/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/instant_snapshots/client.py index f5d8ed835d02..66181f88e686 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/instant_snapshots/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/instant_snapshots/client.py @@ -15,6 +15,8 @@ # from collections import OrderedDict import functools +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -461,6 +463,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. 
return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/instant_snapshots/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/instant_snapshots/transports/rest.py index d2cce9d83f8d..8682fd2090af 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/instant_snapshots/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/instant_snapshots/transports/rest.py @@ -166,12 +166,37 @@ def post_aggregated_list( ) -> compute.InstantSnapshotAggregatedList: """Post-rpc interceptor for aggregated_list - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_aggregated_list_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the InstantSnapshots server but before - it is returned to user code. + it is returned to user code. This `post_aggregated_list` interceptor runs + before the `post_aggregated_list_with_metadata` interceptor. """ return response + def post_aggregated_list_with_metadata( + self, + response: compute.InstantSnapshotAggregatedList, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.InstantSnapshotAggregatedList, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for aggregated_list + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the InstantSnapshots server but before it is returned to user code. + + We recommend only using this `post_aggregated_list_with_metadata` + interceptor in new development instead of the `post_aggregated_list` interceptor. + When both interceptors are used, this `post_aggregated_list_with_metadata` interceptor runs after the + `post_aggregated_list` interceptor. The (possibly modified) response returned by + `post_aggregated_list` will be passed to + `post_aggregated_list_with_metadata`. + """ + return response, metadata + def pre_delete( self, request: compute.DeleteInstantSnapshotRequest, @@ -189,12 +214,35 @@ def pre_delete( def post_delete(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for delete - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the InstantSnapshots server but before - it is returned to user code. + it is returned to user code. This `post_delete` interceptor runs + before the `post_delete_with_metadata` interceptor. 
""" return response + def post_delete_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the InstantSnapshots server but before it is returned to user code. + + We recommend only using this `post_delete_with_metadata` + interceptor in new development instead of the `post_delete` interceptor. + When both interceptors are used, this `post_delete_with_metadata` interceptor runs after the + `post_delete` interceptor. The (possibly modified) response returned by + `post_delete` will be passed to + `post_delete_with_metadata`. + """ + return response, metadata + def pre_get( self, request: compute.GetInstantSnapshotRequest, @@ -212,12 +260,35 @@ def pre_get( def post_get(self, response: compute.InstantSnapshot) -> compute.InstantSnapshot: """Post-rpc interceptor for get - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the InstantSnapshots server but before - it is returned to user code. + it is returned to user code. This `post_get` interceptor runs + before the `post_get_with_metadata` interceptor. """ return response + def post_get_with_metadata( + self, + response: compute.InstantSnapshot, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.InstantSnapshot, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the InstantSnapshots server but before it is returned to user code. + + We recommend only using this `post_get_with_metadata` + interceptor in new development instead of the `post_get` interceptor. + When both interceptors are used, this `post_get_with_metadata` interceptor runs after the + `post_get` interceptor. The (possibly modified) response returned by + `post_get` will be passed to + `post_get_with_metadata`. + """ + return response, metadata + def pre_get_iam_policy( self, request: compute.GetIamPolicyInstantSnapshotRequest, @@ -236,12 +307,35 @@ def pre_get_iam_policy( def post_get_iam_policy(self, response: compute.Policy) -> compute.Policy: """Post-rpc interceptor for get_iam_policy - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_iam_policy_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the InstantSnapshots server but before - it is returned to user code. + it is returned to user code. This `post_get_iam_policy` interceptor runs + before the `post_get_iam_policy_with_metadata` interceptor. """ return response + def post_get_iam_policy_with_metadata( + self, + response: compute.Policy, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Policy, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_iam_policy + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the InstantSnapshots server but before it is returned to user code. + + We recommend only using this `post_get_iam_policy_with_metadata` + interceptor in new development instead of the `post_get_iam_policy` interceptor. 
+ When both interceptors are used, this `post_get_iam_policy_with_metadata` interceptor runs after the + `post_get_iam_policy` interceptor. The (possibly modified) response returned by + `post_get_iam_policy` will be passed to + `post_get_iam_policy_with_metadata`. + """ + return response, metadata + def pre_insert( self, request: compute.InsertInstantSnapshotRequest, @@ -259,12 +353,35 @@ def pre_insert( def post_insert(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for insert - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_insert_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the InstantSnapshots server but before - it is returned to user code. + it is returned to user code. This `post_insert` interceptor runs + before the `post_insert_with_metadata` interceptor. """ return response + def post_insert_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for insert + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the InstantSnapshots server but before it is returned to user code. + + We recommend only using this `post_insert_with_metadata` + interceptor in new development instead of the `post_insert` interceptor. + When both interceptors are used, this `post_insert_with_metadata` interceptor runs after the + `post_insert` interceptor. The (possibly modified) response returned by + `post_insert` will be passed to + `post_insert_with_metadata`. + """ + return response, metadata + def pre_list( self, request: compute.ListInstantSnapshotsRequest, @@ -284,12 +401,35 @@ def post_list( ) -> compute.InstantSnapshotList: """Post-rpc interceptor for list - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the InstantSnapshots server but before - it is returned to user code. + it is returned to user code. This `post_list` interceptor runs + before the `post_list_with_metadata` interceptor. """ return response + def post_list_with_metadata( + self, + response: compute.InstantSnapshotList, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.InstantSnapshotList, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the InstantSnapshots server but before it is returned to user code. + + We recommend only using this `post_list_with_metadata` + interceptor in new development instead of the `post_list` interceptor. + When both interceptors are used, this `post_list_with_metadata` interceptor runs after the + `post_list` interceptor. The (possibly modified) response returned by + `post_list` will be passed to + `post_list_with_metadata`. + """ + return response, metadata + def pre_set_iam_policy( self, request: compute.SetIamPolicyInstantSnapshotRequest, @@ -308,12 +448,35 @@ def pre_set_iam_policy( def post_set_iam_policy(self, response: compute.Policy) -> compute.Policy: """Post-rpc interceptor for set_iam_policy - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_set_iam_policy_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response after it is returned by the InstantSnapshots server but before - it is returned to user code. + it is returned to user code. This `post_set_iam_policy` interceptor runs + before the `post_set_iam_policy_with_metadata` interceptor. """ return response + def post_set_iam_policy_with_metadata( + self, + response: compute.Policy, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Policy, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for set_iam_policy + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the InstantSnapshots server but before it is returned to user code. + + We recommend only using this `post_set_iam_policy_with_metadata` + interceptor in new development instead of the `post_set_iam_policy` interceptor. + When both interceptors are used, this `post_set_iam_policy_with_metadata` interceptor runs after the + `post_set_iam_policy` interceptor. The (possibly modified) response returned by + `post_set_iam_policy` will be passed to + `post_set_iam_policy_with_metadata`. + """ + return response, metadata + def pre_set_labels( self, request: compute.SetLabelsInstantSnapshotRequest, @@ -331,12 +494,35 @@ def pre_set_labels( def post_set_labels(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for set_labels - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_set_labels_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the InstantSnapshots server but before - it is returned to user code. + it is returned to user code. This `post_set_labels` interceptor runs + before the `post_set_labels_with_metadata` interceptor. """ return response + def post_set_labels_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for set_labels + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the InstantSnapshots server but before it is returned to user code. + + We recommend only using this `post_set_labels_with_metadata` + interceptor in new development instead of the `post_set_labels` interceptor. + When both interceptors are used, this `post_set_labels_with_metadata` interceptor runs after the + `post_set_labels` interceptor. The (possibly modified) response returned by + `post_set_labels` will be passed to + `post_set_labels_with_metadata`. + """ + return response, metadata + def pre_test_iam_permissions( self, request: compute.TestIamPermissionsInstantSnapshotRequest, @@ -357,12 +543,37 @@ def post_test_iam_permissions( ) -> compute.TestPermissionsResponse: """Post-rpc interceptor for test_iam_permissions - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_test_iam_permissions_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the InstantSnapshots server but before - it is returned to user code. + it is returned to user code. This `post_test_iam_permissions` interceptor runs + before the `post_test_iam_permissions_with_metadata` interceptor. 
""" return response + def post_test_iam_permissions_with_metadata( + self, + response: compute.TestPermissionsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.TestPermissionsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for test_iam_permissions + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the InstantSnapshots server but before it is returned to user code. + + We recommend only using this `post_test_iam_permissions_with_metadata` + interceptor in new development instead of the `post_test_iam_permissions` interceptor. + When both interceptors are used, this `post_test_iam_permissions_with_metadata` interceptor runs after the + `post_test_iam_permissions` interceptor. The (possibly modified) response returned by + `post_test_iam_permissions` will be passed to + `post_test_iam_permissions_with_metadata`. + """ + return response, metadata + @dataclasses.dataclass class InstantSnapshotsRestStub: @@ -573,6 +784,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_aggregated_list(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_aggregated_list_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -738,6 +953,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -885,6 +1104,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1050,6 +1271,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_iam_policy(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_iam_policy_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1221,6 +1446,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_insert(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_insert_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1367,6 +1596,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1538,6 +1769,10 @@ def __call__( json_format.Parse(response.content, pb_resp, 
ignore_unknown_fields=True) resp = self._interceptor.post_set_iam_policy(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_set_iam_policy_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1703,6 +1938,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_set_labels(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_set_labels_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1854,6 +2093,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_test_iam_permissions(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_test_iam_permissions_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/interconnect_attachments/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/interconnect_attachments/client.py index d9a865e9f15a..6a00def9fdf9 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/interconnect_attachments/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/interconnect_attachments/client.py @@ -15,6 +15,8 @@ # from collections import OrderedDict import functools +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -463,6 +465,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/interconnect_attachments/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/interconnect_attachments/transports/rest.py index d34a2f82d66b..2d9e1a49a4a8 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/interconnect_attachments/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/interconnect_attachments/transports/rest.py @@ -150,12 +150,38 @@ def post_aggregated_list( ) -> compute.InterconnectAttachmentAggregatedList: """Post-rpc interceptor for aggregated_list - Override in a subclass to manipulate the response + DEPRECATED. 
Please use the `post_aggregated_list_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the InterconnectAttachments server but before - it is returned to user code. + it is returned to user code. This `post_aggregated_list` interceptor runs + before the `post_aggregated_list_with_metadata` interceptor. """ return response + def post_aggregated_list_with_metadata( + self, + response: compute.InterconnectAttachmentAggregatedList, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.InterconnectAttachmentAggregatedList, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for aggregated_list + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the InterconnectAttachments server but before it is returned to user code. + + We recommend only using this `post_aggregated_list_with_metadata` + interceptor in new development instead of the `post_aggregated_list` interceptor. + When both interceptors are used, this `post_aggregated_list_with_metadata` interceptor runs after the + `post_aggregated_list` interceptor. The (possibly modified) response returned by + `post_aggregated_list` will be passed to + `post_aggregated_list_with_metadata`. + """ + return response, metadata + def pre_delete( self, request: compute.DeleteInterconnectAttachmentRequest, @@ -174,12 +200,35 @@ def pre_delete( def post_delete(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for delete - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the InterconnectAttachments server but before - it is returned to user code. + it is returned to user code. This `post_delete` interceptor runs + before the `post_delete_with_metadata` interceptor. """ return response + def post_delete_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the InterconnectAttachments server but before it is returned to user code. + + We recommend only using this `post_delete_with_metadata` + interceptor in new development instead of the `post_delete` interceptor. + When both interceptors are used, this `post_delete_with_metadata` interceptor runs after the + `post_delete` interceptor. The (possibly modified) response returned by + `post_delete` will be passed to + `post_delete_with_metadata`. + """ + return response, metadata + def pre_get( self, request: compute.GetInterconnectAttachmentRequest, @@ -200,12 +249,35 @@ def post_get( ) -> compute.InterconnectAttachment: """Post-rpc interceptor for get - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the InterconnectAttachments server but before - it is returned to user code. + it is returned to user code. This `post_get` interceptor runs + before the `post_get_with_metadata` interceptor. 
""" return response + def post_get_with_metadata( + self, + response: compute.InterconnectAttachment, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.InterconnectAttachment, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the InterconnectAttachments server but before it is returned to user code. + + We recommend only using this `post_get_with_metadata` + interceptor in new development instead of the `post_get` interceptor. + When both interceptors are used, this `post_get_with_metadata` interceptor runs after the + `post_get` interceptor. The (possibly modified) response returned by + `post_get` will be passed to + `post_get_with_metadata`. + """ + return response, metadata + def pre_insert( self, request: compute.InsertInterconnectAttachmentRequest, @@ -224,12 +296,35 @@ def pre_insert( def post_insert(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for insert - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_insert_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the InterconnectAttachments server but before - it is returned to user code. + it is returned to user code. This `post_insert` interceptor runs + before the `post_insert_with_metadata` interceptor. """ return response + def post_insert_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for insert + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the InterconnectAttachments server but before it is returned to user code. + + We recommend only using this `post_insert_with_metadata` + interceptor in new development instead of the `post_insert` interceptor. + When both interceptors are used, this `post_insert_with_metadata` interceptor runs after the + `post_insert` interceptor. The (possibly modified) response returned by + `post_insert` will be passed to + `post_insert_with_metadata`. + """ + return response, metadata + def pre_list( self, request: compute.ListInterconnectAttachmentsRequest, @@ -250,12 +345,37 @@ def post_list( ) -> compute.InterconnectAttachmentList: """Post-rpc interceptor for list - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the InterconnectAttachments server but before - it is returned to user code. + it is returned to user code. This `post_list` interceptor runs + before the `post_list_with_metadata` interceptor. """ return response + def post_list_with_metadata( + self, + response: compute.InterconnectAttachmentList, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.InterconnectAttachmentList, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the InterconnectAttachments server but before it is returned to user code. + + We recommend only using this `post_list_with_metadata` + interceptor in new development instead of the `post_list` interceptor. 
+ When both interceptors are used, this `post_list_with_metadata` interceptor runs after the + `post_list` interceptor. The (possibly modified) response returned by + `post_list` will be passed to + `post_list_with_metadata`. + """ + return response, metadata + def pre_patch( self, request: compute.PatchInterconnectAttachmentRequest, @@ -274,12 +394,35 @@ def pre_patch( def post_patch(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for patch - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_patch_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the InterconnectAttachments server but before - it is returned to user code. + it is returned to user code. This `post_patch` interceptor runs + before the `post_patch_with_metadata` interceptor. """ return response + def post_patch_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for patch + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the InterconnectAttachments server but before it is returned to user code. + + We recommend only using this `post_patch_with_metadata` + interceptor in new development instead of the `post_patch` interceptor. + When both interceptors are used, this `post_patch_with_metadata` interceptor runs after the + `post_patch` interceptor. The (possibly modified) response returned by + `post_patch` will be passed to + `post_patch_with_metadata`. + """ + return response, metadata + def pre_set_labels( self, request: compute.SetLabelsInterconnectAttachmentRequest, @@ -298,12 +441,35 @@ def pre_set_labels( def post_set_labels(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for set_labels - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_set_labels_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the InterconnectAttachments server but before - it is returned to user code. + it is returned to user code. This `post_set_labels` interceptor runs + before the `post_set_labels_with_metadata` interceptor. """ return response + def post_set_labels_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for set_labels + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the InterconnectAttachments server but before it is returned to user code. + + We recommend only using this `post_set_labels_with_metadata` + interceptor in new development instead of the `post_set_labels` interceptor. + When both interceptors are used, this `post_set_labels_with_metadata` interceptor runs after the + `post_set_labels` interceptor. The (possibly modified) response returned by + `post_set_labels` will be passed to + `post_set_labels_with_metadata`. 
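# --- Illustrative sketch, not part of the generated diff above ---
# The metadata handed to each *_with_metadata hook is simply the HTTP response
# headers flattened into (key, str(value)) tuples, mirroring the
# `[(k, str(v)) for k, v in response.headers.items()]` expression used in the
# __call__ wrappers further down. Below is a tiny reproduction of that
# transformation plus an override that reads one header; the interceptor class
# name and the header key are assumptions used only for illustration.

from google.cloud.compute_v1.services.interconnect_attachments.transports.rest import (
    InterconnectAttachmentsRestInterceptor,
)

sample_headers = {"content-type": "application/json", "x-example-debug-id": "abc123"}
response_metadata = [(k, str(v)) for k, v in sample_headers.items()]
# -> [('content-type', 'application/json'), ('x-example-debug-id', 'abc123')]


class DebugIdInterceptor(InterconnectAttachmentsRestInterceptor):
    def post_set_labels_with_metadata(self, response, metadata):
        debug_id = dict(metadata).get("x-example-debug-id")  # hypothetical header
        if debug_id:
            print("set_labels debug id:", debug_id)
        # Return the pair unchanged; note the wrappers below keep only the
        # response (`resp, _ = ...`), so mutating metadata here does not change
        # what the caller receives.
        return response, metadata
# --- end of illustrative sketch ---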
+ """ + return response, metadata + @dataclasses.dataclass class InterconnectAttachmentsRestStub: @@ -517,6 +683,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_aggregated_list(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_aggregated_list_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -679,6 +849,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -830,6 +1004,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -996,6 +1172,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_insert(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_insert_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1142,6 +1322,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1310,6 +1492,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_patch(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_patch_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1476,6 +1662,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_set_labels(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_set_labels_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/interconnect_locations/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/interconnect_locations/client.py index eac11cd73c81..3b837cf5c65c 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/interconnect_locations/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/interconnect_locations/client.py @@ -14,6 +14,8 @@ # limitations under the License. 
# from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -460,6 +462,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/interconnect_locations/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/interconnect_locations/transports/rest.py index f013556c5b60..64d18dc3a89e 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/interconnect_locations/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/interconnect_locations/transports/rest.py @@ -109,12 +109,35 @@ def post_get( ) -> compute.InterconnectLocation: """Post-rpc interceptor for get - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the InterconnectLocations server but before - it is returned to user code. + it is returned to user code. This `post_get` interceptor runs + before the `post_get_with_metadata` interceptor. """ return response + def post_get_with_metadata( + self, + response: compute.InterconnectLocation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.InterconnectLocation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the InterconnectLocations server but before it is returned to user code. + + We recommend only using this `post_get_with_metadata` + interceptor in new development instead of the `post_get` interceptor. + When both interceptors are used, this `post_get_with_metadata` interceptor runs after the + `post_get` interceptor. The (possibly modified) response returned by + `post_get` will be passed to + `post_get_with_metadata`. + """ + return response, metadata + def pre_list( self, request: compute.ListInterconnectLocationsRequest, @@ -135,12 +158,37 @@ def post_list( ) -> compute.InterconnectLocationList: """Post-rpc interceptor for list - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the InterconnectLocations server but before - it is returned to user code. + it is returned to user code. 
This `post_list` interceptor runs + before the `post_list_with_metadata` interceptor. """ return response + def post_list_with_metadata( + self, + response: compute.InterconnectLocationList, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.InterconnectLocationList, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the InterconnectLocations server but before it is returned to user code. + + We recommend only using this `post_list_with_metadata` + interceptor in new development instead of the `post_list` interceptor. + When both interceptors are used, this `post_list_with_metadata` interceptor runs after the + `post_list` interceptor. The (possibly modified) response returned by + `post_list` will be passed to + `post_list_with_metadata`. + """ + return response, metadata + @dataclasses.dataclass class InterconnectLocationsRestStub: @@ -360,6 +408,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -505,6 +555,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/interconnect_remote_locations/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/interconnect_remote_locations/client.py index ff52cd42d4db..7257931a3c45 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/interconnect_remote_locations/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/interconnect_remote_locations/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -462,6 +464,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. 
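For context on the interceptor changes above: the generated docstrings recommend the new `post_*_with_metadata` hooks for new development, but no usage example appears in the diff itself. The following is a minimal illustrative sketch, not part of this PR, assuming the generated names shown in this diff for the InterconnectLocations service (`InterconnectLocationsRestInterceptor`, `InterconnectLocationsRestTransport`, `InterconnectLocationsClient`); the subclass name and the "my-project" id are hypothetical. It shows how a subclass can read the HTTP response headers that the transport now forwards as `metadata`, while leaving the legacy `post_list` hook untouched.

from typing import Sequence, Tuple, Union

from google.cloud import compute_v1
from google.cloud.compute_v1.services.interconnect_locations.transports.rest import (
    InterconnectLocationsRestInterceptor,
    InterconnectLocationsRestTransport,
)


class HeaderLoggingInterceptor(InterconnectLocationsRestInterceptor):
    """Hypothetical interceptor subclass using the new metadata-aware hook."""

    def post_list_with_metadata(
        self,
        response: compute_v1.InterconnectLocationList,
        metadata: Sequence[Tuple[str, Union[str, bytes]]],
    ) -> Tuple[
        compute_v1.InterconnectLocationList, Sequence[Tuple[str, Union[str, bytes]]]
    ]:
        # `metadata` carries the (header, value) pairs the transport collects
        # from the HTTP response before handing the result back to user code.
        for key, value in metadata:
            print(f"list response header: {key}={value}")
        return response, metadata


# Wire the interceptor into a client; the usual default-credentials lookup still applies.
transport = InterconnectLocationsRestTransport(interceptor=HeaderLoggingInterceptor())
client = compute_v1.InterconnectLocationsClient(transport=transport)
for location in client.list(project="my-project"):  # "my-project" is a placeholder
    print(location.name)

Separately, the `_add_cred_info_for_auth_errors` helper added to each client in this diff only acts on 401/403/404 errors, and only when the credentials object exposes `get_cred_info` (available in google-auth >= 2.35.0); when it does, the helper appends `json.dumps(cred_info)` to the exception's `_details` list.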
diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/interconnect_remote_locations/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/interconnect_remote_locations/transports/rest.py index ae52a5a8d009..068b9e455b3a 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/interconnect_remote_locations/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/interconnect_remote_locations/transports/rest.py @@ -110,12 +110,37 @@ def post_get( ) -> compute.InterconnectRemoteLocation: """Post-rpc interceptor for get - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the InterconnectRemoteLocations server but before - it is returned to user code. + it is returned to user code. This `post_get` interceptor runs + before the `post_get_with_metadata` interceptor. """ return response + def post_get_with_metadata( + self, + response: compute.InterconnectRemoteLocation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.InterconnectRemoteLocation, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for get + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the InterconnectRemoteLocations server but before it is returned to user code. + + We recommend only using this `post_get_with_metadata` + interceptor in new development instead of the `post_get` interceptor. + When both interceptors are used, this `post_get_with_metadata` interceptor runs after the + `post_get` interceptor. The (possibly modified) response returned by + `post_get` will be passed to + `post_get_with_metadata`. + """ + return response, metadata + def pre_list( self, request: compute.ListInterconnectRemoteLocationsRequest, @@ -136,12 +161,37 @@ def post_list( ) -> compute.InterconnectRemoteLocationList: """Post-rpc interceptor for list - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the InterconnectRemoteLocations server but before - it is returned to user code. + it is returned to user code. This `post_list` interceptor runs + before the `post_list_with_metadata` interceptor. """ return response + def post_list_with_metadata( + self, + response: compute.InterconnectRemoteLocationList, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.InterconnectRemoteLocationList, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the InterconnectRemoteLocations server but before it is returned to user code. + + We recommend only using this `post_list_with_metadata` + interceptor in new development instead of the `post_list` interceptor. + When both interceptors are used, this `post_list_with_metadata` interceptor runs after the + `post_list` interceptor. The (possibly modified) response returned by + `post_list` will be passed to + `post_list_with_metadata`. 
+ """ + return response, metadata + @dataclasses.dataclass class InterconnectRemoteLocationsRestStub: @@ -360,6 +410,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -508,6 +560,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/interconnects/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/interconnects/client.py index c6903a51675e..8ee45d6125b5 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/interconnects/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/interconnects/client.py @@ -15,6 +15,8 @@ # from collections import OrderedDict import functools +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -459,6 +461,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/interconnects/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/interconnects/transports/rest.py index 9e6c92da9135..48f3dcd3f57c 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/interconnects/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/interconnects/transports/rest.py @@ -155,12 +155,35 @@ def pre_delete( def post_delete(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for delete - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Interconnects server but before - it is returned to user code. + it is returned to user code. This `post_delete` interceptor runs + before the `post_delete_with_metadata` interceptor. 
""" return response + def post_delete_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Interconnects server but before it is returned to user code. + + We recommend only using this `post_delete_with_metadata` + interceptor in new development instead of the `post_delete` interceptor. + When both interceptors are used, this `post_delete_with_metadata` interceptor runs after the + `post_delete` interceptor. The (possibly modified) response returned by + `post_delete` will be passed to + `post_delete_with_metadata`. + """ + return response, metadata + def pre_get( self, request: compute.GetInterconnectRequest, @@ -176,12 +199,35 @@ def pre_get( def post_get(self, response: compute.Interconnect) -> compute.Interconnect: """Post-rpc interceptor for get - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Interconnects server but before - it is returned to user code. + it is returned to user code. This `post_get` interceptor runs + before the `post_get_with_metadata` interceptor. """ return response + def post_get_with_metadata( + self, + response: compute.Interconnect, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Interconnect, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Interconnects server but before it is returned to user code. + + We recommend only using this `post_get_with_metadata` + interceptor in new development instead of the `post_get` interceptor. + When both interceptors are used, this `post_get_with_metadata` interceptor runs after the + `post_get` interceptor. The (possibly modified) response returned by + `post_get` will be passed to + `post_get_with_metadata`. + """ + return response, metadata + def pre_get_diagnostics( self, request: compute.GetDiagnosticsInterconnectRequest, @@ -202,12 +248,38 @@ def post_get_diagnostics( ) -> compute.InterconnectsGetDiagnosticsResponse: """Post-rpc interceptor for get_diagnostics - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_diagnostics_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Interconnects server but before - it is returned to user code. + it is returned to user code. This `post_get_diagnostics` interceptor runs + before the `post_get_diagnostics_with_metadata` interceptor. """ return response + def post_get_diagnostics_with_metadata( + self, + response: compute.InterconnectsGetDiagnosticsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.InterconnectsGetDiagnosticsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for get_diagnostics + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Interconnects server but before it is returned to user code. + + We recommend only using this `post_get_diagnostics_with_metadata` + interceptor in new development instead of the `post_get_diagnostics` interceptor. 
+ When both interceptors are used, this `post_get_diagnostics_with_metadata` interceptor runs after the + `post_get_diagnostics` interceptor. The (possibly modified) response returned by + `post_get_diagnostics` will be passed to + `post_get_diagnostics_with_metadata`. + """ + return response, metadata + def pre_get_macsec_config( self, request: compute.GetMacsecConfigInterconnectRequest, @@ -228,12 +300,38 @@ def post_get_macsec_config( ) -> compute.InterconnectsGetMacsecConfigResponse: """Post-rpc interceptor for get_macsec_config - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_macsec_config_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Interconnects server but before - it is returned to user code. + it is returned to user code. This `post_get_macsec_config` interceptor runs + before the `post_get_macsec_config_with_metadata` interceptor. """ return response + def post_get_macsec_config_with_metadata( + self, + response: compute.InterconnectsGetMacsecConfigResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.InterconnectsGetMacsecConfigResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for get_macsec_config + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Interconnects server but before it is returned to user code. + + We recommend only using this `post_get_macsec_config_with_metadata` + interceptor in new development instead of the `post_get_macsec_config` interceptor. + When both interceptors are used, this `post_get_macsec_config_with_metadata` interceptor runs after the + `post_get_macsec_config` interceptor. The (possibly modified) response returned by + `post_get_macsec_config` will be passed to + `post_get_macsec_config_with_metadata`. + """ + return response, metadata + def pre_insert( self, request: compute.InsertInterconnectRequest, @@ -251,12 +349,35 @@ def pre_insert( def post_insert(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for insert - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_insert_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Interconnects server but before - it is returned to user code. + it is returned to user code. This `post_insert` interceptor runs + before the `post_insert_with_metadata` interceptor. """ return response + def post_insert_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for insert + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Interconnects server but before it is returned to user code. + + We recommend only using this `post_insert_with_metadata` + interceptor in new development instead of the `post_insert` interceptor. + When both interceptors are used, this `post_insert_with_metadata` interceptor runs after the + `post_insert` interceptor. The (possibly modified) response returned by + `post_insert` will be passed to + `post_insert_with_metadata`. 
+ """ + return response, metadata + def pre_list( self, request: compute.ListInterconnectsRequest, @@ -274,12 +395,35 @@ def pre_list( def post_list(self, response: compute.InterconnectList) -> compute.InterconnectList: """Post-rpc interceptor for list - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Interconnects server but before - it is returned to user code. + it is returned to user code. This `post_list` interceptor runs + before the `post_list_with_metadata` interceptor. """ return response + def post_list_with_metadata( + self, + response: compute.InterconnectList, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.InterconnectList, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Interconnects server but before it is returned to user code. + + We recommend only using this `post_list_with_metadata` + interceptor in new development instead of the `post_list` interceptor. + When both interceptors are used, this `post_list_with_metadata` interceptor runs after the + `post_list` interceptor. The (possibly modified) response returned by + `post_list` will be passed to + `post_list_with_metadata`. + """ + return response, metadata + def pre_patch( self, request: compute.PatchInterconnectRequest, @@ -297,12 +441,35 @@ def pre_patch( def post_patch(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for patch - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_patch_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Interconnects server but before - it is returned to user code. + it is returned to user code. This `post_patch` interceptor runs + before the `post_patch_with_metadata` interceptor. """ return response + def post_patch_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for patch + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Interconnects server but before it is returned to user code. + + We recommend only using this `post_patch_with_metadata` + interceptor in new development instead of the `post_patch` interceptor. + When both interceptors are used, this `post_patch_with_metadata` interceptor runs after the + `post_patch` interceptor. The (possibly modified) response returned by + `post_patch` will be passed to + `post_patch_with_metadata`. + """ + return response, metadata + def pre_set_labels( self, request: compute.SetLabelsInterconnectRequest, @@ -320,12 +487,35 @@ def pre_set_labels( def post_set_labels(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for set_labels - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_set_labels_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Interconnects server but before - it is returned to user code. + it is returned to user code. 
This `post_set_labels` interceptor runs + before the `post_set_labels_with_metadata` interceptor. """ return response + def post_set_labels_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for set_labels + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Interconnects server but before it is returned to user code. + + We recommend only using this `post_set_labels_with_metadata` + interceptor in new development instead of the `post_set_labels` interceptor. + When both interceptors are used, this `post_set_labels_with_metadata` interceptor runs after the + `post_set_labels` interceptor. The (possibly modified) response returned by + `post_set_labels` will be passed to + `post_set_labels_with_metadata`. + """ + return response, metadata + @dataclasses.dataclass class InterconnectsRestStub: @@ -555,6 +745,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -703,6 +897,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -847,6 +1043,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_diagnostics(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_diagnostics_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -995,6 +1195,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_macsec_config(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_macsec_config_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1164,6 +1368,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_insert(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_insert_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1308,6 +1516,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1475,6 +1685,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = 
self._interceptor.post_patch(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_patch_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1646,6 +1860,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_set_labels(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_set_labels_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/license_codes/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/license_codes/client.py index 596f8823fbef..f4e152e434c6 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/license_codes/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/license_codes/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -455,6 +457,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/license_codes/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/license_codes/transports/rest.py index 61a092695a7c..bf09ac178ed5 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/license_codes/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/license_codes/transports/rest.py @@ -105,12 +105,35 @@ def pre_get( def post_get(self, response: compute.LicenseCode) -> compute.LicenseCode: """Post-rpc interceptor for get - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the LicenseCodes server but before - it is returned to user code. + it is returned to user code. This `post_get` interceptor runs + before the `post_get_with_metadata` interceptor. 
""" return response + def post_get_with_metadata( + self, + response: compute.LicenseCode, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.LicenseCode, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the LicenseCodes server but before it is returned to user code. + + We recommend only using this `post_get_with_metadata` + interceptor in new development instead of the `post_get` interceptor. + When both interceptors are used, this `post_get_with_metadata` interceptor runs after the + `post_get` interceptor. The (possibly modified) response returned by + `post_get` will be passed to + `post_get_with_metadata`. + """ + return response, metadata + def pre_test_iam_permissions( self, request: compute.TestIamPermissionsLicenseCodeRequest, @@ -131,12 +154,37 @@ def post_test_iam_permissions( ) -> compute.TestPermissionsResponse: """Post-rpc interceptor for test_iam_permissions - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_test_iam_permissions_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the LicenseCodes server but before - it is returned to user code. + it is returned to user code. This `post_test_iam_permissions` interceptor runs + before the `post_test_iam_permissions_with_metadata` interceptor. """ return response + def post_test_iam_permissions_with_metadata( + self, + response: compute.TestPermissionsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.TestPermissionsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for test_iam_permissions + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the LicenseCodes server but before it is returned to user code. + + We recommend only using this `post_test_iam_permissions_with_metadata` + interceptor in new development instead of the `post_test_iam_permissions` interceptor. + When both interceptors are used, this `post_test_iam_permissions_with_metadata` interceptor runs after the + `post_test_iam_permissions` interceptor. The (possibly modified) response returned by + `post_test_iam_permissions` will be passed to + `post_test_iam_permissions_with_metadata`. 
+ """ + return response, metadata + @dataclasses.dataclass class LicenseCodesRestStub: @@ -352,6 +400,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -502,6 +552,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_test_iam_permissions(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_test_iam_permissions_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/licenses/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/licenses/client.py index 726d920c3163..f4d99b451603 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/licenses/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/licenses/client.py @@ -15,6 +15,8 @@ # from collections import OrderedDict import functools +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -459,6 +461,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/licenses/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/licenses/transports/rest.py index c17b9f1fc1c2..d1806a48ced5 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/licenses/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/licenses/transports/rest.py @@ -145,12 +145,35 @@ def pre_delete( def post_delete(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for delete - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Licenses server but before - it is returned to user code. + it is returned to user code. This `post_delete` interceptor runs + before the `post_delete_with_metadata` interceptor. 
""" return response + def post_delete_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Licenses server but before it is returned to user code. + + We recommend only using this `post_delete_with_metadata` + interceptor in new development instead of the `post_delete` interceptor. + When both interceptors are used, this `post_delete_with_metadata` interceptor runs after the + `post_delete` interceptor. The (possibly modified) response returned by + `post_delete` will be passed to + `post_delete_with_metadata`. + """ + return response, metadata + def pre_get( self, request: compute.GetLicenseRequest, @@ -166,12 +189,35 @@ def pre_get( def post_get(self, response: compute.License) -> compute.License: """Post-rpc interceptor for get - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Licenses server but before - it is returned to user code. + it is returned to user code. This `post_get` interceptor runs + before the `post_get_with_metadata` interceptor. """ return response + def post_get_with_metadata( + self, + response: compute.License, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.License, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Licenses server but before it is returned to user code. + + We recommend only using this `post_get_with_metadata` + interceptor in new development instead of the `post_get` interceptor. + When both interceptors are used, this `post_get_with_metadata` interceptor runs after the + `post_get` interceptor. The (possibly modified) response returned by + `post_get` will be passed to + `post_get_with_metadata`. + """ + return response, metadata + def pre_get_iam_policy( self, request: compute.GetIamPolicyLicenseRequest, @@ -189,12 +235,35 @@ def pre_get_iam_policy( def post_get_iam_policy(self, response: compute.Policy) -> compute.Policy: """Post-rpc interceptor for get_iam_policy - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_iam_policy_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Licenses server but before - it is returned to user code. + it is returned to user code. This `post_get_iam_policy` interceptor runs + before the `post_get_iam_policy_with_metadata` interceptor. """ return response + def post_get_iam_policy_with_metadata( + self, + response: compute.Policy, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Policy, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_iam_policy + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Licenses server but before it is returned to user code. + + We recommend only using this `post_get_iam_policy_with_metadata` + interceptor in new development instead of the `post_get_iam_policy` interceptor. 
+ When both interceptors are used, this `post_get_iam_policy_with_metadata` interceptor runs after the + `post_get_iam_policy` interceptor. The (possibly modified) response returned by + `post_get_iam_policy` will be passed to + `post_get_iam_policy_with_metadata`. + """ + return response, metadata + def pre_insert( self, request: compute.InsertLicenseRequest, @@ -210,12 +279,35 @@ def pre_insert( def post_insert(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for insert - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_insert_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Licenses server but before - it is returned to user code. + it is returned to user code. This `post_insert` interceptor runs + before the `post_insert_with_metadata` interceptor. """ return response + def post_insert_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for insert + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Licenses server but before it is returned to user code. + + We recommend only using this `post_insert_with_metadata` + interceptor in new development instead of the `post_insert` interceptor. + When both interceptors are used, this `post_insert_with_metadata` interceptor runs after the + `post_insert` interceptor. The (possibly modified) response returned by + `post_insert` will be passed to + `post_insert_with_metadata`. + """ + return response, metadata + def pre_list( self, request: compute.ListLicensesRequest, @@ -233,12 +325,35 @@ def post_list( ) -> compute.LicensesListResponse: """Post-rpc interceptor for list - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Licenses server but before - it is returned to user code. + it is returned to user code. This `post_list` interceptor runs + before the `post_list_with_metadata` interceptor. """ return response + def post_list_with_metadata( + self, + response: compute.LicensesListResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.LicensesListResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Licenses server but before it is returned to user code. + + We recommend only using this `post_list_with_metadata` + interceptor in new development instead of the `post_list` interceptor. + When both interceptors are used, this `post_list_with_metadata` interceptor runs after the + `post_list` interceptor. The (possibly modified) response returned by + `post_list` will be passed to + `post_list_with_metadata`. + """ + return response, metadata + def pre_set_iam_policy( self, request: compute.SetIamPolicyLicenseRequest, @@ -256,12 +371,35 @@ def pre_set_iam_policy( def post_set_iam_policy(self, response: compute.Policy) -> compute.Policy: """Post-rpc interceptor for set_iam_policy - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_set_iam_policy_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response after it is returned by the Licenses server but before - it is returned to user code. + it is returned to user code. This `post_set_iam_policy` interceptor runs + before the `post_set_iam_policy_with_metadata` interceptor. """ return response + def post_set_iam_policy_with_metadata( + self, + response: compute.Policy, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Policy, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for set_iam_policy + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Licenses server but before it is returned to user code. + + We recommend only using this `post_set_iam_policy_with_metadata` + interceptor in new development instead of the `post_set_iam_policy` interceptor. + When both interceptors are used, this `post_set_iam_policy_with_metadata` interceptor runs after the + `post_set_iam_policy` interceptor. The (possibly modified) response returned by + `post_set_iam_policy` will be passed to + `post_set_iam_policy_with_metadata`. + """ + return response, metadata + def pre_test_iam_permissions( self, request: compute.TestIamPermissionsLicenseRequest, @@ -282,12 +420,37 @@ def post_test_iam_permissions( ) -> compute.TestPermissionsResponse: """Post-rpc interceptor for test_iam_permissions - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_test_iam_permissions_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Licenses server but before - it is returned to user code. + it is returned to user code. This `post_test_iam_permissions` interceptor runs + before the `post_test_iam_permissions_with_metadata` interceptor. """ return response + def post_test_iam_permissions_with_metadata( + self, + response: compute.TestPermissionsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.TestPermissionsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for test_iam_permissions + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Licenses server but before it is returned to user code. + + We recommend only using this `post_test_iam_permissions_with_metadata` + interceptor in new development instead of the `post_test_iam_permissions` interceptor. + When both interceptors are used, this `post_test_iam_permissions_with_metadata` interceptor runs after the + `post_test_iam_permissions` interceptor. The (possibly modified) response returned by + `post_test_iam_permissions` will be passed to + `post_test_iam_permissions_with_metadata`. 
+ """ + return response, metadata + @dataclasses.dataclass class LicensesRestStub: @@ -515,6 +678,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -659,6 +826,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -826,6 +995,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_iam_policy(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_iam_policy_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -991,6 +1164,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_insert(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_insert_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1130,6 +1307,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1303,6 +1482,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_set_iam_policy(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_set_iam_policy_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1453,6 +1636,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_test_iam_permissions(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_test_iam_permissions_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/machine_images/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/machine_images/client.py index 238bc73d5002..13d23b5ba7c8 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/machine_images/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/machine_images/client.py @@ -15,6 +15,8 @@ # from collections import OrderedDict import functools +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -459,6 +461,33 @@ def 
_validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/machine_images/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/machine_images/transports/rest.py index 104e9f290c2f..62e9f64bc06e 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/machine_images/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/machine_images/transports/rest.py @@ -147,12 +147,35 @@ def pre_delete( def post_delete(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for delete - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the MachineImages server but before - it is returned to user code. + it is returned to user code. This `post_delete` interceptor runs + before the `post_delete_with_metadata` interceptor. """ return response + def post_delete_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the MachineImages server but before it is returned to user code. + + We recommend only using this `post_delete_with_metadata` + interceptor in new development instead of the `post_delete` interceptor. + When both interceptors are used, this `post_delete_with_metadata` interceptor runs after the + `post_delete` interceptor. The (possibly modified) response returned by + `post_delete` will be passed to + `post_delete_with_metadata`. + """ + return response, metadata + def pre_get( self, request: compute.GetMachineImageRequest, @@ -168,12 +191,35 @@ def pre_get( def post_get(self, response: compute.MachineImage) -> compute.MachineImage: """Post-rpc interceptor for get - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the MachineImages server but before - it is returned to user code. + it is returned to user code. This `post_get` interceptor runs + before the `post_get_with_metadata` interceptor. 
""" return response + def post_get_with_metadata( + self, + response: compute.MachineImage, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.MachineImage, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the MachineImages server but before it is returned to user code. + + We recommend only using this `post_get_with_metadata` + interceptor in new development instead of the `post_get` interceptor. + When both interceptors are used, this `post_get_with_metadata` interceptor runs after the + `post_get` interceptor. The (possibly modified) response returned by + `post_get` will be passed to + `post_get_with_metadata`. + """ + return response, metadata + def pre_get_iam_policy( self, request: compute.GetIamPolicyMachineImageRequest, @@ -191,12 +237,35 @@ def pre_get_iam_policy( def post_get_iam_policy(self, response: compute.Policy) -> compute.Policy: """Post-rpc interceptor for get_iam_policy - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_iam_policy_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the MachineImages server but before - it is returned to user code. + it is returned to user code. This `post_get_iam_policy` interceptor runs + before the `post_get_iam_policy_with_metadata` interceptor. """ return response + def post_get_iam_policy_with_metadata( + self, + response: compute.Policy, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Policy, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_iam_policy + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the MachineImages server but before it is returned to user code. + + We recommend only using this `post_get_iam_policy_with_metadata` + interceptor in new development instead of the `post_get_iam_policy` interceptor. + When both interceptors are used, this `post_get_iam_policy_with_metadata` interceptor runs after the + `post_get_iam_policy` interceptor. The (possibly modified) response returned by + `post_get_iam_policy` will be passed to + `post_get_iam_policy_with_metadata`. + """ + return response, metadata + def pre_insert( self, request: compute.InsertMachineImageRequest, @@ -214,12 +283,35 @@ def pre_insert( def post_insert(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for insert - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_insert_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the MachineImages server but before - it is returned to user code. + it is returned to user code. This `post_insert` interceptor runs + before the `post_insert_with_metadata` interceptor. """ return response + def post_insert_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for insert + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the MachineImages server but before it is returned to user code. + + We recommend only using this `post_insert_with_metadata` + interceptor in new development instead of the `post_insert` interceptor. 
+ When both interceptors are used, this `post_insert_with_metadata` interceptor runs after the + `post_insert` interceptor. The (possibly modified) response returned by + `post_insert` will be passed to + `post_insert_with_metadata`. + """ + return response, metadata + def pre_list( self, request: compute.ListMachineImagesRequest, @@ -237,12 +329,35 @@ def pre_list( def post_list(self, response: compute.MachineImageList) -> compute.MachineImageList: """Post-rpc interceptor for list - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the MachineImages server but before - it is returned to user code. + it is returned to user code. This `post_list` interceptor runs + before the `post_list_with_metadata` interceptor. """ return response + def post_list_with_metadata( + self, + response: compute.MachineImageList, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.MachineImageList, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the MachineImages server but before it is returned to user code. + + We recommend only using this `post_list_with_metadata` + interceptor in new development instead of the `post_list` interceptor. + When both interceptors are used, this `post_list_with_metadata` interceptor runs after the + `post_list` interceptor. The (possibly modified) response returned by + `post_list` will be passed to + `post_list_with_metadata`. + """ + return response, metadata + def pre_set_iam_policy( self, request: compute.SetIamPolicyMachineImageRequest, @@ -260,12 +375,35 @@ def pre_set_iam_policy( def post_set_iam_policy(self, response: compute.Policy) -> compute.Policy: """Post-rpc interceptor for set_iam_policy - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_set_iam_policy_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the MachineImages server but before - it is returned to user code. + it is returned to user code. This `post_set_iam_policy` interceptor runs + before the `post_set_iam_policy_with_metadata` interceptor. """ return response + def post_set_iam_policy_with_metadata( + self, + response: compute.Policy, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Policy, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for set_iam_policy + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the MachineImages server but before it is returned to user code. + + We recommend only using this `post_set_iam_policy_with_metadata` + interceptor in new development instead of the `post_set_iam_policy` interceptor. + When both interceptors are used, this `post_set_iam_policy_with_metadata` interceptor runs after the + `post_set_iam_policy` interceptor. The (possibly modified) response returned by + `post_set_iam_policy` will be passed to + `post_set_iam_policy_with_metadata`. 
+ """ + return response, metadata + def pre_test_iam_permissions( self, request: compute.TestIamPermissionsMachineImageRequest, @@ -286,12 +424,37 @@ def post_test_iam_permissions( ) -> compute.TestPermissionsResponse: """Post-rpc interceptor for test_iam_permissions - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_test_iam_permissions_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the MachineImages server but before - it is returned to user code. + it is returned to user code. This `post_test_iam_permissions` interceptor runs + before the `post_test_iam_permissions_with_metadata` interceptor. """ return response + def post_test_iam_permissions_with_metadata( + self, + response: compute.TestPermissionsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.TestPermissionsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for test_iam_permissions + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the MachineImages server but before it is returned to user code. + + We recommend only using this `post_test_iam_permissions_with_metadata` + interceptor in new development instead of the `post_test_iam_permissions` interceptor. + When both interceptors are used, this `post_test_iam_permissions_with_metadata` interceptor runs after the + `post_test_iam_permissions` interceptor. The (possibly modified) response returned by + `post_test_iam_permissions` will be passed to + `post_test_iam_permissions_with_metadata`. + """ + return response, metadata + @dataclasses.dataclass class MachineImagesRestStub: @@ -521,6 +684,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -671,6 +838,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -836,6 +1005,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_iam_policy(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_iam_policy_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1003,6 +1176,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_insert(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_insert_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1145,6 +1322,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list(resp) + response_metadata = [(k, str(v)) for k, v in 
response.headers.items()] + resp, _ = self._interceptor.post_list_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1316,6 +1495,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_set_iam_policy(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_set_iam_policy_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1466,6 +1649,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_test_iam_permissions(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_test_iam_permissions_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/machine_types/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/machine_types/client.py index db82778e69d7..8c74e620f6fd 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/machine_types/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/machine_types/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -456,6 +458,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/machine_types/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/machine_types/transports/rest.py index c344ffdce171..e53284abeb52 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/machine_types/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/machine_types/transports/rest.py @@ -118,12 +118,37 @@ def post_aggregated_list( ) -> compute.MachineTypeAggregatedList: """Post-rpc interceptor for aggregated_list - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_aggregated_list_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the MachineTypes server but before - it is returned to user code. 
+ it is returned to user code. This `post_aggregated_list` interceptor runs + before the `post_aggregated_list_with_metadata` interceptor. """ return response + def post_aggregated_list_with_metadata( + self, + response: compute.MachineTypeAggregatedList, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.MachineTypeAggregatedList, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for aggregated_list + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the MachineTypes server but before it is returned to user code. + + We recommend only using this `post_aggregated_list_with_metadata` + interceptor in new development instead of the `post_aggregated_list` interceptor. + When both interceptors are used, this `post_aggregated_list_with_metadata` interceptor runs after the + `post_aggregated_list` interceptor. The (possibly modified) response returned by + `post_aggregated_list` will be passed to + `post_aggregated_list_with_metadata`. + """ + return response, metadata + def pre_get( self, request: compute.GetMachineTypeRequest, @@ -139,12 +164,35 @@ def pre_get( def post_get(self, response: compute.MachineType) -> compute.MachineType: """Post-rpc interceptor for get - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the MachineTypes server but before - it is returned to user code. + it is returned to user code. This `post_get` interceptor runs + before the `post_get_with_metadata` interceptor. """ return response + def post_get_with_metadata( + self, + response: compute.MachineType, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.MachineType, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the MachineTypes server but before it is returned to user code. + + We recommend only using this `post_get_with_metadata` + interceptor in new development instead of the `post_get` interceptor. + When both interceptors are used, this `post_get_with_metadata` interceptor runs after the + `post_get` interceptor. The (possibly modified) response returned by + `post_get` will be passed to + `post_get_with_metadata`. + """ + return response, metadata + def pre_list( self, request: compute.ListMachineTypesRequest, @@ -162,12 +210,35 @@ def pre_list( def post_list(self, response: compute.MachineTypeList) -> compute.MachineTypeList: """Post-rpc interceptor for list - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the MachineTypes server but before - it is returned to user code. + it is returned to user code. This `post_list` interceptor runs + before the `post_list_with_metadata` interceptor. """ return response + def post_list_with_metadata( + self, + response: compute.MachineTypeList, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.MachineTypeList, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the MachineTypes server but before it is returned to user code. 
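Because the base `*_with_metadata` implementations simply return `(response, metadata)`, existing interceptor subclasses that only override the deprecated hooks keep working unchanged. A small sketch, with the print statement as the only assumption:

from google.cloud.compute_v1.services.machine_types.transports.rest import (
    MachineTypesRestInterceptor,
)
from google.cloud.compute_v1.types import compute


class ExistingInterceptor(MachineTypesRestInterceptor):
    # A pre-existing override of the deprecated hook; no change is required.
    def post_get(self, response: compute.MachineType) -> compute.MachineType:
        print("machine type:", response.name)
        return response

    # post_get_with_metadata is inherited and returns its inputs untouched, so the
    # (possibly modified) response from post_get is still what reaches user code.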
+ + We recommend only using this `post_list_with_metadata` + interceptor in new development instead of the `post_list` interceptor. + When both interceptors are used, this `post_list_with_metadata` interceptor runs after the + `post_list` interceptor. The (possibly modified) response returned by + `post_list` will be passed to + `post_list_with_metadata`. + """ + return response, metadata + @dataclasses.dataclass class MachineTypesRestStub: @@ -378,6 +449,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_aggregated_list(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_aggregated_list_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -527,6 +602,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -669,6 +746,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/network_attachments/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/network_attachments/client.py index 7916d20e1543..eaff86efa694 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/network_attachments/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/network_attachments/client.py @@ -15,6 +15,8 @@ # from collections import OrderedDict import functools +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -461,6 +463,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. 
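A hypothetical caller-side view of the `_add_cred_info_for_auth_errors` helper added to the clients in this change (placeholder project, region, and resource names; assumes google-auth >= 2.35.0 and that the generated error handling invokes the helper before the exception reaches user code):

from google.api_core import exceptions as core_exceptions
from google.cloud import compute_v1

client = compute_v1.NetworkAttachmentsClient()

try:
    client.get(
        project="my-project",
        region="us-central1",
        network_attachment="my-attachment",
    )
except core_exceptions.Forbidden as err:
    # For 401/403/404 errors the helper appends json.dumps(cred_info), i.e. a JSON
    # string describing the credential, as the last entry in the error details.
    print(err.details)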
diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/network_attachments/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/network_attachments/transports/rest.py index 188a56103cd6..77f2e2cdef58 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/network_attachments/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/network_attachments/transports/rest.py @@ -166,12 +166,37 @@ def post_aggregated_list( ) -> compute.NetworkAttachmentAggregatedList: """Post-rpc interceptor for aggregated_list - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_aggregated_list_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the NetworkAttachments server but before - it is returned to user code. + it is returned to user code. This `post_aggregated_list` interceptor runs + before the `post_aggregated_list_with_metadata` interceptor. """ return response + def post_aggregated_list_with_metadata( + self, + response: compute.NetworkAttachmentAggregatedList, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.NetworkAttachmentAggregatedList, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for aggregated_list + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the NetworkAttachments server but before it is returned to user code. + + We recommend only using this `post_aggregated_list_with_metadata` + interceptor in new development instead of the `post_aggregated_list` interceptor. + When both interceptors are used, this `post_aggregated_list_with_metadata` interceptor runs after the + `post_aggregated_list` interceptor. The (possibly modified) response returned by + `post_aggregated_list` will be passed to + `post_aggregated_list_with_metadata`. + """ + return response, metadata + def pre_delete( self, request: compute.DeleteNetworkAttachmentRequest, @@ -189,12 +214,35 @@ def pre_delete( def post_delete(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for delete - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the NetworkAttachments server but before - it is returned to user code. + it is returned to user code. This `post_delete` interceptor runs + before the `post_delete_with_metadata` interceptor. """ return response + def post_delete_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the NetworkAttachments server but before it is returned to user code. + + We recommend only using this `post_delete_with_metadata` + interceptor in new development instead of the `post_delete` interceptor. + When both interceptors are used, this `post_delete_with_metadata` interceptor runs after the + `post_delete` interceptor. The (possibly modified) response returned by + `post_delete` will be passed to + `post_delete_with_metadata`. 
+ """ + return response, metadata + def pre_get( self, request: compute.GetNetworkAttachmentRequest, @@ -214,12 +262,35 @@ def post_get( ) -> compute.NetworkAttachment: """Post-rpc interceptor for get - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the NetworkAttachments server but before - it is returned to user code. + it is returned to user code. This `post_get` interceptor runs + before the `post_get_with_metadata` interceptor. """ return response + def post_get_with_metadata( + self, + response: compute.NetworkAttachment, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.NetworkAttachment, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the NetworkAttachments server but before it is returned to user code. + + We recommend only using this `post_get_with_metadata` + interceptor in new development instead of the `post_get` interceptor. + When both interceptors are used, this `post_get_with_metadata` interceptor runs after the + `post_get` interceptor. The (possibly modified) response returned by + `post_get` will be passed to + `post_get_with_metadata`. + """ + return response, metadata + def pre_get_iam_policy( self, request: compute.GetIamPolicyNetworkAttachmentRequest, @@ -238,12 +309,35 @@ def pre_get_iam_policy( def post_get_iam_policy(self, response: compute.Policy) -> compute.Policy: """Post-rpc interceptor for get_iam_policy - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_iam_policy_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the NetworkAttachments server but before - it is returned to user code. + it is returned to user code. This `post_get_iam_policy` interceptor runs + before the `post_get_iam_policy_with_metadata` interceptor. """ return response + def post_get_iam_policy_with_metadata( + self, + response: compute.Policy, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Policy, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_iam_policy + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the NetworkAttachments server but before it is returned to user code. + + We recommend only using this `post_get_iam_policy_with_metadata` + interceptor in new development instead of the `post_get_iam_policy` interceptor. + When both interceptors are used, this `post_get_iam_policy_with_metadata` interceptor runs after the + `post_get_iam_policy` interceptor. The (possibly modified) response returned by + `post_get_iam_policy` will be passed to + `post_get_iam_policy_with_metadata`. + """ + return response, metadata + def pre_insert( self, request: compute.InsertNetworkAttachmentRequest, @@ -261,12 +355,35 @@ def pre_insert( def post_insert(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for insert - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_insert_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the NetworkAttachments server but before - it is returned to user code. + it is returned to user code. 
This `post_insert` interceptor runs + before the `post_insert_with_metadata` interceptor. """ return response + def post_insert_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for insert + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the NetworkAttachments server but before it is returned to user code. + + We recommend only using this `post_insert_with_metadata` + interceptor in new development instead of the `post_insert` interceptor. + When both interceptors are used, this `post_insert_with_metadata` interceptor runs after the + `post_insert` interceptor. The (possibly modified) response returned by + `post_insert` will be passed to + `post_insert_with_metadata`. + """ + return response, metadata + def pre_list( self, request: compute.ListNetworkAttachmentsRequest, @@ -286,12 +403,35 @@ def post_list( ) -> compute.NetworkAttachmentList: """Post-rpc interceptor for list - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the NetworkAttachments server but before - it is returned to user code. + it is returned to user code. This `post_list` interceptor runs + before the `post_list_with_metadata` interceptor. """ return response + def post_list_with_metadata( + self, + response: compute.NetworkAttachmentList, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.NetworkAttachmentList, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the NetworkAttachments server but before it is returned to user code. + + We recommend only using this `post_list_with_metadata` + interceptor in new development instead of the `post_list` interceptor. + When both interceptors are used, this `post_list_with_metadata` interceptor runs after the + `post_list` interceptor. The (possibly modified) response returned by + `post_list` will be passed to + `post_list_with_metadata`. + """ + return response, metadata + def pre_patch( self, request: compute.PatchNetworkAttachmentRequest, @@ -309,12 +449,35 @@ def pre_patch( def post_patch(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for patch - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_patch_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the NetworkAttachments server but before - it is returned to user code. + it is returned to user code. This `post_patch` interceptor runs + before the `post_patch_with_metadata` interceptor. """ return response + def post_patch_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for patch + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the NetworkAttachments server but before it is returned to user code. + + We recommend only using this `post_patch_with_metadata` + interceptor in new development instead of the `post_patch` interceptor. 
+ When both interceptors are used, this `post_patch_with_metadata` interceptor runs after the + `post_patch` interceptor. The (possibly modified) response returned by + `post_patch` will be passed to + `post_patch_with_metadata`. + """ + return response, metadata + def pre_set_iam_policy( self, request: compute.SetIamPolicyNetworkAttachmentRequest, @@ -333,12 +496,35 @@ def pre_set_iam_policy( def post_set_iam_policy(self, response: compute.Policy) -> compute.Policy: """Post-rpc interceptor for set_iam_policy - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_set_iam_policy_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the NetworkAttachments server but before - it is returned to user code. + it is returned to user code. This `post_set_iam_policy` interceptor runs + before the `post_set_iam_policy_with_metadata` interceptor. """ return response + def post_set_iam_policy_with_metadata( + self, + response: compute.Policy, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Policy, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for set_iam_policy + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the NetworkAttachments server but before it is returned to user code. + + We recommend only using this `post_set_iam_policy_with_metadata` + interceptor in new development instead of the `post_set_iam_policy` interceptor. + When both interceptors are used, this `post_set_iam_policy_with_metadata` interceptor runs after the + `post_set_iam_policy` interceptor. The (possibly modified) response returned by + `post_set_iam_policy` will be passed to + `post_set_iam_policy_with_metadata`. + """ + return response, metadata + def pre_test_iam_permissions( self, request: compute.TestIamPermissionsNetworkAttachmentRequest, @@ -359,12 +545,37 @@ def post_test_iam_permissions( ) -> compute.TestPermissionsResponse: """Post-rpc interceptor for test_iam_permissions - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_test_iam_permissions_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the NetworkAttachments server but before - it is returned to user code. + it is returned to user code. This `post_test_iam_permissions` interceptor runs + before the `post_test_iam_permissions_with_metadata` interceptor. """ return response + def post_test_iam_permissions_with_metadata( + self, + response: compute.TestPermissionsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.TestPermissionsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for test_iam_permissions + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the NetworkAttachments server but before it is returned to user code. + + We recommend only using this `post_test_iam_permissions_with_metadata` + interceptor in new development instead of the `post_test_iam_permissions` interceptor. + When both interceptors are used, this `post_test_iam_permissions_with_metadata` interceptor runs after the + `post_test_iam_permissions` interceptor. The (possibly modified) response returned by + `post_test_iam_permissions` will be passed to + `post_test_iam_permissions_with_metadata`. 
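A small sketch of the ordering these docstrings describe, using `post_patch` on NetworkAttachments: the deprecated hook runs first, and whatever it returns is what the `_with_metadata` hook receives. The use of `Operation.description` is purely illustrative.

from typing import Sequence, Tuple, Union

from google.cloud.compute_v1.services.network_attachments.transports.rest import (
    NetworkAttachmentsRestInterceptor,
)
from google.cloud.compute_v1.types import compute


class AuditingInterceptor(NetworkAttachmentsRestInterceptor):
    def post_patch(self, response: compute.Operation) -> compute.Operation:
        # Deprecated hook: runs first and may modify the response.
        response.description = "seen by post_patch"
        return response

    def post_patch_with_metadata(
        self,
        response: compute.Operation,
        metadata: Sequence[Tuple[str, Union[str, bytes]]],
    ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]:
        # Receives the response already touched by post_patch, plus the HTTP headers.
        assert response.description == "seen by post_patch"
        return response, metadata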
+ """ + return response, metadata + @dataclasses.dataclass class NetworkAttachmentsRestStub: @@ -578,6 +789,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_aggregated_list(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_aggregated_list_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -741,6 +956,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -889,6 +1108,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1055,6 +1276,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_iam_policy(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_iam_policy_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1224,6 +1449,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_insert(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_insert_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1370,6 +1599,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1541,6 +1772,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_patch(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_patch_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1713,6 +1948,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_set_iam_policy(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_set_iam_policy_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1866,6 +2105,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_test_iam_permissions(resp) + response_metadata = [(k, str(v)) for k, v in 
response.headers.items()] + resp, _ = self._interceptor.post_test_iam_permissions_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/network_edge_security_services/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/network_edge_security_services/client.py index fdcf09a73eb2..7016da9453c0 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/network_edge_security_services/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/network_edge_security_services/client.py @@ -15,6 +15,8 @@ # from collections import OrderedDict import functools +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -465,6 +467,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/network_edge_security_services/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/network_edge_security_services/transports/rest.py index c2a28f890fb2..b0cc76feef10 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/network_edge_security_services/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/network_edge_security_services/transports/rest.py @@ -134,12 +134,38 @@ def post_aggregated_list( ) -> compute.NetworkEdgeSecurityServiceAggregatedList: """Post-rpc interceptor for aggregated_list - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_aggregated_list_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the NetworkEdgeSecurityServices server but before - it is returned to user code. + it is returned to user code. This `post_aggregated_list` interceptor runs + before the `post_aggregated_list_with_metadata` interceptor. """ return response + def post_aggregated_list_with_metadata( + self, + response: compute.NetworkEdgeSecurityServiceAggregatedList, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.NetworkEdgeSecurityServiceAggregatedList, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for aggregated_list + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the NetworkEdgeSecurityServices server but before it is returned to user code. 
+ + We recommend only using this `post_aggregated_list_with_metadata` + interceptor in new development instead of the `post_aggregated_list` interceptor. + When both interceptors are used, this `post_aggregated_list_with_metadata` interceptor runs after the + `post_aggregated_list` interceptor. The (possibly modified) response returned by + `post_aggregated_list` will be passed to + `post_aggregated_list_with_metadata`. + """ + return response, metadata + def pre_delete( self, request: compute.DeleteNetworkEdgeSecurityServiceRequest, @@ -158,12 +184,35 @@ def pre_delete( def post_delete(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for delete - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the NetworkEdgeSecurityServices server but before - it is returned to user code. + it is returned to user code. This `post_delete` interceptor runs + before the `post_delete_with_metadata` interceptor. """ return response + def post_delete_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the NetworkEdgeSecurityServices server but before it is returned to user code. + + We recommend only using this `post_delete_with_metadata` + interceptor in new development instead of the `post_delete` interceptor. + When both interceptors are used, this `post_delete_with_metadata` interceptor runs after the + `post_delete` interceptor. The (possibly modified) response returned by + `post_delete` will be passed to + `post_delete_with_metadata`. + """ + return response, metadata + def pre_get( self, request: compute.GetNetworkEdgeSecurityServiceRequest, @@ -184,12 +233,37 @@ def post_get( ) -> compute.NetworkEdgeSecurityService: """Post-rpc interceptor for get - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the NetworkEdgeSecurityServices server but before - it is returned to user code. + it is returned to user code. This `post_get` interceptor runs + before the `post_get_with_metadata` interceptor. """ return response + def post_get_with_metadata( + self, + response: compute.NetworkEdgeSecurityService, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.NetworkEdgeSecurityService, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for get + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the NetworkEdgeSecurityServices server but before it is returned to user code. + + We recommend only using this `post_get_with_metadata` + interceptor in new development instead of the `post_get` interceptor. + When both interceptors are used, this `post_get_with_metadata` interceptor runs after the + `post_get` interceptor. The (possibly modified) response returned by + `post_get` will be passed to + `post_get_with_metadata`. 
+ """ + return response, metadata + def pre_insert( self, request: compute.InsertNetworkEdgeSecurityServiceRequest, @@ -208,12 +282,35 @@ def pre_insert( def post_insert(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for insert - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_insert_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the NetworkEdgeSecurityServices server but before - it is returned to user code. + it is returned to user code. This `post_insert` interceptor runs + before the `post_insert_with_metadata` interceptor. """ return response + def post_insert_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for insert + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the NetworkEdgeSecurityServices server but before it is returned to user code. + + We recommend only using this `post_insert_with_metadata` + interceptor in new development instead of the `post_insert` interceptor. + When both interceptors are used, this `post_insert_with_metadata` interceptor runs after the + `post_insert` interceptor. The (possibly modified) response returned by + `post_insert` will be passed to + `post_insert_with_metadata`. + """ + return response, metadata + def pre_patch( self, request: compute.PatchNetworkEdgeSecurityServiceRequest, @@ -232,12 +329,35 @@ def pre_patch( def post_patch(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for patch - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_patch_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the NetworkEdgeSecurityServices server but before - it is returned to user code. + it is returned to user code. This `post_patch` interceptor runs + before the `post_patch_with_metadata` interceptor. """ return response + def post_patch_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for patch + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the NetworkEdgeSecurityServices server but before it is returned to user code. + + We recommend only using this `post_patch_with_metadata` + interceptor in new development instead of the `post_patch` interceptor. + When both interceptors are used, this `post_patch_with_metadata` interceptor runs after the + `post_patch` interceptor. The (possibly modified) response returned by + `post_patch` will be passed to + `post_patch_with_metadata`. 
+ """ + return response, metadata + @dataclasses.dataclass class NetworkEdgeSecurityServicesRestStub: @@ -453,6 +573,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_aggregated_list(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_aggregated_list_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -617,6 +741,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -762,6 +890,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -930,6 +1060,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_insert(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_insert_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1096,6 +1230,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_patch(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_patch_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/network_endpoint_groups/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/network_endpoint_groups/client.py index 0d91efec9d6a..c6383d546885 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/network_endpoint_groups/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/network_endpoint_groups/client.py @@ -15,6 +15,8 @@ # from collections import OrderedDict import functools +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -463,6 +465,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. 
+ """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/network_endpoint_groups/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/network_endpoint_groups/transports/rest.py index 1f660c604392..0f9fb6c7fdfe 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/network_endpoint_groups/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/network_endpoint_groups/transports/rest.py @@ -166,12 +166,38 @@ def post_aggregated_list( ) -> compute.NetworkEndpointGroupAggregatedList: """Post-rpc interceptor for aggregated_list - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_aggregated_list_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the NetworkEndpointGroups server but before - it is returned to user code. + it is returned to user code. This `post_aggregated_list` interceptor runs + before the `post_aggregated_list_with_metadata` interceptor. """ return response + def post_aggregated_list_with_metadata( + self, + response: compute.NetworkEndpointGroupAggregatedList, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.NetworkEndpointGroupAggregatedList, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for aggregated_list + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the NetworkEndpointGroups server but before it is returned to user code. + + We recommend only using this `post_aggregated_list_with_metadata` + interceptor in new development instead of the `post_aggregated_list` interceptor. + When both interceptors are used, this `post_aggregated_list_with_metadata` interceptor runs after the + `post_aggregated_list` interceptor. The (possibly modified) response returned by + `post_aggregated_list` will be passed to + `post_aggregated_list_with_metadata`. + """ + return response, metadata + def pre_attach_network_endpoints( self, request: compute.AttachNetworkEndpointsNetworkEndpointGroupRequest, @@ -192,12 +218,35 @@ def post_attach_network_endpoints( ) -> compute.Operation: """Post-rpc interceptor for attach_network_endpoints - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_attach_network_endpoints_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the NetworkEndpointGroups server but before - it is returned to user code. + it is returned to user code. This `post_attach_network_endpoints` interceptor runs + before the `post_attach_network_endpoints_with_metadata` interceptor. 
""" return response + def post_attach_network_endpoints_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for attach_network_endpoints + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the NetworkEndpointGroups server but before it is returned to user code. + + We recommend only using this `post_attach_network_endpoints_with_metadata` + interceptor in new development instead of the `post_attach_network_endpoints` interceptor. + When both interceptors are used, this `post_attach_network_endpoints_with_metadata` interceptor runs after the + `post_attach_network_endpoints` interceptor. The (possibly modified) response returned by + `post_attach_network_endpoints` will be passed to + `post_attach_network_endpoints_with_metadata`. + """ + return response, metadata + def pre_delete( self, request: compute.DeleteNetworkEndpointGroupRequest, @@ -216,12 +265,35 @@ def pre_delete( def post_delete(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for delete - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the NetworkEndpointGroups server but before - it is returned to user code. + it is returned to user code. This `post_delete` interceptor runs + before the `post_delete_with_metadata` interceptor. """ return response + def post_delete_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the NetworkEndpointGroups server but before it is returned to user code. + + We recommend only using this `post_delete_with_metadata` + interceptor in new development instead of the `post_delete` interceptor. + When both interceptors are used, this `post_delete_with_metadata` interceptor runs after the + `post_delete` interceptor. The (possibly modified) response returned by + `post_delete` will be passed to + `post_delete_with_metadata`. + """ + return response, metadata + def pre_detach_network_endpoints( self, request: compute.DetachNetworkEndpointsNetworkEndpointGroupRequest, @@ -242,12 +314,35 @@ def post_detach_network_endpoints( ) -> compute.Operation: """Post-rpc interceptor for detach_network_endpoints - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_detach_network_endpoints_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the NetworkEndpointGroups server but before - it is returned to user code. + it is returned to user code. This `post_detach_network_endpoints` interceptor runs + before the `post_detach_network_endpoints_with_metadata` interceptor. 
""" return response + def post_detach_network_endpoints_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for detach_network_endpoints + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the NetworkEndpointGroups server but before it is returned to user code. + + We recommend only using this `post_detach_network_endpoints_with_metadata` + interceptor in new development instead of the `post_detach_network_endpoints` interceptor. + When both interceptors are used, this `post_detach_network_endpoints_with_metadata` interceptor runs after the + `post_detach_network_endpoints` interceptor. The (possibly modified) response returned by + `post_detach_network_endpoints` will be passed to + `post_detach_network_endpoints_with_metadata`. + """ + return response, metadata + def pre_get( self, request: compute.GetNetworkEndpointGroupRequest, @@ -267,12 +362,35 @@ def post_get( ) -> compute.NetworkEndpointGroup: """Post-rpc interceptor for get - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the NetworkEndpointGroups server but before - it is returned to user code. + it is returned to user code. This `post_get` interceptor runs + before the `post_get_with_metadata` interceptor. """ return response + def post_get_with_metadata( + self, + response: compute.NetworkEndpointGroup, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.NetworkEndpointGroup, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the NetworkEndpointGroups server but before it is returned to user code. + + We recommend only using this `post_get_with_metadata` + interceptor in new development instead of the `post_get` interceptor. + When both interceptors are used, this `post_get_with_metadata` interceptor runs after the + `post_get` interceptor. The (possibly modified) response returned by + `post_get` will be passed to + `post_get_with_metadata`. + """ + return response, metadata + def pre_insert( self, request: compute.InsertNetworkEndpointGroupRequest, @@ -291,12 +409,35 @@ def pre_insert( def post_insert(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for insert - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_insert_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the NetworkEndpointGroups server but before - it is returned to user code. + it is returned to user code. This `post_insert` interceptor runs + before the `post_insert_with_metadata` interceptor. """ return response + def post_insert_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for insert + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the NetworkEndpointGroups server but before it is returned to user code. 
+ + We recommend only using this `post_insert_with_metadata` + interceptor in new development instead of the `post_insert` interceptor. + When both interceptors are used, this `post_insert_with_metadata` interceptor runs after the + `post_insert` interceptor. The (possibly modified) response returned by + `post_insert` will be passed to + `post_insert_with_metadata`. + """ + return response, metadata + def pre_list( self, request: compute.ListNetworkEndpointGroupsRequest, @@ -317,12 +458,37 @@ def post_list( ) -> compute.NetworkEndpointGroupList: """Post-rpc interceptor for list - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the NetworkEndpointGroups server but before - it is returned to user code. + it is returned to user code. This `post_list` interceptor runs + before the `post_list_with_metadata` interceptor. """ return response + def post_list_with_metadata( + self, + response: compute.NetworkEndpointGroupList, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.NetworkEndpointGroupList, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the NetworkEndpointGroups server but before it is returned to user code. + + We recommend only using this `post_list_with_metadata` + interceptor in new development instead of the `post_list` interceptor. + When both interceptors are used, this `post_list_with_metadata` interceptor runs after the + `post_list` interceptor. The (possibly modified) response returned by + `post_list` will be passed to + `post_list_with_metadata`. + """ + return response, metadata + def pre_list_network_endpoints( self, request: compute.ListNetworkEndpointsNetworkEndpointGroupsRequest, @@ -343,12 +509,38 @@ def post_list_network_endpoints( ) -> compute.NetworkEndpointGroupsListNetworkEndpoints: """Post-rpc interceptor for list_network_endpoints - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_network_endpoints_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the NetworkEndpointGroups server but before - it is returned to user code. + it is returned to user code. This `post_list_network_endpoints` interceptor runs + before the `post_list_network_endpoints_with_metadata` interceptor. """ return response + def post_list_network_endpoints_with_metadata( + self, + response: compute.NetworkEndpointGroupsListNetworkEndpoints, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.NetworkEndpointGroupsListNetworkEndpoints, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_network_endpoints + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the NetworkEndpointGroups server but before it is returned to user code. + + We recommend only using this `post_list_network_endpoints_with_metadata` + interceptor in new development instead of the `post_list_network_endpoints` interceptor. + When both interceptors are used, this `post_list_network_endpoints_with_metadata` interceptor runs after the + `post_list_network_endpoints` interceptor. 
The (possibly modified) response returned by + `post_list_network_endpoints` will be passed to + `post_list_network_endpoints_with_metadata`. + """ + return response, metadata + def pre_test_iam_permissions( self, request: compute.TestIamPermissionsNetworkEndpointGroupRequest, @@ -369,12 +561,37 @@ def post_test_iam_permissions( ) -> compute.TestPermissionsResponse: """Post-rpc interceptor for test_iam_permissions - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_test_iam_permissions_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the NetworkEndpointGroups server but before - it is returned to user code. + it is returned to user code. This `post_test_iam_permissions` interceptor runs + before the `post_test_iam_permissions_with_metadata` interceptor. """ return response + def post_test_iam_permissions_with_metadata( + self, + response: compute.TestPermissionsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.TestPermissionsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for test_iam_permissions + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the NetworkEndpointGroups server but before it is returned to user code. + + We recommend only using this `post_test_iam_permissions_with_metadata` + interceptor in new development instead of the `post_test_iam_permissions` interceptor. + When both interceptors are used, this `post_test_iam_permissions_with_metadata` interceptor runs after the + `post_test_iam_permissions` interceptor. The (possibly modified) response returned by + `post_test_iam_permissions` will be passed to + `post_test_iam_permissions_with_metadata`. 
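A sketch of reading the `permissions` field inside the new `post_test_iam_permissions_with_metadata` hook; the permission string is only an example.

from google.cloud.compute_v1.services.network_endpoint_groups.transports.rest import (
    NetworkEndpointGroupsRestInterceptor,
)


class PermissionCheckInterceptor(NetworkEndpointGroupsRestInterceptor):
    def post_test_iam_permissions_with_metadata(self, response, metadata):
        # response.permissions lists the permissions the caller actually holds.
        granted = set(response.permissions)
        if "compute.networkEndpointGroups.use" not in granted:
            print("caller is missing compute.networkEndpointGroups.use")
        return response, metadata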
+ """ + return response, metadata + @dataclasses.dataclass class NetworkEndpointGroupsRestStub: @@ -586,6 +803,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_aggregated_list(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_aggregated_list_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -756,6 +977,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_attach_network_endpoints(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_attach_network_endpoints_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -916,6 +1141,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1084,6 +1313,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_detach_network_endpoints(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_detach_network_endpoints_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1236,6 +1469,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1402,6 +1637,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_insert(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_insert_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1544,6 +1783,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1699,6 +1940,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_network_endpoints(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_network_endpoints_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1856,6 +2101,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = 
self._interceptor.post_test_iam_permissions(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_test_iam_permissions_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/network_firewall_policies/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/network_firewall_policies/client.py index 86042cde4d69..121fb8fcb00b 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/network_firewall_policies/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/network_firewall_policies/client.py @@ -15,6 +15,8 @@ # from collections import OrderedDict import functools +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -463,6 +465,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -1255,6 +1284,131 @@ def error_code(self): # Done; return the response. return response + def aggregated_list( + self, + request: Optional[ + Union[compute.AggregatedListNetworkFirewallPoliciesRequest, dict] + ] = None, + *, + project: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.AggregatedListPager: + r"""Retrieves an aggregated list of network firewall policies, + listing network firewall policies from all applicable scopes + (global and regional) and grouping the results per scope. To + prevent failure, Google recommends that you set the + ``returnPartialSuccess`` parameter to ``true``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_aggregated_list(): + # Create a client + client = compute_v1.NetworkFirewallPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.AggregatedListNetworkFirewallPoliciesRequest( + project="project_value", + ) + + # Make the request + page_result = client.aggregated_list(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.AggregatedListNetworkFirewallPoliciesRequest, dict]): + The request object. A request message for + NetworkFirewallPolicies.AggregatedList. + See the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.compute_v1.services.network_firewall_policies.pagers.AggregatedListPager: + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, compute.AggregatedListNetworkFirewallPoliciesRequest + ): + request = compute.AggregatedListNetworkFirewallPoliciesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.aggregated_list] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("project", request.project),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.AggregatedListPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + def clone_rules_unary( self, request: Optional[ diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/network_firewall_policies/pagers.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/network_firewall_policies/pagers.py index 5e1a7afcaeeb..b66397a23560 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/network_firewall_policies/pagers.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/network_firewall_policies/pagers.py @@ -41,6 +41,85 @@ from google.cloud.compute_v1.types import compute +class AggregatedListPager: + """A pager for iterating through ``aggregated_list`` requests. + + This class thinly wraps an initial + :class:`google.cloud.compute_v1.types.NetworkFirewallPolicyAggregatedList` object, and + provides an ``__iter__`` method to iterate through its + ``items`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``AggregatedList`` requests and continue to iterate + through the ``items`` field on the + corresponding responses. + + All the usual :class:`google.cloud.compute_v1.types.NetworkFirewallPolicyAggregatedList` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., compute.NetworkFirewallPolicyAggregatedList], + request: compute.AggregatedListNetworkFirewallPoliciesRequest, + response: compute.NetworkFirewallPolicyAggregatedList, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.compute_v1.types.AggregatedListNetworkFirewallPoliciesRequest): + The initial request object. + response (google.cloud.compute_v1.types.NetworkFirewallPolicyAggregatedList): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
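A short usage sketch for the new AggregatedListPager (illustrative only, not part of the generated change): the project ID is a placeholder, and the `firewall_policies` field on `FirewallPoliciesScopedList`, the scope key format, and `return_partial_success` are assumptions based on other Compute aggregated-list types.

    from google.cloud import compute_v1

    client = compute_v1.NetworkFirewallPoliciesClient()
    request = compute_v1.AggregatedListNetworkFirewallPoliciesRequest(
        project="my-project",          # placeholder project ID
        return_partial_success=True,   # recommended in the method description above
    )

    pager = client.aggregated_list(request=request)

    # Iterating the pager yields (scope, FirewallPoliciesScopedList) pairs and
    # transparently fetches further pages via next_page_token.
    for scope, scoped_list in pager:
        for policy in scoped_list.firewall_policies:
            print(scope, policy.name)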
+ """ + self._method = method + self._request = compute.AggregatedListNetworkFirewallPoliciesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[compute.NetworkFirewallPolicyAggregatedList]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__(self) -> Iterator[Tuple[str, compute.FirewallPoliciesScopedList]]: + for page in self.pages: + yield from page.items.items() + + def get(self, key: str) -> Optional[compute.FirewallPoliciesScopedList]: + return self._response.items.get(key) + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + class ListPager: """A pager for iterating through ``list`` requests. diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/network_firewall_policies/transports/base.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/network_firewall_policies/transports/base.py index 369cd9dbabe5..43db934d1ab0 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/network_firewall_policies/transports/base.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/network_firewall_policies/transports/base.py @@ -143,6 +143,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.aggregated_list: gapic_v1.method.wrap_method( + self.aggregated_list, + default_timeout=None, + client_info=client_info, + ), self.clone_rules: gapic_v1.method.wrap_method( self.clone_rules, default_timeout=None, @@ -242,6 +247,18 @@ def add_rule( ]: raise NotImplementedError() + @property + def aggregated_list( + self, + ) -> Callable[ + [compute.AggregatedListNetworkFirewallPoliciesRequest], + Union[ + compute.NetworkFirewallPolicyAggregatedList, + Awaitable[compute.NetworkFirewallPolicyAggregatedList], + ], + ]: + raise NotImplementedError() + @property def clone_rules( self, diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/network_firewall_policies/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/network_firewall_policies/transports/rest.py index 3f1974d73269..4f57cb9f55ff 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/network_firewall_policies/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/network_firewall_policies/transports/rest.py @@ -84,6 +84,14 @@ def post_add_rule(self, response): logging.log(f"Received response: {response}") return response + def pre_aggregated_list(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_aggregated_list(self, response): + logging.log(f"Received response: {response}") + return response + def pre_clone_rules(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -220,12 +228,35 @@ def pre_add_association( def post_add_association(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for add_association - Override in a subclass to manipulate the response + DEPRECATED. 
Please use the `post_add_association_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the NetworkFirewallPolicies server but before - it is returned to user code. + it is returned to user code. This `post_add_association` interceptor runs + before the `post_add_association_with_metadata` interceptor. """ return response + def post_add_association_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for add_association + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the NetworkFirewallPolicies server but before it is returned to user code. + + We recommend only using this `post_add_association_with_metadata` + interceptor in new development instead of the `post_add_association` interceptor. + When both interceptors are used, this `post_add_association_with_metadata` interceptor runs after the + `post_add_association` interceptor. The (possibly modified) response returned by + `post_add_association` will be passed to + `post_add_association_with_metadata`. + """ + return response, metadata + def pre_add_rule( self, request: compute.AddRuleNetworkFirewallPolicyRequest, @@ -244,12 +275,87 @@ def pre_add_rule( def post_add_rule(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for add_rule - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_add_rule_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the NetworkFirewallPolicies server but before + it is returned to user code. This `post_add_rule` interceptor runs + before the `post_add_rule_with_metadata` interceptor. + """ + return response + + def post_add_rule_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for add_rule + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the NetworkFirewallPolicies server but before it is returned to user code. + + We recommend only using this `post_add_rule_with_metadata` + interceptor in new development instead of the `post_add_rule` interceptor. + When both interceptors are used, this `post_add_rule_with_metadata` interceptor runs after the + `post_add_rule` interceptor. The (possibly modified) response returned by + `post_add_rule` will be passed to + `post_add_rule_with_metadata`. + """ + return response, metadata + + def pre_aggregated_list( + self, + request: compute.AggregatedListNetworkFirewallPoliciesRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.AggregatedListNetworkFirewallPoliciesRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for aggregated_list + + Override in a subclass to manipulate the request or metadata + before they are sent to the NetworkFirewallPolicies server. + """ + return request, metadata + + def post_aggregated_list( + self, response: compute.NetworkFirewallPolicyAggregatedList + ) -> compute.NetworkFirewallPolicyAggregatedList: + """Post-rpc interceptor for aggregated_list + + DEPRECATED. Please use the `post_aggregated_list_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response after it is returned by the NetworkFirewallPolicies server but before - it is returned to user code. + it is returned to user code. This `post_aggregated_list` interceptor runs + before the `post_aggregated_list_with_metadata` interceptor. """ return response + def post_aggregated_list_with_metadata( + self, + response: compute.NetworkFirewallPolicyAggregatedList, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.NetworkFirewallPolicyAggregatedList, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for aggregated_list + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the NetworkFirewallPolicies server but before it is returned to user code. + + We recommend only using this `post_aggregated_list_with_metadata` + interceptor in new development instead of the `post_aggregated_list` interceptor. + When both interceptors are used, this `post_aggregated_list_with_metadata` interceptor runs after the + `post_aggregated_list` interceptor. The (possibly modified) response returned by + `post_aggregated_list` will be passed to + `post_aggregated_list_with_metadata`. + """ + return response, metadata + def pre_clone_rules( self, request: compute.CloneRulesNetworkFirewallPolicyRequest, @@ -268,12 +374,35 @@ def pre_clone_rules( def post_clone_rules(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for clone_rules - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_clone_rules_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the NetworkFirewallPolicies server but before - it is returned to user code. + it is returned to user code. This `post_clone_rules` interceptor runs + before the `post_clone_rules_with_metadata` interceptor. """ return response + def post_clone_rules_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for clone_rules + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the NetworkFirewallPolicies server but before it is returned to user code. + + We recommend only using this `post_clone_rules_with_metadata` + interceptor in new development instead of the `post_clone_rules` interceptor. + When both interceptors are used, this `post_clone_rules_with_metadata` interceptor runs after the + `post_clone_rules` interceptor. The (possibly modified) response returned by + `post_clone_rules` will be passed to + `post_clone_rules_with_metadata`. + """ + return response, metadata + def pre_delete( self, request: compute.DeleteNetworkFirewallPolicyRequest, @@ -292,12 +421,35 @@ def pre_delete( def post_delete(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for delete - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the NetworkFirewallPolicies server but before - it is returned to user code. + it is returned to user code. This `post_delete` interceptor runs + before the `post_delete_with_metadata` interceptor. 
""" return response + def post_delete_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the NetworkFirewallPolicies server but before it is returned to user code. + + We recommend only using this `post_delete_with_metadata` + interceptor in new development instead of the `post_delete` interceptor. + When both interceptors are used, this `post_delete_with_metadata` interceptor runs after the + `post_delete` interceptor. The (possibly modified) response returned by + `post_delete` will be passed to + `post_delete_with_metadata`. + """ + return response, metadata + def pre_get( self, request: compute.GetNetworkFirewallPolicyRequest, @@ -315,12 +467,35 @@ def pre_get( def post_get(self, response: compute.FirewallPolicy) -> compute.FirewallPolicy: """Post-rpc interceptor for get - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the NetworkFirewallPolicies server but before - it is returned to user code. + it is returned to user code. This `post_get` interceptor runs + before the `post_get_with_metadata` interceptor. """ return response + def post_get_with_metadata( + self, + response: compute.FirewallPolicy, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.FirewallPolicy, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the NetworkFirewallPolicies server but before it is returned to user code. + + We recommend only using this `post_get_with_metadata` + interceptor in new development instead of the `post_get` interceptor. + When both interceptors are used, this `post_get_with_metadata` interceptor runs after the + `post_get` interceptor. The (possibly modified) response returned by + `post_get` will be passed to + `post_get_with_metadata`. + """ + return response, metadata + def pre_get_association( self, request: compute.GetAssociationNetworkFirewallPolicyRequest, @@ -341,12 +516,37 @@ def post_get_association( ) -> compute.FirewallPolicyAssociation: """Post-rpc interceptor for get_association - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_association_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the NetworkFirewallPolicies server but before - it is returned to user code. + it is returned to user code. This `post_get_association` interceptor runs + before the `post_get_association_with_metadata` interceptor. """ return response + def post_get_association_with_metadata( + self, + response: compute.FirewallPolicyAssociation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.FirewallPolicyAssociation, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for get_association + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the NetworkFirewallPolicies server but before it is returned to user code. 
+ + We recommend only using this `post_get_association_with_metadata` + interceptor in new development instead of the `post_get_association` interceptor. + When both interceptors are used, this `post_get_association_with_metadata` interceptor runs after the + `post_get_association` interceptor. The (possibly modified) response returned by + `post_get_association` will be passed to + `post_get_association_with_metadata`. + """ + return response, metadata + def pre_get_iam_policy( self, request: compute.GetIamPolicyNetworkFirewallPolicyRequest, @@ -365,12 +565,35 @@ def pre_get_iam_policy( def post_get_iam_policy(self, response: compute.Policy) -> compute.Policy: """Post-rpc interceptor for get_iam_policy - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_iam_policy_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the NetworkFirewallPolicies server but before - it is returned to user code. + it is returned to user code. This `post_get_iam_policy` interceptor runs + before the `post_get_iam_policy_with_metadata` interceptor. """ return response + def post_get_iam_policy_with_metadata( + self, + response: compute.Policy, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Policy, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_iam_policy + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the NetworkFirewallPolicies server but before it is returned to user code. + + We recommend only using this `post_get_iam_policy_with_metadata` + interceptor in new development instead of the `post_get_iam_policy` interceptor. + When both interceptors are used, this `post_get_iam_policy_with_metadata` interceptor runs after the + `post_get_iam_policy` interceptor. The (possibly modified) response returned by + `post_get_iam_policy` will be passed to + `post_get_iam_policy_with_metadata`. + """ + return response, metadata + def pre_get_rule( self, request: compute.GetRuleNetworkFirewallPolicyRequest, @@ -391,12 +614,35 @@ def post_get_rule( ) -> compute.FirewallPolicyRule: """Post-rpc interceptor for get_rule - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_rule_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the NetworkFirewallPolicies server but before - it is returned to user code. + it is returned to user code. This `post_get_rule` interceptor runs + before the `post_get_rule_with_metadata` interceptor. """ return response + def post_get_rule_with_metadata( + self, + response: compute.FirewallPolicyRule, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.FirewallPolicyRule, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_rule + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the NetworkFirewallPolicies server but before it is returned to user code. + + We recommend only using this `post_get_rule_with_metadata` + interceptor in new development instead of the `post_get_rule` interceptor. + When both interceptors are used, this `post_get_rule_with_metadata` interceptor runs after the + `post_get_rule` interceptor. The (possibly modified) response returned by + `post_get_rule` will be passed to + `post_get_rule_with_metadata`. 
+ """ + return response, metadata + def pre_insert( self, request: compute.InsertNetworkFirewallPolicyRequest, @@ -415,12 +661,35 @@ def pre_insert( def post_insert(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for insert - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_insert_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the NetworkFirewallPolicies server but before - it is returned to user code. + it is returned to user code. This `post_insert` interceptor runs + before the `post_insert_with_metadata` interceptor. """ return response + def post_insert_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for insert + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the NetworkFirewallPolicies server but before it is returned to user code. + + We recommend only using this `post_insert_with_metadata` + interceptor in new development instead of the `post_insert` interceptor. + When both interceptors are used, this `post_insert_with_metadata` interceptor runs after the + `post_insert` interceptor. The (possibly modified) response returned by + `post_insert` will be passed to + `post_insert_with_metadata`. + """ + return response, metadata + def pre_list( self, request: compute.ListNetworkFirewallPoliciesRequest, @@ -441,12 +710,35 @@ def post_list( ) -> compute.FirewallPolicyList: """Post-rpc interceptor for list - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the NetworkFirewallPolicies server but before - it is returned to user code. + it is returned to user code. This `post_list` interceptor runs + before the `post_list_with_metadata` interceptor. """ return response + def post_list_with_metadata( + self, + response: compute.FirewallPolicyList, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.FirewallPolicyList, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the NetworkFirewallPolicies server but before it is returned to user code. + + We recommend only using this `post_list_with_metadata` + interceptor in new development instead of the `post_list` interceptor. + When both interceptors are used, this `post_list_with_metadata` interceptor runs after the + `post_list` interceptor. The (possibly modified) response returned by + `post_list` will be passed to + `post_list_with_metadata`. + """ + return response, metadata + def pre_patch( self, request: compute.PatchNetworkFirewallPolicyRequest, @@ -465,12 +757,35 @@ def pre_patch( def post_patch(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for patch - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_patch_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the NetworkFirewallPolicies server but before - it is returned to user code. + it is returned to user code. 
This `post_patch` interceptor runs + before the `post_patch_with_metadata` interceptor. """ return response + def post_patch_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for patch + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the NetworkFirewallPolicies server but before it is returned to user code. + + We recommend only using this `post_patch_with_metadata` + interceptor in new development instead of the `post_patch` interceptor. + When both interceptors are used, this `post_patch_with_metadata` interceptor runs after the + `post_patch` interceptor. The (possibly modified) response returned by + `post_patch` will be passed to + `post_patch_with_metadata`. + """ + return response, metadata + def pre_patch_rule( self, request: compute.PatchRuleNetworkFirewallPolicyRequest, @@ -489,12 +804,35 @@ def pre_patch_rule( def post_patch_rule(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for patch_rule - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_patch_rule_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the NetworkFirewallPolicies server but before - it is returned to user code. + it is returned to user code. This `post_patch_rule` interceptor runs + before the `post_patch_rule_with_metadata` interceptor. """ return response + def post_patch_rule_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for patch_rule + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the NetworkFirewallPolicies server but before it is returned to user code. + + We recommend only using this `post_patch_rule_with_metadata` + interceptor in new development instead of the `post_patch_rule` interceptor. + When both interceptors are used, this `post_patch_rule_with_metadata` interceptor runs after the + `post_patch_rule` interceptor. The (possibly modified) response returned by + `post_patch_rule` will be passed to + `post_patch_rule_with_metadata`. + """ + return response, metadata + def pre_remove_association( self, request: compute.RemoveAssociationNetworkFirewallPolicyRequest, @@ -513,12 +851,35 @@ def pre_remove_association( def post_remove_association(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for remove_association - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_remove_association_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the NetworkFirewallPolicies server but before - it is returned to user code. + it is returned to user code. This `post_remove_association` interceptor runs + before the `post_remove_association_with_metadata` interceptor. 
""" return response + def post_remove_association_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for remove_association + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the NetworkFirewallPolicies server but before it is returned to user code. + + We recommend only using this `post_remove_association_with_metadata` + interceptor in new development instead of the `post_remove_association` interceptor. + When both interceptors are used, this `post_remove_association_with_metadata` interceptor runs after the + `post_remove_association` interceptor. The (possibly modified) response returned by + `post_remove_association` will be passed to + `post_remove_association_with_metadata`. + """ + return response, metadata + def pre_remove_rule( self, request: compute.RemoveRuleNetworkFirewallPolicyRequest, @@ -537,12 +898,35 @@ def pre_remove_rule( def post_remove_rule(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for remove_rule - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_remove_rule_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the NetworkFirewallPolicies server but before - it is returned to user code. + it is returned to user code. This `post_remove_rule` interceptor runs + before the `post_remove_rule_with_metadata` interceptor. """ return response + def post_remove_rule_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for remove_rule + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the NetworkFirewallPolicies server but before it is returned to user code. + + We recommend only using this `post_remove_rule_with_metadata` + interceptor in new development instead of the `post_remove_rule` interceptor. + When both interceptors are used, this `post_remove_rule_with_metadata` interceptor runs after the + `post_remove_rule` interceptor. The (possibly modified) response returned by + `post_remove_rule` will be passed to + `post_remove_rule_with_metadata`. + """ + return response, metadata + def pre_set_iam_policy( self, request: compute.SetIamPolicyNetworkFirewallPolicyRequest, @@ -561,12 +945,35 @@ def pre_set_iam_policy( def post_set_iam_policy(self, response: compute.Policy) -> compute.Policy: """Post-rpc interceptor for set_iam_policy - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_set_iam_policy_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the NetworkFirewallPolicies server but before - it is returned to user code. + it is returned to user code. This `post_set_iam_policy` interceptor runs + before the `post_set_iam_policy_with_metadata` interceptor. 
""" return response + def post_set_iam_policy_with_metadata( + self, + response: compute.Policy, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Policy, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for set_iam_policy + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the NetworkFirewallPolicies server but before it is returned to user code. + + We recommend only using this `post_set_iam_policy_with_metadata` + interceptor in new development instead of the `post_set_iam_policy` interceptor. + When both interceptors are used, this `post_set_iam_policy_with_metadata` interceptor runs after the + `post_set_iam_policy` interceptor. The (possibly modified) response returned by + `post_set_iam_policy` will be passed to + `post_set_iam_policy_with_metadata`. + """ + return response, metadata + def pre_test_iam_permissions( self, request: compute.TestIamPermissionsNetworkFirewallPolicyRequest, @@ -587,12 +994,37 @@ def post_test_iam_permissions( ) -> compute.TestPermissionsResponse: """Post-rpc interceptor for test_iam_permissions - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_test_iam_permissions_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the NetworkFirewallPolicies server but before - it is returned to user code. + it is returned to user code. This `post_test_iam_permissions` interceptor runs + before the `post_test_iam_permissions_with_metadata` interceptor. """ return response + def post_test_iam_permissions_with_metadata( + self, + response: compute.TestPermissionsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.TestPermissionsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for test_iam_permissions + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the NetworkFirewallPolicies server but before it is returned to user code. + + We recommend only using this `post_test_iam_permissions_with_metadata` + interceptor in new development instead of the `post_test_iam_permissions` interceptor. + When both interceptors are used, this `post_test_iam_permissions_with_metadata` interceptor runs after the + `post_test_iam_permissions` interceptor. The (possibly modified) response returned by + `post_test_iam_permissions` will be passed to + `post_test_iam_permissions_with_metadata`. 
+ """ + return response, metadata + @dataclasses.dataclass class NetworkFirewallPoliciesRestStub: @@ -829,6 +1261,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_add_association(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_add_association_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -995,6 +1431,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_add_rule(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_add_rule_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1018,6 +1458,157 @@ def __call__( ) return resp + class _AggregatedList( + _BaseNetworkFirewallPoliciesRestTransport._BaseAggregatedList, + NetworkFirewallPoliciesRestStub, + ): + def __hash__(self): + return hash("NetworkFirewallPoliciesRestTransport.AggregatedList") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: compute.AggregatedListNetworkFirewallPoliciesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> compute.NetworkFirewallPolicyAggregatedList: + r"""Call the aggregated list method over HTTP. + + Args: + request (~.compute.AggregatedListNetworkFirewallPoliciesRequest): + The request object. A request message for + NetworkFirewallPolicies.AggregatedList. + See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ + Returns: + ~.compute.NetworkFirewallPolicyAggregatedList: + + """ + + http_options = ( + _BaseNetworkFirewallPoliciesRestTransport._BaseAggregatedList._get_http_options() + ) + + request, metadata = self._interceptor.pre_aggregated_list(request, metadata) + transcoded_request = _BaseNetworkFirewallPoliciesRestTransport._BaseAggregatedList._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseNetworkFirewallPoliciesRestTransport._BaseAggregatedList._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.compute_v1.NetworkFirewallPoliciesClient.AggregatedList", + extra={ + "serviceName": "google.cloud.compute.v1.NetworkFirewallPolicies", + "rpcName": "AggregatedList", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = ( + NetworkFirewallPoliciesRestTransport._AggregatedList._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.NetworkFirewallPolicyAggregatedList() + pb_resp = compute.NetworkFirewallPolicyAggregatedList.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_aggregated_list(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_aggregated_list_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = ( + compute.NetworkFirewallPolicyAggregatedList.to_json(response) + ) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.compute_v1.NetworkFirewallPoliciesClient.aggregated_list", + extra={ + "serviceName": "google.cloud.compute.v1.NetworkFirewallPolicies", + "rpcName": "AggregatedList", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + class _CloneRules( _BaseNetworkFirewallPoliciesRestTransport._BaseCloneRules, NetworkFirewallPoliciesRestStub, @@ -1155,6 +1746,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_clone_rules(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_clone_rules_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1315,6 +1910,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete(resp) + response_metadata = [(k, str(v)) 
for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1460,6 +2059,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1605,6 +2206,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_association(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_association_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1773,6 +2378,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_iam_policy(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_iam_policy_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1920,6 +2529,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_rule(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_rule_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2086,6 +2699,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_insert(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_insert_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2229,6 +2846,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2395,6 +3014,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_patch(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_patch_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2561,6 +3184,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_patch_rule(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_patch_rule_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2725,6 +3352,10 @@ def __call__( json_format.Parse(response.content, pb_resp, 
ignore_unknown_fields=True) resp = self._interceptor.post_remove_association(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_remove_association_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2885,6 +3516,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_remove_rule(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_remove_rule_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3057,6 +3692,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_set_iam_policy(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_set_iam_policy_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3210,6 +3849,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_test_iam_permissions(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_test_iam_permissions_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3251,6 +3894,17 @@ def add_rule( # In C++ this would require a dynamic_cast return self._AddRule(self._session, self._host, self._interceptor) # type: ignore + @property + def aggregated_list( + self, + ) -> Callable[ + [compute.AggregatedListNetworkFirewallPoliciesRequest], + compute.NetworkFirewallPolicyAggregatedList, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._AggregatedList(self._session, self._host, self._interceptor) # type: ignore + @property def clone_rules( self, diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/network_firewall_policies/transports/rest_base.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/network_firewall_policies/transports/rest_base.py index 12b8707fa93e..6f191e61a07c 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/network_firewall_policies/transports/rest_base.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/network_firewall_policies/transports/rest_base.py @@ -199,6 +199,54 @@ def _get_query_params_json(transcoded_request): return query_params + class _BaseAggregatedList: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/aggregated/firewallPolicies", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = compute.AggregatedListNetworkFirewallPoliciesRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) + query_params.update( + _BaseNetworkFirewallPoliciesRestTransport._BaseAggregatedList._get_unset_required_fields( + query_params + ) + ) + + return query_params + class _BaseCloneRules: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/network_profiles/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/network_profiles/client.py index e50c56271b10..7108726baab9 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/network_profiles/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/network_profiles/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -458,6 +460,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. 
+ """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/network_profiles/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/network_profiles/transports/rest.py index b1b3f27cf9d6..bd377f0a1543 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/network_profiles/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/network_profiles/transports/rest.py @@ -107,12 +107,35 @@ def pre_get( def post_get(self, response: compute.NetworkProfile) -> compute.NetworkProfile: """Post-rpc interceptor for get - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the NetworkProfiles server but before - it is returned to user code. + it is returned to user code. This `post_get` interceptor runs + before the `post_get_with_metadata` interceptor. """ return response + def post_get_with_metadata( + self, + response: compute.NetworkProfile, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.NetworkProfile, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the NetworkProfiles server but before it is returned to user code. + + We recommend only using this `post_get_with_metadata` + interceptor in new development instead of the `post_get` interceptor. + When both interceptors are used, this `post_get_with_metadata` interceptor runs after the + `post_get` interceptor. The (possibly modified) response returned by + `post_get` will be passed to + `post_get_with_metadata`. + """ + return response, metadata + def pre_list( self, request: compute.ListNetworkProfilesRequest, @@ -132,12 +155,37 @@ def post_list( ) -> compute.NetworkProfilesListResponse: """Post-rpc interceptor for list - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the NetworkProfiles server but before - it is returned to user code. + it is returned to user code. This `post_list` interceptor runs + before the `post_list_with_metadata` interceptor. """ return response + def post_list_with_metadata( + self, + response: compute.NetworkProfilesListResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.NetworkProfilesListResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the NetworkProfiles server but before it is returned to user code. 
+ + We recommend only using this `post_list_with_metadata` + interceptor in new development instead of the `post_list` interceptor. + When both interceptors are used, this `post_list_with_metadata` interceptor runs after the + `post_list` interceptor. The (possibly modified) response returned by + `post_list` will be passed to + `post_list_with_metadata`. + """ + return response, metadata + @dataclasses.dataclass class NetworkProfilesRestStub: @@ -352,6 +400,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -496,6 +546,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/networks/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/networks/client.py index ac1d43bd8c92..b41e24682ebb 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/networks/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/networks/client.py @@ -15,6 +15,8 @@ # from collections import OrderedDict import functools +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -459,6 +461,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/networks/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/networks/transports/rest.py index db29bebbf255..a148446c9b53 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/networks/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/networks/transports/rest.py @@ -179,12 +179,35 @@ def pre_add_peering( def post_add_peering(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for add_peering - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_add_peering_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response after it is returned by the Networks server but before - it is returned to user code. + it is returned to user code. This `post_add_peering` interceptor runs + before the `post_add_peering_with_metadata` interceptor. """ return response + def post_add_peering_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for add_peering + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Networks server but before it is returned to user code. + + We recommend only using this `post_add_peering_with_metadata` + interceptor in new development instead of the `post_add_peering` interceptor. + When both interceptors are used, this `post_add_peering_with_metadata` interceptor runs after the + `post_add_peering` interceptor. The (possibly modified) response returned by + `post_add_peering` will be passed to + `post_add_peering_with_metadata`. + """ + return response, metadata + def pre_delete( self, request: compute.DeleteNetworkRequest, @@ -200,12 +223,35 @@ def pre_delete( def post_delete(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for delete - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Networks server but before - it is returned to user code. + it is returned to user code. This `post_delete` interceptor runs + before the `post_delete_with_metadata` interceptor. """ return response + def post_delete_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Networks server but before it is returned to user code. + + We recommend only using this `post_delete_with_metadata` + interceptor in new development instead of the `post_delete` interceptor. + When both interceptors are used, this `post_delete_with_metadata` interceptor runs after the + `post_delete` interceptor. The (possibly modified) response returned by + `post_delete` will be passed to + `post_delete_with_metadata`. + """ + return response, metadata + def pre_get( self, request: compute.GetNetworkRequest, @@ -221,12 +267,35 @@ def pre_get( def post_get(self, response: compute.Network) -> compute.Network: """Post-rpc interceptor for get - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Networks server but before - it is returned to user code. + it is returned to user code. This `post_get` interceptor runs + before the `post_get_with_metadata` interceptor. 
""" return response + def post_get_with_metadata( + self, + response: compute.Network, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Network, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Networks server but before it is returned to user code. + + We recommend only using this `post_get_with_metadata` + interceptor in new development instead of the `post_get` interceptor. + When both interceptors are used, this `post_get_with_metadata` interceptor runs after the + `post_get` interceptor. The (possibly modified) response returned by + `post_get` will be passed to + `post_get_with_metadata`. + """ + return response, metadata + def pre_get_effective_firewalls( self, request: compute.GetEffectiveFirewallsNetworkRequest, @@ -247,12 +316,38 @@ def post_get_effective_firewalls( ) -> compute.NetworksGetEffectiveFirewallsResponse: """Post-rpc interceptor for get_effective_firewalls - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_effective_firewalls_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Networks server but before - it is returned to user code. + it is returned to user code. This `post_get_effective_firewalls` interceptor runs + before the `post_get_effective_firewalls_with_metadata` interceptor. """ return response + def post_get_effective_firewalls_with_metadata( + self, + response: compute.NetworksGetEffectiveFirewallsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.NetworksGetEffectiveFirewallsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for get_effective_firewalls + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Networks server but before it is returned to user code. + + We recommend only using this `post_get_effective_firewalls_with_metadata` + interceptor in new development instead of the `post_get_effective_firewalls` interceptor. + When both interceptors are used, this `post_get_effective_firewalls_with_metadata` interceptor runs after the + `post_get_effective_firewalls` interceptor. The (possibly modified) response returned by + `post_get_effective_firewalls` will be passed to + `post_get_effective_firewalls_with_metadata`. + """ + return response, metadata + def pre_insert( self, request: compute.InsertNetworkRequest, @@ -268,12 +363,35 @@ def pre_insert( def post_insert(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for insert - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_insert_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Networks server but before - it is returned to user code. + it is returned to user code. This `post_insert` interceptor runs + before the `post_insert_with_metadata` interceptor. """ return response + def post_insert_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for insert + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Networks server but before it is returned to user code. 
+ + We recommend only using this `post_insert_with_metadata` + interceptor in new development instead of the `post_insert` interceptor. + When both interceptors are used, this `post_insert_with_metadata` interceptor runs after the + `post_insert` interceptor. The (possibly modified) response returned by + `post_insert` will be passed to + `post_insert_with_metadata`. + """ + return response, metadata + def pre_list( self, request: compute.ListNetworksRequest, @@ -289,12 +407,35 @@ def pre_list( def post_list(self, response: compute.NetworkList) -> compute.NetworkList: """Post-rpc interceptor for list - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Networks server but before - it is returned to user code. + it is returned to user code. This `post_list` interceptor runs + before the `post_list_with_metadata` interceptor. """ return response + def post_list_with_metadata( + self, + response: compute.NetworkList, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.NetworkList, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Networks server but before it is returned to user code. + + We recommend only using this `post_list_with_metadata` + interceptor in new development instead of the `post_list` interceptor. + When both interceptors are used, this `post_list_with_metadata` interceptor runs after the + `post_list` interceptor. The (possibly modified) response returned by + `post_list` will be passed to + `post_list_with_metadata`. + """ + return response, metadata + def pre_list_peering_routes( self, request: compute.ListPeeringRoutesNetworksRequest, @@ -315,12 +456,37 @@ def post_list_peering_routes( ) -> compute.ExchangedPeeringRoutesList: """Post-rpc interceptor for list_peering_routes - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_peering_routes_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Networks server but before - it is returned to user code. + it is returned to user code. This `post_list_peering_routes` interceptor runs + before the `post_list_peering_routes_with_metadata` interceptor. """ return response + def post_list_peering_routes_with_metadata( + self, + response: compute.ExchangedPeeringRoutesList, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.ExchangedPeeringRoutesList, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_peering_routes + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Networks server but before it is returned to user code. + + We recommend only using this `post_list_peering_routes_with_metadata` + interceptor in new development instead of the `post_list_peering_routes` interceptor. + When both interceptors are used, this `post_list_peering_routes_with_metadata` interceptor runs after the + `post_list_peering_routes` interceptor. The (possibly modified) response returned by + `post_list_peering_routes` will be passed to + `post_list_peering_routes_with_metadata`. 
+ """ + return response, metadata + def pre_patch( self, request: compute.PatchNetworkRequest, @@ -336,12 +502,35 @@ def pre_patch( def post_patch(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for patch - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_patch_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Networks server but before - it is returned to user code. + it is returned to user code. This `post_patch` interceptor runs + before the `post_patch_with_metadata` interceptor. """ return response + def post_patch_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for patch + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Networks server but before it is returned to user code. + + We recommend only using this `post_patch_with_metadata` + interceptor in new development instead of the `post_patch` interceptor. + When both interceptors are used, this `post_patch_with_metadata` interceptor runs after the + `post_patch` interceptor. The (possibly modified) response returned by + `post_patch` will be passed to + `post_patch_with_metadata`. + """ + return response, metadata + def pre_remove_peering( self, request: compute.RemovePeeringNetworkRequest, @@ -359,12 +548,35 @@ def pre_remove_peering( def post_remove_peering(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for remove_peering - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_remove_peering_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Networks server but before - it is returned to user code. + it is returned to user code. This `post_remove_peering` interceptor runs + before the `post_remove_peering_with_metadata` interceptor. """ return response + def post_remove_peering_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for remove_peering + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Networks server but before it is returned to user code. + + We recommend only using this `post_remove_peering_with_metadata` + interceptor in new development instead of the `post_remove_peering` interceptor. + When both interceptors are used, this `post_remove_peering_with_metadata` interceptor runs after the + `post_remove_peering` interceptor. The (possibly modified) response returned by + `post_remove_peering` will be passed to + `post_remove_peering_with_metadata`. + """ + return response, metadata + def pre_switch_to_custom_mode( self, request: compute.SwitchToCustomModeNetworkRequest, @@ -385,12 +597,35 @@ def post_switch_to_custom_mode( ) -> compute.Operation: """Post-rpc interceptor for switch_to_custom_mode - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_switch_to_custom_mode_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Networks server but before - it is returned to user code. + it is returned to user code. 
This `post_switch_to_custom_mode` interceptor runs + before the `post_switch_to_custom_mode_with_metadata` interceptor. """ return response + def post_switch_to_custom_mode_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for switch_to_custom_mode + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Networks server but before it is returned to user code. + + We recommend only using this `post_switch_to_custom_mode_with_metadata` + interceptor in new development instead of the `post_switch_to_custom_mode` interceptor. + When both interceptors are used, this `post_switch_to_custom_mode_with_metadata` interceptor runs after the + `post_switch_to_custom_mode` interceptor. The (possibly modified) response returned by + `post_switch_to_custom_mode` will be passed to + `post_switch_to_custom_mode_with_metadata`. + """ + return response, metadata + def pre_update_peering( self, request: compute.UpdatePeeringNetworkRequest, @@ -408,12 +643,35 @@ def pre_update_peering( def post_update_peering(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for update_peering - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_peering_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Networks server but before - it is returned to user code. + it is returned to user code. This `post_update_peering` interceptor runs + before the `post_update_peering_with_metadata` interceptor. """ return response + def post_update_peering_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_peering + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Networks server but before it is returned to user code. + + We recommend only using this `post_update_peering_with_metadata` + interceptor in new development instead of the `post_update_peering` interceptor. + When both interceptors are used, this `post_update_peering_with_metadata` interceptor runs after the + `post_update_peering` interceptor. The (possibly modified) response returned by + `post_update_peering` will be passed to + `post_update_peering_with_metadata`. 
+ """ + return response, metadata + @dataclasses.dataclass class NetworksRestStub: @@ -649,6 +907,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_add_peering(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_add_peering_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -808,6 +1070,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -952,6 +1218,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1096,6 +1364,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_effective_firewalls(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_effective_firewalls_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1263,6 +1535,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_insert(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_insert_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1402,6 +1678,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1546,6 +1824,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_peering_routes(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_peering_routes_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1710,6 +1992,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_patch(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_patch_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1879,6 +2165,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_remove_peering(resp) + response_metadata = [(k, str(v)) for k, v in 
response.headers.items()] + resp, _ = self._interceptor.post_remove_peering_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2040,6 +2330,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_switch_to_custom_mode(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_switch_to_custom_mode_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2209,6 +2503,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_peering(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_peering_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/node_groups/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/node_groups/client.py index 6e92e2cec2b2..64a04da2c980 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/node_groups/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/node_groups/client.py @@ -15,6 +15,8 @@ # from collections import OrderedDict import functools +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -459,6 +461,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/node_groups/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/node_groups/transports/rest.py index 39570a632dcc..603e72bd487e 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/node_groups/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/node_groups/transports/rest.py @@ -211,12 +211,35 @@ def pre_add_nodes( def post_add_nodes(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for add_nodes - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_add_nodes_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the NodeGroups server but before - it is returned to user code. 
+ it is returned to user code. This `post_add_nodes` interceptor runs + before the `post_add_nodes_with_metadata` interceptor. """ return response + def post_add_nodes_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for add_nodes + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the NodeGroups server but before it is returned to user code. + + We recommend only using this `post_add_nodes_with_metadata` + interceptor in new development instead of the `post_add_nodes` interceptor. + When both interceptors are used, this `post_add_nodes_with_metadata` interceptor runs after the + `post_add_nodes` interceptor. The (possibly modified) response returned by + `post_add_nodes` will be passed to + `post_add_nodes_with_metadata`. + """ + return response, metadata + def pre_aggregated_list( self, request: compute.AggregatedListNodeGroupsRequest, @@ -236,12 +259,37 @@ def post_aggregated_list( ) -> compute.NodeGroupAggregatedList: """Post-rpc interceptor for aggregated_list - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_aggregated_list_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the NodeGroups server but before - it is returned to user code. + it is returned to user code. This `post_aggregated_list` interceptor runs + before the `post_aggregated_list_with_metadata` interceptor. """ return response + def post_aggregated_list_with_metadata( + self, + response: compute.NodeGroupAggregatedList, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.NodeGroupAggregatedList, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for aggregated_list + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the NodeGroups server but before it is returned to user code. + + We recommend only using this `post_aggregated_list_with_metadata` + interceptor in new development instead of the `post_aggregated_list` interceptor. + When both interceptors are used, this `post_aggregated_list_with_metadata` interceptor runs after the + `post_aggregated_list` interceptor. The (possibly modified) response returned by + `post_aggregated_list` will be passed to + `post_aggregated_list_with_metadata`. + """ + return response, metadata + def pre_delete( self, request: compute.DeleteNodeGroupRequest, @@ -257,12 +305,35 @@ def pre_delete( def post_delete(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for delete - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the NodeGroups server but before - it is returned to user code. + it is returned to user code. This `post_delete` interceptor runs + before the `post_delete_with_metadata` interceptor. 
""" return response + def post_delete_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the NodeGroups server but before it is returned to user code. + + We recommend only using this `post_delete_with_metadata` + interceptor in new development instead of the `post_delete` interceptor. + When both interceptors are used, this `post_delete_with_metadata` interceptor runs after the + `post_delete` interceptor. The (possibly modified) response returned by + `post_delete` will be passed to + `post_delete_with_metadata`. + """ + return response, metadata + def pre_delete_nodes( self, request: compute.DeleteNodesNodeGroupRequest, @@ -280,12 +351,35 @@ def pre_delete_nodes( def post_delete_nodes(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for delete_nodes - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_nodes_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the NodeGroups server but before - it is returned to user code. + it is returned to user code. This `post_delete_nodes` interceptor runs + before the `post_delete_nodes_with_metadata` interceptor. """ return response + def post_delete_nodes_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_nodes + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the NodeGroups server but before it is returned to user code. + + We recommend only using this `post_delete_nodes_with_metadata` + interceptor in new development instead of the `post_delete_nodes` interceptor. + When both interceptors are used, this `post_delete_nodes_with_metadata` interceptor runs after the + `post_delete_nodes` interceptor. The (possibly modified) response returned by + `post_delete_nodes` will be passed to + `post_delete_nodes_with_metadata`. + """ + return response, metadata + def pre_get( self, request: compute.GetNodeGroupRequest, @@ -301,12 +395,35 @@ def pre_get( def post_get(self, response: compute.NodeGroup) -> compute.NodeGroup: """Post-rpc interceptor for get - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the NodeGroups server but before - it is returned to user code. + it is returned to user code. This `post_get` interceptor runs + before the `post_get_with_metadata` interceptor. """ return response + def post_get_with_metadata( + self, + response: compute.NodeGroup, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.NodeGroup, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the NodeGroups server but before it is returned to user code. + + We recommend only using this `post_get_with_metadata` + interceptor in new development instead of the `post_get` interceptor. 
+ When both interceptors are used, this `post_get_with_metadata` interceptor runs after the + `post_get` interceptor. The (possibly modified) response returned by + `post_get` will be passed to + `post_get_with_metadata`. + """ + return response, metadata + def pre_get_iam_policy( self, request: compute.GetIamPolicyNodeGroupRequest, @@ -324,12 +441,35 @@ def pre_get_iam_policy( def post_get_iam_policy(self, response: compute.Policy) -> compute.Policy: """Post-rpc interceptor for get_iam_policy - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_iam_policy_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the NodeGroups server but before - it is returned to user code. + it is returned to user code. This `post_get_iam_policy` interceptor runs + before the `post_get_iam_policy_with_metadata` interceptor. """ return response + def post_get_iam_policy_with_metadata( + self, + response: compute.Policy, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Policy, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_iam_policy + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the NodeGroups server but before it is returned to user code. + + We recommend only using this `post_get_iam_policy_with_metadata` + interceptor in new development instead of the `post_get_iam_policy` interceptor. + When both interceptors are used, this `post_get_iam_policy_with_metadata` interceptor runs after the + `post_get_iam_policy` interceptor. The (possibly modified) response returned by + `post_get_iam_policy` will be passed to + `post_get_iam_policy_with_metadata`. + """ + return response, metadata + def pre_insert( self, request: compute.InsertNodeGroupRequest, @@ -345,12 +485,35 @@ def pre_insert( def post_insert(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for insert - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_insert_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the NodeGroups server but before - it is returned to user code. + it is returned to user code. This `post_insert` interceptor runs + before the `post_insert_with_metadata` interceptor. """ return response + def post_insert_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for insert + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the NodeGroups server but before it is returned to user code. + + We recommend only using this `post_insert_with_metadata` + interceptor in new development instead of the `post_insert` interceptor. + When both interceptors are used, this `post_insert_with_metadata` interceptor runs after the + `post_insert` interceptor. The (possibly modified) response returned by + `post_insert` will be passed to + `post_insert_with_metadata`. + """ + return response, metadata + def pre_list( self, request: compute.ListNodeGroupsRequest, @@ -366,12 +529,35 @@ def pre_list( def post_list(self, response: compute.NodeGroupList) -> compute.NodeGroupList: """Post-rpc interceptor for list - Override in a subclass to manipulate the response + DEPRECATED. 
Please use the `post_list_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the NodeGroups server but before - it is returned to user code. + it is returned to user code. This `post_list` interceptor runs + before the `post_list_with_metadata` interceptor. """ return response + def post_list_with_metadata( + self, + response: compute.NodeGroupList, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.NodeGroupList, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the NodeGroups server but before it is returned to user code. + + We recommend only using this `post_list_with_metadata` + interceptor in new development instead of the `post_list` interceptor. + When both interceptors are used, this `post_list_with_metadata` interceptor runs after the + `post_list` interceptor. The (possibly modified) response returned by + `post_list` will be passed to + `post_list_with_metadata`. + """ + return response, metadata + def pre_list_nodes( self, request: compute.ListNodesNodeGroupsRequest, @@ -391,12 +577,35 @@ def post_list_nodes( ) -> compute.NodeGroupsListNodes: """Post-rpc interceptor for list_nodes - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_nodes_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the NodeGroups server but before - it is returned to user code. + it is returned to user code. This `post_list_nodes` interceptor runs + before the `post_list_nodes_with_metadata` interceptor. """ return response + def post_list_nodes_with_metadata( + self, + response: compute.NodeGroupsListNodes, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.NodeGroupsListNodes, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_nodes + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the NodeGroups server but before it is returned to user code. + + We recommend only using this `post_list_nodes_with_metadata` + interceptor in new development instead of the `post_list_nodes` interceptor. + When both interceptors are used, this `post_list_nodes_with_metadata` interceptor runs after the + `post_list_nodes` interceptor. The (possibly modified) response returned by + `post_list_nodes` will be passed to + `post_list_nodes_with_metadata`. + """ + return response, metadata + def pre_patch( self, request: compute.PatchNodeGroupRequest, @@ -412,12 +621,35 @@ def pre_patch( def post_patch(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for patch - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_patch_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the NodeGroups server but before - it is returned to user code. + it is returned to user code. This `post_patch` interceptor runs + before the `post_patch_with_metadata` interceptor. 
""" return response + def post_patch_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for patch + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the NodeGroups server but before it is returned to user code. + + We recommend only using this `post_patch_with_metadata` + interceptor in new development instead of the `post_patch` interceptor. + When both interceptors are used, this `post_patch_with_metadata` interceptor runs after the + `post_patch` interceptor. The (possibly modified) response returned by + `post_patch` will be passed to + `post_patch_with_metadata`. + """ + return response, metadata + def pre_perform_maintenance( self, request: compute.PerformMaintenanceNodeGroupRequest, @@ -438,12 +670,35 @@ def post_perform_maintenance( ) -> compute.Operation: """Post-rpc interceptor for perform_maintenance - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_perform_maintenance_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the NodeGroups server but before - it is returned to user code. + it is returned to user code. This `post_perform_maintenance` interceptor runs + before the `post_perform_maintenance_with_metadata` interceptor. """ return response + def post_perform_maintenance_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for perform_maintenance + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the NodeGroups server but before it is returned to user code. + + We recommend only using this `post_perform_maintenance_with_metadata` + interceptor in new development instead of the `post_perform_maintenance` interceptor. + When both interceptors are used, this `post_perform_maintenance_with_metadata` interceptor runs after the + `post_perform_maintenance` interceptor. The (possibly modified) response returned by + `post_perform_maintenance` will be passed to + `post_perform_maintenance_with_metadata`. + """ + return response, metadata + def pre_set_iam_policy( self, request: compute.SetIamPolicyNodeGroupRequest, @@ -461,12 +716,35 @@ def pre_set_iam_policy( def post_set_iam_policy(self, response: compute.Policy) -> compute.Policy: """Post-rpc interceptor for set_iam_policy - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_set_iam_policy_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the NodeGroups server but before - it is returned to user code. + it is returned to user code. This `post_set_iam_policy` interceptor runs + before the `post_set_iam_policy_with_metadata` interceptor. """ return response + def post_set_iam_policy_with_metadata( + self, + response: compute.Policy, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Policy, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for set_iam_policy + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the NodeGroups server but before it is returned to user code. 
+ + We recommend only using this `post_set_iam_policy_with_metadata` + interceptor in new development instead of the `post_set_iam_policy` interceptor. + When both interceptors are used, this `post_set_iam_policy_with_metadata` interceptor runs after the + `post_set_iam_policy` interceptor. The (possibly modified) response returned by + `post_set_iam_policy` will be passed to + `post_set_iam_policy_with_metadata`. + """ + return response, metadata + def pre_set_node_template( self, request: compute.SetNodeTemplateNodeGroupRequest, @@ -484,12 +762,35 @@ def pre_set_node_template( def post_set_node_template(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for set_node_template - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_set_node_template_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the NodeGroups server but before - it is returned to user code. + it is returned to user code. This `post_set_node_template` interceptor runs + before the `post_set_node_template_with_metadata` interceptor. """ return response + def post_set_node_template_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for set_node_template + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the NodeGroups server but before it is returned to user code. + + We recommend only using this `post_set_node_template_with_metadata` + interceptor in new development instead of the `post_set_node_template` interceptor. + When both interceptors are used, this `post_set_node_template_with_metadata` interceptor runs after the + `post_set_node_template` interceptor. The (possibly modified) response returned by + `post_set_node_template` will be passed to + `post_set_node_template_with_metadata`. + """ + return response, metadata + def pre_simulate_maintenance_event( self, request: compute.SimulateMaintenanceEventNodeGroupRequest, @@ -510,12 +811,35 @@ def post_simulate_maintenance_event( ) -> compute.Operation: """Post-rpc interceptor for simulate_maintenance_event - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_simulate_maintenance_event_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the NodeGroups server but before - it is returned to user code. + it is returned to user code. This `post_simulate_maintenance_event` interceptor runs + before the `post_simulate_maintenance_event_with_metadata` interceptor. """ return response + def post_simulate_maintenance_event_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for simulate_maintenance_event + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the NodeGroups server but before it is returned to user code. + + We recommend only using this `post_simulate_maintenance_event_with_metadata` + interceptor in new development instead of the `post_simulate_maintenance_event` interceptor. 
+ When both interceptors are used, this `post_simulate_maintenance_event_with_metadata` interceptor runs after the + `post_simulate_maintenance_event` interceptor. The (possibly modified) response returned by + `post_simulate_maintenance_event` will be passed to + `post_simulate_maintenance_event_with_metadata`. + """ + return response, metadata + def pre_test_iam_permissions( self, request: compute.TestIamPermissionsNodeGroupRequest, @@ -536,12 +860,37 @@ def post_test_iam_permissions( ) -> compute.TestPermissionsResponse: """Post-rpc interceptor for test_iam_permissions - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_test_iam_permissions_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the NodeGroups server but before - it is returned to user code. + it is returned to user code. This `post_test_iam_permissions` interceptor runs + before the `post_test_iam_permissions_with_metadata` interceptor. """ return response + def post_test_iam_permissions_with_metadata( + self, + response: compute.TestPermissionsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.TestPermissionsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for test_iam_permissions + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the NodeGroups server but before it is returned to user code. + + We recommend only using this `post_test_iam_permissions_with_metadata` + interceptor in new development instead of the `post_test_iam_permissions` interceptor. + When both interceptors are used, this `post_test_iam_permissions_with_metadata` interceptor runs after the + `post_test_iam_permissions` interceptor. The (possibly modified) response returned by + `post_test_iam_permissions` will be passed to + `post_test_iam_permissions_with_metadata`. 
+ """ + return response, metadata + @dataclasses.dataclass class NodeGroupsRestStub: @@ -777,6 +1126,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_add_nodes(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_add_nodes_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -921,6 +1274,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_aggregated_list(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_aggregated_list_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1080,6 +1437,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1249,6 +1610,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_nodes(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_nodes_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1399,6 +1764,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1568,6 +1935,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_iam_policy(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_iam_policy_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1733,6 +2104,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_insert(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_insert_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1875,6 +2250,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2019,6 +2396,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_nodes(resp) + response_metadata = [(k, str(v)) for k, v in 
response.headers.items()] + resp, _ = self._interceptor.post_list_nodes_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2184,6 +2565,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_patch(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_patch_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2351,6 +2736,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_perform_maintenance(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_perform_maintenance_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2528,6 +2917,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_set_iam_policy(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_set_iam_policy_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2695,6 +3088,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_set_node_template(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_set_node_template_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2863,6 +3260,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_simulate_maintenance_event(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_simulate_maintenance_event_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3013,6 +3414,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_test_iam_permissions(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_test_iam_permissions_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/node_templates/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/node_templates/client.py index cc827787a1af..bfba7aa0d505 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/node_templates/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/node_templates/client.py @@ -15,6 +15,8 @@ # from collections import OrderedDict import functools +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -459,6 +461,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. 
return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/node_templates/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/node_templates/transports/rest.py index dc3693828142..1a67b0ce6868 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/node_templates/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/node_templates/transports/rest.py @@ -158,12 +158,37 @@ def post_aggregated_list( ) -> compute.NodeTemplateAggregatedList: """Post-rpc interceptor for aggregated_list - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_aggregated_list_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the NodeTemplates server but before - it is returned to user code. + it is returned to user code. This `post_aggregated_list` interceptor runs + before the `post_aggregated_list_with_metadata` interceptor. """ return response + def post_aggregated_list_with_metadata( + self, + response: compute.NodeTemplateAggregatedList, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.NodeTemplateAggregatedList, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for aggregated_list + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the NodeTemplates server but before it is returned to user code. + + We recommend only using this `post_aggregated_list_with_metadata` + interceptor in new development instead of the `post_aggregated_list` interceptor. + When both interceptors are used, this `post_aggregated_list_with_metadata` interceptor runs after the + `post_aggregated_list` interceptor. The (possibly modified) response returned by + `post_aggregated_list` will be passed to + `post_aggregated_list_with_metadata`. + """ + return response, metadata + def pre_delete( self, request: compute.DeleteNodeTemplateRequest, @@ -181,12 +206,35 @@ def pre_delete( def post_delete(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for delete - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the NodeTemplates server but before - it is returned to user code. + it is returned to user code. This `post_delete` interceptor runs + before the `post_delete_with_metadata` interceptor. 
""" return response + def post_delete_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the NodeTemplates server but before it is returned to user code. + + We recommend only using this `post_delete_with_metadata` + interceptor in new development instead of the `post_delete` interceptor. + When both interceptors are used, this `post_delete_with_metadata` interceptor runs after the + `post_delete` interceptor. The (possibly modified) response returned by + `post_delete` will be passed to + `post_delete_with_metadata`. + """ + return response, metadata + def pre_get( self, request: compute.GetNodeTemplateRequest, @@ -202,12 +250,35 @@ def pre_get( def post_get(self, response: compute.NodeTemplate) -> compute.NodeTemplate: """Post-rpc interceptor for get - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the NodeTemplates server but before - it is returned to user code. + it is returned to user code. This `post_get` interceptor runs + before the `post_get_with_metadata` interceptor. """ return response + def post_get_with_metadata( + self, + response: compute.NodeTemplate, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.NodeTemplate, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the NodeTemplates server but before it is returned to user code. + + We recommend only using this `post_get_with_metadata` + interceptor in new development instead of the `post_get` interceptor. + When both interceptors are used, this `post_get_with_metadata` interceptor runs after the + `post_get` interceptor. The (possibly modified) response returned by + `post_get` will be passed to + `post_get_with_metadata`. + """ + return response, metadata + def pre_get_iam_policy( self, request: compute.GetIamPolicyNodeTemplateRequest, @@ -225,12 +296,35 @@ def pre_get_iam_policy( def post_get_iam_policy(self, response: compute.Policy) -> compute.Policy: """Post-rpc interceptor for get_iam_policy - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_iam_policy_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the NodeTemplates server but before - it is returned to user code. + it is returned to user code. This `post_get_iam_policy` interceptor runs + before the `post_get_iam_policy_with_metadata` interceptor. """ return response + def post_get_iam_policy_with_metadata( + self, + response: compute.Policy, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Policy, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_iam_policy + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the NodeTemplates server but before it is returned to user code. + + We recommend only using this `post_get_iam_policy_with_metadata` + interceptor in new development instead of the `post_get_iam_policy` interceptor. 
+ When both interceptors are used, this `post_get_iam_policy_with_metadata` interceptor runs after the + `post_get_iam_policy` interceptor. The (possibly modified) response returned by + `post_get_iam_policy` will be passed to + `post_get_iam_policy_with_metadata`. + """ + return response, metadata + def pre_insert( self, request: compute.InsertNodeTemplateRequest, @@ -248,12 +342,35 @@ def pre_insert( def post_insert(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for insert - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_insert_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the NodeTemplates server but before - it is returned to user code. + it is returned to user code. This `post_insert` interceptor runs + before the `post_insert_with_metadata` interceptor. """ return response + def post_insert_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for insert + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the NodeTemplates server but before it is returned to user code. + + We recommend only using this `post_insert_with_metadata` + interceptor in new development instead of the `post_insert` interceptor. + When both interceptors are used, this `post_insert_with_metadata` interceptor runs after the + `post_insert` interceptor. The (possibly modified) response returned by + `post_insert` will be passed to + `post_insert_with_metadata`. + """ + return response, metadata + def pre_list( self, request: compute.ListNodeTemplatesRequest, @@ -271,12 +388,35 @@ def pre_list( def post_list(self, response: compute.NodeTemplateList) -> compute.NodeTemplateList: """Post-rpc interceptor for list - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the NodeTemplates server but before - it is returned to user code. + it is returned to user code. This `post_list` interceptor runs + before the `post_list_with_metadata` interceptor. """ return response + def post_list_with_metadata( + self, + response: compute.NodeTemplateList, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.NodeTemplateList, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the NodeTemplates server but before it is returned to user code. + + We recommend only using this `post_list_with_metadata` + interceptor in new development instead of the `post_list` interceptor. + When both interceptors are used, this `post_list_with_metadata` interceptor runs after the + `post_list` interceptor. The (possibly modified) response returned by + `post_list` will be passed to + `post_list_with_metadata`. + """ + return response, metadata + def pre_set_iam_policy( self, request: compute.SetIamPolicyNodeTemplateRequest, @@ -294,12 +434,35 @@ def pre_set_iam_policy( def post_set_iam_policy(self, response: compute.Policy) -> compute.Policy: """Post-rpc interceptor for set_iam_policy - Override in a subclass to manipulate the response + DEPRECATED. 
Please use the `post_set_iam_policy_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the NodeTemplates server but before - it is returned to user code. + it is returned to user code. This `post_set_iam_policy` interceptor runs + before the `post_set_iam_policy_with_metadata` interceptor. """ return response + def post_set_iam_policy_with_metadata( + self, + response: compute.Policy, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Policy, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for set_iam_policy + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the NodeTemplates server but before it is returned to user code. + + We recommend only using this `post_set_iam_policy_with_metadata` + interceptor in new development instead of the `post_set_iam_policy` interceptor. + When both interceptors are used, this `post_set_iam_policy_with_metadata` interceptor runs after the + `post_set_iam_policy` interceptor. The (possibly modified) response returned by + `post_set_iam_policy` will be passed to + `post_set_iam_policy_with_metadata`. + """ + return response, metadata + def pre_test_iam_permissions( self, request: compute.TestIamPermissionsNodeTemplateRequest, @@ -320,12 +483,37 @@ def post_test_iam_permissions( ) -> compute.TestPermissionsResponse: """Post-rpc interceptor for test_iam_permissions - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_test_iam_permissions_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the NodeTemplates server but before - it is returned to user code. + it is returned to user code. This `post_test_iam_permissions` interceptor runs + before the `post_test_iam_permissions_with_metadata` interceptor. """ return response + def post_test_iam_permissions_with_metadata( + self, + response: compute.TestPermissionsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.TestPermissionsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for test_iam_permissions + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the NodeTemplates server but before it is returned to user code. + + We recommend only using this `post_test_iam_permissions_with_metadata` + interceptor in new development instead of the `post_test_iam_permissions` interceptor. + When both interceptors are used, this `post_test_iam_permissions_with_metadata` interceptor runs after the + `post_test_iam_permissions` interceptor. The (possibly modified) response returned by + `post_test_iam_permissions` will be passed to + `post_test_iam_permissions_with_metadata`. 
+ """ + return response, metadata + @dataclasses.dataclass class NodeTemplatesRestStub: @@ -536,6 +724,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_aggregated_list(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_aggregated_list_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -699,6 +891,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -846,6 +1042,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1011,6 +1209,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_iam_policy(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_iam_policy_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1178,6 +1380,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_insert(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_insert_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1320,6 +1526,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1491,6 +1699,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_set_iam_policy(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_set_iam_policy_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1641,6 +1853,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_test_iam_permissions(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_test_iam_permissions_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/node_types/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/node_types/client.py index 
9b793fe99729..d60d01771db2 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/node_types/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/node_types/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -456,6 +458,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/node_types/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/node_types/transports/rest.py index 823084a79601..e94f3483882e 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/node_types/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/node_types/transports/rest.py @@ -117,12 +117,35 @@ def post_aggregated_list( ) -> compute.NodeTypeAggregatedList: """Post-rpc interceptor for aggregated_list - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_aggregated_list_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the NodeTypes server but before - it is returned to user code. + it is returned to user code. This `post_aggregated_list` interceptor runs + before the `post_aggregated_list_with_metadata` interceptor. """ return response + def post_aggregated_list_with_metadata( + self, + response: compute.NodeTypeAggregatedList, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.NodeTypeAggregatedList, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for aggregated_list + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the NodeTypes server but before it is returned to user code. + + We recommend only using this `post_aggregated_list_with_metadata` + interceptor in new development instead of the `post_aggregated_list` interceptor. + When both interceptors are used, this `post_aggregated_list_with_metadata` interceptor runs after the + `post_aggregated_list` interceptor. The (possibly modified) response returned by + `post_aggregated_list` will be passed to + `post_aggregated_list_with_metadata`. 
+ """ + return response, metadata + def pre_get( self, request: compute.GetNodeTypeRequest, @@ -138,12 +161,35 @@ def pre_get( def post_get(self, response: compute.NodeType) -> compute.NodeType: """Post-rpc interceptor for get - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the NodeTypes server but before - it is returned to user code. + it is returned to user code. This `post_get` interceptor runs + before the `post_get_with_metadata` interceptor. """ return response + def post_get_with_metadata( + self, + response: compute.NodeType, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.NodeType, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the NodeTypes server but before it is returned to user code. + + We recommend only using this `post_get_with_metadata` + interceptor in new development instead of the `post_get` interceptor. + When both interceptors are used, this `post_get_with_metadata` interceptor runs after the + `post_get` interceptor. The (possibly modified) response returned by + `post_get` will be passed to + `post_get_with_metadata`. + """ + return response, metadata + def pre_list( self, request: compute.ListNodeTypesRequest, @@ -159,12 +205,35 @@ def pre_list( def post_list(self, response: compute.NodeTypeList) -> compute.NodeTypeList: """Post-rpc interceptor for list - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the NodeTypes server but before - it is returned to user code. + it is returned to user code. This `post_list` interceptor runs + before the `post_list_with_metadata` interceptor. """ return response + def post_list_with_metadata( + self, + response: compute.NodeTypeList, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.NodeTypeList, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the NodeTypes server but before it is returned to user code. + + We recommend only using this `post_list_with_metadata` + interceptor in new development instead of the `post_list` interceptor. + When both interceptors are used, this `post_list_with_metadata` interceptor runs after the + `post_list` interceptor. The (possibly modified) response returned by + `post_list` will be passed to + `post_list_with_metadata`. 
+ """ + return response, metadata + @dataclasses.dataclass class NodeTypesRestStub: @@ -379,6 +448,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_aggregated_list(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_aggregated_list_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -528,6 +601,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -667,6 +742,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/packet_mirrorings/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/packet_mirrorings/client.py index da159c1bedb7..d0d959792da0 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/packet_mirrorings/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/packet_mirrorings/client.py @@ -15,6 +15,8 @@ # from collections import OrderedDict import functools +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -461,6 +463,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. 
diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/packet_mirrorings/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/packet_mirrorings/transports/rest.py index 82ba73f64e3c..fc5c73fb3b3d 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/packet_mirrorings/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/packet_mirrorings/transports/rest.py @@ -150,12 +150,37 @@ def post_aggregated_list( ) -> compute.PacketMirroringAggregatedList: """Post-rpc interceptor for aggregated_list - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_aggregated_list_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the PacketMirrorings server but before - it is returned to user code. + it is returned to user code. This `post_aggregated_list` interceptor runs + before the `post_aggregated_list_with_metadata` interceptor. """ return response + def post_aggregated_list_with_metadata( + self, + response: compute.PacketMirroringAggregatedList, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.PacketMirroringAggregatedList, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for aggregated_list + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the PacketMirrorings server but before it is returned to user code. + + We recommend only using this `post_aggregated_list_with_metadata` + interceptor in new development instead of the `post_aggregated_list` interceptor. + When both interceptors are used, this `post_aggregated_list_with_metadata` interceptor runs after the + `post_aggregated_list` interceptor. The (possibly modified) response returned by + `post_aggregated_list` will be passed to + `post_aggregated_list_with_metadata`. + """ + return response, metadata + def pre_delete( self, request: compute.DeletePacketMirroringRequest, @@ -173,12 +198,35 @@ def pre_delete( def post_delete(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for delete - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the PacketMirrorings server but before - it is returned to user code. + it is returned to user code. This `post_delete` interceptor runs + before the `post_delete_with_metadata` interceptor. """ return response + def post_delete_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the PacketMirrorings server but before it is returned to user code. + + We recommend only using this `post_delete_with_metadata` + interceptor in new development instead of the `post_delete` interceptor. + When both interceptors are used, this `post_delete_with_metadata` interceptor runs after the + `post_delete` interceptor. The (possibly modified) response returned by + `post_delete` will be passed to + `post_delete_with_metadata`. 
+ """ + return response, metadata + def pre_get( self, request: compute.GetPacketMirroringRequest, @@ -196,12 +244,35 @@ def pre_get( def post_get(self, response: compute.PacketMirroring) -> compute.PacketMirroring: """Post-rpc interceptor for get - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the PacketMirrorings server but before - it is returned to user code. + it is returned to user code. This `post_get` interceptor runs + before the `post_get_with_metadata` interceptor. """ return response + def post_get_with_metadata( + self, + response: compute.PacketMirroring, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.PacketMirroring, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the PacketMirrorings server but before it is returned to user code. + + We recommend only using this `post_get_with_metadata` + interceptor in new development instead of the `post_get` interceptor. + When both interceptors are used, this `post_get_with_metadata` interceptor runs after the + `post_get` interceptor. The (possibly modified) response returned by + `post_get` will be passed to + `post_get_with_metadata`. + """ + return response, metadata + def pre_insert( self, request: compute.InsertPacketMirroringRequest, @@ -219,12 +290,35 @@ def pre_insert( def post_insert(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for insert - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_insert_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the PacketMirrorings server but before - it is returned to user code. + it is returned to user code. This `post_insert` interceptor runs + before the `post_insert_with_metadata` interceptor. """ return response + def post_insert_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for insert + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the PacketMirrorings server but before it is returned to user code. + + We recommend only using this `post_insert_with_metadata` + interceptor in new development instead of the `post_insert` interceptor. + When both interceptors are used, this `post_insert_with_metadata` interceptor runs after the + `post_insert` interceptor. The (possibly modified) response returned by + `post_insert` will be passed to + `post_insert_with_metadata`. + """ + return response, metadata + def pre_list( self, request: compute.ListPacketMirroringsRequest, @@ -244,12 +338,35 @@ def post_list( ) -> compute.PacketMirroringList: """Post-rpc interceptor for list - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the PacketMirrorings server but before - it is returned to user code. + it is returned to user code. This `post_list` interceptor runs + before the `post_list_with_metadata` interceptor. 
""" return response + def post_list_with_metadata( + self, + response: compute.PacketMirroringList, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.PacketMirroringList, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the PacketMirrorings server but before it is returned to user code. + + We recommend only using this `post_list_with_metadata` + interceptor in new development instead of the `post_list` interceptor. + When both interceptors are used, this `post_list_with_metadata` interceptor runs after the + `post_list` interceptor. The (possibly modified) response returned by + `post_list` will be passed to + `post_list_with_metadata`. + """ + return response, metadata + def pre_patch( self, request: compute.PatchPacketMirroringRequest, @@ -267,12 +384,35 @@ def pre_patch( def post_patch(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for patch - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_patch_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the PacketMirrorings server but before - it is returned to user code. + it is returned to user code. This `post_patch` interceptor runs + before the `post_patch_with_metadata` interceptor. """ return response + def post_patch_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for patch + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the PacketMirrorings server but before it is returned to user code. + + We recommend only using this `post_patch_with_metadata` + interceptor in new development instead of the `post_patch` interceptor. + When both interceptors are used, this `post_patch_with_metadata` interceptor runs after the + `post_patch` interceptor. The (possibly modified) response returned by + `post_patch` will be passed to + `post_patch_with_metadata`. + """ + return response, metadata + def pre_test_iam_permissions( self, request: compute.TestIamPermissionsPacketMirroringRequest, @@ -293,12 +433,37 @@ def post_test_iam_permissions( ) -> compute.TestPermissionsResponse: """Post-rpc interceptor for test_iam_permissions - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_test_iam_permissions_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the PacketMirrorings server but before - it is returned to user code. + it is returned to user code. This `post_test_iam_permissions` interceptor runs + before the `post_test_iam_permissions_with_metadata` interceptor. """ return response + def post_test_iam_permissions_with_metadata( + self, + response: compute.TestPermissionsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.TestPermissionsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for test_iam_permissions + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the PacketMirrorings server but before it is returned to user code. 
+ + We recommend only using this `post_test_iam_permissions_with_metadata` + interceptor in new development instead of the `post_test_iam_permissions` interceptor. + When both interceptors are used, this `post_test_iam_permissions_with_metadata` interceptor runs after the + `post_test_iam_permissions` interceptor. The (possibly modified) response returned by + `post_test_iam_permissions` will be passed to + `post_test_iam_permissions_with_metadata`. + """ + return response, metadata + @dataclasses.dataclass class PacketMirroringsRestStub: @@ -509,6 +674,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_aggregated_list(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_aggregated_list_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -674,6 +843,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -828,6 +1001,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -999,6 +1174,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_insert(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_insert_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1145,6 +1324,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1314,6 +1495,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_patch(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_patch_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1465,6 +1650,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_test_iam_permissions(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_test_iam_permissions_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/projects/client.py 
b/packages/google-cloud-compute/google/cloud/compute_v1/services/projects/client.py index be5a0c35b7ce..0b8a45f44c5b 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/projects/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/projects/client.py @@ -15,6 +15,8 @@ # from collections import OrderedDict import functools +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -459,6 +461,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/projects/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/projects/transports/rest.py index fb2d094618c1..b5ed0b87b6a8 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/projects/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/projects/transports/rest.py @@ -203,12 +203,35 @@ def pre_disable_xpn_host( def post_disable_xpn_host(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for disable_xpn_host - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_disable_xpn_host_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Projects server but before - it is returned to user code. + it is returned to user code. This `post_disable_xpn_host` interceptor runs + before the `post_disable_xpn_host_with_metadata` interceptor. """ return response + def post_disable_xpn_host_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for disable_xpn_host + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Projects server but before it is returned to user code. + + We recommend only using this `post_disable_xpn_host_with_metadata` + interceptor in new development instead of the `post_disable_xpn_host` interceptor. + When both interceptors are used, this `post_disable_xpn_host_with_metadata` interceptor runs after the + `post_disable_xpn_host` interceptor. The (possibly modified) response returned by + `post_disable_xpn_host` will be passed to + `post_disable_xpn_host_with_metadata`. 
+ """ + return response, metadata + def pre_disable_xpn_resource( self, request: compute.DisableXpnResourceProjectRequest, @@ -229,12 +252,35 @@ def post_disable_xpn_resource( ) -> compute.Operation: """Post-rpc interceptor for disable_xpn_resource - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_disable_xpn_resource_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Projects server but before - it is returned to user code. + it is returned to user code. This `post_disable_xpn_resource` interceptor runs + before the `post_disable_xpn_resource_with_metadata` interceptor. """ return response + def post_disable_xpn_resource_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for disable_xpn_resource + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Projects server but before it is returned to user code. + + We recommend only using this `post_disable_xpn_resource_with_metadata` + interceptor in new development instead of the `post_disable_xpn_resource` interceptor. + When both interceptors are used, this `post_disable_xpn_resource_with_metadata` interceptor runs after the + `post_disable_xpn_resource` interceptor. The (possibly modified) response returned by + `post_disable_xpn_resource` will be passed to + `post_disable_xpn_resource_with_metadata`. + """ + return response, metadata + def pre_enable_xpn_host( self, request: compute.EnableXpnHostProjectRequest, @@ -252,12 +298,35 @@ def pre_enable_xpn_host( def post_enable_xpn_host(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for enable_xpn_host - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_enable_xpn_host_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Projects server but before - it is returned to user code. + it is returned to user code. This `post_enable_xpn_host` interceptor runs + before the `post_enable_xpn_host_with_metadata` interceptor. """ return response + def post_enable_xpn_host_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for enable_xpn_host + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Projects server but before it is returned to user code. + + We recommend only using this `post_enable_xpn_host_with_metadata` + interceptor in new development instead of the `post_enable_xpn_host` interceptor. + When both interceptors are used, this `post_enable_xpn_host_with_metadata` interceptor runs after the + `post_enable_xpn_host` interceptor. The (possibly modified) response returned by + `post_enable_xpn_host` will be passed to + `post_enable_xpn_host_with_metadata`. + """ + return response, metadata + def pre_enable_xpn_resource( self, request: compute.EnableXpnResourceProjectRequest, @@ -277,12 +346,35 @@ def post_enable_xpn_resource( ) -> compute.Operation: """Post-rpc interceptor for enable_xpn_resource - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_enable_xpn_resource_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response after it is returned by the Projects server but before - it is returned to user code. + it is returned to user code. This `post_enable_xpn_resource` interceptor runs + before the `post_enable_xpn_resource_with_metadata` interceptor. """ return response + def post_enable_xpn_resource_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for enable_xpn_resource + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Projects server but before it is returned to user code. + + We recommend only using this `post_enable_xpn_resource_with_metadata` + interceptor in new development instead of the `post_enable_xpn_resource` interceptor. + When both interceptors are used, this `post_enable_xpn_resource_with_metadata` interceptor runs after the + `post_enable_xpn_resource` interceptor. The (possibly modified) response returned by + `post_enable_xpn_resource` will be passed to + `post_enable_xpn_resource_with_metadata`. + """ + return response, metadata + def pre_get( self, request: compute.GetProjectRequest, @@ -298,12 +390,35 @@ def pre_get( def post_get(self, response: compute.Project) -> compute.Project: """Post-rpc interceptor for get - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Projects server but before - it is returned to user code. + it is returned to user code. This `post_get` interceptor runs + before the `post_get_with_metadata` interceptor. """ return response + def post_get_with_metadata( + self, + response: compute.Project, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Project, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Projects server but before it is returned to user code. + + We recommend only using this `post_get_with_metadata` + interceptor in new development instead of the `post_get` interceptor. + When both interceptors are used, this `post_get_with_metadata` interceptor runs after the + `post_get` interceptor. The (possibly modified) response returned by + `post_get` will be passed to + `post_get_with_metadata`. + """ + return response, metadata + def pre_get_xpn_host( self, request: compute.GetXpnHostProjectRequest, @@ -321,12 +436,35 @@ def pre_get_xpn_host( def post_get_xpn_host(self, response: compute.Project) -> compute.Project: """Post-rpc interceptor for get_xpn_host - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_xpn_host_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Projects server but before - it is returned to user code. + it is returned to user code. This `post_get_xpn_host` interceptor runs + before the `post_get_xpn_host_with_metadata` interceptor. 
""" return response + def post_get_xpn_host_with_metadata( + self, + response: compute.Project, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Project, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_xpn_host + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Projects server but before it is returned to user code. + + We recommend only using this `post_get_xpn_host_with_metadata` + interceptor in new development instead of the `post_get_xpn_host` interceptor. + When both interceptors are used, this `post_get_xpn_host_with_metadata` interceptor runs after the + `post_get_xpn_host` interceptor. The (possibly modified) response returned by + `post_get_xpn_host` will be passed to + `post_get_xpn_host_with_metadata`. + """ + return response, metadata + def pre_get_xpn_resources( self, request: compute.GetXpnResourcesProjectsRequest, @@ -346,12 +484,37 @@ def post_get_xpn_resources( ) -> compute.ProjectsGetXpnResources: """Post-rpc interceptor for get_xpn_resources - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_xpn_resources_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Projects server but before - it is returned to user code. + it is returned to user code. This `post_get_xpn_resources` interceptor runs + before the `post_get_xpn_resources_with_metadata` interceptor. """ return response + def post_get_xpn_resources_with_metadata( + self, + response: compute.ProjectsGetXpnResources, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.ProjectsGetXpnResources, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for get_xpn_resources + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Projects server but before it is returned to user code. + + We recommend only using this `post_get_xpn_resources_with_metadata` + interceptor in new development instead of the `post_get_xpn_resources` interceptor. + When both interceptors are used, this `post_get_xpn_resources_with_metadata` interceptor runs after the + `post_get_xpn_resources` interceptor. The (possibly modified) response returned by + `post_get_xpn_resources` will be passed to + `post_get_xpn_resources_with_metadata`. + """ + return response, metadata + def pre_list_xpn_hosts( self, request: compute.ListXpnHostsProjectsRequest, @@ -369,12 +532,35 @@ def pre_list_xpn_hosts( def post_list_xpn_hosts(self, response: compute.XpnHostList) -> compute.XpnHostList: """Post-rpc interceptor for list_xpn_hosts - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_xpn_hosts_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Projects server but before - it is returned to user code. + it is returned to user code. This `post_list_xpn_hosts` interceptor runs + before the `post_list_xpn_hosts_with_metadata` interceptor. 
""" return response + def post_list_xpn_hosts_with_metadata( + self, + response: compute.XpnHostList, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.XpnHostList, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_xpn_hosts + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Projects server but before it is returned to user code. + + We recommend only using this `post_list_xpn_hosts_with_metadata` + interceptor in new development instead of the `post_list_xpn_hosts` interceptor. + When both interceptors are used, this `post_list_xpn_hosts_with_metadata` interceptor runs after the + `post_list_xpn_hosts` interceptor. The (possibly modified) response returned by + `post_list_xpn_hosts` will be passed to + `post_list_xpn_hosts_with_metadata`. + """ + return response, metadata + def pre_move_disk( self, request: compute.MoveDiskProjectRequest, @@ -390,12 +576,35 @@ def pre_move_disk( def post_move_disk(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for move_disk - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_move_disk_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Projects server but before - it is returned to user code. + it is returned to user code. This `post_move_disk` interceptor runs + before the `post_move_disk_with_metadata` interceptor. """ return response + def post_move_disk_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for move_disk + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Projects server but before it is returned to user code. + + We recommend only using this `post_move_disk_with_metadata` + interceptor in new development instead of the `post_move_disk` interceptor. + When both interceptors are used, this `post_move_disk_with_metadata` interceptor runs after the + `post_move_disk` interceptor. The (possibly modified) response returned by + `post_move_disk` will be passed to + `post_move_disk_with_metadata`. + """ + return response, metadata + def pre_move_instance( self, request: compute.MoveInstanceProjectRequest, @@ -413,12 +622,35 @@ def pre_move_instance( def post_move_instance(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for move_instance - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_move_instance_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Projects server but before - it is returned to user code. + it is returned to user code. This `post_move_instance` interceptor runs + before the `post_move_instance_with_metadata` interceptor. """ return response + def post_move_instance_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for move_instance + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Projects server but before it is returned to user code. 
+ + We recommend only using this `post_move_instance_with_metadata` + interceptor in new development instead of the `post_move_instance` interceptor. + When both interceptors are used, this `post_move_instance_with_metadata` interceptor runs after the + `post_move_instance` interceptor. The (possibly modified) response returned by + `post_move_instance` will be passed to + `post_move_instance_with_metadata`. + """ + return response, metadata + def pre_set_cloud_armor_tier( self, request: compute.SetCloudArmorTierProjectRequest, @@ -438,12 +670,35 @@ def post_set_cloud_armor_tier( ) -> compute.Operation: """Post-rpc interceptor for set_cloud_armor_tier - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_set_cloud_armor_tier_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Projects server but before - it is returned to user code. + it is returned to user code. This `post_set_cloud_armor_tier` interceptor runs + before the `post_set_cloud_armor_tier_with_metadata` interceptor. """ return response + def post_set_cloud_armor_tier_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for set_cloud_armor_tier + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Projects server but before it is returned to user code. + + We recommend only using this `post_set_cloud_armor_tier_with_metadata` + interceptor in new development instead of the `post_set_cloud_armor_tier` interceptor. + When both interceptors are used, this `post_set_cloud_armor_tier_with_metadata` interceptor runs after the + `post_set_cloud_armor_tier` interceptor. The (possibly modified) response returned by + `post_set_cloud_armor_tier` will be passed to + `post_set_cloud_armor_tier_with_metadata`. + """ + return response, metadata + def pre_set_common_instance_metadata( self, request: compute.SetCommonInstanceMetadataProjectRequest, @@ -464,12 +719,35 @@ def post_set_common_instance_metadata( ) -> compute.Operation: """Post-rpc interceptor for set_common_instance_metadata - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_set_common_instance_metadata_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Projects server but before - it is returned to user code. + it is returned to user code. This `post_set_common_instance_metadata` interceptor runs + before the `post_set_common_instance_metadata_with_metadata` interceptor. """ return response + def post_set_common_instance_metadata_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for set_common_instance_metadata + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Projects server but before it is returned to user code. + + We recommend only using this `post_set_common_instance_metadata_with_metadata` + interceptor in new development instead of the `post_set_common_instance_metadata` interceptor. 
+ When both interceptors are used, this `post_set_common_instance_metadata_with_metadata` interceptor runs after the + `post_set_common_instance_metadata` interceptor. The (possibly modified) response returned by + `post_set_common_instance_metadata` will be passed to + `post_set_common_instance_metadata_with_metadata`. + """ + return response, metadata + def pre_set_default_network_tier( self, request: compute.SetDefaultNetworkTierProjectRequest, @@ -490,12 +768,35 @@ def post_set_default_network_tier( ) -> compute.Operation: """Post-rpc interceptor for set_default_network_tier - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_set_default_network_tier_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Projects server but before - it is returned to user code. + it is returned to user code. This `post_set_default_network_tier` interceptor runs + before the `post_set_default_network_tier_with_metadata` interceptor. """ return response + def post_set_default_network_tier_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for set_default_network_tier + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Projects server but before it is returned to user code. + + We recommend only using this `post_set_default_network_tier_with_metadata` + interceptor in new development instead of the `post_set_default_network_tier` interceptor. + When both interceptors are used, this `post_set_default_network_tier_with_metadata` interceptor runs after the + `post_set_default_network_tier` interceptor. The (possibly modified) response returned by + `post_set_default_network_tier` will be passed to + `post_set_default_network_tier_with_metadata`. + """ + return response, metadata + def pre_set_usage_export_bucket( self, request: compute.SetUsageExportBucketProjectRequest, @@ -516,12 +817,35 @@ def post_set_usage_export_bucket( ) -> compute.Operation: """Post-rpc interceptor for set_usage_export_bucket - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_set_usage_export_bucket_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Projects server but before - it is returned to user code. + it is returned to user code. This `post_set_usage_export_bucket` interceptor runs + before the `post_set_usage_export_bucket_with_metadata` interceptor. """ return response + def post_set_usage_export_bucket_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for set_usage_export_bucket + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Projects server but before it is returned to user code. + + We recommend only using this `post_set_usage_export_bucket_with_metadata` + interceptor in new development instead of the `post_set_usage_export_bucket` interceptor. + When both interceptors are used, this `post_set_usage_export_bucket_with_metadata` interceptor runs after the + `post_set_usage_export_bucket` interceptor. 
The (possibly modified) response returned by + `post_set_usage_export_bucket` will be passed to + `post_set_usage_export_bucket_with_metadata`. + """ + return response, metadata + @dataclasses.dataclass class ProjectsRestStub: @@ -755,6 +1079,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_disable_xpn_host(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_disable_xpn_host_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -922,6 +1250,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_disable_xpn_resource(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_disable_xpn_resource_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1085,6 +1417,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_enable_xpn_host(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_enable_xpn_host_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1252,6 +1588,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_enable_xpn_resource(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_enable_xpn_resource_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1396,6 +1736,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1545,6 +1887,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_xpn_host(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_xpn_host_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1693,6 +2039,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_xpn_resources(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_xpn_resources_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1843,6 +2193,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_xpn_hosts(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_xpn_hosts_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( 
logging.DEBUG ): # pragma: NO COVER @@ -2008,6 +2362,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_move_disk(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_move_disk_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2175,6 +2533,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_move_instance(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_move_instance_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2342,6 +2704,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_set_cloud_armor_tier(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_set_cloud_armor_tier_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2510,6 +2876,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_set_common_instance_metadata(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_set_common_instance_metadata_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2677,6 +3047,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_set_default_network_tier(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_set_default_network_tier_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2844,6 +3218,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_set_usage_export_bucket(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_set_usage_export_bucket_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/public_advertised_prefixes/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/public_advertised_prefixes/client.py index e9ee9d2de359..0c995165e3e0 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/public_advertised_prefixes/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/public_advertised_prefixes/client.py @@ -15,6 +15,8 @@ # from collections import OrderedDict import functools +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -463,6 +465,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. 
return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/public_advertised_prefixes/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/public_advertised_prefixes/transports/rest.py index e056f1431a12..5e0c73d04b46 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/public_advertised_prefixes/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/public_advertised_prefixes/transports/rest.py @@ -148,12 +148,35 @@ def pre_announce( def post_announce(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for announce - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_announce_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the PublicAdvertisedPrefixes server but before - it is returned to user code. + it is returned to user code. This `post_announce` interceptor runs + before the `post_announce_with_metadata` interceptor. """ return response + def post_announce_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for announce + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the PublicAdvertisedPrefixes server but before it is returned to user code. + + We recommend only using this `post_announce_with_metadata` + interceptor in new development instead of the `post_announce` interceptor. + When both interceptors are used, this `post_announce_with_metadata` interceptor runs after the + `post_announce` interceptor. The (possibly modified) response returned by + `post_announce` will be passed to + `post_announce_with_metadata`. + """ + return response, metadata + def pre_delete( self, request: compute.DeletePublicAdvertisedPrefixeRequest, @@ -172,12 +195,35 @@ def pre_delete( def post_delete(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for delete - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the PublicAdvertisedPrefixes server but before - it is returned to user code. + it is returned to user code. This `post_delete` interceptor runs + before the `post_delete_with_metadata` interceptor. 
""" return response + def post_delete_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the PublicAdvertisedPrefixes server but before it is returned to user code. + + We recommend only using this `post_delete_with_metadata` + interceptor in new development instead of the `post_delete` interceptor. + When both interceptors are used, this `post_delete_with_metadata` interceptor runs after the + `post_delete` interceptor. The (possibly modified) response returned by + `post_delete` will be passed to + `post_delete_with_metadata`. + """ + return response, metadata + def pre_get( self, request: compute.GetPublicAdvertisedPrefixeRequest, @@ -198,12 +244,35 @@ def post_get( ) -> compute.PublicAdvertisedPrefix: """Post-rpc interceptor for get - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the PublicAdvertisedPrefixes server but before - it is returned to user code. + it is returned to user code. This `post_get` interceptor runs + before the `post_get_with_metadata` interceptor. """ return response + def post_get_with_metadata( + self, + response: compute.PublicAdvertisedPrefix, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.PublicAdvertisedPrefix, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the PublicAdvertisedPrefixes server but before it is returned to user code. + + We recommend only using this `post_get_with_metadata` + interceptor in new development instead of the `post_get` interceptor. + When both interceptors are used, this `post_get_with_metadata` interceptor runs after the + `post_get` interceptor. The (possibly modified) response returned by + `post_get` will be passed to + `post_get_with_metadata`. + """ + return response, metadata + def pre_insert( self, request: compute.InsertPublicAdvertisedPrefixeRequest, @@ -222,12 +291,35 @@ def pre_insert( def post_insert(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for insert - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_insert_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the PublicAdvertisedPrefixes server but before - it is returned to user code. + it is returned to user code. This `post_insert` interceptor runs + before the `post_insert_with_metadata` interceptor. """ return response + def post_insert_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for insert + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the PublicAdvertisedPrefixes server but before it is returned to user code. + + We recommend only using this `post_insert_with_metadata` + interceptor in new development instead of the `post_insert` interceptor. 
+ When both interceptors are used, this `post_insert_with_metadata` interceptor runs after the + `post_insert` interceptor. The (possibly modified) response returned by + `post_insert` will be passed to + `post_insert_with_metadata`. + """ + return response, metadata + def pre_list( self, request: compute.ListPublicAdvertisedPrefixesRequest, @@ -248,12 +340,37 @@ def post_list( ) -> compute.PublicAdvertisedPrefixList: """Post-rpc interceptor for list - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the PublicAdvertisedPrefixes server but before - it is returned to user code. + it is returned to user code. This `post_list` interceptor runs + before the `post_list_with_metadata` interceptor. """ return response + def post_list_with_metadata( + self, + response: compute.PublicAdvertisedPrefixList, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.PublicAdvertisedPrefixList, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the PublicAdvertisedPrefixes server but before it is returned to user code. + + We recommend only using this `post_list_with_metadata` + interceptor in new development instead of the `post_list` interceptor. + When both interceptors are used, this `post_list_with_metadata` interceptor runs after the + `post_list` interceptor. The (possibly modified) response returned by + `post_list` will be passed to + `post_list_with_metadata`. + """ + return response, metadata + def pre_patch( self, request: compute.PatchPublicAdvertisedPrefixeRequest, @@ -272,12 +389,35 @@ def pre_patch( def post_patch(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for patch - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_patch_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the PublicAdvertisedPrefixes server but before - it is returned to user code. + it is returned to user code. This `post_patch` interceptor runs + before the `post_patch_with_metadata` interceptor. """ return response + def post_patch_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for patch + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the PublicAdvertisedPrefixes server but before it is returned to user code. + + We recommend only using this `post_patch_with_metadata` + interceptor in new development instead of the `post_patch` interceptor. + When both interceptors are used, this `post_patch_with_metadata` interceptor runs after the + `post_patch` interceptor. The (possibly modified) response returned by + `post_patch` will be passed to + `post_patch_with_metadata`. + """ + return response, metadata + def pre_withdraw( self, request: compute.WithdrawPublicAdvertisedPrefixeRequest, @@ -296,12 +436,35 @@ def pre_withdraw( def post_withdraw(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for withdraw - Override in a subclass to manipulate the response + DEPRECATED. 
Please use the `post_withdraw_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the PublicAdvertisedPrefixes server but before - it is returned to user code. + it is returned to user code. This `post_withdraw` interceptor runs + before the `post_withdraw_with_metadata` interceptor. """ return response + def post_withdraw_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for withdraw + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the PublicAdvertisedPrefixes server but before it is returned to user code. + + We recommend only using this `post_withdraw_with_metadata` + interceptor in new development instead of the `post_withdraw` interceptor. + When both interceptors are used, this `post_withdraw_with_metadata` interceptor runs after the + `post_withdraw` interceptor. The (possibly modified) response returned by + `post_withdraw` will be passed to + `post_withdraw_with_metadata`. + """ + return response, metadata + @dataclasses.dataclass class PublicAdvertisedPrefixesRestStub: @@ -530,6 +693,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_announce(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_announce_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -690,6 +857,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -839,6 +1010,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1005,6 +1178,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_insert(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_insert_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1148,6 +1325,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1316,6 +1495,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_patch(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_patch_with_metadata( + resp, 
response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1476,6 +1659,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_withdraw(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_withdraw_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/public_delegated_prefixes/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/public_delegated_prefixes/client.py index 1a5913c2719c..d587480818a6 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/public_delegated_prefixes/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/public_delegated_prefixes/client.py @@ -15,6 +15,8 @@ # from collections import OrderedDict import functools +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -463,6 +465,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/public_delegated_prefixes/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/public_delegated_prefixes/transports/rest.py index 03d6adfe820d..47183b0ec8ad 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/public_delegated_prefixes/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/public_delegated_prefixes/transports/rest.py @@ -158,12 +158,38 @@ def post_aggregated_list( ) -> compute.PublicDelegatedPrefixAggregatedList: """Post-rpc interceptor for aggregated_list - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_aggregated_list_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the PublicDelegatedPrefixes server but before - it is returned to user code. + it is returned to user code. This `post_aggregated_list` interceptor runs + before the `post_aggregated_list_with_metadata` interceptor. 
""" return response + def post_aggregated_list_with_metadata( + self, + response: compute.PublicDelegatedPrefixAggregatedList, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.PublicDelegatedPrefixAggregatedList, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for aggregated_list + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the PublicDelegatedPrefixes server but before it is returned to user code. + + We recommend only using this `post_aggregated_list_with_metadata` + interceptor in new development instead of the `post_aggregated_list` interceptor. + When both interceptors are used, this `post_aggregated_list_with_metadata` interceptor runs after the + `post_aggregated_list` interceptor. The (possibly modified) response returned by + `post_aggregated_list` will be passed to + `post_aggregated_list_with_metadata`. + """ + return response, metadata + def pre_announce( self, request: compute.AnnouncePublicDelegatedPrefixeRequest, @@ -182,12 +208,35 @@ def pre_announce( def post_announce(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for announce - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_announce_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the PublicDelegatedPrefixes server but before - it is returned to user code. + it is returned to user code. This `post_announce` interceptor runs + before the `post_announce_with_metadata` interceptor. """ return response + def post_announce_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for announce + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the PublicDelegatedPrefixes server but before it is returned to user code. + + We recommend only using this `post_announce_with_metadata` + interceptor in new development instead of the `post_announce` interceptor. + When both interceptors are used, this `post_announce_with_metadata` interceptor runs after the + `post_announce` interceptor. The (possibly modified) response returned by + `post_announce` will be passed to + `post_announce_with_metadata`. + """ + return response, metadata + def pre_delete( self, request: compute.DeletePublicDelegatedPrefixeRequest, @@ -206,12 +255,35 @@ def pre_delete( def post_delete(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for delete - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the PublicDelegatedPrefixes server but before - it is returned to user code. + it is returned to user code. This `post_delete` interceptor runs + before the `post_delete_with_metadata` interceptor. """ return response + def post_delete_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the PublicDelegatedPrefixes server but before it is returned to user code. 
+ + We recommend only using this `post_delete_with_metadata` + interceptor in new development instead of the `post_delete` interceptor. + When both interceptors are used, this `post_delete_with_metadata` interceptor runs after the + `post_delete` interceptor. The (possibly modified) response returned by + `post_delete` will be passed to + `post_delete_with_metadata`. + """ + return response, metadata + def pre_get( self, request: compute.GetPublicDelegatedPrefixeRequest, @@ -232,12 +304,35 @@ def post_get( ) -> compute.PublicDelegatedPrefix: """Post-rpc interceptor for get - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the PublicDelegatedPrefixes server but before - it is returned to user code. + it is returned to user code. This `post_get` interceptor runs + before the `post_get_with_metadata` interceptor. """ return response + def post_get_with_metadata( + self, + response: compute.PublicDelegatedPrefix, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.PublicDelegatedPrefix, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the PublicDelegatedPrefixes server but before it is returned to user code. + + We recommend only using this `post_get_with_metadata` + interceptor in new development instead of the `post_get` interceptor. + When both interceptors are used, this `post_get_with_metadata` interceptor runs after the + `post_get` interceptor. The (possibly modified) response returned by + `post_get` will be passed to + `post_get_with_metadata`. + """ + return response, metadata + def pre_insert( self, request: compute.InsertPublicDelegatedPrefixeRequest, @@ -256,12 +351,35 @@ def pre_insert( def post_insert(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for insert - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_insert_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the PublicDelegatedPrefixes server but before - it is returned to user code. + it is returned to user code. This `post_insert` interceptor runs + before the `post_insert_with_metadata` interceptor. """ return response + def post_insert_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for insert + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the PublicDelegatedPrefixes server but before it is returned to user code. + + We recommend only using this `post_insert_with_metadata` + interceptor in new development instead of the `post_insert` interceptor. + When both interceptors are used, this `post_insert_with_metadata` interceptor runs after the + `post_insert` interceptor. The (possibly modified) response returned by + `post_insert` will be passed to + `post_insert_with_metadata`. + """ + return response, metadata + def pre_list( self, request: compute.ListPublicDelegatedPrefixesRequest, @@ -282,12 +400,37 @@ def post_list( ) -> compute.PublicDelegatedPrefixList: """Post-rpc interceptor for list - Override in a subclass to manipulate the response + DEPRECATED. 
Please use the `post_list_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the PublicDelegatedPrefixes server but before - it is returned to user code. + it is returned to user code. This `post_list` interceptor runs + before the `post_list_with_metadata` interceptor. """ return response + def post_list_with_metadata( + self, + response: compute.PublicDelegatedPrefixList, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.PublicDelegatedPrefixList, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the PublicDelegatedPrefixes server but before it is returned to user code. + + We recommend only using this `post_list_with_metadata` + interceptor in new development instead of the `post_list` interceptor. + When both interceptors are used, this `post_list_with_metadata` interceptor runs after the + `post_list` interceptor. The (possibly modified) response returned by + `post_list` will be passed to + `post_list_with_metadata`. + """ + return response, metadata + def pre_patch( self, request: compute.PatchPublicDelegatedPrefixeRequest, @@ -306,12 +449,35 @@ def pre_patch( def post_patch(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for patch - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_patch_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the PublicDelegatedPrefixes server but before - it is returned to user code. + it is returned to user code. This `post_patch` interceptor runs + before the `post_patch_with_metadata` interceptor. """ return response + def post_patch_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for patch + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the PublicDelegatedPrefixes server but before it is returned to user code. + + We recommend only using this `post_patch_with_metadata` + interceptor in new development instead of the `post_patch` interceptor. + When both interceptors are used, this `post_patch_with_metadata` interceptor runs after the + `post_patch` interceptor. The (possibly modified) response returned by + `post_patch` will be passed to + `post_patch_with_metadata`. + """ + return response, metadata + def pre_withdraw( self, request: compute.WithdrawPublicDelegatedPrefixeRequest, @@ -330,12 +496,35 @@ def pre_withdraw( def post_withdraw(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for withdraw - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_withdraw_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the PublicDelegatedPrefixes server but before - it is returned to user code. + it is returned to user code. This `post_withdraw` interceptor runs + before the `post_withdraw_with_metadata` interceptor. 
""" return response + def post_withdraw_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for withdraw + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the PublicDelegatedPrefixes server but before it is returned to user code. + + We recommend only using this `post_withdraw_with_metadata` + interceptor in new development instead of the `post_withdraw` interceptor. + When both interceptors are used, this `post_withdraw_with_metadata` interceptor runs after the + `post_withdraw` interceptor. The (possibly modified) response returned by + `post_withdraw` will be passed to + `post_withdraw_with_metadata`. + """ + return response, metadata + @dataclasses.dataclass class PublicDelegatedPrefixesRestStub: @@ -549,6 +738,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_aggregated_list(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_aggregated_list_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -711,6 +904,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_announce(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_announce_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -871,6 +1068,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1023,6 +1224,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1189,6 +1392,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_insert(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_insert_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1332,6 +1539,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1500,6 +1709,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_patch(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + 
resp, _ = self._interceptor.post_patch_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1660,6 +1873,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_withdraw(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_withdraw_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_autoscalers/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_autoscalers/client.py index cb07e86ba4b4..0fc0935e5902 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_autoscalers/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_autoscalers/client.py @@ -15,6 +15,8 @@ # from collections import OrderedDict import functools +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -461,6 +463,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_autoscalers/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_autoscalers/transports/rest.py index f385074fb135..7953511730d6 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_autoscalers/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_autoscalers/transports/rest.py @@ -139,12 +139,35 @@ def pre_delete( def post_delete(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for delete - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RegionAutoscalers server but before - it is returned to user code. + it is returned to user code. This `post_delete` interceptor runs + before the `post_delete_with_metadata` interceptor. 
""" return response + def post_delete_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionAutoscalers server but before it is returned to user code. + + We recommend only using this `post_delete_with_metadata` + interceptor in new development instead of the `post_delete` interceptor. + When both interceptors are used, this `post_delete_with_metadata` interceptor runs after the + `post_delete` interceptor. The (possibly modified) response returned by + `post_delete` will be passed to + `post_delete_with_metadata`. + """ + return response, metadata + def pre_get( self, request: compute.GetRegionAutoscalerRequest, @@ -162,12 +185,35 @@ def pre_get( def post_get(self, response: compute.Autoscaler) -> compute.Autoscaler: """Post-rpc interceptor for get - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RegionAutoscalers server but before - it is returned to user code. + it is returned to user code. This `post_get` interceptor runs + before the `post_get_with_metadata` interceptor. """ return response + def post_get_with_metadata( + self, + response: compute.Autoscaler, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Autoscaler, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionAutoscalers server but before it is returned to user code. + + We recommend only using this `post_get_with_metadata` + interceptor in new development instead of the `post_get` interceptor. + When both interceptors are used, this `post_get_with_metadata` interceptor runs after the + `post_get` interceptor. The (possibly modified) response returned by + `post_get` will be passed to + `post_get_with_metadata`. + """ + return response, metadata + def pre_insert( self, request: compute.InsertRegionAutoscalerRequest, @@ -185,12 +231,35 @@ def pre_insert( def post_insert(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for insert - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_insert_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RegionAutoscalers server but before - it is returned to user code. + it is returned to user code. This `post_insert` interceptor runs + before the `post_insert_with_metadata` interceptor. """ return response + def post_insert_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for insert + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionAutoscalers server but before it is returned to user code. + + We recommend only using this `post_insert_with_metadata` + interceptor in new development instead of the `post_insert` interceptor. + When both interceptors are used, this `post_insert_with_metadata` interceptor runs after the + `post_insert` interceptor. 
The (possibly modified) response returned by + `post_insert` will be passed to + `post_insert_with_metadata`. + """ + return response, metadata + def pre_list( self, request: compute.ListRegionAutoscalersRequest, @@ -210,12 +279,35 @@ def post_list( ) -> compute.RegionAutoscalerList: """Post-rpc interceptor for list - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RegionAutoscalers server but before - it is returned to user code. + it is returned to user code. This `post_list` interceptor runs + before the `post_list_with_metadata` interceptor. """ return response + def post_list_with_metadata( + self, + response: compute.RegionAutoscalerList, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.RegionAutoscalerList, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionAutoscalers server but before it is returned to user code. + + We recommend only using this `post_list_with_metadata` + interceptor in new development instead of the `post_list` interceptor. + When both interceptors are used, this `post_list_with_metadata` interceptor runs after the + `post_list` interceptor. The (possibly modified) response returned by + `post_list` will be passed to + `post_list_with_metadata`. + """ + return response, metadata + def pre_patch( self, request: compute.PatchRegionAutoscalerRequest, @@ -233,12 +325,35 @@ def pre_patch( def post_patch(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for patch - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_patch_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RegionAutoscalers server but before - it is returned to user code. + it is returned to user code. This `post_patch` interceptor runs + before the `post_patch_with_metadata` interceptor. """ return response + def post_patch_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for patch + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionAutoscalers server but before it is returned to user code. + + We recommend only using this `post_patch_with_metadata` + interceptor in new development instead of the `post_patch` interceptor. + When both interceptors are used, this `post_patch_with_metadata` interceptor runs after the + `post_patch` interceptor. The (possibly modified) response returned by + `post_patch` will be passed to + `post_patch_with_metadata`. + """ + return response, metadata + def pre_update( self, request: compute.UpdateRegionAutoscalerRequest, @@ -256,12 +371,35 @@ def pre_update( def post_update(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for update - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RegionAutoscalers server but before - it is returned to user code. + it is returned to user code. 
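
Editorial aside: for readers tracing the `__call__` changes in this diff, each method converts the HTTP response headers into a metadata list after parsing the protobuf response and then invokes both hooks in order. The sketch below is illustrative only; `EchoInterceptor`, the dict standing in for `response.headers`, and the dict standing in for the parsed response are placeholders. Note that the generated wiring unpacks `resp, _ = ...`, so any metadata returned by the `with_metadata` hook is discarded and only the response it returns flows back to user code.

class EchoInterceptor:
    # Minimal stand-in providing the two hooks used by the generated wiring.
    def post_get(self, response):
        return response

    def post_get_with_metadata(self, response, metadata):
        return response, metadata

interceptor = EchoInterceptor()
resp = {"kind": "compute#autoscaler"}           # stand-in for the parsed proto
headers = {"content-type": "application/json"}  # stand-in for response.headers

resp = interceptor.post_get(resp)  # legacy hook runs first
response_metadata = [(k, str(v)) for k, v in headers.items()]
resp, _ = interceptor.post_get_with_metadata(resp, response_metadata)
# The metadata returned by the hook is dropped; resp is what the client returns.
print(resp)
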
This `post_update` interceptor runs + before the `post_update_with_metadata` interceptor. """ return response + def post_update_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionAutoscalers server but before it is returned to user code. + + We recommend only using this `post_update_with_metadata` + interceptor in new development instead of the `post_update` interceptor. + When both interceptors are used, this `post_update_with_metadata` interceptor runs after the + `post_update` interceptor. The (possibly modified) response returned by + `post_update` will be passed to + `post_update_with_metadata`. + """ + return response, metadata + @dataclasses.dataclass class RegionAutoscalersRestStub: @@ -493,6 +631,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -649,6 +791,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -820,6 +964,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_insert(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_insert_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -966,6 +1114,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1137,6 +1287,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_patch(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_patch_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1308,6 +1462,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_backend_services/client.py 
b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_backend_services/client.py index 8df5d123fc61..272a151d5a2f 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_backend_services/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_backend_services/client.py @@ -15,6 +15,8 @@ # from collections import OrderedDict import functools +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -463,6 +465,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_backend_services/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_backend_services/transports/rest.py index 4ad5008ff3a5..3d98fc4f4aa2 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_backend_services/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_backend_services/transports/rest.py @@ -188,12 +188,35 @@ def pre_delete( def post_delete(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for delete - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RegionBackendServices server but before - it is returned to user code. + it is returned to user code. This `post_delete` interceptor runs + before the `post_delete_with_metadata` interceptor. """ return response + def post_delete_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionBackendServices server but before it is returned to user code. + + We recommend only using this `post_delete_with_metadata` + interceptor in new development instead of the `post_delete` interceptor. + When both interceptors are used, this `post_delete_with_metadata` interceptor runs after the + `post_delete` interceptor. The (possibly modified) response returned by + `post_delete` will be passed to + `post_delete_with_metadata`. 
+ """ + return response, metadata + def pre_get( self, request: compute.GetRegionBackendServiceRequest, @@ -211,12 +234,35 @@ def pre_get( def post_get(self, response: compute.BackendService) -> compute.BackendService: """Post-rpc interceptor for get - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RegionBackendServices server but before - it is returned to user code. + it is returned to user code. This `post_get` interceptor runs + before the `post_get_with_metadata` interceptor. """ return response + def post_get_with_metadata( + self, + response: compute.BackendService, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.BackendService, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionBackendServices server but before it is returned to user code. + + We recommend only using this `post_get_with_metadata` + interceptor in new development instead of the `post_get` interceptor. + When both interceptors are used, this `post_get_with_metadata` interceptor runs after the + `post_get` interceptor. The (possibly modified) response returned by + `post_get` will be passed to + `post_get_with_metadata`. + """ + return response, metadata + def pre_get_health( self, request: compute.GetHealthRegionBackendServiceRequest, @@ -237,12 +283,37 @@ def post_get_health( ) -> compute.BackendServiceGroupHealth: """Post-rpc interceptor for get_health - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_health_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RegionBackendServices server but before - it is returned to user code. + it is returned to user code. This `post_get_health` interceptor runs + before the `post_get_health_with_metadata` interceptor. """ return response + def post_get_health_with_metadata( + self, + response: compute.BackendServiceGroupHealth, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.BackendServiceGroupHealth, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for get_health + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionBackendServices server but before it is returned to user code. + + We recommend only using this `post_get_health_with_metadata` + interceptor in new development instead of the `post_get_health` interceptor. + When both interceptors are used, this `post_get_health_with_metadata` interceptor runs after the + `post_get_health` interceptor. The (possibly modified) response returned by + `post_get_health` will be passed to + `post_get_health_with_metadata`. + """ + return response, metadata + def pre_get_iam_policy( self, request: compute.GetIamPolicyRegionBackendServiceRequest, @@ -261,12 +332,35 @@ def pre_get_iam_policy( def post_get_iam_policy(self, response: compute.Policy) -> compute.Policy: """Post-rpc interceptor for get_iam_policy - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_iam_policy_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response after it is returned by the RegionBackendServices server but before - it is returned to user code. + it is returned to user code. This `post_get_iam_policy` interceptor runs + before the `post_get_iam_policy_with_metadata` interceptor. """ return response + def post_get_iam_policy_with_metadata( + self, + response: compute.Policy, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Policy, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_iam_policy + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionBackendServices server but before it is returned to user code. + + We recommend only using this `post_get_iam_policy_with_metadata` + interceptor in new development instead of the `post_get_iam_policy` interceptor. + When both interceptors are used, this `post_get_iam_policy_with_metadata` interceptor runs after the + `post_get_iam_policy` interceptor. The (possibly modified) response returned by + `post_get_iam_policy` will be passed to + `post_get_iam_policy_with_metadata`. + """ + return response, metadata + def pre_insert( self, request: compute.InsertRegionBackendServiceRequest, @@ -285,12 +379,35 @@ def pre_insert( def post_insert(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for insert - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_insert_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RegionBackendServices server but before - it is returned to user code. + it is returned to user code. This `post_insert` interceptor runs + before the `post_insert_with_metadata` interceptor. """ return response + def post_insert_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for insert + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionBackendServices server but before it is returned to user code. + + We recommend only using this `post_insert_with_metadata` + interceptor in new development instead of the `post_insert` interceptor. + When both interceptors are used, this `post_insert_with_metadata` interceptor runs after the + `post_insert` interceptor. The (possibly modified) response returned by + `post_insert` will be passed to + `post_insert_with_metadata`. + """ + return response, metadata + def pre_list( self, request: compute.ListRegionBackendServicesRequest, @@ -311,12 +428,35 @@ def post_list( ) -> compute.BackendServiceList: """Post-rpc interceptor for list - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RegionBackendServices server but before - it is returned to user code. + it is returned to user code. This `post_list` interceptor runs + before the `post_list_with_metadata` interceptor. 
""" return response + def post_list_with_metadata( + self, + response: compute.BackendServiceList, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.BackendServiceList, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionBackendServices server but before it is returned to user code. + + We recommend only using this `post_list_with_metadata` + interceptor in new development instead of the `post_list` interceptor. + When both interceptors are used, this `post_list_with_metadata` interceptor runs after the + `post_list` interceptor. The (possibly modified) response returned by + `post_list` will be passed to + `post_list_with_metadata`. + """ + return response, metadata + def pre_list_usable( self, request: compute.ListUsableRegionBackendServicesRequest, @@ -337,12 +477,37 @@ def post_list_usable( ) -> compute.BackendServiceListUsable: """Post-rpc interceptor for list_usable - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_usable_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RegionBackendServices server but before - it is returned to user code. + it is returned to user code. This `post_list_usable` interceptor runs + before the `post_list_usable_with_metadata` interceptor. """ return response + def post_list_usable_with_metadata( + self, + response: compute.BackendServiceListUsable, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.BackendServiceListUsable, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_usable + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionBackendServices server but before it is returned to user code. + + We recommend only using this `post_list_usable_with_metadata` + interceptor in new development instead of the `post_list_usable` interceptor. + When both interceptors are used, this `post_list_usable_with_metadata` interceptor runs after the + `post_list_usable` interceptor. The (possibly modified) response returned by + `post_list_usable` will be passed to + `post_list_usable_with_metadata`. + """ + return response, metadata + def pre_patch( self, request: compute.PatchRegionBackendServiceRequest, @@ -361,12 +526,35 @@ def pre_patch( def post_patch(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for patch - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_patch_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RegionBackendServices server but before - it is returned to user code. + it is returned to user code. This `post_patch` interceptor runs + before the `post_patch_with_metadata` interceptor. """ return response + def post_patch_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for patch + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionBackendServices server but before it is returned to user code. 
+ + We recommend only using this `post_patch_with_metadata` + interceptor in new development instead of the `post_patch` interceptor. + When both interceptors are used, this `post_patch_with_metadata` interceptor runs after the + `post_patch` interceptor. The (possibly modified) response returned by + `post_patch` will be passed to + `post_patch_with_metadata`. + """ + return response, metadata + def pre_set_iam_policy( self, request: compute.SetIamPolicyRegionBackendServiceRequest, @@ -385,12 +573,35 @@ def pre_set_iam_policy( def post_set_iam_policy(self, response: compute.Policy) -> compute.Policy: """Post-rpc interceptor for set_iam_policy - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_set_iam_policy_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RegionBackendServices server but before - it is returned to user code. + it is returned to user code. This `post_set_iam_policy` interceptor runs + before the `post_set_iam_policy_with_metadata` interceptor. """ return response + def post_set_iam_policy_with_metadata( + self, + response: compute.Policy, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Policy, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for set_iam_policy + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionBackendServices server but before it is returned to user code. + + We recommend only using this `post_set_iam_policy_with_metadata` + interceptor in new development instead of the `post_set_iam_policy` interceptor. + When both interceptors are used, this `post_set_iam_policy_with_metadata` interceptor runs after the + `post_set_iam_policy` interceptor. The (possibly modified) response returned by + `post_set_iam_policy` will be passed to + `post_set_iam_policy_with_metadata`. + """ + return response, metadata + def pre_set_security_policy( self, request: compute.SetSecurityPolicyRegionBackendServiceRequest, @@ -411,12 +622,35 @@ def post_set_security_policy( ) -> compute.Operation: """Post-rpc interceptor for set_security_policy - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_set_security_policy_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RegionBackendServices server but before - it is returned to user code. + it is returned to user code. This `post_set_security_policy` interceptor runs + before the `post_set_security_policy_with_metadata` interceptor. """ return response + def post_set_security_policy_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for set_security_policy + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionBackendServices server but before it is returned to user code. + + We recommend only using this `post_set_security_policy_with_metadata` + interceptor in new development instead of the `post_set_security_policy` interceptor. + When both interceptors are used, this `post_set_security_policy_with_metadata` interceptor runs after the + `post_set_security_policy` interceptor. 
The (possibly modified) response returned by + `post_set_security_policy` will be passed to + `post_set_security_policy_with_metadata`. + """ + return response, metadata + def pre_test_iam_permissions( self, request: compute.TestIamPermissionsRegionBackendServiceRequest, @@ -437,12 +671,37 @@ def post_test_iam_permissions( ) -> compute.TestPermissionsResponse: """Post-rpc interceptor for test_iam_permissions - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_test_iam_permissions_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RegionBackendServices server but before - it is returned to user code. + it is returned to user code. This `post_test_iam_permissions` interceptor runs + before the `post_test_iam_permissions_with_metadata` interceptor. """ return response + def post_test_iam_permissions_with_metadata( + self, + response: compute.TestPermissionsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.TestPermissionsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for test_iam_permissions + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionBackendServices server but before it is returned to user code. + + We recommend only using this `post_test_iam_permissions_with_metadata` + interceptor in new development instead of the `post_test_iam_permissions` interceptor. + When both interceptors are used, this `post_test_iam_permissions_with_metadata` interceptor runs after the + `post_test_iam_permissions` interceptor. The (possibly modified) response returned by + `post_test_iam_permissions` will be passed to + `post_test_iam_permissions_with_metadata`. + """ + return response, metadata + def pre_update( self, request: compute.UpdateRegionBackendServiceRequest, @@ -461,12 +720,35 @@ def pre_update( def post_update(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for update - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RegionBackendServices server but before - it is returned to user code. + it is returned to user code. This `post_update` interceptor runs + before the `post_update_with_metadata` interceptor. """ return response + def post_update_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionBackendServices server but before it is returned to user code. + + We recommend only using this `post_update_with_metadata` + interceptor in new development instead of the `post_update` interceptor. + When both interceptors are used, this `post_update_with_metadata` interceptor runs after the + `post_update` interceptor. The (possibly modified) response returned by + `post_update` will be passed to + `post_update_with_metadata`. 
+ """ + return response, metadata + @dataclasses.dataclass class RegionBackendServicesRestStub: @@ -695,6 +977,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -854,6 +1140,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1003,6 +1291,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_health(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_health_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1171,6 +1463,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_iam_policy(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_iam_policy_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1337,6 +1633,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_insert(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_insert_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1481,6 +1781,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1626,6 +1928,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_usable(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_usable_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1794,6 +2100,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_patch(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_patch_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1966,6 +2276,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_set_iam_policy(resp) + response_metadata = [(k, str(v)) for k, v in 
response.headers.items()] + resp, _ = self._interceptor.post_set_iam_policy_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2136,6 +2450,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_set_security_policy(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_set_security_policy_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2289,6 +2607,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_test_iam_permissions(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_test_iam_permissions_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2455,6 +2777,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_commitments/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_commitments/client.py index f7426af18183..d5a9503c1968 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_commitments/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_commitments/client.py @@ -15,6 +15,8 @@ # from collections import OrderedDict import functools +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -461,6 +463,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. 
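(Editorial note between the region_commitments client.py and transports/rest.py hunks.)

On the `_add_cred_info_for_auth_errors` helper added to each client above: for HTTP 401, 403, and 404 errors, when the attached credentials expose `get_cred_info()` (available in google-auth >= 2.35.0), the helper appends the credential info to the error's details as a JSON string. The call sites are outside these hunks, so assuming the client applies the helper to errors raised from its RPC methods, a caller could read the extra detail back roughly as sketched below. This is illustrative only: the project, region, and resource names are placeholders, and the exact keys returned by `get_cred_info()` depend on the credential type.

    import json

    from google.api_core import exceptions as core_exceptions
    from google.cloud import compute_v1

    client = compute_v1.RegionCommitmentsClient()
    try:
        client.get(project="my-project", region="us-central1", commitment="missing-commitment")
    except core_exceptions.GoogleAPICallError as e:
        # _add_cred_info_for_auth_errors only acts on 401/403/404 responses.
        if e.code in (401, 403, 404):
            for detail in e.details:
                # The appended credential info arrives as a JSON-encoded string.
                if isinstance(detail, str):
                    try:
                        print(json.loads(detail))
                    except ValueError:
                        pass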
diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_commitments/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_commitments/transports/rest.py index b207f0dda3e0..5359bcab9485 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_commitments/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_commitments/transports/rest.py @@ -134,12 +134,37 @@ def post_aggregated_list( ) -> compute.CommitmentAggregatedList: """Post-rpc interceptor for aggregated_list - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_aggregated_list_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RegionCommitments server but before - it is returned to user code. + it is returned to user code. This `post_aggregated_list` interceptor runs + before the `post_aggregated_list_with_metadata` interceptor. """ return response + def post_aggregated_list_with_metadata( + self, + response: compute.CommitmentAggregatedList, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.CommitmentAggregatedList, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for aggregated_list + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionCommitments server but before it is returned to user code. + + We recommend only using this `post_aggregated_list_with_metadata` + interceptor in new development instead of the `post_aggregated_list` interceptor. + When both interceptors are used, this `post_aggregated_list_with_metadata` interceptor runs after the + `post_aggregated_list` interceptor. The (possibly modified) response returned by + `post_aggregated_list` will be passed to + `post_aggregated_list_with_metadata`. + """ + return response, metadata + def pre_get( self, request: compute.GetRegionCommitmentRequest, @@ -157,12 +182,35 @@ def pre_get( def post_get(self, response: compute.Commitment) -> compute.Commitment: """Post-rpc interceptor for get - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RegionCommitments server but before - it is returned to user code. + it is returned to user code. This `post_get` interceptor runs + before the `post_get_with_metadata` interceptor. """ return response + def post_get_with_metadata( + self, + response: compute.Commitment, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Commitment, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionCommitments server but before it is returned to user code. + + We recommend only using this `post_get_with_metadata` + interceptor in new development instead of the `post_get` interceptor. + When both interceptors are used, this `post_get_with_metadata` interceptor runs after the + `post_get` interceptor. The (possibly modified) response returned by + `post_get` will be passed to + `post_get_with_metadata`. 
+ """ + return response, metadata + def pre_insert( self, request: compute.InsertRegionCommitmentRequest, @@ -180,12 +228,35 @@ def pre_insert( def post_insert(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for insert - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_insert_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RegionCommitments server but before - it is returned to user code. + it is returned to user code. This `post_insert` interceptor runs + before the `post_insert_with_metadata` interceptor. """ return response + def post_insert_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for insert + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionCommitments server but before it is returned to user code. + + We recommend only using this `post_insert_with_metadata` + interceptor in new development instead of the `post_insert` interceptor. + When both interceptors are used, this `post_insert_with_metadata` interceptor runs after the + `post_insert` interceptor. The (possibly modified) response returned by + `post_insert` will be passed to + `post_insert_with_metadata`. + """ + return response, metadata + def pre_list( self, request: compute.ListRegionCommitmentsRequest, @@ -203,12 +274,35 @@ def pre_list( def post_list(self, response: compute.CommitmentList) -> compute.CommitmentList: """Post-rpc interceptor for list - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RegionCommitments server but before - it is returned to user code. + it is returned to user code. This `post_list` interceptor runs + before the `post_list_with_metadata` interceptor. """ return response + def post_list_with_metadata( + self, + response: compute.CommitmentList, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.CommitmentList, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionCommitments server but before it is returned to user code. + + We recommend only using this `post_list_with_metadata` + interceptor in new development instead of the `post_list` interceptor. + When both interceptors are used, this `post_list_with_metadata` interceptor runs after the + `post_list` interceptor. The (possibly modified) response returned by + `post_list` will be passed to + `post_list_with_metadata`. + """ + return response, metadata + def pre_update( self, request: compute.UpdateRegionCommitmentRequest, @@ -226,12 +320,35 @@ def pre_update( def post_update(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for update - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RegionCommitments server but before - it is returned to user code. + it is returned to user code. 
This `post_update` interceptor runs + before the `post_update_with_metadata` interceptor. """ return response + def post_update_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionCommitments server but before it is returned to user code. + + We recommend only using this `post_update_with_metadata` + interceptor in new development instead of the `post_update` interceptor. + When both interceptors are used, this `post_update_with_metadata` interceptor runs after the + `post_update` interceptor. The (possibly modified) response returned by + `post_update` will be passed to + `post_update_with_metadata`. + """ + return response, metadata + @dataclasses.dataclass class RegionCommitmentsRestStub: @@ -443,6 +560,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_aggregated_list(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_aggregated_list_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -598,6 +719,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -769,6 +892,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_insert(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_insert_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -917,6 +1044,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1088,6 +1217,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_disk_types/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_disk_types/client.py index a0b4bd78b6c2..b06410ae8c3d 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_disk_types/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_disk_types/client.py @@ -14,6 +14,8 @@ # limitations under the License. 
# from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -458,6 +460,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_disk_types/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_disk_types/transports/rest.py index 5cf89c9f26de..e8758fb495f3 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_disk_types/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_disk_types/transports/rest.py @@ -107,12 +107,35 @@ def pre_get( def post_get(self, response: compute.DiskType) -> compute.DiskType: """Post-rpc interceptor for get - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RegionDiskTypes server but before - it is returned to user code. + it is returned to user code. This `post_get` interceptor runs + before the `post_get_with_metadata` interceptor. """ return response + def post_get_with_metadata( + self, + response: compute.DiskType, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.DiskType, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionDiskTypes server but before it is returned to user code. + + We recommend only using this `post_get_with_metadata` + interceptor in new development instead of the `post_get` interceptor. + When both interceptors are used, this `post_get_with_metadata` interceptor runs after the + `post_get` interceptor. The (possibly modified) response returned by + `post_get` will be passed to + `post_get_with_metadata`. + """ + return response, metadata + def pre_list( self, request: compute.ListRegionDiskTypesRequest, @@ -132,12 +155,35 @@ def post_list( ) -> compute.RegionDiskTypeList: """Post-rpc interceptor for list - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RegionDiskTypes server but before - it is returned to user code. + it is returned to user code. This `post_list` interceptor runs + before the `post_list_with_metadata` interceptor. 
""" return response + def post_list_with_metadata( + self, + response: compute.RegionDiskTypeList, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.RegionDiskTypeList, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionDiskTypes server but before it is returned to user code. + + We recommend only using this `post_list_with_metadata` + interceptor in new development instead of the `post_list` interceptor. + When both interceptors are used, this `post_list_with_metadata` interceptor runs after the + `post_list` interceptor. The (possibly modified) response returned by + `post_list` will be passed to + `post_list_with_metadata`. + """ + return response, metadata + @dataclasses.dataclass class RegionDiskTypesRestStub: @@ -361,6 +407,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -505,6 +553,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_disks/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_disks/client.py index 1e4c08bc612e..502b87c1d9c7 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_disks/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_disks/client.py @@ -15,6 +15,8 @@ # from collections import OrderedDict import functools +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -459,6 +461,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. 
diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_disks/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_disks/transports/rest.py index 8b2876377397..ae5cedbac8c2 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_disks/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_disks/transports/rest.py @@ -230,12 +230,35 @@ def post_add_resource_policies( ) -> compute.Operation: """Post-rpc interceptor for add_resource_policies - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_add_resource_policies_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RegionDisks server but before - it is returned to user code. + it is returned to user code. This `post_add_resource_policies` interceptor runs + before the `post_add_resource_policies_with_metadata` interceptor. """ return response + def post_add_resource_policies_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for add_resource_policies + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionDisks server but before it is returned to user code. + + We recommend only using this `post_add_resource_policies_with_metadata` + interceptor in new development instead of the `post_add_resource_policies` interceptor. + When both interceptors are used, this `post_add_resource_policies_with_metadata` interceptor runs after the + `post_add_resource_policies` interceptor. The (possibly modified) response returned by + `post_add_resource_policies` will be passed to + `post_add_resource_policies_with_metadata`. + """ + return response, metadata + def pre_bulk_insert( self, request: compute.BulkInsertRegionDiskRequest, @@ -253,12 +276,35 @@ def pre_bulk_insert( def post_bulk_insert(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for bulk_insert - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_bulk_insert_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RegionDisks server but before - it is returned to user code. + it is returned to user code. This `post_bulk_insert` interceptor runs + before the `post_bulk_insert_with_metadata` interceptor. """ return response + def post_bulk_insert_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for bulk_insert + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionDisks server but before it is returned to user code. + + We recommend only using this `post_bulk_insert_with_metadata` + interceptor in new development instead of the `post_bulk_insert` interceptor. + When both interceptors are used, this `post_bulk_insert_with_metadata` interceptor runs after the + `post_bulk_insert` interceptor. The (possibly modified) response returned by + `post_bulk_insert` will be passed to + `post_bulk_insert_with_metadata`. 
+ """ + return response, metadata + def pre_create_snapshot( self, request: compute.CreateSnapshotRegionDiskRequest, @@ -276,12 +322,35 @@ def pre_create_snapshot( def post_create_snapshot(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for create_snapshot - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_snapshot_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RegionDisks server but before - it is returned to user code. + it is returned to user code. This `post_create_snapshot` interceptor runs + before the `post_create_snapshot_with_metadata` interceptor. """ return response + def post_create_snapshot_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_snapshot + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionDisks server but before it is returned to user code. + + We recommend only using this `post_create_snapshot_with_metadata` + interceptor in new development instead of the `post_create_snapshot` interceptor. + When both interceptors are used, this `post_create_snapshot_with_metadata` interceptor runs after the + `post_create_snapshot` interceptor. The (possibly modified) response returned by + `post_create_snapshot` will be passed to + `post_create_snapshot_with_metadata`. + """ + return response, metadata + def pre_delete( self, request: compute.DeleteRegionDiskRequest, @@ -299,12 +368,35 @@ def pre_delete( def post_delete(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for delete - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RegionDisks server but before - it is returned to user code. + it is returned to user code. This `post_delete` interceptor runs + before the `post_delete_with_metadata` interceptor. """ return response + def post_delete_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionDisks server but before it is returned to user code. + + We recommend only using this `post_delete_with_metadata` + interceptor in new development instead of the `post_delete` interceptor. + When both interceptors are used, this `post_delete_with_metadata` interceptor runs after the + `post_delete` interceptor. The (possibly modified) response returned by + `post_delete` will be passed to + `post_delete_with_metadata`. + """ + return response, metadata + def pre_get( self, request: compute.GetRegionDiskRequest, @@ -320,12 +412,33 @@ def pre_get( def post_get(self, response: compute.Disk) -> compute.Disk: """Post-rpc interceptor for get - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RegionDisks server but before - it is returned to user code. + it is returned to user code. 
This `post_get` interceptor runs + before the `post_get_with_metadata` interceptor. """ return response + def post_get_with_metadata( + self, response: compute.Disk, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[compute.Disk, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionDisks server but before it is returned to user code. + + We recommend only using this `post_get_with_metadata` + interceptor in new development instead of the `post_get` interceptor. + When both interceptors are used, this `post_get_with_metadata` interceptor runs after the + `post_get` interceptor. The (possibly modified) response returned by + `post_get` will be passed to + `post_get_with_metadata`. + """ + return response, metadata + def pre_get_iam_policy( self, request: compute.GetIamPolicyRegionDiskRequest, @@ -343,12 +456,35 @@ def pre_get_iam_policy( def post_get_iam_policy(self, response: compute.Policy) -> compute.Policy: """Post-rpc interceptor for get_iam_policy - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_iam_policy_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RegionDisks server but before - it is returned to user code. + it is returned to user code. This `post_get_iam_policy` interceptor runs + before the `post_get_iam_policy_with_metadata` interceptor. """ return response + def post_get_iam_policy_with_metadata( + self, + response: compute.Policy, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Policy, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_iam_policy + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionDisks server but before it is returned to user code. + + We recommend only using this `post_get_iam_policy_with_metadata` + interceptor in new development instead of the `post_get_iam_policy` interceptor. + When both interceptors are used, this `post_get_iam_policy_with_metadata` interceptor runs after the + `post_get_iam_policy` interceptor. The (possibly modified) response returned by + `post_get_iam_policy` will be passed to + `post_get_iam_policy_with_metadata`. + """ + return response, metadata + def pre_insert( self, request: compute.InsertRegionDiskRequest, @@ -366,12 +502,35 @@ def pre_insert( def post_insert(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for insert - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_insert_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RegionDisks server but before - it is returned to user code. + it is returned to user code. This `post_insert` interceptor runs + before the `post_insert_with_metadata` interceptor. """ return response + def post_insert_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for insert + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionDisks server but before it is returned to user code. 
+ + We recommend only using this `post_insert_with_metadata` + interceptor in new development instead of the `post_insert` interceptor. + When both interceptors are used, this `post_insert_with_metadata` interceptor runs after the + `post_insert` interceptor. The (possibly modified) response returned by + `post_insert` will be passed to + `post_insert_with_metadata`. + """ + return response, metadata + def pre_list( self, request: compute.ListRegionDisksRequest, @@ -387,12 +546,35 @@ def pre_list( def post_list(self, response: compute.DiskList) -> compute.DiskList: """Post-rpc interceptor for list - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RegionDisks server but before - it is returned to user code. + it is returned to user code. This `post_list` interceptor runs + before the `post_list_with_metadata` interceptor. """ return response + def post_list_with_metadata( + self, + response: compute.DiskList, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.DiskList, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionDisks server but before it is returned to user code. + + We recommend only using this `post_list_with_metadata` + interceptor in new development instead of the `post_list` interceptor. + When both interceptors are used, this `post_list_with_metadata` interceptor runs after the + `post_list` interceptor. The (possibly modified) response returned by + `post_list` will be passed to + `post_list_with_metadata`. + """ + return response, metadata + def pre_remove_resource_policies( self, request: compute.RemoveResourcePoliciesRegionDiskRequest, @@ -413,12 +595,35 @@ def post_remove_resource_policies( ) -> compute.Operation: """Post-rpc interceptor for remove_resource_policies - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_remove_resource_policies_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RegionDisks server but before - it is returned to user code. + it is returned to user code. This `post_remove_resource_policies` interceptor runs + before the `post_remove_resource_policies_with_metadata` interceptor. """ return response + def post_remove_resource_policies_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for remove_resource_policies + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionDisks server but before it is returned to user code. + + We recommend only using this `post_remove_resource_policies_with_metadata` + interceptor in new development instead of the `post_remove_resource_policies` interceptor. + When both interceptors are used, this `post_remove_resource_policies_with_metadata` interceptor runs after the + `post_remove_resource_policies` interceptor. The (possibly modified) response returned by + `post_remove_resource_policies` will be passed to + `post_remove_resource_policies_with_metadata`. 
+ """ + return response, metadata + def pre_resize( self, request: compute.ResizeRegionDiskRequest, @@ -436,12 +641,35 @@ def pre_resize( def post_resize(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for resize - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_resize_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RegionDisks server but before - it is returned to user code. + it is returned to user code. This `post_resize` interceptor runs + before the `post_resize_with_metadata` interceptor. """ return response + def post_resize_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for resize + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionDisks server but before it is returned to user code. + + We recommend only using this `post_resize_with_metadata` + interceptor in new development instead of the `post_resize` interceptor. + When both interceptors are used, this `post_resize_with_metadata` interceptor runs after the + `post_resize` interceptor. The (possibly modified) response returned by + `post_resize` will be passed to + `post_resize_with_metadata`. + """ + return response, metadata + def pre_set_iam_policy( self, request: compute.SetIamPolicyRegionDiskRequest, @@ -459,12 +687,35 @@ def pre_set_iam_policy( def post_set_iam_policy(self, response: compute.Policy) -> compute.Policy: """Post-rpc interceptor for set_iam_policy - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_set_iam_policy_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RegionDisks server but before - it is returned to user code. + it is returned to user code. This `post_set_iam_policy` interceptor runs + before the `post_set_iam_policy_with_metadata` interceptor. """ return response + def post_set_iam_policy_with_metadata( + self, + response: compute.Policy, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Policy, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for set_iam_policy + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionDisks server but before it is returned to user code. + + We recommend only using this `post_set_iam_policy_with_metadata` + interceptor in new development instead of the `post_set_iam_policy` interceptor. + When both interceptors are used, this `post_set_iam_policy_with_metadata` interceptor runs after the + `post_set_iam_policy` interceptor. The (possibly modified) response returned by + `post_set_iam_policy` will be passed to + `post_set_iam_policy_with_metadata`. + """ + return response, metadata + def pre_set_labels( self, request: compute.SetLabelsRegionDiskRequest, @@ -482,12 +733,35 @@ def pre_set_labels( def post_set_labels(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for set_labels - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_set_labels_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RegionDisks server but before - it is returned to user code. 
+ it is returned to user code. This `post_set_labels` interceptor runs + before the `post_set_labels_with_metadata` interceptor. """ return response + def post_set_labels_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for set_labels + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionDisks server but before it is returned to user code. + + We recommend only using this `post_set_labels_with_metadata` + interceptor in new development instead of the `post_set_labels` interceptor. + When both interceptors are used, this `post_set_labels_with_metadata` interceptor runs after the + `post_set_labels` interceptor. The (possibly modified) response returned by + `post_set_labels` will be passed to + `post_set_labels_with_metadata`. + """ + return response, metadata + def pre_start_async_replication( self, request: compute.StartAsyncReplicationRegionDiskRequest, @@ -508,12 +782,35 @@ def post_start_async_replication( ) -> compute.Operation: """Post-rpc interceptor for start_async_replication - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_start_async_replication_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RegionDisks server but before - it is returned to user code. + it is returned to user code. This `post_start_async_replication` interceptor runs + before the `post_start_async_replication_with_metadata` interceptor. """ return response + def post_start_async_replication_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for start_async_replication + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionDisks server but before it is returned to user code. + + We recommend only using this `post_start_async_replication_with_metadata` + interceptor in new development instead of the `post_start_async_replication` interceptor. + When both interceptors are used, this `post_start_async_replication_with_metadata` interceptor runs after the + `post_start_async_replication` interceptor. The (possibly modified) response returned by + `post_start_async_replication` will be passed to + `post_start_async_replication_with_metadata`. + """ + return response, metadata + def pre_stop_async_replication( self, request: compute.StopAsyncReplicationRegionDiskRequest, @@ -534,12 +831,35 @@ def post_stop_async_replication( ) -> compute.Operation: """Post-rpc interceptor for stop_async_replication - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_stop_async_replication_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RegionDisks server but before - it is returned to user code. + it is returned to user code. This `post_stop_async_replication` interceptor runs + before the `post_stop_async_replication_with_metadata` interceptor. 
""" return response + def post_stop_async_replication_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for stop_async_replication + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionDisks server but before it is returned to user code. + + We recommend only using this `post_stop_async_replication_with_metadata` + interceptor in new development instead of the `post_stop_async_replication` interceptor. + When both interceptors are used, this `post_stop_async_replication_with_metadata` interceptor runs after the + `post_stop_async_replication` interceptor. The (possibly modified) response returned by + `post_stop_async_replication` will be passed to + `post_stop_async_replication_with_metadata`. + """ + return response, metadata + def pre_stop_group_async_replication( self, request: compute.StopGroupAsyncReplicationRegionDiskRequest, @@ -560,12 +880,35 @@ def post_stop_group_async_replication( ) -> compute.Operation: """Post-rpc interceptor for stop_group_async_replication - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_stop_group_async_replication_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RegionDisks server but before - it is returned to user code. + it is returned to user code. This `post_stop_group_async_replication` interceptor runs + before the `post_stop_group_async_replication_with_metadata` interceptor. """ return response + def post_stop_group_async_replication_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for stop_group_async_replication + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionDisks server but before it is returned to user code. + + We recommend only using this `post_stop_group_async_replication_with_metadata` + interceptor in new development instead of the `post_stop_group_async_replication` interceptor. + When both interceptors are used, this `post_stop_group_async_replication_with_metadata` interceptor runs after the + `post_stop_group_async_replication` interceptor. The (possibly modified) response returned by + `post_stop_group_async_replication` will be passed to + `post_stop_group_async_replication_with_metadata`. + """ + return response, metadata + def pre_test_iam_permissions( self, request: compute.TestIamPermissionsRegionDiskRequest, @@ -586,12 +929,37 @@ def post_test_iam_permissions( ) -> compute.TestPermissionsResponse: """Post-rpc interceptor for test_iam_permissions - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_test_iam_permissions_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RegionDisks server but before - it is returned to user code. + it is returned to user code. This `post_test_iam_permissions` interceptor runs + before the `post_test_iam_permissions_with_metadata` interceptor. 
""" return response + def post_test_iam_permissions_with_metadata( + self, + response: compute.TestPermissionsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.TestPermissionsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for test_iam_permissions + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionDisks server but before it is returned to user code. + + We recommend only using this `post_test_iam_permissions_with_metadata` + interceptor in new development instead of the `post_test_iam_permissions` interceptor. + When both interceptors are used, this `post_test_iam_permissions_with_metadata` interceptor runs after the + `post_test_iam_permissions` interceptor. The (possibly modified) response returned by + `post_test_iam_permissions` will be passed to + `post_test_iam_permissions_with_metadata`. + """ + return response, metadata + def pre_update( self, request: compute.UpdateRegionDiskRequest, @@ -609,12 +977,35 @@ def pre_update( def post_update(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for update - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RegionDisks server but before - it is returned to user code. + it is returned to user code. This `post_update` interceptor runs + before the `post_update_with_metadata` interceptor. """ return response + def post_update_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionDisks server but before it is returned to user code. + + We recommend only using this `post_update_with_metadata` + interceptor in new development instead of the `post_update` interceptor. + When both interceptors are used, this `post_update_with_metadata` interceptor runs after the + `post_update` interceptor. The (possibly modified) response returned by + `post_update` will be passed to + `post_update_with_metadata`. 
+ """ + return response, metadata + @dataclasses.dataclass class RegionDisksRestStub: @@ -850,6 +1241,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_add_resource_policies(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_add_resource_policies_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1019,6 +1414,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_bulk_insert(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_bulk_insert_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1184,6 +1583,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_snapshot(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_snapshot_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1343,6 +1746,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1497,6 +1904,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1666,6 +2075,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_iam_policy(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_iam_policy_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1831,6 +2244,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_insert(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_insert_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1973,6 +2390,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2140,6 +2559,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_remove_resource_policies(resp) + response_metadata = 
[(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_remove_resource_policies_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2305,6 +2728,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_resize(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_resize_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2482,6 +2909,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_set_iam_policy(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_set_iam_policy_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2649,6 +3080,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_set_labels(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_set_labels_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2816,6 +3251,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_start_async_replication(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_start_async_replication_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2977,6 +3416,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_stop_async_replication(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_stop_async_replication_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3148,6 +3591,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_stop_group_async_replication(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_stop_group_async_replication_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3298,6 +3745,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_test_iam_permissions(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_test_iam_permissions_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3463,6 +3914,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_with_metadata( + resp, response_metadata + ) if 
CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_health_check_services/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_health_check_services/client.py index a394a0334801..368d1873a182 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_health_check_services/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_health_check_services/client.py @@ -15,6 +15,8 @@ # from collections import OrderedDict import functools +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -463,6 +465,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_health_check_services/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_health_check_services/transports/rest.py index bd0ea4061c20..838a8835aa47 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_health_check_services/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_health_check_services/transports/rest.py @@ -132,12 +132,35 @@ def pre_delete( def post_delete(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for delete - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RegionHealthCheckServices server but before - it is returned to user code. + it is returned to user code. This `post_delete` interceptor runs + before the `post_delete_with_metadata` interceptor. """ return response + def post_delete_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionHealthCheckServices server but before it is returned to user code. + + We recommend only using this `post_delete_with_metadata` + interceptor in new development instead of the `post_delete` interceptor. + When both interceptors are used, this `post_delete_with_metadata` interceptor runs after the + `post_delete` interceptor. 
The (possibly modified) response returned by + `post_delete` will be passed to + `post_delete_with_metadata`. + """ + return response, metadata + def pre_get( self, request: compute.GetRegionHealthCheckServiceRequest, @@ -158,12 +181,35 @@ def post_get( ) -> compute.HealthCheckService: """Post-rpc interceptor for get - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RegionHealthCheckServices server but before - it is returned to user code. + it is returned to user code. This `post_get` interceptor runs + before the `post_get_with_metadata` interceptor. """ return response + def post_get_with_metadata( + self, + response: compute.HealthCheckService, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.HealthCheckService, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionHealthCheckServices server but before it is returned to user code. + + We recommend only using this `post_get_with_metadata` + interceptor in new development instead of the `post_get` interceptor. + When both interceptors are used, this `post_get_with_metadata` interceptor runs after the + `post_get` interceptor. The (possibly modified) response returned by + `post_get` will be passed to + `post_get_with_metadata`. + """ + return response, metadata + def pre_insert( self, request: compute.InsertRegionHealthCheckServiceRequest, @@ -182,12 +228,35 @@ def pre_insert( def post_insert(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for insert - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_insert_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RegionHealthCheckServices server but before - it is returned to user code. + it is returned to user code. This `post_insert` interceptor runs + before the `post_insert_with_metadata` interceptor. """ return response + def post_insert_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for insert + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionHealthCheckServices server but before it is returned to user code. + + We recommend only using this `post_insert_with_metadata` + interceptor in new development instead of the `post_insert` interceptor. + When both interceptors are used, this `post_insert_with_metadata` interceptor runs after the + `post_insert` interceptor. The (possibly modified) response returned by + `post_insert` will be passed to + `post_insert_with_metadata`. + """ + return response, metadata + def pre_list( self, request: compute.ListRegionHealthCheckServicesRequest, @@ -208,12 +277,37 @@ def post_list( ) -> compute.HealthCheckServicesList: """Post-rpc interceptor for list - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RegionHealthCheckServices server but before - it is returned to user code. + it is returned to user code. 
This `post_list` interceptor runs + before the `post_list_with_metadata` interceptor. """ return response + def post_list_with_metadata( + self, + response: compute.HealthCheckServicesList, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.HealthCheckServicesList, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionHealthCheckServices server but before it is returned to user code. + + We recommend only using this `post_list_with_metadata` + interceptor in new development instead of the `post_list` interceptor. + When both interceptors are used, this `post_list_with_metadata` interceptor runs after the + `post_list` interceptor. The (possibly modified) response returned by + `post_list` will be passed to + `post_list_with_metadata`. + """ + return response, metadata + def pre_patch( self, request: compute.PatchRegionHealthCheckServiceRequest, @@ -232,12 +326,35 @@ def pre_patch( def post_patch(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for patch - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_patch_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RegionHealthCheckServices server but before - it is returned to user code. + it is returned to user code. This `post_patch` interceptor runs + before the `post_patch_with_metadata` interceptor. """ return response + def post_patch_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for patch + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionHealthCheckServices server but before it is returned to user code. + + We recommend only using this `post_patch_with_metadata` + interceptor in new development instead of the `post_patch` interceptor. + When both interceptors are used, this `post_patch_with_metadata` interceptor runs after the + `post_patch` interceptor. The (possibly modified) response returned by + `post_patch` will be passed to + `post_patch_with_metadata`. 
+ """ + return response, metadata + @dataclasses.dataclass class RegionHealthCheckServicesRestStub: @@ -468,6 +585,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -613,6 +734,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -779,6 +902,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_insert(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_insert_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -922,6 +1049,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1088,6 +1217,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_patch(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_patch_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_health_checks/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_health_checks/client.py index b806419b537f..392fff87cb7b 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_health_checks/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_health_checks/client.py @@ -15,6 +15,8 @@ # from collections import OrderedDict import functools +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -461,6 +463,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. 
+ """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_health_checks/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_health_checks/transports/rest.py index c5258e0645f8..b7c6ea8740e1 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_health_checks/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_health_checks/transports/rest.py @@ -139,12 +139,35 @@ def pre_delete( def post_delete(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for delete - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RegionHealthChecks server but before - it is returned to user code. + it is returned to user code. This `post_delete` interceptor runs + before the `post_delete_with_metadata` interceptor. """ return response + def post_delete_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionHealthChecks server but before it is returned to user code. + + We recommend only using this `post_delete_with_metadata` + interceptor in new development instead of the `post_delete` interceptor. + When both interceptors are used, this `post_delete_with_metadata` interceptor runs after the + `post_delete` interceptor. The (possibly modified) response returned by + `post_delete` will be passed to + `post_delete_with_metadata`. + """ + return response, metadata + def pre_get( self, request: compute.GetRegionHealthCheckRequest, @@ -162,12 +185,35 @@ def pre_get( def post_get(self, response: compute.HealthCheck) -> compute.HealthCheck: """Post-rpc interceptor for get - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RegionHealthChecks server but before - it is returned to user code. + it is returned to user code. This `post_get` interceptor runs + before the `post_get_with_metadata` interceptor. """ return response + def post_get_with_metadata( + self, + response: compute.HealthCheck, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.HealthCheck, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionHealthChecks server but before it is returned to user code. 
+ + We recommend only using this `post_get_with_metadata` + interceptor in new development instead of the `post_get` interceptor. + When both interceptors are used, this `post_get_with_metadata` interceptor runs after the + `post_get` interceptor. The (possibly modified) response returned by + `post_get` will be passed to + `post_get_with_metadata`. + """ + return response, metadata + def pre_insert( self, request: compute.InsertRegionHealthCheckRequest, @@ -185,12 +231,35 @@ def pre_insert( def post_insert(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for insert - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_insert_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RegionHealthChecks server but before - it is returned to user code. + it is returned to user code. This `post_insert` interceptor runs + before the `post_insert_with_metadata` interceptor. """ return response + def post_insert_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for insert + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionHealthChecks server but before it is returned to user code. + + We recommend only using this `post_insert_with_metadata` + interceptor in new development instead of the `post_insert` interceptor. + When both interceptors are used, this `post_insert_with_metadata` interceptor runs after the + `post_insert` interceptor. The (possibly modified) response returned by + `post_insert` will be passed to + `post_insert_with_metadata`. + """ + return response, metadata + def pre_list( self, request: compute.ListRegionHealthChecksRequest, @@ -208,12 +277,35 @@ def pre_list( def post_list(self, response: compute.HealthCheckList) -> compute.HealthCheckList: """Post-rpc interceptor for list - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RegionHealthChecks server but before - it is returned to user code. + it is returned to user code. This `post_list` interceptor runs + before the `post_list_with_metadata` interceptor. """ return response + def post_list_with_metadata( + self, + response: compute.HealthCheckList, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.HealthCheckList, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionHealthChecks server but before it is returned to user code. + + We recommend only using this `post_list_with_metadata` + interceptor in new development instead of the `post_list` interceptor. + When both interceptors are used, this `post_list_with_metadata` interceptor runs after the + `post_list` interceptor. The (possibly modified) response returned by + `post_list` will be passed to + `post_list_with_metadata`. 
+ """ + return response, metadata + def pre_patch( self, request: compute.PatchRegionHealthCheckRequest, @@ -231,12 +323,35 @@ def pre_patch( def post_patch(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for patch - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_patch_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RegionHealthChecks server but before - it is returned to user code. + it is returned to user code. This `post_patch` interceptor runs + before the `post_patch_with_metadata` interceptor. """ return response + def post_patch_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for patch + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionHealthChecks server but before it is returned to user code. + + We recommend only using this `post_patch_with_metadata` + interceptor in new development instead of the `post_patch` interceptor. + When both interceptors are used, this `post_patch_with_metadata` interceptor runs after the + `post_patch` interceptor. The (possibly modified) response returned by + `post_patch` will be passed to + `post_patch_with_metadata`. + """ + return response, metadata + def pre_update( self, request: compute.UpdateRegionHealthCheckRequest, @@ -254,12 +369,35 @@ def pre_update( def post_update(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for update - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RegionHealthChecks server but before - it is returned to user code. + it is returned to user code. This `post_update` interceptor runs + before the `post_update_with_metadata` interceptor. """ return response + def post_update_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionHealthChecks server but before it is returned to user code. + + We recommend only using this `post_update_with_metadata` + interceptor in new development instead of the `post_update` interceptor. + When both interceptors are used, this `post_update_with_metadata` interceptor runs after the + `post_update` interceptor. The (possibly modified) response returned by + `post_update` will be passed to + `post_update_with_metadata`. 
+ """ + return response, metadata + @dataclasses.dataclass class RegionHealthChecksRestStub: @@ -489,6 +627,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -652,6 +794,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -821,6 +965,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_insert(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_insert_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -969,6 +1117,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1140,6 +1290,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_patch(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_patch_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1309,6 +1463,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instance_group_managers/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instance_group_managers/client.py index 4d9d72a5ed15..137bccda27e9 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instance_group_managers/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instance_group_managers/client.py @@ -15,6 +15,8 @@ # from collections import OrderedDict import functools +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -465,6 +467,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. 
+ """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instance_group_managers/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instance_group_managers/transports/rest.py index e7fe3883fb5e..14a677ad8d92 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instance_group_managers/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instance_group_managers/transports/rest.py @@ -276,12 +276,35 @@ def pre_abandon_instances( def post_abandon_instances(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for abandon_instances - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_abandon_instances_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RegionInstanceGroupManagers server but before - it is returned to user code. + it is returned to user code. This `post_abandon_instances` interceptor runs + before the `post_abandon_instances_with_metadata` interceptor. """ return response + def post_abandon_instances_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for abandon_instances + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionInstanceGroupManagers server but before it is returned to user code. + + We recommend only using this `post_abandon_instances_with_metadata` + interceptor in new development instead of the `post_abandon_instances` interceptor. + When both interceptors are used, this `post_abandon_instances_with_metadata` interceptor runs after the + `post_abandon_instances` interceptor. The (possibly modified) response returned by + `post_abandon_instances` will be passed to + `post_abandon_instances_with_metadata`. + """ + return response, metadata + def pre_apply_updates_to_instances( self, request: compute.ApplyUpdatesToInstancesRegionInstanceGroupManagerRequest, @@ -302,12 +325,35 @@ def post_apply_updates_to_instances( ) -> compute.Operation: """Post-rpc interceptor for apply_updates_to_instances - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_apply_updates_to_instances_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RegionInstanceGroupManagers server but before - it is returned to user code. + it is returned to user code. This `post_apply_updates_to_instances` interceptor runs + before the `post_apply_updates_to_instances_with_metadata` interceptor. 
""" return response + def post_apply_updates_to_instances_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for apply_updates_to_instances + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionInstanceGroupManagers server but before it is returned to user code. + + We recommend only using this `post_apply_updates_to_instances_with_metadata` + interceptor in new development instead of the `post_apply_updates_to_instances` interceptor. + When both interceptors are used, this `post_apply_updates_to_instances_with_metadata` interceptor runs after the + `post_apply_updates_to_instances` interceptor. The (possibly modified) response returned by + `post_apply_updates_to_instances` will be passed to + `post_apply_updates_to_instances_with_metadata`. + """ + return response, metadata + def pre_create_instances( self, request: compute.CreateInstancesRegionInstanceGroupManagerRequest, @@ -326,12 +372,35 @@ def pre_create_instances( def post_create_instances(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for create_instances - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_instances_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RegionInstanceGroupManagers server but before - it is returned to user code. + it is returned to user code. This `post_create_instances` interceptor runs + before the `post_create_instances_with_metadata` interceptor. """ return response + def post_create_instances_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_instances + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionInstanceGroupManagers server but before it is returned to user code. + + We recommend only using this `post_create_instances_with_metadata` + interceptor in new development instead of the `post_create_instances` interceptor. + When both interceptors are used, this `post_create_instances_with_metadata` interceptor runs after the + `post_create_instances` interceptor. The (possibly modified) response returned by + `post_create_instances` will be passed to + `post_create_instances_with_metadata`. + """ + return response, metadata + def pre_delete( self, request: compute.DeleteRegionInstanceGroupManagerRequest, @@ -350,12 +419,35 @@ def pre_delete( def post_delete(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for delete - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RegionInstanceGroupManagers server but before - it is returned to user code. + it is returned to user code. This `post_delete` interceptor runs + before the `post_delete_with_metadata` interceptor. 
""" return response + def post_delete_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionInstanceGroupManagers server but before it is returned to user code. + + We recommend only using this `post_delete_with_metadata` + interceptor in new development instead of the `post_delete` interceptor. + When both interceptors are used, this `post_delete_with_metadata` interceptor runs after the + `post_delete` interceptor. The (possibly modified) response returned by + `post_delete` will be passed to + `post_delete_with_metadata`. + """ + return response, metadata + def pre_delete_instances( self, request: compute.DeleteInstancesRegionInstanceGroupManagerRequest, @@ -374,12 +466,35 @@ def pre_delete_instances( def post_delete_instances(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for delete_instances - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_instances_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RegionInstanceGroupManagers server but before - it is returned to user code. + it is returned to user code. This `post_delete_instances` interceptor runs + before the `post_delete_instances_with_metadata` interceptor. """ return response + def post_delete_instances_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_instances + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionInstanceGroupManagers server but before it is returned to user code. + + We recommend only using this `post_delete_instances_with_metadata` + interceptor in new development instead of the `post_delete_instances` interceptor. + When both interceptors are used, this `post_delete_instances_with_metadata` interceptor runs after the + `post_delete_instances` interceptor. The (possibly modified) response returned by + `post_delete_instances` will be passed to + `post_delete_instances_with_metadata`. + """ + return response, metadata + def pre_delete_per_instance_configs( self, request: compute.DeletePerInstanceConfigsRegionInstanceGroupManagerRequest, @@ -400,12 +515,35 @@ def post_delete_per_instance_configs( ) -> compute.Operation: """Post-rpc interceptor for delete_per_instance_configs - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_per_instance_configs_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RegionInstanceGroupManagers server but before - it is returned to user code. + it is returned to user code. This `post_delete_per_instance_configs` interceptor runs + before the `post_delete_per_instance_configs_with_metadata` interceptor. 
""" return response + def post_delete_per_instance_configs_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_per_instance_configs + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionInstanceGroupManagers server but before it is returned to user code. + + We recommend only using this `post_delete_per_instance_configs_with_metadata` + interceptor in new development instead of the `post_delete_per_instance_configs` interceptor. + When both interceptors are used, this `post_delete_per_instance_configs_with_metadata` interceptor runs after the + `post_delete_per_instance_configs` interceptor. The (possibly modified) response returned by + `post_delete_per_instance_configs` will be passed to + `post_delete_per_instance_configs_with_metadata`. + """ + return response, metadata + def pre_get( self, request: compute.GetRegionInstanceGroupManagerRequest, @@ -426,12 +564,35 @@ def post_get( ) -> compute.InstanceGroupManager: """Post-rpc interceptor for get - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RegionInstanceGroupManagers server but before - it is returned to user code. + it is returned to user code. This `post_get` interceptor runs + before the `post_get_with_metadata` interceptor. """ return response + def post_get_with_metadata( + self, + response: compute.InstanceGroupManager, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.InstanceGroupManager, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionInstanceGroupManagers server but before it is returned to user code. + + We recommend only using this `post_get_with_metadata` + interceptor in new development instead of the `post_get` interceptor. + When both interceptors are used, this `post_get_with_metadata` interceptor runs after the + `post_get` interceptor. The (possibly modified) response returned by + `post_get` will be passed to + `post_get_with_metadata`. + """ + return response, metadata + def pre_insert( self, request: compute.InsertRegionInstanceGroupManagerRequest, @@ -450,12 +611,35 @@ def pre_insert( def post_insert(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for insert - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_insert_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RegionInstanceGroupManagers server but before - it is returned to user code. + it is returned to user code. This `post_insert` interceptor runs + before the `post_insert_with_metadata` interceptor. """ return response + def post_insert_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for insert + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionInstanceGroupManagers server but before it is returned to user code. 
+ + We recommend only using this `post_insert_with_metadata` + interceptor in new development instead of the `post_insert` interceptor. + When both interceptors are used, this `post_insert_with_metadata` interceptor runs after the + `post_insert` interceptor. The (possibly modified) response returned by + `post_insert` will be passed to + `post_insert_with_metadata`. + """ + return response, metadata + def pre_list( self, request: compute.ListRegionInstanceGroupManagersRequest, @@ -476,12 +660,37 @@ def post_list( ) -> compute.RegionInstanceGroupManagerList: """Post-rpc interceptor for list - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RegionInstanceGroupManagers server but before - it is returned to user code. + it is returned to user code. This `post_list` interceptor runs + before the `post_list_with_metadata` interceptor. """ return response + def post_list_with_metadata( + self, + response: compute.RegionInstanceGroupManagerList, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.RegionInstanceGroupManagerList, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionInstanceGroupManagers server but before it is returned to user code. + + We recommend only using this `post_list_with_metadata` + interceptor in new development instead of the `post_list` interceptor. + When both interceptors are used, this `post_list_with_metadata` interceptor runs after the + `post_list` interceptor. The (possibly modified) response returned by + `post_list` will be passed to + `post_list_with_metadata`. + """ + return response, metadata + def pre_list_errors( self, request: compute.ListErrorsRegionInstanceGroupManagersRequest, @@ -502,12 +711,38 @@ def post_list_errors( ) -> compute.RegionInstanceGroupManagersListErrorsResponse: """Post-rpc interceptor for list_errors - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_errors_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RegionInstanceGroupManagers server but before - it is returned to user code. + it is returned to user code. This `post_list_errors` interceptor runs + before the `post_list_errors_with_metadata` interceptor. """ return response + def post_list_errors_with_metadata( + self, + response: compute.RegionInstanceGroupManagersListErrorsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.RegionInstanceGroupManagersListErrorsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_errors + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionInstanceGroupManagers server but before it is returned to user code. + + We recommend only using this `post_list_errors_with_metadata` + interceptor in new development instead of the `post_list_errors` interceptor. + When both interceptors are used, this `post_list_errors_with_metadata` interceptor runs after the + `post_list_errors` interceptor. The (possibly modified) response returned by + `post_list_errors` will be passed to + `post_list_errors_with_metadata`. 
+ """ + return response, metadata + def pre_list_managed_instances( self, request: compute.ListManagedInstancesRegionInstanceGroupManagersRequest, @@ -528,12 +763,38 @@ def post_list_managed_instances( ) -> compute.RegionInstanceGroupManagersListInstancesResponse: """Post-rpc interceptor for list_managed_instances - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_managed_instances_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RegionInstanceGroupManagers server but before - it is returned to user code. + it is returned to user code. This `post_list_managed_instances` interceptor runs + before the `post_list_managed_instances_with_metadata` interceptor. """ return response + def post_list_managed_instances_with_metadata( + self, + response: compute.RegionInstanceGroupManagersListInstancesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.RegionInstanceGroupManagersListInstancesResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_managed_instances + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionInstanceGroupManagers server but before it is returned to user code. + + We recommend only using this `post_list_managed_instances_with_metadata` + interceptor in new development instead of the `post_list_managed_instances` interceptor. + When both interceptors are used, this `post_list_managed_instances_with_metadata` interceptor runs after the + `post_list_managed_instances` interceptor. The (possibly modified) response returned by + `post_list_managed_instances` will be passed to + `post_list_managed_instances_with_metadata`. + """ + return response, metadata + def pre_list_per_instance_configs( self, request: compute.ListPerInstanceConfigsRegionInstanceGroupManagersRequest, @@ -554,12 +815,38 @@ def post_list_per_instance_configs( ) -> compute.RegionInstanceGroupManagersListInstanceConfigsResp: """Post-rpc interceptor for list_per_instance_configs - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_per_instance_configs_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RegionInstanceGroupManagers server but before - it is returned to user code. + it is returned to user code. This `post_list_per_instance_configs` interceptor runs + before the `post_list_per_instance_configs_with_metadata` interceptor. """ return response + def post_list_per_instance_configs_with_metadata( + self, + response: compute.RegionInstanceGroupManagersListInstanceConfigsResp, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.RegionInstanceGroupManagersListInstanceConfigsResp, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_per_instance_configs + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionInstanceGroupManagers server but before it is returned to user code. + + We recommend only using this `post_list_per_instance_configs_with_metadata` + interceptor in new development instead of the `post_list_per_instance_configs` interceptor. + When both interceptors are used, this `post_list_per_instance_configs_with_metadata` interceptor runs after the + `post_list_per_instance_configs` interceptor. 
The (possibly modified) response returned by + `post_list_per_instance_configs` will be passed to + `post_list_per_instance_configs_with_metadata`. + """ + return response, metadata + def pre_patch( self, request: compute.PatchRegionInstanceGroupManagerRequest, @@ -578,12 +865,35 @@ def pre_patch( def post_patch(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for patch - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_patch_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RegionInstanceGroupManagers server but before - it is returned to user code. + it is returned to user code. This `post_patch` interceptor runs + before the `post_patch_with_metadata` interceptor. """ return response + def post_patch_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for patch + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionInstanceGroupManagers server but before it is returned to user code. + + We recommend only using this `post_patch_with_metadata` + interceptor in new development instead of the `post_patch` interceptor. + When both interceptors are used, this `post_patch_with_metadata` interceptor runs after the + `post_patch` interceptor. The (possibly modified) response returned by + `post_patch` will be passed to + `post_patch_with_metadata`. + """ + return response, metadata + def pre_patch_per_instance_configs( self, request: compute.PatchPerInstanceConfigsRegionInstanceGroupManagerRequest, @@ -604,12 +914,35 @@ def post_patch_per_instance_configs( ) -> compute.Operation: """Post-rpc interceptor for patch_per_instance_configs - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_patch_per_instance_configs_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RegionInstanceGroupManagers server but before - it is returned to user code. + it is returned to user code. This `post_patch_per_instance_configs` interceptor runs + before the `post_patch_per_instance_configs_with_metadata` interceptor. """ return response + def post_patch_per_instance_configs_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for patch_per_instance_configs + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionInstanceGroupManagers server but before it is returned to user code. + + We recommend only using this `post_patch_per_instance_configs_with_metadata` + interceptor in new development instead of the `post_patch_per_instance_configs` interceptor. + When both interceptors are used, this `post_patch_per_instance_configs_with_metadata` interceptor runs after the + `post_patch_per_instance_configs` interceptor. The (possibly modified) response returned by + `post_patch_per_instance_configs` will be passed to + `post_patch_per_instance_configs_with_metadata`. 
+ """ + return response, metadata + def pre_recreate_instances( self, request: compute.RecreateInstancesRegionInstanceGroupManagerRequest, @@ -628,12 +961,35 @@ def pre_recreate_instances( def post_recreate_instances(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for recreate_instances - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_recreate_instances_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RegionInstanceGroupManagers server but before - it is returned to user code. + it is returned to user code. This `post_recreate_instances` interceptor runs + before the `post_recreate_instances_with_metadata` interceptor. """ return response + def post_recreate_instances_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for recreate_instances + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionInstanceGroupManagers server but before it is returned to user code. + + We recommend only using this `post_recreate_instances_with_metadata` + interceptor in new development instead of the `post_recreate_instances` interceptor. + When both interceptors are used, this `post_recreate_instances_with_metadata` interceptor runs after the + `post_recreate_instances` interceptor. The (possibly modified) response returned by + `post_recreate_instances` will be passed to + `post_recreate_instances_with_metadata`. + """ + return response, metadata + def pre_resize( self, request: compute.ResizeRegionInstanceGroupManagerRequest, @@ -652,12 +1008,35 @@ def pre_resize( def post_resize(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for resize - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_resize_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RegionInstanceGroupManagers server but before - it is returned to user code. + it is returned to user code. This `post_resize` interceptor runs + before the `post_resize_with_metadata` interceptor. """ return response + def post_resize_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for resize + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionInstanceGroupManagers server but before it is returned to user code. + + We recommend only using this `post_resize_with_metadata` + interceptor in new development instead of the `post_resize` interceptor. + When both interceptors are used, this `post_resize_with_metadata` interceptor runs after the + `post_resize` interceptor. The (possibly modified) response returned by + `post_resize` will be passed to + `post_resize_with_metadata`. + """ + return response, metadata + def pre_resume_instances( self, request: compute.ResumeInstancesRegionInstanceGroupManagerRequest, @@ -676,12 +1055,35 @@ def pre_resume_instances( def post_resume_instances(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for resume_instances - Override in a subclass to manipulate the response + DEPRECATED. 
Please use the `post_resume_instances_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RegionInstanceGroupManagers server but before - it is returned to user code. + it is returned to user code. This `post_resume_instances` interceptor runs + before the `post_resume_instances_with_metadata` interceptor. """ return response + def post_resume_instances_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for resume_instances + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionInstanceGroupManagers server but before it is returned to user code. + + We recommend only using this `post_resume_instances_with_metadata` + interceptor in new development instead of the `post_resume_instances` interceptor. + When both interceptors are used, this `post_resume_instances_with_metadata` interceptor runs after the + `post_resume_instances` interceptor. The (possibly modified) response returned by + `post_resume_instances` will be passed to + `post_resume_instances_with_metadata`. + """ + return response, metadata + def pre_set_instance_template( self, request: compute.SetInstanceTemplateRegionInstanceGroupManagerRequest, @@ -702,12 +1104,35 @@ def post_set_instance_template( ) -> compute.Operation: """Post-rpc interceptor for set_instance_template - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_set_instance_template_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RegionInstanceGroupManagers server but before - it is returned to user code. + it is returned to user code. This `post_set_instance_template` interceptor runs + before the `post_set_instance_template_with_metadata` interceptor. """ return response + def post_set_instance_template_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for set_instance_template + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionInstanceGroupManagers server but before it is returned to user code. + + We recommend only using this `post_set_instance_template_with_metadata` + interceptor in new development instead of the `post_set_instance_template` interceptor. + When both interceptors are used, this `post_set_instance_template_with_metadata` interceptor runs after the + `post_set_instance_template` interceptor. The (possibly modified) response returned by + `post_set_instance_template` will be passed to + `post_set_instance_template_with_metadata`. + """ + return response, metadata + def pre_set_target_pools( self, request: compute.SetTargetPoolsRegionInstanceGroupManagerRequest, @@ -726,12 +1151,35 @@ def pre_set_target_pools( def post_set_target_pools(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for set_target_pools - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_set_target_pools_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RegionInstanceGroupManagers server but before - it is returned to user code. 
+ it is returned to user code. This `post_set_target_pools` interceptor runs + before the `post_set_target_pools_with_metadata` interceptor. """ return response + def post_set_target_pools_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for set_target_pools + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionInstanceGroupManagers server but before it is returned to user code. + + We recommend only using this `post_set_target_pools_with_metadata` + interceptor in new development instead of the `post_set_target_pools` interceptor. + When both interceptors are used, this `post_set_target_pools_with_metadata` interceptor runs after the + `post_set_target_pools` interceptor. The (possibly modified) response returned by + `post_set_target_pools` will be passed to + `post_set_target_pools_with_metadata`. + """ + return response, metadata + def pre_start_instances( self, request: compute.StartInstancesRegionInstanceGroupManagerRequest, @@ -750,12 +1198,35 @@ def pre_start_instances( def post_start_instances(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for start_instances - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_start_instances_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RegionInstanceGroupManagers server but before - it is returned to user code. + it is returned to user code. This `post_start_instances` interceptor runs + before the `post_start_instances_with_metadata` interceptor. """ return response + def post_start_instances_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for start_instances + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionInstanceGroupManagers server but before it is returned to user code. + + We recommend only using this `post_start_instances_with_metadata` + interceptor in new development instead of the `post_start_instances` interceptor. + When both interceptors are used, this `post_start_instances_with_metadata` interceptor runs after the + `post_start_instances` interceptor. The (possibly modified) response returned by + `post_start_instances` will be passed to + `post_start_instances_with_metadata`. + """ + return response, metadata + def pre_stop_instances( self, request: compute.StopInstancesRegionInstanceGroupManagerRequest, @@ -774,12 +1245,35 @@ def pre_stop_instances( def post_stop_instances(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for stop_instances - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_stop_instances_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RegionInstanceGroupManagers server but before - it is returned to user code. + it is returned to user code. This `post_stop_instances` interceptor runs + before the `post_stop_instances_with_metadata` interceptor. 
""" return response + def post_stop_instances_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for stop_instances + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionInstanceGroupManagers server but before it is returned to user code. + + We recommend only using this `post_stop_instances_with_metadata` + interceptor in new development instead of the `post_stop_instances` interceptor. + When both interceptors are used, this `post_stop_instances_with_metadata` interceptor runs after the + `post_stop_instances` interceptor. The (possibly modified) response returned by + `post_stop_instances` will be passed to + `post_stop_instances_with_metadata`. + """ + return response, metadata + def pre_suspend_instances( self, request: compute.SuspendInstancesRegionInstanceGroupManagerRequest, @@ -798,12 +1292,35 @@ def pre_suspend_instances( def post_suspend_instances(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for suspend_instances - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_suspend_instances_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RegionInstanceGroupManagers server but before - it is returned to user code. + it is returned to user code. This `post_suspend_instances` interceptor runs + before the `post_suspend_instances_with_metadata` interceptor. """ return response + def post_suspend_instances_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for suspend_instances + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionInstanceGroupManagers server but before it is returned to user code. + + We recommend only using this `post_suspend_instances_with_metadata` + interceptor in new development instead of the `post_suspend_instances` interceptor. + When both interceptors are used, this `post_suspend_instances_with_metadata` interceptor runs after the + `post_suspend_instances` interceptor. The (possibly modified) response returned by + `post_suspend_instances` will be passed to + `post_suspend_instances_with_metadata`. + """ + return response, metadata + def pre_update_per_instance_configs( self, request: compute.UpdatePerInstanceConfigsRegionInstanceGroupManagerRequest, @@ -824,12 +1341,35 @@ def post_update_per_instance_configs( ) -> compute.Operation: """Post-rpc interceptor for update_per_instance_configs - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_per_instance_configs_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RegionInstanceGroupManagers server but before - it is returned to user code. + it is returned to user code. This `post_update_per_instance_configs` interceptor runs + before the `post_update_per_instance_configs_with_metadata` interceptor. 
""" return response + def post_update_per_instance_configs_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_per_instance_configs + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionInstanceGroupManagers server but before it is returned to user code. + + We recommend only using this `post_update_per_instance_configs_with_metadata` + interceptor in new development instead of the `post_update_per_instance_configs` interceptor. + When both interceptors are used, this `post_update_per_instance_configs_with_metadata` interceptor runs after the + `post_update_per_instance_configs` interceptor. The (possibly modified) response returned by + `post_update_per_instance_configs` will be passed to + `post_update_per_instance_configs_with_metadata`. + """ + return response, metadata + @dataclasses.dataclass class RegionInstanceGroupManagersRestStub: @@ -1068,6 +1608,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_abandon_instances(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_abandon_instances_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1239,6 +1783,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_apply_updates_to_instances(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_apply_updates_to_instances_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1409,6 +1957,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_instances(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_instances_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1569,6 +2121,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1739,6 +2295,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_instances(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_instances_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1910,6 +2470,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_per_instance_configs(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_per_instance_configs_with_metadata( + resp, response_metadata + ) if 
CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2062,6 +2626,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2228,6 +2794,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_insert(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_insert_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2373,6 +2943,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2520,6 +3092,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_errors(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_errors_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2669,6 +3245,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_managed_instances(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_managed_instances_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2820,6 +3400,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_per_instance_configs(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_per_instance_configs_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2988,6 +3572,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_patch(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_patch_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3159,6 +3747,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_patch_per_instance_configs(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_patch_per_instance_configs_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3327,6 +3919,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = 
self._interceptor.post_recreate_instances(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_recreate_instances_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3487,6 +4083,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_resize(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_resize_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3657,6 +4257,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_resume_instances(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_resume_instances_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3825,6 +4429,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_set_instance_template(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_set_instance_template_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3995,6 +4603,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_set_target_pools(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_set_target_pools_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4163,6 +4775,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_start_instances(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_start_instances_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4331,6 +4947,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_stop_instances(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_stop_instances_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4499,6 +5119,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_suspend_instances(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_suspend_instances_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4670,6 +5294,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_per_instance_configs(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = 
self._interceptor.post_update_per_instance_configs_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instance_groups/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instance_groups/client.py index 36fa9df7e915..d76627fb1da6 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instance_groups/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instance_groups/client.py @@ -15,6 +15,8 @@ # from collections import OrderedDict import functools +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -461,6 +463,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instance_groups/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instance_groups/transports/rest.py index f062136e4df4..af7e0a3aea5b 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instance_groups/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instance_groups/transports/rest.py @@ -123,12 +123,35 @@ def pre_get( def post_get(self, response: compute.InstanceGroup) -> compute.InstanceGroup: """Post-rpc interceptor for get - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RegionInstanceGroups server but before - it is returned to user code. + it is returned to user code. This `post_get` interceptor runs + before the `post_get_with_metadata` interceptor. """ return response + def post_get_with_metadata( + self, + response: compute.InstanceGroup, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.InstanceGroup, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionInstanceGroups server but before it is returned to user code. + + We recommend only using this `post_get_with_metadata` + interceptor in new development instead of the `post_get` interceptor. + When both interceptors are used, this `post_get_with_metadata` interceptor runs after the + `post_get` interceptor. 
The (possibly modified) response returned by + `post_get` will be passed to + `post_get_with_metadata`. + """ + return response, metadata + def pre_list( self, request: compute.ListRegionInstanceGroupsRequest, @@ -148,12 +171,37 @@ def post_list( ) -> compute.RegionInstanceGroupList: """Post-rpc interceptor for list - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RegionInstanceGroups server but before - it is returned to user code. + it is returned to user code. This `post_list` interceptor runs + before the `post_list_with_metadata` interceptor. """ return response + def post_list_with_metadata( + self, + response: compute.RegionInstanceGroupList, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.RegionInstanceGroupList, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionInstanceGroups server but before it is returned to user code. + + We recommend only using this `post_list_with_metadata` + interceptor in new development instead of the `post_list` interceptor. + When both interceptors are used, this `post_list_with_metadata` interceptor runs after the + `post_list` interceptor. The (possibly modified) response returned by + `post_list` will be passed to + `post_list_with_metadata`. + """ + return response, metadata + def pre_list_instances( self, request: compute.ListInstancesRegionInstanceGroupsRequest, @@ -174,12 +222,38 @@ def post_list_instances( ) -> compute.RegionInstanceGroupsListInstances: """Post-rpc interceptor for list_instances - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_instances_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RegionInstanceGroups server but before - it is returned to user code. + it is returned to user code. This `post_list_instances` interceptor runs + before the `post_list_instances_with_metadata` interceptor. """ return response + def post_list_instances_with_metadata( + self, + response: compute.RegionInstanceGroupsListInstances, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.RegionInstanceGroupsListInstances, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_instances + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionInstanceGroups server but before it is returned to user code. + + We recommend only using this `post_list_instances_with_metadata` + interceptor in new development instead of the `post_list_instances` interceptor. + When both interceptors are used, this `post_list_instances_with_metadata` interceptor runs after the + `post_list_instances` interceptor. The (possibly modified) response returned by + `post_list_instances` will be passed to + `post_list_instances_with_metadata`. + """ + return response, metadata + def pre_set_named_ports( self, request: compute.SetNamedPortsRegionInstanceGroupRequest, @@ -198,12 +272,35 @@ def pre_set_named_ports( def post_set_named_ports(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for set_named_ports - Override in a subclass to manipulate the response + DEPRECATED. 
Please use the `post_set_named_ports_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RegionInstanceGroups server but before - it is returned to user code. + it is returned to user code. This `post_set_named_ports` interceptor runs + before the `post_set_named_ports_with_metadata` interceptor. """ return response + def post_set_named_ports_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for set_named_ports + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionInstanceGroups server but before it is returned to user code. + + We recommend only using this `post_set_named_ports_with_metadata` + interceptor in new development instead of the `post_set_named_ports` interceptor. + When both interceptors are used, this `post_set_named_ports_with_metadata` interceptor runs after the + `post_set_named_ports` interceptor. The (possibly modified) response returned by + `post_set_named_ports` will be passed to + `post_set_named_ports_with_metadata`. + """ + return response, metadata + @dataclasses.dataclass class RegionInstanceGroupsRestStub: @@ -432,6 +529,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -578,6 +677,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -727,6 +828,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_instances(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_instances_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -895,6 +1000,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_set_named_ports(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_set_named_ports_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instance_templates/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instance_templates/client.py index 1e08b1bda2d8..de228abe0e6e 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instance_templates/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instance_templates/client.py @@ -15,6 +15,8 @@ # from collections import OrderedDict import functools +from http import HTTPStatus +import json import logging as 
std_logging import os import re @@ -463,6 +465,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instance_templates/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instance_templates/transports/rest.py index f7ddf7d8fe5a..15348ce90cbb 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instance_templates/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instance_templates/transports/rest.py @@ -124,12 +124,35 @@ def pre_delete( def post_delete(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for delete - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RegionInstanceTemplates server but before - it is returned to user code. + it is returned to user code. This `post_delete` interceptor runs + before the `post_delete_with_metadata` interceptor. """ return response + def post_delete_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionInstanceTemplates server but before it is returned to user code. + + We recommend only using this `post_delete_with_metadata` + interceptor in new development instead of the `post_delete` interceptor. + When both interceptors are used, this `post_delete_with_metadata` interceptor runs after the + `post_delete` interceptor. The (possibly modified) response returned by + `post_delete` will be passed to + `post_delete_with_metadata`. + """ + return response, metadata + def pre_get( self, request: compute.GetRegionInstanceTemplateRequest, @@ -148,12 +171,35 @@ def pre_get( def post_get(self, response: compute.InstanceTemplate) -> compute.InstanceTemplate: """Post-rpc interceptor for get - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RegionInstanceTemplates server but before - it is returned to user code. + it is returned to user code. 
This `post_get` interceptor runs + before the `post_get_with_metadata` interceptor. """ return response + def post_get_with_metadata( + self, + response: compute.InstanceTemplate, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.InstanceTemplate, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionInstanceTemplates server but before it is returned to user code. + + We recommend only using this `post_get_with_metadata` + interceptor in new development instead of the `post_get` interceptor. + When both interceptors are used, this `post_get_with_metadata` interceptor runs after the + `post_get` interceptor. The (possibly modified) response returned by + `post_get` will be passed to + `post_get_with_metadata`. + """ + return response, metadata + def pre_insert( self, request: compute.InsertRegionInstanceTemplateRequest, @@ -172,12 +218,35 @@ def pre_insert( def post_insert(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for insert - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_insert_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RegionInstanceTemplates server but before - it is returned to user code. + it is returned to user code. This `post_insert` interceptor runs + before the `post_insert_with_metadata` interceptor. """ return response + def post_insert_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for insert + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionInstanceTemplates server but before it is returned to user code. + + We recommend only using this `post_insert_with_metadata` + interceptor in new development instead of the `post_insert` interceptor. + When both interceptors are used, this `post_insert_with_metadata` interceptor runs after the + `post_insert` interceptor. The (possibly modified) response returned by + `post_insert` will be passed to + `post_insert_with_metadata`. + """ + return response, metadata + def pre_list( self, request: compute.ListRegionInstanceTemplatesRequest, @@ -198,12 +267,35 @@ def post_list( ) -> compute.InstanceTemplateList: """Post-rpc interceptor for list - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RegionInstanceTemplates server but before - it is returned to user code. + it is returned to user code. This `post_list` interceptor runs + before the `post_list_with_metadata` interceptor. """ return response + def post_list_with_metadata( + self, + response: compute.InstanceTemplateList, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.InstanceTemplateList, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionInstanceTemplates server but before it is returned to user code. + + We recommend only using this `post_list_with_metadata` + interceptor in new development instead of the `post_list` interceptor. 
+ When both interceptors are used, this `post_list_with_metadata` interceptor runs after the + `post_list` interceptor. The (possibly modified) response returned by + `post_list` will be passed to + `post_list_with_metadata`. + """ + return response, metadata + @dataclasses.dataclass class RegionInstanceTemplatesRestStub: @@ -432,6 +524,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -588,6 +684,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -754,6 +852,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_insert(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_insert_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -897,6 +999,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instances/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instances/client.py index f039c6e48b98..c737552eae16 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instances/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instances/client.py @@ -15,6 +15,8 @@ # from collections import OrderedDict import functools +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -460,6 +462,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. 
diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instances/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instances/transports/rest.py index 5500d15a738b..88fa7f6d328a 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instances/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instances/transports/rest.py @@ -99,12 +99,35 @@ def pre_bulk_insert( def post_bulk_insert(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for bulk_insert - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_bulk_insert_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RegionInstances server but before - it is returned to user code. + it is returned to user code. This `post_bulk_insert` interceptor runs + before the `post_bulk_insert_with_metadata` interceptor. """ return response + def post_bulk_insert_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for bulk_insert + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionInstances server but before it is returned to user code. + + We recommend only using this `post_bulk_insert_with_metadata` + interceptor in new development instead of the `post_bulk_insert` interceptor. + When both interceptors are used, this `post_bulk_insert_with_metadata` interceptor runs after the + `post_bulk_insert` interceptor. The (possibly modified) response returned by + `post_bulk_insert` will be passed to + `post_bulk_insert_with_metadata`. + """ + return response, metadata + @dataclasses.dataclass class RegionInstancesRestStub: @@ -338,6 +361,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_bulk_insert(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_bulk_insert_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instant_snapshots/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instant_snapshots/client.py index 0fa5694874c2..a9a0fbde5d2e 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instant_snapshots/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instant_snapshots/client.py @@ -15,6 +15,8 @@ # from collections import OrderedDict import functools +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -463,6 +465,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. 
+ """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instant_snapshots/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instant_snapshots/transports/rest.py index ce8ab0084d53..701538b9b660 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instant_snapshots/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instant_snapshots/transports/rest.py @@ -156,12 +156,35 @@ def pre_delete( def post_delete(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for delete - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RegionInstantSnapshots server but before - it is returned to user code. + it is returned to user code. This `post_delete` interceptor runs + before the `post_delete_with_metadata` interceptor. """ return response + def post_delete_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionInstantSnapshots server but before it is returned to user code. + + We recommend only using this `post_delete_with_metadata` + interceptor in new development instead of the `post_delete` interceptor. + When both interceptors are used, this `post_delete_with_metadata` interceptor runs after the + `post_delete` interceptor. The (possibly modified) response returned by + `post_delete` will be passed to + `post_delete_with_metadata`. + """ + return response, metadata + def pre_get( self, request: compute.GetRegionInstantSnapshotRequest, @@ -179,12 +202,35 @@ def pre_get( def post_get(self, response: compute.InstantSnapshot) -> compute.InstantSnapshot: """Post-rpc interceptor for get - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RegionInstantSnapshots server but before - it is returned to user code. + it is returned to user code. This `post_get` interceptor runs + before the `post_get_with_metadata` interceptor. 
""" return response + def post_get_with_metadata( + self, + response: compute.InstantSnapshot, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.InstantSnapshot, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionInstantSnapshots server but before it is returned to user code. + + We recommend only using this `post_get_with_metadata` + interceptor in new development instead of the `post_get` interceptor. + When both interceptors are used, this `post_get_with_metadata` interceptor runs after the + `post_get` interceptor. The (possibly modified) response returned by + `post_get` will be passed to + `post_get_with_metadata`. + """ + return response, metadata + def pre_get_iam_policy( self, request: compute.GetIamPolicyRegionInstantSnapshotRequest, @@ -203,12 +249,35 @@ def pre_get_iam_policy( def post_get_iam_policy(self, response: compute.Policy) -> compute.Policy: """Post-rpc interceptor for get_iam_policy - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_iam_policy_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RegionInstantSnapshots server but before - it is returned to user code. + it is returned to user code. This `post_get_iam_policy` interceptor runs + before the `post_get_iam_policy_with_metadata` interceptor. """ return response + def post_get_iam_policy_with_metadata( + self, + response: compute.Policy, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Policy, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_iam_policy + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionInstantSnapshots server but before it is returned to user code. + + We recommend only using this `post_get_iam_policy_with_metadata` + interceptor in new development instead of the `post_get_iam_policy` interceptor. + When both interceptors are used, this `post_get_iam_policy_with_metadata` interceptor runs after the + `post_get_iam_policy` interceptor. The (possibly modified) response returned by + `post_get_iam_policy` will be passed to + `post_get_iam_policy_with_metadata`. + """ + return response, metadata + def pre_insert( self, request: compute.InsertRegionInstantSnapshotRequest, @@ -227,12 +296,35 @@ def pre_insert( def post_insert(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for insert - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_insert_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RegionInstantSnapshots server but before - it is returned to user code. + it is returned to user code. This `post_insert` interceptor runs + before the `post_insert_with_metadata` interceptor. """ return response + def post_insert_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for insert + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionInstantSnapshots server but before it is returned to user code. 
+ + We recommend only using this `post_insert_with_metadata` + interceptor in new development instead of the `post_insert` interceptor. + When both interceptors are used, this `post_insert_with_metadata` interceptor runs after the + `post_insert` interceptor. The (possibly modified) response returned by + `post_insert` will be passed to + `post_insert_with_metadata`. + """ + return response, metadata + def pre_list( self, request: compute.ListRegionInstantSnapshotsRequest, @@ -253,12 +345,35 @@ def post_list( ) -> compute.InstantSnapshotList: """Post-rpc interceptor for list - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RegionInstantSnapshots server but before - it is returned to user code. + it is returned to user code. This `post_list` interceptor runs + before the `post_list_with_metadata` interceptor. """ return response + def post_list_with_metadata( + self, + response: compute.InstantSnapshotList, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.InstantSnapshotList, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionInstantSnapshots server but before it is returned to user code. + + We recommend only using this `post_list_with_metadata` + interceptor in new development instead of the `post_list` interceptor. + When both interceptors are used, this `post_list_with_metadata` interceptor runs after the + `post_list` interceptor. The (possibly modified) response returned by + `post_list` will be passed to + `post_list_with_metadata`. + """ + return response, metadata + def pre_set_iam_policy( self, request: compute.SetIamPolicyRegionInstantSnapshotRequest, @@ -277,12 +392,35 @@ def pre_set_iam_policy( def post_set_iam_policy(self, response: compute.Policy) -> compute.Policy: """Post-rpc interceptor for set_iam_policy - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_set_iam_policy_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RegionInstantSnapshots server but before - it is returned to user code. + it is returned to user code. This `post_set_iam_policy` interceptor runs + before the `post_set_iam_policy_with_metadata` interceptor. """ return response + def post_set_iam_policy_with_metadata( + self, + response: compute.Policy, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Policy, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for set_iam_policy + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionInstantSnapshots server but before it is returned to user code. + + We recommend only using this `post_set_iam_policy_with_metadata` + interceptor in new development instead of the `post_set_iam_policy` interceptor. + When both interceptors are used, this `post_set_iam_policy_with_metadata` interceptor runs after the + `post_set_iam_policy` interceptor. The (possibly modified) response returned by + `post_set_iam_policy` will be passed to + `post_set_iam_policy_with_metadata`. 
+ """ + return response, metadata + def pre_set_labels( self, request: compute.SetLabelsRegionInstantSnapshotRequest, @@ -301,12 +439,35 @@ def pre_set_labels( def post_set_labels(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for set_labels - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_set_labels_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RegionInstantSnapshots server but before - it is returned to user code. + it is returned to user code. This `post_set_labels` interceptor runs + before the `post_set_labels_with_metadata` interceptor. """ return response + def post_set_labels_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for set_labels + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionInstantSnapshots server but before it is returned to user code. + + We recommend only using this `post_set_labels_with_metadata` + interceptor in new development instead of the `post_set_labels` interceptor. + When both interceptors are used, this `post_set_labels_with_metadata` interceptor runs after the + `post_set_labels` interceptor. The (possibly modified) response returned by + `post_set_labels` will be passed to + `post_set_labels_with_metadata`. + """ + return response, metadata + def pre_test_iam_permissions( self, request: compute.TestIamPermissionsRegionInstantSnapshotRequest, @@ -327,12 +488,37 @@ def post_test_iam_permissions( ) -> compute.TestPermissionsResponse: """Post-rpc interceptor for test_iam_permissions - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_test_iam_permissions_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RegionInstantSnapshots server but before - it is returned to user code. + it is returned to user code. This `post_test_iam_permissions` interceptor runs + before the `post_test_iam_permissions_with_metadata` interceptor. """ return response + def post_test_iam_permissions_with_metadata( + self, + response: compute.TestPermissionsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.TestPermissionsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for test_iam_permissions + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionInstantSnapshots server but before it is returned to user code. + + We recommend only using this `post_test_iam_permissions_with_metadata` + interceptor in new development instead of the `post_test_iam_permissions` interceptor. + When both interceptors are used, this `post_test_iam_permissions_with_metadata` interceptor runs after the + `post_test_iam_permissions` interceptor. The (possibly modified) response returned by + `post_test_iam_permissions` will be passed to + `post_test_iam_permissions_with_metadata`. 
+ """ + return response, metadata + @dataclasses.dataclass class RegionInstantSnapshotsRestStub: @@ -561,6 +747,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -707,6 +897,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -873,6 +1065,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_iam_policy(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_iam_policy_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1039,6 +1235,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_insert(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_insert_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1184,6 +1384,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1356,6 +1558,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_set_iam_policy(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_set_iam_policy_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1522,6 +1728,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_set_labels(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_set_labels_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1675,6 +1885,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_test_iam_permissions(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_test_iam_permissions_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_network_endpoint_groups/client.py 
b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_network_endpoint_groups/client.py index dbce8049e4a0..4512e724913e 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_network_endpoint_groups/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_network_endpoint_groups/client.py @@ -15,6 +15,8 @@ # from collections import OrderedDict import functools +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -465,6 +467,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_network_endpoint_groups/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_network_endpoint_groups/transports/rest.py index caf1f7fbe745..ab572891a334 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_network_endpoint_groups/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_network_endpoint_groups/transports/rest.py @@ -150,12 +150,35 @@ def post_attach_network_endpoints( ) -> compute.Operation: """Post-rpc interceptor for attach_network_endpoints - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_attach_network_endpoints_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RegionNetworkEndpointGroups server but before - it is returned to user code. + it is returned to user code. This `post_attach_network_endpoints` interceptor runs + before the `post_attach_network_endpoints_with_metadata` interceptor. """ return response + def post_attach_network_endpoints_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for attach_network_endpoints + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionNetworkEndpointGroups server but before it is returned to user code. + + We recommend only using this `post_attach_network_endpoints_with_metadata` + interceptor in new development instead of the `post_attach_network_endpoints` interceptor. + When both interceptors are used, this `post_attach_network_endpoints_with_metadata` interceptor runs after the + `post_attach_network_endpoints` interceptor. 
The (possibly modified) response returned by + `post_attach_network_endpoints` will be passed to + `post_attach_network_endpoints_with_metadata`. + """ + return response, metadata + def pre_delete( self, request: compute.DeleteRegionNetworkEndpointGroupRequest, @@ -174,12 +197,35 @@ def pre_delete( def post_delete(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for delete - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RegionNetworkEndpointGroups server but before - it is returned to user code. + it is returned to user code. This `post_delete` interceptor runs + before the `post_delete_with_metadata` interceptor. """ return response + def post_delete_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionNetworkEndpointGroups server but before it is returned to user code. + + We recommend only using this `post_delete_with_metadata` + interceptor in new development instead of the `post_delete` interceptor. + When both interceptors are used, this `post_delete_with_metadata` interceptor runs after the + `post_delete` interceptor. The (possibly modified) response returned by + `post_delete` will be passed to + `post_delete_with_metadata`. + """ + return response, metadata + def pre_detach_network_endpoints( self, request: compute.DetachNetworkEndpointsRegionNetworkEndpointGroupRequest, @@ -200,12 +246,35 @@ def post_detach_network_endpoints( ) -> compute.Operation: """Post-rpc interceptor for detach_network_endpoints - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_detach_network_endpoints_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RegionNetworkEndpointGroups server but before - it is returned to user code. + it is returned to user code. This `post_detach_network_endpoints` interceptor runs + before the `post_detach_network_endpoints_with_metadata` interceptor. """ return response + def post_detach_network_endpoints_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for detach_network_endpoints + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionNetworkEndpointGroups server but before it is returned to user code. + + We recommend only using this `post_detach_network_endpoints_with_metadata` + interceptor in new development instead of the `post_detach_network_endpoints` interceptor. + When both interceptors are used, this `post_detach_network_endpoints_with_metadata` interceptor runs after the + `post_detach_network_endpoints` interceptor. The (possibly modified) response returned by + `post_detach_network_endpoints` will be passed to + `post_detach_network_endpoints_with_metadata`. 
+ """ + return response, metadata + def pre_get( self, request: compute.GetRegionNetworkEndpointGroupRequest, @@ -226,12 +295,35 @@ def post_get( ) -> compute.NetworkEndpointGroup: """Post-rpc interceptor for get - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RegionNetworkEndpointGroups server but before - it is returned to user code. + it is returned to user code. This `post_get` interceptor runs + before the `post_get_with_metadata` interceptor. """ return response + def post_get_with_metadata( + self, + response: compute.NetworkEndpointGroup, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.NetworkEndpointGroup, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionNetworkEndpointGroups server but before it is returned to user code. + + We recommend only using this `post_get_with_metadata` + interceptor in new development instead of the `post_get` interceptor. + When both interceptors are used, this `post_get_with_metadata` interceptor runs after the + `post_get` interceptor. The (possibly modified) response returned by + `post_get` will be passed to + `post_get_with_metadata`. + """ + return response, metadata + def pre_insert( self, request: compute.InsertRegionNetworkEndpointGroupRequest, @@ -250,12 +342,35 @@ def pre_insert( def post_insert(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for insert - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_insert_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RegionNetworkEndpointGroups server but before - it is returned to user code. + it is returned to user code. This `post_insert` interceptor runs + before the `post_insert_with_metadata` interceptor. """ return response + def post_insert_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for insert + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionNetworkEndpointGroups server but before it is returned to user code. + + We recommend only using this `post_insert_with_metadata` + interceptor in new development instead of the `post_insert` interceptor. + When both interceptors are used, this `post_insert_with_metadata` interceptor runs after the + `post_insert` interceptor. The (possibly modified) response returned by + `post_insert` will be passed to + `post_insert_with_metadata`. + """ + return response, metadata + def pre_list( self, request: compute.ListRegionNetworkEndpointGroupsRequest, @@ -276,12 +391,37 @@ def post_list( ) -> compute.NetworkEndpointGroupList: """Post-rpc interceptor for list - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RegionNetworkEndpointGroups server but before - it is returned to user code. + it is returned to user code. This `post_list` interceptor runs + before the `post_list_with_metadata` interceptor. 
""" return response + def post_list_with_metadata( + self, + response: compute.NetworkEndpointGroupList, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.NetworkEndpointGroupList, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionNetworkEndpointGroups server but before it is returned to user code. + + We recommend only using this `post_list_with_metadata` + interceptor in new development instead of the `post_list` interceptor. + When both interceptors are used, this `post_list_with_metadata` interceptor runs after the + `post_list` interceptor. The (possibly modified) response returned by + `post_list` will be passed to + `post_list_with_metadata`. + """ + return response, metadata + def pre_list_network_endpoints( self, request: compute.ListNetworkEndpointsRegionNetworkEndpointGroupsRequest, @@ -302,12 +442,38 @@ def post_list_network_endpoints( ) -> compute.NetworkEndpointGroupsListNetworkEndpoints: """Post-rpc interceptor for list_network_endpoints - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_network_endpoints_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RegionNetworkEndpointGroups server but before - it is returned to user code. + it is returned to user code. This `post_list_network_endpoints` interceptor runs + before the `post_list_network_endpoints_with_metadata` interceptor. """ return response + def post_list_network_endpoints_with_metadata( + self, + response: compute.NetworkEndpointGroupsListNetworkEndpoints, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.NetworkEndpointGroupsListNetworkEndpoints, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_network_endpoints + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionNetworkEndpointGroups server but before it is returned to user code. + + We recommend only using this `post_list_network_endpoints_with_metadata` + interceptor in new development instead of the `post_list_network_endpoints` interceptor. + When both interceptors are used, this `post_list_network_endpoints_with_metadata` interceptor runs after the + `post_list_network_endpoints` interceptor. The (possibly modified) response returned by + `post_list_network_endpoints` will be passed to + `post_list_network_endpoints_with_metadata`. 
+ """ + return response, metadata + @dataclasses.dataclass class RegionNetworkEndpointGroupsRestStub: @@ -548,6 +714,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_attach_network_endpoints(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_attach_network_endpoints_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -708,6 +878,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -878,6 +1052,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_detach_network_endpoints(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_detach_network_endpoints_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1029,6 +1207,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1195,6 +1375,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_insert(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_insert_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1338,6 +1522,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1485,6 +1671,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_network_endpoints(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_network_endpoints_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_network_firewall_policies/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_network_firewall_policies/client.py index 0fe4acd9ec43..e3c1bc3483f6 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_network_firewall_policies/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_network_firewall_policies/client.py @@ -15,6 +15,8 @@ # from collections import OrderedDict import 
functools +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -465,6 +467,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_network_firewall_policies/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_network_firewall_policies/transports/rest.py index fd6aa01c219f..b7ee14bf433b 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_network_firewall_policies/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_network_firewall_policies/transports/rest.py @@ -228,12 +228,35 @@ def pre_add_association( def post_add_association(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for add_association - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_add_association_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RegionNetworkFirewallPolicies server but before - it is returned to user code. + it is returned to user code. This `post_add_association` interceptor runs + before the `post_add_association_with_metadata` interceptor. """ return response + def post_add_association_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for add_association + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionNetworkFirewallPolicies server but before it is returned to user code. + + We recommend only using this `post_add_association_with_metadata` + interceptor in new development instead of the `post_add_association` interceptor. + When both interceptors are used, this `post_add_association_with_metadata` interceptor runs after the + `post_add_association` interceptor. The (possibly modified) response returned by + `post_add_association` will be passed to + `post_add_association_with_metadata`. + """ + return response, metadata + def pre_add_rule( self, request: compute.AddRuleRegionNetworkFirewallPolicyRequest, @@ -252,12 +275,35 @@ def pre_add_rule( def post_add_rule(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for add_rule - Override in a subclass to manipulate the response + DEPRECATED. 
Please use the `post_add_rule_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RegionNetworkFirewallPolicies server but before - it is returned to user code. + it is returned to user code. This `post_add_rule` interceptor runs + before the `post_add_rule_with_metadata` interceptor. """ return response + def post_add_rule_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for add_rule + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionNetworkFirewallPolicies server but before it is returned to user code. + + We recommend only using this `post_add_rule_with_metadata` + interceptor in new development instead of the `post_add_rule` interceptor. + When both interceptors are used, this `post_add_rule_with_metadata` interceptor runs after the + `post_add_rule` interceptor. The (possibly modified) response returned by + `post_add_rule` will be passed to + `post_add_rule_with_metadata`. + """ + return response, metadata + def pre_clone_rules( self, request: compute.CloneRulesRegionNetworkFirewallPolicyRequest, @@ -276,12 +322,35 @@ def pre_clone_rules( def post_clone_rules(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for clone_rules - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_clone_rules_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RegionNetworkFirewallPolicies server but before - it is returned to user code. + it is returned to user code. This `post_clone_rules` interceptor runs + before the `post_clone_rules_with_metadata` interceptor. """ return response + def post_clone_rules_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for clone_rules + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionNetworkFirewallPolicies server but before it is returned to user code. + + We recommend only using this `post_clone_rules_with_metadata` + interceptor in new development instead of the `post_clone_rules` interceptor. + When both interceptors are used, this `post_clone_rules_with_metadata` interceptor runs after the + `post_clone_rules` interceptor. The (possibly modified) response returned by + `post_clone_rules` will be passed to + `post_clone_rules_with_metadata`. + """ + return response, metadata + def pre_delete( self, request: compute.DeleteRegionNetworkFirewallPolicyRequest, @@ -300,12 +369,35 @@ def pre_delete( def post_delete(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for delete - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RegionNetworkFirewallPolicies server but before - it is returned to user code. + it is returned to user code. This `post_delete` interceptor runs + before the `post_delete_with_metadata` interceptor. 
""" return response + def post_delete_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionNetworkFirewallPolicies server but before it is returned to user code. + + We recommend only using this `post_delete_with_metadata` + interceptor in new development instead of the `post_delete` interceptor. + When both interceptors are used, this `post_delete_with_metadata` interceptor runs after the + `post_delete` interceptor. The (possibly modified) response returned by + `post_delete` will be passed to + `post_delete_with_metadata`. + """ + return response, metadata + def pre_get( self, request: compute.GetRegionNetworkFirewallPolicyRequest, @@ -324,12 +416,35 @@ def pre_get( def post_get(self, response: compute.FirewallPolicy) -> compute.FirewallPolicy: """Post-rpc interceptor for get - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RegionNetworkFirewallPolicies server but before - it is returned to user code. + it is returned to user code. This `post_get` interceptor runs + before the `post_get_with_metadata` interceptor. """ return response + def post_get_with_metadata( + self, + response: compute.FirewallPolicy, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.FirewallPolicy, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionNetworkFirewallPolicies server but before it is returned to user code. + + We recommend only using this `post_get_with_metadata` + interceptor in new development instead of the `post_get` interceptor. + When both interceptors are used, this `post_get_with_metadata` interceptor runs after the + `post_get` interceptor. The (possibly modified) response returned by + `post_get` will be passed to + `post_get_with_metadata`. + """ + return response, metadata + def pre_get_association( self, request: compute.GetAssociationRegionNetworkFirewallPolicyRequest, @@ -350,12 +465,37 @@ def post_get_association( ) -> compute.FirewallPolicyAssociation: """Post-rpc interceptor for get_association - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_association_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RegionNetworkFirewallPolicies server but before - it is returned to user code. + it is returned to user code. This `post_get_association` interceptor runs + before the `post_get_association_with_metadata` interceptor. """ return response + def post_get_association_with_metadata( + self, + response: compute.FirewallPolicyAssociation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.FirewallPolicyAssociation, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for get_association + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionNetworkFirewallPolicies server but before it is returned to user code. 
+ + We recommend only using this `post_get_association_with_metadata` + interceptor in new development instead of the `post_get_association` interceptor. + When both interceptors are used, this `post_get_association_with_metadata` interceptor runs after the + `post_get_association` interceptor. The (possibly modified) response returned by + `post_get_association` will be passed to + `post_get_association_with_metadata`. + """ + return response, metadata + def pre_get_effective_firewalls( self, request: compute.GetEffectiveFirewallsRegionNetworkFirewallPolicyRequest, @@ -377,12 +517,38 @@ def post_get_effective_firewalls( ) -> compute.RegionNetworkFirewallPoliciesGetEffectiveFirewallsResponse: """Post-rpc interceptor for get_effective_firewalls - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_effective_firewalls_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RegionNetworkFirewallPolicies server but before - it is returned to user code. + it is returned to user code. This `post_get_effective_firewalls` interceptor runs + before the `post_get_effective_firewalls_with_metadata` interceptor. """ return response + def post_get_effective_firewalls_with_metadata( + self, + response: compute.RegionNetworkFirewallPoliciesGetEffectiveFirewallsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.RegionNetworkFirewallPoliciesGetEffectiveFirewallsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for get_effective_firewalls + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionNetworkFirewallPolicies server but before it is returned to user code. + + We recommend only using this `post_get_effective_firewalls_with_metadata` + interceptor in new development instead of the `post_get_effective_firewalls` interceptor. + When both interceptors are used, this `post_get_effective_firewalls_with_metadata` interceptor runs after the + `post_get_effective_firewalls` interceptor. The (possibly modified) response returned by + `post_get_effective_firewalls` will be passed to + `post_get_effective_firewalls_with_metadata`. + """ + return response, metadata + def pre_get_iam_policy( self, request: compute.GetIamPolicyRegionNetworkFirewallPolicyRequest, @@ -401,12 +567,35 @@ def pre_get_iam_policy( def post_get_iam_policy(self, response: compute.Policy) -> compute.Policy: """Post-rpc interceptor for get_iam_policy - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_iam_policy_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RegionNetworkFirewallPolicies server but before - it is returned to user code. + it is returned to user code. This `post_get_iam_policy` interceptor runs + before the `post_get_iam_policy_with_metadata` interceptor. """ return response + def post_get_iam_policy_with_metadata( + self, + response: compute.Policy, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Policy, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_iam_policy + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionNetworkFirewallPolicies server but before it is returned to user code. 
+ + We recommend only using this `post_get_iam_policy_with_metadata` + interceptor in new development instead of the `post_get_iam_policy` interceptor. + When both interceptors are used, this `post_get_iam_policy_with_metadata` interceptor runs after the + `post_get_iam_policy` interceptor. The (possibly modified) response returned by + `post_get_iam_policy` will be passed to + `post_get_iam_policy_with_metadata`. + """ + return response, metadata + def pre_get_rule( self, request: compute.GetRuleRegionNetworkFirewallPolicyRequest, @@ -427,12 +616,35 @@ def post_get_rule( ) -> compute.FirewallPolicyRule: """Post-rpc interceptor for get_rule - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_rule_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RegionNetworkFirewallPolicies server but before - it is returned to user code. + it is returned to user code. This `post_get_rule` interceptor runs + before the `post_get_rule_with_metadata` interceptor. """ return response + def post_get_rule_with_metadata( + self, + response: compute.FirewallPolicyRule, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.FirewallPolicyRule, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_rule + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionNetworkFirewallPolicies server but before it is returned to user code. + + We recommend only using this `post_get_rule_with_metadata` + interceptor in new development instead of the `post_get_rule` interceptor. + When both interceptors are used, this `post_get_rule_with_metadata` interceptor runs after the + `post_get_rule` interceptor. The (possibly modified) response returned by + `post_get_rule` will be passed to + `post_get_rule_with_metadata`. + """ + return response, metadata + def pre_insert( self, request: compute.InsertRegionNetworkFirewallPolicyRequest, @@ -451,12 +663,35 @@ def pre_insert( def post_insert(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for insert - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_insert_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RegionNetworkFirewallPolicies server but before - it is returned to user code. + it is returned to user code. This `post_insert` interceptor runs + before the `post_insert_with_metadata` interceptor. """ return response + def post_insert_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for insert + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionNetworkFirewallPolicies server but before it is returned to user code. + + We recommend only using this `post_insert_with_metadata` + interceptor in new development instead of the `post_insert` interceptor. + When both interceptors are used, this `post_insert_with_metadata` interceptor runs after the + `post_insert` interceptor. The (possibly modified) response returned by + `post_insert` will be passed to + `post_insert_with_metadata`. 
+ """ + return response, metadata + def pre_list( self, request: compute.ListRegionNetworkFirewallPoliciesRequest, @@ -477,12 +712,35 @@ def post_list( ) -> compute.FirewallPolicyList: """Post-rpc interceptor for list - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RegionNetworkFirewallPolicies server but before - it is returned to user code. + it is returned to user code. This `post_list` interceptor runs + before the `post_list_with_metadata` interceptor. """ return response + def post_list_with_metadata( + self, + response: compute.FirewallPolicyList, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.FirewallPolicyList, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionNetworkFirewallPolicies server but before it is returned to user code. + + We recommend only using this `post_list_with_metadata` + interceptor in new development instead of the `post_list` interceptor. + When both interceptors are used, this `post_list_with_metadata` interceptor runs after the + `post_list` interceptor. The (possibly modified) response returned by + `post_list` will be passed to + `post_list_with_metadata`. + """ + return response, metadata + def pre_patch( self, request: compute.PatchRegionNetworkFirewallPolicyRequest, @@ -501,12 +759,35 @@ def pre_patch( def post_patch(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for patch - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_patch_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RegionNetworkFirewallPolicies server but before - it is returned to user code. + it is returned to user code. This `post_patch` interceptor runs + before the `post_patch_with_metadata` interceptor. """ return response + def post_patch_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for patch + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionNetworkFirewallPolicies server but before it is returned to user code. + + We recommend only using this `post_patch_with_metadata` + interceptor in new development instead of the `post_patch` interceptor. + When both interceptors are used, this `post_patch_with_metadata` interceptor runs after the + `post_patch` interceptor. The (possibly modified) response returned by + `post_patch` will be passed to + `post_patch_with_metadata`. + """ + return response, metadata + def pre_patch_rule( self, request: compute.PatchRuleRegionNetworkFirewallPolicyRequest, @@ -525,12 +806,35 @@ def pre_patch_rule( def post_patch_rule(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for patch_rule - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_patch_rule_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RegionNetworkFirewallPolicies server but before - it is returned to user code. + it is returned to user code. 
This `post_patch_rule` interceptor runs + before the `post_patch_rule_with_metadata` interceptor. """ return response + def post_patch_rule_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for patch_rule + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionNetworkFirewallPolicies server but before it is returned to user code. + + We recommend only using this `post_patch_rule_with_metadata` + interceptor in new development instead of the `post_patch_rule` interceptor. + When both interceptors are used, this `post_patch_rule_with_metadata` interceptor runs after the + `post_patch_rule` interceptor. The (possibly modified) response returned by + `post_patch_rule` will be passed to + `post_patch_rule_with_metadata`. + """ + return response, metadata + def pre_remove_association( self, request: compute.RemoveAssociationRegionNetworkFirewallPolicyRequest, @@ -549,12 +853,35 @@ def pre_remove_association( def post_remove_association(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for remove_association - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_remove_association_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RegionNetworkFirewallPolicies server but before - it is returned to user code. + it is returned to user code. This `post_remove_association` interceptor runs + before the `post_remove_association_with_metadata` interceptor. """ return response + def post_remove_association_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for remove_association + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionNetworkFirewallPolicies server but before it is returned to user code. + + We recommend only using this `post_remove_association_with_metadata` + interceptor in new development instead of the `post_remove_association` interceptor. + When both interceptors are used, this `post_remove_association_with_metadata` interceptor runs after the + `post_remove_association` interceptor. The (possibly modified) response returned by + `post_remove_association` will be passed to + `post_remove_association_with_metadata`. + """ + return response, metadata + def pre_remove_rule( self, request: compute.RemoveRuleRegionNetworkFirewallPolicyRequest, @@ -573,12 +900,35 @@ def pre_remove_rule( def post_remove_rule(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for remove_rule - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_remove_rule_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RegionNetworkFirewallPolicies server but before - it is returned to user code. + it is returned to user code. This `post_remove_rule` interceptor runs + before the `post_remove_rule_with_metadata` interceptor. 
""" return response + def post_remove_rule_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for remove_rule + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionNetworkFirewallPolicies server but before it is returned to user code. + + We recommend only using this `post_remove_rule_with_metadata` + interceptor in new development instead of the `post_remove_rule` interceptor. + When both interceptors are used, this `post_remove_rule_with_metadata` interceptor runs after the + `post_remove_rule` interceptor. The (possibly modified) response returned by + `post_remove_rule` will be passed to + `post_remove_rule_with_metadata`. + """ + return response, metadata + def pre_set_iam_policy( self, request: compute.SetIamPolicyRegionNetworkFirewallPolicyRequest, @@ -597,12 +947,35 @@ def pre_set_iam_policy( def post_set_iam_policy(self, response: compute.Policy) -> compute.Policy: """Post-rpc interceptor for set_iam_policy - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_set_iam_policy_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RegionNetworkFirewallPolicies server but before - it is returned to user code. + it is returned to user code. This `post_set_iam_policy` interceptor runs + before the `post_set_iam_policy_with_metadata` interceptor. """ return response + def post_set_iam_policy_with_metadata( + self, + response: compute.Policy, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Policy, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for set_iam_policy + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionNetworkFirewallPolicies server but before it is returned to user code. + + We recommend only using this `post_set_iam_policy_with_metadata` + interceptor in new development instead of the `post_set_iam_policy` interceptor. + When both interceptors are used, this `post_set_iam_policy_with_metadata` interceptor runs after the + `post_set_iam_policy` interceptor. The (possibly modified) response returned by + `post_set_iam_policy` will be passed to + `post_set_iam_policy_with_metadata`. + """ + return response, metadata + def pre_test_iam_permissions( self, request: compute.TestIamPermissionsRegionNetworkFirewallPolicyRequest, @@ -623,12 +996,37 @@ def post_test_iam_permissions( ) -> compute.TestPermissionsResponse: """Post-rpc interceptor for test_iam_permissions - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_test_iam_permissions_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RegionNetworkFirewallPolicies server but before - it is returned to user code. + it is returned to user code. This `post_test_iam_permissions` interceptor runs + before the `post_test_iam_permissions_with_metadata` interceptor. 
""" return response + def post_test_iam_permissions_with_metadata( + self, + response: compute.TestPermissionsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.TestPermissionsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for test_iam_permissions + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionNetworkFirewallPolicies server but before it is returned to user code. + + We recommend only using this `post_test_iam_permissions_with_metadata` + interceptor in new development instead of the `post_test_iam_permissions` interceptor. + When both interceptors are used, this `post_test_iam_permissions_with_metadata` interceptor runs after the + `post_test_iam_permissions` interceptor. The (possibly modified) response returned by + `post_test_iam_permissions` will be passed to + `post_test_iam_permissions_with_metadata`. + """ + return response, metadata + @dataclasses.dataclass class RegionNetworkFirewallPoliciesRestStub: @@ -867,6 +1265,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_add_association(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_add_association_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1035,6 +1437,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_add_rule(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_add_rule_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1197,6 +1603,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_clone_rules(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_clone_rules_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1357,6 +1767,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1502,6 +1916,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1645,6 +2061,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_association(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_association_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1798,6 +2218,10 @@ def __call__( 
json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_effective_firewalls(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_effective_firewalls_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1968,6 +2392,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_iam_policy(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_iam_policy_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2117,6 +2545,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_rule(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_rule_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2283,6 +2715,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_insert(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_insert_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2426,6 +2862,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2592,6 +3030,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_patch(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_patch_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2760,6 +3202,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_patch_rule(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_patch_rule_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2922,6 +3368,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_remove_association(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_remove_association_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3084,6 +3534,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_remove_rule(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_remove_rule_with_metadata( + 
resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3258,6 +3712,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_set_iam_policy(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_set_iam_policy_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3409,6 +3867,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_test_iam_permissions(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_test_iam_permissions_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_notification_endpoints/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_notification_endpoints/client.py index ec840c2311b0..22ed4a0a3528 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_notification_endpoints/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_notification_endpoints/client.py @@ -15,6 +15,8 @@ # from collections import OrderedDict import functools +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -465,6 +467,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_notification_endpoints/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_notification_endpoints/transports/rest.py index edbf0ba208d4..264486a4979a 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_notification_endpoints/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_notification_endpoints/transports/rest.py @@ -124,12 +124,35 @@ def pre_delete( def post_delete(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for delete - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response after it is returned by the RegionNotificationEndpoints server but before - it is returned to user code. + it is returned to user code. This `post_delete` interceptor runs + before the `post_delete_with_metadata` interceptor. """ return response + def post_delete_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionNotificationEndpoints server but before it is returned to user code. + + We recommend only using this `post_delete_with_metadata` + interceptor in new development instead of the `post_delete` interceptor. + When both interceptors are used, this `post_delete_with_metadata` interceptor runs after the + `post_delete` interceptor. The (possibly modified) response returned by + `post_delete` will be passed to + `post_delete_with_metadata`. + """ + return response, metadata + def pre_get( self, request: compute.GetRegionNotificationEndpointRequest, @@ -150,12 +173,35 @@ def post_get( ) -> compute.NotificationEndpoint: """Post-rpc interceptor for get - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RegionNotificationEndpoints server but before - it is returned to user code. + it is returned to user code. This `post_get` interceptor runs + before the `post_get_with_metadata` interceptor. """ return response + def post_get_with_metadata( + self, + response: compute.NotificationEndpoint, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.NotificationEndpoint, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionNotificationEndpoints server but before it is returned to user code. + + We recommend only using this `post_get_with_metadata` + interceptor in new development instead of the `post_get` interceptor. + When both interceptors are used, this `post_get_with_metadata` interceptor runs after the + `post_get` interceptor. The (possibly modified) response returned by + `post_get` will be passed to + `post_get_with_metadata`. + """ + return response, metadata + def pre_insert( self, request: compute.InsertRegionNotificationEndpointRequest, @@ -174,12 +220,35 @@ def pre_insert( def post_insert(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for insert - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_insert_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RegionNotificationEndpoints server but before - it is returned to user code. + it is returned to user code. This `post_insert` interceptor runs + before the `post_insert_with_metadata` interceptor. 
""" return response + def post_insert_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for insert + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionNotificationEndpoints server but before it is returned to user code. + + We recommend only using this `post_insert_with_metadata` + interceptor in new development instead of the `post_insert` interceptor. + When both interceptors are used, this `post_insert_with_metadata` interceptor runs after the + `post_insert` interceptor. The (possibly modified) response returned by + `post_insert` will be passed to + `post_insert_with_metadata`. + """ + return response, metadata + def pre_list( self, request: compute.ListRegionNotificationEndpointsRequest, @@ -200,12 +269,37 @@ def post_list( ) -> compute.NotificationEndpointList: """Post-rpc interceptor for list - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RegionNotificationEndpoints server but before - it is returned to user code. + it is returned to user code. This `post_list` interceptor runs + before the `post_list_with_metadata` interceptor. """ return response + def post_list_with_metadata( + self, + response: compute.NotificationEndpointList, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.NotificationEndpointList, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionNotificationEndpoints server but before it is returned to user code. + + We recommend only using this `post_list_with_metadata` + interceptor in new development instead of the `post_list` interceptor. + When both interceptors are used, this `post_list_with_metadata` interceptor runs after the + `post_list` interceptor. The (possibly modified) response returned by + `post_list` will be passed to + `post_list_with_metadata`. 
+ """ + return response, metadata + @dataclasses.dataclass class RegionNotificationEndpointsRestStub: @@ -436,6 +530,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -586,6 +684,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -752,6 +852,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_insert(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_insert_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -895,6 +999,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_operations/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_operations/client.py index 4acc182d914d..bf7f5b56a90a 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_operations/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_operations/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -458,6 +460,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. 
diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_operations/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_operations/transports/rest.py index 48d16f6f571a..0009d9b06ce8 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_operations/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_operations/transports/rest.py @@ -125,12 +125,37 @@ def post_delete( ) -> compute.DeleteRegionOperationResponse: """Post-rpc interceptor for delete - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RegionOperations server but before - it is returned to user code. + it is returned to user code. This `post_delete` interceptor runs + before the `post_delete_with_metadata` interceptor. """ return response + def post_delete_with_metadata( + self, + response: compute.DeleteRegionOperationResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.DeleteRegionOperationResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for delete + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionOperations server but before it is returned to user code. + + We recommend only using this `post_delete_with_metadata` + interceptor in new development instead of the `post_delete` interceptor. + When both interceptors are used, this `post_delete_with_metadata` interceptor runs after the + `post_delete` interceptor. The (possibly modified) response returned by + `post_delete` will be passed to + `post_delete_with_metadata`. + """ + return response, metadata + def pre_get( self, request: compute.GetRegionOperationRequest, @@ -148,12 +173,35 @@ def pre_get( def post_get(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for get - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RegionOperations server but before - it is returned to user code. + it is returned to user code. This `post_get` interceptor runs + before the `post_get_with_metadata` interceptor. """ return response + def post_get_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionOperations server but before it is returned to user code. + + We recommend only using this `post_get_with_metadata` + interceptor in new development instead of the `post_get` interceptor. + When both interceptors are used, this `post_get_with_metadata` interceptor runs after the + `post_get` interceptor. The (possibly modified) response returned by + `post_get` will be passed to + `post_get_with_metadata`. 
+ """ + return response, metadata + def pre_list( self, request: compute.ListRegionOperationsRequest, @@ -171,12 +219,35 @@ def pre_list( def post_list(self, response: compute.OperationList) -> compute.OperationList: """Post-rpc interceptor for list - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RegionOperations server but before - it is returned to user code. + it is returned to user code. This `post_list` interceptor runs + before the `post_list_with_metadata` interceptor. """ return response + def post_list_with_metadata( + self, + response: compute.OperationList, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.OperationList, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionOperations server but before it is returned to user code. + + We recommend only using this `post_list_with_metadata` + interceptor in new development instead of the `post_list` interceptor. + When both interceptors are used, this `post_list_with_metadata` interceptor runs after the + `post_list` interceptor. The (possibly modified) response returned by + `post_list` will be passed to + `post_list_with_metadata`. + """ + return response, metadata + def pre_wait( self, request: compute.WaitRegionOperationRequest, @@ -194,12 +265,35 @@ def pre_wait( def post_wait(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for wait - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_wait_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RegionOperations server but before - it is returned to user code. + it is returned to user code. This `post_wait` interceptor runs + before the `post_wait_with_metadata` interceptor. """ return response + def post_wait_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for wait + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionOperations server but before it is returned to user code. + + We recommend only using this `post_wait_with_metadata` + interceptor in new development instead of the `post_wait` interceptor. + When both interceptors are used, this `post_wait_with_metadata` interceptor runs after the + `post_wait` interceptor. The (possibly modified) response returned by + `post_wait` will be passed to + `post_wait_with_metadata`. 
+ """ + return response, metadata + @dataclasses.dataclass class RegionOperationsRestStub: @@ -417,6 +511,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -580,6 +678,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -726,6 +826,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -887,6 +989,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_wait(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_wait_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_security_policies/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_security_policies/client.py index 5397a4f2c083..aa60a2548827 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_security_policies/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_security_policies/client.py @@ -15,6 +15,8 @@ # from collections import OrderedDict import functools +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -463,6 +465,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. 
diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_security_policies/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_security_policies/transports/rest.py index 944ecf5f46ac..78d4924dec25 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_security_policies/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_security_policies/transports/rest.py @@ -172,12 +172,35 @@ def pre_add_rule( def post_add_rule(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for add_rule - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_add_rule_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RegionSecurityPolicies server but before - it is returned to user code. + it is returned to user code. This `post_add_rule` interceptor runs + before the `post_add_rule_with_metadata` interceptor. """ return response + def post_add_rule_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for add_rule + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionSecurityPolicies server but before it is returned to user code. + + We recommend only using this `post_add_rule_with_metadata` + interceptor in new development instead of the `post_add_rule` interceptor. + When both interceptors are used, this `post_add_rule_with_metadata` interceptor runs after the + `post_add_rule` interceptor. The (possibly modified) response returned by + `post_add_rule` will be passed to + `post_add_rule_with_metadata`. + """ + return response, metadata + def pre_delete( self, request: compute.DeleteRegionSecurityPolicyRequest, @@ -196,12 +219,35 @@ def pre_delete( def post_delete(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for delete - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RegionSecurityPolicies server but before - it is returned to user code. + it is returned to user code. This `post_delete` interceptor runs + before the `post_delete_with_metadata` interceptor. """ return response + def post_delete_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionSecurityPolicies server but before it is returned to user code. + + We recommend only using this `post_delete_with_metadata` + interceptor in new development instead of the `post_delete` interceptor. + When both interceptors are used, this `post_delete_with_metadata` interceptor runs after the + `post_delete` interceptor. The (possibly modified) response returned by + `post_delete` will be passed to + `post_delete_with_metadata`. 
+ """ + return response, metadata + def pre_get( self, request: compute.GetRegionSecurityPolicyRequest, @@ -219,12 +265,35 @@ def pre_get( def post_get(self, response: compute.SecurityPolicy) -> compute.SecurityPolicy: """Post-rpc interceptor for get - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RegionSecurityPolicies server but before - it is returned to user code. + it is returned to user code. This `post_get` interceptor runs + before the `post_get_with_metadata` interceptor. """ return response + def post_get_with_metadata( + self, + response: compute.SecurityPolicy, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.SecurityPolicy, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionSecurityPolicies server but before it is returned to user code. + + We recommend only using this `post_get_with_metadata` + interceptor in new development instead of the `post_get` interceptor. + When both interceptors are used, this `post_get_with_metadata` interceptor runs after the + `post_get` interceptor. The (possibly modified) response returned by + `post_get` will be passed to + `post_get_with_metadata`. + """ + return response, metadata + def pre_get_rule( self, request: compute.GetRuleRegionSecurityPolicyRequest, @@ -245,12 +314,35 @@ def post_get_rule( ) -> compute.SecurityPolicyRule: """Post-rpc interceptor for get_rule - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_rule_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RegionSecurityPolicies server but before - it is returned to user code. + it is returned to user code. This `post_get_rule` interceptor runs + before the `post_get_rule_with_metadata` interceptor. """ return response + def post_get_rule_with_metadata( + self, + response: compute.SecurityPolicyRule, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.SecurityPolicyRule, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_rule + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionSecurityPolicies server but before it is returned to user code. + + We recommend only using this `post_get_rule_with_metadata` + interceptor in new development instead of the `post_get_rule` interceptor. + When both interceptors are used, this `post_get_rule_with_metadata` interceptor runs after the + `post_get_rule` interceptor. The (possibly modified) response returned by + `post_get_rule` will be passed to + `post_get_rule_with_metadata`. + """ + return response, metadata + def pre_insert( self, request: compute.InsertRegionSecurityPolicyRequest, @@ -269,12 +361,35 @@ def pre_insert( def post_insert(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for insert - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_insert_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RegionSecurityPolicies server but before - it is returned to user code. + it is returned to user code. 
This `post_insert` interceptor runs + before the `post_insert_with_metadata` interceptor. """ return response + def post_insert_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for insert + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionSecurityPolicies server but before it is returned to user code. + + We recommend only using this `post_insert_with_metadata` + interceptor in new development instead of the `post_insert` interceptor. + When both interceptors are used, this `post_insert_with_metadata` interceptor runs after the + `post_insert` interceptor. The (possibly modified) response returned by + `post_insert` will be passed to + `post_insert_with_metadata`. + """ + return response, metadata + def pre_list( self, request: compute.ListRegionSecurityPoliciesRequest, @@ -295,12 +410,35 @@ def post_list( ) -> compute.SecurityPolicyList: """Post-rpc interceptor for list - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RegionSecurityPolicies server but before - it is returned to user code. + it is returned to user code. This `post_list` interceptor runs + before the `post_list_with_metadata` interceptor. """ return response + def post_list_with_metadata( + self, + response: compute.SecurityPolicyList, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.SecurityPolicyList, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionSecurityPolicies server but before it is returned to user code. + + We recommend only using this `post_list_with_metadata` + interceptor in new development instead of the `post_list` interceptor. + When both interceptors are used, this `post_list_with_metadata` interceptor runs after the + `post_list` interceptor. The (possibly modified) response returned by + `post_list` will be passed to + `post_list_with_metadata`. + """ + return response, metadata + def pre_patch( self, request: compute.PatchRegionSecurityPolicyRequest, @@ -319,12 +457,35 @@ def pre_patch( def post_patch(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for patch - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_patch_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RegionSecurityPolicies server but before - it is returned to user code. + it is returned to user code. This `post_patch` interceptor runs + before the `post_patch_with_metadata` interceptor. """ return response + def post_patch_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for patch + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionSecurityPolicies server but before it is returned to user code. + + We recommend only using this `post_patch_with_metadata` + interceptor in new development instead of the `post_patch` interceptor. 
+ When both interceptors are used, this `post_patch_with_metadata` interceptor runs after the + `post_patch` interceptor. The (possibly modified) response returned by + `post_patch` will be passed to + `post_patch_with_metadata`. + """ + return response, metadata + def pre_patch_rule( self, request: compute.PatchRuleRegionSecurityPolicyRequest, @@ -343,12 +504,35 @@ def pre_patch_rule( def post_patch_rule(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for patch_rule - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_patch_rule_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RegionSecurityPolicies server but before - it is returned to user code. + it is returned to user code. This `post_patch_rule` interceptor runs + before the `post_patch_rule_with_metadata` interceptor. """ return response + def post_patch_rule_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for patch_rule + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionSecurityPolicies server but before it is returned to user code. + + We recommend only using this `post_patch_rule_with_metadata` + interceptor in new development instead of the `post_patch_rule` interceptor. + When both interceptors are used, this `post_patch_rule_with_metadata` interceptor runs after the + `post_patch_rule` interceptor. The (possibly modified) response returned by + `post_patch_rule` will be passed to + `post_patch_rule_with_metadata`. + """ + return response, metadata + def pre_remove_rule( self, request: compute.RemoveRuleRegionSecurityPolicyRequest, @@ -367,12 +551,35 @@ def pre_remove_rule( def post_remove_rule(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for remove_rule - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_remove_rule_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RegionSecurityPolicies server but before - it is returned to user code. + it is returned to user code. This `post_remove_rule` interceptor runs + before the `post_remove_rule_with_metadata` interceptor. """ return response + def post_remove_rule_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for remove_rule + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionSecurityPolicies server but before it is returned to user code. + + We recommend only using this `post_remove_rule_with_metadata` + interceptor in new development instead of the `post_remove_rule` interceptor. + When both interceptors are used, this `post_remove_rule_with_metadata` interceptor runs after the + `post_remove_rule` interceptor. The (possibly modified) response returned by + `post_remove_rule` will be passed to + `post_remove_rule_with_metadata`. 
+ """ + return response, metadata + def pre_set_labels( self, request: compute.SetLabelsRegionSecurityPolicyRequest, @@ -391,12 +598,35 @@ def pre_set_labels( def post_set_labels(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for set_labels - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_set_labels_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RegionSecurityPolicies server but before - it is returned to user code. + it is returned to user code. This `post_set_labels` interceptor runs + before the `post_set_labels_with_metadata` interceptor. """ return response + def post_set_labels_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for set_labels + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionSecurityPolicies server but before it is returned to user code. + + We recommend only using this `post_set_labels_with_metadata` + interceptor in new development instead of the `post_set_labels` interceptor. + When both interceptors are used, this `post_set_labels_with_metadata` interceptor runs after the + `post_set_labels` interceptor. The (possibly modified) response returned by + `post_set_labels` will be passed to + `post_set_labels_with_metadata`. + """ + return response, metadata + @dataclasses.dataclass class RegionSecurityPoliciesRestStub: @@ -631,6 +861,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_add_rule(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_add_rule_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -791,6 +1025,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -940,6 +1178,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1087,6 +1327,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_rule(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_rule_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1253,6 +1497,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_insert(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_insert_with_metadata( + resp, response_metadata + ) if 
CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1396,6 +1644,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1562,6 +1812,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_patch(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_patch_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1728,6 +1982,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_patch_rule(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_patch_rule_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1888,6 +2146,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_remove_rule(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_remove_rule_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2054,6 +2316,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_set_labels(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_set_labels_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_ssl_certificates/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_ssl_certificates/client.py index 95b6fa9697ed..125f67334ff6 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_ssl_certificates/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_ssl_certificates/client.py @@ -15,6 +15,8 @@ # from collections import OrderedDict import functools +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -463,6 +465,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. 
+ """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_ssl_certificates/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_ssl_certificates/transports/rest.py index 0a623f2a845c..47da6f36956a 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_ssl_certificates/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_ssl_certificates/transports/rest.py @@ -124,12 +124,35 @@ def pre_delete( def post_delete(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for delete - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RegionSslCertificates server but before - it is returned to user code. + it is returned to user code. This `post_delete` interceptor runs + before the `post_delete_with_metadata` interceptor. """ return response + def post_delete_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionSslCertificates server but before it is returned to user code. + + We recommend only using this `post_delete_with_metadata` + interceptor in new development instead of the `post_delete` interceptor. + When both interceptors are used, this `post_delete_with_metadata` interceptor runs after the + `post_delete` interceptor. The (possibly modified) response returned by + `post_delete` will be passed to + `post_delete_with_metadata`. + """ + return response, metadata + def pre_get( self, request: compute.GetRegionSslCertificateRequest, @@ -147,12 +170,35 @@ def pre_get( def post_get(self, response: compute.SslCertificate) -> compute.SslCertificate: """Post-rpc interceptor for get - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RegionSslCertificates server but before - it is returned to user code. + it is returned to user code. This `post_get` interceptor runs + before the `post_get_with_metadata` interceptor. """ return response + def post_get_with_metadata( + self, + response: compute.SslCertificate, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.SslCertificate, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionSslCertificates server but before it is returned to user code. 
+ + We recommend only using this `post_get_with_metadata` + interceptor in new development instead of the `post_get` interceptor. + When both interceptors are used, this `post_get_with_metadata` interceptor runs after the + `post_get` interceptor. The (possibly modified) response returned by + `post_get` will be passed to + `post_get_with_metadata`. + """ + return response, metadata + def pre_insert( self, request: compute.InsertRegionSslCertificateRequest, @@ -171,12 +217,35 @@ def pre_insert( def post_insert(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for insert - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_insert_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RegionSslCertificates server but before - it is returned to user code. + it is returned to user code. This `post_insert` interceptor runs + before the `post_insert_with_metadata` interceptor. """ return response + def post_insert_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for insert + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionSslCertificates server but before it is returned to user code. + + We recommend only using this `post_insert_with_metadata` + interceptor in new development instead of the `post_insert` interceptor. + When both interceptors are used, this `post_insert_with_metadata` interceptor runs after the + `post_insert` interceptor. The (possibly modified) response returned by + `post_insert` will be passed to + `post_insert_with_metadata`. + """ + return response, metadata + def pre_list( self, request: compute.ListRegionSslCertificatesRequest, @@ -197,12 +266,35 @@ def post_list( ) -> compute.SslCertificateList: """Post-rpc interceptor for list - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RegionSslCertificates server but before - it is returned to user code. + it is returned to user code. This `post_list` interceptor runs + before the `post_list_with_metadata` interceptor. """ return response + def post_list_with_metadata( + self, + response: compute.SslCertificateList, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.SslCertificateList, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionSslCertificates server but before it is returned to user code. + + We recommend only using this `post_list_with_metadata` + interceptor in new development instead of the `post_list` interceptor. + When both interceptors are used, this `post_list_with_metadata` interceptor runs after the + `post_list` interceptor. The (possibly modified) response returned by + `post_list` will be passed to + `post_list_with_metadata`. 
+ """ + return response, metadata + @dataclasses.dataclass class RegionSslCertificatesRestStub: @@ -431,6 +523,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -595,6 +691,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -761,6 +859,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_insert(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_insert_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -905,6 +1007,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_ssl_policies/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_ssl_policies/client.py index aa70dd7b6b0c..fb14fddb31de 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_ssl_policies/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_ssl_policies/client.py @@ -15,6 +15,8 @@ # from collections import OrderedDict import functools +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -461,6 +463,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. 
diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_ssl_policies/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_ssl_policies/transports/rest.py index 7e0f71dd5fd4..23c658e3a650 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_ssl_policies/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_ssl_policies/transports/rest.py @@ -139,12 +139,35 @@ def pre_delete( def post_delete(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for delete - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RegionSslPolicies server but before - it is returned to user code. + it is returned to user code. This `post_delete` interceptor runs + before the `post_delete_with_metadata` interceptor. """ return response + def post_delete_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionSslPolicies server but before it is returned to user code. + + We recommend only using this `post_delete_with_metadata` + interceptor in new development instead of the `post_delete` interceptor. + When both interceptors are used, this `post_delete_with_metadata` interceptor runs after the + `post_delete` interceptor. The (possibly modified) response returned by + `post_delete` will be passed to + `post_delete_with_metadata`. + """ + return response, metadata + def pre_get( self, request: compute.GetRegionSslPolicyRequest, @@ -162,12 +185,35 @@ def pre_get( def post_get(self, response: compute.SslPolicy) -> compute.SslPolicy: """Post-rpc interceptor for get - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RegionSslPolicies server but before - it is returned to user code. + it is returned to user code. This `post_get` interceptor runs + before the `post_get_with_metadata` interceptor. """ return response + def post_get_with_metadata( + self, + response: compute.SslPolicy, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.SslPolicy, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionSslPolicies server but before it is returned to user code. + + We recommend only using this `post_get_with_metadata` + interceptor in new development instead of the `post_get` interceptor. + When both interceptors are used, this `post_get_with_metadata` interceptor runs after the + `post_get` interceptor. The (possibly modified) response returned by + `post_get` will be passed to + `post_get_with_metadata`. + """ + return response, metadata + def pre_insert( self, request: compute.InsertRegionSslPolicyRequest, @@ -185,12 +231,35 @@ def pre_insert( def post_insert(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for insert - Override in a subclass to manipulate the response + DEPRECATED. 
Please use the `post_insert_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RegionSslPolicies server but before - it is returned to user code. + it is returned to user code. This `post_insert` interceptor runs + before the `post_insert_with_metadata` interceptor. """ return response + def post_insert_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for insert + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionSslPolicies server but before it is returned to user code. + + We recommend only using this `post_insert_with_metadata` + interceptor in new development instead of the `post_insert` interceptor. + When both interceptors are used, this `post_insert_with_metadata` interceptor runs after the + `post_insert` interceptor. The (possibly modified) response returned by + `post_insert` will be passed to + `post_insert_with_metadata`. + """ + return response, metadata + def pre_list( self, request: compute.ListRegionSslPoliciesRequest, @@ -208,12 +277,35 @@ def pre_list( def post_list(self, response: compute.SslPoliciesList) -> compute.SslPoliciesList: """Post-rpc interceptor for list - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RegionSslPolicies server but before - it is returned to user code. + it is returned to user code. This `post_list` interceptor runs + before the `post_list_with_metadata` interceptor. """ return response + def post_list_with_metadata( + self, + response: compute.SslPoliciesList, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.SslPoliciesList, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionSslPolicies server but before it is returned to user code. + + We recommend only using this `post_list_with_metadata` + interceptor in new development instead of the `post_list` interceptor. + When both interceptors are used, this `post_list_with_metadata` interceptor runs after the + `post_list` interceptor. The (possibly modified) response returned by + `post_list` will be passed to + `post_list_with_metadata`. + """ + return response, metadata + def pre_list_available_features( self, request: compute.ListAvailableFeaturesRegionSslPoliciesRequest, @@ -234,12 +326,38 @@ def post_list_available_features( ) -> compute.SslPoliciesListAvailableFeaturesResponse: """Post-rpc interceptor for list_available_features - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_available_features_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RegionSslPolicies server but before - it is returned to user code. + it is returned to user code. This `post_list_available_features` interceptor runs + before the `post_list_available_features_with_metadata` interceptor. 
""" return response + def post_list_available_features_with_metadata( + self, + response: compute.SslPoliciesListAvailableFeaturesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.SslPoliciesListAvailableFeaturesResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_available_features + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionSslPolicies server but before it is returned to user code. + + We recommend only using this `post_list_available_features_with_metadata` + interceptor in new development instead of the `post_list_available_features` interceptor. + When both interceptors are used, this `post_list_available_features_with_metadata` interceptor runs after the + `post_list_available_features` interceptor. The (possibly modified) response returned by + `post_list_available_features` will be passed to + `post_list_available_features_with_metadata`. + """ + return response, metadata + def pre_patch( self, request: compute.PatchRegionSslPolicyRequest, @@ -257,12 +375,35 @@ def pre_patch( def post_patch(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for patch - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_patch_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RegionSslPolicies server but before - it is returned to user code. + it is returned to user code. This `post_patch` interceptor runs + before the `post_patch_with_metadata` interceptor. """ return response + def post_patch_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for patch + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionSslPolicies server but before it is returned to user code. + + We recommend only using this `post_patch_with_metadata` + interceptor in new development instead of the `post_patch` interceptor. + When both interceptors are used, this `post_patch_with_metadata` interceptor runs after the + `post_patch` interceptor. The (possibly modified) response returned by + `post_patch` will be passed to + `post_patch_with_metadata`. 
+ """ + return response, metadata + @dataclasses.dataclass class RegionSslPoliciesRestStub: @@ -494,6 +635,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -645,6 +790,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -816,6 +963,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_insert(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_insert_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -962,6 +1113,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1109,6 +1262,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_available_features(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_available_features_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1284,6 +1441,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_patch(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_patch_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_target_http_proxies/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_target_http_proxies/client.py index fa54a59e5bb3..7f286724acac 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_target_http_proxies/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_target_http_proxies/client.py @@ -15,6 +15,8 @@ # from collections import OrderedDict import functools +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -463,6 +465,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. 
+ + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_target_http_proxies/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_target_http_proxies/transports/rest.py index c68fd20b9ff7..5e85179e2b60 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_target_http_proxies/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_target_http_proxies/transports/rest.py @@ -132,12 +132,35 @@ def pre_delete( def post_delete(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for delete - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RegionTargetHttpProxies server but before - it is returned to user code. + it is returned to user code. This `post_delete` interceptor runs + before the `post_delete_with_metadata` interceptor. """ return response + def post_delete_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionTargetHttpProxies server but before it is returned to user code. + + We recommend only using this `post_delete_with_metadata` + interceptor in new development instead of the `post_delete` interceptor. + When both interceptors are used, this `post_delete_with_metadata` interceptor runs after the + `post_delete` interceptor. The (possibly modified) response returned by + `post_delete` will be passed to + `post_delete_with_metadata`. + """ + return response, metadata + def pre_get( self, request: compute.GetRegionTargetHttpProxyRequest, @@ -155,12 +178,35 @@ def pre_get( def post_get(self, response: compute.TargetHttpProxy) -> compute.TargetHttpProxy: """Post-rpc interceptor for get - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RegionTargetHttpProxies server but before - it is returned to user code. + it is returned to user code. This `post_get` interceptor runs + before the `post_get_with_metadata` interceptor. 
""" return response + def post_get_with_metadata( + self, + response: compute.TargetHttpProxy, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.TargetHttpProxy, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionTargetHttpProxies server but before it is returned to user code. + + We recommend only using this `post_get_with_metadata` + interceptor in new development instead of the `post_get` interceptor. + When both interceptors are used, this `post_get_with_metadata` interceptor runs after the + `post_get` interceptor. The (possibly modified) response returned by + `post_get` will be passed to + `post_get_with_metadata`. + """ + return response, metadata + def pre_insert( self, request: compute.InsertRegionTargetHttpProxyRequest, @@ -179,12 +225,35 @@ def pre_insert( def post_insert(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for insert - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_insert_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RegionTargetHttpProxies server but before - it is returned to user code. + it is returned to user code. This `post_insert` interceptor runs + before the `post_insert_with_metadata` interceptor. """ return response + def post_insert_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for insert + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionTargetHttpProxies server but before it is returned to user code. + + We recommend only using this `post_insert_with_metadata` + interceptor in new development instead of the `post_insert` interceptor. + When both interceptors are used, this `post_insert_with_metadata` interceptor runs after the + `post_insert` interceptor. The (possibly modified) response returned by + `post_insert` will be passed to + `post_insert_with_metadata`. + """ + return response, metadata + def pre_list( self, request: compute.ListRegionTargetHttpProxiesRequest, @@ -205,12 +274,35 @@ def post_list( ) -> compute.TargetHttpProxyList: """Post-rpc interceptor for list - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RegionTargetHttpProxies server but before - it is returned to user code. + it is returned to user code. This `post_list` interceptor runs + before the `post_list_with_metadata` interceptor. """ return response + def post_list_with_metadata( + self, + response: compute.TargetHttpProxyList, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.TargetHttpProxyList, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionTargetHttpProxies server but before it is returned to user code. + + We recommend only using this `post_list_with_metadata` + interceptor in new development instead of the `post_list` interceptor. 
+ When both interceptors are used, this `post_list_with_metadata` interceptor runs after the + `post_list` interceptor. The (possibly modified) response returned by + `post_list` will be passed to + `post_list_with_metadata`. + """ + return response, metadata + def pre_set_url_map( self, request: compute.SetUrlMapRegionTargetHttpProxyRequest, @@ -229,12 +321,35 @@ def pre_set_url_map( def post_set_url_map(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for set_url_map - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_set_url_map_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RegionTargetHttpProxies server but before - it is returned to user code. + it is returned to user code. This `post_set_url_map` interceptor runs + before the `post_set_url_map_with_metadata` interceptor. """ return response + def post_set_url_map_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for set_url_map + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionTargetHttpProxies server but before it is returned to user code. + + We recommend only using this `post_set_url_map_with_metadata` + interceptor in new development instead of the `post_set_url_map` interceptor. + When both interceptors are used, this `post_set_url_map_with_metadata` interceptor runs after the + `post_set_url_map` interceptor. The (possibly modified) response returned by + `post_set_url_map` will be passed to + `post_set_url_map_with_metadata`. + """ + return response, metadata + @dataclasses.dataclass class RegionTargetHttpProxiesRestStub: @@ -463,6 +578,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -622,6 +741,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -788,6 +909,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_insert(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_insert_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -931,6 +1056,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1097,6 +1224,10 @@ def __call__( json_format.Parse(response.content, pb_resp, 
ignore_unknown_fields=True) resp = self._interceptor.post_set_url_map(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_set_url_map_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_target_https_proxies/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_target_https_proxies/client.py index 73a0907ef29b..ea552f075818 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_target_https_proxies/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_target_https_proxies/client.py @@ -15,6 +15,8 @@ # from collections import OrderedDict import functools +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -463,6 +465,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_target_https_proxies/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_target_https_proxies/transports/rest.py index 8c0ca8797d45..e1aa79ffe801 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_target_https_proxies/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_target_https_proxies/transports/rest.py @@ -148,12 +148,35 @@ def pre_delete( def post_delete(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for delete - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RegionTargetHttpsProxies server but before - it is returned to user code. + it is returned to user code. This `post_delete` interceptor runs + before the `post_delete_with_metadata` interceptor. """ return response + def post_delete_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionTargetHttpsProxies server but before it is returned to user code. 
+ + We recommend only using this `post_delete_with_metadata` + interceptor in new development instead of the `post_delete` interceptor. + When both interceptors are used, this `post_delete_with_metadata` interceptor runs after the + `post_delete` interceptor. The (possibly modified) response returned by + `post_delete` will be passed to + `post_delete_with_metadata`. + """ + return response, metadata + def pre_get( self, request: compute.GetRegionTargetHttpsProxyRequest, @@ -172,12 +195,35 @@ def pre_get( def post_get(self, response: compute.TargetHttpsProxy) -> compute.TargetHttpsProxy: """Post-rpc interceptor for get - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RegionTargetHttpsProxies server but before - it is returned to user code. + it is returned to user code. This `post_get` interceptor runs + before the `post_get_with_metadata` interceptor. """ return response + def post_get_with_metadata( + self, + response: compute.TargetHttpsProxy, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.TargetHttpsProxy, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionTargetHttpsProxies server but before it is returned to user code. + + We recommend only using this `post_get_with_metadata` + interceptor in new development instead of the `post_get` interceptor. + When both interceptors are used, this `post_get_with_metadata` interceptor runs after the + `post_get` interceptor. The (possibly modified) response returned by + `post_get` will be passed to + `post_get_with_metadata`. + """ + return response, metadata + def pre_insert( self, request: compute.InsertRegionTargetHttpsProxyRequest, @@ -196,12 +242,35 @@ def pre_insert( def post_insert(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for insert - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_insert_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RegionTargetHttpsProxies server but before - it is returned to user code. + it is returned to user code. This `post_insert` interceptor runs + before the `post_insert_with_metadata` interceptor. """ return response + def post_insert_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for insert + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionTargetHttpsProxies server but before it is returned to user code. + + We recommend only using this `post_insert_with_metadata` + interceptor in new development instead of the `post_insert` interceptor. + When both interceptors are used, this `post_insert_with_metadata` interceptor runs after the + `post_insert` interceptor. The (possibly modified) response returned by + `post_insert` will be passed to + `post_insert_with_metadata`. 
+ """ + return response, metadata + def pre_list( self, request: compute.ListRegionTargetHttpsProxiesRequest, @@ -222,12 +291,35 @@ def post_list( ) -> compute.TargetHttpsProxyList: """Post-rpc interceptor for list - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RegionTargetHttpsProxies server but before - it is returned to user code. + it is returned to user code. This `post_list` interceptor runs + before the `post_list_with_metadata` interceptor. """ return response + def post_list_with_metadata( + self, + response: compute.TargetHttpsProxyList, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.TargetHttpsProxyList, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionTargetHttpsProxies server but before it is returned to user code. + + We recommend only using this `post_list_with_metadata` + interceptor in new development instead of the `post_list` interceptor. + When both interceptors are used, this `post_list_with_metadata` interceptor runs after the + `post_list` interceptor. The (possibly modified) response returned by + `post_list` will be passed to + `post_list_with_metadata`. + """ + return response, metadata + def pre_patch( self, request: compute.PatchRegionTargetHttpsProxyRequest, @@ -246,12 +338,35 @@ def pre_patch( def post_patch(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for patch - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_patch_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RegionTargetHttpsProxies server but before - it is returned to user code. + it is returned to user code. This `post_patch` interceptor runs + before the `post_patch_with_metadata` interceptor. """ return response + def post_patch_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for patch + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionTargetHttpsProxies server but before it is returned to user code. + + We recommend only using this `post_patch_with_metadata` + interceptor in new development instead of the `post_patch` interceptor. + When both interceptors are used, this `post_patch_with_metadata` interceptor runs after the + `post_patch` interceptor. The (possibly modified) response returned by + `post_patch` will be passed to + `post_patch_with_metadata`. + """ + return response, metadata + def pre_set_ssl_certificates( self, request: compute.SetSslCertificatesRegionTargetHttpsProxyRequest, @@ -272,12 +387,35 @@ def post_set_ssl_certificates( ) -> compute.Operation: """Post-rpc interceptor for set_ssl_certificates - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_set_ssl_certificates_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RegionTargetHttpsProxies server but before - it is returned to user code. + it is returned to user code. 
This `post_set_ssl_certificates` interceptor runs + before the `post_set_ssl_certificates_with_metadata` interceptor. """ return response + def post_set_ssl_certificates_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for set_ssl_certificates + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionTargetHttpsProxies server but before it is returned to user code. + + We recommend only using this `post_set_ssl_certificates_with_metadata` + interceptor in new development instead of the `post_set_ssl_certificates` interceptor. + When both interceptors are used, this `post_set_ssl_certificates_with_metadata` interceptor runs after the + `post_set_ssl_certificates` interceptor. The (possibly modified) response returned by + `post_set_ssl_certificates` will be passed to + `post_set_ssl_certificates_with_metadata`. + """ + return response, metadata + def pre_set_url_map( self, request: compute.SetUrlMapRegionTargetHttpsProxyRequest, @@ -296,12 +434,35 @@ def pre_set_url_map( def post_set_url_map(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for set_url_map - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_set_url_map_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RegionTargetHttpsProxies server but before - it is returned to user code. + it is returned to user code. This `post_set_url_map` interceptor runs + before the `post_set_url_map_with_metadata` interceptor. """ return response + def post_set_url_map_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for set_url_map + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionTargetHttpsProxies server but before it is returned to user code. + + We recommend only using this `post_set_url_map_with_metadata` + interceptor in new development instead of the `post_set_url_map` interceptor. + When both interceptors are used, this `post_set_url_map_with_metadata` interceptor runs after the + `post_set_url_map` interceptor. The (possibly modified) response returned by + `post_set_url_map` will be passed to + `post_set_url_map_with_metadata`. 
+ """ + return response, metadata + @dataclasses.dataclass class RegionTargetHttpsProxiesRestStub: @@ -530,6 +691,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -689,6 +854,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -855,6 +1022,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_insert(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_insert_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1000,6 +1171,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1166,6 +1339,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_patch(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_patch_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1336,6 +1513,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_set_ssl_certificates(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_set_ssl_certificates_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1502,6 +1683,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_set_url_map(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_set_url_map_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_target_tcp_proxies/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_target_tcp_proxies/client.py index 50ecedd90e0f..d37492e41567 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_target_tcp_proxies/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_target_tcp_proxies/client.py @@ -15,6 +15,8 @@ # from collections import OrderedDict import functools +from http import HTTPStatus +import json import logging as std_logging import os import re 
@@ -463,6 +465,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_target_tcp_proxies/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_target_tcp_proxies/transports/rest.py index 37db37a45b23..c48907632d2c 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_target_tcp_proxies/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_target_tcp_proxies/transports/rest.py @@ -124,12 +124,35 @@ def pre_delete( def post_delete(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for delete - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RegionTargetTcpProxies server but before - it is returned to user code. + it is returned to user code. This `post_delete` interceptor runs + before the `post_delete_with_metadata` interceptor. """ return response + def post_delete_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionTargetTcpProxies server but before it is returned to user code. + + We recommend only using this `post_delete_with_metadata` + interceptor in new development instead of the `post_delete` interceptor. + When both interceptors are used, this `post_delete_with_metadata` interceptor runs after the + `post_delete` interceptor. The (possibly modified) response returned by + `post_delete` will be passed to + `post_delete_with_metadata`. + """ + return response, metadata + def pre_get( self, request: compute.GetRegionTargetTcpProxyRequest, @@ -147,12 +170,35 @@ def pre_get( def post_get(self, response: compute.TargetTcpProxy) -> compute.TargetTcpProxy: """Post-rpc interceptor for get - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RegionTargetTcpProxies server but before - it is returned to user code. + it is returned to user code. This `post_get` interceptor runs + before the `post_get_with_metadata` interceptor. 
""" return response + def post_get_with_metadata( + self, + response: compute.TargetTcpProxy, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.TargetTcpProxy, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionTargetTcpProxies server but before it is returned to user code. + + We recommend only using this `post_get_with_metadata` + interceptor in new development instead of the `post_get` interceptor. + When both interceptors are used, this `post_get_with_metadata` interceptor runs after the + `post_get` interceptor. The (possibly modified) response returned by + `post_get` will be passed to + `post_get_with_metadata`. + """ + return response, metadata + def pre_insert( self, request: compute.InsertRegionTargetTcpProxyRequest, @@ -171,12 +217,35 @@ def pre_insert( def post_insert(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for insert - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_insert_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RegionTargetTcpProxies server but before - it is returned to user code. + it is returned to user code. This `post_insert` interceptor runs + before the `post_insert_with_metadata` interceptor. """ return response + def post_insert_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for insert + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionTargetTcpProxies server but before it is returned to user code. + + We recommend only using this `post_insert_with_metadata` + interceptor in new development instead of the `post_insert` interceptor. + When both interceptors are used, this `post_insert_with_metadata` interceptor runs after the + `post_insert` interceptor. The (possibly modified) response returned by + `post_insert` will be passed to + `post_insert_with_metadata`. + """ + return response, metadata + def pre_list( self, request: compute.ListRegionTargetTcpProxiesRequest, @@ -197,12 +266,35 @@ def post_list( ) -> compute.TargetTcpProxyList: """Post-rpc interceptor for list - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RegionTargetTcpProxies server but before - it is returned to user code. + it is returned to user code. This `post_list` interceptor runs + before the `post_list_with_metadata` interceptor. """ return response + def post_list_with_metadata( + self, + response: compute.TargetTcpProxyList, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.TargetTcpProxyList, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionTargetTcpProxies server but before it is returned to user code. + + We recommend only using this `post_list_with_metadata` + interceptor in new development instead of the `post_list` interceptor. 
+ When both interceptors are used, this `post_list_with_metadata` interceptor runs after the + `post_list` interceptor. The (possibly modified) response returned by + `post_list` will be passed to + `post_list_with_metadata`. + """ + return response, metadata + @dataclasses.dataclass class RegionTargetTcpProxiesRestStub: @@ -431,6 +523,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -582,6 +678,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -748,6 +846,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_insert(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_insert_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -893,6 +995,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_url_maps/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_url_maps/client.py index 4fd963616d99..75621483447f 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_url_maps/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_url_maps/client.py @@ -15,6 +15,8 @@ # from collections import OrderedDict import functools +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -459,6 +461,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. 
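# --- Illustrative sketch (not part of the generated diff) -------------------
# The RegionUrlMaps hunks below spell out the ordering contract between the
# deprecated and the new hooks: when both are overridden, `post_get` runs
# first and its (possibly modified) response is what `post_get_with_metadata`
# receives, together with the header-derived metadata. Assumed names not shown
# verbatim in this diff: `RegionUrlMapsRestInterceptor` and the
# `google.cloud.compute_v1.types` module; `description` is used only as an
# example mutable field on the UrlMap resource.
from typing import Sequence, Tuple, Union

from google.cloud.compute_v1.services.region_url_maps.transports.rest import (
    RegionUrlMapsRestInterceptor,
)
from google.cloud.compute_v1.types import compute


class AuditingInterceptor(RegionUrlMapsRestInterceptor):
    def post_get(self, response: compute.UrlMap) -> compute.UrlMap:
        # Deprecated hook: still invoked, and invoked first.
        response.description = "seen-by-post-get"
        return response

    def post_get_with_metadata(
        self,
        response: compute.UrlMap,
        metadata: Sequence[Tuple[str, Union[str, bytes]]],
    ) -> Tuple[compute.UrlMap, Sequence[Tuple[str, Union[str, bytes]]]]:
        # Receives the response already touched by post_get above, plus the
        # response-header metadata; this is the hook recommended for new code.
        assert response.description == "seen-by-post-get"
        return response, metadata
# -----------------------------------------------------------------------------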
diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_url_maps/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_url_maps/transports/rest.py index e6e09e728059..d42df37e2e76 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_url_maps/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_url_maps/transports/rest.py @@ -147,12 +147,35 @@ def pre_delete( def post_delete(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for delete - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RegionUrlMaps server but before - it is returned to user code. + it is returned to user code. This `post_delete` interceptor runs + before the `post_delete_with_metadata` interceptor. """ return response + def post_delete_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionUrlMaps server but before it is returned to user code. + + We recommend only using this `post_delete_with_metadata` + interceptor in new development instead of the `post_delete` interceptor. + When both interceptors are used, this `post_delete_with_metadata` interceptor runs after the + `post_delete` interceptor. The (possibly modified) response returned by + `post_delete` will be passed to + `post_delete_with_metadata`. + """ + return response, metadata + def pre_get( self, request: compute.GetRegionUrlMapRequest, @@ -168,12 +191,35 @@ def pre_get( def post_get(self, response: compute.UrlMap) -> compute.UrlMap: """Post-rpc interceptor for get - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RegionUrlMaps server but before - it is returned to user code. + it is returned to user code. This `post_get` interceptor runs + before the `post_get_with_metadata` interceptor. """ return response + def post_get_with_metadata( + self, + response: compute.UrlMap, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.UrlMap, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionUrlMaps server but before it is returned to user code. + + We recommend only using this `post_get_with_metadata` + interceptor in new development instead of the `post_get` interceptor. + When both interceptors are used, this `post_get_with_metadata` interceptor runs after the + `post_get` interceptor. The (possibly modified) response returned by + `post_get` will be passed to + `post_get_with_metadata`. + """ + return response, metadata + def pre_insert( self, request: compute.InsertRegionUrlMapRequest, @@ -191,12 +237,35 @@ def pre_insert( def post_insert(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for insert - Override in a subclass to manipulate the response + DEPRECATED. 
Please use the `post_insert_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RegionUrlMaps server but before - it is returned to user code. + it is returned to user code. This `post_insert` interceptor runs + before the `post_insert_with_metadata` interceptor. """ return response + def post_insert_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for insert + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionUrlMaps server but before it is returned to user code. + + We recommend only using this `post_insert_with_metadata` + interceptor in new development instead of the `post_insert` interceptor. + When both interceptors are used, this `post_insert_with_metadata` interceptor runs after the + `post_insert` interceptor. The (possibly modified) response returned by + `post_insert` will be passed to + `post_insert_with_metadata`. + """ + return response, metadata + def pre_list( self, request: compute.ListRegionUrlMapsRequest, @@ -214,12 +283,35 @@ def pre_list( def post_list(self, response: compute.UrlMapList) -> compute.UrlMapList: """Post-rpc interceptor for list - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RegionUrlMaps server but before - it is returned to user code. + it is returned to user code. This `post_list` interceptor runs + before the `post_list_with_metadata` interceptor. """ return response + def post_list_with_metadata( + self, + response: compute.UrlMapList, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.UrlMapList, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionUrlMaps server but before it is returned to user code. + + We recommend only using this `post_list_with_metadata` + interceptor in new development instead of the `post_list` interceptor. + When both interceptors are used, this `post_list_with_metadata` interceptor runs after the + `post_list` interceptor. The (possibly modified) response returned by + `post_list` will be passed to + `post_list_with_metadata`. + """ + return response, metadata + def pre_patch( self, request: compute.PatchRegionUrlMapRequest, @@ -237,12 +329,35 @@ def pre_patch( def post_patch(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for patch - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_patch_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RegionUrlMaps server but before - it is returned to user code. + it is returned to user code. This `post_patch` interceptor runs + before the `post_patch_with_metadata` interceptor. 
""" return response + def post_patch_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for patch + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionUrlMaps server but before it is returned to user code. + + We recommend only using this `post_patch_with_metadata` + interceptor in new development instead of the `post_patch` interceptor. + When both interceptors are used, this `post_patch_with_metadata` interceptor runs after the + `post_patch` interceptor. The (possibly modified) response returned by + `post_patch` will be passed to + `post_patch_with_metadata`. + """ + return response, metadata + def pre_update( self, request: compute.UpdateRegionUrlMapRequest, @@ -260,12 +375,35 @@ def pre_update( def post_update(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for update - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RegionUrlMaps server but before - it is returned to user code. + it is returned to user code. This `post_update` interceptor runs + before the `post_update_with_metadata` interceptor. """ return response + def post_update_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionUrlMaps server but before it is returned to user code. + + We recommend only using this `post_update_with_metadata` + interceptor in new development instead of the `post_update` interceptor. + When both interceptors are used, this `post_update_with_metadata` interceptor runs after the + `post_update` interceptor. The (possibly modified) response returned by + `post_update` will be passed to + `post_update_with_metadata`. + """ + return response, metadata + def pre_validate( self, request: compute.ValidateRegionUrlMapRequest, @@ -285,12 +423,37 @@ def post_validate( ) -> compute.UrlMapsValidateResponse: """Post-rpc interceptor for validate - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_validate_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RegionUrlMaps server but before - it is returned to user code. + it is returned to user code. This `post_validate` interceptor runs + before the `post_validate_with_metadata` interceptor. """ return response + def post_validate_with_metadata( + self, + response: compute.UrlMapsValidateResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.UrlMapsValidateResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for validate + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionUrlMaps server but before it is returned to user code. + + We recommend only using this `post_validate_with_metadata` + interceptor in new development instead of the `post_validate` interceptor. 
+ When both interceptors are used, this `post_validate_with_metadata` interceptor runs after the + `post_validate` interceptor. The (possibly modified) response returned by + `post_validate` will be passed to + `post_validate_with_metadata`. + """ + return response, metadata + @dataclasses.dataclass class RegionUrlMapsRestStub: @@ -520,6 +683,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -687,6 +854,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -854,6 +1023,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_insert(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_insert_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -996,6 +1169,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1163,6 +1338,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_patch(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_patch_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1330,6 +1509,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1482,6 +1665,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_validate(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_validate_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_zones/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_zones/client.py index 530559c6120b..e78218e0923f 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_zones/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_zones/client.py @@ -14,6 +14,8 @@ # 
limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -456,6 +458,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_zones/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_zones/transports/rest.py index 256ede8e847e..aed6e8d15287 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_zones/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_zones/transports/rest.py @@ -97,12 +97,35 @@ def pre_list( def post_list(self, response: compute.ZoneList) -> compute.ZoneList: """Post-rpc interceptor for list - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RegionZones server but before - it is returned to user code. + it is returned to user code. This `post_list` interceptor runs + before the `post_list_with_metadata` interceptor. """ return response + def post_list_with_metadata( + self, + response: compute.ZoneList, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.ZoneList, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionZones server but before it is returned to user code. + + We recommend only using this `post_list_with_metadata` + interceptor in new development instead of the `post_list` interceptor. + When both interceptors are used, this `post_list_with_metadata` interceptor runs after the + `post_list` interceptor. The (possibly modified) response returned by + `post_list` will be passed to + `post_list_with_metadata`. 
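The `*_with_metadata` hooks added in these hunks are meant to be overridden in interceptor subclasses. As a minimal sketch (not part of this diff), assuming the generated interceptor class for this service is named `RegionZonesRestInterceptor` and is exported from the same `rest.py` module shown above, an override that inspects the response headers could look like:

```python
# Hedged sketch: RegionZonesRestInterceptor is the assumed generated class name.
from typing import Sequence, Tuple, Union

from google.cloud.compute_v1.services.region_zones.transports.rest import (
    RegionZonesRestInterceptor,  # assumed export of the generated interceptor
)
from google.cloud.compute_v1.types import compute


class HeaderLoggingInterceptor(RegionZonesRestInterceptor):
    """Reads response headers via the new post_list_with_metadata hook."""

    def post_list_with_metadata(
        self,
        response: compute.ZoneList,
        metadata: Sequence[Tuple[str, Union[str, bytes]]],
    ) -> Tuple[compute.ZoneList, Sequence[Tuple[str, Union[str, bytes]]]]:
        # `metadata` is the list of (header, value) pairs the transport builds
        # from the HTTP response headers.
        headers = dict(metadata)
        print("list() response dated:", headers.get("date"))
        return response, metadata
```

Because the transport keeps only the response from the returned tuple (`resp, _ = ...`), returning `response, metadata` unchanged preserves the default behaviour.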
+ """ + return response, metadata + @dataclasses.dataclass class RegionZonesRestStub: @@ -313,6 +336,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/regions/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/regions/client.py index decc780fcfc5..abe714cae143 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/regions/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/regions/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -456,6 +458,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/regions/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/regions/transports/rest.py index f3c963e4cd20..267dd4fa6034 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/regions/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/regions/transports/rest.py @@ -105,12 +105,35 @@ def pre_get( def post_get(self, response: compute.Region) -> compute.Region: """Post-rpc interceptor for get - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Regions server but before - it is returned to user code. + it is returned to user code. This `post_get` interceptor runs + before the `post_get_with_metadata` interceptor. """ return response + def post_get_with_metadata( + self, + response: compute.Region, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Region, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Regions server but before it is returned to user code. + + We recommend only using this `post_get_with_metadata` + interceptor in new development instead of the `post_get` interceptor. 
+ When both interceptors are used, this `post_get_with_metadata` interceptor runs after the + `post_get` interceptor. The (possibly modified) response returned by + `post_get` will be passed to + `post_get_with_metadata`. + """ + return response, metadata + def pre_list( self, request: compute.ListRegionsRequest, @@ -126,12 +149,35 @@ def pre_list( def post_list(self, response: compute.RegionList) -> compute.RegionList: """Post-rpc interceptor for list - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Regions server but before - it is returned to user code. + it is returned to user code. This `post_list` interceptor runs + before the `post_list_with_metadata` interceptor. """ return response + def post_list_with_metadata( + self, + response: compute.RegionList, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.RegionList, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Regions server but before it is returned to user code. + + We recommend only using this `post_list_with_metadata` + interceptor in new development instead of the `post_list` interceptor. + When both interceptors are used, this `post_list_with_metadata` interceptor runs after the + `post_list` interceptor. The (possibly modified) response returned by + `post_list` will be passed to + `post_list_with_metadata`. + """ + return response, metadata + @dataclasses.dataclass class RegionsRestStub: @@ -343,6 +389,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -482,6 +530,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/reservations/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/reservations/client.py index 7f9b16706854..252f66e28ac4 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/reservations/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/reservations/client.py @@ -15,6 +15,8 @@ # from collections import OrderedDict import functools +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -459,6 +461,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. 
+ """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/reservations/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/reservations/transports/rest.py index 3451435a917e..35e239cc1f24 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/reservations/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/reservations/transports/rest.py @@ -174,12 +174,37 @@ def post_aggregated_list( ) -> compute.ReservationAggregatedList: """Post-rpc interceptor for aggregated_list - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_aggregated_list_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Reservations server but before - it is returned to user code. + it is returned to user code. This `post_aggregated_list` interceptor runs + before the `post_aggregated_list_with_metadata` interceptor. """ return response + def post_aggregated_list_with_metadata( + self, + response: compute.ReservationAggregatedList, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.ReservationAggregatedList, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for aggregated_list + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Reservations server but before it is returned to user code. + + We recommend only using this `post_aggregated_list_with_metadata` + interceptor in new development instead of the `post_aggregated_list` interceptor. + When both interceptors are used, this `post_aggregated_list_with_metadata` interceptor runs after the + `post_aggregated_list` interceptor. The (possibly modified) response returned by + `post_aggregated_list` will be passed to + `post_aggregated_list_with_metadata`. + """ + return response, metadata + def pre_delete( self, request: compute.DeleteReservationRequest, @@ -197,12 +222,35 @@ def pre_delete( def post_delete(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for delete - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Reservations server but before - it is returned to user code. + it is returned to user code. This `post_delete` interceptor runs + before the `post_delete_with_metadata` interceptor. 
""" return response + def post_delete_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Reservations server but before it is returned to user code. + + We recommend only using this `post_delete_with_metadata` + interceptor in new development instead of the `post_delete` interceptor. + When both interceptors are used, this `post_delete_with_metadata` interceptor runs after the + `post_delete` interceptor. The (possibly modified) response returned by + `post_delete` will be passed to + `post_delete_with_metadata`. + """ + return response, metadata + def pre_get( self, request: compute.GetReservationRequest, @@ -218,12 +266,35 @@ def pre_get( def post_get(self, response: compute.Reservation) -> compute.Reservation: """Post-rpc interceptor for get - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Reservations server but before - it is returned to user code. + it is returned to user code. This `post_get` interceptor runs + before the `post_get_with_metadata` interceptor. """ return response + def post_get_with_metadata( + self, + response: compute.Reservation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Reservation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Reservations server but before it is returned to user code. + + We recommend only using this `post_get_with_metadata` + interceptor in new development instead of the `post_get` interceptor. + When both interceptors are used, this `post_get_with_metadata` interceptor runs after the + `post_get` interceptor. The (possibly modified) response returned by + `post_get` will be passed to + `post_get_with_metadata`. + """ + return response, metadata + def pre_get_iam_policy( self, request: compute.GetIamPolicyReservationRequest, @@ -241,12 +312,35 @@ def pre_get_iam_policy( def post_get_iam_policy(self, response: compute.Policy) -> compute.Policy: """Post-rpc interceptor for get_iam_policy - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_iam_policy_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Reservations server but before - it is returned to user code. + it is returned to user code. This `post_get_iam_policy` interceptor runs + before the `post_get_iam_policy_with_metadata` interceptor. """ return response + def post_get_iam_policy_with_metadata( + self, + response: compute.Policy, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Policy, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_iam_policy + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Reservations server but before it is returned to user code. + + We recommend only using this `post_get_iam_policy_with_metadata` + interceptor in new development instead of the `post_get_iam_policy` interceptor. 
+ When both interceptors are used, this `post_get_iam_policy_with_metadata` interceptor runs after the + `post_get_iam_policy` interceptor. The (possibly modified) response returned by + `post_get_iam_policy` will be passed to + `post_get_iam_policy_with_metadata`. + """ + return response, metadata + def pre_insert( self, request: compute.InsertReservationRequest, @@ -264,12 +358,35 @@ def pre_insert( def post_insert(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for insert - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_insert_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Reservations server but before - it is returned to user code. + it is returned to user code. This `post_insert` interceptor runs + before the `post_insert_with_metadata` interceptor. """ return response + def post_insert_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for insert + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Reservations server but before it is returned to user code. + + We recommend only using this `post_insert_with_metadata` + interceptor in new development instead of the `post_insert` interceptor. + When both interceptors are used, this `post_insert_with_metadata` interceptor runs after the + `post_insert` interceptor. The (possibly modified) response returned by + `post_insert` will be passed to + `post_insert_with_metadata`. + """ + return response, metadata + def pre_list( self, request: compute.ListReservationsRequest, @@ -287,12 +404,35 @@ def pre_list( def post_list(self, response: compute.ReservationList) -> compute.ReservationList: """Post-rpc interceptor for list - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Reservations server but before - it is returned to user code. + it is returned to user code. This `post_list` interceptor runs + before the `post_list_with_metadata` interceptor. """ return response + def post_list_with_metadata( + self, + response: compute.ReservationList, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.ReservationList, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Reservations server but before it is returned to user code. + + We recommend only using this `post_list_with_metadata` + interceptor in new development instead of the `post_list` interceptor. + When both interceptors are used, this `post_list_with_metadata` interceptor runs after the + `post_list` interceptor. The (possibly modified) response returned by + `post_list` will be passed to + `post_list_with_metadata`. + """ + return response, metadata + def pre_resize( self, request: compute.ResizeReservationRequest, @@ -310,12 +450,35 @@ def pre_resize( def post_resize(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for resize - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_resize_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response after it is returned by the Reservations server but before - it is returned to user code. + it is returned to user code. This `post_resize` interceptor runs + before the `post_resize_with_metadata` interceptor. """ return response + def post_resize_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for resize + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Reservations server but before it is returned to user code. + + We recommend only using this `post_resize_with_metadata` + interceptor in new development instead of the `post_resize` interceptor. + When both interceptors are used, this `post_resize_with_metadata` interceptor runs after the + `post_resize` interceptor. The (possibly modified) response returned by + `post_resize` will be passed to + `post_resize_with_metadata`. + """ + return response, metadata + def pre_set_iam_policy( self, request: compute.SetIamPolicyReservationRequest, @@ -333,12 +496,35 @@ def pre_set_iam_policy( def post_set_iam_policy(self, response: compute.Policy) -> compute.Policy: """Post-rpc interceptor for set_iam_policy - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_set_iam_policy_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Reservations server but before - it is returned to user code. + it is returned to user code. This `post_set_iam_policy` interceptor runs + before the `post_set_iam_policy_with_metadata` interceptor. """ return response + def post_set_iam_policy_with_metadata( + self, + response: compute.Policy, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Policy, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for set_iam_policy + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Reservations server but before it is returned to user code. + + We recommend only using this `post_set_iam_policy_with_metadata` + interceptor in new development instead of the `post_set_iam_policy` interceptor. + When both interceptors are used, this `post_set_iam_policy_with_metadata` interceptor runs after the + `post_set_iam_policy` interceptor. The (possibly modified) response returned by + `post_set_iam_policy` will be passed to + `post_set_iam_policy_with_metadata`. + """ + return response, metadata + def pre_test_iam_permissions( self, request: compute.TestIamPermissionsReservationRequest, @@ -359,12 +545,37 @@ def post_test_iam_permissions( ) -> compute.TestPermissionsResponse: """Post-rpc interceptor for test_iam_permissions - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_test_iam_permissions_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Reservations server but before - it is returned to user code. + it is returned to user code. This `post_test_iam_permissions` interceptor runs + before the `post_test_iam_permissions_with_metadata` interceptor. 
""" return response + def post_test_iam_permissions_with_metadata( + self, + response: compute.TestPermissionsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.TestPermissionsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for test_iam_permissions + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Reservations server but before it is returned to user code. + + We recommend only using this `post_test_iam_permissions_with_metadata` + interceptor in new development instead of the `post_test_iam_permissions` interceptor. + When both interceptors are used, this `post_test_iam_permissions_with_metadata` interceptor runs after the + `post_test_iam_permissions` interceptor. The (possibly modified) response returned by + `post_test_iam_permissions` will be passed to + `post_test_iam_permissions_with_metadata`. + """ + return response, metadata + def pre_update( self, request: compute.UpdateReservationRequest, @@ -382,12 +593,35 @@ def pre_update( def post_update(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for update - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Reservations server but before - it is returned to user code. + it is returned to user code. This `post_update` interceptor runs + before the `post_update_with_metadata` interceptor. """ return response + def post_update_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Reservations server but before it is returned to user code. + + We recommend only using this `post_update_with_metadata` + interceptor in new development instead of the `post_update` interceptor. + When both interceptors are used, this `post_update_with_metadata` interceptor runs after the + `post_update` interceptor. The (possibly modified) response returned by + `post_update` will be passed to + `post_update_with_metadata`. 
+ """ + return response, metadata + @dataclasses.dataclass class ReservationsRestStub: @@ -598,6 +832,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_aggregated_list(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_aggregated_list_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -761,6 +999,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -909,6 +1151,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1076,6 +1320,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_iam_policy(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_iam_policy_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1243,6 +1491,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_insert(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_insert_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1385,6 +1637,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1552,6 +1806,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_resize(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_resize_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1727,6 +1985,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_set_iam_policy(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_set_iam_policy_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1877,6 +2139,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_test_iam_permissions(resp) + response_metadata = [(k, str(v)) for k, v in 
response.headers.items()] + resp, _ = self._interceptor.post_test_iam_permissions_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2044,6 +2310,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/resource_policies/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/resource_policies/client.py index ac504cf7992c..c627e465e161 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/resource_policies/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/resource_policies/client.py @@ -15,6 +15,8 @@ # from collections import OrderedDict import functools +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -461,6 +463,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/resource_policies/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/resource_policies/transports/rest.py index c6844bf8cd3a..35da593982e0 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/resource_policies/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/resource_policies/transports/rest.py @@ -166,12 +166,37 @@ def post_aggregated_list( ) -> compute.ResourcePolicyAggregatedList: """Post-rpc interceptor for aggregated_list - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_aggregated_list_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ResourcePolicies server but before - it is returned to user code. + it is returned to user code. This `post_aggregated_list` interceptor runs + before the `post_aggregated_list_with_metadata` interceptor. 
""" return response + def post_aggregated_list_with_metadata( + self, + response: compute.ResourcePolicyAggregatedList, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.ResourcePolicyAggregatedList, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for aggregated_list + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ResourcePolicies server but before it is returned to user code. + + We recommend only using this `post_aggregated_list_with_metadata` + interceptor in new development instead of the `post_aggregated_list` interceptor. + When both interceptors are used, this `post_aggregated_list_with_metadata` interceptor runs after the + `post_aggregated_list` interceptor. The (possibly modified) response returned by + `post_aggregated_list` will be passed to + `post_aggregated_list_with_metadata`. + """ + return response, metadata + def pre_delete( self, request: compute.DeleteResourcePolicyRequest, @@ -189,12 +214,35 @@ def pre_delete( def post_delete(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for delete - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ResourcePolicies server but before - it is returned to user code. + it is returned to user code. This `post_delete` interceptor runs + before the `post_delete_with_metadata` interceptor. """ return response + def post_delete_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ResourcePolicies server but before it is returned to user code. + + We recommend only using this `post_delete_with_metadata` + interceptor in new development instead of the `post_delete` interceptor. + When both interceptors are used, this `post_delete_with_metadata` interceptor runs after the + `post_delete` interceptor. The (possibly modified) response returned by + `post_delete` will be passed to + `post_delete_with_metadata`. + """ + return response, metadata + def pre_get( self, request: compute.GetResourcePolicyRequest, @@ -212,12 +260,35 @@ def pre_get( def post_get(self, response: compute.ResourcePolicy) -> compute.ResourcePolicy: """Post-rpc interceptor for get - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ResourcePolicies server but before - it is returned to user code. + it is returned to user code. This `post_get` interceptor runs + before the `post_get_with_metadata` interceptor. """ return response + def post_get_with_metadata( + self, + response: compute.ResourcePolicy, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.ResourcePolicy, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ResourcePolicies server but before it is returned to user code. 
+ + We recommend only using this `post_get_with_metadata` + interceptor in new development instead of the `post_get` interceptor. + When both interceptors are used, this `post_get_with_metadata` interceptor runs after the + `post_get` interceptor. The (possibly modified) response returned by + `post_get` will be passed to + `post_get_with_metadata`. + """ + return response, metadata + def pre_get_iam_policy( self, request: compute.GetIamPolicyResourcePolicyRequest, @@ -236,12 +307,35 @@ def pre_get_iam_policy( def post_get_iam_policy(self, response: compute.Policy) -> compute.Policy: """Post-rpc interceptor for get_iam_policy - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_iam_policy_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ResourcePolicies server but before - it is returned to user code. + it is returned to user code. This `post_get_iam_policy` interceptor runs + before the `post_get_iam_policy_with_metadata` interceptor. """ return response + def post_get_iam_policy_with_metadata( + self, + response: compute.Policy, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Policy, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_iam_policy + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ResourcePolicies server but before it is returned to user code. + + We recommend only using this `post_get_iam_policy_with_metadata` + interceptor in new development instead of the `post_get_iam_policy` interceptor. + When both interceptors are used, this `post_get_iam_policy_with_metadata` interceptor runs after the + `post_get_iam_policy` interceptor. The (possibly modified) response returned by + `post_get_iam_policy` will be passed to + `post_get_iam_policy_with_metadata`. + """ + return response, metadata + def pre_insert( self, request: compute.InsertResourcePolicyRequest, @@ -259,12 +353,35 @@ def pre_insert( def post_insert(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for insert - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_insert_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ResourcePolicies server but before - it is returned to user code. + it is returned to user code. This `post_insert` interceptor runs + before the `post_insert_with_metadata` interceptor. """ return response + def post_insert_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for insert + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ResourcePolicies server but before it is returned to user code. + + We recommend only using this `post_insert_with_metadata` + interceptor in new development instead of the `post_insert` interceptor. + When both interceptors are used, this `post_insert_with_metadata` interceptor runs after the + `post_insert` interceptor. The (possibly modified) response returned by + `post_insert` will be passed to + `post_insert_with_metadata`. 
+ """ + return response, metadata + def pre_list( self, request: compute.ListResourcePoliciesRequest, @@ -284,12 +401,35 @@ def post_list( ) -> compute.ResourcePolicyList: """Post-rpc interceptor for list - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ResourcePolicies server but before - it is returned to user code. + it is returned to user code. This `post_list` interceptor runs + before the `post_list_with_metadata` interceptor. """ return response + def post_list_with_metadata( + self, + response: compute.ResourcePolicyList, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.ResourcePolicyList, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ResourcePolicies server but before it is returned to user code. + + We recommend only using this `post_list_with_metadata` + interceptor in new development instead of the `post_list` interceptor. + When both interceptors are used, this `post_list_with_metadata` interceptor runs after the + `post_list` interceptor. The (possibly modified) response returned by + `post_list` will be passed to + `post_list_with_metadata`. + """ + return response, metadata + def pre_patch( self, request: compute.PatchResourcePolicyRequest, @@ -307,12 +447,35 @@ def pre_patch( def post_patch(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for patch - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_patch_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ResourcePolicies server but before - it is returned to user code. + it is returned to user code. This `post_patch` interceptor runs + before the `post_patch_with_metadata` interceptor. """ return response + def post_patch_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for patch + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ResourcePolicies server but before it is returned to user code. + + We recommend only using this `post_patch_with_metadata` + interceptor in new development instead of the `post_patch` interceptor. + When both interceptors are used, this `post_patch_with_metadata` interceptor runs after the + `post_patch` interceptor. The (possibly modified) response returned by + `post_patch` will be passed to + `post_patch_with_metadata`. + """ + return response, metadata + def pre_set_iam_policy( self, request: compute.SetIamPolicyResourcePolicyRequest, @@ -331,12 +494,35 @@ def pre_set_iam_policy( def post_set_iam_policy(self, response: compute.Policy) -> compute.Policy: """Post-rpc interceptor for set_iam_policy - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_set_iam_policy_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ResourcePolicies server but before - it is returned to user code. + it is returned to user code. 
This `post_set_iam_policy` interceptor runs + before the `post_set_iam_policy_with_metadata` interceptor. """ return response + def post_set_iam_policy_with_metadata( + self, + response: compute.Policy, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Policy, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for set_iam_policy + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ResourcePolicies server but before it is returned to user code. + + We recommend only using this `post_set_iam_policy_with_metadata` + interceptor in new development instead of the `post_set_iam_policy` interceptor. + When both interceptors are used, this `post_set_iam_policy_with_metadata` interceptor runs after the + `post_set_iam_policy` interceptor. The (possibly modified) response returned by + `post_set_iam_policy` will be passed to + `post_set_iam_policy_with_metadata`. + """ + return response, metadata + def pre_test_iam_permissions( self, request: compute.TestIamPermissionsResourcePolicyRequest, @@ -357,12 +543,37 @@ def post_test_iam_permissions( ) -> compute.TestPermissionsResponse: """Post-rpc interceptor for test_iam_permissions - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_test_iam_permissions_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ResourcePolicies server but before - it is returned to user code. + it is returned to user code. This `post_test_iam_permissions` interceptor runs + before the `post_test_iam_permissions_with_metadata` interceptor. """ return response + def post_test_iam_permissions_with_metadata( + self, + response: compute.TestPermissionsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.TestPermissionsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for test_iam_permissions + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ResourcePolicies server but before it is returned to user code. + + We recommend only using this `post_test_iam_permissions_with_metadata` + interceptor in new development instead of the `post_test_iam_permissions` interceptor. + When both interceptors are used, this `post_test_iam_permissions_with_metadata` interceptor runs after the + `post_test_iam_permissions` interceptor. The (possibly modified) response returned by + `post_test_iam_permissions` will be passed to + `post_test_iam_permissions_with_metadata`. 
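The `_add_cred_info_for_auth_errors` helper added to each client above only acts on 401/403/404 responses and only when the credentials expose `get_cred_info` (google-auth >= 2.35.0). A rough, self-contained illustration of that guard logic, using a stand-in credentials object rather than a real transport:

```python
# Illustration only: FakeCredentials is a stand-in, not a google-auth class.
import json
from http import HTTPStatus

from google.api_core import exceptions as core_exceptions


class FakeCredentials:
    def get_cred_info(self):
        return {"credential_type": "service account", "credential_source": "/path/to/key.json"}


def add_cred_info(error: core_exceptions.GoogleAPICallError, cred) -> None:
    # Only 401/403/404 responses get the extra detail.
    if error.code not in (HTTPStatus.UNAUTHORIZED, HTTPStatus.FORBIDDEN, HTTPStatus.NOT_FOUND):
        return
    if not hasattr(cred, "get_cred_info"):
        return  # older google-auth: nothing to add
    cred_info = cred.get_cred_info()
    if cred_info and hasattr(error._details, "append"):
        error._details.append(json.dumps(cred_info))


err = core_exceptions.Forbidden("permission denied", details=[])  # mutable details so append works
add_cred_info(err, FakeCredentials())
print(err._details)
```

With an older google-auth, or when `error._details` is an immutable tuple, the helper silently does nothing, which matches the `hasattr` checks in the generated code.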
+ """ + return response, metadata + @dataclasses.dataclass class ResourcePoliciesRestStub: @@ -573,6 +784,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_aggregated_list(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_aggregated_list_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -738,6 +953,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -888,6 +1107,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1053,6 +1274,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_iam_policy(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_iam_policy_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1224,6 +1449,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_insert(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_insert_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1368,6 +1597,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1537,6 +1768,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_patch(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_patch_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1708,6 +1943,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_set_iam_policy(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_set_iam_policy_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1859,6 +2098,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_test_iam_permissions(resp) + response_metadata = [(k, str(v)) for k, v in 
response.headers.items()] + resp, _ = self._interceptor.post_test_iam_permissions_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/routers/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/routers/client.py index 7c0d840508e2..31b2c8e2d0a9 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/routers/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/routers/client.py @@ -15,6 +15,8 @@ # from collections import OrderedDict import functools +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -459,6 +461,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/routers/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/routers/transports/rest.py index b93cd18f9006..7545ad578e44 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/routers/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/routers/transports/rest.py @@ -181,12 +181,35 @@ def post_aggregated_list( ) -> compute.RouterAggregatedList: """Post-rpc interceptor for aggregated_list - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_aggregated_list_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Routers server but before - it is returned to user code. + it is returned to user code. This `post_aggregated_list` interceptor runs + before the `post_aggregated_list_with_metadata` interceptor. """ return response + def post_aggregated_list_with_metadata( + self, + response: compute.RouterAggregatedList, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.RouterAggregatedList, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for aggregated_list + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Routers server but before it is returned to user code. + + We recommend only using this `post_aggregated_list_with_metadata` + interceptor in new development instead of the `post_aggregated_list` interceptor. + When both interceptors are used, this `post_aggregated_list_with_metadata` interceptor runs after the + `post_aggregated_list` interceptor. 
The (possibly modified) response returned by + `post_aggregated_list` will be passed to + `post_aggregated_list_with_metadata`. + """ + return response, metadata + def pre_delete( self, request: compute.DeleteRouterRequest, @@ -202,12 +225,35 @@ def pre_delete( def post_delete(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for delete - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Routers server but before - it is returned to user code. + it is returned to user code. This `post_delete` interceptor runs + before the `post_delete_with_metadata` interceptor. """ return response + def post_delete_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Routers server but before it is returned to user code. + + We recommend only using this `post_delete_with_metadata` + interceptor in new development instead of the `post_delete` interceptor. + When both interceptors are used, this `post_delete_with_metadata` interceptor runs after the + `post_delete` interceptor. The (possibly modified) response returned by + `post_delete` will be passed to + `post_delete_with_metadata`. + """ + return response, metadata + def pre_get( self, request: compute.GetRouterRequest, @@ -223,12 +269,35 @@ def pre_get( def post_get(self, response: compute.Router) -> compute.Router: """Post-rpc interceptor for get - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Routers server but before - it is returned to user code. + it is returned to user code. This `post_get` interceptor runs + before the `post_get_with_metadata` interceptor. """ return response + def post_get_with_metadata( + self, + response: compute.Router, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Router, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Routers server but before it is returned to user code. + + We recommend only using this `post_get_with_metadata` + interceptor in new development instead of the `post_get` interceptor. + When both interceptors are used, this `post_get_with_metadata` interceptor runs after the + `post_get` interceptor. The (possibly modified) response returned by + `post_get` will be passed to + `post_get_with_metadata`. + """ + return response, metadata + def pre_get_nat_ip_info( self, request: compute.GetNatIpInfoRouterRequest, @@ -248,12 +317,35 @@ def post_get_nat_ip_info( ) -> compute.NatIpInfoResponse: """Post-rpc interceptor for get_nat_ip_info - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_nat_ip_info_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Routers server but before - it is returned to user code. + it is returned to user code. 
This `post_get_nat_ip_info` interceptor runs + before the `post_get_nat_ip_info_with_metadata` interceptor. """ return response + def post_get_nat_ip_info_with_metadata( + self, + response: compute.NatIpInfoResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.NatIpInfoResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_nat_ip_info + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Routers server but before it is returned to user code. + + We recommend only using this `post_get_nat_ip_info_with_metadata` + interceptor in new development instead of the `post_get_nat_ip_info` interceptor. + When both interceptors are used, this `post_get_nat_ip_info_with_metadata` interceptor runs after the + `post_get_nat_ip_info` interceptor. The (possibly modified) response returned by + `post_get_nat_ip_info` will be passed to + `post_get_nat_ip_info_with_metadata`. + """ + return response, metadata + def pre_get_nat_mapping_info( self, request: compute.GetNatMappingInfoRoutersRequest, @@ -273,12 +365,37 @@ def post_get_nat_mapping_info( ) -> compute.VmEndpointNatMappingsList: """Post-rpc interceptor for get_nat_mapping_info - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_nat_mapping_info_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Routers server but before - it is returned to user code. + it is returned to user code. This `post_get_nat_mapping_info` interceptor runs + before the `post_get_nat_mapping_info_with_metadata` interceptor. """ return response + def post_get_nat_mapping_info_with_metadata( + self, + response: compute.VmEndpointNatMappingsList, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.VmEndpointNatMappingsList, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for get_nat_mapping_info + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Routers server but before it is returned to user code. + + We recommend only using this `post_get_nat_mapping_info_with_metadata` + interceptor in new development instead of the `post_get_nat_mapping_info` interceptor. + When both interceptors are used, this `post_get_nat_mapping_info_with_metadata` interceptor runs after the + `post_get_nat_mapping_info` interceptor. The (possibly modified) response returned by + `post_get_nat_mapping_info` will be passed to + `post_get_nat_mapping_info_with_metadata`. + """ + return response, metadata + def pre_get_router_status( self, request: compute.GetRouterStatusRouterRequest, @@ -298,12 +415,35 @@ def post_get_router_status( ) -> compute.RouterStatusResponse: """Post-rpc interceptor for get_router_status - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_router_status_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Routers server but before - it is returned to user code. + it is returned to user code. This `post_get_router_status` interceptor runs + before the `post_get_router_status_with_metadata` interceptor. 
""" return response + def post_get_router_status_with_metadata( + self, + response: compute.RouterStatusResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.RouterStatusResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_router_status + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Routers server but before it is returned to user code. + + We recommend only using this `post_get_router_status_with_metadata` + interceptor in new development instead of the `post_get_router_status` interceptor. + When both interceptors are used, this `post_get_router_status_with_metadata` interceptor runs after the + `post_get_router_status` interceptor. The (possibly modified) response returned by + `post_get_router_status` will be passed to + `post_get_router_status_with_metadata`. + """ + return response, metadata + def pre_insert( self, request: compute.InsertRouterRequest, @@ -319,12 +459,35 @@ def pre_insert( def post_insert(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for insert - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_insert_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Routers server but before - it is returned to user code. + it is returned to user code. This `post_insert` interceptor runs + before the `post_insert_with_metadata` interceptor. """ return response + def post_insert_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for insert + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Routers server but before it is returned to user code. + + We recommend only using this `post_insert_with_metadata` + interceptor in new development instead of the `post_insert` interceptor. + When both interceptors are used, this `post_insert_with_metadata` interceptor runs after the + `post_insert` interceptor. The (possibly modified) response returned by + `post_insert` will be passed to + `post_insert_with_metadata`. + """ + return response, metadata + def pre_list( self, request: compute.ListRoutersRequest, @@ -340,12 +503,35 @@ def pre_list( def post_list(self, response: compute.RouterList) -> compute.RouterList: """Post-rpc interceptor for list - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Routers server but before - it is returned to user code. + it is returned to user code. This `post_list` interceptor runs + before the `post_list_with_metadata` interceptor. """ return response + def post_list_with_metadata( + self, + response: compute.RouterList, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.RouterList, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Routers server but before it is returned to user code. + + We recommend only using this `post_list_with_metadata` + interceptor in new development instead of the `post_list` interceptor. 
+ When both interceptors are used, this `post_list_with_metadata` interceptor runs after the + `post_list` interceptor. The (possibly modified) response returned by + `post_list` will be passed to + `post_list_with_metadata`. + """ + return response, metadata + def pre_patch( self, request: compute.PatchRouterRequest, @@ -361,12 +547,35 @@ def pre_patch( def post_patch(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for patch - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_patch_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Routers server but before - it is returned to user code. + it is returned to user code. This `post_patch` interceptor runs + before the `post_patch_with_metadata` interceptor. """ return response + def post_patch_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for patch + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Routers server but before it is returned to user code. + + We recommend only using this `post_patch_with_metadata` + interceptor in new development instead of the `post_patch` interceptor. + When both interceptors are used, this `post_patch_with_metadata` interceptor runs after the + `post_patch` interceptor. The (possibly modified) response returned by + `post_patch` will be passed to + `post_patch_with_metadata`. + """ + return response, metadata + def pre_preview( self, request: compute.PreviewRouterRequest, @@ -384,12 +593,35 @@ def post_preview( ) -> compute.RoutersPreviewResponse: """Post-rpc interceptor for preview - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_preview_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Routers server but before - it is returned to user code. + it is returned to user code. This `post_preview` interceptor runs + before the `post_preview_with_metadata` interceptor. """ return response + def post_preview_with_metadata( + self, + response: compute.RoutersPreviewResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.RoutersPreviewResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for preview + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Routers server but before it is returned to user code. + + We recommend only using this `post_preview_with_metadata` + interceptor in new development instead of the `post_preview` interceptor. + When both interceptors are used, this `post_preview_with_metadata` interceptor runs after the + `post_preview` interceptor. The (possibly modified) response returned by + `post_preview` will be passed to + `post_preview_with_metadata`. + """ + return response, metadata + def pre_update( self, request: compute.UpdateRouterRequest, @@ -405,12 +637,35 @@ def pre_update( def post_update(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for update - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response after it is returned by the Routers server but before - it is returned to user code. + it is returned to user code. This `post_update` interceptor runs + before the `post_update_with_metadata` interceptor. """ return response + def post_update_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Routers server but before it is returned to user code. + + We recommend only using this `post_update_with_metadata` + interceptor in new development instead of the `post_update` interceptor. + When both interceptors are used, this `post_update_with_metadata` interceptor runs after the + `post_update` interceptor. The (possibly modified) response returned by + `post_update` will be passed to + `post_update_with_metadata`. + """ + return response, metadata + @dataclasses.dataclass class RoutersRestStub: @@ -625,6 +880,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_aggregated_list(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_aggregated_list_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -781,6 +1040,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -923,6 +1186,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1067,6 +1332,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_nat_ip_info(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_nat_ip_info_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1215,6 +1484,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_nat_mapping_info(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_nat_mapping_info_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1365,6 +1638,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_router_status(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_router_status_with_metadata( + resp, response_metadata + ) if 
CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1527,6 +1804,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_insert(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_insert_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1666,6 +1947,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1828,6 +2111,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_patch(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_patch_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1976,6 +2263,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_preview(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_preview_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2138,6 +2429,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/routes/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/routes/client.py index 90fd226d634e..5d4cdc17da12 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/routes/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/routes/client.py @@ -15,6 +15,8 @@ # from collections import OrderedDict import functools +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -459,6 +461,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. 
+ """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/routes/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/routes/transports/rest.py index a973f61e35ce..3c33ca12ef16 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/routes/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/routes/transports/rest.py @@ -121,12 +121,35 @@ def pre_delete( def post_delete(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for delete - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Routes server but before - it is returned to user code. + it is returned to user code. This `post_delete` interceptor runs + before the `post_delete_with_metadata` interceptor. """ return response + def post_delete_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Routes server but before it is returned to user code. + + We recommend only using this `post_delete_with_metadata` + interceptor in new development instead of the `post_delete` interceptor. + When both interceptors are used, this `post_delete_with_metadata` interceptor runs after the + `post_delete` interceptor. The (possibly modified) response returned by + `post_delete` will be passed to + `post_delete_with_metadata`. + """ + return response, metadata + def pre_get( self, request: compute.GetRouteRequest, @@ -142,12 +165,33 @@ def pre_get( def post_get(self, response: compute.Route) -> compute.Route: """Post-rpc interceptor for get - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Routes server but before - it is returned to user code. + it is returned to user code. This `post_get` interceptor runs + before the `post_get_with_metadata` interceptor. """ return response + def post_get_with_metadata( + self, response: compute.Route, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[compute.Route, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Routes server but before it is returned to user code. + + We recommend only using this `post_get_with_metadata` + interceptor in new development instead of the `post_get` interceptor. 
+ When both interceptors are used, this `post_get_with_metadata` interceptor runs after the + `post_get` interceptor. The (possibly modified) response returned by + `post_get` will be passed to + `post_get_with_metadata`. + """ + return response, metadata + def pre_insert( self, request: compute.InsertRouteRequest, @@ -163,12 +207,35 @@ def pre_insert( def post_insert(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for insert - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_insert_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Routes server but before - it is returned to user code. + it is returned to user code. This `post_insert` interceptor runs + before the `post_insert_with_metadata` interceptor. """ return response + def post_insert_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for insert + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Routes server but before it is returned to user code. + + We recommend only using this `post_insert_with_metadata` + interceptor in new development instead of the `post_insert` interceptor. + When both interceptors are used, this `post_insert_with_metadata` interceptor runs after the + `post_insert` interceptor. The (possibly modified) response returned by + `post_insert` will be passed to + `post_insert_with_metadata`. + """ + return response, metadata + def pre_list( self, request: compute.ListRoutesRequest, @@ -184,12 +251,35 @@ def pre_list( def post_list(self, response: compute.RouteList) -> compute.RouteList: """Post-rpc interceptor for list - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Routes server but before - it is returned to user code. + it is returned to user code. This `post_list` interceptor runs + before the `post_list_with_metadata` interceptor. """ return response + def post_list_with_metadata( + self, + response: compute.RouteList, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.RouteList, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Routes server but before it is returned to user code. + + We recommend only using this `post_list_with_metadata` + interceptor in new development instead of the `post_list` interceptor. + When both interceptors are used, this `post_list_with_metadata` interceptor runs after the + `post_list` interceptor. The (possibly modified) response returned by + `post_list` will be passed to + `post_list_with_metadata`. 
+ """ + return response, metadata + @dataclasses.dataclass class RoutesRestStub: @@ -414,6 +504,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -559,6 +653,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -721,6 +817,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_insert(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_insert_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -860,6 +960,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/security_policies/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/security_policies/client.py index 1ff21e572484..35edca7fdae9 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/security_policies/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/security_policies/client.py @@ -15,6 +15,8 @@ # from collections import OrderedDict import functools +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -461,6 +463,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. 
diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/security_policies/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/security_policies/transports/rest.py index bce161ff8039..e4e230a500a5 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/security_policies/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/security_policies/transports/rest.py @@ -187,12 +187,35 @@ def pre_add_rule( def post_add_rule(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for add_rule - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_add_rule_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the SecurityPolicies server but before - it is returned to user code. + it is returned to user code. This `post_add_rule` interceptor runs + before the `post_add_rule_with_metadata` interceptor. """ return response + def post_add_rule_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for add_rule + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SecurityPolicies server but before it is returned to user code. + + We recommend only using this `post_add_rule_with_metadata` + interceptor in new development instead of the `post_add_rule` interceptor. + When both interceptors are used, this `post_add_rule_with_metadata` interceptor runs after the + `post_add_rule` interceptor. The (possibly modified) response returned by + `post_add_rule` will be passed to + `post_add_rule_with_metadata`. + """ + return response, metadata + def pre_aggregated_list( self, request: compute.AggregatedListSecurityPoliciesRequest, @@ -213,12 +236,37 @@ def post_aggregated_list( ) -> compute.SecurityPoliciesAggregatedList: """Post-rpc interceptor for aggregated_list - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_aggregated_list_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the SecurityPolicies server but before - it is returned to user code. + it is returned to user code. This `post_aggregated_list` interceptor runs + before the `post_aggregated_list_with_metadata` interceptor. """ return response + def post_aggregated_list_with_metadata( + self, + response: compute.SecurityPoliciesAggregatedList, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.SecurityPoliciesAggregatedList, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for aggregated_list + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SecurityPolicies server but before it is returned to user code. + + We recommend only using this `post_aggregated_list_with_metadata` + interceptor in new development instead of the `post_aggregated_list` interceptor. + When both interceptors are used, this `post_aggregated_list_with_metadata` interceptor runs after the + `post_aggregated_list` interceptor. The (possibly modified) response returned by + `post_aggregated_list` will be passed to + `post_aggregated_list_with_metadata`. 
+ """ + return response, metadata + def pre_delete( self, request: compute.DeleteSecurityPolicyRequest, @@ -236,12 +284,35 @@ def pre_delete( def post_delete(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for delete - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the SecurityPolicies server but before - it is returned to user code. + it is returned to user code. This `post_delete` interceptor runs + before the `post_delete_with_metadata` interceptor. """ return response + def post_delete_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SecurityPolicies server but before it is returned to user code. + + We recommend only using this `post_delete_with_metadata` + interceptor in new development instead of the `post_delete` interceptor. + When both interceptors are used, this `post_delete_with_metadata` interceptor runs after the + `post_delete` interceptor. The (possibly modified) response returned by + `post_delete` will be passed to + `post_delete_with_metadata`. + """ + return response, metadata + def pre_get( self, request: compute.GetSecurityPolicyRequest, @@ -259,12 +330,35 @@ def pre_get( def post_get(self, response: compute.SecurityPolicy) -> compute.SecurityPolicy: """Post-rpc interceptor for get - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the SecurityPolicies server but before - it is returned to user code. + it is returned to user code. This `post_get` interceptor runs + before the `post_get_with_metadata` interceptor. """ return response + def post_get_with_metadata( + self, + response: compute.SecurityPolicy, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.SecurityPolicy, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SecurityPolicies server but before it is returned to user code. + + We recommend only using this `post_get_with_metadata` + interceptor in new development instead of the `post_get` interceptor. + When both interceptors are used, this `post_get_with_metadata` interceptor runs after the + `post_get` interceptor. The (possibly modified) response returned by + `post_get` will be passed to + `post_get_with_metadata`. + """ + return response, metadata + def pre_get_rule( self, request: compute.GetRuleSecurityPolicyRequest, @@ -284,12 +378,35 @@ def post_get_rule( ) -> compute.SecurityPolicyRule: """Post-rpc interceptor for get_rule - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_rule_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the SecurityPolicies server but before - it is returned to user code. + it is returned to user code. This `post_get_rule` interceptor runs + before the `post_get_rule_with_metadata` interceptor. 
""" return response + def post_get_rule_with_metadata( + self, + response: compute.SecurityPolicyRule, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.SecurityPolicyRule, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_rule + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SecurityPolicies server but before it is returned to user code. + + We recommend only using this `post_get_rule_with_metadata` + interceptor in new development instead of the `post_get_rule` interceptor. + When both interceptors are used, this `post_get_rule_with_metadata` interceptor runs after the + `post_get_rule` interceptor. The (possibly modified) response returned by + `post_get_rule` will be passed to + `post_get_rule_with_metadata`. + """ + return response, metadata + def pre_insert( self, request: compute.InsertSecurityPolicyRequest, @@ -307,12 +424,35 @@ def pre_insert( def post_insert(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for insert - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_insert_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the SecurityPolicies server but before - it is returned to user code. + it is returned to user code. This `post_insert` interceptor runs + before the `post_insert_with_metadata` interceptor. """ return response + def post_insert_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for insert + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SecurityPolicies server but before it is returned to user code. + + We recommend only using this `post_insert_with_metadata` + interceptor in new development instead of the `post_insert` interceptor. + When both interceptors are used, this `post_insert_with_metadata` interceptor runs after the + `post_insert` interceptor. The (possibly modified) response returned by + `post_insert` will be passed to + `post_insert_with_metadata`. + """ + return response, metadata + def pre_list( self, request: compute.ListSecurityPoliciesRequest, @@ -332,12 +472,35 @@ def post_list( ) -> compute.SecurityPolicyList: """Post-rpc interceptor for list - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the SecurityPolicies server but before - it is returned to user code. + it is returned to user code. This `post_list` interceptor runs + before the `post_list_with_metadata` interceptor. """ return response + def post_list_with_metadata( + self, + response: compute.SecurityPolicyList, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.SecurityPolicyList, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SecurityPolicies server but before it is returned to user code. + + We recommend only using this `post_list_with_metadata` + interceptor in new development instead of the `post_list` interceptor. 
+ When both interceptors are used, this `post_list_with_metadata` interceptor runs after the + `post_list` interceptor. The (possibly modified) response returned by + `post_list` will be passed to + `post_list_with_metadata`. + """ + return response, metadata + def pre_list_preconfigured_expression_sets( self, request: compute.ListPreconfiguredExpressionSetsSecurityPoliciesRequest, @@ -358,12 +521,38 @@ def post_list_preconfigured_expression_sets( ) -> compute.SecurityPoliciesListPreconfiguredExpressionSetsResponse: """Post-rpc interceptor for list_preconfigured_expression_sets - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_preconfigured_expression_sets_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the SecurityPolicies server but before - it is returned to user code. + it is returned to user code. This `post_list_preconfigured_expression_sets` interceptor runs + before the `post_list_preconfigured_expression_sets_with_metadata` interceptor. """ return response + def post_list_preconfigured_expression_sets_with_metadata( + self, + response: compute.SecurityPoliciesListPreconfiguredExpressionSetsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.SecurityPoliciesListPreconfiguredExpressionSetsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_preconfigured_expression_sets + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SecurityPolicies server but before it is returned to user code. + + We recommend only using this `post_list_preconfigured_expression_sets_with_metadata` + interceptor in new development instead of the `post_list_preconfigured_expression_sets` interceptor. + When both interceptors are used, this `post_list_preconfigured_expression_sets_with_metadata` interceptor runs after the + `post_list_preconfigured_expression_sets` interceptor. The (possibly modified) response returned by + `post_list_preconfigured_expression_sets` will be passed to + `post_list_preconfigured_expression_sets_with_metadata`. + """ + return response, metadata + def pre_patch( self, request: compute.PatchSecurityPolicyRequest, @@ -381,12 +570,35 @@ def pre_patch( def post_patch(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for patch - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_patch_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the SecurityPolicies server but before - it is returned to user code. + it is returned to user code. This `post_patch` interceptor runs + before the `post_patch_with_metadata` interceptor. """ return response + def post_patch_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for patch + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SecurityPolicies server but before it is returned to user code. + + We recommend only using this `post_patch_with_metadata` + interceptor in new development instead of the `post_patch` interceptor. + When both interceptors are used, this `post_patch_with_metadata` interceptor runs after the + `post_patch` interceptor. 
The (possibly modified) response returned by + `post_patch` will be passed to + `post_patch_with_metadata`. + """ + return response, metadata + def pre_patch_rule( self, request: compute.PatchRuleSecurityPolicyRequest, @@ -404,12 +616,35 @@ def pre_patch_rule( def post_patch_rule(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for patch_rule - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_patch_rule_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the SecurityPolicies server but before - it is returned to user code. + it is returned to user code. This `post_patch_rule` interceptor runs + before the `post_patch_rule_with_metadata` interceptor. """ return response + def post_patch_rule_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for patch_rule + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SecurityPolicies server but before it is returned to user code. + + We recommend only using this `post_patch_rule_with_metadata` + interceptor in new development instead of the `post_patch_rule` interceptor. + When both interceptors are used, this `post_patch_rule_with_metadata` interceptor runs after the + `post_patch_rule` interceptor. The (possibly modified) response returned by + `post_patch_rule` will be passed to + `post_patch_rule_with_metadata`. + """ + return response, metadata + def pre_remove_rule( self, request: compute.RemoveRuleSecurityPolicyRequest, @@ -427,12 +662,35 @@ def pre_remove_rule( def post_remove_rule(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for remove_rule - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_remove_rule_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the SecurityPolicies server but before - it is returned to user code. + it is returned to user code. This `post_remove_rule` interceptor runs + before the `post_remove_rule_with_metadata` interceptor. """ return response + def post_remove_rule_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for remove_rule + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SecurityPolicies server but before it is returned to user code. + + We recommend only using this `post_remove_rule_with_metadata` + interceptor in new development instead of the `post_remove_rule` interceptor. + When both interceptors are used, this `post_remove_rule_with_metadata` interceptor runs after the + `post_remove_rule` interceptor. The (possibly modified) response returned by + `post_remove_rule` will be passed to + `post_remove_rule_with_metadata`. + """ + return response, metadata + def pre_set_labels( self, request: compute.SetLabelsSecurityPolicyRequest, @@ -450,12 +708,35 @@ def pre_set_labels( def post_set_labels(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for set_labels - Override in a subclass to manipulate the response + DEPRECATED. 
Please use the `post_set_labels_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the SecurityPolicies server but before - it is returned to user code. + it is returned to user code. This `post_set_labels` interceptor runs + before the `post_set_labels_with_metadata` interceptor. """ return response + def post_set_labels_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for set_labels + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SecurityPolicies server but before it is returned to user code. + + We recommend only using this `post_set_labels_with_metadata` + interceptor in new development instead of the `post_set_labels` interceptor. + When both interceptors are used, this `post_set_labels_with_metadata` interceptor runs after the + `post_set_labels` interceptor. The (possibly modified) response returned by + `post_set_labels` will be passed to + `post_set_labels_with_metadata`. + """ + return response, metadata + @dataclasses.dataclass class SecurityPoliciesRestStub: @@ -695,6 +976,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_add_rule(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_add_rule_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -837,6 +1122,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_aggregated_list(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_aggregated_list_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1002,6 +1291,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1152,6 +1445,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1302,6 +1597,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_rule(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_rule_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1473,6 +1772,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_insert(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = 
self._interceptor.post_insert_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1617,6 +1920,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1768,6 +2073,13 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_preconfigured_expression_sets(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_list_preconfigured_expression_sets_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1939,6 +2251,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_patch(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_patch_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2104,6 +2420,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_patch_rule(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_patch_rule_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2263,6 +2583,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_remove_rule(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_remove_rule_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2428,6 +2752,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_set_labels(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_set_labels_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/service_attachments/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/service_attachments/client.py index 7c34bed46ae0..89457d93c343 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/service_attachments/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/service_attachments/client.py @@ -15,6 +15,8 @@ # from collections import OrderedDict import functools +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -461,6 +463,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. 
return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/service_attachments/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/service_attachments/transports/rest.py index 85c9177639c3..50181b41a59a 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/service_attachments/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/service_attachments/transports/rest.py @@ -166,12 +166,37 @@ def post_aggregated_list( ) -> compute.ServiceAttachmentAggregatedList: """Post-rpc interceptor for aggregated_list - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_aggregated_list_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ServiceAttachments server but before - it is returned to user code. + it is returned to user code. This `post_aggregated_list` interceptor runs + before the `post_aggregated_list_with_metadata` interceptor. """ return response + def post_aggregated_list_with_metadata( + self, + response: compute.ServiceAttachmentAggregatedList, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.ServiceAttachmentAggregatedList, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for aggregated_list + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ServiceAttachments server but before it is returned to user code. + + We recommend only using this `post_aggregated_list_with_metadata` + interceptor in new development instead of the `post_aggregated_list` interceptor. + When both interceptors are used, this `post_aggregated_list_with_metadata` interceptor runs after the + `post_aggregated_list` interceptor. The (possibly modified) response returned by + `post_aggregated_list` will be passed to + `post_aggregated_list_with_metadata`. + """ + return response, metadata + def pre_delete( self, request: compute.DeleteServiceAttachmentRequest, @@ -189,12 +214,35 @@ def pre_delete( def post_delete(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for delete - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ServiceAttachments server but before - it is returned to user code. + it is returned to user code. This `post_delete` interceptor runs + before the `post_delete_with_metadata` interceptor. 
""" return response + def post_delete_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ServiceAttachments server but before it is returned to user code. + + We recommend only using this `post_delete_with_metadata` + interceptor in new development instead of the `post_delete` interceptor. + When both interceptors are used, this `post_delete_with_metadata` interceptor runs after the + `post_delete` interceptor. The (possibly modified) response returned by + `post_delete` will be passed to + `post_delete_with_metadata`. + """ + return response, metadata + def pre_get( self, request: compute.GetServiceAttachmentRequest, @@ -214,12 +262,35 @@ def post_get( ) -> compute.ServiceAttachment: """Post-rpc interceptor for get - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ServiceAttachments server but before - it is returned to user code. + it is returned to user code. This `post_get` interceptor runs + before the `post_get_with_metadata` interceptor. """ return response + def post_get_with_metadata( + self, + response: compute.ServiceAttachment, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.ServiceAttachment, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ServiceAttachments server but before it is returned to user code. + + We recommend only using this `post_get_with_metadata` + interceptor in new development instead of the `post_get` interceptor. + When both interceptors are used, this `post_get_with_metadata` interceptor runs after the + `post_get` interceptor. The (possibly modified) response returned by + `post_get` will be passed to + `post_get_with_metadata`. + """ + return response, metadata + def pre_get_iam_policy( self, request: compute.GetIamPolicyServiceAttachmentRequest, @@ -238,12 +309,35 @@ def pre_get_iam_policy( def post_get_iam_policy(self, response: compute.Policy) -> compute.Policy: """Post-rpc interceptor for get_iam_policy - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_iam_policy_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ServiceAttachments server but before - it is returned to user code. + it is returned to user code. This `post_get_iam_policy` interceptor runs + before the `post_get_iam_policy_with_metadata` interceptor. """ return response + def post_get_iam_policy_with_metadata( + self, + response: compute.Policy, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Policy, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_iam_policy + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ServiceAttachments server but before it is returned to user code. + + We recommend only using this `post_get_iam_policy_with_metadata` + interceptor in new development instead of the `post_get_iam_policy` interceptor. 
+ When both interceptors are used, this `post_get_iam_policy_with_metadata` interceptor runs after the + `post_get_iam_policy` interceptor. The (possibly modified) response returned by + `post_get_iam_policy` will be passed to + `post_get_iam_policy_with_metadata`. + """ + return response, metadata + def pre_insert( self, request: compute.InsertServiceAttachmentRequest, @@ -261,12 +355,35 @@ def pre_insert( def post_insert(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for insert - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_insert_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ServiceAttachments server but before - it is returned to user code. + it is returned to user code. This `post_insert` interceptor runs + before the `post_insert_with_metadata` interceptor. """ return response + def post_insert_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for insert + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ServiceAttachments server but before it is returned to user code. + + We recommend only using this `post_insert_with_metadata` + interceptor in new development instead of the `post_insert` interceptor. + When both interceptors are used, this `post_insert_with_metadata` interceptor runs after the + `post_insert` interceptor. The (possibly modified) response returned by + `post_insert` will be passed to + `post_insert_with_metadata`. + """ + return response, metadata + def pre_list( self, request: compute.ListServiceAttachmentsRequest, @@ -286,12 +403,35 @@ def post_list( ) -> compute.ServiceAttachmentList: """Post-rpc interceptor for list - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ServiceAttachments server but before - it is returned to user code. + it is returned to user code. This `post_list` interceptor runs + before the `post_list_with_metadata` interceptor. """ return response + def post_list_with_metadata( + self, + response: compute.ServiceAttachmentList, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.ServiceAttachmentList, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ServiceAttachments server but before it is returned to user code. + + We recommend only using this `post_list_with_metadata` + interceptor in new development instead of the `post_list` interceptor. + When both interceptors are used, this `post_list_with_metadata` interceptor runs after the + `post_list` interceptor. The (possibly modified) response returned by + `post_list` will be passed to + `post_list_with_metadata`. + """ + return response, metadata + def pre_patch( self, request: compute.PatchServiceAttachmentRequest, @@ -309,12 +449,35 @@ def pre_patch( def post_patch(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for patch - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_patch_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response after it is returned by the ServiceAttachments server but before - it is returned to user code. + it is returned to user code. This `post_patch` interceptor runs + before the `post_patch_with_metadata` interceptor. """ return response + def post_patch_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for patch + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ServiceAttachments server but before it is returned to user code. + + We recommend only using this `post_patch_with_metadata` + interceptor in new development instead of the `post_patch` interceptor. + When both interceptors are used, this `post_patch_with_metadata` interceptor runs after the + `post_patch` interceptor. The (possibly modified) response returned by + `post_patch` will be passed to + `post_patch_with_metadata`. + """ + return response, metadata + def pre_set_iam_policy( self, request: compute.SetIamPolicyServiceAttachmentRequest, @@ -333,12 +496,35 @@ def pre_set_iam_policy( def post_set_iam_policy(self, response: compute.Policy) -> compute.Policy: """Post-rpc interceptor for set_iam_policy - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_set_iam_policy_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ServiceAttachments server but before - it is returned to user code. + it is returned to user code. This `post_set_iam_policy` interceptor runs + before the `post_set_iam_policy_with_metadata` interceptor. """ return response + def post_set_iam_policy_with_metadata( + self, + response: compute.Policy, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Policy, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for set_iam_policy + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ServiceAttachments server but before it is returned to user code. + + We recommend only using this `post_set_iam_policy_with_metadata` + interceptor in new development instead of the `post_set_iam_policy` interceptor. + When both interceptors are used, this `post_set_iam_policy_with_metadata` interceptor runs after the + `post_set_iam_policy` interceptor. The (possibly modified) response returned by + `post_set_iam_policy` will be passed to + `post_set_iam_policy_with_metadata`. + """ + return response, metadata + def pre_test_iam_permissions( self, request: compute.TestIamPermissionsServiceAttachmentRequest, @@ -359,12 +545,37 @@ def post_test_iam_permissions( ) -> compute.TestPermissionsResponse: """Post-rpc interceptor for test_iam_permissions - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_test_iam_permissions_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ServiceAttachments server but before - it is returned to user code. + it is returned to user code. This `post_test_iam_permissions` interceptor runs + before the `post_test_iam_permissions_with_metadata` interceptor. 
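To make the ordering described above concrete, here is a small, hypothetical interceptor subclass that overrides one of the new `_with_metadata` hooks. It is a sketch only: the base-class name `ServiceAttachmentsRestInterceptor` and the import path are assumptions inferred from the generated module layout, and the usual step of passing an instance to the REST transport is not shown in this hunk.

# Hypothetical example; the base-class name and import path are assumptions,
# not lines taken from this diff.
from typing import Sequence, Tuple, Union

from google.cloud import compute_v1
from google.cloud.compute_v1.services.service_attachments.transports import rest


class HeaderLoggingInterceptor(rest.ServiceAttachmentsRestInterceptor):
    def post_get_with_metadata(
        self,
        response: compute_v1.ServiceAttachment,
        metadata: Sequence[Tuple[str, Union[str, bytes]]],
    ) -> Tuple[compute_v1.ServiceAttachment, Sequence[Tuple[str, Union[str, bytes]]]]:
        # `metadata` carries the HTTP response headers as (name, value) pairs;
        # returning both values unchanged keeps the default behaviour.
        for name, value in metadata:
            print(f"{name}: {value!r}")
        return response, metadata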
""" return response + def post_test_iam_permissions_with_metadata( + self, + response: compute.TestPermissionsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.TestPermissionsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for test_iam_permissions + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ServiceAttachments server but before it is returned to user code. + + We recommend only using this `post_test_iam_permissions_with_metadata` + interceptor in new development instead of the `post_test_iam_permissions` interceptor. + When both interceptors are used, this `post_test_iam_permissions_with_metadata` interceptor runs after the + `post_test_iam_permissions` interceptor. The (possibly modified) response returned by + `post_test_iam_permissions` will be passed to + `post_test_iam_permissions_with_metadata`. + """ + return response, metadata + @dataclasses.dataclass class ServiceAttachmentsRestStub: @@ -578,6 +789,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_aggregated_list(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_aggregated_list_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -741,6 +956,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -895,6 +1114,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1061,6 +1282,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_iam_policy(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_iam_policy_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1230,6 +1455,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_insert(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_insert_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1376,6 +1605,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1547,6 +1778,10 @@ def __call__( json_format.Parse(response.content, pb_resp, 
ignore_unknown_fields=True) resp = self._interceptor.post_patch(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_patch_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1719,6 +1954,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_set_iam_policy(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_set_iam_policy_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1872,6 +2111,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_test_iam_permissions(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_test_iam_permissions_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/snapshot_settings_service/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/snapshot_settings_service/client.py index 39f1664f6682..9d8d6fe1f05d 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/snapshot_settings_service/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/snapshot_settings_service/client.py @@ -15,6 +15,8 @@ # from collections import OrderedDict import functools +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -462,6 +464,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/snapshot_settings_service/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/snapshot_settings_service/transports/rest.py index 5411c1eb79f5..ce9ddecc0120 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/snapshot_settings_service/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/snapshot_settings_service/transports/rest.py @@ -107,12 +107,35 @@ def pre_get( def post_get(self, response: compute.SnapshotSettings) -> compute.SnapshotSettings: """Post-rpc interceptor for get - Override in a subclass to manipulate the response + DEPRECATED. 
Please use the `post_get_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the SnapshotSettingsService server but before - it is returned to user code. + it is returned to user code. This `post_get` interceptor runs + before the `post_get_with_metadata` interceptor. """ return response + def post_get_with_metadata( + self, + response: compute.SnapshotSettings, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.SnapshotSettings, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SnapshotSettingsService server but before it is returned to user code. + + We recommend only using this `post_get_with_metadata` + interceptor in new development instead of the `post_get` interceptor. + When both interceptors are used, this `post_get_with_metadata` interceptor runs after the + `post_get` interceptor. The (possibly modified) response returned by + `post_get` will be passed to + `post_get_with_metadata`. + """ + return response, metadata + def pre_patch( self, request: compute.PatchSnapshotSettingRequest, @@ -130,12 +153,35 @@ def pre_patch( def post_patch(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for patch - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_patch_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the SnapshotSettingsService server but before - it is returned to user code. + it is returned to user code. This `post_patch` interceptor runs + before the `post_patch_with_metadata` interceptor. """ return response + def post_patch_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for patch + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SnapshotSettingsService server but before it is returned to user code. + + We recommend only using this `post_patch_with_metadata` + interceptor in new development instead of the `post_patch` interceptor. + When both interceptors are used, this `post_patch_with_metadata` interceptor runs after the + `post_patch` interceptor. The (possibly modified) response returned by + `post_patch` will be passed to + `post_patch_with_metadata`. 
+ """ + return response, metadata + @dataclasses.dataclass class SnapshotSettingsServiceRestStub: @@ -347,6 +393,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -513,6 +561,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_patch(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_patch_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/snapshots/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/snapshots/client.py index 676dd0a725a7..5077976166b4 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/snapshots/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/snapshots/client.py @@ -15,6 +15,8 @@ # from collections import OrderedDict import functools +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -459,6 +461,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/snapshots/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/snapshots/transports/rest.py index 80fa5237ebbe..42eb09d3128a 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/snapshots/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/snapshots/transports/rest.py @@ -153,12 +153,35 @@ def pre_delete( def post_delete(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for delete - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Snapshots server but before - it is returned to user code. + it is returned to user code. This `post_delete` interceptor runs + before the `post_delete_with_metadata` interceptor. 
""" return response + def post_delete_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Snapshots server but before it is returned to user code. + + We recommend only using this `post_delete_with_metadata` + interceptor in new development instead of the `post_delete` interceptor. + When both interceptors are used, this `post_delete_with_metadata` interceptor runs after the + `post_delete` interceptor. The (possibly modified) response returned by + `post_delete` will be passed to + `post_delete_with_metadata`. + """ + return response, metadata + def pre_get( self, request: compute.GetSnapshotRequest, @@ -174,12 +197,35 @@ def pre_get( def post_get(self, response: compute.Snapshot) -> compute.Snapshot: """Post-rpc interceptor for get - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Snapshots server but before - it is returned to user code. + it is returned to user code. This `post_get` interceptor runs + before the `post_get_with_metadata` interceptor. """ return response + def post_get_with_metadata( + self, + response: compute.Snapshot, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Snapshot, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Snapshots server but before it is returned to user code. + + We recommend only using this `post_get_with_metadata` + interceptor in new development instead of the `post_get` interceptor. + When both interceptors are used, this `post_get_with_metadata` interceptor runs after the + `post_get` interceptor. The (possibly modified) response returned by + `post_get` will be passed to + `post_get_with_metadata`. + """ + return response, metadata + def pre_get_iam_policy( self, request: compute.GetIamPolicySnapshotRequest, @@ -197,12 +243,35 @@ def pre_get_iam_policy( def post_get_iam_policy(self, response: compute.Policy) -> compute.Policy: """Post-rpc interceptor for get_iam_policy - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_iam_policy_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Snapshots server but before - it is returned to user code. + it is returned to user code. This `post_get_iam_policy` interceptor runs + before the `post_get_iam_policy_with_metadata` interceptor. """ return response + def post_get_iam_policy_with_metadata( + self, + response: compute.Policy, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Policy, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_iam_policy + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Snapshots server but before it is returned to user code. + + We recommend only using this `post_get_iam_policy_with_metadata` + interceptor in new development instead of the `post_get_iam_policy` interceptor. 
+ When both interceptors are used, this `post_get_iam_policy_with_metadata` interceptor runs after the + `post_get_iam_policy` interceptor. The (possibly modified) response returned by + `post_get_iam_policy` will be passed to + `post_get_iam_policy_with_metadata`. + """ + return response, metadata + def pre_insert( self, request: compute.InsertSnapshotRequest, @@ -218,12 +287,35 @@ def pre_insert( def post_insert(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for insert - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_insert_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Snapshots server but before - it is returned to user code. + it is returned to user code. This `post_insert` interceptor runs + before the `post_insert_with_metadata` interceptor. """ return response + def post_insert_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for insert + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Snapshots server but before it is returned to user code. + + We recommend only using this `post_insert_with_metadata` + interceptor in new development instead of the `post_insert` interceptor. + When both interceptors are used, this `post_insert_with_metadata` interceptor runs after the + `post_insert` interceptor. The (possibly modified) response returned by + `post_insert` will be passed to + `post_insert_with_metadata`. + """ + return response, metadata + def pre_list( self, request: compute.ListSnapshotsRequest, @@ -239,12 +331,35 @@ def pre_list( def post_list(self, response: compute.SnapshotList) -> compute.SnapshotList: """Post-rpc interceptor for list - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Snapshots server but before - it is returned to user code. + it is returned to user code. This `post_list` interceptor runs + before the `post_list_with_metadata` interceptor. """ return response + def post_list_with_metadata( + self, + response: compute.SnapshotList, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.SnapshotList, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Snapshots server but before it is returned to user code. + + We recommend only using this `post_list_with_metadata` + interceptor in new development instead of the `post_list` interceptor. + When both interceptors are used, this `post_list_with_metadata` interceptor runs after the + `post_list` interceptor. The (possibly modified) response returned by + `post_list` will be passed to + `post_list_with_metadata`. + """ + return response, metadata + def pre_set_iam_policy( self, request: compute.SetIamPolicySnapshotRequest, @@ -262,12 +377,35 @@ def pre_set_iam_policy( def post_set_iam_policy(self, response: compute.Policy) -> compute.Policy: """Post-rpc interceptor for set_iam_policy - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_set_iam_policy_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response after it is returned by the Snapshots server but before - it is returned to user code. + it is returned to user code. This `post_set_iam_policy` interceptor runs + before the `post_set_iam_policy_with_metadata` interceptor. """ return response + def post_set_iam_policy_with_metadata( + self, + response: compute.Policy, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Policy, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for set_iam_policy + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Snapshots server but before it is returned to user code. + + We recommend only using this `post_set_iam_policy_with_metadata` + interceptor in new development instead of the `post_set_iam_policy` interceptor. + When both interceptors are used, this `post_set_iam_policy_with_metadata` interceptor runs after the + `post_set_iam_policy` interceptor. The (possibly modified) response returned by + `post_set_iam_policy` will be passed to + `post_set_iam_policy_with_metadata`. + """ + return response, metadata + def pre_set_labels( self, request: compute.SetLabelsSnapshotRequest, @@ -285,12 +423,35 @@ def pre_set_labels( def post_set_labels(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for set_labels - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_set_labels_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Snapshots server but before - it is returned to user code. + it is returned to user code. This `post_set_labels` interceptor runs + before the `post_set_labels_with_metadata` interceptor. """ return response + def post_set_labels_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for set_labels + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Snapshots server but before it is returned to user code. + + We recommend only using this `post_set_labels_with_metadata` + interceptor in new development instead of the `post_set_labels` interceptor. + When both interceptors are used, this `post_set_labels_with_metadata` interceptor runs after the + `post_set_labels` interceptor. The (possibly modified) response returned by + `post_set_labels` will be passed to + `post_set_labels_with_metadata`. + """ + return response, metadata + def pre_test_iam_permissions( self, request: compute.TestIamPermissionsSnapshotRequest, @@ -311,12 +472,37 @@ def post_test_iam_permissions( ) -> compute.TestPermissionsResponse: """Post-rpc interceptor for test_iam_permissions - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_test_iam_permissions_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Snapshots server but before - it is returned to user code. + it is returned to user code. This `post_test_iam_permissions` interceptor runs + before the `post_test_iam_permissions_with_metadata` interceptor. 
""" return response + def post_test_iam_permissions_with_metadata( + self, + response: compute.TestPermissionsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.TestPermissionsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for test_iam_permissions + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Snapshots server but before it is returned to user code. + + We recommend only using this `post_test_iam_permissions_with_metadata` + interceptor in new development instead of the `post_test_iam_permissions` interceptor. + When both interceptors are used, this `post_test_iam_permissions_with_metadata` interceptor runs after the + `post_test_iam_permissions` interceptor. The (possibly modified) response returned by + `post_test_iam_permissions` will be passed to + `post_test_iam_permissions_with_metadata`. + """ + return response, metadata + @dataclasses.dataclass class SnapshotsRestStub: @@ -544,6 +730,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -688,6 +878,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -857,6 +1049,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_iam_policy(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_iam_policy_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1022,6 +1218,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_insert(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_insert_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1163,6 +1363,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1338,6 +1540,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_set_iam_policy(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_set_iam_policy_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1505,6 +1711,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = 
self._interceptor.post_set_labels(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_set_labels_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1655,6 +1865,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_test_iam_permissions(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_test_iam_permissions_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/ssl_certificates/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/ssl_certificates/client.py index 7da7fb9a85b2..b946857dd507 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/ssl_certificates/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/ssl_certificates/client.py @@ -15,6 +15,8 @@ # from collections import OrderedDict import functools +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -461,6 +463,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/ssl_certificates/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/ssl_certificates/transports/rest.py index 8728a4510312..834f3ac3760a 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/ssl_certificates/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/ssl_certificates/transports/rest.py @@ -134,12 +134,37 @@ def post_aggregated_list( ) -> compute.SslCertificateAggregatedList: """Post-rpc interceptor for aggregated_list - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_aggregated_list_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the SslCertificates server but before - it is returned to user code. + it is returned to user code. This `post_aggregated_list` interceptor runs + before the `post_aggregated_list_with_metadata` interceptor. 
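As the `__call__` hunks above show, the metadata handed to these new interceptors is simply the HTTP response headers converted to string pairs, and the transport keeps only the (possibly modified) response (`resp, _ = ...`). A minimal sketch of that conversion, using a made-up headers mapping rather than a real response object:

# Illustrative only: `headers` stands in for response.headers from the hunks above.
headers = {"content-type": "application/json; charset=UTF-8", "x-example-request-id": 12345}
response_metadata = [(k, str(v)) for k, v in headers.items()]
# -> [("content-type", "application/json; charset=UTF-8"), ("x-example-request-id", "12345")]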
""" return response + def post_aggregated_list_with_metadata( + self, + response: compute.SslCertificateAggregatedList, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.SslCertificateAggregatedList, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for aggregated_list + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SslCertificates server but before it is returned to user code. + + We recommend only using this `post_aggregated_list_with_metadata` + interceptor in new development instead of the `post_aggregated_list` interceptor. + When both interceptors are used, this `post_aggregated_list_with_metadata` interceptor runs after the + `post_aggregated_list` interceptor. The (possibly modified) response returned by + `post_aggregated_list` will be passed to + `post_aggregated_list_with_metadata`. + """ + return response, metadata + def pre_delete( self, request: compute.DeleteSslCertificateRequest, @@ -157,12 +182,35 @@ def pre_delete( def post_delete(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for delete - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the SslCertificates server but before - it is returned to user code. + it is returned to user code. This `post_delete` interceptor runs + before the `post_delete_with_metadata` interceptor. """ return response + def post_delete_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SslCertificates server but before it is returned to user code. + + We recommend only using this `post_delete_with_metadata` + interceptor in new development instead of the `post_delete` interceptor. + When both interceptors are used, this `post_delete_with_metadata` interceptor runs after the + `post_delete` interceptor. The (possibly modified) response returned by + `post_delete` will be passed to + `post_delete_with_metadata`. + """ + return response, metadata + def pre_get( self, request: compute.GetSslCertificateRequest, @@ -180,12 +228,35 @@ def pre_get( def post_get(self, response: compute.SslCertificate) -> compute.SslCertificate: """Post-rpc interceptor for get - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the SslCertificates server but before - it is returned to user code. + it is returned to user code. This `post_get` interceptor runs + before the `post_get_with_metadata` interceptor. """ return response + def post_get_with_metadata( + self, + response: compute.SslCertificate, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.SslCertificate, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SslCertificates server but before it is returned to user code. 
+ + We recommend only using this `post_get_with_metadata` + interceptor in new development instead of the `post_get` interceptor. + When both interceptors are used, this `post_get_with_metadata` interceptor runs after the + `post_get` interceptor. The (possibly modified) response returned by + `post_get` will be passed to + `post_get_with_metadata`. + """ + return response, metadata + def pre_insert( self, request: compute.InsertSslCertificateRequest, @@ -203,12 +274,35 @@ def pre_insert( def post_insert(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for insert - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_insert_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the SslCertificates server but before - it is returned to user code. + it is returned to user code. This `post_insert` interceptor runs + before the `post_insert_with_metadata` interceptor. """ return response + def post_insert_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for insert + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SslCertificates server but before it is returned to user code. + + We recommend only using this `post_insert_with_metadata` + interceptor in new development instead of the `post_insert` interceptor. + When both interceptors are used, this `post_insert_with_metadata` interceptor runs after the + `post_insert` interceptor. The (possibly modified) response returned by + `post_insert` will be passed to + `post_insert_with_metadata`. + """ + return response, metadata + def pre_list( self, request: compute.ListSslCertificatesRequest, @@ -228,12 +322,35 @@ def post_list( ) -> compute.SslCertificateList: """Post-rpc interceptor for list - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the SslCertificates server but before - it is returned to user code. + it is returned to user code. This `post_list` interceptor runs + before the `post_list_with_metadata` interceptor. """ return response + def post_list_with_metadata( + self, + response: compute.SslCertificateList, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.SslCertificateList, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SslCertificates server but before it is returned to user code. + + We recommend only using this `post_list_with_metadata` + interceptor in new development instead of the `post_list` interceptor. + When both interceptors are used, this `post_list_with_metadata` interceptor runs after the + `post_list` interceptor. The (possibly modified) response returned by + `post_list` will be passed to + `post_list_with_metadata`. 
+ """ + return response, metadata + @dataclasses.dataclass class SslCertificatesRestStub: @@ -444,6 +561,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_aggregated_list(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_aggregated_list_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -609,6 +730,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -773,6 +898,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -942,6 +1069,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_insert(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_insert_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1088,6 +1219,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/ssl_policies/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/ssl_policies/client.py index c5e3a107dc01..e64b10fd0680 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/ssl_policies/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/ssl_policies/client.py @@ -15,6 +15,8 @@ # from collections import OrderedDict import functools +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -459,6 +461,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. 
+ """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/ssl_policies/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/ssl_policies/transports/rest.py index b04302bfa91d..41136ae161b1 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/ssl_policies/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/ssl_policies/transports/rest.py @@ -150,12 +150,37 @@ def post_aggregated_list( ) -> compute.SslPoliciesAggregatedList: """Post-rpc interceptor for aggregated_list - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_aggregated_list_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the SslPolicies server but before - it is returned to user code. + it is returned to user code. This `post_aggregated_list` interceptor runs + before the `post_aggregated_list_with_metadata` interceptor. """ return response + def post_aggregated_list_with_metadata( + self, + response: compute.SslPoliciesAggregatedList, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.SslPoliciesAggregatedList, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for aggregated_list + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SslPolicies server but before it is returned to user code. + + We recommend only using this `post_aggregated_list_with_metadata` + interceptor in new development instead of the `post_aggregated_list` interceptor. + When both interceptors are used, this `post_aggregated_list_with_metadata` interceptor runs after the + `post_aggregated_list` interceptor. The (possibly modified) response returned by + `post_aggregated_list` will be passed to + `post_aggregated_list_with_metadata`. + """ + return response, metadata + def pre_delete( self, request: compute.DeleteSslPolicyRequest, @@ -171,12 +196,35 @@ def pre_delete( def post_delete(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for delete - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the SslPolicies server but before - it is returned to user code. + it is returned to user code. This `post_delete` interceptor runs + before the `post_delete_with_metadata` interceptor. 
""" return response + def post_delete_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SslPolicies server but before it is returned to user code. + + We recommend only using this `post_delete_with_metadata` + interceptor in new development instead of the `post_delete` interceptor. + When both interceptors are used, this `post_delete_with_metadata` interceptor runs after the + `post_delete` interceptor. The (possibly modified) response returned by + `post_delete` will be passed to + `post_delete_with_metadata`. + """ + return response, metadata + def pre_get( self, request: compute.GetSslPolicyRequest, @@ -192,12 +240,35 @@ def pre_get( def post_get(self, response: compute.SslPolicy) -> compute.SslPolicy: """Post-rpc interceptor for get - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the SslPolicies server but before - it is returned to user code. + it is returned to user code. This `post_get` interceptor runs + before the `post_get_with_metadata` interceptor. """ return response + def post_get_with_metadata( + self, + response: compute.SslPolicy, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.SslPolicy, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SslPolicies server but before it is returned to user code. + + We recommend only using this `post_get_with_metadata` + interceptor in new development instead of the `post_get` interceptor. + When both interceptors are used, this `post_get_with_metadata` interceptor runs after the + `post_get` interceptor. The (possibly modified) response returned by + `post_get` will be passed to + `post_get_with_metadata`. + """ + return response, metadata + def pre_insert( self, request: compute.InsertSslPolicyRequest, @@ -213,12 +284,35 @@ def pre_insert( def post_insert(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for insert - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_insert_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the SslPolicies server but before - it is returned to user code. + it is returned to user code. This `post_insert` interceptor runs + before the `post_insert_with_metadata` interceptor. """ return response + def post_insert_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for insert + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SslPolicies server but before it is returned to user code. + + We recommend only using this `post_insert_with_metadata` + interceptor in new development instead of the `post_insert` interceptor. + When both interceptors are used, this `post_insert_with_metadata` interceptor runs after the + `post_insert` interceptor. 
The (possibly modified) response returned by + `post_insert` will be passed to + `post_insert_with_metadata`. + """ + return response, metadata + def pre_list( self, request: compute.ListSslPoliciesRequest, @@ -234,12 +328,35 @@ def pre_list( def post_list(self, response: compute.SslPoliciesList) -> compute.SslPoliciesList: """Post-rpc interceptor for list - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the SslPolicies server but before - it is returned to user code. + it is returned to user code. This `post_list` interceptor runs + before the `post_list_with_metadata` interceptor. """ return response + def post_list_with_metadata( + self, + response: compute.SslPoliciesList, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.SslPoliciesList, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SslPolicies server but before it is returned to user code. + + We recommend only using this `post_list_with_metadata` + interceptor in new development instead of the `post_list` interceptor. + When both interceptors are used, this `post_list_with_metadata` interceptor runs after the + `post_list` interceptor. The (possibly modified) response returned by + `post_list` will be passed to + `post_list_with_metadata`. + """ + return response, metadata + def pre_list_available_features( self, request: compute.ListAvailableFeaturesSslPoliciesRequest, @@ -260,12 +377,38 @@ def post_list_available_features( ) -> compute.SslPoliciesListAvailableFeaturesResponse: """Post-rpc interceptor for list_available_features - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_available_features_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the SslPolicies server but before - it is returned to user code. + it is returned to user code. This `post_list_available_features` interceptor runs + before the `post_list_available_features_with_metadata` interceptor. """ return response + def post_list_available_features_with_metadata( + self, + response: compute.SslPoliciesListAvailableFeaturesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.SslPoliciesListAvailableFeaturesResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_available_features + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SslPolicies server but before it is returned to user code. + + We recommend only using this `post_list_available_features_with_metadata` + interceptor in new development instead of the `post_list_available_features` interceptor. + When both interceptors are used, this `post_list_available_features_with_metadata` interceptor runs after the + `post_list_available_features` interceptor. The (possibly modified) response returned by + `post_list_available_features` will be passed to + `post_list_available_features_with_metadata`. 
+ """ + return response, metadata + def pre_patch( self, request: compute.PatchSslPolicyRequest, @@ -281,12 +424,35 @@ def pre_patch( def post_patch(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for patch - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_patch_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the SslPolicies server but before - it is returned to user code. + it is returned to user code. This `post_patch` interceptor runs + before the `post_patch_with_metadata` interceptor. """ return response + def post_patch_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for patch + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SslPolicies server but before it is returned to user code. + + We recommend only using this `post_patch_with_metadata` + interceptor in new development instead of the `post_patch` interceptor. + When both interceptors are used, this `post_patch_with_metadata` interceptor runs after the + `post_patch` interceptor. The (possibly modified) response returned by + `post_patch` will be passed to + `post_patch_with_metadata`. + """ + return response, metadata + @dataclasses.dataclass class SslPoliciesRestStub: @@ -497,6 +663,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_aggregated_list(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_aggregated_list_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -658,6 +828,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -807,6 +981,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -972,6 +1148,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_insert(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_insert_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1114,6 +1294,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1258,6 +1440,10 @@ def __call__( 
json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_available_features(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_available_features_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1427,6 +1613,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_patch(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_patch_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/storage_pool_types/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/storage_pool_types/client.py index 8f46a1cc808f..5fd50e41385b 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/storage_pool_types/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/storage_pool_types/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -458,6 +460,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/storage_pool_types/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/storage_pool_types/transports/rest.py index 20d3fe744250..b2eb67322cbe 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/storage_pool_types/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/storage_pool_types/transports/rest.py @@ -118,12 +118,37 @@ def post_aggregated_list( ) -> compute.StoragePoolTypeAggregatedList: """Post-rpc interceptor for aggregated_list - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_aggregated_list_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the StoragePoolTypes server but before - it is returned to user code. + it is returned to user code. This `post_aggregated_list` interceptor runs + before the `post_aggregated_list_with_metadata` interceptor. 
""" return response + def post_aggregated_list_with_metadata( + self, + response: compute.StoragePoolTypeAggregatedList, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.StoragePoolTypeAggregatedList, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for aggregated_list + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the StoragePoolTypes server but before it is returned to user code. + + We recommend only using this `post_aggregated_list_with_metadata` + interceptor in new development instead of the `post_aggregated_list` interceptor. + When both interceptors are used, this `post_aggregated_list_with_metadata` interceptor runs after the + `post_aggregated_list` interceptor. The (possibly modified) response returned by + `post_aggregated_list` will be passed to + `post_aggregated_list_with_metadata`. + """ + return response, metadata + def pre_get( self, request: compute.GetStoragePoolTypeRequest, @@ -141,12 +166,35 @@ def pre_get( def post_get(self, response: compute.StoragePoolType) -> compute.StoragePoolType: """Post-rpc interceptor for get - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the StoragePoolTypes server but before - it is returned to user code. + it is returned to user code. This `post_get` interceptor runs + before the `post_get_with_metadata` interceptor. """ return response + def post_get_with_metadata( + self, + response: compute.StoragePoolType, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.StoragePoolType, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the StoragePoolTypes server but before it is returned to user code. + + We recommend only using this `post_get_with_metadata` + interceptor in new development instead of the `post_get` interceptor. + When both interceptors are used, this `post_get_with_metadata` interceptor runs after the + `post_get` interceptor. The (possibly modified) response returned by + `post_get` will be passed to + `post_get_with_metadata`. + """ + return response, metadata + def pre_list( self, request: compute.ListStoragePoolTypesRequest, @@ -166,12 +214,35 @@ def post_list( ) -> compute.StoragePoolTypeList: """Post-rpc interceptor for list - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the StoragePoolTypes server but before - it is returned to user code. + it is returned to user code. This `post_list` interceptor runs + before the `post_list_with_metadata` interceptor. """ return response + def post_list_with_metadata( + self, + response: compute.StoragePoolTypeList, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.StoragePoolTypeList, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the StoragePoolTypes server but before it is returned to user code. + + We recommend only using this `post_list_with_metadata` + interceptor in new development instead of the `post_list` interceptor. 
+ When both interceptors are used, this `post_list_with_metadata` interceptor runs after the + `post_list` interceptor. The (possibly modified) response returned by + `post_list` will be passed to + `post_list_with_metadata`. + """ + return response, metadata + @dataclasses.dataclass class StoragePoolTypesRestStub: @@ -382,6 +453,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_aggregated_list(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_aggregated_list_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -528,6 +603,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -674,6 +751,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/storage_pools/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/storage_pools/client.py index 8668935c421f..e14b8e19cbc4 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/storage_pools/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/storage_pools/client.py @@ -15,6 +15,8 @@ # from collections import OrderedDict import functools +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -459,6 +461,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. 
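Taken together, the hunks above follow one pattern per RPC: each existing `post_<rpc>` interceptor is marked deprecated in favor of a new `post_<rpc>_with_metadata` hook, and the REST `__call__` path now collects the HTTP response headers into `(key, str(value))` pairs and forwards them through that hook. A minimal sketch of how a caller might consume the new hook is below; the `StoragePoolTypesRestInterceptor` base-class name and the `from google.cloud.compute_v1.types import compute` import are assumptions based on the usual GAPIC REST transport layout and are not spelled out verbatim in these hunks.

from typing import Sequence, Tuple, Union

# Assumed import path and class name: the interceptor base class lives in the
# transports/rest.py module touched above, but its definition is outside these hunks.
from google.cloud.compute_v1.services.storage_pool_types.transports.rest import (
    StoragePoolTypesRestInterceptor,
)
from google.cloud.compute_v1.types import compute


class HeaderLoggingInterceptor(StoragePoolTypesRestInterceptor):
    """Hypothetical subclass that reads response headers via the new hook."""

    def post_list_with_metadata(
        self,
        response: compute.StoragePoolTypeList,
        metadata: Sequence[Tuple[str, Union[str, bytes]]],
    ) -> Tuple[compute.StoragePoolTypeList, Sequence[Tuple[str, Union[str, bytes]]]]:
        # `metadata` holds the HTTP response headers gathered in __call__.
        for key, value in metadata:
            if key.lower() == "x-goog-request-id":  # header name chosen for illustration only
                print(f"list request id: {value}")
        # Return both values so later layers keep seeing the (possibly modified) pair.
        return response, metadata

Because `post_list` still runs first and its return value feeds `post_list_with_metadata`, existing subclasses keep working unchanged while new code can migrate to the metadata-aware hook.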
diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/storage_pools/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/storage_pools/transports/rest.py index e191b4b2723f..f2c7a03ee96e 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/storage_pools/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/storage_pools/transports/rest.py @@ -174,12 +174,37 @@ def post_aggregated_list( ) -> compute.StoragePoolAggregatedList: """Post-rpc interceptor for aggregated_list - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_aggregated_list_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the StoragePools server but before - it is returned to user code. + it is returned to user code. This `post_aggregated_list` interceptor runs + before the `post_aggregated_list_with_metadata` interceptor. """ return response + def post_aggregated_list_with_metadata( + self, + response: compute.StoragePoolAggregatedList, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.StoragePoolAggregatedList, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for aggregated_list + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the StoragePools server but before it is returned to user code. + + We recommend only using this `post_aggregated_list_with_metadata` + interceptor in new development instead of the `post_aggregated_list` interceptor. + When both interceptors are used, this `post_aggregated_list_with_metadata` interceptor runs after the + `post_aggregated_list` interceptor. The (possibly modified) response returned by + `post_aggregated_list` will be passed to + `post_aggregated_list_with_metadata`. + """ + return response, metadata + def pre_delete( self, request: compute.DeleteStoragePoolRequest, @@ -197,12 +222,35 @@ def pre_delete( def post_delete(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for delete - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the StoragePools server but before - it is returned to user code. + it is returned to user code. This `post_delete` interceptor runs + before the `post_delete_with_metadata` interceptor. """ return response + def post_delete_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the StoragePools server but before it is returned to user code. + + We recommend only using this `post_delete_with_metadata` + interceptor in new development instead of the `post_delete` interceptor. + When both interceptors are used, this `post_delete_with_metadata` interceptor runs after the + `post_delete` interceptor. The (possibly modified) response returned by + `post_delete` will be passed to + `post_delete_with_metadata`. 
+ """ + return response, metadata + def pre_get( self, request: compute.GetStoragePoolRequest, @@ -218,12 +266,35 @@ def pre_get( def post_get(self, response: compute.StoragePool) -> compute.StoragePool: """Post-rpc interceptor for get - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the StoragePools server but before - it is returned to user code. + it is returned to user code. This `post_get` interceptor runs + before the `post_get_with_metadata` interceptor. """ return response + def post_get_with_metadata( + self, + response: compute.StoragePool, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.StoragePool, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the StoragePools server but before it is returned to user code. + + We recommend only using this `post_get_with_metadata` + interceptor in new development instead of the `post_get` interceptor. + When both interceptors are used, this `post_get_with_metadata` interceptor runs after the + `post_get` interceptor. The (possibly modified) response returned by + `post_get` will be passed to + `post_get_with_metadata`. + """ + return response, metadata + def pre_get_iam_policy( self, request: compute.GetIamPolicyStoragePoolRequest, @@ -241,12 +312,35 @@ def pre_get_iam_policy( def post_get_iam_policy(self, response: compute.Policy) -> compute.Policy: """Post-rpc interceptor for get_iam_policy - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_iam_policy_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the StoragePools server but before - it is returned to user code. + it is returned to user code. This `post_get_iam_policy` interceptor runs + before the `post_get_iam_policy_with_metadata` interceptor. """ return response + def post_get_iam_policy_with_metadata( + self, + response: compute.Policy, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Policy, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_iam_policy + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the StoragePools server but before it is returned to user code. + + We recommend only using this `post_get_iam_policy_with_metadata` + interceptor in new development instead of the `post_get_iam_policy` interceptor. + When both interceptors are used, this `post_get_iam_policy_with_metadata` interceptor runs after the + `post_get_iam_policy` interceptor. The (possibly modified) response returned by + `post_get_iam_policy` will be passed to + `post_get_iam_policy_with_metadata`. + """ + return response, metadata + def pre_insert( self, request: compute.InsertStoragePoolRequest, @@ -264,12 +358,35 @@ def pre_insert( def post_insert(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for insert - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_insert_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the StoragePools server but before - it is returned to user code. + it is returned to user code. 
This `post_insert` interceptor runs + before the `post_insert_with_metadata` interceptor. """ return response + def post_insert_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for insert + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the StoragePools server but before it is returned to user code. + + We recommend only using this `post_insert_with_metadata` + interceptor in new development instead of the `post_insert` interceptor. + When both interceptors are used, this `post_insert_with_metadata` interceptor runs after the + `post_insert` interceptor. The (possibly modified) response returned by + `post_insert` will be passed to + `post_insert_with_metadata`. + """ + return response, metadata + def pre_list( self, request: compute.ListStoragePoolsRequest, @@ -287,12 +404,35 @@ def pre_list( def post_list(self, response: compute.StoragePoolList) -> compute.StoragePoolList: """Post-rpc interceptor for list - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the StoragePools server but before - it is returned to user code. + it is returned to user code. This `post_list` interceptor runs + before the `post_list_with_metadata` interceptor. """ return response + def post_list_with_metadata( + self, + response: compute.StoragePoolList, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.StoragePoolList, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the StoragePools server but before it is returned to user code. + + We recommend only using this `post_list_with_metadata` + interceptor in new development instead of the `post_list` interceptor. + When both interceptors are used, this `post_list_with_metadata` interceptor runs after the + `post_list` interceptor. The (possibly modified) response returned by + `post_list` will be passed to + `post_list_with_metadata`. + """ + return response, metadata + def pre_list_disks( self, request: compute.ListDisksStoragePoolsRequest, @@ -312,12 +452,35 @@ def post_list_disks( ) -> compute.StoragePoolListDisks: """Post-rpc interceptor for list_disks - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_disks_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the StoragePools server but before - it is returned to user code. + it is returned to user code. This `post_list_disks` interceptor runs + before the `post_list_disks_with_metadata` interceptor. """ return response + def post_list_disks_with_metadata( + self, + response: compute.StoragePoolListDisks, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.StoragePoolListDisks, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_disks + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the StoragePools server but before it is returned to user code. + + We recommend only using this `post_list_disks_with_metadata` + interceptor in new development instead of the `post_list_disks` interceptor. 
+ When both interceptors are used, this `post_list_disks_with_metadata` interceptor runs after the + `post_list_disks` interceptor. The (possibly modified) response returned by + `post_list_disks` will be passed to + `post_list_disks_with_metadata`. + """ + return response, metadata + def pre_set_iam_policy( self, request: compute.SetIamPolicyStoragePoolRequest, @@ -335,12 +498,35 @@ def pre_set_iam_policy( def post_set_iam_policy(self, response: compute.Policy) -> compute.Policy: """Post-rpc interceptor for set_iam_policy - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_set_iam_policy_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the StoragePools server but before - it is returned to user code. + it is returned to user code. This `post_set_iam_policy` interceptor runs + before the `post_set_iam_policy_with_metadata` interceptor. """ return response + def post_set_iam_policy_with_metadata( + self, + response: compute.Policy, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Policy, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for set_iam_policy + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the StoragePools server but before it is returned to user code. + + We recommend only using this `post_set_iam_policy_with_metadata` + interceptor in new development instead of the `post_set_iam_policy` interceptor. + When both interceptors are used, this `post_set_iam_policy_with_metadata` interceptor runs after the + `post_set_iam_policy` interceptor. The (possibly modified) response returned by + `post_set_iam_policy` will be passed to + `post_set_iam_policy_with_metadata`. + """ + return response, metadata + def pre_test_iam_permissions( self, request: compute.TestIamPermissionsStoragePoolRequest, @@ -361,12 +547,37 @@ def post_test_iam_permissions( ) -> compute.TestPermissionsResponse: """Post-rpc interceptor for test_iam_permissions - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_test_iam_permissions_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the StoragePools server but before - it is returned to user code. + it is returned to user code. This `post_test_iam_permissions` interceptor runs + before the `post_test_iam_permissions_with_metadata` interceptor. """ return response + def post_test_iam_permissions_with_metadata( + self, + response: compute.TestPermissionsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.TestPermissionsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for test_iam_permissions + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the StoragePools server but before it is returned to user code. + + We recommend only using this `post_test_iam_permissions_with_metadata` + interceptor in new development instead of the `post_test_iam_permissions` interceptor. + When both interceptors are used, this `post_test_iam_permissions_with_metadata` interceptor runs after the + `post_test_iam_permissions` interceptor. The (possibly modified) response returned by + `post_test_iam_permissions` will be passed to + `post_test_iam_permissions_with_metadata`. 
+ """ + return response, metadata + def pre_update( self, request: compute.UpdateStoragePoolRequest, @@ -384,12 +595,35 @@ def pre_update( def post_update(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for update - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the StoragePools server but before - it is returned to user code. + it is returned to user code. This `post_update` interceptor runs + before the `post_update_with_metadata` interceptor. """ return response + def post_update_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the StoragePools server but before it is returned to user code. + + We recommend only using this `post_update_with_metadata` + interceptor in new development instead of the `post_update` interceptor. + When both interceptors are used, this `post_update_with_metadata` interceptor runs after the + `post_update` interceptor. The (possibly modified) response returned by + `post_update` will be passed to + `post_update_with_metadata`. + """ + return response, metadata + @dataclasses.dataclass class StoragePoolsRestStub: @@ -600,6 +834,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_aggregated_list(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_aggregated_list_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -763,6 +1001,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -907,6 +1149,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1074,6 +1318,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_iam_policy(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_iam_policy_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1241,6 +1489,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_insert(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_insert_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO 
COVER @@ -1383,6 +1635,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1529,6 +1783,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_disks(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_disks_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1704,6 +1962,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_set_iam_policy(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_set_iam_policy_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1854,6 +2116,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_test_iam_permissions(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_test_iam_permissions_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2021,6 +2287,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/subnetworks/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/subnetworks/client.py index 530fef9cea2d..9557c121d6b1 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/subnetworks/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/subnetworks/client.py @@ -15,6 +15,8 @@ # from collections import OrderedDict import functools +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -459,6 +461,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. 
+ """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/subnetworks/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/subnetworks/transports/rest.py index 227303005d92..ef4ba50e6027 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/subnetworks/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/subnetworks/transports/rest.py @@ -190,12 +190,37 @@ def post_aggregated_list( ) -> compute.SubnetworkAggregatedList: """Post-rpc interceptor for aggregated_list - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_aggregated_list_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Subnetworks server but before - it is returned to user code. + it is returned to user code. This `post_aggregated_list` interceptor runs + before the `post_aggregated_list_with_metadata` interceptor. """ return response + def post_aggregated_list_with_metadata( + self, + response: compute.SubnetworkAggregatedList, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.SubnetworkAggregatedList, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for aggregated_list + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Subnetworks server but before it is returned to user code. + + We recommend only using this `post_aggregated_list_with_metadata` + interceptor in new development instead of the `post_aggregated_list` interceptor. + When both interceptors are used, this `post_aggregated_list_with_metadata` interceptor runs after the + `post_aggregated_list` interceptor. The (possibly modified) response returned by + `post_aggregated_list` will be passed to + `post_aggregated_list_with_metadata`. + """ + return response, metadata + def pre_delete( self, request: compute.DeleteSubnetworkRequest, @@ -213,12 +238,35 @@ def pre_delete( def post_delete(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for delete - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Subnetworks server but before - it is returned to user code. + it is returned to user code. This `post_delete` interceptor runs + before the `post_delete_with_metadata` interceptor. 
""" return response + def post_delete_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Subnetworks server but before it is returned to user code. + + We recommend only using this `post_delete_with_metadata` + interceptor in new development instead of the `post_delete` interceptor. + When both interceptors are used, this `post_delete_with_metadata` interceptor runs after the + `post_delete` interceptor. The (possibly modified) response returned by + `post_delete` will be passed to + `post_delete_with_metadata`. + """ + return response, metadata + def pre_expand_ip_cidr_range( self, request: compute.ExpandIpCidrRangeSubnetworkRequest, @@ -239,12 +287,35 @@ def post_expand_ip_cidr_range( ) -> compute.Operation: """Post-rpc interceptor for expand_ip_cidr_range - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_expand_ip_cidr_range_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Subnetworks server but before - it is returned to user code. + it is returned to user code. This `post_expand_ip_cidr_range` interceptor runs + before the `post_expand_ip_cidr_range_with_metadata` interceptor. """ return response + def post_expand_ip_cidr_range_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for expand_ip_cidr_range + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Subnetworks server but before it is returned to user code. + + We recommend only using this `post_expand_ip_cidr_range_with_metadata` + interceptor in new development instead of the `post_expand_ip_cidr_range` interceptor. + When both interceptors are used, this `post_expand_ip_cidr_range_with_metadata` interceptor runs after the + `post_expand_ip_cidr_range` interceptor. The (possibly modified) response returned by + `post_expand_ip_cidr_range` will be passed to + `post_expand_ip_cidr_range_with_metadata`. + """ + return response, metadata + def pre_get( self, request: compute.GetSubnetworkRequest, @@ -260,12 +331,35 @@ def pre_get( def post_get(self, response: compute.Subnetwork) -> compute.Subnetwork: """Post-rpc interceptor for get - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Subnetworks server but before - it is returned to user code. + it is returned to user code. This `post_get` interceptor runs + before the `post_get_with_metadata` interceptor. """ return response + def post_get_with_metadata( + self, + response: compute.Subnetwork, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Subnetwork, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Subnetworks server but before it is returned to user code. 
+ + We recommend only using this `post_get_with_metadata` + interceptor in new development instead of the `post_get` interceptor. + When both interceptors are used, this `post_get_with_metadata` interceptor runs after the + `post_get` interceptor. The (possibly modified) response returned by + `post_get` will be passed to + `post_get_with_metadata`. + """ + return response, metadata + def pre_get_iam_policy( self, request: compute.GetIamPolicySubnetworkRequest, @@ -283,12 +377,35 @@ def pre_get_iam_policy( def post_get_iam_policy(self, response: compute.Policy) -> compute.Policy: """Post-rpc interceptor for get_iam_policy - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_iam_policy_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Subnetworks server but before - it is returned to user code. + it is returned to user code. This `post_get_iam_policy` interceptor runs + before the `post_get_iam_policy_with_metadata` interceptor. """ return response + def post_get_iam_policy_with_metadata( + self, + response: compute.Policy, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Policy, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_iam_policy + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Subnetworks server but before it is returned to user code. + + We recommend only using this `post_get_iam_policy_with_metadata` + interceptor in new development instead of the `post_get_iam_policy` interceptor. + When both interceptors are used, this `post_get_iam_policy_with_metadata` interceptor runs after the + `post_get_iam_policy` interceptor. The (possibly modified) response returned by + `post_get_iam_policy` will be passed to + `post_get_iam_policy_with_metadata`. + """ + return response, metadata + def pre_insert( self, request: compute.InsertSubnetworkRequest, @@ -306,12 +423,35 @@ def pre_insert( def post_insert(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for insert - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_insert_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Subnetworks server but before - it is returned to user code. + it is returned to user code. This `post_insert` interceptor runs + before the `post_insert_with_metadata` interceptor. """ return response + def post_insert_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for insert + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Subnetworks server but before it is returned to user code. + + We recommend only using this `post_insert_with_metadata` + interceptor in new development instead of the `post_insert` interceptor. + When both interceptors are used, this `post_insert_with_metadata` interceptor runs after the + `post_insert` interceptor. The (possibly modified) response returned by + `post_insert` will be passed to + `post_insert_with_metadata`. 
+ """ + return response, metadata + def pre_list( self, request: compute.ListSubnetworksRequest, @@ -327,12 +467,35 @@ def pre_list( def post_list(self, response: compute.SubnetworkList) -> compute.SubnetworkList: """Post-rpc interceptor for list - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Subnetworks server but before - it is returned to user code. + it is returned to user code. This `post_list` interceptor runs + before the `post_list_with_metadata` interceptor. """ return response + def post_list_with_metadata( + self, + response: compute.SubnetworkList, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.SubnetworkList, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Subnetworks server but before it is returned to user code. + + We recommend only using this `post_list_with_metadata` + interceptor in new development instead of the `post_list` interceptor. + When both interceptors are used, this `post_list_with_metadata` interceptor runs after the + `post_list` interceptor. The (possibly modified) response returned by + `post_list` will be passed to + `post_list_with_metadata`. + """ + return response, metadata + def pre_list_usable( self, request: compute.ListUsableSubnetworksRequest, @@ -352,12 +515,37 @@ def post_list_usable( ) -> compute.UsableSubnetworksAggregatedList: """Post-rpc interceptor for list_usable - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_usable_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Subnetworks server but before - it is returned to user code. + it is returned to user code. This `post_list_usable` interceptor runs + before the `post_list_usable_with_metadata` interceptor. """ return response + def post_list_usable_with_metadata( + self, + response: compute.UsableSubnetworksAggregatedList, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.UsableSubnetworksAggregatedList, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_usable + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Subnetworks server but before it is returned to user code. + + We recommend only using this `post_list_usable_with_metadata` + interceptor in new development instead of the `post_list_usable` interceptor. + When both interceptors are used, this `post_list_usable_with_metadata` interceptor runs after the + `post_list_usable` interceptor. The (possibly modified) response returned by + `post_list_usable` will be passed to + `post_list_usable_with_metadata`. + """ + return response, metadata + def pre_patch( self, request: compute.PatchSubnetworkRequest, @@ -373,12 +561,35 @@ def pre_patch( def post_patch(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for patch - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_patch_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Subnetworks server but before - it is returned to user code. + it is returned to user code. 
This `post_patch` interceptor runs + before the `post_patch_with_metadata` interceptor. """ return response + def post_patch_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for patch + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Subnetworks server but before it is returned to user code. + + We recommend only using this `post_patch_with_metadata` + interceptor in new development instead of the `post_patch` interceptor. + When both interceptors are used, this `post_patch_with_metadata` interceptor runs after the + `post_patch` interceptor. The (possibly modified) response returned by + `post_patch` will be passed to + `post_patch_with_metadata`. + """ + return response, metadata + def pre_set_iam_policy( self, request: compute.SetIamPolicySubnetworkRequest, @@ -396,12 +607,35 @@ def pre_set_iam_policy( def post_set_iam_policy(self, response: compute.Policy) -> compute.Policy: """Post-rpc interceptor for set_iam_policy - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_set_iam_policy_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Subnetworks server but before - it is returned to user code. + it is returned to user code. This `post_set_iam_policy` interceptor runs + before the `post_set_iam_policy_with_metadata` interceptor. """ return response + def post_set_iam_policy_with_metadata( + self, + response: compute.Policy, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Policy, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for set_iam_policy + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Subnetworks server but before it is returned to user code. + + We recommend only using this `post_set_iam_policy_with_metadata` + interceptor in new development instead of the `post_set_iam_policy` interceptor. + When both interceptors are used, this `post_set_iam_policy_with_metadata` interceptor runs after the + `post_set_iam_policy` interceptor. The (possibly modified) response returned by + `post_set_iam_policy` will be passed to + `post_set_iam_policy_with_metadata`. + """ + return response, metadata + def pre_set_private_ip_google_access( self, request: compute.SetPrivateIpGoogleAccessSubnetworkRequest, @@ -422,12 +656,35 @@ def post_set_private_ip_google_access( ) -> compute.Operation: """Post-rpc interceptor for set_private_ip_google_access - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_set_private_ip_google_access_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Subnetworks server but before - it is returned to user code. + it is returned to user code. This `post_set_private_ip_google_access` interceptor runs + before the `post_set_private_ip_google_access_with_metadata` interceptor. 
""" return response + def post_set_private_ip_google_access_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for set_private_ip_google_access + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Subnetworks server but before it is returned to user code. + + We recommend only using this `post_set_private_ip_google_access_with_metadata` + interceptor in new development instead of the `post_set_private_ip_google_access` interceptor. + When both interceptors are used, this `post_set_private_ip_google_access_with_metadata` interceptor runs after the + `post_set_private_ip_google_access` interceptor. The (possibly modified) response returned by + `post_set_private_ip_google_access` will be passed to + `post_set_private_ip_google_access_with_metadata`. + """ + return response, metadata + def pre_test_iam_permissions( self, request: compute.TestIamPermissionsSubnetworkRequest, @@ -448,12 +705,37 @@ def post_test_iam_permissions( ) -> compute.TestPermissionsResponse: """Post-rpc interceptor for test_iam_permissions - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_test_iam_permissions_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Subnetworks server but before - it is returned to user code. + it is returned to user code. This `post_test_iam_permissions` interceptor runs + before the `post_test_iam_permissions_with_metadata` interceptor. """ return response + def post_test_iam_permissions_with_metadata( + self, + response: compute.TestPermissionsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.TestPermissionsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for test_iam_permissions + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Subnetworks server but before it is returned to user code. + + We recommend only using this `post_test_iam_permissions_with_metadata` + interceptor in new development instead of the `post_test_iam_permissions` interceptor. + When both interceptors are used, this `post_test_iam_permissions_with_metadata` interceptor runs after the + `post_test_iam_permissions` interceptor. The (possibly modified) response returned by + `post_test_iam_permissions` will be passed to + `post_test_iam_permissions_with_metadata`. 
+ """ + return response, metadata + @dataclasses.dataclass class SubnetworksRestStub: @@ -664,6 +946,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_aggregated_list(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_aggregated_list_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -825,6 +1111,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -992,6 +1282,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_expand_ip_cidr_range(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_expand_ip_cidr_range_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1141,6 +1435,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1310,6 +1606,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_iam_policy(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_iam_policy_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1475,6 +1775,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_insert(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_insert_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1619,6 +1923,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1765,6 +2071,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_usable(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_usable_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1932,6 +2242,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_patch(resp) + response_metadata = [(k, str(v)) for k, v in 
response.headers.items()] + resp, _ = self._interceptor.post_patch_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2109,6 +2423,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_set_iam_policy(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_set_iam_policy_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2277,6 +2595,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_set_private_ip_google_access(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_set_private_ip_google_access_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2427,6 +2749,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_test_iam_permissions(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_test_iam_permissions_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/target_grpc_proxies/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/target_grpc_proxies/client.py index 63234e4dc5b6..1b811866b90e 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/target_grpc_proxies/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/target_grpc_proxies/client.py @@ -15,6 +15,8 @@ # from collections import OrderedDict import functools +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -461,6 +463,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. 
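The `_add_cred_info_for_auth_errors` helper repeated across these client.py files does the same thing everywhere: for 401/403/404 responses it asks the transport's credentials for `get_cred_info()` (available in google-auth>=2.35.0) and, if the error's `_details` supports `append`, attaches the credential info as a JSON string. Below is a standalone sketch of that decision logic under stated assumptions; the `FakeCredentials` stand-in and its sample fields are illustrative, not part of the generated code, and the call sites that invoke the helper on caught errors live elsewhere in this change.

import json
from http import HTTPStatus

from google.api_core import exceptions as core_exceptions


class FakeCredentials:
    """Stand-in for a google-auth credential that exposes get_cred_info()."""

    def get_cred_info(self):
        # Field names are illustrative; real values come from google-auth>=2.35.0.
        return {"credential_source": "/path/to/adc.json", "credential_type": "user credentials"}


# Build a 403 with a list for details so the helper's hasattr(..., "append") check passes.
error = core_exceptions.Forbidden("permission denied", details=[])
cred = FakeCredentials()

# Mirrors the generated helper: only 401/403/404 errors are annotated.
if error.code in (HTTPStatus.UNAUTHORIZED, HTTPStatus.FORBIDDEN, HTTPStatus.NOT_FOUND):
    if hasattr(cred, "get_cred_info"):
        cred_info = cred.get_cred_info()
        if cred_info and hasattr(error._details, "append"):
            error._details.append(json.dumps(cred_info))

print(error._details)  # the credential info now rides along in the error details

For any other status code the helper returns early, so non-auth errors are untouched.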
diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/target_grpc_proxies/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/target_grpc_proxies/transports/rest.py index 21bd690dd631..6f440caa4fbb 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/target_grpc_proxies/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/target_grpc_proxies/transports/rest.py @@ -131,12 +131,35 @@ def pre_delete( def post_delete(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for delete - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the TargetGrpcProxies server but before - it is returned to user code. + it is returned to user code. This `post_delete` interceptor runs + before the `post_delete_with_metadata` interceptor. """ return response + def post_delete_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the TargetGrpcProxies server but before it is returned to user code. + + We recommend only using this `post_delete_with_metadata` + interceptor in new development instead of the `post_delete` interceptor. + When both interceptors are used, this `post_delete_with_metadata` interceptor runs after the + `post_delete` interceptor. The (possibly modified) response returned by + `post_delete` will be passed to + `post_delete_with_metadata`. + """ + return response, metadata + def pre_get( self, request: compute.GetTargetGrpcProxyRequest, @@ -154,12 +177,35 @@ def pre_get( def post_get(self, response: compute.TargetGrpcProxy) -> compute.TargetGrpcProxy: """Post-rpc interceptor for get - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the TargetGrpcProxies server but before - it is returned to user code. + it is returned to user code. This `post_get` interceptor runs + before the `post_get_with_metadata` interceptor. """ return response + def post_get_with_metadata( + self, + response: compute.TargetGrpcProxy, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.TargetGrpcProxy, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the TargetGrpcProxies server but before it is returned to user code. + + We recommend only using this `post_get_with_metadata` + interceptor in new development instead of the `post_get` interceptor. + When both interceptors are used, this `post_get_with_metadata` interceptor runs after the + `post_get` interceptor. The (possibly modified) response returned by + `post_get` will be passed to + `post_get_with_metadata`. 
+ """ + return response, metadata + def pre_insert( self, request: compute.InsertTargetGrpcProxyRequest, @@ -177,12 +223,35 @@ def pre_insert( def post_insert(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for insert - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_insert_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the TargetGrpcProxies server but before - it is returned to user code. + it is returned to user code. This `post_insert` interceptor runs + before the `post_insert_with_metadata` interceptor. """ return response + def post_insert_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for insert + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the TargetGrpcProxies server but before it is returned to user code. + + We recommend only using this `post_insert_with_metadata` + interceptor in new development instead of the `post_insert` interceptor. + When both interceptors are used, this `post_insert_with_metadata` interceptor runs after the + `post_insert` interceptor. The (possibly modified) response returned by + `post_insert` will be passed to + `post_insert_with_metadata`. + """ + return response, metadata + def pre_list( self, request: compute.ListTargetGrpcProxiesRequest, @@ -202,12 +271,35 @@ def post_list( ) -> compute.TargetGrpcProxyList: """Post-rpc interceptor for list - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the TargetGrpcProxies server but before - it is returned to user code. + it is returned to user code. This `post_list` interceptor runs + before the `post_list_with_metadata` interceptor. """ return response + def post_list_with_metadata( + self, + response: compute.TargetGrpcProxyList, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.TargetGrpcProxyList, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the TargetGrpcProxies server but before it is returned to user code. + + We recommend only using this `post_list_with_metadata` + interceptor in new development instead of the `post_list` interceptor. + When both interceptors are used, this `post_list_with_metadata` interceptor runs after the + `post_list` interceptor. The (possibly modified) response returned by + `post_list` will be passed to + `post_list_with_metadata`. + """ + return response, metadata + def pre_patch( self, request: compute.PatchTargetGrpcProxyRequest, @@ -225,12 +317,35 @@ def pre_patch( def post_patch(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for patch - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_patch_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the TargetGrpcProxies server but before - it is returned to user code. + it is returned to user code. This `post_patch` interceptor runs + before the `post_patch_with_metadata` interceptor. 
""" return response + def post_patch_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for patch + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the TargetGrpcProxies server but before it is returned to user code. + + We recommend only using this `post_patch_with_metadata` + interceptor in new development instead of the `post_patch` interceptor. + When both interceptors are used, this `post_patch_with_metadata` interceptor runs after the + `post_patch` interceptor. The (possibly modified) response returned by + `post_patch` will be passed to + `post_patch_with_metadata`. + """ + return response, metadata + @dataclasses.dataclass class TargetGrpcProxiesRestStub: @@ -462,6 +577,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -613,6 +732,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -784,6 +905,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_insert(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_insert_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -930,6 +1055,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1101,6 +1228,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_patch(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_patch_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/target_http_proxies/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/target_http_proxies/client.py index 4c667adfc271..4fb5cc3e28fb 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/target_http_proxies/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/target_http_proxies/client.py @@ -15,6 +15,8 @@ # from collections import OrderedDict import functools +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -461,6 +463,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe 
validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/target_http_proxies/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/target_http_proxies/transports/rest.py index 612fc456e2b8..27761c7a1490 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/target_http_proxies/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/target_http_proxies/transports/rest.py @@ -150,12 +150,37 @@ def post_aggregated_list( ) -> compute.TargetHttpProxyAggregatedList: """Post-rpc interceptor for aggregated_list - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_aggregated_list_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the TargetHttpProxies server but before - it is returned to user code. + it is returned to user code. This `post_aggregated_list` interceptor runs + before the `post_aggregated_list_with_metadata` interceptor. """ return response + def post_aggregated_list_with_metadata( + self, + response: compute.TargetHttpProxyAggregatedList, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.TargetHttpProxyAggregatedList, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for aggregated_list + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the TargetHttpProxies server but before it is returned to user code. + + We recommend only using this `post_aggregated_list_with_metadata` + interceptor in new development instead of the `post_aggregated_list` interceptor. + When both interceptors are used, this `post_aggregated_list_with_metadata` interceptor runs after the + `post_aggregated_list` interceptor. The (possibly modified) response returned by + `post_aggregated_list` will be passed to + `post_aggregated_list_with_metadata`. + """ + return response, metadata + def pre_delete( self, request: compute.DeleteTargetHttpProxyRequest, @@ -173,12 +198,35 @@ def pre_delete( def post_delete(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for delete - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the TargetHttpProxies server but before - it is returned to user code. + it is returned to user code. 
This `post_delete` interceptor runs + before the `post_delete_with_metadata` interceptor. """ return response + def post_delete_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the TargetHttpProxies server but before it is returned to user code. + + We recommend only using this `post_delete_with_metadata` + interceptor in new development instead of the `post_delete` interceptor. + When both interceptors are used, this `post_delete_with_metadata` interceptor runs after the + `post_delete` interceptor. The (possibly modified) response returned by + `post_delete` will be passed to + `post_delete_with_metadata`. + """ + return response, metadata + def pre_get( self, request: compute.GetTargetHttpProxyRequest, @@ -196,12 +244,35 @@ def pre_get( def post_get(self, response: compute.TargetHttpProxy) -> compute.TargetHttpProxy: """Post-rpc interceptor for get - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the TargetHttpProxies server but before - it is returned to user code. + it is returned to user code. This `post_get` interceptor runs + before the `post_get_with_metadata` interceptor. """ return response + def post_get_with_metadata( + self, + response: compute.TargetHttpProxy, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.TargetHttpProxy, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the TargetHttpProxies server but before it is returned to user code. + + We recommend only using this `post_get_with_metadata` + interceptor in new development instead of the `post_get` interceptor. + When both interceptors are used, this `post_get_with_metadata` interceptor runs after the + `post_get` interceptor. The (possibly modified) response returned by + `post_get` will be passed to + `post_get_with_metadata`. + """ + return response, metadata + def pre_insert( self, request: compute.InsertTargetHttpProxyRequest, @@ -219,12 +290,35 @@ def pre_insert( def post_insert(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for insert - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_insert_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the TargetHttpProxies server but before - it is returned to user code. + it is returned to user code. This `post_insert` interceptor runs + before the `post_insert_with_metadata` interceptor. """ return response + def post_insert_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for insert + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the TargetHttpProxies server but before it is returned to user code. + + We recommend only using this `post_insert_with_metadata` + interceptor in new development instead of the `post_insert` interceptor. 
+ When both interceptors are used, this `post_insert_with_metadata` interceptor runs after the + `post_insert` interceptor. The (possibly modified) response returned by + `post_insert` will be passed to + `post_insert_with_metadata`. + """ + return response, metadata + def pre_list( self, request: compute.ListTargetHttpProxiesRequest, @@ -244,12 +338,35 @@ def post_list( ) -> compute.TargetHttpProxyList: """Post-rpc interceptor for list - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the TargetHttpProxies server but before - it is returned to user code. + it is returned to user code. This `post_list` interceptor runs + before the `post_list_with_metadata` interceptor. """ return response + def post_list_with_metadata( + self, + response: compute.TargetHttpProxyList, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.TargetHttpProxyList, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the TargetHttpProxies server but before it is returned to user code. + + We recommend only using this `post_list_with_metadata` + interceptor in new development instead of the `post_list` interceptor. + When both interceptors are used, this `post_list_with_metadata` interceptor runs after the + `post_list` interceptor. The (possibly modified) response returned by + `post_list` will be passed to + `post_list_with_metadata`. + """ + return response, metadata + def pre_patch( self, request: compute.PatchTargetHttpProxyRequest, @@ -267,12 +384,35 @@ def pre_patch( def post_patch(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for patch - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_patch_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the TargetHttpProxies server but before - it is returned to user code. + it is returned to user code. This `post_patch` interceptor runs + before the `post_patch_with_metadata` interceptor. """ return response + def post_patch_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for patch + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the TargetHttpProxies server but before it is returned to user code. + + We recommend only using this `post_patch_with_metadata` + interceptor in new development instead of the `post_patch` interceptor. + When both interceptors are used, this `post_patch_with_metadata` interceptor runs after the + `post_patch` interceptor. The (possibly modified) response returned by + `post_patch` will be passed to + `post_patch_with_metadata`. + """ + return response, metadata + def pre_set_url_map( self, request: compute.SetUrlMapTargetHttpProxyRequest, @@ -290,12 +430,35 @@ def pre_set_url_map( def post_set_url_map(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for set_url_map - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_set_url_map_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response after it is returned by the TargetHttpProxies server but before - it is returned to user code. + it is returned to user code. This `post_set_url_map` interceptor runs + before the `post_set_url_map_with_metadata` interceptor. """ return response + def post_set_url_map_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for set_url_map + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the TargetHttpProxies server but before it is returned to user code. + + We recommend only using this `post_set_url_map_with_metadata` + interceptor in new development instead of the `post_set_url_map` interceptor. + When both interceptors are used, this `post_set_url_map_with_metadata` interceptor runs after the + `post_set_url_map` interceptor. The (possibly modified) response returned by + `post_set_url_map` will be passed to + `post_set_url_map_with_metadata`. + """ + return response, metadata + @dataclasses.dataclass class TargetHttpProxiesRestStub: @@ -507,6 +670,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_aggregated_list(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_aggregated_list_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -672,6 +839,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -832,6 +1003,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1003,6 +1176,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_insert(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_insert_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1149,6 +1326,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1320,6 +1499,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_patch(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_patch_with_metadata( + resp, response_metadata + ) if 
CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1485,6 +1668,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_set_url_map(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_set_url_map_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/target_https_proxies/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/target_https_proxies/client.py index 1f3b423297db..75973566ca8a 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/target_https_proxies/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/target_https_proxies/client.py @@ -15,6 +15,8 @@ # from collections import OrderedDict import functools +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -461,6 +463,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/target_https_proxies/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/target_https_proxies/transports/rest.py index a8e15f2f9cf4..3b76d7e57877 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/target_https_proxies/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/target_https_proxies/transports/rest.py @@ -182,12 +182,37 @@ def post_aggregated_list( ) -> compute.TargetHttpsProxyAggregatedList: """Post-rpc interceptor for aggregated_list - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_aggregated_list_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the TargetHttpsProxies server but before - it is returned to user code. + it is returned to user code. This `post_aggregated_list` interceptor runs + before the `post_aggregated_list_with_metadata` interceptor. 
""" return response + def post_aggregated_list_with_metadata( + self, + response: compute.TargetHttpsProxyAggregatedList, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.TargetHttpsProxyAggregatedList, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for aggregated_list + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the TargetHttpsProxies server but before it is returned to user code. + + We recommend only using this `post_aggregated_list_with_metadata` + interceptor in new development instead of the `post_aggregated_list` interceptor. + When both interceptors are used, this `post_aggregated_list_with_metadata` interceptor runs after the + `post_aggregated_list` interceptor. The (possibly modified) response returned by + `post_aggregated_list` will be passed to + `post_aggregated_list_with_metadata`. + """ + return response, metadata + def pre_delete( self, request: compute.DeleteTargetHttpsProxyRequest, @@ -205,12 +230,35 @@ def pre_delete( def post_delete(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for delete - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the TargetHttpsProxies server but before - it is returned to user code. + it is returned to user code. This `post_delete` interceptor runs + before the `post_delete_with_metadata` interceptor. """ return response + def post_delete_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the TargetHttpsProxies server but before it is returned to user code. + + We recommend only using this `post_delete_with_metadata` + interceptor in new development instead of the `post_delete` interceptor. + When both interceptors are used, this `post_delete_with_metadata` interceptor runs after the + `post_delete` interceptor. The (possibly modified) response returned by + `post_delete` will be passed to + `post_delete_with_metadata`. + """ + return response, metadata + def pre_get( self, request: compute.GetTargetHttpsProxyRequest, @@ -228,12 +276,35 @@ def pre_get( def post_get(self, response: compute.TargetHttpsProxy) -> compute.TargetHttpsProxy: """Post-rpc interceptor for get - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the TargetHttpsProxies server but before - it is returned to user code. + it is returned to user code. This `post_get` interceptor runs + before the `post_get_with_metadata` interceptor. """ return response + def post_get_with_metadata( + self, + response: compute.TargetHttpsProxy, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.TargetHttpsProxy, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the TargetHttpsProxies server but before it is returned to user code. 
+ + We recommend only using this `post_get_with_metadata` + interceptor in new development instead of the `post_get` interceptor. + When both interceptors are used, this `post_get_with_metadata` interceptor runs after the + `post_get` interceptor. The (possibly modified) response returned by + `post_get` will be passed to + `post_get_with_metadata`. + """ + return response, metadata + def pre_insert( self, request: compute.InsertTargetHttpsProxyRequest, @@ -251,12 +322,35 @@ def pre_insert( def post_insert(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for insert - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_insert_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the TargetHttpsProxies server but before - it is returned to user code. + it is returned to user code. This `post_insert` interceptor runs + before the `post_insert_with_metadata` interceptor. """ return response + def post_insert_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for insert + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the TargetHttpsProxies server but before it is returned to user code. + + We recommend only using this `post_insert_with_metadata` + interceptor in new development instead of the `post_insert` interceptor. + When both interceptors are used, this `post_insert_with_metadata` interceptor runs after the + `post_insert` interceptor. The (possibly modified) response returned by + `post_insert` will be passed to + `post_insert_with_metadata`. + """ + return response, metadata + def pre_list( self, request: compute.ListTargetHttpsProxiesRequest, @@ -276,12 +370,35 @@ def post_list( ) -> compute.TargetHttpsProxyList: """Post-rpc interceptor for list - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the TargetHttpsProxies server but before - it is returned to user code. + it is returned to user code. This `post_list` interceptor runs + before the `post_list_with_metadata` interceptor. """ return response + def post_list_with_metadata( + self, + response: compute.TargetHttpsProxyList, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.TargetHttpsProxyList, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the TargetHttpsProxies server but before it is returned to user code. + + We recommend only using this `post_list_with_metadata` + interceptor in new development instead of the `post_list` interceptor. + When both interceptors are used, this `post_list_with_metadata` interceptor runs after the + `post_list` interceptor. The (possibly modified) response returned by + `post_list` will be passed to + `post_list_with_metadata`. + """ + return response, metadata + def pre_patch( self, request: compute.PatchTargetHttpsProxyRequest, @@ -299,12 +416,35 @@ def pre_patch( def post_patch(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for patch - Override in a subclass to manipulate the response + DEPRECATED. 
Please use the `post_patch_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the TargetHttpsProxies server but before - it is returned to user code. + it is returned to user code. This `post_patch` interceptor runs + before the `post_patch_with_metadata` interceptor. """ return response + def post_patch_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for patch + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the TargetHttpsProxies server but before it is returned to user code. + + We recommend only using this `post_patch_with_metadata` + interceptor in new development instead of the `post_patch` interceptor. + When both interceptors are used, this `post_patch_with_metadata` interceptor runs after the + `post_patch` interceptor. The (possibly modified) response returned by + `post_patch` will be passed to + `post_patch_with_metadata`. + """ + return response, metadata + def pre_set_certificate_map( self, request: compute.SetCertificateMapTargetHttpsProxyRequest, @@ -325,12 +465,35 @@ def post_set_certificate_map( ) -> compute.Operation: """Post-rpc interceptor for set_certificate_map - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_set_certificate_map_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the TargetHttpsProxies server but before - it is returned to user code. + it is returned to user code. This `post_set_certificate_map` interceptor runs + before the `post_set_certificate_map_with_metadata` interceptor. """ return response + def post_set_certificate_map_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for set_certificate_map + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the TargetHttpsProxies server but before it is returned to user code. + + We recommend only using this `post_set_certificate_map_with_metadata` + interceptor in new development instead of the `post_set_certificate_map` interceptor. + When both interceptors are used, this `post_set_certificate_map_with_metadata` interceptor runs after the + `post_set_certificate_map` interceptor. The (possibly modified) response returned by + `post_set_certificate_map` will be passed to + `post_set_certificate_map_with_metadata`. + """ + return response, metadata + def pre_set_quic_override( self, request: compute.SetQuicOverrideTargetHttpsProxyRequest, @@ -349,12 +512,35 @@ def pre_set_quic_override( def post_set_quic_override(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for set_quic_override - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_set_quic_override_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the TargetHttpsProxies server but before - it is returned to user code. + it is returned to user code. This `post_set_quic_override` interceptor runs + before the `post_set_quic_override_with_metadata` interceptor. 
""" return response + def post_set_quic_override_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for set_quic_override + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the TargetHttpsProxies server but before it is returned to user code. + + We recommend only using this `post_set_quic_override_with_metadata` + interceptor in new development instead of the `post_set_quic_override` interceptor. + When both interceptors are used, this `post_set_quic_override_with_metadata` interceptor runs after the + `post_set_quic_override` interceptor. The (possibly modified) response returned by + `post_set_quic_override` will be passed to + `post_set_quic_override_with_metadata`. + """ + return response, metadata + def pre_set_ssl_certificates( self, request: compute.SetSslCertificatesTargetHttpsProxyRequest, @@ -375,12 +561,35 @@ def post_set_ssl_certificates( ) -> compute.Operation: """Post-rpc interceptor for set_ssl_certificates - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_set_ssl_certificates_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the TargetHttpsProxies server but before - it is returned to user code. + it is returned to user code. This `post_set_ssl_certificates` interceptor runs + before the `post_set_ssl_certificates_with_metadata` interceptor. """ return response + def post_set_ssl_certificates_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for set_ssl_certificates + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the TargetHttpsProxies server but before it is returned to user code. + + We recommend only using this `post_set_ssl_certificates_with_metadata` + interceptor in new development instead of the `post_set_ssl_certificates` interceptor. + When both interceptors are used, this `post_set_ssl_certificates_with_metadata` interceptor runs after the + `post_set_ssl_certificates` interceptor. The (possibly modified) response returned by + `post_set_ssl_certificates` will be passed to + `post_set_ssl_certificates_with_metadata`. + """ + return response, metadata + def pre_set_ssl_policy( self, request: compute.SetSslPolicyTargetHttpsProxyRequest, @@ -399,12 +608,35 @@ def pre_set_ssl_policy( def post_set_ssl_policy(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for set_ssl_policy - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_set_ssl_policy_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the TargetHttpsProxies server but before - it is returned to user code. + it is returned to user code. This `post_set_ssl_policy` interceptor runs + before the `post_set_ssl_policy_with_metadata` interceptor. 
""" return response + def post_set_ssl_policy_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for set_ssl_policy + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the TargetHttpsProxies server but before it is returned to user code. + + We recommend only using this `post_set_ssl_policy_with_metadata` + interceptor in new development instead of the `post_set_ssl_policy` interceptor. + When both interceptors are used, this `post_set_ssl_policy_with_metadata` interceptor runs after the + `post_set_ssl_policy` interceptor. The (possibly modified) response returned by + `post_set_ssl_policy` will be passed to + `post_set_ssl_policy_with_metadata`. + """ + return response, metadata + def pre_set_url_map( self, request: compute.SetUrlMapTargetHttpsProxyRequest, @@ -423,12 +655,35 @@ def pre_set_url_map( def post_set_url_map(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for set_url_map - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_set_url_map_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the TargetHttpsProxies server but before - it is returned to user code. + it is returned to user code. This `post_set_url_map` interceptor runs + before the `post_set_url_map_with_metadata` interceptor. """ return response + def post_set_url_map_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for set_url_map + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the TargetHttpsProxies server but before it is returned to user code. + + We recommend only using this `post_set_url_map_with_metadata` + interceptor in new development instead of the `post_set_url_map` interceptor. + When both interceptors are used, this `post_set_url_map_with_metadata` interceptor runs after the + `post_set_url_map` interceptor. The (possibly modified) response returned by + `post_set_url_map` will be passed to + `post_set_url_map_with_metadata`. 
+ """ + return response, metadata + @dataclasses.dataclass class TargetHttpsProxiesRestStub: @@ -640,6 +895,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_aggregated_list(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_aggregated_list_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -803,6 +1062,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -965,6 +1228,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1134,6 +1399,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_insert(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_insert_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1282,6 +1551,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1453,6 +1724,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_patch(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_patch_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1621,6 +1896,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_set_certificate_map(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_set_certificate_map_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1789,6 +2068,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_set_quic_override(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_set_quic_override_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1959,6 +2242,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_set_ssl_certificates(resp) + response_metadata = [(k, str(v)) 
for k, v in response.headers.items()] + resp, _ = self._interceptor.post_set_ssl_certificates_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2125,6 +2412,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_set_ssl_policy(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_set_ssl_policy_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2290,6 +2581,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_set_url_map(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_set_url_map_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/target_instances/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/target_instances/client.py index e04a462113be..44f1c37ffcc2 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/target_instances/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/target_instances/client.py @@ -15,6 +15,8 @@ # from collections import OrderedDict import functools +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -461,6 +463,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/target_instances/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/target_instances/transports/rest.py index 95de85e6a359..5c76cbd37c0e 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/target_instances/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/target_instances/transports/rest.py @@ -142,12 +142,37 @@ def post_aggregated_list( ) -> compute.TargetInstanceAggregatedList: """Post-rpc interceptor for aggregated_list - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_aggregated_list_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response after it is returned by the TargetInstances server but before - it is returned to user code. + it is returned to user code. This `post_aggregated_list` interceptor runs + before the `post_aggregated_list_with_metadata` interceptor. """ return response + def post_aggregated_list_with_metadata( + self, + response: compute.TargetInstanceAggregatedList, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.TargetInstanceAggregatedList, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for aggregated_list + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the TargetInstances server but before it is returned to user code. + + We recommend only using this `post_aggregated_list_with_metadata` + interceptor in new development instead of the `post_aggregated_list` interceptor. + When both interceptors are used, this `post_aggregated_list_with_metadata` interceptor runs after the + `post_aggregated_list` interceptor. The (possibly modified) response returned by + `post_aggregated_list` will be passed to + `post_aggregated_list_with_metadata`. + """ + return response, metadata + def pre_delete( self, request: compute.DeleteTargetInstanceRequest, @@ -165,12 +190,35 @@ def pre_delete( def post_delete(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for delete - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the TargetInstances server but before - it is returned to user code. + it is returned to user code. This `post_delete` interceptor runs + before the `post_delete_with_metadata` interceptor. """ return response + def post_delete_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the TargetInstances server but before it is returned to user code. + + We recommend only using this `post_delete_with_metadata` + interceptor in new development instead of the `post_delete` interceptor. + When both interceptors are used, this `post_delete_with_metadata` interceptor runs after the + `post_delete` interceptor. The (possibly modified) response returned by + `post_delete` will be passed to + `post_delete_with_metadata`. + """ + return response, metadata + def pre_get( self, request: compute.GetTargetInstanceRequest, @@ -188,12 +236,35 @@ def pre_get( def post_get(self, response: compute.TargetInstance) -> compute.TargetInstance: """Post-rpc interceptor for get - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the TargetInstances server but before - it is returned to user code. + it is returned to user code. This `post_get` interceptor runs + before the `post_get_with_metadata` interceptor. 
""" return response + def post_get_with_metadata( + self, + response: compute.TargetInstance, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.TargetInstance, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the TargetInstances server but before it is returned to user code. + + We recommend only using this `post_get_with_metadata` + interceptor in new development instead of the `post_get` interceptor. + When both interceptors are used, this `post_get_with_metadata` interceptor runs after the + `post_get` interceptor. The (possibly modified) response returned by + `post_get` will be passed to + `post_get_with_metadata`. + """ + return response, metadata + def pre_insert( self, request: compute.InsertTargetInstanceRequest, @@ -211,12 +282,35 @@ def pre_insert( def post_insert(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for insert - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_insert_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the TargetInstances server but before - it is returned to user code. + it is returned to user code. This `post_insert` interceptor runs + before the `post_insert_with_metadata` interceptor. """ return response + def post_insert_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for insert + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the TargetInstances server but before it is returned to user code. + + We recommend only using this `post_insert_with_metadata` + interceptor in new development instead of the `post_insert` interceptor. + When both interceptors are used, this `post_insert_with_metadata` interceptor runs after the + `post_insert` interceptor. The (possibly modified) response returned by + `post_insert` will be passed to + `post_insert_with_metadata`. + """ + return response, metadata + def pre_list( self, request: compute.ListTargetInstancesRequest, @@ -236,12 +330,35 @@ def post_list( ) -> compute.TargetInstanceList: """Post-rpc interceptor for list - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the TargetInstances server but before - it is returned to user code. + it is returned to user code. This `post_list` interceptor runs + before the `post_list_with_metadata` interceptor. """ return response + def post_list_with_metadata( + self, + response: compute.TargetInstanceList, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.TargetInstanceList, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the TargetInstances server but before it is returned to user code. + + We recommend only using this `post_list_with_metadata` + interceptor in new development instead of the `post_list` interceptor. + When both interceptors are used, this `post_list_with_metadata` interceptor runs after the + `post_list` interceptor. 
The (possibly modified) response returned by + `post_list` will be passed to + `post_list_with_metadata`. + """ + return response, metadata + def pre_set_security_policy( self, request: compute.SetSecurityPolicyTargetInstanceRequest, @@ -262,12 +379,35 @@ def post_set_security_policy( ) -> compute.Operation: """Post-rpc interceptor for set_security_policy - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_set_security_policy_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the TargetInstances server but before - it is returned to user code. + it is returned to user code. This `post_set_security_policy` interceptor runs + before the `post_set_security_policy_with_metadata` interceptor. """ return response + def post_set_security_policy_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for set_security_policy + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the TargetInstances server but before it is returned to user code. + + We recommend only using this `post_set_security_policy_with_metadata` + interceptor in new development instead of the `post_set_security_policy` interceptor. + When both interceptors are used, this `post_set_security_policy_with_metadata` interceptor runs after the + `post_set_security_policy` interceptor. The (possibly modified) response returned by + `post_set_security_policy` will be passed to + `post_set_security_policy_with_metadata`. + """ + return response, metadata + @dataclasses.dataclass class TargetInstancesRestStub: @@ -478,6 +618,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_aggregated_list(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_aggregated_list_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -643,6 +787,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -795,6 +943,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -964,6 +1114,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_insert(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_insert_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1110,6 +1264,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list(resp) + 
response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1278,6 +1434,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_set_security_policy(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_set_security_policy_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/target_pools/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/target_pools/client.py index 0c1117991183..52278d476f2d 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/target_pools/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/target_pools/client.py @@ -15,6 +15,8 @@ # from collections import OrderedDict import functools +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -459,6 +461,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/target_pools/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/target_pools/transports/rest.py index 7797dad159a8..431730917988 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/target_pools/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/target_pools/transports/rest.py @@ -187,12 +187,35 @@ def pre_add_health_check( def post_add_health_check(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for add_health_check - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_add_health_check_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the TargetPools server but before - it is returned to user code. + it is returned to user code. This `post_add_health_check` interceptor runs + before the `post_add_health_check_with_metadata` interceptor. 
""" return response + def post_add_health_check_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for add_health_check + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the TargetPools server but before it is returned to user code. + + We recommend only using this `post_add_health_check_with_metadata` + interceptor in new development instead of the `post_add_health_check` interceptor. + When both interceptors are used, this `post_add_health_check_with_metadata` interceptor runs after the + `post_add_health_check` interceptor. The (possibly modified) response returned by + `post_add_health_check` will be passed to + `post_add_health_check_with_metadata`. + """ + return response, metadata + def pre_add_instance( self, request: compute.AddInstanceTargetPoolRequest, @@ -210,12 +233,35 @@ def pre_add_instance( def post_add_instance(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for add_instance - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_add_instance_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the TargetPools server but before - it is returned to user code. + it is returned to user code. This `post_add_instance` interceptor runs + before the `post_add_instance_with_metadata` interceptor. """ return response + def post_add_instance_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for add_instance + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the TargetPools server but before it is returned to user code. + + We recommend only using this `post_add_instance_with_metadata` + interceptor in new development instead of the `post_add_instance` interceptor. + When both interceptors are used, this `post_add_instance_with_metadata` interceptor runs after the + `post_add_instance` interceptor. The (possibly modified) response returned by + `post_add_instance` will be passed to + `post_add_instance_with_metadata`. + """ + return response, metadata + def pre_aggregated_list( self, request: compute.AggregatedListTargetPoolsRequest, @@ -236,12 +282,37 @@ def post_aggregated_list( ) -> compute.TargetPoolAggregatedList: """Post-rpc interceptor for aggregated_list - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_aggregated_list_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the TargetPools server but before - it is returned to user code. + it is returned to user code. This `post_aggregated_list` interceptor runs + before the `post_aggregated_list_with_metadata` interceptor. 
""" return response + def post_aggregated_list_with_metadata( + self, + response: compute.TargetPoolAggregatedList, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.TargetPoolAggregatedList, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for aggregated_list + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the TargetPools server but before it is returned to user code. + + We recommend only using this `post_aggregated_list_with_metadata` + interceptor in new development instead of the `post_aggregated_list` interceptor. + When both interceptors are used, this `post_aggregated_list_with_metadata` interceptor runs after the + `post_aggregated_list` interceptor. The (possibly modified) response returned by + `post_aggregated_list` will be passed to + `post_aggregated_list_with_metadata`. + """ + return response, metadata + def pre_delete( self, request: compute.DeleteTargetPoolRequest, @@ -259,12 +330,35 @@ def pre_delete( def post_delete(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for delete - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the TargetPools server but before - it is returned to user code. + it is returned to user code. This `post_delete` interceptor runs + before the `post_delete_with_metadata` interceptor. """ return response + def post_delete_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the TargetPools server but before it is returned to user code. + + We recommend only using this `post_delete_with_metadata` + interceptor in new development instead of the `post_delete` interceptor. + When both interceptors are used, this `post_delete_with_metadata` interceptor runs after the + `post_delete` interceptor. The (possibly modified) response returned by + `post_delete` will be passed to + `post_delete_with_metadata`. + """ + return response, metadata + def pre_get( self, request: compute.GetTargetPoolRequest, @@ -280,12 +374,35 @@ def pre_get( def post_get(self, response: compute.TargetPool) -> compute.TargetPool: """Post-rpc interceptor for get - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the TargetPools server but before - it is returned to user code. + it is returned to user code. This `post_get` interceptor runs + before the `post_get_with_metadata` interceptor. """ return response + def post_get_with_metadata( + self, + response: compute.TargetPool, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.TargetPool, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the TargetPools server but before it is returned to user code. + + We recommend only using this `post_get_with_metadata` + interceptor in new development instead of the `post_get` interceptor. 
+ When both interceptors are used, this `post_get_with_metadata` interceptor runs after the + `post_get` interceptor. The (possibly modified) response returned by + `post_get` will be passed to + `post_get_with_metadata`. + """ + return response, metadata + def pre_get_health( self, request: compute.GetHealthTargetPoolRequest, @@ -305,12 +422,37 @@ def post_get_health( ) -> compute.TargetPoolInstanceHealth: """Post-rpc interceptor for get_health - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_health_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the TargetPools server but before - it is returned to user code. + it is returned to user code. This `post_get_health` interceptor runs + before the `post_get_health_with_metadata` interceptor. """ return response + def post_get_health_with_metadata( + self, + response: compute.TargetPoolInstanceHealth, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.TargetPoolInstanceHealth, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for get_health + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the TargetPools server but before it is returned to user code. + + We recommend only using this `post_get_health_with_metadata` + interceptor in new development instead of the `post_get_health` interceptor. + When both interceptors are used, this `post_get_health_with_metadata` interceptor runs after the + `post_get_health` interceptor. The (possibly modified) response returned by + `post_get_health` will be passed to + `post_get_health_with_metadata`. + """ + return response, metadata + def pre_insert( self, request: compute.InsertTargetPoolRequest, @@ -328,12 +470,35 @@ def pre_insert( def post_insert(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for insert - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_insert_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the TargetPools server but before - it is returned to user code. + it is returned to user code. This `post_insert` interceptor runs + before the `post_insert_with_metadata` interceptor. """ return response + def post_insert_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for insert + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the TargetPools server but before it is returned to user code. + + We recommend only using this `post_insert_with_metadata` + interceptor in new development instead of the `post_insert` interceptor. + When both interceptors are used, this `post_insert_with_metadata` interceptor runs after the + `post_insert` interceptor. The (possibly modified) response returned by + `post_insert` will be passed to + `post_insert_with_metadata`. + """ + return response, metadata + def pre_list( self, request: compute.ListTargetPoolsRequest, @@ -349,12 +514,35 @@ def pre_list( def post_list(self, response: compute.TargetPoolList) -> compute.TargetPoolList: """Post-rpc interceptor for list - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response after it is returned by the TargetPools server but before - it is returned to user code. + it is returned to user code. This `post_list` interceptor runs + before the `post_list_with_metadata` interceptor. """ return response + def post_list_with_metadata( + self, + response: compute.TargetPoolList, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.TargetPoolList, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the TargetPools server but before it is returned to user code. + + We recommend only using this `post_list_with_metadata` + interceptor in new development instead of the `post_list` interceptor. + When both interceptors are used, this `post_list_with_metadata` interceptor runs after the + `post_list` interceptor. The (possibly modified) response returned by + `post_list` will be passed to + `post_list_with_metadata`. + """ + return response, metadata + def pre_remove_health_check( self, request: compute.RemoveHealthCheckTargetPoolRequest, @@ -375,12 +563,35 @@ def post_remove_health_check( ) -> compute.Operation: """Post-rpc interceptor for remove_health_check - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_remove_health_check_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the TargetPools server but before - it is returned to user code. + it is returned to user code. This `post_remove_health_check` interceptor runs + before the `post_remove_health_check_with_metadata` interceptor. """ return response + def post_remove_health_check_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for remove_health_check + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the TargetPools server but before it is returned to user code. + + We recommend only using this `post_remove_health_check_with_metadata` + interceptor in new development instead of the `post_remove_health_check` interceptor. + When both interceptors are used, this `post_remove_health_check_with_metadata` interceptor runs after the + `post_remove_health_check` interceptor. The (possibly modified) response returned by + `post_remove_health_check` will be passed to + `post_remove_health_check_with_metadata`. + """ + return response, metadata + def pre_remove_instance( self, request: compute.RemoveInstanceTargetPoolRequest, @@ -398,12 +609,35 @@ def pre_remove_instance( def post_remove_instance(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for remove_instance - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_remove_instance_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the TargetPools server but before - it is returned to user code. + it is returned to user code. This `post_remove_instance` interceptor runs + before the `post_remove_instance_with_metadata` interceptor. 
""" return response + def post_remove_instance_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for remove_instance + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the TargetPools server but before it is returned to user code. + + We recommend only using this `post_remove_instance_with_metadata` + interceptor in new development instead of the `post_remove_instance` interceptor. + When both interceptors are used, this `post_remove_instance_with_metadata` interceptor runs after the + `post_remove_instance` interceptor. The (possibly modified) response returned by + `post_remove_instance` will be passed to + `post_remove_instance_with_metadata`. + """ + return response, metadata + def pre_set_backup( self, request: compute.SetBackupTargetPoolRequest, @@ -421,12 +655,35 @@ def pre_set_backup( def post_set_backup(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for set_backup - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_set_backup_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the TargetPools server but before - it is returned to user code. + it is returned to user code. This `post_set_backup` interceptor runs + before the `post_set_backup_with_metadata` interceptor. """ return response + def post_set_backup_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for set_backup + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the TargetPools server but before it is returned to user code. + + We recommend only using this `post_set_backup_with_metadata` + interceptor in new development instead of the `post_set_backup` interceptor. + When both interceptors are used, this `post_set_backup_with_metadata` interceptor runs after the + `post_set_backup` interceptor. The (possibly modified) response returned by + `post_set_backup` will be passed to + `post_set_backup_with_metadata`. + """ + return response, metadata + def pre_set_security_policy( self, request: compute.SetSecurityPolicyTargetPoolRequest, @@ -447,12 +704,35 @@ def post_set_security_policy( ) -> compute.Operation: """Post-rpc interceptor for set_security_policy - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_set_security_policy_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the TargetPools server but before - it is returned to user code. + it is returned to user code. This `post_set_security_policy` interceptor runs + before the `post_set_security_policy_with_metadata` interceptor. """ return response + def post_set_security_policy_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for set_security_policy + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the TargetPools server but before it is returned to user code. 
+ + We recommend only using this `post_set_security_policy_with_metadata` + interceptor in new development instead of the `post_set_security_policy` interceptor. + When both interceptors are used, this `post_set_security_policy_with_metadata` interceptor runs after the + `post_set_security_policy` interceptor. The (possibly modified) response returned by + `post_set_security_policy` will be passed to + `post_set_security_policy_with_metadata`. + """ + return response, metadata + @dataclasses.dataclass class TargetPoolsRestStub: @@ -688,6 +968,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_add_health_check(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_add_health_check_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -859,6 +1143,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_add_instance(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_add_instance_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1001,6 +1289,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_aggregated_list(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_aggregated_list_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1162,6 +1454,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1312,6 +1608,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1462,6 +1760,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_health(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_health_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1629,6 +1931,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_insert(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_insert_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1773,6 +2079,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list(resp) + 
response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1940,6 +2248,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_remove_health_check(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_remove_health_check_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2105,6 +2417,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_remove_instance(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_remove_instance_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2272,6 +2588,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_set_backup(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_set_backup_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2439,6 +2759,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_set_security_policy(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_set_security_policy_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/target_ssl_proxies/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/target_ssl_proxies/client.py index de9994658efa..d6ef68c4fbf2 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/target_ssl_proxies/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/target_ssl_proxies/client.py @@ -15,6 +15,8 @@ # from collections import OrderedDict import functools +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -461,6 +463,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. 
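Note on usage: the `post_*_with_metadata` hooks added throughout these transports are meant to be overridden in a subclass of the generated REST interceptor, which now receives the HTTP response headers as metadata alongside the parsed response. A minimal, illustrative sketch follows; the `TargetSslProxiesRestInterceptor` class name, import path, and `interceptor=` wiring are assumed from the usual GAPIC layout and are not shown in this diff:

from typing import Sequence, Tuple, Union

# Assumed module path and class name; confirm against the generated package.
from google.cloud.compute_v1.services.target_ssl_proxies.transports import rest as tsp_rest
from google.cloud.compute_v1.types import compute


class HeaderLoggingInterceptor(tsp_rest.TargetSslProxiesRestInterceptor):
    """Illustrative interceptor that inspects response headers for `get` calls."""

    def post_get_with_metadata(
        self,
        response: compute.TargetSslProxy,
        metadata: Sequence[Tuple[str, Union[str, bytes]]],
    ) -> Tuple[compute.TargetSslProxy, Sequence[Tuple[str, Union[str, bytes]]]]:
        # `metadata` carries the HTTP response headers as (key, value) pairs,
        # built by the transport from `response.headers.items()`.
        for key, value in metadata:
            print(f"response header: {key}={value}")
        # Whatever is returned here is what the client hands back to user code.
        return response, metadata

An instance of such a subclass would typically be passed via the REST transport's `interceptor=` argument (the exact constructor signature should be checked against the generated transport). As the docstrings state, the existing `post_get` hook still runs first, and its (possibly modified) response is what the `_with_metadata` hook receives.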
diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/target_ssl_proxies/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/target_ssl_proxies/transports/rest.py index d726f94fc0a7..5d9fac02890b 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/target_ssl_proxies/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/target_ssl_proxies/transports/rest.py @@ -163,12 +163,35 @@ def pre_delete( def post_delete(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for delete - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the TargetSslProxies server but before - it is returned to user code. + it is returned to user code. This `post_delete` interceptor runs + before the `post_delete_with_metadata` interceptor. """ return response + def post_delete_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the TargetSslProxies server but before it is returned to user code. + + We recommend only using this `post_delete_with_metadata` + interceptor in new development instead of the `post_delete` interceptor. + When both interceptors are used, this `post_delete_with_metadata` interceptor runs after the + `post_delete` interceptor. The (possibly modified) response returned by + `post_delete` will be passed to + `post_delete_with_metadata`. + """ + return response, metadata + def pre_get( self, request: compute.GetTargetSslProxyRequest, @@ -186,12 +209,35 @@ def pre_get( def post_get(self, response: compute.TargetSslProxy) -> compute.TargetSslProxy: """Post-rpc interceptor for get - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the TargetSslProxies server but before - it is returned to user code. + it is returned to user code. This `post_get` interceptor runs + before the `post_get_with_metadata` interceptor. """ return response + def post_get_with_metadata( + self, + response: compute.TargetSslProxy, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.TargetSslProxy, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the TargetSslProxies server but before it is returned to user code. + + We recommend only using this `post_get_with_metadata` + interceptor in new development instead of the `post_get` interceptor. + When both interceptors are used, this `post_get_with_metadata` interceptor runs after the + `post_get` interceptor. The (possibly modified) response returned by + `post_get` will be passed to + `post_get_with_metadata`. 
+ """ + return response, metadata + def pre_insert( self, request: compute.InsertTargetSslProxyRequest, @@ -209,12 +255,35 @@ def pre_insert( def post_insert(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for insert - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_insert_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the TargetSslProxies server but before - it is returned to user code. + it is returned to user code. This `post_insert` interceptor runs + before the `post_insert_with_metadata` interceptor. """ return response + def post_insert_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for insert + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the TargetSslProxies server but before it is returned to user code. + + We recommend only using this `post_insert_with_metadata` + interceptor in new development instead of the `post_insert` interceptor. + When both interceptors are used, this `post_insert_with_metadata` interceptor runs after the + `post_insert` interceptor. The (possibly modified) response returned by + `post_insert` will be passed to + `post_insert_with_metadata`. + """ + return response, metadata + def pre_list( self, request: compute.ListTargetSslProxiesRequest, @@ -234,12 +303,35 @@ def post_list( ) -> compute.TargetSslProxyList: """Post-rpc interceptor for list - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the TargetSslProxies server but before - it is returned to user code. + it is returned to user code. This `post_list` interceptor runs + before the `post_list_with_metadata` interceptor. """ return response + def post_list_with_metadata( + self, + response: compute.TargetSslProxyList, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.TargetSslProxyList, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the TargetSslProxies server but before it is returned to user code. + + We recommend only using this `post_list_with_metadata` + interceptor in new development instead of the `post_list` interceptor. + When both interceptors are used, this `post_list_with_metadata` interceptor runs after the + `post_list` interceptor. The (possibly modified) response returned by + `post_list` will be passed to + `post_list_with_metadata`. + """ + return response, metadata + def pre_set_backend_service( self, request: compute.SetBackendServiceTargetSslProxyRequest, @@ -260,12 +352,35 @@ def post_set_backend_service( ) -> compute.Operation: """Post-rpc interceptor for set_backend_service - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_set_backend_service_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the TargetSslProxies server but before - it is returned to user code. + it is returned to user code. 
This `post_set_backend_service` interceptor runs + before the `post_set_backend_service_with_metadata` interceptor. """ return response + def post_set_backend_service_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for set_backend_service + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the TargetSslProxies server but before it is returned to user code. + + We recommend only using this `post_set_backend_service_with_metadata` + interceptor in new development instead of the `post_set_backend_service` interceptor. + When both interceptors are used, this `post_set_backend_service_with_metadata` interceptor runs after the + `post_set_backend_service` interceptor. The (possibly modified) response returned by + `post_set_backend_service` will be passed to + `post_set_backend_service_with_metadata`. + """ + return response, metadata + def pre_set_certificate_map( self, request: compute.SetCertificateMapTargetSslProxyRequest, @@ -286,12 +401,35 @@ def post_set_certificate_map( ) -> compute.Operation: """Post-rpc interceptor for set_certificate_map - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_set_certificate_map_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the TargetSslProxies server but before - it is returned to user code. + it is returned to user code. This `post_set_certificate_map` interceptor runs + before the `post_set_certificate_map_with_metadata` interceptor. """ return response + def post_set_certificate_map_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for set_certificate_map + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the TargetSslProxies server but before it is returned to user code. + + We recommend only using this `post_set_certificate_map_with_metadata` + interceptor in new development instead of the `post_set_certificate_map` interceptor. + When both interceptors are used, this `post_set_certificate_map_with_metadata` interceptor runs after the + `post_set_certificate_map` interceptor. The (possibly modified) response returned by + `post_set_certificate_map` will be passed to + `post_set_certificate_map_with_metadata`. + """ + return response, metadata + def pre_set_proxy_header( self, request: compute.SetProxyHeaderTargetSslProxyRequest, @@ -310,12 +448,35 @@ def pre_set_proxy_header( def post_set_proxy_header(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for set_proxy_header - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_set_proxy_header_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the TargetSslProxies server but before - it is returned to user code. + it is returned to user code. This `post_set_proxy_header` interceptor runs + before the `post_set_proxy_header_with_metadata` interceptor. 
""" return response + def post_set_proxy_header_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for set_proxy_header + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the TargetSslProxies server but before it is returned to user code. + + We recommend only using this `post_set_proxy_header_with_metadata` + interceptor in new development instead of the `post_set_proxy_header` interceptor. + When both interceptors are used, this `post_set_proxy_header_with_metadata` interceptor runs after the + `post_set_proxy_header` interceptor. The (possibly modified) response returned by + `post_set_proxy_header` will be passed to + `post_set_proxy_header_with_metadata`. + """ + return response, metadata + def pre_set_ssl_certificates( self, request: compute.SetSslCertificatesTargetSslProxyRequest, @@ -336,12 +497,35 @@ def post_set_ssl_certificates( ) -> compute.Operation: """Post-rpc interceptor for set_ssl_certificates - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_set_ssl_certificates_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the TargetSslProxies server but before - it is returned to user code. + it is returned to user code. This `post_set_ssl_certificates` interceptor runs + before the `post_set_ssl_certificates_with_metadata` interceptor. """ return response + def post_set_ssl_certificates_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for set_ssl_certificates + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the TargetSslProxies server but before it is returned to user code. + + We recommend only using this `post_set_ssl_certificates_with_metadata` + interceptor in new development instead of the `post_set_ssl_certificates` interceptor. + When both interceptors are used, this `post_set_ssl_certificates_with_metadata` interceptor runs after the + `post_set_ssl_certificates` interceptor. The (possibly modified) response returned by + `post_set_ssl_certificates` will be passed to + `post_set_ssl_certificates_with_metadata`. + """ + return response, metadata + def pre_set_ssl_policy( self, request: compute.SetSslPolicyTargetSslProxyRequest, @@ -360,12 +544,35 @@ def pre_set_ssl_policy( def post_set_ssl_policy(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for set_ssl_policy - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_set_ssl_policy_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the TargetSslProxies server but before - it is returned to user code. + it is returned to user code. This `post_set_ssl_policy` interceptor runs + before the `post_set_ssl_policy_with_metadata` interceptor. 
""" return response + def post_set_ssl_policy_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for set_ssl_policy + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the TargetSslProxies server but before it is returned to user code. + + We recommend only using this `post_set_ssl_policy_with_metadata` + interceptor in new development instead of the `post_set_ssl_policy` interceptor. + When both interceptors are used, this `post_set_ssl_policy_with_metadata` interceptor runs after the + `post_set_ssl_policy` interceptor. The (possibly modified) response returned by + `post_set_ssl_policy` will be passed to + `post_set_ssl_policy_with_metadata`. + """ + return response, metadata + @dataclasses.dataclass class TargetSslProxiesRestStub: @@ -597,6 +804,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -749,6 +960,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -920,6 +1133,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_insert(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_insert_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1066,6 +1283,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1234,6 +1453,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_set_backend_service(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_set_backend_service_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1402,6 +1625,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_set_certificate_map(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_set_certificate_map_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1569,6 +1796,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_set_proxy_header(resp) + 
response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_set_proxy_header_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1737,6 +1968,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_set_ssl_certificates(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_set_ssl_certificates_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1902,6 +2137,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_set_ssl_policy(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_set_ssl_policy_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/target_tcp_proxies/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/target_tcp_proxies/client.py index 64f885819f99..42a7a9f6efab 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/target_tcp_proxies/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/target_tcp_proxies/client.py @@ -15,6 +15,8 @@ # from collections import OrderedDict import functools +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -461,6 +463,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/target_tcp_proxies/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/target_tcp_proxies/transports/rest.py index 0a829769ab5f..7622eee821bb 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/target_tcp_proxies/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/target_tcp_proxies/transports/rest.py @@ -150,12 +150,37 @@ def post_aggregated_list( ) -> compute.TargetTcpProxyAggregatedList: """Post-rpc interceptor for aggregated_list - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_aggregated_list_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response after it is returned by the TargetTcpProxies server but before - it is returned to user code. + it is returned to user code. This `post_aggregated_list` interceptor runs + before the `post_aggregated_list_with_metadata` interceptor. """ return response + def post_aggregated_list_with_metadata( + self, + response: compute.TargetTcpProxyAggregatedList, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.TargetTcpProxyAggregatedList, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for aggregated_list + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the TargetTcpProxies server but before it is returned to user code. + + We recommend only using this `post_aggregated_list_with_metadata` + interceptor in new development instead of the `post_aggregated_list` interceptor. + When both interceptors are used, this `post_aggregated_list_with_metadata` interceptor runs after the + `post_aggregated_list` interceptor. The (possibly modified) response returned by + `post_aggregated_list` will be passed to + `post_aggregated_list_with_metadata`. + """ + return response, metadata + def pre_delete( self, request: compute.DeleteTargetTcpProxyRequest, @@ -173,12 +198,35 @@ def pre_delete( def post_delete(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for delete - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the TargetTcpProxies server but before - it is returned to user code. + it is returned to user code. This `post_delete` interceptor runs + before the `post_delete_with_metadata` interceptor. """ return response + def post_delete_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the TargetTcpProxies server but before it is returned to user code. + + We recommend only using this `post_delete_with_metadata` + interceptor in new development instead of the `post_delete` interceptor. + When both interceptors are used, this `post_delete_with_metadata` interceptor runs after the + `post_delete` interceptor. The (possibly modified) response returned by + `post_delete` will be passed to + `post_delete_with_metadata`. + """ + return response, metadata + def pre_get( self, request: compute.GetTargetTcpProxyRequest, @@ -196,12 +244,35 @@ def pre_get( def post_get(self, response: compute.TargetTcpProxy) -> compute.TargetTcpProxy: """Post-rpc interceptor for get - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the TargetTcpProxies server but before - it is returned to user code. + it is returned to user code. This `post_get` interceptor runs + before the `post_get_with_metadata` interceptor. 
""" return response + def post_get_with_metadata( + self, + response: compute.TargetTcpProxy, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.TargetTcpProxy, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the TargetTcpProxies server but before it is returned to user code. + + We recommend only using this `post_get_with_metadata` + interceptor in new development instead of the `post_get` interceptor. + When both interceptors are used, this `post_get_with_metadata` interceptor runs after the + `post_get` interceptor. The (possibly modified) response returned by + `post_get` will be passed to + `post_get_with_metadata`. + """ + return response, metadata + def pre_insert( self, request: compute.InsertTargetTcpProxyRequest, @@ -219,12 +290,35 @@ def pre_insert( def post_insert(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for insert - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_insert_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the TargetTcpProxies server but before - it is returned to user code. + it is returned to user code. This `post_insert` interceptor runs + before the `post_insert_with_metadata` interceptor. """ return response + def post_insert_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for insert + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the TargetTcpProxies server but before it is returned to user code. + + We recommend only using this `post_insert_with_metadata` + interceptor in new development instead of the `post_insert` interceptor. + When both interceptors are used, this `post_insert_with_metadata` interceptor runs after the + `post_insert` interceptor. The (possibly modified) response returned by + `post_insert` will be passed to + `post_insert_with_metadata`. + """ + return response, metadata + def pre_list( self, request: compute.ListTargetTcpProxiesRequest, @@ -244,12 +338,35 @@ def post_list( ) -> compute.TargetTcpProxyList: """Post-rpc interceptor for list - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the TargetTcpProxies server but before - it is returned to user code. + it is returned to user code. This `post_list` interceptor runs + before the `post_list_with_metadata` interceptor. """ return response + def post_list_with_metadata( + self, + response: compute.TargetTcpProxyList, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.TargetTcpProxyList, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the TargetTcpProxies server but before it is returned to user code. + + We recommend only using this `post_list_with_metadata` + interceptor in new development instead of the `post_list` interceptor. + When both interceptors are used, this `post_list_with_metadata` interceptor runs after the + `post_list` interceptor. 
The (possibly modified) response returned by + `post_list` will be passed to + `post_list_with_metadata`. + """ + return response, metadata + def pre_set_backend_service( self, request: compute.SetBackendServiceTargetTcpProxyRequest, @@ -270,12 +387,35 @@ def post_set_backend_service( ) -> compute.Operation: """Post-rpc interceptor for set_backend_service - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_set_backend_service_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the TargetTcpProxies server but before - it is returned to user code. + it is returned to user code. This `post_set_backend_service` interceptor runs + before the `post_set_backend_service_with_metadata` interceptor. """ return response + def post_set_backend_service_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for set_backend_service + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the TargetTcpProxies server but before it is returned to user code. + + We recommend only using this `post_set_backend_service_with_metadata` + interceptor in new development instead of the `post_set_backend_service` interceptor. + When both interceptors are used, this `post_set_backend_service_with_metadata` interceptor runs after the + `post_set_backend_service` interceptor. The (possibly modified) response returned by + `post_set_backend_service` will be passed to + `post_set_backend_service_with_metadata`. + """ + return response, metadata + def pre_set_proxy_header( self, request: compute.SetProxyHeaderTargetTcpProxyRequest, @@ -294,12 +434,35 @@ def pre_set_proxy_header( def post_set_proxy_header(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for set_proxy_header - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_set_proxy_header_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the TargetTcpProxies server but before - it is returned to user code. + it is returned to user code. This `post_set_proxy_header` interceptor runs + before the `post_set_proxy_header_with_metadata` interceptor. """ return response + def post_set_proxy_header_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for set_proxy_header + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the TargetTcpProxies server but before it is returned to user code. + + We recommend only using this `post_set_proxy_header_with_metadata` + interceptor in new development instead of the `post_set_proxy_header` interceptor. + When both interceptors are used, this `post_set_proxy_header_with_metadata` interceptor runs after the + `post_set_proxy_header` interceptor. The (possibly modified) response returned by + `post_set_proxy_header` will be passed to + `post_set_proxy_header_with_metadata`. 
+ """ + return response, metadata + @dataclasses.dataclass class TargetTcpProxiesRestStub: @@ -510,6 +673,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_aggregated_list(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_aggregated_list_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -675,6 +842,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -827,6 +998,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -998,6 +1171,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_insert(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_insert_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1144,6 +1321,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1312,6 +1491,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_set_backend_service(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_set_backend_service_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1479,6 +1662,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_set_proxy_header(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_set_proxy_header_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/target_vpn_gateways/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/target_vpn_gateways/client.py index 62d4ce13bace..7a3779ede321 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/target_vpn_gateways/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/target_vpn_gateways/client.py @@ -15,6 +15,8 @@ # from collections import OrderedDict import functools +from http import HTTPStatus +import json import logging as std_logging import os import re @@ 
-461,6 +463,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/target_vpn_gateways/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/target_vpn_gateways/transports/rest.py index 9d8c80468517..7f5154f4c540 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/target_vpn_gateways/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/target_vpn_gateways/transports/rest.py @@ -142,12 +142,37 @@ def post_aggregated_list( ) -> compute.TargetVpnGatewayAggregatedList: """Post-rpc interceptor for aggregated_list - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_aggregated_list_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the TargetVpnGateways server but before - it is returned to user code. + it is returned to user code. This `post_aggregated_list` interceptor runs + before the `post_aggregated_list_with_metadata` interceptor. """ return response + def post_aggregated_list_with_metadata( + self, + response: compute.TargetVpnGatewayAggregatedList, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.TargetVpnGatewayAggregatedList, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for aggregated_list + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the TargetVpnGateways server but before it is returned to user code. + + We recommend only using this `post_aggregated_list_with_metadata` + interceptor in new development instead of the `post_aggregated_list` interceptor. + When both interceptors are used, this `post_aggregated_list_with_metadata` interceptor runs after the + `post_aggregated_list` interceptor. The (possibly modified) response returned by + `post_aggregated_list` will be passed to + `post_aggregated_list_with_metadata`. + """ + return response, metadata + def pre_delete( self, request: compute.DeleteTargetVpnGatewayRequest, @@ -165,12 +190,35 @@ def pre_delete( def post_delete(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for delete - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the TargetVpnGateways server but before - it is returned to user code. 
+ it is returned to user code. This `post_delete` interceptor runs + before the `post_delete_with_metadata` interceptor. """ return response + def post_delete_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the TargetVpnGateways server but before it is returned to user code. + + We recommend only using this `post_delete_with_metadata` + interceptor in new development instead of the `post_delete` interceptor. + When both interceptors are used, this `post_delete_with_metadata` interceptor runs after the + `post_delete` interceptor. The (possibly modified) response returned by + `post_delete` will be passed to + `post_delete_with_metadata`. + """ + return response, metadata + def pre_get( self, request: compute.GetTargetVpnGatewayRequest, @@ -188,12 +236,35 @@ def pre_get( def post_get(self, response: compute.TargetVpnGateway) -> compute.TargetVpnGateway: """Post-rpc interceptor for get - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the TargetVpnGateways server but before - it is returned to user code. + it is returned to user code. This `post_get` interceptor runs + before the `post_get_with_metadata` interceptor. """ return response + def post_get_with_metadata( + self, + response: compute.TargetVpnGateway, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.TargetVpnGateway, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the TargetVpnGateways server but before it is returned to user code. + + We recommend only using this `post_get_with_metadata` + interceptor in new development instead of the `post_get` interceptor. + When both interceptors are used, this `post_get_with_metadata` interceptor runs after the + `post_get` interceptor. The (possibly modified) response returned by + `post_get` will be passed to + `post_get_with_metadata`. + """ + return response, metadata + def pre_insert( self, request: compute.InsertTargetVpnGatewayRequest, @@ -211,12 +282,35 @@ def pre_insert( def post_insert(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for insert - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_insert_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the TargetVpnGateways server but before - it is returned to user code. + it is returned to user code. This `post_insert` interceptor runs + before the `post_insert_with_metadata` interceptor. """ return response + def post_insert_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for insert + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the TargetVpnGateways server but before it is returned to user code. 
+ + We recommend only using this `post_insert_with_metadata` + interceptor in new development instead of the `post_insert` interceptor. + When both interceptors are used, this `post_insert_with_metadata` interceptor runs after the + `post_insert` interceptor. The (possibly modified) response returned by + `post_insert` will be passed to + `post_insert_with_metadata`. + """ + return response, metadata + def pre_list( self, request: compute.ListTargetVpnGatewaysRequest, @@ -236,12 +330,35 @@ def post_list( ) -> compute.TargetVpnGatewayList: """Post-rpc interceptor for list - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the TargetVpnGateways server but before - it is returned to user code. + it is returned to user code. This `post_list` interceptor runs + before the `post_list_with_metadata` interceptor. """ return response + def post_list_with_metadata( + self, + response: compute.TargetVpnGatewayList, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.TargetVpnGatewayList, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the TargetVpnGateways server but before it is returned to user code. + + We recommend only using this `post_list_with_metadata` + interceptor in new development instead of the `post_list` interceptor. + When both interceptors are used, this `post_list_with_metadata` interceptor runs after the + `post_list` interceptor. The (possibly modified) response returned by + `post_list` will be passed to + `post_list_with_metadata`. + """ + return response, metadata + def pre_set_labels( self, request: compute.SetLabelsTargetVpnGatewayRequest, @@ -260,12 +377,35 @@ def pre_set_labels( def post_set_labels(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for set_labels - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_set_labels_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the TargetVpnGateways server but before - it is returned to user code. + it is returned to user code. This `post_set_labels` interceptor runs + before the `post_set_labels_with_metadata` interceptor. """ return response + def post_set_labels_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for set_labels + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the TargetVpnGateways server but before it is returned to user code. + + We recommend only using this `post_set_labels_with_metadata` + interceptor in new development instead of the `post_set_labels` interceptor. + When both interceptors are used, this `post_set_labels_with_metadata` interceptor runs after the + `post_set_labels` interceptor. The (possibly modified) response returned by + `post_set_labels` will be passed to + `post_set_labels_with_metadata`. 
+ """ + return response, metadata + @dataclasses.dataclass class TargetVpnGatewaysRestStub: @@ -477,6 +617,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_aggregated_list(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_aggregated_list_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -642,6 +786,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -791,6 +939,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -962,6 +1112,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_insert(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_insert_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1110,6 +1264,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1275,6 +1431,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_set_labels(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_set_labels_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/url_maps/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/url_maps/client.py index d87b650b1423..8078d321645b 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/url_maps/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/url_maps/client.py @@ -15,6 +15,8 @@ # from collections import OrderedDict import functools +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -459,6 +461,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. 
+ """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/url_maps/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/url_maps/transports/rest.py index 17e0523837ba..4afaa6800234 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/url_maps/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/url_maps/transports/rest.py @@ -165,12 +165,35 @@ def post_aggregated_list( ) -> compute.UrlMapsAggregatedList: """Post-rpc interceptor for aggregated_list - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_aggregated_list_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the UrlMaps server but before - it is returned to user code. + it is returned to user code. This `post_aggregated_list` interceptor runs + before the `post_aggregated_list_with_metadata` interceptor. """ return response + def post_aggregated_list_with_metadata( + self, + response: compute.UrlMapsAggregatedList, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.UrlMapsAggregatedList, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for aggregated_list + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the UrlMaps server but before it is returned to user code. + + We recommend only using this `post_aggregated_list_with_metadata` + interceptor in new development instead of the `post_aggregated_list` interceptor. + When both interceptors are used, this `post_aggregated_list_with_metadata` interceptor runs after the + `post_aggregated_list` interceptor. The (possibly modified) response returned by + `post_aggregated_list` will be passed to + `post_aggregated_list_with_metadata`. + """ + return response, metadata + def pre_delete( self, request: compute.DeleteUrlMapRequest, @@ -186,12 +209,35 @@ def pre_delete( def post_delete(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for delete - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the UrlMaps server but before - it is returned to user code. + it is returned to user code. This `post_delete` interceptor runs + before the `post_delete_with_metadata` interceptor. """ return response + def post_delete_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the UrlMaps server but before it is returned to user code. 
+ + We recommend only using this `post_delete_with_metadata` + interceptor in new development instead of the `post_delete` interceptor. + When both interceptors are used, this `post_delete_with_metadata` interceptor runs after the + `post_delete` interceptor. The (possibly modified) response returned by + `post_delete` will be passed to + `post_delete_with_metadata`. + """ + return response, metadata + def pre_get( self, request: compute.GetUrlMapRequest, @@ -207,12 +253,35 @@ def pre_get( def post_get(self, response: compute.UrlMap) -> compute.UrlMap: """Post-rpc interceptor for get - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the UrlMaps server but before - it is returned to user code. + it is returned to user code. This `post_get` interceptor runs + before the `post_get_with_metadata` interceptor. """ return response + def post_get_with_metadata( + self, + response: compute.UrlMap, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.UrlMap, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the UrlMaps server but before it is returned to user code. + + We recommend only using this `post_get_with_metadata` + interceptor in new development instead of the `post_get` interceptor. + When both interceptors are used, this `post_get_with_metadata` interceptor runs after the + `post_get` interceptor. The (possibly modified) response returned by + `post_get` will be passed to + `post_get_with_metadata`. + """ + return response, metadata + def pre_insert( self, request: compute.InsertUrlMapRequest, @@ -228,12 +297,35 @@ def pre_insert( def post_insert(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for insert - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_insert_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the UrlMaps server but before - it is returned to user code. + it is returned to user code. This `post_insert` interceptor runs + before the `post_insert_with_metadata` interceptor. """ return response + def post_insert_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for insert + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the UrlMaps server but before it is returned to user code. + + We recommend only using this `post_insert_with_metadata` + interceptor in new development instead of the `post_insert` interceptor. + When both interceptors are used, this `post_insert_with_metadata` interceptor runs after the + `post_insert` interceptor. The (possibly modified) response returned by + `post_insert` will be passed to + `post_insert_with_metadata`. + """ + return response, metadata + def pre_invalidate_cache( self, request: compute.InvalidateCacheUrlMapRequest, @@ -251,12 +343,35 @@ def pre_invalidate_cache( def post_invalidate_cache(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for invalidate_cache - Override in a subclass to manipulate the response + DEPRECATED. 
Please use the `post_invalidate_cache_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the UrlMaps server but before - it is returned to user code. + it is returned to user code. This `post_invalidate_cache` interceptor runs + before the `post_invalidate_cache_with_metadata` interceptor. """ return response + def post_invalidate_cache_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for invalidate_cache + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the UrlMaps server but before it is returned to user code. + + We recommend only using this `post_invalidate_cache_with_metadata` + interceptor in new development instead of the `post_invalidate_cache` interceptor. + When both interceptors are used, this `post_invalidate_cache_with_metadata` interceptor runs after the + `post_invalidate_cache` interceptor. The (possibly modified) response returned by + `post_invalidate_cache` will be passed to + `post_invalidate_cache_with_metadata`. + """ + return response, metadata + def pre_list( self, request: compute.ListUrlMapsRequest, @@ -272,12 +387,35 @@ def pre_list( def post_list(self, response: compute.UrlMapList) -> compute.UrlMapList: """Post-rpc interceptor for list - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the UrlMaps server but before - it is returned to user code. + it is returned to user code. This `post_list` interceptor runs + before the `post_list_with_metadata` interceptor. """ return response + def post_list_with_metadata( + self, + response: compute.UrlMapList, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.UrlMapList, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the UrlMaps server but before it is returned to user code. + + We recommend only using this `post_list_with_metadata` + interceptor in new development instead of the `post_list` interceptor. + When both interceptors are used, this `post_list_with_metadata` interceptor runs after the + `post_list` interceptor. The (possibly modified) response returned by + `post_list` will be passed to + `post_list_with_metadata`. + """ + return response, metadata + def pre_patch( self, request: compute.PatchUrlMapRequest, @@ -293,12 +431,35 @@ def pre_patch( def post_patch(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for patch - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_patch_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the UrlMaps server but before - it is returned to user code. + it is returned to user code. This `post_patch` interceptor runs + before the `post_patch_with_metadata` interceptor. 
""" return response + def post_patch_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for patch + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the UrlMaps server but before it is returned to user code. + + We recommend only using this `post_patch_with_metadata` + interceptor in new development instead of the `post_patch` interceptor. + When both interceptors are used, this `post_patch_with_metadata` interceptor runs after the + `post_patch` interceptor. The (possibly modified) response returned by + `post_patch` will be passed to + `post_patch_with_metadata`. + """ + return response, metadata + def pre_update( self, request: compute.UpdateUrlMapRequest, @@ -314,12 +475,35 @@ def pre_update( def post_update(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for update - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the UrlMaps server but before - it is returned to user code. + it is returned to user code. This `post_update` interceptor runs + before the `post_update_with_metadata` interceptor. """ return response + def post_update_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the UrlMaps server but before it is returned to user code. + + We recommend only using this `post_update_with_metadata` + interceptor in new development instead of the `post_update` interceptor. + When both interceptors are used, this `post_update_with_metadata` interceptor runs after the + `post_update` interceptor. The (possibly modified) response returned by + `post_update` will be passed to + `post_update_with_metadata`. + """ + return response, metadata + def pre_validate( self, request: compute.ValidateUrlMapRequest, @@ -337,12 +521,37 @@ def post_validate( ) -> compute.UrlMapsValidateResponse: """Post-rpc interceptor for validate - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_validate_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the UrlMaps server but before - it is returned to user code. + it is returned to user code. This `post_validate` interceptor runs + before the `post_validate_with_metadata` interceptor. """ return response + def post_validate_with_metadata( + self, + response: compute.UrlMapsValidateResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.UrlMapsValidateResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for validate + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the UrlMaps server but before it is returned to user code. + + We recommend only using this `post_validate_with_metadata` + interceptor in new development instead of the `post_validate` interceptor. + When both interceptors are used, this `post_validate_with_metadata` interceptor runs after the + `post_validate` interceptor. 
The (possibly modified) response returned by + `post_validate` will be passed to + `post_validate_with_metadata`. + """ + return response, metadata + @dataclasses.dataclass class UrlMapsRestStub: @@ -557,6 +766,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_aggregated_list(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_aggregated_list_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -713,6 +926,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -877,6 +1094,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1039,6 +1258,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_insert(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_insert_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1212,6 +1435,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_invalidate_cache(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_invalidate_cache_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1351,6 +1578,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1513,6 +1742,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_patch(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_patch_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1675,6 +1908,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1823,6 +2060,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = 
self._interceptor.post_validate(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_validate_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/vpn_gateways/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/vpn_gateways/client.py index 5cbd8d054495..9fadc8136bc4 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/vpn_gateways/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/vpn_gateways/client.py @@ -15,6 +15,8 @@ # from collections import OrderedDict import functools +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -459,6 +461,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/vpn_gateways/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/vpn_gateways/transports/rest.py index 281b1961ab98..4bcdd29c0b0b 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/vpn_gateways/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/vpn_gateways/transports/rest.py @@ -158,12 +158,37 @@ def post_aggregated_list( ) -> compute.VpnGatewayAggregatedList: """Post-rpc interceptor for aggregated_list - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_aggregated_list_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the VpnGateways server but before - it is returned to user code. + it is returned to user code. This `post_aggregated_list` interceptor runs + before the `post_aggregated_list_with_metadata` interceptor. """ return response + def post_aggregated_list_with_metadata( + self, + response: compute.VpnGatewayAggregatedList, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.VpnGatewayAggregatedList, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for aggregated_list + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the VpnGateways server but before it is returned to user code. + + We recommend only using this `post_aggregated_list_with_metadata` + interceptor in new development instead of the `post_aggregated_list` interceptor. 
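Each __call__ hunk above feeds the new hooks with the same one-line conversion of HTTP response headers into metadata tuples. A minimal sketch of that conversion, using a plain dict as a stand-in for a requests-style headers mapping:

# Every HTTP response header becomes a (key, str(value)) pair.
headers = {"content-type": "application/json; charset=UTF-8", "x-example-debug": 42}
response_metadata = [(k, str(v)) for k, v in headers.items()]
print(response_metadata)
# [('content-type', 'application/json; charset=UTF-8'), ('x-example-debug', '42')]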
+ When both interceptors are used, this `post_aggregated_list_with_metadata` interceptor runs after the + `post_aggregated_list` interceptor. The (possibly modified) response returned by + `post_aggregated_list` will be passed to + `post_aggregated_list_with_metadata`. + """ + return response, metadata + def pre_delete( self, request: compute.DeleteVpnGatewayRequest, @@ -181,12 +206,35 @@ def pre_delete( def post_delete(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for delete - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the VpnGateways server but before - it is returned to user code. + it is returned to user code. This `post_delete` interceptor runs + before the `post_delete_with_metadata` interceptor. """ return response + def post_delete_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the VpnGateways server but before it is returned to user code. + + We recommend only using this `post_delete_with_metadata` + interceptor in new development instead of the `post_delete` interceptor. + When both interceptors are used, this `post_delete_with_metadata` interceptor runs after the + `post_delete` interceptor. The (possibly modified) response returned by + `post_delete` will be passed to + `post_delete_with_metadata`. + """ + return response, metadata + def pre_get( self, request: compute.GetVpnGatewayRequest, @@ -202,12 +250,35 @@ def pre_get( def post_get(self, response: compute.VpnGateway) -> compute.VpnGateway: """Post-rpc interceptor for get - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the VpnGateways server but before - it is returned to user code. + it is returned to user code. This `post_get` interceptor runs + before the `post_get_with_metadata` interceptor. """ return response + def post_get_with_metadata( + self, + response: compute.VpnGateway, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.VpnGateway, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the VpnGateways server but before it is returned to user code. + + We recommend only using this `post_get_with_metadata` + interceptor in new development instead of the `post_get` interceptor. + When both interceptors are used, this `post_get_with_metadata` interceptor runs after the + `post_get` interceptor. The (possibly modified) response returned by + `post_get` will be passed to + `post_get_with_metadata`. + """ + return response, metadata + def pre_get_status( self, request: compute.GetStatusVpnGatewayRequest, @@ -227,12 +298,37 @@ def post_get_status( ) -> compute.VpnGatewaysGetStatusResponse: """Post-rpc interceptor for get_status - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_status_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response after it is returned by the VpnGateways server but before - it is returned to user code. + it is returned to user code. This `post_get_status` interceptor runs + before the `post_get_status_with_metadata` interceptor. """ return response + def post_get_status_with_metadata( + self, + response: compute.VpnGatewaysGetStatusResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.VpnGatewaysGetStatusResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for get_status + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the VpnGateways server but before it is returned to user code. + + We recommend only using this `post_get_status_with_metadata` + interceptor in new development instead of the `post_get_status` interceptor. + When both interceptors are used, this `post_get_status_with_metadata` interceptor runs after the + `post_get_status` interceptor. The (possibly modified) response returned by + `post_get_status` will be passed to + `post_get_status_with_metadata`. + """ + return response, metadata + def pre_insert( self, request: compute.InsertVpnGatewayRequest, @@ -250,12 +346,35 @@ def pre_insert( def post_insert(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for insert - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_insert_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the VpnGateways server but before - it is returned to user code. + it is returned to user code. This `post_insert` interceptor runs + before the `post_insert_with_metadata` interceptor. """ return response + def post_insert_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for insert + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the VpnGateways server but before it is returned to user code. + + We recommend only using this `post_insert_with_metadata` + interceptor in new development instead of the `post_insert` interceptor. + When both interceptors are used, this `post_insert_with_metadata` interceptor runs after the + `post_insert` interceptor. The (possibly modified) response returned by + `post_insert` will be passed to + `post_insert_with_metadata`. + """ + return response, metadata + def pre_list( self, request: compute.ListVpnGatewaysRequest, @@ -271,12 +390,35 @@ def pre_list( def post_list(self, response: compute.VpnGatewayList) -> compute.VpnGatewayList: """Post-rpc interceptor for list - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the VpnGateways server but before - it is returned to user code. + it is returned to user code. This `post_list` interceptor runs + before the `post_list_with_metadata` interceptor. 
""" return response + def post_list_with_metadata( + self, + response: compute.VpnGatewayList, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.VpnGatewayList, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the VpnGateways server but before it is returned to user code. + + We recommend only using this `post_list_with_metadata` + interceptor in new development instead of the `post_list` interceptor. + When both interceptors are used, this `post_list_with_metadata` interceptor runs after the + `post_list` interceptor. The (possibly modified) response returned by + `post_list` will be passed to + `post_list_with_metadata`. + """ + return response, metadata + def pre_set_labels( self, request: compute.SetLabelsVpnGatewayRequest, @@ -294,12 +436,35 @@ def pre_set_labels( def post_set_labels(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for set_labels - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_set_labels_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the VpnGateways server but before - it is returned to user code. + it is returned to user code. This `post_set_labels` interceptor runs + before the `post_set_labels_with_metadata` interceptor. """ return response + def post_set_labels_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for set_labels + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the VpnGateways server but before it is returned to user code. + + We recommend only using this `post_set_labels_with_metadata` + interceptor in new development instead of the `post_set_labels` interceptor. + When both interceptors are used, this `post_set_labels_with_metadata` interceptor runs after the + `post_set_labels` interceptor. The (possibly modified) response returned by + `post_set_labels` will be passed to + `post_set_labels_with_metadata`. + """ + return response, metadata + def pre_test_iam_permissions( self, request: compute.TestIamPermissionsVpnGatewayRequest, @@ -320,12 +485,37 @@ def post_test_iam_permissions( ) -> compute.TestPermissionsResponse: """Post-rpc interceptor for test_iam_permissions - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_test_iam_permissions_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the VpnGateways server but before - it is returned to user code. + it is returned to user code. This `post_test_iam_permissions` interceptor runs + before the `post_test_iam_permissions_with_metadata` interceptor. """ return response + def post_test_iam_permissions_with_metadata( + self, + response: compute.TestPermissionsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.TestPermissionsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for test_iam_permissions + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the VpnGateways server but before it is returned to user code. 
+ + We recommend only using this `post_test_iam_permissions_with_metadata` + interceptor in new development instead of the `post_test_iam_permissions` interceptor. + When both interceptors are used, this `post_test_iam_permissions_with_metadata` interceptor runs after the + `post_test_iam_permissions` interceptor. The (possibly modified) response returned by + `post_test_iam_permissions` will be passed to + `post_test_iam_permissions_with_metadata`. + """ + return response, metadata + @dataclasses.dataclass class VpnGatewaysRestStub: @@ -536,6 +726,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_aggregated_list(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_aggregated_list_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -697,6 +891,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -848,6 +1046,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -992,6 +1192,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_status(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_status_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1159,6 +1363,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_insert(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_insert_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1303,6 +1511,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1470,6 +1680,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_set_labels(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_set_labels_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1620,6 +1834,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_test_iam_permissions(resp) + response_metadata = 
[(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_test_iam_permissions_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/vpn_tunnels/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/vpn_tunnels/client.py index d1413f8e1425..9cf67bdce2f7 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/vpn_tunnels/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/vpn_tunnels/client.py @@ -15,6 +15,8 @@ # from collections import OrderedDict import functools +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -459,6 +461,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/vpn_tunnels/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/vpn_tunnels/transports/rest.py index 22611696f5a3..18057ae4b29b 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/vpn_tunnels/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/vpn_tunnels/transports/rest.py @@ -141,12 +141,37 @@ def post_aggregated_list( ) -> compute.VpnTunnelAggregatedList: """Post-rpc interceptor for aggregated_list - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_aggregated_list_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the VpnTunnels server but before - it is returned to user code. + it is returned to user code. This `post_aggregated_list` interceptor runs + before the `post_aggregated_list_with_metadata` interceptor. """ return response + def post_aggregated_list_with_metadata( + self, + response: compute.VpnTunnelAggregatedList, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.VpnTunnelAggregatedList, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for aggregated_list + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the VpnTunnels server but before it is returned to user code. + + We recommend only using this `post_aggregated_list_with_metadata` + interceptor in new development instead of the `post_aggregated_list` interceptor. 
+ When both interceptors are used, this `post_aggregated_list_with_metadata` interceptor runs after the + `post_aggregated_list` interceptor. The (possibly modified) response returned by + `post_aggregated_list` will be passed to + `post_aggregated_list_with_metadata`. + """ + return response, metadata + def pre_delete( self, request: compute.DeleteVpnTunnelRequest, @@ -162,12 +187,35 @@ def pre_delete( def post_delete(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for delete - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the VpnTunnels server but before - it is returned to user code. + it is returned to user code. This `post_delete` interceptor runs + before the `post_delete_with_metadata` interceptor. """ return response + def post_delete_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the VpnTunnels server but before it is returned to user code. + + We recommend only using this `post_delete_with_metadata` + interceptor in new development instead of the `post_delete` interceptor. + When both interceptors are used, this `post_delete_with_metadata` interceptor runs after the + `post_delete` interceptor. The (possibly modified) response returned by + `post_delete` will be passed to + `post_delete_with_metadata`. + """ + return response, metadata + def pre_get( self, request: compute.GetVpnTunnelRequest, @@ -183,12 +231,35 @@ def pre_get( def post_get(self, response: compute.VpnTunnel) -> compute.VpnTunnel: """Post-rpc interceptor for get - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the VpnTunnels server but before - it is returned to user code. + it is returned to user code. This `post_get` interceptor runs + before the `post_get_with_metadata` interceptor. """ return response + def post_get_with_metadata( + self, + response: compute.VpnTunnel, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.VpnTunnel, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the VpnTunnels server but before it is returned to user code. + + We recommend only using this `post_get_with_metadata` + interceptor in new development instead of the `post_get` interceptor. + When both interceptors are used, this `post_get_with_metadata` interceptor runs after the + `post_get` interceptor. The (possibly modified) response returned by + `post_get` will be passed to + `post_get_with_metadata`. + """ + return response, metadata + def pre_insert( self, request: compute.InsertVpnTunnelRequest, @@ -204,12 +275,35 @@ def pre_insert( def post_insert(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for insert - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_insert_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response after it is returned by the VpnTunnels server but before - it is returned to user code. + it is returned to user code. This `post_insert` interceptor runs + before the `post_insert_with_metadata` interceptor. """ return response + def post_insert_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for insert + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the VpnTunnels server but before it is returned to user code. + + We recommend only using this `post_insert_with_metadata` + interceptor in new development instead of the `post_insert` interceptor. + When both interceptors are used, this `post_insert_with_metadata` interceptor runs after the + `post_insert` interceptor. The (possibly modified) response returned by + `post_insert` will be passed to + `post_insert_with_metadata`. + """ + return response, metadata + def pre_list( self, request: compute.ListVpnTunnelsRequest, @@ -225,12 +319,35 @@ def pre_list( def post_list(self, response: compute.VpnTunnelList) -> compute.VpnTunnelList: """Post-rpc interceptor for list - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the VpnTunnels server but before - it is returned to user code. + it is returned to user code. This `post_list` interceptor runs + before the `post_list_with_metadata` interceptor. """ return response + def post_list_with_metadata( + self, + response: compute.VpnTunnelList, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.VpnTunnelList, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the VpnTunnels server but before it is returned to user code. + + We recommend only using this `post_list_with_metadata` + interceptor in new development instead of the `post_list` interceptor. + When both interceptors are used, this `post_list_with_metadata` interceptor runs after the + `post_list` interceptor. The (possibly modified) response returned by + `post_list` will be passed to + `post_list_with_metadata`. + """ + return response, metadata + def pre_set_labels( self, request: compute.SetLabelsVpnTunnelRequest, @@ -248,12 +365,35 @@ def pre_set_labels( def post_set_labels(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for set_labels - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_set_labels_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the VpnTunnels server but before - it is returned to user code. + it is returned to user code. This `post_set_labels` interceptor runs + before the `post_set_labels_with_metadata` interceptor. 
""" return response + def post_set_labels_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for set_labels + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the VpnTunnels server but before it is returned to user code. + + We recommend only using this `post_set_labels_with_metadata` + interceptor in new development instead of the `post_set_labels` interceptor. + When both interceptors are used, this `post_set_labels_with_metadata` interceptor runs after the + `post_set_labels` interceptor. The (possibly modified) response returned by + `post_set_labels` will be passed to + `post_set_labels_with_metadata`. + """ + return response, metadata + @dataclasses.dataclass class VpnTunnelsRestStub: @@ -466,6 +606,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_aggregated_list(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_aggregated_list_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -625,6 +769,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -767,6 +915,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -932,6 +1082,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_insert(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_insert_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1076,6 +1230,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1243,6 +1399,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_set_labels(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_set_labels_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/zone_operations/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/zone_operations/client.py index 25206835ba0d..7f124e78fa59 100644 --- 
a/packages/google-cloud-compute/google/cloud/compute_v1/services/zone_operations/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/zone_operations/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -458,6 +460,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/zone_operations/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/zone_operations/transports/rest.py index 669067058e57..f58e438d5b84 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/zone_operations/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/zone_operations/transports/rest.py @@ -125,12 +125,37 @@ def post_delete( ) -> compute.DeleteZoneOperationResponse: """Post-rpc interceptor for delete - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ZoneOperations server but before - it is returned to user code. + it is returned to user code. This `post_delete` interceptor runs + before the `post_delete_with_metadata` interceptor. """ return response + def post_delete_with_metadata( + self, + response: compute.DeleteZoneOperationResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.DeleteZoneOperationResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for delete + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ZoneOperations server but before it is returned to user code. + + We recommend only using this `post_delete_with_metadata` + interceptor in new development instead of the `post_delete` interceptor. + When both interceptors are used, this `post_delete_with_metadata` interceptor runs after the + `post_delete` interceptor. The (possibly modified) response returned by + `post_delete` will be passed to + `post_delete_with_metadata`. + """ + return response, metadata + def pre_get( self, request: compute.GetZoneOperationRequest, @@ -148,12 +173,35 @@ def pre_get( def post_get(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for get - Override in a subclass to manipulate the response + DEPRECATED. 
Please use the `post_get_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ZoneOperations server but before - it is returned to user code. + it is returned to user code. This `post_get` interceptor runs + before the `post_get_with_metadata` interceptor. """ return response + def post_get_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ZoneOperations server but before it is returned to user code. + + We recommend only using this `post_get_with_metadata` + interceptor in new development instead of the `post_get` interceptor. + When both interceptors are used, this `post_get_with_metadata` interceptor runs after the + `post_get` interceptor. The (possibly modified) response returned by + `post_get` will be passed to + `post_get_with_metadata`. + """ + return response, metadata + def pre_list( self, request: compute.ListZoneOperationsRequest, @@ -171,12 +219,35 @@ def pre_list( def post_list(self, response: compute.OperationList) -> compute.OperationList: """Post-rpc interceptor for list - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ZoneOperations server but before - it is returned to user code. + it is returned to user code. This `post_list` interceptor runs + before the `post_list_with_metadata` interceptor. """ return response + def post_list_with_metadata( + self, + response: compute.OperationList, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.OperationList, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ZoneOperations server but before it is returned to user code. + + We recommend only using this `post_list_with_metadata` + interceptor in new development instead of the `post_list` interceptor. + When both interceptors are used, this `post_list_with_metadata` interceptor runs after the + `post_list` interceptor. The (possibly modified) response returned by + `post_list` will be passed to + `post_list_with_metadata`. + """ + return response, metadata + def pre_wait( self, request: compute.WaitZoneOperationRequest, @@ -194,12 +265,35 @@ def pre_wait( def post_wait(self, response: compute.Operation) -> compute.Operation: """Post-rpc interceptor for wait - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_wait_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ZoneOperations server but before - it is returned to user code. + it is returned to user code. This `post_wait` interceptor runs + before the `post_wait_with_metadata` interceptor. 
""" return response + def post_wait_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for wait + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ZoneOperations server but before it is returned to user code. + + We recommend only using this `post_wait_with_metadata` + interceptor in new development instead of the `post_wait` interceptor. + When both interceptors are used, this `post_wait_with_metadata` interceptor runs after the + `post_wait` interceptor. The (possibly modified) response returned by + `post_wait` will be passed to + `post_wait_with_metadata`. + """ + return response, metadata + @dataclasses.dataclass class ZoneOperationsRestStub: @@ -415,6 +509,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -576,6 +674,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -722,6 +822,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -883,6 +985,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_wait(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_wait_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/zones/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/zones/client.py index 57a60c0dbd86..1cdad25daac5 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/zones/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/zones/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -456,6 +458,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. 
+ """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/zones/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/zones/transports/rest.py index 9c3c7f7aceb2..98bb435ed8e3 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/zones/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/zones/transports/rest.py @@ -105,12 +105,33 @@ def pre_get( def post_get(self, response: compute.Zone) -> compute.Zone: """Post-rpc interceptor for get - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Zones server but before - it is returned to user code. + it is returned to user code. This `post_get` interceptor runs + before the `post_get_with_metadata` interceptor. """ return response + def post_get_with_metadata( + self, response: compute.Zone, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[compute.Zone, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Zones server but before it is returned to user code. + + We recommend only using this `post_get_with_metadata` + interceptor in new development instead of the `post_get` interceptor. + When both interceptors are used, this `post_get_with_metadata` interceptor runs after the + `post_get` interceptor. The (possibly modified) response returned by + `post_get` will be passed to + `post_get_with_metadata`. + """ + return response, metadata + def pre_list( self, request: compute.ListZonesRequest, @@ -126,12 +147,35 @@ def pre_list( def post_list(self, response: compute.ZoneList) -> compute.ZoneList: """Post-rpc interceptor for list - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Zones server but before - it is returned to user code. + it is returned to user code. This `post_list` interceptor runs + before the `post_list_with_metadata` interceptor. """ return response + def post_list_with_metadata( + self, + response: compute.ZoneList, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.ZoneList, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Zones server but before it is returned to user code. + + We recommend only using this `post_list_with_metadata` + interceptor in new development instead of the `post_list` interceptor. 
+ When both interceptors are used, this `post_list_with_metadata` interceptor runs after the + `post_list` interceptor. The (possibly modified) response returned by + `post_list` will be passed to + `post_list_with_metadata`. + """ + return response, metadata + @dataclasses.dataclass class ZonesRestStub: @@ -345,6 +389,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -484,6 +530,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/types/__init__.py b/packages/google-cloud-compute/google/cloud/compute_v1/types/__init__.py index a8b9f8257d40..03d712bf890e 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/types/__init__.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/types/__init__.py @@ -66,6 +66,7 @@ AggregatedListNetworkAttachmentsRequest, AggregatedListNetworkEdgeSecurityServicesRequest, AggregatedListNetworkEndpointGroupsRequest, + AggregatedListNetworkFirewallPoliciesRequest, AggregatedListNodeGroupsRequest, AggregatedListNodeTemplatesRequest, AggregatedListNodeTypesRequest, @@ -332,6 +333,7 @@ FirewallList, FirewallLogConfig, FirewallPoliciesListAssociationsResponse, + FirewallPoliciesScopedList, FirewallPolicy, FirewallPolicyAssociation, FirewallPolicyList, @@ -909,6 +911,7 @@ NetworkEndpointGroupsListNetworkEndpoints, NetworkEndpointGroupsScopedList, NetworkEndpointWithHealthStatus, + NetworkFirewallPolicyAggregatedList, NetworkInterface, NetworkList, NetworkPeering, @@ -1592,6 +1595,7 @@ "AggregatedListNetworkAttachmentsRequest", "AggregatedListNetworkEdgeSecurityServicesRequest", "AggregatedListNetworkEndpointGroupsRequest", + "AggregatedListNetworkFirewallPoliciesRequest", "AggregatedListNodeGroupsRequest", "AggregatedListNodeTemplatesRequest", "AggregatedListNodeTypesRequest", @@ -1858,6 +1862,7 @@ "FirewallList", "FirewallLogConfig", "FirewallPoliciesListAssociationsResponse", + "FirewallPoliciesScopedList", "FirewallPolicy", "FirewallPolicyAssociation", "FirewallPolicyList", @@ -2435,6 +2440,7 @@ "NetworkEndpointGroupsListNetworkEndpoints", "NetworkEndpointGroupsScopedList", "NetworkEndpointWithHealthStatus", + "NetworkFirewallPolicyAggregatedList", "NetworkInterface", "NetworkList", "NetworkPeering", diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/types/compute.py b/packages/google-cloud-compute/google/cloud/compute_v1/types/compute.py index 70daefa6ed54..3da4c79a12ae 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/types/compute.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/types/compute.py @@ -76,6 +76,7 @@ "AggregatedListNetworkAttachmentsRequest", "AggregatedListNetworkEdgeSecurityServicesRequest", "AggregatedListNetworkEndpointGroupsRequest", + "AggregatedListNetworkFirewallPoliciesRequest", "AggregatedListNodeGroupsRequest", "AggregatedListNodeTemplatesRequest", 
"AggregatedListNodeTypesRequest", @@ -341,6 +342,7 @@ "FirewallList", "FirewallLogConfig", "FirewallPoliciesListAssociationsResponse", + "FirewallPoliciesScopedList", "FirewallPolicy", "FirewallPolicyAssociation", "FirewallPolicyList", @@ -918,6 +920,7 @@ "NetworkEndpointGroupsListNetworkEndpoints", "NetworkEndpointGroupsScopedList", "NetworkEndpointWithHealthStatus", + "NetworkFirewallPolicyAggregatedList", "NetworkInterface", "NetworkList", "NetworkPeering", @@ -2171,7 +2174,7 @@ class AccessConfig(proto.Message): This field is a member of `oneof`_ ``_public_ptr_domain_name``. security_policy (str): - [Output Only] The resource URL for the security policy + The resource URL for the security policy associated with this access config. This field is a member of `oneof`_ ``_security_policy``. @@ -7127,6 +7130,160 @@ class AggregatedListNetworkEndpointGroupsRequest(proto.Message): ) +class AggregatedListNetworkFirewallPoliciesRequest(proto.Message): + r"""A request message for NetworkFirewallPolicies.AggregatedList. + See the method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. These two types of filter expressions cannot be + mixed in one request. If you want to use AIP-160, your + expression must specify the field name, an operator, and the + value that you want to use for filtering. The value must be + a string, a number, or a boolean. The operator must be + either ``=``, ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. + For example, if you are filtering Compute Engine instances, + you can exclude instances named ``example-instance`` by + specifying ``name != example-instance``. The ``:*`` + comparison can be used to test whether a key has been + defined. For example, to find all objects with ``owner`` + label use: ``labels.owner:*`` You can also filter nested + fields. For example, you could specify + ``scheduling.automaticRestart = false`` to include instances + only if they are not scheduled for automatic restarts. You + can use filtering on nested fields to filter based on + resource labels. To filter on multiple expressions, provide + each separate expression within parentheses. For example: + ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. For example: + ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. Examples: + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` The + literal value is interpreted as a regular expression using + Google RE2 library syntax. The literal value must match the + entire field. For example, to filter for instances that do + not end with name "instance", you would use + ``name ne .*instance``. 
You cannot combine constraints on + multiple fields using regular expressions. + + This field is a member of `oneof`_ ``_filter``. + include_all_scopes (bool): + Indicates whether every visible scope for + each scope type (zone, region, global) should be + included in the response. For new resource types + added after this field, the flag has no effect + as new resource types will always include every + visible scope for each scope type in response. + For resource types which predate this field, if + this flag is omitted or false, only scopes of + the scope types where the resource type is + expected to be found will be included. + + This field is a member of `oneof`_ ``_include_all_scopes``. + max_results (int): + The maximum number of results per page that should be + returned. If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. By default, results + are returned in alphanumerical order based on the resource + name. You can also sort results in descending order based on + the creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Project ID for this request. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. For example, when + partial success behavior is enabled, + aggregatedList for a single zone scope either + returns all resources in the zone or no + resources, with an error code. + + This field is a member of `oneof`_ ``_return_partial_success``. + service_project_number (int): + The Shared VPC service project id or service + project number for which aggregated list request + is invoked for subnetworks list-usable api. + + This field is a member of `oneof`_ ``_service_project_number``. + """ + + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + include_all_scopes: bool = proto.Field( + proto.BOOL, + number=391327988, + optional=True, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + service_project_number: int = proto.Field( + proto.INT64, + number=316757497, + optional=True, + ) + + class AggregatedListNodeGroupsRequest(proto.Message): r"""A request message for NodeGroups.AggregatedList. See the method description for details. 
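The new AggregatedListNetworkFirewallPoliciesRequest defined above carries the usual aggregated-list knobs (filter, paging, partial success). Below is a minimal sketch of how a caller might populate it and pass it to the aggregated_list method this PR adds to NetworkFirewallPoliciesClient; the project ID and the policy name in the filter are placeholders, and the filter string follows the unquoted grammar described in the field docstring.

from google.cloud import compute_v1

def list_network_firewall_policies(project_id: str):
    # project_id and the policy name in the filter are placeholders.
    client = compute_v1.NetworkFirewallPoliciesClient()
    request = compute_v1.AggregatedListNetworkFirewallPoliciesRequest(
        project=project_id,
        filter="name != example-policy",   # unquoted literal, per the field docstring
        max_results=100,                   # per-page size; the server caps this at 500
        return_partial_success=True,       # tolerate failed scopes instead of erroring out
    )
    # aggregated_list returns a pager that follows nextPageToken transparently.
    return client.aggregated_list(request=request)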
@@ -12382,7 +12539,10 @@ class AttachedDiskInitializeParams(proto.Message): you created, specify the snapshot name in the following format: global/snapshots/my-backup If the source snapshot is deleted later, this field - will not be set. + will not be set. Note: You cannot create VMs in + bulk using a snapshot as the source. Use an + image instead when you create VMs using the bulk + insert method. This field is a member of `oneof`_ ``_source_snapshot``. source_snapshot_encryption_key (google.cloud.compute_v1.types.CustomerEncryptionKey): @@ -18657,6 +18817,8 @@ class Type(proto.Enum): No description available. ACCELERATOR_OPTIMIZED_A3_MEGA (156517459): No description available. + ACCELERATOR_OPTIMIZED_A3_ULTRA (27812811): + No description available. COMPUTE_OPTIMIZED (158349023): No description available. COMPUTE_OPTIMIZED_C2D (383246453): @@ -18704,6 +18866,7 @@ class Type(proto.Enum): ACCELERATOR_OPTIMIZED = 280848403 ACCELERATOR_OPTIMIZED_A3 = 158574526 ACCELERATOR_OPTIMIZED_A3_MEGA = 156517459 + ACCELERATOR_OPTIMIZED_A3_ULTRA = 27812811 COMPUTE_OPTIMIZED = 158349023 COMPUTE_OPTIMIZED_C2D = 383246453 COMPUTE_OPTIMIZED_C3 = 428004784 @@ -26805,17 +26968,16 @@ class ErrorInfo(proto.Message): This field is a member of `oneof`_ ``_domain``. metadatas (MutableMapping[str, str]): - Additional structured details about this - error. Keys must match /a-z+/ but should ideally - be lowerCamelCase. Also they must be limited to - 64 characters in length. When identifying the - current value of an exceeded limit, the units - should be contained in the key, not the value. - For example, rather than {"instanceLimit": - "100/request"}, should be returned as, - {"instanceLimitPerRequest": "100"}, if the - client exceeds the number of instances that can - be created in a single (batch) request. + Additional structured details about this error. Keys must + match a regular expression of ``a-z+`` but should ideally be + lowerCamelCase. Also, they must be limited to 64 characters + in length. When identifying the current value of an exceeded + limit, the units should be contained in the key, not the + value. For example, rather than + ``{"instanceLimit": "100/request"}``, should be returned as, + ``{"instanceLimitPerRequest": "100"}``, if the client + exceeds the number of instances that can be created in a + single (batch) request. reason (str): The reason of the error. This is a constant value that identifies the proximate cause of the error. Error reasons @@ -27996,6 +28158,35 @@ class FirewallPoliciesListAssociationsResponse(proto.Message): ) +class FirewallPoliciesScopedList(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + firewall_policies (MutableSequence[google.cloud.compute_v1.types.FirewallPolicy]): + A list of firewall policies contained in this + scope. + warning (google.cloud.compute_v1.types.Warning): + Informational warning which replaces the list + of firewall policies when the list is empty. + + This field is a member of `oneof`_ ``_warning``. + """ + + firewall_policies: MutableSequence["FirewallPolicy"] = proto.RepeatedField( + proto.MESSAGE, + number=392512943, + message="FirewallPolicy", + ) + warning: "Warning" = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message="Warning", + ) + + class FirewallPolicy(proto.Message): r"""Represents a Firewall Policy resource. 
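FirewallPoliciesScopedList, added just above, is the per-scope entry of the aggregated response introduced later in this diff (NetworkFirewallPolicyAggregatedList.items). A hedged sketch of consuming it follows, assuming this pager iterates like other compute_v1 aggregated-list pagers and yields (scope, scoped list) pairs; per the docstring above, the warning field stands in for the list when a scope has no policies.

from google.cloud import compute_v1

def print_policies_by_scope(project_id: str):
    client = compute_v1.NetworkFirewallPoliciesClient()
    request = compute_v1.AggregatedListNetworkFirewallPoliciesRequest(project=project_id)
    # Assumption: iteration yields (scope, FirewallPoliciesScopedList) pairs,
    # as it does for other compute_v1 aggregated-list pagers.
    for scope, scoped_list in client.aggregated_list(request=request):
        for policy in scoped_list.firewall_policies:
            print(f"{scope}: {policy.name}")
        if not scoped_list.firewall_policies:
            # An informational warning replaces the list when a scope is empty.
            print(f"{scope}: warning code {scoped_list.warning.code}")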
@@ -42984,13 +43175,13 @@ class InstanceGroup(proto.Message): This field is a member of `oneof`_ ``_name``. named_ports (MutableSequence[google.cloud.compute_v1.types.NamedPort]): - Assigns a name to a port number. For example: {name: "http", - port: 80} This allows the system to reference ports by the - assigned name instead of a port number. Named ports can also - contain multiple ports. For example: [{name: "app1", port: - 8080}, {name: "app1", port: 8081}, {name: "app2", port: - 8082}] Named ports apply to all instances in this instance - group. + Optional. Assigns a name to a port number. For example: + {name: "http", port: 80} This allows the system to reference + ports by the assigned name instead of a port number. Named + ports can also contain multiple ports. For example: [{name: + "app1", port: 8080}, {name: "app1", port: 8081}, {name: + "app2", port: 8082}] Named ports apply to all instances in + this instance group. network (str): [Output Only] The URL of the network to which all instances in the instance group belong. If your instance has multiple @@ -48529,15 +48720,16 @@ class InterconnectAttachment(proto.Message): Mbit/s - BPS_200M: 200 Mbit/s - BPS_300M: 300 Mbit/s - BPS_400M: 400 Mbit/s - BPS_500M: 500 Mbit/s - BPS_1G: 1 Gbit/s - BPS_2G: 2 Gbit/s - BPS_5G: 5 Gbit/s - BPS_10G: 10 - Gbit/s - BPS_20G: 20 Gbit/s - BPS_50G: 50 Gbit/s Check the - Bandwidth enum for the list of possible values. + Gbit/s - BPS_20G: 20 Gbit/s - BPS_50G: 50 Gbit/s - BPS_100G: + 100 Gbit/s Check the Bandwidth enum for the list of possible + values. This field is a member of `oneof`_ ``_bandwidth``. candidate_ipv6_subnets (MutableSequence[str]): This field is not available. candidate_subnets (MutableSequence[str]): - Up to 16 candidate prefixes that can be used - to restrict the allocation of + Input only. Up to 16 candidate prefixes that + can be used to restrict the allocation of cloudRouterIpAddress and customerRouterIpAddress for this attachment. All prefixes must be within link-local address space (169.254.0.0/16) and @@ -48599,10 +48791,10 @@ class InterconnectAttachment(proto.Message): This field is a member of `oneof`_ ``_description``. edge_availability_domain (str): - Desired availability domain for the attachment. Only - available for type PARTNER, at creation time, and can take - one of the following values: - AVAILABILITY_DOMAIN_ANY - - AVAILABILITY_DOMAIN_1 - AVAILABILITY_DOMAIN_2 For improved + Input only. Desired availability domain for the attachment. + Only available for type PARTNER, at creation time, and can + take one of the following values: - AVAILABILITY_DOMAIN_ANY + - AVAILABILITY_DOMAIN_1 - AVAILABILITY_DOMAIN_2 For improved reliability, customers should configure a pair of attachments, one per availability domain. The selected availability domain will be provided to the Partner via the @@ -48816,9 +49008,9 @@ class InterconnectAttachment(proto.Message): This field is a member of `oneof`_ ``_state``. subnet_length (int): - Length of the IPv4 subnet mask. Allowed - values: - 29 (default) - 30 The default value is - 29, except for Cross-Cloud Interconnect + Input only. Length of the IPv4 subnet mask. + Allowed values: - 29 (default) - 30 The default + value is 29, except for Cross-Cloud Interconnect connections that use an InterconnectRemoteLocation with a constraints.subnetLengthRange.min equal to 30. 
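The hunks above extend the InterconnectAttachment documentation with the BPS_100G bandwidth value and mark several fields as input only. The following resource body is purely illustrative and not taken from the PR: every value is a placeholder, and enum-typed fields are plain strings in compute_v1, so the new value is passed as the string "BPS_100G".

from google.cloud import compute_v1

# Illustrative only; every value below is a placeholder.
attachment = compute_v1.InterconnectAttachment(
    name="my-partner-attachment",
    bandwidth="BPS_100G",                              # newly documented 100 Gbit/s option
    edge_availability_domain="AVAILABILITY_DOMAIN_1",  # input only; PARTNER type, at creation time
    subnet_length=29,                                  # input only; allowed values are 29 (default) or 30
)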
@@ -48859,12 +49051,14 @@ class Bandwidth(proto.Enum): - BPS_200M: 200 Mbit/s - BPS_300M: 300 Mbit/s - BPS_400M: 400 Mbit/s - BPS_500M: 500 Mbit/s - BPS_1G: 1 Gbit/s - BPS_2G: 2 Gbit/s - BPS_5G: 5 Gbit/s - BPS_10G: 10 Gbit/s - BPS_20G: 20 Gbit/s - - BPS_50G: 50 Gbit/s + BPS_50G: 50 Gbit/s - BPS_100G: 100 Gbit/s Values: UNDEFINED_BANDWIDTH (0): A value indicating that the enum field is not set. + BPS_100G (49547952): + 100 Gbit/s BPS_100M (49547958): 100 Mbit/s BPS_10G (278693006): @@ -48891,6 +49085,7 @@ class Bandwidth(proto.Enum): 5 Gbit/s """ UNDEFINED_BANDWIDTH = 0 + BPS_100G = 49547952 BPS_100M = 49547958 BPS_10G = 278693006 BPS_1G = 355358448 @@ -48905,15 +49100,15 @@ class Bandwidth(proto.Enum): BPS_5G = 355358572 class EdgeAvailabilityDomain(proto.Enum): - r"""Desired availability domain for the attachment. Only available for - type PARTNER, at creation time, and can take one of the following - values: - AVAILABILITY_DOMAIN_ANY - AVAILABILITY_DOMAIN_1 - - AVAILABILITY_DOMAIN_2 For improved reliability, customers should - configure a pair of attachments, one per availability domain. The - selected availability domain will be provided to the Partner via the - pairing key, so that the provisioned circuit will lie in the - specified domain. If not specified, the value will default to - AVAILABILITY_DOMAIN_ANY. + r"""Input only. Desired availability domain for the attachment. Only + available for type PARTNER, at creation time, and can take one of + the following values: - AVAILABILITY_DOMAIN_ANY - + AVAILABILITY_DOMAIN_1 - AVAILABILITY_DOMAIN_2 For improved + reliability, customers should configure a pair of attachments, one + per availability domain. The selected availability domain will be + provided to the Partner via the pairing key, so that the provisioned + circuit will lie in the specified domain. If not specified, the + value will default to AVAILABILITY_DOMAIN_ANY. Values: UNDEFINED_EDGE_AVAILABILITY_DOMAIN (0): @@ -69947,12 +70142,12 @@ class NetworkEndpoint(proto.Message): Attributes: annotations (MutableMapping[str, str]): - Metadata defined as annotations on the - network endpoint. + Optional metadata defined as annotations on + the network endpoint. client_destination_port (int): Represents the port number to which PSC consumer sends - packets. Only valid for network endpoint groups created with - GCE_VM_IP_PORTMAP endpoint type. + packets. Optional. Only valid for network endpoint groups + created with GCE_VM_IP_PORTMAP endpoint type. This field is a member of `oneof`_ ``_client_destination_port``. fqdn (str): @@ -69964,7 +70159,8 @@ class NetworkEndpoint(proto.Message): This field is a member of `oneof`_ ``_fqdn``. instance (str): The name or a URL of VM instance of this network endpoint. - This field is required for network endpoints of type + Optional, the field presence depends on the network endpoint + type. The field is required for network endpoints of type GCE_VM_IP and GCE_VM_IP_PORT. The instance must be in the same zone of network endpoint group (for zonal NEGs) or in the zone within the region of the NEG (for regional NEGs). @@ -70051,24 +70247,24 @@ class NetworkEndpointGroup(proto.Message): Attributes: annotations (MutableMapping[str, str]): - Metadata defined as annotations on the - network endpoint group. + Optional. Metadata defined as annotations on + the network endpoint group. app_engine (google.cloud.compute_v1.types.NetworkEndpointGroupAppEngine): - Only valid when networkEndpointType is - SERVERLESS. 
Only one of cloudRun, appEngine or - cloudFunction may be set. + Optional. Only valid when networkEndpointType + is SERVERLESS. Only one of cloudRun, appEngine + or cloudFunction may be set. This field is a member of `oneof`_ ``_app_engine``. cloud_function (google.cloud.compute_v1.types.NetworkEndpointGroupCloudFunction): - Only valid when networkEndpointType is - SERVERLESS. Only one of cloudRun, appEngine or - cloudFunction may be set. + Optional. Only valid when networkEndpointType + is SERVERLESS. Only one of cloudRun, appEngine + or cloudFunction may be set. This field is a member of `oneof`_ ``_cloud_function``. cloud_run (google.cloud.compute_v1.types.NetworkEndpointGroupCloudRun): - Only valid when networkEndpointType is - SERVERLESS. Only one of cloudRun, appEngine or - cloudFunction may be set. + Optional. Only valid when networkEndpointType + is SERVERLESS. Only one of cloudRun, appEngine + or cloudFunction may be set. This field is a member of `oneof`_ ``_cloud_run``. creation_timestamp (str): @@ -70077,9 +70273,9 @@ class NetworkEndpointGroup(proto.Message): This field is a member of `oneof`_ ``_creation_timestamp``. default_port (int): The default port used if the port number is not specified in - the network endpoint. If the network endpoint type is either - GCE_VM_IP, SERVERLESS or PRIVATE_SERVICE_CONNECT, this field - must not be specified. + the network endpoint. Optional. If the network endpoint type + is either GCE_VM_IP, SERVERLESS or PRIVATE_SERVICE_CONNECT, + this field must not be specified. This field is a member of `oneof`_ ``_default_port``. description (str): @@ -70125,13 +70321,16 @@ class NetworkEndpointGroup(proto.Message): This field is a member of `oneof`_ ``_network_endpoint_type``. psc_data (google.cloud.compute_v1.types.NetworkEndpointGroupPscData): + Optional. Only valid when networkEndpointType is + PRIVATE_SERVICE_CONNECT. This field is a member of `oneof`_ ``_psc_data``. psc_target_service (str): - The target service url used to set up private - service connection to a Google API or a PSC - Producer Service Attachment. An example value - is: asia-northeast3-cloudkms.googleapis.com + The target service url used to set up private service + connection to a Google API or a PSC Producer Service + Attachment. An example value is: + asia-northeast3-cloudkms.googleapis.com. Optional. Only + valid when networkEndpointType is PRIVATE_SERVICE_CONNECT. This field is a member of `oneof`_ ``_psc_target_service``. region (str): @@ -70906,9 +71105,11 @@ class NetworkEndpointWithHealthStatus(proto.Message): Attributes: healths (MutableSequence[google.cloud.compute_v1.types.HealthStatusForNetworkEndpoint]): - [Output only] The health status of network endpoint; + [Output only] The health status of network endpoint. + Optional. Displayed only if the network endpoint has + centralized health checking configured. network_endpoint (google.cloud.compute_v1.types.NetworkEndpoint): - [Output only] The network endpoint; + [Output only] The network endpoint. This field is a member of `oneof`_ ``_network_endpoint``. """ @@ -70926,6 +71127,89 @@ class NetworkEndpointWithHealthStatus(proto.Message): ) +class NetworkFirewallPolicyAggregatedList(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + id (str): + [Output Only] Unique identifier for the resource; defined by + the server. + + This field is a member of `oneof`_ ``_id``. 
+ items (MutableMapping[str, google.cloud.compute_v1.types.FirewallPoliciesScopedList]): + A list of FirewallPoliciesScopedList + resources. + kind (str): + [Output Only] Type of resource. Always + compute#networkFirewallPoliciesAggregatedList for lists of + network firewall policies. + + This field is a member of `oneof`_ ``_kind``. + next_page_token (str): + [Output Only] This token allows you to get the next page of + results for list requests. If the number of results is + larger than maxResults, use the nextPageToken as a value for + the query parameter pageToken in the next list request. + Subsequent list requests will have their own nextPageToken + to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. + self_link (str): + [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. + unreachables (MutableSequence[str]): + [Output Only] Unreachable resources. + warning (google.cloud.compute_v1.types.Warning): + [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. + """ + + @property + def raw_page(self): + return self + + id: str = proto.Field( + proto.STRING, + number=3355, + optional=True, + ) + items: MutableMapping[str, "FirewallPoliciesScopedList"] = proto.MapField( + proto.STRING, + proto.MESSAGE, + number=100526016, + message="FirewallPoliciesScopedList", + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=79797525, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + unreachables: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=243372063, + ) + warning: "Warning" = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message="Warning", + ) + + class NetworkInterface(proto.Message): r"""A network interface resource attached to an instance. @@ -72466,6 +72750,17 @@ class NetworkRoutingConfig(proto.Message): the list of possible values. This field is a member of `oneof`_ ``_bgp_inter_region_cost``. + effective_bgp_always_compare_med (bool): + [Output Only] Effective value of the bgp_always_compare_med + field. + + This field is a member of `oneof`_ ``_effective_bgp_always_compare_med``. + effective_bgp_inter_region_cost (str): + [Output Only] Effective value of the bgp_inter_region_cost + field. Check the EffectiveBgpInterRegionCost enum for the + list of possible values. + + This field is a member of `oneof`_ ``_effective_bgp_inter_region_cost``. routing_mode (str): The network-wide routing mode to use. If set to REGIONAL, this network's Cloud Routers will @@ -72500,7 +72795,9 @@ class BgpBestPathSelectionMode(proto.Enum): class BgpInterRegionCost(proto.Enum): r"""Allows to define a preferred approach for handling inter-region cost in the selection process when using the STANDARD BGP best path - selection algorithm. Can be DEFAULT or ADD_COST_TO_MED. + selection algorithm. Can be DEFAULT or ADD_COST_TO_MED. Additional + supported values which may be not listed in the enum directly due to + technical reasons: ADD_COST_TO_MED DEFAULT Values: UNDEFINED_BGP_INTER_REGION_COST (0): @@ -72515,6 +72812,18 @@ class BgpInterRegionCost(proto.Enum): ADD_COST_TO_MED = 490583004 DEFAULT = 115302945 + class EffectiveBgpInterRegionCost(proto.Enum): + r"""[Output Only] Effective value of the bgp_inter_region_cost field. 
+ Additional supported values which may be not listed in the enum + directly due to technical reasons: ADD_COST_TO_MED DEFAULT + + Values: + UNDEFINED_EFFECTIVE_BGP_INTER_REGION_COST (0): + A value indicating that the enum field is not + set. + """ + UNDEFINED_EFFECTIVE_BGP_INTER_REGION_COST = 0 + class RoutingMode(proto.Enum): r"""The network-wide routing mode to use. If set to REGIONAL, this network's Cloud Routers will only advertise routes with @@ -72550,6 +72859,16 @@ class RoutingMode(proto.Enum): number=462142689, optional=True, ) + effective_bgp_always_compare_med: bool = proto.Field( + proto.BOOL, + number=214661838, + optional=True, + ) + effective_bgp_inter_region_cost: str = proto.Field( + proto.STRING, + number=185098313, + optional=True, + ) routing_mode: str = proto.Field( proto.STRING, number=475143548, @@ -90564,6 +90883,13 @@ class Scheduling(proto.Message): attached to the instance. This field is a member of `oneof`_ ``_availability_domain``. + host_error_timeout_seconds (int): + Specify the time in seconds for host error detection, the + value must be within the range of [90, 330] with the + increment of 30, if unset, the default behavior of host + error recovery will be used. + + This field is a member of `oneof`_ ``_host_error_timeout_seconds``. instance_termination_action (str): Specifies the termination action for the instance. Check the InstanceTerminationAction @@ -90714,6 +91040,11 @@ class ProvisioningModel(proto.Enum): number=252514344, optional=True, ) + host_error_timeout_seconds: int = proto.Field( + proto.INT32, + number=408317459, + optional=True, + ) instance_termination_action: str = proto.Field( proto.STRING, number=107380667, @@ -105129,12 +105460,14 @@ class TargetHttpsProxy(proto.Message): SSL Certificate resource or Certificate Manager Certificate resource. Mixing Classic Certificates and Certificate Manager Certificates is not allowed. Certificate Manager - Certificates must include the certificatemanager API. - Certificate Manager Certificates are not supported by Global - external Application Load Balancer or Classic Application - Load Balancer, use certificate_map instead. Currently, you - may specify up to 15 Classic SSL Certificates. Certificate - Manager Certificates accepted formats are: - + Certificates must include the certificatemanager API + namespace. Using Certificate Manager Certificates in this + field is not supported by Global external Application Load + Balancer or Classic Application Load Balancer, use + certificate_map instead. Currently, you may specify up to 15 + Classic SSL Certificates or up to 100 Certificate Manager + Certificates. Certificate Manager Certificates accepted + formats are: - //certificatemanager.googleapis.com/projects/{project}/locations/{ location}/certificates/{resourceName}. 
- https://certificatemanager.googleapis.com/v1alpha1/projects/{project diff --git a/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_network_firewall_policies_aggregated_list_sync.py b/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_network_firewall_policies_aggregated_list_sync.py new file mode 100644 index 000000000000..3d4c70a1bd00 --- /dev/null +++ b/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_network_firewall_policies_aggregated_list_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for AggregatedList +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_NetworkFirewallPolicies_AggregatedList_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_aggregated_list(): + # Create a client + client = compute_v1.NetworkFirewallPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.AggregatedListNetworkFirewallPoliciesRequest( + project="project_value", + ) + + # Make the request + page_result = client.aggregated_list(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END compute_v1_generated_NetworkFirewallPolicies_AggregatedList_sync] diff --git a/packages/google-cloud-compute/samples/generated_samples/snippet_metadata_google.cloud.compute.v1.json b/packages/google-cloud-compute/samples/generated_samples/snippet_metadata_google.cloud.compute.v1.json index f749c3d6ced8..06de52dc78c7 100644 --- a/packages/google-cloud-compute/samples/generated_samples/snippet_metadata_google.cloud.compute.v1.json +++ b/packages/google-cloud-compute/samples/generated_samples/snippet_metadata_google.cloud.compute.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-compute", - "version": "1.23.0" + "version": "1.25.0" }, "snippets": [ { @@ -28119,6 +28119,86 @@ ], "title": "compute_v1_generated_network_firewall_policies_add_rule_sync.py" }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.NetworkFirewallPoliciesClient", + "shortName": "NetworkFirewallPoliciesClient" + }, + "fullName": "google.cloud.compute_v1.NetworkFirewallPoliciesClient.aggregated_list", + "method": { + "fullName": "google.cloud.compute.v1.NetworkFirewallPolicies.AggregatedList", + "service": { + "fullName": "google.cloud.compute.v1.NetworkFirewallPolicies", + "shortName": "NetworkFirewallPolicies" + }, + "shortName": "AggregatedList" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.AggregatedListNetworkFirewallPoliciesRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.compute_v1.services.network_firewall_policies.pagers.AggregatedListPager", + "shortName": "aggregated_list" + }, + "description": "Sample for AggregatedList", + "file": "compute_v1_generated_network_firewall_policies_aggregated_list_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_NetworkFirewallPolicies_AggregatedList_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_network_firewall_policies_aggregated_list_sync.py" + }, { "canonical": true, "clientMethod": { diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_accelerator_types.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_accelerator_types.py index 332b851fa082..a19fed862fb2 100644 --- 
a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_accelerator_types.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_accelerator_types.py @@ -59,6 +59,13 @@ ) from google.cloud.compute_v1.types import compute +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -313,6 +320,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = AcceleratorTypesClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = AcceleratorTypesClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -1835,10 +1885,13 @@ def test_aggregated_list_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AcceleratorTypesRestInterceptor, "post_aggregated_list" ) as post, mock.patch.object( + transports.AcceleratorTypesRestInterceptor, "post_aggregated_list_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AcceleratorTypesRestInterceptor, "pre_aggregated_list" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.AggregatedListAcceleratorTypesRequest.pb( compute.AggregatedListAcceleratorTypesRequest() ) @@ -1864,6 +1917,10 @@ def test_aggregated_list_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.AcceleratorTypeAggregatedList() + post_with_metadata.return_value = ( + compute.AcceleratorTypeAggregatedList(), + metadata, + ) client.aggregated_list( request, @@ -1875,6 +1932,7 @@ def test_aggregated_list_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_rest_bad_request(request_type=compute.GetAcceleratorTypeRequest): @@ -1979,10 +2037,13 @@ def test_get_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AcceleratorTypesRestInterceptor, "post_get" ) as post, mock.patch.object( + transports.AcceleratorTypesRestInterceptor, "post_get_with_metadata" + ) as post_with_metadata, 
mock.patch.object( transports.AcceleratorTypesRestInterceptor, "pre_get" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.GetAcceleratorTypeRequest.pb( compute.GetAcceleratorTypeRequest() ) @@ -2006,6 +2067,7 @@ def test_get_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.AcceleratorType() + post_with_metadata.return_value = compute.AcceleratorType(), metadata client.get( request, @@ -2017,6 +2079,7 @@ def test_get_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_rest_bad_request(request_type=compute.ListAcceleratorTypesRequest): @@ -2105,10 +2168,13 @@ def test_list_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AcceleratorTypesRestInterceptor, "post_list" ) as post, mock.patch.object( + transports.AcceleratorTypesRestInterceptor, "post_list_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AcceleratorTypesRestInterceptor, "pre_list" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.ListAcceleratorTypesRequest.pb( compute.ListAcceleratorTypesRequest() ) @@ -2134,6 +2200,7 @@ def test_list_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.AcceleratorTypeList() + post_with_metadata.return_value = compute.AcceleratorTypeList(), metadata client.list( request, @@ -2145,6 +2212,7 @@ def test_list_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_initialize_client_w_rest(): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_addresses.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_addresses.py index c31ef30aa987..68abd1e8f751 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_addresses.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_addresses.py @@ -66,6 +66,13 @@ ) from google.cloud.compute_v1.types import compute +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -294,6 +301,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = AddressesClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = AddressesClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -3462,10 +3512,13 @@ def test_aggregated_list_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AddressesRestInterceptor, "post_aggregated_list" ) as post, mock.patch.object( + transports.AddressesRestInterceptor, "post_aggregated_list_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AddressesRestInterceptor, "pre_aggregated_list" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.AggregatedListAddressesRequest.pb( compute.AggregatedListAddressesRequest() ) @@ -3491,6 +3544,7 @@ def test_aggregated_list_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.AddressAggregatedList() + post_with_metadata.return_value = compute.AddressAggregatedList(), metadata client.aggregated_list( request, @@ -3502,6 +3556,7 @@ def test_aggregated_list_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_rest_bad_request(request_type=compute.DeleteAddressRequest): @@ -3624,10 +3679,13 @@ def test_delete_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AddressesRestInterceptor, "post_delete" ) as post, mock.patch.object( + transports.AddressesRestInterceptor, "post_delete_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AddressesRestInterceptor, "pre_delete" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.DeleteAddressRequest.pb(compute.DeleteAddressRequest()) transcode.return_value = { "method": "post", @@ -3649,6 +3707,7 @@ def test_delete_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.delete( request, @@ -3660,6 +3719,7 @@ def test_delete_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_rest_bad_request(request_type=compute.GetAddressRequest): @@ 
-3776,10 +3836,13 @@ def test_get_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AddressesRestInterceptor, "post_get" ) as post, mock.patch.object( + transports.AddressesRestInterceptor, "post_get_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AddressesRestInterceptor, "pre_get" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.GetAddressRequest.pb(compute.GetAddressRequest()) transcode.return_value = { "method": "post", @@ -3801,6 +3864,7 @@ def test_get_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Address() + post_with_metadata.return_value = compute.Address(), metadata client.get( request, @@ -3812,6 +3876,7 @@ def test_get_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_insert_rest_bad_request(request_type=compute.InsertAddressRequest): @@ -4023,10 +4088,13 @@ def test_insert_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AddressesRestInterceptor, "post_insert" ) as post, mock.patch.object( + transports.AddressesRestInterceptor, "post_insert_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AddressesRestInterceptor, "pre_insert" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.InsertAddressRequest.pb(compute.InsertAddressRequest()) transcode.return_value = { "method": "post", @@ -4048,6 +4116,7 @@ def test_insert_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.insert( request, @@ -4059,6 +4128,7 @@ def test_insert_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_rest_bad_request(request_type=compute.ListAddressesRequest): @@ -4145,10 +4215,13 @@ def test_list_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AddressesRestInterceptor, "post_list" ) as post, mock.patch.object( + transports.AddressesRestInterceptor, "post_list_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AddressesRestInterceptor, "pre_list" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.ListAddressesRequest.pb(compute.ListAddressesRequest()) transcode.return_value = { "method": "post", @@ -4170,6 +4243,7 @@ def test_list_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.AddressList() + post_with_metadata.return_value = compute.AddressList(), metadata client.list( request, @@ -4181,6 +4255,7 @@ def test_list_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_move_rest_bad_request(request_type=compute.MoveAddressRequest): @@ -4385,10 +4460,13 @@ def test_move_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AddressesRestInterceptor, "post_move" ) as post, mock.patch.object( + transports.AddressesRestInterceptor, "post_move_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AddressesRestInterceptor, "pre_move" ) as pre: pre.assert_not_called() 
post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.MoveAddressRequest.pb(compute.MoveAddressRequest()) transcode.return_value = { "method": "post", @@ -4410,6 +4488,7 @@ def test_move_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.move( request, @@ -4421,6 +4500,7 @@ def test_move_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_set_labels_rest_bad_request(request_type=compute.SetLabelsAddressRequest): @@ -4622,10 +4702,13 @@ def test_set_labels_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AddressesRestInterceptor, "post_set_labels" ) as post, mock.patch.object( + transports.AddressesRestInterceptor, "post_set_labels_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AddressesRestInterceptor, "pre_set_labels" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.SetLabelsAddressRequest.pb( compute.SetLabelsAddressRequest() ) @@ -4649,6 +4732,7 @@ def test_set_labels_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.set_labels( request, @@ -4660,6 +4744,7 @@ def test_set_labels_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_initialize_client_w_rest(): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_autoscalers.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_autoscalers.py index e8db5d6b4b18..8a33c1a9cc7d 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_autoscalers.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_autoscalers.py @@ -66,6 +66,13 @@ ) from google.cloud.compute_v1.types import compute +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -300,6 +307,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = AutoscalersClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = AutoscalersClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -3483,10 +3533,13 @@ def test_aggregated_list_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AutoscalersRestInterceptor, "post_aggregated_list" ) as post, mock.patch.object( + transports.AutoscalersRestInterceptor, "post_aggregated_list_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AutoscalersRestInterceptor, "pre_aggregated_list" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.AggregatedListAutoscalersRequest.pb( compute.AggregatedListAutoscalersRequest() ) @@ -3512,6 +3565,7 @@ def test_aggregated_list_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.AutoscalerAggregatedList() + post_with_metadata.return_value = compute.AutoscalerAggregatedList(), metadata client.aggregated_list( request, @@ -3523,6 +3577,7 @@ def test_aggregated_list_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_rest_bad_request(request_type=compute.DeleteAutoscalerRequest): @@ -3647,10 +3702,13 @@ def test_delete_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AutoscalersRestInterceptor, "post_delete" ) as post, mock.patch.object( + transports.AutoscalersRestInterceptor, "post_delete_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AutoscalersRestInterceptor, "pre_delete" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.DeleteAutoscalerRequest.pb( compute.DeleteAutoscalerRequest() ) @@ -3674,6 +3732,7 @@ def test_delete_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.delete( request, @@ -3685,6 +3744,7 @@ def test_delete_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_rest_bad_request(request_type=compute.GetAutoscalerRequest): @@ 
-3787,10 +3847,13 @@ def test_get_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AutoscalersRestInterceptor, "post_get" ) as post, mock.patch.object( + transports.AutoscalersRestInterceptor, "post_get_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AutoscalersRestInterceptor, "pre_get" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.GetAutoscalerRequest.pb(compute.GetAutoscalerRequest()) transcode.return_value = { "method": "post", @@ -3812,6 +3875,7 @@ def test_get_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Autoscaler() + post_with_metadata.return_value = compute.Autoscaler(), metadata client.get( request, @@ -3823,6 +3887,7 @@ def test_get_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_insert_rest_bad_request(request_type=compute.InsertAutoscalerRequest): @@ -4058,10 +4123,13 @@ def test_insert_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AutoscalersRestInterceptor, "post_insert" ) as post, mock.patch.object( + transports.AutoscalersRestInterceptor, "post_insert_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AutoscalersRestInterceptor, "pre_insert" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.InsertAutoscalerRequest.pb( compute.InsertAutoscalerRequest() ) @@ -4085,6 +4153,7 @@ def test_insert_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.insert( request, @@ -4096,6 +4165,7 @@ def test_insert_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_rest_bad_request(request_type=compute.ListAutoscalersRequest): @@ -4184,10 +4254,13 @@ def test_list_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AutoscalersRestInterceptor, "post_list" ) as post, mock.patch.object( + transports.AutoscalersRestInterceptor, "post_list_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AutoscalersRestInterceptor, "pre_list" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.ListAutoscalersRequest.pb(compute.ListAutoscalersRequest()) transcode.return_value = { "method": "post", @@ -4209,6 +4282,7 @@ def test_list_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.AutoscalerList() + post_with_metadata.return_value = compute.AutoscalerList(), metadata client.list( request, @@ -4220,6 +4294,7 @@ def test_list_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_patch_rest_bad_request(request_type=compute.PatchAutoscalerRequest): @@ -4455,10 +4530,13 @@ def test_patch_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AutoscalersRestInterceptor, "post_patch" ) as post, mock.patch.object( + transports.AutoscalersRestInterceptor, "post_patch_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AutoscalersRestInterceptor, "pre_patch" ) as pre: 
pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.PatchAutoscalerRequest.pb(compute.PatchAutoscalerRequest()) transcode.return_value = { "method": "post", @@ -4480,6 +4558,7 @@ def test_patch_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.patch( request, @@ -4491,6 +4570,7 @@ def test_patch_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_rest_bad_request(request_type=compute.UpdateAutoscalerRequest): @@ -4726,10 +4806,13 @@ def test_update_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AutoscalersRestInterceptor, "post_update" ) as post, mock.patch.object( + transports.AutoscalersRestInterceptor, "post_update_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AutoscalersRestInterceptor, "pre_update" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.UpdateAutoscalerRequest.pb( compute.UpdateAutoscalerRequest() ) @@ -4753,6 +4836,7 @@ def test_update_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.update( request, @@ -4764,6 +4848,7 @@ def test_update_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_initialize_client_w_rest(): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_backend_buckets.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_backend_buckets.py index 5acf61735682..e30d89bb8acd 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_backend_buckets.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_backend_buckets.py @@ -66,6 +66,13 @@ ) from google.cloud.compute_v1.types import compute +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -319,6 +326,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = BackendBucketsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = BackendBucketsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -5129,10 +5179,14 @@ def test_add_signed_url_key_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.BackendBucketsRestInterceptor, "post_add_signed_url_key" ) as post, mock.patch.object( + transports.BackendBucketsRestInterceptor, + "post_add_signed_url_key_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.BackendBucketsRestInterceptor, "pre_add_signed_url_key" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.AddSignedUrlKeyBackendBucketRequest.pb( compute.AddSignedUrlKeyBackendBucketRequest() ) @@ -5156,6 +5210,7 @@ def test_add_signed_url_key_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.add_signed_url_key( request, @@ -5167,6 +5222,7 @@ def test_add_signed_url_key_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_rest_bad_request(request_type=compute.DeleteBackendBucketRequest): @@ -5291,10 +5347,13 @@ def test_delete_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.BackendBucketsRestInterceptor, "post_delete" ) as post, mock.patch.object( + transports.BackendBucketsRestInterceptor, "post_delete_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.BackendBucketsRestInterceptor, "pre_delete" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.DeleteBackendBucketRequest.pb( compute.DeleteBackendBucketRequest() ) @@ -5318,6 +5377,7 @@ def test_delete_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.delete( request, @@ -5329,6 +5389,7 @@ def test_delete_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_signed_url_key_rest_bad_request( 
@@ -5455,10 +5516,14 @@ def test_delete_signed_url_key_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.BackendBucketsRestInterceptor, "post_delete_signed_url_key" ) as post, mock.patch.object( + transports.BackendBucketsRestInterceptor, + "post_delete_signed_url_key_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.BackendBucketsRestInterceptor, "pre_delete_signed_url_key" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.DeleteSignedUrlKeyBackendBucketRequest.pb( compute.DeleteSignedUrlKeyBackendBucketRequest() ) @@ -5482,6 +5547,7 @@ def test_delete_signed_url_key_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.delete_signed_url_key( request, @@ -5493,6 +5559,7 @@ def test_delete_signed_url_key_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_rest_bad_request(request_type=compute.GetBackendBucketRequest): @@ -5595,10 +5662,13 @@ def test_get_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.BackendBucketsRestInterceptor, "post_get" ) as post, mock.patch.object( + transports.BackendBucketsRestInterceptor, "post_get_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.BackendBucketsRestInterceptor, "pre_get" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.GetBackendBucketRequest.pb( compute.GetBackendBucketRequest() ) @@ -5622,6 +5692,7 @@ def test_get_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.BackendBucket() + post_with_metadata.return_value = compute.BackendBucket(), metadata client.get( request, @@ -5633,6 +5704,7 @@ def test_get_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_iam_policy_rest_bad_request( @@ -5721,10 +5793,13 @@ def test_get_iam_policy_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.BackendBucketsRestInterceptor, "post_get_iam_policy" ) as post, mock.patch.object( + transports.BackendBucketsRestInterceptor, "post_get_iam_policy_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.BackendBucketsRestInterceptor, "pre_get_iam_policy" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.GetIamPolicyBackendBucketRequest.pb( compute.GetIamPolicyBackendBucketRequest() ) @@ -5748,6 +5823,7 @@ def test_get_iam_policy_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Policy() + post_with_metadata.return_value = compute.Policy(), metadata client.get_iam_policy( request, @@ -5759,6 +5835,7 @@ def test_get_iam_policy_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_insert_rest_bad_request(request_type=compute.InsertBackendBucketRequest): @@ -5997,10 +6074,13 @@ def test_insert_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.BackendBucketsRestInterceptor, "post_insert" ) as post, mock.patch.object( + transports.BackendBucketsRestInterceptor, 
"post_insert_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.BackendBucketsRestInterceptor, "pre_insert" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.InsertBackendBucketRequest.pb( compute.InsertBackendBucketRequest() ) @@ -6024,6 +6104,7 @@ def test_insert_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.insert( request, @@ -6035,6 +6116,7 @@ def test_insert_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_rest_bad_request(request_type=compute.ListBackendBucketsRequest): @@ -6123,10 +6205,13 @@ def test_list_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.BackendBucketsRestInterceptor, "post_list" ) as post, mock.patch.object( + transports.BackendBucketsRestInterceptor, "post_list_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.BackendBucketsRestInterceptor, "pre_list" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.ListBackendBucketsRequest.pb( compute.ListBackendBucketsRequest() ) @@ -6150,6 +6235,7 @@ def test_list_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.BackendBucketList() + post_with_metadata.return_value = compute.BackendBucketList(), metadata client.list( request, @@ -6161,6 +6247,7 @@ def test_list_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_patch_rest_bad_request(request_type=compute.PatchBackendBucketRequest): @@ -6399,10 +6486,13 @@ def test_patch_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.BackendBucketsRestInterceptor, "post_patch" ) as post, mock.patch.object( + transports.BackendBucketsRestInterceptor, "post_patch_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.BackendBucketsRestInterceptor, "pre_patch" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.PatchBackendBucketRequest.pb( compute.PatchBackendBucketRequest() ) @@ -6426,6 +6516,7 @@ def test_patch_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.patch( request, @@ -6437,6 +6528,7 @@ def test_patch_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_set_edge_security_policy_rest_bad_request( @@ -6641,10 +6733,14 @@ def test_set_edge_security_policy_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.BackendBucketsRestInterceptor, "post_set_edge_security_policy" ) as post, mock.patch.object( + transports.BackendBucketsRestInterceptor, + "post_set_edge_security_policy_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.BackendBucketsRestInterceptor, "pre_set_edge_security_policy" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.SetEdgeSecurityPolicyBackendBucketRequest.pb( compute.SetEdgeSecurityPolicyBackendBucketRequest() ) @@ 
-6668,6 +6764,7 @@ def test_set_edge_security_policy_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.set_edge_security_policy( request, @@ -6679,6 +6776,7 @@ def test_set_edge_security_policy_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_set_iam_policy_rest_bad_request( @@ -6883,10 +6981,13 @@ def test_set_iam_policy_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.BackendBucketsRestInterceptor, "post_set_iam_policy" ) as post, mock.patch.object( + transports.BackendBucketsRestInterceptor, "post_set_iam_policy_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.BackendBucketsRestInterceptor, "pre_set_iam_policy" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.SetIamPolicyBackendBucketRequest.pb( compute.SetIamPolicyBackendBucketRequest() ) @@ -6910,6 +7011,7 @@ def test_set_iam_policy_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Policy() + post_with_metadata.return_value = compute.Policy(), metadata client.set_iam_policy( request, @@ -6921,6 +7023,7 @@ def test_set_iam_policy_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_test_iam_permissions_rest_bad_request( @@ -7083,10 +7186,14 @@ def test_test_iam_permissions_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.BackendBucketsRestInterceptor, "post_test_iam_permissions" ) as post, mock.patch.object( + transports.BackendBucketsRestInterceptor, + "post_test_iam_permissions_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.BackendBucketsRestInterceptor, "pre_test_iam_permissions" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.TestIamPermissionsBackendBucketRequest.pb( compute.TestIamPermissionsBackendBucketRequest() ) @@ -7112,6 +7219,7 @@ def test_test_iam_permissions_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.TestPermissionsResponse() + post_with_metadata.return_value = compute.TestPermissionsResponse(), metadata client.test_iam_permissions( request, @@ -7123,6 +7231,7 @@ def test_test_iam_permissions_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_rest_bad_request(request_type=compute.UpdateBackendBucketRequest): @@ -7361,10 +7470,13 @@ def test_update_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.BackendBucketsRestInterceptor, "post_update" ) as post, mock.patch.object( + transports.BackendBucketsRestInterceptor, "post_update_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.BackendBucketsRestInterceptor, "pre_update" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.UpdateBackendBucketRequest.pb( compute.UpdateBackendBucketRequest() ) @@ -7388,6 +7500,7 @@ def test_update_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = 
compute.Operation(), metadata client.update( request, @@ -7399,6 +7512,7 @@ def test_update_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_initialize_client_w_rest(): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_backend_services.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_backend_services.py index 2a066c7451bd..4a229f6d7de6 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_backend_services.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_backend_services.py @@ -66,6 +66,13 @@ ) from google.cloud.compute_v1.types import compute +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -319,6 +326,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = BackendServicesClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = BackendServicesClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -6301,10 +6351,14 @@ def test_add_signed_url_key_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.BackendServicesRestInterceptor, "post_add_signed_url_key" ) as post, mock.patch.object( + transports.BackendServicesRestInterceptor, + "post_add_signed_url_key_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.BackendServicesRestInterceptor, "pre_add_signed_url_key" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.AddSignedUrlKeyBackendServiceRequest.pb( compute.AddSignedUrlKeyBackendServiceRequest() ) @@ -6328,6 +6382,7 @@ def test_add_signed_url_key_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.add_signed_url_key( request, @@ -6339,6 +6394,7 @@ def test_add_signed_url_key_rest_interceptors(null_interceptor): pre.assert_called_once() 
post.assert_called_once() + post_with_metadata.assert_called_once() def test_aggregated_list_rest_bad_request( @@ -6431,10 +6487,13 @@ def test_aggregated_list_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.BackendServicesRestInterceptor, "post_aggregated_list" ) as post, mock.patch.object( + transports.BackendServicesRestInterceptor, "post_aggregated_list_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.BackendServicesRestInterceptor, "pre_aggregated_list" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.AggregatedListBackendServicesRequest.pb( compute.AggregatedListBackendServicesRequest() ) @@ -6460,6 +6519,10 @@ def test_aggregated_list_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.BackendServiceAggregatedList() + post_with_metadata.return_value = ( + compute.BackendServiceAggregatedList(), + metadata, + ) client.aggregated_list( request, @@ -6471,6 +6534,7 @@ def test_aggregated_list_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_rest_bad_request(request_type=compute.DeleteBackendServiceRequest): @@ -6595,10 +6659,13 @@ def test_delete_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.BackendServicesRestInterceptor, "post_delete" ) as post, mock.patch.object( + transports.BackendServicesRestInterceptor, "post_delete_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.BackendServicesRestInterceptor, "pre_delete" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.DeleteBackendServiceRequest.pb( compute.DeleteBackendServiceRequest() ) @@ -6622,6 +6689,7 @@ def test_delete_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.delete( request, @@ -6633,6 +6701,7 @@ def test_delete_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_signed_url_key_rest_bad_request( @@ -6759,10 +6828,14 @@ def test_delete_signed_url_key_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.BackendServicesRestInterceptor, "post_delete_signed_url_key" ) as post, mock.patch.object( + transports.BackendServicesRestInterceptor, + "post_delete_signed_url_key_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.BackendServicesRestInterceptor, "pre_delete_signed_url_key" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.DeleteSignedUrlKeyBackendServiceRequest.pb( compute.DeleteSignedUrlKeyBackendServiceRequest() ) @@ -6786,6 +6859,7 @@ def test_delete_signed_url_key_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.delete_signed_url_key( request, @@ -6797,6 +6871,7 @@ def test_delete_signed_url_key_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_rest_bad_request(request_type=compute.GetBackendServiceRequest): @@ -6931,10 +7006,13 @@ def 
test_get_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.BackendServicesRestInterceptor, "post_get" ) as post, mock.patch.object( + transports.BackendServicesRestInterceptor, "post_get_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.BackendServicesRestInterceptor, "pre_get" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.GetBackendServiceRequest.pb( compute.GetBackendServiceRequest() ) @@ -6958,6 +7036,7 @@ def test_get_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.BackendService() + post_with_metadata.return_value = compute.BackendService(), metadata client.get( request, @@ -6969,6 +7048,7 @@ def test_get_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_health_rest_bad_request( @@ -7129,10 +7209,13 @@ def test_get_health_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.BackendServicesRestInterceptor, "post_get_health" ) as post, mock.patch.object( + transports.BackendServicesRestInterceptor, "post_get_health_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.BackendServicesRestInterceptor, "pre_get_health" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.GetHealthBackendServiceRequest.pb( compute.GetHealthBackendServiceRequest() ) @@ -7158,6 +7241,7 @@ def test_get_health_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.BackendServiceGroupHealth() + post_with_metadata.return_value = compute.BackendServiceGroupHealth(), metadata client.get_health( request, @@ -7169,6 +7253,7 @@ def test_get_health_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_iam_policy_rest_bad_request( @@ -7257,10 +7342,13 @@ def test_get_iam_policy_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.BackendServicesRestInterceptor, "post_get_iam_policy" ) as post, mock.patch.object( + transports.BackendServicesRestInterceptor, "post_get_iam_policy_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.BackendServicesRestInterceptor, "pre_get_iam_policy" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.GetIamPolicyBackendServiceRequest.pb( compute.GetIamPolicyBackendServiceRequest() ) @@ -7284,6 +7372,7 @@ def test_get_iam_policy_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Policy() + post_with_metadata.return_value = compute.Policy(), metadata client.get_iam_policy( request, @@ -7295,6 +7384,7 @@ def test_get_iam_policy_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_insert_rest_bad_request(request_type=compute.InsertBackendServiceRequest): @@ -7660,10 +7750,13 @@ def test_insert_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.BackendServicesRestInterceptor, "post_insert" ) as post, mock.patch.object( + transports.BackendServicesRestInterceptor, "post_insert_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.BackendServicesRestInterceptor, 
"pre_insert" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.InsertBackendServiceRequest.pb( compute.InsertBackendServiceRequest() ) @@ -7687,6 +7780,7 @@ def test_insert_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.insert( request, @@ -7698,6 +7792,7 @@ def test_insert_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_rest_bad_request(request_type=compute.ListBackendServicesRequest): @@ -7786,10 +7881,13 @@ def test_list_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.BackendServicesRestInterceptor, "post_list" ) as post, mock.patch.object( + transports.BackendServicesRestInterceptor, "post_list_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.BackendServicesRestInterceptor, "pre_list" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.ListBackendServicesRequest.pb( compute.ListBackendServicesRequest() ) @@ -7813,6 +7911,7 @@ def test_list_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.BackendServiceList() + post_with_metadata.return_value = compute.BackendServiceList(), metadata client.list( request, @@ -7824,6 +7923,7 @@ def test_list_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_usable_rest_bad_request( @@ -7914,10 +8014,13 @@ def test_list_usable_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.BackendServicesRestInterceptor, "post_list_usable" ) as post, mock.patch.object( + transports.BackendServicesRestInterceptor, "post_list_usable_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.BackendServicesRestInterceptor, "pre_list_usable" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.ListUsableBackendServicesRequest.pb( compute.ListUsableBackendServicesRequest() ) @@ -7943,6 +8046,7 @@ def test_list_usable_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.BackendServiceListUsable() + post_with_metadata.return_value = compute.BackendServiceListUsable(), metadata client.list_usable( request, @@ -7954,6 +8058,7 @@ def test_list_usable_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_patch_rest_bad_request(request_type=compute.PatchBackendServiceRequest): @@ -8319,10 +8424,13 @@ def test_patch_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.BackendServicesRestInterceptor, "post_patch" ) as post, mock.patch.object( + transports.BackendServicesRestInterceptor, "post_patch_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.BackendServicesRestInterceptor, "pre_patch" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.PatchBackendServiceRequest.pb( compute.PatchBackendServiceRequest() ) @@ -8346,6 +8454,7 @@ def test_patch_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = 
compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.patch( request, @@ -8357,6 +8466,7 @@ def test_patch_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_set_edge_security_policy_rest_bad_request( @@ -8561,10 +8671,14 @@ def test_set_edge_security_policy_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.BackendServicesRestInterceptor, "post_set_edge_security_policy" ) as post, mock.patch.object( + transports.BackendServicesRestInterceptor, + "post_set_edge_security_policy_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.BackendServicesRestInterceptor, "pre_set_edge_security_policy" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.SetEdgeSecurityPolicyBackendServiceRequest.pb( compute.SetEdgeSecurityPolicyBackendServiceRequest() ) @@ -8588,6 +8702,7 @@ def test_set_edge_security_policy_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.set_edge_security_policy( request, @@ -8599,6 +8714,7 @@ def test_set_edge_security_policy_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_set_iam_policy_rest_bad_request( @@ -8803,10 +8919,13 @@ def test_set_iam_policy_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.BackendServicesRestInterceptor, "post_set_iam_policy" ) as post, mock.patch.object( + transports.BackendServicesRestInterceptor, "post_set_iam_policy_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.BackendServicesRestInterceptor, "pre_set_iam_policy" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.SetIamPolicyBackendServiceRequest.pb( compute.SetIamPolicyBackendServiceRequest() ) @@ -8830,6 +8949,7 @@ def test_set_iam_policy_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Policy() + post_with_metadata.return_value = compute.Policy(), metadata client.set_iam_policy( request, @@ -8841,6 +8961,7 @@ def test_set_iam_policy_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_set_security_policy_rest_bad_request( @@ -9045,10 +9166,14 @@ def test_set_security_policy_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.BackendServicesRestInterceptor, "post_set_security_policy" ) as post, mock.patch.object( + transports.BackendServicesRestInterceptor, + "post_set_security_policy_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.BackendServicesRestInterceptor, "pre_set_security_policy" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.SetSecurityPolicyBackendServiceRequest.pb( compute.SetSecurityPolicyBackendServiceRequest() ) @@ -9072,6 +9197,7 @@ def test_set_security_policy_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.set_security_policy( request, @@ -9083,6 +9209,7 @@ def 
test_set_security_policy_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_test_iam_permissions_rest_bad_request( @@ -9245,10 +9372,14 @@ def test_test_iam_permissions_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.BackendServicesRestInterceptor, "post_test_iam_permissions" ) as post, mock.patch.object( + transports.BackendServicesRestInterceptor, + "post_test_iam_permissions_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.BackendServicesRestInterceptor, "pre_test_iam_permissions" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.TestIamPermissionsBackendServiceRequest.pb( compute.TestIamPermissionsBackendServiceRequest() ) @@ -9274,6 +9405,7 @@ def test_test_iam_permissions_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.TestPermissionsResponse() + post_with_metadata.return_value = compute.TestPermissionsResponse(), metadata client.test_iam_permissions( request, @@ -9285,6 +9417,7 @@ def test_test_iam_permissions_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_rest_bad_request(request_type=compute.UpdateBackendServiceRequest): @@ -9650,10 +9783,13 @@ def test_update_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.BackendServicesRestInterceptor, "post_update" ) as post, mock.patch.object( + transports.BackendServicesRestInterceptor, "post_update_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.BackendServicesRestInterceptor, "pre_update" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.UpdateBackendServiceRequest.pb( compute.UpdateBackendServiceRequest() ) @@ -9677,6 +9813,7 @@ def test_update_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.update( request, @@ -9688,6 +9825,7 @@ def test_update_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_initialize_client_w_rest(): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_disk_types.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_disk_types.py index cc53d755d9cf..ccd4a1b640b7 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_disk_types.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_disk_types.py @@ -59,6 +59,13 @@ ) from google.cloud.compute_v1.types import compute +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -287,6 +294,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = DiskTypesClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = DiskTypesClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -1781,10 +1831,13 @@ def test_aggregated_list_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DiskTypesRestInterceptor, "post_aggregated_list" ) as post, mock.patch.object( + transports.DiskTypesRestInterceptor, "post_aggregated_list_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DiskTypesRestInterceptor, "pre_aggregated_list" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.AggregatedListDiskTypesRequest.pb( compute.AggregatedListDiskTypesRequest() ) @@ -1810,6 +1863,7 @@ def test_aggregated_list_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.DiskTypeAggregatedList() + post_with_metadata.return_value = compute.DiskTypeAggregatedList(), metadata client.aggregated_list( request, @@ -1821,6 +1875,7 @@ def test_aggregated_list_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_rest_bad_request(request_type=compute.GetDiskTypeRequest): @@ -1919,10 +1974,13 @@ def test_get_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DiskTypesRestInterceptor, "post_get" ) as post, mock.patch.object( + transports.DiskTypesRestInterceptor, "post_get_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DiskTypesRestInterceptor, "pre_get" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.GetDiskTypeRequest.pb(compute.GetDiskTypeRequest()) transcode.return_value = { "method": "post", @@ -1944,6 +2002,7 @@ def test_get_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.DiskType() + post_with_metadata.return_value = compute.DiskType(), metadata client.get( request, @@ -1955,6 +2014,7 @@ def test_get_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_rest_bad_request(request_type=compute.ListDiskTypesRequest): @@ -2041,10 +2101,13 @@ def 
test_list_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DiskTypesRestInterceptor, "post_list" ) as post, mock.patch.object( + transports.DiskTypesRestInterceptor, "post_list_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DiskTypesRestInterceptor, "pre_list" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.ListDiskTypesRequest.pb(compute.ListDiskTypesRequest()) transcode.return_value = { "method": "post", @@ -2066,6 +2129,7 @@ def test_list_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.DiskTypeList() + post_with_metadata.return_value = compute.DiskTypeList(), metadata client.list( request, @@ -2077,6 +2141,7 @@ def test_list_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_initialize_client_w_rest(): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_disks.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_disks.py index 0e0ca05287c5..0a68c2492925 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_disks.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_disks.py @@ -62,6 +62,13 @@ from google.cloud.compute_v1.services.disks import DisksClient, pagers, transports from google.cloud.compute_v1.types import compute +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -275,6 +282,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
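Editorial note: each test file in this diff also gains the same pair of credential-info tests (test__add_cred_info_for_auth_errors and its no-get_cred_info variant, first visible above for DiskTypesClient and repeated below for DisksClient and the other clients). Together they pin down one behaviour: credential info is serialized to JSON and appended to error.details only for 401/403/404 errors, and only when the credentials object exposes get_cred_info() and returns something truthy. A self-contained sketch of logic that would satisfy those assertions (a stand-in written for illustration, not the library's actual implementation) is:

    import json

    _AUTH_ERROR_CODES = (401, 403, 404)


    class _FakeError:
        """Stand-in for GoogleAPICallError, used only for this sketch."""

        def __init__(self, code, details):
            self.code = code
            self.details = details


    def _add_cred_info_for_auth_errors(error, credentials):
        # Append serialized credential info only for auth-related status codes
        # and only when the credentials can actually provide it, matching the
        # parametrized expectations in the tests.
        get_cred_info = getattr(credentials, "get_cred_info", None)
        cred_info = get_cred_info() if callable(get_cred_info) else None
        if cred_info and error.code in _AUTH_ERROR_CODES:
            error.details.append(json.dumps(cred_info))


    error = _FakeError(403, ["foo"])
    _add_cred_info_for_auth_errors(error, credentials=None)
    assert error.details == ["foo"]  # no get_cred_info available -> unchanged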
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = DisksClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = DisksClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -7621,10 +7671,13 @@ def test_add_resource_policies_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DisksRestInterceptor, "post_add_resource_policies" ) as post, mock.patch.object( + transports.DisksRestInterceptor, "post_add_resource_policies_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DisksRestInterceptor, "pre_add_resource_policies" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.AddResourcePoliciesDiskRequest.pb( compute.AddResourcePoliciesDiskRequest() ) @@ -7648,6 +7701,7 @@ def test_add_resource_policies_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.add_resource_policies( request, @@ -7659,6 +7713,7 @@ def test_add_resource_policies_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_aggregated_list_rest_bad_request( @@ -7749,10 +7804,13 @@ def test_aggregated_list_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DisksRestInterceptor, "post_aggregated_list" ) as post, mock.patch.object( + transports.DisksRestInterceptor, "post_aggregated_list_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DisksRestInterceptor, "pre_aggregated_list" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.AggregatedListDisksRequest.pb( compute.AggregatedListDisksRequest() ) @@ -7776,6 +7834,7 @@ def test_aggregated_list_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.DiskAggregatedList() + post_with_metadata.return_value = compute.DiskAggregatedList(), metadata client.aggregated_list( request, @@ -7787,6 +7846,7 @@ def test_aggregated_list_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def 
test_bulk_insert_rest_bad_request(request_type=compute.BulkInsertDiskRequest): @@ -7987,10 +8047,13 @@ def test_bulk_insert_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DisksRestInterceptor, "post_bulk_insert" ) as post, mock.patch.object( + transports.DisksRestInterceptor, "post_bulk_insert_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DisksRestInterceptor, "pre_bulk_insert" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.BulkInsertDiskRequest.pb(compute.BulkInsertDiskRequest()) transcode.return_value = { "method": "post", @@ -8012,6 +8075,7 @@ def test_bulk_insert_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.bulk_insert( request, @@ -8023,6 +8087,7 @@ def test_bulk_insert_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_snapshot_rest_bad_request( @@ -8258,10 +8323,13 @@ def test_create_snapshot_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DisksRestInterceptor, "post_create_snapshot" ) as post, mock.patch.object( + transports.DisksRestInterceptor, "post_create_snapshot_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DisksRestInterceptor, "pre_create_snapshot" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.CreateSnapshotDiskRequest.pb( compute.CreateSnapshotDiskRequest() ) @@ -8285,6 +8353,7 @@ def test_create_snapshot_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.create_snapshot( request, @@ -8296,6 +8365,7 @@ def test_create_snapshot_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_rest_bad_request(request_type=compute.DeleteDiskRequest): @@ -8418,10 +8488,13 @@ def test_delete_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DisksRestInterceptor, "post_delete" ) as post, mock.patch.object( + transports.DisksRestInterceptor, "post_delete_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DisksRestInterceptor, "pre_delete" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.DeleteDiskRequest.pb(compute.DeleteDiskRequest()) transcode.return_value = { "method": "post", @@ -8443,6 +8516,7 @@ def test_delete_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.delete( request, @@ -8454,6 +8528,7 @@ def test_delete_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_rest_bad_request(request_type=compute.GetDiskRequest): @@ -8620,10 +8695,13 @@ def test_get_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DisksRestInterceptor, "post_get" ) as post, mock.patch.object( + transports.DisksRestInterceptor, "post_get_with_metadata" + ) as post_with_metadata, mock.patch.object( 
transports.DisksRestInterceptor, "pre_get" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.GetDiskRequest.pb(compute.GetDiskRequest()) transcode.return_value = { "method": "post", @@ -8645,6 +8723,7 @@ def test_get_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Disk() + post_with_metadata.return_value = compute.Disk(), metadata client.get( request, @@ -8656,6 +8735,7 @@ def test_get_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_iam_policy_rest_bad_request(request_type=compute.GetIamPolicyDiskRequest): @@ -8740,10 +8820,13 @@ def test_get_iam_policy_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DisksRestInterceptor, "post_get_iam_policy" ) as post, mock.patch.object( + transports.DisksRestInterceptor, "post_get_iam_policy_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DisksRestInterceptor, "pre_get_iam_policy" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.GetIamPolicyDiskRequest.pb( compute.GetIamPolicyDiskRequest() ) @@ -8767,6 +8850,7 @@ def test_get_iam_policy_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Policy() + post_with_metadata.return_value = compute.Policy(), metadata client.get_iam_policy( request, @@ -8778,6 +8862,7 @@ def test_get_iam_policy_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_insert_rest_bad_request(request_type=compute.InsertDiskRequest): @@ -9033,10 +9118,13 @@ def test_insert_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DisksRestInterceptor, "post_insert" ) as post, mock.patch.object( + transports.DisksRestInterceptor, "post_insert_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DisksRestInterceptor, "pre_insert" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.InsertDiskRequest.pb(compute.InsertDiskRequest()) transcode.return_value = { "method": "post", @@ -9058,6 +9146,7 @@ def test_insert_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.insert( request, @@ -9069,6 +9158,7 @@ def test_insert_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_rest_bad_request(request_type=compute.ListDisksRequest): @@ -9155,10 +9245,13 @@ def test_list_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DisksRestInterceptor, "post_list" ) as post, mock.patch.object( + transports.DisksRestInterceptor, "post_list_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DisksRestInterceptor, "pre_list" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.ListDisksRequest.pb(compute.ListDisksRequest()) transcode.return_value = { "method": "post", @@ -9180,6 +9273,7 @@ def test_list_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.DiskList() + 
post_with_metadata.return_value = compute.DiskList(), metadata client.list( request, @@ -9191,6 +9285,7 @@ def test_list_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_remove_resource_policies_rest_bad_request( @@ -9400,10 +9495,13 @@ def test_remove_resource_policies_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DisksRestInterceptor, "post_remove_resource_policies" ) as post, mock.patch.object( + transports.DisksRestInterceptor, "post_remove_resource_policies_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DisksRestInterceptor, "pre_remove_resource_policies" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.RemoveResourcePoliciesDiskRequest.pb( compute.RemoveResourcePoliciesDiskRequest() ) @@ -9427,6 +9525,7 @@ def test_remove_resource_policies_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.remove_resource_policies( request, @@ -9438,6 +9537,7 @@ def test_remove_resource_policies_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_resize_rest_bad_request(request_type=compute.ResizeDiskRequest): @@ -9634,10 +9734,13 @@ def test_resize_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DisksRestInterceptor, "post_resize" ) as post, mock.patch.object( + transports.DisksRestInterceptor, "post_resize_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DisksRestInterceptor, "pre_resize" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.ResizeDiskRequest.pb(compute.ResizeDiskRequest()) transcode.return_value = { "method": "post", @@ -9659,6 +9762,7 @@ def test_resize_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.resize( request, @@ -9670,6 +9774,7 @@ def test_resize_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_set_iam_policy_rest_bad_request(request_type=compute.SetIamPolicyDiskRequest): @@ -9870,10 +9975,13 @@ def test_set_iam_policy_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DisksRestInterceptor, "post_set_iam_policy" ) as post, mock.patch.object( + transports.DisksRestInterceptor, "post_set_iam_policy_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DisksRestInterceptor, "pre_set_iam_policy" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.SetIamPolicyDiskRequest.pb( compute.SetIamPolicyDiskRequest() ) @@ -9897,6 +10005,7 @@ def test_set_iam_policy_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Policy() + post_with_metadata.return_value = compute.Policy(), metadata client.set_iam_policy( request, @@ -9908,6 +10017,7 @@ def test_set_iam_policy_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def 
test_set_labels_rest_bad_request(request_type=compute.SetLabelsDiskRequest): @@ -10109,10 +10219,13 @@ def test_set_labels_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DisksRestInterceptor, "post_set_labels" ) as post, mock.patch.object( + transports.DisksRestInterceptor, "post_set_labels_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DisksRestInterceptor, "pre_set_labels" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.SetLabelsDiskRequest.pb(compute.SetLabelsDiskRequest()) transcode.return_value = { "method": "post", @@ -10134,6 +10247,7 @@ def test_set_labels_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.set_labels( request, @@ -10145,6 +10259,7 @@ def test_set_labels_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_start_async_replication_rest_bad_request( @@ -10354,10 +10469,13 @@ def test_start_async_replication_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DisksRestInterceptor, "post_start_async_replication" ) as post, mock.patch.object( + transports.DisksRestInterceptor, "post_start_async_replication_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DisksRestInterceptor, "pre_start_async_replication" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.StartAsyncReplicationDiskRequest.pb( compute.StartAsyncReplicationDiskRequest() ) @@ -10381,6 +10499,7 @@ def test_start_async_replication_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.start_async_replication( request, @@ -10392,6 +10511,7 @@ def test_start_async_replication_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_stop_async_replication_rest_bad_request( @@ -10516,10 +10636,13 @@ def test_stop_async_replication_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DisksRestInterceptor, "post_stop_async_replication" ) as post, mock.patch.object( + transports.DisksRestInterceptor, "post_stop_async_replication_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DisksRestInterceptor, "pre_stop_async_replication" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.StopAsyncReplicationDiskRequest.pb( compute.StopAsyncReplicationDiskRequest() ) @@ -10543,6 +10666,7 @@ def test_stop_async_replication_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.stop_async_replication( request, @@ -10554,6 +10678,7 @@ def test_stop_async_replication_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_stop_group_async_replication_rest_bad_request( @@ -10763,10 +10888,14 @@ def test_stop_group_async_replication_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( 
transports.DisksRestInterceptor, "post_stop_group_async_replication" ) as post, mock.patch.object( + transports.DisksRestInterceptor, + "post_stop_group_async_replication_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DisksRestInterceptor, "pre_stop_group_async_replication" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.StopGroupAsyncReplicationDiskRequest.pb( compute.StopGroupAsyncReplicationDiskRequest() ) @@ -10790,6 +10919,7 @@ def test_stop_group_async_replication_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.stop_group_async_replication( request, @@ -10801,6 +10931,7 @@ def test_stop_group_async_replication_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_test_iam_permissions_rest_bad_request( @@ -10961,10 +11092,13 @@ def test_test_iam_permissions_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DisksRestInterceptor, "post_test_iam_permissions" ) as post, mock.patch.object( + transports.DisksRestInterceptor, "post_test_iam_permissions_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DisksRestInterceptor, "pre_test_iam_permissions" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.TestIamPermissionsDiskRequest.pb( compute.TestIamPermissionsDiskRequest() ) @@ -10990,6 +11124,7 @@ def test_test_iam_permissions_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.TestPermissionsResponse() + post_with_metadata.return_value = compute.TestPermissionsResponse(), metadata client.test_iam_permissions( request, @@ -11001,6 +11136,7 @@ def test_test_iam_permissions_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_rest_bad_request(request_type=compute.UpdateDiskRequest): @@ -11256,10 +11392,13 @@ def test_update_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DisksRestInterceptor, "post_update" ) as post, mock.patch.object( + transports.DisksRestInterceptor, "post_update_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DisksRestInterceptor, "pre_update" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.UpdateDiskRequest.pb(compute.UpdateDiskRequest()) transcode.return_value = { "method": "post", @@ -11281,6 +11420,7 @@ def test_update_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.update( request, @@ -11292,6 +11432,7 @@ def test_update_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_initialize_client_w_rest(): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_external_vpn_gateways.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_external_vpn_gateways.py index 052b17cf0be6..2cc67015768e 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_external_vpn_gateways.py +++ 
b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_external_vpn_gateways.py @@ -66,6 +66,13 @@ ) from google.cloud.compute_v1.types import compute +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -334,6 +341,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = ExternalVpnGatewaysClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = ExternalVpnGatewaysClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -3002,10 +3052,13 @@ def test_delete_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ExternalVpnGatewaysRestInterceptor, "post_delete" ) as post, mock.patch.object( + transports.ExternalVpnGatewaysRestInterceptor, "post_delete_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ExternalVpnGatewaysRestInterceptor, "pre_delete" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.DeleteExternalVpnGatewayRequest.pb( compute.DeleteExternalVpnGatewayRequest() ) @@ -3029,6 +3082,7 @@ def test_delete_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.delete( request, @@ -3040,6 +3094,7 @@ def test_delete_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_rest_bad_request(request_type=compute.GetExternalVpnGatewayRequest): @@ -3136,10 +3191,13 @@ def test_get_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ExternalVpnGatewaysRestInterceptor, "post_get" ) as post, mock.patch.object( + transports.ExternalVpnGatewaysRestInterceptor, "post_get_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ExternalVpnGatewaysRestInterceptor, "pre_get" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = 
compute.GetExternalVpnGatewayRequest.pb( compute.GetExternalVpnGatewayRequest() ) @@ -3163,6 +3221,7 @@ def test_get_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.ExternalVpnGateway() + post_with_metadata.return_value = compute.ExternalVpnGateway(), metadata client.get( request, @@ -3174,6 +3233,7 @@ def test_get_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_insert_rest_bad_request(request_type=compute.InsertExternalVpnGatewayRequest): @@ -3391,10 +3451,13 @@ def test_insert_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ExternalVpnGatewaysRestInterceptor, "post_insert" ) as post, mock.patch.object( + transports.ExternalVpnGatewaysRestInterceptor, "post_insert_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ExternalVpnGatewaysRestInterceptor, "pre_insert" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.InsertExternalVpnGatewayRequest.pb( compute.InsertExternalVpnGatewayRequest() ) @@ -3418,6 +3481,7 @@ def test_insert_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.insert( request, @@ -3429,6 +3493,7 @@ def test_insert_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_rest_bad_request(request_type=compute.ListExternalVpnGatewaysRequest): @@ -3519,10 +3584,13 @@ def test_list_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ExternalVpnGatewaysRestInterceptor, "post_list" ) as post, mock.patch.object( + transports.ExternalVpnGatewaysRestInterceptor, "post_list_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ExternalVpnGatewaysRestInterceptor, "pre_list" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.ListExternalVpnGatewaysRequest.pb( compute.ListExternalVpnGatewaysRequest() ) @@ -3548,6 +3616,7 @@ def test_list_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.ExternalVpnGatewayList() + post_with_metadata.return_value = compute.ExternalVpnGatewayList(), metadata client.list( request, @@ -3559,6 +3628,7 @@ def test_list_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_set_labels_rest_bad_request( @@ -3764,10 +3834,13 @@ def test_set_labels_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ExternalVpnGatewaysRestInterceptor, "post_set_labels" ) as post, mock.patch.object( + transports.ExternalVpnGatewaysRestInterceptor, "post_set_labels_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ExternalVpnGatewaysRestInterceptor, "pre_set_labels" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.SetLabelsExternalVpnGatewayRequest.pb( compute.SetLabelsExternalVpnGatewayRequest() ) @@ -3791,6 +3864,7 @@ def test_set_labels_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata 
client.set_labels( request, @@ -3802,6 +3876,7 @@ def test_set_labels_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_test_iam_permissions_rest_bad_request( @@ -3964,10 +4039,14 @@ def test_test_iam_permissions_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ExternalVpnGatewaysRestInterceptor, "post_test_iam_permissions" ) as post, mock.patch.object( + transports.ExternalVpnGatewaysRestInterceptor, + "post_test_iam_permissions_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ExternalVpnGatewaysRestInterceptor, "pre_test_iam_permissions" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.TestIamPermissionsExternalVpnGatewayRequest.pb( compute.TestIamPermissionsExternalVpnGatewayRequest() ) @@ -3993,6 +4072,7 @@ def test_test_iam_permissions_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.TestPermissionsResponse() + post_with_metadata.return_value = compute.TestPermissionsResponse(), metadata client.test_iam_permissions( request, @@ -4004,6 +4084,7 @@ def test_test_iam_permissions_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_initialize_client_w_rest(): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_firewall_policies.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_firewall_policies.py index d84d086889d4..a62bd4efc062 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_firewall_policies.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_firewall_policies.py @@ -66,6 +66,13 @@ ) from google.cloud.compute_v1.types import compute +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -320,6 +327,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = FirewallPoliciesClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = FirewallPoliciesClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -6533,10 +6583,13 @@ def test_add_association_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.FirewallPoliciesRestInterceptor, "post_add_association" ) as post, mock.patch.object( + transports.FirewallPoliciesRestInterceptor, "post_add_association_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.FirewallPoliciesRestInterceptor, "pre_add_association" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.AddAssociationFirewallPolicyRequest.pb( compute.AddAssociationFirewallPolicyRequest() ) @@ -6560,6 +6613,7 @@ def test_add_association_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.add_association( request, @@ -6571,6 +6625,7 @@ def test_add_association_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_add_rule_rest_bad_request(request_type=compute.AddRuleFirewallPolicyRequest): @@ -6823,10 +6878,13 @@ def test_add_rule_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.FirewallPoliciesRestInterceptor, "post_add_rule" ) as post, mock.patch.object( + transports.FirewallPoliciesRestInterceptor, "post_add_rule_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.FirewallPoliciesRestInterceptor, "pre_add_rule" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.AddRuleFirewallPolicyRequest.pb( compute.AddRuleFirewallPolicyRequest() ) @@ -6850,6 +6908,7 @@ def test_add_rule_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.add_rule( request, @@ -6861,6 +6920,7 @@ def test_add_rule_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def 
test_clone_rules_rest_bad_request( @@ -6987,10 +7047,13 @@ def test_clone_rules_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.FirewallPoliciesRestInterceptor, "post_clone_rules" ) as post, mock.patch.object( + transports.FirewallPoliciesRestInterceptor, "post_clone_rules_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.FirewallPoliciesRestInterceptor, "pre_clone_rules" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.CloneRulesFirewallPolicyRequest.pb( compute.CloneRulesFirewallPolicyRequest() ) @@ -7014,6 +7077,7 @@ def test_clone_rules_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.clone_rules( request, @@ -7025,6 +7089,7 @@ def test_clone_rules_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_rest_bad_request(request_type=compute.DeleteFirewallPolicyRequest): @@ -7149,10 +7214,13 @@ def test_delete_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.FirewallPoliciesRestInterceptor, "post_delete" ) as post, mock.patch.object( + transports.FirewallPoliciesRestInterceptor, "post_delete_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.FirewallPoliciesRestInterceptor, "pre_delete" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.DeleteFirewallPolicyRequest.pb( compute.DeleteFirewallPolicyRequest() ) @@ -7176,6 +7244,7 @@ def test_delete_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.delete( request, @@ -7187,6 +7256,7 @@ def test_delete_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_rest_bad_request(request_type=compute.GetFirewallPolicyRequest): @@ -7293,10 +7363,13 @@ def test_get_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.FirewallPoliciesRestInterceptor, "post_get" ) as post, mock.patch.object( + transports.FirewallPoliciesRestInterceptor, "post_get_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.FirewallPoliciesRestInterceptor, "pre_get" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.GetFirewallPolicyRequest.pb( compute.GetFirewallPolicyRequest() ) @@ -7320,6 +7393,7 @@ def test_get_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.FirewallPolicy() + post_with_metadata.return_value = compute.FirewallPolicy(), metadata client.get( request, @@ -7331,6 +7405,7 @@ def test_get_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_association_rest_bad_request( @@ -7423,10 +7498,13 @@ def test_get_association_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.FirewallPoliciesRestInterceptor, "post_get_association" ) as post, mock.patch.object( + transports.FirewallPoliciesRestInterceptor, "post_get_association_with_metadata" + ) as post_with_metadata, 
mock.patch.object( transports.FirewallPoliciesRestInterceptor, "pre_get_association" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.GetAssociationFirewallPolicyRequest.pb( compute.GetAssociationFirewallPolicyRequest() ) @@ -7452,6 +7530,7 @@ def test_get_association_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.FirewallPolicyAssociation() + post_with_metadata.return_value = compute.FirewallPolicyAssociation(), metadata client.get_association( request, @@ -7463,6 +7542,7 @@ def test_get_association_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_iam_policy_rest_bad_request( @@ -7551,10 +7631,13 @@ def test_get_iam_policy_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.FirewallPoliciesRestInterceptor, "post_get_iam_policy" ) as post, mock.patch.object( + transports.FirewallPoliciesRestInterceptor, "post_get_iam_policy_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.FirewallPoliciesRestInterceptor, "pre_get_iam_policy" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.GetIamPolicyFirewallPolicyRequest.pb( compute.GetIamPolicyFirewallPolicyRequest() ) @@ -7578,6 +7661,7 @@ def test_get_iam_policy_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Policy() + post_with_metadata.return_value = compute.Policy(), metadata client.get_iam_policy( request, @@ -7589,6 +7673,7 @@ def test_get_iam_policy_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_rule_rest_bad_request(request_type=compute.GetRuleFirewallPolicyRequest): @@ -7695,10 +7780,13 @@ def test_get_rule_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.FirewallPoliciesRestInterceptor, "post_get_rule" ) as post, mock.patch.object( + transports.FirewallPoliciesRestInterceptor, "post_get_rule_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.FirewallPoliciesRestInterceptor, "pre_get_rule" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.GetRuleFirewallPolicyRequest.pb( compute.GetRuleFirewallPolicyRequest() ) @@ -7722,6 +7810,7 @@ def test_get_rule_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.FirewallPolicyRule() + post_with_metadata.return_value = compute.FirewallPolicyRule(), metadata client.get_rule( request, @@ -7733,6 +7822,7 @@ def test_get_rule_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_insert_rest_bad_request(request_type=compute.InsertFirewallPolicyRequest): @@ -8016,10 +8106,13 @@ def test_insert_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.FirewallPoliciesRestInterceptor, "post_insert" ) as post, mock.patch.object( + transports.FirewallPoliciesRestInterceptor, "post_insert_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.FirewallPoliciesRestInterceptor, "pre_insert" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = 
compute.InsertFirewallPolicyRequest.pb( compute.InsertFirewallPolicyRequest() ) @@ -8043,6 +8136,7 @@ def test_insert_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.insert( request, @@ -8054,6 +8148,7 @@ def test_insert_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_rest_bad_request(request_type=compute.ListFirewallPoliciesRequest): @@ -8140,10 +8235,13 @@ def test_list_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.FirewallPoliciesRestInterceptor, "post_list" ) as post, mock.patch.object( + transports.FirewallPoliciesRestInterceptor, "post_list_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.FirewallPoliciesRestInterceptor, "pre_list" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.ListFirewallPoliciesRequest.pb( compute.ListFirewallPoliciesRequest() ) @@ -8167,6 +8265,7 @@ def test_list_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.FirewallPolicyList() + post_with_metadata.return_value = compute.FirewallPolicyList(), metadata client.list( request, @@ -8178,6 +8277,7 @@ def test_list_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_associations_rest_bad_request( @@ -8262,10 +8362,14 @@ def test_list_associations_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.FirewallPoliciesRestInterceptor, "post_list_associations" ) as post, mock.patch.object( + transports.FirewallPoliciesRestInterceptor, + "post_list_associations_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.FirewallPoliciesRestInterceptor, "pre_list_associations" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.ListAssociationsFirewallPolicyRequest.pb( compute.ListAssociationsFirewallPolicyRequest() ) @@ -8291,6 +8395,10 @@ def test_list_associations_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.FirewallPoliciesListAssociationsResponse() + post_with_metadata.return_value = ( + compute.FirewallPoliciesListAssociationsResponse(), + metadata, + ) client.list_associations( request, @@ -8302,6 +8410,7 @@ def test_list_associations_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_move_rest_bad_request(request_type=compute.MoveFirewallPolicyRequest): @@ -8426,10 +8535,13 @@ def test_move_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.FirewallPoliciesRestInterceptor, "post_move" ) as post, mock.patch.object( + transports.FirewallPoliciesRestInterceptor, "post_move_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.FirewallPoliciesRestInterceptor, "pre_move" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.MoveFirewallPolicyRequest.pb( compute.MoveFirewallPolicyRequest() ) @@ -8453,6 +8565,7 @@ def test_move_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + 
post_with_metadata.return_value = compute.Operation(), metadata client.move( request, @@ -8464,6 +8577,7 @@ def test_move_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_patch_rest_bad_request(request_type=compute.PatchFirewallPolicyRequest): @@ -8747,10 +8861,13 @@ def test_patch_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.FirewallPoliciesRestInterceptor, "post_patch" ) as post, mock.patch.object( + transports.FirewallPoliciesRestInterceptor, "post_patch_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.FirewallPoliciesRestInterceptor, "pre_patch" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.PatchFirewallPolicyRequest.pb( compute.PatchFirewallPolicyRequest() ) @@ -8774,6 +8891,7 @@ def test_patch_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.patch( request, @@ -8785,6 +8903,7 @@ def test_patch_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_patch_rule_rest_bad_request( @@ -9039,10 +9158,13 @@ def test_patch_rule_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.FirewallPoliciesRestInterceptor, "post_patch_rule" ) as post, mock.patch.object( + transports.FirewallPoliciesRestInterceptor, "post_patch_rule_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.FirewallPoliciesRestInterceptor, "pre_patch_rule" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.PatchRuleFirewallPolicyRequest.pb( compute.PatchRuleFirewallPolicyRequest() ) @@ -9066,6 +9188,7 @@ def test_patch_rule_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.patch_rule( request, @@ -9077,6 +9200,7 @@ def test_patch_rule_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_remove_association_rest_bad_request( @@ -9203,10 +9327,14 @@ def test_remove_association_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.FirewallPoliciesRestInterceptor, "post_remove_association" ) as post, mock.patch.object( + transports.FirewallPoliciesRestInterceptor, + "post_remove_association_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.FirewallPoliciesRestInterceptor, "pre_remove_association" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.RemoveAssociationFirewallPolicyRequest.pb( compute.RemoveAssociationFirewallPolicyRequest() ) @@ -9230,6 +9358,7 @@ def test_remove_association_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.remove_association( request, @@ -9241,6 +9370,7 @@ def test_remove_association_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_remove_rule_rest_bad_request( @@ -9367,10 
+9497,13 @@ def test_remove_rule_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.FirewallPoliciesRestInterceptor, "post_remove_rule" ) as post, mock.patch.object( + transports.FirewallPoliciesRestInterceptor, "post_remove_rule_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.FirewallPoliciesRestInterceptor, "pre_remove_rule" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.RemoveRuleFirewallPolicyRequest.pb( compute.RemoveRuleFirewallPolicyRequest() ) @@ -9394,6 +9527,7 @@ def test_remove_rule_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.remove_rule( request, @@ -9405,6 +9539,7 @@ def test_remove_rule_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_set_iam_policy_rest_bad_request( @@ -9616,10 +9751,13 @@ def test_set_iam_policy_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.FirewallPoliciesRestInterceptor, "post_set_iam_policy" ) as post, mock.patch.object( + transports.FirewallPoliciesRestInterceptor, "post_set_iam_policy_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.FirewallPoliciesRestInterceptor, "pre_set_iam_policy" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.SetIamPolicyFirewallPolicyRequest.pb( compute.SetIamPolicyFirewallPolicyRequest() ) @@ -9643,6 +9781,7 @@ def test_set_iam_policy_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Policy() + post_with_metadata.return_value = compute.Policy(), metadata client.set_iam_policy( request, @@ -9654,6 +9793,7 @@ def test_set_iam_policy_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_test_iam_permissions_rest_bad_request( @@ -9816,10 +9956,14 @@ def test_test_iam_permissions_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.FirewallPoliciesRestInterceptor, "post_test_iam_permissions" ) as post, mock.patch.object( + transports.FirewallPoliciesRestInterceptor, + "post_test_iam_permissions_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.FirewallPoliciesRestInterceptor, "pre_test_iam_permissions" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.TestIamPermissionsFirewallPolicyRequest.pb( compute.TestIamPermissionsFirewallPolicyRequest() ) @@ -9845,6 +9989,7 @@ def test_test_iam_permissions_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.TestPermissionsResponse() + post_with_metadata.return_value = compute.TestPermissionsResponse(), metadata client.test_iam_permissions( request, @@ -9856,6 +10001,7 @@ def test_test_iam_permissions_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_initialize_client_w_rest(): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_firewalls.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_firewalls.py index 5eb85a6aabb7..5d7b8af739fa 100644 --- 
a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_firewalls.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_firewalls.py @@ -66,6 +66,13 @@ ) from google.cloud.compute_v1.types import compute +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -294,6 +301,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = FirewallsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = FirewallsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -3125,10 +3175,13 @@ def test_delete_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.FirewallsRestInterceptor, "post_delete" ) as post, mock.patch.object( + transports.FirewallsRestInterceptor, "post_delete_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.FirewallsRestInterceptor, "pre_delete" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.DeleteFirewallRequest.pb(compute.DeleteFirewallRequest()) transcode.return_value = { "method": "post", @@ -3150,6 +3203,7 @@ def test_delete_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.delete( request, @@ -3161,6 +3215,7 @@ def test_delete_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_rest_bad_request(request_type=compute.GetFirewallRequest): @@ -3271,10 +3326,13 @@ def test_get_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.FirewallsRestInterceptor, "post_get" ) as post, mock.patch.object( + transports.FirewallsRestInterceptor, "post_get_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.FirewallsRestInterceptor, "pre_get" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = 
compute.GetFirewallRequest.pb(compute.GetFirewallRequest()) transcode.return_value = { "method": "post", @@ -3296,6 +3354,7 @@ def test_get_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Firewall() + post_with_metadata.return_value = compute.Firewall(), metadata client.get( request, @@ -3307,6 +3366,7 @@ def test_get_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_insert_rest_bad_request(request_type=compute.InsertFirewallRequest): @@ -3536,10 +3596,13 @@ def test_insert_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.FirewallsRestInterceptor, "post_insert" ) as post, mock.patch.object( + transports.FirewallsRestInterceptor, "post_insert_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.FirewallsRestInterceptor, "pre_insert" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.InsertFirewallRequest.pb(compute.InsertFirewallRequest()) transcode.return_value = { "method": "post", @@ -3561,6 +3624,7 @@ def test_insert_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.insert( request, @@ -3572,6 +3636,7 @@ def test_insert_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_rest_bad_request(request_type=compute.ListFirewallsRequest): @@ -3658,10 +3723,13 @@ def test_list_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.FirewallsRestInterceptor, "post_list" ) as post, mock.patch.object( + transports.FirewallsRestInterceptor, "post_list_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.FirewallsRestInterceptor, "pre_list" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.ListFirewallsRequest.pb(compute.ListFirewallsRequest()) transcode.return_value = { "method": "post", @@ -3683,6 +3751,7 @@ def test_list_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.FirewallList() + post_with_metadata.return_value = compute.FirewallList(), metadata client.list( request, @@ -3694,6 +3763,7 @@ def test_list_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_patch_rest_bad_request(request_type=compute.PatchFirewallRequest): @@ -3923,10 +3993,13 @@ def test_patch_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.FirewallsRestInterceptor, "post_patch" ) as post, mock.patch.object( + transports.FirewallsRestInterceptor, "post_patch_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.FirewallsRestInterceptor, "pre_patch" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.PatchFirewallRequest.pb(compute.PatchFirewallRequest()) transcode.return_value = { "method": "post", @@ -3948,6 +4021,7 @@ def test_patch_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.patch( request, @@ -3959,6 +4033,7 @@ def 
test_patch_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_rest_bad_request(request_type=compute.UpdateFirewallRequest): @@ -4188,10 +4263,13 @@ def test_update_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.FirewallsRestInterceptor, "post_update" ) as post, mock.patch.object( + transports.FirewallsRestInterceptor, "post_update_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.FirewallsRestInterceptor, "pre_update" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.UpdateFirewallRequest.pb(compute.UpdateFirewallRequest()) transcode.return_value = { "method": "post", @@ -4213,6 +4291,7 @@ def test_update_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.update( request, @@ -4224,6 +4303,7 @@ def test_update_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_initialize_client_w_rest(): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_forwarding_rules.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_forwarding_rules.py index 618f63aa6044..018fb06863c6 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_forwarding_rules.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_forwarding_rules.py @@ -66,6 +66,13 @@ ) from google.cloud.compute_v1.types import compute +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -319,6 +326,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = ForwardingRulesClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = ForwardingRulesClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -3954,10 +4004,13 @@ def test_aggregated_list_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ForwardingRulesRestInterceptor, "post_aggregated_list" ) as post, mock.patch.object( + transports.ForwardingRulesRestInterceptor, "post_aggregated_list_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ForwardingRulesRestInterceptor, "pre_aggregated_list" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.AggregatedListForwardingRulesRequest.pb( compute.AggregatedListForwardingRulesRequest() ) @@ -3983,6 +4036,10 @@ def test_aggregated_list_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.ForwardingRuleAggregatedList() + post_with_metadata.return_value = ( + compute.ForwardingRuleAggregatedList(), + metadata, + ) client.aggregated_list( request, @@ -3994,6 +4051,7 @@ def test_aggregated_list_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_rest_bad_request(request_type=compute.DeleteForwardingRuleRequest): @@ -4126,10 +4184,13 @@ def test_delete_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ForwardingRulesRestInterceptor, "post_delete" ) as post, mock.patch.object( + transports.ForwardingRulesRestInterceptor, "post_delete_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ForwardingRulesRestInterceptor, "pre_delete" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.DeleteForwardingRuleRequest.pb( compute.DeleteForwardingRuleRequest() ) @@ -4153,6 +4214,7 @@ def test_delete_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.delete( request, @@ -4164,6 +4226,7 @@ def test_delete_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def 
test_get_rest_bad_request(request_type=compute.GetForwardingRuleRequest): @@ -4316,10 +4379,13 @@ def test_get_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ForwardingRulesRestInterceptor, "post_get" ) as post, mock.patch.object( + transports.ForwardingRulesRestInterceptor, "post_get_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ForwardingRulesRestInterceptor, "pre_get" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.GetForwardingRuleRequest.pb( compute.GetForwardingRuleRequest() ) @@ -4343,6 +4409,7 @@ def test_get_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.ForwardingRule() + post_with_metadata.return_value = compute.ForwardingRule(), metadata client.get( request, @@ -4354,6 +4421,7 @@ def test_get_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_insert_rest_bad_request(request_type=compute.InsertForwardingRuleRequest): @@ -4597,10 +4665,13 @@ def test_insert_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ForwardingRulesRestInterceptor, "post_insert" ) as post, mock.patch.object( + transports.ForwardingRulesRestInterceptor, "post_insert_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ForwardingRulesRestInterceptor, "pre_insert" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.InsertForwardingRuleRequest.pb( compute.InsertForwardingRuleRequest() ) @@ -4624,6 +4695,7 @@ def test_insert_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.insert( request, @@ -4635,6 +4707,7 @@ def test_insert_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_rest_bad_request(request_type=compute.ListForwardingRulesRequest): @@ -4723,10 +4796,13 @@ def test_list_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ForwardingRulesRestInterceptor, "post_list" ) as post, mock.patch.object( + transports.ForwardingRulesRestInterceptor, "post_list_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ForwardingRulesRestInterceptor, "pre_list" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.ListForwardingRulesRequest.pb( compute.ListForwardingRulesRequest() ) @@ -4750,6 +4826,7 @@ def test_list_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.ForwardingRuleList() + post_with_metadata.return_value = compute.ForwardingRuleList(), metadata client.list( request, @@ -4761,6 +4838,7 @@ def test_list_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_patch_rest_bad_request(request_type=compute.PatchForwardingRuleRequest): @@ -5012,10 +5090,13 @@ def test_patch_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ForwardingRulesRestInterceptor, "post_patch" ) as post, mock.patch.object( + transports.ForwardingRulesRestInterceptor, "post_patch_with_metadata" + ) as post_with_metadata, 
mock.patch.object( transports.ForwardingRulesRestInterceptor, "pre_patch" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.PatchForwardingRuleRequest.pb( compute.PatchForwardingRuleRequest() ) @@ -5039,6 +5120,7 @@ def test_patch_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.patch( request, @@ -5050,6 +5132,7 @@ def test_patch_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_set_labels_rest_bad_request( @@ -5255,10 +5338,13 @@ def test_set_labels_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ForwardingRulesRestInterceptor, "post_set_labels" ) as post, mock.patch.object( + transports.ForwardingRulesRestInterceptor, "post_set_labels_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ForwardingRulesRestInterceptor, "pre_set_labels" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.SetLabelsForwardingRuleRequest.pb( compute.SetLabelsForwardingRuleRequest() ) @@ -5282,6 +5368,7 @@ def test_set_labels_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.set_labels( request, @@ -5293,6 +5380,7 @@ def test_set_labels_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_set_target_rest_bad_request( @@ -5501,10 +5589,13 @@ def test_set_target_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ForwardingRulesRestInterceptor, "post_set_target" ) as post, mock.patch.object( + transports.ForwardingRulesRestInterceptor, "post_set_target_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ForwardingRulesRestInterceptor, "pre_set_target" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.SetTargetForwardingRuleRequest.pb( compute.SetTargetForwardingRuleRequest() ) @@ -5528,6 +5619,7 @@ def test_set_target_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.set_target( request, @@ -5539,6 +5631,7 @@ def test_set_target_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_initialize_client_w_rest(): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_global_addresses.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_global_addresses.py index d22d485d5bce..164215519287 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_global_addresses.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_global_addresses.py @@ -66,6 +66,13 @@ ) from google.cloud.compute_v1.types import compute +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in 
range(0, len(data)): # pragma: NO COVER @@ -319,6 +326,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = GlobalAddressesClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = GlobalAddressesClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -3166,10 +3216,13 @@ def test_delete_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.GlobalAddressesRestInterceptor, "post_delete" ) as post, mock.patch.object( + transports.GlobalAddressesRestInterceptor, "post_delete_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.GlobalAddressesRestInterceptor, "pre_delete" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.DeleteGlobalAddressRequest.pb( compute.DeleteGlobalAddressRequest() ) @@ -3193,6 +3246,7 @@ def test_delete_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.delete( request, @@ -3204,6 +3258,7 @@ def test_delete_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_rest_bad_request(request_type=compute.GetGlobalAddressRequest): @@ -3322,10 +3377,13 @@ def test_get_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.GlobalAddressesRestInterceptor, "post_get" ) as post, mock.patch.object( + transports.GlobalAddressesRestInterceptor, "post_get_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.GlobalAddressesRestInterceptor, "pre_get" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.GetGlobalAddressRequest.pb( compute.GetGlobalAddressRequest() ) @@ -3349,6 +3407,7 @@ def test_get_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Address() + post_with_metadata.return_value = compute.Address(), metadata client.get( request, @@ -3360,6 +3419,7 @@ def test_get_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def 
test_insert_rest_bad_request(request_type=compute.InsertGlobalAddressRequest): @@ -3573,10 +3633,13 @@ def test_insert_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.GlobalAddressesRestInterceptor, "post_insert" ) as post, mock.patch.object( + transports.GlobalAddressesRestInterceptor, "post_insert_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.GlobalAddressesRestInterceptor, "pre_insert" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.InsertGlobalAddressRequest.pb( compute.InsertGlobalAddressRequest() ) @@ -3600,6 +3663,7 @@ def test_insert_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.insert( request, @@ -3611,6 +3675,7 @@ def test_insert_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_rest_bad_request(request_type=compute.ListGlobalAddressesRequest): @@ -3699,10 +3764,13 @@ def test_list_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.GlobalAddressesRestInterceptor, "post_list" ) as post, mock.patch.object( + transports.GlobalAddressesRestInterceptor, "post_list_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.GlobalAddressesRestInterceptor, "pre_list" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.ListGlobalAddressesRequest.pb( compute.ListGlobalAddressesRequest() ) @@ -3726,6 +3794,7 @@ def test_list_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.AddressList() + post_with_metadata.return_value = compute.AddressList(), metadata client.list( request, @@ -3737,6 +3806,7 @@ def test_list_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_move_rest_bad_request(request_type=compute.MoveGlobalAddressRequest): @@ -3943,10 +4013,13 @@ def test_move_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.GlobalAddressesRestInterceptor, "post_move" ) as post, mock.patch.object( + transports.GlobalAddressesRestInterceptor, "post_move_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.GlobalAddressesRestInterceptor, "pre_move" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.MoveGlobalAddressRequest.pb( compute.MoveGlobalAddressRequest() ) @@ -3970,6 +4043,7 @@ def test_move_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.move( request, @@ -3981,6 +4055,7 @@ def test_move_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_set_labels_rest_bad_request( @@ -4186,10 +4261,13 @@ def test_set_labels_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.GlobalAddressesRestInterceptor, "post_set_labels" ) as post, mock.patch.object( + transports.GlobalAddressesRestInterceptor, "post_set_labels_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.GlobalAddressesRestInterceptor, 
"pre_set_labels" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.SetLabelsGlobalAddressRequest.pb( compute.SetLabelsGlobalAddressRequest() ) @@ -4213,6 +4291,7 @@ def test_set_labels_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.set_labels( request, @@ -4224,6 +4303,7 @@ def test_set_labels_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_initialize_client_w_rest(): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_global_forwarding_rules.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_global_forwarding_rules.py index 558cfa26346e..3628e9ec38c3 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_global_forwarding_rules.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_global_forwarding_rules.py @@ -66,6 +66,13 @@ ) from google.cloud.compute_v1.types import compute +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -336,6 +343,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = GlobalForwardingRulesClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = GlobalForwardingRulesClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -3620,10 +3670,13 @@ def test_delete_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.GlobalForwardingRulesRestInterceptor, "post_delete" ) as post, mock.patch.object( + transports.GlobalForwardingRulesRestInterceptor, "post_delete_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.GlobalForwardingRulesRestInterceptor, "pre_delete" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.DeleteGlobalForwardingRuleRequest.pb( 
compute.DeleteGlobalForwardingRuleRequest() ) @@ -3647,6 +3700,7 @@ def test_delete_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.delete( request, @@ -3658,6 +3712,7 @@ def test_delete_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_rest_bad_request(request_type=compute.GetGlobalForwardingRuleRequest): @@ -3802,10 +3857,13 @@ def test_get_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.GlobalForwardingRulesRestInterceptor, "post_get" ) as post, mock.patch.object( + transports.GlobalForwardingRulesRestInterceptor, "post_get_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.GlobalForwardingRulesRestInterceptor, "pre_get" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.GetGlobalForwardingRuleRequest.pb( compute.GetGlobalForwardingRuleRequest() ) @@ -3829,6 +3887,7 @@ def test_get_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.ForwardingRule() + post_with_metadata.return_value = compute.ForwardingRule(), metadata client.get( request, @@ -3840,6 +3899,7 @@ def test_get_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_insert_rest_bad_request( @@ -4085,10 +4145,13 @@ def test_insert_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.GlobalForwardingRulesRestInterceptor, "post_insert" ) as post, mock.patch.object( + transports.GlobalForwardingRulesRestInterceptor, "post_insert_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.GlobalForwardingRulesRestInterceptor, "pre_insert" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.InsertGlobalForwardingRuleRequest.pb( compute.InsertGlobalForwardingRuleRequest() ) @@ -4112,6 +4175,7 @@ def test_insert_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.insert( request, @@ -4123,6 +4187,7 @@ def test_insert_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_rest_bad_request(request_type=compute.ListGlobalForwardingRulesRequest): @@ -4211,10 +4276,13 @@ def test_list_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.GlobalForwardingRulesRestInterceptor, "post_list" ) as post, mock.patch.object( + transports.GlobalForwardingRulesRestInterceptor, "post_list_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.GlobalForwardingRulesRestInterceptor, "pre_list" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.ListGlobalForwardingRulesRequest.pb( compute.ListGlobalForwardingRulesRequest() ) @@ -4238,6 +4306,7 @@ def test_list_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.ForwardingRuleList() + post_with_metadata.return_value = compute.ForwardingRuleList(), metadata client.list( request, @@ -4249,6 +4318,7 @@ def 
test_list_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_patch_rest_bad_request(request_type=compute.PatchGlobalForwardingRuleRequest): @@ -4492,10 +4562,13 @@ def test_patch_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.GlobalForwardingRulesRestInterceptor, "post_patch" ) as post, mock.patch.object( + transports.GlobalForwardingRulesRestInterceptor, "post_patch_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.GlobalForwardingRulesRestInterceptor, "pre_patch" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.PatchGlobalForwardingRuleRequest.pb( compute.PatchGlobalForwardingRuleRequest() ) @@ -4519,6 +4592,7 @@ def test_patch_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.patch( request, @@ -4530,6 +4604,7 @@ def test_patch_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_set_labels_rest_bad_request( @@ -4735,10 +4810,13 @@ def test_set_labels_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.GlobalForwardingRulesRestInterceptor, "post_set_labels" ) as post, mock.patch.object( + transports.GlobalForwardingRulesRestInterceptor, "post_set_labels_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.GlobalForwardingRulesRestInterceptor, "pre_set_labels" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.SetLabelsGlobalForwardingRuleRequest.pb( compute.SetLabelsGlobalForwardingRuleRequest() ) @@ -4762,6 +4840,7 @@ def test_set_labels_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.set_labels( request, @@ -4773,6 +4852,7 @@ def test_set_labels_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_set_target_rest_bad_request( @@ -4973,10 +5053,13 @@ def test_set_target_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.GlobalForwardingRulesRestInterceptor, "post_set_target" ) as post, mock.patch.object( + transports.GlobalForwardingRulesRestInterceptor, "post_set_target_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.GlobalForwardingRulesRestInterceptor, "pre_set_target" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.SetTargetGlobalForwardingRuleRequest.pb( compute.SetTargetGlobalForwardingRuleRequest() ) @@ -5000,6 +5083,7 @@ def test_set_target_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.set_target( request, @@ -5011,6 +5095,7 @@ def test_set_target_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_initialize_client_w_rest(): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_global_network_endpoint_groups.py 
b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_global_network_endpoint_groups.py index 260428d94e47..3c86edee23d6 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_global_network_endpoint_groups.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_global_network_endpoint_groups.py @@ -66,6 +66,13 @@ ) from google.cloud.compute_v1.types import compute +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -344,6 +351,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = GlobalNetworkEndpointGroupsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = GlobalNetworkEndpointGroupsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -3657,11 +3707,15 @@ def test_attach_network_endpoints_rest_interceptors(null_interceptor): transports.GlobalNetworkEndpointGroupsRestInterceptor, "post_attach_network_endpoints", ) as post, mock.patch.object( + transports.GlobalNetworkEndpointGroupsRestInterceptor, + "post_attach_network_endpoints_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.GlobalNetworkEndpointGroupsRestInterceptor, "pre_attach_network_endpoints", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.AttachNetworkEndpointsGlobalNetworkEndpointGroupRequest.pb( compute.AttachNetworkEndpointsGlobalNetworkEndpointGroupRequest() ) @@ -3685,6 +3739,7 @@ def test_attach_network_endpoints_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.attach_network_endpoints( request, @@ -3696,6 +3751,7 @@ def test_attach_network_endpoints_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_rest_bad_request( @@ -3822,10 +3878,14 @@ def test_delete_rest_interceptors(null_interceptor): ) as transcode, 
mock.patch.object( transports.GlobalNetworkEndpointGroupsRestInterceptor, "post_delete" ) as post, mock.patch.object( + transports.GlobalNetworkEndpointGroupsRestInterceptor, + "post_delete_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.GlobalNetworkEndpointGroupsRestInterceptor, "pre_delete" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.DeleteGlobalNetworkEndpointGroupRequest.pb( compute.DeleteGlobalNetworkEndpointGroupRequest() ) @@ -3849,6 +3909,7 @@ def test_delete_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.delete( request, @@ -3860,6 +3921,7 @@ def test_delete_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_detach_network_endpoints_rest_bad_request( @@ -4084,11 +4146,15 @@ def test_detach_network_endpoints_rest_interceptors(null_interceptor): transports.GlobalNetworkEndpointGroupsRestInterceptor, "post_detach_network_endpoints", ) as post, mock.patch.object( + transports.GlobalNetworkEndpointGroupsRestInterceptor, + "post_detach_network_endpoints_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.GlobalNetworkEndpointGroupsRestInterceptor, "pre_detach_network_endpoints", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.DetachNetworkEndpointsGlobalNetworkEndpointGroupRequest.pb( compute.DetachNetworkEndpointsGlobalNetworkEndpointGroupRequest() ) @@ -4112,6 +4178,7 @@ def test_detach_network_endpoints_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.detach_network_endpoints( request, @@ -4123,6 +4190,7 @@ def test_detach_network_endpoints_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_rest_bad_request( @@ -4233,10 +4301,13 @@ def test_get_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.GlobalNetworkEndpointGroupsRestInterceptor, "post_get" ) as post, mock.patch.object( + transports.GlobalNetworkEndpointGroupsRestInterceptor, "post_get_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.GlobalNetworkEndpointGroupsRestInterceptor, "pre_get" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.GetGlobalNetworkEndpointGroupRequest.pb( compute.GetGlobalNetworkEndpointGroupRequest() ) @@ -4262,6 +4333,7 @@ def test_get_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.NetworkEndpointGroup() + post_with_metadata.return_value = compute.NetworkEndpointGroup(), metadata client.get( request, @@ -4273,6 +4345,7 @@ def test_get_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_insert_rest_bad_request( @@ -4508,10 +4581,14 @@ def test_insert_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.GlobalNetworkEndpointGroupsRestInterceptor, "post_insert" ) as post, mock.patch.object( + transports.GlobalNetworkEndpointGroupsRestInterceptor, + 
"post_insert_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.GlobalNetworkEndpointGroupsRestInterceptor, "pre_insert" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.InsertGlobalNetworkEndpointGroupRequest.pb( compute.InsertGlobalNetworkEndpointGroupRequest() ) @@ -4535,6 +4612,7 @@ def test_insert_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.insert( request, @@ -4546,6 +4624,7 @@ def test_insert_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_rest_bad_request( @@ -4636,10 +4715,13 @@ def test_list_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.GlobalNetworkEndpointGroupsRestInterceptor, "post_list" ) as post, mock.patch.object( + transports.GlobalNetworkEndpointGroupsRestInterceptor, "post_list_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.GlobalNetworkEndpointGroupsRestInterceptor, "pre_list" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.ListGlobalNetworkEndpointGroupsRequest.pb( compute.ListGlobalNetworkEndpointGroupsRequest() ) @@ -4665,6 +4747,7 @@ def test_list_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.NetworkEndpointGroupList() + post_with_metadata.return_value = compute.NetworkEndpointGroupList(), metadata client.list( request, @@ -4676,6 +4759,7 @@ def test_list_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_network_endpoints_rest_bad_request( @@ -4767,11 +4851,15 @@ def test_list_network_endpoints_rest_interceptors(null_interceptor): transports.GlobalNetworkEndpointGroupsRestInterceptor, "post_list_network_endpoints", ) as post, mock.patch.object( + transports.GlobalNetworkEndpointGroupsRestInterceptor, + "post_list_network_endpoints_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.GlobalNetworkEndpointGroupsRestInterceptor, "pre_list_network_endpoints", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.ListNetworkEndpointsGlobalNetworkEndpointGroupsRequest.pb( compute.ListNetworkEndpointsGlobalNetworkEndpointGroupsRequest() ) @@ -4797,6 +4885,10 @@ def test_list_network_endpoints_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.NetworkEndpointGroupsListNetworkEndpoints() + post_with_metadata.return_value = ( + compute.NetworkEndpointGroupsListNetworkEndpoints(), + metadata, + ) client.list_network_endpoints( request, @@ -4808,6 +4900,7 @@ def test_list_network_endpoints_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_initialize_client_w_rest(): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_global_operations.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_global_operations.py index eb967c15a5ef..253dbeea20fc 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_global_operations.py +++ 
b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_global_operations.py @@ -59,6 +59,13 @@ ) from google.cloud.compute_v1.types import compute +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -313,6 +320,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = GlobalOperationsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = GlobalOperationsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -2188,10 +2238,13 @@ def test_aggregated_list_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.GlobalOperationsRestInterceptor, "post_aggregated_list" ) as post, mock.patch.object( + transports.GlobalOperationsRestInterceptor, "post_aggregated_list_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.GlobalOperationsRestInterceptor, "pre_aggregated_list" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.AggregatedListGlobalOperationsRequest.pb( compute.AggregatedListGlobalOperationsRequest() ) @@ -2217,6 +2270,7 @@ def test_aggregated_list_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.OperationAggregatedList() + post_with_metadata.return_value = compute.OperationAggregatedList(), metadata client.aggregated_list( request, @@ -2228,6 +2282,7 @@ def test_aggregated_list_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_rest_bad_request(request_type=compute.DeleteGlobalOperationRequest): @@ -2307,10 +2362,13 @@ def test_delete_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.GlobalOperationsRestInterceptor, "post_delete" ) as post, mock.patch.object( + transports.GlobalOperationsRestInterceptor, "post_delete_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.GlobalOperationsRestInterceptor, "pre_delete" ) as pre: pre.assert_not_called() 
post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.DeleteGlobalOperationRequest.pb( compute.DeleteGlobalOperationRequest() ) @@ -2336,6 +2394,10 @@ def test_delete_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.DeleteGlobalOperationResponse() + post_with_metadata.return_value = ( + compute.DeleteGlobalOperationResponse(), + metadata, + ) client.delete( request, @@ -2347,6 +2409,7 @@ def test_delete_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_rest_bad_request(request_type=compute.GetGlobalOperationRequest): @@ -2471,10 +2534,13 @@ def test_get_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.GlobalOperationsRestInterceptor, "post_get" ) as post, mock.patch.object( + transports.GlobalOperationsRestInterceptor, "post_get_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.GlobalOperationsRestInterceptor, "pre_get" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.GetGlobalOperationRequest.pb( compute.GetGlobalOperationRequest() ) @@ -2498,6 +2564,7 @@ def test_get_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.get( request, @@ -2509,6 +2576,7 @@ def test_get_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_rest_bad_request(request_type=compute.ListGlobalOperationsRequest): @@ -2597,10 +2665,13 @@ def test_list_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.GlobalOperationsRestInterceptor, "post_list" ) as post, mock.patch.object( + transports.GlobalOperationsRestInterceptor, "post_list_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.GlobalOperationsRestInterceptor, "pre_list" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.ListGlobalOperationsRequest.pb( compute.ListGlobalOperationsRequest() ) @@ -2624,6 +2695,7 @@ def test_list_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.OperationList() + post_with_metadata.return_value = compute.OperationList(), metadata client.list( request, @@ -2635,6 +2707,7 @@ def test_list_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_wait_rest_bad_request(request_type=compute.WaitGlobalOperationRequest): @@ -2759,10 +2832,13 @@ def test_wait_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.GlobalOperationsRestInterceptor, "post_wait" ) as post, mock.patch.object( + transports.GlobalOperationsRestInterceptor, "post_wait_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.GlobalOperationsRestInterceptor, "pre_wait" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.WaitGlobalOperationRequest.pb( compute.WaitGlobalOperationRequest() ) @@ -2786,6 +2862,7 @@ def test_wait_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = 
compute.Operation(), metadata client.wait( request, @@ -2797,6 +2874,7 @@ def test_wait_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_initialize_client_w_rest(): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_global_organization_operations.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_global_organization_operations.py index 8b95225d51aa..9668de44c98a 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_global_organization_operations.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_global_organization_operations.py @@ -59,6 +59,13 @@ ) from google.cloud.compute_v1.types import compute +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -339,6 +346,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = GlobalOrganizationOperationsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = GlobalOrganizationOperationsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -1583,10 +1633,14 @@ def test_delete_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.GlobalOrganizationOperationsRestInterceptor, "post_delete" ) as post, mock.patch.object( + transports.GlobalOrganizationOperationsRestInterceptor, + "post_delete_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.GlobalOrganizationOperationsRestInterceptor, "pre_delete" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.DeleteGlobalOrganizationOperationRequest.pb( compute.DeleteGlobalOrganizationOperationRequest() ) @@ -1612,6 +1666,10 @@ def test_delete_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.DeleteGlobalOrganizationOperationResponse() + post_with_metadata.return_value = ( + compute.DeleteGlobalOrganizationOperationResponse(), + metadata, + ) 
        client.delete(
            request,
@@ -1623,6 +1681,7 @@ def test_delete_rest_interceptors(null_interceptor):
        pre.assert_called_once()
        post.assert_called_once()
+        post_with_metadata.assert_called_once()
def test_get_rest_bad_request(
@@ -1749,10 +1808,13 @@ def test_get_rest_interceptors(null_interceptor):
    ) as transcode, mock.patch.object(
        transports.GlobalOrganizationOperationsRestInterceptor, "post_get"
    ) as post, mock.patch.object(
+        transports.GlobalOrganizationOperationsRestInterceptor, "post_get_with_metadata"
+    ) as post_with_metadata, mock.patch.object(
        transports.GlobalOrganizationOperationsRestInterceptor, "pre_get"
    ) as pre:
        pre.assert_not_called()
        post.assert_not_called()
+        post_with_metadata.assert_not_called()
        pb_message = compute.GetGlobalOrganizationOperationRequest.pb(
            compute.GetGlobalOrganizationOperationRequest()
        )
@@ -1776,6 +1838,7 @@ def test_get_rest_interceptors(null_interceptor):
        ]
        pre.return_value = request, metadata
        post.return_value = compute.Operation()
+        post_with_metadata.return_value = compute.Operation(), metadata
        client.get(
            request,
@@ -1787,6 +1850,7 @@ def test_get_rest_interceptors(null_interceptor):
        pre.assert_called_once()
        post.assert_called_once()
+        post_with_metadata.assert_called_once()
def test_list_rest_bad_request(
@@ -1877,10 +1941,14 @@ def test_list_rest_interceptors(null_interceptor):
    ) as transcode, mock.patch.object(
        transports.GlobalOrganizationOperationsRestInterceptor, "post_list"
    ) as post, mock.patch.object(
+        transports.GlobalOrganizationOperationsRestInterceptor,
+        "post_list_with_metadata",
+    ) as post_with_metadata, mock.patch.object(
        transports.GlobalOrganizationOperationsRestInterceptor, "pre_list"
    ) as pre:
        pre.assert_not_called()
        post.assert_not_called()
+        post_with_metadata.assert_not_called()
        pb_message = compute.ListGlobalOrganizationOperationsRequest.pb(
            compute.ListGlobalOrganizationOperationsRequest()
        )
@@ -1904,6 +1972,7 @@ def test_list_rest_interceptors(null_interceptor):
        ]
        pre.return_value = request, metadata
        post.return_value = compute.OperationList()
+        post_with_metadata.return_value = compute.OperationList(), metadata
        client.list(
            request,
@@ -1915,6 +1984,7 @@ def test_list_rest_interceptors(null_interceptor):
        pre.assert_called_once()
        post.assert_called_once()
+        post_with_metadata.assert_called_once()
def test_initialize_client_w_rest():
diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_global_public_delegated_prefixes.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_global_public_delegated_prefixes.py
index e6f045d7de82..325eb0bc9c3b 100644
--- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_global_public_delegated_prefixes.py
+++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_global_public_delegated_prefixes.py
@@ -66,6 +66,13 @@
)
from google.cloud.compute_v1.types import compute
+CRED_INFO_JSON = {
+    "credential_source": "/path/to/file",
+    "credential_type": "service account credentials",
+    "principal": "service-account@example.com",
+}
+CRED_INFO_STRING = json.dumps(CRED_INFO_JSON)
+
async def mock_async_gen(data, chunk_size=1):
    for i in range(0, len(data)):  # pragma: NO COVER
@@ -348,6 +355,49 @@ def test__get_universe_domain():
    assert str(excinfo.value) == "Universe Domain cannot be an empty string."
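The test__add_cred_info_for_auth_errors cases added to every module in this change (the GlobalOrganizationOperationsClient instance appears a little above, and the hunk that follows adds the same block for GlobalPublicDelegatedPrefixesClient) all pin down one contract: credential info is appended to error.details only for 401/403/404 errors, and only when the credentials object exposes get_cred_info and it returns a value. A minimal sketch of a helper that would satisfy those assertions is below; it is reconstructed from the tests rather than copied from the generated client, so everything other than the names _add_cred_info_for_auth_errors, get_cred_info, code, and details is an assumption.

import json

# Hedged sketch: behaviour inferred from the new test__add_cred_info_for_auth_errors
# and test__add_cred_info_for_auth_errors_no_get_cred_info cases, not from the client.
AUTH_ERROR_CODES = (401, 403, 404)  # per the parametrize tables, 500 leaves details alone

def add_cred_info_for_auth_errors(client, error):
    # Stand-in for client._add_cred_info_for_auth_errors(error).
    if error.code not in AUTH_ERROR_CODES:
        return
    cred = getattr(client._transport, "_credentials", None)
    if cred is None or not hasattr(cred, "get_cred_info"):
        return  # mock.Mock([]) in the tests has no get_cred_info attribute
    cred_info = cred.get_cred_info()
    if cred_info is not None:
        error.details.append(json.dumps(cred_info))

Run against the parametrized cases, such a helper appends json.dumps(CRED_INFO_JSON), i.e. CRED_INFO_STRING, for 401/403/404 when credential info is available and leaves error.details untouched otherwise.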
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = GlobalPublicDelegatedPrefixesClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = GlobalPublicDelegatedPrefixesClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -2832,10 +2882,14 @@ def test_delete_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.GlobalPublicDelegatedPrefixesRestInterceptor, "post_delete" ) as post, mock.patch.object( + transports.GlobalPublicDelegatedPrefixesRestInterceptor, + "post_delete_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.GlobalPublicDelegatedPrefixesRestInterceptor, "pre_delete" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.DeleteGlobalPublicDelegatedPrefixeRequest.pb( compute.DeleteGlobalPublicDelegatedPrefixeRequest() ) @@ -2859,6 +2913,7 @@ def test_delete_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.delete( request, @@ -2870,6 +2925,7 @@ def test_delete_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_rest_bad_request( @@ -2982,10 +3038,14 @@ def test_get_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.GlobalPublicDelegatedPrefixesRestInterceptor, "post_get" ) as post, mock.patch.object( + transports.GlobalPublicDelegatedPrefixesRestInterceptor, + "post_get_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.GlobalPublicDelegatedPrefixesRestInterceptor, "pre_get" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.GetGlobalPublicDelegatedPrefixeRequest.pb( compute.GetGlobalPublicDelegatedPrefixeRequest() ) @@ -3011,6 +3071,7 @@ def test_get_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.PublicDelegatedPrefix() + post_with_metadata.return_value = compute.PublicDelegatedPrefix(), metadata client.get( request, @@ -3022,6 +3083,7 @@ def test_get_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def 
test_insert_rest_bad_request( @@ -3253,10 +3315,14 @@ def test_insert_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.GlobalPublicDelegatedPrefixesRestInterceptor, "post_insert" ) as post, mock.patch.object( + transports.GlobalPublicDelegatedPrefixesRestInterceptor, + "post_insert_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.GlobalPublicDelegatedPrefixesRestInterceptor, "pre_insert" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.InsertGlobalPublicDelegatedPrefixeRequest.pb( compute.InsertGlobalPublicDelegatedPrefixeRequest() ) @@ -3280,6 +3346,7 @@ def test_insert_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.insert( request, @@ -3291,6 +3358,7 @@ def test_insert_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_rest_bad_request( @@ -3381,10 +3449,14 @@ def test_list_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.GlobalPublicDelegatedPrefixesRestInterceptor, "post_list" ) as post, mock.patch.object( + transports.GlobalPublicDelegatedPrefixesRestInterceptor, + "post_list_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.GlobalPublicDelegatedPrefixesRestInterceptor, "pre_list" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.ListGlobalPublicDelegatedPrefixesRequest.pb( compute.ListGlobalPublicDelegatedPrefixesRequest() ) @@ -3410,6 +3482,7 @@ def test_list_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.PublicDelegatedPrefixList() + post_with_metadata.return_value = compute.PublicDelegatedPrefixList(), metadata client.list( request, @@ -3421,6 +3494,7 @@ def test_list_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_patch_rest_bad_request( @@ -3652,10 +3726,14 @@ def test_patch_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.GlobalPublicDelegatedPrefixesRestInterceptor, "post_patch" ) as post, mock.patch.object( + transports.GlobalPublicDelegatedPrefixesRestInterceptor, + "post_patch_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.GlobalPublicDelegatedPrefixesRestInterceptor, "pre_patch" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.PatchGlobalPublicDelegatedPrefixeRequest.pb( compute.PatchGlobalPublicDelegatedPrefixeRequest() ) @@ -3679,6 +3757,7 @@ def test_patch_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.patch( request, @@ -3690,6 +3769,7 @@ def test_patch_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_initialize_client_w_rest(): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_health_checks.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_health_checks.py index bb08eab4a608..d45c01e7b2ae 100644 --- 
a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_health_checks.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_health_checks.py @@ -66,6 +66,13 @@ ) from google.cloud.compute_v1.types import compute +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -304,6 +311,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = HealthChecksClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = HealthChecksClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -3367,10 +3417,13 @@ def test_aggregated_list_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.HealthChecksRestInterceptor, "post_aggregated_list" ) as post, mock.patch.object( + transports.HealthChecksRestInterceptor, "post_aggregated_list_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.HealthChecksRestInterceptor, "pre_aggregated_list" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.AggregatedListHealthChecksRequest.pb( compute.AggregatedListHealthChecksRequest() ) @@ -3396,6 +3449,7 @@ def test_aggregated_list_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.HealthChecksAggregatedList() + post_with_metadata.return_value = compute.HealthChecksAggregatedList(), metadata client.aggregated_list( request, @@ -3407,6 +3461,7 @@ def test_aggregated_list_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_rest_bad_request(request_type=compute.DeleteHealthCheckRequest): @@ -3531,10 +3586,13 @@ def test_delete_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.HealthChecksRestInterceptor, "post_delete" ) as post, mock.patch.object( + transports.HealthChecksRestInterceptor, "post_delete_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.HealthChecksRestInterceptor, 
"pre_delete" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.DeleteHealthCheckRequest.pb( compute.DeleteHealthCheckRequest() ) @@ -3558,6 +3616,7 @@ def test_delete_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.delete( request, @@ -3569,6 +3628,7 @@ def test_delete_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_rest_bad_request(request_type=compute.GetHealthCheckRequest): @@ -3675,10 +3735,13 @@ def test_get_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.HealthChecksRestInterceptor, "post_get" ) as post, mock.patch.object( + transports.HealthChecksRestInterceptor, "post_get_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.HealthChecksRestInterceptor, "pre_get" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.GetHealthCheckRequest.pb(compute.GetHealthCheckRequest()) transcode.return_value = { "method": "post", @@ -3700,6 +3763,7 @@ def test_get_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.HealthCheck() + post_with_metadata.return_value = compute.HealthCheck(), metadata client.get( request, @@ -3711,6 +3775,7 @@ def test_get_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_insert_rest_bad_request(request_type=compute.InsertHealthCheckRequest): @@ -3969,10 +4034,13 @@ def test_insert_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.HealthChecksRestInterceptor, "post_insert" ) as post, mock.patch.object( + transports.HealthChecksRestInterceptor, "post_insert_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.HealthChecksRestInterceptor, "pre_insert" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.InsertHealthCheckRequest.pb( compute.InsertHealthCheckRequest() ) @@ -3996,6 +4064,7 @@ def test_insert_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.insert( request, @@ -4007,6 +4076,7 @@ def test_insert_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_rest_bad_request(request_type=compute.ListHealthChecksRequest): @@ -4095,10 +4165,13 @@ def test_list_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.HealthChecksRestInterceptor, "post_list" ) as post, mock.patch.object( + transports.HealthChecksRestInterceptor, "post_list_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.HealthChecksRestInterceptor, "pre_list" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.ListHealthChecksRequest.pb( compute.ListHealthChecksRequest() ) @@ -4122,6 +4195,7 @@ def test_list_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.HealthCheckList() + post_with_metadata.return_value = 
compute.HealthCheckList(), metadata client.list( request, @@ -4133,6 +4207,7 @@ def test_list_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_patch_rest_bad_request(request_type=compute.PatchHealthCheckRequest): @@ -4391,10 +4466,13 @@ def test_patch_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.HealthChecksRestInterceptor, "post_patch" ) as post, mock.patch.object( + transports.HealthChecksRestInterceptor, "post_patch_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.HealthChecksRestInterceptor, "pre_patch" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.PatchHealthCheckRequest.pb( compute.PatchHealthCheckRequest() ) @@ -4418,6 +4496,7 @@ def test_patch_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.patch( request, @@ -4429,6 +4508,7 @@ def test_patch_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_rest_bad_request(request_type=compute.UpdateHealthCheckRequest): @@ -4687,10 +4767,13 @@ def test_update_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.HealthChecksRestInterceptor, "post_update" ) as post, mock.patch.object( + transports.HealthChecksRestInterceptor, "post_update_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.HealthChecksRestInterceptor, "pre_update" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.UpdateHealthCheckRequest.pb( compute.UpdateHealthCheckRequest() ) @@ -4714,6 +4797,7 @@ def test_update_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.update( request, @@ -4725,6 +4809,7 @@ def test_update_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_initialize_client_w_rest(): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_image_family_views.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_image_family_views.py index 035cf135c323..8438ad351d07 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_image_family_views.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_image_family_views.py @@ -58,6 +58,13 @@ ) from google.cloud.compute_v1.types import compute +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -312,6 +319,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+@pytest.mark.parametrize(
+    "error_code,cred_info_json,show_cred_info",
+    [
+        (401, CRED_INFO_JSON, True),
+        (403, CRED_INFO_JSON, True),
+        (404, CRED_INFO_JSON, True),
+        (500, CRED_INFO_JSON, False),
+        (401, None, False),
+        (403, None, False),
+        (404, None, False),
+        (500, None, False),
+    ],
+)
+def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info):
+    cred = mock.Mock(["get_cred_info"])
+    cred.get_cred_info = mock.Mock(return_value=cred_info_json)
+    client = ImageFamilyViewsClient(credentials=cred)
+    client._transport._credentials = cred
+
+    error = core_exceptions.GoogleAPICallError("message", details=["foo"])
+    error.code = error_code
+
+    client._add_cred_info_for_auth_errors(error)
+    if show_cred_info:
+        assert error.details == ["foo", CRED_INFO_STRING]
+    else:
+        assert error.details == ["foo"]
+
+
+@pytest.mark.parametrize("error_code", [401, 403, 404, 500])
+def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code):
+    cred = mock.Mock([])
+    assert not hasattr(cred, "get_cred_info")
+    client = ImageFamilyViewsClient(credentials=cred)
+    client._transport._credentials = cred
+
+    error = core_exceptions.GoogleAPICallError("message", details=[])
+    error.code = error_code
+
+    client._add_cred_info_for_auth_errors(error)
+    assert error.details == []
+
+
@pytest.mark.parametrize(
    "client_class,transport_name",
    [
@@ -1270,10 +1320,13 @@ def test_get_rest_interceptors(null_interceptor):
    ) as transcode, mock.patch.object(
        transports.ImageFamilyViewsRestInterceptor, "post_get"
    ) as post, mock.patch.object(
+        transports.ImageFamilyViewsRestInterceptor, "post_get_with_metadata"
+    ) as post_with_metadata, mock.patch.object(
        transports.ImageFamilyViewsRestInterceptor, "pre_get"
    ) as pre:
        pre.assert_not_called()
        post.assert_not_called()
+        post_with_metadata.assert_not_called()
        pb_message = compute.GetImageFamilyViewRequest.pb(
            compute.GetImageFamilyViewRequest()
        )
@@ -1297,6 +1350,7 @@ def test_get_rest_interceptors(null_interceptor):
        ]
        pre.return_value = request, metadata
        post.return_value = compute.ImageFamilyView()
+        post_with_metadata.return_value = compute.ImageFamilyView(), metadata
        client.get(
            request,
@@ -1308,6 +1362,7 @@ def test_get_rest_interceptors(null_interceptor):
        pre.assert_called_once()
        post.assert_called_once()
+        post_with_metadata.assert_called_once()
def test_initialize_client_w_rest():
diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_images.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_images.py
index 22530a0379cb..ce83cd72024f 100644
--- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_images.py
+++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_images.py
@@ -62,6 +62,13 @@
from google.cloud.compute_v1.services.images import ImagesClient, pagers, transports
from google.cloud.compute_v1.types import compute
+CRED_INFO_JSON = {
+    "credential_source": "/path/to/file",
+    "credential_type": "service account credentials",
+    "principal": "service-account@example.com",
+}
+CRED_INFO_STRING = json.dumps(CRED_INFO_JSON)
+
async def mock_async_gen(data, chunk_size=1):
    for i in range(0, len(data)):  # pragma: NO COVER
@@ -279,6 +286,49 @@ def test__get_universe_domain():
    assert str(excinfo.value) == "Universe Domain cannot be an empty string."
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = ImagesClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = ImagesClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -4307,10 +4357,13 @@ def test_delete_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ImagesRestInterceptor, "post_delete" ) as post, mock.patch.object( + transports.ImagesRestInterceptor, "post_delete_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ImagesRestInterceptor, "pre_delete" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.DeleteImageRequest.pb(compute.DeleteImageRequest()) transcode.return_value = { "method": "post", @@ -4332,6 +4385,7 @@ def test_delete_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.delete( request, @@ -4343,6 +4397,7 @@ def test_delete_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_deprecate_rest_bad_request(request_type=compute.DeprecateImageRequest): @@ -4545,10 +4600,13 @@ def test_deprecate_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ImagesRestInterceptor, "post_deprecate" ) as post, mock.patch.object( + transports.ImagesRestInterceptor, "post_deprecate_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ImagesRestInterceptor, "pre_deprecate" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.DeprecateImageRequest.pb(compute.DeprecateImageRequest()) transcode.return_value = { "method": "post", @@ -4570,6 +4628,7 @@ def test_deprecate_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.deprecate( request, @@ -4581,6 +4640,7 @@ def test_deprecate_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_rest_bad_request(request_type=compute.GetImageRequest): @@ -4709,10 +4769,13 @@ def 
test_get_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ImagesRestInterceptor, "post_get" ) as post, mock.patch.object( + transports.ImagesRestInterceptor, "post_get_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ImagesRestInterceptor, "pre_get" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.GetImageRequest.pb(compute.GetImageRequest()) transcode.return_value = { "method": "post", @@ -4734,6 +4797,7 @@ def test_get_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Image() + post_with_metadata.return_value = compute.Image(), metadata client.get( request, @@ -4745,6 +4809,7 @@ def test_get_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_from_family_rest_bad_request( @@ -4875,10 +4940,13 @@ def test_get_from_family_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ImagesRestInterceptor, "post_get_from_family" ) as post, mock.patch.object( + transports.ImagesRestInterceptor, "post_get_from_family_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ImagesRestInterceptor, "pre_get_from_family" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.GetFromFamilyImageRequest.pb( compute.GetFromFamilyImageRequest() ) @@ -4902,6 +4970,7 @@ def test_get_from_family_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Image() + post_with_metadata.return_value = compute.Image(), metadata client.get_from_family( request, @@ -4913,6 +4982,7 @@ def test_get_from_family_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_iam_policy_rest_bad_request(request_type=compute.GetIamPolicyImageRequest): @@ -4997,10 +5067,13 @@ def test_get_iam_policy_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ImagesRestInterceptor, "post_get_iam_policy" ) as post, mock.patch.object( + transports.ImagesRestInterceptor, "post_get_iam_policy_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ImagesRestInterceptor, "pre_get_iam_policy" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.GetIamPolicyImageRequest.pb( compute.GetIamPolicyImageRequest() ) @@ -5024,6 +5097,7 @@ def test_get_iam_policy_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Policy() + post_with_metadata.return_value = compute.Policy(), metadata client.get_iam_policy( request, @@ -5035,6 +5109,7 @@ def test_get_iam_policy_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_insert_rest_bad_request(request_type=compute.InsertImageRequest): @@ -5281,10 +5356,13 @@ def test_insert_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ImagesRestInterceptor, "post_insert" ) as post, mock.patch.object( + transports.ImagesRestInterceptor, "post_insert_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ImagesRestInterceptor, "pre_insert" ) as pre: pre.assert_not_called() post.assert_not_called() + 
post_with_metadata.assert_not_called() pb_message = compute.InsertImageRequest.pb(compute.InsertImageRequest()) transcode.return_value = { "method": "post", @@ -5306,6 +5384,7 @@ def test_insert_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.insert( request, @@ -5317,6 +5396,7 @@ def test_insert_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_rest_bad_request(request_type=compute.ListImagesRequest): @@ -5403,10 +5483,13 @@ def test_list_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ImagesRestInterceptor, "post_list" ) as post, mock.patch.object( + transports.ImagesRestInterceptor, "post_list_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ImagesRestInterceptor, "pre_list" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.ListImagesRequest.pb(compute.ListImagesRequest()) transcode.return_value = { "method": "post", @@ -5428,6 +5511,7 @@ def test_list_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.ImageList() + post_with_metadata.return_value = compute.ImageList(), metadata client.list( request, @@ -5439,6 +5523,7 @@ def test_list_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_patch_rest_bad_request(request_type=compute.PatchImageRequest): @@ -5685,10 +5770,13 @@ def test_patch_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ImagesRestInterceptor, "post_patch" ) as post, mock.patch.object( + transports.ImagesRestInterceptor, "post_patch_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ImagesRestInterceptor, "pre_patch" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.PatchImageRequest.pb(compute.PatchImageRequest()) transcode.return_value = { "method": "post", @@ -5710,6 +5798,7 @@ def test_patch_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.patch( request, @@ -5721,6 +5810,7 @@ def test_patch_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_set_iam_policy_rest_bad_request(request_type=compute.SetIamPolicyImageRequest): @@ -5921,10 +6011,13 @@ def test_set_iam_policy_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ImagesRestInterceptor, "post_set_iam_policy" ) as post, mock.patch.object( + transports.ImagesRestInterceptor, "post_set_iam_policy_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ImagesRestInterceptor, "pre_set_iam_policy" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.SetIamPolicyImageRequest.pb( compute.SetIamPolicyImageRequest() ) @@ -5948,6 +6041,7 @@ def test_set_iam_policy_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Policy() + post_with_metadata.return_value = compute.Policy(), metadata client.set_iam_policy( request, @@ 
-5959,6 +6053,7 @@ def test_set_iam_policy_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_set_labels_rest_bad_request(request_type=compute.SetLabelsImageRequest): @@ -6160,10 +6255,13 @@ def test_set_labels_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ImagesRestInterceptor, "post_set_labels" ) as post, mock.patch.object( + transports.ImagesRestInterceptor, "post_set_labels_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ImagesRestInterceptor, "pre_set_labels" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.SetLabelsImageRequest.pb(compute.SetLabelsImageRequest()) transcode.return_value = { "method": "post", @@ -6185,6 +6283,7 @@ def test_set_labels_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.set_labels( request, @@ -6196,6 +6295,7 @@ def test_set_labels_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_test_iam_permissions_rest_bad_request( @@ -6356,10 +6456,13 @@ def test_test_iam_permissions_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ImagesRestInterceptor, "post_test_iam_permissions" ) as post, mock.patch.object( + transports.ImagesRestInterceptor, "post_test_iam_permissions_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ImagesRestInterceptor, "pre_test_iam_permissions" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.TestIamPermissionsImageRequest.pb( compute.TestIamPermissionsImageRequest() ) @@ -6385,6 +6488,7 @@ def test_test_iam_permissions_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.TestPermissionsResponse() + post_with_metadata.return_value = compute.TestPermissionsResponse(), metadata client.test_iam_permissions( request, @@ -6396,6 +6500,7 @@ def test_test_iam_permissions_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_initialize_client_w_rest(): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_instance_group_manager_resize_requests.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_instance_group_manager_resize_requests.py index 708ce99d3b33..a790def08ef4 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_instance_group_manager_resize_requests.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_instance_group_manager_resize_requests.py @@ -66,6 +66,13 @@ ) from google.cloud.compute_v1.types import compute +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -354,6 +361,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
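Every interceptor test in these files now patches a post_<rpc>_with_metadata hook alongside the existing post_<rpc> hook, has it return a (response, metadata) tuple, and asserts it was called exactly once. A hedged sketch of a user-supplied interceptor overriding one of the new hooks follows, using ImagesRestInterceptor because the test_images.py hunk above shows its transports import; the (response, metadata) parameter shape is assumed from the tuples the mocked hooks return, so treat the signature as illustrative.

from google.cloud.compute_v1.services.images import transports

class LoggingImagesInterceptor(transports.ImagesRestInterceptor):
    # Sketch only: the hook name matches what the tests patch; the tuple it
    # returns mirrors post_with_metadata.return_value in the tests.
    def post_get_with_metadata(self, response, metadata):
        print("images.get response metadata:", metadata)
        return response, metadata

This diff does not show how an interceptor is passed to a client, so that wiring is left out of the sketch.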
+@pytest.mark.parametrize(
+    "error_code,cred_info_json,show_cred_info",
+    [
+        (401, CRED_INFO_JSON, True),
+        (403, CRED_INFO_JSON, True),
+        (404, CRED_INFO_JSON, True),
+        (500, CRED_INFO_JSON, False),
+        (401, None, False),
+        (403, None, False),
+        (404, None, False),
+        (500, None, False),
+    ],
+)
+def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info):
+    cred = mock.Mock(["get_cred_info"])
+    cred.get_cred_info = mock.Mock(return_value=cred_info_json)
+    client = InstanceGroupManagerResizeRequestsClient(credentials=cred)
+    client._transport._credentials = cred
+
+    error = core_exceptions.GoogleAPICallError("message", details=["foo"])
+    error.code = error_code
+
+    client._add_cred_info_for_auth_errors(error)
+    if show_cred_info:
+        assert error.details == ["foo", CRED_INFO_STRING]
+    else:
+        assert error.details == ["foo"]
+
+
+@pytest.mark.parametrize("error_code", [401, 403, 404, 500])
+def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code):
+    cred = mock.Mock([])
+    assert not hasattr(cred, "get_cred_info")
+    client = InstanceGroupManagerResizeRequestsClient(credentials=cred)
+    client._transport._credentials = cred
+
+    error = core_exceptions.GoogleAPICallError("message", details=[])
+    error.code = error_code
+
+    client._add_cred_info_for_auth_errors(error)
+    assert error.details == []
+
+
@pytest.mark.parametrize(
    "client_class,transport_name",
    [
@@ -3000,10 +3050,14 @@ def test_cancel_rest_interceptors(null_interceptor):
    ) as transcode, mock.patch.object(
        transports.InstanceGroupManagerResizeRequestsRestInterceptor, "post_cancel"
    ) as post, mock.patch.object(
+        transports.InstanceGroupManagerResizeRequestsRestInterceptor,
+        "post_cancel_with_metadata",
+    ) as post_with_metadata, mock.patch.object(
        transports.InstanceGroupManagerResizeRequestsRestInterceptor, "pre_cancel"
    ) as pre:
        pre.assert_not_called()
        post.assert_not_called()
+        post_with_metadata.assert_not_called()
        pb_message = compute.CancelInstanceGroupManagerResizeRequestRequest.pb(
            compute.CancelInstanceGroupManagerResizeRequestRequest()
        )
@@ -3027,6 +3081,7 @@ def test_cancel_rest_interceptors(null_interceptor):
        ]
        pre.return_value = request, metadata
        post.return_value = compute.Operation()
+        post_with_metadata.return_value = compute.Operation(), metadata
        client.cancel(
            request,
@@ -3038,6 +3093,7 @@ def test_cancel_rest_interceptors(null_interceptor):
        pre.assert_called_once()
        post.assert_called_once()
+        post_with_metadata.assert_called_once()
def test_delete_rest_bad_request(
@@ -3174,10 +3230,14 @@ def test_delete_rest_interceptors(null_interceptor):
    ) as transcode, mock.patch.object(
        transports.InstanceGroupManagerResizeRequestsRestInterceptor, "post_delete"
    ) as post, mock.patch.object(
+        transports.InstanceGroupManagerResizeRequestsRestInterceptor,
+        "post_delete_with_metadata",
+    ) as post_with_metadata, mock.patch.object(
        transports.InstanceGroupManagerResizeRequestsRestInterceptor, "pre_delete"
    ) as pre:
        pre.assert_not_called()
        post.assert_not_called()
+        post_with_metadata.assert_not_called()
        pb_message = compute.DeleteInstanceGroupManagerResizeRequestRequest.pb(
            compute.DeleteInstanceGroupManagerResizeRequestRequest()
        )
@@ -3201,6 +3261,7 @@ def test_delete_rest_interceptors(null_interceptor):
        ]
        pre.return_value = request, metadata
        post.return_value = compute.Operation()
+        post_with_metadata.return_value = compute.Operation(), metadata
        client.delete(
            request,
@@ -3212,6 +3273,7 @@ def test_delete_rest_interceptors(null_interceptor):
        pre.assert_called_once()
post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_rest_bad_request( @@ -3324,10 +3386,14 @@ def test_get_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.InstanceGroupManagerResizeRequestsRestInterceptor, "post_get" ) as post, mock.patch.object( + transports.InstanceGroupManagerResizeRequestsRestInterceptor, + "post_get_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.InstanceGroupManagerResizeRequestsRestInterceptor, "pre_get" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.GetInstanceGroupManagerResizeRequestRequest.pb( compute.GetInstanceGroupManagerResizeRequestRequest() ) @@ -3353,6 +3419,10 @@ def test_get_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.InstanceGroupManagerResizeRequest() + post_with_metadata.return_value = ( + compute.InstanceGroupManagerResizeRequest(), + metadata, + ) client.get( request, @@ -3364,6 +3434,7 @@ def test_get_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_insert_rest_bad_request( @@ -3634,10 +3705,14 @@ def test_insert_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.InstanceGroupManagerResizeRequestsRestInterceptor, "post_insert" ) as post, mock.patch.object( + transports.InstanceGroupManagerResizeRequestsRestInterceptor, + "post_insert_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.InstanceGroupManagerResizeRequestsRestInterceptor, "pre_insert" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.InsertInstanceGroupManagerResizeRequestRequest.pb( compute.InsertInstanceGroupManagerResizeRequestRequest() ) @@ -3661,6 +3736,7 @@ def test_insert_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.insert( request, @@ -3672,6 +3748,7 @@ def test_insert_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_rest_bad_request( @@ -3772,10 +3849,14 @@ def test_list_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.InstanceGroupManagerResizeRequestsRestInterceptor, "post_list" ) as post, mock.patch.object( + transports.InstanceGroupManagerResizeRequestsRestInterceptor, + "post_list_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.InstanceGroupManagerResizeRequestsRestInterceptor, "pre_list" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.ListInstanceGroupManagerResizeRequestsRequest.pb( compute.ListInstanceGroupManagerResizeRequestsRequest() ) @@ -3801,6 +3882,10 @@ def test_list_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.InstanceGroupManagerResizeRequestsListResponse() + post_with_metadata.return_value = ( + compute.InstanceGroupManagerResizeRequestsListResponse(), + metadata, + ) client.list( request, @@ -3812,6 +3897,7 @@ def test_list_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_initialize_client_w_rest(): diff --git 
a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_instance_group_managers.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_instance_group_managers.py index 1062275be377..487694590718 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_instance_group_managers.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_instance_group_managers.py @@ -66,6 +66,13 @@ ) from google.cloud.compute_v1.types import compute +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -336,6 +343,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = InstanceGroupManagersClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = InstanceGroupManagersClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -10815,10 +10865,14 @@ def test_abandon_instances_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.InstanceGroupManagersRestInterceptor, "post_abandon_instances" ) as post, mock.patch.object( + transports.InstanceGroupManagersRestInterceptor, + "post_abandon_instances_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.InstanceGroupManagersRestInterceptor, "pre_abandon_instances" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.AbandonInstancesInstanceGroupManagerRequest.pb( compute.AbandonInstancesInstanceGroupManagerRequest() ) @@ -10842,6 +10896,7 @@ def test_abandon_instances_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.abandon_instances( request, @@ -10853,6 +10908,7 @@ def test_abandon_instances_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_aggregated_list_rest_bad_request( @@ -10945,10 +11001,14 @@ def test_aggregated_list_rest_interceptors(null_interceptor): ) as 
transcode, mock.patch.object( transports.InstanceGroupManagersRestInterceptor, "post_aggregated_list" ) as post, mock.patch.object( + transports.InstanceGroupManagersRestInterceptor, + "post_aggregated_list_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.InstanceGroupManagersRestInterceptor, "pre_aggregated_list" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.AggregatedListInstanceGroupManagersRequest.pb( compute.AggregatedListInstanceGroupManagersRequest() ) @@ -10974,6 +11034,10 @@ def test_aggregated_list_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.InstanceGroupManagerAggregatedList() + post_with_metadata.return_value = ( + compute.InstanceGroupManagerAggregatedList(), + metadata, + ) client.aggregated_list( request, @@ -10985,6 +11049,7 @@ def test_aggregated_list_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_apply_updates_to_instances_rest_bad_request( @@ -11208,11 +11273,15 @@ def test_apply_updates_to_instances_rest_interceptors(null_interceptor): transports.InstanceGroupManagersRestInterceptor, "post_apply_updates_to_instances", ) as post, mock.patch.object( + transports.InstanceGroupManagersRestInterceptor, + "post_apply_updates_to_instances_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.InstanceGroupManagersRestInterceptor, "pre_apply_updates_to_instances", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.ApplyUpdatesToInstancesInstanceGroupManagerRequest.pb( compute.ApplyUpdatesToInstancesInstanceGroupManagerRequest() ) @@ -11236,6 +11305,7 @@ def test_apply_updates_to_instances_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.apply_updates_to_instances( request, @@ -11247,6 +11317,7 @@ def test_apply_updates_to_instances_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_instances_rest_bad_request( @@ -11478,10 +11549,14 @@ def test_create_instances_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.InstanceGroupManagersRestInterceptor, "post_create_instances" ) as post, mock.patch.object( + transports.InstanceGroupManagersRestInterceptor, + "post_create_instances_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.InstanceGroupManagersRestInterceptor, "pre_create_instances" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.CreateInstancesInstanceGroupManagerRequest.pb( compute.CreateInstancesInstanceGroupManagerRequest() ) @@ -11505,6 +11580,7 @@ def test_create_instances_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.create_instances( request, @@ -11516,6 +11592,7 @@ def test_create_instances_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_rest_bad_request( @@ -11650,10 +11727,13 @@ def test_delete_rest_interceptors(null_interceptor): ) as 
transcode, mock.patch.object( transports.InstanceGroupManagersRestInterceptor, "post_delete" ) as post, mock.patch.object( + transports.InstanceGroupManagersRestInterceptor, "post_delete_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.InstanceGroupManagersRestInterceptor, "pre_delete" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.DeleteInstanceGroupManagerRequest.pb( compute.DeleteInstanceGroupManagerRequest() ) @@ -11677,6 +11757,7 @@ def test_delete_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.delete( request, @@ -11688,6 +11769,7 @@ def test_delete_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_instances_rest_bad_request( @@ -11908,10 +11990,14 @@ def test_delete_instances_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.InstanceGroupManagersRestInterceptor, "post_delete_instances" ) as post, mock.patch.object( + transports.InstanceGroupManagersRestInterceptor, + "post_delete_instances_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.InstanceGroupManagersRestInterceptor, "pre_delete_instances" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.DeleteInstancesInstanceGroupManagerRequest.pb( compute.DeleteInstancesInstanceGroupManagerRequest() ) @@ -11935,6 +12021,7 @@ def test_delete_instances_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.delete_instances( request, @@ -11946,6 +12033,7 @@ def test_delete_instances_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_per_instance_configs_rest_bad_request( @@ -12168,11 +12256,15 @@ def test_delete_per_instance_configs_rest_interceptors(null_interceptor): transports.InstanceGroupManagersRestInterceptor, "post_delete_per_instance_configs", ) as post, mock.patch.object( + transports.InstanceGroupManagersRestInterceptor, + "post_delete_per_instance_configs_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.InstanceGroupManagersRestInterceptor, "pre_delete_per_instance_configs", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.DeletePerInstanceConfigsInstanceGroupManagerRequest.pb( compute.DeletePerInstanceConfigsInstanceGroupManagerRequest() ) @@ -12196,6 +12288,7 @@ def test_delete_per_instance_configs_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.delete_per_instance_configs( request, @@ -12207,6 +12300,7 @@ def test_delete_per_instance_configs_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_rest_bad_request(request_type=compute.GetInstanceGroupManagerRequest): @@ -12336,10 +12430,13 @@ def test_get_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.InstanceGroupManagersRestInterceptor, 
"post_get" ) as post, mock.patch.object( + transports.InstanceGroupManagersRestInterceptor, "post_get_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.InstanceGroupManagersRestInterceptor, "pre_get" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.GetInstanceGroupManagerRequest.pb( compute.GetInstanceGroupManagerRequest() ) @@ -12365,6 +12462,7 @@ def test_get_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.InstanceGroupManager() + post_with_metadata.return_value = compute.InstanceGroupManager(), metadata client.get( request, @@ -12376,6 +12474,7 @@ def test_get_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_insert_rest_bad_request( @@ -12660,10 +12759,13 @@ def test_insert_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.InstanceGroupManagersRestInterceptor, "post_insert" ) as post, mock.patch.object( + transports.InstanceGroupManagersRestInterceptor, "post_insert_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.InstanceGroupManagersRestInterceptor, "pre_insert" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.InsertInstanceGroupManagerRequest.pb( compute.InsertInstanceGroupManagerRequest() ) @@ -12687,6 +12789,7 @@ def test_insert_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.insert( request, @@ -12698,6 +12801,7 @@ def test_insert_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_rest_bad_request(request_type=compute.ListInstanceGroupManagersRequest): @@ -12786,10 +12890,13 @@ def test_list_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.InstanceGroupManagersRestInterceptor, "post_list" ) as post, mock.patch.object( + transports.InstanceGroupManagersRestInterceptor, "post_list_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.InstanceGroupManagersRestInterceptor, "pre_list" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.ListInstanceGroupManagersRequest.pb( compute.ListInstanceGroupManagersRequest() ) @@ -12815,6 +12922,7 @@ def test_list_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.InstanceGroupManagerList() + post_with_metadata.return_value = compute.InstanceGroupManagerList(), metadata client.list( request, @@ -12826,6 +12934,7 @@ def test_list_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_errors_rest_bad_request( @@ -12918,10 +13027,14 @@ def test_list_errors_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.InstanceGroupManagersRestInterceptor, "post_list_errors" ) as post, mock.patch.object( + transports.InstanceGroupManagersRestInterceptor, + "post_list_errors_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.InstanceGroupManagersRestInterceptor, "pre_list_errors" ) as pre: pre.assert_not_called() post.assert_not_called() + 
post_with_metadata.assert_not_called() pb_message = compute.ListErrorsInstanceGroupManagersRequest.pb( compute.ListErrorsInstanceGroupManagersRequest() ) @@ -12947,6 +13060,10 @@ def test_list_errors_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.InstanceGroupManagersListErrorsResponse() + post_with_metadata.return_value = ( + compute.InstanceGroupManagersListErrorsResponse(), + metadata, + ) client.list_errors( request, @@ -12958,6 +13075,7 @@ def test_list_errors_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_managed_instances_rest_bad_request( @@ -13052,10 +13170,14 @@ def test_list_managed_instances_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.InstanceGroupManagersRestInterceptor, "post_list_managed_instances" ) as post, mock.patch.object( + transports.InstanceGroupManagersRestInterceptor, + "post_list_managed_instances_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.InstanceGroupManagersRestInterceptor, "pre_list_managed_instances" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.ListManagedInstancesInstanceGroupManagersRequest.pb( compute.ListManagedInstancesInstanceGroupManagersRequest() ) @@ -13083,6 +13205,10 @@ def test_list_managed_instances_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.InstanceGroupManagersListManagedInstancesResponse() + post_with_metadata.return_value = ( + compute.InstanceGroupManagersListManagedInstancesResponse(), + metadata, + ) client.list_managed_instances( request, @@ -13094,6 +13220,7 @@ def test_list_managed_instances_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_per_instance_configs_rest_bad_request( @@ -13189,10 +13316,14 @@ def test_list_per_instance_configs_rest_interceptors(null_interceptor): transports.InstanceGroupManagersRestInterceptor, "post_list_per_instance_configs", ) as post, mock.patch.object( + transports.InstanceGroupManagersRestInterceptor, + "post_list_per_instance_configs_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.InstanceGroupManagersRestInterceptor, "pre_list_per_instance_configs" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.ListPerInstanceConfigsInstanceGroupManagersRequest.pb( compute.ListPerInstanceConfigsInstanceGroupManagersRequest() ) @@ -13218,6 +13349,10 @@ def test_list_per_instance_configs_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.InstanceGroupManagersListPerInstanceConfigsResp() + post_with_metadata.return_value = ( + compute.InstanceGroupManagersListPerInstanceConfigsResp(), + metadata, + ) client.list_per_instance_configs( request, @@ -13229,6 +13364,7 @@ def test_list_per_instance_configs_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_patch_rest_bad_request(request_type=compute.PatchInstanceGroupManagerRequest): @@ -13519,10 +13655,13 @@ def test_patch_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.InstanceGroupManagersRestInterceptor, "post_patch" ) as post, mock.patch.object( + 
transports.InstanceGroupManagersRestInterceptor, "post_patch_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.InstanceGroupManagersRestInterceptor, "pre_patch" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.PatchInstanceGroupManagerRequest.pb( compute.PatchInstanceGroupManagerRequest() ) @@ -13546,6 +13685,7 @@ def test_patch_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.patch( request, @@ -13557,6 +13697,7 @@ def test_patch_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_patch_per_instance_configs_rest_bad_request( @@ -13789,11 +13930,15 @@ def test_patch_per_instance_configs_rest_interceptors(null_interceptor): transports.InstanceGroupManagersRestInterceptor, "post_patch_per_instance_configs", ) as post, mock.patch.object( + transports.InstanceGroupManagersRestInterceptor, + "post_patch_per_instance_configs_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.InstanceGroupManagersRestInterceptor, "pre_patch_per_instance_configs", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.PatchPerInstanceConfigsInstanceGroupManagerRequest.pb( compute.PatchPerInstanceConfigsInstanceGroupManagerRequest() ) @@ -13817,6 +13962,7 @@ def test_patch_per_instance_configs_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.patch_per_instance_configs( request, @@ -13828,6 +13974,7 @@ def test_patch_per_instance_configs_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_recreate_instances_rest_bad_request( @@ -14047,10 +14194,14 @@ def test_recreate_instances_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.InstanceGroupManagersRestInterceptor, "post_recreate_instances" ) as post, mock.patch.object( + transports.InstanceGroupManagersRestInterceptor, + "post_recreate_instances_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.InstanceGroupManagersRestInterceptor, "pre_recreate_instances" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.RecreateInstancesInstanceGroupManagerRequest.pb( compute.RecreateInstancesInstanceGroupManagerRequest() ) @@ -14074,6 +14225,7 @@ def test_recreate_instances_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.recreate_instances( request, @@ -14085,6 +14237,7 @@ def test_recreate_instances_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_resize_rest_bad_request( @@ -14219,10 +14372,13 @@ def test_resize_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.InstanceGroupManagersRestInterceptor, "post_resize" ) as post, mock.patch.object( + transports.InstanceGroupManagersRestInterceptor, "post_resize_with_metadata" + ) as post_with_metadata, mock.patch.object( 
transports.InstanceGroupManagersRestInterceptor, "pre_resize" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.ResizeInstanceGroupManagerRequest.pb( compute.ResizeInstanceGroupManagerRequest() ) @@ -14246,6 +14402,7 @@ def test_resize_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.resize( request, @@ -14257,6 +14414,7 @@ def test_resize_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_resume_instances_rest_bad_request( @@ -14476,10 +14634,14 @@ def test_resume_instances_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.InstanceGroupManagersRestInterceptor, "post_resume_instances" ) as post, mock.patch.object( + transports.InstanceGroupManagersRestInterceptor, + "post_resume_instances_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.InstanceGroupManagersRestInterceptor, "pre_resume_instances" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.ResumeInstancesInstanceGroupManagerRequest.pb( compute.ResumeInstancesInstanceGroupManagerRequest() ) @@ -14503,6 +14665,7 @@ def test_resume_instances_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.resume_instances( request, @@ -14514,6 +14677,7 @@ def test_resume_instances_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_set_instance_template_rest_bad_request( @@ -14733,10 +14897,14 @@ def test_set_instance_template_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.InstanceGroupManagersRestInterceptor, "post_set_instance_template" ) as post, mock.patch.object( + transports.InstanceGroupManagersRestInterceptor, + "post_set_instance_template_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.InstanceGroupManagersRestInterceptor, "pre_set_instance_template" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.SetInstanceTemplateInstanceGroupManagerRequest.pb( compute.SetInstanceTemplateInstanceGroupManagerRequest() ) @@ -14760,6 +14928,7 @@ def test_set_instance_template_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.set_instance_template( request, @@ -14771,6 +14940,7 @@ def test_set_instance_template_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_set_target_pools_rest_bad_request( @@ -14991,10 +15161,14 @@ def test_set_target_pools_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.InstanceGroupManagersRestInterceptor, "post_set_target_pools" ) as post, mock.patch.object( + transports.InstanceGroupManagersRestInterceptor, + "post_set_target_pools_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.InstanceGroupManagersRestInterceptor, "pre_set_target_pools" ) as pre: pre.assert_not_called() 
post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.SetTargetPoolsInstanceGroupManagerRequest.pb( compute.SetTargetPoolsInstanceGroupManagerRequest() ) @@ -15018,6 +15192,7 @@ def test_set_target_pools_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.set_target_pools( request, @@ -15029,6 +15204,7 @@ def test_set_target_pools_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_start_instances_rest_bad_request( @@ -15248,10 +15424,14 @@ def test_start_instances_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.InstanceGroupManagersRestInterceptor, "post_start_instances" ) as post, mock.patch.object( + transports.InstanceGroupManagersRestInterceptor, + "post_start_instances_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.InstanceGroupManagersRestInterceptor, "pre_start_instances" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.StartInstancesInstanceGroupManagerRequest.pb( compute.StartInstancesInstanceGroupManagerRequest() ) @@ -15275,6 +15455,7 @@ def test_start_instances_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.start_instances( request, @@ -15286,6 +15467,7 @@ def test_start_instances_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_stop_instances_rest_bad_request( @@ -15506,10 +15688,14 @@ def test_stop_instances_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.InstanceGroupManagersRestInterceptor, "post_stop_instances" ) as post, mock.patch.object( + transports.InstanceGroupManagersRestInterceptor, + "post_stop_instances_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.InstanceGroupManagersRestInterceptor, "pre_stop_instances" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.StopInstancesInstanceGroupManagerRequest.pb( compute.StopInstancesInstanceGroupManagerRequest() ) @@ -15533,6 +15719,7 @@ def test_stop_instances_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.stop_instances( request, @@ -15544,6 +15731,7 @@ def test_stop_instances_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_suspend_instances_rest_bad_request( @@ -15764,10 +15952,14 @@ def test_suspend_instances_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.InstanceGroupManagersRestInterceptor, "post_suspend_instances" ) as post, mock.patch.object( + transports.InstanceGroupManagersRestInterceptor, + "post_suspend_instances_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.InstanceGroupManagersRestInterceptor, "pre_suspend_instances" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = 
compute.SuspendInstancesInstanceGroupManagerRequest.pb( compute.SuspendInstancesInstanceGroupManagerRequest() ) @@ -15791,6 +15983,7 @@ def test_suspend_instances_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.suspend_instances( request, @@ -15802,6 +15995,7 @@ def test_suspend_instances_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_per_instance_configs_rest_bad_request( @@ -16036,11 +16230,15 @@ def test_update_per_instance_configs_rest_interceptors(null_interceptor): transports.InstanceGroupManagersRestInterceptor, "post_update_per_instance_configs", ) as post, mock.patch.object( + transports.InstanceGroupManagersRestInterceptor, + "post_update_per_instance_configs_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.InstanceGroupManagersRestInterceptor, "pre_update_per_instance_configs", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.UpdatePerInstanceConfigsInstanceGroupManagerRequest.pb( compute.UpdatePerInstanceConfigsInstanceGroupManagerRequest() ) @@ -16064,6 +16262,7 @@ def test_update_per_instance_configs_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.update_per_instance_configs( request, @@ -16075,6 +16274,7 @@ def test_update_per_instance_configs_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_initialize_client_w_rest(): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_instance_groups.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_instance_groups.py index a6e30fd275a0..1ce1ee1381f0 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_instance_groups.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_instance_groups.py @@ -66,6 +66,13 @@ ) from google.cloud.compute_v1.types import compute +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -319,6 +326,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
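The hunk that follows adds a parametrized test pinning down how the client attaches credential information to auth errors: for 401/403/404 responses it appends the JSON-serialized result of credentials.get_cred_info() to error.details, while 500s and credentials without get_cred_info are left untouched. A minimal sketch of that contract, with the helper name and the json.dumps usage inferred from the test assertions rather than copied from the library source:

import json


def add_cred_info_for_auth_errors(error, credentials):
    # Only auth-related status codes get credential info attached; 500s stay as-is.
    if error.code not in (401, 403, 404):
        return
    # Credentials objects without a get_cred_info() method are skipped entirely.
    get_cred_info = getattr(credentials, "get_cred_info", None)
    if get_cred_info is None:
        return
    cred_info = get_cred_info()
    if cred_info:
        # The tests expect the serialized dict appended after any existing details.
        error.details.append(json.dumps(cred_info))
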
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = InstanceGroupsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = InstanceGroupsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -4382,10 +4432,13 @@ def test_add_instances_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.InstanceGroupsRestInterceptor, "post_add_instances" ) as post, mock.patch.object( + transports.InstanceGroupsRestInterceptor, "post_add_instances_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.InstanceGroupsRestInterceptor, "pre_add_instances" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.AddInstancesInstanceGroupRequest.pb( compute.AddInstancesInstanceGroupRequest() ) @@ -4409,6 +4462,7 @@ def test_add_instances_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.add_instances( request, @@ -4420,6 +4474,7 @@ def test_add_instances_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_aggregated_list_rest_bad_request( @@ -4512,10 +4567,13 @@ def test_aggregated_list_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.InstanceGroupsRestInterceptor, "post_aggregated_list" ) as post, mock.patch.object( + transports.InstanceGroupsRestInterceptor, "post_aggregated_list_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.InstanceGroupsRestInterceptor, "pre_aggregated_list" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.AggregatedListInstanceGroupsRequest.pb( compute.AggregatedListInstanceGroupsRequest() ) @@ -4541,6 +4599,10 @@ def test_aggregated_list_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.InstanceGroupAggregatedList() + post_with_metadata.return_value = ( + compute.InstanceGroupAggregatedList(), + metadata, + ) client.aggregated_list( request, @@ -4552,6 +4614,7 @@ def test_aggregated_list_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def 
test_delete_rest_bad_request(request_type=compute.DeleteInstanceGroupRequest): @@ -4684,10 +4747,13 @@ def test_delete_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.InstanceGroupsRestInterceptor, "post_delete" ) as post, mock.patch.object( + transports.InstanceGroupsRestInterceptor, "post_delete_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.InstanceGroupsRestInterceptor, "pre_delete" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.DeleteInstanceGroupRequest.pb( compute.DeleteInstanceGroupRequest() ) @@ -4711,6 +4777,7 @@ def test_delete_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.delete( request, @@ -4722,6 +4789,7 @@ def test_delete_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_rest_bad_request(request_type=compute.GetInstanceGroupRequest): @@ -4834,10 +4902,13 @@ def test_get_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.InstanceGroupsRestInterceptor, "post_get" ) as post, mock.patch.object( + transports.InstanceGroupsRestInterceptor, "post_get_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.InstanceGroupsRestInterceptor, "pre_get" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.GetInstanceGroupRequest.pb( compute.GetInstanceGroupRequest() ) @@ -4861,6 +4932,7 @@ def test_get_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.InstanceGroup() + post_with_metadata.return_value = compute.InstanceGroup(), metadata client.get( request, @@ -4872,6 +4944,7 @@ def test_get_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_insert_rest_bad_request(request_type=compute.InsertInstanceGroupRequest): @@ -5082,10 +5155,13 @@ def test_insert_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.InstanceGroupsRestInterceptor, "post_insert" ) as post, mock.patch.object( + transports.InstanceGroupsRestInterceptor, "post_insert_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.InstanceGroupsRestInterceptor, "pre_insert" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.InsertInstanceGroupRequest.pb( compute.InsertInstanceGroupRequest() ) @@ -5109,6 +5185,7 @@ def test_insert_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.insert( request, @@ -5120,6 +5197,7 @@ def test_insert_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_rest_bad_request(request_type=compute.ListInstanceGroupsRequest): @@ -5208,10 +5286,13 @@ def test_list_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.InstanceGroupsRestInterceptor, "post_list" ) as post, mock.patch.object( + transports.InstanceGroupsRestInterceptor, "post_list_with_metadata" + ) as post_with_metadata, mock.patch.object( 
transports.InstanceGroupsRestInterceptor, "pre_list" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.ListInstanceGroupsRequest.pb( compute.ListInstanceGroupsRequest() ) @@ -5235,6 +5316,7 @@ def test_list_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.InstanceGroupList() + post_with_metadata.return_value = compute.InstanceGroupList(), metadata client.list( request, @@ -5246,6 +5328,7 @@ def test_list_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_instances_rest_bad_request( @@ -5429,10 +5512,13 @@ def test_list_instances_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.InstanceGroupsRestInterceptor, "post_list_instances" ) as post, mock.patch.object( + transports.InstanceGroupsRestInterceptor, "post_list_instances_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.InstanceGroupsRestInterceptor, "pre_list_instances" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.ListInstancesInstanceGroupsRequest.pb( compute.ListInstancesInstanceGroupsRequest() ) @@ -5458,6 +5544,10 @@ def test_list_instances_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.InstanceGroupsListInstances() + post_with_metadata.return_value = ( + compute.InstanceGroupsListInstances(), + metadata, + ) client.list_instances( request, @@ -5469,6 +5559,7 @@ def test_list_instances_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_remove_instances_rest_bad_request( @@ -5688,10 +5779,13 @@ def test_remove_instances_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.InstanceGroupsRestInterceptor, "post_remove_instances" ) as post, mock.patch.object( + transports.InstanceGroupsRestInterceptor, "post_remove_instances_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.InstanceGroupsRestInterceptor, "pre_remove_instances" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.RemoveInstancesInstanceGroupRequest.pb( compute.RemoveInstancesInstanceGroupRequest() ) @@ -5715,6 +5809,7 @@ def test_remove_instances_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.remove_instances( request, @@ -5726,6 +5821,7 @@ def test_remove_instances_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_set_named_ports_rest_bad_request( @@ -5946,10 +6042,13 @@ def test_set_named_ports_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.InstanceGroupsRestInterceptor, "post_set_named_ports" ) as post, mock.patch.object( + transports.InstanceGroupsRestInterceptor, "post_set_named_ports_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.InstanceGroupsRestInterceptor, "pre_set_named_ports" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.SetNamedPortsInstanceGroupRequest.pb( 
compute.SetNamedPortsInstanceGroupRequest() ) @@ -5973,6 +6072,7 @@ def test_set_named_ports_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.set_named_ports( request, @@ -5984,6 +6084,7 @@ def test_set_named_ports_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_initialize_client_w_rest(): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_instance_settings_service.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_instance_settings_service.py index 70a016423df3..3d1659cae28c 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_instance_settings_service.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_instance_settings_service.py @@ -65,6 +65,13 @@ ) from google.cloud.compute_v1.types import compute +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -335,6 +342,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = InstanceSettingsServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = InstanceSettingsServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -1733,10 +1783,13 @@ def test_get_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.InstanceSettingsServiceRestInterceptor, "post_get" ) as post, mock.patch.object( + transports.InstanceSettingsServiceRestInterceptor, "post_get_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.InstanceSettingsServiceRestInterceptor, "pre_get" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.GetInstanceSettingRequest.pb( compute.GetInstanceSettingRequest() ) @@ -1760,6 +1813,7 @@ def test_get_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = 
compute.InstanceSettings() + post_with_metadata.return_value = compute.InstanceSettings(), metadata client.get( request, @@ -1771,6 +1825,7 @@ def test_get_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_patch_rest_bad_request(request_type=compute.PatchInstanceSettingRequest): @@ -1974,10 +2029,13 @@ def test_patch_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.InstanceSettingsServiceRestInterceptor, "post_patch" ) as post, mock.patch.object( + transports.InstanceSettingsServiceRestInterceptor, "post_patch_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.InstanceSettingsServiceRestInterceptor, "pre_patch" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.PatchInstanceSettingRequest.pb( compute.PatchInstanceSettingRequest() ) @@ -2001,6 +2059,7 @@ def test_patch_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.patch( request, @@ -2012,6 +2071,7 @@ def test_patch_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_initialize_client_w_rest(): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_instance_templates.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_instance_templates.py index 852af628bbae..9bb4e1faaa6b 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_instance_templates.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_instance_templates.py @@ -66,6 +66,13 @@ ) from google.cloud.compute_v1.types import compute +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -328,6 +335,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
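Across these files the regenerated interceptor tests now also patch a post_<rpc>_with_metadata hook alongside the existing post_<rpc> hook and expect it to return a (response, metadata) tuple. A sketch of how a user-supplied interceptor might override one of these hooks, assuming the signature implied by the mocks (post_with_metadata.return_value = compute.Operation(), metadata) and wiring the interceptor through the REST transport the same way these tests do:

import google.auth.credentials
from google.cloud import compute_v1
from google.cloud.compute_v1.services.instance_templates import transports


class MetadataLoggingInterceptor(transports.InstanceTemplatesRestInterceptor):
    def post_insert_with_metadata(self, response, metadata):
        # Receives the decoded response plus its metadata and must hand both back,
        # which lets callers inspect or rewrite either before the client returns.
        print("insert finished; response metadata:", metadata)
        return response, metadata


# Anonymous credentials keep the sketch self-contained; real code would rely on
# application default credentials or an explicit service account.
client = compute_v1.InstanceTemplatesClient(
    transport=transports.InstanceTemplatesRestTransport(
        credentials=google.auth.credentials.AnonymousCredentials(),
        interceptor=MetadataLoggingInterceptor(),
    )
)
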
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = InstanceTemplatesClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = InstanceTemplatesClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -3218,10 +3268,14 @@ def test_aggregated_list_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.InstanceTemplatesRestInterceptor, "post_aggregated_list" ) as post, mock.patch.object( + transports.InstanceTemplatesRestInterceptor, + "post_aggregated_list_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.InstanceTemplatesRestInterceptor, "pre_aggregated_list" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.AggregatedListInstanceTemplatesRequest.pb( compute.AggregatedListInstanceTemplatesRequest() ) @@ -3247,6 +3301,10 @@ def test_aggregated_list_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.InstanceTemplateAggregatedList() + post_with_metadata.return_value = ( + compute.InstanceTemplateAggregatedList(), + metadata, + ) client.aggregated_list( request, @@ -3258,6 +3316,7 @@ def test_aggregated_list_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_rest_bad_request(request_type=compute.DeleteInstanceTemplateRequest): @@ -3382,10 +3441,13 @@ def test_delete_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.InstanceTemplatesRestInterceptor, "post_delete" ) as post, mock.patch.object( + transports.InstanceTemplatesRestInterceptor, "post_delete_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.InstanceTemplatesRestInterceptor, "pre_delete" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.DeleteInstanceTemplateRequest.pb( compute.DeleteInstanceTemplateRequest() ) @@ -3409,6 +3471,7 @@ def test_delete_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.delete( request, @@ -3420,6 +3483,7 @@ def test_delete_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + 
post_with_metadata.assert_called_once() def test_get_rest_bad_request(request_type=compute.GetInstanceTemplateRequest): @@ -3516,10 +3580,13 @@ def test_get_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.InstanceTemplatesRestInterceptor, "post_get" ) as post, mock.patch.object( + transports.InstanceTemplatesRestInterceptor, "post_get_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.InstanceTemplatesRestInterceptor, "pre_get" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.GetInstanceTemplateRequest.pb( compute.GetInstanceTemplateRequest() ) @@ -3543,6 +3610,7 @@ def test_get_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.InstanceTemplate() + post_with_metadata.return_value = compute.InstanceTemplate(), metadata client.get( request, @@ -3554,6 +3622,7 @@ def test_get_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_iam_policy_rest_bad_request( @@ -3642,10 +3711,13 @@ def test_get_iam_policy_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.InstanceTemplatesRestInterceptor, "post_get_iam_policy" ) as post, mock.patch.object( + transports.InstanceTemplatesRestInterceptor, "post_get_iam_policy_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.InstanceTemplatesRestInterceptor, "pre_get_iam_policy" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.GetIamPolicyInstanceTemplateRequest.pb( compute.GetIamPolicyInstanceTemplateRequest() ) @@ -3669,6 +3741,7 @@ def test_get_iam_policy_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Policy() + post_with_metadata.return_value = compute.Policy(), metadata client.get_iam_policy( request, @@ -3680,6 +3753,7 @@ def test_get_iam_policy_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_insert_rest_bad_request(request_type=compute.InsertInstanceTemplateRequest): @@ -3871,6 +3945,7 @@ def test_insert_rest_call_success(request_type): "scheduling": { "automatic_restart": True, "availability_domain": 2002, + "host_error_timeout_seconds": 2811, "instance_termination_action": "instance_termination_action_value", "local_ssd_recovery_timeout": {"nanos": 543, "seconds": 751}, "location_hint": "location_hint_value", @@ -4074,10 +4149,13 @@ def test_insert_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.InstanceTemplatesRestInterceptor, "post_insert" ) as post, mock.patch.object( + transports.InstanceTemplatesRestInterceptor, "post_insert_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.InstanceTemplatesRestInterceptor, "pre_insert" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.InsertInstanceTemplateRequest.pb( compute.InsertInstanceTemplateRequest() ) @@ -4101,6 +4179,7 @@ def test_insert_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.insert( request, @@ -4112,6 +4191,7 @@ def test_insert_rest_interceptors(null_interceptor): pre.assert_called_once() 
post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_rest_bad_request(request_type=compute.ListInstanceTemplatesRequest): @@ -4200,10 +4280,13 @@ def test_list_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.InstanceTemplatesRestInterceptor, "post_list" ) as post, mock.patch.object( + transports.InstanceTemplatesRestInterceptor, "post_list_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.InstanceTemplatesRestInterceptor, "pre_list" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.ListInstanceTemplatesRequest.pb( compute.ListInstanceTemplatesRequest() ) @@ -4229,6 +4312,7 @@ def test_list_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.InstanceTemplateList() + post_with_metadata.return_value = compute.InstanceTemplateList(), metadata client.list( request, @@ -4240,6 +4324,7 @@ def test_list_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_set_iam_policy_rest_bad_request( @@ -4444,10 +4529,13 @@ def test_set_iam_policy_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.InstanceTemplatesRestInterceptor, "post_set_iam_policy" ) as post, mock.patch.object( + transports.InstanceTemplatesRestInterceptor, "post_set_iam_policy_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.InstanceTemplatesRestInterceptor, "pre_set_iam_policy" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.SetIamPolicyInstanceTemplateRequest.pb( compute.SetIamPolicyInstanceTemplateRequest() ) @@ -4471,6 +4559,7 @@ def test_set_iam_policy_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Policy() + post_with_metadata.return_value = compute.Policy(), metadata client.set_iam_policy( request, @@ -4482,6 +4571,7 @@ def test_set_iam_policy_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_test_iam_permissions_rest_bad_request( @@ -4644,10 +4734,14 @@ def test_test_iam_permissions_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.InstanceTemplatesRestInterceptor, "post_test_iam_permissions" ) as post, mock.patch.object( + transports.InstanceTemplatesRestInterceptor, + "post_test_iam_permissions_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.InstanceTemplatesRestInterceptor, "pre_test_iam_permissions" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.TestIamPermissionsInstanceTemplateRequest.pb( compute.TestIamPermissionsInstanceTemplateRequest() ) @@ -4673,6 +4767,7 @@ def test_test_iam_permissions_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.TestPermissionsResponse() + post_with_metadata.return_value = compute.TestPermissionsResponse(), metadata client.test_iam_permissions( request, @@ -4684,6 +4779,7 @@ def test_test_iam_permissions_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_initialize_client_w_rest(): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_instances.py 
b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_instances.py index 6f428a2c81bd..0f766e1861e6 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_instances.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_instances.py @@ -66,6 +66,13 @@ ) from google.cloud.compute_v1.types import compute +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -294,6 +301,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = InstancesClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = InstancesClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -19741,10 +19791,13 @@ def test_add_access_config_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.InstancesRestInterceptor, "post_add_access_config" ) as post, mock.patch.object( + transports.InstancesRestInterceptor, "post_add_access_config_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.InstancesRestInterceptor, "pre_add_access_config" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.AddAccessConfigInstanceRequest.pb( compute.AddAccessConfigInstanceRequest() ) @@ -19768,6 +19821,7 @@ def test_add_access_config_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.add_access_config( request, @@ -19779,6 +19833,7 @@ def test_add_access_config_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_add_resource_policies_rest_bad_request( @@ -19988,10 +20043,13 @@ def test_add_resource_policies_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.InstancesRestInterceptor, "post_add_resource_policies" ) as post, mock.patch.object( + transports.InstancesRestInterceptor, "post_add_resource_policies_with_metadata" + 
) as post_with_metadata, mock.patch.object( transports.InstancesRestInterceptor, "pre_add_resource_policies" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.AddResourcePoliciesInstanceRequest.pb( compute.AddResourcePoliciesInstanceRequest() ) @@ -20015,6 +20073,7 @@ def test_add_resource_policies_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.add_resource_policies( request, @@ -20026,6 +20085,7 @@ def test_add_resource_policies_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_aggregated_list_rest_bad_request( @@ -20116,10 +20176,13 @@ def test_aggregated_list_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.InstancesRestInterceptor, "post_aggregated_list" ) as post, mock.patch.object( + transports.InstancesRestInterceptor, "post_aggregated_list_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.InstancesRestInterceptor, "pre_aggregated_list" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.AggregatedListInstancesRequest.pb( compute.AggregatedListInstancesRequest() ) @@ -20145,6 +20208,7 @@ def test_aggregated_list_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.InstanceAggregatedList() + post_with_metadata.return_value = compute.InstanceAggregatedList(), metadata client.aggregated_list( request, @@ -20156,6 +20220,7 @@ def test_aggregated_list_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_attach_disk_rest_bad_request(request_type=compute.AttachDiskInstanceRequest): @@ -20401,10 +20466,13 @@ def test_attach_disk_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.InstancesRestInterceptor, "post_attach_disk" ) as post, mock.patch.object( + transports.InstancesRestInterceptor, "post_attach_disk_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.InstancesRestInterceptor, "pre_attach_disk" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.AttachDiskInstanceRequest.pb( compute.AttachDiskInstanceRequest() ) @@ -20428,6 +20496,7 @@ def test_attach_disk_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.attach_disk( request, @@ -20439,6 +20508,7 @@ def test_attach_disk_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_bulk_insert_rest_bad_request(request_type=compute.BulkInsertInstanceRequest): @@ -20626,6 +20696,7 @@ def test_bulk_insert_rest_call_success(request_type): "scheduling": { "automatic_restart": True, "availability_domain": 2002, + "host_error_timeout_seconds": 2811, "instance_termination_action": "instance_termination_action_value", "local_ssd_recovery_timeout": {"nanos": 543, "seconds": 751}, "location_hint": "location_hint_value", @@ -20824,10 +20895,13 @@ def test_bulk_insert_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( 
transports.InstancesRestInterceptor, "post_bulk_insert" ) as post, mock.patch.object( + transports.InstancesRestInterceptor, "post_bulk_insert_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.InstancesRestInterceptor, "pre_bulk_insert" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.BulkInsertInstanceRequest.pb( compute.BulkInsertInstanceRequest() ) @@ -20851,6 +20925,7 @@ def test_bulk_insert_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.bulk_insert( request, @@ -20862,6 +20937,7 @@ def test_bulk_insert_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_rest_bad_request(request_type=compute.DeleteInstanceRequest): @@ -20984,10 +21060,13 @@ def test_delete_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.InstancesRestInterceptor, "post_delete" ) as post, mock.patch.object( + transports.InstancesRestInterceptor, "post_delete_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.InstancesRestInterceptor, "pre_delete" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.DeleteInstanceRequest.pb(compute.DeleteInstanceRequest()) transcode.return_value = { "method": "post", @@ -21009,6 +21088,7 @@ def test_delete_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.delete( request, @@ -21020,6 +21100,7 @@ def test_delete_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_access_config_rest_bad_request( @@ -21144,10 +21225,13 @@ def test_delete_access_config_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.InstancesRestInterceptor, "post_delete_access_config" ) as post, mock.patch.object( + transports.InstancesRestInterceptor, "post_delete_access_config_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.InstancesRestInterceptor, "pre_delete_access_config" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.DeleteAccessConfigInstanceRequest.pb( compute.DeleteAccessConfigInstanceRequest() ) @@ -21171,6 +21255,7 @@ def test_delete_access_config_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.delete_access_config( request, @@ -21182,6 +21267,7 @@ def test_delete_access_config_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_detach_disk_rest_bad_request(request_type=compute.DetachDiskInstanceRequest): @@ -21304,10 +21390,13 @@ def test_detach_disk_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.InstancesRestInterceptor, "post_detach_disk" ) as post, mock.patch.object( + transports.InstancesRestInterceptor, "post_detach_disk_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.InstancesRestInterceptor, "pre_detach_disk" ) as pre: 
pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.DetachDiskInstanceRequest.pb( compute.DetachDiskInstanceRequest() ) @@ -21331,6 +21420,7 @@ def test_detach_disk_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.detach_disk( request, @@ -21342,6 +21432,7 @@ def test_detach_disk_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_rest_bad_request(request_type=compute.GetInstanceRequest): @@ -21474,10 +21565,13 @@ def test_get_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.InstancesRestInterceptor, "post_get" ) as post, mock.patch.object( + transports.InstancesRestInterceptor, "post_get_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.InstancesRestInterceptor, "pre_get" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.GetInstanceRequest.pb(compute.GetInstanceRequest()) transcode.return_value = { "method": "post", @@ -21499,6 +21593,7 @@ def test_get_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Instance() + post_with_metadata.return_value = compute.Instance(), metadata client.get( request, @@ -21510,6 +21605,7 @@ def test_get_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_effective_firewalls_rest_bad_request( @@ -21589,10 +21685,14 @@ def test_get_effective_firewalls_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.InstancesRestInterceptor, "post_get_effective_firewalls" ) as post, mock.patch.object( + transports.InstancesRestInterceptor, + "post_get_effective_firewalls_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.InstancesRestInterceptor, "pre_get_effective_firewalls" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.GetEffectiveFirewallsInstanceRequest.pb( compute.GetEffectiveFirewallsInstanceRequest() ) @@ -21618,6 +21718,10 @@ def test_get_effective_firewalls_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.InstancesGetEffectiveFirewallsResponse() + post_with_metadata.return_value = ( + compute.InstancesGetEffectiveFirewallsResponse(), + metadata, + ) client.get_effective_firewalls( request, @@ -21629,6 +21733,7 @@ def test_get_effective_firewalls_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_guest_attributes_rest_bad_request( @@ -21719,10 +21824,13 @@ def test_get_guest_attributes_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.InstancesRestInterceptor, "post_get_guest_attributes" ) as post, mock.patch.object( + transports.InstancesRestInterceptor, "post_get_guest_attributes_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.InstancesRestInterceptor, "pre_get_guest_attributes" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.GetGuestAttributesInstanceRequest.pb( compute.GetGuestAttributesInstanceRequest() ) @@ 
-21746,6 +21854,7 @@ def test_get_guest_attributes_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.GuestAttributes() + post_with_metadata.return_value = compute.GuestAttributes(), metadata client.get_guest_attributes( request, @@ -21757,6 +21866,7 @@ def test_get_guest_attributes_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_iam_policy_rest_bad_request( @@ -21843,10 +21953,13 @@ def test_get_iam_policy_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.InstancesRestInterceptor, "post_get_iam_policy" ) as post, mock.patch.object( + transports.InstancesRestInterceptor, "post_get_iam_policy_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.InstancesRestInterceptor, "pre_get_iam_policy" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.GetIamPolicyInstanceRequest.pb( compute.GetIamPolicyInstanceRequest() ) @@ -21870,6 +21983,7 @@ def test_get_iam_policy_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Policy() + post_with_metadata.return_value = compute.Policy(), metadata client.get_iam_policy( request, @@ -21881,6 +21995,7 @@ def test_get_iam_policy_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_screenshot_rest_bad_request( @@ -21965,10 +22080,13 @@ def test_get_screenshot_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.InstancesRestInterceptor, "post_get_screenshot" ) as post, mock.patch.object( + transports.InstancesRestInterceptor, "post_get_screenshot_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.InstancesRestInterceptor, "pre_get_screenshot" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.GetScreenshotInstanceRequest.pb( compute.GetScreenshotInstanceRequest() ) @@ -21992,6 +22110,7 @@ def test_get_screenshot_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Screenshot() + post_with_metadata.return_value = compute.Screenshot(), metadata client.get_screenshot( request, @@ -22003,6 +22122,7 @@ def test_get_screenshot_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_serial_port_output_rest_bad_request( @@ -22093,10 +22213,13 @@ def test_get_serial_port_output_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.InstancesRestInterceptor, "post_get_serial_port_output" ) as post, mock.patch.object( + transports.InstancesRestInterceptor, "post_get_serial_port_output_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.InstancesRestInterceptor, "pre_get_serial_port_output" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.GetSerialPortOutputInstanceRequest.pb( compute.GetSerialPortOutputInstanceRequest() ) @@ -22120,6 +22243,7 @@ def test_get_serial_port_output_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.SerialPortOutput() + post_with_metadata.return_value = compute.SerialPortOutput(), metadata 
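Each interceptor hunk above follows the same pattern: next to the existing pre_<method> and post_<method> hooks, the test now also patches a post_<method>_with_metadata hook and expects it to hand back a (response, metadata) tuple. Below is a minimal sketch of a user-defined interceptor built on that pattern; the hook signature and the interceptor= transport argument are assumptions inferred from how the tests mock these calls, not something this diff defines.

from google.auth import credentials as ga_credentials
from google.cloud import compute_v1
from google.cloud.compute_v1.services.instances import transports


class LoggingInstancesInterceptor(transports.InstancesRestInterceptor):
    """Hypothetical interceptor using the hooks exercised in the tests above."""

    def post_get_serial_port_output_with_metadata(self, response, metadata):
        # Assumed signature: receive the parsed response plus the response
        # metadata and return the same (response, metadata) tuple shape that
        # the tests assign to post_with_metadata.return_value.
        return response, metadata


# AnonymousCredentials keeps the sketch self-contained; real callers would rely
# on application default credentials. The interceptor= argument is assumed to
# be accepted by the generated REST transport, as in other GAPIC REST clients.
transport = transports.InstancesRestTransport(
    credentials=ga_credentials.AnonymousCredentials(),
    interceptor=LoggingInstancesInterceptor(),
)
client = compute_v1.InstancesClient(transport=transport)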
client.get_serial_port_output( request, @@ -22131,6 +22255,7 @@ def test_get_serial_port_output_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_shielded_instance_identity_rest_bad_request( @@ -22213,10 +22338,14 @@ def test_get_shielded_instance_identity_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.InstancesRestInterceptor, "post_get_shielded_instance_identity" ) as post, mock.patch.object( + transports.InstancesRestInterceptor, + "post_get_shielded_instance_identity_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.InstancesRestInterceptor, "pre_get_shielded_instance_identity" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.GetShieldedInstanceIdentityInstanceRequest.pb( compute.GetShieldedInstanceIdentityInstanceRequest() ) @@ -22242,6 +22371,7 @@ def test_get_shielded_instance_identity_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.ShieldedInstanceIdentity() + post_with_metadata.return_value = compute.ShieldedInstanceIdentity(), metadata client.get_shielded_instance_identity( request, @@ -22253,6 +22383,7 @@ def test_get_shielded_instance_identity_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_insert_rest_bad_request(request_type=compute.InsertInstanceRequest): @@ -22457,6 +22588,7 @@ def test_insert_rest_call_success(request_type): "scheduling": { "automatic_restart": True, "availability_domain": 2002, + "host_error_timeout_seconds": 2811, "instance_termination_action": "instance_termination_action_value", "local_ssd_recovery_timeout": {"nanos": 543, "seconds": 751}, "location_hint": "location_hint_value", @@ -22646,10 +22778,13 @@ def test_insert_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.InstancesRestInterceptor, "post_insert" ) as post, mock.patch.object( + transports.InstancesRestInterceptor, "post_insert_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.InstancesRestInterceptor, "pre_insert" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.InsertInstanceRequest.pb(compute.InsertInstanceRequest()) transcode.return_value = { "method": "post", @@ -22671,6 +22806,7 @@ def test_insert_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.insert( request, @@ -22682,6 +22818,7 @@ def test_insert_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_rest_bad_request(request_type=compute.ListInstancesRequest): @@ -22768,10 +22905,13 @@ def test_list_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.InstancesRestInterceptor, "post_list" ) as post, mock.patch.object( + transports.InstancesRestInterceptor, "post_list_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.InstancesRestInterceptor, "pre_list" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.ListInstancesRequest.pb(compute.ListInstancesRequest()) transcode.return_value = { "method": 
"post", @@ -22793,6 +22933,7 @@ def test_list_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.InstanceList() + post_with_metadata.return_value = compute.InstanceList(), metadata client.list( request, @@ -22804,6 +22945,7 @@ def test_list_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_referrers_rest_bad_request( @@ -22892,10 +23034,13 @@ def test_list_referrers_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.InstancesRestInterceptor, "post_list_referrers" ) as post, mock.patch.object( + transports.InstancesRestInterceptor, "post_list_referrers_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.InstancesRestInterceptor, "pre_list_referrers" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.ListReferrersInstancesRequest.pb( compute.ListReferrersInstancesRequest() ) @@ -22921,6 +23066,7 @@ def test_list_referrers_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.InstanceListReferrers() + post_with_metadata.return_value = compute.InstanceListReferrers(), metadata client.list_referrers( request, @@ -22932,6 +23078,7 @@ def test_list_referrers_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_perform_maintenance_rest_bad_request( @@ -23056,10 +23203,13 @@ def test_perform_maintenance_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.InstancesRestInterceptor, "post_perform_maintenance" ) as post, mock.patch.object( + transports.InstancesRestInterceptor, "post_perform_maintenance_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.InstancesRestInterceptor, "pre_perform_maintenance" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.PerformMaintenanceInstanceRequest.pb( compute.PerformMaintenanceInstanceRequest() ) @@ -23083,6 +23233,7 @@ def test_perform_maintenance_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.perform_maintenance( request, @@ -23094,6 +23245,7 @@ def test_perform_maintenance_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_remove_resource_policies_rest_bad_request( @@ -23303,10 +23455,14 @@ def test_remove_resource_policies_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.InstancesRestInterceptor, "post_remove_resource_policies" ) as post, mock.patch.object( + transports.InstancesRestInterceptor, + "post_remove_resource_policies_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.InstancesRestInterceptor, "pre_remove_resource_policies" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.RemoveResourcePoliciesInstanceRequest.pb( compute.RemoveResourcePoliciesInstanceRequest() ) @@ -23330,6 +23486,7 @@ def test_remove_resource_policies_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = 
compute.Operation(), metadata client.remove_resource_policies( request, @@ -23341,6 +23498,7 @@ def test_remove_resource_policies_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_reset_rest_bad_request(request_type=compute.ResetInstanceRequest): @@ -23463,10 +23621,13 @@ def test_reset_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.InstancesRestInterceptor, "post_reset" ) as post, mock.patch.object( + transports.InstancesRestInterceptor, "post_reset_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.InstancesRestInterceptor, "pre_reset" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.ResetInstanceRequest.pb(compute.ResetInstanceRequest()) transcode.return_value = { "method": "post", @@ -23488,6 +23649,7 @@ def test_reset_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.reset( request, @@ -23499,6 +23661,7 @@ def test_reset_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_resume_rest_bad_request(request_type=compute.ResumeInstanceRequest): @@ -23621,10 +23784,13 @@ def test_resume_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.InstancesRestInterceptor, "post_resume" ) as post, mock.patch.object( + transports.InstancesRestInterceptor, "post_resume_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.InstancesRestInterceptor, "pre_resume" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.ResumeInstanceRequest.pb(compute.ResumeInstanceRequest()) transcode.return_value = { "method": "post", @@ -23646,6 +23812,7 @@ def test_resume_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.resume( request, @@ -23657,6 +23824,7 @@ def test_resume_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_send_diagnostic_interrupt_rest_bad_request( @@ -23736,10 +23904,14 @@ def test_send_diagnostic_interrupt_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.InstancesRestInterceptor, "post_send_diagnostic_interrupt" ) as post, mock.patch.object( + transports.InstancesRestInterceptor, + "post_send_diagnostic_interrupt_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.InstancesRestInterceptor, "pre_send_diagnostic_interrupt" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.SendDiagnosticInterruptInstanceRequest.pb( compute.SendDiagnosticInterruptInstanceRequest() ) @@ -23765,6 +23937,10 @@ def test_send_diagnostic_interrupt_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.SendDiagnosticInterruptInstanceResponse() + post_with_metadata.return_value = ( + compute.SendDiagnosticInterruptInstanceResponse(), + metadata, + ) client.send_diagnostic_interrupt( request, @@ -23776,6 +23952,7 @@ def 
test_send_diagnostic_interrupt_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_set_deletion_protection_rest_bad_request( @@ -23900,10 +24077,14 @@ def test_set_deletion_protection_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.InstancesRestInterceptor, "post_set_deletion_protection" ) as post, mock.patch.object( + transports.InstancesRestInterceptor, + "post_set_deletion_protection_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.InstancesRestInterceptor, "pre_set_deletion_protection" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.SetDeletionProtectionInstanceRequest.pb( compute.SetDeletionProtectionInstanceRequest() ) @@ -23927,6 +24108,7 @@ def test_set_deletion_protection_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.set_deletion_protection( request, @@ -23938,6 +24120,7 @@ def test_set_deletion_protection_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_set_disk_auto_delete_rest_bad_request( @@ -24062,10 +24245,13 @@ def test_set_disk_auto_delete_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.InstancesRestInterceptor, "post_set_disk_auto_delete" ) as post, mock.patch.object( + transports.InstancesRestInterceptor, "post_set_disk_auto_delete_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.InstancesRestInterceptor, "pre_set_disk_auto_delete" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.SetDiskAutoDeleteInstanceRequest.pb( compute.SetDiskAutoDeleteInstanceRequest() ) @@ -24089,6 +24275,7 @@ def test_set_disk_auto_delete_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.set_disk_auto_delete( request, @@ -24100,6 +24287,7 @@ def test_set_disk_auto_delete_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_set_iam_policy_rest_bad_request( @@ -24302,10 +24490,13 @@ def test_set_iam_policy_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.InstancesRestInterceptor, "post_set_iam_policy" ) as post, mock.patch.object( + transports.InstancesRestInterceptor, "post_set_iam_policy_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.InstancesRestInterceptor, "pre_set_iam_policy" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.SetIamPolicyInstanceRequest.pb( compute.SetIamPolicyInstanceRequest() ) @@ -24329,6 +24520,7 @@ def test_set_iam_policy_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Policy() + post_with_metadata.return_value = compute.Policy(), metadata client.set_iam_policy( request, @@ -24340,6 +24532,7 @@ def test_set_iam_policy_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def 
test_set_labels_rest_bad_request(request_type=compute.SetLabelsInstanceRequest): @@ -24543,10 +24736,13 @@ def test_set_labels_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.InstancesRestInterceptor, "post_set_labels" ) as post, mock.patch.object( + transports.InstancesRestInterceptor, "post_set_labels_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.InstancesRestInterceptor, "pre_set_labels" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.SetLabelsInstanceRequest.pb( compute.SetLabelsInstanceRequest() ) @@ -24570,6 +24766,7 @@ def test_set_labels_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.set_labels( request, @@ -24581,6 +24778,7 @@ def test_set_labels_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_set_machine_resources_rest_bad_request( @@ -24792,10 +24990,13 @@ def test_set_machine_resources_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.InstancesRestInterceptor, "post_set_machine_resources" ) as post, mock.patch.object( + transports.InstancesRestInterceptor, "post_set_machine_resources_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.InstancesRestInterceptor, "pre_set_machine_resources" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.SetMachineResourcesInstanceRequest.pb( compute.SetMachineResourcesInstanceRequest() ) @@ -24819,6 +25020,7 @@ def test_set_machine_resources_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.set_machine_resources( request, @@ -24830,6 +25032,7 @@ def test_set_machine_resources_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_set_machine_type_rest_bad_request( @@ -25039,10 +25242,13 @@ def test_set_machine_type_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.InstancesRestInterceptor, "post_set_machine_type" ) as post, mock.patch.object( + transports.InstancesRestInterceptor, "post_set_machine_type_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.InstancesRestInterceptor, "pre_set_machine_type" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.SetMachineTypeInstanceRequest.pb( compute.SetMachineTypeInstanceRequest() ) @@ -25066,6 +25272,7 @@ def test_set_machine_type_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.set_machine_type( request, @@ -25077,6 +25284,7 @@ def test_set_machine_type_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_set_metadata_rest_bad_request(request_type=compute.SetMetadataInstanceRequest): @@ -25271,10 +25479,13 @@ def test_set_metadata_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.InstancesRestInterceptor, 
"post_set_metadata" ) as post, mock.patch.object( + transports.InstancesRestInterceptor, "post_set_metadata_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.InstancesRestInterceptor, "pre_set_metadata" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.SetMetadataInstanceRequest.pb( compute.SetMetadataInstanceRequest() ) @@ -25298,6 +25509,7 @@ def test_set_metadata_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.set_metadata( request, @@ -25309,6 +25521,7 @@ def test_set_metadata_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_set_min_cpu_platform_rest_bad_request( @@ -25518,10 +25731,13 @@ def test_set_min_cpu_platform_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.InstancesRestInterceptor, "post_set_min_cpu_platform" ) as post, mock.patch.object( + transports.InstancesRestInterceptor, "post_set_min_cpu_platform_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.InstancesRestInterceptor, "pre_set_min_cpu_platform" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.SetMinCpuPlatformInstanceRequest.pb( compute.SetMinCpuPlatformInstanceRequest() ) @@ -25545,6 +25761,7 @@ def test_set_min_cpu_platform_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.set_min_cpu_platform( request, @@ -25556,6 +25773,7 @@ def test_set_min_cpu_platform_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_set_name_rest_bad_request(request_type=compute.SetNameInstanceRequest): @@ -25757,10 +25975,13 @@ def test_set_name_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.InstancesRestInterceptor, "post_set_name" ) as post, mock.patch.object( + transports.InstancesRestInterceptor, "post_set_name_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.InstancesRestInterceptor, "pre_set_name" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.SetNameInstanceRequest.pb(compute.SetNameInstanceRequest()) transcode.return_value = { "method": "post", @@ -25782,6 +26003,7 @@ def test_set_name_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.set_name( request, @@ -25793,6 +26015,7 @@ def test_set_name_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_set_scheduling_rest_bad_request( @@ -25837,6 +26060,7 @@ def test_set_scheduling_rest_call_success(request_type): request_init["scheduling_resource"] = { "automatic_restart": True, "availability_domain": 2002, + "host_error_timeout_seconds": 2811, "instance_termination_action": "instance_termination_action_value", "local_ssd_recovery_timeout": {"nanos": 543, "seconds": 751}, "location_hint": "location_hint_value", @@ -26005,10 +26229,13 @@ def 
test_set_scheduling_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.InstancesRestInterceptor, "post_set_scheduling" ) as post, mock.patch.object( + transports.InstancesRestInterceptor, "post_set_scheduling_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.InstancesRestInterceptor, "pre_set_scheduling" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.SetSchedulingInstanceRequest.pb( compute.SetSchedulingInstanceRequest() ) @@ -26032,6 +26259,7 @@ def test_set_scheduling_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.set_scheduling( request, @@ -26043,6 +26271,7 @@ def test_set_scheduling_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_set_security_policy_rest_bad_request( @@ -26256,10 +26485,13 @@ def test_set_security_policy_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.InstancesRestInterceptor, "post_set_security_policy" ) as post, mock.patch.object( + transports.InstancesRestInterceptor, "post_set_security_policy_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.InstancesRestInterceptor, "pre_set_security_policy" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.SetSecurityPolicyInstanceRequest.pb( compute.SetSecurityPolicyInstanceRequest() ) @@ -26283,6 +26515,7 @@ def test_set_security_policy_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.set_security_policy( request, @@ -26294,6 +26527,7 @@ def test_set_security_policy_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_set_service_account_rest_bad_request( @@ -26504,10 +26738,13 @@ def test_set_service_account_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.InstancesRestInterceptor, "post_set_service_account" ) as post, mock.patch.object( + transports.InstancesRestInterceptor, "post_set_service_account_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.InstancesRestInterceptor, "pre_set_service_account" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.SetServiceAccountInstanceRequest.pb( compute.SetServiceAccountInstanceRequest() ) @@ -26531,6 +26768,7 @@ def test_set_service_account_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.set_service_account( request, @@ -26542,6 +26780,7 @@ def test_set_service_account_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_set_shielded_instance_integrity_policy_rest_bad_request( @@ -26752,11 +26991,15 @@ def test_set_shielded_instance_integrity_policy_rest_interceptors(null_intercept transports.InstancesRestInterceptor, "post_set_shielded_instance_integrity_policy", ) as post, mock.patch.object( + transports.InstancesRestInterceptor, + 
"post_set_shielded_instance_integrity_policy_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.InstancesRestInterceptor, "pre_set_shielded_instance_integrity_policy", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.SetShieldedInstanceIntegrityPolicyInstanceRequest.pb( compute.SetShieldedInstanceIntegrityPolicyInstanceRequest() ) @@ -26780,6 +27023,7 @@ def test_set_shielded_instance_integrity_policy_rest_interceptors(null_intercept ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.set_shielded_instance_integrity_policy( request, @@ -26791,6 +27035,7 @@ def test_set_shielded_instance_integrity_policy_rest_interceptors(null_intercept pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_set_tags_rest_bad_request(request_type=compute.SetTagsInstanceRequest): @@ -26984,10 +27229,13 @@ def test_set_tags_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.InstancesRestInterceptor, "post_set_tags" ) as post, mock.patch.object( + transports.InstancesRestInterceptor, "post_set_tags_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.InstancesRestInterceptor, "pre_set_tags" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.SetTagsInstanceRequest.pb(compute.SetTagsInstanceRequest()) transcode.return_value = { "method": "post", @@ -27009,6 +27257,7 @@ def test_set_tags_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.set_tags( request, @@ -27020,6 +27269,7 @@ def test_set_tags_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_simulate_maintenance_event_rest_bad_request( @@ -27144,10 +27394,14 @@ def test_simulate_maintenance_event_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.InstancesRestInterceptor, "post_simulate_maintenance_event" ) as post, mock.patch.object( + transports.InstancesRestInterceptor, + "post_simulate_maintenance_event_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.InstancesRestInterceptor, "pre_simulate_maintenance_event" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.SimulateMaintenanceEventInstanceRequest.pb( compute.SimulateMaintenanceEventInstanceRequest() ) @@ -27171,6 +27425,7 @@ def test_simulate_maintenance_event_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.simulate_maintenance_event( request, @@ -27182,6 +27437,7 @@ def test_simulate_maintenance_event_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_start_rest_bad_request(request_type=compute.StartInstanceRequest): @@ -27304,10 +27560,13 @@ def test_start_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.InstancesRestInterceptor, "post_start" ) as post, mock.patch.object( + transports.InstancesRestInterceptor, "post_start_with_metadata" + ) 
as post_with_metadata, mock.patch.object( transports.InstancesRestInterceptor, "pre_start" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.StartInstanceRequest.pb(compute.StartInstanceRequest()) transcode.return_value = { "method": "post", @@ -27329,6 +27588,7 @@ def test_start_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.start( request, @@ -27340,6 +27600,7 @@ def test_start_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_start_with_encryption_key_rest_bad_request( @@ -27560,10 +27821,14 @@ def test_start_with_encryption_key_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.InstancesRestInterceptor, "post_start_with_encryption_key" ) as post, mock.patch.object( + transports.InstancesRestInterceptor, + "post_start_with_encryption_key_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.InstancesRestInterceptor, "pre_start_with_encryption_key" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.StartWithEncryptionKeyInstanceRequest.pb( compute.StartWithEncryptionKeyInstanceRequest() ) @@ -27587,6 +27852,7 @@ def test_start_with_encryption_key_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.start_with_encryption_key( request, @@ -27598,6 +27864,7 @@ def test_start_with_encryption_key_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_stop_rest_bad_request(request_type=compute.StopInstanceRequest): @@ -27720,10 +27987,13 @@ def test_stop_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.InstancesRestInterceptor, "post_stop" ) as post, mock.patch.object( + transports.InstancesRestInterceptor, "post_stop_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.InstancesRestInterceptor, "pre_stop" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.StopInstanceRequest.pb(compute.StopInstanceRequest()) transcode.return_value = { "method": "post", @@ -27745,6 +28015,7 @@ def test_stop_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.stop( request, @@ -27756,6 +28027,7 @@ def test_stop_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_suspend_rest_bad_request(request_type=compute.SuspendInstanceRequest): @@ -27878,10 +28150,13 @@ def test_suspend_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.InstancesRestInterceptor, "post_suspend" ) as post, mock.patch.object( + transports.InstancesRestInterceptor, "post_suspend_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.InstancesRestInterceptor, "pre_suspend" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = 
compute.SuspendInstanceRequest.pb(compute.SuspendInstanceRequest()) transcode.return_value = { "method": "post", @@ -27903,6 +28178,7 @@ def test_suspend_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.suspend( request, @@ -27914,6 +28190,7 @@ def test_suspend_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_test_iam_permissions_rest_bad_request( @@ -28074,10 +28351,13 @@ def test_test_iam_permissions_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.InstancesRestInterceptor, "post_test_iam_permissions" ) as post, mock.patch.object( + transports.InstancesRestInterceptor, "post_test_iam_permissions_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.InstancesRestInterceptor, "pre_test_iam_permissions" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.TestIamPermissionsInstanceRequest.pb( compute.TestIamPermissionsInstanceRequest() ) @@ -28103,6 +28383,7 @@ def test_test_iam_permissions_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.TestPermissionsResponse() + post_with_metadata.return_value = compute.TestPermissionsResponse(), metadata client.test_iam_permissions( request, @@ -28114,6 +28395,7 @@ def test_test_iam_permissions_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_rest_bad_request(request_type=compute.UpdateInstanceRequest): @@ -28318,6 +28600,7 @@ def test_update_rest_call_success(request_type): "scheduling": { "automatic_restart": True, "availability_domain": 2002, + "host_error_timeout_seconds": 2811, "instance_termination_action": "instance_termination_action_value", "local_ssd_recovery_timeout": {"nanos": 543, "seconds": 751}, "location_hint": "location_hint_value", @@ -28507,10 +28790,13 @@ def test_update_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.InstancesRestInterceptor, "post_update" ) as post, mock.patch.object( + transports.InstancesRestInterceptor, "post_update_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.InstancesRestInterceptor, "pre_update" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.UpdateInstanceRequest.pb(compute.UpdateInstanceRequest()) transcode.return_value = { "method": "post", @@ -28532,6 +28818,7 @@ def test_update_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.update( request, @@ -28543,6 +28830,7 @@ def test_update_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_access_config_rest_bad_request( @@ -28750,10 +29038,13 @@ def test_update_access_config_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.InstancesRestInterceptor, "post_update_access_config" ) as post, mock.patch.object( + transports.InstancesRestInterceptor, "post_update_access_config_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.InstancesRestInterceptor, 
"pre_update_access_config" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.UpdateAccessConfigInstanceRequest.pb( compute.UpdateAccessConfigInstanceRequest() ) @@ -28777,6 +29068,7 @@ def test_update_access_config_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.update_access_config( request, @@ -28788,6 +29080,7 @@ def test_update_access_config_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_display_device_rest_bad_request( @@ -28984,10 +29277,13 @@ def test_update_display_device_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.InstancesRestInterceptor, "post_update_display_device" ) as post, mock.patch.object( + transports.InstancesRestInterceptor, "post_update_display_device_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.InstancesRestInterceptor, "pre_update_display_device" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.UpdateDisplayDeviceInstanceRequest.pb( compute.UpdateDisplayDeviceInstanceRequest() ) @@ -29011,6 +29307,7 @@ def test_update_display_device_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.update_display_device( request, @@ -29022,6 +29319,7 @@ def test_update_display_device_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_network_interface_rest_bad_request( @@ -29255,10 +29553,14 @@ def test_update_network_interface_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.InstancesRestInterceptor, "post_update_network_interface" ) as post, mock.patch.object( + transports.InstancesRestInterceptor, + "post_update_network_interface_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.InstancesRestInterceptor, "pre_update_network_interface" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.UpdateNetworkInterfaceInstanceRequest.pb( compute.UpdateNetworkInterfaceInstanceRequest() ) @@ -29282,6 +29584,7 @@ def test_update_network_interface_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.update_network_interface( request, @@ -29293,6 +29596,7 @@ def test_update_network_interface_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_shielded_instance_config_rest_bad_request( @@ -29497,10 +29801,14 @@ def test_update_shielded_instance_config_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.InstancesRestInterceptor, "post_update_shielded_instance_config" ) as post, mock.patch.object( + transports.InstancesRestInterceptor, + "post_update_shielded_instance_config_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.InstancesRestInterceptor, "pre_update_shielded_instance_config" ) as pre: pre.assert_not_called() 
post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.UpdateShieldedInstanceConfigInstanceRequest.pb( compute.UpdateShieldedInstanceConfigInstanceRequest() ) @@ -29524,6 +29832,7 @@ def test_update_shielded_instance_config_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.update_shielded_instance_config( request, @@ -29535,6 +29844,7 @@ def test_update_shielded_instance_config_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_initialize_client_w_rest(): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_instant_snapshots.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_instant_snapshots.py index 67d5ca93669d..6470546cac4e 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_instant_snapshots.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_instant_snapshots.py @@ -66,6 +66,13 @@ ) from google.cloud.compute_v1.types import compute +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -320,6 +327,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = InstantSnapshotsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = InstantSnapshotsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -3733,10 +3783,13 @@ def test_aggregated_list_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.InstantSnapshotsRestInterceptor, "post_aggregated_list" ) as post, mock.patch.object( + transports.InstantSnapshotsRestInterceptor, "post_aggregated_list_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.InstantSnapshotsRestInterceptor, "pre_aggregated_list" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = 
compute.AggregatedListInstantSnapshotsRequest.pb( compute.AggregatedListInstantSnapshotsRequest() ) @@ -3762,6 +3815,10 @@ def test_aggregated_list_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.InstantSnapshotAggregatedList() + post_with_metadata.return_value = ( + compute.InstantSnapshotAggregatedList(), + metadata, + ) client.aggregated_list( request, @@ -3773,6 +3830,7 @@ def test_aggregated_list_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_rest_bad_request(request_type=compute.DeleteInstantSnapshotRequest): @@ -3905,10 +3963,13 @@ def test_delete_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.InstantSnapshotsRestInterceptor, "post_delete" ) as post, mock.patch.object( + transports.InstantSnapshotsRestInterceptor, "post_delete_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.InstantSnapshotsRestInterceptor, "pre_delete" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.DeleteInstantSnapshotRequest.pb( compute.DeleteInstantSnapshotRequest() ) @@ -3932,6 +3993,7 @@ def test_delete_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.delete( request, @@ -3943,6 +4005,7 @@ def test_delete_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_rest_bad_request(request_type=compute.GetInstantSnapshotRequest): @@ -4065,10 +4128,13 @@ def test_get_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.InstantSnapshotsRestInterceptor, "post_get" ) as post, mock.patch.object( + transports.InstantSnapshotsRestInterceptor, "post_get_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.InstantSnapshotsRestInterceptor, "pre_get" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.GetInstantSnapshotRequest.pb( compute.GetInstantSnapshotRequest() ) @@ -4092,6 +4158,7 @@ def test_get_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.InstantSnapshot() + post_with_metadata.return_value = compute.InstantSnapshot(), metadata client.get( request, @@ -4103,6 +4170,7 @@ def test_get_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_iam_policy_rest_bad_request( @@ -4191,10 +4259,13 @@ def test_get_iam_policy_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.InstantSnapshotsRestInterceptor, "post_get_iam_policy" ) as post, mock.patch.object( + transports.InstantSnapshotsRestInterceptor, "post_get_iam_policy_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.InstantSnapshotsRestInterceptor, "pre_get_iam_policy" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.GetIamPolicyInstantSnapshotRequest.pb( compute.GetIamPolicyInstantSnapshotRequest() ) @@ -4218,6 +4289,7 @@ def test_get_iam_policy_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Policy() + 
post_with_metadata.return_value = compute.Policy(), metadata client.get_iam_policy( request, @@ -4229,6 +4301,7 @@ def test_get_iam_policy_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_insert_rest_bad_request(request_type=compute.InsertInstantSnapshotRequest): @@ -4447,10 +4520,13 @@ def test_insert_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.InstantSnapshotsRestInterceptor, "post_insert" ) as post, mock.patch.object( + transports.InstantSnapshotsRestInterceptor, "post_insert_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.InstantSnapshotsRestInterceptor, "pre_insert" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.InsertInstantSnapshotRequest.pb( compute.InsertInstantSnapshotRequest() ) @@ -4474,6 +4550,7 @@ def test_insert_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.insert( request, @@ -4485,6 +4562,7 @@ def test_insert_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_rest_bad_request(request_type=compute.ListInstantSnapshotsRequest): @@ -4573,10 +4651,13 @@ def test_list_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.InstantSnapshotsRestInterceptor, "post_list" ) as post, mock.patch.object( + transports.InstantSnapshotsRestInterceptor, "post_list_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.InstantSnapshotsRestInterceptor, "pre_list" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.ListInstantSnapshotsRequest.pb( compute.ListInstantSnapshotsRequest() ) @@ -4602,6 +4683,7 @@ def test_list_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.InstantSnapshotList() + post_with_metadata.return_value = compute.InstantSnapshotList(), metadata client.list( request, @@ -4613,6 +4695,7 @@ def test_list_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_set_iam_policy_rest_bad_request( @@ -4817,10 +4900,13 @@ def test_set_iam_policy_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.InstantSnapshotsRestInterceptor, "post_set_iam_policy" ) as post, mock.patch.object( + transports.InstantSnapshotsRestInterceptor, "post_set_iam_policy_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.InstantSnapshotsRestInterceptor, "pre_set_iam_policy" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.SetIamPolicyInstantSnapshotRequest.pb( compute.SetIamPolicyInstantSnapshotRequest() ) @@ -4844,6 +4930,7 @@ def test_set_iam_policy_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Policy() + post_with_metadata.return_value = compute.Policy(), metadata client.set_iam_policy( request, @@ -4855,6 +4942,7 @@ def test_set_iam_policy_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_set_labels_rest_bad_request( @@ -5060,10 +5148,13 
@@ def test_set_labels_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.InstantSnapshotsRestInterceptor, "post_set_labels" ) as post, mock.patch.object( + transports.InstantSnapshotsRestInterceptor, "post_set_labels_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.InstantSnapshotsRestInterceptor, "pre_set_labels" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.SetLabelsInstantSnapshotRequest.pb( compute.SetLabelsInstantSnapshotRequest() ) @@ -5087,6 +5178,7 @@ def test_set_labels_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.set_labels( request, @@ -5098,6 +5190,7 @@ def test_set_labels_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_test_iam_permissions_rest_bad_request( @@ -5260,10 +5353,14 @@ def test_test_iam_permissions_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.InstantSnapshotsRestInterceptor, "post_test_iam_permissions" ) as post, mock.patch.object( + transports.InstantSnapshotsRestInterceptor, + "post_test_iam_permissions_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.InstantSnapshotsRestInterceptor, "pre_test_iam_permissions" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.TestIamPermissionsInstantSnapshotRequest.pb( compute.TestIamPermissionsInstantSnapshotRequest() ) @@ -5289,6 +5386,7 @@ def test_test_iam_permissions_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.TestPermissionsResponse() + post_with_metadata.return_value = compute.TestPermissionsResponse(), metadata client.test_iam_permissions( request, @@ -5300,6 +5398,7 @@ def test_test_iam_permissions_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_initialize_client_w_rest(): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_interconnect_attachments.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_interconnect_attachments.py index 3ea3f85fad5c..350a42d0808f 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_interconnect_attachments.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_interconnect_attachments.py @@ -66,6 +66,13 @@ ) from google.cloud.compute_v1.types import compute +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -336,6 +343,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
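The same CRED_INFO_JSON / CRED_INFO_STRING fixtures and _add_cred_info_for_auth_errors tests recur in every test module touched by this change (the instant-snapshots hunks above and the interconnect-attachments hunks that follow). The sketch below condenses the behaviour those parametrized cases encode, reusing only names that appear in the tests themselves; the mock credential stands in for whichever google.auth credential type actually exposes get_cred_info.

# Condensed from the parametrized tests: credential info is appended to an
# error's details only for 401/403/404 responses, and only when the credential
# object can supply it via get_cred_info().
import json
from unittest import mock

from google.api_core import exceptions as core_exceptions
from google.cloud.compute_v1 import InstantSnapshotsClient

cred_info = {
    "credential_source": "/path/to/file",
    "credential_type": "service account credentials",
    "principal": "service-account@example.com",
}

cred = mock.Mock(["get_cred_info"])
cred.get_cred_info = mock.Mock(return_value=cred_info)
client = InstantSnapshotsClient(credentials=cred)
client._transport._credentials = cred

error = core_exceptions.GoogleAPICallError("message", details=["foo"])
error.code = 403  # a 500 would leave error.details untouched
client._add_cred_info_for_auth_errors(error)
assert error.details == ["foo", json.dumps(cred_info)]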
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = InterconnectAttachmentsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = InterconnectAttachmentsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -3602,10 +3652,14 @@ def test_aggregated_list_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.InterconnectAttachmentsRestInterceptor, "post_aggregated_list" ) as post, mock.patch.object( + transports.InterconnectAttachmentsRestInterceptor, + "post_aggregated_list_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.InterconnectAttachmentsRestInterceptor, "pre_aggregated_list" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.AggregatedListInterconnectAttachmentsRequest.pb( compute.AggregatedListInterconnectAttachmentsRequest() ) @@ -3631,6 +3685,10 @@ def test_aggregated_list_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.InterconnectAttachmentAggregatedList() + post_with_metadata.return_value = ( + compute.InterconnectAttachmentAggregatedList(), + metadata, + ) client.aggregated_list( request, @@ -3642,6 +3700,7 @@ def test_aggregated_list_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_rest_bad_request( @@ -3776,10 +3835,13 @@ def test_delete_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.InterconnectAttachmentsRestInterceptor, "post_delete" ) as post, mock.patch.object( + transports.InterconnectAttachmentsRestInterceptor, "post_delete_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.InterconnectAttachmentsRestInterceptor, "pre_delete" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.DeleteInterconnectAttachmentRequest.pb( compute.DeleteInterconnectAttachmentRequest() ) @@ -3803,6 +3865,7 @@ def test_delete_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.delete( request, @@ -3814,6 +3877,7 @@ def test_delete_rest_interceptors(null_interceptor): pre.assert_called_once() 
post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_rest_bad_request(request_type=compute.GetInterconnectAttachmentRequest): @@ -3980,10 +4044,13 @@ def test_get_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.InterconnectAttachmentsRestInterceptor, "post_get" ) as post, mock.patch.object( + transports.InterconnectAttachmentsRestInterceptor, "post_get_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.InterconnectAttachmentsRestInterceptor, "pre_get" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.GetInterconnectAttachmentRequest.pb( compute.GetInterconnectAttachmentRequest() ) @@ -4009,6 +4076,7 @@ def test_get_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.InterconnectAttachment() + post_with_metadata.return_value = compute.InterconnectAttachment(), metadata client.get( request, @@ -4020,6 +4088,7 @@ def test_get_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_insert_rest_bad_request( @@ -4276,10 +4345,13 @@ def test_insert_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.InterconnectAttachmentsRestInterceptor, "post_insert" ) as post, mock.patch.object( + transports.InterconnectAttachmentsRestInterceptor, "post_insert_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.InterconnectAttachmentsRestInterceptor, "pre_insert" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.InsertInterconnectAttachmentRequest.pb( compute.InsertInterconnectAttachmentRequest() ) @@ -4303,6 +4375,7 @@ def test_insert_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.insert( request, @@ -4314,6 +4387,7 @@ def test_insert_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_rest_bad_request(request_type=compute.ListInterconnectAttachmentsRequest): @@ -4402,10 +4476,13 @@ def test_list_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.InterconnectAttachmentsRestInterceptor, "post_list" ) as post, mock.patch.object( + transports.InterconnectAttachmentsRestInterceptor, "post_list_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.InterconnectAttachmentsRestInterceptor, "pre_list" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.ListInterconnectAttachmentsRequest.pb( compute.ListInterconnectAttachmentsRequest() ) @@ -4431,6 +4508,7 @@ def test_list_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.InterconnectAttachmentList() + post_with_metadata.return_value = compute.InterconnectAttachmentList(), metadata client.list( request, @@ -4442,6 +4520,7 @@ def test_list_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_patch_rest_bad_request( @@ -4706,10 +4785,13 @@ def test_patch_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.InterconnectAttachmentsRestInterceptor, 
"post_patch" ) as post, mock.patch.object( + transports.InterconnectAttachmentsRestInterceptor, "post_patch_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.InterconnectAttachmentsRestInterceptor, "pre_patch" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.PatchInterconnectAttachmentRequest.pb( compute.PatchInterconnectAttachmentRequest() ) @@ -4733,6 +4815,7 @@ def test_patch_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.patch( request, @@ -4744,6 +4827,7 @@ def test_patch_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_set_labels_rest_bad_request( @@ -4949,10 +5033,14 @@ def test_set_labels_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.InterconnectAttachmentsRestInterceptor, "post_set_labels" ) as post, mock.patch.object( + transports.InterconnectAttachmentsRestInterceptor, + "post_set_labels_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.InterconnectAttachmentsRestInterceptor, "pre_set_labels" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.SetLabelsInterconnectAttachmentRequest.pb( compute.SetLabelsInterconnectAttachmentRequest() ) @@ -4976,6 +5064,7 @@ def test_set_labels_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.set_labels( request, @@ -4987,6 +5076,7 @@ def test_set_labels_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_initialize_client_w_rest(): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_interconnect_locations.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_interconnect_locations.py index 4bf17a1cc2d0..c9bda82b4ab7 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_interconnect_locations.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_interconnect_locations.py @@ -59,6 +59,13 @@ ) from google.cloud.compute_v1.types import compute +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -329,6 +336,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = InterconnectLocationsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = InterconnectLocationsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -1585,10 +1635,13 @@ def test_get_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.InterconnectLocationsRestInterceptor, "post_get" ) as post, mock.patch.object( + transports.InterconnectLocationsRestInterceptor, "post_get_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.InterconnectLocationsRestInterceptor, "pre_get" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.GetInterconnectLocationRequest.pb( compute.GetInterconnectLocationRequest() ) @@ -1614,6 +1667,7 @@ def test_get_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.InterconnectLocation() + post_with_metadata.return_value = compute.InterconnectLocation(), metadata client.get( request, @@ -1625,6 +1679,7 @@ def test_get_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_rest_bad_request(request_type=compute.ListInterconnectLocationsRequest): @@ -1713,10 +1768,13 @@ def test_list_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.InterconnectLocationsRestInterceptor, "post_list" ) as post, mock.patch.object( + transports.InterconnectLocationsRestInterceptor, "post_list_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.InterconnectLocationsRestInterceptor, "pre_list" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.ListInterconnectLocationsRequest.pb( compute.ListInterconnectLocationsRequest() ) @@ -1742,6 +1800,7 @@ def test_list_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.InterconnectLocationList() + post_with_metadata.return_value = compute.InterconnectLocationList(), metadata client.list( request, @@ -1753,6 +1812,7 @@ def test_list_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_initialize_client_w_rest(): diff --git 
a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_interconnect_remote_locations.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_interconnect_remote_locations.py index 02065dfc9b7c..04ae391b8a09 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_interconnect_remote_locations.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_interconnect_remote_locations.py @@ -59,6 +59,13 @@ ) from google.cloud.compute_v1.types import compute +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -337,6 +344,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = InterconnectRemoteLocationsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = InterconnectRemoteLocationsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -1615,10 +1665,13 @@ def test_get_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.InterconnectRemoteLocationsRestInterceptor, "post_get" ) as post, mock.patch.object( + transports.InterconnectRemoteLocationsRestInterceptor, "post_get_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.InterconnectRemoteLocationsRestInterceptor, "pre_get" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.GetInterconnectRemoteLocationRequest.pb( compute.GetInterconnectRemoteLocationRequest() ) @@ -1644,6 +1697,7 @@ def test_get_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.InterconnectRemoteLocation() + post_with_metadata.return_value = compute.InterconnectRemoteLocation(), metadata client.get( request, @@ -1655,6 +1709,7 @@ def test_get_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_rest_bad_request( @@ -1745,10 +1800,13 @@ def test_list_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( 
transports.InterconnectRemoteLocationsRestInterceptor, "post_list" ) as post, mock.patch.object( + transports.InterconnectRemoteLocationsRestInterceptor, "post_list_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.InterconnectRemoteLocationsRestInterceptor, "pre_list" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.ListInterconnectRemoteLocationsRequest.pb( compute.ListInterconnectRemoteLocationsRequest() ) @@ -1774,6 +1832,10 @@ def test_list_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.InterconnectRemoteLocationList() + post_with_metadata.return_value = ( + compute.InterconnectRemoteLocationList(), + metadata, + ) client.list( request, @@ -1785,6 +1847,7 @@ def test_list_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_initialize_client_w_rest(): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_interconnects.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_interconnects.py index c69a038c4195..e8e5b82d7598 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_interconnects.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_interconnects.py @@ -66,6 +66,13 @@ ) from google.cloud.compute_v1.types import compute +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -315,6 +322,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
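To exercise only the newly added credential-info tests for one of these files, pytest's keyword filter is enough. A hypothetical invocation from the repository root, using the test_interconnects.py path from the diff header above and assuming the package's test dependencies are installed, could look like this.

import pytest

# Run only the _add_cred_info_for_auth_errors tests in this module, quietly.
pytest.main(
    [
        "-q",
        "packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_interconnects.py",
        "-k", "add_cred_info_for_auth_errors",
    ]
)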
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = InterconnectsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = InterconnectsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -3528,10 +3578,13 @@ def test_delete_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.InterconnectsRestInterceptor, "post_delete" ) as post, mock.patch.object( + transports.InterconnectsRestInterceptor, "post_delete_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.InterconnectsRestInterceptor, "pre_delete" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.DeleteInterconnectRequest.pb( compute.DeleteInterconnectRequest() ) @@ -3555,6 +3608,7 @@ def test_delete_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.delete( request, @@ -3566,6 +3620,7 @@ def test_delete_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_rest_bad_request(request_type=compute.GetInterconnectRequest): @@ -3698,10 +3753,13 @@ def test_get_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.InterconnectsRestInterceptor, "post_get" ) as post, mock.patch.object( + transports.InterconnectsRestInterceptor, "post_get_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.InterconnectsRestInterceptor, "pre_get" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.GetInterconnectRequest.pb(compute.GetInterconnectRequest()) transcode.return_value = { "method": "post", @@ -3723,6 +3781,7 @@ def test_get_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Interconnect() + post_with_metadata.return_value = compute.Interconnect(), metadata client.get( request, @@ -3734,6 +3793,7 @@ def test_get_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_diagnostics_rest_bad_request( @@ -3815,10 +3875,13 @@ def test_get_diagnostics_rest_interceptors(null_interceptor): ) as transcode, 
mock.patch.object( transports.InterconnectsRestInterceptor, "post_get_diagnostics" ) as post, mock.patch.object( + transports.InterconnectsRestInterceptor, "post_get_diagnostics_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.InterconnectsRestInterceptor, "pre_get_diagnostics" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.GetDiagnosticsInterconnectRequest.pb( compute.GetDiagnosticsInterconnectRequest() ) @@ -3844,6 +3907,10 @@ def test_get_diagnostics_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.InterconnectsGetDiagnosticsResponse() + post_with_metadata.return_value = ( + compute.InterconnectsGetDiagnosticsResponse(), + metadata, + ) client.get_diagnostics( request, @@ -3855,6 +3922,7 @@ def test_get_diagnostics_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_macsec_config_rest_bad_request( @@ -3939,10 +4007,13 @@ def test_get_macsec_config_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.InterconnectsRestInterceptor, "post_get_macsec_config" ) as post, mock.patch.object( + transports.InterconnectsRestInterceptor, "post_get_macsec_config_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.InterconnectsRestInterceptor, "pre_get_macsec_config" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.GetMacsecConfigInterconnectRequest.pb( compute.GetMacsecConfigInterconnectRequest() ) @@ -3968,6 +4039,10 @@ def test_get_macsec_config_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.InterconnectsGetMacsecConfigResponse() + post_with_metadata.return_value = ( + compute.InterconnectsGetMacsecConfigResponse(), + metadata, + ) client.get_macsec_config( request, @@ -3979,6 +4054,7 @@ def test_get_macsec_config_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_insert_rest_bad_request(request_type=compute.InsertInterconnectRequest): @@ -4238,10 +4314,13 @@ def test_insert_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.InterconnectsRestInterceptor, "post_insert" ) as post, mock.patch.object( + transports.InterconnectsRestInterceptor, "post_insert_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.InterconnectsRestInterceptor, "pre_insert" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.InsertInterconnectRequest.pb( compute.InsertInterconnectRequest() ) @@ -4265,6 +4344,7 @@ def test_insert_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.insert( request, @@ -4276,6 +4356,7 @@ def test_insert_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_rest_bad_request(request_type=compute.ListInterconnectsRequest): @@ -4364,10 +4445,13 @@ def test_list_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.InterconnectsRestInterceptor, "post_list" ) as post, mock.patch.object( + transports.InterconnectsRestInterceptor, 
"post_list_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.InterconnectsRestInterceptor, "pre_list" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.ListInterconnectsRequest.pb( compute.ListInterconnectsRequest() ) @@ -4391,6 +4475,7 @@ def test_list_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.InterconnectList() + post_with_metadata.return_value = compute.InterconnectList(), metadata client.list( request, @@ -4402,6 +4487,7 @@ def test_list_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_patch_rest_bad_request(request_type=compute.PatchInterconnectRequest): @@ -4661,10 +4747,13 @@ def test_patch_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.InterconnectsRestInterceptor, "post_patch" ) as post, mock.patch.object( + transports.InterconnectsRestInterceptor, "post_patch_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.InterconnectsRestInterceptor, "pre_patch" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.PatchInterconnectRequest.pb( compute.PatchInterconnectRequest() ) @@ -4688,6 +4777,7 @@ def test_patch_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.patch( request, @@ -4699,6 +4789,7 @@ def test_patch_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_set_labels_rest_bad_request(request_type=compute.SetLabelsInterconnectRequest): @@ -4902,10 +4993,13 @@ def test_set_labels_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.InterconnectsRestInterceptor, "post_set_labels" ) as post, mock.patch.object( + transports.InterconnectsRestInterceptor, "post_set_labels_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.InterconnectsRestInterceptor, "pre_set_labels" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.SetLabelsInterconnectRequest.pb( compute.SetLabelsInterconnectRequest() ) @@ -4929,6 +5023,7 @@ def test_set_labels_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.set_labels( request, @@ -4940,6 +5035,7 @@ def test_set_labels_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_initialize_client_w_rest(): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_license_codes.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_license_codes.py index e452c3388bf3..04821055e21f 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_license_codes.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_license_codes.py @@ -58,6 +58,13 @@ ) from google.cloud.compute_v1.types import compute +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = 
json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -296,6 +303,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = LicenseCodesClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = LicenseCodesClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -1447,10 +1497,13 @@ def test_get_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.LicenseCodesRestInterceptor, "post_get" ) as post, mock.patch.object( + transports.LicenseCodesRestInterceptor, "post_get_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.LicenseCodesRestInterceptor, "pre_get" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.GetLicenseCodeRequest.pb(compute.GetLicenseCodeRequest()) transcode.return_value = { "method": "post", @@ -1472,6 +1525,7 @@ def test_get_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.LicenseCode() + post_with_metadata.return_value = compute.LicenseCode(), metadata client.get( request, @@ -1483,6 +1537,7 @@ def test_get_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_test_iam_permissions_rest_bad_request( @@ -1645,10 +1700,14 @@ def test_test_iam_permissions_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.LicenseCodesRestInterceptor, "post_test_iam_permissions" ) as post, mock.patch.object( + transports.LicenseCodesRestInterceptor, + "post_test_iam_permissions_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.LicenseCodesRestInterceptor, "pre_test_iam_permissions" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.TestIamPermissionsLicenseCodeRequest.pb( compute.TestIamPermissionsLicenseCodeRequest() ) @@ -1674,6 +1733,7 @@ def test_test_iam_permissions_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.TestPermissionsResponse() + post_with_metadata.return_value = compute.TestPermissionsResponse(), metadata 
client.test_iam_permissions( request, @@ -1685,6 +1745,7 @@ def test_test_iam_permissions_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_initialize_client_w_rest(): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_licenses.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_licenses.py index 389b933d59cf..a9644830f882 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_licenses.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_licenses.py @@ -62,6 +62,13 @@ from google.cloud.compute_v1.services.licenses import LicensesClient, pagers, transports from google.cloud.compute_v1.types import compute +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -287,6 +294,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = LicensesClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = LicensesClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -2935,10 +2985,13 @@ def test_delete_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.LicensesRestInterceptor, "post_delete" ) as post, mock.patch.object( + transports.LicensesRestInterceptor, "post_delete_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.LicensesRestInterceptor, "pre_delete" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.DeleteLicenseRequest.pb(compute.DeleteLicenseRequest()) transcode.return_value = { "method": "post", @@ -2960,6 +3013,7 @@ def test_delete_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.delete( request, @@ -2971,6 +3025,7 @@ def test_delete_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + 
post_with_metadata.assert_called_once() def test_get_rest_bad_request(request_type=compute.GetLicenseRequest): @@ -3067,10 +3122,13 @@ def test_get_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.LicensesRestInterceptor, "post_get" ) as post, mock.patch.object( + transports.LicensesRestInterceptor, "post_get_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.LicensesRestInterceptor, "pre_get" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.GetLicenseRequest.pb(compute.GetLicenseRequest()) transcode.return_value = { "method": "post", @@ -3092,6 +3150,7 @@ def test_get_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.License() + post_with_metadata.return_value = compute.License(), metadata client.get( request, @@ -3103,6 +3162,7 @@ def test_get_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_iam_policy_rest_bad_request( @@ -3189,10 +3249,13 @@ def test_get_iam_policy_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.LicensesRestInterceptor, "post_get_iam_policy" ) as post, mock.patch.object( + transports.LicensesRestInterceptor, "post_get_iam_policy_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.LicensesRestInterceptor, "pre_get_iam_policy" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.GetIamPolicyLicenseRequest.pb( compute.GetIamPolicyLicenseRequest() ) @@ -3216,6 +3279,7 @@ def test_get_iam_policy_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Policy() + post_with_metadata.return_value = compute.Policy(), metadata client.get_iam_policy( request, @@ -3227,6 +3291,7 @@ def test_get_iam_policy_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_insert_rest_bad_request(request_type=compute.InsertLicenseRequest): @@ -3428,10 +3493,13 @@ def test_insert_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.LicensesRestInterceptor, "post_insert" ) as post, mock.patch.object( + transports.LicensesRestInterceptor, "post_insert_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.LicensesRestInterceptor, "pre_insert" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.InsertLicenseRequest.pb(compute.InsertLicenseRequest()) transcode.return_value = { "method": "post", @@ -3453,6 +3521,7 @@ def test_insert_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.insert( request, @@ -3464,6 +3533,7 @@ def test_insert_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_rest_bad_request(request_type=compute.ListLicensesRequest): @@ -3548,10 +3618,13 @@ def test_list_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.LicensesRestInterceptor, "post_list" ) as post, mock.patch.object( + transports.LicensesRestInterceptor, "post_list_with_metadata" + ) as post_with_metadata, mock.patch.object( 
transports.LicensesRestInterceptor, "pre_list" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.ListLicensesRequest.pb(compute.ListLicensesRequest()) transcode.return_value = { "method": "post", @@ -3575,6 +3648,7 @@ def test_list_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.LicensesListResponse() + post_with_metadata.return_value = compute.LicensesListResponse(), metadata client.list( request, @@ -3586,6 +3660,7 @@ def test_list_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_set_iam_policy_rest_bad_request( @@ -3788,10 +3863,13 @@ def test_set_iam_policy_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.LicensesRestInterceptor, "post_set_iam_policy" ) as post, mock.patch.object( + transports.LicensesRestInterceptor, "post_set_iam_policy_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.LicensesRestInterceptor, "pre_set_iam_policy" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.SetIamPolicyLicenseRequest.pb( compute.SetIamPolicyLicenseRequest() ) @@ -3815,6 +3893,7 @@ def test_set_iam_policy_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Policy() + post_with_metadata.return_value = compute.Policy(), metadata client.set_iam_policy( request, @@ -3826,6 +3905,7 @@ def test_set_iam_policy_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_test_iam_permissions_rest_bad_request( @@ -3986,10 +4066,13 @@ def test_test_iam_permissions_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.LicensesRestInterceptor, "post_test_iam_permissions" ) as post, mock.patch.object( + transports.LicensesRestInterceptor, "post_test_iam_permissions_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.LicensesRestInterceptor, "pre_test_iam_permissions" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.TestIamPermissionsLicenseRequest.pb( compute.TestIamPermissionsLicenseRequest() ) @@ -4015,6 +4098,7 @@ def test_test_iam_permissions_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.TestPermissionsResponse() + post_with_metadata.return_value = compute.TestPermissionsResponse(), metadata client.test_iam_permissions( request, @@ -4026,6 +4110,7 @@ def test_test_iam_permissions_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_initialize_client_w_rest(): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_machine_images.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_machine_images.py index 7f6207057db0..f9163e3d7c4f 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_machine_images.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_machine_images.py @@ -66,6 +66,13 @@ ) from google.cloud.compute_v1.types import compute +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} 
+CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -315,6 +322,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = MachineImagesClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = MachineImagesClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -2958,10 +3008,13 @@ def test_delete_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.MachineImagesRestInterceptor, "post_delete" ) as post, mock.patch.object( + transports.MachineImagesRestInterceptor, "post_delete_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.MachineImagesRestInterceptor, "pre_delete" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.DeleteMachineImageRequest.pb( compute.DeleteMachineImageRequest() ) @@ -2985,6 +3038,7 @@ def test_delete_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.delete( request, @@ -2996,6 +3050,7 @@ def test_delete_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_rest_bad_request(request_type=compute.GetMachineImageRequest): @@ -3102,10 +3157,13 @@ def test_get_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.MachineImagesRestInterceptor, "post_get" ) as post, mock.patch.object( + transports.MachineImagesRestInterceptor, "post_get_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.MachineImagesRestInterceptor, "pre_get" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.GetMachineImageRequest.pb(compute.GetMachineImageRequest()) transcode.return_value = { "method": "post", @@ -3127,6 +3185,7 @@ def test_get_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.MachineImage() + post_with_metadata.return_value = compute.MachineImage(), metadata client.get( request, @@ -3138,6 +3197,7 @@ def 
test_get_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_iam_policy_rest_bad_request( @@ -3226,10 +3286,13 @@ def test_get_iam_policy_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.MachineImagesRestInterceptor, "post_get_iam_policy" ) as post, mock.patch.object( + transports.MachineImagesRestInterceptor, "post_get_iam_policy_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.MachineImagesRestInterceptor, "pre_get_iam_policy" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.GetIamPolicyMachineImageRequest.pb( compute.GetIamPolicyMachineImageRequest() ) @@ -3253,6 +3316,7 @@ def test_get_iam_policy_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Policy() + post_with_metadata.return_value = compute.Policy(), metadata client.get_iam_policy( request, @@ -3264,6 +3328,7 @@ def test_get_iam_policy_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_insert_rest_bad_request(request_type=compute.InsertMachineImageRequest): @@ -3454,6 +3519,7 @@ def test_insert_rest_call_success(request_type): "scheduling": { "automatic_restart": True, "availability_domain": 2002, + "host_error_timeout_seconds": 2811, "instance_termination_action": "instance_termination_action_value", "local_ssd_recovery_timeout": {"nanos": 543, "seconds": 751}, "location_hint": "location_hint_value", @@ -3697,10 +3763,13 @@ def test_insert_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.MachineImagesRestInterceptor, "post_insert" ) as post, mock.patch.object( + transports.MachineImagesRestInterceptor, "post_insert_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.MachineImagesRestInterceptor, "pre_insert" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.InsertMachineImageRequest.pb( compute.InsertMachineImageRequest() ) @@ -3724,6 +3793,7 @@ def test_insert_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.insert( request, @@ -3735,6 +3805,7 @@ def test_insert_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_rest_bad_request(request_type=compute.ListMachineImagesRequest): @@ -3823,10 +3894,13 @@ def test_list_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.MachineImagesRestInterceptor, "post_list" ) as post, mock.patch.object( + transports.MachineImagesRestInterceptor, "post_list_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.MachineImagesRestInterceptor, "pre_list" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.ListMachineImagesRequest.pb( compute.ListMachineImagesRequest() ) @@ -3850,6 +3924,7 @@ def test_list_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.MachineImageList() + post_with_metadata.return_value = compute.MachineImageList(), metadata client.list( request, @@ -3861,6 +3936,7 @@ def 
test_list_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_set_iam_policy_rest_bad_request( @@ -4065,10 +4141,13 @@ def test_set_iam_policy_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.MachineImagesRestInterceptor, "post_set_iam_policy" ) as post, mock.patch.object( + transports.MachineImagesRestInterceptor, "post_set_iam_policy_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.MachineImagesRestInterceptor, "pre_set_iam_policy" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.SetIamPolicyMachineImageRequest.pb( compute.SetIamPolicyMachineImageRequest() ) @@ -4092,6 +4171,7 @@ def test_set_iam_policy_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Policy() + post_with_metadata.return_value = compute.Policy(), metadata client.set_iam_policy( request, @@ -4103,6 +4183,7 @@ def test_set_iam_policy_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_test_iam_permissions_rest_bad_request( @@ -4265,10 +4346,14 @@ def test_test_iam_permissions_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.MachineImagesRestInterceptor, "post_test_iam_permissions" ) as post, mock.patch.object( + transports.MachineImagesRestInterceptor, + "post_test_iam_permissions_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.MachineImagesRestInterceptor, "pre_test_iam_permissions" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.TestIamPermissionsMachineImageRequest.pb( compute.TestIamPermissionsMachineImageRequest() ) @@ -4294,6 +4379,7 @@ def test_test_iam_permissions_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.TestPermissionsResponse() + post_with_metadata.return_value = compute.TestPermissionsResponse(), metadata client.test_iam_permissions( request, @@ -4305,6 +4391,7 @@ def test_test_iam_permissions_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_initialize_client_w_rest(): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_machine_types.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_machine_types.py index a2b42a4bf3c8..86985a85a4ff 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_machine_types.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_machine_types.py @@ -59,6 +59,13 @@ ) from google.cloud.compute_v1.types import compute +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -297,6 +304,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = MachineTypesClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = MachineTypesClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -1796,10 +1846,13 @@ def test_aggregated_list_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.MachineTypesRestInterceptor, "post_aggregated_list" ) as post, mock.patch.object( + transports.MachineTypesRestInterceptor, "post_aggregated_list_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.MachineTypesRestInterceptor, "pre_aggregated_list" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.AggregatedListMachineTypesRequest.pb( compute.AggregatedListMachineTypesRequest() ) @@ -1825,6 +1878,7 @@ def test_aggregated_list_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.MachineTypeAggregatedList() + post_with_metadata.return_value = compute.MachineTypeAggregatedList(), metadata client.aggregated_list( request, @@ -1836,6 +1890,7 @@ def test_aggregated_list_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_rest_bad_request(request_type=compute.GetMachineTypeRequest): @@ -1944,10 +1999,13 @@ def test_get_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.MachineTypesRestInterceptor, "post_get" ) as post, mock.patch.object( + transports.MachineTypesRestInterceptor, "post_get_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.MachineTypesRestInterceptor, "pre_get" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.GetMachineTypeRequest.pb(compute.GetMachineTypeRequest()) transcode.return_value = { "method": "post", @@ -1969,6 +2027,7 @@ def test_get_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.MachineType() + post_with_metadata.return_value = compute.MachineType(), metadata client.get( request, @@ -1980,6 +2039,7 @@ def test_get_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def 
test_list_rest_bad_request(request_type=compute.ListMachineTypesRequest): @@ -2068,10 +2128,13 @@ def test_list_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.MachineTypesRestInterceptor, "post_list" ) as post, mock.patch.object( + transports.MachineTypesRestInterceptor, "post_list_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.MachineTypesRestInterceptor, "pre_list" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.ListMachineTypesRequest.pb( compute.ListMachineTypesRequest() ) @@ -2095,6 +2158,7 @@ def test_list_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.MachineTypeList() + post_with_metadata.return_value = compute.MachineTypeList(), metadata client.list( request, @@ -2106,6 +2170,7 @@ def test_list_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_initialize_client_w_rest(): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_network_attachments.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_network_attachments.py index 414ace4b7790..025774c3d881 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_network_attachments.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_network_attachments.py @@ -66,6 +66,13 @@ ) from google.cloud.compute_v1.types import compute +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -332,6 +339,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
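The negative variants of these credential-info tests lean on unittest.mock's spec argument: passing a list of attribute names as the first positional argument restricts the mock to exactly those attributes, so mock.Mock([]) yields a credentials object for which hasattr(cred, "get_cred_info") is False, while mock.Mock(["get_cred_info"]) exposes only that method. A small standalone check of that behavior:

from unittest import mock

# A spec list limits which attributes the mock exposes.
cred_with_info = mock.Mock(["get_cred_info"])
cred_without_info = mock.Mock([])

assert hasattr(cred_with_info, "get_cred_info")
assert not hasattr(cred_without_info, "get_cred_info")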
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = NetworkAttachmentsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = NetworkAttachmentsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -3777,10 +3827,14 @@ def test_aggregated_list_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.NetworkAttachmentsRestInterceptor, "post_aggregated_list" ) as post, mock.patch.object( + transports.NetworkAttachmentsRestInterceptor, + "post_aggregated_list_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.NetworkAttachmentsRestInterceptor, "pre_aggregated_list" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.AggregatedListNetworkAttachmentsRequest.pb( compute.AggregatedListNetworkAttachmentsRequest() ) @@ -3806,6 +3860,10 @@ def test_aggregated_list_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.NetworkAttachmentAggregatedList() + post_with_metadata.return_value = ( + compute.NetworkAttachmentAggregatedList(), + metadata, + ) client.aggregated_list( request, @@ -3817,6 +3875,7 @@ def test_aggregated_list_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_rest_bad_request(request_type=compute.DeleteNetworkAttachmentRequest): @@ -3949,10 +4008,13 @@ def test_delete_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.NetworkAttachmentsRestInterceptor, "post_delete" ) as post, mock.patch.object( + transports.NetworkAttachmentsRestInterceptor, "post_delete_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.NetworkAttachmentsRestInterceptor, "pre_delete" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.DeleteNetworkAttachmentRequest.pb( compute.DeleteNetworkAttachmentRequest() ) @@ -3976,6 +4038,7 @@ def test_delete_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.delete( request, @@ -3987,6 +4050,7 @@ def test_delete_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + 
post_with_metadata.assert_called_once() def test_get_rest_bad_request(request_type=compute.GetNetworkAttachmentRequest): @@ -4103,10 +4167,13 @@ def test_get_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.NetworkAttachmentsRestInterceptor, "post_get" ) as post, mock.patch.object( + transports.NetworkAttachmentsRestInterceptor, "post_get_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.NetworkAttachmentsRestInterceptor, "pre_get" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.GetNetworkAttachmentRequest.pb( compute.GetNetworkAttachmentRequest() ) @@ -4130,6 +4197,7 @@ def test_get_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.NetworkAttachment() + post_with_metadata.return_value = compute.NetworkAttachment(), metadata client.get( request, @@ -4141,6 +4209,7 @@ def test_get_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_iam_policy_rest_bad_request( @@ -4229,10 +4298,14 @@ def test_get_iam_policy_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.NetworkAttachmentsRestInterceptor, "post_get_iam_policy" ) as post, mock.patch.object( + transports.NetworkAttachmentsRestInterceptor, + "post_get_iam_policy_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.NetworkAttachmentsRestInterceptor, "pre_get_iam_policy" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.GetIamPolicyNetworkAttachmentRequest.pb( compute.GetIamPolicyNetworkAttachmentRequest() ) @@ -4256,6 +4329,7 @@ def test_get_iam_policy_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Policy() + post_with_metadata.return_value = compute.Policy(), metadata client.get_iam_policy( request, @@ -4267,6 +4341,7 @@ def test_get_iam_policy_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_insert_rest_bad_request(request_type=compute.InsertNetworkAttachmentRequest): @@ -4500,10 +4575,13 @@ def test_insert_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.NetworkAttachmentsRestInterceptor, "post_insert" ) as post, mock.patch.object( + transports.NetworkAttachmentsRestInterceptor, "post_insert_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.NetworkAttachmentsRestInterceptor, "pre_insert" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.InsertNetworkAttachmentRequest.pb( compute.InsertNetworkAttachmentRequest() ) @@ -4527,6 +4605,7 @@ def test_insert_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.insert( request, @@ -4538,6 +4617,7 @@ def test_insert_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_rest_bad_request(request_type=compute.ListNetworkAttachmentsRequest): @@ -4626,10 +4706,13 @@ def test_list_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.NetworkAttachmentsRestInterceptor, "post_list" ) as post, 
mock.patch.object( + transports.NetworkAttachmentsRestInterceptor, "post_list_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.NetworkAttachmentsRestInterceptor, "pre_list" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.ListNetworkAttachmentsRequest.pb( compute.ListNetworkAttachmentsRequest() ) @@ -4655,6 +4738,7 @@ def test_list_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.NetworkAttachmentList() + post_with_metadata.return_value = compute.NetworkAttachmentList(), metadata client.list( request, @@ -4666,6 +4750,7 @@ def test_list_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_patch_rest_bad_request(request_type=compute.PatchNetworkAttachmentRequest): @@ -4907,10 +4992,13 @@ def test_patch_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.NetworkAttachmentsRestInterceptor, "post_patch" ) as post, mock.patch.object( + transports.NetworkAttachmentsRestInterceptor, "post_patch_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.NetworkAttachmentsRestInterceptor, "pre_patch" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.PatchNetworkAttachmentRequest.pb( compute.PatchNetworkAttachmentRequest() ) @@ -4934,6 +5022,7 @@ def test_patch_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.patch( request, @@ -4945,6 +5034,7 @@ def test_patch_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_set_iam_policy_rest_bad_request( @@ -5149,10 +5239,14 @@ def test_set_iam_policy_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.NetworkAttachmentsRestInterceptor, "post_set_iam_policy" ) as post, mock.patch.object( + transports.NetworkAttachmentsRestInterceptor, + "post_set_iam_policy_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.NetworkAttachmentsRestInterceptor, "pre_set_iam_policy" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.SetIamPolicyNetworkAttachmentRequest.pb( compute.SetIamPolicyNetworkAttachmentRequest() ) @@ -5176,6 +5270,7 @@ def test_set_iam_policy_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Policy() + post_with_metadata.return_value = compute.Policy(), metadata client.set_iam_policy( request, @@ -5187,6 +5282,7 @@ def test_set_iam_policy_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_test_iam_permissions_rest_bad_request( @@ -5349,10 +5445,14 @@ def test_test_iam_permissions_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.NetworkAttachmentsRestInterceptor, "post_test_iam_permissions" ) as post, mock.patch.object( + transports.NetworkAttachmentsRestInterceptor, + "post_test_iam_permissions_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.NetworkAttachmentsRestInterceptor, "pre_test_iam_permissions" ) as pre: pre.assert_not_called() post.assert_not_called() + 
post_with_metadata.assert_not_called() pb_message = compute.TestIamPermissionsNetworkAttachmentRequest.pb( compute.TestIamPermissionsNetworkAttachmentRequest() ) @@ -5378,6 +5478,7 @@ def test_test_iam_permissions_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.TestPermissionsResponse() + post_with_metadata.return_value = compute.TestPermissionsResponse(), metadata client.test_iam_permissions( request, @@ -5389,6 +5490,7 @@ def test_test_iam_permissions_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_initialize_client_w_rest(): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_network_edge_security_services.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_network_edge_security_services.py index 13ad68322b4b..31f805a605ef 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_network_edge_security_services.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_network_edge_security_services.py @@ -66,6 +66,13 @@ ) from google.cloud.compute_v1.types import compute +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -344,6 +351,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = NetworkEdgeSecurityServicesClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = NetworkEdgeSecurityServicesClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -2953,10 +3003,14 @@ def test_aggregated_list_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.NetworkEdgeSecurityServicesRestInterceptor, "post_aggregated_list" ) as post, mock.patch.object( + transports.NetworkEdgeSecurityServicesRestInterceptor, + "post_aggregated_list_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.NetworkEdgeSecurityServicesRestInterceptor, "pre_aggregated_list" ) as pre: pre.assert_not_called() post.assert_not_called() + 
post_with_metadata.assert_not_called() pb_message = compute.AggregatedListNetworkEdgeSecurityServicesRequest.pb( compute.AggregatedListNetworkEdgeSecurityServicesRequest() ) @@ -2982,6 +3036,10 @@ def test_aggregated_list_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.NetworkEdgeSecurityServiceAggregatedList() + post_with_metadata.return_value = ( + compute.NetworkEdgeSecurityServiceAggregatedList(), + metadata, + ) client.aggregated_list( request, @@ -2993,6 +3051,7 @@ def test_aggregated_list_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_rest_bad_request( @@ -3127,10 +3186,14 @@ def test_delete_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.NetworkEdgeSecurityServicesRestInterceptor, "post_delete" ) as post, mock.patch.object( + transports.NetworkEdgeSecurityServicesRestInterceptor, + "post_delete_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.NetworkEdgeSecurityServicesRestInterceptor, "pre_delete" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.DeleteNetworkEdgeSecurityServiceRequest.pb( compute.DeleteNetworkEdgeSecurityServiceRequest() ) @@ -3154,6 +3217,7 @@ def test_delete_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.delete( request, @@ -3165,6 +3229,7 @@ def test_delete_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_rest_bad_request( @@ -3275,10 +3340,13 @@ def test_get_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.NetworkEdgeSecurityServicesRestInterceptor, "post_get" ) as post, mock.patch.object( + transports.NetworkEdgeSecurityServicesRestInterceptor, "post_get_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.NetworkEdgeSecurityServicesRestInterceptor, "pre_get" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.GetNetworkEdgeSecurityServiceRequest.pb( compute.GetNetworkEdgeSecurityServiceRequest() ) @@ -3304,6 +3372,7 @@ def test_get_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.NetworkEdgeSecurityService() + post_with_metadata.return_value = compute.NetworkEdgeSecurityService(), metadata client.get( request, @@ -3315,6 +3384,7 @@ def test_get_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_insert_rest_bad_request( @@ -3531,10 +3601,14 @@ def test_insert_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.NetworkEdgeSecurityServicesRestInterceptor, "post_insert" ) as post, mock.patch.object( + transports.NetworkEdgeSecurityServicesRestInterceptor, + "post_insert_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.NetworkEdgeSecurityServicesRestInterceptor, "pre_insert" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.InsertNetworkEdgeSecurityServiceRequest.pb( compute.InsertNetworkEdgeSecurityServiceRequest() ) @@ -3558,6 +3632,7 @@ def 
test_insert_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.insert( request, @@ -3569,6 +3644,7 @@ def test_insert_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_patch_rest_bad_request( @@ -3793,10 +3869,14 @@ def test_patch_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.NetworkEdgeSecurityServicesRestInterceptor, "post_patch" ) as post, mock.patch.object( + transports.NetworkEdgeSecurityServicesRestInterceptor, + "post_patch_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.NetworkEdgeSecurityServicesRestInterceptor, "pre_patch" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.PatchNetworkEdgeSecurityServiceRequest.pb( compute.PatchNetworkEdgeSecurityServiceRequest() ) @@ -3820,6 +3900,7 @@ def test_patch_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.patch( request, @@ -3831,6 +3912,7 @@ def test_patch_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_initialize_client_w_rest(): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_network_endpoint_groups.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_network_endpoint_groups.py index ef914d3162e5..8c7c6fbc31c5 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_network_endpoint_groups.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_network_endpoint_groups.py @@ -66,6 +66,13 @@ ) from google.cloud.compute_v1.types import compute +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -336,6 +343,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = NetworkEndpointGroupsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = NetworkEndpointGroupsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -4130,10 +4180,14 @@ def test_aggregated_list_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.NetworkEndpointGroupsRestInterceptor, "post_aggregated_list" ) as post, mock.patch.object( + transports.NetworkEndpointGroupsRestInterceptor, + "post_aggregated_list_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.NetworkEndpointGroupsRestInterceptor, "pre_aggregated_list" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.AggregatedListNetworkEndpointGroupsRequest.pb( compute.AggregatedListNetworkEndpointGroupsRequest() ) @@ -4159,6 +4213,10 @@ def test_aggregated_list_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.NetworkEndpointGroupAggregatedList() + post_with_metadata.return_value = ( + compute.NetworkEndpointGroupAggregatedList(), + metadata, + ) client.aggregated_list( request, @@ -4170,6 +4228,7 @@ def test_aggregated_list_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_attach_network_endpoints_rest_bad_request( @@ -4399,10 +4458,14 @@ def test_attach_network_endpoints_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.NetworkEndpointGroupsRestInterceptor, "post_attach_network_endpoints" ) as post, mock.patch.object( + transports.NetworkEndpointGroupsRestInterceptor, + "post_attach_network_endpoints_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.NetworkEndpointGroupsRestInterceptor, "pre_attach_network_endpoints" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.AttachNetworkEndpointsNetworkEndpointGroupRequest.pb( compute.AttachNetworkEndpointsNetworkEndpointGroupRequest() ) @@ -4426,6 +4489,7 @@ def test_attach_network_endpoints_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata 
client.attach_network_endpoints( request, @@ -4437,6 +4501,7 @@ def test_attach_network_endpoints_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_rest_bad_request( @@ -4571,10 +4636,13 @@ def test_delete_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.NetworkEndpointGroupsRestInterceptor, "post_delete" ) as post, mock.patch.object( + transports.NetworkEndpointGroupsRestInterceptor, "post_delete_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.NetworkEndpointGroupsRestInterceptor, "pre_delete" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.DeleteNetworkEndpointGroupRequest.pb( compute.DeleteNetworkEndpointGroupRequest() ) @@ -4598,6 +4666,7 @@ def test_delete_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.delete( request, @@ -4609,6 +4678,7 @@ def test_delete_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_detach_network_endpoints_rest_bad_request( @@ -4838,10 +4908,14 @@ def test_detach_network_endpoints_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.NetworkEndpointGroupsRestInterceptor, "post_detach_network_endpoints" ) as post, mock.patch.object( + transports.NetworkEndpointGroupsRestInterceptor, + "post_detach_network_endpoints_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.NetworkEndpointGroupsRestInterceptor, "pre_detach_network_endpoints" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.DetachNetworkEndpointsNetworkEndpointGroupRequest.pb( compute.DetachNetworkEndpointsNetworkEndpointGroupRequest() ) @@ -4865,6 +4939,7 @@ def test_detach_network_endpoints_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.detach_network_endpoints( request, @@ -4876,6 +4951,7 @@ def test_detach_network_endpoints_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_rest_bad_request(request_type=compute.GetNetworkEndpointGroupRequest): @@ -4992,10 +5068,13 @@ def test_get_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.NetworkEndpointGroupsRestInterceptor, "post_get" ) as post, mock.patch.object( + transports.NetworkEndpointGroupsRestInterceptor, "post_get_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.NetworkEndpointGroupsRestInterceptor, "pre_get" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.GetNetworkEndpointGroupRequest.pb( compute.GetNetworkEndpointGroupRequest() ) @@ -5021,6 +5100,7 @@ def test_get_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.NetworkEndpointGroup() + post_with_metadata.return_value = compute.NetworkEndpointGroup(), metadata client.get( request, @@ -5032,6 +5112,7 @@ def test_get_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + 
post_with_metadata.assert_called_once() def test_insert_rest_bad_request( @@ -5267,10 +5348,13 @@ def test_insert_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.NetworkEndpointGroupsRestInterceptor, "post_insert" ) as post, mock.patch.object( + transports.NetworkEndpointGroupsRestInterceptor, "post_insert_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.NetworkEndpointGroupsRestInterceptor, "pre_insert" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.InsertNetworkEndpointGroupRequest.pb( compute.InsertNetworkEndpointGroupRequest() ) @@ -5294,6 +5378,7 @@ def test_insert_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.insert( request, @@ -5305,6 +5390,7 @@ def test_insert_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_rest_bad_request(request_type=compute.ListNetworkEndpointGroupsRequest): @@ -5393,10 +5479,13 @@ def test_list_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.NetworkEndpointGroupsRestInterceptor, "post_list" ) as post, mock.patch.object( + transports.NetworkEndpointGroupsRestInterceptor, "post_list_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.NetworkEndpointGroupsRestInterceptor, "pre_list" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.ListNetworkEndpointGroupsRequest.pb( compute.ListNetworkEndpointGroupsRequest() ) @@ -5422,6 +5511,7 @@ def test_list_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.NetworkEndpointGroupList() + post_with_metadata.return_value = compute.NetworkEndpointGroupList(), metadata client.list( request, @@ -5433,6 +5523,7 @@ def test_list_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_network_endpoints_rest_bad_request( @@ -5616,10 +5707,14 @@ def test_list_network_endpoints_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.NetworkEndpointGroupsRestInterceptor, "post_list_network_endpoints" ) as post, mock.patch.object( + transports.NetworkEndpointGroupsRestInterceptor, + "post_list_network_endpoints_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.NetworkEndpointGroupsRestInterceptor, "pre_list_network_endpoints" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.ListNetworkEndpointsNetworkEndpointGroupsRequest.pb( compute.ListNetworkEndpointsNetworkEndpointGroupsRequest() ) @@ -5645,6 +5740,10 @@ def test_list_network_endpoints_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.NetworkEndpointGroupsListNetworkEndpoints() + post_with_metadata.return_value = ( + compute.NetworkEndpointGroupsListNetworkEndpoints(), + metadata, + ) client.list_network_endpoints( request, @@ -5656,6 +5755,7 @@ def test_list_network_endpoints_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_test_iam_permissions_rest_bad_request( @@ -5818,10 +5918,14 @@ def 
test_test_iam_permissions_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.NetworkEndpointGroupsRestInterceptor, "post_test_iam_permissions" ) as post, mock.patch.object( + transports.NetworkEndpointGroupsRestInterceptor, + "post_test_iam_permissions_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.NetworkEndpointGroupsRestInterceptor, "pre_test_iam_permissions" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.TestIamPermissionsNetworkEndpointGroupRequest.pb( compute.TestIamPermissionsNetworkEndpointGroupRequest() ) @@ -5847,6 +5951,7 @@ def test_test_iam_permissions_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.TestPermissionsResponse() + post_with_metadata.return_value = compute.TestPermissionsResponse(), metadata client.test_iam_permissions( request, @@ -5858,6 +5963,7 @@ def test_test_iam_permissions_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_initialize_client_w_rest(): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_network_firewall_policies.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_network_firewall_policies.py index 6938b9add120..6f35311e312b 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_network_firewall_policies.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_network_firewall_policies.py @@ -66,6 +66,13 @@ ) from google.cloud.compute_v1.types import compute +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -336,6 +343,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = NetworkFirewallPoliciesClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = NetworkFirewallPoliciesClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -1816,6 +1866,284 @@ def test_add_rule_unary_rest_flattened_error(transport: str = "rest"): ) +def test_aggregated_list_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.aggregated_list in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.aggregated_list] = mock_rpc + + request = {} + client.aggregated_list(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.aggregated_list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_aggregated_list_rest_required_fields( + request_type=compute.AggregatedListNetworkFirewallPoliciesRequest, +): + transport_class = transports.NetworkFirewallPoliciesRestTransport + + request_init = {} + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).aggregated_list._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = "project_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).aggregated_list._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "include_all_scopes", + "max_results", + "order_by", + "page_token", + "return_partial_success", + "service_project_number", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == "project_value" + + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.NetworkFirewallPolicyAggregatedList() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.NetworkFirewallPolicyAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.aggregated_list(request) + + expected_params = [] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_aggregated_list_rest_unset_required_fields(): + transport = transports.NetworkFirewallPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.aggregated_list._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "includeAllScopes", + "maxResults", + "orderBy", + "pageToken", + "returnPartialSuccess", + "serviceProjectNumber", + ) + ) + & set(("project",)) + ) + + +def test_aggregated_list_rest_flattened(): + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = compute.NetworkFirewallPolicyAggregatedList() + + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1"} + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.NetworkFirewallPolicyAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.aggregated_list(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/compute/v1/projects/{project}/aggregated/firewallPolicies" + % client.transport._host, + args[1], + ) + + +def test_aggregated_list_rest_flattened_error(transport: str = "rest"): + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.aggregated_list( + compute.AggregatedListNetworkFirewallPoliciesRequest(), + project="project_value", + ) + + +def test_aggregated_list_rest_pager(transport: str = "rest"): + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + compute.NetworkFirewallPolicyAggregatedList( + items={ + "a": compute.FirewallPoliciesScopedList(), + "b": compute.FirewallPoliciesScopedList(), + "c": compute.FirewallPoliciesScopedList(), + }, + next_page_token="abc", + ), + compute.NetworkFirewallPolicyAggregatedList( + items={}, + next_page_token="def", + ), + compute.NetworkFirewallPolicyAggregatedList( + items={ + "g": compute.FirewallPoliciesScopedList(), + }, + next_page_token="ghi", + ), + compute.NetworkFirewallPolicyAggregatedList( + items={ + "h": compute.FirewallPoliciesScopedList(), + "i": compute.FirewallPoliciesScopedList(), + }, + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + compute.NetworkFirewallPolicyAggregatedList.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"project": "sample1"} + + pager = client.aggregated_list(request=sample_request) + + assert isinstance(pager.get("a"), compute.FirewallPoliciesScopedList) + assert pager.get("h") is None + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, tuple) for i in results) + for result in results: + assert isinstance(result, tuple) + assert tuple(type(t) for t in result) == ( + str, + compute.FirewallPoliciesScopedList, + ) + + assert pager.get("a") is None + assert isinstance(pager.get("h"), compute.FirewallPoliciesScopedList) + + pages = list(client.aggregated_list(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + def test_clone_rules_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -6468,10 +6796,14 @@ def test_add_association_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.NetworkFirewallPoliciesRestInterceptor, "post_add_association" ) as post, mock.patch.object( + transports.NetworkFirewallPoliciesRestInterceptor, + "post_add_association_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.NetworkFirewallPoliciesRestInterceptor, "pre_add_association" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.AddAssociationNetworkFirewallPolicyRequest.pb( compute.AddAssociationNetworkFirewallPolicyRequest() ) @@ -6495,6 +6827,7 @@ def test_add_association_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.add_association( request, @@ -6506,6 +6839,7 @@ def test_add_association_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_add_rule_rest_bad_request( @@ -6760,10 +7094,13 @@ def test_add_rule_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.NetworkFirewallPoliciesRestInterceptor, 
"post_add_rule" ) as post, mock.patch.object( + transports.NetworkFirewallPoliciesRestInterceptor, "post_add_rule_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.NetworkFirewallPoliciesRestInterceptor, "pre_add_rule" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.AddRuleNetworkFirewallPolicyRequest.pb( compute.AddRuleNetworkFirewallPolicyRequest() ) @@ -6787,6 +7124,7 @@ def test_add_rule_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.add_rule( request, @@ -6798,6 +7136,148 @@ def test_add_rule_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_aggregated_list_rest_bad_request( + request_type=compute.AggregatedListNetworkFirewallPoliciesRequest, +): + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.aggregated_list(request) + + +@pytest.mark.parametrize( + "request_type", + [ + compute.AggregatedListNetworkFirewallPoliciesRequest, + dict, + ], +) +def test_aggregated_list_rest_call_success(request_type): + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = compute.NetworkFirewallPolicyAggregatedList( + id="id_value", + kind="kind_value", + next_page_token="next_page_token_value", + self_link="self_link_value", + unreachables=["unreachables_value"], + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.NetworkFirewallPolicyAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.aggregated_list(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.AggregatedListPager) + assert response.id == "id_value" + assert response.kind == "kind_value" + assert response.next_page_token == "next_page_token_value" + assert response.self_link == "self_link_value" + assert response.unreachables == ["unreachables_value"] + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_aggregated_list_rest_interceptors(null_interceptor): + transport = transports.NetworkFirewallPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.NetworkFirewallPoliciesRestInterceptor(), + ) + client = NetworkFirewallPoliciesClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.NetworkFirewallPoliciesRestInterceptor, "post_aggregated_list" + ) as post, mock.patch.object( + transports.NetworkFirewallPoliciesRestInterceptor, + "post_aggregated_list_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.NetworkFirewallPoliciesRestInterceptor, "pre_aggregated_list" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = compute.AggregatedListNetworkFirewallPoliciesRequest.pb( + compute.AggregatedListNetworkFirewallPoliciesRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = compute.NetworkFirewallPolicyAggregatedList.to_json( + compute.NetworkFirewallPolicyAggregatedList() + ) + req.return_value.content = return_value + + request = compute.AggregatedListNetworkFirewallPoliciesRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.NetworkFirewallPolicyAggregatedList() + post_with_metadata.return_value = ( + compute.NetworkFirewallPolicyAggregatedList(), + metadata, + ) + + client.aggregated_list( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() def test_clone_rules_rest_bad_request( @@ -6924,10 +7404,14 @@ def test_clone_rules_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.NetworkFirewallPoliciesRestInterceptor, "post_clone_rules" ) as post, mock.patch.object( + transports.NetworkFirewallPoliciesRestInterceptor, + "post_clone_rules_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.NetworkFirewallPoliciesRestInterceptor, "pre_clone_rules" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.CloneRulesNetworkFirewallPolicyRequest.pb( compute.CloneRulesNetworkFirewallPolicyRequest() ) @@ -6951,6 +7435,7 @@ def test_clone_rules_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.clone_rules( request, @@ -6962,6 +7447,7 @@ def test_clone_rules_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def 
test_delete_rest_bad_request( @@ -7088,10 +7574,13 @@ def test_delete_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.NetworkFirewallPoliciesRestInterceptor, "post_delete" ) as post, mock.patch.object( + transports.NetworkFirewallPoliciesRestInterceptor, "post_delete_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.NetworkFirewallPoliciesRestInterceptor, "pre_delete" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.DeleteNetworkFirewallPolicyRequest.pb( compute.DeleteNetworkFirewallPolicyRequest() ) @@ -7115,6 +7604,7 @@ def test_delete_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.delete( request, @@ -7126,6 +7616,7 @@ def test_delete_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_rest_bad_request(request_type=compute.GetNetworkFirewallPolicyRequest): @@ -7232,10 +7723,13 @@ def test_get_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.NetworkFirewallPoliciesRestInterceptor, "post_get" ) as post, mock.patch.object( + transports.NetworkFirewallPoliciesRestInterceptor, "post_get_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.NetworkFirewallPoliciesRestInterceptor, "pre_get" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.GetNetworkFirewallPolicyRequest.pb( compute.GetNetworkFirewallPolicyRequest() ) @@ -7259,6 +7753,7 @@ def test_get_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.FirewallPolicy() + post_with_metadata.return_value = compute.FirewallPolicy(), metadata client.get( request, @@ -7270,6 +7765,7 @@ def test_get_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_association_rest_bad_request( @@ -7362,10 +7858,14 @@ def test_get_association_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.NetworkFirewallPoliciesRestInterceptor, "post_get_association" ) as post, mock.patch.object( + transports.NetworkFirewallPoliciesRestInterceptor, + "post_get_association_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.NetworkFirewallPoliciesRestInterceptor, "pre_get_association" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.GetAssociationNetworkFirewallPolicyRequest.pb( compute.GetAssociationNetworkFirewallPolicyRequest() ) @@ -7391,6 +7891,7 @@ def test_get_association_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.FirewallPolicyAssociation() + post_with_metadata.return_value = compute.FirewallPolicyAssociation(), metadata client.get_association( request, @@ -7402,6 +7903,7 @@ def test_get_association_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_iam_policy_rest_bad_request( @@ -7490,10 +7992,14 @@ def test_get_iam_policy_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.NetworkFirewallPoliciesRestInterceptor, "post_get_iam_policy" ) as post, 
mock.patch.object( + transports.NetworkFirewallPoliciesRestInterceptor, + "post_get_iam_policy_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.NetworkFirewallPoliciesRestInterceptor, "pre_get_iam_policy" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.GetIamPolicyNetworkFirewallPolicyRequest.pb( compute.GetIamPolicyNetworkFirewallPolicyRequest() ) @@ -7517,6 +8023,7 @@ def test_get_iam_policy_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Policy() + post_with_metadata.return_value = compute.Policy(), metadata client.get_iam_policy( request, @@ -7528,6 +8035,7 @@ def test_get_iam_policy_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_rule_rest_bad_request( @@ -7636,10 +8144,13 @@ def test_get_rule_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.NetworkFirewallPoliciesRestInterceptor, "post_get_rule" ) as post, mock.patch.object( + transports.NetworkFirewallPoliciesRestInterceptor, "post_get_rule_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.NetworkFirewallPoliciesRestInterceptor, "pre_get_rule" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.GetRuleNetworkFirewallPolicyRequest.pb( compute.GetRuleNetworkFirewallPolicyRequest() ) @@ -7663,6 +8174,7 @@ def test_get_rule_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.FirewallPolicyRule() + post_with_metadata.return_value = compute.FirewallPolicyRule(), metadata client.get_rule( request, @@ -7674,6 +8186,7 @@ def test_get_rule_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_insert_rest_bad_request( @@ -7959,10 +8472,13 @@ def test_insert_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.NetworkFirewallPoliciesRestInterceptor, "post_insert" ) as post, mock.patch.object( + transports.NetworkFirewallPoliciesRestInterceptor, "post_insert_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.NetworkFirewallPoliciesRestInterceptor, "pre_insert" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.InsertNetworkFirewallPolicyRequest.pb( compute.InsertNetworkFirewallPolicyRequest() ) @@ -7986,6 +8502,7 @@ def test_insert_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.insert( request, @@ -7997,6 +8514,7 @@ def test_insert_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_rest_bad_request(request_type=compute.ListNetworkFirewallPoliciesRequest): @@ -8083,10 +8601,13 @@ def test_list_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.NetworkFirewallPoliciesRestInterceptor, "post_list" ) as post, mock.patch.object( + transports.NetworkFirewallPoliciesRestInterceptor, "post_list_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.NetworkFirewallPoliciesRestInterceptor, "pre_list" ) as pre: pre.assert_not_called() post.assert_not_called() 
+ post_with_metadata.assert_not_called() pb_message = compute.ListNetworkFirewallPoliciesRequest.pb( compute.ListNetworkFirewallPoliciesRequest() ) @@ -8110,6 +8631,7 @@ def test_list_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.FirewallPolicyList() + post_with_metadata.return_value = compute.FirewallPolicyList(), metadata client.list( request, @@ -8121,6 +8643,7 @@ def test_list_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_patch_rest_bad_request(request_type=compute.PatchNetworkFirewallPolicyRequest): @@ -8404,10 +8927,13 @@ def test_patch_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.NetworkFirewallPoliciesRestInterceptor, "post_patch" ) as post, mock.patch.object( + transports.NetworkFirewallPoliciesRestInterceptor, "post_patch_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.NetworkFirewallPoliciesRestInterceptor, "pre_patch" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.PatchNetworkFirewallPolicyRequest.pb( compute.PatchNetworkFirewallPolicyRequest() ) @@ -8431,6 +8957,7 @@ def test_patch_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.patch( request, @@ -8442,6 +8969,7 @@ def test_patch_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_patch_rule_rest_bad_request( @@ -8696,10 +9224,14 @@ def test_patch_rule_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.NetworkFirewallPoliciesRestInterceptor, "post_patch_rule" ) as post, mock.patch.object( + transports.NetworkFirewallPoliciesRestInterceptor, + "post_patch_rule_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.NetworkFirewallPoliciesRestInterceptor, "pre_patch_rule" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.PatchRuleNetworkFirewallPolicyRequest.pb( compute.PatchRuleNetworkFirewallPolicyRequest() ) @@ -8723,6 +9255,7 @@ def test_patch_rule_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.patch_rule( request, @@ -8734,6 +9267,7 @@ def test_patch_rule_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_remove_association_rest_bad_request( @@ -8860,10 +9394,14 @@ def test_remove_association_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.NetworkFirewallPoliciesRestInterceptor, "post_remove_association" ) as post, mock.patch.object( + transports.NetworkFirewallPoliciesRestInterceptor, + "post_remove_association_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.NetworkFirewallPoliciesRestInterceptor, "pre_remove_association" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.RemoveAssociationNetworkFirewallPolicyRequest.pb( compute.RemoveAssociationNetworkFirewallPolicyRequest() ) @@ -8887,6 +9425,7 @@ def 
test_remove_association_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.remove_association( request, @@ -8898,6 +9437,7 @@ def test_remove_association_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_remove_rule_rest_bad_request( @@ -9024,10 +9564,14 @@ def test_remove_rule_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.NetworkFirewallPoliciesRestInterceptor, "post_remove_rule" ) as post, mock.patch.object( + transports.NetworkFirewallPoliciesRestInterceptor, + "post_remove_rule_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.NetworkFirewallPoliciesRestInterceptor, "pre_remove_rule" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.RemoveRuleNetworkFirewallPolicyRequest.pb( compute.RemoveRuleNetworkFirewallPolicyRequest() ) @@ -9051,6 +9595,7 @@ def test_remove_rule_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.remove_rule( request, @@ -9062,6 +9607,7 @@ def test_remove_rule_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_set_iam_policy_rest_bad_request( @@ -9266,10 +9812,14 @@ def test_set_iam_policy_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.NetworkFirewallPoliciesRestInterceptor, "post_set_iam_policy" ) as post, mock.patch.object( + transports.NetworkFirewallPoliciesRestInterceptor, + "post_set_iam_policy_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.NetworkFirewallPoliciesRestInterceptor, "pre_set_iam_policy" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.SetIamPolicyNetworkFirewallPolicyRequest.pb( compute.SetIamPolicyNetworkFirewallPolicyRequest() ) @@ -9293,6 +9843,7 @@ def test_set_iam_policy_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Policy() + post_with_metadata.return_value = compute.Policy(), metadata client.set_iam_policy( request, @@ -9304,6 +9855,7 @@ def test_set_iam_policy_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_test_iam_permissions_rest_bad_request( @@ -9466,10 +10018,14 @@ def test_test_iam_permissions_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.NetworkFirewallPoliciesRestInterceptor, "post_test_iam_permissions" ) as post, mock.patch.object( + transports.NetworkFirewallPoliciesRestInterceptor, + "post_test_iam_permissions_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.NetworkFirewallPoliciesRestInterceptor, "pre_test_iam_permissions" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.TestIamPermissionsNetworkFirewallPolicyRequest.pb( compute.TestIamPermissionsNetworkFirewallPolicyRequest() ) @@ -9495,6 +10051,7 @@ def test_test_iam_permissions_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = 
compute.TestPermissionsResponse() + post_with_metadata.return_value = compute.TestPermissionsResponse(), metadata client.test_iam_permissions( request, @@ -9506,6 +10063,7 @@ def test_test_iam_permissions_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_initialize_client_w_rest(): @@ -9555,6 +10113,26 @@ def test_add_rule_unary_empty_call_rest(): assert args[0] == request_msg +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_aggregated_list_empty_call_rest(): + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.aggregated_list), "__call__") as call: + client.aggregated_list(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = compute.AggregatedListNetworkFirewallPoliciesRequest() + + assert args[0] == request_msg + + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. def test_clone_rules_unary_empty_call_rest(): @@ -9863,6 +10441,7 @@ def test_network_firewall_policies_base_transport(): methods = ( "add_association", "add_rule", + "aggregated_list", "clone_rules", "delete", "get", @@ -10020,6 +10599,9 @@ def test_network_firewall_policies_client_transport_session_collision(transport_ session1 = client1.transport.add_rule._session session2 = client2.transport.add_rule._session assert session1 != session2 + session1 = client1.transport.aggregated_list._session + session2 = client2.transport.aggregated_list._session + assert session1 != session2 session1 = client1.transport.clone_rules._session session2 = client2.transport.clone_rules._session assert session1 != session2 diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_network_profiles.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_network_profiles.py index cef89ee04c6f..51ceb5180458 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_network_profiles.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_network_profiles.py @@ -59,6 +59,13 @@ ) from google.cloud.compute_v1.types import compute +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -312,6 +319,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
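The additions that follow, repeated in each per-service test module touched by this diff, introduce CRED_INFO_JSON / CRED_INFO_STRING constants and parametrized tests for a private client helper, _add_cred_info_for_auth_errors. Read together, the assertions pin down the behaviour: the JSON-encoded result of the credentials' get_cred_info() is appended to error.details only for 401, 403 and 404 errors, and only when the credentials object actually exposes get_cred_info and it returns something non-empty. A minimal standalone sketch of that decision logic, written purely to restate what the tests assert (the real helper is a generated private method on the client that mutates the error in place):

    import json
    from typing import List, Optional, Sequence

    _AUTH_ERROR_CODES = (401, 403, 404)


    def details_with_cred_info(code: int, details: Sequence[str], credentials) -> List[str]:
        # Return `details`, extended with credential info for auth-style errors.
        if code not in _AUTH_ERROR_CODES:
            return list(details)
        get_cred_info = getattr(credentials, "get_cred_info", None)
        if get_cred_info is None:
            return list(details)
        cred_info: Optional[dict] = get_cred_info()
        if not cred_info:
            return list(details)
        # Matches the expected ["foo", CRED_INFO_STRING] shape in the tests.
        return list(details) + [json.dumps(cred_info)]

The 500 rows of the parametrization confirm that server-side errors are left untouched even when credential info is available.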
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = NetworkProfilesClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = NetworkProfilesClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -1535,10 +1585,13 @@ def test_get_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.NetworkProfilesRestInterceptor, "post_get" ) as post, mock.patch.object( + transports.NetworkProfilesRestInterceptor, "post_get_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.NetworkProfilesRestInterceptor, "pre_get" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.GetNetworkProfileRequest.pb( compute.GetNetworkProfileRequest() ) @@ -1562,6 +1615,7 @@ def test_get_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.NetworkProfile() + post_with_metadata.return_value = compute.NetworkProfile(), metadata client.get( request, @@ -1573,6 +1627,7 @@ def test_get_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_rest_bad_request(request_type=compute.ListNetworkProfilesRequest): @@ -1665,10 +1720,13 @@ def test_list_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.NetworkProfilesRestInterceptor, "post_list" ) as post, mock.patch.object( + transports.NetworkProfilesRestInterceptor, "post_list_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.NetworkProfilesRestInterceptor, "pre_list" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.ListNetworkProfilesRequest.pb( compute.ListNetworkProfilesRequest() ) @@ -1694,6 +1752,10 @@ def test_list_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.NetworkProfilesListResponse() + post_with_metadata.return_value = ( + compute.NetworkProfilesListResponse(), + metadata, + ) client.list( request, @@ -1705,6 +1767,7 @@ def test_list_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_initialize_client_w_rest(): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_networks.py 
b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_networks.py index 18c91606938f..d44bd59506b3 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_networks.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_networks.py @@ -62,6 +62,13 @@ from google.cloud.compute_v1.services.networks import NetworksClient, pagers, transports from google.cloud.compute_v1.types import compute +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -287,6 +294,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = NetworksClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = NetworksClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -4911,10 +4961,13 @@ def test_add_peering_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.NetworksRestInterceptor, "post_add_peering" ) as post, mock.patch.object( + transports.NetworksRestInterceptor, "post_add_peering_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.NetworksRestInterceptor, "pre_add_peering" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.AddPeeringNetworkRequest.pb( compute.AddPeeringNetworkRequest() ) @@ -4938,6 +4991,7 @@ def test_add_peering_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.add_peering( request, @@ -4949,6 +5003,7 @@ def test_add_peering_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_rest_bad_request(request_type=compute.DeleteNetworkRequest): @@ -5071,10 +5126,13 @@ def test_delete_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.NetworksRestInterceptor, "post_delete" ) as post, mock.patch.object( + transports.NetworksRestInterceptor, "post_delete_with_metadata" + ) 
as post_with_metadata, mock.patch.object( transports.NetworksRestInterceptor, "pre_delete" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.DeleteNetworkRequest.pb(compute.DeleteNetworkRequest()) transcode.return_value = { "method": "post", @@ -5096,6 +5154,7 @@ def test_delete_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.delete( request, @@ -5107,6 +5166,7 @@ def test_delete_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_rest_bad_request(request_type=compute.GetNetworkRequest): @@ -5222,10 +5282,13 @@ def test_get_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.NetworksRestInterceptor, "post_get" ) as post, mock.patch.object( + transports.NetworksRestInterceptor, "post_get_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.NetworksRestInterceptor, "pre_get" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.GetNetworkRequest.pb(compute.GetNetworkRequest()) transcode.return_value = { "method": "post", @@ -5247,6 +5310,7 @@ def test_get_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Network() + post_with_metadata.return_value = compute.Network(), metadata client.get( request, @@ -5258,6 +5322,7 @@ def test_get_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_effective_firewalls_rest_bad_request( @@ -5337,10 +5402,13 @@ def test_get_effective_firewalls_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.NetworksRestInterceptor, "post_get_effective_firewalls" ) as post, mock.patch.object( + transports.NetworksRestInterceptor, "post_get_effective_firewalls_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.NetworksRestInterceptor, "pre_get_effective_firewalls" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.GetEffectiveFirewallsNetworkRequest.pb( compute.GetEffectiveFirewallsNetworkRequest() ) @@ -5366,6 +5434,10 @@ def test_get_effective_firewalls_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.NetworksGetEffectiveFirewallsResponse() + post_with_metadata.return_value = ( + compute.NetworksGetEffectiveFirewallsResponse(), + metadata, + ) client.get_effective_firewalls( request, @@ -5377,6 +5449,7 @@ def test_get_effective_firewalls_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_insert_rest_bad_request(request_type=compute.InsertNetworkRequest): @@ -5451,6 +5524,8 @@ def test_insert_rest_call_success(request_type): "bgp_always_compare_med": True, "bgp_best_path_selection_mode": "bgp_best_path_selection_mode_value", "bgp_inter_region_cost": "bgp_inter_region_cost_value", + "effective_bgp_always_compare_med": True, + "effective_bgp_inter_region_cost": "effective_bgp_inter_region_cost_value", "routing_mode": "routing_mode_value", }, "self_link": "self_link_value", @@ -5607,10 +5682,13 @@ def test_insert_rest_interceptors(null_interceptor): ) as 
transcode, mock.patch.object( transports.NetworksRestInterceptor, "post_insert" ) as post, mock.patch.object( + transports.NetworksRestInterceptor, "post_insert_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.NetworksRestInterceptor, "pre_insert" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.InsertNetworkRequest.pb(compute.InsertNetworkRequest()) transcode.return_value = { "method": "post", @@ -5632,6 +5710,7 @@ def test_insert_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.insert( request, @@ -5643,6 +5722,7 @@ def test_insert_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_rest_bad_request(request_type=compute.ListNetworksRequest): @@ -5729,10 +5809,13 @@ def test_list_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.NetworksRestInterceptor, "post_list" ) as post, mock.patch.object( + transports.NetworksRestInterceptor, "post_list_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.NetworksRestInterceptor, "pre_list" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.ListNetworksRequest.pb(compute.ListNetworksRequest()) transcode.return_value = { "method": "post", @@ -5754,6 +5837,7 @@ def test_list_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.NetworkList() + post_with_metadata.return_value = compute.NetworkList(), metadata client.list( request, @@ -5765,6 +5849,7 @@ def test_list_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_peering_routes_rest_bad_request( @@ -5853,10 +5938,13 @@ def test_list_peering_routes_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.NetworksRestInterceptor, "post_list_peering_routes" ) as post, mock.patch.object( + transports.NetworksRestInterceptor, "post_list_peering_routes_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.NetworksRestInterceptor, "pre_list_peering_routes" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.ListPeeringRoutesNetworksRequest.pb( compute.ListPeeringRoutesNetworksRequest() ) @@ -5882,6 +5970,7 @@ def test_list_peering_routes_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.ExchangedPeeringRoutesList() + post_with_metadata.return_value = compute.ExchangedPeeringRoutesList(), metadata client.list_peering_routes( request, @@ -5893,6 +5982,7 @@ def test_list_peering_routes_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_patch_rest_bad_request(request_type=compute.PatchNetworkRequest): @@ -5967,6 +6057,8 @@ def test_patch_rest_call_success(request_type): "bgp_always_compare_med": True, "bgp_best_path_selection_mode": "bgp_best_path_selection_mode_value", "bgp_inter_region_cost": "bgp_inter_region_cost_value", + "effective_bgp_always_compare_med": True, + "effective_bgp_inter_region_cost": "effective_bgp_inter_region_cost_value", "routing_mode": 
"routing_mode_value", }, "self_link": "self_link_value", @@ -6123,10 +6215,13 @@ def test_patch_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.NetworksRestInterceptor, "post_patch" ) as post, mock.patch.object( + transports.NetworksRestInterceptor, "post_patch_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.NetworksRestInterceptor, "pre_patch" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.PatchNetworkRequest.pb(compute.PatchNetworkRequest()) transcode.return_value = { "method": "post", @@ -6148,6 +6243,7 @@ def test_patch_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.patch( request, @@ -6159,6 +6255,7 @@ def test_patch_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_remove_peering_rest_bad_request( @@ -6364,10 +6461,13 @@ def test_remove_peering_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.NetworksRestInterceptor, "post_remove_peering" ) as post, mock.patch.object( + transports.NetworksRestInterceptor, "post_remove_peering_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.NetworksRestInterceptor, "pre_remove_peering" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.RemovePeeringNetworkRequest.pb( compute.RemovePeeringNetworkRequest() ) @@ -6391,6 +6491,7 @@ def test_remove_peering_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.remove_peering( request, @@ -6402,6 +6503,7 @@ def test_remove_peering_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_switch_to_custom_mode_rest_bad_request( @@ -6526,10 +6628,13 @@ def test_switch_to_custom_mode_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.NetworksRestInterceptor, "post_switch_to_custom_mode" ) as post, mock.patch.object( + transports.NetworksRestInterceptor, "post_switch_to_custom_mode_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.NetworksRestInterceptor, "pre_switch_to_custom_mode" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.SwitchToCustomModeNetworkRequest.pb( compute.SwitchToCustomModeNetworkRequest() ) @@ -6553,6 +6658,7 @@ def test_switch_to_custom_mode_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.switch_to_custom_mode( request, @@ -6564,6 +6670,7 @@ def test_switch_to_custom_mode_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_peering_rest_bad_request( @@ -6784,10 +6891,13 @@ def test_update_peering_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.NetworksRestInterceptor, "post_update_peering" ) as post, mock.patch.object( + transports.NetworksRestInterceptor, "post_update_peering_with_metadata" + ) as 
post_with_metadata, mock.patch.object( transports.NetworksRestInterceptor, "pre_update_peering" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.UpdatePeeringNetworkRequest.pb( compute.UpdatePeeringNetworkRequest() ) @@ -6811,6 +6921,7 @@ def test_update_peering_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.update_peering( request, @@ -6822,6 +6933,7 @@ def test_update_peering_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_initialize_client_w_rest(): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_node_groups.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_node_groups.py index 68be93a84380..dd10bdf9bd3f 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_node_groups.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_node_groups.py @@ -66,6 +66,13 @@ ) from google.cloud.compute_v1.types import compute +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -296,6 +303,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = NodeGroupsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = NodeGroupsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -6314,10 +6364,13 @@ def test_add_nodes_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.NodeGroupsRestInterceptor, "post_add_nodes" ) as post, mock.patch.object( + transports.NodeGroupsRestInterceptor, "post_add_nodes_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.NodeGroupsRestInterceptor, "pre_add_nodes" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.AddNodesNodeGroupRequest.pb( 
compute.AddNodesNodeGroupRequest() ) @@ -6341,6 +6394,7 @@ def test_add_nodes_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.add_nodes( request, @@ -6352,6 +6406,7 @@ def test_add_nodes_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_aggregated_list_rest_bad_request( @@ -6444,10 +6499,13 @@ def test_aggregated_list_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.NodeGroupsRestInterceptor, "post_aggregated_list" ) as post, mock.patch.object( + transports.NodeGroupsRestInterceptor, "post_aggregated_list_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.NodeGroupsRestInterceptor, "pre_aggregated_list" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.AggregatedListNodeGroupsRequest.pb( compute.AggregatedListNodeGroupsRequest() ) @@ -6473,6 +6531,7 @@ def test_aggregated_list_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.NodeGroupAggregatedList() + post_with_metadata.return_value = compute.NodeGroupAggregatedList(), metadata client.aggregated_list( request, @@ -6484,6 +6543,7 @@ def test_aggregated_list_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_rest_bad_request(request_type=compute.DeleteNodeGroupRequest): @@ -6608,10 +6668,13 @@ def test_delete_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.NodeGroupsRestInterceptor, "post_delete" ) as post, mock.patch.object( + transports.NodeGroupsRestInterceptor, "post_delete_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.NodeGroupsRestInterceptor, "pre_delete" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.DeleteNodeGroupRequest.pb(compute.DeleteNodeGroupRequest()) transcode.return_value = { "method": "post", @@ -6633,6 +6696,7 @@ def test_delete_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.delete( request, @@ -6644,6 +6708,7 @@ def test_delete_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_nodes_rest_bad_request( @@ -6853,10 +6918,13 @@ def test_delete_nodes_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.NodeGroupsRestInterceptor, "post_delete_nodes" ) as post, mock.patch.object( + transports.NodeGroupsRestInterceptor, "post_delete_nodes_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.NodeGroupsRestInterceptor, "pre_delete_nodes" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.DeleteNodesNodeGroupRequest.pb( compute.DeleteNodesNodeGroupRequest() ) @@ -6880,6 +6948,7 @@ def test_delete_nodes_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.delete_nodes( request, @@ -6891,6 +6960,7 @@ def 
test_delete_nodes_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_rest_bad_request(request_type=compute.GetNodeGroupRequest): @@ -6999,10 +7069,13 @@ def test_get_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.NodeGroupsRestInterceptor, "post_get" ) as post, mock.patch.object( + transports.NodeGroupsRestInterceptor, "post_get_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.NodeGroupsRestInterceptor, "pre_get" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.GetNodeGroupRequest.pb(compute.GetNodeGroupRequest()) transcode.return_value = { "method": "post", @@ -7024,6 +7097,7 @@ def test_get_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.NodeGroup() + post_with_metadata.return_value = compute.NodeGroup(), metadata client.get( request, @@ -7035,6 +7109,7 @@ def test_get_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_iam_policy_rest_bad_request( @@ -7123,10 +7198,13 @@ def test_get_iam_policy_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.NodeGroupsRestInterceptor, "post_get_iam_policy" ) as post, mock.patch.object( + transports.NodeGroupsRestInterceptor, "post_get_iam_policy_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.NodeGroupsRestInterceptor, "pre_get_iam_policy" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.GetIamPolicyNodeGroupRequest.pb( compute.GetIamPolicyNodeGroupRequest() ) @@ -7150,6 +7228,7 @@ def test_get_iam_policy_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Policy() + post_with_metadata.return_value = compute.Policy(), metadata client.get_iam_policy( request, @@ -7161,6 +7240,7 @@ def test_get_iam_policy_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_insert_rest_bad_request(request_type=compute.InsertNodeGroupRequest): @@ -7378,10 +7458,13 @@ def test_insert_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.NodeGroupsRestInterceptor, "post_insert" ) as post, mock.patch.object( + transports.NodeGroupsRestInterceptor, "post_insert_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.NodeGroupsRestInterceptor, "pre_insert" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.InsertNodeGroupRequest.pb(compute.InsertNodeGroupRequest()) transcode.return_value = { "method": "post", @@ -7403,6 +7486,7 @@ def test_insert_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.insert( request, @@ -7414,6 +7498,7 @@ def test_insert_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_rest_bad_request(request_type=compute.ListNodeGroupsRequest): @@ -7502,10 +7587,13 @@ def test_list_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.NodeGroupsRestInterceptor, 
"post_list" ) as post, mock.patch.object( + transports.NodeGroupsRestInterceptor, "post_list_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.NodeGroupsRestInterceptor, "pre_list" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.ListNodeGroupsRequest.pb(compute.ListNodeGroupsRequest()) transcode.return_value = { "method": "post", @@ -7527,6 +7615,7 @@ def test_list_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.NodeGroupList() + post_with_metadata.return_value = compute.NodeGroupList(), metadata client.list( request, @@ -7538,6 +7627,7 @@ def test_list_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_nodes_rest_bad_request(request_type=compute.ListNodesNodeGroupsRequest): @@ -7626,10 +7716,13 @@ def test_list_nodes_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.NodeGroupsRestInterceptor, "post_list_nodes" ) as post, mock.patch.object( + transports.NodeGroupsRestInterceptor, "post_list_nodes_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.NodeGroupsRestInterceptor, "pre_list_nodes" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.ListNodesNodeGroupsRequest.pb( compute.ListNodesNodeGroupsRequest() ) @@ -7655,6 +7748,7 @@ def test_list_nodes_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.NodeGroupsListNodes() + post_with_metadata.return_value = compute.NodeGroupsListNodes(), metadata client.list_nodes( request, @@ -7666,6 +7760,7 @@ def test_list_nodes_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_patch_rest_bad_request(request_type=compute.PatchNodeGroupRequest): @@ -7883,10 +7978,13 @@ def test_patch_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.NodeGroupsRestInterceptor, "post_patch" ) as post, mock.patch.object( + transports.NodeGroupsRestInterceptor, "post_patch_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.NodeGroupsRestInterceptor, "pre_patch" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.PatchNodeGroupRequest.pb(compute.PatchNodeGroupRequest()) transcode.return_value = { "method": "post", @@ -7908,6 +8006,7 @@ def test_patch_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.patch( request, @@ -7919,6 +8018,7 @@ def test_patch_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_perform_maintenance_rest_bad_request( @@ -8131,10 +8231,13 @@ def test_perform_maintenance_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.NodeGroupsRestInterceptor, "post_perform_maintenance" ) as post, mock.patch.object( + transports.NodeGroupsRestInterceptor, "post_perform_maintenance_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.NodeGroupsRestInterceptor, "pre_perform_maintenance" ) as pre: pre.assert_not_called() post.assert_not_called() + 
post_with_metadata.assert_not_called() pb_message = compute.PerformMaintenanceNodeGroupRequest.pb( compute.PerformMaintenanceNodeGroupRequest() ) @@ -8158,6 +8261,7 @@ def test_perform_maintenance_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.perform_maintenance( request, @@ -8169,6 +8273,7 @@ def test_perform_maintenance_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_set_iam_policy_rest_bad_request( @@ -8373,10 +8478,13 @@ def test_set_iam_policy_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.NodeGroupsRestInterceptor, "post_set_iam_policy" ) as post, mock.patch.object( + transports.NodeGroupsRestInterceptor, "post_set_iam_policy_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.NodeGroupsRestInterceptor, "pre_set_iam_policy" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.SetIamPolicyNodeGroupRequest.pb( compute.SetIamPolicyNodeGroupRequest() ) @@ -8400,6 +8508,7 @@ def test_set_iam_policy_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Policy() + post_with_metadata.return_value = compute.Policy(), metadata client.set_iam_policy( request, @@ -8411,6 +8520,7 @@ def test_set_iam_policy_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_set_node_template_rest_bad_request( @@ -8622,10 +8732,13 @@ def test_set_node_template_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.NodeGroupsRestInterceptor, "post_set_node_template" ) as post, mock.patch.object( + transports.NodeGroupsRestInterceptor, "post_set_node_template_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.NodeGroupsRestInterceptor, "pre_set_node_template" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.SetNodeTemplateNodeGroupRequest.pb( compute.SetNodeTemplateNodeGroupRequest() ) @@ -8649,6 +8762,7 @@ def test_set_node_template_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.set_node_template( request, @@ -8660,6 +8774,7 @@ def test_set_node_template_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_simulate_maintenance_event_rest_bad_request( @@ -8871,10 +8986,14 @@ def test_simulate_maintenance_event_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.NodeGroupsRestInterceptor, "post_simulate_maintenance_event" ) as post, mock.patch.object( + transports.NodeGroupsRestInterceptor, + "post_simulate_maintenance_event_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.NodeGroupsRestInterceptor, "pre_simulate_maintenance_event" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.SimulateMaintenanceEventNodeGroupRequest.pb( compute.SimulateMaintenanceEventNodeGroupRequest() ) @@ -8898,6 +9017,7 @@ def 
test_simulate_maintenance_event_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.simulate_maintenance_event( request, @@ -8909,6 +9029,7 @@ def test_simulate_maintenance_event_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_test_iam_permissions_rest_bad_request( @@ -9071,10 +9192,13 @@ def test_test_iam_permissions_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.NodeGroupsRestInterceptor, "post_test_iam_permissions" ) as post, mock.patch.object( + transports.NodeGroupsRestInterceptor, "post_test_iam_permissions_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.NodeGroupsRestInterceptor, "pre_test_iam_permissions" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.TestIamPermissionsNodeGroupRequest.pb( compute.TestIamPermissionsNodeGroupRequest() ) @@ -9100,6 +9224,7 @@ def test_test_iam_permissions_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.TestPermissionsResponse() + post_with_metadata.return_value = compute.TestPermissionsResponse(), metadata client.test_iam_permissions( request, @@ -9111,6 +9236,7 @@ def test_test_iam_permissions_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_initialize_client_w_rest(): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_node_templates.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_node_templates.py index 7e3af94e8164..73e34e42334c 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_node_templates.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_node_templates.py @@ -66,6 +66,13 @@ ) from google.cloud.compute_v1.types import compute +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -315,6 +322,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
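All of these interceptor tests drive the hooks through a REST transport whose interceptor can be swapped out (hence the null_interceptor parameter in the test names). For orientation, a sketch of how an application would attach an interceptor of its own; the interceptor constructor argument and transport class name are assumed from the generated API rather than shown in these hunks:

    from google.auth import credentials as ga_credentials
    from google.cloud.compute_v1.services.node_groups import NodeGroupsClient, transports


    class AuditingInterceptor(transports.NodeGroupsRestInterceptor):
        def post_list_with_metadata(self, response, metadata):
            # Inspect (or rewrite) the response and its metadata before the
            # client hands them back to the caller.
            return response, metadata


    transport = transports.NodeGroupsRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
        interceptor=AuditingInterceptor(),
    )
    client = NodeGroupsClient(transport=transport)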
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = NodeTemplatesClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = NodeTemplatesClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -3275,10 +3325,13 @@ def test_aggregated_list_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.NodeTemplatesRestInterceptor, "post_aggregated_list" ) as post, mock.patch.object( + transports.NodeTemplatesRestInterceptor, "post_aggregated_list_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.NodeTemplatesRestInterceptor, "pre_aggregated_list" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.AggregatedListNodeTemplatesRequest.pb( compute.AggregatedListNodeTemplatesRequest() ) @@ -3304,6 +3357,7 @@ def test_aggregated_list_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.NodeTemplateAggregatedList() + post_with_metadata.return_value = compute.NodeTemplateAggregatedList(), metadata client.aggregated_list( request, @@ -3315,6 +3369,7 @@ def test_aggregated_list_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_rest_bad_request(request_type=compute.DeleteNodeTemplateRequest): @@ -3447,10 +3502,13 @@ def test_delete_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.NodeTemplatesRestInterceptor, "post_delete" ) as post, mock.patch.object( + transports.NodeTemplatesRestInterceptor, "post_delete_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.NodeTemplatesRestInterceptor, "pre_delete" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.DeleteNodeTemplateRequest.pb( compute.DeleteNodeTemplateRequest() ) @@ -3474,6 +3532,7 @@ def test_delete_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.delete( request, @@ -3485,6 +3544,7 @@ def test_delete_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def 
test_get_rest_bad_request(request_type=compute.GetNodeTemplateRequest): @@ -3595,10 +3655,13 @@ def test_get_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.NodeTemplatesRestInterceptor, "post_get" ) as post, mock.patch.object( + transports.NodeTemplatesRestInterceptor, "post_get_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.NodeTemplatesRestInterceptor, "pre_get" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.GetNodeTemplateRequest.pb(compute.GetNodeTemplateRequest()) transcode.return_value = { "method": "post", @@ -3620,6 +3683,7 @@ def test_get_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.NodeTemplate() + post_with_metadata.return_value = compute.NodeTemplate(), metadata client.get( request, @@ -3631,6 +3695,7 @@ def test_get_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_iam_policy_rest_bad_request( @@ -3719,10 +3784,13 @@ def test_get_iam_policy_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.NodeTemplatesRestInterceptor, "post_get_iam_policy" ) as post, mock.patch.object( + transports.NodeTemplatesRestInterceptor, "post_get_iam_policy_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.NodeTemplatesRestInterceptor, "pre_get_iam_policy" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.GetIamPolicyNodeTemplateRequest.pb( compute.GetIamPolicyNodeTemplateRequest() ) @@ -3746,6 +3814,7 @@ def test_get_iam_policy_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Policy() + post_with_metadata.return_value = compute.Policy(), metadata client.get_iam_policy( request, @@ -3757,6 +3826,7 @@ def test_get_iam_policy_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_insert_rest_bad_request(request_type=compute.InsertNodeTemplateRequest): @@ -3976,10 +4046,13 @@ def test_insert_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.NodeTemplatesRestInterceptor, "post_insert" ) as post, mock.patch.object( + transports.NodeTemplatesRestInterceptor, "post_insert_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.NodeTemplatesRestInterceptor, "pre_insert" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.InsertNodeTemplateRequest.pb( compute.InsertNodeTemplateRequest() ) @@ -4003,6 +4076,7 @@ def test_insert_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.insert( request, @@ -4014,6 +4088,7 @@ def test_insert_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_rest_bad_request(request_type=compute.ListNodeTemplatesRequest): @@ -4102,10 +4177,13 @@ def test_list_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.NodeTemplatesRestInterceptor, "post_list" ) as post, mock.patch.object( + transports.NodeTemplatesRestInterceptor, "post_list_with_metadata" + ) as post_with_metadata, 
mock.patch.object( transports.NodeTemplatesRestInterceptor, "pre_list" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.ListNodeTemplatesRequest.pb( compute.ListNodeTemplatesRequest() ) @@ -4129,6 +4207,7 @@ def test_list_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.NodeTemplateList() + post_with_metadata.return_value = compute.NodeTemplateList(), metadata client.list( request, @@ -4140,6 +4219,7 @@ def test_list_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_set_iam_policy_rest_bad_request( @@ -4344,10 +4424,13 @@ def test_set_iam_policy_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.NodeTemplatesRestInterceptor, "post_set_iam_policy" ) as post, mock.patch.object( + transports.NodeTemplatesRestInterceptor, "post_set_iam_policy_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.NodeTemplatesRestInterceptor, "pre_set_iam_policy" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.SetIamPolicyNodeTemplateRequest.pb( compute.SetIamPolicyNodeTemplateRequest() ) @@ -4371,6 +4454,7 @@ def test_set_iam_policy_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Policy() + post_with_metadata.return_value = compute.Policy(), metadata client.set_iam_policy( request, @@ -4382,6 +4466,7 @@ def test_set_iam_policy_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_test_iam_permissions_rest_bad_request( @@ -4544,10 +4629,14 @@ def test_test_iam_permissions_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.NodeTemplatesRestInterceptor, "post_test_iam_permissions" ) as post, mock.patch.object( + transports.NodeTemplatesRestInterceptor, + "post_test_iam_permissions_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.NodeTemplatesRestInterceptor, "pre_test_iam_permissions" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.TestIamPermissionsNodeTemplateRequest.pb( compute.TestIamPermissionsNodeTemplateRequest() ) @@ -4573,6 +4662,7 @@ def test_test_iam_permissions_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.TestPermissionsResponse() + post_with_metadata.return_value = compute.TestPermissionsResponse(), metadata client.test_iam_permissions( request, @@ -4584,6 +4674,7 @@ def test_test_iam_permissions_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_initialize_client_w_rest(): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_node_types.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_node_types.py index defaf1b6df74..20d947629a32 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_node_types.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_node_types.py @@ -59,6 +59,13 @@ ) from google.cloud.compute_v1.types import compute +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": 
"service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -287,6 +294,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = NodeTypesClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = NodeTypesClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -1781,10 +1831,13 @@ def test_aggregated_list_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.NodeTypesRestInterceptor, "post_aggregated_list" ) as post, mock.patch.object( + transports.NodeTypesRestInterceptor, "post_aggregated_list_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.NodeTypesRestInterceptor, "pre_aggregated_list" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.AggregatedListNodeTypesRequest.pb( compute.AggregatedListNodeTypesRequest() ) @@ -1810,6 +1863,7 @@ def test_aggregated_list_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.NodeTypeAggregatedList() + post_with_metadata.return_value = compute.NodeTypeAggregatedList(), metadata client.aggregated_list( request, @@ -1821,6 +1875,7 @@ def test_aggregated_list_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_rest_bad_request(request_type=compute.GetNodeTypeRequest): @@ -1921,10 +1976,13 @@ def test_get_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.NodeTypesRestInterceptor, "post_get" ) as post, mock.patch.object( + transports.NodeTypesRestInterceptor, "post_get_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.NodeTypesRestInterceptor, "pre_get" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.GetNodeTypeRequest.pb(compute.GetNodeTypeRequest()) transcode.return_value = { "method": "post", @@ -1946,6 +2004,7 @@ def test_get_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.NodeType() + post_with_metadata.return_value = 
compute.NodeType(), metadata client.get( request, @@ -1957,6 +2016,7 @@ def test_get_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_rest_bad_request(request_type=compute.ListNodeTypesRequest): @@ -2043,10 +2103,13 @@ def test_list_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.NodeTypesRestInterceptor, "post_list" ) as post, mock.patch.object( + transports.NodeTypesRestInterceptor, "post_list_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.NodeTypesRestInterceptor, "pre_list" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.ListNodeTypesRequest.pb(compute.ListNodeTypesRequest()) transcode.return_value = { "method": "post", @@ -2068,6 +2131,7 @@ def test_list_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.NodeTypeList() + post_with_metadata.return_value = compute.NodeTypeList(), metadata client.list( request, @@ -2079,6 +2143,7 @@ def test_list_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_initialize_client_w_rest(): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_packet_mirrorings.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_packet_mirrorings.py index f7803ca9cf6a..1e03ef3326b9 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_packet_mirrorings.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_packet_mirrorings.py @@ -66,6 +66,13 @@ ) from google.cloud.compute_v1.types import compute +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -320,6 +327,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
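Note on the test__add_cred_info_for_auth_errors blocks added above and repeated just below for PacketMirroringsClient (and for every other client touched by this patch): they all pin down one rule. When a call fails with 401, 403, or 404 and the credentials object exposes get_cred_info(), the JSON-serialized credential info is appended to error.details; a 500, a missing get_cred_info(), or a None return leaves the details untouched. The sketch below only restates that rule for readability; the real helper is the private _add_cred_info_for_auth_errors method on the generated clients, so the free function, its name, and its internals here are illustrative assumptions rather than the shipped implementation.

    # Illustrative sketch (not the generated implementation) of the rule the
    # parametrized tests assert for client._add_cred_info_for_auth_errors().
    import json

    AUTH_ERROR_CODES = (401, 403, 404)  # a 500 must leave details unchanged

    def details_with_cred_info(error_code, details, credentials):
        """Return `details`, extended with credential info when appropriate."""
        # Credentials without a get_cred_info() method contribute nothing;
        # the second test builds exactly such a credential with mock.Mock([]).
        if not hasattr(credentials, "get_cred_info"):
            return details
        cred_info = credentials.get_cred_info()
        # get_cred_info() may also return None (the cred_info_json=None cases).
        if cred_info is None or error_code not in AUTH_ERROR_CODES:
            return details
        return details + [json.dumps(cred_info)]

Under that reading, details_with_cred_info(401, ["foo"], cred) yields ["foo", CRED_INFO_STRING] while details_with_cred_info(500, ["foo"], cred) yields ["foo"], matching the show_cred_info column of the parametrize table.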
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = PacketMirroringsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = PacketMirroringsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -3331,10 +3381,13 @@ def test_aggregated_list_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.PacketMirroringsRestInterceptor, "post_aggregated_list" ) as post, mock.patch.object( + transports.PacketMirroringsRestInterceptor, "post_aggregated_list_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.PacketMirroringsRestInterceptor, "pre_aggregated_list" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.AggregatedListPacketMirroringsRequest.pb( compute.AggregatedListPacketMirroringsRequest() ) @@ -3360,6 +3413,10 @@ def test_aggregated_list_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.PacketMirroringAggregatedList() + post_with_metadata.return_value = ( + compute.PacketMirroringAggregatedList(), + metadata, + ) client.aggregated_list( request, @@ -3371,6 +3428,7 @@ def test_aggregated_list_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_rest_bad_request(request_type=compute.DeletePacketMirroringRequest): @@ -3503,10 +3561,13 @@ def test_delete_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.PacketMirroringsRestInterceptor, "post_delete" ) as post, mock.patch.object( + transports.PacketMirroringsRestInterceptor, "post_delete_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.PacketMirroringsRestInterceptor, "pre_delete" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.DeletePacketMirroringRequest.pb( compute.DeletePacketMirroringRequest() ) @@ -3530,6 +3591,7 @@ def test_delete_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.delete( request, @@ -3541,6 +3603,7 @@ def test_delete_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + 
post_with_metadata.assert_called_once() def test_get_rest_bad_request(request_type=compute.GetPacketMirroringRequest): @@ -3647,10 +3710,13 @@ def test_get_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.PacketMirroringsRestInterceptor, "post_get" ) as post, mock.patch.object( + transports.PacketMirroringsRestInterceptor, "post_get_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.PacketMirroringsRestInterceptor, "pre_get" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.GetPacketMirroringRequest.pb( compute.GetPacketMirroringRequest() ) @@ -3674,6 +3740,7 @@ def test_get_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.PacketMirroring() + post_with_metadata.return_value = compute.PacketMirroring(), metadata client.get( request, @@ -3685,6 +3752,7 @@ def test_get_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_insert_rest_bad_request(request_type=compute.InsertPacketMirroringRequest): @@ -3907,10 +3975,13 @@ def test_insert_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.PacketMirroringsRestInterceptor, "post_insert" ) as post, mock.patch.object( + transports.PacketMirroringsRestInterceptor, "post_insert_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.PacketMirroringsRestInterceptor, "pre_insert" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.InsertPacketMirroringRequest.pb( compute.InsertPacketMirroringRequest() ) @@ -3934,6 +4005,7 @@ def test_insert_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.insert( request, @@ -3945,6 +4017,7 @@ def test_insert_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_rest_bad_request(request_type=compute.ListPacketMirroringsRequest): @@ -4033,10 +4106,13 @@ def test_list_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.PacketMirroringsRestInterceptor, "post_list" ) as post, mock.patch.object( + transports.PacketMirroringsRestInterceptor, "post_list_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.PacketMirroringsRestInterceptor, "pre_list" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.ListPacketMirroringsRequest.pb( compute.ListPacketMirroringsRequest() ) @@ -4062,6 +4138,7 @@ def test_list_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.PacketMirroringList() + post_with_metadata.return_value = compute.PacketMirroringList(), metadata client.list( request, @@ -4073,6 +4150,7 @@ def test_list_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_patch_rest_bad_request(request_type=compute.PatchPacketMirroringRequest): @@ -4303,10 +4381,13 @@ def test_patch_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.PacketMirroringsRestInterceptor, "post_patch" ) as post, mock.patch.object( + transports.PacketMirroringsRestInterceptor, 
"post_patch_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.PacketMirroringsRestInterceptor, "pre_patch" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.PatchPacketMirroringRequest.pb( compute.PatchPacketMirroringRequest() ) @@ -4330,6 +4411,7 @@ def test_patch_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.patch( request, @@ -4341,6 +4423,7 @@ def test_patch_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_test_iam_permissions_rest_bad_request( @@ -4503,10 +4586,14 @@ def test_test_iam_permissions_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.PacketMirroringsRestInterceptor, "post_test_iam_permissions" ) as post, mock.patch.object( + transports.PacketMirroringsRestInterceptor, + "post_test_iam_permissions_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.PacketMirroringsRestInterceptor, "pre_test_iam_permissions" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.TestIamPermissionsPacketMirroringRequest.pb( compute.TestIamPermissionsPacketMirroringRequest() ) @@ -4532,6 +4619,7 @@ def test_test_iam_permissions_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.TestPermissionsResponse() + post_with_metadata.return_value = compute.TestPermissionsResponse(), metadata client.test_iam_permissions( request, @@ -4543,6 +4631,7 @@ def test_test_iam_permissions_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_initialize_client_w_rest(): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_projects.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_projects.py index 8709306363ab..307ce38f7c2f 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_projects.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_projects.py @@ -62,6 +62,13 @@ from google.cloud.compute_v1.services.projects import ProjectsClient, pagers, transports from google.cloud.compute_v1.types import compute +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -287,6 +294,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = ProjectsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = ProjectsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -5918,10 +5968,13 @@ def test_disable_xpn_host_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ProjectsRestInterceptor, "post_disable_xpn_host" ) as post, mock.patch.object( + transports.ProjectsRestInterceptor, "post_disable_xpn_host_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ProjectsRestInterceptor, "pre_disable_xpn_host" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.DisableXpnHostProjectRequest.pb( compute.DisableXpnHostProjectRequest() ) @@ -5945,6 +5998,7 @@ def test_disable_xpn_host_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.disable_xpn_host( request, @@ -5956,6 +6010,7 @@ def test_disable_xpn_host_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_disable_xpn_resource_rest_bad_request( @@ -6165,10 +6220,13 @@ def test_disable_xpn_resource_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ProjectsRestInterceptor, "post_disable_xpn_resource" ) as post, mock.patch.object( + transports.ProjectsRestInterceptor, "post_disable_xpn_resource_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ProjectsRestInterceptor, "pre_disable_xpn_resource" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.DisableXpnResourceProjectRequest.pb( compute.DisableXpnResourceProjectRequest() ) @@ -6192,6 +6250,7 @@ def test_disable_xpn_resource_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.disable_xpn_resource( request, @@ -6203,6 +6262,7 @@ def test_disable_xpn_resource_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_enable_xpn_host_rest_bad_request( @@ 
-6327,10 +6387,13 @@ def test_enable_xpn_host_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ProjectsRestInterceptor, "post_enable_xpn_host" ) as post, mock.patch.object( + transports.ProjectsRestInterceptor, "post_enable_xpn_host_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ProjectsRestInterceptor, "pre_enable_xpn_host" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.EnableXpnHostProjectRequest.pb( compute.EnableXpnHostProjectRequest() ) @@ -6354,6 +6417,7 @@ def test_enable_xpn_host_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.enable_xpn_host( request, @@ -6365,6 +6429,7 @@ def test_enable_xpn_host_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_enable_xpn_resource_rest_bad_request( @@ -6574,10 +6639,13 @@ def test_enable_xpn_resource_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ProjectsRestInterceptor, "post_enable_xpn_resource" ) as post, mock.patch.object( + transports.ProjectsRestInterceptor, "post_enable_xpn_resource_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ProjectsRestInterceptor, "pre_enable_xpn_resource" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.EnableXpnResourceProjectRequest.pb( compute.EnableXpnResourceProjectRequest() ) @@ -6601,6 +6669,7 @@ def test_enable_xpn_resource_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.enable_xpn_resource( request, @@ -6612,6 +6681,7 @@ def test_enable_xpn_resource_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_rest_bad_request(request_type=compute.GetProjectRequest): @@ -6714,10 +6784,13 @@ def test_get_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ProjectsRestInterceptor, "post_get" ) as post, mock.patch.object( + transports.ProjectsRestInterceptor, "post_get_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ProjectsRestInterceptor, "pre_get" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.GetProjectRequest.pb(compute.GetProjectRequest()) transcode.return_value = { "method": "post", @@ -6739,6 +6812,7 @@ def test_get_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Project() + post_with_metadata.return_value = compute.Project(), metadata client.get( request, @@ -6750,6 +6824,7 @@ def test_get_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_xpn_host_rest_bad_request(request_type=compute.GetXpnHostProjectRequest): @@ -6852,10 +6927,13 @@ def test_get_xpn_host_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ProjectsRestInterceptor, "post_get_xpn_host" ) as post, mock.patch.object( + transports.ProjectsRestInterceptor, "post_get_xpn_host_with_metadata" + ) as post_with_metadata, 
mock.patch.object( transports.ProjectsRestInterceptor, "pre_get_xpn_host" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.GetXpnHostProjectRequest.pb( compute.GetXpnHostProjectRequest() ) @@ -6879,6 +6957,7 @@ def test_get_xpn_host_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Project() + post_with_metadata.return_value = compute.Project(), metadata client.get_xpn_host( request, @@ -6890,6 +6969,7 @@ def test_get_xpn_host_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_xpn_resources_rest_bad_request( @@ -6974,10 +7054,13 @@ def test_get_xpn_resources_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ProjectsRestInterceptor, "post_get_xpn_resources" ) as post, mock.patch.object( + transports.ProjectsRestInterceptor, "post_get_xpn_resources_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ProjectsRestInterceptor, "pre_get_xpn_resources" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.GetXpnResourcesProjectsRequest.pb( compute.GetXpnResourcesProjectsRequest() ) @@ -7003,6 +7086,7 @@ def test_get_xpn_resources_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.ProjectsGetXpnResources() + post_with_metadata.return_value = compute.ProjectsGetXpnResources(), metadata client.get_xpn_resources( request, @@ -7014,6 +7098,7 @@ def test_get_xpn_resources_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_xpn_hosts_rest_bad_request( @@ -7185,10 +7270,13 @@ def test_list_xpn_hosts_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ProjectsRestInterceptor, "post_list_xpn_hosts" ) as post, mock.patch.object( + transports.ProjectsRestInterceptor, "post_list_xpn_hosts_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ProjectsRestInterceptor, "pre_list_xpn_hosts" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.ListXpnHostsProjectsRequest.pb( compute.ListXpnHostsProjectsRequest() ) @@ -7212,6 +7300,7 @@ def test_list_xpn_hosts_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.XpnHostList() + post_with_metadata.return_value = compute.XpnHostList(), metadata client.list_xpn_hosts( request, @@ -7223,6 +7312,7 @@ def test_list_xpn_hosts_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_move_disk_rest_bad_request(request_type=compute.MoveDiskProjectRequest): @@ -7422,10 +7512,13 @@ def test_move_disk_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ProjectsRestInterceptor, "post_move_disk" ) as post, mock.patch.object( + transports.ProjectsRestInterceptor, "post_move_disk_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ProjectsRestInterceptor, "pre_move_disk" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.MoveDiskProjectRequest.pb(compute.MoveDiskProjectRequest()) transcode.return_value = { "method": "post", @@ -7447,6 +7540,7 
@@ def test_move_disk_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.move_disk( request, @@ -7458,6 +7552,7 @@ def test_move_disk_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_move_instance_rest_bad_request( @@ -7661,10 +7756,13 @@ def test_move_instance_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ProjectsRestInterceptor, "post_move_instance" ) as post, mock.patch.object( + transports.ProjectsRestInterceptor, "post_move_instance_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ProjectsRestInterceptor, "pre_move_instance" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.MoveInstanceProjectRequest.pb( compute.MoveInstanceProjectRequest() ) @@ -7688,6 +7786,7 @@ def test_move_instance_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.move_instance( request, @@ -7699,6 +7798,7 @@ def test_move_instance_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_set_cloud_armor_tier_rest_bad_request( @@ -7908,10 +8008,13 @@ def test_set_cloud_armor_tier_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ProjectsRestInterceptor, "post_set_cloud_armor_tier" ) as post, mock.patch.object( + transports.ProjectsRestInterceptor, "post_set_cloud_armor_tier_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ProjectsRestInterceptor, "pre_set_cloud_armor_tier" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.SetCloudArmorTierProjectRequest.pb( compute.SetCloudArmorTierProjectRequest() ) @@ -7935,6 +8038,7 @@ def test_set_cloud_armor_tier_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.set_cloud_armor_tier( request, @@ -7946,6 +8050,7 @@ def test_set_cloud_armor_tier_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_set_common_instance_metadata_rest_bad_request( @@ -8144,10 +8249,14 @@ def test_set_common_instance_metadata_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ProjectsRestInterceptor, "post_set_common_instance_metadata" ) as post, mock.patch.object( + transports.ProjectsRestInterceptor, + "post_set_common_instance_metadata_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ProjectsRestInterceptor, "pre_set_common_instance_metadata" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.SetCommonInstanceMetadataProjectRequest.pb( compute.SetCommonInstanceMetadataProjectRequest() ) @@ -8171,6 +8280,7 @@ def test_set_common_instance_metadata_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata 
client.set_common_instance_metadata( request, @@ -8182,6 +8292,7 @@ def test_set_common_instance_metadata_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_set_default_network_tier_rest_bad_request( @@ -8391,10 +8502,14 @@ def test_set_default_network_tier_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ProjectsRestInterceptor, "post_set_default_network_tier" ) as post, mock.patch.object( + transports.ProjectsRestInterceptor, + "post_set_default_network_tier_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ProjectsRestInterceptor, "pre_set_default_network_tier" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.SetDefaultNetworkTierProjectRequest.pb( compute.SetDefaultNetworkTierProjectRequest() ) @@ -8418,6 +8533,7 @@ def test_set_default_network_tier_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.set_default_network_tier( request, @@ -8429,6 +8545,7 @@ def test_set_default_network_tier_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_set_usage_export_bucket_rest_bad_request( @@ -8632,10 +8749,13 @@ def test_set_usage_export_bucket_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ProjectsRestInterceptor, "post_set_usage_export_bucket" ) as post, mock.patch.object( + transports.ProjectsRestInterceptor, "post_set_usage_export_bucket_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ProjectsRestInterceptor, "pre_set_usage_export_bucket" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.SetUsageExportBucketProjectRequest.pb( compute.SetUsageExportBucketProjectRequest() ) @@ -8659,6 +8779,7 @@ def test_set_usage_export_bucket_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.set_usage_export_bucket( request, @@ -8670,6 +8791,7 @@ def test_set_usage_export_bucket_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_initialize_client_w_rest(): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_public_advertised_prefixes.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_public_advertised_prefixes.py index 0b8bc67f75c3..ba7961bc0225 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_public_advertised_prefixes.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_public_advertised_prefixes.py @@ -66,6 +66,13 @@ ) from google.cloud.compute_v1.types import compute +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -336,6 +343,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = PublicAdvertisedPrefixesClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = PublicAdvertisedPrefixesClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -3628,10 +3678,14 @@ def test_announce_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.PublicAdvertisedPrefixesRestInterceptor, "post_announce" ) as post, mock.patch.object( + transports.PublicAdvertisedPrefixesRestInterceptor, + "post_announce_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.PublicAdvertisedPrefixesRestInterceptor, "pre_announce" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.AnnouncePublicAdvertisedPrefixeRequest.pb( compute.AnnouncePublicAdvertisedPrefixeRequest() ) @@ -3655,6 +3709,7 @@ def test_announce_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.announce( request, @@ -3666,6 +3721,7 @@ def test_announce_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_rest_bad_request( @@ -3792,10 +3848,13 @@ def test_delete_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.PublicAdvertisedPrefixesRestInterceptor, "post_delete" ) as post, mock.patch.object( + transports.PublicAdvertisedPrefixesRestInterceptor, "post_delete_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.PublicAdvertisedPrefixesRestInterceptor, "pre_delete" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.DeletePublicAdvertisedPrefixeRequest.pb( compute.DeletePublicAdvertisedPrefixeRequest() ) @@ -3819,6 +3878,7 @@ def test_delete_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.delete( request, @@ -3830,6 +3890,7 @@ def test_delete_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def 
test_get_rest_bad_request(request_type=compute.GetPublicAdvertisedPrefixeRequest): @@ -3936,10 +3997,13 @@ def test_get_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.PublicAdvertisedPrefixesRestInterceptor, "post_get" ) as post, mock.patch.object( + transports.PublicAdvertisedPrefixesRestInterceptor, "post_get_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.PublicAdvertisedPrefixesRestInterceptor, "pre_get" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.GetPublicAdvertisedPrefixeRequest.pb( compute.GetPublicAdvertisedPrefixeRequest() ) @@ -3965,6 +4029,7 @@ def test_get_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.PublicAdvertisedPrefix() + post_with_metadata.return_value = compute.PublicAdvertisedPrefix(), metadata client.get( request, @@ -3976,6 +4041,7 @@ def test_get_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_insert_rest_bad_request( @@ -4201,10 +4267,13 @@ def test_insert_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.PublicAdvertisedPrefixesRestInterceptor, "post_insert" ) as post, mock.patch.object( + transports.PublicAdvertisedPrefixesRestInterceptor, "post_insert_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.PublicAdvertisedPrefixesRestInterceptor, "pre_insert" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.InsertPublicAdvertisedPrefixeRequest.pb( compute.InsertPublicAdvertisedPrefixeRequest() ) @@ -4228,6 +4297,7 @@ def test_insert_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.insert( request, @@ -4239,6 +4309,7 @@ def test_insert_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_rest_bad_request( @@ -4329,10 +4400,13 @@ def test_list_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.PublicAdvertisedPrefixesRestInterceptor, "post_list" ) as post, mock.patch.object( + transports.PublicAdvertisedPrefixesRestInterceptor, "post_list_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.PublicAdvertisedPrefixesRestInterceptor, "pre_list" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.ListPublicAdvertisedPrefixesRequest.pb( compute.ListPublicAdvertisedPrefixesRequest() ) @@ -4358,6 +4432,7 @@ def test_list_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.PublicAdvertisedPrefixList() + post_with_metadata.return_value = compute.PublicAdvertisedPrefixList(), metadata client.list( request, @@ -4369,6 +4444,7 @@ def test_list_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_patch_rest_bad_request( @@ -4594,10 +4670,13 @@ def test_patch_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.PublicAdvertisedPrefixesRestInterceptor, "post_patch" ) as post, mock.patch.object( + transports.PublicAdvertisedPrefixesRestInterceptor, 
"post_patch_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.PublicAdvertisedPrefixesRestInterceptor, "pre_patch" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.PatchPublicAdvertisedPrefixeRequest.pb( compute.PatchPublicAdvertisedPrefixeRequest() ) @@ -4621,6 +4700,7 @@ def test_patch_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.patch( request, @@ -4632,6 +4712,7 @@ def test_patch_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_withdraw_rest_bad_request( @@ -4758,10 +4839,14 @@ def test_withdraw_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.PublicAdvertisedPrefixesRestInterceptor, "post_withdraw" ) as post, mock.patch.object( + transports.PublicAdvertisedPrefixesRestInterceptor, + "post_withdraw_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.PublicAdvertisedPrefixesRestInterceptor, "pre_withdraw" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.WithdrawPublicAdvertisedPrefixeRequest.pb( compute.WithdrawPublicAdvertisedPrefixeRequest() ) @@ -4785,6 +4870,7 @@ def test_withdraw_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.withdraw( request, @@ -4796,6 +4882,7 @@ def test_withdraw_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_initialize_client_w_rest(): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_public_delegated_prefixes.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_public_delegated_prefixes.py index 82c3569094ad..278ae4aba242 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_public_delegated_prefixes.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_public_delegated_prefixes.py @@ -66,6 +66,13 @@ ) from google.cloud.compute_v1.types import compute +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -336,6 +343,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = PublicDelegatedPrefixesClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = PublicDelegatedPrefixesClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -3972,10 +4022,14 @@ def test_aggregated_list_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.PublicDelegatedPrefixesRestInterceptor, "post_aggregated_list" ) as post, mock.patch.object( + transports.PublicDelegatedPrefixesRestInterceptor, + "post_aggregated_list_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.PublicDelegatedPrefixesRestInterceptor, "pre_aggregated_list" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.AggregatedListPublicDelegatedPrefixesRequest.pb( compute.AggregatedListPublicDelegatedPrefixesRequest() ) @@ -4001,6 +4055,10 @@ def test_aggregated_list_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.PublicDelegatedPrefixAggregatedList() + post_with_metadata.return_value = ( + compute.PublicDelegatedPrefixAggregatedList(), + metadata, + ) client.aggregated_list( request, @@ -4012,6 +4070,7 @@ def test_aggregated_list_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_announce_rest_bad_request( @@ -4146,10 +4205,13 @@ def test_announce_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.PublicDelegatedPrefixesRestInterceptor, "post_announce" ) as post, mock.patch.object( + transports.PublicDelegatedPrefixesRestInterceptor, "post_announce_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.PublicDelegatedPrefixesRestInterceptor, "pre_announce" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.AnnouncePublicDelegatedPrefixeRequest.pb( compute.AnnouncePublicDelegatedPrefixeRequest() ) @@ -4173,6 +4235,7 @@ def test_announce_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.announce( request, @@ -4184,6 +4247,7 @@ def test_announce_rest_interceptors(null_interceptor): 
pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_rest_bad_request( @@ -4318,10 +4382,13 @@ def test_delete_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.PublicDelegatedPrefixesRestInterceptor, "post_delete" ) as post, mock.patch.object( + transports.PublicDelegatedPrefixesRestInterceptor, "post_delete_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.PublicDelegatedPrefixesRestInterceptor, "pre_delete" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.DeletePublicDelegatedPrefixeRequest.pb( compute.DeletePublicDelegatedPrefixeRequest() ) @@ -4345,6 +4412,7 @@ def test_delete_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.delete( request, @@ -4356,6 +4424,7 @@ def test_delete_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_rest_bad_request(request_type=compute.GetPublicDelegatedPrefixeRequest): @@ -4474,10 +4543,13 @@ def test_get_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.PublicDelegatedPrefixesRestInterceptor, "post_get" ) as post, mock.patch.object( + transports.PublicDelegatedPrefixesRestInterceptor, "post_get_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.PublicDelegatedPrefixesRestInterceptor, "pre_get" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.GetPublicDelegatedPrefixeRequest.pb( compute.GetPublicDelegatedPrefixeRequest() ) @@ -4503,6 +4575,7 @@ def test_get_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.PublicDelegatedPrefix() + post_with_metadata.return_value = compute.PublicDelegatedPrefix(), metadata client.get( request, @@ -4514,6 +4587,7 @@ def test_get_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_insert_rest_bad_request( @@ -4745,10 +4819,13 @@ def test_insert_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.PublicDelegatedPrefixesRestInterceptor, "post_insert" ) as post, mock.patch.object( + transports.PublicDelegatedPrefixesRestInterceptor, "post_insert_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.PublicDelegatedPrefixesRestInterceptor, "pre_insert" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.InsertPublicDelegatedPrefixeRequest.pb( compute.InsertPublicDelegatedPrefixeRequest() ) @@ -4772,6 +4849,7 @@ def test_insert_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.insert( request, @@ -4783,6 +4861,7 @@ def test_insert_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_rest_bad_request(request_type=compute.ListPublicDelegatedPrefixesRequest): @@ -4871,10 +4950,13 @@ def test_list_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.PublicDelegatedPrefixesRestInterceptor, 
"post_list" ) as post, mock.patch.object( + transports.PublicDelegatedPrefixesRestInterceptor, "post_list_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.PublicDelegatedPrefixesRestInterceptor, "pre_list" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.ListPublicDelegatedPrefixesRequest.pb( compute.ListPublicDelegatedPrefixesRequest() ) @@ -4900,6 +4982,7 @@ def test_list_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.PublicDelegatedPrefixList() + post_with_metadata.return_value = compute.PublicDelegatedPrefixList(), metadata client.list( request, @@ -4911,6 +4994,7 @@ def test_list_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_patch_rest_bad_request( @@ -5150,10 +5234,13 @@ def test_patch_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.PublicDelegatedPrefixesRestInterceptor, "post_patch" ) as post, mock.patch.object( + transports.PublicDelegatedPrefixesRestInterceptor, "post_patch_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.PublicDelegatedPrefixesRestInterceptor, "pre_patch" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.PatchPublicDelegatedPrefixeRequest.pb( compute.PatchPublicDelegatedPrefixeRequest() ) @@ -5177,6 +5264,7 @@ def test_patch_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.patch( request, @@ -5188,6 +5276,7 @@ def test_patch_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_withdraw_rest_bad_request( @@ -5322,10 +5411,13 @@ def test_withdraw_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.PublicDelegatedPrefixesRestInterceptor, "post_withdraw" ) as post, mock.patch.object( + transports.PublicDelegatedPrefixesRestInterceptor, "post_withdraw_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.PublicDelegatedPrefixesRestInterceptor, "pre_withdraw" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.WithdrawPublicDelegatedPrefixeRequest.pb( compute.WithdrawPublicDelegatedPrefixeRequest() ) @@ -5349,6 +5441,7 @@ def test_withdraw_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.withdraw( request, @@ -5360,6 +5453,7 @@ def test_withdraw_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_initialize_client_w_rest(): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_autoscalers.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_autoscalers.py index 0e036605b668..2e1b2f7edc0d 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_autoscalers.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_autoscalers.py @@ -66,6 +66,13 @@ ) from google.cloud.compute_v1.types import compute +CRED_INFO_JSON = { + "credential_source": 
"/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -328,6 +335,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = RegionAutoscalersClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = RegionAutoscalersClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -3296,10 +3346,13 @@ def test_delete_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegionAutoscalersRestInterceptor, "post_delete" ) as post, mock.patch.object( + transports.RegionAutoscalersRestInterceptor, "post_delete_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.RegionAutoscalersRestInterceptor, "pre_delete" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.DeleteRegionAutoscalerRequest.pb( compute.DeleteRegionAutoscalerRequest() ) @@ -3323,6 +3376,7 @@ def test_delete_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.delete( request, @@ -3334,6 +3388,7 @@ def test_delete_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_rest_bad_request(request_type=compute.GetRegionAutoscalerRequest): @@ -3436,10 +3491,13 @@ def test_get_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegionAutoscalersRestInterceptor, "post_get" ) as post, mock.patch.object( + transports.RegionAutoscalersRestInterceptor, "post_get_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.RegionAutoscalersRestInterceptor, "pre_get" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.GetRegionAutoscalerRequest.pb( compute.GetRegionAutoscalerRequest() ) @@ -3463,6 +3521,7 @@ def test_get_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Autoscaler() + 
post_with_metadata.return_value = compute.Autoscaler(), metadata client.get( request, @@ -3474,6 +3533,7 @@ def test_get_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_insert_rest_bad_request(request_type=compute.InsertRegionAutoscalerRequest): @@ -3711,10 +3771,13 @@ def test_insert_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegionAutoscalersRestInterceptor, "post_insert" ) as post, mock.patch.object( + transports.RegionAutoscalersRestInterceptor, "post_insert_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.RegionAutoscalersRestInterceptor, "pre_insert" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.InsertRegionAutoscalerRequest.pb( compute.InsertRegionAutoscalerRequest() ) @@ -3738,6 +3801,7 @@ def test_insert_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.insert( request, @@ -3749,6 +3813,7 @@ def test_insert_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_rest_bad_request(request_type=compute.ListRegionAutoscalersRequest): @@ -3837,10 +3902,13 @@ def test_list_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegionAutoscalersRestInterceptor, "post_list" ) as post, mock.patch.object( + transports.RegionAutoscalersRestInterceptor, "post_list_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.RegionAutoscalersRestInterceptor, "pre_list" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.ListRegionAutoscalersRequest.pb( compute.ListRegionAutoscalersRequest() ) @@ -3866,6 +3934,7 @@ def test_list_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.RegionAutoscalerList() + post_with_metadata.return_value = compute.RegionAutoscalerList(), metadata client.list( request, @@ -3877,6 +3946,7 @@ def test_list_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_patch_rest_bad_request(request_type=compute.PatchRegionAutoscalerRequest): @@ -4112,10 +4182,13 @@ def test_patch_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegionAutoscalersRestInterceptor, "post_patch" ) as post, mock.patch.object( + transports.RegionAutoscalersRestInterceptor, "post_patch_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.RegionAutoscalersRestInterceptor, "pre_patch" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.PatchRegionAutoscalerRequest.pb( compute.PatchRegionAutoscalerRequest() ) @@ -4139,6 +4212,7 @@ def test_patch_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.patch( request, @@ -4150,6 +4224,7 @@ def test_patch_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_rest_bad_request(request_type=compute.UpdateRegionAutoscalerRequest): 
@@ -4387,10 +4462,13 @@ def test_update_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegionAutoscalersRestInterceptor, "post_update" ) as post, mock.patch.object( + transports.RegionAutoscalersRestInterceptor, "post_update_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.RegionAutoscalersRestInterceptor, "pre_update" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.UpdateRegionAutoscalerRequest.pb( compute.UpdateRegionAutoscalerRequest() ) @@ -4414,6 +4492,7 @@ def test_update_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.update( request, @@ -4425,6 +4504,7 @@ def test_update_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_initialize_client_w_rest(): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_backend_services.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_backend_services.py index 197baeef6c17..8f28aa7c667a 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_backend_services.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_backend_services.py @@ -66,6 +66,13 @@ ) from google.cloud.compute_v1.types import compute +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -336,6 +343,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
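The test__add_cred_info_for_auth_errors tests added below (and mirrored in every other client test module touched by this change) pin down the behaviour of the new client helper: for 401, 403 and 404 errors, and only when the credentials object exposes get_cred_info(), the credential info is JSON-serialized and appended to the error's details; any other status code, or credentials without get_cred_info(), leave the error unchanged. A rough sketch of that behaviour, written as a free function purely to mirror what the tests assert (the generated helper is a client method that reads the credentials from client._transport and may be structured differently), is:

# Sketch of the behaviour asserted by test__add_cred_info_for_auth_errors
# below; not the generated implementation.
import json


def add_cred_info_for_auth_errors(error, credentials):
    # Only authentication/authorization-style errors get the extra detail.
    if error.code not in (401, 403, 404):
        return
    get_cred_info = getattr(credentials, "get_cred_info", None)
    if get_cred_info is None:
        # Credentials objects without get_cred_info() are left alone.
        return
    cred_info = get_cred_info()
    if cred_info:
        # GoogleAPICallError.details is a read-only view over _details, so the
        # underlying list is what must grow for error.details to reflect it.
        error._details.append(json.dumps(cred_info))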
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = RegionBackendServicesClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = RegionBackendServicesClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -4885,10 +4935,13 @@ def test_delete_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegionBackendServicesRestInterceptor, "post_delete" ) as post, mock.patch.object( + transports.RegionBackendServicesRestInterceptor, "post_delete_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.RegionBackendServicesRestInterceptor, "pre_delete" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.DeleteRegionBackendServiceRequest.pb( compute.DeleteRegionBackendServiceRequest() ) @@ -4912,6 +4965,7 @@ def test_delete_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.delete( request, @@ -4923,6 +4977,7 @@ def test_delete_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_rest_bad_request(request_type=compute.GetRegionBackendServiceRequest): @@ -5065,10 +5120,13 @@ def test_get_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegionBackendServicesRestInterceptor, "post_get" ) as post, mock.patch.object( + transports.RegionBackendServicesRestInterceptor, "post_get_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.RegionBackendServicesRestInterceptor, "pre_get" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.GetRegionBackendServiceRequest.pb( compute.GetRegionBackendServiceRequest() ) @@ -5092,6 +5150,7 @@ def test_get_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.BackendService() + post_with_metadata.return_value = compute.BackendService(), metadata client.get( request, @@ -5103,6 +5162,7 @@ def test_get_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_health_rest_bad_request( @@ -5271,10 +5331,13 @@ def 
test_get_health_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegionBackendServicesRestInterceptor, "post_get_health" ) as post, mock.patch.object( + transports.RegionBackendServicesRestInterceptor, "post_get_health_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.RegionBackendServicesRestInterceptor, "pre_get_health" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.GetHealthRegionBackendServiceRequest.pb( compute.GetHealthRegionBackendServiceRequest() ) @@ -5300,6 +5363,7 @@ def test_get_health_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.BackendServiceGroupHealth() + post_with_metadata.return_value = compute.BackendServiceGroupHealth(), metadata client.get_health( request, @@ -5311,6 +5375,7 @@ def test_get_health_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_iam_policy_rest_bad_request( @@ -5399,10 +5464,14 @@ def test_get_iam_policy_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegionBackendServicesRestInterceptor, "post_get_iam_policy" ) as post, mock.patch.object( + transports.RegionBackendServicesRestInterceptor, + "post_get_iam_policy_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.RegionBackendServicesRestInterceptor, "pre_get_iam_policy" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.GetIamPolicyRegionBackendServiceRequest.pb( compute.GetIamPolicyRegionBackendServiceRequest() ) @@ -5426,6 +5495,7 @@ def test_get_iam_policy_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Policy() + post_with_metadata.return_value = compute.Policy(), metadata client.get_iam_policy( request, @@ -5437,6 +5507,7 @@ def test_get_iam_policy_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_insert_rest_bad_request( @@ -5804,10 +5875,13 @@ def test_insert_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegionBackendServicesRestInterceptor, "post_insert" ) as post, mock.patch.object( + transports.RegionBackendServicesRestInterceptor, "post_insert_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.RegionBackendServicesRestInterceptor, "pre_insert" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.InsertRegionBackendServiceRequest.pb( compute.InsertRegionBackendServiceRequest() ) @@ -5831,6 +5905,7 @@ def test_insert_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.insert( request, @@ -5842,6 +5917,7 @@ def test_insert_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_rest_bad_request(request_type=compute.ListRegionBackendServicesRequest): @@ -5930,10 +6006,13 @@ def test_list_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegionBackendServicesRestInterceptor, "post_list" ) as post, mock.patch.object( + transports.RegionBackendServicesRestInterceptor, 
"post_list_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.RegionBackendServicesRestInterceptor, "pre_list" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.ListRegionBackendServicesRequest.pb( compute.ListRegionBackendServicesRequest() ) @@ -5957,6 +6036,7 @@ def test_list_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.BackendServiceList() + post_with_metadata.return_value = compute.BackendServiceList(), metadata client.list( request, @@ -5968,6 +6048,7 @@ def test_list_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_usable_rest_bad_request( @@ -6058,10 +6139,14 @@ def test_list_usable_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegionBackendServicesRestInterceptor, "post_list_usable" ) as post, mock.patch.object( + transports.RegionBackendServicesRestInterceptor, + "post_list_usable_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.RegionBackendServicesRestInterceptor, "pre_list_usable" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.ListUsableRegionBackendServicesRequest.pb( compute.ListUsableRegionBackendServicesRequest() ) @@ -6087,6 +6172,7 @@ def test_list_usable_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.BackendServiceListUsable() + post_with_metadata.return_value = compute.BackendServiceListUsable(), metadata client.list_usable( request, @@ -6098,6 +6184,7 @@ def test_list_usable_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_patch_rest_bad_request(request_type=compute.PatchRegionBackendServiceRequest): @@ -6471,10 +6558,13 @@ def test_patch_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegionBackendServicesRestInterceptor, "post_patch" ) as post, mock.patch.object( + transports.RegionBackendServicesRestInterceptor, "post_patch_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.RegionBackendServicesRestInterceptor, "pre_patch" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.PatchRegionBackendServiceRequest.pb( compute.PatchRegionBackendServiceRequest() ) @@ -6498,6 +6588,7 @@ def test_patch_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.patch( request, @@ -6509,6 +6600,7 @@ def test_patch_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_set_iam_policy_rest_bad_request( @@ -6713,10 +6805,14 @@ def test_set_iam_policy_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegionBackendServicesRestInterceptor, "post_set_iam_policy" ) as post, mock.patch.object( + transports.RegionBackendServicesRestInterceptor, + "post_set_iam_policy_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.RegionBackendServicesRestInterceptor, "pre_set_iam_policy" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = 
compute.SetIamPolicyRegionBackendServiceRequest.pb( compute.SetIamPolicyRegionBackendServiceRequest() ) @@ -6740,6 +6836,7 @@ def test_set_iam_policy_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Policy() + post_with_metadata.return_value = compute.Policy(), metadata client.set_iam_policy( request, @@ -6751,6 +6848,7 @@ def test_set_iam_policy_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_set_security_policy_rest_bad_request( @@ -6963,10 +7061,14 @@ def test_set_security_policy_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegionBackendServicesRestInterceptor, "post_set_security_policy" ) as post, mock.patch.object( + transports.RegionBackendServicesRestInterceptor, + "post_set_security_policy_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.RegionBackendServicesRestInterceptor, "pre_set_security_policy" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.SetSecurityPolicyRegionBackendServiceRequest.pb( compute.SetSecurityPolicyRegionBackendServiceRequest() ) @@ -6990,6 +7092,7 @@ def test_set_security_policy_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.set_security_policy( request, @@ -7001,6 +7104,7 @@ def test_set_security_policy_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_test_iam_permissions_rest_bad_request( @@ -7163,10 +7267,14 @@ def test_test_iam_permissions_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegionBackendServicesRestInterceptor, "post_test_iam_permissions" ) as post, mock.patch.object( + transports.RegionBackendServicesRestInterceptor, + "post_test_iam_permissions_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.RegionBackendServicesRestInterceptor, "pre_test_iam_permissions" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.TestIamPermissionsRegionBackendServiceRequest.pb( compute.TestIamPermissionsRegionBackendServiceRequest() ) @@ -7192,6 +7300,7 @@ def test_test_iam_permissions_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.TestPermissionsResponse() + post_with_metadata.return_value = compute.TestPermissionsResponse(), metadata client.test_iam_permissions( request, @@ -7203,6 +7312,7 @@ def test_test_iam_permissions_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_rest_bad_request( @@ -7578,10 +7688,13 @@ def test_update_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegionBackendServicesRestInterceptor, "post_update" ) as post, mock.patch.object( + transports.RegionBackendServicesRestInterceptor, "post_update_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.RegionBackendServicesRestInterceptor, "pre_update" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.UpdateRegionBackendServiceRequest.pb( compute.UpdateRegionBackendServiceRequest() ) @@ 
-7605,6 +7718,7 @@ def test_update_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.update( request, @@ -7616,6 +7730,7 @@ def test_update_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_initialize_client_w_rest(): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_commitments.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_commitments.py index d75aa1635141..485220a1da92 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_commitments.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_commitments.py @@ -66,6 +66,13 @@ ) from google.cloud.compute_v1.types import compute +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -328,6 +335,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = RegionCommitmentsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = RegionCommitmentsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -2702,10 +2752,14 @@ def test_aggregated_list_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegionCommitmentsRestInterceptor, "post_aggregated_list" ) as post, mock.patch.object( + transports.RegionCommitmentsRestInterceptor, + "post_aggregated_list_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.RegionCommitmentsRestInterceptor, "pre_aggregated_list" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.AggregatedListRegionCommitmentsRequest.pb( compute.AggregatedListRegionCommitmentsRequest() ) @@ -2731,6 +2785,7 @@ def test_aggregated_list_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.CommitmentAggregatedList() + 
post_with_metadata.return_value = compute.CommitmentAggregatedList(), metadata client.aggregated_list( request, @@ -2742,6 +2797,7 @@ def test_aggregated_list_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_rest_bad_request(request_type=compute.GetRegionCommitmentRequest): @@ -2860,10 +2916,13 @@ def test_get_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegionCommitmentsRestInterceptor, "post_get" ) as post, mock.patch.object( + transports.RegionCommitmentsRestInterceptor, "post_get_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.RegionCommitmentsRestInterceptor, "pre_get" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.GetRegionCommitmentRequest.pb( compute.GetRegionCommitmentRequest() ) @@ -2887,6 +2946,7 @@ def test_get_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Commitment() + post_with_metadata.return_value = compute.Commitment(), metadata client.get( request, @@ -2898,6 +2958,7 @@ def test_get_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_insert_rest_bad_request(request_type=compute.InsertRegionCommitmentRequest): @@ -3188,10 +3249,13 @@ def test_insert_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegionCommitmentsRestInterceptor, "post_insert" ) as post, mock.patch.object( + transports.RegionCommitmentsRestInterceptor, "post_insert_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.RegionCommitmentsRestInterceptor, "pre_insert" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.InsertRegionCommitmentRequest.pb( compute.InsertRegionCommitmentRequest() ) @@ -3215,6 +3279,7 @@ def test_insert_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.insert( request, @@ -3226,6 +3291,7 @@ def test_insert_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_rest_bad_request(request_type=compute.ListRegionCommitmentsRequest): @@ -3314,10 +3380,13 @@ def test_list_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegionCommitmentsRestInterceptor, "post_list" ) as post, mock.patch.object( + transports.RegionCommitmentsRestInterceptor, "post_list_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.RegionCommitmentsRestInterceptor, "pre_list" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.ListRegionCommitmentsRequest.pb( compute.ListRegionCommitmentsRequest() ) @@ -3341,6 +3410,7 @@ def test_list_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.CommitmentList() + post_with_metadata.return_value = compute.CommitmentList(), metadata client.list( request, @@ -3352,6 +3422,7 @@ def test_list_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def 
test_update_rest_bad_request(request_type=compute.UpdateRegionCommitmentRequest): @@ -3642,10 +3713,13 @@ def test_update_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegionCommitmentsRestInterceptor, "post_update" ) as post, mock.patch.object( + transports.RegionCommitmentsRestInterceptor, "post_update_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.RegionCommitmentsRestInterceptor, "pre_update" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.UpdateRegionCommitmentRequest.pb( compute.UpdateRegionCommitmentRequest() ) @@ -3669,6 +3743,7 @@ def test_update_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.update( request, @@ -3680,6 +3755,7 @@ def test_update_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_initialize_client_w_rest(): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_disk_types.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_disk_types.py index a96d22545472..b0d1baab2ef7 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_disk_types.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_disk_types.py @@ -59,6 +59,13 @@ ) from google.cloud.compute_v1.types import compute +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -312,6 +319,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = RegionDiskTypesClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = RegionDiskTypesClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -1559,10 +1609,13 @@ def test_get_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegionDiskTypesRestInterceptor, "post_get" ) as post, mock.patch.object( + transports.RegionDiskTypesRestInterceptor, "post_get_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.RegionDiskTypesRestInterceptor, "pre_get" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.GetRegionDiskTypeRequest.pb( compute.GetRegionDiskTypeRequest() ) @@ -1586,6 +1639,7 @@ def test_get_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.DiskType() + post_with_metadata.return_value = compute.DiskType(), metadata client.get( request, @@ -1597,6 +1651,7 @@ def test_get_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_rest_bad_request(request_type=compute.ListRegionDiskTypesRequest): @@ -1685,10 +1740,13 @@ def test_list_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegionDiskTypesRestInterceptor, "post_list" ) as post, mock.patch.object( + transports.RegionDiskTypesRestInterceptor, "post_list_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.RegionDiskTypesRestInterceptor, "pre_list" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.ListRegionDiskTypesRequest.pb( compute.ListRegionDiskTypesRequest() ) @@ -1712,6 +1770,7 @@ def test_list_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.RegionDiskTypeList() + post_with_metadata.return_value = compute.RegionDiskTypeList(), metadata client.list( request, @@ -1723,6 +1782,7 @@ def test_list_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_initialize_client_w_rest(): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_disks.py 
b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_disks.py index f41249aef4d7..d142bfd0035a 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_disks.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_disks.py @@ -66,6 +66,13 @@ ) from google.cloud.compute_v1.types import compute +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -300,6 +307,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = RegionDisksClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = RegionDisksClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -7379,10 +7429,14 @@ def test_add_resource_policies_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegionDisksRestInterceptor, "post_add_resource_policies" ) as post, mock.patch.object( + transports.RegionDisksRestInterceptor, + "post_add_resource_policies_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.RegionDisksRestInterceptor, "pre_add_resource_policies" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.AddResourcePoliciesRegionDiskRequest.pb( compute.AddResourcePoliciesRegionDiskRequest() ) @@ -7406,6 +7460,7 @@ def test_add_resource_policies_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.add_resource_policies( request, @@ -7417,6 +7472,7 @@ def test_add_resource_policies_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_bulk_insert_rest_bad_request(request_type=compute.BulkInsertRegionDiskRequest): @@ -7619,10 +7675,13 @@ def test_bulk_insert_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegionDisksRestInterceptor, "post_bulk_insert" ) as post, mock.patch.object( + 
transports.RegionDisksRestInterceptor, "post_bulk_insert_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.RegionDisksRestInterceptor, "pre_bulk_insert" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.BulkInsertRegionDiskRequest.pb( compute.BulkInsertRegionDiskRequest() ) @@ -7646,6 +7705,7 @@ def test_bulk_insert_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.bulk_insert( request, @@ -7657,6 +7717,7 @@ def test_bulk_insert_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_snapshot_rest_bad_request( @@ -7896,10 +7957,13 @@ def test_create_snapshot_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegionDisksRestInterceptor, "post_create_snapshot" ) as post, mock.patch.object( + transports.RegionDisksRestInterceptor, "post_create_snapshot_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.RegionDisksRestInterceptor, "pre_create_snapshot" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.CreateSnapshotRegionDiskRequest.pb( compute.CreateSnapshotRegionDiskRequest() ) @@ -7923,6 +7987,7 @@ def test_create_snapshot_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.create_snapshot( request, @@ -7934,6 +7999,7 @@ def test_create_snapshot_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_rest_bad_request(request_type=compute.DeleteRegionDiskRequest): @@ -8058,10 +8124,13 @@ def test_delete_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegionDisksRestInterceptor, "post_delete" ) as post, mock.patch.object( + transports.RegionDisksRestInterceptor, "post_delete_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.RegionDisksRestInterceptor, "pre_delete" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.DeleteRegionDiskRequest.pb( compute.DeleteRegionDiskRequest() ) @@ -8085,6 +8154,7 @@ def test_delete_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.delete( request, @@ -8096,6 +8166,7 @@ def test_delete_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_rest_bad_request(request_type=compute.GetRegionDiskRequest): @@ -8264,10 +8335,13 @@ def test_get_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegionDisksRestInterceptor, "post_get" ) as post, mock.patch.object( + transports.RegionDisksRestInterceptor, "post_get_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.RegionDisksRestInterceptor, "pre_get" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.GetRegionDiskRequest.pb(compute.GetRegionDiskRequest()) transcode.return_value = { "method": 
"post", @@ -8289,6 +8363,7 @@ def test_get_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Disk() + post_with_metadata.return_value = compute.Disk(), metadata client.get( request, @@ -8300,6 +8375,7 @@ def test_get_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_iam_policy_rest_bad_request( @@ -8388,10 +8464,13 @@ def test_get_iam_policy_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegionDisksRestInterceptor, "post_get_iam_policy" ) as post, mock.patch.object( + transports.RegionDisksRestInterceptor, "post_get_iam_policy_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.RegionDisksRestInterceptor, "pre_get_iam_policy" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.GetIamPolicyRegionDiskRequest.pb( compute.GetIamPolicyRegionDiskRequest() ) @@ -8415,6 +8494,7 @@ def test_get_iam_policy_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Policy() + post_with_metadata.return_value = compute.Policy(), metadata client.get_iam_policy( request, @@ -8426,6 +8506,7 @@ def test_get_iam_policy_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_insert_rest_bad_request(request_type=compute.InsertRegionDiskRequest): @@ -8683,10 +8764,13 @@ def test_insert_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegionDisksRestInterceptor, "post_insert" ) as post, mock.patch.object( + transports.RegionDisksRestInterceptor, "post_insert_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.RegionDisksRestInterceptor, "pre_insert" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.InsertRegionDiskRequest.pb( compute.InsertRegionDiskRequest() ) @@ -8710,6 +8794,7 @@ def test_insert_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.insert( request, @@ -8721,6 +8806,7 @@ def test_insert_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_rest_bad_request(request_type=compute.ListRegionDisksRequest): @@ -8809,10 +8895,13 @@ def test_list_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegionDisksRestInterceptor, "post_list" ) as post, mock.patch.object( + transports.RegionDisksRestInterceptor, "post_list_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.RegionDisksRestInterceptor, "pre_list" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.ListRegionDisksRequest.pb(compute.ListRegionDisksRequest()) transcode.return_value = { "method": "post", @@ -8834,6 +8923,7 @@ def test_list_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.DiskList() + post_with_metadata.return_value = compute.DiskList(), metadata client.list( request, @@ -8845,6 +8935,7 @@ def test_list_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + 
post_with_metadata.assert_called_once() def test_remove_resource_policies_rest_bad_request( @@ -9056,10 +9147,14 @@ def test_remove_resource_policies_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegionDisksRestInterceptor, "post_remove_resource_policies" ) as post, mock.patch.object( + transports.RegionDisksRestInterceptor, + "post_remove_resource_policies_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.RegionDisksRestInterceptor, "pre_remove_resource_policies" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.RemoveResourcePoliciesRegionDiskRequest.pb( compute.RemoveResourcePoliciesRegionDiskRequest() ) @@ -9083,6 +9178,7 @@ def test_remove_resource_policies_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.remove_resource_policies( request, @@ -9094,6 +9190,7 @@ def test_remove_resource_policies_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_resize_rest_bad_request(request_type=compute.ResizeRegionDiskRequest): @@ -9296,10 +9393,13 @@ def test_resize_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegionDisksRestInterceptor, "post_resize" ) as post, mock.patch.object( + transports.RegionDisksRestInterceptor, "post_resize_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.RegionDisksRestInterceptor, "pre_resize" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.ResizeRegionDiskRequest.pb( compute.ResizeRegionDiskRequest() ) @@ -9323,6 +9423,7 @@ def test_resize_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.resize( request, @@ -9334,6 +9435,7 @@ def test_resize_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_set_iam_policy_rest_bad_request( @@ -9538,10 +9640,13 @@ def test_set_iam_policy_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegionDisksRestInterceptor, "post_set_iam_policy" ) as post, mock.patch.object( + transports.RegionDisksRestInterceptor, "post_set_iam_policy_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.RegionDisksRestInterceptor, "pre_set_iam_policy" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.SetIamPolicyRegionDiskRequest.pb( compute.SetIamPolicyRegionDiskRequest() ) @@ -9565,6 +9670,7 @@ def test_set_iam_policy_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Policy() + post_with_metadata.return_value = compute.Policy(), metadata client.set_iam_policy( request, @@ -9576,6 +9682,7 @@ def test_set_iam_policy_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_set_labels_rest_bad_request(request_type=compute.SetLabelsRegionDiskRequest): @@ -9779,10 +9886,13 @@ def test_set_labels_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( 
transports.RegionDisksRestInterceptor, "post_set_labels" ) as post, mock.patch.object( + transports.RegionDisksRestInterceptor, "post_set_labels_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.RegionDisksRestInterceptor, "pre_set_labels" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.SetLabelsRegionDiskRequest.pb( compute.SetLabelsRegionDiskRequest() ) @@ -9806,6 +9916,7 @@ def test_set_labels_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.set_labels( request, @@ -9817,6 +9928,7 @@ def test_set_labels_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_start_async_replication_rest_bad_request( @@ -10028,10 +10140,14 @@ def test_start_async_replication_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegionDisksRestInterceptor, "post_start_async_replication" ) as post, mock.patch.object( + transports.RegionDisksRestInterceptor, + "post_start_async_replication_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.RegionDisksRestInterceptor, "pre_start_async_replication" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.StartAsyncReplicationRegionDiskRequest.pb( compute.StartAsyncReplicationRegionDiskRequest() ) @@ -10055,6 +10171,7 @@ def test_start_async_replication_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.start_async_replication( request, @@ -10066,6 +10183,7 @@ def test_start_async_replication_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_stop_async_replication_rest_bad_request( @@ -10192,10 +10310,14 @@ def test_stop_async_replication_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegionDisksRestInterceptor, "post_stop_async_replication" ) as post, mock.patch.object( + transports.RegionDisksRestInterceptor, + "post_stop_async_replication_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.RegionDisksRestInterceptor, "pre_stop_async_replication" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.StopAsyncReplicationRegionDiskRequest.pb( compute.StopAsyncReplicationRegionDiskRequest() ) @@ -10219,6 +10341,7 @@ def test_stop_async_replication_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.stop_async_replication( request, @@ -10230,6 +10353,7 @@ def test_stop_async_replication_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_stop_group_async_replication_rest_bad_request( @@ -10441,10 +10565,14 @@ def test_stop_group_async_replication_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegionDisksRestInterceptor, "post_stop_group_async_replication" ) as post, mock.patch.object( + transports.RegionDisksRestInterceptor, + 
"post_stop_group_async_replication_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.RegionDisksRestInterceptor, "pre_stop_group_async_replication" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.StopGroupAsyncReplicationRegionDiskRequest.pb( compute.StopGroupAsyncReplicationRegionDiskRequest() ) @@ -10468,6 +10596,7 @@ def test_stop_group_async_replication_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.stop_group_async_replication( request, @@ -10479,6 +10608,7 @@ def test_stop_group_async_replication_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_test_iam_permissions_rest_bad_request( @@ -10641,10 +10771,13 @@ def test_test_iam_permissions_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegionDisksRestInterceptor, "post_test_iam_permissions" ) as post, mock.patch.object( + transports.RegionDisksRestInterceptor, "post_test_iam_permissions_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.RegionDisksRestInterceptor, "pre_test_iam_permissions" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.TestIamPermissionsRegionDiskRequest.pb( compute.TestIamPermissionsRegionDiskRequest() ) @@ -10670,6 +10803,7 @@ def test_test_iam_permissions_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.TestPermissionsResponse() + post_with_metadata.return_value = compute.TestPermissionsResponse(), metadata client.test_iam_permissions( request, @@ -10681,6 +10815,7 @@ def test_test_iam_permissions_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_rest_bad_request(request_type=compute.UpdateRegionDiskRequest): @@ -10938,10 +11073,13 @@ def test_update_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegionDisksRestInterceptor, "post_update" ) as post, mock.patch.object( + transports.RegionDisksRestInterceptor, "post_update_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.RegionDisksRestInterceptor, "pre_update" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.UpdateRegionDiskRequest.pb( compute.UpdateRegionDiskRequest() ) @@ -10965,6 +11103,7 @@ def test_update_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.update( request, @@ -10976,6 +11115,7 @@ def test_update_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_initialize_client_w_rest(): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_health_check_services.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_health_check_services.py index 61243c2c0224..8e8f7dd18ac4 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_health_check_services.py +++ 
b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_health_check_services.py @@ -66,6 +66,13 @@ ) from google.cloud.compute_v1.types import compute +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -340,6 +347,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = RegionHealthCheckServicesClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = RegionHealthCheckServicesClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -2910,10 +2960,13 @@ def test_delete_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegionHealthCheckServicesRestInterceptor, "post_delete" ) as post, mock.patch.object( + transports.RegionHealthCheckServicesRestInterceptor, "post_delete_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.RegionHealthCheckServicesRestInterceptor, "pre_delete" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.DeleteRegionHealthCheckServiceRequest.pb( compute.DeleteRegionHealthCheckServiceRequest() ) @@ -2937,6 +2990,7 @@ def test_delete_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.delete( request, @@ -2948,6 +3002,7 @@ def test_delete_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_rest_bad_request(request_type=compute.GetRegionHealthCheckServiceRequest): @@ -3063,10 +3118,13 @@ def test_get_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegionHealthCheckServicesRestInterceptor, "post_get" ) as post, mock.patch.object( + transports.RegionHealthCheckServicesRestInterceptor, "post_get_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.RegionHealthCheckServicesRestInterceptor, "pre_get" ) as pre: pre.assert_not_called() 
post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.GetRegionHealthCheckServiceRequest.pb( compute.GetRegionHealthCheckServiceRequest() ) @@ -3090,6 +3148,7 @@ def test_get_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.HealthCheckService() + post_with_metadata.return_value = compute.HealthCheckService(), metadata client.get( request, @@ -3101,6 +3160,7 @@ def test_get_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_insert_rest_bad_request( @@ -3322,10 +3382,13 @@ def test_insert_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegionHealthCheckServicesRestInterceptor, "post_insert" ) as post, mock.patch.object( + transports.RegionHealthCheckServicesRestInterceptor, "post_insert_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.RegionHealthCheckServicesRestInterceptor, "pre_insert" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.InsertRegionHealthCheckServiceRequest.pb( compute.InsertRegionHealthCheckServiceRequest() ) @@ -3349,6 +3412,7 @@ def test_insert_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.insert( request, @@ -3360,6 +3424,7 @@ def test_insert_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_rest_bad_request( @@ -3450,10 +3515,13 @@ def test_list_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegionHealthCheckServicesRestInterceptor, "post_list" ) as post, mock.patch.object( + transports.RegionHealthCheckServicesRestInterceptor, "post_list_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.RegionHealthCheckServicesRestInterceptor, "pre_list" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.ListRegionHealthCheckServicesRequest.pb( compute.ListRegionHealthCheckServicesRequest() ) @@ -3479,6 +3547,7 @@ def test_list_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.HealthCheckServicesList() + post_with_metadata.return_value = compute.HealthCheckServicesList(), metadata client.list( request, @@ -3490,6 +3559,7 @@ def test_list_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_patch_rest_bad_request( @@ -3719,10 +3789,13 @@ def test_patch_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegionHealthCheckServicesRestInterceptor, "post_patch" ) as post, mock.patch.object( + transports.RegionHealthCheckServicesRestInterceptor, "post_patch_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.RegionHealthCheckServicesRestInterceptor, "pre_patch" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.PatchRegionHealthCheckServiceRequest.pb( compute.PatchRegionHealthCheckServiceRequest() ) @@ -3746,6 +3819,7 @@ def test_patch_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + 
post_with_metadata.return_value = compute.Operation(), metadata client.patch( request, @@ -3757,6 +3831,7 @@ def test_patch_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_initialize_client_w_rest(): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_health_checks.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_health_checks.py index 4c9953b753ac..55408aa602a4 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_health_checks.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_health_checks.py @@ -66,6 +66,13 @@ ) from google.cloud.compute_v1.types import compute +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -332,6 +339,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = RegionHealthChecksClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = RegionHealthChecksClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -3288,10 +3338,13 @@ def test_delete_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegionHealthChecksRestInterceptor, "post_delete" ) as post, mock.patch.object( + transports.RegionHealthChecksRestInterceptor, "post_delete_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.RegionHealthChecksRestInterceptor, "pre_delete" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.DeleteRegionHealthCheckRequest.pb( compute.DeleteRegionHealthCheckRequest() ) @@ -3315,6 +3368,7 @@ def test_delete_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.delete( request, @@ -3326,6 +3380,7 @@ def test_delete_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + 
post_with_metadata.assert_called_once() def test_get_rest_bad_request(request_type=compute.GetRegionHealthCheckRequest): @@ -3440,10 +3495,13 @@ def test_get_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegionHealthChecksRestInterceptor, "post_get" ) as post, mock.patch.object( + transports.RegionHealthChecksRestInterceptor, "post_get_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.RegionHealthChecksRestInterceptor, "pre_get" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.GetRegionHealthCheckRequest.pb( compute.GetRegionHealthCheckRequest() ) @@ -3467,6 +3525,7 @@ def test_get_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.HealthCheck() + post_with_metadata.return_value = compute.HealthCheck(), metadata client.get( request, @@ -3478,6 +3537,7 @@ def test_get_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_insert_rest_bad_request(request_type=compute.InsertRegionHealthCheckRequest): @@ -3738,10 +3798,13 @@ def test_insert_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegionHealthChecksRestInterceptor, "post_insert" ) as post, mock.patch.object( + transports.RegionHealthChecksRestInterceptor, "post_insert_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.RegionHealthChecksRestInterceptor, "pre_insert" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.InsertRegionHealthCheckRequest.pb( compute.InsertRegionHealthCheckRequest() ) @@ -3765,6 +3828,7 @@ def test_insert_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.insert( request, @@ -3776,6 +3840,7 @@ def test_insert_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_rest_bad_request(request_type=compute.ListRegionHealthChecksRequest): @@ -3864,10 +3929,13 @@ def test_list_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegionHealthChecksRestInterceptor, "post_list" ) as post, mock.patch.object( + transports.RegionHealthChecksRestInterceptor, "post_list_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.RegionHealthChecksRestInterceptor, "pre_list" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.ListRegionHealthChecksRequest.pb( compute.ListRegionHealthChecksRequest() ) @@ -3891,6 +3959,7 @@ def test_list_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.HealthCheckList() + post_with_metadata.return_value = compute.HealthCheckList(), metadata client.list( request, @@ -3902,6 +3971,7 @@ def test_list_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_patch_rest_bad_request(request_type=compute.PatchRegionHealthCheckRequest): @@ -4170,10 +4240,13 @@ def test_patch_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegionHealthChecksRestInterceptor, "post_patch" ) as post, mock.patch.object( + 
transports.RegionHealthChecksRestInterceptor, "post_patch_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.RegionHealthChecksRestInterceptor, "pre_patch" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.PatchRegionHealthCheckRequest.pb( compute.PatchRegionHealthCheckRequest() ) @@ -4197,6 +4270,7 @@ def test_patch_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.patch( request, @@ -4208,6 +4282,7 @@ def test_patch_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_rest_bad_request(request_type=compute.UpdateRegionHealthCheckRequest): @@ -4476,10 +4551,13 @@ def test_update_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegionHealthChecksRestInterceptor, "post_update" ) as post, mock.patch.object( + transports.RegionHealthChecksRestInterceptor, "post_update_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.RegionHealthChecksRestInterceptor, "pre_update" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.UpdateRegionHealthCheckRequest.pb( compute.UpdateRegionHealthCheckRequest() ) @@ -4503,6 +4581,7 @@ def test_update_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.update( request, @@ -4514,6 +4593,7 @@ def test_update_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_initialize_client_w_rest(): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_instance_group_managers.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_instance_group_managers.py index 3de03fd1e41e..c146dd13a86b 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_instance_group_managers.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_instance_group_managers.py @@ -66,6 +66,13 @@ ) from google.cloud.compute_v1.types import compute +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -344,6 +351,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
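# [Editor's note, not part of the generated diff] The parametrized block that follows is
# repeated for each compute_v1 client touched by this PR and pins down the behaviour of the
# new client helper `_add_cred_info_for_auth_errors`: credential info is appended to
# `error.details` only for 401/403/404 responses, and only when the credentials object
# exposes `get_cred_info()` returning a non-None dict. A rough sketch inferred solely from
# these assertions (names here are illustrative; the real generated helper may differ):

import json

from google.api_core import exceptions as core_exceptions


def add_cred_info_for_auth_errors(client, error: core_exceptions.GoogleAPICallError) -> None:
    """Sketch of the behaviour the surrounding tests exercise."""
    if error.code not in (401, 403, 404):
        return  # 500 and other codes keep their original details, per the parametrize table
    credentials = client._transport._credentials
    if not hasattr(credentials, "get_cred_info"):
        return  # covered by test__add_cred_info_for_auth_errors_no_get_cred_info
    cred_info = credentials.get_cred_info()
    if cred_info is None:
        return
    # The assertions compare against json.dumps(CRED_INFO_JSON), i.e. a JSON string detail.
    error._details.append(json.dumps(cred_info))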
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = RegionInstanceGroupManagersClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = RegionInstanceGroupManagersClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -10562,10 +10612,14 @@ def test_abandon_instances_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegionInstanceGroupManagersRestInterceptor, "post_abandon_instances" ) as post, mock.patch.object( + transports.RegionInstanceGroupManagersRestInterceptor, + "post_abandon_instances_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.RegionInstanceGroupManagersRestInterceptor, "pre_abandon_instances" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.AbandonInstancesRegionInstanceGroupManagerRequest.pb( compute.AbandonInstancesRegionInstanceGroupManagerRequest() ) @@ -10589,6 +10643,7 @@ def test_abandon_instances_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.abandon_instances( request, @@ -10600,6 +10655,7 @@ def test_abandon_instances_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_apply_updates_to_instances_rest_bad_request( @@ -10825,11 +10881,15 @@ def test_apply_updates_to_instances_rest_interceptors(null_interceptor): transports.RegionInstanceGroupManagersRestInterceptor, "post_apply_updates_to_instances", ) as post, mock.patch.object( + transports.RegionInstanceGroupManagersRestInterceptor, + "post_apply_updates_to_instances_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.RegionInstanceGroupManagersRestInterceptor, "pre_apply_updates_to_instances", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = ( compute.ApplyUpdatesToInstancesRegionInstanceGroupManagerRequest.pb( compute.ApplyUpdatesToInstancesRegionInstanceGroupManagerRequest() @@ -10855,6 +10915,7 @@ def test_apply_updates_to_instances_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata 
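# [Editor's note, not part of the generated diff] The surrounding hunks extend every REST
# interceptor test to also patch the new `post_<method>_with_metadata` hooks. The mocked
# return values show the intended contract: the existing `post_<method>` hook returns only
# the response, while `post_<method>_with_metadata` returns a `(response, metadata)` tuple
# and is expected to be called exactly once per request. A minimal, hypothetical override
# consistent with that contract (class name and logging are illustrative, not from the PR):

from google.cloud.compute_v1.services.region_instance_group_managers import transports


class LoggingInterceptor(transports.RegionInstanceGroupManagersRestInterceptor):
    def post_delete_with_metadata(self, response, metadata):
        # `metadata` is the response metadata as a sequence of (key, value) pairs;
        # return both (modified or unchanged) so the client keeps propagating them.
        print(f"delete produced operation {response.name!r} with {len(metadata)} metadata items")
        return response, metadata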
client.apply_updates_to_instances( request, @@ -10866,6 +10927,7 @@ def test_apply_updates_to_instances_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_instances_rest_bad_request( @@ -11097,10 +11159,14 @@ def test_create_instances_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegionInstanceGroupManagersRestInterceptor, "post_create_instances" ) as post, mock.patch.object( + transports.RegionInstanceGroupManagersRestInterceptor, + "post_create_instances_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.RegionInstanceGroupManagersRestInterceptor, "pre_create_instances" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.CreateInstancesRegionInstanceGroupManagerRequest.pb( compute.CreateInstancesRegionInstanceGroupManagerRequest() ) @@ -11124,6 +11190,7 @@ def test_create_instances_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.create_instances( request, @@ -11135,6 +11202,7 @@ def test_create_instances_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_rest_bad_request( @@ -11269,10 +11337,14 @@ def test_delete_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegionInstanceGroupManagersRestInterceptor, "post_delete" ) as post, mock.patch.object( + transports.RegionInstanceGroupManagersRestInterceptor, + "post_delete_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.RegionInstanceGroupManagersRestInterceptor, "pre_delete" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.DeleteRegionInstanceGroupManagerRequest.pb( compute.DeleteRegionInstanceGroupManagerRequest() ) @@ -11296,6 +11368,7 @@ def test_delete_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.delete( request, @@ -11307,6 +11380,7 @@ def test_delete_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_instances_rest_bad_request( @@ -11527,10 +11601,14 @@ def test_delete_instances_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegionInstanceGroupManagersRestInterceptor, "post_delete_instances" ) as post, mock.patch.object( + transports.RegionInstanceGroupManagersRestInterceptor, + "post_delete_instances_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.RegionInstanceGroupManagersRestInterceptor, "pre_delete_instances" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.DeleteInstancesRegionInstanceGroupManagerRequest.pb( compute.DeleteInstancesRegionInstanceGroupManagerRequest() ) @@ -11554,6 +11632,7 @@ def test_delete_instances_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.delete_instances( request, @@ -11565,6 +11644,7 @@ def 
test_delete_instances_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_per_instance_configs_rest_bad_request( @@ -11787,11 +11867,15 @@ def test_delete_per_instance_configs_rest_interceptors(null_interceptor): transports.RegionInstanceGroupManagersRestInterceptor, "post_delete_per_instance_configs", ) as post, mock.patch.object( + transports.RegionInstanceGroupManagersRestInterceptor, + "post_delete_per_instance_configs_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.RegionInstanceGroupManagersRestInterceptor, "pre_delete_per_instance_configs", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = ( compute.DeletePerInstanceConfigsRegionInstanceGroupManagerRequest.pb( compute.DeletePerInstanceConfigsRegionInstanceGroupManagerRequest() @@ -11817,6 +11901,7 @@ def test_delete_per_instance_configs_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.delete_per_instance_configs( request, @@ -11828,6 +11913,7 @@ def test_delete_per_instance_configs_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_rest_bad_request( @@ -11959,10 +12045,13 @@ def test_get_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegionInstanceGroupManagersRestInterceptor, "post_get" ) as post, mock.patch.object( + transports.RegionInstanceGroupManagersRestInterceptor, "post_get_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.RegionInstanceGroupManagersRestInterceptor, "pre_get" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.GetRegionInstanceGroupManagerRequest.pb( compute.GetRegionInstanceGroupManagerRequest() ) @@ -11988,6 +12077,7 @@ def test_get_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.InstanceGroupManager() + post_with_metadata.return_value = compute.InstanceGroupManager(), metadata client.get( request, @@ -11999,6 +12089,7 @@ def test_get_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_insert_rest_bad_request( @@ -12283,10 +12374,14 @@ def test_insert_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegionInstanceGroupManagersRestInterceptor, "post_insert" ) as post, mock.patch.object( + transports.RegionInstanceGroupManagersRestInterceptor, + "post_insert_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.RegionInstanceGroupManagersRestInterceptor, "pre_insert" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.InsertRegionInstanceGroupManagerRequest.pb( compute.InsertRegionInstanceGroupManagerRequest() ) @@ -12310,6 +12405,7 @@ def test_insert_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.insert( request, @@ -12321,6 +12417,7 @@ def test_insert_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + 
post_with_metadata.assert_called_once() def test_list_rest_bad_request( @@ -12411,10 +12508,13 @@ def test_list_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegionInstanceGroupManagersRestInterceptor, "post_list" ) as post, mock.patch.object( + transports.RegionInstanceGroupManagersRestInterceptor, "post_list_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.RegionInstanceGroupManagersRestInterceptor, "pre_list" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.ListRegionInstanceGroupManagersRequest.pb( compute.ListRegionInstanceGroupManagersRequest() ) @@ -12440,6 +12540,10 @@ def test_list_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.RegionInstanceGroupManagerList() + post_with_metadata.return_value = ( + compute.RegionInstanceGroupManagerList(), + metadata, + ) client.list( request, @@ -12451,6 +12555,7 @@ def test_list_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_errors_rest_bad_request( @@ -12545,10 +12650,14 @@ def test_list_errors_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegionInstanceGroupManagersRestInterceptor, "post_list_errors" ) as post, mock.patch.object( + transports.RegionInstanceGroupManagersRestInterceptor, + "post_list_errors_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.RegionInstanceGroupManagersRestInterceptor, "pre_list_errors" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.ListErrorsRegionInstanceGroupManagersRequest.pb( compute.ListErrorsRegionInstanceGroupManagersRequest() ) @@ -12574,6 +12683,10 @@ def test_list_errors_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.RegionInstanceGroupManagersListErrorsResponse() + post_with_metadata.return_value = ( + compute.RegionInstanceGroupManagersListErrorsResponse(), + metadata, + ) client.list_errors( request, @@ -12585,6 +12698,7 @@ def test_list_errors_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_managed_instances_rest_bad_request( @@ -12680,11 +12794,15 @@ def test_list_managed_instances_rest_interceptors(null_interceptor): transports.RegionInstanceGroupManagersRestInterceptor, "post_list_managed_instances", ) as post, mock.patch.object( + transports.RegionInstanceGroupManagersRestInterceptor, + "post_list_managed_instances_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.RegionInstanceGroupManagersRestInterceptor, "pre_list_managed_instances", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.ListManagedInstancesRegionInstanceGroupManagersRequest.pb( compute.ListManagedInstancesRegionInstanceGroupManagersRequest() ) @@ -12710,6 +12828,10 @@ def test_list_managed_instances_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.RegionInstanceGroupManagersListInstancesResponse() + post_with_metadata.return_value = ( + compute.RegionInstanceGroupManagersListInstancesResponse(), + metadata, + ) client.list_managed_instances( request, @@ -12721,6 +12843,7 @@ def 
test_list_managed_instances_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_per_instance_configs_rest_bad_request( @@ -12816,11 +12939,15 @@ def test_list_per_instance_configs_rest_interceptors(null_interceptor): transports.RegionInstanceGroupManagersRestInterceptor, "post_list_per_instance_configs", ) as post, mock.patch.object( + transports.RegionInstanceGroupManagersRestInterceptor, + "post_list_per_instance_configs_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.RegionInstanceGroupManagersRestInterceptor, "pre_list_per_instance_configs", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = ( compute.ListPerInstanceConfigsRegionInstanceGroupManagersRequest.pb( compute.ListPerInstanceConfigsRegionInstanceGroupManagersRequest() @@ -12850,6 +12977,10 @@ def test_list_per_instance_configs_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.RegionInstanceGroupManagersListInstanceConfigsResp() + post_with_metadata.return_value = ( + compute.RegionInstanceGroupManagersListInstanceConfigsResp(), + metadata, + ) client.list_per_instance_configs( request, @@ -12861,6 +12992,7 @@ def test_list_per_instance_configs_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_patch_rest_bad_request( @@ -13153,10 +13285,14 @@ def test_patch_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegionInstanceGroupManagersRestInterceptor, "post_patch" ) as post, mock.patch.object( + transports.RegionInstanceGroupManagersRestInterceptor, + "post_patch_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.RegionInstanceGroupManagersRestInterceptor, "pre_patch" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.PatchRegionInstanceGroupManagerRequest.pb( compute.PatchRegionInstanceGroupManagerRequest() ) @@ -13180,6 +13316,7 @@ def test_patch_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.patch( request, @@ -13191,6 +13328,7 @@ def test_patch_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_patch_per_instance_configs_rest_bad_request( @@ -13425,11 +13563,15 @@ def test_patch_per_instance_configs_rest_interceptors(null_interceptor): transports.RegionInstanceGroupManagersRestInterceptor, "post_patch_per_instance_configs", ) as post, mock.patch.object( + transports.RegionInstanceGroupManagersRestInterceptor, + "post_patch_per_instance_configs_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.RegionInstanceGroupManagersRestInterceptor, "pre_patch_per_instance_configs", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = ( compute.PatchPerInstanceConfigsRegionInstanceGroupManagerRequest.pb( compute.PatchPerInstanceConfigsRegionInstanceGroupManagerRequest() @@ -13455,6 +13597,7 @@ def test_patch_per_instance_configs_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = 
compute.Operation(), metadata client.patch_per_instance_configs( request, @@ -13466,6 +13609,7 @@ def test_patch_per_instance_configs_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_recreate_instances_rest_bad_request( @@ -13685,10 +13829,14 @@ def test_recreate_instances_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegionInstanceGroupManagersRestInterceptor, "post_recreate_instances" ) as post, mock.patch.object( + transports.RegionInstanceGroupManagersRestInterceptor, + "post_recreate_instances_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.RegionInstanceGroupManagersRestInterceptor, "pre_recreate_instances" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.RecreateInstancesRegionInstanceGroupManagerRequest.pb( compute.RecreateInstancesRegionInstanceGroupManagerRequest() ) @@ -13712,6 +13860,7 @@ def test_recreate_instances_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.recreate_instances( request, @@ -13723,6 +13872,7 @@ def test_recreate_instances_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_resize_rest_bad_request( @@ -13857,10 +14007,14 @@ def test_resize_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegionInstanceGroupManagersRestInterceptor, "post_resize" ) as post, mock.patch.object( + transports.RegionInstanceGroupManagersRestInterceptor, + "post_resize_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.RegionInstanceGroupManagersRestInterceptor, "pre_resize" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.ResizeRegionInstanceGroupManagerRequest.pb( compute.ResizeRegionInstanceGroupManagerRequest() ) @@ -13884,6 +14038,7 @@ def test_resize_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.resize( request, @@ -13895,6 +14050,7 @@ def test_resize_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_resume_instances_rest_bad_request( @@ -14114,10 +14270,14 @@ def test_resume_instances_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegionInstanceGroupManagersRestInterceptor, "post_resume_instances" ) as post, mock.patch.object( + transports.RegionInstanceGroupManagersRestInterceptor, + "post_resume_instances_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.RegionInstanceGroupManagersRestInterceptor, "pre_resume_instances" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.ResumeInstancesRegionInstanceGroupManagerRequest.pb( compute.ResumeInstancesRegionInstanceGroupManagerRequest() ) @@ -14141,6 +14301,7 @@ def test_resume_instances_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.resume_instances( request, @@ 
-14152,6 +14313,7 @@ def test_resume_instances_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_set_instance_template_rest_bad_request( @@ -14374,11 +14536,15 @@ def test_set_instance_template_rest_interceptors(null_interceptor): transports.RegionInstanceGroupManagersRestInterceptor, "post_set_instance_template", ) as post, mock.patch.object( + transports.RegionInstanceGroupManagersRestInterceptor, + "post_set_instance_template_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.RegionInstanceGroupManagersRestInterceptor, "pre_set_instance_template", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.SetInstanceTemplateRegionInstanceGroupManagerRequest.pb( compute.SetInstanceTemplateRegionInstanceGroupManagerRequest() ) @@ -14402,6 +14568,7 @@ def test_set_instance_template_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.set_instance_template( request, @@ -14413,6 +14580,7 @@ def test_set_instance_template_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_set_target_pools_rest_bad_request( @@ -14633,10 +14801,14 @@ def test_set_target_pools_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegionInstanceGroupManagersRestInterceptor, "post_set_target_pools" ) as post, mock.patch.object( + transports.RegionInstanceGroupManagersRestInterceptor, + "post_set_target_pools_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.RegionInstanceGroupManagersRestInterceptor, "pre_set_target_pools" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.SetTargetPoolsRegionInstanceGroupManagerRequest.pb( compute.SetTargetPoolsRegionInstanceGroupManagerRequest() ) @@ -14660,6 +14832,7 @@ def test_set_target_pools_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.set_target_pools( request, @@ -14671,6 +14844,7 @@ def test_set_target_pools_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_start_instances_rest_bad_request( @@ -14890,10 +15064,14 @@ def test_start_instances_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegionInstanceGroupManagersRestInterceptor, "post_start_instances" ) as post, mock.patch.object( + transports.RegionInstanceGroupManagersRestInterceptor, + "post_start_instances_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.RegionInstanceGroupManagersRestInterceptor, "pre_start_instances" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.StartInstancesRegionInstanceGroupManagerRequest.pb( compute.StartInstancesRegionInstanceGroupManagerRequest() ) @@ -14917,6 +15095,7 @@ def test_start_instances_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.start_instances( request, @@ -14928,6 
+15107,7 @@ def test_start_instances_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_stop_instances_rest_bad_request( @@ -15148,10 +15328,14 @@ def test_stop_instances_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegionInstanceGroupManagersRestInterceptor, "post_stop_instances" ) as post, mock.patch.object( + transports.RegionInstanceGroupManagersRestInterceptor, + "post_stop_instances_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.RegionInstanceGroupManagersRestInterceptor, "pre_stop_instances" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.StopInstancesRegionInstanceGroupManagerRequest.pb( compute.StopInstancesRegionInstanceGroupManagerRequest() ) @@ -15175,6 +15359,7 @@ def test_stop_instances_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.stop_instances( request, @@ -15186,6 +15371,7 @@ def test_stop_instances_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_suspend_instances_rest_bad_request( @@ -15405,10 +15591,14 @@ def test_suspend_instances_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegionInstanceGroupManagersRestInterceptor, "post_suspend_instances" ) as post, mock.patch.object( + transports.RegionInstanceGroupManagersRestInterceptor, + "post_suspend_instances_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.RegionInstanceGroupManagersRestInterceptor, "pre_suspend_instances" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.SuspendInstancesRegionInstanceGroupManagerRequest.pb( compute.SuspendInstancesRegionInstanceGroupManagerRequest() ) @@ -15432,6 +15622,7 @@ def test_suspend_instances_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.suspend_instances( request, @@ -15443,6 +15634,7 @@ def test_suspend_instances_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_per_instance_configs_rest_bad_request( @@ -15679,11 +15871,15 @@ def test_update_per_instance_configs_rest_interceptors(null_interceptor): transports.RegionInstanceGroupManagersRestInterceptor, "post_update_per_instance_configs", ) as post, mock.patch.object( + transports.RegionInstanceGroupManagersRestInterceptor, + "post_update_per_instance_configs_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.RegionInstanceGroupManagersRestInterceptor, "pre_update_per_instance_configs", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = ( compute.UpdatePerInstanceConfigsRegionInstanceGroupManagerRequest.pb( compute.UpdatePerInstanceConfigsRegionInstanceGroupManagerRequest() @@ -15709,6 +15905,7 @@ def test_update_per_instance_configs_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata 
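# [Editor's note, not part of the generated diff] These interceptor tests construct the REST
# transport with an interceptor instance (or None, via the `null_interceptor` parametrization)
# and then build the client on top of that transport. A hypothetical wiring, using anonymous
# credentials as these unit tests do; the assumption is that the generated REST transport
# accepts an `interceptor=` argument, and a subclass overriding the `post_*_with_metadata`
# hooks (as sketched earlier) would plug in the same way:

from google.auth import credentials as ga_credentials

from google.cloud import compute_v1
from google.cloud.compute_v1.services.region_instance_group_managers import transports

transport = transports.RegionInstanceGroupManagersRestTransport(
    credentials=ga_credentials.AnonymousCredentials(),
    interceptor=transports.RegionInstanceGroupManagersRestInterceptor(),
)
client = compute_v1.RegionInstanceGroupManagersClient(transport=transport)
# Calls such as client.delete(...) now flow through pre_delete, post_delete and
# post_delete_with_metadata on the supplied interceptor.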
client.update_per_instance_configs( request, @@ -15720,6 +15917,7 @@ def test_update_per_instance_configs_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_initialize_client_w_rest(): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_instance_groups.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_instance_groups.py index 535dfaeaef4d..d8eb061a6fa9 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_instance_groups.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_instance_groups.py @@ -66,6 +66,13 @@ ) from google.cloud.compute_v1.types import compute +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -334,6 +341,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = RegionInstanceGroupsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = RegionInstanceGroupsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -2343,10 +2393,13 @@ def test_get_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegionInstanceGroupsRestInterceptor, "post_get" ) as post, mock.patch.object( + transports.RegionInstanceGroupsRestInterceptor, "post_get_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.RegionInstanceGroupsRestInterceptor, "pre_get" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.GetRegionInstanceGroupRequest.pb( compute.GetRegionInstanceGroupRequest() ) @@ -2370,6 +2423,7 @@ def test_get_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.InstanceGroup() + post_with_metadata.return_value = compute.InstanceGroup(), metadata client.get( request, @@ -2381,6 +2435,7 @@ def test_get_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + 
post_with_metadata.assert_called_once() def test_list_rest_bad_request(request_type=compute.ListRegionInstanceGroupsRequest): @@ -2469,10 +2524,13 @@ def test_list_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegionInstanceGroupsRestInterceptor, "post_list" ) as post, mock.patch.object( + transports.RegionInstanceGroupsRestInterceptor, "post_list_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.RegionInstanceGroupsRestInterceptor, "pre_list" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.ListRegionInstanceGroupsRequest.pb( compute.ListRegionInstanceGroupsRequest() ) @@ -2498,6 +2556,7 @@ def test_list_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.RegionInstanceGroupList() + post_with_metadata.return_value = compute.RegionInstanceGroupList(), metadata client.list( request, @@ -2509,6 +2568,7 @@ def test_list_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_instances_rest_bad_request( @@ -2693,10 +2753,14 @@ def test_list_instances_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegionInstanceGroupsRestInterceptor, "post_list_instances" ) as post, mock.patch.object( + transports.RegionInstanceGroupsRestInterceptor, + "post_list_instances_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.RegionInstanceGroupsRestInterceptor, "pre_list_instances" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.ListInstancesRegionInstanceGroupsRequest.pb( compute.ListInstancesRegionInstanceGroupsRequest() ) @@ -2722,6 +2786,10 @@ def test_list_instances_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.RegionInstanceGroupsListInstances() + post_with_metadata.return_value = ( + compute.RegionInstanceGroupsListInstances(), + metadata, + ) client.list_instances( request, @@ -2733,6 +2801,7 @@ def test_list_instances_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_set_named_ports_rest_bad_request( @@ -2953,10 +3022,14 @@ def test_set_named_ports_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegionInstanceGroupsRestInterceptor, "post_set_named_ports" ) as post, mock.patch.object( + transports.RegionInstanceGroupsRestInterceptor, + "post_set_named_ports_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.RegionInstanceGroupsRestInterceptor, "pre_set_named_ports" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.SetNamedPortsRegionInstanceGroupRequest.pb( compute.SetNamedPortsRegionInstanceGroupRequest() ) @@ -2980,6 +3053,7 @@ def test_set_named_ports_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.set_named_ports( request, @@ -2991,6 +3065,7 @@ def test_set_named_ports_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_initialize_client_w_rest(): diff --git 
a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_instance_templates.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_instance_templates.py index cafda09c012f..779f2177682d 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_instance_templates.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_instance_templates.py @@ -66,6 +66,13 @@ ) from google.cloud.compute_v1.types import compute +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -336,6 +343,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = RegionInstanceTemplatesClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = RegionInstanceTemplatesClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -2466,10 +2516,13 @@ def test_delete_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegionInstanceTemplatesRestInterceptor, "post_delete" ) as post, mock.patch.object( + transports.RegionInstanceTemplatesRestInterceptor, "post_delete_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.RegionInstanceTemplatesRestInterceptor, "pre_delete" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.DeleteRegionInstanceTemplateRequest.pb( compute.DeleteRegionInstanceTemplateRequest() ) @@ -2493,6 +2546,7 @@ def test_delete_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.delete( request, @@ -2504,6 +2558,7 @@ def test_delete_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_rest_bad_request(request_type=compute.GetRegionInstanceTemplateRequest): @@ -2608,10 +2663,13 @@ def test_get_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( 
transports.RegionInstanceTemplatesRestInterceptor, "post_get" ) as post, mock.patch.object( + transports.RegionInstanceTemplatesRestInterceptor, "post_get_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.RegionInstanceTemplatesRestInterceptor, "pre_get" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.GetRegionInstanceTemplateRequest.pb( compute.GetRegionInstanceTemplateRequest() ) @@ -2635,6 +2693,7 @@ def test_get_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.InstanceTemplate() + post_with_metadata.return_value = compute.InstanceTemplate(), metadata client.get( request, @@ -2646,6 +2705,7 @@ def test_get_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_insert_rest_bad_request( @@ -2839,6 +2899,7 @@ def test_insert_rest_call_success(request_type): "scheduling": { "automatic_restart": True, "availability_domain": 2002, + "host_error_timeout_seconds": 2811, "instance_termination_action": "instance_termination_action_value", "local_ssd_recovery_timeout": {"nanos": 543, "seconds": 751}, "location_hint": "location_hint_value", @@ -3042,10 +3103,13 @@ def test_insert_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegionInstanceTemplatesRestInterceptor, "post_insert" ) as post, mock.patch.object( + transports.RegionInstanceTemplatesRestInterceptor, "post_insert_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.RegionInstanceTemplatesRestInterceptor, "pre_insert" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.InsertRegionInstanceTemplateRequest.pb( compute.InsertRegionInstanceTemplateRequest() ) @@ -3069,6 +3133,7 @@ def test_insert_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.insert( request, @@ -3080,6 +3145,7 @@ def test_insert_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_rest_bad_request(request_type=compute.ListRegionInstanceTemplatesRequest): @@ -3168,10 +3234,13 @@ def test_list_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegionInstanceTemplatesRestInterceptor, "post_list" ) as post, mock.patch.object( + transports.RegionInstanceTemplatesRestInterceptor, "post_list_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.RegionInstanceTemplatesRestInterceptor, "pre_list" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.ListRegionInstanceTemplatesRequest.pb( compute.ListRegionInstanceTemplatesRequest() ) @@ -3197,6 +3266,7 @@ def test_list_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.InstanceTemplateList() + post_with_metadata.return_value = compute.InstanceTemplateList(), metadata client.list( request, @@ -3208,6 +3278,7 @@ def test_list_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_initialize_client_w_rest(): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_instances.py 
b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_instances.py index 5347ad371589..6cd7e21d388e 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_instances.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_instances.py @@ -65,6 +65,13 @@ ) from google.cloud.compute_v1.types import compute +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -318,6 +325,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = RegionInstancesClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = RegionInstancesClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -1596,6 +1646,7 @@ def test_bulk_insert_rest_call_success(request_type): "scheduling": { "automatic_restart": True, "availability_domain": 2002, + "host_error_timeout_seconds": 2811, "instance_termination_action": "instance_termination_action_value", "local_ssd_recovery_timeout": {"nanos": 543, "seconds": 751}, "location_hint": "location_hint_value", @@ -1796,10 +1847,13 @@ def test_bulk_insert_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegionInstancesRestInterceptor, "post_bulk_insert" ) as post, mock.patch.object( + transports.RegionInstancesRestInterceptor, "post_bulk_insert_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.RegionInstancesRestInterceptor, "pre_bulk_insert" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.BulkInsertRegionInstanceRequest.pb( compute.BulkInsertRegionInstanceRequest() ) @@ -1823,6 +1877,7 @@ def test_bulk_insert_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.bulk_insert( request, @@ -1834,6 +1889,7 @@ def test_bulk_insert_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + 
post_with_metadata.assert_called_once() def test_initialize_client_w_rest(): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_instant_snapshots.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_instant_snapshots.py index 04b32415b203..ba0bfcfdd050 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_instant_snapshots.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_instant_snapshots.py @@ -66,6 +66,13 @@ ) from google.cloud.compute_v1.types import compute +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -336,6 +343,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = RegionInstantSnapshotsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = RegionInstantSnapshotsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -3529,10 +3579,13 @@ def test_delete_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegionInstantSnapshotsRestInterceptor, "post_delete" ) as post, mock.patch.object( + transports.RegionInstantSnapshotsRestInterceptor, "post_delete_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.RegionInstantSnapshotsRestInterceptor, "pre_delete" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.DeleteRegionInstantSnapshotRequest.pb( compute.DeleteRegionInstantSnapshotRequest() ) @@ -3556,6 +3609,7 @@ def test_delete_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.delete( request, @@ -3567,6 +3621,7 @@ def test_delete_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_rest_bad_request(request_type=compute.GetRegionInstantSnapshotRequest): @@ -3689,10 +3744,13 @@ def 
test_get_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegionInstantSnapshotsRestInterceptor, "post_get" ) as post, mock.patch.object( + transports.RegionInstantSnapshotsRestInterceptor, "post_get_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.RegionInstantSnapshotsRestInterceptor, "pre_get" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.GetRegionInstantSnapshotRequest.pb( compute.GetRegionInstantSnapshotRequest() ) @@ -3716,6 +3774,7 @@ def test_get_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.InstantSnapshot() + post_with_metadata.return_value = compute.InstantSnapshot(), metadata client.get( request, @@ -3727,6 +3786,7 @@ def test_get_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_iam_policy_rest_bad_request( @@ -3815,10 +3875,14 @@ def test_get_iam_policy_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegionInstantSnapshotsRestInterceptor, "post_get_iam_policy" ) as post, mock.patch.object( + transports.RegionInstantSnapshotsRestInterceptor, + "post_get_iam_policy_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.RegionInstantSnapshotsRestInterceptor, "pre_get_iam_policy" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.GetIamPolicyRegionInstantSnapshotRequest.pb( compute.GetIamPolicyRegionInstantSnapshotRequest() ) @@ -3842,6 +3906,7 @@ def test_get_iam_policy_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Policy() + post_with_metadata.return_value = compute.Policy(), metadata client.get_iam_policy( request, @@ -3853,6 +3918,7 @@ def test_get_iam_policy_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_insert_rest_bad_request( @@ -4073,10 +4139,13 @@ def test_insert_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegionInstantSnapshotsRestInterceptor, "post_insert" ) as post, mock.patch.object( + transports.RegionInstantSnapshotsRestInterceptor, "post_insert_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.RegionInstantSnapshotsRestInterceptor, "pre_insert" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.InsertRegionInstantSnapshotRequest.pb( compute.InsertRegionInstantSnapshotRequest() ) @@ -4100,6 +4169,7 @@ def test_insert_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.insert( request, @@ -4111,6 +4181,7 @@ def test_insert_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_rest_bad_request(request_type=compute.ListRegionInstantSnapshotsRequest): @@ -4199,10 +4270,13 @@ def test_list_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegionInstantSnapshotsRestInterceptor, "post_list" ) as post, mock.patch.object( + transports.RegionInstantSnapshotsRestInterceptor, "post_list_with_metadata" + ) as post_with_metadata, mock.patch.object( 
transports.RegionInstantSnapshotsRestInterceptor, "pre_list" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.ListRegionInstantSnapshotsRequest.pb( compute.ListRegionInstantSnapshotsRequest() ) @@ -4228,6 +4302,7 @@ def test_list_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.InstantSnapshotList() + post_with_metadata.return_value = compute.InstantSnapshotList(), metadata client.list( request, @@ -4239,6 +4314,7 @@ def test_list_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_set_iam_policy_rest_bad_request( @@ -4443,10 +4519,14 @@ def test_set_iam_policy_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegionInstantSnapshotsRestInterceptor, "post_set_iam_policy" ) as post, mock.patch.object( + transports.RegionInstantSnapshotsRestInterceptor, + "post_set_iam_policy_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.RegionInstantSnapshotsRestInterceptor, "pre_set_iam_policy" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.SetIamPolicyRegionInstantSnapshotRequest.pb( compute.SetIamPolicyRegionInstantSnapshotRequest() ) @@ -4470,6 +4550,7 @@ def test_set_iam_policy_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Policy() + post_with_metadata.return_value = compute.Policy(), metadata client.set_iam_policy( request, @@ -4481,6 +4562,7 @@ def test_set_iam_policy_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_set_labels_rest_bad_request( @@ -4686,10 +4768,14 @@ def test_set_labels_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegionInstantSnapshotsRestInterceptor, "post_set_labels" ) as post, mock.patch.object( + transports.RegionInstantSnapshotsRestInterceptor, + "post_set_labels_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.RegionInstantSnapshotsRestInterceptor, "pre_set_labels" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.SetLabelsRegionInstantSnapshotRequest.pb( compute.SetLabelsRegionInstantSnapshotRequest() ) @@ -4713,6 +4799,7 @@ def test_set_labels_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.set_labels( request, @@ -4724,6 +4811,7 @@ def test_set_labels_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_test_iam_permissions_rest_bad_request( @@ -4886,10 +4974,14 @@ def test_test_iam_permissions_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegionInstantSnapshotsRestInterceptor, "post_test_iam_permissions" ) as post, mock.patch.object( + transports.RegionInstantSnapshotsRestInterceptor, + "post_test_iam_permissions_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.RegionInstantSnapshotsRestInterceptor, "pre_test_iam_permissions" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = 
compute.TestIamPermissionsRegionInstantSnapshotRequest.pb( compute.TestIamPermissionsRegionInstantSnapshotRequest() ) @@ -4915,6 +5007,7 @@ def test_test_iam_permissions_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.TestPermissionsResponse() + post_with_metadata.return_value = compute.TestPermissionsResponse(), metadata client.test_iam_permissions( request, @@ -4926,6 +5019,7 @@ def test_test_iam_permissions_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_initialize_client_w_rest(): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_network_endpoint_groups.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_network_endpoint_groups.py index 2818411dae10..519d47e6d816 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_network_endpoint_groups.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_network_endpoint_groups.py @@ -66,6 +66,13 @@ ) from google.cloud.compute_v1.types import compute +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -344,6 +351,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = RegionNetworkEndpointGroupsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = RegionNetworkEndpointGroupsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -3782,11 +3832,15 @@ def test_attach_network_endpoints_rest_interceptors(null_interceptor): transports.RegionNetworkEndpointGroupsRestInterceptor, "post_attach_network_endpoints", ) as post, mock.patch.object( + transports.RegionNetworkEndpointGroupsRestInterceptor, + "post_attach_network_endpoints_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.RegionNetworkEndpointGroupsRestInterceptor, "pre_attach_network_endpoints", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message 
= compute.AttachNetworkEndpointsRegionNetworkEndpointGroupRequest.pb( compute.AttachNetworkEndpointsRegionNetworkEndpointGroupRequest() ) @@ -3810,6 +3864,7 @@ def test_attach_network_endpoints_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.attach_network_endpoints( request, @@ -3821,6 +3876,7 @@ def test_attach_network_endpoints_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_rest_bad_request( @@ -3955,10 +4011,14 @@ def test_delete_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegionNetworkEndpointGroupsRestInterceptor, "post_delete" ) as post, mock.patch.object( + transports.RegionNetworkEndpointGroupsRestInterceptor, + "post_delete_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.RegionNetworkEndpointGroupsRestInterceptor, "pre_delete" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.DeleteRegionNetworkEndpointGroupRequest.pb( compute.DeleteRegionNetworkEndpointGroupRequest() ) @@ -3982,6 +4042,7 @@ def test_delete_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.delete( request, @@ -3993,6 +4054,7 @@ def test_delete_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_detach_network_endpoints_rest_bad_request( @@ -4225,11 +4287,15 @@ def test_detach_network_endpoints_rest_interceptors(null_interceptor): transports.RegionNetworkEndpointGroupsRestInterceptor, "post_detach_network_endpoints", ) as post, mock.patch.object( + transports.RegionNetworkEndpointGroupsRestInterceptor, + "post_detach_network_endpoints_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.RegionNetworkEndpointGroupsRestInterceptor, "pre_detach_network_endpoints", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.DetachNetworkEndpointsRegionNetworkEndpointGroupRequest.pb( compute.DetachNetworkEndpointsRegionNetworkEndpointGroupRequest() ) @@ -4253,6 +4319,7 @@ def test_detach_network_endpoints_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.detach_network_endpoints( request, @@ -4264,6 +4331,7 @@ def test_detach_network_endpoints_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_rest_bad_request( @@ -4382,10 +4450,13 @@ def test_get_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegionNetworkEndpointGroupsRestInterceptor, "post_get" ) as post, mock.patch.object( + transports.RegionNetworkEndpointGroupsRestInterceptor, "post_get_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.RegionNetworkEndpointGroupsRestInterceptor, "pre_get" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.GetRegionNetworkEndpointGroupRequest.pb( compute.GetRegionNetworkEndpointGroupRequest() ) @@ -4411,6 
+4482,7 @@ def test_get_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.NetworkEndpointGroup() + post_with_metadata.return_value = compute.NetworkEndpointGroup(), metadata client.get( request, @@ -4422,6 +4494,7 @@ def test_get_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_insert_rest_bad_request( @@ -4657,10 +4730,14 @@ def test_insert_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegionNetworkEndpointGroupsRestInterceptor, "post_insert" ) as post, mock.patch.object( + transports.RegionNetworkEndpointGroupsRestInterceptor, + "post_insert_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.RegionNetworkEndpointGroupsRestInterceptor, "pre_insert" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.InsertRegionNetworkEndpointGroupRequest.pb( compute.InsertRegionNetworkEndpointGroupRequest() ) @@ -4684,6 +4761,7 @@ def test_insert_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.insert( request, @@ -4695,6 +4773,7 @@ def test_insert_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_rest_bad_request( @@ -4785,10 +4864,13 @@ def test_list_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegionNetworkEndpointGroupsRestInterceptor, "post_list" ) as post, mock.patch.object( + transports.RegionNetworkEndpointGroupsRestInterceptor, "post_list_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.RegionNetworkEndpointGroupsRestInterceptor, "pre_list" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.ListRegionNetworkEndpointGroupsRequest.pb( compute.ListRegionNetworkEndpointGroupsRequest() ) @@ -4814,6 +4896,7 @@ def test_list_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.NetworkEndpointGroupList() + post_with_metadata.return_value = compute.NetworkEndpointGroupList(), metadata client.list( request, @@ -4825,6 +4908,7 @@ def test_list_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_network_endpoints_rest_bad_request( @@ -4924,11 +5008,15 @@ def test_list_network_endpoints_rest_interceptors(null_interceptor): transports.RegionNetworkEndpointGroupsRestInterceptor, "post_list_network_endpoints", ) as post, mock.patch.object( + transports.RegionNetworkEndpointGroupsRestInterceptor, + "post_list_network_endpoints_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.RegionNetworkEndpointGroupsRestInterceptor, "pre_list_network_endpoints", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.ListNetworkEndpointsRegionNetworkEndpointGroupsRequest.pb( compute.ListNetworkEndpointsRegionNetworkEndpointGroupsRequest() ) @@ -4954,6 +5042,10 @@ def test_list_network_endpoints_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.NetworkEndpointGroupsListNetworkEndpoints() + post_with_metadata.return_value = 
( + compute.NetworkEndpointGroupsListNetworkEndpoints(), + metadata, + ) client.list_network_endpoints( request, @@ -4965,6 +5057,7 @@ def test_list_network_endpoints_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_initialize_client_w_rest(): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_network_firewall_policies.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_network_firewall_policies.py index 0a37e80df5ba..01e53e03d63f 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_network_firewall_policies.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_network_firewall_policies.py @@ -66,6 +66,13 @@ ) from google.cloud.compute_v1.types import compute +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -348,6 +355,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = RegionNetworkFirewallPoliciesClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = RegionNetworkFirewallPoliciesClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -6989,10 +7039,14 @@ def test_add_association_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegionNetworkFirewallPoliciesRestInterceptor, "post_add_association" ) as post, mock.patch.object( + transports.RegionNetworkFirewallPoliciesRestInterceptor, + "post_add_association_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.RegionNetworkFirewallPoliciesRestInterceptor, "pre_add_association" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.AddAssociationRegionNetworkFirewallPolicyRequest.pb( compute.AddAssociationRegionNetworkFirewallPolicyRequest() ) @@ -7016,6 +7070,7 @@ def test_add_association_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() 
+ post_with_metadata.return_value = compute.Operation(), metadata client.add_association( request, @@ -7027,6 +7082,7 @@ def test_add_association_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_add_rule_rest_bad_request( @@ -7289,10 +7345,14 @@ def test_add_rule_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegionNetworkFirewallPoliciesRestInterceptor, "post_add_rule" ) as post, mock.patch.object( + transports.RegionNetworkFirewallPoliciesRestInterceptor, + "post_add_rule_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.RegionNetworkFirewallPoliciesRestInterceptor, "pre_add_rule" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.AddRuleRegionNetworkFirewallPolicyRequest.pb( compute.AddRuleRegionNetworkFirewallPolicyRequest() ) @@ -7316,6 +7376,7 @@ def test_add_rule_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.add_rule( request, @@ -7327,6 +7388,7 @@ def test_add_rule_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_clone_rules_rest_bad_request( @@ -7461,10 +7523,14 @@ def test_clone_rules_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegionNetworkFirewallPoliciesRestInterceptor, "post_clone_rules" ) as post, mock.patch.object( + transports.RegionNetworkFirewallPoliciesRestInterceptor, + "post_clone_rules_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.RegionNetworkFirewallPoliciesRestInterceptor, "pre_clone_rules" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.CloneRulesRegionNetworkFirewallPolicyRequest.pb( compute.CloneRulesRegionNetworkFirewallPolicyRequest() ) @@ -7488,6 +7554,7 @@ def test_clone_rules_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.clone_rules( request, @@ -7499,6 +7566,7 @@ def test_clone_rules_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_rest_bad_request( @@ -7633,10 +7701,14 @@ def test_delete_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegionNetworkFirewallPoliciesRestInterceptor, "post_delete" ) as post, mock.patch.object( + transports.RegionNetworkFirewallPoliciesRestInterceptor, + "post_delete_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.RegionNetworkFirewallPoliciesRestInterceptor, "pre_delete" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.DeleteRegionNetworkFirewallPolicyRequest.pb( compute.DeleteRegionNetworkFirewallPolicyRequest() ) @@ -7660,6 +7732,7 @@ def test_delete_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.delete( request, @@ -7671,6 +7744,7 @@ def test_delete_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + 
post_with_metadata.assert_called_once() def test_get_rest_bad_request( @@ -7787,10 +7861,14 @@ def test_get_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegionNetworkFirewallPoliciesRestInterceptor, "post_get" ) as post, mock.patch.object( + transports.RegionNetworkFirewallPoliciesRestInterceptor, + "post_get_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.RegionNetworkFirewallPoliciesRestInterceptor, "pre_get" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.GetRegionNetworkFirewallPolicyRequest.pb( compute.GetRegionNetworkFirewallPolicyRequest() ) @@ -7814,6 +7892,7 @@ def test_get_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.FirewallPolicy() + post_with_metadata.return_value = compute.FirewallPolicy(), metadata client.get( request, @@ -7825,6 +7904,7 @@ def test_get_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_association_rest_bad_request( @@ -7925,10 +8005,14 @@ def test_get_association_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegionNetworkFirewallPoliciesRestInterceptor, "post_get_association" ) as post, mock.patch.object( + transports.RegionNetworkFirewallPoliciesRestInterceptor, + "post_get_association_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.RegionNetworkFirewallPoliciesRestInterceptor, "pre_get_association" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.GetAssociationRegionNetworkFirewallPolicyRequest.pb( compute.GetAssociationRegionNetworkFirewallPolicyRequest() ) @@ -7954,6 +8038,7 @@ def test_get_association_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.FirewallPolicyAssociation() + post_with_metadata.return_value = compute.FirewallPolicyAssociation(), metadata client.get_association( request, @@ -7965,6 +8050,7 @@ def test_get_association_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_effective_firewalls_rest_bad_request( @@ -8055,11 +8141,15 @@ def test_get_effective_firewalls_rest_interceptors(null_interceptor): transports.RegionNetworkFirewallPoliciesRestInterceptor, "post_get_effective_firewalls", ) as post, mock.patch.object( + transports.RegionNetworkFirewallPoliciesRestInterceptor, + "post_get_effective_firewalls_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.RegionNetworkFirewallPoliciesRestInterceptor, "pre_get_effective_firewalls", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.GetEffectiveFirewallsRegionNetworkFirewallPolicyRequest.pb( compute.GetEffectiveFirewallsRegionNetworkFirewallPolicyRequest() ) @@ -8089,6 +8179,10 @@ def test_get_effective_firewalls_rest_interceptors(null_interceptor): post.return_value = ( compute.RegionNetworkFirewallPoliciesGetEffectiveFirewallsResponse() ) + post_with_metadata.return_value = ( + compute.RegionNetworkFirewallPoliciesGetEffectiveFirewallsResponse(), + metadata, + ) client.get_effective_firewalls( request, @@ -8100,6 +8194,7 @@ def test_get_effective_firewalls_rest_interceptors(null_interceptor): pre.assert_called_once() 
post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_iam_policy_rest_bad_request( @@ -8188,10 +8283,14 @@ def test_get_iam_policy_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegionNetworkFirewallPoliciesRestInterceptor, "post_get_iam_policy" ) as post, mock.patch.object( + transports.RegionNetworkFirewallPoliciesRestInterceptor, + "post_get_iam_policy_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.RegionNetworkFirewallPoliciesRestInterceptor, "pre_get_iam_policy" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.GetIamPolicyRegionNetworkFirewallPolicyRequest.pb( compute.GetIamPolicyRegionNetworkFirewallPolicyRequest() ) @@ -8215,6 +8314,7 @@ def test_get_iam_policy_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Policy() + post_with_metadata.return_value = compute.Policy(), metadata client.get_iam_policy( request, @@ -8226,6 +8326,7 @@ def test_get_iam_policy_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_rule_rest_bad_request( @@ -8342,10 +8443,14 @@ def test_get_rule_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegionNetworkFirewallPoliciesRestInterceptor, "post_get_rule" ) as post, mock.patch.object( + transports.RegionNetworkFirewallPoliciesRestInterceptor, + "post_get_rule_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.RegionNetworkFirewallPoliciesRestInterceptor, "pre_get_rule" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.GetRuleRegionNetworkFirewallPolicyRequest.pb( compute.GetRuleRegionNetworkFirewallPolicyRequest() ) @@ -8369,6 +8474,7 @@ def test_get_rule_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.FirewallPolicyRule() + post_with_metadata.return_value = compute.FirewallPolicyRule(), metadata client.get_rule( request, @@ -8380,6 +8486,7 @@ def test_get_rule_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_insert_rest_bad_request( @@ -8665,10 +8772,14 @@ def test_insert_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegionNetworkFirewallPoliciesRestInterceptor, "post_insert" ) as post, mock.patch.object( + transports.RegionNetworkFirewallPoliciesRestInterceptor, + "post_insert_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.RegionNetworkFirewallPoliciesRestInterceptor, "pre_insert" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.InsertRegionNetworkFirewallPolicyRequest.pb( compute.InsertRegionNetworkFirewallPolicyRequest() ) @@ -8692,6 +8803,7 @@ def test_insert_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.insert( request, @@ -8703,6 +8815,7 @@ def test_insert_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_rest_bad_request( @@ -8791,10 +8904,14 @@ def test_list_rest_interceptors(null_interceptor): ) as transcode, 
mock.patch.object( transports.RegionNetworkFirewallPoliciesRestInterceptor, "post_list" ) as post, mock.patch.object( + transports.RegionNetworkFirewallPoliciesRestInterceptor, + "post_list_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.RegionNetworkFirewallPoliciesRestInterceptor, "pre_list" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.ListRegionNetworkFirewallPoliciesRequest.pb( compute.ListRegionNetworkFirewallPoliciesRequest() ) @@ -8818,6 +8935,7 @@ def test_list_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.FirewallPolicyList() + post_with_metadata.return_value = compute.FirewallPolicyList(), metadata client.list( request, @@ -8829,6 +8947,7 @@ def test_list_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_patch_rest_bad_request( @@ -9122,10 +9241,14 @@ def test_patch_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegionNetworkFirewallPoliciesRestInterceptor, "post_patch" ) as post, mock.patch.object( + transports.RegionNetworkFirewallPoliciesRestInterceptor, + "post_patch_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.RegionNetworkFirewallPoliciesRestInterceptor, "pre_patch" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.PatchRegionNetworkFirewallPolicyRequest.pb( compute.PatchRegionNetworkFirewallPolicyRequest() ) @@ -9149,6 +9272,7 @@ def test_patch_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.patch( request, @@ -9160,6 +9284,7 @@ def test_patch_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_patch_rule_rest_bad_request( @@ -9422,10 +9547,14 @@ def test_patch_rule_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegionNetworkFirewallPoliciesRestInterceptor, "post_patch_rule" ) as post, mock.patch.object( + transports.RegionNetworkFirewallPoliciesRestInterceptor, + "post_patch_rule_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.RegionNetworkFirewallPoliciesRestInterceptor, "pre_patch_rule" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.PatchRuleRegionNetworkFirewallPolicyRequest.pb( compute.PatchRuleRegionNetworkFirewallPolicyRequest() ) @@ -9449,6 +9578,7 @@ def test_patch_rule_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.patch_rule( request, @@ -9460,6 +9590,7 @@ def test_patch_rule_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_remove_association_rest_bad_request( @@ -9595,11 +9726,15 @@ def test_remove_association_rest_interceptors(null_interceptor): transports.RegionNetworkFirewallPoliciesRestInterceptor, "post_remove_association", ) as post, mock.patch.object( + transports.RegionNetworkFirewallPoliciesRestInterceptor, + "post_remove_association_with_metadata", + ) as post_with_metadata, 
mock.patch.object( transports.RegionNetworkFirewallPoliciesRestInterceptor, "pre_remove_association", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.RemoveAssociationRegionNetworkFirewallPolicyRequest.pb( compute.RemoveAssociationRegionNetworkFirewallPolicyRequest() ) @@ -9623,6 +9758,7 @@ def test_remove_association_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.remove_association( request, @@ -9634,6 +9770,7 @@ def test_remove_association_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_remove_rule_rest_bad_request( @@ -9768,10 +9905,14 @@ def test_remove_rule_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegionNetworkFirewallPoliciesRestInterceptor, "post_remove_rule" ) as post, mock.patch.object( + transports.RegionNetworkFirewallPoliciesRestInterceptor, + "post_remove_rule_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.RegionNetworkFirewallPoliciesRestInterceptor, "pre_remove_rule" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.RemoveRuleRegionNetworkFirewallPolicyRequest.pb( compute.RemoveRuleRegionNetworkFirewallPolicyRequest() ) @@ -9795,6 +9936,7 @@ def test_remove_rule_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.remove_rule( request, @@ -9806,6 +9948,7 @@ def test_remove_rule_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_set_iam_policy_rest_bad_request( @@ -10010,10 +10153,14 @@ def test_set_iam_policy_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegionNetworkFirewallPoliciesRestInterceptor, "post_set_iam_policy" ) as post, mock.patch.object( + transports.RegionNetworkFirewallPoliciesRestInterceptor, + "post_set_iam_policy_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.RegionNetworkFirewallPoliciesRestInterceptor, "pre_set_iam_policy" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.SetIamPolicyRegionNetworkFirewallPolicyRequest.pb( compute.SetIamPolicyRegionNetworkFirewallPolicyRequest() ) @@ -10037,6 +10184,7 @@ def test_set_iam_policy_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Policy() + post_with_metadata.return_value = compute.Policy(), metadata client.set_iam_policy( request, @@ -10048,6 +10196,7 @@ def test_set_iam_policy_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_test_iam_permissions_rest_bad_request( @@ -10213,11 +10362,15 @@ def test_test_iam_permissions_rest_interceptors(null_interceptor): transports.RegionNetworkFirewallPoliciesRestInterceptor, "post_test_iam_permissions", ) as post, mock.patch.object( + transports.RegionNetworkFirewallPoliciesRestInterceptor, + "post_test_iam_permissions_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.RegionNetworkFirewallPoliciesRestInterceptor, 
"pre_test_iam_permissions", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.TestIamPermissionsRegionNetworkFirewallPolicyRequest.pb( compute.TestIamPermissionsRegionNetworkFirewallPolicyRequest() ) @@ -10243,6 +10396,7 @@ def test_test_iam_permissions_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.TestPermissionsResponse() + post_with_metadata.return_value = compute.TestPermissionsResponse(), metadata client.test_iam_permissions( request, @@ -10254,6 +10408,7 @@ def test_test_iam_permissions_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_initialize_client_w_rest(): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_notification_endpoints.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_notification_endpoints.py index dffefa1e54fb..ebe10d87780f 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_notification_endpoints.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_notification_endpoints.py @@ -66,6 +66,13 @@ ) from google.cloud.compute_v1.types import compute +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -344,6 +351,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = RegionNotificationEndpointsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = RegionNotificationEndpointsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -2480,10 +2530,14 @@ def test_delete_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegionNotificationEndpointsRestInterceptor, "post_delete" ) as post, mock.patch.object( + transports.RegionNotificationEndpointsRestInterceptor, + "post_delete_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.RegionNotificationEndpointsRestInterceptor, 
"pre_delete" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.DeleteRegionNotificationEndpointRequest.pb( compute.DeleteRegionNotificationEndpointRequest() ) @@ -2507,6 +2561,7 @@ def test_delete_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.delete( request, @@ -2518,6 +2573,7 @@ def test_delete_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_rest_bad_request( @@ -2622,10 +2678,13 @@ def test_get_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegionNotificationEndpointsRestInterceptor, "post_get" ) as post, mock.patch.object( + transports.RegionNotificationEndpointsRestInterceptor, "post_get_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.RegionNotificationEndpointsRestInterceptor, "pre_get" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.GetRegionNotificationEndpointRequest.pb( compute.GetRegionNotificationEndpointRequest() ) @@ -2651,6 +2710,7 @@ def test_get_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.NotificationEndpoint() + post_with_metadata.return_value = compute.NotificationEndpoint(), metadata client.get( request, @@ -2662,6 +2722,7 @@ def test_get_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_insert_rest_bad_request( @@ -2879,10 +2940,14 @@ def test_insert_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegionNotificationEndpointsRestInterceptor, "post_insert" ) as post, mock.patch.object( + transports.RegionNotificationEndpointsRestInterceptor, + "post_insert_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.RegionNotificationEndpointsRestInterceptor, "pre_insert" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.InsertRegionNotificationEndpointRequest.pb( compute.InsertRegionNotificationEndpointRequest() ) @@ -2906,6 +2971,7 @@ def test_insert_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.insert( request, @@ -2917,6 +2983,7 @@ def test_insert_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_rest_bad_request( @@ -3007,10 +3074,13 @@ def test_list_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegionNotificationEndpointsRestInterceptor, "post_list" ) as post, mock.patch.object( + transports.RegionNotificationEndpointsRestInterceptor, "post_list_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.RegionNotificationEndpointsRestInterceptor, "pre_list" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.ListRegionNotificationEndpointsRequest.pb( compute.ListRegionNotificationEndpointsRequest() ) @@ -3036,6 +3106,7 @@ def test_list_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata 
post.return_value = compute.NotificationEndpointList() + post_with_metadata.return_value = compute.NotificationEndpointList(), metadata client.list( request, @@ -3047,6 +3118,7 @@ def test_list_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_initialize_client_w_rest(): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_operations.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_operations.py index 4860f6b57518..65390025d06c 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_operations.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_operations.py @@ -59,6 +59,13 @@ ) from google.cloud.compute_v1.types import compute +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -313,6 +320,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = RegionOperationsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = RegionOperationsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -1946,10 +1996,13 @@ def test_delete_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegionOperationsRestInterceptor, "post_delete" ) as post, mock.patch.object( + transports.RegionOperationsRestInterceptor, "post_delete_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.RegionOperationsRestInterceptor, "pre_delete" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.DeleteRegionOperationRequest.pb( compute.DeleteRegionOperationRequest() ) @@ -1975,6 +2028,10 @@ def test_delete_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.DeleteRegionOperationResponse() + post_with_metadata.return_value = ( + compute.DeleteRegionOperationResponse(), + metadata, + ) client.delete( request, @@ -1986,6 +2043,7 @@ def 
test_delete_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_rest_bad_request(request_type=compute.GetRegionOperationRequest): @@ -2110,10 +2168,13 @@ def test_get_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegionOperationsRestInterceptor, "post_get" ) as post, mock.patch.object( + transports.RegionOperationsRestInterceptor, "post_get_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.RegionOperationsRestInterceptor, "pre_get" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.GetRegionOperationRequest.pb( compute.GetRegionOperationRequest() ) @@ -2137,6 +2198,7 @@ def test_get_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.get( request, @@ -2148,6 +2210,7 @@ def test_get_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_rest_bad_request(request_type=compute.ListRegionOperationsRequest): @@ -2236,10 +2299,13 @@ def test_list_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegionOperationsRestInterceptor, "post_list" ) as post, mock.patch.object( + transports.RegionOperationsRestInterceptor, "post_list_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.RegionOperationsRestInterceptor, "pre_list" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.ListRegionOperationsRequest.pb( compute.ListRegionOperationsRequest() ) @@ -2263,6 +2329,7 @@ def test_list_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.OperationList() + post_with_metadata.return_value = compute.OperationList(), metadata client.list( request, @@ -2274,6 +2341,7 @@ def test_list_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_wait_rest_bad_request(request_type=compute.WaitRegionOperationRequest): @@ -2398,10 +2466,13 @@ def test_wait_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegionOperationsRestInterceptor, "post_wait" ) as post, mock.patch.object( + transports.RegionOperationsRestInterceptor, "post_wait_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.RegionOperationsRestInterceptor, "pre_wait" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.WaitRegionOperationRequest.pb( compute.WaitRegionOperationRequest() ) @@ -2425,6 +2496,7 @@ def test_wait_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.wait( request, @@ -2436,6 +2508,7 @@ def test_wait_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_initialize_client_w_rest(): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_security_policies.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_security_policies.py index 5ea251fdb37f..f9cf72798403 
100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_security_policies.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_security_policies.py @@ -66,6 +66,13 @@ ) from google.cloud.compute_v1.types import compute +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -336,6 +343,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = RegionSecurityPoliciesClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = RegionSecurityPoliciesClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -5082,10 +5132,13 @@ def test_add_rule_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegionSecurityPoliciesRestInterceptor, "post_add_rule" ) as post, mock.patch.object( + transports.RegionSecurityPoliciesRestInterceptor, "post_add_rule_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.RegionSecurityPoliciesRestInterceptor, "pre_add_rule" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.AddRuleRegionSecurityPolicyRequest.pb( compute.AddRuleRegionSecurityPolicyRequest() ) @@ -5109,6 +5162,7 @@ def test_add_rule_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.add_rule( request, @@ -5120,6 +5174,7 @@ def test_add_rule_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_rest_bad_request( @@ -5254,10 +5309,13 @@ def test_delete_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegionSecurityPoliciesRestInterceptor, "post_delete" ) as post, mock.patch.object( + transports.RegionSecurityPoliciesRestInterceptor, "post_delete_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.RegionSecurityPoliciesRestInterceptor, "pre_delete" ) as 
pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.DeleteRegionSecurityPolicyRequest.pb( compute.DeleteRegionSecurityPolicyRequest() ) @@ -5281,6 +5339,7 @@ def test_delete_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.delete( request, @@ -5292,6 +5351,7 @@ def test_delete_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_rest_bad_request(request_type=compute.GetRegionSecurityPolicyRequest): @@ -5400,10 +5460,13 @@ def test_get_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegionSecurityPoliciesRestInterceptor, "post_get" ) as post, mock.patch.object( + transports.RegionSecurityPoliciesRestInterceptor, "post_get_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.RegionSecurityPoliciesRestInterceptor, "pre_get" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.GetRegionSecurityPolicyRequest.pb( compute.GetRegionSecurityPolicyRequest() ) @@ -5427,6 +5490,7 @@ def test_get_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.SecurityPolicy() + post_with_metadata.return_value = compute.SecurityPolicy(), metadata client.get( request, @@ -5438,6 +5502,7 @@ def test_get_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_rule_rest_bad_request( @@ -5538,10 +5603,13 @@ def test_get_rule_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegionSecurityPoliciesRestInterceptor, "post_get_rule" ) as post, mock.patch.object( + transports.RegionSecurityPoliciesRestInterceptor, "post_get_rule_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.RegionSecurityPoliciesRestInterceptor, "pre_get_rule" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.GetRuleRegionSecurityPolicyRequest.pb( compute.GetRuleRegionSecurityPolicyRequest() ) @@ -5565,6 +5633,7 @@ def test_get_rule_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.SecurityPolicyRule() + post_with_metadata.return_value = compute.SecurityPolicyRule(), metadata client.get_rule( request, @@ -5576,6 +5645,7 @@ def test_get_rule_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_insert_rest_bad_request( @@ -5935,10 +6005,13 @@ def test_insert_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegionSecurityPoliciesRestInterceptor, "post_insert" ) as post, mock.patch.object( + transports.RegionSecurityPoliciesRestInterceptor, "post_insert_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.RegionSecurityPoliciesRestInterceptor, "pre_insert" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.InsertRegionSecurityPolicyRequest.pb( compute.InsertRegionSecurityPolicyRequest() ) @@ -5962,6 +6035,7 @@ def test_insert_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = 
compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.insert( request, @@ -5973,6 +6047,7 @@ def test_insert_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_rest_bad_request(request_type=compute.ListRegionSecurityPoliciesRequest): @@ -6059,10 +6134,13 @@ def test_list_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegionSecurityPoliciesRestInterceptor, "post_list" ) as post, mock.patch.object( + transports.RegionSecurityPoliciesRestInterceptor, "post_list_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.RegionSecurityPoliciesRestInterceptor, "pre_list" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.ListRegionSecurityPoliciesRequest.pb( compute.ListRegionSecurityPoliciesRequest() ) @@ -6086,6 +6164,7 @@ def test_list_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.SecurityPolicyList() + post_with_metadata.return_value = compute.SecurityPolicyList(), metadata client.list( request, @@ -6097,6 +6176,7 @@ def test_list_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_patch_rest_bad_request(request_type=compute.PatchRegionSecurityPolicyRequest): @@ -6462,10 +6542,13 @@ def test_patch_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegionSecurityPoliciesRestInterceptor, "post_patch" ) as post, mock.patch.object( + transports.RegionSecurityPoliciesRestInterceptor, "post_patch_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.RegionSecurityPoliciesRestInterceptor, "pre_patch" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.PatchRegionSecurityPolicyRequest.pb( compute.PatchRegionSecurityPolicyRequest() ) @@ -6489,6 +6572,7 @@ def test_patch_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.patch( request, @@ -6500,6 +6584,7 @@ def test_patch_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_patch_rule_rest_bad_request( @@ -6797,10 +6882,14 @@ def test_patch_rule_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegionSecurityPoliciesRestInterceptor, "post_patch_rule" ) as post, mock.patch.object( + transports.RegionSecurityPoliciesRestInterceptor, + "post_patch_rule_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.RegionSecurityPoliciesRestInterceptor, "pre_patch_rule" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.PatchRuleRegionSecurityPolicyRequest.pb( compute.PatchRuleRegionSecurityPolicyRequest() ) @@ -6824,6 +6913,7 @@ def test_patch_rule_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.patch_rule( request, @@ -6835,6 +6925,7 @@ def test_patch_rule_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + 
post_with_metadata.assert_called_once() def test_remove_rule_rest_bad_request( @@ -6969,10 +7060,14 @@ def test_remove_rule_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegionSecurityPoliciesRestInterceptor, "post_remove_rule" ) as post, mock.patch.object( + transports.RegionSecurityPoliciesRestInterceptor, + "post_remove_rule_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.RegionSecurityPoliciesRestInterceptor, "pre_remove_rule" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.RemoveRuleRegionSecurityPolicyRequest.pb( compute.RemoveRuleRegionSecurityPolicyRequest() ) @@ -6996,6 +7091,7 @@ def test_remove_rule_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.remove_rule( request, @@ -7007,6 +7103,7 @@ def test_remove_rule_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_set_labels_rest_bad_request( @@ -7212,10 +7309,14 @@ def test_set_labels_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegionSecurityPoliciesRestInterceptor, "post_set_labels" ) as post, mock.patch.object( + transports.RegionSecurityPoliciesRestInterceptor, + "post_set_labels_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.RegionSecurityPoliciesRestInterceptor, "pre_set_labels" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.SetLabelsRegionSecurityPolicyRequest.pb( compute.SetLabelsRegionSecurityPolicyRequest() ) @@ -7239,6 +7340,7 @@ def test_set_labels_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.set_labels( request, @@ -7250,6 +7352,7 @@ def test_set_labels_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_initialize_client_w_rest(): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_ssl_certificates.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_ssl_certificates.py index 75098aa5ddc2..30ca05af0e08 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_ssl_certificates.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_ssl_certificates.py @@ -66,6 +66,13 @@ ) from google.cloud.compute_v1.types import compute +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -336,6 +343,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
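# The test__add_cred_info_for_auth_errors cases added in each of these test
# modules pin down one behaviour: credential info is appended to an error's
# details only for 401/403/404 responses, only when the credentials expose
# get_cred_info(), and only when that call returns something. A minimal
# standalone sketch of that behaviour, assuming an error object with a mutable
# `details` list and a credentials object shaped like the mocks in the tests;
# the shipped helper is generated into each client and may differ in detail.
import json

_AUTH_ERROR_CODES = (401, 403, 404)


def add_cred_info_for_auth_errors(error, credentials):
    """Append JSON-encoded credential info to auth-related errors only."""
    if error.code not in _AUTH_ERROR_CODES:
        return  # 5xx and other codes are left untouched, per the parametrized cases
    get_cred_info = getattr(credentials, "get_cred_info", None)
    if get_cred_info is None:
        return  # credentials without get_cred_info() contribute nothing
    cred_info = get_cred_info()
    if cred_info:
        error.details.append(json.dumps(cred_info))

# With the CRED_INFO_JSON constant defined above, a 403 error would gain
# json.dumps(CRED_INFO_JSON) as its last detail entry, which is exactly what
# the show_cred_info=True cases assert.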
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = RegionSslCertificatesClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = RegionSslCertificatesClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -2462,10 +2512,13 @@ def test_delete_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegionSslCertificatesRestInterceptor, "post_delete" ) as post, mock.patch.object( + transports.RegionSslCertificatesRestInterceptor, "post_delete_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.RegionSslCertificatesRestInterceptor, "pre_delete" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.DeleteRegionSslCertificateRequest.pb( compute.DeleteRegionSslCertificateRequest() ) @@ -2489,6 +2542,7 @@ def test_delete_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.delete( request, @@ -2500,6 +2554,7 @@ def test_delete_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_rest_bad_request(request_type=compute.GetRegionSslCertificateRequest): @@ -2612,10 +2667,13 @@ def test_get_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegionSslCertificatesRestInterceptor, "post_get" ) as post, mock.patch.object( + transports.RegionSslCertificatesRestInterceptor, "post_get_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.RegionSslCertificatesRestInterceptor, "pre_get" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.GetRegionSslCertificateRequest.pb( compute.GetRegionSslCertificateRequest() ) @@ -2639,6 +2697,7 @@ def test_get_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.SslCertificate() + post_with_metadata.return_value = compute.SslCertificate(), metadata client.get( request, @@ -2650,6 +2709,7 @@ def test_get_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_insert_rest_bad_request( @@ -2873,10 +2933,13 @@ def 
test_insert_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegionSslCertificatesRestInterceptor, "post_insert" ) as post, mock.patch.object( + transports.RegionSslCertificatesRestInterceptor, "post_insert_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.RegionSslCertificatesRestInterceptor, "pre_insert" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.InsertRegionSslCertificateRequest.pb( compute.InsertRegionSslCertificateRequest() ) @@ -2900,6 +2963,7 @@ def test_insert_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.insert( request, @@ -2911,6 +2975,7 @@ def test_insert_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_rest_bad_request(request_type=compute.ListRegionSslCertificatesRequest): @@ -2999,10 +3064,13 @@ def test_list_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegionSslCertificatesRestInterceptor, "post_list" ) as post, mock.patch.object( + transports.RegionSslCertificatesRestInterceptor, "post_list_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.RegionSslCertificatesRestInterceptor, "pre_list" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.ListRegionSslCertificatesRequest.pb( compute.ListRegionSslCertificatesRequest() ) @@ -3026,6 +3094,7 @@ def test_list_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.SslCertificateList() + post_with_metadata.return_value = compute.SslCertificateList(), metadata client.list( request, @@ -3037,6 +3106,7 @@ def test_list_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_initialize_client_w_rest(): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_ssl_policies.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_ssl_policies.py index 11b1a2056765..7e424cee83c6 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_ssl_policies.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_ssl_policies.py @@ -66,6 +66,13 @@ ) from google.cloud.compute_v1.types import compute +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -328,6 +335,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
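# The interceptor hunks above follow the second repeated pattern in this diff:
# next to each existing post_<method> hook, the REST interceptors gain a
# post_<method>_with_metadata hook that receives the deserialized response
# together with the response metadata and returns both. A rough illustration
# of how a user-defined interceptor might use it, assuming the generated REST
# transport's interceptor= parameter and the hook signature implied by the
# mocks here; LoggingInterceptor and the debug logging are illustrative only.
import logging

from google.auth.credentials import AnonymousCredentials
from google.cloud import compute_v1
from google.cloud.compute_v1.services.region_ssl_certificates import transports


class LoggingInterceptor(transports.RegionSslCertificatesRestInterceptor):
    def post_get_with_metadata(self, response, metadata):
        # Inspect (or rewrite) the response metadata before the client returns
        # the response to the caller; both values must be returned.
        logging.debug("get() response metadata: %s", metadata)
        return response, metadata


transport = transports.RegionSslCertificatesRestTransport(
    credentials=AnonymousCredentials(),
    interceptor=LoggingInterceptor(),
)
client = compute_v1.RegionSslCertificatesClient(transport=transport)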
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = RegionSslPoliciesClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = RegionSslPoliciesClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -3079,10 +3129,13 @@ def test_delete_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegionSslPoliciesRestInterceptor, "post_delete" ) as post, mock.patch.object( + transports.RegionSslPoliciesRestInterceptor, "post_delete_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.RegionSslPoliciesRestInterceptor, "pre_delete" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.DeleteRegionSslPolicyRequest.pb( compute.DeleteRegionSslPolicyRequest() ) @@ -3106,6 +3159,7 @@ def test_delete_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.delete( request, @@ -3117,6 +3171,7 @@ def test_delete_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_rest_bad_request(request_type=compute.GetRegionSslPolicyRequest): @@ -3221,10 +3276,13 @@ def test_get_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegionSslPoliciesRestInterceptor, "post_get" ) as post, mock.patch.object( + transports.RegionSslPoliciesRestInterceptor, "post_get_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.RegionSslPoliciesRestInterceptor, "pre_get" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.GetRegionSslPolicyRequest.pb( compute.GetRegionSslPolicyRequest() ) @@ -3248,6 +3306,7 @@ def test_get_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.SslPolicy() + post_with_metadata.return_value = compute.SslPolicy(), metadata client.get( request, @@ -3259,6 +3318,7 @@ def test_get_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_insert_rest_bad_request(request_type=compute.InsertRegionSslPolicyRequest): @@ -3471,10 +3531,13 @@ def 
test_insert_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegionSslPoliciesRestInterceptor, "post_insert" ) as post, mock.patch.object( + transports.RegionSslPoliciesRestInterceptor, "post_insert_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.RegionSslPoliciesRestInterceptor, "pre_insert" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.InsertRegionSslPolicyRequest.pb( compute.InsertRegionSslPolicyRequest() ) @@ -3498,6 +3561,7 @@ def test_insert_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.insert( request, @@ -3509,6 +3573,7 @@ def test_insert_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_rest_bad_request(request_type=compute.ListRegionSslPoliciesRequest): @@ -3597,10 +3662,13 @@ def test_list_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegionSslPoliciesRestInterceptor, "post_list" ) as post, mock.patch.object( + transports.RegionSslPoliciesRestInterceptor, "post_list_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.RegionSslPoliciesRestInterceptor, "pre_list" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.ListRegionSslPoliciesRequest.pb( compute.ListRegionSslPoliciesRequest() ) @@ -3624,6 +3692,7 @@ def test_list_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.SslPoliciesList() + post_with_metadata.return_value = compute.SslPoliciesList(), metadata client.list( request, @@ -3635,6 +3704,7 @@ def test_list_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_available_features_rest_bad_request( @@ -3719,10 +3789,14 @@ def test_list_available_features_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegionSslPoliciesRestInterceptor, "post_list_available_features" ) as post, mock.patch.object( + transports.RegionSslPoliciesRestInterceptor, + "post_list_available_features_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.RegionSslPoliciesRestInterceptor, "pre_list_available_features" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.ListAvailableFeaturesRegionSslPoliciesRequest.pb( compute.ListAvailableFeaturesRegionSslPoliciesRequest() ) @@ -3748,6 +3822,10 @@ def test_list_available_features_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.SslPoliciesListAvailableFeaturesResponse() + post_with_metadata.return_value = ( + compute.SslPoliciesListAvailableFeaturesResponse(), + metadata, + ) client.list_available_features( request, @@ -3759,6 +3837,7 @@ def test_list_available_features_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_patch_rest_bad_request(request_type=compute.PatchRegionSslPolicyRequest): @@ -3971,10 +4050,13 @@ def test_patch_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegionSslPoliciesRestInterceptor, "post_patch" ) as 
post, mock.patch.object( + transports.RegionSslPoliciesRestInterceptor, "post_patch_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.RegionSslPoliciesRestInterceptor, "pre_patch" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.PatchRegionSslPolicyRequest.pb( compute.PatchRegionSslPolicyRequest() ) @@ -3998,6 +4080,7 @@ def test_patch_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.patch( request, @@ -4009,6 +4092,7 @@ def test_patch_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_initialize_client_w_rest(): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_target_http_proxies.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_target_http_proxies.py index e81cc1f8f8ae..2ce2a3f638b5 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_target_http_proxies.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_target_http_proxies.py @@ -66,6 +66,13 @@ ) from google.cloud.compute_v1.types import compute +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -336,6 +343,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = RegionTargetHttpProxiesClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = RegionTargetHttpProxiesClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -2890,10 +2940,13 @@ def test_delete_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegionTargetHttpProxiesRestInterceptor, "post_delete" ) as post, mock.patch.object( + transports.RegionTargetHttpProxiesRestInterceptor, "post_delete_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.RegionTargetHttpProxiesRestInterceptor, "pre_delete" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.DeleteRegionTargetHttpProxyRequest.pb( compute.DeleteRegionTargetHttpProxyRequest() ) @@ -2917,6 +2970,7 @@ def test_delete_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.delete( request, @@ -2928,6 +2982,7 @@ def test_delete_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_rest_bad_request(request_type=compute.GetRegionTargetHttpProxyRequest): @@ -3038,10 +3093,13 @@ def test_get_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegionTargetHttpProxiesRestInterceptor, "post_get" ) as post, mock.patch.object( + transports.RegionTargetHttpProxiesRestInterceptor, "post_get_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.RegionTargetHttpProxiesRestInterceptor, "pre_get" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.GetRegionTargetHttpProxyRequest.pb( compute.GetRegionTargetHttpProxyRequest() ) @@ -3065,6 +3123,7 @@ def test_get_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.TargetHttpProxy() + post_with_metadata.return_value = compute.TargetHttpProxy(), metadata client.get( request, @@ -3076,6 +3135,7 @@ def test_get_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_insert_rest_bad_request( @@ -3288,10 +3348,13 @@ def 
test_insert_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegionTargetHttpProxiesRestInterceptor, "post_insert" ) as post, mock.patch.object( + transports.RegionTargetHttpProxiesRestInterceptor, "post_insert_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.RegionTargetHttpProxiesRestInterceptor, "pre_insert" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.InsertRegionTargetHttpProxyRequest.pb( compute.InsertRegionTargetHttpProxyRequest() ) @@ -3315,6 +3378,7 @@ def test_insert_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.insert( request, @@ -3326,6 +3390,7 @@ def test_insert_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_rest_bad_request(request_type=compute.ListRegionTargetHttpProxiesRequest): @@ -3414,10 +3479,13 @@ def test_list_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegionTargetHttpProxiesRestInterceptor, "post_list" ) as post, mock.patch.object( + transports.RegionTargetHttpProxiesRestInterceptor, "post_list_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.RegionTargetHttpProxiesRestInterceptor, "pre_list" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.ListRegionTargetHttpProxiesRequest.pb( compute.ListRegionTargetHttpProxiesRequest() ) @@ -3443,6 +3511,7 @@ def test_list_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.TargetHttpProxyList() + post_with_metadata.return_value = compute.TargetHttpProxyList(), metadata client.list( request, @@ -3454,6 +3523,7 @@ def test_list_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_set_url_map_rest_bad_request( @@ -3662,10 +3732,14 @@ def test_set_url_map_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegionTargetHttpProxiesRestInterceptor, "post_set_url_map" ) as post, mock.patch.object( + transports.RegionTargetHttpProxiesRestInterceptor, + "post_set_url_map_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.RegionTargetHttpProxiesRestInterceptor, "pre_set_url_map" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.SetUrlMapRegionTargetHttpProxyRequest.pb( compute.SetUrlMapRegionTargetHttpProxyRequest() ) @@ -3689,6 +3763,7 @@ def test_set_url_map_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.set_url_map( request, @@ -3700,6 +3775,7 @@ def test_set_url_map_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_initialize_client_w_rest(): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_target_https_proxies.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_target_https_proxies.py index 22815fda5a23..b0c292053b7f 100644 --- 
a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_target_https_proxies.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_target_https_proxies.py @@ -66,6 +66,13 @@ ) from google.cloud.compute_v1.types import compute +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -336,6 +343,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = RegionTargetHttpsProxiesClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = RegionTargetHttpsProxiesClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -3772,10 +3822,13 @@ def test_delete_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegionTargetHttpsProxiesRestInterceptor, "post_delete" ) as post, mock.patch.object( + transports.RegionTargetHttpsProxiesRestInterceptor, "post_delete_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.RegionTargetHttpsProxiesRestInterceptor, "pre_delete" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.DeleteRegionTargetHttpsProxyRequest.pb( compute.DeleteRegionTargetHttpsProxyRequest() ) @@ -3799,6 +3852,7 @@ def test_delete_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.delete( request, @@ -3810,6 +3864,7 @@ def test_delete_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_rest_bad_request(request_type=compute.GetRegionTargetHttpsProxyRequest): @@ -3934,10 +3989,13 @@ def test_get_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegionTargetHttpsProxiesRestInterceptor, "post_get" ) as post, mock.patch.object( + transports.RegionTargetHttpsProxiesRestInterceptor, "post_get_with_metadata" + ) as post_with_metadata, mock.patch.object( 
transports.RegionTargetHttpsProxiesRestInterceptor, "pre_get" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.GetRegionTargetHttpsProxyRequest.pb( compute.GetRegionTargetHttpsProxyRequest() ) @@ -3961,6 +4019,7 @@ def test_get_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.TargetHttpsProxy() + post_with_metadata.return_value = compute.TargetHttpsProxy(), metadata client.get( request, @@ -3972,6 +4031,7 @@ def test_get_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_insert_rest_bad_request( @@ -4191,10 +4251,13 @@ def test_insert_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegionTargetHttpsProxiesRestInterceptor, "post_insert" ) as post, mock.patch.object( + transports.RegionTargetHttpsProxiesRestInterceptor, "post_insert_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.RegionTargetHttpsProxiesRestInterceptor, "pre_insert" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.InsertRegionTargetHttpsProxyRequest.pb( compute.InsertRegionTargetHttpsProxyRequest() ) @@ -4218,6 +4281,7 @@ def test_insert_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.insert( request, @@ -4229,6 +4293,7 @@ def test_insert_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_rest_bad_request( @@ -4319,10 +4384,13 @@ def test_list_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegionTargetHttpsProxiesRestInterceptor, "post_list" ) as post, mock.patch.object( + transports.RegionTargetHttpsProxiesRestInterceptor, "post_list_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.RegionTargetHttpsProxiesRestInterceptor, "pre_list" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.ListRegionTargetHttpsProxiesRequest.pb( compute.ListRegionTargetHttpsProxiesRequest() ) @@ -4348,6 +4416,7 @@ def test_list_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.TargetHttpsProxyList() + post_with_metadata.return_value = compute.TargetHttpsProxyList(), metadata client.list( request, @@ -4359,6 +4428,7 @@ def test_list_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_patch_rest_bad_request( @@ -4586,10 +4656,13 @@ def test_patch_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegionTargetHttpsProxiesRestInterceptor, "post_patch" ) as post, mock.patch.object( + transports.RegionTargetHttpsProxiesRestInterceptor, "post_patch_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.RegionTargetHttpsProxiesRestInterceptor, "pre_patch" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.PatchRegionTargetHttpsProxyRequest.pb( compute.PatchRegionTargetHttpsProxyRequest() ) @@ -4613,6 +4686,7 @@ def test_patch_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata 
post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.patch( request, @@ -4624,6 +4698,7 @@ def test_patch_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_set_ssl_certificates_rest_bad_request( @@ -4843,10 +4918,14 @@ def test_set_ssl_certificates_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegionTargetHttpsProxiesRestInterceptor, "post_set_ssl_certificates" ) as post, mock.patch.object( + transports.RegionTargetHttpsProxiesRestInterceptor, + "post_set_ssl_certificates_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.RegionTargetHttpsProxiesRestInterceptor, "pre_set_ssl_certificates" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.SetSslCertificatesRegionTargetHttpsProxyRequest.pb( compute.SetSslCertificatesRegionTargetHttpsProxyRequest() ) @@ -4870,6 +4949,7 @@ def test_set_ssl_certificates_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.set_ssl_certificates( request, @@ -4881,6 +4961,7 @@ def test_set_ssl_certificates_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_set_url_map_rest_bad_request( @@ -5089,10 +5170,14 @@ def test_set_url_map_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegionTargetHttpsProxiesRestInterceptor, "post_set_url_map" ) as post, mock.patch.object( + transports.RegionTargetHttpsProxiesRestInterceptor, + "post_set_url_map_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.RegionTargetHttpsProxiesRestInterceptor, "pre_set_url_map" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.SetUrlMapRegionTargetHttpsProxyRequest.pb( compute.SetUrlMapRegionTargetHttpsProxyRequest() ) @@ -5116,6 +5201,7 @@ def test_set_url_map_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.set_url_map( request, @@ -5127,6 +5213,7 @@ def test_set_url_map_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_initialize_client_w_rest(): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_target_tcp_proxies.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_target_tcp_proxies.py index 56de523ac378..f0d5e971a983 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_target_tcp_proxies.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_target_tcp_proxies.py @@ -66,6 +66,13 @@ ) from google.cloud.compute_v1.types import compute +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -336,6 +343,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be 
an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = RegionTargetTcpProxiesClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = RegionTargetTcpProxiesClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -2464,10 +2514,13 @@ def test_delete_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegionTargetTcpProxiesRestInterceptor, "post_delete" ) as post, mock.patch.object( + transports.RegionTargetTcpProxiesRestInterceptor, "post_delete_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.RegionTargetTcpProxiesRestInterceptor, "pre_delete" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.DeleteRegionTargetTcpProxyRequest.pb( compute.DeleteRegionTargetTcpProxyRequest() ) @@ -2491,6 +2544,7 @@ def test_delete_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.delete( request, @@ -2502,6 +2556,7 @@ def test_delete_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_rest_bad_request(request_type=compute.GetRegionTargetTcpProxyRequest): @@ -2610,10 +2665,13 @@ def test_get_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegionTargetTcpProxiesRestInterceptor, "post_get" ) as post, mock.patch.object( + transports.RegionTargetTcpProxiesRestInterceptor, "post_get_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.RegionTargetTcpProxiesRestInterceptor, "pre_get" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.GetRegionTargetTcpProxyRequest.pb( compute.GetRegionTargetTcpProxyRequest() ) @@ -2637,6 +2695,7 @@ def test_get_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.TargetTcpProxy() + post_with_metadata.return_value = compute.TargetTcpProxy(), metadata client.get( request, @@ -2648,6 +2707,7 @@ def test_get_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_insert_rest_bad_request( @@ -2859,10 +2919,13 @@ 
def test_insert_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegionTargetTcpProxiesRestInterceptor, "post_insert" ) as post, mock.patch.object( + transports.RegionTargetTcpProxiesRestInterceptor, "post_insert_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.RegionTargetTcpProxiesRestInterceptor, "pre_insert" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.InsertRegionTargetTcpProxyRequest.pb( compute.InsertRegionTargetTcpProxyRequest() ) @@ -2886,6 +2949,7 @@ def test_insert_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.insert( request, @@ -2897,6 +2961,7 @@ def test_insert_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_rest_bad_request(request_type=compute.ListRegionTargetTcpProxiesRequest): @@ -2985,10 +3050,13 @@ def test_list_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegionTargetTcpProxiesRestInterceptor, "post_list" ) as post, mock.patch.object( + transports.RegionTargetTcpProxiesRestInterceptor, "post_list_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.RegionTargetTcpProxiesRestInterceptor, "pre_list" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.ListRegionTargetTcpProxiesRequest.pb( compute.ListRegionTargetTcpProxiesRequest() ) @@ -3012,6 +3080,7 @@ def test_list_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.TargetTcpProxyList() + post_with_metadata.return_value = compute.TargetTcpProxyList(), metadata client.list( request, @@ -3023,6 +3092,7 @@ def test_list_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_initialize_client_w_rest(): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_url_maps.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_url_maps.py index 9f478dfa904d..f89915d1860f 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_url_maps.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_url_maps.py @@ -66,6 +66,13 @@ ) from google.cloud.compute_v1.types import compute +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -315,6 +322,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = RegionUrlMapsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = RegionUrlMapsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -3473,10 +3523,13 @@ def test_delete_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegionUrlMapsRestInterceptor, "post_delete" ) as post, mock.patch.object( + transports.RegionUrlMapsRestInterceptor, "post_delete_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.RegionUrlMapsRestInterceptor, "pre_delete" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.DeleteRegionUrlMapRequest.pb( compute.DeleteRegionUrlMapRequest() ) @@ -3500,6 +3553,7 @@ def test_delete_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.delete( request, @@ -3511,6 +3565,7 @@ def test_delete_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_rest_bad_request(request_type=compute.GetRegionUrlMapRequest): @@ -3609,10 +3664,13 @@ def test_get_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegionUrlMapsRestInterceptor, "post_get" ) as post, mock.patch.object( + transports.RegionUrlMapsRestInterceptor, "post_get_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.RegionUrlMapsRestInterceptor, "pre_get" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.GetRegionUrlMapRequest.pb(compute.GetRegionUrlMapRequest()) transcode.return_value = { "method": "post", @@ -3634,6 +3692,7 @@ def test_get_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.UrlMap() + post_with_metadata.return_value = compute.UrlMap(), metadata client.get( request, @@ -3645,6 +3704,7 @@ def test_get_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_insert_rest_bad_request(request_type=compute.InsertRegionUrlMapRequest): @@ -4023,10 +4083,13 @@ def test_insert_rest_interceptors(null_interceptor): ) 
as transcode, mock.patch.object( transports.RegionUrlMapsRestInterceptor, "post_insert" ) as post, mock.patch.object( + transports.RegionUrlMapsRestInterceptor, "post_insert_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.RegionUrlMapsRestInterceptor, "pre_insert" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.InsertRegionUrlMapRequest.pb( compute.InsertRegionUrlMapRequest() ) @@ -4050,6 +4113,7 @@ def test_insert_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.insert( request, @@ -4061,6 +4125,7 @@ def test_insert_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_rest_bad_request(request_type=compute.ListRegionUrlMapsRequest): @@ -4149,10 +4214,13 @@ def test_list_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegionUrlMapsRestInterceptor, "post_list" ) as post, mock.patch.object( + transports.RegionUrlMapsRestInterceptor, "post_list_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.RegionUrlMapsRestInterceptor, "pre_list" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.ListRegionUrlMapsRequest.pb( compute.ListRegionUrlMapsRequest() ) @@ -4176,6 +4244,7 @@ def test_list_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.UrlMapList() + post_with_metadata.return_value = compute.UrlMapList(), metadata client.list( request, @@ -4187,6 +4256,7 @@ def test_list_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_patch_rest_bad_request(request_type=compute.PatchRegionUrlMapRequest): @@ -4565,10 +4635,13 @@ def test_patch_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegionUrlMapsRestInterceptor, "post_patch" ) as post, mock.patch.object( + transports.RegionUrlMapsRestInterceptor, "post_patch_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.RegionUrlMapsRestInterceptor, "pre_patch" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.PatchRegionUrlMapRequest.pb( compute.PatchRegionUrlMapRequest() ) @@ -4592,6 +4665,7 @@ def test_patch_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.patch( request, @@ -4603,6 +4677,7 @@ def test_patch_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_rest_bad_request(request_type=compute.UpdateRegionUrlMapRequest): @@ -4981,10 +5056,13 @@ def test_update_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegionUrlMapsRestInterceptor, "post_update" ) as post, mock.patch.object( + transports.RegionUrlMapsRestInterceptor, "post_update_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.RegionUrlMapsRestInterceptor, "pre_update" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = 
compute.UpdateRegionUrlMapRequest.pb( compute.UpdateRegionUrlMapRequest() ) @@ -5008,6 +5086,7 @@ def test_update_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.update( request, @@ -5019,6 +5098,7 @@ def test_update_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_validate_rest_bad_request(request_type=compute.ValidateRegionUrlMapRequest): @@ -5370,10 +5450,13 @@ def test_validate_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegionUrlMapsRestInterceptor, "post_validate" ) as post, mock.patch.object( + transports.RegionUrlMapsRestInterceptor, "post_validate_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.RegionUrlMapsRestInterceptor, "pre_validate" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.ValidateRegionUrlMapRequest.pb( compute.ValidateRegionUrlMapRequest() ) @@ -5399,6 +5482,7 @@ def test_validate_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.UrlMapsValidateResponse() + post_with_metadata.return_value = compute.UrlMapsValidateResponse(), metadata client.validate( request, @@ -5410,6 +5494,7 @@ def test_validate_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_initialize_client_w_rest(): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_zones.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_zones.py index bd45824b687a..44ce18b01327 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_zones.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_zones.py @@ -59,6 +59,13 @@ ) from google.cloud.compute_v1.types import compute +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -293,6 +300,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = RegionZonesClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = RegionZonesClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -1311,10 +1361,13 @@ def test_list_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegionZonesRestInterceptor, "post_list" ) as post, mock.patch.object( + transports.RegionZonesRestInterceptor, "post_list_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.RegionZonesRestInterceptor, "pre_list" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.ListRegionZonesRequest.pb(compute.ListRegionZonesRequest()) transcode.return_value = { "method": "post", @@ -1336,6 +1389,7 @@ def test_list_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.ZoneList() + post_with_metadata.return_value = compute.ZoneList(), metadata client.list( request, @@ -1347,6 +1401,7 @@ def test_list_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_initialize_client_w_rest(): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_regions.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_regions.py index 8ca019192b86..3f7292e5242d 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_regions.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_regions.py @@ -55,6 +55,13 @@ from google.cloud.compute_v1.services.regions import RegionsClient, pagers, transports from google.cloud.compute_v1.types import compute +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -273,6 +280,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = RegionsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = RegionsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -1472,10 +1522,13 @@ def test_get_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegionsRestInterceptor, "post_get" ) as post, mock.patch.object( + transports.RegionsRestInterceptor, "post_get_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.RegionsRestInterceptor, "pre_get" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.GetRegionRequest.pb(compute.GetRegionRequest()) transcode.return_value = { "method": "post", @@ -1497,6 +1550,7 @@ def test_get_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Region() + post_with_metadata.return_value = compute.Region(), metadata client.get( request, @@ -1508,6 +1562,7 @@ def test_get_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_rest_bad_request(request_type=compute.ListRegionsRequest): @@ -1594,10 +1649,13 @@ def test_list_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RegionsRestInterceptor, "post_list" ) as post, mock.patch.object( + transports.RegionsRestInterceptor, "post_list_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.RegionsRestInterceptor, "pre_list" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.ListRegionsRequest.pb(compute.ListRegionsRequest()) transcode.return_value = { "method": "post", @@ -1619,6 +1677,7 @@ def test_list_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.RegionList() + post_with_metadata.return_value = compute.RegionList(), metadata client.list( request, @@ -1630,6 +1689,7 @@ def test_list_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_initialize_client_w_rest(): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_reservations.py 
b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_reservations.py index a46d872f9272..d6bef130dd07 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_reservations.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_reservations.py @@ -66,6 +66,13 @@ ) from google.cloud.compute_v1.types import compute +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -304,6 +311,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = ReservationsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = ReservationsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -4212,10 +4262,13 @@ def test_aggregated_list_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ReservationsRestInterceptor, "post_aggregated_list" ) as post, mock.patch.object( + transports.ReservationsRestInterceptor, "post_aggregated_list_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ReservationsRestInterceptor, "pre_aggregated_list" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.AggregatedListReservationsRequest.pb( compute.AggregatedListReservationsRequest() ) @@ -4241,6 +4294,7 @@ def test_aggregated_list_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.ReservationAggregatedList() + post_with_metadata.return_value = compute.ReservationAggregatedList(), metadata client.aggregated_list( request, @@ -4252,6 +4306,7 @@ def test_aggregated_list_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_rest_bad_request(request_type=compute.DeleteReservationRequest): @@ -4376,10 +4431,13 @@ def test_delete_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ReservationsRestInterceptor, "post_delete" ) as post, mock.patch.object( + 
transports.ReservationsRestInterceptor, "post_delete_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ReservationsRestInterceptor, "pre_delete" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.DeleteReservationRequest.pb( compute.DeleteReservationRequest() ) @@ -4403,6 +4461,7 @@ def test_delete_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.delete( request, @@ -4414,6 +4473,7 @@ def test_delete_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_rest_bad_request(request_type=compute.GetReservationRequest): @@ -4516,10 +4576,13 @@ def test_get_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ReservationsRestInterceptor, "post_get" ) as post, mock.patch.object( + transports.ReservationsRestInterceptor, "post_get_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ReservationsRestInterceptor, "pre_get" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.GetReservationRequest.pb(compute.GetReservationRequest()) transcode.return_value = { "method": "post", @@ -4541,6 +4604,7 @@ def test_get_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Reservation() + post_with_metadata.return_value = compute.Reservation(), metadata client.get( request, @@ -4552,6 +4616,7 @@ def test_get_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_iam_policy_rest_bad_request( @@ -4640,10 +4705,13 @@ def test_get_iam_policy_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ReservationsRestInterceptor, "post_get_iam_policy" ) as post, mock.patch.object( + transports.ReservationsRestInterceptor, "post_get_iam_policy_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ReservationsRestInterceptor, "pre_get_iam_policy" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.GetIamPolicyReservationRequest.pb( compute.GetIamPolicyReservationRequest() ) @@ -4667,6 +4735,7 @@ def test_get_iam_policy_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Policy() + post_with_metadata.return_value = compute.Policy(), metadata client.get_iam_policy( request, @@ -4678,6 +4747,7 @@ def test_get_iam_policy_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_insert_rest_bad_request(request_type=compute.InsertReservationRequest): @@ -4922,10 +4992,13 @@ def test_insert_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ReservationsRestInterceptor, "post_insert" ) as post, mock.patch.object( + transports.ReservationsRestInterceptor, "post_insert_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ReservationsRestInterceptor, "pre_insert" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.InsertReservationRequest.pb( compute.InsertReservationRequest() ) @@ -4949,6 +5022,7 @@ def 
test_insert_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.insert( request, @@ -4960,6 +5034,7 @@ def test_insert_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_rest_bad_request(request_type=compute.ListReservationsRequest): @@ -5048,10 +5123,13 @@ def test_list_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ReservationsRestInterceptor, "post_list" ) as post, mock.patch.object( + transports.ReservationsRestInterceptor, "post_list_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ReservationsRestInterceptor, "pre_list" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.ListReservationsRequest.pb( compute.ListReservationsRequest() ) @@ -5075,6 +5153,7 @@ def test_list_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.ReservationList() + post_with_metadata.return_value = compute.ReservationList(), metadata client.list( request, @@ -5086,6 +5165,7 @@ def test_list_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_resize_rest_bad_request(request_type=compute.ResizeReservationRequest): @@ -5288,10 +5368,13 @@ def test_resize_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ReservationsRestInterceptor, "post_resize" ) as post, mock.patch.object( + transports.ReservationsRestInterceptor, "post_resize_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ReservationsRestInterceptor, "pre_resize" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.ResizeReservationRequest.pb( compute.ResizeReservationRequest() ) @@ -5315,6 +5398,7 @@ def test_resize_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.resize( request, @@ -5326,6 +5410,7 @@ def test_resize_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_set_iam_policy_rest_bad_request( @@ -5530,10 +5615,13 @@ def test_set_iam_policy_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ReservationsRestInterceptor, "post_set_iam_policy" ) as post, mock.patch.object( + transports.ReservationsRestInterceptor, "post_set_iam_policy_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ReservationsRestInterceptor, "pre_set_iam_policy" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.SetIamPolicyReservationRequest.pb( compute.SetIamPolicyReservationRequest() ) @@ -5557,6 +5645,7 @@ def test_set_iam_policy_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Policy() + post_with_metadata.return_value = compute.Policy(), metadata client.set_iam_policy( request, @@ -5568,6 +5657,7 @@ def test_set_iam_policy_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def 
test_test_iam_permissions_rest_bad_request( @@ -5730,10 +5820,14 @@ def test_test_iam_permissions_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ReservationsRestInterceptor, "post_test_iam_permissions" ) as post, mock.patch.object( + transports.ReservationsRestInterceptor, + "post_test_iam_permissions_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ReservationsRestInterceptor, "pre_test_iam_permissions" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.TestIamPermissionsReservationRequest.pb( compute.TestIamPermissionsReservationRequest() ) @@ -5759,6 +5853,7 @@ def test_test_iam_permissions_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.TestPermissionsResponse() + post_with_metadata.return_value = compute.TestPermissionsResponse(), metadata client.test_iam_permissions( request, @@ -5770,6 +5865,7 @@ def test_test_iam_permissions_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_rest_bad_request(request_type=compute.UpdateReservationRequest): @@ -6014,10 +6110,13 @@ def test_update_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ReservationsRestInterceptor, "post_update" ) as post, mock.patch.object( + transports.ReservationsRestInterceptor, "post_update_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ReservationsRestInterceptor, "pre_update" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.UpdateReservationRequest.pb( compute.UpdateReservationRequest() ) @@ -6041,6 +6140,7 @@ def test_update_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.update( request, @@ -6052,6 +6152,7 @@ def test_update_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_initialize_client_w_rest(): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_resource_policies.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_resource_policies.py index c4136e611dbc..a5cff380a629 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_resource_policies.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_resource_policies.py @@ -66,6 +66,13 @@ ) from google.cloud.compute_v1.types import compute +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -320,6 +327,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = ResourcePoliciesClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = ResourcePoliciesClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -3753,10 +3803,13 @@ def test_aggregated_list_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ResourcePoliciesRestInterceptor, "post_aggregated_list" ) as post, mock.patch.object( + transports.ResourcePoliciesRestInterceptor, "post_aggregated_list_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ResourcePoliciesRestInterceptor, "pre_aggregated_list" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.AggregatedListResourcePoliciesRequest.pb( compute.AggregatedListResourcePoliciesRequest() ) @@ -3782,6 +3835,10 @@ def test_aggregated_list_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.ResourcePolicyAggregatedList() + post_with_metadata.return_value = ( + compute.ResourcePolicyAggregatedList(), + metadata, + ) client.aggregated_list( request, @@ -3793,6 +3850,7 @@ def test_aggregated_list_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_rest_bad_request(request_type=compute.DeleteResourcePolicyRequest): @@ -3925,10 +3983,13 @@ def test_delete_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ResourcePoliciesRestInterceptor, "post_delete" ) as post, mock.patch.object( + transports.ResourcePoliciesRestInterceptor, "post_delete_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ResourcePoliciesRestInterceptor, "pre_delete" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.DeleteResourcePolicyRequest.pb( compute.DeleteResourcePolicyRequest() ) @@ -3952,6 +4013,7 @@ def test_delete_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.delete( request, @@ -3963,6 +4025,7 @@ def test_delete_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() 
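# The interceptor tests in this change patch a new post_<method>_with_metadata
# hook alongside the existing post_<method> hook and assert both are invoked,
# with the metadata hook returning a (response, metadata) tuple. The class
# below is a hedged sketch of a custom interceptor overriding both hooks; the
# import path and exact signatures are assumptions inferred from the sibling
# service test files in this diff, not a definitive implementation.
from google.cloud.compute_v1.services.resource_policies import transports


class LoggingResourcePoliciesInterceptor(transports.ResourcePoliciesRestInterceptor):
    def post_delete(self, response):
        # Legacy hook: only the decoded response is available.
        return response

    def post_delete_with_metadata(self, response, metadata):
        # New hook: the trailing metadata from the REST call is surfaced as
        # well, and both values are returned as a tuple.
        return response, metadata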
def test_get_rest_bad_request(request_type=compute.GetResourcePolicyRequest): @@ -4067,10 +4130,13 @@ def test_get_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ResourcePoliciesRestInterceptor, "post_get" ) as post, mock.patch.object( + transports.ResourcePoliciesRestInterceptor, "post_get_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ResourcePoliciesRestInterceptor, "pre_get" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.GetResourcePolicyRequest.pb( compute.GetResourcePolicyRequest() ) @@ -4094,6 +4160,7 @@ def test_get_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.ResourcePolicy() + post_with_metadata.return_value = compute.ResourcePolicy(), metadata client.get( request, @@ -4105,6 +4172,7 @@ def test_get_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_iam_policy_rest_bad_request( @@ -4193,10 +4261,13 @@ def test_get_iam_policy_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ResourcePoliciesRestInterceptor, "post_get_iam_policy" ) as post, mock.patch.object( + transports.ResourcePoliciesRestInterceptor, "post_get_iam_policy_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ResourcePoliciesRestInterceptor, "pre_get_iam_policy" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.GetIamPolicyResourcePolicyRequest.pb( compute.GetIamPolicyResourcePolicyRequest() ) @@ -4220,6 +4291,7 @@ def test_get_iam_policy_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Policy() + post_with_metadata.return_value = compute.Policy(), metadata client.get_iam_policy( request, @@ -4231,6 +4303,7 @@ def test_get_iam_policy_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_insert_rest_bad_request(request_type=compute.InsertResourcePolicyRequest): @@ -4491,10 +4564,13 @@ def test_insert_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ResourcePoliciesRestInterceptor, "post_insert" ) as post, mock.patch.object( + transports.ResourcePoliciesRestInterceptor, "post_insert_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ResourcePoliciesRestInterceptor, "pre_insert" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.InsertResourcePolicyRequest.pb( compute.InsertResourcePolicyRequest() ) @@ -4518,6 +4594,7 @@ def test_insert_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.insert( request, @@ -4529,6 +4606,7 @@ def test_insert_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_rest_bad_request(request_type=compute.ListResourcePoliciesRequest): @@ -4619,10 +4697,13 @@ def test_list_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ResourcePoliciesRestInterceptor, "post_list" ) as post, mock.patch.object( + transports.ResourcePoliciesRestInterceptor, "post_list_with_metadata" + ) as 
post_with_metadata, mock.patch.object( transports.ResourcePoliciesRestInterceptor, "pre_list" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.ListResourcePoliciesRequest.pb( compute.ListResourcePoliciesRequest() ) @@ -4646,6 +4727,7 @@ def test_list_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.ResourcePolicyList() + post_with_metadata.return_value = compute.ResourcePolicyList(), metadata client.list( request, @@ -4657,6 +4739,7 @@ def test_list_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_patch_rest_bad_request(request_type=compute.PatchResourcePolicyRequest): @@ -4925,10 +5008,13 @@ def test_patch_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ResourcePoliciesRestInterceptor, "post_patch" ) as post, mock.patch.object( + transports.ResourcePoliciesRestInterceptor, "post_patch_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ResourcePoliciesRestInterceptor, "pre_patch" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.PatchResourcePolicyRequest.pb( compute.PatchResourcePolicyRequest() ) @@ -4952,6 +5038,7 @@ def test_patch_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.patch( request, @@ -4963,6 +5050,7 @@ def test_patch_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_set_iam_policy_rest_bad_request( @@ -5167,10 +5255,13 @@ def test_set_iam_policy_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ResourcePoliciesRestInterceptor, "post_set_iam_policy" ) as post, mock.patch.object( + transports.ResourcePoliciesRestInterceptor, "post_set_iam_policy_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ResourcePoliciesRestInterceptor, "pre_set_iam_policy" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.SetIamPolicyResourcePolicyRequest.pb( compute.SetIamPolicyResourcePolicyRequest() ) @@ -5194,6 +5285,7 @@ def test_set_iam_policy_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Policy() + post_with_metadata.return_value = compute.Policy(), metadata client.set_iam_policy( request, @@ -5205,6 +5297,7 @@ def test_set_iam_policy_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_test_iam_permissions_rest_bad_request( @@ -5367,10 +5460,14 @@ def test_test_iam_permissions_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ResourcePoliciesRestInterceptor, "post_test_iam_permissions" ) as post, mock.patch.object( + transports.ResourcePoliciesRestInterceptor, + "post_test_iam_permissions_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ResourcePoliciesRestInterceptor, "pre_test_iam_permissions" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.TestIamPermissionsResourcePolicyRequest.pb( compute.TestIamPermissionsResourcePolicyRequest() ) 
@@ -5396,6 +5493,7 @@ def test_test_iam_permissions_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.TestPermissionsResponse() + post_with_metadata.return_value = compute.TestPermissionsResponse(), metadata client.test_iam_permissions( request, @@ -5407,6 +5505,7 @@ def test_test_iam_permissions_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_initialize_client_w_rest(): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_routers.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_routers.py index 9655b229f31f..c1fbdf8e0923 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_routers.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_routers.py @@ -62,6 +62,13 @@ from google.cloud.compute_v1.services.routers import RoutersClient, pagers, transports from google.cloud.compute_v1.types import compute +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -280,6 +287,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = RoutersClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = RoutersClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -4363,10 +4413,13 @@ def test_aggregated_list_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RoutersRestInterceptor, "post_aggregated_list" ) as post, mock.patch.object( + transports.RoutersRestInterceptor, "post_aggregated_list_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.RoutersRestInterceptor, "pre_aggregated_list" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.AggregatedListRoutersRequest.pb( compute.AggregatedListRoutersRequest() ) @@ -4392,6 +4445,7 @@ def test_aggregated_list_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = 
compute.RouterAggregatedList() + post_with_metadata.return_value = compute.RouterAggregatedList(), metadata client.aggregated_list( request, @@ -4403,6 +4457,7 @@ def test_aggregated_list_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_rest_bad_request(request_type=compute.DeleteRouterRequest): @@ -4525,10 +4580,13 @@ def test_delete_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RoutersRestInterceptor, "post_delete" ) as post, mock.patch.object( + transports.RoutersRestInterceptor, "post_delete_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.RoutersRestInterceptor, "pre_delete" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.DeleteRouterRequest.pb(compute.DeleteRouterRequest()) transcode.return_value = { "method": "post", @@ -4550,6 +4608,7 @@ def test_delete_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.delete( request, @@ -4561,6 +4620,7 @@ def test_delete_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_rest_bad_request(request_type=compute.GetRouterRequest): @@ -4657,10 +4717,13 @@ def test_get_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RoutersRestInterceptor, "post_get" ) as post, mock.patch.object( + transports.RoutersRestInterceptor, "post_get_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.RoutersRestInterceptor, "pre_get" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.GetRouterRequest.pb(compute.GetRouterRequest()) transcode.return_value = { "method": "post", @@ -4682,6 +4745,7 @@ def test_get_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Router() + post_with_metadata.return_value = compute.Router(), metadata client.get( request, @@ -4693,6 +4757,7 @@ def test_get_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_nat_ip_info_rest_bad_request( @@ -4772,10 +4837,13 @@ def test_get_nat_ip_info_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RoutersRestInterceptor, "post_get_nat_ip_info" ) as post, mock.patch.object( + transports.RoutersRestInterceptor, "post_get_nat_ip_info_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.RoutersRestInterceptor, "pre_get_nat_ip_info" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.GetNatIpInfoRouterRequest.pb( compute.GetNatIpInfoRouterRequest() ) @@ -4799,6 +4867,7 @@ def test_get_nat_ip_info_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.NatIpInfoResponse() + post_with_metadata.return_value = compute.NatIpInfoResponse(), metadata client.get_nat_ip_info( request, @@ -4810,6 +4879,7 @@ def test_get_nat_ip_info_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_nat_mapping_info_rest_bad_request( @@ -4898,10 +4968,13 @@ def 
test_get_nat_mapping_info_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RoutersRestInterceptor, "post_get_nat_mapping_info" ) as post, mock.patch.object( + transports.RoutersRestInterceptor, "post_get_nat_mapping_info_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.RoutersRestInterceptor, "pre_get_nat_mapping_info" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.GetNatMappingInfoRoutersRequest.pb( compute.GetNatMappingInfoRoutersRequest() ) @@ -4927,6 +5000,7 @@ def test_get_nat_mapping_info_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.VmEndpointNatMappingsList() + post_with_metadata.return_value = compute.VmEndpointNatMappingsList(), metadata client.get_nat_mapping_info( request, @@ -4938,6 +5012,7 @@ def test_get_nat_mapping_info_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_router_status_rest_bad_request( @@ -5020,10 +5095,13 @@ def test_get_router_status_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RoutersRestInterceptor, "post_get_router_status" ) as post, mock.patch.object( + transports.RoutersRestInterceptor, "post_get_router_status_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.RoutersRestInterceptor, "pre_get_router_status" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.GetRouterStatusRouterRequest.pb( compute.GetRouterStatusRouterRequest() ) @@ -5049,6 +5127,7 @@ def test_get_router_status_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.RouterStatusResponse() + post_with_metadata.return_value = compute.RouterStatusResponse(), metadata client.get_router_status( request, @@ -5060,6 +5139,7 @@ def test_get_router_status_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_insert_rest_bad_request(request_type=compute.InsertRouterRequest): @@ -5383,10 +5463,13 @@ def test_insert_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RoutersRestInterceptor, "post_insert" ) as post, mock.patch.object( + transports.RoutersRestInterceptor, "post_insert_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.RoutersRestInterceptor, "pre_insert" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.InsertRouterRequest.pb(compute.InsertRouterRequest()) transcode.return_value = { "method": "post", @@ -5408,6 +5491,7 @@ def test_insert_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.insert( request, @@ -5419,6 +5503,7 @@ def test_insert_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_rest_bad_request(request_type=compute.ListRoutersRequest): @@ -5505,10 +5590,13 @@ def test_list_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RoutersRestInterceptor, "post_list" ) as post, mock.patch.object( + transports.RoutersRestInterceptor, "post_list_with_metadata" + ) as 
post_with_metadata, mock.patch.object( transports.RoutersRestInterceptor, "pre_list" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.ListRoutersRequest.pb(compute.ListRoutersRequest()) transcode.return_value = { "method": "post", @@ -5530,6 +5618,7 @@ def test_list_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.RouterList() + post_with_metadata.return_value = compute.RouterList(), metadata client.list( request, @@ -5541,6 +5630,7 @@ def test_list_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_patch_rest_bad_request(request_type=compute.PatchRouterRequest): @@ -5864,10 +5954,13 @@ def test_patch_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RoutersRestInterceptor, "post_patch" ) as post, mock.patch.object( + transports.RoutersRestInterceptor, "post_patch_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.RoutersRestInterceptor, "pre_patch" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.PatchRouterRequest.pb(compute.PatchRouterRequest()) transcode.return_value = { "method": "post", @@ -5889,6 +5982,7 @@ def test_patch_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.patch( request, @@ -5900,6 +5994,7 @@ def test_patch_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_preview_rest_bad_request(request_type=compute.PreviewRouterRequest): @@ -6178,10 +6273,13 @@ def test_preview_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RoutersRestInterceptor, "post_preview" ) as post, mock.patch.object( + transports.RoutersRestInterceptor, "post_preview_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.RoutersRestInterceptor, "pre_preview" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.PreviewRouterRequest.pb(compute.PreviewRouterRequest()) transcode.return_value = { "method": "post", @@ -6205,6 +6303,7 @@ def test_preview_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.RoutersPreviewResponse() + post_with_metadata.return_value = compute.RoutersPreviewResponse(), metadata client.preview( request, @@ -6216,6 +6315,7 @@ def test_preview_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_rest_bad_request(request_type=compute.UpdateRouterRequest): @@ -6539,10 +6639,13 @@ def test_update_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RoutersRestInterceptor, "post_update" ) as post, mock.patch.object( + transports.RoutersRestInterceptor, "post_update_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.RoutersRestInterceptor, "pre_update" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.UpdateRouterRequest.pb(compute.UpdateRouterRequest()) transcode.return_value = { "method": "post", @@ -6564,6 +6667,7 @@ def 
test_update_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.update( request, @@ -6575,6 +6679,7 @@ def test_update_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_initialize_client_w_rest(): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_routes.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_routes.py index c484a3fe491d..5370a90290db 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_routes.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_routes.py @@ -62,6 +62,13 @@ from google.cloud.compute_v1.services.routes import RoutesClient, pagers, transports from google.cloud.compute_v1.types import compute +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -279,6 +286,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = RoutesClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = RoutesClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -2289,10 +2339,13 @@ def test_delete_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RoutesRestInterceptor, "post_delete" ) as post, mock.patch.object( + transports.RoutesRestInterceptor, "post_delete_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.RoutesRestInterceptor, "pre_delete" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.DeleteRouteRequest.pb(compute.DeleteRouteRequest()) transcode.return_value = { "method": "post", @@ -2314,6 +2367,7 @@ def test_delete_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.delete( request, @@ -2325,6 +2379,7 @@ 
def test_delete_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_rest_bad_request(request_type=compute.GetRouteRequest): @@ -2449,10 +2504,13 @@ def test_get_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RoutesRestInterceptor, "post_get" ) as post, mock.patch.object( + transports.RoutesRestInterceptor, "post_get_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.RoutesRestInterceptor, "pre_get" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.GetRouteRequest.pb(compute.GetRouteRequest()) transcode.return_value = { "method": "post", @@ -2474,6 +2532,7 @@ def test_get_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Route() + post_with_metadata.return_value = compute.Route(), metadata client.get( request, @@ -2485,6 +2544,7 @@ def test_get_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_insert_rest_bad_request(request_type=compute.InsertRouteRequest): @@ -2709,10 +2769,13 @@ def test_insert_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RoutesRestInterceptor, "post_insert" ) as post, mock.patch.object( + transports.RoutesRestInterceptor, "post_insert_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.RoutesRestInterceptor, "pre_insert" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.InsertRouteRequest.pb(compute.InsertRouteRequest()) transcode.return_value = { "method": "post", @@ -2734,6 +2797,7 @@ def test_insert_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.insert( request, @@ -2745,6 +2809,7 @@ def test_insert_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_rest_bad_request(request_type=compute.ListRoutesRequest): @@ -2831,10 +2896,13 @@ def test_list_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RoutesRestInterceptor, "post_list" ) as post, mock.patch.object( + transports.RoutesRestInterceptor, "post_list_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.RoutesRestInterceptor, "pre_list" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.ListRoutesRequest.pb(compute.ListRoutesRequest()) transcode.return_value = { "method": "post", @@ -2856,6 +2924,7 @@ def test_list_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.RouteList() + post_with_metadata.return_value = compute.RouteList(), metadata client.list( request, @@ -2867,6 +2936,7 @@ def test_list_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_initialize_client_w_rest(): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_security_policies.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_security_policies.py index 1ae2fb7845a8..1b1394a5e15a 100644 --- 
a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_security_policies.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_security_policies.py @@ -66,6 +66,13 @@ ) from google.cloud.compute_v1.types import compute +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -320,6 +327,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = SecurityPoliciesClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = SecurityPoliciesClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -5349,10 +5399,13 @@ def test_add_rule_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.SecurityPoliciesRestInterceptor, "post_add_rule" ) as post, mock.patch.object( + transports.SecurityPoliciesRestInterceptor, "post_add_rule_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.SecurityPoliciesRestInterceptor, "pre_add_rule" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.AddRuleSecurityPolicyRequest.pb( compute.AddRuleSecurityPolicyRequest() ) @@ -5376,6 +5429,7 @@ def test_add_rule_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.add_rule( request, @@ -5387,6 +5441,7 @@ def test_add_rule_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_aggregated_list_rest_bad_request( @@ -5481,10 +5536,13 @@ def test_aggregated_list_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.SecurityPoliciesRestInterceptor, "post_aggregated_list" ) as post, mock.patch.object( + transports.SecurityPoliciesRestInterceptor, "post_aggregated_list_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.SecurityPoliciesRestInterceptor, "pre_aggregated_list" ) as pre: pre.assert_not_called() 
post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.AggregatedListSecurityPoliciesRequest.pb( compute.AggregatedListSecurityPoliciesRequest() ) @@ -5510,6 +5568,10 @@ def test_aggregated_list_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.SecurityPoliciesAggregatedList() + post_with_metadata.return_value = ( + compute.SecurityPoliciesAggregatedList(), + metadata, + ) client.aggregated_list( request, @@ -5521,6 +5583,7 @@ def test_aggregated_list_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_rest_bad_request(request_type=compute.DeleteSecurityPolicyRequest): @@ -5645,10 +5708,13 @@ def test_delete_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.SecurityPoliciesRestInterceptor, "post_delete" ) as post, mock.patch.object( + transports.SecurityPoliciesRestInterceptor, "post_delete_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.SecurityPoliciesRestInterceptor, "pre_delete" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.DeleteSecurityPolicyRequest.pb( compute.DeleteSecurityPolicyRequest() ) @@ -5672,6 +5738,7 @@ def test_delete_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.delete( request, @@ -5683,6 +5750,7 @@ def test_delete_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_rest_bad_request(request_type=compute.GetSecurityPolicyRequest): @@ -5783,10 +5851,13 @@ def test_get_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.SecurityPoliciesRestInterceptor, "post_get" ) as post, mock.patch.object( + transports.SecurityPoliciesRestInterceptor, "post_get_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.SecurityPoliciesRestInterceptor, "pre_get" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.GetSecurityPolicyRequest.pb( compute.GetSecurityPolicyRequest() ) @@ -5810,6 +5881,7 @@ def test_get_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.SecurityPolicy() + post_with_metadata.return_value = compute.SecurityPolicy(), metadata client.get( request, @@ -5821,6 +5893,7 @@ def test_get_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_rule_rest_bad_request(request_type=compute.GetRuleSecurityPolicyRequest): @@ -5911,10 +5984,13 @@ def test_get_rule_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.SecurityPoliciesRestInterceptor, "post_get_rule" ) as post, mock.patch.object( + transports.SecurityPoliciesRestInterceptor, "post_get_rule_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.SecurityPoliciesRestInterceptor, "pre_get_rule" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.GetRuleSecurityPolicyRequest.pb( compute.GetRuleSecurityPolicyRequest() ) @@ -5938,6 +6014,7 @@ def test_get_rule_rest_interceptors(null_interceptor): ] pre.return_value = request, 
metadata post.return_value = compute.SecurityPolicyRule() + post_with_metadata.return_value = compute.SecurityPolicyRule(), metadata client.get_rule( request, @@ -5949,6 +6026,7 @@ def test_get_rule_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_insert_rest_bad_request(request_type=compute.InsertSecurityPolicyRequest): @@ -6306,10 +6384,13 @@ def test_insert_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.SecurityPoliciesRestInterceptor, "post_insert" ) as post, mock.patch.object( + transports.SecurityPoliciesRestInterceptor, "post_insert_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.SecurityPoliciesRestInterceptor, "pre_insert" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.InsertSecurityPolicyRequest.pb( compute.InsertSecurityPolicyRequest() ) @@ -6333,6 +6414,7 @@ def test_insert_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.insert( request, @@ -6344,6 +6426,7 @@ def test_insert_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_rest_bad_request(request_type=compute.ListSecurityPoliciesRequest): @@ -6430,10 +6513,13 @@ def test_list_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.SecurityPoliciesRestInterceptor, "post_list" ) as post, mock.patch.object( + transports.SecurityPoliciesRestInterceptor, "post_list_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.SecurityPoliciesRestInterceptor, "pre_list" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.ListSecurityPoliciesRequest.pb( compute.ListSecurityPoliciesRequest() ) @@ -6457,6 +6543,7 @@ def test_list_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.SecurityPolicyList() + post_with_metadata.return_value = compute.SecurityPolicyList(), metadata client.list( request, @@ -6468,6 +6555,7 @@ def test_list_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_preconfigured_expression_sets_rest_bad_request( @@ -6556,11 +6644,15 @@ def test_list_preconfigured_expression_sets_rest_interceptors(null_interceptor): transports.SecurityPoliciesRestInterceptor, "post_list_preconfigured_expression_sets", ) as post, mock.patch.object( + transports.SecurityPoliciesRestInterceptor, + "post_list_preconfigured_expression_sets_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.SecurityPoliciesRestInterceptor, "pre_list_preconfigured_expression_sets", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.ListPreconfiguredExpressionSetsSecurityPoliciesRequest.pb( compute.ListPreconfiguredExpressionSetsSecurityPoliciesRequest() ) @@ -6590,6 +6682,10 @@ def test_list_preconfigured_expression_sets_rest_interceptors(null_interceptor): post.return_value = ( compute.SecurityPoliciesListPreconfiguredExpressionSetsResponse() ) + post_with_metadata.return_value = ( + compute.SecurityPoliciesListPreconfiguredExpressionSetsResponse(), + metadata, + ) 
client.list_preconfigured_expression_sets( request, @@ -6601,6 +6697,7 @@ def test_list_preconfigured_expression_sets_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_patch_rest_bad_request(request_type=compute.PatchSecurityPolicyRequest): @@ -6958,10 +7055,13 @@ def test_patch_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.SecurityPoliciesRestInterceptor, "post_patch" ) as post, mock.patch.object( + transports.SecurityPoliciesRestInterceptor, "post_patch_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.SecurityPoliciesRestInterceptor, "pre_patch" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.PatchSecurityPolicyRequest.pb( compute.PatchSecurityPolicyRequest() ) @@ -6985,6 +7085,7 @@ def test_patch_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.patch( request, @@ -6996,6 +7097,7 @@ def test_patch_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_patch_rule_rest_bad_request( @@ -7285,10 +7387,13 @@ def test_patch_rule_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.SecurityPoliciesRestInterceptor, "post_patch_rule" ) as post, mock.patch.object( + transports.SecurityPoliciesRestInterceptor, "post_patch_rule_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.SecurityPoliciesRestInterceptor, "pre_patch_rule" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.PatchRuleSecurityPolicyRequest.pb( compute.PatchRuleSecurityPolicyRequest() ) @@ -7312,6 +7417,7 @@ def test_patch_rule_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.patch_rule( request, @@ -7323,6 +7429,7 @@ def test_patch_rule_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_remove_rule_rest_bad_request( @@ -7449,10 +7556,13 @@ def test_remove_rule_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.SecurityPoliciesRestInterceptor, "post_remove_rule" ) as post, mock.patch.object( + transports.SecurityPoliciesRestInterceptor, "post_remove_rule_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.SecurityPoliciesRestInterceptor, "pre_remove_rule" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.RemoveRuleSecurityPolicyRequest.pb( compute.RemoveRuleSecurityPolicyRequest() ) @@ -7476,6 +7586,7 @@ def test_remove_rule_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.remove_rule( request, @@ -7487,6 +7598,7 @@ def test_remove_rule_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_set_labels_rest_bad_request( @@ -7692,10 +7804,13 @@ def test_set_labels_rest_interceptors(null_interceptor): ) as 
transcode, mock.patch.object( transports.SecurityPoliciesRestInterceptor, "post_set_labels" ) as post, mock.patch.object( + transports.SecurityPoliciesRestInterceptor, "post_set_labels_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.SecurityPoliciesRestInterceptor, "pre_set_labels" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.SetLabelsSecurityPolicyRequest.pb( compute.SetLabelsSecurityPolicyRequest() ) @@ -7719,6 +7834,7 @@ def test_set_labels_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.set_labels( request, @@ -7730,6 +7846,7 @@ def test_set_labels_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_initialize_client_w_rest(): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_service_attachments.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_service_attachments.py index 66580f6a94bf..072199045286 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_service_attachments.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_service_attachments.py @@ -66,6 +66,13 @@ ) from google.cloud.compute_v1.types import compute +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -332,6 +339,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
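The repeated interceptor hunks above all exercise the same new hook: alongside the existing post_<rpc> callback, each generated REST interceptor now gets a post_<rpc>_with_metadata callback, which the tests patch, feed a (response, metadata) tuple, and assert is invoked exactly once per call. Below is a minimal usage sketch, not part of this diff; the hook signature is an assumption inferred from what the tests patch (response plus response metadata in, a (response, metadata) tuple out), and the credentials/transport wiring is purely illustrative.

# Hypothetical sketch: overriding the new post_<rpc>_with_metadata hook on a
# generated REST interceptor (signature assumed from the tests above).
from google.auth.credentials import AnonymousCredentials
from google.cloud import compute_v1
from google.cloud.compute_v1.services.security_policies import transports


class MetadataLoggingInterceptor(transports.SecurityPoliciesRestInterceptor):
    def post_get_with_metadata(self, response, metadata):
        # Unlike post_get, this hook also receives the response metadata and is
        # expected to hand back a (response, metadata) tuple, mirroring the
        # tuples the tests assign to post_with_metadata.return_value.
        print("response metadata:", metadata)
        return response, metadata


transport = transports.SecurityPoliciesRestTransport(
    credentials=AnonymousCredentials(),  # illustration only
    interceptor=MetadataLoggingInterceptor(),
)
client = compute_v1.SecurityPoliciesClient(transport=transport)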
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = ServiceAttachmentsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = ServiceAttachmentsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -3779,10 +3829,14 @@ def test_aggregated_list_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ServiceAttachmentsRestInterceptor, "post_aggregated_list" ) as post, mock.patch.object( + transports.ServiceAttachmentsRestInterceptor, + "post_aggregated_list_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ServiceAttachmentsRestInterceptor, "pre_aggregated_list" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.AggregatedListServiceAttachmentsRequest.pb( compute.AggregatedListServiceAttachmentsRequest() ) @@ -3808,6 +3862,10 @@ def test_aggregated_list_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.ServiceAttachmentAggregatedList() + post_with_metadata.return_value = ( + compute.ServiceAttachmentAggregatedList(), + metadata, + ) client.aggregated_list( request, @@ -3819,6 +3877,7 @@ def test_aggregated_list_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_rest_bad_request(request_type=compute.DeleteServiceAttachmentRequest): @@ -3951,10 +4010,13 @@ def test_delete_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ServiceAttachmentsRestInterceptor, "post_delete" ) as post, mock.patch.object( + transports.ServiceAttachmentsRestInterceptor, "post_delete_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ServiceAttachmentsRestInterceptor, "pre_delete" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.DeleteServiceAttachmentRequest.pb( compute.DeleteServiceAttachmentRequest() ) @@ -3978,6 +4040,7 @@ def test_delete_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.delete( request, @@ -3989,6 +4052,7 @@ def test_delete_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + 
post_with_metadata.assert_called_once() def test_get_rest_bad_request(request_type=compute.GetServiceAttachmentRequest): @@ -4111,10 +4175,13 @@ def test_get_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ServiceAttachmentsRestInterceptor, "post_get" ) as post, mock.patch.object( + transports.ServiceAttachmentsRestInterceptor, "post_get_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ServiceAttachmentsRestInterceptor, "pre_get" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.GetServiceAttachmentRequest.pb( compute.GetServiceAttachmentRequest() ) @@ -4138,6 +4205,7 @@ def test_get_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.ServiceAttachment() + post_with_metadata.return_value = compute.ServiceAttachment(), metadata client.get( request, @@ -4149,6 +4217,7 @@ def test_get_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_iam_policy_rest_bad_request( @@ -4237,10 +4306,14 @@ def test_get_iam_policy_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ServiceAttachmentsRestInterceptor, "post_get_iam_policy" ) as post, mock.patch.object( + transports.ServiceAttachmentsRestInterceptor, + "post_get_iam_policy_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ServiceAttachmentsRestInterceptor, "pre_get_iam_policy" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.GetIamPolicyServiceAttachmentRequest.pb( compute.GetIamPolicyServiceAttachmentRequest() ) @@ -4264,6 +4337,7 @@ def test_get_iam_policy_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Policy() + post_with_metadata.return_value = compute.Policy(), metadata client.get_iam_policy( request, @@ -4275,6 +4349,7 @@ def test_get_iam_policy_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_insert_rest_bad_request(request_type=compute.InsertServiceAttachmentRequest): @@ -4511,10 +4586,13 @@ def test_insert_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ServiceAttachmentsRestInterceptor, "post_insert" ) as post, mock.patch.object( + transports.ServiceAttachmentsRestInterceptor, "post_insert_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ServiceAttachmentsRestInterceptor, "pre_insert" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.InsertServiceAttachmentRequest.pb( compute.InsertServiceAttachmentRequest() ) @@ -4538,6 +4616,7 @@ def test_insert_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.insert( request, @@ -4549,6 +4628,7 @@ def test_insert_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_rest_bad_request(request_type=compute.ListServiceAttachmentsRequest): @@ -4637,10 +4717,13 @@ def test_list_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ServiceAttachmentsRestInterceptor, "post_list" ) as post, 
mock.patch.object( + transports.ServiceAttachmentsRestInterceptor, "post_list_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ServiceAttachmentsRestInterceptor, "pre_list" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.ListServiceAttachmentsRequest.pb( compute.ListServiceAttachmentsRequest() ) @@ -4666,6 +4749,7 @@ def test_list_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.ServiceAttachmentList() + post_with_metadata.return_value = compute.ServiceAttachmentList(), metadata client.list( request, @@ -4677,6 +4761,7 @@ def test_list_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_patch_rest_bad_request(request_type=compute.PatchServiceAttachmentRequest): @@ -4921,10 +5006,13 @@ def test_patch_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ServiceAttachmentsRestInterceptor, "post_patch" ) as post, mock.patch.object( + transports.ServiceAttachmentsRestInterceptor, "post_patch_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ServiceAttachmentsRestInterceptor, "pre_patch" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.PatchServiceAttachmentRequest.pb( compute.PatchServiceAttachmentRequest() ) @@ -4948,6 +5036,7 @@ def test_patch_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.patch( request, @@ -4959,6 +5048,7 @@ def test_patch_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_set_iam_policy_rest_bad_request( @@ -5163,10 +5253,14 @@ def test_set_iam_policy_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ServiceAttachmentsRestInterceptor, "post_set_iam_policy" ) as post, mock.patch.object( + transports.ServiceAttachmentsRestInterceptor, + "post_set_iam_policy_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ServiceAttachmentsRestInterceptor, "pre_set_iam_policy" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.SetIamPolicyServiceAttachmentRequest.pb( compute.SetIamPolicyServiceAttachmentRequest() ) @@ -5190,6 +5284,7 @@ def test_set_iam_policy_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Policy() + post_with_metadata.return_value = compute.Policy(), metadata client.set_iam_policy( request, @@ -5201,6 +5296,7 @@ def test_set_iam_policy_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_test_iam_permissions_rest_bad_request( @@ -5363,10 +5459,14 @@ def test_test_iam_permissions_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ServiceAttachmentsRestInterceptor, "post_test_iam_permissions" ) as post, mock.patch.object( + transports.ServiceAttachmentsRestInterceptor, + "post_test_iam_permissions_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ServiceAttachmentsRestInterceptor, "pre_test_iam_permissions" ) as pre: pre.assert_not_called() post.assert_not_called() + 
post_with_metadata.assert_not_called() pb_message = compute.TestIamPermissionsServiceAttachmentRequest.pb( compute.TestIamPermissionsServiceAttachmentRequest() ) @@ -5392,6 +5492,7 @@ def test_test_iam_permissions_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.TestPermissionsResponse() + post_with_metadata.return_value = compute.TestPermissionsResponse(), metadata client.test_iam_permissions( request, @@ -5403,6 +5504,7 @@ def test_test_iam_permissions_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_initialize_client_w_rest(): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_snapshot_settings_service.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_snapshot_settings_service.py index 482dfcb21e99..dd68a7f69c63 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_snapshot_settings_service.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_snapshot_settings_service.py @@ -65,6 +65,13 @@ ) from google.cloud.compute_v1.types import compute +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -335,6 +342,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = SnapshotSettingsServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = SnapshotSettingsServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -1722,10 +1772,13 @@ def test_get_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.SnapshotSettingsServiceRestInterceptor, "post_get" ) as post, mock.patch.object( + transports.SnapshotSettingsServiceRestInterceptor, "post_get_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.SnapshotSettingsServiceRestInterceptor, "pre_get" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.GetSnapshotSettingRequest.pb( 
compute.GetSnapshotSettingRequest() ) @@ -1749,6 +1802,7 @@ def test_get_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.SnapshotSettings() + post_with_metadata.return_value = compute.SnapshotSettings(), metadata client.get( request, @@ -1760,6 +1814,7 @@ def test_get_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_patch_rest_bad_request(request_type=compute.PatchSnapshotSettingRequest): @@ -1960,10 +2015,13 @@ def test_patch_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.SnapshotSettingsServiceRestInterceptor, "post_patch" ) as post, mock.patch.object( + transports.SnapshotSettingsServiceRestInterceptor, "post_patch_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.SnapshotSettingsServiceRestInterceptor, "pre_patch" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.PatchSnapshotSettingRequest.pb( compute.PatchSnapshotSettingRequest() ) @@ -1987,6 +2045,7 @@ def test_patch_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.patch( request, @@ -1998,6 +2057,7 @@ def test_patch_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_initialize_client_w_rest(): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_snapshots.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_snapshots.py index 048d45a54940..4fd544b6e5c0 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_snapshots.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_snapshots.py @@ -66,6 +66,13 @@ ) from google.cloud.compute_v1.types import compute +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -294,6 +301,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
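Each test file touched by this diff also gains the same pair of parametrized tests for _add_cred_info_for_auth_errors. Read together, they pin down the behaviour: the JSON-serialized credential info is appended to error.details only for 401/403/404 responses and only when the credentials object exposes get_cred_info(); a 500, or credentials without that hook, leave the details untouched. The snippet below is an illustrative paraphrase of that contract, not the library implementation.

import json


def add_cred_info_for_auth_errors(error, credentials):
    # Paraphrase of the behaviour the parametrized tests assert: append the
    # serialized credential info for auth-related error codes only.
    get_cred_info = getattr(credentials, "get_cred_info", None)
    cred_info = get_cred_info() if get_cred_info else None
    if cred_info and error.code in (401, 403, 404):
        error.details.append(json.dumps(cred_info))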
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = SnapshotsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = SnapshotsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -3305,10 +3355,13 @@ def test_delete_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.SnapshotsRestInterceptor, "post_delete" ) as post, mock.patch.object( + transports.SnapshotsRestInterceptor, "post_delete_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.SnapshotsRestInterceptor, "pre_delete" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.DeleteSnapshotRequest.pb(compute.DeleteSnapshotRequest()) transcode.return_value = { "method": "post", @@ -3330,6 +3383,7 @@ def test_delete_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.delete( request, @@ -3341,6 +3395,7 @@ def test_delete_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_rest_bad_request(request_type=compute.GetSnapshotRequest): @@ -3490,10 +3545,13 @@ def test_get_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.SnapshotsRestInterceptor, "post_get" ) as post, mock.patch.object( + transports.SnapshotsRestInterceptor, "post_get_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.SnapshotsRestInterceptor, "pre_get" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.GetSnapshotRequest.pb(compute.GetSnapshotRequest()) transcode.return_value = { "method": "post", @@ -3515,6 +3573,7 @@ def test_get_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Snapshot() + post_with_metadata.return_value = compute.Snapshot(), metadata client.get( request, @@ -3526,6 +3585,7 @@ def test_get_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_iam_policy_rest_bad_request( @@ -3612,10 +3672,13 @@ def test_get_iam_policy_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( 
transports.SnapshotsRestInterceptor, "post_get_iam_policy" ) as post, mock.patch.object( + transports.SnapshotsRestInterceptor, "post_get_iam_policy_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.SnapshotsRestInterceptor, "pre_get_iam_policy" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.GetIamPolicySnapshotRequest.pb( compute.GetIamPolicySnapshotRequest() ) @@ -3639,6 +3702,7 @@ def test_get_iam_policy_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Policy() + post_with_metadata.return_value = compute.Policy(), metadata client.get_iam_policy( request, @@ -3650,6 +3714,7 @@ def test_get_iam_policy_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_insert_rest_bad_request(request_type=compute.InsertSnapshotRequest): @@ -3883,10 +3948,13 @@ def test_insert_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.SnapshotsRestInterceptor, "post_insert" ) as post, mock.patch.object( + transports.SnapshotsRestInterceptor, "post_insert_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.SnapshotsRestInterceptor, "pre_insert" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.InsertSnapshotRequest.pb(compute.InsertSnapshotRequest()) transcode.return_value = { "method": "post", @@ -3908,6 +3976,7 @@ def test_insert_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.insert( request, @@ -3919,6 +3988,7 @@ def test_insert_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_rest_bad_request(request_type=compute.ListSnapshotsRequest): @@ -4005,10 +4075,13 @@ def test_list_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.SnapshotsRestInterceptor, "post_list" ) as post, mock.patch.object( + transports.SnapshotsRestInterceptor, "post_list_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.SnapshotsRestInterceptor, "pre_list" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.ListSnapshotsRequest.pb(compute.ListSnapshotsRequest()) transcode.return_value = { "method": "post", @@ -4030,6 +4103,7 @@ def test_list_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.SnapshotList() + post_with_metadata.return_value = compute.SnapshotList(), metadata client.list( request, @@ -4041,6 +4115,7 @@ def test_list_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_set_iam_policy_rest_bad_request( @@ -4243,10 +4318,13 @@ def test_set_iam_policy_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.SnapshotsRestInterceptor, "post_set_iam_policy" ) as post, mock.patch.object( + transports.SnapshotsRestInterceptor, "post_set_iam_policy_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.SnapshotsRestInterceptor, "pre_set_iam_policy" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() 
pb_message = compute.SetIamPolicySnapshotRequest.pb( compute.SetIamPolicySnapshotRequest() ) @@ -4270,6 +4348,7 @@ def test_set_iam_policy_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Policy() + post_with_metadata.return_value = compute.Policy(), metadata client.set_iam_policy( request, @@ -4281,6 +4360,7 @@ def test_set_iam_policy_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_set_labels_rest_bad_request(request_type=compute.SetLabelsSnapshotRequest): @@ -4482,10 +4562,13 @@ def test_set_labels_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.SnapshotsRestInterceptor, "post_set_labels" ) as post, mock.patch.object( + transports.SnapshotsRestInterceptor, "post_set_labels_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.SnapshotsRestInterceptor, "pre_set_labels" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.SetLabelsSnapshotRequest.pb( compute.SetLabelsSnapshotRequest() ) @@ -4509,6 +4592,7 @@ def test_set_labels_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.set_labels( request, @@ -4520,6 +4604,7 @@ def test_set_labels_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_test_iam_permissions_rest_bad_request( @@ -4680,10 +4765,13 @@ def test_test_iam_permissions_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.SnapshotsRestInterceptor, "post_test_iam_permissions" ) as post, mock.patch.object( + transports.SnapshotsRestInterceptor, "post_test_iam_permissions_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.SnapshotsRestInterceptor, "pre_test_iam_permissions" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.TestIamPermissionsSnapshotRequest.pb( compute.TestIamPermissionsSnapshotRequest() ) @@ -4709,6 +4797,7 @@ def test_test_iam_permissions_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.TestPermissionsResponse() + post_with_metadata.return_value = compute.TestPermissionsResponse(), metadata client.test_iam_permissions( request, @@ -4720,6 +4809,7 @@ def test_test_iam_permissions_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_initialize_client_w_rest(): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_ssl_certificates.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_ssl_certificates.py index 2f034ad59580..0b007c57f41a 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_ssl_certificates.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_ssl_certificates.py @@ -66,6 +66,13 @@ ) from google.cloud.compute_v1.types import compute +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ 
-319,6 +326,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = SslCertificatesClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = SslCertificatesClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -2604,10 +2654,13 @@ def test_aggregated_list_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.SslCertificatesRestInterceptor, "post_aggregated_list" ) as post, mock.patch.object( + transports.SslCertificatesRestInterceptor, "post_aggregated_list_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.SslCertificatesRestInterceptor, "pre_aggregated_list" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.AggregatedListSslCertificatesRequest.pb( compute.AggregatedListSslCertificatesRequest() ) @@ -2633,6 +2686,10 @@ def test_aggregated_list_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.SslCertificateAggregatedList() + post_with_metadata.return_value = ( + compute.SslCertificateAggregatedList(), + metadata, + ) client.aggregated_list( request, @@ -2644,6 +2701,7 @@ def test_aggregated_list_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_rest_bad_request(request_type=compute.DeleteSslCertificateRequest): @@ -2768,10 +2826,13 @@ def test_delete_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.SslCertificatesRestInterceptor, "post_delete" ) as post, mock.patch.object( + transports.SslCertificatesRestInterceptor, "post_delete_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.SslCertificatesRestInterceptor, "pre_delete" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.DeleteSslCertificateRequest.pb( compute.DeleteSslCertificateRequest() ) @@ -2795,6 +2856,7 @@ def test_delete_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.delete( request, @@ -2806,6 +2868,7 @@ def 
test_delete_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_rest_bad_request(request_type=compute.GetSslCertificateRequest): @@ -2910,10 +2973,13 @@ def test_get_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.SslCertificatesRestInterceptor, "post_get" ) as post, mock.patch.object( + transports.SslCertificatesRestInterceptor, "post_get_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.SslCertificatesRestInterceptor, "pre_get" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.GetSslCertificateRequest.pb( compute.GetSslCertificateRequest() ) @@ -2937,6 +3003,7 @@ def test_get_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.SslCertificate() + post_with_metadata.return_value = compute.SslCertificate(), metadata client.get( request, @@ -2948,6 +3015,7 @@ def test_get_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_insert_rest_bad_request(request_type=compute.InsertSslCertificateRequest): @@ -3169,10 +3237,13 @@ def test_insert_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.SslCertificatesRestInterceptor, "post_insert" ) as post, mock.patch.object( + transports.SslCertificatesRestInterceptor, "post_insert_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.SslCertificatesRestInterceptor, "pre_insert" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.InsertSslCertificateRequest.pb( compute.InsertSslCertificateRequest() ) @@ -3196,6 +3267,7 @@ def test_insert_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.insert( request, @@ -3207,6 +3279,7 @@ def test_insert_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_rest_bad_request(request_type=compute.ListSslCertificatesRequest): @@ -3295,10 +3368,13 @@ def test_list_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.SslCertificatesRestInterceptor, "post_list" ) as post, mock.patch.object( + transports.SslCertificatesRestInterceptor, "post_list_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.SslCertificatesRestInterceptor, "pre_list" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.ListSslCertificatesRequest.pb( compute.ListSslCertificatesRequest() ) @@ -3322,6 +3398,7 @@ def test_list_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.SslCertificateList() + post_with_metadata.return_value = compute.SslCertificateList(), metadata client.list( request, @@ -3333,6 +3410,7 @@ def test_list_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_initialize_client_w_rest(): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_ssl_policies.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_ssl_policies.py index ce5a3c5f3cb3..18bc5412153e 
100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_ssl_policies.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_ssl_policies.py @@ -66,6 +66,13 @@ ) from google.cloud.compute_v1.types import compute +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -300,6 +307,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = SslPoliciesClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = SslPoliciesClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -3179,10 +3229,13 @@ def test_aggregated_list_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.SslPoliciesRestInterceptor, "post_aggregated_list" ) as post, mock.patch.object( + transports.SslPoliciesRestInterceptor, "post_aggregated_list_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.SslPoliciesRestInterceptor, "pre_aggregated_list" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.AggregatedListSslPoliciesRequest.pb( compute.AggregatedListSslPoliciesRequest() ) @@ -3208,6 +3261,7 @@ def test_aggregated_list_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.SslPoliciesAggregatedList() + post_with_metadata.return_value = compute.SslPoliciesAggregatedList(), metadata client.aggregated_list( request, @@ -3219,6 +3273,7 @@ def test_aggregated_list_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_rest_bad_request(request_type=compute.DeleteSslPolicyRequest): @@ -3343,10 +3398,13 @@ def test_delete_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.SslPoliciesRestInterceptor, "post_delete" ) as post, mock.patch.object( + transports.SslPoliciesRestInterceptor, "post_delete_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.SslPoliciesRestInterceptor, 
"pre_delete" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.DeleteSslPolicyRequest.pb(compute.DeleteSslPolicyRequest()) transcode.return_value = { "method": "post", @@ -3368,6 +3426,7 @@ def test_delete_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.delete( request, @@ -3379,6 +3438,7 @@ def test_delete_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_rest_bad_request(request_type=compute.GetSslPolicyRequest): @@ -3483,10 +3543,13 @@ def test_get_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.SslPoliciesRestInterceptor, "post_get" ) as post, mock.patch.object( + transports.SslPoliciesRestInterceptor, "post_get_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.SslPoliciesRestInterceptor, "pre_get" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.GetSslPolicyRequest.pb(compute.GetSslPolicyRequest()) transcode.return_value = { "method": "post", @@ -3508,6 +3571,7 @@ def test_get_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.SslPolicy() + post_with_metadata.return_value = compute.SslPolicy(), metadata client.get( request, @@ -3519,6 +3583,7 @@ def test_get_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_insert_rest_bad_request(request_type=compute.InsertSslPolicyRequest): @@ -3731,10 +3796,13 @@ def test_insert_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.SslPoliciesRestInterceptor, "post_insert" ) as post, mock.patch.object( + transports.SslPoliciesRestInterceptor, "post_insert_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.SslPoliciesRestInterceptor, "pre_insert" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.InsertSslPolicyRequest.pb(compute.InsertSslPolicyRequest()) transcode.return_value = { "method": "post", @@ -3756,6 +3824,7 @@ def test_insert_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.insert( request, @@ -3767,6 +3836,7 @@ def test_insert_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_rest_bad_request(request_type=compute.ListSslPoliciesRequest): @@ -3855,10 +3925,13 @@ def test_list_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.SslPoliciesRestInterceptor, "post_list" ) as post, mock.patch.object( + transports.SslPoliciesRestInterceptor, "post_list_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.SslPoliciesRestInterceptor, "pre_list" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.ListSslPoliciesRequest.pb(compute.ListSslPoliciesRequest()) transcode.return_value = { "method": "post", @@ -3880,6 +3953,7 @@ def test_list_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata 
post.return_value = compute.SslPoliciesList() + post_with_metadata.return_value = compute.SslPoliciesList(), metadata client.list( request, @@ -3891,6 +3965,7 @@ def test_list_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_available_features_rest_bad_request( @@ -3975,10 +4050,14 @@ def test_list_available_features_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.SslPoliciesRestInterceptor, "post_list_available_features" ) as post, mock.patch.object( + transports.SslPoliciesRestInterceptor, + "post_list_available_features_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.SslPoliciesRestInterceptor, "pre_list_available_features" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.ListAvailableFeaturesSslPoliciesRequest.pb( compute.ListAvailableFeaturesSslPoliciesRequest() ) @@ -4004,6 +4083,10 @@ def test_list_available_features_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.SslPoliciesListAvailableFeaturesResponse() + post_with_metadata.return_value = ( + compute.SslPoliciesListAvailableFeaturesResponse(), + metadata, + ) client.list_available_features( request, @@ -4015,6 +4098,7 @@ def test_list_available_features_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_patch_rest_bad_request(request_type=compute.PatchSslPolicyRequest): @@ -4227,10 +4311,13 @@ def test_patch_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.SslPoliciesRestInterceptor, "post_patch" ) as post, mock.patch.object( + transports.SslPoliciesRestInterceptor, "post_patch_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.SslPoliciesRestInterceptor, "pre_patch" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.PatchSslPolicyRequest.pb(compute.PatchSslPolicyRequest()) transcode.return_value = { "method": "post", @@ -4252,6 +4339,7 @@ def test_patch_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.patch( request, @@ -4263,6 +4351,7 @@ def test_patch_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_initialize_client_w_rest(): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_storage_pool_types.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_storage_pool_types.py index 3a3f952d398f..ca026ceba44d 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_storage_pool_types.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_storage_pool_types.py @@ -59,6 +59,13 @@ ) from google.cloud.compute_v1.types import compute +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -313,6 +320,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty 
string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = StoragePoolTypesClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = StoragePoolTypesClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -1833,10 +1883,13 @@ def test_aggregated_list_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.StoragePoolTypesRestInterceptor, "post_aggregated_list" ) as post, mock.patch.object( + transports.StoragePoolTypesRestInterceptor, "post_aggregated_list_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.StoragePoolTypesRestInterceptor, "pre_aggregated_list" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.AggregatedListStoragePoolTypesRequest.pb( compute.AggregatedListStoragePoolTypesRequest() ) @@ -1862,6 +1915,10 @@ def test_aggregated_list_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.StoragePoolTypeAggregatedList() + post_with_metadata.return_value = ( + compute.StoragePoolTypeAggregatedList(), + metadata, + ) client.aggregated_list( request, @@ -1873,6 +1930,7 @@ def test_aggregated_list_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_rest_bad_request(request_type=compute.GetStoragePoolTypeRequest): @@ -1993,10 +2051,13 @@ def test_get_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.StoragePoolTypesRestInterceptor, "post_get" ) as post, mock.patch.object( + transports.StoragePoolTypesRestInterceptor, "post_get_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.StoragePoolTypesRestInterceptor, "pre_get" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.GetStoragePoolTypeRequest.pb( compute.GetStoragePoolTypeRequest() ) @@ -2020,6 +2081,7 @@ def test_get_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.StoragePoolType() + post_with_metadata.return_value = compute.StoragePoolType(), metadata client.get( request, @@ -2031,6 +2093,7 @@ def test_get_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def 
test_list_rest_bad_request(request_type=compute.ListStoragePoolTypesRequest): @@ -2119,10 +2182,13 @@ def test_list_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.StoragePoolTypesRestInterceptor, "post_list" ) as post, mock.patch.object( + transports.StoragePoolTypesRestInterceptor, "post_list_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.StoragePoolTypesRestInterceptor, "pre_list" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.ListStoragePoolTypesRequest.pb( compute.ListStoragePoolTypesRequest() ) @@ -2148,6 +2214,7 @@ def test_list_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.StoragePoolTypeList() + post_with_metadata.return_value = compute.StoragePoolTypeList(), metadata client.list( request, @@ -2159,6 +2226,7 @@ def test_list_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_initialize_client_w_rest(): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_storage_pools.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_storage_pools.py index 90c5d62f6400..1dc0425ef083 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_storage_pools.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_storage_pools.py @@ -66,6 +66,13 @@ ) from google.cloud.compute_v1.types import compute +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -304,6 +311,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
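One detail worth noting in these credential tests: mock.Mock(["get_cred_info"]) and mock.Mock([]) pass the list as Mock's first positional argument, spec, so the list controls which attributes the mock may expose. That is why the "no_get_cred_info" variant can assert not hasattr(cred, "get_cred_info") on a freshly constructed mock. A small standalone illustration:

from unittest import mock

cred_with_hook = mock.Mock(["get_cred_info"])  # spec list permits this attribute
assert hasattr(cred_with_hook, "get_cred_info")

cred_without_hook = mock.Mock([])  # empty spec: any other attribute access fails
assert not hasattr(cred_without_hook, "get_cred_info")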
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = StoragePoolsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = StoragePoolsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -4000,10 +4050,13 @@ def test_aggregated_list_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.StoragePoolsRestInterceptor, "post_aggregated_list" ) as post, mock.patch.object( + transports.StoragePoolsRestInterceptor, "post_aggregated_list_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.StoragePoolsRestInterceptor, "pre_aggregated_list" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.AggregatedListStoragePoolsRequest.pb( compute.AggregatedListStoragePoolsRequest() ) @@ -4029,6 +4082,7 @@ def test_aggregated_list_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.StoragePoolAggregatedList() + post_with_metadata.return_value = compute.StoragePoolAggregatedList(), metadata client.aggregated_list( request, @@ -4040,6 +4094,7 @@ def test_aggregated_list_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_rest_bad_request(request_type=compute.DeleteStoragePoolRequest): @@ -4164,10 +4219,13 @@ def test_delete_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.StoragePoolsRestInterceptor, "post_delete" ) as post, mock.patch.object( + transports.StoragePoolsRestInterceptor, "post_delete_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.StoragePoolsRestInterceptor, "pre_delete" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.DeleteStoragePoolRequest.pb( compute.DeleteStoragePoolRequest() ) @@ -4191,6 +4249,7 @@ def test_delete_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.delete( request, @@ -4202,6 +4261,7 @@ def test_delete_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def 
test_get_rest_bad_request(request_type=compute.GetStoragePoolRequest): @@ -4316,10 +4376,13 @@ def test_get_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.StoragePoolsRestInterceptor, "post_get" ) as post, mock.patch.object( + transports.StoragePoolsRestInterceptor, "post_get_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.StoragePoolsRestInterceptor, "pre_get" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.GetStoragePoolRequest.pb(compute.GetStoragePoolRequest()) transcode.return_value = { "method": "post", @@ -4341,6 +4404,7 @@ def test_get_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.StoragePool() + post_with_metadata.return_value = compute.StoragePool(), metadata client.get( request, @@ -4352,6 +4416,7 @@ def test_get_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_iam_policy_rest_bad_request( @@ -4440,10 +4505,13 @@ def test_get_iam_policy_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.StoragePoolsRestInterceptor, "post_get_iam_policy" ) as post, mock.patch.object( + transports.StoragePoolsRestInterceptor, "post_get_iam_policy_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.StoragePoolsRestInterceptor, "pre_get_iam_policy" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.GetIamPolicyStoragePoolRequest.pb( compute.GetIamPolicyStoragePoolRequest() ) @@ -4467,6 +4535,7 @@ def test_get_iam_policy_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Policy() + post_with_metadata.return_value = compute.Policy(), metadata client.get_iam_policy( request, @@ -4478,6 +4547,7 @@ def test_get_iam_policy_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_insert_rest_bad_request(request_type=compute.InsertStoragePoolRequest): @@ -4703,10 +4773,13 @@ def test_insert_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.StoragePoolsRestInterceptor, "post_insert" ) as post, mock.patch.object( + transports.StoragePoolsRestInterceptor, "post_insert_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.StoragePoolsRestInterceptor, "pre_insert" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.InsertStoragePoolRequest.pb( compute.InsertStoragePoolRequest() ) @@ -4730,6 +4803,7 @@ def test_insert_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.insert( request, @@ -4741,6 +4815,7 @@ def test_insert_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_rest_bad_request(request_type=compute.ListStoragePoolsRequest): @@ -4833,10 +4908,13 @@ def test_list_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.StoragePoolsRestInterceptor, "post_list" ) as post, mock.patch.object( + transports.StoragePoolsRestInterceptor, "post_list_with_metadata" + ) as post_with_metadata, mock.patch.object( 
transports.StoragePoolsRestInterceptor, "pre_list" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.ListStoragePoolsRequest.pb( compute.ListStoragePoolsRequest() ) @@ -4860,6 +4938,7 @@ def test_list_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.StoragePoolList() + post_with_metadata.return_value = compute.StoragePoolList(), metadata client.list( request, @@ -4871,6 +4950,7 @@ def test_list_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_disks_rest_bad_request(request_type=compute.ListDisksStoragePoolsRequest): @@ -4963,10 +5043,13 @@ def test_list_disks_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.StoragePoolsRestInterceptor, "post_list_disks" ) as post, mock.patch.object( + transports.StoragePoolsRestInterceptor, "post_list_disks_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.StoragePoolsRestInterceptor, "pre_list_disks" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.ListDisksStoragePoolsRequest.pb( compute.ListDisksStoragePoolsRequest() ) @@ -4992,6 +5075,7 @@ def test_list_disks_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.StoragePoolListDisks() + post_with_metadata.return_value = compute.StoragePoolListDisks(), metadata client.list_disks( request, @@ -5003,6 +5087,7 @@ def test_list_disks_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_set_iam_policy_rest_bad_request( @@ -5207,10 +5292,13 @@ def test_set_iam_policy_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.StoragePoolsRestInterceptor, "post_set_iam_policy" ) as post, mock.patch.object( + transports.StoragePoolsRestInterceptor, "post_set_iam_policy_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.StoragePoolsRestInterceptor, "pre_set_iam_policy" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.SetIamPolicyStoragePoolRequest.pb( compute.SetIamPolicyStoragePoolRequest() ) @@ -5234,6 +5322,7 @@ def test_set_iam_policy_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Policy() + post_with_metadata.return_value = compute.Policy(), metadata client.set_iam_policy( request, @@ -5245,6 +5334,7 @@ def test_set_iam_policy_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_test_iam_permissions_rest_bad_request( @@ -5407,10 +5497,14 @@ def test_test_iam_permissions_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.StoragePoolsRestInterceptor, "post_test_iam_permissions" ) as post, mock.patch.object( + transports.StoragePoolsRestInterceptor, + "post_test_iam_permissions_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.StoragePoolsRestInterceptor, "pre_test_iam_permissions" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.TestIamPermissionsStoragePoolRequest.pb( compute.TestIamPermissionsStoragePoolRequest() ) @@ -5436,6 +5530,7 @@ def 
test_test_iam_permissions_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.TestPermissionsResponse() + post_with_metadata.return_value = compute.TestPermissionsResponse(), metadata client.test_iam_permissions( request, @@ -5447,6 +5542,7 @@ def test_test_iam_permissions_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_rest_bad_request(request_type=compute.UpdateStoragePoolRequest): @@ -5672,10 +5768,13 @@ def test_update_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.StoragePoolsRestInterceptor, "post_update" ) as post, mock.patch.object( + transports.StoragePoolsRestInterceptor, "post_update_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.StoragePoolsRestInterceptor, "pre_update" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.UpdateStoragePoolRequest.pb( compute.UpdateStoragePoolRequest() ) @@ -5699,6 +5798,7 @@ def test_update_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.update( request, @@ -5710,6 +5810,7 @@ def test_update_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_initialize_client_w_rest(): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_subnetworks.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_subnetworks.py index ea238c7f0ab8..d834e0d55e97 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_subnetworks.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_subnetworks.py @@ -66,6 +66,13 @@ ) from google.cloud.compute_v1.types import compute +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -300,6 +307,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = SubnetworksClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = SubnetworksClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -4856,10 +4906,13 @@ def test_aggregated_list_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.SubnetworksRestInterceptor, "post_aggregated_list" ) as post, mock.patch.object( + transports.SubnetworksRestInterceptor, "post_aggregated_list_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.SubnetworksRestInterceptor, "pre_aggregated_list" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.AggregatedListSubnetworksRequest.pb( compute.AggregatedListSubnetworksRequest() ) @@ -4885,6 +4938,7 @@ def test_aggregated_list_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.SubnetworkAggregatedList() + post_with_metadata.return_value = compute.SubnetworkAggregatedList(), metadata client.aggregated_list( request, @@ -4896,6 +4950,7 @@ def test_aggregated_list_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_rest_bad_request(request_type=compute.DeleteSubnetworkRequest): @@ -5020,10 +5075,13 @@ def test_delete_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.SubnetworksRestInterceptor, "post_delete" ) as post, mock.patch.object( + transports.SubnetworksRestInterceptor, "post_delete_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.SubnetworksRestInterceptor, "pre_delete" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.DeleteSubnetworkRequest.pb( compute.DeleteSubnetworkRequest() ) @@ -5047,6 +5105,7 @@ def test_delete_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.delete( request, @@ -5058,6 +5117,7 @@ def test_delete_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_expand_ip_cidr_range_rest_bad_request( @@ -5269,10 +5329,13 @@ def 
test_expand_ip_cidr_range_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.SubnetworksRestInterceptor, "post_expand_ip_cidr_range" ) as post, mock.patch.object( + transports.SubnetworksRestInterceptor, "post_expand_ip_cidr_range_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.SubnetworksRestInterceptor, "pre_expand_ip_cidr_range" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.ExpandIpCidrRangeSubnetworkRequest.pb( compute.ExpandIpCidrRangeSubnetworkRequest() ) @@ -5296,6 +5359,7 @@ def test_expand_ip_cidr_range_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.expand_ip_cidr_range( request, @@ -5307,6 +5371,7 @@ def test_expand_ip_cidr_range_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_rest_bad_request(request_type=compute.GetSubnetworkRequest): @@ -5433,10 +5498,13 @@ def test_get_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.SubnetworksRestInterceptor, "post_get" ) as post, mock.patch.object( + transports.SubnetworksRestInterceptor, "post_get_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.SubnetworksRestInterceptor, "pre_get" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.GetSubnetworkRequest.pb(compute.GetSubnetworkRequest()) transcode.return_value = { "method": "post", @@ -5458,6 +5526,7 @@ def test_get_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Subnetwork() + post_with_metadata.return_value = compute.Subnetwork(), metadata client.get( request, @@ -5469,6 +5538,7 @@ def test_get_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_iam_policy_rest_bad_request( @@ -5557,10 +5627,13 @@ def test_get_iam_policy_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.SubnetworksRestInterceptor, "post_get_iam_policy" ) as post, mock.patch.object( + transports.SubnetworksRestInterceptor, "post_get_iam_policy_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.SubnetworksRestInterceptor, "pre_get_iam_policy" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.GetIamPolicySubnetworkRequest.pb( compute.GetIamPolicySubnetworkRequest() ) @@ -5584,6 +5657,7 @@ def test_get_iam_policy_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Policy() + post_with_metadata.return_value = compute.Policy(), metadata client.get_iam_policy( request, @@ -5595,6 +5669,7 @@ def test_get_iam_policy_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_insert_rest_bad_request(request_type=compute.InsertSubnetworkRequest): @@ -5826,10 +5901,13 @@ def test_insert_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.SubnetworksRestInterceptor, "post_insert" ) as post, mock.patch.object( + transports.SubnetworksRestInterceptor, "post_insert_with_metadata" + ) as post_with_metadata, mock.patch.object( 
transports.SubnetworksRestInterceptor, "pre_insert" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.InsertSubnetworkRequest.pb( compute.InsertSubnetworkRequest() ) @@ -5853,6 +5931,7 @@ def test_insert_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.insert( request, @@ -5864,6 +5943,7 @@ def test_insert_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_rest_bad_request(request_type=compute.ListSubnetworksRequest): @@ -5952,10 +6032,13 @@ def test_list_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.SubnetworksRestInterceptor, "post_list" ) as post, mock.patch.object( + transports.SubnetworksRestInterceptor, "post_list_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.SubnetworksRestInterceptor, "pre_list" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.ListSubnetworksRequest.pb(compute.ListSubnetworksRequest()) transcode.return_value = { "method": "post", @@ -5977,6 +6060,7 @@ def test_list_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.SubnetworkList() + post_with_metadata.return_value = compute.SubnetworkList(), metadata client.list( request, @@ -5988,6 +6072,7 @@ def test_list_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_usable_rest_bad_request( @@ -6078,10 +6163,13 @@ def test_list_usable_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.SubnetworksRestInterceptor, "post_list_usable" ) as post, mock.patch.object( + transports.SubnetworksRestInterceptor, "post_list_usable_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.SubnetworksRestInterceptor, "pre_list_usable" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.ListUsableSubnetworksRequest.pb( compute.ListUsableSubnetworksRequest() ) @@ -6107,6 +6195,10 @@ def test_list_usable_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.UsableSubnetworksAggregatedList() + post_with_metadata.return_value = ( + compute.UsableSubnetworksAggregatedList(), + metadata, + ) client.list_usable( request, @@ -6118,6 +6210,7 @@ def test_list_usable_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_patch_rest_bad_request(request_type=compute.PatchSubnetworkRequest): @@ -6349,10 +6442,13 @@ def test_patch_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.SubnetworksRestInterceptor, "post_patch" ) as post, mock.patch.object( + transports.SubnetworksRestInterceptor, "post_patch_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.SubnetworksRestInterceptor, "pre_patch" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.PatchSubnetworkRequest.pb(compute.PatchSubnetworkRequest()) transcode.return_value = { "method": "post", @@ -6374,6 +6470,7 @@ def 
test_patch_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.patch( request, @@ -6385,6 +6482,7 @@ def test_patch_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_set_iam_policy_rest_bad_request( @@ -6589,10 +6687,13 @@ def test_set_iam_policy_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.SubnetworksRestInterceptor, "post_set_iam_policy" ) as post, mock.patch.object( + transports.SubnetworksRestInterceptor, "post_set_iam_policy_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.SubnetworksRestInterceptor, "pre_set_iam_policy" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.SetIamPolicySubnetworkRequest.pb( compute.SetIamPolicySubnetworkRequest() ) @@ -6616,6 +6717,7 @@ def test_set_iam_policy_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Policy() + post_with_metadata.return_value = compute.Policy(), metadata client.set_iam_policy( request, @@ -6627,6 +6729,7 @@ def test_set_iam_policy_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_set_private_ip_google_access_rest_bad_request( @@ -6838,10 +6941,14 @@ def test_set_private_ip_google_access_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.SubnetworksRestInterceptor, "post_set_private_ip_google_access" ) as post, mock.patch.object( + transports.SubnetworksRestInterceptor, + "post_set_private_ip_google_access_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.SubnetworksRestInterceptor, "pre_set_private_ip_google_access" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.SetPrivateIpGoogleAccessSubnetworkRequest.pb( compute.SetPrivateIpGoogleAccessSubnetworkRequest() ) @@ -6865,6 +6972,7 @@ def test_set_private_ip_google_access_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.set_private_ip_google_access( request, @@ -6876,6 +6984,7 @@ def test_set_private_ip_google_access_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_test_iam_permissions_rest_bad_request( @@ -7038,10 +7147,13 @@ def test_test_iam_permissions_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.SubnetworksRestInterceptor, "post_test_iam_permissions" ) as post, mock.patch.object( + transports.SubnetworksRestInterceptor, "post_test_iam_permissions_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.SubnetworksRestInterceptor, "pre_test_iam_permissions" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.TestIamPermissionsSubnetworkRequest.pb( compute.TestIamPermissionsSubnetworkRequest() ) @@ -7067,6 +7179,7 @@ def test_test_iam_permissions_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.TestPermissionsResponse() + post_with_metadata.return_value = 
compute.TestPermissionsResponse(), metadata client.test_iam_permissions( request, @@ -7078,6 +7191,7 @@ def test_test_iam_permissions_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_initialize_client_w_rest(): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_target_grpc_proxies.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_target_grpc_proxies.py index a4055203d20c..306946574cc0 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_target_grpc_proxies.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_target_grpc_proxies.py @@ -66,6 +66,13 @@ ) from google.cloud.compute_v1.types import compute +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -328,6 +335,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = TargetGrpcProxiesClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = TargetGrpcProxiesClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -2782,10 +2832,13 @@ def test_delete_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.TargetGrpcProxiesRestInterceptor, "post_delete" ) as post, mock.patch.object( + transports.TargetGrpcProxiesRestInterceptor, "post_delete_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.TargetGrpcProxiesRestInterceptor, "pre_delete" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.DeleteTargetGrpcProxyRequest.pb( compute.DeleteTargetGrpcProxyRequest() ) @@ -2809,6 +2862,7 @@ def test_delete_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.delete( request, @@ -2820,6 +2874,7 @@ def test_delete_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + 
post_with_metadata.assert_called_once() def test_get_rest_bad_request(request_type=compute.GetTargetGrpcProxyRequest): @@ -2920,10 +2975,13 @@ def test_get_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.TargetGrpcProxiesRestInterceptor, "post_get" ) as post, mock.patch.object( + transports.TargetGrpcProxiesRestInterceptor, "post_get_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.TargetGrpcProxiesRestInterceptor, "pre_get" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.GetTargetGrpcProxyRequest.pb( compute.GetTargetGrpcProxyRequest() ) @@ -2947,6 +3005,7 @@ def test_get_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.TargetGrpcProxy() + post_with_metadata.return_value = compute.TargetGrpcProxy(), metadata client.get( request, @@ -2958,6 +3017,7 @@ def test_get_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_insert_rest_bad_request(request_type=compute.InsertTargetGrpcProxyRequest): @@ -3167,10 +3227,13 @@ def test_insert_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.TargetGrpcProxiesRestInterceptor, "post_insert" ) as post, mock.patch.object( + transports.TargetGrpcProxiesRestInterceptor, "post_insert_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.TargetGrpcProxiesRestInterceptor, "pre_insert" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.InsertTargetGrpcProxyRequest.pb( compute.InsertTargetGrpcProxyRequest() ) @@ -3194,6 +3257,7 @@ def test_insert_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.insert( request, @@ -3205,6 +3269,7 @@ def test_insert_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_rest_bad_request(request_type=compute.ListTargetGrpcProxiesRequest): @@ -3293,10 +3358,13 @@ def test_list_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.TargetGrpcProxiesRestInterceptor, "post_list" ) as post, mock.patch.object( + transports.TargetGrpcProxiesRestInterceptor, "post_list_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.TargetGrpcProxiesRestInterceptor, "pre_list" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.ListTargetGrpcProxiesRequest.pb( compute.ListTargetGrpcProxiesRequest() ) @@ -3322,6 +3390,7 @@ def test_list_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.TargetGrpcProxyList() + post_with_metadata.return_value = compute.TargetGrpcProxyList(), metadata client.list( request, @@ -3333,6 +3402,7 @@ def test_list_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_patch_rest_bad_request(request_type=compute.PatchTargetGrpcProxyRequest): @@ -3542,10 +3612,13 @@ def test_patch_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.TargetGrpcProxiesRestInterceptor, "post_patch" ) as post, mock.patch.object( + 
transports.TargetGrpcProxiesRestInterceptor, "post_patch_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.TargetGrpcProxiesRestInterceptor, "pre_patch" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.PatchTargetGrpcProxyRequest.pb( compute.PatchTargetGrpcProxyRequest() ) @@ -3569,6 +3642,7 @@ def test_patch_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.patch( request, @@ -3580,6 +3654,7 @@ def test_patch_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_initialize_client_w_rest(): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_target_http_proxies.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_target_http_proxies.py index d00ba47d47dc..f60be38f466e 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_target_http_proxies.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_target_http_proxies.py @@ -66,6 +66,13 @@ ) from google.cloud.compute_v1.types import compute +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -328,6 +335,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = TargetHttpProxiesClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = TargetHttpProxiesClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -3432,10 +3482,14 @@ def test_aggregated_list_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.TargetHttpProxiesRestInterceptor, "post_aggregated_list" ) as post, mock.patch.object( + transports.TargetHttpProxiesRestInterceptor, + "post_aggregated_list_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.TargetHttpProxiesRestInterceptor, "pre_aggregated_list" ) as pre: 
pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.AggregatedListTargetHttpProxiesRequest.pb( compute.AggregatedListTargetHttpProxiesRequest() ) @@ -3461,6 +3515,10 @@ def test_aggregated_list_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.TargetHttpProxyAggregatedList() + post_with_metadata.return_value = ( + compute.TargetHttpProxyAggregatedList(), + metadata, + ) client.aggregated_list( request, @@ -3472,6 +3530,7 @@ def test_aggregated_list_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_rest_bad_request(request_type=compute.DeleteTargetHttpProxyRequest): @@ -3596,10 +3655,13 @@ def test_delete_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.TargetHttpProxiesRestInterceptor, "post_delete" ) as post, mock.patch.object( + transports.TargetHttpProxiesRestInterceptor, "post_delete_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.TargetHttpProxiesRestInterceptor, "pre_delete" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.DeleteTargetHttpProxyRequest.pb( compute.DeleteTargetHttpProxyRequest() ) @@ -3623,6 +3685,7 @@ def test_delete_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.delete( request, @@ -3634,6 +3697,7 @@ def test_delete_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_rest_bad_request(request_type=compute.GetTargetHttpProxyRequest): @@ -3736,10 +3800,13 @@ def test_get_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.TargetHttpProxiesRestInterceptor, "post_get" ) as post, mock.patch.object( + transports.TargetHttpProxiesRestInterceptor, "post_get_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.TargetHttpProxiesRestInterceptor, "pre_get" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.GetTargetHttpProxyRequest.pb( compute.GetTargetHttpProxyRequest() ) @@ -3763,6 +3830,7 @@ def test_get_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.TargetHttpProxy() + post_with_metadata.return_value = compute.TargetHttpProxy(), metadata client.get( request, @@ -3774,6 +3842,7 @@ def test_get_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_insert_rest_bad_request(request_type=compute.InsertTargetHttpProxyRequest): @@ -3984,10 +4053,13 @@ def test_insert_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.TargetHttpProxiesRestInterceptor, "post_insert" ) as post, mock.patch.object( + transports.TargetHttpProxiesRestInterceptor, "post_insert_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.TargetHttpProxiesRestInterceptor, "pre_insert" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.InsertTargetHttpProxyRequest.pb( compute.InsertTargetHttpProxyRequest() ) @@ -4011,6 +4083,7 @@ def test_insert_rest_interceptors(null_interceptor): 
] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.insert( request, @@ -4022,6 +4095,7 @@ def test_insert_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_rest_bad_request(request_type=compute.ListTargetHttpProxiesRequest): @@ -4110,10 +4184,13 @@ def test_list_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.TargetHttpProxiesRestInterceptor, "post_list" ) as post, mock.patch.object( + transports.TargetHttpProxiesRestInterceptor, "post_list_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.TargetHttpProxiesRestInterceptor, "pre_list" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.ListTargetHttpProxiesRequest.pb( compute.ListTargetHttpProxiesRequest() ) @@ -4139,6 +4216,7 @@ def test_list_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.TargetHttpProxyList() + post_with_metadata.return_value = compute.TargetHttpProxyList(), metadata client.list( request, @@ -4150,6 +4228,7 @@ def test_list_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_patch_rest_bad_request(request_type=compute.PatchTargetHttpProxyRequest): @@ -4360,10 +4439,13 @@ def test_patch_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.TargetHttpProxiesRestInterceptor, "post_patch" ) as post, mock.patch.object( + transports.TargetHttpProxiesRestInterceptor, "post_patch_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.TargetHttpProxiesRestInterceptor, "pre_patch" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.PatchTargetHttpProxyRequest.pb( compute.PatchTargetHttpProxyRequest() ) @@ -4387,6 +4469,7 @@ def test_patch_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.patch( request, @@ -4398,6 +4481,7 @@ def test_patch_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_set_url_map_rest_bad_request( @@ -4598,10 +4682,13 @@ def test_set_url_map_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.TargetHttpProxiesRestInterceptor, "post_set_url_map" ) as post, mock.patch.object( + transports.TargetHttpProxiesRestInterceptor, "post_set_url_map_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.TargetHttpProxiesRestInterceptor, "pre_set_url_map" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.SetUrlMapTargetHttpProxyRequest.pb( compute.SetUrlMapTargetHttpProxyRequest() ) @@ -4625,6 +4712,7 @@ def test_set_url_map_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.set_url_map( request, @@ -4636,6 +4724,7 @@ def test_set_url_map_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def 
test_initialize_client_w_rest(): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_target_https_proxies.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_target_https_proxies.py index 3337c4d9e2ec..bed1810f5b50 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_target_https_proxies.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_target_https_proxies.py @@ -66,6 +66,13 @@ ) from google.cloud.compute_v1.types import compute +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -332,6 +339,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = TargetHttpsProxiesClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = TargetHttpsProxiesClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -5108,10 +5158,14 @@ def test_aggregated_list_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.TargetHttpsProxiesRestInterceptor, "post_aggregated_list" ) as post, mock.patch.object( + transports.TargetHttpsProxiesRestInterceptor, + "post_aggregated_list_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.TargetHttpsProxiesRestInterceptor, "pre_aggregated_list" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.AggregatedListTargetHttpsProxiesRequest.pb( compute.AggregatedListTargetHttpsProxiesRequest() ) @@ -5137,6 +5191,10 @@ def test_aggregated_list_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.TargetHttpsProxyAggregatedList() + post_with_metadata.return_value = ( + compute.TargetHttpsProxyAggregatedList(), + metadata, + ) client.aggregated_list( request, @@ -5148,6 +5206,7 @@ def test_aggregated_list_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_rest_bad_request(request_type=compute.DeleteTargetHttpsProxyRequest): @@ 
-5272,10 +5331,13 @@ def test_delete_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.TargetHttpsProxiesRestInterceptor, "post_delete" ) as post, mock.patch.object( + transports.TargetHttpsProxiesRestInterceptor, "post_delete_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.TargetHttpsProxiesRestInterceptor, "pre_delete" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.DeleteTargetHttpsProxyRequest.pb( compute.DeleteTargetHttpsProxyRequest() ) @@ -5299,6 +5361,7 @@ def test_delete_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.delete( request, @@ -5310,6 +5373,7 @@ def test_delete_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_rest_bad_request(request_type=compute.GetTargetHttpsProxyRequest): @@ -5426,10 +5490,13 @@ def test_get_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.TargetHttpsProxiesRestInterceptor, "post_get" ) as post, mock.patch.object( + transports.TargetHttpsProxiesRestInterceptor, "post_get_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.TargetHttpsProxiesRestInterceptor, "pre_get" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.GetTargetHttpsProxyRequest.pb( compute.GetTargetHttpsProxyRequest() ) @@ -5453,6 +5520,7 @@ def test_get_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.TargetHttpsProxy() + post_with_metadata.return_value = compute.TargetHttpsProxy(), metadata client.get( request, @@ -5464,6 +5532,7 @@ def test_get_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_insert_rest_bad_request(request_type=compute.InsertTargetHttpsProxyRequest): @@ -5681,10 +5750,13 @@ def test_insert_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.TargetHttpsProxiesRestInterceptor, "post_insert" ) as post, mock.patch.object( + transports.TargetHttpsProxiesRestInterceptor, "post_insert_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.TargetHttpsProxiesRestInterceptor, "pre_insert" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.InsertTargetHttpsProxyRequest.pb( compute.InsertTargetHttpsProxyRequest() ) @@ -5708,6 +5780,7 @@ def test_insert_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.insert( request, @@ -5719,6 +5792,7 @@ def test_insert_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_rest_bad_request(request_type=compute.ListTargetHttpsProxiesRequest): @@ -5807,10 +5881,13 @@ def test_list_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.TargetHttpsProxiesRestInterceptor, "post_list" ) as post, mock.patch.object( + transports.TargetHttpsProxiesRestInterceptor, "post_list_with_metadata" + ) as post_with_metadata, mock.patch.object( 
transports.TargetHttpsProxiesRestInterceptor, "pre_list" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.ListTargetHttpsProxiesRequest.pb( compute.ListTargetHttpsProxiesRequest() ) @@ -5836,6 +5913,7 @@ def test_list_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.TargetHttpsProxyList() + post_with_metadata.return_value = compute.TargetHttpsProxyList(), metadata client.list( request, @@ -5847,6 +5925,7 @@ def test_list_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_patch_rest_bad_request(request_type=compute.PatchTargetHttpsProxyRequest): @@ -6064,10 +6143,13 @@ def test_patch_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.TargetHttpsProxiesRestInterceptor, "post_patch" ) as post, mock.patch.object( + transports.TargetHttpsProxiesRestInterceptor, "post_patch_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.TargetHttpsProxiesRestInterceptor, "pre_patch" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.PatchTargetHttpsProxyRequest.pb( compute.PatchTargetHttpsProxyRequest() ) @@ -6091,6 +6173,7 @@ def test_patch_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.patch( request, @@ -6102,6 +6185,7 @@ def test_patch_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_set_certificate_map_rest_bad_request( @@ -6313,10 +6397,14 @@ def test_set_certificate_map_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.TargetHttpsProxiesRestInterceptor, "post_set_certificate_map" ) as post, mock.patch.object( + transports.TargetHttpsProxiesRestInterceptor, + "post_set_certificate_map_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.TargetHttpsProxiesRestInterceptor, "pre_set_certificate_map" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.SetCertificateMapTargetHttpsProxyRequest.pb( compute.SetCertificateMapTargetHttpsProxyRequest() ) @@ -6340,6 +6428,7 @@ def test_set_certificate_map_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.set_certificate_map( request, @@ -6351,6 +6440,7 @@ def test_set_certificate_map_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_set_quic_override_rest_bad_request( @@ -6562,10 +6652,14 @@ def test_set_quic_override_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.TargetHttpsProxiesRestInterceptor, "post_set_quic_override" ) as post, mock.patch.object( + transports.TargetHttpsProxiesRestInterceptor, + "post_set_quic_override_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.TargetHttpsProxiesRestInterceptor, "pre_set_quic_override" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.SetQuicOverrideTargetHttpsProxyRequest.pb( 
compute.SetQuicOverrideTargetHttpsProxyRequest() ) @@ -6589,6 +6683,7 @@ def test_set_quic_override_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.set_quic_override( request, @@ -6600,6 +6695,7 @@ def test_set_quic_override_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_set_ssl_certificates_rest_bad_request( @@ -6811,10 +6907,14 @@ def test_set_ssl_certificates_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.TargetHttpsProxiesRestInterceptor, "post_set_ssl_certificates" ) as post, mock.patch.object( + transports.TargetHttpsProxiesRestInterceptor, + "post_set_ssl_certificates_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.TargetHttpsProxiesRestInterceptor, "pre_set_ssl_certificates" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.SetSslCertificatesTargetHttpsProxyRequest.pb( compute.SetSslCertificatesTargetHttpsProxyRequest() ) @@ -6838,6 +6938,7 @@ def test_set_ssl_certificates_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.set_ssl_certificates( request, @@ -6849,6 +6950,7 @@ def test_set_ssl_certificates_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_set_ssl_policy_rest_bad_request( @@ -7051,10 +7153,14 @@ def test_set_ssl_policy_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.TargetHttpsProxiesRestInterceptor, "post_set_ssl_policy" ) as post, mock.patch.object( + transports.TargetHttpsProxiesRestInterceptor, + "post_set_ssl_policy_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.TargetHttpsProxiesRestInterceptor, "pre_set_ssl_policy" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.SetSslPolicyTargetHttpsProxyRequest.pb( compute.SetSslPolicyTargetHttpsProxyRequest() ) @@ -7078,6 +7184,7 @@ def test_set_ssl_policy_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.set_ssl_policy( request, @@ -7089,6 +7196,7 @@ def test_set_ssl_policy_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_set_url_map_rest_bad_request( @@ -7289,10 +7397,13 @@ def test_set_url_map_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.TargetHttpsProxiesRestInterceptor, "post_set_url_map" ) as post, mock.patch.object( + transports.TargetHttpsProxiesRestInterceptor, "post_set_url_map_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.TargetHttpsProxiesRestInterceptor, "pre_set_url_map" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.SetUrlMapTargetHttpsProxyRequest.pb( compute.SetUrlMapTargetHttpsProxyRequest() ) @@ -7316,6 +7427,7 @@ def test_set_url_map_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = 
compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.set_url_map( request, @@ -7327,6 +7439,7 @@ def test_set_url_map_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_initialize_client_w_rest(): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_target_instances.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_target_instances.py index d956e7f89b44..f16db0bbdf81 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_target_instances.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_target_instances.py @@ -66,6 +66,13 @@ ) from google.cloud.compute_v1.types import compute +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -319,6 +326,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = TargetInstancesClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = TargetInstancesClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -3104,10 +3154,13 @@ def test_aggregated_list_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.TargetInstancesRestInterceptor, "post_aggregated_list" ) as post, mock.patch.object( + transports.TargetInstancesRestInterceptor, "post_aggregated_list_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.TargetInstancesRestInterceptor, "pre_aggregated_list" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.AggregatedListTargetInstancesRequest.pb( compute.AggregatedListTargetInstancesRequest() ) @@ -3133,6 +3186,10 @@ def test_aggregated_list_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.TargetInstanceAggregatedList() + post_with_metadata.return_value = ( + compute.TargetInstanceAggregatedList(), + metadata, + ) client.aggregated_list( request, @@ -3144,6 +3201,7 @@ 
def test_aggregated_list_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_rest_bad_request(request_type=compute.DeleteTargetInstanceRequest): @@ -3276,10 +3334,13 @@ def test_delete_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.TargetInstancesRestInterceptor, "post_delete" ) as post, mock.patch.object( + transports.TargetInstancesRestInterceptor, "post_delete_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.TargetInstancesRestInterceptor, "pre_delete" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.DeleteTargetInstanceRequest.pb( compute.DeleteTargetInstanceRequest() ) @@ -3303,6 +3364,7 @@ def test_delete_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.delete( request, @@ -3314,6 +3376,7 @@ def test_delete_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_rest_bad_request(request_type=compute.GetTargetInstanceRequest): @@ -3424,10 +3487,13 @@ def test_get_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.TargetInstancesRestInterceptor, "post_get" ) as post, mock.patch.object( + transports.TargetInstancesRestInterceptor, "post_get_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.TargetInstancesRestInterceptor, "pre_get" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.GetTargetInstanceRequest.pb( compute.GetTargetInstanceRequest() ) @@ -3451,6 +3517,7 @@ def test_get_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.TargetInstance() + post_with_metadata.return_value = compute.TargetInstance(), metadata client.get( request, @@ -3462,6 +3529,7 @@ def test_get_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_insert_rest_bad_request(request_type=compute.InsertTargetInstanceRequest): @@ -3670,10 +3738,13 @@ def test_insert_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.TargetInstancesRestInterceptor, "post_insert" ) as post, mock.patch.object( + transports.TargetInstancesRestInterceptor, "post_insert_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.TargetInstancesRestInterceptor, "pre_insert" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.InsertTargetInstanceRequest.pb( compute.InsertTargetInstanceRequest() ) @@ -3697,6 +3768,7 @@ def test_insert_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.insert( request, @@ -3708,6 +3780,7 @@ def test_insert_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_rest_bad_request(request_type=compute.ListTargetInstancesRequest): @@ -3796,10 +3869,13 @@ def test_list_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.TargetInstancesRestInterceptor, 
"post_list" ) as post, mock.patch.object( + transports.TargetInstancesRestInterceptor, "post_list_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.TargetInstancesRestInterceptor, "pre_list" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.ListTargetInstancesRequest.pb( compute.ListTargetInstancesRequest() ) @@ -3823,6 +3899,7 @@ def test_list_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.TargetInstanceList() + post_with_metadata.return_value = compute.TargetInstanceList(), metadata client.list( request, @@ -3834,6 +3911,7 @@ def test_list_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_set_security_policy_rest_bad_request( @@ -4046,10 +4124,14 @@ def test_set_security_policy_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.TargetInstancesRestInterceptor, "post_set_security_policy" ) as post, mock.patch.object( + transports.TargetInstancesRestInterceptor, + "post_set_security_policy_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.TargetInstancesRestInterceptor, "pre_set_security_policy" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.SetSecurityPolicyTargetInstanceRequest.pb( compute.SetSecurityPolicyTargetInstanceRequest() ) @@ -4073,6 +4155,7 @@ def test_set_security_policy_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.set_security_policy( request, @@ -4084,6 +4167,7 @@ def test_set_security_policy_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_initialize_client_w_rest(): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_target_pools.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_target_pools.py index e23ed1874e46..3250848a01a0 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_target_pools.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_target_pools.py @@ -66,6 +66,13 @@ ) from google.cloud.compute_v1.types import compute +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -300,6 +307,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+@pytest.mark.parametrize(
+    "error_code,cred_info_json,show_cred_info",
+    [
+        (401, CRED_INFO_JSON, True),
+        (403, CRED_INFO_JSON, True),
+        (404, CRED_INFO_JSON, True),
+        (500, CRED_INFO_JSON, False),
+        (401, None, False),
+        (403, None, False),
+        (404, None, False),
+        (500, None, False),
+    ],
+)
+def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info):
+    cred = mock.Mock(["get_cred_info"])
+    cred.get_cred_info = mock.Mock(return_value=cred_info_json)
+    client = TargetPoolsClient(credentials=cred)
+    client._transport._credentials = cred
+
+    error = core_exceptions.GoogleAPICallError("message", details=["foo"])
+    error.code = error_code
+
+    client._add_cred_info_for_auth_errors(error)
+    if show_cred_info:
+        assert error.details == ["foo", CRED_INFO_STRING]
+    else:
+        assert error.details == ["foo"]
+
+
+@pytest.mark.parametrize("error_code", [401, 403, 404, 500])
+def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code):
+    cred = mock.Mock([])
+    assert not hasattr(cred, "get_cred_info")
+    client = TargetPoolsClient(credentials=cred)
+    client._transport._credentials = cred
+
+    error = core_exceptions.GoogleAPICallError("message", details=[])
+    error.code = error_code
+
+    client._add_cred_info_for_auth_errors(error)
+    assert error.details == []
+
+
 @pytest.mark.parametrize(
     "client_class,transport_name",
     [
@@ -5599,10 +5649,13 @@ def test_add_health_check_rest_interceptors(null_interceptor):
     ) as transcode, mock.patch.object(
         transports.TargetPoolsRestInterceptor, "post_add_health_check"
     ) as post, mock.patch.object(
+        transports.TargetPoolsRestInterceptor, "post_add_health_check_with_metadata"
+    ) as post_with_metadata, mock.patch.object(
         transports.TargetPoolsRestInterceptor, "pre_add_health_check"
     ) as pre:
         pre.assert_not_called()
         post.assert_not_called()
+        post_with_metadata.assert_not_called()
         pb_message = compute.AddHealthCheckTargetPoolRequest.pb(
             compute.AddHealthCheckTargetPoolRequest()
         )
@@ -5626,6 +5679,7 @@ def test_add_health_check_rest_interceptors(null_interceptor):
         ]
         pre.return_value = request, metadata
         post.return_value = compute.Operation()
+        post_with_metadata.return_value = compute.Operation(), metadata

         client.add_health_check(
             request,
@@ -5637,6 +5691,7 @@ def test_add_health_check_rest_interceptors(null_interceptor):
         pre.assert_called_once()
         post.assert_called_once()
+        post_with_metadata.assert_called_once()


 def test_add_instance_rest_bad_request(
@@ -5848,10 +5903,13 @@ def test_add_instance_rest_interceptors(null_interceptor):
     ) as transcode, mock.patch.object(
         transports.TargetPoolsRestInterceptor, "post_add_instance"
     ) as post, mock.patch.object(
+        transports.TargetPoolsRestInterceptor, "post_add_instance_with_metadata"
+    ) as post_with_metadata, mock.patch.object(
         transports.TargetPoolsRestInterceptor, "pre_add_instance"
     ) as pre:
         pre.assert_not_called()
         post.assert_not_called()
+        post_with_metadata.assert_not_called()
         pb_message = compute.AddInstanceTargetPoolRequest.pb(
             compute.AddInstanceTargetPoolRequest()
         )
@@ -5875,6 +5933,7 @@ def test_add_instance_rest_interceptors(null_interceptor):
         ]
         pre.return_value = request, metadata
         post.return_value = compute.Operation()
+        post_with_metadata.return_value = compute.Operation(), metadata

         client.add_instance(
             request,
@@ -5886,6 +5945,7 @@ def test_add_instance_rest_interceptors(null_interceptor):
         pre.assert_called_once()
         post.assert_called_once()
+        post_with_metadata.assert_called_once()


 def test_aggregated_list_rest_bad_request(
@@ -5978,10 +6038,13 @@ def
test_aggregated_list_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.TargetPoolsRestInterceptor, "post_aggregated_list" ) as post, mock.patch.object( + transports.TargetPoolsRestInterceptor, "post_aggregated_list_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.TargetPoolsRestInterceptor, "pre_aggregated_list" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.AggregatedListTargetPoolsRequest.pb( compute.AggregatedListTargetPoolsRequest() ) @@ -6007,6 +6070,7 @@ def test_aggregated_list_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.TargetPoolAggregatedList() + post_with_metadata.return_value = compute.TargetPoolAggregatedList(), metadata client.aggregated_list( request, @@ -6018,6 +6082,7 @@ def test_aggregated_list_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_rest_bad_request(request_type=compute.DeleteTargetPoolRequest): @@ -6142,10 +6207,13 @@ def test_delete_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.TargetPoolsRestInterceptor, "post_delete" ) as post, mock.patch.object( + transports.TargetPoolsRestInterceptor, "post_delete_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.TargetPoolsRestInterceptor, "pre_delete" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.DeleteTargetPoolRequest.pb( compute.DeleteTargetPoolRequest() ) @@ -6169,6 +6237,7 @@ def test_delete_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.delete( request, @@ -6180,6 +6249,7 @@ def test_delete_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_rest_bad_request(request_type=compute.GetTargetPoolRequest): @@ -6286,10 +6356,13 @@ def test_get_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.TargetPoolsRestInterceptor, "post_get" ) as post, mock.patch.object( + transports.TargetPoolsRestInterceptor, "post_get_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.TargetPoolsRestInterceptor, "pre_get" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.GetTargetPoolRequest.pb(compute.GetTargetPoolRequest()) transcode.return_value = { "method": "post", @@ -6311,6 +6384,7 @@ def test_get_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.TargetPool() + post_with_metadata.return_value = compute.TargetPool(), metadata client.get( request, @@ -6322,6 +6396,7 @@ def test_get_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_health_rest_bad_request(request_type=compute.GetHealthTargetPoolRequest): @@ -6478,10 +6553,13 @@ def test_get_health_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.TargetPoolsRestInterceptor, "post_get_health" ) as post, mock.patch.object( + transports.TargetPoolsRestInterceptor, "post_get_health_with_metadata" + ) as post_with_metadata, mock.patch.object( 
transports.TargetPoolsRestInterceptor, "pre_get_health" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.GetHealthTargetPoolRequest.pb( compute.GetHealthTargetPoolRequest() ) @@ -6507,6 +6585,7 @@ def test_get_health_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.TargetPoolInstanceHealth() + post_with_metadata.return_value = compute.TargetPoolInstanceHealth(), metadata client.get_health( request, @@ -6518,6 +6597,7 @@ def test_get_health_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_insert_rest_bad_request(request_type=compute.InsertTargetPoolRequest): @@ -6726,10 +6806,13 @@ def test_insert_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.TargetPoolsRestInterceptor, "post_insert" ) as post, mock.patch.object( + transports.TargetPoolsRestInterceptor, "post_insert_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.TargetPoolsRestInterceptor, "pre_insert" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.InsertTargetPoolRequest.pb( compute.InsertTargetPoolRequest() ) @@ -6753,6 +6836,7 @@ def test_insert_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.insert( request, @@ -6764,6 +6848,7 @@ def test_insert_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_rest_bad_request(request_type=compute.ListTargetPoolsRequest): @@ -6852,10 +6937,13 @@ def test_list_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.TargetPoolsRestInterceptor, "post_list" ) as post, mock.patch.object( + transports.TargetPoolsRestInterceptor, "post_list_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.TargetPoolsRestInterceptor, "pre_list" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.ListTargetPoolsRequest.pb(compute.ListTargetPoolsRequest()) transcode.return_value = { "method": "post", @@ -6877,6 +6965,7 @@ def test_list_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.TargetPoolList() + post_with_metadata.return_value = compute.TargetPoolList(), metadata client.list( request, @@ -6888,6 +6977,7 @@ def test_list_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_remove_health_check_rest_bad_request( @@ -7099,10 +7189,13 @@ def test_remove_health_check_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.TargetPoolsRestInterceptor, "post_remove_health_check" ) as post, mock.patch.object( + transports.TargetPoolsRestInterceptor, "post_remove_health_check_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.TargetPoolsRestInterceptor, "pre_remove_health_check" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.RemoveHealthCheckTargetPoolRequest.pb( compute.RemoveHealthCheckTargetPoolRequest() ) @@ -7126,6 +7219,7 @@ def 
test_remove_health_check_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.remove_health_check( request, @@ -7137,6 +7231,7 @@ def test_remove_health_check_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_remove_instance_rest_bad_request( @@ -7348,10 +7443,13 @@ def test_remove_instance_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.TargetPoolsRestInterceptor, "post_remove_instance" ) as post, mock.patch.object( + transports.TargetPoolsRestInterceptor, "post_remove_instance_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.TargetPoolsRestInterceptor, "pre_remove_instance" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.RemoveInstanceTargetPoolRequest.pb( compute.RemoveInstanceTargetPoolRequest() ) @@ -7375,6 +7473,7 @@ def test_remove_instance_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.remove_instance( request, @@ -7386,6 +7485,7 @@ def test_remove_instance_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_set_backup_rest_bad_request(request_type=compute.SetBackupTargetPoolRequest): @@ -7584,10 +7684,13 @@ def test_set_backup_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.TargetPoolsRestInterceptor, "post_set_backup" ) as post, mock.patch.object( + transports.TargetPoolsRestInterceptor, "post_set_backup_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.TargetPoolsRestInterceptor, "pre_set_backup" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.SetBackupTargetPoolRequest.pb( compute.SetBackupTargetPoolRequest() ) @@ -7611,6 +7714,7 @@ def test_set_backup_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.set_backup( request, @@ -7622,6 +7726,7 @@ def test_set_backup_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_set_security_policy_rest_bad_request( @@ -7826,10 +7931,13 @@ def test_set_security_policy_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.TargetPoolsRestInterceptor, "post_set_security_policy" ) as post, mock.patch.object( + transports.TargetPoolsRestInterceptor, "post_set_security_policy_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.TargetPoolsRestInterceptor, "pre_set_security_policy" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.SetSecurityPolicyTargetPoolRequest.pb( compute.SetSecurityPolicyTargetPoolRequest() ) @@ -7853,6 +7961,7 @@ def test_set_security_policy_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.set_security_policy( request, @@ -7864,6 +7973,7 @@ def 
test_set_security_policy_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_initialize_client_w_rest(): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_target_ssl_proxies.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_target_ssl_proxies.py index 8c5e342be302..4b8c79970eff 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_target_ssl_proxies.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_target_ssl_proxies.py @@ -66,6 +66,13 @@ ) from google.cloud.compute_v1.types import compute +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -320,6 +327,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = TargetSslProxiesClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = TargetSslProxiesClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -4452,10 +4502,13 @@ def test_delete_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.TargetSslProxiesRestInterceptor, "post_delete" ) as post, mock.patch.object( + transports.TargetSslProxiesRestInterceptor, "post_delete_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.TargetSslProxiesRestInterceptor, "pre_delete" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.DeleteTargetSslProxyRequest.pb( compute.DeleteTargetSslProxyRequest() ) @@ -4479,6 +4532,7 @@ def test_delete_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.delete( request, @@ -4490,6 +4544,7 @@ def test_delete_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_rest_bad_request(request_type=compute.GetTargetSslProxyRequest): @@ -4592,10 
+4647,13 @@ def test_get_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.TargetSslProxiesRestInterceptor, "post_get" ) as post, mock.patch.object( + transports.TargetSslProxiesRestInterceptor, "post_get_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.TargetSslProxiesRestInterceptor, "pre_get" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.GetTargetSslProxyRequest.pb( compute.GetTargetSslProxyRequest() ) @@ -4619,6 +4677,7 @@ def test_get_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.TargetSslProxy() + post_with_metadata.return_value = compute.TargetSslProxy(), metadata client.get( request, @@ -4630,6 +4689,7 @@ def test_get_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_insert_rest_bad_request(request_type=compute.InsertTargetSslProxyRequest): @@ -4840,10 +4900,13 @@ def test_insert_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.TargetSslProxiesRestInterceptor, "post_insert" ) as post, mock.patch.object( + transports.TargetSslProxiesRestInterceptor, "post_insert_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.TargetSslProxiesRestInterceptor, "pre_insert" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.InsertTargetSslProxyRequest.pb( compute.InsertTargetSslProxyRequest() ) @@ -4867,6 +4930,7 @@ def test_insert_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.insert( request, @@ -4878,6 +4942,7 @@ def test_insert_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_rest_bad_request(request_type=compute.ListTargetSslProxiesRequest): @@ -4966,10 +5031,13 @@ def test_list_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.TargetSslProxiesRestInterceptor, "post_list" ) as post, mock.patch.object( + transports.TargetSslProxiesRestInterceptor, "post_list_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.TargetSslProxiesRestInterceptor, "pre_list" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.ListTargetSslProxiesRequest.pb( compute.ListTargetSslProxiesRequest() ) @@ -4993,6 +5061,7 @@ def test_list_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.TargetSslProxyList() + post_with_metadata.return_value = compute.TargetSslProxyList(), metadata client.list( request, @@ -5004,6 +5073,7 @@ def test_list_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_set_backend_service_rest_bad_request( @@ -5215,10 +5285,14 @@ def test_set_backend_service_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.TargetSslProxiesRestInterceptor, "post_set_backend_service" ) as post, mock.patch.object( + transports.TargetSslProxiesRestInterceptor, + "post_set_backend_service_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.TargetSslProxiesRestInterceptor, 
"pre_set_backend_service" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.SetBackendServiceTargetSslProxyRequest.pb( compute.SetBackendServiceTargetSslProxyRequest() ) @@ -5242,6 +5316,7 @@ def test_set_backend_service_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.set_backend_service( request, @@ -5253,6 +5328,7 @@ def test_set_backend_service_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_set_certificate_map_rest_bad_request( @@ -5464,10 +5540,14 @@ def test_set_certificate_map_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.TargetSslProxiesRestInterceptor, "post_set_certificate_map" ) as post, mock.patch.object( + transports.TargetSslProxiesRestInterceptor, + "post_set_certificate_map_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.TargetSslProxiesRestInterceptor, "pre_set_certificate_map" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.SetCertificateMapTargetSslProxyRequest.pb( compute.SetCertificateMapTargetSslProxyRequest() ) @@ -5491,6 +5571,7 @@ def test_set_certificate_map_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.set_certificate_map( request, @@ -5502,6 +5583,7 @@ def test_set_certificate_map_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_set_proxy_header_rest_bad_request( @@ -5713,10 +5795,14 @@ def test_set_proxy_header_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.TargetSslProxiesRestInterceptor, "post_set_proxy_header" ) as post, mock.patch.object( + transports.TargetSslProxiesRestInterceptor, + "post_set_proxy_header_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.TargetSslProxiesRestInterceptor, "pre_set_proxy_header" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.SetProxyHeaderTargetSslProxyRequest.pb( compute.SetProxyHeaderTargetSslProxyRequest() ) @@ -5740,6 +5826,7 @@ def test_set_proxy_header_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.set_proxy_header( request, @@ -5751,6 +5838,7 @@ def test_set_proxy_header_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_set_ssl_certificates_rest_bad_request( @@ -5962,10 +6050,14 @@ def test_set_ssl_certificates_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.TargetSslProxiesRestInterceptor, "post_set_ssl_certificates" ) as post, mock.patch.object( + transports.TargetSslProxiesRestInterceptor, + "post_set_ssl_certificates_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.TargetSslProxiesRestInterceptor, "pre_set_ssl_certificates" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = 
compute.SetSslCertificatesTargetSslProxyRequest.pb( compute.SetSslCertificatesTargetSslProxyRequest() ) @@ -5989,6 +6081,7 @@ def test_set_ssl_certificates_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.set_ssl_certificates( request, @@ -6000,6 +6093,7 @@ def test_set_ssl_certificates_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_set_ssl_policy_rest_bad_request( @@ -6202,10 +6296,13 @@ def test_set_ssl_policy_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.TargetSslProxiesRestInterceptor, "post_set_ssl_policy" ) as post, mock.patch.object( + transports.TargetSslProxiesRestInterceptor, "post_set_ssl_policy_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.TargetSslProxiesRestInterceptor, "pre_set_ssl_policy" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.SetSslPolicyTargetSslProxyRequest.pb( compute.SetSslPolicyTargetSslProxyRequest() ) @@ -6229,6 +6326,7 @@ def test_set_ssl_policy_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.set_ssl_policy( request, @@ -6240,6 +6338,7 @@ def test_set_ssl_policy_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_initialize_client_w_rest(): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_target_tcp_proxies.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_target_tcp_proxies.py index 13bddeb6f0a0..3ba0e831d99a 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_target_tcp_proxies.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_target_tcp_proxies.py @@ -66,6 +66,13 @@ ) from google.cloud.compute_v1.types import compute +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -320,6 +327,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+@pytest.mark.parametrize(
+    "error_code,cred_info_json,show_cred_info",
+    [
+        (401, CRED_INFO_JSON, True),
+        (403, CRED_INFO_JSON, True),
+        (404, CRED_INFO_JSON, True),
+        (500, CRED_INFO_JSON, False),
+        (401, None, False),
+        (403, None, False),
+        (404, None, False),
+        (500, None, False),
+    ],
+)
+def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info):
+    cred = mock.Mock(["get_cred_info"])
+    cred.get_cred_info = mock.Mock(return_value=cred_info_json)
+    client = TargetTcpProxiesClient(credentials=cred)
+    client._transport._credentials = cred
+
+    error = core_exceptions.GoogleAPICallError("message", details=["foo"])
+    error.code = error_code
+
+    client._add_cred_info_for_auth_errors(error)
+    if show_cred_info:
+        assert error.details == ["foo", CRED_INFO_STRING]
+    else:
+        assert error.details == ["foo"]
+
+
+@pytest.mark.parametrize("error_code", [401, 403, 404, 500])
+def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code):
+    cred = mock.Mock([])
+    assert not hasattr(cred, "get_cred_info")
+    client = TargetTcpProxiesClient(credentials=cred)
+    client._transport._credentials = cred
+
+    error = core_exceptions.GoogleAPICallError("message", details=[])
+    error.code = error_code
+
+    client._add_cred_info_for_auth_errors(error)
+    assert error.details == []
+
+
 @pytest.mark.parametrize(
     "client_class,transport_name",
     [
@@ -3446,10 +3496,13 @@ def test_aggregated_list_rest_interceptors(null_interceptor):
     ) as transcode, mock.patch.object(
         transports.TargetTcpProxiesRestInterceptor, "post_aggregated_list"
     ) as post, mock.patch.object(
+        transports.TargetTcpProxiesRestInterceptor, "post_aggregated_list_with_metadata"
+    ) as post_with_metadata, mock.patch.object(
         transports.TargetTcpProxiesRestInterceptor, "pre_aggregated_list"
     ) as pre:
         pre.assert_not_called()
         post.assert_not_called()
+        post_with_metadata.assert_not_called()
         pb_message = compute.AggregatedListTargetTcpProxiesRequest.pb(
             compute.AggregatedListTargetTcpProxiesRequest()
         )
@@ -3475,6 +3528,10 @@ def test_aggregated_list_rest_interceptors(null_interceptor):
         ]
         pre.return_value = request, metadata
         post.return_value = compute.TargetTcpProxyAggregatedList()
+        post_with_metadata.return_value = (
+            compute.TargetTcpProxyAggregatedList(),
+            metadata,
+        )

         client.aggregated_list(
             request,
@@ -3486,6 +3543,7 @@ def test_aggregated_list_rest_interceptors(null_interceptor):
         pre.assert_called_once()
         post.assert_called_once()
+        post_with_metadata.assert_called_once()


 def test_delete_rest_bad_request(request_type=compute.DeleteTargetTcpProxyRequest):
@@ -3610,10 +3668,13 @@ def test_delete_rest_interceptors(null_interceptor):
     ) as transcode, mock.patch.object(
         transports.TargetTcpProxiesRestInterceptor, "post_delete"
     ) as post, mock.patch.object(
+        transports.TargetTcpProxiesRestInterceptor, "post_delete_with_metadata"
+    ) as post_with_metadata, mock.patch.object(
         transports.TargetTcpProxiesRestInterceptor, "pre_delete"
     ) as pre:
         pre.assert_not_called()
         post.assert_not_called()
+        post_with_metadata.assert_not_called()
         pb_message = compute.DeleteTargetTcpProxyRequest.pb(
             compute.DeleteTargetTcpProxyRequest()
         )
@@ -3637,6 +3698,7 @@ def test_delete_rest_interceptors(null_interceptor):
         ]
         pre.return_value = request, metadata
         post.return_value = compute.Operation()
+        post_with_metadata.return_value = compute.Operation(), metadata

         client.delete(
             request,
@@ -3648,6 +3710,7 @@ def test_delete_rest_interceptors(null_interceptor):
         pre.assert_called_once()
         post.assert_called_once()
+        post_with_metadata.assert_called_once()
def test_get_rest_bad_request(request_type=compute.GetTargetTcpProxyRequest): @@ -3748,10 +3811,13 @@ def test_get_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.TargetTcpProxiesRestInterceptor, "post_get" ) as post, mock.patch.object( + transports.TargetTcpProxiesRestInterceptor, "post_get_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.TargetTcpProxiesRestInterceptor, "pre_get" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.GetTargetTcpProxyRequest.pb( compute.GetTargetTcpProxyRequest() ) @@ -3775,6 +3841,7 @@ def test_get_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.TargetTcpProxy() + post_with_metadata.return_value = compute.TargetTcpProxy(), metadata client.get( request, @@ -3786,6 +3853,7 @@ def test_get_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_insert_rest_bad_request(request_type=compute.InsertTargetTcpProxyRequest): @@ -3995,10 +4063,13 @@ def test_insert_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.TargetTcpProxiesRestInterceptor, "post_insert" ) as post, mock.patch.object( + transports.TargetTcpProxiesRestInterceptor, "post_insert_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.TargetTcpProxiesRestInterceptor, "pre_insert" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.InsertTargetTcpProxyRequest.pb( compute.InsertTargetTcpProxyRequest() ) @@ -4022,6 +4093,7 @@ def test_insert_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.insert( request, @@ -4033,6 +4105,7 @@ def test_insert_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_rest_bad_request(request_type=compute.ListTargetTcpProxiesRequest): @@ -4121,10 +4194,13 @@ def test_list_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.TargetTcpProxiesRestInterceptor, "post_list" ) as post, mock.patch.object( + transports.TargetTcpProxiesRestInterceptor, "post_list_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.TargetTcpProxiesRestInterceptor, "pre_list" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.ListTargetTcpProxiesRequest.pb( compute.ListTargetTcpProxiesRequest() ) @@ -4148,6 +4224,7 @@ def test_list_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.TargetTcpProxyList() + post_with_metadata.return_value = compute.TargetTcpProxyList(), metadata client.list( request, @@ -4159,6 +4236,7 @@ def test_list_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_set_backend_service_rest_bad_request( @@ -4370,10 +4448,14 @@ def test_set_backend_service_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.TargetTcpProxiesRestInterceptor, "post_set_backend_service" ) as post, mock.patch.object( + transports.TargetTcpProxiesRestInterceptor, + "post_set_backend_service_with_metadata", + ) as 
post_with_metadata, mock.patch.object( transports.TargetTcpProxiesRestInterceptor, "pre_set_backend_service" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.SetBackendServiceTargetTcpProxyRequest.pb( compute.SetBackendServiceTargetTcpProxyRequest() ) @@ -4397,6 +4479,7 @@ def test_set_backend_service_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.set_backend_service( request, @@ -4408,6 +4491,7 @@ def test_set_backend_service_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_set_proxy_header_rest_bad_request( @@ -4619,10 +4703,14 @@ def test_set_proxy_header_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.TargetTcpProxiesRestInterceptor, "post_set_proxy_header" ) as post, mock.patch.object( + transports.TargetTcpProxiesRestInterceptor, + "post_set_proxy_header_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.TargetTcpProxiesRestInterceptor, "pre_set_proxy_header" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.SetProxyHeaderTargetTcpProxyRequest.pb( compute.SetProxyHeaderTargetTcpProxyRequest() ) @@ -4646,6 +4734,7 @@ def test_set_proxy_header_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.set_proxy_header( request, @@ -4657,6 +4746,7 @@ def test_set_proxy_header_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_initialize_client_w_rest(): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_target_vpn_gateways.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_target_vpn_gateways.py index 53fab639dc70..8355523e47ad 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_target_vpn_gateways.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_target_vpn_gateways.py @@ -66,6 +66,13 @@ ) from google.cloud.compute_v1.types import compute +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -328,6 +335,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = TargetVpnGatewaysClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = TargetVpnGatewaysClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -3114,10 +3164,14 @@ def test_aggregated_list_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.TargetVpnGatewaysRestInterceptor, "post_aggregated_list" ) as post, mock.patch.object( + transports.TargetVpnGatewaysRestInterceptor, + "post_aggregated_list_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.TargetVpnGatewaysRestInterceptor, "pre_aggregated_list" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.AggregatedListTargetVpnGatewaysRequest.pb( compute.AggregatedListTargetVpnGatewaysRequest() ) @@ -3143,6 +3197,10 @@ def test_aggregated_list_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.TargetVpnGatewayAggregatedList() + post_with_metadata.return_value = ( + compute.TargetVpnGatewayAggregatedList(), + metadata, + ) client.aggregated_list( request, @@ -3154,6 +3212,7 @@ def test_aggregated_list_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_rest_bad_request(request_type=compute.DeleteTargetVpnGatewayRequest): @@ -3286,10 +3345,13 @@ def test_delete_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.TargetVpnGatewaysRestInterceptor, "post_delete" ) as post, mock.patch.object( + transports.TargetVpnGatewaysRestInterceptor, "post_delete_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.TargetVpnGatewaysRestInterceptor, "pre_delete" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.DeleteTargetVpnGatewayRequest.pb( compute.DeleteTargetVpnGatewayRequest() ) @@ -3313,6 +3375,7 @@ def test_delete_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.delete( request, @@ -3324,6 +3387,7 @@ def test_delete_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + 
post_with_metadata.assert_called_once() def test_get_rest_bad_request(request_type=compute.GetTargetVpnGatewayRequest): @@ -3436,10 +3500,13 @@ def test_get_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.TargetVpnGatewaysRestInterceptor, "post_get" ) as post, mock.patch.object( + transports.TargetVpnGatewaysRestInterceptor, "post_get_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.TargetVpnGatewaysRestInterceptor, "pre_get" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.GetTargetVpnGatewayRequest.pb( compute.GetTargetVpnGatewayRequest() ) @@ -3463,6 +3530,7 @@ def test_get_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.TargetVpnGateway() + post_with_metadata.return_value = compute.TargetVpnGateway(), metadata client.get( request, @@ -3474,6 +3542,7 @@ def test_get_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_insert_rest_bad_request(request_type=compute.InsertTargetVpnGatewayRequest): @@ -3686,10 +3755,13 @@ def test_insert_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.TargetVpnGatewaysRestInterceptor, "post_insert" ) as post, mock.patch.object( + transports.TargetVpnGatewaysRestInterceptor, "post_insert_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.TargetVpnGatewaysRestInterceptor, "pre_insert" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.InsertTargetVpnGatewayRequest.pb( compute.InsertTargetVpnGatewayRequest() ) @@ -3713,6 +3785,7 @@ def test_insert_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.insert( request, @@ -3724,6 +3797,7 @@ def test_insert_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_rest_bad_request(request_type=compute.ListTargetVpnGatewaysRequest): @@ -3812,10 +3886,13 @@ def test_list_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.TargetVpnGatewaysRestInterceptor, "post_list" ) as post, mock.patch.object( + transports.TargetVpnGatewaysRestInterceptor, "post_list_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.TargetVpnGatewaysRestInterceptor, "pre_list" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.ListTargetVpnGatewaysRequest.pb( compute.ListTargetVpnGatewaysRequest() ) @@ -3841,6 +3918,7 @@ def test_list_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.TargetVpnGatewayList() + post_with_metadata.return_value = compute.TargetVpnGatewayList(), metadata client.list( request, @@ -3852,6 +3930,7 @@ def test_list_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_set_labels_rest_bad_request( @@ -4057,10 +4136,13 @@ def test_set_labels_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.TargetVpnGatewaysRestInterceptor, "post_set_labels" ) as post, mock.patch.object( + transports.TargetVpnGatewaysRestInterceptor, 
"post_set_labels_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.TargetVpnGatewaysRestInterceptor, "pre_set_labels" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.SetLabelsTargetVpnGatewayRequest.pb( compute.SetLabelsTargetVpnGatewayRequest() ) @@ -4084,6 +4166,7 @@ def test_set_labels_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.set_labels( request, @@ -4095,6 +4178,7 @@ def test_set_labels_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_initialize_client_w_rest(): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_url_maps.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_url_maps.py index 703a138ade2e..3bb41da0198a 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_url_maps.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_url_maps.py @@ -62,6 +62,13 @@ from google.cloud.compute_v1.services.url_maps import UrlMapsClient, pagers, transports from google.cloud.compute_v1.types import compute +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -280,6 +287,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = UrlMapsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = UrlMapsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -3963,10 +4013,13 @@ def test_aggregated_list_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.UrlMapsRestInterceptor, "post_aggregated_list" ) as post, mock.patch.object( + transports.UrlMapsRestInterceptor, "post_aggregated_list_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.UrlMapsRestInterceptor, "pre_aggregated_list" ) as pre: pre.assert_not_called() 
post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.AggregatedListUrlMapsRequest.pb( compute.AggregatedListUrlMapsRequest() ) @@ -3992,6 +4045,7 @@ def test_aggregated_list_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.UrlMapsAggregatedList() + post_with_metadata.return_value = compute.UrlMapsAggregatedList(), metadata client.aggregated_list( request, @@ -4003,6 +4057,7 @@ def test_aggregated_list_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_rest_bad_request(request_type=compute.DeleteUrlMapRequest): @@ -4125,10 +4180,13 @@ def test_delete_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.UrlMapsRestInterceptor, "post_delete" ) as post, mock.patch.object( + transports.UrlMapsRestInterceptor, "post_delete_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.UrlMapsRestInterceptor, "pre_delete" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.DeleteUrlMapRequest.pb(compute.DeleteUrlMapRequest()) transcode.return_value = { "method": "post", @@ -4150,6 +4208,7 @@ def test_delete_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.delete( request, @@ -4161,6 +4220,7 @@ def test_delete_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_rest_bad_request(request_type=compute.GetUrlMapRequest): @@ -4257,10 +4317,13 @@ def test_get_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.UrlMapsRestInterceptor, "post_get" ) as post, mock.patch.object( + transports.UrlMapsRestInterceptor, "post_get_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.UrlMapsRestInterceptor, "pre_get" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.GetUrlMapRequest.pb(compute.GetUrlMapRequest()) transcode.return_value = { "method": "post", @@ -4282,6 +4345,7 @@ def test_get_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.UrlMap() + post_with_metadata.return_value = compute.UrlMap(), metadata client.get( request, @@ -4293,6 +4357,7 @@ def test_get_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_insert_rest_bad_request(request_type=compute.InsertUrlMapRequest): @@ -4669,10 +4734,13 @@ def test_insert_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.UrlMapsRestInterceptor, "post_insert" ) as post, mock.patch.object( + transports.UrlMapsRestInterceptor, "post_insert_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.UrlMapsRestInterceptor, "pre_insert" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.InsertUrlMapRequest.pb(compute.InsertUrlMapRequest()) transcode.return_value = { "method": "post", @@ -4694,6 +4762,7 @@ def test_insert_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = 
compute.Operation(), metadata client.insert( request, @@ -4705,6 +4774,7 @@ def test_insert_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_invalidate_cache_rest_bad_request( @@ -4908,10 +4978,13 @@ def test_invalidate_cache_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.UrlMapsRestInterceptor, "post_invalidate_cache" ) as post, mock.patch.object( + transports.UrlMapsRestInterceptor, "post_invalidate_cache_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.UrlMapsRestInterceptor, "pre_invalidate_cache" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.InvalidateCacheUrlMapRequest.pb( compute.InvalidateCacheUrlMapRequest() ) @@ -4935,6 +5008,7 @@ def test_invalidate_cache_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.invalidate_cache( request, @@ -4946,6 +5020,7 @@ def test_invalidate_cache_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_rest_bad_request(request_type=compute.ListUrlMapsRequest): @@ -5032,10 +5107,13 @@ def test_list_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.UrlMapsRestInterceptor, "post_list" ) as post, mock.patch.object( + transports.UrlMapsRestInterceptor, "post_list_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.UrlMapsRestInterceptor, "pre_list" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.ListUrlMapsRequest.pb(compute.ListUrlMapsRequest()) transcode.return_value = { "method": "post", @@ -5057,6 +5135,7 @@ def test_list_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.UrlMapList() + post_with_metadata.return_value = compute.UrlMapList(), metadata client.list( request, @@ -5068,6 +5147,7 @@ def test_list_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_patch_rest_bad_request(request_type=compute.PatchUrlMapRequest): @@ -5444,10 +5524,13 @@ def test_patch_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.UrlMapsRestInterceptor, "post_patch" ) as post, mock.patch.object( + transports.UrlMapsRestInterceptor, "post_patch_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.UrlMapsRestInterceptor, "pre_patch" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.PatchUrlMapRequest.pb(compute.PatchUrlMapRequest()) transcode.return_value = { "method": "post", @@ -5469,6 +5552,7 @@ def test_patch_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.patch( request, @@ -5480,6 +5564,7 @@ def test_patch_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_rest_bad_request(request_type=compute.UpdateUrlMapRequest): @@ -5856,10 +5941,13 @@ def test_update_rest_interceptors(null_interceptor): ) as transcode, 
mock.patch.object( transports.UrlMapsRestInterceptor, "post_update" ) as post, mock.patch.object( + transports.UrlMapsRestInterceptor, "post_update_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.UrlMapsRestInterceptor, "pre_update" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.UpdateUrlMapRequest.pb(compute.UpdateUrlMapRequest()) transcode.return_value = { "method": "post", @@ -5881,6 +5969,7 @@ def test_update_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.update( request, @@ -5892,6 +5981,7 @@ def test_update_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_validate_rest_bad_request(request_type=compute.ValidateUrlMapRequest): @@ -6240,10 +6330,13 @@ def test_validate_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.UrlMapsRestInterceptor, "post_validate" ) as post, mock.patch.object( + transports.UrlMapsRestInterceptor, "post_validate_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.UrlMapsRestInterceptor, "pre_validate" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.ValidateUrlMapRequest.pb(compute.ValidateUrlMapRequest()) transcode.return_value = { "method": "post", @@ -6267,6 +6360,7 @@ def test_validate_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.UrlMapsValidateResponse() + post_with_metadata.return_value = compute.UrlMapsValidateResponse(), metadata client.validate( request, @@ -6278,6 +6372,7 @@ def test_validate_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_initialize_client_w_rest(): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_vpn_gateways.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_vpn_gateways.py index de1c3a1b2cd7..9c509c24aa49 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_vpn_gateways.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_vpn_gateways.py @@ -66,6 +66,13 @@ ) from google.cloud.compute_v1.types import compute +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -300,6 +307,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
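Note: the parametrized tests added below (and repeated for every client in this PR) drive the new `_add_cred_info_for_auth_errors` helper, which appends a JSON description of the active credential to `error.details` for 401/403/404 responses. A minimal consumption sketch, assuming google-auth >= 2.35.0 (so the credential exposes `get_cred_info()`) and purely illustrative project/resource names; the exact call path that raises the error is not taken from this diff.

```python
import json

from google.api_core import exceptions as core_exceptions
from google.cloud import compute_v1

client = compute_v1.VpnGatewaysClient()  # resolves Application Default Credentials

try:
    # Illustrative identifiers; substitute real ones.
    client.get(project="my-project", region="us-central1", vpn_gateway="gw-1")
except core_exceptions.GoogleAPICallError as e:
    # On 401/403/404 the client appends json.dumps(cred_info) to the error
    # details; other detail entries may not be strings, so parse defensively.
    for detail in e.details:
        try:
            info = json.loads(detail)
        except (TypeError, ValueError):
            continue
        if "credential_type" in info:
            print("authenticated as:", info.get("principal"))
    raise
```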
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = VpnGatewaysClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = VpnGatewaysClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -3477,10 +3527,13 @@ def test_aggregated_list_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.VpnGatewaysRestInterceptor, "post_aggregated_list" ) as post, mock.patch.object( + transports.VpnGatewaysRestInterceptor, "post_aggregated_list_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.VpnGatewaysRestInterceptor, "pre_aggregated_list" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.AggregatedListVpnGatewaysRequest.pb( compute.AggregatedListVpnGatewaysRequest() ) @@ -3506,6 +3559,7 @@ def test_aggregated_list_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.VpnGatewayAggregatedList() + post_with_metadata.return_value = compute.VpnGatewayAggregatedList(), metadata client.aggregated_list( request, @@ -3517,6 +3571,7 @@ def test_aggregated_list_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_rest_bad_request(request_type=compute.DeleteVpnGatewayRequest): @@ -3641,10 +3696,13 @@ def test_delete_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.VpnGatewaysRestInterceptor, "post_delete" ) as post, mock.patch.object( + transports.VpnGatewaysRestInterceptor, "post_delete_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.VpnGatewaysRestInterceptor, "pre_delete" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.DeleteVpnGatewayRequest.pb( compute.DeleteVpnGatewayRequest() ) @@ -3668,6 +3726,7 @@ def test_delete_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.delete( request, @@ -3679,6 +3738,7 @@ def test_delete_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_rest_bad_request(request_type=compute.GetVpnGatewayRequest): @@ 
-3781,10 +3841,13 @@ def test_get_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.VpnGatewaysRestInterceptor, "post_get" ) as post, mock.patch.object( + transports.VpnGatewaysRestInterceptor, "post_get_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.VpnGatewaysRestInterceptor, "pre_get" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.GetVpnGatewayRequest.pb(compute.GetVpnGatewayRequest()) transcode.return_value = { "method": "post", @@ -3806,6 +3869,7 @@ def test_get_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.VpnGateway() + post_with_metadata.return_value = compute.VpnGateway(), metadata client.get( request, @@ -3817,6 +3881,7 @@ def test_get_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_status_rest_bad_request(request_type=compute.GetStatusVpnGatewayRequest): @@ -3896,10 +3961,13 @@ def test_get_status_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.VpnGatewaysRestInterceptor, "post_get_status" ) as post, mock.patch.object( + transports.VpnGatewaysRestInterceptor, "post_get_status_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.VpnGatewaysRestInterceptor, "pre_get_status" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.GetStatusVpnGatewayRequest.pb( compute.GetStatusVpnGatewayRequest() ) @@ -3925,6 +3993,10 @@ def test_get_status_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.VpnGatewaysGetStatusResponse() + post_with_metadata.return_value = ( + compute.VpnGatewaysGetStatusResponse(), + metadata, + ) client.get_status( request, @@ -3936,6 +4008,7 @@ def test_get_status_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_insert_rest_bad_request(request_type=compute.InsertVpnGatewayRequest): @@ -4151,10 +4224,13 @@ def test_insert_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.VpnGatewaysRestInterceptor, "post_insert" ) as post, mock.patch.object( + transports.VpnGatewaysRestInterceptor, "post_insert_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.VpnGatewaysRestInterceptor, "pre_insert" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.InsertVpnGatewayRequest.pb( compute.InsertVpnGatewayRequest() ) @@ -4178,6 +4254,7 @@ def test_insert_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.insert( request, @@ -4189,6 +4266,7 @@ def test_insert_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_rest_bad_request(request_type=compute.ListVpnGatewaysRequest): @@ -4277,10 +4355,13 @@ def test_list_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.VpnGatewaysRestInterceptor, "post_list" ) as post, mock.patch.object( + transports.VpnGatewaysRestInterceptor, "post_list_with_metadata" + ) as post_with_metadata, mock.patch.object( 
transports.VpnGatewaysRestInterceptor, "pre_list" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.ListVpnGatewaysRequest.pb(compute.ListVpnGatewaysRequest()) transcode.return_value = { "method": "post", @@ -4302,6 +4383,7 @@ def test_list_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.VpnGatewayList() + post_with_metadata.return_value = compute.VpnGatewayList(), metadata client.list( request, @@ -4313,6 +4395,7 @@ def test_list_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_set_labels_rest_bad_request(request_type=compute.SetLabelsVpnGatewayRequest): @@ -4516,10 +4599,13 @@ def test_set_labels_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.VpnGatewaysRestInterceptor, "post_set_labels" ) as post, mock.patch.object( + transports.VpnGatewaysRestInterceptor, "post_set_labels_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.VpnGatewaysRestInterceptor, "pre_set_labels" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.SetLabelsVpnGatewayRequest.pb( compute.SetLabelsVpnGatewayRequest() ) @@ -4543,6 +4629,7 @@ def test_set_labels_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.set_labels( request, @@ -4554,6 +4641,7 @@ def test_set_labels_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_test_iam_permissions_rest_bad_request( @@ -4716,10 +4804,13 @@ def test_test_iam_permissions_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.VpnGatewaysRestInterceptor, "post_test_iam_permissions" ) as post, mock.patch.object( + transports.VpnGatewaysRestInterceptor, "post_test_iam_permissions_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.VpnGatewaysRestInterceptor, "pre_test_iam_permissions" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.TestIamPermissionsVpnGatewayRequest.pb( compute.TestIamPermissionsVpnGatewayRequest() ) @@ -4745,6 +4836,7 @@ def test_test_iam_permissions_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.TestPermissionsResponse() + post_with_metadata.return_value = compute.TestPermissionsResponse(), metadata client.test_iam_permissions( request, @@ -4756,6 +4848,7 @@ def test_test_iam_permissions_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_initialize_client_w_rest(): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_vpn_tunnels.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_vpn_tunnels.py index e0feaec0db83..213f95ccadc9 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_vpn_tunnels.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_vpn_tunnels.py @@ -66,6 +66,13 @@ ) from google.cloud.compute_v1.types import compute +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": 
"service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -296,6 +303,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = VpnTunnelsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = VpnTunnelsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -3048,10 +3098,13 @@ def test_aggregated_list_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.VpnTunnelsRestInterceptor, "post_aggregated_list" ) as post, mock.patch.object( + transports.VpnTunnelsRestInterceptor, "post_aggregated_list_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.VpnTunnelsRestInterceptor, "pre_aggregated_list" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.AggregatedListVpnTunnelsRequest.pb( compute.AggregatedListVpnTunnelsRequest() ) @@ -3077,6 +3130,7 @@ def test_aggregated_list_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.VpnTunnelAggregatedList() + post_with_metadata.return_value = compute.VpnTunnelAggregatedList(), metadata client.aggregated_list( request, @@ -3088,6 +3142,7 @@ def test_aggregated_list_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_rest_bad_request(request_type=compute.DeleteVpnTunnelRequest): @@ -3212,10 +3267,13 @@ def test_delete_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.VpnTunnelsRestInterceptor, "post_delete" ) as post, mock.patch.object( + transports.VpnTunnelsRestInterceptor, "post_delete_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.VpnTunnelsRestInterceptor, "pre_delete" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.DeleteVpnTunnelRequest.pb(compute.DeleteVpnTunnelRequest()) transcode.return_value = { "method": "post", @@ -3237,6 +3295,7 @@ def test_delete_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + 
post_with_metadata.return_value = compute.Operation(), metadata client.delete( request, @@ -3248,6 +3307,7 @@ def test_delete_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_rest_bad_request(request_type=compute.GetVpnTunnelRequest): @@ -3374,10 +3434,13 @@ def test_get_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.VpnTunnelsRestInterceptor, "post_get" ) as post, mock.patch.object( + transports.VpnTunnelsRestInterceptor, "post_get_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.VpnTunnelsRestInterceptor, "pre_get" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.GetVpnTunnelRequest.pb(compute.GetVpnTunnelRequest()) transcode.return_value = { "method": "post", @@ -3399,6 +3462,7 @@ def test_get_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.VpnTunnel() + post_with_metadata.return_value = compute.VpnTunnel(), metadata client.get( request, @@ -3410,6 +3474,7 @@ def test_get_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_insert_rest_bad_request(request_type=compute.InsertVpnTunnelRequest): @@ -3633,10 +3698,13 @@ def test_insert_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.VpnTunnelsRestInterceptor, "post_insert" ) as post, mock.patch.object( + transports.VpnTunnelsRestInterceptor, "post_insert_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.VpnTunnelsRestInterceptor, "pre_insert" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.InsertVpnTunnelRequest.pb(compute.InsertVpnTunnelRequest()) transcode.return_value = { "method": "post", @@ -3658,6 +3726,7 @@ def test_insert_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.insert( request, @@ -3669,6 +3738,7 @@ def test_insert_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_rest_bad_request(request_type=compute.ListVpnTunnelsRequest): @@ -3757,10 +3827,13 @@ def test_list_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.VpnTunnelsRestInterceptor, "post_list" ) as post, mock.patch.object( + transports.VpnTunnelsRestInterceptor, "post_list_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.VpnTunnelsRestInterceptor, "pre_list" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.ListVpnTunnelsRequest.pb(compute.ListVpnTunnelsRequest()) transcode.return_value = { "method": "post", @@ -3782,6 +3855,7 @@ def test_list_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.VpnTunnelList() + post_with_metadata.return_value = compute.VpnTunnelList(), metadata client.list( request, @@ -3793,6 +3867,7 @@ def test_list_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_set_labels_rest_bad_request(request_type=compute.SetLabelsVpnTunnelRequest): @@ -3996,10 +4071,13 @@ def 
test_set_labels_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.VpnTunnelsRestInterceptor, "post_set_labels" ) as post, mock.patch.object( + transports.VpnTunnelsRestInterceptor, "post_set_labels_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.VpnTunnelsRestInterceptor, "pre_set_labels" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.SetLabelsVpnTunnelRequest.pb( compute.SetLabelsVpnTunnelRequest() ) @@ -4023,6 +4101,7 @@ def test_set_labels_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.set_labels( request, @@ -4034,6 +4113,7 @@ def test_set_labels_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_initialize_client_w_rest(): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_zone_operations.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_zone_operations.py index 561f63b333ae..12e0bce394a0 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_zone_operations.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_zone_operations.py @@ -59,6 +59,13 @@ ) from google.cloud.compute_v1.types import compute +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -312,6 +319,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
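Note: the interceptor tests above patch both the legacy `post_set_labels` hook and the new `post_set_labels_with_metadata` hook. A minimal sketch of wiring a user-defined interceptor through the REST transport, assuming the usual GAPIC `interceptor=` constructor argument; the header name and logging behavior are illustrative, not part of this diff.

```python
from google.cloud import compute_v1
from google.cloud.compute_v1.services.vpn_tunnels import transports


class LoggingInterceptor(transports.VpnTunnelsRestInterceptor):
    def post_set_labels_with_metadata(self, response, metadata):
        # metadata is a sequence of (name, value) pairs built from the HTTP
        # response headers; surface a (hypothetical) request id if present.
        headers = dict(metadata)
        print("set_labels done, request id:", headers.get("x-request-id"))
        return response, metadata


# The transport resolves Application Default Credentials when none are given.
transport = transports.VpnTunnelsRestTransport(interceptor=LoggingInterceptor())
client = compute_v1.VpnTunnelsClient(transport=transport)
```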
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = ZoneOperationsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = ZoneOperationsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -1926,10 +1976,13 @@ def test_delete_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ZoneOperationsRestInterceptor, "post_delete" ) as post, mock.patch.object( + transports.ZoneOperationsRestInterceptor, "post_delete_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ZoneOperationsRestInterceptor, "pre_delete" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.DeleteZoneOperationRequest.pb( compute.DeleteZoneOperationRequest() ) @@ -1955,6 +2008,10 @@ def test_delete_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.DeleteZoneOperationResponse() + post_with_metadata.return_value = ( + compute.DeleteZoneOperationResponse(), + metadata, + ) client.delete( request, @@ -1966,6 +2023,7 @@ def test_delete_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_rest_bad_request(request_type=compute.GetZoneOperationRequest): @@ -2090,10 +2148,13 @@ def test_get_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ZoneOperationsRestInterceptor, "post_get" ) as post, mock.patch.object( + transports.ZoneOperationsRestInterceptor, "post_get_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ZoneOperationsRestInterceptor, "pre_get" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.GetZoneOperationRequest.pb( compute.GetZoneOperationRequest() ) @@ -2117,6 +2178,7 @@ def test_get_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.get( request, @@ -2128,6 +2190,7 @@ def test_get_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_rest_bad_request(request_type=compute.ListZoneOperationsRequest): @@ -2216,10 +2279,13 @@ def 
test_list_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ZoneOperationsRestInterceptor, "post_list" ) as post, mock.patch.object( + transports.ZoneOperationsRestInterceptor, "post_list_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ZoneOperationsRestInterceptor, "pre_list" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.ListZoneOperationsRequest.pb( compute.ListZoneOperationsRequest() ) @@ -2243,6 +2309,7 @@ def test_list_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.OperationList() + post_with_metadata.return_value = compute.OperationList(), metadata client.list( request, @@ -2254,6 +2321,7 @@ def test_list_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_wait_rest_bad_request(request_type=compute.WaitZoneOperationRequest): @@ -2378,10 +2446,13 @@ def test_wait_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ZoneOperationsRestInterceptor, "post_wait" ) as post, mock.patch.object( + transports.ZoneOperationsRestInterceptor, "post_wait_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ZoneOperationsRestInterceptor, "pre_wait" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.WaitZoneOperationRequest.pb( compute.WaitZoneOperationRequest() ) @@ -2405,6 +2476,7 @@ def test_wait_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata client.wait( request, @@ -2416,6 +2488,7 @@ def test_wait_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_initialize_client_w_rest(): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_zones.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_zones.py index 73ce2d29a2dd..f6a69095c019 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_zones.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_zones.py @@ -55,6 +55,13 @@ from google.cloud.compute_v1.services.zones import ZonesClient, pagers, transports from google.cloud.compute_v1.types import compute +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -268,6 +275,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
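Note: the credential-info tests below (one copy per client) control whether `get_cred_info` exists by passing a spec list to `mock.Mock`, which is what makes the `hasattr` check in `_add_cred_info_for_auth_errors` deterministic. A quick standalone sketch of that idiom:

```python
from unittest import mock

with_hook = mock.Mock(["get_cred_info"])  # spec list: only this attribute exists
without_hook = mock.Mock([])              # empty spec: no attributes at all

assert hasattr(with_hook, "get_cred_info")
assert not hasattr(without_hook, "get_cred_info")
```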
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = ZonesClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = ZonesClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -1468,10 +1518,13 @@ def test_get_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ZonesRestInterceptor, "post_get" ) as post, mock.patch.object( + transports.ZonesRestInterceptor, "post_get_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ZonesRestInterceptor, "pre_get" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.GetZoneRequest.pb(compute.GetZoneRequest()) transcode.return_value = { "method": "post", @@ -1493,6 +1546,7 @@ def test_get_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.Zone() + post_with_metadata.return_value = compute.Zone(), metadata client.get( request, @@ -1504,6 +1558,7 @@ def test_get_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_rest_bad_request(request_type=compute.ListZonesRequest): @@ -1590,10 +1645,13 @@ def test_list_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ZonesRestInterceptor, "post_list" ) as post, mock.patch.object( + transports.ZonesRestInterceptor, "post_list_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ZonesRestInterceptor, "pre_list" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = compute.ListZonesRequest.pb(compute.ListZonesRequest()) transcode.return_value = { "method": "post", @@ -1615,6 +1673,7 @@ def test_list_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = compute.ZoneList() + post_with_metadata.return_value = compute.ZoneList(), metadata client.list( request, @@ -1626,6 +1685,7 @@ def test_list_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_initialize_client_w_rest(): diff --git a/packages/google-cloud-confidentialcomputing/CHANGELOG.md b/packages/google-cloud-confidentialcomputing/CHANGELOG.md index 34ad27476f9a..a94ca7909e50 100644 --- 
a/packages/google-cloud-confidentialcomputing/CHANGELOG.md +++ b/packages/google-cloud-confidentialcomputing/CHANGELOG.md @@ -1,5 +1,13 @@ # Changelog +## [0.4.15](https://github.com/googleapis/google-cloud-python/compare/google-cloud-confidentialcomputing-v0.4.14...google-cloud-confidentialcomputing-v0.4.15) (2025-02-12) + + +### Features + +* Add REST Interceptors which support reading metadata ([e22e2bd](https://github.com/googleapis/google-cloud-python/commit/e22e2bde55d11d2f85e9d2caf1d152a4027f88cf)) +* Add support for reading selective GAPIC generation methods from service YAML ([e22e2bd](https://github.com/googleapis/google-cloud-python/commit/e22e2bde55d11d2f85e9d2caf1d152a4027f88cf)) + ## [0.4.14](https://github.com/googleapis/google-cloud-python/compare/google-cloud-confidentialcomputing-v0.4.13...google-cloud-confidentialcomputing-v0.4.14) (2024-12-12) diff --git a/packages/google-cloud-confidentialcomputing/google/cloud/confidentialcomputing/gapic_version.py b/packages/google-cloud-confidentialcomputing/google/cloud/confidentialcomputing/gapic_version.py index 3106ac663ac7..49a0d50535a0 100644 --- a/packages/google-cloud-confidentialcomputing/google/cloud/confidentialcomputing/gapic_version.py +++ b/packages/google-cloud-confidentialcomputing/google/cloud/confidentialcomputing/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.4.14" # {x-release-please-version} +__version__ = "0.4.15" # {x-release-please-version} diff --git a/packages/google-cloud-confidentialcomputing/google/cloud/confidentialcomputing_v1/gapic_version.py b/packages/google-cloud-confidentialcomputing/google/cloud/confidentialcomputing_v1/gapic_version.py index 3106ac663ac7..49a0d50535a0 100644 --- a/packages/google-cloud-confidentialcomputing/google/cloud/confidentialcomputing_v1/gapic_version.py +++ b/packages/google-cloud-confidentialcomputing/google/cloud/confidentialcomputing_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.4.14" # {x-release-please-version} +__version__ = "0.4.15" # {x-release-please-version} diff --git a/packages/google-cloud-confidentialcomputing/google/cloud/confidentialcomputing_v1/services/confidential_computing/client.py b/packages/google-cloud-confidentialcomputing/google/cloud/confidentialcomputing_v1/services/confidential_computing/client.py index 387ad0647e12..996ca1880e99 100644 --- a/packages/google-cloud-confidentialcomputing/google/cloud/confidentialcomputing_v1/services/confidential_computing/client.py +++ b/packages/google-cloud-confidentialcomputing/google/cloud/confidentialcomputing_v1/services/confidential_computing/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -489,6 +491,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. 
+ """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -955,16 +984,20 @@ def get_location( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def list_locations( self, @@ -1010,16 +1043,20 @@ def list_locations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-confidentialcomputing/google/cloud/confidentialcomputing_v1/services/confidential_computing/transports/rest.py b/packages/google-cloud-confidentialcomputing/google/cloud/confidentialcomputing_v1/services/confidential_computing/transports/rest.py index 184ccec17a70..981ba01438d4 100644 --- a/packages/google-cloud-confidentialcomputing/google/cloud/confidentialcomputing_v1/services/confidential_computing/transports/rest.py +++ b/packages/google-cloud-confidentialcomputing/google/cloud/confidentialcomputing_v1/services/confidential_computing/transports/rest.py @@ -106,12 +106,35 @@ def pre_create_challenge( def post_create_challenge(self, response: service.Challenge) -> service.Challenge: """Post-rpc interceptor for create_challenge - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_challenge_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ConfidentialComputing server but before - it is returned to user code. + it is returned to user code. This `post_create_challenge` interceptor runs + before the `post_create_challenge_with_metadata` interceptor. """ return response + def post_create_challenge_with_metadata( + self, + response: service.Challenge, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[service.Challenge, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_challenge + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ConfidentialComputing server but before it is returned to user code. 
+ + We recommend only using this `post_create_challenge_with_metadata` + interceptor in new development instead of the `post_create_challenge` interceptor. + When both interceptors are used, this `post_create_challenge_with_metadata` interceptor runs after the + `post_create_challenge` interceptor. The (possibly modified) response returned by + `post_create_challenge` will be passed to + `post_create_challenge_with_metadata`. + """ + return response, metadata + def pre_verify_attestation( self, request: service.VerifyAttestationRequest, @@ -131,12 +154,37 @@ def post_verify_attestation( ) -> service.VerifyAttestationResponse: """Post-rpc interceptor for verify_attestation - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_verify_attestation_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ConfidentialComputing server but before - it is returned to user code. + it is returned to user code. This `post_verify_attestation` interceptor runs + before the `post_verify_attestation_with_metadata` interceptor. """ return response + def post_verify_attestation_with_metadata( + self, + response: service.VerifyAttestationResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + service.VerifyAttestationResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for verify_attestation + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ConfidentialComputing server but before it is returned to user code. + + We recommend only using this `post_verify_attestation_with_metadata` + interceptor in new development instead of the `post_verify_attestation` interceptor. + When both interceptors are used, this `post_verify_attestation_with_metadata` interceptor runs after the + `post_verify_attestation` interceptor. The (possibly modified) response returned by + `post_verify_attestation` will be passed to + `post_verify_attestation_with_metadata`. 
+ """ + return response, metadata + def pre_get_location( self, request: locations_pb2.GetLocationRequest, @@ -404,6 +452,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_challenge(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_challenge_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -561,6 +613,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_verify_attestation(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_verify_attestation_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-confidentialcomputing/samples/generated_samples/snippet_metadata_google.cloud.confidentialcomputing.v1.json b/packages/google-cloud-confidentialcomputing/samples/generated_samples/snippet_metadata_google.cloud.confidentialcomputing.v1.json index b6e7be855de2..99fec3cd1167 100644 --- a/packages/google-cloud-confidentialcomputing/samples/generated_samples/snippet_metadata_google.cloud.confidentialcomputing.v1.json +++ b/packages/google-cloud-confidentialcomputing/samples/generated_samples/snippet_metadata_google.cloud.confidentialcomputing.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-confidentialcomputing", - "version": "0.4.14" + "version": "0.4.15" }, "snippets": [ { diff --git a/packages/google-cloud-confidentialcomputing/tests/unit/gapic/confidentialcomputing_v1/test_confidential_computing.py b/packages/google-cloud-confidentialcomputing/tests/unit/gapic/confidentialcomputing_v1/test_confidential_computing.py index 5b21b4407e06..3c51a1f0314b 100644 --- a/packages/google-cloud-confidentialcomputing/tests/unit/gapic/confidentialcomputing_v1/test_confidential_computing.py +++ b/packages/google-cloud-confidentialcomputing/tests/unit/gapic/confidentialcomputing_v1/test_confidential_computing.py @@ -62,6 +62,13 @@ ) from google.cloud.confidentialcomputing_v1.types import service +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -337,6 +344,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = ConfidentialComputingClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = ConfidentialComputingClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -2428,10 +2478,14 @@ def test_create_challenge_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ConfidentialComputingRestInterceptor, "post_create_challenge" ) as post, mock.patch.object( + transports.ConfidentialComputingRestInterceptor, + "post_create_challenge_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ConfidentialComputingRestInterceptor, "pre_create_challenge" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.CreateChallengeRequest.pb(service.CreateChallengeRequest()) transcode.return_value = { "method": "post", @@ -2453,6 +2507,7 @@ def test_create_challenge_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = service.Challenge() + post_with_metadata.return_value = service.Challenge(), metadata client.create_challenge( request, @@ -2464,6 +2519,7 @@ def test_create_challenge_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_verify_attestation_rest_bad_request( @@ -2552,10 +2608,14 @@ def test_verify_attestation_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ConfidentialComputingRestInterceptor, "post_verify_attestation" ) as post, mock.patch.object( + transports.ConfidentialComputingRestInterceptor, + "post_verify_attestation_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ConfidentialComputingRestInterceptor, "pre_verify_attestation" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.VerifyAttestationRequest.pb( service.VerifyAttestationRequest() ) @@ -2581,6 +2641,7 @@ def test_verify_attestation_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = service.VerifyAttestationResponse() + post_with_metadata.return_value = service.VerifyAttestationResponse(), metadata client.verify_attestation( request, @@ -2592,6 +2653,7 @@ def test_verify_attestation_rest_interceptors(null_interceptor): 
pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationRequest): diff --git a/packages/google-cloud-config/CHANGELOG.md b/packages/google-cloud-config/CHANGELOG.md index c75d29d8a8c3..a9a66f6c039c 100644 --- a/packages/google-cloud-config/CHANGELOG.md +++ b/packages/google-cloud-config/CHANGELOG.md @@ -1,5 +1,13 @@ # Changelog +## [0.1.15](https://github.com/googleapis/google-cloud-python/compare/google-cloud-config-v0.1.14...google-cloud-config-v0.1.15) (2025-02-12) + + +### Features + +* Add REST Interceptors which support reading metadata ([e22e2bd](https://github.com/googleapis/google-cloud-python/commit/e22e2bde55d11d2f85e9d2caf1d152a4027f88cf)) +* Add support for reading selective GAPIC generation methods from service YAML ([e22e2bd](https://github.com/googleapis/google-cloud-python/commit/e22e2bde55d11d2f85e9d2caf1d152a4027f88cf)) + ## [0.1.14](https://github.com/googleapis/google-cloud-python/compare/google-cloud-config-v0.1.13...google-cloud-config-v0.1.14) (2024-12-12) diff --git a/packages/google-cloud-config/google/cloud/config/gapic_version.py b/packages/google-cloud-config/google/cloud/config/gapic_version.py index 7a4d810a47da..564cdfade642 100644 --- a/packages/google-cloud-config/google/cloud/config/gapic_version.py +++ b/packages/google-cloud-config/google/cloud/config/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.1.14" # {x-release-please-version} +__version__ = "0.1.15" # {x-release-please-version} diff --git a/packages/google-cloud-config/google/cloud/config_v1/gapic_version.py b/packages/google-cloud-config/google/cloud/config_v1/gapic_version.py index 7a4d810a47da..564cdfade642 100644 --- a/packages/google-cloud-config/google/cloud/config_v1/gapic_version.py +++ b/packages/google-cloud-config/google/cloud/config_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.1.14" # {x-release-please-version} +__version__ = "0.1.15" # {x-release-please-version} diff --git a/packages/google-cloud-config/google/cloud/config_v1/services/config/client.py b/packages/google-cloud-config/google/cloud/config_v1/services/config/client.py index faa3c74eea47..5c73b07e4991 100644 --- a/packages/google-cloud-config/google/cloud/config_v1/services/config/client.py +++ b/packages/google-cloud-config/google/cloud/config_v1/services/config/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -635,6 +637,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. 
+ """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -3514,16 +3543,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -3569,16 +3602,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def delete_operation( self, @@ -3801,16 +3838,20 @@ def set_iam_policy( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_iam_policy( self, @@ -3923,16 +3964,20 @@ def get_iam_policy( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def test_iam_permissions( self, @@ -3983,16 +4028,20 @@ def test_iam_permissions( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_location( self, @@ -4038,16 +4087,20 @@ def get_location( # Validate the universe domain. self._validate_universe_domain() - # Send the request. 
- response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def list_locations( self, @@ -4093,16 +4146,20 @@ def list_locations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-config/google/cloud/config_v1/services/config/transports/rest.py b/packages/google-cloud-config/google/cloud/config_v1/services/config/transports/rest.py index ee4e755ffdde..83acdb1d3108 100644 --- a/packages/google-cloud-config/google/cloud/config_v1/services/config/transports/rest.py +++ b/packages/google-cloud-config/google/cloud/config_v1/services/config/transports/rest.py @@ -276,12 +276,35 @@ def post_create_deployment( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_deployment - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_deployment_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Config server but before - it is returned to user code. + it is returned to user code. This `post_create_deployment` interceptor runs + before the `post_create_deployment_with_metadata` interceptor. """ return response + def post_create_deployment_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_deployment + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Config server but before it is returned to user code. + + We recommend only using this `post_create_deployment_with_metadata` + interceptor in new development instead of the `post_create_deployment` interceptor. + When both interceptors are used, this `post_create_deployment_with_metadata` interceptor runs after the + `post_create_deployment` interceptor. The (possibly modified) response returned by + `post_create_deployment` will be passed to + `post_create_deployment_with_metadata`. + """ + return response, metadata + def pre_create_preview( self, request: config.CreatePreviewRequest, @@ -299,12 +322,35 @@ def post_create_preview( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_preview - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_preview_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Config server but before - it is returned to user code. + it is returned to user code. This `post_create_preview` interceptor runs + before the `post_create_preview_with_metadata` interceptor. 
""" return response + def post_create_preview_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_preview + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Config server but before it is returned to user code. + + We recommend only using this `post_create_preview_with_metadata` + interceptor in new development instead of the `post_create_preview` interceptor. + When both interceptors are used, this `post_create_preview_with_metadata` interceptor runs after the + `post_create_preview` interceptor. The (possibly modified) response returned by + `post_create_preview` will be passed to + `post_create_preview_with_metadata`. + """ + return response, metadata + def pre_delete_deployment( self, request: config.DeleteDeploymentRequest, @@ -322,12 +368,35 @@ def post_delete_deployment( ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_deployment - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_deployment_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Config server but before - it is returned to user code. + it is returned to user code. This `post_delete_deployment` interceptor runs + before the `post_delete_deployment_with_metadata` interceptor. """ return response + def post_delete_deployment_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_deployment + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Config server but before it is returned to user code. + + We recommend only using this `post_delete_deployment_with_metadata` + interceptor in new development instead of the `post_delete_deployment` interceptor. + When both interceptors are used, this `post_delete_deployment_with_metadata` interceptor runs after the + `post_delete_deployment` interceptor. The (possibly modified) response returned by + `post_delete_deployment` will be passed to + `post_delete_deployment_with_metadata`. + """ + return response, metadata + def pre_delete_preview( self, request: config.DeletePreviewRequest, @@ -345,12 +414,35 @@ def post_delete_preview( ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_preview - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_preview_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Config server but before - it is returned to user code. + it is returned to user code. This `post_delete_preview` interceptor runs + before the `post_delete_preview_with_metadata` interceptor. """ return response + def post_delete_preview_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_preview + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Config server but before it is returned to user code. 
+ + We recommend only using this `post_delete_preview_with_metadata` + interceptor in new development instead of the `post_delete_preview` interceptor. + When both interceptors are used, this `post_delete_preview_with_metadata` interceptor runs after the + `post_delete_preview` interceptor. The (possibly modified) response returned by + `post_delete_preview` will be passed to + `post_delete_preview_with_metadata`. + """ + return response, metadata + def pre_delete_statefile( self, request: config.DeleteStatefileRequest, @@ -382,12 +474,35 @@ def post_export_deployment_statefile( ) -> config.Statefile: """Post-rpc interceptor for export_deployment_statefile - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_export_deployment_statefile_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Config server but before - it is returned to user code. + it is returned to user code. This `post_export_deployment_statefile` interceptor runs + before the `post_export_deployment_statefile_with_metadata` interceptor. """ return response + def post_export_deployment_statefile_with_metadata( + self, + response: config.Statefile, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[config.Statefile, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for export_deployment_statefile + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Config server but before it is returned to user code. + + We recommend only using this `post_export_deployment_statefile_with_metadata` + interceptor in new development instead of the `post_export_deployment_statefile` interceptor. + When both interceptors are used, this `post_export_deployment_statefile_with_metadata` interceptor runs after the + `post_export_deployment_statefile` interceptor. The (possibly modified) response returned by + `post_export_deployment_statefile` will be passed to + `post_export_deployment_statefile_with_metadata`. + """ + return response, metadata + def pre_export_lock_info( self, request: config.ExportLockInfoRequest, @@ -403,12 +518,35 @@ def pre_export_lock_info( def post_export_lock_info(self, response: config.LockInfo) -> config.LockInfo: """Post-rpc interceptor for export_lock_info - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_export_lock_info_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Config server but before - it is returned to user code. + it is returned to user code. This `post_export_lock_info` interceptor runs + before the `post_export_lock_info_with_metadata` interceptor. """ return response + def post_export_lock_info_with_metadata( + self, + response: config.LockInfo, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[config.LockInfo, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for export_lock_info + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Config server but before it is returned to user code. + + We recommend only using this `post_export_lock_info_with_metadata` + interceptor in new development instead of the `post_export_lock_info` interceptor. + When both interceptors are used, this `post_export_lock_info_with_metadata` interceptor runs after the + `post_export_lock_info` interceptor. 
The (possibly modified) response returned by + `post_export_lock_info` will be passed to + `post_export_lock_info_with_metadata`. + """ + return response, metadata + def pre_export_preview_result( self, request: config.ExportPreviewResultRequest, @@ -428,12 +566,37 @@ def post_export_preview_result( ) -> config.ExportPreviewResultResponse: """Post-rpc interceptor for export_preview_result - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_export_preview_result_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Config server but before - it is returned to user code. + it is returned to user code. This `post_export_preview_result` interceptor runs + before the `post_export_preview_result_with_metadata` interceptor. """ return response + def post_export_preview_result_with_metadata( + self, + response: config.ExportPreviewResultResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + config.ExportPreviewResultResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for export_preview_result + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Config server but before it is returned to user code. + + We recommend only using this `post_export_preview_result_with_metadata` + interceptor in new development instead of the `post_export_preview_result` interceptor. + When both interceptors are used, this `post_export_preview_result_with_metadata` interceptor runs after the + `post_export_preview_result` interceptor. The (possibly modified) response returned by + `post_export_preview_result` will be passed to + `post_export_preview_result_with_metadata`. + """ + return response, metadata + def pre_export_revision_statefile( self, request: config.ExportRevisionStatefileRequest, @@ -453,12 +616,35 @@ def post_export_revision_statefile( ) -> config.Statefile: """Post-rpc interceptor for export_revision_statefile - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_export_revision_statefile_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Config server but before - it is returned to user code. + it is returned to user code. This `post_export_revision_statefile` interceptor runs + before the `post_export_revision_statefile_with_metadata` interceptor. """ return response + def post_export_revision_statefile_with_metadata( + self, + response: config.Statefile, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[config.Statefile, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for export_revision_statefile + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Config server but before it is returned to user code. + + We recommend only using this `post_export_revision_statefile_with_metadata` + interceptor in new development instead of the `post_export_revision_statefile` interceptor. + When both interceptors are used, this `post_export_revision_statefile_with_metadata` interceptor runs after the + `post_export_revision_statefile` interceptor. The (possibly modified) response returned by + `post_export_revision_statefile` will be passed to + `post_export_revision_statefile_with_metadata`. 
+ """ + return response, metadata + def pre_get_deployment( self, request: config.GetDeploymentRequest, @@ -474,12 +660,35 @@ def pre_get_deployment( def post_get_deployment(self, response: config.Deployment) -> config.Deployment: """Post-rpc interceptor for get_deployment - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_deployment_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Config server but before - it is returned to user code. + it is returned to user code. This `post_get_deployment` interceptor runs + before the `post_get_deployment_with_metadata` interceptor. """ return response + def post_get_deployment_with_metadata( + self, + response: config.Deployment, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[config.Deployment, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_deployment + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Config server but before it is returned to user code. + + We recommend only using this `post_get_deployment_with_metadata` + interceptor in new development instead of the `post_get_deployment` interceptor. + When both interceptors are used, this `post_get_deployment_with_metadata` interceptor runs after the + `post_get_deployment` interceptor. The (possibly modified) response returned by + `post_get_deployment` will be passed to + `post_get_deployment_with_metadata`. + """ + return response, metadata + def pre_get_preview( self, request: config.GetPreviewRequest, @@ -495,12 +704,35 @@ def pre_get_preview( def post_get_preview(self, response: config.Preview) -> config.Preview: """Post-rpc interceptor for get_preview - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_preview_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Config server but before - it is returned to user code. + it is returned to user code. This `post_get_preview` interceptor runs + before the `post_get_preview_with_metadata` interceptor. """ return response + def post_get_preview_with_metadata( + self, + response: config.Preview, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[config.Preview, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_preview + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Config server but before it is returned to user code. + + We recommend only using this `post_get_preview_with_metadata` + interceptor in new development instead of the `post_get_preview` interceptor. + When both interceptors are used, this `post_get_preview_with_metadata` interceptor runs after the + `post_get_preview` interceptor. The (possibly modified) response returned by + `post_get_preview` will be passed to + `post_get_preview_with_metadata`. + """ + return response, metadata + def pre_get_resource( self, request: config.GetResourceRequest, @@ -516,12 +748,35 @@ def pre_get_resource( def post_get_resource(self, response: config.Resource) -> config.Resource: """Post-rpc interceptor for get_resource - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_resource_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response after it is returned by the Config server but before - it is returned to user code. + it is returned to user code. This `post_get_resource` interceptor runs + before the `post_get_resource_with_metadata` interceptor. """ return response + def post_get_resource_with_metadata( + self, + response: config.Resource, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[config.Resource, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_resource + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Config server but before it is returned to user code. + + We recommend only using this `post_get_resource_with_metadata` + interceptor in new development instead of the `post_get_resource` interceptor. + When both interceptors are used, this `post_get_resource_with_metadata` interceptor runs after the + `post_get_resource` interceptor. The (possibly modified) response returned by + `post_get_resource` will be passed to + `post_get_resource_with_metadata`. + """ + return response, metadata + def pre_get_revision( self, request: config.GetRevisionRequest, @@ -537,12 +792,35 @@ def pre_get_revision( def post_get_revision(self, response: config.Revision) -> config.Revision: """Post-rpc interceptor for get_revision - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_revision_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Config server but before - it is returned to user code. + it is returned to user code. This `post_get_revision` interceptor runs + before the `post_get_revision_with_metadata` interceptor. """ return response + def post_get_revision_with_metadata( + self, + response: config.Revision, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[config.Revision, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_revision + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Config server but before it is returned to user code. + + We recommend only using this `post_get_revision_with_metadata` + interceptor in new development instead of the `post_get_revision` interceptor. + When both interceptors are used, this `post_get_revision_with_metadata` interceptor runs after the + `post_get_revision` interceptor. The (possibly modified) response returned by + `post_get_revision` will be passed to + `post_get_revision_with_metadata`. + """ + return response, metadata + def pre_get_terraform_version( self, request: config.GetTerraformVersionRequest, @@ -562,12 +840,35 @@ def post_get_terraform_version( ) -> config.TerraformVersion: """Post-rpc interceptor for get_terraform_version - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_terraform_version_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Config server but before - it is returned to user code. + it is returned to user code. This `post_get_terraform_version` interceptor runs + before the `post_get_terraform_version_with_metadata` interceptor. 
""" return response + def post_get_terraform_version_with_metadata( + self, + response: config.TerraformVersion, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[config.TerraformVersion, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_terraform_version + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Config server but before it is returned to user code. + + We recommend only using this `post_get_terraform_version_with_metadata` + interceptor in new development instead of the `post_get_terraform_version` interceptor. + When both interceptors are used, this `post_get_terraform_version_with_metadata` interceptor runs after the + `post_get_terraform_version` interceptor. The (possibly modified) response returned by + `post_get_terraform_version` will be passed to + `post_get_terraform_version_with_metadata`. + """ + return response, metadata + def pre_import_statefile( self, request: config.ImportStatefileRequest, @@ -583,12 +884,35 @@ def pre_import_statefile( def post_import_statefile(self, response: config.Statefile) -> config.Statefile: """Post-rpc interceptor for import_statefile - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_import_statefile_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Config server but before - it is returned to user code. + it is returned to user code. This `post_import_statefile` interceptor runs + before the `post_import_statefile_with_metadata` interceptor. """ return response + def post_import_statefile_with_metadata( + self, + response: config.Statefile, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[config.Statefile, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for import_statefile + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Config server but before it is returned to user code. + + We recommend only using this `post_import_statefile_with_metadata` + interceptor in new development instead of the `post_import_statefile` interceptor. + When both interceptors are used, this `post_import_statefile_with_metadata` interceptor runs after the + `post_import_statefile` interceptor. The (possibly modified) response returned by + `post_import_statefile` will be passed to + `post_import_statefile_with_metadata`. + """ + return response, metadata + def pre_list_deployments( self, request: config.ListDeploymentsRequest, @@ -606,12 +930,35 @@ def post_list_deployments( ) -> config.ListDeploymentsResponse: """Post-rpc interceptor for list_deployments - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_deployments_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Config server but before - it is returned to user code. + it is returned to user code. This `post_list_deployments` interceptor runs + before the `post_list_deployments_with_metadata` interceptor. 
""" return response + def post_list_deployments_with_metadata( + self, + response: config.ListDeploymentsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[config.ListDeploymentsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_deployments + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Config server but before it is returned to user code. + + We recommend only using this `post_list_deployments_with_metadata` + interceptor in new development instead of the `post_list_deployments` interceptor. + When both interceptors are used, this `post_list_deployments_with_metadata` interceptor runs after the + `post_list_deployments` interceptor. The (possibly modified) response returned by + `post_list_deployments` will be passed to + `post_list_deployments_with_metadata`. + """ + return response, metadata + def pre_list_previews( self, request: config.ListPreviewsRequest, @@ -629,12 +976,35 @@ def post_list_previews( ) -> config.ListPreviewsResponse: """Post-rpc interceptor for list_previews - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_previews_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Config server but before - it is returned to user code. + it is returned to user code. This `post_list_previews` interceptor runs + before the `post_list_previews_with_metadata` interceptor. """ return response + def post_list_previews_with_metadata( + self, + response: config.ListPreviewsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[config.ListPreviewsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_previews + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Config server but before it is returned to user code. + + We recommend only using this `post_list_previews_with_metadata` + interceptor in new development instead of the `post_list_previews` interceptor. + When both interceptors are used, this `post_list_previews_with_metadata` interceptor runs after the + `post_list_previews` interceptor. The (possibly modified) response returned by + `post_list_previews` will be passed to + `post_list_previews_with_metadata`. + """ + return response, metadata + def pre_list_resources( self, request: config.ListResourcesRequest, @@ -652,12 +1022,35 @@ def post_list_resources( ) -> config.ListResourcesResponse: """Post-rpc interceptor for list_resources - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_resources_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Config server but before - it is returned to user code. + it is returned to user code. This `post_list_resources` interceptor runs + before the `post_list_resources_with_metadata` interceptor. """ return response + def post_list_resources_with_metadata( + self, + response: config.ListResourcesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[config.ListResourcesResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_resources + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Config server but before it is returned to user code. 
+ + We recommend only using this `post_list_resources_with_metadata` + interceptor in new development instead of the `post_list_resources` interceptor. + When both interceptors are used, this `post_list_resources_with_metadata` interceptor runs after the + `post_list_resources` interceptor. The (possibly modified) response returned by + `post_list_resources` will be passed to + `post_list_resources_with_metadata`. + """ + return response, metadata + def pre_list_revisions( self, request: config.ListRevisionsRequest, @@ -675,12 +1068,35 @@ def post_list_revisions( ) -> config.ListRevisionsResponse: """Post-rpc interceptor for list_revisions - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_revisions_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Config server but before - it is returned to user code. + it is returned to user code. This `post_list_revisions` interceptor runs + before the `post_list_revisions_with_metadata` interceptor. """ return response + def post_list_revisions_with_metadata( + self, + response: config.ListRevisionsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[config.ListRevisionsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_revisions + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Config server but before it is returned to user code. + + We recommend only using this `post_list_revisions_with_metadata` + interceptor in new development instead of the `post_list_revisions` interceptor. + When both interceptors are used, this `post_list_revisions_with_metadata` interceptor runs after the + `post_list_revisions` interceptor. The (possibly modified) response returned by + `post_list_revisions` will be passed to + `post_list_revisions_with_metadata`. + """ + return response, metadata + def pre_list_terraform_versions( self, request: config.ListTerraformVersionsRequest, @@ -700,12 +1116,37 @@ def post_list_terraform_versions( ) -> config.ListTerraformVersionsResponse: """Post-rpc interceptor for list_terraform_versions - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_terraform_versions_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Config server but before - it is returned to user code. + it is returned to user code. This `post_list_terraform_versions` interceptor runs + before the `post_list_terraform_versions_with_metadata` interceptor. """ return response + def post_list_terraform_versions_with_metadata( + self, + response: config.ListTerraformVersionsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + config.ListTerraformVersionsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_terraform_versions + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Config server but before it is returned to user code. + + We recommend only using this `post_list_terraform_versions_with_metadata` + interceptor in new development instead of the `post_list_terraform_versions` interceptor. + When both interceptors are used, this `post_list_terraform_versions_with_metadata` interceptor runs after the + `post_list_terraform_versions` interceptor. 
The (possibly modified) response returned by + `post_list_terraform_versions` will be passed to + `post_list_terraform_versions_with_metadata`. + """ + return response, metadata + def pre_lock_deployment( self, request: config.LockDeploymentRequest, @@ -723,12 +1164,35 @@ def post_lock_deployment( ) -> operations_pb2.Operation: """Post-rpc interceptor for lock_deployment - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_lock_deployment_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Config server but before - it is returned to user code. + it is returned to user code. This `post_lock_deployment` interceptor runs + before the `post_lock_deployment_with_metadata` interceptor. """ return response + def post_lock_deployment_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for lock_deployment + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Config server but before it is returned to user code. + + We recommend only using this `post_lock_deployment_with_metadata` + interceptor in new development instead of the `post_lock_deployment` interceptor. + When both interceptors are used, this `post_lock_deployment_with_metadata` interceptor runs after the + `post_lock_deployment` interceptor. The (possibly modified) response returned by + `post_lock_deployment` will be passed to + `post_lock_deployment_with_metadata`. + """ + return response, metadata + def pre_unlock_deployment( self, request: config.UnlockDeploymentRequest, @@ -746,12 +1210,35 @@ def post_unlock_deployment( ) -> operations_pb2.Operation: """Post-rpc interceptor for unlock_deployment - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_unlock_deployment_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Config server but before - it is returned to user code. + it is returned to user code. This `post_unlock_deployment` interceptor runs + before the `post_unlock_deployment_with_metadata` interceptor. """ return response + def post_unlock_deployment_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for unlock_deployment + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Config server but before it is returned to user code. + + We recommend only using this `post_unlock_deployment_with_metadata` + interceptor in new development instead of the `post_unlock_deployment` interceptor. + When both interceptors are used, this `post_unlock_deployment_with_metadata` interceptor runs after the + `post_unlock_deployment` interceptor. The (possibly modified) response returned by + `post_unlock_deployment` will be passed to + `post_unlock_deployment_with_metadata`. + """ + return response, metadata + def pre_update_deployment( self, request: config.UpdateDeploymentRequest, @@ -769,12 +1256,35 @@ def post_update_deployment( ) -> operations_pb2.Operation: """Post-rpc interceptor for update_deployment - Override in a subclass to manipulate the response + DEPRECATED. 
Please use the `post_update_deployment_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Config server but before - it is returned to user code. + it is returned to user code. This `post_update_deployment` interceptor runs + before the `post_update_deployment_with_metadata` interceptor. """ return response + def post_update_deployment_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_deployment + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Config server but before it is returned to user code. + + We recommend only using this `post_update_deployment_with_metadata` + interceptor in new development instead of the `post_update_deployment` interceptor. + When both interceptors are used, this `post_update_deployment_with_metadata` interceptor runs after the + `post_update_deployment` interceptor. The (possibly modified) response returned by + `post_update_deployment` will be passed to + `post_update_deployment_with_metadata`. + """ + return response, metadata + def pre_get_location( self, request: locations_pb2.GetLocationRequest, @@ -1268,6 +1778,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_deployment(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_deployment_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1417,6 +1931,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_preview(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_preview_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1564,6 +2082,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_deployment(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_deployment_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1707,6 +2229,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_preview(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_preview_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1979,6 +2505,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_export_deployment_statefile(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_export_deployment_statefile_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2126,6 +2656,10 @@ def __call__( json_format.Parse(response.content, 
pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_export_lock_info(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_export_lock_info_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2276,6 +2810,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_export_preview_result(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_export_preview_result_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2430,6 +2968,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_export_revision_statefile(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_export_revision_statefile_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2575,6 +3117,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_deployment(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_deployment_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2720,6 +3266,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_preview(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_preview_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2865,6 +3415,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_resource(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_resource_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3011,6 +3565,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_revision(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_revision_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3157,6 +3715,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_terraform_version(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_terraform_version_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3312,6 +3874,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_import_statefile(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] 
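# [Editor's sketch, not part of the diff] One way the new *_with_metadata REST
# interceptors introduced above might be used. After the deprecated
# post_get_deployment hook, the transport now calls
# post_get_deployment_with_metadata(resp, response_metadata), where
# response_metadata is the list of (header, value) pairs built from
# response.headers. The subclass name and the printed output are illustrative.
from typing import Sequence, Tuple, Union

from google.cloud import config_v1
from google.cloud.config_v1.services.config.transports import ConfigRestTransport
from google.cloud.config_v1.services.config.transports.rest import ConfigRestInterceptor
from google.cloud.config_v1.types import config


class MetadataLoggingInterceptor(ConfigRestInterceptor):
    def post_get_deployment_with_metadata(
        self,
        response: config.Deployment,
        metadata: Sequence[Tuple[str, Union[str, bytes]]],
    ) -> Tuple[config.Deployment, Sequence[Tuple[str, Union[str, bytes]]]]:
        # metadata carries the HTTP response headers collected by the transport;
        # return the (possibly modified) response and metadata pair.
        print("get_deployment response headers:", dict(metadata))
        return response, metadata


client = config_v1.ConfigClient(
    transport=ConfigRestTransport(interceptor=MetadataLoggingInterceptor())
)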
+ resp, _ = self._interceptor.post_import_statefile_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3458,6 +4024,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_deployments(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_deployments_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3603,6 +4173,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_previews(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_previews_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3748,6 +4322,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_resources(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_resources_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3893,6 +4471,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_revisions(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_revisions_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4038,6 +4620,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_terraform_versions(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_terraform_versions_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4190,6 +4776,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_lock_deployment(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_lock_deployment_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4346,6 +4936,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_unlock_deployment(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_unlock_deployment_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4501,6 +5095,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_deployment(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_deployment_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: 
NO COVER diff --git a/packages/google-cloud-config/samples/generated_samples/snippet_metadata_google.cloud.config.v1.json b/packages/google-cloud-config/samples/generated_samples/snippet_metadata_google.cloud.config.v1.json index e545476a0498..3ca31ba06e74 100644 --- a/packages/google-cloud-config/samples/generated_samples/snippet_metadata_google.cloud.config.v1.json +++ b/packages/google-cloud-config/samples/generated_samples/snippet_metadata_google.cloud.config.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-config", - "version": "0.1.14" + "version": "0.1.15" }, "snippets": [ { diff --git a/packages/google-cloud-config/tests/unit/gapic/config_v1/test_config.py b/packages/google-cloud-config/tests/unit/gapic/config_v1/test_config.py index b71e304a5eff..b83fa2ee6633 100644 --- a/packages/google-cloud-config/tests/unit/gapic/config_v1/test_config.py +++ b/packages/google-cloud-config/tests/unit/gapic/config_v1/test_config.py @@ -79,6 +79,13 @@ ) from google.cloud.config_v1.types import config +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -301,6 +308,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = ConfigClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = ConfigClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -15590,10 +15640,13 @@ def test_list_deployments_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ConfigRestInterceptor, "post_list_deployments" ) as post, mock.patch.object( + transports.ConfigRestInterceptor, "post_list_deployments_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ConfigRestInterceptor, "pre_list_deployments" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = config.ListDeploymentsRequest.pb(config.ListDeploymentsRequest()) transcode.return_value = { "method": "post", @@ -15617,6 +15670,7 @@ def test_list_deployments_rest_interceptors(null_interceptor): ] pre.return_value = request, 
metadata post.return_value = config.ListDeploymentsResponse() + post_with_metadata.return_value = config.ListDeploymentsResponse(), metadata client.list_deployments( request, @@ -15628,6 +15682,7 @@ def test_list_deployments_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_deployment_rest_bad_request(request_type=config.GetDeploymentRequest): @@ -15738,10 +15793,13 @@ def test_get_deployment_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ConfigRestInterceptor, "post_get_deployment" ) as post, mock.patch.object( + transports.ConfigRestInterceptor, "post_get_deployment_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ConfigRestInterceptor, "pre_get_deployment" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = config.GetDeploymentRequest.pb(config.GetDeploymentRequest()) transcode.return_value = { "method": "post", @@ -15763,6 +15821,7 @@ def test_get_deployment_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = config.Deployment() + post_with_metadata.return_value = config.Deployment(), metadata client.get_deployment( request, @@ -15774,6 +15833,7 @@ def test_get_deployment_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_deployment_rest_bad_request( @@ -15972,10 +16032,13 @@ def test_create_deployment_rest_interceptors(null_interceptor): ), mock.patch.object( transports.ConfigRestInterceptor, "post_create_deployment" ) as post, mock.patch.object( + transports.ConfigRestInterceptor, "post_create_deployment_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ConfigRestInterceptor, "pre_create_deployment" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = config.CreateDeploymentRequest.pb(config.CreateDeploymentRequest()) transcode.return_value = { "method": "post", @@ -15997,6 +16060,7 @@ def test_create_deployment_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_deployment( request, @@ -16008,6 +16072,7 @@ def test_create_deployment_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_deployment_rest_bad_request( @@ -16210,10 +16275,13 @@ def test_update_deployment_rest_interceptors(null_interceptor): ), mock.patch.object( transports.ConfigRestInterceptor, "post_update_deployment" ) as post, mock.patch.object( + transports.ConfigRestInterceptor, "post_update_deployment_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ConfigRestInterceptor, "pre_update_deployment" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = config.UpdateDeploymentRequest.pb(config.UpdateDeploymentRequest()) transcode.return_value = { "method": "post", @@ -16235,6 +16303,7 @@ def test_update_deployment_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.update_deployment( request, @@ -16246,6 +16315,7 @@ def 
test_update_deployment_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_deployment_rest_bad_request( @@ -16324,10 +16394,13 @@ def test_delete_deployment_rest_interceptors(null_interceptor): ), mock.patch.object( transports.ConfigRestInterceptor, "post_delete_deployment" ) as post, mock.patch.object( + transports.ConfigRestInterceptor, "post_delete_deployment_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ConfigRestInterceptor, "pre_delete_deployment" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = config.DeleteDeploymentRequest.pb(config.DeleteDeploymentRequest()) transcode.return_value = { "method": "post", @@ -16349,6 +16422,7 @@ def test_delete_deployment_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.delete_deployment( request, @@ -16360,6 +16434,7 @@ def test_delete_deployment_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_revisions_rest_bad_request(request_type=config.ListRevisionsRequest): @@ -16442,10 +16517,13 @@ def test_list_revisions_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ConfigRestInterceptor, "post_list_revisions" ) as post, mock.patch.object( + transports.ConfigRestInterceptor, "post_list_revisions_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ConfigRestInterceptor, "pre_list_revisions" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = config.ListRevisionsRequest.pb(config.ListRevisionsRequest()) transcode.return_value = { "method": "post", @@ -16469,6 +16547,7 @@ def test_list_revisions_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = config.ListRevisionsResponse() + post_with_metadata.return_value = config.ListRevisionsResponse(), metadata client.list_revisions( request, @@ -16480,6 +16559,7 @@ def test_list_revisions_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_revision_rest_bad_request(request_type=config.GetRevisionRequest): @@ -16594,10 +16674,13 @@ def test_get_revision_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ConfigRestInterceptor, "post_get_revision" ) as post, mock.patch.object( + transports.ConfigRestInterceptor, "post_get_revision_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ConfigRestInterceptor, "pre_get_revision" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = config.GetRevisionRequest.pb(config.GetRevisionRequest()) transcode.return_value = { "method": "post", @@ -16619,6 +16702,7 @@ def test_get_revision_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = config.Revision() + post_with_metadata.return_value = config.Revision(), metadata client.get_revision( request, @@ -16630,6 +16714,7 @@ def test_get_revision_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def 
test_get_resource_rest_bad_request(request_type=config.GetResourceRequest): @@ -16718,10 +16803,13 @@ def test_get_resource_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ConfigRestInterceptor, "post_get_resource" ) as post, mock.patch.object( + transports.ConfigRestInterceptor, "post_get_resource_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ConfigRestInterceptor, "pre_get_resource" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = config.GetResourceRequest.pb(config.GetResourceRequest()) transcode.return_value = { "method": "post", @@ -16743,6 +16831,7 @@ def test_get_resource_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = config.Resource() + post_with_metadata.return_value = config.Resource(), metadata client.get_resource( request, @@ -16754,6 +16843,7 @@ def test_get_resource_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_resources_rest_bad_request(request_type=config.ListResourcesRequest): @@ -16840,10 +16930,13 @@ def test_list_resources_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ConfigRestInterceptor, "post_list_resources" ) as post, mock.patch.object( + transports.ConfigRestInterceptor, "post_list_resources_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ConfigRestInterceptor, "pre_list_resources" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = config.ListResourcesRequest.pb(config.ListResourcesRequest()) transcode.return_value = { "method": "post", @@ -16867,6 +16960,7 @@ def test_list_resources_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = config.ListResourcesResponse() + post_with_metadata.return_value = config.ListResourcesResponse(), metadata client.list_resources( request, @@ -16878,6 +16972,7 @@ def test_list_resources_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_export_deployment_statefile_rest_bad_request( @@ -16960,10 +17055,14 @@ def test_export_deployment_statefile_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ConfigRestInterceptor, "post_export_deployment_statefile" ) as post, mock.patch.object( + transports.ConfigRestInterceptor, + "post_export_deployment_statefile_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ConfigRestInterceptor, "pre_export_deployment_statefile" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = config.ExportDeploymentStatefileRequest.pb( config.ExportDeploymentStatefileRequest() ) @@ -16987,6 +17086,7 @@ def test_export_deployment_statefile_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = config.Statefile() + post_with_metadata.return_value = config.Statefile(), metadata client.export_deployment_statefile( request, @@ -16998,6 +17098,7 @@ def test_export_deployment_statefile_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_export_revision_statefile_rest_bad_request( @@ -17084,10 +17185,13 @@ def 
test_export_revision_statefile_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ConfigRestInterceptor, "post_export_revision_statefile" ) as post, mock.patch.object( + transports.ConfigRestInterceptor, "post_export_revision_statefile_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ConfigRestInterceptor, "pre_export_revision_statefile" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = config.ExportRevisionStatefileRequest.pb( config.ExportRevisionStatefileRequest() ) @@ -17111,6 +17215,7 @@ def test_export_revision_statefile_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = config.Statefile() + post_with_metadata.return_value = config.Statefile(), metadata client.export_revision_statefile( request, @@ -17122,6 +17227,7 @@ def test_export_revision_statefile_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_import_statefile_rest_bad_request(request_type=config.ImportStatefileRequest): @@ -17202,10 +17308,13 @@ def test_import_statefile_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ConfigRestInterceptor, "post_import_statefile" ) as post, mock.patch.object( + transports.ConfigRestInterceptor, "post_import_statefile_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ConfigRestInterceptor, "pre_import_statefile" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = config.ImportStatefileRequest.pb(config.ImportStatefileRequest()) transcode.return_value = { "method": "post", @@ -17227,6 +17336,7 @@ def test_import_statefile_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = config.Statefile() + post_with_metadata.return_value = config.Statefile(), metadata client.import_statefile( request, @@ -17238,6 +17348,7 @@ def test_import_statefile_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_statefile_rest_bad_request(request_type=config.DeleteStatefileRequest): @@ -17417,10 +17528,13 @@ def test_lock_deployment_rest_interceptors(null_interceptor): ), mock.patch.object( transports.ConfigRestInterceptor, "post_lock_deployment" ) as post, mock.patch.object( + transports.ConfigRestInterceptor, "post_lock_deployment_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ConfigRestInterceptor, "pre_lock_deployment" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = config.LockDeploymentRequest.pb(config.LockDeploymentRequest()) transcode.return_value = { "method": "post", @@ -17442,6 +17556,7 @@ def test_lock_deployment_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.lock_deployment( request, @@ -17453,6 +17568,7 @@ def test_lock_deployment_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_unlock_deployment_rest_bad_request( @@ -17531,10 +17647,13 @@ def test_unlock_deployment_rest_interceptors(null_interceptor): ), mock.patch.object( transports.ConfigRestInterceptor, 
"post_unlock_deployment" ) as post, mock.patch.object( + transports.ConfigRestInterceptor, "post_unlock_deployment_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ConfigRestInterceptor, "pre_unlock_deployment" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = config.UnlockDeploymentRequest.pb(config.UnlockDeploymentRequest()) transcode.return_value = { "method": "post", @@ -17556,6 +17675,7 @@ def test_unlock_deployment_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.unlock_deployment( request, @@ -17567,6 +17687,7 @@ def test_unlock_deployment_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_export_lock_info_rest_bad_request(request_type=config.ExportLockInfoRequest): @@ -17655,10 +17776,13 @@ def test_export_lock_info_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ConfigRestInterceptor, "post_export_lock_info" ) as post, mock.patch.object( + transports.ConfigRestInterceptor, "post_export_lock_info_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ConfigRestInterceptor, "pre_export_lock_info" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = config.ExportLockInfoRequest.pb(config.ExportLockInfoRequest()) transcode.return_value = { "method": "post", @@ -17680,6 +17804,7 @@ def test_export_lock_info_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = config.LockInfo() + post_with_metadata.return_value = config.LockInfo(), metadata client.export_lock_info( request, @@ -17691,6 +17816,7 @@ def test_export_lock_info_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_preview_rest_bad_request(request_type=config.CreatePreviewRequest): @@ -17883,10 +18009,13 @@ def test_create_preview_rest_interceptors(null_interceptor): ), mock.patch.object( transports.ConfigRestInterceptor, "post_create_preview" ) as post, mock.patch.object( + transports.ConfigRestInterceptor, "post_create_preview_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ConfigRestInterceptor, "pre_create_preview" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = config.CreatePreviewRequest.pb(config.CreatePreviewRequest()) transcode.return_value = { "method": "post", @@ -17908,6 +18037,7 @@ def test_create_preview_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_preview( request, @@ -17919,6 +18049,7 @@ def test_create_preview_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_preview_rest_bad_request(request_type=config.GetPreviewRequest): @@ -18023,10 +18154,13 @@ def test_get_preview_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ConfigRestInterceptor, "post_get_preview" ) as post, mock.patch.object( + transports.ConfigRestInterceptor, "post_get_preview_with_metadata" + ) as 
post_with_metadata, mock.patch.object( transports.ConfigRestInterceptor, "pre_get_preview" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = config.GetPreviewRequest.pb(config.GetPreviewRequest()) transcode.return_value = { "method": "post", @@ -18048,6 +18182,7 @@ def test_get_preview_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = config.Preview() + post_with_metadata.return_value = config.Preview(), metadata client.get_preview( request, @@ -18059,6 +18194,7 @@ def test_get_preview_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_previews_rest_bad_request(request_type=config.ListPreviewsRequest): @@ -18141,10 +18277,13 @@ def test_list_previews_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ConfigRestInterceptor, "post_list_previews" ) as post, mock.patch.object( + transports.ConfigRestInterceptor, "post_list_previews_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ConfigRestInterceptor, "pre_list_previews" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = config.ListPreviewsRequest.pb(config.ListPreviewsRequest()) transcode.return_value = { "method": "post", @@ -18168,6 +18307,7 @@ def test_list_previews_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = config.ListPreviewsResponse() + post_with_metadata.return_value = config.ListPreviewsResponse(), metadata client.list_previews( request, @@ -18179,6 +18319,7 @@ def test_list_previews_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_preview_rest_bad_request(request_type=config.DeletePreviewRequest): @@ -18255,10 +18396,13 @@ def test_delete_preview_rest_interceptors(null_interceptor): ), mock.patch.object( transports.ConfigRestInterceptor, "post_delete_preview" ) as post, mock.patch.object( + transports.ConfigRestInterceptor, "post_delete_preview_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ConfigRestInterceptor, "pre_delete_preview" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = config.DeletePreviewRequest.pb(config.DeletePreviewRequest()) transcode.return_value = { "method": "post", @@ -18280,6 +18424,7 @@ def test_delete_preview_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.delete_preview( request, @@ -18291,6 +18436,7 @@ def test_delete_preview_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_export_preview_result_rest_bad_request( @@ -18370,10 +18516,13 @@ def test_export_preview_result_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ConfigRestInterceptor, "post_export_preview_result" ) as post, mock.patch.object( + transports.ConfigRestInterceptor, "post_export_preview_result_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ConfigRestInterceptor, "pre_export_preview_result" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() 
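        # The remainder of this test repeats the shared interceptor-test pattern:
        # build the protobuf request, stub the transcoded call and the HTTP response,
        # set `pre.return_value` to `(request, metadata)`, give `post` a bare
        # response object, and give the new `post_with_metadata` hook a
        # `(response, metadata)` tuple, which is the shape every
        # `post_*_with_metadata` interceptor is expected to return. After the
        # client call, each of the three hooks must have run exactly once.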
pb_message = config.ExportPreviewResultRequest.pb( config.ExportPreviewResultRequest() ) @@ -18399,6 +18548,7 @@ def test_export_preview_result_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = config.ExportPreviewResultResponse() + post_with_metadata.return_value = config.ExportPreviewResultResponse(), metadata client.export_preview_result( request, @@ -18410,6 +18560,7 @@ def test_export_preview_result_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_terraform_versions_rest_bad_request( @@ -18494,10 +18645,13 @@ def test_list_terraform_versions_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ConfigRestInterceptor, "post_list_terraform_versions" ) as post, mock.patch.object( + transports.ConfigRestInterceptor, "post_list_terraform_versions_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ConfigRestInterceptor, "pre_list_terraform_versions" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = config.ListTerraformVersionsRequest.pb( config.ListTerraformVersionsRequest() ) @@ -18523,6 +18677,10 @@ def test_list_terraform_versions_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = config.ListTerraformVersionsResponse() + post_with_metadata.return_value = ( + config.ListTerraformVersionsResponse(), + metadata, + ) client.list_terraform_versions( request, @@ -18534,6 +18692,7 @@ def test_list_terraform_versions_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_terraform_version_rest_bad_request( @@ -18622,10 +18781,13 @@ def test_get_terraform_version_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ConfigRestInterceptor, "post_get_terraform_version" ) as post, mock.patch.object( + transports.ConfigRestInterceptor, "post_get_terraform_version_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ConfigRestInterceptor, "pre_get_terraform_version" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = config.GetTerraformVersionRequest.pb( config.GetTerraformVersionRequest() ) @@ -18649,6 +18811,7 @@ def test_get_terraform_version_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = config.TerraformVersion() + post_with_metadata.return_value = config.TerraformVersion(), metadata client.get_terraform_version( request, @@ -18660,6 +18823,7 @@ def test_get_terraform_version_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationRequest): diff --git a/packages/google-cloud-contact-center-insights/CHANGELOG.md b/packages/google-cloud-contact-center-insights/CHANGELOG.md index 2bde1728a19f..4a3142463e69 100644 --- a/packages/google-cloud-contact-center-insights/CHANGELOG.md +++ b/packages/google-cloud-contact-center-insights/CHANGELOG.md @@ -1,5 +1,23 @@ # Changelog +## [1.23.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-contact-center-insights-v1.22.0...google-cloud-contact-center-insights-v1.23.0) (2025-02-12) + + +### Features + +* Add REST Interceptors which support reading metadata 
([e22e2bd](https://github.com/googleapis/google-cloud-python/commit/e22e2bde55d11d2f85e9d2caf1d152a4027f88cf)) +* Add support for reading selective GAPIC generation methods from service YAML ([e22e2bd](https://github.com/googleapis/google-cloud-python/commit/e22e2bde55d11d2f85e9d2caf1d152a4027f88cf)) + +## [1.22.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-contact-center-insights-v1.21.0...google-cloud-contact-center-insights-v1.22.0) (2024-12-18) + + +### Features + +* [google-cloud-contact-center-insights] A new resource pattern value `projects/{project}/locations/{location}/authorizedViewSets/{authorized_view_set}/authorizedViews/{authorized_view}/conversations/{conversation}` added to the resource definition `contactcenterinsights.googleapis.com/Conversation` ([8963da7](https://github.com/googleapis/google-cloud-python/commit/8963da77bf07fd2d2b4058c236b769ac4df63f18)) +* A new field `agent_type` is added to message `.google.cloud.contactcenterinsights.v1.Conversation` ([8963da7](https://github.com/googleapis/google-cloud-python/commit/8963da77bf07fd2d2b4058c236b769ac4df63f18)) +* A new resource pattern value `projects/{project}/locations/{location}/authorizedViewSets/{authorized_view_set}/authorizedViews/{authorized_view}/conversations/{conversation}/analyses/{analysis}` added to the resource definition `contactcenterinsights.googleapis.com/Analysis` ([8963da7](https://github.com/googleapis/google-cloud-python/commit/8963da77bf07fd2d2b4058c236b769ac4df63f18)) +* A new resource pattern value `projects/{project}/locations/{location}/authorizedViewSets/{authorized_view_set}/authorizedViews/{authorized_view}/conversations/{conversation}/feedbackLabels/{feedback_label}` added to the resource definition `contactcenterinsights.googleapis.com/FeedbackLabel` ([8963da7](https://github.com/googleapis/google-cloud-python/commit/8963da77bf07fd2d2b4058c236b769ac4df63f18)) + ## [1.21.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-contact-center-insights-v1.20.0...google-cloud-contact-center-insights-v1.21.0) (2024-12-12) diff --git a/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights/gapic_version.py b/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights/gapic_version.py index 785067d93b3c..c8313abd74cb 100644 --- a/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights/gapic_version.py +++ b/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.21.0" # {x-release-please-version} +__version__ = "1.23.0" # {x-release-please-version} diff --git a/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/gapic_version.py b/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/gapic_version.py index 785067d93b3c..c8313abd74cb 100644 --- a/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/gapic_version.py +++ b/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "1.21.0" # {x-release-please-version} +__version__ = "1.23.0" # {x-release-please-version} diff --git a/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/services/contact_center_insights/client.py b/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/services/contact_center_insights/client.py index 3a2a7ed162bb..300aee9ac1b8 100644 --- a/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/services/contact_center_insights/client.py +++ b/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/services/contact_center_insights/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -866,6 +868,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -9731,16 +9760,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -9786,16 +9819,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. 
+ return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def cancel_operation( self, diff --git a/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/services/contact_center_insights/transports/rest.py b/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/services/contact_center_insights/transports/rest.py index 9e065717bc1b..cecff381525b 100644 --- a/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/services/contact_center_insights/transports/rest.py +++ b/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/services/contact_center_insights/transports/rest.py @@ -653,12 +653,35 @@ def post_bulk_analyze_conversations( ) -> operations_pb2.Operation: """Post-rpc interceptor for bulk_analyze_conversations - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_bulk_analyze_conversations_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ContactCenterInsights server but before - it is returned to user code. + it is returned to user code. This `post_bulk_analyze_conversations` interceptor runs + before the `post_bulk_analyze_conversations_with_metadata` interceptor. """ return response + def post_bulk_analyze_conversations_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for bulk_analyze_conversations + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ContactCenterInsights server but before it is returned to user code. + + We recommend only using this `post_bulk_analyze_conversations_with_metadata` + interceptor in new development instead of the `post_bulk_analyze_conversations` interceptor. + When both interceptors are used, this `post_bulk_analyze_conversations_with_metadata` interceptor runs after the + `post_bulk_analyze_conversations` interceptor. The (possibly modified) response returned by + `post_bulk_analyze_conversations` will be passed to + `post_bulk_analyze_conversations_with_metadata`. + """ + return response, metadata + def pre_bulk_delete_conversations( self, request: contact_center_insights.BulkDeleteConversationsRequest, @@ -679,12 +702,35 @@ def post_bulk_delete_conversations( ) -> operations_pb2.Operation: """Post-rpc interceptor for bulk_delete_conversations - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_bulk_delete_conversations_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ContactCenterInsights server but before - it is returned to user code. + it is returned to user code. This `post_bulk_delete_conversations` interceptor runs + before the `post_bulk_delete_conversations_with_metadata` interceptor. 
""" return response + def post_bulk_delete_conversations_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for bulk_delete_conversations + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ContactCenterInsights server but before it is returned to user code. + + We recommend only using this `post_bulk_delete_conversations_with_metadata` + interceptor in new development instead of the `post_bulk_delete_conversations` interceptor. + When both interceptors are used, this `post_bulk_delete_conversations_with_metadata` interceptor runs after the + `post_bulk_delete_conversations` interceptor. The (possibly modified) response returned by + `post_bulk_delete_conversations` will be passed to + `post_bulk_delete_conversations_with_metadata`. + """ + return response, metadata + def pre_bulk_download_feedback_labels( self, request: contact_center_insights.BulkDownloadFeedbackLabelsRequest, @@ -705,12 +751,35 @@ def post_bulk_download_feedback_labels( ) -> operations_pb2.Operation: """Post-rpc interceptor for bulk_download_feedback_labels - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_bulk_download_feedback_labels_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ContactCenterInsights server but before - it is returned to user code. + it is returned to user code. This `post_bulk_download_feedback_labels` interceptor runs + before the `post_bulk_download_feedback_labels_with_metadata` interceptor. """ return response + def post_bulk_download_feedback_labels_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for bulk_download_feedback_labels + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ContactCenterInsights server but before it is returned to user code. + + We recommend only using this `post_bulk_download_feedback_labels_with_metadata` + interceptor in new development instead of the `post_bulk_download_feedback_labels` interceptor. + When both interceptors are used, this `post_bulk_download_feedback_labels_with_metadata` interceptor runs after the + `post_bulk_download_feedback_labels` interceptor. The (possibly modified) response returned by + `post_bulk_download_feedback_labels` will be passed to + `post_bulk_download_feedback_labels_with_metadata`. + """ + return response, metadata + def pre_bulk_upload_feedback_labels( self, request: contact_center_insights.BulkUploadFeedbackLabelsRequest, @@ -731,12 +800,35 @@ def post_bulk_upload_feedback_labels( ) -> operations_pb2.Operation: """Post-rpc interceptor for bulk_upload_feedback_labels - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_bulk_upload_feedback_labels_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ContactCenterInsights server but before - it is returned to user code. + it is returned to user code. This `post_bulk_upload_feedback_labels` interceptor runs + before the `post_bulk_upload_feedback_labels_with_metadata` interceptor. 
""" return response + def post_bulk_upload_feedback_labels_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for bulk_upload_feedback_labels + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ContactCenterInsights server but before it is returned to user code. + + We recommend only using this `post_bulk_upload_feedback_labels_with_metadata` + interceptor in new development instead of the `post_bulk_upload_feedback_labels` interceptor. + When both interceptors are used, this `post_bulk_upload_feedback_labels_with_metadata` interceptor runs after the + `post_bulk_upload_feedback_labels` interceptor. The (possibly modified) response returned by + `post_bulk_upload_feedback_labels` will be passed to + `post_bulk_upload_feedback_labels_with_metadata`. + """ + return response, metadata + def pre_calculate_issue_model_stats( self, request: contact_center_insights.CalculateIssueModelStatsRequest, @@ -757,12 +849,38 @@ def post_calculate_issue_model_stats( ) -> contact_center_insights.CalculateIssueModelStatsResponse: """Post-rpc interceptor for calculate_issue_model_stats - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_calculate_issue_model_stats_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ContactCenterInsights server but before - it is returned to user code. + it is returned to user code. This `post_calculate_issue_model_stats` interceptor runs + before the `post_calculate_issue_model_stats_with_metadata` interceptor. """ return response + def post_calculate_issue_model_stats_with_metadata( + self, + response: contact_center_insights.CalculateIssueModelStatsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + contact_center_insights.CalculateIssueModelStatsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for calculate_issue_model_stats + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ContactCenterInsights server but before it is returned to user code. + + We recommend only using this `post_calculate_issue_model_stats_with_metadata` + interceptor in new development instead of the `post_calculate_issue_model_stats` interceptor. + When both interceptors are used, this `post_calculate_issue_model_stats_with_metadata` interceptor runs after the + `post_calculate_issue_model_stats` interceptor. The (possibly modified) response returned by + `post_calculate_issue_model_stats` will be passed to + `post_calculate_issue_model_stats_with_metadata`. + """ + return response, metadata + def pre_calculate_stats( self, request: contact_center_insights.CalculateStatsRequest, @@ -783,12 +901,38 @@ def post_calculate_stats( ) -> contact_center_insights.CalculateStatsResponse: """Post-rpc interceptor for calculate_stats - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_calculate_stats_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ContactCenterInsights server but before - it is returned to user code. + it is returned to user code. This `post_calculate_stats` interceptor runs + before the `post_calculate_stats_with_metadata` interceptor. 
""" return response + def post_calculate_stats_with_metadata( + self, + response: contact_center_insights.CalculateStatsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + contact_center_insights.CalculateStatsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for calculate_stats + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ContactCenterInsights server but before it is returned to user code. + + We recommend only using this `post_calculate_stats_with_metadata` + interceptor in new development instead of the `post_calculate_stats` interceptor. + When both interceptors are used, this `post_calculate_stats_with_metadata` interceptor runs after the + `post_calculate_stats` interceptor. The (possibly modified) response returned by + `post_calculate_stats` will be passed to + `post_calculate_stats_with_metadata`. + """ + return response, metadata + def pre_create_analysis( self, request: contact_center_insights.CreateAnalysisRequest, @@ -809,12 +953,35 @@ def post_create_analysis( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_analysis - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_analysis_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ContactCenterInsights server but before - it is returned to user code. + it is returned to user code. This `post_create_analysis` interceptor runs + before the `post_create_analysis_with_metadata` interceptor. """ return response + def post_create_analysis_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_analysis + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ContactCenterInsights server but before it is returned to user code. + + We recommend only using this `post_create_analysis_with_metadata` + interceptor in new development instead of the `post_create_analysis` interceptor. + When both interceptors are used, this `post_create_analysis_with_metadata` interceptor runs after the + `post_create_analysis` interceptor. The (possibly modified) response returned by + `post_create_analysis` will be passed to + `post_create_analysis_with_metadata`. + """ + return response, metadata + def pre_create_analysis_rule( self, request: contact_center_insights.CreateAnalysisRuleRequest, @@ -835,12 +1002,35 @@ def post_create_analysis_rule( ) -> resources.AnalysisRule: """Post-rpc interceptor for create_analysis_rule - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_analysis_rule_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ContactCenterInsights server but before - it is returned to user code. + it is returned to user code. This `post_create_analysis_rule` interceptor runs + before the `post_create_analysis_rule_with_metadata` interceptor. 
""" return response + def post_create_analysis_rule_with_metadata( + self, + response: resources.AnalysisRule, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.AnalysisRule, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_analysis_rule + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ContactCenterInsights server but before it is returned to user code. + + We recommend only using this `post_create_analysis_rule_with_metadata` + interceptor in new development instead of the `post_create_analysis_rule` interceptor. + When both interceptors are used, this `post_create_analysis_rule_with_metadata` interceptor runs after the + `post_create_analysis_rule` interceptor. The (possibly modified) response returned by + `post_create_analysis_rule` will be passed to + `post_create_analysis_rule_with_metadata`. + """ + return response, metadata + def pre_create_conversation( self, request: contact_center_insights.CreateConversationRequest, @@ -861,12 +1051,35 @@ def post_create_conversation( ) -> resources.Conversation: """Post-rpc interceptor for create_conversation - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_conversation_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ContactCenterInsights server but before - it is returned to user code. + it is returned to user code. This `post_create_conversation` interceptor runs + before the `post_create_conversation_with_metadata` interceptor. """ return response + def post_create_conversation_with_metadata( + self, + response: resources.Conversation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.Conversation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_conversation + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ContactCenterInsights server but before it is returned to user code. + + We recommend only using this `post_create_conversation_with_metadata` + interceptor in new development instead of the `post_create_conversation` interceptor. + When both interceptors are used, this `post_create_conversation_with_metadata` interceptor runs after the + `post_create_conversation` interceptor. The (possibly modified) response returned by + `post_create_conversation` will be passed to + `post_create_conversation_with_metadata`. + """ + return response, metadata + def pre_create_feedback_label( self, request: contact_center_insights.CreateFeedbackLabelRequest, @@ -887,12 +1100,35 @@ def post_create_feedback_label( ) -> resources.FeedbackLabel: """Post-rpc interceptor for create_feedback_label - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_feedback_label_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ContactCenterInsights server but before - it is returned to user code. + it is returned to user code. This `post_create_feedback_label` interceptor runs + before the `post_create_feedback_label_with_metadata` interceptor. 
""" return response + def post_create_feedback_label_with_metadata( + self, + response: resources.FeedbackLabel, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.FeedbackLabel, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_feedback_label + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ContactCenterInsights server but before it is returned to user code. + + We recommend only using this `post_create_feedback_label_with_metadata` + interceptor in new development instead of the `post_create_feedback_label` interceptor. + When both interceptors are used, this `post_create_feedback_label_with_metadata` interceptor runs after the + `post_create_feedback_label` interceptor. The (possibly modified) response returned by + `post_create_feedback_label` will be passed to + `post_create_feedback_label_with_metadata`. + """ + return response, metadata + def pre_create_issue_model( self, request: contact_center_insights.CreateIssueModelRequest, @@ -913,12 +1149,35 @@ def post_create_issue_model( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_issue_model - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_issue_model_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ContactCenterInsights server but before - it is returned to user code. + it is returned to user code. This `post_create_issue_model` interceptor runs + before the `post_create_issue_model_with_metadata` interceptor. """ return response + def post_create_issue_model_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_issue_model + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ContactCenterInsights server but before it is returned to user code. + + We recommend only using this `post_create_issue_model_with_metadata` + interceptor in new development instead of the `post_create_issue_model` interceptor. + When both interceptors are used, this `post_create_issue_model_with_metadata` interceptor runs after the + `post_create_issue_model` interceptor. The (possibly modified) response returned by + `post_create_issue_model` will be passed to + `post_create_issue_model_with_metadata`. + """ + return response, metadata + def pre_create_phrase_matcher( self, request: contact_center_insights.CreatePhraseMatcherRequest, @@ -939,12 +1198,35 @@ def post_create_phrase_matcher( ) -> resources.PhraseMatcher: """Post-rpc interceptor for create_phrase_matcher - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_phrase_matcher_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ContactCenterInsights server but before - it is returned to user code. + it is returned to user code. This `post_create_phrase_matcher` interceptor runs + before the `post_create_phrase_matcher_with_metadata` interceptor. 
""" return response + def post_create_phrase_matcher_with_metadata( + self, + response: resources.PhraseMatcher, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.PhraseMatcher, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_phrase_matcher + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ContactCenterInsights server but before it is returned to user code. + + We recommend only using this `post_create_phrase_matcher_with_metadata` + interceptor in new development instead of the `post_create_phrase_matcher` interceptor. + When both interceptors are used, this `post_create_phrase_matcher_with_metadata` interceptor runs after the + `post_create_phrase_matcher` interceptor. The (possibly modified) response returned by + `post_create_phrase_matcher` will be passed to + `post_create_phrase_matcher_with_metadata`. + """ + return response, metadata + def pre_create_qa_question( self, request: contact_center_insights.CreateQaQuestionRequest, @@ -965,12 +1247,35 @@ def post_create_qa_question( ) -> resources.QaQuestion: """Post-rpc interceptor for create_qa_question - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_qa_question_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ContactCenterInsights server but before - it is returned to user code. + it is returned to user code. This `post_create_qa_question` interceptor runs + before the `post_create_qa_question_with_metadata` interceptor. """ return response + def post_create_qa_question_with_metadata( + self, + response: resources.QaQuestion, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.QaQuestion, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_qa_question + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ContactCenterInsights server but before it is returned to user code. + + We recommend only using this `post_create_qa_question_with_metadata` + interceptor in new development instead of the `post_create_qa_question` interceptor. + When both interceptors are used, this `post_create_qa_question_with_metadata` interceptor runs after the + `post_create_qa_question` interceptor. The (possibly modified) response returned by + `post_create_qa_question` will be passed to + `post_create_qa_question_with_metadata`. + """ + return response, metadata + def pre_create_qa_scorecard( self, request: contact_center_insights.CreateQaScorecardRequest, @@ -991,12 +1296,35 @@ def post_create_qa_scorecard( ) -> resources.QaScorecard: """Post-rpc interceptor for create_qa_scorecard - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_qa_scorecard_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ContactCenterInsights server but before - it is returned to user code. + it is returned to user code. This `post_create_qa_scorecard` interceptor runs + before the `post_create_qa_scorecard_with_metadata` interceptor. 
""" return response + def post_create_qa_scorecard_with_metadata( + self, + response: resources.QaScorecard, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.QaScorecard, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_qa_scorecard + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ContactCenterInsights server but before it is returned to user code. + + We recommend only using this `post_create_qa_scorecard_with_metadata` + interceptor in new development instead of the `post_create_qa_scorecard` interceptor. + When both interceptors are used, this `post_create_qa_scorecard_with_metadata` interceptor runs after the + `post_create_qa_scorecard` interceptor. The (possibly modified) response returned by + `post_create_qa_scorecard` will be passed to + `post_create_qa_scorecard_with_metadata`. + """ + return response, metadata + def pre_create_qa_scorecard_revision( self, request: contact_center_insights.CreateQaScorecardRevisionRequest, @@ -1017,12 +1345,35 @@ def post_create_qa_scorecard_revision( ) -> resources.QaScorecardRevision: """Post-rpc interceptor for create_qa_scorecard_revision - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_qa_scorecard_revision_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ContactCenterInsights server but before - it is returned to user code. + it is returned to user code. This `post_create_qa_scorecard_revision` interceptor runs + before the `post_create_qa_scorecard_revision_with_metadata` interceptor. """ return response + def post_create_qa_scorecard_revision_with_metadata( + self, + response: resources.QaScorecardRevision, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.QaScorecardRevision, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_qa_scorecard_revision + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ContactCenterInsights server but before it is returned to user code. + + We recommend only using this `post_create_qa_scorecard_revision_with_metadata` + interceptor in new development instead of the `post_create_qa_scorecard_revision` interceptor. + When both interceptors are used, this `post_create_qa_scorecard_revision_with_metadata` interceptor runs after the + `post_create_qa_scorecard_revision` interceptor. The (possibly modified) response returned by + `post_create_qa_scorecard_revision` will be passed to + `post_create_qa_scorecard_revision_with_metadata`. + """ + return response, metadata + def pre_create_view( self, request: contact_center_insights.CreateViewRequest, @@ -1041,12 +1392,35 @@ def pre_create_view( def post_create_view(self, response: resources.View) -> resources.View: """Post-rpc interceptor for create_view - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_view_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ContactCenterInsights server but before - it is returned to user code. + it is returned to user code. This `post_create_view` interceptor runs + before the `post_create_view_with_metadata` interceptor. 
""" return response + def post_create_view_with_metadata( + self, + response: resources.View, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.View, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_view + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ContactCenterInsights server but before it is returned to user code. + + We recommend only using this `post_create_view_with_metadata` + interceptor in new development instead of the `post_create_view` interceptor. + When both interceptors are used, this `post_create_view_with_metadata` interceptor runs after the + `post_create_view` interceptor. The (possibly modified) response returned by + `post_create_view` will be passed to + `post_create_view_with_metadata`. + """ + return response, metadata + def pre_delete_analysis( self, request: contact_center_insights.DeleteAnalysisRequest, @@ -1142,12 +1516,35 @@ def post_delete_issue_model( ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_issue_model - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_issue_model_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ContactCenterInsights server but before - it is returned to user code. + it is returned to user code. This `post_delete_issue_model` interceptor runs + before the `post_delete_issue_model_with_metadata` interceptor. """ return response + def post_delete_issue_model_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_issue_model + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ContactCenterInsights server but before it is returned to user code. + + We recommend only using this `post_delete_issue_model_with_metadata` + interceptor in new development instead of the `post_delete_issue_model` interceptor. + When both interceptors are used, this `post_delete_issue_model_with_metadata` interceptor runs after the + `post_delete_issue_model` interceptor. The (possibly modified) response returned by + `post_delete_issue_model` will be passed to + `post_delete_issue_model_with_metadata`. + """ + return response, metadata + def pre_delete_phrase_matcher( self, request: contact_center_insights.DeletePhraseMatcherRequest, @@ -1243,12 +1640,35 @@ def post_deploy_issue_model( ) -> operations_pb2.Operation: """Post-rpc interceptor for deploy_issue_model - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_deploy_issue_model_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ContactCenterInsights server but before - it is returned to user code. + it is returned to user code. This `post_deploy_issue_model` interceptor runs + before the `post_deploy_issue_model_with_metadata` interceptor. 
""" return response + def post_deploy_issue_model_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for deploy_issue_model + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ContactCenterInsights server but before it is returned to user code. + + We recommend only using this `post_deploy_issue_model_with_metadata` + interceptor in new development instead of the `post_deploy_issue_model` interceptor. + When both interceptors are used, this `post_deploy_issue_model_with_metadata` interceptor runs after the + `post_deploy_issue_model` interceptor. The (possibly modified) response returned by + `post_deploy_issue_model` will be passed to + `post_deploy_issue_model_with_metadata`. + """ + return response, metadata + def pre_deploy_qa_scorecard_revision( self, request: contact_center_insights.DeployQaScorecardRevisionRequest, @@ -1269,12 +1689,35 @@ def post_deploy_qa_scorecard_revision( ) -> resources.QaScorecardRevision: """Post-rpc interceptor for deploy_qa_scorecard_revision - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_deploy_qa_scorecard_revision_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ContactCenterInsights server but before - it is returned to user code. + it is returned to user code. This `post_deploy_qa_scorecard_revision` interceptor runs + before the `post_deploy_qa_scorecard_revision_with_metadata` interceptor. """ return response + def post_deploy_qa_scorecard_revision_with_metadata( + self, + response: resources.QaScorecardRevision, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.QaScorecardRevision, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for deploy_qa_scorecard_revision + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ContactCenterInsights server but before it is returned to user code. + + We recommend only using this `post_deploy_qa_scorecard_revision_with_metadata` + interceptor in new development instead of the `post_deploy_qa_scorecard_revision` interceptor. + When both interceptors are used, this `post_deploy_qa_scorecard_revision_with_metadata` interceptor runs after the + `post_deploy_qa_scorecard_revision` interceptor. The (possibly modified) response returned by + `post_deploy_qa_scorecard_revision` will be passed to + `post_deploy_qa_scorecard_revision_with_metadata`. + """ + return response, metadata + def pre_export_insights_data( self, request: contact_center_insights.ExportInsightsDataRequest, @@ -1295,12 +1738,35 @@ def post_export_insights_data( ) -> operations_pb2.Operation: """Post-rpc interceptor for export_insights_data - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_export_insights_data_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ContactCenterInsights server but before - it is returned to user code. + it is returned to user code. This `post_export_insights_data` interceptor runs + before the `post_export_insights_data_with_metadata` interceptor. 
""" return response + def post_export_insights_data_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for export_insights_data + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ContactCenterInsights server but before it is returned to user code. + + We recommend only using this `post_export_insights_data_with_metadata` + interceptor in new development instead of the `post_export_insights_data` interceptor. + When both interceptors are used, this `post_export_insights_data_with_metadata` interceptor runs after the + `post_export_insights_data` interceptor. The (possibly modified) response returned by + `post_export_insights_data` will be passed to + `post_export_insights_data_with_metadata`. + """ + return response, metadata + def pre_export_issue_model( self, request: contact_center_insights.ExportIssueModelRequest, @@ -1321,12 +1787,35 @@ def post_export_issue_model( ) -> operations_pb2.Operation: """Post-rpc interceptor for export_issue_model - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_export_issue_model_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ContactCenterInsights server but before - it is returned to user code. + it is returned to user code. This `post_export_issue_model` interceptor runs + before the `post_export_issue_model_with_metadata` interceptor. """ return response + def post_export_issue_model_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for export_issue_model + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ContactCenterInsights server but before it is returned to user code. + + We recommend only using this `post_export_issue_model_with_metadata` + interceptor in new development instead of the `post_export_issue_model` interceptor. + When both interceptors are used, this `post_export_issue_model_with_metadata` interceptor runs after the + `post_export_issue_model` interceptor. The (possibly modified) response returned by + `post_export_issue_model` will be passed to + `post_export_issue_model_with_metadata`. + """ + return response, metadata + def pre_get_analysis( self, request: contact_center_insights.GetAnalysisRequest, @@ -1345,12 +1834,35 @@ def pre_get_analysis( def post_get_analysis(self, response: resources.Analysis) -> resources.Analysis: """Post-rpc interceptor for get_analysis - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_analysis_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ContactCenterInsights server but before - it is returned to user code. + it is returned to user code. This `post_get_analysis` interceptor runs + before the `post_get_analysis_with_metadata` interceptor. 
""" return response + def post_get_analysis_with_metadata( + self, + response: resources.Analysis, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.Analysis, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_analysis + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ContactCenterInsights server but before it is returned to user code. + + We recommend only using this `post_get_analysis_with_metadata` + interceptor in new development instead of the `post_get_analysis` interceptor. + When both interceptors are used, this `post_get_analysis_with_metadata` interceptor runs after the + `post_get_analysis` interceptor. The (possibly modified) response returned by + `post_get_analysis` will be passed to + `post_get_analysis_with_metadata`. + """ + return response, metadata + def pre_get_analysis_rule( self, request: contact_center_insights.GetAnalysisRuleRequest, @@ -1371,12 +1883,35 @@ def post_get_analysis_rule( ) -> resources.AnalysisRule: """Post-rpc interceptor for get_analysis_rule - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_analysis_rule_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ContactCenterInsights server but before - it is returned to user code. + it is returned to user code. This `post_get_analysis_rule` interceptor runs + before the `post_get_analysis_rule_with_metadata` interceptor. """ return response + def post_get_analysis_rule_with_metadata( + self, + response: resources.AnalysisRule, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.AnalysisRule, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_analysis_rule + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ContactCenterInsights server but before it is returned to user code. + + We recommend only using this `post_get_analysis_rule_with_metadata` + interceptor in new development instead of the `post_get_analysis_rule` interceptor. + When both interceptors are used, this `post_get_analysis_rule_with_metadata` interceptor runs after the + `post_get_analysis_rule` interceptor. The (possibly modified) response returned by + `post_get_analysis_rule` will be passed to + `post_get_analysis_rule_with_metadata`. + """ + return response, metadata + def pre_get_conversation( self, request: contact_center_insights.GetConversationRequest, @@ -1397,12 +1932,35 @@ def post_get_conversation( ) -> resources.Conversation: """Post-rpc interceptor for get_conversation - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_conversation_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ContactCenterInsights server but before - it is returned to user code. + it is returned to user code. This `post_get_conversation` interceptor runs + before the `post_get_conversation_with_metadata` interceptor. 
""" return response + def post_get_conversation_with_metadata( + self, + response: resources.Conversation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.Conversation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_conversation + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ContactCenterInsights server but before it is returned to user code. + + We recommend only using this `post_get_conversation_with_metadata` + interceptor in new development instead of the `post_get_conversation` interceptor. + When both interceptors are used, this `post_get_conversation_with_metadata` interceptor runs after the + `post_get_conversation` interceptor. The (possibly modified) response returned by + `post_get_conversation` will be passed to + `post_get_conversation_with_metadata`. + """ + return response, metadata + def pre_get_encryption_spec( self, request: contact_center_insights.GetEncryptionSpecRequest, @@ -1423,12 +1981,35 @@ def post_get_encryption_spec( ) -> resources.EncryptionSpec: """Post-rpc interceptor for get_encryption_spec - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_encryption_spec_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ContactCenterInsights server but before - it is returned to user code. + it is returned to user code. This `post_get_encryption_spec` interceptor runs + before the `post_get_encryption_spec_with_metadata` interceptor. """ return response + def post_get_encryption_spec_with_metadata( + self, + response: resources.EncryptionSpec, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.EncryptionSpec, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_encryption_spec + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ContactCenterInsights server but before it is returned to user code. + + We recommend only using this `post_get_encryption_spec_with_metadata` + interceptor in new development instead of the `post_get_encryption_spec` interceptor. + When both interceptors are used, this `post_get_encryption_spec_with_metadata` interceptor runs after the + `post_get_encryption_spec` interceptor. The (possibly modified) response returned by + `post_get_encryption_spec` will be passed to + `post_get_encryption_spec_with_metadata`. + """ + return response, metadata + def pre_get_feedback_label( self, request: contact_center_insights.GetFeedbackLabelRequest, @@ -1449,12 +2030,35 @@ def post_get_feedback_label( ) -> resources.FeedbackLabel: """Post-rpc interceptor for get_feedback_label - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_feedback_label_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ContactCenterInsights server but before - it is returned to user code. + it is returned to user code. This `post_get_feedback_label` interceptor runs + before the `post_get_feedback_label_with_metadata` interceptor. 
""" return response + def post_get_feedback_label_with_metadata( + self, + response: resources.FeedbackLabel, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.FeedbackLabel, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_feedback_label + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ContactCenterInsights server but before it is returned to user code. + + We recommend only using this `post_get_feedback_label_with_metadata` + interceptor in new development instead of the `post_get_feedback_label` interceptor. + When both interceptors are used, this `post_get_feedback_label_with_metadata` interceptor runs after the + `post_get_feedback_label` interceptor. The (possibly modified) response returned by + `post_get_feedback_label` will be passed to + `post_get_feedback_label_with_metadata`. + """ + return response, metadata + def pre_get_issue( self, request: contact_center_insights.GetIssueRequest, @@ -1472,12 +2076,35 @@ def pre_get_issue( def post_get_issue(self, response: resources.Issue) -> resources.Issue: """Post-rpc interceptor for get_issue - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_issue_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ContactCenterInsights server but before - it is returned to user code. + it is returned to user code. This `post_get_issue` interceptor runs + before the `post_get_issue_with_metadata` interceptor. """ return response + def post_get_issue_with_metadata( + self, + response: resources.Issue, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.Issue, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_issue + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ContactCenterInsights server but before it is returned to user code. + + We recommend only using this `post_get_issue_with_metadata` + interceptor in new development instead of the `post_get_issue` interceptor. + When both interceptors are used, this `post_get_issue_with_metadata` interceptor runs after the + `post_get_issue` interceptor. The (possibly modified) response returned by + `post_get_issue` will be passed to + `post_get_issue_with_metadata`. + """ + return response, metadata + def pre_get_issue_model( self, request: contact_center_insights.GetIssueModelRequest, @@ -1498,12 +2125,35 @@ def post_get_issue_model( ) -> resources.IssueModel: """Post-rpc interceptor for get_issue_model - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_issue_model_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ContactCenterInsights server but before - it is returned to user code. + it is returned to user code. This `post_get_issue_model` interceptor runs + before the `post_get_issue_model_with_metadata` interceptor. """ return response + def post_get_issue_model_with_metadata( + self, + response: resources.IssueModel, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.IssueModel, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_issue_model + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ContactCenterInsights server but before it is returned to user code. 
+ + We recommend only using this `post_get_issue_model_with_metadata` + interceptor in new development instead of the `post_get_issue_model` interceptor. + When both interceptors are used, this `post_get_issue_model_with_metadata` interceptor runs after the + `post_get_issue_model` interceptor. The (possibly modified) response returned by + `post_get_issue_model` will be passed to + `post_get_issue_model_with_metadata`. + """ + return response, metadata + def pre_get_phrase_matcher( self, request: contact_center_insights.GetPhraseMatcherRequest, @@ -1524,12 +2174,35 @@ def post_get_phrase_matcher( ) -> resources.PhraseMatcher: """Post-rpc interceptor for get_phrase_matcher - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_phrase_matcher_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ContactCenterInsights server but before - it is returned to user code. + it is returned to user code. This `post_get_phrase_matcher` interceptor runs + before the `post_get_phrase_matcher_with_metadata` interceptor. """ return response + def post_get_phrase_matcher_with_metadata( + self, + response: resources.PhraseMatcher, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.PhraseMatcher, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_phrase_matcher + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ContactCenterInsights server but before it is returned to user code. + + We recommend only using this `post_get_phrase_matcher_with_metadata` + interceptor in new development instead of the `post_get_phrase_matcher` interceptor. + When both interceptors are used, this `post_get_phrase_matcher_with_metadata` interceptor runs after the + `post_get_phrase_matcher` interceptor. The (possibly modified) response returned by + `post_get_phrase_matcher` will be passed to + `post_get_phrase_matcher_with_metadata`. + """ + return response, metadata + def pre_get_qa_question( self, request: contact_center_insights.GetQaQuestionRequest, @@ -1550,12 +2223,35 @@ def post_get_qa_question( ) -> resources.QaQuestion: """Post-rpc interceptor for get_qa_question - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_qa_question_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ContactCenterInsights server but before - it is returned to user code. + it is returned to user code. This `post_get_qa_question` interceptor runs + before the `post_get_qa_question_with_metadata` interceptor. """ return response + def post_get_qa_question_with_metadata( + self, + response: resources.QaQuestion, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.QaQuestion, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_qa_question + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ContactCenterInsights server but before it is returned to user code. + + We recommend only using this `post_get_qa_question_with_metadata` + interceptor in new development instead of the `post_get_qa_question` interceptor. + When both interceptors are used, this `post_get_qa_question_with_metadata` interceptor runs after the + `post_get_qa_question` interceptor. 
The (possibly modified) response returned by + `post_get_qa_question` will be passed to + `post_get_qa_question_with_metadata`. + """ + return response, metadata + def pre_get_qa_scorecard( self, request: contact_center_insights.GetQaScorecardRequest, @@ -1576,12 +2272,35 @@ def post_get_qa_scorecard( ) -> resources.QaScorecard: """Post-rpc interceptor for get_qa_scorecard - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_qa_scorecard_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ContactCenterInsights server but before - it is returned to user code. + it is returned to user code. This `post_get_qa_scorecard` interceptor runs + before the `post_get_qa_scorecard_with_metadata` interceptor. """ return response + def post_get_qa_scorecard_with_metadata( + self, + response: resources.QaScorecard, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.QaScorecard, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_qa_scorecard + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ContactCenterInsights server but before it is returned to user code. + + We recommend only using this `post_get_qa_scorecard_with_metadata` + interceptor in new development instead of the `post_get_qa_scorecard` interceptor. + When both interceptors are used, this `post_get_qa_scorecard_with_metadata` interceptor runs after the + `post_get_qa_scorecard` interceptor. The (possibly modified) response returned by + `post_get_qa_scorecard` will be passed to + `post_get_qa_scorecard_with_metadata`. + """ + return response, metadata + def pre_get_qa_scorecard_revision( self, request: contact_center_insights.GetQaScorecardRevisionRequest, @@ -1602,12 +2321,35 @@ def post_get_qa_scorecard_revision( ) -> resources.QaScorecardRevision: """Post-rpc interceptor for get_qa_scorecard_revision - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_qa_scorecard_revision_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ContactCenterInsights server but before - it is returned to user code. + it is returned to user code. This `post_get_qa_scorecard_revision` interceptor runs + before the `post_get_qa_scorecard_revision_with_metadata` interceptor. """ return response + def post_get_qa_scorecard_revision_with_metadata( + self, + response: resources.QaScorecardRevision, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.QaScorecardRevision, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_qa_scorecard_revision + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ContactCenterInsights server but before it is returned to user code. + + We recommend only using this `post_get_qa_scorecard_revision_with_metadata` + interceptor in new development instead of the `post_get_qa_scorecard_revision` interceptor. + When both interceptors are used, this `post_get_qa_scorecard_revision_with_metadata` interceptor runs after the + `post_get_qa_scorecard_revision` interceptor. The (possibly modified) response returned by + `post_get_qa_scorecard_revision` will be passed to + `post_get_qa_scorecard_revision_with_metadata`. 
+ """ + return response, metadata + def pre_get_settings( self, request: contact_center_insights.GetSettingsRequest, @@ -1626,12 +2368,35 @@ def pre_get_settings( def post_get_settings(self, response: resources.Settings) -> resources.Settings: """Post-rpc interceptor for get_settings - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_settings_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ContactCenterInsights server but before - it is returned to user code. + it is returned to user code. This `post_get_settings` interceptor runs + before the `post_get_settings_with_metadata` interceptor. """ return response + def post_get_settings_with_metadata( + self, + response: resources.Settings, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.Settings, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_settings + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ContactCenterInsights server but before it is returned to user code. + + We recommend only using this `post_get_settings_with_metadata` + interceptor in new development instead of the `post_get_settings` interceptor. + When both interceptors are used, this `post_get_settings_with_metadata` interceptor runs after the + `post_get_settings` interceptor. The (possibly modified) response returned by + `post_get_settings` will be passed to + `post_get_settings_with_metadata`. + """ + return response, metadata + def pre_get_view( self, request: contact_center_insights.GetViewRequest, @@ -1649,12 +2414,35 @@ def pre_get_view( def post_get_view(self, response: resources.View) -> resources.View: """Post-rpc interceptor for get_view - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_view_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ContactCenterInsights server but before - it is returned to user code. + it is returned to user code. This `post_get_view` interceptor runs + before the `post_get_view_with_metadata` interceptor. """ return response + def post_get_view_with_metadata( + self, + response: resources.View, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.View, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_view + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ContactCenterInsights server but before it is returned to user code. + + We recommend only using this `post_get_view_with_metadata` + interceptor in new development instead of the `post_get_view` interceptor. + When both interceptors are used, this `post_get_view_with_metadata` interceptor runs after the + `post_get_view` interceptor. The (possibly modified) response returned by + `post_get_view` will be passed to + `post_get_view_with_metadata`. + """ + return response, metadata + def pre_import_issue_model( self, request: contact_center_insights.ImportIssueModelRequest, @@ -1675,12 +2463,35 @@ def post_import_issue_model( ) -> operations_pb2.Operation: """Post-rpc interceptor for import_issue_model - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_import_issue_model_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response after it is returned by the ContactCenterInsights server but before - it is returned to user code. + it is returned to user code. This `post_import_issue_model` interceptor runs + before the `post_import_issue_model_with_metadata` interceptor. """ return response + def post_import_issue_model_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for import_issue_model + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ContactCenterInsights server but before it is returned to user code. + + We recommend only using this `post_import_issue_model_with_metadata` + interceptor in new development instead of the `post_import_issue_model` interceptor. + When both interceptors are used, this `post_import_issue_model_with_metadata` interceptor runs after the + `post_import_issue_model` interceptor. The (possibly modified) response returned by + `post_import_issue_model` will be passed to + `post_import_issue_model_with_metadata`. + """ + return response, metadata + def pre_ingest_conversations( self, request: contact_center_insights.IngestConversationsRequest, @@ -1701,12 +2512,35 @@ def post_ingest_conversations( ) -> operations_pb2.Operation: """Post-rpc interceptor for ingest_conversations - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_ingest_conversations_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ContactCenterInsights server but before - it is returned to user code. + it is returned to user code. This `post_ingest_conversations` interceptor runs + before the `post_ingest_conversations_with_metadata` interceptor. """ return response + def post_ingest_conversations_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for ingest_conversations + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ContactCenterInsights server but before it is returned to user code. + + We recommend only using this `post_ingest_conversations_with_metadata` + interceptor in new development instead of the `post_ingest_conversations` interceptor. + When both interceptors are used, this `post_ingest_conversations_with_metadata` interceptor runs after the + `post_ingest_conversations` interceptor. The (possibly modified) response returned by + `post_ingest_conversations` will be passed to + `post_ingest_conversations_with_metadata`. + """ + return response, metadata + def pre_initialize_encryption_spec( self, request: contact_center_insights.InitializeEncryptionSpecRequest, @@ -1727,12 +2561,35 @@ def post_initialize_encryption_spec( ) -> operations_pb2.Operation: """Post-rpc interceptor for initialize_encryption_spec - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_initialize_encryption_spec_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ContactCenterInsights server but before - it is returned to user code. + it is returned to user code. 
This `post_initialize_encryption_spec` interceptor runs + before the `post_initialize_encryption_spec_with_metadata` interceptor. """ return response + def post_initialize_encryption_spec_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for initialize_encryption_spec + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ContactCenterInsights server but before it is returned to user code. + + We recommend only using this `post_initialize_encryption_spec_with_metadata` + interceptor in new development instead of the `post_initialize_encryption_spec` interceptor. + When both interceptors are used, this `post_initialize_encryption_spec_with_metadata` interceptor runs after the + `post_initialize_encryption_spec` interceptor. The (possibly modified) response returned by + `post_initialize_encryption_spec` will be passed to + `post_initialize_encryption_spec_with_metadata`. + """ + return response, metadata + def pre_list_all_feedback_labels( self, request: contact_center_insights.ListAllFeedbackLabelsRequest, @@ -1753,12 +2610,38 @@ def post_list_all_feedback_labels( ) -> contact_center_insights.ListAllFeedbackLabelsResponse: """Post-rpc interceptor for list_all_feedback_labels - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_all_feedback_labels_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ContactCenterInsights server but before - it is returned to user code. + it is returned to user code. This `post_list_all_feedback_labels` interceptor runs + before the `post_list_all_feedback_labels_with_metadata` interceptor. """ return response + def post_list_all_feedback_labels_with_metadata( + self, + response: contact_center_insights.ListAllFeedbackLabelsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + contact_center_insights.ListAllFeedbackLabelsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_all_feedback_labels + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ContactCenterInsights server but before it is returned to user code. + + We recommend only using this `post_list_all_feedback_labels_with_metadata` + interceptor in new development instead of the `post_list_all_feedback_labels` interceptor. + When both interceptors are used, this `post_list_all_feedback_labels_with_metadata` interceptor runs after the + `post_list_all_feedback_labels` interceptor. The (possibly modified) response returned by + `post_list_all_feedback_labels` will be passed to + `post_list_all_feedback_labels_with_metadata`. + """ + return response, metadata + def pre_list_analyses( self, request: contact_center_insights.ListAnalysesRequest, @@ -1779,12 +2662,38 @@ def post_list_analyses( ) -> contact_center_insights.ListAnalysesResponse: """Post-rpc interceptor for list_analyses - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_analyses_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ContactCenterInsights server but before - it is returned to user code. + it is returned to user code. 
This `post_list_analyses` interceptor runs + before the `post_list_analyses_with_metadata` interceptor. """ return response + def post_list_analyses_with_metadata( + self, + response: contact_center_insights.ListAnalysesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + contact_center_insights.ListAnalysesResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_analyses + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ContactCenterInsights server but before it is returned to user code. + + We recommend only using this `post_list_analyses_with_metadata` + interceptor in new development instead of the `post_list_analyses` interceptor. + When both interceptors are used, this `post_list_analyses_with_metadata` interceptor runs after the + `post_list_analyses` interceptor. The (possibly modified) response returned by + `post_list_analyses` will be passed to + `post_list_analyses_with_metadata`. + """ + return response, metadata + def pre_list_analysis_rules( self, request: contact_center_insights.ListAnalysisRulesRequest, @@ -1805,12 +2714,38 @@ def post_list_analysis_rules( ) -> contact_center_insights.ListAnalysisRulesResponse: """Post-rpc interceptor for list_analysis_rules - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_analysis_rules_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ContactCenterInsights server but before - it is returned to user code. + it is returned to user code. This `post_list_analysis_rules` interceptor runs + before the `post_list_analysis_rules_with_metadata` interceptor. """ return response + def post_list_analysis_rules_with_metadata( + self, + response: contact_center_insights.ListAnalysisRulesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + contact_center_insights.ListAnalysisRulesResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_analysis_rules + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ContactCenterInsights server but before it is returned to user code. + + We recommend only using this `post_list_analysis_rules_with_metadata` + interceptor in new development instead of the `post_list_analysis_rules` interceptor. + When both interceptors are used, this `post_list_analysis_rules_with_metadata` interceptor runs after the + `post_list_analysis_rules` interceptor. The (possibly modified) response returned by + `post_list_analysis_rules` will be passed to + `post_list_analysis_rules_with_metadata`. + """ + return response, metadata + def pre_list_conversations( self, request: contact_center_insights.ListConversationsRequest, @@ -1831,12 +2766,38 @@ def post_list_conversations( ) -> contact_center_insights.ListConversationsResponse: """Post-rpc interceptor for list_conversations - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_conversations_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ContactCenterInsights server but before - it is returned to user code. + it is returned to user code. This `post_list_conversations` interceptor runs + before the `post_list_conversations_with_metadata` interceptor. 
""" return response + def post_list_conversations_with_metadata( + self, + response: contact_center_insights.ListConversationsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + contact_center_insights.ListConversationsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_conversations + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ContactCenterInsights server but before it is returned to user code. + + We recommend only using this `post_list_conversations_with_metadata` + interceptor in new development instead of the `post_list_conversations` interceptor. + When both interceptors are used, this `post_list_conversations_with_metadata` interceptor runs after the + `post_list_conversations` interceptor. The (possibly modified) response returned by + `post_list_conversations` will be passed to + `post_list_conversations_with_metadata`. + """ + return response, metadata + def pre_list_feedback_labels( self, request: contact_center_insights.ListFeedbackLabelsRequest, @@ -1857,12 +2818,38 @@ def post_list_feedback_labels( ) -> contact_center_insights.ListFeedbackLabelsResponse: """Post-rpc interceptor for list_feedback_labels - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_feedback_labels_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ContactCenterInsights server but before - it is returned to user code. + it is returned to user code. This `post_list_feedback_labels` interceptor runs + before the `post_list_feedback_labels_with_metadata` interceptor. """ return response + def post_list_feedback_labels_with_metadata( + self, + response: contact_center_insights.ListFeedbackLabelsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + contact_center_insights.ListFeedbackLabelsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_feedback_labels + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ContactCenterInsights server but before it is returned to user code. + + We recommend only using this `post_list_feedback_labels_with_metadata` + interceptor in new development instead of the `post_list_feedback_labels` interceptor. + When both interceptors are used, this `post_list_feedback_labels_with_metadata` interceptor runs after the + `post_list_feedback_labels` interceptor. The (possibly modified) response returned by + `post_list_feedback_labels` will be passed to + `post_list_feedback_labels_with_metadata`. + """ + return response, metadata + def pre_list_issue_models( self, request: contact_center_insights.ListIssueModelsRequest, @@ -1883,12 +2870,38 @@ def post_list_issue_models( ) -> contact_center_insights.ListIssueModelsResponse: """Post-rpc interceptor for list_issue_models - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_issue_models_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ContactCenterInsights server but before - it is returned to user code. + it is returned to user code. This `post_list_issue_models` interceptor runs + before the `post_list_issue_models_with_metadata` interceptor. 
""" return response + def post_list_issue_models_with_metadata( + self, + response: contact_center_insights.ListIssueModelsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + contact_center_insights.ListIssueModelsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_issue_models + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ContactCenterInsights server but before it is returned to user code. + + We recommend only using this `post_list_issue_models_with_metadata` + interceptor in new development instead of the `post_list_issue_models` interceptor. + When both interceptors are used, this `post_list_issue_models_with_metadata` interceptor runs after the + `post_list_issue_models` interceptor. The (possibly modified) response returned by + `post_list_issue_models` will be passed to + `post_list_issue_models_with_metadata`. + """ + return response, metadata + def pre_list_issues( self, request: contact_center_insights.ListIssuesRequest, @@ -1909,12 +2922,38 @@ def post_list_issues( ) -> contact_center_insights.ListIssuesResponse: """Post-rpc interceptor for list_issues - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_issues_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ContactCenterInsights server but before - it is returned to user code. + it is returned to user code. This `post_list_issues` interceptor runs + before the `post_list_issues_with_metadata` interceptor. """ return response + def post_list_issues_with_metadata( + self, + response: contact_center_insights.ListIssuesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + contact_center_insights.ListIssuesResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_issues + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ContactCenterInsights server but before it is returned to user code. + + We recommend only using this `post_list_issues_with_metadata` + interceptor in new development instead of the `post_list_issues` interceptor. + When both interceptors are used, this `post_list_issues_with_metadata` interceptor runs after the + `post_list_issues` interceptor. The (possibly modified) response returned by + `post_list_issues` will be passed to + `post_list_issues_with_metadata`. + """ + return response, metadata + def pre_list_phrase_matchers( self, request: contact_center_insights.ListPhraseMatchersRequest, @@ -1935,12 +2974,38 @@ def post_list_phrase_matchers( ) -> contact_center_insights.ListPhraseMatchersResponse: """Post-rpc interceptor for list_phrase_matchers - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_phrase_matchers_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ContactCenterInsights server but before - it is returned to user code. + it is returned to user code. This `post_list_phrase_matchers` interceptor runs + before the `post_list_phrase_matchers_with_metadata` interceptor. 
""" return response + def post_list_phrase_matchers_with_metadata( + self, + response: contact_center_insights.ListPhraseMatchersResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + contact_center_insights.ListPhraseMatchersResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_phrase_matchers + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ContactCenterInsights server but before it is returned to user code. + + We recommend only using this `post_list_phrase_matchers_with_metadata` + interceptor in new development instead of the `post_list_phrase_matchers` interceptor. + When both interceptors are used, this `post_list_phrase_matchers_with_metadata` interceptor runs after the + `post_list_phrase_matchers` interceptor. The (possibly modified) response returned by + `post_list_phrase_matchers` will be passed to + `post_list_phrase_matchers_with_metadata`. + """ + return response, metadata + def pre_list_qa_questions( self, request: contact_center_insights.ListQaQuestionsRequest, @@ -1961,12 +3026,38 @@ def post_list_qa_questions( ) -> contact_center_insights.ListQaQuestionsResponse: """Post-rpc interceptor for list_qa_questions - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_qa_questions_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ContactCenterInsights server but before - it is returned to user code. + it is returned to user code. This `post_list_qa_questions` interceptor runs + before the `post_list_qa_questions_with_metadata` interceptor. """ return response + def post_list_qa_questions_with_metadata( + self, + response: contact_center_insights.ListQaQuestionsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + contact_center_insights.ListQaQuestionsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_qa_questions + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ContactCenterInsights server but before it is returned to user code. + + We recommend only using this `post_list_qa_questions_with_metadata` + interceptor in new development instead of the `post_list_qa_questions` interceptor. + When both interceptors are used, this `post_list_qa_questions_with_metadata` interceptor runs after the + `post_list_qa_questions` interceptor. The (possibly modified) response returned by + `post_list_qa_questions` will be passed to + `post_list_qa_questions_with_metadata`. + """ + return response, metadata + def pre_list_qa_scorecard_revisions( self, request: contact_center_insights.ListQaScorecardRevisionsRequest, @@ -1987,12 +3078,38 @@ def post_list_qa_scorecard_revisions( ) -> contact_center_insights.ListQaScorecardRevisionsResponse: """Post-rpc interceptor for list_qa_scorecard_revisions - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_qa_scorecard_revisions_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ContactCenterInsights server but before - it is returned to user code. + it is returned to user code. This `post_list_qa_scorecard_revisions` interceptor runs + before the `post_list_qa_scorecard_revisions_with_metadata` interceptor. 
""" return response + def post_list_qa_scorecard_revisions_with_metadata( + self, + response: contact_center_insights.ListQaScorecardRevisionsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + contact_center_insights.ListQaScorecardRevisionsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_qa_scorecard_revisions + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ContactCenterInsights server but before it is returned to user code. + + We recommend only using this `post_list_qa_scorecard_revisions_with_metadata` + interceptor in new development instead of the `post_list_qa_scorecard_revisions` interceptor. + When both interceptors are used, this `post_list_qa_scorecard_revisions_with_metadata` interceptor runs after the + `post_list_qa_scorecard_revisions` interceptor. The (possibly modified) response returned by + `post_list_qa_scorecard_revisions` will be passed to + `post_list_qa_scorecard_revisions_with_metadata`. + """ + return response, metadata + def pre_list_qa_scorecards( self, request: contact_center_insights.ListQaScorecardsRequest, @@ -2013,12 +3130,38 @@ def post_list_qa_scorecards( ) -> contact_center_insights.ListQaScorecardsResponse: """Post-rpc interceptor for list_qa_scorecards - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_qa_scorecards_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ContactCenterInsights server but before - it is returned to user code. + it is returned to user code. This `post_list_qa_scorecards` interceptor runs + before the `post_list_qa_scorecards_with_metadata` interceptor. """ return response + def post_list_qa_scorecards_with_metadata( + self, + response: contact_center_insights.ListQaScorecardsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + contact_center_insights.ListQaScorecardsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_qa_scorecards + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ContactCenterInsights server but before it is returned to user code. + + We recommend only using this `post_list_qa_scorecards_with_metadata` + interceptor in new development instead of the `post_list_qa_scorecards` interceptor. + When both interceptors are used, this `post_list_qa_scorecards_with_metadata` interceptor runs after the + `post_list_qa_scorecards` interceptor. The (possibly modified) response returned by + `post_list_qa_scorecards` will be passed to + `post_list_qa_scorecards_with_metadata`. + """ + return response, metadata + def pre_list_views( self, request: contact_center_insights.ListViewsRequest, @@ -2039,12 +3182,38 @@ def post_list_views( ) -> contact_center_insights.ListViewsResponse: """Post-rpc interceptor for list_views - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_views_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ContactCenterInsights server but before - it is returned to user code. + it is returned to user code. This `post_list_views` interceptor runs + before the `post_list_views_with_metadata` interceptor. 
""" return response + def post_list_views_with_metadata( + self, + response: contact_center_insights.ListViewsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + contact_center_insights.ListViewsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_views + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ContactCenterInsights server but before it is returned to user code. + + We recommend only using this `post_list_views_with_metadata` + interceptor in new development instead of the `post_list_views` interceptor. + When both interceptors are used, this `post_list_views_with_metadata` interceptor runs after the + `post_list_views` interceptor. The (possibly modified) response returned by + `post_list_views` will be passed to + `post_list_views_with_metadata`. + """ + return response, metadata + def pre_query_metrics( self, request: contact_center_insights.QueryMetricsRequest, @@ -2065,12 +3234,35 @@ def post_query_metrics( ) -> operations_pb2.Operation: """Post-rpc interceptor for query_metrics - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_query_metrics_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ContactCenterInsights server but before - it is returned to user code. + it is returned to user code. This `post_query_metrics` interceptor runs + before the `post_query_metrics_with_metadata` interceptor. """ return response + def post_query_metrics_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for query_metrics + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ContactCenterInsights server but before it is returned to user code. + + We recommend only using this `post_query_metrics_with_metadata` + interceptor in new development instead of the `post_query_metrics` interceptor. + When both interceptors are used, this `post_query_metrics_with_metadata` interceptor runs after the + `post_query_metrics` interceptor. The (possibly modified) response returned by + `post_query_metrics` will be passed to + `post_query_metrics_with_metadata`. + """ + return response, metadata + def pre_tune_qa_scorecard_revision( self, request: contact_center_insights.TuneQaScorecardRevisionRequest, @@ -2091,12 +3283,35 @@ def post_tune_qa_scorecard_revision( ) -> operations_pb2.Operation: """Post-rpc interceptor for tune_qa_scorecard_revision - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_tune_qa_scorecard_revision_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ContactCenterInsights server but before - it is returned to user code. + it is returned to user code. This `post_tune_qa_scorecard_revision` interceptor runs + before the `post_tune_qa_scorecard_revision_with_metadata` interceptor. 
""" return response + def post_tune_qa_scorecard_revision_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for tune_qa_scorecard_revision + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ContactCenterInsights server but before it is returned to user code. + + We recommend only using this `post_tune_qa_scorecard_revision_with_metadata` + interceptor in new development instead of the `post_tune_qa_scorecard_revision` interceptor. + When both interceptors are used, this `post_tune_qa_scorecard_revision_with_metadata` interceptor runs after the + `post_tune_qa_scorecard_revision` interceptor. The (possibly modified) response returned by + `post_tune_qa_scorecard_revision` will be passed to + `post_tune_qa_scorecard_revision_with_metadata`. + """ + return response, metadata + def pre_undeploy_issue_model( self, request: contact_center_insights.UndeployIssueModelRequest, @@ -2117,12 +3332,35 @@ def post_undeploy_issue_model( ) -> operations_pb2.Operation: """Post-rpc interceptor for undeploy_issue_model - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_undeploy_issue_model_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ContactCenterInsights server but before - it is returned to user code. + it is returned to user code. This `post_undeploy_issue_model` interceptor runs + before the `post_undeploy_issue_model_with_metadata` interceptor. """ return response + def post_undeploy_issue_model_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for undeploy_issue_model + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ContactCenterInsights server but before it is returned to user code. + + We recommend only using this `post_undeploy_issue_model_with_metadata` + interceptor in new development instead of the `post_undeploy_issue_model` interceptor. + When both interceptors are used, this `post_undeploy_issue_model_with_metadata` interceptor runs after the + `post_undeploy_issue_model` interceptor. The (possibly modified) response returned by + `post_undeploy_issue_model` will be passed to + `post_undeploy_issue_model_with_metadata`. + """ + return response, metadata + def pre_undeploy_qa_scorecard_revision( self, request: contact_center_insights.UndeployQaScorecardRevisionRequest, @@ -2143,12 +3381,35 @@ def post_undeploy_qa_scorecard_revision( ) -> resources.QaScorecardRevision: """Post-rpc interceptor for undeploy_qa_scorecard_revision - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_undeploy_qa_scorecard_revision_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ContactCenterInsights server but before - it is returned to user code. + it is returned to user code. This `post_undeploy_qa_scorecard_revision` interceptor runs + before the `post_undeploy_qa_scorecard_revision_with_metadata` interceptor. 
""" return response + def post_undeploy_qa_scorecard_revision_with_metadata( + self, + response: resources.QaScorecardRevision, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.QaScorecardRevision, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for undeploy_qa_scorecard_revision + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ContactCenterInsights server but before it is returned to user code. + + We recommend only using this `post_undeploy_qa_scorecard_revision_with_metadata` + interceptor in new development instead of the `post_undeploy_qa_scorecard_revision` interceptor. + When both interceptors are used, this `post_undeploy_qa_scorecard_revision_with_metadata` interceptor runs after the + `post_undeploy_qa_scorecard_revision` interceptor. The (possibly modified) response returned by + `post_undeploy_qa_scorecard_revision` will be passed to + `post_undeploy_qa_scorecard_revision_with_metadata`. + """ + return response, metadata + def pre_update_analysis_rule( self, request: contact_center_insights.UpdateAnalysisRuleRequest, @@ -2169,12 +3430,35 @@ def post_update_analysis_rule( ) -> resources.AnalysisRule: """Post-rpc interceptor for update_analysis_rule - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_analysis_rule_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ContactCenterInsights server but before - it is returned to user code. + it is returned to user code. This `post_update_analysis_rule` interceptor runs + before the `post_update_analysis_rule_with_metadata` interceptor. """ return response + def post_update_analysis_rule_with_metadata( + self, + response: resources.AnalysisRule, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.AnalysisRule, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_analysis_rule + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ContactCenterInsights server but before it is returned to user code. + + We recommend only using this `post_update_analysis_rule_with_metadata` + interceptor in new development instead of the `post_update_analysis_rule` interceptor. + When both interceptors are used, this `post_update_analysis_rule_with_metadata` interceptor runs after the + `post_update_analysis_rule` interceptor. The (possibly modified) response returned by + `post_update_analysis_rule` will be passed to + `post_update_analysis_rule_with_metadata`. + """ + return response, metadata + def pre_update_conversation( self, request: contact_center_insights.UpdateConversationRequest, @@ -2195,12 +3479,35 @@ def post_update_conversation( ) -> resources.Conversation: """Post-rpc interceptor for update_conversation - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_conversation_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ContactCenterInsights server but before - it is returned to user code. + it is returned to user code. This `post_update_conversation` interceptor runs + before the `post_update_conversation_with_metadata` interceptor. 
""" return response + def post_update_conversation_with_metadata( + self, + response: resources.Conversation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.Conversation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_conversation + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ContactCenterInsights server but before it is returned to user code. + + We recommend only using this `post_update_conversation_with_metadata` + interceptor in new development instead of the `post_update_conversation` interceptor. + When both interceptors are used, this `post_update_conversation_with_metadata` interceptor runs after the + `post_update_conversation` interceptor. The (possibly modified) response returned by + `post_update_conversation` will be passed to + `post_update_conversation_with_metadata`. + """ + return response, metadata + def pre_update_feedback_label( self, request: contact_center_insights.UpdateFeedbackLabelRequest, @@ -2221,12 +3528,35 @@ def post_update_feedback_label( ) -> resources.FeedbackLabel: """Post-rpc interceptor for update_feedback_label - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_feedback_label_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ContactCenterInsights server but before - it is returned to user code. + it is returned to user code. This `post_update_feedback_label` interceptor runs + before the `post_update_feedback_label_with_metadata` interceptor. """ return response + def post_update_feedback_label_with_metadata( + self, + response: resources.FeedbackLabel, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.FeedbackLabel, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_feedback_label + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ContactCenterInsights server but before it is returned to user code. + + We recommend only using this `post_update_feedback_label_with_metadata` + interceptor in new development instead of the `post_update_feedback_label` interceptor. + When both interceptors are used, this `post_update_feedback_label_with_metadata` interceptor runs after the + `post_update_feedback_label` interceptor. The (possibly modified) response returned by + `post_update_feedback_label` will be passed to + `post_update_feedback_label_with_metadata`. + """ + return response, metadata + def pre_update_issue( self, request: contact_center_insights.UpdateIssueRequest, @@ -2245,12 +3575,35 @@ def pre_update_issue( def post_update_issue(self, response: resources.Issue) -> resources.Issue: """Post-rpc interceptor for update_issue - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_issue_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ContactCenterInsights server but before - it is returned to user code. + it is returned to user code. This `post_update_issue` interceptor runs + before the `post_update_issue_with_metadata` interceptor. 
""" return response + def post_update_issue_with_metadata( + self, + response: resources.Issue, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.Issue, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_issue + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ContactCenterInsights server but before it is returned to user code. + + We recommend only using this `post_update_issue_with_metadata` + interceptor in new development instead of the `post_update_issue` interceptor. + When both interceptors are used, this `post_update_issue_with_metadata` interceptor runs after the + `post_update_issue` interceptor. The (possibly modified) response returned by + `post_update_issue` will be passed to + `post_update_issue_with_metadata`. + """ + return response, metadata + def pre_update_issue_model( self, request: contact_center_insights.UpdateIssueModelRequest, @@ -2271,12 +3624,35 @@ def post_update_issue_model( ) -> resources.IssueModel: """Post-rpc interceptor for update_issue_model - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_issue_model_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ContactCenterInsights server but before - it is returned to user code. + it is returned to user code. This `post_update_issue_model` interceptor runs + before the `post_update_issue_model_with_metadata` interceptor. """ return response + def post_update_issue_model_with_metadata( + self, + response: resources.IssueModel, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.IssueModel, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_issue_model + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ContactCenterInsights server but before it is returned to user code. + + We recommend only using this `post_update_issue_model_with_metadata` + interceptor in new development instead of the `post_update_issue_model` interceptor. + When both interceptors are used, this `post_update_issue_model_with_metadata` interceptor runs after the + `post_update_issue_model` interceptor. The (possibly modified) response returned by + `post_update_issue_model` will be passed to + `post_update_issue_model_with_metadata`. + """ + return response, metadata + def pre_update_phrase_matcher( self, request: contact_center_insights.UpdatePhraseMatcherRequest, @@ -2297,12 +3673,35 @@ def post_update_phrase_matcher( ) -> resources.PhraseMatcher: """Post-rpc interceptor for update_phrase_matcher - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_phrase_matcher_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ContactCenterInsights server but before - it is returned to user code. + it is returned to user code. This `post_update_phrase_matcher` interceptor runs + before the `post_update_phrase_matcher_with_metadata` interceptor. 
""" return response + def post_update_phrase_matcher_with_metadata( + self, + response: resources.PhraseMatcher, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.PhraseMatcher, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_phrase_matcher + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ContactCenterInsights server but before it is returned to user code. + + We recommend only using this `post_update_phrase_matcher_with_metadata` + interceptor in new development instead of the `post_update_phrase_matcher` interceptor. + When both interceptors are used, this `post_update_phrase_matcher_with_metadata` interceptor runs after the + `post_update_phrase_matcher` interceptor. The (possibly modified) response returned by + `post_update_phrase_matcher` will be passed to + `post_update_phrase_matcher_with_metadata`. + """ + return response, metadata + def pre_update_qa_question( self, request: contact_center_insights.UpdateQaQuestionRequest, @@ -2323,12 +3722,35 @@ def post_update_qa_question( ) -> resources.QaQuestion: """Post-rpc interceptor for update_qa_question - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_qa_question_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ContactCenterInsights server but before - it is returned to user code. + it is returned to user code. This `post_update_qa_question` interceptor runs + before the `post_update_qa_question_with_metadata` interceptor. """ return response + def post_update_qa_question_with_metadata( + self, + response: resources.QaQuestion, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.QaQuestion, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_qa_question + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ContactCenterInsights server but before it is returned to user code. + + We recommend only using this `post_update_qa_question_with_metadata` + interceptor in new development instead of the `post_update_qa_question` interceptor. + When both interceptors are used, this `post_update_qa_question_with_metadata` interceptor runs after the + `post_update_qa_question` interceptor. The (possibly modified) response returned by + `post_update_qa_question` will be passed to + `post_update_qa_question_with_metadata`. + """ + return response, metadata + def pre_update_qa_scorecard( self, request: contact_center_insights.UpdateQaScorecardRequest, @@ -2349,12 +3771,35 @@ def post_update_qa_scorecard( ) -> resources.QaScorecard: """Post-rpc interceptor for update_qa_scorecard - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_qa_scorecard_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ContactCenterInsights server but before - it is returned to user code. + it is returned to user code. This `post_update_qa_scorecard` interceptor runs + before the `post_update_qa_scorecard_with_metadata` interceptor. 
""" return response + def post_update_qa_scorecard_with_metadata( + self, + response: resources.QaScorecard, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.QaScorecard, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_qa_scorecard + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ContactCenterInsights server but before it is returned to user code. + + We recommend only using this `post_update_qa_scorecard_with_metadata` + interceptor in new development instead of the `post_update_qa_scorecard` interceptor. + When both interceptors are used, this `post_update_qa_scorecard_with_metadata` interceptor runs after the + `post_update_qa_scorecard` interceptor. The (possibly modified) response returned by + `post_update_qa_scorecard` will be passed to + `post_update_qa_scorecard_with_metadata`. + """ + return response, metadata + def pre_update_settings( self, request: contact_center_insights.UpdateSettingsRequest, @@ -2373,12 +3818,35 @@ def pre_update_settings( def post_update_settings(self, response: resources.Settings) -> resources.Settings: """Post-rpc interceptor for update_settings - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_settings_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ContactCenterInsights server but before - it is returned to user code. + it is returned to user code. This `post_update_settings` interceptor runs + before the `post_update_settings_with_metadata` interceptor. """ return response + def post_update_settings_with_metadata( + self, + response: resources.Settings, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.Settings, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_settings + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ContactCenterInsights server but before it is returned to user code. + + We recommend only using this `post_update_settings_with_metadata` + interceptor in new development instead of the `post_update_settings` interceptor. + When both interceptors are used, this `post_update_settings_with_metadata` interceptor runs after the + `post_update_settings` interceptor. The (possibly modified) response returned by + `post_update_settings` will be passed to + `post_update_settings_with_metadata`. + """ + return response, metadata + def pre_update_view( self, request: contact_center_insights.UpdateViewRequest, @@ -2397,12 +3865,35 @@ def pre_update_view( def post_update_view(self, response: resources.View) -> resources.View: """Post-rpc interceptor for update_view - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_view_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ContactCenterInsights server but before - it is returned to user code. + it is returned to user code. This `post_update_view` interceptor runs + before the `post_update_view_with_metadata` interceptor. 
""" return response + def post_update_view_with_metadata( + self, + response: resources.View, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.View, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_view + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ContactCenterInsights server but before it is returned to user code. + + We recommend only using this `post_update_view_with_metadata` + interceptor in new development instead of the `post_update_view` interceptor. + When both interceptors are used, this `post_update_view_with_metadata` interceptor runs after the + `post_update_view` interceptor. The (possibly modified) response returned by + `post_update_view` will be passed to + `post_update_view_with_metadata`. + """ + return response, metadata + def pre_upload_conversation( self, request: contact_center_insights.UploadConversationRequest, @@ -2423,12 +3914,35 @@ def post_upload_conversation( ) -> operations_pb2.Operation: """Post-rpc interceptor for upload_conversation - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_upload_conversation_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ContactCenterInsights server but before - it is returned to user code. + it is returned to user code. This `post_upload_conversation` interceptor runs + before the `post_upload_conversation_with_metadata` interceptor. """ return response + def post_upload_conversation_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for upload_conversation + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ContactCenterInsights server but before it is returned to user code. + + We recommend only using this `post_upload_conversation_with_metadata` + interceptor in new development instead of the `post_upload_conversation` interceptor. + When both interceptors are used, this `post_upload_conversation_with_metadata` interceptor runs after the + `post_upload_conversation` interceptor. The (possibly modified) response returned by + `post_upload_conversation` will be passed to + `post_upload_conversation_with_metadata`. 
+ """ + return response, metadata + def pre_cancel_operation( self, request: operations_pb2.CancelOperationRequest, @@ -2766,6 +4280,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_bulk_analyze_conversations(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_bulk_analyze_conversations_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2917,6 +4435,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_bulk_delete_conversations(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_bulk_delete_conversations_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3069,6 +4591,13 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_bulk_download_feedback_labels(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_bulk_download_feedback_labels_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3221,6 +4750,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_bulk_upload_feedback_labels(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_bulk_upload_feedback_labels_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3368,6 +4901,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_calculate_issue_model_stats(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_calculate_issue_model_stats_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3514,6 +5051,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_calculate_stats(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_calculate_stats_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3664,6 +5205,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_analysis(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_analysis_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3826,6 +5371,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_analysis_rule(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_analysis_rule_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and 
_LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3977,6 +5526,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_conversation(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_conversation_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4131,6 +5684,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_feedback_label(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_feedback_label_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4283,6 +5840,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_issue_model(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_issue_model_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4434,6 +5995,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_phrase_matcher(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_phrase_matcher_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4588,6 +6153,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_qa_question(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_qa_question_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4743,6 +6312,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_qa_scorecard(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_qa_scorecard_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4904,6 +6477,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_qa_scorecard_revision(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_qa_scorecard_revision_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -5051,6 +6628,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_view(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_view_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -5746,6 +7327,10 @@ def __call__( 
json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_issue_model(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_issue_model_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -6452,6 +8037,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_deploy_issue_model(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_deploy_issue_model_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -6613,6 +8202,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_deploy_qa_scorecard_revision(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_deploy_qa_scorecard_revision_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -6765,6 +8358,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_export_insights_data(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_export_insights_data_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -6917,6 +8514,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_export_issue_model(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_export_issue_model_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -7058,6 +8659,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_analysis(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_analysis_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -7214,6 +8819,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_analysis_rule(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_analysis_rule_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -7359,6 +8968,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_conversation(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_conversation_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -7508,6 +9121,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_encryption_spec(resp) + 
response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_encryption_spec_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -7656,6 +9273,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_feedback_label(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_feedback_label_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -7797,6 +9418,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_issue(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_issue_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -7938,6 +9563,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_issue_model(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_issue_model_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -8084,6 +9713,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_phrase_matcher(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_phrase_matcher_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -8227,6 +9860,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_qa_question(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_qa_question_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -8374,6 +10011,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_qa_scorecard(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_qa_scorecard_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -8528,6 +10169,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_qa_scorecard_revision(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_qa_scorecard_revision_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -8676,6 +10321,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_settings(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_settings_with_metadata( + resp, response_metadata 
+ ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -8817,6 +10466,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_view(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_view_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -8969,6 +10622,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_import_issue_model(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_import_issue_model_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -9121,6 +10778,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_ingest_conversations(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_ingest_conversations_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -9273,6 +10934,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_initialize_encryption_spec(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_initialize_encryption_spec_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -9421,6 +11086,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_all_feedback_labels(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_all_feedback_labels_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -9566,6 +11235,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_analyses(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_analyses_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -9713,6 +11386,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_analysis_rules(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_analysis_rules_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -9864,6 +11541,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_conversations(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_conversations_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -10016,6 +11697,10 @@ def __call__( 
json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_feedback_labels(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_feedback_labels_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -10165,6 +11850,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_issue_models(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_issue_models_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -10310,6 +11999,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_issues(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_issues_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -10459,6 +12152,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_phrase_matchers(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_phrase_matchers_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -10610,6 +12307,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_qa_questions(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_qa_questions_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -10760,6 +12461,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_qa_scorecard_revisions(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_qa_scorecard_revisions_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -10909,6 +12614,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_qa_scorecards(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_qa_scorecards_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -11054,6 +12763,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_views(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_views_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -11204,6 +12917,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_query_metrics(resp) + 
response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_query_metrics_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -11356,6 +13073,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_tune_qa_scorecard_revision(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_tune_qa_scorecard_revision_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -11509,6 +13230,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_undeploy_issue_model(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_undeploy_issue_model_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -11672,6 +13397,13 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_undeploy_qa_scorecard_revision(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_undeploy_qa_scorecard_revision_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -11834,6 +13566,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_analysis_rule(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_analysis_rule_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -11985,6 +13721,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_conversation(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_conversation_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -12139,6 +13879,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_feedback_label(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_feedback_label_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -12286,6 +14030,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_issue(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_issue_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -12437,6 +14185,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_issue_model(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, 
_ = self._interceptor.post_update_issue_model_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -12589,6 +14341,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_phrase_matcher(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_phrase_matcher_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -12743,6 +14499,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_qa_question(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_qa_question_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -12898,6 +14658,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_qa_scorecard(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_qa_scorecard_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -13052,6 +14816,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_settings(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_settings_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -13199,6 +14967,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_view(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_view_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -13351,6 +15123,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_upload_conversation(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_upload_conversation_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/services/contact_center_insights/transports/rest_base.py b/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/services/contact_center_insights/transports/rest_base.py index 306fa9225329..76d0481dd82f 100644 --- a/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/services/contact_center_insights/transports/rest_base.py +++ b/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/services/contact_center_insights/transports/rest_base.py @@ -400,6 +400,10 @@ def _get_http_options(): "method": "get", "uri": "/v1/{location=projects/*/locations/*}/conversations:calculateStats", }, + { + "method": "get", + 
"uri": "/v1/{location=projects/*/locations/*/authorizedViewSet/*/authorizedView/*}:calculateStats", + }, ] return http_options @@ -3123,6 +3127,11 @@ def _get_http_options(): "uri": "/v1/{location=projects/*/locations/*}:queryMetrics", "body": "*", }, + { + "method": "post", + "uri": "/v1/{location=projects/*/locations/*/authorizedViewSet/*/authorizedView/*}:queryMetrics", + "body": "*", + }, ] return http_options diff --git a/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/types/resources.py b/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/types/resources.py index c66c651e5e83..62588f3070ac 100644 --- a/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/types/resources.py +++ b/packages/google-cloud-contact-center-insights/google/cloud/contact_center_insights_v1/types/resources.py @@ -273,6 +273,8 @@ class AgentInfo(proto.Message): disposition_code (str): A user-provided string indicating the outcome of the agent's segment of the call. + agent_type (google.cloud.contact_center_insights_v1.types.ConversationParticipant.Role): + The agent type, e.g. HUMAN_AGENT. """ agent_id: str = proto.Field( @@ -291,6 +293,11 @@ class AgentInfo(proto.Message): proto.STRING, number=4, ) + agent_type: "ConversationParticipant.Role" = proto.Field( + proto.ENUM, + number=5, + enum="ConversationParticipant.Role", + ) customer_satisfaction_rating: int = proto.Field( proto.INT32, diff --git a/packages/google-cloud-contact-center-insights/samples/generated_samples/snippet_metadata_google.cloud.contactcenterinsights.v1.json b/packages/google-cloud-contact-center-insights/samples/generated_samples/snippet_metadata_google.cloud.contactcenterinsights.v1.json index f61f8310164c..634799fedfdd 100644 --- a/packages/google-cloud-contact-center-insights/samples/generated_samples/snippet_metadata_google.cloud.contactcenterinsights.v1.json +++ b/packages/google-cloud-contact-center-insights/samples/generated_samples/snippet_metadata_google.cloud.contactcenterinsights.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-contact-center-insights", - "version": "1.21.0" + "version": "1.23.0" }, "snippets": [ { diff --git a/packages/google-cloud-contact-center-insights/tests/unit/gapic/contact_center_insights_v1/test_contact_center_insights.py b/packages/google-cloud-contact-center-insights/tests/unit/gapic/contact_center_insights_v1/test_contact_center_insights.py index 476530f8ef21..8fb1f5ca0bba 100644 --- a/packages/google-cloud-contact-center-insights/tests/unit/gapic/contact_center_insights_v1/test_contact_center_insights.py +++ b/packages/google-cloud-contact-center-insights/tests/unit/gapic/contact_center_insights_v1/test_contact_center_insights.py @@ -80,6 +80,13 @@ resources, ) +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -355,6 +362,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = ContactCenterInsightsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = ContactCenterInsightsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -46721,6 +46771,7 @@ def test_create_conversation_rest_call_success(request_type): "display_name": "display_name_value", "team": "team_value", "disposition_code": "disposition_code_value", + "agent_type": 1, } ], }, @@ -47071,10 +47122,14 @@ def test_create_conversation_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ContactCenterInsightsRestInterceptor, "post_create_conversation" ) as post, mock.patch.object( + transports.ContactCenterInsightsRestInterceptor, + "post_create_conversation_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ContactCenterInsightsRestInterceptor, "pre_create_conversation" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = contact_center_insights.CreateConversationRequest.pb( contact_center_insights.CreateConversationRequest() ) @@ -47098,6 +47153,7 @@ def test_create_conversation_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.Conversation() + post_with_metadata.return_value = resources.Conversation(), metadata client.create_conversation( request, @@ -47109,6 +47165,7 @@ def test_create_conversation_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_upload_conversation_rest_bad_request( @@ -47189,10 +47246,14 @@ def test_upload_conversation_rest_interceptors(null_interceptor): ), mock.patch.object( transports.ContactCenterInsightsRestInterceptor, "post_upload_conversation" ) as post, mock.patch.object( + transports.ContactCenterInsightsRestInterceptor, + "post_upload_conversation_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ContactCenterInsightsRestInterceptor, "pre_upload_conversation" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = contact_center_insights.UploadConversationRequest.pb( contact_center_insights.UploadConversationRequest() ) @@ -47216,6 +47277,7 @@ def test_upload_conversation_rest_interceptors(null_interceptor): ] pre.return_value = 
request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.upload_conversation( request, @@ -47227,6 +47289,7 @@ def test_upload_conversation_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_conversation_rest_bad_request( @@ -47307,6 +47370,7 @@ def test_update_conversation_rest_call_success(request_type): "display_name": "display_name_value", "team": "team_value", "disposition_code": "disposition_code_value", + "agent_type": 1, } ], }, @@ -47657,10 +47721,14 @@ def test_update_conversation_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ContactCenterInsightsRestInterceptor, "post_update_conversation" ) as post, mock.patch.object( + transports.ContactCenterInsightsRestInterceptor, + "post_update_conversation_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ContactCenterInsightsRestInterceptor, "pre_update_conversation" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = contact_center_insights.UpdateConversationRequest.pb( contact_center_insights.UpdateConversationRequest() ) @@ -47684,6 +47752,7 @@ def test_update_conversation_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.Conversation() + post_with_metadata.return_value = resources.Conversation(), metadata client.update_conversation( request, @@ -47695,6 +47764,7 @@ def test_update_conversation_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_conversation_rest_bad_request( @@ -47791,10 +47861,14 @@ def test_get_conversation_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ContactCenterInsightsRestInterceptor, "post_get_conversation" ) as post, mock.patch.object( + transports.ContactCenterInsightsRestInterceptor, + "post_get_conversation_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ContactCenterInsightsRestInterceptor, "pre_get_conversation" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = contact_center_insights.GetConversationRequest.pb( contact_center_insights.GetConversationRequest() ) @@ -47818,6 +47892,7 @@ def test_get_conversation_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.Conversation() + post_with_metadata.return_value = resources.Conversation(), metadata client.get_conversation( request, @@ -47829,6 +47904,7 @@ def test_get_conversation_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_conversations_rest_bad_request( @@ -47915,10 +47991,14 @@ def test_list_conversations_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ContactCenterInsightsRestInterceptor, "post_list_conversations" ) as post, mock.patch.object( + transports.ContactCenterInsightsRestInterceptor, + "post_list_conversations_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ContactCenterInsightsRestInterceptor, "pre_list_conversations" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = 
contact_center_insights.ListConversationsRequest.pb( contact_center_insights.ListConversationsRequest() ) @@ -47944,6 +48024,10 @@ def test_list_conversations_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = contact_center_insights.ListConversationsResponse() + post_with_metadata.return_value = ( + contact_center_insights.ListConversationsResponse(), + metadata, + ) client.list_conversations( request, @@ -47955,6 +48039,7 @@ def test_list_conversations_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_conversation_rest_bad_request( @@ -48348,10 +48433,14 @@ def test_create_analysis_rest_interceptors(null_interceptor): ), mock.patch.object( transports.ContactCenterInsightsRestInterceptor, "post_create_analysis" ) as post, mock.patch.object( + transports.ContactCenterInsightsRestInterceptor, + "post_create_analysis_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ContactCenterInsightsRestInterceptor, "pre_create_analysis" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = contact_center_insights.CreateAnalysisRequest.pb( contact_center_insights.CreateAnalysisRequest() ) @@ -48375,6 +48464,7 @@ def test_create_analysis_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_analysis( request, @@ -48386,6 +48476,7 @@ def test_create_analysis_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_analysis_rest_bad_request( @@ -48474,10 +48565,14 @@ def test_get_analysis_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ContactCenterInsightsRestInterceptor, "post_get_analysis" ) as post, mock.patch.object( + transports.ContactCenterInsightsRestInterceptor, + "post_get_analysis_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ContactCenterInsightsRestInterceptor, "pre_get_analysis" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = contact_center_insights.GetAnalysisRequest.pb( contact_center_insights.GetAnalysisRequest() ) @@ -48501,6 +48596,7 @@ def test_get_analysis_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.Analysis() + post_with_metadata.return_value = resources.Analysis(), metadata client.get_analysis( request, @@ -48512,6 +48608,7 @@ def test_get_analysis_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_analyses_rest_bad_request( @@ -48600,10 +48697,14 @@ def test_list_analyses_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ContactCenterInsightsRestInterceptor, "post_list_analyses" ) as post, mock.patch.object( + transports.ContactCenterInsightsRestInterceptor, + "post_list_analyses_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ContactCenterInsightsRestInterceptor, "pre_list_analyses" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = contact_center_insights.ListAnalysesRequest.pb( contact_center_insights.ListAnalysesRequest() ) @@ 
-48629,6 +48730,10 @@ def test_list_analyses_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = contact_center_insights.ListAnalysesResponse() + post_with_metadata.return_value = ( + contact_center_insights.ListAnalysesResponse(), + metadata, + ) client.list_analyses( request, @@ -48640,6 +48745,7 @@ def test_list_analyses_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_analysis_rest_bad_request( @@ -48834,11 +48940,15 @@ def test_bulk_analyze_conversations_rest_interceptors(null_interceptor): transports.ContactCenterInsightsRestInterceptor, "post_bulk_analyze_conversations", ) as post, mock.patch.object( + transports.ContactCenterInsightsRestInterceptor, + "post_bulk_analyze_conversations_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ContactCenterInsightsRestInterceptor, "pre_bulk_analyze_conversations", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = contact_center_insights.BulkAnalyzeConversationsRequest.pb( contact_center_insights.BulkAnalyzeConversationsRequest() ) @@ -48862,6 +48972,7 @@ def test_bulk_analyze_conversations_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.bulk_analyze_conversations( request, @@ -48873,6 +48984,7 @@ def test_bulk_analyze_conversations_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_bulk_delete_conversations_rest_bad_request( @@ -48954,10 +49066,14 @@ def test_bulk_delete_conversations_rest_interceptors(null_interceptor): transports.ContactCenterInsightsRestInterceptor, "post_bulk_delete_conversations", ) as post, mock.patch.object( + transports.ContactCenterInsightsRestInterceptor, + "post_bulk_delete_conversations_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ContactCenterInsightsRestInterceptor, "pre_bulk_delete_conversations" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = contact_center_insights.BulkDeleteConversationsRequest.pb( contact_center_insights.BulkDeleteConversationsRequest() ) @@ -48981,6 +49097,7 @@ def test_bulk_delete_conversations_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.bulk_delete_conversations( request, @@ -48992,6 +49109,7 @@ def test_bulk_delete_conversations_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_ingest_conversations_rest_bad_request( @@ -49072,10 +49190,14 @@ def test_ingest_conversations_rest_interceptors(null_interceptor): ), mock.patch.object( transports.ContactCenterInsightsRestInterceptor, "post_ingest_conversations" ) as post, mock.patch.object( + transports.ContactCenterInsightsRestInterceptor, + "post_ingest_conversations_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ContactCenterInsightsRestInterceptor, "pre_ingest_conversations" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = 
contact_center_insights.IngestConversationsRequest.pb( contact_center_insights.IngestConversationsRequest() ) @@ -49099,6 +49221,7 @@ def test_ingest_conversations_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.ingest_conversations( request, @@ -49110,6 +49233,7 @@ def test_ingest_conversations_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_export_insights_data_rest_bad_request( @@ -49190,10 +49314,14 @@ def test_export_insights_data_rest_interceptors(null_interceptor): ), mock.patch.object( transports.ContactCenterInsightsRestInterceptor, "post_export_insights_data" ) as post, mock.patch.object( + transports.ContactCenterInsightsRestInterceptor, + "post_export_insights_data_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ContactCenterInsightsRestInterceptor, "pre_export_insights_data" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = contact_center_insights.ExportInsightsDataRequest.pb( contact_center_insights.ExportInsightsDataRequest() ) @@ -49217,6 +49345,7 @@ def test_export_insights_data_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.export_insights_data( request, @@ -49228,6 +49357,7 @@ def test_export_insights_data_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_issue_model_rest_bad_request( @@ -49397,10 +49527,14 @@ def test_create_issue_model_rest_interceptors(null_interceptor): ), mock.patch.object( transports.ContactCenterInsightsRestInterceptor, "post_create_issue_model" ) as post, mock.patch.object( + transports.ContactCenterInsightsRestInterceptor, + "post_create_issue_model_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ContactCenterInsightsRestInterceptor, "pre_create_issue_model" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = contact_center_insights.CreateIssueModelRequest.pb( contact_center_insights.CreateIssueModelRequest() ) @@ -49424,6 +49558,7 @@ def test_create_issue_model_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_issue_model( request, @@ -49435,6 +49570,7 @@ def test_create_issue_model_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_issue_model_rest_bad_request( @@ -49626,10 +49762,14 @@ def test_update_issue_model_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ContactCenterInsightsRestInterceptor, "post_update_issue_model" ) as post, mock.patch.object( + transports.ContactCenterInsightsRestInterceptor, + "post_update_issue_model_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ContactCenterInsightsRestInterceptor, "pre_update_issue_model" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = 
contact_center_insights.UpdateIssueModelRequest.pb( contact_center_insights.UpdateIssueModelRequest() ) @@ -49653,6 +49793,7 @@ def test_update_issue_model_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.IssueModel() + post_with_metadata.return_value = resources.IssueModel(), metadata client.update_issue_model( request, @@ -49664,6 +49805,7 @@ def test_update_issue_model_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_issue_model_rest_bad_request( @@ -49758,10 +49900,14 @@ def test_get_issue_model_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ContactCenterInsightsRestInterceptor, "post_get_issue_model" ) as post, mock.patch.object( + transports.ContactCenterInsightsRestInterceptor, + "post_get_issue_model_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ContactCenterInsightsRestInterceptor, "pre_get_issue_model" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = contact_center_insights.GetIssueModelRequest.pb( contact_center_insights.GetIssueModelRequest() ) @@ -49785,6 +49931,7 @@ def test_get_issue_model_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.IssueModel() + post_with_metadata.return_value = resources.IssueModel(), metadata client.get_issue_model( request, @@ -49796,6 +49943,7 @@ def test_get_issue_model_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_issue_models_rest_bad_request( @@ -49877,10 +50025,14 @@ def test_list_issue_models_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ContactCenterInsightsRestInterceptor, "post_list_issue_models" ) as post, mock.patch.object( + transports.ContactCenterInsightsRestInterceptor, + "post_list_issue_models_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ContactCenterInsightsRestInterceptor, "pre_list_issue_models" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = contact_center_insights.ListIssueModelsRequest.pb( contact_center_insights.ListIssueModelsRequest() ) @@ -49906,6 +50058,10 @@ def test_list_issue_models_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = contact_center_insights.ListIssueModelsResponse() + post_with_metadata.return_value = ( + contact_center_insights.ListIssueModelsResponse(), + metadata, + ) client.list_issue_models( request, @@ -49917,6 +50073,7 @@ def test_list_issue_models_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_issue_model_rest_bad_request( @@ -49997,10 +50154,14 @@ def test_delete_issue_model_rest_interceptors(null_interceptor): ), mock.patch.object( transports.ContactCenterInsightsRestInterceptor, "post_delete_issue_model" ) as post, mock.patch.object( + transports.ContactCenterInsightsRestInterceptor, + "post_delete_issue_model_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ContactCenterInsightsRestInterceptor, "pre_delete_issue_model" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = 
contact_center_insights.DeleteIssueModelRequest.pb( contact_center_insights.DeleteIssueModelRequest() ) @@ -50024,6 +50185,7 @@ def test_delete_issue_model_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.delete_issue_model( request, @@ -50035,6 +50197,7 @@ def test_delete_issue_model_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_deploy_issue_model_rest_bad_request( @@ -50115,10 +50278,14 @@ def test_deploy_issue_model_rest_interceptors(null_interceptor): ), mock.patch.object( transports.ContactCenterInsightsRestInterceptor, "post_deploy_issue_model" ) as post, mock.patch.object( + transports.ContactCenterInsightsRestInterceptor, + "post_deploy_issue_model_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ContactCenterInsightsRestInterceptor, "pre_deploy_issue_model" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = contact_center_insights.DeployIssueModelRequest.pb( contact_center_insights.DeployIssueModelRequest() ) @@ -50142,6 +50309,7 @@ def test_deploy_issue_model_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.deploy_issue_model( request, @@ -50153,6 +50321,7 @@ def test_deploy_issue_model_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_undeploy_issue_model_rest_bad_request( @@ -50233,10 +50402,14 @@ def test_undeploy_issue_model_rest_interceptors(null_interceptor): ), mock.patch.object( transports.ContactCenterInsightsRestInterceptor, "post_undeploy_issue_model" ) as post, mock.patch.object( + transports.ContactCenterInsightsRestInterceptor, + "post_undeploy_issue_model_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ContactCenterInsightsRestInterceptor, "pre_undeploy_issue_model" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = contact_center_insights.UndeployIssueModelRequest.pb( contact_center_insights.UndeployIssueModelRequest() ) @@ -50260,6 +50433,7 @@ def test_undeploy_issue_model_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.undeploy_issue_model( request, @@ -50271,6 +50445,7 @@ def test_undeploy_issue_model_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_export_issue_model_rest_bad_request( @@ -50351,10 +50526,14 @@ def test_export_issue_model_rest_interceptors(null_interceptor): ), mock.patch.object( transports.ContactCenterInsightsRestInterceptor, "post_export_issue_model" ) as post, mock.patch.object( + transports.ContactCenterInsightsRestInterceptor, + "post_export_issue_model_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ContactCenterInsightsRestInterceptor, "pre_export_issue_model" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = contact_center_insights.ExportIssueModelRequest.pb( 
contact_center_insights.ExportIssueModelRequest() ) @@ -50378,6 +50557,7 @@ def test_export_issue_model_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.export_issue_model( request, @@ -50389,6 +50569,7 @@ def test_export_issue_model_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_import_issue_model_rest_bad_request( @@ -50469,10 +50650,14 @@ def test_import_issue_model_rest_interceptors(null_interceptor): ), mock.patch.object( transports.ContactCenterInsightsRestInterceptor, "post_import_issue_model" ) as post, mock.patch.object( + transports.ContactCenterInsightsRestInterceptor, + "post_import_issue_model_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ContactCenterInsightsRestInterceptor, "pre_import_issue_model" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = contact_center_insights.ImportIssueModelRequest.pb( contact_center_insights.ImportIssueModelRequest() ) @@ -50496,6 +50681,7 @@ def test_import_issue_model_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.import_issue_model( request, @@ -50507,6 +50693,7 @@ def test_import_issue_model_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_issue_rest_bad_request( @@ -50601,10 +50788,13 @@ def test_get_issue_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ContactCenterInsightsRestInterceptor, "post_get_issue" ) as post, mock.patch.object( + transports.ContactCenterInsightsRestInterceptor, "post_get_issue_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ContactCenterInsightsRestInterceptor, "pre_get_issue" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = contact_center_insights.GetIssueRequest.pb( contact_center_insights.GetIssueRequest() ) @@ -50628,6 +50818,7 @@ def test_get_issue_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.Issue() + post_with_metadata.return_value = resources.Issue(), metadata client.get_issue( request, @@ -50639,6 +50830,7 @@ def test_get_issue_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_issues_rest_bad_request( @@ -50720,10 +50912,14 @@ def test_list_issues_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ContactCenterInsightsRestInterceptor, "post_list_issues" ) as post, mock.patch.object( + transports.ContactCenterInsightsRestInterceptor, + "post_list_issues_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ContactCenterInsightsRestInterceptor, "pre_list_issues" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = contact_center_insights.ListIssuesRequest.pb( contact_center_insights.ListIssuesRequest() ) @@ -50749,6 +50945,10 @@ def test_list_issues_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = 
contact_center_insights.ListIssuesResponse() + post_with_metadata.return_value = ( + contact_center_insights.ListIssuesResponse(), + metadata, + ) client.list_issues( request, @@ -50760,6 +50960,7 @@ def test_list_issues_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_issue_rest_bad_request( @@ -50933,10 +51134,14 @@ def test_update_issue_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ContactCenterInsightsRestInterceptor, "post_update_issue" ) as post, mock.patch.object( + transports.ContactCenterInsightsRestInterceptor, + "post_update_issue_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ContactCenterInsightsRestInterceptor, "pre_update_issue" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = contact_center_insights.UpdateIssueRequest.pb( contact_center_insights.UpdateIssueRequest() ) @@ -50960,6 +51165,7 @@ def test_update_issue_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.Issue() + post_with_metadata.return_value = resources.Issue(), metadata client.update_issue( request, @@ -50971,6 +51177,7 @@ def test_update_issue_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_issue_rest_bad_request( @@ -51174,11 +51381,15 @@ def test_calculate_issue_model_stats_rest_interceptors(null_interceptor): transports.ContactCenterInsightsRestInterceptor, "post_calculate_issue_model_stats", ) as post, mock.patch.object( + transports.ContactCenterInsightsRestInterceptor, + "post_calculate_issue_model_stats_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ContactCenterInsightsRestInterceptor, "pre_calculate_issue_model_stats", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = contact_center_insights.CalculateIssueModelStatsRequest.pb( contact_center_insights.CalculateIssueModelStatsRequest() ) @@ -51204,6 +51415,10 @@ def test_calculate_issue_model_stats_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = contact_center_insights.CalculateIssueModelStatsResponse() + post_with_metadata.return_value = ( + contact_center_insights.CalculateIssueModelStatsResponse(), + metadata, + ) client.calculate_issue_model_stats( request, @@ -51215,6 +51430,7 @@ def test_calculate_issue_model_stats_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_phrase_matcher_rest_bad_request( @@ -51404,10 +51620,14 @@ def test_create_phrase_matcher_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ContactCenterInsightsRestInterceptor, "post_create_phrase_matcher" ) as post, mock.patch.object( + transports.ContactCenterInsightsRestInterceptor, + "post_create_phrase_matcher_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ContactCenterInsightsRestInterceptor, "pre_create_phrase_matcher" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = contact_center_insights.CreatePhraseMatcherRequest.pb( contact_center_insights.CreatePhraseMatcherRequest() ) @@ -51431,6 +51651,7 @@ def 
test_create_phrase_matcher_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.PhraseMatcher() + post_with_metadata.return_value = resources.PhraseMatcher(), metadata client.create_phrase_matcher( request, @@ -51442,6 +51663,7 @@ def test_create_phrase_matcher_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_phrase_matcher_rest_bad_request( @@ -51538,10 +51760,14 @@ def test_get_phrase_matcher_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ContactCenterInsightsRestInterceptor, "post_get_phrase_matcher" ) as post, mock.patch.object( + transports.ContactCenterInsightsRestInterceptor, + "post_get_phrase_matcher_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ContactCenterInsightsRestInterceptor, "pre_get_phrase_matcher" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = contact_center_insights.GetPhraseMatcherRequest.pb( contact_center_insights.GetPhraseMatcherRequest() ) @@ -51565,6 +51791,7 @@ def test_get_phrase_matcher_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.PhraseMatcher() + post_with_metadata.return_value = resources.PhraseMatcher(), metadata client.get_phrase_matcher( request, @@ -51576,6 +51803,7 @@ def test_get_phrase_matcher_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_phrase_matchers_rest_bad_request( @@ -51662,10 +51890,14 @@ def test_list_phrase_matchers_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ContactCenterInsightsRestInterceptor, "post_list_phrase_matchers" ) as post, mock.patch.object( + transports.ContactCenterInsightsRestInterceptor, + "post_list_phrase_matchers_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ContactCenterInsightsRestInterceptor, "pre_list_phrase_matchers" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = contact_center_insights.ListPhraseMatchersRequest.pb( contact_center_insights.ListPhraseMatchersRequest() ) @@ -51691,6 +51923,10 @@ def test_list_phrase_matchers_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = contact_center_insights.ListPhraseMatchersResponse() + post_with_metadata.return_value = ( + contact_center_insights.ListPhraseMatchersResponse(), + metadata, + ) client.list_phrase_matchers( request, @@ -51702,6 +51938,7 @@ def test_list_phrase_matchers_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_phrase_matcher_rest_bad_request( @@ -52008,10 +52245,14 @@ def test_update_phrase_matcher_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ContactCenterInsightsRestInterceptor, "post_update_phrase_matcher" ) as post, mock.patch.object( + transports.ContactCenterInsightsRestInterceptor, + "post_update_phrase_matcher_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ContactCenterInsightsRestInterceptor, "pre_update_phrase_matcher" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = contact_center_insights.UpdatePhraseMatcherRequest.pb( 
contact_center_insights.UpdatePhraseMatcherRequest() ) @@ -52035,6 +52276,7 @@ def test_update_phrase_matcher_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.PhraseMatcher() + post_with_metadata.return_value = resources.PhraseMatcher(), metadata client.update_phrase_matcher( request, @@ -52046,6 +52288,7 @@ def test_update_phrase_matcher_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_calculate_stats_rest_bad_request( @@ -52132,10 +52375,14 @@ def test_calculate_stats_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ContactCenterInsightsRestInterceptor, "post_calculate_stats" ) as post, mock.patch.object( + transports.ContactCenterInsightsRestInterceptor, + "post_calculate_stats_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ContactCenterInsightsRestInterceptor, "pre_calculate_stats" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = contact_center_insights.CalculateStatsRequest.pb( contact_center_insights.CalculateStatsRequest() ) @@ -52161,6 +52408,10 @@ def test_calculate_stats_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = contact_center_insights.CalculateStatsResponse() + post_with_metadata.return_value = ( + contact_center_insights.CalculateStatsResponse(), + metadata, + ) client.calculate_stats( request, @@ -52172,6 +52423,7 @@ def test_calculate_stats_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_settings_rest_bad_request( @@ -52258,10 +52510,14 @@ def test_get_settings_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ContactCenterInsightsRestInterceptor, "post_get_settings" ) as post, mock.patch.object( + transports.ContactCenterInsightsRestInterceptor, + "post_get_settings_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ContactCenterInsightsRestInterceptor, "pre_get_settings" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = contact_center_insights.GetSettingsRequest.pb( contact_center_insights.GetSettingsRequest() ) @@ -52285,6 +52541,7 @@ def test_get_settings_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.Settings() + post_with_metadata.return_value = resources.Settings(), metadata client.get_settings( request, @@ -52296,6 +52553,7 @@ def test_get_settings_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_settings_rest_bad_request( @@ -52491,10 +52749,14 @@ def test_update_settings_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ContactCenterInsightsRestInterceptor, "post_update_settings" ) as post, mock.patch.object( + transports.ContactCenterInsightsRestInterceptor, + "post_update_settings_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ContactCenterInsightsRestInterceptor, "pre_update_settings" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = contact_center_insights.UpdateSettingsRequest.pb( contact_center_insights.UpdateSettingsRequest() ) @@ -52518,6 +52780,7 @@ def 
test_update_settings_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.Settings() + post_with_metadata.return_value = resources.Settings(), metadata client.update_settings( request, @@ -52529,6 +52792,7 @@ def test_update_settings_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_analysis_rule_rest_bad_request( @@ -52724,10 +52988,14 @@ def test_create_analysis_rule_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ContactCenterInsightsRestInterceptor, "post_create_analysis_rule" ) as post, mock.patch.object( + transports.ContactCenterInsightsRestInterceptor, + "post_create_analysis_rule_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ContactCenterInsightsRestInterceptor, "pre_create_analysis_rule" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = contact_center_insights.CreateAnalysisRuleRequest.pb( contact_center_insights.CreateAnalysisRuleRequest() ) @@ -52751,6 +53019,7 @@ def test_create_analysis_rule_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.AnalysisRule() + post_with_metadata.return_value = resources.AnalysisRule(), metadata client.create_analysis_rule( request, @@ -52762,6 +53031,7 @@ def test_create_analysis_rule_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_analysis_rule_rest_bad_request( @@ -52854,10 +53124,14 @@ def test_get_analysis_rule_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ContactCenterInsightsRestInterceptor, "post_get_analysis_rule" ) as post, mock.patch.object( + transports.ContactCenterInsightsRestInterceptor, + "post_get_analysis_rule_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ContactCenterInsightsRestInterceptor, "pre_get_analysis_rule" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = contact_center_insights.GetAnalysisRuleRequest.pb( contact_center_insights.GetAnalysisRuleRequest() ) @@ -52881,6 +53155,7 @@ def test_get_analysis_rule_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.AnalysisRule() + post_with_metadata.return_value = resources.AnalysisRule(), metadata client.get_analysis_rule( request, @@ -52892,6 +53167,7 @@ def test_get_analysis_rule_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_analysis_rules_rest_bad_request( @@ -52978,10 +53254,14 @@ def test_list_analysis_rules_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ContactCenterInsightsRestInterceptor, "post_list_analysis_rules" ) as post, mock.patch.object( + transports.ContactCenterInsightsRestInterceptor, + "post_list_analysis_rules_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ContactCenterInsightsRestInterceptor, "pre_list_analysis_rules" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = contact_center_insights.ListAnalysisRulesRequest.pb( contact_center_insights.ListAnalysisRulesRequest() ) @@ -53007,6 +53287,10 @@ def 
test_list_analysis_rules_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = contact_center_insights.ListAnalysisRulesResponse() + post_with_metadata.return_value = ( + contact_center_insights.ListAnalysisRulesResponse(), + metadata, + ) client.list_analysis_rules( request, @@ -53018,6 +53302,7 @@ def test_list_analysis_rules_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_analysis_rule_rest_bad_request( @@ -53221,10 +53506,14 @@ def test_update_analysis_rule_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ContactCenterInsightsRestInterceptor, "post_update_analysis_rule" ) as post, mock.patch.object( + transports.ContactCenterInsightsRestInterceptor, + "post_update_analysis_rule_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ContactCenterInsightsRestInterceptor, "pre_update_analysis_rule" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = contact_center_insights.UpdateAnalysisRuleRequest.pb( contact_center_insights.UpdateAnalysisRuleRequest() ) @@ -53248,6 +53537,7 @@ def test_update_analysis_rule_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.AnalysisRule() + post_with_metadata.return_value = resources.AnalysisRule(), metadata client.update_analysis_rule( request, @@ -53259,6 +53549,7 @@ def test_update_analysis_rule_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_analysis_rule_rest_bad_request( @@ -53454,10 +53745,14 @@ def test_get_encryption_spec_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ContactCenterInsightsRestInterceptor, "post_get_encryption_spec" ) as post, mock.patch.object( + transports.ContactCenterInsightsRestInterceptor, + "post_get_encryption_spec_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ContactCenterInsightsRestInterceptor, "pre_get_encryption_spec" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = contact_center_insights.GetEncryptionSpecRequest.pb( contact_center_insights.GetEncryptionSpecRequest() ) @@ -53481,6 +53776,7 @@ def test_get_encryption_spec_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.EncryptionSpec() + post_with_metadata.return_value = resources.EncryptionSpec(), metadata client.get_encryption_spec( request, @@ -53492,6 +53788,7 @@ def test_get_encryption_spec_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_initialize_encryption_spec_rest_bad_request( @@ -53577,11 +53874,15 @@ def test_initialize_encryption_spec_rest_interceptors(null_interceptor): transports.ContactCenterInsightsRestInterceptor, "post_initialize_encryption_spec", ) as post, mock.patch.object( + transports.ContactCenterInsightsRestInterceptor, + "post_initialize_encryption_spec_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ContactCenterInsightsRestInterceptor, "pre_initialize_encryption_spec", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = contact_center_insights.InitializeEncryptionSpecRequest.pb( 
contact_center_insights.InitializeEncryptionSpecRequest() ) @@ -53605,6 +53906,7 @@ def test_initialize_encryption_spec_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.initialize_encryption_spec( request, @@ -53616,6 +53918,7 @@ def test_initialize_encryption_spec_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_view_rest_bad_request( @@ -53778,10 +54081,14 @@ def test_create_view_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ContactCenterInsightsRestInterceptor, "post_create_view" ) as post, mock.patch.object( + transports.ContactCenterInsightsRestInterceptor, + "post_create_view_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ContactCenterInsightsRestInterceptor, "pre_create_view" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = contact_center_insights.CreateViewRequest.pb( contact_center_insights.CreateViewRequest() ) @@ -53805,6 +54112,7 @@ def test_create_view_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.View() + post_with_metadata.return_value = resources.View(), metadata client.create_view( request, @@ -53816,6 +54124,7 @@ def test_create_view_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_view_rest_bad_request(request_type=contact_center_insights.GetViewRequest): @@ -53902,10 +54211,13 @@ def test_get_view_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ContactCenterInsightsRestInterceptor, "post_get_view" ) as post, mock.patch.object( + transports.ContactCenterInsightsRestInterceptor, "post_get_view_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ContactCenterInsightsRestInterceptor, "pre_get_view" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = contact_center_insights.GetViewRequest.pb( contact_center_insights.GetViewRequest() ) @@ -53929,6 +54241,7 @@ def test_get_view_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.View() + post_with_metadata.return_value = resources.View(), metadata client.get_view( request, @@ -53940,6 +54253,7 @@ def test_get_view_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_views_rest_bad_request( @@ -54024,10 +54338,13 @@ def test_list_views_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ContactCenterInsightsRestInterceptor, "post_list_views" ) as post, mock.patch.object( + transports.ContactCenterInsightsRestInterceptor, "post_list_views_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ContactCenterInsightsRestInterceptor, "pre_list_views" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = contact_center_insights.ListViewsRequest.pb( contact_center_insights.ListViewsRequest() ) @@ -54053,6 +54370,10 @@ def test_list_views_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = 
contact_center_insights.ListViewsResponse() + post_with_metadata.return_value = ( + contact_center_insights.ListViewsResponse(), + metadata, + ) client.list_views( request, @@ -54064,6 +54385,7 @@ def test_list_views_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_view_rest_bad_request( @@ -54230,10 +54552,14 @@ def test_update_view_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ContactCenterInsightsRestInterceptor, "post_update_view" ) as post, mock.patch.object( + transports.ContactCenterInsightsRestInterceptor, + "post_update_view_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ContactCenterInsightsRestInterceptor, "pre_update_view" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = contact_center_insights.UpdateViewRequest.pb( contact_center_insights.UpdateViewRequest() ) @@ -54257,6 +54583,7 @@ def test_update_view_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.View() + post_with_metadata.return_value = resources.View(), metadata client.update_view( request, @@ -54268,6 +54595,7 @@ def test_update_view_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_view_rest_bad_request( @@ -54457,10 +54785,14 @@ def test_query_metrics_rest_interceptors(null_interceptor): ), mock.patch.object( transports.ContactCenterInsightsRestInterceptor, "post_query_metrics" ) as post, mock.patch.object( + transports.ContactCenterInsightsRestInterceptor, + "post_query_metrics_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ContactCenterInsightsRestInterceptor, "pre_query_metrics" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = contact_center_insights.QueryMetricsRequest.pb( contact_center_insights.QueryMetricsRequest() ) @@ -54484,6 +54816,7 @@ def test_query_metrics_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.query_metrics( request, @@ -54495,6 +54828,7 @@ def test_query_metrics_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_qa_question_rest_bad_request( @@ -54688,10 +55022,14 @@ def test_create_qa_question_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ContactCenterInsightsRestInterceptor, "post_create_qa_question" ) as post, mock.patch.object( + transports.ContactCenterInsightsRestInterceptor, + "post_create_qa_question_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ContactCenterInsightsRestInterceptor, "pre_create_qa_question" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = contact_center_insights.CreateQaQuestionRequest.pb( contact_center_insights.CreateQaQuestionRequest() ) @@ -54715,6 +55053,7 @@ def test_create_qa_question_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.QaQuestion() + post_with_metadata.return_value = resources.QaQuestion(), metadata client.create_qa_question( request, @@ -54726,6 +55065,7 @@ 
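Each of the interceptor tests in this diff follows the same pattern: alongside the existing post_<method> hook, the generated ContactCenterInsightsRestInterceptor now exposes a post_<method>_with_metadata hook that receives both the deserialized response and the response metadata, and returns them as a tuple. A minimal sketch of overriding one such hook follows; the MetadataLoggingInterceptor name, the logging behavior, and the exact type annotations are illustrative assumptions rather than part of the generated library.

    import logging
    from typing import Sequence, Tuple

    from google.cloud.contact_center_insights_v1.services.contact_center_insights import (
        transports,
    )
    from google.cloud.contact_center_insights_v1.types import resources


    class MetadataLoggingInterceptor(transports.ContactCenterInsightsRestInterceptor):
        """Illustrative interceptor that inspects response metadata (not part of the library)."""

        def post_get_view_with_metadata(
            self,
            response: resources.View,
            metadata: Sequence[Tuple[str, str]],
        ) -> Tuple[resources.View, Sequence[Tuple[str, str]]]:
            # The *_with_metadata hooks receive the deserialized response plus the
            # response metadata and must return both, so the metadata can be read
            # (or augmented) before the response reaches the caller.
            logging.debug("get_view response metadata: %s", metadata)
            return response, metadata

Such an interceptor would typically be wired up the same way these tests do it: passed as the interceptor argument to transports.ContactCenterInsightsRestTransport, which is then handed to the client constructor.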
def test_create_qa_question_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_qa_question_rest_bad_request( @@ -54824,10 +55164,14 @@ def test_get_qa_question_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ContactCenterInsightsRestInterceptor, "post_get_qa_question" ) as post, mock.patch.object( + transports.ContactCenterInsightsRestInterceptor, + "post_get_qa_question_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ContactCenterInsightsRestInterceptor, "pre_get_qa_question" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = contact_center_insights.GetQaQuestionRequest.pb( contact_center_insights.GetQaQuestionRequest() ) @@ -54851,6 +55195,7 @@ def test_get_qa_question_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.QaQuestion() + post_with_metadata.return_value = resources.QaQuestion(), metadata client.get_qa_question( request, @@ -54862,6 +55207,7 @@ def test_get_qa_question_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_qa_question_rest_bad_request( @@ -55059,10 +55405,14 @@ def test_update_qa_question_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ContactCenterInsightsRestInterceptor, "post_update_qa_question" ) as post, mock.patch.object( + transports.ContactCenterInsightsRestInterceptor, + "post_update_qa_question_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ContactCenterInsightsRestInterceptor, "pre_update_qa_question" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = contact_center_insights.UpdateQaQuestionRequest.pb( contact_center_insights.UpdateQaQuestionRequest() ) @@ -55086,6 +55436,7 @@ def test_update_qa_question_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.QaQuestion() + post_with_metadata.return_value = resources.QaQuestion(), metadata client.update_qa_question( request, @@ -55097,6 +55448,7 @@ def test_update_qa_question_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_qa_question_rest_bad_request( @@ -55298,10 +55650,14 @@ def test_list_qa_questions_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ContactCenterInsightsRestInterceptor, "post_list_qa_questions" ) as post, mock.patch.object( + transports.ContactCenterInsightsRestInterceptor, + "post_list_qa_questions_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ContactCenterInsightsRestInterceptor, "pre_list_qa_questions" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = contact_center_insights.ListQaQuestionsRequest.pb( contact_center_insights.ListQaQuestionsRequest() ) @@ -55327,6 +55683,10 @@ def test_list_qa_questions_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = contact_center_insights.ListQaQuestionsResponse() + post_with_metadata.return_value = ( + contact_center_insights.ListQaQuestionsResponse(), + metadata, + ) client.list_qa_questions( request, @@ -55338,6 +55698,7 @@ def 
test_list_qa_questions_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_qa_scorecard_rest_bad_request( @@ -55502,10 +55863,14 @@ def test_create_qa_scorecard_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ContactCenterInsightsRestInterceptor, "post_create_qa_scorecard" ) as post, mock.patch.object( + transports.ContactCenterInsightsRestInterceptor, + "post_create_qa_scorecard_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ContactCenterInsightsRestInterceptor, "pre_create_qa_scorecard" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = contact_center_insights.CreateQaScorecardRequest.pb( contact_center_insights.CreateQaScorecardRequest() ) @@ -55529,6 +55894,7 @@ def test_create_qa_scorecard_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.QaScorecard() + post_with_metadata.return_value = resources.QaScorecard(), metadata client.create_qa_scorecard( request, @@ -55540,6 +55906,7 @@ def test_create_qa_scorecard_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_qa_scorecard_rest_bad_request( @@ -55628,10 +55995,14 @@ def test_get_qa_scorecard_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ContactCenterInsightsRestInterceptor, "post_get_qa_scorecard" ) as post, mock.patch.object( + transports.ContactCenterInsightsRestInterceptor, + "post_get_qa_scorecard_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ContactCenterInsightsRestInterceptor, "pre_get_qa_scorecard" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = contact_center_insights.GetQaScorecardRequest.pb( contact_center_insights.GetQaScorecardRequest() ) @@ -55655,6 +56026,7 @@ def test_get_qa_scorecard_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.QaScorecard() + post_with_metadata.return_value = resources.QaScorecard(), metadata client.get_qa_scorecard( request, @@ -55666,6 +56038,7 @@ def test_get_qa_scorecard_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_qa_scorecard_rest_bad_request( @@ -55838,10 +56211,14 @@ def test_update_qa_scorecard_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ContactCenterInsightsRestInterceptor, "post_update_qa_scorecard" ) as post, mock.patch.object( + transports.ContactCenterInsightsRestInterceptor, + "post_update_qa_scorecard_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ContactCenterInsightsRestInterceptor, "pre_update_qa_scorecard" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = contact_center_insights.UpdateQaScorecardRequest.pb( contact_center_insights.UpdateQaScorecardRequest() ) @@ -55865,6 +56242,7 @@ def test_update_qa_scorecard_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.QaScorecard() + post_with_metadata.return_value = resources.QaScorecard(), metadata client.update_qa_scorecard( request, @@ -55876,6 +56254,7 @@ def 
test_update_qa_scorecard_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_qa_scorecard_rest_bad_request( @@ -56069,10 +56448,14 @@ def test_list_qa_scorecards_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ContactCenterInsightsRestInterceptor, "post_list_qa_scorecards" ) as post, mock.patch.object( + transports.ContactCenterInsightsRestInterceptor, + "post_list_qa_scorecards_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ContactCenterInsightsRestInterceptor, "pre_list_qa_scorecards" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = contact_center_insights.ListQaScorecardsRequest.pb( contact_center_insights.ListQaScorecardsRequest() ) @@ -56098,6 +56481,10 @@ def test_list_qa_scorecards_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = contact_center_insights.ListQaScorecardsResponse() + post_with_metadata.return_value = ( + contact_center_insights.ListQaScorecardsResponse(), + metadata, + ) client.list_qa_scorecards( request, @@ -56109,6 +56496,7 @@ def test_list_qa_scorecards_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_qa_scorecard_revision_rest_bad_request( @@ -56282,11 +56670,15 @@ def test_create_qa_scorecard_revision_rest_interceptors(null_interceptor): transports.ContactCenterInsightsRestInterceptor, "post_create_qa_scorecard_revision", ) as post, mock.patch.object( + transports.ContactCenterInsightsRestInterceptor, + "post_create_qa_scorecard_revision_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ContactCenterInsightsRestInterceptor, "pre_create_qa_scorecard_revision", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = contact_center_insights.CreateQaScorecardRevisionRequest.pb( contact_center_insights.CreateQaScorecardRevisionRequest() ) @@ -56312,6 +56704,7 @@ def test_create_qa_scorecard_revision_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.QaScorecardRevision() + post_with_metadata.return_value = resources.QaScorecardRevision(), metadata client.create_qa_scorecard_revision( request, @@ -56323,6 +56716,7 @@ def test_create_qa_scorecard_revision_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_qa_scorecard_revision_rest_bad_request( @@ -56416,10 +56810,14 @@ def test_get_qa_scorecard_revision_rest_interceptors(null_interceptor): transports.ContactCenterInsightsRestInterceptor, "post_get_qa_scorecard_revision", ) as post, mock.patch.object( + transports.ContactCenterInsightsRestInterceptor, + "post_get_qa_scorecard_revision_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ContactCenterInsightsRestInterceptor, "pre_get_qa_scorecard_revision" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = contact_center_insights.GetQaScorecardRevisionRequest.pb( contact_center_insights.GetQaScorecardRevisionRequest() ) @@ -56445,6 +56843,7 @@ def test_get_qa_scorecard_revision_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.QaScorecardRevision() + 
post_with_metadata.return_value = resources.QaScorecardRevision(), metadata client.get_qa_scorecard_revision( request, @@ -56456,6 +56855,7 @@ def test_get_qa_scorecard_revision_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_tune_qa_scorecard_revision_rest_bad_request( @@ -56541,11 +56941,15 @@ def test_tune_qa_scorecard_revision_rest_interceptors(null_interceptor): transports.ContactCenterInsightsRestInterceptor, "post_tune_qa_scorecard_revision", ) as post, mock.patch.object( + transports.ContactCenterInsightsRestInterceptor, + "post_tune_qa_scorecard_revision_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ContactCenterInsightsRestInterceptor, "pre_tune_qa_scorecard_revision", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = contact_center_insights.TuneQaScorecardRevisionRequest.pb( contact_center_insights.TuneQaScorecardRevisionRequest() ) @@ -56569,6 +56973,7 @@ def test_tune_qa_scorecard_revision_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.tune_qa_scorecard_revision( request, @@ -56580,6 +56985,7 @@ def test_tune_qa_scorecard_revision_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_deploy_qa_scorecard_revision_rest_bad_request( @@ -56673,11 +57079,15 @@ def test_deploy_qa_scorecard_revision_rest_interceptors(null_interceptor): transports.ContactCenterInsightsRestInterceptor, "post_deploy_qa_scorecard_revision", ) as post, mock.patch.object( + transports.ContactCenterInsightsRestInterceptor, + "post_deploy_qa_scorecard_revision_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ContactCenterInsightsRestInterceptor, "pre_deploy_qa_scorecard_revision", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = contact_center_insights.DeployQaScorecardRevisionRequest.pb( contact_center_insights.DeployQaScorecardRevisionRequest() ) @@ -56703,6 +57113,7 @@ def test_deploy_qa_scorecard_revision_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.QaScorecardRevision() + post_with_metadata.return_value = resources.QaScorecardRevision(), metadata client.deploy_qa_scorecard_revision( request, @@ -56714,6 +57125,7 @@ def test_deploy_qa_scorecard_revision_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_undeploy_qa_scorecard_revision_rest_bad_request( @@ -56807,11 +57219,15 @@ def test_undeploy_qa_scorecard_revision_rest_interceptors(null_interceptor): transports.ContactCenterInsightsRestInterceptor, "post_undeploy_qa_scorecard_revision", ) as post, mock.patch.object( + transports.ContactCenterInsightsRestInterceptor, + "post_undeploy_qa_scorecard_revision_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ContactCenterInsightsRestInterceptor, "pre_undeploy_qa_scorecard_revision", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = contact_center_insights.UndeployQaScorecardRevisionRequest.pb( contact_center_insights.UndeployQaScorecardRevisionRequest() ) @@ -56837,6 +57253,7 
@@ def test_undeploy_qa_scorecard_revision_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.QaScorecardRevision() + post_with_metadata.return_value = resources.QaScorecardRevision(), metadata client.undeploy_qa_scorecard_revision( request, @@ -56848,6 +57265,7 @@ def test_undeploy_qa_scorecard_revision_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_qa_scorecard_revision_rest_bad_request( @@ -57049,11 +57467,15 @@ def test_list_qa_scorecard_revisions_rest_interceptors(null_interceptor): transports.ContactCenterInsightsRestInterceptor, "post_list_qa_scorecard_revisions", ) as post, mock.patch.object( + transports.ContactCenterInsightsRestInterceptor, + "post_list_qa_scorecard_revisions_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ContactCenterInsightsRestInterceptor, "pre_list_qa_scorecard_revisions", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = contact_center_insights.ListQaScorecardRevisionsRequest.pb( contact_center_insights.ListQaScorecardRevisionsRequest() ) @@ -57079,6 +57501,10 @@ def test_list_qa_scorecard_revisions_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = contact_center_insights.ListQaScorecardRevisionsResponse() + post_with_metadata.return_value = ( + contact_center_insights.ListQaScorecardRevisionsResponse(), + metadata, + ) client.list_qa_scorecard_revisions( request, @@ -57090,6 +57516,7 @@ def test_list_qa_scorecard_revisions_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_feedback_label_rest_bad_request( @@ -57267,10 +57694,14 @@ def test_create_feedback_label_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ContactCenterInsightsRestInterceptor, "post_create_feedback_label" ) as post, mock.patch.object( + transports.ContactCenterInsightsRestInterceptor, + "post_create_feedback_label_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ContactCenterInsightsRestInterceptor, "pre_create_feedback_label" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = contact_center_insights.CreateFeedbackLabelRequest.pb( contact_center_insights.CreateFeedbackLabelRequest() ) @@ -57294,6 +57725,7 @@ def test_create_feedback_label_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.FeedbackLabel() + post_with_metadata.return_value = resources.FeedbackLabel(), metadata client.create_feedback_label( request, @@ -57305,6 +57737,7 @@ def test_create_feedback_label_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_feedback_labels_rest_bad_request( @@ -57395,10 +57828,14 @@ def test_list_feedback_labels_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ContactCenterInsightsRestInterceptor, "post_list_feedback_labels" ) as post, mock.patch.object( + transports.ContactCenterInsightsRestInterceptor, + "post_list_feedback_labels_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ContactCenterInsightsRestInterceptor, "pre_list_feedback_labels" ) as pre: pre.assert_not_called() 
post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = contact_center_insights.ListFeedbackLabelsRequest.pb( contact_center_insights.ListFeedbackLabelsRequest() ) @@ -57424,6 +57861,10 @@ def test_list_feedback_labels_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = contact_center_insights.ListFeedbackLabelsResponse() + post_with_metadata.return_value = ( + contact_center_insights.ListFeedbackLabelsResponse(), + metadata, + ) client.list_feedback_labels( request, @@ -57435,6 +57876,7 @@ def test_list_feedback_labels_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_feedback_label_rest_bad_request( @@ -57526,10 +57968,14 @@ def test_get_feedback_label_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ContactCenterInsightsRestInterceptor, "post_get_feedback_label" ) as post, mock.patch.object( + transports.ContactCenterInsightsRestInterceptor, + "post_get_feedback_label_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ContactCenterInsightsRestInterceptor, "pre_get_feedback_label" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = contact_center_insights.GetFeedbackLabelRequest.pb( contact_center_insights.GetFeedbackLabelRequest() ) @@ -57553,6 +57999,7 @@ def test_get_feedback_label_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.FeedbackLabel() + post_with_metadata.return_value = resources.FeedbackLabel(), metadata client.get_feedback_label( request, @@ -57564,6 +58011,7 @@ def test_get_feedback_label_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_feedback_label_rest_bad_request( @@ -57745,10 +58193,14 @@ def test_update_feedback_label_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ContactCenterInsightsRestInterceptor, "post_update_feedback_label" ) as post, mock.patch.object( + transports.ContactCenterInsightsRestInterceptor, + "post_update_feedback_label_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ContactCenterInsightsRestInterceptor, "pre_update_feedback_label" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = contact_center_insights.UpdateFeedbackLabelRequest.pb( contact_center_insights.UpdateFeedbackLabelRequest() ) @@ -57772,6 +58224,7 @@ def test_update_feedback_label_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.FeedbackLabel() + post_with_metadata.return_value = resources.FeedbackLabel(), metadata client.update_feedback_label( request, @@ -57783,6 +58236,7 @@ def test_update_feedback_label_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_feedback_label_rest_bad_request( @@ -57982,10 +58436,14 @@ def test_list_all_feedback_labels_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ContactCenterInsightsRestInterceptor, "post_list_all_feedback_labels" ) as post, mock.patch.object( + transports.ContactCenterInsightsRestInterceptor, + "post_list_all_feedback_labels_with_metadata", + ) as post_with_metadata, mock.patch.object( 
transports.ContactCenterInsightsRestInterceptor, "pre_list_all_feedback_labels" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = contact_center_insights.ListAllFeedbackLabelsRequest.pb( contact_center_insights.ListAllFeedbackLabelsRequest() ) @@ -58011,6 +58469,10 @@ def test_list_all_feedback_labels_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = contact_center_insights.ListAllFeedbackLabelsResponse() + post_with_metadata.return_value = ( + contact_center_insights.ListAllFeedbackLabelsResponse(), + metadata, + ) client.list_all_feedback_labels( request, @@ -58022,6 +58484,7 @@ def test_list_all_feedback_labels_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_bulk_upload_feedback_labels_rest_bad_request( @@ -58103,11 +58566,15 @@ def test_bulk_upload_feedback_labels_rest_interceptors(null_interceptor): transports.ContactCenterInsightsRestInterceptor, "post_bulk_upload_feedback_labels", ) as post, mock.patch.object( + transports.ContactCenterInsightsRestInterceptor, + "post_bulk_upload_feedback_labels_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ContactCenterInsightsRestInterceptor, "pre_bulk_upload_feedback_labels", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = contact_center_insights.BulkUploadFeedbackLabelsRequest.pb( contact_center_insights.BulkUploadFeedbackLabelsRequest() ) @@ -58131,6 +58598,7 @@ def test_bulk_upload_feedback_labels_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.bulk_upload_feedback_labels( request, @@ -58142,6 +58610,7 @@ def test_bulk_upload_feedback_labels_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_bulk_download_feedback_labels_rest_bad_request( @@ -58223,11 +58692,15 @@ def test_bulk_download_feedback_labels_rest_interceptors(null_interceptor): transports.ContactCenterInsightsRestInterceptor, "post_bulk_download_feedback_labels", ) as post, mock.patch.object( + transports.ContactCenterInsightsRestInterceptor, + "post_bulk_download_feedback_labels_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ContactCenterInsightsRestInterceptor, "pre_bulk_download_feedback_labels", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = contact_center_insights.BulkDownloadFeedbackLabelsRequest.pb( contact_center_insights.BulkDownloadFeedbackLabelsRequest() ) @@ -58251,6 +58724,7 @@ def test_bulk_download_feedback_labels_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.bulk_download_feedback_labels( request, @@ -58262,6 +58736,7 @@ def test_bulk_download_feedback_labels_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_cancel_operation_rest_bad_request( diff --git a/packages/google-cloud-container/CHANGELOG.md b/packages/google-cloud-container/CHANGELOG.md index 7a32b44bbf00..688f76b26f79 100644 --- 
a/packages/google-cloud-container/CHANGELOG.md +++ b/packages/google-cloud-container/CHANGELOG.md @@ -4,6 +4,21 @@ [1]: https://pypi.org/project/google-cloud-container/#history +## [2.56.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-container-v2.55.1...google-cloud-container-v2.56.0) (2025-02-12) + + +### Features + +* Add REST Interceptors which support reading metadata ([e22e2bd](https://github.com/googleapis/google-cloud-python/commit/e22e2bde55d11d2f85e9d2caf1d152a4027f88cf)) +* Add support for reading selective GAPIC generation methods from service YAML ([e22e2bd](https://github.com/googleapis/google-cloud-python/commit/e22e2bde55d11d2f85e9d2caf1d152a4027f88cf)) + +## [2.55.1](https://github.com/googleapis/google-cloud-python/compare/google-cloud-container-v2.55.0...google-cloud-container-v2.55.1) (2025-01-27) + + +### Documentation + +* [google-cloud-container] broken (or ambiguous) markdown link ([#13468](https://github.com/googleapis/google-cloud-python/issues/13468)) ([5579df8](https://github.com/googleapis/google-cloud-python/commit/5579df80a0859fb23b644ba666e63cc2864bf25b)) + ## [2.55.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-container-v2.54.0...google-cloud-container-v2.55.0) (2024-12-12) diff --git a/packages/google-cloud-container/google/cloud/container/gapic_version.py b/packages/google-cloud-container/google/cloud/container/gapic_version.py index 4fa43f226ae2..6f2c060f10b0 100644 --- a/packages/google-cloud-container/google/cloud/container/gapic_version.py +++ b/packages/google-cloud-container/google/cloud/container/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.55.0" # {x-release-please-version} +__version__ = "2.56.0" # {x-release-please-version} diff --git a/packages/google-cloud-container/google/cloud/container_v1/gapic_version.py b/packages/google-cloud-container/google/cloud/container_v1/gapic_version.py index 4fa43f226ae2..6f2c060f10b0 100644 --- a/packages/google-cloud-container/google/cloud/container_v1/gapic_version.py +++ b/packages/google-cloud-container/google/cloud/container_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.55.0" # {x-release-please-version} +__version__ = "2.56.0" # {x-release-please-version} diff --git a/packages/google-cloud-container/google/cloud/container_v1/services/cluster_manager/client.py b/packages/google-cloud-container/google/cloud/container_v1/services/cluster_manager/client.py index 06edda889c30..cc44b0c232f4 100644 --- a/packages/google-cloud-container/google/cloud/container_v1/services/cluster_manager/client.py +++ b/packages/google-cloud-container/google/cloud/container_v1/services/cluster_manager/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -529,6 +531,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. 
+ """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. diff --git a/packages/google-cloud-container/google/cloud/container_v1/services/cluster_manager/transports/rest.py b/packages/google-cloud-container/google/cloud/container_v1/services/cluster_manager/transports/rest.py index 018d2a0beb06..15c8fec90053 100644 --- a/packages/google-cloud-container/google/cloud/container_v1/services/cluster_manager/transports/rest.py +++ b/packages/google-cloud-container/google/cloud/container_v1/services/cluster_manager/transports/rest.py @@ -373,12 +373,38 @@ def post_check_autopilot_compatibility( ) -> cluster_service.CheckAutopilotCompatibilityResponse: """Post-rpc interceptor for check_autopilot_compatibility - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_check_autopilot_compatibility_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ClusterManager server but before - it is returned to user code. + it is returned to user code. This `post_check_autopilot_compatibility` interceptor runs + before the `post_check_autopilot_compatibility_with_metadata` interceptor. """ return response + def post_check_autopilot_compatibility_with_metadata( + self, + response: cluster_service.CheckAutopilotCompatibilityResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + cluster_service.CheckAutopilotCompatibilityResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for check_autopilot_compatibility + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ClusterManager server but before it is returned to user code. + + We recommend only using this `post_check_autopilot_compatibility_with_metadata` + interceptor in new development instead of the `post_check_autopilot_compatibility` interceptor. + When both interceptors are used, this `post_check_autopilot_compatibility_with_metadata` interceptor runs after the + `post_check_autopilot_compatibility` interceptor. The (possibly modified) response returned by + `post_check_autopilot_compatibility` will be passed to + `post_check_autopilot_compatibility_with_metadata`. + """ + return response, metadata + def pre_complete_ip_rotation( self, request: cluster_service.CompleteIPRotationRequest, @@ -399,12 +425,35 @@ def post_complete_ip_rotation( ) -> cluster_service.Operation: """Post-rpc interceptor for complete_ip_rotation - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_complete_ip_rotation_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ClusterManager server but before - it is returned to user code. + it is returned to user code. This `post_complete_ip_rotation` interceptor runs + before the `post_complete_ip_rotation_with_metadata` interceptor. 
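# --------------------------------------------------------------------------
# Illustrative sketch (not part of the generated diff above).
# The _add_cred_info_for_auth_errors helper introduced in client.py appends a
# JSON blob describing the active credential to the error details of
# 401/403/404 responses, when the credential type exposes get_cred_info()
# (google-auth >= 2.35.0). Assuming the generated call sites (not shown in
# this excerpt) invoke the helper before re-raising, a caller might surface
# the extra detail roughly like this; the exact payload is an assumption,
# not a documented contract.
from google.api_core import exceptions as core_exceptions
from google.cloud import container_v1


def describe_cluster(name: str) -> None:
    client = container_v1.ClusterManagerClient()
    try:
        client.get_cluster(name=name)
    except core_exceptions.PermissionDenied as exc:  # HTTP 403
        # exc.details may now include the serialized credential info
        # appended by _add_cred_info_for_auth_errors.
        for detail in exc.details:
            print("error detail:", detail)
# --------------------------------------------------------------------------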
""" return response + def post_complete_ip_rotation_with_metadata( + self, + response: cluster_service.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[cluster_service.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for complete_ip_rotation + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ClusterManager server but before it is returned to user code. + + We recommend only using this `post_complete_ip_rotation_with_metadata` + interceptor in new development instead of the `post_complete_ip_rotation` interceptor. + When both interceptors are used, this `post_complete_ip_rotation_with_metadata` interceptor runs after the + `post_complete_ip_rotation` interceptor. The (possibly modified) response returned by + `post_complete_ip_rotation` will be passed to + `post_complete_ip_rotation_with_metadata`. + """ + return response, metadata + def pre_complete_node_pool_upgrade( self, request: cluster_service.CompleteNodePoolUpgradeRequest, @@ -439,12 +488,35 @@ def post_create_cluster( ) -> cluster_service.Operation: """Post-rpc interceptor for create_cluster - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_cluster_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ClusterManager server but before - it is returned to user code. + it is returned to user code. This `post_create_cluster` interceptor runs + before the `post_create_cluster_with_metadata` interceptor. """ return response + def post_create_cluster_with_metadata( + self, + response: cluster_service.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[cluster_service.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_cluster + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ClusterManager server but before it is returned to user code. + + We recommend only using this `post_create_cluster_with_metadata` + interceptor in new development instead of the `post_create_cluster` interceptor. + When both interceptors are used, this `post_create_cluster_with_metadata` interceptor runs after the + `post_create_cluster` interceptor. The (possibly modified) response returned by + `post_create_cluster` will be passed to + `post_create_cluster_with_metadata`. + """ + return response, metadata + def pre_create_node_pool( self, request: cluster_service.CreateNodePoolRequest, @@ -464,12 +536,35 @@ def post_create_node_pool( ) -> cluster_service.Operation: """Post-rpc interceptor for create_node_pool - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_node_pool_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ClusterManager server but before - it is returned to user code. + it is returned to user code. This `post_create_node_pool` interceptor runs + before the `post_create_node_pool_with_metadata` interceptor. 
""" return response + def post_create_node_pool_with_metadata( + self, + response: cluster_service.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[cluster_service.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_node_pool + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ClusterManager server but before it is returned to user code. + + We recommend only using this `post_create_node_pool_with_metadata` + interceptor in new development instead of the `post_create_node_pool` interceptor. + When both interceptors are used, this `post_create_node_pool_with_metadata` interceptor runs after the + `post_create_node_pool` interceptor. The (possibly modified) response returned by + `post_create_node_pool` will be passed to + `post_create_node_pool_with_metadata`. + """ + return response, metadata + def pre_delete_cluster( self, request: cluster_service.DeleteClusterRequest, @@ -489,12 +584,35 @@ def post_delete_cluster( ) -> cluster_service.Operation: """Post-rpc interceptor for delete_cluster - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_cluster_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ClusterManager server but before - it is returned to user code. + it is returned to user code. This `post_delete_cluster` interceptor runs + before the `post_delete_cluster_with_metadata` interceptor. """ return response + def post_delete_cluster_with_metadata( + self, + response: cluster_service.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[cluster_service.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_cluster + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ClusterManager server but before it is returned to user code. + + We recommend only using this `post_delete_cluster_with_metadata` + interceptor in new development instead of the `post_delete_cluster` interceptor. + When both interceptors are used, this `post_delete_cluster_with_metadata` interceptor runs after the + `post_delete_cluster` interceptor. The (possibly modified) response returned by + `post_delete_cluster` will be passed to + `post_delete_cluster_with_metadata`. + """ + return response, metadata + def pre_delete_node_pool( self, request: cluster_service.DeleteNodePoolRequest, @@ -514,12 +632,35 @@ def post_delete_node_pool( ) -> cluster_service.Operation: """Post-rpc interceptor for delete_node_pool - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_node_pool_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ClusterManager server but before - it is returned to user code. + it is returned to user code. This `post_delete_node_pool` interceptor runs + before the `post_delete_node_pool_with_metadata` interceptor. 
""" return response + def post_delete_node_pool_with_metadata( + self, + response: cluster_service.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[cluster_service.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_node_pool + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ClusterManager server but before it is returned to user code. + + We recommend only using this `post_delete_node_pool_with_metadata` + interceptor in new development instead of the `post_delete_node_pool` interceptor. + When both interceptors are used, this `post_delete_node_pool_with_metadata` interceptor runs after the + `post_delete_node_pool` interceptor. The (possibly modified) response returned by + `post_delete_node_pool` will be passed to + `post_delete_node_pool_with_metadata`. + """ + return response, metadata + def pre_get_cluster( self, request: cluster_service.GetClusterRequest, @@ -539,12 +680,35 @@ def post_get_cluster( ) -> cluster_service.Cluster: """Post-rpc interceptor for get_cluster - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_cluster_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ClusterManager server but before - it is returned to user code. + it is returned to user code. This `post_get_cluster` interceptor runs + before the `post_get_cluster_with_metadata` interceptor. """ return response + def post_get_cluster_with_metadata( + self, + response: cluster_service.Cluster, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[cluster_service.Cluster, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_cluster + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ClusterManager server but before it is returned to user code. + + We recommend only using this `post_get_cluster_with_metadata` + interceptor in new development instead of the `post_get_cluster` interceptor. + When both interceptors are used, this `post_get_cluster_with_metadata` interceptor runs after the + `post_get_cluster` interceptor. The (possibly modified) response returned by + `post_get_cluster` will be passed to + `post_get_cluster_with_metadata`. + """ + return response, metadata + def pre_get_json_web_keys( self, request: cluster_service.GetJSONWebKeysRequest, @@ -564,12 +728,37 @@ def post_get_json_web_keys( ) -> cluster_service.GetJSONWebKeysResponse: """Post-rpc interceptor for get_json_web_keys - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_json_web_keys_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ClusterManager server but before - it is returned to user code. + it is returned to user code. This `post_get_json_web_keys` interceptor runs + before the `post_get_json_web_keys_with_metadata` interceptor. 
""" return response + def post_get_json_web_keys_with_metadata( + self, + response: cluster_service.GetJSONWebKeysResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + cluster_service.GetJSONWebKeysResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for get_json_web_keys + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ClusterManager server but before it is returned to user code. + + We recommend only using this `post_get_json_web_keys_with_metadata` + interceptor in new development instead of the `post_get_json_web_keys` interceptor. + When both interceptors are used, this `post_get_json_web_keys_with_metadata` interceptor runs after the + `post_get_json_web_keys` interceptor. The (possibly modified) response returned by + `post_get_json_web_keys` will be passed to + `post_get_json_web_keys_with_metadata`. + """ + return response, metadata + def pre_get_node_pool( self, request: cluster_service.GetNodePoolRequest, @@ -589,12 +778,35 @@ def post_get_node_pool( ) -> cluster_service.NodePool: """Post-rpc interceptor for get_node_pool - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_node_pool_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ClusterManager server but before - it is returned to user code. + it is returned to user code. This `post_get_node_pool` interceptor runs + before the `post_get_node_pool_with_metadata` interceptor. """ return response + def post_get_node_pool_with_metadata( + self, + response: cluster_service.NodePool, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[cluster_service.NodePool, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_node_pool + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ClusterManager server but before it is returned to user code. + + We recommend only using this `post_get_node_pool_with_metadata` + interceptor in new development instead of the `post_get_node_pool` interceptor. + When both interceptors are used, this `post_get_node_pool_with_metadata` interceptor runs after the + `post_get_node_pool` interceptor. The (possibly modified) response returned by + `post_get_node_pool` will be passed to + `post_get_node_pool_with_metadata`. + """ + return response, metadata + def pre_get_operation( self, request: cluster_service.GetOperationRequest, @@ -614,12 +826,35 @@ def post_get_operation( ) -> cluster_service.Operation: """Post-rpc interceptor for get_operation - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_operation_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ClusterManager server but before - it is returned to user code. + it is returned to user code. This `post_get_operation` interceptor runs + before the `post_get_operation_with_metadata` interceptor. 
""" return response + def post_get_operation_with_metadata( + self, + response: cluster_service.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[cluster_service.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_operation + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ClusterManager server but before it is returned to user code. + + We recommend only using this `post_get_operation_with_metadata` + interceptor in new development instead of the `post_get_operation` interceptor. + When both interceptors are used, this `post_get_operation_with_metadata` interceptor runs after the + `post_get_operation` interceptor. The (possibly modified) response returned by + `post_get_operation` will be passed to + `post_get_operation_with_metadata`. + """ + return response, metadata + def pre_get_server_config( self, request: cluster_service.GetServerConfigRequest, @@ -639,12 +874,35 @@ def post_get_server_config( ) -> cluster_service.ServerConfig: """Post-rpc interceptor for get_server_config - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_server_config_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ClusterManager server but before - it is returned to user code. + it is returned to user code. This `post_get_server_config` interceptor runs + before the `post_get_server_config_with_metadata` interceptor. """ return response + def post_get_server_config_with_metadata( + self, + response: cluster_service.ServerConfig, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[cluster_service.ServerConfig, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_server_config + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ClusterManager server but before it is returned to user code. + + We recommend only using this `post_get_server_config_with_metadata` + interceptor in new development instead of the `post_get_server_config` interceptor. + When both interceptors are used, this `post_get_server_config_with_metadata` interceptor runs after the + `post_get_server_config` interceptor. The (possibly modified) response returned by + `post_get_server_config` will be passed to + `post_get_server_config_with_metadata`. + """ + return response, metadata + def pre_list_clusters( self, request: cluster_service.ListClustersRequest, @@ -664,12 +922,37 @@ def post_list_clusters( ) -> cluster_service.ListClustersResponse: """Post-rpc interceptor for list_clusters - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_clusters_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ClusterManager server but before - it is returned to user code. + it is returned to user code. This `post_list_clusters` interceptor runs + before the `post_list_clusters_with_metadata` interceptor. 
""" return response + def post_list_clusters_with_metadata( + self, + response: cluster_service.ListClustersResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + cluster_service.ListClustersResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_clusters + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ClusterManager server but before it is returned to user code. + + We recommend only using this `post_list_clusters_with_metadata` + interceptor in new development instead of the `post_list_clusters` interceptor. + When both interceptors are used, this `post_list_clusters_with_metadata` interceptor runs after the + `post_list_clusters` interceptor. The (possibly modified) response returned by + `post_list_clusters` will be passed to + `post_list_clusters_with_metadata`. + """ + return response, metadata + def pre_list_node_pools( self, request: cluster_service.ListNodePoolsRequest, @@ -689,12 +972,37 @@ def post_list_node_pools( ) -> cluster_service.ListNodePoolsResponse: """Post-rpc interceptor for list_node_pools - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_node_pools_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ClusterManager server but before - it is returned to user code. + it is returned to user code. This `post_list_node_pools` interceptor runs + before the `post_list_node_pools_with_metadata` interceptor. """ return response + def post_list_node_pools_with_metadata( + self, + response: cluster_service.ListNodePoolsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + cluster_service.ListNodePoolsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_node_pools + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ClusterManager server but before it is returned to user code. + + We recommend only using this `post_list_node_pools_with_metadata` + interceptor in new development instead of the `post_list_node_pools` interceptor. + When both interceptors are used, this `post_list_node_pools_with_metadata` interceptor runs after the + `post_list_node_pools` interceptor. The (possibly modified) response returned by + `post_list_node_pools` will be passed to + `post_list_node_pools_with_metadata`. + """ + return response, metadata + def pre_list_operations( self, request: cluster_service.ListOperationsRequest, @@ -714,12 +1022,37 @@ def post_list_operations( ) -> cluster_service.ListOperationsResponse: """Post-rpc interceptor for list_operations - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_operations_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ClusterManager server but before - it is returned to user code. + it is returned to user code. This `post_list_operations` interceptor runs + before the `post_list_operations_with_metadata` interceptor. 
""" return response + def post_list_operations_with_metadata( + self, + response: cluster_service.ListOperationsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + cluster_service.ListOperationsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_operations + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ClusterManager server but before it is returned to user code. + + We recommend only using this `post_list_operations_with_metadata` + interceptor in new development instead of the `post_list_operations` interceptor. + When both interceptors are used, this `post_list_operations_with_metadata` interceptor runs after the + `post_list_operations` interceptor. The (possibly modified) response returned by + `post_list_operations` will be passed to + `post_list_operations_with_metadata`. + """ + return response, metadata + def pre_list_usable_subnetworks( self, request: cluster_service.ListUsableSubnetworksRequest, @@ -740,12 +1073,38 @@ def post_list_usable_subnetworks( ) -> cluster_service.ListUsableSubnetworksResponse: """Post-rpc interceptor for list_usable_subnetworks - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_usable_subnetworks_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ClusterManager server but before - it is returned to user code. + it is returned to user code. This `post_list_usable_subnetworks` interceptor runs + before the `post_list_usable_subnetworks_with_metadata` interceptor. """ return response + def post_list_usable_subnetworks_with_metadata( + self, + response: cluster_service.ListUsableSubnetworksResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + cluster_service.ListUsableSubnetworksResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_usable_subnetworks + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ClusterManager server but before it is returned to user code. + + We recommend only using this `post_list_usable_subnetworks_with_metadata` + interceptor in new development instead of the `post_list_usable_subnetworks` interceptor. + When both interceptors are used, this `post_list_usable_subnetworks_with_metadata` interceptor runs after the + `post_list_usable_subnetworks` interceptor. The (possibly modified) response returned by + `post_list_usable_subnetworks` will be passed to + `post_list_usable_subnetworks_with_metadata`. + """ + return response, metadata + def pre_rollback_node_pool_upgrade( self, request: cluster_service.RollbackNodePoolUpgradeRequest, @@ -766,12 +1125,35 @@ def post_rollback_node_pool_upgrade( ) -> cluster_service.Operation: """Post-rpc interceptor for rollback_node_pool_upgrade - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_rollback_node_pool_upgrade_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ClusterManager server but before - it is returned to user code. + it is returned to user code. This `post_rollback_node_pool_upgrade` interceptor runs + before the `post_rollback_node_pool_upgrade_with_metadata` interceptor. 
""" return response + def post_rollback_node_pool_upgrade_with_metadata( + self, + response: cluster_service.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[cluster_service.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for rollback_node_pool_upgrade + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ClusterManager server but before it is returned to user code. + + We recommend only using this `post_rollback_node_pool_upgrade_with_metadata` + interceptor in new development instead of the `post_rollback_node_pool_upgrade` interceptor. + When both interceptors are used, this `post_rollback_node_pool_upgrade_with_metadata` interceptor runs after the + `post_rollback_node_pool_upgrade` interceptor. The (possibly modified) response returned by + `post_rollback_node_pool_upgrade` will be passed to + `post_rollback_node_pool_upgrade_with_metadata`. + """ + return response, metadata + def pre_set_addons_config( self, request: cluster_service.SetAddonsConfigRequest, @@ -791,12 +1173,35 @@ def post_set_addons_config( ) -> cluster_service.Operation: """Post-rpc interceptor for set_addons_config - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_set_addons_config_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ClusterManager server but before - it is returned to user code. + it is returned to user code. This `post_set_addons_config` interceptor runs + before the `post_set_addons_config_with_metadata` interceptor. """ return response + def post_set_addons_config_with_metadata( + self, + response: cluster_service.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[cluster_service.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for set_addons_config + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ClusterManager server but before it is returned to user code. + + We recommend only using this `post_set_addons_config_with_metadata` + interceptor in new development instead of the `post_set_addons_config` interceptor. + When both interceptors are used, this `post_set_addons_config_with_metadata` interceptor runs after the + `post_set_addons_config` interceptor. The (possibly modified) response returned by + `post_set_addons_config` will be passed to + `post_set_addons_config_with_metadata`. + """ + return response, metadata + def pre_set_labels( self, request: cluster_service.SetLabelsRequest, @@ -816,12 +1221,35 @@ def post_set_labels( ) -> cluster_service.Operation: """Post-rpc interceptor for set_labels - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_set_labels_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ClusterManager server but before - it is returned to user code. + it is returned to user code. This `post_set_labels` interceptor runs + before the `post_set_labels_with_metadata` interceptor. 
""" return response + def post_set_labels_with_metadata( + self, + response: cluster_service.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[cluster_service.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for set_labels + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ClusterManager server but before it is returned to user code. + + We recommend only using this `post_set_labels_with_metadata` + interceptor in new development instead of the `post_set_labels` interceptor. + When both interceptors are used, this `post_set_labels_with_metadata` interceptor runs after the + `post_set_labels` interceptor. The (possibly modified) response returned by + `post_set_labels` will be passed to + `post_set_labels_with_metadata`. + """ + return response, metadata + def pre_set_legacy_abac( self, request: cluster_service.SetLegacyAbacRequest, @@ -841,12 +1269,35 @@ def post_set_legacy_abac( ) -> cluster_service.Operation: """Post-rpc interceptor for set_legacy_abac - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_set_legacy_abac_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ClusterManager server but before - it is returned to user code. + it is returned to user code. This `post_set_legacy_abac` interceptor runs + before the `post_set_legacy_abac_with_metadata` interceptor. """ return response + def post_set_legacy_abac_with_metadata( + self, + response: cluster_service.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[cluster_service.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for set_legacy_abac + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ClusterManager server but before it is returned to user code. + + We recommend only using this `post_set_legacy_abac_with_metadata` + interceptor in new development instead of the `post_set_legacy_abac` interceptor. + When both interceptors are used, this `post_set_legacy_abac_with_metadata` interceptor runs after the + `post_set_legacy_abac` interceptor. The (possibly modified) response returned by + `post_set_legacy_abac` will be passed to + `post_set_legacy_abac_with_metadata`. + """ + return response, metadata + def pre_set_locations( self, request: cluster_service.SetLocationsRequest, @@ -866,12 +1317,35 @@ def post_set_locations( ) -> cluster_service.Operation: """Post-rpc interceptor for set_locations - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_set_locations_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ClusterManager server but before - it is returned to user code. + it is returned to user code. This `post_set_locations` interceptor runs + before the `post_set_locations_with_metadata` interceptor. """ return response + def post_set_locations_with_metadata( + self, + response: cluster_service.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[cluster_service.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for set_locations + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ClusterManager server but before it is returned to user code. 
+ + We recommend only using this `post_set_locations_with_metadata` + interceptor in new development instead of the `post_set_locations` interceptor. + When both interceptors are used, this `post_set_locations_with_metadata` interceptor runs after the + `post_set_locations` interceptor. The (possibly modified) response returned by + `post_set_locations` will be passed to + `post_set_locations_with_metadata`. + """ + return response, metadata + def pre_set_logging_service( self, request: cluster_service.SetLoggingServiceRequest, @@ -892,12 +1366,35 @@ def post_set_logging_service( ) -> cluster_service.Operation: """Post-rpc interceptor for set_logging_service - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_set_logging_service_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ClusterManager server but before - it is returned to user code. + it is returned to user code. This `post_set_logging_service` interceptor runs + before the `post_set_logging_service_with_metadata` interceptor. """ return response + def post_set_logging_service_with_metadata( + self, + response: cluster_service.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[cluster_service.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for set_logging_service + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ClusterManager server but before it is returned to user code. + + We recommend only using this `post_set_logging_service_with_metadata` + interceptor in new development instead of the `post_set_logging_service` interceptor. + When both interceptors are used, this `post_set_logging_service_with_metadata` interceptor runs after the + `post_set_logging_service` interceptor. The (possibly modified) response returned by + `post_set_logging_service` will be passed to + `post_set_logging_service_with_metadata`. + """ + return response, metadata + def pre_set_maintenance_policy( self, request: cluster_service.SetMaintenancePolicyRequest, @@ -918,12 +1415,35 @@ def post_set_maintenance_policy( ) -> cluster_service.Operation: """Post-rpc interceptor for set_maintenance_policy - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_set_maintenance_policy_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ClusterManager server but before - it is returned to user code. + it is returned to user code. This `post_set_maintenance_policy` interceptor runs + before the `post_set_maintenance_policy_with_metadata` interceptor. """ return response + def post_set_maintenance_policy_with_metadata( + self, + response: cluster_service.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[cluster_service.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for set_maintenance_policy + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ClusterManager server but before it is returned to user code. + + We recommend only using this `post_set_maintenance_policy_with_metadata` + interceptor in new development instead of the `post_set_maintenance_policy` interceptor. + When both interceptors are used, this `post_set_maintenance_policy_with_metadata` interceptor runs after the + `post_set_maintenance_policy` interceptor. 
The (possibly modified) response returned by + `post_set_maintenance_policy` will be passed to + `post_set_maintenance_policy_with_metadata`. + """ + return response, metadata + def pre_set_master_auth( self, request: cluster_service.SetMasterAuthRequest, @@ -943,12 +1463,35 @@ def post_set_master_auth( ) -> cluster_service.Operation: """Post-rpc interceptor for set_master_auth - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_set_master_auth_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ClusterManager server but before - it is returned to user code. + it is returned to user code. This `post_set_master_auth` interceptor runs + before the `post_set_master_auth_with_metadata` interceptor. """ return response + def post_set_master_auth_with_metadata( + self, + response: cluster_service.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[cluster_service.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for set_master_auth + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ClusterManager server but before it is returned to user code. + + We recommend only using this `post_set_master_auth_with_metadata` + interceptor in new development instead of the `post_set_master_auth` interceptor. + When both interceptors are used, this `post_set_master_auth_with_metadata` interceptor runs after the + `post_set_master_auth` interceptor. The (possibly modified) response returned by + `post_set_master_auth` will be passed to + `post_set_master_auth_with_metadata`. + """ + return response, metadata + def pre_set_monitoring_service( self, request: cluster_service.SetMonitoringServiceRequest, @@ -969,12 +1512,35 @@ def post_set_monitoring_service( ) -> cluster_service.Operation: """Post-rpc interceptor for set_monitoring_service - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_set_monitoring_service_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ClusterManager server but before - it is returned to user code. + it is returned to user code. This `post_set_monitoring_service` interceptor runs + before the `post_set_monitoring_service_with_metadata` interceptor. """ return response + def post_set_monitoring_service_with_metadata( + self, + response: cluster_service.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[cluster_service.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for set_monitoring_service + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ClusterManager server but before it is returned to user code. + + We recommend only using this `post_set_monitoring_service_with_metadata` + interceptor in new development instead of the `post_set_monitoring_service` interceptor. + When both interceptors are used, this `post_set_monitoring_service_with_metadata` interceptor runs after the + `post_set_monitoring_service` interceptor. The (possibly modified) response returned by + `post_set_monitoring_service` will be passed to + `post_set_monitoring_service_with_metadata`. 
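# --------------------------------------------------------------------------
# Illustrative sketch (not part of the generated diff above).
# Every post_*_with_metadata hook added throughout this file receives the
# decoded response plus the HTTP response headers as (key, value) tuples,
# and runs after the matching deprecated post_* hook. A minimal custom
# interceptor for one RPC (create_cluster, chosen only as an example) could
# look like this; the header name inspected below is hypothetical, not
# something the service guarantees.
from typing import Sequence, Tuple, Union

from google.cloud.container_v1.services.cluster_manager.transports.rest import (
    ClusterManagerRestInterceptor,
)
from google.cloud.container_v1.types import cluster_service


class LoggingInterceptor(ClusterManagerRestInterceptor):
    def post_create_cluster(
        self, response: cluster_service.Operation
    ) -> cluster_service.Operation:
        # Deprecated hook: still runs first, and its return value is what
        # post_create_cluster_with_metadata receives.
        return response

    def post_create_cluster_with_metadata(
        self,
        response: cluster_service.Operation,
        metadata: Sequence[Tuple[str, Union[str, bytes]]],
    ) -> Tuple[cluster_service.Operation, Sequence[Tuple[str, Union[str, bytes]]]]:
        # Read response headers without mutating the response itself.
        for key, value in metadata:
            if key.lower() == "x-goog-request-id":  # example header only
                print("create_cluster request id:", value)
        return response, metadata
# --------------------------------------------------------------------------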
+ """ + return response, metadata + def pre_set_network_policy( self, request: cluster_service.SetNetworkPolicyRequest, @@ -994,12 +1560,35 @@ def post_set_network_policy( ) -> cluster_service.Operation: """Post-rpc interceptor for set_network_policy - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_set_network_policy_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ClusterManager server but before - it is returned to user code. + it is returned to user code. This `post_set_network_policy` interceptor runs + before the `post_set_network_policy_with_metadata` interceptor. """ return response + def post_set_network_policy_with_metadata( + self, + response: cluster_service.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[cluster_service.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for set_network_policy + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ClusterManager server but before it is returned to user code. + + We recommend only using this `post_set_network_policy_with_metadata` + interceptor in new development instead of the `post_set_network_policy` interceptor. + When both interceptors are used, this `post_set_network_policy_with_metadata` interceptor runs after the + `post_set_network_policy` interceptor. The (possibly modified) response returned by + `post_set_network_policy` will be passed to + `post_set_network_policy_with_metadata`. + """ + return response, metadata + def pre_set_node_pool_autoscaling( self, request: cluster_service.SetNodePoolAutoscalingRequest, @@ -1020,12 +1609,35 @@ def post_set_node_pool_autoscaling( ) -> cluster_service.Operation: """Post-rpc interceptor for set_node_pool_autoscaling - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_set_node_pool_autoscaling_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ClusterManager server but before - it is returned to user code. + it is returned to user code. This `post_set_node_pool_autoscaling` interceptor runs + before the `post_set_node_pool_autoscaling_with_metadata` interceptor. """ return response + def post_set_node_pool_autoscaling_with_metadata( + self, + response: cluster_service.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[cluster_service.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for set_node_pool_autoscaling + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ClusterManager server but before it is returned to user code. + + We recommend only using this `post_set_node_pool_autoscaling_with_metadata` + interceptor in new development instead of the `post_set_node_pool_autoscaling` interceptor. + When both interceptors are used, this `post_set_node_pool_autoscaling_with_metadata` interceptor runs after the + `post_set_node_pool_autoscaling` interceptor. The (possibly modified) response returned by + `post_set_node_pool_autoscaling` will be passed to + `post_set_node_pool_autoscaling_with_metadata`. 
+ """ + return response, metadata + def pre_set_node_pool_management( self, request: cluster_service.SetNodePoolManagementRequest, @@ -1046,12 +1658,35 @@ def post_set_node_pool_management( ) -> cluster_service.Operation: """Post-rpc interceptor for set_node_pool_management - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_set_node_pool_management_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ClusterManager server but before - it is returned to user code. + it is returned to user code. This `post_set_node_pool_management` interceptor runs + before the `post_set_node_pool_management_with_metadata` interceptor. """ return response + def post_set_node_pool_management_with_metadata( + self, + response: cluster_service.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[cluster_service.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for set_node_pool_management + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ClusterManager server but before it is returned to user code. + + We recommend only using this `post_set_node_pool_management_with_metadata` + interceptor in new development instead of the `post_set_node_pool_management` interceptor. + When both interceptors are used, this `post_set_node_pool_management_with_metadata` interceptor runs after the + `post_set_node_pool_management` interceptor. The (possibly modified) response returned by + `post_set_node_pool_management` will be passed to + `post_set_node_pool_management_with_metadata`. + """ + return response, metadata + def pre_set_node_pool_size( self, request: cluster_service.SetNodePoolSizeRequest, @@ -1071,12 +1706,35 @@ def post_set_node_pool_size( ) -> cluster_service.Operation: """Post-rpc interceptor for set_node_pool_size - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_set_node_pool_size_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ClusterManager server but before - it is returned to user code. + it is returned to user code. This `post_set_node_pool_size` interceptor runs + before the `post_set_node_pool_size_with_metadata` interceptor. """ return response + def post_set_node_pool_size_with_metadata( + self, + response: cluster_service.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[cluster_service.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for set_node_pool_size + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ClusterManager server but before it is returned to user code. + + We recommend only using this `post_set_node_pool_size_with_metadata` + interceptor in new development instead of the `post_set_node_pool_size` interceptor. + When both interceptors are used, this `post_set_node_pool_size_with_metadata` interceptor runs after the + `post_set_node_pool_size` interceptor. The (possibly modified) response returned by + `post_set_node_pool_size` will be passed to + `post_set_node_pool_size_with_metadata`. 
+ """ + return response, metadata + def pre_start_ip_rotation( self, request: cluster_service.StartIPRotationRequest, @@ -1096,12 +1754,35 @@ def post_start_ip_rotation( ) -> cluster_service.Operation: """Post-rpc interceptor for start_ip_rotation - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_start_ip_rotation_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ClusterManager server but before - it is returned to user code. + it is returned to user code. This `post_start_ip_rotation` interceptor runs + before the `post_start_ip_rotation_with_metadata` interceptor. """ return response + def post_start_ip_rotation_with_metadata( + self, + response: cluster_service.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[cluster_service.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for start_ip_rotation + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ClusterManager server but before it is returned to user code. + + We recommend only using this `post_start_ip_rotation_with_metadata` + interceptor in new development instead of the `post_start_ip_rotation` interceptor. + When both interceptors are used, this `post_start_ip_rotation_with_metadata` interceptor runs after the + `post_start_ip_rotation` interceptor. The (possibly modified) response returned by + `post_start_ip_rotation` will be passed to + `post_start_ip_rotation_with_metadata`. + """ + return response, metadata + def pre_update_cluster( self, request: cluster_service.UpdateClusterRequest, @@ -1121,12 +1802,35 @@ def post_update_cluster( ) -> cluster_service.Operation: """Post-rpc interceptor for update_cluster - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_cluster_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ClusterManager server but before - it is returned to user code. + it is returned to user code. This `post_update_cluster` interceptor runs + before the `post_update_cluster_with_metadata` interceptor. """ return response + def post_update_cluster_with_metadata( + self, + response: cluster_service.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[cluster_service.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_cluster + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ClusterManager server but before it is returned to user code. + + We recommend only using this `post_update_cluster_with_metadata` + interceptor in new development instead of the `post_update_cluster` interceptor. + When both interceptors are used, this `post_update_cluster_with_metadata` interceptor runs after the + `post_update_cluster` interceptor. The (possibly modified) response returned by + `post_update_cluster` will be passed to + `post_update_cluster_with_metadata`. + """ + return response, metadata + def pre_update_master( self, request: cluster_service.UpdateMasterRequest, @@ -1146,12 +1850,35 @@ def post_update_master( ) -> cluster_service.Operation: """Post-rpc interceptor for update_master - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_master_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response after it is returned by the ClusterManager server but before - it is returned to user code. + it is returned to user code. This `post_update_master` interceptor runs + before the `post_update_master_with_metadata` interceptor. """ return response + def post_update_master_with_metadata( + self, + response: cluster_service.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[cluster_service.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_master + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ClusterManager server but before it is returned to user code. + + We recommend only using this `post_update_master_with_metadata` + interceptor in new development instead of the `post_update_master` interceptor. + When both interceptors are used, this `post_update_master_with_metadata` interceptor runs after the + `post_update_master` interceptor. The (possibly modified) response returned by + `post_update_master` will be passed to + `post_update_master_with_metadata`. + """ + return response, metadata + def pre_update_node_pool( self, request: cluster_service.UpdateNodePoolRequest, @@ -1171,12 +1898,35 @@ def post_update_node_pool( ) -> cluster_service.Operation: """Post-rpc interceptor for update_node_pool - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_node_pool_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ClusterManager server but before - it is returned to user code. + it is returned to user code. This `post_update_node_pool` interceptor runs + before the `post_update_node_pool_with_metadata` interceptor. """ return response + def post_update_node_pool_with_metadata( + self, + response: cluster_service.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[cluster_service.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_node_pool + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ClusterManager server but before it is returned to user code. + + We recommend only using this `post_update_node_pool_with_metadata` + interceptor in new development instead of the `post_update_node_pool` interceptor. + When both interceptors are used, this `post_update_node_pool_with_metadata` interceptor runs after the + `post_update_node_pool` interceptor. The (possibly modified) response returned by + `post_update_node_pool` will be passed to + `post_update_node_pool_with_metadata`. 
+ """ + return response, metadata + @dataclasses.dataclass class ClusterManagerRestStub: @@ -1506,6 +2256,13 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_check_autopilot_compatibility(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_check_autopilot_compatibility_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1663,6 +2420,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_complete_ip_rotation(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_complete_ip_rotation_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1934,6 +2695,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_cluster(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_cluster_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2087,6 +2852,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_node_pool(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_node_pool_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2232,6 +3001,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_cluster(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_cluster_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2379,6 +3152,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_node_pool(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_node_pool_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2522,6 +3299,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_cluster(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_cluster_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2671,6 +3452,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_json_web_keys(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_json_web_keys_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2824,6 +3609,10 @@ def __call__( 
json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_node_pool(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_node_pool_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2969,6 +3758,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_operation(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_operation_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3114,6 +3907,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_server_config(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_server_config_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3256,6 +4053,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_clusters(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_clusters_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3401,6 +4202,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_node_pools(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_node_pools_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3546,6 +4351,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_operations(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_operations_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3696,6 +4505,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_usable_subnetworks(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_usable_subnetworks_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3858,6 +4671,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_rollback_node_pool_upgrade(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_rollback_node_pool_upgrade_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4011,6 +4828,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_set_addons_config(resp) + response_metadata = [(k, 
str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_set_addons_config_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4171,6 +4992,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_set_labels(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_set_labels_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4323,6 +5148,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_set_legacy_abac(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_set_legacy_abac_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4474,6 +5303,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_set_locations(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_set_locations_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4627,6 +5460,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_set_logging_service(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_set_logging_service_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4781,6 +5618,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_set_maintenance_policy(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_set_maintenance_policy_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4932,6 +5773,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_set_master_auth(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_set_master_auth_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -5086,6 +5931,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_set_monitoring_service(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_set_monitoring_service_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -5240,6 +6089,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_set_network_policy(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_set_network_policy_with_metadata( + resp, response_metadata + ) if 
CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -5396,6 +6249,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_set_node_pool_autoscaling(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_set_node_pool_autoscaling_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -5551,6 +6408,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_set_node_pool_management(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_set_node_pool_management_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -5704,6 +6565,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_set_node_pool_size(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_set_node_pool_size_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -5859,6 +6724,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_start_ip_rotation(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_start_ip_rotation_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -6010,6 +6879,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_cluster(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_cluster_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -6161,6 +7034,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_master(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_master_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -6314,6 +7191,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_node_pool(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_node_pool_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-container/google/cloud/container_v1/types/cluster_service.py b/packages/google-cloud-container/google/cloud/container_v1/types/cluster_service.py index 7e4d3f93d382..8c944e004ef3 100644 --- a/packages/google-cloud-container/google/cloud/container_v1/types/cluster_service.py +++ b/packages/google-cloud-container/google/cloud/container_v1/types/cluster_service.py @@ -8342,7 +8342,7 @@ class NetworkConfig(proto.Message): Attributes: network 
(str): Output only. The relative name of the Google Compute Engine - [network]`google.container.v1.NetworkConfig.network `__ + `network `__ to which the cluster is connected. Example: projects/my-project/global/networks/my-network subnetwork (str): diff --git a/packages/google-cloud-container/google/cloud/container_v1beta1/gapic_version.py b/packages/google-cloud-container/google/cloud/container_v1beta1/gapic_version.py index 4fa43f226ae2..6f2c060f10b0 100644 --- a/packages/google-cloud-container/google/cloud/container_v1beta1/gapic_version.py +++ b/packages/google-cloud-container/google/cloud/container_v1beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.55.0" # {x-release-please-version} +__version__ = "2.56.0" # {x-release-please-version} diff --git a/packages/google-cloud-container/google/cloud/container_v1beta1/services/cluster_manager/client.py b/packages/google-cloud-container/google/cloud/container_v1beta1/services/cluster_manager/client.py index e4af4ca8b98d..30763066673d 100644 --- a/packages/google-cloud-container/google/cloud/container_v1beta1/services/cluster_manager/client.py +++ b/packages/google-cloud-container/google/cloud/container_v1beta1/services/cluster_manager/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -479,6 +481,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. diff --git a/packages/google-cloud-container/google/cloud/container_v1beta1/types/cluster_service.py b/packages/google-cloud-container/google/cloud/container_v1beta1/types/cluster_service.py index f6187795c40e..98a387be93ab 100644 --- a/packages/google-cloud-container/google/cloud/container_v1beta1/types/cluster_service.py +++ b/packages/google-cloud-container/google/cloud/container_v1beta1/types/cluster_service.py @@ -8945,7 +8945,7 @@ class NetworkConfig(proto.Message): Attributes: network (str): Output only. The relative name of the Google Compute Engine - [network]`google.container.v1beta1.NetworkConfig.network `__ + `network `__ to which the cluster is connected. 
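As a small aside, a hedged example (the resource names are made up) of reading the Output only network field documented here from a retrieved cluster:

from google.cloud import container_v1

client = container_v1.ClusterManagerClient()
cluster = client.get_cluster(
    name="projects/my-project/locations/us-central1/clusters/my-cluster"
)
# Prints a relative network name such as "projects/my-project/global/networks/my-network".
print(cluster.network_config.network)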
Example: projects/my-project/global/networks/my-network subnetwork (str): diff --git a/packages/google-cloud-container/samples/generated_samples/snippet_metadata_google.container.v1.json b/packages/google-cloud-container/samples/generated_samples/snippet_metadata_google.container.v1.json index 94f59ca01be3..ba10ba391651 100644 --- a/packages/google-cloud-container/samples/generated_samples/snippet_metadata_google.container.v1.json +++ b/packages/google-cloud-container/samples/generated_samples/snippet_metadata_google.container.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-container", - "version": "2.55.0" + "version": "2.56.0" }, "snippets": [ { diff --git a/packages/google-cloud-container/samples/generated_samples/snippet_metadata_google.container.v1beta1.json b/packages/google-cloud-container/samples/generated_samples/snippet_metadata_google.container.v1beta1.json index c0c9270a83a4..9d3c14c48bdd 100644 --- a/packages/google-cloud-container/samples/generated_samples/snippet_metadata_google.container.v1beta1.json +++ b/packages/google-cloud-container/samples/generated_samples/snippet_metadata_google.container.v1beta1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-container", - "version": "2.55.0" + "version": "2.56.0" }, "snippets": [ { diff --git a/packages/google-cloud-container/tests/unit/gapic/container_v1/test_cluster_manager.py b/packages/google-cloud-container/tests/unit/gapic/container_v1/test_cluster_manager.py index 0e48fd43902f..457a9efc153a 100644 --- a/packages/google-cloud-container/tests/unit/gapic/container_v1/test_cluster_manager.py +++ b/packages/google-cloud-container/tests/unit/gapic/container_v1/test_cluster_manager.py @@ -65,6 +65,13 @@ ) from google.cloud.container_v1.types import cluster_service +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -323,6 +330,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
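Before the tests below, a hedged sketch of what the new `_add_cred_info_for_auth_errors` helper means for callers: when a call fails with 401/403/404 and the credentials expose `get_cred_info()` (google-auth >= 2.35.0), the error details gain a JSON string describing the credential. The resource name here is invented, and the exact content of the appended detail depends on the credential type.

from google.api_core import exceptions as core_exceptions
from google.cloud import container_v1

client = container_v1.ClusterManagerClient()
try:
    client.get_cluster(
        name="projects/my-project/locations/us-central1/clusters/my-cluster"
    )
except core_exceptions.GoogleAPICallError as exc:
    # For UNAUTHORIZED/FORBIDDEN/NOT_FOUND, exc.details may now end with a JSON
    # blob like CRED_INFO_STRING above (credential source, type, principal).
    print(exc.code, exc.details)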
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = ClusterManagerClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = ClusterManagerClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -20427,10 +20477,13 @@ def test_list_clusters_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ClusterManagerRestInterceptor, "post_list_clusters" ) as post, mock.patch.object( + transports.ClusterManagerRestInterceptor, "post_list_clusters_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ClusterManagerRestInterceptor, "pre_list_clusters" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = cluster_service.ListClustersRequest.pb( cluster_service.ListClustersRequest() ) @@ -20456,6 +20509,10 @@ def test_list_clusters_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = cluster_service.ListClustersResponse() + post_with_metadata.return_value = ( + cluster_service.ListClustersResponse(), + metadata, + ) client.list_clusters( request, @@ -20467,6 +20524,7 @@ def test_list_clusters_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_cluster_rest_bad_request(request_type=cluster_service.GetClusterRequest): @@ -20611,10 +20669,13 @@ def test_get_cluster_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ClusterManagerRestInterceptor, "post_get_cluster" ) as post, mock.patch.object( + transports.ClusterManagerRestInterceptor, "post_get_cluster_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ClusterManagerRestInterceptor, "pre_get_cluster" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = cluster_service.GetClusterRequest.pb( cluster_service.GetClusterRequest() ) @@ -20638,6 +20699,7 @@ def test_get_cluster_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = cluster_service.Cluster() + post_with_metadata.return_value = cluster_service.Cluster(), metadata client.get_cluster( request, @@ -20649,6 +20711,7 @@ def test_get_cluster_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + 
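A standalone note on the `mock.Mock([...])` idiom used in the tests above: passing a list positionally sets the mock's spec, so `hasattr()` only succeeds for names in that list. That is what makes the "credentials without get_cred_info" branch exercisable; a quick runnable check:

from unittest import mock

with_info = mock.Mock(["get_cred_info"])
with_info.get_cred_info = mock.Mock(return_value={"principal": "sa@example.com"})
assert hasattr(with_info, "get_cred_info")

without_info = mock.Mock([])
assert not hasattr(without_info, "get_cred_info")  # empty spec list, so the attribute is absent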
post_with_metadata.assert_called_once() def test_create_cluster_rest_bad_request( @@ -20753,10 +20816,13 @@ def test_create_cluster_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ClusterManagerRestInterceptor, "post_create_cluster" ) as post, mock.patch.object( + transports.ClusterManagerRestInterceptor, "post_create_cluster_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ClusterManagerRestInterceptor, "pre_create_cluster" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = cluster_service.CreateClusterRequest.pb( cluster_service.CreateClusterRequest() ) @@ -20780,6 +20846,7 @@ def test_create_cluster_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = cluster_service.Operation() + post_with_metadata.return_value = cluster_service.Operation(), metadata client.create_cluster( request, @@ -20791,6 +20858,7 @@ def test_create_cluster_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_cluster_rest_bad_request( @@ -20895,10 +20963,13 @@ def test_update_cluster_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ClusterManagerRestInterceptor, "post_update_cluster" ) as post, mock.patch.object( + transports.ClusterManagerRestInterceptor, "post_update_cluster_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ClusterManagerRestInterceptor, "pre_update_cluster" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = cluster_service.UpdateClusterRequest.pb( cluster_service.UpdateClusterRequest() ) @@ -20922,6 +20993,7 @@ def test_update_cluster_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = cluster_service.Operation() + post_with_metadata.return_value = cluster_service.Operation(), metadata client.update_cluster( request, @@ -20933,6 +21005,7 @@ def test_update_cluster_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_node_pool_rest_bad_request( @@ -21041,10 +21114,13 @@ def test_update_node_pool_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ClusterManagerRestInterceptor, "post_update_node_pool" ) as post, mock.patch.object( + transports.ClusterManagerRestInterceptor, "post_update_node_pool_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ClusterManagerRestInterceptor, "pre_update_node_pool" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = cluster_service.UpdateNodePoolRequest.pb( cluster_service.UpdateNodePoolRequest() ) @@ -21068,6 +21144,7 @@ def test_update_node_pool_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = cluster_service.Operation() + post_with_metadata.return_value = cluster_service.Operation(), metadata client.update_node_pool( request, @@ -21079,6 +21156,7 @@ def test_update_node_pool_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_set_node_pool_autoscaling_rest_bad_request( @@ -21187,10 +21265,14 @@ def test_set_node_pool_autoscaling_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( 
transports.ClusterManagerRestInterceptor, "post_set_node_pool_autoscaling" ) as post, mock.patch.object( + transports.ClusterManagerRestInterceptor, + "post_set_node_pool_autoscaling_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ClusterManagerRestInterceptor, "pre_set_node_pool_autoscaling" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = cluster_service.SetNodePoolAutoscalingRequest.pb( cluster_service.SetNodePoolAutoscalingRequest() ) @@ -21214,6 +21296,7 @@ def test_set_node_pool_autoscaling_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = cluster_service.Operation() + post_with_metadata.return_value = cluster_service.Operation(), metadata client.set_node_pool_autoscaling( request, @@ -21225,6 +21308,7 @@ def test_set_node_pool_autoscaling_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_set_logging_service_rest_bad_request( @@ -21329,10 +21413,14 @@ def test_set_logging_service_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ClusterManagerRestInterceptor, "post_set_logging_service" ) as post, mock.patch.object( + transports.ClusterManagerRestInterceptor, + "post_set_logging_service_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ClusterManagerRestInterceptor, "pre_set_logging_service" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = cluster_service.SetLoggingServiceRequest.pb( cluster_service.SetLoggingServiceRequest() ) @@ -21356,6 +21444,7 @@ def test_set_logging_service_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = cluster_service.Operation() + post_with_metadata.return_value = cluster_service.Operation(), metadata client.set_logging_service( request, @@ -21367,6 +21456,7 @@ def test_set_logging_service_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_set_monitoring_service_rest_bad_request( @@ -21471,10 +21561,14 @@ def test_set_monitoring_service_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ClusterManagerRestInterceptor, "post_set_monitoring_service" ) as post, mock.patch.object( + transports.ClusterManagerRestInterceptor, + "post_set_monitoring_service_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ClusterManagerRestInterceptor, "pre_set_monitoring_service" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = cluster_service.SetMonitoringServiceRequest.pb( cluster_service.SetMonitoringServiceRequest() ) @@ -21498,6 +21592,7 @@ def test_set_monitoring_service_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = cluster_service.Operation() + post_with_metadata.return_value = cluster_service.Operation(), metadata client.set_monitoring_service( request, @@ -21509,6 +21604,7 @@ def test_set_monitoring_service_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_set_addons_config_rest_bad_request( @@ -21613,10 +21709,13 @@ def test_set_addons_config_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( 
transports.ClusterManagerRestInterceptor, "post_set_addons_config" ) as post, mock.patch.object( + transports.ClusterManagerRestInterceptor, "post_set_addons_config_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ClusterManagerRestInterceptor, "pre_set_addons_config" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = cluster_service.SetAddonsConfigRequest.pb( cluster_service.SetAddonsConfigRequest() ) @@ -21640,6 +21739,7 @@ def test_set_addons_config_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = cluster_service.Operation() + post_with_metadata.return_value = cluster_service.Operation(), metadata client.set_addons_config( request, @@ -21651,6 +21751,7 @@ def test_set_addons_config_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_set_locations_rest_bad_request( @@ -21755,10 +21856,13 @@ def test_set_locations_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ClusterManagerRestInterceptor, "post_set_locations" ) as post, mock.patch.object( + transports.ClusterManagerRestInterceptor, "post_set_locations_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ClusterManagerRestInterceptor, "pre_set_locations" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = cluster_service.SetLocationsRequest.pb( cluster_service.SetLocationsRequest() ) @@ -21782,6 +21886,7 @@ def test_set_locations_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = cluster_service.Operation() + post_with_metadata.return_value = cluster_service.Operation(), metadata client.set_locations( request, @@ -21793,6 +21898,7 @@ def test_set_locations_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_master_rest_bad_request( @@ -21897,10 +22003,13 @@ def test_update_master_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ClusterManagerRestInterceptor, "post_update_master" ) as post, mock.patch.object( + transports.ClusterManagerRestInterceptor, "post_update_master_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ClusterManagerRestInterceptor, "pre_update_master" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = cluster_service.UpdateMasterRequest.pb( cluster_service.UpdateMasterRequest() ) @@ -21924,6 +22033,7 @@ def test_update_master_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = cluster_service.Operation() + post_with_metadata.return_value = cluster_service.Operation(), metadata client.update_master( request, @@ -21935,6 +22045,7 @@ def test_update_master_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_set_master_auth_rest_bad_request( @@ -22039,10 +22150,13 @@ def test_set_master_auth_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ClusterManagerRestInterceptor, "post_set_master_auth" ) as post, mock.patch.object( + transports.ClusterManagerRestInterceptor, "post_set_master_auth_with_metadata" + ) as post_with_metadata, mock.patch.object( 
transports.ClusterManagerRestInterceptor, "pre_set_master_auth" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = cluster_service.SetMasterAuthRequest.pb( cluster_service.SetMasterAuthRequest() ) @@ -22066,6 +22180,7 @@ def test_set_master_auth_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = cluster_service.Operation() + post_with_metadata.return_value = cluster_service.Operation(), metadata client.set_master_auth( request, @@ -22077,6 +22192,7 @@ def test_set_master_auth_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_cluster_rest_bad_request( @@ -22181,10 +22297,13 @@ def test_delete_cluster_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ClusterManagerRestInterceptor, "post_delete_cluster" ) as post, mock.patch.object( + transports.ClusterManagerRestInterceptor, "post_delete_cluster_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ClusterManagerRestInterceptor, "pre_delete_cluster" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = cluster_service.DeleteClusterRequest.pb( cluster_service.DeleteClusterRequest() ) @@ -22208,6 +22327,7 @@ def test_delete_cluster_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = cluster_service.Operation() + post_with_metadata.return_value = cluster_service.Operation(), metadata client.delete_cluster( request, @@ -22219,6 +22339,7 @@ def test_delete_cluster_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_operations_rest_bad_request( @@ -22303,10 +22424,13 @@ def test_list_operations_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ClusterManagerRestInterceptor, "post_list_operations" ) as post, mock.patch.object( + transports.ClusterManagerRestInterceptor, "post_list_operations_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ClusterManagerRestInterceptor, "pre_list_operations" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = cluster_service.ListOperationsRequest.pb( cluster_service.ListOperationsRequest() ) @@ -22332,6 +22456,10 @@ def test_list_operations_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = cluster_service.ListOperationsResponse() + post_with_metadata.return_value = ( + cluster_service.ListOperationsResponse(), + metadata, + ) client.list_operations( request, @@ -22343,6 +22471,7 @@ def test_list_operations_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_operation_rest_bad_request( @@ -22447,10 +22576,13 @@ def test_get_operation_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ClusterManagerRestInterceptor, "post_get_operation" ) as post, mock.patch.object( + transports.ClusterManagerRestInterceptor, "post_get_operation_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ClusterManagerRestInterceptor, "pre_get_operation" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = 
cluster_service.GetOperationRequest.pb( cluster_service.GetOperationRequest() ) @@ -22474,6 +22606,7 @@ def test_get_operation_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = cluster_service.Operation() + post_with_metadata.return_value = cluster_service.Operation(), metadata client.get_operation( request, @@ -22485,6 +22618,7 @@ def test_get_operation_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_cancel_operation_rest_bad_request( @@ -22686,10 +22820,13 @@ def test_get_server_config_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ClusterManagerRestInterceptor, "post_get_server_config" ) as post, mock.patch.object( + transports.ClusterManagerRestInterceptor, "post_get_server_config_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ClusterManagerRestInterceptor, "pre_get_server_config" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = cluster_service.GetServerConfigRequest.pb( cluster_service.GetServerConfigRequest() ) @@ -22715,6 +22852,7 @@ def test_get_server_config_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = cluster_service.ServerConfig() + post_with_metadata.return_value = cluster_service.ServerConfig(), metadata client.get_server_config( request, @@ -22726,6 +22864,7 @@ def test_get_server_config_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_json_web_keys_rest_bad_request( @@ -22807,10 +22946,13 @@ def test_get_json_web_keys_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ClusterManagerRestInterceptor, "post_get_json_web_keys" ) as post, mock.patch.object( + transports.ClusterManagerRestInterceptor, "post_get_json_web_keys_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ClusterManagerRestInterceptor, "pre_get_json_web_keys" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = cluster_service.GetJSONWebKeysRequest.pb( cluster_service.GetJSONWebKeysRequest() ) @@ -22836,6 +22978,10 @@ def test_get_json_web_keys_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = cluster_service.GetJSONWebKeysResponse() + post_with_metadata.return_value = ( + cluster_service.GetJSONWebKeysResponse(), + metadata, + ) client.get_json_web_keys( request, @@ -22847,6 +22993,7 @@ def test_get_json_web_keys_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_node_pools_rest_bad_request( @@ -22928,10 +23075,13 @@ def test_list_node_pools_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ClusterManagerRestInterceptor, "post_list_node_pools" ) as post, mock.patch.object( + transports.ClusterManagerRestInterceptor, "post_list_node_pools_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ClusterManagerRestInterceptor, "pre_list_node_pools" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = cluster_service.ListNodePoolsRequest.pb( cluster_service.ListNodePoolsRequest() ) @@ -22957,6 +23107,10 @@ def 
test_list_node_pools_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = cluster_service.ListNodePoolsResponse() + post_with_metadata.return_value = ( + cluster_service.ListNodePoolsResponse(), + metadata, + ) client.list_node_pools( request, @@ -22968,6 +23122,7 @@ def test_list_node_pools_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_node_pool_rest_bad_request( @@ -23074,10 +23229,13 @@ def test_get_node_pool_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ClusterManagerRestInterceptor, "post_get_node_pool" ) as post, mock.patch.object( + transports.ClusterManagerRestInterceptor, "post_get_node_pool_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ClusterManagerRestInterceptor, "pre_get_node_pool" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = cluster_service.GetNodePoolRequest.pb( cluster_service.GetNodePoolRequest() ) @@ -23101,6 +23259,7 @@ def test_get_node_pool_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = cluster_service.NodePool() + post_with_metadata.return_value = cluster_service.NodePool(), metadata client.get_node_pool( request, @@ -23112,6 +23271,7 @@ def test_get_node_pool_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_node_pool_rest_bad_request( @@ -23216,10 +23376,13 @@ def test_create_node_pool_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ClusterManagerRestInterceptor, "post_create_node_pool" ) as post, mock.patch.object( + transports.ClusterManagerRestInterceptor, "post_create_node_pool_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ClusterManagerRestInterceptor, "pre_create_node_pool" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = cluster_service.CreateNodePoolRequest.pb( cluster_service.CreateNodePoolRequest() ) @@ -23243,6 +23406,7 @@ def test_create_node_pool_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = cluster_service.Operation() + post_with_metadata.return_value = cluster_service.Operation(), metadata client.create_node_pool( request, @@ -23254,6 +23418,7 @@ def test_create_node_pool_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_node_pool_rest_bad_request( @@ -23362,10 +23527,13 @@ def test_delete_node_pool_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ClusterManagerRestInterceptor, "post_delete_node_pool" ) as post, mock.patch.object( + transports.ClusterManagerRestInterceptor, "post_delete_node_pool_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ClusterManagerRestInterceptor, "pre_delete_node_pool" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = cluster_service.DeleteNodePoolRequest.pb( cluster_service.DeleteNodePoolRequest() ) @@ -23389,6 +23557,7 @@ def test_delete_node_pool_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = cluster_service.Operation() + post_with_metadata.return_value = 
cluster_service.Operation(), metadata client.delete_node_pool( request, @@ -23400,6 +23569,7 @@ def test_delete_node_pool_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_complete_node_pool_upgrade_rest_bad_request( @@ -23621,10 +23791,14 @@ def test_rollback_node_pool_upgrade_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ClusterManagerRestInterceptor, "post_rollback_node_pool_upgrade" ) as post, mock.patch.object( + transports.ClusterManagerRestInterceptor, + "post_rollback_node_pool_upgrade_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ClusterManagerRestInterceptor, "pre_rollback_node_pool_upgrade" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = cluster_service.RollbackNodePoolUpgradeRequest.pb( cluster_service.RollbackNodePoolUpgradeRequest() ) @@ -23648,6 +23822,7 @@ def test_rollback_node_pool_upgrade_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = cluster_service.Operation() + post_with_metadata.return_value = cluster_service.Operation(), metadata client.rollback_node_pool_upgrade( request, @@ -23659,6 +23834,7 @@ def test_rollback_node_pool_upgrade_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_set_node_pool_management_rest_bad_request( @@ -23767,10 +23943,14 @@ def test_set_node_pool_management_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ClusterManagerRestInterceptor, "post_set_node_pool_management" ) as post, mock.patch.object( + transports.ClusterManagerRestInterceptor, + "post_set_node_pool_management_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ClusterManagerRestInterceptor, "pre_set_node_pool_management" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = cluster_service.SetNodePoolManagementRequest.pb( cluster_service.SetNodePoolManagementRequest() ) @@ -23794,6 +23974,7 @@ def test_set_node_pool_management_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = cluster_service.Operation() + post_with_metadata.return_value = cluster_service.Operation(), metadata client.set_node_pool_management( request, @@ -23805,6 +23986,7 @@ def test_set_node_pool_management_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_set_labels_rest_bad_request(request_type=cluster_service.SetLabelsRequest): @@ -23907,10 +24089,13 @@ def test_set_labels_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ClusterManagerRestInterceptor, "post_set_labels" ) as post, mock.patch.object( + transports.ClusterManagerRestInterceptor, "post_set_labels_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ClusterManagerRestInterceptor, "pre_set_labels" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = cluster_service.SetLabelsRequest.pb( cluster_service.SetLabelsRequest() ) @@ -23934,6 +24119,7 @@ def test_set_labels_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = cluster_service.Operation() + post_with_metadata.return_value = 
cluster_service.Operation(), metadata client.set_labels( request, @@ -23945,6 +24131,7 @@ def test_set_labels_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_set_legacy_abac_rest_bad_request( @@ -24049,10 +24236,13 @@ def test_set_legacy_abac_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ClusterManagerRestInterceptor, "post_set_legacy_abac" ) as post, mock.patch.object( + transports.ClusterManagerRestInterceptor, "post_set_legacy_abac_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ClusterManagerRestInterceptor, "pre_set_legacy_abac" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = cluster_service.SetLegacyAbacRequest.pb( cluster_service.SetLegacyAbacRequest() ) @@ -24076,6 +24266,7 @@ def test_set_legacy_abac_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = cluster_service.Operation() + post_with_metadata.return_value = cluster_service.Operation(), metadata client.set_legacy_abac( request, @@ -24087,6 +24278,7 @@ def test_set_legacy_abac_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_start_ip_rotation_rest_bad_request( @@ -24191,10 +24383,13 @@ def test_start_ip_rotation_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ClusterManagerRestInterceptor, "post_start_ip_rotation" ) as post, mock.patch.object( + transports.ClusterManagerRestInterceptor, "post_start_ip_rotation_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ClusterManagerRestInterceptor, "pre_start_ip_rotation" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = cluster_service.StartIPRotationRequest.pb( cluster_service.StartIPRotationRequest() ) @@ -24218,6 +24413,7 @@ def test_start_ip_rotation_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = cluster_service.Operation() + post_with_metadata.return_value = cluster_service.Operation(), metadata client.start_ip_rotation( request, @@ -24229,6 +24425,7 @@ def test_start_ip_rotation_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_complete_ip_rotation_rest_bad_request( @@ -24333,10 +24530,14 @@ def test_complete_ip_rotation_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ClusterManagerRestInterceptor, "post_complete_ip_rotation" ) as post, mock.patch.object( + transports.ClusterManagerRestInterceptor, + "post_complete_ip_rotation_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ClusterManagerRestInterceptor, "pre_complete_ip_rotation" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = cluster_service.CompleteIPRotationRequest.pb( cluster_service.CompleteIPRotationRequest() ) @@ -24360,6 +24561,7 @@ def test_complete_ip_rotation_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = cluster_service.Operation() + post_with_metadata.return_value = cluster_service.Operation(), metadata client.complete_ip_rotation( request, @@ -24371,6 +24573,7 @@ def test_complete_ip_rotation_rest_interceptors(null_interceptor): 
pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_set_node_pool_size_rest_bad_request( @@ -24479,10 +24682,14 @@ def test_set_node_pool_size_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ClusterManagerRestInterceptor, "post_set_node_pool_size" ) as post, mock.patch.object( + transports.ClusterManagerRestInterceptor, + "post_set_node_pool_size_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ClusterManagerRestInterceptor, "pre_set_node_pool_size" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = cluster_service.SetNodePoolSizeRequest.pb( cluster_service.SetNodePoolSizeRequest() ) @@ -24506,6 +24713,7 @@ def test_set_node_pool_size_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = cluster_service.Operation() + post_with_metadata.return_value = cluster_service.Operation(), metadata client.set_node_pool_size( request, @@ -24517,6 +24725,7 @@ def test_set_node_pool_size_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_set_network_policy_rest_bad_request( @@ -24621,10 +24830,14 @@ def test_set_network_policy_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ClusterManagerRestInterceptor, "post_set_network_policy" ) as post, mock.patch.object( + transports.ClusterManagerRestInterceptor, + "post_set_network_policy_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ClusterManagerRestInterceptor, "pre_set_network_policy" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = cluster_service.SetNetworkPolicyRequest.pb( cluster_service.SetNetworkPolicyRequest() ) @@ -24648,6 +24861,7 @@ def test_set_network_policy_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = cluster_service.Operation() + post_with_metadata.return_value = cluster_service.Operation(), metadata client.set_network_policy( request, @@ -24659,6 +24873,7 @@ def test_set_network_policy_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_set_maintenance_policy_rest_bad_request( @@ -24763,10 +24978,14 @@ def test_set_maintenance_policy_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ClusterManagerRestInterceptor, "post_set_maintenance_policy" ) as post, mock.patch.object( + transports.ClusterManagerRestInterceptor, + "post_set_maintenance_policy_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ClusterManagerRestInterceptor, "pre_set_maintenance_policy" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = cluster_service.SetMaintenancePolicyRequest.pb( cluster_service.SetMaintenancePolicyRequest() ) @@ -24790,6 +25009,7 @@ def test_set_maintenance_policy_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = cluster_service.Operation() + post_with_metadata.return_value = cluster_service.Operation(), metadata client.set_maintenance_policy( request, @@ -24801,6 +25021,7 @@ def test_set_maintenance_policy_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def 
test_list_usable_subnetworks_rest_bad_request( @@ -24885,10 +25106,14 @@ def test_list_usable_subnetworks_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ClusterManagerRestInterceptor, "post_list_usable_subnetworks" ) as post, mock.patch.object( + transports.ClusterManagerRestInterceptor, + "post_list_usable_subnetworks_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ClusterManagerRestInterceptor, "pre_list_usable_subnetworks" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = cluster_service.ListUsableSubnetworksRequest.pb( cluster_service.ListUsableSubnetworksRequest() ) @@ -24914,6 +25139,10 @@ def test_list_usable_subnetworks_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = cluster_service.ListUsableSubnetworksResponse() + post_with_metadata.return_value = ( + cluster_service.ListUsableSubnetworksResponse(), + metadata, + ) client.list_usable_subnetworks( request, @@ -24925,6 +25154,7 @@ def test_list_usable_subnetworks_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_check_autopilot_compatibility_rest_bad_request( @@ -25011,10 +25241,14 @@ def test_check_autopilot_compatibility_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ClusterManagerRestInterceptor, "post_check_autopilot_compatibility" ) as post, mock.patch.object( + transports.ClusterManagerRestInterceptor, + "post_check_autopilot_compatibility_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ClusterManagerRestInterceptor, "pre_check_autopilot_compatibility" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = cluster_service.CheckAutopilotCompatibilityRequest.pb( cluster_service.CheckAutopilotCompatibilityRequest() ) @@ -25040,6 +25274,10 @@ def test_check_autopilot_compatibility_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = cluster_service.CheckAutopilotCompatibilityResponse() + post_with_metadata.return_value = ( + cluster_service.CheckAutopilotCompatibilityResponse(), + metadata, + ) client.check_autopilot_compatibility( request, @@ -25051,6 +25289,7 @@ def test_check_autopilot_compatibility_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_initialize_client_w_rest(): diff --git a/packages/google-cloud-container/tests/unit/gapic/container_v1beta1/test_cluster_manager.py b/packages/google-cloud-container/tests/unit/gapic/container_v1beta1/test_cluster_manager.py index 593f7711426f..a4e9cf39d5f0 100644 --- a/packages/google-cloud-container/tests/unit/gapic/container_v1beta1/test_cluster_manager.py +++ b/packages/google-cloud-container/tests/unit/gapic/container_v1beta1/test_cluster_manager.py @@ -22,6 +22,7 @@ except ImportError: # pragma: NO COVER import mock +import json import math from google.api_core import api_core_version @@ -60,6 +61,13 @@ ) from google.cloud.container_v1beta1.types import cluster_service +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ 
-318,6 +326,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = ClusterManagerClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = ClusterManagerClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-containeranalysis/CHANGELOG.md b/packages/google-cloud-containeranalysis/CHANGELOG.md index ece94445b9cd..e98a2d8386e5 100644 --- a/packages/google-cloud-containeranalysis/CHANGELOG.md +++ b/packages/google-cloud-containeranalysis/CHANGELOG.md @@ -1,5 +1,13 @@ # Changelog +## [2.17.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-containeranalysis-v2.16.0...google-cloud-containeranalysis-v2.17.0) (2025-02-12) + + +### Features + +* Add REST Interceptors which support reading metadata ([e22e2bd](https://github.com/googleapis/google-cloud-python/commit/e22e2bde55d11d2f85e9d2caf1d152a4027f88cf)) +* Add support for reading selective GAPIC generation methods from service YAML ([e22e2bd](https://github.com/googleapis/google-cloud-python/commit/e22e2bde55d11d2f85e9d2caf1d152a4027f88cf)) + ## [2.16.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-containeranalysis-v2.15.1...google-cloud-containeranalysis-v2.16.0) (2024-12-12) diff --git a/packages/google-cloud-containeranalysis/google/cloud/devtools/containeranalysis/gapic_version.py b/packages/google-cloud-containeranalysis/google/cloud/devtools/containeranalysis/gapic_version.py index e154065d8da8..6053ad2404bf 100644 --- a/packages/google-cloud-containeranalysis/google/cloud/devtools/containeranalysis/gapic_version.py +++ b/packages/google-cloud-containeranalysis/google/cloud/devtools/containeranalysis/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "2.16.0" # {x-release-please-version} +__version__ = "2.17.0" # {x-release-please-version} diff --git a/packages/google-cloud-containeranalysis/google/cloud/devtools/containeranalysis_v1/gapic_version.py b/packages/google-cloud-containeranalysis/google/cloud/devtools/containeranalysis_v1/gapic_version.py index e154065d8da8..6053ad2404bf 100644 --- a/packages/google-cloud-containeranalysis/google/cloud/devtools/containeranalysis_v1/gapic_version.py +++ b/packages/google-cloud-containeranalysis/google/cloud/devtools/containeranalysis_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.16.0" # {x-release-please-version} +__version__ = "2.17.0" # {x-release-please-version} diff --git a/packages/google-cloud-containeranalysis/google/cloud/devtools/containeranalysis_v1/proto/containeranalysis.proto b/packages/google-cloud-containeranalysis/google/cloud/devtools/containeranalysis_v1/proto/containeranalysis.proto deleted file mode 100644 index f1f1e2730ccb..000000000000 --- a/packages/google-cloud-containeranalysis/google/cloud/devtools/containeranalysis_v1/proto/containeranalysis.proto +++ /dev/null @@ -1,154 +0,0 @@ -// Copyright 2019 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -syntax = "proto3"; - -package google.devtools.containeranalysis.v1; - -import "google/api/annotations.proto"; -import "google/api/client.proto"; -import "google/api/field_behavior.proto"; -import "google/api/resource.proto"; -import "google/iam/v1/iam_policy.proto"; -import "google/iam/v1/policy.proto"; -import "google/protobuf/timestamp.proto"; -import "grafeas/v1/vulnerability.proto"; - -option csharp_namespace = "Google.Cloud.DevTools.ContainerAnalysis.V1"; -option go_package = "google.golang.org/genproto/googleapis/devtools/containeranalysis/v1;containeranalysis"; -option java_multiple_files = true; -option java_package = "com.google.containeranalysis.v1"; -option objc_class_prefix = "GCA"; -option ruby_package = "Google::Cloud::ContainerAnalysis::V1"; - -// Retrieves analysis results of Cloud components such as Docker container -// images. The Container Analysis API is an implementation of the -// [Grafeas](https://grafeas.io) API. -// -// Analysis results are stored as a series of occurrences. An `Occurrence` -// contains information about a specific analysis instance on a resource. An -// occurrence refers to a `Note`. A note contains details describing the -// analysis and is generally stored in a separate project, called a `Provider`. -// Multiple occurrences can refer to the same note. -// -// For example, an SSL vulnerability could affect multiple images. In this case, -// there would be one note for the vulnerability and an occurrence for each -// image with the vulnerability referring to that note. 
-service ContainerAnalysis { - option (google.api.default_host) = "containeranalysis.googleapis.com"; - option (google.api.oauth_scopes) = "https://www.googleapis.com/auth/cloud-platform"; - - // Sets the access control policy on the specified note or occurrence. - // Requires `containeranalysis.notes.setIamPolicy` or - // `containeranalysis.occurrences.setIamPolicy` permission if the resource is - // a note or an occurrence, respectively. - // - // The resource takes the format `projects/[PROJECT_ID]/notes/[NOTE_ID]` for - // notes and `projects/[PROJECT_ID]/occurrences/[OCCURRENCE_ID]` for - // occurrences. - rpc SetIamPolicy(google.iam.v1.SetIamPolicyRequest) returns (google.iam.v1.Policy) { - option (google.api.http) = { - post: "/v1/{resource=projects/*/notes/*}:setIamPolicy" - body: "*" - additional_bindings { - post: "/v1/{resource=projects/*/occurrences/*}:setIamPolicy" - body: "*" - } - }; - option (google.api.method_signature) = "resource,policy"; - } - - // Gets the access control policy for a note or an occurrence resource. - // Requires `containeranalysis.notes.setIamPolicy` or - // `containeranalysis.occurrences.setIamPolicy` permission if the resource is - // a note or occurrence, respectively. - // - // The resource takes the format `projects/[PROJECT_ID]/notes/[NOTE_ID]` for - // notes and `projects/[PROJECT_ID]/occurrences/[OCCURRENCE_ID]` for - // occurrences. - rpc GetIamPolicy(google.iam.v1.GetIamPolicyRequest) returns (google.iam.v1.Policy) { - option (google.api.http) = { - post: "/v1/{resource=projects/*/notes/*}:getIamPolicy" - body: "*" - additional_bindings { - post: "/v1/{resource=projects/*/occurrences/*}:getIamPolicy" - body: "*" - } - }; - option (google.api.method_signature) = "resource"; - } - - // Returns the permissions that a caller has on the specified note or - // occurrence. Requires list permission on the project (for example, - // `containeranalysis.notes.list`). - // - // The resource takes the format `projects/[PROJECT_ID]/notes/[NOTE_ID]` for - // notes and `projects/[PROJECT_ID]/occurrences/[OCCURRENCE_ID]` for - // occurrences. - rpc TestIamPermissions(google.iam.v1.TestIamPermissionsRequest) returns (google.iam.v1.TestIamPermissionsResponse) { - option (google.api.http) = { - post: "/v1/{resource=projects/*/notes/*}:testIamPermissions" - body: "*" - additional_bindings { - post: "/v1/{resource=projects/*/occurrences/*}:testIamPermissions" - body: "*" - } - }; - option (google.api.method_signature) = "resource,permissions"; - } - - // Gets a summary of the number and severity of occurrences. - rpc GetVulnerabilityOccurrencesSummary(GetVulnerabilityOccurrencesSummaryRequest) returns (VulnerabilityOccurrencesSummary) { - option (google.api.http) = { - get: "/v1/{parent=projects/*}/occurrences:vulnerabilitySummary" - }; - option (google.api.method_signature) = "parent,filter"; - } -} - -// Request to get a vulnerability summary for some set of occurrences. -message GetVulnerabilityOccurrencesSummaryRequest { - // The name of the project to get a vulnerability summary for in the form of - // `projects/[PROJECT_ID]`. - string parent = 1 [ - (google.api.resource_reference).type = "cloudresourcemanager.googleapis.com/Project", - (google.api.field_behavior) = REQUIRED - ]; - - // The filter expression. - string filter = 2; -} - -// A summary of how many vulnerability occurrences there are per resource and -// severity type. -message VulnerabilityOccurrencesSummary { - // Per resource and severity counts of fixable and total vulnerabilities. 
- message FixableTotalByDigest { - // The affected resource. - string resource_uri = 1; - - // The severity for this count. SEVERITY_UNSPECIFIED indicates total across - // all severities. - grafeas.v1.Severity severity = 2; - - // The number of fixable vulnerabilities associated with this resource. - int64 fixable_count = 3; - - // The total number of vulnerabilities associated with this resource. - int64 total_count = 4; - } - - // A listing by resource of the number of fixable and total vulnerabilities. - repeated FixableTotalByDigest counts = 1; -} diff --git a/packages/google-cloud-containeranalysis/google/cloud/devtools/containeranalysis_v1/services/container_analysis/client.py b/packages/google-cloud-containeranalysis/google/cloud/devtools/containeranalysis_v1/services/container_analysis/client.py index 8a3d9797f0ac..7536af1548e5 100644 --- a/packages/google-cloud-containeranalysis/google/cloud/devtools/containeranalysis_v1/services/container_analysis/client.py +++ b/packages/google-cloud-containeranalysis/google/cloud/devtools/containeranalysis_v1/services/container_analysis/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -481,6 +483,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. diff --git a/packages/google-cloud-containeranalysis/google/cloud/devtools/containeranalysis_v1/services/container_analysis/transports/rest.py b/packages/google-cloud-containeranalysis/google/cloud/devtools/containeranalysis_v1/services/container_analysis/transports/rest.py index e65bf1a30375..f8b6095a7180 100644 --- a/packages/google-cloud-containeranalysis/google/cloud/devtools/containeranalysis_v1/services/container_analysis/transports/rest.py +++ b/packages/google-cloud-containeranalysis/google/cloud/devtools/containeranalysis_v1/services/container_analysis/transports/rest.py @@ -125,12 +125,35 @@ def pre_get_iam_policy( def post_get_iam_policy(self, response: policy_pb2.Policy) -> policy_pb2.Policy: """Post-rpc interceptor for get_iam_policy - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_iam_policy_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ContainerAnalysis server but before - it is returned to user code. + it is returned to user code. This `post_get_iam_policy` interceptor runs + before the `post_get_iam_policy_with_metadata` interceptor. 
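The `_add_cred_info_for_auth_errors` helper added to client.py above only acts on 401/403/404 errors, and only when the credential object exposes `get_cred_info()` (google-auth 2.35.0 or newer). A minimal caller-side sketch of the effect, assuming the per-RPC wrappers route `GoogleAPICallError` through the helper the same way the `get_operation` hunks further down do; the project, note, and credential values are illustrative:

    from google.api_core import exceptions as core_exceptions
    from google.cloud.devtools import containeranalysis_v1

    client = containeranalysis_v1.ContainerAnalysisClient()
    try:
        client.get_iam_policy(request={"resource": "projects/my-proj/notes/my-note"})
    except core_exceptions.GoogleAPICallError as e:  # e.g. a 403 PermissionDenied
        # For 401/403/404 the last entry of e.details is json.dumps(cred.get_cred_info());
        # for other status codes the details list is left untouched.
        print(e.details)
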
""" return response + def post_get_iam_policy_with_metadata( + self, + response: policy_pb2.Policy, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[policy_pb2.Policy, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_iam_policy + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ContainerAnalysis server but before it is returned to user code. + + We recommend only using this `post_get_iam_policy_with_metadata` + interceptor in new development instead of the `post_get_iam_policy` interceptor. + When both interceptors are used, this `post_get_iam_policy_with_metadata` interceptor runs after the + `post_get_iam_policy` interceptor. The (possibly modified) response returned by + `post_get_iam_policy` will be passed to + `post_get_iam_policy_with_metadata`. + """ + return response, metadata + def pre_get_vulnerability_occurrences_summary( self, request: containeranalysis.GetVulnerabilityOccurrencesSummaryRequest, @@ -151,12 +174,38 @@ def post_get_vulnerability_occurrences_summary( ) -> containeranalysis.VulnerabilityOccurrencesSummary: """Post-rpc interceptor for get_vulnerability_occurrences_summary - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_vulnerability_occurrences_summary_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ContainerAnalysis server but before - it is returned to user code. + it is returned to user code. This `post_get_vulnerability_occurrences_summary` interceptor runs + before the `post_get_vulnerability_occurrences_summary_with_metadata` interceptor. """ return response + def post_get_vulnerability_occurrences_summary_with_metadata( + self, + response: containeranalysis.VulnerabilityOccurrencesSummary, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + containeranalysis.VulnerabilityOccurrencesSummary, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for get_vulnerability_occurrences_summary + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ContainerAnalysis server but before it is returned to user code. + + We recommend only using this `post_get_vulnerability_occurrences_summary_with_metadata` + interceptor in new development instead of the `post_get_vulnerability_occurrences_summary` interceptor. + When both interceptors are used, this `post_get_vulnerability_occurrences_summary_with_metadata` interceptor runs after the + `post_get_vulnerability_occurrences_summary` interceptor. The (possibly modified) response returned by + `post_get_vulnerability_occurrences_summary` will be passed to + `post_get_vulnerability_occurrences_summary_with_metadata`. + """ + return response, metadata + def pre_set_iam_policy( self, request: iam_policy_pb2.SetIamPolicyRequest, @@ -174,12 +223,35 @@ def pre_set_iam_policy( def post_set_iam_policy(self, response: policy_pb2.Policy) -> policy_pb2.Policy: """Post-rpc interceptor for set_iam_policy - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_set_iam_policy_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ContainerAnalysis server but before - it is returned to user code. + it is returned to user code. This `post_set_iam_policy` interceptor runs + before the `post_set_iam_policy_with_metadata` interceptor. 
""" return response + def post_set_iam_policy_with_metadata( + self, + response: policy_pb2.Policy, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[policy_pb2.Policy, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for set_iam_policy + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ContainerAnalysis server but before it is returned to user code. + + We recommend only using this `post_set_iam_policy_with_metadata` + interceptor in new development instead of the `post_set_iam_policy` interceptor. + When both interceptors are used, this `post_set_iam_policy_with_metadata` interceptor runs after the + `post_set_iam_policy` interceptor. The (possibly modified) response returned by + `post_set_iam_policy` will be passed to + `post_set_iam_policy_with_metadata`. + """ + return response, metadata + def pre_test_iam_permissions( self, request: iam_policy_pb2.TestIamPermissionsRequest, @@ -200,12 +272,38 @@ def post_test_iam_permissions( ) -> iam_policy_pb2.TestIamPermissionsResponse: """Post-rpc interceptor for test_iam_permissions - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_test_iam_permissions_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ContainerAnalysis server but before - it is returned to user code. + it is returned to user code. This `post_test_iam_permissions` interceptor runs + before the `post_test_iam_permissions_with_metadata` interceptor. """ return response + def post_test_iam_permissions_with_metadata( + self, + response: iam_policy_pb2.TestIamPermissionsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + iam_policy_pb2.TestIamPermissionsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for test_iam_permissions + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ContainerAnalysis server but before it is returned to user code. + + We recommend only using this `post_test_iam_permissions_with_metadata` + interceptor in new development instead of the `post_test_iam_permissions` interceptor. + When both interceptors are used, this `post_test_iam_permissions_with_metadata` interceptor runs after the + `post_test_iam_permissions` interceptor. The (possibly modified) response returned by + `post_test_iam_permissions` will be passed to + `post_test_iam_permissions_with_metadata`. 
+ """ + return response, metadata + @dataclasses.dataclass class ContainerAnalysisRestStub: @@ -505,6 +603,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_iam_policy(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_iam_policy_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -658,6 +760,13 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_vulnerability_occurrences_summary(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_get_vulnerability_occurrences_summary_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -883,6 +992,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_set_iam_policy(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_set_iam_policy_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1032,6 +1145,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_test_iam_permissions(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_test_iam_permissions_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-containeranalysis/samples/generated_samples/snippet_metadata_google.devtools.containeranalysis.v1.json b/packages/google-cloud-containeranalysis/samples/generated_samples/snippet_metadata_google.devtools.containeranalysis.v1.json index 80c3335e233b..cbceeefe0d27 100644 --- a/packages/google-cloud-containeranalysis/samples/generated_samples/snippet_metadata_google.devtools.containeranalysis.v1.json +++ b/packages/google-cloud-containeranalysis/samples/generated_samples/snippet_metadata_google.devtools.containeranalysis.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-containeranalysis", - "version": "2.16.0" + "version": "2.17.0" }, "snippets": [ { diff --git a/packages/google-cloud-containeranalysis/tests/unit/gapic/containeranalysis_v1/test_container_analysis.py b/packages/google-cloud-containeranalysis/tests/unit/gapic/containeranalysis_v1/test_container_analysis.py index c06694ce385d..b9b16ca2a123 100644 --- a/packages/google-cloud-containeranalysis/tests/unit/gapic/containeranalysis_v1/test_container_analysis.py +++ b/packages/google-cloud-containeranalysis/tests/unit/gapic/containeranalysis_v1/test_container_analysis.py @@ -64,6 +64,13 @@ ) from google.cloud.devtools.containeranalysis_v1.types import containeranalysis +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -331,6 +338,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty 
string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = ContainerAnalysisClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = ContainerAnalysisClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -3662,10 +3712,13 @@ def test_set_iam_policy_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ContainerAnalysisRestInterceptor, "post_set_iam_policy" ) as post, mock.patch.object( + transports.ContainerAnalysisRestInterceptor, "post_set_iam_policy_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ContainerAnalysisRestInterceptor, "pre_set_iam_policy" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = iam_policy_pb2.SetIamPolicyRequest() transcode.return_value = { "method": "post", @@ -3687,6 +3740,7 @@ def test_set_iam_policy_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = policy_pb2.Policy() + post_with_metadata.return_value = policy_pb2.Policy(), metadata client.set_iam_policy( request, @@ -3698,6 +3752,7 @@ def test_set_iam_policy_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_iam_policy_rest_bad_request( @@ -3781,10 +3836,13 @@ def test_get_iam_policy_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ContainerAnalysisRestInterceptor, "post_get_iam_policy" ) as post, mock.patch.object( + transports.ContainerAnalysisRestInterceptor, "post_get_iam_policy_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ContainerAnalysisRestInterceptor, "pre_get_iam_policy" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = iam_policy_pb2.GetIamPolicyRequest() transcode.return_value = { "method": "post", @@ -3806,6 +3864,7 @@ def test_get_iam_policy_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = policy_pb2.Policy() + post_with_metadata.return_value = policy_pb2.Policy(), metadata client.get_iam_policy( request, @@ -3817,6 +3876,7 @@ def test_get_iam_policy_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def 
test_test_iam_permissions_rest_bad_request( @@ -3898,10 +3958,14 @@ def test_test_iam_permissions_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ContainerAnalysisRestInterceptor, "post_test_iam_permissions" ) as post, mock.patch.object( + transports.ContainerAnalysisRestInterceptor, + "post_test_iam_permissions_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ContainerAnalysisRestInterceptor, "pre_test_iam_permissions" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = iam_policy_pb2.TestIamPermissionsRequest() transcode.return_value = { "method": "post", @@ -3925,6 +3989,10 @@ def test_test_iam_permissions_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = iam_policy_pb2.TestIamPermissionsResponse() + post_with_metadata.return_value = ( + iam_policy_pb2.TestIamPermissionsResponse(), + metadata, + ) client.test_iam_permissions( request, @@ -3936,6 +4004,7 @@ def test_test_iam_permissions_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_vulnerability_occurrences_summary_rest_bad_request( @@ -4020,11 +4089,15 @@ def test_get_vulnerability_occurrences_summary_rest_interceptors(null_intercepto transports.ContainerAnalysisRestInterceptor, "post_get_vulnerability_occurrences_summary", ) as post, mock.patch.object( + transports.ContainerAnalysisRestInterceptor, + "post_get_vulnerability_occurrences_summary_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ContainerAnalysisRestInterceptor, "pre_get_vulnerability_occurrences_summary", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = containeranalysis.GetVulnerabilityOccurrencesSummaryRequest.pb( containeranalysis.GetVulnerabilityOccurrencesSummaryRequest() ) @@ -4050,6 +4123,10 @@ def test_get_vulnerability_occurrences_summary_rest_interceptors(null_intercepto ] pre.return_value = request, metadata post.return_value = containeranalysis.VulnerabilityOccurrencesSummary() + post_with_metadata.return_value = ( + containeranalysis.VulnerabilityOccurrencesSummary(), + metadata, + ) client.get_vulnerability_occurrences_summary( request, @@ -4061,6 +4138,7 @@ def test_get_vulnerability_occurrences_summary_rest_interceptors(null_intercepto pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_initialize_client_w_rest(): diff --git a/packages/google-cloud-contentwarehouse/CHANGELOG.md b/packages/google-cloud-contentwarehouse/CHANGELOG.md index 4877884c4616..42e0a2de2956 100644 --- a/packages/google-cloud-contentwarehouse/CHANGELOG.md +++ b/packages/google-cloud-contentwarehouse/CHANGELOG.md @@ -1,5 +1,13 @@ # Changelog +## [0.7.13](https://github.com/googleapis/google-cloud-python/compare/google-cloud-contentwarehouse-v0.7.12...google-cloud-contentwarehouse-v0.7.13) (2025-02-12) + + +### Features + +* Add REST Interceptors which support reading metadata ([e22e2bd](https://github.com/googleapis/google-cloud-python/commit/e22e2bde55d11d2f85e9d2caf1d152a4027f88cf)) +* Add support for reading selective GAPIC generation methods from service YAML ([e22e2bd](https://github.com/googleapis/google-cloud-python/commit/e22e2bde55d11d2f85e9d2caf1d152a4027f88cf)) + ## 
[0.7.12](https://github.com/googleapis/google-cloud-python/compare/google-cloud-contentwarehouse-v0.7.11...google-cloud-contentwarehouse-v0.7.12) (2024-12-12) diff --git a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse/gapic_version.py b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse/gapic_version.py index 43f4d6aa457c..04fd3b965904 100644 --- a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse/gapic_version.py +++ b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.7.12" # {x-release-please-version} +__version__ = "0.7.13" # {x-release-please-version} diff --git a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/gapic_version.py b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/gapic_version.py index 43f4d6aa457c..04fd3b965904 100644 --- a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/gapic_version.py +++ b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.7.12" # {x-release-please-version} +__version__ = "0.7.13" # {x-release-please-version} diff --git a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_link_service/client.py b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_link_service/client.py index 78942e2c5a82..6434bb3ca204 100644 --- a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_link_service/client.py +++ b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_link_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -514,6 +516,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -1219,16 +1248,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. 
- return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_link_service/transports/rest.py b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_link_service/transports/rest.py index 073434a430fb..f15033c8e0c6 100644 --- a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_link_service/transports/rest.py +++ b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_link_service/transports/rest.py @@ -124,12 +124,37 @@ def post_create_document_link( ) -> document_link_service.DocumentLink: """Post-rpc interceptor for create_document_link - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_document_link_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DocumentLinkService server but before - it is returned to user code. + it is returned to user code. This `post_create_document_link` interceptor runs + before the `post_create_document_link_with_metadata` interceptor. """ return response + def post_create_document_link_with_metadata( + self, + response: document_link_service.DocumentLink, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + document_link_service.DocumentLink, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for create_document_link + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DocumentLinkService server but before it is returned to user code. + + We recommend only using this `post_create_document_link_with_metadata` + interceptor in new development instead of the `post_create_document_link` interceptor. + When both interceptors are used, this `post_create_document_link_with_metadata` interceptor runs after the + `post_create_document_link` interceptor. The (possibly modified) response returned by + `post_create_document_link` will be passed to + `post_create_document_link_with_metadata`. + """ + return response, metadata + def pre_delete_document_link( self, request: document_link_service.DeleteDocumentLinkRequest, @@ -165,12 +190,38 @@ def post_list_linked_sources( ) -> document_link_service.ListLinkedSourcesResponse: """Post-rpc interceptor for list_linked_sources - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_linked_sources_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DocumentLinkService server but before - it is returned to user code. + it is returned to user code. This `post_list_linked_sources` interceptor runs + before the `post_list_linked_sources_with_metadata` interceptor. 
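When both the legacy hook and its `*_with_metadata` counterpart are overridden, the docstrings above pin the order: `post_list_linked_sources` runs first, and its (possibly modified) return value is what `post_list_linked_sources_with_metadata` receives along with the response headers. A small sketch against the DocumentLinkService interceptor; the subclass is hypothetical:

    from google.cloud.contentwarehouse_v1.services.document_link_service.transports.rest import (
        DocumentLinkServiceRestInterceptor,
    )

    class AuditingInterceptor(DocumentLinkServiceRestInterceptor):
        def post_list_linked_sources(self, response):
            # Legacy hook, still supported: runs first and may modify the response.
            return response

        def post_list_linked_sources_with_metadata(self, response, metadata):
            # Runs second, receiving whatever post_list_linked_sources returned
            # plus the response headers as metadata.
            return response, metadata
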
""" return response + def post_list_linked_sources_with_metadata( + self, + response: document_link_service.ListLinkedSourcesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + document_link_service.ListLinkedSourcesResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_linked_sources + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DocumentLinkService server but before it is returned to user code. + + We recommend only using this `post_list_linked_sources_with_metadata` + interceptor in new development instead of the `post_list_linked_sources` interceptor. + When both interceptors are used, this `post_list_linked_sources_with_metadata` interceptor runs after the + `post_list_linked_sources` interceptor. The (possibly modified) response returned by + `post_list_linked_sources` will be passed to + `post_list_linked_sources_with_metadata`. + """ + return response, metadata + def pre_list_linked_targets( self, request: document_link_service.ListLinkedTargetsRequest, @@ -191,12 +242,38 @@ def post_list_linked_targets( ) -> document_link_service.ListLinkedTargetsResponse: """Post-rpc interceptor for list_linked_targets - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_linked_targets_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DocumentLinkService server but before - it is returned to user code. + it is returned to user code. This `post_list_linked_targets` interceptor runs + before the `post_list_linked_targets_with_metadata` interceptor. """ return response + def post_list_linked_targets_with_metadata( + self, + response: document_link_service.ListLinkedTargetsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + document_link_service.ListLinkedTargetsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_linked_targets + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DocumentLinkService server but before it is returned to user code. + + We recommend only using this `post_list_linked_targets_with_metadata` + interceptor in new development instead of the `post_list_linked_targets` interceptor. + When both interceptors are used, this `post_list_linked_targets_with_metadata` interceptor runs after the + `post_list_linked_targets` interceptor. The (possibly modified) response returned by + `post_list_linked_targets` will be passed to + `post_list_linked_targets_with_metadata`. 
+ """ + return response, metadata + def pre_get_operation( self, request: operations_pb2.GetOperationRequest, @@ -442,6 +519,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_document_link(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_document_link_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -716,6 +797,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_linked_sources(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_linked_sources_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -874,6 +959,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_linked_targets(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_linked_targets_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_schema_service/client.py b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_schema_service/client.py index 09ec871376a0..c815d72f8e13 100644 --- a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_schema_service/client.py +++ b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_schema_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -510,6 +512,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -1343,16 +1372,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. 
+ return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_schema_service/transports/rest.py b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_schema_service/transports/rest.py index cc082ce5425c..d4fa1eb88180 100644 --- a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_schema_service/transports/rest.py +++ b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_schema_service/transports/rest.py @@ -136,12 +136,37 @@ def post_create_document_schema( ) -> gcc_document_schema.DocumentSchema: """Post-rpc interceptor for create_document_schema - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_document_schema_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DocumentSchemaService server but before - it is returned to user code. + it is returned to user code. This `post_create_document_schema` interceptor runs + before the `post_create_document_schema_with_metadata` interceptor. """ return response + def post_create_document_schema_with_metadata( + self, + response: gcc_document_schema.DocumentSchema, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + gcc_document_schema.DocumentSchema, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for create_document_schema + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DocumentSchemaService server but before it is returned to user code. + + We recommend only using this `post_create_document_schema_with_metadata` + interceptor in new development instead of the `post_create_document_schema` interceptor. + When both interceptors are used, this `post_create_document_schema_with_metadata` interceptor runs after the + `post_create_document_schema` interceptor. The (possibly modified) response returned by + `post_create_document_schema` will be passed to + `post_create_document_schema_with_metadata`. + """ + return response, metadata + def pre_delete_document_schema( self, request: document_schema_service.DeleteDocumentSchemaRequest, @@ -177,12 +202,35 @@ def post_get_document_schema( ) -> document_schema.DocumentSchema: """Post-rpc interceptor for get_document_schema - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_document_schema_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DocumentSchemaService server but before - it is returned to user code. + it is returned to user code. This `post_get_document_schema` interceptor runs + before the `post_get_document_schema_with_metadata` interceptor. """ return response + def post_get_document_schema_with_metadata( + self, + response: document_schema.DocumentSchema, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[document_schema.DocumentSchema, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_document_schema + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DocumentSchemaService server but before it is returned to user code. 
+ + We recommend only using this `post_get_document_schema_with_metadata` + interceptor in new development instead of the `post_get_document_schema` interceptor. + When both interceptors are used, this `post_get_document_schema_with_metadata` interceptor runs after the + `post_get_document_schema` interceptor. The (possibly modified) response returned by + `post_get_document_schema` will be passed to + `post_get_document_schema_with_metadata`. + """ + return response, metadata + def pre_list_document_schemas( self, request: document_schema_service.ListDocumentSchemasRequest, @@ -203,12 +251,38 @@ def post_list_document_schemas( ) -> document_schema_service.ListDocumentSchemasResponse: """Post-rpc interceptor for list_document_schemas - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_document_schemas_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DocumentSchemaService server but before - it is returned to user code. + it is returned to user code. This `post_list_document_schemas` interceptor runs + before the `post_list_document_schemas_with_metadata` interceptor. """ return response + def post_list_document_schemas_with_metadata( + self, + response: document_schema_service.ListDocumentSchemasResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + document_schema_service.ListDocumentSchemasResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_document_schemas + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DocumentSchemaService server but before it is returned to user code. + + We recommend only using this `post_list_document_schemas_with_metadata` + interceptor in new development instead of the `post_list_document_schemas` interceptor. + When both interceptors are used, this `post_list_document_schemas_with_metadata` interceptor runs after the + `post_list_document_schemas` interceptor. The (possibly modified) response returned by + `post_list_document_schemas` will be passed to + `post_list_document_schemas_with_metadata`. + """ + return response, metadata + def pre_update_document_schema( self, request: document_schema_service.UpdateDocumentSchemaRequest, @@ -229,12 +303,37 @@ def post_update_document_schema( ) -> gcc_document_schema.DocumentSchema: """Post-rpc interceptor for update_document_schema - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_document_schema_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DocumentSchemaService server but before - it is returned to user code. + it is returned to user code. This `post_update_document_schema` interceptor runs + before the `post_update_document_schema_with_metadata` interceptor. """ return response + def post_update_document_schema_with_metadata( + self, + response: gcc_document_schema.DocumentSchema, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + gcc_document_schema.DocumentSchema, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for update_document_schema + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DocumentSchemaService server but before it is returned to user code. 
+ + We recommend only using this `post_update_document_schema_with_metadata` + interceptor in new development instead of the `post_update_document_schema` interceptor. + When both interceptors are used, this `post_update_document_schema_with_metadata` interceptor runs after the + `post_update_document_schema` interceptor. The (possibly modified) response returned by + `post_update_document_schema` will be passed to + `post_update_document_schema_with_metadata`. + """ + return response, metadata + def pre_get_operation( self, request: operations_pb2.GetOperationRequest, @@ -478,6 +577,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_document_schema(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_document_schema_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -740,6 +843,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_document_schema(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_document_schema_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -888,6 +995,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_document_schemas(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_document_schemas_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1046,6 +1157,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_document_schema(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_document_schema_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_service/client.py b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_service/client.py index af97ded9e546..1a465a9337d3 100644 --- a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_service/client.py +++ b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -534,6 +536,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. 
+ """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -1727,16 +1756,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_service/transports/rest.py b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_service/transports/rest.py index f63c43f85b30..bd19343e9f80 100644 --- a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_service/transports/rest.py +++ b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/document_service/transports/rest.py @@ -160,12 +160,37 @@ def post_create_document( ) -> document_service.CreateDocumentResponse: """Post-rpc interceptor for create_document - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_document_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DocumentService server but before - it is returned to user code. + it is returned to user code. This `post_create_document` interceptor runs + before the `post_create_document_with_metadata` interceptor. """ return response + def post_create_document_with_metadata( + self, + response: document_service.CreateDocumentResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + document_service.CreateDocumentResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for create_document + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DocumentService server but before it is returned to user code. + + We recommend only using this `post_create_document_with_metadata` + interceptor in new development instead of the `post_create_document` interceptor. + When both interceptors are used, this `post_create_document_with_metadata` interceptor runs after the + `post_create_document` interceptor. The (possibly modified) response returned by + `post_create_document` will be passed to + `post_create_document_with_metadata`. + """ + return response, metadata + def pre_delete_document( self, request: document_service_request.DeleteDocumentRequest, @@ -201,12 +226,37 @@ def post_fetch_acl( ) -> document_service.FetchAclResponse: """Post-rpc interceptor for fetch_acl - Override in a subclass to manipulate the response + DEPRECATED. 
Please use the `post_fetch_acl_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DocumentService server but before - it is returned to user code. + it is returned to user code. This `post_fetch_acl` interceptor runs + before the `post_fetch_acl_with_metadata` interceptor. """ return response + def post_fetch_acl_with_metadata( + self, + response: document_service.FetchAclResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + document_service.FetchAclResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for fetch_acl + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DocumentService server but before it is returned to user code. + + We recommend only using this `post_fetch_acl_with_metadata` + interceptor in new development instead of the `post_fetch_acl` interceptor. + When both interceptors are used, this `post_fetch_acl_with_metadata` interceptor runs after the + `post_fetch_acl` interceptor. The (possibly modified) response returned by + `post_fetch_acl` will be passed to + `post_fetch_acl_with_metadata`. + """ + return response, metadata + def pre_get_document( self, request: document_service_request.GetDocumentRequest, @@ -227,12 +277,35 @@ def post_get_document( ) -> gcc_document.Document: """Post-rpc interceptor for get_document - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_document_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DocumentService server but before - it is returned to user code. + it is returned to user code. This `post_get_document` interceptor runs + before the `post_get_document_with_metadata` interceptor. """ return response + def post_get_document_with_metadata( + self, + response: gcc_document.Document, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gcc_document.Document, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_document + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DocumentService server but before it is returned to user code. + + We recommend only using this `post_get_document_with_metadata` + interceptor in new development instead of the `post_get_document` interceptor. + When both interceptors are used, this `post_get_document_with_metadata` interceptor runs after the + `post_get_document` interceptor. The (possibly modified) response returned by + `post_get_document` will be passed to + `post_get_document_with_metadata`. + """ + return response, metadata + def pre_lock_document( self, request: document_service_request.LockDocumentRequest, @@ -253,12 +326,35 @@ def post_lock_document( ) -> gcc_document.Document: """Post-rpc interceptor for lock_document - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_lock_document_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DocumentService server but before - it is returned to user code. + it is returned to user code. This `post_lock_document` interceptor runs + before the `post_lock_document_with_metadata` interceptor. 
""" return response + def post_lock_document_with_metadata( + self, + response: gcc_document.Document, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gcc_document.Document, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for lock_document + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DocumentService server but before it is returned to user code. + + We recommend only using this `post_lock_document_with_metadata` + interceptor in new development instead of the `post_lock_document` interceptor. + When both interceptors are used, this `post_lock_document_with_metadata` interceptor runs after the + `post_lock_document` interceptor. The (possibly modified) response returned by + `post_lock_document` will be passed to + `post_lock_document_with_metadata`. + """ + return response, metadata + def pre_search_documents( self, request: document_service_request.SearchDocumentsRequest, @@ -279,12 +375,38 @@ def post_search_documents( ) -> document_service.SearchDocumentsResponse: """Post-rpc interceptor for search_documents - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_search_documents_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DocumentService server but before - it is returned to user code. + it is returned to user code. This `post_search_documents` interceptor runs + before the `post_search_documents_with_metadata` interceptor. """ return response + def post_search_documents_with_metadata( + self, + response: document_service.SearchDocumentsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + document_service.SearchDocumentsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for search_documents + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DocumentService server but before it is returned to user code. + + We recommend only using this `post_search_documents_with_metadata` + interceptor in new development instead of the `post_search_documents` interceptor. + When both interceptors are used, this `post_search_documents_with_metadata` interceptor runs after the + `post_search_documents` interceptor. The (possibly modified) response returned by + `post_search_documents` will be passed to + `post_search_documents_with_metadata`. + """ + return response, metadata + def pre_set_acl( self, request: document_service_request.SetAclRequest, @@ -304,12 +426,37 @@ def post_set_acl( ) -> document_service.SetAclResponse: """Post-rpc interceptor for set_acl - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_set_acl_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DocumentService server but before - it is returned to user code. + it is returned to user code. This `post_set_acl` interceptor runs + before the `post_set_acl_with_metadata` interceptor. 
""" return response + def post_set_acl_with_metadata( + self, + response: document_service.SetAclResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + document_service.SetAclResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for set_acl + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DocumentService server but before it is returned to user code. + + We recommend only using this `post_set_acl_with_metadata` + interceptor in new development instead of the `post_set_acl` interceptor. + When both interceptors are used, this `post_set_acl_with_metadata` interceptor runs after the + `post_set_acl` interceptor. The (possibly modified) response returned by + `post_set_acl` will be passed to + `post_set_acl_with_metadata`. + """ + return response, metadata + def pre_update_document( self, request: document_service_request.UpdateDocumentRequest, @@ -330,12 +477,37 @@ def post_update_document( ) -> document_service.UpdateDocumentResponse: """Post-rpc interceptor for update_document - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_document_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DocumentService server but before - it is returned to user code. + it is returned to user code. This `post_update_document` interceptor runs + before the `post_update_document_with_metadata` interceptor. """ return response + def post_update_document_with_metadata( + self, + response: document_service.UpdateDocumentResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + document_service.UpdateDocumentResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for update_document + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DocumentService server but before it is returned to user code. + + We recommend only using this `post_update_document_with_metadata` + interceptor in new development instead of the `post_update_document` interceptor. + When both interceptors are used, this `post_update_document_with_metadata` interceptor runs after the + `post_update_document` interceptor. The (possibly modified) response returned by + `post_update_document` will be passed to + `post_update_document_with_metadata`. 
+ """ + return response, metadata + def pre_get_operation( self, request: operations_pb2.GetOperationRequest, @@ -574,6 +746,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_document(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_document_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -844,6 +1020,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_fetch_acl(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_fetch_acl_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -995,6 +1175,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_document(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_document_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1144,6 +1328,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_lock_document(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_lock_document_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1295,6 +1483,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_search_documents(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_search_documents_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1450,6 +1642,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_set_acl(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_set_acl_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1599,6 +1795,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_document(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_document_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/pipeline_service/client.py b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/pipeline_service/client.py index 2c81f27be6c0..e1b74665bfbb 100644 --- a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/pipeline_service/client.py +++ b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/pipeline_service/client.py @@ -14,6 +14,8 @@ # limitations under 
the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -504,6 +506,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -880,16 +909,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/pipeline_service/transports/rest.py b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/pipeline_service/transports/rest.py index 80f147358162..b3669bca8c18 100644 --- a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/pipeline_service/transports/rest.py +++ b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/pipeline_service/transports/rest.py @@ -102,12 +102,35 @@ def post_run_pipeline( ) -> operations_pb2.Operation: """Post-rpc interceptor for run_pipeline - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_run_pipeline_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the PipelineService server but before - it is returned to user code. + it is returned to user code. This `post_run_pipeline` interceptor runs + before the `post_run_pipeline_with_metadata` interceptor. """ return response + def post_run_pipeline_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for run_pipeline + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the PipelineService server but before it is returned to user code. + + We recommend only using this `post_run_pipeline_with_metadata` + interceptor in new development instead of the `post_run_pipeline` interceptor. 
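A hedged sketch of how the credential information attached by the new `_add_cred_info_for_auth_errors` helper could surface to a caller. It assumes google-auth >= 2.35.0 (otherwise nothing is appended), default application credentials, and a hypothetical operation name.

import json

from google.api_core import exceptions as core_exceptions
from google.cloud import contentwarehouse_v1
from google.longrunning import operations_pb2

client = contentwarehouse_v1.PipelineServiceClient()  # assumes default credentials

try:
    client.get_operation(
        # Hypothetical operation name, for illustration only.
        operations_pb2.GetOperationRequest(name="projects/p/locations/l/operations/op")
    )
except core_exceptions.GoogleAPICallError as exc:
    # On 401/403/404 the client appends a JSON string describing the active
    # credential to the error details before re-raising.
    for detail in exc.details:
        if isinstance(detail, str):
            try:
                print(json.loads(detail))
            except ValueError:
                pass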
+ When both interceptors are used, this `post_run_pipeline_with_metadata` interceptor runs after the + `post_run_pipeline` interceptor. The (possibly modified) response returned by + `post_run_pipeline` will be passed to + `post_run_pipeline_with_metadata`. + """ + return response, metadata + def pre_get_operation( self, request: operations_pb2.GetOperationRequest, @@ -380,6 +403,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_run_pipeline(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_run_pipeline_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/rule_set_service/client.py b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/rule_set_service/client.py index abd7608daa4c..f17e73c0b4ff 100644 --- a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/rule_set_service/client.py +++ b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/rule_set_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -525,6 +527,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -1335,16 +1364,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. 
+ return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/rule_set_service/transports/rest.py b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/rule_set_service/transports/rest.py index 72d159664462..ea31518aa9cb 100644 --- a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/rule_set_service/transports/rest.py +++ b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/rule_set_service/transports/rest.py @@ -132,12 +132,35 @@ def post_create_rule_set( ) -> rule_engine.RuleSet: """Post-rpc interceptor for create_rule_set - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_rule_set_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RuleSetService server but before - it is returned to user code. + it is returned to user code. This `post_create_rule_set` interceptor runs + before the `post_create_rule_set_with_metadata` interceptor. """ return response + def post_create_rule_set_with_metadata( + self, + response: rule_engine.RuleSet, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[rule_engine.RuleSet, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_rule_set + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RuleSetService server but before it is returned to user code. + + We recommend only using this `post_create_rule_set_with_metadata` + interceptor in new development instead of the `post_create_rule_set` interceptor. + When both interceptors are used, this `post_create_rule_set_with_metadata` interceptor runs after the + `post_create_rule_set` interceptor. The (possibly modified) response returned by + `post_create_rule_set` will be passed to + `post_create_rule_set_with_metadata`. + """ + return response, metadata + def pre_delete_rule_set( self, request: ruleset_service_request.DeleteRuleSetRequest, @@ -171,12 +194,35 @@ def pre_get_rule_set( def post_get_rule_set(self, response: rule_engine.RuleSet) -> rule_engine.RuleSet: """Post-rpc interceptor for get_rule_set - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_rule_set_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RuleSetService server but before - it is returned to user code. + it is returned to user code. This `post_get_rule_set` interceptor runs + before the `post_get_rule_set_with_metadata` interceptor. """ return response + def post_get_rule_set_with_metadata( + self, + response: rule_engine.RuleSet, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[rule_engine.RuleSet, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_rule_set + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RuleSetService server but before it is returned to user code. + + We recommend only using this `post_get_rule_set_with_metadata` + interceptor in new development instead of the `post_get_rule_set` interceptor. 
+ When both interceptors are used, this `post_get_rule_set_with_metadata` interceptor runs after the + `post_get_rule_set` interceptor. The (possibly modified) response returned by + `post_get_rule_set` will be passed to + `post_get_rule_set_with_metadata`. + """ + return response, metadata + def pre_list_rule_sets( self, request: ruleset_service_request.ListRuleSetsRequest, @@ -197,12 +243,38 @@ def post_list_rule_sets( ) -> ruleset_service_request.ListRuleSetsResponse: """Post-rpc interceptor for list_rule_sets - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_rule_sets_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RuleSetService server but before - it is returned to user code. + it is returned to user code. This `post_list_rule_sets` interceptor runs + before the `post_list_rule_sets_with_metadata` interceptor. """ return response + def post_list_rule_sets_with_metadata( + self, + response: ruleset_service_request.ListRuleSetsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + ruleset_service_request.ListRuleSetsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_rule_sets + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RuleSetService server but before it is returned to user code. + + We recommend only using this `post_list_rule_sets_with_metadata` + interceptor in new development instead of the `post_list_rule_sets` interceptor. + When both interceptors are used, this `post_list_rule_sets_with_metadata` interceptor runs after the + `post_list_rule_sets` interceptor. The (possibly modified) response returned by + `post_list_rule_sets` will be passed to + `post_list_rule_sets_with_metadata`. + """ + return response, metadata + def pre_update_rule_set( self, request: ruleset_service_request.UpdateRuleSetRequest, @@ -223,12 +295,35 @@ def post_update_rule_set( ) -> rule_engine.RuleSet: """Post-rpc interceptor for update_rule_set - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_rule_set_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RuleSetService server but before - it is returned to user code. + it is returned to user code. This `post_update_rule_set` interceptor runs + before the `post_update_rule_set_with_metadata` interceptor. """ return response + def post_update_rule_set_with_metadata( + self, + response: rule_engine.RuleSet, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[rule_engine.RuleSet, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_rule_set + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RuleSetService server but before it is returned to user code. + + We recommend only using this `post_update_rule_set_with_metadata` + interceptor in new development instead of the `post_update_rule_set` interceptor. + When both interceptors are used, this `post_update_rule_set_with_metadata` interceptor runs after the + `post_update_rule_set` interceptor. The (possibly modified) response returned by + `post_update_rule_set` will be passed to + `post_update_rule_set_with_metadata`. 
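The `metadata` argument these new hooks receive is simply the HTTP response headers flattened into (name, value) string tuples, as the `__call__` changes that follow construct it. A tiny sketch with assumed header values:

# Mirrors the transport-side construction; `response_headers` stands in for
# `response.headers` on the HTTP response object.
response_headers = {"content-type": "application/json", "x-goog-request-id": "abc123"}
response_metadata = [(k, str(v)) for k, v in response_headers.items()]
# response_metadata == [("content-type", "application/json"), ("x-goog-request-id", "abc123")]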
+ """ + return response, metadata + def pre_get_operation( self, request: operations_pb2.GetOperationRequest, @@ -467,6 +562,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_rule_set(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_rule_set_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -719,6 +818,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_rule_set(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_rule_set_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -862,6 +965,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_rule_sets(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_rule_sets_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1013,6 +1120,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_rule_set(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_rule_set_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/synonym_set_service/client.py b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/synonym_set_service/client.py index 8eaf81880821..491fe09b83cc 100644 --- a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/synonym_set_service/client.py +++ b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/synonym_set_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -506,6 +508,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. 
@@ -1356,16 +1385,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/synonym_set_service/transports/rest.py b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/synonym_set_service/transports/rest.py index ae0a7f14cbd7..08f69d2b2b9f 100644 --- a/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/synonym_set_service/transports/rest.py +++ b/packages/google-cloud-contentwarehouse/google/cloud/contentwarehouse_v1/services/synonym_set_service/transports/rest.py @@ -135,12 +135,35 @@ def post_create_synonym_set( ) -> synonymset.SynonymSet: """Post-rpc interceptor for create_synonym_set - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_synonym_set_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the SynonymSetService server but before - it is returned to user code. + it is returned to user code. This `post_create_synonym_set` interceptor runs + before the `post_create_synonym_set_with_metadata` interceptor. """ return response + def post_create_synonym_set_with_metadata( + self, + response: synonymset.SynonymSet, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[synonymset.SynonymSet, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_synonym_set + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SynonymSetService server but before it is returned to user code. + + We recommend only using this `post_create_synonym_set_with_metadata` + interceptor in new development instead of the `post_create_synonym_set` interceptor. + When both interceptors are used, this `post_create_synonym_set_with_metadata` interceptor runs after the + `post_create_synonym_set` interceptor. The (possibly modified) response returned by + `post_create_synonym_set` will be passed to + `post_create_synonym_set_with_metadata`. + """ + return response, metadata + def pre_delete_synonym_set( self, request: synonymset_service_request.DeleteSynonymSetRequest, @@ -176,12 +199,35 @@ def post_get_synonym_set( ) -> synonymset.SynonymSet: """Post-rpc interceptor for get_synonym_set - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_synonym_set_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the SynonymSetService server but before - it is returned to user code. + it is returned to user code. This `post_get_synonym_set` interceptor runs + before the `post_get_synonym_set_with_metadata` interceptor. 
""" return response + def post_get_synonym_set_with_metadata( + self, + response: synonymset.SynonymSet, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[synonymset.SynonymSet, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_synonym_set + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SynonymSetService server but before it is returned to user code. + + We recommend only using this `post_get_synonym_set_with_metadata` + interceptor in new development instead of the `post_get_synonym_set` interceptor. + When both interceptors are used, this `post_get_synonym_set_with_metadata` interceptor runs after the + `post_get_synonym_set` interceptor. The (possibly modified) response returned by + `post_get_synonym_set` will be passed to + `post_get_synonym_set_with_metadata`. + """ + return response, metadata + def pre_list_synonym_sets( self, request: synonymset_service_request.ListSynonymSetsRequest, @@ -202,12 +248,38 @@ def post_list_synonym_sets( ) -> synonymset_service_request.ListSynonymSetsResponse: """Post-rpc interceptor for list_synonym_sets - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_synonym_sets_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the SynonymSetService server but before - it is returned to user code. + it is returned to user code. This `post_list_synonym_sets` interceptor runs + before the `post_list_synonym_sets_with_metadata` interceptor. """ return response + def post_list_synonym_sets_with_metadata( + self, + response: synonymset_service_request.ListSynonymSetsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + synonymset_service_request.ListSynonymSetsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_synonym_sets + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SynonymSetService server but before it is returned to user code. + + We recommend only using this `post_list_synonym_sets_with_metadata` + interceptor in new development instead of the `post_list_synonym_sets` interceptor. + When both interceptors are used, this `post_list_synonym_sets_with_metadata` interceptor runs after the + `post_list_synonym_sets` interceptor. The (possibly modified) response returned by + `post_list_synonym_sets` will be passed to + `post_list_synonym_sets_with_metadata`. + """ + return response, metadata + def pre_update_synonym_set( self, request: synonymset_service_request.UpdateSynonymSetRequest, @@ -228,12 +300,35 @@ def post_update_synonym_set( ) -> synonymset.SynonymSet: """Post-rpc interceptor for update_synonym_set - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_synonym_set_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the SynonymSetService server but before - it is returned to user code. + it is returned to user code. This `post_update_synonym_set` interceptor runs + before the `post_update_synonym_set_with_metadata` interceptor. 
""" return response + def post_update_synonym_set_with_metadata( + self, + response: synonymset.SynonymSet, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[synonymset.SynonymSet, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_synonym_set + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SynonymSetService server but before it is returned to user code. + + We recommend only using this `post_update_synonym_set_with_metadata` + interceptor in new development instead of the `post_update_synonym_set` interceptor. + When both interceptors are used, this `post_update_synonym_set_with_metadata` interceptor runs after the + `post_update_synonym_set` interceptor. The (possibly modified) response returned by + `post_update_synonym_set` will be passed to + `post_update_synonym_set_with_metadata`. + """ + return response, metadata + def pre_get_operation( self, request: operations_pb2.GetOperationRequest, @@ -482,6 +577,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_synonym_set(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_synonym_set_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -744,6 +843,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_synonym_set(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_synonym_set_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -892,6 +995,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_synonym_sets(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_synonym_sets_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1058,6 +1165,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_synonym_set(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_synonym_set_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-contentwarehouse/samples/generated_samples/snippet_metadata_google.cloud.contentwarehouse.v1.json b/packages/google-cloud-contentwarehouse/samples/generated_samples/snippet_metadata_google.cloud.contentwarehouse.v1.json index a4ef6e0c7057..ff016d747a0f 100644 --- a/packages/google-cloud-contentwarehouse/samples/generated_samples/snippet_metadata_google.cloud.contentwarehouse.v1.json +++ b/packages/google-cloud-contentwarehouse/samples/generated_samples/snippet_metadata_google.cloud.contentwarehouse.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-contentwarehouse", - "version": "0.7.12" + "version": "0.7.13" }, "snippets": [ { diff --git a/packages/google-cloud-contentwarehouse/tests/unit/gapic/contentwarehouse_v1/test_document_link_service.py 
b/packages/google-cloud-contentwarehouse/tests/unit/gapic/contentwarehouse_v1/test_document_link_service.py index 2837aaf39f58..30c39cba35bc 100644 --- a/packages/google-cloud-contentwarehouse/tests/unit/gapic/contentwarehouse_v1/test_document_link_service.py +++ b/packages/google-cloud-contentwarehouse/tests/unit/gapic/contentwarehouse_v1/test_document_link_service.py @@ -66,6 +66,13 @@ document_link_service, ) +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -339,6 +346,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = DocumentLinkServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = DocumentLinkServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -3936,10 +3986,14 @@ def test_list_linked_targets_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DocumentLinkServiceRestInterceptor, "post_list_linked_targets" ) as post, mock.patch.object( + transports.DocumentLinkServiceRestInterceptor, + "post_list_linked_targets_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DocumentLinkServiceRestInterceptor, "pre_list_linked_targets" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = document_link_service.ListLinkedTargetsRequest.pb( document_link_service.ListLinkedTargetsRequest() ) @@ -3965,6 +4019,10 @@ def test_list_linked_targets_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = document_link_service.ListLinkedTargetsResponse() + post_with_metadata.return_value = ( + document_link_service.ListLinkedTargetsResponse(), + metadata, + ) client.list_linked_targets( request, @@ -3976,6 +4034,7 @@ def test_list_linked_targets_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_linked_sources_rest_bad_request( @@ -4060,10 +4119,14 @@ def test_list_linked_sources_rest_interceptors(null_interceptor): ) as 
transcode, mock.patch.object( transports.DocumentLinkServiceRestInterceptor, "post_list_linked_sources" ) as post, mock.patch.object( + transports.DocumentLinkServiceRestInterceptor, + "post_list_linked_sources_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DocumentLinkServiceRestInterceptor, "pre_list_linked_sources" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = document_link_service.ListLinkedSourcesRequest.pb( document_link_service.ListLinkedSourcesRequest() ) @@ -4089,6 +4152,10 @@ def test_list_linked_sources_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = document_link_service.ListLinkedSourcesResponse() + post_with_metadata.return_value = ( + document_link_service.ListLinkedSourcesResponse(), + metadata, + ) client.list_linked_sources( request, @@ -4100,6 +4167,7 @@ def test_list_linked_sources_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_document_link_rest_bad_request( @@ -4188,10 +4256,14 @@ def test_create_document_link_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DocumentLinkServiceRestInterceptor, "post_create_document_link" ) as post, mock.patch.object( + transports.DocumentLinkServiceRestInterceptor, + "post_create_document_link_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DocumentLinkServiceRestInterceptor, "pre_create_document_link" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = document_link_service.CreateDocumentLinkRequest.pb( document_link_service.CreateDocumentLinkRequest() ) @@ -4217,6 +4289,7 @@ def test_create_document_link_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = document_link_service.DocumentLink() + post_with_metadata.return_value = document_link_service.DocumentLink(), metadata client.create_document_link( request, @@ -4228,6 +4301,7 @@ def test_create_document_link_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_document_link_rest_bad_request( diff --git a/packages/google-cloud-contentwarehouse/tests/unit/gapic/contentwarehouse_v1/test_document_schema_service.py b/packages/google-cloud-contentwarehouse/tests/unit/gapic/contentwarehouse_v1/test_document_schema_service.py index f438ebdf86e5..98ed8fc315e8 100644 --- a/packages/google-cloud-contentwarehouse/tests/unit/gapic/contentwarehouse_v1/test_document_schema_service.py +++ b/packages/google-cloud-contentwarehouse/tests/unit/gapic/contentwarehouse_v1/test_document_schema_service.py @@ -66,6 +66,13 @@ from google.cloud.contentwarehouse_v1.types import document_schema from google.cloud.contentwarehouse_v1.types import document_schema_service +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -341,6 +348,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = DocumentSchemaServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = DocumentSchemaServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -4696,10 +4746,14 @@ def test_create_document_schema_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DocumentSchemaServiceRestInterceptor, "post_create_document_schema" ) as post, mock.patch.object( + transports.DocumentSchemaServiceRestInterceptor, + "post_create_document_schema_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DocumentSchemaServiceRestInterceptor, "pre_create_document_schema" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = document_schema_service.CreateDocumentSchemaRequest.pb( document_schema_service.CreateDocumentSchemaRequest() ) @@ -4725,6 +4779,7 @@ def test_create_document_schema_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gcc_document_schema.DocumentSchema() + post_with_metadata.return_value = gcc_document_schema.DocumentSchema(), metadata client.create_document_schema( request, @@ -4736,6 +4791,7 @@ def test_create_document_schema_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_document_schema_rest_bad_request( @@ -4830,10 +4886,14 @@ def test_update_document_schema_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DocumentSchemaServiceRestInterceptor, "post_update_document_schema" ) as post, mock.patch.object( + transports.DocumentSchemaServiceRestInterceptor, + "post_update_document_schema_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DocumentSchemaServiceRestInterceptor, "pre_update_document_schema" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = document_schema_service.UpdateDocumentSchemaRequest.pb( document_schema_service.UpdateDocumentSchemaRequest() ) @@ -4859,6 +4919,7 @@ def test_update_document_schema_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gcc_document_schema.DocumentSchema() + post_with_metadata.return_value = 
gcc_document_schema.DocumentSchema(), metadata client.update_document_schema( request, @@ -4870,6 +4931,7 @@ def test_update_document_schema_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_document_schema_rest_bad_request( @@ -4964,10 +5026,14 @@ def test_get_document_schema_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DocumentSchemaServiceRestInterceptor, "post_get_document_schema" ) as post, mock.patch.object( + transports.DocumentSchemaServiceRestInterceptor, + "post_get_document_schema_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DocumentSchemaServiceRestInterceptor, "pre_get_document_schema" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = document_schema_service.GetDocumentSchemaRequest.pb( document_schema_service.GetDocumentSchemaRequest() ) @@ -4993,6 +5059,7 @@ def test_get_document_schema_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = document_schema.DocumentSchema() + post_with_metadata.return_value = document_schema.DocumentSchema(), metadata client.get_document_schema( request, @@ -5004,6 +5071,7 @@ def test_get_document_schema_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_document_schema_rest_bad_request( @@ -5203,10 +5271,14 @@ def test_list_document_schemas_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DocumentSchemaServiceRestInterceptor, "post_list_document_schemas" ) as post, mock.patch.object( + transports.DocumentSchemaServiceRestInterceptor, + "post_list_document_schemas_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DocumentSchemaServiceRestInterceptor, "pre_list_document_schemas" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = document_schema_service.ListDocumentSchemasRequest.pb( document_schema_service.ListDocumentSchemasRequest() ) @@ -5232,6 +5304,10 @@ def test_list_document_schemas_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = document_schema_service.ListDocumentSchemasResponse() + post_with_metadata.return_value = ( + document_schema_service.ListDocumentSchemasResponse(), + metadata, + ) client.list_document_schemas( request, @@ -5243,6 +5319,7 @@ def test_list_document_schemas_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_operation_rest_bad_request( diff --git a/packages/google-cloud-contentwarehouse/tests/unit/gapic/contentwarehouse_v1/test_document_service.py b/packages/google-cloud-contentwarehouse/tests/unit/gapic/contentwarehouse_v1/test_document_service.py index fd8d8f1719db..b5fc6ccd9ca6 100644 --- a/packages/google-cloud-contentwarehouse/tests/unit/gapic/contentwarehouse_v1/test_document_service.py +++ b/packages/google-cloud-contentwarehouse/tests/unit/gapic/contentwarehouse_v1/test_document_service.py @@ -86,6 +86,13 @@ from google.cloud.contentwarehouse_v1.types import common from google.cloud.contentwarehouse_v1.types import document as gcc_document +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} 
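The fixture above stands in for what a credential's get_cred_info() might return. A test-only credentials class along these lines would exercise the same code path; note that the real method only exists in google-auth >= 2.35.0 and the payload schema mirrored here is an assumption taken from the fixture, not a guaranteed contract.

from google.auth import credentials as ga_credentials

class FakeCredentialsWithCredInfo(ga_credentials.AnonymousCredentials):
    """Test-only stand-in exposing get_cred_info() like google-auth >= 2.35.0."""

    def get_cred_info(self):
        # The client json.dumps() whatever dict is returned here and appends it
        # to error details for 401/403/404 responses.
        return {
            "credential_source": "/path/to/file",
            "credential_type": "service account credentials",
            "principal": "service-account@example.com",
        }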
+CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -344,6 +351,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = DocumentServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = DocumentServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -6186,10 +6236,13 @@ def test_create_document_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DocumentServiceRestInterceptor, "post_create_document" ) as post, mock.patch.object( + transports.DocumentServiceRestInterceptor, "post_create_document_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DocumentServiceRestInterceptor, "pre_create_document" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = document_service_request.CreateDocumentRequest.pb( document_service_request.CreateDocumentRequest() ) @@ -6215,6 +6268,10 @@ def test_create_document_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = document_service.CreateDocumentResponse() + post_with_metadata.return_value = ( + document_service.CreateDocumentResponse(), + metadata, + ) client.create_document( request, @@ -6226,6 +6283,7 @@ def test_create_document_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_document_rest_bad_request( @@ -6345,10 +6403,13 @@ def test_get_document_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DocumentServiceRestInterceptor, "post_get_document" ) as post, mock.patch.object( + transports.DocumentServiceRestInterceptor, "post_get_document_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DocumentServiceRestInterceptor, "pre_get_document" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = document_service_request.GetDocumentRequest.pb( document_service_request.GetDocumentRequest() ) @@ -6372,6 +6433,7 @@ def test_get_document_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = 
gcc_document.Document() + post_with_metadata.return_value = gcc_document.Document(), metadata client.get_document( request, @@ -6383,6 +6445,7 @@ def test_get_document_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_document_rest_bad_request( @@ -6464,10 +6527,13 @@ def test_update_document_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DocumentServiceRestInterceptor, "post_update_document" ) as post, mock.patch.object( + transports.DocumentServiceRestInterceptor, "post_update_document_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DocumentServiceRestInterceptor, "pre_update_document" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = document_service_request.UpdateDocumentRequest.pb( document_service_request.UpdateDocumentRequest() ) @@ -6493,6 +6559,10 @@ def test_update_document_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = document_service.UpdateDocumentResponse() + post_with_metadata.return_value = ( + document_service.UpdateDocumentResponse(), + metadata, + ) client.update_document( request, @@ -6504,6 +6574,7 @@ def test_update_document_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_document_rest_bad_request( @@ -6701,10 +6772,13 @@ def test_search_documents_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DocumentServiceRestInterceptor, "post_search_documents" ) as post, mock.patch.object( + transports.DocumentServiceRestInterceptor, "post_search_documents_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DocumentServiceRestInterceptor, "pre_search_documents" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = document_service_request.SearchDocumentsRequest.pb( document_service_request.SearchDocumentsRequest() ) @@ -6730,6 +6804,10 @@ def test_search_documents_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = document_service.SearchDocumentsResponse() + post_with_metadata.return_value = ( + document_service.SearchDocumentsResponse(), + metadata, + ) client.search_documents( request, @@ -6741,6 +6819,7 @@ def test_search_documents_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_lock_document_rest_bad_request( @@ -6860,10 +6939,13 @@ def test_lock_document_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DocumentServiceRestInterceptor, "post_lock_document" ) as post, mock.patch.object( + transports.DocumentServiceRestInterceptor, "post_lock_document_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DocumentServiceRestInterceptor, "pre_lock_document" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = document_service_request.LockDocumentRequest.pb( document_service_request.LockDocumentRequest() ) @@ -6887,6 +6969,7 @@ def test_lock_document_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gcc_document.Document() + post_with_metadata.return_value = gcc_document.Document(), metadata client.lock_document( request, 
@@ -6898,6 +6981,7 @@ def test_lock_document_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_fetch_acl_rest_bad_request( @@ -6979,10 +7063,13 @@ def test_fetch_acl_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DocumentServiceRestInterceptor, "post_fetch_acl" ) as post, mock.patch.object( + transports.DocumentServiceRestInterceptor, "post_fetch_acl_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DocumentServiceRestInterceptor, "pre_fetch_acl" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = document_service_request.FetchAclRequest.pb( document_service_request.FetchAclRequest() ) @@ -7008,6 +7095,7 @@ def test_fetch_acl_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = document_service.FetchAclResponse() + post_with_metadata.return_value = document_service.FetchAclResponse(), metadata client.fetch_acl( request, @@ -7019,6 +7107,7 @@ def test_fetch_acl_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_set_acl_rest_bad_request(request_type=document_service_request.SetAclRequest): @@ -7098,10 +7187,13 @@ def test_set_acl_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DocumentServiceRestInterceptor, "post_set_acl" ) as post, mock.patch.object( + transports.DocumentServiceRestInterceptor, "post_set_acl_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DocumentServiceRestInterceptor, "pre_set_acl" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = document_service_request.SetAclRequest.pb( document_service_request.SetAclRequest() ) @@ -7127,6 +7219,7 @@ def test_set_acl_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = document_service.SetAclResponse() + post_with_metadata.return_value = document_service.SetAclResponse(), metadata client.set_acl( request, @@ -7138,6 +7231,7 @@ def test_set_acl_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_operation_rest_bad_request( diff --git a/packages/google-cloud-contentwarehouse/tests/unit/gapic/contentwarehouse_v1/test_pipeline_service.py b/packages/google-cloud-contentwarehouse/tests/unit/gapic/contentwarehouse_v1/test_pipeline_service.py index b2ab9bdcfb17..33341fa83509 100644 --- a/packages/google-cloud-contentwarehouse/tests/unit/gapic/contentwarehouse_v1/test_pipeline_service.py +++ b/packages/google-cloud-contentwarehouse/tests/unit/gapic/contentwarehouse_v1/test_pipeline_service.py @@ -71,6 +71,13 @@ ) from google.cloud.contentwarehouse_v1.types import common, pipeline_service, pipelines +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -329,6 +336,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
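The interceptor hunks above all follow the same shape: the new `post_*_with_metadata` hook is patched next to the existing `post_*` hook, primed to return `(response, metadata)`, and asserted to have been called exactly once. As a minimal sketch (not part of this diff) of how an application might override one of these hooks, assuming the standard GAPIC layout for the contentwarehouse transports module and using `post_fetch_acl_with_metadata`, which the tests above patch; the logging body is purely hypothetical:

# Illustrative sketch only, not part of the diff.
from google.cloud.contentwarehouse_v1.services.document_service import transports

class HeaderLoggingInterceptor(transports.DocumentServiceRestInterceptor):
    def post_fetch_acl_with_metadata(self, response, metadata):
        # metadata is a sequence of (key, value) pairs built from the HTTP
        # response headers; returning both preserves the default behavior.
        for key, value in metadata:
            print(f"{key}: {value}")
        return response, metadata

Per the generated REST transports, such an interceptor would be supplied when the client's REST transport is constructed; that wiring is unchanged by this diff.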
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = PipelineServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = PipelineServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -1850,10 +1900,13 @@ def test_run_pipeline_rest_interceptors(null_interceptor): ), mock.patch.object( transports.PipelineServiceRestInterceptor, "post_run_pipeline" ) as post, mock.patch.object( + transports.PipelineServiceRestInterceptor, "post_run_pipeline_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.PipelineServiceRestInterceptor, "pre_run_pipeline" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = pipeline_service.RunPipelineRequest.pb( pipeline_service.RunPipelineRequest() ) @@ -1877,6 +1930,7 @@ def test_run_pipeline_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.run_pipeline( request, @@ -1888,6 +1942,7 @@ def test_run_pipeline_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_operation_rest_bad_request( diff --git a/packages/google-cloud-contentwarehouse/tests/unit/gapic/contentwarehouse_v1/test_rule_set_service.py b/packages/google-cloud-contentwarehouse/tests/unit/gapic/contentwarehouse_v1/test_rule_set_service.py index 7848e16f6034..eacfbb42420d 100644 --- a/packages/google-cloud-contentwarehouse/tests/unit/gapic/contentwarehouse_v1/test_rule_set_service.py +++ b/packages/google-cloud-contentwarehouse/tests/unit/gapic/contentwarehouse_v1/test_rule_set_service.py @@ -63,6 +63,13 @@ ) from google.cloud.contentwarehouse_v1.types import rule_engine, ruleset_service_request +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -321,6 +328,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
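The `test__add_cred_info_for_auth_errors` parametrization above pins down the behavior of the `_add_cred_info_for_auth_errors` helper these clients gain (its implementation appears further down in this diff for the DataFusion and DataQna clients): only 401/403/404 errors are annotated, and only when the credentials expose `get_cred_info`. A condensed, illustrative sketch of a single case, reusing the same mock-based setup as the test; the import path is assumed from the standard GAPIC layout:

# Illustrative sketch only: one worked case of the behavior under test.
import json
from unittest import mock

from google.api_core import exceptions as core_exceptions
from google.cloud.contentwarehouse_v1.services.pipeline_service import PipelineServiceClient

cred = mock.Mock(["get_cred_info"])
cred.get_cred_info = mock.Mock(return_value={"principal": "service-account@example.com"})
client = PipelineServiceClient(credentials=cred)
client._transport._credentials = cred

error = core_exceptions.GoogleAPICallError("message", details=["foo"])
error.code = 403  # 401/403/404 are annotated; a 500 would leave details unchanged
client._add_cred_info_for_auth_errors(error)
assert error.details == ["foo", json.dumps({"principal": "service-account@example.com"})]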
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = RuleSetServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = RuleSetServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -4470,10 +4520,13 @@ def test_create_rule_set_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RuleSetServiceRestInterceptor, "post_create_rule_set" ) as post, mock.patch.object( + transports.RuleSetServiceRestInterceptor, "post_create_rule_set_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.RuleSetServiceRestInterceptor, "pre_create_rule_set" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = ruleset_service_request.CreateRuleSetRequest.pb( ruleset_service_request.CreateRuleSetRequest() ) @@ -4497,6 +4550,7 @@ def test_create_rule_set_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = rule_engine.RuleSet() + post_with_metadata.return_value = rule_engine.RuleSet(), metadata client.create_rule_set( request, @@ -4508,6 +4562,7 @@ def test_create_rule_set_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_rule_set_rest_bad_request( @@ -4596,10 +4651,13 @@ def test_get_rule_set_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RuleSetServiceRestInterceptor, "post_get_rule_set" ) as post, mock.patch.object( + transports.RuleSetServiceRestInterceptor, "post_get_rule_set_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.RuleSetServiceRestInterceptor, "pre_get_rule_set" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = ruleset_service_request.GetRuleSetRequest.pb( ruleset_service_request.GetRuleSetRequest() ) @@ -4623,6 +4681,7 @@ def test_get_rule_set_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = rule_engine.RuleSet() + post_with_metadata.return_value = rule_engine.RuleSet(), metadata client.get_rule_set( request, @@ -4634,6 +4693,7 @@ def test_get_rule_set_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_rule_set_rest_bad_request( @@ 
-4722,10 +4782,13 @@ def test_update_rule_set_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RuleSetServiceRestInterceptor, "post_update_rule_set" ) as post, mock.patch.object( + transports.RuleSetServiceRestInterceptor, "post_update_rule_set_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.RuleSetServiceRestInterceptor, "pre_update_rule_set" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = ruleset_service_request.UpdateRuleSetRequest.pb( ruleset_service_request.UpdateRuleSetRequest() ) @@ -4749,6 +4812,7 @@ def test_update_rule_set_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = rule_engine.RuleSet() + post_with_metadata.return_value = rule_engine.RuleSet(), metadata client.update_rule_set( request, @@ -4760,6 +4824,7 @@ def test_update_rule_set_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_rule_set_rest_bad_request( @@ -4953,10 +5018,13 @@ def test_list_rule_sets_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RuleSetServiceRestInterceptor, "post_list_rule_sets" ) as post, mock.patch.object( + transports.RuleSetServiceRestInterceptor, "post_list_rule_sets_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.RuleSetServiceRestInterceptor, "pre_list_rule_sets" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = ruleset_service_request.ListRuleSetsRequest.pb( ruleset_service_request.ListRuleSetsRequest() ) @@ -4982,6 +5050,10 @@ def test_list_rule_sets_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = ruleset_service_request.ListRuleSetsResponse() + post_with_metadata.return_value = ( + ruleset_service_request.ListRuleSetsResponse(), + metadata, + ) client.list_rule_sets( request, @@ -4993,6 +5065,7 @@ def test_list_rule_sets_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_operation_rest_bad_request( diff --git a/packages/google-cloud-contentwarehouse/tests/unit/gapic/contentwarehouse_v1/test_synonym_set_service.py b/packages/google-cloud-contentwarehouse/tests/unit/gapic/contentwarehouse_v1/test_synonym_set_service.py index 6541b5f3a56a..7282b0f09f27 100644 --- a/packages/google-cloud-contentwarehouse/tests/unit/gapic/contentwarehouse_v1/test_synonym_set_service.py +++ b/packages/google-cloud-contentwarehouse/tests/unit/gapic/contentwarehouse_v1/test_synonym_set_service.py @@ -64,6 +64,13 @@ synonymset_service_request, ) +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -331,6 +338,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
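One detail of the test setup above worth calling out: `mock.Mock(["get_cred_info"])` passes a spec list, so the resulting mock exposes only that one attribute, while `mock.Mock([])` exposes none. That is what makes the `hasattr(cred, "get_cred_info")` guard inside `_add_cred_info_for_auth_errors` take the early-return path in the `_no_get_cred_info` variants. A small sketch of that `unittest.mock` behavior:

# Illustrative sketch only: how the spec list drives hasattr() in the tests above.
from unittest import mock

cred_with_info = mock.Mock(["get_cred_info"])  # spec limited to a single attribute
cred_without_info = mock.Mock([])              # empty spec: no attributes at all

assert hasattr(cred_with_info, "get_cred_info")
assert not hasattr(cred_without_info, "get_cred_info")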
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = SynonymSetServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = SynonymSetServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -4566,10 +4616,14 @@ def test_create_synonym_set_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.SynonymSetServiceRestInterceptor, "post_create_synonym_set" ) as post, mock.patch.object( + transports.SynonymSetServiceRestInterceptor, + "post_create_synonym_set_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.SynonymSetServiceRestInterceptor, "pre_create_synonym_set" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = synonymset_service_request.CreateSynonymSetRequest.pb( synonymset_service_request.CreateSynonymSetRequest() ) @@ -4593,6 +4647,7 @@ def test_create_synonym_set_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = synonymset.SynonymSet() + post_with_metadata.return_value = synonymset.SynonymSet(), metadata client.create_synonym_set( request, @@ -4604,6 +4659,7 @@ def test_create_synonym_set_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_synonym_set_rest_bad_request( @@ -4690,10 +4746,14 @@ def test_get_synonym_set_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.SynonymSetServiceRestInterceptor, "post_get_synonym_set" ) as post, mock.patch.object( + transports.SynonymSetServiceRestInterceptor, + "post_get_synonym_set_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.SynonymSetServiceRestInterceptor, "pre_get_synonym_set" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = synonymset_service_request.GetSynonymSetRequest.pb( synonymset_service_request.GetSynonymSetRequest() ) @@ -4717,6 +4777,7 @@ def test_get_synonym_set_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = synonymset.SynonymSet() + post_with_metadata.return_value = synonymset.SynonymSet(), metadata client.get_synonym_set( request, @@ -4728,6 +4789,7 @@ def test_get_synonym_set_rest_interceptors(null_interceptor): pre.assert_called_once() 
post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_synonym_set_rest_bad_request( @@ -4888,10 +4950,14 @@ def test_update_synonym_set_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.SynonymSetServiceRestInterceptor, "post_update_synonym_set" ) as post, mock.patch.object( + transports.SynonymSetServiceRestInterceptor, + "post_update_synonym_set_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.SynonymSetServiceRestInterceptor, "pre_update_synonym_set" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = synonymset_service_request.UpdateSynonymSetRequest.pb( synonymset_service_request.UpdateSynonymSetRequest() ) @@ -4915,6 +4981,7 @@ def test_update_synonym_set_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = synonymset.SynonymSet() + post_with_metadata.return_value = synonymset.SynonymSet(), metadata client.update_synonym_set( request, @@ -4926,6 +4993,7 @@ def test_update_synonym_set_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_synonym_set_rest_bad_request( @@ -5121,10 +5189,14 @@ def test_list_synonym_sets_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.SynonymSetServiceRestInterceptor, "post_list_synonym_sets" ) as post, mock.patch.object( + transports.SynonymSetServiceRestInterceptor, + "post_list_synonym_sets_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.SynonymSetServiceRestInterceptor, "pre_list_synonym_sets" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = synonymset_service_request.ListSynonymSetsRequest.pb( synonymset_service_request.ListSynonymSetsRequest() ) @@ -5150,6 +5222,10 @@ def test_list_synonym_sets_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = synonymset_service_request.ListSynonymSetsResponse() + post_with_metadata.return_value = ( + synonymset_service_request.ListSynonymSetsResponse(), + metadata, + ) client.list_synonym_sets( request, @@ -5161,6 +5237,7 @@ def test_list_synonym_sets_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_operation_rest_bad_request( diff --git a/packages/google-cloud-data-fusion/CHANGELOG.md b/packages/google-cloud-data-fusion/CHANGELOG.md index 1a9a2613085a..f484ccaa8be1 100644 --- a/packages/google-cloud-data-fusion/CHANGELOG.md +++ b/packages/google-cloud-data-fusion/CHANGELOG.md @@ -1,5 +1,13 @@ # Changelog +## [1.13.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-data-fusion-v1.12.0...google-cloud-data-fusion-v1.13.0) (2025-02-12) + + +### Features + +* Add REST Interceptors which support reading metadata ([e22e2bd](https://github.com/googleapis/google-cloud-python/commit/e22e2bde55d11d2f85e9d2caf1d152a4027f88cf)) +* Add support for reading selective GAPIC generation methods from service YAML ([e22e2bd](https://github.com/googleapis/google-cloud-python/commit/e22e2bde55d11d2f85e9d2caf1d152a4027f88cf)) + ## [1.12.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-data-fusion-v1.11.1...google-cloud-data-fusion-v1.12.0) (2024-12-12) diff --git a/packages/google-cloud-data-fusion/google/cloud/data_fusion/gapic_version.py 
b/packages/google-cloud-data-fusion/google/cloud/data_fusion/gapic_version.py index 739fdfae141c..43155ded0db3 100644 --- a/packages/google-cloud-data-fusion/google/cloud/data_fusion/gapic_version.py +++ b/packages/google-cloud-data-fusion/google/cloud/data_fusion/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.12.0" # {x-release-please-version} +__version__ = "1.13.0" # {x-release-please-version} diff --git a/packages/google-cloud-data-fusion/google/cloud/data_fusion_v1/gapic_version.py b/packages/google-cloud-data-fusion/google/cloud/data_fusion_v1/gapic_version.py index 739fdfae141c..43155ded0db3 100644 --- a/packages/google-cloud-data-fusion/google/cloud/data_fusion_v1/gapic_version.py +++ b/packages/google-cloud-data-fusion/google/cloud/data_fusion_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.12.0" # {x-release-please-version} +__version__ = "1.13.0" # {x-release-please-version} diff --git a/packages/google-cloud-data-fusion/google/cloud/data_fusion_v1/services/data_fusion/client.py b/packages/google-cloud-data-fusion/google/cloud/data_fusion_v1/services/data_fusion/client.py index 73aae58c91f0..75497fc61ba6 100644 --- a/packages/google-cloud-data-fusion/google/cloud/data_fusion_v1/services/data_fusion/client.py +++ b/packages/google-cloud-data-fusion/google/cloud/data_fusion_v1/services/data_fusion/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -515,6 +517,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. diff --git a/packages/google-cloud-data-fusion/google/cloud/data_fusion_v1/services/data_fusion/transports/rest.py b/packages/google-cloud-data-fusion/google/cloud/data_fusion_v1/services/data_fusion/transports/rest.py index dc7929200ee1..87a3a46191b6 100644 --- a/packages/google-cloud-data-fusion/google/cloud/data_fusion_v1/services/data_fusion/transports/rest.py +++ b/packages/google-cloud-data-fusion/google/cloud/data_fusion_v1/services/data_fusion/transports/rest.py @@ -150,12 +150,35 @@ def post_create_instance( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_instance - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_instance_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response after it is returned by the DataFusion server but before - it is returned to user code. + it is returned to user code. This `post_create_instance` interceptor runs + before the `post_create_instance_with_metadata` interceptor. """ return response + def post_create_instance_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_instance + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataFusion server but before it is returned to user code. + + We recommend only using this `post_create_instance_with_metadata` + interceptor in new development instead of the `post_create_instance` interceptor. + When both interceptors are used, this `post_create_instance_with_metadata` interceptor runs after the + `post_create_instance` interceptor. The (possibly modified) response returned by + `post_create_instance` will be passed to + `post_create_instance_with_metadata`. + """ + return response, metadata + def pre_delete_instance( self, request: datafusion.DeleteInstanceRequest, @@ -175,12 +198,35 @@ def post_delete_instance( ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_instance - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_instance_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DataFusion server but before - it is returned to user code. + it is returned to user code. This `post_delete_instance` interceptor runs + before the `post_delete_instance_with_metadata` interceptor. """ return response + def post_delete_instance_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_instance + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataFusion server but before it is returned to user code. + + We recommend only using this `post_delete_instance_with_metadata` + interceptor in new development instead of the `post_delete_instance` interceptor. + When both interceptors are used, this `post_delete_instance_with_metadata` interceptor runs after the + `post_delete_instance` interceptor. The (possibly modified) response returned by + `post_delete_instance` will be passed to + `post_delete_instance_with_metadata`. + """ + return response, metadata + def pre_get_instance( self, request: datafusion.GetInstanceRequest, @@ -196,12 +242,35 @@ def pre_get_instance( def post_get_instance(self, response: datafusion.Instance) -> datafusion.Instance: """Post-rpc interceptor for get_instance - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_instance_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DataFusion server but before - it is returned to user code. + it is returned to user code. This `post_get_instance` interceptor runs + before the `post_get_instance_with_metadata` interceptor. 
""" return response + def post_get_instance_with_metadata( + self, + response: datafusion.Instance, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[datafusion.Instance, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_instance + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataFusion server but before it is returned to user code. + + We recommend only using this `post_get_instance_with_metadata` + interceptor in new development instead of the `post_get_instance` interceptor. + When both interceptors are used, this `post_get_instance_with_metadata` interceptor runs after the + `post_get_instance` interceptor. The (possibly modified) response returned by + `post_get_instance` will be passed to + `post_get_instance_with_metadata`. + """ + return response, metadata + def pre_list_available_versions( self, request: datafusion.ListAvailableVersionsRequest, @@ -221,12 +290,38 @@ def post_list_available_versions( ) -> datafusion.ListAvailableVersionsResponse: """Post-rpc interceptor for list_available_versions - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_available_versions_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DataFusion server but before - it is returned to user code. + it is returned to user code. This `post_list_available_versions` interceptor runs + before the `post_list_available_versions_with_metadata` interceptor. """ return response + def post_list_available_versions_with_metadata( + self, + response: datafusion.ListAvailableVersionsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + datafusion.ListAvailableVersionsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_available_versions + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataFusion server but before it is returned to user code. + + We recommend only using this `post_list_available_versions_with_metadata` + interceptor in new development instead of the `post_list_available_versions` interceptor. + When both interceptors are used, this `post_list_available_versions_with_metadata` interceptor runs after the + `post_list_available_versions` interceptor. The (possibly modified) response returned by + `post_list_available_versions` will be passed to + `post_list_available_versions_with_metadata`. + """ + return response, metadata + def pre_list_instances( self, request: datafusion.ListInstancesRequest, @@ -246,12 +341,37 @@ def post_list_instances( ) -> datafusion.ListInstancesResponse: """Post-rpc interceptor for list_instances - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_instances_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DataFusion server but before - it is returned to user code. + it is returned to user code. This `post_list_instances` interceptor runs + before the `post_list_instances_with_metadata` interceptor. 
""" return response + def post_list_instances_with_metadata( + self, + response: datafusion.ListInstancesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + datafusion.ListInstancesResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_instances + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataFusion server but before it is returned to user code. + + We recommend only using this `post_list_instances_with_metadata` + interceptor in new development instead of the `post_list_instances` interceptor. + When both interceptors are used, this `post_list_instances_with_metadata` interceptor runs after the + `post_list_instances` interceptor. The (possibly modified) response returned by + `post_list_instances` will be passed to + `post_list_instances_with_metadata`. + """ + return response, metadata + def pre_restart_instance( self, request: datafusion.RestartInstanceRequest, @@ -271,12 +391,35 @@ def post_restart_instance( ) -> operations_pb2.Operation: """Post-rpc interceptor for restart_instance - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_restart_instance_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DataFusion server but before - it is returned to user code. + it is returned to user code. This `post_restart_instance` interceptor runs + before the `post_restart_instance_with_metadata` interceptor. """ return response + def post_restart_instance_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for restart_instance + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataFusion server but before it is returned to user code. + + We recommend only using this `post_restart_instance_with_metadata` + interceptor in new development instead of the `post_restart_instance` interceptor. + When both interceptors are used, this `post_restart_instance_with_metadata` interceptor runs after the + `post_restart_instance` interceptor. The (possibly modified) response returned by + `post_restart_instance` will be passed to + `post_restart_instance_with_metadata`. + """ + return response, metadata + def pre_update_instance( self, request: datafusion.UpdateInstanceRequest, @@ -296,12 +439,35 @@ def post_update_instance( ) -> operations_pb2.Operation: """Post-rpc interceptor for update_instance - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_instance_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DataFusion server but before - it is returned to user code. + it is returned to user code. This `post_update_instance` interceptor runs + before the `post_update_instance_with_metadata` interceptor. """ return response + def post_update_instance_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_instance + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataFusion server but before it is returned to user code. 
+ + We recommend only using this `post_update_instance_with_metadata` + interceptor in new development instead of the `post_update_instance` interceptor. + When both interceptors are used, this `post_update_instance_with_metadata` interceptor runs after the + `post_update_instance` interceptor. The (possibly modified) response returned by + `post_update_instance` will be passed to + `post_update_instance_with_metadata`. + """ + return response, metadata + @dataclasses.dataclass class DataFusionRestStub: @@ -574,6 +740,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_instance(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_instance_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -718,6 +888,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_instance(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_instance_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -863,6 +1037,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_instance(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_instance_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1008,6 +1186,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_available_versions(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_available_versions_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1157,6 +1339,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_instances(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_instances_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1309,6 +1495,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_restart_instance(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_restart_instance_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1464,6 +1654,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_instance(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_instance_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git 
a/packages/google-cloud-data-fusion/samples/generated_samples/snippet_metadata_google.cloud.datafusion.v1.json b/packages/google-cloud-data-fusion/samples/generated_samples/snippet_metadata_google.cloud.datafusion.v1.json index 39a755aecf3d..961112da7b31 100644 --- a/packages/google-cloud-data-fusion/samples/generated_samples/snippet_metadata_google.cloud.datafusion.v1.json +++ b/packages/google-cloud-data-fusion/samples/generated_samples/snippet_metadata_google.cloud.datafusion.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-data-fusion", - "version": "1.12.0" + "version": "1.13.0" }, "snippets": [ { diff --git a/packages/google-cloud-data-fusion/tests/unit/gapic/data_fusion_v1/test_data_fusion.py b/packages/google-cloud-data-fusion/tests/unit/gapic/data_fusion_v1/test_data_fusion.py index faf8ce38b522..45cb51b97d2c 100644 --- a/packages/google-cloud-data-fusion/tests/unit/gapic/data_fusion_v1/test_data_fusion.py +++ b/packages/google-cloud-data-fusion/tests/unit/gapic/data_fusion_v1/test_data_fusion.py @@ -73,6 +73,13 @@ ) from google.cloud.data_fusion_v1.types import datafusion +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -308,6 +315,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = DataFusionClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = DataFusionClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -5425,10 +5475,14 @@ def test_list_available_versions_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DataFusionRestInterceptor, "post_list_available_versions" ) as post, mock.patch.object( + transports.DataFusionRestInterceptor, + "post_list_available_versions_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DataFusionRestInterceptor, "pre_list_available_versions" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = datafusion.ListAvailableVersionsRequest.pb( datafusion.ListAvailableVersionsRequest() ) @@ 
-5454,6 +5508,10 @@ def test_list_available_versions_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = datafusion.ListAvailableVersionsResponse() + post_with_metadata.return_value = ( + datafusion.ListAvailableVersionsResponse(), + metadata, + ) client.list_available_versions( request, @@ -5465,6 +5523,7 @@ def test_list_available_versions_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_instances_rest_bad_request(request_type=datafusion.ListInstancesRequest): @@ -5549,10 +5608,13 @@ def test_list_instances_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DataFusionRestInterceptor, "post_list_instances" ) as post, mock.patch.object( + transports.DataFusionRestInterceptor, "post_list_instances_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DataFusionRestInterceptor, "pre_list_instances" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = datafusion.ListInstancesRequest.pb( datafusion.ListInstancesRequest() ) @@ -5578,6 +5640,7 @@ def test_list_instances_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = datafusion.ListInstancesResponse() + post_with_metadata.return_value = datafusion.ListInstancesResponse(), metadata client.list_instances( request, @@ -5589,6 +5652,7 @@ def test_list_instances_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_instance_rest_bad_request(request_type=datafusion.GetInstanceRequest): @@ -5711,10 +5775,13 @@ def test_get_instance_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DataFusionRestInterceptor, "post_get_instance" ) as post, mock.patch.object( + transports.DataFusionRestInterceptor, "post_get_instance_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DataFusionRestInterceptor, "pre_get_instance" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = datafusion.GetInstanceRequest.pb(datafusion.GetInstanceRequest()) transcode.return_value = { "method": "post", @@ -5736,6 +5803,7 @@ def test_get_instance_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = datafusion.Instance() + post_with_metadata.return_value = datafusion.Instance(), metadata client.get_instance( request, @@ -5747,6 +5815,7 @@ def test_get_instance_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_instance_rest_bad_request( @@ -5937,10 +6006,13 @@ def test_create_instance_rest_interceptors(null_interceptor): ), mock.patch.object( transports.DataFusionRestInterceptor, "post_create_instance" ) as post, mock.patch.object( + transports.DataFusionRestInterceptor, "post_create_instance_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DataFusionRestInterceptor, "pre_create_instance" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = datafusion.CreateInstanceRequest.pb( datafusion.CreateInstanceRequest() ) @@ -5964,6 +6036,7 @@ def test_create_instance_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() 
+ post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_instance( request, @@ -5975,6 +6048,7 @@ def test_create_instance_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_instance_rest_bad_request( @@ -6055,10 +6129,13 @@ def test_delete_instance_rest_interceptors(null_interceptor): ), mock.patch.object( transports.DataFusionRestInterceptor, "post_delete_instance" ) as post, mock.patch.object( + transports.DataFusionRestInterceptor, "post_delete_instance_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DataFusionRestInterceptor, "pre_delete_instance" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = datafusion.DeleteInstanceRequest.pb( datafusion.DeleteInstanceRequest() ) @@ -6082,6 +6159,7 @@ def test_delete_instance_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.delete_instance( request, @@ -6093,6 +6171,7 @@ def test_delete_instance_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_instance_rest_bad_request( @@ -6287,10 +6366,13 @@ def test_update_instance_rest_interceptors(null_interceptor): ), mock.patch.object( transports.DataFusionRestInterceptor, "post_update_instance" ) as post, mock.patch.object( + transports.DataFusionRestInterceptor, "post_update_instance_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DataFusionRestInterceptor, "pre_update_instance" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = datafusion.UpdateInstanceRequest.pb( datafusion.UpdateInstanceRequest() ) @@ -6314,6 +6396,7 @@ def test_update_instance_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.update_instance( request, @@ -6325,6 +6408,7 @@ def test_update_instance_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_restart_instance_rest_bad_request( @@ -6405,10 +6489,13 @@ def test_restart_instance_rest_interceptors(null_interceptor): ), mock.patch.object( transports.DataFusionRestInterceptor, "post_restart_instance" ) as post, mock.patch.object( + transports.DataFusionRestInterceptor, "post_restart_instance_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DataFusionRestInterceptor, "pre_restart_instance" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = datafusion.RestartInstanceRequest.pb( datafusion.RestartInstanceRequest() ) @@ -6432,6 +6519,7 @@ def test_restart_instance_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.restart_instance( request, @@ -6443,6 +6531,7 @@ def test_restart_instance_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_initialize_client_w_rest(): diff --git 
a/packages/google-cloud-data-qna/CHANGELOG.md b/packages/google-cloud-data-qna/CHANGELOG.md index d33390e809bd..e3828b582e7e 100644 --- a/packages/google-cloud-data-qna/CHANGELOG.md +++ b/packages/google-cloud-data-qna/CHANGELOG.md @@ -1,5 +1,13 @@ # Changelog +## [0.10.15](https://github.com/googleapis/google-cloud-python/compare/google-cloud-data-qna-v0.10.14...google-cloud-data-qna-v0.10.15) (2025-02-12) + + +### Features + +* Add REST Interceptors which support reading metadata ([e22e2bd](https://github.com/googleapis/google-cloud-python/commit/e22e2bde55d11d2f85e9d2caf1d152a4027f88cf)) +* Add support for reading selective GAPIC generation methods from service YAML ([e22e2bd](https://github.com/googleapis/google-cloud-python/commit/e22e2bde55d11d2f85e9d2caf1d152a4027f88cf)) + ## [0.10.14](https://github.com/googleapis/google-cloud-python/compare/google-cloud-data-qna-v0.10.13...google-cloud-data-qna-v0.10.14) (2024-12-12) diff --git a/packages/google-cloud-data-qna/google/cloud/dataqna/gapic_version.py b/packages/google-cloud-data-qna/google/cloud/dataqna/gapic_version.py index e9c1f8ae6685..7008b740153b 100644 --- a/packages/google-cloud-data-qna/google/cloud/dataqna/gapic_version.py +++ b/packages/google-cloud-data-qna/google/cloud/dataqna/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.10.14" # {x-release-please-version} +__version__ = "0.10.15" # {x-release-please-version} diff --git a/packages/google-cloud-data-qna/google/cloud/dataqna_v1alpha/gapic_version.py b/packages/google-cloud-data-qna/google/cloud/dataqna_v1alpha/gapic_version.py index e9c1f8ae6685..7008b740153b 100644 --- a/packages/google-cloud-data-qna/google/cloud/dataqna_v1alpha/gapic_version.py +++ b/packages/google-cloud-data-qna/google/cloud/dataqna_v1alpha/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.10.14" # {x-release-please-version} +__version__ = "0.10.15" # {x-release-please-version} diff --git a/packages/google-cloud-data-qna/google/cloud/dataqna_v1alpha/services/auto_suggestion_service/client.py b/packages/google-cloud-data-qna/google/cloud/dataqna_v1alpha/services/auto_suggestion_service/client.py index 9b32b8eb282f..af544a5b7180 100644 --- a/packages/google-cloud-data-qna/google/cloud/dataqna_v1alpha/services/auto_suggestion_service/client.py +++ b/packages/google-cloud-data-qna/google/cloud/dataqna_v1alpha/services/auto_suggestion_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -534,6 +536,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. 
+ """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. diff --git a/packages/google-cloud-data-qna/google/cloud/dataqna_v1alpha/services/auto_suggestion_service/transports/rest.py b/packages/google-cloud-data-qna/google/cloud/dataqna_v1alpha/services/auto_suggestion_service/transports/rest.py index ad3719a8f2e6..42bd5e0fa975 100644 --- a/packages/google-cloud-data-qna/google/cloud/dataqna_v1alpha/services/auto_suggestion_service/transports/rest.py +++ b/packages/google-cloud-data-qna/google/cloud/dataqna_v1alpha/services/auto_suggestion_service/transports/rest.py @@ -102,12 +102,38 @@ def post_suggest_queries( ) -> auto_suggestion_service.SuggestQueriesResponse: """Post-rpc interceptor for suggest_queries - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_suggest_queries_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AutoSuggestionService server but before - it is returned to user code. + it is returned to user code. This `post_suggest_queries` interceptor runs + before the `post_suggest_queries_with_metadata` interceptor. """ return response + def post_suggest_queries_with_metadata( + self, + response: auto_suggestion_service.SuggestQueriesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + auto_suggestion_service.SuggestQueriesResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for suggest_queries + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AutoSuggestionService server but before it is returned to user code. + + We recommend only using this `post_suggest_queries_with_metadata` + interceptor in new development instead of the `post_suggest_queries` interceptor. + When both interceptors are used, this `post_suggest_queries_with_metadata` interceptor runs after the + `post_suggest_queries` interceptor. The (possibly modified) response returned by + `post_suggest_queries` will be passed to + `post_suggest_queries_with_metadata`. 
+ """ + return response, metadata + @dataclasses.dataclass class AutoSuggestionServiceRestStub: @@ -393,6 +419,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_suggest_queries(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_suggest_queries_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-data-qna/google/cloud/dataqna_v1alpha/services/question_service/client.py b/packages/google-cloud-data-qna/google/cloud/dataqna_v1alpha/services/question_service/client.py index 58374dbad140..f264f29158d9 100644 --- a/packages/google-cloud-data-qna/google/cloud/dataqna_v1alpha/services/question_service/client.py +++ b/packages/google-cloud-data-qna/google/cloud/dataqna_v1alpha/services/question_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -530,6 +532,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. diff --git a/packages/google-cloud-data-qna/google/cloud/dataqna_v1alpha/services/question_service/transports/rest.py b/packages/google-cloud-data-qna/google/cloud/dataqna_v1alpha/services/question_service/transports/rest.py index f023acb8467e..5f5e1415a6b8 100644 --- a/packages/google-cloud-data-qna/google/cloud/dataqna_v1alpha/services/question_service/transports/rest.py +++ b/packages/google-cloud-data-qna/google/cloud/dataqna_v1alpha/services/question_service/transports/rest.py @@ -137,12 +137,35 @@ def post_create_question( ) -> gcd_question.Question: """Post-rpc interceptor for create_question - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_question_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the QuestionService server but before - it is returned to user code. + it is returned to user code. This `post_create_question` interceptor runs + before the `post_create_question_with_metadata` interceptor. 
""" return response + def post_create_question_with_metadata( + self, + response: gcd_question.Question, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gcd_question.Question, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_question + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the QuestionService server but before it is returned to user code. + + We recommend only using this `post_create_question_with_metadata` + interceptor in new development instead of the `post_create_question` interceptor. + When both interceptors are used, this `post_create_question_with_metadata` interceptor runs after the + `post_create_question` interceptor. The (possibly modified) response returned by + `post_create_question` will be passed to + `post_create_question_with_metadata`. + """ + return response, metadata + def pre_execute_question( self, request: question_service.ExecuteQuestionRequest, @@ -160,12 +183,35 @@ def pre_execute_question( def post_execute_question(self, response: question.Question) -> question.Question: """Post-rpc interceptor for execute_question - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_execute_question_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the QuestionService server but before - it is returned to user code. + it is returned to user code. This `post_execute_question` interceptor runs + before the `post_execute_question_with_metadata` interceptor. """ return response + def post_execute_question_with_metadata( + self, + response: question.Question, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[question.Question, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for execute_question + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the QuestionService server but before it is returned to user code. + + We recommend only using this `post_execute_question_with_metadata` + interceptor in new development instead of the `post_execute_question` interceptor. + When both interceptors are used, this `post_execute_question_with_metadata` interceptor runs after the + `post_execute_question` interceptor. The (possibly modified) response returned by + `post_execute_question` will be passed to + `post_execute_question_with_metadata`. + """ + return response, metadata + def pre_get_question( self, request: question_service.GetQuestionRequest, @@ -183,12 +229,35 @@ def pre_get_question( def post_get_question(self, response: question.Question) -> question.Question: """Post-rpc interceptor for get_question - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_question_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the QuestionService server but before - it is returned to user code. + it is returned to user code. This `post_get_question` interceptor runs + before the `post_get_question_with_metadata` interceptor. 
""" return response + def post_get_question_with_metadata( + self, + response: question.Question, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[question.Question, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_question + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the QuestionService server but before it is returned to user code. + + We recommend only using this `post_get_question_with_metadata` + interceptor in new development instead of the `post_get_question` interceptor. + When both interceptors are used, this `post_get_question_with_metadata` interceptor runs after the + `post_get_question` interceptor. The (possibly modified) response returned by + `post_get_question` will be passed to + `post_get_question_with_metadata`. + """ + return response, metadata + def pre_get_user_feedback( self, request: question_service.GetUserFeedbackRequest, @@ -208,12 +277,35 @@ def post_get_user_feedback( ) -> user_feedback.UserFeedback: """Post-rpc interceptor for get_user_feedback - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_user_feedback_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the QuestionService server but before - it is returned to user code. + it is returned to user code. This `post_get_user_feedback` interceptor runs + before the `post_get_user_feedback_with_metadata` interceptor. """ return response + def post_get_user_feedback_with_metadata( + self, + response: user_feedback.UserFeedback, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[user_feedback.UserFeedback, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_user_feedback + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the QuestionService server but before it is returned to user code. + + We recommend only using this `post_get_user_feedback_with_metadata` + interceptor in new development instead of the `post_get_user_feedback` interceptor. + When both interceptors are used, this `post_get_user_feedback_with_metadata` interceptor runs after the + `post_get_user_feedback` interceptor. The (possibly modified) response returned by + `post_get_user_feedback` will be passed to + `post_get_user_feedback_with_metadata`. + """ + return response, metadata + def pre_update_user_feedback( self, request: question_service.UpdateUserFeedbackRequest, @@ -234,12 +326,35 @@ def post_update_user_feedback( ) -> gcd_user_feedback.UserFeedback: """Post-rpc interceptor for update_user_feedback - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_user_feedback_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the QuestionService server but before - it is returned to user code. + it is returned to user code. This `post_update_user_feedback` interceptor runs + before the `post_update_user_feedback_with_metadata` interceptor. 
""" return response + def post_update_user_feedback_with_metadata( + self, + response: gcd_user_feedback.UserFeedback, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gcd_user_feedback.UserFeedback, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_user_feedback + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the QuestionService server but before it is returned to user code. + + We recommend only using this `post_update_user_feedback_with_metadata` + interceptor in new development instead of the `post_update_user_feedback` interceptor. + When both interceptors are used, this `post_update_user_feedback_with_metadata` interceptor runs after the + `post_update_user_feedback` interceptor. The (possibly modified) response returned by + `post_update_user_feedback` will be passed to + `post_update_user_feedback_with_metadata`. + """ + return response, metadata + @dataclasses.dataclass class QuestionServiceRestStub: @@ -476,6 +591,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_question(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_question_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -629,6 +748,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_execute_question(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_execute_question_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -775,6 +898,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_question(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_question_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -917,6 +1044,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_user_feedback(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_user_feedback_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1066,6 +1197,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_user_feedback(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_user_feedback_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-data-qna/samples/generated_samples/snippet_metadata_google.cloud.dataqna.v1alpha.json b/packages/google-cloud-data-qna/samples/generated_samples/snippet_metadata_google.cloud.dataqna.v1alpha.json index 6dc4a7fa9d15..5649ddcc648a 100644 --- a/packages/google-cloud-data-qna/samples/generated_samples/snippet_metadata_google.cloud.dataqna.v1alpha.json +++ 
b/packages/google-cloud-data-qna/samples/generated_samples/snippet_metadata_google.cloud.dataqna.v1alpha.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-data-qna", - "version": "0.10.14" + "version": "0.10.15" }, "snippets": [ { diff --git a/packages/google-cloud-data-qna/tests/unit/gapic/dataqna_v1alpha/test_auto_suggestion_service.py b/packages/google-cloud-data-qna/tests/unit/gapic/dataqna_v1alpha/test_auto_suggestion_service.py index bf93e3acfdc3..5fef3616a9fe 100644 --- a/packages/google-cloud-data-qna/tests/unit/gapic/dataqna_v1alpha/test_auto_suggestion_service.py +++ b/packages/google-cloud-data-qna/tests/unit/gapic/dataqna_v1alpha/test_auto_suggestion_service.py @@ -59,6 +59,13 @@ ) from google.cloud.dataqna_v1alpha.types import auto_suggestion_service +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -334,6 +341,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = AutoSuggestionServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = AutoSuggestionServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -1734,10 +1784,14 @@ def test_suggest_queries_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AutoSuggestionServiceRestInterceptor, "post_suggest_queries" ) as post, mock.patch.object( + transports.AutoSuggestionServiceRestInterceptor, + "post_suggest_queries_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AutoSuggestionServiceRestInterceptor, "pre_suggest_queries" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = auto_suggestion_service.SuggestQueriesRequest.pb( auto_suggestion_service.SuggestQueriesRequest() ) @@ -1763,6 +1817,10 @@ def test_suggest_queries_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = auto_suggestion_service.SuggestQueriesResponse() + post_with_metadata.return_value = ( + auto_suggestion_service.SuggestQueriesResponse(), + metadata, + ) 
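Illustrative sketch (not part of the diff above): how a caller could adopt the new metadata-aware hook in place of the deprecated `post_suggest_queries` interceptor. The `interceptor=` transport argument and the header-logging behavior are assumptions for illustration, not confirmed by this diff.

from typing import Sequence, Tuple, Union

from google.cloud.dataqna_v1alpha.services.auto_suggestion_service import (
    AutoSuggestionServiceClient,
    transports,
)
from google.cloud.dataqna_v1alpha.types import auto_suggestion_service


class HeaderLoggingInterceptor(transports.AutoSuggestionServiceRestInterceptor):
    def post_suggest_queries_with_metadata(
        self,
        response: auto_suggestion_service.SuggestQueriesResponse,
        metadata: Sequence[Tuple[str, Union[str, bytes]]],
    ):
        # `metadata` carries the HTTP response headers as (key, value) pairs;
        # read or rewrite them here before the response reaches user code.
        for key, value in metadata:
            print(f"response header: {key}={value}")
        return response, metadata


# Assumed wiring: pass the interceptor to the REST transport and the transport
# to the client; credentials are resolved via Application Default Credentials.
transport = transports.AutoSuggestionServiceRestTransport(
    interceptor=HeaderLoggingInterceptor()
)
client = AutoSuggestionServiceClient(transport=transport)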
client.suggest_queries( request, @@ -1774,6 +1832,7 @@ def test_suggest_queries_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_initialize_client_w_rest(): diff --git a/packages/google-cloud-data-qna/tests/unit/gapic/dataqna_v1alpha/test_question_service.py b/packages/google-cloud-data-qna/tests/unit/gapic/dataqna_v1alpha/test_question_service.py index cdebcd9a1522..0e3ef87dc156 100644 --- a/packages/google-cloud-data-qna/tests/unit/gapic/dataqna_v1alpha/test_question_service.py +++ b/packages/google-cloud-data-qna/tests/unit/gapic/dataqna_v1alpha/test_question_service.py @@ -68,6 +68,13 @@ from google.cloud.dataqna_v1alpha.types import question_service from google.cloud.dataqna_v1alpha.types import user_feedback +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -326,6 +333,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = QuestionServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = QuestionServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -4245,10 +4295,13 @@ def test_get_question_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.QuestionServiceRestInterceptor, "post_get_question" ) as post, mock.patch.object( + transports.QuestionServiceRestInterceptor, "post_get_question_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.QuestionServiceRestInterceptor, "pre_get_question" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = question_service.GetQuestionRequest.pb( question_service.GetQuestionRequest() ) @@ -4272,6 +4325,7 @@ def test_get_question_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = question.Question() + post_with_metadata.return_value = question.Question(), metadata client.get_question( request, @@ -4283,6 +4337,7 @@ def test_get_question_rest_interceptors(null_interceptor): 
pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_question_rest_bad_request( @@ -4526,10 +4581,13 @@ def test_create_question_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.QuestionServiceRestInterceptor, "post_create_question" ) as post, mock.patch.object( + transports.QuestionServiceRestInterceptor, "post_create_question_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.QuestionServiceRestInterceptor, "pre_create_question" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = question_service.CreateQuestionRequest.pb( question_service.CreateQuestionRequest() ) @@ -4553,6 +4611,7 @@ def test_create_question_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gcd_question.Question() + post_with_metadata.return_value = gcd_question.Question(), metadata client.create_question( request, @@ -4564,6 +4623,7 @@ def test_create_question_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_execute_question_rest_bad_request( @@ -4656,10 +4716,13 @@ def test_execute_question_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.QuestionServiceRestInterceptor, "post_execute_question" ) as post, mock.patch.object( + transports.QuestionServiceRestInterceptor, "post_execute_question_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.QuestionServiceRestInterceptor, "pre_execute_question" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = question_service.ExecuteQuestionRequest.pb( question_service.ExecuteQuestionRequest() ) @@ -4683,6 +4746,7 @@ def test_execute_question_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = question.Question() + post_with_metadata.return_value = question.Question(), metadata client.execute_question( request, @@ -4694,6 +4758,7 @@ def test_execute_question_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_user_feedback_rest_bad_request( @@ -4786,10 +4851,14 @@ def test_get_user_feedback_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.QuestionServiceRestInterceptor, "post_get_user_feedback" ) as post, mock.patch.object( + transports.QuestionServiceRestInterceptor, + "post_get_user_feedback_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.QuestionServiceRestInterceptor, "pre_get_user_feedback" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = question_service.GetUserFeedbackRequest.pb( question_service.GetUserFeedbackRequest() ) @@ -4813,6 +4882,7 @@ def test_get_user_feedback_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = user_feedback.UserFeedback() + post_with_metadata.return_value = user_feedback.UserFeedback(), metadata client.get_user_feedback( request, @@ -4824,6 +4894,7 @@ def test_get_user_feedback_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_user_feedback_rest_bad_request( @@ -4992,10 +5063,14 @@ def 
test_update_user_feedback_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.QuestionServiceRestInterceptor, "post_update_user_feedback" ) as post, mock.patch.object( + transports.QuestionServiceRestInterceptor, + "post_update_user_feedback_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.QuestionServiceRestInterceptor, "pre_update_user_feedback" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = question_service.UpdateUserFeedbackRequest.pb( question_service.UpdateUserFeedbackRequest() ) @@ -5021,6 +5096,7 @@ def test_update_user_feedback_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gcd_user_feedback.UserFeedback() + post_with_metadata.return_value = gcd_user_feedback.UserFeedback(), metadata client.update_user_feedback( request, @@ -5032,6 +5108,7 @@ def test_update_user_feedback_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_initialize_client_w_rest(): diff --git a/packages/google-cloud-datacatalog-lineage/CHANGELOG.md b/packages/google-cloud-datacatalog-lineage/CHANGELOG.md index 6645042a432c..6839b9823f59 100644 --- a/packages/google-cloud-datacatalog-lineage/CHANGELOG.md +++ b/packages/google-cloud-datacatalog-lineage/CHANGELOG.md @@ -1,5 +1,13 @@ # Changelog +## [0.3.12](https://github.com/googleapis/google-cloud-python/compare/google-cloud-datacatalog-lineage-v0.3.11...google-cloud-datacatalog-lineage-v0.3.12) (2025-02-12) + + +### Features + +* Add REST Interceptors which support reading metadata ([e92d527](https://github.com/googleapis/google-cloud-python/commit/e92d52797ffbce45d033eb81af24e0cad32baa55)) +* Add support for reading selective GAPIC generation methods from service YAML ([e92d527](https://github.com/googleapis/google-cloud-python/commit/e92d52797ffbce45d033eb81af24e0cad32baa55)) + ## [0.3.11](https://github.com/googleapis/google-cloud-python/compare/google-cloud-datacatalog-lineage-v0.3.10...google-cloud-datacatalog-lineage-v0.3.11) (2024-12-12) diff --git a/packages/google-cloud-datacatalog-lineage/google/cloud/datacatalog_lineage/gapic_version.py b/packages/google-cloud-datacatalog-lineage/google/cloud/datacatalog_lineage/gapic_version.py index 075108786e34..ab68833be4be 100644 --- a/packages/google-cloud-datacatalog-lineage/google/cloud/datacatalog_lineage/gapic_version.py +++ b/packages/google-cloud-datacatalog-lineage/google/cloud/datacatalog_lineage/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.3.11" # {x-release-please-version} +__version__ = "0.3.12" # {x-release-please-version} diff --git a/packages/google-cloud-datacatalog-lineage/google/cloud/datacatalog_lineage_v1/gapic_version.py b/packages/google-cloud-datacatalog-lineage/google/cloud/datacatalog_lineage_v1/gapic_version.py index 075108786e34..ab68833be4be 100644 --- a/packages/google-cloud-datacatalog-lineage/google/cloud/datacatalog_lineage_v1/gapic_version.py +++ b/packages/google-cloud-datacatalog-lineage/google/cloud/datacatalog_lineage_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.3.11" # {x-release-please-version} +__version__ = "0.3.12" # {x-release-please-version} diff --git a/packages/google-cloud-datacatalog-lineage/google/cloud/datacatalog_lineage_v1/services/lineage/client.py b/packages/google-cloud-datacatalog-lineage/google/cloud/datacatalog_lineage_v1/services/lineage/client.py index e0c68d2dc0dc..af5ab30f0dfb 100644 --- a/packages/google-cloud-datacatalog-lineage/google/cloud/datacatalog_lineage_v1/services/lineage/client.py +++ b/packages/google-cloud-datacatalog-lineage/google/cloud/datacatalog_lineage_v1/services/lineage/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -545,6 +547,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -2770,16 +2799,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -2825,16 +2858,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. 
+ return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def delete_operation( self, diff --git a/packages/google-cloud-datacatalog-lineage/google/cloud/datacatalog_lineage_v1/services/lineage/transports/rest.py b/packages/google-cloud-datacatalog-lineage/google/cloud/datacatalog_lineage_v1/services/lineage/transports/rest.py index f5d4b4f959cd..778d6c2c3d21 100644 --- a/packages/google-cloud-datacatalog-lineage/google/cloud/datacatalog_lineage_v1/services/lineage/transports/rest.py +++ b/packages/google-cloud-datacatalog-lineage/google/cloud/datacatalog_lineage_v1/services/lineage/transports/rest.py @@ -227,12 +227,38 @@ def post_batch_search_link_processes( ) -> lineage.BatchSearchLinkProcessesResponse: """Post-rpc interceptor for batch_search_link_processes - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_batch_search_link_processes_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Lineage server but before - it is returned to user code. + it is returned to user code. This `post_batch_search_link_processes` interceptor runs + before the `post_batch_search_link_processes_with_metadata` interceptor. """ return response + def post_batch_search_link_processes_with_metadata( + self, + response: lineage.BatchSearchLinkProcessesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + lineage.BatchSearchLinkProcessesResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for batch_search_link_processes + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Lineage server but before it is returned to user code. + + We recommend only using this `post_batch_search_link_processes_with_metadata` + interceptor in new development instead of the `post_batch_search_link_processes` interceptor. + When both interceptors are used, this `post_batch_search_link_processes_with_metadata` interceptor runs after the + `post_batch_search_link_processes` interceptor. The (possibly modified) response returned by + `post_batch_search_link_processes` will be passed to + `post_batch_search_link_processes_with_metadata`. + """ + return response, metadata + def pre_create_lineage_event( self, request: lineage.CreateLineageEventRequest, @@ -252,12 +278,35 @@ def post_create_lineage_event( ) -> lineage.LineageEvent: """Post-rpc interceptor for create_lineage_event - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_lineage_event_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Lineage server but before - it is returned to user code. + it is returned to user code. This `post_create_lineage_event` interceptor runs + before the `post_create_lineage_event_with_metadata` interceptor. """ return response + def post_create_lineage_event_with_metadata( + self, + response: lineage.LineageEvent, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[lineage.LineageEvent, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_lineage_event + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Lineage server but before it is returned to user code. 
+ + We recommend only using this `post_create_lineage_event_with_metadata` + interceptor in new development instead of the `post_create_lineage_event` interceptor. + When both interceptors are used, this `post_create_lineage_event_with_metadata` interceptor runs after the + `post_create_lineage_event` interceptor. The (possibly modified) response returned by + `post_create_lineage_event` will be passed to + `post_create_lineage_event_with_metadata`. + """ + return response, metadata + def pre_create_process( self, request: lineage.CreateProcessRequest, @@ -273,12 +322,35 @@ def pre_create_process( def post_create_process(self, response: lineage.Process) -> lineage.Process: """Post-rpc interceptor for create_process - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_process_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Lineage server but before - it is returned to user code. + it is returned to user code. This `post_create_process` interceptor runs + before the `post_create_process_with_metadata` interceptor. """ return response + def post_create_process_with_metadata( + self, + response: lineage.Process, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[lineage.Process, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_process + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Lineage server but before it is returned to user code. + + We recommend only using this `post_create_process_with_metadata` + interceptor in new development instead of the `post_create_process` interceptor. + When both interceptors are used, this `post_create_process_with_metadata` interceptor runs after the + `post_create_process` interceptor. The (possibly modified) response returned by + `post_create_process` will be passed to + `post_create_process_with_metadata`. + """ + return response, metadata + def pre_create_run( self, request: lineage.CreateRunRequest, @@ -294,12 +366,33 @@ def pre_create_run( def post_create_run(self, response: lineage.Run) -> lineage.Run: """Post-rpc interceptor for create_run - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_run_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Lineage server but before - it is returned to user code. + it is returned to user code. This `post_create_run` interceptor runs + before the `post_create_run_with_metadata` interceptor. """ return response + def post_create_run_with_metadata( + self, response: lineage.Run, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[lineage.Run, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_run + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Lineage server but before it is returned to user code. + + We recommend only using this `post_create_run_with_metadata` + interceptor in new development instead of the `post_create_run` interceptor. + When both interceptors are used, this `post_create_run_with_metadata` interceptor runs after the + `post_create_run` interceptor. The (possibly modified) response returned by + `post_create_run` will be passed to + `post_create_run_with_metadata`. 
+ """ + return response, metadata + def pre_delete_lineage_event( self, request: lineage.DeleteLineageEventRequest, @@ -331,12 +424,35 @@ def post_delete_process( ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_process - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_process_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Lineage server but before - it is returned to user code. + it is returned to user code. This `post_delete_process` interceptor runs + before the `post_delete_process_with_metadata` interceptor. """ return response + def post_delete_process_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_process + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Lineage server but before it is returned to user code. + + We recommend only using this `post_delete_process_with_metadata` + interceptor in new development instead of the `post_delete_process` interceptor. + When both interceptors are used, this `post_delete_process_with_metadata` interceptor runs after the + `post_delete_process` interceptor. The (possibly modified) response returned by + `post_delete_process` will be passed to + `post_delete_process_with_metadata`. + """ + return response, metadata + def pre_delete_run( self, request: lineage.DeleteRunRequest, @@ -354,12 +470,35 @@ def post_delete_run( ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_run - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_run_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Lineage server but before - it is returned to user code. + it is returned to user code. This `post_delete_run` interceptor runs + before the `post_delete_run_with_metadata` interceptor. """ return response + def post_delete_run_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_run + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Lineage server but before it is returned to user code. + + We recommend only using this `post_delete_run_with_metadata` + interceptor in new development instead of the `post_delete_run` interceptor. + When both interceptors are used, this `post_delete_run_with_metadata` interceptor runs after the + `post_delete_run` interceptor. The (possibly modified) response returned by + `post_delete_run` will be passed to + `post_delete_run_with_metadata`. + """ + return response, metadata + def pre_get_lineage_event( self, request: lineage.GetLineageEventRequest, @@ -377,12 +516,35 @@ def post_get_lineage_event( ) -> lineage.LineageEvent: """Post-rpc interceptor for get_lineage_event - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_lineage_event_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Lineage server but before - it is returned to user code. + it is returned to user code. 
This `post_get_lineage_event` interceptor runs + before the `post_get_lineage_event_with_metadata` interceptor. """ return response + def post_get_lineage_event_with_metadata( + self, + response: lineage.LineageEvent, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[lineage.LineageEvent, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_lineage_event + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Lineage server but before it is returned to user code. + + We recommend only using this `post_get_lineage_event_with_metadata` + interceptor in new development instead of the `post_get_lineage_event` interceptor. + When both interceptors are used, this `post_get_lineage_event_with_metadata` interceptor runs after the + `post_get_lineage_event` interceptor. The (possibly modified) response returned by + `post_get_lineage_event` will be passed to + `post_get_lineage_event_with_metadata`. + """ + return response, metadata + def pre_get_process( self, request: lineage.GetProcessRequest, @@ -398,12 +560,35 @@ def pre_get_process( def post_get_process(self, response: lineage.Process) -> lineage.Process: """Post-rpc interceptor for get_process - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_process_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Lineage server but before - it is returned to user code. + it is returned to user code. This `post_get_process` interceptor runs + before the `post_get_process_with_metadata` interceptor. """ return response + def post_get_process_with_metadata( + self, + response: lineage.Process, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[lineage.Process, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_process + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Lineage server but before it is returned to user code. + + We recommend only using this `post_get_process_with_metadata` + interceptor in new development instead of the `post_get_process` interceptor. + When both interceptors are used, this `post_get_process_with_metadata` interceptor runs after the + `post_get_process` interceptor. The (possibly modified) response returned by + `post_get_process` will be passed to + `post_get_process_with_metadata`. + """ + return response, metadata + def pre_get_run( self, request: lineage.GetRunRequest, @@ -419,12 +604,33 @@ def pre_get_run( def post_get_run(self, response: lineage.Run) -> lineage.Run: """Post-rpc interceptor for get_run - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_run_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Lineage server but before - it is returned to user code. + it is returned to user code. This `post_get_run` interceptor runs + before the `post_get_run_with_metadata` interceptor. """ return response + def post_get_run_with_metadata( + self, response: lineage.Run, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[lineage.Run, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_run + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Lineage server but before it is returned to user code. 
+ + We recommend only using this `post_get_run_with_metadata` + interceptor in new development instead of the `post_get_run` interceptor. + When both interceptors are used, this `post_get_run_with_metadata` interceptor runs after the + `post_get_run` interceptor. The (possibly modified) response returned by + `post_get_run` will be passed to + `post_get_run_with_metadata`. + """ + return response, metadata + def pre_list_lineage_events( self, request: lineage.ListLineageEventsRequest, @@ -444,12 +650,37 @@ def post_list_lineage_events( ) -> lineage.ListLineageEventsResponse: """Post-rpc interceptor for list_lineage_events - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_lineage_events_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Lineage server but before - it is returned to user code. + it is returned to user code. This `post_list_lineage_events` interceptor runs + before the `post_list_lineage_events_with_metadata` interceptor. """ return response + def post_list_lineage_events_with_metadata( + self, + response: lineage.ListLineageEventsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + lineage.ListLineageEventsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_lineage_events + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Lineage server but before it is returned to user code. + + We recommend only using this `post_list_lineage_events_with_metadata` + interceptor in new development instead of the `post_list_lineage_events` interceptor. + When both interceptors are used, this `post_list_lineage_events_with_metadata` interceptor runs after the + `post_list_lineage_events` interceptor. The (possibly modified) response returned by + `post_list_lineage_events` will be passed to + `post_list_lineage_events_with_metadata`. + """ + return response, metadata + def pre_list_processes( self, request: lineage.ListProcessesRequest, @@ -467,12 +698,35 @@ def post_list_processes( ) -> lineage.ListProcessesResponse: """Post-rpc interceptor for list_processes - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_processes_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Lineage server but before - it is returned to user code. + it is returned to user code. This `post_list_processes` interceptor runs + before the `post_list_processes_with_metadata` interceptor. """ return response + def post_list_processes_with_metadata( + self, + response: lineage.ListProcessesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[lineage.ListProcessesResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_processes + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Lineage server but before it is returned to user code. + + We recommend only using this `post_list_processes_with_metadata` + interceptor in new development instead of the `post_list_processes` interceptor. + When both interceptors are used, this `post_list_processes_with_metadata` interceptor runs after the + `post_list_processes` interceptor. The (possibly modified) response returned by + `post_list_processes` will be passed to + `post_list_processes_with_metadata`. 
+ """ + return response, metadata + def pre_list_runs( self, request: lineage.ListRunsRequest, @@ -490,12 +744,35 @@ def post_list_runs( ) -> lineage.ListRunsResponse: """Post-rpc interceptor for list_runs - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_runs_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Lineage server but before - it is returned to user code. + it is returned to user code. This `post_list_runs` interceptor runs + before the `post_list_runs_with_metadata` interceptor. """ return response + def post_list_runs_with_metadata( + self, + response: lineage.ListRunsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[lineage.ListRunsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_runs + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Lineage server but before it is returned to user code. + + We recommend only using this `post_list_runs_with_metadata` + interceptor in new development instead of the `post_list_runs` interceptor. + When both interceptors are used, this `post_list_runs_with_metadata` interceptor runs after the + `post_list_runs` interceptor. The (possibly modified) response returned by + `post_list_runs` will be passed to + `post_list_runs_with_metadata`. + """ + return response, metadata + def pre_process_open_lineage_run_event( self, request: lineage.ProcessOpenLineageRunEventRequest, @@ -516,12 +793,38 @@ def post_process_open_lineage_run_event( ) -> lineage.ProcessOpenLineageRunEventResponse: """Post-rpc interceptor for process_open_lineage_run_event - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_process_open_lineage_run_event_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Lineage server but before - it is returned to user code. + it is returned to user code. This `post_process_open_lineage_run_event` interceptor runs + before the `post_process_open_lineage_run_event_with_metadata` interceptor. """ return response + def post_process_open_lineage_run_event_with_metadata( + self, + response: lineage.ProcessOpenLineageRunEventResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + lineage.ProcessOpenLineageRunEventResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for process_open_lineage_run_event + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Lineage server but before it is returned to user code. + + We recommend only using this `post_process_open_lineage_run_event_with_metadata` + interceptor in new development instead of the `post_process_open_lineage_run_event` interceptor. + When both interceptors are used, this `post_process_open_lineage_run_event_with_metadata` interceptor runs after the + `post_process_open_lineage_run_event` interceptor. The (possibly modified) response returned by + `post_process_open_lineage_run_event` will be passed to + `post_process_open_lineage_run_event_with_metadata`. + """ + return response, metadata + def pre_search_links( self, request: lineage.SearchLinksRequest, @@ -539,12 +842,35 @@ def post_search_links( ) -> lineage.SearchLinksResponse: """Post-rpc interceptor for search_links - Override in a subclass to manipulate the response + DEPRECATED. 
Please use the `post_search_links_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Lineage server but before - it is returned to user code. + it is returned to user code. This `post_search_links` interceptor runs + before the `post_search_links_with_metadata` interceptor. """ return response + def post_search_links_with_metadata( + self, + response: lineage.SearchLinksResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[lineage.SearchLinksResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for search_links + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Lineage server but before it is returned to user code. + + We recommend only using this `post_search_links_with_metadata` + interceptor in new development instead of the `post_search_links` interceptor. + When both interceptors are used, this `post_search_links_with_metadata` interceptor runs after the + `post_search_links` interceptor. The (possibly modified) response returned by + `post_search_links` will be passed to + `post_search_links_with_metadata`. + """ + return response, metadata + def pre_update_process( self, request: lineage.UpdateProcessRequest, @@ -560,12 +886,35 @@ def pre_update_process( def post_update_process(self, response: lineage.Process) -> lineage.Process: """Post-rpc interceptor for update_process - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_process_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Lineage server but before - it is returned to user code. + it is returned to user code. This `post_update_process` interceptor runs + before the `post_update_process_with_metadata` interceptor. """ return response + def post_update_process_with_metadata( + self, + response: lineage.Process, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[lineage.Process, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_process + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Lineage server but before it is returned to user code. + + We recommend only using this `post_update_process_with_metadata` + interceptor in new development instead of the `post_update_process` interceptor. + When both interceptors are used, this `post_update_process_with_metadata` interceptor runs after the + `post_update_process` interceptor. The (possibly modified) response returned by + `post_update_process` will be passed to + `post_update_process_with_metadata`. + """ + return response, metadata + def pre_update_run( self, request: lineage.UpdateRunRequest, @@ -581,12 +930,33 @@ def pre_update_run( def post_update_run(self, response: lineage.Run) -> lineage.Run: """Post-rpc interceptor for update_run - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_run_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Lineage server but before - it is returned to user code. + it is returned to user code. This `post_update_run` interceptor runs + before the `post_update_run_with_metadata` interceptor. 
""" return response + def post_update_run_with_metadata( + self, response: lineage.Run, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[lineage.Run, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_run + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Lineage server but before it is returned to user code. + + We recommend only using this `post_update_run_with_metadata` + interceptor in new development instead of the `post_update_run` interceptor. + When both interceptors are used, this `post_update_run_with_metadata` interceptor runs after the + `post_update_run` interceptor. The (possibly modified) response returned by + `post_update_run` will be passed to + `post_update_run_with_metadata`. + """ + return response, metadata + def pre_cancel_operation( self, request: operations_pb2.CancelOperationRequest, @@ -957,6 +1327,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_batch_search_link_processes(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_batch_search_link_processes_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1113,6 +1487,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_lineage_event(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_lineage_event_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1264,6 +1642,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_process(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_process_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1414,6 +1796,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_run(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_run_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1667,6 +2053,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_process(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_process_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1809,6 +2199,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_run(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_run_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1961,6 +2355,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) 
resp = self._interceptor.post_get_lineage_event(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_lineage_event_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2104,6 +2502,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_process(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_process_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2246,6 +2648,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_run(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_run_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2393,6 +2799,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_lineage_events(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_lineage_events_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2540,6 +2950,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_processes(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_processes_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2683,6 +3097,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_runs(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_runs_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2835,6 +3253,13 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_process_open_lineage_run_event(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_process_open_lineage_run_event_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2988,6 +3413,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_search_links(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_search_links_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3139,6 +3568,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_process(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = 
self._interceptor.post_update_process_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3289,6 +3722,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_run(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_run_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-datacatalog-lineage/samples/generated_samples/snippet_metadata_google.cloud.datacatalog.lineage.v1.json b/packages/google-cloud-datacatalog-lineage/samples/generated_samples/snippet_metadata_google.cloud.datacatalog.lineage.v1.json index 46db67454e5c..ae2e13d674df 100644 --- a/packages/google-cloud-datacatalog-lineage/samples/generated_samples/snippet_metadata_google.cloud.datacatalog.lineage.v1.json +++ b/packages/google-cloud-datacatalog-lineage/samples/generated_samples/snippet_metadata_google.cloud.datacatalog.lineage.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-datacatalog-lineage", - "version": "0.3.11" + "version": "0.3.12" }, "snippets": [ { diff --git a/packages/google-cloud-datacatalog-lineage/tests/unit/gapic/datacatalog_lineage_v1/test_lineage.py b/packages/google-cloud-datacatalog-lineage/tests/unit/gapic/datacatalog_lineage_v1/test_lineage.py index 1c8eb6e569ed..5abb2d849d3b 100644 --- a/packages/google-cloud-datacatalog-lineage/tests/unit/gapic/datacatalog_lineage_v1/test_lineage.py +++ b/packages/google-cloud-datacatalog-lineage/tests/unit/gapic/datacatalog_lineage_v1/test_lineage.py @@ -74,6 +74,13 @@ ) from google.cloud.datacatalog_lineage_v1.types import lineage +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -297,6 +304,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = LineageClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = LineageClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -12057,10 +12107,14 @@ def test_process_open_lineage_run_event_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.LineageRestInterceptor, "post_process_open_lineage_run_event" ) as post, mock.patch.object( + transports.LineageRestInterceptor, + "post_process_open_lineage_run_event_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.LineageRestInterceptor, "pre_process_open_lineage_run_event" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = lineage.ProcessOpenLineageRunEventRequest.pb( lineage.ProcessOpenLineageRunEventRequest() ) @@ -12086,6 +12140,10 @@ def test_process_open_lineage_run_event_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = lineage.ProcessOpenLineageRunEventResponse() + post_with_metadata.return_value = ( + lineage.ProcessOpenLineageRunEventResponse(), + metadata, + ) client.process_open_lineage_run_event( request, @@ -12097,6 +12155,7 @@ def test_process_open_lineage_run_event_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_process_rest_bad_request(request_type=lineage.CreateProcessRequest): @@ -12252,10 +12311,13 @@ def test_create_process_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.LineageRestInterceptor, "post_create_process" ) as post, mock.patch.object( + transports.LineageRestInterceptor, "post_create_process_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.LineageRestInterceptor, "pre_create_process" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = lineage.CreateProcessRequest.pb(lineage.CreateProcessRequest()) transcode.return_value = { "method": "post", @@ -12277,6 +12339,7 @@ def test_create_process_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = lineage.Process() + post_with_metadata.return_value = lineage.Process(), metadata client.create_process( request, @@ -12288,6 +12351,7 @@ def 
test_create_process_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_process_rest_bad_request(request_type=lineage.UpdateProcessRequest): @@ -12447,10 +12511,13 @@ def test_update_process_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.LineageRestInterceptor, "post_update_process" ) as post, mock.patch.object( + transports.LineageRestInterceptor, "post_update_process_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.LineageRestInterceptor, "pre_update_process" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = lineage.UpdateProcessRequest.pb(lineage.UpdateProcessRequest()) transcode.return_value = { "method": "post", @@ -12472,6 +12539,7 @@ def test_update_process_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = lineage.Process() + post_with_metadata.return_value = lineage.Process(), metadata client.update_process( request, @@ -12483,6 +12551,7 @@ def test_update_process_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_process_rest_bad_request(request_type=lineage.GetProcessRequest): @@ -12565,10 +12634,13 @@ def test_get_process_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.LineageRestInterceptor, "post_get_process" ) as post, mock.patch.object( + transports.LineageRestInterceptor, "post_get_process_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.LineageRestInterceptor, "pre_get_process" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = lineage.GetProcessRequest.pb(lineage.GetProcessRequest()) transcode.return_value = { "method": "post", @@ -12590,6 +12662,7 @@ def test_get_process_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = lineage.Process() + post_with_metadata.return_value = lineage.Process(), metadata client.get_process( request, @@ -12601,6 +12674,7 @@ def test_get_process_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_processes_rest_bad_request(request_type=lineage.ListProcessesRequest): @@ -12681,10 +12755,13 @@ def test_list_processes_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.LineageRestInterceptor, "post_list_processes" ) as post, mock.patch.object( + transports.LineageRestInterceptor, "post_list_processes_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.LineageRestInterceptor, "pre_list_processes" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = lineage.ListProcessesRequest.pb(lineage.ListProcessesRequest()) transcode.return_value = { "method": "post", @@ -12708,6 +12785,7 @@ def test_list_processes_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = lineage.ListProcessesResponse() + post_with_metadata.return_value = lineage.ListProcessesResponse(), metadata client.list_processes( request, @@ -12719,6 +12797,7 @@ def test_list_processes_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def 
test_delete_process_rest_bad_request(request_type=lineage.DeleteProcessRequest): @@ -12795,10 +12874,13 @@ def test_delete_process_rest_interceptors(null_interceptor): ), mock.patch.object( transports.LineageRestInterceptor, "post_delete_process" ) as post, mock.patch.object( + transports.LineageRestInterceptor, "post_delete_process_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.LineageRestInterceptor, "pre_delete_process" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = lineage.DeleteProcessRequest.pb(lineage.DeleteProcessRequest()) transcode.return_value = { "method": "post", @@ -12820,6 +12902,7 @@ def test_delete_process_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.delete_process( request, @@ -12831,6 +12914,7 @@ def test_delete_process_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_run_rest_bad_request(request_type=lineage.CreateRunRequest): @@ -12990,10 +13074,13 @@ def test_create_run_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.LineageRestInterceptor, "post_create_run" ) as post, mock.patch.object( + transports.LineageRestInterceptor, "post_create_run_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.LineageRestInterceptor, "pre_create_run" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = lineage.CreateRunRequest.pb(lineage.CreateRunRequest()) transcode.return_value = { "method": "post", @@ -13015,6 +13102,7 @@ def test_create_run_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = lineage.Run() + post_with_metadata.return_value = lineage.Run(), metadata client.create_run( request, @@ -13026,6 +13114,7 @@ def test_create_run_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_run_rest_bad_request(request_type=lineage.UpdateRunRequest): @@ -13193,10 +13282,13 @@ def test_update_run_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.LineageRestInterceptor, "post_update_run" ) as post, mock.patch.object( + transports.LineageRestInterceptor, "post_update_run_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.LineageRestInterceptor, "pre_update_run" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = lineage.UpdateRunRequest.pb(lineage.UpdateRunRequest()) transcode.return_value = { "method": "post", @@ -13218,6 +13310,7 @@ def test_update_run_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = lineage.Run() + post_with_metadata.return_value = lineage.Run(), metadata client.update_run( request, @@ -13229,6 +13322,7 @@ def test_update_run_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_run_rest_bad_request(request_type=lineage.GetRunRequest): @@ -13317,10 +13411,13 @@ def test_get_run_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.LineageRestInterceptor, "post_get_run" ) as post, mock.patch.object( + 
transports.LineageRestInterceptor, "post_get_run_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.LineageRestInterceptor, "pre_get_run" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = lineage.GetRunRequest.pb(lineage.GetRunRequest()) transcode.return_value = { "method": "post", @@ -13342,6 +13439,7 @@ def test_get_run_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = lineage.Run() + post_with_metadata.return_value = lineage.Run(), metadata client.get_run( request, @@ -13353,6 +13451,7 @@ def test_get_run_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_runs_rest_bad_request(request_type=lineage.ListRunsRequest): @@ -13433,10 +13532,13 @@ def test_list_runs_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.LineageRestInterceptor, "post_list_runs" ) as post, mock.patch.object( + transports.LineageRestInterceptor, "post_list_runs_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.LineageRestInterceptor, "pre_list_runs" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = lineage.ListRunsRequest.pb(lineage.ListRunsRequest()) transcode.return_value = { "method": "post", @@ -13458,6 +13560,7 @@ def test_list_runs_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = lineage.ListRunsResponse() + post_with_metadata.return_value = lineage.ListRunsResponse(), metadata client.list_runs( request, @@ -13469,6 +13572,7 @@ def test_list_runs_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_run_rest_bad_request(request_type=lineage.DeleteRunRequest): @@ -13549,10 +13653,13 @@ def test_delete_run_rest_interceptors(null_interceptor): ), mock.patch.object( transports.LineageRestInterceptor, "post_delete_run" ) as post, mock.patch.object( + transports.LineageRestInterceptor, "post_delete_run_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.LineageRestInterceptor, "pre_delete_run" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = lineage.DeleteRunRequest.pb(lineage.DeleteRunRequest()) transcode.return_value = { "method": "post", @@ -13574,6 +13681,7 @@ def test_delete_run_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.delete_run( request, @@ -13585,6 +13693,7 @@ def test_delete_run_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_lineage_event_rest_bad_request( @@ -13749,10 +13858,13 @@ def test_create_lineage_event_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.LineageRestInterceptor, "post_create_lineage_event" ) as post, mock.patch.object( + transports.LineageRestInterceptor, "post_create_lineage_event_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.LineageRestInterceptor, "pre_create_lineage_event" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = 
lineage.CreateLineageEventRequest.pb( lineage.CreateLineageEventRequest() ) @@ -13776,6 +13888,7 @@ def test_create_lineage_event_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = lineage.LineageEvent() + post_with_metadata.return_value = lineage.LineageEvent(), metadata client.create_lineage_event( request, @@ -13787,6 +13900,7 @@ def test_create_lineage_event_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_lineage_event_rest_bad_request( @@ -13873,10 +13987,13 @@ def test_get_lineage_event_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.LineageRestInterceptor, "post_get_lineage_event" ) as post, mock.patch.object( + transports.LineageRestInterceptor, "post_get_lineage_event_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.LineageRestInterceptor, "pre_get_lineage_event" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = lineage.GetLineageEventRequest.pb(lineage.GetLineageEventRequest()) transcode.return_value = { "method": "post", @@ -13898,6 +14015,7 @@ def test_get_lineage_event_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = lineage.LineageEvent() + post_with_metadata.return_value = lineage.LineageEvent(), metadata client.get_lineage_event( request, @@ -13909,6 +14027,7 @@ def test_get_lineage_event_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_lineage_events_rest_bad_request( @@ -13995,10 +14114,13 @@ def test_list_lineage_events_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.LineageRestInterceptor, "post_list_lineage_events" ) as post, mock.patch.object( + transports.LineageRestInterceptor, "post_list_lineage_events_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.LineageRestInterceptor, "pre_list_lineage_events" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = lineage.ListLineageEventsRequest.pb( lineage.ListLineageEventsRequest() ) @@ -14024,6 +14146,7 @@ def test_list_lineage_events_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = lineage.ListLineageEventsResponse() + post_with_metadata.return_value = lineage.ListLineageEventsResponse(), metadata client.list_lineage_events( request, @@ -14035,6 +14158,7 @@ def test_list_lineage_events_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_lineage_event_rest_bad_request( @@ -14226,10 +14350,13 @@ def test_search_links_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.LineageRestInterceptor, "post_search_links" ) as post, mock.patch.object( + transports.LineageRestInterceptor, "post_search_links_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.LineageRestInterceptor, "pre_search_links" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = lineage.SearchLinksRequest.pb(lineage.SearchLinksRequest()) transcode.return_value = { "method": "post", @@ -14253,6 +14380,7 @@ def test_search_links_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata 
post.return_value = lineage.SearchLinksResponse() + post_with_metadata.return_value = lineage.SearchLinksResponse(), metadata client.search_links( request, @@ -14264,6 +14392,7 @@ def test_search_links_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_batch_search_link_processes_rest_bad_request( @@ -14346,10 +14475,14 @@ def test_batch_search_link_processes_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.LineageRestInterceptor, "post_batch_search_link_processes" ) as post, mock.patch.object( + transports.LineageRestInterceptor, + "post_batch_search_link_processes_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.LineageRestInterceptor, "pre_batch_search_link_processes" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = lineage.BatchSearchLinkProcessesRequest.pb( lineage.BatchSearchLinkProcessesRequest() ) @@ -14375,6 +14508,10 @@ def test_batch_search_link_processes_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = lineage.BatchSearchLinkProcessesResponse() + post_with_metadata.return_value = ( + lineage.BatchSearchLinkProcessesResponse(), + metadata, + ) client.batch_search_link_processes( request, @@ -14386,6 +14523,7 @@ def test_batch_search_link_processes_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_cancel_operation_rest_bad_request( diff --git a/packages/google-cloud-datacatalog/CHANGELOG.md b/packages/google-cloud-datacatalog/CHANGELOG.md index b34e552cb24f..638e699d0263 100644 --- a/packages/google-cloud-datacatalog/CHANGELOG.md +++ b/packages/google-cloud-datacatalog/CHANGELOG.md @@ -4,6 +4,21 @@ [1]: https://pypi.org/project/google-cloud-datacatalog/#history +## [3.25.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-datacatalog-v3.24.1...google-cloud-datacatalog-v3.25.0) (2025-02-12) + + +### Features + +* Add REST Interceptors which support reading metadata ([e92d527](https://github.com/googleapis/google-cloud-python/commit/e92d52797ffbce45d033eb81af24e0cad32baa55)) +* Add support for reading selective GAPIC generation methods from service YAML ([e92d527](https://github.com/googleapis/google-cloud-python/commit/e92d52797ffbce45d033eb81af24e0cad32baa55)) + +## [3.24.1](https://github.com/googleapis/google-cloud-python/compare/google-cloud-datacatalog-v3.24.0...google-cloud-datacatalog-v3.24.1) (2024-12-18) + + +### Documentation + +* [google-cloud-datacatalog] fix markdown reference in `TagTemplate.is_publicly_readable` comment ([#13369](https://github.com/googleapis/google-cloud-python/issues/13369)) ([d8afab0](https://github.com/googleapis/google-cloud-python/commit/d8afab0223e90ea0f13a8669cfd88ff06318d4ec)) + ## [3.24.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-datacatalog-v3.23.0...google-cloud-datacatalog-v3.24.0) (2024-12-12) diff --git a/packages/google-cloud-datacatalog/google/cloud/datacatalog/gapic_version.py b/packages/google-cloud-datacatalog/google/cloud/datacatalog/gapic_version.py index 22dde061b013..8adcea73e25d 100644 --- a/packages/google-cloud-datacatalog/google/cloud/datacatalog/gapic_version.py +++ b/packages/google-cloud-datacatalog/google/cloud/datacatalog/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # 
limitations under the License. # -__version__ = "3.24.0" # {x-release-please-version} +__version__ = "3.25.0" # {x-release-please-version} diff --git a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/gapic_version.py b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/gapic_version.py index 22dde061b013..8adcea73e25d 100644 --- a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/gapic_version.py +++ b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "3.24.0" # {x-release-please-version} +__version__ = "3.25.0" # {x-release-please-version} diff --git a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/services/data_catalog/client.py b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/services/data_catalog/client.py index 1bfb7d97e535..15c733b9abe9 100644 --- a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/services/data_catalog/client.py +++ b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/services/data_catalog/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -624,6 +626,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -5402,16 +5431,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -5457,16 +5490,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. 
+ return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def delete_operation( self, diff --git a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/services/policy_tag_manager/client.py b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/services/policy_tag_manager/client.py index 6bbf36a3252a..8540905771a1 100644 --- a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/services/policy_tag_manager/client.py +++ b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/services/policy_tag_manager/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -516,6 +518,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -2235,16 +2264,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -2290,16 +2323,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def delete_operation( self, diff --git a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/services/policy_tag_manager_serialization/client.py b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/services/policy_tag_manager_serialization/client.py index 78170d5e8e1a..4819928882ba 100644 --- a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/services/policy_tag_manager_serialization/client.py +++ b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/services/policy_tag_manager_serialization/client.py @@ -14,6 +14,8 @@ # limitations under the License. 
# from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -500,6 +502,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -1076,16 +1105,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -1131,16 +1164,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def delete_operation( self, diff --git a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/types/tags.py b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/types/tags.py index 8d35723897b6..a87724c8daba 100644 --- a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/types/tags.py +++ b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/types/tags.py @@ -257,7 +257,8 @@ class TagTemplate(proto.Message): is_publicly_readable (bool): Indicates whether tags created with this template are public. Public tags do not require tag template access to - appear in [ListTags][google.cloud.datacatalog.v1.ListTags] + appear in + [ListTags][google.cloud.datacatalog.v1.DataCatalog.ListTags] API response. 
Additionally, you can search for a public tag by value with diff --git a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/gapic_version.py b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/gapic_version.py index 22dde061b013..8adcea73e25d 100644 --- a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/gapic_version.py +++ b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "3.24.0" # {x-release-please-version} +__version__ = "3.25.0" # {x-release-please-version} diff --git a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/services/data_catalog/client.py b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/services/data_catalog/client.py index 5b63130b88e1..2148c7ddd103 100644 --- a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/services/data_catalog/client.py +++ b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/services/data_catalog/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -621,6 +623,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. diff --git a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/client.py b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/client.py index cdaa782fb712..e1c575e0da41 100644 --- a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/client.py +++ b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -512,6 +514,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. 
+ """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. diff --git a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/client.py b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/client.py index 7ff41a67e144..f8df202d3784 100644 --- a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/client.py +++ b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -497,6 +499,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. 
diff --git a/packages/google-cloud-datacatalog/samples/generated_samples/snippet_metadata_google.cloud.datacatalog.v1.json b/packages/google-cloud-datacatalog/samples/generated_samples/snippet_metadata_google.cloud.datacatalog.v1.json index 12b2442f2c3e..fcb5ceeba367 100644 --- a/packages/google-cloud-datacatalog/samples/generated_samples/snippet_metadata_google.cloud.datacatalog.v1.json +++ b/packages/google-cloud-datacatalog/samples/generated_samples/snippet_metadata_google.cloud.datacatalog.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-datacatalog", - "version": "3.24.0" + "version": "3.25.0" }, "snippets": [ { diff --git a/packages/google-cloud-datacatalog/samples/generated_samples/snippet_metadata_google.cloud.datacatalog.v1beta1.json b/packages/google-cloud-datacatalog/samples/generated_samples/snippet_metadata_google.cloud.datacatalog.v1beta1.json index e4776100cd80..41a5bcc96bc5 100644 --- a/packages/google-cloud-datacatalog/samples/generated_samples/snippet_metadata_google.cloud.datacatalog.v1beta1.json +++ b/packages/google-cloud-datacatalog/samples/generated_samples/snippet_metadata_google.cloud.datacatalog.v1beta1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-datacatalog", - "version": "3.24.0" + "version": "3.25.0" }, "snippets": [ { diff --git a/packages/google-cloud-datacatalog/tests/unit/gapic/datacatalog_v1/test_data_catalog.py b/packages/google-cloud-datacatalog/tests/unit/gapic/datacatalog_v1/test_data_catalog.py index 2fa4a93c0d42..fc309ea4b5e2 100644 --- a/packages/google-cloud-datacatalog/tests/unit/gapic/datacatalog_v1/test_data_catalog.py +++ b/packages/google-cloud-datacatalog/tests/unit/gapic/datacatalog_v1/test_data_catalog.py @@ -22,6 +22,7 @@ except ImportError: # pragma: NO COVER import mock +import json import math from google.api_core import api_core_version @@ -85,6 +86,13 @@ usage, ) +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -324,6 +332,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = DataCatalogClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = DataCatalogClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-datacatalog/tests/unit/gapic/datacatalog_v1/test_policy_tag_manager.py b/packages/google-cloud-datacatalog/tests/unit/gapic/datacatalog_v1/test_policy_tag_manager.py index 972254a07769..f9e11108ee78 100644 --- a/packages/google-cloud-datacatalog/tests/unit/gapic/datacatalog_v1/test_policy_tag_manager.py +++ b/packages/google-cloud-datacatalog/tests/unit/gapic/datacatalog_v1/test_policy_tag_manager.py @@ -22,6 +22,7 @@ except ImportError: # pragma: NO COVER import mock +import json import math from google.api_core import api_core_version @@ -62,6 +63,13 @@ ) from google.cloud.datacatalog_v1.types import common, policytagmanager, timestamps +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -321,6 +329,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = PolicyTagManagerClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = PolicyTagManagerClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-datacatalog/tests/unit/gapic/datacatalog_v1/test_policy_tag_manager_serialization.py b/packages/google-cloud-datacatalog/tests/unit/gapic/datacatalog_v1/test_policy_tag_manager_serialization.py index c3975dc8fb0f..657c63aff768 100644 --- a/packages/google-cloud-datacatalog/tests/unit/gapic/datacatalog_v1/test_policy_tag_manager_serialization.py +++ b/packages/google-cloud-datacatalog/tests/unit/gapic/datacatalog_v1/test_policy_tag_manager_serialization.py @@ -22,6 +22,7 @@ except ImportError: # pragma: NO COVER import mock +import json import math from google.api_core import api_core_version @@ -62,6 +63,13 @@ timestamps, ) +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -349,6 +357,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = PolicyTagManagerSerializationClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = PolicyTagManagerSerializationClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-datacatalog/tests/unit/gapic/datacatalog_v1beta1/test_data_catalog.py b/packages/google-cloud-datacatalog/tests/unit/gapic/datacatalog_v1beta1/test_data_catalog.py index bfab0c177432..4db3d0d4919c 100644 --- a/packages/google-cloud-datacatalog/tests/unit/gapic/datacatalog_v1beta1/test_data_catalog.py +++ b/packages/google-cloud-datacatalog/tests/unit/gapic/datacatalog_v1beta1/test_data_catalog.py @@ -22,6 +22,7 @@ except ImportError: # pragma: NO COVER import mock +import json import math from google.api_core import api_core_version @@ -72,6 +73,13 @@ usage, ) +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -311,6 +319,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = DataCatalogClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = DataCatalogClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-datacatalog/tests/unit/gapic/datacatalog_v1beta1/test_policy_tag_manager.py b/packages/google-cloud-datacatalog/tests/unit/gapic/datacatalog_v1beta1/test_policy_tag_manager.py index 810cb501ae32..e4e169219449 100644 --- a/packages/google-cloud-datacatalog/tests/unit/gapic/datacatalog_v1beta1/test_policy_tag_manager.py +++ b/packages/google-cloud-datacatalog/tests/unit/gapic/datacatalog_v1beta1/test_policy_tag_manager.py @@ -22,6 +22,7 @@ except ImportError: # pragma: NO COVER import mock +import json import math from google.api_core import api_core_version @@ -62,6 +63,13 @@ ) from google.cloud.datacatalog_v1beta1.types import common, policytagmanager, timestamps +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -321,6 +329,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = PolicyTagManagerClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = PolicyTagManagerClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-datacatalog/tests/unit/gapic/datacatalog_v1beta1/test_policy_tag_manager_serialization.py b/packages/google-cloud-datacatalog/tests/unit/gapic/datacatalog_v1beta1/test_policy_tag_manager_serialization.py index ec57ea454df9..0e23f26221f7 100644 --- a/packages/google-cloud-datacatalog/tests/unit/gapic/datacatalog_v1beta1/test_policy_tag_manager_serialization.py +++ b/packages/google-cloud-datacatalog/tests/unit/gapic/datacatalog_v1beta1/test_policy_tag_manager_serialization.py @@ -22,6 +22,7 @@ except ImportError: # pragma: NO COVER import mock +import json import math from google.api_core import api_core_version @@ -61,6 +62,13 @@ policytagmanagerserialization, ) +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -348,6 +356,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = PolicyTagManagerSerializationClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = PolicyTagManagerSerializationClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-dataflow-client/CHANGELOG.md b/packages/google-cloud-dataflow-client/CHANGELOG.md index f3b611de09a1..e5869f103d96 100644 --- a/packages/google-cloud-dataflow-client/CHANGELOG.md +++ b/packages/google-cloud-dataflow-client/CHANGELOG.md @@ -1,5 +1,13 @@ # Changelog +## [0.8.16](https://github.com/googleapis/google-cloud-python/compare/google-cloud-dataflow-client-v0.8.15...google-cloud-dataflow-client-v0.8.16) (2025-02-12) + + +### Features + +* Add REST Interceptors which support reading metadata ([e92d527](https://github.com/googleapis/google-cloud-python/commit/e92d52797ffbce45d033eb81af24e0cad32baa55)) +* Add support for reading selective GAPIC generation methods from service YAML ([e92d527](https://github.com/googleapis/google-cloud-python/commit/e92d52797ffbce45d033eb81af24e0cad32baa55)) + ## [0.8.15](https://github.com/googleapis/google-cloud-python/compare/google-cloud-dataflow-client-v0.8.14...google-cloud-dataflow-client-v0.8.15) (2024-12-12) diff --git a/packages/google-cloud-dataflow-client/google/cloud/dataflow/gapic_version.py b/packages/google-cloud-dataflow-client/google/cloud/dataflow/gapic_version.py index 3fe6a5d4ecad..b862ceb75417 100644 --- a/packages/google-cloud-dataflow-client/google/cloud/dataflow/gapic_version.py +++ b/packages/google-cloud-dataflow-client/google/cloud/dataflow/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.8.15" # {x-release-please-version} +__version__ = "0.8.16" # {x-release-please-version} diff --git a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/gapic_version.py b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/gapic_version.py index 3fe6a5d4ecad..b862ceb75417 100644 --- a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/gapic_version.py +++ b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.8.15" # {x-release-please-version} +__version__ = "0.8.16" # {x-release-please-version} diff --git a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/flex_templates_service/client.py b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/flex_templates_service/client.py index 7fd3ec7a3bf3..1bdd8bac54da 100644 --- a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/flex_templates_service/client.py +++ b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/flex_templates_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -463,6 +465,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. diff --git a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/flex_templates_service/transports/rest.py b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/flex_templates_service/transports/rest.py index 243b73ea7328..d168d1fcb8a4 100644 --- a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/flex_templates_service/transports/rest.py +++ b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/flex_templates_service/transports/rest.py @@ -101,12 +101,37 @@ def post_launch_flex_template( ) -> templates.LaunchFlexTemplateResponse: """Post-rpc interceptor for launch_flex_template - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_launch_flex_template_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the FlexTemplatesService server but before - it is returned to user code. + it is returned to user code. This `post_launch_flex_template` interceptor runs + before the `post_launch_flex_template_with_metadata` interceptor. """ return response + def post_launch_flex_template_with_metadata( + self, + response: templates.LaunchFlexTemplateResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + templates.LaunchFlexTemplateResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for launch_flex_template + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the FlexTemplatesService server but before it is returned to user code. 
+ + We recommend only using this `post_launch_flex_template_with_metadata` + interceptor in new development instead of the `post_launch_flex_template` interceptor. + When both interceptors are used, this `post_launch_flex_template_with_metadata` interceptor runs after the + `post_launch_flex_template` interceptor. The (possibly modified) response returned by + `post_launch_flex_template` will be passed to + `post_launch_flex_template_with_metadata`. + """ + return response, metadata + @dataclasses.dataclass class FlexTemplatesServiceRestStub: @@ -326,6 +351,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_launch_flex_template(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_launch_flex_template_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/client.py b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/client.py index fabfd436124f..b667f4580925 100644 --- a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/client.py +++ b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -466,6 +468,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. diff --git a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/transports/rest.py b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/transports/rest.py index 2a352e8f0f87..ddd3556dc54b 100644 --- a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/transports/rest.py +++ b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/transports/rest.py @@ -147,12 +147,35 @@ def post_aggregated_list_jobs( ) -> jobs.ListJobsResponse: """Post-rpc interceptor for aggregated_list_jobs - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_aggregated_list_jobs_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response after it is returned by the JobsV1Beta3 server but before - it is returned to user code. + it is returned to user code. This `post_aggregated_list_jobs` interceptor runs + before the `post_aggregated_list_jobs_with_metadata` interceptor. """ return response + def post_aggregated_list_jobs_with_metadata( + self, + response: jobs.ListJobsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[jobs.ListJobsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for aggregated_list_jobs + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the JobsV1Beta3 server but before it is returned to user code. + + We recommend only using this `post_aggregated_list_jobs_with_metadata` + interceptor in new development instead of the `post_aggregated_list_jobs` interceptor. + When both interceptors are used, this `post_aggregated_list_jobs_with_metadata` interceptor runs after the + `post_aggregated_list_jobs` interceptor. The (possibly modified) response returned by + `post_aggregated_list_jobs` will be passed to + `post_aggregated_list_jobs_with_metadata`. + """ + return response, metadata + def pre_create_job( self, request: jobs.CreateJobRequest, @@ -168,12 +191,33 @@ def pre_create_job( def post_create_job(self, response: jobs.Job) -> jobs.Job: """Post-rpc interceptor for create_job - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_job_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the JobsV1Beta3 server but before - it is returned to user code. + it is returned to user code. This `post_create_job` interceptor runs + before the `post_create_job_with_metadata` interceptor. """ return response + def post_create_job_with_metadata( + self, response: jobs.Job, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[jobs.Job, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_job + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the JobsV1Beta3 server but before it is returned to user code. + + We recommend only using this `post_create_job_with_metadata` + interceptor in new development instead of the `post_create_job` interceptor. + When both interceptors are used, this `post_create_job_with_metadata` interceptor runs after the + `post_create_job` interceptor. The (possibly modified) response returned by + `post_create_job` will be passed to + `post_create_job_with_metadata`. + """ + return response, metadata + def pre_get_job( self, request: jobs.GetJobRequest, @@ -189,12 +233,33 @@ def pre_get_job( def post_get_job(self, response: jobs.Job) -> jobs.Job: """Post-rpc interceptor for get_job - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_job_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the JobsV1Beta3 server but before - it is returned to user code. + it is returned to user code. This `post_get_job` interceptor runs + before the `post_get_job_with_metadata` interceptor. 
""" return response + def post_get_job_with_metadata( + self, response: jobs.Job, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[jobs.Job, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_job + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the JobsV1Beta3 server but before it is returned to user code. + + We recommend only using this `post_get_job_with_metadata` + interceptor in new development instead of the `post_get_job` interceptor. + When both interceptors are used, this `post_get_job_with_metadata` interceptor runs after the + `post_get_job` interceptor. The (possibly modified) response returned by + `post_get_job` will be passed to + `post_get_job_with_metadata`. + """ + return response, metadata + def pre_list_jobs( self, request: jobs.ListJobsRequest, @@ -210,12 +275,35 @@ def pre_list_jobs( def post_list_jobs(self, response: jobs.ListJobsResponse) -> jobs.ListJobsResponse: """Post-rpc interceptor for list_jobs - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_jobs_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the JobsV1Beta3 server but before - it is returned to user code. + it is returned to user code. This `post_list_jobs` interceptor runs + before the `post_list_jobs_with_metadata` interceptor. """ return response + def post_list_jobs_with_metadata( + self, + response: jobs.ListJobsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[jobs.ListJobsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_jobs + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the JobsV1Beta3 server but before it is returned to user code. + + We recommend only using this `post_list_jobs_with_metadata` + interceptor in new development instead of the `post_list_jobs` interceptor. + When both interceptors are used, this `post_list_jobs_with_metadata` interceptor runs after the + `post_list_jobs` interceptor. The (possibly modified) response returned by + `post_list_jobs` will be passed to + `post_list_jobs_with_metadata`. + """ + return response, metadata + def pre_snapshot_job( self, request: jobs.SnapshotJobRequest, @@ -231,12 +319,35 @@ def pre_snapshot_job( def post_snapshot_job(self, response: snapshots.Snapshot) -> snapshots.Snapshot: """Post-rpc interceptor for snapshot_job - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_snapshot_job_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the JobsV1Beta3 server but before - it is returned to user code. + it is returned to user code. This `post_snapshot_job` interceptor runs + before the `post_snapshot_job_with_metadata` interceptor. """ return response + def post_snapshot_job_with_metadata( + self, + response: snapshots.Snapshot, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[snapshots.Snapshot, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for snapshot_job + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the JobsV1Beta3 server but before it is returned to user code. + + We recommend only using this `post_snapshot_job_with_metadata` + interceptor in new development instead of the `post_snapshot_job` interceptor. 
+ When both interceptors are used, this `post_snapshot_job_with_metadata` interceptor runs after the + `post_snapshot_job` interceptor. The (possibly modified) response returned by + `post_snapshot_job` will be passed to + `post_snapshot_job_with_metadata`. + """ + return response, metadata + def pre_update_job( self, request: jobs.UpdateJobRequest, @@ -252,12 +363,33 @@ def pre_update_job( def post_update_job(self, response: jobs.Job) -> jobs.Job: """Post-rpc interceptor for update_job - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_job_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the JobsV1Beta3 server but before - it is returned to user code. + it is returned to user code. This `post_update_job` interceptor runs + before the `post_update_job_with_metadata` interceptor. """ return response + def post_update_job_with_metadata( + self, response: jobs.Job, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[jobs.Job, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_job + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the JobsV1Beta3 server but before it is returned to user code. + + We recommend only using this `post_update_job_with_metadata` + interceptor in new development instead of the `post_update_job` interceptor. + When both interceptors are used, this `post_update_job_with_metadata` interceptor runs after the + `post_update_job` interceptor. The (possibly modified) response returned by + `post_update_job` will be passed to + `post_update_job_with_metadata`. + """ + return response, metadata + @dataclasses.dataclass class JobsV1Beta3RestStub: @@ -474,6 +606,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_aggregated_list_jobs(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_aggregated_list_jobs_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -643,6 +779,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_job(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_job_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -786,6 +926,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_job(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_job_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -936,6 +1080,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_jobs(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_jobs_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1089,6 +1237,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) 
resp = self._interceptor.post_snapshot_job(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_snapshot_job_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1240,6 +1392,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_job(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_job_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/client.py b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/client.py index a539108831ae..af85219277d7 100644 --- a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/client.py +++ b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -464,6 +466,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. diff --git a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/transports/rest.py b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/transports/rest.py index e34a59dadc9e..ada8d871681a 100644 --- a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/transports/rest.py +++ b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/transports/rest.py @@ -101,12 +101,37 @@ def post_list_job_messages( ) -> messages.ListJobMessagesResponse: """Post-rpc interceptor for list_job_messages - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_job_messages_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the MessagesV1Beta3 server but before - it is returned to user code. + it is returned to user code. This `post_list_job_messages` interceptor runs + before the `post_list_job_messages_with_metadata` interceptor. 
""" return response + def post_list_job_messages_with_metadata( + self, + response: messages.ListJobMessagesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + messages.ListJobMessagesResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_job_messages + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the MessagesV1Beta3 server but before it is returned to user code. + + We recommend only using this `post_list_job_messages_with_metadata` + interceptor in new development instead of the `post_list_job_messages` interceptor. + When both interceptors are used, this `post_list_job_messages_with_metadata` interceptor runs after the + `post_list_job_messages` interceptor. The (possibly modified) response returned by + `post_list_job_messages` will be passed to + `post_list_job_messages_with_metadata`. + """ + return response, metadata + @dataclasses.dataclass class MessagesV1Beta3RestStub: @@ -320,6 +345,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_job_messages(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_job_messages_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/client.py b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/client.py index 3974d1e608b3..067dfcf1c399 100644 --- a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/client.py +++ b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -466,6 +468,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. 
diff --git a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/transports/rest.py b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/transports/rest.py index 1166941a61d3..1a3a46e5b4fe 100644 --- a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/transports/rest.py +++ b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/transports/rest.py @@ -117,12 +117,35 @@ def post_get_job_execution_details( ) -> metrics.JobExecutionDetails: """Post-rpc interceptor for get_job_execution_details - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_job_execution_details_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the MetricsV1Beta3 server but before - it is returned to user code. + it is returned to user code. This `post_get_job_execution_details` interceptor runs + before the `post_get_job_execution_details_with_metadata` interceptor. """ return response + def post_get_job_execution_details_with_metadata( + self, + response: metrics.JobExecutionDetails, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[metrics.JobExecutionDetails, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_job_execution_details + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the MetricsV1Beta3 server but before it is returned to user code. + + We recommend only using this `post_get_job_execution_details_with_metadata` + interceptor in new development instead of the `post_get_job_execution_details` interceptor. + When both interceptors are used, this `post_get_job_execution_details_with_metadata` interceptor runs after the + `post_get_job_execution_details` interceptor. The (possibly modified) response returned by + `post_get_job_execution_details` will be passed to + `post_get_job_execution_details_with_metadata`. + """ + return response, metadata + def pre_get_job_metrics( self, request: metrics.GetJobMetricsRequest, @@ -138,12 +161,35 @@ def pre_get_job_metrics( def post_get_job_metrics(self, response: metrics.JobMetrics) -> metrics.JobMetrics: """Post-rpc interceptor for get_job_metrics - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_job_metrics_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the MetricsV1Beta3 server but before - it is returned to user code. + it is returned to user code. This `post_get_job_metrics` interceptor runs + before the `post_get_job_metrics_with_metadata` interceptor. """ return response + def post_get_job_metrics_with_metadata( + self, + response: metrics.JobMetrics, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[metrics.JobMetrics, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_job_metrics + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the MetricsV1Beta3 server but before it is returned to user code. + + We recommend only using this `post_get_job_metrics_with_metadata` + interceptor in new development instead of the `post_get_job_metrics` interceptor. + When both interceptors are used, this `post_get_job_metrics_with_metadata` interceptor runs after the + `post_get_job_metrics` interceptor. 
The (possibly modified) response returned by + `post_get_job_metrics` will be passed to + `post_get_job_metrics_with_metadata`. + """ + return response, metadata + def pre_get_stage_execution_details( self, request: metrics.GetStageExecutionDetailsRequest, @@ -163,12 +209,35 @@ def post_get_stage_execution_details( ) -> metrics.StageExecutionDetails: """Post-rpc interceptor for get_stage_execution_details - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_stage_execution_details_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the MetricsV1Beta3 server but before - it is returned to user code. + it is returned to user code. This `post_get_stage_execution_details` interceptor runs + before the `post_get_stage_execution_details_with_metadata` interceptor. """ return response + def post_get_stage_execution_details_with_metadata( + self, + response: metrics.StageExecutionDetails, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[metrics.StageExecutionDetails, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_stage_execution_details + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the MetricsV1Beta3 server but before it is returned to user code. + + We recommend only using this `post_get_stage_execution_details_with_metadata` + interceptor in new development instead of the `post_get_stage_execution_details` interceptor. + When both interceptors are used, this `post_get_stage_execution_details_with_metadata` interceptor runs after the + `post_get_stage_execution_details` interceptor. The (possibly modified) response returned by + `post_get_stage_execution_details` will be passed to + `post_get_stage_execution_details_with_metadata`. 
+ """ + return response, metadata + @dataclasses.dataclass class MetricsV1Beta3RestStub: @@ -381,6 +450,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_job_execution_details(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_job_execution_details_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -532,6 +605,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_job_metrics(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_job_metrics_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -682,6 +759,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_stage_execution_details(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_stage_execution_details_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/snapshots_v1_beta3/client.py b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/snapshots_v1_beta3/client.py index 74540655751b..42b87cb8b645 100644 --- a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/snapshots_v1_beta3/client.py +++ b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/snapshots_v1_beta3/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -466,6 +468,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. 
diff --git a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/snapshots_v1_beta3/transports/rest.py b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/snapshots_v1_beta3/transports/rest.py index 625d64d36519..f5ad6c3d2379 100644 --- a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/snapshots_v1_beta3/transports/rest.py +++ b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/snapshots_v1_beta3/transports/rest.py @@ -117,12 +117,37 @@ def post_delete_snapshot( ) -> snapshots.DeleteSnapshotResponse: """Post-rpc interceptor for delete_snapshot - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_snapshot_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the SnapshotsV1Beta3 server but before - it is returned to user code. + it is returned to user code. This `post_delete_snapshot` interceptor runs + before the `post_delete_snapshot_with_metadata` interceptor. """ return response + def post_delete_snapshot_with_metadata( + self, + response: snapshots.DeleteSnapshotResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + snapshots.DeleteSnapshotResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for delete_snapshot + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SnapshotsV1Beta3 server but before it is returned to user code. + + We recommend only using this `post_delete_snapshot_with_metadata` + interceptor in new development instead of the `post_delete_snapshot` interceptor. + When both interceptors are used, this `post_delete_snapshot_with_metadata` interceptor runs after the + `post_delete_snapshot` interceptor. The (possibly modified) response returned by + `post_delete_snapshot` will be passed to + `post_delete_snapshot_with_metadata`. + """ + return response, metadata + def pre_get_snapshot( self, request: snapshots.GetSnapshotRequest, @@ -138,12 +163,35 @@ def pre_get_snapshot( def post_get_snapshot(self, response: snapshots.Snapshot) -> snapshots.Snapshot: """Post-rpc interceptor for get_snapshot - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_snapshot_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the SnapshotsV1Beta3 server but before - it is returned to user code. + it is returned to user code. This `post_get_snapshot` interceptor runs + before the `post_get_snapshot_with_metadata` interceptor. """ return response + def post_get_snapshot_with_metadata( + self, + response: snapshots.Snapshot, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[snapshots.Snapshot, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_snapshot + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SnapshotsV1Beta3 server but before it is returned to user code. + + We recommend only using this `post_get_snapshot_with_metadata` + interceptor in new development instead of the `post_get_snapshot` interceptor. + When both interceptors are used, this `post_get_snapshot_with_metadata` interceptor runs after the + `post_get_snapshot` interceptor. The (possibly modified) response returned by + `post_get_snapshot` will be passed to + `post_get_snapshot_with_metadata`. 
+ """ + return response, metadata + def pre_list_snapshots( self, request: snapshots.ListSnapshotsRequest, @@ -161,12 +209,37 @@ def post_list_snapshots( ) -> snapshots.ListSnapshotsResponse: """Post-rpc interceptor for list_snapshots - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_snapshots_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the SnapshotsV1Beta3 server but before - it is returned to user code. + it is returned to user code. This `post_list_snapshots` interceptor runs + before the `post_list_snapshots_with_metadata` interceptor. """ return response + def post_list_snapshots_with_metadata( + self, + response: snapshots.ListSnapshotsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + snapshots.ListSnapshotsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_snapshots + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SnapshotsV1Beta3 server but before it is returned to user code. + + We recommend only using this `post_list_snapshots_with_metadata` + interceptor in new development instead of the `post_list_snapshots` interceptor. + When both interceptors are used, this `post_list_snapshots_with_metadata` interceptor runs after the + `post_list_snapshots` interceptor. The (possibly modified) response returned by + `post_list_snapshots` will be passed to + `post_list_snapshots_with_metadata`. + """ + return response, metadata + @dataclasses.dataclass class SnapshotsV1Beta3RestStub: @@ -372,6 +445,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_snapshot(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_snapshot_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -515,6 +592,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_snapshot(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_snapshot_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -655,6 +736,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_snapshots(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_snapshots_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/templates_service/client.py b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/templates_service/client.py index 4de564ffc698..834ed7a21579 100644 --- a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/templates_service/client.py +++ b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/templates_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. 
# from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -466,6 +468,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. diff --git a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/templates_service/transports/rest.py b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/templates_service/transports/rest.py index 7211a753a289..cc898e03aaca 100644 --- a/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/templates_service/transports/rest.py +++ b/packages/google-cloud-dataflow-client/google/cloud/dataflow_v1beta3/services/templates_service/transports/rest.py @@ -115,12 +115,33 @@ def pre_create_job_from_template( def post_create_job_from_template(self, response: jobs.Job) -> jobs.Job: """Post-rpc interceptor for create_job_from_template - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_job_from_template_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the TemplatesService server but before - it is returned to user code. + it is returned to user code. This `post_create_job_from_template` interceptor runs + before the `post_create_job_from_template_with_metadata` interceptor. """ return response + def post_create_job_from_template_with_metadata( + self, response: jobs.Job, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[jobs.Job, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_job_from_template + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the TemplatesService server but before it is returned to user code. + + We recommend only using this `post_create_job_from_template_with_metadata` + interceptor in new development instead of the `post_create_job_from_template` interceptor. + When both interceptors are used, this `post_create_job_from_template_with_metadata` interceptor runs after the + `post_create_job_from_template` interceptor. The (possibly modified) response returned by + `post_create_job_from_template` will be passed to + `post_create_job_from_template_with_metadata`. + """ + return response, metadata + def pre_get_template( self, request: templates.GetTemplateRequest, @@ -138,12 +159,35 @@ def post_get_template( ) -> templates.GetTemplateResponse: """Post-rpc interceptor for get_template - Override in a subclass to manipulate the response + DEPRECATED. 
Please use the `post_get_template_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the TemplatesService server but before - it is returned to user code. + it is returned to user code. This `post_get_template` interceptor runs + before the `post_get_template_with_metadata` interceptor. """ return response + def post_get_template_with_metadata( + self, + response: templates.GetTemplateResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[templates.GetTemplateResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_template + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the TemplatesService server but before it is returned to user code. + + We recommend only using this `post_get_template_with_metadata` + interceptor in new development instead of the `post_get_template` interceptor. + When both interceptors are used, this `post_get_template_with_metadata` interceptor runs after the + `post_get_template` interceptor. The (possibly modified) response returned by + `post_get_template` will be passed to + `post_get_template_with_metadata`. + """ + return response, metadata + def pre_launch_template( self, request: templates.LaunchTemplateRequest, @@ -163,12 +207,37 @@ def post_launch_template( ) -> templates.LaunchTemplateResponse: """Post-rpc interceptor for launch_template - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_launch_template_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the TemplatesService server but before - it is returned to user code. + it is returned to user code. This `post_launch_template` interceptor runs + before the `post_launch_template_with_metadata` interceptor. """ return response + def post_launch_template_with_metadata( + self, + response: templates.LaunchTemplateResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + templates.LaunchTemplateResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for launch_template + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the TemplatesService server but before it is returned to user code. + + We recommend only using this `post_launch_template_with_metadata` + interceptor in new development instead of the `post_launch_template` interceptor. + When both interceptors are used, this `post_launch_template_with_metadata` interceptor runs after the + `post_launch_template` interceptor. The (possibly modified) response returned by + `post_launch_template` will be passed to + `post_launch_template_with_metadata`. 
+ """ + return response, metadata + @dataclasses.dataclass class TemplatesServiceRestStub: @@ -388,6 +457,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_job_from_template(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_job_from_template_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -531,6 +604,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_template(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_template_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -679,6 +756,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_launch_template(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_launch_template_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-dataflow-client/samples/generated_samples/snippet_metadata_google.dataflow.v1beta3.json b/packages/google-cloud-dataflow-client/samples/generated_samples/snippet_metadata_google.dataflow.v1beta3.json index 2b8596034c81..ca189d9017e8 100644 --- a/packages/google-cloud-dataflow-client/samples/generated_samples/snippet_metadata_google.dataflow.v1beta3.json +++ b/packages/google-cloud-dataflow-client/samples/generated_samples/snippet_metadata_google.dataflow.v1beta3.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-dataflow-client", - "version": "0.8.15" + "version": "0.8.16" }, "snippets": [ { diff --git a/packages/google-cloud-dataflow-client/tests/unit/gapic/dataflow_v1beta3/test_flex_templates_service.py b/packages/google-cloud-dataflow-client/tests/unit/gapic/dataflow_v1beta3/test_flex_templates_service.py index 1837f09caa75..cad3272f73df 100644 --- a/packages/google-cloud-dataflow-client/tests/unit/gapic/dataflow_v1beta3/test_flex_templates_service.py +++ b/packages/google-cloud-dataflow-client/tests/unit/gapic/dataflow_v1beta3/test_flex_templates_service.py @@ -59,6 +59,13 @@ ) from google.cloud.dataflow_v1beta3.types import environment, jobs, templates +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -332,6 +339,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = FlexTemplatesServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = FlexTemplatesServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -1673,10 +1723,14 @@ def test_launch_flex_template_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.FlexTemplatesServiceRestInterceptor, "post_launch_flex_template" ) as post, mock.patch.object( + transports.FlexTemplatesServiceRestInterceptor, + "post_launch_flex_template_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.FlexTemplatesServiceRestInterceptor, "pre_launch_flex_template" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = templates.LaunchFlexTemplateRequest.pb( templates.LaunchFlexTemplateRequest() ) @@ -1702,6 +1756,10 @@ def test_launch_flex_template_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = templates.LaunchFlexTemplateResponse() + post_with_metadata.return_value = ( + templates.LaunchFlexTemplateResponse(), + metadata, + ) client.launch_flex_template( request, @@ -1713,6 +1771,7 @@ def test_launch_flex_template_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_initialize_client_w_rest(): diff --git a/packages/google-cloud-dataflow-client/tests/unit/gapic/dataflow_v1beta3/test_jobs_v1_beta3.py b/packages/google-cloud-dataflow-client/tests/unit/gapic/dataflow_v1beta3/test_jobs_v1_beta3.py index 5293d5aca195..538753212f35 100644 --- a/packages/google-cloud-dataflow-client/tests/unit/gapic/dataflow_v1beta3/test_jobs_v1_beta3.py +++ b/packages/google-cloud-dataflow-client/tests/unit/gapic/dataflow_v1beta3/test_jobs_v1_beta3.py @@ -64,6 +64,13 @@ ) from google.cloud.dataflow_v1beta3.types import environment, jobs, snapshots +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -303,6 +310,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = JobsV1Beta3Client(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = JobsV1Beta3Client(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -4565,10 +4615,13 @@ def test_create_job_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.JobsV1Beta3RestInterceptor, "post_create_job" ) as post, mock.patch.object( + transports.JobsV1Beta3RestInterceptor, "post_create_job_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.JobsV1Beta3RestInterceptor, "pre_create_job" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = jobs.CreateJobRequest.pb(jobs.CreateJobRequest()) transcode.return_value = { "method": "post", @@ -4590,6 +4643,7 @@ def test_create_job_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = jobs.Job() + post_with_metadata.return_value = jobs.Job(), metadata client.create_job( request, @@ -4601,6 +4655,7 @@ def test_create_job_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_job_rest_bad_request(request_type=jobs.GetJobRequest): @@ -4709,10 +4764,13 @@ def test_get_job_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.JobsV1Beta3RestInterceptor, "post_get_job" ) as post, mock.patch.object( + transports.JobsV1Beta3RestInterceptor, "post_get_job_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.JobsV1Beta3RestInterceptor, "pre_get_job" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = jobs.GetJobRequest.pb(jobs.GetJobRequest()) transcode.return_value = { "method": "post", @@ -4734,6 +4792,7 @@ def test_get_job_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = jobs.Job() + post_with_metadata.return_value = jobs.Job(), metadata client.get_job( request, @@ -4745,6 +4804,7 @@ def test_get_job_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_job_rest_bad_request(request_type=jobs.UpdateJobRequest): @@ -5144,10 +5204,13 @@ def test_update_job_rest_interceptors(null_interceptor): ) as 
transcode, mock.patch.object( transports.JobsV1Beta3RestInterceptor, "post_update_job" ) as post, mock.patch.object( + transports.JobsV1Beta3RestInterceptor, "post_update_job_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.JobsV1Beta3RestInterceptor, "pre_update_job" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = jobs.UpdateJobRequest.pb(jobs.UpdateJobRequest()) transcode.return_value = { "method": "post", @@ -5169,6 +5232,7 @@ def test_update_job_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = jobs.Job() + post_with_metadata.return_value = jobs.Job(), metadata client.update_job( request, @@ -5180,6 +5244,7 @@ def test_update_job_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_jobs_rest_bad_request(request_type=jobs.ListJobsRequest): @@ -5262,10 +5327,13 @@ def test_list_jobs_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.JobsV1Beta3RestInterceptor, "post_list_jobs" ) as post, mock.patch.object( + transports.JobsV1Beta3RestInterceptor, "post_list_jobs_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.JobsV1Beta3RestInterceptor, "pre_list_jobs" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = jobs.ListJobsRequest.pb(jobs.ListJobsRequest()) transcode.return_value = { "method": "post", @@ -5287,6 +5355,7 @@ def test_list_jobs_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = jobs.ListJobsResponse() + post_with_metadata.return_value = jobs.ListJobsResponse(), metadata client.list_jobs( request, @@ -5298,6 +5367,7 @@ def test_list_jobs_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_aggregated_list_jobs_rest_bad_request(request_type=jobs.ListJobsRequest): @@ -5380,10 +5450,13 @@ def test_aggregated_list_jobs_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.JobsV1Beta3RestInterceptor, "post_aggregated_list_jobs" ) as post, mock.patch.object( + transports.JobsV1Beta3RestInterceptor, "post_aggregated_list_jobs_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.JobsV1Beta3RestInterceptor, "pre_aggregated_list_jobs" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = jobs.ListJobsRequest.pb(jobs.ListJobsRequest()) transcode.return_value = { "method": "post", @@ -5405,6 +5478,7 @@ def test_aggregated_list_jobs_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = jobs.ListJobsResponse() + post_with_metadata.return_value = jobs.ListJobsResponse(), metadata client.aggregated_list_jobs( request, @@ -5416,6 +5490,7 @@ def test_aggregated_list_jobs_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_check_active_jobs_rest_error(): @@ -5522,10 +5597,13 @@ def test_snapshot_job_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.JobsV1Beta3RestInterceptor, "post_snapshot_job" ) as post, mock.patch.object( + transports.JobsV1Beta3RestInterceptor, "post_snapshot_job_with_metadata" + ) as post_with_metadata, mock.patch.object( 
transports.JobsV1Beta3RestInterceptor, "pre_snapshot_job" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = jobs.SnapshotJobRequest.pb(jobs.SnapshotJobRequest()) transcode.return_value = { "method": "post", @@ -5547,6 +5625,7 @@ def test_snapshot_job_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = snapshots.Snapshot() + post_with_metadata.return_value = snapshots.Snapshot(), metadata client.snapshot_job( request, @@ -5558,6 +5637,7 @@ def test_snapshot_job_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_initialize_client_w_rest(): diff --git a/packages/google-cloud-dataflow-client/tests/unit/gapic/dataflow_v1beta3/test_messages_v1_beta3.py b/packages/google-cloud-dataflow-client/tests/unit/gapic/dataflow_v1beta3/test_messages_v1_beta3.py index afb9ec120b0b..7748d6fefd3a 100644 --- a/packages/google-cloud-dataflow-client/tests/unit/gapic/dataflow_v1beta3/test_messages_v1_beta3.py +++ b/packages/google-cloud-dataflow-client/tests/unit/gapic/dataflow_v1beta3/test_messages_v1_beta3.py @@ -61,6 +61,13 @@ ) from google.cloud.dataflow_v1beta3.types import messages +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -319,6 +326,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = MessagesV1Beta3Client(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = MessagesV1Beta3Client(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -1925,10 +1975,14 @@ def test_list_job_messages_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.MessagesV1Beta3RestInterceptor, "post_list_job_messages" ) as post, mock.patch.object( + transports.MessagesV1Beta3RestInterceptor, + "post_list_job_messages_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.MessagesV1Beta3RestInterceptor, "pre_list_job_messages" ) as pre: pre.assert_not_called() 
post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = messages.ListJobMessagesRequest.pb( messages.ListJobMessagesRequest() ) @@ -1954,6 +2008,7 @@ def test_list_job_messages_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = messages.ListJobMessagesResponse() + post_with_metadata.return_value = messages.ListJobMessagesResponse(), metadata client.list_job_messages( request, @@ -1965,6 +2020,7 @@ def test_list_job_messages_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_initialize_client_w_rest(): diff --git a/packages/google-cloud-dataflow-client/tests/unit/gapic/dataflow_v1beta3/test_metrics_v1_beta3.py b/packages/google-cloud-dataflow-client/tests/unit/gapic/dataflow_v1beta3/test_metrics_v1_beta3.py index 72d841d63e93..e1a3e25c2ff2 100644 --- a/packages/google-cloud-dataflow-client/tests/unit/gapic/dataflow_v1beta3/test_metrics_v1_beta3.py +++ b/packages/google-cloud-dataflow-client/tests/unit/gapic/dataflow_v1beta3/test_metrics_v1_beta3.py @@ -61,6 +61,13 @@ ) from google.cloud.dataflow_v1beta3.types import metrics +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -319,6 +326,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = MetricsV1Beta3Client(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = MetricsV1Beta3Client(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -2868,10 +2918,13 @@ def test_get_job_metrics_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.MetricsV1Beta3RestInterceptor, "post_get_job_metrics" ) as post, mock.patch.object( + transports.MetricsV1Beta3RestInterceptor, "post_get_job_metrics_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.MetricsV1Beta3RestInterceptor, "pre_get_job_metrics" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = 
metrics.GetJobMetricsRequest.pb(metrics.GetJobMetricsRequest()) transcode.return_value = { "method": "post", @@ -2893,6 +2946,7 @@ def test_get_job_metrics_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = metrics.JobMetrics() + post_with_metadata.return_value = metrics.JobMetrics(), metadata client.get_job_metrics( request, @@ -2904,6 +2958,7 @@ def test_get_job_metrics_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_job_execution_details_rest_bad_request( @@ -2988,10 +3043,14 @@ def test_get_job_execution_details_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.MetricsV1Beta3RestInterceptor, "post_get_job_execution_details" ) as post, mock.patch.object( + transports.MetricsV1Beta3RestInterceptor, + "post_get_job_execution_details_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.MetricsV1Beta3RestInterceptor, "pre_get_job_execution_details" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = metrics.GetJobExecutionDetailsRequest.pb( metrics.GetJobExecutionDetailsRequest() ) @@ -3017,6 +3076,7 @@ def test_get_job_execution_details_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = metrics.JobExecutionDetails() + post_with_metadata.return_value = metrics.JobExecutionDetails(), metadata client.get_job_execution_details( request, @@ -3028,6 +3088,7 @@ def test_get_job_execution_details_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_stage_execution_details_rest_bad_request( @@ -3122,10 +3183,14 @@ def test_get_stage_execution_details_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.MetricsV1Beta3RestInterceptor, "post_get_stage_execution_details" ) as post, mock.patch.object( + transports.MetricsV1Beta3RestInterceptor, + "post_get_stage_execution_details_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.MetricsV1Beta3RestInterceptor, "pre_get_stage_execution_details" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = metrics.GetStageExecutionDetailsRequest.pb( metrics.GetStageExecutionDetailsRequest() ) @@ -3151,6 +3216,7 @@ def test_get_stage_execution_details_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = metrics.StageExecutionDetails() + post_with_metadata.return_value = metrics.StageExecutionDetails(), metadata client.get_stage_execution_details( request, @@ -3162,6 +3228,7 @@ def test_get_stage_execution_details_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_initialize_client_w_rest(): diff --git a/packages/google-cloud-dataflow-client/tests/unit/gapic/dataflow_v1beta3/test_snapshots_v1_beta3.py b/packages/google-cloud-dataflow-client/tests/unit/gapic/dataflow_v1beta3/test_snapshots_v1_beta3.py index 3edcd11f8875..56bfc4600509 100644 --- a/packages/google-cloud-dataflow-client/tests/unit/gapic/dataflow_v1beta3/test_snapshots_v1_beta3.py +++ b/packages/google-cloud-dataflow-client/tests/unit/gapic/dataflow_v1beta3/test_snapshots_v1_beta3.py @@ -61,6 +61,13 @@ ) from google.cloud.dataflow_v1beta3.types import snapshots +CRED_INFO_JSON = { + 
"credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -320,6 +327,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = SnapshotsV1Beta3Client(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = SnapshotsV1Beta3Client(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -2334,10 +2384,13 @@ def test_get_snapshot_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.SnapshotsV1Beta3RestInterceptor, "post_get_snapshot" ) as post, mock.patch.object( + transports.SnapshotsV1Beta3RestInterceptor, "post_get_snapshot_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.SnapshotsV1Beta3RestInterceptor, "pre_get_snapshot" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = snapshots.GetSnapshotRequest.pb(snapshots.GetSnapshotRequest()) transcode.return_value = { "method": "post", @@ -2359,6 +2412,7 @@ def test_get_snapshot_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = snapshots.Snapshot() + post_with_metadata.return_value = snapshots.Snapshot(), metadata client.get_snapshot( request, @@ -2370,6 +2424,7 @@ def test_get_snapshot_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_snapshot_rest_bad_request(request_type=snapshots.DeleteSnapshotRequest): @@ -2457,10 +2512,13 @@ def test_delete_snapshot_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.SnapshotsV1Beta3RestInterceptor, "post_delete_snapshot" ) as post, mock.patch.object( + transports.SnapshotsV1Beta3RestInterceptor, "post_delete_snapshot_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.SnapshotsV1Beta3RestInterceptor, "pre_delete_snapshot" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = snapshots.DeleteSnapshotRequest.pb( snapshots.DeleteSnapshotRequest() ) @@ -2486,6 +2544,7 @@ def 
test_delete_snapshot_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = snapshots.DeleteSnapshotResponse() + post_with_metadata.return_value = snapshots.DeleteSnapshotResponse(), metadata client.delete_snapshot( request, @@ -2497,6 +2556,7 @@ def test_delete_snapshot_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_snapshots_rest_bad_request(request_type=snapshots.ListSnapshotsRequest): @@ -2576,10 +2636,13 @@ def test_list_snapshots_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.SnapshotsV1Beta3RestInterceptor, "post_list_snapshots" ) as post, mock.patch.object( + transports.SnapshotsV1Beta3RestInterceptor, "post_list_snapshots_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.SnapshotsV1Beta3RestInterceptor, "pre_list_snapshots" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = snapshots.ListSnapshotsRequest.pb(snapshots.ListSnapshotsRequest()) transcode.return_value = { "method": "post", @@ -2603,6 +2666,7 @@ def test_list_snapshots_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = snapshots.ListSnapshotsResponse() + post_with_metadata.return_value = snapshots.ListSnapshotsResponse(), metadata client.list_snapshots( request, @@ -2614,6 +2678,7 @@ def test_list_snapshots_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_initialize_client_w_rest(): diff --git a/packages/google-cloud-dataflow-client/tests/unit/gapic/dataflow_v1beta3/test_templates_service.py b/packages/google-cloud-dataflow-client/tests/unit/gapic/dataflow_v1beta3/test_templates_service.py index 906f76fb96db..42d075c5e628 100644 --- a/packages/google-cloud-dataflow-client/tests/unit/gapic/dataflow_v1beta3/test_templates_service.py +++ b/packages/google-cloud-dataflow-client/tests/unit/gapic/dataflow_v1beta3/test_templates_service.py @@ -61,6 +61,13 @@ ) from google.cloud.dataflow_v1beta3.types import environment, jobs, templates +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -320,6 +327,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = TemplatesServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = TemplatesServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -2405,10 +2455,14 @@ def test_create_job_from_template_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.TemplatesServiceRestInterceptor, "post_create_job_from_template" ) as post, mock.patch.object( + transports.TemplatesServiceRestInterceptor, + "post_create_job_from_template_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.TemplatesServiceRestInterceptor, "pre_create_job_from_template" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = templates.CreateJobFromTemplateRequest.pb( templates.CreateJobFromTemplateRequest() ) @@ -2432,6 +2486,7 @@ def test_create_job_from_template_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = jobs.Job() + post_with_metadata.return_value = jobs.Job(), metadata client.create_job_from_template( request, @@ -2443,6 +2498,7 @@ def test_create_job_from_template_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_launch_template_rest_bad_request(request_type=templates.LaunchTemplateRequest): @@ -2616,10 +2672,13 @@ def test_launch_template_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.TemplatesServiceRestInterceptor, "post_launch_template" ) as post, mock.patch.object( + transports.TemplatesServiceRestInterceptor, "post_launch_template_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.TemplatesServiceRestInterceptor, "pre_launch_template" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = templates.LaunchTemplateRequest.pb( templates.LaunchTemplateRequest() ) @@ -2645,6 +2704,7 @@ def test_launch_template_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = templates.LaunchTemplateResponse() + post_with_metadata.return_value = templates.LaunchTemplateResponse(), metadata client.launch_template( request, @@ -2656,6 +2716,7 @@ def test_launch_template_rest_interceptors(null_interceptor): 
pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_template_rest_bad_request(request_type=templates.GetTemplateRequest): @@ -2738,10 +2799,13 @@ def test_get_template_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.TemplatesServiceRestInterceptor, "post_get_template" ) as post, mock.patch.object( + transports.TemplatesServiceRestInterceptor, "post_get_template_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.TemplatesServiceRestInterceptor, "pre_get_template" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = templates.GetTemplateRequest.pb(templates.GetTemplateRequest()) transcode.return_value = { "method": "post", @@ -2765,6 +2829,7 @@ def test_get_template_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = templates.GetTemplateResponse() + post_with_metadata.return_value = templates.GetTemplateResponse(), metadata client.get_template( request, @@ -2776,6 +2841,7 @@ def test_get_template_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_initialize_client_w_rest(): diff --git a/packages/google-cloud-dataform/CHANGELOG.md b/packages/google-cloud-dataform/CHANGELOG.md index faa803625ec2..f998faf7db56 100644 --- a/packages/google-cloud-dataform/CHANGELOG.md +++ b/packages/google-cloud-dataform/CHANGELOG.md @@ -1,5 +1,13 @@ # Changelog +## [0.5.15](https://github.com/googleapis/google-cloud-python/compare/google-cloud-dataform-v0.5.14...google-cloud-dataform-v0.5.15) (2025-02-12) + + +### Features + +* Add REST Interceptors which support reading metadata ([e92d527](https://github.com/googleapis/google-cloud-python/commit/e92d52797ffbce45d033eb81af24e0cad32baa55)) +* Add support for reading selective GAPIC generation methods from service YAML ([e92d527](https://github.com/googleapis/google-cloud-python/commit/e92d52797ffbce45d033eb81af24e0cad32baa55)) + ## [0.5.14](https://github.com/googleapis/google-cloud-python/compare/google-cloud-dataform-v0.5.13...google-cloud-dataform-v0.5.14) (2024-12-12) diff --git a/packages/google-cloud-dataform/google/cloud/dataform/gapic_version.py b/packages/google-cloud-dataform/google/cloud/dataform/gapic_version.py index 0f3dcb10f73a..35c9af734238 100644 --- a/packages/google-cloud-dataform/google/cloud/dataform/gapic_version.py +++ b/packages/google-cloud-dataform/google/cloud/dataform/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.5.14" # {x-release-please-version} +__version__ = "0.5.15" # {x-release-please-version} diff --git a/packages/google-cloud-dataform/google/cloud/dataform_v1beta1/gapic_version.py b/packages/google-cloud-dataform/google/cloud/dataform_v1beta1/gapic_version.py index 0f3dcb10f73a..35c9af734238 100644 --- a/packages/google-cloud-dataform/google/cloud/dataform_v1beta1/gapic_version.py +++ b/packages/google-cloud-dataform/google/cloud/dataform_v1beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.5.14" # {x-release-please-version} +__version__ = "0.5.15" # {x-release-please-version} diff --git a/packages/google-cloud-dataform/google/cloud/dataform_v1beta1/services/dataform/client.py b/packages/google-cloud-dataform/google/cloud/dataform_v1beta1/services/dataform/client.py index 10cb6cbb3c2c..24509e5a8f43 100644 --- a/packages/google-cloud-dataform/google/cloud/dataform_v1beta1/services/dataform/client.py +++ b/packages/google-cloud-dataform/google/cloud/dataform_v1beta1/services/dataform/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -634,6 +636,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -5951,16 +5980,20 @@ def set_iam_policy( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_iam_policy( self, @@ -6073,16 +6106,20 @@ def get_iam_policy( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def test_iam_permissions( self, @@ -6133,16 +6170,20 @@ def test_iam_permissions( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_location( self, @@ -6188,16 +6229,20 @@ def get_location( # Validate the universe domain. 
self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def list_locations( self, @@ -6243,16 +6288,20 @@ def list_locations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-dataform/google/cloud/dataform_v1beta1/services/dataform/transports/rest.py b/packages/google-cloud-dataform/google/cloud/dataform_v1beta1/services/dataform/transports/rest.py index 5c585373e904..2662fe69d156 100644 --- a/packages/google-cloud-dataform/google/cloud/dataform_v1beta1/services/dataform/transports/rest.py +++ b/packages/google-cloud-dataform/google/cloud/dataform_v1beta1/services/dataform/transports/rest.py @@ -497,12 +497,38 @@ def post_compute_repository_access_token_status( ) -> dataform.ComputeRepositoryAccessTokenStatusResponse: """Post-rpc interceptor for compute_repository_access_token_status - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_compute_repository_access_token_status_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Dataform server but before - it is returned to user code. + it is returned to user code. This `post_compute_repository_access_token_status` interceptor runs + before the `post_compute_repository_access_token_status_with_metadata` interceptor. """ return response + def post_compute_repository_access_token_status_with_metadata( + self, + response: dataform.ComputeRepositoryAccessTokenStatusResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + dataform.ComputeRepositoryAccessTokenStatusResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for compute_repository_access_token_status + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Dataform server but before it is returned to user code. + + We recommend only using this `post_compute_repository_access_token_status_with_metadata` + interceptor in new development instead of the `post_compute_repository_access_token_status` interceptor. + When both interceptors are used, this `post_compute_repository_access_token_status_with_metadata` interceptor runs after the + `post_compute_repository_access_token_status` interceptor. The (possibly modified) response returned by + `post_compute_repository_access_token_status` will be passed to + `post_compute_repository_access_token_status_with_metadata`. 
+ """ + return response, metadata + def pre_create_compilation_result( self, request: dataform.CreateCompilationResultRequest, @@ -522,12 +548,35 @@ def post_create_compilation_result( ) -> dataform.CompilationResult: """Post-rpc interceptor for create_compilation_result - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_compilation_result_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Dataform server but before - it is returned to user code. + it is returned to user code. This `post_create_compilation_result` interceptor runs + before the `post_create_compilation_result_with_metadata` interceptor. """ return response + def post_create_compilation_result_with_metadata( + self, + response: dataform.CompilationResult, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[dataform.CompilationResult, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_compilation_result + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Dataform server but before it is returned to user code. + + We recommend only using this `post_create_compilation_result_with_metadata` + interceptor in new development instead of the `post_create_compilation_result` interceptor. + When both interceptors are used, this `post_create_compilation_result_with_metadata` interceptor runs after the + `post_create_compilation_result` interceptor. The (possibly modified) response returned by + `post_create_compilation_result` will be passed to + `post_create_compilation_result_with_metadata`. + """ + return response, metadata + def pre_create_release_config( self, request: dataform.CreateReleaseConfigRequest, @@ -547,12 +596,35 @@ def post_create_release_config( ) -> dataform.ReleaseConfig: """Post-rpc interceptor for create_release_config - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_release_config_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Dataform server but before - it is returned to user code. + it is returned to user code. This `post_create_release_config` interceptor runs + before the `post_create_release_config_with_metadata` interceptor. """ return response + def post_create_release_config_with_metadata( + self, + response: dataform.ReleaseConfig, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[dataform.ReleaseConfig, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_release_config + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Dataform server but before it is returned to user code. + + We recommend only using this `post_create_release_config_with_metadata` + interceptor in new development instead of the `post_create_release_config` interceptor. + When both interceptors are used, this `post_create_release_config_with_metadata` interceptor runs after the + `post_create_release_config` interceptor. The (possibly modified) response returned by + `post_create_release_config` will be passed to + `post_create_release_config_with_metadata`. 
+ """ + return response, metadata + def pre_create_repository( self, request: dataform.CreateRepositoryRequest, @@ -572,12 +644,35 @@ def post_create_repository( ) -> dataform.Repository: """Post-rpc interceptor for create_repository - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_repository_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Dataform server but before - it is returned to user code. + it is returned to user code. This `post_create_repository` interceptor runs + before the `post_create_repository_with_metadata` interceptor. """ return response + def post_create_repository_with_metadata( + self, + response: dataform.Repository, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[dataform.Repository, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_repository + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Dataform server but before it is returned to user code. + + We recommend only using this `post_create_repository_with_metadata` + interceptor in new development instead of the `post_create_repository` interceptor. + When both interceptors are used, this `post_create_repository_with_metadata` interceptor runs after the + `post_create_repository` interceptor. The (possibly modified) response returned by + `post_create_repository` will be passed to + `post_create_repository_with_metadata`. + """ + return response, metadata + def pre_create_workflow_config( self, request: dataform.CreateWorkflowConfigRequest, @@ -597,12 +692,35 @@ def post_create_workflow_config( ) -> dataform.WorkflowConfig: """Post-rpc interceptor for create_workflow_config - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_workflow_config_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Dataform server but before - it is returned to user code. + it is returned to user code. This `post_create_workflow_config` interceptor runs + before the `post_create_workflow_config_with_metadata` interceptor. """ return response + def post_create_workflow_config_with_metadata( + self, + response: dataform.WorkflowConfig, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[dataform.WorkflowConfig, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_workflow_config + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Dataform server but before it is returned to user code. + + We recommend only using this `post_create_workflow_config_with_metadata` + interceptor in new development instead of the `post_create_workflow_config` interceptor. + When both interceptors are used, this `post_create_workflow_config_with_metadata` interceptor runs after the + `post_create_workflow_config` interceptor. The (possibly modified) response returned by + `post_create_workflow_config` will be passed to + `post_create_workflow_config_with_metadata`. + """ + return response, metadata + def pre_create_workflow_invocation( self, request: dataform.CreateWorkflowInvocationRequest, @@ -623,12 +741,35 @@ def post_create_workflow_invocation( ) -> dataform.WorkflowInvocation: """Post-rpc interceptor for create_workflow_invocation - Override in a subclass to manipulate the response + DEPRECATED. 
Please use the `post_create_workflow_invocation_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Dataform server but before - it is returned to user code. + it is returned to user code. This `post_create_workflow_invocation` interceptor runs + before the `post_create_workflow_invocation_with_metadata` interceptor. """ return response + def post_create_workflow_invocation_with_metadata( + self, + response: dataform.WorkflowInvocation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[dataform.WorkflowInvocation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_workflow_invocation + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Dataform server but before it is returned to user code. + + We recommend only using this `post_create_workflow_invocation_with_metadata` + interceptor in new development instead of the `post_create_workflow_invocation` interceptor. + When both interceptors are used, this `post_create_workflow_invocation_with_metadata` interceptor runs after the + `post_create_workflow_invocation` interceptor. The (possibly modified) response returned by + `post_create_workflow_invocation` will be passed to + `post_create_workflow_invocation_with_metadata`. + """ + return response, metadata + def pre_create_workspace( self, request: dataform.CreateWorkspaceRequest, @@ -646,12 +787,35 @@ def pre_create_workspace( def post_create_workspace(self, response: dataform.Workspace) -> dataform.Workspace: """Post-rpc interceptor for create_workspace - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_workspace_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Dataform server but before - it is returned to user code. + it is returned to user code. This `post_create_workspace` interceptor runs + before the `post_create_workspace_with_metadata` interceptor. """ return response + def post_create_workspace_with_metadata( + self, + response: dataform.Workspace, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[dataform.Workspace, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_workspace + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Dataform server but before it is returned to user code. + + We recommend only using this `post_create_workspace_with_metadata` + interceptor in new development instead of the `post_create_workspace` interceptor. + When both interceptors are used, this `post_create_workspace_with_metadata` interceptor runs after the + `post_create_workspace` interceptor. The (possibly modified) response returned by + `post_create_workspace` will be passed to + `post_create_workspace_with_metadata`. + """ + return response, metadata + def pre_delete_release_config( self, request: dataform.DeleteReleaseConfigRequest, @@ -740,12 +904,35 @@ def post_fetch_file_diff( ) -> dataform.FetchFileDiffResponse: """Post-rpc interceptor for fetch_file_diff - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_fetch_file_diff_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Dataform server but before - it is returned to user code. + it is returned to user code. 
This `post_fetch_file_diff` interceptor runs + before the `post_fetch_file_diff_with_metadata` interceptor. """ return response + def post_fetch_file_diff_with_metadata( + self, + response: dataform.FetchFileDiffResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[dataform.FetchFileDiffResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for fetch_file_diff + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Dataform server but before it is returned to user code. + + We recommend only using this `post_fetch_file_diff_with_metadata` + interceptor in new development instead of the `post_fetch_file_diff` interceptor. + When both interceptors are used, this `post_fetch_file_diff_with_metadata` interceptor runs after the + `post_fetch_file_diff` interceptor. The (possibly modified) response returned by + `post_fetch_file_diff` will be passed to + `post_fetch_file_diff_with_metadata`. + """ + return response, metadata + def pre_fetch_file_git_statuses( self, request: dataform.FetchFileGitStatusesRequest, @@ -765,12 +952,37 @@ def post_fetch_file_git_statuses( ) -> dataform.FetchFileGitStatusesResponse: """Post-rpc interceptor for fetch_file_git_statuses - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_fetch_file_git_statuses_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Dataform server but before - it is returned to user code. + it is returned to user code. This `post_fetch_file_git_statuses` interceptor runs + before the `post_fetch_file_git_statuses_with_metadata` interceptor. """ return response + def post_fetch_file_git_statuses_with_metadata( + self, + response: dataform.FetchFileGitStatusesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + dataform.FetchFileGitStatusesResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for fetch_file_git_statuses + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Dataform server but before it is returned to user code. + + We recommend only using this `post_fetch_file_git_statuses_with_metadata` + interceptor in new development instead of the `post_fetch_file_git_statuses` interceptor. + When both interceptors are used, this `post_fetch_file_git_statuses_with_metadata` interceptor runs after the + `post_fetch_file_git_statuses` interceptor. The (possibly modified) response returned by + `post_fetch_file_git_statuses` will be passed to + `post_fetch_file_git_statuses_with_metadata`. + """ + return response, metadata + def pre_fetch_git_ahead_behind( self, request: dataform.FetchGitAheadBehindRequest, @@ -790,12 +1002,37 @@ def post_fetch_git_ahead_behind( ) -> dataform.FetchGitAheadBehindResponse: """Post-rpc interceptor for fetch_git_ahead_behind - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_fetch_git_ahead_behind_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Dataform server but before - it is returned to user code. + it is returned to user code. This `post_fetch_git_ahead_behind` interceptor runs + before the `post_fetch_git_ahead_behind_with_metadata` interceptor. 
""" return response + def post_fetch_git_ahead_behind_with_metadata( + self, + response: dataform.FetchGitAheadBehindResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + dataform.FetchGitAheadBehindResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for fetch_git_ahead_behind + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Dataform server but before it is returned to user code. + + We recommend only using this `post_fetch_git_ahead_behind_with_metadata` + interceptor in new development instead of the `post_fetch_git_ahead_behind` interceptor. + When both interceptors are used, this `post_fetch_git_ahead_behind_with_metadata` interceptor runs after the + `post_fetch_git_ahead_behind` interceptor. The (possibly modified) response returned by + `post_fetch_git_ahead_behind` will be passed to + `post_fetch_git_ahead_behind_with_metadata`. + """ + return response, metadata + def pre_fetch_remote_branches( self, request: dataform.FetchRemoteBranchesRequest, @@ -815,12 +1052,37 @@ def post_fetch_remote_branches( ) -> dataform.FetchRemoteBranchesResponse: """Post-rpc interceptor for fetch_remote_branches - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_fetch_remote_branches_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Dataform server but before - it is returned to user code. + it is returned to user code. This `post_fetch_remote_branches` interceptor runs + before the `post_fetch_remote_branches_with_metadata` interceptor. """ return response + def post_fetch_remote_branches_with_metadata( + self, + response: dataform.FetchRemoteBranchesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + dataform.FetchRemoteBranchesResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for fetch_remote_branches + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Dataform server but before it is returned to user code. + + We recommend only using this `post_fetch_remote_branches_with_metadata` + interceptor in new development instead of the `post_fetch_remote_branches` interceptor. + When both interceptors are used, this `post_fetch_remote_branches_with_metadata` interceptor runs after the + `post_fetch_remote_branches` interceptor. The (possibly modified) response returned by + `post_fetch_remote_branches` will be passed to + `post_fetch_remote_branches_with_metadata`. + """ + return response, metadata + def pre_fetch_repository_history( self, request: dataform.FetchRepositoryHistoryRequest, @@ -840,12 +1102,37 @@ def post_fetch_repository_history( ) -> dataform.FetchRepositoryHistoryResponse: """Post-rpc interceptor for fetch_repository_history - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_fetch_repository_history_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Dataform server but before - it is returned to user code. + it is returned to user code. This `post_fetch_repository_history` interceptor runs + before the `post_fetch_repository_history_with_metadata` interceptor. 
""" return response + def post_fetch_repository_history_with_metadata( + self, + response: dataform.FetchRepositoryHistoryResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + dataform.FetchRepositoryHistoryResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for fetch_repository_history + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Dataform server but before it is returned to user code. + + We recommend only using this `post_fetch_repository_history_with_metadata` + interceptor in new development instead of the `post_fetch_repository_history` interceptor. + When both interceptors are used, this `post_fetch_repository_history_with_metadata` interceptor runs after the + `post_fetch_repository_history` interceptor. The (possibly modified) response returned by + `post_fetch_repository_history` will be passed to + `post_fetch_repository_history_with_metadata`. + """ + return response, metadata + def pre_get_compilation_result( self, request: dataform.GetCompilationResultRequest, @@ -865,12 +1152,35 @@ def post_get_compilation_result( ) -> dataform.CompilationResult: """Post-rpc interceptor for get_compilation_result - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_compilation_result_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Dataform server but before - it is returned to user code. + it is returned to user code. This `post_get_compilation_result` interceptor runs + before the `post_get_compilation_result_with_metadata` interceptor. """ return response + def post_get_compilation_result_with_metadata( + self, + response: dataform.CompilationResult, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[dataform.CompilationResult, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_compilation_result + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Dataform server but before it is returned to user code. + + We recommend only using this `post_get_compilation_result_with_metadata` + interceptor in new development instead of the `post_get_compilation_result` interceptor. + When both interceptors are used, this `post_get_compilation_result_with_metadata` interceptor runs after the + `post_get_compilation_result` interceptor. The (possibly modified) response returned by + `post_get_compilation_result` will be passed to + `post_get_compilation_result_with_metadata`. + """ + return response, metadata + def pre_get_release_config( self, request: dataform.GetReleaseConfigRequest, @@ -890,12 +1200,35 @@ def post_get_release_config( ) -> dataform.ReleaseConfig: """Post-rpc interceptor for get_release_config - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_release_config_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Dataform server but before - it is returned to user code. + it is returned to user code. This `post_get_release_config` interceptor runs + before the `post_get_release_config_with_metadata` interceptor. 
""" return response + def post_get_release_config_with_metadata( + self, + response: dataform.ReleaseConfig, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[dataform.ReleaseConfig, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_release_config + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Dataform server but before it is returned to user code. + + We recommend only using this `post_get_release_config_with_metadata` + interceptor in new development instead of the `post_get_release_config` interceptor. + When both interceptors are used, this `post_get_release_config_with_metadata` interceptor runs after the + `post_get_release_config` interceptor. The (possibly modified) response returned by + `post_get_release_config` will be passed to + `post_get_release_config_with_metadata`. + """ + return response, metadata + def pre_get_repository( self, request: dataform.GetRepositoryRequest, @@ -911,12 +1244,35 @@ def pre_get_repository( def post_get_repository(self, response: dataform.Repository) -> dataform.Repository: """Post-rpc interceptor for get_repository - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_repository_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Dataform server but before - it is returned to user code. + it is returned to user code. This `post_get_repository` interceptor runs + before the `post_get_repository_with_metadata` interceptor. """ return response + def post_get_repository_with_metadata( + self, + response: dataform.Repository, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[dataform.Repository, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_repository + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Dataform server but before it is returned to user code. + + We recommend only using this `post_get_repository_with_metadata` + interceptor in new development instead of the `post_get_repository` interceptor. + When both interceptors are used, this `post_get_repository_with_metadata` interceptor runs after the + `post_get_repository` interceptor. The (possibly modified) response returned by + `post_get_repository` will be passed to + `post_get_repository_with_metadata`. + """ + return response, metadata + def pre_get_workflow_config( self, request: dataform.GetWorkflowConfigRequest, @@ -936,12 +1292,35 @@ def post_get_workflow_config( ) -> dataform.WorkflowConfig: """Post-rpc interceptor for get_workflow_config - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_workflow_config_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Dataform server but before - it is returned to user code. + it is returned to user code. This `post_get_workflow_config` interceptor runs + before the `post_get_workflow_config_with_metadata` interceptor. 
""" return response + def post_get_workflow_config_with_metadata( + self, + response: dataform.WorkflowConfig, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[dataform.WorkflowConfig, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_workflow_config + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Dataform server but before it is returned to user code. + + We recommend only using this `post_get_workflow_config_with_metadata` + interceptor in new development instead of the `post_get_workflow_config` interceptor. + When both interceptors are used, this `post_get_workflow_config_with_metadata` interceptor runs after the + `post_get_workflow_config` interceptor. The (possibly modified) response returned by + `post_get_workflow_config` will be passed to + `post_get_workflow_config_with_metadata`. + """ + return response, metadata + def pre_get_workflow_invocation( self, request: dataform.GetWorkflowInvocationRequest, @@ -961,12 +1340,35 @@ def post_get_workflow_invocation( ) -> dataform.WorkflowInvocation: """Post-rpc interceptor for get_workflow_invocation - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_workflow_invocation_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Dataform server but before - it is returned to user code. + it is returned to user code. This `post_get_workflow_invocation` interceptor runs + before the `post_get_workflow_invocation_with_metadata` interceptor. """ return response + def post_get_workflow_invocation_with_metadata( + self, + response: dataform.WorkflowInvocation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[dataform.WorkflowInvocation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_workflow_invocation + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Dataform server but before it is returned to user code. + + We recommend only using this `post_get_workflow_invocation_with_metadata` + interceptor in new development instead of the `post_get_workflow_invocation` interceptor. + When both interceptors are used, this `post_get_workflow_invocation_with_metadata` interceptor runs after the + `post_get_workflow_invocation` interceptor. The (possibly modified) response returned by + `post_get_workflow_invocation` will be passed to + `post_get_workflow_invocation_with_metadata`. + """ + return response, metadata + def pre_get_workspace( self, request: dataform.GetWorkspaceRequest, @@ -982,12 +1384,35 @@ def pre_get_workspace( def post_get_workspace(self, response: dataform.Workspace) -> dataform.Workspace: """Post-rpc interceptor for get_workspace - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_workspace_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Dataform server but before - it is returned to user code. + it is returned to user code. This `post_get_workspace` interceptor runs + before the `post_get_workspace_with_metadata` interceptor. 
""" return response + def post_get_workspace_with_metadata( + self, + response: dataform.Workspace, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[dataform.Workspace, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_workspace + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Dataform server but before it is returned to user code. + + We recommend only using this `post_get_workspace_with_metadata` + interceptor in new development instead of the `post_get_workspace` interceptor. + When both interceptors are used, this `post_get_workspace_with_metadata` interceptor runs after the + `post_get_workspace` interceptor. The (possibly modified) response returned by + `post_get_workspace` will be passed to + `post_get_workspace_with_metadata`. + """ + return response, metadata + def pre_install_npm_packages( self, request: dataform.InstallNpmPackagesRequest, @@ -1007,12 +1432,37 @@ def post_install_npm_packages( ) -> dataform.InstallNpmPackagesResponse: """Post-rpc interceptor for install_npm_packages - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_install_npm_packages_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Dataform server but before - it is returned to user code. + it is returned to user code. This `post_install_npm_packages` interceptor runs + before the `post_install_npm_packages_with_metadata` interceptor. """ return response + def post_install_npm_packages_with_metadata( + self, + response: dataform.InstallNpmPackagesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + dataform.InstallNpmPackagesResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for install_npm_packages + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Dataform server but before it is returned to user code. + + We recommend only using this `post_install_npm_packages_with_metadata` + interceptor in new development instead of the `post_install_npm_packages` interceptor. + When both interceptors are used, this `post_install_npm_packages_with_metadata` interceptor runs after the + `post_install_npm_packages` interceptor. The (possibly modified) response returned by + `post_install_npm_packages` will be passed to + `post_install_npm_packages_with_metadata`. + """ + return response, metadata + def pre_list_compilation_results( self, request: dataform.ListCompilationResultsRequest, @@ -1032,12 +1482,37 @@ def post_list_compilation_results( ) -> dataform.ListCompilationResultsResponse: """Post-rpc interceptor for list_compilation_results - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_compilation_results_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Dataform server but before - it is returned to user code. + it is returned to user code. This `post_list_compilation_results` interceptor runs + before the `post_list_compilation_results_with_metadata` interceptor. 
""" return response + def post_list_compilation_results_with_metadata( + self, + response: dataform.ListCompilationResultsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + dataform.ListCompilationResultsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_compilation_results + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Dataform server but before it is returned to user code. + + We recommend only using this `post_list_compilation_results_with_metadata` + interceptor in new development instead of the `post_list_compilation_results` interceptor. + When both interceptors are used, this `post_list_compilation_results_with_metadata` interceptor runs after the + `post_list_compilation_results` interceptor. The (possibly modified) response returned by + `post_list_compilation_results` will be passed to + `post_list_compilation_results_with_metadata`. + """ + return response, metadata + def pre_list_release_configs( self, request: dataform.ListReleaseConfigsRequest, @@ -1057,12 +1532,37 @@ def post_list_release_configs( ) -> dataform.ListReleaseConfigsResponse: """Post-rpc interceptor for list_release_configs - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_release_configs_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Dataform server but before - it is returned to user code. + it is returned to user code. This `post_list_release_configs` interceptor runs + before the `post_list_release_configs_with_metadata` interceptor. """ return response + def post_list_release_configs_with_metadata( + self, + response: dataform.ListReleaseConfigsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + dataform.ListReleaseConfigsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_release_configs + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Dataform server but before it is returned to user code. + + We recommend only using this `post_list_release_configs_with_metadata` + interceptor in new development instead of the `post_list_release_configs` interceptor. + When both interceptors are used, this `post_list_release_configs_with_metadata` interceptor runs after the + `post_list_release_configs` interceptor. The (possibly modified) response returned by + `post_list_release_configs` will be passed to + `post_list_release_configs_with_metadata`. + """ + return response, metadata + def pre_list_repositories( self, request: dataform.ListRepositoriesRequest, @@ -1082,12 +1582,37 @@ def post_list_repositories( ) -> dataform.ListRepositoriesResponse: """Post-rpc interceptor for list_repositories - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_repositories_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Dataform server but before - it is returned to user code. + it is returned to user code. This `post_list_repositories` interceptor runs + before the `post_list_repositories_with_metadata` interceptor. 
""" return response + def post_list_repositories_with_metadata( + self, + response: dataform.ListRepositoriesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + dataform.ListRepositoriesResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_repositories + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Dataform server but before it is returned to user code. + + We recommend only using this `post_list_repositories_with_metadata` + interceptor in new development instead of the `post_list_repositories` interceptor. + When both interceptors are used, this `post_list_repositories_with_metadata` interceptor runs after the + `post_list_repositories` interceptor. The (possibly modified) response returned by + `post_list_repositories` will be passed to + `post_list_repositories_with_metadata`. + """ + return response, metadata + def pre_list_workflow_configs( self, request: dataform.ListWorkflowConfigsRequest, @@ -1107,12 +1632,37 @@ def post_list_workflow_configs( ) -> dataform.ListWorkflowConfigsResponse: """Post-rpc interceptor for list_workflow_configs - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_workflow_configs_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Dataform server but before - it is returned to user code. + it is returned to user code. This `post_list_workflow_configs` interceptor runs + before the `post_list_workflow_configs_with_metadata` interceptor. """ return response + def post_list_workflow_configs_with_metadata( + self, + response: dataform.ListWorkflowConfigsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + dataform.ListWorkflowConfigsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_workflow_configs + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Dataform server but before it is returned to user code. + + We recommend only using this `post_list_workflow_configs_with_metadata` + interceptor in new development instead of the `post_list_workflow_configs` interceptor. + When both interceptors are used, this `post_list_workflow_configs_with_metadata` interceptor runs after the + `post_list_workflow_configs` interceptor. The (possibly modified) response returned by + `post_list_workflow_configs` will be passed to + `post_list_workflow_configs_with_metadata`. + """ + return response, metadata + def pre_list_workflow_invocations( self, request: dataform.ListWorkflowInvocationsRequest, @@ -1132,12 +1682,38 @@ def post_list_workflow_invocations( ) -> dataform.ListWorkflowInvocationsResponse: """Post-rpc interceptor for list_workflow_invocations - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_workflow_invocations_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Dataform server but before - it is returned to user code. + it is returned to user code. This `post_list_workflow_invocations` interceptor runs + before the `post_list_workflow_invocations_with_metadata` interceptor. 
""" return response + def post_list_workflow_invocations_with_metadata( + self, + response: dataform.ListWorkflowInvocationsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + dataform.ListWorkflowInvocationsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_workflow_invocations + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Dataform server but before it is returned to user code. + + We recommend only using this `post_list_workflow_invocations_with_metadata` + interceptor in new development instead of the `post_list_workflow_invocations` interceptor. + When both interceptors are used, this `post_list_workflow_invocations_with_metadata` interceptor runs after the + `post_list_workflow_invocations` interceptor. The (possibly modified) response returned by + `post_list_workflow_invocations` will be passed to + `post_list_workflow_invocations_with_metadata`. + """ + return response, metadata + def pre_list_workspaces( self, request: dataform.ListWorkspacesRequest, @@ -1155,12 +1731,37 @@ def post_list_workspaces( ) -> dataform.ListWorkspacesResponse: """Post-rpc interceptor for list_workspaces - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_workspaces_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Dataform server but before - it is returned to user code. + it is returned to user code. This `post_list_workspaces` interceptor runs + before the `post_list_workspaces_with_metadata` interceptor. """ return response + def post_list_workspaces_with_metadata( + self, + response: dataform.ListWorkspacesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + dataform.ListWorkspacesResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_workspaces + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Dataform server but before it is returned to user code. + + We recommend only using this `post_list_workspaces_with_metadata` + interceptor in new development instead of the `post_list_workspaces` interceptor. + When both interceptors are used, this `post_list_workspaces_with_metadata` interceptor runs after the + `post_list_workspaces` interceptor. The (possibly modified) response returned by + `post_list_workspaces` will be passed to + `post_list_workspaces_with_metadata`. + """ + return response, metadata + def pre_make_directory( self, request: dataform.MakeDirectoryRequest, @@ -1178,12 +1779,35 @@ def post_make_directory( ) -> dataform.MakeDirectoryResponse: """Post-rpc interceptor for make_directory - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_make_directory_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Dataform server but before - it is returned to user code. + it is returned to user code. This `post_make_directory` interceptor runs + before the `post_make_directory_with_metadata` interceptor. 
""" return response + def post_make_directory_with_metadata( + self, + response: dataform.MakeDirectoryResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[dataform.MakeDirectoryResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for make_directory + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Dataform server but before it is returned to user code. + + We recommend only using this `post_make_directory_with_metadata` + interceptor in new development instead of the `post_make_directory` interceptor. + When both interceptors are used, this `post_make_directory_with_metadata` interceptor runs after the + `post_make_directory` interceptor. The (possibly modified) response returned by + `post_make_directory` will be passed to + `post_make_directory_with_metadata`. + """ + return response, metadata + def pre_move_directory( self, request: dataform.MoveDirectoryRequest, @@ -1201,12 +1825,35 @@ def post_move_directory( ) -> dataform.MoveDirectoryResponse: """Post-rpc interceptor for move_directory - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_move_directory_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Dataform server but before - it is returned to user code. + it is returned to user code. This `post_move_directory` interceptor runs + before the `post_move_directory_with_metadata` interceptor. """ return response + def post_move_directory_with_metadata( + self, + response: dataform.MoveDirectoryResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[dataform.MoveDirectoryResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for move_directory + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Dataform server but before it is returned to user code. + + We recommend only using this `post_move_directory_with_metadata` + interceptor in new development instead of the `post_move_directory` interceptor. + When both interceptors are used, this `post_move_directory_with_metadata` interceptor runs after the + `post_move_directory` interceptor. The (possibly modified) response returned by + `post_move_directory` will be passed to + `post_move_directory_with_metadata`. + """ + return response, metadata + def pre_move_file( self, request: dataform.MoveFileRequest, @@ -1224,12 +1871,35 @@ def post_move_file( ) -> dataform.MoveFileResponse: """Post-rpc interceptor for move_file - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_move_file_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Dataform server but before - it is returned to user code. + it is returned to user code. This `post_move_file` interceptor runs + before the `post_move_file_with_metadata` interceptor. """ return response + def post_move_file_with_metadata( + self, + response: dataform.MoveFileResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[dataform.MoveFileResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for move_file + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Dataform server but before it is returned to user code. 
+ + We recommend only using this `post_move_file_with_metadata` + interceptor in new development instead of the `post_move_file` interceptor. + When both interceptors are used, this `post_move_file_with_metadata` interceptor runs after the + `post_move_file` interceptor. The (possibly modified) response returned by + `post_move_file` will be passed to + `post_move_file_with_metadata`. + """ + return response, metadata + def pre_pull_git_commits( self, request: dataform.PullGitCommitsRequest, @@ -1274,12 +1944,38 @@ def post_query_compilation_result_actions( ) -> dataform.QueryCompilationResultActionsResponse: """Post-rpc interceptor for query_compilation_result_actions - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_query_compilation_result_actions_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Dataform server but before - it is returned to user code. + it is returned to user code. This `post_query_compilation_result_actions` interceptor runs + before the `post_query_compilation_result_actions_with_metadata` interceptor. """ return response + def post_query_compilation_result_actions_with_metadata( + self, + response: dataform.QueryCompilationResultActionsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + dataform.QueryCompilationResultActionsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for query_compilation_result_actions + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Dataform server but before it is returned to user code. + + We recommend only using this `post_query_compilation_result_actions_with_metadata` + interceptor in new development instead of the `post_query_compilation_result_actions` interceptor. + When both interceptors are used, this `post_query_compilation_result_actions_with_metadata` interceptor runs after the + `post_query_compilation_result_actions` interceptor. The (possibly modified) response returned by + `post_query_compilation_result_actions` will be passed to + `post_query_compilation_result_actions_with_metadata`. + """ + return response, metadata + def pre_query_directory_contents( self, request: dataform.QueryDirectoryContentsRequest, @@ -1299,12 +1995,37 @@ def post_query_directory_contents( ) -> dataform.QueryDirectoryContentsResponse: """Post-rpc interceptor for query_directory_contents - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_query_directory_contents_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Dataform server but before - it is returned to user code. + it is returned to user code. This `post_query_directory_contents` interceptor runs + before the `post_query_directory_contents_with_metadata` interceptor. """ return response + def post_query_directory_contents_with_metadata( + self, + response: dataform.QueryDirectoryContentsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + dataform.QueryDirectoryContentsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for query_directory_contents + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Dataform server but before it is returned to user code. 
+ + We recommend only using this `post_query_directory_contents_with_metadata` + interceptor in new development instead of the `post_query_directory_contents` interceptor. + When both interceptors are used, this `post_query_directory_contents_with_metadata` interceptor runs after the + `post_query_directory_contents` interceptor. The (possibly modified) response returned by + `post_query_directory_contents` will be passed to + `post_query_directory_contents_with_metadata`. + """ + return response, metadata + def pre_query_repository_directory_contents( self, request: dataform.QueryRepositoryDirectoryContentsRequest, @@ -1325,12 +2046,38 @@ def post_query_repository_directory_contents( ) -> dataform.QueryRepositoryDirectoryContentsResponse: """Post-rpc interceptor for query_repository_directory_contents - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_query_repository_directory_contents_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Dataform server but before - it is returned to user code. + it is returned to user code. This `post_query_repository_directory_contents` interceptor runs + before the `post_query_repository_directory_contents_with_metadata` interceptor. """ return response + def post_query_repository_directory_contents_with_metadata( + self, + response: dataform.QueryRepositoryDirectoryContentsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + dataform.QueryRepositoryDirectoryContentsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for query_repository_directory_contents + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Dataform server but before it is returned to user code. + + We recommend only using this `post_query_repository_directory_contents_with_metadata` + interceptor in new development instead of the `post_query_repository_directory_contents` interceptor. + When both interceptors are used, this `post_query_repository_directory_contents_with_metadata` interceptor runs after the + `post_query_repository_directory_contents` interceptor. The (possibly modified) response returned by + `post_query_repository_directory_contents` will be passed to + `post_query_repository_directory_contents_with_metadata`. + """ + return response, metadata + def pre_query_workflow_invocation_actions( self, request: dataform.QueryWorkflowInvocationActionsRequest, @@ -1351,12 +2098,38 @@ def post_query_workflow_invocation_actions( ) -> dataform.QueryWorkflowInvocationActionsResponse: """Post-rpc interceptor for query_workflow_invocation_actions - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_query_workflow_invocation_actions_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Dataform server but before - it is returned to user code. + it is returned to user code. This `post_query_workflow_invocation_actions` interceptor runs + before the `post_query_workflow_invocation_actions_with_metadata` interceptor. 
""" return response + def post_query_workflow_invocation_actions_with_metadata( + self, + response: dataform.QueryWorkflowInvocationActionsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + dataform.QueryWorkflowInvocationActionsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for query_workflow_invocation_actions + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Dataform server but before it is returned to user code. + + We recommend only using this `post_query_workflow_invocation_actions_with_metadata` + interceptor in new development instead of the `post_query_workflow_invocation_actions` interceptor. + When both interceptors are used, this `post_query_workflow_invocation_actions_with_metadata` interceptor runs after the + `post_query_workflow_invocation_actions` interceptor. The (possibly modified) response returned by + `post_query_workflow_invocation_actions` will be passed to + `post_query_workflow_invocation_actions_with_metadata`. + """ + return response, metadata + def pre_read_file( self, request: dataform.ReadFileRequest, @@ -1374,12 +2147,35 @@ def post_read_file( ) -> dataform.ReadFileResponse: """Post-rpc interceptor for read_file - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_read_file_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Dataform server but before - it is returned to user code. + it is returned to user code. This `post_read_file` interceptor runs + before the `post_read_file_with_metadata` interceptor. """ return response + def post_read_file_with_metadata( + self, + response: dataform.ReadFileResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[dataform.ReadFileResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for read_file + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Dataform server but before it is returned to user code. + + We recommend only using this `post_read_file_with_metadata` + interceptor in new development instead of the `post_read_file` interceptor. + When both interceptors are used, this `post_read_file_with_metadata` interceptor runs after the + `post_read_file` interceptor. The (possibly modified) response returned by + `post_read_file` will be passed to + `post_read_file_with_metadata`. + """ + return response, metadata + def pre_read_repository_file( self, request: dataform.ReadRepositoryFileRequest, @@ -1399,12 +2195,37 @@ def post_read_repository_file( ) -> dataform.ReadRepositoryFileResponse: """Post-rpc interceptor for read_repository_file - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_read_repository_file_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Dataform server but before - it is returned to user code. + it is returned to user code. This `post_read_repository_file` interceptor runs + before the `post_read_repository_file_with_metadata` interceptor. 
""" return response + def post_read_repository_file_with_metadata( + self, + response: dataform.ReadRepositoryFileResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + dataform.ReadRepositoryFileResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for read_repository_file + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Dataform server but before it is returned to user code. + + We recommend only using this `post_read_repository_file_with_metadata` + interceptor in new development instead of the `post_read_repository_file` interceptor. + When both interceptors are used, this `post_read_repository_file_with_metadata` interceptor runs after the + `post_read_repository_file` interceptor. The (possibly modified) response returned by + `post_read_repository_file` will be passed to + `post_read_repository_file_with_metadata`. + """ + return response, metadata + def pre_remove_directory( self, request: dataform.RemoveDirectoryRequest, @@ -1464,12 +2285,35 @@ def post_update_release_config( ) -> dataform.ReleaseConfig: """Post-rpc interceptor for update_release_config - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_release_config_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Dataform server but before - it is returned to user code. + it is returned to user code. This `post_update_release_config` interceptor runs + before the `post_update_release_config_with_metadata` interceptor. """ return response + def post_update_release_config_with_metadata( + self, + response: dataform.ReleaseConfig, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[dataform.ReleaseConfig, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_release_config + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Dataform server but before it is returned to user code. + + We recommend only using this `post_update_release_config_with_metadata` + interceptor in new development instead of the `post_update_release_config` interceptor. + When both interceptors are used, this `post_update_release_config_with_metadata` interceptor runs after the + `post_update_release_config` interceptor. The (possibly modified) response returned by + `post_update_release_config` will be passed to + `post_update_release_config_with_metadata`. + """ + return response, metadata + def pre_update_repository( self, request: dataform.UpdateRepositoryRequest, @@ -1489,12 +2333,35 @@ def post_update_repository( ) -> dataform.Repository: """Post-rpc interceptor for update_repository - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_repository_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Dataform server but before - it is returned to user code. + it is returned to user code. This `post_update_repository` interceptor runs + before the `post_update_repository_with_metadata` interceptor. 
""" return response + def post_update_repository_with_metadata( + self, + response: dataform.Repository, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[dataform.Repository, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_repository + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Dataform server but before it is returned to user code. + + We recommend only using this `post_update_repository_with_metadata` + interceptor in new development instead of the `post_update_repository` interceptor. + When both interceptors are used, this `post_update_repository_with_metadata` interceptor runs after the + `post_update_repository` interceptor. The (possibly modified) response returned by + `post_update_repository` will be passed to + `post_update_repository_with_metadata`. + """ + return response, metadata + def pre_update_workflow_config( self, request: dataform.UpdateWorkflowConfigRequest, @@ -1514,12 +2381,35 @@ def post_update_workflow_config( ) -> dataform.WorkflowConfig: """Post-rpc interceptor for update_workflow_config - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_workflow_config_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Dataform server but before - it is returned to user code. + it is returned to user code. This `post_update_workflow_config` interceptor runs + before the `post_update_workflow_config_with_metadata` interceptor. """ return response + def post_update_workflow_config_with_metadata( + self, + response: dataform.WorkflowConfig, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[dataform.WorkflowConfig, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_workflow_config + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Dataform server but before it is returned to user code. + + We recommend only using this `post_update_workflow_config_with_metadata` + interceptor in new development instead of the `post_update_workflow_config` interceptor. + When both interceptors are used, this `post_update_workflow_config_with_metadata` interceptor runs after the + `post_update_workflow_config` interceptor. The (possibly modified) response returned by + `post_update_workflow_config` will be passed to + `post_update_workflow_config_with_metadata`. + """ + return response, metadata + def pre_write_file( self, request: dataform.WriteFileRequest, @@ -1537,12 +2427,35 @@ def post_write_file( ) -> dataform.WriteFileResponse: """Post-rpc interceptor for write_file - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_write_file_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Dataform server but before - it is returned to user code. + it is returned to user code. This `post_write_file` interceptor runs + before the `post_write_file_with_metadata` interceptor. 
""" return response + def post_write_file_with_metadata( + self, + response: dataform.WriteFileResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[dataform.WriteFileResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for write_file + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Dataform server but before it is returned to user code. + + We recommend only using this `post_write_file_with_metadata` + interceptor in new development instead of the `post_write_file` interceptor. + When both interceptors are used, this `post_write_file_with_metadata` interceptor runs after the + `post_write_file` interceptor. The (possibly modified) response returned by + `post_write_file` will be passed to + `post_write_file_with_metadata`. + """ + return response, metadata + def pre_get_location( self, request: locations_pb2.GetLocationRequest, @@ -2222,6 +3135,13 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_compute_repository_access_token_status(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_compute_repository_access_token_status_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2376,6 +3296,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_compilation_result(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_compilation_result_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2526,6 +3450,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_release_config(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_release_config_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2678,6 +3606,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_repository(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_repository_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2828,6 +3760,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_workflow_config(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_workflow_config_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2979,6 +3915,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_workflow_invocation(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_workflow_invocation_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and 
_LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3133,6 +4073,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_workspace(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_workspace_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3824,6 +4768,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_fetch_file_diff(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_fetch_file_diff_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3966,6 +4914,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_fetch_file_git_statuses(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_fetch_file_git_statuses_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4110,6 +5062,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_fetch_git_ahead_behind(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_fetch_git_ahead_behind_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4254,6 +5210,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_fetch_remote_branches(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_fetch_remote_branches_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4398,6 +5358,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_fetch_repository_history(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_fetch_repository_history_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4544,6 +5508,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_compilation_result(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_compilation_result_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4690,6 +5658,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_release_config(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_release_config_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4834,6 +5806,10 @@ def __call__( 
json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_repository(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_repository_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4978,6 +5954,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_workflow_config(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_workflow_config_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -5122,6 +6102,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_workflow_invocation(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_workflow_invocation_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -5264,6 +6248,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_workspace(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_workspace_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -5412,6 +6400,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_install_npm_packages(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_install_npm_packages_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -5556,6 +6548,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_compilation_results(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_compilation_results_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -5700,6 +6696,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_release_configs(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_release_configs_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -5846,6 +6846,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_repositories(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_repositories_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -5990,6 +6994,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = 
self._interceptor.post_list_workflow_configs(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_workflow_configs_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -6134,6 +7142,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_workflow_invocations(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_workflow_invocations_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -6280,6 +7292,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_workspaces(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_workspaces_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -6430,6 +7446,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_make_directory(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_make_directory_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -6580,6 +7600,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_move_directory(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_move_directory_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -6726,6 +7750,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_move_file(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_move_file_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -7111,6 +8139,13 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_query_compilation_result_actions(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_query_compilation_result_actions_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -7255,6 +8290,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_query_directory_contents(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_query_directory_contents_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -7406,6 +8445,13 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_query_repository_directory_contents(resp) + response_metadata = [(k, str(v)) for k, 
v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_query_repository_directory_contents_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -7555,6 +8601,13 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_query_workflow_invocation_actions(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_query_workflow_invocation_actions_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -7699,6 +8752,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_read_file(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_read_file_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -7841,6 +8898,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_read_repository_file(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_read_repository_file_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -8341,6 +9402,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_release_config(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_release_config_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -8493,6 +9558,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_repository(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_repository_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -8643,6 +9712,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_workflow_config(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_workflow_config_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -8789,6 +9862,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_write_file(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_write_file_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-dataform/samples/generated_samples/snippet_metadata_google.cloud.dataform.v1beta1.json b/packages/google-cloud-dataform/samples/generated_samples/snippet_metadata_google.cloud.dataform.v1beta1.json index d950e4906c7a..aa3ca5579d41 100644 --- 
a/packages/google-cloud-dataform/samples/generated_samples/snippet_metadata_google.cloud.dataform.v1beta1.json +++ b/packages/google-cloud-dataform/samples/generated_samples/snippet_metadata_google.cloud.dataform.v1beta1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-dataform", - "version": "0.5.14" + "version": "0.5.15" }, "snippets": [ { diff --git a/packages/google-cloud-dataform/tests/unit/gapic/dataform_v1beta1/test_dataform.py b/packages/google-cloud-dataform/tests/unit/gapic/dataform_v1beta1/test_dataform.py index 1cd3264c023e..2d94ee3f3c6b 100644 --- a/packages/google-cloud-dataform/tests/unit/gapic/dataform_v1beta1/test_dataform.py +++ b/packages/google-cloud-dataform/tests/unit/gapic/dataform_v1beta1/test_dataform.py @@ -69,6 +69,13 @@ ) from google.cloud.dataform_v1beta1.types import dataform +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -299,6 +306,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = DataformClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = DataformClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -30348,10 +30398,13 @@ def test_list_repositories_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DataformRestInterceptor, "post_list_repositories" ) as post, mock.patch.object( + transports.DataformRestInterceptor, "post_list_repositories_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DataformRestInterceptor, "pre_list_repositories" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = dataform.ListRepositoriesRequest.pb( dataform.ListRepositoriesRequest() ) @@ -30377,6 +30430,7 @@ def test_list_repositories_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = dataform.ListRepositoriesResponse() + post_with_metadata.return_value = dataform.ListRepositoriesResponse(), metadata client.list_repositories( request, @@ -30388,6 +30442,7 @@ def 
test_list_repositories_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_repository_rest_bad_request(request_type=dataform.GetRepositoryRequest): @@ -30479,10 +30534,13 @@ def test_get_repository_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DataformRestInterceptor, "post_get_repository" ) as post, mock.patch.object( + transports.DataformRestInterceptor, "post_get_repository_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DataformRestInterceptor, "pre_get_repository" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = dataform.GetRepositoryRequest.pb(dataform.GetRepositoryRequest()) transcode.return_value = { "method": "post", @@ -30504,6 +30562,7 @@ def test_get_repository_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = dataform.Repository() + post_with_metadata.return_value = dataform.Repository(), metadata client.get_repository( request, @@ -30515,6 +30574,7 @@ def test_get_repository_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_repository_rest_bad_request( @@ -30698,10 +30758,13 @@ def test_create_repository_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DataformRestInterceptor, "post_create_repository" ) as post, mock.patch.object( + transports.DataformRestInterceptor, "post_create_repository_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DataformRestInterceptor, "pre_create_repository" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = dataform.CreateRepositoryRequest.pb( dataform.CreateRepositoryRequest() ) @@ -30725,6 +30788,7 @@ def test_create_repository_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = dataform.Repository() + post_with_metadata.return_value = dataform.Repository(), metadata client.create_repository( request, @@ -30736,6 +30800,7 @@ def test_create_repository_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_repository_rest_bad_request( @@ -30927,10 +30992,13 @@ def test_update_repository_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DataformRestInterceptor, "post_update_repository" ) as post, mock.patch.object( + transports.DataformRestInterceptor, "post_update_repository_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DataformRestInterceptor, "pre_update_repository" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = dataform.UpdateRepositoryRequest.pb( dataform.UpdateRepositoryRequest() ) @@ -30954,6 +31022,7 @@ def test_update_repository_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = dataform.Repository() + post_with_metadata.return_value = dataform.Repository(), metadata client.update_repository( request, @@ -30965,6 +31034,7 @@ def test_update_repository_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_repository_rest_bad_request( @@ -31261,10 +31331,13 @@ def 
test_read_repository_file_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DataformRestInterceptor, "post_read_repository_file" ) as post, mock.patch.object( + transports.DataformRestInterceptor, "post_read_repository_file_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DataformRestInterceptor, "pre_read_repository_file" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = dataform.ReadRepositoryFileRequest.pb( dataform.ReadRepositoryFileRequest() ) @@ -31290,6 +31363,10 @@ def test_read_repository_file_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = dataform.ReadRepositoryFileResponse() + post_with_metadata.return_value = ( + dataform.ReadRepositoryFileResponse(), + metadata, + ) client.read_repository_file( request, @@ -31301,6 +31378,7 @@ def test_read_repository_file_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_query_repository_directory_contents_rest_bad_request( @@ -31385,10 +31463,14 @@ def test_query_repository_directory_contents_rest_interceptors(null_interceptor) ) as transcode, mock.patch.object( transports.DataformRestInterceptor, "post_query_repository_directory_contents" ) as post, mock.patch.object( + transports.DataformRestInterceptor, + "post_query_repository_directory_contents_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DataformRestInterceptor, "pre_query_repository_directory_contents" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = dataform.QueryRepositoryDirectoryContentsRequest.pb( dataform.QueryRepositoryDirectoryContentsRequest() ) @@ -31414,6 +31496,10 @@ def test_query_repository_directory_contents_rest_interceptors(null_interceptor) ] pre.return_value = request, metadata post.return_value = dataform.QueryRepositoryDirectoryContentsResponse() + post_with_metadata.return_value = ( + dataform.QueryRepositoryDirectoryContentsResponse(), + metadata, + ) client.query_repository_directory_contents( request, @@ -31425,6 +31511,7 @@ def test_query_repository_directory_contents_rest_interceptors(null_interceptor) pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_fetch_repository_history_rest_bad_request( @@ -31507,10 +31594,14 @@ def test_fetch_repository_history_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DataformRestInterceptor, "post_fetch_repository_history" ) as post, mock.patch.object( + transports.DataformRestInterceptor, + "post_fetch_repository_history_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DataformRestInterceptor, "pre_fetch_repository_history" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = dataform.FetchRepositoryHistoryRequest.pb( dataform.FetchRepositoryHistoryRequest() ) @@ -31536,6 +31627,10 @@ def test_fetch_repository_history_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = dataform.FetchRepositoryHistoryResponse() + post_with_metadata.return_value = ( + dataform.FetchRepositoryHistoryResponse(), + metadata, + ) client.fetch_repository_history( request, @@ -31547,6 +31642,7 @@ def test_fetch_repository_history_rest_interceptors(null_interceptor): pre.assert_called_once() 
post.assert_called_once() + post_with_metadata.assert_called_once() def test_compute_repository_access_token_status_rest_bad_request( @@ -31635,10 +31731,14 @@ def test_compute_repository_access_token_status_rest_interceptors(null_intercept transports.DataformRestInterceptor, "post_compute_repository_access_token_status", ) as post, mock.patch.object( + transports.DataformRestInterceptor, + "post_compute_repository_access_token_status_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DataformRestInterceptor, "pre_compute_repository_access_token_status" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = dataform.ComputeRepositoryAccessTokenStatusRequest.pb( dataform.ComputeRepositoryAccessTokenStatusRequest() ) @@ -31664,6 +31764,10 @@ def test_compute_repository_access_token_status_rest_interceptors(null_intercept ] pre.return_value = request, metadata post.return_value = dataform.ComputeRepositoryAccessTokenStatusResponse() + post_with_metadata.return_value = ( + dataform.ComputeRepositoryAccessTokenStatusResponse(), + metadata, + ) client.compute_repository_access_token_status( request, @@ -31675,6 +31779,7 @@ def test_compute_repository_access_token_status_rest_interceptors(null_intercept pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_fetch_remote_branches_rest_bad_request( @@ -31757,10 +31862,13 @@ def test_fetch_remote_branches_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DataformRestInterceptor, "post_fetch_remote_branches" ) as post, mock.patch.object( + transports.DataformRestInterceptor, "post_fetch_remote_branches_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DataformRestInterceptor, "pre_fetch_remote_branches" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = dataform.FetchRemoteBranchesRequest.pb( dataform.FetchRemoteBranchesRequest() ) @@ -31786,6 +31894,10 @@ def test_fetch_remote_branches_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = dataform.FetchRemoteBranchesResponse() + post_with_metadata.return_value = ( + dataform.FetchRemoteBranchesResponse(), + metadata, + ) client.fetch_remote_branches( request, @@ -31797,6 +31909,7 @@ def test_fetch_remote_branches_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_workspaces_rest_bad_request(request_type=dataform.ListWorkspacesRequest): @@ -31879,10 +31992,13 @@ def test_list_workspaces_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DataformRestInterceptor, "post_list_workspaces" ) as post, mock.patch.object( + transports.DataformRestInterceptor, "post_list_workspaces_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DataformRestInterceptor, "pre_list_workspaces" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = dataform.ListWorkspacesRequest.pb(dataform.ListWorkspacesRequest()) transcode.return_value = { "method": "post", @@ -31906,6 +32022,7 @@ def test_list_workspaces_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = dataform.ListWorkspacesResponse() + post_with_metadata.return_value = dataform.ListWorkspacesResponse(), metadata client.list_workspaces( request, 
@@ -31917,6 +32034,7 @@ def test_list_workspaces_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_workspace_rest_bad_request(request_type=dataform.GetWorkspaceRequest): @@ -32001,10 +32119,13 @@ def test_get_workspace_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DataformRestInterceptor, "post_get_workspace" ) as post, mock.patch.object( + transports.DataformRestInterceptor, "post_get_workspace_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DataformRestInterceptor, "pre_get_workspace" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = dataform.GetWorkspaceRequest.pb(dataform.GetWorkspaceRequest()) transcode.return_value = { "method": "post", @@ -32026,6 +32147,7 @@ def test_get_workspace_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = dataform.Workspace() + post_with_metadata.return_value = dataform.Workspace(), metadata client.get_workspace( request, @@ -32037,6 +32159,7 @@ def test_get_workspace_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_workspace_rest_bad_request( @@ -32187,10 +32310,13 @@ def test_create_workspace_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DataformRestInterceptor, "post_create_workspace" ) as post, mock.patch.object( + transports.DataformRestInterceptor, "post_create_workspace_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DataformRestInterceptor, "pre_create_workspace" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = dataform.CreateWorkspaceRequest.pb( dataform.CreateWorkspaceRequest() ) @@ -32214,6 +32340,7 @@ def test_create_workspace_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = dataform.Workspace() + post_with_metadata.return_value = dataform.Workspace(), metadata client.create_workspace( request, @@ -32225,6 +32352,7 @@ def test_create_workspace_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_workspace_rest_bad_request( @@ -32419,10 +32547,13 @@ def test_install_npm_packages_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DataformRestInterceptor, "post_install_npm_packages" ) as post, mock.patch.object( + transports.DataformRestInterceptor, "post_install_npm_packages_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DataformRestInterceptor, "pre_install_npm_packages" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = dataform.InstallNpmPackagesRequest.pb( dataform.InstallNpmPackagesRequest() ) @@ -32448,6 +32579,10 @@ def test_install_npm_packages_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = dataform.InstallNpmPackagesResponse() + post_with_metadata.return_value = ( + dataform.InstallNpmPackagesResponse(), + metadata, + ) client.install_npm_packages( request, @@ -32459,6 +32594,7 @@ def test_install_npm_packages_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def 
test_pull_git_commits_rest_bad_request(request_type=dataform.PullGitCommitsRequest): @@ -32756,10 +32892,13 @@ def test_fetch_file_git_statuses_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DataformRestInterceptor, "post_fetch_file_git_statuses" ) as post, mock.patch.object( + transports.DataformRestInterceptor, "post_fetch_file_git_statuses_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DataformRestInterceptor, "pre_fetch_file_git_statuses" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = dataform.FetchFileGitStatusesRequest.pb( dataform.FetchFileGitStatusesRequest() ) @@ -32785,6 +32924,10 @@ def test_fetch_file_git_statuses_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = dataform.FetchFileGitStatusesResponse() + post_with_metadata.return_value = ( + dataform.FetchFileGitStatusesResponse(), + metadata, + ) client.fetch_file_git_statuses( request, @@ -32796,6 +32939,7 @@ def test_fetch_file_git_statuses_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_fetch_git_ahead_behind_rest_bad_request( @@ -32884,10 +33028,13 @@ def test_fetch_git_ahead_behind_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DataformRestInterceptor, "post_fetch_git_ahead_behind" ) as post, mock.patch.object( + transports.DataformRestInterceptor, "post_fetch_git_ahead_behind_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DataformRestInterceptor, "pre_fetch_git_ahead_behind" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = dataform.FetchGitAheadBehindRequest.pb( dataform.FetchGitAheadBehindRequest() ) @@ -32913,6 +33060,10 @@ def test_fetch_git_ahead_behind_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = dataform.FetchGitAheadBehindResponse() + post_with_metadata.return_value = ( + dataform.FetchGitAheadBehindResponse(), + metadata, + ) client.fetch_git_ahead_behind( request, @@ -32924,6 +33075,7 @@ def test_fetch_git_ahead_behind_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_commit_workspace_changes_rest_bad_request( @@ -33230,10 +33382,13 @@ def test_fetch_file_diff_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DataformRestInterceptor, "post_fetch_file_diff" ) as post, mock.patch.object( + transports.DataformRestInterceptor, "post_fetch_file_diff_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DataformRestInterceptor, "pre_fetch_file_diff" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = dataform.FetchFileDiffRequest.pb(dataform.FetchFileDiffRequest()) transcode.return_value = { "method": "post", @@ -33257,6 +33412,7 @@ def test_fetch_file_diff_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = dataform.FetchFileDiffResponse() + post_with_metadata.return_value = dataform.FetchFileDiffResponse(), metadata client.fetch_file_diff( request, @@ -33268,6 +33424,7 @@ def test_fetch_file_diff_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def 
test_query_directory_contents_rest_bad_request( @@ -33354,10 +33511,14 @@ def test_query_directory_contents_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DataformRestInterceptor, "post_query_directory_contents" ) as post, mock.patch.object( + transports.DataformRestInterceptor, + "post_query_directory_contents_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DataformRestInterceptor, "pre_query_directory_contents" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = dataform.QueryDirectoryContentsRequest.pb( dataform.QueryDirectoryContentsRequest() ) @@ -33383,6 +33544,10 @@ def test_query_directory_contents_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = dataform.QueryDirectoryContentsResponse() + post_with_metadata.return_value = ( + dataform.QueryDirectoryContentsResponse(), + metadata, + ) client.query_directory_contents( request, @@ -33394,6 +33559,7 @@ def test_query_directory_contents_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_make_directory_rest_bad_request(request_type=dataform.MakeDirectoryRequest): @@ -33475,10 +33641,13 @@ def test_make_directory_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DataformRestInterceptor, "post_make_directory" ) as post, mock.patch.object( + transports.DataformRestInterceptor, "post_make_directory_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DataformRestInterceptor, "pre_make_directory" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = dataform.MakeDirectoryRequest.pb(dataform.MakeDirectoryRequest()) transcode.return_value = { "method": "post", @@ -33502,6 +33671,7 @@ def test_make_directory_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = dataform.MakeDirectoryResponse() + post_with_metadata.return_value = dataform.MakeDirectoryResponse(), metadata client.make_directory( request, @@ -33513,6 +33683,7 @@ def test_make_directory_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_remove_directory_rest_bad_request( @@ -33705,10 +33876,13 @@ def test_move_directory_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DataformRestInterceptor, "post_move_directory" ) as post, mock.patch.object( + transports.DataformRestInterceptor, "post_move_directory_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DataformRestInterceptor, "pre_move_directory" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = dataform.MoveDirectoryRequest.pb(dataform.MoveDirectoryRequest()) transcode.return_value = { "method": "post", @@ -33732,6 +33906,7 @@ def test_move_directory_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = dataform.MoveDirectoryResponse() + post_with_metadata.return_value = dataform.MoveDirectoryResponse(), metadata client.move_directory( request, @@ -33743,6 +33918,7 @@ def test_move_directory_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def 
test_read_file_rest_bad_request(request_type=dataform.ReadFileRequest): @@ -33827,10 +34003,13 @@ def test_read_file_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DataformRestInterceptor, "post_read_file" ) as post, mock.patch.object( + transports.DataformRestInterceptor, "post_read_file_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DataformRestInterceptor, "pre_read_file" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = dataform.ReadFileRequest.pb(dataform.ReadFileRequest()) transcode.return_value = { "method": "post", @@ -33852,6 +34031,7 @@ def test_read_file_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = dataform.ReadFileResponse() + post_with_metadata.return_value = dataform.ReadFileResponse(), metadata client.read_file( request, @@ -33863,6 +34043,7 @@ def test_read_file_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_remove_file_rest_bad_request(request_type=dataform.RemoveFileRequest): @@ -34051,10 +34232,13 @@ def test_move_file_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DataformRestInterceptor, "post_move_file" ) as post, mock.patch.object( + transports.DataformRestInterceptor, "post_move_file_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DataformRestInterceptor, "pre_move_file" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = dataform.MoveFileRequest.pb(dataform.MoveFileRequest()) transcode.return_value = { "method": "post", @@ -34076,6 +34260,7 @@ def test_move_file_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = dataform.MoveFileResponse() + post_with_metadata.return_value = dataform.MoveFileResponse(), metadata client.move_file( request, @@ -34087,6 +34272,7 @@ def test_move_file_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_write_file_rest_bad_request(request_type=dataform.WriteFileRequest): @@ -34168,10 +34354,13 @@ def test_write_file_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DataformRestInterceptor, "post_write_file" ) as post, mock.patch.object( + transports.DataformRestInterceptor, "post_write_file_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DataformRestInterceptor, "pre_write_file" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = dataform.WriteFileRequest.pb(dataform.WriteFileRequest()) transcode.return_value = { "method": "post", @@ -34193,6 +34382,7 @@ def test_write_file_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = dataform.WriteFileResponse() + post_with_metadata.return_value = dataform.WriteFileResponse(), metadata client.write_file( request, @@ -34204,6 +34394,7 @@ def test_write_file_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_release_configs_rest_bad_request( @@ -34288,10 +34479,13 @@ def test_list_release_configs_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DataformRestInterceptor, "post_list_release_configs" ) as 
post, mock.patch.object( + transports.DataformRestInterceptor, "post_list_release_configs_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DataformRestInterceptor, "pre_list_release_configs" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = dataform.ListReleaseConfigsRequest.pb( dataform.ListReleaseConfigsRequest() ) @@ -34317,6 +34511,10 @@ def test_list_release_configs_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = dataform.ListReleaseConfigsResponse() + post_with_metadata.return_value = ( + dataform.ListReleaseConfigsResponse(), + metadata, + ) client.list_release_configs( request, @@ -34328,6 +34526,7 @@ def test_list_release_configs_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_release_config_rest_bad_request( @@ -34422,10 +34621,13 @@ def test_get_release_config_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DataformRestInterceptor, "post_get_release_config" ) as post, mock.patch.object( + transports.DataformRestInterceptor, "post_get_release_config_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DataformRestInterceptor, "pre_get_release_config" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = dataform.GetReleaseConfigRequest.pb( dataform.GetReleaseConfigRequest() ) @@ -34449,6 +34651,7 @@ def test_get_release_config_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = dataform.ReleaseConfig() + post_with_metadata.return_value = dataform.ReleaseConfig(), metadata client.get_release_config( request, @@ -34460,6 +34663,7 @@ def test_get_release_config_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_release_config_rest_bad_request( @@ -34650,10 +34854,13 @@ def test_create_release_config_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DataformRestInterceptor, "post_create_release_config" ) as post, mock.patch.object( + transports.DataformRestInterceptor, "post_create_release_config_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DataformRestInterceptor, "pre_create_release_config" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = dataform.CreateReleaseConfigRequest.pb( dataform.CreateReleaseConfigRequest() ) @@ -34677,6 +34884,7 @@ def test_create_release_config_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = dataform.ReleaseConfig() + post_with_metadata.return_value = dataform.ReleaseConfig(), metadata client.create_release_config( request, @@ -34688,6 +34896,7 @@ def test_create_release_config_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_release_config_rest_bad_request( @@ -34886,10 +35095,13 @@ def test_update_release_config_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DataformRestInterceptor, "post_update_release_config" ) as post, mock.patch.object( + transports.DataformRestInterceptor, "post_update_release_config_with_metadata" + ) as post_with_metadata, mock.patch.object( 
transports.DataformRestInterceptor, "pre_update_release_config" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = dataform.UpdateReleaseConfigRequest.pb( dataform.UpdateReleaseConfigRequest() ) @@ -34913,6 +35125,7 @@ def test_update_release_config_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = dataform.ReleaseConfig() + post_with_metadata.return_value = dataform.ReleaseConfig(), metadata client.update_release_config( request, @@ -34924,6 +35137,7 @@ def test_update_release_config_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_release_config_rest_bad_request( @@ -35119,10 +35333,14 @@ def test_list_compilation_results_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DataformRestInterceptor, "post_list_compilation_results" ) as post, mock.patch.object( + transports.DataformRestInterceptor, + "post_list_compilation_results_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DataformRestInterceptor, "pre_list_compilation_results" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = dataform.ListCompilationResultsRequest.pb( dataform.ListCompilationResultsRequest() ) @@ -35148,6 +35366,10 @@ def test_list_compilation_results_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = dataform.ListCompilationResultsResponse() + post_with_metadata.return_value = ( + dataform.ListCompilationResultsResponse(), + metadata, + ) client.list_compilation_results( request, @@ -35159,6 +35381,7 @@ def test_list_compilation_results_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_compilation_result_rest_bad_request( @@ -35250,10 +35473,13 @@ def test_get_compilation_result_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DataformRestInterceptor, "post_get_compilation_result" ) as post, mock.patch.object( + transports.DataformRestInterceptor, "post_get_compilation_result_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DataformRestInterceptor, "pre_get_compilation_result" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = dataform.GetCompilationResultRequest.pb( dataform.GetCompilationResultRequest() ) @@ -35277,6 +35503,7 @@ def test_get_compilation_result_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = dataform.CompilationResult() + post_with_metadata.return_value = dataform.CompilationResult(), metadata client.get_compilation_result( request, @@ -35288,6 +35515,7 @@ def test_get_compilation_result_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_compilation_result_rest_bad_request( @@ -35474,10 +35702,14 @@ def test_create_compilation_result_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DataformRestInterceptor, "post_create_compilation_result" ) as post, mock.patch.object( + transports.DataformRestInterceptor, + "post_create_compilation_result_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DataformRestInterceptor, 
"pre_create_compilation_result" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = dataform.CreateCompilationResultRequest.pb( dataform.CreateCompilationResultRequest() ) @@ -35501,6 +35733,7 @@ def test_create_compilation_result_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = dataform.CompilationResult() + post_with_metadata.return_value = dataform.CompilationResult(), metadata client.create_compilation_result( request, @@ -35512,6 +35745,7 @@ def test_create_compilation_result_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_query_compilation_result_actions_rest_bad_request( @@ -35598,10 +35832,14 @@ def test_query_compilation_result_actions_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DataformRestInterceptor, "post_query_compilation_result_actions" ) as post, mock.patch.object( + transports.DataformRestInterceptor, + "post_query_compilation_result_actions_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DataformRestInterceptor, "pre_query_compilation_result_actions" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = dataform.QueryCompilationResultActionsRequest.pb( dataform.QueryCompilationResultActionsRequest() ) @@ -35627,6 +35865,10 @@ def test_query_compilation_result_actions_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = dataform.QueryCompilationResultActionsResponse() + post_with_metadata.return_value = ( + dataform.QueryCompilationResultActionsResponse(), + metadata, + ) client.query_compilation_result_actions( request, @@ -35638,6 +35880,7 @@ def test_query_compilation_result_actions_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_workflow_configs_rest_bad_request( @@ -35722,10 +35965,13 @@ def test_list_workflow_configs_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DataformRestInterceptor, "post_list_workflow_configs" ) as post, mock.patch.object( + transports.DataformRestInterceptor, "post_list_workflow_configs_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DataformRestInterceptor, "pre_list_workflow_configs" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = dataform.ListWorkflowConfigsRequest.pb( dataform.ListWorkflowConfigsRequest() ) @@ -35751,6 +35997,10 @@ def test_list_workflow_configs_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = dataform.ListWorkflowConfigsResponse() + post_with_metadata.return_value = ( + dataform.ListWorkflowConfigsResponse(), + metadata, + ) client.list_workflow_configs( request, @@ -35762,6 +36012,7 @@ def test_list_workflow_configs_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_workflow_config_rest_bad_request( @@ -35854,10 +36105,13 @@ def test_get_workflow_config_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DataformRestInterceptor, "post_get_workflow_config" ) as post, mock.patch.object( + transports.DataformRestInterceptor, "post_get_workflow_config_with_metadata" + ) as 
post_with_metadata, mock.patch.object( transports.DataformRestInterceptor, "pre_get_workflow_config" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = dataform.GetWorkflowConfigRequest.pb( dataform.GetWorkflowConfigRequest() ) @@ -35881,6 +36135,7 @@ def test_get_workflow_config_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = dataform.WorkflowConfig() + post_with_metadata.return_value = dataform.WorkflowConfig(), metadata client.get_workflow_config( request, @@ -35892,6 +36147,7 @@ def test_get_workflow_config_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_workflow_config_rest_bad_request( @@ -36083,10 +36339,13 @@ def test_create_workflow_config_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DataformRestInterceptor, "post_create_workflow_config" ) as post, mock.patch.object( + transports.DataformRestInterceptor, "post_create_workflow_config_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DataformRestInterceptor, "pre_create_workflow_config" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = dataform.CreateWorkflowConfigRequest.pb( dataform.CreateWorkflowConfigRequest() ) @@ -36110,6 +36369,7 @@ def test_create_workflow_config_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = dataform.WorkflowConfig() + post_with_metadata.return_value = dataform.WorkflowConfig(), metadata client.create_workflow_config( request, @@ -36121,6 +36381,7 @@ def test_create_workflow_config_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_workflow_config_rest_bad_request( @@ -36320,10 +36581,13 @@ def test_update_workflow_config_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DataformRestInterceptor, "post_update_workflow_config" ) as post, mock.patch.object( + transports.DataformRestInterceptor, "post_update_workflow_config_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DataformRestInterceptor, "pre_update_workflow_config" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = dataform.UpdateWorkflowConfigRequest.pb( dataform.UpdateWorkflowConfigRequest() ) @@ -36347,6 +36611,7 @@ def test_update_workflow_config_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = dataform.WorkflowConfig() + post_with_metadata.return_value = dataform.WorkflowConfig(), metadata client.update_workflow_config( request, @@ -36358,6 +36623,7 @@ def test_update_workflow_config_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_workflow_config_rest_bad_request( @@ -36553,10 +36819,14 @@ def test_list_workflow_invocations_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DataformRestInterceptor, "post_list_workflow_invocations" ) as post, mock.patch.object( + transports.DataformRestInterceptor, + "post_list_workflow_invocations_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DataformRestInterceptor, "pre_list_workflow_invocations" ) as pre: pre.assert_not_called() 
post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = dataform.ListWorkflowInvocationsRequest.pb( dataform.ListWorkflowInvocationsRequest() ) @@ -36582,6 +36852,10 @@ def test_list_workflow_invocations_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = dataform.ListWorkflowInvocationsResponse() + post_with_metadata.return_value = ( + dataform.ListWorkflowInvocationsResponse(), + metadata, + ) client.list_workflow_invocations( request, @@ -36593,6 +36867,7 @@ def test_list_workflow_invocations_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_workflow_invocation_rest_bad_request( @@ -36682,10 +36957,13 @@ def test_get_workflow_invocation_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DataformRestInterceptor, "post_get_workflow_invocation" ) as post, mock.patch.object( + transports.DataformRestInterceptor, "post_get_workflow_invocation_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DataformRestInterceptor, "pre_get_workflow_invocation" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = dataform.GetWorkflowInvocationRequest.pb( dataform.GetWorkflowInvocationRequest() ) @@ -36711,6 +36989,7 @@ def test_get_workflow_invocation_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = dataform.WorkflowInvocation() + post_with_metadata.return_value = dataform.WorkflowInvocation(), metadata client.get_workflow_invocation( request, @@ -36722,6 +37001,7 @@ def test_get_workflow_invocation_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_workflow_invocation_rest_bad_request( @@ -36900,10 +37180,14 @@ def test_create_workflow_invocation_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DataformRestInterceptor, "post_create_workflow_invocation" ) as post, mock.patch.object( + transports.DataformRestInterceptor, + "post_create_workflow_invocation_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DataformRestInterceptor, "pre_create_workflow_invocation" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = dataform.CreateWorkflowInvocationRequest.pb( dataform.CreateWorkflowInvocationRequest() ) @@ -36929,6 +37213,7 @@ def test_create_workflow_invocation_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = dataform.WorkflowInvocation() + post_with_metadata.return_value = dataform.WorkflowInvocation(), metadata client.create_workflow_invocation( request, @@ -36940,6 +37225,7 @@ def test_create_workflow_invocation_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_workflow_invocation_rest_bad_request( @@ -37248,10 +37534,14 @@ def test_query_workflow_invocation_actions_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DataformRestInterceptor, "post_query_workflow_invocation_actions" ) as post, mock.patch.object( + transports.DataformRestInterceptor, + "post_query_workflow_invocation_actions_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DataformRestInterceptor, 
"pre_query_workflow_invocation_actions" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = dataform.QueryWorkflowInvocationActionsRequest.pb( dataform.QueryWorkflowInvocationActionsRequest() ) @@ -37277,6 +37567,10 @@ def test_query_workflow_invocation_actions_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = dataform.QueryWorkflowInvocationActionsResponse() + post_with_metadata.return_value = ( + dataform.QueryWorkflowInvocationActionsResponse(), + metadata, + ) client.query_workflow_invocation_actions( request, @@ -37288,6 +37582,7 @@ def test_query_workflow_invocation_actions_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationRequest): diff --git a/packages/google-cloud-datalabeling/CHANGELOG.md b/packages/google-cloud-datalabeling/CHANGELOG.md index ca0eb74077b4..4388312109a2 100644 --- a/packages/google-cloud-datalabeling/CHANGELOG.md +++ b/packages/google-cloud-datalabeling/CHANGELOG.md @@ -4,6 +4,14 @@ [1]: https://pypi.org/project/google-cloud-datalabeling/#history +## [1.13.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-datalabeling-v1.12.0...google-cloud-datalabeling-v1.13.0) (2025-02-12) + + +### Features + +* Add REST Interceptors which support reading metadata ([e92d527](https://github.com/googleapis/google-cloud-python/commit/e92d52797ffbce45d033eb81af24e0cad32baa55)) +* Add support for reading selective GAPIC generation methods from service YAML ([e92d527](https://github.com/googleapis/google-cloud-python/commit/e92d52797ffbce45d033eb81af24e0cad32baa55)) + ## [1.12.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-datalabeling-v1.11.1...google-cloud-datalabeling-v1.12.0) (2024-12-12) diff --git a/packages/google-cloud-datalabeling/google/cloud/datalabeling/gapic_version.py b/packages/google-cloud-datalabeling/google/cloud/datalabeling/gapic_version.py index 739fdfae141c..43155ded0db3 100644 --- a/packages/google-cloud-datalabeling/google/cloud/datalabeling/gapic_version.py +++ b/packages/google-cloud-datalabeling/google/cloud/datalabeling/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.12.0" # {x-release-please-version} +__version__ = "1.13.0" # {x-release-please-version} diff --git a/packages/google-cloud-datalabeling/google/cloud/datalabeling_v1beta1/gapic_version.py b/packages/google-cloud-datalabeling/google/cloud/datalabeling_v1beta1/gapic_version.py index 739fdfae141c..43155ded0db3 100644 --- a/packages/google-cloud-datalabeling/google/cloud/datalabeling_v1beta1/gapic_version.py +++ b/packages/google-cloud-datalabeling/google/cloud/datalabeling_v1beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "1.12.0" # {x-release-please-version} +__version__ = "1.13.0" # {x-release-please-version} diff --git a/packages/google-cloud-datalabeling/google/cloud/datalabeling_v1beta1/services/data_labeling_service/client.py b/packages/google-cloud-datalabeling/google/cloud/datalabeling_v1beta1/services/data_labeling_service/client.py index c3f97d28a50d..beb7d005060c 100644 --- a/packages/google-cloud-datalabeling/google/cloud/datalabeling_v1beta1/services/data_labeling_service/client.py +++ b/packages/google-cloud-datalabeling/google/cloud/datalabeling_v1beta1/services/data_labeling_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -644,6 +646,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. 
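As a brief aside between files: the `_add_cred_info_for_auth_errors` helper just added to `client.py` above can be exercised on its own, much as the new unit tests later in this change do. The snippet below is a minimal sketch and is not part of this diff; it assumes a credential object exposing `get_cred_info()` (the google-auth >= 2.35.0 surface noted in the code comment) and substitutes a mock for it, so the client construction mirrors the generated tests rather than a production setup.

```python
# Sketch only: mirrors the unit tests added in this change, not a real API call.
import json
from unittest import mock

from google.api_core import exceptions as core_exceptions
from google.cloud.datalabeling_v1beta1.services.data_labeling_service import (
    DataLabelingServiceClient,
)

# A credential whose get_cred_info() returns a dict, as newer google-auth does.
cred = mock.Mock(["get_cred_info"])
cred.get_cred_info = mock.Mock(
    return_value={"credential_type": "service account credentials"}
)
client = DataLabelingServiceClient(credentials=cred)
client._transport._credentials = cred

# For 401/403/404 the serialized credential info is appended to error.details.
error = core_exceptions.GoogleAPICallError("permission denied", details=["foo"])
error.code = 403
client._add_cred_info_for_auth_errors(error)
assert error.details == [
    "foo",
    json.dumps({"credential_type": "service account credentials"}),
]

# Any other status code leaves the details untouched.
error = core_exceptions.GoogleAPICallError("server error", details=["foo"])
error.code = 500
client._add_cred_info_for_auth_errors(error)
assert error.details == ["foo"]
```

Because the helper only reacts to 401/403/404 and silently returns when the credential has no `get_cred_info()`, it is safe to call on every error path; the dataplex clients further down in this diff rely on that by wrapping their `list_operations`, `get_operation`, `get_location`, and `list_locations` calls in a `try/except` that passes each `GoogleAPICallError` through it before re-raising.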
diff --git a/packages/google-cloud-datalabeling/samples/generated_samples/snippet_metadata_google.cloud.datalabeling.v1beta1.json b/packages/google-cloud-datalabeling/samples/generated_samples/snippet_metadata_google.cloud.datalabeling.v1beta1.json index 45ccefdac839..6e03d7c178ee 100644 --- a/packages/google-cloud-datalabeling/samples/generated_samples/snippet_metadata_google.cloud.datalabeling.v1beta1.json +++ b/packages/google-cloud-datalabeling/samples/generated_samples/snippet_metadata_google.cloud.datalabeling.v1beta1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-datalabeling", - "version": "1.12.0" + "version": "1.13.0" }, "snippets": [ { diff --git a/packages/google-cloud-datalabeling/tests/unit/gapic/datalabeling_v1beta1/test_data_labeling_service.py b/packages/google-cloud-datalabeling/tests/unit/gapic/datalabeling_v1beta1/test_data_labeling_service.py index 634bca8e777b..831daf4ea388 100644 --- a/packages/google-cloud-datalabeling/tests/unit/gapic/datalabeling_v1beta1/test_data_labeling_service.py +++ b/packages/google-cloud-datalabeling/tests/unit/gapic/datalabeling_v1beta1/test_data_labeling_service.py @@ -22,6 +22,7 @@ except ImportError: # pragma: NO COVER import mock +import json import math from google.api_core import api_core_version @@ -84,6 +85,13 @@ from google.cloud.datalabeling_v1beta1.types import instruction from google.cloud.datalabeling_v1beta1.types import operations +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -357,6 +365,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = DataLabelingServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = DataLabelingServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-dataplex/CHANGELOG.md b/packages/google-cloud-dataplex/CHANGELOG.md index cfb4dcd788c9..a6421a792bd4 100644 --- a/packages/google-cloud-dataplex/CHANGELOG.md +++ b/packages/google-cloud-dataplex/CHANGELOG.md @@ -1,5 +1,29 @@ # Changelog +## [2.7.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-dataplex-v2.6.0...google-cloud-dataplex-v2.7.0) (2025-02-12) + + +### Features + +* Added value `NONE` to the `SyncMode` enum ([71b9301](https://github.com/googleapis/google-cloud-python/commit/71b93012113bbaabf2ce524553342bdc52ba96dc)) + + +### Documentation + +* Modified various comments ([71b9301](https://github.com/googleapis/google-cloud-python/commit/71b93012113bbaabf2ce524553342bdc52ba96dc)) + +## [2.6.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-dataplex-v2.5.0...google-cloud-dataplex-v2.6.0) (2025-01-02) + + +### Features + +* A new field `force` is added to message `.google.cloud.dataplex.v1.DeleteDataScanRequest` ([0da9e0a](https://github.com/googleapis/google-cloud-python/commit/0da9e0a01ddb9fae0df361d7cb131f2141ce5135)) + + +### Documentation + +* miscellaneous doc updates ([0da9e0a](https://github.com/googleapis/google-cloud-python/commit/0da9e0a01ddb9fae0df361d7cb131f2141ce5135)) + ## [2.5.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-dataplex-v2.4.0...google-cloud-dataplex-v2.5.0) (2024-12-12) diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex/gapic_version.py b/packages/google-cloud-dataplex/google/cloud/dataplex/gapic_version.py index 21f7deacf8df..ca5dce14d55e 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex/gapic_version.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "2.5.0" # {x-release-please-version} +__version__ = "2.7.0" # {x-release-please-version} diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/gapic_version.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/gapic_version.py index 21f7deacf8df..ca5dce14d55e 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/gapic_version.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.5.0" # {x-release-please-version} +__version__ = "2.7.0" # {x-release-please-version} diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/catalog_service/async_client.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/catalog_service/async_client.py index 7cb67922a160..2f44cb540f69 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/catalog_service/async_client.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/catalog_service/async_client.py @@ -2976,8 +2976,10 @@ async def sample_search_entries(): on the ``request`` instance; if ``request`` is provided, this should not be set. query (:class:`str`): - Required. The query against which - entries in scope should be matched. + Required. The query against which entries in scope + should be matched. The query syntax is defined in + `Search syntax for Dataplex + Catalog `__. This corresponds to the ``query`` field on the ``request`` instance; if ``request`` is provided, this @@ -3090,8 +3092,8 @@ async def sample_create_metadata_job(): metadata_job = dataplex_v1.MetadataJob() metadata_job.import_spec.scope.entry_groups = ['entry_groups_value1', 'entry_groups_value2'] metadata_job.import_spec.scope.entry_types = ['entry_types_value1', 'entry_types_value2'] - metadata_job.import_spec.entry_sync_mode = "INCREMENTAL" - metadata_job.import_spec.aspect_sync_mode = "INCREMENTAL" + metadata_job.import_spec.entry_sync_mode = "NONE" + metadata_job.import_spec.aspect_sync_mode = "NONE" metadata_job.type_ = "IMPORT" request = dataplex_v1.CreateMetadataJobRequest( diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/catalog_service/client.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/catalog_service/client.py index 1bdb2a730504..437121f78589 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/catalog_service/client.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/catalog_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -594,6 +596,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. 
+ """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -3394,8 +3423,10 @@ def sample_search_entries(): on the ``request`` instance; if ``request`` is provided, this should not be set. query (str): - Required. The query against which - entries in scope should be matched. + Required. The query against which entries in scope + should be matched. The query syntax is defined in + `Search syntax for Dataplex + Catalog `__. This corresponds to the ``query`` field on the ``request`` instance; if ``request`` is provided, this @@ -3505,8 +3536,8 @@ def sample_create_metadata_job(): metadata_job = dataplex_v1.MetadataJob() metadata_job.import_spec.scope.entry_groups = ['entry_groups_value1', 'entry_groups_value2'] metadata_job.import_spec.scope.entry_types = ['entry_types_value1', 'entry_types_value2'] - metadata_job.import_spec.entry_sync_mode = "INCREMENTAL" - metadata_job.import_spec.aspect_sync_mode = "INCREMENTAL" + metadata_job.import_spec.entry_sync_mode = "NONE" + metadata_job.import_spec.aspect_sync_mode = "NONE" metadata_job.type_ = "IMPORT" request = dataplex_v1.CreateMetadataJobRequest( @@ -3997,16 +4028,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -4052,16 +4087,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def delete_operation( self, @@ -4218,16 +4257,20 @@ def get_location( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def list_locations( self, @@ -4273,16 +4316,20 @@ def list_locations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. 
- response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/content_service/client.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/content_service/client.py index 7ef0bc7fc77b..86e8f08b296a 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/content_service/client.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/content_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -515,6 +517,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -1678,16 +1707,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -1733,16 +1766,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def delete_operation( self, @@ -1899,16 +1936,20 @@ def get_location( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def list_locations( self, @@ -1954,16 +1995,20 @@ def list_locations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_scan_service/async_client.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_scan_service/async_client.py index 8943f0147fa2..f3620d668635 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_scan_service/async_client.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_scan_service/async_client.py @@ -417,13 +417,22 @@ async def sample_create_data_scan(): For example: - - Data Quality: generates queries based on the rules + - Data quality: generates queries based on the rules and runs against the data to get data quality - check results. - - Data Profile: analyzes the data in table(s) and + check results. For more information, see [Auto + data quality + overview](\ https://cloud.google.com/dataplex/docs/auto-data-quality-overview). + - Data profile: analyzes the data in tables and generates insights about the structure, content and relationships (such as null percent, - cardinality, min/max/mean, etc). + cardinality, min/max/mean, etc). For more + information, see [About data + profiling](\ https://cloud.google.com/dataplex/docs/data-profiling-overview). + - Data discovery: scans data in Cloud Storage + buckets to extract and then catalog metadata. For + more information, see [Discover and catalog Cloud + Storage + data](\ https://cloud.google.com/bigquery/docs/automatic-discovery). """ # Create or coerce a protobuf request object. @@ -542,7 +551,7 @@ async def sample_update_data_scan(): on the ``request`` instance; if ``request`` is provided, this should not be set. update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): - Required. Mask of fields to update. + Optional. Mask of fields to update. This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -563,13 +572,22 @@ async def sample_update_data_scan(): For example: - - Data Quality: generates queries based on the rules + - Data quality: generates queries based on the rules and runs against the data to get data quality - check results. - - Data Profile: analyzes the data in table(s) and + check results. For more information, see [Auto + data quality + overview](\ https://cloud.google.com/dataplex/docs/auto-data-quality-overview). + - Data profile: analyzes the data in tables and generates insights about the structure, content and relationships (such as null percent, - cardinality, min/max/mean, etc). + cardinality, min/max/mean, etc). 
For more + information, see [About data + profiling](\ https://cloud.google.com/dataplex/docs/data-profiling-overview). + - Data discovery: scans data in Cloud Storage + buckets to extract and then catalog metadata. For + more information, see [Discover and catalog Cloud + Storage + data](\ https://cloud.google.com/bigquery/docs/automatic-discovery). """ # Create or coerce a protobuf request object. @@ -827,13 +845,22 @@ async def sample_get_data_scan(): For example: - - Data Quality: generates queries based on the rules + - Data quality: generates queries based on the rules and runs against the data to get data quality - check results. - - Data Profile: analyzes the data in table(s) and + check results. For more information, see [Auto + data quality + overview](\ https://cloud.google.com/dataplex/docs/auto-data-quality-overview). + - Data profile: analyzes the data in tables and generates insights about the structure, content and relationships (such as null percent, - cardinality, min/max/mean, etc). + cardinality, min/max/mean, etc). For more + information, see [About data + profiling](\ https://cloud.google.com/dataplex/docs/data-profiling-overview). + - Data discovery: scans data in Cloud Storage + buckets to extract and then catalog metadata. For + more information, see [Discover and catalog Cloud + Storage + data](\ https://cloud.google.com/bigquery/docs/automatic-discovery). """ # Create or coerce a protobuf request object. diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_scan_service/client.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_scan_service/client.py index ba2ab1861c50..8127951d58d2 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_scan_service/client.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_scan_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -595,6 +597,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -890,13 +919,22 @@ def sample_create_data_scan(): For example: - - Data Quality: generates queries based on the rules + - Data quality: generates queries based on the rules and runs against the data to get data quality - check results. - - Data Profile: analyzes the data in table(s) and + check results. For more information, see [Auto + data quality + overview](\ https://cloud.google.com/dataplex/docs/auto-data-quality-overview). 
+ - Data profile: analyzes the data in tables and generates insights about the structure, content and relationships (such as null percent, - cardinality, min/max/mean, etc). + cardinality, min/max/mean, etc). For more + information, see [About data + profiling](\ https://cloud.google.com/dataplex/docs/data-profiling-overview). + - Data discovery: scans data in Cloud Storage + buckets to extract and then catalog metadata. For + more information, see [Discover and catalog Cloud + Storage + data](\ https://cloud.google.com/bigquery/docs/automatic-discovery). """ # Create or coerce a protobuf request object. @@ -1012,7 +1050,7 @@ def sample_update_data_scan(): on the ``request`` instance; if ``request`` is provided, this should not be set. update_mask (google.protobuf.field_mask_pb2.FieldMask): - Required. Mask of fields to update. + Optional. Mask of fields to update. This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -1033,13 +1071,22 @@ def sample_update_data_scan(): For example: - - Data Quality: generates queries based on the rules + - Data quality: generates queries based on the rules and runs against the data to get data quality - check results. - - Data Profile: analyzes the data in table(s) and + check results. For more information, see [Auto + data quality + overview](\ https://cloud.google.com/dataplex/docs/auto-data-quality-overview). + - Data profile: analyzes the data in tables and generates insights about the structure, content and relationships (such as null percent, - cardinality, min/max/mean, etc). + cardinality, min/max/mean, etc). For more + information, see [About data + profiling](\ https://cloud.google.com/dataplex/docs/data-profiling-overview). + - Data discovery: scans data in Cloud Storage + buckets to extract and then catalog metadata. For + more information, see [Discover and catalog Cloud + Storage + data](\ https://cloud.google.com/bigquery/docs/automatic-discovery). """ # Create or coerce a protobuf request object. @@ -1291,13 +1338,22 @@ def sample_get_data_scan(): For example: - - Data Quality: generates queries based on the rules + - Data quality: generates queries based on the rules and runs against the data to get data quality - check results. - - Data Profile: analyzes the data in table(s) and + check results. For more information, see [Auto + data quality + overview](\ https://cloud.google.com/dataplex/docs/auto-data-quality-overview). + - Data profile: analyzes the data in tables and generates insights about the structure, content and relationships (such as null percent, - cardinality, min/max/mean, etc). + cardinality, min/max/mean, etc). For more + information, see [About data + profiling](\ https://cloud.google.com/dataplex/docs/data-profiling-overview). + - Data discovery: scans data in Cloud Storage + buckets to extract and then catalog metadata. For + more information, see [Discover and catalog Cloud + Storage + data](\ https://cloud.google.com/bigquery/docs/automatic-discovery). """ # Create or coerce a protobuf request object. @@ -1975,16 +2031,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. 
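Since update_mask on UpdateDataScanRequest is now documented as Optional, an update call can be sketched without building a FieldMask. Names below are placeholders, and whether an omitted mask is accepted for a given scan is ultimately decided by the service:

from google.cloud import dataplex_v1

client = dataplex_v1.DataScanServiceClient()
data_scan = dataplex_v1.DataScan(
    name="projects/my-project/locations/us-central1/dataScans/my-scan",  # hypothetical
    description="nightly data quality checks",
)
# update_mask is omitted here; the previous docstring marked it as Required.
operation = client.update_data_scan(data_scan=data_scan)
response = operation.result()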
+ return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -2030,16 +2090,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def delete_operation( self, @@ -2196,16 +2260,20 @@ def get_location( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def list_locations( self, @@ -2251,16 +2319,20 @@ def list_locations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_taxonomy_service/client.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_taxonomy_service/client.py index 3664b81c821c..4b2c4ec818d2 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_taxonomy_service/client.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_taxonomy_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -543,6 +545,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. 
@@ -2727,16 +2756,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -2782,16 +2815,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def delete_operation( self, @@ -2948,16 +2985,20 @@ def get_location( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def list_locations( self, @@ -3003,16 +3044,20 @@ def list_locations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/dataplex_service/client.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/dataplex_service/client.py index 9a98c883cab3..da7200c33afd 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/dataplex_service/client.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/dataplex_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -676,6 +678,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. 
+ """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -5018,16 +5047,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -5073,16 +5106,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def delete_operation( self, @@ -5239,16 +5276,20 @@ def get_location( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def list_locations( self, @@ -5294,16 +5335,20 @@ def list_locations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/metadata_service/client.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/metadata_service/client.py index 85c9d2b0132f..76e9c9e1d93c 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/metadata_service/client.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/metadata_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. 
# from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -548,6 +550,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -1781,16 +1810,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -1836,16 +1869,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def delete_operation( self, @@ -2002,16 +2039,20 @@ def get_location( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def list_locations( self, @@ -2057,16 +2098,20 @@ def list_locations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. 
+ return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/catalog.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/catalog.py index 5cfc8ed82adc..67e0db2d7ae9 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/catalog.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/catalog.py @@ -801,8 +801,9 @@ class Entry(proto.Message): - If the aspect is attached to an entry's path: ``{project_id_or_number}.{location_id}.{aspect_type_id}@{path}`` parent_entry (str): - Optional. Immutable. The resource name of the - parent entry. + Optional. Immutable. The resource name of the parent entry, + in the format + ``projects/{project_id_or_number}/locations/{location_id}/entryGroups/{entry_group_id}/entries/{entry_id}``. fully_qualified_name (str): Optional. A name for the entry that can be referenced by an external system. For more information, see `Fully qualified @@ -1625,7 +1626,7 @@ class UpdateEntryRequest(proto.Message): an aspect to a field that is specified by the ``schema`` aspect, the path should have the format ``Schema.``. - - ``*`` - matches aspects of the + - ``@*`` - matches aspects of the given type for all paths. - ``*@path`` - matches aspects of all types on the given path. @@ -1860,8 +1861,10 @@ class SearchEntriesRequest(proto.Message): attributed in the following form: ``projects/{project}/locations/{location}``. query (str): - Required. The query against which entries in - scope should be matched. + Required. The query against which entries in scope should be + matched. The query syntax is defined in `Search syntax for + Dataplex + Catalog `__. page_size (int): Optional. Number of results in the search page. If <=0, then defaults to 10. Max limit for page_size is 1000. Throws an @@ -1871,7 +1874,12 @@ class SearchEntriesRequest(proto.Message): ``SearchEntries`` call. Provide this to retrieve the subsequent page. order_by (str): - Optional. Specifies the ordering of results. + Optional. Specifies the ordering of results. Supported + values are: + + - ``relevance`` (default) + - ``last_modified_timestamp`` + - ``last_modified_timestamp asc`` scope (str): Optional. The scope under which the search should be operating. It must either be ``organizations/`` or @@ -2035,12 +2043,12 @@ class ImportItem(proto.Message): the entry. - ``{aspect_type_reference}@{path}``: matches aspects that belong to the specified aspect type and path. - - ``{aspect_type_reference}@*``: matches aspects that - belong to the specified aspect type for all paths. - - Replace ``{aspect_type_reference}`` with a reference to the - aspect type, in the format - ``{project_id_or_number}.{location_id}.{aspect_type_id}``. + - ``@*`` : matches aspects of the + given type for all paths. + - ``*@path`` : matches aspects of all types on the given + path. Replace ``{aspect_type_reference}`` with a + reference to the aspect type, in the format + ``{project_id_or_number}.{location_id}.{aspect_type_id}``. If you leave this field empty, it is treated as specifying exactly those aspects that are present within the specified @@ -2401,10 +2409,15 @@ class SyncMode(proto.Enum): are modified. Use this mode to modify a subset of resources while leaving unreferenced resources unchanged. 
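The expanded SearchEntriesRequest documentation above lists the supported order_by values. A hedged sketch of a search call using them (the project path and query string are placeholders, not values from this diff):

from google.cloud import dataplex_v1

client = dataplex_v1.CatalogServiceClient()
request = dataplex_v1.SearchEntriesRequest(
    name="projects/my-project/locations/global",  # hypothetical project
    query="orders",                               # free-text query; see the search syntax reference
    order_by="last_modified_timestamp",           # or "relevance" (default) / "last_modified_timestamp asc"
)
for result in client.search_entries(request=request):
    print(result.dataplex_entry.name)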
+ NONE (3): + If entry sync mode is NONE, then the entry-specific fields + (apart from aspects) are not modified and the aspects are + modified according to the aspect_sync_mode """ SYNC_MODE_UNSPECIFIED = 0 FULL = 1 INCREMENTAL = 2 + NONE = 3 class LogLevel(proto.Enum): r"""The level of logs to write to Cloud Logging for this job. diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/data_quality.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/data_quality.py index e46f60dcc3c8..f7c56473c6ed 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/data_quality.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/data_quality.py @@ -365,11 +365,10 @@ class DataQualityRuleResult(proto.Message): automatically failing rule evaluation, or - exclude ``null`` rows from the ``evaluated_count``, by setting ``ignore_nulls = true``. + + This field is not set for rule SqlAssertion. passed_count (int): - The number of rows which passed a rule - evaluation. - This field is only valid for row-level type - rules. + This field is not set for rule SqlAssertion. null_count (int): The number of rows with null values in the specified column. diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/datascans.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/datascans.py index eb0eea7e0688..eeddb63c7002 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/datascans.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/datascans.py @@ -123,7 +123,7 @@ class UpdateDataScanRequest(proto.Message): Only fields specified in ``update_mask`` are updated. update_mask (google.protobuf.field_mask_pb2.FieldMask): - Required. Mask of fields to update. + Optional. Mask of fields to update. validate_only (bool): Optional. Only validate the request, but do not perform mutations. The default is ``false``. @@ -154,12 +154,21 @@ class DeleteDataScanRequest(proto.Message): ``projects/{project}/locations/{location_id}/dataScans/{data_scan_id}`` where ``project`` refers to a *project_id* or *project_number* and ``location_id`` refers to a GCP region. + force (bool): + Optional. If set to true, any child resources + of this data scan will also be deleted. + (Otherwise, the request will only work if the + data scan has no child resources.) """ name: str = proto.Field( proto.STRING, number=1, ) + force: bool = proto.Field( + proto.BOOL, + number=2, + ) class GetDataScanRequest(proto.Message): @@ -491,11 +500,19 @@ class DataScan(proto.Message): For example: - - Data Quality: generates queries based on the rules and runs - against the data to get data quality check results. - - Data Profile: analyzes the data in table(s) and generates - insights about the structure, content and relationships (such as - null percent, cardinality, min/max/mean, etc). + - Data quality: generates queries based on the rules and runs + against the data to get data quality check results. For more + information, see `Auto data quality + overview `__. + - Data profile: analyzes the data in tables and generates insights + about the structure, content and relationships (such as null + percent, cardinality, min/max/mean, etc). For more information, + see `About data + profiling `__. + - Data discovery: scans data in Cloud Storage buckets to extract + and then catalog metadata. For more information, see `Discover + and catalog Cloud Storage + data `__. This message has `oneof`_ fields (mutually exclusive fields). 
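The new force flag on DeleteDataScanRequest shown above changes delete semantics for scans that have child resources. A minimal sketch of passing it (resource name hypothetical):

from google.cloud import dataplex_v1

client = dataplex_v1.DataScanServiceClient()
request = dataplex_v1.DeleteDataScanRequest(
    name="projects/my-project/locations/us-central1/dataScans/my-scan",  # hypothetical
    force=True,  # also delete any child resources; without it, the delete fails if children exist
)
operation = client.delete_data_scan(request=request)
operation.result()  # long-running operation; the result is empty on success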
For each oneof, at most one member field can be set at the same time. @@ -506,8 +523,8 @@ class DataScan(proto.Message): Attributes: name (str): - Output only. The relative resource name of the scan, of the - form: + Output only. Identifier. The relative resource name of the + scan, of the form: ``projects/{project}/locations/{location_id}/dataScans/{datascan_id}``, where ``project`` refers to a *project_id* or *project_number* and ``location_id`` refers to a GCP region. @@ -615,9 +632,11 @@ class ExecutionStatus(proto.Message): Attributes: latest_job_start_time (google.protobuf.timestamp_pb2.Timestamp): - The time when the latest DataScanJob started. + Optional. The time when the latest + DataScanJob started. latest_job_end_time (google.protobuf.timestamp_pb2.Timestamp): - The time when the latest DataScanJob ended. + Optional. The time when the latest + DataScanJob ended. latest_job_create_time (google.protobuf.timestamp_pb2.Timestamp): Optional. The time when the DataScanJob execution was created. @@ -745,8 +764,8 @@ class DataScanJob(proto.Message): Attributes: name (str): - Output only. The relative resource name of the DataScanJob, - of the form: + Output only. Identifier. The relative resource name of the + DataScanJob, of the form: ``projects/{project}/locations/{location_id}/dataScans/{datascan_id}/jobs/{job_id}``, where ``project`` refers to a *project_id* or *project_number* and ``location_id`` refers to a GCP region. diff --git a/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_catalog_service_create_metadata_job_async.py b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_catalog_service_create_metadata_job_async.py index 1ecd3586aee7..5ca0b9a4160b 100644 --- a/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_catalog_service_create_metadata_job_async.py +++ b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_catalog_service_create_metadata_job_async.py @@ -42,8 +42,8 @@ async def sample_create_metadata_job(): metadata_job = dataplex_v1.MetadataJob() metadata_job.import_spec.scope.entry_groups = ['entry_groups_value1', 'entry_groups_value2'] metadata_job.import_spec.scope.entry_types = ['entry_types_value1', 'entry_types_value2'] - metadata_job.import_spec.entry_sync_mode = "INCREMENTAL" - metadata_job.import_spec.aspect_sync_mode = "INCREMENTAL" + metadata_job.import_spec.entry_sync_mode = "NONE" + metadata_job.import_spec.aspect_sync_mode = "NONE" metadata_job.type_ = "IMPORT" request = dataplex_v1.CreateMetadataJobRequest( diff --git a/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_catalog_service_create_metadata_job_sync.py b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_catalog_service_create_metadata_job_sync.py index 022008b13e72..f27ec491589f 100644 --- a/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_catalog_service_create_metadata_job_sync.py +++ b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_catalog_service_create_metadata_job_sync.py @@ -42,8 +42,8 @@ def sample_create_metadata_job(): metadata_job = dataplex_v1.MetadataJob() metadata_job.import_spec.scope.entry_groups = ['entry_groups_value1', 'entry_groups_value2'] metadata_job.import_spec.scope.entry_types = ['entry_types_value1', 'entry_types_value2'] - metadata_job.import_spec.entry_sync_mode = "INCREMENTAL" - metadata_job.import_spec.aspect_sync_mode = "INCREMENTAL" 
+ metadata_job.import_spec.entry_sync_mode = "NONE" + metadata_job.import_spec.aspect_sync_mode = "NONE" metadata_job.type_ = "IMPORT" request = dataplex_v1.CreateMetadataJobRequest( diff --git a/packages/google-cloud-dataplex/samples/generated_samples/snippet_metadata_google.cloud.dataplex.v1.json b/packages/google-cloud-dataplex/samples/generated_samples/snippet_metadata_google.cloud.dataplex.v1.json index 9c63c00d5cdd..8feb828ded6f 100644 --- a/packages/google-cloud-dataplex/samples/generated_samples/snippet_metadata_google.cloud.dataplex.v1.json +++ b/packages/google-cloud-dataplex/samples/generated_samples/snippet_metadata_google.cloud.dataplex.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-dataplex", - "version": "2.5.0" + "version": "2.7.0" }, "snippets": [ { diff --git a/packages/google-cloud-dataplex/scripts/fixup_dataplex_v1_keywords.py b/packages/google-cloud-dataplex/scripts/fixup_dataplex_v1_keywords.py index 6681941351bf..e1cada26f833 100644 --- a/packages/google-cloud-dataplex/scripts/fixup_dataplex_v1_keywords.py +++ b/packages/google-cloud-dataplex/scripts/fixup_dataplex_v1_keywords.py @@ -63,7 +63,7 @@ class dataplexCallTransformer(cst.CSTTransformer): 'delete_content': ('name', ), 'delete_data_attribute': ('name', 'etag', ), 'delete_data_attribute_binding': ('name', 'etag', ), - 'delete_data_scan': ('name', ), + 'delete_data_scan': ('name', 'force', ), 'delete_data_taxonomy': ('name', 'etag', ), 'delete_entity': ('name', 'etag', ), 'delete_entry': ('name', ), diff --git a/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_catalog_service.py b/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_catalog_service.py index 72f4328e35a7..06524c0e93b0 100644 --- a/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_catalog_service.py +++ b/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_catalog_service.py @@ -22,6 +22,7 @@ except ImportError: # pragma: NO COVER import mock +import json import math from google.api_core import api_core_version @@ -73,6 +74,13 @@ ) from google.cloud.dataplex_v1.types import catalog, service +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -331,6 +339,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = CatalogServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = CatalogServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_content_service.py b/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_content_service.py index 3f618ec57c6c..a13106cb2bd9 100644 --- a/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_content_service.py +++ b/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_content_service.py @@ -22,6 +22,7 @@ except ImportError: # pragma: NO COVER import mock +import json import math from google.api_core import api_core_version @@ -65,6 +66,13 @@ from google.cloud.dataplex_v1.types import content from google.cloud.dataplex_v1.types import content as gcd_content +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -323,6 +331,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = ContentServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = ContentServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_data_scan_service.py b/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_data_scan_service.py index 2dc3372cf740..7234e9286934 100644 --- a/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_data_scan_service.py +++ b/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_data_scan_service.py @@ -22,6 +22,7 @@ except ImportError: # pragma: NO COVER import mock +import json import math from google.api_core import api_core_version @@ -80,6 +81,13 @@ service, ) +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -338,6 +346,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = DataScanServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = DataScanServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_data_taxonomy_service.py b/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_data_taxonomy_service.py index cbe00d789c16..0463b4a56c28 100644 --- a/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_data_taxonomy_service.py +++ b/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_data_taxonomy_service.py @@ -22,6 +22,7 @@ except ImportError: # pragma: NO COVER import mock +import json import math from google.api_core import api_core_version @@ -74,6 +75,13 @@ from google.cloud.dataplex_v1.types import data_taxonomy as gcd_data_taxonomy from google.cloud.dataplex_v1.types import security, service +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -347,6 +355,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = DataTaxonomyServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = DataTaxonomyServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_dataplex_service.py b/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_dataplex_service.py index 4f51ecd23937..c93b2be86916 100644 --- a/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_dataplex_service.py +++ b/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_dataplex_service.py @@ -22,6 +22,7 @@ except ImportError: # pragma: NO COVER import mock +import json import math from google.api_core import api_core_version @@ -73,6 +74,13 @@ ) from google.cloud.dataplex_v1.types import analyze, resources, service, tasks +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -331,6 +339,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = DataplexServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = DataplexServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_metadata_service.py b/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_metadata_service.py index 7c4bfd0cb98b..1e098720c7bc 100644 --- a/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_metadata_service.py +++ b/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_metadata_service.py @@ -22,6 +22,7 @@ except ImportError: # pragma: NO COVER import mock +import json import math from google.api_core import api_core_version @@ -61,6 +62,13 @@ ) from google.cloud.dataplex_v1.types import metadata_ +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -319,6 +327,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = MetadataServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = MetadataServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-dataproc-metastore/CHANGELOG.md b/packages/google-cloud-dataproc-metastore/CHANGELOG.md index 9e07f8396aa4..f8e11020dd54 100644 --- a/packages/google-cloud-dataproc-metastore/CHANGELOG.md +++ b/packages/google-cloud-dataproc-metastore/CHANGELOG.md @@ -1,5 +1,13 @@ # Changelog +## [1.18.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-dataproc-metastore-v1.17.0...google-cloud-dataproc-metastore-v1.18.0) (2025-02-12) + + +### Features + +* Add REST Interceptors which support reading metadata ([e92d527](https://github.com/googleapis/google-cloud-python/commit/e92d52797ffbce45d033eb81af24e0cad32baa55)) +* Add support for reading selective GAPIC generation methods from service YAML ([e92d527](https://github.com/googleapis/google-cloud-python/commit/e92d52797ffbce45d033eb81af24e0cad32baa55)) + ## [1.17.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-dataproc-metastore-v1.16.1...google-cloud-dataproc-metastore-v1.17.0) (2024-12-12) diff --git a/packages/google-cloud-dataproc-metastore/google/cloud/metastore/gapic_version.py b/packages/google-cloud-dataproc-metastore/google/cloud/metastore/gapic_version.py index 007d6040cbe0..8099b154e9b6 100644 --- a/packages/google-cloud-dataproc-metastore/google/cloud/metastore/gapic_version.py +++ b/packages/google-cloud-dataproc-metastore/google/cloud/metastore/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.17.0" # {x-release-please-version} +__version__ = "1.18.0" # {x-release-please-version} diff --git a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1/gapic_version.py b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1/gapic_version.py index 007d6040cbe0..8099b154e9b6 100644 --- a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1/gapic_version.py +++ b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "1.17.0" # {x-release-please-version} +__version__ = "1.18.0" # {x-release-please-version} diff --git a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1/services/dataproc_metastore/client.py b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1/services/dataproc_metastore/client.py index 713831b7056e..7a3a530fbb5e 100644 --- a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1/services/dataproc_metastore/client.py +++ b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1/services/dataproc_metastore/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -605,6 +607,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -3086,16 +3115,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -3141,16 +3174,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def delete_operation( self, @@ -3373,16 +3410,20 @@ def set_iam_policy( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. 
+ return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_iam_policy( self, @@ -3495,16 +3536,20 @@ def get_iam_policy( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def test_iam_permissions( self, @@ -3555,16 +3600,20 @@ def test_iam_permissions( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_location( self, @@ -3610,16 +3659,20 @@ def get_location( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def list_locations( self, @@ -3665,16 +3718,20 @@ def list_locations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1/services/dataproc_metastore/transports/rest.py b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1/services/dataproc_metastore/transports/rest.py index e53dcbf8d772..b93059b4accd 100644 --- a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1/services/dataproc_metastore/transports/rest.py +++ b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1/services/dataproc_metastore/transports/rest.py @@ -242,12 +242,35 @@ def post_alter_metadata_resource_location( ) -> operations_pb2.Operation: """Post-rpc interceptor for alter_metadata_resource_location - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_alter_metadata_resource_location_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DataprocMetastore server but before - it is returned to user code. + it is returned to user code. 
This `post_alter_metadata_resource_location` interceptor runs + before the `post_alter_metadata_resource_location_with_metadata` interceptor. """ return response + def post_alter_metadata_resource_location_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for alter_metadata_resource_location + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataprocMetastore server but before it is returned to user code. + + We recommend only using this `post_alter_metadata_resource_location_with_metadata` + interceptor in new development instead of the `post_alter_metadata_resource_location` interceptor. + When both interceptors are used, this `post_alter_metadata_resource_location_with_metadata` interceptor runs after the + `post_alter_metadata_resource_location` interceptor. The (possibly modified) response returned by + `post_alter_metadata_resource_location` will be passed to + `post_alter_metadata_resource_location_with_metadata`. + """ + return response, metadata + def pre_create_backup( self, request: metastore.CreateBackupRequest, @@ -265,12 +288,35 @@ def post_create_backup( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_backup - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_backup_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DataprocMetastore server but before - it is returned to user code. + it is returned to user code. This `post_create_backup` interceptor runs + before the `post_create_backup_with_metadata` interceptor. """ return response + def post_create_backup_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_backup + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataprocMetastore server but before it is returned to user code. + + We recommend only using this `post_create_backup_with_metadata` + interceptor in new development instead of the `post_create_backup` interceptor. + When both interceptors are used, this `post_create_backup_with_metadata` interceptor runs after the + `post_create_backup` interceptor. The (possibly modified) response returned by + `post_create_backup` will be passed to + `post_create_backup_with_metadata`. + """ + return response, metadata + def pre_create_metadata_import( self, request: metastore.CreateMetadataImportRequest, @@ -290,12 +336,35 @@ def post_create_metadata_import( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_metadata_import - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_metadata_import_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DataprocMetastore server but before - it is returned to user code. + it is returned to user code. This `post_create_metadata_import` interceptor runs + before the `post_create_metadata_import_with_metadata` interceptor. 
""" return response + def post_create_metadata_import_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_metadata_import + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataprocMetastore server but before it is returned to user code. + + We recommend only using this `post_create_metadata_import_with_metadata` + interceptor in new development instead of the `post_create_metadata_import` interceptor. + When both interceptors are used, this `post_create_metadata_import_with_metadata` interceptor runs after the + `post_create_metadata_import` interceptor. The (possibly modified) response returned by + `post_create_metadata_import` will be passed to + `post_create_metadata_import_with_metadata`. + """ + return response, metadata + def pre_create_service( self, request: metastore.CreateServiceRequest, @@ -313,12 +382,35 @@ def post_create_service( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_service - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_service_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DataprocMetastore server but before - it is returned to user code. + it is returned to user code. This `post_create_service` interceptor runs + before the `post_create_service_with_metadata` interceptor. """ return response + def post_create_service_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_service + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataprocMetastore server but before it is returned to user code. + + We recommend only using this `post_create_service_with_metadata` + interceptor in new development instead of the `post_create_service` interceptor. + When both interceptors are used, this `post_create_service_with_metadata` interceptor runs after the + `post_create_service` interceptor. The (possibly modified) response returned by + `post_create_service` will be passed to + `post_create_service_with_metadata`. + """ + return response, metadata + def pre_delete_backup( self, request: metastore.DeleteBackupRequest, @@ -336,12 +428,35 @@ def post_delete_backup( ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_backup - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_backup_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DataprocMetastore server but before - it is returned to user code. + it is returned to user code. This `post_delete_backup` interceptor runs + before the `post_delete_backup_with_metadata` interceptor. 
""" return response + def post_delete_backup_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_backup + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataprocMetastore server but before it is returned to user code. + + We recommend only using this `post_delete_backup_with_metadata` + interceptor in new development instead of the `post_delete_backup` interceptor. + When both interceptors are used, this `post_delete_backup_with_metadata` interceptor runs after the + `post_delete_backup` interceptor. The (possibly modified) response returned by + `post_delete_backup` will be passed to + `post_delete_backup_with_metadata`. + """ + return response, metadata + def pre_delete_service( self, request: metastore.DeleteServiceRequest, @@ -359,12 +474,35 @@ def post_delete_service( ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_service - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_service_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DataprocMetastore server but before - it is returned to user code. + it is returned to user code. This `post_delete_service` interceptor runs + before the `post_delete_service_with_metadata` interceptor. """ return response + def post_delete_service_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_service + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataprocMetastore server but before it is returned to user code. + + We recommend only using this `post_delete_service_with_metadata` + interceptor in new development instead of the `post_delete_service` interceptor. + When both interceptors are used, this `post_delete_service_with_metadata` interceptor runs after the + `post_delete_service` interceptor. The (possibly modified) response returned by + `post_delete_service` will be passed to + `post_delete_service_with_metadata`. + """ + return response, metadata + def pre_export_metadata( self, request: metastore.ExportMetadataRequest, @@ -384,12 +522,35 @@ def post_export_metadata( ) -> operations_pb2.Operation: """Post-rpc interceptor for export_metadata - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_export_metadata_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DataprocMetastore server but before - it is returned to user code. + it is returned to user code. This `post_export_metadata` interceptor runs + before the `post_export_metadata_with_metadata` interceptor. """ return response + def post_export_metadata_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for export_metadata + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataprocMetastore server but before it is returned to user code. 
+ + We recommend only using this `post_export_metadata_with_metadata` + interceptor in new development instead of the `post_export_metadata` interceptor. + When both interceptors are used, this `post_export_metadata_with_metadata` interceptor runs after the + `post_export_metadata` interceptor. The (possibly modified) response returned by + `post_export_metadata` will be passed to + `post_export_metadata_with_metadata`. + """ + return response, metadata + def pre_get_backup( self, request: metastore.GetBackupRequest, @@ -405,12 +566,35 @@ def pre_get_backup( def post_get_backup(self, response: metastore.Backup) -> metastore.Backup: """Post-rpc interceptor for get_backup - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_backup_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DataprocMetastore server but before - it is returned to user code. + it is returned to user code. This `post_get_backup` interceptor runs + before the `post_get_backup_with_metadata` interceptor. """ return response + def post_get_backup_with_metadata( + self, + response: metastore.Backup, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[metastore.Backup, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_backup + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataprocMetastore server but before it is returned to user code. + + We recommend only using this `post_get_backup_with_metadata` + interceptor in new development instead of the `post_get_backup` interceptor. + When both interceptors are used, this `post_get_backup_with_metadata` interceptor runs after the + `post_get_backup` interceptor. The (possibly modified) response returned by + `post_get_backup` will be passed to + `post_get_backup_with_metadata`. + """ + return response, metadata + def pre_get_metadata_import( self, request: metastore.GetMetadataImportRequest, @@ -430,12 +614,35 @@ def post_get_metadata_import( ) -> metastore.MetadataImport: """Post-rpc interceptor for get_metadata_import - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_metadata_import_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DataprocMetastore server but before - it is returned to user code. + it is returned to user code. This `post_get_metadata_import` interceptor runs + before the `post_get_metadata_import_with_metadata` interceptor. """ return response + def post_get_metadata_import_with_metadata( + self, + response: metastore.MetadataImport, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[metastore.MetadataImport, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_metadata_import + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataprocMetastore server but before it is returned to user code. + + We recommend only using this `post_get_metadata_import_with_metadata` + interceptor in new development instead of the `post_get_metadata_import` interceptor. + When both interceptors are used, this `post_get_metadata_import_with_metadata` interceptor runs after the + `post_get_metadata_import` interceptor. The (possibly modified) response returned by + `post_get_metadata_import` will be passed to + `post_get_metadata_import_with_metadata`. 
+ """ + return response, metadata + def pre_get_service( self, request: metastore.GetServiceRequest, @@ -451,12 +658,35 @@ def pre_get_service( def post_get_service(self, response: metastore.Service) -> metastore.Service: """Post-rpc interceptor for get_service - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_service_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DataprocMetastore server but before - it is returned to user code. + it is returned to user code. This `post_get_service` interceptor runs + before the `post_get_service_with_metadata` interceptor. """ return response + def post_get_service_with_metadata( + self, + response: metastore.Service, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[metastore.Service, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_service + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataprocMetastore server but before it is returned to user code. + + We recommend only using this `post_get_service_with_metadata` + interceptor in new development instead of the `post_get_service` interceptor. + When both interceptors are used, this `post_get_service_with_metadata` interceptor runs after the + `post_get_service` interceptor. The (possibly modified) response returned by + `post_get_service` will be passed to + `post_get_service_with_metadata`. + """ + return response, metadata + def pre_list_backups( self, request: metastore.ListBackupsRequest, @@ -474,12 +704,35 @@ def post_list_backups( ) -> metastore.ListBackupsResponse: """Post-rpc interceptor for list_backups - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_backups_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DataprocMetastore server but before - it is returned to user code. + it is returned to user code. This `post_list_backups` interceptor runs + before the `post_list_backups_with_metadata` interceptor. """ return response + def post_list_backups_with_metadata( + self, + response: metastore.ListBackupsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[metastore.ListBackupsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_backups + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataprocMetastore server but before it is returned to user code. + + We recommend only using this `post_list_backups_with_metadata` + interceptor in new development instead of the `post_list_backups` interceptor. + When both interceptors are used, this `post_list_backups_with_metadata` interceptor runs after the + `post_list_backups` interceptor. The (possibly modified) response returned by + `post_list_backups` will be passed to + `post_list_backups_with_metadata`. + """ + return response, metadata + def pre_list_metadata_imports( self, request: metastore.ListMetadataImportsRequest, @@ -499,12 +752,37 @@ def post_list_metadata_imports( ) -> metastore.ListMetadataImportsResponse: """Post-rpc interceptor for list_metadata_imports - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_metadata_imports_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response after it is returned by the DataprocMetastore server but before - it is returned to user code. + it is returned to user code. This `post_list_metadata_imports` interceptor runs + before the `post_list_metadata_imports_with_metadata` interceptor. """ return response + def post_list_metadata_imports_with_metadata( + self, + response: metastore.ListMetadataImportsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + metastore.ListMetadataImportsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_metadata_imports + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataprocMetastore server but before it is returned to user code. + + We recommend only using this `post_list_metadata_imports_with_metadata` + interceptor in new development instead of the `post_list_metadata_imports` interceptor. + When both interceptors are used, this `post_list_metadata_imports_with_metadata` interceptor runs after the + `post_list_metadata_imports` interceptor. The (possibly modified) response returned by + `post_list_metadata_imports` will be passed to + `post_list_metadata_imports_with_metadata`. + """ + return response, metadata + def pre_list_services( self, request: metastore.ListServicesRequest, @@ -522,12 +800,35 @@ def post_list_services( ) -> metastore.ListServicesResponse: """Post-rpc interceptor for list_services - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_services_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DataprocMetastore server but before - it is returned to user code. + it is returned to user code. This `post_list_services` interceptor runs + before the `post_list_services_with_metadata` interceptor. """ return response + def post_list_services_with_metadata( + self, + response: metastore.ListServicesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[metastore.ListServicesResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_services + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataprocMetastore server but before it is returned to user code. + + We recommend only using this `post_list_services_with_metadata` + interceptor in new development instead of the `post_list_services` interceptor. + When both interceptors are used, this `post_list_services_with_metadata` interceptor runs after the + `post_list_services` interceptor. The (possibly modified) response returned by + `post_list_services` will be passed to + `post_list_services_with_metadata`. + """ + return response, metadata + def pre_move_table_to_database( self, request: metastore.MoveTableToDatabaseRequest, @@ -547,12 +848,35 @@ def post_move_table_to_database( ) -> operations_pb2.Operation: """Post-rpc interceptor for move_table_to_database - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_move_table_to_database_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DataprocMetastore server but before - it is returned to user code. + it is returned to user code. This `post_move_table_to_database` interceptor runs + before the `post_move_table_to_database_with_metadata` interceptor. 
""" return response + def post_move_table_to_database_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for move_table_to_database + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataprocMetastore server but before it is returned to user code. + + We recommend only using this `post_move_table_to_database_with_metadata` + interceptor in new development instead of the `post_move_table_to_database` interceptor. + When both interceptors are used, this `post_move_table_to_database_with_metadata` interceptor runs after the + `post_move_table_to_database` interceptor. The (possibly modified) response returned by + `post_move_table_to_database` will be passed to + `post_move_table_to_database_with_metadata`. + """ + return response, metadata + def pre_query_metadata( self, request: metastore.QueryMetadataRequest, @@ -570,12 +894,35 @@ def post_query_metadata( ) -> operations_pb2.Operation: """Post-rpc interceptor for query_metadata - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_query_metadata_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DataprocMetastore server but before - it is returned to user code. + it is returned to user code. This `post_query_metadata` interceptor runs + before the `post_query_metadata_with_metadata` interceptor. """ return response + def post_query_metadata_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for query_metadata + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataprocMetastore server but before it is returned to user code. + + We recommend only using this `post_query_metadata_with_metadata` + interceptor in new development instead of the `post_query_metadata` interceptor. + When both interceptors are used, this `post_query_metadata_with_metadata` interceptor runs after the + `post_query_metadata` interceptor. The (possibly modified) response returned by + `post_query_metadata` will be passed to + `post_query_metadata_with_metadata`. + """ + return response, metadata + def pre_restore_service( self, request: metastore.RestoreServiceRequest, @@ -595,12 +942,35 @@ def post_restore_service( ) -> operations_pb2.Operation: """Post-rpc interceptor for restore_service - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_restore_service_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DataprocMetastore server but before - it is returned to user code. + it is returned to user code. This `post_restore_service` interceptor runs + before the `post_restore_service_with_metadata` interceptor. 
""" return response + def post_restore_service_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for restore_service + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataprocMetastore server but before it is returned to user code. + + We recommend only using this `post_restore_service_with_metadata` + interceptor in new development instead of the `post_restore_service` interceptor. + When both interceptors are used, this `post_restore_service_with_metadata` interceptor runs after the + `post_restore_service` interceptor. The (possibly modified) response returned by + `post_restore_service` will be passed to + `post_restore_service_with_metadata`. + """ + return response, metadata + def pre_update_metadata_import( self, request: metastore.UpdateMetadataImportRequest, @@ -620,12 +990,35 @@ def post_update_metadata_import( ) -> operations_pb2.Operation: """Post-rpc interceptor for update_metadata_import - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_metadata_import_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DataprocMetastore server but before - it is returned to user code. + it is returned to user code. This `post_update_metadata_import` interceptor runs + before the `post_update_metadata_import_with_metadata` interceptor. """ return response + def post_update_metadata_import_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_metadata_import + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataprocMetastore server but before it is returned to user code. + + We recommend only using this `post_update_metadata_import_with_metadata` + interceptor in new development instead of the `post_update_metadata_import` interceptor. + When both interceptors are used, this `post_update_metadata_import_with_metadata` interceptor runs after the + `post_update_metadata_import` interceptor. The (possibly modified) response returned by + `post_update_metadata_import` will be passed to + `post_update_metadata_import_with_metadata`. + """ + return response, metadata + def pre_update_service( self, request: metastore.UpdateServiceRequest, @@ -643,12 +1036,35 @@ def post_update_service( ) -> operations_pb2.Operation: """Post-rpc interceptor for update_service - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_service_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DataprocMetastore server but before - it is returned to user code. + it is returned to user code. This `post_update_service` interceptor runs + before the `post_update_service_with_metadata` interceptor. 
""" return response + def post_update_service_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_service + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataprocMetastore server but before it is returned to user code. + + We recommend only using this `post_update_service_with_metadata` + interceptor in new development instead of the `post_update_service` interceptor. + When both interceptors are used, this `post_update_service_with_metadata` interceptor runs after the + `post_update_service` interceptor. The (possibly modified) response returned by + `post_update_service` will be passed to + `post_update_service_with_metadata`. + """ + return response, metadata + def pre_get_location( self, request: locations_pb2.GetLocationRequest, @@ -1158,6 +1574,13 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_alter_metadata_resource_location(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_alter_metadata_resource_location_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1306,6 +1729,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_backup(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_backup_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1459,6 +1886,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_metadata_import(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_metadata_import_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1608,6 +2039,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_service(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_service_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1750,6 +2185,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_backup(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_backup_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1893,6 +2332,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_service(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_service_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2042,6 +2485,10 
@@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_export_metadata(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_export_metadata_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2183,6 +2630,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_backup(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_backup_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2329,6 +2780,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_metadata_import(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_metadata_import_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2472,6 +2927,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_service(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_service_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2615,6 +3074,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_backups(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_backups_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2763,6 +3226,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_metadata_imports(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_metadata_imports_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2908,6 +3375,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_services(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_services_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3061,6 +3532,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_move_table_to_database(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_move_table_to_database_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3210,6 +3685,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_query_metadata(resp) + response_metadata = [(k, str(v)) for k, v in 
response.headers.items()] + resp, _ = self._interceptor.post_query_metadata_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3358,6 +3837,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_restore_service(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_restore_service_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3511,6 +3994,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_metadata_import(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_metadata_import_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3660,6 +4147,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_service(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_service_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1/services/dataproc_metastore_federation/client.py b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1/services/dataproc_metastore_federation/client.py index 43f8b93d47f8..6cc6470fe8b5 100644 --- a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1/services/dataproc_metastore_federation/client.py +++ b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1/services/dataproc_metastore_federation/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -517,6 +519,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -1434,16 +1463,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -1489,16 +1522,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def delete_operation( self, @@ -1721,16 +1758,20 @@ def set_iam_policy( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_iam_policy( self, @@ -1843,16 +1884,20 @@ def get_iam_policy( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def test_iam_permissions( self, @@ -1903,16 +1948,20 @@ def test_iam_permissions( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_location( self, @@ -1958,16 +2007,20 @@ def get_location( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def list_locations( self, @@ -2013,16 +2066,20 @@ def list_locations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. 
+ return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1/services/dataproc_metastore_federation/transports/rest.py b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1/services/dataproc_metastore_federation/transports/rest.py index ed961bcdfe5a..2b80a3dd2ae2 100644 --- a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1/services/dataproc_metastore_federation/transports/rest.py +++ b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1/services/dataproc_metastore_federation/transports/rest.py @@ -138,12 +138,35 @@ def post_create_federation( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_federation - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_federation_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DataprocMetastoreFederation server but before - it is returned to user code. + it is returned to user code. This `post_create_federation` interceptor runs + before the `post_create_federation_with_metadata` interceptor. """ return response + def post_create_federation_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_federation + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataprocMetastoreFederation server but before it is returned to user code. + + We recommend only using this `post_create_federation_with_metadata` + interceptor in new development instead of the `post_create_federation` interceptor. + When both interceptors are used, this `post_create_federation_with_metadata` interceptor runs after the + `post_create_federation` interceptor. The (possibly modified) response returned by + `post_create_federation` will be passed to + `post_create_federation_with_metadata`. + """ + return response, metadata + def pre_delete_federation( self, request: metastore_federation.DeleteFederationRequest, @@ -164,12 +187,35 @@ def post_delete_federation( ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_federation - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_federation_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DataprocMetastoreFederation server but before - it is returned to user code. + it is returned to user code. This `post_delete_federation` interceptor runs + before the `post_delete_federation_with_metadata` interceptor. """ return response + def post_delete_federation_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_federation + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataprocMetastoreFederation server but before it is returned to user code. 
+ + We recommend only using this `post_delete_federation_with_metadata` + interceptor in new development instead of the `post_delete_federation` interceptor. + When both interceptors are used, this `post_delete_federation_with_metadata` interceptor runs after the + `post_delete_federation` interceptor. The (possibly modified) response returned by + `post_delete_federation` will be passed to + `post_delete_federation_with_metadata`. + """ + return response, metadata + def pre_get_federation( self, request: metastore_federation.GetFederationRequest, @@ -190,12 +236,37 @@ def post_get_federation( ) -> metastore_federation.Federation: """Post-rpc interceptor for get_federation - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_federation_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DataprocMetastoreFederation server but before - it is returned to user code. + it is returned to user code. This `post_get_federation` interceptor runs + before the `post_get_federation_with_metadata` interceptor. """ return response + def post_get_federation_with_metadata( + self, + response: metastore_federation.Federation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + metastore_federation.Federation, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for get_federation + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataprocMetastoreFederation server but before it is returned to user code. + + We recommend only using this `post_get_federation_with_metadata` + interceptor in new development instead of the `post_get_federation` interceptor. + When both interceptors are used, this `post_get_federation_with_metadata` interceptor runs after the + `post_get_federation` interceptor. The (possibly modified) response returned by + `post_get_federation` will be passed to + `post_get_federation_with_metadata`. + """ + return response, metadata + def pre_list_federations( self, request: metastore_federation.ListFederationsRequest, @@ -216,12 +287,38 @@ def post_list_federations( ) -> metastore_federation.ListFederationsResponse: """Post-rpc interceptor for list_federations - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_federations_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DataprocMetastoreFederation server but before - it is returned to user code. + it is returned to user code. This `post_list_federations` interceptor runs + before the `post_list_federations_with_metadata` interceptor. """ return response + def post_list_federations_with_metadata( + self, + response: metastore_federation.ListFederationsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + metastore_federation.ListFederationsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_federations + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataprocMetastoreFederation server but before it is returned to user code. + + We recommend only using this `post_list_federations_with_metadata` + interceptor in new development instead of the `post_list_federations` interceptor. 
+ When both interceptors are used, this `post_list_federations_with_metadata` interceptor runs after the + `post_list_federations` interceptor. The (possibly modified) response returned by + `post_list_federations` will be passed to + `post_list_federations_with_metadata`. + """ + return response, metadata + def pre_update_federation( self, request: metastore_federation.UpdateFederationRequest, @@ -242,12 +339,35 @@ def post_update_federation( ) -> operations_pb2.Operation: """Post-rpc interceptor for update_federation - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_federation_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DataprocMetastoreFederation server but before - it is returned to user code. + it is returned to user code. This `post_update_federation` interceptor runs + before the `post_update_federation_with_metadata` interceptor. """ return response + def post_update_federation_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_federation + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataprocMetastoreFederation server but before it is returned to user code. + + We recommend only using this `post_update_federation_with_metadata` + interceptor in new development instead of the `post_update_federation` interceptor. + When both interceptors are used, this `post_update_federation_with_metadata` interceptor runs after the + `post_update_federation` interceptor. The (possibly modified) response returned by + `post_update_federation` will be passed to + `post_update_federation_with_metadata`. 
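The same two-step dispatch applies to the federation service: the generated __call__ methods run the legacy hook first and pass its (possibly modified) result, together with the response headers, to the corresponding *_with_metadata hook. A small sketch that makes the ordering visible when both hooks are overridden; the base class name is assumed from the module path in this diff:

from google.cloud.metastore_v1.services.dataproc_metastore_federation.transports.rest import (
    DataprocMetastoreFederationRestInterceptor,
)


class OrderingDemoInterceptor(DataprocMetastoreFederationRestInterceptor):
    def post_update_federation(self, response):
        print("legacy hook runs first")
        return response  # whatever is returned here is what the new hook receives

    def post_update_federation_with_metadata(self, response, metadata):
        print("new hook runs second with", len(metadata), "response header pairs")
        return response, metadata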
+ """ + return response, metadata + def pre_get_location( self, request: locations_pb2.GetLocationRequest, @@ -750,6 +870,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_federation(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_federation_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -894,6 +1018,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_federation(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_federation_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1039,6 +1167,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_federation(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_federation_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1184,6 +1316,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_federations(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_federations_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1336,6 +1472,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_federation(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_federation_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1alpha/gapic_version.py b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1alpha/gapic_version.py index 007d6040cbe0..8099b154e9b6 100644 --- a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1alpha/gapic_version.py +++ b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1alpha/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.17.0" # {x-release-please-version} +__version__ = "1.18.0" # {x-release-please-version} diff --git a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1alpha/services/dataproc_metastore/client.py b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1alpha/services/dataproc_metastore/client.py index 5dbf81c8c3d6..5aa8975736f1 100644 --- a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1alpha/services/dataproc_metastore/client.py +++ b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1alpha/services/dataproc_metastore/client.py @@ -14,6 +14,8 @@ # limitations under the License. 
# from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -627,6 +629,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -3192,16 +3221,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -3247,16 +3280,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def delete_operation( self, @@ -3479,16 +3516,20 @@ def set_iam_policy( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_iam_policy( self, @@ -3601,16 +3642,20 @@ def get_iam_policy( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def test_iam_permissions( self, @@ -3661,16 +3706,20 @@ def test_iam_permissions( # Validate the universe domain. self._validate_universe_domain() - # Send the request. 
- response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_location( self, @@ -3716,16 +3765,20 @@ def get_location( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def list_locations( self, @@ -3771,16 +3824,20 @@ def list_locations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1alpha/services/dataproc_metastore/transports/rest.py b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1alpha/services/dataproc_metastore/transports/rest.py index 8d87107ccdbf..2551e1e924d1 100644 --- a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1alpha/services/dataproc_metastore/transports/rest.py +++ b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1alpha/services/dataproc_metastore/transports/rest.py @@ -250,12 +250,35 @@ def post_alter_metadata_resource_location( ) -> operations_pb2.Operation: """Post-rpc interceptor for alter_metadata_resource_location - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_alter_metadata_resource_location_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DataprocMetastore server but before - it is returned to user code. + it is returned to user code. This `post_alter_metadata_resource_location` interceptor runs + before the `post_alter_metadata_resource_location_with_metadata` interceptor. """ return response + def post_alter_metadata_resource_location_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for alter_metadata_resource_location + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataprocMetastore server but before it is returned to user code. + + We recommend only using this `post_alter_metadata_resource_location_with_metadata` + interceptor in new development instead of the `post_alter_metadata_resource_location` interceptor. 
+ When both interceptors are used, this `post_alter_metadata_resource_location_with_metadata` interceptor runs after the + `post_alter_metadata_resource_location` interceptor. The (possibly modified) response returned by + `post_alter_metadata_resource_location` will be passed to + `post_alter_metadata_resource_location_with_metadata`. + """ + return response, metadata + def pre_create_backup( self, request: metastore.CreateBackupRequest, @@ -273,12 +296,35 @@ def post_create_backup( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_backup - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_backup_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DataprocMetastore server but before - it is returned to user code. + it is returned to user code. This `post_create_backup` interceptor runs + before the `post_create_backup_with_metadata` interceptor. """ return response + def post_create_backup_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_backup + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataprocMetastore server but before it is returned to user code. + + We recommend only using this `post_create_backup_with_metadata` + interceptor in new development instead of the `post_create_backup` interceptor. + When both interceptors are used, this `post_create_backup_with_metadata` interceptor runs after the + `post_create_backup` interceptor. The (possibly modified) response returned by + `post_create_backup` will be passed to + `post_create_backup_with_metadata`. + """ + return response, metadata + def pre_create_metadata_import( self, request: metastore.CreateMetadataImportRequest, @@ -298,12 +344,35 @@ def post_create_metadata_import( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_metadata_import - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_metadata_import_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DataprocMetastore server but before - it is returned to user code. + it is returned to user code. This `post_create_metadata_import` interceptor runs + before the `post_create_metadata_import_with_metadata` interceptor. """ return response + def post_create_metadata_import_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_metadata_import + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataprocMetastore server but before it is returned to user code. + + We recommend only using this `post_create_metadata_import_with_metadata` + interceptor in new development instead of the `post_create_metadata_import` interceptor. + When both interceptors are used, this `post_create_metadata_import_with_metadata` interceptor runs after the + `post_create_metadata_import` interceptor. The (possibly modified) response returned by + `post_create_metadata_import` will be passed to + `post_create_metadata_import_with_metadata`. 
+ """ + return response, metadata + def pre_create_service( self, request: metastore.CreateServiceRequest, @@ -321,12 +390,35 @@ def post_create_service( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_service - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_service_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DataprocMetastore server but before - it is returned to user code. + it is returned to user code. This `post_create_service` interceptor runs + before the `post_create_service_with_metadata` interceptor. """ return response + def post_create_service_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_service + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataprocMetastore server but before it is returned to user code. + + We recommend only using this `post_create_service_with_metadata` + interceptor in new development instead of the `post_create_service` interceptor. + When both interceptors are used, this `post_create_service_with_metadata` interceptor runs after the + `post_create_service` interceptor. The (possibly modified) response returned by + `post_create_service` will be passed to + `post_create_service_with_metadata`. + """ + return response, metadata + def pre_delete_backup( self, request: metastore.DeleteBackupRequest, @@ -344,12 +436,35 @@ def post_delete_backup( ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_backup - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_backup_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DataprocMetastore server but before - it is returned to user code. + it is returned to user code. This `post_delete_backup` interceptor runs + before the `post_delete_backup_with_metadata` interceptor. """ return response + def post_delete_backup_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_backup + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataprocMetastore server but before it is returned to user code. + + We recommend only using this `post_delete_backup_with_metadata` + interceptor in new development instead of the `post_delete_backup` interceptor. + When both interceptors are used, this `post_delete_backup_with_metadata` interceptor runs after the + `post_delete_backup` interceptor. The (possibly modified) response returned by + `post_delete_backup` will be passed to + `post_delete_backup_with_metadata`. + """ + return response, metadata + def pre_delete_service( self, request: metastore.DeleteServiceRequest, @@ -367,12 +482,35 @@ def post_delete_service( ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_service - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_service_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response after it is returned by the DataprocMetastore server but before - it is returned to user code. + it is returned to user code. This `post_delete_service` interceptor runs + before the `post_delete_service_with_metadata` interceptor. """ return response + def post_delete_service_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_service + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataprocMetastore server but before it is returned to user code. + + We recommend only using this `post_delete_service_with_metadata` + interceptor in new development instead of the `post_delete_service` interceptor. + When both interceptors are used, this `post_delete_service_with_metadata` interceptor runs after the + `post_delete_service` interceptor. The (possibly modified) response returned by + `post_delete_service` will be passed to + `post_delete_service_with_metadata`. + """ + return response, metadata + def pre_export_metadata( self, request: metastore.ExportMetadataRequest, @@ -392,12 +530,35 @@ def post_export_metadata( ) -> operations_pb2.Operation: """Post-rpc interceptor for export_metadata - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_export_metadata_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DataprocMetastore server but before - it is returned to user code. + it is returned to user code. This `post_export_metadata` interceptor runs + before the `post_export_metadata_with_metadata` interceptor. """ return response + def post_export_metadata_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for export_metadata + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataprocMetastore server but before it is returned to user code. + + We recommend only using this `post_export_metadata_with_metadata` + interceptor in new development instead of the `post_export_metadata` interceptor. + When both interceptors are used, this `post_export_metadata_with_metadata` interceptor runs after the + `post_export_metadata` interceptor. The (possibly modified) response returned by + `post_export_metadata` will be passed to + `post_export_metadata_with_metadata`. + """ + return response, metadata + def pre_get_backup( self, request: metastore.GetBackupRequest, @@ -413,12 +574,35 @@ def pre_get_backup( def post_get_backup(self, response: metastore.Backup) -> metastore.Backup: """Post-rpc interceptor for get_backup - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_backup_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DataprocMetastore server but before - it is returned to user code. + it is returned to user code. This `post_get_backup` interceptor runs + before the `post_get_backup_with_metadata` interceptor. 
""" return response + def post_get_backup_with_metadata( + self, + response: metastore.Backup, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[metastore.Backup, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_backup + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataprocMetastore server but before it is returned to user code. + + We recommend only using this `post_get_backup_with_metadata` + interceptor in new development instead of the `post_get_backup` interceptor. + When both interceptors are used, this `post_get_backup_with_metadata` interceptor runs after the + `post_get_backup` interceptor. The (possibly modified) response returned by + `post_get_backup` will be passed to + `post_get_backup_with_metadata`. + """ + return response, metadata + def pre_get_metadata_import( self, request: metastore.GetMetadataImportRequest, @@ -438,12 +622,35 @@ def post_get_metadata_import( ) -> metastore.MetadataImport: """Post-rpc interceptor for get_metadata_import - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_metadata_import_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DataprocMetastore server but before - it is returned to user code. + it is returned to user code. This `post_get_metadata_import` interceptor runs + before the `post_get_metadata_import_with_metadata` interceptor. """ return response + def post_get_metadata_import_with_metadata( + self, + response: metastore.MetadataImport, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[metastore.MetadataImport, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_metadata_import + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataprocMetastore server but before it is returned to user code. + + We recommend only using this `post_get_metadata_import_with_metadata` + interceptor in new development instead of the `post_get_metadata_import` interceptor. + When both interceptors are used, this `post_get_metadata_import_with_metadata` interceptor runs after the + `post_get_metadata_import` interceptor. The (possibly modified) response returned by + `post_get_metadata_import` will be passed to + `post_get_metadata_import_with_metadata`. + """ + return response, metadata + def pre_get_service( self, request: metastore.GetServiceRequest, @@ -459,12 +666,35 @@ def pre_get_service( def post_get_service(self, response: metastore.Service) -> metastore.Service: """Post-rpc interceptor for get_service - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_service_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DataprocMetastore server but before - it is returned to user code. + it is returned to user code. This `post_get_service` interceptor runs + before the `post_get_service_with_metadata` interceptor. 
""" return response + def post_get_service_with_metadata( + self, + response: metastore.Service, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[metastore.Service, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_service + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataprocMetastore server but before it is returned to user code. + + We recommend only using this `post_get_service_with_metadata` + interceptor in new development instead of the `post_get_service` interceptor. + When both interceptors are used, this `post_get_service_with_metadata` interceptor runs after the + `post_get_service` interceptor. The (possibly modified) response returned by + `post_get_service` will be passed to + `post_get_service_with_metadata`. + """ + return response, metadata + def pre_list_backups( self, request: metastore.ListBackupsRequest, @@ -482,12 +712,35 @@ def post_list_backups( ) -> metastore.ListBackupsResponse: """Post-rpc interceptor for list_backups - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_backups_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DataprocMetastore server but before - it is returned to user code. + it is returned to user code. This `post_list_backups` interceptor runs + before the `post_list_backups_with_metadata` interceptor. """ return response + def post_list_backups_with_metadata( + self, + response: metastore.ListBackupsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[metastore.ListBackupsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_backups + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataprocMetastore server but before it is returned to user code. + + We recommend only using this `post_list_backups_with_metadata` + interceptor in new development instead of the `post_list_backups` interceptor. + When both interceptors are used, this `post_list_backups_with_metadata` interceptor runs after the + `post_list_backups` interceptor. The (possibly modified) response returned by + `post_list_backups` will be passed to + `post_list_backups_with_metadata`. + """ + return response, metadata + def pre_list_metadata_imports( self, request: metastore.ListMetadataImportsRequest, @@ -507,12 +760,37 @@ def post_list_metadata_imports( ) -> metastore.ListMetadataImportsResponse: """Post-rpc interceptor for list_metadata_imports - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_metadata_imports_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DataprocMetastore server but before - it is returned to user code. + it is returned to user code. This `post_list_metadata_imports` interceptor runs + before the `post_list_metadata_imports_with_metadata` interceptor. 
""" return response + def post_list_metadata_imports_with_metadata( + self, + response: metastore.ListMetadataImportsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + metastore.ListMetadataImportsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_metadata_imports + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataprocMetastore server but before it is returned to user code. + + We recommend only using this `post_list_metadata_imports_with_metadata` + interceptor in new development instead of the `post_list_metadata_imports` interceptor. + When both interceptors are used, this `post_list_metadata_imports_with_metadata` interceptor runs after the + `post_list_metadata_imports` interceptor. The (possibly modified) response returned by + `post_list_metadata_imports` will be passed to + `post_list_metadata_imports_with_metadata`. + """ + return response, metadata + def pre_list_services( self, request: metastore.ListServicesRequest, @@ -530,12 +808,35 @@ def post_list_services( ) -> metastore.ListServicesResponse: """Post-rpc interceptor for list_services - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_services_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DataprocMetastore server but before - it is returned to user code. + it is returned to user code. This `post_list_services` interceptor runs + before the `post_list_services_with_metadata` interceptor. """ return response + def post_list_services_with_metadata( + self, + response: metastore.ListServicesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[metastore.ListServicesResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_services + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataprocMetastore server but before it is returned to user code. + + We recommend only using this `post_list_services_with_metadata` + interceptor in new development instead of the `post_list_services` interceptor. + When both interceptors are used, this `post_list_services_with_metadata` interceptor runs after the + `post_list_services` interceptor. The (possibly modified) response returned by + `post_list_services` will be passed to + `post_list_services_with_metadata`. + """ + return response, metadata + def pre_move_table_to_database( self, request: metastore.MoveTableToDatabaseRequest, @@ -555,12 +856,35 @@ def post_move_table_to_database( ) -> operations_pb2.Operation: """Post-rpc interceptor for move_table_to_database - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_move_table_to_database_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DataprocMetastore server but before - it is returned to user code. + it is returned to user code. This `post_move_table_to_database` interceptor runs + before the `post_move_table_to_database_with_metadata` interceptor. 
""" return response + def post_move_table_to_database_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for move_table_to_database + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataprocMetastore server but before it is returned to user code. + + We recommend only using this `post_move_table_to_database_with_metadata` + interceptor in new development instead of the `post_move_table_to_database` interceptor. + When both interceptors are used, this `post_move_table_to_database_with_metadata` interceptor runs after the + `post_move_table_to_database` interceptor. The (possibly modified) response returned by + `post_move_table_to_database` will be passed to + `post_move_table_to_database_with_metadata`. + """ + return response, metadata + def pre_query_metadata( self, request: metastore.QueryMetadataRequest, @@ -578,12 +902,35 @@ def post_query_metadata( ) -> operations_pb2.Operation: """Post-rpc interceptor for query_metadata - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_query_metadata_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DataprocMetastore server but before - it is returned to user code. + it is returned to user code. This `post_query_metadata` interceptor runs + before the `post_query_metadata_with_metadata` interceptor. """ return response + def post_query_metadata_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for query_metadata + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataprocMetastore server but before it is returned to user code. + + We recommend only using this `post_query_metadata_with_metadata` + interceptor in new development instead of the `post_query_metadata` interceptor. + When both interceptors are used, this `post_query_metadata_with_metadata` interceptor runs after the + `post_query_metadata` interceptor. The (possibly modified) response returned by + `post_query_metadata` will be passed to + `post_query_metadata_with_metadata`. + """ + return response, metadata + def pre_remove_iam_policy( self, request: metastore.RemoveIamPolicyRequest, @@ -603,12 +950,37 @@ def post_remove_iam_policy( ) -> metastore.RemoveIamPolicyResponse: """Post-rpc interceptor for remove_iam_policy - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_remove_iam_policy_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DataprocMetastore server but before - it is returned to user code. + it is returned to user code. This `post_remove_iam_policy` interceptor runs + before the `post_remove_iam_policy_with_metadata` interceptor. 
""" return response + def post_remove_iam_policy_with_metadata( + self, + response: metastore.RemoveIamPolicyResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + metastore.RemoveIamPolicyResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for remove_iam_policy + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataprocMetastore server but before it is returned to user code. + + We recommend only using this `post_remove_iam_policy_with_metadata` + interceptor in new development instead of the `post_remove_iam_policy` interceptor. + When both interceptors are used, this `post_remove_iam_policy_with_metadata` interceptor runs after the + `post_remove_iam_policy` interceptor. The (possibly modified) response returned by + `post_remove_iam_policy` will be passed to + `post_remove_iam_policy_with_metadata`. + """ + return response, metadata + def pre_restore_service( self, request: metastore.RestoreServiceRequest, @@ -628,12 +1000,35 @@ def post_restore_service( ) -> operations_pb2.Operation: """Post-rpc interceptor for restore_service - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_restore_service_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DataprocMetastore server but before - it is returned to user code. + it is returned to user code. This `post_restore_service` interceptor runs + before the `post_restore_service_with_metadata` interceptor. """ return response + def post_restore_service_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for restore_service + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataprocMetastore server but before it is returned to user code. + + We recommend only using this `post_restore_service_with_metadata` + interceptor in new development instead of the `post_restore_service` interceptor. + When both interceptors are used, this `post_restore_service_with_metadata` interceptor runs after the + `post_restore_service` interceptor. The (possibly modified) response returned by + `post_restore_service` will be passed to + `post_restore_service_with_metadata`. + """ + return response, metadata + def pre_update_metadata_import( self, request: metastore.UpdateMetadataImportRequest, @@ -653,12 +1048,35 @@ def post_update_metadata_import( ) -> operations_pb2.Operation: """Post-rpc interceptor for update_metadata_import - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_metadata_import_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DataprocMetastore server but before - it is returned to user code. + it is returned to user code. This `post_update_metadata_import` interceptor runs + before the `post_update_metadata_import_with_metadata` interceptor. 
""" return response + def post_update_metadata_import_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_metadata_import + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataprocMetastore server but before it is returned to user code. + + We recommend only using this `post_update_metadata_import_with_metadata` + interceptor in new development instead of the `post_update_metadata_import` interceptor. + When both interceptors are used, this `post_update_metadata_import_with_metadata` interceptor runs after the + `post_update_metadata_import` interceptor. The (possibly modified) response returned by + `post_update_metadata_import` will be passed to + `post_update_metadata_import_with_metadata`. + """ + return response, metadata + def pre_update_service( self, request: metastore.UpdateServiceRequest, @@ -676,12 +1094,35 @@ def post_update_service( ) -> operations_pb2.Operation: """Post-rpc interceptor for update_service - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_service_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DataprocMetastore server but before - it is returned to user code. + it is returned to user code. This `post_update_service` interceptor runs + before the `post_update_service_with_metadata` interceptor. """ return response + def post_update_service_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_service + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataprocMetastore server but before it is returned to user code. + + We recommend only using this `post_update_service_with_metadata` + interceptor in new development instead of the `post_update_service` interceptor. + When both interceptors are used, this `post_update_service_with_metadata` interceptor runs after the + `post_update_service` interceptor. The (possibly modified) response returned by + `post_update_service` will be passed to + `post_update_service_with_metadata`. 
+ """ + return response, metadata + def pre_get_location( self, request: locations_pb2.GetLocationRequest, @@ -1191,6 +1632,13 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_alter_metadata_resource_location(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_alter_metadata_resource_location_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1339,6 +1787,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_backup(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_backup_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1492,6 +1944,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_metadata_import(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_metadata_import_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1641,6 +2097,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_service(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_service_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1783,6 +2243,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_backup(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_backup_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1926,6 +2390,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_service(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_service_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2075,6 +2543,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_export_metadata(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_export_metadata_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2216,6 +2688,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_backup(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_backup_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2362,6 +2838,10 @@ def __call__( 
json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_metadata_import(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_metadata_import_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2505,6 +2985,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_service(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_service_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2648,6 +3132,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_backups(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_backups_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2796,6 +3284,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_metadata_imports(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_metadata_imports_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2941,6 +3433,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_services(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_services_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3094,6 +3590,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_move_table_to_database(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_move_table_to_database_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3243,6 +3743,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_query_metadata(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_query_metadata_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3395,6 +3899,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_remove_iam_policy(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_remove_iam_policy_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3545,6 +4053,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_restore_service(resp) + response_metadata = [(k, str(v)) for k, v in 
response.headers.items()] + resp, _ = self._interceptor.post_restore_service_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3698,6 +4210,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_metadata_import(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_metadata_import_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3847,6 +4363,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_service(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_service_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1alpha/services/dataproc_metastore_federation/client.py b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1alpha/services/dataproc_metastore_federation/client.py index 3e36593e5b05..4ecb6e74b256 100644 --- a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1alpha/services/dataproc_metastore_federation/client.py +++ b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1alpha/services/dataproc_metastore_federation/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -517,6 +519,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -1434,16 +1463,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -1489,16 +1522,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. 
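
In each `__call__` hunk above, the transport now builds the metadata it hands to the `*_with_metadata` interceptor directly from the HTTP response headers. The conversion is a single list comprehension; a tiny sketch with a made-up headers dict (in the transport it is `response.headers`):

headers = {
    "content-type": "application/json",
    "x-goog-request-params": "name=projects/p/locations/l/services/s",
}
response_metadata = [(k, str(v)) for k, v in headers.items()]
print(response_metadata)
# [('content-type', 'application/json'),
#  ('x-goog-request-params', 'name=projects/p/locations/l/services/s')]

Note that the metadata element of the tuple returned by the interceptor is discarded at the call sites (`resp, _ = ...`), so mutating the metadata inside a `*_with_metadata` hook does not change what the transport's caller ultimately sees.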
- response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def delete_operation( self, @@ -1721,16 +1758,20 @@ def set_iam_policy( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_iam_policy( self, @@ -1843,16 +1884,20 @@ def get_iam_policy( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def test_iam_permissions( self, @@ -1903,16 +1948,20 @@ def test_iam_permissions( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_location( self, @@ -1958,16 +2007,20 @@ def get_location( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def list_locations( self, @@ -2013,16 +2066,20 @@ def list_locations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. 
+ return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1alpha/services/dataproc_metastore_federation/transports/rest.py b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1alpha/services/dataproc_metastore_federation/transports/rest.py index 4be6fa9975c3..b634251081db 100644 --- a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1alpha/services/dataproc_metastore_federation/transports/rest.py +++ b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1alpha/services/dataproc_metastore_federation/transports/rest.py @@ -138,12 +138,35 @@ def post_create_federation( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_federation - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_federation_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DataprocMetastoreFederation server but before - it is returned to user code. + it is returned to user code. This `post_create_federation` interceptor runs + before the `post_create_federation_with_metadata` interceptor. """ return response + def post_create_federation_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_federation + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataprocMetastoreFederation server but before it is returned to user code. + + We recommend only using this `post_create_federation_with_metadata` + interceptor in new development instead of the `post_create_federation` interceptor. + When both interceptors are used, this `post_create_federation_with_metadata` interceptor runs after the + `post_create_federation` interceptor. The (possibly modified) response returned by + `post_create_federation` will be passed to + `post_create_federation_with_metadata`. + """ + return response, metadata + def pre_delete_federation( self, request: metastore_federation.DeleteFederationRequest, @@ -164,12 +187,35 @@ def post_delete_federation( ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_federation - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_federation_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DataprocMetastoreFederation server but before - it is returned to user code. + it is returned to user code. This `post_delete_federation` interceptor runs + before the `post_delete_federation_with_metadata` interceptor. """ return response + def post_delete_federation_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_federation + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataprocMetastoreFederation server but before it is returned to user code. 
+ + We recommend only using this `post_delete_federation_with_metadata` + interceptor in new development instead of the `post_delete_federation` interceptor. + When both interceptors are used, this `post_delete_federation_with_metadata` interceptor runs after the + `post_delete_federation` interceptor. The (possibly modified) response returned by + `post_delete_federation` will be passed to + `post_delete_federation_with_metadata`. + """ + return response, metadata + def pre_get_federation( self, request: metastore_federation.GetFederationRequest, @@ -190,12 +236,37 @@ def post_get_federation( ) -> metastore_federation.Federation: """Post-rpc interceptor for get_federation - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_federation_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DataprocMetastoreFederation server but before - it is returned to user code. + it is returned to user code. This `post_get_federation` interceptor runs + before the `post_get_federation_with_metadata` interceptor. """ return response + def post_get_federation_with_metadata( + self, + response: metastore_federation.Federation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + metastore_federation.Federation, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for get_federation + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataprocMetastoreFederation server but before it is returned to user code. + + We recommend only using this `post_get_federation_with_metadata` + interceptor in new development instead of the `post_get_federation` interceptor. + When both interceptors are used, this `post_get_federation_with_metadata` interceptor runs after the + `post_get_federation` interceptor. The (possibly modified) response returned by + `post_get_federation` will be passed to + `post_get_federation_with_metadata`. + """ + return response, metadata + def pre_list_federations( self, request: metastore_federation.ListFederationsRequest, @@ -216,12 +287,38 @@ def post_list_federations( ) -> metastore_federation.ListFederationsResponse: """Post-rpc interceptor for list_federations - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_federations_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DataprocMetastoreFederation server but before - it is returned to user code. + it is returned to user code. This `post_list_federations` interceptor runs + before the `post_list_federations_with_metadata` interceptor. """ return response + def post_list_federations_with_metadata( + self, + response: metastore_federation.ListFederationsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + metastore_federation.ListFederationsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_federations + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataprocMetastoreFederation server but before it is returned to user code. + + We recommend only using this `post_list_federations_with_metadata` + interceptor in new development instead of the `post_list_federations` interceptor. 
+ When both interceptors are used, this `post_list_federations_with_metadata` interceptor runs after the + `post_list_federations` interceptor. The (possibly modified) response returned by + `post_list_federations` will be passed to + `post_list_federations_with_metadata`. + """ + return response, metadata + def pre_update_federation( self, request: metastore_federation.UpdateFederationRequest, @@ -242,12 +339,35 @@ def post_update_federation( ) -> operations_pb2.Operation: """Post-rpc interceptor for update_federation - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_federation_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DataprocMetastoreFederation server but before - it is returned to user code. + it is returned to user code. This `post_update_federation` interceptor runs + before the `post_update_federation_with_metadata` interceptor. """ return response + def post_update_federation_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_federation + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataprocMetastoreFederation server but before it is returned to user code. + + We recommend only using this `post_update_federation_with_metadata` + interceptor in new development instead of the `post_update_federation` interceptor. + When both interceptors are used, this `post_update_federation_with_metadata` interceptor runs after the + `post_update_federation` interceptor. The (possibly modified) response returned by + `post_update_federation` will be passed to + `post_update_federation_with_metadata`. 
+ """ + return response, metadata + def pre_get_location( self, request: locations_pb2.GetLocationRequest, @@ -750,6 +870,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_federation(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_federation_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -894,6 +1018,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_federation(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_federation_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1039,6 +1167,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_federation(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_federation_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1184,6 +1316,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_federations(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_federations_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1336,6 +1472,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_federation(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_federation_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1beta/gapic_version.py b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1beta/gapic_version.py index 007d6040cbe0..8099b154e9b6 100644 --- a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1beta/gapic_version.py +++ b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1beta/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.17.0" # {x-release-please-version} +__version__ = "1.18.0" # {x-release-please-version} diff --git a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1beta/services/dataproc_metastore/client.py b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1beta/services/dataproc_metastore/client.py index 10aa27d56cab..b5be6a297c87 100644 --- a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1beta/services/dataproc_metastore/client.py +++ b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1beta/services/dataproc_metastore/client.py @@ -14,6 +14,8 @@ # limitations under the License. 
# from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -627,6 +629,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -3192,16 +3221,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -3247,16 +3280,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def delete_operation( self, @@ -3479,16 +3516,20 @@ def set_iam_policy( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_iam_policy( self, @@ -3601,16 +3642,20 @@ def get_iam_policy( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def test_iam_permissions( self, @@ -3661,16 +3706,20 @@ def test_iam_permissions( # Validate the universe domain. self._validate_universe_domain() - # Send the request. 
- response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_location( self, @@ -3716,16 +3765,20 @@ def get_location( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def list_locations( self, @@ -3771,16 +3824,20 @@ def list_locations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1beta/services/dataproc_metastore/transports/rest.py b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1beta/services/dataproc_metastore/transports/rest.py index 856a669588ce..942dbed1d7f2 100644 --- a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1beta/services/dataproc_metastore/transports/rest.py +++ b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1beta/services/dataproc_metastore/transports/rest.py @@ -250,12 +250,35 @@ def post_alter_metadata_resource_location( ) -> operations_pb2.Operation: """Post-rpc interceptor for alter_metadata_resource_location - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_alter_metadata_resource_location_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DataprocMetastore server but before - it is returned to user code. + it is returned to user code. This `post_alter_metadata_resource_location` interceptor runs + before the `post_alter_metadata_resource_location_with_metadata` interceptor. """ return response + def post_alter_metadata_resource_location_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for alter_metadata_resource_location + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataprocMetastore server but before it is returned to user code. + + We recommend only using this `post_alter_metadata_resource_location_with_metadata` + interceptor in new development instead of the `post_alter_metadata_resource_location` interceptor. 
+ When both interceptors are used, this `post_alter_metadata_resource_location_with_metadata` interceptor runs after the + `post_alter_metadata_resource_location` interceptor. The (possibly modified) response returned by + `post_alter_metadata_resource_location` will be passed to + `post_alter_metadata_resource_location_with_metadata`. + """ + return response, metadata + def pre_create_backup( self, request: metastore.CreateBackupRequest, @@ -273,12 +296,35 @@ def post_create_backup( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_backup - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_backup_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DataprocMetastore server but before - it is returned to user code. + it is returned to user code. This `post_create_backup` interceptor runs + before the `post_create_backup_with_metadata` interceptor. """ return response + def post_create_backup_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_backup + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataprocMetastore server but before it is returned to user code. + + We recommend only using this `post_create_backup_with_metadata` + interceptor in new development instead of the `post_create_backup` interceptor. + When both interceptors are used, this `post_create_backup_with_metadata` interceptor runs after the + `post_create_backup` interceptor. The (possibly modified) response returned by + `post_create_backup` will be passed to + `post_create_backup_with_metadata`. + """ + return response, metadata + def pre_create_metadata_import( self, request: metastore.CreateMetadataImportRequest, @@ -298,12 +344,35 @@ def post_create_metadata_import( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_metadata_import - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_metadata_import_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DataprocMetastore server but before - it is returned to user code. + it is returned to user code. This `post_create_metadata_import` interceptor runs + before the `post_create_metadata_import_with_metadata` interceptor. """ return response + def post_create_metadata_import_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_metadata_import + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataprocMetastore server but before it is returned to user code. + + We recommend only using this `post_create_metadata_import_with_metadata` + interceptor in new development instead of the `post_create_metadata_import` interceptor. + When both interceptors are used, this `post_create_metadata_import_with_metadata` interceptor runs after the + `post_create_metadata_import` interceptor. The (possibly modified) response returned by + `post_create_metadata_import` will be passed to + `post_create_metadata_import_with_metadata`. 
+ """ + return response, metadata + def pre_create_service( self, request: metastore.CreateServiceRequest, @@ -321,12 +390,35 @@ def post_create_service( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_service - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_service_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DataprocMetastore server but before - it is returned to user code. + it is returned to user code. This `post_create_service` interceptor runs + before the `post_create_service_with_metadata` interceptor. """ return response + def post_create_service_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_service + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataprocMetastore server but before it is returned to user code. + + We recommend only using this `post_create_service_with_metadata` + interceptor in new development instead of the `post_create_service` interceptor. + When both interceptors are used, this `post_create_service_with_metadata` interceptor runs after the + `post_create_service` interceptor. The (possibly modified) response returned by + `post_create_service` will be passed to + `post_create_service_with_metadata`. + """ + return response, metadata + def pre_delete_backup( self, request: metastore.DeleteBackupRequest, @@ -344,12 +436,35 @@ def post_delete_backup( ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_backup - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_backup_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DataprocMetastore server but before - it is returned to user code. + it is returned to user code. This `post_delete_backup` interceptor runs + before the `post_delete_backup_with_metadata` interceptor. """ return response + def post_delete_backup_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_backup + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataprocMetastore server but before it is returned to user code. + + We recommend only using this `post_delete_backup_with_metadata` + interceptor in new development instead of the `post_delete_backup` interceptor. + When both interceptors are used, this `post_delete_backup_with_metadata` interceptor runs after the + `post_delete_backup` interceptor. The (possibly modified) response returned by + `post_delete_backup` will be passed to + `post_delete_backup_with_metadata`. + """ + return response, metadata + def pre_delete_service( self, request: metastore.DeleteServiceRequest, @@ -367,12 +482,35 @@ def post_delete_service( ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_service - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_service_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response after it is returned by the DataprocMetastore server but before - it is returned to user code. + it is returned to user code. This `post_delete_service` interceptor runs + before the `post_delete_service_with_metadata` interceptor. """ return response + def post_delete_service_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_service + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataprocMetastore server but before it is returned to user code. + + We recommend only using this `post_delete_service_with_metadata` + interceptor in new development instead of the `post_delete_service` interceptor. + When both interceptors are used, this `post_delete_service_with_metadata` interceptor runs after the + `post_delete_service` interceptor. The (possibly modified) response returned by + `post_delete_service` will be passed to + `post_delete_service_with_metadata`. + """ + return response, metadata + def pre_export_metadata( self, request: metastore.ExportMetadataRequest, @@ -392,12 +530,35 @@ def post_export_metadata( ) -> operations_pb2.Operation: """Post-rpc interceptor for export_metadata - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_export_metadata_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DataprocMetastore server but before - it is returned to user code. + it is returned to user code. This `post_export_metadata` interceptor runs + before the `post_export_metadata_with_metadata` interceptor. """ return response + def post_export_metadata_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for export_metadata + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataprocMetastore server but before it is returned to user code. + + We recommend only using this `post_export_metadata_with_metadata` + interceptor in new development instead of the `post_export_metadata` interceptor. + When both interceptors are used, this `post_export_metadata_with_metadata` interceptor runs after the + `post_export_metadata` interceptor. The (possibly modified) response returned by + `post_export_metadata` will be passed to + `post_export_metadata_with_metadata`. + """ + return response, metadata + def pre_get_backup( self, request: metastore.GetBackupRequest, @@ -413,12 +574,35 @@ def pre_get_backup( def post_get_backup(self, response: metastore.Backup) -> metastore.Backup: """Post-rpc interceptor for get_backup - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_backup_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DataprocMetastore server but before - it is returned to user code. + it is returned to user code. This `post_get_backup` interceptor runs + before the `post_get_backup_with_metadata` interceptor. 
""" return response + def post_get_backup_with_metadata( + self, + response: metastore.Backup, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[metastore.Backup, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_backup + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataprocMetastore server but before it is returned to user code. + + We recommend only using this `post_get_backup_with_metadata` + interceptor in new development instead of the `post_get_backup` interceptor. + When both interceptors are used, this `post_get_backup_with_metadata` interceptor runs after the + `post_get_backup` interceptor. The (possibly modified) response returned by + `post_get_backup` will be passed to + `post_get_backup_with_metadata`. + """ + return response, metadata + def pre_get_metadata_import( self, request: metastore.GetMetadataImportRequest, @@ -438,12 +622,35 @@ def post_get_metadata_import( ) -> metastore.MetadataImport: """Post-rpc interceptor for get_metadata_import - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_metadata_import_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DataprocMetastore server but before - it is returned to user code. + it is returned to user code. This `post_get_metadata_import` interceptor runs + before the `post_get_metadata_import_with_metadata` interceptor. """ return response + def post_get_metadata_import_with_metadata( + self, + response: metastore.MetadataImport, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[metastore.MetadataImport, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_metadata_import + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataprocMetastore server but before it is returned to user code. + + We recommend only using this `post_get_metadata_import_with_metadata` + interceptor in new development instead of the `post_get_metadata_import` interceptor. + When both interceptors are used, this `post_get_metadata_import_with_metadata` interceptor runs after the + `post_get_metadata_import` interceptor. The (possibly modified) response returned by + `post_get_metadata_import` will be passed to + `post_get_metadata_import_with_metadata`. + """ + return response, metadata + def pre_get_service( self, request: metastore.GetServiceRequest, @@ -459,12 +666,35 @@ def pre_get_service( def post_get_service(self, response: metastore.Service) -> metastore.Service: """Post-rpc interceptor for get_service - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_service_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DataprocMetastore server but before - it is returned to user code. + it is returned to user code. This `post_get_service` interceptor runs + before the `post_get_service_with_metadata` interceptor. 
""" return response + def post_get_service_with_metadata( + self, + response: metastore.Service, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[metastore.Service, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_service + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataprocMetastore server but before it is returned to user code. + + We recommend only using this `post_get_service_with_metadata` + interceptor in new development instead of the `post_get_service` interceptor. + When both interceptors are used, this `post_get_service_with_metadata` interceptor runs after the + `post_get_service` interceptor. The (possibly modified) response returned by + `post_get_service` will be passed to + `post_get_service_with_metadata`. + """ + return response, metadata + def pre_list_backups( self, request: metastore.ListBackupsRequest, @@ -482,12 +712,35 @@ def post_list_backups( ) -> metastore.ListBackupsResponse: """Post-rpc interceptor for list_backups - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_backups_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DataprocMetastore server but before - it is returned to user code. + it is returned to user code. This `post_list_backups` interceptor runs + before the `post_list_backups_with_metadata` interceptor. """ return response + def post_list_backups_with_metadata( + self, + response: metastore.ListBackupsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[metastore.ListBackupsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_backups + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataprocMetastore server but before it is returned to user code. + + We recommend only using this `post_list_backups_with_metadata` + interceptor in new development instead of the `post_list_backups` interceptor. + When both interceptors are used, this `post_list_backups_with_metadata` interceptor runs after the + `post_list_backups` interceptor. The (possibly modified) response returned by + `post_list_backups` will be passed to + `post_list_backups_with_metadata`. + """ + return response, metadata + def pre_list_metadata_imports( self, request: metastore.ListMetadataImportsRequest, @@ -507,12 +760,37 @@ def post_list_metadata_imports( ) -> metastore.ListMetadataImportsResponse: """Post-rpc interceptor for list_metadata_imports - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_metadata_imports_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DataprocMetastore server but before - it is returned to user code. + it is returned to user code. This `post_list_metadata_imports` interceptor runs + before the `post_list_metadata_imports_with_metadata` interceptor. 
""" return response + def post_list_metadata_imports_with_metadata( + self, + response: metastore.ListMetadataImportsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + metastore.ListMetadataImportsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_metadata_imports + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataprocMetastore server but before it is returned to user code. + + We recommend only using this `post_list_metadata_imports_with_metadata` + interceptor in new development instead of the `post_list_metadata_imports` interceptor. + When both interceptors are used, this `post_list_metadata_imports_with_metadata` interceptor runs after the + `post_list_metadata_imports` interceptor. The (possibly modified) response returned by + `post_list_metadata_imports` will be passed to + `post_list_metadata_imports_with_metadata`. + """ + return response, metadata + def pre_list_services( self, request: metastore.ListServicesRequest, @@ -530,12 +808,35 @@ def post_list_services( ) -> metastore.ListServicesResponse: """Post-rpc interceptor for list_services - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_services_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DataprocMetastore server but before - it is returned to user code. + it is returned to user code. This `post_list_services` interceptor runs + before the `post_list_services_with_metadata` interceptor. """ return response + def post_list_services_with_metadata( + self, + response: metastore.ListServicesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[metastore.ListServicesResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_services + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataprocMetastore server but before it is returned to user code. + + We recommend only using this `post_list_services_with_metadata` + interceptor in new development instead of the `post_list_services` interceptor. + When both interceptors are used, this `post_list_services_with_metadata` interceptor runs after the + `post_list_services` interceptor. The (possibly modified) response returned by + `post_list_services` will be passed to + `post_list_services_with_metadata`. + """ + return response, metadata + def pre_move_table_to_database( self, request: metastore.MoveTableToDatabaseRequest, @@ -555,12 +856,35 @@ def post_move_table_to_database( ) -> operations_pb2.Operation: """Post-rpc interceptor for move_table_to_database - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_move_table_to_database_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DataprocMetastore server but before - it is returned to user code. + it is returned to user code. This `post_move_table_to_database` interceptor runs + before the `post_move_table_to_database_with_metadata` interceptor. 
""" return response + def post_move_table_to_database_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for move_table_to_database + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataprocMetastore server but before it is returned to user code. + + We recommend only using this `post_move_table_to_database_with_metadata` + interceptor in new development instead of the `post_move_table_to_database` interceptor. + When both interceptors are used, this `post_move_table_to_database_with_metadata` interceptor runs after the + `post_move_table_to_database` interceptor. The (possibly modified) response returned by + `post_move_table_to_database` will be passed to + `post_move_table_to_database_with_metadata`. + """ + return response, metadata + def pre_query_metadata( self, request: metastore.QueryMetadataRequest, @@ -578,12 +902,35 @@ def post_query_metadata( ) -> operations_pb2.Operation: """Post-rpc interceptor for query_metadata - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_query_metadata_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DataprocMetastore server but before - it is returned to user code. + it is returned to user code. This `post_query_metadata` interceptor runs + before the `post_query_metadata_with_metadata` interceptor. """ return response + def post_query_metadata_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for query_metadata + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataprocMetastore server but before it is returned to user code. + + We recommend only using this `post_query_metadata_with_metadata` + interceptor in new development instead of the `post_query_metadata` interceptor. + When both interceptors are used, this `post_query_metadata_with_metadata` interceptor runs after the + `post_query_metadata` interceptor. The (possibly modified) response returned by + `post_query_metadata` will be passed to + `post_query_metadata_with_metadata`. + """ + return response, metadata + def pre_remove_iam_policy( self, request: metastore.RemoveIamPolicyRequest, @@ -603,12 +950,37 @@ def post_remove_iam_policy( ) -> metastore.RemoveIamPolicyResponse: """Post-rpc interceptor for remove_iam_policy - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_remove_iam_policy_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DataprocMetastore server but before - it is returned to user code. + it is returned to user code. This `post_remove_iam_policy` interceptor runs + before the `post_remove_iam_policy_with_metadata` interceptor. 
""" return response + def post_remove_iam_policy_with_metadata( + self, + response: metastore.RemoveIamPolicyResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + metastore.RemoveIamPolicyResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for remove_iam_policy + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataprocMetastore server but before it is returned to user code. + + We recommend only using this `post_remove_iam_policy_with_metadata` + interceptor in new development instead of the `post_remove_iam_policy` interceptor. + When both interceptors are used, this `post_remove_iam_policy_with_metadata` interceptor runs after the + `post_remove_iam_policy` interceptor. The (possibly modified) response returned by + `post_remove_iam_policy` will be passed to + `post_remove_iam_policy_with_metadata`. + """ + return response, metadata + def pre_restore_service( self, request: metastore.RestoreServiceRequest, @@ -628,12 +1000,35 @@ def post_restore_service( ) -> operations_pb2.Operation: """Post-rpc interceptor for restore_service - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_restore_service_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DataprocMetastore server but before - it is returned to user code. + it is returned to user code. This `post_restore_service` interceptor runs + before the `post_restore_service_with_metadata` interceptor. """ return response + def post_restore_service_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for restore_service + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataprocMetastore server but before it is returned to user code. + + We recommend only using this `post_restore_service_with_metadata` + interceptor in new development instead of the `post_restore_service` interceptor. + When both interceptors are used, this `post_restore_service_with_metadata` interceptor runs after the + `post_restore_service` interceptor. The (possibly modified) response returned by + `post_restore_service` will be passed to + `post_restore_service_with_metadata`. + """ + return response, metadata + def pre_update_metadata_import( self, request: metastore.UpdateMetadataImportRequest, @@ -653,12 +1048,35 @@ def post_update_metadata_import( ) -> operations_pb2.Operation: """Post-rpc interceptor for update_metadata_import - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_metadata_import_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DataprocMetastore server but before - it is returned to user code. + it is returned to user code. This `post_update_metadata_import` interceptor runs + before the `post_update_metadata_import_with_metadata` interceptor. 
""" return response + def post_update_metadata_import_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_metadata_import + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataprocMetastore server but before it is returned to user code. + + We recommend only using this `post_update_metadata_import_with_metadata` + interceptor in new development instead of the `post_update_metadata_import` interceptor. + When both interceptors are used, this `post_update_metadata_import_with_metadata` interceptor runs after the + `post_update_metadata_import` interceptor. The (possibly modified) response returned by + `post_update_metadata_import` will be passed to + `post_update_metadata_import_with_metadata`. + """ + return response, metadata + def pre_update_service( self, request: metastore.UpdateServiceRequest, @@ -676,12 +1094,35 @@ def post_update_service( ) -> operations_pb2.Operation: """Post-rpc interceptor for update_service - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_service_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DataprocMetastore server but before - it is returned to user code. + it is returned to user code. This `post_update_service` interceptor runs + before the `post_update_service_with_metadata` interceptor. """ return response + def post_update_service_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_service + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataprocMetastore server but before it is returned to user code. + + We recommend only using this `post_update_service_with_metadata` + interceptor in new development instead of the `post_update_service` interceptor. + When both interceptors are used, this `post_update_service_with_metadata` interceptor runs after the + `post_update_service` interceptor. The (possibly modified) response returned by + `post_update_service` will be passed to + `post_update_service_with_metadata`. 
+ """ + return response, metadata + def pre_get_location( self, request: locations_pb2.GetLocationRequest, @@ -1191,6 +1632,13 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_alter_metadata_resource_location(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_alter_metadata_resource_location_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1339,6 +1787,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_backup(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_backup_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1492,6 +1944,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_metadata_import(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_metadata_import_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1641,6 +2097,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_service(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_service_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1783,6 +2243,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_backup(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_backup_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1926,6 +2390,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_service(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_service_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2075,6 +2543,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_export_metadata(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_export_metadata_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2216,6 +2688,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_backup(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_backup_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2362,6 +2838,10 @@ def __call__( 
json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_metadata_import(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_metadata_import_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2505,6 +2985,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_service(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_service_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2648,6 +3132,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_backups(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_backups_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2796,6 +3284,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_metadata_imports(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_metadata_imports_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2941,6 +3433,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_services(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_services_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3094,6 +3590,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_move_table_to_database(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_move_table_to_database_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3243,6 +3743,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_query_metadata(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_query_metadata_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3395,6 +3899,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_remove_iam_policy(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_remove_iam_policy_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3545,6 +4053,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_restore_service(resp) + response_metadata = [(k, str(v)) for k, v in 
response.headers.items()] + resp, _ = self._interceptor.post_restore_service_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3698,6 +4210,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_metadata_import(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_metadata_import_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3847,6 +4363,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_service(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_service_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1beta/services/dataproc_metastore_federation/client.py b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1beta/services/dataproc_metastore_federation/client.py index 480805965138..5b154f5d2c96 100644 --- a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1beta/services/dataproc_metastore_federation/client.py +++ b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1beta/services/dataproc_metastore_federation/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -517,6 +519,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -1434,16 +1463,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -1489,16 +1522,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. 
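# --- Hedged usage sketch (not part of the diff above) ----------------------
# With the try/except wrapping shown above, auth failures surfaced to callers
# may now carry an extra JSON detail describing the credential that was used.
# A caller could inspect it roughly like this; the client name and request
# shape are assumptions for illustration.
import json

from google.api_core import exceptions as core_exceptions
from google.cloud.metastore_v1beta import DataprocMetastoreFederationClient


def describe_auth_failure(
    client: DataprocMetastoreFederationClient, resource: str
) -> None:
    try:
        client.get_iam_policy(request={"resource": resource})
    except core_exceptions.GoogleAPICallError as exc:
        # Credential info, if present, is appended as a JSON-encoded detail.
        for detail in exc.details or []:
            if isinstance(detail, str):
                try:
                    print("credential info:", json.loads(detail))
                except ValueError:
                    pass
        raise
# ----------------------------------------------------------------------------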
- response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def delete_operation( self, @@ -1721,16 +1758,20 @@ def set_iam_policy( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_iam_policy( self, @@ -1843,16 +1884,20 @@ def get_iam_policy( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def test_iam_permissions( self, @@ -1903,16 +1948,20 @@ def test_iam_permissions( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_location( self, @@ -1958,16 +2007,20 @@ def get_location( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def list_locations( self, @@ -2013,16 +2066,20 @@ def list_locations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. 
+ return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1beta/services/dataproc_metastore_federation/transports/rest.py b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1beta/services/dataproc_metastore_federation/transports/rest.py index 29e1260cc6e2..c1da06a58781 100644 --- a/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1beta/services/dataproc_metastore_federation/transports/rest.py +++ b/packages/google-cloud-dataproc-metastore/google/cloud/metastore_v1beta/services/dataproc_metastore_federation/transports/rest.py @@ -138,12 +138,35 @@ def post_create_federation( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_federation - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_federation_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DataprocMetastoreFederation server but before - it is returned to user code. + it is returned to user code. This `post_create_federation` interceptor runs + before the `post_create_federation_with_metadata` interceptor. """ return response + def post_create_federation_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_federation + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataprocMetastoreFederation server but before it is returned to user code. + + We recommend only using this `post_create_federation_with_metadata` + interceptor in new development instead of the `post_create_federation` interceptor. + When both interceptors are used, this `post_create_federation_with_metadata` interceptor runs after the + `post_create_federation` interceptor. The (possibly modified) response returned by + `post_create_federation` will be passed to + `post_create_federation_with_metadata`. + """ + return response, metadata + def pre_delete_federation( self, request: metastore_federation.DeleteFederationRequest, @@ -164,12 +187,35 @@ def post_delete_federation( ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_federation - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_federation_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DataprocMetastoreFederation server but before - it is returned to user code. + it is returned to user code. This `post_delete_federation` interceptor runs + before the `post_delete_federation_with_metadata` interceptor. """ return response + def post_delete_federation_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_federation + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataprocMetastoreFederation server but before it is returned to user code. 
+ + We recommend only using this `post_delete_federation_with_metadata` + interceptor in new development instead of the `post_delete_federation` interceptor. + When both interceptors are used, this `post_delete_federation_with_metadata` interceptor runs after the + `post_delete_federation` interceptor. The (possibly modified) response returned by + `post_delete_federation` will be passed to + `post_delete_federation_with_metadata`. + """ + return response, metadata + def pre_get_federation( self, request: metastore_federation.GetFederationRequest, @@ -190,12 +236,37 @@ def post_get_federation( ) -> metastore_federation.Federation: """Post-rpc interceptor for get_federation - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_federation_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DataprocMetastoreFederation server but before - it is returned to user code. + it is returned to user code. This `post_get_federation` interceptor runs + before the `post_get_federation_with_metadata` interceptor. """ return response + def post_get_federation_with_metadata( + self, + response: metastore_federation.Federation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + metastore_federation.Federation, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for get_federation + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataprocMetastoreFederation server but before it is returned to user code. + + We recommend only using this `post_get_federation_with_metadata` + interceptor in new development instead of the `post_get_federation` interceptor. + When both interceptors are used, this `post_get_federation_with_metadata` interceptor runs after the + `post_get_federation` interceptor. The (possibly modified) response returned by + `post_get_federation` will be passed to + `post_get_federation_with_metadata`. + """ + return response, metadata + def pre_list_federations( self, request: metastore_federation.ListFederationsRequest, @@ -216,12 +287,38 @@ def post_list_federations( ) -> metastore_federation.ListFederationsResponse: """Post-rpc interceptor for list_federations - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_federations_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DataprocMetastoreFederation server but before - it is returned to user code. + it is returned to user code. This `post_list_federations` interceptor runs + before the `post_list_federations_with_metadata` interceptor. """ return response + def post_list_federations_with_metadata( + self, + response: metastore_federation.ListFederationsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + metastore_federation.ListFederationsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_federations + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataprocMetastoreFederation server but before it is returned to user code. + + We recommend only using this `post_list_federations_with_metadata` + interceptor in new development instead of the `post_list_federations` interceptor. 
+ When both interceptors are used, this `post_list_federations_with_metadata` interceptor runs after the + `post_list_federations` interceptor. The (possibly modified) response returned by + `post_list_federations` will be passed to + `post_list_federations_with_metadata`. + """ + return response, metadata + def pre_update_federation( self, request: metastore_federation.UpdateFederationRequest, @@ -242,12 +339,35 @@ def post_update_federation( ) -> operations_pb2.Operation: """Post-rpc interceptor for update_federation - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_federation_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DataprocMetastoreFederation server but before - it is returned to user code. + it is returned to user code. This `post_update_federation` interceptor runs + before the `post_update_federation_with_metadata` interceptor. """ return response + def post_update_federation_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_federation + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataprocMetastoreFederation server but before it is returned to user code. + + We recommend only using this `post_update_federation_with_metadata` + interceptor in new development instead of the `post_update_federation` interceptor. + When both interceptors are used, this `post_update_federation_with_metadata` interceptor runs after the + `post_update_federation` interceptor. The (possibly modified) response returned by + `post_update_federation` will be passed to + `post_update_federation_with_metadata`. 
+ """ + return response, metadata + def pre_get_location( self, request: locations_pb2.GetLocationRequest, @@ -750,6 +870,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_federation(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_federation_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -894,6 +1018,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_federation(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_federation_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1039,6 +1167,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_federation(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_federation_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1184,6 +1316,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_federations(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_federations_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1336,6 +1472,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_federation(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_federation_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-dataproc-metastore/samples/generated_samples/snippet_metadata_google.cloud.metastore.v1.json b/packages/google-cloud-dataproc-metastore/samples/generated_samples/snippet_metadata_google.cloud.metastore.v1.json index ba05a3c7cbba..6ea79e87e08b 100644 --- a/packages/google-cloud-dataproc-metastore/samples/generated_samples/snippet_metadata_google.cloud.metastore.v1.json +++ b/packages/google-cloud-dataproc-metastore/samples/generated_samples/snippet_metadata_google.cloud.metastore.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-dataproc-metastore", - "version": "1.17.0" + "version": "1.18.0" }, "snippets": [ { diff --git a/packages/google-cloud-dataproc-metastore/samples/generated_samples/snippet_metadata_google.cloud.metastore.v1alpha.json b/packages/google-cloud-dataproc-metastore/samples/generated_samples/snippet_metadata_google.cloud.metastore.v1alpha.json index f047479b9f21..7e80e6e9d558 100644 --- a/packages/google-cloud-dataproc-metastore/samples/generated_samples/snippet_metadata_google.cloud.metastore.v1alpha.json +++ b/packages/google-cloud-dataproc-metastore/samples/generated_samples/snippet_metadata_google.cloud.metastore.v1alpha.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-dataproc-metastore", - "version": "1.17.0" + "version": 
"1.18.0" }, "snippets": [ { diff --git a/packages/google-cloud-dataproc-metastore/samples/generated_samples/snippet_metadata_google.cloud.metastore.v1beta.json b/packages/google-cloud-dataproc-metastore/samples/generated_samples/snippet_metadata_google.cloud.metastore.v1beta.json index 94eb5dcb868f..0e1da19d5308 100644 --- a/packages/google-cloud-dataproc-metastore/samples/generated_samples/snippet_metadata_google.cloud.metastore.v1beta.json +++ b/packages/google-cloud-dataproc-metastore/samples/generated_samples/snippet_metadata_google.cloud.metastore.v1beta.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-dataproc-metastore", - "version": "1.17.0" + "version": "1.18.0" }, "snippets": [ { diff --git a/packages/google-cloud-dataproc-metastore/tests/unit/gapic/metastore_v1/test_dataproc_metastore.py b/packages/google-cloud-dataproc-metastore/tests/unit/gapic/metastore_v1/test_dataproc_metastore.py index 89df5f01663a..e7d24120ce8f 100644 --- a/packages/google-cloud-dataproc-metastore/tests/unit/gapic/metastore_v1/test_dataproc_metastore.py +++ b/packages/google-cloud-dataproc-metastore/tests/unit/gapic/metastore_v1/test_dataproc_metastore.py @@ -79,6 +79,13 @@ ) from google.cloud.metastore_v1.types import metastore +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -346,6 +353,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = DataprocMetastoreClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = DataprocMetastoreClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -12242,10 +12292,13 @@ def test_list_services_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DataprocMetastoreRestInterceptor, "post_list_services" ) as post, mock.patch.object( + transports.DataprocMetastoreRestInterceptor, "post_list_services_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DataprocMetastoreRestInterceptor, "pre_list_services" ) as pre: pre.assert_not_called() post.assert_not_called() + 
post_with_metadata.assert_not_called() pb_message = metastore.ListServicesRequest.pb(metastore.ListServicesRequest()) transcode.return_value = { "method": "post", @@ -12269,6 +12322,7 @@ def test_list_services_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = metastore.ListServicesResponse() + post_with_metadata.return_value = metastore.ListServicesResponse(), metadata client.list_services( request, @@ -12280,6 +12334,7 @@ def test_list_services_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_service_rest_bad_request(request_type=metastore.GetServiceRequest): @@ -12382,10 +12437,13 @@ def test_get_service_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DataprocMetastoreRestInterceptor, "post_get_service" ) as post, mock.patch.object( + transports.DataprocMetastoreRestInterceptor, "post_get_service_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DataprocMetastoreRestInterceptor, "pre_get_service" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = metastore.GetServiceRequest.pb(metastore.GetServiceRequest()) transcode.return_value = { "method": "post", @@ -12407,6 +12465,7 @@ def test_get_service_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = metastore.Service() + post_with_metadata.return_value = metastore.Service(), metadata client.get_service( request, @@ -12418,6 +12477,7 @@ def test_get_service_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_service_rest_bad_request(request_type=metastore.CreateServiceRequest): @@ -12624,10 +12684,13 @@ def test_create_service_rest_interceptors(null_interceptor): ), mock.patch.object( transports.DataprocMetastoreRestInterceptor, "post_create_service" ) as post, mock.patch.object( + transports.DataprocMetastoreRestInterceptor, "post_create_service_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DataprocMetastoreRestInterceptor, "pre_create_service" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = metastore.CreateServiceRequest.pb(metastore.CreateServiceRequest()) transcode.return_value = { "method": "post", @@ -12649,6 +12712,7 @@ def test_create_service_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_service( request, @@ -12660,6 +12724,7 @@ def test_create_service_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_service_rest_bad_request(request_type=metastore.UpdateServiceRequest): @@ -12870,10 +12935,13 @@ def test_update_service_rest_interceptors(null_interceptor): ), mock.patch.object( transports.DataprocMetastoreRestInterceptor, "post_update_service" ) as post, mock.patch.object( + transports.DataprocMetastoreRestInterceptor, "post_update_service_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DataprocMetastoreRestInterceptor, "pre_update_service" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = 
metastore.UpdateServiceRequest.pb(metastore.UpdateServiceRequest()) transcode.return_value = { "method": "post", @@ -12895,6 +12963,7 @@ def test_update_service_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.update_service( request, @@ -12906,6 +12975,7 @@ def test_update_service_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_service_rest_bad_request(request_type=metastore.DeleteServiceRequest): @@ -12984,10 +13054,13 @@ def test_delete_service_rest_interceptors(null_interceptor): ), mock.patch.object( transports.DataprocMetastoreRestInterceptor, "post_delete_service" ) as post, mock.patch.object( + transports.DataprocMetastoreRestInterceptor, "post_delete_service_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DataprocMetastoreRestInterceptor, "pre_delete_service" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = metastore.DeleteServiceRequest.pb(metastore.DeleteServiceRequest()) transcode.return_value = { "method": "post", @@ -13009,6 +13082,7 @@ def test_delete_service_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.delete_service( request, @@ -13020,6 +13094,7 @@ def test_delete_service_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_metadata_imports_rest_bad_request( @@ -13106,10 +13181,14 @@ def test_list_metadata_imports_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DataprocMetastoreRestInterceptor, "post_list_metadata_imports" ) as post, mock.patch.object( + transports.DataprocMetastoreRestInterceptor, + "post_list_metadata_imports_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DataprocMetastoreRestInterceptor, "pre_list_metadata_imports" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = metastore.ListMetadataImportsRequest.pb( metastore.ListMetadataImportsRequest() ) @@ -13135,6 +13214,10 @@ def test_list_metadata_imports_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = metastore.ListMetadataImportsResponse() + post_with_metadata.return_value = ( + metastore.ListMetadataImportsResponse(), + metadata, + ) client.list_metadata_imports( request, @@ -13146,6 +13229,7 @@ def test_list_metadata_imports_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_metadata_import_rest_bad_request( @@ -13238,10 +13322,14 @@ def test_get_metadata_import_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DataprocMetastoreRestInterceptor, "post_get_metadata_import" ) as post, mock.patch.object( + transports.DataprocMetastoreRestInterceptor, + "post_get_metadata_import_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DataprocMetastoreRestInterceptor, "pre_get_metadata_import" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = 
metastore.GetMetadataImportRequest.pb( metastore.GetMetadataImportRequest() ) @@ -13265,6 +13353,7 @@ def test_get_metadata_import_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = metastore.MetadataImport() + post_with_metadata.return_value = metastore.MetadataImport(), metadata client.get_metadata_import( request, @@ -13276,6 +13365,7 @@ def test_get_metadata_import_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_metadata_import_rest_bad_request( @@ -13437,10 +13527,14 @@ def test_create_metadata_import_rest_interceptors(null_interceptor): ), mock.patch.object( transports.DataprocMetastoreRestInterceptor, "post_create_metadata_import" ) as post, mock.patch.object( + transports.DataprocMetastoreRestInterceptor, + "post_create_metadata_import_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DataprocMetastoreRestInterceptor, "pre_create_metadata_import" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = metastore.CreateMetadataImportRequest.pb( metastore.CreateMetadataImportRequest() ) @@ -13464,6 +13558,7 @@ def test_create_metadata_import_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_metadata_import( request, @@ -13475,6 +13570,7 @@ def test_create_metadata_import_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_metadata_import_rest_bad_request( @@ -13644,10 +13740,14 @@ def test_update_metadata_import_rest_interceptors(null_interceptor): ), mock.patch.object( transports.DataprocMetastoreRestInterceptor, "post_update_metadata_import" ) as post, mock.patch.object( + transports.DataprocMetastoreRestInterceptor, + "post_update_metadata_import_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DataprocMetastoreRestInterceptor, "pre_update_metadata_import" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = metastore.UpdateMetadataImportRequest.pb( metastore.UpdateMetadataImportRequest() ) @@ -13671,6 +13771,7 @@ def test_update_metadata_import_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.update_metadata_import( request, @@ -13682,6 +13783,7 @@ def test_update_metadata_import_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_export_metadata_rest_bad_request(request_type=metastore.ExportMetadataRequest): @@ -13760,10 +13862,14 @@ def test_export_metadata_rest_interceptors(null_interceptor): ), mock.patch.object( transports.DataprocMetastoreRestInterceptor, "post_export_metadata" ) as post, mock.patch.object( + transports.DataprocMetastoreRestInterceptor, + "post_export_metadata_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DataprocMetastoreRestInterceptor, "pre_export_metadata" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = metastore.ExportMetadataRequest.pb( metastore.ExportMetadataRequest() ) @@ 
-13787,6 +13893,7 @@ def test_export_metadata_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.export_metadata( request, @@ -13798,6 +13905,7 @@ def test_export_metadata_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_restore_service_rest_bad_request(request_type=metastore.RestoreServiceRequest): @@ -13876,10 +13984,14 @@ def test_restore_service_rest_interceptors(null_interceptor): ), mock.patch.object( transports.DataprocMetastoreRestInterceptor, "post_restore_service" ) as post, mock.patch.object( + transports.DataprocMetastoreRestInterceptor, + "post_restore_service_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DataprocMetastoreRestInterceptor, "pre_restore_service" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = metastore.RestoreServiceRequest.pb( metastore.RestoreServiceRequest() ) @@ -13903,6 +14015,7 @@ def test_restore_service_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.restore_service( request, @@ -13914,6 +14027,7 @@ def test_restore_service_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_backups_rest_bad_request(request_type=metastore.ListBackupsRequest): @@ -13998,10 +14112,13 @@ def test_list_backups_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DataprocMetastoreRestInterceptor, "post_list_backups" ) as post, mock.patch.object( + transports.DataprocMetastoreRestInterceptor, "post_list_backups_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DataprocMetastoreRestInterceptor, "pre_list_backups" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = metastore.ListBackupsRequest.pb(metastore.ListBackupsRequest()) transcode.return_value = { "method": "post", @@ -14025,6 +14142,7 @@ def test_list_backups_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = metastore.ListBackupsResponse() + post_with_metadata.return_value = metastore.ListBackupsResponse(), metadata client.list_backups( request, @@ -14036,6 +14154,7 @@ def test_list_backups_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_backup_rest_bad_request(request_type=metastore.GetBackupRequest): @@ -14128,10 +14247,13 @@ def test_get_backup_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DataprocMetastoreRestInterceptor, "post_get_backup" ) as post, mock.patch.object( + transports.DataprocMetastoreRestInterceptor, "post_get_backup_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DataprocMetastoreRestInterceptor, "pre_get_backup" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = metastore.GetBackupRequest.pb(metastore.GetBackupRequest()) transcode.return_value = { "method": "post", @@ -14153,6 +14275,7 @@ def test_get_backup_rest_interceptors(null_interceptor): ] 
pre.return_value = request, metadata post.return_value = metastore.Backup() + post_with_metadata.return_value = metastore.Backup(), metadata client.get_backup( request, @@ -14164,6 +14287,7 @@ def test_get_backup_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_backup_rest_bad_request(request_type=metastore.CreateBackupRequest): @@ -14381,10 +14505,13 @@ def test_create_backup_rest_interceptors(null_interceptor): ), mock.patch.object( transports.DataprocMetastoreRestInterceptor, "post_create_backup" ) as post, mock.patch.object( + transports.DataprocMetastoreRestInterceptor, "post_create_backup_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DataprocMetastoreRestInterceptor, "pre_create_backup" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = metastore.CreateBackupRequest.pb(metastore.CreateBackupRequest()) transcode.return_value = { "method": "post", @@ -14406,6 +14533,7 @@ def test_create_backup_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_backup( request, @@ -14417,6 +14545,7 @@ def test_create_backup_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_backup_rest_bad_request(request_type=metastore.DeleteBackupRequest): @@ -14499,10 +14628,13 @@ def test_delete_backup_rest_interceptors(null_interceptor): ), mock.patch.object( transports.DataprocMetastoreRestInterceptor, "post_delete_backup" ) as post, mock.patch.object( + transports.DataprocMetastoreRestInterceptor, "post_delete_backup_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DataprocMetastoreRestInterceptor, "pre_delete_backup" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = metastore.DeleteBackupRequest.pb(metastore.DeleteBackupRequest()) transcode.return_value = { "method": "post", @@ -14524,6 +14656,7 @@ def test_delete_backup_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.delete_backup( request, @@ -14535,6 +14668,7 @@ def test_delete_backup_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_query_metadata_rest_bad_request(request_type=metastore.QueryMetadataRequest): @@ -14613,10 +14747,13 @@ def test_query_metadata_rest_interceptors(null_interceptor): ), mock.patch.object( transports.DataprocMetastoreRestInterceptor, "post_query_metadata" ) as post, mock.patch.object( + transports.DataprocMetastoreRestInterceptor, "post_query_metadata_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DataprocMetastoreRestInterceptor, "pre_query_metadata" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = metastore.QueryMetadataRequest.pb(metastore.QueryMetadataRequest()) transcode.return_value = { "method": "post", @@ -14638,6 +14775,7 @@ def test_query_metadata_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + 
post_with_metadata.return_value = operations_pb2.Operation(), metadata client.query_metadata( request, @@ -14649,6 +14787,7 @@ def test_query_metadata_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_move_table_to_database_rest_bad_request( @@ -14729,10 +14868,14 @@ def test_move_table_to_database_rest_interceptors(null_interceptor): ), mock.patch.object( transports.DataprocMetastoreRestInterceptor, "post_move_table_to_database" ) as post, mock.patch.object( + transports.DataprocMetastoreRestInterceptor, + "post_move_table_to_database_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DataprocMetastoreRestInterceptor, "pre_move_table_to_database" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = metastore.MoveTableToDatabaseRequest.pb( metastore.MoveTableToDatabaseRequest() ) @@ -14756,6 +14899,7 @@ def test_move_table_to_database_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.move_table_to_database( request, @@ -14767,6 +14911,7 @@ def test_move_table_to_database_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_alter_metadata_resource_location_rest_bad_request( @@ -14848,11 +14993,15 @@ def test_alter_metadata_resource_location_rest_interceptors(null_interceptor): transports.DataprocMetastoreRestInterceptor, "post_alter_metadata_resource_location", ) as post, mock.patch.object( + transports.DataprocMetastoreRestInterceptor, + "post_alter_metadata_resource_location_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DataprocMetastoreRestInterceptor, "pre_alter_metadata_resource_location", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = metastore.AlterMetadataResourceLocationRequest.pb( metastore.AlterMetadataResourceLocationRequest() ) @@ -14876,6 +15025,7 @@ def test_alter_metadata_resource_location_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.alter_metadata_resource_location( request, @@ -14887,6 +15037,7 @@ def test_alter_metadata_resource_location_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationRequest): diff --git a/packages/google-cloud-dataproc-metastore/tests/unit/gapic/metastore_v1/test_dataproc_metastore_federation.py b/packages/google-cloud-dataproc-metastore/tests/unit/gapic/metastore_v1/test_dataproc_metastore_federation.py index b095518768bc..053ca6c61449 100644 --- a/packages/google-cloud-dataproc-metastore/tests/unit/gapic/metastore_v1/test_dataproc_metastore_federation.py +++ b/packages/google-cloud-dataproc-metastore/tests/unit/gapic/metastore_v1/test_dataproc_metastore_federation.py @@ -77,6 +77,13 @@ ) from google.cloud.metastore_v1.types import metastore, metastore_federation +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = 
json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -360,6 +367,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = DataprocMetastoreFederationClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = DataprocMetastoreFederationClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -4606,10 +4656,14 @@ def test_list_federations_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DataprocMetastoreFederationRestInterceptor, "post_list_federations" ) as post, mock.patch.object( + transports.DataprocMetastoreFederationRestInterceptor, + "post_list_federations_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DataprocMetastoreFederationRestInterceptor, "pre_list_federations" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = metastore_federation.ListFederationsRequest.pb( metastore_federation.ListFederationsRequest() ) @@ -4635,6 +4689,10 @@ def test_list_federations_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = metastore_federation.ListFederationsResponse() + post_with_metadata.return_value = ( + metastore_federation.ListFederationsResponse(), + metadata, + ) client.list_federations( request, @@ -4646,6 +4704,7 @@ def test_list_federations_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_federation_rest_bad_request( @@ -4740,10 +4799,14 @@ def test_get_federation_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DataprocMetastoreFederationRestInterceptor, "post_get_federation" ) as post, mock.patch.object( + transports.DataprocMetastoreFederationRestInterceptor, + "post_get_federation_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DataprocMetastoreFederationRestInterceptor, "pre_get_federation" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = metastore_federation.GetFederationRequest.pb( metastore_federation.GetFederationRequest() ) @@ -4769,6 +4832,7 @@ def 
test_get_federation_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = metastore_federation.Federation() + post_with_metadata.return_value = metastore_federation.Federation(), metadata client.get_federation( request, @@ -4780,6 +4844,7 @@ def test_get_federation_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_federation_rest_bad_request( @@ -4939,10 +5004,14 @@ def test_create_federation_rest_interceptors(null_interceptor): ), mock.patch.object( transports.DataprocMetastoreFederationRestInterceptor, "post_create_federation" ) as post, mock.patch.object( + transports.DataprocMetastoreFederationRestInterceptor, + "post_create_federation_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DataprocMetastoreFederationRestInterceptor, "pre_create_federation" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = metastore_federation.CreateFederationRequest.pb( metastore_federation.CreateFederationRequest() ) @@ -4966,6 +5035,7 @@ def test_create_federation_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_federation( request, @@ -4977,6 +5047,7 @@ def test_create_federation_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_federation_rest_bad_request( @@ -5140,10 +5211,14 @@ def test_update_federation_rest_interceptors(null_interceptor): ), mock.patch.object( transports.DataprocMetastoreFederationRestInterceptor, "post_update_federation" ) as post, mock.patch.object( + transports.DataprocMetastoreFederationRestInterceptor, + "post_update_federation_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DataprocMetastoreFederationRestInterceptor, "pre_update_federation" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = metastore_federation.UpdateFederationRequest.pb( metastore_federation.UpdateFederationRequest() ) @@ -5167,6 +5242,7 @@ def test_update_federation_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.update_federation( request, @@ -5178,6 +5254,7 @@ def test_update_federation_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_federation_rest_bad_request( @@ -5258,10 +5335,14 @@ def test_delete_federation_rest_interceptors(null_interceptor): ), mock.patch.object( transports.DataprocMetastoreFederationRestInterceptor, "post_delete_federation" ) as post, mock.patch.object( + transports.DataprocMetastoreFederationRestInterceptor, + "post_delete_federation_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DataprocMetastoreFederationRestInterceptor, "pre_delete_federation" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = metastore_federation.DeleteFederationRequest.pb( metastore_federation.DeleteFederationRequest() ) @@ -5285,6 +5366,7 @@ def test_delete_federation_rest_interceptors(null_interceptor): ] 
pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.delete_federation( request, @@ -5296,6 +5378,7 @@ def test_delete_federation_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationRequest): diff --git a/packages/google-cloud-dataproc-metastore/tests/unit/gapic/metastore_v1alpha/test_dataproc_metastore.py b/packages/google-cloud-dataproc-metastore/tests/unit/gapic/metastore_v1alpha/test_dataproc_metastore.py index 7aeb2d72bd89..9bcc9fdffaba 100644 --- a/packages/google-cloud-dataproc-metastore/tests/unit/gapic/metastore_v1alpha/test_dataproc_metastore.py +++ b/packages/google-cloud-dataproc-metastore/tests/unit/gapic/metastore_v1alpha/test_dataproc_metastore.py @@ -79,6 +79,13 @@ ) from google.cloud.metastore_v1alpha.types import metastore +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -346,6 +353,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = DataprocMetastoreClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = DataprocMetastoreClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -12674,10 +12724,13 @@ def test_list_services_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DataprocMetastoreRestInterceptor, "post_list_services" ) as post, mock.patch.object( + transports.DataprocMetastoreRestInterceptor, "post_list_services_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DataprocMetastoreRestInterceptor, "pre_list_services" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = metastore.ListServicesRequest.pb(metastore.ListServicesRequest()) transcode.return_value = { "method": "post", @@ -12701,6 +12754,7 @@ def test_list_services_rest_interceptors(null_interceptor): ] 
pre.return_value = request, metadata post.return_value = metastore.ListServicesResponse() + post_with_metadata.return_value = metastore.ListServicesResponse(), metadata client.list_services( request, @@ -12712,6 +12766,7 @@ def test_list_services_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_service_rest_bad_request(request_type=metastore.GetServiceRequest): @@ -12814,10 +12869,13 @@ def test_get_service_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DataprocMetastoreRestInterceptor, "post_get_service" ) as post, mock.patch.object( + transports.DataprocMetastoreRestInterceptor, "post_get_service_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DataprocMetastoreRestInterceptor, "pre_get_service" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = metastore.GetServiceRequest.pb(metastore.GetServiceRequest()) transcode.return_value = { "method": "post", @@ -12839,6 +12897,7 @@ def test_get_service_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = metastore.Service() + post_with_metadata.return_value = metastore.Service(), metadata client.get_service( request, @@ -12850,6 +12909,7 @@ def test_get_service_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_service_rest_bad_request(request_type=metastore.CreateServiceRequest): @@ -13061,10 +13121,13 @@ def test_create_service_rest_interceptors(null_interceptor): ), mock.patch.object( transports.DataprocMetastoreRestInterceptor, "post_create_service" ) as post, mock.patch.object( + transports.DataprocMetastoreRestInterceptor, "post_create_service_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DataprocMetastoreRestInterceptor, "pre_create_service" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = metastore.CreateServiceRequest.pb(metastore.CreateServiceRequest()) transcode.return_value = { "method": "post", @@ -13086,6 +13149,7 @@ def test_create_service_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_service( request, @@ -13097,6 +13161,7 @@ def test_create_service_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_service_rest_bad_request(request_type=metastore.UpdateServiceRequest): @@ -13312,10 +13377,13 @@ def test_update_service_rest_interceptors(null_interceptor): ), mock.patch.object( transports.DataprocMetastoreRestInterceptor, "post_update_service" ) as post, mock.patch.object( + transports.DataprocMetastoreRestInterceptor, "post_update_service_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DataprocMetastoreRestInterceptor, "pre_update_service" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = metastore.UpdateServiceRequest.pb(metastore.UpdateServiceRequest()) transcode.return_value = { "method": "post", @@ -13337,6 +13405,7 @@ def test_update_service_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = 
operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.update_service( request, @@ -13348,6 +13417,7 @@ def test_update_service_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_service_rest_bad_request(request_type=metastore.DeleteServiceRequest): @@ -13426,10 +13496,13 @@ def test_delete_service_rest_interceptors(null_interceptor): ), mock.patch.object( transports.DataprocMetastoreRestInterceptor, "post_delete_service" ) as post, mock.patch.object( + transports.DataprocMetastoreRestInterceptor, "post_delete_service_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DataprocMetastoreRestInterceptor, "pre_delete_service" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = metastore.DeleteServiceRequest.pb(metastore.DeleteServiceRequest()) transcode.return_value = { "method": "post", @@ -13451,6 +13524,7 @@ def test_delete_service_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.delete_service( request, @@ -13462,6 +13536,7 @@ def test_delete_service_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_metadata_imports_rest_bad_request( @@ -13548,10 +13623,14 @@ def test_list_metadata_imports_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DataprocMetastoreRestInterceptor, "post_list_metadata_imports" ) as post, mock.patch.object( + transports.DataprocMetastoreRestInterceptor, + "post_list_metadata_imports_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DataprocMetastoreRestInterceptor, "pre_list_metadata_imports" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = metastore.ListMetadataImportsRequest.pb( metastore.ListMetadataImportsRequest() ) @@ -13577,6 +13656,10 @@ def test_list_metadata_imports_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = metastore.ListMetadataImportsResponse() + post_with_metadata.return_value = ( + metastore.ListMetadataImportsResponse(), + metadata, + ) client.list_metadata_imports( request, @@ -13588,6 +13671,7 @@ def test_list_metadata_imports_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_metadata_import_rest_bad_request( @@ -13680,10 +13764,14 @@ def test_get_metadata_import_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DataprocMetastoreRestInterceptor, "post_get_metadata_import" ) as post, mock.patch.object( + transports.DataprocMetastoreRestInterceptor, + "post_get_metadata_import_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DataprocMetastoreRestInterceptor, "pre_get_metadata_import" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = metastore.GetMetadataImportRequest.pb( metastore.GetMetadataImportRequest() ) @@ -13707,6 +13795,7 @@ def test_get_metadata_import_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = metastore.MetadataImport() + 
post_with_metadata.return_value = metastore.MetadataImport(), metadata client.get_metadata_import( request, @@ -13718,6 +13807,7 @@ def test_get_metadata_import_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_metadata_import_rest_bad_request( @@ -13879,10 +13969,14 @@ def test_create_metadata_import_rest_interceptors(null_interceptor): ), mock.patch.object( transports.DataprocMetastoreRestInterceptor, "post_create_metadata_import" ) as post, mock.patch.object( + transports.DataprocMetastoreRestInterceptor, + "post_create_metadata_import_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DataprocMetastoreRestInterceptor, "pre_create_metadata_import" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = metastore.CreateMetadataImportRequest.pb( metastore.CreateMetadataImportRequest() ) @@ -13906,6 +14000,7 @@ def test_create_metadata_import_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_metadata_import( request, @@ -13917,6 +14012,7 @@ def test_create_metadata_import_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_metadata_import_rest_bad_request( @@ -14086,10 +14182,14 @@ def test_update_metadata_import_rest_interceptors(null_interceptor): ), mock.patch.object( transports.DataprocMetastoreRestInterceptor, "post_update_metadata_import" ) as post, mock.patch.object( + transports.DataprocMetastoreRestInterceptor, + "post_update_metadata_import_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DataprocMetastoreRestInterceptor, "pre_update_metadata_import" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = metastore.UpdateMetadataImportRequest.pb( metastore.UpdateMetadataImportRequest() ) @@ -14113,6 +14213,7 @@ def test_update_metadata_import_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.update_metadata_import( request, @@ -14124,6 +14225,7 @@ def test_update_metadata_import_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_export_metadata_rest_bad_request(request_type=metastore.ExportMetadataRequest): @@ -14202,10 +14304,14 @@ def test_export_metadata_rest_interceptors(null_interceptor): ), mock.patch.object( transports.DataprocMetastoreRestInterceptor, "post_export_metadata" ) as post, mock.patch.object( + transports.DataprocMetastoreRestInterceptor, + "post_export_metadata_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DataprocMetastoreRestInterceptor, "pre_export_metadata" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = metastore.ExportMetadataRequest.pb( metastore.ExportMetadataRequest() ) @@ -14229,6 +14335,7 @@ def test_export_metadata_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata 
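
A note for readers of these interceptor tests: the generated `post_*_with_metadata` hooks take the response (after the deprecated `post_*` hook has already run) together with the response metadata, and must hand both back as a tuple. A minimal user-side override might look like the sketch below; the import paths are assumed from the usual generated package layout and are not part of this diff.

    from typing import Sequence, Tuple, Union

    # Assumed import paths; adjust to the actual generated layout if it differs.
    from google.cloud.metastore_v1.services.dataproc_metastore import transports
    from google.cloud.metastore_v1.types import metastore


    class HeaderLoggingInterceptor(transports.DataprocMetastoreRestInterceptor):
        def post_get_service_with_metadata(
            self,
            response: metastore.Service,
            metadata: Sequence[Tuple[str, Union[str, bytes]]],
        ) -> Tuple[metastore.Service, Sequence[Tuple[str, Union[str, bytes]]]]:
            # Runs after the deprecated post_get_service hook and receives that
            # hook's (possibly modified) response plus the HTTP response headers.
            print("get_service response headers:", dict(metadata))
            return response, metadata
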
client.export_metadata( request, @@ -14240,6 +14347,7 @@ def test_export_metadata_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_restore_service_rest_bad_request(request_type=metastore.RestoreServiceRequest): @@ -14318,10 +14426,14 @@ def test_restore_service_rest_interceptors(null_interceptor): ), mock.patch.object( transports.DataprocMetastoreRestInterceptor, "post_restore_service" ) as post, mock.patch.object( + transports.DataprocMetastoreRestInterceptor, + "post_restore_service_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DataprocMetastoreRestInterceptor, "pre_restore_service" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = metastore.RestoreServiceRequest.pb( metastore.RestoreServiceRequest() ) @@ -14345,6 +14457,7 @@ def test_restore_service_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.restore_service( request, @@ -14356,6 +14469,7 @@ def test_restore_service_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_backups_rest_bad_request(request_type=metastore.ListBackupsRequest): @@ -14440,10 +14554,13 @@ def test_list_backups_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DataprocMetastoreRestInterceptor, "post_list_backups" ) as post, mock.patch.object( + transports.DataprocMetastoreRestInterceptor, "post_list_backups_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DataprocMetastoreRestInterceptor, "pre_list_backups" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = metastore.ListBackupsRequest.pb(metastore.ListBackupsRequest()) transcode.return_value = { "method": "post", @@ -14467,6 +14584,7 @@ def test_list_backups_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = metastore.ListBackupsResponse() + post_with_metadata.return_value = metastore.ListBackupsResponse(), metadata client.list_backups( request, @@ -14478,6 +14596,7 @@ def test_list_backups_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_backup_rest_bad_request(request_type=metastore.GetBackupRequest): @@ -14570,10 +14689,13 @@ def test_get_backup_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DataprocMetastoreRestInterceptor, "post_get_backup" ) as post, mock.patch.object( + transports.DataprocMetastoreRestInterceptor, "post_get_backup_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DataprocMetastoreRestInterceptor, "pre_get_backup" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = metastore.GetBackupRequest.pb(metastore.GetBackupRequest()) transcode.return_value = { "method": "post", @@ -14595,6 +14717,7 @@ def test_get_backup_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = metastore.Backup() + post_with_metadata.return_value = metastore.Backup(), metadata client.get_backup( request, @@ -14606,6 +14729,7 @@ def test_get_backup_rest_interceptors(null_interceptor): 
pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_backup_rest_bad_request(request_type=metastore.CreateBackupRequest): @@ -14828,10 +14952,13 @@ def test_create_backup_rest_interceptors(null_interceptor): ), mock.patch.object( transports.DataprocMetastoreRestInterceptor, "post_create_backup" ) as post, mock.patch.object( + transports.DataprocMetastoreRestInterceptor, "post_create_backup_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DataprocMetastoreRestInterceptor, "pre_create_backup" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = metastore.CreateBackupRequest.pb(metastore.CreateBackupRequest()) transcode.return_value = { "method": "post", @@ -14853,6 +14980,7 @@ def test_create_backup_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_backup( request, @@ -14864,6 +14992,7 @@ def test_create_backup_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_backup_rest_bad_request(request_type=metastore.DeleteBackupRequest): @@ -14946,10 +15075,13 @@ def test_delete_backup_rest_interceptors(null_interceptor): ), mock.patch.object( transports.DataprocMetastoreRestInterceptor, "post_delete_backup" ) as post, mock.patch.object( + transports.DataprocMetastoreRestInterceptor, "post_delete_backup_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DataprocMetastoreRestInterceptor, "pre_delete_backup" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = metastore.DeleteBackupRequest.pb(metastore.DeleteBackupRequest()) transcode.return_value = { "method": "post", @@ -14971,6 +15103,7 @@ def test_delete_backup_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.delete_backup( request, @@ -14982,6 +15115,7 @@ def test_delete_backup_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_remove_iam_policy_rest_bad_request( @@ -15070,10 +15204,14 @@ def test_remove_iam_policy_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DataprocMetastoreRestInterceptor, "post_remove_iam_policy" ) as post, mock.patch.object( + transports.DataprocMetastoreRestInterceptor, + "post_remove_iam_policy_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DataprocMetastoreRestInterceptor, "pre_remove_iam_policy" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = metastore.RemoveIamPolicyRequest.pb( metastore.RemoveIamPolicyRequest() ) @@ -15099,6 +15237,7 @@ def test_remove_iam_policy_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = metastore.RemoveIamPolicyResponse() + post_with_metadata.return_value = metastore.RemoveIamPolicyResponse(), metadata client.remove_iam_policy( request, @@ -15110,6 +15249,7 @@ def test_remove_iam_policy_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def 
test_query_metadata_rest_bad_request(request_type=metastore.QueryMetadataRequest): @@ -15188,10 +15328,13 @@ def test_query_metadata_rest_interceptors(null_interceptor): ), mock.patch.object( transports.DataprocMetastoreRestInterceptor, "post_query_metadata" ) as post, mock.patch.object( + transports.DataprocMetastoreRestInterceptor, "post_query_metadata_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DataprocMetastoreRestInterceptor, "pre_query_metadata" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = metastore.QueryMetadataRequest.pb(metastore.QueryMetadataRequest()) transcode.return_value = { "method": "post", @@ -15213,6 +15356,7 @@ def test_query_metadata_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.query_metadata( request, @@ -15224,6 +15368,7 @@ def test_query_metadata_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_move_table_to_database_rest_bad_request( @@ -15304,10 +15449,14 @@ def test_move_table_to_database_rest_interceptors(null_interceptor): ), mock.patch.object( transports.DataprocMetastoreRestInterceptor, "post_move_table_to_database" ) as post, mock.patch.object( + transports.DataprocMetastoreRestInterceptor, + "post_move_table_to_database_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DataprocMetastoreRestInterceptor, "pre_move_table_to_database" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = metastore.MoveTableToDatabaseRequest.pb( metastore.MoveTableToDatabaseRequest() ) @@ -15331,6 +15480,7 @@ def test_move_table_to_database_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.move_table_to_database( request, @@ -15342,6 +15492,7 @@ def test_move_table_to_database_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_alter_metadata_resource_location_rest_bad_request( @@ -15423,11 +15574,15 @@ def test_alter_metadata_resource_location_rest_interceptors(null_interceptor): transports.DataprocMetastoreRestInterceptor, "post_alter_metadata_resource_location", ) as post, mock.patch.object( + transports.DataprocMetastoreRestInterceptor, + "post_alter_metadata_resource_location_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DataprocMetastoreRestInterceptor, "pre_alter_metadata_resource_location", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = metastore.AlterMetadataResourceLocationRequest.pb( metastore.AlterMetadataResourceLocationRequest() ) @@ -15451,6 +15606,7 @@ def test_alter_metadata_resource_location_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.alter_metadata_resource_location( request, @@ -15462,6 +15618,7 @@ def test_alter_metadata_resource_location_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + 
post_with_metadata.assert_called_once() def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationRequest): diff --git a/packages/google-cloud-dataproc-metastore/tests/unit/gapic/metastore_v1alpha/test_dataproc_metastore_federation.py b/packages/google-cloud-dataproc-metastore/tests/unit/gapic/metastore_v1alpha/test_dataproc_metastore_federation.py index fcc0a14c2e23..2c92319a594f 100644 --- a/packages/google-cloud-dataproc-metastore/tests/unit/gapic/metastore_v1alpha/test_dataproc_metastore_federation.py +++ b/packages/google-cloud-dataproc-metastore/tests/unit/gapic/metastore_v1alpha/test_dataproc_metastore_federation.py @@ -77,6 +77,13 @@ ) from google.cloud.metastore_v1alpha.types import metastore, metastore_federation +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -360,6 +367,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = DataprocMetastoreFederationClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = DataprocMetastoreFederationClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -4606,10 +4656,14 @@ def test_list_federations_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DataprocMetastoreFederationRestInterceptor, "post_list_federations" ) as post, mock.patch.object( + transports.DataprocMetastoreFederationRestInterceptor, + "post_list_federations_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DataprocMetastoreFederationRestInterceptor, "pre_list_federations" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = metastore_federation.ListFederationsRequest.pb( metastore_federation.ListFederationsRequest() ) @@ -4635,6 +4689,10 @@ def test_list_federations_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = metastore_federation.ListFederationsResponse() + post_with_metadata.return_value = ( + metastore_federation.ListFederationsResponse(), + metadata, + ) 
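
The two parametrized tests above pin down the behavior being exercised: `_add_cred_info_for_auth_errors` appends the JSON-encoded credential info to `error.details` only for 401/403/404 responses and only when the credentials object exposes `get_cred_info()`; a 500, or credentials without that method, leave the details untouched. A dependency-free sketch of that contract follows; it illustrates what the tests assert and is not the generated client's actual helper.

    import json

    _AUTH_ERROR_CODES = (401, 403, 404)  # the codes the tests expect to be annotated


    class FakeError:
        """Stand-in for GoogleAPICallError, just enough for this illustration."""

        def __init__(self, code, details):
            self.code = code
            self.details = details


    def add_cred_info_for_auth_errors(error, credentials):
        # Mirrors the tests: only auth-related codes are annotated, and only
        # when the credentials expose get_cred_info() returning something truthy.
        get_cred_info = getattr(credentials, "get_cred_info", None)
        if get_cred_info is None or error.code not in _AUTH_ERROR_CODES:
            return
        cred_info = get_cred_info()
        if cred_info:
            error.details = error.details + [json.dumps(cred_info)]
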
client.list_federations( request, @@ -4646,6 +4704,7 @@ def test_list_federations_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_federation_rest_bad_request( @@ -4740,10 +4799,14 @@ def test_get_federation_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DataprocMetastoreFederationRestInterceptor, "post_get_federation" ) as post, mock.patch.object( + transports.DataprocMetastoreFederationRestInterceptor, + "post_get_federation_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DataprocMetastoreFederationRestInterceptor, "pre_get_federation" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = metastore_federation.GetFederationRequest.pb( metastore_federation.GetFederationRequest() ) @@ -4769,6 +4832,7 @@ def test_get_federation_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = metastore_federation.Federation() + post_with_metadata.return_value = metastore_federation.Federation(), metadata client.get_federation( request, @@ -4780,6 +4844,7 @@ def test_get_federation_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_federation_rest_bad_request( @@ -4939,10 +5004,14 @@ def test_create_federation_rest_interceptors(null_interceptor): ), mock.patch.object( transports.DataprocMetastoreFederationRestInterceptor, "post_create_federation" ) as post, mock.patch.object( + transports.DataprocMetastoreFederationRestInterceptor, + "post_create_federation_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DataprocMetastoreFederationRestInterceptor, "pre_create_federation" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = metastore_federation.CreateFederationRequest.pb( metastore_federation.CreateFederationRequest() ) @@ -4966,6 +5035,7 @@ def test_create_federation_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_federation( request, @@ -4977,6 +5047,7 @@ def test_create_federation_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_federation_rest_bad_request( @@ -5140,10 +5211,14 @@ def test_update_federation_rest_interceptors(null_interceptor): ), mock.patch.object( transports.DataprocMetastoreFederationRestInterceptor, "post_update_federation" ) as post, mock.patch.object( + transports.DataprocMetastoreFederationRestInterceptor, + "post_update_federation_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DataprocMetastoreFederationRestInterceptor, "pre_update_federation" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = metastore_federation.UpdateFederationRequest.pb( metastore_federation.UpdateFederationRequest() ) @@ -5167,6 +5242,7 @@ def test_update_federation_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.update_federation( request, @@ -5178,6 +5254,7 @@ def 
test_update_federation_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_federation_rest_bad_request( @@ -5258,10 +5335,14 @@ def test_delete_federation_rest_interceptors(null_interceptor): ), mock.patch.object( transports.DataprocMetastoreFederationRestInterceptor, "post_delete_federation" ) as post, mock.patch.object( + transports.DataprocMetastoreFederationRestInterceptor, + "post_delete_federation_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DataprocMetastoreFederationRestInterceptor, "pre_delete_federation" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = metastore_federation.DeleteFederationRequest.pb( metastore_federation.DeleteFederationRequest() ) @@ -5285,6 +5366,7 @@ def test_delete_federation_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.delete_federation( request, @@ -5296,6 +5378,7 @@ def test_delete_federation_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationRequest): diff --git a/packages/google-cloud-dataproc-metastore/tests/unit/gapic/metastore_v1beta/test_dataproc_metastore.py b/packages/google-cloud-dataproc-metastore/tests/unit/gapic/metastore_v1beta/test_dataproc_metastore.py index cf9131ad25dd..0303ff4426cf 100644 --- a/packages/google-cloud-dataproc-metastore/tests/unit/gapic/metastore_v1beta/test_dataproc_metastore.py +++ b/packages/google-cloud-dataproc-metastore/tests/unit/gapic/metastore_v1beta/test_dataproc_metastore.py @@ -79,6 +79,13 @@ ) from google.cloud.metastore_v1beta.types import metastore +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -346,6 +353,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = DataprocMetastoreClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = DataprocMetastoreClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -12674,10 +12724,13 @@ def test_list_services_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DataprocMetastoreRestInterceptor, "post_list_services" ) as post, mock.patch.object( + transports.DataprocMetastoreRestInterceptor, "post_list_services_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DataprocMetastoreRestInterceptor, "pre_list_services" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = metastore.ListServicesRequest.pb(metastore.ListServicesRequest()) transcode.return_value = { "method": "post", @@ -12701,6 +12754,7 @@ def test_list_services_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = metastore.ListServicesResponse() + post_with_metadata.return_value = metastore.ListServicesResponse(), metadata client.list_services( request, @@ -12712,6 +12766,7 @@ def test_list_services_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_service_rest_bad_request(request_type=metastore.GetServiceRequest): @@ -12814,10 +12869,13 @@ def test_get_service_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DataprocMetastoreRestInterceptor, "post_get_service" ) as post, mock.patch.object( + transports.DataprocMetastoreRestInterceptor, "post_get_service_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DataprocMetastoreRestInterceptor, "pre_get_service" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = metastore.GetServiceRequest.pb(metastore.GetServiceRequest()) transcode.return_value = { "method": "post", @@ -12839,6 +12897,7 @@ def test_get_service_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = metastore.Service() + post_with_metadata.return_value = metastore.Service(), metadata client.get_service( request, @@ -12850,6 +12909,7 @@ def test_get_service_rest_interceptors(null_interceptor): pre.assert_called_once() 
post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_service_rest_bad_request(request_type=metastore.CreateServiceRequest): @@ -13061,10 +13121,13 @@ def test_create_service_rest_interceptors(null_interceptor): ), mock.patch.object( transports.DataprocMetastoreRestInterceptor, "post_create_service" ) as post, mock.patch.object( + transports.DataprocMetastoreRestInterceptor, "post_create_service_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DataprocMetastoreRestInterceptor, "pre_create_service" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = metastore.CreateServiceRequest.pb(metastore.CreateServiceRequest()) transcode.return_value = { "method": "post", @@ -13086,6 +13149,7 @@ def test_create_service_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_service( request, @@ -13097,6 +13161,7 @@ def test_create_service_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_service_rest_bad_request(request_type=metastore.UpdateServiceRequest): @@ -13312,10 +13377,13 @@ def test_update_service_rest_interceptors(null_interceptor): ), mock.patch.object( transports.DataprocMetastoreRestInterceptor, "post_update_service" ) as post, mock.patch.object( + transports.DataprocMetastoreRestInterceptor, "post_update_service_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DataprocMetastoreRestInterceptor, "pre_update_service" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = metastore.UpdateServiceRequest.pb(metastore.UpdateServiceRequest()) transcode.return_value = { "method": "post", @@ -13337,6 +13405,7 @@ def test_update_service_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.update_service( request, @@ -13348,6 +13417,7 @@ def test_update_service_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_service_rest_bad_request(request_type=metastore.DeleteServiceRequest): @@ -13426,10 +13496,13 @@ def test_delete_service_rest_interceptors(null_interceptor): ), mock.patch.object( transports.DataprocMetastoreRestInterceptor, "post_delete_service" ) as post, mock.patch.object( + transports.DataprocMetastoreRestInterceptor, "post_delete_service_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DataprocMetastoreRestInterceptor, "pre_delete_service" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = metastore.DeleteServiceRequest.pb(metastore.DeleteServiceRequest()) transcode.return_value = { "method": "post", @@ -13451,6 +13524,7 @@ def test_delete_service_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.delete_service( request, @@ -13462,6 +13536,7 @@ def test_delete_service_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + 
post_with_metadata.assert_called_once() def test_list_metadata_imports_rest_bad_request( @@ -13548,10 +13623,14 @@ def test_list_metadata_imports_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DataprocMetastoreRestInterceptor, "post_list_metadata_imports" ) as post, mock.patch.object( + transports.DataprocMetastoreRestInterceptor, + "post_list_metadata_imports_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DataprocMetastoreRestInterceptor, "pre_list_metadata_imports" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = metastore.ListMetadataImportsRequest.pb( metastore.ListMetadataImportsRequest() ) @@ -13577,6 +13656,10 @@ def test_list_metadata_imports_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = metastore.ListMetadataImportsResponse() + post_with_metadata.return_value = ( + metastore.ListMetadataImportsResponse(), + metadata, + ) client.list_metadata_imports( request, @@ -13588,6 +13671,7 @@ def test_list_metadata_imports_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_metadata_import_rest_bad_request( @@ -13680,10 +13764,14 @@ def test_get_metadata_import_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DataprocMetastoreRestInterceptor, "post_get_metadata_import" ) as post, mock.patch.object( + transports.DataprocMetastoreRestInterceptor, + "post_get_metadata_import_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DataprocMetastoreRestInterceptor, "pre_get_metadata_import" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = metastore.GetMetadataImportRequest.pb( metastore.GetMetadataImportRequest() ) @@ -13707,6 +13795,7 @@ def test_get_metadata_import_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = metastore.MetadataImport() + post_with_metadata.return_value = metastore.MetadataImport(), metadata client.get_metadata_import( request, @@ -13718,6 +13807,7 @@ def test_get_metadata_import_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_metadata_import_rest_bad_request( @@ -13879,10 +13969,14 @@ def test_create_metadata_import_rest_interceptors(null_interceptor): ), mock.patch.object( transports.DataprocMetastoreRestInterceptor, "post_create_metadata_import" ) as post, mock.patch.object( + transports.DataprocMetastoreRestInterceptor, + "post_create_metadata_import_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DataprocMetastoreRestInterceptor, "pre_create_metadata_import" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = metastore.CreateMetadataImportRequest.pb( metastore.CreateMetadataImportRequest() ) @@ -13906,6 +14000,7 @@ def test_create_metadata_import_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_metadata_import( request, @@ -13917,6 +14012,7 @@ def test_create_metadata_import_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def 
test_update_metadata_import_rest_bad_request( @@ -14086,10 +14182,14 @@ def test_update_metadata_import_rest_interceptors(null_interceptor): ), mock.patch.object( transports.DataprocMetastoreRestInterceptor, "post_update_metadata_import" ) as post, mock.patch.object( + transports.DataprocMetastoreRestInterceptor, + "post_update_metadata_import_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DataprocMetastoreRestInterceptor, "pre_update_metadata_import" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = metastore.UpdateMetadataImportRequest.pb( metastore.UpdateMetadataImportRequest() ) @@ -14113,6 +14213,7 @@ def test_update_metadata_import_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.update_metadata_import( request, @@ -14124,6 +14225,7 @@ def test_update_metadata_import_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_export_metadata_rest_bad_request(request_type=metastore.ExportMetadataRequest): @@ -14202,10 +14304,14 @@ def test_export_metadata_rest_interceptors(null_interceptor): ), mock.patch.object( transports.DataprocMetastoreRestInterceptor, "post_export_metadata" ) as post, mock.patch.object( + transports.DataprocMetastoreRestInterceptor, + "post_export_metadata_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DataprocMetastoreRestInterceptor, "pre_export_metadata" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = metastore.ExportMetadataRequest.pb( metastore.ExportMetadataRequest() ) @@ -14229,6 +14335,7 @@ def test_export_metadata_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.export_metadata( request, @@ -14240,6 +14347,7 @@ def test_export_metadata_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_restore_service_rest_bad_request(request_type=metastore.RestoreServiceRequest): @@ -14318,10 +14426,14 @@ def test_restore_service_rest_interceptors(null_interceptor): ), mock.patch.object( transports.DataprocMetastoreRestInterceptor, "post_restore_service" ) as post, mock.patch.object( + transports.DataprocMetastoreRestInterceptor, + "post_restore_service_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DataprocMetastoreRestInterceptor, "pre_restore_service" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = metastore.RestoreServiceRequest.pb( metastore.RestoreServiceRequest() ) @@ -14345,6 +14457,7 @@ def test_restore_service_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.restore_service( request, @@ -14356,6 +14469,7 @@ def test_restore_service_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_backups_rest_bad_request(request_type=metastore.ListBackupsRequest): @@ -14440,10 +14554,13 @@ def 
test_list_backups_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DataprocMetastoreRestInterceptor, "post_list_backups" ) as post, mock.patch.object( + transports.DataprocMetastoreRestInterceptor, "post_list_backups_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DataprocMetastoreRestInterceptor, "pre_list_backups" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = metastore.ListBackupsRequest.pb(metastore.ListBackupsRequest()) transcode.return_value = { "method": "post", @@ -14467,6 +14584,7 @@ def test_list_backups_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = metastore.ListBackupsResponse() + post_with_metadata.return_value = metastore.ListBackupsResponse(), metadata client.list_backups( request, @@ -14478,6 +14596,7 @@ def test_list_backups_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_backup_rest_bad_request(request_type=metastore.GetBackupRequest): @@ -14570,10 +14689,13 @@ def test_get_backup_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DataprocMetastoreRestInterceptor, "post_get_backup" ) as post, mock.patch.object( + transports.DataprocMetastoreRestInterceptor, "post_get_backup_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DataprocMetastoreRestInterceptor, "pre_get_backup" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = metastore.GetBackupRequest.pb(metastore.GetBackupRequest()) transcode.return_value = { "method": "post", @@ -14595,6 +14717,7 @@ def test_get_backup_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = metastore.Backup() + post_with_metadata.return_value = metastore.Backup(), metadata client.get_backup( request, @@ -14606,6 +14729,7 @@ def test_get_backup_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_backup_rest_bad_request(request_type=metastore.CreateBackupRequest): @@ -14828,10 +14952,13 @@ def test_create_backup_rest_interceptors(null_interceptor): ), mock.patch.object( transports.DataprocMetastoreRestInterceptor, "post_create_backup" ) as post, mock.patch.object( + transports.DataprocMetastoreRestInterceptor, "post_create_backup_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DataprocMetastoreRestInterceptor, "pre_create_backup" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = metastore.CreateBackupRequest.pb(metastore.CreateBackupRequest()) transcode.return_value = { "method": "post", @@ -14853,6 +14980,7 @@ def test_create_backup_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_backup( request, @@ -14864,6 +14992,7 @@ def test_create_backup_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_backup_rest_bad_request(request_type=metastore.DeleteBackupRequest): @@ -14946,10 +15075,13 @@ def test_delete_backup_rest_interceptors(null_interceptor): ), mock.patch.object( 
transports.DataprocMetastoreRestInterceptor, "post_delete_backup" ) as post, mock.patch.object( + transports.DataprocMetastoreRestInterceptor, "post_delete_backup_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DataprocMetastoreRestInterceptor, "pre_delete_backup" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = metastore.DeleteBackupRequest.pb(metastore.DeleteBackupRequest()) transcode.return_value = { "method": "post", @@ -14971,6 +15103,7 @@ def test_delete_backup_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.delete_backup( request, @@ -14982,6 +15115,7 @@ def test_delete_backup_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_remove_iam_policy_rest_bad_request( @@ -15070,10 +15204,14 @@ def test_remove_iam_policy_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DataprocMetastoreRestInterceptor, "post_remove_iam_policy" ) as post, mock.patch.object( + transports.DataprocMetastoreRestInterceptor, + "post_remove_iam_policy_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DataprocMetastoreRestInterceptor, "pre_remove_iam_policy" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = metastore.RemoveIamPolicyRequest.pb( metastore.RemoveIamPolicyRequest() ) @@ -15099,6 +15237,7 @@ def test_remove_iam_policy_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = metastore.RemoveIamPolicyResponse() + post_with_metadata.return_value = metastore.RemoveIamPolicyResponse(), metadata client.remove_iam_policy( request, @@ -15110,6 +15249,7 @@ def test_remove_iam_policy_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_query_metadata_rest_bad_request(request_type=metastore.QueryMetadataRequest): @@ -15188,10 +15328,13 @@ def test_query_metadata_rest_interceptors(null_interceptor): ), mock.patch.object( transports.DataprocMetastoreRestInterceptor, "post_query_metadata" ) as post, mock.patch.object( + transports.DataprocMetastoreRestInterceptor, "post_query_metadata_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DataprocMetastoreRestInterceptor, "pre_query_metadata" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = metastore.QueryMetadataRequest.pb(metastore.QueryMetadataRequest()) transcode.return_value = { "method": "post", @@ -15213,6 +15356,7 @@ def test_query_metadata_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.query_metadata( request, @@ -15224,6 +15368,7 @@ def test_query_metadata_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_move_table_to_database_rest_bad_request( @@ -15304,10 +15449,14 @@ def test_move_table_to_database_rest_interceptors(null_interceptor): ), mock.patch.object( transports.DataprocMetastoreRestInterceptor, "post_move_table_to_database" ) as post, mock.patch.object( + 
transports.DataprocMetastoreRestInterceptor, + "post_move_table_to_database_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DataprocMetastoreRestInterceptor, "pre_move_table_to_database" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = metastore.MoveTableToDatabaseRequest.pb( metastore.MoveTableToDatabaseRequest() ) @@ -15331,6 +15480,7 @@ def test_move_table_to_database_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.move_table_to_database( request, @@ -15342,6 +15492,7 @@ def test_move_table_to_database_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_alter_metadata_resource_location_rest_bad_request( @@ -15423,11 +15574,15 @@ def test_alter_metadata_resource_location_rest_interceptors(null_interceptor): transports.DataprocMetastoreRestInterceptor, "post_alter_metadata_resource_location", ) as post, mock.patch.object( + transports.DataprocMetastoreRestInterceptor, + "post_alter_metadata_resource_location_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DataprocMetastoreRestInterceptor, "pre_alter_metadata_resource_location", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = metastore.AlterMetadataResourceLocationRequest.pb( metastore.AlterMetadataResourceLocationRequest() ) @@ -15451,6 +15606,7 @@ def test_alter_metadata_resource_location_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.alter_metadata_resource_location( request, @@ -15462,6 +15618,7 @@ def test_alter_metadata_resource_location_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationRequest): diff --git a/packages/google-cloud-dataproc-metastore/tests/unit/gapic/metastore_v1beta/test_dataproc_metastore_federation.py b/packages/google-cloud-dataproc-metastore/tests/unit/gapic/metastore_v1beta/test_dataproc_metastore_federation.py index a37404d5f243..d62df1ff4835 100644 --- a/packages/google-cloud-dataproc-metastore/tests/unit/gapic/metastore_v1beta/test_dataproc_metastore_federation.py +++ b/packages/google-cloud-dataproc-metastore/tests/unit/gapic/metastore_v1beta/test_dataproc_metastore_federation.py @@ -77,6 +77,13 @@ ) from google.cloud.metastore_v1beta.types import metastore, metastore_federation +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -360,6 +367,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = DataprocMetastoreFederationClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = DataprocMetastoreFederationClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -4606,10 +4656,14 @@ def test_list_federations_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DataprocMetastoreFederationRestInterceptor, "post_list_federations" ) as post, mock.patch.object( + transports.DataprocMetastoreFederationRestInterceptor, + "post_list_federations_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DataprocMetastoreFederationRestInterceptor, "pre_list_federations" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = metastore_federation.ListFederationsRequest.pb( metastore_federation.ListFederationsRequest() ) @@ -4635,6 +4689,10 @@ def test_list_federations_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = metastore_federation.ListFederationsResponse() + post_with_metadata.return_value = ( + metastore_federation.ListFederationsResponse(), + metadata, + ) client.list_federations( request, @@ -4646,6 +4704,7 @@ def test_list_federations_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_federation_rest_bad_request( @@ -4740,10 +4799,14 @@ def test_get_federation_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DataprocMetastoreFederationRestInterceptor, "post_get_federation" ) as post, mock.patch.object( + transports.DataprocMetastoreFederationRestInterceptor, + "post_get_federation_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DataprocMetastoreFederationRestInterceptor, "pre_get_federation" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = metastore_federation.GetFederationRequest.pb( metastore_federation.GetFederationRequest() ) @@ -4769,6 +4832,7 @@ def test_get_federation_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = metastore_federation.Federation() + post_with_metadata.return_value = metastore_federation.Federation(), metadata client.get_federation( request, @@ 
-4780,6 +4844,7 @@ def test_get_federation_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_federation_rest_bad_request( @@ -4939,10 +5004,14 @@ def test_create_federation_rest_interceptors(null_interceptor): ), mock.patch.object( transports.DataprocMetastoreFederationRestInterceptor, "post_create_federation" ) as post, mock.patch.object( + transports.DataprocMetastoreFederationRestInterceptor, + "post_create_federation_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DataprocMetastoreFederationRestInterceptor, "pre_create_federation" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = metastore_federation.CreateFederationRequest.pb( metastore_federation.CreateFederationRequest() ) @@ -4966,6 +5035,7 @@ def test_create_federation_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_federation( request, @@ -4977,6 +5047,7 @@ def test_create_federation_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_federation_rest_bad_request( @@ -5140,10 +5211,14 @@ def test_update_federation_rest_interceptors(null_interceptor): ), mock.patch.object( transports.DataprocMetastoreFederationRestInterceptor, "post_update_federation" ) as post, mock.patch.object( + transports.DataprocMetastoreFederationRestInterceptor, + "post_update_federation_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DataprocMetastoreFederationRestInterceptor, "pre_update_federation" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = metastore_federation.UpdateFederationRequest.pb( metastore_federation.UpdateFederationRequest() ) @@ -5167,6 +5242,7 @@ def test_update_federation_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.update_federation( request, @@ -5178,6 +5254,7 @@ def test_update_federation_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_federation_rest_bad_request( @@ -5258,10 +5335,14 @@ def test_delete_federation_rest_interceptors(null_interceptor): ), mock.patch.object( transports.DataprocMetastoreFederationRestInterceptor, "post_delete_federation" ) as post, mock.patch.object( + transports.DataprocMetastoreFederationRestInterceptor, + "post_delete_federation_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DataprocMetastoreFederationRestInterceptor, "pre_delete_federation" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = metastore_federation.DeleteFederationRequest.pb( metastore_federation.DeleteFederationRequest() ) @@ -5285,6 +5366,7 @@ def test_delete_federation_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.delete_federation( request, @@ -5296,6 +5378,7 @@ def 
test_delete_federation_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationRequest): diff --git a/packages/google-cloud-dataproc/CHANGELOG.md b/packages/google-cloud-dataproc/CHANGELOG.md index b17042ad5df1..db51f804bed3 100644 --- a/packages/google-cloud-dataproc/CHANGELOG.md +++ b/packages/google-cloud-dataproc/CHANGELOG.md @@ -4,6 +4,14 @@ [1]: https://pypi.org/project/google-cloud-dataproc/#history +## [5.17.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-dataproc-v5.16.0...google-cloud-dataproc-v5.17.0) (2025-02-12) + + +### Features + +* Add REST Interceptors which support reading metadata ([e92d527](https://github.com/googleapis/google-cloud-python/commit/e92d52797ffbce45d033eb81af24e0cad32baa55)) +* Add support for reading selective GAPIC generation methods from service YAML ([e92d527](https://github.com/googleapis/google-cloud-python/commit/e92d52797ffbce45d033eb81af24e0cad32baa55)) + ## [5.16.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-dataproc-v5.15.1...google-cloud-dataproc-v5.16.0) (2024-12-12) diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc/gapic_version.py b/packages/google-cloud-dataproc/google/cloud/dataproc/gapic_version.py index ae9c1a794e9e..769d6a06054c 100644 --- a/packages/google-cloud-dataproc/google/cloud/dataproc/gapic_version.py +++ b/packages/google-cloud-dataproc/google/cloud/dataproc/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "5.16.0" # {x-release-please-version} +__version__ = "5.17.0" # {x-release-please-version} diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/gapic_version.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/gapic_version.py index ae9c1a794e9e..769d6a06054c 100644 --- a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/gapic_version.py +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "5.16.0" # {x-release-please-version} +__version__ = "5.17.0" # {x-release-please-version} diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/autoscaling_policy_service/client.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/autoscaling_policy_service/client.py index 35592336fce0..bb440a591fbd 100644 --- a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/autoscaling_policy_service/client.py +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/autoscaling_policy_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -492,6 +494,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. 
+ """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -1371,16 +1400,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -1426,16 +1459,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def delete_operation( self, @@ -1658,16 +1695,20 @@ def set_iam_policy( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_iam_policy( self, @@ -1780,16 +1821,20 @@ def get_iam_policy( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def test_iam_permissions( self, @@ -1840,16 +1885,20 @@ def test_iam_permissions( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. 
+ return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/autoscaling_policy_service/transports/rest.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/autoscaling_policy_service/transports/rest.py index 5368c32fcf8f..e0a8198ddf7f 100644 --- a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/autoscaling_policy_service/transports/rest.py +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/autoscaling_policy_service/transports/rest.py @@ -134,12 +134,37 @@ def post_create_autoscaling_policy( ) -> autoscaling_policies.AutoscalingPolicy: """Post-rpc interceptor for create_autoscaling_policy - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_autoscaling_policy_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AutoscalingPolicyService server but before - it is returned to user code. + it is returned to user code. This `post_create_autoscaling_policy` interceptor runs + before the `post_create_autoscaling_policy_with_metadata` interceptor. """ return response + def post_create_autoscaling_policy_with_metadata( + self, + response: autoscaling_policies.AutoscalingPolicy, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + autoscaling_policies.AutoscalingPolicy, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for create_autoscaling_policy + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AutoscalingPolicyService server but before it is returned to user code. + + We recommend only using this `post_create_autoscaling_policy_with_metadata` + interceptor in new development instead of the `post_create_autoscaling_policy` interceptor. + When both interceptors are used, this `post_create_autoscaling_policy_with_metadata` interceptor runs after the + `post_create_autoscaling_policy` interceptor. The (possibly modified) response returned by + `post_create_autoscaling_policy` will be passed to + `post_create_autoscaling_policy_with_metadata`. + """ + return response, metadata + def pre_delete_autoscaling_policy( self, request: autoscaling_policies.DeleteAutoscalingPolicyRequest, @@ -175,12 +200,37 @@ def post_get_autoscaling_policy( ) -> autoscaling_policies.AutoscalingPolicy: """Post-rpc interceptor for get_autoscaling_policy - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_autoscaling_policy_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AutoscalingPolicyService server but before - it is returned to user code. + it is returned to user code. This `post_get_autoscaling_policy` interceptor runs + before the `post_get_autoscaling_policy_with_metadata` interceptor. 
""" return response + def post_get_autoscaling_policy_with_metadata( + self, + response: autoscaling_policies.AutoscalingPolicy, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + autoscaling_policies.AutoscalingPolicy, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for get_autoscaling_policy + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AutoscalingPolicyService server but before it is returned to user code. + + We recommend only using this `post_get_autoscaling_policy_with_metadata` + interceptor in new development instead of the `post_get_autoscaling_policy` interceptor. + When both interceptors are used, this `post_get_autoscaling_policy_with_metadata` interceptor runs after the + `post_get_autoscaling_policy` interceptor. The (possibly modified) response returned by + `post_get_autoscaling_policy` will be passed to + `post_get_autoscaling_policy_with_metadata`. + """ + return response, metadata + def pre_list_autoscaling_policies( self, request: autoscaling_policies.ListAutoscalingPoliciesRequest, @@ -201,12 +251,38 @@ def post_list_autoscaling_policies( ) -> autoscaling_policies.ListAutoscalingPoliciesResponse: """Post-rpc interceptor for list_autoscaling_policies - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_autoscaling_policies_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AutoscalingPolicyService server but before - it is returned to user code. + it is returned to user code. This `post_list_autoscaling_policies` interceptor runs + before the `post_list_autoscaling_policies_with_metadata` interceptor. """ return response + def post_list_autoscaling_policies_with_metadata( + self, + response: autoscaling_policies.ListAutoscalingPoliciesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + autoscaling_policies.ListAutoscalingPoliciesResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_autoscaling_policies + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AutoscalingPolicyService server but before it is returned to user code. + + We recommend only using this `post_list_autoscaling_policies_with_metadata` + interceptor in new development instead of the `post_list_autoscaling_policies` interceptor. + When both interceptors are used, this `post_list_autoscaling_policies_with_metadata` interceptor runs after the + `post_list_autoscaling_policies` interceptor. The (possibly modified) response returned by + `post_list_autoscaling_policies` will be passed to + `post_list_autoscaling_policies_with_metadata`. + """ + return response, metadata + def pre_update_autoscaling_policy( self, request: autoscaling_policies.UpdateAutoscalingPolicyRequest, @@ -227,12 +303,37 @@ def post_update_autoscaling_policy( ) -> autoscaling_policies.AutoscalingPolicy: """Post-rpc interceptor for update_autoscaling_policy - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_autoscaling_policy_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AutoscalingPolicyService server but before - it is returned to user code. + it is returned to user code. 
This `post_update_autoscaling_policy` interceptor runs + before the `post_update_autoscaling_policy_with_metadata` interceptor. """ return response + def post_update_autoscaling_policy_with_metadata( + self, + response: autoscaling_policies.AutoscalingPolicy, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + autoscaling_policies.AutoscalingPolicy, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for update_autoscaling_policy + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AutoscalingPolicyService server but before it is returned to user code. + + We recommend only using this `post_update_autoscaling_policy_with_metadata` + interceptor in new development instead of the `post_update_autoscaling_policy` interceptor. + When both interceptors are used, this `post_update_autoscaling_policy_with_metadata` interceptor runs after the + `post_update_autoscaling_policy` interceptor. The (possibly modified) response returned by + `post_update_autoscaling_policy` will be passed to + `post_update_autoscaling_policy_with_metadata`. + """ + return response, metadata + def pre_get_iam_policy( self, request: iam_policy_pb2.GetIamPolicyRequest, @@ -618,6 +719,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_autoscaling_policy(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_autoscaling_policy_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -878,6 +983,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_autoscaling_policy(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_autoscaling_policy_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1026,6 +1135,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_autoscaling_policies(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_autoscaling_policies_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1182,6 +1295,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_autoscaling_policy(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_autoscaling_policy_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/batch_controller/client.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/batch_controller/client.py index 11a6a8ff080e..ed6668f3d44f 100644 --- a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/batch_controller/client.py +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/batch_controller/client.py @@ -14,6 +14,8 @@ # limitations under the License. 
# from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -515,6 +517,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -1236,16 +1265,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -1291,16 +1324,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def delete_operation( self, @@ -1523,16 +1560,20 @@ def set_iam_policy( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_iam_policy( self, @@ -1645,16 +1686,20 @@ def get_iam_policy( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def test_iam_permissions( self, @@ -1705,16 +1750,20 @@ def test_iam_permissions( # Validate the universe domain. self._validate_universe_domain() - # Send the request. 
- response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/batch_controller/transports/rest.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/batch_controller/transports/rest.py index b9d59a6f4f5a..f5409f69a8c1 100644 --- a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/batch_controller/transports/rest.py +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/batch_controller/transports/rest.py @@ -123,12 +123,35 @@ def post_create_batch( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_batch - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_batch_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the BatchController server but before - it is returned to user code. + it is returned to user code. This `post_create_batch` interceptor runs + before the `post_create_batch_with_metadata` interceptor. """ return response + def post_create_batch_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_batch + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BatchController server but before it is returned to user code. + + We recommend only using this `post_create_batch_with_metadata` + interceptor in new development instead of the `post_create_batch` interceptor. + When both interceptors are used, this `post_create_batch_with_metadata` interceptor runs after the + `post_create_batch` interceptor. The (possibly modified) response returned by + `post_create_batch` will be passed to + `post_create_batch_with_metadata`. + """ + return response, metadata + def pre_delete_batch( self, request: batches.DeleteBatchRequest, @@ -156,12 +179,33 @@ def pre_get_batch( def post_get_batch(self, response: batches.Batch) -> batches.Batch: """Post-rpc interceptor for get_batch - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_batch_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the BatchController server but before - it is returned to user code. + it is returned to user code. This `post_get_batch` interceptor runs + before the `post_get_batch_with_metadata` interceptor. """ return response + def post_get_batch_with_metadata( + self, response: batches.Batch, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[batches.Batch, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_batch + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BatchController server but before it is returned to user code. + + We recommend only using this `post_get_batch_with_metadata` + interceptor in new development instead of the `post_get_batch` interceptor. 
+ When both interceptors are used, this `post_get_batch_with_metadata` interceptor runs after the + `post_get_batch` interceptor. The (possibly modified) response returned by + `post_get_batch` will be passed to + `post_get_batch_with_metadata`. + """ + return response, metadata + def pre_list_batches( self, request: batches.ListBatchesRequest, @@ -179,12 +223,35 @@ def post_list_batches( ) -> batches.ListBatchesResponse: """Post-rpc interceptor for list_batches - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_batches_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the BatchController server but before - it is returned to user code. + it is returned to user code. This `post_list_batches` interceptor runs + before the `post_list_batches_with_metadata` interceptor. """ return response + def post_list_batches_with_metadata( + self, + response: batches.ListBatchesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[batches.ListBatchesResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_batches + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BatchController server but before it is returned to user code. + + We recommend only using this `post_list_batches_with_metadata` + interceptor in new development instead of the `post_list_batches` interceptor. + When both interceptors are used, this `post_list_batches_with_metadata` interceptor runs after the + `post_list_batches` interceptor. The (possibly modified) response returned by + `post_list_batches` will be passed to + `post_list_batches_with_metadata`. + """ + return response, metadata + def pre_get_iam_policy( self, request: iam_policy_pb2.GetIamPolicyRequest, @@ -634,6 +701,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_batch(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_batch_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -887,6 +958,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_batch(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_batch_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1028,6 +1103,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_batches(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_batches_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/cluster_controller/client.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/cluster_controller/client.py index 7f772f804de9..51d1bd162595 100644 --- a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/cluster_controller/client.py +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/cluster_controller/client.py @@ -14,6 +14,8 
@@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -564,6 +566,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -2033,16 +2062,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -2088,16 +2121,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def delete_operation( self, @@ -2320,16 +2357,20 @@ def set_iam_policy( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_iam_policy( self, @@ -2442,16 +2483,20 @@ def get_iam_policy( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def test_iam_permissions( self, @@ -2502,16 +2547,20 @@ def test_iam_permissions( # Validate the universe domain. 
self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/cluster_controller/transports/rest.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/cluster_controller/transports/rest.py index 0f4f79dbd767..1ca2319a7c40 100644 --- a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/cluster_controller/transports/rest.py +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/cluster_controller/transports/rest.py @@ -158,12 +158,35 @@ def post_create_cluster( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_cluster - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_cluster_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ClusterController server but before - it is returned to user code. + it is returned to user code. This `post_create_cluster` interceptor runs + before the `post_create_cluster_with_metadata` interceptor. """ return response + def post_create_cluster_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_cluster + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ClusterController server but before it is returned to user code. + + We recommend only using this `post_create_cluster_with_metadata` + interceptor in new development instead of the `post_create_cluster` interceptor. + When both interceptors are used, this `post_create_cluster_with_metadata` interceptor runs after the + `post_create_cluster` interceptor. The (possibly modified) response returned by + `post_create_cluster` will be passed to + `post_create_cluster_with_metadata`. + """ + return response, metadata + def pre_delete_cluster( self, request: clusters.DeleteClusterRequest, @@ -181,12 +204,35 @@ def post_delete_cluster( ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_cluster - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_cluster_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ClusterController server but before - it is returned to user code. + it is returned to user code. This `post_delete_cluster` interceptor runs + before the `post_delete_cluster_with_metadata` interceptor. """ return response + def post_delete_cluster_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_cluster + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ClusterController server but before it is returned to user code. 
+ + We recommend only using this `post_delete_cluster_with_metadata` + interceptor in new development instead of the `post_delete_cluster` interceptor. + When both interceptors are used, this `post_delete_cluster_with_metadata` interceptor runs after the + `post_delete_cluster` interceptor. The (possibly modified) response returned by + `post_delete_cluster` will be passed to + `post_delete_cluster_with_metadata`. + """ + return response, metadata + def pre_diagnose_cluster( self, request: clusters.DiagnoseClusterRequest, @@ -206,12 +252,35 @@ def post_diagnose_cluster( ) -> operations_pb2.Operation: """Post-rpc interceptor for diagnose_cluster - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_diagnose_cluster_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ClusterController server but before - it is returned to user code. + it is returned to user code. This `post_diagnose_cluster` interceptor runs + before the `post_diagnose_cluster_with_metadata` interceptor. """ return response + def post_diagnose_cluster_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for diagnose_cluster + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ClusterController server but before it is returned to user code. + + We recommend only using this `post_diagnose_cluster_with_metadata` + interceptor in new development instead of the `post_diagnose_cluster` interceptor. + When both interceptors are used, this `post_diagnose_cluster_with_metadata` interceptor runs after the + `post_diagnose_cluster` interceptor. The (possibly modified) response returned by + `post_diagnose_cluster` will be passed to + `post_diagnose_cluster_with_metadata`. + """ + return response, metadata + def pre_get_cluster( self, request: clusters.GetClusterRequest, @@ -227,12 +296,35 @@ def pre_get_cluster( def post_get_cluster(self, response: clusters.Cluster) -> clusters.Cluster: """Post-rpc interceptor for get_cluster - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_cluster_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ClusterController server but before - it is returned to user code. + it is returned to user code. This `post_get_cluster` interceptor runs + before the `post_get_cluster_with_metadata` interceptor. """ return response + def post_get_cluster_with_metadata( + self, + response: clusters.Cluster, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[clusters.Cluster, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_cluster + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ClusterController server but before it is returned to user code. + + We recommend only using this `post_get_cluster_with_metadata` + interceptor in new development instead of the `post_get_cluster` interceptor. + When both interceptors are used, this `post_get_cluster_with_metadata` interceptor runs after the + `post_get_cluster` interceptor. The (possibly modified) response returned by + `post_get_cluster` will be passed to + `post_get_cluster_with_metadata`. 
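A minimal sketch of the recommended usage, assuming application default credentials and a hypothetical `HeaderLoggingInterceptor` name: a subclass of the generated `ClusterControllerRestInterceptor` overrides the new `_with_metadata` hook and is attached through the REST transport. The `metadata` argument is the sequence of HTTP response headers that the transport's `__call__` assembles further below.

from google.cloud import dataproc_v1
from google.cloud.dataproc_v1.services.cluster_controller.transports.rest import (
    ClusterControllerRestInterceptor,
    ClusterControllerRestTransport,
)


class HeaderLoggingInterceptor(ClusterControllerRestInterceptor):
    def post_get_cluster_with_metadata(self, response, metadata):
        # metadata is a sequence of (header-name, value) pairs taken from the
        # HTTP response; log them and pass everything through unchanged.
        for key, value in metadata:
            print(f"{key}: {value}")
        return response, metadata


client = dataproc_v1.ClusterControllerClient(
    transport=ClusterControllerRestTransport(interceptor=HeaderLoggingInterceptor())
)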
+ """ + return response, metadata + def pre_list_clusters( self, request: clusters.ListClustersRequest, @@ -250,12 +342,35 @@ def post_list_clusters( ) -> clusters.ListClustersResponse: """Post-rpc interceptor for list_clusters - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_clusters_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ClusterController server but before - it is returned to user code. + it is returned to user code. This `post_list_clusters` interceptor runs + before the `post_list_clusters_with_metadata` interceptor. """ return response + def post_list_clusters_with_metadata( + self, + response: clusters.ListClustersResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[clusters.ListClustersResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_clusters + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ClusterController server but before it is returned to user code. + + We recommend only using this `post_list_clusters_with_metadata` + interceptor in new development instead of the `post_list_clusters` interceptor. + When both interceptors are used, this `post_list_clusters_with_metadata` interceptor runs after the + `post_list_clusters` interceptor. The (possibly modified) response returned by + `post_list_clusters` will be passed to + `post_list_clusters_with_metadata`. + """ + return response, metadata + def pre_start_cluster( self, request: clusters.StartClusterRequest, @@ -273,12 +388,35 @@ def post_start_cluster( ) -> operations_pb2.Operation: """Post-rpc interceptor for start_cluster - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_start_cluster_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ClusterController server but before - it is returned to user code. + it is returned to user code. This `post_start_cluster` interceptor runs + before the `post_start_cluster_with_metadata` interceptor. """ return response + def post_start_cluster_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for start_cluster + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ClusterController server but before it is returned to user code. + + We recommend only using this `post_start_cluster_with_metadata` + interceptor in new development instead of the `post_start_cluster` interceptor. + When both interceptors are used, this `post_start_cluster_with_metadata` interceptor runs after the + `post_start_cluster` interceptor. The (possibly modified) response returned by + `post_start_cluster` will be passed to + `post_start_cluster_with_metadata`. + """ + return response, metadata + def pre_stop_cluster( self, request: clusters.StopClusterRequest, @@ -296,12 +434,35 @@ def post_stop_cluster( ) -> operations_pb2.Operation: """Post-rpc interceptor for stop_cluster - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_stop_cluster_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response after it is returned by the ClusterController server but before - it is returned to user code. + it is returned to user code. This `post_stop_cluster` interceptor runs + before the `post_stop_cluster_with_metadata` interceptor. """ return response + def post_stop_cluster_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for stop_cluster + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ClusterController server but before it is returned to user code. + + We recommend only using this `post_stop_cluster_with_metadata` + interceptor in new development instead of the `post_stop_cluster` interceptor. + When both interceptors are used, this `post_stop_cluster_with_metadata` interceptor runs after the + `post_stop_cluster` interceptor. The (possibly modified) response returned by + `post_stop_cluster` will be passed to + `post_stop_cluster_with_metadata`. + """ + return response, metadata + def pre_update_cluster( self, request: clusters.UpdateClusterRequest, @@ -319,12 +480,35 @@ def post_update_cluster( ) -> operations_pb2.Operation: """Post-rpc interceptor for update_cluster - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_cluster_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ClusterController server but before - it is returned to user code. + it is returned to user code. This `post_update_cluster` interceptor runs + before the `post_update_cluster_with_metadata` interceptor. """ return response + def post_update_cluster_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_cluster + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ClusterController server but before it is returned to user code. + + We recommend only using this `post_update_cluster_with_metadata` + interceptor in new development instead of the `post_update_cluster` interceptor. + When both interceptors are used, this `post_update_cluster_with_metadata` interceptor runs after the + `post_update_cluster` interceptor. The (possibly modified) response returned by + `post_update_cluster` will be passed to + `post_update_cluster_with_metadata`. 
+ """ + return response, metadata + def pre_get_iam_policy( self, request: iam_policy_pb2.GetIamPolicyRequest, @@ -775,6 +959,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_cluster(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_cluster_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -917,6 +1105,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_cluster(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_cluster_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1068,6 +1260,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_diagnose_cluster(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_diagnose_cluster_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1213,6 +1409,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_cluster(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_cluster_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1356,6 +1556,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_clusters(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_clusters_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1503,6 +1707,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_start_cluster(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_start_cluster_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1650,6 +1858,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_stop_cluster(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_stop_cluster_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1798,6 +2010,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_cluster(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_cluster_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/job_controller/client.py 
b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/job_controller/client.py index d635c5aa83d8..f53e0998ffd4 100644 --- a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/job_controller/client.py +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/job_controller/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -466,6 +468,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -1639,16 +1668,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -1694,16 +1727,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def delete_operation( self, @@ -1926,16 +1963,20 @@ def set_iam_policy( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_iam_policy( self, @@ -2048,16 +2089,20 @@ def get_iam_policy( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def test_iam_permissions( self, @@ -2108,16 +2153,20 @@ def test_iam_permissions( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/job_controller/transports/rest.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/job_controller/transports/rest.py index caf87e6364a7..3137961c022c 100644 --- a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/job_controller/transports/rest.py +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/job_controller/transports/rest.py @@ -145,12 +145,33 @@ def pre_cancel_job( def post_cancel_job(self, response: jobs.Job) -> jobs.Job: """Post-rpc interceptor for cancel_job - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_cancel_job_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the JobController server but before - it is returned to user code. + it is returned to user code. This `post_cancel_job` interceptor runs + before the `post_cancel_job_with_metadata` interceptor. """ return response + def post_cancel_job_with_metadata( + self, response: jobs.Job, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[jobs.Job, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for cancel_job + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the JobController server but before it is returned to user code. + + We recommend only using this `post_cancel_job_with_metadata` + interceptor in new development instead of the `post_cancel_job` interceptor. + When both interceptors are used, this `post_cancel_job_with_metadata` interceptor runs after the + `post_cancel_job` interceptor. The (possibly modified) response returned by + `post_cancel_job` will be passed to + `post_cancel_job_with_metadata`. + """ + return response, metadata + def pre_delete_job( self, request: jobs.DeleteJobRequest, @@ -178,12 +199,33 @@ def pre_get_job( def post_get_job(self, response: jobs.Job) -> jobs.Job: """Post-rpc interceptor for get_job - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_job_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the JobController server but before - it is returned to user code. + it is returned to user code. This `post_get_job` interceptor runs + before the `post_get_job_with_metadata` interceptor. 
""" return response + def post_get_job_with_metadata( + self, response: jobs.Job, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[jobs.Job, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_job + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the JobController server but before it is returned to user code. + + We recommend only using this `post_get_job_with_metadata` + interceptor in new development instead of the `post_get_job` interceptor. + When both interceptors are used, this `post_get_job_with_metadata` interceptor runs after the + `post_get_job` interceptor. The (possibly modified) response returned by + `post_get_job` will be passed to + `post_get_job_with_metadata`. + """ + return response, metadata + def pre_list_jobs( self, request: jobs.ListJobsRequest, @@ -199,12 +241,35 @@ def pre_list_jobs( def post_list_jobs(self, response: jobs.ListJobsResponse) -> jobs.ListJobsResponse: """Post-rpc interceptor for list_jobs - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_jobs_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the JobController server but before - it is returned to user code. + it is returned to user code. This `post_list_jobs` interceptor runs + before the `post_list_jobs_with_metadata` interceptor. """ return response + def post_list_jobs_with_metadata( + self, + response: jobs.ListJobsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[jobs.ListJobsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_jobs + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the JobController server but before it is returned to user code. + + We recommend only using this `post_list_jobs_with_metadata` + interceptor in new development instead of the `post_list_jobs` interceptor. + When both interceptors are used, this `post_list_jobs_with_metadata` interceptor runs after the + `post_list_jobs` interceptor. The (possibly modified) response returned by + `post_list_jobs` will be passed to + `post_list_jobs_with_metadata`. + """ + return response, metadata + def pre_submit_job( self, request: jobs.SubmitJobRequest, @@ -220,12 +285,33 @@ def pre_submit_job( def post_submit_job(self, response: jobs.Job) -> jobs.Job: """Post-rpc interceptor for submit_job - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_submit_job_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the JobController server but before - it is returned to user code. + it is returned to user code. This `post_submit_job` interceptor runs + before the `post_submit_job_with_metadata` interceptor. """ return response + def post_submit_job_with_metadata( + self, response: jobs.Job, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[jobs.Job, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for submit_job + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the JobController server but before it is returned to user code. + + We recommend only using this `post_submit_job_with_metadata` + interceptor in new development instead of the `post_submit_job` interceptor. 
+ When both interceptors are used, this `post_submit_job_with_metadata` interceptor runs after the + `post_submit_job` interceptor. The (possibly modified) response returned by + `post_submit_job` will be passed to + `post_submit_job_with_metadata`. + """ + return response, metadata + def pre_submit_job_as_operation( self, request: jobs.SubmitJobRequest, @@ -243,12 +329,35 @@ def post_submit_job_as_operation( ) -> operations_pb2.Operation: """Post-rpc interceptor for submit_job_as_operation - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_submit_job_as_operation_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the JobController server but before - it is returned to user code. + it is returned to user code. This `post_submit_job_as_operation` interceptor runs + before the `post_submit_job_as_operation_with_metadata` interceptor. """ return response + def post_submit_job_as_operation_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for submit_job_as_operation + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the JobController server but before it is returned to user code. + + We recommend only using this `post_submit_job_as_operation_with_metadata` + interceptor in new development instead of the `post_submit_job_as_operation` interceptor. + When both interceptors are used, this `post_submit_job_as_operation_with_metadata` interceptor runs after the + `post_submit_job_as_operation` interceptor. The (possibly modified) response returned by + `post_submit_job_as_operation` will be passed to + `post_submit_job_as_operation_with_metadata`. + """ + return response, metadata + def pre_update_job( self, request: jobs.UpdateJobRequest, @@ -264,12 +373,33 @@ def pre_update_job( def post_update_job(self, response: jobs.Job) -> jobs.Job: """Post-rpc interceptor for update_job - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_job_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the JobController server but before - it is returned to user code. + it is returned to user code. This `post_update_job` interceptor runs + before the `post_update_job_with_metadata` interceptor. """ return response + def post_update_job_with_metadata( + self, response: jobs.Job, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[jobs.Job, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_job + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the JobController server but before it is returned to user code. + + We recommend only using this `post_update_job_with_metadata` + interceptor in new development instead of the `post_update_job` interceptor. + When both interceptors are used, this `post_update_job_with_metadata` interceptor runs after the + `post_update_job` interceptor. The (possibly modified) response returned by + `post_update_job` will be passed to + `post_update_job_with_metadata`. 
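When both hooks are overridden, the ordering described above can be sketched as follows (illustrative only; `AuditingInterceptor` is a made-up name):

from google.cloud.dataproc_v1.services.job_controller.transports.rest import (
    JobControllerRestInterceptor,
)


class AuditingInterceptor(JobControllerRestInterceptor):
    def post_update_job(self, response):
        # Runs first; whatever is returned here is what the metadata-aware
        # hook below receives as its response argument.
        return response

    def post_update_job_with_metadata(self, response, metadata):
        # Runs second, with the (possibly modified) response plus the HTTP
        # response headers as metadata.
        return response, metadata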
+ """ + return response, metadata + def pre_get_iam_policy( self, request: iam_policy_pb2.GetIamPolicyRequest, @@ -723,6 +853,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_cancel_job(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_cancel_job_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -976,6 +1110,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_job(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_job_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1120,6 +1258,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_jobs(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_jobs_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1272,6 +1414,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_submit_job(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_submit_job_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1421,6 +1567,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_submit_job_as_operation(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_submit_job_as_operation_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1573,6 +1723,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_job(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_job_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/node_group_controller/client.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/node_group_controller/client.py index 16f48142eb9a..5c08ef818030 100644 --- a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/node_group_controller/client.py +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/node_group_controller/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -493,6 +495,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. 
+ + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -1148,16 +1177,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -1203,16 +1236,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def delete_operation( self, @@ -1435,16 +1472,20 @@ def set_iam_policy( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_iam_policy( self, @@ -1557,16 +1598,20 @@ def get_iam_policy( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def test_iam_permissions( self, @@ -1617,16 +1662,20 @@ def test_iam_permissions( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. 
+ return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/node_group_controller/transports/rest.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/node_group_controller/transports/rest.py index 50d0d98b4649..516f05afd6b0 100644 --- a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/node_group_controller/transports/rest.py +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/node_group_controller/transports/rest.py @@ -120,12 +120,35 @@ def post_create_node_group( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_node_group - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_node_group_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the NodeGroupController server but before - it is returned to user code. + it is returned to user code. This `post_create_node_group` interceptor runs + before the `post_create_node_group_with_metadata` interceptor. """ return response + def post_create_node_group_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_node_group + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the NodeGroupController server but before it is returned to user code. + + We recommend only using this `post_create_node_group_with_metadata` + interceptor in new development instead of the `post_create_node_group` interceptor. + When both interceptors are used, this `post_create_node_group_with_metadata` interceptor runs after the + `post_create_node_group` interceptor. The (possibly modified) response returned by + `post_create_node_group` will be passed to + `post_create_node_group_with_metadata`. + """ + return response, metadata + def pre_get_node_group( self, request: node_groups.GetNodeGroupRequest, @@ -143,12 +166,35 @@ def pre_get_node_group( def post_get_node_group(self, response: clusters.NodeGroup) -> clusters.NodeGroup: """Post-rpc interceptor for get_node_group - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_node_group_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the NodeGroupController server but before - it is returned to user code. + it is returned to user code. This `post_get_node_group` interceptor runs + before the `post_get_node_group_with_metadata` interceptor. """ return response + def post_get_node_group_with_metadata( + self, + response: clusters.NodeGroup, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[clusters.NodeGroup, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_node_group + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the NodeGroupController server but before it is returned to user code. + + We recommend only using this `post_get_node_group_with_metadata` + interceptor in new development instead of the `post_get_node_group` interceptor. 
+ When both interceptors are used, this `post_get_node_group_with_metadata` interceptor runs after the + `post_get_node_group` interceptor. The (possibly modified) response returned by + `post_get_node_group` will be passed to + `post_get_node_group_with_metadata`. + """ + return response, metadata + def pre_resize_node_group( self, request: node_groups.ResizeNodeGroupRequest, @@ -168,12 +214,35 @@ def post_resize_node_group( ) -> operations_pb2.Operation: """Post-rpc interceptor for resize_node_group - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_resize_node_group_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the NodeGroupController server but before - it is returned to user code. + it is returned to user code. This `post_resize_node_group` interceptor runs + before the `post_resize_node_group_with_metadata` interceptor. """ return response + def post_resize_node_group_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for resize_node_group + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the NodeGroupController server but before it is returned to user code. + + We recommend only using this `post_resize_node_group_with_metadata` + interceptor in new development instead of the `post_resize_node_group` interceptor. + When both interceptors are used, this `post_resize_node_group_with_metadata` interceptor runs after the + `post_resize_node_group` interceptor. The (possibly modified) response returned by + `post_resize_node_group` will be passed to + `post_resize_node_group_with_metadata`. 
+ """ + return response, metadata + def pre_get_iam_policy( self, request: iam_policy_pb2.GetIamPolicyRequest, @@ -626,6 +695,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_node_group(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_node_group_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -771,6 +844,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_node_group(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_node_group_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -921,6 +998,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_resize_node_group(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_resize_node_group_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/session_controller/client.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/session_controller/client.py index eb72e4000be9..85a3cde3169d 100644 --- a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/session_controller/client.py +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/session_controller/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -537,6 +539,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -1407,16 +1436,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. 
+ return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -1462,16 +1495,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def delete_operation( self, @@ -1694,16 +1731,20 @@ def set_iam_policy( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_iam_policy( self, @@ -1816,16 +1857,20 @@ def get_iam_policy( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def test_iam_permissions( self, @@ -1876,16 +1921,20 @@ def test_iam_permissions( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/session_controller/transports/rest.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/session_controller/transports/rest.py index 7da55caf4d68..faf081e98f82 100644 --- a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/session_controller/transports/rest.py +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/session_controller/transports/rest.py @@ -134,12 +134,35 @@ def post_create_session( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_session - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_session_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the SessionController server but before - it is returned to user code. + it is returned to user code. This `post_create_session` interceptor runs + before the `post_create_session_with_metadata` interceptor. 
""" return response + def post_create_session_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_session + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SessionController server but before it is returned to user code. + + We recommend only using this `post_create_session_with_metadata` + interceptor in new development instead of the `post_create_session` interceptor. + When both interceptors are used, this `post_create_session_with_metadata` interceptor runs after the + `post_create_session` interceptor. The (possibly modified) response returned by + `post_create_session` will be passed to + `post_create_session_with_metadata`. + """ + return response, metadata + def pre_delete_session( self, request: sessions.DeleteSessionRequest, @@ -157,12 +180,35 @@ def post_delete_session( ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_session - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_session_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the SessionController server but before - it is returned to user code. + it is returned to user code. This `post_delete_session` interceptor runs + before the `post_delete_session_with_metadata` interceptor. """ return response + def post_delete_session_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_session + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SessionController server but before it is returned to user code. + + We recommend only using this `post_delete_session_with_metadata` + interceptor in new development instead of the `post_delete_session` interceptor. + When both interceptors are used, this `post_delete_session_with_metadata` interceptor runs after the + `post_delete_session` interceptor. The (possibly modified) response returned by + `post_delete_session` will be passed to + `post_delete_session_with_metadata`. + """ + return response, metadata + def pre_get_session( self, request: sessions.GetSessionRequest, @@ -178,12 +224,35 @@ def pre_get_session( def post_get_session(self, response: sessions.Session) -> sessions.Session: """Post-rpc interceptor for get_session - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_session_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the SessionController server but before - it is returned to user code. + it is returned to user code. This `post_get_session` interceptor runs + before the `post_get_session_with_metadata` interceptor. """ return response + def post_get_session_with_metadata( + self, + response: sessions.Session, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[sessions.Session, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_session + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SessionController server but before it is returned to user code. 
+ + We recommend only using this `post_get_session_with_metadata` + interceptor in new development instead of the `post_get_session` interceptor. + When both interceptors are used, this `post_get_session_with_metadata` interceptor runs after the + `post_get_session` interceptor. The (possibly modified) response returned by + `post_get_session` will be passed to + `post_get_session_with_metadata`. + """ + return response, metadata + def pre_list_sessions( self, request: sessions.ListSessionsRequest, @@ -201,12 +270,35 @@ def post_list_sessions( ) -> sessions.ListSessionsResponse: """Post-rpc interceptor for list_sessions - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_sessions_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the SessionController server but before - it is returned to user code. + it is returned to user code. This `post_list_sessions` interceptor runs + before the `post_list_sessions_with_metadata` interceptor. """ return response + def post_list_sessions_with_metadata( + self, + response: sessions.ListSessionsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[sessions.ListSessionsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_sessions + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SessionController server but before it is returned to user code. + + We recommend only using this `post_list_sessions_with_metadata` + interceptor in new development instead of the `post_list_sessions` interceptor. + When both interceptors are used, this `post_list_sessions_with_metadata` interceptor runs after the + `post_list_sessions` interceptor. The (possibly modified) response returned by + `post_list_sessions` will be passed to + `post_list_sessions_with_metadata`. + """ + return response, metadata + def pre_terminate_session( self, request: sessions.TerminateSessionRequest, @@ -226,12 +318,35 @@ def post_terminate_session( ) -> operations_pb2.Operation: """Post-rpc interceptor for terminate_session - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_terminate_session_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the SessionController server but before - it is returned to user code. + it is returned to user code. This `post_terminate_session` interceptor runs + before the `post_terminate_session_with_metadata` interceptor. """ return response + def post_terminate_session_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for terminate_session + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SessionController server but before it is returned to user code. + + We recommend only using this `post_terminate_session_with_metadata` + interceptor in new development instead of the `post_terminate_session` interceptor. + When both interceptors are used, this `post_terminate_session_with_metadata` interceptor runs after the + `post_terminate_session` interceptor. The (possibly modified) response returned by + `post_terminate_session` will be passed to + `post_terminate_session_with_metadata`. 
+ """ + return response, metadata + def pre_get_iam_policy( self, request: iam_policy_pb2.GetIamPolicyRequest, @@ -682,6 +797,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_session(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_session_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -824,6 +943,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_session(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_session_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -965,6 +1088,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_session(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_session_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1106,6 +1233,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_sessions(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_sessions_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1257,6 +1388,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_terminate_session(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_terminate_session_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/session_template_controller/client.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/session_template_controller/client.py index 700e27d8cb88..df32d216b828 100644 --- a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/session_template_controller/client.py +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/session_template_controller/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -515,6 +517,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. 
+ """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -1333,16 +1362,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -1388,16 +1421,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def delete_operation( self, @@ -1620,16 +1657,20 @@ def set_iam_policy( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_iam_policy( self, @@ -1742,16 +1783,20 @@ def get_iam_policy( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def test_iam_permissions( self, @@ -1802,16 +1847,20 @@ def test_iam_permissions( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. 
+ return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/session_template_controller/transports/rest.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/session_template_controller/transports/rest.py index eaf6b7730a31..58c9ecfe5c05 100644 --- a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/session_template_controller/transports/rest.py +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/session_template_controller/transports/rest.py @@ -134,12 +134,37 @@ def post_create_session_template( ) -> session_templates.SessionTemplate: """Post-rpc interceptor for create_session_template - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_session_template_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the SessionTemplateController server but before - it is returned to user code. + it is returned to user code. This `post_create_session_template` interceptor runs + before the `post_create_session_template_with_metadata` interceptor. """ return response + def post_create_session_template_with_metadata( + self, + response: session_templates.SessionTemplate, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + session_templates.SessionTemplate, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for create_session_template + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SessionTemplateController server but before it is returned to user code. + + We recommend only using this `post_create_session_template_with_metadata` + interceptor in new development instead of the `post_create_session_template` interceptor. + When both interceptors are used, this `post_create_session_template_with_metadata` interceptor runs after the + `post_create_session_template` interceptor. The (possibly modified) response returned by + `post_create_session_template` will be passed to + `post_create_session_template_with_metadata`. + """ + return response, metadata + def pre_delete_session_template( self, request: session_templates.DeleteSessionTemplateRequest, @@ -175,12 +200,37 @@ def post_get_session_template( ) -> session_templates.SessionTemplate: """Post-rpc interceptor for get_session_template - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_session_template_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the SessionTemplateController server but before - it is returned to user code. + it is returned to user code. This `post_get_session_template` interceptor runs + before the `post_get_session_template_with_metadata` interceptor. """ return response + def post_get_session_template_with_metadata( + self, + response: session_templates.SessionTemplate, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + session_templates.SessionTemplate, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for get_session_template + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SessionTemplateController server but before it is returned to user code. 
+ + We recommend only using this `post_get_session_template_with_metadata` + interceptor in new development instead of the `post_get_session_template` interceptor. + When both interceptors are used, this `post_get_session_template_with_metadata` interceptor runs after the + `post_get_session_template` interceptor. The (possibly modified) response returned by + `post_get_session_template` will be passed to + `post_get_session_template_with_metadata`. + """ + return response, metadata + def pre_list_session_templates( self, request: session_templates.ListSessionTemplatesRequest, @@ -201,12 +251,38 @@ def post_list_session_templates( ) -> session_templates.ListSessionTemplatesResponse: """Post-rpc interceptor for list_session_templates - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_session_templates_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the SessionTemplateController server but before - it is returned to user code. + it is returned to user code. This `post_list_session_templates` interceptor runs + before the `post_list_session_templates_with_metadata` interceptor. """ return response + def post_list_session_templates_with_metadata( + self, + response: session_templates.ListSessionTemplatesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + session_templates.ListSessionTemplatesResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_session_templates + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SessionTemplateController server but before it is returned to user code. + + We recommend only using this `post_list_session_templates_with_metadata` + interceptor in new development instead of the `post_list_session_templates` interceptor. + When both interceptors are used, this `post_list_session_templates_with_metadata` interceptor runs after the + `post_list_session_templates` interceptor. The (possibly modified) response returned by + `post_list_session_templates` will be passed to + `post_list_session_templates_with_metadata`. + """ + return response, metadata + def pre_update_session_template( self, request: session_templates.UpdateSessionTemplateRequest, @@ -227,12 +303,37 @@ def post_update_session_template( ) -> session_templates.SessionTemplate: """Post-rpc interceptor for update_session_template - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_session_template_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the SessionTemplateController server but before - it is returned to user code. + it is returned to user code. This `post_update_session_template` interceptor runs + before the `post_update_session_template_with_metadata` interceptor. """ return response + def post_update_session_template_with_metadata( + self, + response: session_templates.SessionTemplate, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + session_templates.SessionTemplate, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for update_session_template + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SessionTemplateController server but before it is returned to user code. 
+ + We recommend only using this `post_update_session_template_with_metadata` + interceptor in new development instead of the `post_update_session_template` interceptor. + When both interceptors are used, this `post_update_session_template_with_metadata` interceptor runs after the + `post_update_session_template` interceptor. The (possibly modified) response returned by + `post_update_session_template` will be passed to + `post_update_session_template_with_metadata`. + """ + return response, metadata + def pre_get_iam_policy( self, request: iam_policy_pb2.GetIamPolicyRequest, @@ -620,6 +721,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_session_template(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_session_template_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -878,6 +983,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_session_template(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_session_template_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1024,6 +1133,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_session_templates(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_session_templates_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1178,6 +1291,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_session_template(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_session_template_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/workflow_template_service/client.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/workflow_template_service/client.py index 5a2df2aa2e4e..9a4eac8e3138 100644 --- a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/workflow_template_service/client.py +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/workflow_template_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -567,6 +569,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. 
+ """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -1811,16 +1840,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -1866,16 +1899,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def delete_operation( self, @@ -2098,16 +2135,20 @@ def set_iam_policy( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_iam_policy( self, @@ -2220,16 +2261,20 @@ def get_iam_policy( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def test_iam_permissions( self, @@ -2280,16 +2325,20 @@ def test_iam_permissions( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. 
+ return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/workflow_template_service/transports/rest.py b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/workflow_template_service/transports/rest.py index 525d3c00b6d1..95397cff990d 100644 --- a/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/workflow_template_service/transports/rest.py +++ b/packages/google-cloud-dataproc/google/cloud/dataproc_v1/services/workflow_template_service/transports/rest.py @@ -150,12 +150,37 @@ def post_create_workflow_template( ) -> workflow_templates.WorkflowTemplate: """Post-rpc interceptor for create_workflow_template - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_workflow_template_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the WorkflowTemplateService server but before - it is returned to user code. + it is returned to user code. This `post_create_workflow_template` interceptor runs + before the `post_create_workflow_template_with_metadata` interceptor. """ return response + def post_create_workflow_template_with_metadata( + self, + response: workflow_templates.WorkflowTemplate, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + workflow_templates.WorkflowTemplate, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for create_workflow_template + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the WorkflowTemplateService server but before it is returned to user code. + + We recommend only using this `post_create_workflow_template_with_metadata` + interceptor in new development instead of the `post_create_workflow_template` interceptor. + When both interceptors are used, this `post_create_workflow_template_with_metadata` interceptor runs after the + `post_create_workflow_template` interceptor. The (possibly modified) response returned by + `post_create_workflow_template` will be passed to + `post_create_workflow_template_with_metadata`. + """ + return response, metadata + def pre_delete_workflow_template( self, request: workflow_templates.DeleteWorkflowTemplateRequest, @@ -191,12 +216,37 @@ def post_get_workflow_template( ) -> workflow_templates.WorkflowTemplate: """Post-rpc interceptor for get_workflow_template - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_workflow_template_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the WorkflowTemplateService server but before - it is returned to user code. + it is returned to user code. This `post_get_workflow_template` interceptor runs + before the `post_get_workflow_template_with_metadata` interceptor. """ return response + def post_get_workflow_template_with_metadata( + self, + response: workflow_templates.WorkflowTemplate, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + workflow_templates.WorkflowTemplate, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for get_workflow_template + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the WorkflowTemplateService server but before it is returned to user code. 
+ + We recommend only using this `post_get_workflow_template_with_metadata` + interceptor in new development instead of the `post_get_workflow_template` interceptor. + When both interceptors are used, this `post_get_workflow_template_with_metadata` interceptor runs after the + `post_get_workflow_template` interceptor. The (possibly modified) response returned by + `post_get_workflow_template` will be passed to + `post_get_workflow_template_with_metadata`. + """ + return response, metadata + def pre_instantiate_inline_workflow_template( self, request: workflow_templates.InstantiateInlineWorkflowTemplateRequest, @@ -217,12 +267,35 @@ def post_instantiate_inline_workflow_template( ) -> operations_pb2.Operation: """Post-rpc interceptor for instantiate_inline_workflow_template - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_instantiate_inline_workflow_template_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the WorkflowTemplateService server but before - it is returned to user code. + it is returned to user code. This `post_instantiate_inline_workflow_template` interceptor runs + before the `post_instantiate_inline_workflow_template_with_metadata` interceptor. """ return response + def post_instantiate_inline_workflow_template_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for instantiate_inline_workflow_template + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the WorkflowTemplateService server but before it is returned to user code. + + We recommend only using this `post_instantiate_inline_workflow_template_with_metadata` + interceptor in new development instead of the `post_instantiate_inline_workflow_template` interceptor. + When both interceptors are used, this `post_instantiate_inline_workflow_template_with_metadata` interceptor runs after the + `post_instantiate_inline_workflow_template` interceptor. The (possibly modified) response returned by + `post_instantiate_inline_workflow_template` will be passed to + `post_instantiate_inline_workflow_template_with_metadata`. + """ + return response, metadata + def pre_instantiate_workflow_template( self, request: workflow_templates.InstantiateWorkflowTemplateRequest, @@ -243,12 +316,35 @@ def post_instantiate_workflow_template( ) -> operations_pb2.Operation: """Post-rpc interceptor for instantiate_workflow_template - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_instantiate_workflow_template_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the WorkflowTemplateService server but before - it is returned to user code. + it is returned to user code. This `post_instantiate_workflow_template` interceptor runs + before the `post_instantiate_workflow_template_with_metadata` interceptor. 
""" return response + def post_instantiate_workflow_template_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for instantiate_workflow_template + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the WorkflowTemplateService server but before it is returned to user code. + + We recommend only using this `post_instantiate_workflow_template_with_metadata` + interceptor in new development instead of the `post_instantiate_workflow_template` interceptor. + When both interceptors are used, this `post_instantiate_workflow_template_with_metadata` interceptor runs after the + `post_instantiate_workflow_template` interceptor. The (possibly modified) response returned by + `post_instantiate_workflow_template` will be passed to + `post_instantiate_workflow_template_with_metadata`. + """ + return response, metadata + def pre_list_workflow_templates( self, request: workflow_templates.ListWorkflowTemplatesRequest, @@ -269,12 +365,38 @@ def post_list_workflow_templates( ) -> workflow_templates.ListWorkflowTemplatesResponse: """Post-rpc interceptor for list_workflow_templates - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_workflow_templates_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the WorkflowTemplateService server but before - it is returned to user code. + it is returned to user code. This `post_list_workflow_templates` interceptor runs + before the `post_list_workflow_templates_with_metadata` interceptor. """ return response + def post_list_workflow_templates_with_metadata( + self, + response: workflow_templates.ListWorkflowTemplatesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + workflow_templates.ListWorkflowTemplatesResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_workflow_templates + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the WorkflowTemplateService server but before it is returned to user code. + + We recommend only using this `post_list_workflow_templates_with_metadata` + interceptor in new development instead of the `post_list_workflow_templates` interceptor. + When both interceptors are used, this `post_list_workflow_templates_with_metadata` interceptor runs after the + `post_list_workflow_templates` interceptor. The (possibly modified) response returned by + `post_list_workflow_templates` will be passed to + `post_list_workflow_templates_with_metadata`. + """ + return response, metadata + def pre_update_workflow_template( self, request: workflow_templates.UpdateWorkflowTemplateRequest, @@ -295,12 +417,37 @@ def post_update_workflow_template( ) -> workflow_templates.WorkflowTemplate: """Post-rpc interceptor for update_workflow_template - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_workflow_template_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the WorkflowTemplateService server but before - it is returned to user code. + it is returned to user code. This `post_update_workflow_template` interceptor runs + before the `post_update_workflow_template_with_metadata` interceptor. 
""" return response + def post_update_workflow_template_with_metadata( + self, + response: workflow_templates.WorkflowTemplate, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + workflow_templates.WorkflowTemplate, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for update_workflow_template + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the WorkflowTemplateService server but before it is returned to user code. + + We recommend only using this `post_update_workflow_template_with_metadata` + interceptor in new development instead of the `post_update_workflow_template` interceptor. + When both interceptors are used, this `post_update_workflow_template_with_metadata` interceptor runs after the + `post_update_workflow_template` interceptor. The (possibly modified) response returned by + `post_update_workflow_template` will be passed to + `post_update_workflow_template_with_metadata`. + """ + return response, metadata + def pre_get_iam_policy( self, request: iam_policy_pb2.GetIamPolicyRequest, @@ -755,6 +902,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_workflow_template(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_workflow_template_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1017,6 +1168,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_workflow_template(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_workflow_template_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1176,6 +1331,13 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_instantiate_inline_workflow_template(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_instantiate_inline_workflow_template_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1330,6 +1492,13 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_instantiate_workflow_template(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_instantiate_workflow_template_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1476,6 +1645,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_workflow_templates(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_workflow_templates_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1632,6 +1805,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_workflow_template(resp) + response_metadata = [(k, str(v)) for k, v in 
response.headers.items()] + resp, _ = self._interceptor.post_update_workflow_template_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-dataproc/samples/generated_samples/snippet_metadata_google.cloud.dataproc.v1.json b/packages/google-cloud-dataproc/samples/generated_samples/snippet_metadata_google.cloud.dataproc.v1.json index 58522e93787a..15126da08c8b 100644 --- a/packages/google-cloud-dataproc/samples/generated_samples/snippet_metadata_google.cloud.dataproc.v1.json +++ b/packages/google-cloud-dataproc/samples/generated_samples/snippet_metadata_google.cloud.dataproc.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-dataproc", - "version": "5.16.0" + "version": "5.17.0" }, "snippets": [ { diff --git a/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_autoscaling_policy_service.py b/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_autoscaling_policy_service.py index 72b78dd09615..4df4a9cad22c 100644 --- a/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_autoscaling_policy_service.py +++ b/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_autoscaling_policy_service.py @@ -65,6 +65,13 @@ ) from google.cloud.dataproc_v1.types import autoscaling_policies +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -340,6 +347,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
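
Editorial note: the unit tests added below exercise the new `_add_cred_info_for_auth_errors` client helper. As a quick orientation, the following standalone sketch restates the helper's gating logic with stand-in objects; `FakeCredentials`, `FakeError`, and `add_cred_info_for_auth_errors` are illustrative names only, not part of the library, and this is not the generated method itself.

    import json
    from http import HTTPStatus


    class FakeCredentials:
        """Stand-in for a google-auth credential object."""

        def get_cred_info(self):
            # get_cred_info is only available in google-auth>=2.35.0, which is
            # why the generated helper probes for it with hasattr() first.
            return {"credential_type": "service account credentials"}


    class FakeError(Exception):
        """Stand-in for google.api_core.exceptions.GoogleAPICallError."""

        def __init__(self, code, details):
            super().__init__("message")
            self.code = code
            self._details = details


    def add_cred_info_for_auth_errors(error, cred):
        # Same gate as the generated helper: only 401/403/404 errors are
        # annotated with credential info.
        if error.code not in (
            HTTPStatus.UNAUTHORIZED,
            HTTPStatus.FORBIDDEN,
            HTTPStatus.NOT_FOUND,
        ):
            return
        if not hasattr(cred, "get_cred_info"):
            return
        cred_info = cred.get_cred_info()
        if cred_info and hasattr(error._details, "append"):
            error._details.append(json.dumps(cred_info))


    err = FakeError(HTTPStatus.FORBIDDEN, details=["original detail"])
    add_cred_info_for_auth_errors(err, FakeCredentials())
    print(err._details)
    # -> ['original detail', '{"credential_type": "service account credentials"}']
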
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = AutoscalingPolicyServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = AutoscalingPolicyServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -4633,11 +4683,15 @@ def test_create_autoscaling_policy_rest_interceptors(null_interceptor): transports.AutoscalingPolicyServiceRestInterceptor, "post_create_autoscaling_policy", ) as post, mock.patch.object( + transports.AutoscalingPolicyServiceRestInterceptor, + "post_create_autoscaling_policy_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AutoscalingPolicyServiceRestInterceptor, "pre_create_autoscaling_policy", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = autoscaling_policies.CreateAutoscalingPolicyRequest.pb( autoscaling_policies.CreateAutoscalingPolicyRequest() ) @@ -4663,6 +4717,10 @@ def test_create_autoscaling_policy_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = autoscaling_policies.AutoscalingPolicy() + post_with_metadata.return_value = ( + autoscaling_policies.AutoscalingPolicy(), + metadata, + ) client.create_autoscaling_policy( request, @@ -4674,6 +4732,7 @@ def test_create_autoscaling_policy_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_autoscaling_policy_rest_bad_request( @@ -4855,11 +4914,15 @@ def test_update_autoscaling_policy_rest_interceptors(null_interceptor): transports.AutoscalingPolicyServiceRestInterceptor, "post_update_autoscaling_policy", ) as post, mock.patch.object( + transports.AutoscalingPolicyServiceRestInterceptor, + "post_update_autoscaling_policy_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AutoscalingPolicyServiceRestInterceptor, "pre_update_autoscaling_policy", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = autoscaling_policies.UpdateAutoscalingPolicyRequest.pb( autoscaling_policies.UpdateAutoscalingPolicyRequest() ) @@ -4885,6 +4948,10 @@ def test_update_autoscaling_policy_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = autoscaling_policies.AutoscalingPolicy() + post_with_metadata.return_value = ( + 
autoscaling_policies.AutoscalingPolicy(), + metadata, + ) client.update_autoscaling_policy( request, @@ -4896,6 +4963,7 @@ def test_update_autoscaling_policy_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_autoscaling_policy_rest_bad_request( @@ -4987,10 +5055,14 @@ def test_get_autoscaling_policy_rest_interceptors(null_interceptor): transports.AutoscalingPolicyServiceRestInterceptor, "post_get_autoscaling_policy", ) as post, mock.patch.object( + transports.AutoscalingPolicyServiceRestInterceptor, + "post_get_autoscaling_policy_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AutoscalingPolicyServiceRestInterceptor, "pre_get_autoscaling_policy" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = autoscaling_policies.GetAutoscalingPolicyRequest.pb( autoscaling_policies.GetAutoscalingPolicyRequest() ) @@ -5016,6 +5088,10 @@ def test_get_autoscaling_policy_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = autoscaling_policies.AutoscalingPolicy() + post_with_metadata.return_value = ( + autoscaling_policies.AutoscalingPolicy(), + metadata, + ) client.get_autoscaling_policy( request, @@ -5027,6 +5103,7 @@ def test_get_autoscaling_policy_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_autoscaling_policies_rest_bad_request( @@ -5114,11 +5191,15 @@ def test_list_autoscaling_policies_rest_interceptors(null_interceptor): transports.AutoscalingPolicyServiceRestInterceptor, "post_list_autoscaling_policies", ) as post, mock.patch.object( + transports.AutoscalingPolicyServiceRestInterceptor, + "post_list_autoscaling_policies_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AutoscalingPolicyServiceRestInterceptor, "pre_list_autoscaling_policies", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = autoscaling_policies.ListAutoscalingPoliciesRequest.pb( autoscaling_policies.ListAutoscalingPoliciesRequest() ) @@ -5144,6 +5225,10 @@ def test_list_autoscaling_policies_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = autoscaling_policies.ListAutoscalingPoliciesResponse() + post_with_metadata.return_value = ( + autoscaling_policies.ListAutoscalingPoliciesResponse(), + metadata, + ) client.list_autoscaling_policies( request, @@ -5155,6 +5240,7 @@ def test_list_autoscaling_policies_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_autoscaling_policy_rest_bad_request( diff --git a/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_batch_controller.py b/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_batch_controller.py index b788edab02b0..380b9c84b9c6 100644 --- a/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_batch_controller.py +++ b/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_batch_controller.py @@ -75,6 +75,13 @@ ) from google.cloud.dataproc_v1.types import batches, operations, shared +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def 
mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -333,6 +340,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = BatchControllerClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = BatchControllerClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -3975,10 +4025,13 @@ def test_create_batch_rest_interceptors(null_interceptor): ), mock.patch.object( transports.BatchControllerRestInterceptor, "post_create_batch" ) as post, mock.patch.object( + transports.BatchControllerRestInterceptor, "post_create_batch_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.BatchControllerRestInterceptor, "pre_create_batch" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = batches.CreateBatchRequest.pb(batches.CreateBatchRequest()) transcode.return_value = { "method": "post", @@ -4000,6 +4053,7 @@ def test_create_batch_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_batch( request, @@ -4011,6 +4065,7 @@ def test_create_batch_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_batch_rest_bad_request(request_type=batches.GetBatchRequest): @@ -4103,10 +4158,13 @@ def test_get_batch_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.BatchControllerRestInterceptor, "post_get_batch" ) as post, mock.patch.object( + transports.BatchControllerRestInterceptor, "post_get_batch_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.BatchControllerRestInterceptor, "pre_get_batch" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = batches.GetBatchRequest.pb(batches.GetBatchRequest()) transcode.return_value = { "method": "post", @@ -4128,6 +4186,7 @@ def test_get_batch_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = batches.Batch() + post_with_metadata.return_value = batches.Batch(), metadata client.get_batch( request, @@ 
-4139,6 +4198,7 @@ def test_get_batch_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_batches_rest_bad_request(request_type=batches.ListBatchesRequest): @@ -4223,10 +4283,13 @@ def test_list_batches_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.BatchControllerRestInterceptor, "post_list_batches" ) as post, mock.patch.object( + transports.BatchControllerRestInterceptor, "post_list_batches_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.BatchControllerRestInterceptor, "pre_list_batches" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = batches.ListBatchesRequest.pb(batches.ListBatchesRequest()) transcode.return_value = { "method": "post", @@ -4250,6 +4313,7 @@ def test_list_batches_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = batches.ListBatchesResponse() + post_with_metadata.return_value = batches.ListBatchesResponse(), metadata client.list_batches( request, @@ -4261,6 +4325,7 @@ def test_list_batches_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_batch_rest_bad_request(request_type=batches.DeleteBatchRequest): diff --git a/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_cluster_controller.py b/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_cluster_controller.py index 4f581dc72faf..9710ef540b26 100644 --- a/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_cluster_controller.py +++ b/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_cluster_controller.py @@ -78,6 +78,13 @@ ) from google.cloud.dataproc_v1.types import clusters, operations, shared +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -345,6 +352,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
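
Editorial note: the interceptor tests updated below patch the new `post_*_with_metadata` hooks alongside the deprecated `post_*` hooks. A minimal sketch of how downstream code might adopt one of the new hooks is shown here, assuming a google-cloud-dataproc release containing this change is installed; the class and method names are taken from the session_controller REST transport earlier in this diff, while the import path and the wiring comment at the end are assumptions, not verified signatures.

    from typing import Sequence, Tuple, Union

    from google.cloud.dataproc_v1.services.session_controller.transports.rest import (
        SessionControllerRestInterceptor,
    )
    from google.cloud.dataproc_v1.types import sessions


    class HeaderAwareInterceptor(SessionControllerRestInterceptor):
        """Illustrative subclass using the new metadata-aware hook."""

        def post_get_session_with_metadata(
            self,
            response: sessions.Session,
            metadata: Sequence[Tuple[str, Union[str, bytes]]],
        ) -> Tuple[sessions.Session, Sequence[Tuple[str, Union[str, bytes]]]]:
            # `metadata` is built by the transport from the HTTP response
            # headers ([(k, str(v)) for k, v in response.headers.items()]),
            # so headers can be inspected here next to the parsed Session.
            header_names = [key for key, _ in metadata]
            print(f"get_session returned {len(header_names)} response headers")
            # Returning both values preserves the (response, metadata) contract;
            # the deprecated post_get_session hook runs before this one and its
            # (possibly modified) response is what arrives here.
            return response, metadata


    # The subclass would typically be passed to the REST transport's
    # `interceptor` argument when constructing the client; that wiring is not
    # shown in this diff, so treat it as an assumption.
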
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = ClusterControllerClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = ClusterControllerClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -6435,10 +6485,13 @@ def test_create_cluster_rest_interceptors(null_interceptor): ), mock.patch.object( transports.ClusterControllerRestInterceptor, "post_create_cluster" ) as post, mock.patch.object( + transports.ClusterControllerRestInterceptor, "post_create_cluster_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ClusterControllerRestInterceptor, "pre_create_cluster" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = clusters.CreateClusterRequest.pb(clusters.CreateClusterRequest()) transcode.return_value = { "method": "post", @@ -6460,6 +6513,7 @@ def test_create_cluster_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_cluster( request, @@ -6471,6 +6525,7 @@ def test_create_cluster_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_cluster_rest_bad_request(request_type=clusters.UpdateClusterRequest): @@ -6841,10 +6896,13 @@ def test_update_cluster_rest_interceptors(null_interceptor): ), mock.patch.object( transports.ClusterControllerRestInterceptor, "post_update_cluster" ) as post, mock.patch.object( + transports.ClusterControllerRestInterceptor, "post_update_cluster_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ClusterControllerRestInterceptor, "pre_update_cluster" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = clusters.UpdateClusterRequest.pb(clusters.UpdateClusterRequest()) transcode.return_value = { "method": "post", @@ -6866,6 +6924,7 @@ def test_update_cluster_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.update_cluster( request, @@ -6877,6 +6936,7 @@ def test_update_cluster_rest_interceptors(null_interceptor): pre.assert_called_once() 
post.assert_called_once() + post_with_metadata.assert_called_once() def test_stop_cluster_rest_bad_request(request_type=clusters.StopClusterRequest): @@ -6963,10 +7023,13 @@ def test_stop_cluster_rest_interceptors(null_interceptor): ), mock.patch.object( transports.ClusterControllerRestInterceptor, "post_stop_cluster" ) as post, mock.patch.object( + transports.ClusterControllerRestInterceptor, "post_stop_cluster_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ClusterControllerRestInterceptor, "pre_stop_cluster" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = clusters.StopClusterRequest.pb(clusters.StopClusterRequest()) transcode.return_value = { "method": "post", @@ -6988,6 +7051,7 @@ def test_stop_cluster_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.stop_cluster( request, @@ -6999,6 +7063,7 @@ def test_stop_cluster_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_start_cluster_rest_bad_request(request_type=clusters.StartClusterRequest): @@ -7085,10 +7150,13 @@ def test_start_cluster_rest_interceptors(null_interceptor): ), mock.patch.object( transports.ClusterControllerRestInterceptor, "post_start_cluster" ) as post, mock.patch.object( + transports.ClusterControllerRestInterceptor, "post_start_cluster_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ClusterControllerRestInterceptor, "pre_start_cluster" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = clusters.StartClusterRequest.pb(clusters.StartClusterRequest()) transcode.return_value = { "method": "post", @@ -7110,6 +7178,7 @@ def test_start_cluster_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.start_cluster( request, @@ -7121,6 +7190,7 @@ def test_start_cluster_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_cluster_rest_bad_request(request_type=clusters.DeleteClusterRequest): @@ -7207,10 +7277,13 @@ def test_delete_cluster_rest_interceptors(null_interceptor): ), mock.patch.object( transports.ClusterControllerRestInterceptor, "post_delete_cluster" ) as post, mock.patch.object( + transports.ClusterControllerRestInterceptor, "post_delete_cluster_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ClusterControllerRestInterceptor, "pre_delete_cluster" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = clusters.DeleteClusterRequest.pb(clusters.DeleteClusterRequest()) transcode.return_value = { "method": "post", @@ -7232,6 +7305,7 @@ def test_delete_cluster_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.delete_cluster( request, @@ -7243,6 +7317,7 @@ def test_delete_cluster_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def 
test_get_cluster_rest_bad_request(request_type=clusters.GetClusterRequest): @@ -7337,10 +7412,13 @@ def test_get_cluster_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ClusterControllerRestInterceptor, "post_get_cluster" ) as post, mock.patch.object( + transports.ClusterControllerRestInterceptor, "post_get_cluster_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ClusterControllerRestInterceptor, "pre_get_cluster" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = clusters.GetClusterRequest.pb(clusters.GetClusterRequest()) transcode.return_value = { "method": "post", @@ -7362,6 +7440,7 @@ def test_get_cluster_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = clusters.Cluster() + post_with_metadata.return_value = clusters.Cluster(), metadata client.get_cluster( request, @@ -7373,6 +7452,7 @@ def test_get_cluster_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_clusters_rest_bad_request(request_type=clusters.ListClustersRequest): @@ -7455,10 +7535,13 @@ def test_list_clusters_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ClusterControllerRestInterceptor, "post_list_clusters" ) as post, mock.patch.object( + transports.ClusterControllerRestInterceptor, "post_list_clusters_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ClusterControllerRestInterceptor, "pre_list_clusters" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = clusters.ListClustersRequest.pb(clusters.ListClustersRequest()) transcode.return_value = { "method": "post", @@ -7482,6 +7565,7 @@ def test_list_clusters_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = clusters.ListClustersResponse() + post_with_metadata.return_value = clusters.ListClustersResponse(), metadata client.list_clusters( request, @@ -7493,6 +7577,7 @@ def test_list_clusters_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_diagnose_cluster_rest_bad_request( @@ -7581,10 +7666,14 @@ def test_diagnose_cluster_rest_interceptors(null_interceptor): ), mock.patch.object( transports.ClusterControllerRestInterceptor, "post_diagnose_cluster" ) as post, mock.patch.object( + transports.ClusterControllerRestInterceptor, + "post_diagnose_cluster_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ClusterControllerRestInterceptor, "pre_diagnose_cluster" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = clusters.DiagnoseClusterRequest.pb( clusters.DiagnoseClusterRequest() ) @@ -7608,6 +7697,7 @@ def test_diagnose_cluster_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.diagnose_cluster( request, @@ -7619,6 +7709,7 @@ def test_diagnose_cluster_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_iam_policy_rest_bad_request( diff --git a/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_job_controller.py 
b/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_job_controller.py index b93c80f4d648..fac61670f4b8 100644 --- a/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_job_controller.py +++ b/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_job_controller.py @@ -75,6 +75,13 @@ ) from google.cloud.dataproc_v1.types import jobs +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -329,6 +336,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = JobControllerClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = JobControllerClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -5624,10 +5674,13 @@ def test_submit_job_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.JobControllerRestInterceptor, "post_submit_job" ) as post, mock.patch.object( + transports.JobControllerRestInterceptor, "post_submit_job_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.JobControllerRestInterceptor, "pre_submit_job" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = jobs.SubmitJobRequest.pb(jobs.SubmitJobRequest()) transcode.return_value = { "method": "post", @@ -5649,6 +5702,7 @@ def test_submit_job_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = jobs.Job() + post_with_metadata.return_value = jobs.Job(), metadata client.submit_job( request, @@ -5660,6 +5714,7 @@ def test_submit_job_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_submit_job_as_operation_rest_bad_request(request_type=jobs.SubmitJobRequest): @@ -5738,10 +5793,14 @@ def test_submit_job_as_operation_rest_interceptors(null_interceptor): ), mock.patch.object( transports.JobControllerRestInterceptor, "post_submit_job_as_operation" ) as post, mock.patch.object( + transports.JobControllerRestInterceptor, + 
"post_submit_job_as_operation_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.JobControllerRestInterceptor, "pre_submit_job_as_operation" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = jobs.SubmitJobRequest.pb(jobs.SubmitJobRequest()) transcode.return_value = { "method": "post", @@ -5763,6 +5822,7 @@ def test_submit_job_as_operation_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.submit_job_as_operation( request, @@ -5774,6 +5834,7 @@ def test_submit_job_as_operation_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_job_rest_bad_request(request_type=jobs.GetJobRequest): @@ -5862,10 +5923,13 @@ def test_get_job_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.JobControllerRestInterceptor, "post_get_job" ) as post, mock.patch.object( + transports.JobControllerRestInterceptor, "post_get_job_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.JobControllerRestInterceptor, "pre_get_job" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = jobs.GetJobRequest.pb(jobs.GetJobRequest()) transcode.return_value = { "method": "post", @@ -5887,6 +5951,7 @@ def test_get_job_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = jobs.Job() + post_with_metadata.return_value = jobs.Job(), metadata client.get_job( request, @@ -5898,6 +5963,7 @@ def test_get_job_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_jobs_rest_bad_request(request_type=jobs.ListJobsRequest): @@ -5982,10 +6048,13 @@ def test_list_jobs_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.JobControllerRestInterceptor, "post_list_jobs" ) as post, mock.patch.object( + transports.JobControllerRestInterceptor, "post_list_jobs_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.JobControllerRestInterceptor, "pre_list_jobs" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = jobs.ListJobsRequest.pb(jobs.ListJobsRequest()) transcode.return_value = { "method": "post", @@ -6007,6 +6076,7 @@ def test_list_jobs_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = jobs.ListJobsResponse() + post_with_metadata.return_value = jobs.ListJobsResponse(), metadata client.list_jobs( request, @@ -6018,6 +6088,7 @@ def test_list_jobs_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_job_rest_bad_request(request_type=jobs.UpdateJobRequest): @@ -6293,10 +6364,13 @@ def test_update_job_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.JobControllerRestInterceptor, "post_update_job" ) as post, mock.patch.object( + transports.JobControllerRestInterceptor, "post_update_job_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.JobControllerRestInterceptor, "pre_update_job" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = 
jobs.UpdateJobRequest.pb(jobs.UpdateJobRequest()) transcode.return_value = { "method": "post", @@ -6318,6 +6392,7 @@ def test_update_job_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = jobs.Job() + post_with_metadata.return_value = jobs.Job(), metadata client.update_job( request, @@ -6329,6 +6404,7 @@ def test_update_job_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_cancel_job_rest_bad_request(request_type=jobs.CancelJobRequest): @@ -6417,10 +6493,13 @@ def test_cancel_job_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.JobControllerRestInterceptor, "post_cancel_job" ) as post, mock.patch.object( + transports.JobControllerRestInterceptor, "post_cancel_job_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.JobControllerRestInterceptor, "pre_cancel_job" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = jobs.CancelJobRequest.pb(jobs.CancelJobRequest()) transcode.return_value = { "method": "post", @@ -6442,6 +6521,7 @@ def test_cancel_job_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = jobs.Job() + post_with_metadata.return_value = jobs.Job(), metadata client.cancel_job( request, @@ -6453,6 +6533,7 @@ def test_cancel_job_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_job_rest_bad_request(request_type=jobs.DeleteJobRequest): diff --git a/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_node_group_controller.py b/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_node_group_controller.py index b53f1cebabe6..3703cf8dbf5f 100644 --- a/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_node_group_controller.py +++ b/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_node_group_controller.py @@ -73,6 +73,13 @@ ) from google.cloud.dataproc_v1.types import clusters, node_groups, operations +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -346,6 +353,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = NodeGroupControllerClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = NodeGroupControllerClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -3240,10 +3290,14 @@ def test_create_node_group_rest_interceptors(null_interceptor): ), mock.patch.object( transports.NodeGroupControllerRestInterceptor, "post_create_node_group" ) as post, mock.patch.object( + transports.NodeGroupControllerRestInterceptor, + "post_create_node_group_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.NodeGroupControllerRestInterceptor, "pre_create_node_group" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = node_groups.CreateNodeGroupRequest.pb( node_groups.CreateNodeGroupRequest() ) @@ -3267,6 +3321,7 @@ def test_create_node_group_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_node_group( request, @@ -3278,6 +3333,7 @@ def test_create_node_group_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_resize_node_group_rest_bad_request( @@ -3362,10 +3418,14 @@ def test_resize_node_group_rest_interceptors(null_interceptor): ), mock.patch.object( transports.NodeGroupControllerRestInterceptor, "post_resize_node_group" ) as post, mock.patch.object( + transports.NodeGroupControllerRestInterceptor, + "post_resize_node_group_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.NodeGroupControllerRestInterceptor, "pre_resize_node_group" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = node_groups.ResizeNodeGroupRequest.pb( node_groups.ResizeNodeGroupRequest() ) @@ -3389,6 +3449,7 @@ def test_resize_node_group_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.resize_node_group( request, @@ -3400,6 +3461,7 @@ def test_resize_node_group_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + 
post_with_metadata.assert_called_once() def test_get_node_group_rest_bad_request(request_type=node_groups.GetNodeGroupRequest): @@ -3488,10 +3550,14 @@ def test_get_node_group_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.NodeGroupControllerRestInterceptor, "post_get_node_group" ) as post, mock.patch.object( + transports.NodeGroupControllerRestInterceptor, + "post_get_node_group_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.NodeGroupControllerRestInterceptor, "pre_get_node_group" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = node_groups.GetNodeGroupRequest.pb( node_groups.GetNodeGroupRequest() ) @@ -3515,6 +3581,7 @@ def test_get_node_group_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = clusters.NodeGroup() + post_with_metadata.return_value = clusters.NodeGroup(), metadata client.get_node_group( request, @@ -3526,6 +3593,7 @@ def test_get_node_group_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_iam_policy_rest_bad_request( diff --git a/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_session_controller.py b/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_session_controller.py index 810ca34f0f08..3ffe81dcabae 100644 --- a/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_session_controller.py +++ b/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_session_controller.py @@ -75,6 +75,13 @@ ) from google.cloud.dataproc_v1.types import operations, sessions, shared +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -342,6 +349,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = SessionControllerClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = SessionControllerClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -4582,10 +4632,13 @@ def test_create_session_rest_interceptors(null_interceptor): ), mock.patch.object( transports.SessionControllerRestInterceptor, "post_create_session" ) as post, mock.patch.object( + transports.SessionControllerRestInterceptor, "post_create_session_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.SessionControllerRestInterceptor, "pre_create_session" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = sessions.CreateSessionRequest.pb(sessions.CreateSessionRequest()) transcode.return_value = { "method": "post", @@ -4607,6 +4660,7 @@ def test_create_session_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_session( request, @@ -4618,6 +4672,7 @@ def test_create_session_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_session_rest_bad_request(request_type=sessions.GetSessionRequest): @@ -4712,10 +4767,13 @@ def test_get_session_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.SessionControllerRestInterceptor, "post_get_session" ) as post, mock.patch.object( + transports.SessionControllerRestInterceptor, "post_get_session_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.SessionControllerRestInterceptor, "pre_get_session" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = sessions.GetSessionRequest.pb(sessions.GetSessionRequest()) transcode.return_value = { "method": "post", @@ -4737,6 +4795,7 @@ def test_get_session_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = sessions.Session() + post_with_metadata.return_value = sessions.Session(), metadata client.get_session( request, @@ -4748,6 +4807,7 @@ def test_get_session_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + 
post_with_metadata.assert_called_once() def test_list_sessions_rest_bad_request(request_type=sessions.ListSessionsRequest): @@ -4830,10 +4890,13 @@ def test_list_sessions_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.SessionControllerRestInterceptor, "post_list_sessions" ) as post, mock.patch.object( + transports.SessionControllerRestInterceptor, "post_list_sessions_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.SessionControllerRestInterceptor, "pre_list_sessions" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = sessions.ListSessionsRequest.pb(sessions.ListSessionsRequest()) transcode.return_value = { "method": "post", @@ -4857,6 +4920,7 @@ def test_list_sessions_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = sessions.ListSessionsResponse() + post_with_metadata.return_value = sessions.ListSessionsResponse(), metadata client.list_sessions( request, @@ -4868,6 +4932,7 @@ def test_list_sessions_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_terminate_session_rest_bad_request( @@ -4948,10 +5013,14 @@ def test_terminate_session_rest_interceptors(null_interceptor): ), mock.patch.object( transports.SessionControllerRestInterceptor, "post_terminate_session" ) as post, mock.patch.object( + transports.SessionControllerRestInterceptor, + "post_terminate_session_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.SessionControllerRestInterceptor, "pre_terminate_session" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = sessions.TerminateSessionRequest.pb( sessions.TerminateSessionRequest() ) @@ -4975,6 +5044,7 @@ def test_terminate_session_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.terminate_session( request, @@ -4986,6 +5056,7 @@ def test_terminate_session_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_session_rest_bad_request(request_type=sessions.DeleteSessionRequest): @@ -5064,10 +5135,13 @@ def test_delete_session_rest_interceptors(null_interceptor): ), mock.patch.object( transports.SessionControllerRestInterceptor, "post_delete_session" ) as post, mock.patch.object( + transports.SessionControllerRestInterceptor, "post_delete_session_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.SessionControllerRestInterceptor, "pre_delete_session" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = sessions.DeleteSessionRequest.pb(sessions.DeleteSessionRequest()) transcode.return_value = { "method": "post", @@ -5089,6 +5163,7 @@ def test_delete_session_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.delete_session( request, @@ -5100,6 +5175,7 @@ def test_delete_session_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_iam_policy_rest_bad_request( diff --git 
a/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_session_template_controller.py b/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_session_template_controller.py index 55d26f5297d9..c07b79e7b281 100644 --- a/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_session_template_controller.py +++ b/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_session_template_controller.py @@ -66,6 +66,13 @@ ) from google.cloud.dataproc_v1.types import session_templates, sessions, shared +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -345,6 +352,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = SessionTemplateControllerClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = SessionTemplateControllerClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -4687,11 +4737,15 @@ def test_create_session_template_rest_interceptors(null_interceptor): transports.SessionTemplateControllerRestInterceptor, "post_create_session_template", ) as post, mock.patch.object( + transports.SessionTemplateControllerRestInterceptor, + "post_create_session_template_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.SessionTemplateControllerRestInterceptor, "pre_create_session_template", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = session_templates.CreateSessionTemplateRequest.pb( session_templates.CreateSessionTemplateRequest() ) @@ -4717,6 +4771,7 @@ def test_create_session_template_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = session_templates.SessionTemplate() + post_with_metadata.return_value = session_templates.SessionTemplate(), metadata client.create_session_template( request, @@ -4728,6 +4783,7 @@ def test_create_session_template_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def 
test_update_session_template_rest_bad_request( @@ -4935,11 +4991,15 @@ def test_update_session_template_rest_interceptors(null_interceptor): transports.SessionTemplateControllerRestInterceptor, "post_update_session_template", ) as post, mock.patch.object( + transports.SessionTemplateControllerRestInterceptor, + "post_update_session_template_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.SessionTemplateControllerRestInterceptor, "pre_update_session_template", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = session_templates.UpdateSessionTemplateRequest.pb( session_templates.UpdateSessionTemplateRequest() ) @@ -4965,6 +5025,7 @@ def test_update_session_template_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = session_templates.SessionTemplate() + post_with_metadata.return_value = session_templates.SessionTemplate(), metadata client.update_session_template( request, @@ -4976,6 +5037,7 @@ def test_update_session_template_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_session_template_rest_bad_request( @@ -5070,10 +5132,14 @@ def test_get_session_template_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.SessionTemplateControllerRestInterceptor, "post_get_session_template" ) as post, mock.patch.object( + transports.SessionTemplateControllerRestInterceptor, + "post_get_session_template_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.SessionTemplateControllerRestInterceptor, "pre_get_session_template" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = session_templates.GetSessionTemplateRequest.pb( session_templates.GetSessionTemplateRequest() ) @@ -5099,6 +5165,7 @@ def test_get_session_template_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = session_templates.SessionTemplate() + post_with_metadata.return_value = session_templates.SessionTemplate(), metadata client.get_session_template( request, @@ -5110,6 +5177,7 @@ def test_get_session_template_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_session_templates_rest_bad_request( @@ -5195,11 +5263,15 @@ def test_list_session_templates_rest_interceptors(null_interceptor): transports.SessionTemplateControllerRestInterceptor, "post_list_session_templates", ) as post, mock.patch.object( + transports.SessionTemplateControllerRestInterceptor, + "post_list_session_templates_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.SessionTemplateControllerRestInterceptor, "pre_list_session_templates", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = session_templates.ListSessionTemplatesRequest.pb( session_templates.ListSessionTemplatesRequest() ) @@ -5225,6 +5297,10 @@ def test_list_session_templates_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = session_templates.ListSessionTemplatesResponse() + post_with_metadata.return_value = ( + session_templates.ListSessionTemplatesResponse(), + metadata, + ) client.list_session_templates( request, @@ -5236,6 +5312,7 @@ def test_list_session_templates_rest_interceptors(null_interceptor): 
pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_session_template_rest_bad_request( diff --git a/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_workflow_template_service.py b/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_workflow_template_service.py index 3840b6261733..a05587f8b553 100644 --- a/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_workflow_template_service.py +++ b/packages/google-cloud-dataproc/tests/unit/gapic/dataproc_v1/test_workflow_template_service.py @@ -76,6 +76,13 @@ ) from google.cloud.dataproc_v1.types import clusters, jobs, shared, workflow_templates +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -351,6 +358,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = WorkflowTemplateServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = WorkflowTemplateServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -6146,11 +6196,15 @@ def test_create_workflow_template_rest_interceptors(null_interceptor): transports.WorkflowTemplateServiceRestInterceptor, "post_create_workflow_template", ) as post, mock.patch.object( + transports.WorkflowTemplateServiceRestInterceptor, + "post_create_workflow_template_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.WorkflowTemplateServiceRestInterceptor, "pre_create_workflow_template", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = workflow_templates.CreateWorkflowTemplateRequest.pb( workflow_templates.CreateWorkflowTemplateRequest() ) @@ -6176,6 +6230,10 @@ def test_create_workflow_template_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = workflow_templates.WorkflowTemplate() + post_with_metadata.return_value = ( + workflow_templates.WorkflowTemplate(), + metadata, + ) client.create_workflow_template( request, @@ -6187,6 +6245,7 @@ def 
test_create_workflow_template_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_workflow_template_rest_bad_request( @@ -6279,10 +6338,14 @@ def test_get_workflow_template_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.WorkflowTemplateServiceRestInterceptor, "post_get_workflow_template" ) as post, mock.patch.object( + transports.WorkflowTemplateServiceRestInterceptor, + "post_get_workflow_template_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.WorkflowTemplateServiceRestInterceptor, "pre_get_workflow_template" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = workflow_templates.GetWorkflowTemplateRequest.pb( workflow_templates.GetWorkflowTemplateRequest() ) @@ -6308,6 +6371,10 @@ def test_get_workflow_template_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = workflow_templates.WorkflowTemplate() + post_with_metadata.return_value = ( + workflow_templates.WorkflowTemplate(), + metadata, + ) client.get_workflow_template( request, @@ -6319,6 +6386,7 @@ def test_get_workflow_template_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_instantiate_workflow_template_rest_bad_request( @@ -6404,11 +6472,15 @@ def test_instantiate_workflow_template_rest_interceptors(null_interceptor): transports.WorkflowTemplateServiceRestInterceptor, "post_instantiate_workflow_template", ) as post, mock.patch.object( + transports.WorkflowTemplateServiceRestInterceptor, + "post_instantiate_workflow_template_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.WorkflowTemplateServiceRestInterceptor, "pre_instantiate_workflow_template", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = workflow_templates.InstantiateWorkflowTemplateRequest.pb( workflow_templates.InstantiateWorkflowTemplateRequest() ) @@ -6432,6 +6504,7 @@ def test_instantiate_workflow_template_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.instantiate_workflow_template( request, @@ -6443,6 +6516,7 @@ def test_instantiate_workflow_template_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_instantiate_inline_workflow_template_rest_bad_request( @@ -6896,11 +6970,15 @@ def test_instantiate_inline_workflow_template_rest_interceptors(null_interceptor transports.WorkflowTemplateServiceRestInterceptor, "post_instantiate_inline_workflow_template", ) as post, mock.patch.object( + transports.WorkflowTemplateServiceRestInterceptor, + "post_instantiate_inline_workflow_template_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.WorkflowTemplateServiceRestInterceptor, "pre_instantiate_inline_workflow_template", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = workflow_templates.InstantiateInlineWorkflowTemplateRequest.pb( workflow_templates.InstantiateInlineWorkflowTemplateRequest() ) @@ -6924,6 +7002,7 @@ def test_instantiate_inline_workflow_template_rest_interceptors(null_interceptor ] pre.return_value = request, 
metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.instantiate_inline_workflow_template( request, @@ -6935,6 +7014,7 @@ def test_instantiate_inline_workflow_template_rest_interceptors(null_interceptor pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_workflow_template_rest_bad_request( @@ -7402,11 +7482,15 @@ def test_update_workflow_template_rest_interceptors(null_interceptor): transports.WorkflowTemplateServiceRestInterceptor, "post_update_workflow_template", ) as post, mock.patch.object( + transports.WorkflowTemplateServiceRestInterceptor, + "post_update_workflow_template_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.WorkflowTemplateServiceRestInterceptor, "pre_update_workflow_template", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = workflow_templates.UpdateWorkflowTemplateRequest.pb( workflow_templates.UpdateWorkflowTemplateRequest() ) @@ -7432,6 +7516,10 @@ def test_update_workflow_template_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = workflow_templates.WorkflowTemplate() + post_with_metadata.return_value = ( + workflow_templates.WorkflowTemplate(), + metadata, + ) client.update_workflow_template( request, @@ -7443,6 +7531,7 @@ def test_update_workflow_template_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_workflow_templates_rest_bad_request( @@ -7530,10 +7619,14 @@ def test_list_workflow_templates_rest_interceptors(null_interceptor): transports.WorkflowTemplateServiceRestInterceptor, "post_list_workflow_templates", ) as post, mock.patch.object( + transports.WorkflowTemplateServiceRestInterceptor, + "post_list_workflow_templates_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.WorkflowTemplateServiceRestInterceptor, "pre_list_workflow_templates" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = workflow_templates.ListWorkflowTemplatesRequest.pb( workflow_templates.ListWorkflowTemplatesRequest() ) @@ -7559,6 +7652,10 @@ def test_list_workflow_templates_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = workflow_templates.ListWorkflowTemplatesResponse() + post_with_metadata.return_value = ( + workflow_templates.ListWorkflowTemplatesResponse(), + metadata, + ) client.list_workflow_templates( request, @@ -7570,6 +7667,7 @@ def test_list_workflow_templates_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_workflow_template_rest_bad_request( diff --git a/packages/google-cloud-datastream/CHANGELOG.md b/packages/google-cloud-datastream/CHANGELOG.md index d11af14c2279..e952b75d55b7 100644 --- a/packages/google-cloud-datastream/CHANGELOG.md +++ b/packages/google-cloud-datastream/CHANGELOG.md @@ -1,5 +1,31 @@ # Changelog +## [1.13.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-datastream-v1.12.0...google-cloud-datastream-v1.13.0) (2025-02-12) + + +### Features + +* Add REST Interceptors which support reading metadata ([e92d527](https://github.com/googleapis/google-cloud-python/commit/e92d52797ffbce45d033eb81af24e0cad32baa55)) +* Add support for reading selective 
GAPIC generation methods from service YAML ([e92d527](https://github.com/googleapis/google-cloud-python/commit/e92d52797ffbce45d033eb81af24e0cad32baa55)) + +## [1.12.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-datastream-v1.11.0...google-cloud-datastream-v1.12.0) (2025-01-20) + + +### Features + +* A new message `PostgresqlSslConfig` is added ([6fa4ff8](https://github.com/googleapis/google-cloud-python/commit/6fa4ff89f201cc5e3d5ae1dd4c7ced457745f5ef)) + + +### Documentation + +* A comment for field `name` in message `.google.cloud.datastream.v1.ConnectionProfile` is changed ([6fa4ff8](https://github.com/googleapis/google-cloud-python/commit/6fa4ff89f201cc5e3d5ae1dd4c7ced457745f5ef)) +* A comment for field `name` in message `.google.cloud.datastream.v1.PrivateConnection` is changed ([6fa4ff8](https://github.com/googleapis/google-cloud-python/commit/6fa4ff89f201cc5e3d5ae1dd4c7ced457745f5ef)) +* A comment for field `name` in message `.google.cloud.datastream.v1.Route` is changed ([6fa4ff8](https://github.com/googleapis/google-cloud-python/commit/6fa4ff89f201cc5e3d5ae1dd4c7ced457745f5ef)) +* A comment for field `name` in message `.google.cloud.datastream.v1.Stream` is changed ([6fa4ff8](https://github.com/googleapis/google-cloud-python/commit/6fa4ff89f201cc5e3d5ae1dd4c7ced457745f5ef)) +* A comment for field `name` in message `.google.cloud.datastream.v1.StreamObject` is changed ([6fa4ff8](https://github.com/googleapis/google-cloud-python/commit/6fa4ff89f201cc5e3d5ae1dd4c7ced457745f5ef)) +* A comment for field `password` in message `.google.cloud.datastream.v1.OracleAsmConfig` is changed ([6fa4ff8](https://github.com/googleapis/google-cloud-python/commit/6fa4ff89f201cc5e3d5ae1dd4c7ced457745f5ef)) +* A comment for message `OracleAsmConfig` is changed ([6fa4ff8](https://github.com/googleapis/google-cloud-python/commit/6fa4ff89f201cc5e3d5ae1dd4c7ced457745f5ef)) + ## [1.11.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-datastream-v1.10.1...google-cloud-datastream-v1.11.0) (2024-12-12) diff --git a/packages/google-cloud-datastream/datastream-v1-py.tar.gz b/packages/google-cloud-datastream/datastream-v1-py.tar.gz new file mode 100644 index 000000000000..b087310916e2 Binary files /dev/null and b/packages/google-cloud-datastream/datastream-v1-py.tar.gz differ diff --git a/packages/google-cloud-datastream/google/cloud/datastream/__init__.py b/packages/google-cloud-datastream/google/cloud/datastream/__init__.py index 9d57bedd7ceb..9b3d846d65e2 100644 --- a/packages/google-cloud-datastream/google/cloud/datastream/__init__.py +++ b/packages/google-cloud-datastream/google/cloud/datastream/__init__.py @@ -95,6 +95,7 @@ PostgresqlRdbms, PostgresqlSchema, PostgresqlSourceConfig, + PostgresqlSslConfig, PostgresqlTable, PrivateConnection, PrivateConnectivity, @@ -192,6 +193,7 @@ "PostgresqlRdbms", "PostgresqlSchema", "PostgresqlSourceConfig", + "PostgresqlSslConfig", "PostgresqlTable", "PrivateConnection", "PrivateConnectivity", diff --git a/packages/google-cloud-datastream/google/cloud/datastream/gapic_version.py b/packages/google-cloud-datastream/google/cloud/datastream/gapic_version.py index 50d842f376d0..43155ded0db3 100644 --- a/packages/google-cloud-datastream/google/cloud/datastream/gapic_version.py +++ b/packages/google-cloud-datastream/google/cloud/datastream/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "1.11.0" # {x-release-please-version} +__version__ = "1.13.0" # {x-release-please-version} diff --git a/packages/google-cloud-datastream/google/cloud/datastream_v1/__init__.py b/packages/google-cloud-datastream/google/cloud/datastream_v1/__init__.py index 1f8f52185d1d..9ba2365d6535 100644 --- a/packages/google-cloud-datastream/google/cloud/datastream_v1/__init__.py +++ b/packages/google-cloud-datastream/google/cloud/datastream_v1/__init__.py @@ -92,6 +92,7 @@ PostgresqlRdbms, PostgresqlSchema, PostgresqlSourceConfig, + PostgresqlSslConfig, PostgresqlTable, PrivateConnection, PrivateConnectivity, @@ -182,6 +183,7 @@ "PostgresqlRdbms", "PostgresqlSchema", "PostgresqlSourceConfig", + "PostgresqlSslConfig", "PostgresqlTable", "PrivateConnection", "PrivateConnectivity", diff --git a/packages/google-cloud-datastream/google/cloud/datastream_v1/gapic_version.py b/packages/google-cloud-datastream/google/cloud/datastream_v1/gapic_version.py index 50d842f376d0..43155ded0db3 100644 --- a/packages/google-cloud-datastream/google/cloud/datastream_v1/gapic_version.py +++ b/packages/google-cloud-datastream/google/cloud/datastream_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.11.0" # {x-release-please-version} +__version__ = "1.13.0" # {x-release-please-version} diff --git a/packages/google-cloud-datastream/google/cloud/datastream_v1/services/datastream/client.py b/packages/google-cloud-datastream/google/cloud/datastream_v1/services/datastream/client.py index 8920b70eccae..b80613bebd61 100644 --- a/packages/google-cloud-datastream/google/cloud/datastream_v1/services/datastream/client.py +++ b/packages/google-cloud-datastream/google/cloud/datastream_v1/services/datastream/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -603,6 +605,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -4010,16 +4039,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. 
+ return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -4065,16 +4098,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def delete_operation( self, @@ -4231,16 +4268,20 @@ def get_location( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def list_locations( self, @@ -4286,16 +4327,20 @@ def list_locations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-datastream/google/cloud/datastream_v1/services/datastream/transports/rest.py b/packages/google-cloud-datastream/google/cloud/datastream_v1/services/datastream/transports/rest.py index 9424c2bcdcaf..4b1dab2f9825 100644 --- a/packages/google-cloud-datastream/google/cloud/datastream_v1/services/datastream/transports/rest.py +++ b/packages/google-cloud-datastream/google/cloud/datastream_v1/services/datastream/transports/rest.py @@ -306,12 +306,35 @@ def post_create_connection_profile( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_connection_profile - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_connection_profile_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Datastream server but before - it is returned to user code. + it is returned to user code. This `post_create_connection_profile` interceptor runs + before the `post_create_connection_profile_with_metadata` interceptor. """ return response + def post_create_connection_profile_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_connection_profile + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Datastream server but before it is returned to user code. 
+ + We recommend only using this `post_create_connection_profile_with_metadata` + interceptor in new development instead of the `post_create_connection_profile` interceptor. + When both interceptors are used, this `post_create_connection_profile_with_metadata` interceptor runs after the + `post_create_connection_profile` interceptor. The (possibly modified) response returned by + `post_create_connection_profile` will be passed to + `post_create_connection_profile_with_metadata`. + """ + return response, metadata + def pre_create_private_connection( self, request: datastream.CreatePrivateConnectionRequest, @@ -332,12 +355,35 @@ def post_create_private_connection( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_private_connection - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_private_connection_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Datastream server but before - it is returned to user code. + it is returned to user code. This `post_create_private_connection` interceptor runs + before the `post_create_private_connection_with_metadata` interceptor. """ return response + def post_create_private_connection_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_private_connection + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Datastream server but before it is returned to user code. + + We recommend only using this `post_create_private_connection_with_metadata` + interceptor in new development instead of the `post_create_private_connection` interceptor. + When both interceptors are used, this `post_create_private_connection_with_metadata` interceptor runs after the + `post_create_private_connection` interceptor. The (possibly modified) response returned by + `post_create_private_connection` will be passed to + `post_create_private_connection_with_metadata`. + """ + return response, metadata + def pre_create_route( self, request: datastream.CreateRouteRequest, @@ -355,12 +401,35 @@ def post_create_route( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_route - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_route_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Datastream server but before - it is returned to user code. + it is returned to user code. This `post_create_route` interceptor runs + before the `post_create_route_with_metadata` interceptor. """ return response + def post_create_route_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_route + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Datastream server but before it is returned to user code. + + We recommend only using this `post_create_route_with_metadata` + interceptor in new development instead of the `post_create_route` interceptor. + When both interceptors are used, this `post_create_route_with_metadata` interceptor runs after the + `post_create_route` interceptor. 
The (possibly modified) response returned by + `post_create_route` will be passed to + `post_create_route_with_metadata`. + """ + return response, metadata + def pre_create_stream( self, request: datastream.CreateStreamRequest, @@ -378,12 +447,35 @@ def post_create_stream( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_stream - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_stream_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Datastream server but before - it is returned to user code. + it is returned to user code. This `post_create_stream` interceptor runs + before the `post_create_stream_with_metadata` interceptor. """ return response + def post_create_stream_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_stream + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Datastream server but before it is returned to user code. + + We recommend only using this `post_create_stream_with_metadata` + interceptor in new development instead of the `post_create_stream` interceptor. + When both interceptors are used, this `post_create_stream_with_metadata` interceptor runs after the + `post_create_stream` interceptor. The (possibly modified) response returned by + `post_create_stream` will be passed to + `post_create_stream_with_metadata`. + """ + return response, metadata + def pre_delete_connection_profile( self, request: datastream.DeleteConnectionProfileRequest, @@ -404,12 +496,35 @@ def post_delete_connection_profile( ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_connection_profile - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_connection_profile_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Datastream server but before - it is returned to user code. + it is returned to user code. This `post_delete_connection_profile` interceptor runs + before the `post_delete_connection_profile_with_metadata` interceptor. """ return response + def post_delete_connection_profile_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_connection_profile + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Datastream server but before it is returned to user code. + + We recommend only using this `post_delete_connection_profile_with_metadata` + interceptor in new development instead of the `post_delete_connection_profile` interceptor. + When both interceptors are used, this `post_delete_connection_profile_with_metadata` interceptor runs after the + `post_delete_connection_profile` interceptor. The (possibly modified) response returned by + `post_delete_connection_profile` will be passed to + `post_delete_connection_profile_with_metadata`. 
+ """ + return response, metadata + def pre_delete_private_connection( self, request: datastream.DeletePrivateConnectionRequest, @@ -430,12 +545,35 @@ def post_delete_private_connection( ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_private_connection - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_private_connection_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Datastream server but before - it is returned to user code. + it is returned to user code. This `post_delete_private_connection` interceptor runs + before the `post_delete_private_connection_with_metadata` interceptor. """ return response + def post_delete_private_connection_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_private_connection + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Datastream server but before it is returned to user code. + + We recommend only using this `post_delete_private_connection_with_metadata` + interceptor in new development instead of the `post_delete_private_connection` interceptor. + When both interceptors are used, this `post_delete_private_connection_with_metadata` interceptor runs after the + `post_delete_private_connection` interceptor. The (possibly modified) response returned by + `post_delete_private_connection` will be passed to + `post_delete_private_connection_with_metadata`. + """ + return response, metadata + def pre_delete_route( self, request: datastream.DeleteRouteRequest, @@ -453,12 +591,35 @@ def post_delete_route( ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_route - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_route_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Datastream server but before - it is returned to user code. + it is returned to user code. This `post_delete_route` interceptor runs + before the `post_delete_route_with_metadata` interceptor. """ return response + def post_delete_route_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_route + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Datastream server but before it is returned to user code. + + We recommend only using this `post_delete_route_with_metadata` + interceptor in new development instead of the `post_delete_route` interceptor. + When both interceptors are used, this `post_delete_route_with_metadata` interceptor runs after the + `post_delete_route` interceptor. The (possibly modified) response returned by + `post_delete_route` will be passed to + `post_delete_route_with_metadata`. + """ + return response, metadata + def pre_delete_stream( self, request: datastream.DeleteStreamRequest, @@ -476,12 +637,35 @@ def post_delete_stream( ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_stream - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_stream_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response after it is returned by the Datastream server but before - it is returned to user code. + it is returned to user code. This `post_delete_stream` interceptor runs + before the `post_delete_stream_with_metadata` interceptor. """ return response + def post_delete_stream_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_stream + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Datastream server but before it is returned to user code. + + We recommend only using this `post_delete_stream_with_metadata` + interceptor in new development instead of the `post_delete_stream` interceptor. + When both interceptors are used, this `post_delete_stream_with_metadata` interceptor runs after the + `post_delete_stream` interceptor. The (possibly modified) response returned by + `post_delete_stream` will be passed to + `post_delete_stream_with_metadata`. + """ + return response, metadata + def pre_discover_connection_profile( self, request: datastream.DiscoverConnectionProfileRequest, @@ -502,12 +686,38 @@ def post_discover_connection_profile( ) -> datastream.DiscoverConnectionProfileResponse: """Post-rpc interceptor for discover_connection_profile - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_discover_connection_profile_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Datastream server but before - it is returned to user code. + it is returned to user code. This `post_discover_connection_profile` interceptor runs + before the `post_discover_connection_profile_with_metadata` interceptor. """ return response + def post_discover_connection_profile_with_metadata( + self, + response: datastream.DiscoverConnectionProfileResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + datastream.DiscoverConnectionProfileResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for discover_connection_profile + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Datastream server but before it is returned to user code. + + We recommend only using this `post_discover_connection_profile_with_metadata` + interceptor in new development instead of the `post_discover_connection_profile` interceptor. + When both interceptors are used, this `post_discover_connection_profile_with_metadata` interceptor runs after the + `post_discover_connection_profile` interceptor. The (possibly modified) response returned by + `post_discover_connection_profile` will be passed to + `post_discover_connection_profile_with_metadata`. + """ + return response, metadata + def pre_fetch_static_ips( self, request: datastream.FetchStaticIpsRequest, @@ -527,12 +737,37 @@ def post_fetch_static_ips( ) -> datastream.FetchStaticIpsResponse: """Post-rpc interceptor for fetch_static_ips - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_fetch_static_ips_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Datastream server but before - it is returned to user code. + it is returned to user code. 
This `post_fetch_static_ips` interceptor runs + before the `post_fetch_static_ips_with_metadata` interceptor. """ return response + def post_fetch_static_ips_with_metadata( + self, + response: datastream.FetchStaticIpsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + datastream.FetchStaticIpsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for fetch_static_ips + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Datastream server but before it is returned to user code. + + We recommend only using this `post_fetch_static_ips_with_metadata` + interceptor in new development instead of the `post_fetch_static_ips` interceptor. + When both interceptors are used, this `post_fetch_static_ips_with_metadata` interceptor runs after the + `post_fetch_static_ips` interceptor. The (possibly modified) response returned by + `post_fetch_static_ips` will be passed to + `post_fetch_static_ips_with_metadata`. + """ + return response, metadata + def pre_get_connection_profile( self, request: datastream.GetConnectionProfileRequest, @@ -552,12 +787,37 @@ def post_get_connection_profile( ) -> datastream_resources.ConnectionProfile: """Post-rpc interceptor for get_connection_profile - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_connection_profile_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Datastream server but before - it is returned to user code. + it is returned to user code. This `post_get_connection_profile` interceptor runs + before the `post_get_connection_profile_with_metadata` interceptor. """ return response + def post_get_connection_profile_with_metadata( + self, + response: datastream_resources.ConnectionProfile, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + datastream_resources.ConnectionProfile, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for get_connection_profile + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Datastream server but before it is returned to user code. + + We recommend only using this `post_get_connection_profile_with_metadata` + interceptor in new development instead of the `post_get_connection_profile` interceptor. + When both interceptors are used, this `post_get_connection_profile_with_metadata` interceptor runs after the + `post_get_connection_profile` interceptor. The (possibly modified) response returned by + `post_get_connection_profile` will be passed to + `post_get_connection_profile_with_metadata`. + """ + return response, metadata + def pre_get_private_connection( self, request: datastream.GetPrivateConnectionRequest, @@ -577,12 +837,37 @@ def post_get_private_connection( ) -> datastream_resources.PrivateConnection: """Post-rpc interceptor for get_private_connection - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_private_connection_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Datastream server but before - it is returned to user code. + it is returned to user code. This `post_get_private_connection` interceptor runs + before the `post_get_private_connection_with_metadata` interceptor. 
""" return response + def post_get_private_connection_with_metadata( + self, + response: datastream_resources.PrivateConnection, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + datastream_resources.PrivateConnection, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for get_private_connection + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Datastream server but before it is returned to user code. + + We recommend only using this `post_get_private_connection_with_metadata` + interceptor in new development instead of the `post_get_private_connection` interceptor. + When both interceptors are used, this `post_get_private_connection_with_metadata` interceptor runs after the + `post_get_private_connection` interceptor. The (possibly modified) response returned by + `post_get_private_connection` will be passed to + `post_get_private_connection_with_metadata`. + """ + return response, metadata + def pre_get_route( self, request: datastream.GetRouteRequest, @@ -600,12 +885,35 @@ def post_get_route( ) -> datastream_resources.Route: """Post-rpc interceptor for get_route - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_route_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Datastream server but before - it is returned to user code. + it is returned to user code. This `post_get_route` interceptor runs + before the `post_get_route_with_metadata` interceptor. """ return response + def post_get_route_with_metadata( + self, + response: datastream_resources.Route, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[datastream_resources.Route, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_route + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Datastream server but before it is returned to user code. + + We recommend only using this `post_get_route_with_metadata` + interceptor in new development instead of the `post_get_route` interceptor. + When both interceptors are used, this `post_get_route_with_metadata` interceptor runs after the + `post_get_route` interceptor. The (possibly modified) response returned by + `post_get_route` will be passed to + `post_get_route_with_metadata`. + """ + return response, metadata + def pre_get_stream( self, request: datastream.GetStreamRequest, @@ -623,12 +931,35 @@ def post_get_stream( ) -> datastream_resources.Stream: """Post-rpc interceptor for get_stream - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_stream_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Datastream server but before - it is returned to user code. + it is returned to user code. This `post_get_stream` interceptor runs + before the `post_get_stream_with_metadata` interceptor. """ return response + def post_get_stream_with_metadata( + self, + response: datastream_resources.Stream, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[datastream_resources.Stream, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_stream + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Datastream server but before it is returned to user code. 
+ + We recommend only using this `post_get_stream_with_metadata` + interceptor in new development instead of the `post_get_stream` interceptor. + When both interceptors are used, this `post_get_stream_with_metadata` interceptor runs after the + `post_get_stream` interceptor. The (possibly modified) response returned by + `post_get_stream` will be passed to + `post_get_stream_with_metadata`. + """ + return response, metadata + def pre_get_stream_object( self, request: datastream.GetStreamObjectRequest, @@ -648,12 +979,37 @@ def post_get_stream_object( ) -> datastream_resources.StreamObject: """Post-rpc interceptor for get_stream_object - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_stream_object_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Datastream server but before - it is returned to user code. + it is returned to user code. This `post_get_stream_object` interceptor runs + before the `post_get_stream_object_with_metadata` interceptor. """ return response + def post_get_stream_object_with_metadata( + self, + response: datastream_resources.StreamObject, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + datastream_resources.StreamObject, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for get_stream_object + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Datastream server but before it is returned to user code. + + We recommend only using this `post_get_stream_object_with_metadata` + interceptor in new development instead of the `post_get_stream_object` interceptor. + When both interceptors are used, this `post_get_stream_object_with_metadata` interceptor runs after the + `post_get_stream_object` interceptor. The (possibly modified) response returned by + `post_get_stream_object` will be passed to + `post_get_stream_object_with_metadata`. + """ + return response, metadata + def pre_list_connection_profiles( self, request: datastream.ListConnectionProfilesRequest, @@ -674,12 +1030,38 @@ def post_list_connection_profiles( ) -> datastream.ListConnectionProfilesResponse: """Post-rpc interceptor for list_connection_profiles - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_connection_profiles_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Datastream server but before - it is returned to user code. + it is returned to user code. This `post_list_connection_profiles` interceptor runs + before the `post_list_connection_profiles_with_metadata` interceptor. """ return response + def post_list_connection_profiles_with_metadata( + self, + response: datastream.ListConnectionProfilesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + datastream.ListConnectionProfilesResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_connection_profiles + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Datastream server but before it is returned to user code. + + We recommend only using this `post_list_connection_profiles_with_metadata` + interceptor in new development instead of the `post_list_connection_profiles` interceptor. 
+ When both interceptors are used, this `post_list_connection_profiles_with_metadata` interceptor runs after the + `post_list_connection_profiles` interceptor. The (possibly modified) response returned by + `post_list_connection_profiles` will be passed to + `post_list_connection_profiles_with_metadata`. + """ + return response, metadata + def pre_list_private_connections( self, request: datastream.ListPrivateConnectionsRequest, @@ -700,12 +1082,38 @@ def post_list_private_connections( ) -> datastream.ListPrivateConnectionsResponse: """Post-rpc interceptor for list_private_connections - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_private_connections_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Datastream server but before - it is returned to user code. + it is returned to user code. This `post_list_private_connections` interceptor runs + before the `post_list_private_connections_with_metadata` interceptor. """ return response + def post_list_private_connections_with_metadata( + self, + response: datastream.ListPrivateConnectionsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + datastream.ListPrivateConnectionsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_private_connections + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Datastream server but before it is returned to user code. + + We recommend only using this `post_list_private_connections_with_metadata` + interceptor in new development instead of the `post_list_private_connections` interceptor. + When both interceptors are used, this `post_list_private_connections_with_metadata` interceptor runs after the + `post_list_private_connections` interceptor. The (possibly modified) response returned by + `post_list_private_connections` will be passed to + `post_list_private_connections_with_metadata`. + """ + return response, metadata + def pre_list_routes( self, request: datastream.ListRoutesRequest, @@ -723,12 +1131,35 @@ def post_list_routes( ) -> datastream.ListRoutesResponse: """Post-rpc interceptor for list_routes - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_routes_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Datastream server but before - it is returned to user code. + it is returned to user code. This `post_list_routes` interceptor runs + before the `post_list_routes_with_metadata` interceptor. """ return response + def post_list_routes_with_metadata( + self, + response: datastream.ListRoutesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[datastream.ListRoutesResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_routes + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Datastream server but before it is returned to user code. + + We recommend only using this `post_list_routes_with_metadata` + interceptor in new development instead of the `post_list_routes` interceptor. + When both interceptors are used, this `post_list_routes_with_metadata` interceptor runs after the + `post_list_routes` interceptor. The (possibly modified) response returned by + `post_list_routes` will be passed to + `post_list_routes_with_metadata`. 
+ """ + return response, metadata + def pre_list_stream_objects( self, request: datastream.ListStreamObjectsRequest, @@ -748,12 +1179,37 @@ def post_list_stream_objects( ) -> datastream.ListStreamObjectsResponse: """Post-rpc interceptor for list_stream_objects - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_stream_objects_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Datastream server but before - it is returned to user code. + it is returned to user code. This `post_list_stream_objects` interceptor runs + before the `post_list_stream_objects_with_metadata` interceptor. """ return response + def post_list_stream_objects_with_metadata( + self, + response: datastream.ListStreamObjectsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + datastream.ListStreamObjectsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_stream_objects + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Datastream server but before it is returned to user code. + + We recommend only using this `post_list_stream_objects_with_metadata` + interceptor in new development instead of the `post_list_stream_objects` interceptor. + When both interceptors are used, this `post_list_stream_objects_with_metadata` interceptor runs after the + `post_list_stream_objects` interceptor. The (possibly modified) response returned by + `post_list_stream_objects` will be passed to + `post_list_stream_objects_with_metadata`. + """ + return response, metadata + def pre_list_streams( self, request: datastream.ListStreamsRequest, @@ -771,12 +1227,35 @@ def post_list_streams( ) -> datastream.ListStreamsResponse: """Post-rpc interceptor for list_streams - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_streams_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Datastream server but before - it is returned to user code. + it is returned to user code. This `post_list_streams` interceptor runs + before the `post_list_streams_with_metadata` interceptor. """ return response + def post_list_streams_with_metadata( + self, + response: datastream.ListStreamsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[datastream.ListStreamsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_streams + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Datastream server but before it is returned to user code. + + We recommend only using this `post_list_streams_with_metadata` + interceptor in new development instead of the `post_list_streams` interceptor. + When both interceptors are used, this `post_list_streams_with_metadata` interceptor runs after the + `post_list_streams` interceptor. The (possibly modified) response returned by + `post_list_streams` will be passed to + `post_list_streams_with_metadata`. + """ + return response, metadata + def pre_lookup_stream_object( self, request: datastream.LookupStreamObjectRequest, @@ -796,12 +1275,37 @@ def post_lookup_stream_object( ) -> datastream_resources.StreamObject: """Post-rpc interceptor for lookup_stream_object - Override in a subclass to manipulate the response + DEPRECATED. 
Please use the `post_lookup_stream_object_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Datastream server but before - it is returned to user code. + it is returned to user code. This `post_lookup_stream_object` interceptor runs + before the `post_lookup_stream_object_with_metadata` interceptor. """ return response + def post_lookup_stream_object_with_metadata( + self, + response: datastream_resources.StreamObject, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + datastream_resources.StreamObject, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for lookup_stream_object + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Datastream server but before it is returned to user code. + + We recommend only using this `post_lookup_stream_object_with_metadata` + interceptor in new development instead of the `post_lookup_stream_object` interceptor. + When both interceptors are used, this `post_lookup_stream_object_with_metadata` interceptor runs after the + `post_lookup_stream_object` interceptor. The (possibly modified) response returned by + `post_lookup_stream_object` will be passed to + `post_lookup_stream_object_with_metadata`. + """ + return response, metadata + def pre_run_stream( self, request: datastream.RunStreamRequest, @@ -819,12 +1323,35 @@ def post_run_stream( ) -> operations_pb2.Operation: """Post-rpc interceptor for run_stream - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_run_stream_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Datastream server but before - it is returned to user code. + it is returned to user code. This `post_run_stream` interceptor runs + before the `post_run_stream_with_metadata` interceptor. """ return response + def post_run_stream_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for run_stream + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Datastream server but before it is returned to user code. + + We recommend only using this `post_run_stream_with_metadata` + interceptor in new development instead of the `post_run_stream` interceptor. + When both interceptors are used, this `post_run_stream_with_metadata` interceptor runs after the + `post_run_stream` interceptor. The (possibly modified) response returned by + `post_run_stream` will be passed to + `post_run_stream_with_metadata`. + """ + return response, metadata + def pre_start_backfill_job( self, request: datastream.StartBackfillJobRequest, @@ -844,12 +1371,37 @@ def post_start_backfill_job( ) -> datastream.StartBackfillJobResponse: """Post-rpc interceptor for start_backfill_job - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_start_backfill_job_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Datastream server but before - it is returned to user code. + it is returned to user code. This `post_start_backfill_job` interceptor runs + before the `post_start_backfill_job_with_metadata` interceptor. 
""" return response + def post_start_backfill_job_with_metadata( + self, + response: datastream.StartBackfillJobResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + datastream.StartBackfillJobResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for start_backfill_job + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Datastream server but before it is returned to user code. + + We recommend only using this `post_start_backfill_job_with_metadata` + interceptor in new development instead of the `post_start_backfill_job` interceptor. + When both interceptors are used, this `post_start_backfill_job_with_metadata` interceptor runs after the + `post_start_backfill_job` interceptor. The (possibly modified) response returned by + `post_start_backfill_job` will be passed to + `post_start_backfill_job_with_metadata`. + """ + return response, metadata + def pre_stop_backfill_job( self, request: datastream.StopBackfillJobRequest, @@ -869,12 +1421,37 @@ def post_stop_backfill_job( ) -> datastream.StopBackfillJobResponse: """Post-rpc interceptor for stop_backfill_job - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_stop_backfill_job_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Datastream server but before - it is returned to user code. + it is returned to user code. This `post_stop_backfill_job` interceptor runs + before the `post_stop_backfill_job_with_metadata` interceptor. """ return response + def post_stop_backfill_job_with_metadata( + self, + response: datastream.StopBackfillJobResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + datastream.StopBackfillJobResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for stop_backfill_job + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Datastream server but before it is returned to user code. + + We recommend only using this `post_stop_backfill_job_with_metadata` + interceptor in new development instead of the `post_stop_backfill_job` interceptor. + When both interceptors are used, this `post_stop_backfill_job_with_metadata` interceptor runs after the + `post_stop_backfill_job` interceptor. The (possibly modified) response returned by + `post_stop_backfill_job` will be passed to + `post_stop_backfill_job_with_metadata`. + """ + return response, metadata + def pre_update_connection_profile( self, request: datastream.UpdateConnectionProfileRequest, @@ -895,12 +1472,35 @@ def post_update_connection_profile( ) -> operations_pb2.Operation: """Post-rpc interceptor for update_connection_profile - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_connection_profile_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Datastream server but before - it is returned to user code. + it is returned to user code. This `post_update_connection_profile` interceptor runs + before the `post_update_connection_profile_with_metadata` interceptor. 
""" return response + def post_update_connection_profile_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_connection_profile + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Datastream server but before it is returned to user code. + + We recommend only using this `post_update_connection_profile_with_metadata` + interceptor in new development instead of the `post_update_connection_profile` interceptor. + When both interceptors are used, this `post_update_connection_profile_with_metadata` interceptor runs after the + `post_update_connection_profile` interceptor. The (possibly modified) response returned by + `post_update_connection_profile` will be passed to + `post_update_connection_profile_with_metadata`. + """ + return response, metadata + def pre_update_stream( self, request: datastream.UpdateStreamRequest, @@ -918,12 +1518,35 @@ def post_update_stream( ) -> operations_pb2.Operation: """Post-rpc interceptor for update_stream - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_stream_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Datastream server but before - it is returned to user code. + it is returned to user code. This `post_update_stream` interceptor runs + before the `post_update_stream_with_metadata` interceptor. """ return response + def post_update_stream_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_stream + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Datastream server but before it is returned to user code. + + We recommend only using this `post_update_stream_with_metadata` + interceptor in new development instead of the `post_update_stream` interceptor. + When both interceptors are used, this `post_update_stream_with_metadata` interceptor runs after the + `post_update_stream` interceptor. The (possibly modified) response returned by + `post_update_stream` will be passed to + `post_update_stream_with_metadata`. 
+ """ + return response, metadata + def pre_get_location( self, request: locations_pb2.GetLocationRequest, @@ -1338,6 +1961,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_connection_profile(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_connection_profile_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1488,6 +2115,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_private_connection(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_private_connection_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1639,6 +2270,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_route(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_route_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1793,6 +2428,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_stream(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_stream_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1937,6 +2576,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_connection_profile(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_connection_profile_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2081,6 +2724,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_private_connection(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_private_connection_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2226,6 +2873,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_route(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_route_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2372,6 +3023,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_stream(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_stream_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2522,6 +3177,10 @@ def __call__( 
json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_discover_connection_profile(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_discover_connection_profile_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2671,6 +3330,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_fetch_static_ips(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_fetch_static_ips_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2819,6 +3482,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_connection_profile(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_connection_profile_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2968,6 +3635,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_private_connection(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_private_connection_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3116,6 +3787,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_route(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_route_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3260,6 +3935,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_stream(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_stream_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3405,6 +4084,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_stream_object(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_stream_object_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3552,6 +4235,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_connection_profiles(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_connection_profiles_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3699,6 +4386,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = 
self._interceptor.post_list_private_connections(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_private_connections_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3843,6 +4534,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_routes(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_routes_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3988,6 +4683,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_stream_objects(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_stream_objects_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4134,6 +4833,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_streams(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_streams_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4286,6 +4989,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_lookup_stream_object(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_lookup_stream_object_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4437,6 +5144,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_run_stream(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_run_stream_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4590,6 +5301,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_start_backfill_job(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_start_backfill_job_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4744,6 +5459,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_stop_backfill_job(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_stop_backfill_job_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4895,6 +5614,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_connection_profile(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = 
self._interceptor.post_update_connection_profile_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -5049,6 +5772,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_stream(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_stream_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-datastream/google/cloud/datastream_v1/types/__init__.py b/packages/google-cloud-datastream/google/cloud/datastream_v1/types/__init__.py index 30c672e8fcb6..df155599bf74 100644 --- a/packages/google-cloud-datastream/google/cloud/datastream_v1/types/__init__.py +++ b/packages/google-cloud-datastream/google/cloud/datastream_v1/types/__init__.py @@ -86,6 +86,7 @@ PostgresqlRdbms, PostgresqlSchema, PostgresqlSourceConfig, + PostgresqlSslConfig, PostgresqlTable, PrivateConnection, PrivateConnectivity, @@ -181,6 +182,7 @@ "PostgresqlRdbms", "PostgresqlSchema", "PostgresqlSourceConfig", + "PostgresqlSslConfig", "PostgresqlTable", "PrivateConnection", "PrivateConnectivity", diff --git a/packages/google-cloud-datastream/google/cloud/datastream_v1/types/datastream_resources.py b/packages/google-cloud-datastream/google/cloud/datastream_v1/types/datastream_resources.py index d8e8d27c180c..d5091f6f5228 100644 --- a/packages/google-cloud-datastream/google/cloud/datastream_v1/types/datastream_resources.py +++ b/packages/google-cloud-datastream/google/cloud/datastream_v1/types/datastream_resources.py @@ -39,6 +39,7 @@ "Route", "MysqlSslConfig", "OracleSslConfig", + "PostgresqlSslConfig", "ConnectionProfile", "OracleColumn", "OracleTable", @@ -158,7 +159,7 @@ class OracleProfile(proto.Message): class OracleAsmConfig(proto.Message): r"""Configuration for Oracle Automatic Storage Management (ASM) - connection. + connection. . Attributes: hostname (str): @@ -170,7 +171,7 @@ class OracleAsmConfig(proto.Message): Required. Username for the Oracle ASM connection. password (str): - Required. Password for the Oracle ASM + Optional. Password for the Oracle ASM connection. asm_service (str): Required. ASM service name for the Oracle ASM @@ -276,6 +277,12 @@ class PostgresqlProfile(proto.Message): database (str): Required. Database for the PostgreSQL connection. + ssl_config (google.cloud.datastream_v1.types.PostgresqlSslConfig): + Optional. SSL configuration for the PostgreSQL connection. + In case PostgresqlSslConfig is not set, the connection will + use the default SSL mode, which is ``prefer`` (i.e. this + mode will only use encryption if enabled from database side, + otherwise will use unencrypted communication) """ hostname: str = proto.Field( @@ -298,6 +305,11 @@ class PostgresqlProfile(proto.Message): proto.STRING, number=5, ) + ssl_config: "PostgresqlSslConfig" = proto.Field( + proto.MESSAGE, + number=7, + message="PostgresqlSslConfig", + ) class SqlServerProfile(proto.Message): @@ -459,7 +471,7 @@ class PrivateConnection(proto.Message): Attributes: name (str): - Output only. The resource's name. + Output only. Identifier. The resource's name. create_time (google.protobuf.timestamp_pb2.Timestamp): Output only. The create time of the resource. update_time (google.protobuf.timestamp_pb2.Timestamp): @@ -567,7 +579,7 @@ class Route(proto.Message): Attributes: name (str): - Output only. 
The resource's name. + Output only. Identifier. The resource's name. create_time (google.protobuf.timestamp_pb2.Timestamp): Output only. The create time of the resource. update_time (google.protobuf.timestamp_pb2.Timestamp): @@ -692,6 +704,104 @@ class OracleSslConfig(proto.Message): ) +class PostgresqlSslConfig(proto.Message): + r"""PostgreSQL SSL configuration information. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + server_verification (google.cloud.datastream_v1.types.PostgresqlSslConfig.ServerVerification): + If this field is set, the communication will + be encrypted with TLS encryption and the server + identity will be authenticated. + + This field is a member of `oneof`_ ``encryption_setting``. + server_and_client_verification (google.cloud.datastream_v1.types.PostgresqlSslConfig.ServerAndClientVerification): + If this field is set, the communication will + be encrypted with TLS encryption and both the + server identity and the client identity will be + authenticated. + + This field is a member of `oneof`_ ``encryption_setting``. + """ + + class ServerVerification(proto.Message): + r"""Message represents the option where Datastream will enforce the + encryption and authenticate the server identity. ca_certificate must + be set if user selects this option. + + Attributes: + ca_certificate (str): + Required. Input only. PEM-encoded server root + CA certificate. + """ + + ca_certificate: str = proto.Field( + proto.STRING, + number=1, + ) + + class ServerAndClientVerification(proto.Message): + r"""Message represents the option where Datastream will enforce the + encryption and authenticate the server identity as well as the + client identity. ca_certificate, client_certificate and client_key + must be set if user selects this option. + + Attributes: + client_certificate (str): + Required. Input only. PEM-encoded certificate + used by the source database to authenticate the + client identity (i.e., the Datastream's + identity). This certificate is signed by either + a root certificate trusted by the server or one + or more intermediate certificates (which is + stored with the leaf certificate) to link the + this certificate to the trusted root + certificate. + client_key (str): + Required. Input only. PEM-encoded private key + associated with the client certificate. This + value will be used during the SSL/TLS handshake, + allowing the PostgreSQL server to authenticate + the client's identity, i.e. identity of the + Datastream. + ca_certificate (str): + Required. Input only. PEM-encoded server root + CA certificate. + """ + + client_certificate: str = proto.Field( + proto.STRING, + number=1, + ) + client_key: str = proto.Field( + proto.STRING, + number=2, + ) + ca_certificate: str = proto.Field( + proto.STRING, + number=3, + ) + + server_verification: ServerVerification = proto.Field( + proto.MESSAGE, + number=1, + oneof="encryption_setting", + message=ServerVerification, + ) + server_and_client_verification: ServerAndClientVerification = proto.Field( + proto.MESSAGE, + number=2, + oneof="encryption_setting", + message=ServerAndClientVerification, + ) + + class ConnectionProfile(proto.Message): r"""A set of reusable connection configurations to be used as a source or destination for a stream. 
@@ -705,7 +815,7 @@ class ConnectionProfile(proto.Message): Attributes: name (str): - Output only. The resource's name. + Output only. Identifier. The resource's name. create_time (google.protobuf.timestamp_pb2.Timestamp): Output only. The create time of the resource. update_time (google.protobuf.timestamp_pb2.Timestamp): @@ -2043,7 +2153,7 @@ class Stream(proto.Message): Attributes: name (str): - Output only. The stream's name. + Output only. Identifier. The stream's name. create_time (google.protobuf.timestamp_pb2.Timestamp): Output only. The creation time of the stream. update_time (google.protobuf.timestamp_pb2.Timestamp): @@ -2264,7 +2374,8 @@ class StreamObject(proto.Message): Attributes: name (str): - Output only. The object resource's name. + Output only. Identifier. The object + resource's name. create_time (google.protobuf.timestamp_pb2.Timestamp): Output only. The creation time of the object. update_time (google.protobuf.timestamp_pb2.Timestamp): diff --git a/packages/google-cloud-datastream/google/cloud/datastream_v1alpha1/gapic_version.py b/packages/google-cloud-datastream/google/cloud/datastream_v1alpha1/gapic_version.py index 50d842f376d0..43155ded0db3 100644 --- a/packages/google-cloud-datastream/google/cloud/datastream_v1alpha1/gapic_version.py +++ b/packages/google-cloud-datastream/google/cloud/datastream_v1alpha1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.11.0" # {x-release-please-version} +__version__ = "1.13.0" # {x-release-please-version} diff --git a/packages/google-cloud-datastream/google/cloud/datastream_v1alpha1/services/datastream/client.py b/packages/google-cloud-datastream/google/cloud/datastream_v1alpha1/services/datastream/client.py index 9d2869f6c71b..ef7a1ebcdaa5 100644 --- a/packages/google-cloud-datastream/google/cloud/datastream_v1alpha1/services/datastream/client.py +++ b/packages/google-cloud-datastream/google/cloud/datastream_v1alpha1/services/datastream/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -556,6 +558,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. 
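A behavioral sketch of the _add_cred_info_for_auth_errors helper added above, modeled on the unit tests introduced later in this change; the DatastreamClient import path, the mocked credential object, and the 403 status code are illustrative assumptions rather than values taken from this hunk.

    import json
    from unittest import mock

    from google.api_core import exceptions as core_exceptions
    from google.cloud.datastream_v1alpha1.services.datastream.client import DatastreamClient

    # A credentials object that exposes get_cred_info() (google-auth >= 2.35.0).
    cred = mock.Mock(["get_cred_info"])
    cred.get_cred_info = mock.Mock(
        return_value={"credential_type": "service account credentials"}
    )
    client = DatastreamClient(credentials=cred)
    client._transport._credentials = cred

    # 403 (FORBIDDEN) qualifies; 401 and 404 behave the same way, while other
    # status codes leave the error details untouched.
    error = core_exceptions.GoogleAPICallError("message", details=["foo"])
    error.code = 403

    client._add_cred_info_for_auth_errors(error)
    # The JSON-encoded credential info is appended to the existing details.
    assert error.details == [
        "foo",
        json.dumps({"credential_type": "service account credentials"}),
    ]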
diff --git a/packages/google-cloud-datastream/google/cloud/datastream_v1alpha1/services/datastream/transports/rest.py b/packages/google-cloud-datastream/google/cloud/datastream_v1alpha1/services/datastream/transports/rest.py index 3283a75bf745..ffd3b2f2279a 100644 --- a/packages/google-cloud-datastream/google/cloud/datastream_v1alpha1/services/datastream/transports/rest.py +++ b/packages/google-cloud-datastream/google/cloud/datastream_v1alpha1/services/datastream/transports/rest.py @@ -263,12 +263,35 @@ def post_create_connection_profile( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_connection_profile - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_connection_profile_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Datastream server but before - it is returned to user code. + it is returned to user code. This `post_create_connection_profile` interceptor runs + before the `post_create_connection_profile_with_metadata` interceptor. """ return response + def post_create_connection_profile_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_connection_profile + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Datastream server but before it is returned to user code. + + We recommend only using this `post_create_connection_profile_with_metadata` + interceptor in new development instead of the `post_create_connection_profile` interceptor. + When both interceptors are used, this `post_create_connection_profile_with_metadata` interceptor runs after the + `post_create_connection_profile` interceptor. The (possibly modified) response returned by + `post_create_connection_profile` will be passed to + `post_create_connection_profile_with_metadata`. + """ + return response, metadata + def pre_create_private_connection( self, request: datastream.CreatePrivateConnectionRequest, @@ -289,12 +312,35 @@ def post_create_private_connection( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_private_connection - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_private_connection_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Datastream server but before - it is returned to user code. + it is returned to user code. This `post_create_private_connection` interceptor runs + before the `post_create_private_connection_with_metadata` interceptor. """ return response + def post_create_private_connection_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_private_connection + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Datastream server but before it is returned to user code. + + We recommend only using this `post_create_private_connection_with_metadata` + interceptor in new development instead of the `post_create_private_connection` interceptor. 
+ When both interceptors are used, this `post_create_private_connection_with_metadata` interceptor runs after the + `post_create_private_connection` interceptor. The (possibly modified) response returned by + `post_create_private_connection` will be passed to + `post_create_private_connection_with_metadata`. + """ + return response, metadata + def pre_create_route( self, request: datastream.CreateRouteRequest, @@ -312,12 +358,35 @@ def post_create_route( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_route - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_route_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Datastream server but before - it is returned to user code. + it is returned to user code. This `post_create_route` interceptor runs + before the `post_create_route_with_metadata` interceptor. """ return response + def post_create_route_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_route + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Datastream server but before it is returned to user code. + + We recommend only using this `post_create_route_with_metadata` + interceptor in new development instead of the `post_create_route` interceptor. + When both interceptors are used, this `post_create_route_with_metadata` interceptor runs after the + `post_create_route` interceptor. The (possibly modified) response returned by + `post_create_route` will be passed to + `post_create_route_with_metadata`. + """ + return response, metadata + def pre_create_stream( self, request: datastream.CreateStreamRequest, @@ -335,12 +404,35 @@ def post_create_stream( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_stream - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_stream_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Datastream server but before - it is returned to user code. + it is returned to user code. This `post_create_stream` interceptor runs + before the `post_create_stream_with_metadata` interceptor. """ return response + def post_create_stream_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_stream + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Datastream server but before it is returned to user code. + + We recommend only using this `post_create_stream_with_metadata` + interceptor in new development instead of the `post_create_stream` interceptor. + When both interceptors are used, this `post_create_stream_with_metadata` interceptor runs after the + `post_create_stream` interceptor. The (possibly modified) response returned by + `post_create_stream` will be passed to + `post_create_stream_with_metadata`. 
+ """ + return response, metadata + def pre_delete_connection_profile( self, request: datastream.DeleteConnectionProfileRequest, @@ -361,12 +453,35 @@ def post_delete_connection_profile( ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_connection_profile - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_connection_profile_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Datastream server but before - it is returned to user code. + it is returned to user code. This `post_delete_connection_profile` interceptor runs + before the `post_delete_connection_profile_with_metadata` interceptor. """ return response + def post_delete_connection_profile_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_connection_profile + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Datastream server but before it is returned to user code. + + We recommend only using this `post_delete_connection_profile_with_metadata` + interceptor in new development instead of the `post_delete_connection_profile` interceptor. + When both interceptors are used, this `post_delete_connection_profile_with_metadata` interceptor runs after the + `post_delete_connection_profile` interceptor. The (possibly modified) response returned by + `post_delete_connection_profile` will be passed to + `post_delete_connection_profile_with_metadata`. + """ + return response, metadata + def pre_delete_private_connection( self, request: datastream.DeletePrivateConnectionRequest, @@ -387,12 +502,35 @@ def post_delete_private_connection( ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_private_connection - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_private_connection_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Datastream server but before - it is returned to user code. + it is returned to user code. This `post_delete_private_connection` interceptor runs + before the `post_delete_private_connection_with_metadata` interceptor. """ return response + def post_delete_private_connection_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_private_connection + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Datastream server but before it is returned to user code. + + We recommend only using this `post_delete_private_connection_with_metadata` + interceptor in new development instead of the `post_delete_private_connection` interceptor. + When both interceptors are used, this `post_delete_private_connection_with_metadata` interceptor runs after the + `post_delete_private_connection` interceptor. The (possibly modified) response returned by + `post_delete_private_connection` will be passed to + `post_delete_private_connection_with_metadata`. 
+ """ + return response, metadata + def pre_delete_route( self, request: datastream.DeleteRouteRequest, @@ -410,12 +548,35 @@ def post_delete_route( ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_route - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_route_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Datastream server but before - it is returned to user code. + it is returned to user code. This `post_delete_route` interceptor runs + before the `post_delete_route_with_metadata` interceptor. """ return response + def post_delete_route_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_route + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Datastream server but before it is returned to user code. + + We recommend only using this `post_delete_route_with_metadata` + interceptor in new development instead of the `post_delete_route` interceptor. + When both interceptors are used, this `post_delete_route_with_metadata` interceptor runs after the + `post_delete_route` interceptor. The (possibly modified) response returned by + `post_delete_route` will be passed to + `post_delete_route_with_metadata`. + """ + return response, metadata + def pre_delete_stream( self, request: datastream.DeleteStreamRequest, @@ -433,12 +594,35 @@ def post_delete_stream( ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_stream - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_stream_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Datastream server but before - it is returned to user code. + it is returned to user code. This `post_delete_stream` interceptor runs + before the `post_delete_stream_with_metadata` interceptor. """ return response + def post_delete_stream_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_stream + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Datastream server but before it is returned to user code. + + We recommend only using this `post_delete_stream_with_metadata` + interceptor in new development instead of the `post_delete_stream` interceptor. + When both interceptors are used, this `post_delete_stream_with_metadata` interceptor runs after the + `post_delete_stream` interceptor. The (possibly modified) response returned by + `post_delete_stream` will be passed to + `post_delete_stream_with_metadata`. + """ + return response, metadata + def pre_discover_connection_profile( self, request: datastream.DiscoverConnectionProfileRequest, @@ -459,12 +643,38 @@ def post_discover_connection_profile( ) -> datastream.DiscoverConnectionProfileResponse: """Post-rpc interceptor for discover_connection_profile - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_discover_connection_profile_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response after it is returned by the Datastream server but before - it is returned to user code. + it is returned to user code. This `post_discover_connection_profile` interceptor runs + before the `post_discover_connection_profile_with_metadata` interceptor. """ return response + def post_discover_connection_profile_with_metadata( + self, + response: datastream.DiscoverConnectionProfileResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + datastream.DiscoverConnectionProfileResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for discover_connection_profile + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Datastream server but before it is returned to user code. + + We recommend only using this `post_discover_connection_profile_with_metadata` + interceptor in new development instead of the `post_discover_connection_profile` interceptor. + When both interceptors are used, this `post_discover_connection_profile_with_metadata` interceptor runs after the + `post_discover_connection_profile` interceptor. The (possibly modified) response returned by + `post_discover_connection_profile` will be passed to + `post_discover_connection_profile_with_metadata`. + """ + return response, metadata + def pre_fetch_errors( self, request: datastream.FetchErrorsRequest, @@ -482,12 +692,35 @@ def post_fetch_errors( ) -> operations_pb2.Operation: """Post-rpc interceptor for fetch_errors - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_fetch_errors_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Datastream server but before - it is returned to user code. + it is returned to user code. This `post_fetch_errors` interceptor runs + before the `post_fetch_errors_with_metadata` interceptor. """ return response + def post_fetch_errors_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for fetch_errors + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Datastream server but before it is returned to user code. + + We recommend only using this `post_fetch_errors_with_metadata` + interceptor in new development instead of the `post_fetch_errors` interceptor. + When both interceptors are used, this `post_fetch_errors_with_metadata` interceptor runs after the + `post_fetch_errors` interceptor. The (possibly modified) response returned by + `post_fetch_errors` will be passed to + `post_fetch_errors_with_metadata`. + """ + return response, metadata + def pre_fetch_static_ips( self, request: datastream.FetchStaticIpsRequest, @@ -507,12 +740,37 @@ def post_fetch_static_ips( ) -> datastream.FetchStaticIpsResponse: """Post-rpc interceptor for fetch_static_ips - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_fetch_static_ips_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Datastream server but before - it is returned to user code. + it is returned to user code. This `post_fetch_static_ips` interceptor runs + before the `post_fetch_static_ips_with_metadata` interceptor. 
""" return response + def post_fetch_static_ips_with_metadata( + self, + response: datastream.FetchStaticIpsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + datastream.FetchStaticIpsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for fetch_static_ips + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Datastream server but before it is returned to user code. + + We recommend only using this `post_fetch_static_ips_with_metadata` + interceptor in new development instead of the `post_fetch_static_ips` interceptor. + When both interceptors are used, this `post_fetch_static_ips_with_metadata` interceptor runs after the + `post_fetch_static_ips` interceptor. The (possibly modified) response returned by + `post_fetch_static_ips` will be passed to + `post_fetch_static_ips_with_metadata`. + """ + return response, metadata + def pre_get_connection_profile( self, request: datastream.GetConnectionProfileRequest, @@ -532,12 +790,37 @@ def post_get_connection_profile( ) -> datastream_resources.ConnectionProfile: """Post-rpc interceptor for get_connection_profile - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_connection_profile_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Datastream server but before - it is returned to user code. + it is returned to user code. This `post_get_connection_profile` interceptor runs + before the `post_get_connection_profile_with_metadata` interceptor. """ return response + def post_get_connection_profile_with_metadata( + self, + response: datastream_resources.ConnectionProfile, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + datastream_resources.ConnectionProfile, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for get_connection_profile + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Datastream server but before it is returned to user code. + + We recommend only using this `post_get_connection_profile_with_metadata` + interceptor in new development instead of the `post_get_connection_profile` interceptor. + When both interceptors are used, this `post_get_connection_profile_with_metadata` interceptor runs after the + `post_get_connection_profile` interceptor. The (possibly modified) response returned by + `post_get_connection_profile` will be passed to + `post_get_connection_profile_with_metadata`. + """ + return response, metadata + def pre_get_private_connection( self, request: datastream.GetPrivateConnectionRequest, @@ -557,12 +840,37 @@ def post_get_private_connection( ) -> datastream_resources.PrivateConnection: """Post-rpc interceptor for get_private_connection - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_private_connection_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Datastream server but before - it is returned to user code. + it is returned to user code. This `post_get_private_connection` interceptor runs + before the `post_get_private_connection_with_metadata` interceptor. 
""" return response + def post_get_private_connection_with_metadata( + self, + response: datastream_resources.PrivateConnection, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + datastream_resources.PrivateConnection, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for get_private_connection + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Datastream server but before it is returned to user code. + + We recommend only using this `post_get_private_connection_with_metadata` + interceptor in new development instead of the `post_get_private_connection` interceptor. + When both interceptors are used, this `post_get_private_connection_with_metadata` interceptor runs after the + `post_get_private_connection` interceptor. The (possibly modified) response returned by + `post_get_private_connection` will be passed to + `post_get_private_connection_with_metadata`. + """ + return response, metadata + def pre_get_route( self, request: datastream.GetRouteRequest, @@ -580,12 +888,35 @@ def post_get_route( ) -> datastream_resources.Route: """Post-rpc interceptor for get_route - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_route_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Datastream server but before - it is returned to user code. + it is returned to user code. This `post_get_route` interceptor runs + before the `post_get_route_with_metadata` interceptor. """ return response + def post_get_route_with_metadata( + self, + response: datastream_resources.Route, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[datastream_resources.Route, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_route + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Datastream server but before it is returned to user code. + + We recommend only using this `post_get_route_with_metadata` + interceptor in new development instead of the `post_get_route` interceptor. + When both interceptors are used, this `post_get_route_with_metadata` interceptor runs after the + `post_get_route` interceptor. The (possibly modified) response returned by + `post_get_route` will be passed to + `post_get_route_with_metadata`. + """ + return response, metadata + def pre_get_stream( self, request: datastream.GetStreamRequest, @@ -603,12 +934,35 @@ def post_get_stream( ) -> datastream_resources.Stream: """Post-rpc interceptor for get_stream - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_stream_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Datastream server but before - it is returned to user code. + it is returned to user code. This `post_get_stream` interceptor runs + before the `post_get_stream_with_metadata` interceptor. """ return response + def post_get_stream_with_metadata( + self, + response: datastream_resources.Stream, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[datastream_resources.Stream, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_stream + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Datastream server but before it is returned to user code. 
+ + We recommend only using this `post_get_stream_with_metadata` + interceptor in new development instead of the `post_get_stream` interceptor. + When both interceptors are used, this `post_get_stream_with_metadata` interceptor runs after the + `post_get_stream` interceptor. The (possibly modified) response returned by + `post_get_stream` will be passed to + `post_get_stream_with_metadata`. + """ + return response, metadata + def pre_list_connection_profiles( self, request: datastream.ListConnectionProfilesRequest, @@ -629,12 +983,38 @@ def post_list_connection_profiles( ) -> datastream.ListConnectionProfilesResponse: """Post-rpc interceptor for list_connection_profiles - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_connection_profiles_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Datastream server but before - it is returned to user code. + it is returned to user code. This `post_list_connection_profiles` interceptor runs + before the `post_list_connection_profiles_with_metadata` interceptor. """ return response + def post_list_connection_profiles_with_metadata( + self, + response: datastream.ListConnectionProfilesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + datastream.ListConnectionProfilesResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_connection_profiles + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Datastream server but before it is returned to user code. + + We recommend only using this `post_list_connection_profiles_with_metadata` + interceptor in new development instead of the `post_list_connection_profiles` interceptor. + When both interceptors are used, this `post_list_connection_profiles_with_metadata` interceptor runs after the + `post_list_connection_profiles` interceptor. The (possibly modified) response returned by + `post_list_connection_profiles` will be passed to + `post_list_connection_profiles_with_metadata`. + """ + return response, metadata + def pre_list_private_connections( self, request: datastream.ListPrivateConnectionsRequest, @@ -655,12 +1035,38 @@ def post_list_private_connections( ) -> datastream.ListPrivateConnectionsResponse: """Post-rpc interceptor for list_private_connections - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_private_connections_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Datastream server but before - it is returned to user code. + it is returned to user code. This `post_list_private_connections` interceptor runs + before the `post_list_private_connections_with_metadata` interceptor. """ return response + def post_list_private_connections_with_metadata( + self, + response: datastream.ListPrivateConnectionsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + datastream.ListPrivateConnectionsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_private_connections + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Datastream server but before it is returned to user code. + + We recommend only using this `post_list_private_connections_with_metadata` + interceptor in new development instead of the `post_list_private_connections` interceptor. 
+ When both interceptors are used, this `post_list_private_connections_with_metadata` interceptor runs after the + `post_list_private_connections` interceptor. The (possibly modified) response returned by + `post_list_private_connections` will be passed to + `post_list_private_connections_with_metadata`. + """ + return response, metadata + def pre_list_routes( self, request: datastream.ListRoutesRequest, @@ -678,12 +1084,35 @@ def post_list_routes( ) -> datastream.ListRoutesResponse: """Post-rpc interceptor for list_routes - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_routes_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Datastream server but before - it is returned to user code. + it is returned to user code. This `post_list_routes` interceptor runs + before the `post_list_routes_with_metadata` interceptor. """ return response + def post_list_routes_with_metadata( + self, + response: datastream.ListRoutesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[datastream.ListRoutesResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_routes + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Datastream server but before it is returned to user code. + + We recommend only using this `post_list_routes_with_metadata` + interceptor in new development instead of the `post_list_routes` interceptor. + When both interceptors are used, this `post_list_routes_with_metadata` interceptor runs after the + `post_list_routes` interceptor. The (possibly modified) response returned by + `post_list_routes` will be passed to + `post_list_routes_with_metadata`. + """ + return response, metadata + def pre_list_streams( self, request: datastream.ListStreamsRequest, @@ -701,12 +1130,35 @@ def post_list_streams( ) -> datastream.ListStreamsResponse: """Post-rpc interceptor for list_streams - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_streams_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Datastream server but before - it is returned to user code. + it is returned to user code. This `post_list_streams` interceptor runs + before the `post_list_streams_with_metadata` interceptor. """ return response + def post_list_streams_with_metadata( + self, + response: datastream.ListStreamsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[datastream.ListStreamsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_streams + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Datastream server but before it is returned to user code. + + We recommend only using this `post_list_streams_with_metadata` + interceptor in new development instead of the `post_list_streams` interceptor. + When both interceptors are used, this `post_list_streams_with_metadata` interceptor runs after the + `post_list_streams` interceptor. The (possibly modified) response returned by + `post_list_streams` will be passed to + `post_list_streams_with_metadata`. 
+ """ + return response, metadata + def pre_update_connection_profile( self, request: datastream.UpdateConnectionProfileRequest, @@ -727,12 +1179,35 @@ def post_update_connection_profile( ) -> operations_pb2.Operation: """Post-rpc interceptor for update_connection_profile - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_connection_profile_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Datastream server but before - it is returned to user code. + it is returned to user code. This `post_update_connection_profile` interceptor runs + before the `post_update_connection_profile_with_metadata` interceptor. """ return response + def post_update_connection_profile_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_connection_profile + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Datastream server but before it is returned to user code. + + We recommend only using this `post_update_connection_profile_with_metadata` + interceptor in new development instead of the `post_update_connection_profile` interceptor. + When both interceptors are used, this `post_update_connection_profile_with_metadata` interceptor runs after the + `post_update_connection_profile` interceptor. The (possibly modified) response returned by + `post_update_connection_profile` will be passed to + `post_update_connection_profile_with_metadata`. + """ + return response, metadata + def pre_update_stream( self, request: datastream.UpdateStreamRequest, @@ -750,12 +1225,35 @@ def post_update_stream( ) -> operations_pb2.Operation: """Post-rpc interceptor for update_stream - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_stream_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Datastream server but before - it is returned to user code. + it is returned to user code. This `post_update_stream` interceptor runs + before the `post_update_stream_with_metadata` interceptor. """ return response + def post_update_stream_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_stream + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Datastream server but before it is returned to user code. + + We recommend only using this `post_update_stream_with_metadata` + interceptor in new development instead of the `post_update_stream` interceptor. + When both interceptors are used, this `post_update_stream_with_metadata` interceptor runs after the + `post_update_stream` interceptor. The (possibly modified) response returned by + `post_update_stream` will be passed to + `post_update_stream_with_metadata`. 
+ """ + return response, metadata + @dataclasses.dataclass class DatastreamRestStub: @@ -1023,6 +1521,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_connection_profile(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_connection_profile_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1172,6 +1674,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_private_connection(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_private_connection_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1323,6 +1829,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_route(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_route_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1476,6 +1986,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_stream(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_stream_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1619,6 +2133,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_connection_profile(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_connection_profile_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1762,6 +2280,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_private_connection(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_private_connection_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1907,6 +2429,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_route(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_route_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2052,6 +2578,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_stream(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_stream_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2202,6 +2732,10 @@ def __call__( 
json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_discover_connection_profile(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_discover_connection_profile_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2356,6 +2890,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_fetch_errors(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_fetch_errors_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2503,6 +3041,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_fetch_static_ips(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_fetch_static_ips_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2647,6 +3189,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_connection_profile(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_connection_profile_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2795,6 +3341,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_private_connection(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_private_connection_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2943,6 +3493,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_route(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_route_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3085,6 +3639,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_stream(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_stream_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3227,6 +3785,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_connection_profiles(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_connection_profiles_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3371,6 +3933,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_private_connections(resp) + 
response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_private_connections_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3515,6 +4081,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_routes(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_routes_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3659,6 +4229,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_streams(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_streams_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3808,6 +4382,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_connection_profile(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_connection_profile_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3961,6 +4539,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_stream(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_stream_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-datastream/samples/generated_samples/snippet_metadata_google.cloud.datastream.v1.json b/packages/google-cloud-datastream/samples/generated_samples/snippet_metadata_google.cloud.datastream.v1.json index 00d1a063213c..ff0238514ddc 100644 --- a/packages/google-cloud-datastream/samples/generated_samples/snippet_metadata_google.cloud.datastream.v1.json +++ b/packages/google-cloud-datastream/samples/generated_samples/snippet_metadata_google.cloud.datastream.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-datastream", - "version": "1.11.0" + "version": "1.13.0" }, "snippets": [ { diff --git a/packages/google-cloud-datastream/samples/generated_samples/snippet_metadata_google.cloud.datastream.v1alpha1.json b/packages/google-cloud-datastream/samples/generated_samples/snippet_metadata_google.cloud.datastream.v1alpha1.json index 768d81895d0a..e19a39148c04 100644 --- a/packages/google-cloud-datastream/samples/generated_samples/snippet_metadata_google.cloud.datastream.v1alpha1.json +++ b/packages/google-cloud-datastream/samples/generated_samples/snippet_metadata_google.cloud.datastream.v1alpha1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-datastream", - "version": "1.11.0" + "version": "1.13.0" }, "snippets": [ { diff --git a/packages/google-cloud-datastream/tests/unit/gapic/datastream_v1/test_datastream.py b/packages/google-cloud-datastream/tests/unit/gapic/datastream_v1/test_datastream.py index f805276fc72a..da7287be8739 100644 --- 
a/packages/google-cloud-datastream/tests/unit/gapic/datastream_v1/test_datastream.py +++ b/packages/google-cloud-datastream/tests/unit/gapic/datastream_v1/test_datastream.py @@ -78,6 +78,13 @@ ) from google.cloud.datastream_v1.types import datastream, datastream_resources +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -313,6 +320,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = DatastreamClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = DatastreamClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -17705,10 +17755,14 @@ def test_list_connection_profiles_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DatastreamRestInterceptor, "post_list_connection_profiles" ) as post, mock.patch.object( + transports.DatastreamRestInterceptor, + "post_list_connection_profiles_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DatastreamRestInterceptor, "pre_list_connection_profiles" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = datastream.ListConnectionProfilesRequest.pb( datastream.ListConnectionProfilesRequest() ) @@ -17734,6 +17788,10 @@ def test_list_connection_profiles_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = datastream.ListConnectionProfilesResponse() + post_with_metadata.return_value = ( + datastream.ListConnectionProfilesResponse(), + metadata, + ) client.list_connection_profiles( request, @@ -17745,6 +17803,7 @@ def test_list_connection_profiles_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_connection_profile_rest_bad_request( @@ -17835,10 +17894,14 @@ def test_get_connection_profile_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DatastreamRestInterceptor, "post_get_connection_profile" ) as post, mock.patch.object( + transports.DatastreamRestInterceptor, 
+ "post_get_connection_profile_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DatastreamRestInterceptor, "pre_get_connection_profile" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = datastream.GetConnectionProfileRequest.pb( datastream.GetConnectionProfileRequest() ) @@ -17864,6 +17927,10 @@ def test_get_connection_profile_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = datastream_resources.ConnectionProfile() + post_with_metadata.return_value = ( + datastream_resources.ConnectionProfile(), + metadata, + ) client.get_connection_profile( request, @@ -17875,6 +17942,7 @@ def test_get_connection_profile_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_connection_profile_rest_bad_request( @@ -17966,6 +18034,14 @@ def test_create_connection_profile_rest_call_success(request_type): "username": "username_value", "password": "password_value", "database": "database_value", + "ssl_config": { + "server_verification": {"ca_certificate": "ca_certificate_value"}, + "server_and_client_verification": { + "client_certificate": "client_certificate_value", + "client_key": "client_key_value", + "ca_certificate": "ca_certificate_value", + }, + }, }, "sql_server_profile": { "hostname": "hostname_value", @@ -18092,10 +18168,14 @@ def test_create_connection_profile_rest_interceptors(null_interceptor): ), mock.patch.object( transports.DatastreamRestInterceptor, "post_create_connection_profile" ) as post, mock.patch.object( + transports.DatastreamRestInterceptor, + "post_create_connection_profile_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DatastreamRestInterceptor, "pre_create_connection_profile" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = datastream.CreateConnectionProfileRequest.pb( datastream.CreateConnectionProfileRequest() ) @@ -18119,6 +18199,7 @@ def test_create_connection_profile_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_connection_profile( request, @@ -18130,6 +18211,7 @@ def test_create_connection_profile_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_connection_profile_rest_bad_request( @@ -18229,6 +18311,14 @@ def test_update_connection_profile_rest_call_success(request_type): "username": "username_value", "password": "password_value", "database": "database_value", + "ssl_config": { + "server_verification": {"ca_certificate": "ca_certificate_value"}, + "server_and_client_verification": { + "client_certificate": "client_certificate_value", + "client_key": "client_key_value", + "ca_certificate": "ca_certificate_value", + }, + }, }, "sql_server_profile": { "hostname": "hostname_value", @@ -18355,10 +18445,14 @@ def test_update_connection_profile_rest_interceptors(null_interceptor): ), mock.patch.object( transports.DatastreamRestInterceptor, "post_update_connection_profile" ) as post, mock.patch.object( + transports.DatastreamRestInterceptor, + "post_update_connection_profile_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DatastreamRestInterceptor, "pre_update_connection_profile" ) as pre: 
pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = datastream.UpdateConnectionProfileRequest.pb( datastream.UpdateConnectionProfileRequest() ) @@ -18382,6 +18476,7 @@ def test_update_connection_profile_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.update_connection_profile( request, @@ -18393,6 +18488,7 @@ def test_update_connection_profile_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_connection_profile_rest_bad_request( @@ -18477,10 +18573,14 @@ def test_delete_connection_profile_rest_interceptors(null_interceptor): ), mock.patch.object( transports.DatastreamRestInterceptor, "post_delete_connection_profile" ) as post, mock.patch.object( + transports.DatastreamRestInterceptor, + "post_delete_connection_profile_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DatastreamRestInterceptor, "pre_delete_connection_profile" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = datastream.DeleteConnectionProfileRequest.pb( datastream.DeleteConnectionProfileRequest() ) @@ -18504,6 +18604,7 @@ def test_delete_connection_profile_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.delete_connection_profile( request, @@ -18515,6 +18616,7 @@ def test_delete_connection_profile_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_discover_connection_profile_rest_bad_request( @@ -18596,10 +18698,14 @@ def test_discover_connection_profile_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DatastreamRestInterceptor, "post_discover_connection_profile" ) as post, mock.patch.object( + transports.DatastreamRestInterceptor, + "post_discover_connection_profile_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DatastreamRestInterceptor, "pre_discover_connection_profile" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = datastream.DiscoverConnectionProfileRequest.pb( datastream.DiscoverConnectionProfileRequest() ) @@ -18625,6 +18731,10 @@ def test_discover_connection_profile_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = datastream.DiscoverConnectionProfileResponse() + post_with_metadata.return_value = ( + datastream.DiscoverConnectionProfileResponse(), + metadata, + ) client.discover_connection_profile( request, @@ -18636,6 +18746,7 @@ def test_discover_connection_profile_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_streams_rest_bad_request(request_type=datastream.ListStreamsRequest): @@ -18720,10 +18831,13 @@ def test_list_streams_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DatastreamRestInterceptor, "post_list_streams" ) as post, mock.patch.object( + transports.DatastreamRestInterceptor, "post_list_streams_with_metadata" + ) as post_with_metadata, mock.patch.object( 
transports.DatastreamRestInterceptor, "pre_list_streams" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = datastream.ListStreamsRequest.pb(datastream.ListStreamsRequest()) transcode.return_value = { "method": "post", @@ -18747,6 +18861,7 @@ def test_list_streams_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = datastream.ListStreamsResponse() + post_with_metadata.return_value = datastream.ListStreamsResponse(), metadata client.list_streams( request, @@ -18758,6 +18873,7 @@ def test_list_streams_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_stream_rest_bad_request(request_type=datastream.GetStreamRequest): @@ -18849,10 +18965,13 @@ def test_get_stream_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DatastreamRestInterceptor, "post_get_stream" ) as post, mock.patch.object( + transports.DatastreamRestInterceptor, "post_get_stream_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DatastreamRestInterceptor, "pre_get_stream" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = datastream.GetStreamRequest.pb(datastream.GetStreamRequest()) transcode.return_value = { "method": "post", @@ -18876,6 +18995,7 @@ def test_get_stream_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = datastream_resources.Stream() + post_with_metadata.return_value = datastream_resources.Stream(), metadata client.get_stream( request, @@ -18887,6 +19007,7 @@ def test_get_stream_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_stream_rest_bad_request(request_type=datastream.CreateStreamRequest): @@ -19217,10 +19338,13 @@ def test_create_stream_rest_interceptors(null_interceptor): ), mock.patch.object( transports.DatastreamRestInterceptor, "post_create_stream" ) as post, mock.patch.object( + transports.DatastreamRestInterceptor, "post_create_stream_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DatastreamRestInterceptor, "pre_create_stream" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = datastream.CreateStreamRequest.pb(datastream.CreateStreamRequest()) transcode.return_value = { "method": "post", @@ -19242,6 +19366,7 @@ def test_create_stream_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_stream( request, @@ -19253,6 +19378,7 @@ def test_create_stream_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_stream_rest_bad_request(request_type=datastream.UpdateStreamRequest): @@ -19587,10 +19713,13 @@ def test_update_stream_rest_interceptors(null_interceptor): ), mock.patch.object( transports.DatastreamRestInterceptor, "post_update_stream" ) as post, mock.patch.object( + transports.DatastreamRestInterceptor, "post_update_stream_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DatastreamRestInterceptor, "pre_update_stream" ) as pre: pre.assert_not_called() post.assert_not_called() + 
post_with_metadata.assert_not_called() pb_message = datastream.UpdateStreamRequest.pb(datastream.UpdateStreamRequest()) transcode.return_value = { "method": "post", @@ -19612,6 +19741,7 @@ def test_update_stream_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.update_stream( request, @@ -19623,6 +19753,7 @@ def test_update_stream_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_stream_rest_bad_request(request_type=datastream.DeleteStreamRequest): @@ -19701,10 +19832,13 @@ def test_delete_stream_rest_interceptors(null_interceptor): ), mock.patch.object( transports.DatastreamRestInterceptor, "post_delete_stream" ) as post, mock.patch.object( + transports.DatastreamRestInterceptor, "post_delete_stream_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DatastreamRestInterceptor, "pre_delete_stream" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = datastream.DeleteStreamRequest.pb(datastream.DeleteStreamRequest()) transcode.return_value = { "method": "post", @@ -19726,6 +19860,7 @@ def test_delete_stream_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.delete_stream( request, @@ -19737,6 +19872,7 @@ def test_delete_stream_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_run_stream_rest_bad_request(request_type=datastream.RunStreamRequest): @@ -19815,10 +19951,13 @@ def test_run_stream_rest_interceptors(null_interceptor): ), mock.patch.object( transports.DatastreamRestInterceptor, "post_run_stream" ) as post, mock.patch.object( + transports.DatastreamRestInterceptor, "post_run_stream_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DatastreamRestInterceptor, "pre_run_stream" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = datastream.RunStreamRequest.pb(datastream.RunStreamRequest()) transcode.return_value = { "method": "post", @@ -19840,6 +19979,7 @@ def test_run_stream_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.run_stream( request, @@ -19851,6 +19991,7 @@ def test_run_stream_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_stream_object_rest_bad_request( @@ -19941,10 +20082,13 @@ def test_get_stream_object_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DatastreamRestInterceptor, "post_get_stream_object" ) as post, mock.patch.object( + transports.DatastreamRestInterceptor, "post_get_stream_object_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DatastreamRestInterceptor, "pre_get_stream_object" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = datastream.GetStreamObjectRequest.pb( datastream.GetStreamObjectRequest() ) @@ -19970,6 +20114,7 @@ def 
test_get_stream_object_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = datastream_resources.StreamObject() + post_with_metadata.return_value = datastream_resources.StreamObject(), metadata client.get_stream_object( request, @@ -19981,6 +20126,7 @@ def test_get_stream_object_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_lookup_stream_object_rest_bad_request( @@ -20067,10 +20213,13 @@ def test_lookup_stream_object_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DatastreamRestInterceptor, "post_lookup_stream_object" ) as post, mock.patch.object( + transports.DatastreamRestInterceptor, "post_lookup_stream_object_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DatastreamRestInterceptor, "pre_lookup_stream_object" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = datastream.LookupStreamObjectRequest.pb( datastream.LookupStreamObjectRequest() ) @@ -20096,6 +20245,7 @@ def test_lookup_stream_object_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = datastream_resources.StreamObject() + post_with_metadata.return_value = datastream_resources.StreamObject(), metadata client.lookup_stream_object( request, @@ -20107,6 +20257,7 @@ def test_lookup_stream_object_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_stream_objects_rest_bad_request( @@ -20191,10 +20342,13 @@ def test_list_stream_objects_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DatastreamRestInterceptor, "post_list_stream_objects" ) as post, mock.patch.object( + transports.DatastreamRestInterceptor, "post_list_stream_objects_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DatastreamRestInterceptor, "pre_list_stream_objects" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = datastream.ListStreamObjectsRequest.pb( datastream.ListStreamObjectsRequest() ) @@ -20220,6 +20374,10 @@ def test_list_stream_objects_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = datastream.ListStreamObjectsResponse() + post_with_metadata.return_value = ( + datastream.ListStreamObjectsResponse(), + metadata, + ) client.list_stream_objects( request, @@ -20231,6 +20389,7 @@ def test_list_stream_objects_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_start_backfill_job_rest_bad_request( @@ -20316,10 +20475,13 @@ def test_start_backfill_job_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DatastreamRestInterceptor, "post_start_backfill_job" ) as post, mock.patch.object( + transports.DatastreamRestInterceptor, "post_start_backfill_job_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DatastreamRestInterceptor, "pre_start_backfill_job" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = datastream.StartBackfillJobRequest.pb( datastream.StartBackfillJobRequest() ) @@ -20345,6 +20507,10 @@ def test_start_backfill_job_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value 
= datastream.StartBackfillJobResponse() + post_with_metadata.return_value = ( + datastream.StartBackfillJobResponse(), + metadata, + ) client.start_backfill_job( request, @@ -20356,6 +20522,7 @@ def test_start_backfill_job_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_stop_backfill_job_rest_bad_request( @@ -20441,10 +20608,13 @@ def test_stop_backfill_job_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DatastreamRestInterceptor, "post_stop_backfill_job" ) as post, mock.patch.object( + transports.DatastreamRestInterceptor, "post_stop_backfill_job_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DatastreamRestInterceptor, "pre_stop_backfill_job" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = datastream.StopBackfillJobRequest.pb( datastream.StopBackfillJobRequest() ) @@ -20470,6 +20640,7 @@ def test_stop_backfill_job_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = datastream.StopBackfillJobResponse() + post_with_metadata.return_value = datastream.StopBackfillJobResponse(), metadata client.stop_backfill_job( request, @@ -20481,6 +20652,7 @@ def test_stop_backfill_job_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_fetch_static_ips_rest_bad_request( @@ -20567,10 +20739,13 @@ def test_fetch_static_ips_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DatastreamRestInterceptor, "post_fetch_static_ips" ) as post, mock.patch.object( + transports.DatastreamRestInterceptor, "post_fetch_static_ips_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DatastreamRestInterceptor, "pre_fetch_static_ips" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = datastream.FetchStaticIpsRequest.pb( datastream.FetchStaticIpsRequest() ) @@ -20596,6 +20771,7 @@ def test_fetch_static_ips_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = datastream.FetchStaticIpsResponse() + post_with_metadata.return_value = datastream.FetchStaticIpsResponse(), metadata client.fetch_static_ips( request, @@ -20607,6 +20783,7 @@ def test_fetch_static_ips_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_private_connection_rest_bad_request( @@ -20772,10 +20949,14 @@ def test_create_private_connection_rest_interceptors(null_interceptor): ), mock.patch.object( transports.DatastreamRestInterceptor, "post_create_private_connection" ) as post, mock.patch.object( + transports.DatastreamRestInterceptor, + "post_create_private_connection_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DatastreamRestInterceptor, "pre_create_private_connection" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = datastream.CreatePrivateConnectionRequest.pb( datastream.CreatePrivateConnectionRequest() ) @@ -20799,6 +20980,7 @@ def test_create_private_connection_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata 
client.create_private_connection( request, @@ -20810,6 +20992,7 @@ def test_create_private_connection_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_private_connection_rest_bad_request( @@ -20902,10 +21085,14 @@ def test_get_private_connection_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DatastreamRestInterceptor, "post_get_private_connection" ) as post, mock.patch.object( + transports.DatastreamRestInterceptor, + "post_get_private_connection_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DatastreamRestInterceptor, "pre_get_private_connection" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = datastream.GetPrivateConnectionRequest.pb( datastream.GetPrivateConnectionRequest() ) @@ -20931,6 +21118,10 @@ def test_get_private_connection_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = datastream_resources.PrivateConnection() + post_with_metadata.return_value = ( + datastream_resources.PrivateConnection(), + metadata, + ) client.get_private_connection( request, @@ -20942,6 +21133,7 @@ def test_get_private_connection_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_private_connections_rest_bad_request( @@ -21028,10 +21220,14 @@ def test_list_private_connections_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DatastreamRestInterceptor, "post_list_private_connections" ) as post, mock.patch.object( + transports.DatastreamRestInterceptor, + "post_list_private_connections_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DatastreamRestInterceptor, "pre_list_private_connections" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = datastream.ListPrivateConnectionsRequest.pb( datastream.ListPrivateConnectionsRequest() ) @@ -21057,6 +21253,10 @@ def test_list_private_connections_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = datastream.ListPrivateConnectionsResponse() + post_with_metadata.return_value = ( + datastream.ListPrivateConnectionsResponse(), + metadata, + ) client.list_private_connections( request, @@ -21068,6 +21268,7 @@ def test_list_private_connections_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_private_connection_rest_bad_request( @@ -21152,10 +21353,14 @@ def test_delete_private_connection_rest_interceptors(null_interceptor): ), mock.patch.object( transports.DatastreamRestInterceptor, "post_delete_private_connection" ) as post, mock.patch.object( + transports.DatastreamRestInterceptor, + "post_delete_private_connection_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DatastreamRestInterceptor, "pre_delete_private_connection" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = datastream.DeletePrivateConnectionRequest.pb( datastream.DeletePrivateConnectionRequest() ) @@ -21179,6 +21384,7 @@ def test_delete_private_connection_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = 
operations_pb2.Operation(), metadata client.delete_private_connection( request, @@ -21190,6 +21396,7 @@ def test_delete_private_connection_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_route_rest_bad_request(request_type=datastream.CreateRouteRequest): @@ -21348,10 +21555,13 @@ def test_create_route_rest_interceptors(null_interceptor): ), mock.patch.object( transports.DatastreamRestInterceptor, "post_create_route" ) as post, mock.patch.object( + transports.DatastreamRestInterceptor, "post_create_route_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DatastreamRestInterceptor, "pre_create_route" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = datastream.CreateRouteRequest.pb(datastream.CreateRouteRequest()) transcode.return_value = { "method": "post", @@ -21373,6 +21583,7 @@ def test_create_route_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_route( request, @@ -21384,6 +21595,7 @@ def test_create_route_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_route_rest_bad_request(request_type=datastream.GetRouteRequest): @@ -21476,10 +21688,13 @@ def test_get_route_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DatastreamRestInterceptor, "post_get_route" ) as post, mock.patch.object( + transports.DatastreamRestInterceptor, "post_get_route_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DatastreamRestInterceptor, "pre_get_route" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = datastream.GetRouteRequest.pb(datastream.GetRouteRequest()) transcode.return_value = { "method": "post", @@ -21501,6 +21716,7 @@ def test_get_route_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = datastream_resources.Route() + post_with_metadata.return_value = datastream_resources.Route(), metadata client.get_route( request, @@ -21512,6 +21728,7 @@ def test_get_route_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_routes_rest_bad_request(request_type=datastream.ListRoutesRequest): @@ -21600,10 +21817,13 @@ def test_list_routes_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DatastreamRestInterceptor, "post_list_routes" ) as post, mock.patch.object( + transports.DatastreamRestInterceptor, "post_list_routes_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DatastreamRestInterceptor, "pre_list_routes" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = datastream.ListRoutesRequest.pb(datastream.ListRoutesRequest()) transcode.return_value = { "method": "post", @@ -21627,6 +21847,7 @@ def test_list_routes_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = datastream.ListRoutesResponse() + post_with_metadata.return_value = datastream.ListRoutesResponse(), metadata client.list_routes( request, @@ -21638,6 +21859,7 @@ def 
test_list_routes_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_route_rest_bad_request(request_type=datastream.DeleteRouteRequest): @@ -21720,10 +21942,13 @@ def test_delete_route_rest_interceptors(null_interceptor): ), mock.patch.object( transports.DatastreamRestInterceptor, "post_delete_route" ) as post, mock.patch.object( + transports.DatastreamRestInterceptor, "post_delete_route_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DatastreamRestInterceptor, "pre_delete_route" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = datastream.DeleteRouteRequest.pb(datastream.DeleteRouteRequest()) transcode.return_value = { "method": "post", @@ -21745,6 +21970,7 @@ def test_delete_route_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.delete_route( request, @@ -21756,6 +21982,7 @@ def test_delete_route_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationRequest): diff --git a/packages/google-cloud-datastream/tests/unit/gapic/datastream_v1alpha1/test_datastream.py b/packages/google-cloud-datastream/tests/unit/gapic/datastream_v1alpha1/test_datastream.py index f6abd17ce818..2ef2dbd8762b 100644 --- a/packages/google-cloud-datastream/tests/unit/gapic/datastream_v1alpha1/test_datastream.py +++ b/packages/google-cloud-datastream/tests/unit/gapic/datastream_v1alpha1/test_datastream.py @@ -74,6 +74,13 @@ ) from google.cloud.datastream_v1alpha1.types import datastream, datastream_resources +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -309,6 +316,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
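For context on the credential-info fixtures above: the parametrized tests that follow exercise the new `_add_cred_info_for_auth_errors` client helper, which appends the credential report to the error details only for 401/403/404 responses and only when the credentials expose `get_cred_info`. A minimal standalone sketch of that behavior, mirroring the generated helper added to the clients later in this change (the free-function form and argument names here are illustrative, not the library's public API):

import json
from http import HTTPStatus


def _add_cred_info_for_auth_errors(error, credentials):
    """Append a credential-info JSON string to error details for 401/403/404 errors.

    Sketch only: in the generated clients this is a client method and the
    credentials are read from the transport; ``error`` is a
    ``google.api_core.exceptions.GoogleAPICallError``.
    """
    if error.code not in (
        HTTPStatus.UNAUTHORIZED,
        HTTPStatus.FORBIDDEN,
        HTTPStatus.NOT_FOUND,
    ):
        return
    # get_cred_info is only available in google-auth>=2.35.0.
    if not hasattr(credentials, "get_cred_info"):
        return
    cred_info = credentials.get_cred_info()
    if cred_info and hasattr(error._details, "append"):
        error._details.append(json.dumps(cred_info))

In the generated clients this runs inside a try/except wrapper around the mixin RPCs (list_operations, get_iam_policy, and so on), after which the original exception is re-raised with its enriched details.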
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = DatastreamClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = DatastreamClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -14586,10 +14636,14 @@ def test_list_connection_profiles_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DatastreamRestInterceptor, "post_list_connection_profiles" ) as post, mock.patch.object( + transports.DatastreamRestInterceptor, + "post_list_connection_profiles_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DatastreamRestInterceptor, "pre_list_connection_profiles" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = datastream.ListConnectionProfilesRequest.pb( datastream.ListConnectionProfilesRequest() ) @@ -14615,6 +14669,10 @@ def test_list_connection_profiles_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = datastream.ListConnectionProfilesResponse() + post_with_metadata.return_value = ( + datastream.ListConnectionProfilesResponse(), + metadata, + ) client.list_connection_profiles( request, @@ -14626,6 +14684,7 @@ def test_list_connection_profiles_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_connection_profile_rest_bad_request( @@ -14716,10 +14775,14 @@ def test_get_connection_profile_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DatastreamRestInterceptor, "post_get_connection_profile" ) as post, mock.patch.object( + transports.DatastreamRestInterceptor, + "post_get_connection_profile_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DatastreamRestInterceptor, "pre_get_connection_profile" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = datastream.GetConnectionProfileRequest.pb( datastream.GetConnectionProfileRequest() ) @@ -14745,6 +14808,10 @@ def test_get_connection_profile_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = datastream_resources.ConnectionProfile() + post_with_metadata.return_value = ( + datastream_resources.ConnectionProfile(), + metadata, + ) client.get_connection_profile( request, @@ -14756,6 
+14823,7 @@ def test_get_connection_profile_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_connection_profile_rest_bad_request( @@ -14950,10 +15018,14 @@ def test_create_connection_profile_rest_interceptors(null_interceptor): ), mock.patch.object( transports.DatastreamRestInterceptor, "post_create_connection_profile" ) as post, mock.patch.object( + transports.DatastreamRestInterceptor, + "post_create_connection_profile_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DatastreamRestInterceptor, "pre_create_connection_profile" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = datastream.CreateConnectionProfileRequest.pb( datastream.CreateConnectionProfileRequest() ) @@ -14977,6 +15049,7 @@ def test_create_connection_profile_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_connection_profile( request, @@ -14988,6 +15061,7 @@ def test_create_connection_profile_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_connection_profile_rest_bad_request( @@ -15190,10 +15264,14 @@ def test_update_connection_profile_rest_interceptors(null_interceptor): ), mock.patch.object( transports.DatastreamRestInterceptor, "post_update_connection_profile" ) as post, mock.patch.object( + transports.DatastreamRestInterceptor, + "post_update_connection_profile_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DatastreamRestInterceptor, "pre_update_connection_profile" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = datastream.UpdateConnectionProfileRequest.pb( datastream.UpdateConnectionProfileRequest() ) @@ -15217,6 +15295,7 @@ def test_update_connection_profile_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.update_connection_profile( request, @@ -15228,6 +15307,7 @@ def test_update_connection_profile_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_connection_profile_rest_bad_request( @@ -15312,10 +15392,14 @@ def test_delete_connection_profile_rest_interceptors(null_interceptor): ), mock.patch.object( transports.DatastreamRestInterceptor, "post_delete_connection_profile" ) as post, mock.patch.object( + transports.DatastreamRestInterceptor, + "post_delete_connection_profile_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DatastreamRestInterceptor, "pre_delete_connection_profile" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = datastream.DeleteConnectionProfileRequest.pb( datastream.DeleteConnectionProfileRequest() ) @@ -15339,6 +15423,7 @@ def test_delete_connection_profile_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.delete_connection_profile( request, @@ -15350,6 +15435,7 @@ def 
test_delete_connection_profile_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_discover_connection_profile_rest_bad_request( @@ -15431,10 +15517,14 @@ def test_discover_connection_profile_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DatastreamRestInterceptor, "post_discover_connection_profile" ) as post, mock.patch.object( + transports.DatastreamRestInterceptor, + "post_discover_connection_profile_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DatastreamRestInterceptor, "pre_discover_connection_profile" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = datastream.DiscoverConnectionProfileRequest.pb( datastream.DiscoverConnectionProfileRequest() ) @@ -15460,6 +15550,10 @@ def test_discover_connection_profile_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = datastream.DiscoverConnectionProfileResponse() + post_with_metadata.return_value = ( + datastream.DiscoverConnectionProfileResponse(), + metadata, + ) client.discover_connection_profile( request, @@ -15471,6 +15565,7 @@ def test_discover_connection_profile_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_streams_rest_bad_request(request_type=datastream.ListStreamsRequest): @@ -15555,10 +15650,13 @@ def test_list_streams_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DatastreamRestInterceptor, "post_list_streams" ) as post, mock.patch.object( + transports.DatastreamRestInterceptor, "post_list_streams_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DatastreamRestInterceptor, "pre_list_streams" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = datastream.ListStreamsRequest.pb(datastream.ListStreamsRequest()) transcode.return_value = { "method": "post", @@ -15582,6 +15680,7 @@ def test_list_streams_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = datastream.ListStreamsResponse() + post_with_metadata.return_value = datastream.ListStreamsResponse(), metadata client.list_streams( request, @@ -15593,6 +15692,7 @@ def test_list_streams_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_stream_rest_bad_request(request_type=datastream.GetStreamRequest): @@ -15679,10 +15779,13 @@ def test_get_stream_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DatastreamRestInterceptor, "post_get_stream" ) as post, mock.patch.object( + transports.DatastreamRestInterceptor, "post_get_stream_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DatastreamRestInterceptor, "pre_get_stream" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = datastream.GetStreamRequest.pb(datastream.GetStreamRequest()) transcode.return_value = { "method": "post", @@ -15706,6 +15809,7 @@ def test_get_stream_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = datastream_resources.Stream() + post_with_metadata.return_value = datastream_resources.Stream(), metadata client.get_stream( request, @@ -15717,6 +15821,7 @@ def 
test_get_stream_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_stream_rest_bad_request(request_type=datastream.CreateStreamRequest): @@ -15949,10 +16054,13 @@ def test_create_stream_rest_interceptors(null_interceptor): ), mock.patch.object( transports.DatastreamRestInterceptor, "post_create_stream" ) as post, mock.patch.object( + transports.DatastreamRestInterceptor, "post_create_stream_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DatastreamRestInterceptor, "pre_create_stream" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = datastream.CreateStreamRequest.pb(datastream.CreateStreamRequest()) transcode.return_value = { "method": "post", @@ -15974,6 +16082,7 @@ def test_create_stream_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_stream( request, @@ -15985,6 +16094,7 @@ def test_create_stream_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_stream_rest_bad_request(request_type=datastream.UpdateStreamRequest): @@ -16221,10 +16331,13 @@ def test_update_stream_rest_interceptors(null_interceptor): ), mock.patch.object( transports.DatastreamRestInterceptor, "post_update_stream" ) as post, mock.patch.object( + transports.DatastreamRestInterceptor, "post_update_stream_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DatastreamRestInterceptor, "pre_update_stream" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = datastream.UpdateStreamRequest.pb(datastream.UpdateStreamRequest()) transcode.return_value = { "method": "post", @@ -16246,6 +16359,7 @@ def test_update_stream_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.update_stream( request, @@ -16257,6 +16371,7 @@ def test_update_stream_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_stream_rest_bad_request(request_type=datastream.DeleteStreamRequest): @@ -16335,10 +16450,13 @@ def test_delete_stream_rest_interceptors(null_interceptor): ), mock.patch.object( transports.DatastreamRestInterceptor, "post_delete_stream" ) as post, mock.patch.object( + transports.DatastreamRestInterceptor, "post_delete_stream_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DatastreamRestInterceptor, "pre_delete_stream" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = datastream.DeleteStreamRequest.pb(datastream.DeleteStreamRequest()) transcode.return_value = { "method": "post", @@ -16360,6 +16478,7 @@ def test_delete_stream_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.delete_stream( request, @@ -16371,6 +16490,7 @@ def test_delete_stream_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + 
post_with_metadata.assert_called_once() def test_fetch_errors_rest_bad_request(request_type=datastream.FetchErrorsRequest): @@ -16449,10 +16569,13 @@ def test_fetch_errors_rest_interceptors(null_interceptor): ), mock.patch.object( transports.DatastreamRestInterceptor, "post_fetch_errors" ) as post, mock.patch.object( + transports.DatastreamRestInterceptor, "post_fetch_errors_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DatastreamRestInterceptor, "pre_fetch_errors" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = datastream.FetchErrorsRequest.pb(datastream.FetchErrorsRequest()) transcode.return_value = { "method": "post", @@ -16474,6 +16597,7 @@ def test_fetch_errors_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.fetch_errors( request, @@ -16485,6 +16609,7 @@ def test_fetch_errors_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_fetch_static_ips_rest_bad_request( @@ -16571,10 +16696,13 @@ def test_fetch_static_ips_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DatastreamRestInterceptor, "post_fetch_static_ips" ) as post, mock.patch.object( + transports.DatastreamRestInterceptor, "post_fetch_static_ips_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DatastreamRestInterceptor, "pre_fetch_static_ips" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = datastream.FetchStaticIpsRequest.pb( datastream.FetchStaticIpsRequest() ) @@ -16600,6 +16728,7 @@ def test_fetch_static_ips_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = datastream.FetchStaticIpsResponse() + post_with_metadata.return_value = datastream.FetchStaticIpsResponse(), metadata client.fetch_static_ips( request, @@ -16611,6 +16740,7 @@ def test_fetch_static_ips_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_private_connection_rest_bad_request( @@ -16776,10 +16906,14 @@ def test_create_private_connection_rest_interceptors(null_interceptor): ), mock.patch.object( transports.DatastreamRestInterceptor, "post_create_private_connection" ) as post, mock.patch.object( + transports.DatastreamRestInterceptor, + "post_create_private_connection_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DatastreamRestInterceptor, "pre_create_private_connection" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = datastream.CreatePrivateConnectionRequest.pb( datastream.CreatePrivateConnectionRequest() ) @@ -16803,6 +16937,7 @@ def test_create_private_connection_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_private_connection( request, @@ -16814,6 +16949,7 @@ def test_create_private_connection_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_private_connection_rest_bad_request( @@ -16906,10 +17042,14 @@ def 
test_get_private_connection_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DatastreamRestInterceptor, "post_get_private_connection" ) as post, mock.patch.object( + transports.DatastreamRestInterceptor, + "post_get_private_connection_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DatastreamRestInterceptor, "pre_get_private_connection" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = datastream.GetPrivateConnectionRequest.pb( datastream.GetPrivateConnectionRequest() ) @@ -16935,6 +17075,10 @@ def test_get_private_connection_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = datastream_resources.PrivateConnection() + post_with_metadata.return_value = ( + datastream_resources.PrivateConnection(), + metadata, + ) client.get_private_connection( request, @@ -16946,6 +17090,7 @@ def test_get_private_connection_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_private_connections_rest_bad_request( @@ -17032,10 +17177,14 @@ def test_list_private_connections_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DatastreamRestInterceptor, "post_list_private_connections" ) as post, mock.patch.object( + transports.DatastreamRestInterceptor, + "post_list_private_connections_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DatastreamRestInterceptor, "pre_list_private_connections" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = datastream.ListPrivateConnectionsRequest.pb( datastream.ListPrivateConnectionsRequest() ) @@ -17061,6 +17210,10 @@ def test_list_private_connections_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = datastream.ListPrivateConnectionsResponse() + post_with_metadata.return_value = ( + datastream.ListPrivateConnectionsResponse(), + metadata, + ) client.list_private_connections( request, @@ -17072,6 +17225,7 @@ def test_list_private_connections_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_private_connection_rest_bad_request( @@ -17156,10 +17310,14 @@ def test_delete_private_connection_rest_interceptors(null_interceptor): ), mock.patch.object( transports.DatastreamRestInterceptor, "post_delete_private_connection" ) as post, mock.patch.object( + transports.DatastreamRestInterceptor, + "post_delete_private_connection_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DatastreamRestInterceptor, "pre_delete_private_connection" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = datastream.DeletePrivateConnectionRequest.pb( datastream.DeletePrivateConnectionRequest() ) @@ -17183,6 +17341,7 @@ def test_delete_private_connection_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.delete_private_connection( request, @@ -17194,6 +17353,7 @@ def test_delete_private_connection_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def 
test_create_route_rest_bad_request(request_type=datastream.CreateRouteRequest): @@ -17352,10 +17512,13 @@ def test_create_route_rest_interceptors(null_interceptor): ), mock.patch.object( transports.DatastreamRestInterceptor, "post_create_route" ) as post, mock.patch.object( + transports.DatastreamRestInterceptor, "post_create_route_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DatastreamRestInterceptor, "pre_create_route" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = datastream.CreateRouteRequest.pb(datastream.CreateRouteRequest()) transcode.return_value = { "method": "post", @@ -17377,6 +17540,7 @@ def test_create_route_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_route( request, @@ -17388,6 +17552,7 @@ def test_create_route_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_route_rest_bad_request(request_type=datastream.GetRouteRequest): @@ -17480,10 +17645,13 @@ def test_get_route_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DatastreamRestInterceptor, "post_get_route" ) as post, mock.patch.object( + transports.DatastreamRestInterceptor, "post_get_route_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DatastreamRestInterceptor, "pre_get_route" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = datastream.GetRouteRequest.pb(datastream.GetRouteRequest()) transcode.return_value = { "method": "post", @@ -17505,6 +17673,7 @@ def test_get_route_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = datastream_resources.Route() + post_with_metadata.return_value = datastream_resources.Route(), metadata client.get_route( request, @@ -17516,6 +17685,7 @@ def test_get_route_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_routes_rest_bad_request(request_type=datastream.ListRoutesRequest): @@ -17604,10 +17774,13 @@ def test_list_routes_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DatastreamRestInterceptor, "post_list_routes" ) as post, mock.patch.object( + transports.DatastreamRestInterceptor, "post_list_routes_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DatastreamRestInterceptor, "pre_list_routes" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = datastream.ListRoutesRequest.pb(datastream.ListRoutesRequest()) transcode.return_value = { "method": "post", @@ -17631,6 +17804,7 @@ def test_list_routes_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = datastream.ListRoutesResponse() + post_with_metadata.return_value = datastream.ListRoutesResponse(), metadata client.list_routes( request, @@ -17642,6 +17816,7 @@ def test_list_routes_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_route_rest_bad_request(request_type=datastream.DeleteRouteRequest): @@ -17724,10 +17899,13 @@ def test_delete_route_rest_interceptors(null_interceptor): ), 
mock.patch.object( transports.DatastreamRestInterceptor, "post_delete_route" ) as post, mock.patch.object( + transports.DatastreamRestInterceptor, "post_delete_route_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DatastreamRestInterceptor, "pre_delete_route" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = datastream.DeleteRouteRequest.pb(datastream.DeleteRouteRequest()) transcode.return_value = { "method": "post", @@ -17749,6 +17927,7 @@ def test_delete_route_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.delete_route( request, @@ -17760,6 +17939,7 @@ def test_delete_route_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_initialize_client_w_rest(): diff --git a/packages/google-cloud-deploy/CHANGELOG.md b/packages/google-cloud-deploy/CHANGELOG.md index a7a384fc8844..b7ec50c392be 100644 --- a/packages/google-cloud-deploy/CHANGELOG.md +++ b/packages/google-cloud-deploy/CHANGELOG.md @@ -1,5 +1,18 @@ # Changelog +## [2.6.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-deploy-v2.5.0...google-cloud-deploy-v2.6.0) (2025-02-12) + + +### Features + +* Add REST Interceptors which support reading metadata ([e92d527](https://github.com/googleapis/google-cloud-python/commit/e92d52797ffbce45d033eb81af24e0cad32baa55)) +* Add support for reading selective GAPIC generation methods from service YAML ([e92d527](https://github.com/googleapis/google-cloud-python/commit/e92d52797ffbce45d033eb81af24e0cad32baa55)) + + +### Documentation + +* Minor documentation improvements ([3e64234](https://github.com/googleapis/google-cloud-python/commit/3e64234e201bbbaaceb39e8b0da0258c3d5be3b2)) + ## [2.5.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-deploy-v2.4.1...google-cloud-deploy-v2.5.0) (2024-12-12) diff --git a/packages/google-cloud-deploy/deploy-v1-py.tar.gz b/packages/google-cloud-deploy/deploy-v1-py.tar.gz new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/packages/google-cloud-deploy/google/cloud/deploy/gapic_version.py b/packages/google-cloud-deploy/google/cloud/deploy/gapic_version.py index 21f7deacf8df..14799a6aa533 100644 --- a/packages/google-cloud-deploy/google/cloud/deploy/gapic_version.py +++ b/packages/google-cloud-deploy/google/cloud/deploy/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.5.0" # {x-release-please-version} +__version__ = "2.6.0" # {x-release-please-version} diff --git a/packages/google-cloud-deploy/google/cloud/deploy_v1/gapic_version.py b/packages/google-cloud-deploy/google/cloud/deploy_v1/gapic_version.py index 21f7deacf8df..14799a6aa533 100644 --- a/packages/google-cloud-deploy/google/cloud/deploy_v1/gapic_version.py +++ b/packages/google-cloud-deploy/google/cloud/deploy_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "2.5.0" # {x-release-please-version} +__version__ = "2.6.0" # {x-release-please-version} diff --git a/packages/google-cloud-deploy/google/cloud/deploy_v1/services/cloud_deploy/client.py b/packages/google-cloud-deploy/google/cloud/deploy_v1/services/cloud_deploy/client.py index a074a3469f0a..9e0a1ce77f2f 100644 --- a/packages/google-cloud-deploy/google/cloud/deploy_v1/services/cloud_deploy/client.py +++ b/packages/google-cloud-deploy/google/cloud/deploy_v1/services/cloud_deploy/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -866,6 +868,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -6618,16 +6647,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -6673,16 +6706,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def delete_operation( self, @@ -6905,16 +6942,20 @@ def set_iam_policy( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_iam_policy( self, @@ -7027,16 +7068,20 @@ def get_iam_policy( # Validate the universe domain. self._validate_universe_domain() - # Send the request. 
- response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def test_iam_permissions( self, @@ -7087,16 +7132,20 @@ def test_iam_permissions( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_location( self, @@ -7142,16 +7191,20 @@ def get_location( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def list_locations( self, @@ -7197,16 +7250,20 @@ def list_locations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-deploy/google/cloud/deploy_v1/services/cloud_deploy/transports/rest.py b/packages/google-cloud-deploy/google/cloud/deploy_v1/services/cloud_deploy/transports/rest.py index c17ab4438f70..cbca0c6a4167 100644 --- a/packages/google-cloud-deploy/google/cloud/deploy_v1/services/cloud_deploy/transports/rest.py +++ b/packages/google-cloud-deploy/google/cloud/deploy_v1/services/cloud_deploy/transports/rest.py @@ -457,12 +457,37 @@ def post_abandon_release( ) -> cloud_deploy.AbandonReleaseResponse: """Post-rpc interceptor for abandon_release - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_abandon_release_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CloudDeploy server but before - it is returned to user code. + it is returned to user code. This `post_abandon_release` interceptor runs + before the `post_abandon_release_with_metadata` interceptor. 
""" return response + def post_abandon_release_with_metadata( + self, + response: cloud_deploy.AbandonReleaseResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + cloud_deploy.AbandonReleaseResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for abandon_release + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudDeploy server but before it is returned to user code. + + We recommend only using this `post_abandon_release_with_metadata` + interceptor in new development instead of the `post_abandon_release` interceptor. + When both interceptors are used, this `post_abandon_release_with_metadata` interceptor runs after the + `post_abandon_release` interceptor. The (possibly modified) response returned by + `post_abandon_release` will be passed to + `post_abandon_release_with_metadata`. + """ + return response, metadata + def pre_advance_rollout( self, request: cloud_deploy.AdvanceRolloutRequest, @@ -482,12 +507,37 @@ def post_advance_rollout( ) -> cloud_deploy.AdvanceRolloutResponse: """Post-rpc interceptor for advance_rollout - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_advance_rollout_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CloudDeploy server but before - it is returned to user code. + it is returned to user code. This `post_advance_rollout` interceptor runs + before the `post_advance_rollout_with_metadata` interceptor. """ return response + def post_advance_rollout_with_metadata( + self, + response: cloud_deploy.AdvanceRolloutResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + cloud_deploy.AdvanceRolloutResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for advance_rollout + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudDeploy server but before it is returned to user code. + + We recommend only using this `post_advance_rollout_with_metadata` + interceptor in new development instead of the `post_advance_rollout` interceptor. + When both interceptors are used, this `post_advance_rollout_with_metadata` interceptor runs after the + `post_advance_rollout` interceptor. The (possibly modified) response returned by + `post_advance_rollout` will be passed to + `post_advance_rollout_with_metadata`. + """ + return response, metadata + def pre_approve_rollout( self, request: cloud_deploy.ApproveRolloutRequest, @@ -507,12 +557,37 @@ def post_approve_rollout( ) -> cloud_deploy.ApproveRolloutResponse: """Post-rpc interceptor for approve_rollout - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_approve_rollout_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CloudDeploy server but before - it is returned to user code. + it is returned to user code. This `post_approve_rollout` interceptor runs + before the `post_approve_rollout_with_metadata` interceptor. 
""" return response + def post_approve_rollout_with_metadata( + self, + response: cloud_deploy.ApproveRolloutResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + cloud_deploy.ApproveRolloutResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for approve_rollout + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudDeploy server but before it is returned to user code. + + We recommend only using this `post_approve_rollout_with_metadata` + interceptor in new development instead of the `post_approve_rollout` interceptor. + When both interceptors are used, this `post_approve_rollout_with_metadata` interceptor runs after the + `post_approve_rollout` interceptor. The (possibly modified) response returned by + `post_approve_rollout` will be passed to + `post_approve_rollout_with_metadata`. + """ + return response, metadata + def pre_cancel_automation_run( self, request: cloud_deploy.CancelAutomationRunRequest, @@ -532,12 +607,38 @@ def post_cancel_automation_run( ) -> cloud_deploy.CancelAutomationRunResponse: """Post-rpc interceptor for cancel_automation_run - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_cancel_automation_run_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CloudDeploy server but before - it is returned to user code. + it is returned to user code. This `post_cancel_automation_run` interceptor runs + before the `post_cancel_automation_run_with_metadata` interceptor. """ return response + def post_cancel_automation_run_with_metadata( + self, + response: cloud_deploy.CancelAutomationRunResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + cloud_deploy.CancelAutomationRunResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for cancel_automation_run + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudDeploy server but before it is returned to user code. + + We recommend only using this `post_cancel_automation_run_with_metadata` + interceptor in new development instead of the `post_cancel_automation_run` interceptor. + When both interceptors are used, this `post_cancel_automation_run_with_metadata` interceptor runs after the + `post_cancel_automation_run` interceptor. The (possibly modified) response returned by + `post_cancel_automation_run` will be passed to + `post_cancel_automation_run_with_metadata`. + """ + return response, metadata + def pre_cancel_rollout( self, request: cloud_deploy.CancelRolloutRequest, @@ -557,12 +658,37 @@ def post_cancel_rollout( ) -> cloud_deploy.CancelRolloutResponse: """Post-rpc interceptor for cancel_rollout - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_cancel_rollout_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CloudDeploy server but before - it is returned to user code. + it is returned to user code. This `post_cancel_rollout` interceptor runs + before the `post_cancel_rollout_with_metadata` interceptor. 
""" return response + def post_cancel_rollout_with_metadata( + self, + response: cloud_deploy.CancelRolloutResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + cloud_deploy.CancelRolloutResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for cancel_rollout + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudDeploy server but before it is returned to user code. + + We recommend only using this `post_cancel_rollout_with_metadata` + interceptor in new development instead of the `post_cancel_rollout` interceptor. + When both interceptors are used, this `post_cancel_rollout_with_metadata` interceptor runs after the + `post_cancel_rollout` interceptor. The (possibly modified) response returned by + `post_cancel_rollout` will be passed to + `post_cancel_rollout_with_metadata`. + """ + return response, metadata + def pre_create_automation( self, request: cloud_deploy.CreateAutomationRequest, @@ -582,12 +708,35 @@ def post_create_automation( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_automation - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_automation_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CloudDeploy server but before - it is returned to user code. + it is returned to user code. This `post_create_automation` interceptor runs + before the `post_create_automation_with_metadata` interceptor. """ return response + def post_create_automation_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_automation + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudDeploy server but before it is returned to user code. + + We recommend only using this `post_create_automation_with_metadata` + interceptor in new development instead of the `post_create_automation` interceptor. + When both interceptors are used, this `post_create_automation_with_metadata` interceptor runs after the + `post_create_automation` interceptor. The (possibly modified) response returned by + `post_create_automation` will be passed to + `post_create_automation_with_metadata`. + """ + return response, metadata + def pre_create_custom_target_type( self, request: cloud_deploy.CreateCustomTargetTypeRequest, @@ -608,12 +757,35 @@ def post_create_custom_target_type( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_custom_target_type - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_custom_target_type_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CloudDeploy server but before - it is returned to user code. + it is returned to user code. This `post_create_custom_target_type` interceptor runs + before the `post_create_custom_target_type_with_metadata` interceptor. 
""" return response + def post_create_custom_target_type_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_custom_target_type + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudDeploy server but before it is returned to user code. + + We recommend only using this `post_create_custom_target_type_with_metadata` + interceptor in new development instead of the `post_create_custom_target_type` interceptor. + When both interceptors are used, this `post_create_custom_target_type_with_metadata` interceptor runs after the + `post_create_custom_target_type` interceptor. The (possibly modified) response returned by + `post_create_custom_target_type` will be passed to + `post_create_custom_target_type_with_metadata`. + """ + return response, metadata + def pre_create_delivery_pipeline( self, request: cloud_deploy.CreateDeliveryPipelineRequest, @@ -634,12 +806,35 @@ def post_create_delivery_pipeline( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_delivery_pipeline - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_delivery_pipeline_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CloudDeploy server but before - it is returned to user code. + it is returned to user code. This `post_create_delivery_pipeline` interceptor runs + before the `post_create_delivery_pipeline_with_metadata` interceptor. """ return response + def post_create_delivery_pipeline_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_delivery_pipeline + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudDeploy server but before it is returned to user code. + + We recommend only using this `post_create_delivery_pipeline_with_metadata` + interceptor in new development instead of the `post_create_delivery_pipeline` interceptor. + When both interceptors are used, this `post_create_delivery_pipeline_with_metadata` interceptor runs after the + `post_create_delivery_pipeline` interceptor. The (possibly modified) response returned by + `post_create_delivery_pipeline` will be passed to + `post_create_delivery_pipeline_with_metadata`. + """ + return response, metadata + def pre_create_deploy_policy( self, request: cloud_deploy.CreateDeployPolicyRequest, @@ -659,12 +854,35 @@ def post_create_deploy_policy( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_deploy_policy - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_deploy_policy_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CloudDeploy server but before - it is returned to user code. + it is returned to user code. This `post_create_deploy_policy` interceptor runs + before the `post_create_deploy_policy_with_metadata` interceptor. 
""" return response + def post_create_deploy_policy_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_deploy_policy + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudDeploy server but before it is returned to user code. + + We recommend only using this `post_create_deploy_policy_with_metadata` + interceptor in new development instead of the `post_create_deploy_policy` interceptor. + When both interceptors are used, this `post_create_deploy_policy_with_metadata` interceptor runs after the + `post_create_deploy_policy` interceptor. The (possibly modified) response returned by + `post_create_deploy_policy` will be passed to + `post_create_deploy_policy_with_metadata`. + """ + return response, metadata + def pre_create_release( self, request: cloud_deploy.CreateReleaseRequest, @@ -684,12 +902,35 @@ def post_create_release( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_release - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_release_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CloudDeploy server but before - it is returned to user code. + it is returned to user code. This `post_create_release` interceptor runs + before the `post_create_release_with_metadata` interceptor. """ return response + def post_create_release_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_release + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudDeploy server but before it is returned to user code. + + We recommend only using this `post_create_release_with_metadata` + interceptor in new development instead of the `post_create_release` interceptor. + When both interceptors are used, this `post_create_release_with_metadata` interceptor runs after the + `post_create_release` interceptor. The (possibly modified) response returned by + `post_create_release` will be passed to + `post_create_release_with_metadata`. + """ + return response, metadata + def pre_create_rollout( self, request: cloud_deploy.CreateRolloutRequest, @@ -709,12 +950,35 @@ def post_create_rollout( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_rollout - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_rollout_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CloudDeploy server but before - it is returned to user code. + it is returned to user code. This `post_create_rollout` interceptor runs + before the `post_create_rollout_with_metadata` interceptor. """ return response + def post_create_rollout_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_rollout + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudDeploy server but before it is returned to user code. 
+ + We recommend only using this `post_create_rollout_with_metadata` + interceptor in new development instead of the `post_create_rollout` interceptor. + When both interceptors are used, this `post_create_rollout_with_metadata` interceptor runs after the + `post_create_rollout` interceptor. The (possibly modified) response returned by + `post_create_rollout` will be passed to + `post_create_rollout_with_metadata`. + """ + return response, metadata + def pre_create_target( self, request: cloud_deploy.CreateTargetRequest, @@ -734,12 +998,35 @@ def post_create_target( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_target - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_target_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CloudDeploy server but before - it is returned to user code. + it is returned to user code. This `post_create_target` interceptor runs + before the `post_create_target_with_metadata` interceptor. """ return response + def post_create_target_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_target + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudDeploy server but before it is returned to user code. + + We recommend only using this `post_create_target_with_metadata` + interceptor in new development instead of the `post_create_target` interceptor. + When both interceptors are used, this `post_create_target_with_metadata` interceptor runs after the + `post_create_target` interceptor. The (possibly modified) response returned by + `post_create_target` will be passed to + `post_create_target_with_metadata`. + """ + return response, metadata + def pre_delete_automation( self, request: cloud_deploy.DeleteAutomationRequest, @@ -759,12 +1046,35 @@ def post_delete_automation( ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_automation - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_automation_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CloudDeploy server but before - it is returned to user code. + it is returned to user code. This `post_delete_automation` interceptor runs + before the `post_delete_automation_with_metadata` interceptor. """ return response + def post_delete_automation_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_automation + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudDeploy server but before it is returned to user code. + + We recommend only using this `post_delete_automation_with_metadata` + interceptor in new development instead of the `post_delete_automation` interceptor. + When both interceptors are used, this `post_delete_automation_with_metadata` interceptor runs after the + `post_delete_automation` interceptor. The (possibly modified) response returned by + `post_delete_automation` will be passed to + `post_delete_automation_with_metadata`. 
+ """ + return response, metadata + def pre_delete_custom_target_type( self, request: cloud_deploy.DeleteCustomTargetTypeRequest, @@ -785,12 +1095,35 @@ def post_delete_custom_target_type( ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_custom_target_type - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_custom_target_type_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CloudDeploy server but before - it is returned to user code. + it is returned to user code. This `post_delete_custom_target_type` interceptor runs + before the `post_delete_custom_target_type_with_metadata` interceptor. """ return response + def post_delete_custom_target_type_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_custom_target_type + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudDeploy server but before it is returned to user code. + + We recommend only using this `post_delete_custom_target_type_with_metadata` + interceptor in new development instead of the `post_delete_custom_target_type` interceptor. + When both interceptors are used, this `post_delete_custom_target_type_with_metadata` interceptor runs after the + `post_delete_custom_target_type` interceptor. The (possibly modified) response returned by + `post_delete_custom_target_type` will be passed to + `post_delete_custom_target_type_with_metadata`. + """ + return response, metadata + def pre_delete_delivery_pipeline( self, request: cloud_deploy.DeleteDeliveryPipelineRequest, @@ -811,12 +1144,35 @@ def post_delete_delivery_pipeline( ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_delivery_pipeline - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_delivery_pipeline_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CloudDeploy server but before - it is returned to user code. + it is returned to user code. This `post_delete_delivery_pipeline` interceptor runs + before the `post_delete_delivery_pipeline_with_metadata` interceptor. """ return response + def post_delete_delivery_pipeline_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_delivery_pipeline + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudDeploy server but before it is returned to user code. + + We recommend only using this `post_delete_delivery_pipeline_with_metadata` + interceptor in new development instead of the `post_delete_delivery_pipeline` interceptor. + When both interceptors are used, this `post_delete_delivery_pipeline_with_metadata` interceptor runs after the + `post_delete_delivery_pipeline` interceptor. The (possibly modified) response returned by + `post_delete_delivery_pipeline` will be passed to + `post_delete_delivery_pipeline_with_metadata`. 
+ """ + return response, metadata + def pre_delete_deploy_policy( self, request: cloud_deploy.DeleteDeployPolicyRequest, @@ -836,12 +1192,35 @@ def post_delete_deploy_policy( ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_deploy_policy - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_deploy_policy_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CloudDeploy server but before - it is returned to user code. + it is returned to user code. This `post_delete_deploy_policy` interceptor runs + before the `post_delete_deploy_policy_with_metadata` interceptor. """ return response + def post_delete_deploy_policy_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_deploy_policy + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudDeploy server but before it is returned to user code. + + We recommend only using this `post_delete_deploy_policy_with_metadata` + interceptor in new development instead of the `post_delete_deploy_policy` interceptor. + When both interceptors are used, this `post_delete_deploy_policy_with_metadata` interceptor runs after the + `post_delete_deploy_policy` interceptor. The (possibly modified) response returned by + `post_delete_deploy_policy` will be passed to + `post_delete_deploy_policy_with_metadata`. + """ + return response, metadata + def pre_delete_target( self, request: cloud_deploy.DeleteTargetRequest, @@ -861,12 +1240,35 @@ def post_delete_target( ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_target - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_target_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CloudDeploy server but before - it is returned to user code. + it is returned to user code. This `post_delete_target` interceptor runs + before the `post_delete_target_with_metadata` interceptor. """ return response + def post_delete_target_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_target + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudDeploy server but before it is returned to user code. + + We recommend only using this `post_delete_target_with_metadata` + interceptor in new development instead of the `post_delete_target` interceptor. + When both interceptors are used, this `post_delete_target_with_metadata` interceptor runs after the + `post_delete_target` interceptor. The (possibly modified) response returned by + `post_delete_target` will be passed to + `post_delete_target_with_metadata`. + """ + return response, metadata + def pre_get_automation( self, request: cloud_deploy.GetAutomationRequest, @@ -886,12 +1288,35 @@ def post_get_automation( ) -> cloud_deploy.Automation: """Post-rpc interceptor for get_automation - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_automation_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response after it is returned by the CloudDeploy server but before - it is returned to user code. + it is returned to user code. This `post_get_automation` interceptor runs + before the `post_get_automation_with_metadata` interceptor. """ return response + def post_get_automation_with_metadata( + self, + response: cloud_deploy.Automation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[cloud_deploy.Automation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_automation + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudDeploy server but before it is returned to user code. + + We recommend only using this `post_get_automation_with_metadata` + interceptor in new development instead of the `post_get_automation` interceptor. + When both interceptors are used, this `post_get_automation_with_metadata` interceptor runs after the + `post_get_automation` interceptor. The (possibly modified) response returned by + `post_get_automation` will be passed to + `post_get_automation_with_metadata`. + """ + return response, metadata + def pre_get_automation_run( self, request: cloud_deploy.GetAutomationRunRequest, @@ -911,12 +1336,35 @@ def post_get_automation_run( ) -> cloud_deploy.AutomationRun: """Post-rpc interceptor for get_automation_run - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_automation_run_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CloudDeploy server but before - it is returned to user code. + it is returned to user code. This `post_get_automation_run` interceptor runs + before the `post_get_automation_run_with_metadata` interceptor. """ return response + def post_get_automation_run_with_metadata( + self, + response: cloud_deploy.AutomationRun, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[cloud_deploy.AutomationRun, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_automation_run + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudDeploy server but before it is returned to user code. + + We recommend only using this `post_get_automation_run_with_metadata` + interceptor in new development instead of the `post_get_automation_run` interceptor. + When both interceptors are used, this `post_get_automation_run_with_metadata` interceptor runs after the + `post_get_automation_run` interceptor. The (possibly modified) response returned by + `post_get_automation_run` will be passed to + `post_get_automation_run_with_metadata`. + """ + return response, metadata + def pre_get_config( self, request: cloud_deploy.GetConfigRequest, @@ -932,12 +1380,35 @@ def pre_get_config( def post_get_config(self, response: cloud_deploy.Config) -> cloud_deploy.Config: """Post-rpc interceptor for get_config - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_config_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CloudDeploy server but before - it is returned to user code. + it is returned to user code. This `post_get_config` interceptor runs + before the `post_get_config_with_metadata` interceptor. 
""" return response + def post_get_config_with_metadata( + self, + response: cloud_deploy.Config, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[cloud_deploy.Config, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_config + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudDeploy server but before it is returned to user code. + + We recommend only using this `post_get_config_with_metadata` + interceptor in new development instead of the `post_get_config` interceptor. + When both interceptors are used, this `post_get_config_with_metadata` interceptor runs after the + `post_get_config` interceptor. The (possibly modified) response returned by + `post_get_config` will be passed to + `post_get_config_with_metadata`. + """ + return response, metadata + def pre_get_custom_target_type( self, request: cloud_deploy.GetCustomTargetTypeRequest, @@ -957,12 +1428,35 @@ def post_get_custom_target_type( ) -> cloud_deploy.CustomTargetType: """Post-rpc interceptor for get_custom_target_type - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_custom_target_type_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CloudDeploy server but before - it is returned to user code. + it is returned to user code. This `post_get_custom_target_type` interceptor runs + before the `post_get_custom_target_type_with_metadata` interceptor. """ return response + def post_get_custom_target_type_with_metadata( + self, + response: cloud_deploy.CustomTargetType, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[cloud_deploy.CustomTargetType, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_custom_target_type + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudDeploy server but before it is returned to user code. + + We recommend only using this `post_get_custom_target_type_with_metadata` + interceptor in new development instead of the `post_get_custom_target_type` interceptor. + When both interceptors are used, this `post_get_custom_target_type_with_metadata` interceptor runs after the + `post_get_custom_target_type` interceptor. The (possibly modified) response returned by + `post_get_custom_target_type` will be passed to + `post_get_custom_target_type_with_metadata`. + """ + return response, metadata + def pre_get_delivery_pipeline( self, request: cloud_deploy.GetDeliveryPipelineRequest, @@ -982,12 +1476,35 @@ def post_get_delivery_pipeline( ) -> cloud_deploy.DeliveryPipeline: """Post-rpc interceptor for get_delivery_pipeline - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_delivery_pipeline_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CloudDeploy server but before - it is returned to user code. + it is returned to user code. This `post_get_delivery_pipeline` interceptor runs + before the `post_get_delivery_pipeline_with_metadata` interceptor. 
""" return response + def post_get_delivery_pipeline_with_metadata( + self, + response: cloud_deploy.DeliveryPipeline, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[cloud_deploy.DeliveryPipeline, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_delivery_pipeline + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudDeploy server but before it is returned to user code. + + We recommend only using this `post_get_delivery_pipeline_with_metadata` + interceptor in new development instead of the `post_get_delivery_pipeline` interceptor. + When both interceptors are used, this `post_get_delivery_pipeline_with_metadata` interceptor runs after the + `post_get_delivery_pipeline` interceptor. The (possibly modified) response returned by + `post_get_delivery_pipeline` will be passed to + `post_get_delivery_pipeline_with_metadata`. + """ + return response, metadata + def pre_get_deploy_policy( self, request: cloud_deploy.GetDeployPolicyRequest, @@ -1007,12 +1524,35 @@ def post_get_deploy_policy( ) -> cloud_deploy.DeployPolicy: """Post-rpc interceptor for get_deploy_policy - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_deploy_policy_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CloudDeploy server but before - it is returned to user code. + it is returned to user code. This `post_get_deploy_policy` interceptor runs + before the `post_get_deploy_policy_with_metadata` interceptor. """ return response + def post_get_deploy_policy_with_metadata( + self, + response: cloud_deploy.DeployPolicy, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[cloud_deploy.DeployPolicy, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_deploy_policy + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudDeploy server but before it is returned to user code. + + We recommend only using this `post_get_deploy_policy_with_metadata` + interceptor in new development instead of the `post_get_deploy_policy` interceptor. + When both interceptors are used, this `post_get_deploy_policy_with_metadata` interceptor runs after the + `post_get_deploy_policy` interceptor. The (possibly modified) response returned by + `post_get_deploy_policy` will be passed to + `post_get_deploy_policy_with_metadata`. + """ + return response, metadata + def pre_get_job_run( self, request: cloud_deploy.GetJobRunRequest, @@ -1028,12 +1568,35 @@ def pre_get_job_run( def post_get_job_run(self, response: cloud_deploy.JobRun) -> cloud_deploy.JobRun: """Post-rpc interceptor for get_job_run - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_job_run_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CloudDeploy server but before - it is returned to user code. + it is returned to user code. This `post_get_job_run` interceptor runs + before the `post_get_job_run_with_metadata` interceptor. 
""" return response + def post_get_job_run_with_metadata( + self, + response: cloud_deploy.JobRun, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[cloud_deploy.JobRun, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_job_run + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudDeploy server but before it is returned to user code. + + We recommend only using this `post_get_job_run_with_metadata` + interceptor in new development instead of the `post_get_job_run` interceptor. + When both interceptors are used, this `post_get_job_run_with_metadata` interceptor runs after the + `post_get_job_run` interceptor. The (possibly modified) response returned by + `post_get_job_run` will be passed to + `post_get_job_run_with_metadata`. + """ + return response, metadata + def pre_get_release( self, request: cloud_deploy.GetReleaseRequest, @@ -1049,12 +1612,35 @@ def pre_get_release( def post_get_release(self, response: cloud_deploy.Release) -> cloud_deploy.Release: """Post-rpc interceptor for get_release - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_release_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CloudDeploy server but before - it is returned to user code. + it is returned to user code. This `post_get_release` interceptor runs + before the `post_get_release_with_metadata` interceptor. """ return response + def post_get_release_with_metadata( + self, + response: cloud_deploy.Release, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[cloud_deploy.Release, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_release + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudDeploy server but before it is returned to user code. + + We recommend only using this `post_get_release_with_metadata` + interceptor in new development instead of the `post_get_release` interceptor. + When both interceptors are used, this `post_get_release_with_metadata` interceptor runs after the + `post_get_release` interceptor. The (possibly modified) response returned by + `post_get_release` will be passed to + `post_get_release_with_metadata`. + """ + return response, metadata + def pre_get_rollout( self, request: cloud_deploy.GetRolloutRequest, @@ -1070,12 +1656,35 @@ def pre_get_rollout( def post_get_rollout(self, response: cloud_deploy.Rollout) -> cloud_deploy.Rollout: """Post-rpc interceptor for get_rollout - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_rollout_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CloudDeploy server but before - it is returned to user code. + it is returned to user code. This `post_get_rollout` interceptor runs + before the `post_get_rollout_with_metadata` interceptor. """ return response + def post_get_rollout_with_metadata( + self, + response: cloud_deploy.Rollout, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[cloud_deploy.Rollout, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_rollout + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudDeploy server but before it is returned to user code. 
+ + We recommend only using this `post_get_rollout_with_metadata` + interceptor in new development instead of the `post_get_rollout` interceptor. + When both interceptors are used, this `post_get_rollout_with_metadata` interceptor runs after the + `post_get_rollout` interceptor. The (possibly modified) response returned by + `post_get_rollout` will be passed to + `post_get_rollout_with_metadata`. + """ + return response, metadata + def pre_get_target( self, request: cloud_deploy.GetTargetRequest, @@ -1091,11 +1700,34 @@ def pre_get_target( def post_get_target(self, response: cloud_deploy.Target) -> cloud_deploy.Target: """Post-rpc interceptor for get_target - Override in a subclass to manipulate the response - after it is returned by the CloudDeploy server but before - it is returned to user code. + DEPRECATED. Please use the `post_get_target_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the CloudDeploy server but before + it is returned to user code. This `post_get_target` interceptor runs + before the `post_get_target_with_metadata` interceptor. + """ + return response + + def post_get_target_with_metadata( + self, + response: cloud_deploy.Target, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[cloud_deploy.Target, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_target + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudDeploy server but before it is returned to user code. + + We recommend only using this `post_get_target_with_metadata` + interceptor in new development instead of the `post_get_target` interceptor. + When both interceptors are used, this `post_get_target_with_metadata` interceptor runs after the + `post_get_target` interceptor. The (possibly modified) response returned by + `post_get_target` will be passed to + `post_get_target_with_metadata`. """ - return response + return response, metadata def pre_ignore_job( self, @@ -1114,12 +1746,35 @@ def post_ignore_job( ) -> cloud_deploy.IgnoreJobResponse: """Post-rpc interceptor for ignore_job - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_ignore_job_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CloudDeploy server but before - it is returned to user code. + it is returned to user code. This `post_ignore_job` interceptor runs + before the `post_ignore_job_with_metadata` interceptor. """ return response + def post_ignore_job_with_metadata( + self, + response: cloud_deploy.IgnoreJobResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[cloud_deploy.IgnoreJobResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for ignore_job + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudDeploy server but before it is returned to user code. + + We recommend only using this `post_ignore_job_with_metadata` + interceptor in new development instead of the `post_ignore_job` interceptor. + When both interceptors are used, this `post_ignore_job_with_metadata` interceptor runs after the + `post_ignore_job` interceptor. The (possibly modified) response returned by + `post_ignore_job` will be passed to + `post_ignore_job_with_metadata`. 
+ """ + return response, metadata + def pre_list_automation_runs( self, request: cloud_deploy.ListAutomationRunsRequest, @@ -1139,12 +1794,37 @@ def post_list_automation_runs( ) -> cloud_deploy.ListAutomationRunsResponse: """Post-rpc interceptor for list_automation_runs - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_automation_runs_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CloudDeploy server but before - it is returned to user code. + it is returned to user code. This `post_list_automation_runs` interceptor runs + before the `post_list_automation_runs_with_metadata` interceptor. """ return response + def post_list_automation_runs_with_metadata( + self, + response: cloud_deploy.ListAutomationRunsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + cloud_deploy.ListAutomationRunsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_automation_runs + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudDeploy server but before it is returned to user code. + + We recommend only using this `post_list_automation_runs_with_metadata` + interceptor in new development instead of the `post_list_automation_runs` interceptor. + When both interceptors are used, this `post_list_automation_runs_with_metadata` interceptor runs after the + `post_list_automation_runs` interceptor. The (possibly modified) response returned by + `post_list_automation_runs` will be passed to + `post_list_automation_runs_with_metadata`. + """ + return response, metadata + def pre_list_automations( self, request: cloud_deploy.ListAutomationsRequest, @@ -1164,12 +1844,37 @@ def post_list_automations( ) -> cloud_deploy.ListAutomationsResponse: """Post-rpc interceptor for list_automations - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_automations_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CloudDeploy server but before - it is returned to user code. + it is returned to user code. This `post_list_automations` interceptor runs + before the `post_list_automations_with_metadata` interceptor. """ return response + def post_list_automations_with_metadata( + self, + response: cloud_deploy.ListAutomationsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + cloud_deploy.ListAutomationsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_automations + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudDeploy server but before it is returned to user code. + + We recommend only using this `post_list_automations_with_metadata` + interceptor in new development instead of the `post_list_automations` interceptor. + When both interceptors are used, this `post_list_automations_with_metadata` interceptor runs after the + `post_list_automations` interceptor. The (possibly modified) response returned by + `post_list_automations` will be passed to + `post_list_automations_with_metadata`. 
+ """ + return response, metadata + def pre_list_custom_target_types( self, request: cloud_deploy.ListCustomTargetTypesRequest, @@ -1190,12 +1895,38 @@ def post_list_custom_target_types( ) -> cloud_deploy.ListCustomTargetTypesResponse: """Post-rpc interceptor for list_custom_target_types - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_custom_target_types_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CloudDeploy server but before - it is returned to user code. + it is returned to user code. This `post_list_custom_target_types` interceptor runs + before the `post_list_custom_target_types_with_metadata` interceptor. """ return response + def post_list_custom_target_types_with_metadata( + self, + response: cloud_deploy.ListCustomTargetTypesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + cloud_deploy.ListCustomTargetTypesResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_custom_target_types + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudDeploy server but before it is returned to user code. + + We recommend only using this `post_list_custom_target_types_with_metadata` + interceptor in new development instead of the `post_list_custom_target_types` interceptor. + When both interceptors are used, this `post_list_custom_target_types_with_metadata` interceptor runs after the + `post_list_custom_target_types` interceptor. The (possibly modified) response returned by + `post_list_custom_target_types` will be passed to + `post_list_custom_target_types_with_metadata`. + """ + return response, metadata + def pre_list_delivery_pipelines( self, request: cloud_deploy.ListDeliveryPipelinesRequest, @@ -1216,12 +1947,38 @@ def post_list_delivery_pipelines( ) -> cloud_deploy.ListDeliveryPipelinesResponse: """Post-rpc interceptor for list_delivery_pipelines - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_delivery_pipelines_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CloudDeploy server but before - it is returned to user code. + it is returned to user code. This `post_list_delivery_pipelines` interceptor runs + before the `post_list_delivery_pipelines_with_metadata` interceptor. """ return response + def post_list_delivery_pipelines_with_metadata( + self, + response: cloud_deploy.ListDeliveryPipelinesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + cloud_deploy.ListDeliveryPipelinesResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_delivery_pipelines + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudDeploy server but before it is returned to user code. + + We recommend only using this `post_list_delivery_pipelines_with_metadata` + interceptor in new development instead of the `post_list_delivery_pipelines` interceptor. + When both interceptors are used, this `post_list_delivery_pipelines_with_metadata` interceptor runs after the + `post_list_delivery_pipelines` interceptor. The (possibly modified) response returned by + `post_list_delivery_pipelines` will be passed to + `post_list_delivery_pipelines_with_metadata`. 
+ """ + return response, metadata + def pre_list_deploy_policies( self, request: cloud_deploy.ListDeployPoliciesRequest, @@ -1241,12 +1998,37 @@ def post_list_deploy_policies( ) -> cloud_deploy.ListDeployPoliciesResponse: """Post-rpc interceptor for list_deploy_policies - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_deploy_policies_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CloudDeploy server but before - it is returned to user code. + it is returned to user code. This `post_list_deploy_policies` interceptor runs + before the `post_list_deploy_policies_with_metadata` interceptor. """ return response + def post_list_deploy_policies_with_metadata( + self, + response: cloud_deploy.ListDeployPoliciesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + cloud_deploy.ListDeployPoliciesResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_deploy_policies + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudDeploy server but before it is returned to user code. + + We recommend only using this `post_list_deploy_policies_with_metadata` + interceptor in new development instead of the `post_list_deploy_policies` interceptor. + When both interceptors are used, this `post_list_deploy_policies_with_metadata` interceptor runs after the + `post_list_deploy_policies` interceptor. The (possibly modified) response returned by + `post_list_deploy_policies` will be passed to + `post_list_deploy_policies_with_metadata`. + """ + return response, metadata + def pre_list_job_runs( self, request: cloud_deploy.ListJobRunsRequest, @@ -1266,12 +2048,37 @@ def post_list_job_runs( ) -> cloud_deploy.ListJobRunsResponse: """Post-rpc interceptor for list_job_runs - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_job_runs_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CloudDeploy server but before - it is returned to user code. + it is returned to user code. This `post_list_job_runs` interceptor runs + before the `post_list_job_runs_with_metadata` interceptor. """ return response + def post_list_job_runs_with_metadata( + self, + response: cloud_deploy.ListJobRunsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + cloud_deploy.ListJobRunsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_job_runs + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudDeploy server but before it is returned to user code. + + We recommend only using this `post_list_job_runs_with_metadata` + interceptor in new development instead of the `post_list_job_runs` interceptor. + When both interceptors are used, this `post_list_job_runs_with_metadata` interceptor runs after the + `post_list_job_runs` interceptor. The (possibly modified) response returned by + `post_list_job_runs` will be passed to + `post_list_job_runs_with_metadata`. + """ + return response, metadata + def pre_list_releases( self, request: cloud_deploy.ListReleasesRequest, @@ -1291,12 +2098,37 @@ def post_list_releases( ) -> cloud_deploy.ListReleasesResponse: """Post-rpc interceptor for list_releases - Override in a subclass to manipulate the response + DEPRECATED. 
Please use the `post_list_releases_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CloudDeploy server but before - it is returned to user code. + it is returned to user code. This `post_list_releases` interceptor runs + before the `post_list_releases_with_metadata` interceptor. """ return response + def post_list_releases_with_metadata( + self, + response: cloud_deploy.ListReleasesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + cloud_deploy.ListReleasesResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_releases + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudDeploy server but before it is returned to user code. + + We recommend only using this `post_list_releases_with_metadata` + interceptor in new development instead of the `post_list_releases` interceptor. + When both interceptors are used, this `post_list_releases_with_metadata` interceptor runs after the + `post_list_releases` interceptor. The (possibly modified) response returned by + `post_list_releases` will be passed to + `post_list_releases_with_metadata`. + """ + return response, metadata + def pre_list_rollouts( self, request: cloud_deploy.ListRolloutsRequest, @@ -1316,12 +2148,37 @@ def post_list_rollouts( ) -> cloud_deploy.ListRolloutsResponse: """Post-rpc interceptor for list_rollouts - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_rollouts_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CloudDeploy server but before - it is returned to user code. + it is returned to user code. This `post_list_rollouts` interceptor runs + before the `post_list_rollouts_with_metadata` interceptor. """ return response + def post_list_rollouts_with_metadata( + self, + response: cloud_deploy.ListRolloutsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + cloud_deploy.ListRolloutsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_rollouts + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudDeploy server but before it is returned to user code. + + We recommend only using this `post_list_rollouts_with_metadata` + interceptor in new development instead of the `post_list_rollouts` interceptor. + When both interceptors are used, this `post_list_rollouts_with_metadata` interceptor runs after the + `post_list_rollouts` interceptor. The (possibly modified) response returned by + `post_list_rollouts` will be passed to + `post_list_rollouts_with_metadata`. + """ + return response, metadata + def pre_list_targets( self, request: cloud_deploy.ListTargetsRequest, @@ -1341,12 +2198,37 @@ def post_list_targets( ) -> cloud_deploy.ListTargetsResponse: """Post-rpc interceptor for list_targets - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_targets_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CloudDeploy server but before - it is returned to user code. + it is returned to user code. This `post_list_targets` interceptor runs + before the `post_list_targets_with_metadata` interceptor. 
""" return response + def post_list_targets_with_metadata( + self, + response: cloud_deploy.ListTargetsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + cloud_deploy.ListTargetsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_targets + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudDeploy server but before it is returned to user code. + + We recommend only using this `post_list_targets_with_metadata` + interceptor in new development instead of the `post_list_targets` interceptor. + When both interceptors are used, this `post_list_targets_with_metadata` interceptor runs after the + `post_list_targets` interceptor. The (possibly modified) response returned by + `post_list_targets` will be passed to + `post_list_targets_with_metadata`. + """ + return response, metadata + def pre_retry_job( self, request: cloud_deploy.RetryJobRequest, @@ -1364,12 +2246,35 @@ def post_retry_job( ) -> cloud_deploy.RetryJobResponse: """Post-rpc interceptor for retry_job - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_retry_job_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CloudDeploy server but before - it is returned to user code. + it is returned to user code. This `post_retry_job` interceptor runs + before the `post_retry_job_with_metadata` interceptor. """ return response + def post_retry_job_with_metadata( + self, + response: cloud_deploy.RetryJobResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[cloud_deploy.RetryJobResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for retry_job + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudDeploy server but before it is returned to user code. + + We recommend only using this `post_retry_job_with_metadata` + interceptor in new development instead of the `post_retry_job` interceptor. + When both interceptors are used, this `post_retry_job_with_metadata` interceptor runs after the + `post_retry_job` interceptor. The (possibly modified) response returned by + `post_retry_job` will be passed to + `post_retry_job_with_metadata`. + """ + return response, metadata + def pre_rollback_target( self, request: cloud_deploy.RollbackTargetRequest, @@ -1389,12 +2294,37 @@ def post_rollback_target( ) -> cloud_deploy.RollbackTargetResponse: """Post-rpc interceptor for rollback_target - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_rollback_target_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CloudDeploy server but before - it is returned to user code. + it is returned to user code. This `post_rollback_target` interceptor runs + before the `post_rollback_target_with_metadata` interceptor. """ return response + def post_rollback_target_with_metadata( + self, + response: cloud_deploy.RollbackTargetResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + cloud_deploy.RollbackTargetResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for rollback_target + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudDeploy server but before it is returned to user code. 
+ + We recommend only using this `post_rollback_target_with_metadata` + interceptor in new development instead of the `post_rollback_target` interceptor. + When both interceptors are used, this `post_rollback_target_with_metadata` interceptor runs after the + `post_rollback_target` interceptor. The (possibly modified) response returned by + `post_rollback_target` will be passed to + `post_rollback_target_with_metadata`. + """ + return response, metadata + def pre_terminate_job_run( self, request: cloud_deploy.TerminateJobRunRequest, @@ -1414,12 +2344,37 @@ def post_terminate_job_run( ) -> cloud_deploy.TerminateJobRunResponse: """Post-rpc interceptor for terminate_job_run - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_terminate_job_run_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CloudDeploy server but before - it is returned to user code. + it is returned to user code. This `post_terminate_job_run` interceptor runs + before the `post_terminate_job_run_with_metadata` interceptor. """ return response + def post_terminate_job_run_with_metadata( + self, + response: cloud_deploy.TerminateJobRunResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + cloud_deploy.TerminateJobRunResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for terminate_job_run + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudDeploy server but before it is returned to user code. + + We recommend only using this `post_terminate_job_run_with_metadata` + interceptor in new development instead of the `post_terminate_job_run` interceptor. + When both interceptors are used, this `post_terminate_job_run_with_metadata` interceptor runs after the + `post_terminate_job_run` interceptor. The (possibly modified) response returned by + `post_terminate_job_run` will be passed to + `post_terminate_job_run_with_metadata`. + """ + return response, metadata + def pre_update_automation( self, request: cloud_deploy.UpdateAutomationRequest, @@ -1439,12 +2394,35 @@ def post_update_automation( ) -> operations_pb2.Operation: """Post-rpc interceptor for update_automation - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_automation_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CloudDeploy server but before - it is returned to user code. + it is returned to user code. This `post_update_automation` interceptor runs + before the `post_update_automation_with_metadata` interceptor. """ return response + def post_update_automation_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_automation + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudDeploy server but before it is returned to user code. + + We recommend only using this `post_update_automation_with_metadata` + interceptor in new development instead of the `post_update_automation` interceptor. + When both interceptors are used, this `post_update_automation_with_metadata` interceptor runs after the + `post_update_automation` interceptor. 
The (possibly modified) response returned by + `post_update_automation` will be passed to + `post_update_automation_with_metadata`. + """ + return response, metadata + def pre_update_custom_target_type( self, request: cloud_deploy.UpdateCustomTargetTypeRequest, @@ -1465,12 +2443,35 @@ def post_update_custom_target_type( ) -> operations_pb2.Operation: """Post-rpc interceptor for update_custom_target_type - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_custom_target_type_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CloudDeploy server but before - it is returned to user code. + it is returned to user code. This `post_update_custom_target_type` interceptor runs + before the `post_update_custom_target_type_with_metadata` interceptor. """ return response + def post_update_custom_target_type_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_custom_target_type + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudDeploy server but before it is returned to user code. + + We recommend only using this `post_update_custom_target_type_with_metadata` + interceptor in new development instead of the `post_update_custom_target_type` interceptor. + When both interceptors are used, this `post_update_custom_target_type_with_metadata` interceptor runs after the + `post_update_custom_target_type` interceptor. The (possibly modified) response returned by + `post_update_custom_target_type` will be passed to + `post_update_custom_target_type_with_metadata`. + """ + return response, metadata + def pre_update_delivery_pipeline( self, request: cloud_deploy.UpdateDeliveryPipelineRequest, @@ -1491,12 +2492,35 @@ def post_update_delivery_pipeline( ) -> operations_pb2.Operation: """Post-rpc interceptor for update_delivery_pipeline - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_delivery_pipeline_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CloudDeploy server but before - it is returned to user code. + it is returned to user code. This `post_update_delivery_pipeline` interceptor runs + before the `post_update_delivery_pipeline_with_metadata` interceptor. """ return response + def post_update_delivery_pipeline_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_delivery_pipeline + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudDeploy server but before it is returned to user code. + + We recommend only using this `post_update_delivery_pipeline_with_metadata` + interceptor in new development instead of the `post_update_delivery_pipeline` interceptor. + When both interceptors are used, this `post_update_delivery_pipeline_with_metadata` interceptor runs after the + `post_update_delivery_pipeline` interceptor. The (possibly modified) response returned by + `post_update_delivery_pipeline` will be passed to + `post_update_delivery_pipeline_with_metadata`. 
+ """ + return response, metadata + def pre_update_deploy_policy( self, request: cloud_deploy.UpdateDeployPolicyRequest, @@ -1516,12 +2540,35 @@ def post_update_deploy_policy( ) -> operations_pb2.Operation: """Post-rpc interceptor for update_deploy_policy - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_deploy_policy_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CloudDeploy server but before - it is returned to user code. + it is returned to user code. This `post_update_deploy_policy` interceptor runs + before the `post_update_deploy_policy_with_metadata` interceptor. """ return response + def post_update_deploy_policy_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_deploy_policy + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudDeploy server but before it is returned to user code. + + We recommend only using this `post_update_deploy_policy_with_metadata` + interceptor in new development instead of the `post_update_deploy_policy` interceptor. + When both interceptors are used, this `post_update_deploy_policy_with_metadata` interceptor runs after the + `post_update_deploy_policy` interceptor. The (possibly modified) response returned by + `post_update_deploy_policy` will be passed to + `post_update_deploy_policy_with_metadata`. + """ + return response, metadata + def pre_update_target( self, request: cloud_deploy.UpdateTargetRequest, @@ -1541,12 +2588,35 @@ def post_update_target( ) -> operations_pb2.Operation: """Post-rpc interceptor for update_target - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_target_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CloudDeploy server but before - it is returned to user code. + it is returned to user code. This `post_update_target` interceptor runs + before the `post_update_target_with_metadata` interceptor. """ return response + def post_update_target_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_target + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudDeploy server but before it is returned to user code. + + We recommend only using this `post_update_target_with_metadata` + interceptor in new development instead of the `post_update_target` interceptor. + When both interceptors are used, this `post_update_target_with_metadata` interceptor runs after the + `post_update_target` interceptor. The (possibly modified) response returned by + `post_update_target` will be passed to + `post_update_target_with_metadata`. 
+ """ + return response, metadata + def pre_get_location( self, request: locations_pb2.GetLocationRequest, @@ -2031,6 +3101,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_abandon_release(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_abandon_release_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2179,6 +3253,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_advance_rollout(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_advance_rollout_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2327,6 +3405,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_approve_rollout(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_approve_rollout_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2477,6 +3559,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_cancel_automation_run(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_cancel_automation_run_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2629,6 +3715,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_cancel_rollout(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_cancel_rollout_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2780,6 +3870,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_automation(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_automation_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2929,6 +4023,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_custom_target_type(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_custom_target_type_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3078,6 +4176,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_delivery_pipeline(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_delivery_pipeline_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3227,6 +4329,10 @@ def __call__( 
json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_deploy_policy(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_deploy_policy_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3378,6 +4484,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_release(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_release_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3530,6 +4640,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_rollout(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_rollout_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3683,6 +4797,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_target(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_target_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3826,6 +4944,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_automation(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_automation_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3969,6 +5091,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_custom_target_type(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_custom_target_type_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4112,6 +5238,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_delivery_pipeline(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_delivery_pipeline_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4255,6 +5385,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_deploy_policy(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_deploy_policy_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4400,6 +5534,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_target(resp) + response_metadata = [(k, str(v)) 
for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_target_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4550,6 +5688,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_automation(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_automation_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4696,6 +5838,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_automation_run(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_automation_run_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4838,6 +5984,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_config(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_config_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4986,6 +6136,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_custom_target_type(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_custom_target_type_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -5132,6 +6286,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_delivery_pipeline(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_delivery_pipeline_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -5278,6 +6436,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_deploy_policy(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_deploy_policy_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -5425,6 +6587,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_job_run(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_job_run_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -5573,6 +6739,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_release(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_release_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and 
_LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -5722,6 +6892,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_rollout(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_rollout_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -5868,6 +7042,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_target(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_target_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -6016,6 +7194,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_ignore_job(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_ignore_job_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -6158,6 +7340,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_automation_runs(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_automation_runs_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -6302,6 +7488,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_automations(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_automations_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -6446,6 +7636,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_custom_target_types(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_custom_target_types_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -6590,6 +7784,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_delivery_pipelines(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_delivery_pipelines_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -6734,6 +7932,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_deploy_policies(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_deploy_policies_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -6883,6 +8085,10 @@ def __call__( json_format.Parse(response.content, pb_resp, 
ignore_unknown_fields=True) resp = self._interceptor.post_list_job_runs(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_job_runs_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -7029,6 +8235,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_releases(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_releases_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -7178,6 +8388,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_rollouts(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_rollouts_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -7324,6 +8538,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_targets(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_targets_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -7475,6 +8693,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_retry_job(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_retry_job_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -7621,6 +8843,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_rollback_target(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_rollback_target_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -7771,6 +8997,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_terminate_job_run(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_terminate_job_run_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -7922,6 +9152,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_automation(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_automation_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -8071,6 +9305,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_custom_target_type(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = 
self._interceptor.post_update_custom_target_type_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -8220,6 +9458,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_delivery_pipeline(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_delivery_pipeline_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -8369,6 +9611,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_deploy_policy(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_deploy_policy_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -8522,6 +9768,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_target(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_target_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-deploy/google/cloud/deploy_v1/services/cloud_deploy/transports/rest_base.py b/packages/google-cloud-deploy/google/cloud/deploy_v1/services/cloud_deploy/transports/rest_base.py index b96921944346..2e257f4b8722 100644 --- a/packages/google-cloud-deploy/google/cloud/deploy_v1/services/cloud_deploy/transports/rest_base.py +++ b/packages/google-cloud-deploy/google/cloud/deploy_v1/services/cloud_deploy/transports/rest_base.py @@ -2509,6 +2509,10 @@ def _get_http_options(): "method": "get", "uri": "/v1/{resource=projects/*/locations/*/customTargetTypes/*}:getIamPolicy", }, + { + "method": "get", + "uri": "/v1/{resource=projects/*/locations/*/deployPolicies/*}:getIamPolicy", + }, ] return http_options @@ -2545,6 +2549,11 @@ def _get_http_options(): "uri": "/v1/{resource=projects/*/locations/*/customTargetTypes/*}:setIamPolicy", "body": "*", }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/locations/*/deployPolicies/*}:setIamPolicy", + "body": "*", + }, ] return http_options diff --git a/packages/google-cloud-deploy/google/cloud/deploy_v1/types/cloud_deploy.py b/packages/google-cloud-deploy/google/cloud/deploy_v1/types/cloud_deploy.py index 35942b148209..9684fec95468 100644 --- a/packages/google-cloud-deploy/google/cloud/deploy_v1/types/cloud_deploy.py +++ b/packages/google-cloud-deploy/google/cloud/deploy_v1/types/cloud_deploy.py @@ -269,19 +269,19 @@ class DeliveryPipeline(proto.Message): Attributes: name (str): - Optional. Name of the ``DeliveryPipeline``. Format is + Identifier. Name of the ``DeliveryPipeline``. Format is ``projects/{project}/locations/{location}/deliveryPipelines/{deliveryPipeline}``. The ``deliveryPipeline`` component must match ``[a-z]([a-z0-9-]{0,61}[a-z0-9])?`` uid (str): Output only. Unique identifier of the ``DeliveryPipeline``. description (str): - Description of the ``DeliveryPipeline``. Max length is 255 - characters. + Optional. Description of the ``DeliveryPipeline``. Max + length is 255 characters. annotations (MutableMapping[str, str]): - User annotations. 
These attributes can only - be set and used by the user, and not by Cloud - Deploy. + Optional. User annotations. These attributes + can only be set and used by the user, and not by + Cloud Deploy. labels (MutableMapping[str, str]): Labels are attributes that can be set and used by both the user and by Cloud Deploy. Labels must meet the following @@ -304,8 +304,8 @@ class DeliveryPipeline(proto.Message): Output only. Most recent time at which the pipeline was updated. serial_pipeline (google.cloud.deploy_v1.types.SerialPipeline): - SerialPipeline defines a sequential set of stages for a - ``DeliveryPipeline``. + Optional. SerialPipeline defines a sequential set of stages + for a ``DeliveryPipeline``. This field is a member of `oneof`_ ``pipeline``. condition (google.cloud.deploy_v1.types.PipelineCondition): @@ -317,9 +317,9 @@ class DeliveryPipeline(proto.Message): update and delete requests to ensure the client has an up-to-date value before proceeding. suspended (bool): - When suspended, no new releases or rollouts - can be created, but in-progress ones will - complete. + Optional. When suspended, no new releases or + rollouts can be created, but in-progress ones + will complete. """ name: str = proto.Field( @@ -381,8 +381,9 @@ class SerialPipeline(proto.Message): Attributes: stages (MutableSequence[google.cloud.deploy_v1.types.Stage]): - Each stage specifies configuration for a ``Target``. The - ordering of this list defines the promotion flow. + Optional. Each stage specifies configuration for a + ``Target``. The ordering of this list defines the promotion + flow. """ stages: MutableSequence["Stage"] = proto.RepeatedField( @@ -397,16 +398,17 @@ class Stage(proto.Message): Attributes: target_id (str): - The target_id to which this stage points. This field refers - exclusively to the last segment of a target name. For - example, this field would just be ``my-target`` (rather than + Optional. The target_id to which this stage points. This + field refers exclusively to the last segment of a target + name. For example, this field would just be ``my-target`` + (rather than ``projects/project/locations/location/targets/my-target``). The location of the ``Target`` is inferred to be the same as the location of the ``DeliveryPipeline`` that contains this ``Stage``. profiles (MutableSequence[str]): - Skaffold profiles to use when rendering the manifest for - this stage's ``Target``. + Optional. Skaffold profiles to use when rendering the + manifest for this stage's ``Target``. strategy (google.cloud.deploy_v1.types.Strategy): Optional. The strategy to use for a ``Rollout`` to this stage. @@ -473,13 +475,13 @@ class Strategy(proto.Message): Attributes: standard (google.cloud.deploy_v1.types.Standard): - Standard deployment strategy executes a - single deploy and allows verifying the - deployment. + Optional. Standard deployment strategy + executes a single deploy and allows verifying + the deployment. This field is a member of `oneof`_ ``deployment_strategy``. canary (google.cloud.deploy_v1.types.Canary): - Canary deployment strategy provides + Optional. Canary deployment strategy provides progressive percentage based deployments to a Target. @@ -539,7 +541,7 @@ class Standard(proto.Message): Attributes: verify (bool): - Whether to verify a deployment. + Optional. Whether to verify a deployment. predeploy (google.cloud.deploy_v1.types.Predeploy): Optional. Configuration for the predeploy job. 
If this is not configured, predeploy job @@ -584,15 +586,15 @@ class Canary(proto.Message): Deploy will split traffic to enable a progressive deployment. canary_deployment (google.cloud.deploy_v1.types.CanaryDeployment): - Configures the progressive based deployment - for a Target. + Optional. Configures the progressive based + deployment for a Target. This field is a member of `oneof`_ ``mode``. custom_canary_deployment (google.cloud.deploy_v1.types.CustomCanaryDeployment): - Configures the progressive based deployment - for a Target, but allows customizing at the - phase level where a phase represents each of the - percentage deployments. + Optional. Configures the progressive based + deployment for a Target, but allows customizing + at the phase level where a phase represents each + of the percentage deployments. This field is a member of `oneof`_ ``mode``. """ @@ -628,8 +630,8 @@ class CanaryDeployment(proto.Message): GatewayServiceMesh is configured for Kubernetes, then the range for n is 0 <= n <= 100. verify (bool): - Whether to run verify tests after each - percentage deployment. + Optional. Whether to run verify tests after + each percentage deployment. predeploy (google.cloud.deploy_v1.types.Predeploy): Optional. Configuration for the predeploy job of the first phase. If this is not configured, @@ -687,12 +689,12 @@ class PhaseConfig(proto.Message): Required. Percentage deployment for the phase. profiles (MutableSequence[str]): - Skaffold profiles to use when rendering the manifest for - this phase. These are in addition to the profiles list - specified in the ``DeliveryPipeline`` stage. + Optional. Skaffold profiles to use when rendering the + manifest for this phase. These are in addition to the + profiles list specified in the ``DeliveryPipeline`` stage. verify (bool): - Whether to run verify tests after the - deployment. + Optional. Whether to run verify tests after + the deployment. predeploy (google.cloud.deploy_v1.types.Predeploy): Optional. Configuration for the predeploy job of this phase. If this is not configured, there @@ -750,12 +752,13 @@ class KubernetesConfig(proto.Message): Attributes: gateway_service_mesh (google.cloud.deploy_v1.types.KubernetesConfig.GatewayServiceMesh): - Kubernetes Gateway API service mesh + Optional. Kubernetes Gateway API service mesh configuration. This field is a member of `oneof`_ ``service_definition``. service_networking (google.cloud.deploy_v1.types.KubernetesConfig.ServiceNetworking): - Kubernetes Service networking configuration. + Optional. Kubernetes Service networking + configuration. This field is a member of `oneof`_ ``service_definition``. """ @@ -926,8 +929,8 @@ class CloudRunConfig(proto.Message): Attributes: automatic_traffic_control (bool): - Whether Cloud Deploy should update the - traffic stanza in a Cloud Run Service on the + Optional. Whether Cloud Deploy should update + the traffic stanza in a Cloud Run Service on the user's behalf to facilitate traffic splitting. This is required to be true for CanaryDeployments, but optional for @@ -977,11 +980,11 @@ class RuntimeConfig(proto.Message): Attributes: kubernetes (google.cloud.deploy_v1.types.KubernetesConfig): - Kubernetes runtime configuration. + Optional. Kubernetes runtime configuration. This field is a member of `oneof`_ ``runtime_config``. cloud_run (google.cloud.deploy_v1.types.CloudRunConfig): - Cloud Run runtime configuration. + Optional. Cloud Run runtime configuration. This field is a member of `oneof`_ ``runtime_config``. 
""" @@ -1530,7 +1533,7 @@ class Target(proto.Message): Attributes: name (str): - Optional. Name of the ``Target``. Format is + Identifier. Name of the ``Target``. Format is ``projects/{project}/locations/{location}/targets/{target}``. The ``target`` component must match ``[a-z]([a-z0-9-]{0,61}[a-z0-9])?`` @@ -1612,14 +1615,14 @@ class Target(proto.Message): ensure the client has an up-to-date value before proceeding. execution_configs (MutableSequence[google.cloud.deploy_v1.types.ExecutionConfig]): - Configurations for all execution that relates to this - ``Target``. Each ``ExecutionEnvironmentUsage`` value may - only be used in a single configuration; using the same value - multiple times is an error. When one or more configurations - are specified, they must include the ``RENDER`` and - ``DEPLOY`` ``ExecutionEnvironmentUsage`` values. When no - configurations are specified, execution will use the default - specified in ``DefaultPool``. + Optional. Configurations for all execution that relates to + this ``Target``. Each ``ExecutionEnvironmentUsage`` value + may only be used in a single configuration; using the same + value multiple times is an error. When one or more + configurations are specified, they must include the + ``RENDER`` and ``DEPLOY`` ``ExecutionEnvironmentUsage`` + values. When no configurations are specified, execution will + use the default specified in ``DefaultPool``. deploy_parameters (MutableMapping[str, str]): Optional. The deploy parameters to use for this target. @@ -1922,8 +1925,8 @@ class GkeCluster(proto.Message): to the Kubernetes server. dns_endpoint (bool): Optional. If set, the cluster will be accessed using the DNS - endpoint. Note that ``dns_endpoint`` and ``internal_ip`` - cannot both be set to true. + endpoint. Note that both ``dns_endpoint`` and + ``internal_ip`` cannot be set to true. """ cluster: str = proto.Field( @@ -2331,7 +2334,7 @@ class CustomTargetType(proto.Message): Attributes: name (str): - Optional. Name of the ``CustomTargetType``. Format is + Identifier. Name of the ``CustomTargetType``. Format is ``projects/{project}/locations/{location}/customTargetTypes/{customTargetType}``. The ``customTargetType`` component must match ``[a-z]([a-z0-9-]{0,61}[a-z0-9])?`` @@ -2376,8 +2379,8 @@ class CustomTargetType(proto.Message): ensure the client has an up-to-date value before proceeding. custom_actions (google.cloud.deploy_v1.types.CustomTargetSkaffoldActions): - Configures render and deploy for the ``CustomTargetType`` - using Skaffold custom actions. + Optional. Configures render and deploy for the + ``CustomTargetType`` using Skaffold custom actions. This field is a member of `oneof`_ ``definition``. """ @@ -2478,18 +2481,18 @@ class SkaffoldModules(proto.Message): Optional. The Skaffold Config modules to use from the specified source. git (google.cloud.deploy_v1.types.SkaffoldModules.SkaffoldGitSource): - Remote git repository containing the Skaffold - Config modules. + Optional. Remote git repository containing + the Skaffold Config modules. This field is a member of `oneof`_ ``source``. google_cloud_storage (google.cloud.deploy_v1.types.SkaffoldModules.SkaffoldGCSSource): - Cloud Storage bucket containing the Skaffold - Config modules. + Optional. Cloud Storage bucket containing the + Skaffold Config modules. This field is a member of `oneof`_ ``source``. google_cloud_build_repo (google.cloud.deploy_v1.types.SkaffoldModules.SkaffoldGCBRepoSource): - Cloud Build V2 repository containing the - Skaffold Config modules. + Optional. 
Cloud Build V2 repository + containing the Skaffold Config modules. This field is a member of `oneof`_ ``source``. """ @@ -2906,12 +2909,12 @@ class DeployPolicy(proto.Message): uid (str): Output only. Unique identifier of the ``DeployPolicy``. description (str): - Description of the ``DeployPolicy``. Max length is 255 - characters. + Optional. Description of the ``DeployPolicy``. Max length is + 255 characters. annotations (MutableMapping[str, str]): - User annotations. These attributes can only be set and used - by the user, and not by Cloud Deploy. Annotations must meet - the following constraints: + Optional. User annotations. These attributes can only be set + and used by the user, and not by Cloud Deploy. Annotations + must meet the following constraints: - Annotations are key/value pairs. - Valid annotation keys have two segments: an optional @@ -2950,9 +2953,9 @@ class DeployPolicy(proto.Message): Output only. Most recent time at which the deploy policy was updated. suspended (bool): - When suspended, the policy will not prevent - actions from occurring, even if the action - violates the policy. + Optional. When suspended, the policy will not + prevent actions from occurring, even if the + action violates the policy. selectors (MutableSequence[google.cloud.deploy_v1.types.DeployPolicyResourceSelector]): Required. Selected resources to which the policy will be applied. At least one selector is @@ -3074,8 +3077,8 @@ class DeliveryPipelineAttribute(proto.Message): Attributes: id (str): - ID of the ``DeliveryPipeline``. The value of this field - could be one of the following: + Optional. ID of the ``DeliveryPipeline``. The value of this + field could be one of the following: - The last segment of a pipeline name - "*", all delivery pipelines in a location @@ -3100,8 +3103,8 @@ class TargetAttribute(proto.Message): Attributes: id (str): - ID of the ``Target``. The value of this field could be one - of the following: + Optional. ID of the ``Target``. The value of this field + could be one of the following: - The last segment of a target name - "*", all targets in a location @@ -3127,7 +3130,7 @@ class PolicyRule(proto.Message): Attributes: rollout_restriction (google.cloud.deploy_v1.types.RolloutRestriction): - Rollout restrictions. + Optional. Rollout restrictions. This field is a member of `oneof`_ ``rule``. """ @@ -3384,19 +3387,19 @@ class Release(proto.Message): Attributes: name (str): - Optional. Name of the ``Release``. Format is + Identifier. Name of the ``Release``. Format is ``projects/{project}/locations/{location}/deliveryPipelines/{deliveryPipeline}/releases/{release}``. The ``release`` component must match ``[a-z]([a-z0-9-]{0,61}[a-z0-9])?`` uid (str): Output only. Unique identifier of the ``Release``. description (str): - Description of the ``Release``. Max length is 255 + Optional. Description of the ``Release``. Max length is 255 characters. annotations (MutableMapping[str, str]): - User annotations. These attributes can only - be set and used by the user, and not by Cloud - Deploy. See + Optional. User annotations. These attributes + can only be set and used by the user, and not by + Cloud Deploy. See https://google.aip.dev/128#annotations for more details such as format and size limitations. labels (MutableMapping[str, str]): @@ -3425,14 +3428,14 @@ class Release(proto.Message): Output only. Time at which the render completed. skaffold_config_uri (str): - Cloud Storage URI of tar.gz archive + Optional. 
Cloud Storage URI of tar.gz archive containing Skaffold configuration. skaffold_config_path (str): - Filepath of the Skaffold config inside of the - config URI. + Optional. Filepath of the Skaffold config + inside of the config URI. build_artifacts (MutableSequence[google.cloud.deploy_v1.types.BuildArtifact]): - List of artifacts to pass through to Skaffold - command. + Optional. List of artifacts to pass through + to Skaffold command. delivery_pipeline_snapshot (google.cloud.deploy_v1.types.DeliveryPipeline): Output only. Snapshot of the parent pipeline taken at release creation time. @@ -4093,10 +4096,11 @@ class BuildArtifact(proto.Message): Attributes: image (str): - Image name in Skaffold configuration. + Optional. Image name in Skaffold + configuration. tag (str): - Image tag to use. This will generally be the - full path to an image, such as + Optional. Image tag to use. This will + generally be the full path to an image, such as "gcr.io/my-project/busybox:1.2.3" or "gcr.io/my-project/busybox@sha256:abc123". """ @@ -4427,19 +4431,19 @@ class Rollout(proto.Message): Attributes: name (str): - Optional. Name of the ``Rollout``. Format is + Identifier. Name of the ``Rollout``. Format is ``projects/{project}/locations/{location}/deliveryPipelines/{deliveryPipeline}/releases/{release}/rollouts/{rollout}``. The ``rollout`` component must match ``[a-z]([a-z0-9-]{0,61}[a-z0-9])?`` uid (str): Output only. Unique identifier of the ``Rollout``. description (str): - Description of the ``Rollout`` for user purposes. Max length - is 255 characters. + Optional. Description of the ``Rollout`` for user purposes. + Max length is 255 characters. annotations (MutableMapping[str, str]): - User annotations. These attributes can only - be set and used by the user, and not by Cloud - Deploy. See + Optional. User annotations. These attributes + can only be set and used by the user, and not by + Cloud Deploy. See https://google.aip.dev/128#annotations for more details such as format and size limitations. labels (MutableMapping[str, str]): @@ -4977,20 +4981,25 @@ class DeploymentJobs(proto.Message): r"""Deployment job composition. Attributes: + predeploy_job (google.cloud.deploy_v1.types.Job): + Output only. The predeploy Job, which is the + first job on the phase. deploy_job (google.cloud.deploy_v1.types.Job): Output only. The deploy Job. This is the deploy job in the phase. verify_job (google.cloud.deploy_v1.types.Job): Output only. The verify Job. Runs after a deploy if the deploy succeeds. - predeploy_job (google.cloud.deploy_v1.types.Job): - Output only. The predeploy Job, which is the - first job on the phase. postdeploy_job (google.cloud.deploy_v1.types.Job): Output only. The postdeploy Job, which is the last job on the phase. """ + predeploy_job: "Job" = proto.Field( + proto.MESSAGE, + number=3, + message="Job", + ) deploy_job: "Job" = proto.Field( proto.MESSAGE, number=1, @@ -5001,11 +5010,6 @@ class DeploymentJobs(proto.Message): number=2, message="Job", ) - predeploy_job: "Job" = proto.Field( - proto.MESSAGE, - number=3, - message="Job", - ) postdeploy_job: "Job" = proto.Field( proto.MESSAGE, number=4, @@ -5659,7 +5663,7 @@ class JobRun(proto.Message): Attributes: name (str): - Optional. Name of the ``JobRun``. Format is + Output only. Name of the ``JobRun``. Format is ``projects/{project}/locations/{location}/deliveryPipelines/{deliveryPipeline}/releases/{releases}/rollouts/{rollouts}/jobRuns/{uuid}``. uid (str): Output only. Unique identifier of the ``JobRun``. 
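
The ``Release`` and ``BuildArtifact`` field documentation above now marks the Skaffold and artifact fields as Optional and ``name`` as an identifier assigned by the service. A minimal sketch of constructing these messages with only the optional fields set (the bucket, path, and image values below are hypothetical):

from google.cloud.deploy_v1.types import cloud_deploy

release = cloud_deploy.Release(
    # `name` is an identifier assigned by the service, so it is normally
    # left unset when the release is created.
    description="nightly build",
    skaffold_config_uri="gs://my-bucket/source.tar.gz",
    skaffold_config_path="skaffold.yaml",
    build_artifacts=[
        cloud_deploy.BuildArtifact(
            image="busybox",
            tag="gcr.io/my-project/busybox:1.2.3",
        )
    ],
)
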
@@ -6497,7 +6501,7 @@ class AutomationResourceSelector(proto.Message): Attributes: targets (MutableSequence[google.cloud.deploy_v1.types.TargetAttribute]): - Contains attributes about a target. + Optional. Contains attributes about a target. """ targets: MutableSequence["TargetAttribute"] = proto.RepeatedField( diff --git a/packages/google-cloud-deploy/samples/generated_samples/snippet_metadata_google.cloud.deploy.v1.json b/packages/google-cloud-deploy/samples/generated_samples/snippet_metadata_google.cloud.deploy.v1.json index 8899f8655d94..c24aac9f2e10 100644 --- a/packages/google-cloud-deploy/samples/generated_samples/snippet_metadata_google.cloud.deploy.v1.json +++ b/packages/google-cloud-deploy/samples/generated_samples/snippet_metadata_google.cloud.deploy.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-deploy", - "version": "2.5.0" + "version": "2.6.0" }, "snippets": [ { diff --git a/packages/google-cloud-deploy/tests/unit/gapic/deploy_v1/test_cloud_deploy.py b/packages/google-cloud-deploy/tests/unit/gapic/deploy_v1/test_cloud_deploy.py index e7c517531ce9..239c9163fa42 100644 --- a/packages/google-cloud-deploy/tests/unit/gapic/deploy_v1/test_cloud_deploy.py +++ b/packages/google-cloud-deploy/tests/unit/gapic/deploy_v1/test_cloud_deploy.py @@ -81,6 +81,13 @@ ) from google.cloud.deploy_v1.types import cloud_deploy +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -320,6 +327,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
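
The unit tests below also exercise the new ``post_*_with_metadata`` interceptor hooks documented earlier in this change. A minimal sketch of overriding one of them in a custom interceptor; wiring it in through the REST transport's ``interceptor`` argument is an assumption based on how the generated interceptor tests construct their clients:

from typing import Sequence, Tuple, Union

from google.cloud import deploy_v1
from google.cloud.deploy_v1.services.cloud_deploy import transports
from google.cloud.deploy_v1.types import cloud_deploy


class HeaderAwareInterceptor(transports.CloudDeployRestInterceptor):
    def post_list_targets_with_metadata(
        self,
        response: cloud_deploy.ListTargetsResponse,
        metadata: Sequence[Tuple[str, Union[str, bytes]]],
    ) -> Tuple[
        cloud_deploy.ListTargetsResponse, Sequence[Tuple[str, Union[str, bytes]]]
    ]:
        # `metadata` is built from the HTTP response headers and this hook runs
        # after `post_list_targets`; the tuple is returned unchanged here.
        print(f"list_targets returned {len(metadata)} response headers")
        return response, metadata


# Assumption: the REST transport exposes an `interceptor` argument, as in the
# generated interceptor unit tests; credentials are resolved from the environment.
client = deploy_v1.CloudDeployClient(
    transport=transports.CloudDeployRestTransport(
        interceptor=HeaderAwareInterceptor()
    )
)
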
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = CloudDeployClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = CloudDeployClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -30366,10 +30416,14 @@ def test_list_delivery_pipelines_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.CloudDeployRestInterceptor, "post_list_delivery_pipelines" ) as post, mock.patch.object( + transports.CloudDeployRestInterceptor, + "post_list_delivery_pipelines_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.CloudDeployRestInterceptor, "pre_list_delivery_pipelines" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = cloud_deploy.ListDeliveryPipelinesRequest.pb( cloud_deploy.ListDeliveryPipelinesRequest() ) @@ -30395,6 +30449,10 @@ def test_list_delivery_pipelines_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = cloud_deploy.ListDeliveryPipelinesResponse() + post_with_metadata.return_value = ( + cloud_deploy.ListDeliveryPipelinesResponse(), + metadata, + ) client.list_delivery_pipelines( request, @@ -30406,6 +30464,7 @@ def test_list_delivery_pipelines_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_delivery_pipeline_rest_bad_request( @@ -30502,10 +30561,14 @@ def test_get_delivery_pipeline_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.CloudDeployRestInterceptor, "post_get_delivery_pipeline" ) as post, mock.patch.object( + transports.CloudDeployRestInterceptor, + "post_get_delivery_pipeline_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.CloudDeployRestInterceptor, "pre_get_delivery_pipeline" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = cloud_deploy.GetDeliveryPipelineRequest.pb( cloud_deploy.GetDeliveryPipelineRequest() ) @@ -30531,6 +30594,7 @@ def test_get_delivery_pipeline_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = cloud_deploy.DeliveryPipeline() + post_with_metadata.return_value = cloud_deploy.DeliveryPipeline(), metadata client.get_delivery_pipeline( request, @@ -30542,6 +30606,7 @@ def 
test_get_delivery_pipeline_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_delivery_pipeline_rest_bad_request( @@ -30800,10 +30865,14 @@ def test_create_delivery_pipeline_rest_interceptors(null_interceptor): ), mock.patch.object( transports.CloudDeployRestInterceptor, "post_create_delivery_pipeline" ) as post, mock.patch.object( + transports.CloudDeployRestInterceptor, + "post_create_delivery_pipeline_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.CloudDeployRestInterceptor, "pre_create_delivery_pipeline" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = cloud_deploy.CreateDeliveryPipelineRequest.pb( cloud_deploy.CreateDeliveryPipelineRequest() ) @@ -30827,6 +30896,7 @@ def test_create_delivery_pipeline_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_delivery_pipeline( request, @@ -30838,6 +30908,7 @@ def test_create_delivery_pipeline_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_delivery_pipeline_rest_bad_request( @@ -31104,10 +31175,14 @@ def test_update_delivery_pipeline_rest_interceptors(null_interceptor): ), mock.patch.object( transports.CloudDeployRestInterceptor, "post_update_delivery_pipeline" ) as post, mock.patch.object( + transports.CloudDeployRestInterceptor, + "post_update_delivery_pipeline_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.CloudDeployRestInterceptor, "pre_update_delivery_pipeline" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = cloud_deploy.UpdateDeliveryPipelineRequest.pb( cloud_deploy.UpdateDeliveryPipelineRequest() ) @@ -31131,6 +31206,7 @@ def test_update_delivery_pipeline_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.update_delivery_pipeline( request, @@ -31142,6 +31218,7 @@ def test_update_delivery_pipeline_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_delivery_pipeline_rest_bad_request( @@ -31226,10 +31303,14 @@ def test_delete_delivery_pipeline_rest_interceptors(null_interceptor): ), mock.patch.object( transports.CloudDeployRestInterceptor, "post_delete_delivery_pipeline" ) as post, mock.patch.object( + transports.CloudDeployRestInterceptor, + "post_delete_delivery_pipeline_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.CloudDeployRestInterceptor, "pre_delete_delivery_pipeline" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = cloud_deploy.DeleteDeliveryPipelineRequest.pb( cloud_deploy.DeleteDeliveryPipelineRequest() ) @@ -31253,6 +31334,7 @@ def test_delete_delivery_pipeline_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.delete_delivery_pipeline( request, @@ -31264,6 +31346,7 @@ def 
test_delete_delivery_pipeline_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_targets_rest_bad_request(request_type=cloud_deploy.ListTargetsRequest): @@ -31348,10 +31431,13 @@ def test_list_targets_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.CloudDeployRestInterceptor, "post_list_targets" ) as post, mock.patch.object( + transports.CloudDeployRestInterceptor, "post_list_targets_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.CloudDeployRestInterceptor, "pre_list_targets" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = cloud_deploy.ListTargetsRequest.pb( cloud_deploy.ListTargetsRequest() ) @@ -31377,6 +31463,7 @@ def test_list_targets_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = cloud_deploy.ListTargetsResponse() + post_with_metadata.return_value = cloud_deploy.ListTargetsResponse(), metadata client.list_targets( request, @@ -31388,6 +31475,7 @@ def test_list_targets_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_rollback_target_rest_bad_request( @@ -31473,10 +31561,13 @@ def test_rollback_target_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.CloudDeployRestInterceptor, "post_rollback_target" ) as post, mock.patch.object( + transports.CloudDeployRestInterceptor, "post_rollback_target_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.CloudDeployRestInterceptor, "pre_rollback_target" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = cloud_deploy.RollbackTargetRequest.pb( cloud_deploy.RollbackTargetRequest() ) @@ -31502,6 +31593,10 @@ def test_rollback_target_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = cloud_deploy.RollbackTargetResponse() + post_with_metadata.return_value = ( + cloud_deploy.RollbackTargetResponse(), + metadata, + ) client.rollback_target( request, @@ -31513,6 +31608,7 @@ def test_rollback_target_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_target_rest_bad_request(request_type=cloud_deploy.GetTargetRequest): @@ -31605,10 +31701,13 @@ def test_get_target_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.CloudDeployRestInterceptor, "post_get_target" ) as post, mock.patch.object( + transports.CloudDeployRestInterceptor, "post_get_target_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.CloudDeployRestInterceptor, "pre_get_target" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = cloud_deploy.GetTargetRequest.pb(cloud_deploy.GetTargetRequest()) transcode.return_value = { "method": "post", @@ -31630,6 +31729,7 @@ def test_get_target_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = cloud_deploy.Target() + post_with_metadata.return_value = cloud_deploy.Target(), metadata client.get_target( request, @@ -31641,6 +31741,7 @@ def test_get_target_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def 
test_create_target_rest_bad_request(request_type=cloud_deploy.CreateTargetRequest): @@ -31829,10 +31930,13 @@ def test_create_target_rest_interceptors(null_interceptor): ), mock.patch.object( transports.CloudDeployRestInterceptor, "post_create_target" ) as post, mock.patch.object( + transports.CloudDeployRestInterceptor, "post_create_target_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.CloudDeployRestInterceptor, "pre_create_target" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = cloud_deploy.CreateTargetRequest.pb( cloud_deploy.CreateTargetRequest() ) @@ -31856,6 +31960,7 @@ def test_create_target_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_target( request, @@ -31867,6 +31972,7 @@ def test_create_target_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_target_rest_bad_request(request_type=cloud_deploy.UpdateTargetRequest): @@ -32059,10 +32165,13 @@ def test_update_target_rest_interceptors(null_interceptor): ), mock.patch.object( transports.CloudDeployRestInterceptor, "post_update_target" ) as post, mock.patch.object( + transports.CloudDeployRestInterceptor, "post_update_target_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.CloudDeployRestInterceptor, "pre_update_target" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = cloud_deploy.UpdateTargetRequest.pb( cloud_deploy.UpdateTargetRequest() ) @@ -32086,6 +32195,7 @@ def test_update_target_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.update_target( request, @@ -32097,6 +32207,7 @@ def test_update_target_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_target_rest_bad_request(request_type=cloud_deploy.DeleteTargetRequest): @@ -32175,10 +32286,13 @@ def test_delete_target_rest_interceptors(null_interceptor): ), mock.patch.object( transports.CloudDeployRestInterceptor, "post_delete_target" ) as post, mock.patch.object( + transports.CloudDeployRestInterceptor, "post_delete_target_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.CloudDeployRestInterceptor, "pre_delete_target" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = cloud_deploy.DeleteTargetRequest.pb( cloud_deploy.DeleteTargetRequest() ) @@ -32202,6 +32316,7 @@ def test_delete_target_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.delete_target( request, @@ -32213,6 +32328,7 @@ def test_delete_target_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_custom_target_types_rest_bad_request( @@ -32299,10 +32415,14 @@ def test_list_custom_target_types_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.CloudDeployRestInterceptor, 
"post_list_custom_target_types" ) as post, mock.patch.object( + transports.CloudDeployRestInterceptor, + "post_list_custom_target_types_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.CloudDeployRestInterceptor, "pre_list_custom_target_types" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = cloud_deploy.ListCustomTargetTypesRequest.pb( cloud_deploy.ListCustomTargetTypesRequest() ) @@ -32328,6 +32448,10 @@ def test_list_custom_target_types_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = cloud_deploy.ListCustomTargetTypesResponse() + post_with_metadata.return_value = ( + cloud_deploy.ListCustomTargetTypesResponse(), + metadata, + ) client.list_custom_target_types( request, @@ -32339,6 +32463,7 @@ def test_list_custom_target_types_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_custom_target_type_rest_bad_request( @@ -32435,10 +32560,14 @@ def test_get_custom_target_type_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.CloudDeployRestInterceptor, "post_get_custom_target_type" ) as post, mock.patch.object( + transports.CloudDeployRestInterceptor, + "post_get_custom_target_type_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.CloudDeployRestInterceptor, "pre_get_custom_target_type" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = cloud_deploy.GetCustomTargetTypeRequest.pb( cloud_deploy.GetCustomTargetTypeRequest() ) @@ -32464,6 +32593,7 @@ def test_get_custom_target_type_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = cloud_deploy.CustomTargetType() + post_with_metadata.return_value = cloud_deploy.CustomTargetType(), metadata client.get_custom_target_type( request, @@ -32475,6 +32605,7 @@ def test_get_custom_target_type_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_custom_target_type_rest_bad_request( @@ -32658,10 +32789,14 @@ def test_create_custom_target_type_rest_interceptors(null_interceptor): ), mock.patch.object( transports.CloudDeployRestInterceptor, "post_create_custom_target_type" ) as post, mock.patch.object( + transports.CloudDeployRestInterceptor, + "post_create_custom_target_type_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.CloudDeployRestInterceptor, "pre_create_custom_target_type" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = cloud_deploy.CreateCustomTargetTypeRequest.pb( cloud_deploy.CreateCustomTargetTypeRequest() ) @@ -32685,6 +32820,7 @@ def test_create_custom_target_type_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_custom_target_type( request, @@ -32696,6 +32832,7 @@ def test_create_custom_target_type_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_custom_target_type_rest_bad_request( @@ -32887,10 +33024,14 @@ def test_update_custom_target_type_rest_interceptors(null_interceptor): ), mock.patch.object( 
transports.CloudDeployRestInterceptor, "post_update_custom_target_type" ) as post, mock.patch.object( + transports.CloudDeployRestInterceptor, + "post_update_custom_target_type_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.CloudDeployRestInterceptor, "pre_update_custom_target_type" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = cloud_deploy.UpdateCustomTargetTypeRequest.pb( cloud_deploy.UpdateCustomTargetTypeRequest() ) @@ -32914,6 +33055,7 @@ def test_update_custom_target_type_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.update_custom_target_type( request, @@ -32925,6 +33067,7 @@ def test_update_custom_target_type_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_custom_target_type_rest_bad_request( @@ -33009,10 +33152,14 @@ def test_delete_custom_target_type_rest_interceptors(null_interceptor): ), mock.patch.object( transports.CloudDeployRestInterceptor, "post_delete_custom_target_type" ) as post, mock.patch.object( + transports.CloudDeployRestInterceptor, + "post_delete_custom_target_type_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.CloudDeployRestInterceptor, "pre_delete_custom_target_type" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = cloud_deploy.DeleteCustomTargetTypeRequest.pb( cloud_deploy.DeleteCustomTargetTypeRequest() ) @@ -33036,6 +33183,7 @@ def test_delete_custom_target_type_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.delete_custom_target_type( request, @@ -33047,6 +33195,7 @@ def test_delete_custom_target_type_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_releases_rest_bad_request(request_type=cloud_deploy.ListReleasesRequest): @@ -33135,10 +33284,13 @@ def test_list_releases_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.CloudDeployRestInterceptor, "post_list_releases" ) as post, mock.patch.object( + transports.CloudDeployRestInterceptor, "post_list_releases_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.CloudDeployRestInterceptor, "pre_list_releases" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = cloud_deploy.ListReleasesRequest.pb( cloud_deploy.ListReleasesRequest() ) @@ -33164,6 +33316,7 @@ def test_list_releases_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = cloud_deploy.ListReleasesResponse() + post_with_metadata.return_value = cloud_deploy.ListReleasesResponse(), metadata client.list_releases( request, @@ -33175,6 +33328,7 @@ def test_list_releases_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_release_rest_bad_request(request_type=cloud_deploy.GetReleaseRequest): @@ -33277,10 +33431,13 @@ def test_get_release_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( 
transports.CloudDeployRestInterceptor, "post_get_release" ) as post, mock.patch.object( + transports.CloudDeployRestInterceptor, "post_get_release_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.CloudDeployRestInterceptor, "pre_get_release" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = cloud_deploy.GetReleaseRequest.pb(cloud_deploy.GetReleaseRequest()) transcode.return_value = { "method": "post", @@ -33302,6 +33459,7 @@ def test_get_release_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = cloud_deploy.Release() + post_with_metadata.return_value = cloud_deploy.Release(), metadata client.get_release( request, @@ -33313,6 +33471,7 @@ def test_get_release_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_release_rest_bad_request( @@ -33690,10 +33849,13 @@ def test_create_release_rest_interceptors(null_interceptor): ), mock.patch.object( transports.CloudDeployRestInterceptor, "post_create_release" ) as post, mock.patch.object( + transports.CloudDeployRestInterceptor, "post_create_release_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.CloudDeployRestInterceptor, "pre_create_release" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = cloud_deploy.CreateReleaseRequest.pb( cloud_deploy.CreateReleaseRequest() ) @@ -33717,6 +33879,7 @@ def test_create_release_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_release( request, @@ -33728,6 +33891,7 @@ def test_create_release_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_abandon_release_rest_bad_request( @@ -33813,10 +33977,13 @@ def test_abandon_release_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.CloudDeployRestInterceptor, "post_abandon_release" ) as post, mock.patch.object( + transports.CloudDeployRestInterceptor, "post_abandon_release_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.CloudDeployRestInterceptor, "pre_abandon_release" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = cloud_deploy.AbandonReleaseRequest.pb( cloud_deploy.AbandonReleaseRequest() ) @@ -33842,6 +34009,10 @@ def test_abandon_release_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = cloud_deploy.AbandonReleaseResponse() + post_with_metadata.return_value = ( + cloud_deploy.AbandonReleaseResponse(), + metadata, + ) client.abandon_release( request, @@ -33853,6 +34024,7 @@ def test_abandon_release_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_deploy_policy_rest_bad_request( @@ -34045,10 +34217,13 @@ def test_create_deploy_policy_rest_interceptors(null_interceptor): ), mock.patch.object( transports.CloudDeployRestInterceptor, "post_create_deploy_policy" ) as post, mock.patch.object( + transports.CloudDeployRestInterceptor, "post_create_deploy_policy_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.CloudDeployRestInterceptor, 
"pre_create_deploy_policy" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = cloud_deploy.CreateDeployPolicyRequest.pb( cloud_deploy.CreateDeployPolicyRequest() ) @@ -34072,6 +34247,7 @@ def test_create_deploy_policy_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_deploy_policy( request, @@ -34083,6 +34259,7 @@ def test_create_deploy_policy_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_deploy_policy_rest_bad_request( @@ -34283,10 +34460,13 @@ def test_update_deploy_policy_rest_interceptors(null_interceptor): ), mock.patch.object( transports.CloudDeployRestInterceptor, "post_update_deploy_policy" ) as post, mock.patch.object( + transports.CloudDeployRestInterceptor, "post_update_deploy_policy_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.CloudDeployRestInterceptor, "pre_update_deploy_policy" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = cloud_deploy.UpdateDeployPolicyRequest.pb( cloud_deploy.UpdateDeployPolicyRequest() ) @@ -34310,6 +34490,7 @@ def test_update_deploy_policy_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.update_deploy_policy( request, @@ -34321,6 +34502,7 @@ def test_update_deploy_policy_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_deploy_policy_rest_bad_request( @@ -34401,10 +34583,13 @@ def test_delete_deploy_policy_rest_interceptors(null_interceptor): ), mock.patch.object( transports.CloudDeployRestInterceptor, "post_delete_deploy_policy" ) as post, mock.patch.object( + transports.CloudDeployRestInterceptor, "post_delete_deploy_policy_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.CloudDeployRestInterceptor, "pre_delete_deploy_policy" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = cloud_deploy.DeleteDeployPolicyRequest.pb( cloud_deploy.DeleteDeployPolicyRequest() ) @@ -34428,6 +34613,7 @@ def test_delete_deploy_policy_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.delete_deploy_policy( request, @@ -34439,6 +34625,7 @@ def test_delete_deploy_policy_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_deploy_policies_rest_bad_request( @@ -34525,10 +34712,13 @@ def test_list_deploy_policies_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.CloudDeployRestInterceptor, "post_list_deploy_policies" ) as post, mock.patch.object( + transports.CloudDeployRestInterceptor, "post_list_deploy_policies_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.CloudDeployRestInterceptor, "pre_list_deploy_policies" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = 
cloud_deploy.ListDeployPoliciesRequest.pb( cloud_deploy.ListDeployPoliciesRequest() ) @@ -34554,6 +34744,10 @@ def test_list_deploy_policies_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = cloud_deploy.ListDeployPoliciesResponse() + post_with_metadata.return_value = ( + cloud_deploy.ListDeployPoliciesResponse(), + metadata, + ) client.list_deploy_policies( request, @@ -34565,6 +34759,7 @@ def test_list_deploy_policies_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_deploy_policy_rest_bad_request( @@ -34657,10 +34852,13 @@ def test_get_deploy_policy_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.CloudDeployRestInterceptor, "post_get_deploy_policy" ) as post, mock.patch.object( + transports.CloudDeployRestInterceptor, "post_get_deploy_policy_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.CloudDeployRestInterceptor, "pre_get_deploy_policy" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = cloud_deploy.GetDeployPolicyRequest.pb( cloud_deploy.GetDeployPolicyRequest() ) @@ -34684,6 +34882,7 @@ def test_get_deploy_policy_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = cloud_deploy.DeployPolicy() + post_with_metadata.return_value = cloud_deploy.DeployPolicy(), metadata client.get_deploy_policy( request, @@ -34695,6 +34894,7 @@ def test_get_deploy_policy_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_approve_rollout_rest_bad_request( @@ -34780,10 +34980,13 @@ def test_approve_rollout_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.CloudDeployRestInterceptor, "post_approve_rollout" ) as post, mock.patch.object( + transports.CloudDeployRestInterceptor, "post_approve_rollout_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.CloudDeployRestInterceptor, "pre_approve_rollout" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = cloud_deploy.ApproveRolloutRequest.pb( cloud_deploy.ApproveRolloutRequest() ) @@ -34809,6 +35012,10 @@ def test_approve_rollout_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = cloud_deploy.ApproveRolloutResponse() + post_with_metadata.return_value = ( + cloud_deploy.ApproveRolloutResponse(), + metadata, + ) client.approve_rollout( request, @@ -34820,6 +35027,7 @@ def test_approve_rollout_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_advance_rollout_rest_bad_request( @@ -34905,10 +35113,13 @@ def test_advance_rollout_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.CloudDeployRestInterceptor, "post_advance_rollout" ) as post, mock.patch.object( + transports.CloudDeployRestInterceptor, "post_advance_rollout_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.CloudDeployRestInterceptor, "pre_advance_rollout" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = cloud_deploy.AdvanceRolloutRequest.pb( cloud_deploy.AdvanceRolloutRequest() ) @@ -34934,6 +35145,10 @@ def 
test_advance_rollout_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = cloud_deploy.AdvanceRolloutResponse() + post_with_metadata.return_value = ( + cloud_deploy.AdvanceRolloutResponse(), + metadata, + ) client.advance_rollout( request, @@ -34945,6 +35160,7 @@ def test_advance_rollout_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_cancel_rollout_rest_bad_request( @@ -35030,10 +35246,13 @@ def test_cancel_rollout_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.CloudDeployRestInterceptor, "post_cancel_rollout" ) as post, mock.patch.object( + transports.CloudDeployRestInterceptor, "post_cancel_rollout_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.CloudDeployRestInterceptor, "pre_cancel_rollout" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = cloud_deploy.CancelRolloutRequest.pb( cloud_deploy.CancelRolloutRequest() ) @@ -35059,6 +35278,7 @@ def test_cancel_rollout_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = cloud_deploy.CancelRolloutResponse() + post_with_metadata.return_value = cloud_deploy.CancelRolloutResponse(), metadata client.cancel_rollout( request, @@ -35070,6 +35290,7 @@ def test_cancel_rollout_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_rollouts_rest_bad_request(request_type=cloud_deploy.ListRolloutsRequest): @@ -35158,10 +35379,13 @@ def test_list_rollouts_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.CloudDeployRestInterceptor, "post_list_rollouts" ) as post, mock.patch.object( + transports.CloudDeployRestInterceptor, "post_list_rollouts_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.CloudDeployRestInterceptor, "pre_list_rollouts" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = cloud_deploy.ListRolloutsRequest.pb( cloud_deploy.ListRolloutsRequest() ) @@ -35187,6 +35411,7 @@ def test_list_rollouts_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = cloud_deploy.ListRolloutsResponse() + post_with_metadata.return_value = cloud_deploy.ListRolloutsResponse(), metadata client.list_rollouts( request, @@ -35198,6 +35423,7 @@ def test_list_rollouts_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_rollout_rest_bad_request(request_type=cloud_deploy.GetRolloutRequest): @@ -35313,10 +35539,13 @@ def test_get_rollout_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.CloudDeployRestInterceptor, "post_get_rollout" ) as post, mock.patch.object( + transports.CloudDeployRestInterceptor, "post_get_rollout_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.CloudDeployRestInterceptor, "pre_get_rollout" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = cloud_deploy.GetRolloutRequest.pb(cloud_deploy.GetRolloutRequest()) transcode.return_value = { "method": "post", @@ -35338,6 +35567,7 @@ def test_get_rollout_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = 
cloud_deploy.Rollout() + post_with_metadata.return_value = cloud_deploy.Rollout(), metadata client.get_rollout( request, @@ -35349,6 +35579,7 @@ def test_get_rollout_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_rollout_rest_bad_request( @@ -35418,7 +35649,7 @@ def test_create_rollout_rest_call_success(request_type): "state": 1, "skip_message": "skip_message_value", "deployment_jobs": { - "deploy_job": { + "predeploy_job": { "id": "id_value", "state": 1, "skip_message": "skip_message_value", @@ -35434,8 +35665,8 @@ def test_create_rollout_rest_call_success(request_type): "create_child_rollout_job": {}, "advance_child_rollout_job": {}, }, + "deploy_job": {}, "verify_job": {}, - "predeploy_job": {}, "postdeploy_job": {}, }, "child_rollout_jobs": { @@ -35578,10 +35809,13 @@ def test_create_rollout_rest_interceptors(null_interceptor): ), mock.patch.object( transports.CloudDeployRestInterceptor, "post_create_rollout" ) as post, mock.patch.object( + transports.CloudDeployRestInterceptor, "post_create_rollout_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.CloudDeployRestInterceptor, "pre_create_rollout" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = cloud_deploy.CreateRolloutRequest.pb( cloud_deploy.CreateRolloutRequest() ) @@ -35605,6 +35839,7 @@ def test_create_rollout_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_rollout( request, @@ -35616,6 +35851,7 @@ def test_create_rollout_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_ignore_job_rest_bad_request(request_type=cloud_deploy.IgnoreJobRequest): @@ -35699,10 +35935,13 @@ def test_ignore_job_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.CloudDeployRestInterceptor, "post_ignore_job" ) as post, mock.patch.object( + transports.CloudDeployRestInterceptor, "post_ignore_job_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.CloudDeployRestInterceptor, "pre_ignore_job" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = cloud_deploy.IgnoreJobRequest.pb(cloud_deploy.IgnoreJobRequest()) transcode.return_value = { "method": "post", @@ -35726,6 +35965,7 @@ def test_ignore_job_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = cloud_deploy.IgnoreJobResponse() + post_with_metadata.return_value = cloud_deploy.IgnoreJobResponse(), metadata client.ignore_job( request, @@ -35737,6 +35977,7 @@ def test_ignore_job_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_retry_job_rest_bad_request(request_type=cloud_deploy.RetryJobRequest): @@ -35820,10 +36061,13 @@ def test_retry_job_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.CloudDeployRestInterceptor, "post_retry_job" ) as post, mock.patch.object( + transports.CloudDeployRestInterceptor, "post_retry_job_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.CloudDeployRestInterceptor, "pre_retry_job" ) as pre: pre.assert_not_called() post.assert_not_called() + 
post_with_metadata.assert_not_called() pb_message = cloud_deploy.RetryJobRequest.pb(cloud_deploy.RetryJobRequest()) transcode.return_value = { "method": "post", @@ -35847,6 +36091,7 @@ def test_retry_job_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = cloud_deploy.RetryJobResponse() + post_with_metadata.return_value = cloud_deploy.RetryJobResponse(), metadata client.retry_job( request, @@ -35858,6 +36103,7 @@ def test_retry_job_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_job_runs_rest_bad_request(request_type=cloud_deploy.ListJobRunsRequest): @@ -35946,10 +36192,13 @@ def test_list_job_runs_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.CloudDeployRestInterceptor, "post_list_job_runs" ) as post, mock.patch.object( + transports.CloudDeployRestInterceptor, "post_list_job_runs_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.CloudDeployRestInterceptor, "pre_list_job_runs" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = cloud_deploy.ListJobRunsRequest.pb( cloud_deploy.ListJobRunsRequest() ) @@ -35975,6 +36224,7 @@ def test_list_job_runs_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = cloud_deploy.ListJobRunsResponse() + post_with_metadata.return_value = cloud_deploy.ListJobRunsResponse(), metadata client.list_job_runs( request, @@ -35986,6 +36236,7 @@ def test_list_job_runs_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_job_run_rest_bad_request(request_type=cloud_deploy.GetJobRunRequest): @@ -36082,10 +36333,13 @@ def test_get_job_run_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.CloudDeployRestInterceptor, "post_get_job_run" ) as post, mock.patch.object( + transports.CloudDeployRestInterceptor, "post_get_job_run_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.CloudDeployRestInterceptor, "pre_get_job_run" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = cloud_deploy.GetJobRunRequest.pb(cloud_deploy.GetJobRunRequest()) transcode.return_value = { "method": "post", @@ -36107,6 +36361,7 @@ def test_get_job_run_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = cloud_deploy.JobRun() + post_with_metadata.return_value = cloud_deploy.JobRun(), metadata client.get_job_run( request, @@ -36118,6 +36373,7 @@ def test_get_job_run_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_terminate_job_run_rest_bad_request( @@ -36203,10 +36459,13 @@ def test_terminate_job_run_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.CloudDeployRestInterceptor, "post_terminate_job_run" ) as post, mock.patch.object( + transports.CloudDeployRestInterceptor, "post_terminate_job_run_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.CloudDeployRestInterceptor, "pre_terminate_job_run" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = cloud_deploy.TerminateJobRunRequest.pb( cloud_deploy.TerminateJobRunRequest() ) @@ -36232,6 +36491,10 @@ def 
test_terminate_job_run_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = cloud_deploy.TerminateJobRunResponse() + post_with_metadata.return_value = ( + cloud_deploy.TerminateJobRunResponse(), + metadata, + ) client.terminate_job_run( request, @@ -36243,6 +36506,7 @@ def test_terminate_job_run_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_config_rest_bad_request(request_type=cloud_deploy.GetConfigRequest): @@ -36327,10 +36591,13 @@ def test_get_config_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.CloudDeployRestInterceptor, "post_get_config" ) as post, mock.patch.object( + transports.CloudDeployRestInterceptor, "post_get_config_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.CloudDeployRestInterceptor, "pre_get_config" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = cloud_deploy.GetConfigRequest.pb(cloud_deploy.GetConfigRequest()) transcode.return_value = { "method": "post", @@ -36352,6 +36619,7 @@ def test_get_config_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = cloud_deploy.Config() + post_with_metadata.return_value = cloud_deploy.Config(), metadata client.get_config( request, @@ -36363,6 +36631,7 @@ def test_get_config_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_automation_rest_bad_request( @@ -36585,10 +36854,13 @@ def test_create_automation_rest_interceptors(null_interceptor): ), mock.patch.object( transports.CloudDeployRestInterceptor, "post_create_automation" ) as post, mock.patch.object( + transports.CloudDeployRestInterceptor, "post_create_automation_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.CloudDeployRestInterceptor, "pre_create_automation" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = cloud_deploy.CreateAutomationRequest.pb( cloud_deploy.CreateAutomationRequest() ) @@ -36612,6 +36884,7 @@ def test_create_automation_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_automation( request, @@ -36623,6 +36896,7 @@ def test_create_automation_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_automation_rest_bad_request( @@ -36849,10 +37123,13 @@ def test_update_automation_rest_interceptors(null_interceptor): ), mock.patch.object( transports.CloudDeployRestInterceptor, "post_update_automation" ) as post, mock.patch.object( + transports.CloudDeployRestInterceptor, "post_update_automation_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.CloudDeployRestInterceptor, "pre_update_automation" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = cloud_deploy.UpdateAutomationRequest.pb( cloud_deploy.UpdateAutomationRequest() ) @@ -36876,6 +37153,7 @@ def test_update_automation_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = 
operations_pb2.Operation(), metadata client.update_automation( request, @@ -36887,6 +37165,7 @@ def test_update_automation_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_automation_rest_bad_request( @@ -36971,10 +37250,13 @@ def test_delete_automation_rest_interceptors(null_interceptor): ), mock.patch.object( transports.CloudDeployRestInterceptor, "post_delete_automation" ) as post, mock.patch.object( + transports.CloudDeployRestInterceptor, "post_delete_automation_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.CloudDeployRestInterceptor, "pre_delete_automation" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = cloud_deploy.DeleteAutomationRequest.pb( cloud_deploy.DeleteAutomationRequest() ) @@ -36998,6 +37280,7 @@ def test_delete_automation_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.delete_automation( request, @@ -37009,6 +37292,7 @@ def test_delete_automation_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_automation_rest_bad_request( @@ -37107,10 +37391,13 @@ def test_get_automation_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.CloudDeployRestInterceptor, "post_get_automation" ) as post, mock.patch.object( + transports.CloudDeployRestInterceptor, "post_get_automation_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.CloudDeployRestInterceptor, "pre_get_automation" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = cloud_deploy.GetAutomationRequest.pb( cloud_deploy.GetAutomationRequest() ) @@ -37134,6 +37421,7 @@ def test_get_automation_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = cloud_deploy.Automation() + post_with_metadata.return_value = cloud_deploy.Automation(), metadata client.get_automation( request, @@ -37145,6 +37433,7 @@ def test_get_automation_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_automations_rest_bad_request( @@ -37235,10 +37524,13 @@ def test_list_automations_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.CloudDeployRestInterceptor, "post_list_automations" ) as post, mock.patch.object( + transports.CloudDeployRestInterceptor, "post_list_automations_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.CloudDeployRestInterceptor, "pre_list_automations" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = cloud_deploy.ListAutomationsRequest.pb( cloud_deploy.ListAutomationsRequest() ) @@ -37264,6 +37556,10 @@ def test_list_automations_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = cloud_deploy.ListAutomationsResponse() + post_with_metadata.return_value = ( + cloud_deploy.ListAutomationsResponse(), + metadata, + ) client.list_automations( request, @@ -37275,6 +37571,7 @@ def test_list_automations_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + 
post_with_metadata.assert_called_once() def test_get_automation_run_rest_bad_request( @@ -37377,10 +37674,13 @@ def test_get_automation_run_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.CloudDeployRestInterceptor, "post_get_automation_run" ) as post, mock.patch.object( + transports.CloudDeployRestInterceptor, "post_get_automation_run_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.CloudDeployRestInterceptor, "pre_get_automation_run" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = cloud_deploy.GetAutomationRunRequest.pb( cloud_deploy.GetAutomationRunRequest() ) @@ -37404,6 +37704,7 @@ def test_get_automation_run_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = cloud_deploy.AutomationRun() + post_with_metadata.return_value = cloud_deploy.AutomationRun(), metadata client.get_automation_run( request, @@ -37415,6 +37716,7 @@ def test_get_automation_run_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_automation_runs_rest_bad_request( @@ -37505,10 +37807,13 @@ def test_list_automation_runs_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.CloudDeployRestInterceptor, "post_list_automation_runs" ) as post, mock.patch.object( + transports.CloudDeployRestInterceptor, "post_list_automation_runs_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.CloudDeployRestInterceptor, "pre_list_automation_runs" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = cloud_deploy.ListAutomationRunsRequest.pb( cloud_deploy.ListAutomationRunsRequest() ) @@ -37534,6 +37839,10 @@ def test_list_automation_runs_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = cloud_deploy.ListAutomationRunsResponse() + post_with_metadata.return_value = ( + cloud_deploy.ListAutomationRunsResponse(), + metadata, + ) client.list_automation_runs( request, @@ -37545,6 +37854,7 @@ def test_list_automation_runs_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_cancel_automation_run_rest_bad_request( @@ -37630,10 +37940,14 @@ def test_cancel_automation_run_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.CloudDeployRestInterceptor, "post_cancel_automation_run" ) as post, mock.patch.object( + transports.CloudDeployRestInterceptor, + "post_cancel_automation_run_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.CloudDeployRestInterceptor, "pre_cancel_automation_run" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = cloud_deploy.CancelAutomationRunRequest.pb( cloud_deploy.CancelAutomationRunRequest() ) @@ -37659,6 +37973,10 @@ def test_cancel_automation_run_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = cloud_deploy.CancelAutomationRunResponse() + post_with_metadata.return_value = ( + cloud_deploy.CancelAutomationRunResponse(), + metadata, + ) client.cancel_automation_run( request, @@ -37670,6 +37988,7 @@ def test_cancel_automation_run_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def 
test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationRequest): diff --git a/packages/google-cloud-developerconnect/CHANGELOG.md b/packages/google-cloud-developerconnect/CHANGELOG.md index c1c9e0b6fa8d..fc31bd60e324 100644 --- a/packages/google-cloud-developerconnect/CHANGELOG.md +++ b/packages/google-cloud-developerconnect/CHANGELOG.md @@ -1,5 +1,13 @@ # Changelog +## [0.1.7](https://github.com/googleapis/google-cloud-python/compare/google-cloud-developerconnect-v0.1.6...google-cloud-developerconnect-v0.1.7) (2025-02-12) + + +### Features + +* Add REST Interceptors which support reading metadata ([e92d527](https://github.com/googleapis/google-cloud-python/commit/e92d52797ffbce45d033eb81af24e0cad32baa55)) +* Add support for reading selective GAPIC generation methods from service YAML ([e92d527](https://github.com/googleapis/google-cloud-python/commit/e92d52797ffbce45d033eb81af24e0cad32baa55)) + ## [0.1.6](https://github.com/googleapis/google-cloud-python/compare/google-cloud-developerconnect-v0.1.5...google-cloud-developerconnect-v0.1.6) (2024-12-12) diff --git a/packages/google-cloud-developerconnect/google/cloud/developerconnect/gapic_version.py b/packages/google-cloud-developerconnect/google/cloud/developerconnect/gapic_version.py index 51d2795b9d6b..cf5493b86bbc 100644 --- a/packages/google-cloud-developerconnect/google/cloud/developerconnect/gapic_version.py +++ b/packages/google-cloud-developerconnect/google/cloud/developerconnect/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.1.6" # {x-release-please-version} +__version__ = "0.1.7" # {x-release-please-version} diff --git a/packages/google-cloud-developerconnect/google/cloud/developerconnect_v1/gapic_version.py b/packages/google-cloud-developerconnect/google/cloud/developerconnect_v1/gapic_version.py index 51d2795b9d6b..cf5493b86bbc 100644 --- a/packages/google-cloud-developerconnect/google/cloud/developerconnect_v1/gapic_version.py +++ b/packages/google-cloud-developerconnect/google/cloud/developerconnect_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.1.6" # {x-release-please-version} +__version__ = "0.1.7" # {x-release-please-version} diff --git a/packages/google-cloud-developerconnect/google/cloud/developerconnect_v1/services/developer_connect/client.py b/packages/google-cloud-developerconnect/google/cloud/developerconnect_v1/services/developer_connect/client.py index 134b9ac4e341..0cab9cbed5db 100644 --- a/packages/google-cloud-developerconnect/google/cloud/developerconnect_v1/services/developer_connect/client.py +++ b/packages/google-cloud-developerconnect/google/cloud/developerconnect_v1/services/developer_connect/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -588,6 +590,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. 
+ """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -2580,16 +2609,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -2635,16 +2668,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def delete_operation( self, @@ -2801,16 +2838,20 @@ def get_location( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def list_locations( self, @@ -2856,16 +2897,20 @@ def list_locations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. 
+ return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-developerconnect/google/cloud/developerconnect_v1/services/developer_connect/transports/rest.py b/packages/google-cloud-developerconnect/google/cloud/developerconnect_v1/services/developer_connect/transports/rest.py index 4dec0bdacd8c..b632621bb8d5 100644 --- a/packages/google-cloud-developerconnect/google/cloud/developerconnect_v1/services/developer_connect/transports/rest.py +++ b/packages/google-cloud-developerconnect/google/cloud/developerconnect_v1/services/developer_connect/transports/rest.py @@ -208,12 +208,35 @@ def post_create_connection( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_connection - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_connection_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DeveloperConnect server but before - it is returned to user code. + it is returned to user code. This `post_create_connection` interceptor runs + before the `post_create_connection_with_metadata` interceptor. """ return response + def post_create_connection_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_connection + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DeveloperConnect server but before it is returned to user code. + + We recommend only using this `post_create_connection_with_metadata` + interceptor in new development instead of the `post_create_connection` interceptor. + When both interceptors are used, this `post_create_connection_with_metadata` interceptor runs after the + `post_create_connection` interceptor. The (possibly modified) response returned by + `post_create_connection` will be passed to + `post_create_connection_with_metadata`. + """ + return response, metadata + def pre_create_git_repository_link( self, request: developer_connect.CreateGitRepositoryLinkRequest, @@ -234,12 +257,35 @@ def post_create_git_repository_link( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_git_repository_link - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_git_repository_link_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DeveloperConnect server but before - it is returned to user code. + it is returned to user code. This `post_create_git_repository_link` interceptor runs + before the `post_create_git_repository_link_with_metadata` interceptor. """ return response + def post_create_git_repository_link_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_git_repository_link + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DeveloperConnect server but before it is returned to user code. 
+ + We recommend only using this `post_create_git_repository_link_with_metadata` + interceptor in new development instead of the `post_create_git_repository_link` interceptor. + When both interceptors are used, this `post_create_git_repository_link_with_metadata` interceptor runs after the + `post_create_git_repository_link` interceptor. The (possibly modified) response returned by + `post_create_git_repository_link` will be passed to + `post_create_git_repository_link_with_metadata`. + """ + return response, metadata + def pre_delete_connection( self, request: developer_connect.DeleteConnectionRequest, @@ -260,12 +306,35 @@ def post_delete_connection( ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_connection - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_connection_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DeveloperConnect server but before - it is returned to user code. + it is returned to user code. This `post_delete_connection` interceptor runs + before the `post_delete_connection_with_metadata` interceptor. """ return response + def post_delete_connection_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_connection + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DeveloperConnect server but before it is returned to user code. + + We recommend only using this `post_delete_connection_with_metadata` + interceptor in new development instead of the `post_delete_connection` interceptor. + When both interceptors are used, this `post_delete_connection_with_metadata` interceptor runs after the + `post_delete_connection` interceptor. The (possibly modified) response returned by + `post_delete_connection` will be passed to + `post_delete_connection_with_metadata`. + """ + return response, metadata + def pre_delete_git_repository_link( self, request: developer_connect.DeleteGitRepositoryLinkRequest, @@ -286,12 +355,35 @@ def post_delete_git_repository_link( ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_git_repository_link - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_git_repository_link_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DeveloperConnect server but before - it is returned to user code. + it is returned to user code. This `post_delete_git_repository_link` interceptor runs + before the `post_delete_git_repository_link_with_metadata` interceptor. """ return response + def post_delete_git_repository_link_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_git_repository_link + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DeveloperConnect server but before it is returned to user code. + + We recommend only using this `post_delete_git_repository_link_with_metadata` + interceptor in new development instead of the `post_delete_git_repository_link` interceptor. 
+ When both interceptors are used, this `post_delete_git_repository_link_with_metadata` interceptor runs after the + `post_delete_git_repository_link` interceptor. The (possibly modified) response returned by + `post_delete_git_repository_link` will be passed to + `post_delete_git_repository_link_with_metadata`. + """ + return response, metadata + def pre_fetch_git_hub_installations( self, request: developer_connect.FetchGitHubInstallationsRequest, @@ -312,12 +404,38 @@ def post_fetch_git_hub_installations( ) -> developer_connect.FetchGitHubInstallationsResponse: """Post-rpc interceptor for fetch_git_hub_installations - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_fetch_git_hub_installations_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DeveloperConnect server but before - it is returned to user code. + it is returned to user code. This `post_fetch_git_hub_installations` interceptor runs + before the `post_fetch_git_hub_installations_with_metadata` interceptor. """ return response + def post_fetch_git_hub_installations_with_metadata( + self, + response: developer_connect.FetchGitHubInstallationsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + developer_connect.FetchGitHubInstallationsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for fetch_git_hub_installations + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DeveloperConnect server but before it is returned to user code. + + We recommend only using this `post_fetch_git_hub_installations_with_metadata` + interceptor in new development instead of the `post_fetch_git_hub_installations` interceptor. + When both interceptors are used, this `post_fetch_git_hub_installations_with_metadata` interceptor runs after the + `post_fetch_git_hub_installations` interceptor. The (possibly modified) response returned by + `post_fetch_git_hub_installations` will be passed to + `post_fetch_git_hub_installations_with_metadata`. + """ + return response, metadata + def pre_fetch_git_refs( self, request: developer_connect.FetchGitRefsRequest, @@ -337,12 +455,37 @@ def post_fetch_git_refs( ) -> developer_connect.FetchGitRefsResponse: """Post-rpc interceptor for fetch_git_refs - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_fetch_git_refs_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DeveloperConnect server but before - it is returned to user code. + it is returned to user code. This `post_fetch_git_refs` interceptor runs + before the `post_fetch_git_refs_with_metadata` interceptor. """ return response + def post_fetch_git_refs_with_metadata( + self, + response: developer_connect.FetchGitRefsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + developer_connect.FetchGitRefsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for fetch_git_refs + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DeveloperConnect server but before it is returned to user code. + + We recommend only using this `post_fetch_git_refs_with_metadata` + interceptor in new development instead of the `post_fetch_git_refs` interceptor. 
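Editor's note (the ordering described here repeats for every RPC in this file): when both hooks are overridden, the deprecated `post_*` hook runs first and its return value is what the `*_with_metadata` hook receives. A sketch of an interceptor mid-migration, using `fetch_git_refs` as the example:

    from google.cloud.developerconnect_v1.services.developer_connect.transports.rest import (
        DeveloperConnectRestInterceptor,
    )

    class MigratingInterceptor(DeveloperConnectRestInterceptor):
        # Legacy hook: still supported, runs first.
        def post_fetch_git_refs(self, response):
            return response

        # Preferred hook: receives the value returned by post_fetch_git_refs
        # above, plus the HTTP response headers.
        def post_fetch_git_refs_with_metadata(self, response, metadata):
            return response, metadata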
+ When both interceptors are used, this `post_fetch_git_refs_with_metadata` interceptor runs after the + `post_fetch_git_refs` interceptor. The (possibly modified) response returned by + `post_fetch_git_refs` will be passed to + `post_fetch_git_refs_with_metadata`. + """ + return response, metadata + def pre_fetch_linkable_git_repositories( self, request: developer_connect.FetchLinkableGitRepositoriesRequest, @@ -363,12 +506,38 @@ def post_fetch_linkable_git_repositories( ) -> developer_connect.FetchLinkableGitRepositoriesResponse: """Post-rpc interceptor for fetch_linkable_git_repositories - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_fetch_linkable_git_repositories_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DeveloperConnect server but before - it is returned to user code. + it is returned to user code. This `post_fetch_linkable_git_repositories` interceptor runs + before the `post_fetch_linkable_git_repositories_with_metadata` interceptor. """ return response + def post_fetch_linkable_git_repositories_with_metadata( + self, + response: developer_connect.FetchLinkableGitRepositoriesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + developer_connect.FetchLinkableGitRepositoriesResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for fetch_linkable_git_repositories + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DeveloperConnect server but before it is returned to user code. + + We recommend only using this `post_fetch_linkable_git_repositories_with_metadata` + interceptor in new development instead of the `post_fetch_linkable_git_repositories` interceptor. + When both interceptors are used, this `post_fetch_linkable_git_repositories_with_metadata` interceptor runs after the + `post_fetch_linkable_git_repositories` interceptor. The (possibly modified) response returned by + `post_fetch_linkable_git_repositories` will be passed to + `post_fetch_linkable_git_repositories_with_metadata`. + """ + return response, metadata + def pre_fetch_read_token( self, request: developer_connect.FetchReadTokenRequest, @@ -388,12 +557,38 @@ def post_fetch_read_token( ) -> developer_connect.FetchReadTokenResponse: """Post-rpc interceptor for fetch_read_token - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_fetch_read_token_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DeveloperConnect server but before - it is returned to user code. + it is returned to user code. This `post_fetch_read_token` interceptor runs + before the `post_fetch_read_token_with_metadata` interceptor. """ return response + def post_fetch_read_token_with_metadata( + self, + response: developer_connect.FetchReadTokenResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + developer_connect.FetchReadTokenResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for fetch_read_token + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DeveloperConnect server but before it is returned to user code. + + We recommend only using this `post_fetch_read_token_with_metadata` + interceptor in new development instead of the `post_fetch_read_token` interceptor. 
+ When both interceptors are used, this `post_fetch_read_token_with_metadata` interceptor runs after the + `post_fetch_read_token` interceptor. The (possibly modified) response returned by + `post_fetch_read_token` will be passed to + `post_fetch_read_token_with_metadata`. + """ + return response, metadata + def pre_fetch_read_write_token( self, request: developer_connect.FetchReadWriteTokenRequest, @@ -414,12 +609,38 @@ def post_fetch_read_write_token( ) -> developer_connect.FetchReadWriteTokenResponse: """Post-rpc interceptor for fetch_read_write_token - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_fetch_read_write_token_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DeveloperConnect server but before - it is returned to user code. + it is returned to user code. This `post_fetch_read_write_token` interceptor runs + before the `post_fetch_read_write_token_with_metadata` interceptor. """ return response + def post_fetch_read_write_token_with_metadata( + self, + response: developer_connect.FetchReadWriteTokenResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + developer_connect.FetchReadWriteTokenResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for fetch_read_write_token + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DeveloperConnect server but before it is returned to user code. + + We recommend only using this `post_fetch_read_write_token_with_metadata` + interceptor in new development instead of the `post_fetch_read_write_token` interceptor. + When both interceptors are used, this `post_fetch_read_write_token_with_metadata` interceptor runs after the + `post_fetch_read_write_token` interceptor. The (possibly modified) response returned by + `post_fetch_read_write_token` will be passed to + `post_fetch_read_write_token_with_metadata`. + """ + return response, metadata + def pre_get_connection( self, request: developer_connect.GetConnectionRequest, @@ -439,12 +660,35 @@ def post_get_connection( ) -> developer_connect.Connection: """Post-rpc interceptor for get_connection - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_connection_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DeveloperConnect server but before - it is returned to user code. + it is returned to user code. This `post_get_connection` interceptor runs + before the `post_get_connection_with_metadata` interceptor. """ return response + def post_get_connection_with_metadata( + self, + response: developer_connect.Connection, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[developer_connect.Connection, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_connection + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DeveloperConnect server but before it is returned to user code. + + We recommend only using this `post_get_connection_with_metadata` + interceptor in new development instead of the `post_get_connection` interceptor. + When both interceptors are used, this `post_get_connection_with_metadata` interceptor runs after the + `post_get_connection` interceptor. 
The (possibly modified) response returned by + `post_get_connection` will be passed to + `post_get_connection_with_metadata`. + """ + return response, metadata + def pre_get_git_repository_link( self, request: developer_connect.GetGitRepositoryLinkRequest, @@ -465,12 +709,37 @@ def post_get_git_repository_link( ) -> developer_connect.GitRepositoryLink: """Post-rpc interceptor for get_git_repository_link - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_git_repository_link_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DeveloperConnect server but before - it is returned to user code. + it is returned to user code. This `post_get_git_repository_link` interceptor runs + before the `post_get_git_repository_link_with_metadata` interceptor. """ return response + def post_get_git_repository_link_with_metadata( + self, + response: developer_connect.GitRepositoryLink, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + developer_connect.GitRepositoryLink, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for get_git_repository_link + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DeveloperConnect server but before it is returned to user code. + + We recommend only using this `post_get_git_repository_link_with_metadata` + interceptor in new development instead of the `post_get_git_repository_link` interceptor. + When both interceptors are used, this `post_get_git_repository_link_with_metadata` interceptor runs after the + `post_get_git_repository_link` interceptor. The (possibly modified) response returned by + `post_get_git_repository_link` will be passed to + `post_get_git_repository_link_with_metadata`. + """ + return response, metadata + def pre_list_connections( self, request: developer_connect.ListConnectionsRequest, @@ -491,12 +760,38 @@ def post_list_connections( ) -> developer_connect.ListConnectionsResponse: """Post-rpc interceptor for list_connections - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_connections_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DeveloperConnect server but before - it is returned to user code. + it is returned to user code. This `post_list_connections` interceptor runs + before the `post_list_connections_with_metadata` interceptor. """ return response + def post_list_connections_with_metadata( + self, + response: developer_connect.ListConnectionsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + developer_connect.ListConnectionsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_connections + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DeveloperConnect server but before it is returned to user code. + + We recommend only using this `post_list_connections_with_metadata` + interceptor in new development instead of the `post_list_connections` interceptor. + When both interceptors are used, this `post_list_connections_with_metadata` interceptor runs after the + `post_list_connections` interceptor. The (possibly modified) response returned by + `post_list_connections` will be passed to + `post_list_connections_with_metadata`. 
+ """ + return response, metadata + def pre_list_git_repository_links( self, request: developer_connect.ListGitRepositoryLinksRequest, @@ -517,12 +812,38 @@ def post_list_git_repository_links( ) -> developer_connect.ListGitRepositoryLinksResponse: """Post-rpc interceptor for list_git_repository_links - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_git_repository_links_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DeveloperConnect server but before - it is returned to user code. + it is returned to user code. This `post_list_git_repository_links` interceptor runs + before the `post_list_git_repository_links_with_metadata` interceptor. """ return response + def post_list_git_repository_links_with_metadata( + self, + response: developer_connect.ListGitRepositoryLinksResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + developer_connect.ListGitRepositoryLinksResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_git_repository_links + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DeveloperConnect server but before it is returned to user code. + + We recommend only using this `post_list_git_repository_links_with_metadata` + interceptor in new development instead of the `post_list_git_repository_links` interceptor. + When both interceptors are used, this `post_list_git_repository_links_with_metadata` interceptor runs after the + `post_list_git_repository_links` interceptor. The (possibly modified) response returned by + `post_list_git_repository_links` will be passed to + `post_list_git_repository_links_with_metadata`. + """ + return response, metadata + def pre_update_connection( self, request: developer_connect.UpdateConnectionRequest, @@ -543,12 +864,35 @@ def post_update_connection( ) -> operations_pb2.Operation: """Post-rpc interceptor for update_connection - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_connection_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DeveloperConnect server but before - it is returned to user code. + it is returned to user code. This `post_update_connection` interceptor runs + before the `post_update_connection_with_metadata` interceptor. """ return response + def post_update_connection_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_connection + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DeveloperConnect server but before it is returned to user code. + + We recommend only using this `post_update_connection_with_metadata` + interceptor in new development instead of the `post_update_connection` interceptor. + When both interceptors are used, this `post_update_connection_with_metadata` interceptor runs after the + `post_update_connection` interceptor. The (possibly modified) response returned by + `post_update_connection` will be passed to + `post_update_connection_with_metadata`. 
+ """ + return response, metadata + def pre_get_location( self, request: locations_pb2.GetLocationRequest, @@ -963,6 +1307,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_connection(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_connection_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1117,6 +1465,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_git_repository_link(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_git_repository_link_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1261,6 +1613,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_connection(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_connection_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1409,6 +1765,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_git_repository_link(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_git_repository_link_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1558,6 +1918,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_fetch_git_hub_installations(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_fetch_git_hub_installations_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1702,6 +2066,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_fetch_git_refs(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_fetch_git_refs_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1851,6 +2219,13 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_fetch_linkable_git_repositories(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_fetch_linkable_git_repositories_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2005,6 +2380,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_fetch_read_token(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_fetch_read_token_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( 
logging.DEBUG ): # pragma: NO COVER @@ -2159,6 +2538,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_fetch_read_write_token(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_fetch_read_write_token_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2301,6 +2684,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_connection(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_connection_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2449,6 +2836,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_git_repository_link(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_git_repository_link_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2597,6 +2988,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_connections(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_connections_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2747,6 +3142,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_git_repository_links(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_git_repository_links_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2901,6 +3300,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_connection(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_connection_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-developerconnect/samples/generated_samples/snippet_metadata_google.cloud.developerconnect.v1.json b/packages/google-cloud-developerconnect/samples/generated_samples/snippet_metadata_google.cloud.developerconnect.v1.json index ad7feba7c0ba..dcfe6e0bbfa4 100644 --- a/packages/google-cloud-developerconnect/samples/generated_samples/snippet_metadata_google.cloud.developerconnect.v1.json +++ b/packages/google-cloud-developerconnect/samples/generated_samples/snippet_metadata_google.cloud.developerconnect.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-developerconnect", - "version": "0.1.6" + "version": "0.1.7" }, "snippets": [ { diff --git a/packages/google-cloud-developerconnect/tests/unit/gapic/developerconnect_v1/test_developer_connect.py b/packages/google-cloud-developerconnect/tests/unit/gapic/developerconnect_v1/test_developer_connect.py index 1421dbac69c1..2802719629a9 
100644 --- a/packages/google-cloud-developerconnect/tests/unit/gapic/developerconnect_v1/test_developer_connect.py +++ b/packages/google-cloud-developerconnect/tests/unit/gapic/developerconnect_v1/test_developer_connect.py @@ -74,6 +74,13 @@ ) from google.cloud.developerconnect_v1.types import developer_connect +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -333,6 +340,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = DeveloperConnectClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = DeveloperConnectClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -10820,10 +10870,14 @@ def test_list_connections_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DeveloperConnectRestInterceptor, "post_list_connections" ) as post, mock.patch.object( + transports.DeveloperConnectRestInterceptor, + "post_list_connections_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DeveloperConnectRestInterceptor, "pre_list_connections" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = developer_connect.ListConnectionsRequest.pb( developer_connect.ListConnectionsRequest() ) @@ -10849,6 +10903,10 @@ def test_list_connections_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = developer_connect.ListConnectionsResponse() + post_with_metadata.return_value = ( + developer_connect.ListConnectionsResponse(), + metadata, + ) client.list_connections( request, @@ -10860,6 +10918,7 @@ def test_list_connections_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_connection_rest_bad_request( @@ -10952,10 +11011,13 @@ def test_get_connection_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DeveloperConnectRestInterceptor, "post_get_connection" ) as post, mock.patch.object( + 
transports.DeveloperConnectRestInterceptor, "post_get_connection_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DeveloperConnectRestInterceptor, "pre_get_connection" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = developer_connect.GetConnectionRequest.pb( developer_connect.GetConnectionRequest() ) @@ -10981,6 +11043,7 @@ def test_get_connection_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = developer_connect.Connection() + post_with_metadata.return_value = developer_connect.Connection(), metadata client.get_connection( request, @@ -10992,6 +11055,7 @@ def test_get_connection_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_connection_rest_bad_request( @@ -11195,10 +11259,14 @@ def test_create_connection_rest_interceptors(null_interceptor): ), mock.patch.object( transports.DeveloperConnectRestInterceptor, "post_create_connection" ) as post, mock.patch.object( + transports.DeveloperConnectRestInterceptor, + "post_create_connection_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DeveloperConnectRestInterceptor, "pre_create_connection" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = developer_connect.CreateConnectionRequest.pb( developer_connect.CreateConnectionRequest() ) @@ -11222,6 +11290,7 @@ def test_create_connection_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_connection( request, @@ -11233,6 +11302,7 @@ def test_create_connection_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_connection_rest_bad_request( @@ -11440,10 +11510,14 @@ def test_update_connection_rest_interceptors(null_interceptor): ), mock.patch.object( transports.DeveloperConnectRestInterceptor, "post_update_connection" ) as post, mock.patch.object( + transports.DeveloperConnectRestInterceptor, + "post_update_connection_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DeveloperConnectRestInterceptor, "pre_update_connection" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = developer_connect.UpdateConnectionRequest.pb( developer_connect.UpdateConnectionRequest() ) @@ -11467,6 +11541,7 @@ def test_update_connection_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.update_connection( request, @@ -11478,6 +11553,7 @@ def test_update_connection_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_connection_rest_bad_request( @@ -11558,10 +11634,14 @@ def test_delete_connection_rest_interceptors(null_interceptor): ), mock.patch.object( transports.DeveloperConnectRestInterceptor, "post_delete_connection" ) as post, mock.patch.object( + transports.DeveloperConnectRestInterceptor, + "post_delete_connection_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DeveloperConnectRestInterceptor, 
"pre_delete_connection" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = developer_connect.DeleteConnectionRequest.pb( developer_connect.DeleteConnectionRequest() ) @@ -11585,6 +11665,7 @@ def test_delete_connection_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.delete_connection( request, @@ -11596,6 +11677,7 @@ def test_delete_connection_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_git_repository_link_rest_bad_request( @@ -11758,10 +11840,14 @@ def test_create_git_repository_link_rest_interceptors(null_interceptor): ), mock.patch.object( transports.DeveloperConnectRestInterceptor, "post_create_git_repository_link" ) as post, mock.patch.object( + transports.DeveloperConnectRestInterceptor, + "post_create_git_repository_link_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DeveloperConnectRestInterceptor, "pre_create_git_repository_link" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = developer_connect.CreateGitRepositoryLinkRequest.pb( developer_connect.CreateGitRepositoryLinkRequest() ) @@ -11785,6 +11871,7 @@ def test_create_git_repository_link_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_git_repository_link( request, @@ -11796,6 +11883,7 @@ def test_create_git_repository_link_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_git_repository_link_rest_bad_request( @@ -11880,10 +11968,14 @@ def test_delete_git_repository_link_rest_interceptors(null_interceptor): ), mock.patch.object( transports.DeveloperConnectRestInterceptor, "post_delete_git_repository_link" ) as post, mock.patch.object( + transports.DeveloperConnectRestInterceptor, + "post_delete_git_repository_link_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DeveloperConnectRestInterceptor, "pre_delete_git_repository_link" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = developer_connect.DeleteGitRepositoryLinkRequest.pb( developer_connect.DeleteGitRepositoryLinkRequest() ) @@ -11907,6 +11999,7 @@ def test_delete_git_repository_link_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.delete_git_repository_link( request, @@ -11918,6 +12011,7 @@ def test_delete_git_repository_link_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_git_repository_links_rest_bad_request( @@ -12004,10 +12098,14 @@ def test_list_git_repository_links_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DeveloperConnectRestInterceptor, "post_list_git_repository_links" ) as post, mock.patch.object( + transports.DeveloperConnectRestInterceptor, + "post_list_git_repository_links_with_metadata", + ) as post_with_metadata, mock.patch.object( 
transports.DeveloperConnectRestInterceptor, "pre_list_git_repository_links" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = developer_connect.ListGitRepositoryLinksRequest.pb( developer_connect.ListGitRepositoryLinksRequest() ) @@ -12033,6 +12131,10 @@ def test_list_git_repository_links_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = developer_connect.ListGitRepositoryLinksResponse() + post_with_metadata.return_value = ( + developer_connect.ListGitRepositoryLinksResponse(), + metadata, + ) client.list_git_repository_links( request, @@ -12044,6 +12146,7 @@ def test_list_git_repository_links_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_git_repository_link_rest_bad_request( @@ -12142,10 +12245,14 @@ def test_get_git_repository_link_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DeveloperConnectRestInterceptor, "post_get_git_repository_link" ) as post, mock.patch.object( + transports.DeveloperConnectRestInterceptor, + "post_get_git_repository_link_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DeveloperConnectRestInterceptor, "pre_get_git_repository_link" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = developer_connect.GetGitRepositoryLinkRequest.pb( developer_connect.GetGitRepositoryLinkRequest() ) @@ -12171,6 +12278,10 @@ def test_get_git_repository_link_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = developer_connect.GitRepositoryLink() + post_with_metadata.return_value = ( + developer_connect.GitRepositoryLink(), + metadata, + ) client.get_git_repository_link( request, @@ -12182,6 +12293,7 @@ def test_get_git_repository_link_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_fetch_read_write_token_rest_bad_request( @@ -12272,10 +12384,14 @@ def test_fetch_read_write_token_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DeveloperConnectRestInterceptor, "post_fetch_read_write_token" ) as post, mock.patch.object( + transports.DeveloperConnectRestInterceptor, + "post_fetch_read_write_token_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DeveloperConnectRestInterceptor, "pre_fetch_read_write_token" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = developer_connect.FetchReadWriteTokenRequest.pb( developer_connect.FetchReadWriteTokenRequest() ) @@ -12301,6 +12417,10 @@ def test_fetch_read_write_token_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = developer_connect.FetchReadWriteTokenResponse() + post_with_metadata.return_value = ( + developer_connect.FetchReadWriteTokenResponse(), + metadata, + ) client.fetch_read_write_token( request, @@ -12312,6 +12432,7 @@ def test_fetch_read_write_token_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_fetch_read_token_rest_bad_request( @@ -12402,10 +12523,14 @@ def test_fetch_read_token_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DeveloperConnectRestInterceptor, "post_fetch_read_token" ) as post, 
mock.patch.object( + transports.DeveloperConnectRestInterceptor, + "post_fetch_read_token_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DeveloperConnectRestInterceptor, "pre_fetch_read_token" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = developer_connect.FetchReadTokenRequest.pb( developer_connect.FetchReadTokenRequest() ) @@ -12431,6 +12556,10 @@ def test_fetch_read_token_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = developer_connect.FetchReadTokenResponse() + post_with_metadata.return_value = ( + developer_connect.FetchReadTokenResponse(), + metadata, + ) client.fetch_read_token( request, @@ -12442,6 +12571,7 @@ def test_fetch_read_token_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_fetch_linkable_git_repositories_rest_bad_request( @@ -12533,11 +12663,15 @@ def test_fetch_linkable_git_repositories_rest_interceptors(null_interceptor): transports.DeveloperConnectRestInterceptor, "post_fetch_linkable_git_repositories", ) as post, mock.patch.object( + transports.DeveloperConnectRestInterceptor, + "post_fetch_linkable_git_repositories_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DeveloperConnectRestInterceptor, "pre_fetch_linkable_git_repositories", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = developer_connect.FetchLinkableGitRepositoriesRequest.pb( developer_connect.FetchLinkableGitRepositoriesRequest() ) @@ -12563,6 +12697,10 @@ def test_fetch_linkable_git_repositories_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = developer_connect.FetchLinkableGitRepositoriesResponse() + post_with_metadata.return_value = ( + developer_connect.FetchLinkableGitRepositoriesResponse(), + metadata, + ) client.fetch_linkable_git_repositories( request, @@ -12574,6 +12712,7 @@ def test_fetch_linkable_git_repositories_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_fetch_git_hub_installations_rest_bad_request( @@ -12661,10 +12800,14 @@ def test_fetch_git_hub_installations_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DeveloperConnectRestInterceptor, "post_fetch_git_hub_installations" ) as post, mock.patch.object( + transports.DeveloperConnectRestInterceptor, + "post_fetch_git_hub_installations_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DeveloperConnectRestInterceptor, "pre_fetch_git_hub_installations" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = developer_connect.FetchGitHubInstallationsRequest.pb( developer_connect.FetchGitHubInstallationsRequest() ) @@ -12690,6 +12833,10 @@ def test_fetch_git_hub_installations_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = developer_connect.FetchGitHubInstallationsResponse() + post_with_metadata.return_value = ( + developer_connect.FetchGitHubInstallationsResponse(), + metadata, + ) client.fetch_git_hub_installations( request, @@ -12701,6 +12848,7 @@ def test_fetch_git_hub_installations_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def 
test_fetch_git_refs_rest_bad_request( @@ -12791,10 +12939,13 @@ def test_fetch_git_refs_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DeveloperConnectRestInterceptor, "post_fetch_git_refs" ) as post, mock.patch.object( + transports.DeveloperConnectRestInterceptor, "post_fetch_git_refs_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DeveloperConnectRestInterceptor, "pre_fetch_git_refs" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = developer_connect.FetchGitRefsRequest.pb( developer_connect.FetchGitRefsRequest() ) @@ -12820,6 +12971,10 @@ def test_fetch_git_refs_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = developer_connect.FetchGitRefsResponse() + post_with_metadata.return_value = ( + developer_connect.FetchGitRefsResponse(), + metadata, + ) client.fetch_git_refs( request, @@ -12831,6 +12986,7 @@ def test_fetch_git_refs_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationRequest): diff --git a/packages/google-cloud-dialogflow-cx/CHANGELOG.md b/packages/google-cloud-dialogflow-cx/CHANGELOG.md index 8a05236b7f5a..283af65fdd10 100644 --- a/packages/google-cloud-dialogflow-cx/CHANGELOG.md +++ b/packages/google-cloud-dialogflow-cx/CHANGELOG.md @@ -1,6 +1,14 @@ # Changelog +## [1.39.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-dialogflow-cx-v1.38.0...google-cloud-dialogflow-cx-v1.39.0) (2025-02-12) + + +### Features + +* Add REST Interceptors which support reading metadata ([e92d527](https://github.com/googleapis/google-cloud-python/commit/e92d52797ffbce45d033eb81af24e0cad32baa55)) +* Add support for reading selective GAPIC generation methods from service YAML ([e92d527](https://github.com/googleapis/google-cloud-python/commit/e92d52797ffbce45d033eb81af24e0cad32baa55)) + ## [1.38.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-dialogflow-cx-v1.37.0...google-cloud-dialogflow-cx-v1.38.0) (2024-12-12) diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx/gapic_version.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx/gapic_version.py index e553ae451f41..e589b34795e6 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx/gapic_version.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.38.0" # {x-release-please-version} +__version__ = "1.39.0" # {x-release-please-version} diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/gapic_version.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/gapic_version.py index e553ae451f41..e589b34795e6 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/gapic_version.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "1.38.0" # {x-release-please-version} +__version__ = "1.39.0" # {x-release-please-version} diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/agents/client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/agents/client.py index 737832a1035c..c8f4c0984e15 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/agents/client.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/agents/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -658,6 +660,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -2177,16 +2206,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -2232,16 +2265,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def cancel_operation( self, @@ -2342,16 +2379,20 @@ def get_location( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def list_locations( self, @@ -2397,16 +2438,20 @@ def list_locations( # Validate the universe domain. 
self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/agents/transports/rest.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/agents/transports/rest.py index eb8e6052d972..c09dcb6e6432 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/agents/transports/rest.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/agents/transports/rest.py @@ -181,12 +181,35 @@ def pre_create_agent( def post_create_agent(self, response: gcdc_agent.Agent) -> gcdc_agent.Agent: """Post-rpc interceptor for create_agent - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_agent_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Agents server but before - it is returned to user code. + it is returned to user code. This `post_create_agent` interceptor runs + before the `post_create_agent_with_metadata` interceptor. """ return response + def post_create_agent_with_metadata( + self, + response: gcdc_agent.Agent, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gcdc_agent.Agent, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_agent + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Agents server but before it is returned to user code. + + We recommend only using this `post_create_agent_with_metadata` + interceptor in new development instead of the `post_create_agent` interceptor. + When both interceptors are used, this `post_create_agent_with_metadata` interceptor runs after the + `post_create_agent` interceptor. The (possibly modified) response returned by + `post_create_agent` will be passed to + `post_create_agent_with_metadata`. + """ + return response, metadata + def pre_delete_agent( self, request: agent.DeleteAgentRequest, @@ -216,12 +239,35 @@ def post_export_agent( ) -> operations_pb2.Operation: """Post-rpc interceptor for export_agent - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_export_agent_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Agents server but before - it is returned to user code. + it is returned to user code. This `post_export_agent` interceptor runs + before the `post_export_agent_with_metadata` interceptor. """ return response + def post_export_agent_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for export_agent + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Agents server but before it is returned to user code. 
+ + We recommend only using this `post_export_agent_with_metadata` + interceptor in new development instead of the `post_export_agent` interceptor. + When both interceptors are used, this `post_export_agent_with_metadata` interceptor runs after the + `post_export_agent` interceptor. The (possibly modified) response returned by + `post_export_agent` will be passed to + `post_export_agent_with_metadata`. + """ + return response, metadata + def pre_get_agent( self, request: agent.GetAgentRequest, @@ -237,12 +283,33 @@ def pre_get_agent( def post_get_agent(self, response: agent.Agent) -> agent.Agent: """Post-rpc interceptor for get_agent - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_agent_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Agents server but before - it is returned to user code. + it is returned to user code. This `post_get_agent` interceptor runs + before the `post_get_agent_with_metadata` interceptor. """ return response + def post_get_agent_with_metadata( + self, response: agent.Agent, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[agent.Agent, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_agent + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Agents server but before it is returned to user code. + + We recommend only using this `post_get_agent_with_metadata` + interceptor in new development instead of the `post_get_agent` interceptor. + When both interceptors are used, this `post_get_agent_with_metadata` interceptor runs after the + `post_get_agent` interceptor. The (possibly modified) response returned by + `post_get_agent` will be passed to + `post_get_agent_with_metadata`. + """ + return response, metadata + def pre_get_agent_validation_result( self, request: agent.GetAgentValidationResultRequest, @@ -262,12 +329,35 @@ def post_get_agent_validation_result( ) -> agent.AgentValidationResult: """Post-rpc interceptor for get_agent_validation_result - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_agent_validation_result_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Agents server but before - it is returned to user code. + it is returned to user code. This `post_get_agent_validation_result` interceptor runs + before the `post_get_agent_validation_result_with_metadata` interceptor. """ return response + def post_get_agent_validation_result_with_metadata( + self, + response: agent.AgentValidationResult, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[agent.AgentValidationResult, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_agent_validation_result + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Agents server but before it is returned to user code. + + We recommend only using this `post_get_agent_validation_result_with_metadata` + interceptor in new development instead of the `post_get_agent_validation_result` interceptor. + When both interceptors are used, this `post_get_agent_validation_result_with_metadata` interceptor runs after the + `post_get_agent_validation_result` interceptor. The (possibly modified) response returned by + `post_get_agent_validation_result` will be passed to + `post_get_agent_validation_result_with_metadata`. 
+ """ + return response, metadata + def pre_get_generative_settings( self, request: agent.GetGenerativeSettingsRequest, @@ -287,12 +377,37 @@ def post_get_generative_settings( ) -> generative_settings.GenerativeSettings: """Post-rpc interceptor for get_generative_settings - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_generative_settings_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Agents server but before - it is returned to user code. + it is returned to user code. This `post_get_generative_settings` interceptor runs + before the `post_get_generative_settings_with_metadata` interceptor. """ return response + def post_get_generative_settings_with_metadata( + self, + response: generative_settings.GenerativeSettings, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + generative_settings.GenerativeSettings, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for get_generative_settings + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Agents server but before it is returned to user code. + + We recommend only using this `post_get_generative_settings_with_metadata` + interceptor in new development instead of the `post_get_generative_settings` interceptor. + When both interceptors are used, this `post_get_generative_settings_with_metadata` interceptor runs after the + `post_get_generative_settings` interceptor. The (possibly modified) response returned by + `post_get_generative_settings` will be passed to + `post_get_generative_settings_with_metadata`. + """ + return response, metadata + def pre_list_agents( self, request: agent.ListAgentsRequest, @@ -310,12 +425,35 @@ def post_list_agents( ) -> agent.ListAgentsResponse: """Post-rpc interceptor for list_agents - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_agents_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Agents server but before - it is returned to user code. + it is returned to user code. This `post_list_agents` interceptor runs + before the `post_list_agents_with_metadata` interceptor. """ return response + def post_list_agents_with_metadata( + self, + response: agent.ListAgentsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[agent.ListAgentsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_agents + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Agents server but before it is returned to user code. + + We recommend only using this `post_list_agents_with_metadata` + interceptor in new development instead of the `post_list_agents` interceptor. + When both interceptors are used, this `post_list_agents_with_metadata` interceptor runs after the + `post_list_agents` interceptor. The (possibly modified) response returned by + `post_list_agents` will be passed to + `post_list_agents_with_metadata`. + """ + return response, metadata + def pre_restore_agent( self, request: agent.RestoreAgentRequest, @@ -333,12 +471,35 @@ def post_restore_agent( ) -> operations_pb2.Operation: """Post-rpc interceptor for restore_agent - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_restore_agent_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response after it is returned by the Agents server but before - it is returned to user code. + it is returned to user code. This `post_restore_agent` interceptor runs + before the `post_restore_agent_with_metadata` interceptor. """ return response + def post_restore_agent_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for restore_agent + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Agents server but before it is returned to user code. + + We recommend only using this `post_restore_agent_with_metadata` + interceptor in new development instead of the `post_restore_agent` interceptor. + When both interceptors are used, this `post_restore_agent_with_metadata` interceptor runs after the + `post_restore_agent` interceptor. The (possibly modified) response returned by + `post_restore_agent` will be passed to + `post_restore_agent_with_metadata`. + """ + return response, metadata + def pre_update_agent( self, request: gcdc_agent.UpdateAgentRequest, @@ -354,12 +515,35 @@ def pre_update_agent( def post_update_agent(self, response: gcdc_agent.Agent) -> gcdc_agent.Agent: """Post-rpc interceptor for update_agent - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_agent_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Agents server but before - it is returned to user code. + it is returned to user code. This `post_update_agent` interceptor runs + before the `post_update_agent_with_metadata` interceptor. """ return response + def post_update_agent_with_metadata( + self, + response: gcdc_agent.Agent, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gcdc_agent.Agent, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_agent + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Agents server but before it is returned to user code. + + We recommend only using this `post_update_agent_with_metadata` + interceptor in new development instead of the `post_update_agent` interceptor. + When both interceptors are used, this `post_update_agent_with_metadata` interceptor runs after the + `post_update_agent` interceptor. The (possibly modified) response returned by + `post_update_agent` will be passed to + `post_update_agent_with_metadata`. + """ + return response, metadata + def pre_update_generative_settings( self, request: agent.UpdateGenerativeSettingsRequest, @@ -379,12 +563,38 @@ def post_update_generative_settings( ) -> gcdc_generative_settings.GenerativeSettings: """Post-rpc interceptor for update_generative_settings - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_generative_settings_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Agents server but before - it is returned to user code. + it is returned to user code. This `post_update_generative_settings` interceptor runs + before the `post_update_generative_settings_with_metadata` interceptor. 
""" return response + def post_update_generative_settings_with_metadata( + self, + response: gcdc_generative_settings.GenerativeSettings, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + gcdc_generative_settings.GenerativeSettings, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for update_generative_settings + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Agents server but before it is returned to user code. + + We recommend only using this `post_update_generative_settings_with_metadata` + interceptor in new development instead of the `post_update_generative_settings` interceptor. + When both interceptors are used, this `post_update_generative_settings_with_metadata` interceptor runs after the + `post_update_generative_settings` interceptor. The (possibly modified) response returned by + `post_update_generative_settings` will be passed to + `post_update_generative_settings_with_metadata`. + """ + return response, metadata + def pre_validate_agent( self, request: agent.ValidateAgentRequest, @@ -402,12 +612,35 @@ def post_validate_agent( ) -> agent.AgentValidationResult: """Post-rpc interceptor for validate_agent - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_validate_agent_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Agents server but before - it is returned to user code. + it is returned to user code. This `post_validate_agent` interceptor runs + before the `post_validate_agent_with_metadata` interceptor. """ return response + def post_validate_agent_with_metadata( + self, + response: agent.AgentValidationResult, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[agent.AgentValidationResult, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for validate_agent + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Agents server but before it is returned to user code. + + We recommend only using this `post_validate_agent_with_metadata` + interceptor in new development instead of the `post_validate_agent` interceptor. + When both interceptors are used, this `post_validate_agent_with_metadata` interceptor runs after the + `post_validate_agent` interceptor. The (possibly modified) response returned by + `post_validate_agent` will be passed to + `post_validate_agent_with_metadata`. 
+ """ + return response, metadata + def pre_get_location( self, request: locations_pb2.GetLocationRequest, @@ -815,6 +1048,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_agent(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_agent_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1070,6 +1307,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_export_agent(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_export_agent_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1225,6 +1466,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_agent(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_agent_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1371,6 +1616,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_agent_validation_result(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_agent_validation_result_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1515,6 +1764,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_generative_settings(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_generative_settings_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1660,6 +1913,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_agents(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_agents_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1810,6 +2067,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_restore_agent(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_restore_agent_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1971,6 +2232,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_agent(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_agent_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2122,6 +2387,10 @@ def __call__( json_format.Parse(response.content, pb_resp, 
ignore_unknown_fields=True) resp = self._interceptor.post_update_generative_settings(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_generative_settings_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2275,6 +2544,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_validate_agent(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_validate_agent_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/changelogs/client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/changelogs/client.py index 7ea16ff359c0..3dcd189df232 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/changelogs/client.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/changelogs/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -490,6 +492,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -965,16 +994,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -1020,16 +1053,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. 
+ return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def cancel_operation( self, @@ -1130,16 +1167,20 @@ def get_location( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def list_locations( self, @@ -1185,16 +1226,20 @@ def list_locations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/changelogs/transports/rest.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/changelogs/transports/rest.py index 679423ef23ff..a1cabbfca117 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/changelogs/transports/rest.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/changelogs/transports/rest.py @@ -107,12 +107,35 @@ def pre_get_changelog( def post_get_changelog(self, response: changelog.Changelog) -> changelog.Changelog: """Post-rpc interceptor for get_changelog - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_changelog_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Changelogs server but before - it is returned to user code. + it is returned to user code. This `post_get_changelog` interceptor runs + before the `post_get_changelog_with_metadata` interceptor. """ return response + def post_get_changelog_with_metadata( + self, + response: changelog.Changelog, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[changelog.Changelog, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_changelog + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Changelogs server but before it is returned to user code. + + We recommend only using this `post_get_changelog_with_metadata` + interceptor in new development instead of the `post_get_changelog` interceptor. + When both interceptors are used, this `post_get_changelog_with_metadata` interceptor runs after the + `post_get_changelog` interceptor. The (possibly modified) response returned by + `post_get_changelog` will be passed to + `post_get_changelog_with_metadata`. + """ + return response, metadata + def pre_list_changelogs( self, request: changelog.ListChangelogsRequest, @@ -132,12 +155,37 @@ def post_list_changelogs( ) -> changelog.ListChangelogsResponse: """Post-rpc interceptor for list_changelogs - Override in a subclass to manipulate the response + DEPRECATED. 
Please use the `post_list_changelogs_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Changelogs server but before - it is returned to user code. + it is returned to user code. This `post_list_changelogs` interceptor runs + before the `post_list_changelogs_with_metadata` interceptor. """ return response + def post_list_changelogs_with_metadata( + self, + response: changelog.ListChangelogsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + changelog.ListChangelogsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_changelogs + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Changelogs server but before it is returned to user code. + + We recommend only using this `post_list_changelogs_with_metadata` + interceptor in new development instead of the `post_list_changelogs` interceptor. + When both interceptors are used, this `post_list_changelogs_with_metadata` interceptor runs after the + `post_list_changelogs` interceptor. The (possibly modified) response returned by + `post_list_changelogs` will be passed to + `post_list_changelogs_with_metadata`. + """ + return response, metadata + def pre_get_location( self, request: locations_pb2.GetLocationRequest, @@ -473,6 +521,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_changelog(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_changelog_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -618,6 +670,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_changelogs(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_changelogs_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/deployments/client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/deployments/client.py index 888c2e6120d4..5877d775ee7b 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/deployments/client.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/deployments/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -570,6 +572,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. 
+ """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -1056,16 +1085,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -1111,16 +1144,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def cancel_operation( self, @@ -1221,16 +1258,20 @@ def get_location( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def list_locations( self, @@ -1276,16 +1317,20 @@ def list_locations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/deployments/transports/rest.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/deployments/transports/rest.py index 90837cfd8808..7022b53361ce 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/deployments/transports/rest.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/deployments/transports/rest.py @@ -111,12 +111,35 @@ def post_get_deployment( ) -> deployment.Deployment: """Post-rpc interceptor for get_deployment - Override in a subclass to manipulate the response + DEPRECATED. 
Please use the `post_get_deployment_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Deployments server but before - it is returned to user code. + it is returned to user code. This `post_get_deployment` interceptor runs + before the `post_get_deployment_with_metadata` interceptor. """ return response + def post_get_deployment_with_metadata( + self, + response: deployment.Deployment, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[deployment.Deployment, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_deployment + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Deployments server but before it is returned to user code. + + We recommend only using this `post_get_deployment_with_metadata` + interceptor in new development instead of the `post_get_deployment` interceptor. + When both interceptors are used, this `post_get_deployment_with_metadata` interceptor runs after the + `post_get_deployment` interceptor. The (possibly modified) response returned by + `post_get_deployment` will be passed to + `post_get_deployment_with_metadata`. + """ + return response, metadata + def pre_list_deployments( self, request: deployment.ListDeploymentsRequest, @@ -136,12 +159,37 @@ def post_list_deployments( ) -> deployment.ListDeploymentsResponse: """Post-rpc interceptor for list_deployments - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_deployments_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Deployments server but before - it is returned to user code. + it is returned to user code. This `post_list_deployments` interceptor runs + before the `post_list_deployments_with_metadata` interceptor. """ return response + def post_list_deployments_with_metadata( + self, + response: deployment.ListDeploymentsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + deployment.ListDeploymentsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_deployments + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Deployments server but before it is returned to user code. + + We recommend only using this `post_list_deployments_with_metadata` + interceptor in new development instead of the `post_list_deployments` interceptor. + When both interceptors are used, this `post_list_deployments_with_metadata` interceptor runs after the + `post_list_deployments` interceptor. The (possibly modified) response returned by + `post_list_deployments` will be passed to + `post_list_deployments_with_metadata`. 
+ """ + return response, metadata + def pre_get_location( self, request: locations_pb2.GetLocationRequest, @@ -480,6 +528,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_deployment(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_deployment_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -625,6 +677,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_deployments(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_deployments_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/entity_types/client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/entity_types/client.py index af63050de740..f542750f0765 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/entity_types/client.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/entity_types/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -493,6 +495,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -1608,16 +1637,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -1663,16 +1696,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. 
- return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def cancel_operation( self, @@ -1773,16 +1810,20 @@ def get_location( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def list_locations( self, @@ -1828,16 +1869,20 @@ def list_locations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/entity_types/transports/rest.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/entity_types/transports/rest.py index a9cad480071f..b413843889ec 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/entity_types/transports/rest.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/entity_types/transports/rest.py @@ -150,12 +150,35 @@ def post_create_entity_type( ) -> gcdc_entity_type.EntityType: """Post-rpc interceptor for create_entity_type - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_entity_type_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the EntityTypes server but before - it is returned to user code. + it is returned to user code. This `post_create_entity_type` interceptor runs + before the `post_create_entity_type_with_metadata` interceptor. """ return response + def post_create_entity_type_with_metadata( + self, + response: gcdc_entity_type.EntityType, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gcdc_entity_type.EntityType, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_entity_type + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the EntityTypes server but before it is returned to user code. + + We recommend only using this `post_create_entity_type_with_metadata` + interceptor in new development instead of the `post_create_entity_type` interceptor. + When both interceptors are used, this `post_create_entity_type_with_metadata` interceptor runs after the + `post_create_entity_type` interceptor. The (possibly modified) response returned by + `post_create_entity_type` will be passed to + `post_create_entity_type_with_metadata`. 
+ """ + return response, metadata + def pre_delete_entity_type( self, request: entity_type.DeleteEntityTypeRequest, @@ -189,12 +212,35 @@ def post_export_entity_types( ) -> operations_pb2.Operation: """Post-rpc interceptor for export_entity_types - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_export_entity_types_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the EntityTypes server but before - it is returned to user code. + it is returned to user code. This `post_export_entity_types` interceptor runs + before the `post_export_entity_types_with_metadata` interceptor. """ return response + def post_export_entity_types_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for export_entity_types + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the EntityTypes server but before it is returned to user code. + + We recommend only using this `post_export_entity_types_with_metadata` + interceptor in new development instead of the `post_export_entity_types` interceptor. + When both interceptors are used, this `post_export_entity_types_with_metadata` interceptor runs after the + `post_export_entity_types` interceptor. The (possibly modified) response returned by + `post_export_entity_types` will be passed to + `post_export_entity_types_with_metadata`. + """ + return response, metadata + def pre_get_entity_type( self, request: entity_type.GetEntityTypeRequest, @@ -214,12 +260,35 @@ def post_get_entity_type( ) -> entity_type.EntityType: """Post-rpc interceptor for get_entity_type - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_entity_type_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the EntityTypes server but before - it is returned to user code. + it is returned to user code. This `post_get_entity_type` interceptor runs + before the `post_get_entity_type_with_metadata` interceptor. """ return response + def post_get_entity_type_with_metadata( + self, + response: entity_type.EntityType, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[entity_type.EntityType, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_entity_type + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the EntityTypes server but before it is returned to user code. + + We recommend only using this `post_get_entity_type_with_metadata` + interceptor in new development instead of the `post_get_entity_type` interceptor. + When both interceptors are used, this `post_get_entity_type_with_metadata` interceptor runs after the + `post_get_entity_type` interceptor. The (possibly modified) response returned by + `post_get_entity_type` will be passed to + `post_get_entity_type_with_metadata`. + """ + return response, metadata + def pre_import_entity_types( self, request: entity_type.ImportEntityTypesRequest, @@ -239,12 +308,35 @@ def post_import_entity_types( ) -> operations_pb2.Operation: """Post-rpc interceptor for import_entity_types - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_import_entity_types_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response after it is returned by the EntityTypes server but before - it is returned to user code. + it is returned to user code. This `post_import_entity_types` interceptor runs + before the `post_import_entity_types_with_metadata` interceptor. """ return response + def post_import_entity_types_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for import_entity_types + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the EntityTypes server but before it is returned to user code. + + We recommend only using this `post_import_entity_types_with_metadata` + interceptor in new development instead of the `post_import_entity_types` interceptor. + When both interceptors are used, this `post_import_entity_types_with_metadata` interceptor runs after the + `post_import_entity_types` interceptor. The (possibly modified) response returned by + `post_import_entity_types` will be passed to + `post_import_entity_types_with_metadata`. + """ + return response, metadata + def pre_list_entity_types( self, request: entity_type.ListEntityTypesRequest, @@ -264,12 +356,37 @@ def post_list_entity_types( ) -> entity_type.ListEntityTypesResponse: """Post-rpc interceptor for list_entity_types - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_entity_types_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the EntityTypes server but before - it is returned to user code. + it is returned to user code. This `post_list_entity_types` interceptor runs + before the `post_list_entity_types_with_metadata` interceptor. """ return response + def post_list_entity_types_with_metadata( + self, + response: entity_type.ListEntityTypesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + entity_type.ListEntityTypesResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_entity_types + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the EntityTypes server but before it is returned to user code. + + We recommend only using this `post_list_entity_types_with_metadata` + interceptor in new development instead of the `post_list_entity_types` interceptor. + When both interceptors are used, this `post_list_entity_types_with_metadata` interceptor runs after the + `post_list_entity_types` interceptor. The (possibly modified) response returned by + `post_list_entity_types` will be passed to + `post_list_entity_types_with_metadata`. + """ + return response, metadata + def pre_update_entity_type( self, request: gcdc_entity_type.UpdateEntityTypeRequest, @@ -290,12 +407,35 @@ def post_update_entity_type( ) -> gcdc_entity_type.EntityType: """Post-rpc interceptor for update_entity_type - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_entity_type_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the EntityTypes server but before - it is returned to user code. + it is returned to user code. This `post_update_entity_type` interceptor runs + before the `post_update_entity_type_with_metadata` interceptor. 
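When both the deprecated hook and its metadata variant are overridden, the ordering documented above is observable: the plain hook runs first and its (possibly modified) return value is what the metadata hook receives. A hedged sketch, assuming an `EntityTypesRestInterceptor` base class analogous to the Agents one shown earlier:

from google.cloud.dialogflowcx_v3.services.entity_types.transports import rest as entity_types_rest


class ChainedEntityTypesInterceptor(entity_types_rest.EntityTypesRestInterceptor):
    """Illustrative subclass: both hooks overridden to show the chaining."""

    def post_get_entity_type(self, response):
        # Runs first; whatever is returned here is passed on to the
        # *_with_metadata hook below.
        response.display_name = response.display_name.upper()
        return response

    def post_get_entity_type_with_metadata(self, response, metadata):
        # Runs second; sees the upper-cased display_name set above plus the
        # HTTP response headers in `metadata`.
        print("display_name after first hook:", response.display_name)
        return response, metadata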
""" return response + def post_update_entity_type_with_metadata( + self, + response: gcdc_entity_type.EntityType, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gcdc_entity_type.EntityType, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_entity_type + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the EntityTypes server but before it is returned to user code. + + We recommend only using this `post_update_entity_type_with_metadata` + interceptor in new development instead of the `post_update_entity_type` interceptor. + When both interceptors are used, this `post_update_entity_type_with_metadata` interceptor runs after the + `post_update_entity_type` interceptor. The (possibly modified) response returned by + `post_update_entity_type` will be passed to + `post_update_entity_type_with_metadata`. + """ + return response, metadata + def pre_get_location( self, request: locations_pb2.GetLocationRequest, @@ -727,6 +867,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_entity_type(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_entity_type_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -986,6 +1130,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_export_entity_types(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_export_entity_types_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1164,6 +1312,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_entity_type(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_entity_type_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1314,6 +1466,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_import_entity_types(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_import_entity_types_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1459,6 +1615,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_entity_types(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_entity_types_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1645,6 +1805,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_entity_type(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_entity_type_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO 
COVER diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/environments/client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/environments/client.py index a27ed7a1ea35..b2848dc6fd65 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/environments/client.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/environments/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -645,6 +647,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -2021,16 +2050,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -2076,16 +2109,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def cancel_operation( self, @@ -2186,16 +2223,20 @@ def get_location( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def list_locations( self, @@ -2241,16 +2282,20 @@ def list_locations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. 
- response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/environments/transports/rest.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/environments/transports/rest.py index aefee52aadb4..a2ac92f7ca29 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/environments/transports/rest.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/environments/transports/rest.py @@ -166,12 +166,35 @@ def post_create_environment( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_environment - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_environment_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Environments server but before - it is returned to user code. + it is returned to user code. This `post_create_environment` interceptor runs + before the `post_create_environment_with_metadata` interceptor. """ return response + def post_create_environment_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_environment + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Environments server but before it is returned to user code. + + We recommend only using this `post_create_environment_with_metadata` + interceptor in new development instead of the `post_create_environment` interceptor. + When both interceptors are used, this `post_create_environment_with_metadata` interceptor runs after the + `post_create_environment` interceptor. The (possibly modified) response returned by + `post_create_environment` will be passed to + `post_create_environment_with_metadata`. + """ + return response, metadata + def pre_delete_environment( self, request: environment.DeleteEnvironmentRequest, @@ -203,12 +226,35 @@ def post_deploy_flow( ) -> operations_pb2.Operation: """Post-rpc interceptor for deploy_flow - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_deploy_flow_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Environments server but before - it is returned to user code. + it is returned to user code. This `post_deploy_flow` interceptor runs + before the `post_deploy_flow_with_metadata` interceptor. """ return response + def post_deploy_flow_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for deploy_flow + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Environments server but before it is returned to user code. 
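For long-running methods such as `deploy_flow`, the metadata hook receives the raw `operations_pb2.Operation` rather than the resolved result, as the type annotations above indicate. A small illustrative subclass (the `EnvironmentsRestInterceptor` name is assumed, mirroring the other services in this change):

from google.cloud.dialogflowcx_v3.services.environments.transports import rest as environments_rest


class DeployFlowHeadersInterceptor(environments_rest.EnvironmentsRestInterceptor):
    """Illustrative subclass for a long-running method."""

    def post_deploy_flow_with_metadata(self, response, metadata):
        # `response` here is the operations_pb2.Operation for the deployment,
        # not the final DeployFlowResponse; `metadata` holds the HTTP headers.
        print("deploy_flow operation:", response.name)
        return response, metadata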
+ + We recommend only using this `post_deploy_flow_with_metadata` + interceptor in new development instead of the `post_deploy_flow` interceptor. + When both interceptors are used, this `post_deploy_flow_with_metadata` interceptor runs after the + `post_deploy_flow` interceptor. The (possibly modified) response returned by + `post_deploy_flow` will be passed to + `post_deploy_flow_with_metadata`. + """ + return response, metadata + def pre_get_environment( self, request: environment.GetEnvironmentRequest, @@ -228,12 +274,35 @@ def post_get_environment( ) -> environment.Environment: """Post-rpc interceptor for get_environment - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_environment_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Environments server but before - it is returned to user code. + it is returned to user code. This `post_get_environment` interceptor runs + before the `post_get_environment_with_metadata` interceptor. """ return response + def post_get_environment_with_metadata( + self, + response: environment.Environment, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[environment.Environment, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_environment + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Environments server but before it is returned to user code. + + We recommend only using this `post_get_environment_with_metadata` + interceptor in new development instead of the `post_get_environment` interceptor. + When both interceptors are used, this `post_get_environment_with_metadata` interceptor runs after the + `post_get_environment` interceptor. The (possibly modified) response returned by + `post_get_environment` will be passed to + `post_get_environment_with_metadata`. + """ + return response, metadata + def pre_list_continuous_test_results( self, request: environment.ListContinuousTestResultsRequest, @@ -254,12 +323,38 @@ def post_list_continuous_test_results( ) -> environment.ListContinuousTestResultsResponse: """Post-rpc interceptor for list_continuous_test_results - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_continuous_test_results_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Environments server but before - it is returned to user code. + it is returned to user code. This `post_list_continuous_test_results` interceptor runs + before the `post_list_continuous_test_results_with_metadata` interceptor. """ return response + def post_list_continuous_test_results_with_metadata( + self, + response: environment.ListContinuousTestResultsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + environment.ListContinuousTestResultsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_continuous_test_results + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Environments server but before it is returned to user code. + + We recommend only using this `post_list_continuous_test_results_with_metadata` + interceptor in new development instead of the `post_list_continuous_test_results` interceptor. 
+ When both interceptors are used, this `post_list_continuous_test_results_with_metadata` interceptor runs after the + `post_list_continuous_test_results` interceptor. The (possibly modified) response returned by + `post_list_continuous_test_results` will be passed to + `post_list_continuous_test_results_with_metadata`. + """ + return response, metadata + def pre_list_environments( self, request: environment.ListEnvironmentsRequest, @@ -279,12 +374,37 @@ def post_list_environments( ) -> environment.ListEnvironmentsResponse: """Post-rpc interceptor for list_environments - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_environments_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Environments server but before - it is returned to user code. + it is returned to user code. This `post_list_environments` interceptor runs + before the `post_list_environments_with_metadata` interceptor. """ return response + def post_list_environments_with_metadata( + self, + response: environment.ListEnvironmentsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + environment.ListEnvironmentsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_environments + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Environments server but before it is returned to user code. + + We recommend only using this `post_list_environments_with_metadata` + interceptor in new development instead of the `post_list_environments` interceptor. + When both interceptors are used, this `post_list_environments_with_metadata` interceptor runs after the + `post_list_environments` interceptor. The (possibly modified) response returned by + `post_list_environments` will be passed to + `post_list_environments_with_metadata`. + """ + return response, metadata + def pre_lookup_environment_history( self, request: environment.LookupEnvironmentHistoryRequest, @@ -305,12 +425,38 @@ def post_lookup_environment_history( ) -> environment.LookupEnvironmentHistoryResponse: """Post-rpc interceptor for lookup_environment_history - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_lookup_environment_history_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Environments server but before - it is returned to user code. + it is returned to user code. This `post_lookup_environment_history` interceptor runs + before the `post_lookup_environment_history_with_metadata` interceptor. """ return response + def post_lookup_environment_history_with_metadata( + self, + response: environment.LookupEnvironmentHistoryResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + environment.LookupEnvironmentHistoryResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for lookup_environment_history + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Environments server but before it is returned to user code. + + We recommend only using this `post_lookup_environment_history_with_metadata` + interceptor in new development instead of the `post_lookup_environment_history` interceptor. 
+ When both interceptors are used, this `post_lookup_environment_history_with_metadata` interceptor runs after the + `post_lookup_environment_history` interceptor. The (possibly modified) response returned by + `post_lookup_environment_history` will be passed to + `post_lookup_environment_history_with_metadata`. + """ + return response, metadata + def pre_run_continuous_test( self, request: environment.RunContinuousTestRequest, @@ -330,12 +476,35 @@ def post_run_continuous_test( ) -> operations_pb2.Operation: """Post-rpc interceptor for run_continuous_test - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_run_continuous_test_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Environments server but before - it is returned to user code. + it is returned to user code. This `post_run_continuous_test` interceptor runs + before the `post_run_continuous_test_with_metadata` interceptor. """ return response + def post_run_continuous_test_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for run_continuous_test + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Environments server but before it is returned to user code. + + We recommend only using this `post_run_continuous_test_with_metadata` + interceptor in new development instead of the `post_run_continuous_test` interceptor. + When both interceptors are used, this `post_run_continuous_test_with_metadata` interceptor runs after the + `post_run_continuous_test` interceptor. The (possibly modified) response returned by + `post_run_continuous_test` will be passed to + `post_run_continuous_test_with_metadata`. + """ + return response, metadata + def pre_update_environment( self, request: gcdc_environment.UpdateEnvironmentRequest, @@ -356,12 +525,35 @@ def post_update_environment( ) -> operations_pb2.Operation: """Post-rpc interceptor for update_environment - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_environment_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Environments server but before - it is returned to user code. + it is returned to user code. This `post_update_environment` interceptor runs + before the `post_update_environment_with_metadata` interceptor. """ return response + def post_update_environment_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_environment + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Environments server but before it is returned to user code. + + We recommend only using this `post_update_environment_with_metadata` + interceptor in new development instead of the `post_update_environment` interceptor. + When both interceptors are used, this `post_update_environment_with_metadata` interceptor runs after the + `post_update_environment` interceptor. The (possibly modified) response returned by + `post_update_environment` will be passed to + `post_update_environment_with_metadata`. 
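A minimal sketch of adopting the new hook from user code, assuming the interceptor base class follows the usual GAPIC naming (`EnvironmentsRestInterceptor` in this module); the subclass name and the header handling are illustrative only:

    from typing import Sequence, Tuple, Union

    from google.cloud.dialogflowcx_v3.services.environments.transports.rest import (
        EnvironmentsRestInterceptor,  # assumed name, per the usual GAPIC convention
    )
    from google.longrunning import operations_pb2


    class LoggingEnvironmentsInterceptor(EnvironmentsRestInterceptor):
        def post_create_environment_with_metadata(
            self,
            response: operations_pb2.Operation,
            metadata: Sequence[Tuple[str, Union[str, bytes]]],
        ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]:
            # `metadata` is built by the transport from the HTTP response headers
            # (see the `response_metadata = [(k, str(v)) ...]` wiring further below),
            # so individual headers can be read here before user code sees the response.
            headers = dict(metadata)
            print("create_environment returned", len(headers), "response headers")
            # The transport unpacks `resp, _ = ...`, so a modified metadata sequence
            # returned here is discarded; treat it as read-only context.
            return response, metadata

If a deprecated `post_create_environment` override is also present, it runs first and its (possibly modified) response is what arrives here, as the docstrings above describe.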
+ """ + return response, metadata + def pre_get_location( self, request: locations_pb2.GetLocationRequest, @@ -759,6 +951,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_environment(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_environment_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1022,6 +1218,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_deploy_flow(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_deploy_flow_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1178,6 +1378,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_environment(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_environment_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1327,6 +1531,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_continuous_test_results(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_continuous_test_results_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1474,6 +1682,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_environments(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_environments_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1625,6 +1837,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_lookup_environment_history(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_lookup_environment_history_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1777,6 +1993,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_run_continuous_test(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_run_continuous_test_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1927,6 +2147,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_environment(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_environment_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git 
a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/experiments/client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/experiments/client.py index bc0d79bc99a1..a760ef6f2dc5 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/experiments/client.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/experiments/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -521,6 +523,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -1557,16 +1586,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -1612,16 +1645,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def cancel_operation( self, @@ -1722,16 +1759,20 @@ def get_location( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def list_locations( self, @@ -1777,16 +1818,20 @@ def list_locations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. 
- response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/experiments/transports/rest.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/experiments/transports/rest.py index 4cce41928a24..be29f4a6110c 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/experiments/transports/rest.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/experiments/transports/rest.py @@ -149,12 +149,35 @@ def post_create_experiment( ) -> gcdc_experiment.Experiment: """Post-rpc interceptor for create_experiment - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_experiment_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Experiments server but before - it is returned to user code. + it is returned to user code. This `post_create_experiment` interceptor runs + before the `post_create_experiment_with_metadata` interceptor. """ return response + def post_create_experiment_with_metadata( + self, + response: gcdc_experiment.Experiment, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gcdc_experiment.Experiment, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_experiment + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Experiments server but before it is returned to user code. + + We recommend only using this `post_create_experiment_with_metadata` + interceptor in new development instead of the `post_create_experiment` interceptor. + When both interceptors are used, this `post_create_experiment_with_metadata` interceptor runs after the + `post_create_experiment` interceptor. The (possibly modified) response returned by + `post_create_experiment` will be passed to + `post_create_experiment_with_metadata`. + """ + return response, metadata + def pre_delete_experiment( self, request: experiment.DeleteExperimentRequest, @@ -188,12 +211,35 @@ def post_get_experiment( ) -> experiment.Experiment: """Post-rpc interceptor for get_experiment - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_experiment_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Experiments server but before - it is returned to user code. + it is returned to user code. This `post_get_experiment` interceptor runs + before the `post_get_experiment_with_metadata` interceptor. """ return response + def post_get_experiment_with_metadata( + self, + response: experiment.Experiment, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[experiment.Experiment, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_experiment + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Experiments server but before it is returned to user code. 
+ + We recommend only using this `post_get_experiment_with_metadata` + interceptor in new development instead of the `post_get_experiment` interceptor. + When both interceptors are used, this `post_get_experiment_with_metadata` interceptor runs after the + `post_get_experiment` interceptor. The (possibly modified) response returned by + `post_get_experiment` will be passed to + `post_get_experiment_with_metadata`. + """ + return response, metadata + def pre_list_experiments( self, request: experiment.ListExperimentsRequest, @@ -213,12 +259,37 @@ def post_list_experiments( ) -> experiment.ListExperimentsResponse: """Post-rpc interceptor for list_experiments - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_experiments_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Experiments server but before - it is returned to user code. + it is returned to user code. This `post_list_experiments` interceptor runs + before the `post_list_experiments_with_metadata` interceptor. """ return response + def post_list_experiments_with_metadata( + self, + response: experiment.ListExperimentsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + experiment.ListExperimentsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_experiments + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Experiments server but before it is returned to user code. + + We recommend only using this `post_list_experiments_with_metadata` + interceptor in new development instead of the `post_list_experiments` interceptor. + When both interceptors are used, this `post_list_experiments_with_metadata` interceptor runs after the + `post_list_experiments` interceptor. The (possibly modified) response returned by + `post_list_experiments` will be passed to + `post_list_experiments_with_metadata`. + """ + return response, metadata + def pre_start_experiment( self, request: experiment.StartExperimentRequest, @@ -238,12 +309,35 @@ def post_start_experiment( ) -> experiment.Experiment: """Post-rpc interceptor for start_experiment - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_start_experiment_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Experiments server but before - it is returned to user code. + it is returned to user code. This `post_start_experiment` interceptor runs + before the `post_start_experiment_with_metadata` interceptor. """ return response + def post_start_experiment_with_metadata( + self, + response: experiment.Experiment, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[experiment.Experiment, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for start_experiment + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Experiments server but before it is returned to user code. + + We recommend only using this `post_start_experiment_with_metadata` + interceptor in new development instead of the `post_start_experiment` interceptor. + When both interceptors are used, this `post_start_experiment_with_metadata` interceptor runs after the + `post_start_experiment` interceptor. The (possibly modified) response returned by + `post_start_experiment` will be passed to + `post_start_experiment_with_metadata`. 
+ """ + return response, metadata + def pre_stop_experiment( self, request: experiment.StopExperimentRequest, @@ -263,12 +357,35 @@ def post_stop_experiment( ) -> experiment.Experiment: """Post-rpc interceptor for stop_experiment - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_stop_experiment_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Experiments server but before - it is returned to user code. + it is returned to user code. This `post_stop_experiment` interceptor runs + before the `post_stop_experiment_with_metadata` interceptor. """ return response + def post_stop_experiment_with_metadata( + self, + response: experiment.Experiment, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[experiment.Experiment, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for stop_experiment + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Experiments server but before it is returned to user code. + + We recommend only using this `post_stop_experiment_with_metadata` + interceptor in new development instead of the `post_stop_experiment` interceptor. + When both interceptors are used, this `post_stop_experiment_with_metadata` interceptor runs after the + `post_stop_experiment` interceptor. The (possibly modified) response returned by + `post_stop_experiment` will be passed to + `post_stop_experiment_with_metadata`. + """ + return response, metadata + def pre_update_experiment( self, request: gcdc_experiment.UpdateExperimentRequest, @@ -288,12 +405,35 @@ def post_update_experiment( ) -> gcdc_experiment.Experiment: """Post-rpc interceptor for update_experiment - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_experiment_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Experiments server but before - it is returned to user code. + it is returned to user code. This `post_update_experiment` interceptor runs + before the `post_update_experiment_with_metadata` interceptor. """ return response + def post_update_experiment_with_metadata( + self, + response: gcdc_experiment.Experiment, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gcdc_experiment.Experiment, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_experiment + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Experiments server but before it is returned to user code. + + We recommend only using this `post_update_experiment_with_metadata` + interceptor in new development instead of the `post_update_experiment` interceptor. + When both interceptors are used, this `post_update_experiment_with_metadata` interceptor runs after the + `post_update_experiment` interceptor. The (possibly modified) response returned by + `post_update_experiment` will be passed to + `post_update_experiment_with_metadata`. 
+ """ + return response, metadata + def pre_get_location( self, request: locations_pb2.GetLocationRequest, @@ -633,6 +773,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_experiment(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_experiment_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -887,6 +1031,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_experiment(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_experiment_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1032,6 +1180,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_experiments(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_experiments_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1185,6 +1337,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_start_experiment(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_start_experiment_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1334,6 +1490,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_stop_experiment(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_stop_experiment_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1485,6 +1645,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_experiment(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_experiment_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/flows/client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/flows/client.py index aa923f273602..1c5806e144b2 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/flows/client.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/flows/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -620,6 +622,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. 
return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -2057,16 +2086,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -2112,16 +2145,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def cancel_operation( self, @@ -2222,16 +2259,20 @@ def get_location( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def list_locations( self, @@ -2277,16 +2318,20 @@ def list_locations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. 
+ return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/flows/transports/rest.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/flows/transports/rest.py index 9dbe5e5eb8b6..1e9b07eb2d86 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/flows/transports/rest.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/flows/transports/rest.py @@ -169,12 +169,35 @@ def pre_create_flow( def post_create_flow(self, response: gcdc_flow.Flow) -> gcdc_flow.Flow: """Post-rpc interceptor for create_flow - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_flow_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Flows server but before - it is returned to user code. + it is returned to user code. This `post_create_flow` interceptor runs + before the `post_create_flow_with_metadata` interceptor. """ return response + def post_create_flow_with_metadata( + self, + response: gcdc_flow.Flow, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gcdc_flow.Flow, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_flow + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Flows server but before it is returned to user code. + + We recommend only using this `post_create_flow_with_metadata` + interceptor in new development instead of the `post_create_flow` interceptor. + When both interceptors are used, this `post_create_flow_with_metadata` interceptor runs after the + `post_create_flow` interceptor. The (possibly modified) response returned by + `post_create_flow` will be passed to + `post_create_flow_with_metadata`. + """ + return response, metadata + def pre_delete_flow( self, request: flow.DeleteFlowRequest, @@ -204,12 +227,35 @@ def post_export_flow( ) -> operations_pb2.Operation: """Post-rpc interceptor for export_flow - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_export_flow_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Flows server but before - it is returned to user code. + it is returned to user code. This `post_export_flow` interceptor runs + before the `post_export_flow_with_metadata` interceptor. """ return response + def post_export_flow_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for export_flow + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Flows server but before it is returned to user code. + + We recommend only using this `post_export_flow_with_metadata` + interceptor in new development instead of the `post_export_flow` interceptor. + When both interceptors are used, this `post_export_flow_with_metadata` interceptor runs after the + `post_export_flow` interceptor. The (possibly modified) response returned by + `post_export_flow` will be passed to + `post_export_flow_with_metadata`. 
+ """ + return response, metadata + def pre_get_flow( self, request: flow.GetFlowRequest, @@ -225,12 +271,33 @@ def pre_get_flow( def post_get_flow(self, response: flow.Flow) -> flow.Flow: """Post-rpc interceptor for get_flow - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_flow_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Flows server but before - it is returned to user code. + it is returned to user code. This `post_get_flow` interceptor runs + before the `post_get_flow_with_metadata` interceptor. """ return response + def post_get_flow_with_metadata( + self, response: flow.Flow, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[flow.Flow, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_flow + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Flows server but before it is returned to user code. + + We recommend only using this `post_get_flow_with_metadata` + interceptor in new development instead of the `post_get_flow` interceptor. + When both interceptors are used, this `post_get_flow_with_metadata` interceptor runs after the + `post_get_flow` interceptor. The (possibly modified) response returned by + `post_get_flow` will be passed to + `post_get_flow_with_metadata`. + """ + return response, metadata + def pre_get_flow_validation_result( self, request: flow.GetFlowValidationResultRequest, @@ -250,12 +317,35 @@ def post_get_flow_validation_result( ) -> flow.FlowValidationResult: """Post-rpc interceptor for get_flow_validation_result - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_flow_validation_result_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Flows server but before - it is returned to user code. + it is returned to user code. This `post_get_flow_validation_result` interceptor runs + before the `post_get_flow_validation_result_with_metadata` interceptor. """ return response + def post_get_flow_validation_result_with_metadata( + self, + response: flow.FlowValidationResult, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[flow.FlowValidationResult, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_flow_validation_result + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Flows server but before it is returned to user code. + + We recommend only using this `post_get_flow_validation_result_with_metadata` + interceptor in new development instead of the `post_get_flow_validation_result` interceptor. + When both interceptors are used, this `post_get_flow_validation_result_with_metadata` interceptor runs after the + `post_get_flow_validation_result` interceptor. The (possibly modified) response returned by + `post_get_flow_validation_result` will be passed to + `post_get_flow_validation_result_with_metadata`. + """ + return response, metadata + def pre_import_flow( self, request: flow.ImportFlowRequest, @@ -273,12 +363,35 @@ def post_import_flow( ) -> operations_pb2.Operation: """Post-rpc interceptor for import_flow - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_import_flow_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response after it is returned by the Flows server but before - it is returned to user code. + it is returned to user code. This `post_import_flow` interceptor runs + before the `post_import_flow_with_metadata` interceptor. """ return response + def post_import_flow_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for import_flow + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Flows server but before it is returned to user code. + + We recommend only using this `post_import_flow_with_metadata` + interceptor in new development instead of the `post_import_flow` interceptor. + When both interceptors are used, this `post_import_flow_with_metadata` interceptor runs after the + `post_import_flow` interceptor. The (possibly modified) response returned by + `post_import_flow` will be passed to + `post_import_flow_with_metadata`. + """ + return response, metadata + def pre_list_flows( self, request: flow.ListFlowsRequest, @@ -296,12 +409,35 @@ def post_list_flows( ) -> flow.ListFlowsResponse: """Post-rpc interceptor for list_flows - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_flows_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Flows server but before - it is returned to user code. + it is returned to user code. This `post_list_flows` interceptor runs + before the `post_list_flows_with_metadata` interceptor. """ return response + def post_list_flows_with_metadata( + self, + response: flow.ListFlowsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[flow.ListFlowsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_flows + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Flows server but before it is returned to user code. + + We recommend only using this `post_list_flows_with_metadata` + interceptor in new development instead of the `post_list_flows` interceptor. + When both interceptors are used, this `post_list_flows_with_metadata` interceptor runs after the + `post_list_flows` interceptor. The (possibly modified) response returned by + `post_list_flows` will be passed to + `post_list_flows_with_metadata`. + """ + return response, metadata + def pre_train_flow( self, request: flow.TrainFlowRequest, @@ -319,12 +455,35 @@ def post_train_flow( ) -> operations_pb2.Operation: """Post-rpc interceptor for train_flow - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_train_flow_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Flows server but before - it is returned to user code. + it is returned to user code. This `post_train_flow` interceptor runs + before the `post_train_flow_with_metadata` interceptor. 
""" return response + def post_train_flow_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for train_flow + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Flows server but before it is returned to user code. + + We recommend only using this `post_train_flow_with_metadata` + interceptor in new development instead of the `post_train_flow` interceptor. + When both interceptors are used, this `post_train_flow_with_metadata` interceptor runs after the + `post_train_flow` interceptor. The (possibly modified) response returned by + `post_train_flow` will be passed to + `post_train_flow_with_metadata`. + """ + return response, metadata + def pre_update_flow( self, request: gcdc_flow.UpdateFlowRequest, @@ -340,12 +499,35 @@ def pre_update_flow( def post_update_flow(self, response: gcdc_flow.Flow) -> gcdc_flow.Flow: """Post-rpc interceptor for update_flow - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_flow_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Flows server but before - it is returned to user code. + it is returned to user code. This `post_update_flow` interceptor runs + before the `post_update_flow_with_metadata` interceptor. """ return response + def post_update_flow_with_metadata( + self, + response: gcdc_flow.Flow, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gcdc_flow.Flow, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_flow + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Flows server but before it is returned to user code. + + We recommend only using this `post_update_flow_with_metadata` + interceptor in new development instead of the `post_update_flow` interceptor. + When both interceptors are used, this `post_update_flow_with_metadata` interceptor runs after the + `post_update_flow` interceptor. The (possibly modified) response returned by + `post_update_flow` will be passed to + `post_update_flow_with_metadata`. + """ + return response, metadata + def pre_validate_flow( self, request: flow.ValidateFlowRequest, @@ -363,12 +545,35 @@ def post_validate_flow( ) -> flow.FlowValidationResult: """Post-rpc interceptor for validate_flow - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_validate_flow_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Flows server but before - it is returned to user code. + it is returned to user code. This `post_validate_flow` interceptor runs + before the `post_validate_flow_with_metadata` interceptor. """ return response + def post_validate_flow_with_metadata( + self, + response: flow.FlowValidationResult, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[flow.FlowValidationResult, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for validate_flow + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Flows server but before it is returned to user code. + + We recommend only using this `post_validate_flow_with_metadata` + interceptor in new development instead of the `post_validate_flow` interceptor. 
+ When both interceptors are used, this `post_validate_flow_with_metadata` interceptor runs after the + `post_validate_flow` interceptor. The (possibly modified) response returned by + `post_validate_flow` will be passed to + `post_validate_flow_with_metadata`. + """ + return response, metadata + def pre_get_location( self, request: locations_pb2.GetLocationRequest, @@ -785,6 +990,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_flow(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_flow_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1040,6 +1249,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_export_flow(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_export_flow_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1202,6 +1415,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_flow(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_flow_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1348,6 +1565,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_flow_validation_result(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_flow_validation_result_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1496,6 +1717,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_import_flow(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_import_flow_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1639,6 +1864,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_flows(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_flows_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1787,6 +2016,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_train_flow(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_train_flow_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1957,6 +2190,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_flow(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = 
self._interceptor.post_update_flow_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2106,6 +2343,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_validate_flow(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_validate_flow_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/generators/client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/generators/client.py index 7d76ad326392..e3ad4ada49d8 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/generators/client.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/generators/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -491,6 +493,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -1310,16 +1339,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -1365,16 +1398,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def cancel_operation( self, @@ -1475,16 +1512,20 @@ def get_location( # Validate the universe domain. self._validate_universe_domain() - # Send the request. 
- response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def list_locations( self, @@ -1530,16 +1571,20 @@ def list_locations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/generators/transports/rest.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/generators/transports/rest.py index 5d67a080b7d0..5cfebc030b04 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/generators/transports/rest.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/generators/transports/rest.py @@ -133,12 +133,35 @@ def post_create_generator( ) -> gcdc_generator.Generator: """Post-rpc interceptor for create_generator - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_generator_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Generators server but before - it is returned to user code. + it is returned to user code. This `post_create_generator` interceptor runs + before the `post_create_generator_with_metadata` interceptor. """ return response + def post_create_generator_with_metadata( + self, + response: gcdc_generator.Generator, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gcdc_generator.Generator, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_generator + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Generators server but before it is returned to user code. + + We recommend only using this `post_create_generator_with_metadata` + interceptor in new development instead of the `post_create_generator` interceptor. + When both interceptors are used, this `post_create_generator_with_metadata` interceptor runs after the + `post_create_generator` interceptor. The (possibly modified) response returned by + `post_create_generator` will be passed to + `post_create_generator_with_metadata`. + """ + return response, metadata + def pre_delete_generator( self, request: generator.DeleteGeneratorRequest, @@ -168,12 +191,35 @@ def pre_get_generator( def post_get_generator(self, response: generator.Generator) -> generator.Generator: """Post-rpc interceptor for get_generator - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_generator_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Generators server but before - it is returned to user code. + it is returned to user code. 
This `post_get_generator` interceptor runs + before the `post_get_generator_with_metadata` interceptor. """ return response + def post_get_generator_with_metadata( + self, + response: generator.Generator, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[generator.Generator, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_generator + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Generators server but before it is returned to user code. + + We recommend only using this `post_get_generator_with_metadata` + interceptor in new development instead of the `post_get_generator` interceptor. + When both interceptors are used, this `post_get_generator_with_metadata` interceptor runs after the + `post_get_generator` interceptor. The (possibly modified) response returned by + `post_get_generator` will be passed to + `post_get_generator_with_metadata`. + """ + return response, metadata + def pre_list_generators( self, request: generator.ListGeneratorsRequest, @@ -193,12 +239,37 @@ def post_list_generators( ) -> generator.ListGeneratorsResponse: """Post-rpc interceptor for list_generators - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_generators_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Generators server but before - it is returned to user code. + it is returned to user code. This `post_list_generators` interceptor runs + before the `post_list_generators_with_metadata` interceptor. """ return response + def post_list_generators_with_metadata( + self, + response: generator.ListGeneratorsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + generator.ListGeneratorsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_generators + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Generators server but before it is returned to user code. + + We recommend only using this `post_list_generators_with_metadata` + interceptor in new development instead of the `post_list_generators` interceptor. + When both interceptors are used, this `post_list_generators_with_metadata` interceptor runs after the + `post_list_generators` interceptor. The (possibly modified) response returned by + `post_list_generators` will be passed to + `post_list_generators_with_metadata`. + """ + return response, metadata + def pre_update_generator( self, request: gcdc_generator.UpdateGeneratorRequest, @@ -218,12 +289,35 @@ def post_update_generator( ) -> gcdc_generator.Generator: """Post-rpc interceptor for update_generator - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_generator_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Generators server but before - it is returned to user code. + it is returned to user code. This `post_update_generator` interceptor runs + before the `post_update_generator_with_metadata` interceptor. 
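For existing subclasses, moving an override from the deprecated hook to the recommended one is mechanical; a hedged sketch (subclass name illustrative, base class assumed to be `GeneratorsRestInterceptor`):

    class MigratedGeneratorsInterceptor(GeneratorsRestInterceptor):
        # Previously a subclass would override only:
        #
        #     def post_update_generator(self, response):
        #         ...  # adjust and return the response
        #
        # The recommended replacement also receives the response-header metadata
        # and returns both values as a tuple:
        def post_update_generator_with_metadata(self, response, metadata):
            ...  # adjust the response exactly as the old override did
            return response, metadata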
""" return response + def post_update_generator_with_metadata( + self, + response: gcdc_generator.Generator, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gcdc_generator.Generator, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_generator + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Generators server but before it is returned to user code. + + We recommend only using this `post_update_generator_with_metadata` + interceptor in new development instead of the `post_update_generator` interceptor. + When both interceptors are used, this `post_update_generator_with_metadata` interceptor runs after the + `post_update_generator` interceptor. The (possibly modified) response returned by + `post_update_generator` will be passed to + `post_update_generator_with_metadata`. + """ + return response, metadata + def pre_get_location( self, request: locations_pb2.GetLocationRequest, @@ -567,6 +661,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_generator(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_generator_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -827,6 +925,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_generator(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_generator_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -972,6 +1074,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_generators(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_generators_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1129,6 +1235,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_generator(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_generator_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/intents/client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/intents/client.py index 374426f40b2e..6449301f12d8 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/intents/client.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/intents/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -517,6 +519,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. 
return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -1568,16 +1597,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -1623,16 +1656,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def cancel_operation( self, @@ -1733,16 +1770,20 @@ def get_location( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def list_locations( self, @@ -1788,16 +1829,20 @@ def list_locations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. 
+ return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/intents/transports/rest.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/intents/transports/rest.py index affb4adc9583..3cfe2ed08a50 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/intents/transports/rest.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/intents/transports/rest.py @@ -147,12 +147,35 @@ def pre_create_intent( def post_create_intent(self, response: gcdc_intent.Intent) -> gcdc_intent.Intent: """Post-rpc interceptor for create_intent - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_intent_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Intents server but before - it is returned to user code. + it is returned to user code. This `post_create_intent` interceptor runs + before the `post_create_intent_with_metadata` interceptor. """ return response + def post_create_intent_with_metadata( + self, + response: gcdc_intent.Intent, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gcdc_intent.Intent, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_intent + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Intents server but before it is returned to user code. + + We recommend only using this `post_create_intent_with_metadata` + interceptor in new development instead of the `post_create_intent` interceptor. + When both interceptors are used, this `post_create_intent_with_metadata` interceptor runs after the + `post_create_intent` interceptor. The (possibly modified) response returned by + `post_create_intent` will be passed to + `post_create_intent_with_metadata`. + """ + return response, metadata + def pre_delete_intent( self, request: intent.DeleteIntentRequest, @@ -182,12 +205,35 @@ def post_export_intents( ) -> operations_pb2.Operation: """Post-rpc interceptor for export_intents - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_export_intents_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Intents server but before - it is returned to user code. + it is returned to user code. This `post_export_intents` interceptor runs + before the `post_export_intents_with_metadata` interceptor. """ return response + def post_export_intents_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for export_intents + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Intents server but before it is returned to user code. + + We recommend only using this `post_export_intents_with_metadata` + interceptor in new development instead of the `post_export_intents` interceptor. + When both interceptors are used, this `post_export_intents_with_metadata` interceptor runs after the + `post_export_intents` interceptor. 
The (possibly modified) response returned by + `post_export_intents` will be passed to + `post_export_intents_with_metadata`. + """ + return response, metadata + def pre_get_intent( self, request: intent.GetIntentRequest, @@ -203,12 +249,33 @@ def pre_get_intent( def post_get_intent(self, response: intent.Intent) -> intent.Intent: """Post-rpc interceptor for get_intent - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_intent_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Intents server but before - it is returned to user code. + it is returned to user code. This `post_get_intent` interceptor runs + before the `post_get_intent_with_metadata` interceptor. """ return response + def post_get_intent_with_metadata( + self, response: intent.Intent, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[intent.Intent, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_intent + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Intents server but before it is returned to user code. + + We recommend only using this `post_get_intent_with_metadata` + interceptor in new development instead of the `post_get_intent` interceptor. + When both interceptors are used, this `post_get_intent_with_metadata` interceptor runs after the + `post_get_intent` interceptor. The (possibly modified) response returned by + `post_get_intent` will be passed to + `post_get_intent_with_metadata`. + """ + return response, metadata + def pre_import_intents( self, request: intent.ImportIntentsRequest, @@ -226,12 +293,35 @@ def post_import_intents( ) -> operations_pb2.Operation: """Post-rpc interceptor for import_intents - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_import_intents_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Intents server but before - it is returned to user code. + it is returned to user code. This `post_import_intents` interceptor runs + before the `post_import_intents_with_metadata` interceptor. """ return response + def post_import_intents_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for import_intents + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Intents server but before it is returned to user code. + + We recommend only using this `post_import_intents_with_metadata` + interceptor in new development instead of the `post_import_intents` interceptor. + When both interceptors are used, this `post_import_intents_with_metadata` interceptor runs after the + `post_import_intents` interceptor. The (possibly modified) response returned by + `post_import_intents` will be passed to + `post_import_intents_with_metadata`. + """ + return response, metadata + def pre_list_intents( self, request: intent.ListIntentsRequest, @@ -249,12 +339,35 @@ def post_list_intents( ) -> intent.ListIntentsResponse: """Post-rpc interceptor for list_intents - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_intents_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response after it is returned by the Intents server but before - it is returned to user code. + it is returned to user code. This `post_list_intents` interceptor runs + before the `post_list_intents_with_metadata` interceptor. """ return response + def post_list_intents_with_metadata( + self, + response: intent.ListIntentsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[intent.ListIntentsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_intents + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Intents server but before it is returned to user code. + + We recommend only using this `post_list_intents_with_metadata` + interceptor in new development instead of the `post_list_intents` interceptor. + When both interceptors are used, this `post_list_intents_with_metadata` interceptor runs after the + `post_list_intents` interceptor. The (possibly modified) response returned by + `post_list_intents` will be passed to + `post_list_intents_with_metadata`. + """ + return response, metadata + def pre_update_intent( self, request: gcdc_intent.UpdateIntentRequest, @@ -272,12 +385,35 @@ def pre_update_intent( def post_update_intent(self, response: gcdc_intent.Intent) -> gcdc_intent.Intent: """Post-rpc interceptor for update_intent - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_intent_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Intents server but before - it is returned to user code. + it is returned to user code. This `post_update_intent` interceptor runs + before the `post_update_intent_with_metadata` interceptor. """ return response + def post_update_intent_with_metadata( + self, + response: gcdc_intent.Intent, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gcdc_intent.Intent, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_intent + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Intents server but before it is returned to user code. + + We recommend only using this `post_update_intent_with_metadata` + interceptor in new development instead of the `post_update_intent` interceptor. + When both interceptors are used, this `post_update_intent_with_metadata` interceptor runs after the + `post_update_intent` interceptor. The (possibly modified) response returned by + `post_update_intent` will be passed to + `post_update_intent_with_metadata`. 
+ """ + return response, metadata + def pre_get_location( self, request: locations_pb2.GetLocationRequest, @@ -681,6 +817,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_intent(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_intent_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -940,6 +1080,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_export_intents(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_export_intents_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1088,6 +1232,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_intent(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_intent_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1238,6 +1386,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_import_intents(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_import_intents_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1383,6 +1535,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_intents(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_intents_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1539,6 +1695,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_intent(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_intent_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/pages/client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/pages/client.py index b9e474afe879..2ea524f287ce 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/pages/client.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/pages/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -614,6 +616,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. 
+ + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -1490,16 +1519,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -1545,16 +1578,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def cancel_operation( self, @@ -1655,16 +1692,20 @@ def get_location( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def list_locations( self, @@ -1710,16 +1751,20 @@ def list_locations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. 
+ return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/pages/transports/rest.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/pages/transports/rest.py index 9c21494d9e33..43965978d4b0 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/pages/transports/rest.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/pages/transports/rest.py @@ -129,12 +129,35 @@ def pre_create_page( def post_create_page(self, response: gcdc_page.Page) -> gcdc_page.Page: """Post-rpc interceptor for create_page - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_page_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Pages server but before - it is returned to user code. + it is returned to user code. This `post_create_page` interceptor runs + before the `post_create_page_with_metadata` interceptor. """ return response + def post_create_page_with_metadata( + self, + response: gcdc_page.Page, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gcdc_page.Page, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_page + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Pages server but before it is returned to user code. + + We recommend only using this `post_create_page_with_metadata` + interceptor in new development instead of the `post_create_page` interceptor. + When both interceptors are used, this `post_create_page_with_metadata` interceptor runs after the + `post_create_page` interceptor. The (possibly modified) response returned by + `post_create_page` will be passed to + `post_create_page_with_metadata`. + """ + return response, metadata + def pre_delete_page( self, request: page.DeletePageRequest, @@ -162,12 +185,33 @@ def pre_get_page( def post_get_page(self, response: page.Page) -> page.Page: """Post-rpc interceptor for get_page - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_page_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Pages server but before - it is returned to user code. + it is returned to user code. This `post_get_page` interceptor runs + before the `post_get_page_with_metadata` interceptor. """ return response + def post_get_page_with_metadata( + self, response: page.Page, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[page.Page, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_page + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Pages server but before it is returned to user code. + + We recommend only using this `post_get_page_with_metadata` + interceptor in new development instead of the `post_get_page` interceptor. + When both interceptors are used, this `post_get_page_with_metadata` interceptor runs after the + `post_get_page` interceptor. The (possibly modified) response returned by + `post_get_page` will be passed to + `post_get_page_with_metadata`. 
+ """ + return response, metadata + def pre_list_pages( self, request: page.ListPagesRequest, @@ -185,12 +229,35 @@ def post_list_pages( ) -> page.ListPagesResponse: """Post-rpc interceptor for list_pages - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_pages_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Pages server but before - it is returned to user code. + it is returned to user code. This `post_list_pages` interceptor runs + before the `post_list_pages_with_metadata` interceptor. """ return response + def post_list_pages_with_metadata( + self, + response: page.ListPagesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[page.ListPagesResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_pages + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Pages server but before it is returned to user code. + + We recommend only using this `post_list_pages_with_metadata` + interceptor in new development instead of the `post_list_pages` interceptor. + When both interceptors are used, this `post_list_pages_with_metadata` interceptor runs after the + `post_list_pages` interceptor. The (possibly modified) response returned by + `post_list_pages` will be passed to + `post_list_pages_with_metadata`. + """ + return response, metadata + def pre_update_page( self, request: gcdc_page.UpdatePageRequest, @@ -206,12 +273,35 @@ def pre_update_page( def post_update_page(self, response: gcdc_page.Page) -> gcdc_page.Page: """Post-rpc interceptor for update_page - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_page_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Pages server but before - it is returned to user code. + it is returned to user code. This `post_update_page` interceptor runs + before the `post_update_page_with_metadata` interceptor. """ return response + def post_update_page_with_metadata( + self, + response: gcdc_page.Page, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gcdc_page.Page, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_page + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Pages server but before it is returned to user code. + + We recommend only using this `post_update_page_with_metadata` + interceptor in new development instead of the `post_update_page` interceptor. + When both interceptors are used, this `post_update_page_with_metadata` interceptor runs after the + `post_update_page` interceptor. The (possibly modified) response returned by + `post_update_page` will be passed to + `post_update_page_with_metadata`. 
+ """ + return response, metadata + def pre_get_location( self, request: locations_pb2.GetLocationRequest, @@ -567,6 +657,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_page(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_page_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -834,6 +928,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_page(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_page_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -977,6 +1075,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_pages(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_pages_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1145,6 +1247,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_page(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_page_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/security_settings_service/client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/security_settings_service/client.py index 858e1e052fcf..07e7350094ba 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/security_settings_service/client.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/security_settings_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -537,6 +539,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -1393,16 +1422,20 @@ def list_operations( # Validate the universe domain. 
self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -1448,16 +1481,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def cancel_operation( self, @@ -1558,16 +1595,20 @@ def get_location( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def list_locations( self, @@ -1613,16 +1654,20 @@ def list_locations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/security_settings_service/transports/rest.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/security_settings_service/transports/rest.py index ca33a51acaa4..902fc90d72cc 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/security_settings_service/transports/rest.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/security_settings_service/transports/rest.py @@ -136,12 +136,37 @@ def post_create_security_settings( ) -> gcdc_security_settings.SecuritySettings: """Post-rpc interceptor for create_security_settings - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_security_settings_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the SecuritySettingsService server but before - it is returned to user code. + it is returned to user code. This `post_create_security_settings` interceptor runs + before the `post_create_security_settings_with_metadata` interceptor. 
""" return response + def post_create_security_settings_with_metadata( + self, + response: gcdc_security_settings.SecuritySettings, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + gcdc_security_settings.SecuritySettings, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for create_security_settings + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SecuritySettingsService server but before it is returned to user code. + + We recommend only using this `post_create_security_settings_with_metadata` + interceptor in new development instead of the `post_create_security_settings` interceptor. + When both interceptors are used, this `post_create_security_settings_with_metadata` interceptor runs after the + `post_create_security_settings` interceptor. The (possibly modified) response returned by + `post_create_security_settings` will be passed to + `post_create_security_settings_with_metadata`. + """ + return response, metadata + def pre_delete_security_settings( self, request: security_settings.DeleteSecuritySettingsRequest, @@ -177,12 +202,37 @@ def post_get_security_settings( ) -> security_settings.SecuritySettings: """Post-rpc interceptor for get_security_settings - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_security_settings_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the SecuritySettingsService server but before - it is returned to user code. + it is returned to user code. This `post_get_security_settings` interceptor runs + before the `post_get_security_settings_with_metadata` interceptor. """ return response + def post_get_security_settings_with_metadata( + self, + response: security_settings.SecuritySettings, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + security_settings.SecuritySettings, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for get_security_settings + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SecuritySettingsService server but before it is returned to user code. + + We recommend only using this `post_get_security_settings_with_metadata` + interceptor in new development instead of the `post_get_security_settings` interceptor. + When both interceptors are used, this `post_get_security_settings_with_metadata` interceptor runs after the + `post_get_security_settings` interceptor. The (possibly modified) response returned by + `post_get_security_settings` will be passed to + `post_get_security_settings_with_metadata`. + """ + return response, metadata + def pre_list_security_settings( self, request: security_settings.ListSecuritySettingsRequest, @@ -203,12 +253,38 @@ def post_list_security_settings( ) -> security_settings.ListSecuritySettingsResponse: """Post-rpc interceptor for list_security_settings - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_security_settings_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the SecuritySettingsService server but before - it is returned to user code. + it is returned to user code. This `post_list_security_settings` interceptor runs + before the `post_list_security_settings_with_metadata` interceptor. 
""" return response + def post_list_security_settings_with_metadata( + self, + response: security_settings.ListSecuritySettingsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + security_settings.ListSecuritySettingsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_security_settings + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SecuritySettingsService server but before it is returned to user code. + + We recommend only using this `post_list_security_settings_with_metadata` + interceptor in new development instead of the `post_list_security_settings` interceptor. + When both interceptors are used, this `post_list_security_settings_with_metadata` interceptor runs after the + `post_list_security_settings` interceptor. The (possibly modified) response returned by + `post_list_security_settings` will be passed to + `post_list_security_settings_with_metadata`. + """ + return response, metadata + def pre_update_security_settings( self, request: gcdc_security_settings.UpdateSecuritySettingsRequest, @@ -229,12 +305,37 @@ def post_update_security_settings( ) -> gcdc_security_settings.SecuritySettings: """Post-rpc interceptor for update_security_settings - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_security_settings_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the SecuritySettingsService server but before - it is returned to user code. + it is returned to user code. This `post_update_security_settings` interceptor runs + before the `post_update_security_settings_with_metadata` interceptor. """ return response + def post_update_security_settings_with_metadata( + self, + response: gcdc_security_settings.SecuritySettings, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + gcdc_security_settings.SecuritySettings, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for update_security_settings + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SecuritySettingsService server but before it is returned to user code. + + We recommend only using this `post_update_security_settings_with_metadata` + interceptor in new development instead of the `post_update_security_settings` interceptor. + When both interceptors are used, this `post_update_security_settings_with_metadata` interceptor runs after the + `post_update_security_settings` interceptor. The (possibly modified) response returned by + `post_update_security_settings` will be passed to + `post_update_security_settings_with_metadata`. 
+ """ + return response, metadata + def pre_get_location( self, request: locations_pb2.GetLocationRequest, @@ -578,6 +679,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_security_settings(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_security_settings_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -842,6 +947,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_security_settings(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_security_settings_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -990,6 +1099,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_security_settings(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_security_settings_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1148,6 +1261,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_security_settings(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_security_settings_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/session_entity_types/client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/session_entity_types/client.py index 5ccbaa543e80..03b0030ddf15 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/session_entity_types/client.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/session_entity_types/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -498,6 +500,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. 
@@ -1411,16 +1440,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -1466,16 +1499,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def cancel_operation( self, @@ -1576,16 +1613,20 @@ def get_location( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def list_locations( self, @@ -1631,16 +1672,20 @@ def list_locations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/session_entity_types/transports/rest.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/session_entity_types/transports/rest.py index 997aba73d225..802f744614e1 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/session_entity_types/transports/rest.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/session_entity_types/transports/rest.py @@ -136,12 +136,38 @@ def post_create_session_entity_type( ) -> gcdc_session_entity_type.SessionEntityType: """Post-rpc interceptor for create_session_entity_type - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_session_entity_type_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the SessionEntityTypes server but before - it is returned to user code. + it is returned to user code. This `post_create_session_entity_type` interceptor runs + before the `post_create_session_entity_type_with_metadata` interceptor. 
""" return response + def post_create_session_entity_type_with_metadata( + self, + response: gcdc_session_entity_type.SessionEntityType, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + gcdc_session_entity_type.SessionEntityType, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for create_session_entity_type + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SessionEntityTypes server but before it is returned to user code. + + We recommend only using this `post_create_session_entity_type_with_metadata` + interceptor in new development instead of the `post_create_session_entity_type` interceptor. + When both interceptors are used, this `post_create_session_entity_type_with_metadata` interceptor runs after the + `post_create_session_entity_type` interceptor. The (possibly modified) response returned by + `post_create_session_entity_type` will be passed to + `post_create_session_entity_type_with_metadata`. + """ + return response, metadata + def pre_delete_session_entity_type( self, request: session_entity_type.DeleteSessionEntityTypeRequest, @@ -177,12 +203,37 @@ def post_get_session_entity_type( ) -> session_entity_type.SessionEntityType: """Post-rpc interceptor for get_session_entity_type - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_session_entity_type_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the SessionEntityTypes server but before - it is returned to user code. + it is returned to user code. This `post_get_session_entity_type` interceptor runs + before the `post_get_session_entity_type_with_metadata` interceptor. """ return response + def post_get_session_entity_type_with_metadata( + self, + response: session_entity_type.SessionEntityType, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + session_entity_type.SessionEntityType, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for get_session_entity_type + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SessionEntityTypes server but before it is returned to user code. + + We recommend only using this `post_get_session_entity_type_with_metadata` + interceptor in new development instead of the `post_get_session_entity_type` interceptor. + When both interceptors are used, this `post_get_session_entity_type_with_metadata` interceptor runs after the + `post_get_session_entity_type` interceptor. The (possibly modified) response returned by + `post_get_session_entity_type` will be passed to + `post_get_session_entity_type_with_metadata`. + """ + return response, metadata + def pre_list_session_entity_types( self, request: session_entity_type.ListSessionEntityTypesRequest, @@ -203,12 +254,38 @@ def post_list_session_entity_types( ) -> session_entity_type.ListSessionEntityTypesResponse: """Post-rpc interceptor for list_session_entity_types - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_session_entity_types_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the SessionEntityTypes server but before - it is returned to user code. + it is returned to user code. This `post_list_session_entity_types` interceptor runs + before the `post_list_session_entity_types_with_metadata` interceptor. 
""" return response + def post_list_session_entity_types_with_metadata( + self, + response: session_entity_type.ListSessionEntityTypesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + session_entity_type.ListSessionEntityTypesResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_session_entity_types + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SessionEntityTypes server but before it is returned to user code. + + We recommend only using this `post_list_session_entity_types_with_metadata` + interceptor in new development instead of the `post_list_session_entity_types` interceptor. + When both interceptors are used, this `post_list_session_entity_types_with_metadata` interceptor runs after the + `post_list_session_entity_types` interceptor. The (possibly modified) response returned by + `post_list_session_entity_types` will be passed to + `post_list_session_entity_types_with_metadata`. + """ + return response, metadata + def pre_update_session_entity_type( self, request: gcdc_session_entity_type.UpdateSessionEntityTypeRequest, @@ -229,12 +306,38 @@ def post_update_session_entity_type( ) -> gcdc_session_entity_type.SessionEntityType: """Post-rpc interceptor for update_session_entity_type - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_session_entity_type_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the SessionEntityTypes server but before - it is returned to user code. + it is returned to user code. This `post_update_session_entity_type` interceptor runs + before the `post_update_session_entity_type_with_metadata` interceptor. """ return response + def post_update_session_entity_type_with_metadata( + self, + response: gcdc_session_entity_type.SessionEntityType, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + gcdc_session_entity_type.SessionEntityType, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for update_session_entity_type + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SessionEntityTypes server but before it is returned to user code. + + We recommend only using this `post_update_session_entity_type_with_metadata` + interceptor in new development instead of the `post_update_session_entity_type` interceptor. + When both interceptors are used, this `post_update_session_entity_type_with_metadata` interceptor runs after the + `post_update_session_entity_type` interceptor. The (possibly modified) response returned by + `post_update_session_entity_type` will be passed to + `post_update_session_entity_type_with_metadata`. 
+ """ + return response, metadata + def pre_get_location( self, request: locations_pb2.GetLocationRequest, @@ -593,6 +696,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_session_entity_type(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_session_entity_type_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -871,6 +978,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_session_entity_type(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_session_entity_type_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1021,6 +1132,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_session_entity_types(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_session_entity_types_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1195,6 +1310,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_session_entity_type(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_session_entity_type_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/sessions/client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/sessions/client.py index 65e55dd9ddb0..fe8991d9ade5 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/sessions/client.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/sessions/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -714,6 +716,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. 
@@ -1581,16 +1610,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -1636,16 +1669,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def cancel_operation( self, @@ -1746,16 +1783,20 @@ def get_location( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def list_locations( self, @@ -1801,16 +1842,20 @@ def list_locations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/sessions/transports/rest.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/sessions/transports/rest.py index 55a45d745acd..7dfa722c9abd 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/sessions/transports/rest.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/sessions/transports/rest.py @@ -133,12 +133,35 @@ def post_detect_intent( ) -> session.DetectIntentResponse: """Post-rpc interceptor for detect_intent - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_detect_intent_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Sessions server but before - it is returned to user code. + it is returned to user code. This `post_detect_intent` interceptor runs + before the `post_detect_intent_with_metadata` interceptor. 
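The ordering described in this docstring is what the transport's `__call__` methods implement further down in this diff; schematically (simplified, not the verbatim generated code):

    # Inside the REST transport, once the HTTP response has been parsed:
    resp = self._interceptor.post_detect_intent(resp)              # deprecated hook, runs first
    response_metadata = [(k, str(v)) for k, v in response.headers.items()]
    resp, _ = self._interceptor.post_detect_intent_with_metadata(  # new hook, runs second,
        resp, response_metadata                                    # sees the legacy hook's result
    )
    # Only `resp` flows on to user code; the metadata the hook returns is discarded here.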
""" return response + def post_detect_intent_with_metadata( + self, + response: session.DetectIntentResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[session.DetectIntentResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for detect_intent + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Sessions server but before it is returned to user code. + + We recommend only using this `post_detect_intent_with_metadata` + interceptor in new development instead of the `post_detect_intent` interceptor. + When both interceptors are used, this `post_detect_intent_with_metadata` interceptor runs after the + `post_detect_intent` interceptor. The (possibly modified) response returned by + `post_detect_intent` will be passed to + `post_detect_intent_with_metadata`. + """ + return response, metadata + def pre_fulfill_intent( self, request: session.FulfillIntentRequest, @@ -156,12 +179,35 @@ def post_fulfill_intent( ) -> session.FulfillIntentResponse: """Post-rpc interceptor for fulfill_intent - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_fulfill_intent_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Sessions server but before - it is returned to user code. + it is returned to user code. This `post_fulfill_intent` interceptor runs + before the `post_fulfill_intent_with_metadata` interceptor. """ return response + def post_fulfill_intent_with_metadata( + self, + response: session.FulfillIntentResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[session.FulfillIntentResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for fulfill_intent + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Sessions server but before it is returned to user code. + + We recommend only using this `post_fulfill_intent_with_metadata` + interceptor in new development instead of the `post_fulfill_intent` interceptor. + When both interceptors are used, this `post_fulfill_intent_with_metadata` interceptor runs after the + `post_fulfill_intent` interceptor. The (possibly modified) response returned by + `post_fulfill_intent` will be passed to + `post_fulfill_intent_with_metadata`. + """ + return response, metadata + def pre_match_intent( self, request: session.MatchIntentRequest, @@ -179,12 +225,35 @@ def post_match_intent( ) -> session.MatchIntentResponse: """Post-rpc interceptor for match_intent - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_match_intent_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Sessions server but before - it is returned to user code. + it is returned to user code. This `post_match_intent` interceptor runs + before the `post_match_intent_with_metadata` interceptor. """ return response + def post_match_intent_with_metadata( + self, + response: session.MatchIntentResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[session.MatchIntentResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for match_intent + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Sessions server but before it is returned to user code. 
+ + We recommend only using this `post_match_intent_with_metadata` + interceptor in new development instead of the `post_match_intent` interceptor. + When both interceptors are used, this `post_match_intent_with_metadata` interceptor runs after the + `post_match_intent` interceptor. The (possibly modified) response returned by + `post_match_intent` will be passed to + `post_match_intent_with_metadata`. + """ + return response, metadata + def pre_server_streaming_detect_intent( self, request: session.DetectIntentRequest, @@ -202,12 +271,37 @@ def post_server_streaming_detect_intent( ) -> rest_streaming.ResponseIterator: """Post-rpc interceptor for server_streaming_detect_intent - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_server_streaming_detect_intent_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Sessions server but before - it is returned to user code. + it is returned to user code. This `post_server_streaming_detect_intent` interceptor runs + before the `post_server_streaming_detect_intent_with_metadata` interceptor. """ return response + def post_server_streaming_detect_intent_with_metadata( + self, + response: rest_streaming.ResponseIterator, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + rest_streaming.ResponseIterator, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for server_streaming_detect_intent + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Sessions server but before it is returned to user code. + + We recommend only using this `post_server_streaming_detect_intent_with_metadata` + interceptor in new development instead of the `post_server_streaming_detect_intent` interceptor. + When both interceptors are used, this `post_server_streaming_detect_intent_with_metadata` interceptor runs after the + `post_server_streaming_detect_intent` interceptor. The (possibly modified) response returned by + `post_server_streaming_detect_intent` will be passed to + `post_server_streaming_detect_intent_with_metadata`. + """ + return response, metadata + def pre_submit_answer_feedback( self, request: session.SubmitAnswerFeedbackRequest, @@ -227,12 +321,35 @@ def post_submit_answer_feedback( ) -> session.AnswerFeedback: """Post-rpc interceptor for submit_answer_feedback - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_submit_answer_feedback_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Sessions server but before - it is returned to user code. + it is returned to user code. This `post_submit_answer_feedback` interceptor runs + before the `post_submit_answer_feedback_with_metadata` interceptor. """ return response + def post_submit_answer_feedback_with_metadata( + self, + response: session.AnswerFeedback, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[session.AnswerFeedback, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for submit_answer_feedback + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Sessions server but before it is returned to user code. + + We recommend only using this `post_submit_answer_feedback_with_metadata` + interceptor in new development instead of the `post_submit_answer_feedback` interceptor. 
+ When both interceptors are used, this `post_submit_answer_feedback_with_metadata` interceptor runs after the + `post_submit_answer_feedback` interceptor. The (possibly modified) response returned by + `post_submit_answer_feedback` will be passed to + `post_submit_answer_feedback_with_metadata`. + """ + return response, metadata + def pre_get_location( self, request: locations_pb2.GetLocationRequest, @@ -573,6 +690,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_detect_intent(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_detect_intent_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -723,6 +844,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_fulfill_intent(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_fulfill_intent_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -871,6 +996,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_match_intent(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_match_intent_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1022,6 +1151,13 @@ def __call__( ) resp = self._interceptor.post_server_streaming_detect_intent(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_server_streaming_detect_intent_with_metadata( + resp, response_metadata + ) return resp class _StreamingDetectIntent( @@ -1170,6 +1306,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_submit_answer_feedback(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_submit_answer_feedback_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/test_cases/client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/test_cases/client.py index 13ef1b5e4c82..aae1f53172b3 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/test_cases/client.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/test_cases/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -715,6 +717,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. 
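The helper being defined here is a no-op when the credentials object predates `get_cred_info`, which the inline comment attributes to google-auth>=2.35.0. A quick way to check what a given environment will do (the credentials lookup below is illustrative):

    import google.auth

    creds, _ = google.auth.default()  # or whatever credentials the client was built with
    print(google.auth.__version__)
    print(hasattr(creds, "get_cred_info"))  # False on google-auth < 2.35.0, so nothing is appended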
+ """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -2271,16 +2300,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -2326,16 +2359,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def cancel_operation( self, @@ -2436,16 +2473,20 @@ def get_location( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def list_locations( self, @@ -2491,16 +2532,20 @@ def list_locations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. 
+ return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/test_cases/transports/rest.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/test_cases/transports/rest.py index fee7ab3ff04f..36c9383f0bc3 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/test_cases/transports/rest.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/test_cases/transports/rest.py @@ -203,12 +203,35 @@ def post_batch_run_test_cases( ) -> operations_pb2.Operation: """Post-rpc interceptor for batch_run_test_cases - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_batch_run_test_cases_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the TestCases server but before - it is returned to user code. + it is returned to user code. This `post_batch_run_test_cases` interceptor runs + before the `post_batch_run_test_cases_with_metadata` interceptor. """ return response + def post_batch_run_test_cases_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for batch_run_test_cases + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the TestCases server but before it is returned to user code. + + We recommend only using this `post_batch_run_test_cases_with_metadata` + interceptor in new development instead of the `post_batch_run_test_cases` interceptor. + When both interceptors are used, this `post_batch_run_test_cases_with_metadata` interceptor runs after the + `post_batch_run_test_cases` interceptor. The (possibly modified) response returned by + `post_batch_run_test_cases` will be passed to + `post_batch_run_test_cases_with_metadata`. + """ + return response, metadata + def pre_calculate_coverage( self, request: test_case.CalculateCoverageRequest, @@ -228,12 +251,37 @@ def post_calculate_coverage( ) -> test_case.CalculateCoverageResponse: """Post-rpc interceptor for calculate_coverage - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_calculate_coverage_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the TestCases server but before - it is returned to user code. + it is returned to user code. This `post_calculate_coverage` interceptor runs + before the `post_calculate_coverage_with_metadata` interceptor. """ return response + def post_calculate_coverage_with_metadata( + self, + response: test_case.CalculateCoverageResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + test_case.CalculateCoverageResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for calculate_coverage + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the TestCases server but before it is returned to user code. + + We recommend only using this `post_calculate_coverage_with_metadata` + interceptor in new development instead of the `post_calculate_coverage` interceptor. 
+ When both interceptors are used, this `post_calculate_coverage_with_metadata` interceptor runs after the + `post_calculate_coverage` interceptor. The (possibly modified) response returned by + `post_calculate_coverage` will be passed to + `post_calculate_coverage_with_metadata`. + """ + return response, metadata + def pre_create_test_case( self, request: gcdc_test_case.CreateTestCaseRequest, @@ -253,12 +301,35 @@ def post_create_test_case( ) -> gcdc_test_case.TestCase: """Post-rpc interceptor for create_test_case - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_test_case_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the TestCases server but before - it is returned to user code. + it is returned to user code. This `post_create_test_case` interceptor runs + before the `post_create_test_case_with_metadata` interceptor. """ return response + def post_create_test_case_with_metadata( + self, + response: gcdc_test_case.TestCase, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gcdc_test_case.TestCase, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_test_case + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the TestCases server but before it is returned to user code. + + We recommend only using this `post_create_test_case_with_metadata` + interceptor in new development instead of the `post_create_test_case` interceptor. + When both interceptors are used, this `post_create_test_case_with_metadata` interceptor runs after the + `post_create_test_case` interceptor. The (possibly modified) response returned by + `post_create_test_case` will be passed to + `post_create_test_case_with_metadata`. + """ + return response, metadata + def pre_export_test_cases( self, request: test_case.ExportTestCasesRequest, @@ -278,12 +349,35 @@ def post_export_test_cases( ) -> operations_pb2.Operation: """Post-rpc interceptor for export_test_cases - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_export_test_cases_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the TestCases server but before - it is returned to user code. + it is returned to user code. This `post_export_test_cases` interceptor runs + before the `post_export_test_cases_with_metadata` interceptor. """ return response + def post_export_test_cases_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for export_test_cases + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the TestCases server but before it is returned to user code. + + We recommend only using this `post_export_test_cases_with_metadata` + interceptor in new development instead of the `post_export_test_cases` interceptor. + When both interceptors are used, this `post_export_test_cases_with_metadata` interceptor runs after the + `post_export_test_cases` interceptor. The (possibly modified) response returned by + `post_export_test_cases` will be passed to + `post_export_test_cases_with_metadata`. 
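Because `post_export_test_cases_with_metadata` (like `post_batch_run_test_cases_with_metadata` earlier in this file) receives the raw `operations_pb2.Operation`, a hook can record the operation name before the client wraps it in an operation future. Minimal sketch; the import path and the base-class name `TestCasesRestInterceptor` are assumed from the file layout shown in this diff.

    from google.cloud.dialogflowcx_v3.services.test_cases.transports.rest import (
        TestCasesRestInterceptor,  # assumed class name
    )

    class OperationTrackingInterceptor(TestCasesRestInterceptor):
        def post_export_test_cases_with_metadata(self, response, metadata):
            # `response` is the long-running operation returned by the server.
            print("ExportTestCases started as operation:", response.name)
            return response, metadata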
+ """ + return response, metadata + def pre_get_test_case( self, request: test_case.GetTestCaseRequest, @@ -299,12 +393,35 @@ def pre_get_test_case( def post_get_test_case(self, response: test_case.TestCase) -> test_case.TestCase: """Post-rpc interceptor for get_test_case - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_test_case_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the TestCases server but before - it is returned to user code. + it is returned to user code. This `post_get_test_case` interceptor runs + before the `post_get_test_case_with_metadata` interceptor. """ return response + def post_get_test_case_with_metadata( + self, + response: test_case.TestCase, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[test_case.TestCase, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_test_case + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the TestCases server but before it is returned to user code. + + We recommend only using this `post_get_test_case_with_metadata` + interceptor in new development instead of the `post_get_test_case` interceptor. + When both interceptors are used, this `post_get_test_case_with_metadata` interceptor runs after the + `post_get_test_case` interceptor. The (possibly modified) response returned by + `post_get_test_case` will be passed to + `post_get_test_case_with_metadata`. + """ + return response, metadata + def pre_get_test_case_result( self, request: test_case.GetTestCaseResultRequest, @@ -324,12 +441,35 @@ def post_get_test_case_result( ) -> test_case.TestCaseResult: """Post-rpc interceptor for get_test_case_result - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_test_case_result_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the TestCases server but before - it is returned to user code. + it is returned to user code. This `post_get_test_case_result` interceptor runs + before the `post_get_test_case_result_with_metadata` interceptor. """ return response + def post_get_test_case_result_with_metadata( + self, + response: test_case.TestCaseResult, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[test_case.TestCaseResult, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_test_case_result + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the TestCases server but before it is returned to user code. + + We recommend only using this `post_get_test_case_result_with_metadata` + interceptor in new development instead of the `post_get_test_case_result` interceptor. + When both interceptors are used, this `post_get_test_case_result_with_metadata` interceptor runs after the + `post_get_test_case_result` interceptor. The (possibly modified) response returned by + `post_get_test_case_result` will be passed to + `post_get_test_case_result_with_metadata`. + """ + return response, metadata + def pre_import_test_cases( self, request: test_case.ImportTestCasesRequest, @@ -349,12 +489,35 @@ def post_import_test_cases( ) -> operations_pb2.Operation: """Post-rpc interceptor for import_test_cases - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_import_test_cases_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response after it is returned by the TestCases server but before - it is returned to user code. + it is returned to user code. This `post_import_test_cases` interceptor runs + before the `post_import_test_cases_with_metadata` interceptor. """ return response + def post_import_test_cases_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for import_test_cases + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the TestCases server but before it is returned to user code. + + We recommend only using this `post_import_test_cases_with_metadata` + interceptor in new development instead of the `post_import_test_cases` interceptor. + When both interceptors are used, this `post_import_test_cases_with_metadata` interceptor runs after the + `post_import_test_cases` interceptor. The (possibly modified) response returned by + `post_import_test_cases` will be passed to + `post_import_test_cases_with_metadata`. + """ + return response, metadata + def pre_list_test_case_results( self, request: test_case.ListTestCaseResultsRequest, @@ -374,12 +537,37 @@ def post_list_test_case_results( ) -> test_case.ListTestCaseResultsResponse: """Post-rpc interceptor for list_test_case_results - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_test_case_results_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the TestCases server but before - it is returned to user code. + it is returned to user code. This `post_list_test_case_results` interceptor runs + before the `post_list_test_case_results_with_metadata` interceptor. """ return response + def post_list_test_case_results_with_metadata( + self, + response: test_case.ListTestCaseResultsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + test_case.ListTestCaseResultsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_test_case_results + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the TestCases server but before it is returned to user code. + + We recommend only using this `post_list_test_case_results_with_metadata` + interceptor in new development instead of the `post_list_test_case_results` interceptor. + When both interceptors are used, this `post_list_test_case_results_with_metadata` interceptor runs after the + `post_list_test_case_results` interceptor. The (possibly modified) response returned by + `post_list_test_case_results` will be passed to + `post_list_test_case_results_with_metadata`. + """ + return response, metadata + def pre_list_test_cases( self, request: test_case.ListTestCasesRequest, @@ -397,12 +585,37 @@ def post_list_test_cases( ) -> test_case.ListTestCasesResponse: """Post-rpc interceptor for list_test_cases - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_test_cases_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the TestCases server but before - it is returned to user code. + it is returned to user code. This `post_list_test_cases` interceptor runs + before the `post_list_test_cases_with_metadata` interceptor. 
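For code that currently overrides the deprecated `post_list_test_cases` hook, the migration is mechanical: accept and return the metadata as well. Both variants can coexist during a transition, in which case the deprecated one runs first, as documented above. Sketch only; the base class is assumed to be the `TestCasesRestInterceptor` from this file.

    class MyTestCasesInterceptor(TestCasesRestInterceptor):
        # Before (deprecated): receives and returns the response only.
        def post_list_test_cases(self, response):
            return response

        # After: also receives the HTTP response headers as (name, value) pairs,
        # and must return the (response, metadata) pair.
        def post_list_test_cases_with_metadata(self, response, metadata):
            return response, metadata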
""" return response + def post_list_test_cases_with_metadata( + self, + response: test_case.ListTestCasesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + test_case.ListTestCasesResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_test_cases + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the TestCases server but before it is returned to user code. + + We recommend only using this `post_list_test_cases_with_metadata` + interceptor in new development instead of the `post_list_test_cases` interceptor. + When both interceptors are used, this `post_list_test_cases_with_metadata` interceptor runs after the + `post_list_test_cases` interceptor. The (possibly modified) response returned by + `post_list_test_cases` will be passed to + `post_list_test_cases_with_metadata`. + """ + return response, metadata + def pre_run_test_case( self, request: test_case.RunTestCaseRequest, @@ -420,12 +633,35 @@ def post_run_test_case( ) -> operations_pb2.Operation: """Post-rpc interceptor for run_test_case - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_run_test_case_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the TestCases server but before - it is returned to user code. + it is returned to user code. This `post_run_test_case` interceptor runs + before the `post_run_test_case_with_metadata` interceptor. """ return response + def post_run_test_case_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for run_test_case + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the TestCases server but before it is returned to user code. + + We recommend only using this `post_run_test_case_with_metadata` + interceptor in new development instead of the `post_run_test_case` interceptor. + When both interceptors are used, this `post_run_test_case_with_metadata` interceptor runs after the + `post_run_test_case` interceptor. The (possibly modified) response returned by + `post_run_test_case` will be passed to + `post_run_test_case_with_metadata`. + """ + return response, metadata + def pre_update_test_case( self, request: gcdc_test_case.UpdateTestCaseRequest, @@ -445,12 +681,35 @@ def post_update_test_case( ) -> gcdc_test_case.TestCase: """Post-rpc interceptor for update_test_case - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_test_case_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the TestCases server but before - it is returned to user code. + it is returned to user code. This `post_update_test_case` interceptor runs + before the `post_update_test_case_with_metadata` interceptor. """ return response + def post_update_test_case_with_metadata( + self, + response: gcdc_test_case.TestCase, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gcdc_test_case.TestCase, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_test_case + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the TestCases server but before it is returned to user code. 
+ + We recommend only using this `post_update_test_case_with_metadata` + interceptor in new development instead of the `post_update_test_case` interceptor. + When both interceptors are used, this `post_update_test_case_with_metadata` interceptor runs after the + `post_update_test_case` interceptor. The (possibly modified) response returned by + `post_update_test_case` will be passed to + `post_update_test_case_with_metadata`. + """ + return response, metadata + def pre_get_location( self, request: locations_pb2.GetLocationRequest, @@ -964,6 +1223,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_batch_run_test_cases(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_batch_run_test_cases_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1109,6 +1372,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_calculate_coverage(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_calculate_coverage_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1266,6 +1533,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_test_case(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_test_case_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1420,6 +1691,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_export_test_cases(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_export_test_cases_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1563,6 +1838,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_test_case(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_test_case_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1708,6 +1987,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_test_case_result(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_test_case_result_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1862,6 +2145,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_import_test_cases(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_import_test_cases_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2007,6 +2294,10 @@ def __call__( 
json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_test_case_results(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_test_case_results_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2156,6 +2447,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_test_cases(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_test_cases_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2306,6 +2601,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_run_test_case(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_run_test_case_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2461,6 +2760,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_test_case(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_test_case_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/transition_route_groups/client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/transition_route_groups/client.py index f9858b9c6ea2..32c15e505bcc 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/transition_route_groups/client.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/transition_route_groups/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -598,6 +600,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -1490,16 +1519,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. 
- response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -1545,16 +1578,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def cancel_operation( self, @@ -1655,16 +1692,20 @@ def get_location( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def list_locations( self, @@ -1710,16 +1751,20 @@ def list_locations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/transition_route_groups/transports/rest.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/transition_route_groups/transports/rest.py index 9c2ac7a61985..32a2ce98ef38 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/transition_route_groups/transports/rest.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/transition_route_groups/transports/rest.py @@ -136,12 +136,38 @@ def post_create_transition_route_group( ) -> gcdc_transition_route_group.TransitionRouteGroup: """Post-rpc interceptor for create_transition_route_group - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_transition_route_group_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the TransitionRouteGroups server but before - it is returned to user code. + it is returned to user code. This `post_create_transition_route_group` interceptor runs + before the `post_create_transition_route_group_with_metadata` interceptor. 
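To make any of these hooks take effect, the custom interceptor has to be attached to the REST transport when the client is built. The wiring below follows the usual generated layout; the class names and the `interceptor=` constructor argument are assumptions based on that layout, not something this diff changes, so verify them against the installed package.

    from google.cloud import dialogflowcx_v3
    from google.cloud.dialogflowcx_v3.services.transition_route_groups.transports.rest import (
        TransitionRouteGroupsRestInterceptor,
        TransitionRouteGroupsRestTransport,
    )

    class MyInterceptor(TransitionRouteGroupsRestInterceptor):
        def post_get_transition_route_group_with_metadata(self, response, metadata):
            return response, metadata

    # Uses Application Default Credentials unless credentials are passed explicitly.
    transport = TransitionRouteGroupsRestTransport(interceptor=MyInterceptor())
    client = dialogflowcx_v3.TransitionRouteGroupsClient(transport=transport)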
""" return response + def post_create_transition_route_group_with_metadata( + self, + response: gcdc_transition_route_group.TransitionRouteGroup, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + gcdc_transition_route_group.TransitionRouteGroup, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for create_transition_route_group + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the TransitionRouteGroups server but before it is returned to user code. + + We recommend only using this `post_create_transition_route_group_with_metadata` + interceptor in new development instead of the `post_create_transition_route_group` interceptor. + When both interceptors are used, this `post_create_transition_route_group_with_metadata` interceptor runs after the + `post_create_transition_route_group` interceptor. The (possibly modified) response returned by + `post_create_transition_route_group` will be passed to + `post_create_transition_route_group_with_metadata`. + """ + return response, metadata + def pre_delete_transition_route_group( self, request: transition_route_group.DeleteTransitionRouteGroupRequest, @@ -177,12 +203,38 @@ def post_get_transition_route_group( ) -> transition_route_group.TransitionRouteGroup: """Post-rpc interceptor for get_transition_route_group - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_transition_route_group_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the TransitionRouteGroups server but before - it is returned to user code. + it is returned to user code. This `post_get_transition_route_group` interceptor runs + before the `post_get_transition_route_group_with_metadata` interceptor. """ return response + def post_get_transition_route_group_with_metadata( + self, + response: transition_route_group.TransitionRouteGroup, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + transition_route_group.TransitionRouteGroup, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for get_transition_route_group + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the TransitionRouteGroups server but before it is returned to user code. + + We recommend only using this `post_get_transition_route_group_with_metadata` + interceptor in new development instead of the `post_get_transition_route_group` interceptor. + When both interceptors are used, this `post_get_transition_route_group_with_metadata` interceptor runs after the + `post_get_transition_route_group` interceptor. The (possibly modified) response returned by + `post_get_transition_route_group` will be passed to + `post_get_transition_route_group_with_metadata`. + """ + return response, metadata + def pre_list_transition_route_groups( self, request: transition_route_group.ListTransitionRouteGroupsRequest, @@ -203,12 +255,38 @@ def post_list_transition_route_groups( ) -> transition_route_group.ListTransitionRouteGroupsResponse: """Post-rpc interceptor for list_transition_route_groups - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_transition_route_groups_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the TransitionRouteGroups server but before - it is returned to user code. + it is returned to user code. 
This `post_list_transition_route_groups` interceptor runs + before the `post_list_transition_route_groups_with_metadata` interceptor. """ return response + def post_list_transition_route_groups_with_metadata( + self, + response: transition_route_group.ListTransitionRouteGroupsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + transition_route_group.ListTransitionRouteGroupsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_transition_route_groups + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the TransitionRouteGroups server but before it is returned to user code. + + We recommend only using this `post_list_transition_route_groups_with_metadata` + interceptor in new development instead of the `post_list_transition_route_groups` interceptor. + When both interceptors are used, this `post_list_transition_route_groups_with_metadata` interceptor runs after the + `post_list_transition_route_groups` interceptor. The (possibly modified) response returned by + `post_list_transition_route_groups` will be passed to + `post_list_transition_route_groups_with_metadata`. + """ + return response, metadata + def pre_update_transition_route_group( self, request: gcdc_transition_route_group.UpdateTransitionRouteGroupRequest, @@ -229,12 +307,38 @@ def post_update_transition_route_group( ) -> gcdc_transition_route_group.TransitionRouteGroup: """Post-rpc interceptor for update_transition_route_group - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_transition_route_group_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the TransitionRouteGroups server but before - it is returned to user code. + it is returned to user code. This `post_update_transition_route_group` interceptor runs + before the `post_update_transition_route_group_with_metadata` interceptor. """ return response + def post_update_transition_route_group_with_metadata( + self, + response: gcdc_transition_route_group.TransitionRouteGroup, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + gcdc_transition_route_group.TransitionRouteGroup, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for update_transition_route_group + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the TransitionRouteGroups server but before it is returned to user code. + + We recommend only using this `post_update_transition_route_group_with_metadata` + interceptor in new development instead of the `post_update_transition_route_group` interceptor. + When both interceptors are used, this `post_update_transition_route_group_with_metadata` interceptor runs after the + `post_update_transition_route_group` interceptor. The (possibly modified) response returned by + `post_update_transition_route_group` will be passed to + `post_update_transition_route_group_with_metadata`. 
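One practical use of the list-style hooks defined here: the `*_with_metadata` variant sees the raw `ListTransitionRouteGroupsResponse` for each page, before the client wraps the pages in a pager, so per-page details such as the next page token are visible alongside the response headers. Illustrative sketch; the base-class name is assumed, and the field names follow the standard List response shape.

    class PageLoggingInterceptor(TransitionRouteGroupsRestInterceptor):
        def post_list_transition_route_groups_with_metadata(self, response, metadata):
            # Runs once per page fetched through the pager.
            print(
                "fetched", len(response.transition_route_groups),
                "route groups; next_page_token:", response.next_page_token or "<none>",
            )
            return response, metadata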
+ """ + return response, metadata + def pre_get_location( self, request: locations_pb2.GetLocationRequest, @@ -578,6 +682,13 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_transition_route_group(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_create_transition_route_group_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -842,6 +953,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_transition_route_group(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_transition_route_group_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -991,6 +1106,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_transition_route_groups(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_transition_route_groups_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1148,6 +1267,13 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_transition_route_group(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_update_transition_route_group_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/versions/client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/versions/client.py index 547607403536..a6bd01605621 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/versions/client.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/versions/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -499,6 +501,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. 
+ """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -1589,16 +1618,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -1644,16 +1677,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def cancel_operation( self, @@ -1754,16 +1791,20 @@ def get_location( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def list_locations( self, @@ -1809,16 +1850,20 @@ def list_locations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/versions/transports/rest.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/versions/transports/rest.py index d07d31984b31..103589a93385 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/versions/transports/rest.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/versions/transports/rest.py @@ -147,12 +147,37 @@ def post_compare_versions( ) -> version.CompareVersionsResponse: """Post-rpc interceptor for compare_versions - Override in a subclass to manipulate the response + DEPRECATED. 
Please use the `post_compare_versions_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Versions server but before - it is returned to user code. + it is returned to user code. This `post_compare_versions` interceptor runs + before the `post_compare_versions_with_metadata` interceptor. """ return response + def post_compare_versions_with_metadata( + self, + response: version.CompareVersionsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + version.CompareVersionsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for compare_versions + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Versions server but before it is returned to user code. + + We recommend only using this `post_compare_versions_with_metadata` + interceptor in new development instead of the `post_compare_versions` interceptor. + When both interceptors are used, this `post_compare_versions_with_metadata` interceptor runs after the + `post_compare_versions` interceptor. The (possibly modified) response returned by + `post_compare_versions` will be passed to + `post_compare_versions_with_metadata`. + """ + return response, metadata + def pre_create_version( self, request: gcdc_version.CreateVersionRequest, @@ -172,12 +197,35 @@ def post_create_version( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_version - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_version_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Versions server but before - it is returned to user code. + it is returned to user code. This `post_create_version` interceptor runs + before the `post_create_version_with_metadata` interceptor. """ return response + def post_create_version_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_version + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Versions server but before it is returned to user code. + + We recommend only using this `post_create_version_with_metadata` + interceptor in new development instead of the `post_create_version` interceptor. + When both interceptors are used, this `post_create_version_with_metadata` interceptor runs after the + `post_create_version` interceptor. The (possibly modified) response returned by + `post_create_version` will be passed to + `post_create_version_with_metadata`. + """ + return response, metadata + def pre_delete_version( self, request: version.DeleteVersionRequest, @@ -205,12 +253,35 @@ def pre_get_version( def post_get_version(self, response: version.Version) -> version.Version: """Post-rpc interceptor for get_version - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_version_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Versions server but before - it is returned to user code. + it is returned to user code. This `post_get_version` interceptor runs + before the `post_get_version_with_metadata` interceptor. 
""" return response + def post_get_version_with_metadata( + self, + response: version.Version, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[version.Version, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_version + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Versions server but before it is returned to user code. + + We recommend only using this `post_get_version_with_metadata` + interceptor in new development instead of the `post_get_version` interceptor. + When both interceptors are used, this `post_get_version_with_metadata` interceptor runs after the + `post_get_version` interceptor. The (possibly modified) response returned by + `post_get_version` will be passed to + `post_get_version_with_metadata`. + """ + return response, metadata + def pre_list_versions( self, request: version.ListVersionsRequest, @@ -228,12 +299,35 @@ def post_list_versions( ) -> version.ListVersionsResponse: """Post-rpc interceptor for list_versions - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_versions_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Versions server but before - it is returned to user code. + it is returned to user code. This `post_list_versions` interceptor runs + before the `post_list_versions_with_metadata` interceptor. """ return response + def post_list_versions_with_metadata( + self, + response: version.ListVersionsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[version.ListVersionsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_versions + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Versions server but before it is returned to user code. + + We recommend only using this `post_list_versions_with_metadata` + interceptor in new development instead of the `post_list_versions` interceptor. + When both interceptors are used, this `post_list_versions_with_metadata` interceptor runs after the + `post_list_versions` interceptor. The (possibly modified) response returned by + `post_list_versions` will be passed to + `post_list_versions_with_metadata`. + """ + return response, metadata + def pre_load_version( self, request: version.LoadVersionRequest, @@ -251,12 +345,35 @@ def post_load_version( ) -> operations_pb2.Operation: """Post-rpc interceptor for load_version - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_load_version_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Versions server but before - it is returned to user code. + it is returned to user code. This `post_load_version` interceptor runs + before the `post_load_version_with_metadata` interceptor. """ return response + def post_load_version_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for load_version + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Versions server but before it is returned to user code. 
+ + We recommend only using this `post_load_version_with_metadata` + interceptor in new development instead of the `post_load_version` interceptor. + When both interceptors are used, this `post_load_version_with_metadata` interceptor runs after the + `post_load_version` interceptor. The (possibly modified) response returned by + `post_load_version` will be passed to + `post_load_version_with_metadata`. + """ + return response, metadata + def pre_update_version( self, request: gcdc_version.UpdateVersionRequest, @@ -276,12 +393,35 @@ def post_update_version( ) -> gcdc_version.Version: """Post-rpc interceptor for update_version - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_version_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Versions server but before - it is returned to user code. + it is returned to user code. This `post_update_version` interceptor runs + before the `post_update_version_with_metadata` interceptor. """ return response + def post_update_version_with_metadata( + self, + response: gcdc_version.Version, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gcdc_version.Version, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_version + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Versions server but before it is returned to user code. + + We recommend only using this `post_update_version_with_metadata` + interceptor in new development instead of the `post_update_version` interceptor. + When both interceptors are used, this `post_update_version_with_metadata` interceptor runs after the + `post_update_version` interceptor. The (possibly modified) response returned by + `post_update_version` will be passed to + `post_update_version_with_metadata`. 
+ """ + return response, metadata + def pre_get_location( self, request: locations_pb2.GetLocationRequest, @@ -686,6 +826,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_compare_versions(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_compare_versions_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -838,6 +982,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_version(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_version_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1092,6 +1240,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_version(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_version_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1237,6 +1389,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_versions(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_versions_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1387,6 +1543,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_load_version(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_load_version_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1538,6 +1698,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_version(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_version_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/webhooks/client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/webhooks/client.py index 1ffa69a54c39..22158cd2cd7c 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/webhooks/client.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/webhooks/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -516,6 +518,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. 
+ + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -1338,16 +1367,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -1393,16 +1426,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def cancel_operation( self, @@ -1503,16 +1540,20 @@ def get_location( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def list_locations( self, @@ -1558,16 +1599,20 @@ def list_locations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. 
+ return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/webhooks/transports/rest.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/webhooks/transports/rest.py index 68852d55abc2..8224225a2d83 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/webhooks/transports/rest.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3/services/webhooks/transports/rest.py @@ -133,12 +133,35 @@ def post_create_webhook( ) -> gcdc_webhook.Webhook: """Post-rpc interceptor for create_webhook - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_webhook_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Webhooks server but before - it is returned to user code. + it is returned to user code. This `post_create_webhook` interceptor runs + before the `post_create_webhook_with_metadata` interceptor. """ return response + def post_create_webhook_with_metadata( + self, + response: gcdc_webhook.Webhook, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gcdc_webhook.Webhook, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_webhook + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Webhooks server but before it is returned to user code. + + We recommend only using this `post_create_webhook_with_metadata` + interceptor in new development instead of the `post_create_webhook` interceptor. + When both interceptors are used, this `post_create_webhook_with_metadata` interceptor runs after the + `post_create_webhook` interceptor. The (possibly modified) response returned by + `post_create_webhook` will be passed to + `post_create_webhook_with_metadata`. + """ + return response, metadata + def pre_delete_webhook( self, request: webhook.DeleteWebhookRequest, @@ -166,12 +189,35 @@ def pre_get_webhook( def post_get_webhook(self, response: webhook.Webhook) -> webhook.Webhook: """Post-rpc interceptor for get_webhook - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_webhook_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Webhooks server but before - it is returned to user code. + it is returned to user code. This `post_get_webhook` interceptor runs + before the `post_get_webhook_with_metadata` interceptor. """ return response + def post_get_webhook_with_metadata( + self, + response: webhook.Webhook, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[webhook.Webhook, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_webhook + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Webhooks server but before it is returned to user code. + + We recommend only using this `post_get_webhook_with_metadata` + interceptor in new development instead of the `post_get_webhook` interceptor. + When both interceptors are used, this `post_get_webhook_with_metadata` interceptor runs after the + `post_get_webhook` interceptor. The (possibly modified) response returned by + `post_get_webhook` will be passed to + `post_get_webhook_with_metadata`. 
+ """ + return response, metadata + def pre_list_webhooks( self, request: webhook.ListWebhooksRequest, @@ -189,12 +235,35 @@ def post_list_webhooks( ) -> webhook.ListWebhooksResponse: """Post-rpc interceptor for list_webhooks - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_webhooks_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Webhooks server but before - it is returned to user code. + it is returned to user code. This `post_list_webhooks` interceptor runs + before the `post_list_webhooks_with_metadata` interceptor. """ return response + def post_list_webhooks_with_metadata( + self, + response: webhook.ListWebhooksResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[webhook.ListWebhooksResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_webhooks + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Webhooks server but before it is returned to user code. + + We recommend only using this `post_list_webhooks_with_metadata` + interceptor in new development instead of the `post_list_webhooks` interceptor. + When both interceptors are used, this `post_list_webhooks_with_metadata` interceptor runs after the + `post_list_webhooks` interceptor. The (possibly modified) response returned by + `post_list_webhooks` will be passed to + `post_list_webhooks_with_metadata`. + """ + return response, metadata + def pre_update_webhook( self, request: gcdc_webhook.UpdateWebhookRequest, @@ -214,12 +283,35 @@ def post_update_webhook( ) -> gcdc_webhook.Webhook: """Post-rpc interceptor for update_webhook - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_webhook_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Webhooks server but before - it is returned to user code. + it is returned to user code. This `post_update_webhook` interceptor runs + before the `post_update_webhook_with_metadata` interceptor. """ return response + def post_update_webhook_with_metadata( + self, + response: gcdc_webhook.Webhook, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gcdc_webhook.Webhook, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_webhook + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Webhooks server but before it is returned to user code. + + We recommend only using this `post_update_webhook_with_metadata` + interceptor in new development instead of the `post_update_webhook` interceptor. + When both interceptors are used, this `post_update_webhook_with_metadata` interceptor runs after the + `post_update_webhook` interceptor. The (possibly modified) response returned by + `post_update_webhook` will be passed to + `post_update_webhook_with_metadata`. 
+ """ + return response, metadata + def pre_get_location( self, request: locations_pb2.GetLocationRequest, @@ -566,6 +658,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_webhook(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_webhook_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -827,6 +923,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_webhook(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_webhook_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -972,6 +1072,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_webhooks(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_webhooks_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1130,6 +1234,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_webhook(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_webhook_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/gapic_version.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/gapic_version.py index e553ae451f41..e589b34795e6 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/gapic_version.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.38.0" # {x-release-please-version} +__version__ = "1.39.0" # {x-release-please-version} diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/agents/client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/agents/client.py index b5c6e3bd3e6b..2e8bcf836510 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/agents/client.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/agents/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -684,6 +686,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. 
+ """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -2205,16 +2234,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -2260,16 +2293,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def cancel_operation( self, @@ -2370,16 +2407,20 @@ def get_location( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def list_locations( self, @@ -2425,16 +2466,20 @@ def list_locations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. 
+ return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/agents/transports/rest.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/agents/transports/rest.py index 42eba78e0bc9..567244067d7c 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/agents/transports/rest.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/agents/transports/rest.py @@ -181,12 +181,35 @@ def pre_create_agent( def post_create_agent(self, response: gcdc_agent.Agent) -> gcdc_agent.Agent: """Post-rpc interceptor for create_agent - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_agent_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Agents server but before - it is returned to user code. + it is returned to user code. This `post_create_agent` interceptor runs + before the `post_create_agent_with_metadata` interceptor. """ return response + def post_create_agent_with_metadata( + self, + response: gcdc_agent.Agent, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gcdc_agent.Agent, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_agent + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Agents server but before it is returned to user code. + + We recommend only using this `post_create_agent_with_metadata` + interceptor in new development instead of the `post_create_agent` interceptor. + When both interceptors are used, this `post_create_agent_with_metadata` interceptor runs after the + `post_create_agent` interceptor. The (possibly modified) response returned by + `post_create_agent` will be passed to + `post_create_agent_with_metadata`. + """ + return response, metadata + def pre_delete_agent( self, request: agent.DeleteAgentRequest, @@ -216,12 +239,35 @@ def post_export_agent( ) -> operations_pb2.Operation: """Post-rpc interceptor for export_agent - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_export_agent_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Agents server but before - it is returned to user code. + it is returned to user code. This `post_export_agent` interceptor runs + before the `post_export_agent_with_metadata` interceptor. """ return response + def post_export_agent_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for export_agent + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Agents server but before it is returned to user code. + + We recommend only using this `post_export_agent_with_metadata` + interceptor in new development instead of the `post_export_agent` interceptor. + When both interceptors are used, this `post_export_agent_with_metadata` interceptor runs after the + `post_export_agent` interceptor. The (possibly modified) response returned by + `post_export_agent` will be passed to + `post_export_agent_with_metadata`. 
+ """ + return response, metadata + def pre_get_agent( self, request: agent.GetAgentRequest, @@ -237,12 +283,33 @@ def pre_get_agent( def post_get_agent(self, response: agent.Agent) -> agent.Agent: """Post-rpc interceptor for get_agent - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_agent_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Agents server but before - it is returned to user code. + it is returned to user code. This `post_get_agent` interceptor runs + before the `post_get_agent_with_metadata` interceptor. """ return response + def post_get_agent_with_metadata( + self, response: agent.Agent, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[agent.Agent, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_agent + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Agents server but before it is returned to user code. + + We recommend only using this `post_get_agent_with_metadata` + interceptor in new development instead of the `post_get_agent` interceptor. + When both interceptors are used, this `post_get_agent_with_metadata` interceptor runs after the + `post_get_agent` interceptor. The (possibly modified) response returned by + `post_get_agent` will be passed to + `post_get_agent_with_metadata`. + """ + return response, metadata + def pre_get_agent_validation_result( self, request: agent.GetAgentValidationResultRequest, @@ -262,12 +329,35 @@ def post_get_agent_validation_result( ) -> agent.AgentValidationResult: """Post-rpc interceptor for get_agent_validation_result - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_agent_validation_result_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Agents server but before - it is returned to user code. + it is returned to user code. This `post_get_agent_validation_result` interceptor runs + before the `post_get_agent_validation_result_with_metadata` interceptor. """ return response + def post_get_agent_validation_result_with_metadata( + self, + response: agent.AgentValidationResult, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[agent.AgentValidationResult, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_agent_validation_result + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Agents server but before it is returned to user code. + + We recommend only using this `post_get_agent_validation_result_with_metadata` + interceptor in new development instead of the `post_get_agent_validation_result` interceptor. + When both interceptors are used, this `post_get_agent_validation_result_with_metadata` interceptor runs after the + `post_get_agent_validation_result` interceptor. The (possibly modified) response returned by + `post_get_agent_validation_result` will be passed to + `post_get_agent_validation_result_with_metadata`. + """ + return response, metadata + def pre_get_generative_settings( self, request: agent.GetGenerativeSettingsRequest, @@ -287,12 +377,37 @@ def post_get_generative_settings( ) -> generative_settings.GenerativeSettings: """Post-rpc interceptor for get_generative_settings - Override in a subclass to manipulate the response + DEPRECATED. 
Please use the `post_get_generative_settings_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Agents server but before - it is returned to user code. + it is returned to user code. This `post_get_generative_settings` interceptor runs + before the `post_get_generative_settings_with_metadata` interceptor. """ return response + def post_get_generative_settings_with_metadata( + self, + response: generative_settings.GenerativeSettings, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + generative_settings.GenerativeSettings, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for get_generative_settings + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Agents server but before it is returned to user code. + + We recommend only using this `post_get_generative_settings_with_metadata` + interceptor in new development instead of the `post_get_generative_settings` interceptor. + When both interceptors are used, this `post_get_generative_settings_with_metadata` interceptor runs after the + `post_get_generative_settings` interceptor. The (possibly modified) response returned by + `post_get_generative_settings` will be passed to + `post_get_generative_settings_with_metadata`. + """ + return response, metadata + def pre_list_agents( self, request: agent.ListAgentsRequest, @@ -310,12 +425,35 @@ def post_list_agents( ) -> agent.ListAgentsResponse: """Post-rpc interceptor for list_agents - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_agents_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Agents server but before - it is returned to user code. + it is returned to user code. This `post_list_agents` interceptor runs + before the `post_list_agents_with_metadata` interceptor. """ return response + def post_list_agents_with_metadata( + self, + response: agent.ListAgentsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[agent.ListAgentsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_agents + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Agents server but before it is returned to user code. + + We recommend only using this `post_list_agents_with_metadata` + interceptor in new development instead of the `post_list_agents` interceptor. + When both interceptors are used, this `post_list_agents_with_metadata` interceptor runs after the + `post_list_agents` interceptor. The (possibly modified) response returned by + `post_list_agents` will be passed to + `post_list_agents_with_metadata`. + """ + return response, metadata + def pre_restore_agent( self, request: agent.RestoreAgentRequest, @@ -333,12 +471,35 @@ def post_restore_agent( ) -> operations_pb2.Operation: """Post-rpc interceptor for restore_agent - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_restore_agent_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Agents server but before - it is returned to user code. + it is returned to user code. This `post_restore_agent` interceptor runs + before the `post_restore_agent_with_metadata` interceptor. 
""" return response + def post_restore_agent_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for restore_agent + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Agents server but before it is returned to user code. + + We recommend only using this `post_restore_agent_with_metadata` + interceptor in new development instead of the `post_restore_agent` interceptor. + When both interceptors are used, this `post_restore_agent_with_metadata` interceptor runs after the + `post_restore_agent` interceptor. The (possibly modified) response returned by + `post_restore_agent` will be passed to + `post_restore_agent_with_metadata`. + """ + return response, metadata + def pre_update_agent( self, request: gcdc_agent.UpdateAgentRequest, @@ -354,12 +515,35 @@ def pre_update_agent( def post_update_agent(self, response: gcdc_agent.Agent) -> gcdc_agent.Agent: """Post-rpc interceptor for update_agent - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_agent_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Agents server but before - it is returned to user code. + it is returned to user code. This `post_update_agent` interceptor runs + before the `post_update_agent_with_metadata` interceptor. """ return response + def post_update_agent_with_metadata( + self, + response: gcdc_agent.Agent, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gcdc_agent.Agent, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_agent + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Agents server but before it is returned to user code. + + We recommend only using this `post_update_agent_with_metadata` + interceptor in new development instead of the `post_update_agent` interceptor. + When both interceptors are used, this `post_update_agent_with_metadata` interceptor runs after the + `post_update_agent` interceptor. The (possibly modified) response returned by + `post_update_agent` will be passed to + `post_update_agent_with_metadata`. + """ + return response, metadata + def pre_update_generative_settings( self, request: agent.UpdateGenerativeSettingsRequest, @@ -379,12 +563,38 @@ def post_update_generative_settings( ) -> gcdc_generative_settings.GenerativeSettings: """Post-rpc interceptor for update_generative_settings - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_generative_settings_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Agents server but before - it is returned to user code. + it is returned to user code. This `post_update_generative_settings` interceptor runs + before the `post_update_generative_settings_with_metadata` interceptor. 
""" return response + def post_update_generative_settings_with_metadata( + self, + response: gcdc_generative_settings.GenerativeSettings, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + gcdc_generative_settings.GenerativeSettings, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for update_generative_settings + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Agents server but before it is returned to user code. + + We recommend only using this `post_update_generative_settings_with_metadata` + interceptor in new development instead of the `post_update_generative_settings` interceptor. + When both interceptors are used, this `post_update_generative_settings_with_metadata` interceptor runs after the + `post_update_generative_settings` interceptor. The (possibly modified) response returned by + `post_update_generative_settings` will be passed to + `post_update_generative_settings_with_metadata`. + """ + return response, metadata + def pre_validate_agent( self, request: agent.ValidateAgentRequest, @@ -402,12 +612,35 @@ def post_validate_agent( ) -> agent.AgentValidationResult: """Post-rpc interceptor for validate_agent - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_validate_agent_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Agents server but before - it is returned to user code. + it is returned to user code. This `post_validate_agent` interceptor runs + before the `post_validate_agent_with_metadata` interceptor. """ return response + def post_validate_agent_with_metadata( + self, + response: agent.AgentValidationResult, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[agent.AgentValidationResult, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for validate_agent + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Agents server but before it is returned to user code. + + We recommend only using this `post_validate_agent_with_metadata` + interceptor in new development instead of the `post_validate_agent` interceptor. + When both interceptors are used, this `post_validate_agent_with_metadata` interceptor runs after the + `post_validate_agent` interceptor. The (possibly modified) response returned by + `post_validate_agent` will be passed to + `post_validate_agent_with_metadata`. 
+ """ + return response, metadata + def pre_get_location( self, request: locations_pb2.GetLocationRequest, @@ -817,6 +1050,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_agent(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_agent_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1072,6 +1309,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_export_agent(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_export_agent_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1228,6 +1469,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_agent(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_agent_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1374,6 +1619,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_agent_validation_result(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_agent_validation_result_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1518,6 +1767,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_generative_settings(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_generative_settings_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1663,6 +1916,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_agents(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_agents_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1813,6 +2070,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_restore_agent(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_restore_agent_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1975,6 +2236,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_agent(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_agent_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2126,6 +2391,10 @@ def __call__( json_format.Parse(response.content, pb_resp, 
ignore_unknown_fields=True) resp = self._interceptor.post_update_generative_settings(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_generative_settings_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2279,6 +2548,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_validate_agent(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_validate_agent_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/changelogs/client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/changelogs/client.py index 17124cb8bb42..0fcd2e267b76 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/changelogs/client.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/changelogs/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -490,6 +492,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -965,16 +994,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -1020,16 +1053,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. 
+ return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def cancel_operation( self, @@ -1130,16 +1167,20 @@ def get_location( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def list_locations( self, @@ -1185,16 +1226,20 @@ def list_locations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/changelogs/transports/rest.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/changelogs/transports/rest.py index 70684498a666..69888a78074b 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/changelogs/transports/rest.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/changelogs/transports/rest.py @@ -107,12 +107,35 @@ def pre_get_changelog( def post_get_changelog(self, response: changelog.Changelog) -> changelog.Changelog: """Post-rpc interceptor for get_changelog - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_changelog_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Changelogs server but before - it is returned to user code. + it is returned to user code. This `post_get_changelog` interceptor runs + before the `post_get_changelog_with_metadata` interceptor. """ return response + def post_get_changelog_with_metadata( + self, + response: changelog.Changelog, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[changelog.Changelog, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_changelog + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Changelogs server but before it is returned to user code. + + We recommend only using this `post_get_changelog_with_metadata` + interceptor in new development instead of the `post_get_changelog` interceptor. + When both interceptors are used, this `post_get_changelog_with_metadata` interceptor runs after the + `post_get_changelog` interceptor. The (possibly modified) response returned by + `post_get_changelog` will be passed to + `post_get_changelog_with_metadata`. + """ + return response, metadata + def pre_list_changelogs( self, request: changelog.ListChangelogsRequest, @@ -132,12 +155,37 @@ def post_list_changelogs( ) -> changelog.ListChangelogsResponse: """Post-rpc interceptor for list_changelogs - Override in a subclass to manipulate the response + DEPRECATED. 
Please use the `post_list_changelogs_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Changelogs server but before - it is returned to user code. + it is returned to user code. This `post_list_changelogs` interceptor runs + before the `post_list_changelogs_with_metadata` interceptor. """ return response + def post_list_changelogs_with_metadata( + self, + response: changelog.ListChangelogsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + changelog.ListChangelogsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_changelogs + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Changelogs server but before it is returned to user code. + + We recommend only using this `post_list_changelogs_with_metadata` + interceptor in new development instead of the `post_list_changelogs` interceptor. + When both interceptors are used, this `post_list_changelogs_with_metadata` interceptor runs after the + `post_list_changelogs` interceptor. The (possibly modified) response returned by + `post_list_changelogs` will be passed to + `post_list_changelogs_with_metadata`. + """ + return response, metadata + def pre_get_location( self, request: locations_pb2.GetLocationRequest, @@ -473,6 +521,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_changelog(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_changelog_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -618,6 +670,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_changelogs(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_changelogs_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/conversation_history/client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/conversation_history/client.py index c65885bf7b65..61b953edc57f 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/conversation_history/client.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/conversation_history/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -889,6 +891,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. 
+ """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -1470,16 +1499,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -1525,16 +1558,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def cancel_operation( self, @@ -1635,16 +1672,20 @@ def get_location( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def list_locations( self, @@ -1690,16 +1731,20 @@ def list_locations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. 
+ return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/conversation_history/transports/rest.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/conversation_history/transports/rest.py index 5f75ba65b14d..b6ad58506aba 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/conversation_history/transports/rest.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/conversation_history/transports/rest.py @@ -132,12 +132,37 @@ def post_get_conversation( ) -> conversation_history.Conversation: """Post-rpc interceptor for get_conversation - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_conversation_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ConversationHistory server but before - it is returned to user code. + it is returned to user code. This `post_get_conversation` interceptor runs + before the `post_get_conversation_with_metadata` interceptor. """ return response + def post_get_conversation_with_metadata( + self, + response: conversation_history.Conversation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + conversation_history.Conversation, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for get_conversation + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ConversationHistory server but before it is returned to user code. + + We recommend only using this `post_get_conversation_with_metadata` + interceptor in new development instead of the `post_get_conversation` interceptor. + When both interceptors are used, this `post_get_conversation_with_metadata` interceptor runs after the + `post_get_conversation` interceptor. The (possibly modified) response returned by + `post_get_conversation` will be passed to + `post_get_conversation_with_metadata`. + """ + return response, metadata + def pre_list_conversations( self, request: conversation_history.ListConversationsRequest, @@ -158,12 +183,38 @@ def post_list_conversations( ) -> conversation_history.ListConversationsResponse: """Post-rpc interceptor for list_conversations - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_conversations_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ConversationHistory server but before - it is returned to user code. + it is returned to user code. This `post_list_conversations` interceptor runs + before the `post_list_conversations_with_metadata` interceptor. """ return response + def post_list_conversations_with_metadata( + self, + response: conversation_history.ListConversationsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + conversation_history.ListConversationsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_conversations + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ConversationHistory server but before it is returned to user code. 
+ + We recommend only using this `post_list_conversations_with_metadata` + interceptor in new development instead of the `post_list_conversations` interceptor. + When both interceptors are used, this `post_list_conversations_with_metadata` interceptor runs after the + `post_list_conversations` interceptor. The (possibly modified) response returned by + `post_list_conversations` will be passed to + `post_list_conversations_with_metadata`. + """ + return response, metadata + def pre_get_location( self, request: locations_pb2.GetLocationRequest, @@ -607,6 +658,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_conversation(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_conversation_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -757,6 +812,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_conversations(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_conversations_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/deployments/client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/deployments/client.py index 8f41ed229e35..4c12856ebb85 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/deployments/client.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/deployments/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -570,6 +572,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -1056,16 +1085,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. 
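The new `_with_metadata` hooks receive the response together with metadata that the transport derives from the HTTP response headers. A sketch of a custom interceptor that inspects one of those headers for `list_conversations`; the base-class name and import path are assumptions, following the generator's usual `<Service>RestInterceptor` layout:

# Assumed import path and class name; the generated REST transport module
# normally exposes a <Service>RestInterceptor base class.
from google.cloud.dialogflowcx_v3beta1.services.conversation_history.transports.rest import (
    ConversationHistoryRestInterceptor,
)


class HeaderLoggingInterceptor(ConversationHistoryRestInterceptor):
    def post_list_conversations_with_metadata(self, response, metadata):
        # `metadata` is built by the transport from response.headers.items(),
        # so each entry is a (header-name, header-value) string pair.
        for key, value in metadata:
            if key.lower() == "content-type":
                print("list_conversations content-type:", value)
        return response, metadata

The subclass would be handed to the REST transport at construction time in the usual GAPIC pattern; the exact keyword is defined by the generated transport's constructor.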
+ return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -1111,16 +1144,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def cancel_operation( self, @@ -1221,16 +1258,20 @@ def get_location( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def list_locations( self, @@ -1276,16 +1317,20 @@ def list_locations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/deployments/transports/rest.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/deployments/transports/rest.py index 61cdb184c537..ed714a27b598 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/deployments/transports/rest.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/deployments/transports/rest.py @@ -111,12 +111,35 @@ def post_get_deployment( ) -> deployment.Deployment: """Post-rpc interceptor for get_deployment - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_deployment_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Deployments server but before - it is returned to user code. + it is returned to user code. This `post_get_deployment` interceptor runs + before the `post_get_deployment_with_metadata` interceptor. """ return response + def post_get_deployment_with_metadata( + self, + response: deployment.Deployment, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[deployment.Deployment, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_deployment + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Deployments server but before it is returned to user code. + + We recommend only using this `post_get_deployment_with_metadata` + interceptor in new development instead of the `post_get_deployment` interceptor. 
+ When both interceptors are used, this `post_get_deployment_with_metadata` interceptor runs after the + `post_get_deployment` interceptor. The (possibly modified) response returned by + `post_get_deployment` will be passed to + `post_get_deployment_with_metadata`. + """ + return response, metadata + def pre_list_deployments( self, request: deployment.ListDeploymentsRequest, @@ -136,12 +159,37 @@ def post_list_deployments( ) -> deployment.ListDeploymentsResponse: """Post-rpc interceptor for list_deployments - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_deployments_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Deployments server but before - it is returned to user code. + it is returned to user code. This `post_list_deployments` interceptor runs + before the `post_list_deployments_with_metadata` interceptor. """ return response + def post_list_deployments_with_metadata( + self, + response: deployment.ListDeploymentsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + deployment.ListDeploymentsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_deployments + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Deployments server but before it is returned to user code. + + We recommend only using this `post_list_deployments_with_metadata` + interceptor in new development instead of the `post_list_deployments` interceptor. + When both interceptors are used, this `post_list_deployments_with_metadata` interceptor runs after the + `post_list_deployments` interceptor. The (possibly modified) response returned by + `post_list_deployments` will be passed to + `post_list_deployments_with_metadata`. + """ + return response, metadata + def pre_get_location( self, request: locations_pb2.GetLocationRequest, @@ -480,6 +528,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_deployment(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_deployment_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -625,6 +677,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_deployments(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_deployments_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/entity_types/client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/entity_types/client.py index e9067958c2f1..fc6f10ac8954 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/entity_types/client.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/entity_types/client.py @@ -14,6 +14,8 @@ # limitations under the License. 
# from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -493,6 +495,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -1604,16 +1633,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -1659,16 +1692,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def cancel_operation( self, @@ -1769,16 +1806,20 @@ def get_location( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def list_locations( self, @@ -1824,16 +1865,20 @@ def list_locations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. 
+ return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/entity_types/transports/rest.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/entity_types/transports/rest.py index ee2ebb79bf6d..a6919dbce3bf 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/entity_types/transports/rest.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/entity_types/transports/rest.py @@ -150,12 +150,35 @@ def post_create_entity_type( ) -> gcdc_entity_type.EntityType: """Post-rpc interceptor for create_entity_type - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_entity_type_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the EntityTypes server but before - it is returned to user code. + it is returned to user code. This `post_create_entity_type` interceptor runs + before the `post_create_entity_type_with_metadata` interceptor. """ return response + def post_create_entity_type_with_metadata( + self, + response: gcdc_entity_type.EntityType, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gcdc_entity_type.EntityType, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_entity_type + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the EntityTypes server but before it is returned to user code. + + We recommend only using this `post_create_entity_type_with_metadata` + interceptor in new development instead of the `post_create_entity_type` interceptor. + When both interceptors are used, this `post_create_entity_type_with_metadata` interceptor runs after the + `post_create_entity_type` interceptor. The (possibly modified) response returned by + `post_create_entity_type` will be passed to + `post_create_entity_type_with_metadata`. + """ + return response, metadata + def pre_delete_entity_type( self, request: entity_type.DeleteEntityTypeRequest, @@ -189,12 +212,35 @@ def post_export_entity_types( ) -> operations_pb2.Operation: """Post-rpc interceptor for export_entity_types - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_export_entity_types_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the EntityTypes server but before - it is returned to user code. + it is returned to user code. This `post_export_entity_types` interceptor runs + before the `post_export_entity_types_with_metadata` interceptor. """ return response + def post_export_entity_types_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for export_entity_types + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the EntityTypes server but before it is returned to user code. + + We recommend only using this `post_export_entity_types_with_metadata` + interceptor in new development instead of the `post_export_entity_types` interceptor. 
+ When both interceptors are used, this `post_export_entity_types_with_metadata` interceptor runs after the + `post_export_entity_types` interceptor. The (possibly modified) response returned by + `post_export_entity_types` will be passed to + `post_export_entity_types_with_metadata`. + """ + return response, metadata + def pre_get_entity_type( self, request: entity_type.GetEntityTypeRequest, @@ -214,12 +260,35 @@ def post_get_entity_type( ) -> entity_type.EntityType: """Post-rpc interceptor for get_entity_type - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_entity_type_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the EntityTypes server but before - it is returned to user code. + it is returned to user code. This `post_get_entity_type` interceptor runs + before the `post_get_entity_type_with_metadata` interceptor. """ return response + def post_get_entity_type_with_metadata( + self, + response: entity_type.EntityType, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[entity_type.EntityType, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_entity_type + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the EntityTypes server but before it is returned to user code. + + We recommend only using this `post_get_entity_type_with_metadata` + interceptor in new development instead of the `post_get_entity_type` interceptor. + When both interceptors are used, this `post_get_entity_type_with_metadata` interceptor runs after the + `post_get_entity_type` interceptor. The (possibly modified) response returned by + `post_get_entity_type` will be passed to + `post_get_entity_type_with_metadata`. + """ + return response, metadata + def pre_import_entity_types( self, request: entity_type.ImportEntityTypesRequest, @@ -239,12 +308,35 @@ def post_import_entity_types( ) -> operations_pb2.Operation: """Post-rpc interceptor for import_entity_types - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_import_entity_types_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the EntityTypes server but before - it is returned to user code. + it is returned to user code. This `post_import_entity_types` interceptor runs + before the `post_import_entity_types_with_metadata` interceptor. """ return response + def post_import_entity_types_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for import_entity_types + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the EntityTypes server but before it is returned to user code. + + We recommend only using this `post_import_entity_types_with_metadata` + interceptor in new development instead of the `post_import_entity_types` interceptor. + When both interceptors are used, this `post_import_entity_types_with_metadata` interceptor runs after the + `post_import_entity_types` interceptor. The (possibly modified) response returned by + `post_import_entity_types` will be passed to + `post_import_entity_types_with_metadata`. 
+ """ + return response, metadata + def pre_list_entity_types( self, request: entity_type.ListEntityTypesRequest, @@ -264,12 +356,37 @@ def post_list_entity_types( ) -> entity_type.ListEntityTypesResponse: """Post-rpc interceptor for list_entity_types - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_entity_types_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the EntityTypes server but before - it is returned to user code. + it is returned to user code. This `post_list_entity_types` interceptor runs + before the `post_list_entity_types_with_metadata` interceptor. """ return response + def post_list_entity_types_with_metadata( + self, + response: entity_type.ListEntityTypesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + entity_type.ListEntityTypesResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_entity_types + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the EntityTypes server but before it is returned to user code. + + We recommend only using this `post_list_entity_types_with_metadata` + interceptor in new development instead of the `post_list_entity_types` interceptor. + When both interceptors are used, this `post_list_entity_types_with_metadata` interceptor runs after the + `post_list_entity_types` interceptor. The (possibly modified) response returned by + `post_list_entity_types` will be passed to + `post_list_entity_types_with_metadata`. + """ + return response, metadata + def pre_update_entity_type( self, request: gcdc_entity_type.UpdateEntityTypeRequest, @@ -290,12 +407,35 @@ def post_update_entity_type( ) -> gcdc_entity_type.EntityType: """Post-rpc interceptor for update_entity_type - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_entity_type_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the EntityTypes server but before - it is returned to user code. + it is returned to user code. This `post_update_entity_type` interceptor runs + before the `post_update_entity_type_with_metadata` interceptor. """ return response + def post_update_entity_type_with_metadata( + self, + response: gcdc_entity_type.EntityType, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gcdc_entity_type.EntityType, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_entity_type + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the EntityTypes server but before it is returned to user code. + + We recommend only using this `post_update_entity_type_with_metadata` + interceptor in new development instead of the `post_update_entity_type` interceptor. + When both interceptors are used, this `post_update_entity_type_with_metadata` interceptor runs after the + `post_update_entity_type` interceptor. The (possibly modified) response returned by + `post_update_entity_type` will be passed to + `post_update_entity_type_with_metadata`. 
+ """ + return response, metadata + def pre_get_location( self, request: locations_pb2.GetLocationRequest, @@ -727,6 +867,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_entity_type(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_entity_type_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -986,6 +1130,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_export_entity_types(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_export_entity_types_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1164,6 +1312,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_entity_type(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_entity_type_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1314,6 +1466,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_import_entity_types(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_import_entity_types_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1459,6 +1615,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_entity_types(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_entity_types_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1645,6 +1805,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_entity_type(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_entity_type_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/environments/client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/environments/client.py index cf0e4c147ce6..f85ca79fdd64 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/environments/client.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/environments/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -645,6 +647,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. 
return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -2021,16 +2050,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -2076,16 +2109,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def cancel_operation( self, @@ -2186,16 +2223,20 @@ def get_location( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def list_locations( self, @@ -2241,16 +2282,20 @@ def list_locations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. 
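The `hasattr(cred, "get_cred_info")` check in the helper above is a version gate: `get_cred_info` only exists on google-auth >= 2.35.0 credentials, so the client probes for it instead of pinning a minimum version. A small standalone sketch of the same probe, using anonymous credentials purely for illustration:

import json

from google.auth import credentials as ga_credentials

cred = ga_credentials.AnonymousCredentials()

# get_cred_info() is only available in google-auth >= 2.35.0, so probe for it
# with hasattr() exactly as the generated client does.
if hasattr(cred, "get_cred_info"):
    info = cred.get_cred_info()
    print("credential info:", json.dumps(info) if info else None)
else:
    print("installed google-auth does not expose get_cred_info()")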
+ return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/environments/transports/rest.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/environments/transports/rest.py index 4f794357c8d3..6163818d34d6 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/environments/transports/rest.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/environments/transports/rest.py @@ -166,12 +166,35 @@ def post_create_environment( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_environment - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_environment_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Environments server but before - it is returned to user code. + it is returned to user code. This `post_create_environment` interceptor runs + before the `post_create_environment_with_metadata` interceptor. """ return response + def post_create_environment_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_environment + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Environments server but before it is returned to user code. + + We recommend only using this `post_create_environment_with_metadata` + interceptor in new development instead of the `post_create_environment` interceptor. + When both interceptors are used, this `post_create_environment_with_metadata` interceptor runs after the + `post_create_environment` interceptor. The (possibly modified) response returned by + `post_create_environment` will be passed to + `post_create_environment_with_metadata`. + """ + return response, metadata + def pre_delete_environment( self, request: environment.DeleteEnvironmentRequest, @@ -203,12 +226,35 @@ def post_deploy_flow( ) -> operations_pb2.Operation: """Post-rpc interceptor for deploy_flow - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_deploy_flow_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Environments server but before - it is returned to user code. + it is returned to user code. This `post_deploy_flow` interceptor runs + before the `post_deploy_flow_with_metadata` interceptor. """ return response + def post_deploy_flow_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for deploy_flow + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Environments server but before it is returned to user code. + + We recommend only using this `post_deploy_flow_with_metadata` + interceptor in new development instead of the `post_deploy_flow` interceptor. + When both interceptors are used, this `post_deploy_flow_with_metadata` interceptor runs after the + `post_deploy_flow` interceptor. 
The (possibly modified) response returned by + `post_deploy_flow` will be passed to + `post_deploy_flow_with_metadata`. + """ + return response, metadata + def pre_get_environment( self, request: environment.GetEnvironmentRequest, @@ -228,12 +274,35 @@ def post_get_environment( ) -> environment.Environment: """Post-rpc interceptor for get_environment - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_environment_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Environments server but before - it is returned to user code. + it is returned to user code. This `post_get_environment` interceptor runs + before the `post_get_environment_with_metadata` interceptor. """ return response + def post_get_environment_with_metadata( + self, + response: environment.Environment, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[environment.Environment, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_environment + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Environments server but before it is returned to user code. + + We recommend only using this `post_get_environment_with_metadata` + interceptor in new development instead of the `post_get_environment` interceptor. + When both interceptors are used, this `post_get_environment_with_metadata` interceptor runs after the + `post_get_environment` interceptor. The (possibly modified) response returned by + `post_get_environment` will be passed to + `post_get_environment_with_metadata`. + """ + return response, metadata + def pre_list_continuous_test_results( self, request: environment.ListContinuousTestResultsRequest, @@ -254,12 +323,38 @@ def post_list_continuous_test_results( ) -> environment.ListContinuousTestResultsResponse: """Post-rpc interceptor for list_continuous_test_results - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_continuous_test_results_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Environments server but before - it is returned to user code. + it is returned to user code. This `post_list_continuous_test_results` interceptor runs + before the `post_list_continuous_test_results_with_metadata` interceptor. """ return response + def post_list_continuous_test_results_with_metadata( + self, + response: environment.ListContinuousTestResultsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + environment.ListContinuousTestResultsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_continuous_test_results + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Environments server but before it is returned to user code. + + We recommend only using this `post_list_continuous_test_results_with_metadata` + interceptor in new development instead of the `post_list_continuous_test_results` interceptor. + When both interceptors are used, this `post_list_continuous_test_results_with_metadata` interceptor runs after the + `post_list_continuous_test_results` interceptor. The (possibly modified) response returned by + `post_list_continuous_test_results` will be passed to + `post_list_continuous_test_results_with_metadata`. 
+ """ + return response, metadata + def pre_list_environments( self, request: environment.ListEnvironmentsRequest, @@ -279,12 +374,37 @@ def post_list_environments( ) -> environment.ListEnvironmentsResponse: """Post-rpc interceptor for list_environments - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_environments_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Environments server but before - it is returned to user code. + it is returned to user code. This `post_list_environments` interceptor runs + before the `post_list_environments_with_metadata` interceptor. """ return response + def post_list_environments_with_metadata( + self, + response: environment.ListEnvironmentsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + environment.ListEnvironmentsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_environments + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Environments server but before it is returned to user code. + + We recommend only using this `post_list_environments_with_metadata` + interceptor in new development instead of the `post_list_environments` interceptor. + When both interceptors are used, this `post_list_environments_with_metadata` interceptor runs after the + `post_list_environments` interceptor. The (possibly modified) response returned by + `post_list_environments` will be passed to + `post_list_environments_with_metadata`. + """ + return response, metadata + def pre_lookup_environment_history( self, request: environment.LookupEnvironmentHistoryRequest, @@ -305,12 +425,38 @@ def post_lookup_environment_history( ) -> environment.LookupEnvironmentHistoryResponse: """Post-rpc interceptor for lookup_environment_history - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_lookup_environment_history_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Environments server but before - it is returned to user code. + it is returned to user code. This `post_lookup_environment_history` interceptor runs + before the `post_lookup_environment_history_with_metadata` interceptor. """ return response + def post_lookup_environment_history_with_metadata( + self, + response: environment.LookupEnvironmentHistoryResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + environment.LookupEnvironmentHistoryResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for lookup_environment_history + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Environments server but before it is returned to user code. + + We recommend only using this `post_lookup_environment_history_with_metadata` + interceptor in new development instead of the `post_lookup_environment_history` interceptor. + When both interceptors are used, this `post_lookup_environment_history_with_metadata` interceptor runs after the + `post_lookup_environment_history` interceptor. The (possibly modified) response returned by + `post_lookup_environment_history` will be passed to + `post_lookup_environment_history_with_metadata`. 
+ """ + return response, metadata + def pre_run_continuous_test( self, request: environment.RunContinuousTestRequest, @@ -330,12 +476,35 @@ def post_run_continuous_test( ) -> operations_pb2.Operation: """Post-rpc interceptor for run_continuous_test - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_run_continuous_test_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Environments server but before - it is returned to user code. + it is returned to user code. This `post_run_continuous_test` interceptor runs + before the `post_run_continuous_test_with_metadata` interceptor. """ return response + def post_run_continuous_test_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for run_continuous_test + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Environments server but before it is returned to user code. + + We recommend only using this `post_run_continuous_test_with_metadata` + interceptor in new development instead of the `post_run_continuous_test` interceptor. + When both interceptors are used, this `post_run_continuous_test_with_metadata` interceptor runs after the + `post_run_continuous_test` interceptor. The (possibly modified) response returned by + `post_run_continuous_test` will be passed to + `post_run_continuous_test_with_metadata`. + """ + return response, metadata + def pre_update_environment( self, request: gcdc_environment.UpdateEnvironmentRequest, @@ -356,12 +525,35 @@ def post_update_environment( ) -> operations_pb2.Operation: """Post-rpc interceptor for update_environment - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_environment_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Environments server but before - it is returned to user code. + it is returned to user code. This `post_update_environment` interceptor runs + before the `post_update_environment_with_metadata` interceptor. """ return response + def post_update_environment_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_environment + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Environments server but before it is returned to user code. + + We recommend only using this `post_update_environment_with_metadata` + interceptor in new development instead of the `post_update_environment` interceptor. + When both interceptors are used, this `post_update_environment_with_metadata` interceptor runs after the + `post_update_environment` interceptor. The (possibly modified) response returned by + `post_update_environment` will be passed to + `post_update_environment_with_metadata`. 
+ """ + return response, metadata + def pre_get_location( self, request: locations_pb2.GetLocationRequest, @@ -759,6 +951,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_environment(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_environment_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1022,6 +1218,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_deploy_flow(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_deploy_flow_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1178,6 +1378,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_environment(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_environment_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1327,6 +1531,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_continuous_test_results(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_continuous_test_results_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1474,6 +1682,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_environments(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_environments_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1625,6 +1837,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_lookup_environment_history(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_lookup_environment_history_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1777,6 +1993,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_run_continuous_test(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_run_continuous_test_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1927,6 +2147,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_environment(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_environment_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git 
a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/examples/client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/examples/client.py index 673ec91b0158..ab8c35624543 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/examples/client.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/examples/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -566,6 +568,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -1387,16 +1416,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -1442,16 +1475,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def cancel_operation( self, @@ -1552,16 +1589,20 @@ def get_location( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def list_locations( self, @@ -1607,16 +1648,20 @@ def list_locations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. 
- response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/examples/transports/rest.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/examples/transports/rest.py index 53a1d578fd0a..3b161503e716 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/examples/transports/rest.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/examples/transports/rest.py @@ -133,12 +133,35 @@ def post_create_example( ) -> gcdc_example.Example: """Post-rpc interceptor for create_example - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_example_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Examples server but before - it is returned to user code. + it is returned to user code. This `post_create_example` interceptor runs + before the `post_create_example_with_metadata` interceptor. """ return response + def post_create_example_with_metadata( + self, + response: gcdc_example.Example, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gcdc_example.Example, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_example + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Examples server but before it is returned to user code. + + We recommend only using this `post_create_example_with_metadata` + interceptor in new development instead of the `post_create_example` interceptor. + When both interceptors are used, this `post_create_example_with_metadata` interceptor runs after the + `post_create_example` interceptor. The (possibly modified) response returned by + `post_create_example` will be passed to + `post_create_example_with_metadata`. + """ + return response, metadata + def pre_delete_example( self, request: example.DeleteExampleRequest, @@ -166,12 +189,35 @@ def pre_get_example( def post_get_example(self, response: example.Example) -> example.Example: """Post-rpc interceptor for get_example - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_example_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Examples server but before - it is returned to user code. + it is returned to user code. This `post_get_example` interceptor runs + before the `post_get_example_with_metadata` interceptor. """ return response + def post_get_example_with_metadata( + self, + response: example.Example, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[example.Example, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_example + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Examples server but before it is returned to user code. 
+ + We recommend only using this `post_get_example_with_metadata` + interceptor in new development instead of the `post_get_example` interceptor. + When both interceptors are used, this `post_get_example_with_metadata` interceptor runs after the + `post_get_example` interceptor. The (possibly modified) response returned by + `post_get_example` will be passed to + `post_get_example_with_metadata`. + """ + return response, metadata + def pre_list_examples( self, request: example.ListExamplesRequest, @@ -189,12 +235,35 @@ def post_list_examples( ) -> example.ListExamplesResponse: """Post-rpc interceptor for list_examples - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_examples_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Examples server but before - it is returned to user code. + it is returned to user code. This `post_list_examples` interceptor runs + before the `post_list_examples_with_metadata` interceptor. """ return response + def post_list_examples_with_metadata( + self, + response: example.ListExamplesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[example.ListExamplesResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_examples + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Examples server but before it is returned to user code. + + We recommend only using this `post_list_examples_with_metadata` + interceptor in new development instead of the `post_list_examples` interceptor. + When both interceptors are used, this `post_list_examples_with_metadata` interceptor runs after the + `post_list_examples` interceptor. The (possibly modified) response returned by + `post_list_examples` will be passed to + `post_list_examples_with_metadata`. + """ + return response, metadata + def pre_update_example( self, request: gcdc_example.UpdateExampleRequest, @@ -214,12 +283,35 @@ def post_update_example( ) -> gcdc_example.Example: """Post-rpc interceptor for update_example - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_example_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Examples server but before - it is returned to user code. + it is returned to user code. This `post_update_example` interceptor runs + before the `post_update_example_with_metadata` interceptor. """ return response + def post_update_example_with_metadata( + self, + response: gcdc_example.Example, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gcdc_example.Example, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_example + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Examples server but before it is returned to user code. + + We recommend only using this `post_update_example_with_metadata` + interceptor in new development instead of the `post_update_example` interceptor. + When both interceptors are used, this `post_update_example_with_metadata` interceptor runs after the + `post_update_example` interceptor. The (possibly modified) response returned by + `post_update_example` will be passed to + `post_update_example_with_metadata`. 
+ """ + return response, metadata + def pre_get_location( self, request: locations_pb2.GetLocationRequest, @@ -565,6 +657,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_example(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_example_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -825,6 +921,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_example(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_example_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -970,6 +1070,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_examples(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_examples_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1127,6 +1231,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_example(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_example_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/experiments/client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/experiments/client.py index ac941d1bc57a..21ad602745d4 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/experiments/client.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/experiments/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -521,6 +523,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -1559,16 +1588,20 @@ def list_operations( # Validate the universe domain. 
self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -1614,16 +1647,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def cancel_operation( self, @@ -1724,16 +1761,20 @@ def get_location( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def list_locations( self, @@ -1779,16 +1820,20 @@ def list_locations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/experiments/transports/rest.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/experiments/transports/rest.py index ddad4233a266..5617fc9ee8b5 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/experiments/transports/rest.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/experiments/transports/rest.py @@ -149,12 +149,35 @@ def post_create_experiment( ) -> gcdc_experiment.Experiment: """Post-rpc interceptor for create_experiment - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_experiment_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Experiments server but before - it is returned to user code. + it is returned to user code. This `post_create_experiment` interceptor runs + before the `post_create_experiment_with_metadata` interceptor. 
""" return response + def post_create_experiment_with_metadata( + self, + response: gcdc_experiment.Experiment, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gcdc_experiment.Experiment, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_experiment + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Experiments server but before it is returned to user code. + + We recommend only using this `post_create_experiment_with_metadata` + interceptor in new development instead of the `post_create_experiment` interceptor. + When both interceptors are used, this `post_create_experiment_with_metadata` interceptor runs after the + `post_create_experiment` interceptor. The (possibly modified) response returned by + `post_create_experiment` will be passed to + `post_create_experiment_with_metadata`. + """ + return response, metadata + def pre_delete_experiment( self, request: experiment.DeleteExperimentRequest, @@ -188,12 +211,35 @@ def post_get_experiment( ) -> experiment.Experiment: """Post-rpc interceptor for get_experiment - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_experiment_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Experiments server but before - it is returned to user code. + it is returned to user code. This `post_get_experiment` interceptor runs + before the `post_get_experiment_with_metadata` interceptor. """ return response + def post_get_experiment_with_metadata( + self, + response: experiment.Experiment, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[experiment.Experiment, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_experiment + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Experiments server but before it is returned to user code. + + We recommend only using this `post_get_experiment_with_metadata` + interceptor in new development instead of the `post_get_experiment` interceptor. + When both interceptors are used, this `post_get_experiment_with_metadata` interceptor runs after the + `post_get_experiment` interceptor. The (possibly modified) response returned by + `post_get_experiment` will be passed to + `post_get_experiment_with_metadata`. + """ + return response, metadata + def pre_list_experiments( self, request: experiment.ListExperimentsRequest, @@ -213,12 +259,37 @@ def post_list_experiments( ) -> experiment.ListExperimentsResponse: """Post-rpc interceptor for list_experiments - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_experiments_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Experiments server but before - it is returned to user code. + it is returned to user code. This `post_list_experiments` interceptor runs + before the `post_list_experiments_with_metadata` interceptor. 
""" return response + def post_list_experiments_with_metadata( + self, + response: experiment.ListExperimentsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + experiment.ListExperimentsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_experiments + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Experiments server but before it is returned to user code. + + We recommend only using this `post_list_experiments_with_metadata` + interceptor in new development instead of the `post_list_experiments` interceptor. + When both interceptors are used, this `post_list_experiments_with_metadata` interceptor runs after the + `post_list_experiments` interceptor. The (possibly modified) response returned by + `post_list_experiments` will be passed to + `post_list_experiments_with_metadata`. + """ + return response, metadata + def pre_start_experiment( self, request: experiment.StartExperimentRequest, @@ -238,12 +309,35 @@ def post_start_experiment( ) -> experiment.Experiment: """Post-rpc interceptor for start_experiment - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_start_experiment_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Experiments server but before - it is returned to user code. + it is returned to user code. This `post_start_experiment` interceptor runs + before the `post_start_experiment_with_metadata` interceptor. """ return response + def post_start_experiment_with_metadata( + self, + response: experiment.Experiment, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[experiment.Experiment, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for start_experiment + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Experiments server but before it is returned to user code. + + We recommend only using this `post_start_experiment_with_metadata` + interceptor in new development instead of the `post_start_experiment` interceptor. + When both interceptors are used, this `post_start_experiment_with_metadata` interceptor runs after the + `post_start_experiment` interceptor. The (possibly modified) response returned by + `post_start_experiment` will be passed to + `post_start_experiment_with_metadata`. + """ + return response, metadata + def pre_stop_experiment( self, request: experiment.StopExperimentRequest, @@ -263,12 +357,35 @@ def post_stop_experiment( ) -> experiment.Experiment: """Post-rpc interceptor for stop_experiment - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_stop_experiment_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Experiments server but before - it is returned to user code. + it is returned to user code. This `post_stop_experiment` interceptor runs + before the `post_stop_experiment_with_metadata` interceptor. 
""" return response + def post_stop_experiment_with_metadata( + self, + response: experiment.Experiment, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[experiment.Experiment, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for stop_experiment + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Experiments server but before it is returned to user code. + + We recommend only using this `post_stop_experiment_with_metadata` + interceptor in new development instead of the `post_stop_experiment` interceptor. + When both interceptors are used, this `post_stop_experiment_with_metadata` interceptor runs after the + `post_stop_experiment` interceptor. The (possibly modified) response returned by + `post_stop_experiment` will be passed to + `post_stop_experiment_with_metadata`. + """ + return response, metadata + def pre_update_experiment( self, request: gcdc_experiment.UpdateExperimentRequest, @@ -288,12 +405,35 @@ def post_update_experiment( ) -> gcdc_experiment.Experiment: """Post-rpc interceptor for update_experiment - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_experiment_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Experiments server but before - it is returned to user code. + it is returned to user code. This `post_update_experiment` interceptor runs + before the `post_update_experiment_with_metadata` interceptor. """ return response + def post_update_experiment_with_metadata( + self, + response: gcdc_experiment.Experiment, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gcdc_experiment.Experiment, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_experiment + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Experiments server but before it is returned to user code. + + We recommend only using this `post_update_experiment_with_metadata` + interceptor in new development instead of the `post_update_experiment` interceptor. + When both interceptors are used, this `post_update_experiment_with_metadata` interceptor runs after the + `post_update_experiment` interceptor. The (possibly modified) response returned by + `post_update_experiment` will be passed to + `post_update_experiment_with_metadata`. 
+ """ + return response, metadata + def pre_get_location( self, request: locations_pb2.GetLocationRequest, @@ -633,6 +773,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_experiment(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_experiment_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -887,6 +1031,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_experiment(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_experiment_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1032,6 +1180,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_experiments(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_experiments_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1185,6 +1337,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_start_experiment(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_start_experiment_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1334,6 +1490,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_stop_experiment(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_stop_experiment_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1485,6 +1645,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_experiment(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_experiment_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/flows/client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/flows/client.py index 1dd77ed0f356..52d7c93470a1 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/flows/client.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/flows/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -670,6 +672,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. 
return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -2107,16 +2136,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -2162,16 +2195,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def cancel_operation( self, @@ -2272,16 +2309,20 @@ def get_location( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def list_locations( self, @@ -2327,16 +2368,20 @@ def list_locations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. 
+ return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/flows/transports/rest.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/flows/transports/rest.py index b6a572a36f51..a02b924decac 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/flows/transports/rest.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/flows/transports/rest.py @@ -169,12 +169,35 @@ def pre_create_flow( def post_create_flow(self, response: gcdc_flow.Flow) -> gcdc_flow.Flow: """Post-rpc interceptor for create_flow - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_flow_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Flows server but before - it is returned to user code. + it is returned to user code. This `post_create_flow` interceptor runs + before the `post_create_flow_with_metadata` interceptor. """ return response + def post_create_flow_with_metadata( + self, + response: gcdc_flow.Flow, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gcdc_flow.Flow, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_flow + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Flows server but before it is returned to user code. + + We recommend only using this `post_create_flow_with_metadata` + interceptor in new development instead of the `post_create_flow` interceptor. + When both interceptors are used, this `post_create_flow_with_metadata` interceptor runs after the + `post_create_flow` interceptor. The (possibly modified) response returned by + `post_create_flow` will be passed to + `post_create_flow_with_metadata`. + """ + return response, metadata + def pre_delete_flow( self, request: flow.DeleteFlowRequest, @@ -204,12 +227,35 @@ def post_export_flow( ) -> operations_pb2.Operation: """Post-rpc interceptor for export_flow - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_export_flow_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Flows server but before - it is returned to user code. + it is returned to user code. This `post_export_flow` interceptor runs + before the `post_export_flow_with_metadata` interceptor. """ return response + def post_export_flow_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for export_flow + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Flows server but before it is returned to user code. + + We recommend only using this `post_export_flow_with_metadata` + interceptor in new development instead of the `post_export_flow` interceptor. + When both interceptors are used, this `post_export_flow_with_metadata` interceptor runs after the + `post_export_flow` interceptor. The (possibly modified) response returned by + `post_export_flow` will be passed to + `post_export_flow_with_metadata`. 
+ """ + return response, metadata + def pre_get_flow( self, request: flow.GetFlowRequest, @@ -225,12 +271,33 @@ def pre_get_flow( def post_get_flow(self, response: flow.Flow) -> flow.Flow: """Post-rpc interceptor for get_flow - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_flow_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Flows server but before - it is returned to user code. + it is returned to user code. This `post_get_flow` interceptor runs + before the `post_get_flow_with_metadata` interceptor. """ return response + def post_get_flow_with_metadata( + self, response: flow.Flow, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[flow.Flow, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_flow + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Flows server but before it is returned to user code. + + We recommend only using this `post_get_flow_with_metadata` + interceptor in new development instead of the `post_get_flow` interceptor. + When both interceptors are used, this `post_get_flow_with_metadata` interceptor runs after the + `post_get_flow` interceptor. The (possibly modified) response returned by + `post_get_flow` will be passed to + `post_get_flow_with_metadata`. + """ + return response, metadata + def pre_get_flow_validation_result( self, request: flow.GetFlowValidationResultRequest, @@ -250,12 +317,35 @@ def post_get_flow_validation_result( ) -> flow.FlowValidationResult: """Post-rpc interceptor for get_flow_validation_result - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_flow_validation_result_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Flows server but before - it is returned to user code. + it is returned to user code. This `post_get_flow_validation_result` interceptor runs + before the `post_get_flow_validation_result_with_metadata` interceptor. """ return response + def post_get_flow_validation_result_with_metadata( + self, + response: flow.FlowValidationResult, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[flow.FlowValidationResult, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_flow_validation_result + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Flows server but before it is returned to user code. + + We recommend only using this `post_get_flow_validation_result_with_metadata` + interceptor in new development instead of the `post_get_flow_validation_result` interceptor. + When both interceptors are used, this `post_get_flow_validation_result_with_metadata` interceptor runs after the + `post_get_flow_validation_result` interceptor. The (possibly modified) response returned by + `post_get_flow_validation_result` will be passed to + `post_get_flow_validation_result_with_metadata`. + """ + return response, metadata + def pre_import_flow( self, request: flow.ImportFlowRequest, @@ -273,12 +363,35 @@ def post_import_flow( ) -> operations_pb2.Operation: """Post-rpc interceptor for import_flow - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_import_flow_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response after it is returned by the Flows server but before - it is returned to user code. + it is returned to user code. This `post_import_flow` interceptor runs + before the `post_import_flow_with_metadata` interceptor. """ return response + def post_import_flow_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for import_flow + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Flows server but before it is returned to user code. + + We recommend only using this `post_import_flow_with_metadata` + interceptor in new development instead of the `post_import_flow` interceptor. + When both interceptors are used, this `post_import_flow_with_metadata` interceptor runs after the + `post_import_flow` interceptor. The (possibly modified) response returned by + `post_import_flow` will be passed to + `post_import_flow_with_metadata`. + """ + return response, metadata + def pre_list_flows( self, request: flow.ListFlowsRequest, @@ -296,12 +409,35 @@ def post_list_flows( ) -> flow.ListFlowsResponse: """Post-rpc interceptor for list_flows - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_flows_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Flows server but before - it is returned to user code. + it is returned to user code. This `post_list_flows` interceptor runs + before the `post_list_flows_with_metadata` interceptor. """ return response + def post_list_flows_with_metadata( + self, + response: flow.ListFlowsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[flow.ListFlowsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_flows + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Flows server but before it is returned to user code. + + We recommend only using this `post_list_flows_with_metadata` + interceptor in new development instead of the `post_list_flows` interceptor. + When both interceptors are used, this `post_list_flows_with_metadata` interceptor runs after the + `post_list_flows` interceptor. The (possibly modified) response returned by + `post_list_flows` will be passed to + `post_list_flows_with_metadata`. + """ + return response, metadata + def pre_train_flow( self, request: flow.TrainFlowRequest, @@ -319,12 +455,35 @@ def post_train_flow( ) -> operations_pb2.Operation: """Post-rpc interceptor for train_flow - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_train_flow_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Flows server but before - it is returned to user code. + it is returned to user code. This `post_train_flow` interceptor runs + before the `post_train_flow_with_metadata` interceptor. 
""" return response + def post_train_flow_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for train_flow + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Flows server but before it is returned to user code. + + We recommend only using this `post_train_flow_with_metadata` + interceptor in new development instead of the `post_train_flow` interceptor. + When both interceptors are used, this `post_train_flow_with_metadata` interceptor runs after the + `post_train_flow` interceptor. The (possibly modified) response returned by + `post_train_flow` will be passed to + `post_train_flow_with_metadata`. + """ + return response, metadata + def pre_update_flow( self, request: gcdc_flow.UpdateFlowRequest, @@ -340,12 +499,35 @@ def pre_update_flow( def post_update_flow(self, response: gcdc_flow.Flow) -> gcdc_flow.Flow: """Post-rpc interceptor for update_flow - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_flow_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Flows server but before - it is returned to user code. + it is returned to user code. This `post_update_flow` interceptor runs + before the `post_update_flow_with_metadata` interceptor. """ return response + def post_update_flow_with_metadata( + self, + response: gcdc_flow.Flow, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gcdc_flow.Flow, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_flow + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Flows server but before it is returned to user code. + + We recommend only using this `post_update_flow_with_metadata` + interceptor in new development instead of the `post_update_flow` interceptor. + When both interceptors are used, this `post_update_flow_with_metadata` interceptor runs after the + `post_update_flow` interceptor. The (possibly modified) response returned by + `post_update_flow` will be passed to + `post_update_flow_with_metadata`. + """ + return response, metadata + def pre_validate_flow( self, request: flow.ValidateFlowRequest, @@ -363,12 +545,35 @@ def post_validate_flow( ) -> flow.FlowValidationResult: """Post-rpc interceptor for validate_flow - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_validate_flow_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Flows server but before - it is returned to user code. + it is returned to user code. This `post_validate_flow` interceptor runs + before the `post_validate_flow_with_metadata` interceptor. """ return response + def post_validate_flow_with_metadata( + self, + response: flow.FlowValidationResult, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[flow.FlowValidationResult, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for validate_flow + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Flows server but before it is returned to user code. + + We recommend only using this `post_validate_flow_with_metadata` + interceptor in new development instead of the `post_validate_flow` interceptor. 
+ When both interceptors are used, this `post_validate_flow_with_metadata` interceptor runs after the + `post_validate_flow` interceptor. The (possibly modified) response returned by + `post_validate_flow` will be passed to + `post_validate_flow_with_metadata`. + """ + return response, metadata + def pre_get_location( self, request: locations_pb2.GetLocationRequest, @@ -786,6 +991,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_flow(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_flow_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1041,6 +1250,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_export_flow(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_export_flow_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1203,6 +1416,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_flow(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_flow_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1349,6 +1566,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_flow_validation_result(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_flow_validation_result_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1497,6 +1718,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_import_flow(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_import_flow_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1640,6 +1865,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_flows(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_flows_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1788,6 +2017,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_train_flow(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_train_flow_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1958,6 +2191,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_flow(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = 
self._interceptor.post_update_flow_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2107,6 +2344,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_validate_flow(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_validate_flow_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/generators/client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/generators/client.py index 2033765ce34f..bd0b651cb52c 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/generators/client.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/generators/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -492,6 +494,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -1311,16 +1340,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -1366,16 +1399,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def cancel_operation( self, @@ -1476,16 +1513,20 @@ def get_location( # Validate the universe domain. self._validate_universe_domain() - # Send the request. 
- response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def list_locations( self, @@ -1531,16 +1572,20 @@ def list_locations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/generators/transports/rest.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/generators/transports/rest.py index 841db9475031..be43aa7b0613 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/generators/transports/rest.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/generators/transports/rest.py @@ -133,12 +133,35 @@ def post_create_generator( ) -> gcdc_generator.Generator: """Post-rpc interceptor for create_generator - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_generator_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Generators server but before - it is returned to user code. + it is returned to user code. This `post_create_generator` interceptor runs + before the `post_create_generator_with_metadata` interceptor. """ return response + def post_create_generator_with_metadata( + self, + response: gcdc_generator.Generator, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gcdc_generator.Generator, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_generator + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Generators server but before it is returned to user code. + + We recommend only using this `post_create_generator_with_metadata` + interceptor in new development instead of the `post_create_generator` interceptor. + When both interceptors are used, this `post_create_generator_with_metadata` interceptor runs after the + `post_create_generator` interceptor. The (possibly modified) response returned by + `post_create_generator` will be passed to + `post_create_generator_with_metadata`. + """ + return response, metadata + def pre_delete_generator( self, request: generator.DeleteGeneratorRequest, @@ -168,12 +191,35 @@ def pre_get_generator( def post_get_generator(self, response: generator.Generator) -> generator.Generator: """Post-rpc interceptor for get_generator - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_generator_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Generators server but before - it is returned to user code. 
+ it is returned to user code. This `post_get_generator` interceptor runs + before the `post_get_generator_with_metadata` interceptor. """ return response + def post_get_generator_with_metadata( + self, + response: generator.Generator, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[generator.Generator, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_generator + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Generators server but before it is returned to user code. + + We recommend only using this `post_get_generator_with_metadata` + interceptor in new development instead of the `post_get_generator` interceptor. + When both interceptors are used, this `post_get_generator_with_metadata` interceptor runs after the + `post_get_generator` interceptor. The (possibly modified) response returned by + `post_get_generator` will be passed to + `post_get_generator_with_metadata`. + """ + return response, metadata + def pre_list_generators( self, request: generator.ListGeneratorsRequest, @@ -193,12 +239,37 @@ def post_list_generators( ) -> generator.ListGeneratorsResponse: """Post-rpc interceptor for list_generators - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_generators_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Generators server but before - it is returned to user code. + it is returned to user code. This `post_list_generators` interceptor runs + before the `post_list_generators_with_metadata` interceptor. """ return response + def post_list_generators_with_metadata( + self, + response: generator.ListGeneratorsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + generator.ListGeneratorsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_generators + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Generators server but before it is returned to user code. + + We recommend only using this `post_list_generators_with_metadata` + interceptor in new development instead of the `post_list_generators` interceptor. + When both interceptors are used, this `post_list_generators_with_metadata` interceptor runs after the + `post_list_generators` interceptor. The (possibly modified) response returned by + `post_list_generators` will be passed to + `post_list_generators_with_metadata`. + """ + return response, metadata + def pre_update_generator( self, request: gcdc_generator.UpdateGeneratorRequest, @@ -218,12 +289,35 @@ def post_update_generator( ) -> gcdc_generator.Generator: """Post-rpc interceptor for update_generator - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_generator_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Generators server but before - it is returned to user code. + it is returned to user code. This `post_update_generator` interceptor runs + before the `post_update_generator_with_metadata` interceptor. 
""" return response + def post_update_generator_with_metadata( + self, + response: gcdc_generator.Generator, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gcdc_generator.Generator, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_generator + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Generators server but before it is returned to user code. + + We recommend only using this `post_update_generator_with_metadata` + interceptor in new development instead of the `post_update_generator` interceptor. + When both interceptors are used, this `post_update_generator_with_metadata` interceptor runs after the + `post_update_generator` interceptor. The (possibly modified) response returned by + `post_update_generator` will be passed to + `post_update_generator_with_metadata`. + """ + return response, metadata + def pre_get_location( self, request: locations_pb2.GetLocationRequest, @@ -567,6 +661,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_generator(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_generator_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -827,6 +925,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_generator(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_generator_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -972,6 +1074,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_generators(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_generators_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1129,6 +1235,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_generator(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_generator_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/intents/client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/intents/client.py index a71a83d0ebff..08d8c3c519c9 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/intents/client.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/intents/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -517,6 +519,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. 
return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -1568,16 +1597,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -1623,16 +1656,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def cancel_operation( self, @@ -1733,16 +1770,20 @@ def get_location( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def list_locations( self, @@ -1788,16 +1829,20 @@ def list_locations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. 
+ return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/intents/transports/rest.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/intents/transports/rest.py index c66b49cee17c..93d4796e4443 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/intents/transports/rest.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/intents/transports/rest.py @@ -147,12 +147,35 @@ def pre_create_intent( def post_create_intent(self, response: gcdc_intent.Intent) -> gcdc_intent.Intent: """Post-rpc interceptor for create_intent - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_intent_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Intents server but before - it is returned to user code. + it is returned to user code. This `post_create_intent` interceptor runs + before the `post_create_intent_with_metadata` interceptor. """ return response + def post_create_intent_with_metadata( + self, + response: gcdc_intent.Intent, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gcdc_intent.Intent, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_intent + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Intents server but before it is returned to user code. + + We recommend only using this `post_create_intent_with_metadata` + interceptor in new development instead of the `post_create_intent` interceptor. + When both interceptors are used, this `post_create_intent_with_metadata` interceptor runs after the + `post_create_intent` interceptor. The (possibly modified) response returned by + `post_create_intent` will be passed to + `post_create_intent_with_metadata`. + """ + return response, metadata + def pre_delete_intent( self, request: intent.DeleteIntentRequest, @@ -182,12 +205,35 @@ def post_export_intents( ) -> operations_pb2.Operation: """Post-rpc interceptor for export_intents - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_export_intents_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Intents server but before - it is returned to user code. + it is returned to user code. This `post_export_intents` interceptor runs + before the `post_export_intents_with_metadata` interceptor. """ return response + def post_export_intents_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for export_intents + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Intents server but before it is returned to user code. + + We recommend only using this `post_export_intents_with_metadata` + interceptor in new development instead of the `post_export_intents` interceptor. + When both interceptors are used, this `post_export_intents_with_metadata` interceptor runs after the + `post_export_intents` interceptor. 
The (possibly modified) response returned by + `post_export_intents` will be passed to + `post_export_intents_with_metadata`. + """ + return response, metadata + def pre_get_intent( self, request: intent.GetIntentRequest, @@ -203,12 +249,33 @@ def pre_get_intent( def post_get_intent(self, response: intent.Intent) -> intent.Intent: """Post-rpc interceptor for get_intent - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_intent_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Intents server but before - it is returned to user code. + it is returned to user code. This `post_get_intent` interceptor runs + before the `post_get_intent_with_metadata` interceptor. """ return response + def post_get_intent_with_metadata( + self, response: intent.Intent, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[intent.Intent, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_intent + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Intents server but before it is returned to user code. + + We recommend only using this `post_get_intent_with_metadata` + interceptor in new development instead of the `post_get_intent` interceptor. + When both interceptors are used, this `post_get_intent_with_metadata` interceptor runs after the + `post_get_intent` interceptor. The (possibly modified) response returned by + `post_get_intent` will be passed to + `post_get_intent_with_metadata`. + """ + return response, metadata + def pre_import_intents( self, request: intent.ImportIntentsRequest, @@ -226,12 +293,35 @@ def post_import_intents( ) -> operations_pb2.Operation: """Post-rpc interceptor for import_intents - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_import_intents_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Intents server but before - it is returned to user code. + it is returned to user code. This `post_import_intents` interceptor runs + before the `post_import_intents_with_metadata` interceptor. """ return response + def post_import_intents_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for import_intents + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Intents server but before it is returned to user code. + + We recommend only using this `post_import_intents_with_metadata` + interceptor in new development instead of the `post_import_intents` interceptor. + When both interceptors are used, this `post_import_intents_with_metadata` interceptor runs after the + `post_import_intents` interceptor. The (possibly modified) response returned by + `post_import_intents` will be passed to + `post_import_intents_with_metadata`. + """ + return response, metadata + def pre_list_intents( self, request: intent.ListIntentsRequest, @@ -249,12 +339,35 @@ def post_list_intents( ) -> intent.ListIntentsResponse: """Post-rpc interceptor for list_intents - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_intents_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response after it is returned by the Intents server but before - it is returned to user code. + it is returned to user code. This `post_list_intents` interceptor runs + before the `post_list_intents_with_metadata` interceptor. """ return response + def post_list_intents_with_metadata( + self, + response: intent.ListIntentsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[intent.ListIntentsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_intents + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Intents server but before it is returned to user code. + + We recommend only using this `post_list_intents_with_metadata` + interceptor in new development instead of the `post_list_intents` interceptor. + When both interceptors are used, this `post_list_intents_with_metadata` interceptor runs after the + `post_list_intents` interceptor. The (possibly modified) response returned by + `post_list_intents` will be passed to + `post_list_intents_with_metadata`. + """ + return response, metadata + def pre_update_intent( self, request: gcdc_intent.UpdateIntentRequest, @@ -272,12 +385,35 @@ def pre_update_intent( def post_update_intent(self, response: gcdc_intent.Intent) -> gcdc_intent.Intent: """Post-rpc interceptor for update_intent - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_intent_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Intents server but before - it is returned to user code. + it is returned to user code. This `post_update_intent` interceptor runs + before the `post_update_intent_with_metadata` interceptor. """ return response + def post_update_intent_with_metadata( + self, + response: gcdc_intent.Intent, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gcdc_intent.Intent, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_intent + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Intents server but before it is returned to user code. + + We recommend only using this `post_update_intent_with_metadata` + interceptor in new development instead of the `post_update_intent` interceptor. + When both interceptors are used, this `post_update_intent_with_metadata` interceptor runs after the + `post_update_intent` interceptor. The (possibly modified) response returned by + `post_update_intent` will be passed to + `post_update_intent_with_metadata`. 
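To make the pattern these docstrings describe concrete: a subclass overrides the new `*_with_metadata` hooks to see both the parsed response and the response metadata, which the transport assembles from the HTTP response headers (see the `response_metadata = [(k, str(v)) for k, v in response.headers.items()]` lines in the `__call__` hunks). A rough sketch, assuming the generated interceptor base class is exported as `IntentsRestInterceptor` per the usual GAPIC layout:

    import logging

    # Import path and class name assumed from the usual GAPIC layout; confirm
    # against the generated module before relying on them.
    from google.cloud.dialogflowcx_v3beta1.services.intents.transports.rest import (
        IntentsRestInterceptor,
    )


    class LoggingIntentsInterceptor(IntentsRestInterceptor):
        def post_get_intent_with_metadata(self, response, metadata):
            # `metadata` is a sequence of (header-name, value) pairs built from
            # the HTTP response headers. The deprecated post_get_intent hook has
            # already run; its (possibly modified) response is what arrives here.
            for key, value in metadata:
                logging.debug("get_intent response header %s=%s", key, value)
            return response, metadata

When both hooks are overridden, the deprecated `post_get_intent` runs first and its return value is what `post_get_intent_with_metadata` receives, exactly as the docstrings state.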
+ """ + return response, metadata + def pre_get_location( self, request: locations_pb2.GetLocationRequest, @@ -681,6 +817,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_intent(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_intent_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -940,6 +1080,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_export_intents(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_export_intents_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1088,6 +1232,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_intent(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_intent_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1238,6 +1386,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_import_intents(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_import_intents_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1383,6 +1535,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_intents(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_intents_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1539,6 +1695,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_intent(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_intent_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/pages/client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/pages/client.py index f0a0cbf6d0a9..a7c637871584 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/pages/client.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/pages/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -664,6 +666,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. 
+ + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -1528,16 +1557,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -1583,16 +1616,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def cancel_operation( self, @@ -1693,16 +1730,20 @@ def get_location( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def list_locations( self, @@ -1748,16 +1789,20 @@ def list_locations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. 
+ return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/pages/transports/rest.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/pages/transports/rest.py index e52b8632a0d0..e4b3e363d057 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/pages/transports/rest.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/pages/transports/rest.py @@ -129,12 +129,35 @@ def pre_create_page( def post_create_page(self, response: gcdc_page.Page) -> gcdc_page.Page: """Post-rpc interceptor for create_page - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_page_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Pages server but before - it is returned to user code. + it is returned to user code. This `post_create_page` interceptor runs + before the `post_create_page_with_metadata` interceptor. """ return response + def post_create_page_with_metadata( + self, + response: gcdc_page.Page, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gcdc_page.Page, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_page + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Pages server but before it is returned to user code. + + We recommend only using this `post_create_page_with_metadata` + interceptor in new development instead of the `post_create_page` interceptor. + When both interceptors are used, this `post_create_page_with_metadata` interceptor runs after the + `post_create_page` interceptor. The (possibly modified) response returned by + `post_create_page` will be passed to + `post_create_page_with_metadata`. + """ + return response, metadata + def pre_delete_page( self, request: page.DeletePageRequest, @@ -162,12 +185,33 @@ def pre_get_page( def post_get_page(self, response: page.Page) -> page.Page: """Post-rpc interceptor for get_page - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_page_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Pages server but before - it is returned to user code. + it is returned to user code. This `post_get_page` interceptor runs + before the `post_get_page_with_metadata` interceptor. """ return response + def post_get_page_with_metadata( + self, response: page.Page, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[page.Page, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_page + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Pages server but before it is returned to user code. + + We recommend only using this `post_get_page_with_metadata` + interceptor in new development instead of the `post_get_page` interceptor. + When both interceptors are used, this `post_get_page_with_metadata` interceptor runs after the + `post_get_page` interceptor. The (possibly modified) response returned by + `post_get_page` will be passed to + `post_get_page_with_metadata`. 
+ """ + return response, metadata + def pre_list_pages( self, request: page.ListPagesRequest, @@ -185,12 +229,35 @@ def post_list_pages( ) -> page.ListPagesResponse: """Post-rpc interceptor for list_pages - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_pages_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Pages server but before - it is returned to user code. + it is returned to user code. This `post_list_pages` interceptor runs + before the `post_list_pages_with_metadata` interceptor. """ return response + def post_list_pages_with_metadata( + self, + response: page.ListPagesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[page.ListPagesResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_pages + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Pages server but before it is returned to user code. + + We recommend only using this `post_list_pages_with_metadata` + interceptor in new development instead of the `post_list_pages` interceptor. + When both interceptors are used, this `post_list_pages_with_metadata` interceptor runs after the + `post_list_pages` interceptor. The (possibly modified) response returned by + `post_list_pages` will be passed to + `post_list_pages_with_metadata`. + """ + return response, metadata + def pre_update_page( self, request: gcdc_page.UpdatePageRequest, @@ -206,12 +273,35 @@ def pre_update_page( def post_update_page(self, response: gcdc_page.Page) -> gcdc_page.Page: """Post-rpc interceptor for update_page - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_page_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Pages server but before - it is returned to user code. + it is returned to user code. This `post_update_page` interceptor runs + before the `post_update_page_with_metadata` interceptor. """ return response + def post_update_page_with_metadata( + self, + response: gcdc_page.Page, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gcdc_page.Page, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_page + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Pages server but before it is returned to user code. + + We recommend only using this `post_update_page_with_metadata` + interceptor in new development instead of the `post_update_page` interceptor. + When both interceptors are used, this `post_update_page_with_metadata` interceptor runs after the + `post_update_page` interceptor. The (possibly modified) response returned by + `post_update_page` will be passed to + `post_update_page_with_metadata`. 
+ """ + return response, metadata + def pre_get_location( self, request: locations_pb2.GetLocationRequest, @@ -568,6 +658,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_page(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_page_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -835,6 +929,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_page(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_page_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -978,6 +1076,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_pages(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_pages_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1146,6 +1248,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_page(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_page_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/playbooks/client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/playbooks/client.py index 07d17c024723..1cd6432e6195 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/playbooks/client.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/playbooks/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -597,6 +599,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -1870,16 +1899,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. 
- response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -1925,16 +1958,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def cancel_operation( self, @@ -2035,16 +2072,20 @@ def get_location( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def list_locations( self, @@ -2090,16 +2131,20 @@ def list_locations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/playbooks/transports/rest.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/playbooks/transports/rest.py index c203b24c61d9..2a92868855f9 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/playbooks/transports/rest.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/playbooks/transports/rest.py @@ -161,12 +161,35 @@ def post_create_playbook( ) -> gcdc_playbook.Playbook: """Post-rpc interceptor for create_playbook - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_playbook_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Playbooks server but before - it is returned to user code. + it is returned to user code. This `post_create_playbook` interceptor runs + before the `post_create_playbook_with_metadata` interceptor. 
""" return response + def post_create_playbook_with_metadata( + self, + response: gcdc_playbook.Playbook, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gcdc_playbook.Playbook, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_playbook + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Playbooks server but before it is returned to user code. + + We recommend only using this `post_create_playbook_with_metadata` + interceptor in new development instead of the `post_create_playbook` interceptor. + When both interceptors are used, this `post_create_playbook_with_metadata` interceptor runs after the + `post_create_playbook` interceptor. The (possibly modified) response returned by + `post_create_playbook` will be passed to + `post_create_playbook_with_metadata`. + """ + return response, metadata + def pre_create_playbook_version( self, request: playbook.CreatePlaybookVersionRequest, @@ -186,12 +209,35 @@ def post_create_playbook_version( ) -> playbook.PlaybookVersion: """Post-rpc interceptor for create_playbook_version - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_playbook_version_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Playbooks server but before - it is returned to user code. + it is returned to user code. This `post_create_playbook_version` interceptor runs + before the `post_create_playbook_version_with_metadata` interceptor. """ return response + def post_create_playbook_version_with_metadata( + self, + response: playbook.PlaybookVersion, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[playbook.PlaybookVersion, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_playbook_version + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Playbooks server but before it is returned to user code. + + We recommend only using this `post_create_playbook_version_with_metadata` + interceptor in new development instead of the `post_create_playbook_version` interceptor. + When both interceptors are used, this `post_create_playbook_version_with_metadata` interceptor runs after the + `post_create_playbook_version` interceptor. The (possibly modified) response returned by + `post_create_playbook_version` will be passed to + `post_create_playbook_version_with_metadata`. + """ + return response, metadata + def pre_delete_playbook( self, request: playbook.DeletePlaybookRequest, @@ -233,12 +279,35 @@ def pre_get_playbook( def post_get_playbook(self, response: playbook.Playbook) -> playbook.Playbook: """Post-rpc interceptor for get_playbook - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_playbook_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Playbooks server but before - it is returned to user code. + it is returned to user code. This `post_get_playbook` interceptor runs + before the `post_get_playbook_with_metadata` interceptor. 
""" return response + def post_get_playbook_with_metadata( + self, + response: playbook.Playbook, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[playbook.Playbook, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_playbook + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Playbooks server but before it is returned to user code. + + We recommend only using this `post_get_playbook_with_metadata` + interceptor in new development instead of the `post_get_playbook` interceptor. + When both interceptors are used, this `post_get_playbook_with_metadata` interceptor runs after the + `post_get_playbook` interceptor. The (possibly modified) response returned by + `post_get_playbook` will be passed to + `post_get_playbook_with_metadata`. + """ + return response, metadata + def pre_get_playbook_version( self, request: playbook.GetPlaybookVersionRequest, @@ -258,12 +327,35 @@ def post_get_playbook_version( ) -> playbook.PlaybookVersion: """Post-rpc interceptor for get_playbook_version - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_playbook_version_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Playbooks server but before - it is returned to user code. + it is returned to user code. This `post_get_playbook_version` interceptor runs + before the `post_get_playbook_version_with_metadata` interceptor. """ return response + def post_get_playbook_version_with_metadata( + self, + response: playbook.PlaybookVersion, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[playbook.PlaybookVersion, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_playbook_version + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Playbooks server but before it is returned to user code. + + We recommend only using this `post_get_playbook_version_with_metadata` + interceptor in new development instead of the `post_get_playbook_version` interceptor. + When both interceptors are used, this `post_get_playbook_version_with_metadata` interceptor runs after the + `post_get_playbook_version` interceptor. The (possibly modified) response returned by + `post_get_playbook_version` will be passed to + `post_get_playbook_version_with_metadata`. + """ + return response, metadata + def pre_list_playbooks( self, request: playbook.ListPlaybooksRequest, @@ -281,12 +373,35 @@ def post_list_playbooks( ) -> playbook.ListPlaybooksResponse: """Post-rpc interceptor for list_playbooks - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_playbooks_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Playbooks server but before - it is returned to user code. + it is returned to user code. This `post_list_playbooks` interceptor runs + before the `post_list_playbooks_with_metadata` interceptor. 
""" return response + def post_list_playbooks_with_metadata( + self, + response: playbook.ListPlaybooksResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[playbook.ListPlaybooksResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_playbooks + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Playbooks server but before it is returned to user code. + + We recommend only using this `post_list_playbooks_with_metadata` + interceptor in new development instead of the `post_list_playbooks` interceptor. + When both interceptors are used, this `post_list_playbooks_with_metadata` interceptor runs after the + `post_list_playbooks` interceptor. The (possibly modified) response returned by + `post_list_playbooks` will be passed to + `post_list_playbooks_with_metadata`. + """ + return response, metadata + def pre_list_playbook_versions( self, request: playbook.ListPlaybookVersionsRequest, @@ -306,12 +421,37 @@ def post_list_playbook_versions( ) -> playbook.ListPlaybookVersionsResponse: """Post-rpc interceptor for list_playbook_versions - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_playbook_versions_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Playbooks server but before - it is returned to user code. + it is returned to user code. This `post_list_playbook_versions` interceptor runs + before the `post_list_playbook_versions_with_metadata` interceptor. """ return response + def post_list_playbook_versions_with_metadata( + self, + response: playbook.ListPlaybookVersionsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + playbook.ListPlaybookVersionsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_playbook_versions + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Playbooks server but before it is returned to user code. + + We recommend only using this `post_list_playbook_versions_with_metadata` + interceptor in new development instead of the `post_list_playbook_versions` interceptor. + When both interceptors are used, this `post_list_playbook_versions_with_metadata` interceptor runs after the + `post_list_playbook_versions` interceptor. The (possibly modified) response returned by + `post_list_playbook_versions` will be passed to + `post_list_playbook_versions_with_metadata`. + """ + return response, metadata + def pre_update_playbook( self, request: gcdc_playbook.UpdatePlaybookRequest, @@ -331,12 +471,35 @@ def post_update_playbook( ) -> gcdc_playbook.Playbook: """Post-rpc interceptor for update_playbook - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_playbook_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Playbooks server but before - it is returned to user code. + it is returned to user code. This `post_update_playbook` interceptor runs + before the `post_update_playbook_with_metadata` interceptor. 
""" return response + def post_update_playbook_with_metadata( + self, + response: gcdc_playbook.Playbook, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gcdc_playbook.Playbook, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_playbook + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Playbooks server but before it is returned to user code. + + We recommend only using this `post_update_playbook_with_metadata` + interceptor in new development instead of the `post_update_playbook` interceptor. + When both interceptors are used, this `post_update_playbook_with_metadata` interceptor runs after the + `post_update_playbook` interceptor. The (possibly modified) response returned by + `post_update_playbook` will be passed to + `post_update_playbook_with_metadata`. + """ + return response, metadata + def pre_get_location( self, request: locations_pb2.GetLocationRequest, @@ -691,6 +854,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_playbook(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_playbook_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -842,6 +1009,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_playbook_version(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_playbook_version_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1218,6 +1389,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_playbook(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_playbook_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1363,6 +1538,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_playbook_version(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_playbook_version_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1510,6 +1689,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_playbooks(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_playbooks_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1655,6 +1838,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_playbook_versions(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_playbook_versions_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1823,6 +2010,10 @@ 
def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_playbook(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_playbook_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/security_settings_service/client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/security_settings_service/client.py index 292c4492164c..d3f4fe1adc9d 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/security_settings_service/client.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/security_settings_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -537,6 +539,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -1393,16 +1422,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -1448,16 +1481,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def cancel_operation( self, @@ -1558,16 +1595,20 @@ def get_location( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def list_locations( self, @@ -1613,16 +1654,20 @@ def list_locations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/security_settings_service/transports/rest.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/security_settings_service/transports/rest.py index 9f2405bf8779..e3eb9875d8ad 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/security_settings_service/transports/rest.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/security_settings_service/transports/rest.py @@ -136,12 +136,37 @@ def post_create_security_settings( ) -> gcdc_security_settings.SecuritySettings: """Post-rpc interceptor for create_security_settings - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_security_settings_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the SecuritySettingsService server but before - it is returned to user code. + it is returned to user code. This `post_create_security_settings` interceptor runs + before the `post_create_security_settings_with_metadata` interceptor. """ return response + def post_create_security_settings_with_metadata( + self, + response: gcdc_security_settings.SecuritySettings, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + gcdc_security_settings.SecuritySettings, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for create_security_settings + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SecuritySettingsService server but before it is returned to user code. + + We recommend only using this `post_create_security_settings_with_metadata` + interceptor in new development instead of the `post_create_security_settings` interceptor. + When both interceptors are used, this `post_create_security_settings_with_metadata` interceptor runs after the + `post_create_security_settings` interceptor. The (possibly modified) response returned by + `post_create_security_settings` will be passed to + `post_create_security_settings_with_metadata`. + """ + return response, metadata + def pre_delete_security_settings( self, request: security_settings.DeleteSecuritySettingsRequest, @@ -177,12 +202,37 @@ def post_get_security_settings( ) -> security_settings.SecuritySettings: """Post-rpc interceptor for get_security_settings - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_security_settings_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response after it is returned by the SecuritySettingsService server but before - it is returned to user code. + it is returned to user code. This `post_get_security_settings` interceptor runs + before the `post_get_security_settings_with_metadata` interceptor. """ return response + def post_get_security_settings_with_metadata( + self, + response: security_settings.SecuritySettings, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + security_settings.SecuritySettings, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for get_security_settings + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SecuritySettingsService server but before it is returned to user code. + + We recommend only using this `post_get_security_settings_with_metadata` + interceptor in new development instead of the `post_get_security_settings` interceptor. + When both interceptors are used, this `post_get_security_settings_with_metadata` interceptor runs after the + `post_get_security_settings` interceptor. The (possibly modified) response returned by + `post_get_security_settings` will be passed to + `post_get_security_settings_with_metadata`. + """ + return response, metadata + def pre_list_security_settings( self, request: security_settings.ListSecuritySettingsRequest, @@ -203,12 +253,38 @@ def post_list_security_settings( ) -> security_settings.ListSecuritySettingsResponse: """Post-rpc interceptor for list_security_settings - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_security_settings_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the SecuritySettingsService server but before - it is returned to user code. + it is returned to user code. This `post_list_security_settings` interceptor runs + before the `post_list_security_settings_with_metadata` interceptor. """ return response + def post_list_security_settings_with_metadata( + self, + response: security_settings.ListSecuritySettingsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + security_settings.ListSecuritySettingsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_security_settings + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SecuritySettingsService server but before it is returned to user code. + + We recommend only using this `post_list_security_settings_with_metadata` + interceptor in new development instead of the `post_list_security_settings` interceptor. + When both interceptors are used, this `post_list_security_settings_with_metadata` interceptor runs after the + `post_list_security_settings` interceptor. The (possibly modified) response returned by + `post_list_security_settings` will be passed to + `post_list_security_settings_with_metadata`. + """ + return response, metadata + def pre_update_security_settings( self, request: gcdc_security_settings.UpdateSecuritySettingsRequest, @@ -229,12 +305,37 @@ def post_update_security_settings( ) -> gcdc_security_settings.SecuritySettings: """Post-rpc interceptor for update_security_settings - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_security_settings_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response after it is returned by the SecuritySettingsService server but before - it is returned to user code. + it is returned to user code. This `post_update_security_settings` interceptor runs + before the `post_update_security_settings_with_metadata` interceptor. """ return response + def post_update_security_settings_with_metadata( + self, + response: gcdc_security_settings.SecuritySettings, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + gcdc_security_settings.SecuritySettings, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for update_security_settings + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SecuritySettingsService server but before it is returned to user code. + + We recommend only using this `post_update_security_settings_with_metadata` + interceptor in new development instead of the `post_update_security_settings` interceptor. + When both interceptors are used, this `post_update_security_settings_with_metadata` interceptor runs after the + `post_update_security_settings` interceptor. The (possibly modified) response returned by + `post_update_security_settings` will be passed to + `post_update_security_settings_with_metadata`. + """ + return response, metadata + def pre_get_location( self, request: locations_pb2.GetLocationRequest, @@ -578,6 +679,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_security_settings(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_security_settings_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -842,6 +947,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_security_settings(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_security_settings_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -990,6 +1099,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_security_settings(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_security_settings_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1148,6 +1261,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_security_settings(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_security_settings_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/session_entity_types/client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/session_entity_types/client.py index e3ad26395795..3d196540ff9f 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/session_entity_types/client.py 
+++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/session_entity_types/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -498,6 +500,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -1414,16 +1443,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -1469,16 +1502,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def cancel_operation( self, @@ -1579,16 +1616,20 @@ def get_location( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def list_locations( self, @@ -1634,16 +1675,20 @@ def list_locations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. 
+ return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/session_entity_types/transports/rest.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/session_entity_types/transports/rest.py index 5b317a4ef4eb..550b6f313f19 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/session_entity_types/transports/rest.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/session_entity_types/transports/rest.py @@ -136,12 +136,38 @@ def post_create_session_entity_type( ) -> gcdc_session_entity_type.SessionEntityType: """Post-rpc interceptor for create_session_entity_type - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_session_entity_type_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the SessionEntityTypes server but before - it is returned to user code. + it is returned to user code. This `post_create_session_entity_type` interceptor runs + before the `post_create_session_entity_type_with_metadata` interceptor. """ return response + def post_create_session_entity_type_with_metadata( + self, + response: gcdc_session_entity_type.SessionEntityType, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + gcdc_session_entity_type.SessionEntityType, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for create_session_entity_type + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SessionEntityTypes server but before it is returned to user code. + + We recommend only using this `post_create_session_entity_type_with_metadata` + interceptor in new development instead of the `post_create_session_entity_type` interceptor. + When both interceptors are used, this `post_create_session_entity_type_with_metadata` interceptor runs after the + `post_create_session_entity_type` interceptor. The (possibly modified) response returned by + `post_create_session_entity_type` will be passed to + `post_create_session_entity_type_with_metadata`. + """ + return response, metadata + def pre_delete_session_entity_type( self, request: session_entity_type.DeleteSessionEntityTypeRequest, @@ -177,12 +203,37 @@ def post_get_session_entity_type( ) -> session_entity_type.SessionEntityType: """Post-rpc interceptor for get_session_entity_type - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_session_entity_type_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the SessionEntityTypes server but before - it is returned to user code. + it is returned to user code. This `post_get_session_entity_type` interceptor runs + before the `post_get_session_entity_type_with_metadata` interceptor. 
""" return response + def post_get_session_entity_type_with_metadata( + self, + response: session_entity_type.SessionEntityType, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + session_entity_type.SessionEntityType, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for get_session_entity_type + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SessionEntityTypes server but before it is returned to user code. + + We recommend only using this `post_get_session_entity_type_with_metadata` + interceptor in new development instead of the `post_get_session_entity_type` interceptor. + When both interceptors are used, this `post_get_session_entity_type_with_metadata` interceptor runs after the + `post_get_session_entity_type` interceptor. The (possibly modified) response returned by + `post_get_session_entity_type` will be passed to + `post_get_session_entity_type_with_metadata`. + """ + return response, metadata + def pre_list_session_entity_types( self, request: session_entity_type.ListSessionEntityTypesRequest, @@ -203,12 +254,38 @@ def post_list_session_entity_types( ) -> session_entity_type.ListSessionEntityTypesResponse: """Post-rpc interceptor for list_session_entity_types - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_session_entity_types_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the SessionEntityTypes server but before - it is returned to user code. + it is returned to user code. This `post_list_session_entity_types` interceptor runs + before the `post_list_session_entity_types_with_metadata` interceptor. """ return response + def post_list_session_entity_types_with_metadata( + self, + response: session_entity_type.ListSessionEntityTypesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + session_entity_type.ListSessionEntityTypesResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_session_entity_types + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SessionEntityTypes server but before it is returned to user code. + + We recommend only using this `post_list_session_entity_types_with_metadata` + interceptor in new development instead of the `post_list_session_entity_types` interceptor. + When both interceptors are used, this `post_list_session_entity_types_with_metadata` interceptor runs after the + `post_list_session_entity_types` interceptor. The (possibly modified) response returned by + `post_list_session_entity_types` will be passed to + `post_list_session_entity_types_with_metadata`. + """ + return response, metadata + def pre_update_session_entity_type( self, request: gcdc_session_entity_type.UpdateSessionEntityTypeRequest, @@ -229,12 +306,38 @@ def post_update_session_entity_type( ) -> gcdc_session_entity_type.SessionEntityType: """Post-rpc interceptor for update_session_entity_type - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_session_entity_type_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the SessionEntityTypes server but before - it is returned to user code. + it is returned to user code. This `post_update_session_entity_type` interceptor runs + before the `post_update_session_entity_type_with_metadata` interceptor. 
""" return response + def post_update_session_entity_type_with_metadata( + self, + response: gcdc_session_entity_type.SessionEntityType, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + gcdc_session_entity_type.SessionEntityType, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for update_session_entity_type + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SessionEntityTypes server but before it is returned to user code. + + We recommend only using this `post_update_session_entity_type_with_metadata` + interceptor in new development instead of the `post_update_session_entity_type` interceptor. + When both interceptors are used, this `post_update_session_entity_type_with_metadata` interceptor runs after the + `post_update_session_entity_type` interceptor. The (possibly modified) response returned by + `post_update_session_entity_type` will be passed to + `post_update_session_entity_type_with_metadata`. + """ + return response, metadata + def pre_get_location( self, request: locations_pb2.GetLocationRequest, @@ -593,6 +696,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_session_entity_type(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_session_entity_type_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -871,6 +978,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_session_entity_type(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_session_entity_type_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1021,6 +1132,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_session_entity_types(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_session_entity_types_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1195,6 +1310,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_session_entity_type(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_session_entity_type_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/sessions/client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/sessions/client.py index f265c3e505f1..e7fc88e4e825 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/sessions/client.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/sessions/client.py @@ -14,6 +14,8 @@ # limitations under the License. 
# from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -788,6 +790,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -1655,16 +1684,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -1710,16 +1743,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def cancel_operation( self, @@ -1820,16 +1857,20 @@ def get_location( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def list_locations( self, @@ -1875,16 +1916,20 @@ def list_locations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. 
+ return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/sessions/transports/rest.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/sessions/transports/rest.py index 73c2f451354c..f5e500ab5628 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/sessions/transports/rest.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/sessions/transports/rest.py @@ -133,12 +133,35 @@ def post_detect_intent( ) -> session.DetectIntentResponse: """Post-rpc interceptor for detect_intent - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_detect_intent_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Sessions server but before - it is returned to user code. + it is returned to user code. This `post_detect_intent` interceptor runs + before the `post_detect_intent_with_metadata` interceptor. """ return response + def post_detect_intent_with_metadata( + self, + response: session.DetectIntentResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[session.DetectIntentResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for detect_intent + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Sessions server but before it is returned to user code. + + We recommend only using this `post_detect_intent_with_metadata` + interceptor in new development instead of the `post_detect_intent` interceptor. + When both interceptors are used, this `post_detect_intent_with_metadata` interceptor runs after the + `post_detect_intent` interceptor. The (possibly modified) response returned by + `post_detect_intent` will be passed to + `post_detect_intent_with_metadata`. + """ + return response, metadata + def pre_fulfill_intent( self, request: session.FulfillIntentRequest, @@ -156,12 +179,35 @@ def post_fulfill_intent( ) -> session.FulfillIntentResponse: """Post-rpc interceptor for fulfill_intent - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_fulfill_intent_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Sessions server but before - it is returned to user code. + it is returned to user code. This `post_fulfill_intent` interceptor runs + before the `post_fulfill_intent_with_metadata` interceptor. """ return response + def post_fulfill_intent_with_metadata( + self, + response: session.FulfillIntentResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[session.FulfillIntentResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for fulfill_intent + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Sessions server but before it is returned to user code. + + We recommend only using this `post_fulfill_intent_with_metadata` + interceptor in new development instead of the `post_fulfill_intent` interceptor. + When both interceptors are used, this `post_fulfill_intent_with_metadata` interceptor runs after the + `post_fulfill_intent` interceptor. 
The (possibly modified) response returned by + `post_fulfill_intent` will be passed to + `post_fulfill_intent_with_metadata`. + """ + return response, metadata + def pre_match_intent( self, request: session.MatchIntentRequest, @@ -179,12 +225,35 @@ def post_match_intent( ) -> session.MatchIntentResponse: """Post-rpc interceptor for match_intent - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_match_intent_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Sessions server but before - it is returned to user code. + it is returned to user code. This `post_match_intent` interceptor runs + before the `post_match_intent_with_metadata` interceptor. """ return response + def post_match_intent_with_metadata( + self, + response: session.MatchIntentResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[session.MatchIntentResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for match_intent + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Sessions server but before it is returned to user code. + + We recommend only using this `post_match_intent_with_metadata` + interceptor in new development instead of the `post_match_intent` interceptor. + When both interceptors are used, this `post_match_intent_with_metadata` interceptor runs after the + `post_match_intent` interceptor. The (possibly modified) response returned by + `post_match_intent` will be passed to + `post_match_intent_with_metadata`. + """ + return response, metadata + def pre_server_streaming_detect_intent( self, request: session.DetectIntentRequest, @@ -202,12 +271,37 @@ def post_server_streaming_detect_intent( ) -> rest_streaming.ResponseIterator: """Post-rpc interceptor for server_streaming_detect_intent - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_server_streaming_detect_intent_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Sessions server but before - it is returned to user code. + it is returned to user code. This `post_server_streaming_detect_intent` interceptor runs + before the `post_server_streaming_detect_intent_with_metadata` interceptor. """ return response + def post_server_streaming_detect_intent_with_metadata( + self, + response: rest_streaming.ResponseIterator, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + rest_streaming.ResponseIterator, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for server_streaming_detect_intent + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Sessions server but before it is returned to user code. + + We recommend only using this `post_server_streaming_detect_intent_with_metadata` + interceptor in new development instead of the `post_server_streaming_detect_intent` interceptor. + When both interceptors are used, this `post_server_streaming_detect_intent_with_metadata` interceptor runs after the + `post_server_streaming_detect_intent` interceptor. The (possibly modified) response returned by + `post_server_streaming_detect_intent` will be passed to + `post_server_streaming_detect_intent_with_metadata`. 
+ """ + return response, metadata + def pre_submit_answer_feedback( self, request: session.SubmitAnswerFeedbackRequest, @@ -227,12 +321,35 @@ def post_submit_answer_feedback( ) -> session.AnswerFeedback: """Post-rpc interceptor for submit_answer_feedback - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_submit_answer_feedback_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Sessions server but before - it is returned to user code. + it is returned to user code. This `post_submit_answer_feedback` interceptor runs + before the `post_submit_answer_feedback_with_metadata` interceptor. """ return response + def post_submit_answer_feedback_with_metadata( + self, + response: session.AnswerFeedback, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[session.AnswerFeedback, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for submit_answer_feedback + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Sessions server but before it is returned to user code. + + We recommend only using this `post_submit_answer_feedback_with_metadata` + interceptor in new development instead of the `post_submit_answer_feedback` interceptor. + When both interceptors are used, this `post_submit_answer_feedback_with_metadata` interceptor runs after the + `post_submit_answer_feedback` interceptor. The (possibly modified) response returned by + `post_submit_answer_feedback` will be passed to + `post_submit_answer_feedback_with_metadata`. + """ + return response, metadata + def pre_get_location( self, request: locations_pb2.GetLocationRequest, @@ -573,6 +690,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_detect_intent(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_detect_intent_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -723,6 +844,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_fulfill_intent(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_fulfill_intent_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -871,6 +996,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_match_intent(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_match_intent_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1022,6 +1151,13 @@ def __call__( ) resp = self._interceptor.post_server_streaming_detect_intent(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_server_streaming_detect_intent_with_metadata( + resp, response_metadata + ) return resp class _StreamingDetectIntent( @@ -1170,6 +1306,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_submit_answer_feedback(resp) + response_metadata = [(k, str(v)) for k, v in 
response.headers.items()] + resp, _ = self._interceptor.post_submit_answer_feedback_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/test_cases/client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/test_cases/client.py index c4b9bba7e1ae..e33f2b9fedf0 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/test_cases/client.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/test_cases/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -763,6 +765,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -2319,16 +2348,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -2374,16 +2407,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def cancel_operation( self, @@ -2484,16 +2521,20 @@ def get_location( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. 
+ return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def list_locations( self, @@ -2539,16 +2580,20 @@ def list_locations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/test_cases/transports/rest.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/test_cases/transports/rest.py index 72a125fe1b36..c8035d1f89bc 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/test_cases/transports/rest.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/test_cases/transports/rest.py @@ -203,12 +203,35 @@ def post_batch_run_test_cases( ) -> operations_pb2.Operation: """Post-rpc interceptor for batch_run_test_cases - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_batch_run_test_cases_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the TestCases server but before - it is returned to user code. + it is returned to user code. This `post_batch_run_test_cases` interceptor runs + before the `post_batch_run_test_cases_with_metadata` interceptor. """ return response + def post_batch_run_test_cases_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for batch_run_test_cases + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the TestCases server but before it is returned to user code. + + We recommend only using this `post_batch_run_test_cases_with_metadata` + interceptor in new development instead of the `post_batch_run_test_cases` interceptor. + When both interceptors are used, this `post_batch_run_test_cases_with_metadata` interceptor runs after the + `post_batch_run_test_cases` interceptor. The (possibly modified) response returned by + `post_batch_run_test_cases` will be passed to + `post_batch_run_test_cases_with_metadata`. + """ + return response, metadata + def pre_calculate_coverage( self, request: test_case.CalculateCoverageRequest, @@ -228,12 +251,37 @@ def post_calculate_coverage( ) -> test_case.CalculateCoverageResponse: """Post-rpc interceptor for calculate_coverage - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_calculate_coverage_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the TestCases server but before - it is returned to user code. + it is returned to user code. This `post_calculate_coverage` interceptor runs + before the `post_calculate_coverage_with_metadata` interceptor. 
""" return response + def post_calculate_coverage_with_metadata( + self, + response: test_case.CalculateCoverageResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + test_case.CalculateCoverageResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for calculate_coverage + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the TestCases server but before it is returned to user code. + + We recommend only using this `post_calculate_coverage_with_metadata` + interceptor in new development instead of the `post_calculate_coverage` interceptor. + When both interceptors are used, this `post_calculate_coverage_with_metadata` interceptor runs after the + `post_calculate_coverage` interceptor. The (possibly modified) response returned by + `post_calculate_coverage` will be passed to + `post_calculate_coverage_with_metadata`. + """ + return response, metadata + def pre_create_test_case( self, request: gcdc_test_case.CreateTestCaseRequest, @@ -253,12 +301,35 @@ def post_create_test_case( ) -> gcdc_test_case.TestCase: """Post-rpc interceptor for create_test_case - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_test_case_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the TestCases server but before - it is returned to user code. + it is returned to user code. This `post_create_test_case` interceptor runs + before the `post_create_test_case_with_metadata` interceptor. """ return response + def post_create_test_case_with_metadata( + self, + response: gcdc_test_case.TestCase, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gcdc_test_case.TestCase, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_test_case + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the TestCases server but before it is returned to user code. + + We recommend only using this `post_create_test_case_with_metadata` + interceptor in new development instead of the `post_create_test_case` interceptor. + When both interceptors are used, this `post_create_test_case_with_metadata` interceptor runs after the + `post_create_test_case` interceptor. The (possibly modified) response returned by + `post_create_test_case` will be passed to + `post_create_test_case_with_metadata`. + """ + return response, metadata + def pre_export_test_cases( self, request: test_case.ExportTestCasesRequest, @@ -278,12 +349,35 @@ def post_export_test_cases( ) -> operations_pb2.Operation: """Post-rpc interceptor for export_test_cases - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_export_test_cases_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the TestCases server but before - it is returned to user code. + it is returned to user code. This `post_export_test_cases` interceptor runs + before the `post_export_test_cases_with_metadata` interceptor. 
""" return response + def post_export_test_cases_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for export_test_cases + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the TestCases server but before it is returned to user code. + + We recommend only using this `post_export_test_cases_with_metadata` + interceptor in new development instead of the `post_export_test_cases` interceptor. + When both interceptors are used, this `post_export_test_cases_with_metadata` interceptor runs after the + `post_export_test_cases` interceptor. The (possibly modified) response returned by + `post_export_test_cases` will be passed to + `post_export_test_cases_with_metadata`. + """ + return response, metadata + def pre_get_test_case( self, request: test_case.GetTestCaseRequest, @@ -299,12 +393,35 @@ def pre_get_test_case( def post_get_test_case(self, response: test_case.TestCase) -> test_case.TestCase: """Post-rpc interceptor for get_test_case - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_test_case_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the TestCases server but before - it is returned to user code. + it is returned to user code. This `post_get_test_case` interceptor runs + before the `post_get_test_case_with_metadata` interceptor. """ return response + def post_get_test_case_with_metadata( + self, + response: test_case.TestCase, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[test_case.TestCase, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_test_case + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the TestCases server but before it is returned to user code. + + We recommend only using this `post_get_test_case_with_metadata` + interceptor in new development instead of the `post_get_test_case` interceptor. + When both interceptors are used, this `post_get_test_case_with_metadata` interceptor runs after the + `post_get_test_case` interceptor. The (possibly modified) response returned by + `post_get_test_case` will be passed to + `post_get_test_case_with_metadata`. + """ + return response, metadata + def pre_get_test_case_result( self, request: test_case.GetTestCaseResultRequest, @@ -324,12 +441,35 @@ def post_get_test_case_result( ) -> test_case.TestCaseResult: """Post-rpc interceptor for get_test_case_result - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_test_case_result_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the TestCases server but before - it is returned to user code. + it is returned to user code. This `post_get_test_case_result` interceptor runs + before the `post_get_test_case_result_with_metadata` interceptor. 
""" return response + def post_get_test_case_result_with_metadata( + self, + response: test_case.TestCaseResult, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[test_case.TestCaseResult, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_test_case_result + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the TestCases server but before it is returned to user code. + + We recommend only using this `post_get_test_case_result_with_metadata` + interceptor in new development instead of the `post_get_test_case_result` interceptor. + When both interceptors are used, this `post_get_test_case_result_with_metadata` interceptor runs after the + `post_get_test_case_result` interceptor. The (possibly modified) response returned by + `post_get_test_case_result` will be passed to + `post_get_test_case_result_with_metadata`. + """ + return response, metadata + def pre_import_test_cases( self, request: test_case.ImportTestCasesRequest, @@ -349,12 +489,35 @@ def post_import_test_cases( ) -> operations_pb2.Operation: """Post-rpc interceptor for import_test_cases - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_import_test_cases_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the TestCases server but before - it is returned to user code. + it is returned to user code. This `post_import_test_cases` interceptor runs + before the `post_import_test_cases_with_metadata` interceptor. """ return response + def post_import_test_cases_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for import_test_cases + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the TestCases server but before it is returned to user code. + + We recommend only using this `post_import_test_cases_with_metadata` + interceptor in new development instead of the `post_import_test_cases` interceptor. + When both interceptors are used, this `post_import_test_cases_with_metadata` interceptor runs after the + `post_import_test_cases` interceptor. The (possibly modified) response returned by + `post_import_test_cases` will be passed to + `post_import_test_cases_with_metadata`. + """ + return response, metadata + def pre_list_test_case_results( self, request: test_case.ListTestCaseResultsRequest, @@ -374,12 +537,37 @@ def post_list_test_case_results( ) -> test_case.ListTestCaseResultsResponse: """Post-rpc interceptor for list_test_case_results - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_test_case_results_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the TestCases server but before - it is returned to user code. + it is returned to user code. This `post_list_test_case_results` interceptor runs + before the `post_list_test_case_results_with_metadata` interceptor. 
""" return response + def post_list_test_case_results_with_metadata( + self, + response: test_case.ListTestCaseResultsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + test_case.ListTestCaseResultsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_test_case_results + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the TestCases server but before it is returned to user code. + + We recommend only using this `post_list_test_case_results_with_metadata` + interceptor in new development instead of the `post_list_test_case_results` interceptor. + When both interceptors are used, this `post_list_test_case_results_with_metadata` interceptor runs after the + `post_list_test_case_results` interceptor. The (possibly modified) response returned by + `post_list_test_case_results` will be passed to + `post_list_test_case_results_with_metadata`. + """ + return response, metadata + def pre_list_test_cases( self, request: test_case.ListTestCasesRequest, @@ -397,12 +585,37 @@ def post_list_test_cases( ) -> test_case.ListTestCasesResponse: """Post-rpc interceptor for list_test_cases - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_test_cases_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the TestCases server but before - it is returned to user code. + it is returned to user code. This `post_list_test_cases` interceptor runs + before the `post_list_test_cases_with_metadata` interceptor. """ return response + def post_list_test_cases_with_metadata( + self, + response: test_case.ListTestCasesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + test_case.ListTestCasesResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_test_cases + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the TestCases server but before it is returned to user code. + + We recommend only using this `post_list_test_cases_with_metadata` + interceptor in new development instead of the `post_list_test_cases` interceptor. + When both interceptors are used, this `post_list_test_cases_with_metadata` interceptor runs after the + `post_list_test_cases` interceptor. The (possibly modified) response returned by + `post_list_test_cases` will be passed to + `post_list_test_cases_with_metadata`. + """ + return response, metadata + def pre_run_test_case( self, request: test_case.RunTestCaseRequest, @@ -420,12 +633,35 @@ def post_run_test_case( ) -> operations_pb2.Operation: """Post-rpc interceptor for run_test_case - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_run_test_case_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the TestCases server but before - it is returned to user code. + it is returned to user code. This `post_run_test_case` interceptor runs + before the `post_run_test_case_with_metadata` interceptor. 
""" return response + def post_run_test_case_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for run_test_case + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the TestCases server but before it is returned to user code. + + We recommend only using this `post_run_test_case_with_metadata` + interceptor in new development instead of the `post_run_test_case` interceptor. + When both interceptors are used, this `post_run_test_case_with_metadata` interceptor runs after the + `post_run_test_case` interceptor. The (possibly modified) response returned by + `post_run_test_case` will be passed to + `post_run_test_case_with_metadata`. + """ + return response, metadata + def pre_update_test_case( self, request: gcdc_test_case.UpdateTestCaseRequest, @@ -445,12 +681,35 @@ def post_update_test_case( ) -> gcdc_test_case.TestCase: """Post-rpc interceptor for update_test_case - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_test_case_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the TestCases server but before - it is returned to user code. + it is returned to user code. This `post_update_test_case` interceptor runs + before the `post_update_test_case_with_metadata` interceptor. """ return response + def post_update_test_case_with_metadata( + self, + response: gcdc_test_case.TestCase, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gcdc_test_case.TestCase, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_test_case + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the TestCases server but before it is returned to user code. + + We recommend only using this `post_update_test_case_with_metadata` + interceptor in new development instead of the `post_update_test_case` interceptor. + When both interceptors are used, this `post_update_test_case_with_metadata` interceptor runs after the + `post_update_test_case` interceptor. The (possibly modified) response returned by + `post_update_test_case` will be passed to + `post_update_test_case_with_metadata`. 
+ """ + return response, metadata + def pre_get_location( self, request: locations_pb2.GetLocationRequest, @@ -964,6 +1223,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_batch_run_test_cases(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_batch_run_test_cases_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1109,6 +1372,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_calculate_coverage(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_calculate_coverage_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1266,6 +1533,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_test_case(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_test_case_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1420,6 +1691,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_export_test_cases(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_export_test_cases_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1563,6 +1838,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_test_case(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_test_case_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1708,6 +1987,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_test_case_result(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_test_case_result_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1862,6 +2145,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_import_test_cases(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_import_test_cases_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2007,6 +2294,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_test_case_results(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_test_case_results_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2156,6 +2447,10 @@ def __call__( 
json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_test_cases(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_test_cases_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2306,6 +2601,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_run_test_case(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_run_test_case_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2461,6 +2760,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_test_case(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_test_case_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/tools/client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/tools/client.py index dd2088a4a928..89befef87029 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/tools/client.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/tools/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -517,6 +519,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -1443,16 +1472,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -1498,16 +1531,20 @@ def get_operation( # Validate the universe domain. 
self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def cancel_operation( self, @@ -1608,16 +1645,20 @@ def get_location( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def list_locations( self, @@ -1663,16 +1704,20 @@ def list_locations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/tools/transports/rest.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/tools/transports/rest.py index 79597df379b0..ef8f48426c2e 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/tools/transports/rest.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/tools/transports/rest.py @@ -137,12 +137,35 @@ def pre_create_tool( def post_create_tool(self, response: gcdc_tool.Tool) -> gcdc_tool.Tool: """Post-rpc interceptor for create_tool - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_tool_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Tools server but before - it is returned to user code. + it is returned to user code. This `post_create_tool` interceptor runs + before the `post_create_tool_with_metadata` interceptor. """ return response + def post_create_tool_with_metadata( + self, + response: gcdc_tool.Tool, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gcdc_tool.Tool, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_tool + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Tools server but before it is returned to user code. + + We recommend only using this `post_create_tool_with_metadata` + interceptor in new development instead of the `post_create_tool` interceptor. + When both interceptors are used, this `post_create_tool_with_metadata` interceptor runs after the + `post_create_tool` interceptor. The (possibly modified) response returned by + `post_create_tool` will be passed to + `post_create_tool_with_metadata`. 
+ """ + return response, metadata + def pre_delete_tool( self, request: tool.DeleteToolRequest, @@ -172,12 +195,35 @@ def post_export_tools( ) -> operations_pb2.Operation: """Post-rpc interceptor for export_tools - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_export_tools_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Tools server but before - it is returned to user code. + it is returned to user code. This `post_export_tools` interceptor runs + before the `post_export_tools_with_metadata` interceptor. """ return response + def post_export_tools_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for export_tools + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Tools server but before it is returned to user code. + + We recommend only using this `post_export_tools_with_metadata` + interceptor in new development instead of the `post_export_tools` interceptor. + When both interceptors are used, this `post_export_tools_with_metadata` interceptor runs after the + `post_export_tools` interceptor. The (possibly modified) response returned by + `post_export_tools` will be passed to + `post_export_tools_with_metadata`. + """ + return response, metadata + def pre_get_tool( self, request: tool.GetToolRequest, @@ -193,12 +239,33 @@ def pre_get_tool( def post_get_tool(self, response: tool.Tool) -> tool.Tool: """Post-rpc interceptor for get_tool - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_tool_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Tools server but before - it is returned to user code. + it is returned to user code. This `post_get_tool` interceptor runs + before the `post_get_tool_with_metadata` interceptor. """ return response + def post_get_tool_with_metadata( + self, response: tool.Tool, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[tool.Tool, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_tool + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Tools server but before it is returned to user code. + + We recommend only using this `post_get_tool_with_metadata` + interceptor in new development instead of the `post_get_tool` interceptor. + When both interceptors are used, this `post_get_tool_with_metadata` interceptor runs after the + `post_get_tool` interceptor. The (possibly modified) response returned by + `post_get_tool` will be passed to + `post_get_tool_with_metadata`. + """ + return response, metadata + def pre_list_tools( self, request: tool.ListToolsRequest, @@ -216,12 +283,35 @@ def post_list_tools( ) -> tool.ListToolsResponse: """Post-rpc interceptor for list_tools - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_tools_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Tools server but before - it is returned to user code. + it is returned to user code. This `post_list_tools` interceptor runs + before the `post_list_tools_with_metadata` interceptor. 
""" return response + def post_list_tools_with_metadata( + self, + response: tool.ListToolsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[tool.ListToolsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_tools + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Tools server but before it is returned to user code. + + We recommend only using this `post_list_tools_with_metadata` + interceptor in new development instead of the `post_list_tools` interceptor. + When both interceptors are used, this `post_list_tools_with_metadata` interceptor runs after the + `post_list_tools` interceptor. The (possibly modified) response returned by + `post_list_tools` will be passed to + `post_list_tools_with_metadata`. + """ + return response, metadata + def pre_update_tool( self, request: gcdc_tool.UpdateToolRequest, @@ -237,12 +327,35 @@ def pre_update_tool( def post_update_tool(self, response: gcdc_tool.Tool) -> gcdc_tool.Tool: """Post-rpc interceptor for update_tool - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_tool_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Tools server but before - it is returned to user code. + it is returned to user code. This `post_update_tool` interceptor runs + before the `post_update_tool_with_metadata` interceptor. """ return response + def post_update_tool_with_metadata( + self, + response: gcdc_tool.Tool, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gcdc_tool.Tool, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_tool + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Tools server but before it is returned to user code. + + We recommend only using this `post_update_tool_with_metadata` + interceptor in new development instead of the `post_update_tool` interceptor. + When both interceptors are used, this `post_update_tool_with_metadata` interceptor runs after the + `post_update_tool` interceptor. The (possibly modified) response returned by + `post_update_tool` will be passed to + `post_update_tool_with_metadata`. 
+ """ + return response, metadata + def pre_get_location( self, request: locations_pb2.GetLocationRequest, @@ -643,6 +756,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_tool(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_tool_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -898,6 +1015,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_export_tools(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_export_tools_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1043,6 +1164,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_tool(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_tool_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1186,6 +1311,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_tools(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_tools_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1339,6 +1468,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_tool(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_tool_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/transition_route_groups/client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/transition_route_groups/client.py index d0e9932a29f0..ca5c205aefaa 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/transition_route_groups/client.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/transition_route_groups/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -622,6 +624,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. 
+ """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -1514,16 +1543,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -1569,16 +1602,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def cancel_operation( self, @@ -1679,16 +1716,20 @@ def get_location( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def list_locations( self, @@ -1734,16 +1775,20 @@ def list_locations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. 
+ return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/transition_route_groups/transports/rest.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/transition_route_groups/transports/rest.py index 67ea7095fa23..28e344dd6358 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/transition_route_groups/transports/rest.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/transition_route_groups/transports/rest.py @@ -136,12 +136,38 @@ def post_create_transition_route_group( ) -> gcdc_transition_route_group.TransitionRouteGroup: """Post-rpc interceptor for create_transition_route_group - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_transition_route_group_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the TransitionRouteGroups server but before - it is returned to user code. + it is returned to user code. This `post_create_transition_route_group` interceptor runs + before the `post_create_transition_route_group_with_metadata` interceptor. """ return response + def post_create_transition_route_group_with_metadata( + self, + response: gcdc_transition_route_group.TransitionRouteGroup, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + gcdc_transition_route_group.TransitionRouteGroup, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for create_transition_route_group + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the TransitionRouteGroups server but before it is returned to user code. + + We recommend only using this `post_create_transition_route_group_with_metadata` + interceptor in new development instead of the `post_create_transition_route_group` interceptor. + When both interceptors are used, this `post_create_transition_route_group_with_metadata` interceptor runs after the + `post_create_transition_route_group` interceptor. The (possibly modified) response returned by + `post_create_transition_route_group` will be passed to + `post_create_transition_route_group_with_metadata`. + """ + return response, metadata + def pre_delete_transition_route_group( self, request: transition_route_group.DeleteTransitionRouteGroupRequest, @@ -177,12 +203,38 @@ def post_get_transition_route_group( ) -> transition_route_group.TransitionRouteGroup: """Post-rpc interceptor for get_transition_route_group - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_transition_route_group_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the TransitionRouteGroups server but before - it is returned to user code. + it is returned to user code. This `post_get_transition_route_group` interceptor runs + before the `post_get_transition_route_group_with_metadata` interceptor. 
""" return response + def post_get_transition_route_group_with_metadata( + self, + response: transition_route_group.TransitionRouteGroup, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + transition_route_group.TransitionRouteGroup, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for get_transition_route_group + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the TransitionRouteGroups server but before it is returned to user code. + + We recommend only using this `post_get_transition_route_group_with_metadata` + interceptor in new development instead of the `post_get_transition_route_group` interceptor. + When both interceptors are used, this `post_get_transition_route_group_with_metadata` interceptor runs after the + `post_get_transition_route_group` interceptor. The (possibly modified) response returned by + `post_get_transition_route_group` will be passed to + `post_get_transition_route_group_with_metadata`. + """ + return response, metadata + def pre_list_transition_route_groups( self, request: transition_route_group.ListTransitionRouteGroupsRequest, @@ -203,12 +255,38 @@ def post_list_transition_route_groups( ) -> transition_route_group.ListTransitionRouteGroupsResponse: """Post-rpc interceptor for list_transition_route_groups - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_transition_route_groups_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the TransitionRouteGroups server but before - it is returned to user code. + it is returned to user code. This `post_list_transition_route_groups` interceptor runs + before the `post_list_transition_route_groups_with_metadata` interceptor. """ return response + def post_list_transition_route_groups_with_metadata( + self, + response: transition_route_group.ListTransitionRouteGroupsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + transition_route_group.ListTransitionRouteGroupsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_transition_route_groups + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the TransitionRouteGroups server but before it is returned to user code. + + We recommend only using this `post_list_transition_route_groups_with_metadata` + interceptor in new development instead of the `post_list_transition_route_groups` interceptor. + When both interceptors are used, this `post_list_transition_route_groups_with_metadata` interceptor runs after the + `post_list_transition_route_groups` interceptor. The (possibly modified) response returned by + `post_list_transition_route_groups` will be passed to + `post_list_transition_route_groups_with_metadata`. + """ + return response, metadata + def pre_update_transition_route_group( self, request: gcdc_transition_route_group.UpdateTransitionRouteGroupRequest, @@ -229,12 +307,38 @@ def post_update_transition_route_group( ) -> gcdc_transition_route_group.TransitionRouteGroup: """Post-rpc interceptor for update_transition_route_group - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_transition_route_group_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the TransitionRouteGroups server but before - it is returned to user code. 
+ it is returned to user code. This `post_update_transition_route_group` interceptor runs + before the `post_update_transition_route_group_with_metadata` interceptor. """ return response + def post_update_transition_route_group_with_metadata( + self, + response: gcdc_transition_route_group.TransitionRouteGroup, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + gcdc_transition_route_group.TransitionRouteGroup, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for update_transition_route_group + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the TransitionRouteGroups server but before it is returned to user code. + + We recommend only using this `post_update_transition_route_group_with_metadata` + interceptor in new development instead of the `post_update_transition_route_group` interceptor. + When both interceptors are used, this `post_update_transition_route_group_with_metadata` interceptor runs after the + `post_update_transition_route_group` interceptor. The (possibly modified) response returned by + `post_update_transition_route_group` will be passed to + `post_update_transition_route_group_with_metadata`. + """ + return response, metadata + def pre_get_location( self, request: locations_pb2.GetLocationRequest, @@ -578,6 +682,13 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_transition_route_group(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_create_transition_route_group_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -842,6 +953,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_transition_route_group(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_transition_route_group_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -991,6 +1106,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_transition_route_groups(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_transition_route_groups_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1148,6 +1267,13 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_transition_route_group(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_update_transition_route_group_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/versions/client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/versions/client.py index e16564327773..44501ea87993 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/versions/client.py +++ 
b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/versions/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -499,6 +501,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -1592,16 +1621,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -1647,16 +1680,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def cancel_operation( self, @@ -1757,16 +1794,20 @@ def get_location( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def list_locations( self, @@ -1812,16 +1853,20 @@ def list_locations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. 
+ return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/versions/transports/rest.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/versions/transports/rest.py index 60321cbd7453..ac498b77fdf7 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/versions/transports/rest.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/versions/transports/rest.py @@ -147,12 +147,37 @@ def post_compare_versions( ) -> version.CompareVersionsResponse: """Post-rpc interceptor for compare_versions - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_compare_versions_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Versions server but before - it is returned to user code. + it is returned to user code. This `post_compare_versions` interceptor runs + before the `post_compare_versions_with_metadata` interceptor. """ return response + def post_compare_versions_with_metadata( + self, + response: version.CompareVersionsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + version.CompareVersionsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for compare_versions + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Versions server but before it is returned to user code. + + We recommend only using this `post_compare_versions_with_metadata` + interceptor in new development instead of the `post_compare_versions` interceptor. + When both interceptors are used, this `post_compare_versions_with_metadata` interceptor runs after the + `post_compare_versions` interceptor. The (possibly modified) response returned by + `post_compare_versions` will be passed to + `post_compare_versions_with_metadata`. + """ + return response, metadata + def pre_create_version( self, request: gcdc_version.CreateVersionRequest, @@ -172,12 +197,35 @@ def post_create_version( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_version - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_version_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Versions server but before - it is returned to user code. + it is returned to user code. This `post_create_version` interceptor runs + before the `post_create_version_with_metadata` interceptor. """ return response + def post_create_version_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_version + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Versions server but before it is returned to user code. + + We recommend only using this `post_create_version_with_metadata` + interceptor in new development instead of the `post_create_version` interceptor. + When both interceptors are used, this `post_create_version_with_metadata` interceptor runs after the + `post_create_version` interceptor. 
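A sketch of the ordering the docstrings above describe when a subclass overrides both hooks: the plain post-RPC interceptor runs first, and whatever it returns is what the `*_with_metadata` hook receives. The class name VersionsRestInterceptor is assumed from the usual generated naming:

    from google.cloud.dialogflowcx_v3beta1.services.versions.transports.rest import (
        VersionsRestInterceptor,
    )


    class AuditingVersionsInterceptor(VersionsRestInterceptor):
        def post_create_version(self, response):
            # Runs first; the (possibly modified) value returned here is what the
            # *_with_metadata hook below will receive as `response`.
            print("operation name:", response.name)
            return response

        def post_create_version_with_metadata(self, response, metadata):
            # Runs second; `metadata` is the list of HTTP response header tuples.
            print("response headers:", dict(metadata))
            return response, metadata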
The (possibly modified) response returned by + `post_create_version` will be passed to + `post_create_version_with_metadata`. + """ + return response, metadata + def pre_delete_version( self, request: version.DeleteVersionRequest, @@ -205,12 +253,35 @@ def pre_get_version( def post_get_version(self, response: version.Version) -> version.Version: """Post-rpc interceptor for get_version - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_version_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Versions server but before - it is returned to user code. + it is returned to user code. This `post_get_version` interceptor runs + before the `post_get_version_with_metadata` interceptor. """ return response + def post_get_version_with_metadata( + self, + response: version.Version, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[version.Version, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_version + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Versions server but before it is returned to user code. + + We recommend only using this `post_get_version_with_metadata` + interceptor in new development instead of the `post_get_version` interceptor. + When both interceptors are used, this `post_get_version_with_metadata` interceptor runs after the + `post_get_version` interceptor. The (possibly modified) response returned by + `post_get_version` will be passed to + `post_get_version_with_metadata`. + """ + return response, metadata + def pre_list_versions( self, request: version.ListVersionsRequest, @@ -228,12 +299,35 @@ def post_list_versions( ) -> version.ListVersionsResponse: """Post-rpc interceptor for list_versions - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_versions_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Versions server but before - it is returned to user code. + it is returned to user code. This `post_list_versions` interceptor runs + before the `post_list_versions_with_metadata` interceptor. """ return response + def post_list_versions_with_metadata( + self, + response: version.ListVersionsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[version.ListVersionsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_versions + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Versions server but before it is returned to user code. + + We recommend only using this `post_list_versions_with_metadata` + interceptor in new development instead of the `post_list_versions` interceptor. + When both interceptors are used, this `post_list_versions_with_metadata` interceptor runs after the + `post_list_versions` interceptor. The (possibly modified) response returned by + `post_list_versions` will be passed to + `post_list_versions_with_metadata`. + """ + return response, metadata + def pre_load_version( self, request: version.LoadVersionRequest, @@ -251,12 +345,35 @@ def post_load_version( ) -> operations_pb2.Operation: """Post-rpc interceptor for load_version - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_load_version_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response after it is returned by the Versions server but before - it is returned to user code. + it is returned to user code. This `post_load_version` interceptor runs + before the `post_load_version_with_metadata` interceptor. """ return response + def post_load_version_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for load_version + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Versions server but before it is returned to user code. + + We recommend only using this `post_load_version_with_metadata` + interceptor in new development instead of the `post_load_version` interceptor. + When both interceptors are used, this `post_load_version_with_metadata` interceptor runs after the + `post_load_version` interceptor. The (possibly modified) response returned by + `post_load_version` will be passed to + `post_load_version_with_metadata`. + """ + return response, metadata + def pre_update_version( self, request: gcdc_version.UpdateVersionRequest, @@ -276,12 +393,35 @@ def post_update_version( ) -> gcdc_version.Version: """Post-rpc interceptor for update_version - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_version_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Versions server but before - it is returned to user code. + it is returned to user code. This `post_update_version` interceptor runs + before the `post_update_version_with_metadata` interceptor. """ return response + def post_update_version_with_metadata( + self, + response: gcdc_version.Version, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gcdc_version.Version, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_version + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Versions server but before it is returned to user code. + + We recommend only using this `post_update_version_with_metadata` + interceptor in new development instead of the `post_update_version` interceptor. + When both interceptors are used, this `post_update_version_with_metadata` interceptor runs after the + `post_update_version` interceptor. The (possibly modified) response returned by + `post_update_version` will be passed to + `post_update_version_with_metadata`. 
+ """ + return response, metadata + def pre_get_location( self, request: locations_pb2.GetLocationRequest, @@ -686,6 +826,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_compare_versions(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_compare_versions_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -838,6 +982,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_version(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_version_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1092,6 +1240,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_version(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_version_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1237,6 +1389,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_versions(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_versions_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1387,6 +1543,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_load_version(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_load_version_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1538,6 +1698,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_version(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_version_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/webhooks/client.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/webhooks/client.py index 83870d02d64a..7c57c9c862ed 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/webhooks/client.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/webhooks/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -516,6 +518,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. 
return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -1338,16 +1367,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -1393,16 +1426,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def cancel_operation( self, @@ -1503,16 +1540,20 @@ def get_location( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def list_locations( self, @@ -1558,16 +1599,20 @@ def list_locations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. 
+ return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/webhooks/transports/rest.py b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/webhooks/transports/rest.py index b844072f006e..544a300a67d0 100644 --- a/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/webhooks/transports/rest.py +++ b/packages/google-cloud-dialogflow-cx/google/cloud/dialogflowcx_v3beta1/services/webhooks/transports/rest.py @@ -133,12 +133,35 @@ def post_create_webhook( ) -> gcdc_webhook.Webhook: """Post-rpc interceptor for create_webhook - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_webhook_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Webhooks server but before - it is returned to user code. + it is returned to user code. This `post_create_webhook` interceptor runs + before the `post_create_webhook_with_metadata` interceptor. """ return response + def post_create_webhook_with_metadata( + self, + response: gcdc_webhook.Webhook, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gcdc_webhook.Webhook, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_webhook + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Webhooks server but before it is returned to user code. + + We recommend only using this `post_create_webhook_with_metadata` + interceptor in new development instead of the `post_create_webhook` interceptor. + When both interceptors are used, this `post_create_webhook_with_metadata` interceptor runs after the + `post_create_webhook` interceptor. The (possibly modified) response returned by + `post_create_webhook` will be passed to + `post_create_webhook_with_metadata`. + """ + return response, metadata + def pre_delete_webhook( self, request: webhook.DeleteWebhookRequest, @@ -166,12 +189,35 @@ def pre_get_webhook( def post_get_webhook(self, response: webhook.Webhook) -> webhook.Webhook: """Post-rpc interceptor for get_webhook - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_webhook_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Webhooks server but before - it is returned to user code. + it is returned to user code. This `post_get_webhook` interceptor runs + before the `post_get_webhook_with_metadata` interceptor. """ return response + def post_get_webhook_with_metadata( + self, + response: webhook.Webhook, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[webhook.Webhook, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_webhook + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Webhooks server but before it is returned to user code. + + We recommend only using this `post_get_webhook_with_metadata` + interceptor in new development instead of the `post_get_webhook` interceptor. + When both interceptors are used, this `post_get_webhook_with_metadata` interceptor runs after the + `post_get_webhook` interceptor. 
The (possibly modified) response returned by + `post_get_webhook` will be passed to + `post_get_webhook_with_metadata`. + """ + return response, metadata + def pre_list_webhooks( self, request: webhook.ListWebhooksRequest, @@ -189,12 +235,35 @@ def post_list_webhooks( ) -> webhook.ListWebhooksResponse: """Post-rpc interceptor for list_webhooks - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_webhooks_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Webhooks server but before - it is returned to user code. + it is returned to user code. This `post_list_webhooks` interceptor runs + before the `post_list_webhooks_with_metadata` interceptor. """ return response + def post_list_webhooks_with_metadata( + self, + response: webhook.ListWebhooksResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[webhook.ListWebhooksResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_webhooks + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Webhooks server but before it is returned to user code. + + We recommend only using this `post_list_webhooks_with_metadata` + interceptor in new development instead of the `post_list_webhooks` interceptor. + When both interceptors are used, this `post_list_webhooks_with_metadata` interceptor runs after the + `post_list_webhooks` interceptor. The (possibly modified) response returned by + `post_list_webhooks` will be passed to + `post_list_webhooks_with_metadata`. + """ + return response, metadata + def pre_update_webhook( self, request: gcdc_webhook.UpdateWebhookRequest, @@ -214,12 +283,35 @@ def post_update_webhook( ) -> gcdc_webhook.Webhook: """Post-rpc interceptor for update_webhook - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_webhook_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Webhooks server but before - it is returned to user code. + it is returned to user code. This `post_update_webhook` interceptor runs + before the `post_update_webhook_with_metadata` interceptor. """ return response + def post_update_webhook_with_metadata( + self, + response: gcdc_webhook.Webhook, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gcdc_webhook.Webhook, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_webhook + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Webhooks server but before it is returned to user code. + + We recommend only using this `post_update_webhook_with_metadata` + interceptor in new development instead of the `post_update_webhook` interceptor. + When both interceptors are used, this `post_update_webhook_with_metadata` interceptor runs after the + `post_update_webhook` interceptor. The (possibly modified) response returned by + `post_update_webhook` will be passed to + `post_update_webhook_with_metadata`. 
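For reference, a sketch of the metadata these `*_with_metadata` hooks receive: the REST transport converts the HTTP response headers into (name, str(value)) tuples, so a hook can look up an individual header. The header name used in the comment is only an example:

    from typing import Optional, Sequence, Tuple, Union


    def find_header(
        metadata: Sequence[Tuple[str, Union[str, bytes]]], name: str
    ) -> Optional[str]:
        """Return the first header value matching `name`, case-insensitively."""
        for key, value in metadata:
            if key.lower() == name.lower():
                return value if isinstance(value, str) else value.decode("utf-8")
        return None


    # e.g. inside post_update_webhook_with_metadata(self, response, metadata):
    #     request_id = find_header(metadata, "x-request-id")  # example header name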
+ """ + return response, metadata + def pre_get_location( self, request: locations_pb2.GetLocationRequest, @@ -566,6 +658,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_webhook(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_webhook_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -827,6 +923,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_webhook(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_webhook_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -972,6 +1072,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_webhooks(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_webhooks_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1130,6 +1234,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_webhook(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_webhook_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-dialogflow-cx/samples/generated_samples/snippet_metadata_google.cloud.dialogflow.cx.v3.json b/packages/google-cloud-dialogflow-cx/samples/generated_samples/snippet_metadata_google.cloud.dialogflow.cx.v3.json index b445daf1063d..bd808b7ce432 100644 --- a/packages/google-cloud-dialogflow-cx/samples/generated_samples/snippet_metadata_google.cloud.dialogflow.cx.v3.json +++ b/packages/google-cloud-dialogflow-cx/samples/generated_samples/snippet_metadata_google.cloud.dialogflow.cx.v3.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-dialogflow-cx", - "version": "1.38.0" + "version": "1.39.0" }, "snippets": [ { diff --git a/packages/google-cloud-dialogflow-cx/samples/generated_samples/snippet_metadata_google.cloud.dialogflow.cx.v3beta1.json b/packages/google-cloud-dialogflow-cx/samples/generated_samples/snippet_metadata_google.cloud.dialogflow.cx.v3beta1.json index c26c15425384..9052dd9dd090 100644 --- a/packages/google-cloud-dialogflow-cx/samples/generated_samples/snippet_metadata_google.cloud.dialogflow.cx.v3beta1.json +++ b/packages/google-cloud-dialogflow-cx/samples/generated_samples/snippet_metadata_google.cloud.dialogflow.cx.v3beta1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-dialogflow-cx", - "version": "1.38.0" + "version": "1.39.0" }, "snippets": [ { diff --git a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_agents.py b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_agents.py index 5626caa01b19..c4139338b971 100644 --- a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_agents.py +++ b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_agents.py @@ -83,6 +83,13 @@ from 
google.cloud.dialogflowcx_v3.types import generative_settings from google.cloud.dialogflowcx_v3.types import safety_settings +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -305,6 +312,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = AgentsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = AgentsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -7446,10 +7496,13 @@ def test_list_agents_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AgentsRestInterceptor, "post_list_agents" ) as post, mock.patch.object( + transports.AgentsRestInterceptor, "post_list_agents_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AgentsRestInterceptor, "pre_list_agents" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = agent.ListAgentsRequest.pb(agent.ListAgentsRequest()) transcode.return_value = { "method": "post", @@ -7471,6 +7524,7 @@ def test_list_agents_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = agent.ListAgentsResponse() + post_with_metadata.return_value = agent.ListAgentsResponse(), metadata client.list_agents( request, @@ -7482,6 +7536,7 @@ def test_list_agents_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_agent_rest_bad_request(request_type=agent.GetAgentRequest): @@ -7586,10 +7641,13 @@ def test_get_agent_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AgentsRestInterceptor, "post_get_agent" ) as post, mock.patch.object( + transports.AgentsRestInterceptor, "post_get_agent_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AgentsRestInterceptor, "pre_get_agent" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = agent.GetAgentRequest.pb(agent.GetAgentRequest()) transcode.return_value = { 
"method": "post", @@ -7611,6 +7669,7 @@ def test_get_agent_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = agent.Agent() + post_with_metadata.return_value = agent.Agent(), metadata client.get_agent( request, @@ -7622,6 +7681,7 @@ def test_get_agent_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_agent_rest_bad_request(request_type=gcdc_agent.CreateAgentRequest): @@ -7851,10 +7911,13 @@ def test_create_agent_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AgentsRestInterceptor, "post_create_agent" ) as post, mock.patch.object( + transports.AgentsRestInterceptor, "post_create_agent_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AgentsRestInterceptor, "pre_create_agent" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gcdc_agent.CreateAgentRequest.pb(gcdc_agent.CreateAgentRequest()) transcode.return_value = { "method": "post", @@ -7876,6 +7939,7 @@ def test_create_agent_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gcdc_agent.Agent() + post_with_metadata.return_value = gcdc_agent.Agent(), metadata client.create_agent( request, @@ -7887,6 +7951,7 @@ def test_create_agent_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_agent_rest_bad_request(request_type=gcdc_agent.UpdateAgentRequest): @@ -8120,10 +8185,13 @@ def test_update_agent_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AgentsRestInterceptor, "post_update_agent" ) as post, mock.patch.object( + transports.AgentsRestInterceptor, "post_update_agent_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AgentsRestInterceptor, "pre_update_agent" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gcdc_agent.UpdateAgentRequest.pb(gcdc_agent.UpdateAgentRequest()) transcode.return_value = { "method": "post", @@ -8145,6 +8213,7 @@ def test_update_agent_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gcdc_agent.Agent() + post_with_metadata.return_value = gcdc_agent.Agent(), metadata client.update_agent( request, @@ -8156,6 +8225,7 @@ def test_update_agent_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_agent_rest_bad_request(request_type=agent.DeleteAgentRequest): @@ -8335,10 +8405,13 @@ def test_export_agent_rest_interceptors(null_interceptor): ), mock.patch.object( transports.AgentsRestInterceptor, "post_export_agent" ) as post, mock.patch.object( + transports.AgentsRestInterceptor, "post_export_agent_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AgentsRestInterceptor, "pre_export_agent" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = agent.ExportAgentRequest.pb(agent.ExportAgentRequest()) transcode.return_value = { "method": "post", @@ -8360,6 +8433,7 @@ def test_export_agent_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.export_agent( 
request, @@ -8371,6 +8445,7 @@ def test_export_agent_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_restore_agent_rest_bad_request(request_type=agent.RestoreAgentRequest): @@ -8447,10 +8522,13 @@ def test_restore_agent_rest_interceptors(null_interceptor): ), mock.patch.object( transports.AgentsRestInterceptor, "post_restore_agent" ) as post, mock.patch.object( + transports.AgentsRestInterceptor, "post_restore_agent_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AgentsRestInterceptor, "pre_restore_agent" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = agent.RestoreAgentRequest.pb(agent.RestoreAgentRequest()) transcode.return_value = { "method": "post", @@ -8472,6 +8550,7 @@ def test_restore_agent_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.restore_agent( request, @@ -8483,6 +8562,7 @@ def test_restore_agent_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_validate_agent_rest_bad_request(request_type=agent.ValidateAgentRequest): @@ -8563,10 +8643,13 @@ def test_validate_agent_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AgentsRestInterceptor, "post_validate_agent" ) as post, mock.patch.object( + transports.AgentsRestInterceptor, "post_validate_agent_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AgentsRestInterceptor, "pre_validate_agent" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = agent.ValidateAgentRequest.pb(agent.ValidateAgentRequest()) transcode.return_value = { "method": "post", @@ -8590,6 +8673,7 @@ def test_validate_agent_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = agent.AgentValidationResult() + post_with_metadata.return_value = agent.AgentValidationResult(), metadata client.validate_agent( request, @@ -8601,6 +8685,7 @@ def test_validate_agent_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_agent_validation_result_rest_bad_request( @@ -8687,10 +8772,14 @@ def test_get_agent_validation_result_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AgentsRestInterceptor, "post_get_agent_validation_result" ) as post, mock.patch.object( + transports.AgentsRestInterceptor, + "post_get_agent_validation_result_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AgentsRestInterceptor, "pre_get_agent_validation_result" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = agent.GetAgentValidationResultRequest.pb( agent.GetAgentValidationResultRequest() ) @@ -8716,6 +8805,7 @@ def test_get_agent_validation_result_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = agent.AgentValidationResult() + post_with_metadata.return_value = agent.AgentValidationResult(), metadata client.get_agent_validation_result( request, @@ -8727,6 +8817,7 @@ def test_get_agent_validation_result_rest_interceptors(null_interceptor): pre.assert_called_once() 
post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_generative_settings_rest_bad_request( @@ -8815,10 +8906,13 @@ def test_get_generative_settings_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AgentsRestInterceptor, "post_get_generative_settings" ) as post, mock.patch.object( + transports.AgentsRestInterceptor, "post_get_generative_settings_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AgentsRestInterceptor, "pre_get_generative_settings" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = agent.GetGenerativeSettingsRequest.pb( agent.GetGenerativeSettingsRequest() ) @@ -8844,6 +8938,10 @@ def test_get_generative_settings_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = generative_settings.GenerativeSettings() + post_with_metadata.return_value = ( + generative_settings.GenerativeSettings(), + metadata, + ) client.get_generative_settings( request, @@ -8855,6 +8953,7 @@ def test_get_generative_settings_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_generative_settings_rest_bad_request( @@ -9043,10 +9142,14 @@ def test_update_generative_settings_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AgentsRestInterceptor, "post_update_generative_settings" ) as post, mock.patch.object( + transports.AgentsRestInterceptor, + "post_update_generative_settings_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AgentsRestInterceptor, "pre_update_generative_settings" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = agent.UpdateGenerativeSettingsRequest.pb( agent.UpdateGenerativeSettingsRequest() ) @@ -9072,6 +9175,10 @@ def test_update_generative_settings_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gcdc_generative_settings.GenerativeSettings() + post_with_metadata.return_value = ( + gcdc_generative_settings.GenerativeSettings(), + metadata, + ) client.update_generative_settings( request, @@ -9083,6 +9190,7 @@ def test_update_generative_settings_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationRequest): diff --git a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_changelogs.py b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_changelogs.py index 3cf9940bb7bb..aaccdf4e4c23 100644 --- a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_changelogs.py +++ b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_changelogs.py @@ -63,6 +63,13 @@ ) from google.cloud.dialogflowcx_v3.types import changelog +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -298,6 +305,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+@pytest.mark.parametrize(
+    "error_code,cred_info_json,show_cred_info",
+    [
+        (401, CRED_INFO_JSON, True),
+        (403, CRED_INFO_JSON, True),
+        (404, CRED_INFO_JSON, True),
+        (500, CRED_INFO_JSON, False),
+        (401, None, False),
+        (403, None, False),
+        (404, None, False),
+        (500, None, False),
+    ],
+)
+def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info):
+    cred = mock.Mock(["get_cred_info"])
+    cred.get_cred_info = mock.Mock(return_value=cred_info_json)
+    client = ChangelogsClient(credentials=cred)
+    client._transport._credentials = cred
+
+    error = core_exceptions.GoogleAPICallError("message", details=["foo"])
+    error.code = error_code
+
+    client._add_cred_info_for_auth_errors(error)
+    if show_cred_info:
+        assert error.details == ["foo", CRED_INFO_STRING]
+    else:
+        assert error.details == ["foo"]
+
+
+@pytest.mark.parametrize("error_code", [401, 403, 404, 500])
+def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code):
+    cred = mock.Mock([])
+    assert not hasattr(cred, "get_cred_info")
+    client = ChangelogsClient(credentials=cred)
+    client._transport._credentials = cred
+
+    error = core_exceptions.GoogleAPICallError("message", details=[])
+    error.code = error_code
+
+    client._add_cred_info_for_auth_errors(error)
+    assert error.details == []
+
+
 @pytest.mark.parametrize(
     "client_class,transport_name",
     [
@@ -2632,10 +2682,13 @@ def test_list_changelogs_rest_interceptors(null_interceptor):
     ) as transcode, mock.patch.object(
         transports.ChangelogsRestInterceptor, "post_list_changelogs"
     ) as post, mock.patch.object(
+        transports.ChangelogsRestInterceptor, "post_list_changelogs_with_metadata"
+    ) as post_with_metadata, mock.patch.object(
         transports.ChangelogsRestInterceptor, "pre_list_changelogs"
     ) as pre:
         pre.assert_not_called()
         post.assert_not_called()
+        post_with_metadata.assert_not_called()
         pb_message = changelog.ListChangelogsRequest.pb(
             changelog.ListChangelogsRequest()
         )
@@ -2661,6 +2714,7 @@
         ]
         pre.return_value = request, metadata
         post.return_value = changelog.ListChangelogsResponse()
+        post_with_metadata.return_value = changelog.ListChangelogsResponse(), metadata

         client.list_changelogs(
             request,
@@ -2672,6 +2726,7 @@

         pre.assert_called_once()
         post.assert_called_once()
+        post_with_metadata.assert_called_once()


 def test_get_changelog_rest_bad_request(request_type=changelog.GetChangelogRequest):
@@ -2770,10 +2825,13 @@ def test_get_changelog_rest_interceptors(null_interceptor):
     ) as transcode, mock.patch.object(
         transports.ChangelogsRestInterceptor, "post_get_changelog"
     ) as post, mock.patch.object(
+        transports.ChangelogsRestInterceptor, "post_get_changelog_with_metadata"
+    ) as post_with_metadata, mock.patch.object(
         transports.ChangelogsRestInterceptor, "pre_get_changelog"
     ) as pre:
         pre.assert_not_called()
         post.assert_not_called()
+        post_with_metadata.assert_not_called()
         pb_message = changelog.GetChangelogRequest.pb(changelog.GetChangelogRequest())
         transcode.return_value = {
             "method": "post",
@@ -2795,6 +2853,7 @@
         ]
         pre.return_value = request, metadata
         post.return_value = changelog.Changelog()
+        post_with_metadata.return_value = changelog.Changelog(), metadata

         client.get_changelog(
             request,
@@ -2806,6 +2865,7 @@

         pre.assert_called_once()
         post.assert_called_once()
+        post_with_metadata.assert_called_once()


 def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationRequest):
diff --git a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_deployments.py b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_deployments.py
index a71209537279..1d45c0c6a8dc 100644
--- a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_deployments.py
+++ b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_deployments.py
@@ -63,6 +63,13 @@
 )
 from google.cloud.dialogflowcx_v3.types import deployment

+CRED_INFO_JSON = {
+    "credential_source": "/path/to/file",
+    "credential_type": "service account credentials",
+    "principal": "service-account@example.com",
+}
+CRED_INFO_STRING = json.dumps(CRED_INFO_JSON)
+

 async def mock_async_gen(data, chunk_size=1):
     for i in range(0, len(data)):  # pragma: NO COVER
@@ -302,6 +309,49 @@ def test__get_universe_domain():
     assert str(excinfo.value) == "Universe Domain cannot be an empty string."


+@pytest.mark.parametrize(
+    "error_code,cred_info_json,show_cred_info",
+    [
+        (401, CRED_INFO_JSON, True),
+        (403, CRED_INFO_JSON, True),
+        (404, CRED_INFO_JSON, True),
+        (500, CRED_INFO_JSON, False),
+        (401, None, False),
+        (403, None, False),
+        (404, None, False),
+        (500, None, False),
+    ],
+)
+def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info):
+    cred = mock.Mock(["get_cred_info"])
+    cred.get_cred_info = mock.Mock(return_value=cred_info_json)
+    client = DeploymentsClient(credentials=cred)
+    client._transport._credentials = cred
+
+    error = core_exceptions.GoogleAPICallError("message", details=["foo"])
+    error.code = error_code
+
+    client._add_cred_info_for_auth_errors(error)
+    if show_cred_info:
+        assert error.details == ["foo", CRED_INFO_STRING]
+    else:
+        assert error.details == ["foo"]
+
+
+@pytest.mark.parametrize("error_code", [401, 403, 404, 500])
+def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code):
+    cred = mock.Mock([])
+    assert not hasattr(cred, "get_cred_info")
+    client = DeploymentsClient(credentials=cred)
+    client._transport._credentials = cred
+
+    error = core_exceptions.GoogleAPICallError("message", details=[])
+    error.code = error_code
+
+    client._add_cred_info_for_auth_errors(error)
+    assert error.details == []
+
+
 @pytest.mark.parametrize(
     "client_class,transport_name",
     [
@@ -2634,10 +2684,13 @@ def test_list_deployments_rest_interceptors(null_interceptor):
     ) as transcode, mock.patch.object(
         transports.DeploymentsRestInterceptor, "post_list_deployments"
     ) as post, mock.patch.object(
+        transports.DeploymentsRestInterceptor, "post_list_deployments_with_metadata"
+    ) as post_with_metadata, mock.patch.object(
         transports.DeploymentsRestInterceptor, "pre_list_deployments"
     ) as pre:
         pre.assert_not_called()
         post.assert_not_called()
+        post_with_metadata.assert_not_called()
         pb_message = deployment.ListDeploymentsRequest.pb(
             deployment.ListDeploymentsRequest()
         )
@@ -2663,6 +2716,7 @@
         ]
         pre.return_value = request, metadata
         post.return_value = deployment.ListDeploymentsResponse()
+        post_with_metadata.return_value = deployment.ListDeploymentsResponse(), metadata

         client.list_deployments(
             request,
@@ -2674,6 +2728,7 @@

         pre.assert_called_once()
         post.assert_called_once()
+        post_with_metadata.assert_called_once()


 def test_get_deployment_rest_bad_request(request_type=deployment.GetDeploymentRequest):
@@ -2764,10 +2819,13 @@ def test_get_deployment_rest_interceptors(null_interceptor):
     ) as transcode, mock.patch.object(
         transports.DeploymentsRestInterceptor, "post_get_deployment"
     ) as post, mock.patch.object(
+        transports.DeploymentsRestInterceptor, "post_get_deployment_with_metadata"
+    ) as post_with_metadata, mock.patch.object(
         transports.DeploymentsRestInterceptor, "pre_get_deployment"
     ) as pre:
         pre.assert_not_called()
         post.assert_not_called()
+        post_with_metadata.assert_not_called()
         pb_message = deployment.GetDeploymentRequest.pb(
             deployment.GetDeploymentRequest()
         )
@@ -2791,6 +2849,7 @@
         ]
         pre.return_value = request, metadata
         post.return_value = deployment.Deployment()
+        post_with_metadata.return_value = deployment.Deployment(), metadata

         client.get_deployment(
             request,
@@ -2802,6 +2861,7 @@

         pre.assert_called_once()
         post.assert_called_once()
+        post_with_metadata.assert_called_once()


 def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationRequest):
diff --git a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_entity_types.py b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_entity_types.py
index 0baeb7aad661..11e7f021161f 100644
--- a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_entity_types.py
+++ b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_entity_types.py
@@ -74,6 +74,13 @@
 from google.cloud.dialogflowcx_v3.types import entity_type as gcdc_entity_type
 from google.cloud.dialogflowcx_v3.types import inline

+CRED_INFO_JSON = {
+    "credential_source": "/path/to/file",
+    "credential_type": "service account credentials",
+    "principal": "service-account@example.com",
+}
+CRED_INFO_STRING = json.dumps(CRED_INFO_JSON)
+

 async def mock_async_gen(data, chunk_size=1):
     for i in range(0, len(data)):  # pragma: NO COVER
@@ -313,6 +320,49 @@ def test__get_universe_domain():
     assert str(excinfo.value) == "Universe Domain cannot be an empty string."
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = EntityTypesClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = EntityTypesClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -5431,10 +5481,13 @@ def test_get_entity_type_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.EntityTypesRestInterceptor, "post_get_entity_type" ) as post, mock.patch.object( + transports.EntityTypesRestInterceptor, "post_get_entity_type_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.EntityTypesRestInterceptor, "pre_get_entity_type" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = entity_type.GetEntityTypeRequest.pb( entity_type.GetEntityTypeRequest() ) @@ -5458,6 +5511,7 @@ def test_get_entity_type_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = entity_type.EntityType() + post_with_metadata.return_value = entity_type.EntityType(), metadata client.get_entity_type( request, @@ -5469,6 +5523,7 @@ def test_get_entity_type_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_entity_type_rest_bad_request( @@ -5645,10 +5700,13 @@ def test_create_entity_type_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.EntityTypesRestInterceptor, "post_create_entity_type" ) as post, mock.patch.object( + transports.EntityTypesRestInterceptor, "post_create_entity_type_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.EntityTypesRestInterceptor, "pre_create_entity_type" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gcdc_entity_type.CreateEntityTypeRequest.pb( gcdc_entity_type.CreateEntityTypeRequest() ) @@ -5674,6 +5732,7 @@ def test_create_entity_type_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gcdc_entity_type.EntityType() + post_with_metadata.return_value = gcdc_entity_type.EntityType(), metadata client.create_entity_type( request, @@ -5685,6 +5744,7 @@ def test_create_entity_type_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def 
test_update_entity_type_rest_bad_request( @@ -5869,10 +5929,13 @@ def test_update_entity_type_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.EntityTypesRestInterceptor, "post_update_entity_type" ) as post, mock.patch.object( + transports.EntityTypesRestInterceptor, "post_update_entity_type_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.EntityTypesRestInterceptor, "pre_update_entity_type" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gcdc_entity_type.UpdateEntityTypeRequest.pb( gcdc_entity_type.UpdateEntityTypeRequest() ) @@ -5898,6 +5961,7 @@ def test_update_entity_type_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gcdc_entity_type.EntityType() + post_with_metadata.return_value = gcdc_entity_type.EntityType(), metadata client.update_entity_type( request, @@ -5909,6 +5973,7 @@ def test_update_entity_type_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_entity_type_rest_bad_request( @@ -6106,10 +6171,13 @@ def test_list_entity_types_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.EntityTypesRestInterceptor, "post_list_entity_types" ) as post, mock.patch.object( + transports.EntityTypesRestInterceptor, "post_list_entity_types_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.EntityTypesRestInterceptor, "pre_list_entity_types" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = entity_type.ListEntityTypesRequest.pb( entity_type.ListEntityTypesRequest() ) @@ -6135,6 +6203,10 @@ def test_list_entity_types_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = entity_type.ListEntityTypesResponse() + post_with_metadata.return_value = ( + entity_type.ListEntityTypesResponse(), + metadata, + ) client.list_entity_types( request, @@ -6146,6 +6218,7 @@ def test_list_entity_types_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_export_entity_types_rest_bad_request( @@ -6226,10 +6299,13 @@ def test_export_entity_types_rest_interceptors(null_interceptor): ), mock.patch.object( transports.EntityTypesRestInterceptor, "post_export_entity_types" ) as post, mock.patch.object( + transports.EntityTypesRestInterceptor, "post_export_entity_types_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.EntityTypesRestInterceptor, "pre_export_entity_types" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = entity_type.ExportEntityTypesRequest.pb( entity_type.ExportEntityTypesRequest() ) @@ -6253,6 +6329,7 @@ def test_export_entity_types_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.export_entity_types( request, @@ -6264,6 +6341,7 @@ def test_export_entity_types_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_import_entity_types_rest_bad_request( @@ -6344,10 +6422,13 @@ def test_import_entity_types_rest_interceptors(null_interceptor): ), mock.patch.object( 
transports.EntityTypesRestInterceptor, "post_import_entity_types" ) as post, mock.patch.object( + transports.EntityTypesRestInterceptor, "post_import_entity_types_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.EntityTypesRestInterceptor, "pre_import_entity_types" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = entity_type.ImportEntityTypesRequest.pb( entity_type.ImportEntityTypesRequest() ) @@ -6371,6 +6452,7 @@ def test_import_entity_types_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.import_entity_types( request, @@ -6382,6 +6464,7 @@ def test_import_entity_types_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationRequest): diff --git a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_environments.py b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_environments.py index 9c7912bf4c11..66d5fcd65915 100644 --- a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_environments.py +++ b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_environments.py @@ -77,6 +77,13 @@ from google.cloud.dialogflowcx_v3.types import environment as gcdc_environment from google.cloud.dialogflowcx_v3.types import webhook +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -320,6 +327,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = EnvironmentsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = EnvironmentsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -7023,10 +7073,13 @@ def test_list_environments_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.EnvironmentsRestInterceptor, "post_list_environments" ) as post, mock.patch.object( + transports.EnvironmentsRestInterceptor, "post_list_environments_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.EnvironmentsRestInterceptor, "pre_list_environments" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = environment.ListEnvironmentsRequest.pb( environment.ListEnvironmentsRequest() ) @@ -7052,6 +7105,10 @@ def test_list_environments_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = environment.ListEnvironmentsResponse() + post_with_metadata.return_value = ( + environment.ListEnvironmentsResponse(), + metadata, + ) client.list_environments( request, @@ -7063,6 +7120,7 @@ def test_list_environments_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_environment_rest_bad_request( @@ -7155,10 +7213,13 @@ def test_get_environment_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.EnvironmentsRestInterceptor, "post_get_environment" ) as post, mock.patch.object( + transports.EnvironmentsRestInterceptor, "post_get_environment_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.EnvironmentsRestInterceptor, "pre_get_environment" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = environment.GetEnvironmentRequest.pb( environment.GetEnvironmentRequest() ) @@ -7182,6 +7243,7 @@ def test_get_environment_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = environment.Environment() + post_with_metadata.return_value = environment.Environment(), metadata client.get_environment( request, @@ -7193,6 +7255,7 @@ def test_get_environment_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def 
test_create_environment_rest_bad_request( @@ -7387,10 +7450,13 @@ def test_create_environment_rest_interceptors(null_interceptor): ), mock.patch.object( transports.EnvironmentsRestInterceptor, "post_create_environment" ) as post, mock.patch.object( + transports.EnvironmentsRestInterceptor, "post_create_environment_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.EnvironmentsRestInterceptor, "pre_create_environment" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gcdc_environment.CreateEnvironmentRequest.pb( gcdc_environment.CreateEnvironmentRequest() ) @@ -7414,6 +7480,7 @@ def test_create_environment_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_environment( request, @@ -7425,6 +7492,7 @@ def test_create_environment_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_environment_rest_bad_request( @@ -7627,10 +7695,13 @@ def test_update_environment_rest_interceptors(null_interceptor): ), mock.patch.object( transports.EnvironmentsRestInterceptor, "post_update_environment" ) as post, mock.patch.object( + transports.EnvironmentsRestInterceptor, "post_update_environment_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.EnvironmentsRestInterceptor, "pre_update_environment" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gcdc_environment.UpdateEnvironmentRequest.pb( gcdc_environment.UpdateEnvironmentRequest() ) @@ -7654,6 +7725,7 @@ def test_update_environment_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.update_environment( request, @@ -7665,6 +7737,7 @@ def test_update_environment_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_environment_rest_bad_request( @@ -7866,10 +7939,14 @@ def test_lookup_environment_history_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.EnvironmentsRestInterceptor, "post_lookup_environment_history" ) as post, mock.patch.object( + transports.EnvironmentsRestInterceptor, + "post_lookup_environment_history_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.EnvironmentsRestInterceptor, "pre_lookup_environment_history" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = environment.LookupEnvironmentHistoryRequest.pb( environment.LookupEnvironmentHistoryRequest() ) @@ -7895,6 +7972,10 @@ def test_lookup_environment_history_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = environment.LookupEnvironmentHistoryResponse() + post_with_metadata.return_value = ( + environment.LookupEnvironmentHistoryResponse(), + metadata, + ) client.lookup_environment_history( request, @@ -7906,6 +7987,7 @@ def test_lookup_environment_history_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_run_continuous_test_rest_bad_request( @@ -7990,10 +8072,13 @@ def 
test_run_continuous_test_rest_interceptors(null_interceptor): ), mock.patch.object( transports.EnvironmentsRestInterceptor, "post_run_continuous_test" ) as post, mock.patch.object( + transports.EnvironmentsRestInterceptor, "post_run_continuous_test_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.EnvironmentsRestInterceptor, "pre_run_continuous_test" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = environment.RunContinuousTestRequest.pb( environment.RunContinuousTestRequest() ) @@ -8017,6 +8102,7 @@ def test_run_continuous_test_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.run_continuous_test( request, @@ -8028,6 +8114,7 @@ def test_run_continuous_test_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_continuous_test_results_rest_bad_request( @@ -8116,10 +8203,14 @@ def test_list_continuous_test_results_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.EnvironmentsRestInterceptor, "post_list_continuous_test_results" ) as post, mock.patch.object( + transports.EnvironmentsRestInterceptor, + "post_list_continuous_test_results_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.EnvironmentsRestInterceptor, "pre_list_continuous_test_results" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = environment.ListContinuousTestResultsRequest.pb( environment.ListContinuousTestResultsRequest() ) @@ -8145,6 +8236,10 @@ def test_list_continuous_test_results_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = environment.ListContinuousTestResultsResponse() + post_with_metadata.return_value = ( + environment.ListContinuousTestResultsResponse(), + metadata, + ) client.list_continuous_test_results( request, @@ -8156,6 +8251,7 @@ def test_list_continuous_test_results_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_deploy_flow_rest_bad_request(request_type=environment.DeployFlowRequest): @@ -8238,10 +8334,13 @@ def test_deploy_flow_rest_interceptors(null_interceptor): ), mock.patch.object( transports.EnvironmentsRestInterceptor, "post_deploy_flow" ) as post, mock.patch.object( + transports.EnvironmentsRestInterceptor, "post_deploy_flow_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.EnvironmentsRestInterceptor, "pre_deploy_flow" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = environment.DeployFlowRequest.pb(environment.DeployFlowRequest()) transcode.return_value = { "method": "post", @@ -8263,6 +8362,7 @@ def test_deploy_flow_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.deploy_flow( request, @@ -8274,6 +8374,7 @@ def test_deploy_flow_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationRequest): diff --git 
a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_experiments.py b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_experiments.py index d449a8ba8ba8..96553f404571 100644 --- a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_experiments.py +++ b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_experiments.py @@ -66,6 +66,13 @@ from google.cloud.dialogflowcx_v3.types import experiment from google.cloud.dialogflowcx_v3.types import experiment as gcdc_experiment +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -305,6 +312,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = ExperimentsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = ExperimentsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -5572,10 +5622,13 @@ def test_list_experiments_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ExperimentsRestInterceptor, "post_list_experiments" ) as post, mock.patch.object( + transports.ExperimentsRestInterceptor, "post_list_experiments_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ExperimentsRestInterceptor, "pre_list_experiments" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = experiment.ListExperimentsRequest.pb( experiment.ListExperimentsRequest() ) @@ -5601,6 +5654,7 @@ def test_list_experiments_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = experiment.ListExperimentsResponse() + post_with_metadata.return_value = experiment.ListExperimentsResponse(), metadata client.list_experiments( request, @@ -5612,6 +5666,7 @@ def test_list_experiments_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_experiment_rest_bad_request(request_type=experiment.GetExperimentRequest): @@ -5706,10 +5761,13 @@ def 
test_get_experiment_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ExperimentsRestInterceptor, "post_get_experiment" ) as post, mock.patch.object( + transports.ExperimentsRestInterceptor, "post_get_experiment_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ExperimentsRestInterceptor, "pre_get_experiment" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = experiment.GetExperimentRequest.pb( experiment.GetExperimentRequest() ) @@ -5733,6 +5791,7 @@ def test_get_experiment_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = experiment.Experiment() + post_with_metadata.return_value = experiment.Experiment(), metadata client.get_experiment( request, @@ -5744,6 +5803,7 @@ def test_get_experiment_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_experiment_rest_bad_request( @@ -5971,10 +6031,13 @@ def test_create_experiment_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ExperimentsRestInterceptor, "post_create_experiment" ) as post, mock.patch.object( + transports.ExperimentsRestInterceptor, "post_create_experiment_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ExperimentsRestInterceptor, "pre_create_experiment" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gcdc_experiment.CreateExperimentRequest.pb( gcdc_experiment.CreateExperimentRequest() ) @@ -5998,6 +6061,7 @@ def test_create_experiment_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gcdc_experiment.Experiment() + post_with_metadata.return_value = gcdc_experiment.Experiment(), metadata client.create_experiment( request, @@ -6009,6 +6073,7 @@ def test_create_experiment_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_experiment_rest_bad_request( @@ -6240,10 +6305,13 @@ def test_update_experiment_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ExperimentsRestInterceptor, "post_update_experiment" ) as post, mock.patch.object( + transports.ExperimentsRestInterceptor, "post_update_experiment_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ExperimentsRestInterceptor, "pre_update_experiment" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gcdc_experiment.UpdateExperimentRequest.pb( gcdc_experiment.UpdateExperimentRequest() ) @@ -6267,6 +6335,7 @@ def test_update_experiment_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gcdc_experiment.Experiment() + post_with_metadata.return_value = gcdc_experiment.Experiment(), metadata client.update_experiment( request, @@ -6278,6 +6347,7 @@ def test_update_experiment_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_experiment_rest_bad_request( @@ -6487,10 +6557,13 @@ def test_start_experiment_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ExperimentsRestInterceptor, "post_start_experiment" ) as post, mock.patch.object( + transports.ExperimentsRestInterceptor, 
"post_start_experiment_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ExperimentsRestInterceptor, "pre_start_experiment" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = experiment.StartExperimentRequest.pb( experiment.StartExperimentRequest() ) @@ -6514,6 +6587,7 @@ def test_start_experiment_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = experiment.Experiment() + post_with_metadata.return_value = experiment.Experiment(), metadata client.start_experiment( request, @@ -6525,6 +6599,7 @@ def test_start_experiment_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_stop_experiment_rest_bad_request( @@ -6621,10 +6696,13 @@ def test_stop_experiment_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ExperimentsRestInterceptor, "post_stop_experiment" ) as post, mock.patch.object( + transports.ExperimentsRestInterceptor, "post_stop_experiment_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ExperimentsRestInterceptor, "pre_stop_experiment" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = experiment.StopExperimentRequest.pb( experiment.StopExperimentRequest() ) @@ -6648,6 +6726,7 @@ def test_stop_experiment_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = experiment.Experiment() + post_with_metadata.return_value = experiment.Experiment(), metadata client.stop_experiment( request, @@ -6659,6 +6738,7 @@ def test_stop_experiment_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationRequest): diff --git a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_flows.py b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_flows.py index 4de6779d70c5..2fd2b1639881 100644 --- a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_flows.py +++ b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_flows.py @@ -86,6 +86,13 @@ from google.cloud.dialogflowcx_v3.types import flow from google.cloud.dialogflowcx_v3.types import flow as gcdc_flow +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -304,6 +311,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = FlowsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = FlowsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -6900,10 +6950,13 @@ def test_create_flow_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.FlowsRestInterceptor, "post_create_flow" ) as post, mock.patch.object( + transports.FlowsRestInterceptor, "post_create_flow_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.FlowsRestInterceptor, "pre_create_flow" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gcdc_flow.CreateFlowRequest.pb(gcdc_flow.CreateFlowRequest()) transcode.return_value = { "method": "post", @@ -6925,6 +6978,7 @@ def test_create_flow_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gcdc_flow.Flow() + post_with_metadata.return_value = gcdc_flow.Flow(), metadata client.create_flow( request, @@ -6936,6 +6990,7 @@ def test_create_flow_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_flow_rest_bad_request(request_type=flow.DeleteFlowRequest): @@ -7123,10 +7178,13 @@ def test_list_flows_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.FlowsRestInterceptor, "post_list_flows" ) as post, mock.patch.object( + transports.FlowsRestInterceptor, "post_list_flows_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.FlowsRestInterceptor, "pre_list_flows" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = flow.ListFlowsRequest.pb(flow.ListFlowsRequest()) transcode.return_value = { "method": "post", @@ -7148,6 +7206,7 @@ def test_list_flows_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = flow.ListFlowsResponse() + post_with_metadata.return_value = flow.ListFlowsResponse(), metadata client.list_flows( request, @@ -7159,6 +7218,7 @@ def test_list_flows_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_flow_rest_bad_request(request_type=flow.GetFlowRequest): @@ -7251,10 +7311,13 @@ def 
test_get_flow_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.FlowsRestInterceptor, "post_get_flow" ) as post, mock.patch.object( + transports.FlowsRestInterceptor, "post_get_flow_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.FlowsRestInterceptor, "pre_get_flow" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = flow.GetFlowRequest.pb(flow.GetFlowRequest()) transcode.return_value = { "method": "post", @@ -7276,6 +7339,7 @@ def test_get_flow_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = flow.Flow() + post_with_metadata.return_value = flow.Flow(), metadata client.get_flow( request, @@ -7287,6 +7351,7 @@ def test_get_flow_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_flow_rest_bad_request(request_type=gcdc_flow.UpdateFlowRequest): @@ -7589,10 +7654,13 @@ def test_update_flow_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.FlowsRestInterceptor, "post_update_flow" ) as post, mock.patch.object( + transports.FlowsRestInterceptor, "post_update_flow_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.FlowsRestInterceptor, "pre_update_flow" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gcdc_flow.UpdateFlowRequest.pb(gcdc_flow.UpdateFlowRequest()) transcode.return_value = { "method": "post", @@ -7614,6 +7682,7 @@ def test_update_flow_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gcdc_flow.Flow() + post_with_metadata.return_value = gcdc_flow.Flow(), metadata client.update_flow( request, @@ -7625,6 +7694,7 @@ def test_update_flow_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_train_flow_rest_bad_request(request_type=flow.TrainFlowRequest): @@ -7705,10 +7775,13 @@ def test_train_flow_rest_interceptors(null_interceptor): ), mock.patch.object( transports.FlowsRestInterceptor, "post_train_flow" ) as post, mock.patch.object( + transports.FlowsRestInterceptor, "post_train_flow_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.FlowsRestInterceptor, "pre_train_flow" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = flow.TrainFlowRequest.pb(flow.TrainFlowRequest()) transcode.return_value = { "method": "post", @@ -7730,6 +7803,7 @@ def test_train_flow_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.train_flow( request, @@ -7741,6 +7815,7 @@ def test_train_flow_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_validate_flow_rest_bad_request(request_type=flow.ValidateFlowRequest): @@ -7825,10 +7900,13 @@ def test_validate_flow_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.FlowsRestInterceptor, "post_validate_flow" ) as post, mock.patch.object( + transports.FlowsRestInterceptor, "post_validate_flow_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.FlowsRestInterceptor, "pre_validate_flow" ) as pre: 
pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = flow.ValidateFlowRequest.pb(flow.ValidateFlowRequest()) transcode.return_value = { "method": "post", @@ -7850,6 +7928,7 @@ def test_validate_flow_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = flow.FlowValidationResult() + post_with_metadata.return_value = flow.FlowValidationResult(), metadata client.validate_flow( request, @@ -7861,6 +7940,7 @@ def test_validate_flow_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_flow_validation_result_rest_bad_request( @@ -7947,10 +8027,13 @@ def test_get_flow_validation_result_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.FlowsRestInterceptor, "post_get_flow_validation_result" ) as post, mock.patch.object( + transports.FlowsRestInterceptor, "post_get_flow_validation_result_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.FlowsRestInterceptor, "pre_get_flow_validation_result" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = flow.GetFlowValidationResultRequest.pb( flow.GetFlowValidationResultRequest() ) @@ -7974,6 +8057,7 @@ def test_get_flow_validation_result_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = flow.FlowValidationResult() + post_with_metadata.return_value = flow.FlowValidationResult(), metadata client.get_flow_validation_result( request, @@ -7985,6 +8069,7 @@ def test_get_flow_validation_result_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_import_flow_rest_bad_request(request_type=flow.ImportFlowRequest): @@ -8061,10 +8146,13 @@ def test_import_flow_rest_interceptors(null_interceptor): ), mock.patch.object( transports.FlowsRestInterceptor, "post_import_flow" ) as post, mock.patch.object( + transports.FlowsRestInterceptor, "post_import_flow_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.FlowsRestInterceptor, "pre_import_flow" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = flow.ImportFlowRequest.pb(flow.ImportFlowRequest()) transcode.return_value = { "method": "post", @@ -8086,6 +8174,7 @@ def test_import_flow_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.import_flow( request, @@ -8097,6 +8186,7 @@ def test_import_flow_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_export_flow_rest_bad_request(request_type=flow.ExportFlowRequest): @@ -8177,10 +8267,13 @@ def test_export_flow_rest_interceptors(null_interceptor): ), mock.patch.object( transports.FlowsRestInterceptor, "post_export_flow" ) as post, mock.patch.object( + transports.FlowsRestInterceptor, "post_export_flow_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.FlowsRestInterceptor, "pre_export_flow" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = flow.ExportFlowRequest.pb(flow.ExportFlowRequest()) transcode.return_value = { "method": "post", @@ -8202,6 +8295,7 @@ def 
test_export_flow_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.export_flow( request, @@ -8213,6 +8307,7 @@ def test_export_flow_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationRequest): diff --git a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_generators.py b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_generators.py index 9b1d756bf30c..3c7337162dce 100644 --- a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_generators.py +++ b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_generators.py @@ -64,6 +64,13 @@ from google.cloud.dialogflowcx_v3.types import generator from google.cloud.dialogflowcx_v3.types import generator as gcdc_generator +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -299,6 +306,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = GeneratorsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = GeneratorsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -4316,10 +4366,13 @@ def test_list_generators_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.GeneratorsRestInterceptor, "post_list_generators" ) as post, mock.patch.object( + transports.GeneratorsRestInterceptor, "post_list_generators_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.GeneratorsRestInterceptor, "pre_list_generators" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = generator.ListGeneratorsRequest.pb( generator.ListGeneratorsRequest() ) @@ -4345,6 +4398,7 @@ def test_list_generators_rest_interceptors(null_interceptor): ] pre.return_value = 
request, metadata post.return_value = generator.ListGeneratorsResponse() + post_with_metadata.return_value = generator.ListGeneratorsResponse(), metadata client.list_generators( request, @@ -4356,6 +4410,7 @@ def test_list_generators_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_generator_rest_bad_request(request_type=generator.GetGeneratorRequest): @@ -4444,10 +4499,13 @@ def test_get_generator_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.GeneratorsRestInterceptor, "post_get_generator" ) as post, mock.patch.object( + transports.GeneratorsRestInterceptor, "post_get_generator_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.GeneratorsRestInterceptor, "pre_get_generator" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = generator.GetGeneratorRequest.pb(generator.GetGeneratorRequest()) transcode.return_value = { "method": "post", @@ -4469,6 +4527,7 @@ def test_get_generator_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = generator.Generator() + post_with_metadata.return_value = generator.Generator(), metadata client.get_generator( request, @@ -4480,6 +4539,7 @@ def test_get_generator_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_generator_rest_bad_request( @@ -4645,10 +4705,13 @@ def test_create_generator_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.GeneratorsRestInterceptor, "post_create_generator" ) as post, mock.patch.object( + transports.GeneratorsRestInterceptor, "post_create_generator_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.GeneratorsRestInterceptor, "pre_create_generator" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gcdc_generator.CreateGeneratorRequest.pb( gcdc_generator.CreateGeneratorRequest() ) @@ -4672,6 +4735,7 @@ def test_create_generator_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gcdc_generator.Generator() + post_with_metadata.return_value = gcdc_generator.Generator(), metadata client.create_generator( request, @@ -4683,6 +4747,7 @@ def test_create_generator_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_generator_rest_bad_request( @@ -4856,10 +4921,13 @@ def test_update_generator_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.GeneratorsRestInterceptor, "post_update_generator" ) as post, mock.patch.object( + transports.GeneratorsRestInterceptor, "post_update_generator_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.GeneratorsRestInterceptor, "pre_update_generator" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gcdc_generator.UpdateGeneratorRequest.pb( gcdc_generator.UpdateGeneratorRequest() ) @@ -4883,6 +4951,7 @@ def test_update_generator_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gcdc_generator.Generator() + post_with_metadata.return_value = gcdc_generator.Generator(), metadata client.update_generator( request, @@ -4894,6 +4963,7 @@ def 
test_update_generator_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_generator_rest_bad_request( diff --git a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_intents.py b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_intents.py index 11f348f04901..47a0e7f4e886 100644 --- a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_intents.py +++ b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_intents.py @@ -74,6 +74,13 @@ from google.cloud.dialogflowcx_v3.types import intent from google.cloud.dialogflowcx_v3.types import intent as gcdc_intent +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -297,6 +304,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = IntentsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = IntentsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -5155,10 +5205,13 @@ def test_list_intents_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.IntentsRestInterceptor, "post_list_intents" ) as post, mock.patch.object( + transports.IntentsRestInterceptor, "post_list_intents_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.IntentsRestInterceptor, "pre_list_intents" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = intent.ListIntentsRequest.pb(intent.ListIntentsRequest()) transcode.return_value = { "method": "post", @@ -5180,6 +5233,7 @@ def test_list_intents_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = intent.ListIntentsResponse() + post_with_metadata.return_value = intent.ListIntentsResponse(), metadata client.list_intents( request, @@ -5191,6 +5245,7 @@ def test_list_intents_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + 
post_with_metadata.assert_called_once() def test_get_intent_rest_bad_request(request_type=intent.GetIntentRequest): @@ -5283,10 +5338,13 @@ def test_get_intent_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.IntentsRestInterceptor, "post_get_intent" ) as post, mock.patch.object( + transports.IntentsRestInterceptor, "post_get_intent_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.IntentsRestInterceptor, "pre_get_intent" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = intent.GetIntentRequest.pb(intent.GetIntentRequest()) transcode.return_value = { "method": "post", @@ -5308,6 +5366,7 @@ def test_get_intent_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = intent.Intent() + post_with_metadata.return_value = intent.Intent(), metadata client.get_intent( request, @@ -5319,6 +5378,7 @@ def test_get_intent_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_intent_rest_bad_request(request_type=gcdc_intent.CreateIntentRequest): @@ -5497,10 +5557,13 @@ def test_create_intent_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.IntentsRestInterceptor, "post_create_intent" ) as post, mock.patch.object( + transports.IntentsRestInterceptor, "post_create_intent_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.IntentsRestInterceptor, "pre_create_intent" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gcdc_intent.CreateIntentRequest.pb( gcdc_intent.CreateIntentRequest() ) @@ -5524,6 +5587,7 @@ def test_create_intent_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gcdc_intent.Intent() + post_with_metadata.return_value = gcdc_intent.Intent(), metadata client.create_intent( request, @@ -5535,6 +5599,7 @@ def test_create_intent_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_intent_rest_bad_request(request_type=gcdc_intent.UpdateIntentRequest): @@ -5721,10 +5786,13 @@ def test_update_intent_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.IntentsRestInterceptor, "post_update_intent" ) as post, mock.patch.object( + transports.IntentsRestInterceptor, "post_update_intent_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.IntentsRestInterceptor, "pre_update_intent" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gcdc_intent.UpdateIntentRequest.pb( gcdc_intent.UpdateIntentRequest() ) @@ -5748,6 +5816,7 @@ def test_update_intent_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gcdc_intent.Intent() + post_with_metadata.return_value = gcdc_intent.Intent(), metadata client.update_intent( request, @@ -5759,6 +5828,7 @@ def test_update_intent_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_intent_rest_bad_request(request_type=intent.DeleteIntentRequest): @@ -5942,10 +6012,13 @@ def test_import_intents_rest_interceptors(null_interceptor): ), mock.patch.object( transports.IntentsRestInterceptor, "post_import_intents" ) as post, 
mock.patch.object( + transports.IntentsRestInterceptor, "post_import_intents_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.IntentsRestInterceptor, "pre_import_intents" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = intent.ImportIntentsRequest.pb(intent.ImportIntentsRequest()) transcode.return_value = { "method": "post", @@ -5967,6 +6040,7 @@ def test_import_intents_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.import_intents( request, @@ -5978,6 +6052,7 @@ def test_import_intents_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_export_intents_rest_bad_request(request_type=intent.ExportIntentsRequest): @@ -6054,10 +6129,13 @@ def test_export_intents_rest_interceptors(null_interceptor): ), mock.patch.object( transports.IntentsRestInterceptor, "post_export_intents" ) as post, mock.patch.object( + transports.IntentsRestInterceptor, "post_export_intents_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.IntentsRestInterceptor, "pre_export_intents" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = intent.ExportIntentsRequest.pb(intent.ExportIntentsRequest()) transcode.return_value = { "method": "post", @@ -6079,6 +6157,7 @@ def test_export_intents_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.export_intents( request, @@ -6090,6 +6169,7 @@ def test_export_intents_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationRequest): diff --git a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_pages.py b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_pages.py index 56d3d8be60ee..7125dd9038b0 100644 --- a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_pages.py +++ b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_pages.py @@ -73,6 +73,13 @@ from google.cloud.dialogflowcx_v3.types import page as gcdc_page from google.cloud.dialogflowcx_v3.types import response_message +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -291,6 +298,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
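The test__add_cred_info_for_auth_errors blocks this change adds to each service module (the next hunk adds the PagesClient copy) all exercise the same client behavior: when a call fails with 401, 403, or 404 and the credentials object exposes get_cred_info(), the JSON-serialized credential info is appended to the error's details; any other status code, or credentials without that method, leave details untouched. The following is a rough standalone sketch of that behavior, not the generated client method; the free-function shape and the assignment to the private _details attribute are assumptions made for illustration.

import json

from google.api_core import exceptions as core_exceptions


def add_cred_info_for_auth_errors(credentials, error: core_exceptions.GoogleAPICallError) -> None:
    # Sketch of the behavior under test, not the generated helper itself.
    # Only auth-shaped errors (401/403/404) are decorated.
    if error.code not in (401, 403, 404):
        return
    # Credential types without get_cred_info() are skipped entirely.
    get_cred_info = getattr(credentials, "get_cred_info", None)
    if get_cred_info is None:
        return
    cred_info = get_cred_info()
    if cred_info is None:
        return
    # The public `details` property may return a copy, so rebuild the backing
    # list rather than appending in place (assumption about api_core internals).
    error._details = list(error.details) + [json.dumps(cred_info)]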
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = PagesClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = PagesClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -4300,10 +4350,13 @@ def test_list_pages_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.PagesRestInterceptor, "post_list_pages" ) as post, mock.patch.object( + transports.PagesRestInterceptor, "post_list_pages_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.PagesRestInterceptor, "pre_list_pages" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = page.ListPagesRequest.pb(page.ListPagesRequest()) transcode.return_value = { "method": "post", @@ -4325,6 +4378,7 @@ def test_list_pages_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = page.ListPagesResponse() + post_with_metadata.return_value = page.ListPagesResponse(), metadata client.list_pages( request, @@ -4336,6 +4390,7 @@ def test_list_pages_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_page_rest_bad_request(request_type=page.GetPageRequest): @@ -4426,10 +4481,13 @@ def test_get_page_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.PagesRestInterceptor, "post_get_page" ) as post, mock.patch.object( + transports.PagesRestInterceptor, "post_get_page_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.PagesRestInterceptor, "pre_get_page" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = page.GetPageRequest.pb(page.GetPageRequest()) transcode.return_value = { "method": "post", @@ -4451,6 +4509,7 @@ def test_get_page_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = page.Page() + post_with_metadata.return_value = page.Page(), metadata client.get_page( request, @@ -4462,6 +4521,7 @@ def test_get_page_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_page_rest_bad_request(request_type=gcdc_page.CreatePageRequest): @@ -4759,10 +4819,13 @@ def test_create_page_rest_interceptors(null_interceptor): ) as 
transcode, mock.patch.object( transports.PagesRestInterceptor, "post_create_page" ) as post, mock.patch.object( + transports.PagesRestInterceptor, "post_create_page_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.PagesRestInterceptor, "pre_create_page" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gcdc_page.CreatePageRequest.pb(gcdc_page.CreatePageRequest()) transcode.return_value = { "method": "post", @@ -4784,6 +4847,7 @@ def test_create_page_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gcdc_page.Page() + post_with_metadata.return_value = gcdc_page.Page(), metadata client.create_page( request, @@ -4795,6 +4859,7 @@ def test_create_page_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_page_rest_bad_request(request_type=gcdc_page.UpdatePageRequest): @@ -5096,10 +5161,13 @@ def test_update_page_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.PagesRestInterceptor, "post_update_page" ) as post, mock.patch.object( + transports.PagesRestInterceptor, "post_update_page_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.PagesRestInterceptor, "pre_update_page" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gcdc_page.UpdatePageRequest.pb(gcdc_page.UpdatePageRequest()) transcode.return_value = { "method": "post", @@ -5121,6 +5189,7 @@ def test_update_page_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gcdc_page.Page() + post_with_metadata.return_value = gcdc_page.Page(), metadata client.update_page( request, @@ -5132,6 +5201,7 @@ def test_update_page_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_page_rest_bad_request(request_type=page.DeletePageRequest): diff --git a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_security_settings_service.py b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_security_settings_service.py index 30662aba9886..ac34dba19ade 100644 --- a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_security_settings_service.py +++ b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_security_settings_service.py @@ -66,6 +66,13 @@ ) from google.cloud.dialogflowcx_v3.types import security_settings +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -341,6 +348,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
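Alongside the credential-info tests, the interceptor tests above now patch a post_<rpc>_with_metadata hook next to the existing post_<rpc> hook and expect it to return the parsed response together with the response metadata. A minimal sketch of a custom interceptor overriding one of these hooks follows; the class name, the print statement, the simplified metadata type, and the commented-out transport wiring are illustrative assumptions, not the generated signatures.

from typing import Sequence, Tuple

from google.cloud.dialogflowcx_v3.services.pages import PagesClient, transports
from google.cloud.dialogflowcx_v3.types import page


class MetadataLoggingPagesInterceptor(transports.PagesRestInterceptor):
    """Illustrative interceptor: inspect response metadata, pass everything through."""

    def post_list_pages_with_metadata(
        self,
        response: page.ListPagesResponse,
        metadata: Sequence[Tuple[str, str]],
    ) -> Tuple[page.ListPagesResponse, Sequence[Tuple[str, str]]]:
        # Unlike post_list_pages, this hook also receives the response metadata
        # and must hand both values back for the transport to keep using them.
        print(f"list_pages returned with {len(metadata)} metadata entries")
        return response, metadata


# Wiring the interceptor into a client (credentials/host omitted for brevity):
# transport = transports.PagesRestTransport(interceptor=MetadataLoggingPagesInterceptor())
# client = PagesClient(transport=transport)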
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = SecuritySettingsServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = SecuritySettingsServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -4833,11 +4883,15 @@ def test_create_security_settings_rest_interceptors(null_interceptor): transports.SecuritySettingsServiceRestInterceptor, "post_create_security_settings", ) as post, mock.patch.object( + transports.SecuritySettingsServiceRestInterceptor, + "post_create_security_settings_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.SecuritySettingsServiceRestInterceptor, "pre_create_security_settings", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gcdc_security_settings.CreateSecuritySettingsRequest.pb( gcdc_security_settings.CreateSecuritySettingsRequest() ) @@ -4863,6 +4917,10 @@ def test_create_security_settings_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gcdc_security_settings.SecuritySettings() + post_with_metadata.return_value = ( + gcdc_security_settings.SecuritySettings(), + metadata, + ) client.create_security_settings( request, @@ -4874,6 +4932,7 @@ def test_create_security_settings_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_security_settings_rest_bad_request( @@ -4985,10 +5044,14 @@ def test_get_security_settings_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.SecuritySettingsServiceRestInterceptor, "post_get_security_settings" ) as post, mock.patch.object( + transports.SecuritySettingsServiceRestInterceptor, + "post_get_security_settings_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.SecuritySettingsServiceRestInterceptor, "pre_get_security_settings" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = security_settings.GetSecuritySettingsRequest.pb( security_settings.GetSecuritySettingsRequest() ) @@ -5014,6 +5077,7 @@ def test_get_security_settings_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = security_settings.SecuritySettings() + post_with_metadata.return_value = 
security_settings.SecuritySettings(), metadata client.get_security_settings( request, @@ -5025,6 +5089,7 @@ def test_get_security_settings_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_security_settings_rest_bad_request( @@ -5229,11 +5294,15 @@ def test_update_security_settings_rest_interceptors(null_interceptor): transports.SecuritySettingsServiceRestInterceptor, "post_update_security_settings", ) as post, mock.patch.object( + transports.SecuritySettingsServiceRestInterceptor, + "post_update_security_settings_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.SecuritySettingsServiceRestInterceptor, "pre_update_security_settings", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gcdc_security_settings.UpdateSecuritySettingsRequest.pb( gcdc_security_settings.UpdateSecuritySettingsRequest() ) @@ -5259,6 +5328,10 @@ def test_update_security_settings_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gcdc_security_settings.SecuritySettings() + post_with_metadata.return_value = ( + gcdc_security_settings.SecuritySettings(), + metadata, + ) client.update_security_settings( request, @@ -5270,6 +5343,7 @@ def test_update_security_settings_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_security_settings_rest_bad_request( @@ -5354,10 +5428,14 @@ def test_list_security_settings_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.SecuritySettingsServiceRestInterceptor, "post_list_security_settings" ) as post, mock.patch.object( + transports.SecuritySettingsServiceRestInterceptor, + "post_list_security_settings_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.SecuritySettingsServiceRestInterceptor, "pre_list_security_settings" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = security_settings.ListSecuritySettingsRequest.pb( security_settings.ListSecuritySettingsRequest() ) @@ -5383,6 +5461,10 @@ def test_list_security_settings_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = security_settings.ListSecuritySettingsResponse() + post_with_metadata.return_value = ( + security_settings.ListSecuritySettingsResponse(), + metadata, + ) client.list_security_settings( request, @@ -5394,6 +5476,7 @@ def test_list_security_settings_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_security_settings_rest_bad_request( diff --git a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_session_entity_types.py b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_session_entity_types.py index 904e42c1e1b6..2b77743432e1 100644 --- a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_session_entity_types.py +++ b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_session_entity_types.py @@ -67,6 +67,13 @@ from google.cloud.dialogflowcx_v3.types import entity_type from google.cloud.dialogflowcx_v3.types import session_entity_type +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": 
"service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -338,6 +345,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = SessionEntityTypesClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = SessionEntityTypesClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -4589,10 +4639,14 @@ def test_list_session_entity_types_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.SessionEntityTypesRestInterceptor, "post_list_session_entity_types" ) as post, mock.patch.object( + transports.SessionEntityTypesRestInterceptor, + "post_list_session_entity_types_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.SessionEntityTypesRestInterceptor, "pre_list_session_entity_types" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = session_entity_type.ListSessionEntityTypesRequest.pb( session_entity_type.ListSessionEntityTypesRequest() ) @@ -4618,6 +4672,10 @@ def test_list_session_entity_types_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = session_entity_type.ListSessionEntityTypesResponse() + post_with_metadata.return_value = ( + session_entity_type.ListSessionEntityTypesResponse(), + metadata, + ) client.list_session_entity_types( request, @@ -4629,6 +4687,7 @@ def test_list_session_entity_types_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_session_entity_type_rest_bad_request( @@ -4722,10 +4781,14 @@ def test_get_session_entity_type_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.SessionEntityTypesRestInterceptor, "post_get_session_entity_type" ) as post, mock.patch.object( + transports.SessionEntityTypesRestInterceptor, + "post_get_session_entity_type_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.SessionEntityTypesRestInterceptor, "pre_get_session_entity_type" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = 
session_entity_type.GetSessionEntityTypeRequest.pb( session_entity_type.GetSessionEntityTypeRequest() ) @@ -4751,6 +4814,10 @@ def test_get_session_entity_type_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = session_entity_type.SessionEntityType() + post_with_metadata.return_value = ( + session_entity_type.SessionEntityType(), + metadata, + ) client.get_session_entity_type( request, @@ -4762,6 +4829,7 @@ def test_get_session_entity_type_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_session_entity_type_rest_bad_request( @@ -4931,10 +4999,14 @@ def test_create_session_entity_type_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.SessionEntityTypesRestInterceptor, "post_create_session_entity_type" ) as post, mock.patch.object( + transports.SessionEntityTypesRestInterceptor, + "post_create_session_entity_type_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.SessionEntityTypesRestInterceptor, "pre_create_session_entity_type" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gcdc_session_entity_type.CreateSessionEntityTypeRequest.pb( gcdc_session_entity_type.CreateSessionEntityTypeRequest() ) @@ -4960,6 +5032,10 @@ def test_create_session_entity_type_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gcdc_session_entity_type.SessionEntityType() + post_with_metadata.return_value = ( + gcdc_session_entity_type.SessionEntityType(), + metadata, + ) client.create_session_entity_type( request, @@ -4971,6 +5047,7 @@ def test_create_session_entity_type_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_session_entity_type_rest_bad_request( @@ -5144,10 +5221,14 @@ def test_update_session_entity_type_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.SessionEntityTypesRestInterceptor, "post_update_session_entity_type" ) as post, mock.patch.object( + transports.SessionEntityTypesRestInterceptor, + "post_update_session_entity_type_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.SessionEntityTypesRestInterceptor, "pre_update_session_entity_type" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gcdc_session_entity_type.UpdateSessionEntityTypeRequest.pb( gcdc_session_entity_type.UpdateSessionEntityTypeRequest() ) @@ -5173,6 +5254,10 @@ def test_update_session_entity_type_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gcdc_session_entity_type.SessionEntityType() + post_with_metadata.return_value = ( + gcdc_session_entity_type.SessionEntityType(), + metadata, + ) client.update_session_entity_type( request, @@ -5184,6 +5269,7 @@ def test_update_session_entity_type_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_session_entity_type_rest_bad_request( diff --git a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_sessions.py b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_sessions.py index 20647d02f39b..631ab308b1f8 100644 --- 
a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_sessions.py +++ b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_sessions.py @@ -72,6 +72,13 @@ session_entity_type, ) +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -302,6 +309,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = SessionsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = SessionsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -3481,10 +3531,13 @@ def test_detect_intent_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.SessionsRestInterceptor, "post_detect_intent" ) as post, mock.patch.object( + transports.SessionsRestInterceptor, "post_detect_intent_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.SessionsRestInterceptor, "pre_detect_intent" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = session.DetectIntentRequest.pb(session.DetectIntentRequest()) transcode.return_value = { "method": "post", @@ -3508,6 +3561,7 @@ def test_detect_intent_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = session.DetectIntentResponse() + post_with_metadata.return_value = session.DetectIntentResponse(), metadata client.detect_intent( request, @@ -3519,6 +3573,7 @@ def test_detect_intent_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_server_streaming_detect_intent_rest_bad_request( @@ -3615,10 +3670,14 @@ def test_server_streaming_detect_intent_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.SessionsRestInterceptor, "post_server_streaming_detect_intent" ) as post, mock.patch.object( + transports.SessionsRestInterceptor, + "post_server_streaming_detect_intent_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.SessionsRestInterceptor, 
"pre_server_streaming_detect_intent" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = session.DetectIntentRequest.pb(session.DetectIntentRequest()) transcode.return_value = { "method": "post", @@ -3642,6 +3701,7 @@ def test_server_streaming_detect_intent_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = session.DetectIntentResponse() + post_with_metadata.return_value = session.DetectIntentResponse(), metadata client.server_streaming_detect_intent( request, @@ -3653,6 +3713,7 @@ def test_server_streaming_detect_intent_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_streaming_detect_intent_rest_error(): @@ -3748,10 +3809,13 @@ def test_match_intent_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.SessionsRestInterceptor, "post_match_intent" ) as post, mock.patch.object( + transports.SessionsRestInterceptor, "post_match_intent_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.SessionsRestInterceptor, "pre_match_intent" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = session.MatchIntentRequest.pb(session.MatchIntentRequest()) transcode.return_value = { "method": "post", @@ -3775,6 +3839,7 @@ def test_match_intent_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = session.MatchIntentResponse() + post_with_metadata.return_value = session.MatchIntentResponse(), metadata client.match_intent( request, @@ -3786,6 +3851,7 @@ def test_match_intent_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_fulfill_intent_rest_bad_request(request_type=session.FulfillIntentRequest): @@ -3876,10 +3942,13 @@ def test_fulfill_intent_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.SessionsRestInterceptor, "post_fulfill_intent" ) as post, mock.patch.object( + transports.SessionsRestInterceptor, "post_fulfill_intent_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.SessionsRestInterceptor, "pre_fulfill_intent" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = session.FulfillIntentRequest.pb(session.FulfillIntentRequest()) transcode.return_value = { "method": "post", @@ -3903,6 +3972,7 @@ def test_fulfill_intent_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = session.FulfillIntentResponse() + post_with_metadata.return_value = session.FulfillIntentResponse(), metadata client.fulfill_intent( request, @@ -3914,6 +3984,7 @@ def test_fulfill_intent_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_submit_answer_feedback_rest_bad_request( @@ -4002,10 +4073,13 @@ def test_submit_answer_feedback_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.SessionsRestInterceptor, "post_submit_answer_feedback" ) as post, mock.patch.object( + transports.SessionsRestInterceptor, "post_submit_answer_feedback_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.SessionsRestInterceptor, "pre_submit_answer_feedback" ) as pre: pre.assert_not_called() post.assert_not_called() + 
post_with_metadata.assert_not_called() pb_message = session.SubmitAnswerFeedbackRequest.pb( session.SubmitAnswerFeedbackRequest() ) @@ -4029,6 +4103,7 @@ def test_submit_answer_feedback_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = session.AnswerFeedback() + post_with_metadata.return_value = session.AnswerFeedback(), metadata client.submit_answer_feedback( request, @@ -4040,6 +4115,7 @@ def test_submit_answer_feedback_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationRequest): diff --git a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_test_cases.py b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_test_cases.py index e073f1ac6722..be283d09ced4 100644 --- a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_test_cases.py +++ b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_test_cases.py @@ -89,6 +89,13 @@ from google.cloud.dialogflowcx_v3.types import test_case from google.cloud.dialogflowcx_v3.types import test_case as gcdc_test_case +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -322,6 +329,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = TestCasesClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = TestCasesClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -8006,10 +8056,13 @@ def test_list_test_cases_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.TestCasesRestInterceptor, "post_list_test_cases" ) as post, mock.patch.object( + transports.TestCasesRestInterceptor, "post_list_test_cases_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.TestCasesRestInterceptor, "pre_list_test_cases" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message 
= test_case.ListTestCasesRequest.pb(test_case.ListTestCasesRequest()) transcode.return_value = { "method": "post", @@ -8033,6 +8086,7 @@ def test_list_test_cases_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = test_case.ListTestCasesResponse() + post_with_metadata.return_value = test_case.ListTestCasesResponse(), metadata client.list_test_cases( request, @@ -8044,6 +8098,7 @@ def test_list_test_cases_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_batch_delete_test_cases_rest_bad_request( @@ -8241,10 +8296,13 @@ def test_get_test_case_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.TestCasesRestInterceptor, "post_get_test_case" ) as post, mock.patch.object( + transports.TestCasesRestInterceptor, "post_get_test_case_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.TestCasesRestInterceptor, "pre_get_test_case" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = test_case.GetTestCaseRequest.pb(test_case.GetTestCaseRequest()) transcode.return_value = { "method": "post", @@ -8266,6 +8324,7 @@ def test_get_test_case_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = test_case.TestCase() + post_with_metadata.return_value = test_case.TestCase(), metadata client.get_test_case( request, @@ -8277,6 +8336,7 @@ def test_get_test_case_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_test_case_rest_bad_request( @@ -8684,10 +8744,13 @@ def test_create_test_case_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.TestCasesRestInterceptor, "post_create_test_case" ) as post, mock.patch.object( + transports.TestCasesRestInterceptor, "post_create_test_case_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.TestCasesRestInterceptor, "pre_create_test_case" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gcdc_test_case.CreateTestCaseRequest.pb( gcdc_test_case.CreateTestCaseRequest() ) @@ -8711,6 +8774,7 @@ def test_create_test_case_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gcdc_test_case.TestCase() + post_with_metadata.return_value = gcdc_test_case.TestCase(), metadata client.create_test_case( request, @@ -8722,6 +8786,7 @@ def test_create_test_case_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_test_case_rest_bad_request( @@ -9137,10 +9202,13 @@ def test_update_test_case_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.TestCasesRestInterceptor, "post_update_test_case" ) as post, mock.patch.object( + transports.TestCasesRestInterceptor, "post_update_test_case_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.TestCasesRestInterceptor, "pre_update_test_case" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gcdc_test_case.UpdateTestCaseRequest.pb( gcdc_test_case.UpdateTestCaseRequest() ) @@ -9164,6 +9232,7 @@ def test_update_test_case_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = 
gcdc_test_case.TestCase() + post_with_metadata.return_value = gcdc_test_case.TestCase(), metadata client.update_test_case( request, @@ -9175,6 +9244,7 @@ def test_update_test_case_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_run_test_case_rest_bad_request(request_type=test_case.RunTestCaseRequest): @@ -9255,10 +9325,13 @@ def test_run_test_case_rest_interceptors(null_interceptor): ), mock.patch.object( transports.TestCasesRestInterceptor, "post_run_test_case" ) as post, mock.patch.object( + transports.TestCasesRestInterceptor, "post_run_test_case_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.TestCasesRestInterceptor, "pre_run_test_case" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = test_case.RunTestCaseRequest.pb(test_case.RunTestCaseRequest()) transcode.return_value = { "method": "post", @@ -9280,6 +9353,7 @@ def test_run_test_case_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.run_test_case( request, @@ -9291,6 +9365,7 @@ def test_run_test_case_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_batch_run_test_cases_rest_bad_request( @@ -9369,10 +9444,13 @@ def test_batch_run_test_cases_rest_interceptors(null_interceptor): ), mock.patch.object( transports.TestCasesRestInterceptor, "post_batch_run_test_cases" ) as post, mock.patch.object( + transports.TestCasesRestInterceptor, "post_batch_run_test_cases_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.TestCasesRestInterceptor, "pre_batch_run_test_cases" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = test_case.BatchRunTestCasesRequest.pb( test_case.BatchRunTestCasesRequest() ) @@ -9396,6 +9474,7 @@ def test_batch_run_test_cases_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.batch_run_test_cases( request, @@ -9407,6 +9486,7 @@ def test_batch_run_test_cases_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_calculate_coverage_rest_bad_request( @@ -9489,10 +9569,13 @@ def test_calculate_coverage_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.TestCasesRestInterceptor, "post_calculate_coverage" ) as post, mock.patch.object( + transports.TestCasesRestInterceptor, "post_calculate_coverage_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.TestCasesRestInterceptor, "pre_calculate_coverage" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = test_case.CalculateCoverageRequest.pb( test_case.CalculateCoverageRequest() ) @@ -9518,6 +9601,10 @@ def test_calculate_coverage_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = test_case.CalculateCoverageResponse() + post_with_metadata.return_value = ( + test_case.CalculateCoverageResponse(), + metadata, + ) client.calculate_coverage( request, @@ -9529,6 +9616,7 @@ def 
test_calculate_coverage_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_import_test_cases_rest_bad_request( @@ -9607,10 +9695,13 @@ def test_import_test_cases_rest_interceptors(null_interceptor): ), mock.patch.object( transports.TestCasesRestInterceptor, "post_import_test_cases" ) as post, mock.patch.object( + transports.TestCasesRestInterceptor, "post_import_test_cases_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.TestCasesRestInterceptor, "pre_import_test_cases" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = test_case.ImportTestCasesRequest.pb( test_case.ImportTestCasesRequest() ) @@ -9634,6 +9725,7 @@ def test_import_test_cases_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.import_test_cases( request, @@ -9645,6 +9737,7 @@ def test_import_test_cases_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_export_test_cases_rest_bad_request( @@ -9723,10 +9816,13 @@ def test_export_test_cases_rest_interceptors(null_interceptor): ), mock.patch.object( transports.TestCasesRestInterceptor, "post_export_test_cases" ) as post, mock.patch.object( + transports.TestCasesRestInterceptor, "post_export_test_cases_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.TestCasesRestInterceptor, "pre_export_test_cases" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = test_case.ExportTestCasesRequest.pb( test_case.ExportTestCasesRequest() ) @@ -9750,6 +9846,7 @@ def test_export_test_cases_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.export_test_cases( request, @@ -9761,6 +9858,7 @@ def test_export_test_cases_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_test_case_results_rest_bad_request( @@ -9847,10 +9945,13 @@ def test_list_test_case_results_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.TestCasesRestInterceptor, "post_list_test_case_results" ) as post, mock.patch.object( + transports.TestCasesRestInterceptor, "post_list_test_case_results_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.TestCasesRestInterceptor, "pre_list_test_case_results" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = test_case.ListTestCaseResultsRequest.pb( test_case.ListTestCaseResultsRequest() ) @@ -9876,6 +9977,10 @@ def test_list_test_case_results_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = test_case.ListTestCaseResultsResponse() + post_with_metadata.return_value = ( + test_case.ListTestCaseResultsResponse(), + metadata, + ) client.list_test_case_results( request, @@ -9887,6 +9992,7 @@ def test_list_test_case_results_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_test_case_result_rest_bad_request( @@ -9977,10 
+10083,13 @@ def test_get_test_case_result_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.TestCasesRestInterceptor, "post_get_test_case_result" ) as post, mock.patch.object( + transports.TestCasesRestInterceptor, "post_get_test_case_result_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.TestCasesRestInterceptor, "pre_get_test_case_result" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = test_case.GetTestCaseResultRequest.pb( test_case.GetTestCaseResultRequest() ) @@ -10004,6 +10113,7 @@ def test_get_test_case_result_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = test_case.TestCaseResult() + post_with_metadata.return_value = test_case.TestCaseResult(), metadata client.get_test_case_result( request, @@ -10015,6 +10125,7 @@ def test_get_test_case_result_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationRequest): diff --git a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_transition_route_groups.py b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_transition_route_groups.py index 57bb7a0d3e07..74cb07719332 100644 --- a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_transition_route_groups.py +++ b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_transition_route_groups.py @@ -75,6 +75,13 @@ ) from google.cloud.dialogflowcx_v3.types import transition_route_group +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -350,6 +357,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
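From application code, the effect these tests lock in is that auth failures surface the appended credential info through exc.details. A hedged usage sketch follows; the client choice, resource name, and the JSON-detection loop are illustrative only.

import json

from google.api_core import exceptions as core_exceptions
from google.cloud import dialogflowcx_v3

client = dialogflowcx_v3.TransitionRouteGroupsClient()
try:
    client.list_transition_route_groups(
        parent="projects/my-project/locations/global/agents/my-agent/flows/my-flow"
    )
except core_exceptions.GoogleAPICallError as exc:
    # With this change, a 401/403/404 carries the credential info the client
    # appended, serialized as a JSON string alongside server-provided details.
    for detail in exc.details:
        if isinstance(detail, str):
            try:
                print("credential info:", json.loads(detail))
            except ValueError:
                pass  # not the appended credential-info entry
    raise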
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = TransitionRouteGroupsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = TransitionRouteGroupsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -4642,11 +4692,15 @@ def test_list_transition_route_groups_rest_interceptors(null_interceptor): transports.TransitionRouteGroupsRestInterceptor, "post_list_transition_route_groups", ) as post, mock.patch.object( + transports.TransitionRouteGroupsRestInterceptor, + "post_list_transition_route_groups_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.TransitionRouteGroupsRestInterceptor, "pre_list_transition_route_groups", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = transition_route_group.ListTransitionRouteGroupsRequest.pb( transition_route_group.ListTransitionRouteGroupsRequest() ) @@ -4672,6 +4726,10 @@ def test_list_transition_route_groups_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = transition_route_group.ListTransitionRouteGroupsResponse() + post_with_metadata.return_value = ( + transition_route_group.ListTransitionRouteGroupsResponse(), + metadata, + ) client.list_transition_route_groups( request, @@ -4683,6 +4741,7 @@ def test_list_transition_route_groups_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_transition_route_group_rest_bad_request( @@ -4774,11 +4833,15 @@ def test_get_transition_route_group_rest_interceptors(null_interceptor): transports.TransitionRouteGroupsRestInterceptor, "post_get_transition_route_group", ) as post, mock.patch.object( + transports.TransitionRouteGroupsRestInterceptor, + "post_get_transition_route_group_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.TransitionRouteGroupsRestInterceptor, "pre_get_transition_route_group", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = transition_route_group.GetTransitionRouteGroupRequest.pb( transition_route_group.GetTransitionRouteGroupRequest() ) @@ -4804,6 +4867,10 @@ def test_get_transition_route_group_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = 
transition_route_group.TransitionRouteGroup() + post_with_metadata.return_value = ( + transition_route_group.TransitionRouteGroup(), + metadata, + ) client.get_transition_route_group( request, @@ -4815,6 +4882,7 @@ def test_get_transition_route_group_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_transition_route_group_rest_bad_request( @@ -5081,11 +5149,15 @@ def test_create_transition_route_group_rest_interceptors(null_interceptor): transports.TransitionRouteGroupsRestInterceptor, "post_create_transition_route_group", ) as post, mock.patch.object( + transports.TransitionRouteGroupsRestInterceptor, + "post_create_transition_route_group_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.TransitionRouteGroupsRestInterceptor, "pre_create_transition_route_group", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gcdc_transition_route_group.CreateTransitionRouteGroupRequest.pb( gcdc_transition_route_group.CreateTransitionRouteGroupRequest() ) @@ -5111,6 +5183,10 @@ def test_create_transition_route_group_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gcdc_transition_route_group.TransitionRouteGroup() + post_with_metadata.return_value = ( + gcdc_transition_route_group.TransitionRouteGroup(), + metadata, + ) client.create_transition_route_group( request, @@ -5122,6 +5198,7 @@ def test_create_transition_route_group_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_transition_route_group_rest_bad_request( @@ -5392,11 +5469,15 @@ def test_update_transition_route_group_rest_interceptors(null_interceptor): transports.TransitionRouteGroupsRestInterceptor, "post_update_transition_route_group", ) as post, mock.patch.object( + transports.TransitionRouteGroupsRestInterceptor, + "post_update_transition_route_group_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.TransitionRouteGroupsRestInterceptor, "pre_update_transition_route_group", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gcdc_transition_route_group.UpdateTransitionRouteGroupRequest.pb( gcdc_transition_route_group.UpdateTransitionRouteGroupRequest() ) @@ -5422,6 +5503,10 @@ def test_update_transition_route_group_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gcdc_transition_route_group.TransitionRouteGroup() + post_with_metadata.return_value = ( + gcdc_transition_route_group.TransitionRouteGroup(), + metadata, + ) client.update_transition_route_group( request, @@ -5433,6 +5518,7 @@ def test_update_transition_route_group_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_transition_route_group_rest_bad_request( diff --git a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_versions.py b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_versions.py index 21f6ee5dd0b8..b783a10a120f 100644 --- a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_versions.py +++ b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_versions.py @@ -77,6 +77,13 @@ from google.cloud.dialogflowcx_v3.types import version from 
google.cloud.dialogflowcx_v3.types import version as gcdc_version +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -307,6 +314,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = VersionsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = VersionsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -5427,10 +5477,13 @@ def test_list_versions_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.VersionsRestInterceptor, "post_list_versions" ) as post, mock.patch.object( + transports.VersionsRestInterceptor, "post_list_versions_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.VersionsRestInterceptor, "pre_list_versions" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = version.ListVersionsRequest.pb(version.ListVersionsRequest()) transcode.return_value = { "method": "post", @@ -5454,6 +5507,7 @@ def test_list_versions_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = version.ListVersionsResponse() + post_with_metadata.return_value = version.ListVersionsResponse(), metadata client.list_versions( request, @@ -5465,6 +5519,7 @@ def test_list_versions_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_version_rest_bad_request(request_type=version.GetVersionRequest): @@ -5555,10 +5610,13 @@ def test_get_version_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.VersionsRestInterceptor, "post_get_version" ) as post, mock.patch.object( + transports.VersionsRestInterceptor, "post_get_version_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.VersionsRestInterceptor, "pre_get_version" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = version.GetVersionRequest.pb(version.GetVersionRequest()) 
transcode.return_value = { "method": "post", @@ -5580,6 +5638,7 @@ def test_get_version_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = version.Version() + post_with_metadata.return_value = version.Version(), metadata client.get_version( request, @@ -5591,6 +5650,7 @@ def test_get_version_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_version_rest_bad_request( @@ -5752,10 +5812,13 @@ def test_create_version_rest_interceptors(null_interceptor): ), mock.patch.object( transports.VersionsRestInterceptor, "post_create_version" ) as post, mock.patch.object( + transports.VersionsRestInterceptor, "post_create_version_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.VersionsRestInterceptor, "pre_create_version" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gcdc_version.CreateVersionRequest.pb( gcdc_version.CreateVersionRequest() ) @@ -5779,6 +5842,7 @@ def test_create_version_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_version( request, @@ -5790,6 +5854,7 @@ def test_create_version_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_version_rest_bad_request( @@ -5965,10 +6030,13 @@ def test_update_version_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.VersionsRestInterceptor, "post_update_version" ) as post, mock.patch.object( + transports.VersionsRestInterceptor, "post_update_version_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.VersionsRestInterceptor, "pre_update_version" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gcdc_version.UpdateVersionRequest.pb( gcdc_version.UpdateVersionRequest() ) @@ -5992,6 +6060,7 @@ def test_update_version_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gcdc_version.Version() + post_with_metadata.return_value = gcdc_version.Version(), metadata client.update_version( request, @@ -6003,6 +6072,7 @@ def test_update_version_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_version_rest_bad_request(request_type=version.DeleteVersionRequest): @@ -6190,10 +6260,13 @@ def test_load_version_rest_interceptors(null_interceptor): ), mock.patch.object( transports.VersionsRestInterceptor, "post_load_version" ) as post, mock.patch.object( + transports.VersionsRestInterceptor, "post_load_version_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.VersionsRestInterceptor, "pre_load_version" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = version.LoadVersionRequest.pb(version.LoadVersionRequest()) transcode.return_value = { "method": "post", @@ -6215,6 +6288,7 @@ def test_load_version_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.load_version( request, @@ -6226,6 +6300,7 @@ def 
test_load_version_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_compare_versions_rest_bad_request(request_type=version.CompareVersionsRequest): @@ -6312,10 +6387,13 @@ def test_compare_versions_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.VersionsRestInterceptor, "post_compare_versions" ) as post, mock.patch.object( + transports.VersionsRestInterceptor, "post_compare_versions_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.VersionsRestInterceptor, "pre_compare_versions" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = version.CompareVersionsRequest.pb(version.CompareVersionsRequest()) transcode.return_value = { "method": "post", @@ -6339,6 +6417,7 @@ def test_compare_versions_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = version.CompareVersionsResponse() + post_with_metadata.return_value = version.CompareVersionsResponse(), metadata client.compare_versions( request, @@ -6350,6 +6429,7 @@ def test_compare_versions_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationRequest): diff --git a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_webhooks.py b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_webhooks.py index 0773790513b0..807133723757 100644 --- a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_webhooks.py +++ b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3/test_webhooks.py @@ -65,6 +65,13 @@ from google.cloud.dialogflowcx_v3.types import webhook from google.cloud.dialogflowcx_v3.types import webhook as gcdc_webhook +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -295,6 +302,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = WebhooksClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = WebhooksClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -4270,10 +4320,13 @@ def test_list_webhooks_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.WebhooksRestInterceptor, "post_list_webhooks" ) as post, mock.patch.object( + transports.WebhooksRestInterceptor, "post_list_webhooks_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.WebhooksRestInterceptor, "pre_list_webhooks" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = webhook.ListWebhooksRequest.pb(webhook.ListWebhooksRequest()) transcode.return_value = { "method": "post", @@ -4297,6 +4350,7 @@ def test_list_webhooks_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = webhook.ListWebhooksResponse() + post_with_metadata.return_value = webhook.ListWebhooksResponse(), metadata client.list_webhooks( request, @@ -4308,6 +4362,7 @@ def test_list_webhooks_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_webhook_rest_bad_request(request_type=webhook.GetWebhookRequest): @@ -4396,10 +4451,13 @@ def test_get_webhook_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.WebhooksRestInterceptor, "post_get_webhook" ) as post, mock.patch.object( + transports.WebhooksRestInterceptor, "post_get_webhook_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.WebhooksRestInterceptor, "pre_get_webhook" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = webhook.GetWebhookRequest.pb(webhook.GetWebhookRequest()) transcode.return_value = { "method": "post", @@ -4421,6 +4479,7 @@ def test_get_webhook_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = webhook.Webhook() + post_with_metadata.return_value = webhook.Webhook(), metadata client.get_webhook( request, @@ -4432,6 +4491,7 @@ def test_get_webhook_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_webhook_rest_bad_request( @@ 
-4610,10 +4670,13 @@ def test_create_webhook_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.WebhooksRestInterceptor, "post_create_webhook" ) as post, mock.patch.object( + transports.WebhooksRestInterceptor, "post_create_webhook_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.WebhooksRestInterceptor, "pre_create_webhook" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gcdc_webhook.CreateWebhookRequest.pb( gcdc_webhook.CreateWebhookRequest() ) @@ -4637,6 +4700,7 @@ def test_create_webhook_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gcdc_webhook.Webhook() + post_with_metadata.return_value = gcdc_webhook.Webhook(), metadata client.create_webhook( request, @@ -4648,6 +4712,7 @@ def test_create_webhook_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_webhook_rest_bad_request( @@ -4834,10 +4899,13 @@ def test_update_webhook_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.WebhooksRestInterceptor, "post_update_webhook" ) as post, mock.patch.object( + transports.WebhooksRestInterceptor, "post_update_webhook_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.WebhooksRestInterceptor, "pre_update_webhook" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gcdc_webhook.UpdateWebhookRequest.pb( gcdc_webhook.UpdateWebhookRequest() ) @@ -4861,6 +4929,7 @@ def test_update_webhook_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gcdc_webhook.Webhook() + post_with_metadata.return_value = gcdc_webhook.Webhook(), metadata client.update_webhook( request, @@ -4872,6 +4941,7 @@ def test_update_webhook_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_webhook_rest_bad_request(request_type=webhook.DeleteWebhookRequest): diff --git a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_agents.py b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_agents.py index c1eaf397da79..e73021a6adb2 100644 --- a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_agents.py +++ b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_agents.py @@ -88,6 +88,13 @@ from google.cloud.dialogflowcx_v3beta1.types import generative_settings from google.cloud.dialogflowcx_v3beta1.types import safety_settings +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -310,6 +317,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = AgentsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = AgentsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -7443,10 +7493,13 @@ def test_list_agents_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AgentsRestInterceptor, "post_list_agents" ) as post, mock.patch.object( + transports.AgentsRestInterceptor, "post_list_agents_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AgentsRestInterceptor, "pre_list_agents" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = agent.ListAgentsRequest.pb(agent.ListAgentsRequest()) transcode.return_value = { "method": "post", @@ -7468,6 +7521,7 @@ def test_list_agents_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = agent.ListAgentsResponse() + post_with_metadata.return_value = agent.ListAgentsResponse(), metadata client.list_agents( request, @@ -7479,6 +7533,7 @@ def test_list_agents_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_agent_rest_bad_request(request_type=agent.GetAgentRequest): @@ -7582,10 +7637,13 @@ def test_get_agent_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AgentsRestInterceptor, "post_get_agent" ) as post, mock.patch.object( + transports.AgentsRestInterceptor, "post_get_agent_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AgentsRestInterceptor, "pre_get_agent" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = agent.GetAgentRequest.pb(agent.GetAgentRequest()) transcode.return_value = { "method": "post", @@ -7607,6 +7665,7 @@ def test_get_agent_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = agent.Agent() + post_with_metadata.return_value = agent.Agent(), metadata client.get_agent( request, @@ -7618,6 +7677,7 @@ def test_get_agent_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_agent_rest_bad_request(request_type=gcdc_agent.CreateAgentRequest): @@ -7851,10 +7911,13 @@ def 
test_create_agent_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AgentsRestInterceptor, "post_create_agent" ) as post, mock.patch.object( + transports.AgentsRestInterceptor, "post_create_agent_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AgentsRestInterceptor, "pre_create_agent" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gcdc_agent.CreateAgentRequest.pb(gcdc_agent.CreateAgentRequest()) transcode.return_value = { "method": "post", @@ -7876,6 +7939,7 @@ def test_create_agent_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gcdc_agent.Agent() + post_with_metadata.return_value = gcdc_agent.Agent(), metadata client.create_agent( request, @@ -7887,6 +7951,7 @@ def test_create_agent_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_agent_rest_bad_request(request_type=gcdc_agent.UpdateAgentRequest): @@ -8124,10 +8189,13 @@ def test_update_agent_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AgentsRestInterceptor, "post_update_agent" ) as post, mock.patch.object( + transports.AgentsRestInterceptor, "post_update_agent_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AgentsRestInterceptor, "pre_update_agent" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gcdc_agent.UpdateAgentRequest.pb(gcdc_agent.UpdateAgentRequest()) transcode.return_value = { "method": "post", @@ -8149,6 +8217,7 @@ def test_update_agent_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gcdc_agent.Agent() + post_with_metadata.return_value = gcdc_agent.Agent(), metadata client.update_agent( request, @@ -8160,6 +8229,7 @@ def test_update_agent_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_agent_rest_bad_request(request_type=agent.DeleteAgentRequest): @@ -8339,10 +8409,13 @@ def test_export_agent_rest_interceptors(null_interceptor): ), mock.patch.object( transports.AgentsRestInterceptor, "post_export_agent" ) as post, mock.patch.object( + transports.AgentsRestInterceptor, "post_export_agent_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AgentsRestInterceptor, "pre_export_agent" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = agent.ExportAgentRequest.pb(agent.ExportAgentRequest()) transcode.return_value = { "method": "post", @@ -8364,6 +8437,7 @@ def test_export_agent_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.export_agent( request, @@ -8375,6 +8449,7 @@ def test_export_agent_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_restore_agent_rest_bad_request(request_type=agent.RestoreAgentRequest): @@ -8451,10 +8526,13 @@ def test_restore_agent_rest_interceptors(null_interceptor): ), mock.patch.object( transports.AgentsRestInterceptor, "post_restore_agent" ) as post, mock.patch.object( + transports.AgentsRestInterceptor, "post_restore_agent_with_metadata" + ) as 
post_with_metadata, mock.patch.object( transports.AgentsRestInterceptor, "pre_restore_agent" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = agent.RestoreAgentRequest.pb(agent.RestoreAgentRequest()) transcode.return_value = { "method": "post", @@ -8476,6 +8554,7 @@ def test_restore_agent_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.restore_agent( request, @@ -8487,6 +8566,7 @@ def test_restore_agent_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_validate_agent_rest_bad_request(request_type=agent.ValidateAgentRequest): @@ -8567,10 +8647,13 @@ def test_validate_agent_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AgentsRestInterceptor, "post_validate_agent" ) as post, mock.patch.object( + transports.AgentsRestInterceptor, "post_validate_agent_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AgentsRestInterceptor, "pre_validate_agent" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = agent.ValidateAgentRequest.pb(agent.ValidateAgentRequest()) transcode.return_value = { "method": "post", @@ -8594,6 +8677,7 @@ def test_validate_agent_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = agent.AgentValidationResult() + post_with_metadata.return_value = agent.AgentValidationResult(), metadata client.validate_agent( request, @@ -8605,6 +8689,7 @@ def test_validate_agent_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_agent_validation_result_rest_bad_request( @@ -8691,10 +8776,14 @@ def test_get_agent_validation_result_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AgentsRestInterceptor, "post_get_agent_validation_result" ) as post, mock.patch.object( + transports.AgentsRestInterceptor, + "post_get_agent_validation_result_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AgentsRestInterceptor, "pre_get_agent_validation_result" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = agent.GetAgentValidationResultRequest.pb( agent.GetAgentValidationResultRequest() ) @@ -8720,6 +8809,7 @@ def test_get_agent_validation_result_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = agent.AgentValidationResult() + post_with_metadata.return_value = agent.AgentValidationResult(), metadata client.get_agent_validation_result( request, @@ -8731,6 +8821,7 @@ def test_get_agent_validation_result_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_generative_settings_rest_bad_request( @@ -8819,10 +8910,13 @@ def test_get_generative_settings_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AgentsRestInterceptor, "post_get_generative_settings" ) as post, mock.patch.object( + transports.AgentsRestInterceptor, "post_get_generative_settings_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AgentsRestInterceptor, "pre_get_generative_settings" ) as pre: 
pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = agent.GetGenerativeSettingsRequest.pb( agent.GetGenerativeSettingsRequest() ) @@ -8848,6 +8942,10 @@ def test_get_generative_settings_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = generative_settings.GenerativeSettings() + post_with_metadata.return_value = ( + generative_settings.GenerativeSettings(), + metadata, + ) client.get_generative_settings( request, @@ -8859,6 +8957,7 @@ def test_get_generative_settings_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_generative_settings_rest_bad_request( @@ -9052,10 +9151,14 @@ def test_update_generative_settings_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AgentsRestInterceptor, "post_update_generative_settings" ) as post, mock.patch.object( + transports.AgentsRestInterceptor, + "post_update_generative_settings_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AgentsRestInterceptor, "pre_update_generative_settings" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = agent.UpdateGenerativeSettingsRequest.pb( agent.UpdateGenerativeSettingsRequest() ) @@ -9081,6 +9184,10 @@ def test_update_generative_settings_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gcdc_generative_settings.GenerativeSettings() + post_with_metadata.return_value = ( + gcdc_generative_settings.GenerativeSettings(), + metadata, + ) client.update_generative_settings( request, @@ -9092,6 +9199,7 @@ def test_update_generative_settings_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationRequest): diff --git a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_changelogs.py b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_changelogs.py index aa2e69efc579..0ba413c8d85f 100644 --- a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_changelogs.py +++ b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_changelogs.py @@ -63,6 +63,13 @@ ) from google.cloud.dialogflowcx_v3beta1.types import changelog +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -298,6 +305,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = ChangelogsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = ChangelogsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -2632,10 +2682,13 @@ def test_list_changelogs_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ChangelogsRestInterceptor, "post_list_changelogs" ) as post, mock.patch.object( + transports.ChangelogsRestInterceptor, "post_list_changelogs_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ChangelogsRestInterceptor, "pre_list_changelogs" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = changelog.ListChangelogsRequest.pb( changelog.ListChangelogsRequest() ) @@ -2661,6 +2714,7 @@ def test_list_changelogs_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = changelog.ListChangelogsResponse() + post_with_metadata.return_value = changelog.ListChangelogsResponse(), metadata client.list_changelogs( request, @@ -2672,6 +2726,7 @@ def test_list_changelogs_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_changelog_rest_bad_request(request_type=changelog.GetChangelogRequest): @@ -2770,10 +2825,13 @@ def test_get_changelog_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ChangelogsRestInterceptor, "post_get_changelog" ) as post, mock.patch.object( + transports.ChangelogsRestInterceptor, "post_get_changelog_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ChangelogsRestInterceptor, "pre_get_changelog" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = changelog.GetChangelogRequest.pb(changelog.GetChangelogRequest()) transcode.return_value = { "method": "post", @@ -2795,6 +2853,7 @@ def test_get_changelog_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = changelog.Changelog() + post_with_metadata.return_value = changelog.Changelog(), metadata client.get_changelog( request, @@ -2806,6 +2865,7 @@ def test_get_changelog_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def 
test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationRequest): diff --git a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_conversation_history.py b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_conversation_history.py index e8fefb13e582..c1e64b724c7b 100644 --- a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_conversation_history.py +++ b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_conversation_history.py @@ -70,6 +70,13 @@ page, ) +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -343,6 +350,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = ConversationHistoryClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = ConversationHistoryClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -3329,10 +3379,14 @@ def test_list_conversations_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ConversationHistoryRestInterceptor, "post_list_conversations" ) as post, mock.patch.object( + transports.ConversationHistoryRestInterceptor, + "post_list_conversations_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ConversationHistoryRestInterceptor, "pre_list_conversations" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = conversation_history.ListConversationsRequest.pb( conversation_history.ListConversationsRequest() ) @@ -3358,6 +3412,10 @@ def test_list_conversations_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = conversation_history.ListConversationsResponse() + post_with_metadata.return_value = ( + conversation_history.ListConversationsResponse(), + metadata, + ) client.list_conversations( request, @@ -3369,6 +3427,7 @@ def test_list_conversations_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + 
post_with_metadata.assert_called_once() def test_get_conversation_rest_bad_request( @@ -3461,10 +3520,14 @@ def test_get_conversation_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ConversationHistoryRestInterceptor, "post_get_conversation" ) as post, mock.patch.object( + transports.ConversationHistoryRestInterceptor, + "post_get_conversation_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ConversationHistoryRestInterceptor, "pre_get_conversation" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = conversation_history.GetConversationRequest.pb( conversation_history.GetConversationRequest() ) @@ -3490,6 +3553,7 @@ def test_get_conversation_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = conversation_history.Conversation() + post_with_metadata.return_value = conversation_history.Conversation(), metadata client.get_conversation( request, @@ -3501,6 +3565,7 @@ def test_get_conversation_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_conversation_rest_bad_request( diff --git a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_deployments.py b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_deployments.py index b21e561e6c08..fe03ea53f1da 100644 --- a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_deployments.py +++ b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_deployments.py @@ -63,6 +63,13 @@ ) from google.cloud.dialogflowcx_v3beta1.types import deployment +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -302,6 +309,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = DeploymentsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = DeploymentsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -2634,10 +2684,13 @@ def test_list_deployments_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DeploymentsRestInterceptor, "post_list_deployments" ) as post, mock.patch.object( + transports.DeploymentsRestInterceptor, "post_list_deployments_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DeploymentsRestInterceptor, "pre_list_deployments" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = deployment.ListDeploymentsRequest.pb( deployment.ListDeploymentsRequest() ) @@ -2663,6 +2716,7 @@ def test_list_deployments_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = deployment.ListDeploymentsResponse() + post_with_metadata.return_value = deployment.ListDeploymentsResponse(), metadata client.list_deployments( request, @@ -2674,6 +2728,7 @@ def test_list_deployments_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_deployment_rest_bad_request(request_type=deployment.GetDeploymentRequest): @@ -2764,10 +2819,13 @@ def test_get_deployment_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DeploymentsRestInterceptor, "post_get_deployment" ) as post, mock.patch.object( + transports.DeploymentsRestInterceptor, "post_get_deployment_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DeploymentsRestInterceptor, "pre_get_deployment" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = deployment.GetDeploymentRequest.pb( deployment.GetDeploymentRequest() ) @@ -2791,6 +2849,7 @@ def test_get_deployment_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = deployment.Deployment() + post_with_metadata.return_value = deployment.Deployment(), metadata client.get_deployment( request, @@ -2802,6 +2861,7 @@ def test_get_deployment_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def 
test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationRequest): diff --git a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_entity_types.py b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_entity_types.py index 85d85f8be9f6..72aafda702a9 100644 --- a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_entity_types.py +++ b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_entity_types.py @@ -74,6 +74,13 @@ from google.cloud.dialogflowcx_v3beta1.types import entity_type from google.cloud.dialogflowcx_v3beta1.types import inline +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -313,6 +320,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = EntityTypesClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = EntityTypesClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -5431,10 +5481,13 @@ def test_get_entity_type_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.EntityTypesRestInterceptor, "post_get_entity_type" ) as post, mock.patch.object( + transports.EntityTypesRestInterceptor, "post_get_entity_type_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.EntityTypesRestInterceptor, "pre_get_entity_type" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = entity_type.GetEntityTypeRequest.pb( entity_type.GetEntityTypeRequest() ) @@ -5458,6 +5511,7 @@ def test_get_entity_type_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = entity_type.EntityType() + post_with_metadata.return_value = entity_type.EntityType(), metadata client.get_entity_type( request, @@ -5469,6 +5523,7 @@ def test_get_entity_type_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_entity_type_rest_bad_request( @@ 
-5645,10 +5700,13 @@ def test_create_entity_type_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.EntityTypesRestInterceptor, "post_create_entity_type" ) as post, mock.patch.object( + transports.EntityTypesRestInterceptor, "post_create_entity_type_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.EntityTypesRestInterceptor, "pre_create_entity_type" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gcdc_entity_type.CreateEntityTypeRequest.pb( gcdc_entity_type.CreateEntityTypeRequest() ) @@ -5674,6 +5732,7 @@ def test_create_entity_type_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gcdc_entity_type.EntityType() + post_with_metadata.return_value = gcdc_entity_type.EntityType(), metadata client.create_entity_type( request, @@ -5685,6 +5744,7 @@ def test_create_entity_type_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_entity_type_rest_bad_request( @@ -5869,10 +5929,13 @@ def test_update_entity_type_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.EntityTypesRestInterceptor, "post_update_entity_type" ) as post, mock.patch.object( + transports.EntityTypesRestInterceptor, "post_update_entity_type_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.EntityTypesRestInterceptor, "pre_update_entity_type" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gcdc_entity_type.UpdateEntityTypeRequest.pb( gcdc_entity_type.UpdateEntityTypeRequest() ) @@ -5898,6 +5961,7 @@ def test_update_entity_type_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gcdc_entity_type.EntityType() + post_with_metadata.return_value = gcdc_entity_type.EntityType(), metadata client.update_entity_type( request, @@ -5909,6 +5973,7 @@ def test_update_entity_type_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_entity_type_rest_bad_request( @@ -6106,10 +6171,13 @@ def test_list_entity_types_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.EntityTypesRestInterceptor, "post_list_entity_types" ) as post, mock.patch.object( + transports.EntityTypesRestInterceptor, "post_list_entity_types_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.EntityTypesRestInterceptor, "pre_list_entity_types" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = entity_type.ListEntityTypesRequest.pb( entity_type.ListEntityTypesRequest() ) @@ -6135,6 +6203,10 @@ def test_list_entity_types_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = entity_type.ListEntityTypesResponse() + post_with_metadata.return_value = ( + entity_type.ListEntityTypesResponse(), + metadata, + ) client.list_entity_types( request, @@ -6146,6 +6218,7 @@ def test_list_entity_types_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_export_entity_types_rest_bad_request( @@ -6226,10 +6299,13 @@ def test_export_entity_types_rest_interceptors(null_interceptor): ), mock.patch.object( transports.EntityTypesRestInterceptor, 
"post_export_entity_types" ) as post, mock.patch.object( + transports.EntityTypesRestInterceptor, "post_export_entity_types_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.EntityTypesRestInterceptor, "pre_export_entity_types" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = entity_type.ExportEntityTypesRequest.pb( entity_type.ExportEntityTypesRequest() ) @@ -6253,6 +6329,7 @@ def test_export_entity_types_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.export_entity_types( request, @@ -6264,6 +6341,7 @@ def test_export_entity_types_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_import_entity_types_rest_bad_request( @@ -6344,10 +6422,13 @@ def test_import_entity_types_rest_interceptors(null_interceptor): ), mock.patch.object( transports.EntityTypesRestInterceptor, "post_import_entity_types" ) as post, mock.patch.object( + transports.EntityTypesRestInterceptor, "post_import_entity_types_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.EntityTypesRestInterceptor, "pre_import_entity_types" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = entity_type.ImportEntityTypesRequest.pb( entity_type.ImportEntityTypesRequest() ) @@ -6371,6 +6452,7 @@ def test_import_entity_types_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.import_entity_types( request, @@ -6382,6 +6464,7 @@ def test_import_entity_types_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationRequest): diff --git a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_environments.py b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_environments.py index e8c1cb3915bb..228e3adc4921 100644 --- a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_environments.py +++ b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_environments.py @@ -77,6 +77,13 @@ from google.cloud.dialogflowcx_v3beta1.types import environment from google.cloud.dialogflowcx_v3beta1.types import webhook +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -320,6 +327,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = EnvironmentsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = EnvironmentsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -7023,10 +7073,13 @@ def test_list_environments_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.EnvironmentsRestInterceptor, "post_list_environments" ) as post, mock.patch.object( + transports.EnvironmentsRestInterceptor, "post_list_environments_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.EnvironmentsRestInterceptor, "pre_list_environments" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = environment.ListEnvironmentsRequest.pb( environment.ListEnvironmentsRequest() ) @@ -7052,6 +7105,10 @@ def test_list_environments_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = environment.ListEnvironmentsResponse() + post_with_metadata.return_value = ( + environment.ListEnvironmentsResponse(), + metadata, + ) client.list_environments( request, @@ -7063,6 +7120,7 @@ def test_list_environments_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_environment_rest_bad_request( @@ -7155,10 +7213,13 @@ def test_get_environment_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.EnvironmentsRestInterceptor, "post_get_environment" ) as post, mock.patch.object( + transports.EnvironmentsRestInterceptor, "post_get_environment_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.EnvironmentsRestInterceptor, "pre_get_environment" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = environment.GetEnvironmentRequest.pb( environment.GetEnvironmentRequest() ) @@ -7182,6 +7243,7 @@ def test_get_environment_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = environment.Environment() + post_with_metadata.return_value = environment.Environment(), metadata client.get_environment( request, @@ -7193,6 +7255,7 @@ def test_get_environment_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def 
test_create_environment_rest_bad_request( @@ -7387,10 +7450,13 @@ def test_create_environment_rest_interceptors(null_interceptor): ), mock.patch.object( transports.EnvironmentsRestInterceptor, "post_create_environment" ) as post, mock.patch.object( + transports.EnvironmentsRestInterceptor, "post_create_environment_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.EnvironmentsRestInterceptor, "pre_create_environment" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gcdc_environment.CreateEnvironmentRequest.pb( gcdc_environment.CreateEnvironmentRequest() ) @@ -7414,6 +7480,7 @@ def test_create_environment_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_environment( request, @@ -7425,6 +7492,7 @@ def test_create_environment_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_environment_rest_bad_request( @@ -7627,10 +7695,13 @@ def test_update_environment_rest_interceptors(null_interceptor): ), mock.patch.object( transports.EnvironmentsRestInterceptor, "post_update_environment" ) as post, mock.patch.object( + transports.EnvironmentsRestInterceptor, "post_update_environment_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.EnvironmentsRestInterceptor, "pre_update_environment" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gcdc_environment.UpdateEnvironmentRequest.pb( gcdc_environment.UpdateEnvironmentRequest() ) @@ -7654,6 +7725,7 @@ def test_update_environment_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.update_environment( request, @@ -7665,6 +7737,7 @@ def test_update_environment_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_environment_rest_bad_request( @@ -7866,10 +7939,14 @@ def test_lookup_environment_history_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.EnvironmentsRestInterceptor, "post_lookup_environment_history" ) as post, mock.patch.object( + transports.EnvironmentsRestInterceptor, + "post_lookup_environment_history_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.EnvironmentsRestInterceptor, "pre_lookup_environment_history" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = environment.LookupEnvironmentHistoryRequest.pb( environment.LookupEnvironmentHistoryRequest() ) @@ -7895,6 +7972,10 @@ def test_lookup_environment_history_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = environment.LookupEnvironmentHistoryResponse() + post_with_metadata.return_value = ( + environment.LookupEnvironmentHistoryResponse(), + metadata, + ) client.lookup_environment_history( request, @@ -7906,6 +7987,7 @@ def test_lookup_environment_history_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_run_continuous_test_rest_bad_request( @@ -7990,10 +8072,13 @@ def 
test_run_continuous_test_rest_interceptors(null_interceptor): ), mock.patch.object( transports.EnvironmentsRestInterceptor, "post_run_continuous_test" ) as post, mock.patch.object( + transports.EnvironmentsRestInterceptor, "post_run_continuous_test_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.EnvironmentsRestInterceptor, "pre_run_continuous_test" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = environment.RunContinuousTestRequest.pb( environment.RunContinuousTestRequest() ) @@ -8017,6 +8102,7 @@ def test_run_continuous_test_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.run_continuous_test( request, @@ -8028,6 +8114,7 @@ def test_run_continuous_test_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_continuous_test_results_rest_bad_request( @@ -8116,10 +8203,14 @@ def test_list_continuous_test_results_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.EnvironmentsRestInterceptor, "post_list_continuous_test_results" ) as post, mock.patch.object( + transports.EnvironmentsRestInterceptor, + "post_list_continuous_test_results_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.EnvironmentsRestInterceptor, "pre_list_continuous_test_results" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = environment.ListContinuousTestResultsRequest.pb( environment.ListContinuousTestResultsRequest() ) @@ -8145,6 +8236,10 @@ def test_list_continuous_test_results_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = environment.ListContinuousTestResultsResponse() + post_with_metadata.return_value = ( + environment.ListContinuousTestResultsResponse(), + metadata, + ) client.list_continuous_test_results( request, @@ -8156,6 +8251,7 @@ def test_list_continuous_test_results_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_deploy_flow_rest_bad_request(request_type=environment.DeployFlowRequest): @@ -8238,10 +8334,13 @@ def test_deploy_flow_rest_interceptors(null_interceptor): ), mock.patch.object( transports.EnvironmentsRestInterceptor, "post_deploy_flow" ) as post, mock.patch.object( + transports.EnvironmentsRestInterceptor, "post_deploy_flow_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.EnvironmentsRestInterceptor, "pre_deploy_flow" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = environment.DeployFlowRequest.pb(environment.DeployFlowRequest()) transcode.return_value = { "method": "post", @@ -8263,6 +8362,7 @@ def test_deploy_flow_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.deploy_flow( request, @@ -8274,6 +8374,7 @@ def test_deploy_flow_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationRequest): diff --git 
a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_examples.py b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_examples.py index 6a32a0516de0..832a1d0ca7d6 100644 --- a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_examples.py +++ b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_examples.py @@ -66,6 +66,13 @@ from google.cloud.dialogflowcx_v3beta1.types import example from google.cloud.dialogflowcx_v3beta1.types import example as gcdc_example +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -296,6 +303,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = ExamplesClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = ExamplesClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -4449,10 +4499,13 @@ def test_create_example_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ExamplesRestInterceptor, "post_create_example" ) as post, mock.patch.object( + transports.ExamplesRestInterceptor, "post_create_example_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ExamplesRestInterceptor, "pre_create_example" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gcdc_example.CreateExampleRequest.pb( gcdc_example.CreateExampleRequest() ) @@ -4476,6 +4529,7 @@ def test_create_example_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gcdc_example.Example() + post_with_metadata.return_value = gcdc_example.Example(), metadata client.create_example( request, @@ -4487,6 +4541,7 @@ def test_create_example_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_example_rest_bad_request(request_type=example.DeleteExampleRequest): @@ -4678,10 +4733,13 @@ def test_list_examples_rest_interceptors(null_interceptor): ) 
as transcode, mock.patch.object( transports.ExamplesRestInterceptor, "post_list_examples" ) as post, mock.patch.object( + transports.ExamplesRestInterceptor, "post_list_examples_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ExamplesRestInterceptor, "pre_list_examples" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = example.ListExamplesRequest.pb(example.ListExamplesRequest()) transcode.return_value = { "method": "post", @@ -4705,6 +4763,7 @@ def test_list_examples_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = example.ListExamplesResponse() + post_with_metadata.return_value = example.ListExamplesResponse(), metadata client.list_examples( request, @@ -4716,6 +4775,7 @@ def test_list_examples_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_example_rest_bad_request(request_type=example.GetExampleRequest): @@ -4810,10 +4870,13 @@ def test_get_example_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ExamplesRestInterceptor, "post_get_example" ) as post, mock.patch.object( + transports.ExamplesRestInterceptor, "post_get_example_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ExamplesRestInterceptor, "pre_get_example" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = example.GetExampleRequest.pb(example.GetExampleRequest()) transcode.return_value = { "method": "post", @@ -4835,6 +4898,7 @@ def test_get_example_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = example.Example() + post_with_metadata.return_value = example.Example(), metadata client.get_example( request, @@ -4846,6 +4910,7 @@ def test_get_example_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_example_rest_bad_request( @@ -5055,10 +5120,13 @@ def test_update_example_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ExamplesRestInterceptor, "post_update_example" ) as post, mock.patch.object( + transports.ExamplesRestInterceptor, "post_update_example_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ExamplesRestInterceptor, "pre_update_example" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gcdc_example.UpdateExampleRequest.pb( gcdc_example.UpdateExampleRequest() ) @@ -5082,6 +5150,7 @@ def test_update_example_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gcdc_example.Example() + post_with_metadata.return_value = gcdc_example.Example(), metadata client.update_example( request, @@ -5093,6 +5162,7 @@ def test_update_example_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationRequest): diff --git a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_experiments.py b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_experiments.py index aeb282ea4337..eb3c0083b93b 100644 --- a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_experiments.py +++ 
b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_experiments.py @@ -66,6 +66,13 @@ from google.cloud.dialogflowcx_v3beta1.types import experiment as gcdc_experiment from google.cloud.dialogflowcx_v3beta1.types import experiment +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -305,6 +312,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = ExperimentsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = ExperimentsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -5572,10 +5622,13 @@ def test_list_experiments_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ExperimentsRestInterceptor, "post_list_experiments" ) as post, mock.patch.object( + transports.ExperimentsRestInterceptor, "post_list_experiments_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ExperimentsRestInterceptor, "pre_list_experiments" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = experiment.ListExperimentsRequest.pb( experiment.ListExperimentsRequest() ) @@ -5601,6 +5654,7 @@ def test_list_experiments_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = experiment.ListExperimentsResponse() + post_with_metadata.return_value = experiment.ListExperimentsResponse(), metadata client.list_experiments( request, @@ -5612,6 +5666,7 @@ def test_list_experiments_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_experiment_rest_bad_request(request_type=experiment.GetExperimentRequest): @@ -5706,10 +5761,13 @@ def test_get_experiment_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ExperimentsRestInterceptor, "post_get_experiment" ) as post, mock.patch.object( + transports.ExperimentsRestInterceptor, "post_get_experiment_with_metadata" + ) as post_with_metadata, mock.patch.object( 
transports.ExperimentsRestInterceptor, "pre_get_experiment" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = experiment.GetExperimentRequest.pb( experiment.GetExperimentRequest() ) @@ -5733,6 +5791,7 @@ def test_get_experiment_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = experiment.Experiment() + post_with_metadata.return_value = experiment.Experiment(), metadata client.get_experiment( request, @@ -5744,6 +5803,7 @@ def test_get_experiment_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_experiment_rest_bad_request( @@ -5971,10 +6031,13 @@ def test_create_experiment_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ExperimentsRestInterceptor, "post_create_experiment" ) as post, mock.patch.object( + transports.ExperimentsRestInterceptor, "post_create_experiment_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ExperimentsRestInterceptor, "pre_create_experiment" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gcdc_experiment.CreateExperimentRequest.pb( gcdc_experiment.CreateExperimentRequest() ) @@ -5998,6 +6061,7 @@ def test_create_experiment_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gcdc_experiment.Experiment() + post_with_metadata.return_value = gcdc_experiment.Experiment(), metadata client.create_experiment( request, @@ -6009,6 +6073,7 @@ def test_create_experiment_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_experiment_rest_bad_request( @@ -6240,10 +6305,13 @@ def test_update_experiment_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ExperimentsRestInterceptor, "post_update_experiment" ) as post, mock.patch.object( + transports.ExperimentsRestInterceptor, "post_update_experiment_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ExperimentsRestInterceptor, "pre_update_experiment" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gcdc_experiment.UpdateExperimentRequest.pb( gcdc_experiment.UpdateExperimentRequest() ) @@ -6267,6 +6335,7 @@ def test_update_experiment_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gcdc_experiment.Experiment() + post_with_metadata.return_value = gcdc_experiment.Experiment(), metadata client.update_experiment( request, @@ -6278,6 +6347,7 @@ def test_update_experiment_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_experiment_rest_bad_request( @@ -6487,10 +6557,13 @@ def test_start_experiment_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ExperimentsRestInterceptor, "post_start_experiment" ) as post, mock.patch.object( + transports.ExperimentsRestInterceptor, "post_start_experiment_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ExperimentsRestInterceptor, "pre_start_experiment" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = experiment.StartExperimentRequest.pb( experiment.StartExperimentRequest() ) 
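Each *_rest_interceptors test in these files now also patches a post_<rpc>_with_metadata hook on the service's REST interceptor and stubs it to return a (response, metadata) tuple, mirroring how pre_<rpc> returns (request, metadata). Below is a minimal sketch of a user-defined interceptor overriding one of these hooks for the surrounding Experiments service; the module path and the logging body are assumptions, while the hook name and the tuple contract follow the stubs in the tests above:

from google.cloud.dialogflowcx_v3beta1.services.experiments import transports


class LoggingExperimentsInterceptor(transports.ExperimentsRestInterceptor):
    """Hypothetical interceptor using the new *_with_metadata hook."""

    def post_start_experiment_with_metadata(self, response, metadata):
        # `response` is the Experiment returned by StartExperiment and
        # `metadata` is the trailing metadata; both are passed through unchanged.
        print(f"StartExperiment finished; trailing metadata: {metadata}")
        return response, metadata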
@@ -6514,6 +6587,7 @@ def test_start_experiment_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = experiment.Experiment() + post_with_metadata.return_value = experiment.Experiment(), metadata client.start_experiment( request, @@ -6525,6 +6599,7 @@ def test_start_experiment_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_stop_experiment_rest_bad_request( @@ -6621,10 +6696,13 @@ def test_stop_experiment_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ExperimentsRestInterceptor, "post_stop_experiment" ) as post, mock.patch.object( + transports.ExperimentsRestInterceptor, "post_stop_experiment_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ExperimentsRestInterceptor, "pre_stop_experiment" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = experiment.StopExperimentRequest.pb( experiment.StopExperimentRequest() ) @@ -6648,6 +6726,7 @@ def test_stop_experiment_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = experiment.Experiment() + post_with_metadata.return_value = experiment.Experiment(), metadata client.stop_experiment( request, @@ -6659,6 +6738,7 @@ def test_stop_experiment_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationRequest): diff --git a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_flows.py b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_flows.py index 5d7d1efcfc08..cee6a7339dd4 100644 --- a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_flows.py +++ b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_flows.py @@ -90,6 +90,13 @@ from google.cloud.dialogflowcx_v3beta1.types import flow from google.cloud.dialogflowcx_v3beta1.types import flow as gcdc_flow +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -308,6 +315,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
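The negative credential-info cases (below for FlowsClient, and in the other test files above) lean on a standard unittest.mock detail rather than anything Dialogflow-specific: passing a list as the first argument to Mock() acts as a spec, so attributes outside that list raise AttributeError and hasattr() returns False. A standalone illustration:

from unittest import mock

with_info = mock.Mock(["get_cred_info"])  # spec limited to a single attribute
without_info = mock.Mock([])              # empty spec: no attributes at all

assert hasattr(with_info, "get_cred_info")
assert not hasattr(without_info, "get_cred_info")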
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = FlowsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = FlowsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -6909,10 +6959,13 @@ def test_create_flow_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.FlowsRestInterceptor, "post_create_flow" ) as post, mock.patch.object( + transports.FlowsRestInterceptor, "post_create_flow_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.FlowsRestInterceptor, "pre_create_flow" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gcdc_flow.CreateFlowRequest.pb(gcdc_flow.CreateFlowRequest()) transcode.return_value = { "method": "post", @@ -6934,6 +6987,7 @@ def test_create_flow_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gcdc_flow.Flow() + post_with_metadata.return_value = gcdc_flow.Flow(), metadata client.create_flow( request, @@ -6945,6 +6999,7 @@ def test_create_flow_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_flow_rest_bad_request(request_type=flow.DeleteFlowRequest): @@ -7132,10 +7187,13 @@ def test_list_flows_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.FlowsRestInterceptor, "post_list_flows" ) as post, mock.patch.object( + transports.FlowsRestInterceptor, "post_list_flows_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.FlowsRestInterceptor, "pre_list_flows" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = flow.ListFlowsRequest.pb(flow.ListFlowsRequest()) transcode.return_value = { "method": "post", @@ -7157,6 +7215,7 @@ def test_list_flows_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = flow.ListFlowsResponse() + post_with_metadata.return_value = flow.ListFlowsResponse(), metadata client.list_flows( request, @@ -7168,6 +7227,7 @@ def test_list_flows_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_flow_rest_bad_request(request_type=flow.GetFlowRequest): @@ -7260,10 +7320,13 @@ def 
test_get_flow_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.FlowsRestInterceptor, "post_get_flow" ) as post, mock.patch.object( + transports.FlowsRestInterceptor, "post_get_flow_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.FlowsRestInterceptor, "pre_get_flow" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = flow.GetFlowRequest.pb(flow.GetFlowRequest()) transcode.return_value = { "method": "post", @@ -7285,6 +7348,7 @@ def test_get_flow_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = flow.Flow() + post_with_metadata.return_value = flow.Flow(), metadata client.get_flow( request, @@ -7296,6 +7360,7 @@ def test_get_flow_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_flow_rest_bad_request(request_type=gcdc_flow.UpdateFlowRequest): @@ -7603,10 +7668,13 @@ def test_update_flow_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.FlowsRestInterceptor, "post_update_flow" ) as post, mock.patch.object( + transports.FlowsRestInterceptor, "post_update_flow_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.FlowsRestInterceptor, "pre_update_flow" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gcdc_flow.UpdateFlowRequest.pb(gcdc_flow.UpdateFlowRequest()) transcode.return_value = { "method": "post", @@ -7628,6 +7696,7 @@ def test_update_flow_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gcdc_flow.Flow() + post_with_metadata.return_value = gcdc_flow.Flow(), metadata client.update_flow( request, @@ -7639,6 +7708,7 @@ def test_update_flow_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_train_flow_rest_bad_request(request_type=flow.TrainFlowRequest): @@ -7719,10 +7789,13 @@ def test_train_flow_rest_interceptors(null_interceptor): ), mock.patch.object( transports.FlowsRestInterceptor, "post_train_flow" ) as post, mock.patch.object( + transports.FlowsRestInterceptor, "post_train_flow_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.FlowsRestInterceptor, "pre_train_flow" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = flow.TrainFlowRequest.pb(flow.TrainFlowRequest()) transcode.return_value = { "method": "post", @@ -7744,6 +7817,7 @@ def test_train_flow_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.train_flow( request, @@ -7755,6 +7829,7 @@ def test_train_flow_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_validate_flow_rest_bad_request(request_type=flow.ValidateFlowRequest): @@ -7839,10 +7914,13 @@ def test_validate_flow_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.FlowsRestInterceptor, "post_validate_flow" ) as post, mock.patch.object( + transports.FlowsRestInterceptor, "post_validate_flow_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.FlowsRestInterceptor, "pre_validate_flow" ) as pre: 
pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = flow.ValidateFlowRequest.pb(flow.ValidateFlowRequest()) transcode.return_value = { "method": "post", @@ -7864,6 +7942,7 @@ def test_validate_flow_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = flow.FlowValidationResult() + post_with_metadata.return_value = flow.FlowValidationResult(), metadata client.validate_flow( request, @@ -7875,6 +7954,7 @@ def test_validate_flow_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_flow_validation_result_rest_bad_request( @@ -7961,10 +8041,13 @@ def test_get_flow_validation_result_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.FlowsRestInterceptor, "post_get_flow_validation_result" ) as post, mock.patch.object( + transports.FlowsRestInterceptor, "post_get_flow_validation_result_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.FlowsRestInterceptor, "pre_get_flow_validation_result" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = flow.GetFlowValidationResultRequest.pb( flow.GetFlowValidationResultRequest() ) @@ -7988,6 +8071,7 @@ def test_get_flow_validation_result_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = flow.FlowValidationResult() + post_with_metadata.return_value = flow.FlowValidationResult(), metadata client.get_flow_validation_result( request, @@ -7999,6 +8083,7 @@ def test_get_flow_validation_result_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_import_flow_rest_bad_request(request_type=flow.ImportFlowRequest): @@ -8075,10 +8160,13 @@ def test_import_flow_rest_interceptors(null_interceptor): ), mock.patch.object( transports.FlowsRestInterceptor, "post_import_flow" ) as post, mock.patch.object( + transports.FlowsRestInterceptor, "post_import_flow_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.FlowsRestInterceptor, "pre_import_flow" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = flow.ImportFlowRequest.pb(flow.ImportFlowRequest()) transcode.return_value = { "method": "post", @@ -8100,6 +8188,7 @@ def test_import_flow_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.import_flow( request, @@ -8111,6 +8200,7 @@ def test_import_flow_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_export_flow_rest_bad_request(request_type=flow.ExportFlowRequest): @@ -8191,10 +8281,13 @@ def test_export_flow_rest_interceptors(null_interceptor): ), mock.patch.object( transports.FlowsRestInterceptor, "post_export_flow" ) as post, mock.patch.object( + transports.FlowsRestInterceptor, "post_export_flow_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.FlowsRestInterceptor, "pre_export_flow" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = flow.ExportFlowRequest.pb(flow.ExportFlowRequest()) transcode.return_value = { "method": "post", @@ -8216,6 +8309,7 @@ def 
test_export_flow_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.export_flow( request, @@ -8227,6 +8321,7 @@ def test_export_flow_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationRequest): diff --git a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_generators.py b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_generators.py index ab01091adfa0..d32ae5ec8bf2 100644 --- a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_generators.py +++ b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_generators.py @@ -65,6 +65,13 @@ from google.cloud.dialogflowcx_v3beta1.types import generator from google.cloud.dialogflowcx_v3beta1.types import generator as gcdc_generator +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -300,6 +307,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = GeneratorsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = GeneratorsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -4317,10 +4367,13 @@ def test_list_generators_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.GeneratorsRestInterceptor, "post_list_generators" ) as post, mock.patch.object( + transports.GeneratorsRestInterceptor, "post_list_generators_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.GeneratorsRestInterceptor, "pre_list_generators" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = generator.ListGeneratorsRequest.pb( generator.ListGeneratorsRequest() ) @@ -4346,6 +4399,7 @@ def 
test_list_generators_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = generator.ListGeneratorsResponse() + post_with_metadata.return_value = generator.ListGeneratorsResponse(), metadata client.list_generators( request, @@ -4357,6 +4411,7 @@ def test_list_generators_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_generator_rest_bad_request(request_type=generator.GetGeneratorRequest): @@ -4445,10 +4500,13 @@ def test_get_generator_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.GeneratorsRestInterceptor, "post_get_generator" ) as post, mock.patch.object( + transports.GeneratorsRestInterceptor, "post_get_generator_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.GeneratorsRestInterceptor, "pre_get_generator" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = generator.GetGeneratorRequest.pb(generator.GetGeneratorRequest()) transcode.return_value = { "method": "post", @@ -4470,6 +4528,7 @@ def test_get_generator_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = generator.Generator() + post_with_metadata.return_value = generator.Generator(), metadata client.get_generator( request, @@ -4481,6 +4540,7 @@ def test_get_generator_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_generator_rest_bad_request( @@ -4650,10 +4710,13 @@ def test_create_generator_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.GeneratorsRestInterceptor, "post_create_generator" ) as post, mock.patch.object( + transports.GeneratorsRestInterceptor, "post_create_generator_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.GeneratorsRestInterceptor, "pre_create_generator" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gcdc_generator.CreateGeneratorRequest.pb( gcdc_generator.CreateGeneratorRequest() ) @@ -4677,6 +4740,7 @@ def test_create_generator_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gcdc_generator.Generator() + post_with_metadata.return_value = gcdc_generator.Generator(), metadata client.create_generator( request, @@ -4688,6 +4752,7 @@ def test_create_generator_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_generator_rest_bad_request( @@ -4865,10 +4930,13 @@ def test_update_generator_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.GeneratorsRestInterceptor, "post_update_generator" ) as post, mock.patch.object( + transports.GeneratorsRestInterceptor, "post_update_generator_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.GeneratorsRestInterceptor, "pre_update_generator" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gcdc_generator.UpdateGeneratorRequest.pb( gcdc_generator.UpdateGeneratorRequest() ) @@ -4892,6 +4960,7 @@ def test_update_generator_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gcdc_generator.Generator() + post_with_metadata.return_value = gcdc_generator.Generator(), 
metadata client.update_generator( request, @@ -4903,6 +4972,7 @@ def test_update_generator_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_generator_rest_bad_request( diff --git a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_intents.py b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_intents.py index ea072597a3c6..d86a266e4824 100644 --- a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_intents.py +++ b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_intents.py @@ -74,6 +74,13 @@ from google.cloud.dialogflowcx_v3beta1.types import intent from google.cloud.dialogflowcx_v3beta1.types import intent as gcdc_intent +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -297,6 +304,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = IntentsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = IntentsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -5155,10 +5205,13 @@ def test_list_intents_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.IntentsRestInterceptor, "post_list_intents" ) as post, mock.patch.object( + transports.IntentsRestInterceptor, "post_list_intents_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.IntentsRestInterceptor, "pre_list_intents" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = intent.ListIntentsRequest.pb(intent.ListIntentsRequest()) transcode.return_value = { "method": "post", @@ -5180,6 +5233,7 @@ def test_list_intents_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = intent.ListIntentsResponse() + post_with_metadata.return_value = intent.ListIntentsResponse(), metadata client.list_intents( request, @@ -5191,6 +5245,7 @@ def 
test_list_intents_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_intent_rest_bad_request(request_type=intent.GetIntentRequest): @@ -5283,10 +5338,13 @@ def test_get_intent_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.IntentsRestInterceptor, "post_get_intent" ) as post, mock.patch.object( + transports.IntentsRestInterceptor, "post_get_intent_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.IntentsRestInterceptor, "pre_get_intent" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = intent.GetIntentRequest.pb(intent.GetIntentRequest()) transcode.return_value = { "method": "post", @@ -5308,6 +5366,7 @@ def test_get_intent_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = intent.Intent() + post_with_metadata.return_value = intent.Intent(), metadata client.get_intent( request, @@ -5319,6 +5378,7 @@ def test_get_intent_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_intent_rest_bad_request(request_type=gcdc_intent.CreateIntentRequest): @@ -5497,10 +5557,13 @@ def test_create_intent_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.IntentsRestInterceptor, "post_create_intent" ) as post, mock.patch.object( + transports.IntentsRestInterceptor, "post_create_intent_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.IntentsRestInterceptor, "pre_create_intent" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gcdc_intent.CreateIntentRequest.pb( gcdc_intent.CreateIntentRequest() ) @@ -5524,6 +5587,7 @@ def test_create_intent_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gcdc_intent.Intent() + post_with_metadata.return_value = gcdc_intent.Intent(), metadata client.create_intent( request, @@ -5535,6 +5599,7 @@ def test_create_intent_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_intent_rest_bad_request(request_type=gcdc_intent.UpdateIntentRequest): @@ -5721,10 +5786,13 @@ def test_update_intent_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.IntentsRestInterceptor, "post_update_intent" ) as post, mock.patch.object( + transports.IntentsRestInterceptor, "post_update_intent_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.IntentsRestInterceptor, "pre_update_intent" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gcdc_intent.UpdateIntentRequest.pb( gcdc_intent.UpdateIntentRequest() ) @@ -5748,6 +5816,7 @@ def test_update_intent_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gcdc_intent.Intent() + post_with_metadata.return_value = gcdc_intent.Intent(), metadata client.update_intent( request, @@ -5759,6 +5828,7 @@ def test_update_intent_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_intent_rest_bad_request(request_type=intent.DeleteIntentRequest): @@ -5942,10 +6012,13 @@ def test_import_intents_rest_interceptors(null_interceptor): 
), mock.patch.object( transports.IntentsRestInterceptor, "post_import_intents" ) as post, mock.patch.object( + transports.IntentsRestInterceptor, "post_import_intents_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.IntentsRestInterceptor, "pre_import_intents" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = intent.ImportIntentsRequest.pb(intent.ImportIntentsRequest()) transcode.return_value = { "method": "post", @@ -5967,6 +6040,7 @@ def test_import_intents_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.import_intents( request, @@ -5978,6 +6052,7 @@ def test_import_intents_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_export_intents_rest_bad_request(request_type=intent.ExportIntentsRequest): @@ -6054,10 +6129,13 @@ def test_export_intents_rest_interceptors(null_interceptor): ), mock.patch.object( transports.IntentsRestInterceptor, "post_export_intents" ) as post, mock.patch.object( + transports.IntentsRestInterceptor, "post_export_intents_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.IntentsRestInterceptor, "pre_export_intents" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = intent.ExportIntentsRequest.pb(intent.ExportIntentsRequest()) transcode.return_value = { "method": "post", @@ -6079,6 +6157,7 @@ def test_export_intents_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.export_intents( request, @@ -6090,6 +6169,7 @@ def test_export_intents_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationRequest): diff --git a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_pages.py b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_pages.py index af6aa09bd5c2..fa3bcf3b9638 100644 --- a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_pages.py +++ b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_pages.py @@ -73,6 +73,13 @@ from google.cloud.dialogflowcx_v3beta1.types import page as gcdc_page from google.cloud.dialogflowcx_v3beta1.types import response_message, tool_call +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -291,6 +298,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
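The new hooks are additive: the stubbed return values in the interceptor tests above suggest the base interceptor's post_<rpc>_with_metadata implementations simply pass (response, metadata) through, so existing subclasses that only override the older post_<rpc> hooks keep working. A hedged spot check of that assumption against the Intents interceptor (the pass-through default is inferred, not shown in this diff):

from google.cloud.dialogflowcx_v3beta1.services.intents import transports

interceptor = transports.IntentsRestInterceptor()
metadata = [("x-goog-request-params", "parent=projects/p/locations/l/agents/a")]

# Assumed default behavior: the base hook returns its inputs unchanged.
resp, md = interceptor.post_list_intents_with_metadata("fake-response", metadata)
assert resp == "fake-response"
assert md == metadata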
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = PagesClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = PagesClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -4300,10 +4350,13 @@ def test_list_pages_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.PagesRestInterceptor, "post_list_pages" ) as post, mock.patch.object( + transports.PagesRestInterceptor, "post_list_pages_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.PagesRestInterceptor, "pre_list_pages" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = page.ListPagesRequest.pb(page.ListPagesRequest()) transcode.return_value = { "method": "post", @@ -4325,6 +4378,7 @@ def test_list_pages_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = page.ListPagesResponse() + post_with_metadata.return_value = page.ListPagesResponse(), metadata client.list_pages( request, @@ -4336,6 +4390,7 @@ def test_list_pages_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_page_rest_bad_request(request_type=page.GetPageRequest): @@ -4426,10 +4481,13 @@ def test_get_page_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.PagesRestInterceptor, "post_get_page" ) as post, mock.patch.object( + transports.PagesRestInterceptor, "post_get_page_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.PagesRestInterceptor, "pre_get_page" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = page.GetPageRequest.pb(page.GetPageRequest()) transcode.return_value = { "method": "post", @@ -4451,6 +4509,7 @@ def test_get_page_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = page.Page() + post_with_metadata.return_value = page.Page(), metadata client.get_page( request, @@ -4462,6 +4521,7 @@ def test_get_page_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_page_rest_bad_request(request_type=gcdc_page.CreatePageRequest): @@ -4764,10 +4824,13 @@ def test_create_page_rest_interceptors(null_interceptor): ) as 
transcode, mock.patch.object( transports.PagesRestInterceptor, "post_create_page" ) as post, mock.patch.object( + transports.PagesRestInterceptor, "post_create_page_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.PagesRestInterceptor, "pre_create_page" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gcdc_page.CreatePageRequest.pb(gcdc_page.CreatePageRequest()) transcode.return_value = { "method": "post", @@ -4789,6 +4852,7 @@ def test_create_page_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gcdc_page.Page() + post_with_metadata.return_value = gcdc_page.Page(), metadata client.create_page( request, @@ -4800,6 +4864,7 @@ def test_create_page_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_page_rest_bad_request(request_type=gcdc_page.UpdatePageRequest): @@ -5106,10 +5171,13 @@ def test_update_page_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.PagesRestInterceptor, "post_update_page" ) as post, mock.patch.object( + transports.PagesRestInterceptor, "post_update_page_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.PagesRestInterceptor, "pre_update_page" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gcdc_page.UpdatePageRequest.pb(gcdc_page.UpdatePageRequest()) transcode.return_value = { "method": "post", @@ -5131,6 +5199,7 @@ def test_update_page_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gcdc_page.Page() + post_with_metadata.return_value = gcdc_page.Page(), metadata client.update_page( request, @@ -5142,6 +5211,7 @@ def test_update_page_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_page_rest_bad_request(request_type=page.DeletePageRequest): diff --git a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_playbooks.py b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_playbooks.py index 92c097d8aee7..8eb32d21a222 100644 --- a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_playbooks.py +++ b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_playbooks.py @@ -71,6 +71,13 @@ from google.cloud.dialogflowcx_v3beta1.types import playbook from google.cloud.dialogflowcx_v3beta1.types import playbook as gcdc_playbook +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -304,6 +311,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
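The null_interceptor parametrization used throughout these tests implies the generated REST transports accept an interceptor argument, which is also how an application would install a hook like the ones patched here. A hedged wiring sketch for the Playbooks service, whose interceptor tests follow; the import paths, the interceptor keyword, and the anonymous-credentials shortcut are assumptions for illustration:

import google.auth.credentials as ga_credentials

from google.cloud.dialogflowcx_v3beta1.services.playbooks import (
    PlaybooksClient,
    transports,
)


class AuditingPlaybooksInterceptor(transports.PlaybooksRestInterceptor):
    def post_get_playbook_with_metadata(self, response, metadata):
        # Observe the playbook and its trailing metadata, then pass both through.
        print(f"GetPlaybook returned {response.name!r}; metadata: {metadata}")
        return response, metadata


client = PlaybooksClient(
    transport=transports.PlaybooksRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
        interceptor=AuditingPlaybooksInterceptor(),
    )
)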
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = PlaybooksClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = PlaybooksClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -7061,10 +7111,13 @@ def test_create_playbook_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.PlaybooksRestInterceptor, "post_create_playbook" ) as post, mock.patch.object( + transports.PlaybooksRestInterceptor, "post_create_playbook_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.PlaybooksRestInterceptor, "pre_create_playbook" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gcdc_playbook.CreatePlaybookRequest.pb( gcdc_playbook.CreatePlaybookRequest() ) @@ -7088,6 +7141,7 @@ def test_create_playbook_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gcdc_playbook.Playbook() + post_with_metadata.return_value = gcdc_playbook.Playbook(), metadata client.create_playbook( request, @@ -7099,6 +7153,7 @@ def test_create_playbook_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_playbook_rest_bad_request(request_type=playbook.DeletePlaybookRequest): @@ -7286,10 +7341,13 @@ def test_list_playbooks_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.PlaybooksRestInterceptor, "post_list_playbooks" ) as post, mock.patch.object( + transports.PlaybooksRestInterceptor, "post_list_playbooks_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.PlaybooksRestInterceptor, "pre_list_playbooks" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = playbook.ListPlaybooksRequest.pb(playbook.ListPlaybooksRequest()) transcode.return_value = { "method": "post", @@ -7313,6 +7371,7 @@ def test_list_playbooks_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = playbook.ListPlaybooksResponse() + post_with_metadata.return_value = playbook.ListPlaybooksResponse(), metadata client.list_playbooks( request, @@ -7324,6 +7383,7 @@ def test_list_playbooks_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + 
post_with_metadata.assert_called_once() def test_get_playbook_rest_bad_request(request_type=playbook.GetPlaybookRequest): @@ -7420,10 +7480,13 @@ def test_get_playbook_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.PlaybooksRestInterceptor, "post_get_playbook" ) as post, mock.patch.object( + transports.PlaybooksRestInterceptor, "post_get_playbook_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.PlaybooksRestInterceptor, "pre_get_playbook" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = playbook.GetPlaybookRequest.pb(playbook.GetPlaybookRequest()) transcode.return_value = { "method": "post", @@ -7445,6 +7508,7 @@ def test_get_playbook_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = playbook.Playbook() + post_with_metadata.return_value = playbook.Playbook(), metadata client.get_playbook( request, @@ -7456,6 +7520,7 @@ def test_get_playbook_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_playbook_rest_bad_request( @@ -7648,10 +7713,13 @@ def test_update_playbook_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.PlaybooksRestInterceptor, "post_update_playbook" ) as post, mock.patch.object( + transports.PlaybooksRestInterceptor, "post_update_playbook_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.PlaybooksRestInterceptor, "pre_update_playbook" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gcdc_playbook.UpdatePlaybookRequest.pb( gcdc_playbook.UpdatePlaybookRequest() ) @@ -7675,6 +7743,7 @@ def test_update_playbook_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gcdc_playbook.Playbook() + post_with_metadata.return_value = gcdc_playbook.Playbook(), metadata client.update_playbook( request, @@ -7686,6 +7755,7 @@ def test_update_playbook_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_playbook_version_rest_bad_request( @@ -7913,10 +7983,14 @@ def test_create_playbook_version_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.PlaybooksRestInterceptor, "post_create_playbook_version" ) as post, mock.patch.object( + transports.PlaybooksRestInterceptor, + "post_create_playbook_version_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.PlaybooksRestInterceptor, "pre_create_playbook_version" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = playbook.CreatePlaybookVersionRequest.pb( playbook.CreatePlaybookVersionRequest() ) @@ -7940,6 +8014,7 @@ def test_create_playbook_version_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = playbook.PlaybookVersion() + post_with_metadata.return_value = playbook.PlaybookVersion(), metadata client.create_playbook_version( request, @@ -7951,6 +8026,7 @@ def test_create_playbook_version_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_playbook_version_rest_bad_request( @@ -8039,10 +8115,13 @@ def test_get_playbook_version_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( 
transports.PlaybooksRestInterceptor, "post_get_playbook_version" ) as post, mock.patch.object( + transports.PlaybooksRestInterceptor, "post_get_playbook_version_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.PlaybooksRestInterceptor, "pre_get_playbook_version" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = playbook.GetPlaybookVersionRequest.pb( playbook.GetPlaybookVersionRequest() ) @@ -8066,6 +8145,7 @@ def test_get_playbook_version_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = playbook.PlaybookVersion() + post_with_metadata.return_value = playbook.PlaybookVersion(), metadata client.get_playbook_version( request, @@ -8077,6 +8157,7 @@ def test_get_playbook_version_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_playbook_versions_rest_bad_request( @@ -8163,10 +8244,13 @@ def test_list_playbook_versions_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.PlaybooksRestInterceptor, "post_list_playbook_versions" ) as post, mock.patch.object( + transports.PlaybooksRestInterceptor, "post_list_playbook_versions_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.PlaybooksRestInterceptor, "pre_list_playbook_versions" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = playbook.ListPlaybookVersionsRequest.pb( playbook.ListPlaybookVersionsRequest() ) @@ -8192,6 +8276,10 @@ def test_list_playbook_versions_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = playbook.ListPlaybookVersionsResponse() + post_with_metadata.return_value = ( + playbook.ListPlaybookVersionsResponse(), + metadata, + ) client.list_playbook_versions( request, @@ -8203,6 +8291,7 @@ def test_list_playbook_versions_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_playbook_version_rest_bad_request( diff --git a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_security_settings_service.py b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_security_settings_service.py index 3769df0e3832..4b058bce68e3 100644 --- a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_security_settings_service.py +++ b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_security_settings_service.py @@ -66,6 +66,13 @@ ) from google.cloud.dialogflowcx_v3beta1.types import security_settings +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -341,6 +348,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = SecuritySettingsServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = SecuritySettingsServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -4833,11 +4883,15 @@ def test_create_security_settings_rest_interceptors(null_interceptor): transports.SecuritySettingsServiceRestInterceptor, "post_create_security_settings", ) as post, mock.patch.object( + transports.SecuritySettingsServiceRestInterceptor, + "post_create_security_settings_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.SecuritySettingsServiceRestInterceptor, "pre_create_security_settings", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gcdc_security_settings.CreateSecuritySettingsRequest.pb( gcdc_security_settings.CreateSecuritySettingsRequest() ) @@ -4863,6 +4917,10 @@ def test_create_security_settings_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gcdc_security_settings.SecuritySettings() + post_with_metadata.return_value = ( + gcdc_security_settings.SecuritySettings(), + metadata, + ) client.create_security_settings( request, @@ -4874,6 +4932,7 @@ def test_create_security_settings_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_security_settings_rest_bad_request( @@ -4985,10 +5044,14 @@ def test_get_security_settings_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.SecuritySettingsServiceRestInterceptor, "post_get_security_settings" ) as post, mock.patch.object( + transports.SecuritySettingsServiceRestInterceptor, + "post_get_security_settings_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.SecuritySettingsServiceRestInterceptor, "pre_get_security_settings" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = security_settings.GetSecuritySettingsRequest.pb( security_settings.GetSecuritySettingsRequest() ) @@ -5014,6 +5077,7 @@ def test_get_security_settings_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = security_settings.SecuritySettings() + post_with_metadata.return_value = 
security_settings.SecuritySettings(), metadata client.get_security_settings( request, @@ -5025,6 +5089,7 @@ def test_get_security_settings_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_security_settings_rest_bad_request( @@ -5229,11 +5294,15 @@ def test_update_security_settings_rest_interceptors(null_interceptor): transports.SecuritySettingsServiceRestInterceptor, "post_update_security_settings", ) as post, mock.patch.object( + transports.SecuritySettingsServiceRestInterceptor, + "post_update_security_settings_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.SecuritySettingsServiceRestInterceptor, "pre_update_security_settings", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gcdc_security_settings.UpdateSecuritySettingsRequest.pb( gcdc_security_settings.UpdateSecuritySettingsRequest() ) @@ -5259,6 +5328,10 @@ def test_update_security_settings_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gcdc_security_settings.SecuritySettings() + post_with_metadata.return_value = ( + gcdc_security_settings.SecuritySettings(), + metadata, + ) client.update_security_settings( request, @@ -5270,6 +5343,7 @@ def test_update_security_settings_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_security_settings_rest_bad_request( @@ -5354,10 +5428,14 @@ def test_list_security_settings_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.SecuritySettingsServiceRestInterceptor, "post_list_security_settings" ) as post, mock.patch.object( + transports.SecuritySettingsServiceRestInterceptor, + "post_list_security_settings_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.SecuritySettingsServiceRestInterceptor, "pre_list_security_settings" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = security_settings.ListSecuritySettingsRequest.pb( security_settings.ListSecuritySettingsRequest() ) @@ -5383,6 +5461,10 @@ def test_list_security_settings_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = security_settings.ListSecuritySettingsResponse() + post_with_metadata.return_value = ( + security_settings.ListSecuritySettingsResponse(), + metadata, + ) client.list_security_settings( request, @@ -5394,6 +5476,7 @@ def test_list_security_settings_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_security_settings_rest_bad_request( diff --git a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_session_entity_types.py b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_session_entity_types.py index a99d3bd9d7dc..63142f2d3c4a 100644 --- a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_session_entity_types.py +++ b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_session_entity_types.py @@ -67,6 +67,13 @@ from google.cloud.dialogflowcx_v3beta1.types import entity_type from google.cloud.dialogflowcx_v3beta1.types import session_entity_type +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account 
credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -338,6 +345,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = SessionEntityTypesClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = SessionEntityTypesClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -4589,10 +4639,14 @@ def test_list_session_entity_types_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.SessionEntityTypesRestInterceptor, "post_list_session_entity_types" ) as post, mock.patch.object( + transports.SessionEntityTypesRestInterceptor, + "post_list_session_entity_types_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.SessionEntityTypesRestInterceptor, "pre_list_session_entity_types" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = session_entity_type.ListSessionEntityTypesRequest.pb( session_entity_type.ListSessionEntityTypesRequest() ) @@ -4618,6 +4672,10 @@ def test_list_session_entity_types_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = session_entity_type.ListSessionEntityTypesResponse() + post_with_metadata.return_value = ( + session_entity_type.ListSessionEntityTypesResponse(), + metadata, + ) client.list_session_entity_types( request, @@ -4629,6 +4687,7 @@ def test_list_session_entity_types_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_session_entity_type_rest_bad_request( @@ -4722,10 +4781,14 @@ def test_get_session_entity_type_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.SessionEntityTypesRestInterceptor, "post_get_session_entity_type" ) as post, mock.patch.object( + transports.SessionEntityTypesRestInterceptor, + "post_get_session_entity_type_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.SessionEntityTypesRestInterceptor, "pre_get_session_entity_type" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() 
pb_message = session_entity_type.GetSessionEntityTypeRequest.pb( session_entity_type.GetSessionEntityTypeRequest() ) @@ -4751,6 +4814,10 @@ def test_get_session_entity_type_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = session_entity_type.SessionEntityType() + post_with_metadata.return_value = ( + session_entity_type.SessionEntityType(), + metadata, + ) client.get_session_entity_type( request, @@ -4762,6 +4829,7 @@ def test_get_session_entity_type_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_session_entity_type_rest_bad_request( @@ -4931,10 +4999,14 @@ def test_create_session_entity_type_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.SessionEntityTypesRestInterceptor, "post_create_session_entity_type" ) as post, mock.patch.object( + transports.SessionEntityTypesRestInterceptor, + "post_create_session_entity_type_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.SessionEntityTypesRestInterceptor, "pre_create_session_entity_type" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gcdc_session_entity_type.CreateSessionEntityTypeRequest.pb( gcdc_session_entity_type.CreateSessionEntityTypeRequest() ) @@ -4960,6 +5032,10 @@ def test_create_session_entity_type_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gcdc_session_entity_type.SessionEntityType() + post_with_metadata.return_value = ( + gcdc_session_entity_type.SessionEntityType(), + metadata, + ) client.create_session_entity_type( request, @@ -4971,6 +5047,7 @@ def test_create_session_entity_type_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_session_entity_type_rest_bad_request( @@ -5144,10 +5221,14 @@ def test_update_session_entity_type_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.SessionEntityTypesRestInterceptor, "post_update_session_entity_type" ) as post, mock.patch.object( + transports.SessionEntityTypesRestInterceptor, + "post_update_session_entity_type_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.SessionEntityTypesRestInterceptor, "pre_update_session_entity_type" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gcdc_session_entity_type.UpdateSessionEntityTypeRequest.pb( gcdc_session_entity_type.UpdateSessionEntityTypeRequest() ) @@ -5173,6 +5254,10 @@ def test_update_session_entity_type_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gcdc_session_entity_type.SessionEntityType() + post_with_metadata.return_value = ( + gcdc_session_entity_type.SessionEntityType(), + metadata, + ) client.update_session_entity_type( request, @@ -5184,6 +5269,7 @@ def test_update_session_entity_type_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_session_entity_type_rest_bad_request( diff --git a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_sessions.py b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_sessions.py index 6494bba9cab1..de0f3fc09332 100644 --- 
a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_sessions.py +++ b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_sessions.py @@ -74,6 +74,13 @@ tool_call, ) +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -304,6 +311,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = SessionsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = SessionsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -3483,10 +3533,13 @@ def test_detect_intent_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.SessionsRestInterceptor, "post_detect_intent" ) as post, mock.patch.object( + transports.SessionsRestInterceptor, "post_detect_intent_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.SessionsRestInterceptor, "pre_detect_intent" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = session.DetectIntentRequest.pb(session.DetectIntentRequest()) transcode.return_value = { "method": "post", @@ -3510,6 +3563,7 @@ def test_detect_intent_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = session.DetectIntentResponse() + post_with_metadata.return_value = session.DetectIntentResponse(), metadata client.detect_intent( request, @@ -3521,6 +3575,7 @@ def test_detect_intent_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_server_streaming_detect_intent_rest_bad_request( @@ -3617,10 +3672,14 @@ def test_server_streaming_detect_intent_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.SessionsRestInterceptor, "post_server_streaming_detect_intent" ) as post, mock.patch.object( + transports.SessionsRestInterceptor, + "post_server_streaming_detect_intent_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.SessionsRestInterceptor, 
"pre_server_streaming_detect_intent" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = session.DetectIntentRequest.pb(session.DetectIntentRequest()) transcode.return_value = { "method": "post", @@ -3644,6 +3703,7 @@ def test_server_streaming_detect_intent_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = session.DetectIntentResponse() + post_with_metadata.return_value = session.DetectIntentResponse(), metadata client.server_streaming_detect_intent( request, @@ -3655,6 +3715,7 @@ def test_server_streaming_detect_intent_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_streaming_detect_intent_rest_error(): @@ -3750,10 +3811,13 @@ def test_match_intent_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.SessionsRestInterceptor, "post_match_intent" ) as post, mock.patch.object( + transports.SessionsRestInterceptor, "post_match_intent_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.SessionsRestInterceptor, "pre_match_intent" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = session.MatchIntentRequest.pb(session.MatchIntentRequest()) transcode.return_value = { "method": "post", @@ -3777,6 +3841,7 @@ def test_match_intent_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = session.MatchIntentResponse() + post_with_metadata.return_value = session.MatchIntentResponse(), metadata client.match_intent( request, @@ -3788,6 +3853,7 @@ def test_match_intent_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_fulfill_intent_rest_bad_request(request_type=session.FulfillIntentRequest): @@ -3878,10 +3944,13 @@ def test_fulfill_intent_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.SessionsRestInterceptor, "post_fulfill_intent" ) as post, mock.patch.object( + transports.SessionsRestInterceptor, "post_fulfill_intent_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.SessionsRestInterceptor, "pre_fulfill_intent" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = session.FulfillIntentRequest.pb(session.FulfillIntentRequest()) transcode.return_value = { "method": "post", @@ -3905,6 +3974,7 @@ def test_fulfill_intent_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = session.FulfillIntentResponse() + post_with_metadata.return_value = session.FulfillIntentResponse(), metadata client.fulfill_intent( request, @@ -3916,6 +3986,7 @@ def test_fulfill_intent_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_submit_answer_feedback_rest_bad_request( @@ -4004,10 +4075,13 @@ def test_submit_answer_feedback_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.SessionsRestInterceptor, "post_submit_answer_feedback" ) as post, mock.patch.object( + transports.SessionsRestInterceptor, "post_submit_answer_feedback_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.SessionsRestInterceptor, "pre_submit_answer_feedback" ) as pre: pre.assert_not_called() post.assert_not_called() + 
post_with_metadata.assert_not_called() pb_message = session.SubmitAnswerFeedbackRequest.pb( session.SubmitAnswerFeedbackRequest() ) @@ -4031,6 +4105,7 @@ def test_submit_answer_feedback_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = session.AnswerFeedback() + post_with_metadata.return_value = session.AnswerFeedback(), metadata client.submit_answer_feedback( request, @@ -4042,6 +4117,7 @@ def test_submit_answer_feedback_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationRequest): diff --git a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_test_cases.py b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_test_cases.py index 7409fb4e76db..9d709090878e 100644 --- a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_test_cases.py +++ b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_test_cases.py @@ -90,6 +90,13 @@ from google.cloud.dialogflowcx_v3beta1.types import test_case as gcdc_test_case from google.cloud.dialogflowcx_v3beta1.types import tool_call +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -323,6 +330,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = TestCasesClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = TestCasesClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -8007,10 +8057,13 @@ def test_list_test_cases_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.TestCasesRestInterceptor, "post_list_test_cases" ) as post, mock.patch.object( + transports.TestCasesRestInterceptor, "post_list_test_cases_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.TestCasesRestInterceptor, "pre_list_test_cases" ) as pre: pre.assert_not_called() post.assert_not_called() + 
post_with_metadata.assert_not_called() pb_message = test_case.ListTestCasesRequest.pb(test_case.ListTestCasesRequest()) transcode.return_value = { "method": "post", @@ -8034,6 +8087,7 @@ def test_list_test_cases_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = test_case.ListTestCasesResponse() + post_with_metadata.return_value = test_case.ListTestCasesResponse(), metadata client.list_test_cases( request, @@ -8045,6 +8099,7 @@ def test_list_test_cases_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_batch_delete_test_cases_rest_bad_request( @@ -8242,10 +8297,13 @@ def test_get_test_case_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.TestCasesRestInterceptor, "post_get_test_case" ) as post, mock.patch.object( + transports.TestCasesRestInterceptor, "post_get_test_case_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.TestCasesRestInterceptor, "pre_get_test_case" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = test_case.GetTestCaseRequest.pb(test_case.GetTestCaseRequest()) transcode.return_value = { "method": "post", @@ -8267,6 +8325,7 @@ def test_get_test_case_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = test_case.TestCase() + post_with_metadata.return_value = test_case.TestCase(), metadata client.get_test_case( request, @@ -8278,6 +8337,7 @@ def test_get_test_case_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_test_case_rest_bad_request( @@ -8696,10 +8756,13 @@ def test_create_test_case_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.TestCasesRestInterceptor, "post_create_test_case" ) as post, mock.patch.object( + transports.TestCasesRestInterceptor, "post_create_test_case_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.TestCasesRestInterceptor, "pre_create_test_case" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gcdc_test_case.CreateTestCaseRequest.pb( gcdc_test_case.CreateTestCaseRequest() ) @@ -8723,6 +8786,7 @@ def test_create_test_case_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gcdc_test_case.TestCase() + post_with_metadata.return_value = gcdc_test_case.TestCase(), metadata client.create_test_case( request, @@ -8734,6 +8798,7 @@ def test_create_test_case_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_test_case_rest_bad_request( @@ -9160,10 +9225,13 @@ def test_update_test_case_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.TestCasesRestInterceptor, "post_update_test_case" ) as post, mock.patch.object( + transports.TestCasesRestInterceptor, "post_update_test_case_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.TestCasesRestInterceptor, "pre_update_test_case" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gcdc_test_case.UpdateTestCaseRequest.pb( gcdc_test_case.UpdateTestCaseRequest() ) @@ -9187,6 +9255,7 @@ def test_update_test_case_rest_interceptors(null_interceptor): ] pre.return_value = 
request, metadata post.return_value = gcdc_test_case.TestCase() + post_with_metadata.return_value = gcdc_test_case.TestCase(), metadata client.update_test_case( request, @@ -9198,6 +9267,7 @@ def test_update_test_case_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_run_test_case_rest_bad_request(request_type=test_case.RunTestCaseRequest): @@ -9278,10 +9348,13 @@ def test_run_test_case_rest_interceptors(null_interceptor): ), mock.patch.object( transports.TestCasesRestInterceptor, "post_run_test_case" ) as post, mock.patch.object( + transports.TestCasesRestInterceptor, "post_run_test_case_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.TestCasesRestInterceptor, "pre_run_test_case" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = test_case.RunTestCaseRequest.pb(test_case.RunTestCaseRequest()) transcode.return_value = { "method": "post", @@ -9303,6 +9376,7 @@ def test_run_test_case_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.run_test_case( request, @@ -9314,6 +9388,7 @@ def test_run_test_case_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_batch_run_test_cases_rest_bad_request( @@ -9392,10 +9467,13 @@ def test_batch_run_test_cases_rest_interceptors(null_interceptor): ), mock.patch.object( transports.TestCasesRestInterceptor, "post_batch_run_test_cases" ) as post, mock.patch.object( + transports.TestCasesRestInterceptor, "post_batch_run_test_cases_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.TestCasesRestInterceptor, "pre_batch_run_test_cases" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = test_case.BatchRunTestCasesRequest.pb( test_case.BatchRunTestCasesRequest() ) @@ -9419,6 +9497,7 @@ def test_batch_run_test_cases_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.batch_run_test_cases( request, @@ -9430,6 +9509,7 @@ def test_batch_run_test_cases_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_calculate_coverage_rest_bad_request( @@ -9512,10 +9592,13 @@ def test_calculate_coverage_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.TestCasesRestInterceptor, "post_calculate_coverage" ) as post, mock.patch.object( + transports.TestCasesRestInterceptor, "post_calculate_coverage_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.TestCasesRestInterceptor, "pre_calculate_coverage" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = test_case.CalculateCoverageRequest.pb( test_case.CalculateCoverageRequest() ) @@ -9541,6 +9624,10 @@ def test_calculate_coverage_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = test_case.CalculateCoverageResponse() + post_with_metadata.return_value = ( + test_case.CalculateCoverageResponse(), + metadata, + ) client.calculate_coverage( request, @@ -9552,6 +9639,7 
@@ def test_calculate_coverage_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_import_test_cases_rest_bad_request( @@ -9630,10 +9718,13 @@ def test_import_test_cases_rest_interceptors(null_interceptor): ), mock.patch.object( transports.TestCasesRestInterceptor, "post_import_test_cases" ) as post, mock.patch.object( + transports.TestCasesRestInterceptor, "post_import_test_cases_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.TestCasesRestInterceptor, "pre_import_test_cases" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = test_case.ImportTestCasesRequest.pb( test_case.ImportTestCasesRequest() ) @@ -9657,6 +9748,7 @@ def test_import_test_cases_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.import_test_cases( request, @@ -9668,6 +9760,7 @@ def test_import_test_cases_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_export_test_cases_rest_bad_request( @@ -9746,10 +9839,13 @@ def test_export_test_cases_rest_interceptors(null_interceptor): ), mock.patch.object( transports.TestCasesRestInterceptor, "post_export_test_cases" ) as post, mock.patch.object( + transports.TestCasesRestInterceptor, "post_export_test_cases_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.TestCasesRestInterceptor, "pre_export_test_cases" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = test_case.ExportTestCasesRequest.pb( test_case.ExportTestCasesRequest() ) @@ -9773,6 +9869,7 @@ def test_export_test_cases_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.export_test_cases( request, @@ -9784,6 +9881,7 @@ def test_export_test_cases_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_test_case_results_rest_bad_request( @@ -9870,10 +9968,13 @@ def test_list_test_case_results_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.TestCasesRestInterceptor, "post_list_test_case_results" ) as post, mock.patch.object( + transports.TestCasesRestInterceptor, "post_list_test_case_results_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.TestCasesRestInterceptor, "pre_list_test_case_results" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = test_case.ListTestCaseResultsRequest.pb( test_case.ListTestCaseResultsRequest() ) @@ -9899,6 +10000,10 @@ def test_list_test_case_results_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = test_case.ListTestCaseResultsResponse() + post_with_metadata.return_value = ( + test_case.ListTestCaseResultsResponse(), + metadata, + ) client.list_test_case_results( request, @@ -9910,6 +10015,7 @@ def test_list_test_case_results_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_test_case_result_rest_bad_request( @@ 
-10000,10 +10106,13 @@ def test_get_test_case_result_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.TestCasesRestInterceptor, "post_get_test_case_result" ) as post, mock.patch.object( + transports.TestCasesRestInterceptor, "post_get_test_case_result_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.TestCasesRestInterceptor, "pre_get_test_case_result" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = test_case.GetTestCaseResultRequest.pb( test_case.GetTestCaseResultRequest() ) @@ -10027,6 +10136,7 @@ def test_get_test_case_result_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = test_case.TestCaseResult() + post_with_metadata.return_value = test_case.TestCaseResult(), metadata client.get_test_case_result( request, @@ -10038,6 +10148,7 @@ def test_get_test_case_result_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationRequest): diff --git a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_tools.py b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_tools.py index 466463ecbb5a..0b0b49e3128c 100644 --- a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_tools.py +++ b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_tools.py @@ -75,6 +75,13 @@ from google.cloud.dialogflowcx_v3beta1.types import tool from google.cloud.dialogflowcx_v3beta1.types import tool as gcdc_tool +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -293,6 +300,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = ToolsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = ToolsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -4800,10 +4850,13 @@ def test_create_tool_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ToolsRestInterceptor, "post_create_tool" ) as post, mock.patch.object( + transports.ToolsRestInterceptor, "post_create_tool_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ToolsRestInterceptor, "pre_create_tool" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gcdc_tool.CreateToolRequest.pb(gcdc_tool.CreateToolRequest()) transcode.return_value = { "method": "post", @@ -4825,6 +4878,7 @@ def test_create_tool_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gcdc_tool.Tool() + post_with_metadata.return_value = gcdc_tool.Tool(), metadata client.create_tool( request, @@ -4836,6 +4890,7 @@ def test_create_tool_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_tools_rest_bad_request(request_type=tool.ListToolsRequest): @@ -4916,10 +4971,13 @@ def test_list_tools_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ToolsRestInterceptor, "post_list_tools" ) as post, mock.patch.object( + transports.ToolsRestInterceptor, "post_list_tools_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ToolsRestInterceptor, "pre_list_tools" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = tool.ListToolsRequest.pb(tool.ListToolsRequest()) transcode.return_value = { "method": "post", @@ -4941,6 +4999,7 @@ def test_list_tools_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = tool.ListToolsResponse() + post_with_metadata.return_value = tool.ListToolsResponse(), metadata client.list_tools( request, @@ -4952,6 +5011,7 @@ def test_list_tools_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_export_tools_rest_bad_request(request_type=tool.ExportToolsRequest): @@ -5028,10 +5088,13 @@ def 
test_export_tools_rest_interceptors(null_interceptor): ), mock.patch.object( transports.ToolsRestInterceptor, "post_export_tools" ) as post, mock.patch.object( + transports.ToolsRestInterceptor, "post_export_tools_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ToolsRestInterceptor, "pre_export_tools" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = tool.ExportToolsRequest.pb(tool.ExportToolsRequest()) transcode.return_value = { "method": "post", @@ -5053,6 +5116,7 @@ def test_export_tools_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.export_tools( request, @@ -5064,6 +5128,7 @@ def test_export_tools_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_tool_rest_bad_request(request_type=tool.GetToolRequest): @@ -5154,10 +5219,13 @@ def test_get_tool_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ToolsRestInterceptor, "post_get_tool" ) as post, mock.patch.object( + transports.ToolsRestInterceptor, "post_get_tool_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ToolsRestInterceptor, "pre_get_tool" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = tool.GetToolRequest.pb(tool.GetToolRequest()) transcode.return_value = { "method": "post", @@ -5179,6 +5247,7 @@ def test_get_tool_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = tool.Tool() + post_with_metadata.return_value = tool.Tool(), metadata client.get_tool( request, @@ -5190,6 +5259,7 @@ def test_get_tool_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_tool_rest_bad_request(request_type=gcdc_tool.UpdateToolRequest): @@ -5390,10 +5460,13 @@ def test_update_tool_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ToolsRestInterceptor, "post_update_tool" ) as post, mock.patch.object( + transports.ToolsRestInterceptor, "post_update_tool_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ToolsRestInterceptor, "pre_update_tool" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gcdc_tool.UpdateToolRequest.pb(gcdc_tool.UpdateToolRequest()) transcode.return_value = { "method": "post", @@ -5415,6 +5488,7 @@ def test_update_tool_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gcdc_tool.Tool() + post_with_metadata.return_value = gcdc_tool.Tool(), metadata client.update_tool( request, @@ -5426,6 +5500,7 @@ def test_update_tool_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_tool_rest_bad_request(request_type=tool.DeleteToolRequest): diff --git a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_transition_route_groups.py b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_transition_route_groups.py index 0ea032b635c9..c77bc3c5ec40 100644 --- a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_transition_route_groups.py 
+++ b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_transition_route_groups.py @@ -76,6 +76,13 @@ ) from google.cloud.dialogflowcx_v3beta1.types import transition_route_group +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -351,6 +358,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = TransitionRouteGroupsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = TransitionRouteGroupsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -4643,11 +4693,15 @@ def test_list_transition_route_groups_rest_interceptors(null_interceptor): transports.TransitionRouteGroupsRestInterceptor, "post_list_transition_route_groups", ) as post, mock.patch.object( + transports.TransitionRouteGroupsRestInterceptor, + "post_list_transition_route_groups_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.TransitionRouteGroupsRestInterceptor, "pre_list_transition_route_groups", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = transition_route_group.ListTransitionRouteGroupsRequest.pb( transition_route_group.ListTransitionRouteGroupsRequest() ) @@ -4673,6 +4727,10 @@ def test_list_transition_route_groups_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = transition_route_group.ListTransitionRouteGroupsResponse() + post_with_metadata.return_value = ( + transition_route_group.ListTransitionRouteGroupsResponse(), + metadata, + ) client.list_transition_route_groups( request, @@ -4684,6 +4742,7 @@ def test_list_transition_route_groups_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_transition_route_group_rest_bad_request( @@ -4775,11 +4834,15 @@ def test_get_transition_route_group_rest_interceptors(null_interceptor): transports.TransitionRouteGroupsRestInterceptor, "post_get_transition_route_group", ) as post, mock.patch.object( + 
transports.TransitionRouteGroupsRestInterceptor, + "post_get_transition_route_group_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.TransitionRouteGroupsRestInterceptor, "pre_get_transition_route_group", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = transition_route_group.GetTransitionRouteGroupRequest.pb( transition_route_group.GetTransitionRouteGroupRequest() ) @@ -4805,6 +4868,10 @@ def test_get_transition_route_group_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = transition_route_group.TransitionRouteGroup() + post_with_metadata.return_value = ( + transition_route_group.TransitionRouteGroup(), + metadata, + ) client.get_transition_route_group( request, @@ -4816,6 +4883,7 @@ def test_get_transition_route_group_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_transition_route_group_rest_bad_request( @@ -5086,11 +5154,15 @@ def test_create_transition_route_group_rest_interceptors(null_interceptor): transports.TransitionRouteGroupsRestInterceptor, "post_create_transition_route_group", ) as post, mock.patch.object( + transports.TransitionRouteGroupsRestInterceptor, + "post_create_transition_route_group_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.TransitionRouteGroupsRestInterceptor, "pre_create_transition_route_group", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gcdc_transition_route_group.CreateTransitionRouteGroupRequest.pb( gcdc_transition_route_group.CreateTransitionRouteGroupRequest() ) @@ -5116,6 +5188,10 @@ def test_create_transition_route_group_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gcdc_transition_route_group.TransitionRouteGroup() + post_with_metadata.return_value = ( + gcdc_transition_route_group.TransitionRouteGroup(), + metadata, + ) client.create_transition_route_group( request, @@ -5127,6 +5203,7 @@ def test_create_transition_route_group_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_transition_route_group_rest_bad_request( @@ -5401,11 +5478,15 @@ def test_update_transition_route_group_rest_interceptors(null_interceptor): transports.TransitionRouteGroupsRestInterceptor, "post_update_transition_route_group", ) as post, mock.patch.object( + transports.TransitionRouteGroupsRestInterceptor, + "post_update_transition_route_group_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.TransitionRouteGroupsRestInterceptor, "pre_update_transition_route_group", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gcdc_transition_route_group.UpdateTransitionRouteGroupRequest.pb( gcdc_transition_route_group.UpdateTransitionRouteGroupRequest() ) @@ -5431,6 +5512,10 @@ def test_update_transition_route_group_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gcdc_transition_route_group.TransitionRouteGroup() + post_with_metadata.return_value = ( + gcdc_transition_route_group.TransitionRouteGroup(), + metadata, + ) client.update_transition_route_group( request, @@ -5442,6 +5527,7 @@ def test_update_transition_route_group_rest_interceptors(null_interceptor): pre.assert_called_once() 
post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_transition_route_group_rest_bad_request( diff --git a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_versions.py b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_versions.py index 90df4faf1ccf..9f468059f958 100644 --- a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_versions.py +++ b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_versions.py @@ -77,6 +77,13 @@ from google.cloud.dialogflowcx_v3beta1.types import version from google.cloud.dialogflowcx_v3beta1.types import version as gcdc_version +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -307,6 +314,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = VersionsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = VersionsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -5427,10 +5477,13 @@ def test_list_versions_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.VersionsRestInterceptor, "post_list_versions" ) as post, mock.patch.object( + transports.VersionsRestInterceptor, "post_list_versions_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.VersionsRestInterceptor, "pre_list_versions" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = version.ListVersionsRequest.pb(version.ListVersionsRequest()) transcode.return_value = { "method": "post", @@ -5454,6 +5507,7 @@ def test_list_versions_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = version.ListVersionsResponse() + post_with_metadata.return_value = version.ListVersionsResponse(), metadata client.list_versions( request, @@ -5465,6 +5519,7 @@ def test_list_versions_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + 
post_with_metadata.assert_called_once() def test_get_version_rest_bad_request(request_type=version.GetVersionRequest): @@ -5555,10 +5610,13 @@ def test_get_version_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.VersionsRestInterceptor, "post_get_version" ) as post, mock.patch.object( + transports.VersionsRestInterceptor, "post_get_version_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.VersionsRestInterceptor, "pre_get_version" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = version.GetVersionRequest.pb(version.GetVersionRequest()) transcode.return_value = { "method": "post", @@ -5580,6 +5638,7 @@ def test_get_version_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = version.Version() + post_with_metadata.return_value = version.Version(), metadata client.get_version( request, @@ -5591,6 +5650,7 @@ def test_get_version_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_version_rest_bad_request( @@ -5752,10 +5812,13 @@ def test_create_version_rest_interceptors(null_interceptor): ), mock.patch.object( transports.VersionsRestInterceptor, "post_create_version" ) as post, mock.patch.object( + transports.VersionsRestInterceptor, "post_create_version_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.VersionsRestInterceptor, "pre_create_version" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gcdc_version.CreateVersionRequest.pb( gcdc_version.CreateVersionRequest() ) @@ -5779,6 +5842,7 @@ def test_create_version_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_version( request, @@ -5790,6 +5854,7 @@ def test_create_version_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_version_rest_bad_request( @@ -5965,10 +6030,13 @@ def test_update_version_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.VersionsRestInterceptor, "post_update_version" ) as post, mock.patch.object( + transports.VersionsRestInterceptor, "post_update_version_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.VersionsRestInterceptor, "pre_update_version" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gcdc_version.UpdateVersionRequest.pb( gcdc_version.UpdateVersionRequest() ) @@ -5992,6 +6060,7 @@ def test_update_version_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gcdc_version.Version() + post_with_metadata.return_value = gcdc_version.Version(), metadata client.update_version( request, @@ -6003,6 +6072,7 @@ def test_update_version_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_version_rest_bad_request(request_type=version.DeleteVersionRequest): @@ -6190,10 +6260,13 @@ def test_load_version_rest_interceptors(null_interceptor): ), mock.patch.object( transports.VersionsRestInterceptor, "post_load_version" ) as post, mock.patch.object( + 
transports.VersionsRestInterceptor, "post_load_version_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.VersionsRestInterceptor, "pre_load_version" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = version.LoadVersionRequest.pb(version.LoadVersionRequest()) transcode.return_value = { "method": "post", @@ -6215,6 +6288,7 @@ def test_load_version_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.load_version( request, @@ -6226,6 +6300,7 @@ def test_load_version_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_compare_versions_rest_bad_request(request_type=version.CompareVersionsRequest): @@ -6312,10 +6387,13 @@ def test_compare_versions_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.VersionsRestInterceptor, "post_compare_versions" ) as post, mock.patch.object( + transports.VersionsRestInterceptor, "post_compare_versions_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.VersionsRestInterceptor, "pre_compare_versions" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = version.CompareVersionsRequest.pb(version.CompareVersionsRequest()) transcode.return_value = { "method": "post", @@ -6339,6 +6417,7 @@ def test_compare_versions_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = version.CompareVersionsResponse() + post_with_metadata.return_value = version.CompareVersionsResponse(), metadata client.compare_versions( request, @@ -6350,6 +6429,7 @@ def test_compare_versions_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationRequest): diff --git a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_webhooks.py b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_webhooks.py index ddfdf27d0a82..ad997ffa492b 100644 --- a/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_webhooks.py +++ b/packages/google-cloud-dialogflow-cx/tests/unit/gapic/dialogflowcx_v3beta1/test_webhooks.py @@ -65,6 +65,13 @@ from google.cloud.dialogflowcx_v3beta1.types import webhook from google.cloud.dialogflowcx_v3beta1.types import webhook as gcdc_webhook +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -295,6 +302,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
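
The `_add_cred_info_for_auth_errors` tests repeated above (and again below for `WebhooksClient`) all exercise the same client-side behavior: on a 401/403/404 `GoogleAPICallError`, the client appends the JSON-serialized credential info to the error's details, provided the credentials object exposes `get_cred_info()` (available in google-auth >= 2.35.0 per the generated comment). A minimal sketch of that behavior outside the pytest harness, using a mock credential and `AgentsClient` purely for illustration:

    from http import HTTPStatus
    import json
    from unittest import mock

    from google.api_core import exceptions as core_exceptions
    from google.cloud.dialogflow_v2.services.agents import AgentsClient

    # A credential that only exposes get_cred_info, mirroring the unit tests above.
    cred_info = {"credential_type": "service account credentials"}
    cred = mock.Mock(["get_cred_info"])
    cred.get_cred_info = mock.Mock(return_value=cred_info)

    client = AgentsClient(credentials=cred)
    client._transport._credentials = cred

    # 403: the credential info string is appended to the error details.
    error = core_exceptions.GoogleAPICallError("denied", details=["foo"])
    error.code = HTTPStatus.FORBIDDEN
    client._add_cred_info_for_auth_errors(error)
    assert error.details == ["foo", json.dumps(cred_info)]

    # Non-auth errors (e.g. 500) are left untouched.
    error = core_exceptions.GoogleAPICallError("server error", details=["foo"])
    error.code = 500
    client._add_cred_info_for_auth_errors(error)
    assert error.details == ["foo"]
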
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = WebhooksClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = WebhooksClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -4270,10 +4320,13 @@ def test_list_webhooks_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.WebhooksRestInterceptor, "post_list_webhooks" ) as post, mock.patch.object( + transports.WebhooksRestInterceptor, "post_list_webhooks_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.WebhooksRestInterceptor, "pre_list_webhooks" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = webhook.ListWebhooksRequest.pb(webhook.ListWebhooksRequest()) transcode.return_value = { "method": "post", @@ -4297,6 +4350,7 @@ def test_list_webhooks_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = webhook.ListWebhooksResponse() + post_with_metadata.return_value = webhook.ListWebhooksResponse(), metadata client.list_webhooks( request, @@ -4308,6 +4362,7 @@ def test_list_webhooks_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_webhook_rest_bad_request(request_type=webhook.GetWebhookRequest): @@ -4396,10 +4451,13 @@ def test_get_webhook_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.WebhooksRestInterceptor, "post_get_webhook" ) as post, mock.patch.object( + transports.WebhooksRestInterceptor, "post_get_webhook_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.WebhooksRestInterceptor, "pre_get_webhook" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = webhook.GetWebhookRequest.pb(webhook.GetWebhookRequest()) transcode.return_value = { "method": "post", @@ -4421,6 +4479,7 @@ def test_get_webhook_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = webhook.Webhook() + post_with_metadata.return_value = webhook.Webhook(), metadata client.get_webhook( request, @@ -4432,6 +4491,7 @@ def test_get_webhook_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_webhook_rest_bad_request( @@ 
-4610,10 +4670,13 @@ def test_create_webhook_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.WebhooksRestInterceptor, "post_create_webhook" ) as post, mock.patch.object( + transports.WebhooksRestInterceptor, "post_create_webhook_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.WebhooksRestInterceptor, "pre_create_webhook" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gcdc_webhook.CreateWebhookRequest.pb( gcdc_webhook.CreateWebhookRequest() ) @@ -4637,6 +4700,7 @@ def test_create_webhook_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gcdc_webhook.Webhook() + post_with_metadata.return_value = gcdc_webhook.Webhook(), metadata client.create_webhook( request, @@ -4648,6 +4712,7 @@ def test_create_webhook_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_webhook_rest_bad_request( @@ -4834,10 +4899,13 @@ def test_update_webhook_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.WebhooksRestInterceptor, "post_update_webhook" ) as post, mock.patch.object( + transports.WebhooksRestInterceptor, "post_update_webhook_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.WebhooksRestInterceptor, "pre_update_webhook" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gcdc_webhook.UpdateWebhookRequest.pb( gcdc_webhook.UpdateWebhookRequest() ) @@ -4861,6 +4929,7 @@ def test_update_webhook_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gcdc_webhook.Webhook() + post_with_metadata.return_value = gcdc_webhook.Webhook(), metadata client.update_webhook( request, @@ -4872,6 +4941,7 @@ def test_update_webhook_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_webhook_rest_bad_request(request_type=webhook.DeleteWebhookRequest): diff --git a/packages/google-cloud-dialogflow/CHANGELOG.md b/packages/google-cloud-dialogflow/CHANGELOG.md index cc855c3a5799..945908fabbba 100644 --- a/packages/google-cloud-dialogflow/CHANGELOG.md +++ b/packages/google-cloud-dialogflow/CHANGELOG.md @@ -4,6 +4,24 @@ [1]: https://pypi.org/project/google-cloud-dialogflow/#history +## [2.39.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-dialogflow-v2.38.0...google-cloud-dialogflow-v2.39.0) (2025-02-12) + + +### Features + +* add PhoneNumbers API ([38d410a](https://github.com/googleapis/google-cloud-python/commit/38d410abe50390a467b2a6bdfd64816218759e87)) +* Add REST Interceptors which support reading metadata ([e92d527](https://github.com/googleapis/google-cloud-python/commit/e92d52797ffbce45d033eb81af24e0cad32baa55)) +* Add support for reading selective GAPIC generation methods from service YAML ([e92d527](https://github.com/googleapis/google-cloud-python/commit/e92d52797ffbce45d033eb81af24e0cad32baa55)) +* add TelephonyConnectInfo of phone call-related info about the conversation ([38d410a](https://github.com/googleapis/google-cloud-python/commit/38d410abe50390a467b2a6bdfd64816218759e87)) + +## [2.38.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-dialogflow-v2.37.0...google-cloud-dialogflow-v2.38.0) (2024-12-18) + + +### Features + +* [google-cloud-dialogflow] add new 
fields for delivering ([aa99816](https://github.com/googleapis/google-cloud-python/commit/aa998161a58ad9ce48c61ce913184fd49532327d)) +* [google-cloud-dialogflow] add new fields for delivering intermediate transcriptions through PubSub ([#13358](https://github.com/googleapis/google-cloud-python/issues/13358)) ([aa99816](https://github.com/googleapis/google-cloud-python/commit/aa998161a58ad9ce48c61ce913184fd49532327d)) + ## [2.37.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-dialogflow-v2.36.0...google-cloud-dialogflow-v2.37.0) (2024-12-12) diff --git a/packages/google-cloud-dialogflow/docs/dialogflow_v2beta1/phone_numbers.rst b/packages/google-cloud-dialogflow/docs/dialogflow_v2beta1/phone_numbers.rst new file mode 100644 index 000000000000..885678a88dc2 --- /dev/null +++ b/packages/google-cloud-dialogflow/docs/dialogflow_v2beta1/phone_numbers.rst @@ -0,0 +1,10 @@ +PhoneNumbers +------------------------------ + +.. automodule:: google.cloud.dialogflow_v2beta1.services.phone_numbers + :members: + :inherited-members: + +.. automodule:: google.cloud.dialogflow_v2beta1.services.phone_numbers.pagers + :members: + :inherited-members: diff --git a/packages/google-cloud-dialogflow/docs/dialogflow_v2beta1/services_.rst b/packages/google-cloud-dialogflow/docs/dialogflow_v2beta1/services_.rst index 5e85c5d11c54..134d34945f5c 100644 --- a/packages/google-cloud-dialogflow/docs/dialogflow_v2beta1/services_.rst +++ b/packages/google-cloud-dialogflow/docs/dialogflow_v2beta1/services_.rst @@ -17,6 +17,7 @@ Services for Google Cloud Dialogflow v2beta1 API intents knowledge_bases participants + phone_numbers session_entity_types sessions sip_trunks diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow/gapic_version.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow/gapic_version.py index c34cf46454e4..52eb7a2892d5 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow/gapic_version.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.37.0" # {x-release-please-version} +__version__ = "2.39.0" # {x-release-please-version} diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/gapic_version.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/gapic_version.py index c34cf46454e4..52eb7a2892d5 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/gapic_version.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.37.0" # {x-release-please-version} +__version__ = "2.39.0" # {x-release-please-version} diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/agents/client.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/agents/client.py index d78e35631c64..33292167f346 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/agents/client.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/agents/client.py @@ -14,6 +14,8 @@ # limitations under the License. 
# from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -484,6 +486,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -1816,16 +1845,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -1871,16 +1904,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def cancel_operation( self, @@ -1981,16 +2018,20 @@ def get_location( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def list_locations( self, @@ -2036,16 +2077,20 @@ def list_locations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. 
+ return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/agents/transports/rest.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/agents/transports/rest.py index f14c8a452f2a..77e7b09533c9 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/agents/transports/rest.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/agents/transports/rest.py @@ -176,12 +176,35 @@ def post_export_agent( ) -> operations_pb2.Operation: """Post-rpc interceptor for export_agent - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_export_agent_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Agents server but before - it is returned to user code. + it is returned to user code. This `post_export_agent` interceptor runs + before the `post_export_agent_with_metadata` interceptor. """ return response + def post_export_agent_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for export_agent + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Agents server but before it is returned to user code. + + We recommend only using this `post_export_agent_with_metadata` + interceptor in new development instead of the `post_export_agent` interceptor. + When both interceptors are used, this `post_export_agent_with_metadata` interceptor runs after the + `post_export_agent` interceptor. The (possibly modified) response returned by + `post_export_agent` will be passed to + `post_export_agent_with_metadata`. + """ + return response, metadata + def pre_get_agent( self, request: agent.GetAgentRequest, @@ -197,12 +220,33 @@ def pre_get_agent( def post_get_agent(self, response: agent.Agent) -> agent.Agent: """Post-rpc interceptor for get_agent - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_agent_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Agents server but before - it is returned to user code. + it is returned to user code. This `post_get_agent` interceptor runs + before the `post_get_agent_with_metadata` interceptor. """ return response + def post_get_agent_with_metadata( + self, response: agent.Agent, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[agent.Agent, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_agent + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Agents server but before it is returned to user code. + + We recommend only using this `post_get_agent_with_metadata` + interceptor in new development instead of the `post_get_agent` interceptor. + When both interceptors are used, this `post_get_agent_with_metadata` interceptor runs after the + `post_get_agent` interceptor. The (possibly modified) response returned by + `post_get_agent` will be passed to + `post_get_agent_with_metadata`. 
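
The new `post_*_with_metadata` hooks described above receive the decoded response together with response metadata (the REST transport builds that metadata from the HTTP response headers, as the `__call__` changes later in this file show) and must return a `(response, metadata)` tuple. A minimal sketch of wiring a custom interceptor into a client, assuming the generated `AgentsRestInterceptor`/`AgentsRestTransport` names and using `post_get_agent_with_metadata` for illustration:

    from typing import Sequence, Tuple, Union

    from google.auth.credentials import AnonymousCredentials
    from google.cloud.dialogflow_v2.services.agents import AgentsClient
    from google.cloud.dialogflow_v2.services.agents.transports.rest import (
        AgentsRestInterceptor,
        AgentsRestTransport,
    )
    from google.cloud.dialogflow_v2.types import agent


    class HeaderLoggingInterceptor(AgentsRestInterceptor):
        def post_get_agent_with_metadata(
            self,
            response: agent.Agent,
            metadata: Sequence[Tuple[str, Union[str, bytes]]],
        ) -> Tuple[agent.Agent, Sequence[Tuple[str, Union[str, bytes]]]]:
            # `metadata` is a list of (header, value) pairs taken from the
            # HTTP response; the hook must hand both values back, and either
            # may be modified before returning.
            print("response headers:", dict(metadata))
            return response, metadata


    # Anonymous credentials keep the sketch self-contained; real code would
    # rely on application default credentials or an explicit credential.
    transport = AgentsRestTransport(
        credentials=AnonymousCredentials(),
        interceptor=HeaderLoggingInterceptor(),
    )
    client = AgentsClient(transport=transport)
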
+ """ + return response, metadata + def pre_get_validation_result( self, request: agent.GetValidationResultRequest, @@ -222,12 +266,37 @@ def post_get_validation_result( ) -> validation_result.ValidationResult: """Post-rpc interceptor for get_validation_result - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_validation_result_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Agents server but before - it is returned to user code. + it is returned to user code. This `post_get_validation_result` interceptor runs + before the `post_get_validation_result_with_metadata` interceptor. """ return response + def post_get_validation_result_with_metadata( + self, + response: validation_result.ValidationResult, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + validation_result.ValidationResult, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for get_validation_result + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Agents server but before it is returned to user code. + + We recommend only using this `post_get_validation_result_with_metadata` + interceptor in new development instead of the `post_get_validation_result` interceptor. + When both interceptors are used, this `post_get_validation_result_with_metadata` interceptor runs after the + `post_get_validation_result` interceptor. The (possibly modified) response returned by + `post_get_validation_result` will be passed to + `post_get_validation_result_with_metadata`. + """ + return response, metadata + def pre_import_agent( self, request: agent.ImportAgentRequest, @@ -245,12 +314,35 @@ def post_import_agent( ) -> operations_pb2.Operation: """Post-rpc interceptor for import_agent - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_import_agent_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Agents server but before - it is returned to user code. + it is returned to user code. This `post_import_agent` interceptor runs + before the `post_import_agent_with_metadata` interceptor. """ return response + def post_import_agent_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for import_agent + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Agents server but before it is returned to user code. + + We recommend only using this `post_import_agent_with_metadata` + interceptor in new development instead of the `post_import_agent` interceptor. + When both interceptors are used, this `post_import_agent_with_metadata` interceptor runs after the + `post_import_agent` interceptor. The (possibly modified) response returned by + `post_import_agent` will be passed to + `post_import_agent_with_metadata`. + """ + return response, metadata + def pre_restore_agent( self, request: agent.RestoreAgentRequest, @@ -268,12 +360,35 @@ def post_restore_agent( ) -> operations_pb2.Operation: """Post-rpc interceptor for restore_agent - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_restore_agent_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response after it is returned by the Agents server but before - it is returned to user code. + it is returned to user code. This `post_restore_agent` interceptor runs + before the `post_restore_agent_with_metadata` interceptor. """ return response + def post_restore_agent_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for restore_agent + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Agents server but before it is returned to user code. + + We recommend only using this `post_restore_agent_with_metadata` + interceptor in new development instead of the `post_restore_agent` interceptor. + When both interceptors are used, this `post_restore_agent_with_metadata` interceptor runs after the + `post_restore_agent` interceptor. The (possibly modified) response returned by + `post_restore_agent` will be passed to + `post_restore_agent_with_metadata`. + """ + return response, metadata + def pre_search_agents( self, request: agent.SearchAgentsRequest, @@ -291,12 +406,35 @@ def post_search_agents( ) -> agent.SearchAgentsResponse: """Post-rpc interceptor for search_agents - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_search_agents_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Agents server but before - it is returned to user code. + it is returned to user code. This `post_search_agents` interceptor runs + before the `post_search_agents_with_metadata` interceptor. """ return response + def post_search_agents_with_metadata( + self, + response: agent.SearchAgentsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[agent.SearchAgentsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for search_agents + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Agents server but before it is returned to user code. + + We recommend only using this `post_search_agents_with_metadata` + interceptor in new development instead of the `post_search_agents` interceptor. + When both interceptors are used, this `post_search_agents_with_metadata` interceptor runs after the + `post_search_agents` interceptor. The (possibly modified) response returned by + `post_search_agents` will be passed to + `post_search_agents_with_metadata`. + """ + return response, metadata + def pre_set_agent( self, request: gcd_agent.SetAgentRequest, @@ -312,12 +450,35 @@ def pre_set_agent( def post_set_agent(self, response: gcd_agent.Agent) -> gcd_agent.Agent: """Post-rpc interceptor for set_agent - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_set_agent_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Agents server but before - it is returned to user code. + it is returned to user code. This `post_set_agent` interceptor runs + before the `post_set_agent_with_metadata` interceptor. 
""" return response + def post_set_agent_with_metadata( + self, + response: gcd_agent.Agent, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gcd_agent.Agent, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for set_agent + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Agents server but before it is returned to user code. + + We recommend only using this `post_set_agent_with_metadata` + interceptor in new development instead of the `post_set_agent` interceptor. + When both interceptors are used, this `post_set_agent_with_metadata` interceptor runs after the + `post_set_agent` interceptor. The (possibly modified) response returned by + `post_set_agent` will be passed to + `post_set_agent_with_metadata`. + """ + return response, metadata + def pre_train_agent( self, request: agent.TrainAgentRequest, @@ -335,12 +496,35 @@ def post_train_agent( ) -> operations_pb2.Operation: """Post-rpc interceptor for train_agent - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_train_agent_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Agents server but before - it is returned to user code. + it is returned to user code. This `post_train_agent` interceptor runs + before the `post_train_agent_with_metadata` interceptor. """ return response + def post_train_agent_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for train_agent + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Agents server but before it is returned to user code. + + We recommend only using this `post_train_agent_with_metadata` + interceptor in new development instead of the `post_train_agent` interceptor. + When both interceptors are used, this `post_train_agent_with_metadata` interceptor runs after the + `post_train_agent` interceptor. The (possibly modified) response returned by + `post_train_agent` will be passed to + `post_train_agent_with_metadata`. 
+ """ + return response, metadata + def pre_get_location( self, request: locations_pb2.GetLocationRequest, @@ -842,6 +1026,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_export_agent(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_export_agent_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -995,6 +1183,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_agent(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_agent_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1140,6 +1332,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_validation_result(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_validation_result_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1290,6 +1486,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_import_agent(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_import_agent_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1440,6 +1640,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_restore_agent(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_restore_agent_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1585,6 +1789,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_search_agents(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_search_agents_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1744,6 +1952,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_set_agent(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_set_agent_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1892,6 +2104,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_train_agent(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_train_agent_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/answer_records/client.py 
b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/answer_records/client.py index 92520ab73796..3c4bd7612a7b 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/answer_records/client.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/answer_records/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -525,6 +527,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -1049,16 +1078,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -1104,16 +1137,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def cancel_operation( self, @@ -1214,16 +1251,20 @@ def get_location( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def list_locations( self, @@ -1269,16 +1310,20 @@ def list_locations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/answer_records/transports/rest.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/answer_records/transports/rest.py index b9598bf7bd94..9a13609a5036 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/answer_records/transports/rest.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/answer_records/transports/rest.py @@ -112,12 +112,37 @@ def post_list_answer_records( ) -> answer_record.ListAnswerRecordsResponse: """Post-rpc interceptor for list_answer_records - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_answer_records_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AnswerRecords server but before - it is returned to user code. + it is returned to user code. This `post_list_answer_records` interceptor runs + before the `post_list_answer_records_with_metadata` interceptor. """ return response + def post_list_answer_records_with_metadata( + self, + response: answer_record.ListAnswerRecordsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + answer_record.ListAnswerRecordsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_answer_records + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnswerRecords server but before it is returned to user code. + + We recommend only using this `post_list_answer_records_with_metadata` + interceptor in new development instead of the `post_list_answer_records` interceptor. + When both interceptors are used, this `post_list_answer_records_with_metadata` interceptor runs after the + `post_list_answer_records` interceptor. The (possibly modified) response returned by + `post_list_answer_records` will be passed to + `post_list_answer_records_with_metadata`. + """ + return response, metadata + def pre_update_answer_record( self, request: gcd_answer_record.UpdateAnswerRecordRequest, @@ -138,12 +163,35 @@ def post_update_answer_record( ) -> gcd_answer_record.AnswerRecord: """Post-rpc interceptor for update_answer_record - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_answer_record_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AnswerRecords server but before - it is returned to user code. + it is returned to user code. This `post_update_answer_record` interceptor runs + before the `post_update_answer_record_with_metadata` interceptor. """ return response + def post_update_answer_record_with_metadata( + self, + response: gcd_answer_record.AnswerRecord, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gcd_answer_record.AnswerRecord, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_answer_record + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnswerRecords server but before it is returned to user code. 
+ + We recommend only using this `post_update_answer_record_with_metadata` + interceptor in new development instead of the `post_update_answer_record` interceptor. + When both interceptors are used, this `post_update_answer_record_with_metadata` interceptor runs after the + `post_update_answer_record` interceptor. The (possibly modified) response returned by + `post_update_answer_record` will be passed to + `post_update_answer_record_with_metadata`. + """ + return response, metadata + def pre_get_location( self, request: locations_pb2.GetLocationRequest, @@ -477,6 +525,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_answer_records(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_answer_records_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -660,6 +712,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_answer_record(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_answer_record_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/contexts/client.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/contexts/client.py index 5139b72d4d72..fdd52e3580b2 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/contexts/client.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/contexts/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -488,6 +490,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -1462,16 +1491,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. 
+ return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -1517,16 +1550,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def cancel_operation( self, @@ -1627,16 +1664,20 @@ def get_location( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def list_locations( self, @@ -1682,16 +1723,20 @@ def list_locations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/contexts/transports/rest.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/contexts/transports/rest.py index 9d7803ecae3c..745e8f941bc4 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/contexts/transports/rest.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/contexts/transports/rest.py @@ -135,12 +135,35 @@ def pre_create_context( def post_create_context(self, response: gcd_context.Context) -> gcd_context.Context: """Post-rpc interceptor for create_context - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_context_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Contexts server but before - it is returned to user code. + it is returned to user code. This `post_create_context` interceptor runs + before the `post_create_context_with_metadata` interceptor. """ return response + def post_create_context_with_metadata( + self, + response: gcd_context.Context, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gcd_context.Context, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_context + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Contexts server but before it is returned to user code. + + We recommend only using this `post_create_context_with_metadata` + interceptor in new development instead of the `post_create_context` interceptor. 
+ When both interceptors are used, this `post_create_context_with_metadata` interceptor runs after the + `post_create_context` interceptor. The (possibly modified) response returned by + `post_create_context` will be passed to + `post_create_context_with_metadata`. + """ + return response, metadata + def pre_delete_all_contexts( self, request: context.DeleteAllContextsRequest, @@ -182,12 +205,35 @@ def pre_get_context( def post_get_context(self, response: context.Context) -> context.Context: """Post-rpc interceptor for get_context - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_context_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Contexts server but before - it is returned to user code. + it is returned to user code. This `post_get_context` interceptor runs + before the `post_get_context_with_metadata` interceptor. """ return response + def post_get_context_with_metadata( + self, + response: context.Context, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[context.Context, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_context + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Contexts server but before it is returned to user code. + + We recommend only using this `post_get_context_with_metadata` + interceptor in new development instead of the `post_get_context` interceptor. + When both interceptors are used, this `post_get_context_with_metadata` interceptor runs after the + `post_get_context` interceptor. The (possibly modified) response returned by + `post_get_context` will be passed to + `post_get_context_with_metadata`. + """ + return response, metadata + def pre_list_contexts( self, request: context.ListContextsRequest, @@ -205,12 +251,35 @@ def post_list_contexts( ) -> context.ListContextsResponse: """Post-rpc interceptor for list_contexts - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_contexts_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Contexts server but before - it is returned to user code. + it is returned to user code. This `post_list_contexts` interceptor runs + before the `post_list_contexts_with_metadata` interceptor. """ return response + def post_list_contexts_with_metadata( + self, + response: context.ListContextsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[context.ListContextsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_contexts + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Contexts server but before it is returned to user code. + + We recommend only using this `post_list_contexts_with_metadata` + interceptor in new development instead of the `post_list_contexts` interceptor. + When both interceptors are used, this `post_list_contexts_with_metadata` interceptor runs after the + `post_list_contexts` interceptor. The (possibly modified) response returned by + `post_list_contexts` will be passed to + `post_list_contexts_with_metadata`. 
+ """ + return response, metadata + def pre_update_context( self, request: gcd_context.UpdateContextRequest, @@ -228,12 +297,35 @@ def pre_update_context( def post_update_context(self, response: gcd_context.Context) -> gcd_context.Context: """Post-rpc interceptor for update_context - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_context_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Contexts server but before - it is returned to user code. + it is returned to user code. This `post_update_context` interceptor runs + before the `post_update_context_with_metadata` interceptor. """ return response + def post_update_context_with_metadata( + self, + response: gcd_context.Context, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gcd_context.Context, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_context + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Contexts server but before it is returned to user code. + + We recommend only using this `post_update_context_with_metadata` + interceptor in new development instead of the `post_update_context` interceptor. + When both interceptors are used, this `post_update_context_with_metadata` interceptor runs after the + `post_update_context` interceptor. The (possibly modified) response returned by + `post_update_context` will be passed to + `post_update_context_with_metadata`. + """ + return response, metadata + def pre_get_location( self, request: locations_pb2.GetLocationRequest, @@ -591,6 +683,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_context(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_context_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -973,6 +1069,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_context(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_context_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1118,6 +1218,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_contexts(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_contexts_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1288,6 +1392,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_context(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_context_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/conversation_datasets/client.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/conversation_datasets/client.py 
index 27c1cf586101..a4d0a6b69731 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/conversation_datasets/client.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/conversation_datasets/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -498,6 +500,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -1411,16 +1440,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -1466,16 +1499,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def cancel_operation( self, @@ -1576,16 +1613,20 @@ def get_location( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def list_locations( self, @@ -1631,16 +1672,20 @@ def list_locations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. 
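The client-side half of this change is the `_add_cred_info_for_auth_errors` helper plus the try/except wrapping around each rpc call: on a 401/403/404 the client appends a JSON-encoded summary of the active credentials to the error's details before re-raising. A rough sketch of what a caller then sees, assuming google-auth>=2.35.0 credentials that implement `get_cred_info`, a recent google-api-core, and that the same wrapping applies to the service's primary RPCs as to the operations/locations mixins shown in this hunk (client construction and resource name are illustrative):

```python
from google.api_core import exceptions as core_exceptions
from google.cloud import dialogflow_v2

# Illustrative client; any RPC wrapped in the new try/except behaves the same way.
client = dialogflow_v2.ConversationDatasetsClient()

try:
    client.get_conversation_dataset(
        name="projects/my-project/locations/global/conversationDatasets/missing"
    )
except core_exceptions.GoogleAPICallError as e:
    # For 401/403/404 responses the client now appends a JSON-encoded
    # credential summary (from google-auth's get_cred_info) to the error
    # details, which makes auth failures easier to triage.
    for detail in e.details:
        print(detail)
    raise
```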
- return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/conversation_datasets/transports/rest.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/conversation_datasets/transports/rest.py index db0063361e1f..149ee3070eca 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/conversation_datasets/transports/rest.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/conversation_datasets/transports/rest.py @@ -139,12 +139,35 @@ def post_create_conversation_dataset( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_conversation_dataset - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_conversation_dataset_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ConversationDatasets server but before - it is returned to user code. + it is returned to user code. This `post_create_conversation_dataset` interceptor runs + before the `post_create_conversation_dataset_with_metadata` interceptor. """ return response + def post_create_conversation_dataset_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_conversation_dataset + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ConversationDatasets server but before it is returned to user code. + + We recommend only using this `post_create_conversation_dataset_with_metadata` + interceptor in new development instead of the `post_create_conversation_dataset` interceptor. + When both interceptors are used, this `post_create_conversation_dataset_with_metadata` interceptor runs after the + `post_create_conversation_dataset` interceptor. The (possibly modified) response returned by + `post_create_conversation_dataset` will be passed to + `post_create_conversation_dataset_with_metadata`. + """ + return response, metadata + def pre_delete_conversation_dataset( self, request: conversation_dataset.DeleteConversationDatasetRequest, @@ -165,12 +188,35 @@ def post_delete_conversation_dataset( ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_conversation_dataset - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_conversation_dataset_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ConversationDatasets server but before - it is returned to user code. + it is returned to user code. This `post_delete_conversation_dataset` interceptor runs + before the `post_delete_conversation_dataset_with_metadata` interceptor. 
""" return response + def post_delete_conversation_dataset_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_conversation_dataset + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ConversationDatasets server but before it is returned to user code. + + We recommend only using this `post_delete_conversation_dataset_with_metadata` + interceptor in new development instead of the `post_delete_conversation_dataset` interceptor. + When both interceptors are used, this `post_delete_conversation_dataset_with_metadata` interceptor runs after the + `post_delete_conversation_dataset` interceptor. The (possibly modified) response returned by + `post_delete_conversation_dataset` will be passed to + `post_delete_conversation_dataset_with_metadata`. + """ + return response, metadata + def pre_get_conversation_dataset( self, request: conversation_dataset.GetConversationDatasetRequest, @@ -191,12 +237,38 @@ def post_get_conversation_dataset( ) -> conversation_dataset.ConversationDataset: """Post-rpc interceptor for get_conversation_dataset - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_conversation_dataset_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ConversationDatasets server but before - it is returned to user code. + it is returned to user code. This `post_get_conversation_dataset` interceptor runs + before the `post_get_conversation_dataset_with_metadata` interceptor. """ return response + def post_get_conversation_dataset_with_metadata( + self, + response: conversation_dataset.ConversationDataset, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + conversation_dataset.ConversationDataset, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for get_conversation_dataset + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ConversationDatasets server but before it is returned to user code. + + We recommend only using this `post_get_conversation_dataset_with_metadata` + interceptor in new development instead of the `post_get_conversation_dataset` interceptor. + When both interceptors are used, this `post_get_conversation_dataset_with_metadata` interceptor runs after the + `post_get_conversation_dataset` interceptor. The (possibly modified) response returned by + `post_get_conversation_dataset` will be passed to + `post_get_conversation_dataset_with_metadata`. + """ + return response, metadata + def pre_import_conversation_data( self, request: conversation_dataset.ImportConversationDataRequest, @@ -217,12 +289,35 @@ def post_import_conversation_data( ) -> operations_pb2.Operation: """Post-rpc interceptor for import_conversation_data - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_import_conversation_data_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ConversationDatasets server but before - it is returned to user code. + it is returned to user code. This `post_import_conversation_data` interceptor runs + before the `post_import_conversation_data_with_metadata` interceptor. 
""" return response + def post_import_conversation_data_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for import_conversation_data + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ConversationDatasets server but before it is returned to user code. + + We recommend only using this `post_import_conversation_data_with_metadata` + interceptor in new development instead of the `post_import_conversation_data` interceptor. + When both interceptors are used, this `post_import_conversation_data_with_metadata` interceptor runs after the + `post_import_conversation_data` interceptor. The (possibly modified) response returned by + `post_import_conversation_data` will be passed to + `post_import_conversation_data_with_metadata`. + """ + return response, metadata + def pre_list_conversation_datasets( self, request: conversation_dataset.ListConversationDatasetsRequest, @@ -243,12 +338,38 @@ def post_list_conversation_datasets( ) -> conversation_dataset.ListConversationDatasetsResponse: """Post-rpc interceptor for list_conversation_datasets - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_conversation_datasets_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ConversationDatasets server but before - it is returned to user code. + it is returned to user code. This `post_list_conversation_datasets` interceptor runs + before the `post_list_conversation_datasets_with_metadata` interceptor. """ return response + def post_list_conversation_datasets_with_metadata( + self, + response: conversation_dataset.ListConversationDatasetsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + conversation_dataset.ListConversationDatasetsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_conversation_datasets + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ConversationDatasets server but before it is returned to user code. + + We recommend only using this `post_list_conversation_datasets_with_metadata` + interceptor in new development instead of the `post_list_conversation_datasets` interceptor. + When both interceptors are used, this `post_list_conversation_datasets_with_metadata` interceptor runs after the + `post_list_conversation_datasets` interceptor. The (possibly modified) response returned by + `post_list_conversation_datasets` will be passed to + `post_list_conversation_datasets_with_metadata`. 
+ """ + return response, metadata + def pre_get_location( self, request: locations_pb2.GetLocationRequest, @@ -650,6 +771,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_conversation_dataset(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_conversation_dataset_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -796,6 +921,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_conversation_dataset(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_conversation_dataset_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -949,6 +1078,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_conversation_dataset(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_conversation_dataset_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1104,6 +1237,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_import_conversation_data(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_import_conversation_data_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1251,6 +1388,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_conversation_datasets(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_conversation_datasets_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/conversation_models/client.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/conversation_models/client.py index 071de039a4c5..b285e815f00b 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/conversation_models/client.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/conversation_models/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -560,6 +562,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. 
+ """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -1963,16 +1992,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -2018,16 +2051,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def cancel_operation( self, @@ -2128,16 +2165,20 @@ def get_location( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def list_locations( self, @@ -2183,16 +2224,20 @@ def list_locations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. 
+ return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/conversation_models/transports/rest.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/conversation_models/transports/rest.py index 15e4ae8e846b..aba1babb9a60 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/conversation_models/transports/rest.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/conversation_models/transports/rest.py @@ -171,12 +171,35 @@ def post_create_conversation_model( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_conversation_model - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_conversation_model_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ConversationModels server but before - it is returned to user code. + it is returned to user code. This `post_create_conversation_model` interceptor runs + before the `post_create_conversation_model_with_metadata` interceptor. """ return response + def post_create_conversation_model_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_conversation_model + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ConversationModels server but before it is returned to user code. + + We recommend only using this `post_create_conversation_model_with_metadata` + interceptor in new development instead of the `post_create_conversation_model` interceptor. + When both interceptors are used, this `post_create_conversation_model_with_metadata` interceptor runs after the + `post_create_conversation_model` interceptor. The (possibly modified) response returned by + `post_create_conversation_model` will be passed to + `post_create_conversation_model_with_metadata`. + """ + return response, metadata + def pre_create_conversation_model_evaluation( self, request: conversation_model.CreateConversationModelEvaluationRequest, @@ -197,12 +220,35 @@ def post_create_conversation_model_evaluation( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_conversation_model_evaluation - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_conversation_model_evaluation_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ConversationModels server but before - it is returned to user code. + it is returned to user code. This `post_create_conversation_model_evaluation` interceptor runs + before the `post_create_conversation_model_evaluation_with_metadata` interceptor. 
""" return response + def post_create_conversation_model_evaluation_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_conversation_model_evaluation + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ConversationModels server but before it is returned to user code. + + We recommend only using this `post_create_conversation_model_evaluation_with_metadata` + interceptor in new development instead of the `post_create_conversation_model_evaluation` interceptor. + When both interceptors are used, this `post_create_conversation_model_evaluation_with_metadata` interceptor runs after the + `post_create_conversation_model_evaluation` interceptor. The (possibly modified) response returned by + `post_create_conversation_model_evaluation` will be passed to + `post_create_conversation_model_evaluation_with_metadata`. + """ + return response, metadata + def pre_delete_conversation_model( self, request: conversation_model.DeleteConversationModelRequest, @@ -223,12 +269,35 @@ def post_delete_conversation_model( ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_conversation_model - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_conversation_model_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ConversationModels server but before - it is returned to user code. + it is returned to user code. This `post_delete_conversation_model` interceptor runs + before the `post_delete_conversation_model_with_metadata` interceptor. """ return response + def post_delete_conversation_model_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_conversation_model + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ConversationModels server but before it is returned to user code. + + We recommend only using this `post_delete_conversation_model_with_metadata` + interceptor in new development instead of the `post_delete_conversation_model` interceptor. + When both interceptors are used, this `post_delete_conversation_model_with_metadata` interceptor runs after the + `post_delete_conversation_model` interceptor. The (possibly modified) response returned by + `post_delete_conversation_model` will be passed to + `post_delete_conversation_model_with_metadata`. + """ + return response, metadata + def pre_deploy_conversation_model( self, request: conversation_model.DeployConversationModelRequest, @@ -249,12 +318,35 @@ def post_deploy_conversation_model( ) -> operations_pb2.Operation: """Post-rpc interceptor for deploy_conversation_model - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_deploy_conversation_model_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ConversationModels server but before - it is returned to user code. + it is returned to user code. This `post_deploy_conversation_model` interceptor runs + before the `post_deploy_conversation_model_with_metadata` interceptor. 
""" return response + def post_deploy_conversation_model_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for deploy_conversation_model + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ConversationModels server but before it is returned to user code. + + We recommend only using this `post_deploy_conversation_model_with_metadata` + interceptor in new development instead of the `post_deploy_conversation_model` interceptor. + When both interceptors are used, this `post_deploy_conversation_model_with_metadata` interceptor runs after the + `post_deploy_conversation_model` interceptor. The (possibly modified) response returned by + `post_deploy_conversation_model` will be passed to + `post_deploy_conversation_model_with_metadata`. + """ + return response, metadata + def pre_get_conversation_model( self, request: conversation_model.GetConversationModelRequest, @@ -275,12 +367,37 @@ def post_get_conversation_model( ) -> conversation_model.ConversationModel: """Post-rpc interceptor for get_conversation_model - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_conversation_model_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ConversationModels server but before - it is returned to user code. + it is returned to user code. This `post_get_conversation_model` interceptor runs + before the `post_get_conversation_model_with_metadata` interceptor. """ return response + def post_get_conversation_model_with_metadata( + self, + response: conversation_model.ConversationModel, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + conversation_model.ConversationModel, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for get_conversation_model + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ConversationModels server but before it is returned to user code. + + We recommend only using this `post_get_conversation_model_with_metadata` + interceptor in new development instead of the `post_get_conversation_model` interceptor. + When both interceptors are used, this `post_get_conversation_model_with_metadata` interceptor runs after the + `post_get_conversation_model` interceptor. The (possibly modified) response returned by + `post_get_conversation_model` will be passed to + `post_get_conversation_model_with_metadata`. + """ + return response, metadata + def pre_get_conversation_model_evaluation( self, request: conversation_model.GetConversationModelEvaluationRequest, @@ -301,12 +418,38 @@ def post_get_conversation_model_evaluation( ) -> conversation_model.ConversationModelEvaluation: """Post-rpc interceptor for get_conversation_model_evaluation - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_conversation_model_evaluation_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ConversationModels server but before - it is returned to user code. + it is returned to user code. This `post_get_conversation_model_evaluation` interceptor runs + before the `post_get_conversation_model_evaluation_with_metadata` interceptor. 
""" return response + def post_get_conversation_model_evaluation_with_metadata( + self, + response: conversation_model.ConversationModelEvaluation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + conversation_model.ConversationModelEvaluation, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for get_conversation_model_evaluation + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ConversationModels server but before it is returned to user code. + + We recommend only using this `post_get_conversation_model_evaluation_with_metadata` + interceptor in new development instead of the `post_get_conversation_model_evaluation` interceptor. + When both interceptors are used, this `post_get_conversation_model_evaluation_with_metadata` interceptor runs after the + `post_get_conversation_model_evaluation` interceptor. The (possibly modified) response returned by + `post_get_conversation_model_evaluation` will be passed to + `post_get_conversation_model_evaluation_with_metadata`. + """ + return response, metadata + def pre_list_conversation_model_evaluations( self, request: conversation_model.ListConversationModelEvaluationsRequest, @@ -327,12 +470,38 @@ def post_list_conversation_model_evaluations( ) -> conversation_model.ListConversationModelEvaluationsResponse: """Post-rpc interceptor for list_conversation_model_evaluations - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_conversation_model_evaluations_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ConversationModels server but before - it is returned to user code. + it is returned to user code. This `post_list_conversation_model_evaluations` interceptor runs + before the `post_list_conversation_model_evaluations_with_metadata` interceptor. """ return response + def post_list_conversation_model_evaluations_with_metadata( + self, + response: conversation_model.ListConversationModelEvaluationsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + conversation_model.ListConversationModelEvaluationsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_conversation_model_evaluations + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ConversationModels server but before it is returned to user code. + + We recommend only using this `post_list_conversation_model_evaluations_with_metadata` + interceptor in new development instead of the `post_list_conversation_model_evaluations` interceptor. + When both interceptors are used, this `post_list_conversation_model_evaluations_with_metadata` interceptor runs after the + `post_list_conversation_model_evaluations` interceptor. The (possibly modified) response returned by + `post_list_conversation_model_evaluations` will be passed to + `post_list_conversation_model_evaluations_with_metadata`. + """ + return response, metadata + def pre_list_conversation_models( self, request: conversation_model.ListConversationModelsRequest, @@ -353,12 +522,38 @@ def post_list_conversation_models( ) -> conversation_model.ListConversationModelsResponse: """Post-rpc interceptor for list_conversation_models - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_conversation_models_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response after it is returned by the ConversationModels server but before - it is returned to user code. + it is returned to user code. This `post_list_conversation_models` interceptor runs + before the `post_list_conversation_models_with_metadata` interceptor. """ return response + def post_list_conversation_models_with_metadata( + self, + response: conversation_model.ListConversationModelsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + conversation_model.ListConversationModelsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_conversation_models + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ConversationModels server but before it is returned to user code. + + We recommend only using this `post_list_conversation_models_with_metadata` + interceptor in new development instead of the `post_list_conversation_models` interceptor. + When both interceptors are used, this `post_list_conversation_models_with_metadata` interceptor runs after the + `post_list_conversation_models` interceptor. The (possibly modified) response returned by + `post_list_conversation_models` will be passed to + `post_list_conversation_models_with_metadata`. + """ + return response, metadata + def pre_undeploy_conversation_model( self, request: conversation_model.UndeployConversationModelRequest, @@ -379,12 +574,35 @@ def post_undeploy_conversation_model( ) -> operations_pb2.Operation: """Post-rpc interceptor for undeploy_conversation_model - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_undeploy_conversation_model_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ConversationModels server but before - it is returned to user code. + it is returned to user code. This `post_undeploy_conversation_model` interceptor runs + before the `post_undeploy_conversation_model_with_metadata` interceptor. """ return response + def post_undeploy_conversation_model_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for undeploy_conversation_model + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ConversationModels server but before it is returned to user code. + + We recommend only using this `post_undeploy_conversation_model_with_metadata` + interceptor in new development instead of the `post_undeploy_conversation_model` interceptor. + When both interceptors are used, this `post_undeploy_conversation_model_with_metadata` interceptor runs after the + `post_undeploy_conversation_model` interceptor. The (possibly modified) response returned by + `post_undeploy_conversation_model` will be passed to + `post_undeploy_conversation_model_with_metadata`. 
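For the long-running methods in this service (create, delete, deploy, undeploy), the post hooks receive the raw `operations_pb2.Operation` rather than an unpacked result, so the metadata variant is a natural place to correlate the operation name with the response headers. A hedged sketch, assuming the generated interceptor base class is named `ConversationModelsRestInterceptor`:

```python
import logging
from typing import Sequence, Tuple, Union

from google.longrunning import operations_pb2
from google.cloud.dialogflow_v2.services.conversation_models.transports import (
    rest as models_rest,
)


class OperationLoggingInterceptor(models_rest.ConversationModelsRestInterceptor):
    def post_undeploy_conversation_model_with_metadata(
        self,
        response: operations_pb2.Operation,
        metadata: Sequence[Tuple[str, Union[str, bytes]]],
    ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]:
        # `response` is the raw long-running Operation proto; logging its name
        # next to the response headers makes later correlation straightforward.
        logging.info("undeploy started: %s %r", response.name, dict(metadata))
        return response, metadata
```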
+ """ + return response, metadata + def pre_get_location( self, request: locations_pb2.GetLocationRequest, @@ -784,6 +1002,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_conversation_model(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_conversation_model_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -941,6 +1163,13 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_conversation_model_evaluation(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_create_conversation_model_evaluation_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1088,6 +1317,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_conversation_model(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_conversation_model_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1241,6 +1474,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_deploy_conversation_model(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_deploy_conversation_model_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1387,6 +1624,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_conversation_model(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_conversation_model_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1538,6 +1779,13 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_conversation_model_evaluation(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_get_conversation_model_evaluation_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1694,6 +1942,13 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_conversation_model_evaluations(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_list_conversation_model_evaluations_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1844,6 +2099,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_conversation_models(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = 
self._interceptor.post_list_conversation_models_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2000,6 +2259,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_undeploy_conversation_model(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_undeploy_conversation_model_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/conversation_profiles/client.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/conversation_profiles/client.py index 93f4d954e3d2..c1a5a1e749f8 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/conversation_profiles/client.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/conversation_profiles/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -640,6 +642,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -1854,16 +1883,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -1909,16 +1942,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def cancel_operation( self, @@ -2019,16 +2056,20 @@ def get_location( # Validate the universe domain. self._validate_universe_domain() - # Send the request. 
- response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def list_locations( self, @@ -2074,16 +2115,20 @@ def list_locations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/conversation_profiles/transports/rest.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/conversation_profiles/transports/rest.py index a467afdb1c2c..b6dbabd47a4a 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/conversation_profiles/transports/rest.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/conversation_profiles/transports/rest.py @@ -152,12 +152,35 @@ def post_clear_suggestion_feature_config( ) -> operations_pb2.Operation: """Post-rpc interceptor for clear_suggestion_feature_config - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_clear_suggestion_feature_config_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ConversationProfiles server but before - it is returned to user code. + it is returned to user code. This `post_clear_suggestion_feature_config` interceptor runs + before the `post_clear_suggestion_feature_config_with_metadata` interceptor. """ return response + def post_clear_suggestion_feature_config_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for clear_suggestion_feature_config + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ConversationProfiles server but before it is returned to user code. + + We recommend only using this `post_clear_suggestion_feature_config_with_metadata` + interceptor in new development instead of the `post_clear_suggestion_feature_config` interceptor. + When both interceptors are used, this `post_clear_suggestion_feature_config_with_metadata` interceptor runs after the + `post_clear_suggestion_feature_config` interceptor. The (possibly modified) response returned by + `post_clear_suggestion_feature_config` will be passed to + `post_clear_suggestion_feature_config_with_metadata`. 
+ """ + return response, metadata + def pre_create_conversation_profile( self, request: gcd_conversation_profile.CreateConversationProfileRequest, @@ -178,12 +201,38 @@ def post_create_conversation_profile( ) -> gcd_conversation_profile.ConversationProfile: """Post-rpc interceptor for create_conversation_profile - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_conversation_profile_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ConversationProfiles server but before - it is returned to user code. + it is returned to user code. This `post_create_conversation_profile` interceptor runs + before the `post_create_conversation_profile_with_metadata` interceptor. """ return response + def post_create_conversation_profile_with_metadata( + self, + response: gcd_conversation_profile.ConversationProfile, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + gcd_conversation_profile.ConversationProfile, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for create_conversation_profile + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ConversationProfiles server but before it is returned to user code. + + We recommend only using this `post_create_conversation_profile_with_metadata` + interceptor in new development instead of the `post_create_conversation_profile` interceptor. + When both interceptors are used, this `post_create_conversation_profile_with_metadata` interceptor runs after the + `post_create_conversation_profile` interceptor. The (possibly modified) response returned by + `post_create_conversation_profile` will be passed to + `post_create_conversation_profile_with_metadata`. + """ + return response, metadata + def pre_delete_conversation_profile( self, request: conversation_profile.DeleteConversationProfileRequest, @@ -219,12 +268,38 @@ def post_get_conversation_profile( ) -> conversation_profile.ConversationProfile: """Post-rpc interceptor for get_conversation_profile - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_conversation_profile_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ConversationProfiles server but before - it is returned to user code. + it is returned to user code. This `post_get_conversation_profile` interceptor runs + before the `post_get_conversation_profile_with_metadata` interceptor. """ return response + def post_get_conversation_profile_with_metadata( + self, + response: conversation_profile.ConversationProfile, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + conversation_profile.ConversationProfile, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for get_conversation_profile + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ConversationProfiles server but before it is returned to user code. + + We recommend only using this `post_get_conversation_profile_with_metadata` + interceptor in new development instead of the `post_get_conversation_profile` interceptor. + When both interceptors are used, this `post_get_conversation_profile_with_metadata` interceptor runs after the + `post_get_conversation_profile` interceptor. 
The (possibly modified) response returned by + `post_get_conversation_profile` will be passed to + `post_get_conversation_profile_with_metadata`. + """ + return response, metadata + def pre_list_conversation_profiles( self, request: conversation_profile.ListConversationProfilesRequest, @@ -245,12 +320,38 @@ def post_list_conversation_profiles( ) -> conversation_profile.ListConversationProfilesResponse: """Post-rpc interceptor for list_conversation_profiles - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_conversation_profiles_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ConversationProfiles server but before - it is returned to user code. + it is returned to user code. This `post_list_conversation_profiles` interceptor runs + before the `post_list_conversation_profiles_with_metadata` interceptor. """ return response + def post_list_conversation_profiles_with_metadata( + self, + response: conversation_profile.ListConversationProfilesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + conversation_profile.ListConversationProfilesResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_conversation_profiles + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ConversationProfiles server but before it is returned to user code. + + We recommend only using this `post_list_conversation_profiles_with_metadata` + interceptor in new development instead of the `post_list_conversation_profiles` interceptor. + When both interceptors are used, this `post_list_conversation_profiles_with_metadata` interceptor runs after the + `post_list_conversation_profiles` interceptor. The (possibly modified) response returned by + `post_list_conversation_profiles` will be passed to + `post_list_conversation_profiles_with_metadata`. + """ + return response, metadata + def pre_set_suggestion_feature_config( self, request: gcd_conversation_profile.SetSuggestionFeatureConfigRequest, @@ -271,12 +372,35 @@ def post_set_suggestion_feature_config( ) -> operations_pb2.Operation: """Post-rpc interceptor for set_suggestion_feature_config - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_set_suggestion_feature_config_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ConversationProfiles server but before - it is returned to user code. + it is returned to user code. This `post_set_suggestion_feature_config` interceptor runs + before the `post_set_suggestion_feature_config_with_metadata` interceptor. """ return response + def post_set_suggestion_feature_config_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for set_suggestion_feature_config + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ConversationProfiles server but before it is returned to user code. + + We recommend only using this `post_set_suggestion_feature_config_with_metadata` + interceptor in new development instead of the `post_set_suggestion_feature_config` interceptor. 
+ When both interceptors are used, this `post_set_suggestion_feature_config_with_metadata` interceptor runs after the + `post_set_suggestion_feature_config` interceptor. The (possibly modified) response returned by + `post_set_suggestion_feature_config` will be passed to + `post_set_suggestion_feature_config_with_metadata`. + """ + return response, metadata + def pre_update_conversation_profile( self, request: gcd_conversation_profile.UpdateConversationProfileRequest, @@ -297,12 +421,38 @@ def post_update_conversation_profile( ) -> gcd_conversation_profile.ConversationProfile: """Post-rpc interceptor for update_conversation_profile - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_conversation_profile_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ConversationProfiles server but before - it is returned to user code. + it is returned to user code. This `post_update_conversation_profile` interceptor runs + before the `post_update_conversation_profile_with_metadata` interceptor. """ return response + def post_update_conversation_profile_with_metadata( + self, + response: gcd_conversation_profile.ConversationProfile, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + gcd_conversation_profile.ConversationProfile, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for update_conversation_profile + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ConversationProfiles server but before it is returned to user code. + + We recommend only using this `post_update_conversation_profile_with_metadata` + interceptor in new development instead of the `post_update_conversation_profile` interceptor. + When both interceptors are used, this `post_update_conversation_profile_with_metadata` interceptor runs after the + `post_update_conversation_profile` interceptor. The (possibly modified) response returned by + `post_update_conversation_profile` will be passed to + `post_update_conversation_profile_with_metadata`. 
+ """ + return response, metadata + def pre_get_location( self, request: locations_pb2.GetLocationRequest, @@ -704,6 +854,13 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_clear_suggestion_feature_config(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_clear_suggestion_feature_config_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -857,6 +1014,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_conversation_profile(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_conversation_profile_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1121,6 +1282,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_conversation_profile(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_conversation_profile_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1270,6 +1435,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_conversation_profiles(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_conversation_profiles_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1426,6 +1595,13 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_set_suggestion_feature_config(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_set_suggestion_feature_config_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1579,6 +1755,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_conversation_profile(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_conversation_profile_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/conversations/client.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/conversations/client.py index 17fa73983973..843fadc65221 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/conversations/client.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/conversations/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -718,6 +720,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. 
return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -1985,16 +2014,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -2040,16 +2073,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def cancel_operation( self, @@ -2150,16 +2187,20 @@ def get_location( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def list_locations( self, @@ -2205,16 +2246,20 @@ def list_locations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. 
+ return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/conversations/transports/rest.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/conversations/transports/rest.py index dedf7d20919f..bc7711dc5ef4 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/conversations/transports/rest.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/conversations/transports/rest.py @@ -169,12 +169,35 @@ def post_complete_conversation( ) -> conversation.Conversation: """Post-rpc interceptor for complete_conversation - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_complete_conversation_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Conversations server but before - it is returned to user code. + it is returned to user code. This `post_complete_conversation` interceptor runs + before the `post_complete_conversation_with_metadata` interceptor. """ return response + def post_complete_conversation_with_metadata( + self, + response: conversation.Conversation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[conversation.Conversation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for complete_conversation + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Conversations server but before it is returned to user code. + + We recommend only using this `post_complete_conversation_with_metadata` + interceptor in new development instead of the `post_complete_conversation` interceptor. + When both interceptors are used, this `post_complete_conversation_with_metadata` interceptor runs after the + `post_complete_conversation` interceptor. The (possibly modified) response returned by + `post_complete_conversation` will be passed to + `post_complete_conversation_with_metadata`. + """ + return response, metadata + def pre_create_conversation( self, request: gcd_conversation.CreateConversationRequest, @@ -195,12 +218,35 @@ def post_create_conversation( ) -> gcd_conversation.Conversation: """Post-rpc interceptor for create_conversation - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_conversation_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Conversations server but before - it is returned to user code. + it is returned to user code. This `post_create_conversation` interceptor runs + before the `post_create_conversation_with_metadata` interceptor. """ return response + def post_create_conversation_with_metadata( + self, + response: gcd_conversation.Conversation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gcd_conversation.Conversation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_conversation + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Conversations server but before it is returned to user code. + + We recommend only using this `post_create_conversation_with_metadata` + interceptor in new development instead of the `post_create_conversation` interceptor. 
+ When both interceptors are used, this `post_create_conversation_with_metadata` interceptor runs after the + `post_create_conversation` interceptor. The (possibly modified) response returned by + `post_create_conversation` will be passed to + `post_create_conversation_with_metadata`. + """ + return response, metadata + def pre_generate_stateless_suggestion( self, request: conversation.GenerateStatelessSuggestionRequest, @@ -221,12 +267,38 @@ def post_generate_stateless_suggestion( ) -> conversation.GenerateStatelessSuggestionResponse: """Post-rpc interceptor for generate_stateless_suggestion - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_generate_stateless_suggestion_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Conversations server but before - it is returned to user code. + it is returned to user code. This `post_generate_stateless_suggestion` interceptor runs + before the `post_generate_stateless_suggestion_with_metadata` interceptor. """ return response + def post_generate_stateless_suggestion_with_metadata( + self, + response: conversation.GenerateStatelessSuggestionResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + conversation.GenerateStatelessSuggestionResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for generate_stateless_suggestion + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Conversations server but before it is returned to user code. + + We recommend only using this `post_generate_stateless_suggestion_with_metadata` + interceptor in new development instead of the `post_generate_stateless_suggestion` interceptor. + When both interceptors are used, this `post_generate_stateless_suggestion_with_metadata` interceptor runs after the + `post_generate_stateless_suggestion` interceptor. The (possibly modified) response returned by + `post_generate_stateless_suggestion` will be passed to + `post_generate_stateless_suggestion_with_metadata`. + """ + return response, metadata + def pre_generate_stateless_summary( self, request: conversation.GenerateStatelessSummaryRequest, @@ -247,12 +319,38 @@ def post_generate_stateless_summary( ) -> conversation.GenerateStatelessSummaryResponse: """Post-rpc interceptor for generate_stateless_summary - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_generate_stateless_summary_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Conversations server but before - it is returned to user code. + it is returned to user code. This `post_generate_stateless_summary` interceptor runs + before the `post_generate_stateless_summary_with_metadata` interceptor. """ return response + def post_generate_stateless_summary_with_metadata( + self, + response: conversation.GenerateStatelessSummaryResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + conversation.GenerateStatelessSummaryResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for generate_stateless_summary + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Conversations server but before it is returned to user code. 
+ + We recommend only using this `post_generate_stateless_summary_with_metadata` + interceptor in new development instead of the `post_generate_stateless_summary` interceptor. + When both interceptors are used, this `post_generate_stateless_summary_with_metadata` interceptor runs after the + `post_generate_stateless_summary` interceptor. The (possibly modified) response returned by + `post_generate_stateless_summary` will be passed to + `post_generate_stateless_summary_with_metadata`. + """ + return response, metadata + def pre_get_conversation( self, request: conversation.GetConversationRequest, @@ -272,12 +370,35 @@ def post_get_conversation( ) -> conversation.Conversation: """Post-rpc interceptor for get_conversation - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_conversation_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Conversations server but before - it is returned to user code. + it is returned to user code. This `post_get_conversation` interceptor runs + before the `post_get_conversation_with_metadata` interceptor. """ return response + def post_get_conversation_with_metadata( + self, + response: conversation.Conversation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[conversation.Conversation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_conversation + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Conversations server but before it is returned to user code. + + We recommend only using this `post_get_conversation_with_metadata` + interceptor in new development instead of the `post_get_conversation` interceptor. + When both interceptors are used, this `post_get_conversation_with_metadata` interceptor runs after the + `post_get_conversation` interceptor. The (possibly modified) response returned by + `post_get_conversation` will be passed to + `post_get_conversation_with_metadata`. + """ + return response, metadata + def pre_list_conversations( self, request: conversation.ListConversationsRequest, @@ -297,12 +418,37 @@ def post_list_conversations( ) -> conversation.ListConversationsResponse: """Post-rpc interceptor for list_conversations - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_conversations_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Conversations server but before - it is returned to user code. + it is returned to user code. This `post_list_conversations` interceptor runs + before the `post_list_conversations_with_metadata` interceptor. """ return response + def post_list_conversations_with_metadata( + self, + response: conversation.ListConversationsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + conversation.ListConversationsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_conversations + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Conversations server but before it is returned to user code. + + We recommend only using this `post_list_conversations_with_metadata` + interceptor in new development instead of the `post_list_conversations` interceptor. 
+ When both interceptors are used, this `post_list_conversations_with_metadata` interceptor runs after the + `post_list_conversations` interceptor. The (possibly modified) response returned by + `post_list_conversations` will be passed to + `post_list_conversations_with_metadata`. + """ + return response, metadata + def pre_list_messages( self, request: conversation.ListMessagesRequest, @@ -322,12 +468,37 @@ def post_list_messages( ) -> conversation.ListMessagesResponse: """Post-rpc interceptor for list_messages - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_messages_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Conversations server but before - it is returned to user code. + it is returned to user code. This `post_list_messages` interceptor runs + before the `post_list_messages_with_metadata` interceptor. """ return response + def post_list_messages_with_metadata( + self, + response: conversation.ListMessagesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + conversation.ListMessagesResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_messages + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Conversations server but before it is returned to user code. + + We recommend only using this `post_list_messages_with_metadata` + interceptor in new development instead of the `post_list_messages` interceptor. + When both interceptors are used, this `post_list_messages_with_metadata` interceptor runs after the + `post_list_messages` interceptor. The (possibly modified) response returned by + `post_list_messages` will be passed to + `post_list_messages_with_metadata`. + """ + return response, metadata + def pre_search_knowledge( self, request: conversation.SearchKnowledgeRequest, @@ -347,12 +518,37 @@ def post_search_knowledge( ) -> conversation.SearchKnowledgeResponse: """Post-rpc interceptor for search_knowledge - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_search_knowledge_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Conversations server but before - it is returned to user code. + it is returned to user code. This `post_search_knowledge` interceptor runs + before the `post_search_knowledge_with_metadata` interceptor. """ return response + def post_search_knowledge_with_metadata( + self, + response: conversation.SearchKnowledgeResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + conversation.SearchKnowledgeResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for search_knowledge + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Conversations server but before it is returned to user code. + + We recommend only using this `post_search_knowledge_with_metadata` + interceptor in new development instead of the `post_search_knowledge` interceptor. + When both interceptors are used, this `post_search_knowledge_with_metadata` interceptor runs after the + `post_search_knowledge` interceptor. The (possibly modified) response returned by + `post_search_knowledge` will be passed to + `post_search_knowledge_with_metadata`. 
+ """ + return response, metadata + def pre_suggest_conversation_summary( self, request: gcd_conversation.SuggestConversationSummaryRequest, @@ -373,12 +569,38 @@ def post_suggest_conversation_summary( ) -> gcd_conversation.SuggestConversationSummaryResponse: """Post-rpc interceptor for suggest_conversation_summary - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_suggest_conversation_summary_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Conversations server but before - it is returned to user code. + it is returned to user code. This `post_suggest_conversation_summary` interceptor runs + before the `post_suggest_conversation_summary_with_metadata` interceptor. """ return response + def post_suggest_conversation_summary_with_metadata( + self, + response: gcd_conversation.SuggestConversationSummaryResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + gcd_conversation.SuggestConversationSummaryResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for suggest_conversation_summary + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Conversations server but before it is returned to user code. + + We recommend only using this `post_suggest_conversation_summary_with_metadata` + interceptor in new development instead of the `post_suggest_conversation_summary` interceptor. + When both interceptors are used, this `post_suggest_conversation_summary_with_metadata` interceptor runs after the + `post_suggest_conversation_summary` interceptor. The (possibly modified) response returned by + `post_suggest_conversation_summary` will be passed to + `post_suggest_conversation_summary_with_metadata`. 
+ """ + return response, metadata + def pre_get_location( self, request: locations_pb2.GetLocationRequest, @@ -723,6 +945,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_complete_conversation(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_complete_conversation_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -879,6 +1105,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_conversation(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_conversation_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1034,6 +1264,13 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_generate_stateless_suggestion(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_generate_stateless_suggestion_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1193,6 +1430,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_generate_stateless_summary(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_generate_stateless_summary_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1345,6 +1586,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_conversation(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_conversation_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1490,6 +1735,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_conversations(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_conversations_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1635,6 +1884,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_messages(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_messages_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1788,6 +2041,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_search_knowledge(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_search_knowledge_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO 
COVER @@ -1945,6 +2202,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_suggest_conversation_summary(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_suggest_conversation_summary_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/documents/client.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/documents/client.py index b8aa3313dbd6..36e2e64129ea 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/documents/client.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/documents/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -492,6 +494,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -1806,16 +1835,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -1861,16 +1894,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def cancel_operation( self, @@ -1971,16 +2008,20 @@ def get_location( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def list_locations( self, @@ -2026,16 +2067,20 @@ def list_locations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/documents/transports/rest.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/documents/transports/rest.py index 62f13c0eb9a0..e9a920d36de3 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/documents/transports/rest.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/documents/transports/rest.py @@ -160,12 +160,35 @@ def post_create_document( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_document - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_document_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Documents server but before - it is returned to user code. + it is returned to user code. This `post_create_document` interceptor runs + before the `post_create_document_with_metadata` interceptor. """ return response + def post_create_document_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_document + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Documents server but before it is returned to user code. + + We recommend only using this `post_create_document_with_metadata` + interceptor in new development instead of the `post_create_document` interceptor. + When both interceptors are used, this `post_create_document_with_metadata` interceptor runs after the + `post_create_document` interceptor. The (possibly modified) response returned by + `post_create_document` will be passed to + `post_create_document_with_metadata`. + """ + return response, metadata + def pre_delete_document( self, request: document.DeleteDocumentRequest, @@ -183,12 +206,35 @@ def post_delete_document( ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_document - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_document_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Documents server but before - it is returned to user code. + it is returned to user code. This `post_delete_document` interceptor runs + before the `post_delete_document_with_metadata` interceptor. 
""" return response + def post_delete_document_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_document + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Documents server but before it is returned to user code. + + We recommend only using this `post_delete_document_with_metadata` + interceptor in new development instead of the `post_delete_document` interceptor. + When both interceptors are used, this `post_delete_document_with_metadata` interceptor runs after the + `post_delete_document` interceptor. The (possibly modified) response returned by + `post_delete_document` will be passed to + `post_delete_document_with_metadata`. + """ + return response, metadata + def pre_export_document( self, request: document.ExportDocumentRequest, @@ -206,12 +252,35 @@ def post_export_document( ) -> operations_pb2.Operation: """Post-rpc interceptor for export_document - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_export_document_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Documents server but before - it is returned to user code. + it is returned to user code. This `post_export_document` interceptor runs + before the `post_export_document_with_metadata` interceptor. """ return response + def post_export_document_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for export_document + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Documents server but before it is returned to user code. + + We recommend only using this `post_export_document_with_metadata` + interceptor in new development instead of the `post_export_document` interceptor. + When both interceptors are used, this `post_export_document_with_metadata` interceptor runs after the + `post_export_document` interceptor. The (possibly modified) response returned by + `post_export_document` will be passed to + `post_export_document_with_metadata`. + """ + return response, metadata + def pre_get_document( self, request: document.GetDocumentRequest, @@ -227,12 +296,35 @@ def pre_get_document( def post_get_document(self, response: document.Document) -> document.Document: """Post-rpc interceptor for get_document - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_document_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Documents server but before - it is returned to user code. + it is returned to user code. This `post_get_document` interceptor runs + before the `post_get_document_with_metadata` interceptor. """ return response + def post_get_document_with_metadata( + self, + response: document.Document, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[document.Document, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_document + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Documents server but before it is returned to user code. 
+ + We recommend only using this `post_get_document_with_metadata` + interceptor in new development instead of the `post_get_document` interceptor. + When both interceptors are used, this `post_get_document_with_metadata` interceptor runs after the + `post_get_document` interceptor. The (possibly modified) response returned by + `post_get_document` will be passed to + `post_get_document_with_metadata`. + """ + return response, metadata + def pre_import_documents( self, request: document.ImportDocumentsRequest, @@ -252,12 +344,35 @@ def post_import_documents( ) -> operations_pb2.Operation: """Post-rpc interceptor for import_documents - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_import_documents_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Documents server but before - it is returned to user code. + it is returned to user code. This `post_import_documents` interceptor runs + before the `post_import_documents_with_metadata` interceptor. """ return response + def post_import_documents_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for import_documents + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Documents server but before it is returned to user code. + + We recommend only using this `post_import_documents_with_metadata` + interceptor in new development instead of the `post_import_documents` interceptor. + When both interceptors are used, this `post_import_documents_with_metadata` interceptor runs after the + `post_import_documents` interceptor. The (possibly modified) response returned by + `post_import_documents` will be passed to + `post_import_documents_with_metadata`. + """ + return response, metadata + def pre_list_documents( self, request: document.ListDocumentsRequest, @@ -275,12 +390,35 @@ def post_list_documents( ) -> document.ListDocumentsResponse: """Post-rpc interceptor for list_documents - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_documents_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Documents server but before - it is returned to user code. + it is returned to user code. This `post_list_documents` interceptor runs + before the `post_list_documents_with_metadata` interceptor. """ return response + def post_list_documents_with_metadata( + self, + response: document.ListDocumentsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[document.ListDocumentsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_documents + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Documents server but before it is returned to user code. + + We recommend only using this `post_list_documents_with_metadata` + interceptor in new development instead of the `post_list_documents` interceptor. + When both interceptors are used, this `post_list_documents_with_metadata` interceptor runs after the + `post_list_documents` interceptor. The (possibly modified) response returned by + `post_list_documents` will be passed to + `post_list_documents_with_metadata`. 
+ """ + return response, metadata + def pre_reload_document( self, request: document.ReloadDocumentRequest, @@ -298,12 +436,35 @@ def post_reload_document( ) -> operations_pb2.Operation: """Post-rpc interceptor for reload_document - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_reload_document_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Documents server but before - it is returned to user code. + it is returned to user code. This `post_reload_document` interceptor runs + before the `post_reload_document_with_metadata` interceptor. """ return response + def post_reload_document_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for reload_document + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Documents server but before it is returned to user code. + + We recommend only using this `post_reload_document_with_metadata` + interceptor in new development instead of the `post_reload_document` interceptor. + When both interceptors are used, this `post_reload_document_with_metadata` interceptor runs after the + `post_reload_document` interceptor. The (possibly modified) response returned by + `post_reload_document` will be passed to + `post_reload_document_with_metadata`. + """ + return response, metadata + def pre_update_document( self, request: gcd_document.UpdateDocumentRequest, @@ -323,12 +484,35 @@ def post_update_document( ) -> operations_pb2.Operation: """Post-rpc interceptor for update_document - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_document_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Documents server but before - it is returned to user code. + it is returned to user code. This `post_update_document` interceptor runs + before the `post_update_document_with_metadata` interceptor. """ return response + def post_update_document_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_document + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Documents server but before it is returned to user code. + + We recommend only using this `post_update_document_with_metadata` + interceptor in new development instead of the `post_update_document` interceptor. + When both interceptors are used, this `post_update_document_with_metadata` interceptor runs after the + `post_update_document` interceptor. The (possibly modified) response returned by + `post_update_document` will be passed to + `post_update_document_with_metadata`. 
+ """ + return response, metadata + def pre_get_location( self, request: locations_pb2.GetLocationRequest, @@ -730,6 +914,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_document(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_document_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -876,6 +1064,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_document(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_document_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1030,6 +1222,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_export_document(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_export_document_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1182,6 +1378,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_document(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_document_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1336,6 +1536,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_import_documents(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_import_documents_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1483,6 +1687,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_documents(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_documents_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1637,6 +1845,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_reload_document(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_reload_document_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1791,6 +2003,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_document(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_document_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git 
a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/encryption_spec_service/client.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/encryption_spec_service/client.py index 108046c38a84..9a836756f187 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/encryption_spec_service/client.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/encryption_spec_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -491,6 +493,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -995,16 +1024,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -1050,16 +1083,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def cancel_operation( self, @@ -1160,16 +1197,20 @@ def get_location( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def list_locations( self, @@ -1215,16 +1256,20 @@ def list_locations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. 
- response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/encryption_spec_service/transports/rest.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/encryption_spec_service/transports/rest.py index 7f490c22127e..335f9136fbcc 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/encryption_spec_service/transports/rest.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/encryption_spec_service/transports/rest.py @@ -113,12 +113,35 @@ def post_get_encryption_spec( ) -> encryption_spec.EncryptionSpec: """Post-rpc interceptor for get_encryption_spec - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_encryption_spec_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the EncryptionSpecService server but before - it is returned to user code. + it is returned to user code. This `post_get_encryption_spec` interceptor runs + before the `post_get_encryption_spec_with_metadata` interceptor. """ return response + def post_get_encryption_spec_with_metadata( + self, + response: encryption_spec.EncryptionSpec, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[encryption_spec.EncryptionSpec, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_encryption_spec + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the EncryptionSpecService server but before it is returned to user code. + + We recommend only using this `post_get_encryption_spec_with_metadata` + interceptor in new development instead of the `post_get_encryption_spec` interceptor. + When both interceptors are used, this `post_get_encryption_spec_with_metadata` interceptor runs after the + `post_get_encryption_spec` interceptor. The (possibly modified) response returned by + `post_get_encryption_spec` will be passed to + `post_get_encryption_spec_with_metadata`. + """ + return response, metadata + def pre_initialize_encryption_spec( self, request: gcd_encryption_spec.InitializeEncryptionSpecRequest, @@ -139,12 +162,35 @@ def post_initialize_encryption_spec( ) -> operations_pb2.Operation: """Post-rpc interceptor for initialize_encryption_spec - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_initialize_encryption_spec_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the EncryptionSpecService server but before - it is returned to user code. + it is returned to user code. This `post_initialize_encryption_spec` interceptor runs + before the `post_initialize_encryption_spec_with_metadata` interceptor. 
""" return response + def post_initialize_encryption_spec_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for initialize_encryption_spec + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the EncryptionSpecService server but before it is returned to user code. + + We recommend only using this `post_initialize_encryption_spec_with_metadata` + interceptor in new development instead of the `post_initialize_encryption_spec` interceptor. + When both interceptors are used, this `post_initialize_encryption_spec_with_metadata` interceptor runs after the + `post_initialize_encryption_spec` interceptor. The (possibly modified) response returned by + `post_initialize_encryption_spec` will be passed to + `post_initialize_encryption_spec_with_metadata`. + """ + return response, metadata + def pre_get_location( self, request: locations_pb2.GetLocationRequest, @@ -541,6 +587,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_encryption_spec(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_encryption_spec_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -693,6 +743,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_initialize_encryption_spec(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_initialize_encryption_spec_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/entity_types/client.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/entity_types/client.py index ae8a23635e11..d71f1bb7c6b3 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/entity_types/client.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/entity_types/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -489,6 +491,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. 
+ """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -2168,16 +2197,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -2223,16 +2256,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def cancel_operation( self, @@ -2333,16 +2370,20 @@ def get_location( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def list_locations( self, @@ -2388,16 +2429,20 @@ def list_locations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/entity_types/transports/rest.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/entity_types/transports/rest.py index b8a46a27b693..5a01d029e32d 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/entity_types/transports/rest.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/entity_types/transports/rest.py @@ -173,12 +173,35 @@ def post_batch_create_entities( ) -> operations_pb2.Operation: """Post-rpc interceptor for batch_create_entities - Override in a subclass to manipulate the response + DEPRECATED. 
Please use the `post_batch_create_entities_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the EntityTypes server but before - it is returned to user code. + it is returned to user code. This `post_batch_create_entities` interceptor runs + before the `post_batch_create_entities_with_metadata` interceptor. """ return response + def post_batch_create_entities_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for batch_create_entities + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the EntityTypes server but before it is returned to user code. + + We recommend only using this `post_batch_create_entities_with_metadata` + interceptor in new development instead of the `post_batch_create_entities` interceptor. + When both interceptors are used, this `post_batch_create_entities_with_metadata` interceptor runs after the + `post_batch_create_entities` interceptor. The (possibly modified) response returned by + `post_batch_create_entities` will be passed to + `post_batch_create_entities_with_metadata`. + """ + return response, metadata + def pre_batch_delete_entities( self, request: entity_type.BatchDeleteEntitiesRequest, @@ -198,12 +221,35 @@ def post_batch_delete_entities( ) -> operations_pb2.Operation: """Post-rpc interceptor for batch_delete_entities - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_batch_delete_entities_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the EntityTypes server but before - it is returned to user code. + it is returned to user code. This `post_batch_delete_entities` interceptor runs + before the `post_batch_delete_entities_with_metadata` interceptor. """ return response + def post_batch_delete_entities_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for batch_delete_entities + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the EntityTypes server but before it is returned to user code. + + We recommend only using this `post_batch_delete_entities_with_metadata` + interceptor in new development instead of the `post_batch_delete_entities` interceptor. + When both interceptors are used, this `post_batch_delete_entities_with_metadata` interceptor runs after the + `post_batch_delete_entities` interceptor. The (possibly modified) response returned by + `post_batch_delete_entities` will be passed to + `post_batch_delete_entities_with_metadata`. + """ + return response, metadata + def pre_batch_delete_entity_types( self, request: entity_type.BatchDeleteEntityTypesRequest, @@ -224,12 +270,35 @@ def post_batch_delete_entity_types( ) -> operations_pb2.Operation: """Post-rpc interceptor for batch_delete_entity_types - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_batch_delete_entity_types_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the EntityTypes server but before - it is returned to user code. + it is returned to user code. 
This `post_batch_delete_entity_types` interceptor runs + before the `post_batch_delete_entity_types_with_metadata` interceptor. """ return response + def post_batch_delete_entity_types_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for batch_delete_entity_types + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the EntityTypes server but before it is returned to user code. + + We recommend only using this `post_batch_delete_entity_types_with_metadata` + interceptor in new development instead of the `post_batch_delete_entity_types` interceptor. + When both interceptors are used, this `post_batch_delete_entity_types_with_metadata` interceptor runs after the + `post_batch_delete_entity_types` interceptor. The (possibly modified) response returned by + `post_batch_delete_entity_types` will be passed to + `post_batch_delete_entity_types_with_metadata`. + """ + return response, metadata + def pre_batch_update_entities( self, request: entity_type.BatchUpdateEntitiesRequest, @@ -249,12 +318,35 @@ def post_batch_update_entities( ) -> operations_pb2.Operation: """Post-rpc interceptor for batch_update_entities - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_batch_update_entities_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the EntityTypes server but before - it is returned to user code. + it is returned to user code. This `post_batch_update_entities` interceptor runs + before the `post_batch_update_entities_with_metadata` interceptor. """ return response + def post_batch_update_entities_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for batch_update_entities + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the EntityTypes server but before it is returned to user code. + + We recommend only using this `post_batch_update_entities_with_metadata` + interceptor in new development instead of the `post_batch_update_entities` interceptor. + When both interceptors are used, this `post_batch_update_entities_with_metadata` interceptor runs after the + `post_batch_update_entities` interceptor. The (possibly modified) response returned by + `post_batch_update_entities` will be passed to + `post_batch_update_entities_with_metadata`. + """ + return response, metadata + def pre_batch_update_entity_types( self, request: entity_type.BatchUpdateEntityTypesRequest, @@ -275,12 +367,35 @@ def post_batch_update_entity_types( ) -> operations_pb2.Operation: """Post-rpc interceptor for batch_update_entity_types - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_batch_update_entity_types_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the EntityTypes server but before - it is returned to user code. + it is returned to user code. This `post_batch_update_entity_types` interceptor runs + before the `post_batch_update_entity_types_with_metadata` interceptor. 
""" return response + def post_batch_update_entity_types_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for batch_update_entity_types + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the EntityTypes server but before it is returned to user code. + + We recommend only using this `post_batch_update_entity_types_with_metadata` + interceptor in new development instead of the `post_batch_update_entity_types` interceptor. + When both interceptors are used, this `post_batch_update_entity_types_with_metadata` interceptor runs after the + `post_batch_update_entity_types` interceptor. The (possibly modified) response returned by + `post_batch_update_entity_types` will be passed to + `post_batch_update_entity_types_with_metadata`. + """ + return response, metadata + def pre_create_entity_type( self, request: gcd_entity_type.CreateEntityTypeRequest, @@ -300,12 +415,35 @@ def post_create_entity_type( ) -> gcd_entity_type.EntityType: """Post-rpc interceptor for create_entity_type - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_entity_type_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the EntityTypes server but before - it is returned to user code. + it is returned to user code. This `post_create_entity_type` interceptor runs + before the `post_create_entity_type_with_metadata` interceptor. """ return response + def post_create_entity_type_with_metadata( + self, + response: gcd_entity_type.EntityType, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gcd_entity_type.EntityType, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_entity_type + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the EntityTypes server but before it is returned to user code. + + We recommend only using this `post_create_entity_type_with_metadata` + interceptor in new development instead of the `post_create_entity_type` interceptor. + When both interceptors are used, this `post_create_entity_type_with_metadata` interceptor runs after the + `post_create_entity_type` interceptor. The (possibly modified) response returned by + `post_create_entity_type` will be passed to + `post_create_entity_type_with_metadata`. + """ + return response, metadata + def pre_delete_entity_type( self, request: entity_type.DeleteEntityTypeRequest, @@ -339,12 +477,35 @@ def post_get_entity_type( ) -> entity_type.EntityType: """Post-rpc interceptor for get_entity_type - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_entity_type_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the EntityTypes server but before - it is returned to user code. + it is returned to user code. This `post_get_entity_type` interceptor runs + before the `post_get_entity_type_with_metadata` interceptor. 
""" return response + def post_get_entity_type_with_metadata( + self, + response: entity_type.EntityType, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[entity_type.EntityType, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_entity_type + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the EntityTypes server but before it is returned to user code. + + We recommend only using this `post_get_entity_type_with_metadata` + interceptor in new development instead of the `post_get_entity_type` interceptor. + When both interceptors are used, this `post_get_entity_type_with_metadata` interceptor runs after the + `post_get_entity_type` interceptor. The (possibly modified) response returned by + `post_get_entity_type` will be passed to + `post_get_entity_type_with_metadata`. + """ + return response, metadata + def pre_list_entity_types( self, request: entity_type.ListEntityTypesRequest, @@ -364,12 +525,37 @@ def post_list_entity_types( ) -> entity_type.ListEntityTypesResponse: """Post-rpc interceptor for list_entity_types - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_entity_types_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the EntityTypes server but before - it is returned to user code. + it is returned to user code. This `post_list_entity_types` interceptor runs + before the `post_list_entity_types_with_metadata` interceptor. """ return response + def post_list_entity_types_with_metadata( + self, + response: entity_type.ListEntityTypesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + entity_type.ListEntityTypesResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_entity_types + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the EntityTypes server but before it is returned to user code. + + We recommend only using this `post_list_entity_types_with_metadata` + interceptor in new development instead of the `post_list_entity_types` interceptor. + When both interceptors are used, this `post_list_entity_types_with_metadata` interceptor runs after the + `post_list_entity_types` interceptor. The (possibly modified) response returned by + `post_list_entity_types` will be passed to + `post_list_entity_types_with_metadata`. + """ + return response, metadata + def pre_update_entity_type( self, request: gcd_entity_type.UpdateEntityTypeRequest, @@ -389,12 +575,35 @@ def post_update_entity_type( ) -> gcd_entity_type.EntityType: """Post-rpc interceptor for update_entity_type - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_entity_type_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the EntityTypes server but before - it is returned to user code. + it is returned to user code. This `post_update_entity_type` interceptor runs + before the `post_update_entity_type_with_metadata` interceptor. 
""" return response + def post_update_entity_type_with_metadata( + self, + response: gcd_entity_type.EntityType, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gcd_entity_type.EntityType, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_entity_type + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the EntityTypes server but before it is returned to user code. + + We recommend only using this `post_update_entity_type_with_metadata` + interceptor in new development instead of the `post_update_entity_type` interceptor. + When both interceptors are used, this `post_update_entity_type_with_metadata` interceptor runs after the + `post_update_entity_type` interceptor. The (possibly modified) response returned by + `post_update_entity_type` will be passed to + `post_update_entity_type_with_metadata`. + """ + return response, metadata + def pre_get_location( self, request: locations_pb2.GetLocationRequest, @@ -792,6 +1001,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_batch_create_entities(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_batch_create_entities_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -942,6 +1155,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_batch_delete_entities(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_batch_delete_entities_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1092,6 +1309,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_batch_delete_entity_types(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_batch_delete_entity_types_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1242,6 +1463,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_batch_update_entities(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_batch_update_entities_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1392,6 +1617,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_batch_update_entity_types(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_batch_update_entity_types_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1556,6 +1785,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_entity_type(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_entity_type_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and 
_LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1823,6 +2056,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_entity_type(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_entity_type_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1968,6 +2205,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_entity_types(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_entity_types_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2134,6 +2375,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_entity_type(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_entity_type_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/environments/client.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/environments/client.py index 34436aa091e8..fc1afc062e4e 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/environments/client.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/environments/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -519,6 +521,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -1388,16 +1417,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. 
+ return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -1443,16 +1476,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def cancel_operation( self, @@ -1553,16 +1590,20 @@ def get_location( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def list_locations( self, @@ -1608,16 +1649,20 @@ def list_locations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/environments/transports/rest.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/environments/transports/rest.py index d87f8cb20668..09d53cc4e30a 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/environments/transports/rest.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/environments/transports/rest.py @@ -140,12 +140,35 @@ def post_create_environment( ) -> environment.Environment: """Post-rpc interceptor for create_environment - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_environment_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Environments server but before - it is returned to user code. + it is returned to user code. This `post_create_environment` interceptor runs + before the `post_create_environment_with_metadata` interceptor. """ return response + def post_create_environment_with_metadata( + self, + response: environment.Environment, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[environment.Environment, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_environment + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Environments server but before it is returned to user code. + + We recommend only using this `post_create_environment_with_metadata` + interceptor in new development instead of the `post_create_environment` interceptor. 
+ When both interceptors are used, this `post_create_environment_with_metadata` interceptor runs after the + `post_create_environment` interceptor. The (possibly modified) response returned by + `post_create_environment` will be passed to + `post_create_environment_with_metadata`. + """ + return response, metadata + def pre_delete_environment( self, request: environment.DeleteEnvironmentRequest, @@ -179,12 +202,35 @@ def post_get_environment( ) -> environment.Environment: """Post-rpc interceptor for get_environment - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_environment_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Environments server but before - it is returned to user code. + it is returned to user code. This `post_get_environment` interceptor runs + before the `post_get_environment_with_metadata` interceptor. """ return response + def post_get_environment_with_metadata( + self, + response: environment.Environment, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[environment.Environment, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_environment + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Environments server but before it is returned to user code. + + We recommend only using this `post_get_environment_with_metadata` + interceptor in new development instead of the `post_get_environment` interceptor. + When both interceptors are used, this `post_get_environment_with_metadata` interceptor runs after the + `post_get_environment` interceptor. The (possibly modified) response returned by + `post_get_environment` will be passed to + `post_get_environment_with_metadata`. + """ + return response, metadata + def pre_get_environment_history( self, request: environment.GetEnvironmentHistoryRequest, @@ -205,12 +251,35 @@ def post_get_environment_history( ) -> environment.EnvironmentHistory: """Post-rpc interceptor for get_environment_history - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_environment_history_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Environments server but before - it is returned to user code. + it is returned to user code. This `post_get_environment_history` interceptor runs + before the `post_get_environment_history_with_metadata` interceptor. """ return response + def post_get_environment_history_with_metadata( + self, + response: environment.EnvironmentHistory, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[environment.EnvironmentHistory, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_environment_history + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Environments server but before it is returned to user code. + + We recommend only using this `post_get_environment_history_with_metadata` + interceptor in new development instead of the `post_get_environment_history` interceptor. + When both interceptors are used, this `post_get_environment_history_with_metadata` interceptor runs after the + `post_get_environment_history` interceptor. The (possibly modified) response returned by + `post_get_environment_history` will be passed to + `post_get_environment_history_with_metadata`. 
+ """ + return response, metadata + def pre_list_environments( self, request: environment.ListEnvironmentsRequest, @@ -230,12 +299,37 @@ def post_list_environments( ) -> environment.ListEnvironmentsResponse: """Post-rpc interceptor for list_environments - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_environments_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Environments server but before - it is returned to user code. + it is returned to user code. This `post_list_environments` interceptor runs + before the `post_list_environments_with_metadata` interceptor. """ return response + def post_list_environments_with_metadata( + self, + response: environment.ListEnvironmentsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + environment.ListEnvironmentsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_environments + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Environments server but before it is returned to user code. + + We recommend only using this `post_list_environments_with_metadata` + interceptor in new development instead of the `post_list_environments` interceptor. + When both interceptors are used, this `post_list_environments_with_metadata` interceptor runs after the + `post_list_environments` interceptor. The (possibly modified) response returned by + `post_list_environments` will be passed to + `post_list_environments_with_metadata`. + """ + return response, metadata + def pre_update_environment( self, request: environment.UpdateEnvironmentRequest, @@ -255,12 +349,35 @@ def post_update_environment( ) -> environment.Environment: """Post-rpc interceptor for update_environment - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_environment_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Environments server but before - it is returned to user code. + it is returned to user code. This `post_update_environment` interceptor runs + before the `post_update_environment_with_metadata` interceptor. """ return response + def post_update_environment_with_metadata( + self, + response: environment.Environment, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[environment.Environment, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_environment + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Environments server but before it is returned to user code. + + We recommend only using this `post_update_environment_with_metadata` + interceptor in new development instead of the `post_update_environment` interceptor. + When both interceptors are used, this `post_update_environment_with_metadata` interceptor runs after the + `post_update_environment` interceptor. The (possibly modified) response returned by + `post_update_environment` will be passed to + `post_update_environment_with_metadata`. 
+ """ + return response, metadata + def pre_get_location( self, request: locations_pb2.GetLocationRequest, @@ -617,6 +734,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_environment(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_environment_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -886,6 +1007,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_environment(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_environment_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1031,6 +1156,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_environment_history(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_environment_history_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1176,6 +1305,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_environments(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_environments_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1346,6 +1479,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_environment(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_environment_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/fulfillments/client.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/fulfillments/client.py index 29de5d0c1fbc..41ccdbdca941 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/fulfillments/client.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/fulfillments/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -481,6 +483,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. 
+ """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -976,16 +1005,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -1031,16 +1064,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def cancel_operation( self, @@ -1141,16 +1178,20 @@ def get_location( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def list_locations( self, @@ -1196,16 +1237,20 @@ def list_locations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/fulfillments/transports/rest.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/fulfillments/transports/rest.py index 7abd8284ec17..d28539e0a683 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/fulfillments/transports/rest.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/fulfillments/transports/rest.py @@ -112,12 +112,35 @@ def post_get_fulfillment( ) -> fulfillment.Fulfillment: """Post-rpc interceptor for get_fulfillment - Override in a subclass to manipulate the response + DEPRECATED. 
Please use the `post_get_fulfillment_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Fulfillments server but before - it is returned to user code. + it is returned to user code. This `post_get_fulfillment` interceptor runs + before the `post_get_fulfillment_with_metadata` interceptor. """ return response + def post_get_fulfillment_with_metadata( + self, + response: fulfillment.Fulfillment, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[fulfillment.Fulfillment, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_fulfillment + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Fulfillments server but before it is returned to user code. + + We recommend only using this `post_get_fulfillment_with_metadata` + interceptor in new development instead of the `post_get_fulfillment` interceptor. + When both interceptors are used, this `post_get_fulfillment_with_metadata` interceptor runs after the + `post_get_fulfillment` interceptor. The (possibly modified) response returned by + `post_get_fulfillment` will be passed to + `post_get_fulfillment_with_metadata`. + """ + return response, metadata + def pre_update_fulfillment( self, request: gcd_fulfillment.UpdateFulfillmentRequest, @@ -138,12 +161,35 @@ def post_update_fulfillment( ) -> gcd_fulfillment.Fulfillment: """Post-rpc interceptor for update_fulfillment - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_fulfillment_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Fulfillments server but before - it is returned to user code. + it is returned to user code. This `post_update_fulfillment` interceptor runs + before the `post_update_fulfillment_with_metadata` interceptor. """ return response + def post_update_fulfillment_with_metadata( + self, + response: gcd_fulfillment.Fulfillment, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gcd_fulfillment.Fulfillment, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_fulfillment + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Fulfillments server but before it is returned to user code. + + We recommend only using this `post_update_fulfillment_with_metadata` + interceptor in new development instead of the `post_update_fulfillment` interceptor. + When both interceptors are used, this `post_update_fulfillment_with_metadata` interceptor runs after the + `post_update_fulfillment` interceptor. The (possibly modified) response returned by + `post_update_fulfillment` will be passed to + `post_update_fulfillment_with_metadata`. 
+ """ + return response, metadata + def pre_get_location( self, request: locations_pb2.GetLocationRequest, @@ -485,6 +531,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_fulfillment(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_fulfillment_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -646,6 +696,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_fulfillment(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_fulfillment_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/generators/client.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/generators/client.py index 9adf9f1e6af2..17101756b4ff 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/generators/client.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/generators/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -493,6 +495,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -1305,16 +1334,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -1360,16 +1393,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. 
- return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def cancel_operation( self, @@ -1470,16 +1507,20 @@ def get_location( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def list_locations( self, @@ -1525,16 +1566,20 @@ def list_locations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/generators/transports/rest.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/generators/transports/rest.py index 61d47b8c4752..87046d863dc6 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/generators/transports/rest.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/generators/transports/rest.py @@ -133,12 +133,35 @@ def post_create_generator( ) -> gcd_generator.Generator: """Post-rpc interceptor for create_generator - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_generator_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Generators server but before - it is returned to user code. + it is returned to user code. This `post_create_generator` interceptor runs + before the `post_create_generator_with_metadata` interceptor. """ return response + def post_create_generator_with_metadata( + self, + response: gcd_generator.Generator, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gcd_generator.Generator, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_generator + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Generators server but before it is returned to user code. + + We recommend only using this `post_create_generator_with_metadata` + interceptor in new development instead of the `post_create_generator` interceptor. + When both interceptors are used, this `post_create_generator_with_metadata` interceptor runs after the + `post_create_generator` interceptor. The (possibly modified) response returned by + `post_create_generator` will be passed to + `post_create_generator_with_metadata`. 
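Every __call__ hunk in these transports repeats the same sequence: run the legacy hook, convert the HTTP response headers to (name, str(value)) tuples, then run the metadata-aware hook and keep only its response. A stripped-down sketch of that ordering (the helper name is illustrative, not library code):

    def _run_post_hooks(interceptor, resp, http_headers):
        # 1. Legacy hook runs first and may replace the response.
        resp = interceptor.post_create_generator(resp)
        # 2. New hook runs second with the response headers as metadata;
        #    the transport currently discards the metadata it returns.
        response_metadata = [(k, str(v)) for k, v in http_headers.items()]
        resp, _ = interceptor.post_create_generator_with_metadata(resp, response_metadata)
        return resp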
+ """ + return response, metadata + def pre_delete_generator( self, request: generator.DeleteGeneratorRequest, @@ -168,12 +191,35 @@ def pre_get_generator( def post_get_generator(self, response: generator.Generator) -> generator.Generator: """Post-rpc interceptor for get_generator - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_generator_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Generators server but before - it is returned to user code. + it is returned to user code. This `post_get_generator` interceptor runs + before the `post_get_generator_with_metadata` interceptor. """ return response + def post_get_generator_with_metadata( + self, + response: generator.Generator, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[generator.Generator, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_generator + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Generators server but before it is returned to user code. + + We recommend only using this `post_get_generator_with_metadata` + interceptor in new development instead of the `post_get_generator` interceptor. + When both interceptors are used, this `post_get_generator_with_metadata` interceptor runs after the + `post_get_generator` interceptor. The (possibly modified) response returned by + `post_get_generator` will be passed to + `post_get_generator_with_metadata`. + """ + return response, metadata + def pre_list_generators( self, request: generator.ListGeneratorsRequest, @@ -193,12 +239,37 @@ def post_list_generators( ) -> generator.ListGeneratorsResponse: """Post-rpc interceptor for list_generators - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_generators_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Generators server but before - it is returned to user code. + it is returned to user code. This `post_list_generators` interceptor runs + before the `post_list_generators_with_metadata` interceptor. """ return response + def post_list_generators_with_metadata( + self, + response: generator.ListGeneratorsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + generator.ListGeneratorsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_generators + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Generators server but before it is returned to user code. + + We recommend only using this `post_list_generators_with_metadata` + interceptor in new development instead of the `post_list_generators` interceptor. + When both interceptors are used, this `post_list_generators_with_metadata` interceptor runs after the + `post_list_generators` interceptor. The (possibly modified) response returned by + `post_list_generators` will be passed to + `post_list_generators_with_metadata`. + """ + return response, metadata + def pre_update_generator( self, request: gcd_generator.UpdateGeneratorRequest, @@ -218,12 +289,35 @@ def post_update_generator( ) -> gcd_generator.Generator: """Post-rpc interceptor for update_generator - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_generator_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response after it is returned by the Generators server but before - it is returned to user code. + it is returned to user code. This `post_update_generator` interceptor runs + before the `post_update_generator_with_metadata` interceptor. """ return response + def post_update_generator_with_metadata( + self, + response: gcd_generator.Generator, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gcd_generator.Generator, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_generator + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Generators server but before it is returned to user code. + + We recommend only using this `post_update_generator_with_metadata` + interceptor in new development instead of the `post_update_generator` interceptor. + When both interceptors are used, this `post_update_generator_with_metadata` interceptor runs after the + `post_update_generator` interceptor. The (possibly modified) response returned by + `post_update_generator` will be passed to + `post_update_generator_with_metadata`. + """ + return response, metadata + def pre_get_location( self, request: locations_pb2.GetLocationRequest, @@ -563,6 +657,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_generator(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_generator_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -815,6 +913,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_generator(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_generator_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -957,6 +1059,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_generators(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_generators_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1107,6 +1213,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_generator(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_generator_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/intents/client.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/intents/client.py index c1f9782b7085..496ab8060a67 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/intents/client.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/intents/client.py @@ -14,6 +14,8 @@ # limitations under the License. 
# from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -509,6 +511,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -1728,16 +1757,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -1783,16 +1816,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def cancel_operation( self, @@ -1893,16 +1930,20 @@ def get_location( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def list_locations( self, @@ -1948,16 +1989,20 @@ def list_locations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. 
+ return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/intents/transports/rest.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/intents/transports/rest.py index ae88e6021ba1..18990b8b3724 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/intents/transports/rest.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/intents/transports/rest.py @@ -149,12 +149,35 @@ def post_batch_delete_intents( ) -> operations_pb2.Operation: """Post-rpc interceptor for batch_delete_intents - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_batch_delete_intents_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Intents server but before - it is returned to user code. + it is returned to user code. This `post_batch_delete_intents` interceptor runs + before the `post_batch_delete_intents_with_metadata` interceptor. """ return response + def post_batch_delete_intents_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for batch_delete_intents + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Intents server but before it is returned to user code. + + We recommend only using this `post_batch_delete_intents_with_metadata` + interceptor in new development instead of the `post_batch_delete_intents` interceptor. + When both interceptors are used, this `post_batch_delete_intents_with_metadata` interceptor runs after the + `post_batch_delete_intents` interceptor. The (possibly modified) response returned by + `post_batch_delete_intents` will be passed to + `post_batch_delete_intents_with_metadata`. + """ + return response, metadata + def pre_batch_update_intents( self, request: intent.BatchUpdateIntentsRequest, @@ -174,12 +197,35 @@ def post_batch_update_intents( ) -> operations_pb2.Operation: """Post-rpc interceptor for batch_update_intents - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_batch_update_intents_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Intents server but before - it is returned to user code. + it is returned to user code. This `post_batch_update_intents` interceptor runs + before the `post_batch_update_intents_with_metadata` interceptor. """ return response + def post_batch_update_intents_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for batch_update_intents + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Intents server but before it is returned to user code. + + We recommend only using this `post_batch_update_intents_with_metadata` + interceptor in new development instead of the `post_batch_update_intents` interceptor. 
+ When both interceptors are used, this `post_batch_update_intents_with_metadata` interceptor runs after the + `post_batch_update_intents` interceptor. The (possibly modified) response returned by + `post_batch_update_intents` will be passed to + `post_batch_update_intents_with_metadata`. + """ + return response, metadata + def pre_create_intent( self, request: gcd_intent.CreateIntentRequest, @@ -195,12 +241,35 @@ def pre_create_intent( def post_create_intent(self, response: gcd_intent.Intent) -> gcd_intent.Intent: """Post-rpc interceptor for create_intent - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_intent_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Intents server but before - it is returned to user code. + it is returned to user code. This `post_create_intent` interceptor runs + before the `post_create_intent_with_metadata` interceptor. """ return response + def post_create_intent_with_metadata( + self, + response: gcd_intent.Intent, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gcd_intent.Intent, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_intent + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Intents server but before it is returned to user code. + + We recommend only using this `post_create_intent_with_metadata` + interceptor in new development instead of the `post_create_intent` interceptor. + When both interceptors are used, this `post_create_intent_with_metadata` interceptor runs after the + `post_create_intent` interceptor. The (possibly modified) response returned by + `post_create_intent` will be passed to + `post_create_intent_with_metadata`. + """ + return response, metadata + def pre_delete_intent( self, request: intent.DeleteIntentRequest, @@ -228,12 +297,33 @@ def pre_get_intent( def post_get_intent(self, response: intent.Intent) -> intent.Intent: """Post-rpc interceptor for get_intent - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_intent_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Intents server but before - it is returned to user code. + it is returned to user code. This `post_get_intent` interceptor runs + before the `post_get_intent_with_metadata` interceptor. """ return response + def post_get_intent_with_metadata( + self, response: intent.Intent, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[intent.Intent, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_intent + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Intents server but before it is returned to user code. + + We recommend only using this `post_get_intent_with_metadata` + interceptor in new development instead of the `post_get_intent` interceptor. + When both interceptors are used, this `post_get_intent_with_metadata` interceptor runs after the + `post_get_intent` interceptor. The (possibly modified) response returned by + `post_get_intent` will be passed to + `post_get_intent_with_metadata`. 
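The docstrings above spell out the ordering contract between each deprecated hook and its `_with_metadata` replacement. Here is a small sketch of that contract for `get_intent`, assuming the generated `IntentsRestInterceptor` from this diff; the `strip()` tweak is purely illustrative.

```python
from typing import Sequence, Tuple, Union

from google.cloud.dialogflow_v2.services.intents.transports.rest import (
    IntentsRestInterceptor,
)
from google.cloud.dialogflow_v2.types import intent


class OrderingDemoInterceptor(IntentsRestInterceptor):
    def post_get_intent(self, response: intent.Intent) -> intent.Intent:
        # Deprecated hook: runs first when both hooks are overridden.
        response.display_name = response.display_name.strip()
        return response

    def post_get_intent_with_metadata(
        self,
        response: intent.Intent,
        metadata: Sequence[Tuple[str, Union[str, bytes]]],
    ) -> Tuple[intent.Intent, Sequence[Tuple[str, Union[str, bytes]]]]:
        # Runs second; `response` is the (possibly modified) value returned
        # by post_get_intent above, per the docstring contract.
        return response, metadata
```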
+ """ + return response, metadata + def pre_list_intents( self, request: intent.ListIntentsRequest, @@ -251,12 +341,35 @@ def post_list_intents( ) -> intent.ListIntentsResponse: """Post-rpc interceptor for list_intents - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_intents_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Intents server but before - it is returned to user code. + it is returned to user code. This `post_list_intents` interceptor runs + before the `post_list_intents_with_metadata` interceptor. """ return response + def post_list_intents_with_metadata( + self, + response: intent.ListIntentsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[intent.ListIntentsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_intents + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Intents server but before it is returned to user code. + + We recommend only using this `post_list_intents_with_metadata` + interceptor in new development instead of the `post_list_intents` interceptor. + When both interceptors are used, this `post_list_intents_with_metadata` interceptor runs after the + `post_list_intents` interceptor. The (possibly modified) response returned by + `post_list_intents` will be passed to + `post_list_intents_with_metadata`. + """ + return response, metadata + def pre_update_intent( self, request: gcd_intent.UpdateIntentRequest, @@ -272,12 +385,35 @@ def pre_update_intent( def post_update_intent(self, response: gcd_intent.Intent) -> gcd_intent.Intent: """Post-rpc interceptor for update_intent - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_intent_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Intents server but before - it is returned to user code. + it is returned to user code. This `post_update_intent` interceptor runs + before the `post_update_intent_with_metadata` interceptor. """ return response + def post_update_intent_with_metadata( + self, + response: gcd_intent.Intent, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gcd_intent.Intent, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_intent + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Intents server but before it is returned to user code. + + We recommend only using this `post_update_intent_with_metadata` + interceptor in new development instead of the `post_update_intent` interceptor. + When both interceptors are used, this `post_update_intent_with_metadata` interceptor runs after the + `post_update_intent` interceptor. The (possibly modified) response returned by + `post_update_intent` will be passed to + `post_update_intent_with_metadata`. 
+ """ + return response, metadata + def pre_get_location( self, request: locations_pb2.GetLocationRequest, @@ -674,6 +810,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_batch_delete_intents(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_batch_delete_intents_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -823,6 +963,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_batch_update_intents(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_batch_update_intents_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -983,6 +1127,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_intent(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_intent_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1244,6 +1392,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_intent(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_intent_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1389,6 +1541,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_intents(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_intents_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1549,6 +1705,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_intent(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_intent_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/knowledge_bases/client.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/knowledge_bases/client.py index b99e22250a11..9923f3a811e2 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/knowledge_bases/client.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/knowledge_bases/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -488,6 +490,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. 
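Each `__call__` hunk above converts the HTTP response headers into the metadata sequence handed to the new hooks with `[(k, str(v)) for k, v in response.headers.items()]`. A standalone sketch of what that conversion yields, using a plain dict as a stand-in for `response.headers`:

```python
# Stand-in for the headers mapping on the underlying HTTP response object.
headers = {"content-type": "application/json", "x-goog-request-id": "abc123"}

response_metadata = [(k, str(v)) for k, v in headers.items()]
print(response_metadata)
# [('content-type', 'application/json'), ('x-goog-request-id', 'abc123')]
```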
return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -1340,16 +1369,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -1395,16 +1428,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def cancel_operation( self, @@ -1505,16 +1542,20 @@ def get_location( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def list_locations( self, @@ -1560,16 +1601,20 @@ def list_locations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. 
+ return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/knowledge_bases/transports/rest.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/knowledge_bases/transports/rest.py index a7f8d8cfe37b..f49e0b3db45b 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/knowledge_bases/transports/rest.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/knowledge_bases/transports/rest.py @@ -134,12 +134,37 @@ def post_create_knowledge_base( ) -> gcd_knowledge_base.KnowledgeBase: """Post-rpc interceptor for create_knowledge_base - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_knowledge_base_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the KnowledgeBases server but before - it is returned to user code. + it is returned to user code. This `post_create_knowledge_base` interceptor runs + before the `post_create_knowledge_base_with_metadata` interceptor. """ return response + def post_create_knowledge_base_with_metadata( + self, + response: gcd_knowledge_base.KnowledgeBase, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + gcd_knowledge_base.KnowledgeBase, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for create_knowledge_base + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the KnowledgeBases server but before it is returned to user code. + + We recommend only using this `post_create_knowledge_base_with_metadata` + interceptor in new development instead of the `post_create_knowledge_base` interceptor. + When both interceptors are used, this `post_create_knowledge_base_with_metadata` interceptor runs after the + `post_create_knowledge_base` interceptor. The (possibly modified) response returned by + `post_create_knowledge_base` will be passed to + `post_create_knowledge_base_with_metadata`. + """ + return response, metadata + def pre_delete_knowledge_base( self, request: knowledge_base.DeleteKnowledgeBaseRequest, @@ -174,12 +199,35 @@ def post_get_knowledge_base( ) -> knowledge_base.KnowledgeBase: """Post-rpc interceptor for get_knowledge_base - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_knowledge_base_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the KnowledgeBases server but before - it is returned to user code. + it is returned to user code. This `post_get_knowledge_base` interceptor runs + before the `post_get_knowledge_base_with_metadata` interceptor. """ return response + def post_get_knowledge_base_with_metadata( + self, + response: knowledge_base.KnowledgeBase, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[knowledge_base.KnowledgeBase, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_knowledge_base + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the KnowledgeBases server but before it is returned to user code. + + We recommend only using this `post_get_knowledge_base_with_metadata` + interceptor in new development instead of the `post_get_knowledge_base` interceptor. 
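On the client side, the hunks above wrap the operations and locations mixins in try/except so that `_add_cred_info_for_auth_errors` can append a JSON-encoded credential summary to 401/403/404 errors before re-raising. A hedged sketch of what that looks like to a caller; the operation name is a placeholder, the client needs application-default credentials, and the extra detail only appears with google-auth >= 2.35.0:

```python
from google.api_core import exceptions as core_exceptions
from google.cloud import dialogflow_v2
from google.longrunning import operations_pb2

client = dialogflow_v2.KnowledgeBasesClient()

request = operations_pb2.GetOperationRequest(
    name="projects/my-project/operations/op-123"  # placeholder operation name
)

try:
    client.get_operation(request=request)
except core_exceptions.GoogleAPICallError as e:
    # On 401/403/404, e.details may now end with a JSON string describing the
    # credential that was used (as returned by credentials.get_cred_info()).
    for detail in e.details:
        print(detail)
    raise
```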
+ When both interceptors are used, this `post_get_knowledge_base_with_metadata` interceptor runs after the + `post_get_knowledge_base` interceptor. The (possibly modified) response returned by + `post_get_knowledge_base` will be passed to + `post_get_knowledge_base_with_metadata`. + """ + return response, metadata + def pre_list_knowledge_bases( self, request: knowledge_base.ListKnowledgeBasesRequest, @@ -200,12 +248,38 @@ def post_list_knowledge_bases( ) -> knowledge_base.ListKnowledgeBasesResponse: """Post-rpc interceptor for list_knowledge_bases - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_knowledge_bases_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the KnowledgeBases server but before - it is returned to user code. + it is returned to user code. This `post_list_knowledge_bases` interceptor runs + before the `post_list_knowledge_bases_with_metadata` interceptor. """ return response + def post_list_knowledge_bases_with_metadata( + self, + response: knowledge_base.ListKnowledgeBasesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + knowledge_base.ListKnowledgeBasesResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_knowledge_bases + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the KnowledgeBases server but before it is returned to user code. + + We recommend only using this `post_list_knowledge_bases_with_metadata` + interceptor in new development instead of the `post_list_knowledge_bases` interceptor. + When both interceptors are used, this `post_list_knowledge_bases_with_metadata` interceptor runs after the + `post_list_knowledge_bases` interceptor. The (possibly modified) response returned by + `post_list_knowledge_bases` will be passed to + `post_list_knowledge_bases_with_metadata`. + """ + return response, metadata + def pre_update_knowledge_base( self, request: gcd_knowledge_base.UpdateKnowledgeBaseRequest, @@ -226,12 +300,37 @@ def post_update_knowledge_base( ) -> gcd_knowledge_base.KnowledgeBase: """Post-rpc interceptor for update_knowledge_base - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_knowledge_base_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the KnowledgeBases server but before - it is returned to user code. + it is returned to user code. This `post_update_knowledge_base` interceptor runs + before the `post_update_knowledge_base_with_metadata` interceptor. """ return response + def post_update_knowledge_base_with_metadata( + self, + response: gcd_knowledge_base.KnowledgeBase, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + gcd_knowledge_base.KnowledgeBase, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for update_knowledge_base + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the KnowledgeBases server but before it is returned to user code. + + We recommend only using this `post_update_knowledge_base_with_metadata` + interceptor in new development instead of the `post_update_knowledge_base` interceptor. + When both interceptors are used, this `post_update_knowledge_base_with_metadata` interceptor runs after the + `post_update_knowledge_base` interceptor. 
The (possibly modified) response returned by + `post_update_knowledge_base` will be passed to + `post_update_knowledge_base_with_metadata`. + """ + return response, metadata + def pre_get_location( self, request: locations_pb2.GetLocationRequest, @@ -582,6 +681,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_knowledge_base(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_knowledge_base_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -849,6 +952,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_knowledge_base(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_knowledge_base_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -994,6 +1101,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_knowledge_bases(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_knowledge_bases_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1158,6 +1269,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_knowledge_base(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_knowledge_base_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/participants/client.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/participants/client.py index e18bada3741f..2addabbda1a7 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/participants/client.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/participants/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -618,6 +620,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. 
+ """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -2037,16 +2066,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -2092,16 +2125,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def cancel_operation( self, @@ -2202,16 +2239,20 @@ def get_location( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def list_locations( self, @@ -2257,16 +2298,20 @@ def list_locations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/participants/transports/rest.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/participants/transports/rest.py index 08618be1bd56..9bba4ea1b5ad 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/participants/transports/rest.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/participants/transports/rest.py @@ -168,12 +168,37 @@ def post_analyze_content( ) -> gcd_participant.AnalyzeContentResponse: """Post-rpc interceptor for analyze_content - Override in a subclass to manipulate the response + DEPRECATED. 
Please use the `post_analyze_content_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Participants server but before - it is returned to user code. + it is returned to user code. This `post_analyze_content` interceptor runs + before the `post_analyze_content_with_metadata` interceptor. """ return response + def post_analyze_content_with_metadata( + self, + response: gcd_participant.AnalyzeContentResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + gcd_participant.AnalyzeContentResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for analyze_content + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Participants server but before it is returned to user code. + + We recommend only using this `post_analyze_content_with_metadata` + interceptor in new development instead of the `post_analyze_content` interceptor. + When both interceptors are used, this `post_analyze_content_with_metadata` interceptor runs after the + `post_analyze_content` interceptor. The (possibly modified) response returned by + `post_analyze_content` will be passed to + `post_analyze_content_with_metadata`. + """ + return response, metadata + def pre_create_participant( self, request: gcd_participant.CreateParticipantRequest, @@ -194,12 +219,35 @@ def post_create_participant( ) -> gcd_participant.Participant: """Post-rpc interceptor for create_participant - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_participant_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Participants server but before - it is returned to user code. + it is returned to user code. This `post_create_participant` interceptor runs + before the `post_create_participant_with_metadata` interceptor. """ return response + def post_create_participant_with_metadata( + self, + response: gcd_participant.Participant, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gcd_participant.Participant, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_participant + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Participants server but before it is returned to user code. + + We recommend only using this `post_create_participant_with_metadata` + interceptor in new development instead of the `post_create_participant` interceptor. + When both interceptors are used, this `post_create_participant_with_metadata` interceptor runs after the + `post_create_participant` interceptor. The (possibly modified) response returned by + `post_create_participant` will be passed to + `post_create_participant_with_metadata`. + """ + return response, metadata + def pre_get_participant( self, request: participant.GetParticipantRequest, @@ -219,12 +267,35 @@ def post_get_participant( ) -> participant.Participant: """Post-rpc interceptor for get_participant - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_participant_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Participants server but before - it is returned to user code. + it is returned to user code. This `post_get_participant` interceptor runs + before the `post_get_participant_with_metadata` interceptor. 
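One detail worth noting from the `__call__` wiring in this diff: the transports unpack each hook's return value as `resp, _ = ...`, so metadata returned from a `*_with_metadata` override is discarded, and only changes to the response object reach user code. A sketch under that assumption, using the generated `ParticipantsRestInterceptor`; the extra metadata entry is hypothetical:

```python
from typing import Sequence, Tuple, Union

from google.cloud.dialogflow_v2.services.participants.transports.rest import (
    ParticipantsRestInterceptor,
)
from google.cloud.dialogflow_v2.types import participant as gcd_participant


class AnnotatingInterceptor(ParticipantsRestInterceptor):
    def post_create_participant_with_metadata(
        self,
        response: gcd_participant.Participant,
        metadata: Sequence[Tuple[str, Union[str, bytes]]],
    ) -> Tuple[gcd_participant.Participant, Sequence[Tuple[str, Union[str, bytes]]]]:
        # Edits to `response` are visible to the caller; the appended metadata
        # entry below is dropped by the transport's `resp, _ = ...` unpacking.
        extended = list(metadata) + [
            ("x-hypothetical-annotation", "seen-by-interceptor")
        ]
        return response, extended
```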
""" return response + def post_get_participant_with_metadata( + self, + response: participant.Participant, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[participant.Participant, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_participant + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Participants server but before it is returned to user code. + + We recommend only using this `post_get_participant_with_metadata` + interceptor in new development instead of the `post_get_participant` interceptor. + When both interceptors are used, this `post_get_participant_with_metadata` interceptor runs after the + `post_get_participant` interceptor. The (possibly modified) response returned by + `post_get_participant` will be passed to + `post_get_participant_with_metadata`. + """ + return response, metadata + def pre_list_participants( self, request: participant.ListParticipantsRequest, @@ -244,12 +315,37 @@ def post_list_participants( ) -> participant.ListParticipantsResponse: """Post-rpc interceptor for list_participants - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_participants_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Participants server but before - it is returned to user code. + it is returned to user code. This `post_list_participants` interceptor runs + before the `post_list_participants_with_metadata` interceptor. """ return response + def post_list_participants_with_metadata( + self, + response: participant.ListParticipantsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + participant.ListParticipantsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_participants + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Participants server but before it is returned to user code. + + We recommend only using this `post_list_participants_with_metadata` + interceptor in new development instead of the `post_list_participants` interceptor. + When both interceptors are used, this `post_list_participants_with_metadata` interceptor runs after the + `post_list_participants` interceptor. The (possibly modified) response returned by + `post_list_participants` will be passed to + `post_list_participants_with_metadata`. + """ + return response, metadata + def pre_suggest_articles( self, request: participant.SuggestArticlesRequest, @@ -269,12 +365,37 @@ def post_suggest_articles( ) -> participant.SuggestArticlesResponse: """Post-rpc interceptor for suggest_articles - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_suggest_articles_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Participants server but before - it is returned to user code. + it is returned to user code. This `post_suggest_articles` interceptor runs + before the `post_suggest_articles_with_metadata` interceptor. 
""" return response + def post_suggest_articles_with_metadata( + self, + response: participant.SuggestArticlesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + participant.SuggestArticlesResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for suggest_articles + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Participants server but before it is returned to user code. + + We recommend only using this `post_suggest_articles_with_metadata` + interceptor in new development instead of the `post_suggest_articles` interceptor. + When both interceptors are used, this `post_suggest_articles_with_metadata` interceptor runs after the + `post_suggest_articles` interceptor. The (possibly modified) response returned by + `post_suggest_articles` will be passed to + `post_suggest_articles_with_metadata`. + """ + return response, metadata + def pre_suggest_faq_answers( self, request: participant.SuggestFaqAnswersRequest, @@ -294,12 +415,37 @@ def post_suggest_faq_answers( ) -> participant.SuggestFaqAnswersResponse: """Post-rpc interceptor for suggest_faq_answers - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_suggest_faq_answers_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Participants server but before - it is returned to user code. + it is returned to user code. This `post_suggest_faq_answers` interceptor runs + before the `post_suggest_faq_answers_with_metadata` interceptor. """ return response + def post_suggest_faq_answers_with_metadata( + self, + response: participant.SuggestFaqAnswersResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + participant.SuggestFaqAnswersResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for suggest_faq_answers + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Participants server but before it is returned to user code. + + We recommend only using this `post_suggest_faq_answers_with_metadata` + interceptor in new development instead of the `post_suggest_faq_answers` interceptor. + When both interceptors are used, this `post_suggest_faq_answers_with_metadata` interceptor runs after the + `post_suggest_faq_answers` interceptor. The (possibly modified) response returned by + `post_suggest_faq_answers` will be passed to + `post_suggest_faq_answers_with_metadata`. + """ + return response, metadata + def pre_suggest_knowledge_assist( self, request: participant.SuggestKnowledgeAssistRequest, @@ -320,12 +466,38 @@ def post_suggest_knowledge_assist( ) -> participant.SuggestKnowledgeAssistResponse: """Post-rpc interceptor for suggest_knowledge_assist - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_suggest_knowledge_assist_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Participants server but before - it is returned to user code. + it is returned to user code. This `post_suggest_knowledge_assist` interceptor runs + before the `post_suggest_knowledge_assist_with_metadata` interceptor. 
""" return response + def post_suggest_knowledge_assist_with_metadata( + self, + response: participant.SuggestKnowledgeAssistResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + participant.SuggestKnowledgeAssistResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for suggest_knowledge_assist + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Participants server but before it is returned to user code. + + We recommend only using this `post_suggest_knowledge_assist_with_metadata` + interceptor in new development instead of the `post_suggest_knowledge_assist` interceptor. + When both interceptors are used, this `post_suggest_knowledge_assist_with_metadata` interceptor runs after the + `post_suggest_knowledge_assist` interceptor. The (possibly modified) response returned by + `post_suggest_knowledge_assist` will be passed to + `post_suggest_knowledge_assist_with_metadata`. + """ + return response, metadata + def pre_suggest_smart_replies( self, request: participant.SuggestSmartRepliesRequest, @@ -345,12 +517,37 @@ def post_suggest_smart_replies( ) -> participant.SuggestSmartRepliesResponse: """Post-rpc interceptor for suggest_smart_replies - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_suggest_smart_replies_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Participants server but before - it is returned to user code. + it is returned to user code. This `post_suggest_smart_replies` interceptor runs + before the `post_suggest_smart_replies_with_metadata` interceptor. """ return response + def post_suggest_smart_replies_with_metadata( + self, + response: participant.SuggestSmartRepliesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + participant.SuggestSmartRepliesResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for suggest_smart_replies + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Participants server but before it is returned to user code. + + We recommend only using this `post_suggest_smart_replies_with_metadata` + interceptor in new development instead of the `post_suggest_smart_replies` interceptor. + When both interceptors are used, this `post_suggest_smart_replies_with_metadata` interceptor runs after the + `post_suggest_smart_replies` interceptor. The (possibly modified) response returned by + `post_suggest_smart_replies` will be passed to + `post_suggest_smart_replies_with_metadata`. + """ + return response, metadata + def pre_update_participant( self, request: gcd_participant.UpdateParticipantRequest, @@ -371,12 +568,35 @@ def post_update_participant( ) -> gcd_participant.Participant: """Post-rpc interceptor for update_participant - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_participant_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Participants server but before - it is returned to user code. + it is returned to user code. This `post_update_participant` interceptor runs + before the `post_update_participant_with_metadata` interceptor. 
""" return response + def post_update_participant_with_metadata( + self, + response: gcd_participant.Participant, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gcd_participant.Participant, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_participant + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Participants server but before it is returned to user code. + + We recommend only using this `post_update_participant_with_metadata` + interceptor in new development instead of the `post_update_participant` interceptor. + When both interceptors are used, this `post_update_participant_with_metadata` interceptor runs after the + `post_update_participant` interceptor. The (possibly modified) response returned by + `post_update_participant` will be passed to + `post_update_participant_with_metadata`. + """ + return response, metadata + def pre_get_location( self, request: locations_pb2.GetLocationRequest, @@ -714,6 +934,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_analyze_content(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_analyze_content_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -867,6 +1091,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_participant(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_participant_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1010,6 +1238,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_participant(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_participant_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1155,6 +1387,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_participants(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_participants_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1327,6 +1563,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_suggest_articles(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_suggest_articles_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1480,6 +1720,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_suggest_faq_answers(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_suggest_faq_answers_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO 
COVER @@ -1633,6 +1877,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_suggest_knowledge_assist(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_suggest_knowledge_assist_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1786,6 +2034,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_suggest_smart_replies(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_suggest_smart_replies_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1939,6 +2191,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_participant(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_participant_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/session_entity_types/client.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/session_entity_types/client.py index 397fa8434e00..830d8722efd2 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/session_entity_types/client.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/session_entity_types/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -494,6 +496,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -1404,16 +1433,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. 
+ return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -1459,16 +1492,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def cancel_operation( self, @@ -1569,16 +1606,20 @@ def get_location( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def list_locations( self, @@ -1624,16 +1665,20 @@ def list_locations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/session_entity_types/transports/rest.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/session_entity_types/transports/rest.py index 0304c6a29855..ad6d7fc71693 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/session_entity_types/transports/rest.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/session_entity_types/transports/rest.py @@ -136,12 +136,38 @@ def post_create_session_entity_type( ) -> gcd_session_entity_type.SessionEntityType: """Post-rpc interceptor for create_session_entity_type - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_session_entity_type_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the SessionEntityTypes server but before - it is returned to user code. + it is returned to user code. This `post_create_session_entity_type` interceptor runs + before the `post_create_session_entity_type_with_metadata` interceptor. """ return response + def post_create_session_entity_type_with_metadata( + self, + response: gcd_session_entity_type.SessionEntityType, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + gcd_session_entity_type.SessionEntityType, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for create_session_entity_type + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SessionEntityTypes server but before it is returned to user code. 
+ + We recommend only using this `post_create_session_entity_type_with_metadata` + interceptor in new development instead of the `post_create_session_entity_type` interceptor. + When both interceptors are used, this `post_create_session_entity_type_with_metadata` interceptor runs after the + `post_create_session_entity_type` interceptor. The (possibly modified) response returned by + `post_create_session_entity_type` will be passed to + `post_create_session_entity_type_with_metadata`. + """ + return response, metadata + def pre_delete_session_entity_type( self, request: session_entity_type.DeleteSessionEntityTypeRequest, @@ -177,12 +203,37 @@ def post_get_session_entity_type( ) -> session_entity_type.SessionEntityType: """Post-rpc interceptor for get_session_entity_type - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_session_entity_type_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the SessionEntityTypes server but before - it is returned to user code. + it is returned to user code. This `post_get_session_entity_type` interceptor runs + before the `post_get_session_entity_type_with_metadata` interceptor. """ return response + def post_get_session_entity_type_with_metadata( + self, + response: session_entity_type.SessionEntityType, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + session_entity_type.SessionEntityType, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for get_session_entity_type + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SessionEntityTypes server but before it is returned to user code. + + We recommend only using this `post_get_session_entity_type_with_metadata` + interceptor in new development instead of the `post_get_session_entity_type` interceptor. + When both interceptors are used, this `post_get_session_entity_type_with_metadata` interceptor runs after the + `post_get_session_entity_type` interceptor. The (possibly modified) response returned by + `post_get_session_entity_type` will be passed to + `post_get_session_entity_type_with_metadata`. + """ + return response, metadata + def pre_list_session_entity_types( self, request: session_entity_type.ListSessionEntityTypesRequest, @@ -203,12 +254,38 @@ def post_list_session_entity_types( ) -> session_entity_type.ListSessionEntityTypesResponse: """Post-rpc interceptor for list_session_entity_types - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_session_entity_types_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the SessionEntityTypes server but before - it is returned to user code. + it is returned to user code. This `post_list_session_entity_types` interceptor runs + before the `post_list_session_entity_types_with_metadata` interceptor. """ return response + def post_list_session_entity_types_with_metadata( + self, + response: session_entity_type.ListSessionEntityTypesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + session_entity_type.ListSessionEntityTypesResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_session_entity_types + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SessionEntityTypes server but before it is returned to user code. 
+ + We recommend only using this `post_list_session_entity_types_with_metadata` + interceptor in new development instead of the `post_list_session_entity_types` interceptor. + When both interceptors are used, this `post_list_session_entity_types_with_metadata` interceptor runs after the + `post_list_session_entity_types` interceptor. The (possibly modified) response returned by + `post_list_session_entity_types` will be passed to + `post_list_session_entity_types_with_metadata`. + """ + return response, metadata + def pre_update_session_entity_type( self, request: gcd_session_entity_type.UpdateSessionEntityTypeRequest, @@ -229,12 +306,38 @@ def post_update_session_entity_type( ) -> gcd_session_entity_type.SessionEntityType: """Post-rpc interceptor for update_session_entity_type - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_session_entity_type_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the SessionEntityTypes server but before - it is returned to user code. + it is returned to user code. This `post_update_session_entity_type` interceptor runs + before the `post_update_session_entity_type_with_metadata` interceptor. """ return response + def post_update_session_entity_type_with_metadata( + self, + response: gcd_session_entity_type.SessionEntityType, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + gcd_session_entity_type.SessionEntityType, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for update_session_entity_type + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SessionEntityTypes server but before it is returned to user code. + + We recommend only using this `post_update_session_entity_type_with_metadata` + interceptor in new development instead of the `post_update_session_entity_type` interceptor. + When both interceptors are used, this `post_update_session_entity_type_with_metadata` interceptor runs after the + `post_update_session_entity_type` interceptor. The (possibly modified) response returned by + `post_update_session_entity_type` will be passed to + `post_update_session_entity_type_with_metadata`. 
+ """ + return response, metadata + def pre_get_location( self, request: locations_pb2.GetLocationRequest, @@ -586,6 +689,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_session_entity_type(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_session_entity_type_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -857,6 +964,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_session_entity_type(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_session_entity_type_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1007,6 +1118,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_session_entity_types(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_session_entity_types_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1174,6 +1289,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_session_entity_type(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_session_entity_type_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/sessions/client.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/sessions/client.py index 46d575903adb..06238cfe529c 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/sessions/client.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/sessions/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -573,6 +575,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. 
@@ -1140,16 +1169,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -1195,16 +1228,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def cancel_operation( self, @@ -1305,16 +1342,20 @@ def get_location( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def list_locations( self, @@ -1360,16 +1401,20 @@ def list_locations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/sessions/transports/rest.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/sessions/transports/rest.py index 8ff4d4772a99..56bfd9187037 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/sessions/transports/rest.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/sessions/transports/rest.py @@ -104,12 +104,37 @@ def post_detect_intent( ) -> gcd_session.DetectIntentResponse: """Post-rpc interceptor for detect_intent - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_detect_intent_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Sessions server but before - it is returned to user code. + it is returned to user code. This `post_detect_intent` interceptor runs + before the `post_detect_intent_with_metadata` interceptor. 
""" return response + def post_detect_intent_with_metadata( + self, + response: gcd_session.DetectIntentResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + gcd_session.DetectIntentResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for detect_intent + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Sessions server but before it is returned to user code. + + We recommend only using this `post_detect_intent_with_metadata` + interceptor in new development instead of the `post_detect_intent` interceptor. + When both interceptors are used, this `post_detect_intent_with_metadata` interceptor runs after the + `post_detect_intent` interceptor. The (possibly modified) response returned by + `post_detect_intent` will be passed to + `post_detect_intent_with_metadata`. + """ + return response, metadata + def pre_get_location( self, request: locations_pb2.GetLocationRequest, @@ -448,6 +473,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_detect_intent(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_detect_intent_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/versions/client.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/versions/client.py index abf88d652e3f..5dff2cf4ad03 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/versions/client.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/versions/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -485,6 +487,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -1361,16 +1390,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. 
+ return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -1416,16 +1449,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def cancel_operation( self, @@ -1526,16 +1563,20 @@ def get_location( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def list_locations( self, @@ -1581,16 +1622,20 @@ def list_locations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/versions/transports/rest.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/versions/transports/rest.py index 2d6e0d717708..55b72ac3cd3f 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/versions/transports/rest.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/services/versions/transports/rest.py @@ -131,12 +131,35 @@ def pre_create_version( def post_create_version(self, response: gcd_version.Version) -> gcd_version.Version: """Post-rpc interceptor for create_version - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_version_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Versions server but before - it is returned to user code. + it is returned to user code. This `post_create_version` interceptor runs + before the `post_create_version_with_metadata` interceptor. """ return response + def post_create_version_with_metadata( + self, + response: gcd_version.Version, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gcd_version.Version, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_version + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Versions server but before it is returned to user code. + + We recommend only using this `post_create_version_with_metadata` + interceptor in new development instead of the `post_create_version` interceptor. 
+ When both interceptors are used, this `post_create_version_with_metadata` interceptor runs after the + `post_create_version` interceptor. The (possibly modified) response returned by + `post_create_version` will be passed to + `post_create_version_with_metadata`. + """ + return response, metadata + def pre_delete_version( self, request: version.DeleteVersionRequest, @@ -164,12 +187,35 @@ def pre_get_version( def post_get_version(self, response: version.Version) -> version.Version: """Post-rpc interceptor for get_version - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_version_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Versions server but before - it is returned to user code. + it is returned to user code. This `post_get_version` interceptor runs + before the `post_get_version_with_metadata` interceptor. """ return response + def post_get_version_with_metadata( + self, + response: version.Version, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[version.Version, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_version + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Versions server but before it is returned to user code. + + We recommend only using this `post_get_version_with_metadata` + interceptor in new development instead of the `post_get_version` interceptor. + When both interceptors are used, this `post_get_version_with_metadata` interceptor runs after the + `post_get_version` interceptor. The (possibly modified) response returned by + `post_get_version` will be passed to + `post_get_version_with_metadata`. + """ + return response, metadata + def pre_list_versions( self, request: version.ListVersionsRequest, @@ -187,12 +233,35 @@ def post_list_versions( ) -> version.ListVersionsResponse: """Post-rpc interceptor for list_versions - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_versions_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Versions server but before - it is returned to user code. + it is returned to user code. This `post_list_versions` interceptor runs + before the `post_list_versions_with_metadata` interceptor. """ return response + def post_list_versions_with_metadata( + self, + response: version.ListVersionsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[version.ListVersionsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_versions + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Versions server but before it is returned to user code. + + We recommend only using this `post_list_versions_with_metadata` + interceptor in new development instead of the `post_list_versions` interceptor. + When both interceptors are used, this `post_list_versions_with_metadata` interceptor runs after the + `post_list_versions` interceptor. The (possibly modified) response returned by + `post_list_versions` will be passed to + `post_list_versions_with_metadata`. 
+ """ + return response, metadata + def pre_update_version( self, request: gcd_version.UpdateVersionRequest, @@ -210,12 +279,35 @@ def pre_update_version( def post_update_version(self, response: gcd_version.Version) -> gcd_version.Version: """Post-rpc interceptor for update_version - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_version_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Versions server but before - it is returned to user code. + it is returned to user code. This `post_update_version` interceptor runs + before the `post_update_version_with_metadata` interceptor. """ return response + def post_update_version_with_metadata( + self, + response: gcd_version.Version, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gcd_version.Version, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_version + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Versions server but before it is returned to user code. + + We recommend only using this `post_update_version_with_metadata` + interceptor in new development instead of the `post_update_version` interceptor. + When both interceptors are used, this `post_update_version_with_metadata` interceptor runs after the + `post_update_version` interceptor. The (possibly modified) response returned by + `post_update_version` will be passed to + `post_update_version_with_metadata`. + """ + return response, metadata + def pre_get_location( self, request: locations_pb2.GetLocationRequest, @@ -573,6 +665,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_version(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_version_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -846,6 +942,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_version(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_version_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -991,6 +1091,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_versions(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_versions_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1161,6 +1265,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_version(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_version_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/types/conversation_event.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/types/conversation_event.py index 
5e7803b19708..849d9bd4da33 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/types/conversation_event.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/types/conversation_event.py @@ -20,7 +20,7 @@ from google.rpc import status_pb2 # type: ignore import proto # type: ignore -from google.cloud.dialogflow_v2.types import participant +from google.cloud.dialogflow_v2.types import participant, session __protobuf__ = proto.module( package="google.cloud.dialogflow.v2", @@ -34,6 +34,10 @@ class ConversationEvent(proto.Message): r"""Represents a notification sent to Pub/Sub subscribers for conversation lifecycle events. + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -51,6 +55,10 @@ class ConversationEvent(proto.Message): new_message_payload (google.cloud.dialogflow_v2.types.Message): Payload of NEW_MESSAGE event. + This field is a member of `oneof`_ ``payload``. + new_recognition_result_payload (google.cloud.dialogflow_v2.types.StreamingRecognitionResult): + Payload of NEW_RECOGNITION_RESULT event. + This field is a member of `oneof`_ ``payload``. """ @@ -76,6 +84,11 @@ class Type(proto.Enum): An existing conversation has received a new message, either from API or telephony. It is configured in [ConversationProfile.new_message_event_notification_config][google.cloud.dialogflow.v2.ConversationProfile.new_message_event_notification_config] + NEW_RECOGNITION_RESULT (7): + An existing conversation has received a new speech + recognition result. This is mainly for delivering + intermediate transcripts. The notification is configured in + [ConversationProfile.new_recognition_event_notification_config][]. UNRECOVERABLE_ERROR (4): Unrecoverable error during a telephone call. @@ -95,6 +108,7 @@ class Type(proto.Enum): CONVERSATION_FINISHED = 2 HUMAN_INTERVENTION_NEEDED = 3 NEW_MESSAGE = 5 + NEW_RECOGNITION_RESULT = 7 UNRECOVERABLE_ERROR = 4 conversation: str = proto.Field( @@ -117,6 +131,12 @@ class Type(proto.Enum): oneof="payload", message=participant.Message, ) + new_recognition_result_payload: session.StreamingRecognitionResult = proto.Field( + proto.MESSAGE, + number=5, + oneof="payload", + message=session.StreamingRecognitionResult, + ) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/types/conversation_profile.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/types/conversation_profile.py index da87185af50d..5743baa763ca 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/types/conversation_profile.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2/types/conversation_profile.py @@ -86,6 +86,18 @@ class ConversationProfile(proto.Message): Configuration for publishing new message events. Event will be sent in format of [ConversationEvent][google.cloud.dialogflow.v2.ConversationEvent] + new_recognition_result_notification_config (google.cloud.dialogflow_v2.types.NotificationConfig): + Optional. Configuration for publishing transcription + intermediate results. Event will be sent in format of + [ConversationEvent][google.cloud.dialogflow.v2.ConversationEvent]. 
+ If configured, the following information will be populated + as + [ConversationEvent][google.cloud.dialogflow.v2.ConversationEvent] + Pub/Sub message attributes: + + - "participant_id" + - "participant_role" + - "message_id". stt_config (google.cloud.dialogflow_v2.types.SpeechToTextConfig): Settings for speech transcription. language_code (str): @@ -161,6 +173,11 @@ class ConversationProfile(proto.Message): number=8, message="NotificationConfig", ) + new_recognition_result_notification_config: "NotificationConfig" = proto.Field( + proto.MESSAGE, + number=21, + message="NotificationConfig", + ) stt_config: audio_config.SpeechToTextConfig = proto.Field( proto.MESSAGE, number=9, diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/__init__.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/__init__.py index 646eddd61805..3c8e92a0aafd 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/__init__.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/__init__.py @@ -38,6 +38,7 @@ from .services.intents import IntentsAsyncClient, IntentsClient from .services.knowledge_bases import KnowledgeBasesAsyncClient, KnowledgeBasesClient from .services.participants import ParticipantsAsyncClient, ParticipantsClient +from .services.phone_numbers import PhoneNumbersAsyncClient, PhoneNumbersClient from .services.session_entity_types import ( SessionEntityTypesAsyncClient, SessionEntityTypesClient, @@ -283,6 +284,14 @@ SuggestSmartRepliesResponse, UpdateParticipantRequest, ) +from .types.phone_number import ( + DeletePhoneNumberRequest, + ListPhoneNumbersRequest, + ListPhoneNumbersResponse, + PhoneNumber, + UndeletePhoneNumberRequest, + UpdatePhoneNumberRequest, +) from .types.session import ( CloudConversationDebuggingInfo, DetectIntentRequest, @@ -346,6 +355,7 @@ "IntentsAsyncClient", "KnowledgeBasesAsyncClient", "ParticipantsAsyncClient", + "PhoneNumbersAsyncClient", "SessionEntityTypesAsyncClient", "SessionsAsyncClient", "SipTrunksAsyncClient", @@ -418,6 +428,7 @@ "DeleteGeneratorRequest", "DeleteIntentRequest", "DeleteKnowledgeBaseRequest", + "DeletePhoneNumberRequest", "DeleteSessionEntityTypeRequest", "DeleteSipTrunkRequest", "DeleteVersionRequest", @@ -520,6 +531,8 @@ "ListMessagesResponse", "ListParticipantsRequest", "ListParticipantsResponse", + "ListPhoneNumbersRequest", + "ListPhoneNumbersResponse", "ListSessionEntityTypesRequest", "ListSessionEntityTypesResponse", "ListSipTrunksRequest", @@ -539,6 +552,8 @@ "OutputAudioEncoding", "Participant", "ParticipantsClient", + "PhoneNumber", + "PhoneNumbersClient", "QueryInput", "QueryParameters", "QueryResult", @@ -599,6 +614,7 @@ "TextToSpeechSettings", "TrainAgentRequest", "TriggerEvent", + "UndeletePhoneNumberRequest", "UpdateAnswerRecordRequest", "UpdateContextRequest", "UpdateConversationProfileRequest", @@ -610,6 +626,7 @@ "UpdateIntentRequest", "UpdateKnowledgeBaseRequest", "UpdateParticipantRequest", + "UpdatePhoneNumberRequest", "UpdateSessionEntityTypeRequest", "UpdateSipTrunkRequest", "UpdateVersionRequest", diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/gapic_metadata.json b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/gapic_metadata.json index 0f12e8667666..48778b9f9a94 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/gapic_metadata.json +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/gapic_metadata.json @@ -1636,6 +1636,85 @@ } } }, + "PhoneNumbers": 
{ + "clients": { + "grpc": { + "libraryClient": "PhoneNumbersClient", + "rpcs": { + "DeletePhoneNumber": { + "methods": [ + "delete_phone_number" + ] + }, + "ListPhoneNumbers": { + "methods": [ + "list_phone_numbers" + ] + }, + "UndeletePhoneNumber": { + "methods": [ + "undelete_phone_number" + ] + }, + "UpdatePhoneNumber": { + "methods": [ + "update_phone_number" + ] + } + } + }, + "grpc-async": { + "libraryClient": "PhoneNumbersAsyncClient", + "rpcs": { + "DeletePhoneNumber": { + "methods": [ + "delete_phone_number" + ] + }, + "ListPhoneNumbers": { + "methods": [ + "list_phone_numbers" + ] + }, + "UndeletePhoneNumber": { + "methods": [ + "undelete_phone_number" + ] + }, + "UpdatePhoneNumber": { + "methods": [ + "update_phone_number" + ] + } + } + }, + "rest": { + "libraryClient": "PhoneNumbersClient", + "rpcs": { + "DeletePhoneNumber": { + "methods": [ + "delete_phone_number" + ] + }, + "ListPhoneNumbers": { + "methods": [ + "list_phone_numbers" + ] + }, + "UndeletePhoneNumber": { + "methods": [ + "undelete_phone_number" + ] + }, + "UpdatePhoneNumber": { + "methods": [ + "update_phone_number" + ] + } + } + } + } + }, "SessionEntityTypes": { "clients": { "grpc": { diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/gapic_version.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/gapic_version.py index c34cf46454e4..52eb7a2892d5 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/gapic_version.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.37.0" # {x-release-please-version} +__version__ = "2.39.0" # {x-release-please-version} diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/agents/client.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/agents/client.py index 29501e1892cd..fad16181b77b 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/agents/client.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/agents/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -486,6 +488,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -1820,16 +1849,20 @@ def list_operations( # Validate the universe domain. 
self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -1875,16 +1908,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def cancel_operation( self, @@ -1985,16 +2022,20 @@ def get_location( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def list_locations( self, @@ -2040,16 +2081,20 @@ def list_locations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/agents/transports/rest.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/agents/transports/rest.py index 86631cf4c344..a1eab16f64f9 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/agents/transports/rest.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/agents/transports/rest.py @@ -176,12 +176,35 @@ def post_export_agent( ) -> operations_pb2.Operation: """Post-rpc interceptor for export_agent - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_export_agent_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Agents server but before - it is returned to user code. + it is returned to user code. This `post_export_agent` interceptor runs + before the `post_export_agent_with_metadata` interceptor. 
""" return response + def post_export_agent_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for export_agent + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Agents server but before it is returned to user code. + + We recommend only using this `post_export_agent_with_metadata` + interceptor in new development instead of the `post_export_agent` interceptor. + When both interceptors are used, this `post_export_agent_with_metadata` interceptor runs after the + `post_export_agent` interceptor. The (possibly modified) response returned by + `post_export_agent` will be passed to + `post_export_agent_with_metadata`. + """ + return response, metadata + def pre_get_agent( self, request: agent.GetAgentRequest, @@ -197,12 +220,33 @@ def pre_get_agent( def post_get_agent(self, response: agent.Agent) -> agent.Agent: """Post-rpc interceptor for get_agent - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_agent_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Agents server but before - it is returned to user code. + it is returned to user code. This `post_get_agent` interceptor runs + before the `post_get_agent_with_metadata` interceptor. """ return response + def post_get_agent_with_metadata( + self, response: agent.Agent, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[agent.Agent, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_agent + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Agents server but before it is returned to user code. + + We recommend only using this `post_get_agent_with_metadata` + interceptor in new development instead of the `post_get_agent` interceptor. + When both interceptors are used, this `post_get_agent_with_metadata` interceptor runs after the + `post_get_agent` interceptor. The (possibly modified) response returned by + `post_get_agent` will be passed to + `post_get_agent_with_metadata`. + """ + return response, metadata + def pre_get_validation_result( self, request: agent.GetValidationResultRequest, @@ -222,12 +266,37 @@ def post_get_validation_result( ) -> validation_result.ValidationResult: """Post-rpc interceptor for get_validation_result - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_validation_result_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Agents server but before - it is returned to user code. + it is returned to user code. This `post_get_validation_result` interceptor runs + before the `post_get_validation_result_with_metadata` interceptor. """ return response + def post_get_validation_result_with_metadata( + self, + response: validation_result.ValidationResult, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + validation_result.ValidationResult, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for get_validation_result + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Agents server but before it is returned to user code. 
+ + We recommend only using this `post_get_validation_result_with_metadata` + interceptor in new development instead of the `post_get_validation_result` interceptor. + When both interceptors are used, this `post_get_validation_result_with_metadata` interceptor runs after the + `post_get_validation_result` interceptor. The (possibly modified) response returned by + `post_get_validation_result` will be passed to + `post_get_validation_result_with_metadata`. + """ + return response, metadata + def pre_import_agent( self, request: agent.ImportAgentRequest, @@ -245,12 +314,35 @@ def post_import_agent( ) -> operations_pb2.Operation: """Post-rpc interceptor for import_agent - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_import_agent_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Agents server but before - it is returned to user code. + it is returned to user code. This `post_import_agent` interceptor runs + before the `post_import_agent_with_metadata` interceptor. """ return response + def post_import_agent_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for import_agent + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Agents server but before it is returned to user code. + + We recommend only using this `post_import_agent_with_metadata` + interceptor in new development instead of the `post_import_agent` interceptor. + When both interceptors are used, this `post_import_agent_with_metadata` interceptor runs after the + `post_import_agent` interceptor. The (possibly modified) response returned by + `post_import_agent` will be passed to + `post_import_agent_with_metadata`. + """ + return response, metadata + def pre_restore_agent( self, request: agent.RestoreAgentRequest, @@ -268,12 +360,35 @@ def post_restore_agent( ) -> operations_pb2.Operation: """Post-rpc interceptor for restore_agent - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_restore_agent_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Agents server but before - it is returned to user code. + it is returned to user code. This `post_restore_agent` interceptor runs + before the `post_restore_agent_with_metadata` interceptor. """ return response + def post_restore_agent_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for restore_agent + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Agents server but before it is returned to user code. + + We recommend only using this `post_restore_agent_with_metadata` + interceptor in new development instead of the `post_restore_agent` interceptor. + When both interceptors are used, this `post_restore_agent_with_metadata` interceptor runs after the + `post_restore_agent` interceptor. The (possibly modified) response returned by + `post_restore_agent` will be passed to + `post_restore_agent_with_metadata`. 
+ """ + return response, metadata + def pre_search_agents( self, request: agent.SearchAgentsRequest, @@ -291,12 +406,35 @@ def post_search_agents( ) -> agent.SearchAgentsResponse: """Post-rpc interceptor for search_agents - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_search_agents_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Agents server but before - it is returned to user code. + it is returned to user code. This `post_search_agents` interceptor runs + before the `post_search_agents_with_metadata` interceptor. """ return response + def post_search_agents_with_metadata( + self, + response: agent.SearchAgentsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[agent.SearchAgentsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for search_agents + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Agents server but before it is returned to user code. + + We recommend only using this `post_search_agents_with_metadata` + interceptor in new development instead of the `post_search_agents` interceptor. + When both interceptors are used, this `post_search_agents_with_metadata` interceptor runs after the + `post_search_agents` interceptor. The (possibly modified) response returned by + `post_search_agents` will be passed to + `post_search_agents_with_metadata`. + """ + return response, metadata + def pre_set_agent( self, request: gcd_agent.SetAgentRequest, @@ -312,12 +450,35 @@ def pre_set_agent( def post_set_agent(self, response: gcd_agent.Agent) -> gcd_agent.Agent: """Post-rpc interceptor for set_agent - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_set_agent_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Agents server but before - it is returned to user code. + it is returned to user code. This `post_set_agent` interceptor runs + before the `post_set_agent_with_metadata` interceptor. """ return response + def post_set_agent_with_metadata( + self, + response: gcd_agent.Agent, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gcd_agent.Agent, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for set_agent + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Agents server but before it is returned to user code. + + We recommend only using this `post_set_agent_with_metadata` + interceptor in new development instead of the `post_set_agent` interceptor. + When both interceptors are used, this `post_set_agent_with_metadata` interceptor runs after the + `post_set_agent` interceptor. The (possibly modified) response returned by + `post_set_agent` will be passed to + `post_set_agent_with_metadata`. + """ + return response, metadata + def pre_train_agent( self, request: agent.TrainAgentRequest, @@ -335,12 +496,35 @@ def post_train_agent( ) -> operations_pb2.Operation: """Post-rpc interceptor for train_agent - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_train_agent_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Agents server but before - it is returned to user code. + it is returned to user code. 
This `post_train_agent` interceptor runs + before the `post_train_agent_with_metadata` interceptor. """ return response + def post_train_agent_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for train_agent + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Agents server but before it is returned to user code. + + We recommend only using this `post_train_agent_with_metadata` + interceptor in new development instead of the `post_train_agent` interceptor. + When both interceptors are used, this `post_train_agent_with_metadata` interceptor runs after the + `post_train_agent` interceptor. The (possibly modified) response returned by + `post_train_agent` will be passed to + `post_train_agent_with_metadata`. + """ + return response, metadata + def pre_get_location( self, request: locations_pb2.GetLocationRequest, @@ -843,6 +1027,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_export_agent(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_export_agent_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -996,6 +1184,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_agent(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_agent_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1141,6 +1333,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_validation_result(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_validation_result_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1291,6 +1487,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_import_agent(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_import_agent_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1441,6 +1641,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_restore_agent(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_restore_agent_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1586,6 +1790,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_search_agents(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_search_agents_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1745,6 
+1953,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_set_agent(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_set_agent_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1893,6 +2105,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_train_agent(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_train_agent_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/answer_records/client.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/answer_records/client.py index a0e39a9c03c6..5625311bd68c 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/answer_records/client.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/answer_records/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -547,6 +549,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -1189,16 +1218,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -1244,16 +1277,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. 
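# --- Illustrative usage sketch, not part of the generated diff above. The try/except
# blocks added around the operations and locations mixins call
# _add_cred_info_for_auth_errors before re-raising, so for 401/403/404 errors (and with
# google-auth>=2.35.0 installed) a JSON-encoded description of the credential surfaces in
# the exception's `details`. The resource name below is a placeholder.
import json

from google.api_core import exceptions as core_exceptions
from google.cloud import dialogflow_v2beta1
from google.longrunning import operations_pb2

client = dialogflow_v2beta1.AnswerRecordsClient()

try:
    client.list_operations(
        request=operations_pb2.ListOperationsRequest(name="projects/my-project")
    )
except core_exceptions.GoogleAPICallError as e:
    # One of the detail entries may now be the appended credential-info JSON string.
    for detail in e.details:
        if isinstance(detail, str):
            try:
                print("credential info:", json.loads(detail))
            except ValueError:
                pass
    raise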
+ return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def cancel_operation( self, @@ -1354,16 +1391,20 @@ def get_location( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def list_locations( self, @@ -1409,16 +1450,20 @@ def list_locations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/answer_records/transports/rest.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/answer_records/transports/rest.py index b18d5c593c80..35d32caaf516 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/answer_records/transports/rest.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/answer_records/transports/rest.py @@ -120,12 +120,35 @@ def post_get_answer_record( ) -> answer_record.AnswerRecord: """Post-rpc interceptor for get_answer_record - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_answer_record_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AnswerRecords server but before - it is returned to user code. + it is returned to user code. This `post_get_answer_record` interceptor runs + before the `post_get_answer_record_with_metadata` interceptor. """ return response + def post_get_answer_record_with_metadata( + self, + response: answer_record.AnswerRecord, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[answer_record.AnswerRecord, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_answer_record + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnswerRecords server but before it is returned to user code. + + We recommend only using this `post_get_answer_record_with_metadata` + interceptor in new development instead of the `post_get_answer_record` interceptor. + When both interceptors are used, this `post_get_answer_record_with_metadata` interceptor runs after the + `post_get_answer_record` interceptor. The (possibly modified) response returned by + `post_get_answer_record` will be passed to + `post_get_answer_record_with_metadata`. 
+ """ + return response, metadata + def pre_list_answer_records( self, request: answer_record.ListAnswerRecordsRequest, @@ -145,12 +168,37 @@ def post_list_answer_records( ) -> answer_record.ListAnswerRecordsResponse: """Post-rpc interceptor for list_answer_records - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_answer_records_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AnswerRecords server but before - it is returned to user code. + it is returned to user code. This `post_list_answer_records` interceptor runs + before the `post_list_answer_records_with_metadata` interceptor. """ return response + def post_list_answer_records_with_metadata( + self, + response: answer_record.ListAnswerRecordsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + answer_record.ListAnswerRecordsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_answer_records + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnswerRecords server but before it is returned to user code. + + We recommend only using this `post_list_answer_records_with_metadata` + interceptor in new development instead of the `post_list_answer_records` interceptor. + When both interceptors are used, this `post_list_answer_records_with_metadata` interceptor runs after the + `post_list_answer_records` interceptor. The (possibly modified) response returned by + `post_list_answer_records` will be passed to + `post_list_answer_records_with_metadata`. + """ + return response, metadata + def pre_update_answer_record( self, request: gcd_answer_record.UpdateAnswerRecordRequest, @@ -171,12 +219,35 @@ def post_update_answer_record( ) -> gcd_answer_record.AnswerRecord: """Post-rpc interceptor for update_answer_record - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_answer_record_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AnswerRecords server but before - it is returned to user code. + it is returned to user code. This `post_update_answer_record` interceptor runs + before the `post_update_answer_record_with_metadata` interceptor. """ return response + def post_update_answer_record_with_metadata( + self, + response: gcd_answer_record.AnswerRecord, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gcd_answer_record.AnswerRecord, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_answer_record + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnswerRecords server but before it is returned to user code. + + We recommend only using this `post_update_answer_record_with_metadata` + interceptor in new development instead of the `post_update_answer_record` interceptor. + When both interceptors are used, this `post_update_answer_record_with_metadata` interceptor runs after the + `post_update_answer_record` interceptor. The (possibly modified) response returned by + `post_update_answer_record` will be passed to + `post_update_answer_record_with_metadata`. 
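# Illustrative sketch, not part of the generated diff: hooking into the new
# *_with_metadata interceptor instead of the deprecated post_get_answer_record
# hook. Class names (AnswerRecordsRestInterceptor, AnswerRecordsRestTransport)
# and the transport wiring follow the usual GAPIC conventions and should be
# treated as assumptions.
from typing import Sequence, Tuple, Union

from google.cloud import dialogflow_v2beta1
from google.cloud.dialogflow_v2beta1.services.answer_records.transports.rest import (
    AnswerRecordsRestInterceptor,
    AnswerRecordsRestTransport,
)


class HeaderReadingInterceptor(AnswerRecordsRestInterceptor):
    def post_get_answer_record_with_metadata(
        self,
        response: dialogflow_v2beta1.AnswerRecord,
        metadata: Sequence[Tuple[str, Union[str, bytes]]],
    ) -> Tuple[dialogflow_v2beta1.AnswerRecord, Sequence[Tuple[str, Union[str, bytes]]]]:
        # `metadata` is built from the HTTP response headers, so individual
        # headers can be read here without touching the response body.
        headers = dict(metadata)
        print("content-type:", headers.get("content-type"))
        return response, metadata


client = dialogflow_v2beta1.AnswerRecordsClient(
    transport=AnswerRecordsRestTransport(interceptor=HeaderReadingInterceptor())
)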
+ """ + return response, metadata + def pre_get_location( self, request: locations_pb2.GetLocationRequest, @@ -539,6 +610,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_answer_record(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_answer_record_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -684,6 +759,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_answer_records(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_answer_records_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -866,6 +945,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_answer_record(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_answer_record_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/contexts/client.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/contexts/client.py index 7bdd79509b8c..df4e1ee90123 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/contexts/client.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/contexts/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -490,6 +492,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -1482,16 +1511,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. 
+ return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -1537,16 +1570,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def cancel_operation( self, @@ -1647,16 +1684,20 @@ def get_location( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def list_locations( self, @@ -1702,16 +1743,20 @@ def list_locations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/contexts/transports/rest.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/contexts/transports/rest.py index ca1a92be3248..0083647adee2 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/contexts/transports/rest.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/contexts/transports/rest.py @@ -135,12 +135,35 @@ def pre_create_context( def post_create_context(self, response: gcd_context.Context) -> gcd_context.Context: """Post-rpc interceptor for create_context - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_context_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Contexts server but before - it is returned to user code. + it is returned to user code. This `post_create_context` interceptor runs + before the `post_create_context_with_metadata` interceptor. """ return response + def post_create_context_with_metadata( + self, + response: gcd_context.Context, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gcd_context.Context, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_context + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Contexts server but before it is returned to user code. + + We recommend only using this `post_create_context_with_metadata` + interceptor in new development instead of the `post_create_context` interceptor. 
+ When both interceptors are used, this `post_create_context_with_metadata` interceptor runs after the + `post_create_context` interceptor. The (possibly modified) response returned by + `post_create_context` will be passed to + `post_create_context_with_metadata`. + """ + return response, metadata + def pre_delete_all_contexts( self, request: context.DeleteAllContextsRequest, @@ -182,12 +205,35 @@ def pre_get_context( def post_get_context(self, response: context.Context) -> context.Context: """Post-rpc interceptor for get_context - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_context_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Contexts server but before - it is returned to user code. + it is returned to user code. This `post_get_context` interceptor runs + before the `post_get_context_with_metadata` interceptor. """ return response + def post_get_context_with_metadata( + self, + response: context.Context, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[context.Context, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_context + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Contexts server but before it is returned to user code. + + We recommend only using this `post_get_context_with_metadata` + interceptor in new development instead of the `post_get_context` interceptor. + When both interceptors are used, this `post_get_context_with_metadata` interceptor runs after the + `post_get_context` interceptor. The (possibly modified) response returned by + `post_get_context` will be passed to + `post_get_context_with_metadata`. + """ + return response, metadata + def pre_list_contexts( self, request: context.ListContextsRequest, @@ -205,12 +251,35 @@ def post_list_contexts( ) -> context.ListContextsResponse: """Post-rpc interceptor for list_contexts - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_contexts_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Contexts server but before - it is returned to user code. + it is returned to user code. This `post_list_contexts` interceptor runs + before the `post_list_contexts_with_metadata` interceptor. """ return response + def post_list_contexts_with_metadata( + self, + response: context.ListContextsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[context.ListContextsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_contexts + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Contexts server but before it is returned to user code. + + We recommend only using this `post_list_contexts_with_metadata` + interceptor in new development instead of the `post_list_contexts` interceptor. + When both interceptors are used, this `post_list_contexts_with_metadata` interceptor runs after the + `post_list_contexts` interceptor. The (possibly modified) response returned by + `post_list_contexts` will be passed to + `post_list_contexts_with_metadata`. 
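# Illustrative sketch, not part of the generated diff: the ordering contract
# described above. post_create_context runs first, and whatever it returns is
# exactly what post_create_context_with_metadata receives. The class name
# (ContextsRestInterceptor) follows the usual GAPIC conventions and is an
# assumption.
from google.cloud.dialogflow_v2beta1.services.contexts.transports.rest import (
    ContextsRestInterceptor,
)


class OrderingDemoInterceptor(ContextsRestInterceptor):
    def post_create_context(self, response):
        # Deprecated hook: still runs, and runs first.
        response.lifespan_count = 5
        return response

    def post_create_context_with_metadata(self, response, metadata):
        # New hook: runs second, sees the modification made above plus the
        # response-header metadata added by the transport.
        assert response.lifespan_count == 5
        return response, metadata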
+ """ + return response, metadata + def pre_update_context( self, request: gcd_context.UpdateContextRequest, @@ -228,12 +297,35 @@ def pre_update_context( def post_update_context(self, response: gcd_context.Context) -> gcd_context.Context: """Post-rpc interceptor for update_context - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_context_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Contexts server but before - it is returned to user code. + it is returned to user code. This `post_update_context` interceptor runs + before the `post_update_context_with_metadata` interceptor. """ return response + def post_update_context_with_metadata( + self, + response: gcd_context.Context, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gcd_context.Context, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_context + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Contexts server but before it is returned to user code. + + We recommend only using this `post_update_context_with_metadata` + interceptor in new development instead of the `post_update_context` interceptor. + When both interceptors are used, this `post_update_context_with_metadata` interceptor runs after the + `post_update_context` interceptor. The (possibly modified) response returned by + `post_update_context` will be passed to + `post_update_context_with_metadata`. + """ + return response, metadata + def pre_get_location( self, request: locations_pb2.GetLocationRequest, @@ -592,6 +684,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_context(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_context_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -974,6 +1070,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_context(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_context_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1119,6 +1219,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_contexts(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_contexts_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1289,6 +1393,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_context(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_context_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/conversation_profiles/client.py 
b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/conversation_profiles/client.py index 5dd5e881b7e3..74546aea7c5c 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/conversation_profiles/client.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/conversation_profiles/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -640,6 +642,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -1850,16 +1879,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -1905,16 +1938,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def cancel_operation( self, @@ -2015,16 +2052,20 @@ def get_location( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def list_locations( self, @@ -2070,16 +2111,20 @@ def list_locations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/conversation_profiles/transports/rest.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/conversation_profiles/transports/rest.py index c6c0e21d77b5..24dac5fd3c0e 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/conversation_profiles/transports/rest.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/conversation_profiles/transports/rest.py @@ -152,12 +152,35 @@ def post_clear_suggestion_feature_config( ) -> operations_pb2.Operation: """Post-rpc interceptor for clear_suggestion_feature_config - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_clear_suggestion_feature_config_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ConversationProfiles server but before - it is returned to user code. + it is returned to user code. This `post_clear_suggestion_feature_config` interceptor runs + before the `post_clear_suggestion_feature_config_with_metadata` interceptor. """ return response + def post_clear_suggestion_feature_config_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for clear_suggestion_feature_config + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ConversationProfiles server but before it is returned to user code. + + We recommend only using this `post_clear_suggestion_feature_config_with_metadata` + interceptor in new development instead of the `post_clear_suggestion_feature_config` interceptor. + When both interceptors are used, this `post_clear_suggestion_feature_config_with_metadata` interceptor runs after the + `post_clear_suggestion_feature_config` interceptor. The (possibly modified) response returned by + `post_clear_suggestion_feature_config` will be passed to + `post_clear_suggestion_feature_config_with_metadata`. + """ + return response, metadata + def pre_create_conversation_profile( self, request: gcd_conversation_profile.CreateConversationProfileRequest, @@ -178,12 +201,38 @@ def post_create_conversation_profile( ) -> gcd_conversation_profile.ConversationProfile: """Post-rpc interceptor for create_conversation_profile - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_conversation_profile_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ConversationProfiles server but before - it is returned to user code. + it is returned to user code. This `post_create_conversation_profile` interceptor runs + before the `post_create_conversation_profile_with_metadata` interceptor. 
""" return response + def post_create_conversation_profile_with_metadata( + self, + response: gcd_conversation_profile.ConversationProfile, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + gcd_conversation_profile.ConversationProfile, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for create_conversation_profile + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ConversationProfiles server but before it is returned to user code. + + We recommend only using this `post_create_conversation_profile_with_metadata` + interceptor in new development instead of the `post_create_conversation_profile` interceptor. + When both interceptors are used, this `post_create_conversation_profile_with_metadata` interceptor runs after the + `post_create_conversation_profile` interceptor. The (possibly modified) response returned by + `post_create_conversation_profile` will be passed to + `post_create_conversation_profile_with_metadata`. + """ + return response, metadata + def pre_delete_conversation_profile( self, request: conversation_profile.DeleteConversationProfileRequest, @@ -219,12 +268,38 @@ def post_get_conversation_profile( ) -> conversation_profile.ConversationProfile: """Post-rpc interceptor for get_conversation_profile - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_conversation_profile_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ConversationProfiles server but before - it is returned to user code. + it is returned to user code. This `post_get_conversation_profile` interceptor runs + before the `post_get_conversation_profile_with_metadata` interceptor. """ return response + def post_get_conversation_profile_with_metadata( + self, + response: conversation_profile.ConversationProfile, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + conversation_profile.ConversationProfile, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for get_conversation_profile + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ConversationProfiles server but before it is returned to user code. + + We recommend only using this `post_get_conversation_profile_with_metadata` + interceptor in new development instead of the `post_get_conversation_profile` interceptor. + When both interceptors are used, this `post_get_conversation_profile_with_metadata` interceptor runs after the + `post_get_conversation_profile` interceptor. The (possibly modified) response returned by + `post_get_conversation_profile` will be passed to + `post_get_conversation_profile_with_metadata`. + """ + return response, metadata + def pre_list_conversation_profiles( self, request: conversation_profile.ListConversationProfilesRequest, @@ -245,12 +320,38 @@ def post_list_conversation_profiles( ) -> conversation_profile.ListConversationProfilesResponse: """Post-rpc interceptor for list_conversation_profiles - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_conversation_profiles_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ConversationProfiles server but before - it is returned to user code. + it is returned to user code. 
This `post_list_conversation_profiles` interceptor runs + before the `post_list_conversation_profiles_with_metadata` interceptor. """ return response + def post_list_conversation_profiles_with_metadata( + self, + response: conversation_profile.ListConversationProfilesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + conversation_profile.ListConversationProfilesResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_conversation_profiles + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ConversationProfiles server but before it is returned to user code. + + We recommend only using this `post_list_conversation_profiles_with_metadata` + interceptor in new development instead of the `post_list_conversation_profiles` interceptor. + When both interceptors are used, this `post_list_conversation_profiles_with_metadata` interceptor runs after the + `post_list_conversation_profiles` interceptor. The (possibly modified) response returned by + `post_list_conversation_profiles` will be passed to + `post_list_conversation_profiles_with_metadata`. + """ + return response, metadata + def pre_set_suggestion_feature_config( self, request: gcd_conversation_profile.SetSuggestionFeatureConfigRequest, @@ -271,12 +372,35 @@ def post_set_suggestion_feature_config( ) -> operations_pb2.Operation: """Post-rpc interceptor for set_suggestion_feature_config - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_set_suggestion_feature_config_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ConversationProfiles server but before - it is returned to user code. + it is returned to user code. This `post_set_suggestion_feature_config` interceptor runs + before the `post_set_suggestion_feature_config_with_metadata` interceptor. """ return response + def post_set_suggestion_feature_config_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for set_suggestion_feature_config + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ConversationProfiles server but before it is returned to user code. + + We recommend only using this `post_set_suggestion_feature_config_with_metadata` + interceptor in new development instead of the `post_set_suggestion_feature_config` interceptor. + When both interceptors are used, this `post_set_suggestion_feature_config_with_metadata` interceptor runs after the + `post_set_suggestion_feature_config` interceptor. The (possibly modified) response returned by + `post_set_suggestion_feature_config` will be passed to + `post_set_suggestion_feature_config_with_metadata`. + """ + return response, metadata + def pre_update_conversation_profile( self, request: gcd_conversation_profile.UpdateConversationProfileRequest, @@ -297,12 +421,38 @@ def post_update_conversation_profile( ) -> gcd_conversation_profile.ConversationProfile: """Post-rpc interceptor for update_conversation_profile - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_conversation_profile_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response after it is returned by the ConversationProfiles server but before - it is returned to user code. + it is returned to user code. This `post_update_conversation_profile` interceptor runs + before the `post_update_conversation_profile_with_metadata` interceptor. """ return response + def post_update_conversation_profile_with_metadata( + self, + response: gcd_conversation_profile.ConversationProfile, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + gcd_conversation_profile.ConversationProfile, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for update_conversation_profile + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ConversationProfiles server but before it is returned to user code. + + We recommend only using this `post_update_conversation_profile_with_metadata` + interceptor in new development instead of the `post_update_conversation_profile` interceptor. + When both interceptors are used, this `post_update_conversation_profile_with_metadata` interceptor runs after the + `post_update_conversation_profile` interceptor. The (possibly modified) response returned by + `post_update_conversation_profile` will be passed to + `post_update_conversation_profile_with_metadata`. + """ + return response, metadata + def pre_get_location( self, request: locations_pb2.GetLocationRequest, @@ -704,6 +854,13 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_clear_suggestion_feature_config(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_clear_suggestion_feature_config_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -857,6 +1014,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_conversation_profile(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_conversation_profile_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1121,6 +1282,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_conversation_profile(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_conversation_profile_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1270,6 +1435,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_conversation_profiles(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_conversation_profiles_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1426,6 +1595,13 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_set_suggestion_feature_config(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = 
self._interceptor.post_set_suggestion_feature_config_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1579,6 +1755,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_conversation_profile(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_conversation_profile_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/conversations/client.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/conversations/client.py index 7e08953cd0c6..e3b0de682e6f 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/conversations/client.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/conversations/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -718,6 +720,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -2105,16 +2134,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -2160,16 +2193,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def cancel_operation( self, @@ -2270,16 +2307,20 @@ def get_location( # Validate the universe domain. self._validate_universe_domain() - # Send the request. 
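# Illustrative sketch, not part of the generated diff: for long-running
# methods such as set_suggestion_feature_config the new hook receives the raw
# google.longrunning Operation, not the resolved resource, alongside the
# response-header metadata. The class name (ConversationProfilesRestInterceptor)
# follows the usual GAPIC conventions and is an assumption.
from google.cloud.dialogflow_v2beta1.services.conversation_profiles.transports.rest import (
    ConversationProfilesRestInterceptor,
)
from google.longrunning import operations_pb2


class OperationHeaderInterceptor(ConversationProfilesRestInterceptor):
    def post_set_suggestion_feature_config_with_metadata(
        self, response: operations_pb2.Operation, metadata
    ):
        headers = dict(metadata)
        print("operation name:", response.name, "| content-type:", headers.get("content-type"))
        return response, metadata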
- response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def list_locations( self, @@ -2325,16 +2366,20 @@ def list_locations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/conversations/transports/rest.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/conversations/transports/rest.py index 57d77e1525a6..a851819e55cd 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/conversations/transports/rest.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/conversations/transports/rest.py @@ -176,12 +176,38 @@ def post_batch_create_messages( ) -> conversation.BatchCreateMessagesResponse: """Post-rpc interceptor for batch_create_messages - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_batch_create_messages_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Conversations server but before - it is returned to user code. + it is returned to user code. This `post_batch_create_messages` interceptor runs + before the `post_batch_create_messages_with_metadata` interceptor. """ return response + def post_batch_create_messages_with_metadata( + self, + response: conversation.BatchCreateMessagesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + conversation.BatchCreateMessagesResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for batch_create_messages + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Conversations server but before it is returned to user code. + + We recommend only using this `post_batch_create_messages_with_metadata` + interceptor in new development instead of the `post_batch_create_messages` interceptor. + When both interceptors are used, this `post_batch_create_messages_with_metadata` interceptor runs after the + `post_batch_create_messages` interceptor. The (possibly modified) response returned by + `post_batch_create_messages` will be passed to + `post_batch_create_messages_with_metadata`. + """ + return response, metadata + def pre_complete_conversation( self, request: conversation.CompleteConversationRequest, @@ -202,12 +228,35 @@ def post_complete_conversation( ) -> conversation.Conversation: """Post-rpc interceptor for complete_conversation - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_complete_conversation_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response after it is returned by the Conversations server but before - it is returned to user code. + it is returned to user code. This `post_complete_conversation` interceptor runs + before the `post_complete_conversation_with_metadata` interceptor. """ return response + def post_complete_conversation_with_metadata( + self, + response: conversation.Conversation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[conversation.Conversation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for complete_conversation + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Conversations server but before it is returned to user code. + + We recommend only using this `post_complete_conversation_with_metadata` + interceptor in new development instead of the `post_complete_conversation` interceptor. + When both interceptors are used, this `post_complete_conversation_with_metadata` interceptor runs after the + `post_complete_conversation` interceptor. The (possibly modified) response returned by + `post_complete_conversation` will be passed to + `post_complete_conversation_with_metadata`. + """ + return response, metadata + def pre_create_conversation( self, request: gcd_conversation.CreateConversationRequest, @@ -228,12 +277,35 @@ def post_create_conversation( ) -> gcd_conversation.Conversation: """Post-rpc interceptor for create_conversation - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_conversation_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Conversations server but before - it is returned to user code. + it is returned to user code. This `post_create_conversation` interceptor runs + before the `post_create_conversation_with_metadata` interceptor. """ return response + def post_create_conversation_with_metadata( + self, + response: gcd_conversation.Conversation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gcd_conversation.Conversation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_conversation + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Conversations server but before it is returned to user code. + + We recommend only using this `post_create_conversation_with_metadata` + interceptor in new development instead of the `post_create_conversation` interceptor. + When both interceptors are used, this `post_create_conversation_with_metadata` interceptor runs after the + `post_create_conversation` interceptor. The (possibly modified) response returned by + `post_create_conversation` will be passed to + `post_create_conversation_with_metadata`. + """ + return response, metadata + def pre_generate_stateless_suggestion( self, request: conversation.GenerateStatelessSuggestionRequest, @@ -254,12 +326,38 @@ def post_generate_stateless_suggestion( ) -> conversation.GenerateStatelessSuggestionResponse: """Post-rpc interceptor for generate_stateless_suggestion - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_generate_stateless_suggestion_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Conversations server but before - it is returned to user code. + it is returned to user code. 
This `post_generate_stateless_suggestion` interceptor runs + before the `post_generate_stateless_suggestion_with_metadata` interceptor. """ return response + def post_generate_stateless_suggestion_with_metadata( + self, + response: conversation.GenerateStatelessSuggestionResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + conversation.GenerateStatelessSuggestionResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for generate_stateless_suggestion + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Conversations server but before it is returned to user code. + + We recommend only using this `post_generate_stateless_suggestion_with_metadata` + interceptor in new development instead of the `post_generate_stateless_suggestion` interceptor. + When both interceptors are used, this `post_generate_stateless_suggestion_with_metadata` interceptor runs after the + `post_generate_stateless_suggestion` interceptor. The (possibly modified) response returned by + `post_generate_stateless_suggestion` will be passed to + `post_generate_stateless_suggestion_with_metadata`. + """ + return response, metadata + def pre_generate_stateless_summary( self, request: conversation.GenerateStatelessSummaryRequest, @@ -280,12 +378,38 @@ def post_generate_stateless_summary( ) -> conversation.GenerateStatelessSummaryResponse: """Post-rpc interceptor for generate_stateless_summary - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_generate_stateless_summary_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Conversations server but before - it is returned to user code. + it is returned to user code. This `post_generate_stateless_summary` interceptor runs + before the `post_generate_stateless_summary_with_metadata` interceptor. """ return response + def post_generate_stateless_summary_with_metadata( + self, + response: conversation.GenerateStatelessSummaryResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + conversation.GenerateStatelessSummaryResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for generate_stateless_summary + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Conversations server but before it is returned to user code. + + We recommend only using this `post_generate_stateless_summary_with_metadata` + interceptor in new development instead of the `post_generate_stateless_summary` interceptor. + When both interceptors are used, this `post_generate_stateless_summary_with_metadata` interceptor runs after the + `post_generate_stateless_summary` interceptor. The (possibly modified) response returned by + `post_generate_stateless_summary` will be passed to + `post_generate_stateless_summary_with_metadata`. + """ + return response, metadata + def pre_get_conversation( self, request: conversation.GetConversationRequest, @@ -305,12 +429,35 @@ def post_get_conversation( ) -> conversation.Conversation: """Post-rpc interceptor for get_conversation - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_conversation_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Conversations server but before - it is returned to user code. + it is returned to user code. 
This `post_get_conversation` interceptor runs + before the `post_get_conversation_with_metadata` interceptor. """ return response + def post_get_conversation_with_metadata( + self, + response: conversation.Conversation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[conversation.Conversation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_conversation + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Conversations server but before it is returned to user code. + + We recommend only using this `post_get_conversation_with_metadata` + interceptor in new development instead of the `post_get_conversation` interceptor. + When both interceptors are used, this `post_get_conversation_with_metadata` interceptor runs after the + `post_get_conversation` interceptor. The (possibly modified) response returned by + `post_get_conversation` will be passed to + `post_get_conversation_with_metadata`. + """ + return response, metadata + def pre_list_conversations( self, request: conversation.ListConversationsRequest, @@ -330,12 +477,37 @@ def post_list_conversations( ) -> conversation.ListConversationsResponse: """Post-rpc interceptor for list_conversations - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_conversations_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Conversations server but before - it is returned to user code. + it is returned to user code. This `post_list_conversations` interceptor runs + before the `post_list_conversations_with_metadata` interceptor. """ return response + def post_list_conversations_with_metadata( + self, + response: conversation.ListConversationsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + conversation.ListConversationsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_conversations + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Conversations server but before it is returned to user code. + + We recommend only using this `post_list_conversations_with_metadata` + interceptor in new development instead of the `post_list_conversations` interceptor. + When both interceptors are used, this `post_list_conversations_with_metadata` interceptor runs after the + `post_list_conversations` interceptor. The (possibly modified) response returned by + `post_list_conversations` will be passed to + `post_list_conversations_with_metadata`. + """ + return response, metadata + def pre_list_messages( self, request: conversation.ListMessagesRequest, @@ -355,12 +527,37 @@ def post_list_messages( ) -> conversation.ListMessagesResponse: """Post-rpc interceptor for list_messages - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_messages_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Conversations server but before - it is returned to user code. + it is returned to user code. This `post_list_messages` interceptor runs + before the `post_list_messages_with_metadata` interceptor. 
""" return response + def post_list_messages_with_metadata( + self, + response: conversation.ListMessagesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + conversation.ListMessagesResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_messages + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Conversations server but before it is returned to user code. + + We recommend only using this `post_list_messages_with_metadata` + interceptor in new development instead of the `post_list_messages` interceptor. + When both interceptors are used, this `post_list_messages_with_metadata` interceptor runs after the + `post_list_messages` interceptor. The (possibly modified) response returned by + `post_list_messages` will be passed to + `post_list_messages_with_metadata`. + """ + return response, metadata + def pre_search_knowledge( self, request: conversation.SearchKnowledgeRequest, @@ -380,12 +577,37 @@ def post_search_knowledge( ) -> conversation.SearchKnowledgeResponse: """Post-rpc interceptor for search_knowledge - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_search_knowledge_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Conversations server but before - it is returned to user code. + it is returned to user code. This `post_search_knowledge` interceptor runs + before the `post_search_knowledge_with_metadata` interceptor. """ return response + def post_search_knowledge_with_metadata( + self, + response: conversation.SearchKnowledgeResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + conversation.SearchKnowledgeResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for search_knowledge + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Conversations server but before it is returned to user code. + + We recommend only using this `post_search_knowledge_with_metadata` + interceptor in new development instead of the `post_search_knowledge` interceptor. + When both interceptors are used, this `post_search_knowledge_with_metadata` interceptor runs after the + `post_search_knowledge` interceptor. The (possibly modified) response returned by + `post_search_knowledge` will be passed to + `post_search_knowledge_with_metadata`. + """ + return response, metadata + def pre_suggest_conversation_summary( self, request: gcd_conversation.SuggestConversationSummaryRequest, @@ -406,12 +628,38 @@ def post_suggest_conversation_summary( ) -> gcd_conversation.SuggestConversationSummaryResponse: """Post-rpc interceptor for suggest_conversation_summary - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_suggest_conversation_summary_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Conversations server but before - it is returned to user code. + it is returned to user code. This `post_suggest_conversation_summary` interceptor runs + before the `post_suggest_conversation_summary_with_metadata` interceptor. 
""" return response + def post_suggest_conversation_summary_with_metadata( + self, + response: gcd_conversation.SuggestConversationSummaryResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + gcd_conversation.SuggestConversationSummaryResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for suggest_conversation_summary + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Conversations server but before it is returned to user code. + + We recommend only using this `post_suggest_conversation_summary_with_metadata` + interceptor in new development instead of the `post_suggest_conversation_summary` interceptor. + When both interceptors are used, this `post_suggest_conversation_summary_with_metadata` interceptor runs after the + `post_suggest_conversation_summary` interceptor. The (possibly modified) response returned by + `post_suggest_conversation_summary` will be passed to + `post_suggest_conversation_summary_with_metadata`. + """ + return response, metadata + def pre_get_location( self, request: locations_pb2.GetLocationRequest, @@ -751,6 +999,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_batch_create_messages(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_batch_create_messages_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -909,6 +1161,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_complete_conversation(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_complete_conversation_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1065,6 +1321,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_conversation(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_conversation_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1220,6 +1480,13 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_generate_stateless_suggestion(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_generate_stateless_suggestion_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1379,6 +1646,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_generate_stateless_summary(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_generate_stateless_summary_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1531,6 +1802,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_conversation(resp) + response_metadata = [(k, str(v)) for k, v 
in response.headers.items()] + resp, _ = self._interceptor.post_get_conversation_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1676,6 +1951,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_conversations(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_conversations_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1821,6 +2100,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_messages(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_messages_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1974,6 +2257,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_search_knowledge(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_search_knowledge_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2131,6 +2418,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_suggest_conversation_summary(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_suggest_conversation_summary_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/documents/client.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/documents/client.py index c37540d23773..8d8c29189e8e 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/documents/client.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/documents/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -493,6 +495,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. 
+ """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -1711,16 +1740,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -1766,16 +1799,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def cancel_operation( self, @@ -1876,16 +1913,20 @@ def get_location( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def list_locations( self, @@ -1931,16 +1972,20 @@ def list_locations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/documents/transports/rest.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/documents/transports/rest.py index 7c7cdb5197dc..6c3d375e0237 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/documents/transports/rest.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/documents/transports/rest.py @@ -152,12 +152,35 @@ def post_create_document( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_document - Override in a subclass to manipulate the response + DEPRECATED. 
Please use the `post_create_document_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Documents server but before - it is returned to user code. + it is returned to user code. This `post_create_document` interceptor runs + before the `post_create_document_with_metadata` interceptor. """ return response + def post_create_document_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_document + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Documents server but before it is returned to user code. + + We recommend only using this `post_create_document_with_metadata` + interceptor in new development instead of the `post_create_document` interceptor. + When both interceptors are used, this `post_create_document_with_metadata` interceptor runs after the + `post_create_document` interceptor. The (possibly modified) response returned by + `post_create_document` will be passed to + `post_create_document_with_metadata`. + """ + return response, metadata + def pre_delete_document( self, request: document.DeleteDocumentRequest, @@ -175,12 +198,35 @@ def post_delete_document( ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_document - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_document_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Documents server but before - it is returned to user code. + it is returned to user code. This `post_delete_document` interceptor runs + before the `post_delete_document_with_metadata` interceptor. """ return response + def post_delete_document_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_document + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Documents server but before it is returned to user code. + + We recommend only using this `post_delete_document_with_metadata` + interceptor in new development instead of the `post_delete_document` interceptor. + When both interceptors are used, this `post_delete_document_with_metadata` interceptor runs after the + `post_delete_document` interceptor. The (possibly modified) response returned by + `post_delete_document` will be passed to + `post_delete_document_with_metadata`. + """ + return response, metadata + def pre_get_document( self, request: document.GetDocumentRequest, @@ -196,12 +242,35 @@ def pre_get_document( def post_get_document(self, response: document.Document) -> document.Document: """Post-rpc interceptor for get_document - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_document_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Documents server but before - it is returned to user code. + it is returned to user code. This `post_get_document` interceptor runs + before the `post_get_document_with_metadata` interceptor. 
""" return response + def post_get_document_with_metadata( + self, + response: document.Document, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[document.Document, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_document + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Documents server but before it is returned to user code. + + We recommend only using this `post_get_document_with_metadata` + interceptor in new development instead of the `post_get_document` interceptor. + When both interceptors are used, this `post_get_document_with_metadata` interceptor runs after the + `post_get_document` interceptor. The (possibly modified) response returned by + `post_get_document` will be passed to + `post_get_document_with_metadata`. + """ + return response, metadata + def pre_import_documents( self, request: document.ImportDocumentsRequest, @@ -221,12 +290,35 @@ def post_import_documents( ) -> operations_pb2.Operation: """Post-rpc interceptor for import_documents - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_import_documents_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Documents server but before - it is returned to user code. + it is returned to user code. This `post_import_documents` interceptor runs + before the `post_import_documents_with_metadata` interceptor. """ return response + def post_import_documents_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for import_documents + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Documents server but before it is returned to user code. + + We recommend only using this `post_import_documents_with_metadata` + interceptor in new development instead of the `post_import_documents` interceptor. + When both interceptors are used, this `post_import_documents_with_metadata` interceptor runs after the + `post_import_documents` interceptor. The (possibly modified) response returned by + `post_import_documents` will be passed to + `post_import_documents_with_metadata`. + """ + return response, metadata + def pre_list_documents( self, request: document.ListDocumentsRequest, @@ -244,12 +336,35 @@ def post_list_documents( ) -> document.ListDocumentsResponse: """Post-rpc interceptor for list_documents - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_documents_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Documents server but before - it is returned to user code. + it is returned to user code. This `post_list_documents` interceptor runs + before the `post_list_documents_with_metadata` interceptor. """ return response + def post_list_documents_with_metadata( + self, + response: document.ListDocumentsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[document.ListDocumentsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_documents + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Documents server but before it is returned to user code. 
+ + We recommend only using this `post_list_documents_with_metadata` + interceptor in new development instead of the `post_list_documents` interceptor. + When both interceptors are used, this `post_list_documents_with_metadata` interceptor runs after the + `post_list_documents` interceptor. The (possibly modified) response returned by + `post_list_documents` will be passed to + `post_list_documents_with_metadata`. + """ + return response, metadata + def pre_reload_document( self, request: document.ReloadDocumentRequest, @@ -267,12 +382,35 @@ def post_reload_document( ) -> operations_pb2.Operation: """Post-rpc interceptor for reload_document - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_reload_document_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Documents server but before - it is returned to user code. + it is returned to user code. This `post_reload_document` interceptor runs + before the `post_reload_document_with_metadata` interceptor. """ return response + def post_reload_document_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for reload_document + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Documents server but before it is returned to user code. + + We recommend only using this `post_reload_document_with_metadata` + interceptor in new development instead of the `post_reload_document` interceptor. + When both interceptors are used, this `post_reload_document_with_metadata` interceptor runs after the + `post_reload_document` interceptor. The (possibly modified) response returned by + `post_reload_document` will be passed to + `post_reload_document_with_metadata`. + """ + return response, metadata + def pre_update_document( self, request: gcd_document.UpdateDocumentRequest, @@ -292,12 +430,35 @@ def post_update_document( ) -> operations_pb2.Operation: """Post-rpc interceptor for update_document - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_document_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Documents server but before - it is returned to user code. + it is returned to user code. This `post_update_document` interceptor runs + before the `post_update_document_with_metadata` interceptor. """ return response + def post_update_document_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_document + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Documents server but before it is returned to user code. + + We recommend only using this `post_update_document_with_metadata` + interceptor in new development instead of the `post_update_document` interceptor. + When both interceptors are used, this `post_update_document_with_metadata` interceptor runs after the + `post_update_document` interceptor. The (possibly modified) response returned by + `post_update_document` will be passed to + `post_update_document_with_metadata`. 
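When both the deprecated hook and its `_with_metadata` counterpart are overridden, the ordering described above applies: the plain hook runs first, and whatever it returns is what the `_with_metadata` hook receives. A minimal sketch assuming the `DocumentsRestInterceptor` from this file:

from google.cloud.dialogflow_v2beta1.services.documents.transports.rest import (
    DocumentsRestInterceptor,
)


class AuditingDocumentsInterceptor(DocumentsRestInterceptor):
    def post_get_document(self, response):
        # Deprecated hook: still invoked, and invoked first.
        if not response.display_name:
            response.display_name = "unnamed"
        return response

    def post_get_document_with_metadata(self, response, metadata):
        # Receives the (possibly modified) response from post_get_document,
        # plus the HTTP response headers as (name, value) pairs.
        assert response.display_name  # guaranteed non-empty by the hook above
        return response, metadata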
+ """ + return response, metadata + def pre_get_location( self, request: locations_pb2.GetLocationRequest, @@ -699,6 +860,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_document(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_document_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -845,6 +1010,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_document(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_document_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -997,6 +1166,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_document(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_document_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1151,6 +1324,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_import_documents(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_import_documents_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1298,6 +1475,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_documents(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_documents_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1452,6 +1633,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_reload_document(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_reload_document_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1606,6 +1791,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_document(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_document_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/encryption_spec_service/client.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/encryption_spec_service/client.py index 981a248b573b..16b4a55203ed 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/encryption_spec_service/client.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/encryption_spec_service/client.py @@ -14,6 +14,8 @@ # 
limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -491,6 +493,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -995,16 +1024,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -1050,16 +1083,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def cancel_operation( self, @@ -1160,16 +1197,20 @@ def get_location( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def list_locations( self, @@ -1215,16 +1256,20 @@ def list_locations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. 
+ return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/encryption_spec_service/transports/rest.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/encryption_spec_service/transports/rest.py index f0c753f22b1f..17e2ee852239 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/encryption_spec_service/transports/rest.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/encryption_spec_service/transports/rest.py @@ -113,12 +113,35 @@ def post_get_encryption_spec( ) -> encryption_spec.EncryptionSpec: """Post-rpc interceptor for get_encryption_spec - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_encryption_spec_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the EncryptionSpecService server but before - it is returned to user code. + it is returned to user code. This `post_get_encryption_spec` interceptor runs + before the `post_get_encryption_spec_with_metadata` interceptor. """ return response + def post_get_encryption_spec_with_metadata( + self, + response: encryption_spec.EncryptionSpec, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[encryption_spec.EncryptionSpec, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_encryption_spec + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the EncryptionSpecService server but before it is returned to user code. + + We recommend only using this `post_get_encryption_spec_with_metadata` + interceptor in new development instead of the `post_get_encryption_spec` interceptor. + When both interceptors are used, this `post_get_encryption_spec_with_metadata` interceptor runs after the + `post_get_encryption_spec` interceptor. The (possibly modified) response returned by + `post_get_encryption_spec` will be passed to + `post_get_encryption_spec_with_metadata`. + """ + return response, metadata + def pre_initialize_encryption_spec( self, request: gcd_encryption_spec.InitializeEncryptionSpecRequest, @@ -139,12 +162,35 @@ def post_initialize_encryption_spec( ) -> operations_pb2.Operation: """Post-rpc interceptor for initialize_encryption_spec - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_initialize_encryption_spec_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the EncryptionSpecService server but before - it is returned to user code. + it is returned to user code. This `post_initialize_encryption_spec` interceptor runs + before the `post_initialize_encryption_spec_with_metadata` interceptor. """ return response + def post_initialize_encryption_spec_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for initialize_encryption_spec + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the EncryptionSpecService server but before it is returned to user code. 
+ + We recommend only using this `post_initialize_encryption_spec_with_metadata` + interceptor in new development instead of the `post_initialize_encryption_spec` interceptor. + When both interceptors are used, this `post_initialize_encryption_spec_with_metadata` interceptor runs after the + `post_initialize_encryption_spec` interceptor. The (possibly modified) response returned by + `post_initialize_encryption_spec` will be passed to + `post_initialize_encryption_spec_with_metadata`. + """ + return response, metadata + def pre_get_location( self, request: locations_pb2.GetLocationRequest, @@ -541,6 +587,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_encryption_spec(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_encryption_spec_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -693,6 +743,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_initialize_encryption_spec(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_initialize_encryption_spec_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/entity_types/client.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/entity_types/client.py index 10d4e67c7fe7..8b190fc6a7f0 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/entity_types/client.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/entity_types/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -490,6 +492,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -2198,16 +2227,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. 
+ return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -2253,16 +2286,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def cancel_operation( self, @@ -2363,16 +2400,20 @@ def get_location( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def list_locations( self, @@ -2418,16 +2459,20 @@ def list_locations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/entity_types/transports/rest.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/entity_types/transports/rest.py index c1ce60c97a4b..6219cb03c659 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/entity_types/transports/rest.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/entity_types/transports/rest.py @@ -173,12 +173,35 @@ def post_batch_create_entities( ) -> operations_pb2.Operation: """Post-rpc interceptor for batch_create_entities - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_batch_create_entities_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the EntityTypes server but before - it is returned to user code. + it is returned to user code. This `post_batch_create_entities` interceptor runs + before the `post_batch_create_entities_with_metadata` interceptor. """ return response + def post_batch_create_entities_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for batch_create_entities + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the EntityTypes server but before it is returned to user code. 
+ + We recommend only using this `post_batch_create_entities_with_metadata` + interceptor in new development instead of the `post_batch_create_entities` interceptor. + When both interceptors are used, this `post_batch_create_entities_with_metadata` interceptor runs after the + `post_batch_create_entities` interceptor. The (possibly modified) response returned by + `post_batch_create_entities` will be passed to + `post_batch_create_entities_with_metadata`. + """ + return response, metadata + def pre_batch_delete_entities( self, request: entity_type.BatchDeleteEntitiesRequest, @@ -198,12 +221,35 @@ def post_batch_delete_entities( ) -> operations_pb2.Operation: """Post-rpc interceptor for batch_delete_entities - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_batch_delete_entities_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the EntityTypes server but before - it is returned to user code. + it is returned to user code. This `post_batch_delete_entities` interceptor runs + before the `post_batch_delete_entities_with_metadata` interceptor. """ return response + def post_batch_delete_entities_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for batch_delete_entities + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the EntityTypes server but before it is returned to user code. + + We recommend only using this `post_batch_delete_entities_with_metadata` + interceptor in new development instead of the `post_batch_delete_entities` interceptor. + When both interceptors are used, this `post_batch_delete_entities_with_metadata` interceptor runs after the + `post_batch_delete_entities` interceptor. The (possibly modified) response returned by + `post_batch_delete_entities` will be passed to + `post_batch_delete_entities_with_metadata`. + """ + return response, metadata + def pre_batch_delete_entity_types( self, request: entity_type.BatchDeleteEntityTypesRequest, @@ -224,12 +270,35 @@ def post_batch_delete_entity_types( ) -> operations_pb2.Operation: """Post-rpc interceptor for batch_delete_entity_types - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_batch_delete_entity_types_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the EntityTypes server but before - it is returned to user code. + it is returned to user code. This `post_batch_delete_entity_types` interceptor runs + before the `post_batch_delete_entity_types_with_metadata` interceptor. """ return response + def post_batch_delete_entity_types_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for batch_delete_entity_types + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the EntityTypes server but before it is returned to user code. + + We recommend only using this `post_batch_delete_entity_types_with_metadata` + interceptor in new development instead of the `post_batch_delete_entity_types` interceptor. 
+ When both interceptors are used, this `post_batch_delete_entity_types_with_metadata` interceptor runs after the + `post_batch_delete_entity_types` interceptor. The (possibly modified) response returned by + `post_batch_delete_entity_types` will be passed to + `post_batch_delete_entity_types_with_metadata`. + """ + return response, metadata + def pre_batch_update_entities( self, request: entity_type.BatchUpdateEntitiesRequest, @@ -249,12 +318,35 @@ def post_batch_update_entities( ) -> operations_pb2.Operation: """Post-rpc interceptor for batch_update_entities - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_batch_update_entities_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the EntityTypes server but before - it is returned to user code. + it is returned to user code. This `post_batch_update_entities` interceptor runs + before the `post_batch_update_entities_with_metadata` interceptor. """ return response + def post_batch_update_entities_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for batch_update_entities + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the EntityTypes server but before it is returned to user code. + + We recommend only using this `post_batch_update_entities_with_metadata` + interceptor in new development instead of the `post_batch_update_entities` interceptor. + When both interceptors are used, this `post_batch_update_entities_with_metadata` interceptor runs after the + `post_batch_update_entities` interceptor. The (possibly modified) response returned by + `post_batch_update_entities` will be passed to + `post_batch_update_entities_with_metadata`. + """ + return response, metadata + def pre_batch_update_entity_types( self, request: entity_type.BatchUpdateEntityTypesRequest, @@ -275,12 +367,35 @@ def post_batch_update_entity_types( ) -> operations_pb2.Operation: """Post-rpc interceptor for batch_update_entity_types - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_batch_update_entity_types_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the EntityTypes server but before - it is returned to user code. + it is returned to user code. This `post_batch_update_entity_types` interceptor runs + before the `post_batch_update_entity_types_with_metadata` interceptor. """ return response + def post_batch_update_entity_types_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for batch_update_entity_types + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the EntityTypes server but before it is returned to user code. + + We recommend only using this `post_batch_update_entity_types_with_metadata` + interceptor in new development instead of the `post_batch_update_entity_types` interceptor. + When both interceptors are used, this `post_batch_update_entity_types_with_metadata` interceptor runs after the + `post_batch_update_entity_types` interceptor. 
The (possibly modified) response returned by + `post_batch_update_entity_types` will be passed to + `post_batch_update_entity_types_with_metadata`. + """ + return response, metadata + def pre_create_entity_type( self, request: gcd_entity_type.CreateEntityTypeRequest, @@ -300,12 +415,35 @@ def post_create_entity_type( ) -> gcd_entity_type.EntityType: """Post-rpc interceptor for create_entity_type - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_entity_type_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the EntityTypes server but before - it is returned to user code. + it is returned to user code. This `post_create_entity_type` interceptor runs + before the `post_create_entity_type_with_metadata` interceptor. """ return response + def post_create_entity_type_with_metadata( + self, + response: gcd_entity_type.EntityType, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gcd_entity_type.EntityType, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_entity_type + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the EntityTypes server but before it is returned to user code. + + We recommend only using this `post_create_entity_type_with_metadata` + interceptor in new development instead of the `post_create_entity_type` interceptor. + When both interceptors are used, this `post_create_entity_type_with_metadata` interceptor runs after the + `post_create_entity_type` interceptor. The (possibly modified) response returned by + `post_create_entity_type` will be passed to + `post_create_entity_type_with_metadata`. + """ + return response, metadata + def pre_delete_entity_type( self, request: entity_type.DeleteEntityTypeRequest, @@ -339,12 +477,35 @@ def post_get_entity_type( ) -> entity_type.EntityType: """Post-rpc interceptor for get_entity_type - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_entity_type_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the EntityTypes server but before - it is returned to user code. + it is returned to user code. This `post_get_entity_type` interceptor runs + before the `post_get_entity_type_with_metadata` interceptor. """ return response + def post_get_entity_type_with_metadata( + self, + response: entity_type.EntityType, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[entity_type.EntityType, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_entity_type + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the EntityTypes server but before it is returned to user code. + + We recommend only using this `post_get_entity_type_with_metadata` + interceptor in new development instead of the `post_get_entity_type` interceptor. + When both interceptors are used, this `post_get_entity_type_with_metadata` interceptor runs after the + `post_get_entity_type` interceptor. The (possibly modified) response returned by + `post_get_entity_type` will be passed to + `post_get_entity_type_with_metadata`. 
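The batch_* RPCs above are long-running, so their post hooks (both the deprecated and the `_with_metadata` variants) receive the raw `operations_pb2.Operation` rather than the eventual response message. A small sketch assuming the `EntityTypesRestInterceptor` from this file:

from google.longrunning import operations_pb2

from google.cloud.dialogflow_v2beta1.services.entity_types.transports.rest import (
    EntityTypesRestInterceptor,
)


class OperationNameInterceptor(EntityTypesRestInterceptor):
    def post_batch_update_entity_types_with_metadata(self, response, metadata):
        # `response` is the LRO handle, not a BatchUpdateEntityTypesResponse.
        assert isinstance(response, operations_pb2.Operation)
        print("started long-running operation:", response.name)
        return response, metadata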
+ """ + return response, metadata + def pre_list_entity_types( self, request: entity_type.ListEntityTypesRequest, @@ -364,12 +525,37 @@ def post_list_entity_types( ) -> entity_type.ListEntityTypesResponse: """Post-rpc interceptor for list_entity_types - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_entity_types_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the EntityTypes server but before - it is returned to user code. + it is returned to user code. This `post_list_entity_types` interceptor runs + before the `post_list_entity_types_with_metadata` interceptor. """ return response + def post_list_entity_types_with_metadata( + self, + response: entity_type.ListEntityTypesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + entity_type.ListEntityTypesResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_entity_types + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the EntityTypes server but before it is returned to user code. + + We recommend only using this `post_list_entity_types_with_metadata` + interceptor in new development instead of the `post_list_entity_types` interceptor. + When both interceptors are used, this `post_list_entity_types_with_metadata` interceptor runs after the + `post_list_entity_types` interceptor. The (possibly modified) response returned by + `post_list_entity_types` will be passed to + `post_list_entity_types_with_metadata`. + """ + return response, metadata + def pre_update_entity_type( self, request: gcd_entity_type.UpdateEntityTypeRequest, @@ -389,12 +575,35 @@ def post_update_entity_type( ) -> gcd_entity_type.EntityType: """Post-rpc interceptor for update_entity_type - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_entity_type_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the EntityTypes server but before - it is returned to user code. + it is returned to user code. This `post_update_entity_type` interceptor runs + before the `post_update_entity_type_with_metadata` interceptor. """ return response + def post_update_entity_type_with_metadata( + self, + response: gcd_entity_type.EntityType, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gcd_entity_type.EntityType, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_entity_type + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the EntityTypes server but before it is returned to user code. + + We recommend only using this `post_update_entity_type_with_metadata` + interceptor in new development instead of the `post_update_entity_type` interceptor. + When both interceptors are used, this `post_update_entity_type_with_metadata` interceptor runs after the + `post_update_entity_type` interceptor. The (possibly modified) response returned by + `post_update_entity_type` will be passed to + `post_update_entity_type_with_metadata`. 
+ """ + return response, metadata + def pre_get_location( self, request: locations_pb2.GetLocationRequest, @@ -792,6 +1001,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_batch_create_entities(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_batch_create_entities_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -942,6 +1155,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_batch_delete_entities(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_batch_delete_entities_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1092,6 +1309,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_batch_delete_entity_types(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_batch_delete_entity_types_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1242,6 +1463,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_batch_update_entities(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_batch_update_entities_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1392,6 +1617,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_batch_update_entity_types(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_batch_update_entity_types_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1556,6 +1785,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_entity_type(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_entity_type_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1823,6 +2056,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_entity_type(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_entity_type_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1968,6 +2205,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_entity_types(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_entity_types_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2134,6 
+2375,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_entity_type(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_entity_type_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/environments/client.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/environments/client.py index 88995506e78c..5c6c08465b0d 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/environments/client.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/environments/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -519,6 +521,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -1387,16 +1416,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -1442,16 +1475,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def cancel_operation( self, @@ -1552,16 +1589,20 @@ def get_location( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def list_locations( self, @@ -1607,16 +1648,20 @@ def list_locations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/environments/transports/rest.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/environments/transports/rest.py index b58901b74cfe..3e940e2479af 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/environments/transports/rest.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/environments/transports/rest.py @@ -140,12 +140,35 @@ def post_create_environment( ) -> environment.Environment: """Post-rpc interceptor for create_environment - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_environment_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Environments server but before - it is returned to user code. + it is returned to user code. This `post_create_environment` interceptor runs + before the `post_create_environment_with_metadata` interceptor. """ return response + def post_create_environment_with_metadata( + self, + response: environment.Environment, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[environment.Environment, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_environment + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Environments server but before it is returned to user code. + + We recommend only using this `post_create_environment_with_metadata` + interceptor in new development instead of the `post_create_environment` interceptor. + When both interceptors are used, this `post_create_environment_with_metadata` interceptor runs after the + `post_create_environment` interceptor. The (possibly modified) response returned by + `post_create_environment` will be passed to + `post_create_environment_with_metadata`. + """ + return response, metadata + def pre_delete_environment( self, request: environment.DeleteEnvironmentRequest, @@ -179,12 +202,35 @@ def post_get_environment( ) -> environment.Environment: """Post-rpc interceptor for get_environment - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_environment_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Environments server but before - it is returned to user code. + it is returned to user code. This `post_get_environment` interceptor runs + before the `post_get_environment_with_metadata` interceptor. 
""" return response + def post_get_environment_with_metadata( + self, + response: environment.Environment, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[environment.Environment, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_environment + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Environments server but before it is returned to user code. + + We recommend only using this `post_get_environment_with_metadata` + interceptor in new development instead of the `post_get_environment` interceptor. + When both interceptors are used, this `post_get_environment_with_metadata` interceptor runs after the + `post_get_environment` interceptor. The (possibly modified) response returned by + `post_get_environment` will be passed to + `post_get_environment_with_metadata`. + """ + return response, metadata + def pre_get_environment_history( self, request: environment.GetEnvironmentHistoryRequest, @@ -205,12 +251,35 @@ def post_get_environment_history( ) -> environment.EnvironmentHistory: """Post-rpc interceptor for get_environment_history - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_environment_history_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Environments server but before - it is returned to user code. + it is returned to user code. This `post_get_environment_history` interceptor runs + before the `post_get_environment_history_with_metadata` interceptor. """ return response + def post_get_environment_history_with_metadata( + self, + response: environment.EnvironmentHistory, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[environment.EnvironmentHistory, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_environment_history + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Environments server but before it is returned to user code. + + We recommend only using this `post_get_environment_history_with_metadata` + interceptor in new development instead of the `post_get_environment_history` interceptor. + When both interceptors are used, this `post_get_environment_history_with_metadata` interceptor runs after the + `post_get_environment_history` interceptor. The (possibly modified) response returned by + `post_get_environment_history` will be passed to + `post_get_environment_history_with_metadata`. + """ + return response, metadata + def pre_list_environments( self, request: environment.ListEnvironmentsRequest, @@ -230,12 +299,37 @@ def post_list_environments( ) -> environment.ListEnvironmentsResponse: """Post-rpc interceptor for list_environments - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_environments_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Environments server but before - it is returned to user code. + it is returned to user code. This `post_list_environments` interceptor runs + before the `post_list_environments_with_metadata` interceptor. 
""" return response + def post_list_environments_with_metadata( + self, + response: environment.ListEnvironmentsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + environment.ListEnvironmentsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_environments + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Environments server but before it is returned to user code. + + We recommend only using this `post_list_environments_with_metadata` + interceptor in new development instead of the `post_list_environments` interceptor. + When both interceptors are used, this `post_list_environments_with_metadata` interceptor runs after the + `post_list_environments` interceptor. The (possibly modified) response returned by + `post_list_environments` will be passed to + `post_list_environments_with_metadata`. + """ + return response, metadata + def pre_update_environment( self, request: environment.UpdateEnvironmentRequest, @@ -255,12 +349,35 @@ def post_update_environment( ) -> environment.Environment: """Post-rpc interceptor for update_environment - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_environment_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Environments server but before - it is returned to user code. + it is returned to user code. This `post_update_environment` interceptor runs + before the `post_update_environment_with_metadata` interceptor. """ return response + def post_update_environment_with_metadata( + self, + response: environment.Environment, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[environment.Environment, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_environment + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Environments server but before it is returned to user code. + + We recommend only using this `post_update_environment_with_metadata` + interceptor in new development instead of the `post_update_environment` interceptor. + When both interceptors are used, this `post_update_environment_with_metadata` interceptor runs after the + `post_update_environment` interceptor. The (possibly modified) response returned by + `post_update_environment` will be passed to + `post_update_environment_with_metadata`. 
+ """ + return response, metadata + def pre_get_location( self, request: locations_pb2.GetLocationRequest, @@ -617,6 +734,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_environment(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_environment_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -886,6 +1007,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_environment(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_environment_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1031,6 +1156,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_environment_history(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_environment_history_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1176,6 +1305,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_environments(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_environments_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1346,6 +1479,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_environment(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_environment_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/fulfillments/client.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/fulfillments/client.py index 526df65ae19c..766e3f76114e 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/fulfillments/client.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/fulfillments/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -481,6 +483,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. 
+ """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -979,16 +1008,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -1034,16 +1067,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def cancel_operation( self, @@ -1144,16 +1181,20 @@ def get_location( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def list_locations( self, @@ -1199,16 +1240,20 @@ def list_locations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/fulfillments/transports/rest.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/fulfillments/transports/rest.py index 5e743e3283f7..a49e8bc6847e 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/fulfillments/transports/rest.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/fulfillments/transports/rest.py @@ -112,12 +112,35 @@ def post_get_fulfillment( ) -> fulfillment.Fulfillment: """Post-rpc interceptor for get_fulfillment - Override in a subclass to manipulate the response + DEPRECATED. 
Please use the `post_get_fulfillment_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Fulfillments server but before - it is returned to user code. + it is returned to user code. This `post_get_fulfillment` interceptor runs + before the `post_get_fulfillment_with_metadata` interceptor. """ return response + def post_get_fulfillment_with_metadata( + self, + response: fulfillment.Fulfillment, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[fulfillment.Fulfillment, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_fulfillment + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Fulfillments server but before it is returned to user code. + + We recommend only using this `post_get_fulfillment_with_metadata` + interceptor in new development instead of the `post_get_fulfillment` interceptor. + When both interceptors are used, this `post_get_fulfillment_with_metadata` interceptor runs after the + `post_get_fulfillment` interceptor. The (possibly modified) response returned by + `post_get_fulfillment` will be passed to + `post_get_fulfillment_with_metadata`. + """ + return response, metadata + def pre_update_fulfillment( self, request: gcd_fulfillment.UpdateFulfillmentRequest, @@ -138,12 +161,35 @@ def post_update_fulfillment( ) -> gcd_fulfillment.Fulfillment: """Post-rpc interceptor for update_fulfillment - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_fulfillment_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Fulfillments server but before - it is returned to user code. + it is returned to user code. This `post_update_fulfillment` interceptor runs + before the `post_update_fulfillment_with_metadata` interceptor. """ return response + def post_update_fulfillment_with_metadata( + self, + response: gcd_fulfillment.Fulfillment, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gcd_fulfillment.Fulfillment, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_fulfillment + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Fulfillments server but before it is returned to user code. + + We recommend only using this `post_update_fulfillment_with_metadata` + interceptor in new development instead of the `post_update_fulfillment` interceptor. + When both interceptors are used, this `post_update_fulfillment_with_metadata` interceptor runs after the + `post_update_fulfillment` interceptor. The (possibly modified) response returned by + `post_update_fulfillment` will be passed to + `post_update_fulfillment_with_metadata`. 
+ """ + return response, metadata + def pre_get_location( self, request: locations_pb2.GetLocationRequest, @@ -485,6 +531,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_fulfillment(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_fulfillment_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -646,6 +696,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_fulfillment(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_fulfillment_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/generators/client.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/generators/client.py index bf0a5d28b245..0de8fdb1b605 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/generators/client.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/generators/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -493,6 +495,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -1305,16 +1334,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -1360,16 +1393,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. 
- return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def cancel_operation( self, @@ -1470,16 +1507,20 @@ def get_location( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def list_locations( self, @@ -1525,16 +1566,20 @@ def list_locations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/generators/transports/rest.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/generators/transports/rest.py index f3cfe2ab4bc9..96ffb3c4cf30 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/generators/transports/rest.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/generators/transports/rest.py @@ -133,12 +133,35 @@ def post_create_generator( ) -> gcd_generator.Generator: """Post-rpc interceptor for create_generator - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_generator_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Generators server but before - it is returned to user code. + it is returned to user code. This `post_create_generator` interceptor runs + before the `post_create_generator_with_metadata` interceptor. """ return response + def post_create_generator_with_metadata( + self, + response: gcd_generator.Generator, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gcd_generator.Generator, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_generator + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Generators server but before it is returned to user code. + + We recommend only using this `post_create_generator_with_metadata` + interceptor in new development instead of the `post_create_generator` interceptor. + When both interceptors are used, this `post_create_generator_with_metadata` interceptor runs after the + `post_create_generator` interceptor. The (possibly modified) response returned by + `post_create_generator` will be passed to + `post_create_generator_with_metadata`. 
+ """ + return response, metadata + def pre_delete_generator( self, request: generator.DeleteGeneratorRequest, @@ -168,12 +191,35 @@ def pre_get_generator( def post_get_generator(self, response: generator.Generator) -> generator.Generator: """Post-rpc interceptor for get_generator - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_generator_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Generators server but before - it is returned to user code. + it is returned to user code. This `post_get_generator` interceptor runs + before the `post_get_generator_with_metadata` interceptor. """ return response + def post_get_generator_with_metadata( + self, + response: generator.Generator, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[generator.Generator, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_generator + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Generators server but before it is returned to user code. + + We recommend only using this `post_get_generator_with_metadata` + interceptor in new development instead of the `post_get_generator` interceptor. + When both interceptors are used, this `post_get_generator_with_metadata` interceptor runs after the + `post_get_generator` interceptor. The (possibly modified) response returned by + `post_get_generator` will be passed to + `post_get_generator_with_metadata`. + """ + return response, metadata + def pre_list_generators( self, request: generator.ListGeneratorsRequest, @@ -193,12 +239,37 @@ def post_list_generators( ) -> generator.ListGeneratorsResponse: """Post-rpc interceptor for list_generators - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_generators_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Generators server but before - it is returned to user code. + it is returned to user code. This `post_list_generators` interceptor runs + before the `post_list_generators_with_metadata` interceptor. """ return response + def post_list_generators_with_metadata( + self, + response: generator.ListGeneratorsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + generator.ListGeneratorsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_generators + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Generators server but before it is returned to user code. + + We recommend only using this `post_list_generators_with_metadata` + interceptor in new development instead of the `post_list_generators` interceptor. + When both interceptors are used, this `post_list_generators_with_metadata` interceptor runs after the + `post_list_generators` interceptor. The (possibly modified) response returned by + `post_list_generators` will be passed to + `post_list_generators_with_metadata`. + """ + return response, metadata + def pre_update_generator( self, request: gcd_generator.UpdateGeneratorRequest, @@ -218,12 +289,35 @@ def post_update_generator( ) -> gcd_generator.Generator: """Post-rpc interceptor for update_generator - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_generator_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response after it is returned by the Generators server but before - it is returned to user code. + it is returned to user code. This `post_update_generator` interceptor runs + before the `post_update_generator_with_metadata` interceptor. """ return response + def post_update_generator_with_metadata( + self, + response: gcd_generator.Generator, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gcd_generator.Generator, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_generator + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Generators server but before it is returned to user code. + + We recommend only using this `post_update_generator_with_metadata` + interceptor in new development instead of the `post_update_generator` interceptor. + When both interceptors are used, this `post_update_generator_with_metadata` interceptor runs after the + `post_update_generator` interceptor. The (possibly modified) response returned by + `post_update_generator` will be passed to + `post_update_generator_with_metadata`. + """ + return response, metadata + def pre_get_location( self, request: locations_pb2.GetLocationRequest, @@ -563,6 +657,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_generator(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_generator_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -815,6 +913,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_generator(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_generator_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -957,6 +1059,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_generators(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_generators_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1107,6 +1213,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_generator(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_generator_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/intents/client.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/intents/client.py index accac19fc383..2e202fc0dd4b 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/intents/client.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/intents/client.py @@ -14,6 +14,8 @@ # limitations under the License. 
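Every new `post_*_with_metadata` hook in these transports/rest.py diffs has the same shape: it receives the decoded response plus the `(header, value)` pairs the transport builds from `response.headers.items()`, and it must return both. A minimal sketch of a subclass that uses the generators hook to inspect response headers is below; the import path follows the file touched above, while the `GeneratorsRestInterceptor` base-class name and the transport/client wiring in the trailing comment are assumptions based on the usual GAPIC REST layout rather than anything shown in this diff.

import logging
from typing import Sequence, Tuple, Union

from google.cloud.dialogflow_v2beta1.services.generators.transports.rest import (
    GeneratorsRestInterceptor,
)

_LOGGER = logging.getLogger(__name__)


class HeaderLoggingGeneratorsInterceptor(GeneratorsRestInterceptor):
    """Reads response headers through the new metadata-aware hook."""

    def post_get_generator_with_metadata(
        self,
        response,
        metadata: Sequence[Tuple[str, Union[str, bytes]]],
    ):
        # `metadata` is the list the transport builds from response.headers,
        # so headers are visible here without overriding the deprecated
        # post_get_generator hook. Returning both keeps the chain intact.
        for key, value in metadata:
            _LOGGER.debug("get_generator response header %s=%s", key, value)
        return response, metadata


# Assumed wiring (standard GAPIC REST usage, not shown in this diff):
#   transport = GeneratorsRestTransport(interceptor=HeaderLoggingGeneratorsInterceptor())
#   client = GeneratorsClient(transport=transport)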
# from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -511,6 +513,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -1746,16 +1775,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -1801,16 +1834,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def cancel_operation( self, @@ -1911,16 +1948,20 @@ def get_location( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def list_locations( self, @@ -1966,16 +2007,20 @@ def list_locations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. 
+ return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/intents/transports/rest.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/intents/transports/rest.py index 716185442b92..d9a887c50ff1 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/intents/transports/rest.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/intents/transports/rest.py @@ -149,12 +149,35 @@ def post_batch_delete_intents( ) -> operations_pb2.Operation: """Post-rpc interceptor for batch_delete_intents - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_batch_delete_intents_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Intents server but before - it is returned to user code. + it is returned to user code. This `post_batch_delete_intents` interceptor runs + before the `post_batch_delete_intents_with_metadata` interceptor. """ return response + def post_batch_delete_intents_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for batch_delete_intents + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Intents server but before it is returned to user code. + + We recommend only using this `post_batch_delete_intents_with_metadata` + interceptor in new development instead of the `post_batch_delete_intents` interceptor. + When both interceptors are used, this `post_batch_delete_intents_with_metadata` interceptor runs after the + `post_batch_delete_intents` interceptor. The (possibly modified) response returned by + `post_batch_delete_intents` will be passed to + `post_batch_delete_intents_with_metadata`. + """ + return response, metadata + def pre_batch_update_intents( self, request: intent.BatchUpdateIntentsRequest, @@ -174,12 +197,35 @@ def post_batch_update_intents( ) -> operations_pb2.Operation: """Post-rpc interceptor for batch_update_intents - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_batch_update_intents_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Intents server but before - it is returned to user code. + it is returned to user code. This `post_batch_update_intents` interceptor runs + before the `post_batch_update_intents_with_metadata` interceptor. """ return response + def post_batch_update_intents_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for batch_update_intents + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Intents server but before it is returned to user code. + + We recommend only using this `post_batch_update_intents_with_metadata` + interceptor in new development instead of the `post_batch_update_intents` interceptor. 
+ When both interceptors are used, this `post_batch_update_intents_with_metadata` interceptor runs after the + `post_batch_update_intents` interceptor. The (possibly modified) response returned by + `post_batch_update_intents` will be passed to + `post_batch_update_intents_with_metadata`. + """ + return response, metadata + def pre_create_intent( self, request: gcd_intent.CreateIntentRequest, @@ -195,12 +241,35 @@ def pre_create_intent( def post_create_intent(self, response: gcd_intent.Intent) -> gcd_intent.Intent: """Post-rpc interceptor for create_intent - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_intent_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Intents server but before - it is returned to user code. + it is returned to user code. This `post_create_intent` interceptor runs + before the `post_create_intent_with_metadata` interceptor. """ return response + def post_create_intent_with_metadata( + self, + response: gcd_intent.Intent, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gcd_intent.Intent, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_intent + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Intents server but before it is returned to user code. + + We recommend only using this `post_create_intent_with_metadata` + interceptor in new development instead of the `post_create_intent` interceptor. + When both interceptors are used, this `post_create_intent_with_metadata` interceptor runs after the + `post_create_intent` interceptor. The (possibly modified) response returned by + `post_create_intent` will be passed to + `post_create_intent_with_metadata`. + """ + return response, metadata + def pre_delete_intent( self, request: intent.DeleteIntentRequest, @@ -228,12 +297,33 @@ def pre_get_intent( def post_get_intent(self, response: intent.Intent) -> intent.Intent: """Post-rpc interceptor for get_intent - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_intent_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Intents server but before - it is returned to user code. + it is returned to user code. This `post_get_intent` interceptor runs + before the `post_get_intent_with_metadata` interceptor. """ return response + def post_get_intent_with_metadata( + self, response: intent.Intent, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[intent.Intent, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_intent + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Intents server but before it is returned to user code. + + We recommend only using this `post_get_intent_with_metadata` + interceptor in new development instead of the `post_get_intent` interceptor. + When both interceptors are used, this `post_get_intent_with_metadata` interceptor runs after the + `post_get_intent` interceptor. The (possibly modified) response returned by + `post_get_intent` will be passed to + `post_get_intent_with_metadata`. 
+ """ + return response, metadata + def pre_list_intents( self, request: intent.ListIntentsRequest, @@ -251,12 +341,35 @@ def post_list_intents( ) -> intent.ListIntentsResponse: """Post-rpc interceptor for list_intents - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_intents_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Intents server but before - it is returned to user code. + it is returned to user code. This `post_list_intents` interceptor runs + before the `post_list_intents_with_metadata` interceptor. """ return response + def post_list_intents_with_metadata( + self, + response: intent.ListIntentsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[intent.ListIntentsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_intents + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Intents server but before it is returned to user code. + + We recommend only using this `post_list_intents_with_metadata` + interceptor in new development instead of the `post_list_intents` interceptor. + When both interceptors are used, this `post_list_intents_with_metadata` interceptor runs after the + `post_list_intents` interceptor. The (possibly modified) response returned by + `post_list_intents` will be passed to + `post_list_intents_with_metadata`. + """ + return response, metadata + def pre_update_intent( self, request: gcd_intent.UpdateIntentRequest, @@ -272,12 +385,35 @@ def pre_update_intent( def post_update_intent(self, response: gcd_intent.Intent) -> gcd_intent.Intent: """Post-rpc interceptor for update_intent - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_intent_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Intents server but before - it is returned to user code. + it is returned to user code. This `post_update_intent` interceptor runs + before the `post_update_intent_with_metadata` interceptor. """ return response + def post_update_intent_with_metadata( + self, + response: gcd_intent.Intent, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gcd_intent.Intent, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_intent + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Intents server but before it is returned to user code. + + We recommend only using this `post_update_intent_with_metadata` + interceptor in new development instead of the `post_update_intent` interceptor. + When both interceptors are used, this `post_update_intent_with_metadata` interceptor runs after the + `post_update_intent` interceptor. The (possibly modified) response returned by + `post_update_intent` will be passed to + `post_update_intent_with_metadata`. 
+ """ + return response, metadata + def pre_get_location( self, request: locations_pb2.GetLocationRequest, @@ -675,6 +811,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_batch_delete_intents(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_batch_delete_intents_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -825,6 +965,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_batch_update_intents(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_batch_update_intents_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -985,6 +1129,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_intent(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_intent_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1246,6 +1394,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_intent(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_intent_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1391,6 +1543,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_intents(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_intents_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1551,6 +1707,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_intent(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_intent_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/knowledge_bases/client.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/knowledge_bases/client.py index 673bd67725e7..b625490871d4 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/knowledge_bases/client.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/knowledge_bases/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -488,6 +490,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. 
return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -1354,16 +1383,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -1409,16 +1442,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def cancel_operation( self, @@ -1519,16 +1556,20 @@ def get_location( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def list_locations( self, @@ -1574,16 +1615,20 @@ def list_locations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. 
+ return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/knowledge_bases/transports/rest.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/knowledge_bases/transports/rest.py index e83acc480f5f..a0342f21d19c 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/knowledge_bases/transports/rest.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/knowledge_bases/transports/rest.py @@ -134,12 +134,37 @@ def post_create_knowledge_base( ) -> gcd_knowledge_base.KnowledgeBase: """Post-rpc interceptor for create_knowledge_base - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_knowledge_base_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the KnowledgeBases server but before - it is returned to user code. + it is returned to user code. This `post_create_knowledge_base` interceptor runs + before the `post_create_knowledge_base_with_metadata` interceptor. """ return response + def post_create_knowledge_base_with_metadata( + self, + response: gcd_knowledge_base.KnowledgeBase, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + gcd_knowledge_base.KnowledgeBase, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for create_knowledge_base + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the KnowledgeBases server but before it is returned to user code. + + We recommend only using this `post_create_knowledge_base_with_metadata` + interceptor in new development instead of the `post_create_knowledge_base` interceptor. + When both interceptors are used, this `post_create_knowledge_base_with_metadata` interceptor runs after the + `post_create_knowledge_base` interceptor. The (possibly modified) response returned by + `post_create_knowledge_base` will be passed to + `post_create_knowledge_base_with_metadata`. + """ + return response, metadata + def pre_delete_knowledge_base( self, request: knowledge_base.DeleteKnowledgeBaseRequest, @@ -174,12 +199,35 @@ def post_get_knowledge_base( ) -> knowledge_base.KnowledgeBase: """Post-rpc interceptor for get_knowledge_base - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_knowledge_base_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the KnowledgeBases server but before - it is returned to user code. + it is returned to user code. This `post_get_knowledge_base` interceptor runs + before the `post_get_knowledge_base_with_metadata` interceptor. """ return response + def post_get_knowledge_base_with_metadata( + self, + response: knowledge_base.KnowledgeBase, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[knowledge_base.KnowledgeBase, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_knowledge_base + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the KnowledgeBases server but before it is returned to user code. + + We recommend only using this `post_get_knowledge_base_with_metadata` + interceptor in new development instead of the `post_get_knowledge_base` interceptor. 
+ When both interceptors are used, this `post_get_knowledge_base_with_metadata` interceptor runs after the + `post_get_knowledge_base` interceptor. The (possibly modified) response returned by + `post_get_knowledge_base` will be passed to + `post_get_knowledge_base_with_metadata`. + """ + return response, metadata + def pre_list_knowledge_bases( self, request: knowledge_base.ListKnowledgeBasesRequest, @@ -200,12 +248,38 @@ def post_list_knowledge_bases( ) -> knowledge_base.ListKnowledgeBasesResponse: """Post-rpc interceptor for list_knowledge_bases - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_knowledge_bases_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the KnowledgeBases server but before - it is returned to user code. + it is returned to user code. This `post_list_knowledge_bases` interceptor runs + before the `post_list_knowledge_bases_with_metadata` interceptor. """ return response + def post_list_knowledge_bases_with_metadata( + self, + response: knowledge_base.ListKnowledgeBasesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + knowledge_base.ListKnowledgeBasesResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_knowledge_bases + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the KnowledgeBases server but before it is returned to user code. + + We recommend only using this `post_list_knowledge_bases_with_metadata` + interceptor in new development instead of the `post_list_knowledge_bases` interceptor. + When both interceptors are used, this `post_list_knowledge_bases_with_metadata` interceptor runs after the + `post_list_knowledge_bases` interceptor. The (possibly modified) response returned by + `post_list_knowledge_bases` will be passed to + `post_list_knowledge_bases_with_metadata`. + """ + return response, metadata + def pre_update_knowledge_base( self, request: gcd_knowledge_base.UpdateKnowledgeBaseRequest, @@ -226,12 +300,37 @@ def post_update_knowledge_base( ) -> gcd_knowledge_base.KnowledgeBase: """Post-rpc interceptor for update_knowledge_base - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_knowledge_base_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the KnowledgeBases server but before - it is returned to user code. + it is returned to user code. This `post_update_knowledge_base` interceptor runs + before the `post_update_knowledge_base_with_metadata` interceptor. """ return response + def post_update_knowledge_base_with_metadata( + self, + response: gcd_knowledge_base.KnowledgeBase, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + gcd_knowledge_base.KnowledgeBase, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for update_knowledge_base + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the KnowledgeBases server but before it is returned to user code. + + We recommend only using this `post_update_knowledge_base_with_metadata` + interceptor in new development instead of the `post_update_knowledge_base` interceptor. + When both interceptors are used, this `post_update_knowledge_base_with_metadata` interceptor runs after the + `post_update_knowledge_base` interceptor. 
The (possibly modified) response returned by + `post_update_knowledge_base` will be passed to + `post_update_knowledge_base_with_metadata`. + """ + return response, metadata + def pre_get_location( self, request: locations_pb2.GetLocationRequest, @@ -582,6 +681,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_knowledge_base(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_knowledge_base_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -849,6 +952,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_knowledge_base(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_knowledge_base_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -994,6 +1101,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_knowledge_bases(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_knowledge_bases_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1158,6 +1269,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_knowledge_base(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_knowledge_base_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/participants/client.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/participants/client.py index 861e727822b8..a78b5a0a7eaf 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/participants/client.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/participants/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -640,6 +642,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. 
+ """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -2296,16 +2325,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -2351,16 +2384,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def cancel_operation( self, @@ -2461,16 +2498,20 @@ def get_location( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def list_locations( self, @@ -2516,16 +2557,20 @@ def list_locations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. 
+ return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/participants/transports/rest.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/participants/transports/rest.py index fba5d657fc34..dbefab310176 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/participants/transports/rest.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/participants/transports/rest.py @@ -184,12 +184,37 @@ def post_analyze_content( ) -> gcd_participant.AnalyzeContentResponse: """Post-rpc interceptor for analyze_content - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_analyze_content_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Participants server but before - it is returned to user code. + it is returned to user code. This `post_analyze_content` interceptor runs + before the `post_analyze_content_with_metadata` interceptor. """ return response + def post_analyze_content_with_metadata( + self, + response: gcd_participant.AnalyzeContentResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + gcd_participant.AnalyzeContentResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for analyze_content + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Participants server but before it is returned to user code. + + We recommend only using this `post_analyze_content_with_metadata` + interceptor in new development instead of the `post_analyze_content` interceptor. + When both interceptors are used, this `post_analyze_content_with_metadata` interceptor runs after the + `post_analyze_content` interceptor. The (possibly modified) response returned by + `post_analyze_content` will be passed to + `post_analyze_content_with_metadata`. + """ + return response, metadata + def pre_compile_suggestion( self, request: participant.CompileSuggestionRequest, @@ -209,12 +234,37 @@ def post_compile_suggestion( ) -> participant.CompileSuggestionResponse: """Post-rpc interceptor for compile_suggestion - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_compile_suggestion_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Participants server but before - it is returned to user code. + it is returned to user code. This `post_compile_suggestion` interceptor runs + before the `post_compile_suggestion_with_metadata` interceptor. """ return response + def post_compile_suggestion_with_metadata( + self, + response: participant.CompileSuggestionResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + participant.CompileSuggestionResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for compile_suggestion + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Participants server but before it is returned to user code. + + We recommend only using this `post_compile_suggestion_with_metadata` + interceptor in new development instead of the `post_compile_suggestion` interceptor. 
+ When both interceptors are used, this `post_compile_suggestion_with_metadata` interceptor runs after the + `post_compile_suggestion` interceptor. The (possibly modified) response returned by + `post_compile_suggestion` will be passed to + `post_compile_suggestion_with_metadata`. + """ + return response, metadata + def pre_create_participant( self, request: gcd_participant.CreateParticipantRequest, @@ -235,12 +285,35 @@ def post_create_participant( ) -> gcd_participant.Participant: """Post-rpc interceptor for create_participant - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_participant_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Participants server but before - it is returned to user code. + it is returned to user code. This `post_create_participant` interceptor runs + before the `post_create_participant_with_metadata` interceptor. """ return response + def post_create_participant_with_metadata( + self, + response: gcd_participant.Participant, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gcd_participant.Participant, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_participant + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Participants server but before it is returned to user code. + + We recommend only using this `post_create_participant_with_metadata` + interceptor in new development instead of the `post_create_participant` interceptor. + When both interceptors are used, this `post_create_participant_with_metadata` interceptor runs after the + `post_create_participant` interceptor. The (possibly modified) response returned by + `post_create_participant` will be passed to + `post_create_participant_with_metadata`. + """ + return response, metadata + def pre_get_participant( self, request: participant.GetParticipantRequest, @@ -260,12 +333,35 @@ def post_get_participant( ) -> participant.Participant: """Post-rpc interceptor for get_participant - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_participant_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Participants server but before - it is returned to user code. + it is returned to user code. This `post_get_participant` interceptor runs + before the `post_get_participant_with_metadata` interceptor. """ return response + def post_get_participant_with_metadata( + self, + response: participant.Participant, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[participant.Participant, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_participant + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Participants server but before it is returned to user code. + + We recommend only using this `post_get_participant_with_metadata` + interceptor in new development instead of the `post_get_participant` interceptor. + When both interceptors are used, this `post_get_participant_with_metadata` interceptor runs after the + `post_get_participant` interceptor. The (possibly modified) response returned by + `post_get_participant` will be passed to + `post_get_participant_with_metadata`. 
+ """ + return response, metadata + def pre_list_participants( self, request: participant.ListParticipantsRequest, @@ -285,12 +381,37 @@ def post_list_participants( ) -> participant.ListParticipantsResponse: """Post-rpc interceptor for list_participants - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_participants_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Participants server but before - it is returned to user code. + it is returned to user code. This `post_list_participants` interceptor runs + before the `post_list_participants_with_metadata` interceptor. """ return response + def post_list_participants_with_metadata( + self, + response: participant.ListParticipantsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + participant.ListParticipantsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_participants + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Participants server but before it is returned to user code. + + We recommend only using this `post_list_participants_with_metadata` + interceptor in new development instead of the `post_list_participants` interceptor. + When both interceptors are used, this `post_list_participants_with_metadata` interceptor runs after the + `post_list_participants` interceptor. The (possibly modified) response returned by + `post_list_participants` will be passed to + `post_list_participants_with_metadata`. + """ + return response, metadata + def pre_list_suggestions( self, request: participant.ListSuggestionsRequest, @@ -310,12 +431,37 @@ def post_list_suggestions( ) -> participant.ListSuggestionsResponse: """Post-rpc interceptor for list_suggestions - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_suggestions_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Participants server but before - it is returned to user code. + it is returned to user code. This `post_list_suggestions` interceptor runs + before the `post_list_suggestions_with_metadata` interceptor. """ return response + def post_list_suggestions_with_metadata( + self, + response: participant.ListSuggestionsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + participant.ListSuggestionsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_suggestions + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Participants server but before it is returned to user code. + + We recommend only using this `post_list_suggestions_with_metadata` + interceptor in new development instead of the `post_list_suggestions` interceptor. + When both interceptors are used, this `post_list_suggestions_with_metadata` interceptor runs after the + `post_list_suggestions` interceptor. The (possibly modified) response returned by + `post_list_suggestions` will be passed to + `post_list_suggestions_with_metadata`. + """ + return response, metadata + def pre_suggest_articles( self, request: participant.SuggestArticlesRequest, @@ -335,12 +481,37 @@ def post_suggest_articles( ) -> participant.SuggestArticlesResponse: """Post-rpc interceptor for suggest_articles - Override in a subclass to manipulate the response + DEPRECATED. 
Please use the `post_suggest_articles_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Participants server but before - it is returned to user code. + it is returned to user code. This `post_suggest_articles` interceptor runs + before the `post_suggest_articles_with_metadata` interceptor. """ return response + def post_suggest_articles_with_metadata( + self, + response: participant.SuggestArticlesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + participant.SuggestArticlesResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for suggest_articles + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Participants server but before it is returned to user code. + + We recommend only using this `post_suggest_articles_with_metadata` + interceptor in new development instead of the `post_suggest_articles` interceptor. + When both interceptors are used, this `post_suggest_articles_with_metadata` interceptor runs after the + `post_suggest_articles` interceptor. The (possibly modified) response returned by + `post_suggest_articles` will be passed to + `post_suggest_articles_with_metadata`. + """ + return response, metadata + def pre_suggest_faq_answers( self, request: participant.SuggestFaqAnswersRequest, @@ -360,12 +531,37 @@ def post_suggest_faq_answers( ) -> participant.SuggestFaqAnswersResponse: """Post-rpc interceptor for suggest_faq_answers - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_suggest_faq_answers_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Participants server but before - it is returned to user code. + it is returned to user code. This `post_suggest_faq_answers` interceptor runs + before the `post_suggest_faq_answers_with_metadata` interceptor. """ return response + def post_suggest_faq_answers_with_metadata( + self, + response: participant.SuggestFaqAnswersResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + participant.SuggestFaqAnswersResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for suggest_faq_answers + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Participants server but before it is returned to user code. + + We recommend only using this `post_suggest_faq_answers_with_metadata` + interceptor in new development instead of the `post_suggest_faq_answers` interceptor. + When both interceptors are used, this `post_suggest_faq_answers_with_metadata` interceptor runs after the + `post_suggest_faq_answers` interceptor. The (possibly modified) response returned by + `post_suggest_faq_answers` will be passed to + `post_suggest_faq_answers_with_metadata`. + """ + return response, metadata + def pre_suggest_knowledge_assist( self, request: participant.SuggestKnowledgeAssistRequest, @@ -386,12 +582,38 @@ def post_suggest_knowledge_assist( ) -> participant.SuggestKnowledgeAssistResponse: """Post-rpc interceptor for suggest_knowledge_assist - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_suggest_knowledge_assist_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Participants server but before - it is returned to user code. + it is returned to user code. 
This `post_suggest_knowledge_assist` interceptor runs + before the `post_suggest_knowledge_assist_with_metadata` interceptor. """ return response + def post_suggest_knowledge_assist_with_metadata( + self, + response: participant.SuggestKnowledgeAssistResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + participant.SuggestKnowledgeAssistResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for suggest_knowledge_assist + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Participants server but before it is returned to user code. + + We recommend only using this `post_suggest_knowledge_assist_with_metadata` + interceptor in new development instead of the `post_suggest_knowledge_assist` interceptor. + When both interceptors are used, this `post_suggest_knowledge_assist_with_metadata` interceptor runs after the + `post_suggest_knowledge_assist` interceptor. The (possibly modified) response returned by + `post_suggest_knowledge_assist` will be passed to + `post_suggest_knowledge_assist_with_metadata`. + """ + return response, metadata + def pre_suggest_smart_replies( self, request: participant.SuggestSmartRepliesRequest, @@ -411,12 +633,37 @@ def post_suggest_smart_replies( ) -> participant.SuggestSmartRepliesResponse: """Post-rpc interceptor for suggest_smart_replies - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_suggest_smart_replies_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Participants server but before - it is returned to user code. + it is returned to user code. This `post_suggest_smart_replies` interceptor runs + before the `post_suggest_smart_replies_with_metadata` interceptor. """ return response + def post_suggest_smart_replies_with_metadata( + self, + response: participant.SuggestSmartRepliesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + participant.SuggestSmartRepliesResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for suggest_smart_replies + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Participants server but before it is returned to user code. + + We recommend only using this `post_suggest_smart_replies_with_metadata` + interceptor in new development instead of the `post_suggest_smart_replies` interceptor. + When both interceptors are used, this `post_suggest_smart_replies_with_metadata` interceptor runs after the + `post_suggest_smart_replies` interceptor. The (possibly modified) response returned by + `post_suggest_smart_replies` will be passed to + `post_suggest_smart_replies_with_metadata`. + """ + return response, metadata + def pre_update_participant( self, request: gcd_participant.UpdateParticipantRequest, @@ -437,12 +684,35 @@ def post_update_participant( ) -> gcd_participant.Participant: """Post-rpc interceptor for update_participant - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_participant_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Participants server but before - it is returned to user code. + it is returned to user code. This `post_update_participant` interceptor runs + before the `post_update_participant_with_metadata` interceptor. 
""" return response + def post_update_participant_with_metadata( + self, + response: gcd_participant.Participant, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gcd_participant.Participant, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_participant + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Participants server but before it is returned to user code. + + We recommend only using this `post_update_participant_with_metadata` + interceptor in new development instead of the `post_update_participant` interceptor. + When both interceptors are used, this `post_update_participant_with_metadata` interceptor runs after the + `post_update_participant` interceptor. The (possibly modified) response returned by + `post_update_participant` will be passed to + `post_update_participant_with_metadata`. + """ + return response, metadata + def pre_get_location( self, request: locations_pb2.GetLocationRequest, @@ -780,6 +1050,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_analyze_content(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_analyze_content_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -933,6 +1207,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_compile_suggestion(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_compile_suggestion_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1086,6 +1364,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_participant(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_participant_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1229,6 +1511,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_participant(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_participant_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1374,6 +1660,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_participants(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_participants_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1521,6 +1811,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_suggestions(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_suggestions_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER 
@@ -1693,6 +1987,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_suggest_articles(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_suggest_articles_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1846,6 +2144,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_suggest_faq_answers(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_suggest_faq_answers_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1999,6 +2301,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_suggest_knowledge_assist(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_suggest_knowledge_assist_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2152,6 +2458,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_suggest_smart_replies(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_suggest_smart_replies_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2305,6 +2615,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_participant(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_participant_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/phone_numbers/__init__.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/phone_numbers/__init__.py new file mode 100644 index 000000000000..9484b18ad01a --- /dev/null +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/phone_numbers/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .async_client import PhoneNumbersAsyncClient +from .client import PhoneNumbersClient + +__all__ = ( + "PhoneNumbersClient", + "PhoneNumbersAsyncClient", +) diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/phone_numbers/async_client.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/phone_numbers/async_client.py new file mode 100644 index 000000000000..a1e3466d761c --- /dev/null +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/phone_numbers/async_client.py @@ -0,0 +1,1064 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import logging as std_logging +import re +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry_async as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.dialogflow_v2beta1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore + +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore + +from google.cloud.dialogflow_v2beta1.services.phone_numbers import pagers +from google.cloud.dialogflow_v2beta1.types import phone_number as gcd_phone_number +from google.cloud.dialogflow_v2beta1.types import phone_number + +from .client import PhoneNumbersClient +from .transports.base import DEFAULT_CLIENT_INFO, PhoneNumbersTransport +from .transports.grpc_asyncio import PhoneNumbersGrpcAsyncIOTransport + +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + + +class PhoneNumbersAsyncClient: + """Service for managing + [PhoneNumbers][google.cloud.dialogflow.v2beta1.PhoneNumber]. + """ + + _client: PhoneNumbersClient + + # Copy defaults from the synchronous client for use here. + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. 
+ DEFAULT_ENDPOINT = PhoneNumbersClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = PhoneNumbersClient.DEFAULT_MTLS_ENDPOINT + _DEFAULT_ENDPOINT_TEMPLATE = PhoneNumbersClient._DEFAULT_ENDPOINT_TEMPLATE + _DEFAULT_UNIVERSE = PhoneNumbersClient._DEFAULT_UNIVERSE + + phone_number_path = staticmethod(PhoneNumbersClient.phone_number_path) + parse_phone_number_path = staticmethod(PhoneNumbersClient.parse_phone_number_path) + common_billing_account_path = staticmethod( + PhoneNumbersClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + PhoneNumbersClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(PhoneNumbersClient.common_folder_path) + parse_common_folder_path = staticmethod(PhoneNumbersClient.parse_common_folder_path) + common_organization_path = staticmethod(PhoneNumbersClient.common_organization_path) + parse_common_organization_path = staticmethod( + PhoneNumbersClient.parse_common_organization_path + ) + common_project_path = staticmethod(PhoneNumbersClient.common_project_path) + parse_common_project_path = staticmethod( + PhoneNumbersClient.parse_common_project_path + ) + common_location_path = staticmethod(PhoneNumbersClient.common_location_path) + parse_common_location_path = staticmethod( + PhoneNumbersClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + PhoneNumbersAsyncClient: The constructed client. + """ + return PhoneNumbersClient.from_service_account_info.__func__(PhoneNumbersAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + PhoneNumbersAsyncClient: The constructed client. + """ + return PhoneNumbersClient.from_service_account_file.__func__(PhoneNumbersAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. 
+ + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return PhoneNumbersClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> PhoneNumbersTransport: + """Returns the transport used by the client instance. + + Returns: + PhoneNumbersTransport: The transport used by the client instance. + """ + return self._client.transport + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._client._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used + by the client instance. + """ + return self._client._universe_domain + + get_transport_class = PhoneNumbersClient.get_transport_class + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[str, PhoneNumbersTransport, Callable[..., PhoneNumbersTransport]] + ] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the phone numbers async client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,PhoneNumbersTransport,Callable[..., PhoneNumbersTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport to use. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the PhoneNumbersTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. 
Note that ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = PhoneNumbersClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ): # pragma: NO COVER + _LOGGER.debug( + "Created client `google.cloud.dialogflow_v2beta1.PhoneNumbersAsyncClient`.", + extra={ + "serviceName": "google.cloud.dialogflow.v2beta1.PhoneNumbers", + "universeDomain": getattr( + self._client._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._client._transport, "_credentials") + else { + "serviceName": "google.cloud.dialogflow.v2beta1.PhoneNumbers", + "credentialsType": None, + }, + ) + + async def list_phone_numbers( + self, + request: Optional[Union[phone_number.ListPhoneNumbersRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListPhoneNumbersAsyncPager: + r"""Returns the list of all phone numbers in the + specified project. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflow_v2beta1 + + async def sample_list_phone_numbers(): + # Create a client + client = dialogflow_v2beta1.PhoneNumbersAsyncClient() + + # Initialize request argument(s) + request = dialogflow_v2beta1.ListPhoneNumbersRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_phone_numbers(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.dialogflow_v2beta1.types.ListPhoneNumbersRequest, dict]]): + The request object. The request message for + [PhoneNumbers.ListPhoneNumbers][google.cloud.dialogflow.v2beta1.PhoneNumbers.ListPhoneNumbers]. + parent (:class:`str`): + Required. The project to list all ``PhoneNumber`` + resources from. Format: ``projects/``. + Format: + ``projects//locations/``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dialogflow_v2beta1.services.phone_numbers.pagers.ListPhoneNumbersAsyncPager: + The response message for + [PhoneNumbers.ListPhoneNumbers][google.cloud.dialogflow.v2beta1.PhoneNumbers.ListPhoneNumbers]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, phone_number.ListPhoneNumbersRequest): + request = phone_number.ListPhoneNumbersRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_phone_numbers + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListPhoneNumbersAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def update_phone_number( + self, + request: Optional[ + Union[gcd_phone_number.UpdatePhoneNumberRequest, dict] + ] = None, + *, + phone_number: Optional[gcd_phone_number.PhoneNumber] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> gcd_phone_number.PhoneNumber: + r"""Updates the specified ``PhoneNumber``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflow_v2beta1 + + async def sample_update_phone_number(): + # Create a client + client = dialogflow_v2beta1.PhoneNumbersAsyncClient() + + # Initialize request argument(s) + request = dialogflow_v2beta1.UpdatePhoneNumberRequest( + ) + + # Make the request + response = await client.update_phone_number(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dialogflow_v2beta1.types.UpdatePhoneNumberRequest, dict]]): + The request object. The request message for + [PhoneNumbers.UpdatePhoneNumber][google.cloud.dialogflow.v2beta1.PhoneNumbers.UpdatePhoneNumber]. + phone_number (:class:`google.cloud.dialogflow_v2beta1.types.PhoneNumber`): + Required. The ``PhoneNumber`` to update. + This corresponds to the ``phone_number`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Optional. The mask to control which + fields get updated. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dialogflow_v2beta1.types.PhoneNumber: + Represents a phone number. + PhoneNumber resources enable phone calls to be + answered by Dialogflow services and are added to a + project through a + [PhoneNumberOrder][google.cloud.dialogflow.v2beta1.PhoneNumberOrder]. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([phone_number, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, gcd_phone_number.UpdatePhoneNumberRequest): + request = gcd_phone_number.UpdatePhoneNumberRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if phone_number is not None: + request.phone_number = phone_number + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_phone_number + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("phone_number.name", request.phone_number.name),) + ), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. 
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_phone_number( + self, + request: Optional[Union[phone_number.DeletePhoneNumberRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> phone_number.PhoneNumber: + r"""Requests deletion of a ``PhoneNumber``. The ``PhoneNumber`` is + moved into the + [DELETE_REQUESTED][google.cloud.dialogflow.v2beta1.PhoneNumber.LifecycleState.DELETE_REQUESTED] + state immediately, and is deleted approximately 30 days later. + This method may only be called on a ``PhoneNumber`` in the + [ACTIVE][google.cloud.dialogflow.v2beta1.PhoneNumber.LifecycleState.ACTIVE] + state. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflow_v2beta1 + + async def sample_delete_phone_number(): + # Create a client + client = dialogflow_v2beta1.PhoneNumbersAsyncClient() + + # Initialize request argument(s) + request = dialogflow_v2beta1.DeletePhoneNumberRequest( + name="name_value", + ) + + # Make the request + response = await client.delete_phone_number(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dialogflow_v2beta1.types.DeletePhoneNumberRequest, dict]]): + The request object. The request message for + [PhoneNumbers.DeletePhoneNumber][google.cloud.dialogflow.v2beta1.PhoneNumbers.DeletePhoneNumber]. + name (:class:`str`): + Required. The unique identifier of the ``PhoneNumber`` + to delete. Format: + ``projects//phoneNumbers/``. + Format: + ``projects//locations//phoneNumbers/``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dialogflow_v2beta1.types.PhoneNumber: + Represents a phone number. + PhoneNumber resources enable phone calls to be + answered by Dialogflow services and are added to a + project through a + [PhoneNumberOrder][google.cloud.dialogflow.v2beta1.PhoneNumberOrder]. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, phone_number.DeletePhoneNumberRequest): + request = phone_number.DeletePhoneNumberRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_phone_number + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def undelete_phone_number( + self, + request: Optional[Union[phone_number.UndeletePhoneNumberRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> phone_number.PhoneNumber: + r"""Cancels the deletion request for a ``PhoneNumber``. This method + may only be called on a ``PhoneNumber`` in the + [DELETE_REQUESTED][google.cloud.dialogflow.v2beta1.PhoneNumber.LifecycleState.DELETE_REQUESTED] + state. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflow_v2beta1 + + async def sample_undelete_phone_number(): + # Create a client + client = dialogflow_v2beta1.PhoneNumbersAsyncClient() + + # Initialize request argument(s) + request = dialogflow_v2beta1.UndeletePhoneNumberRequest( + name="name_value", + ) + + # Make the request + response = await client.undelete_phone_number(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dialogflow_v2beta1.types.UndeletePhoneNumberRequest, dict]]): + The request object. The request message for + [PhoneNumbers.UndeletePhoneNumber][google.cloud.dialogflow.v2beta1.PhoneNumbers.UndeletePhoneNumber]. + name (:class:`str`): + Required. The unique identifier of the ``PhoneNumber`` + to delete. Format: + ``projects//phoneNumbers/``. + Format: + ``projects//locations//phoneNumbers/``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ + Returns: + google.cloud.dialogflow_v2beta1.types.PhoneNumber: + Represents a phone number. + PhoneNumber resources enable phone calls to be + answered by Dialogflow services and are added to a + project through a + [PhoneNumberOrder][google.cloud.dialogflow.v2beta1.PhoneNumberOrder]. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, phone_number.UndeletePhoneNumberRequest): + request = phone_number.UndeletePhoneNumberRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.undelete_phone_number + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.list_operations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. 
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.get_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self.transport._wrapped_methods[self._client._transport.cancel_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def get_location( + self, + request: Optional[locations_pb2.GetLocationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.get_location] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_locations( + self, + request: Optional[locations_pb2.ListLocationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. + + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. 
+ if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.list_locations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def __aenter__(self) -> "PhoneNumbersAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("PhoneNumbersAsyncClient",) diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/phone_numbers/client.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/phone_numbers/client.py new file mode 100644 index 000000000000..f68a8617ac14 --- /dev/null +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/phone_numbers/client.py @@ -0,0 +1,1494 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
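For context, a minimal usage sketch of the async client defined above: it assumes Application Default Credentials are configured and that a real ``PhoneNumber`` resource name is substituted for the placeholder value; the ``async with`` form relies on the ``__aenter__``/``__aexit__`` methods shown above to close the transport on exit.

    # Minimal sketch: drive PhoneNumbersAsyncClient as an async context manager.
    # Assumes Application Default Credentials; "name_value" is a placeholder.
    import asyncio

    from google.cloud import dialogflow_v2beta1


    async def sample_undelete_with_context_manager():
        async with dialogflow_v2beta1.PhoneNumbersAsyncClient() as client:
            request = dialogflow_v2beta1.UndeletePhoneNumberRequest(name="name_value")
            # The transport is closed automatically when the block exits.
            response = await client.undelete_phone_number(request=request)
            print(response)


    if __name__ == "__main__":
        asyncio.run(sample_undelete_with_context_manager())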
+# +from collections import OrderedDict +from http import HTTPStatus +import json +import logging as std_logging +import os +import re +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) +import warnings + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.dialogflow_v2beta1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore + +from google.cloud.dialogflow_v2beta1.services.phone_numbers import pagers +from google.cloud.dialogflow_v2beta1.types import phone_number as gcd_phone_number +from google.cloud.dialogflow_v2beta1.types import phone_number + +from .transports.base import DEFAULT_CLIENT_INFO, PhoneNumbersTransport +from .transports.grpc import PhoneNumbersGrpcTransport +from .transports.grpc_asyncio import PhoneNumbersGrpcAsyncIOTransport +from .transports.rest import PhoneNumbersRestTransport + + +class PhoneNumbersClientMeta(type): + """Metaclass for the PhoneNumbers client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + + _transport_registry = OrderedDict() # type: Dict[str, Type[PhoneNumbersTransport]] + _transport_registry["grpc"] = PhoneNumbersGrpcTransport + _transport_registry["grpc_asyncio"] = PhoneNumbersGrpcAsyncIOTransport + _transport_registry["rest"] = PhoneNumbersRestTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[PhoneNumbersTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class PhoneNumbersClient(metaclass=PhoneNumbersClientMeta): + """Service for managing + [PhoneNumbers][google.cloud.dialogflow.v2beta1.PhoneNumber]. + """ + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. 
+        Args:
+            api_endpoint (Optional[str]): the api endpoint to convert.
+        Returns:
+            str: converted mTLS api endpoint.
+        """
+        if not api_endpoint:
+            return api_endpoint
+
+        mtls_endpoint_re = re.compile(
+            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
+        )
+
+        m = mtls_endpoint_re.match(api_endpoint)
+        name, mtls, sandbox, googledomain = m.groups()
+        if mtls or not googledomain:
+            return api_endpoint
+
+        if sandbox:
+            return api_endpoint.replace(
+                "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
+            )
+
+        return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
+
+    # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead.
+    DEFAULT_ENDPOINT = "dialogflow.googleapis.com"
+    DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__(  # type: ignore
+        DEFAULT_ENDPOINT
+    )
+
+    _DEFAULT_ENDPOINT_TEMPLATE = "dialogflow.{UNIVERSE_DOMAIN}"
+    _DEFAULT_UNIVERSE = "googleapis.com"
+
+    @classmethod
+    def from_service_account_info(cls, info: dict, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            info.
+
+        Args:
+            info (dict): The service account private key info.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            PhoneNumbersClient: The constructed client.
+        """
+        credentials = service_account.Credentials.from_service_account_info(info)
+        kwargs["credentials"] = credentials
+        return cls(*args, **kwargs)
+
+    @classmethod
+    def from_service_account_file(cls, filename: str, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            file.
+
+        Args:
+            filename (str): The path to the service account private key json
+                file.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            PhoneNumbersClient: The constructed client.
+        """
+        credentials = service_account.Credentials.from_service_account_file(filename)
+        kwargs["credentials"] = credentials
+        return cls(*args, **kwargs)
+
+    from_service_account_json = from_service_account_file
+
+    @property
+    def transport(self) -> PhoneNumbersTransport:
+        """Returns the transport used by the client instance.
+
+        Returns:
+            PhoneNumbersTransport: The transport used by the client
+                instance.
+        """
+        return self._transport
+
+    @staticmethod
+    def phone_number_path(
+        project: str,
+        phone_number: str,
+    ) -> str:
+        """Returns a fully-qualified phone_number string."""
+        return "projects/{project}/phoneNumbers/{phone_number}".format(
+            project=project,
+            phone_number=phone_number,
+        )
+
+    @staticmethod
+    def parse_phone_number_path(path: str) -> Dict[str, str]:
+        """Parses a phone_number path into its component segments."""
+        m = re.match(
+            r"^projects/(?P<project>.+?)/phoneNumbers/(?P<phone_number>.+?)$", path
+        )
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_billing_account_path(
+        billing_account: str,
+    ) -> str:
+        """Returns a fully-qualified billing_account string."""
+        return "billingAccounts/{billing_account}".format(
+            billing_account=billing_account,
+        )
+
+    @staticmethod
+    def parse_common_billing_account_path(path: str) -> Dict[str, str]:
+        """Parse a billing_account path into its component segments."""
+        m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_folder_path(
+        folder: str,
+    ) -> str:
+        """Returns a fully-qualified folder string."""
+        return "folders/{folder}".format(
+            folder=folder,
+        )
+
+    @staticmethod
+    def parse_common_folder_path(path: str) -> Dict[str, str]:
+        """Parse a folder path into its component segments."""
+        m = re.match(r"^folders/(?P<folder>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_organization_path(
+        organization: str,
+    ) -> str:
+        """Returns a fully-qualified organization string."""
+        return "organizations/{organization}".format(
+            organization=organization,
+        )
+
+    @staticmethod
+    def parse_common_organization_path(path: str) -> Dict[str, str]:
+        """Parse an organization path into its component segments."""
+        m = re.match(r"^organizations/(?P<organization>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_project_path(
+        project: str,
+    ) -> str:
+        """Returns a fully-qualified project string."""
+        return "projects/{project}".format(
+            project=project,
+        )
+
+    @staticmethod
+    def parse_common_project_path(path: str) -> Dict[str, str]:
+        """Parse a project path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_location_path(
+        project: str,
+        location: str,
+    ) -> str:
+        """Returns a fully-qualified location string."""
+        return "projects/{project}/locations/{location}".format(
+            project=project,
+            location=location,
+        )
+
+    @staticmethod
+    def parse_common_location_path(path: str) -> Dict[str, str]:
+        """Parse a location path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @classmethod
+    def get_mtls_endpoint_and_cert_source(
+        cls, client_options: Optional[client_options_lib.ClientOptions] = None
+    ):
+        """Deprecated. Return the API endpoint and client cert source for mutual TLS.
+
+        The client cert source is determined in the following order:
+        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+        client cert source is None.
+        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+        default client cert source exists, use the default one; otherwise the client cert
+        source is None.
+
+        The API endpoint is determined in the following order:
+        (1) if `client_options.api_endpoint` is provided, use the provided one.
+ (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + + warnings.warn( + "get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", + DeprecationWarning, + ) + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + @staticmethod + def _read_environment_variables(): + """Returns the environment variables used by the client. + + Returns: + Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, + GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. + + Raises: + ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not + any of ["true", "false"]. + google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT + is not any of ["auto", "never", "always"]. + """ + use_client_cert = os.getenv( + "GOOGLE_API_USE_CLIENT_CERTIFICATE", "false" + ).lower() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() + universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + return use_client_cert == "true", use_mtls_endpoint, universe_domain_env + + @staticmethod + def _get_client_cert_source(provided_cert_source, use_cert_flag): + """Return the client cert source to be used by the client. 
+ + Args: + provided_cert_source (bytes): The client certificate source provided. + use_cert_flag (bool): A flag indicating whether to use the client certificate. + + Returns: + bytes or None: The client cert source to be used by the client. + """ + client_cert_source = None + if use_cert_flag: + if provided_cert_source: + client_cert_source = provided_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + return client_cert_source + + @staticmethod + def _get_api_endpoint( + api_override, client_cert_source, universe_domain, use_mtls_endpoint + ): + """Return the API endpoint used by the client. + + Args: + api_override (str): The API endpoint override. If specified, this is always + the return value of this function and the other arguments are not used. + client_cert_source (bytes): The client certificate source used by the client. + universe_domain (str): The universe domain used by the client. + use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. + Possible values are "always", "auto", or "never". + + Returns: + str: The API endpoint to be used by the client. + """ + if api_override is not None: + api_endpoint = api_override + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + _default_universe = PhoneNumbersClient._DEFAULT_UNIVERSE + if universe_domain != _default_universe: + raise MutualTLSChannelError( + f"mTLS is not supported in any universe other than {_default_universe}." + ) + api_endpoint = PhoneNumbersClient.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = PhoneNumbersClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=universe_domain + ) + return api_endpoint + + @staticmethod + def _get_universe_domain( + client_universe_domain: Optional[str], universe_domain_env: Optional[str] + ) -> str: + """Return the universe domain used by the client. + + Args: + client_universe_domain (Optional[str]): The universe domain configured via the client options. + universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. + + Returns: + str: The universe domain to be used by the client. + + Raises: + ValueError: If the universe domain is an empty string. + """ + universe_domain = PhoneNumbersClient._DEFAULT_UNIVERSE + if client_universe_domain is not None: + universe_domain = client_universe_domain + elif universe_domain_env is not None: + universe_domain = universe_domain_env + if len(universe_domain.strip()) == 0: + raise ValueError("Universe Domain cannot be an empty string.") + return universe_domain + + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. + + Returns: + bool: True iff the configured universe domain is valid. + + Raises: + ValueError: If the configured universe domain is not valid. + """ + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True + + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. 
+ """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used by the client instance. + """ + return self._universe_domain + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[str, PhoneNumbersTransport, Callable[..., PhoneNumbersTransport]] + ] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the phone numbers client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,PhoneNumbersTransport,Callable[..., PhoneNumbersTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the PhoneNumbersTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that the ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. 
+ Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client_options = client_options + if isinstance(self._client_options, dict): + self._client_options = client_options_lib.from_dict(self._client_options) + if self._client_options is None: + self._client_options = client_options_lib.ClientOptions() + self._client_options = cast( + client_options_lib.ClientOptions, self._client_options + ) + + universe_domain_opt = getattr(self._client_options, "universe_domain", None) + + ( + self._use_client_cert, + self._use_mtls_endpoint, + self._universe_domain_env, + ) = PhoneNumbersClient._read_environment_variables() + self._client_cert_source = PhoneNumbersClient._get_client_cert_source( + self._client_options.client_cert_source, self._use_client_cert + ) + self._universe_domain = PhoneNumbersClient._get_universe_domain( + universe_domain_opt, self._universe_domain_env + ) + self._api_endpoint = None # updated below, depending on `transport` + + # Initialize the universe domain validation. + self._is_universe_domain_valid = False + + if CLIENT_LOGGING_SUPPORTED: # pragma: NO COVER + # Setup logging. + client_logging.initialize_logging() + + api_key_value = getattr(self._client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + transport_provided = isinstance(transport, PhoneNumbersTransport) + if transport_provided: + # transport is a PhoneNumbersTransport instance. + if credentials or self._client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if self._client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = cast(PhoneNumbersTransport, transport) + self._api_endpoint = self._transport.host + + self._api_endpoint = self._api_endpoint or PhoneNumbersClient._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint, + ) + + if not transport_provided: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + transport_init: Union[ + Type[PhoneNumbersTransport], Callable[..., PhoneNumbersTransport] + ] = ( + PhoneNumbersClient.get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., PhoneNumbersTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( + credentials=credentials, + credentials_file=self._client_options.credentials_file, + host=self._api_endpoint, + scopes=self._client_options.scopes, + client_cert_source_for_mtls=self._client_cert_source, + quota_project_id=self._client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=self._client_options.api_audience, + ) + + if "async" not in str(self._transport): + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ): # pragma: NO COVER + _LOGGER.debug( + "Created client `google.cloud.dialogflow_v2beta1.PhoneNumbersClient`.", + extra={ + "serviceName": "google.cloud.dialogflow.v2beta1.PhoneNumbers", + "universeDomain": getattr( + self._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._transport, "_credentials") + else { + "serviceName": "google.cloud.dialogflow.v2beta1.PhoneNumbers", + "credentialsType": None, + }, + ) + + def list_phone_numbers( + self, + request: Optional[Union[phone_number.ListPhoneNumbersRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListPhoneNumbersPager: + r"""Returns the list of all phone numbers in the + specified project. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflow_v2beta1 + + def sample_list_phone_numbers(): + # Create a client + client = dialogflow_v2beta1.PhoneNumbersClient() + + # Initialize request argument(s) + request = dialogflow_v2beta1.ListPhoneNumbersRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_phone_numbers(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.dialogflow_v2beta1.types.ListPhoneNumbersRequest, dict]): + The request object. 
The request message for + [PhoneNumbers.ListPhoneNumbers][google.cloud.dialogflow.v2beta1.PhoneNumbers.ListPhoneNumbers]. + parent (str): + Required. The project to list all ``PhoneNumber`` + resources from. Format: ``projects/``. + Format: + ``projects//locations/``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dialogflow_v2beta1.services.phone_numbers.pagers.ListPhoneNumbersPager: + The response message for + [PhoneNumbers.ListPhoneNumbers][google.cloud.dialogflow.v2beta1.PhoneNumbers.ListPhoneNumbers]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, phone_number.ListPhoneNumbersRequest): + request = phone_number.ListPhoneNumbersRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_phone_numbers] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListPhoneNumbersPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_phone_number( + self, + request: Optional[ + Union[gcd_phone_number.UpdatePhoneNumberRequest, dict] + ] = None, + *, + phone_number: Optional[gcd_phone_number.PhoneNumber] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> gcd_phone_number.PhoneNumber: + r"""Updates the specified ``PhoneNumber``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflow_v2beta1 + + def sample_update_phone_number(): + # Create a client + client = dialogflow_v2beta1.PhoneNumbersClient() + + # Initialize request argument(s) + request = dialogflow_v2beta1.UpdatePhoneNumberRequest( + ) + + # Make the request + response = client.update_phone_number(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dialogflow_v2beta1.types.UpdatePhoneNumberRequest, dict]): + The request object. The request message for + [PhoneNumbers.UpdatePhoneNumber][google.cloud.dialogflow.v2beta1.PhoneNumbers.UpdatePhoneNumber]. + phone_number (google.cloud.dialogflow_v2beta1.types.PhoneNumber): + Required. The ``PhoneNumber`` to update. + This corresponds to the ``phone_number`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Optional. The mask to control which + fields get updated. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dialogflow_v2beta1.types.PhoneNumber: + Represents a phone number. + PhoneNumber resources enable phone calls to be + answered by Dialogflow services and are added to a + project through a + [PhoneNumberOrder][google.cloud.dialogflow.v2beta1.PhoneNumberOrder]. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([phone_number, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, gcd_phone_number.UpdatePhoneNumberRequest): + request = gcd_phone_number.UpdatePhoneNumberRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if phone_number is not None: + request.phone_number = phone_number + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_phone_number] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("phone_number.name", request.phone_number.name),) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_phone_number( + self, + request: Optional[Union[phone_number.DeletePhoneNumberRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> phone_number.PhoneNumber: + r"""Requests deletion of a ``PhoneNumber``. The ``PhoneNumber`` is + moved into the + [DELETE_REQUESTED][google.cloud.dialogflow.v2beta1.PhoneNumber.LifecycleState.DELETE_REQUESTED] + state immediately, and is deleted approximately 30 days later. + This method may only be called on a ``PhoneNumber`` in the + [ACTIVE][google.cloud.dialogflow.v2beta1.PhoneNumber.LifecycleState.ACTIVE] + state. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflow_v2beta1 + + def sample_delete_phone_number(): + # Create a client + client = dialogflow_v2beta1.PhoneNumbersClient() + + # Initialize request argument(s) + request = dialogflow_v2beta1.DeletePhoneNumberRequest( + name="name_value", + ) + + # Make the request + response = client.delete_phone_number(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dialogflow_v2beta1.types.DeletePhoneNumberRequest, dict]): + The request object. The request message for + [PhoneNumbers.DeletePhoneNumber][google.cloud.dialogflow.v2beta1.PhoneNumbers.DeletePhoneNumber]. + name (str): + Required. The unique identifier of the ``PhoneNumber`` + to delete. Format: + ``projects//phoneNumbers/``. + Format: + ``projects//locations//phoneNumbers/``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dialogflow_v2beta1.types.PhoneNumber: + Represents a phone number. + PhoneNumber resources enable phone calls to be + answered by Dialogflow services and are added to a + project through a + [PhoneNumberOrder][google.cloud.dialogflow.v2beta1.PhoneNumberOrder]. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance(request, phone_number.DeletePhoneNumberRequest): + request = phone_number.DeletePhoneNumberRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_phone_number] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def undelete_phone_number( + self, + request: Optional[Union[phone_number.UndeletePhoneNumberRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> phone_number.PhoneNumber: + r"""Cancels the deletion request for a ``PhoneNumber``. This method + may only be called on a ``PhoneNumber`` in the + [DELETE_REQUESTED][google.cloud.dialogflow.v2beta1.PhoneNumber.LifecycleState.DELETE_REQUESTED] + state. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dialogflow_v2beta1 + + def sample_undelete_phone_number(): + # Create a client + client = dialogflow_v2beta1.PhoneNumbersClient() + + # Initialize request argument(s) + request = dialogflow_v2beta1.UndeletePhoneNumberRequest( + name="name_value", + ) + + # Make the request + response = client.undelete_phone_number(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dialogflow_v2beta1.types.UndeletePhoneNumberRequest, dict]): + The request object. The request message for + [PhoneNumbers.UndeletePhoneNumber][google.cloud.dialogflow.v2beta1.PhoneNumbers.UndeletePhoneNumber]. + name (str): + Required. The unique identifier of the ``PhoneNumber`` + to delete. Format: + ``projects//phoneNumbers/``. + Format: + ``projects//locations//phoneNumbers/``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dialogflow_v2beta1.types.PhoneNumber: + Represents a phone number. + PhoneNumber resources enable phone calls to be + answered by Dialogflow services and are added to a + project through a + [PhoneNumberOrder][google.cloud.dialogflow.v2beta1.PhoneNumberOrder]. 
+ + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, phone_number.UndeletePhoneNumberRequest): + request = phone_number.UndeletePhoneNumberRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.undelete_phone_number] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "PhoneNumbersClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_operations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. 
+ self._validate_universe_domain() + + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e + + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e + + def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. 
+ if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.cancel_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def get_location( + self, + request: Optional[locations_pb2.GetLocationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_location] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e + + def list_locations( + self, + request: Optional[locations_pb2.ListLocationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. + + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_locations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("PhoneNumbersClient",) diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/phone_numbers/pagers.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/phone_numbers/pagers.py new file mode 100644 index 000000000000..e0a1c264315f --- /dev/null +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/phone_numbers/pagers.py @@ -0,0 +1,197 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import ( + Any, + AsyncIterator, + Awaitable, + Callable, + Iterator, + Optional, + Sequence, + Tuple, + Union, +) + +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core import retry_async as retries_async + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] + OptionalAsyncRetry = Union[ + retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None + ] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore + +from google.cloud.dialogflow_v2beta1.types import phone_number + + +class ListPhoneNumbersPager: + """A pager for iterating through ``list_phone_numbers`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dialogflow_v2beta1.types.ListPhoneNumbersResponse` object, and + provides an ``__iter__`` method to iterate through its + ``phone_numbers`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListPhoneNumbers`` requests and continue to iterate + through the ``phone_numbers`` field on the + corresponding responses. 
+ + All the usual :class:`google.cloud.dialogflow_v2beta1.types.ListPhoneNumbersResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., phone_number.ListPhoneNumbersResponse], + request: phone_number.ListPhoneNumbersRequest, + response: phone_number.ListPhoneNumbersResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dialogflow_v2beta1.types.ListPhoneNumbersRequest): + The initial request object. + response (google.cloud.dialogflow_v2beta1.types.ListPhoneNumbersResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + self._method = method + self._request = phone_number.ListPhoneNumbersRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[phone_number.ListPhoneNumbersResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__(self) -> Iterator[phone_number.PhoneNumber]: + for page in self.pages: + yield from page.phone_numbers + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListPhoneNumbersAsyncPager: + """A pager for iterating through ``list_phone_numbers`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dialogflow_v2beta1.types.ListPhoneNumbersResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``phone_numbers`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListPhoneNumbers`` requests and continue to iterate + through the ``phone_numbers`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dialogflow_v2beta1.types.ListPhoneNumbersResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[phone_number.ListPhoneNumbersResponse]], + request: phone_number.ListPhoneNumbersRequest, + response: phone_number.ListPhoneNumbersResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. 
+ request (google.cloud.dialogflow_v2beta1.types.ListPhoneNumbersRequest): + The initial request object. + response (google.cloud.dialogflow_v2beta1.types.ListPhoneNumbersResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + self._method = method + self._request = phone_number.ListPhoneNumbersRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[phone_number.ListPhoneNumbersResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __aiter__(self) -> AsyncIterator[phone_number.PhoneNumber]: + async def async_generator(): + async for page in self.pages: + for response in page.phone_numbers: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/phone_numbers/transports/README.rst b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/phone_numbers/transports/README.rst new file mode 100644 index 000000000000..73e2d97fed8b --- /dev/null +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/phone_numbers/transports/README.rst @@ -0,0 +1,9 @@ + +transport inheritance structure +_______________________________ + +`PhoneNumbersTransport` is the ABC for all transports. +- public child `PhoneNumbersGrpcTransport` for sync gRPC transport (defined in `grpc.py`). +- public child `PhoneNumbersGrpcAsyncIOTransport` for async gRPC transport (defined in `grpc_asyncio.py`). +- private child `_BasePhoneNumbersRestTransport` for base REST transport with inner classes `_BaseMETHOD` (defined in `rest_base.py`). +- public child `PhoneNumbersRestTransport` for sync REST transport with inner classes `METHOD` derived from the parent's corresponding `_BaseMETHOD` classes (defined in `rest.py`). diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/phone_numbers/transports/__init__.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/phone_numbers/transports/__init__.py new file mode 100644 index 000000000000..eb14c87ef699 --- /dev/null +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/phone_numbers/transports/__init__.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
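# ---------------------------------------------------------------------------
# Editor's illustrative aside, not part of the generated patch: the
# ListPhoneNumbersPager defined in pagers.py above hides pagination by
# re-issuing the RPC whenever `next_page_token` is set. A minimal sketch of
# consuming it follows; it assumes the usual GAPIC surface in which
# PhoneNumbersClient.list_phone_numbers returns this pager, and the project ID
# is a placeholder.
from google.cloud.dialogflow_v2beta1.services.phone_numbers import PhoneNumbersClient

client = PhoneNumbersClient()
pager = client.list_phone_numbers(parent="projects/my-project")

for number in pager:  # __iter__ walks every page transparently via pager.pages
    print(number.name)
# ---------------------------------------------------------------------------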
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import PhoneNumbersTransport +from .grpc import PhoneNumbersGrpcTransport +from .grpc_asyncio import PhoneNumbersGrpcAsyncIOTransport +from .rest import PhoneNumbersRestInterceptor, PhoneNumbersRestTransport + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[PhoneNumbersTransport]] +_transport_registry["grpc"] = PhoneNumbersGrpcTransport +_transport_registry["grpc_asyncio"] = PhoneNumbersGrpcAsyncIOTransport +_transport_registry["rest"] = PhoneNumbersRestTransport + +__all__ = ( + "PhoneNumbersTransport", + "PhoneNumbersGrpcTransport", + "PhoneNumbersGrpcAsyncIOTransport", + "PhoneNumbersRestTransport", + "PhoneNumbersRestInterceptor", +) diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/phone_numbers/transports/base.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/phone_numbers/transports/base.py new file mode 100644 index 000000000000..c13bb2c276df --- /dev/null +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/phone_numbers/transports/base.py @@ -0,0 +1,285 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
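# ---------------------------------------------------------------------------
# Editor's illustrative aside, not part of the generated patch: the
# _transport_registry in transports/__init__.py above is what maps a transport
# name ("grpc", "grpc_asyncio" or "rest") to a concrete transport class. A
# quick sketch of reading that mapping directly (the registry is
# module-private, so this is for illustration only):
from google.cloud.dialogflow_v2beta1.services.phone_numbers import transports

rest_cls = transports._transport_registry["rest"]
print(rest_cls.__name__)  # PhoneNumbersRestTransport
# ---------------------------------------------------------------------------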
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.dialogflow_v2beta1 import gapic_version as package_version +from google.cloud.dialogflow_v2beta1.types import phone_number as gcd_phone_number +from google.cloud.dialogflow_v2beta1.types import phone_number + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class PhoneNumbersTransport(abc.ABC): + """Abstract transport class for PhoneNumbers.""" + + AUTH_SCOPES = ( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/dialogflow", + ) + + DEFAULT_HOST: str = "dialogflow.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'dialogflow.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + if not hasattr(self, "_ignore_credentials"): + self._ignore_credentials: bool = False + + # If no credentials are provided, then determine the appropriate + # defaults. 
+ if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None and not self._ignore_credentials: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + @property + def host(self): + return self._host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.list_phone_numbers: gapic_v1.method.wrap_method( + self.list_phone_numbers, + default_timeout=None, + client_info=client_info, + ), + self.update_phone_number: gapic_v1.method.wrap_method( + self.update_phone_number, + default_timeout=None, + client_info=client_info, + ), + self.delete_phone_number: gapic_v1.method.wrap_method( + self.delete_phone_number, + default_timeout=None, + client_info=client_info, + ), + self.undelete_phone_number: gapic_v1.method.wrap_method( + self.undelete_phone_number, + default_timeout=None, + client_info=client_info, + ), + self.get_location: gapic_v1.method.wrap_method( + self.get_location, + default_timeout=None, + client_info=client_info, + ), + self.list_locations: gapic_v1.method.wrap_method( + self.list_locations, + default_timeout=None, + client_info=client_info, + ), + self.cancel_operation: gapic_v1.method.wrap_method( + self.cancel_operation, + default_timeout=None, + client_info=client_info, + ), + self.get_operation: gapic_v1.method.wrap_method( + self.get_operation, + default_timeout=None, + client_info=client_info, + ), + self.list_operations: gapic_v1.method.wrap_method( + self.list_operations, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + + @property + def list_phone_numbers( + self, + ) -> Callable[ + [phone_number.ListPhoneNumbersRequest], + Union[ + phone_number.ListPhoneNumbersResponse, + Awaitable[phone_number.ListPhoneNumbersResponse], + ], + ]: + raise NotImplementedError() + + @property + def update_phone_number( + self, + ) -> Callable[ + [gcd_phone_number.UpdatePhoneNumberRequest], + Union[gcd_phone_number.PhoneNumber, Awaitable[gcd_phone_number.PhoneNumber]], + ]: + raise NotImplementedError() + + @property + def delete_phone_number( + self, + ) -> Callable[ + [phone_number.DeletePhoneNumberRequest], + Union[phone_number.PhoneNumber, Awaitable[phone_number.PhoneNumber]], + ]: + raise NotImplementedError() + + @property + def undelete_phone_number( + self, + ) -> Callable[ + [phone_number.UndeletePhoneNumberRequest], + Union[phone_number.PhoneNumber, Awaitable[phone_number.PhoneNumber]], + ]: + raise NotImplementedError() + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], + Union[ + operations_pb2.ListOperationsResponse, + Awaitable[operations_pb2.ListOperationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_operation( + self, + ) -> Callable[ + [operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + raise NotImplementedError() + + @property + def get_location( + self, + ) -> Callable[ + [locations_pb2.GetLocationRequest], + Union[locations_pb2.Location, Awaitable[locations_pb2.Location]], + ]: + raise NotImplementedError() + + @property + def list_locations( + self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], + Union[ + locations_pb2.ListLocationsResponse, + Awaitable[locations_pb2.ListLocationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("PhoneNumbersTransport",) diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/phone_numbers/transports/grpc.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/phone_numbers/transports/grpc.py new file mode 100644 index 000000000000..a523137e2272 --- /dev/null +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/phone_numbers/transports/grpc.py @@ -0,0 +1,542 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import json +import logging as std_logging +import pickle +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf.json_format import MessageToJson +import google.protobuf.message +import grpc # type: ignore +import proto # type: ignore + +from google.cloud.dialogflow_v2beta1.types import phone_number as gcd_phone_number +from google.cloud.dialogflow_v2beta1.types import phone_number + +from .base import DEFAULT_CLIENT_INFO, PhoneNumbersTransport + +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + + +class _LoggingClientInterceptor(grpc.UnaryUnaryClientInterceptor): # pragma: NO COVER + def intercept_unary_unary(self, continuation, client_call_details, request): + logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ) + if logging_enabled: # pragma: NO COVER + request_metadata = client_call_details.metadata + if isinstance(request, proto.Message): + request_payload = type(request).to_json(request) + elif isinstance(request, google.protobuf.message.Message): + request_payload = MessageToJson(request) + else: + request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" + + request_metadata = { + key: value.decode("utf-8") if isinstance(value, bytes) else value + for key, value in request_metadata + } + grpc_request = { + "payload": request_payload, + "requestMethod": "grpc", + "metadata": dict(request_metadata), + } + _LOGGER.debug( + f"Sending request for {client_call_details.method}", + extra={ + "serviceName": "google.cloud.dialogflow.v2beta1.PhoneNumbers", + "rpcName": client_call_details.method, + "request": grpc_request, + "metadata": grpc_request["metadata"], + }, + ) + + response = continuation(client_call_details, request) + if logging_enabled: # pragma: NO COVER + response_metadata = response.trailing_metadata() + # Convert gRPC metadata `` to list of tuples + metadata = ( + dict([(k, str(v)) for k, v in response_metadata]) + if response_metadata + else None + ) + result = response.result() + if isinstance(result, proto.Message): + response_payload = type(result).to_json(result) + elif isinstance(result, google.protobuf.message.Message): + response_payload = MessageToJson(result) + else: + response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" + grpc_response = { + "payload": response_payload, + "metadata": metadata, + "status": "OK", + } + _LOGGER.debug( + f"Received response for {client_call_details.method}.", + extra={ + "serviceName": "google.cloud.dialogflow.v2beta1.PhoneNumbers", + "rpcName": client_call_details.method, + "response": grpc_response, + "metadata": grpc_response["metadata"], + }, + ) + return response + + +class PhoneNumbersGrpcTransport(PhoneNumbersTransport): + """gRPC backend transport for PhoneNumbers. + + Service for managing + [PhoneNumbers][google.cloud.dialogflow.v2beta1.PhoneNumber]. 
+ + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "dialogflow.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'dialogflow.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if a ``channel`` instance is provided. + channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. 
+ Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, grpc.Channel): + # Ignore credentials if a channel was passed. + credentials = None + self._ignore_credentials = True + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + self._interceptor = _LoggingClientInterceptor() + self._logged_channel = grpc.intercept_channel( + self._grpc_channel, self._interceptor + ) + + # Wrap messages. This must be done after self._logged_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "dialogflow.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. 
+ credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service.""" + return self._grpc_channel + + @property + def list_phone_numbers( + self, + ) -> Callable[ + [phone_number.ListPhoneNumbersRequest], phone_number.ListPhoneNumbersResponse + ]: + r"""Return a callable for the list phone numbers method over gRPC. + + Returns the list of all phone numbers in the + specified project. + + Returns: + Callable[[~.ListPhoneNumbersRequest], + ~.ListPhoneNumbersResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_phone_numbers" not in self._stubs: + self._stubs["list_phone_numbers"] = self._logged_channel.unary_unary( + "/google.cloud.dialogflow.v2beta1.PhoneNumbers/ListPhoneNumbers", + request_serializer=phone_number.ListPhoneNumbersRequest.serialize, + response_deserializer=phone_number.ListPhoneNumbersResponse.deserialize, + ) + return self._stubs["list_phone_numbers"] + + @property + def update_phone_number( + self, + ) -> Callable[ + [gcd_phone_number.UpdatePhoneNumberRequest], gcd_phone_number.PhoneNumber + ]: + r"""Return a callable for the update phone number method over gRPC. + + Updates the specified ``PhoneNumber``. + + Returns: + Callable[[~.UpdatePhoneNumberRequest], + ~.PhoneNumber]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_phone_number" not in self._stubs: + self._stubs["update_phone_number"] = self._logged_channel.unary_unary( + "/google.cloud.dialogflow.v2beta1.PhoneNumbers/UpdatePhoneNumber", + request_serializer=gcd_phone_number.UpdatePhoneNumberRequest.serialize, + response_deserializer=gcd_phone_number.PhoneNumber.deserialize, + ) + return self._stubs["update_phone_number"] + + @property + def delete_phone_number( + self, + ) -> Callable[[phone_number.DeletePhoneNumberRequest], phone_number.PhoneNumber]: + r"""Return a callable for the delete phone number method over gRPC. + + Requests deletion of a ``PhoneNumber``. 
The ``PhoneNumber`` is + moved into the + [DELETE_REQUESTED][google.cloud.dialogflow.v2beta1.PhoneNumber.LifecycleState.DELETE_REQUESTED] + state immediately, and is deleted approximately 30 days later. + This method may only be called on a ``PhoneNumber`` in the + [ACTIVE][google.cloud.dialogflow.v2beta1.PhoneNumber.LifecycleState.ACTIVE] + state. + + Returns: + Callable[[~.DeletePhoneNumberRequest], + ~.PhoneNumber]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_phone_number" not in self._stubs: + self._stubs["delete_phone_number"] = self._logged_channel.unary_unary( + "/google.cloud.dialogflow.v2beta1.PhoneNumbers/DeletePhoneNumber", + request_serializer=phone_number.DeletePhoneNumberRequest.serialize, + response_deserializer=phone_number.PhoneNumber.deserialize, + ) + return self._stubs["delete_phone_number"] + + @property + def undelete_phone_number( + self, + ) -> Callable[[phone_number.UndeletePhoneNumberRequest], phone_number.PhoneNumber]: + r"""Return a callable for the undelete phone number method over gRPC. + + Cancels the deletion request for a ``PhoneNumber``. This method + may only be called on a ``PhoneNumber`` in the + [DELETE_REQUESTED][google.cloud.dialogflow.v2beta1.PhoneNumber.LifecycleState.DELETE_REQUESTED] + state. + + Returns: + Callable[[~.UndeletePhoneNumberRequest], + ~.PhoneNumber]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "undelete_phone_number" not in self._stubs: + self._stubs["undelete_phone_number"] = self._logged_channel.unary_unary( + "/google.cloud.dialogflow.v2beta1.PhoneNumbers/UndeletePhoneNumber", + request_serializer=phone_number.UndeletePhoneNumberRequest.serialize, + response_deserializer=phone_number.PhoneNumber.deserialize, + ) + return self._stubs["undelete_phone_number"] + + def close(self): + self._logged_channel.close() + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def list_locations( + self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse + ]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_locations" not in self._stubs: + self._stubs["list_locations"] = self._logged_channel.unary_unary( + "/google.cloud.location.Locations/ListLocations", + request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, + response_deserializer=locations_pb2.ListLocationsResponse.FromString, + ) + return self._stubs["list_locations"] + + @property + def get_location( + self, + ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_location" not in self._stubs: + self._stubs["get_location"] = self._logged_channel.unary_unary( + "/google.cloud.location.Locations/GetLocation", + request_serializer=locations_pb2.GetLocationRequest.SerializeToString, + response_deserializer=locations_pb2.Location.FromString, + ) + return self._stubs["get_location"] + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("PhoneNumbersGrpcTransport",) diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/phone_numbers/transports/grpc_asyncio.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/phone_numbers/transports/grpc_asyncio.py new file mode 100644 index 000000000000..813f7f0e4ed0 --- /dev/null +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/phone_numbers/transports/grpc_asyncio.py @@ -0,0 +1,610 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import inspect +import json +import logging as std_logging +import pickle +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, grpc_helpers_async +from google.api_core import retry_async as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf.json_format import MessageToJson +import google.protobuf.message +import grpc # type: ignore +from grpc.experimental import aio # type: ignore +import proto # type: ignore + +from google.cloud.dialogflow_v2beta1.types import phone_number as gcd_phone_number +from google.cloud.dialogflow_v2beta1.types import phone_number + +from .base import DEFAULT_CLIENT_INFO, PhoneNumbersTransport +from .grpc import PhoneNumbersGrpcTransport + +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + + +class _LoggingClientAIOInterceptor( + grpc.aio.UnaryUnaryClientInterceptor +): # pragma: NO COVER + async def intercept_unary_unary(self, continuation, client_call_details, request): + logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ) + if logging_enabled: # pragma: NO COVER + request_metadata = client_call_details.metadata + if isinstance(request, proto.Message): + request_payload = type(request).to_json(request) + elif isinstance(request, google.protobuf.message.Message): + request_payload = MessageToJson(request) + else: + request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" + + request_metadata = { + key: value.decode("utf-8") if isinstance(value, bytes) else value + for key, value in request_metadata + } + grpc_request = { + "payload": request_payload, + "requestMethod": "grpc", + "metadata": dict(request_metadata), + } + _LOGGER.debug( + f"Sending request for {client_call_details.method}", + extra={ + "serviceName": "google.cloud.dialogflow.v2beta1.PhoneNumbers", + "rpcName": str(client_call_details.method), + "request": grpc_request, + "metadata": grpc_request["metadata"], + }, + ) + response = await continuation(client_call_details, request) + if logging_enabled: # pragma: NO COVER + response_metadata = await response.trailing_metadata() + # Convert gRPC metadata `` to list of tuples + metadata = ( + dict([(k, str(v)) for k, v in response_metadata]) + if response_metadata + else None + ) + result = await response + if isinstance(result, proto.Message): + response_payload = type(result).to_json(result) + elif isinstance(result, google.protobuf.message.Message): + response_payload = MessageToJson(result) + else: + response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" + grpc_response = { + "payload": 
response_payload, + "metadata": metadata, + "status": "OK", + } + _LOGGER.debug( + f"Received response to rpc {client_call_details.method}.", + extra={ + "serviceName": "google.cloud.dialogflow.v2beta1.PhoneNumbers", + "rpcName": str(client_call_details.method), + "response": grpc_response, + "metadata": grpc_response["metadata"], + }, + ) + return response + + +class PhoneNumbersGrpcAsyncIOTransport(PhoneNumbersTransport): + """gRPC AsyncIO backend transport for PhoneNumbers. + + Service for managing + [PhoneNumbers][google.cloud.dialogflow.v2beta1.PhoneNumber]. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "dialogflow.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. + """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "dialogflow.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'dialogflow.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. 
These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, aio.Channel): + # Ignore credentials if a channel was passed. + credentials = None + self._ignore_credentials = True + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + self._interceptor = _LoggingClientAIOInterceptor() + self._grpc_channel._unary_unary_interceptors.append(self._interceptor) + self._logged_channel = self._grpc_channel + self._wrap_with_kind = ( + "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters + ) + # Wrap messages. This must be done after self._logged_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def list_phone_numbers( + self, + ) -> Callable[ + [phone_number.ListPhoneNumbersRequest], + Awaitable[phone_number.ListPhoneNumbersResponse], + ]: + r"""Return a callable for the list phone numbers method over gRPC. + + Returns the list of all phone numbers in the + specified project. + + Returns: + Callable[[~.ListPhoneNumbersRequest], + Awaitable[~.ListPhoneNumbersResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_phone_numbers" not in self._stubs: + self._stubs["list_phone_numbers"] = self._logged_channel.unary_unary( + "/google.cloud.dialogflow.v2beta1.PhoneNumbers/ListPhoneNumbers", + request_serializer=phone_number.ListPhoneNumbersRequest.serialize, + response_deserializer=phone_number.ListPhoneNumbersResponse.deserialize, + ) + return self._stubs["list_phone_numbers"] + + @property + def update_phone_number( + self, + ) -> Callable[ + [gcd_phone_number.UpdatePhoneNumberRequest], + Awaitable[gcd_phone_number.PhoneNumber], + ]: + r"""Return a callable for the update phone number method over gRPC. + + Updates the specified ``PhoneNumber``. 
+ + Returns: + Callable[[~.UpdatePhoneNumberRequest], + Awaitable[~.PhoneNumber]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_phone_number" not in self._stubs: + self._stubs["update_phone_number"] = self._logged_channel.unary_unary( + "/google.cloud.dialogflow.v2beta1.PhoneNumbers/UpdatePhoneNumber", + request_serializer=gcd_phone_number.UpdatePhoneNumberRequest.serialize, + response_deserializer=gcd_phone_number.PhoneNumber.deserialize, + ) + return self._stubs["update_phone_number"] + + @property + def delete_phone_number( + self, + ) -> Callable[ + [phone_number.DeletePhoneNumberRequest], Awaitable[phone_number.PhoneNumber] + ]: + r"""Return a callable for the delete phone number method over gRPC. + + Requests deletion of a ``PhoneNumber``. The ``PhoneNumber`` is + moved into the + [DELETE_REQUESTED][google.cloud.dialogflow.v2beta1.PhoneNumber.LifecycleState.DELETE_REQUESTED] + state immediately, and is deleted approximately 30 days later. + This method may only be called on a ``PhoneNumber`` in the + [ACTIVE][google.cloud.dialogflow.v2beta1.PhoneNumber.LifecycleState.ACTIVE] + state. + + Returns: + Callable[[~.DeletePhoneNumberRequest], + Awaitable[~.PhoneNumber]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_phone_number" not in self._stubs: + self._stubs["delete_phone_number"] = self._logged_channel.unary_unary( + "/google.cloud.dialogflow.v2beta1.PhoneNumbers/DeletePhoneNumber", + request_serializer=phone_number.DeletePhoneNumberRequest.serialize, + response_deserializer=phone_number.PhoneNumber.deserialize, + ) + return self._stubs["delete_phone_number"] + + @property + def undelete_phone_number( + self, + ) -> Callable[ + [phone_number.UndeletePhoneNumberRequest], Awaitable[phone_number.PhoneNumber] + ]: + r"""Return a callable for the undelete phone number method over gRPC. + + Cancels the deletion request for a ``PhoneNumber``. This method + may only be called on a ``PhoneNumber`` in the + [DELETE_REQUESTED][google.cloud.dialogflow.v2beta1.PhoneNumber.LifecycleState.DELETE_REQUESTED] + state. + + Returns: + Callable[[~.UndeletePhoneNumberRequest], + Awaitable[~.PhoneNumber]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "undelete_phone_number" not in self._stubs: + self._stubs["undelete_phone_number"] = self._logged_channel.unary_unary( + "/google.cloud.dialogflow.v2beta1.PhoneNumbers/UndeletePhoneNumber", + request_serializer=phone_number.UndeletePhoneNumberRequest.serialize, + response_deserializer=phone_number.PhoneNumber.deserialize, + ) + return self._stubs["undelete_phone_number"] + + def _prep_wrapped_messages(self, client_info): + """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + self._wrapped_methods = { + self.list_phone_numbers: self._wrap_method( + self.list_phone_numbers, + default_timeout=None, + client_info=client_info, + ), + self.update_phone_number: self._wrap_method( + self.update_phone_number, + default_timeout=None, + client_info=client_info, + ), + self.delete_phone_number: self._wrap_method( + self.delete_phone_number, + default_timeout=None, + client_info=client_info, + ), + self.undelete_phone_number: self._wrap_method( + self.undelete_phone_number, + default_timeout=None, + client_info=client_info, + ), + self.get_location: self._wrap_method( + self.get_location, + default_timeout=None, + client_info=client_info, + ), + self.list_locations: self._wrap_method( + self.list_locations, + default_timeout=None, + client_info=client_info, + ), + self.cancel_operation: self._wrap_method( + self.cancel_operation, + default_timeout=None, + client_info=client_info, + ), + self.get_operation: self._wrap_method( + self.get_operation, + default_timeout=None, + client_info=client_info, + ), + self.list_operations: self._wrap_method( + self.list_operations, + default_timeout=None, + client_info=client_info, + ), + } + + def _wrap_method(self, func, *args, **kwargs): + if self._wrap_with_kind: # pragma: NO COVER + kwargs["kind"] = self.kind + return gapic_v1.method_async.wrap_method(func, *args, **kwargs) + + def close(self): + return self._logged_channel.close() + + @property + def kind(self) -> str: + return "grpc_asyncio" + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def list_locations( + self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse + ]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_locations" not in self._stubs: + self._stubs["list_locations"] = self._logged_channel.unary_unary( + "/google.cloud.location.Locations/ListLocations", + request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, + response_deserializer=locations_pb2.ListLocationsResponse.FromString, + ) + return self._stubs["list_locations"] + + @property + def get_location( + self, + ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_location" not in self._stubs: + self._stubs["get_location"] = self._logged_channel.unary_unary( + "/google.cloud.location.Locations/GetLocation", + request_serializer=locations_pb2.GetLocationRequest.SerializeToString, + response_deserializer=locations_pb2.Location.FromString, + ) + return self._stubs["get_location"] + + +__all__ = ("PhoneNumbersGrpcAsyncIOTransport",) diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/phone_numbers/transports/rest.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/phone_numbers/transports/rest.py new file mode 100644 index 000000000000..9ed284b0f327 --- /dev/null +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/phone_numbers/transports/rest.py @@ -0,0 +1,1858 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +import dataclasses +import json # type: ignore +import logging +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, rest_helpers, rest_streaming +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import json_format +from requests import __version__ as requests_version + +from google.cloud.dialogflow_v2beta1.types import phone_number as gcd_phone_number +from google.cloud.dialogflow_v2beta1.types import phone_number + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .rest_base import _BasePhoneNumbersRestTransport + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = logging.getLogger(__name__) + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=f"requests@{requests_version}", +) + + +class PhoneNumbersRestInterceptor: + """Interceptor for PhoneNumbers. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the PhoneNumbersRestTransport. + + .. 
+    .. code-block:: python
+        class MyCustomPhoneNumbersInterceptor(PhoneNumbersRestInterceptor):
+            def pre_delete_phone_number(self, request, metadata):
+                logging.log(f"Received request: {request}")
+                return request, metadata
+
+            def post_delete_phone_number(self, response):
+                logging.log(f"Received response: {response}")
+                return response
+
+            def pre_list_phone_numbers(self, request, metadata):
+                logging.log(f"Received request: {request}")
+                return request, metadata
+
+            def post_list_phone_numbers(self, response):
+                logging.log(f"Received response: {response}")
+                return response
+
+            def pre_undelete_phone_number(self, request, metadata):
+                logging.log(f"Received request: {request}")
+                return request, metadata
+
+            def post_undelete_phone_number(self, response):
+                logging.log(f"Received response: {response}")
+                return response
+
+            def pre_update_phone_number(self, request, metadata):
+                logging.log(f"Received request: {request}")
+                return request, metadata
+
+            def post_update_phone_number(self, response):
+                logging.log(f"Received response: {response}")
+                return response
+
+        transport = PhoneNumbersRestTransport(interceptor=MyCustomPhoneNumbersInterceptor())
+        client = PhoneNumbersClient(transport=transport)
+
+
+    """
+
+    def pre_delete_phone_number(
+        self,
+        request: phone_number.DeletePhoneNumberRequest,
+        metadata: Sequence[Tuple[str, Union[str, bytes]]],
+    ) -> Tuple[
+        phone_number.DeletePhoneNumberRequest, Sequence[Tuple[str, Union[str, bytes]]]
+    ]:
+        """Pre-rpc interceptor for delete_phone_number
+
+        Override in a subclass to manipulate the request or metadata
+        before they are sent to the PhoneNumbers server.
+        """
+        return request, metadata
+
+    def post_delete_phone_number(
+        self, response: phone_number.PhoneNumber
+    ) -> phone_number.PhoneNumber:
+        """Post-rpc interceptor for delete_phone_number
+
+        DEPRECATED. Please use the `post_delete_phone_number_with_metadata`
+        interceptor instead.
+
+        Override in a subclass to read or manipulate the response
+        after it is returned by the PhoneNumbers server but before
+        it is returned to user code. This `post_delete_phone_number` interceptor runs
+        before the `post_delete_phone_number_with_metadata` interceptor.
+        """
+        return response
+
+    def post_delete_phone_number_with_metadata(
+        self,
+        response: phone_number.PhoneNumber,
+        metadata: Sequence[Tuple[str, Union[str, bytes]]],
+    ) -> Tuple[phone_number.PhoneNumber, Sequence[Tuple[str, Union[str, bytes]]]]:
+        """Post-rpc interceptor for delete_phone_number
+
+        Override in a subclass to read or manipulate the response or metadata after it
+        is returned by the PhoneNumbers server but before it is returned to user code.
+
+        We recommend only using this `post_delete_phone_number_with_metadata`
+        interceptor in new development instead of the `post_delete_phone_number` interceptor.
+        When both interceptors are used, this `post_delete_phone_number_with_metadata` interceptor runs after the
+        `post_delete_phone_number` interceptor. The (possibly modified) response returned by
+        `post_delete_phone_number` will be passed to
+        `post_delete_phone_number_with_metadata`.
+        """
+        return response, metadata
+
+    def pre_list_phone_numbers(
+        self,
+        request: phone_number.ListPhoneNumbersRequest,
+        metadata: Sequence[Tuple[str, Union[str, bytes]]],
+    ) -> Tuple[
+        phone_number.ListPhoneNumbersRequest, Sequence[Tuple[str, Union[str, bytes]]]
+    ]:
+        """Pre-rpc interceptor for list_phone_numbers
+
+        Override in a subclass to manipulate the request or metadata
+        before they are sent to the PhoneNumbers server.
+ """ + return request, metadata + + def post_list_phone_numbers( + self, response: phone_number.ListPhoneNumbersResponse + ) -> phone_number.ListPhoneNumbersResponse: + """Post-rpc interceptor for list_phone_numbers + + DEPRECATED. Please use the `post_list_phone_numbers_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the PhoneNumbers server but before + it is returned to user code. This `post_list_phone_numbers` interceptor runs + before the `post_list_phone_numbers_with_metadata` interceptor. + """ + return response + + def post_list_phone_numbers_with_metadata( + self, + response: phone_number.ListPhoneNumbersResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + phone_number.ListPhoneNumbersResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_phone_numbers + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the PhoneNumbers server but before it is returned to user code. + + We recommend only using this `post_list_phone_numbers_with_metadata` + interceptor in new development instead of the `post_list_phone_numbers` interceptor. + When both interceptors are used, this `post_list_phone_numbers_with_metadata` interceptor runs after the + `post_list_phone_numbers` interceptor. The (possibly modified) response returned by + `post_list_phone_numbers` will be passed to + `post_list_phone_numbers_with_metadata`. + """ + return response, metadata + + def pre_undelete_phone_number( + self, + request: phone_number.UndeletePhoneNumberRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + phone_number.UndeletePhoneNumberRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for undelete_phone_number + + Override in a subclass to manipulate the request or metadata + before they are sent to the PhoneNumbers server. + """ + return request, metadata + + def post_undelete_phone_number( + self, response: phone_number.PhoneNumber + ) -> phone_number.PhoneNumber: + """Post-rpc interceptor for undelete_phone_number + + DEPRECATED. Please use the `post_undelete_phone_number_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the PhoneNumbers server but before + it is returned to user code. This `post_undelete_phone_number` interceptor runs + before the `post_undelete_phone_number_with_metadata` interceptor. + """ + return response + + def post_undelete_phone_number_with_metadata( + self, + response: phone_number.PhoneNumber, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[phone_number.PhoneNumber, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for undelete_phone_number + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the PhoneNumbers server but before it is returned to user code. + + We recommend only using this `post_undelete_phone_number_with_metadata` + interceptor in new development instead of the `post_undelete_phone_number` interceptor. + When both interceptors are used, this `post_undelete_phone_number_with_metadata` interceptor runs after the + `post_undelete_phone_number` interceptor. The (possibly modified) response returned by + `post_undelete_phone_number` will be passed to + `post_undelete_phone_number_with_metadata`. 
+ """ + return response, metadata + + def pre_update_phone_number( + self, + request: gcd_phone_number.UpdatePhoneNumberRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + gcd_phone_number.UpdatePhoneNumberRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for update_phone_number + + Override in a subclass to manipulate the request or metadata + before they are sent to the PhoneNumbers server. + """ + return request, metadata + + def post_update_phone_number( + self, response: gcd_phone_number.PhoneNumber + ) -> gcd_phone_number.PhoneNumber: + """Post-rpc interceptor for update_phone_number + + DEPRECATED. Please use the `post_update_phone_number_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the PhoneNumbers server but before + it is returned to user code. This `post_update_phone_number` interceptor runs + before the `post_update_phone_number_with_metadata` interceptor. + """ + return response + + def post_update_phone_number_with_metadata( + self, + response: gcd_phone_number.PhoneNumber, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gcd_phone_number.PhoneNumber, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_phone_number + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the PhoneNumbers server but before it is returned to user code. + + We recommend only using this `post_update_phone_number_with_metadata` + interceptor in new development instead of the `post_update_phone_number` interceptor. + When both interceptors are used, this `post_update_phone_number_with_metadata` interceptor runs after the + `post_update_phone_number` interceptor. The (possibly modified) response returned by + `post_update_phone_number` will be passed to + `post_update_phone_number_with_metadata`. + """ + return response, metadata + + def pre_get_location( + self, + request: locations_pb2.GetLocationRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + locations_pb2.GetLocationRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for get_location + + Override in a subclass to manipulate the request or metadata + before they are sent to the PhoneNumbers server. + """ + return request, metadata + + def post_get_location( + self, response: locations_pb2.Location + ) -> locations_pb2.Location: + """Post-rpc interceptor for get_location + + Override in a subclass to manipulate the response + after it is returned by the PhoneNumbers server but before + it is returned to user code. + """ + return response + + def pre_list_locations( + self, + request: locations_pb2.ListLocationsRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + locations_pb2.ListLocationsRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for list_locations + + Override in a subclass to manipulate the request or metadata + before they are sent to the PhoneNumbers server. + """ + return request, metadata + + def post_list_locations( + self, response: locations_pb2.ListLocationsResponse + ) -> locations_pb2.ListLocationsResponse: + """Post-rpc interceptor for list_locations + + Override in a subclass to manipulate the response + after it is returned by the PhoneNumbers server but before + it is returned to user code. 
+ """ + return response + + def pre_cancel_operation( + self, + request: operations_pb2.CancelOperationRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + operations_pb2.CancelOperationRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the PhoneNumbers server. + """ + return request, metadata + + def post_cancel_operation(self, response: None) -> None: + """Post-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the response + after it is returned by the PhoneNumbers server but before + it is returned to user code. + """ + return response + + def pre_get_operation( + self, + request: operations_pb2.GetOperationRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + operations_pb2.GetOperationRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for get_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the PhoneNumbers server. + """ + return request, metadata + + def post_get_operation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for get_operation + + Override in a subclass to manipulate the response + after it is returned by the PhoneNumbers server but before + it is returned to user code. + """ + return response + + def pre_list_operations( + self, + request: operations_pb2.ListOperationsRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + operations_pb2.ListOperationsRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for list_operations + + Override in a subclass to manipulate the request or metadata + before they are sent to the PhoneNumbers server. + """ + return request, metadata + + def post_list_operations( + self, response: operations_pb2.ListOperationsResponse + ) -> operations_pb2.ListOperationsResponse: + """Post-rpc interceptor for list_operations + + Override in a subclass to manipulate the response + after it is returned by the PhoneNumbers server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class PhoneNumbersRestStub: + _session: AuthorizedSession + _host: str + _interceptor: PhoneNumbersRestInterceptor + + +class PhoneNumbersRestTransport(_BasePhoneNumbersRestTransport): + """REST backend synchronous transport for PhoneNumbers. + + Service for managing + [PhoneNumbers][google.cloud.dialogflow.v2beta1.PhoneNumber]. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + """ + + def __init__( + self, + *, + host: str = "dialogflow.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[PhoneNumbersRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. 
+ + Args: + host (Optional[str]): + The hostname to connect to (default: 'dialogflow.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + url_scheme=url_scheme, + api_audience=api_audience, + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST + ) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or PhoneNumbersRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _DeletePhoneNumber( + _BasePhoneNumbersRestTransport._BaseDeletePhoneNumber, PhoneNumbersRestStub + ): + def __hash__(self): + return hash("PhoneNumbersRestTransport.DeletePhoneNumber") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: phone_number.DeletePhoneNumberRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> phone_number.PhoneNumber: + r"""Call the delete phone number method over HTTP. + + Args: + request (~.phone_number.DeletePhoneNumberRequest): + The request object. The request message for + [PhoneNumbers.DeletePhoneNumber][google.cloud.dialogflow.v2beta1.PhoneNumbers.DeletePhoneNumber]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.phone_number.PhoneNumber: + Represents a phone number. ``PhoneNumber`` resources + enable phone calls to be answered by Dialogflow services + and are added to a project through a + [PhoneNumberOrder][google.cloud.dialogflow.v2beta1.PhoneNumberOrder]. + + """ + + http_options = ( + _BasePhoneNumbersRestTransport._BaseDeletePhoneNumber._get_http_options() + ) + + request, metadata = self._interceptor.pre_delete_phone_number( + request, metadata + ) + transcoded_request = _BasePhoneNumbersRestTransport._BaseDeletePhoneNumber._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BasePhoneNumbersRestTransport._BaseDeletePhoneNumber._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dialogflow_v2beta1.PhoneNumbersClient.DeletePhoneNumber", + extra={ + "serviceName": "google.cloud.dialogflow.v2beta1.PhoneNumbers", + "rpcName": "DeletePhoneNumber", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = PhoneNumbersRestTransport._DeletePhoneNumber._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = phone_number.PhoneNumber() + pb_resp = phone_number.PhoneNumber.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_delete_phone_number(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_phone_number_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = phone_number.PhoneNumber.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.dialogflow_v2beta1.PhoneNumbersClient.delete_phone_number", + extra={ + "serviceName": "google.cloud.dialogflow.v2beta1.PhoneNumbers", + "rpcName": "DeletePhoneNumber", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _ListPhoneNumbers( + _BasePhoneNumbersRestTransport._BaseListPhoneNumbers, PhoneNumbersRestStub + ): + def __hash__(self): + return hash("PhoneNumbersRestTransport.ListPhoneNumbers") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: phone_number.ListPhoneNumbersRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> phone_number.ListPhoneNumbersResponse: + r"""Call the list phone numbers method over HTTP. + + Args: + request (~.phone_number.ListPhoneNumbersRequest): + The request object. The request message for + [PhoneNumbers.ListPhoneNumbers][google.cloud.dialogflow.v2beta1.PhoneNumbers.ListPhoneNumbers]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.phone_number.ListPhoneNumbersResponse: + The response message for + [PhoneNumbers.ListPhoneNumbers][google.cloud.dialogflow.v2beta1.PhoneNumbers.ListPhoneNumbers]. 
+ + """ + + http_options = ( + _BasePhoneNumbersRestTransport._BaseListPhoneNumbers._get_http_options() + ) + + request, metadata = self._interceptor.pre_list_phone_numbers( + request, metadata + ) + transcoded_request = _BasePhoneNumbersRestTransport._BaseListPhoneNumbers._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BasePhoneNumbersRestTransport._BaseListPhoneNumbers._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dialogflow_v2beta1.PhoneNumbersClient.ListPhoneNumbers", + extra={ + "serviceName": "google.cloud.dialogflow.v2beta1.PhoneNumbers", + "rpcName": "ListPhoneNumbers", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = PhoneNumbersRestTransport._ListPhoneNumbers._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = phone_number.ListPhoneNumbersResponse() + pb_resp = phone_number.ListPhoneNumbersResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_list_phone_numbers(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_phone_numbers_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = phone_number.ListPhoneNumbersResponse.to_json( + response + ) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.dialogflow_v2beta1.PhoneNumbersClient.list_phone_numbers", + extra={ + "serviceName": "google.cloud.dialogflow.v2beta1.PhoneNumbers", + "rpcName": "ListPhoneNumbers", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _UndeletePhoneNumber( + _BasePhoneNumbersRestTransport._BaseUndeletePhoneNumber, PhoneNumbersRestStub + ): + def __hash__(self): + return hash("PhoneNumbersRestTransport.UndeletePhoneNumber") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: phone_number.UndeletePhoneNumberRequest, + *, + retry: OptionalRetry = 
+                gapic_v1.method.DEFAULT,
+            timeout: Optional[float] = None,
+            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+        ) -> phone_number.PhoneNumber:
+            r"""Call the undelete phone number method over HTTP.
+
+            Args:
+                request (~.phone_number.UndeletePhoneNumberRequest):
+                    The request object. The request message for
+                    [PhoneNumbers.UndeletePhoneNumber][google.cloud.dialogflow.v2beta1.PhoneNumbers.UndeletePhoneNumber].
+                retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                    should be retried.
+                timeout (float): The timeout for this request.
+                metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+                    sent along with the request as metadata. Normally, each value must be of type `str`,
+                    but for metadata keys ending with the suffix `-bin`, the corresponding values must
+                    be of type `bytes`.
+
+            Returns:
+                ~.phone_number.PhoneNumber:
+                    Represents a phone number. ``PhoneNumber`` resources
+                    enable phone calls to be answered by Dialogflow services
+                    and are added to a project through a
+                    [PhoneNumberOrder][google.cloud.dialogflow.v2beta1.PhoneNumberOrder].
+
+            """
+
+            http_options = (
+                _BasePhoneNumbersRestTransport._BaseUndeletePhoneNumber._get_http_options()
+            )
+
+            request, metadata = self._interceptor.pre_undelete_phone_number(
+                request, metadata
+            )
+            transcoded_request = _BasePhoneNumbersRestTransport._BaseUndeletePhoneNumber._get_transcoded_request(
+                http_options, request
+            )
+
+            body = _BasePhoneNumbersRestTransport._BaseUndeletePhoneNumber._get_request_body_json(
+                transcoded_request
+            )
+
+            # Jsonify the query params
+            query_params = _BasePhoneNumbersRestTransport._BaseUndeletePhoneNumber._get_query_params_json(
+                transcoded_request
+            )
+
+            if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(
+                logging.DEBUG
+            ):  # pragma: NO COVER
+                request_url = "{host}{uri}".format(
+                    host=self._host, uri=transcoded_request["uri"]
+                )
+                method = transcoded_request["method"]
+                try:
+                    request_payload = type(request).to_json(request)
+                except:
+                    request_payload = None
+                http_request = {
+                    "payload": request_payload,
+                    "requestMethod": method,
+                    "requestUrl": request_url,
+                    "headers": dict(metadata),
+                }
+                _LOGGER.debug(
+                    f"Sending request for google.cloud.dialogflow_v2beta1.PhoneNumbersClient.UndeletePhoneNumber",
+                    extra={
+                        "serviceName": "google.cloud.dialogflow.v2beta1.PhoneNumbers",
+                        "rpcName": "UndeletePhoneNumber",
+                        "httpRequest": http_request,
+                        "metadata": http_request["headers"],
+                    },
+                )
+
+            # Send the request
+            response = PhoneNumbersRestTransport._UndeletePhoneNumber._get_response(
+                self._host,
+                metadata,
+                query_params,
+                self._session,
+                timeout,
+                transcoded_request,
+                body,
+            )
+
+            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
+            # subclass.
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = phone_number.PhoneNumber() + pb_resp = phone_number.PhoneNumber.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_undelete_phone_number(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_undelete_phone_number_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = phone_number.PhoneNumber.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.dialogflow_v2beta1.PhoneNumbersClient.undelete_phone_number", + extra={ + "serviceName": "google.cloud.dialogflow.v2beta1.PhoneNumbers", + "rpcName": "UndeletePhoneNumber", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _UpdatePhoneNumber( + _BasePhoneNumbersRestTransport._BaseUpdatePhoneNumber, PhoneNumbersRestStub + ): + def __hash__(self): + return hash("PhoneNumbersRestTransport.UpdatePhoneNumber") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: gcd_phone_number.UpdatePhoneNumberRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> gcd_phone_number.PhoneNumber: + r"""Call the update phone number method over HTTP. + + Args: + request (~.gcd_phone_number.UpdatePhoneNumberRequest): + The request object. The request message for + [PhoneNumbers.UpdatePhoneNumber][google.cloud.dialogflow.v2beta1.PhoneNumbers.UpdatePhoneNumber]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.gcd_phone_number.PhoneNumber: + Represents a phone number. ``PhoneNumber`` resources + enable phone calls to be answered by Dialogflow services + and are added to a project through a + [PhoneNumberOrder][google.cloud.dialogflow.v2beta1.PhoneNumberOrder]. 
+ + """ + + http_options = ( + _BasePhoneNumbersRestTransport._BaseUpdatePhoneNumber._get_http_options() + ) + + request, metadata = self._interceptor.pre_update_phone_number( + request, metadata + ) + transcoded_request = _BasePhoneNumbersRestTransport._BaseUpdatePhoneNumber._get_transcoded_request( + http_options, request + ) + + body = _BasePhoneNumbersRestTransport._BaseUpdatePhoneNumber._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BasePhoneNumbersRestTransport._BaseUpdatePhoneNumber._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dialogflow_v2beta1.PhoneNumbersClient.UpdatePhoneNumber", + extra={ + "serviceName": "google.cloud.dialogflow.v2beta1.PhoneNumbers", + "rpcName": "UpdatePhoneNumber", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = PhoneNumbersRestTransport._UpdatePhoneNumber._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = gcd_phone_number.PhoneNumber() + pb_resp = gcd_phone_number.PhoneNumber.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_update_phone_number(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_phone_number_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = gcd_phone_number.PhoneNumber.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.dialogflow_v2beta1.PhoneNumbersClient.update_phone_number", + extra={ + "serviceName": "google.cloud.dialogflow.v2beta1.PhoneNumbers", + "rpcName": "UpdatePhoneNumber", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + @property + def delete_phone_number( + self, + ) -> Callable[[phone_number.DeletePhoneNumberRequest], phone_number.PhoneNumber]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeletePhoneNumber(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_phone_numbers( + self, + ) -> Callable[ + [phone_number.ListPhoneNumbersRequest], phone_number.ListPhoneNumbersResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._ListPhoneNumbers(self._session, self._host, self._interceptor) # type: ignore + + @property + def undelete_phone_number( + self, + ) -> Callable[[phone_number.UndeletePhoneNumberRequest], phone_number.PhoneNumber]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UndeletePhoneNumber(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_phone_number( + self, + ) -> Callable[ + [gcd_phone_number.UpdatePhoneNumberRequest], gcd_phone_number.PhoneNumber + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdatePhoneNumber(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_location(self): + return self._GetLocation(self._session, self._host, self._interceptor) # type: ignore + + class _GetLocation( + _BasePhoneNumbersRestTransport._BaseGetLocation, PhoneNumbersRestStub + ): + def __hash__(self): + return hash("PhoneNumbersRestTransport.GetLocation") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: locations_pb2.GetLocationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> locations_pb2.Location: + r"""Call the get location method over HTTP. + + Args: + request (locations_pb2.GetLocationRequest): + The request object for GetLocation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + locations_pb2.Location: Response from GetLocation method. 
+ """ + + http_options = ( + _BasePhoneNumbersRestTransport._BaseGetLocation._get_http_options() + ) + + request, metadata = self._interceptor.pre_get_location(request, metadata) + transcoded_request = ( + _BasePhoneNumbersRestTransport._BaseGetLocation._get_transcoded_request( + http_options, request + ) + ) + + # Jsonify the query params + query_params = ( + _BasePhoneNumbersRestTransport._BaseGetLocation._get_query_params_json( + transcoded_request + ) + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dialogflow_v2beta1.PhoneNumbersClient.GetLocation", + extra={ + "serviceName": "google.cloud.dialogflow.v2beta1.PhoneNumbers", + "rpcName": "GetLocation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = PhoneNumbersRestTransport._GetLocation._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = locations_pb2.Location() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_get_location(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.dialogflow_v2beta1.PhoneNumbersAsyncClient.GetLocation", + extra={ + "serviceName": "google.cloud.dialogflow.v2beta1.PhoneNumbers", + "rpcName": "GetLocation", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) + return resp + + @property + def list_locations(self): + return self._ListLocations(self._session, self._host, self._interceptor) # type: ignore + + class _ListLocations( + _BasePhoneNumbersRestTransport._BaseListLocations, PhoneNumbersRestStub + ): + def __hash__(self): + return hash("PhoneNumbersRestTransport.ListLocations") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: locations_pb2.ListLocationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Call the list locations method over HTTP. 
+ + Args: + request (locations_pb2.ListLocationsRequest): + The request object for ListLocations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + locations_pb2.ListLocationsResponse: Response from ListLocations method. + """ + + http_options = ( + _BasePhoneNumbersRestTransport._BaseListLocations._get_http_options() + ) + + request, metadata = self._interceptor.pre_list_locations(request, metadata) + transcoded_request = _BasePhoneNumbersRestTransport._BaseListLocations._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BasePhoneNumbersRestTransport._BaseListLocations._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dialogflow_v2beta1.PhoneNumbersClient.ListLocations", + extra={ + "serviceName": "google.cloud.dialogflow.v2beta1.PhoneNumbers", + "rpcName": "ListLocations", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = PhoneNumbersRestTransport._ListLocations._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = locations_pb2.ListLocationsResponse() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_list_locations(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.dialogflow_v2beta1.PhoneNumbersAsyncClient.ListLocations", + extra={ + "serviceName": "google.cloud.dialogflow.v2beta1.PhoneNumbers", + "rpcName": "ListLocations", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) + return resp + + @property + def cancel_operation(self): + return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore + + class _CancelOperation( + _BasePhoneNumbersRestTransport._BaseCancelOperation, PhoneNumbersRestStub + ): + def __hash__(self): + return hash("PhoneNumbersRestTransport.CancelOperation") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: operations_pb2.CancelOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Call the cancel operation method over HTTP. + + Args: + request (operations_pb2.CancelOperationRequest): + The request object for CancelOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + + http_options = ( + _BasePhoneNumbersRestTransport._BaseCancelOperation._get_http_options() + ) + + request, metadata = self._interceptor.pre_cancel_operation( + request, metadata + ) + transcoded_request = _BasePhoneNumbersRestTransport._BaseCancelOperation._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BasePhoneNumbersRestTransport._BaseCancelOperation._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dialogflow_v2beta1.PhoneNumbersClient.CancelOperation", + extra={ + "serviceName": "google.cloud.dialogflow.v2beta1.PhoneNumbers", + "rpcName": "CancelOperation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = PhoneNumbersRestTransport._CancelOperation._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_cancel_operation(None) + + @property + def get_operation(self): + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + + class _GetOperation( + _BasePhoneNumbersRestTransport._BaseGetOperation, PhoneNumbersRestStub + ): + def __hash__(self): + return hash("PhoneNumbersRestTransport.GetOperation") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: operations_pb2.GetOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Call the get operation method over HTTP. + + Args: + request (operations_pb2.GetOperationRequest): + The request object for GetOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + operations_pb2.Operation: Response from GetOperation method. 
+ """ + + http_options = ( + _BasePhoneNumbersRestTransport._BaseGetOperation._get_http_options() + ) + + request, metadata = self._interceptor.pre_get_operation(request, metadata) + transcoded_request = _BasePhoneNumbersRestTransport._BaseGetOperation._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = ( + _BasePhoneNumbersRestTransport._BaseGetOperation._get_query_params_json( + transcoded_request + ) + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dialogflow_v2beta1.PhoneNumbersClient.GetOperation", + extra={ + "serviceName": "google.cloud.dialogflow.v2beta1.PhoneNumbers", + "rpcName": "GetOperation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = PhoneNumbersRestTransport._GetOperation._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = operations_pb2.Operation() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_get_operation(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.dialogflow_v2beta1.PhoneNumbersAsyncClient.GetOperation", + extra={ + "serviceName": "google.cloud.dialogflow.v2beta1.PhoneNumbers", + "rpcName": "GetOperation", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) + return resp + + @property + def list_operations(self): + return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore + + class _ListOperations( + _BasePhoneNumbersRestTransport._BaseListOperations, PhoneNumbersRestStub + ): + def __hash__(self): + return hash("PhoneNumbersRestTransport.ListOperations") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: operations_pb2.ListOperationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Call the list operations method over HTTP. 
+ + Args: + request (operations_pb2.ListOperationsRequest): + The request object for ListOperations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + operations_pb2.ListOperationsResponse: Response from ListOperations method. + """ + + http_options = ( + _BasePhoneNumbersRestTransport._BaseListOperations._get_http_options() + ) + + request, metadata = self._interceptor.pre_list_operations(request, metadata) + transcoded_request = _BasePhoneNumbersRestTransport._BaseListOperations._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BasePhoneNumbersRestTransport._BaseListOperations._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dialogflow_v2beta1.PhoneNumbersClient.ListOperations", + extra={ + "serviceName": "google.cloud.dialogflow.v2beta1.PhoneNumbers", + "rpcName": "ListOperations", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = PhoneNumbersRestTransport._ListOperations._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
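+            # `from_http_response()` inspects the HTTP status code (and any JSON error
+            # payload) and returns the matching subclass, e.g. 404 -> NotFound and
+            # 429 -> TooManyRequests, so callers can catch the usual GAPIC exception types.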
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = operations_pb2.ListOperationsResponse() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_list_operations(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.dialogflow_v2beta1.PhoneNumbersAsyncClient.ListOperations", + extra={ + "serviceName": "google.cloud.dialogflow.v2beta1.PhoneNumbers", + "rpcName": "ListOperations", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) + return resp + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("PhoneNumbersRestTransport",) diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/phone_numbers/transports/rest_base.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/phone_numbers/transports/rest_base.py new file mode 100644 index 000000000000..f3a88f441234 --- /dev/null +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/phone_numbers/transports/rest_base.py @@ -0,0 +1,457 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union + +from google.api_core import gapic_v1, path_template +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import json_format + +from google.cloud.dialogflow_v2beta1.types import phone_number as gcd_phone_number +from google.cloud.dialogflow_v2beta1.types import phone_number + +from .base import DEFAULT_CLIENT_INFO, PhoneNumbersTransport + + +class _BasePhoneNumbersRestTransport(PhoneNumbersTransport): + """Base REST backend transport for PhoneNumbers. + + Note: This class is not meant to be used directly. Use its sync and + async sub-classes instead. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + """ + + def __init__( + self, + *, + host: str = "dialogflow.googleapis.com", + credentials: Optional[Any] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + Args: + host (Optional[str]): + The hostname to connect to (default: 'dialogflow.googleapis.com'). 
+ credentials (Optional[Any]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host) + if maybe_url_match is None: + raise ValueError( + f"Unexpected hostname structure: {host}" + ) # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + class _BaseDeletePhoneNumber: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v2beta1/{name=projects/*/phoneNumbers/*}", + }, + { + "method": "delete", + "uri": "/v2beta1/{name=projects/*/locations/*/phoneNumbers/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = phone_number.DeletePhoneNumberRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BasePhoneNumbersRestTransport._BaseDeletePhoneNumber._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseListPhoneNumbers: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v2beta1/{parent=projects/*}/phoneNumbers", + }, + { + "method": "get", + "uri": "/v2beta1/{parent=projects/*/locations/*}/phoneNumbers", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = phone_number.ListPhoneNumbersRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def 
_get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BasePhoneNumbersRestTransport._BaseListPhoneNumbers._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseUndeletePhoneNumber: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v2beta1/{name=projects/*/phoneNumbers/*}:undelete", + "body": "*", + }, + { + "method": "post", + "uri": "/v2beta1/{name=projects/*/locations/*/phoneNumbers/*}:undelete", + "body": "*", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = phone_number.UndeletePhoneNumberRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BasePhoneNumbersRestTransport._BaseUndeletePhoneNumber._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseUpdatePhoneNumber: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v2beta1/{phone_number.name=projects/*/phoneNumbers/*}", + "body": "phone_number", + }, + { + "method": "patch", + "uri": "/v2beta1/{phone_number.name=projects/*/locations/*/phoneNumbers/*}", + "body": "phone_number", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = gcd_phone_number.UpdatePhoneNumberRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BasePhoneNumbersRestTransport._BaseUpdatePhoneNumber._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return 
query_params + + class _BaseGetLocation: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v2beta1/{name=projects/*/locations/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + return query_params + + class _BaseListLocations: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v2beta1/{name=projects/*}/locations", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + return query_params + + class _BaseCancelOperation: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v2beta1/{name=projects/*/operations/*}:cancel", + }, + { + "method": "post", + "uri": "/v2beta1/{name=projects/*/locations/*/operations/*}:cancel", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + return query_params + + class _BaseGetOperation: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v2beta1/{name=projects/*/operations/*}", + }, + { + "method": "get", + "uri": "/v2beta1/{name=projects/*/locations/*/operations/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + return query_params + + class _BaseListOperations: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v2beta1/{name=projects/*}/operations", + }, + { + "method": "get", + "uri": "/v2beta1/{name=projects/*/locations/*}/operations", + }, + ] + return http_options + + @staticmethod + def 
_get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + return query_params + + +__all__ = ("_BasePhoneNumbersRestTransport",) diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/session_entity_types/client.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/session_entity_types/client.py index ff6e0f3b08ee..efaef6a2d892 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/session_entity_types/client.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/session_entity_types/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -494,6 +496,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -1410,16 +1439,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -1465,16 +1498,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def cancel_operation( self, @@ -1575,16 +1612,20 @@ def get_location( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def list_locations( self, @@ -1630,16 +1671,20 @@ def list_locations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/session_entity_types/transports/rest.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/session_entity_types/transports/rest.py index f5a1e75a7c76..51f2452049cf 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/session_entity_types/transports/rest.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/session_entity_types/transports/rest.py @@ -136,12 +136,38 @@ def post_create_session_entity_type( ) -> gcd_session_entity_type.SessionEntityType: """Post-rpc interceptor for create_session_entity_type - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_session_entity_type_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the SessionEntityTypes server but before - it is returned to user code. + it is returned to user code. This `post_create_session_entity_type` interceptor runs + before the `post_create_session_entity_type_with_metadata` interceptor. """ return response + def post_create_session_entity_type_with_metadata( + self, + response: gcd_session_entity_type.SessionEntityType, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + gcd_session_entity_type.SessionEntityType, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for create_session_entity_type + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SessionEntityTypes server but before it is returned to user code. + + We recommend only using this `post_create_session_entity_type_with_metadata` + interceptor in new development instead of the `post_create_session_entity_type` interceptor. + When both interceptors are used, this `post_create_session_entity_type_with_metadata` interceptor runs after the + `post_create_session_entity_type` interceptor. The (possibly modified) response returned by + `post_create_session_entity_type` will be passed to + `post_create_session_entity_type_with_metadata`. + """ + return response, metadata + def pre_delete_session_entity_type( self, request: session_entity_type.DeleteSessionEntityTypeRequest, @@ -177,12 +203,37 @@ def post_get_session_entity_type( ) -> session_entity_type.SessionEntityType: """Post-rpc interceptor for get_session_entity_type - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_session_entity_type_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response after it is returned by the SessionEntityTypes server but before - it is returned to user code. + it is returned to user code. This `post_get_session_entity_type` interceptor runs + before the `post_get_session_entity_type_with_metadata` interceptor. """ return response + def post_get_session_entity_type_with_metadata( + self, + response: session_entity_type.SessionEntityType, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + session_entity_type.SessionEntityType, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for get_session_entity_type + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SessionEntityTypes server but before it is returned to user code. + + We recommend only using this `post_get_session_entity_type_with_metadata` + interceptor in new development instead of the `post_get_session_entity_type` interceptor. + When both interceptors are used, this `post_get_session_entity_type_with_metadata` interceptor runs after the + `post_get_session_entity_type` interceptor. The (possibly modified) response returned by + `post_get_session_entity_type` will be passed to + `post_get_session_entity_type_with_metadata`. + """ + return response, metadata + def pre_list_session_entity_types( self, request: session_entity_type.ListSessionEntityTypesRequest, @@ -203,12 +254,38 @@ def post_list_session_entity_types( ) -> session_entity_type.ListSessionEntityTypesResponse: """Post-rpc interceptor for list_session_entity_types - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_session_entity_types_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the SessionEntityTypes server but before - it is returned to user code. + it is returned to user code. This `post_list_session_entity_types` interceptor runs + before the `post_list_session_entity_types_with_metadata` interceptor. """ return response + def post_list_session_entity_types_with_metadata( + self, + response: session_entity_type.ListSessionEntityTypesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + session_entity_type.ListSessionEntityTypesResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_session_entity_types + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SessionEntityTypes server but before it is returned to user code. + + We recommend only using this `post_list_session_entity_types_with_metadata` + interceptor in new development instead of the `post_list_session_entity_types` interceptor. + When both interceptors are used, this `post_list_session_entity_types_with_metadata` interceptor runs after the + `post_list_session_entity_types` interceptor. The (possibly modified) response returned by + `post_list_session_entity_types` will be passed to + `post_list_session_entity_types_with_metadata`. + """ + return response, metadata + def pre_update_session_entity_type( self, request: gcd_session_entity_type.UpdateSessionEntityTypeRequest, @@ -229,12 +306,38 @@ def post_update_session_entity_type( ) -> gcd_session_entity_type.SessionEntityType: """Post-rpc interceptor for update_session_entity_type - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_session_entity_type_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response after it is returned by the SessionEntityTypes server but before - it is returned to user code. + it is returned to user code. This `post_update_session_entity_type` interceptor runs + before the `post_update_session_entity_type_with_metadata` interceptor. """ return response + def post_update_session_entity_type_with_metadata( + self, + response: gcd_session_entity_type.SessionEntityType, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + gcd_session_entity_type.SessionEntityType, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for update_session_entity_type + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SessionEntityTypes server but before it is returned to user code. + + We recommend only using this `post_update_session_entity_type_with_metadata` + interceptor in new development instead of the `post_update_session_entity_type` interceptor. + When both interceptors are used, this `post_update_session_entity_type_with_metadata` interceptor runs after the + `post_update_session_entity_type` interceptor. The (possibly modified) response returned by + `post_update_session_entity_type` will be passed to + `post_update_session_entity_type_with_metadata`. + """ + return response, metadata + def pre_get_location( self, request: locations_pb2.GetLocationRequest, @@ -586,6 +689,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_session_entity_type(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_session_entity_type_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -857,6 +964,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_session_entity_type(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_session_entity_type_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1007,6 +1118,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_session_entity_types(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_session_entity_types_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1174,6 +1289,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_session_entity_type(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_session_entity_type_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/sessions/client.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/sessions/client.py index 2958f1cd9a2f..a900e71999a8 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/sessions/client.py +++ 
b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/sessions/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -595,6 +597,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -1171,16 +1200,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -1226,16 +1259,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def cancel_operation( self, @@ -1336,16 +1373,20 @@ def get_location( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def list_locations( self, @@ -1391,16 +1432,20 @@ def list_locations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. 
+ return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/sessions/transports/rest.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/sessions/transports/rest.py index 45054242c57a..605210897e81 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/sessions/transports/rest.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/sessions/transports/rest.py @@ -104,12 +104,37 @@ def post_detect_intent( ) -> gcd_session.DetectIntentResponse: """Post-rpc interceptor for detect_intent - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_detect_intent_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Sessions server but before - it is returned to user code. + it is returned to user code. This `post_detect_intent` interceptor runs + before the `post_detect_intent_with_metadata` interceptor. """ return response + def post_detect_intent_with_metadata( + self, + response: gcd_session.DetectIntentResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + gcd_session.DetectIntentResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for detect_intent + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Sessions server but before it is returned to user code. + + We recommend only using this `post_detect_intent_with_metadata` + interceptor in new development instead of the `post_detect_intent` interceptor. + When both interceptors are used, this `post_detect_intent_with_metadata` interceptor runs after the + `post_detect_intent` interceptor. The (possibly modified) response returned by + `post_detect_intent` will be passed to + `post_detect_intent_with_metadata`. + """ + return response, metadata + def pre_get_location( self, request: locations_pb2.GetLocationRequest, @@ -450,6 +475,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_detect_intent(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_detect_intent_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/sip_trunks/client.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/sip_trunks/client.py index dd081205f989..a5160c06f125 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/sip_trunks/client.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/sip_trunks/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -489,6 +491,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. 
+ + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -1301,16 +1330,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -1356,16 +1389,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def cancel_operation( self, @@ -1466,16 +1503,20 @@ def get_location( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def list_locations( self, @@ -1521,16 +1562,20 @@ def list_locations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. 
+ return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/sip_trunks/transports/rest.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/sip_trunks/transports/rest.py index c140a7e67b7f..6142706f1ffa 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/sip_trunks/transports/rest.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/sip_trunks/transports/rest.py @@ -133,12 +133,35 @@ def post_create_sip_trunk( ) -> gcd_sip_trunk.SipTrunk: """Post-rpc interceptor for create_sip_trunk - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_sip_trunk_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the SipTrunks server but before - it is returned to user code. + it is returned to user code. This `post_create_sip_trunk` interceptor runs + before the `post_create_sip_trunk_with_metadata` interceptor. """ return response + def post_create_sip_trunk_with_metadata( + self, + response: gcd_sip_trunk.SipTrunk, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gcd_sip_trunk.SipTrunk, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_sip_trunk + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SipTrunks server but before it is returned to user code. + + We recommend only using this `post_create_sip_trunk_with_metadata` + interceptor in new development instead of the `post_create_sip_trunk` interceptor. + When both interceptors are used, this `post_create_sip_trunk_with_metadata` interceptor runs after the + `post_create_sip_trunk` interceptor. The (possibly modified) response returned by + `post_create_sip_trunk` will be passed to + `post_create_sip_trunk_with_metadata`. + """ + return response, metadata + def pre_delete_sip_trunk( self, request: sip_trunk.DeleteSipTrunkRequest, @@ -168,12 +191,35 @@ def pre_get_sip_trunk( def post_get_sip_trunk(self, response: sip_trunk.SipTrunk) -> sip_trunk.SipTrunk: """Post-rpc interceptor for get_sip_trunk - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_sip_trunk_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the SipTrunks server but before - it is returned to user code. + it is returned to user code. This `post_get_sip_trunk` interceptor runs + before the `post_get_sip_trunk_with_metadata` interceptor. """ return response + def post_get_sip_trunk_with_metadata( + self, + response: sip_trunk.SipTrunk, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[sip_trunk.SipTrunk, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_sip_trunk + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SipTrunks server but before it is returned to user code. + + We recommend only using this `post_get_sip_trunk_with_metadata` + interceptor in new development instead of the `post_get_sip_trunk` interceptor. + When both interceptors are used, this `post_get_sip_trunk_with_metadata` interceptor runs after the + `post_get_sip_trunk` interceptor. 
The (possibly modified) response returned by + `post_get_sip_trunk` will be passed to + `post_get_sip_trunk_with_metadata`. + """ + return response, metadata + def pre_list_sip_trunks( self, request: sip_trunk.ListSipTrunksRequest, @@ -191,12 +237,37 @@ def post_list_sip_trunks( ) -> sip_trunk.ListSipTrunksResponse: """Post-rpc interceptor for list_sip_trunks - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_sip_trunks_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the SipTrunks server but before - it is returned to user code. + it is returned to user code. This `post_list_sip_trunks` interceptor runs + before the `post_list_sip_trunks_with_metadata` interceptor. """ return response + def post_list_sip_trunks_with_metadata( + self, + response: sip_trunk.ListSipTrunksResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + sip_trunk.ListSipTrunksResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_sip_trunks + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SipTrunks server but before it is returned to user code. + + We recommend only using this `post_list_sip_trunks_with_metadata` + interceptor in new development instead of the `post_list_sip_trunks` interceptor. + When both interceptors are used, this `post_list_sip_trunks_with_metadata` interceptor runs after the + `post_list_sip_trunks` interceptor. The (possibly modified) response returned by + `post_list_sip_trunks` will be passed to + `post_list_sip_trunks_with_metadata`. + """ + return response, metadata + def pre_update_sip_trunk( self, request: gcd_sip_trunk.UpdateSipTrunkRequest, @@ -216,12 +287,35 @@ def post_update_sip_trunk( ) -> gcd_sip_trunk.SipTrunk: """Post-rpc interceptor for update_sip_trunk - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_sip_trunk_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the SipTrunks server but before - it is returned to user code. + it is returned to user code. This `post_update_sip_trunk` interceptor runs + before the `post_update_sip_trunk_with_metadata` interceptor. """ return response + def post_update_sip_trunk_with_metadata( + self, + response: gcd_sip_trunk.SipTrunk, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gcd_sip_trunk.SipTrunk, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_sip_trunk + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SipTrunks server but before it is returned to user code. + + We recommend only using this `post_update_sip_trunk_with_metadata` + interceptor in new development instead of the `post_update_sip_trunk` interceptor. + When both interceptors are used, this `post_update_sip_trunk_with_metadata` interceptor runs after the + `post_update_sip_trunk` interceptor. The (possibly modified) response returned by + `post_update_sip_trunk` will be passed to + `post_update_sip_trunk_with_metadata`. 
+ """ + return response, metadata + def pre_get_location( self, request: locations_pb2.GetLocationRequest, @@ -569,6 +663,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_sip_trunk(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_sip_trunk_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -829,6 +927,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_sip_trunk(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_sip_trunk_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -976,6 +1078,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_sip_trunks(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_sip_trunks_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1135,6 +1241,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_sip_trunk(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_sip_trunk_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/versions/client.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/versions/client.py index 34c71f34b62a..1361ccad80f1 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/versions/client.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/versions/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -487,6 +489,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -1363,16 +1392,20 @@ def list_operations( # Validate the universe domain. 
self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -1418,16 +1451,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def cancel_operation( self, @@ -1528,16 +1565,20 @@ def get_location( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def list_locations( self, @@ -1583,16 +1624,20 @@ def list_locations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/versions/transports/rest.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/versions/transports/rest.py index 3151e778e1ae..3200ca269ae6 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/versions/transports/rest.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/services/versions/transports/rest.py @@ -131,12 +131,35 @@ def pre_create_version( def post_create_version(self, response: gcd_version.Version) -> gcd_version.Version: """Post-rpc interceptor for create_version - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_version_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Versions server but before - it is returned to user code. + it is returned to user code. This `post_create_version` interceptor runs + before the `post_create_version_with_metadata` interceptor. 
""" return response + def post_create_version_with_metadata( + self, + response: gcd_version.Version, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gcd_version.Version, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_version + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Versions server but before it is returned to user code. + + We recommend only using this `post_create_version_with_metadata` + interceptor in new development instead of the `post_create_version` interceptor. + When both interceptors are used, this `post_create_version_with_metadata` interceptor runs after the + `post_create_version` interceptor. The (possibly modified) response returned by + `post_create_version` will be passed to + `post_create_version_with_metadata`. + """ + return response, metadata + def pre_delete_version( self, request: version.DeleteVersionRequest, @@ -164,12 +187,35 @@ def pre_get_version( def post_get_version(self, response: version.Version) -> version.Version: """Post-rpc interceptor for get_version - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_version_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Versions server but before - it is returned to user code. + it is returned to user code. This `post_get_version` interceptor runs + before the `post_get_version_with_metadata` interceptor. """ return response + def post_get_version_with_metadata( + self, + response: version.Version, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[version.Version, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_version + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Versions server but before it is returned to user code. + + We recommend only using this `post_get_version_with_metadata` + interceptor in new development instead of the `post_get_version` interceptor. + When both interceptors are used, this `post_get_version_with_metadata` interceptor runs after the + `post_get_version` interceptor. The (possibly modified) response returned by + `post_get_version` will be passed to + `post_get_version_with_metadata`. + """ + return response, metadata + def pre_list_versions( self, request: version.ListVersionsRequest, @@ -187,12 +233,35 @@ def post_list_versions( ) -> version.ListVersionsResponse: """Post-rpc interceptor for list_versions - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_versions_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Versions server but before - it is returned to user code. + it is returned to user code. This `post_list_versions` interceptor runs + before the `post_list_versions_with_metadata` interceptor. """ return response + def post_list_versions_with_metadata( + self, + response: version.ListVersionsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[version.ListVersionsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_versions + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Versions server but before it is returned to user code. 
+ + We recommend only using this `post_list_versions_with_metadata` + interceptor in new development instead of the `post_list_versions` interceptor. + When both interceptors are used, this `post_list_versions_with_metadata` interceptor runs after the + `post_list_versions` interceptor. The (possibly modified) response returned by + `post_list_versions` will be passed to + `post_list_versions_with_metadata`. + """ + return response, metadata + def pre_update_version( self, request: gcd_version.UpdateVersionRequest, @@ -210,12 +279,35 @@ def pre_update_version( def post_update_version(self, response: gcd_version.Version) -> gcd_version.Version: """Post-rpc interceptor for update_version - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_version_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Versions server but before - it is returned to user code. + it is returned to user code. This `post_update_version` interceptor runs + before the `post_update_version_with_metadata` interceptor. """ return response + def post_update_version_with_metadata( + self, + response: gcd_version.Version, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gcd_version.Version, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_version + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Versions server but before it is returned to user code. + + We recommend only using this `post_update_version_with_metadata` + interceptor in new development instead of the `post_update_version` interceptor. + When both interceptors are used, this `post_update_version_with_metadata` interceptor runs after the + `post_update_version` interceptor. The (possibly modified) response returned by + `post_update_version` will be passed to + `post_update_version_with_metadata`. 
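# --- Illustrative sketch, not part of this diff ---
# A subclass overriding one of the new *_with_metadata hooks to inspect the
# HTTP response headers passed in as metadata. The interceptor/transport class
# names and the `interceptor=` constructor argument follow the usual GAPIC
# REST transport layout and are assumptions here, not lines from this change.

from typing import Sequence, Tuple, Union

from google.cloud import dialogflow_v2beta1
from google.cloud.dialogflow_v2beta1.services.versions.transports.rest import (
    VersionsRestInterceptor,
    VersionsRestTransport,
)
from google.cloud.dialogflow_v2beta1.types import version


class HeaderLoggingInterceptor(VersionsRestInterceptor):
    def post_get_version_with_metadata(
        self,
        response: version.Version,
        metadata: Sequence[Tuple[str, Union[str, bytes]]],
    ) -> Tuple[version.Version, Sequence[Tuple[str, Union[str, bytes]]]]:
        # `metadata` holds the response headers collected by the REST transport.
        for key, value in metadata:
            print(key, value)
        return response, metadata


client = dialogflow_v2beta1.VersionsClient(
    transport=VersionsRestTransport(interceptor=HeaderLoggingInterceptor())
)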
+ """ + return response, metadata + def pre_get_location( self, request: locations_pb2.GetLocationRequest, @@ -574,6 +666,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_version(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_version_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -847,6 +943,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_version(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_version_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -992,6 +1092,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_versions(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_versions_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1162,6 +1266,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_version(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_version_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/types/__init__.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/types/__init__.py index 5a69784cfe44..0cdffef2eb58 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/types/__init__.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/types/__init__.py @@ -247,6 +247,14 @@ SuggestSmartRepliesResponse, UpdateParticipantRequest, ) +from .phone_number import ( + DeletePhoneNumberRequest, + ListPhoneNumbersRequest, + ListPhoneNumbersResponse, + PhoneNumber, + UndeletePhoneNumberRequest, + UpdatePhoneNumberRequest, +) from .session import ( CloudConversationDebuggingInfo, DetectIntentRequest, @@ -506,6 +514,12 @@ "SuggestSmartRepliesRequest", "SuggestSmartRepliesResponse", "UpdateParticipantRequest", + "DeletePhoneNumberRequest", + "ListPhoneNumbersRequest", + "ListPhoneNumbersResponse", + "PhoneNumber", + "UndeletePhoneNumberRequest", + "UpdatePhoneNumberRequest", "CloudConversationDebuggingInfo", "DetectIntentRequest", "DetectIntentResponse", diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/types/answer_record.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/types/answer_record.py index c8d06d8a2f74..34f376bbeb6b 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/types/answer_record.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/types/answer_record.py @@ -466,7 +466,20 @@ class ListAnswerRecordsRequest(proto.Message): ``projects//locations/``. filter (str): Optional. Filters to restrict results to specific answer - records. + records. The expression has the following syntax: + + [AND ] ... 
+ + The following fields and operators are supported: + + - conversation_id with equals(=) operator + + Examples: + + - "conversation_id=bar" matches answer records in the + projects/foo/locations/global/conversations/bar + conversation (assuming the parent is + projects/foo/locations/global). For more information about filtering, see `API Filtering `__. diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/types/conversation.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/types/conversation.py index 904c06e46640..2192c76a415b 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/types/conversation.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/types/conversation.py @@ -102,6 +102,9 @@ class Conversation(proto.Message): end_time (google.protobuf.timestamp_pb2.Timestamp): Output only. The time the conversation was finished. + telephony_connection_info (google.cloud.dialogflow_v2beta1.types.Conversation.TelephonyConnectionInfo): + Output only. The telephony connection + information. """ class LifecycleState(proto.Enum): @@ -121,10 +124,9 @@ class LifecycleState(proto.Enum): COMPLETED = 2 class ConversationStage(proto.Enum): - r"""Enumeration of the different conversation stages a - conversation can be in. Reference: - - https://cloud.google.com/dialogflow/priv/docs/contact-center/basics#stages + r"""Enumeration of the different conversation stages a conversation can + be in. Reference: + https://cloud.google.com/agent-assist/docs/basics#conversation_stages Values: CONVERSATION_STAGE_UNSPECIFIED (0): @@ -141,6 +143,88 @@ class ConversationStage(proto.Enum): VIRTUAL_AGENT_STAGE = 1 HUMAN_ASSIST_STAGE = 2 + class TelephonyConnectionInfo(proto.Message): + r"""The information about phone calls connected via phone gateway + to the conversation. + + Attributes: + dialed_number (str): + Output only. The number dialed to connect + this call in E.164 format. + sdp (str): + Optional. SDP of the call. It's initially the + SDP answer to the endpoint, but maybe later + updated for the purpose of making the link + active, etc. + sip_headers (MutableSequence[google.cloud.dialogflow_v2beta1.types.Conversation.TelephonyConnectionInfo.SipHeader]): + Output only. The SIP headers from the initial + SIP INVITE. + extra_mime_contents (MutableSequence[google.cloud.dialogflow_v2beta1.types.Conversation.TelephonyConnectionInfo.MimeContent]): + Output only. The mime content from the + initial SIP INVITE. + """ + + class SipHeader(proto.Message): + r"""The SIP headers from the initial SIP INVITE. + + Attributes: + name (str): + Optional. The name of the header. + value (str): + Optional. The value of the header. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + value: str = proto.Field( + proto.STRING, + number=2, + ) + + class MimeContent(proto.Message): + r"""The mime content from the initial SIP INVITE. + + Attributes: + mime_type (str): + Optional. The mime type of the content. + content (bytes): + Optional. The content payload. 
+ """ + + mime_type: str = proto.Field( + proto.STRING, + number=1, + ) + content: bytes = proto.Field( + proto.BYTES, + number=2, + ) + + dialed_number: str = proto.Field( + proto.STRING, + number=2, + ) + sdp: str = proto.Field( + proto.STRING, + number=5, + ) + sip_headers: MutableSequence[ + "Conversation.TelephonyConnectionInfo.SipHeader" + ] = proto.RepeatedField( + proto.MESSAGE, + number=12, + message="Conversation.TelephonyConnectionInfo.SipHeader", + ) + extra_mime_contents: MutableSequence[ + "Conversation.TelephonyConnectionInfo.MimeContent" + ] = proto.RepeatedField( + proto.MESSAGE, + number=13, + message="Conversation.TelephonyConnectionInfo.MimeContent", + ) + name: str = proto.Field( proto.STRING, number=1, @@ -174,6 +258,11 @@ class ConversationStage(proto.Enum): number=6, message=timestamp_pb2.Timestamp, ) + telephony_connection_info: TelephonyConnectionInfo = proto.Field( + proto.MESSAGE, + number=10, + message=TelephonyConnectionInfo, + ) class ConversationPhoneNumber(proto.Message): @@ -181,11 +270,18 @@ class ConversationPhoneNumber(proto.Message): allows for connecting a particular conversation over telephony. Attributes: + country_code (int): + Output only. Desired country code for the + phone number. phone_number (str): Output only. The phone number to connect to this conversation. """ + country_code: int = proto.Field( + proto.INT32, + number=2, + ) phone_number: str = proto.Field( proto.STRING, number=3, @@ -1066,8 +1162,30 @@ class ControlPoint(proto.Message): defined through these control points can only be monotonically increasing or decreasing(constant values are acceptable). + Attributes: + attribute_value (str): + Optional. Can be one of: + + 1. The numerical field value. + 2. The duration spec for freshness: The value must be + formatted as an XSD ``dayTimeDuration`` value (a + restricted subset of an ISO 8601 duration value). The + pattern for this is: ``[nD][T[nH][nM][nS]]``. + boost_amount (float): + Optional. The value between -1 to 1 by which to boost the + score if the attribute_value evaluates to the value + specified above. """ + attribute_value: str = proto.Field( + proto.STRING, + number=1, + ) + boost_amount: float = proto.Field( + proto.FLOAT, + number=2, + ) + field_name: str = proto.Field( proto.STRING, number=1, diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/types/conversation_event.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/types/conversation_event.py index 3f6d53efa77f..ad94f74d2e28 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/types/conversation_event.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/types/conversation_event.py @@ -20,7 +20,7 @@ from google.rpc import status_pb2 # type: ignore import proto # type: ignore -from google.cloud.dialogflow_v2beta1.types import participant +from google.cloud.dialogflow_v2beta1.types import participant, session __protobuf__ = proto.module( package="google.cloud.dialogflow.v2beta1", @@ -34,6 +34,10 @@ class ConversationEvent(proto.Message): r"""Represents a notification sent to Pub/Sub subscribers for conversation lifecycle events. + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -51,6 +55,10 @@ class ConversationEvent(proto.Message): new_message_payload (google.cloud.dialogflow_v2beta1.types.Message): Payload of NEW_MESSAGE event. + This field is a member of `oneof`_ ``payload``. + new_recognition_result_payload (google.cloud.dialogflow_v2beta1.types.StreamingRecognitionResult): + Payload of NEW_RECOGNITION_RESULT event. + This field is a member of `oneof`_ ``payload``. """ @@ -76,6 +84,11 @@ class Type(proto.Enum): An existing conversation has received a new message, either from API or telephony. It is configured in [ConversationProfile.new_message_event_notification_config][google.cloud.dialogflow.v2beta1.ConversationProfile.new_message_event_notification_config] + NEW_RECOGNITION_RESULT (7): + An existing conversation has received a new speech + recognition result. This is mainly for delivering + intermediate transcripts. The notification is configured in + [ConversationProfile.new_recognition_event_notification_config][]. UNRECOVERABLE_ERROR (4): Unrecoverable error during a telephone call. @@ -95,6 +108,7 @@ class Type(proto.Enum): CONVERSATION_FINISHED = 2 HUMAN_INTERVENTION_NEEDED = 3 NEW_MESSAGE = 5 + NEW_RECOGNITION_RESULT = 7 UNRECOVERABLE_ERROR = 4 conversation: str = proto.Field( @@ -117,6 +131,12 @@ class Type(proto.Enum): oneof="payload", message=participant.Message, ) + new_recognition_result_payload: session.StreamingRecognitionResult = proto.Field( + proto.MESSAGE, + number=5, + oneof="payload", + message=session.StreamingRecognitionResult, + ) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/types/conversation_profile.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/types/conversation_profile.py index 2833990358f9..af969b3c905f 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/types/conversation_profile.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/types/conversation_profile.py @@ -85,6 +85,18 @@ class ConversationProfile(proto.Message): Configuration for publishing new message events. Event will be sent in format of [ConversationEvent][google.cloud.dialogflow.v2beta1.ConversationEvent] + new_recognition_result_notification_config (google.cloud.dialogflow_v2beta1.types.NotificationConfig): + Optional. Configuration for publishing transcription + intermediate results. Event will be sent in format of + [ConversationEvent][google.cloud.dialogflow.v2beta1.ConversationEvent]. + If configured, the following information will be populated + as + [ConversationEvent][google.cloud.dialogflow.v2beta1.ConversationEvent] + Pub/Sub message attributes: + + - "participant_id" + - "participant_role" + - "message_id". stt_config (google.cloud.dialogflow_v2beta1.types.SpeechToTextConfig): Settings for speech transcription. 
language_code (str): @@ -160,6 +172,11 @@ class ConversationProfile(proto.Message): number=8, message="NotificationConfig", ) + new_recognition_result_notification_config: "NotificationConfig" = proto.Field( + proto.MESSAGE, + number=21, + message="NotificationConfig", + ) stt_config: audio_config.SpeechToTextConfig = proto.Field( proto.MESSAGE, number=9, diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/types/participant.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/types/participant.py index 8b906d0d1c40..887a38aafce0 100644 --- a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/types/participant.py +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/types/participant.py @@ -838,7 +838,8 @@ class AnalyzeContentRequest(proto.Message): human agent's perspective. It is used for identifying the same message under one participant. - Given two messages under the same participant: + For BatchCreateMessages API only: Given two messages under + the same participant: - If send time are different regardless of whether the content of the messages are exactly the same, the @@ -1378,6 +1379,9 @@ class StreamingAnalyzeContentResponse(proto.Message): Debugging info that would get populated when ``StreamingAnalyzeContentRequest.enable_debugging_info`` is set to true. + speech_model (str): + The name of the actual Cloud speech model + used for speech recognition. """ recognition_result: session.StreamingRecognitionResult = proto.Field( @@ -1428,6 +1432,10 @@ class StreamingAnalyzeContentResponse(proto.Message): number=11, message=session.CloudConversationDebuggingInfo, ) + speech_model: str = proto.Field( + proto.STRING, + number=13, + ) class AnnotatedMessagePart(proto.Message): diff --git a/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/types/phone_number.py b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/types/phone_number.py new file mode 100644 index 000000000000..2ea103e20ceb --- /dev/null +++ b/packages/google-cloud-dialogflow/google/cloud/dialogflow_v2beta1/types/phone_number.py @@ -0,0 +1,238 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import field_mask_pb2 # type: ignore +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.dialogflow.v2beta1", + manifest={ + "PhoneNumber", + "DeletePhoneNumberRequest", + "UndeletePhoneNumberRequest", + "ListPhoneNumbersRequest", + "ListPhoneNumbersResponse", + "UpdatePhoneNumberRequest", + }, +) + + +class PhoneNumber(proto.Message): + r"""Represents a phone number. ``PhoneNumber`` resources enable phone + calls to be answered by Dialogflow services and are added to a + project through a + [PhoneNumberOrder][google.cloud.dialogflow.v2beta1.PhoneNumberOrder]. + + Attributes: + name (str): + Optional. 
The unique identifier of this phone number. + Required for + [PhoneNumbers.UpdatePhoneNumber][google.cloud.dialogflow.v2beta1.PhoneNumbers.UpdatePhoneNumber] + method. Format: + ``projects//phoneNumbers/``. + Format: + ``projects//locations//phoneNumbers/``. + phone_number (str): + Output only. Phone number in + `E.164 `__ format. An + example of a correctly formatted phone number: +15556767888. + conversation_profile (str): + Optional. The conversation profile calls to this + ``PhoneNumber`` should use. The project ID here should be + the same as the one in + [name][google.cloud.dialogflow.v2beta1.PhoneNumber.name]. + Format: + ``projects//conversationProfiles/``. + Format: + ``projects//locations//conversationProfiles/``. + lifecycle_state (google.cloud.dialogflow_v2beta1.types.PhoneNumber.LifecycleState): + Output only. The state of the ``PhoneNumber``. Defaults to + ``ACTIVE``. ``PhoneNumber`` objects set to + ``DELETE_REQUESTED`` always decline incoming calls and can + be removed completely within 30 days. + """ + + class LifecycleState(proto.Enum): + r"""The states that a ``PhoneNumber`` can be in. + + Values: + LIFECYCLE_STATE_UNSPECIFIED (0): + This value is never used. + ACTIVE (1): + Number is active and can receive phone calls. + DELETE_REQUESTED (2): + Number is pending deletion, and cannot + receive calls. + """ + LIFECYCLE_STATE_UNSPECIFIED = 0 + ACTIVE = 1 + DELETE_REQUESTED = 2 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + phone_number: str = proto.Field( + proto.STRING, + number=2, + ) + conversation_profile: str = proto.Field( + proto.STRING, + number=3, + ) + lifecycle_state: LifecycleState = proto.Field( + proto.ENUM, + number=4, + enum=LifecycleState, + ) + + +class DeletePhoneNumberRequest(proto.Message): + r"""The request message for + [PhoneNumbers.DeletePhoneNumber][google.cloud.dialogflow.v2beta1.PhoneNumbers.DeletePhoneNumber]. + + Attributes: + name (str): + Required. The unique identifier of the ``PhoneNumber`` to + delete. Format: + ``projects//phoneNumbers/``. + Format: + ``projects//locations//phoneNumbers/``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class UndeletePhoneNumberRequest(proto.Message): + r"""The request message for + [PhoneNumbers.UndeletePhoneNumber][google.cloud.dialogflow.v2beta1.PhoneNumbers.UndeletePhoneNumber]. + + Attributes: + name (str): + Required. The unique identifier of the ``PhoneNumber`` to + delete. Format: + ``projects//phoneNumbers/``. + Format: + ``projects//locations//phoneNumbers/``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListPhoneNumbersRequest(proto.Message): + r"""The request message for + [PhoneNumbers.ListPhoneNumbers][google.cloud.dialogflow.v2beta1.PhoneNumbers.ListPhoneNumbers]. + + Attributes: + parent (str): + Required. The project to list all ``PhoneNumber`` resources + from. Format: ``projects/``. Format: + ``projects//locations/``. + page_size (int): + Optional. The maximum number of items to + return in a single page. The default value is + 100. The maximum value is 1000. + page_token (str): + Optional. The next_page_token value returned from a previous + list request. + show_deleted (bool): + Optional. Controls whether ``PhoneNumber`` resources in the + [DELETE_REQUESTED][google.cloud.dialogflow.v2beta1.PhoneNumber.LifecycleState.DELETE_REQUESTED] + state should be returned. Defaults to false. 
+ """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + show_deleted: bool = proto.Field( + proto.BOOL, + number=4, + ) + + +class ListPhoneNumbersResponse(proto.Message): + r"""The response message for + [PhoneNumbers.ListPhoneNumbers][google.cloud.dialogflow.v2beta1.PhoneNumbers.ListPhoneNumbers]. + + Attributes: + phone_numbers (MutableSequence[google.cloud.dialogflow_v2beta1.types.PhoneNumber]): + The list of ``PhoneNumber`` resources. There is a maximum + number of items returned based on the page_size field in the + request. + next_page_token (str): + Token to retrieve the next page of results, + or empty if there are no more results in the + list. + """ + + @property + def raw_page(self): + return self + + phone_numbers: MutableSequence["PhoneNumber"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="PhoneNumber", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class UpdatePhoneNumberRequest(proto.Message): + r"""The request message for + [PhoneNumbers.UpdatePhoneNumber][google.cloud.dialogflow.v2beta1.PhoneNumbers.UpdatePhoneNumber]. + + Attributes: + phone_number (google.cloud.dialogflow_v2beta1.types.PhoneNumber): + Required. The ``PhoneNumber`` to update. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Optional. The mask to control which fields + get updated. + """ + + phone_number: "PhoneNumber" = proto.Field( + proto.MESSAGE, + number=1, + message="PhoneNumber", + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_phone_numbers_delete_phone_number_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_phone_numbers_delete_phone_number_async.py new file mode 100644 index 000000000000..86882bd49d44 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_phone_numbers_delete_phone_number_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeletePhoneNumber +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2beta1_generated_PhoneNumbers_DeletePhoneNumber_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2beta1 + + +async def sample_delete_phone_number(): + # Create a client + client = dialogflow_v2beta1.PhoneNumbersAsyncClient() + + # Initialize request argument(s) + request = dialogflow_v2beta1.DeletePhoneNumberRequest( + name="name_value", + ) + + # Make the request + response = await client.delete_phone_number(request=request) + + # Handle the response + print(response) + +# [END dialogflow_v2beta1_generated_PhoneNumbers_DeletePhoneNumber_async] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_phone_numbers_delete_phone_number_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_phone_numbers_delete_phone_number_sync.py new file mode 100644 index 000000000000..ced9cdfd7d66 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_phone_numbers_delete_phone_number_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeletePhoneNumber +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2beta1_generated_PhoneNumbers_DeletePhoneNumber_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2beta1 + + +def sample_delete_phone_number(): + # Create a client + client = dialogflow_v2beta1.PhoneNumbersClient() + + # Initialize request argument(s) + request = dialogflow_v2beta1.DeletePhoneNumberRequest( + name="name_value", + ) + + # Make the request + response = client.delete_phone_number(request=request) + + # Handle the response + print(response) + +# [END dialogflow_v2beta1_generated_PhoneNumbers_DeletePhoneNumber_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_phone_numbers_list_phone_numbers_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_phone_numbers_list_phone_numbers_async.py new file mode 100644 index 000000000000..540a9228145a --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_phone_numbers_list_phone_numbers_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListPhoneNumbers +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2beta1_generated_PhoneNumbers_ListPhoneNumbers_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2beta1 + + +async def sample_list_phone_numbers(): + # Create a client + client = dialogflow_v2beta1.PhoneNumbersAsyncClient() + + # Initialize request argument(s) + request = dialogflow_v2beta1.ListPhoneNumbersRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_phone_numbers(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END dialogflow_v2beta1_generated_PhoneNumbers_ListPhoneNumbers_async] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_phone_numbers_list_phone_numbers_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_phone_numbers_list_phone_numbers_sync.py new file mode 100644 index 000000000000..60839b8e48b0 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_phone_numbers_list_phone_numbers_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListPhoneNumbers +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2beta1_generated_PhoneNumbers_ListPhoneNumbers_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2beta1 + + +def sample_list_phone_numbers(): + # Create a client + client = dialogflow_v2beta1.PhoneNumbersClient() + + # Initialize request argument(s) + request = dialogflow_v2beta1.ListPhoneNumbersRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_phone_numbers(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END dialogflow_v2beta1_generated_PhoneNumbers_ListPhoneNumbers_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_phone_numbers_undelete_phone_number_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_phone_numbers_undelete_phone_number_async.py new file mode 100644 index 000000000000..24a7d81f20ec --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_phone_numbers_undelete_phone_number_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UndeletePhoneNumber +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2beta1_generated_PhoneNumbers_UndeletePhoneNumber_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2beta1 + + +async def sample_undelete_phone_number(): + # Create a client + client = dialogflow_v2beta1.PhoneNumbersAsyncClient() + + # Initialize request argument(s) + request = dialogflow_v2beta1.UndeletePhoneNumberRequest( + name="name_value", + ) + + # Make the request + response = await client.undelete_phone_number(request=request) + + # Handle the response + print(response) + +# [END dialogflow_v2beta1_generated_PhoneNumbers_UndeletePhoneNumber_async] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_phone_numbers_undelete_phone_number_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_phone_numbers_undelete_phone_number_sync.py new file mode 100644 index 000000000000..a1623b167304 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_phone_numbers_undelete_phone_number_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UndeletePhoneNumber +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2beta1_generated_PhoneNumbers_UndeletePhoneNumber_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2beta1 + + +def sample_undelete_phone_number(): + # Create a client + client = dialogflow_v2beta1.PhoneNumbersClient() + + # Initialize request argument(s) + request = dialogflow_v2beta1.UndeletePhoneNumberRequest( + name="name_value", + ) + + # Make the request + response = client.undelete_phone_number(request=request) + + # Handle the response + print(response) + +# [END dialogflow_v2beta1_generated_PhoneNumbers_UndeletePhoneNumber_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_phone_numbers_update_phone_number_async.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_phone_numbers_update_phone_number_async.py new file mode 100644 index 000000000000..621a0d662066 --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_phone_numbers_update_phone_number_async.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdatePhoneNumber +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2beta1_generated_PhoneNumbers_UpdatePhoneNumber_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2beta1 + + +async def sample_update_phone_number(): + # Create a client + client = dialogflow_v2beta1.PhoneNumbersAsyncClient() + + # Initialize request argument(s) + request = dialogflow_v2beta1.UpdatePhoneNumberRequest( + ) + + # Make the request + response = await client.update_phone_number(request=request) + + # Handle the response + print(response) + +# [END dialogflow_v2beta1_generated_PhoneNumbers_UpdatePhoneNumber_async] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_phone_numbers_update_phone_number_sync.py b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_phone_numbers_update_phone_number_sync.py new file mode 100644 index 000000000000..d7e480659f0a --- /dev/null +++ b/packages/google-cloud-dialogflow/samples/generated_samples/dialogflow_v2beta1_generated_phone_numbers_update_phone_number_sync.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdatePhoneNumber +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dialogflow + + +# [START dialogflow_v2beta1_generated_PhoneNumbers_UpdatePhoneNumber_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dialogflow_v2beta1 + + +def sample_update_phone_number(): + # Create a client + client = dialogflow_v2beta1.PhoneNumbersClient() + + # Initialize request argument(s) + request = dialogflow_v2beta1.UpdatePhoneNumberRequest( + ) + + # Make the request + response = client.update_phone_number(request=request) + + # Handle the response + print(response) + +# [END dialogflow_v2beta1_generated_PhoneNumbers_UpdatePhoneNumber_sync] diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/snippet_metadata_google.cloud.dialogflow.v2.json b/packages/google-cloud-dialogflow/samples/generated_samples/snippet_metadata_google.cloud.dialogflow.v2.json index 5fba10b79137..4432fa51d932 100644 --- a/packages/google-cloud-dialogflow/samples/generated_samples/snippet_metadata_google.cloud.dialogflow.v2.json +++ b/packages/google-cloud-dialogflow/samples/generated_samples/snippet_metadata_google.cloud.dialogflow.v2.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-dialogflow", - "version": "2.37.0" + "version": "2.39.0" }, "snippets": [ { diff --git a/packages/google-cloud-dialogflow/samples/generated_samples/snippet_metadata_google.cloud.dialogflow.v2beta1.json b/packages/google-cloud-dialogflow/samples/generated_samples/snippet_metadata_google.cloud.dialogflow.v2beta1.json index e97b1de71665..43999ab1f7a3 100644 --- a/packages/google-cloud-dialogflow/samples/generated_samples/snippet_metadata_google.cloud.dialogflow.v2beta1.json +++ b/packages/google-cloud-dialogflow/samples/generated_samples/snippet_metadata_google.cloud.dialogflow.v2beta1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-dialogflow", - "version": "2.37.0" + "version": "2.39.0" }, "snippets": [ { @@ -14840,6 +14840,658 @@ ], "title": "dialogflow_v2beta1_generated_participants_update_participant_sync.py" }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.PhoneNumbersAsyncClient", + "shortName": "PhoneNumbersAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.PhoneNumbersAsyncClient.delete_phone_number", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.PhoneNumbers.DeletePhoneNumber", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.PhoneNumbers", + "shortName": "PhoneNumbers" + }, + "shortName": "DeletePhoneNumber" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.DeletePhoneNumberRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dialogflow_v2beta1.types.PhoneNumber", + "shortName": "delete_phone_number" + }, + "description": "Sample for DeletePhoneNumber", + "file": "dialogflow_v2beta1_generated_phone_numbers_delete_phone_number_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_PhoneNumbers_DeletePhoneNumber_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + 
"type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_phone_numbers_delete_phone_number_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.PhoneNumbersClient", + "shortName": "PhoneNumbersClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.PhoneNumbersClient.delete_phone_number", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.PhoneNumbers.DeletePhoneNumber", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.PhoneNumbers", + "shortName": "PhoneNumbers" + }, + "shortName": "DeletePhoneNumber" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.DeletePhoneNumberRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dialogflow_v2beta1.types.PhoneNumber", + "shortName": "delete_phone_number" + }, + "description": "Sample for DeletePhoneNumber", + "file": "dialogflow_v2beta1_generated_phone_numbers_delete_phone_number_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_PhoneNumbers_DeletePhoneNumber_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_phone_numbers_delete_phone_number_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.PhoneNumbersAsyncClient", + "shortName": "PhoneNumbersAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.PhoneNumbersAsyncClient.list_phone_numbers", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.PhoneNumbers.ListPhoneNumbers", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.PhoneNumbers", + "shortName": "PhoneNumbers" + }, + "shortName": "ListPhoneNumbers" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.ListPhoneNumbersRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dialogflow_v2beta1.services.phone_numbers.pagers.ListPhoneNumbersAsyncPager", + "shortName": "list_phone_numbers" + }, + "description": "Sample for ListPhoneNumbers", + "file": "dialogflow_v2beta1_generated_phone_numbers_list_phone_numbers_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_PhoneNumbers_ListPhoneNumbers_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + 
"start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_phone_numbers_list_phone_numbers_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.PhoneNumbersClient", + "shortName": "PhoneNumbersClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.PhoneNumbersClient.list_phone_numbers", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.PhoneNumbers.ListPhoneNumbers", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.PhoneNumbers", + "shortName": "PhoneNumbers" + }, + "shortName": "ListPhoneNumbers" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.ListPhoneNumbersRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dialogflow_v2beta1.services.phone_numbers.pagers.ListPhoneNumbersPager", + "shortName": "list_phone_numbers" + }, + "description": "Sample for ListPhoneNumbers", + "file": "dialogflow_v2beta1_generated_phone_numbers_list_phone_numbers_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_PhoneNumbers_ListPhoneNumbers_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_phone_numbers_list_phone_numbers_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.PhoneNumbersAsyncClient", + "shortName": "PhoneNumbersAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.PhoneNumbersAsyncClient.undelete_phone_number", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.PhoneNumbers.UndeletePhoneNumber", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.PhoneNumbers", + "shortName": "PhoneNumbers" + }, + "shortName": "UndeletePhoneNumber" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.UndeletePhoneNumberRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dialogflow_v2beta1.types.PhoneNumber", + "shortName": "undelete_phone_number" + }, + "description": "Sample for UndeletePhoneNumber", + "file": "dialogflow_v2beta1_generated_phone_numbers_undelete_phone_number_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_PhoneNumbers_UndeletePhoneNumber_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + 
{ + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_phone_numbers_undelete_phone_number_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.PhoneNumbersClient", + "shortName": "PhoneNumbersClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.PhoneNumbersClient.undelete_phone_number", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.PhoneNumbers.UndeletePhoneNumber", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.PhoneNumbers", + "shortName": "PhoneNumbers" + }, + "shortName": "UndeletePhoneNumber" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.UndeletePhoneNumberRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dialogflow_v2beta1.types.PhoneNumber", + "shortName": "undelete_phone_number" + }, + "description": "Sample for UndeletePhoneNumber", + "file": "dialogflow_v2beta1_generated_phone_numbers_undelete_phone_number_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_PhoneNumbers_UndeletePhoneNumber_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_phone_numbers_undelete_phone_number_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.PhoneNumbersAsyncClient", + "shortName": "PhoneNumbersAsyncClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.PhoneNumbersAsyncClient.update_phone_number", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.PhoneNumbers.UpdatePhoneNumber", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.PhoneNumbers", + "shortName": "PhoneNumbers" + }, + "shortName": "UpdatePhoneNumber" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.UpdatePhoneNumberRequest" + }, + { + "name": "phone_number", + "type": "google.cloud.dialogflow_v2beta1.types.PhoneNumber" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dialogflow_v2beta1.types.PhoneNumber", + "shortName": "update_phone_number" + }, + "description": "Sample for UpdatePhoneNumber", + "file": "dialogflow_v2beta1_generated_phone_numbers_update_phone_number_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_PhoneNumbers_UpdatePhoneNumber_async", + "segments": [ + { + "end": 50, + "start": 27, + "type": 
"FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_phone_numbers_update_phone_number_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dialogflow_v2beta1.PhoneNumbersClient", + "shortName": "PhoneNumbersClient" + }, + "fullName": "google.cloud.dialogflow_v2beta1.PhoneNumbersClient.update_phone_number", + "method": { + "fullName": "google.cloud.dialogflow.v2beta1.PhoneNumbers.UpdatePhoneNumber", + "service": { + "fullName": "google.cloud.dialogflow.v2beta1.PhoneNumbers", + "shortName": "PhoneNumbers" + }, + "shortName": "UpdatePhoneNumber" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dialogflow_v2beta1.types.UpdatePhoneNumberRequest" + }, + { + "name": "phone_number", + "type": "google.cloud.dialogflow_v2beta1.types.PhoneNumber" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dialogflow_v2beta1.types.PhoneNumber", + "shortName": "update_phone_number" + }, + "description": "Sample for UpdatePhoneNumber", + "file": "dialogflow_v2beta1_generated_phone_numbers_update_phone_number_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dialogflow_v2beta1_generated_PhoneNumbers_UpdatePhoneNumber_sync", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dialogflow_v2beta1_generated_phone_numbers_update_phone_number_sync.py" + }, { "canonical": true, "clientMethod": { diff --git a/packages/google-cloud-dialogflow/scripts/fixup_dialogflow_v2beta1_keywords.py b/packages/google-cloud-dialogflow/scripts/fixup_dialogflow_v2beta1_keywords.py index 25e7259e5e0b..4ecf52865486 100644 --- a/packages/google-cloud-dialogflow/scripts/fixup_dialogflow_v2beta1_keywords.py +++ b/packages/google-cloud-dialogflow/scripts/fixup_dialogflow_v2beta1_keywords.py @@ -74,6 +74,7 @@ class dialogflowCallTransformer(cst.CSTTransformer): 'delete_generator': ('name', ), 'delete_intent': ('name', ), 'delete_knowledge_base': ('name', 'force', ), + 'delete_phone_number': ('name', ), 'delete_session_entity_type': ('name', ), 'delete_sip_trunk': ('name', ), 'delete_version': ('name', ), @@ -115,6 +116,7 @@ class dialogflowCallTransformer(cst.CSTTransformer): 'list_knowledge_bases': ('parent', 'page_size', 'page_token', 'filter', ), 'list_messages': ('parent', 'filter', 'page_size', 'page_token', ), 'list_participants': ('parent', 'page_size', 'page_token', ), + 'list_phone_numbers': ('parent', 'page_size', 'page_token', 'show_deleted', ), 'list_session_entity_types': ('parent', 'page_size', 'page_token', ), 'list_sip_trunks': ('parent', 'page_size', 'page_token', ), 
'list_suggestions': ('parent', 'page_size', 'page_token', 'filter', ), @@ -133,6 +135,7 @@ class dialogflowCallTransformer(cst.CSTTransformer): 'suggest_knowledge_assist': ('parent', 'latest_message', 'context_size', 'previous_suggested_query', ), 'suggest_smart_replies': ('parent', 'current_text_input', 'latest_message', 'context_size', ), 'train_agent': ('parent', ), + 'undelete_phone_number': ('name', ), 'update_answer_record': ('answer_record', 'update_mask', ), 'update_context': ('context', 'update_mask', ), 'update_conversation_profile': ('conversation_profile', 'update_mask', ), @@ -144,6 +147,7 @@ class dialogflowCallTransformer(cst.CSTTransformer): 'update_intent': ('intent', 'language_code', 'update_mask', 'intent_view', ), 'update_knowledge_base': ('knowledge_base', 'update_mask', ), 'update_participant': ('participant', 'update_mask', ), + 'update_phone_number': ('phone_number', 'update_mask', ), 'update_session_entity_type': ('session_entity_type', 'update_mask', ), 'update_sip_trunk': ('sip_trunk', 'update_mask', ), 'update_version': ('version', 'update_mask', ), diff --git a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_agents.py b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_agents.py index aced1370e2d5..c65e823d8356 100644 --- a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_agents.py +++ b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_agents.py @@ -76,6 +76,13 @@ from google.cloud.dialogflow_v2.types import agent as gcd_agent from google.cloud.dialogflow_v2.types import validation_result +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -298,6 +305,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = AgentsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = AgentsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -6134,10 +6184,13 @@ def test_get_agent_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AgentsRestInterceptor, "post_get_agent" ) as post, mock.patch.object( + transports.AgentsRestInterceptor, "post_get_agent_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AgentsRestInterceptor, "pre_get_agent" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = agent.GetAgentRequest.pb(agent.GetAgentRequest()) transcode.return_value = { "method": "post", @@ -6159,6 +6212,7 @@ def test_get_agent_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = agent.Agent() + post_with_metadata.return_value = agent.Agent(), metadata client.get_agent( request, @@ -6170,6 +6224,7 @@ def test_get_agent_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_set_agent_rest_bad_request(request_type=gcd_agent.SetAgentRequest): @@ -6358,10 +6413,13 @@ def test_set_agent_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AgentsRestInterceptor, "post_set_agent" ) as post, mock.patch.object( + transports.AgentsRestInterceptor, "post_set_agent_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AgentsRestInterceptor, "pre_set_agent" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gcd_agent.SetAgentRequest.pb(gcd_agent.SetAgentRequest()) transcode.return_value = { "method": "post", @@ -6383,6 +6441,7 @@ def test_set_agent_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gcd_agent.Agent() + post_with_metadata.return_value = gcd_agent.Agent(), metadata client.set_agent( request, @@ -6394,6 +6453,7 @@ def test_set_agent_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_agent_rest_bad_request(request_type=agent.DeleteAgentRequest): @@ -6577,10 +6637,13 @@ def 
test_search_agents_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AgentsRestInterceptor, "post_search_agents" ) as post, mock.patch.object( + transports.AgentsRestInterceptor, "post_search_agents_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AgentsRestInterceptor, "pre_search_agents" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = agent.SearchAgentsRequest.pb(agent.SearchAgentsRequest()) transcode.return_value = { "method": "post", @@ -6602,6 +6665,7 @@ def test_search_agents_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = agent.SearchAgentsResponse() + post_with_metadata.return_value = agent.SearchAgentsResponse(), metadata client.search_agents( request, @@ -6613,6 +6677,7 @@ def test_search_agents_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_train_agent_rest_bad_request(request_type=agent.TrainAgentRequest): @@ -6689,10 +6754,13 @@ def test_train_agent_rest_interceptors(null_interceptor): ), mock.patch.object( transports.AgentsRestInterceptor, "post_train_agent" ) as post, mock.patch.object( + transports.AgentsRestInterceptor, "post_train_agent_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AgentsRestInterceptor, "pre_train_agent" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = agent.TrainAgentRequest.pb(agent.TrainAgentRequest()) transcode.return_value = { "method": "post", @@ -6714,6 +6782,7 @@ def test_train_agent_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.train_agent( request, @@ -6725,6 +6794,7 @@ def test_train_agent_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_export_agent_rest_bad_request(request_type=agent.ExportAgentRequest): @@ -6801,10 +6871,13 @@ def test_export_agent_rest_interceptors(null_interceptor): ), mock.patch.object( transports.AgentsRestInterceptor, "post_export_agent" ) as post, mock.patch.object( + transports.AgentsRestInterceptor, "post_export_agent_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AgentsRestInterceptor, "pre_export_agent" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = agent.ExportAgentRequest.pb(agent.ExportAgentRequest()) transcode.return_value = { "method": "post", @@ -6826,6 +6899,7 @@ def test_export_agent_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.export_agent( request, @@ -6837,6 +6911,7 @@ def test_export_agent_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_import_agent_rest_bad_request(request_type=agent.ImportAgentRequest): @@ -6913,10 +6988,13 @@ def test_import_agent_rest_interceptors(null_interceptor): ), mock.patch.object( transports.AgentsRestInterceptor, "post_import_agent" ) as post, mock.patch.object( + transports.AgentsRestInterceptor, "post_import_agent_with_metadata" + ) as 
post_with_metadata, mock.patch.object( transports.AgentsRestInterceptor, "pre_import_agent" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = agent.ImportAgentRequest.pb(agent.ImportAgentRequest()) transcode.return_value = { "method": "post", @@ -6938,6 +7016,7 @@ def test_import_agent_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.import_agent( request, @@ -6949,6 +7028,7 @@ def test_import_agent_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_restore_agent_rest_bad_request(request_type=agent.RestoreAgentRequest): @@ -7025,10 +7105,13 @@ def test_restore_agent_rest_interceptors(null_interceptor): ), mock.patch.object( transports.AgentsRestInterceptor, "post_restore_agent" ) as post, mock.patch.object( + transports.AgentsRestInterceptor, "post_restore_agent_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AgentsRestInterceptor, "pre_restore_agent" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = agent.RestoreAgentRequest.pb(agent.RestoreAgentRequest()) transcode.return_value = { "method": "post", @@ -7050,6 +7133,7 @@ def test_restore_agent_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.restore_agent( request, @@ -7061,6 +7145,7 @@ def test_restore_agent_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_validation_result_rest_bad_request( @@ -7140,10 +7225,13 @@ def test_get_validation_result_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AgentsRestInterceptor, "post_get_validation_result" ) as post, mock.patch.object( + transports.AgentsRestInterceptor, "post_get_validation_result_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AgentsRestInterceptor, "pre_get_validation_result" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = agent.GetValidationResultRequest.pb( agent.GetValidationResultRequest() ) @@ -7169,6 +7257,7 @@ def test_get_validation_result_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = validation_result.ValidationResult() + post_with_metadata.return_value = validation_result.ValidationResult(), metadata client.get_validation_result( request, @@ -7180,6 +7269,7 @@ def test_get_validation_result_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationRequest): diff --git a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_answer_records.py b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_answer_records.py index ce401ed83240..eb6a842340be 100644 --- a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_answer_records.py +++ b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_answer_records.py @@ -67,6 +67,13 @@ from google.cloud.dialogflow_v2.types 
import answer_record from google.cloud.dialogflow_v2.types import answer_record as gcd_answer_record +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -321,6 +328,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = AnswerRecordsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = AnswerRecordsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -2726,10 +2776,14 @@ def test_list_answer_records_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AnswerRecordsRestInterceptor, "post_list_answer_records" ) as post, mock.patch.object( + transports.AnswerRecordsRestInterceptor, + "post_list_answer_records_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AnswerRecordsRestInterceptor, "pre_list_answer_records" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = answer_record.ListAnswerRecordsRequest.pb( answer_record.ListAnswerRecordsRequest() ) @@ -2755,6 +2809,10 @@ def test_list_answer_records_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = answer_record.ListAnswerRecordsResponse() + post_with_metadata.return_value = ( + answer_record.ListAnswerRecordsResponse(), + metadata, + ) client.list_answer_records( request, @@ -2766,6 +2824,7 @@ def test_list_answer_records_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_answer_record_rest_bad_request( @@ -3177,10 +3236,14 @@ def test_update_answer_record_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AnswerRecordsRestInterceptor, "post_update_answer_record" ) as post, mock.patch.object( + transports.AnswerRecordsRestInterceptor, + "post_update_answer_record_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AnswerRecordsRestInterceptor, "pre_update_answer_record" ) as pre: pre.assert_not_called() 
post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gcd_answer_record.UpdateAnswerRecordRequest.pb( gcd_answer_record.UpdateAnswerRecordRequest() ) @@ -3206,6 +3269,7 @@ def test_update_answer_record_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gcd_answer_record.AnswerRecord() + post_with_metadata.return_value = gcd_answer_record.AnswerRecord(), metadata client.update_answer_record( request, @@ -3217,6 +3281,7 @@ def test_update_answer_record_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationRequest): diff --git a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_contexts.py b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_contexts.py index 43fdefbc3c57..dff779bbc974 100644 --- a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_contexts.py +++ b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_contexts.py @@ -65,6 +65,13 @@ from google.cloud.dialogflow_v2.types import context from google.cloud.dialogflow_v2.types import context as gcd_context +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -295,6 +302,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = ContextsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = ContextsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -4799,10 +4849,13 @@ def test_list_contexts_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ContextsRestInterceptor, "post_list_contexts" ) as post, mock.patch.object( + transports.ContextsRestInterceptor, "post_list_contexts_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ContextsRestInterceptor, "pre_list_contexts" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() 
pb_message = context.ListContextsRequest.pb(context.ListContextsRequest()) transcode.return_value = { "method": "post", @@ -4826,6 +4879,7 @@ def test_list_contexts_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = context.ListContextsResponse() + post_with_metadata.return_value = context.ListContextsResponse(), metadata client.list_contexts( request, @@ -4837,6 +4891,7 @@ def test_list_contexts_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_context_rest_bad_request(request_type=context.GetContextRequest): @@ -4919,10 +4974,13 @@ def test_get_context_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ContextsRestInterceptor, "post_get_context" ) as post, mock.patch.object( + transports.ContextsRestInterceptor, "post_get_context_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ContextsRestInterceptor, "pre_get_context" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = context.GetContextRequest.pb(context.GetContextRequest()) transcode.return_value = { "method": "post", @@ -4944,6 +5002,7 @@ def test_get_context_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = context.Context() + post_with_metadata.return_value = context.Context(), metadata client.get_context( request, @@ -4955,6 +5014,7 @@ def test_get_context_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_context_rest_bad_request(request_type=gcd_context.CreateContextRequest): @@ -5109,10 +5169,13 @@ def test_create_context_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ContextsRestInterceptor, "post_create_context" ) as post, mock.patch.object( + transports.ContextsRestInterceptor, "post_create_context_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ContextsRestInterceptor, "pre_create_context" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gcd_context.CreateContextRequest.pb( gcd_context.CreateContextRequest() ) @@ -5136,6 +5199,7 @@ def test_create_context_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gcd_context.Context() + post_with_metadata.return_value = gcd_context.Context(), metadata client.create_context( request, @@ -5147,6 +5211,7 @@ def test_create_context_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_context_rest_bad_request(request_type=gcd_context.UpdateContextRequest): @@ -5305,10 +5370,13 @@ def test_update_context_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ContextsRestInterceptor, "post_update_context" ) as post, mock.patch.object( + transports.ContextsRestInterceptor, "post_update_context_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ContextsRestInterceptor, "pre_update_context" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gcd_context.UpdateContextRequest.pb( gcd_context.UpdateContextRequest() ) @@ -5332,6 +5400,7 @@ def test_update_context_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata 
post.return_value = gcd_context.Context() + post_with_metadata.return_value = gcd_context.Context(), metadata client.update_context( request, @@ -5343,6 +5412,7 @@ def test_update_context_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_context_rest_bad_request(request_type=context.DeleteContextRequest): diff --git a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_conversation_datasets.py b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_conversation_datasets.py index a7e9c4b55913..ae82c2f6aba8 100644 --- a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_conversation_datasets.py +++ b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_conversation_datasets.py @@ -77,6 +77,13 @@ from google.cloud.dialogflow_v2.types import conversation_dataset from google.cloud.dialogflow_v2.types import gcs +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -350,6 +357,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = ConversationDatasetsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = ConversationDatasetsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -4541,11 +4591,15 @@ def test_create_conversation_dataset_rest_interceptors(null_interceptor): transports.ConversationDatasetsRestInterceptor, "post_create_conversation_dataset", ) as post, mock.patch.object( + transports.ConversationDatasetsRestInterceptor, + "post_create_conversation_dataset_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ConversationDatasetsRestInterceptor, "pre_create_conversation_dataset", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gcd_conversation_dataset.CreateConversationDatasetRequest.pb( gcd_conversation_dataset.CreateConversationDatasetRequest() ) @@ -4569,6 +4623,7 @@ def 
test_create_conversation_dataset_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_conversation_dataset( request, @@ -4580,6 +4635,7 @@ def test_create_conversation_dataset_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_conversation_dataset_rest_bad_request( @@ -4674,10 +4730,14 @@ def test_get_conversation_dataset_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ConversationDatasetsRestInterceptor, "post_get_conversation_dataset" ) as post, mock.patch.object( + transports.ConversationDatasetsRestInterceptor, + "post_get_conversation_dataset_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ConversationDatasetsRestInterceptor, "pre_get_conversation_dataset" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = conversation_dataset.GetConversationDatasetRequest.pb( conversation_dataset.GetConversationDatasetRequest() ) @@ -4703,6 +4763,10 @@ def test_get_conversation_dataset_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = conversation_dataset.ConversationDataset() + post_with_metadata.return_value = ( + conversation_dataset.ConversationDataset(), + metadata, + ) client.get_conversation_dataset( request, @@ -4714,6 +4778,7 @@ def test_get_conversation_dataset_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_conversation_datasets_rest_bad_request( @@ -4801,10 +4866,14 @@ def test_list_conversation_datasets_rest_interceptors(null_interceptor): transports.ConversationDatasetsRestInterceptor, "post_list_conversation_datasets", ) as post, mock.patch.object( + transports.ConversationDatasetsRestInterceptor, + "post_list_conversation_datasets_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ConversationDatasetsRestInterceptor, "pre_list_conversation_datasets" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = conversation_dataset.ListConversationDatasetsRequest.pb( conversation_dataset.ListConversationDatasetsRequest() ) @@ -4830,6 +4899,10 @@ def test_list_conversation_datasets_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = conversation_dataset.ListConversationDatasetsResponse() + post_with_metadata.return_value = ( + conversation_dataset.ListConversationDatasetsResponse(), + metadata, + ) client.list_conversation_datasets( request, @@ -4841,6 +4914,7 @@ def test_list_conversation_datasets_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_conversation_dataset_rest_bad_request( @@ -4926,11 +5000,15 @@ def test_delete_conversation_dataset_rest_interceptors(null_interceptor): transports.ConversationDatasetsRestInterceptor, "post_delete_conversation_dataset", ) as post, mock.patch.object( + transports.ConversationDatasetsRestInterceptor, + "post_delete_conversation_dataset_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ConversationDatasetsRestInterceptor, "pre_delete_conversation_dataset", ) as pre: pre.assert_not_called() 
post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = conversation_dataset.DeleteConversationDatasetRequest.pb( conversation_dataset.DeleteConversationDatasetRequest() ) @@ -4954,6 +5032,7 @@ def test_delete_conversation_dataset_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.delete_conversation_dataset( request, @@ -4965,6 +5044,7 @@ def test_delete_conversation_dataset_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_import_conversation_data_rest_bad_request( @@ -5045,10 +5125,14 @@ def test_import_conversation_data_rest_interceptors(null_interceptor): ), mock.patch.object( transports.ConversationDatasetsRestInterceptor, "post_import_conversation_data" ) as post, mock.patch.object( + transports.ConversationDatasetsRestInterceptor, + "post_import_conversation_data_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ConversationDatasetsRestInterceptor, "pre_import_conversation_data" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = conversation_dataset.ImportConversationDataRequest.pb( conversation_dataset.ImportConversationDataRequest() ) @@ -5072,6 +5156,7 @@ def test_import_conversation_data_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.import_conversation_data( request, @@ -5083,6 +5168,7 @@ def test_import_conversation_data_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationRequest): diff --git a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_conversation_models.py b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_conversation_models.py index 3a2f483d9d78..fc45fa66702e 100644 --- a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_conversation_models.py +++ b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_conversation_models.py @@ -76,6 +76,13 @@ ) from google.cloud.dialogflow_v2.types import conversation_model +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -347,6 +354,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = ConversationModelsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = ConversationModelsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -7035,10 +7085,14 @@ def test_create_conversation_model_rest_interceptors(null_interceptor): ), mock.patch.object( transports.ConversationModelsRestInterceptor, "post_create_conversation_model" ) as post, mock.patch.object( + transports.ConversationModelsRestInterceptor, + "post_create_conversation_model_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ConversationModelsRestInterceptor, "pre_create_conversation_model" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gcd_conversation_model.CreateConversationModelRequest.pb( gcd_conversation_model.CreateConversationModelRequest() ) @@ -7062,6 +7116,7 @@ def test_create_conversation_model_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_conversation_model( request, @@ -7073,6 +7128,7 @@ def test_create_conversation_model_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_conversation_model_rest_bad_request( @@ -7167,10 +7223,14 @@ def test_get_conversation_model_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ConversationModelsRestInterceptor, "post_get_conversation_model" ) as post, mock.patch.object( + transports.ConversationModelsRestInterceptor, + "post_get_conversation_model_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ConversationModelsRestInterceptor, "pre_get_conversation_model" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = conversation_model.GetConversationModelRequest.pb( conversation_model.GetConversationModelRequest() ) @@ -7196,6 +7256,10 @@ def test_get_conversation_model_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = conversation_model.ConversationModel() + post_with_metadata.return_value = ( + conversation_model.ConversationModel(), + metadata, + ) 
client.get_conversation_model( request, @@ -7207,6 +7271,7 @@ def test_get_conversation_model_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_conversation_models_rest_bad_request( @@ -7293,10 +7358,14 @@ def test_list_conversation_models_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ConversationModelsRestInterceptor, "post_list_conversation_models" ) as post, mock.patch.object( + transports.ConversationModelsRestInterceptor, + "post_list_conversation_models_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ConversationModelsRestInterceptor, "pre_list_conversation_models" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = conversation_model.ListConversationModelsRequest.pb( conversation_model.ListConversationModelsRequest() ) @@ -7322,6 +7391,10 @@ def test_list_conversation_models_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = conversation_model.ListConversationModelsResponse() + post_with_metadata.return_value = ( + conversation_model.ListConversationModelsResponse(), + metadata, + ) client.list_conversation_models( request, @@ -7333,6 +7406,7 @@ def test_list_conversation_models_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_conversation_model_rest_bad_request( @@ -7413,10 +7487,14 @@ def test_delete_conversation_model_rest_interceptors(null_interceptor): ), mock.patch.object( transports.ConversationModelsRestInterceptor, "post_delete_conversation_model" ) as post, mock.patch.object( + transports.ConversationModelsRestInterceptor, + "post_delete_conversation_model_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ConversationModelsRestInterceptor, "pre_delete_conversation_model" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = conversation_model.DeleteConversationModelRequest.pb( conversation_model.DeleteConversationModelRequest() ) @@ -7440,6 +7518,7 @@ def test_delete_conversation_model_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.delete_conversation_model( request, @@ -7451,6 +7530,7 @@ def test_delete_conversation_model_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_deploy_conversation_model_rest_bad_request( @@ -7531,10 +7611,14 @@ def test_deploy_conversation_model_rest_interceptors(null_interceptor): ), mock.patch.object( transports.ConversationModelsRestInterceptor, "post_deploy_conversation_model" ) as post, mock.patch.object( + transports.ConversationModelsRestInterceptor, + "post_deploy_conversation_model_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ConversationModelsRestInterceptor, "pre_deploy_conversation_model" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = conversation_model.DeployConversationModelRequest.pb( conversation_model.DeployConversationModelRequest() ) @@ -7558,6 +7642,7 @@ def test_deploy_conversation_model_rest_interceptors(null_interceptor): ] pre.return_value = request, 
metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.deploy_conversation_model( request, @@ -7569,6 +7654,7 @@ def test_deploy_conversation_model_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_undeploy_conversation_model_rest_bad_request( @@ -7649,10 +7735,14 @@ def test_undeploy_conversation_model_rest_interceptors(null_interceptor): ), mock.patch.object( transports.ConversationModelsRestInterceptor, "post_undeploy_conversation_model" ) as post, mock.patch.object( + transports.ConversationModelsRestInterceptor, + "post_undeploy_conversation_model_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ConversationModelsRestInterceptor, "pre_undeploy_conversation_model" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = conversation_model.UndeployConversationModelRequest.pb( conversation_model.UndeployConversationModelRequest() ) @@ -7676,6 +7766,7 @@ def test_undeploy_conversation_model_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.undeploy_conversation_model( request, @@ -7687,6 +7778,7 @@ def test_undeploy_conversation_model_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_conversation_model_evaluation_rest_bad_request( @@ -7780,11 +7872,15 @@ def test_get_conversation_model_evaluation_rest_interceptors(null_interceptor): transports.ConversationModelsRestInterceptor, "post_get_conversation_model_evaluation", ) as post, mock.patch.object( + transports.ConversationModelsRestInterceptor, + "post_get_conversation_model_evaluation_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ConversationModelsRestInterceptor, "pre_get_conversation_model_evaluation", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = conversation_model.GetConversationModelEvaluationRequest.pb( conversation_model.GetConversationModelEvaluationRequest() ) @@ -7810,6 +7906,10 @@ def test_get_conversation_model_evaluation_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = conversation_model.ConversationModelEvaluation() + post_with_metadata.return_value = ( + conversation_model.ConversationModelEvaluation(), + metadata, + ) client.get_conversation_model_evaluation( request, @@ -7821,6 +7921,7 @@ def test_get_conversation_model_evaluation_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_conversation_model_evaluations_rest_bad_request( @@ -7908,11 +8009,15 @@ def test_list_conversation_model_evaluations_rest_interceptors(null_interceptor) transports.ConversationModelsRestInterceptor, "post_list_conversation_model_evaluations", ) as post, mock.patch.object( + transports.ConversationModelsRestInterceptor, + "post_list_conversation_model_evaluations_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ConversationModelsRestInterceptor, "pre_list_conversation_model_evaluations", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = 
conversation_model.ListConversationModelEvaluationsRequest.pb( conversation_model.ListConversationModelEvaluationsRequest() ) @@ -7942,6 +8047,10 @@ def test_list_conversation_model_evaluations_rest_interceptors(null_interceptor) post.return_value = ( conversation_model.ListConversationModelEvaluationsResponse() ) + post_with_metadata.return_value = ( + conversation_model.ListConversationModelEvaluationsResponse(), + metadata, + ) client.list_conversation_model_evaluations( request, @@ -7953,6 +8062,7 @@ def test_list_conversation_model_evaluations_rest_interceptors(null_interceptor) pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_conversation_model_evaluation_rest_bad_request( @@ -8038,11 +8148,15 @@ def test_create_conversation_model_evaluation_rest_interceptors(null_interceptor transports.ConversationModelsRestInterceptor, "post_create_conversation_model_evaluation", ) as post, mock.patch.object( + transports.ConversationModelsRestInterceptor, + "post_create_conversation_model_evaluation_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ConversationModelsRestInterceptor, "pre_create_conversation_model_evaluation", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = conversation_model.CreateConversationModelEvaluationRequest.pb( conversation_model.CreateConversationModelEvaluationRequest() ) @@ -8066,6 +8180,7 @@ def test_create_conversation_model_evaluation_rest_interceptors(null_interceptor ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_conversation_model_evaluation( request, @@ -8077,6 +8192,7 @@ def test_create_conversation_model_evaluation_rest_interceptors(null_interceptor pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationRequest): diff --git a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_conversation_profiles.py b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_conversation_profiles.py index 87487ae97adb..8b8ad346dec1 100644 --- a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_conversation_profiles.py +++ b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_conversation_profiles.py @@ -79,6 +79,13 @@ from google.cloud.dialogflow_v2.types import conversation_profile from google.cloud.dialogflow_v2.types import participant +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -352,6 +359,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = ConversationProfilesClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = ConversationProfilesClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -5903,10 +5953,14 @@ def test_list_conversation_profiles_rest_interceptors(null_interceptor): transports.ConversationProfilesRestInterceptor, "post_list_conversation_profiles", ) as post, mock.patch.object( + transports.ConversationProfilesRestInterceptor, + "post_list_conversation_profiles_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ConversationProfilesRestInterceptor, "pre_list_conversation_profiles" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = conversation_profile.ListConversationProfilesRequest.pb( conversation_profile.ListConversationProfilesRequest() ) @@ -5932,6 +5986,10 @@ def test_list_conversation_profiles_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = conversation_profile.ListConversationProfilesResponse() + post_with_metadata.return_value = ( + conversation_profile.ListConversationProfilesResponse(), + metadata, + ) client.list_conversation_profiles( request, @@ -5943,6 +6001,7 @@ def test_list_conversation_profiles_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_conversation_profile_rest_bad_request( @@ -6035,10 +6094,14 @@ def test_get_conversation_profile_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ConversationProfilesRestInterceptor, "post_get_conversation_profile" ) as post, mock.patch.object( + transports.ConversationProfilesRestInterceptor, + "post_get_conversation_profile_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ConversationProfilesRestInterceptor, "pre_get_conversation_profile" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = conversation_profile.GetConversationProfileRequest.pb( conversation_profile.GetConversationProfileRequest() ) @@ -6064,6 +6127,10 @@ def test_get_conversation_profile_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = conversation_profile.ConversationProfile() + 
post_with_metadata.return_value = ( + conversation_profile.ConversationProfile(), + metadata, + ) client.get_conversation_profile( request, @@ -6075,6 +6142,7 @@ def test_get_conversation_profile_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_conversation_profile_rest_bad_request( @@ -6193,6 +6261,7 @@ def test_create_conversation_profile_rest_call_success(request_type): "notification_config": {}, "logging_config": {"enable_stackdriver_logging": True}, "new_message_event_notification_config": {}, + "new_recognition_result_notification_config": {}, "stt_config": { "speech_model_variant": 1, "model": "model_value", @@ -6340,11 +6409,15 @@ def test_create_conversation_profile_rest_interceptors(null_interceptor): transports.ConversationProfilesRestInterceptor, "post_create_conversation_profile", ) as post, mock.patch.object( + transports.ConversationProfilesRestInterceptor, + "post_create_conversation_profile_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ConversationProfilesRestInterceptor, "pre_create_conversation_profile", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gcd_conversation_profile.CreateConversationProfileRequest.pb( gcd_conversation_profile.CreateConversationProfileRequest() ) @@ -6370,6 +6443,10 @@ def test_create_conversation_profile_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gcd_conversation_profile.ConversationProfile() + post_with_metadata.return_value = ( + gcd_conversation_profile.ConversationProfile(), + metadata, + ) client.create_conversation_profile( request, @@ -6381,6 +6458,7 @@ def test_create_conversation_profile_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_conversation_profile_rest_bad_request( @@ -6507,6 +6585,7 @@ def test_update_conversation_profile_rest_call_success(request_type): "notification_config": {}, "logging_config": {"enable_stackdriver_logging": True}, "new_message_event_notification_config": {}, + "new_recognition_result_notification_config": {}, "stt_config": { "speech_model_variant": 1, "model": "model_value", @@ -6654,11 +6733,15 @@ def test_update_conversation_profile_rest_interceptors(null_interceptor): transports.ConversationProfilesRestInterceptor, "post_update_conversation_profile", ) as post, mock.patch.object( + transports.ConversationProfilesRestInterceptor, + "post_update_conversation_profile_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ConversationProfilesRestInterceptor, "pre_update_conversation_profile", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gcd_conversation_profile.UpdateConversationProfileRequest.pb( gcd_conversation_profile.UpdateConversationProfileRequest() ) @@ -6684,6 +6767,10 @@ def test_update_conversation_profile_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gcd_conversation_profile.ConversationProfile() + post_with_metadata.return_value = ( + gcd_conversation_profile.ConversationProfile(), + metadata, + ) client.update_conversation_profile( request, @@ -6695,6 +6782,7 @@ def test_update_conversation_profile_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + 
post_with_metadata.assert_called_once() def test_delete_conversation_profile_rest_bad_request( @@ -6890,11 +6978,15 @@ def test_set_suggestion_feature_config_rest_interceptors(null_interceptor): transports.ConversationProfilesRestInterceptor, "post_set_suggestion_feature_config", ) as post, mock.patch.object( + transports.ConversationProfilesRestInterceptor, + "post_set_suggestion_feature_config_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ConversationProfilesRestInterceptor, "pre_set_suggestion_feature_config", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gcd_conversation_profile.SetSuggestionFeatureConfigRequest.pb( gcd_conversation_profile.SetSuggestionFeatureConfigRequest() ) @@ -6918,6 +7010,7 @@ def test_set_suggestion_feature_config_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.set_suggestion_feature_config( request, @@ -6929,6 +7022,7 @@ def test_set_suggestion_feature_config_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_clear_suggestion_feature_config_rest_bad_request( @@ -7014,11 +7108,15 @@ def test_clear_suggestion_feature_config_rest_interceptors(null_interceptor): transports.ConversationProfilesRestInterceptor, "post_clear_suggestion_feature_config", ) as post, mock.patch.object( + transports.ConversationProfilesRestInterceptor, + "post_clear_suggestion_feature_config_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ConversationProfilesRestInterceptor, "pre_clear_suggestion_feature_config", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gcd_conversation_profile.ClearSuggestionFeatureConfigRequest.pb( gcd_conversation_profile.ClearSuggestionFeatureConfigRequest() ) @@ -7042,6 +7140,7 @@ def test_clear_suggestion_feature_config_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.clear_suggestion_feature_config( request, @@ -7053,6 +7152,7 @@ def test_clear_suggestion_feature_config_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationRequest): diff --git a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_conversations.py b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_conversations.py index b785646e2385..7dd7493016a1 100644 --- a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_conversations.py +++ b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_conversations.py @@ -73,6 +73,13 @@ from google.cloud.dialogflow_v2.types import conversation from google.cloud.dialogflow_v2.types import conversation as gcd_conversation +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -327,6 +334,49 @@ def test__get_universe_domain(): 
assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = ConversationsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = ConversationsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -6766,10 +6816,14 @@ def test_create_conversation_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ConversationsRestInterceptor, "post_create_conversation" ) as post, mock.patch.object( + transports.ConversationsRestInterceptor, + "post_create_conversation_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ConversationsRestInterceptor, "pre_create_conversation" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gcd_conversation.CreateConversationRequest.pb( gcd_conversation.CreateConversationRequest() ) @@ -6795,6 +6849,7 @@ def test_create_conversation_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gcd_conversation.Conversation() + post_with_metadata.return_value = gcd_conversation.Conversation(), metadata client.create_conversation( request, @@ -6806,6 +6861,7 @@ def test_create_conversation_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_conversations_rest_bad_request( @@ -6890,10 +6946,13 @@ def test_list_conversations_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ConversationsRestInterceptor, "post_list_conversations" ) as post, mock.patch.object( + transports.ConversationsRestInterceptor, "post_list_conversations_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ConversationsRestInterceptor, "pre_list_conversations" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = conversation.ListConversationsRequest.pb( conversation.ListConversationsRequest() ) @@ -6919,6 +6978,10 @@ def test_list_conversations_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = conversation.ListConversationsResponse() + post_with_metadata.return_value = ( + conversation.ListConversationsResponse(), + metadata, + ) client.list_conversations( request, @@ -6930,6 +6993,7 @@ def 
test_list_conversations_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_conversation_rest_bad_request( @@ -7025,10 +7089,13 @@ def test_get_conversation_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ConversationsRestInterceptor, "post_get_conversation" ) as post, mock.patch.object( + transports.ConversationsRestInterceptor, "post_get_conversation_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ConversationsRestInterceptor, "pre_get_conversation" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = conversation.GetConversationRequest.pb( conversation.GetConversationRequest() ) @@ -7052,6 +7119,7 @@ def test_get_conversation_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = conversation.Conversation() + post_with_metadata.return_value = conversation.Conversation(), metadata client.get_conversation( request, @@ -7063,6 +7131,7 @@ def test_get_conversation_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_complete_conversation_rest_bad_request( @@ -7158,10 +7227,14 @@ def test_complete_conversation_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ConversationsRestInterceptor, "post_complete_conversation" ) as post, mock.patch.object( + transports.ConversationsRestInterceptor, + "post_complete_conversation_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ConversationsRestInterceptor, "pre_complete_conversation" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = conversation.CompleteConversationRequest.pb( conversation.CompleteConversationRequest() ) @@ -7185,6 +7258,7 @@ def test_complete_conversation_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = conversation.Conversation() + post_with_metadata.return_value = conversation.Conversation(), metadata client.complete_conversation( request, @@ -7196,6 +7270,7 @@ def test_complete_conversation_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_messages_rest_bad_request(request_type=conversation.ListMessagesRequest): @@ -7278,10 +7353,13 @@ def test_list_messages_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ConversationsRestInterceptor, "post_list_messages" ) as post, mock.patch.object( + transports.ConversationsRestInterceptor, "post_list_messages_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ConversationsRestInterceptor, "pre_list_messages" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = conversation.ListMessagesRequest.pb( conversation.ListMessagesRequest() ) @@ -7307,6 +7385,7 @@ def test_list_messages_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = conversation.ListMessagesResponse() + post_with_metadata.return_value = conversation.ListMessagesResponse(), metadata client.list_messages( request, @@ -7318,6 +7397,7 @@ def test_list_messages_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def 
test_suggest_conversation_summary_rest_bad_request( @@ -7406,10 +7486,14 @@ def test_suggest_conversation_summary_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ConversationsRestInterceptor, "post_suggest_conversation_summary" ) as post, mock.patch.object( + transports.ConversationsRestInterceptor, + "post_suggest_conversation_summary_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ConversationsRestInterceptor, "pre_suggest_conversation_summary" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gcd_conversation.SuggestConversationSummaryRequest.pb( gcd_conversation.SuggestConversationSummaryRequest() ) @@ -7435,6 +7519,10 @@ def test_suggest_conversation_summary_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gcd_conversation.SuggestConversationSummaryResponse() + post_with_metadata.return_value = ( + gcd_conversation.SuggestConversationSummaryResponse(), + metadata, + ) client.suggest_conversation_summary( request, @@ -7446,6 +7534,7 @@ def test_suggest_conversation_summary_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_generate_stateless_summary_rest_bad_request( @@ -7532,10 +7621,14 @@ def test_generate_stateless_summary_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ConversationsRestInterceptor, "post_generate_stateless_summary" ) as post, mock.patch.object( + transports.ConversationsRestInterceptor, + "post_generate_stateless_summary_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ConversationsRestInterceptor, "pre_generate_stateless_summary" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = conversation.GenerateStatelessSummaryRequest.pb( conversation.GenerateStatelessSummaryRequest() ) @@ -7561,6 +7654,10 @@ def test_generate_stateless_summary_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = conversation.GenerateStatelessSummaryResponse() + post_with_metadata.return_value = ( + conversation.GenerateStatelessSummaryResponse(), + metadata, + ) client.generate_stateless_summary( request, @@ -7572,6 +7669,7 @@ def test_generate_stateless_summary_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_generate_stateless_suggestion_rest_bad_request( @@ -7653,10 +7751,14 @@ def test_generate_stateless_suggestion_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ConversationsRestInterceptor, "post_generate_stateless_suggestion" ) as post, mock.patch.object( + transports.ConversationsRestInterceptor, + "post_generate_stateless_suggestion_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ConversationsRestInterceptor, "pre_generate_stateless_suggestion" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = conversation.GenerateStatelessSuggestionRequest.pb( conversation.GenerateStatelessSuggestionRequest() ) @@ -7682,6 +7784,10 @@ def test_generate_stateless_suggestion_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = conversation.GenerateStatelessSuggestionResponse() + post_with_metadata.return_value = ( + 
conversation.GenerateStatelessSuggestionResponse(), + metadata, + ) client.generate_stateless_suggestion( request, @@ -7693,6 +7799,7 @@ def test_generate_stateless_suggestion_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_search_knowledge_rest_bad_request( @@ -7777,10 +7884,13 @@ def test_search_knowledge_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ConversationsRestInterceptor, "post_search_knowledge" ) as post, mock.patch.object( + transports.ConversationsRestInterceptor, "post_search_knowledge_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ConversationsRestInterceptor, "pre_search_knowledge" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = conversation.SearchKnowledgeRequest.pb( conversation.SearchKnowledgeRequest() ) @@ -7806,6 +7916,10 @@ def test_search_knowledge_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = conversation.SearchKnowledgeResponse() + post_with_metadata.return_value = ( + conversation.SearchKnowledgeResponse(), + metadata, + ) client.search_knowledge( request, @@ -7817,6 +7931,7 @@ def test_search_knowledge_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationRequest): diff --git a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_documents.py b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_documents.py index e4e4c82e7ca5..2b6acf0fe842 100644 --- a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_documents.py +++ b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_documents.py @@ -78,6 +78,13 @@ from google.cloud.dialogflow_v2.types import document as gcd_document from google.cloud.dialogflow_v2.types import gcs +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -311,6 +318,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
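Note: the parametrized test__add_cred_info_for_auth_errors cases repeated for each client in this diff all pin down the same contract: only 401, 403 and 404 errors get the credential info appended (as a JSON string) to error.details, and only when the credentials expose get_cred_info. The following is a minimal, self-contained sketch of that behavior using stand-in classes; it illustrates the expected contract and is not the generated client's actual source.

import json

class _FakeError:
    # Stand-in for GoogleAPICallError: just a status code and a mutable details list.
    def __init__(self, code, details):
        self.code = code
        self.details = details

def add_cred_info_for_auth_errors(error, credentials):
    # Only auth-style failures (401/403/404) get the extra detail; 500 and
    # other codes are left untouched, matching the parametrized cases.
    if error.code not in (401, 403, 404):
        return
    # Older credential objects may not expose get_cred_info at all.
    cred_info = getattr(credentials, "get_cred_info", lambda: None)()
    if cred_info:
        error.details.append(json.dumps(cred_info))

class _FakeCreds:
    def get_cred_info(self):
        return {"principal": "service-account@example.com"}

err = _FakeError(403, ["foo"])
add_cred_info_for_auth_errors(err, _FakeCreds())
assert err.details[0] == "foo" and "principal" in err.details[1]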
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = DocumentsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = DocumentsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -5720,10 +5770,13 @@ def test_list_documents_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DocumentsRestInterceptor, "post_list_documents" ) as post, mock.patch.object( + transports.DocumentsRestInterceptor, "post_list_documents_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DocumentsRestInterceptor, "pre_list_documents" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = document.ListDocumentsRequest.pb(document.ListDocumentsRequest()) transcode.return_value = { "method": "post", @@ -5747,6 +5800,7 @@ def test_list_documents_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = document.ListDocumentsResponse() + post_with_metadata.return_value = document.ListDocumentsResponse(), metadata client.list_documents( request, @@ -5758,6 +5812,7 @@ def test_list_documents_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_document_rest_bad_request(request_type=document.GetDocumentRequest): @@ -5849,10 +5904,13 @@ def test_get_document_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DocumentsRestInterceptor, "post_get_document" ) as post, mock.patch.object( + transports.DocumentsRestInterceptor, "post_get_document_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DocumentsRestInterceptor, "pre_get_document" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = document.GetDocumentRequest.pb(document.GetDocumentRequest()) transcode.return_value = { "method": "post", @@ -5874,6 +5932,7 @@ def test_get_document_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = document.Document() + post_with_metadata.return_value = document.Document(), metadata client.get_document( request, @@ -5885,6 +5944,7 @@ def test_get_document_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def 
test_create_document_rest_bad_request( @@ -6054,10 +6114,13 @@ def test_create_document_rest_interceptors(null_interceptor): ), mock.patch.object( transports.DocumentsRestInterceptor, "post_create_document" ) as post, mock.patch.object( + transports.DocumentsRestInterceptor, "post_create_document_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DocumentsRestInterceptor, "pre_create_document" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gcd_document.CreateDocumentRequest.pb( gcd_document.CreateDocumentRequest() ) @@ -6081,6 +6144,7 @@ def test_create_document_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_document( request, @@ -6092,6 +6156,7 @@ def test_create_document_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_import_documents_rest_bad_request( @@ -6170,10 +6235,13 @@ def test_import_documents_rest_interceptors(null_interceptor): ), mock.patch.object( transports.DocumentsRestInterceptor, "post_import_documents" ) as post, mock.patch.object( + transports.DocumentsRestInterceptor, "post_import_documents_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DocumentsRestInterceptor, "pre_import_documents" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = document.ImportDocumentsRequest.pb( document.ImportDocumentsRequest() ) @@ -6197,6 +6265,7 @@ def test_import_documents_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.import_documents( request, @@ -6208,6 +6277,7 @@ def test_import_documents_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_document_rest_bad_request(request_type=document.DeleteDocumentRequest): @@ -6284,10 +6354,13 @@ def test_delete_document_rest_interceptors(null_interceptor): ), mock.patch.object( transports.DocumentsRestInterceptor, "post_delete_document" ) as post, mock.patch.object( + transports.DocumentsRestInterceptor, "post_delete_document_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DocumentsRestInterceptor, "pre_delete_document" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = document.DeleteDocumentRequest.pb(document.DeleteDocumentRequest()) transcode.return_value = { "method": "post", @@ -6309,6 +6382,7 @@ def test_delete_document_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.delete_document( request, @@ -6320,6 +6394,7 @@ def test_delete_document_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_document_rest_bad_request( @@ -6497,10 +6572,13 @@ def test_update_document_rest_interceptors(null_interceptor): ), mock.patch.object( transports.DocumentsRestInterceptor, "post_update_document" ) as post, mock.patch.object( + 
transports.DocumentsRestInterceptor, "post_update_document_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DocumentsRestInterceptor, "pre_update_document" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gcd_document.UpdateDocumentRequest.pb( gcd_document.UpdateDocumentRequest() ) @@ -6524,6 +6602,7 @@ def test_update_document_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.update_document( request, @@ -6535,6 +6614,7 @@ def test_update_document_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_reload_document_rest_bad_request(request_type=document.ReloadDocumentRequest): @@ -6611,10 +6691,13 @@ def test_reload_document_rest_interceptors(null_interceptor): ), mock.patch.object( transports.DocumentsRestInterceptor, "post_reload_document" ) as post, mock.patch.object( + transports.DocumentsRestInterceptor, "post_reload_document_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DocumentsRestInterceptor, "pre_reload_document" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = document.ReloadDocumentRequest.pb(document.ReloadDocumentRequest()) transcode.return_value = { "method": "post", @@ -6636,6 +6719,7 @@ def test_reload_document_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.reload_document( request, @@ -6647,6 +6731,7 @@ def test_reload_document_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_export_document_rest_bad_request(request_type=document.ExportDocumentRequest): @@ -6723,10 +6808,13 @@ def test_export_document_rest_interceptors(null_interceptor): ), mock.patch.object( transports.DocumentsRestInterceptor, "post_export_document" ) as post, mock.patch.object( + transports.DocumentsRestInterceptor, "post_export_document_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DocumentsRestInterceptor, "pre_export_document" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = document.ExportDocumentRequest.pb(document.ExportDocumentRequest()) transcode.return_value = { "method": "post", @@ -6748,6 +6836,7 @@ def test_export_document_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.export_document( request, @@ -6759,6 +6848,7 @@ def test_export_document_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationRequest): diff --git a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_encryption_spec_service.py b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_encryption_spec_service.py index 4cd2f5c3f96e..1dd9d0c39271 100644 --- a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_encryption_spec_service.py +++ 
b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_encryption_spec_service.py @@ -71,6 +71,13 @@ from google.cloud.dialogflow_v2.types import encryption_spec as gcd_encryption_spec from google.cloud.dialogflow_v2.types import encryption_spec +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -346,6 +353,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = EncryptionSpecServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = EncryptionSpecServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -2503,10 +2553,14 @@ def test_get_encryption_spec_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.EncryptionSpecServiceRestInterceptor, "post_get_encryption_spec" ) as post, mock.patch.object( + transports.EncryptionSpecServiceRestInterceptor, + "post_get_encryption_spec_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.EncryptionSpecServiceRestInterceptor, "pre_get_encryption_spec" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = encryption_spec.GetEncryptionSpecRequest.pb( encryption_spec.GetEncryptionSpecRequest() ) @@ -2532,6 +2586,7 @@ def test_get_encryption_spec_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = encryption_spec.EncryptionSpec() + post_with_metadata.return_value = encryption_spec.EncryptionSpec(), metadata client.get_encryption_spec( request, @@ -2543,6 +2598,7 @@ def test_get_encryption_spec_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_initialize_encryption_spec_rest_bad_request( @@ -2628,11 +2684,15 @@ def test_initialize_encryption_spec_rest_interceptors(null_interceptor): transports.EncryptionSpecServiceRestInterceptor, "post_initialize_encryption_spec", ) as post, mock.patch.object( + transports.EncryptionSpecServiceRestInterceptor, + 
"post_initialize_encryption_spec_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.EncryptionSpecServiceRestInterceptor, "pre_initialize_encryption_spec", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gcd_encryption_spec.InitializeEncryptionSpecRequest.pb( gcd_encryption_spec.InitializeEncryptionSpecRequest() ) @@ -2656,6 +2716,7 @@ def test_initialize_encryption_spec_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.initialize_encryption_spec( request, @@ -2667,6 +2728,7 @@ def test_initialize_encryption_spec_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationRequest): diff --git a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_entity_types.py b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_entity_types.py index 6df134b39b3b..4611c00eafe7 100644 --- a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_entity_types.py +++ b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_entity_types.py @@ -75,6 +75,13 @@ from google.cloud.dialogflow_v2.types import entity_type from google.cloud.dialogflow_v2.types import entity_type as gcd_entity_type +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -314,6 +321,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = EntityTypesClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = EntityTypesClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -7415,10 +7465,13 @@ def test_list_entity_types_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.EntityTypesRestInterceptor, "post_list_entity_types" ) as post, mock.patch.object( + transports.EntityTypesRestInterceptor, "post_list_entity_types_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.EntityTypesRestInterceptor, "pre_list_entity_types" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = entity_type.ListEntityTypesRequest.pb( entity_type.ListEntityTypesRequest() ) @@ -7444,6 +7497,10 @@ def test_list_entity_types_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = entity_type.ListEntityTypesResponse() + post_with_metadata.return_value = ( + entity_type.ListEntityTypesResponse(), + metadata, + ) client.list_entity_types( request, @@ -7455,6 +7512,7 @@ def test_list_entity_types_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_entity_type_rest_bad_request( @@ -7550,10 +7608,13 @@ def test_get_entity_type_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.EntityTypesRestInterceptor, "post_get_entity_type" ) as post, mock.patch.object( + transports.EntityTypesRestInterceptor, "post_get_entity_type_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.EntityTypesRestInterceptor, "pre_get_entity_type" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = entity_type.GetEntityTypeRequest.pb( entity_type.GetEntityTypeRequest() ) @@ -7577,6 +7638,7 @@ def test_get_entity_type_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = entity_type.EntityType() + post_with_metadata.return_value = entity_type.EntityType(), metadata client.get_entity_type( request, @@ -7588,6 +7650,7 @@ def test_get_entity_type_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def 
test_create_entity_type_rest_bad_request( @@ -7760,10 +7823,13 @@ def test_create_entity_type_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.EntityTypesRestInterceptor, "post_create_entity_type" ) as post, mock.patch.object( + transports.EntityTypesRestInterceptor, "post_create_entity_type_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.EntityTypesRestInterceptor, "pre_create_entity_type" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gcd_entity_type.CreateEntityTypeRequest.pb( gcd_entity_type.CreateEntityTypeRequest() ) @@ -7787,6 +7853,7 @@ def test_create_entity_type_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gcd_entity_type.EntityType() + post_with_metadata.return_value = gcd_entity_type.EntityType(), metadata client.create_entity_type( request, @@ -7798,6 +7865,7 @@ def test_create_entity_type_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_entity_type_rest_bad_request( @@ -7974,10 +8042,13 @@ def test_update_entity_type_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.EntityTypesRestInterceptor, "post_update_entity_type" ) as post, mock.patch.object( + transports.EntityTypesRestInterceptor, "post_update_entity_type_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.EntityTypesRestInterceptor, "pre_update_entity_type" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gcd_entity_type.UpdateEntityTypeRequest.pb( gcd_entity_type.UpdateEntityTypeRequest() ) @@ -8001,6 +8072,7 @@ def test_update_entity_type_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gcd_entity_type.EntityType() + post_with_metadata.return_value = gcd_entity_type.EntityType(), metadata client.update_entity_type( request, @@ -8012,6 +8084,7 @@ def test_update_entity_type_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_entity_type_rest_bad_request( @@ -8201,10 +8274,14 @@ def test_batch_update_entity_types_rest_interceptors(null_interceptor): ), mock.patch.object( transports.EntityTypesRestInterceptor, "post_batch_update_entity_types" ) as post, mock.patch.object( + transports.EntityTypesRestInterceptor, + "post_batch_update_entity_types_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.EntityTypesRestInterceptor, "pre_batch_update_entity_types" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = entity_type.BatchUpdateEntityTypesRequest.pb( entity_type.BatchUpdateEntityTypesRequest() ) @@ -8228,6 +8305,7 @@ def test_batch_update_entity_types_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.batch_update_entity_types( request, @@ -8239,6 +8317,7 @@ def test_batch_update_entity_types_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_batch_delete_entity_types_rest_bad_request( @@ -8319,10 +8398,14 @@ def test_batch_delete_entity_types_rest_interceptors(null_interceptor): 
), mock.patch.object( transports.EntityTypesRestInterceptor, "post_batch_delete_entity_types" ) as post, mock.patch.object( + transports.EntityTypesRestInterceptor, + "post_batch_delete_entity_types_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.EntityTypesRestInterceptor, "pre_batch_delete_entity_types" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = entity_type.BatchDeleteEntityTypesRequest.pb( entity_type.BatchDeleteEntityTypesRequest() ) @@ -8346,6 +8429,7 @@ def test_batch_delete_entity_types_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.batch_delete_entity_types( request, @@ -8357,6 +8441,7 @@ def test_batch_delete_entity_types_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_batch_create_entities_rest_bad_request( @@ -8437,10 +8522,14 @@ def test_batch_create_entities_rest_interceptors(null_interceptor): ), mock.patch.object( transports.EntityTypesRestInterceptor, "post_batch_create_entities" ) as post, mock.patch.object( + transports.EntityTypesRestInterceptor, + "post_batch_create_entities_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.EntityTypesRestInterceptor, "pre_batch_create_entities" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = entity_type.BatchCreateEntitiesRequest.pb( entity_type.BatchCreateEntitiesRequest() ) @@ -8464,6 +8553,7 @@ def test_batch_create_entities_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.batch_create_entities( request, @@ -8475,6 +8565,7 @@ def test_batch_create_entities_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_batch_update_entities_rest_bad_request( @@ -8555,10 +8646,14 @@ def test_batch_update_entities_rest_interceptors(null_interceptor): ), mock.patch.object( transports.EntityTypesRestInterceptor, "post_batch_update_entities" ) as post, mock.patch.object( + transports.EntityTypesRestInterceptor, + "post_batch_update_entities_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.EntityTypesRestInterceptor, "pre_batch_update_entities" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = entity_type.BatchUpdateEntitiesRequest.pb( entity_type.BatchUpdateEntitiesRequest() ) @@ -8582,6 +8677,7 @@ def test_batch_update_entities_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.batch_update_entities( request, @@ -8593,6 +8689,7 @@ def test_batch_update_entities_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_batch_delete_entities_rest_bad_request( @@ -8673,10 +8770,14 @@ def test_batch_delete_entities_rest_interceptors(null_interceptor): ), mock.patch.object( transports.EntityTypesRestInterceptor, "post_batch_delete_entities" ) as post, mock.patch.object( + 
transports.EntityTypesRestInterceptor, + "post_batch_delete_entities_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.EntityTypesRestInterceptor, "pre_batch_delete_entities" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = entity_type.BatchDeleteEntitiesRequest.pb( entity_type.BatchDeleteEntitiesRequest() ) @@ -8700,6 +8801,7 @@ def test_batch_delete_entities_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.batch_delete_entities( request, @@ -8711,6 +8813,7 @@ def test_batch_delete_entities_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationRequest): diff --git a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_environments.py b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_environments.py index e216c11e0feb..6bcded9af26f 100644 --- a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_environments.py +++ b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_environments.py @@ -64,6 +64,13 @@ ) from google.cloud.dialogflow_v2.types import audio_config, environment, fulfillment +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -307,6 +314,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
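Note: the *_no_get_cred_info tests in this diff rely on mock.Mock accepting a list of attribute names as its first positional argument (the spec), which limits which attributes exist on the mock; that is why hasattr(cred, "get_cred_info") is False for mock.Mock([]). A short standalone illustration of the idiom:

from unittest import mock

cred_with_info = mock.Mock(["get_cred_info"])  # spec restricted to one attribute
cred_without = mock.Mock([])                   # empty spec: no attributes at all

assert hasattr(cred_with_info, "get_cred_info")
assert not hasattr(cred_without, "get_cred_info")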
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = EnvironmentsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = EnvironmentsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -4595,10 +4645,13 @@ def test_list_environments_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.EnvironmentsRestInterceptor, "post_list_environments" ) as post, mock.patch.object( + transports.EnvironmentsRestInterceptor, "post_list_environments_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.EnvironmentsRestInterceptor, "pre_list_environments" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = environment.ListEnvironmentsRequest.pb( environment.ListEnvironmentsRequest() ) @@ -4624,6 +4677,10 @@ def test_list_environments_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = environment.ListEnvironmentsResponse() + post_with_metadata.return_value = ( + environment.ListEnvironmentsResponse(), + metadata, + ) client.list_environments( request, @@ -4635,6 +4692,7 @@ def test_list_environments_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_environment_rest_bad_request( @@ -4725,10 +4783,13 @@ def test_get_environment_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.EnvironmentsRestInterceptor, "post_get_environment" ) as post, mock.patch.object( + transports.EnvironmentsRestInterceptor, "post_get_environment_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.EnvironmentsRestInterceptor, "pre_get_environment" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = environment.GetEnvironmentRequest.pb( environment.GetEnvironmentRequest() ) @@ -4752,6 +4813,7 @@ def test_get_environment_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = environment.Environment() + post_with_metadata.return_value = environment.Environment(), metadata client.get_environment( request, @@ -4763,6 +4825,7 @@ def test_get_environment_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def 
test_create_environment_rest_bad_request( @@ -4946,10 +5009,13 @@ def test_create_environment_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.EnvironmentsRestInterceptor, "post_create_environment" ) as post, mock.patch.object( + transports.EnvironmentsRestInterceptor, "post_create_environment_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.EnvironmentsRestInterceptor, "pre_create_environment" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = environment.CreateEnvironmentRequest.pb( environment.CreateEnvironmentRequest() ) @@ -4973,6 +5039,7 @@ def test_create_environment_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = environment.Environment() + post_with_metadata.return_value = environment.Environment(), metadata client.create_environment( request, @@ -4984,6 +5051,7 @@ def test_create_environment_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_environment_rest_bad_request( @@ -5171,10 +5239,13 @@ def test_update_environment_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.EnvironmentsRestInterceptor, "post_update_environment" ) as post, mock.patch.object( + transports.EnvironmentsRestInterceptor, "post_update_environment_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.EnvironmentsRestInterceptor, "pre_update_environment" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = environment.UpdateEnvironmentRequest.pb( environment.UpdateEnvironmentRequest() ) @@ -5198,6 +5269,7 @@ def test_update_environment_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = environment.Environment() + post_with_metadata.return_value = environment.Environment(), metadata client.update_environment( request, @@ -5209,6 +5281,7 @@ def test_update_environment_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_environment_rest_bad_request( @@ -5404,10 +5477,14 @@ def test_get_environment_history_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.EnvironmentsRestInterceptor, "post_get_environment_history" ) as post, mock.patch.object( + transports.EnvironmentsRestInterceptor, + "post_get_environment_history_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.EnvironmentsRestInterceptor, "pre_get_environment_history" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = environment.GetEnvironmentHistoryRequest.pb( environment.GetEnvironmentHistoryRequest() ) @@ -5433,6 +5510,7 @@ def test_get_environment_history_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = environment.EnvironmentHistory() + post_with_metadata.return_value = environment.EnvironmentHistory(), metadata client.get_environment_history( request, @@ -5444,6 +5522,7 @@ def test_get_environment_history_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationRequest): diff --git 
a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_fulfillments.py b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_fulfillments.py index a832ae550c6d..744c811328fe 100644 --- a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_fulfillments.py +++ b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_fulfillments.py @@ -63,6 +63,13 @@ from google.cloud.dialogflow_v2.types import fulfillment from google.cloud.dialogflow_v2.types import fulfillment as gcd_fulfillment +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -306,6 +313,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = FulfillmentsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = FulfillmentsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -2414,10 +2464,13 @@ def test_get_fulfillment_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.FulfillmentsRestInterceptor, "post_get_fulfillment" ) as post, mock.patch.object( + transports.FulfillmentsRestInterceptor, "post_get_fulfillment_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.FulfillmentsRestInterceptor, "pre_get_fulfillment" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = fulfillment.GetFulfillmentRequest.pb( fulfillment.GetFulfillmentRequest() ) @@ -2441,6 +2494,7 @@ def test_get_fulfillment_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = fulfillment.Fulfillment() + post_with_metadata.return_value = fulfillment.Fulfillment(), metadata client.get_fulfillment( request, @@ -2452,6 +2506,7 @@ def test_get_fulfillment_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_fulfillment_rest_bad_request( @@ -2620,10 +2675,13 @@ def test_update_fulfillment_rest_interceptors(null_interceptor): ) as transcode, 
mock.patch.object( transports.FulfillmentsRestInterceptor, "post_update_fulfillment" ) as post, mock.patch.object( + transports.FulfillmentsRestInterceptor, "post_update_fulfillment_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.FulfillmentsRestInterceptor, "pre_update_fulfillment" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gcd_fulfillment.UpdateFulfillmentRequest.pb( gcd_fulfillment.UpdateFulfillmentRequest() ) @@ -2649,6 +2707,7 @@ def test_update_fulfillment_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gcd_fulfillment.Fulfillment() + post_with_metadata.return_value = gcd_fulfillment.Fulfillment(), metadata client.update_fulfillment( request, @@ -2660,6 +2719,7 @@ def test_update_fulfillment_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationRequest): diff --git a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_generators.py b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_generators.py index 68784c7734cf..04a862183c28 100644 --- a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_generators.py +++ b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_generators.py @@ -65,6 +65,13 @@ from google.cloud.dialogflow_v2.types import generator from google.cloud.dialogflow_v2.types import generator as gcd_generator +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -300,6 +307,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = GeneratorsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = GeneratorsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -4429,10 +4479,13 @@ def test_create_generator_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.GeneratorsRestInterceptor, "post_create_generator" ) as post, mock.patch.object( + transports.GeneratorsRestInterceptor, "post_create_generator_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.GeneratorsRestInterceptor, "pre_create_generator" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gcd_generator.CreateGeneratorRequest.pb( gcd_generator.CreateGeneratorRequest() ) @@ -4456,6 +4509,7 @@ def test_create_generator_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gcd_generator.Generator() + post_with_metadata.return_value = gcd_generator.Generator(), metadata client.create_generator( request, @@ -4467,6 +4521,7 @@ def test_create_generator_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_generator_rest_bad_request(request_type=generator.GetGeneratorRequest): @@ -4553,10 +4608,13 @@ def test_get_generator_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.GeneratorsRestInterceptor, "post_get_generator" ) as post, mock.patch.object( + transports.GeneratorsRestInterceptor, "post_get_generator_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.GeneratorsRestInterceptor, "pre_get_generator" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = generator.GetGeneratorRequest.pb(generator.GetGeneratorRequest()) transcode.return_value = { "method": "post", @@ -4578,6 +4636,7 @@ def test_get_generator_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = generator.Generator() + post_with_metadata.return_value = generator.Generator(), metadata client.get_generator( request, @@ -4589,6 +4648,7 @@ def test_get_generator_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def 
test_list_generators_rest_bad_request(request_type=generator.ListGeneratorsRequest): @@ -4671,10 +4731,13 @@ def test_list_generators_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.GeneratorsRestInterceptor, "post_list_generators" ) as post, mock.patch.object( + transports.GeneratorsRestInterceptor, "post_list_generators_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.GeneratorsRestInterceptor, "pre_list_generators" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = generator.ListGeneratorsRequest.pb( generator.ListGeneratorsRequest() ) @@ -4700,6 +4763,7 @@ def test_list_generators_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = generator.ListGeneratorsResponse() + post_with_metadata.return_value = generator.ListGeneratorsResponse(), metadata client.list_generators( request, @@ -4711,6 +4775,7 @@ def test_list_generators_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_generator_rest_bad_request( @@ -5022,10 +5087,13 @@ def test_update_generator_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.GeneratorsRestInterceptor, "post_update_generator" ) as post, mock.patch.object( + transports.GeneratorsRestInterceptor, "post_update_generator_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.GeneratorsRestInterceptor, "pre_update_generator" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gcd_generator.UpdateGeneratorRequest.pb( gcd_generator.UpdateGeneratorRequest() ) @@ -5049,6 +5117,7 @@ def test_update_generator_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gcd_generator.Generator() + post_with_metadata.return_value = gcd_generator.Generator(), metadata client.update_generator( request, @@ -5060,6 +5129,7 @@ def test_update_generator_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationRequest): diff --git a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_intents.py b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_intents.py index c48b0af2484d..ab6c061cfab8 100644 --- a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_intents.py +++ b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_intents.py @@ -76,6 +76,13 @@ from google.cloud.dialogflow_v2.types import intent from google.cloud.dialogflow_v2.types import intent as gcd_intent +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -299,6 +306,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = IntentsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = IntentsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -5757,10 +5807,13 @@ def test_list_intents_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.IntentsRestInterceptor, "post_list_intents" ) as post, mock.patch.object( + transports.IntentsRestInterceptor, "post_list_intents_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.IntentsRestInterceptor, "pre_list_intents" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = intent.ListIntentsRequest.pb(intent.ListIntentsRequest()) transcode.return_value = { "method": "post", @@ -5782,6 +5835,7 @@ def test_list_intents_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = intent.ListIntentsResponse() + post_with_metadata.return_value = intent.ListIntentsResponse(), metadata client.list_intents( request, @@ -5793,6 +5847,7 @@ def test_list_intents_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_intent_rest_bad_request(request_type=intent.GetIntentRequest): @@ -5903,10 +5958,13 @@ def test_get_intent_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.IntentsRestInterceptor, "post_get_intent" ) as post, mock.patch.object( + transports.IntentsRestInterceptor, "post_get_intent_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.IntentsRestInterceptor, "pre_get_intent" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = intent.GetIntentRequest.pb(intent.GetIntentRequest()) transcode.return_value = { "method": "post", @@ -5928,6 +5986,7 @@ def test_get_intent_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = intent.Intent() + post_with_metadata.return_value = intent.Intent(), metadata client.get_intent( request, @@ -5939,6 +5998,7 @@ def test_get_intent_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_intent_rest_bad_request(request_type=gcd_intent.CreateIntentRequest): @@ 
-6282,10 +6342,13 @@ def test_create_intent_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.IntentsRestInterceptor, "post_create_intent" ) as post, mock.patch.object( + transports.IntentsRestInterceptor, "post_create_intent_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.IntentsRestInterceptor, "pre_create_intent" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gcd_intent.CreateIntentRequest.pb(gcd_intent.CreateIntentRequest()) transcode.return_value = { "method": "post", @@ -6307,6 +6370,7 @@ def test_create_intent_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gcd_intent.Intent() + post_with_metadata.return_value = gcd_intent.Intent(), metadata client.create_intent( request, @@ -6318,6 +6382,7 @@ def test_create_intent_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_intent_rest_bad_request(request_type=gcd_intent.UpdateIntentRequest): @@ -6661,10 +6726,13 @@ def test_update_intent_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.IntentsRestInterceptor, "post_update_intent" ) as post, mock.patch.object( + transports.IntentsRestInterceptor, "post_update_intent_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.IntentsRestInterceptor, "pre_update_intent" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gcd_intent.UpdateIntentRequest.pb(gcd_intent.UpdateIntentRequest()) transcode.return_value = { "method": "post", @@ -6686,6 +6754,7 @@ def test_update_intent_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gcd_intent.Intent() + post_with_metadata.return_value = gcd_intent.Intent(), metadata client.update_intent( request, @@ -6697,6 +6766,7 @@ def test_update_intent_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_intent_rest_bad_request(request_type=intent.DeleteIntentRequest): @@ -6878,10 +6948,13 @@ def test_batch_update_intents_rest_interceptors(null_interceptor): ), mock.patch.object( transports.IntentsRestInterceptor, "post_batch_update_intents" ) as post, mock.patch.object( + transports.IntentsRestInterceptor, "post_batch_update_intents_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.IntentsRestInterceptor, "pre_batch_update_intents" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = intent.BatchUpdateIntentsRequest.pb( intent.BatchUpdateIntentsRequest() ) @@ -6905,6 +6978,7 @@ def test_batch_update_intents_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.batch_update_intents( request, @@ -6916,6 +6990,7 @@ def test_batch_update_intents_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_batch_delete_intents_rest_bad_request( @@ -6994,10 +7069,13 @@ def test_batch_delete_intents_rest_interceptors(null_interceptor): ), mock.patch.object( transports.IntentsRestInterceptor, "post_batch_delete_intents" ) as post, mock.patch.object( + 
transports.IntentsRestInterceptor, "post_batch_delete_intents_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.IntentsRestInterceptor, "pre_batch_delete_intents" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = intent.BatchDeleteIntentsRequest.pb( intent.BatchDeleteIntentsRequest() ) @@ -7021,6 +7099,7 @@ def test_batch_delete_intents_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.batch_delete_intents( request, @@ -7032,6 +7111,7 @@ def test_batch_delete_intents_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationRequest): diff --git a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_knowledge_bases.py b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_knowledge_bases.py index 22b2c7ff9475..98c4cb62ce3c 100644 --- a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_knowledge_bases.py +++ b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_knowledge_bases.py @@ -64,6 +64,13 @@ from google.cloud.dialogflow_v2.types import knowledge_base as gcd_knowledge_base from google.cloud.dialogflow_v2.types import knowledge_base +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -322,6 +329,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = KnowledgeBasesClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = KnowledgeBasesClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -4479,10 +4529,14 @@ def test_list_knowledge_bases_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.KnowledgeBasesRestInterceptor, "post_list_knowledge_bases" ) as post, mock.patch.object( + transports.KnowledgeBasesRestInterceptor, + "post_list_knowledge_bases_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.KnowledgeBasesRestInterceptor, "pre_list_knowledge_bases" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = knowledge_base.ListKnowledgeBasesRequest.pb( knowledge_base.ListKnowledgeBasesRequest() ) @@ -4508,6 +4562,10 @@ def test_list_knowledge_bases_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = knowledge_base.ListKnowledgeBasesResponse() + post_with_metadata.return_value = ( + knowledge_base.ListKnowledgeBasesResponse(), + metadata, + ) client.list_knowledge_bases( request, @@ -4519,6 +4577,7 @@ def test_list_knowledge_bases_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_knowledge_base_rest_bad_request( @@ -4607,10 +4666,14 @@ def test_get_knowledge_base_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.KnowledgeBasesRestInterceptor, "post_get_knowledge_base" ) as post, mock.patch.object( + transports.KnowledgeBasesRestInterceptor, + "post_get_knowledge_base_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.KnowledgeBasesRestInterceptor, "pre_get_knowledge_base" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = knowledge_base.GetKnowledgeBaseRequest.pb( knowledge_base.GetKnowledgeBaseRequest() ) @@ -4636,6 +4699,7 @@ def test_get_knowledge_base_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = knowledge_base.KnowledgeBase() + post_with_metadata.return_value = knowledge_base.KnowledgeBase(), metadata client.get_knowledge_base( request, @@ -4647,6 +4711,7 @@ def 
test_get_knowledge_base_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_knowledge_base_rest_bad_request( @@ -4809,10 +4874,14 @@ def test_create_knowledge_base_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.KnowledgeBasesRestInterceptor, "post_create_knowledge_base" ) as post, mock.patch.object( + transports.KnowledgeBasesRestInterceptor, + "post_create_knowledge_base_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.KnowledgeBasesRestInterceptor, "pre_create_knowledge_base" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gcd_knowledge_base.CreateKnowledgeBaseRequest.pb( gcd_knowledge_base.CreateKnowledgeBaseRequest() ) @@ -4838,6 +4907,7 @@ def test_create_knowledge_base_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gcd_knowledge_base.KnowledgeBase() + post_with_metadata.return_value = gcd_knowledge_base.KnowledgeBase(), metadata client.create_knowledge_base( request, @@ -4849,6 +4919,7 @@ def test_create_knowledge_base_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_knowledge_base_rest_bad_request( @@ -5124,10 +5195,14 @@ def test_update_knowledge_base_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.KnowledgeBasesRestInterceptor, "post_update_knowledge_base" ) as post, mock.patch.object( + transports.KnowledgeBasesRestInterceptor, + "post_update_knowledge_base_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.KnowledgeBasesRestInterceptor, "pre_update_knowledge_base" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gcd_knowledge_base.UpdateKnowledgeBaseRequest.pb( gcd_knowledge_base.UpdateKnowledgeBaseRequest() ) @@ -5153,6 +5228,7 @@ def test_update_knowledge_base_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gcd_knowledge_base.KnowledgeBase() + post_with_metadata.return_value = gcd_knowledge_base.KnowledgeBase(), metadata client.update_knowledge_base( request, @@ -5164,6 +5240,7 @@ def test_update_knowledge_base_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationRequest): diff --git a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_participants.py b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_participants.py index 182d10d6f569..5d04eb382a92 100644 --- a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_participants.py +++ b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_participants.py @@ -68,6 +68,13 @@ from google.cloud.dialogflow_v2.types import participant as gcd_participant from google.cloud.dialogflow_v2.types import session, session_entity_type +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -311,6 +318,49 @@ def test__get_universe_domain(): assert 
str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = ParticipantsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = ParticipantsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -6905,10 +6955,13 @@ def test_create_participant_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ParticipantsRestInterceptor, "post_create_participant" ) as post, mock.patch.object( + transports.ParticipantsRestInterceptor, "post_create_participant_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ParticipantsRestInterceptor, "pre_create_participant" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gcd_participant.CreateParticipantRequest.pb( gcd_participant.CreateParticipantRequest() ) @@ -6934,6 +6987,7 @@ def test_create_participant_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gcd_participant.Participant() + post_with_metadata.return_value = gcd_participant.Participant(), metadata client.create_participant( request, @@ -6945,6 +6999,7 @@ def test_create_participant_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_participant_rest_bad_request( @@ -7039,10 +7094,13 @@ def test_get_participant_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ParticipantsRestInterceptor, "post_get_participant" ) as post, mock.patch.object( + transports.ParticipantsRestInterceptor, "post_get_participant_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ParticipantsRestInterceptor, "pre_get_participant" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = participant.GetParticipantRequest.pb( participant.GetParticipantRequest() ) @@ -7066,6 +7124,7 @@ def test_get_participant_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = participant.Participant() + post_with_metadata.return_value = participant.Participant(), metadata client.get_participant( request, @@ -7077,6 +7136,7 @@ def test_get_participant_rest_interceptors(null_interceptor): pre.assert_called_once() 
post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_participants_rest_bad_request( @@ -7161,10 +7221,13 @@ def test_list_participants_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ParticipantsRestInterceptor, "post_list_participants" ) as post, mock.patch.object( + transports.ParticipantsRestInterceptor, "post_list_participants_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ParticipantsRestInterceptor, "pre_list_participants" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = participant.ListParticipantsRequest.pb( participant.ListParticipantsRequest() ) @@ -7190,6 +7253,10 @@ def test_list_participants_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = participant.ListParticipantsResponse() + post_with_metadata.return_value = ( + participant.ListParticipantsResponse(), + metadata, + ) client.list_participants( request, @@ -7201,6 +7268,7 @@ def test_list_participants_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_participant_rest_bad_request( @@ -7373,10 +7441,13 @@ def test_update_participant_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ParticipantsRestInterceptor, "post_update_participant" ) as post, mock.patch.object( + transports.ParticipantsRestInterceptor, "post_update_participant_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ParticipantsRestInterceptor, "pre_update_participant" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gcd_participant.UpdateParticipantRequest.pb( gcd_participant.UpdateParticipantRequest() ) @@ -7402,6 +7473,7 @@ def test_update_participant_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gcd_participant.Participant() + post_with_metadata.return_value = gcd_participant.Participant(), metadata client.update_participant( request, @@ -7413,6 +7485,7 @@ def test_update_participant_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_analyze_content_rest_bad_request( @@ -7501,10 +7574,13 @@ def test_analyze_content_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ParticipantsRestInterceptor, "post_analyze_content" ) as post, mock.patch.object( + transports.ParticipantsRestInterceptor, "post_analyze_content_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ParticipantsRestInterceptor, "pre_analyze_content" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gcd_participant.AnalyzeContentRequest.pb( gcd_participant.AnalyzeContentRequest() ) @@ -7530,6 +7606,10 @@ def test_analyze_content_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gcd_participant.AnalyzeContentResponse() + post_with_metadata.return_value = ( + gcd_participant.AnalyzeContentResponse(), + metadata, + ) client.analyze_content( request, @@ -7541,6 +7621,7 @@ def test_analyze_content_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_streaming_analyze_content_rest_error(): @@ -7643,10 +7724,13 @@ def 
test_suggest_articles_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ParticipantsRestInterceptor, "post_suggest_articles" ) as post, mock.patch.object( + transports.ParticipantsRestInterceptor, "post_suggest_articles_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ParticipantsRestInterceptor, "pre_suggest_articles" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = participant.SuggestArticlesRequest.pb( participant.SuggestArticlesRequest() ) @@ -7672,6 +7756,10 @@ def test_suggest_articles_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = participant.SuggestArticlesResponse() + post_with_metadata.return_value = ( + participant.SuggestArticlesResponse(), + metadata, + ) client.suggest_articles( request, @@ -7683,6 +7771,7 @@ def test_suggest_articles_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_suggest_faq_answers_rest_bad_request( @@ -7773,10 +7862,13 @@ def test_suggest_faq_answers_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ParticipantsRestInterceptor, "post_suggest_faq_answers" ) as post, mock.patch.object( + transports.ParticipantsRestInterceptor, "post_suggest_faq_answers_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ParticipantsRestInterceptor, "pre_suggest_faq_answers" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = participant.SuggestFaqAnswersRequest.pb( participant.SuggestFaqAnswersRequest() ) @@ -7802,6 +7894,10 @@ def test_suggest_faq_answers_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = participant.SuggestFaqAnswersResponse() + post_with_metadata.return_value = ( + participant.SuggestFaqAnswersResponse(), + metadata, + ) client.suggest_faq_answers( request, @@ -7813,6 +7909,7 @@ def test_suggest_faq_answers_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_suggest_smart_replies_rest_bad_request( @@ -7903,10 +8000,14 @@ def test_suggest_smart_replies_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ParticipantsRestInterceptor, "post_suggest_smart_replies" ) as post, mock.patch.object( + transports.ParticipantsRestInterceptor, + "post_suggest_smart_replies_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ParticipantsRestInterceptor, "pre_suggest_smart_replies" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = participant.SuggestSmartRepliesRequest.pb( participant.SuggestSmartRepliesRequest() ) @@ -7932,6 +8033,10 @@ def test_suggest_smart_replies_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = participant.SuggestSmartRepliesResponse() + post_with_metadata.return_value = ( + participant.SuggestSmartRepliesResponse(), + metadata, + ) client.suggest_smart_replies( request, @@ -7943,6 +8048,7 @@ def test_suggest_smart_replies_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_suggest_knowledge_assist_rest_bad_request( @@ -8033,10 +8139,14 @@ def 
test_suggest_knowledge_assist_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ParticipantsRestInterceptor, "post_suggest_knowledge_assist" ) as post, mock.patch.object( + transports.ParticipantsRestInterceptor, + "post_suggest_knowledge_assist_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ParticipantsRestInterceptor, "pre_suggest_knowledge_assist" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = participant.SuggestKnowledgeAssistRequest.pb( participant.SuggestKnowledgeAssistRequest() ) @@ -8062,6 +8172,10 @@ def test_suggest_knowledge_assist_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = participant.SuggestKnowledgeAssistResponse() + post_with_metadata.return_value = ( + participant.SuggestKnowledgeAssistResponse(), + metadata, + ) client.suggest_knowledge_assist( request, @@ -8073,6 +8187,7 @@ def test_suggest_knowledge_assist_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationRequest): diff --git a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_session_entity_types.py b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_session_entity_types.py index e7f5d4f4c89b..c175daae464d 100644 --- a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_session_entity_types.py +++ b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_session_entity_types.py @@ -67,6 +67,13 @@ from google.cloud.dialogflow_v2.types import entity_type from google.cloud.dialogflow_v2.types import session_entity_type +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -338,6 +345,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = SessionEntityTypesClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = SessionEntityTypesClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -4579,10 +4629,14 @@ def test_list_session_entity_types_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.SessionEntityTypesRestInterceptor, "post_list_session_entity_types" ) as post, mock.patch.object( + transports.SessionEntityTypesRestInterceptor, + "post_list_session_entity_types_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.SessionEntityTypesRestInterceptor, "pre_list_session_entity_types" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = session_entity_type.ListSessionEntityTypesRequest.pb( session_entity_type.ListSessionEntityTypesRequest() ) @@ -4608,6 +4662,10 @@ def test_list_session_entity_types_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = session_entity_type.ListSessionEntityTypesResponse() + post_with_metadata.return_value = ( + session_entity_type.ListSessionEntityTypesResponse(), + metadata, + ) client.list_session_entity_types( request, @@ -4619,6 +4677,7 @@ def test_list_session_entity_types_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_session_entity_type_rest_bad_request( @@ -4712,10 +4771,14 @@ def test_get_session_entity_type_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.SessionEntityTypesRestInterceptor, "post_get_session_entity_type" ) as post, mock.patch.object( + transports.SessionEntityTypesRestInterceptor, + "post_get_session_entity_type_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.SessionEntityTypesRestInterceptor, "pre_get_session_entity_type" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = session_entity_type.GetSessionEntityTypeRequest.pb( session_entity_type.GetSessionEntityTypeRequest() ) @@ -4741,6 +4804,10 @@ def test_get_session_entity_type_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = session_entity_type.SessionEntityType() + 
post_with_metadata.return_value = ( + session_entity_type.SessionEntityType(), + metadata, + ) client.get_session_entity_type( request, @@ -4752,6 +4819,7 @@ def test_get_session_entity_type_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_session_entity_type_rest_bad_request( @@ -4917,10 +4985,14 @@ def test_create_session_entity_type_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.SessionEntityTypesRestInterceptor, "post_create_session_entity_type" ) as post, mock.patch.object( + transports.SessionEntityTypesRestInterceptor, + "post_create_session_entity_type_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.SessionEntityTypesRestInterceptor, "pre_create_session_entity_type" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gcd_session_entity_type.CreateSessionEntityTypeRequest.pb( gcd_session_entity_type.CreateSessionEntityTypeRequest() ) @@ -4946,6 +5018,10 @@ def test_create_session_entity_type_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gcd_session_entity_type.SessionEntityType() + post_with_metadata.return_value = ( + gcd_session_entity_type.SessionEntityType(), + metadata, + ) client.create_session_entity_type( request, @@ -4957,6 +5033,7 @@ def test_create_session_entity_type_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_session_entity_type_rest_bad_request( @@ -5130,10 +5207,14 @@ def test_update_session_entity_type_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.SessionEntityTypesRestInterceptor, "post_update_session_entity_type" ) as post, mock.patch.object( + transports.SessionEntityTypesRestInterceptor, + "post_update_session_entity_type_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.SessionEntityTypesRestInterceptor, "pre_update_session_entity_type" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gcd_session_entity_type.UpdateSessionEntityTypeRequest.pb( gcd_session_entity_type.UpdateSessionEntityTypeRequest() ) @@ -5159,6 +5240,10 @@ def test_update_session_entity_type_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gcd_session_entity_type.SessionEntityType() + post_with_metadata.return_value = ( + gcd_session_entity_type.SessionEntityType(), + metadata, + ) client.update_session_entity_type( request, @@ -5170,6 +5255,7 @@ def test_update_session_entity_type_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_session_entity_type_rest_bad_request( diff --git a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_sessions.py b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_sessions.py index abe9ecdfd3a0..c8a4f8479d52 100644 --- a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_sessions.py +++ b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_sessions.py @@ -68,6 +68,13 @@ from google.cloud.dialogflow_v2.types import session as gcd_session from google.cloud.dialogflow_v2.types import session_entity_type +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + 
"credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -298,6 +305,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = SessionsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = SessionsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -2011,10 +2061,13 @@ def test_detect_intent_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.SessionsRestInterceptor, "post_detect_intent" ) as post, mock.patch.object( + transports.SessionsRestInterceptor, "post_detect_intent_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.SessionsRestInterceptor, "pre_detect_intent" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gcd_session.DetectIntentRequest.pb( gcd_session.DetectIntentRequest() ) @@ -2040,6 +2093,7 @@ def test_detect_intent_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gcd_session.DetectIntentResponse() + post_with_metadata.return_value = gcd_session.DetectIntentResponse(), metadata client.detect_intent( request, @@ -2051,6 +2105,7 @@ def test_detect_intent_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_streaming_detect_intent_rest_error(): diff --git a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_versions.py b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_versions.py index ad52cbe58c78..eb40cff19f99 100644 --- a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_versions.py +++ b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2/test_versions.py @@ -65,6 +65,13 @@ from google.cloud.dialogflow_v2.types import version from google.cloud.dialogflow_v2.types import version as gcd_version +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async 
def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -295,6 +302,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = VersionsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = VersionsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -4268,10 +4318,13 @@ def test_list_versions_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.VersionsRestInterceptor, "post_list_versions" ) as post, mock.patch.object( + transports.VersionsRestInterceptor, "post_list_versions_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.VersionsRestInterceptor, "pre_list_versions" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = version.ListVersionsRequest.pb(version.ListVersionsRequest()) transcode.return_value = { "method": "post", @@ -4295,6 +4348,7 @@ def test_list_versions_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = version.ListVersionsResponse() + post_with_metadata.return_value = version.ListVersionsResponse(), metadata client.list_versions( request, @@ -4306,6 +4360,7 @@ def test_list_versions_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_version_rest_bad_request(request_type=version.GetVersionRequest): @@ -4392,10 +4447,13 @@ def test_get_version_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.VersionsRestInterceptor, "post_get_version" ) as post, mock.patch.object( + transports.VersionsRestInterceptor, "post_get_version_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.VersionsRestInterceptor, "pre_get_version" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = version.GetVersionRequest.pb(version.GetVersionRequest()) transcode.return_value = { "method": "post", @@ -4417,6 +4475,7 @@ def test_get_version_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = version.Version() + post_with_metadata.return_value = version.Version(), metadata client.get_version( request, @@ 
-4428,6 +4487,7 @@ def test_get_version_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_version_rest_bad_request(request_type=gcd_version.CreateVersionRequest): @@ -4588,10 +4648,13 @@ def test_create_version_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.VersionsRestInterceptor, "post_create_version" ) as post, mock.patch.object( + transports.VersionsRestInterceptor, "post_create_version_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.VersionsRestInterceptor, "pre_create_version" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gcd_version.CreateVersionRequest.pb( gcd_version.CreateVersionRequest() ) @@ -4615,6 +4678,7 @@ def test_create_version_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gcd_version.Version() + post_with_metadata.return_value = gcd_version.Version(), metadata client.create_version( request, @@ -4626,6 +4690,7 @@ def test_create_version_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_version_rest_bad_request(request_type=gcd_version.UpdateVersionRequest): @@ -4786,10 +4851,13 @@ def test_update_version_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.VersionsRestInterceptor, "post_update_version" ) as post, mock.patch.object( + transports.VersionsRestInterceptor, "post_update_version_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.VersionsRestInterceptor, "pre_update_version" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gcd_version.UpdateVersionRequest.pb( gcd_version.UpdateVersionRequest() ) @@ -4813,6 +4881,7 @@ def test_update_version_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gcd_version.Version() + post_with_metadata.return_value = gcd_version.Version(), metadata client.update_version( request, @@ -4824,6 +4893,7 @@ def test_update_version_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_version_rest_bad_request(request_type=version.DeleteVersionRequest): diff --git a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_agents.py b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_agents.py index 53f89bf61cbd..fa9ebd851f13 100644 --- a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_agents.py +++ b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_agents.py @@ -76,6 +76,13 @@ from google.cloud.dialogflow_v2beta1.types import agent as gcd_agent from google.cloud.dialogflow_v2beta1.types import validation_result +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -298,6 +305,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = AgentsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = AgentsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -6126,10 +6176,13 @@ def test_get_agent_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AgentsRestInterceptor, "post_get_agent" ) as post, mock.patch.object( + transports.AgentsRestInterceptor, "post_get_agent_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AgentsRestInterceptor, "pre_get_agent" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = agent.GetAgentRequest.pb(agent.GetAgentRequest()) transcode.return_value = { "method": "post", @@ -6151,6 +6204,7 @@ def test_get_agent_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = agent.Agent() + post_with_metadata.return_value = agent.Agent(), metadata client.get_agent( request, @@ -6162,6 +6216,7 @@ def test_get_agent_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_set_agent_rest_bad_request(request_type=gcd_agent.SetAgentRequest): @@ -6350,10 +6405,13 @@ def test_set_agent_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AgentsRestInterceptor, "post_set_agent" ) as post, mock.patch.object( + transports.AgentsRestInterceptor, "post_set_agent_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AgentsRestInterceptor, "pre_set_agent" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gcd_agent.SetAgentRequest.pb(gcd_agent.SetAgentRequest()) transcode.return_value = { "method": "post", @@ -6375,6 +6433,7 @@ def test_set_agent_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gcd_agent.Agent() + post_with_metadata.return_value = gcd_agent.Agent(), metadata client.set_agent( request, @@ -6386,6 +6445,7 @@ def test_set_agent_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_agent_rest_bad_request(request_type=agent.DeleteAgentRequest): @@ -6569,10 +6629,13 @@ def 
test_search_agents_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AgentsRestInterceptor, "post_search_agents" ) as post, mock.patch.object( + transports.AgentsRestInterceptor, "post_search_agents_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AgentsRestInterceptor, "pre_search_agents" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = agent.SearchAgentsRequest.pb(agent.SearchAgentsRequest()) transcode.return_value = { "method": "post", @@ -6594,6 +6657,7 @@ def test_search_agents_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = agent.SearchAgentsResponse() + post_with_metadata.return_value = agent.SearchAgentsResponse(), metadata client.search_agents( request, @@ -6605,6 +6669,7 @@ def test_search_agents_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_train_agent_rest_bad_request(request_type=agent.TrainAgentRequest): @@ -6681,10 +6746,13 @@ def test_train_agent_rest_interceptors(null_interceptor): ), mock.patch.object( transports.AgentsRestInterceptor, "post_train_agent" ) as post, mock.patch.object( + transports.AgentsRestInterceptor, "post_train_agent_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AgentsRestInterceptor, "pre_train_agent" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = agent.TrainAgentRequest.pb(agent.TrainAgentRequest()) transcode.return_value = { "method": "post", @@ -6706,6 +6774,7 @@ def test_train_agent_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.train_agent( request, @@ -6717,6 +6786,7 @@ def test_train_agent_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_export_agent_rest_bad_request(request_type=agent.ExportAgentRequest): @@ -6793,10 +6863,13 @@ def test_export_agent_rest_interceptors(null_interceptor): ), mock.patch.object( transports.AgentsRestInterceptor, "post_export_agent" ) as post, mock.patch.object( + transports.AgentsRestInterceptor, "post_export_agent_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AgentsRestInterceptor, "pre_export_agent" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = agent.ExportAgentRequest.pb(agent.ExportAgentRequest()) transcode.return_value = { "method": "post", @@ -6818,6 +6891,7 @@ def test_export_agent_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.export_agent( request, @@ -6829,6 +6903,7 @@ def test_export_agent_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_import_agent_rest_bad_request(request_type=agent.ImportAgentRequest): @@ -6905,10 +6980,13 @@ def test_import_agent_rest_interceptors(null_interceptor): ), mock.patch.object( transports.AgentsRestInterceptor, "post_import_agent" ) as post, mock.patch.object( + transports.AgentsRestInterceptor, "post_import_agent_with_metadata" + ) as 
post_with_metadata, mock.patch.object( transports.AgentsRestInterceptor, "pre_import_agent" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = agent.ImportAgentRequest.pb(agent.ImportAgentRequest()) transcode.return_value = { "method": "post", @@ -6930,6 +7008,7 @@ def test_import_agent_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.import_agent( request, @@ -6941,6 +7020,7 @@ def test_import_agent_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_restore_agent_rest_bad_request(request_type=agent.RestoreAgentRequest): @@ -7017,10 +7097,13 @@ def test_restore_agent_rest_interceptors(null_interceptor): ), mock.patch.object( transports.AgentsRestInterceptor, "post_restore_agent" ) as post, mock.patch.object( + transports.AgentsRestInterceptor, "post_restore_agent_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AgentsRestInterceptor, "pre_restore_agent" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = agent.RestoreAgentRequest.pb(agent.RestoreAgentRequest()) transcode.return_value = { "method": "post", @@ -7042,6 +7125,7 @@ def test_restore_agent_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.restore_agent( request, @@ -7053,6 +7137,7 @@ def test_restore_agent_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_validation_result_rest_bad_request( @@ -7132,10 +7217,13 @@ def test_get_validation_result_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AgentsRestInterceptor, "post_get_validation_result" ) as post, mock.patch.object( + transports.AgentsRestInterceptor, "post_get_validation_result_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AgentsRestInterceptor, "pre_get_validation_result" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = agent.GetValidationResultRequest.pb( agent.GetValidationResultRequest() ) @@ -7161,6 +7249,7 @@ def test_get_validation_result_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = validation_result.ValidationResult() + post_with_metadata.return_value = validation_result.ValidationResult(), metadata client.get_validation_result( request, @@ -7172,6 +7261,7 @@ def test_get_validation_result_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationRequest): diff --git a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_answer_records.py b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_answer_records.py index 361bc2e6d603..d324e2810216 100644 --- a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_answer_records.py +++ b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_answer_records.py @@ -67,6 +67,13 @@ from 
google.cloud.dialogflow_v2beta1.types import answer_record as gcd_answer_record from google.cloud.dialogflow_v2beta1.types import answer_record +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -321,6 +328,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = AnswerRecordsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = AnswerRecordsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -2961,10 +3011,13 @@ def test_get_answer_record_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AnswerRecordsRestInterceptor, "post_get_answer_record" ) as post, mock.patch.object( + transports.AnswerRecordsRestInterceptor, "post_get_answer_record_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AnswerRecordsRestInterceptor, "pre_get_answer_record" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = answer_record.GetAnswerRecordRequest.pb( answer_record.GetAnswerRecordRequest() ) @@ -2988,6 +3041,7 @@ def test_get_answer_record_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = answer_record.AnswerRecord() + post_with_metadata.return_value = answer_record.AnswerRecord(), metadata client.get_answer_record( request, @@ -2999,6 +3053,7 @@ def test_get_answer_record_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_answer_records_rest_bad_request( @@ -3083,10 +3138,14 @@ def test_list_answer_records_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AnswerRecordsRestInterceptor, "post_list_answer_records" ) as post, mock.patch.object( + transports.AnswerRecordsRestInterceptor, + "post_list_answer_records_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AnswerRecordsRestInterceptor, "pre_list_answer_records" ) as pre: pre.assert_not_called() post.assert_not_called() + 
post_with_metadata.assert_not_called() pb_message = answer_record.ListAnswerRecordsRequest.pb( answer_record.ListAnswerRecordsRequest() ) @@ -3112,6 +3171,10 @@ def test_list_answer_records_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = answer_record.ListAnswerRecordsResponse() + post_with_metadata.return_value = ( + answer_record.ListAnswerRecordsResponse(), + metadata, + ) client.list_answer_records( request, @@ -3123,6 +3186,7 @@ def test_list_answer_records_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_answer_record_rest_bad_request( @@ -3591,10 +3655,14 @@ def test_update_answer_record_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AnswerRecordsRestInterceptor, "post_update_answer_record" ) as post, mock.patch.object( + transports.AnswerRecordsRestInterceptor, + "post_update_answer_record_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AnswerRecordsRestInterceptor, "pre_update_answer_record" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gcd_answer_record.UpdateAnswerRecordRequest.pb( gcd_answer_record.UpdateAnswerRecordRequest() ) @@ -3620,6 +3688,7 @@ def test_update_answer_record_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gcd_answer_record.AnswerRecord() + post_with_metadata.return_value = gcd_answer_record.AnswerRecord(), metadata client.update_answer_record( request, @@ -3631,6 +3700,7 @@ def test_update_answer_record_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationRequest): diff --git a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_contexts.py b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_contexts.py index 02852788f15c..9af8343f998e 100644 --- a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_contexts.py +++ b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_contexts.py @@ -65,6 +65,13 @@ from google.cloud.dialogflow_v2beta1.types import context from google.cloud.dialogflow_v2beta1.types import context as gcd_context +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -295,6 +302,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
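The test__add_cred_info_for_auth_errors blocks added for each client in this change all assert the same helper behavior: when a call fails with 401, 403 or 404 and the credential object exposes get_cred_info(), the JSON-serialized credential info is appended to error.details; any other status code, or a credential without that hook, leaves the details unchanged. A minimal standalone sketch of that behavior, for orientation only (the real helper is the generated client's _add_cred_info_for_auth_errors; this sketch is not the library implementation):

import json

def add_cred_info_for_auth_errors(error, credentials):
    # Only authentication/authorization-style failures get the extra detail.
    if error.code not in (401, 403, 404):
        return
    # Credentials that do not implement get_cred_info() are left alone.
    get_cred_info = getattr(credentials, "get_cred_info", None)
    cred_info = get_cred_info() if callable(get_cred_info) else None
    if cred_info:
        # Matches the parametrized tests: details gains json.dumps(CRED_INFO_JSON).
        error.details.append(json.dumps(cred_info))
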
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = ContextsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = ContextsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -4799,10 +4849,13 @@ def test_list_contexts_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ContextsRestInterceptor, "post_list_contexts" ) as post, mock.patch.object( + transports.ContextsRestInterceptor, "post_list_contexts_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ContextsRestInterceptor, "pre_list_contexts" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = context.ListContextsRequest.pb(context.ListContextsRequest()) transcode.return_value = { "method": "post", @@ -4826,6 +4879,7 @@ def test_list_contexts_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = context.ListContextsResponse() + post_with_metadata.return_value = context.ListContextsResponse(), metadata client.list_contexts( request, @@ -4837,6 +4891,7 @@ def test_list_contexts_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_context_rest_bad_request(request_type=context.GetContextRequest): @@ -4919,10 +4974,13 @@ def test_get_context_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ContextsRestInterceptor, "post_get_context" ) as post, mock.patch.object( + transports.ContextsRestInterceptor, "post_get_context_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ContextsRestInterceptor, "pre_get_context" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = context.GetContextRequest.pb(context.GetContextRequest()) transcode.return_value = { "method": "post", @@ -4944,6 +5002,7 @@ def test_get_context_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = context.Context() + post_with_metadata.return_value = context.Context(), metadata client.get_context( request, @@ -4955,6 +5014,7 @@ def test_get_context_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def 
test_create_context_rest_bad_request(request_type=gcd_context.CreateContextRequest): @@ -5109,10 +5169,13 @@ def test_create_context_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ContextsRestInterceptor, "post_create_context" ) as post, mock.patch.object( + transports.ContextsRestInterceptor, "post_create_context_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ContextsRestInterceptor, "pre_create_context" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gcd_context.CreateContextRequest.pb( gcd_context.CreateContextRequest() ) @@ -5136,6 +5199,7 @@ def test_create_context_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gcd_context.Context() + post_with_metadata.return_value = gcd_context.Context(), metadata client.create_context( request, @@ -5147,6 +5211,7 @@ def test_create_context_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_context_rest_bad_request(request_type=gcd_context.UpdateContextRequest): @@ -5305,10 +5370,13 @@ def test_update_context_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ContextsRestInterceptor, "post_update_context" ) as post, mock.patch.object( + transports.ContextsRestInterceptor, "post_update_context_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ContextsRestInterceptor, "pre_update_context" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gcd_context.UpdateContextRequest.pb( gcd_context.UpdateContextRequest() ) @@ -5332,6 +5400,7 @@ def test_update_context_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gcd_context.Context() + post_with_metadata.return_value = gcd_context.Context(), metadata client.update_context( request, @@ -5343,6 +5412,7 @@ def test_update_context_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_context_rest_bad_request(request_type=context.DeleteContextRequest): diff --git a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_conversation_profiles.py b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_conversation_profiles.py index 82bde4e771fb..b5c9d69273f4 100644 --- a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_conversation_profiles.py +++ b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_conversation_profiles.py @@ -79,6 +79,13 @@ from google.cloud.dialogflow_v2beta1.types import conversation_profile from google.cloud.dialogflow_v2beta1.types import participant +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -352,6 +359,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = ConversationProfilesClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = ConversationProfilesClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -5911,10 +5961,14 @@ def test_list_conversation_profiles_rest_interceptors(null_interceptor): transports.ConversationProfilesRestInterceptor, "post_list_conversation_profiles", ) as post, mock.patch.object( + transports.ConversationProfilesRestInterceptor, + "post_list_conversation_profiles_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ConversationProfilesRestInterceptor, "pre_list_conversation_profiles" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = conversation_profile.ListConversationProfilesRequest.pb( conversation_profile.ListConversationProfilesRequest() ) @@ -5940,6 +5994,10 @@ def test_list_conversation_profiles_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = conversation_profile.ListConversationProfilesResponse() + post_with_metadata.return_value = ( + conversation_profile.ListConversationProfilesResponse(), + metadata, + ) client.list_conversation_profiles( request, @@ -5951,6 +6009,7 @@ def test_list_conversation_profiles_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_conversation_profile_rest_bad_request( @@ -6043,10 +6102,14 @@ def test_get_conversation_profile_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ConversationProfilesRestInterceptor, "post_get_conversation_profile" ) as post, mock.patch.object( + transports.ConversationProfilesRestInterceptor, + "post_get_conversation_profile_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ConversationProfilesRestInterceptor, "pre_get_conversation_profile" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = conversation_profile.GetConversationProfileRequest.pb( conversation_profile.GetConversationProfileRequest() ) @@ -6072,6 +6135,10 @@ def test_get_conversation_profile_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = conversation_profile.ConversationProfile() + 
post_with_metadata.return_value = ( + conversation_profile.ConversationProfile(), + metadata, + ) client.get_conversation_profile( request, @@ -6083,6 +6150,7 @@ def test_get_conversation_profile_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_conversation_profile_rest_bad_request( @@ -6201,6 +6269,7 @@ def test_create_conversation_profile_rest_call_success(request_type): "notification_config": {}, "logging_config": {"enable_stackdriver_logging": True}, "new_message_event_notification_config": {}, + "new_recognition_result_notification_config": {}, "stt_config": { "speech_model_variant": 1, "model": "model_value", @@ -6348,11 +6417,15 @@ def test_create_conversation_profile_rest_interceptors(null_interceptor): transports.ConversationProfilesRestInterceptor, "post_create_conversation_profile", ) as post, mock.patch.object( + transports.ConversationProfilesRestInterceptor, + "post_create_conversation_profile_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ConversationProfilesRestInterceptor, "pre_create_conversation_profile", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gcd_conversation_profile.CreateConversationProfileRequest.pb( gcd_conversation_profile.CreateConversationProfileRequest() ) @@ -6378,6 +6451,10 @@ def test_create_conversation_profile_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gcd_conversation_profile.ConversationProfile() + post_with_metadata.return_value = ( + gcd_conversation_profile.ConversationProfile(), + metadata, + ) client.create_conversation_profile( request, @@ -6389,6 +6466,7 @@ def test_create_conversation_profile_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_conversation_profile_rest_bad_request( @@ -6515,6 +6593,7 @@ def test_update_conversation_profile_rest_call_success(request_type): "notification_config": {}, "logging_config": {"enable_stackdriver_logging": True}, "new_message_event_notification_config": {}, + "new_recognition_result_notification_config": {}, "stt_config": { "speech_model_variant": 1, "model": "model_value", @@ -6662,11 +6741,15 @@ def test_update_conversation_profile_rest_interceptors(null_interceptor): transports.ConversationProfilesRestInterceptor, "post_update_conversation_profile", ) as post, mock.patch.object( + transports.ConversationProfilesRestInterceptor, + "post_update_conversation_profile_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ConversationProfilesRestInterceptor, "pre_update_conversation_profile", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gcd_conversation_profile.UpdateConversationProfileRequest.pb( gcd_conversation_profile.UpdateConversationProfileRequest() ) @@ -6692,6 +6775,10 @@ def test_update_conversation_profile_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gcd_conversation_profile.ConversationProfile() + post_with_metadata.return_value = ( + gcd_conversation_profile.ConversationProfile(), + metadata, + ) client.update_conversation_profile( request, @@ -6703,6 +6790,7 @@ def test_update_conversation_profile_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + 
post_with_metadata.assert_called_once() def test_delete_conversation_profile_rest_bad_request( @@ -6898,11 +6986,15 @@ def test_set_suggestion_feature_config_rest_interceptors(null_interceptor): transports.ConversationProfilesRestInterceptor, "post_set_suggestion_feature_config", ) as post, mock.patch.object( + transports.ConversationProfilesRestInterceptor, + "post_set_suggestion_feature_config_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ConversationProfilesRestInterceptor, "pre_set_suggestion_feature_config", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gcd_conversation_profile.SetSuggestionFeatureConfigRequest.pb( gcd_conversation_profile.SetSuggestionFeatureConfigRequest() ) @@ -6926,6 +7018,7 @@ def test_set_suggestion_feature_config_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.set_suggestion_feature_config( request, @@ -6937,6 +7030,7 @@ def test_set_suggestion_feature_config_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_clear_suggestion_feature_config_rest_bad_request( @@ -7022,11 +7116,15 @@ def test_clear_suggestion_feature_config_rest_interceptors(null_interceptor): transports.ConversationProfilesRestInterceptor, "post_clear_suggestion_feature_config", ) as post, mock.patch.object( + transports.ConversationProfilesRestInterceptor, + "post_clear_suggestion_feature_config_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ConversationProfilesRestInterceptor, "pre_clear_suggestion_feature_config", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gcd_conversation_profile.ClearSuggestionFeatureConfigRequest.pb( gcd_conversation_profile.ClearSuggestionFeatureConfigRequest() ) @@ -7050,6 +7148,7 @@ def test_clear_suggestion_feature_config_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.clear_suggestion_feature_config( request, @@ -7061,6 +7160,7 @@ def test_clear_suggestion_feature_config_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationRequest): diff --git a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_conversations.py b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_conversations.py index ca787759610e..11cd6ea51be9 100644 --- a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_conversations.py +++ b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_conversations.py @@ -73,6 +73,13 @@ from google.cloud.dialogflow_v2beta1.types import audio_config from google.cloud.dialogflow_v2beta1.types import conversation +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -327,6 +334,49 @@ def 
test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = ConversationsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = ConversationsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -7234,10 +7284,18 @@ def test_create_conversation_rest_call_success(request_type): "name": "name_value", "lifecycle_state": 1, "conversation_profile": "conversation_profile_value", - "phone_number": {"phone_number": "phone_number_value"}, + "phone_number": {"country_code": 1294, "phone_number": "phone_number_value"}, "conversation_stage": 1, "start_time": {"seconds": 751, "nanos": 543}, "end_time": {}, + "telephony_connection_info": { + "dialed_number": "dialed_number_value", + "sdp": "sdp_value", + "sip_headers": [{"name": "name_value", "value": "value_value"}], + "extra_mime_contents": [ + {"mime_type": "mime_type_value", "content": b"content_blob"} + ], + }, } # The version of a generated dependency at test runtime may differ from the version used during generation. 
# Delete any fields which are not present in the current runtime dependency @@ -7361,10 +7419,14 @@ def test_create_conversation_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ConversationsRestInterceptor, "post_create_conversation" ) as post, mock.patch.object( + transports.ConversationsRestInterceptor, + "post_create_conversation_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ConversationsRestInterceptor, "pre_create_conversation" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gcd_conversation.CreateConversationRequest.pb( gcd_conversation.CreateConversationRequest() ) @@ -7390,6 +7452,7 @@ def test_create_conversation_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gcd_conversation.Conversation() + post_with_metadata.return_value = gcd_conversation.Conversation(), metadata client.create_conversation( request, @@ -7401,6 +7464,7 @@ def test_create_conversation_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_conversations_rest_bad_request( @@ -7485,10 +7549,13 @@ def test_list_conversations_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ConversationsRestInterceptor, "post_list_conversations" ) as post, mock.patch.object( + transports.ConversationsRestInterceptor, "post_list_conversations_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ConversationsRestInterceptor, "pre_list_conversations" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = conversation.ListConversationsRequest.pb( conversation.ListConversationsRequest() ) @@ -7514,6 +7581,10 @@ def test_list_conversations_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = conversation.ListConversationsResponse() + post_with_metadata.return_value = ( + conversation.ListConversationsResponse(), + metadata, + ) client.list_conversations( request, @@ -7525,6 +7596,7 @@ def test_list_conversations_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_conversation_rest_bad_request( @@ -7620,10 +7692,13 @@ def test_get_conversation_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ConversationsRestInterceptor, "post_get_conversation" ) as post, mock.patch.object( + transports.ConversationsRestInterceptor, "post_get_conversation_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ConversationsRestInterceptor, "pre_get_conversation" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = conversation.GetConversationRequest.pb( conversation.GetConversationRequest() ) @@ -7647,6 +7722,7 @@ def test_get_conversation_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = conversation.Conversation() + post_with_metadata.return_value = conversation.Conversation(), metadata client.get_conversation( request, @@ -7658,6 +7734,7 @@ def test_get_conversation_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_complete_conversation_rest_bad_request( @@ -7753,10 +7830,14 @@ def 
test_complete_conversation_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ConversationsRestInterceptor, "post_complete_conversation" ) as post, mock.patch.object( + transports.ConversationsRestInterceptor, + "post_complete_conversation_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ConversationsRestInterceptor, "pre_complete_conversation" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = conversation.CompleteConversationRequest.pb( conversation.CompleteConversationRequest() ) @@ -7780,6 +7861,7 @@ def test_complete_conversation_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = conversation.Conversation() + post_with_metadata.return_value = conversation.Conversation(), metadata client.complete_conversation( request, @@ -7791,6 +7873,7 @@ def test_complete_conversation_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_batch_create_messages_rest_bad_request( @@ -7872,10 +7955,14 @@ def test_batch_create_messages_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ConversationsRestInterceptor, "post_batch_create_messages" ) as post, mock.patch.object( + transports.ConversationsRestInterceptor, + "post_batch_create_messages_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ConversationsRestInterceptor, "pre_batch_create_messages" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = conversation.BatchCreateMessagesRequest.pb( conversation.BatchCreateMessagesRequest() ) @@ -7901,6 +7988,10 @@ def test_batch_create_messages_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = conversation.BatchCreateMessagesResponse() + post_with_metadata.return_value = ( + conversation.BatchCreateMessagesResponse(), + metadata, + ) client.batch_create_messages( request, @@ -7912,6 +8003,7 @@ def test_batch_create_messages_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_messages_rest_bad_request(request_type=conversation.ListMessagesRequest): @@ -7994,10 +8086,13 @@ def test_list_messages_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ConversationsRestInterceptor, "post_list_messages" ) as post, mock.patch.object( + transports.ConversationsRestInterceptor, "post_list_messages_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ConversationsRestInterceptor, "pre_list_messages" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = conversation.ListMessagesRequest.pb( conversation.ListMessagesRequest() ) @@ -8023,6 +8118,7 @@ def test_list_messages_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = conversation.ListMessagesResponse() + post_with_metadata.return_value = conversation.ListMessagesResponse(), metadata client.list_messages( request, @@ -8034,6 +8130,7 @@ def test_list_messages_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_suggest_conversation_summary_rest_bad_request( @@ -8122,10 +8219,14 @@ def 
test_suggest_conversation_summary_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ConversationsRestInterceptor, "post_suggest_conversation_summary" ) as post, mock.patch.object( + transports.ConversationsRestInterceptor, + "post_suggest_conversation_summary_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ConversationsRestInterceptor, "pre_suggest_conversation_summary" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gcd_conversation.SuggestConversationSummaryRequest.pb( gcd_conversation.SuggestConversationSummaryRequest() ) @@ -8151,6 +8252,10 @@ def test_suggest_conversation_summary_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gcd_conversation.SuggestConversationSummaryResponse() + post_with_metadata.return_value = ( + gcd_conversation.SuggestConversationSummaryResponse(), + metadata, + ) client.suggest_conversation_summary( request, @@ -8162,6 +8267,7 @@ def test_suggest_conversation_summary_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_generate_stateless_summary_rest_bad_request( @@ -8248,10 +8354,14 @@ def test_generate_stateless_summary_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ConversationsRestInterceptor, "post_generate_stateless_summary" ) as post, mock.patch.object( + transports.ConversationsRestInterceptor, + "post_generate_stateless_summary_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ConversationsRestInterceptor, "pre_generate_stateless_summary" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = conversation.GenerateStatelessSummaryRequest.pb( conversation.GenerateStatelessSummaryRequest() ) @@ -8277,6 +8387,10 @@ def test_generate_stateless_summary_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = conversation.GenerateStatelessSummaryResponse() + post_with_metadata.return_value = ( + conversation.GenerateStatelessSummaryResponse(), + metadata, + ) client.generate_stateless_summary( request, @@ -8288,6 +8402,7 @@ def test_generate_stateless_summary_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_generate_stateless_suggestion_rest_bad_request( @@ -8369,10 +8484,14 @@ def test_generate_stateless_suggestion_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ConversationsRestInterceptor, "post_generate_stateless_suggestion" ) as post, mock.patch.object( + transports.ConversationsRestInterceptor, + "post_generate_stateless_suggestion_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ConversationsRestInterceptor, "pre_generate_stateless_suggestion" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = conversation.GenerateStatelessSuggestionRequest.pb( conversation.GenerateStatelessSuggestionRequest() ) @@ -8398,6 +8517,10 @@ def test_generate_stateless_suggestion_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = conversation.GenerateStatelessSuggestionResponse() + post_with_metadata.return_value = ( + conversation.GenerateStatelessSuggestionResponse(), + metadata, + ) 
client.generate_stateless_suggestion( request, @@ -8409,6 +8532,7 @@ def test_generate_stateless_suggestion_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_search_knowledge_rest_bad_request( @@ -8493,10 +8617,13 @@ def test_search_knowledge_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ConversationsRestInterceptor, "post_search_knowledge" ) as post, mock.patch.object( + transports.ConversationsRestInterceptor, "post_search_knowledge_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ConversationsRestInterceptor, "pre_search_knowledge" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = conversation.SearchKnowledgeRequest.pb( conversation.SearchKnowledgeRequest() ) @@ -8522,6 +8649,10 @@ def test_search_knowledge_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = conversation.SearchKnowledgeResponse() + post_with_metadata.return_value = ( + conversation.SearchKnowledgeResponse(), + metadata, + ) client.search_knowledge( request, @@ -8533,6 +8664,7 @@ def test_search_knowledge_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationRequest): diff --git a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_documents.py b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_documents.py index 7e78424a9571..5552bc95547c 100644 --- a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_documents.py +++ b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_documents.py @@ -78,6 +78,13 @@ from google.cloud.dialogflow_v2beta1.types import document as gcd_document from google.cloud.dialogflow_v2beta1.types import gcs +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -311,6 +318,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
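The other recurring change in these hunks is the new post_<method>_with_metadata interceptor hook: every REST interceptor test now also patches it, checks that it is not called before the request, and has it return a (response, metadata) tuple. A rough sketch of how such a hook could be overridden in a custom interceptor, inferred only from the patched names and return values in these tests (the exact signature and typing in the generated transport module may differ):

from google.cloud.dialogflow_v2beta1.services.documents import transports

class LoggingDocumentsInterceptor(transports.DocumentsRestInterceptor):
    def post_get_document_with_metadata(self, response, metadata):
        # Unlike post_get_document, this hook also receives the response
        # metadata and must hand back both values as a tuple.
        print("get_document metadata:", metadata)
        return response, metadata

An instance of such a subclass would typically be supplied as the interceptor when constructing the REST transport; treat the exact wiring as an assumption, since it is not shown in these hunks.
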
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = DocumentsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = DocumentsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -5307,10 +5357,13 @@ def test_list_documents_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DocumentsRestInterceptor, "post_list_documents" ) as post, mock.patch.object( + transports.DocumentsRestInterceptor, "post_list_documents_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DocumentsRestInterceptor, "pre_list_documents" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = document.ListDocumentsRequest.pb(document.ListDocumentsRequest()) transcode.return_value = { "method": "post", @@ -5334,6 +5387,7 @@ def test_list_documents_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = document.ListDocumentsResponse() + post_with_metadata.return_value = document.ListDocumentsResponse(), metadata client.list_documents( request, @@ -5345,6 +5399,7 @@ def test_list_documents_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_document_rest_bad_request(request_type=document.GetDocumentRequest): @@ -5436,10 +5491,13 @@ def test_get_document_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DocumentsRestInterceptor, "post_get_document" ) as post, mock.patch.object( + transports.DocumentsRestInterceptor, "post_get_document_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DocumentsRestInterceptor, "pre_get_document" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = document.GetDocumentRequest.pb(document.GetDocumentRequest()) transcode.return_value = { "method": "post", @@ -5461,6 +5519,7 @@ def test_get_document_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = document.Document() + post_with_metadata.return_value = document.Document(), metadata client.get_document( request, @@ -5472,6 +5531,7 @@ def test_get_document_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def 
test_create_document_rest_bad_request( @@ -5642,10 +5702,13 @@ def test_create_document_rest_interceptors(null_interceptor): ), mock.patch.object( transports.DocumentsRestInterceptor, "post_create_document" ) as post, mock.patch.object( + transports.DocumentsRestInterceptor, "post_create_document_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DocumentsRestInterceptor, "pre_create_document" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gcd_document.CreateDocumentRequest.pb( gcd_document.CreateDocumentRequest() ) @@ -5669,6 +5732,7 @@ def test_create_document_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_document( request, @@ -5680,6 +5744,7 @@ def test_create_document_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_import_documents_rest_bad_request( @@ -5758,10 +5823,13 @@ def test_import_documents_rest_interceptors(null_interceptor): ), mock.patch.object( transports.DocumentsRestInterceptor, "post_import_documents" ) as post, mock.patch.object( + transports.DocumentsRestInterceptor, "post_import_documents_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DocumentsRestInterceptor, "pre_import_documents" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = document.ImportDocumentsRequest.pb( document.ImportDocumentsRequest() ) @@ -5785,6 +5853,7 @@ def test_import_documents_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.import_documents( request, @@ -5796,6 +5865,7 @@ def test_import_documents_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_document_rest_bad_request(request_type=document.DeleteDocumentRequest): @@ -5872,10 +5942,13 @@ def test_delete_document_rest_interceptors(null_interceptor): ), mock.patch.object( transports.DocumentsRestInterceptor, "post_delete_document" ) as post, mock.patch.object( + transports.DocumentsRestInterceptor, "post_delete_document_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DocumentsRestInterceptor, "pre_delete_document" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = document.DeleteDocumentRequest.pb(document.DeleteDocumentRequest()) transcode.return_value = { "method": "post", @@ -5897,6 +5970,7 @@ def test_delete_document_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.delete_document( request, @@ -5908,6 +5982,7 @@ def test_delete_document_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_document_rest_bad_request( @@ -6086,10 +6161,13 @@ def test_update_document_rest_interceptors(null_interceptor): ), mock.patch.object( transports.DocumentsRestInterceptor, "post_update_document" ) as post, mock.patch.object( + 
transports.DocumentsRestInterceptor, "post_update_document_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DocumentsRestInterceptor, "pre_update_document" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gcd_document.UpdateDocumentRequest.pb( gcd_document.UpdateDocumentRequest() ) @@ -6113,6 +6191,7 @@ def test_update_document_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.update_document( request, @@ -6124,6 +6203,7 @@ def test_update_document_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_reload_document_rest_bad_request(request_type=document.ReloadDocumentRequest): @@ -6200,10 +6280,13 @@ def test_reload_document_rest_interceptors(null_interceptor): ), mock.patch.object( transports.DocumentsRestInterceptor, "post_reload_document" ) as post, mock.patch.object( + transports.DocumentsRestInterceptor, "post_reload_document_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DocumentsRestInterceptor, "pre_reload_document" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = document.ReloadDocumentRequest.pb(document.ReloadDocumentRequest()) transcode.return_value = { "method": "post", @@ -6225,6 +6308,7 @@ def test_reload_document_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.reload_document( request, @@ -6236,6 +6320,7 @@ def test_reload_document_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationRequest): diff --git a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_encryption_spec_service.py b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_encryption_spec_service.py index 126c4db5804a..7787b74656bb 100644 --- a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_encryption_spec_service.py +++ b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_encryption_spec_service.py @@ -71,6 +71,13 @@ from google.cloud.dialogflow_v2beta1.types import encryption_spec as gcd_encryption_spec from google.cloud.dialogflow_v2beta1.types import encryption_spec +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -346,6 +353,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = EncryptionSpecServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = EncryptionSpecServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -2503,10 +2553,14 @@ def test_get_encryption_spec_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.EncryptionSpecServiceRestInterceptor, "post_get_encryption_spec" ) as post, mock.patch.object( + transports.EncryptionSpecServiceRestInterceptor, + "post_get_encryption_spec_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.EncryptionSpecServiceRestInterceptor, "pre_get_encryption_spec" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = encryption_spec.GetEncryptionSpecRequest.pb( encryption_spec.GetEncryptionSpecRequest() ) @@ -2532,6 +2586,7 @@ def test_get_encryption_spec_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = encryption_spec.EncryptionSpec() + post_with_metadata.return_value = encryption_spec.EncryptionSpec(), metadata client.get_encryption_spec( request, @@ -2543,6 +2598,7 @@ def test_get_encryption_spec_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_initialize_encryption_spec_rest_bad_request( @@ -2628,11 +2684,15 @@ def test_initialize_encryption_spec_rest_interceptors(null_interceptor): transports.EncryptionSpecServiceRestInterceptor, "post_initialize_encryption_spec", ) as post, mock.patch.object( + transports.EncryptionSpecServiceRestInterceptor, + "post_initialize_encryption_spec_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.EncryptionSpecServiceRestInterceptor, "pre_initialize_encryption_spec", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gcd_encryption_spec.InitializeEncryptionSpecRequest.pb( gcd_encryption_spec.InitializeEncryptionSpecRequest() ) @@ -2656,6 +2716,7 @@ def test_initialize_encryption_spec_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.initialize_encryption_spec( request, @@ -2667,6 +2728,7 @@ 
def test_initialize_encryption_spec_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationRequest): diff --git a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_entity_types.py b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_entity_types.py index 09275e12c7e1..cded7d537d63 100644 --- a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_entity_types.py +++ b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_entity_types.py @@ -75,6 +75,13 @@ from google.cloud.dialogflow_v2beta1.types import entity_type as gcd_entity_type from google.cloud.dialogflow_v2beta1.types import entity_type +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -314,6 +321,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = EntityTypesClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = EntityTypesClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -7427,10 +7477,13 @@ def test_list_entity_types_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.EntityTypesRestInterceptor, "post_list_entity_types" ) as post, mock.patch.object( + transports.EntityTypesRestInterceptor, "post_list_entity_types_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.EntityTypesRestInterceptor, "pre_list_entity_types" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = entity_type.ListEntityTypesRequest.pb( entity_type.ListEntityTypesRequest() ) @@ -7456,6 +7509,10 @@ def test_list_entity_types_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = entity_type.ListEntityTypesResponse() + post_with_metadata.return_value = ( + entity_type.ListEntityTypesResponse(), + metadata, + ) client.list_entity_types( request, @@ 
-7467,6 +7524,7 @@ def test_list_entity_types_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_entity_type_rest_bad_request( @@ -7562,10 +7620,13 @@ def test_get_entity_type_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.EntityTypesRestInterceptor, "post_get_entity_type" ) as post, mock.patch.object( + transports.EntityTypesRestInterceptor, "post_get_entity_type_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.EntityTypesRestInterceptor, "pre_get_entity_type" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = entity_type.GetEntityTypeRequest.pb( entity_type.GetEntityTypeRequest() ) @@ -7589,6 +7650,7 @@ def test_get_entity_type_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = entity_type.EntityType() + post_with_metadata.return_value = entity_type.EntityType(), metadata client.get_entity_type( request, @@ -7600,6 +7662,7 @@ def test_get_entity_type_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_entity_type_rest_bad_request( @@ -7772,10 +7835,13 @@ def test_create_entity_type_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.EntityTypesRestInterceptor, "post_create_entity_type" ) as post, mock.patch.object( + transports.EntityTypesRestInterceptor, "post_create_entity_type_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.EntityTypesRestInterceptor, "pre_create_entity_type" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gcd_entity_type.CreateEntityTypeRequest.pb( gcd_entity_type.CreateEntityTypeRequest() ) @@ -7799,6 +7865,7 @@ def test_create_entity_type_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gcd_entity_type.EntityType() + post_with_metadata.return_value = gcd_entity_type.EntityType(), metadata client.create_entity_type( request, @@ -7810,6 +7877,7 @@ def test_create_entity_type_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_entity_type_rest_bad_request( @@ -7986,10 +8054,13 @@ def test_update_entity_type_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.EntityTypesRestInterceptor, "post_update_entity_type" ) as post, mock.patch.object( + transports.EntityTypesRestInterceptor, "post_update_entity_type_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.EntityTypesRestInterceptor, "pre_update_entity_type" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gcd_entity_type.UpdateEntityTypeRequest.pb( gcd_entity_type.UpdateEntityTypeRequest() ) @@ -8013,6 +8084,7 @@ def test_update_entity_type_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gcd_entity_type.EntityType() + post_with_metadata.return_value = gcd_entity_type.EntityType(), metadata client.update_entity_type( request, @@ -8024,6 +8096,7 @@ def test_update_entity_type_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_entity_type_rest_bad_request( @@ 
-8213,10 +8286,14 @@ def test_batch_update_entity_types_rest_interceptors(null_interceptor): ), mock.patch.object( transports.EntityTypesRestInterceptor, "post_batch_update_entity_types" ) as post, mock.patch.object( + transports.EntityTypesRestInterceptor, + "post_batch_update_entity_types_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.EntityTypesRestInterceptor, "pre_batch_update_entity_types" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = entity_type.BatchUpdateEntityTypesRequest.pb( entity_type.BatchUpdateEntityTypesRequest() ) @@ -8240,6 +8317,7 @@ def test_batch_update_entity_types_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.batch_update_entity_types( request, @@ -8251,6 +8329,7 @@ def test_batch_update_entity_types_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_batch_delete_entity_types_rest_bad_request( @@ -8331,10 +8410,14 @@ def test_batch_delete_entity_types_rest_interceptors(null_interceptor): ), mock.patch.object( transports.EntityTypesRestInterceptor, "post_batch_delete_entity_types" ) as post, mock.patch.object( + transports.EntityTypesRestInterceptor, + "post_batch_delete_entity_types_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.EntityTypesRestInterceptor, "pre_batch_delete_entity_types" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = entity_type.BatchDeleteEntityTypesRequest.pb( entity_type.BatchDeleteEntityTypesRequest() ) @@ -8358,6 +8441,7 @@ def test_batch_delete_entity_types_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.batch_delete_entity_types( request, @@ -8369,6 +8453,7 @@ def test_batch_delete_entity_types_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_batch_create_entities_rest_bad_request( @@ -8449,10 +8534,14 @@ def test_batch_create_entities_rest_interceptors(null_interceptor): ), mock.patch.object( transports.EntityTypesRestInterceptor, "post_batch_create_entities" ) as post, mock.patch.object( + transports.EntityTypesRestInterceptor, + "post_batch_create_entities_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.EntityTypesRestInterceptor, "pre_batch_create_entities" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = entity_type.BatchCreateEntitiesRequest.pb( entity_type.BatchCreateEntitiesRequest() ) @@ -8476,6 +8565,7 @@ def test_batch_create_entities_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.batch_create_entities( request, @@ -8487,6 +8577,7 @@ def test_batch_create_entities_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_batch_update_entities_rest_bad_request( @@ -8567,10 +8658,14 @@ def test_batch_update_entities_rest_interceptors(null_interceptor): 
), mock.patch.object( transports.EntityTypesRestInterceptor, "post_batch_update_entities" ) as post, mock.patch.object( + transports.EntityTypesRestInterceptor, + "post_batch_update_entities_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.EntityTypesRestInterceptor, "pre_batch_update_entities" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = entity_type.BatchUpdateEntitiesRequest.pb( entity_type.BatchUpdateEntitiesRequest() ) @@ -8594,6 +8689,7 @@ def test_batch_update_entities_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.batch_update_entities( request, @@ -8605,6 +8701,7 @@ def test_batch_update_entities_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_batch_delete_entities_rest_bad_request( @@ -8685,10 +8782,14 @@ def test_batch_delete_entities_rest_interceptors(null_interceptor): ), mock.patch.object( transports.EntityTypesRestInterceptor, "post_batch_delete_entities" ) as post, mock.patch.object( + transports.EntityTypesRestInterceptor, + "post_batch_delete_entities_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.EntityTypesRestInterceptor, "pre_batch_delete_entities" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = entity_type.BatchDeleteEntitiesRequest.pb( entity_type.BatchDeleteEntitiesRequest() ) @@ -8712,6 +8813,7 @@ def test_batch_delete_entities_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.batch_delete_entities( request, @@ -8723,6 +8825,7 @@ def test_batch_delete_entities_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationRequest): diff --git a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_environments.py b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_environments.py index 9ef7d59406e8..305987656d2e 100644 --- a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_environments.py +++ b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_environments.py @@ -64,6 +64,13 @@ ) from google.cloud.dialogflow_v2beta1.types import audio_config, environment, fulfillment +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -307,6 +314,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
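The parametrized tests added just below (and repeated for every service client touched by this change) pin down the behaviour of the client's _add_cred_info_for_auth_errors helper purely through its observable effect on error.details. As a hedged, standalone sketch of that behaviour — the function and the FakeAuthError/FakeCreds stand-ins here are illustrative, inferred from the assertions, and are not the generated client code:

import json


class FakeAuthError:
    # Stand-in for core_exceptions.GoogleAPICallError: only the attributes the
    # tests touch (a status code and a mutable details list).
    def __init__(self, code, details):
        self.code = code
        self.details = details


def add_cred_info_for_auth_errors(error, credentials):
    # Append JSON-encoded credential info, but only for auth-related status
    # codes (401/403/404) and only when the credentials expose get_cred_info
    # and it returns a non-empty payload -- exactly the matrix the
    # parametrized cases assert.
    if error.code not in (401, 403, 404):
        return
    get_cred_info = getattr(credentials, "get_cred_info", None)
    if get_cred_info is None:
        return
    cred_info = get_cred_info()
    if cred_info:
        error.details.append(json.dumps(cred_info))


class FakeCreds:
    def get_cred_info(self):
        return {"principal": "service-account@example.com"}


err = FakeAuthError(403, ["foo"])
add_cred_info_for_auth_errors(err, FakeCreds())
assert err.details == ["foo", json.dumps({"principal": "service-account@example.com"})]

err_500 = FakeAuthError(500, ["foo"])
add_cred_info_for_auth_errors(err_500, FakeCreds())
assert err_500.details == ["foo"]  # non-auth codes are left untouched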
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = EnvironmentsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = EnvironmentsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -4596,10 +4646,13 @@ def test_list_environments_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.EnvironmentsRestInterceptor, "post_list_environments" ) as post, mock.patch.object( + transports.EnvironmentsRestInterceptor, "post_list_environments_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.EnvironmentsRestInterceptor, "pre_list_environments" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = environment.ListEnvironmentsRequest.pb( environment.ListEnvironmentsRequest() ) @@ -4625,6 +4678,10 @@ def test_list_environments_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = environment.ListEnvironmentsResponse() + post_with_metadata.return_value = ( + environment.ListEnvironmentsResponse(), + metadata, + ) client.list_environments( request, @@ -4636,6 +4693,7 @@ def test_list_environments_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_environment_rest_bad_request( @@ -4726,10 +4784,13 @@ def test_get_environment_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.EnvironmentsRestInterceptor, "post_get_environment" ) as post, mock.patch.object( + transports.EnvironmentsRestInterceptor, "post_get_environment_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.EnvironmentsRestInterceptor, "pre_get_environment" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = environment.GetEnvironmentRequest.pb( environment.GetEnvironmentRequest() ) @@ -4753,6 +4814,7 @@ def test_get_environment_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = environment.Environment() + post_with_metadata.return_value = environment.Environment(), metadata client.get_environment( request, @@ -4764,6 +4826,7 @@ def test_get_environment_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def 
test_create_environment_rest_bad_request( @@ -4947,10 +5010,13 @@ def test_create_environment_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.EnvironmentsRestInterceptor, "post_create_environment" ) as post, mock.patch.object( + transports.EnvironmentsRestInterceptor, "post_create_environment_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.EnvironmentsRestInterceptor, "pre_create_environment" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = environment.CreateEnvironmentRequest.pb( environment.CreateEnvironmentRequest() ) @@ -4974,6 +5040,7 @@ def test_create_environment_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = environment.Environment() + post_with_metadata.return_value = environment.Environment(), metadata client.create_environment( request, @@ -4985,6 +5052,7 @@ def test_create_environment_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_environment_rest_bad_request( @@ -5172,10 +5240,13 @@ def test_update_environment_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.EnvironmentsRestInterceptor, "post_update_environment" ) as post, mock.patch.object( + transports.EnvironmentsRestInterceptor, "post_update_environment_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.EnvironmentsRestInterceptor, "pre_update_environment" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = environment.UpdateEnvironmentRequest.pb( environment.UpdateEnvironmentRequest() ) @@ -5199,6 +5270,7 @@ def test_update_environment_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = environment.Environment() + post_with_metadata.return_value = environment.Environment(), metadata client.update_environment( request, @@ -5210,6 +5282,7 @@ def test_update_environment_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_environment_rest_bad_request( @@ -5405,10 +5478,14 @@ def test_get_environment_history_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.EnvironmentsRestInterceptor, "post_get_environment_history" ) as post, mock.patch.object( + transports.EnvironmentsRestInterceptor, + "post_get_environment_history_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.EnvironmentsRestInterceptor, "pre_get_environment_history" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = environment.GetEnvironmentHistoryRequest.pb( environment.GetEnvironmentHistoryRequest() ) @@ -5434,6 +5511,7 @@ def test_get_environment_history_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = environment.EnvironmentHistory() + post_with_metadata.return_value = environment.EnvironmentHistory(), metadata client.get_environment_history( request, @@ -5445,6 +5523,7 @@ def test_get_environment_history_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationRequest): diff --git 
a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_fulfillments.py b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_fulfillments.py index ca767709f12f..85e3c7a739cb 100644 --- a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_fulfillments.py +++ b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_fulfillments.py @@ -63,6 +63,13 @@ from google.cloud.dialogflow_v2beta1.types import fulfillment as gcd_fulfillment from google.cloud.dialogflow_v2beta1.types import fulfillment +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -306,6 +313,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = FulfillmentsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = FulfillmentsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -2414,10 +2464,13 @@ def test_get_fulfillment_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.FulfillmentsRestInterceptor, "post_get_fulfillment" ) as post, mock.patch.object( + transports.FulfillmentsRestInterceptor, "post_get_fulfillment_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.FulfillmentsRestInterceptor, "pre_get_fulfillment" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = fulfillment.GetFulfillmentRequest.pb( fulfillment.GetFulfillmentRequest() ) @@ -2441,6 +2494,7 @@ def test_get_fulfillment_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = fulfillment.Fulfillment() + post_with_metadata.return_value = fulfillment.Fulfillment(), metadata client.get_fulfillment( request, @@ -2452,6 +2506,7 @@ def test_get_fulfillment_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_fulfillment_rest_bad_request( @@ -2620,10 +2675,13 @@ def test_update_fulfillment_rest_interceptors(null_interceptor): ) 
as transcode, mock.patch.object( transports.FulfillmentsRestInterceptor, "post_update_fulfillment" ) as post, mock.patch.object( + transports.FulfillmentsRestInterceptor, "post_update_fulfillment_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.FulfillmentsRestInterceptor, "pre_update_fulfillment" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gcd_fulfillment.UpdateFulfillmentRequest.pb( gcd_fulfillment.UpdateFulfillmentRequest() ) @@ -2649,6 +2707,7 @@ def test_update_fulfillment_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gcd_fulfillment.Fulfillment() + post_with_metadata.return_value = gcd_fulfillment.Fulfillment(), metadata client.update_fulfillment( request, @@ -2660,6 +2719,7 @@ def test_update_fulfillment_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationRequest): diff --git a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_generators.py b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_generators.py index bb6f49b7303b..0ab7e7dd2f36 100644 --- a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_generators.py +++ b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_generators.py @@ -65,6 +65,13 @@ from google.cloud.dialogflow_v2beta1.types import generator from google.cloud.dialogflow_v2beta1.types import generator as gcd_generator +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -300,6 +307,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
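A short aside on how the new tests below fake the credentials object: mock.Mock takes spec as its first positional argument, so mock.Mock(["get_cred_info"]) yields a mock that exposes only that one attribute name, while mock.Mock([]) exposes none, which is what makes the hasattr check in the *_no_get_cred_info variants fail. A standalone illustration, using only unittest.mock and hypothetical values:

from unittest import mock

cred_with_info = mock.Mock(["get_cred_info"])  # equivalent to spec=["get_cred_info"]
cred_without_info = mock.Mock([])              # spec=[] -> no attributes at all

assert hasattr(cred_with_info, "get_cred_info")
assert not hasattr(cred_without_info, "get_cred_info")

# The spec only restricts which attribute names exist; the return value still
# has to be wired up explicitly, exactly as the tests do.
cred_with_info.get_cred_info = mock.Mock(return_value={"principal": "sa@example.com"})
assert cred_with_info.get_cred_info() == {"principal": "sa@example.com"}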
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = GeneratorsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = GeneratorsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -4433,10 +4483,13 @@ def test_create_generator_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.GeneratorsRestInterceptor, "post_create_generator" ) as post, mock.patch.object( + transports.GeneratorsRestInterceptor, "post_create_generator_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.GeneratorsRestInterceptor, "pre_create_generator" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gcd_generator.CreateGeneratorRequest.pb( gcd_generator.CreateGeneratorRequest() ) @@ -4460,6 +4513,7 @@ def test_create_generator_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gcd_generator.Generator() + post_with_metadata.return_value = gcd_generator.Generator(), metadata client.create_generator( request, @@ -4471,6 +4525,7 @@ def test_create_generator_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_generator_rest_bad_request(request_type=generator.GetGeneratorRequest): @@ -4557,10 +4612,13 @@ def test_get_generator_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.GeneratorsRestInterceptor, "post_get_generator" ) as post, mock.patch.object( + transports.GeneratorsRestInterceptor, "post_get_generator_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.GeneratorsRestInterceptor, "pre_get_generator" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = generator.GetGeneratorRequest.pb(generator.GetGeneratorRequest()) transcode.return_value = { "method": "post", @@ -4582,6 +4640,7 @@ def test_get_generator_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = generator.Generator() + post_with_metadata.return_value = generator.Generator(), metadata client.get_generator( request, @@ -4593,6 +4652,7 @@ def test_get_generator_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def 
test_list_generators_rest_bad_request(request_type=generator.ListGeneratorsRequest): @@ -4675,10 +4735,13 @@ def test_list_generators_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.GeneratorsRestInterceptor, "post_list_generators" ) as post, mock.patch.object( + transports.GeneratorsRestInterceptor, "post_list_generators_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.GeneratorsRestInterceptor, "pre_list_generators" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = generator.ListGeneratorsRequest.pb( generator.ListGeneratorsRequest() ) @@ -4704,6 +4767,7 @@ def test_list_generators_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = generator.ListGeneratorsResponse() + post_with_metadata.return_value = generator.ListGeneratorsResponse(), metadata client.list_generators( request, @@ -4715,6 +4779,7 @@ def test_list_generators_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_generator_rest_bad_request( @@ -5026,10 +5091,13 @@ def test_update_generator_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.GeneratorsRestInterceptor, "post_update_generator" ) as post, mock.patch.object( + transports.GeneratorsRestInterceptor, "post_update_generator_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.GeneratorsRestInterceptor, "pre_update_generator" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gcd_generator.UpdateGeneratorRequest.pb( gcd_generator.UpdateGeneratorRequest() ) @@ -5053,6 +5121,7 @@ def test_update_generator_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gcd_generator.Generator() + post_with_metadata.return_value = gcd_generator.Generator(), metadata client.update_generator( request, @@ -5064,6 +5133,7 @@ def test_update_generator_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationRequest): diff --git a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_intents.py b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_intents.py index 2f51458f28ea..877a0b69bb1f 100644 --- a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_intents.py +++ b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_intents.py @@ -76,6 +76,13 @@ from google.cloud.dialogflow_v2beta1.types import intent from google.cloud.dialogflow_v2beta1.types import intent as gcd_intent +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -299,6 +306,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
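The post_*_with_metadata hooks being patched throughout the REST interceptor tests in this change are a new extension point alongside the existing post_* hooks. A hedged sketch of how a user-supplied interceptor might implement both, using the Intents service whose tests follow and assuming the signatures implied by the test wiring (the classic hook receives and returns the response; the _with_metadata variant receives and returns a response-plus-metadata pair):

from google.cloud.dialogflow_v2beta1.services.intents import transports


class LoggingIntentsInterceptor(transports.IntentsRestInterceptor):
    def post_get_intent(self, response):
        # Existing hook: sees (and may replace) only the response body.
        return response

    def post_get_intent_with_metadata(self, response, metadata):
        # New hook: also sees the response metadata and must hand both back,
        # mirroring the (response, metadata) tuple the tests assign as its
        # return_value.
        print("get_intent returned %d metadata entries" % len(metadata))
        return response, metadata

The surrounding interceptor tests then assert that, when a non-null interceptor is installed, each REST call drives pre_*, post_* and the new post_*_with_metadata exactly once.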
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = IntentsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = IntentsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -5777,10 +5827,13 @@ def test_list_intents_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.IntentsRestInterceptor, "post_list_intents" ) as post, mock.patch.object( + transports.IntentsRestInterceptor, "post_list_intents_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.IntentsRestInterceptor, "pre_list_intents" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = intent.ListIntentsRequest.pb(intent.ListIntentsRequest()) transcode.return_value = { "method": "post", @@ -5802,6 +5855,7 @@ def test_list_intents_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = intent.ListIntentsResponse() + post_with_metadata.return_value = intent.ListIntentsResponse(), metadata client.list_intents( request, @@ -5813,6 +5867,7 @@ def test_list_intents_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_intent_rest_bad_request(request_type=intent.GetIntentRequest): @@ -5925,10 +5980,13 @@ def test_get_intent_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.IntentsRestInterceptor, "post_get_intent" ) as post, mock.patch.object( + transports.IntentsRestInterceptor, "post_get_intent_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.IntentsRestInterceptor, "pre_get_intent" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = intent.GetIntentRequest.pb(intent.GetIntentRequest()) transcode.return_value = { "method": "post", @@ -5950,6 +6008,7 @@ def test_get_intent_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = intent.Intent() + post_with_metadata.return_value = intent.Intent(), metadata client.get_intent( request, @@ -5961,6 +6020,7 @@ def test_get_intent_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_intent_rest_bad_request(request_type=gcd_intent.CreateIntentRequest): @@ 
-6346,10 +6406,13 @@ def test_create_intent_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.IntentsRestInterceptor, "post_create_intent" ) as post, mock.patch.object( + transports.IntentsRestInterceptor, "post_create_intent_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.IntentsRestInterceptor, "pre_create_intent" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gcd_intent.CreateIntentRequest.pb(gcd_intent.CreateIntentRequest()) transcode.return_value = { "method": "post", @@ -6371,6 +6434,7 @@ def test_create_intent_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gcd_intent.Intent() + post_with_metadata.return_value = gcd_intent.Intent(), metadata client.create_intent( request, @@ -6382,6 +6446,7 @@ def test_create_intent_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_intent_rest_bad_request(request_type=gcd_intent.UpdateIntentRequest): @@ -6767,10 +6832,13 @@ def test_update_intent_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.IntentsRestInterceptor, "post_update_intent" ) as post, mock.patch.object( + transports.IntentsRestInterceptor, "post_update_intent_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.IntentsRestInterceptor, "pre_update_intent" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gcd_intent.UpdateIntentRequest.pb(gcd_intent.UpdateIntentRequest()) transcode.return_value = { "method": "post", @@ -6792,6 +6860,7 @@ def test_update_intent_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gcd_intent.Intent() + post_with_metadata.return_value = gcd_intent.Intent(), metadata client.update_intent( request, @@ -6803,6 +6872,7 @@ def test_update_intent_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_intent_rest_bad_request(request_type=intent.DeleteIntentRequest): @@ -6984,10 +7054,13 @@ def test_batch_update_intents_rest_interceptors(null_interceptor): ), mock.patch.object( transports.IntentsRestInterceptor, "post_batch_update_intents" ) as post, mock.patch.object( + transports.IntentsRestInterceptor, "post_batch_update_intents_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.IntentsRestInterceptor, "pre_batch_update_intents" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = intent.BatchUpdateIntentsRequest.pb( intent.BatchUpdateIntentsRequest() ) @@ -7011,6 +7084,7 @@ def test_batch_update_intents_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.batch_update_intents( request, @@ -7022,6 +7096,7 @@ def test_batch_update_intents_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_batch_delete_intents_rest_bad_request( @@ -7100,10 +7175,13 @@ def test_batch_delete_intents_rest_interceptors(null_interceptor): ), mock.patch.object( transports.IntentsRestInterceptor, "post_batch_delete_intents" ) as post, mock.patch.object( + 
transports.IntentsRestInterceptor, "post_batch_delete_intents_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.IntentsRestInterceptor, "pre_batch_delete_intents" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = intent.BatchDeleteIntentsRequest.pb( intent.BatchDeleteIntentsRequest() ) @@ -7127,6 +7205,7 @@ def test_batch_delete_intents_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.batch_delete_intents( request, @@ -7138,6 +7217,7 @@ def test_batch_delete_intents_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationRequest): diff --git a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_knowledge_bases.py b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_knowledge_bases.py index 1c756113b71e..52096cf2ec0c 100644 --- a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_knowledge_bases.py +++ b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_knowledge_bases.py @@ -64,6 +64,13 @@ from google.cloud.dialogflow_v2beta1.types import knowledge_base as gcd_knowledge_base from google.cloud.dialogflow_v2beta1.types import knowledge_base +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -322,6 +329,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = KnowledgeBasesClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = KnowledgeBasesClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -4483,10 +4533,14 @@ def test_list_knowledge_bases_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.KnowledgeBasesRestInterceptor, "post_list_knowledge_bases" ) as post, mock.patch.object( + transports.KnowledgeBasesRestInterceptor, + "post_list_knowledge_bases_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.KnowledgeBasesRestInterceptor, "pre_list_knowledge_bases" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = knowledge_base.ListKnowledgeBasesRequest.pb( knowledge_base.ListKnowledgeBasesRequest() ) @@ -4512,6 +4566,10 @@ def test_list_knowledge_bases_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = knowledge_base.ListKnowledgeBasesResponse() + post_with_metadata.return_value = ( + knowledge_base.ListKnowledgeBasesResponse(), + metadata, + ) client.list_knowledge_bases( request, @@ -4523,6 +4581,7 @@ def test_list_knowledge_bases_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_knowledge_base_rest_bad_request( @@ -4611,10 +4670,14 @@ def test_get_knowledge_base_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.KnowledgeBasesRestInterceptor, "post_get_knowledge_base" ) as post, mock.patch.object( + transports.KnowledgeBasesRestInterceptor, + "post_get_knowledge_base_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.KnowledgeBasesRestInterceptor, "pre_get_knowledge_base" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = knowledge_base.GetKnowledgeBaseRequest.pb( knowledge_base.GetKnowledgeBaseRequest() ) @@ -4640,6 +4703,7 @@ def test_get_knowledge_base_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = knowledge_base.KnowledgeBase() + post_with_metadata.return_value = knowledge_base.KnowledgeBase(), metadata client.get_knowledge_base( request, @@ -4651,6 +4715,7 @@ def 
test_get_knowledge_base_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_knowledge_base_rest_bad_request( @@ -4813,10 +4878,14 @@ def test_create_knowledge_base_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.KnowledgeBasesRestInterceptor, "post_create_knowledge_base" ) as post, mock.patch.object( + transports.KnowledgeBasesRestInterceptor, + "post_create_knowledge_base_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.KnowledgeBasesRestInterceptor, "pre_create_knowledge_base" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gcd_knowledge_base.CreateKnowledgeBaseRequest.pb( gcd_knowledge_base.CreateKnowledgeBaseRequest() ) @@ -4842,6 +4911,7 @@ def test_create_knowledge_base_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gcd_knowledge_base.KnowledgeBase() + post_with_metadata.return_value = gcd_knowledge_base.KnowledgeBase(), metadata client.create_knowledge_base( request, @@ -4853,6 +4923,7 @@ def test_create_knowledge_base_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_knowledge_base_rest_bad_request( @@ -5128,10 +5199,14 @@ def test_update_knowledge_base_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.KnowledgeBasesRestInterceptor, "post_update_knowledge_base" ) as post, mock.patch.object( + transports.KnowledgeBasesRestInterceptor, + "post_update_knowledge_base_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.KnowledgeBasesRestInterceptor, "pre_update_knowledge_base" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gcd_knowledge_base.UpdateKnowledgeBaseRequest.pb( gcd_knowledge_base.UpdateKnowledgeBaseRequest() ) @@ -5157,6 +5232,7 @@ def test_update_knowledge_base_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gcd_knowledge_base.KnowledgeBase() + post_with_metadata.return_value = gcd_knowledge_base.KnowledgeBase(), metadata client.update_knowledge_base( request, @@ -5168,6 +5244,7 @@ def test_update_knowledge_base_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationRequest): diff --git a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_participants.py b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_participants.py index eae2191cedc3..daaa17ac46f0 100644 --- a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_participants.py +++ b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_participants.py @@ -75,6 +75,13 @@ from google.cloud.dialogflow_v2beta1.types import participant from google.cloud.dialogflow_v2beta1.types import session, session_entity_type +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -318,6 +325,49 @@ def test__get_universe_domain(): 
assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = ParticipantsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = ParticipantsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -7867,10 +7917,13 @@ def test_create_participant_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ParticipantsRestInterceptor, "post_create_participant" ) as post, mock.patch.object( + transports.ParticipantsRestInterceptor, "post_create_participant_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ParticipantsRestInterceptor, "pre_create_participant" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gcd_participant.CreateParticipantRequest.pb( gcd_participant.CreateParticipantRequest() ) @@ -7896,6 +7949,7 @@ def test_create_participant_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gcd_participant.Participant() + post_with_metadata.return_value = gcd_participant.Participant(), metadata client.create_participant( request, @@ -7907,6 +7961,7 @@ def test_create_participant_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_participant_rest_bad_request( @@ -7999,10 +8054,13 @@ def test_get_participant_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ParticipantsRestInterceptor, "post_get_participant" ) as post, mock.patch.object( + transports.ParticipantsRestInterceptor, "post_get_participant_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ParticipantsRestInterceptor, "pre_get_participant" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = participant.GetParticipantRequest.pb( participant.GetParticipantRequest() ) @@ -8026,6 +8084,7 @@ def test_get_participant_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = participant.Participant() + post_with_metadata.return_value = participant.Participant(), metadata client.get_participant( request, @@ -8037,6 +8096,7 @@ def test_get_participant_rest_interceptors(null_interceptor): pre.assert_called_once() 
post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_participants_rest_bad_request( @@ -8121,10 +8181,13 @@ def test_list_participants_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ParticipantsRestInterceptor, "post_list_participants" ) as post, mock.patch.object( + transports.ParticipantsRestInterceptor, "post_list_participants_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ParticipantsRestInterceptor, "pre_list_participants" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = participant.ListParticipantsRequest.pb( participant.ListParticipantsRequest() ) @@ -8150,6 +8213,10 @@ def test_list_participants_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = participant.ListParticipantsResponse() + post_with_metadata.return_value = ( + participant.ListParticipantsResponse(), + metadata, + ) client.list_participants( request, @@ -8161,6 +8228,7 @@ def test_list_participants_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_participant_rest_bad_request( @@ -8330,10 +8398,13 @@ def test_update_participant_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ParticipantsRestInterceptor, "post_update_participant" ) as post, mock.patch.object( + transports.ParticipantsRestInterceptor, "post_update_participant_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ParticipantsRestInterceptor, "pre_update_participant" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gcd_participant.UpdateParticipantRequest.pb( gcd_participant.UpdateParticipantRequest() ) @@ -8359,6 +8430,7 @@ def test_update_participant_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gcd_participant.Participant() + post_with_metadata.return_value = gcd_participant.Participant(), metadata client.update_participant( request, @@ -8370,6 +8442,7 @@ def test_update_participant_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_analyze_content_rest_bad_request( @@ -8458,10 +8531,13 @@ def test_analyze_content_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ParticipantsRestInterceptor, "post_analyze_content" ) as post, mock.patch.object( + transports.ParticipantsRestInterceptor, "post_analyze_content_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ParticipantsRestInterceptor, "pre_analyze_content" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gcd_participant.AnalyzeContentRequest.pb( gcd_participant.AnalyzeContentRequest() ) @@ -8487,6 +8563,10 @@ def test_analyze_content_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gcd_participant.AnalyzeContentResponse() + post_with_metadata.return_value = ( + gcd_participant.AnalyzeContentResponse(), + metadata, + ) client.analyze_content( request, @@ -8498,6 +8578,7 @@ def test_analyze_content_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_streaming_analyze_content_rest_error(): @@ -8600,10 +8681,13 @@ def 
test_suggest_articles_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ParticipantsRestInterceptor, "post_suggest_articles" ) as post, mock.patch.object( + transports.ParticipantsRestInterceptor, "post_suggest_articles_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ParticipantsRestInterceptor, "pre_suggest_articles" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = participant.SuggestArticlesRequest.pb( participant.SuggestArticlesRequest() ) @@ -8629,6 +8713,10 @@ def test_suggest_articles_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = participant.SuggestArticlesResponse() + post_with_metadata.return_value = ( + participant.SuggestArticlesResponse(), + metadata, + ) client.suggest_articles( request, @@ -8640,6 +8728,7 @@ def test_suggest_articles_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_suggest_faq_answers_rest_bad_request( @@ -8730,10 +8819,13 @@ def test_suggest_faq_answers_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ParticipantsRestInterceptor, "post_suggest_faq_answers" ) as post, mock.patch.object( + transports.ParticipantsRestInterceptor, "post_suggest_faq_answers_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ParticipantsRestInterceptor, "pre_suggest_faq_answers" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = participant.SuggestFaqAnswersRequest.pb( participant.SuggestFaqAnswersRequest() ) @@ -8759,6 +8851,10 @@ def test_suggest_faq_answers_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = participant.SuggestFaqAnswersResponse() + post_with_metadata.return_value = ( + participant.SuggestFaqAnswersResponse(), + metadata, + ) client.suggest_faq_answers( request, @@ -8770,6 +8866,7 @@ def test_suggest_faq_answers_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_suggest_smart_replies_rest_bad_request( @@ -8860,10 +8957,14 @@ def test_suggest_smart_replies_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ParticipantsRestInterceptor, "post_suggest_smart_replies" ) as post, mock.patch.object( + transports.ParticipantsRestInterceptor, + "post_suggest_smart_replies_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ParticipantsRestInterceptor, "pre_suggest_smart_replies" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = participant.SuggestSmartRepliesRequest.pb( participant.SuggestSmartRepliesRequest() ) @@ -8889,6 +8990,10 @@ def test_suggest_smart_replies_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = participant.SuggestSmartRepliesResponse() + post_with_metadata.return_value = ( + participant.SuggestSmartRepliesResponse(), + metadata, + ) client.suggest_smart_replies( request, @@ -8900,6 +9005,7 @@ def test_suggest_smart_replies_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_suggest_knowledge_assist_rest_bad_request( @@ -8990,10 +9096,14 @@ def 
test_suggest_knowledge_assist_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ParticipantsRestInterceptor, "post_suggest_knowledge_assist" ) as post, mock.patch.object( + transports.ParticipantsRestInterceptor, + "post_suggest_knowledge_assist_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ParticipantsRestInterceptor, "pre_suggest_knowledge_assist" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = participant.SuggestKnowledgeAssistRequest.pb( participant.SuggestKnowledgeAssistRequest() ) @@ -9019,6 +9129,10 @@ def test_suggest_knowledge_assist_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = participant.SuggestKnowledgeAssistResponse() + post_with_metadata.return_value = ( + participant.SuggestKnowledgeAssistResponse(), + metadata, + ) client.suggest_knowledge_assist( request, @@ -9030,6 +9144,7 @@ def test_suggest_knowledge_assist_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_suggestions_rest_bad_request( @@ -9118,10 +9233,13 @@ def test_list_suggestions_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ParticipantsRestInterceptor, "post_list_suggestions" ) as post, mock.patch.object( + transports.ParticipantsRestInterceptor, "post_list_suggestions_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ParticipantsRestInterceptor, "pre_list_suggestions" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = participant.ListSuggestionsRequest.pb( participant.ListSuggestionsRequest() ) @@ -9147,6 +9265,10 @@ def test_list_suggestions_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = participant.ListSuggestionsResponse() + post_with_metadata.return_value = ( + participant.ListSuggestionsResponse(), + metadata, + ) client.list_suggestions( request, @@ -9158,6 +9280,7 @@ def test_list_suggestions_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_compile_suggestion_rest_bad_request( @@ -9248,10 +9371,13 @@ def test_compile_suggestion_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ParticipantsRestInterceptor, "post_compile_suggestion" ) as post, mock.patch.object( + transports.ParticipantsRestInterceptor, "post_compile_suggestion_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ParticipantsRestInterceptor, "pre_compile_suggestion" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = participant.CompileSuggestionRequest.pb( participant.CompileSuggestionRequest() ) @@ -9277,6 +9403,10 @@ def test_compile_suggestion_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = participant.CompileSuggestionResponse() + post_with_metadata.return_value = ( + participant.CompileSuggestionResponse(), + metadata, + ) client.compile_suggestion( request, @@ -9288,6 +9418,7 @@ def test_compile_suggestion_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationRequest): diff --git 
a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_phone_numbers.py b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_phone_numbers.py new file mode 100644 index 000000000000..8994c64d2c30 --- /dev/null +++ b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_phone_numbers.py @@ -0,0 +1,6265 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +from collections.abc import AsyncIterable, Iterable +import json +import math + +from google.api_core import api_core_version +from google.protobuf import json_format +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +try: + from google.auth.aio import credentials as ga_credentials_async + + HAS_GOOGLE_AUTH_AIO = True +except ImportError: # pragma: NO COVER + HAS_GOOGLE_AUTH_AIO = False + +from google.api_core import gapic_v1, grpc_helpers, grpc_helpers_async, path_template +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.location import locations_pb2 +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account +from google.protobuf import field_mask_pb2 # type: ignore + +from google.cloud.dialogflow_v2beta1.services.phone_numbers import ( + PhoneNumbersAsyncClient, + PhoneNumbersClient, + pagers, + transports, +) +from google.cloud.dialogflow_v2beta1.types import phone_number as gcd_phone_number +from google.cloud.dialogflow_v2beta1.types import phone_number + +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + + +async def mock_async_gen(data, chunk_size=1): + for i in range(0, len(data)): # pragma: NO COVER + chunk = data[i : i + chunk_size] + yield chunk.encode("utf-8") + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded. +# See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107. 
+def async_anonymous_credentials(): + if HAS_GOOGLE_AUTH_AIO: + return ga_credentials_async.AnonymousCredentials() + return ga_credentials.AnonymousCredentials() + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +# If default endpoint template is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint template so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint_template(client): + return ( + "test.{UNIVERSE_DOMAIN}" + if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) + else client._DEFAULT_ENDPOINT_TEMPLATE + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert PhoneNumbersClient._get_default_mtls_endpoint(None) is None + assert ( + PhoneNumbersClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + ) + assert ( + PhoneNumbersClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + PhoneNumbersClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + PhoneNumbersClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert PhoneNumbersClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +def test__read_environment_variables(): + assert PhoneNumbersClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert PhoneNumbersClient._read_environment_variables() == (True, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert PhoneNumbersClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + PhoneNumbersClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + assert PhoneNumbersClient._read_environment_variables() == ( + False, + "never", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + assert PhoneNumbersClient._read_environment_variables() == ( + False, + "always", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): + assert PhoneNumbersClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + PhoneNumbersClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): + assert 
PhoneNumbersClient._read_environment_variables() == ( + False, + "auto", + "foo.com", + ) + + +def test__get_client_cert_source(): + mock_provided_cert_source = mock.Mock() + mock_default_cert_source = mock.Mock() + + assert PhoneNumbersClient._get_client_cert_source(None, False) is None + assert ( + PhoneNumbersClient._get_client_cert_source(mock_provided_cert_source, False) + is None + ) + assert ( + PhoneNumbersClient._get_client_cert_source(mock_provided_cert_source, True) + == mock_provided_cert_source + ) + + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", return_value=True + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_default_cert_source, + ): + assert ( + PhoneNumbersClient._get_client_cert_source(None, True) + is mock_default_cert_source + ) + assert ( + PhoneNumbersClient._get_client_cert_source( + mock_provided_cert_source, "true" + ) + is mock_provided_cert_source + ) + + +@mock.patch.object( + PhoneNumbersClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(PhoneNumbersClient), +) +@mock.patch.object( + PhoneNumbersAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(PhoneNumbersAsyncClient), +) +def test__get_api_endpoint(): + api_override = "foo.com" + mock_client_cert_source = mock.Mock() + default_universe = PhoneNumbersClient._DEFAULT_UNIVERSE + default_endpoint = PhoneNumbersClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = PhoneNumbersClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + assert ( + PhoneNumbersClient._get_api_endpoint( + api_override, mock_client_cert_source, default_universe, "always" + ) + == api_override + ) + assert ( + PhoneNumbersClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "auto" + ) + == PhoneNumbersClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + PhoneNumbersClient._get_api_endpoint(None, None, default_universe, "auto") + == default_endpoint + ) + assert ( + PhoneNumbersClient._get_api_endpoint(None, None, default_universe, "always") + == PhoneNumbersClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + PhoneNumbersClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "always" + ) + == PhoneNumbersClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + PhoneNumbersClient._get_api_endpoint(None, None, mock_universe, "never") + == mock_endpoint + ) + assert ( + PhoneNumbersClient._get_api_endpoint(None, None, default_universe, "never") + == default_endpoint + ) + + with pytest.raises(MutualTLSChannelError) as excinfo: + PhoneNumbersClient._get_api_endpoint( + None, mock_client_cert_source, mock_universe, "auto" + ) + assert ( + str(excinfo.value) + == "mTLS is not supported in any universe other than googleapis.com." + ) + + +def test__get_universe_domain(): + client_universe_domain = "foo.com" + universe_domain_env = "bar.com" + + assert ( + PhoneNumbersClient._get_universe_domain( + client_universe_domain, universe_domain_env + ) + == client_universe_domain + ) + assert ( + PhoneNumbersClient._get_universe_domain(None, universe_domain_env) + == universe_domain_env + ) + assert ( + PhoneNumbersClient._get_universe_domain(None, None) + == PhoneNumbersClient._DEFAULT_UNIVERSE + ) + + with pytest.raises(ValueError) as excinfo: + PhoneNumbersClient._get_universe_domain("", None) + assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+ + +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = PhoneNumbersClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = PhoneNumbersClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (PhoneNumbersClient, "grpc"), + (PhoneNumbersAsyncClient, "grpc_asyncio"), + (PhoneNumbersClient, "rest"), + ], +) +def test_phone_numbers_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "dialogflow.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://dialogflow.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.PhoneNumbersGrpcTransport, "grpc"), + (transports.PhoneNumbersGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.PhoneNumbersRestTransport, "rest"), + ], +) +def test_phone_numbers_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (PhoneNumbersClient, "grpc"), + (PhoneNumbersAsyncClient, "grpc_asyncio"), + (PhoneNumbersClient, "rest"), + ], +) +def test_phone_numbers_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + 
"dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "dialogflow.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://dialogflow.googleapis.com" + ) + + +def test_phone_numbers_client_get_transport_class(): + transport = PhoneNumbersClient.get_transport_class() + available_transports = [ + transports.PhoneNumbersGrpcTransport, + transports.PhoneNumbersRestTransport, + ] + assert transport in available_transports + + transport = PhoneNumbersClient.get_transport_class("grpc") + assert transport == transports.PhoneNumbersGrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (PhoneNumbersClient, transports.PhoneNumbersGrpcTransport, "grpc"), + ( + PhoneNumbersAsyncClient, + transports.PhoneNumbersGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (PhoneNumbersClient, transports.PhoneNumbersRestTransport, "rest"), + ], +) +@mock.patch.object( + PhoneNumbersClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(PhoneNumbersClient), +) +@mock.patch.object( + PhoneNumbersAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(PhoneNumbersAsyncClient), +) +def test_phone_numbers_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(PhoneNumbersClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(PhoneNumbersClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + (PhoneNumbersClient, transports.PhoneNumbersGrpcTransport, "grpc", "true"), + ( + PhoneNumbersAsyncClient, + transports.PhoneNumbersGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + (PhoneNumbersClient, transports.PhoneNumbersGrpcTransport, "grpc", "false"), + ( + PhoneNumbersAsyncClient, + transports.PhoneNumbersGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + (PhoneNumbersClient, transports.PhoneNumbersRestTransport, "rest", "true"), + (PhoneNumbersClient, transports.PhoneNumbersRestTransport, "rest", "false"), + ], +) +@mock.patch.object( + PhoneNumbersClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(PhoneNumbersClient), +) +@mock.patch.object( + PhoneNumbersAsyncClient, + 
"_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(PhoneNumbersAsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_phone_numbers_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [PhoneNumbersClient, PhoneNumbersAsyncClient]) +@mock.patch.object( + PhoneNumbersClient, "DEFAULT_ENDPOINT", modify_default_endpoint(PhoneNumbersClient) +) +@mock.patch.object( + PhoneNumbersAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(PhoneNumbersAsyncClient), +) +def test_phone_numbers_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + +@pytest.mark.parametrize("client_class", [PhoneNumbersClient, PhoneNumbersAsyncClient]) +@mock.patch.object( + PhoneNumbersClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(PhoneNumbersClient), +) +@mock.patch.object( + PhoneNumbersAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(PhoneNumbersAsyncClient), +) +def test_phone_numbers_client_client_api_endpoint(client_class): + mock_client_cert_source = client_cert_source_callback + api_override = "foo.com" + default_universe = PhoneNumbersClient._DEFAULT_UNIVERSE + default_endpoint = PhoneNumbersClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = PhoneNumbersClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", + # use ClientOptions.api_endpoint as the api endpoint regardless. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ): + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=api_override + ) + client = client_class( + client_options=options, + credentials=ga_credentials.AnonymousCredentials(), + ) + assert client.api_endpoint == api_override + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, + # and ClientOptions.universe_domain="bar.com", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. + options = client_options.ClientOptions() + universe_exists = hasattr(options, "universe_domain") + if universe_exists: + options = client_options.ClientOptions(universe_domain=mock_universe) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + else: + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == ( + mock_endpoint if universe_exists else default_endpoint + ) + assert client.universe_domain == ( + mock_universe if universe_exists else default_universe + ) + + # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + options = client_options.ClientOptions() + if hasattr(options, "universe_domain"): + delattr(options, "universe_domain") + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == default_endpoint + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (PhoneNumbersClient, transports.PhoneNumbersGrpcTransport, "grpc"), + ( + PhoneNumbersAsyncClient, + transports.PhoneNumbersGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (PhoneNumbersClient, transports.PhoneNumbersRestTransport, "rest"), + ], +) +def test_phone_numbers_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + PhoneNumbersClient, + transports.PhoneNumbersGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + PhoneNumbersAsyncClient, + transports.PhoneNumbersGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + (PhoneNumbersClient, transports.PhoneNumbersRestTransport, "rest", None), + ], +) +def test_phone_numbers_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_phone_numbers_client_client_options_from_dict(): + with mock.patch( + "google.cloud.dialogflow_v2beta1.services.phone_numbers.transports.PhoneNumbersGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = PhoneNumbersClient(client_options={"api_endpoint": "squid.clam.whelk"}) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + PhoneNumbersClient, + transports.PhoneNumbersGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + PhoneNumbersAsyncClient, + transports.PhoneNumbersGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_phone_numbers_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. 
+ with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "dialogflow.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/dialogflow", + ), + scopes=None, + default_host="dialogflow.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + phone_number.ListPhoneNumbersRequest, + dict, + ], +) +def test_list_phone_numbers(request_type, transport: str = "grpc"): + client = PhoneNumbersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_phone_numbers), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = phone_number.ListPhoneNumbersResponse( + next_page_token="next_page_token_value", + ) + response = client.list_phone_numbers(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = phone_number.ListPhoneNumbersRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListPhoneNumbersPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_phone_numbers_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = PhoneNumbersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = phone_number.ListPhoneNumbersRequest( + parent="parent_value", + page_token="page_token_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_phone_numbers), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.list_phone_numbers(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == phone_number.ListPhoneNumbersRequest( + parent="parent_value", + page_token="page_token_value", + ) + + +def test_list_phone_numbers_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = PhoneNumbersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_phone_numbers in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_phone_numbers + ] = mock_rpc + request = {} + client.list_phone_numbers(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_phone_numbers(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_phone_numbers_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = PhoneNumbersAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_phone_numbers + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.list_phone_numbers + ] = mock_rpc + + request = {} + await client.list_phone_numbers(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.list_phone_numbers(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_phone_numbers_async( + transport: str = "grpc_asyncio", request_type=phone_number.ListPhoneNumbersRequest +): + client = PhoneNumbersAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_phone_numbers), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + phone_number.ListPhoneNumbersResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_phone_numbers(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = phone_number.ListPhoneNumbersRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListPhoneNumbersAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_phone_numbers_async_from_dict(): + await test_list_phone_numbers_async(request_type=dict) + + +def test_list_phone_numbers_field_headers(): + client = PhoneNumbersClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = phone_number.ListPhoneNumbersRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_phone_numbers), "__call__" + ) as call: + call.return_value = phone_number.ListPhoneNumbersResponse() + client.list_phone_numbers(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_phone_numbers_field_headers_async(): + client = PhoneNumbersAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = phone_number.ListPhoneNumbersRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_phone_numbers), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + phone_number.ListPhoneNumbersResponse() + ) + await client.list_phone_numbers(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_phone_numbers_flattened(): + client = PhoneNumbersClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_phone_numbers), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = phone_number.ListPhoneNumbersResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_phone_numbers( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_phone_numbers_flattened_error(): + client = PhoneNumbersClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_phone_numbers( + phone_number.ListPhoneNumbersRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_phone_numbers_flattened_async(): + client = PhoneNumbersAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_phone_numbers), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = phone_number.ListPhoneNumbersResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + phone_number.ListPhoneNumbersResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_phone_numbers( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_phone_numbers_flattened_error_async(): + client = PhoneNumbersAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_phone_numbers( + phone_number.ListPhoneNumbersRequest(), + parent="parent_value", + ) + + +def test_list_phone_numbers_pager(transport_name: str = "grpc"): + client = PhoneNumbersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_phone_numbers), "__call__" + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + phone_number.ListPhoneNumbersResponse( + phone_numbers=[ + phone_number.PhoneNumber(), + phone_number.PhoneNumber(), + phone_number.PhoneNumber(), + ], + next_page_token="abc", + ), + phone_number.ListPhoneNumbersResponse( + phone_numbers=[], + next_page_token="def", + ), + phone_number.ListPhoneNumbersResponse( + phone_numbers=[ + phone_number.PhoneNumber(), + ], + next_page_token="ghi", + ), + phone_number.ListPhoneNumbersResponse( + phone_numbers=[ + phone_number.PhoneNumber(), + phone_number.PhoneNumber(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_phone_numbers(request={}, retry=retry, timeout=timeout) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, phone_number.PhoneNumber) for i in results) + + +def test_list_phone_numbers_pages(transport_name: str = "grpc"): + client = PhoneNumbersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_phone_numbers), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + phone_number.ListPhoneNumbersResponse( + phone_numbers=[ + phone_number.PhoneNumber(), + phone_number.PhoneNumber(), + phone_number.PhoneNumber(), + ], + next_page_token="abc", + ), + phone_number.ListPhoneNumbersResponse( + phone_numbers=[], + next_page_token="def", + ), + phone_number.ListPhoneNumbersResponse( + phone_numbers=[ + phone_number.PhoneNumber(), + ], + next_page_token="ghi", + ), + phone_number.ListPhoneNumbersResponse( + phone_numbers=[ + phone_number.PhoneNumber(), + phone_number.PhoneNumber(), + ], + ), + RuntimeError, + ) + pages = list(client.list_phone_numbers(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_phone_numbers_async_pager(): + client = PhoneNumbersAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_phone_numbers), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + phone_number.ListPhoneNumbersResponse( + phone_numbers=[ + phone_number.PhoneNumber(), + phone_number.PhoneNumber(), + phone_number.PhoneNumber(), + ], + next_page_token="abc", + ), + phone_number.ListPhoneNumbersResponse( + phone_numbers=[], + next_page_token="def", + ), + phone_number.ListPhoneNumbersResponse( + phone_numbers=[ + phone_number.PhoneNumber(), + ], + next_page_token="ghi", + ), + phone_number.ListPhoneNumbersResponse( + phone_numbers=[ + phone_number.PhoneNumber(), + phone_number.PhoneNumber(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_phone_numbers( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, phone_number.PhoneNumber) for i in responses) + + +@pytest.mark.asyncio +async def test_list_phone_numbers_async_pages(): + client = PhoneNumbersAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_phone_numbers), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + phone_number.ListPhoneNumbersResponse( + phone_numbers=[ + phone_number.PhoneNumber(), + phone_number.PhoneNumber(), + phone_number.PhoneNumber(), + ], + next_page_token="abc", + ), + phone_number.ListPhoneNumbersResponse( + phone_numbers=[], + next_page_token="def", + ), + phone_number.ListPhoneNumbersResponse( + phone_numbers=[ + phone_number.PhoneNumber(), + ], + next_page_token="ghi", + ), + phone_number.ListPhoneNumbersResponse( + phone_numbers=[ + phone_number.PhoneNumber(), + phone_number.PhoneNumber(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_phone_numbers(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + gcd_phone_number.UpdatePhoneNumberRequest, + dict, + ], +) +def test_update_phone_number(request_type, transport: str = "grpc"): + client = PhoneNumbersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_phone_number), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = gcd_phone_number.PhoneNumber( + name="name_value", + phone_number="phone_number_value", + conversation_profile="conversation_profile_value", + lifecycle_state=gcd_phone_number.PhoneNumber.LifecycleState.ACTIVE, + ) + response = client.update_phone_number(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = gcd_phone_number.UpdatePhoneNumberRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, gcd_phone_number.PhoneNumber) + assert response.name == "name_value" + assert response.phone_number == "phone_number_value" + assert response.conversation_profile == "conversation_profile_value" + assert ( + response.lifecycle_state == gcd_phone_number.PhoneNumber.LifecycleState.ACTIVE + ) + + +def test_update_phone_number_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = PhoneNumbersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = gcd_phone_number.UpdatePhoneNumberRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_phone_number), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.update_phone_number(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == gcd_phone_number.UpdatePhoneNumberRequest() + + +def test_update_phone_number_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = PhoneNumbersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.update_phone_number in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_phone_number + ] = mock_rpc + request = {} + client.update_phone_number(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.update_phone_number(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_phone_number_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = PhoneNumbersAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.update_phone_number + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.update_phone_number + ] = mock_rpc + + request = {} + await client.update_phone_number(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.update_phone_number(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_phone_number_async( + transport: str = "grpc_asyncio", + request_type=gcd_phone_number.UpdatePhoneNumberRequest, +): + client = PhoneNumbersAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_phone_number), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gcd_phone_number.PhoneNumber( + name="name_value", + phone_number="phone_number_value", + conversation_profile="conversation_profile_value", + lifecycle_state=gcd_phone_number.PhoneNumber.LifecycleState.ACTIVE, + ) + ) + response = await client.update_phone_number(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = gcd_phone_number.UpdatePhoneNumberRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, gcd_phone_number.PhoneNumber) + assert response.name == "name_value" + assert response.phone_number == "phone_number_value" + assert response.conversation_profile == "conversation_profile_value" + assert ( + response.lifecycle_state == gcd_phone_number.PhoneNumber.LifecycleState.ACTIVE + ) + + +@pytest.mark.asyncio +async def test_update_phone_number_async_from_dict(): + await test_update_phone_number_async(request_type=dict) + + +def test_update_phone_number_field_headers(): + client = PhoneNumbersClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = gcd_phone_number.UpdatePhoneNumberRequest() + + request.phone_number.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_phone_number), "__call__" + ) as call: + call.return_value = gcd_phone_number.PhoneNumber() + client.update_phone_number(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "phone_number.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_phone_number_field_headers_async(): + client = PhoneNumbersAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = gcd_phone_number.UpdatePhoneNumberRequest() + + request.phone_number.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_phone_number), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gcd_phone_number.PhoneNumber() + ) + await client.update_phone_number(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "phone_number.name=name_value", + ) in kw["metadata"] + + +def test_update_phone_number_flattened(): + client = PhoneNumbersClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_phone_number), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = gcd_phone_number.PhoneNumber() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_phone_number( + phone_number=gcd_phone_number.PhoneNumber(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].phone_number + mock_val = gcd_phone_number.PhoneNumber(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +def test_update_phone_number_flattened_error(): + client = PhoneNumbersClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_phone_number( + gcd_phone_number.UpdatePhoneNumberRequest(), + phone_number=gcd_phone_number.PhoneNumber(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_update_phone_number_flattened_async(): + client = PhoneNumbersAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_phone_number), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = gcd_phone_number.PhoneNumber() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gcd_phone_number.PhoneNumber() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_phone_number( + phone_number=gcd_phone_number.PhoneNumber(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].phone_number + mock_val = gcd_phone_number.PhoneNumber(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_update_phone_number_flattened_error_async(): + client = PhoneNumbersAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_phone_number( + gcd_phone_number.UpdatePhoneNumberRequest(), + phone_number=gcd_phone_number.PhoneNumber(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + phone_number.DeletePhoneNumberRequest, + dict, + ], +) +def test_delete_phone_number(request_type, transport: str = "grpc"): + client = PhoneNumbersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_phone_number), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = phone_number.PhoneNumber( + name="name_value", + phone_number="phone_number_value", + conversation_profile="conversation_profile_value", + lifecycle_state=phone_number.PhoneNumber.LifecycleState.ACTIVE, + ) + response = client.delete_phone_number(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = phone_number.DeletePhoneNumberRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, phone_number.PhoneNumber) + assert response.name == "name_value" + assert response.phone_number == "phone_number_value" + assert response.conversation_profile == "conversation_profile_value" + assert response.lifecycle_state == phone_number.PhoneNumber.LifecycleState.ACTIVE + + +def test_delete_phone_number_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = PhoneNumbersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = phone_number.DeletePhoneNumberRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_phone_number), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.delete_phone_number(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == phone_number.DeletePhoneNumberRequest( + name="name_value", + ) + + +def test_delete_phone_number_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = PhoneNumbersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.delete_phone_number in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_phone_number + ] = mock_rpc + request = {} + client.delete_phone_number(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.delete_phone_number(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_phone_number_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = PhoneNumbersAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.delete_phone_number + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.delete_phone_number + ] = mock_rpc + + request = {} + await client.delete_phone_number(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + await client.delete_phone_number(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_phone_number_async( + transport: str = "grpc_asyncio", request_type=phone_number.DeletePhoneNumberRequest +): + client = PhoneNumbersAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_phone_number), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + phone_number.PhoneNumber( + name="name_value", + phone_number="phone_number_value", + conversation_profile="conversation_profile_value", + lifecycle_state=phone_number.PhoneNumber.LifecycleState.ACTIVE, + ) + ) + response = await client.delete_phone_number(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = phone_number.DeletePhoneNumberRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, phone_number.PhoneNumber) + assert response.name == "name_value" + assert response.phone_number == "phone_number_value" + assert response.conversation_profile == "conversation_profile_value" + assert response.lifecycle_state == phone_number.PhoneNumber.LifecycleState.ACTIVE + + +@pytest.mark.asyncio +async def test_delete_phone_number_async_from_dict(): + await test_delete_phone_number_async(request_type=dict) + + +def test_delete_phone_number_field_headers(): + client = PhoneNumbersClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = phone_number.DeletePhoneNumberRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_phone_number), "__call__" + ) as call: + call.return_value = phone_number.PhoneNumber() + client.delete_phone_number(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_phone_number_field_headers_async(): + client = PhoneNumbersAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = phone_number.DeletePhoneNumberRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_phone_number), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + phone_number.PhoneNumber() + ) + await client.delete_phone_number(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_phone_number_flattened(): + client = PhoneNumbersClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_phone_number), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = phone_number.PhoneNumber() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_phone_number( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_phone_number_flattened_error(): + client = PhoneNumbersClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_phone_number( + phone_number.DeletePhoneNumberRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_phone_number_flattened_async(): + client = PhoneNumbersAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_phone_number), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = phone_number.PhoneNumber() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + phone_number.PhoneNumber() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_phone_number( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_phone_number_flattened_error_async(): + client = PhoneNumbersAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_phone_number( + phone_number.DeletePhoneNumberRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + phone_number.UndeletePhoneNumberRequest, + dict, + ], +) +def test_undelete_phone_number(request_type, transport: str = "grpc"): + client = PhoneNumbersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.undelete_phone_number), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = phone_number.PhoneNumber( + name="name_value", + phone_number="phone_number_value", + conversation_profile="conversation_profile_value", + lifecycle_state=phone_number.PhoneNumber.LifecycleState.ACTIVE, + ) + response = client.undelete_phone_number(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = phone_number.UndeletePhoneNumberRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, phone_number.PhoneNumber) + assert response.name == "name_value" + assert response.phone_number == "phone_number_value" + assert response.conversation_profile == "conversation_profile_value" + assert response.lifecycle_state == phone_number.PhoneNumber.LifecycleState.ACTIVE + + +def test_undelete_phone_number_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = PhoneNumbersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = phone_number.UndeletePhoneNumberRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.undelete_phone_number), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.undelete_phone_number(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == phone_number.UndeletePhoneNumberRequest( + name="name_value", + ) + + +def test_undelete_phone_number_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = PhoneNumbersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.undelete_phone_number + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.undelete_phone_number + ] = mock_rpc + request = {} + client.undelete_phone_number(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.undelete_phone_number(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_undelete_phone_number_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = PhoneNumbersAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.undelete_phone_number + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.undelete_phone_number + ] = mock_rpc + + request = {} + await client.undelete_phone_number(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.undelete_phone_number(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_undelete_phone_number_async( + transport: str = "grpc_asyncio", + request_type=phone_number.UndeletePhoneNumberRequest, +): + client = PhoneNumbersAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.undelete_phone_number), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + phone_number.PhoneNumber( + name="name_value", + phone_number="phone_number_value", + conversation_profile="conversation_profile_value", + lifecycle_state=phone_number.PhoneNumber.LifecycleState.ACTIVE, + ) + ) + response = await client.undelete_phone_number(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = phone_number.UndeletePhoneNumberRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, phone_number.PhoneNumber) + assert response.name == "name_value" + assert response.phone_number == "phone_number_value" + assert response.conversation_profile == "conversation_profile_value" + assert response.lifecycle_state == phone_number.PhoneNumber.LifecycleState.ACTIVE + + +@pytest.mark.asyncio +async def test_undelete_phone_number_async_from_dict(): + await test_undelete_phone_number_async(request_type=dict) + + +def test_undelete_phone_number_field_headers(): + client = PhoneNumbersClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = phone_number.UndeletePhoneNumberRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.undelete_phone_number), "__call__" + ) as call: + call.return_value = phone_number.PhoneNumber() + client.undelete_phone_number(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_undelete_phone_number_field_headers_async(): + client = PhoneNumbersAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = phone_number.UndeletePhoneNumberRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.undelete_phone_number), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + phone_number.PhoneNumber() + ) + await client.undelete_phone_number(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_undelete_phone_number_flattened(): + client = PhoneNumbersClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.undelete_phone_number), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = phone_number.PhoneNumber() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.undelete_phone_number( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_undelete_phone_number_flattened_error(): + client = PhoneNumbersClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.undelete_phone_number( + phone_number.UndeletePhoneNumberRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_undelete_phone_number_flattened_async(): + client = PhoneNumbersAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.undelete_phone_number), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = phone_number.PhoneNumber() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + phone_number.PhoneNumber() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.undelete_phone_number( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_undelete_phone_number_flattened_error_async(): + client = PhoneNumbersAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.undelete_phone_number( + phone_number.UndeletePhoneNumberRequest(), + name="name_value", + ) + + +def test_list_phone_numbers_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = PhoneNumbersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_phone_numbers in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_phone_numbers + ] = mock_rpc + + request = {} + client.list_phone_numbers(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_phone_numbers(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_phone_numbers_rest_required_fields( + request_type=phone_number.ListPhoneNumbersRequest, +): + transport_class = transports.PhoneNumbersRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_phone_numbers._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_phone_numbers._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + "show_deleted", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = PhoneNumbersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = phone_number.ListPhoneNumbersResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = phone_number.ListPhoneNumbersResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_phone_numbers(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_phone_numbers_rest_unset_required_fields(): + transport = transports.PhoneNumbersRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_phone_numbers._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + "showDeleted", + ) + ) + & set(("parent",)) + ) + + +def test_list_phone_numbers_rest_flattened(): + client = PhoneNumbersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = phone_number.ListPhoneNumbersResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = phone_number.ListPhoneNumbersResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.list_phone_numbers(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2beta1/{parent=projects/*}/phoneNumbers" % client.transport._host, + args[1], + ) + + +def test_list_phone_numbers_rest_flattened_error(transport: str = "rest"): + client = PhoneNumbersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.list_phone_numbers( + phone_number.ListPhoneNumbersRequest(), + parent="parent_value", + ) + + +def test_list_phone_numbers_rest_pager(transport: str = "rest"): + client = PhoneNumbersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + phone_number.ListPhoneNumbersResponse( + phone_numbers=[ + phone_number.PhoneNumber(), + phone_number.PhoneNumber(), + phone_number.PhoneNumber(), + ], + next_page_token="abc", + ), + phone_number.ListPhoneNumbersResponse( + phone_numbers=[], + next_page_token="def", + ), + phone_number.ListPhoneNumbersResponse( + phone_numbers=[ + phone_number.PhoneNumber(), + ], + next_page_token="ghi", + ), + phone_number.ListPhoneNumbersResponse( + phone_numbers=[ + phone_number.PhoneNumber(), + phone_number.PhoneNumber(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + phone_number.ListPhoneNumbersResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1"} + + pager = client.list_phone_numbers(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, phone_number.PhoneNumber) for i in results) + + pages = list(client.list_phone_numbers(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_update_phone_number_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = PhoneNumbersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.update_phone_number in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_phone_number + ] = mock_rpc + + request = {} + client.update_phone_number(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.update_phone_number(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_phone_number_rest_required_fields( + request_type=gcd_phone_number.UpdatePhoneNumberRequest, +): + transport_class = transports.PhoneNumbersRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_phone_number._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_phone_number._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("update_mask",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = PhoneNumbersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = gcd_phone_number.PhoneNumber() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = gcd_phone_number.PhoneNumber.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.update_phone_number(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_phone_number_rest_unset_required_fields(): + transport = transports.PhoneNumbersRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_phone_number._get_unset_required_fields({}) + assert set(unset_fields) == (set(("updateMask",)) & set(("phoneNumber",))) + + +def test_update_phone_number_rest_flattened(): + client = PhoneNumbersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = gcd_phone_number.PhoneNumber() + + # get arguments that satisfy an http rule for this method + sample_request = { + "phone_number": {"name": "projects/sample1/phoneNumbers/sample2"} + } + + # get truthy value for each flattened field + mock_args = dict( + phone_number=gcd_phone_number.PhoneNumber(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = gcd_phone_number.PhoneNumber.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.update_phone_number(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2beta1/{phone_number.name=projects/*/phoneNumbers/*}" + % client.transport._host, + args[1], + ) + + +def test_update_phone_number_rest_flattened_error(transport: str = "rest"): + client = PhoneNumbersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_phone_number( + gcd_phone_number.UpdatePhoneNumberRequest(), + phone_number=gcd_phone_number.PhoneNumber(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_delete_phone_number_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = PhoneNumbersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.delete_phone_number in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_phone_number + ] = mock_rpc + + request = {} + client.delete_phone_number(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.delete_phone_number(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_phone_number_rest_required_fields( + request_type=phone_number.DeletePhoneNumberRequest, +): + transport_class = transports.PhoneNumbersRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_phone_number._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_phone_number._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = PhoneNumbersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = phone_number.PhoneNumber() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = phone_number.PhoneNumber.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.delete_phone_number(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_phone_number_rest_unset_required_fields(): + transport = transports.PhoneNumbersRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_phone_number._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +def test_delete_phone_number_rest_flattened(): + client = PhoneNumbersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = phone_number.PhoneNumber() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/phoneNumbers/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = phone_number.PhoneNumber.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.delete_phone_number(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2beta1/{name=projects/*/phoneNumbers/*}" % client.transport._host, + args[1], + ) + + +def test_delete_phone_number_rest_flattened_error(transport: str = "rest"): + client = PhoneNumbersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_phone_number( + phone_number.DeletePhoneNumberRequest(), + name="name_value", + ) + + +def test_undelete_phone_number_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = PhoneNumbersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.undelete_phone_number + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.undelete_phone_number + ] = mock_rpc + + request = {} + client.undelete_phone_number(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.undelete_phone_number(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_undelete_phone_number_rest_required_fields( + request_type=phone_number.UndeletePhoneNumberRequest, +): + transport_class = transports.PhoneNumbersRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).undelete_phone_number._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).undelete_phone_number._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = PhoneNumbersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = phone_number.PhoneNumber() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = phone_number.PhoneNumber.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.undelete_phone_number(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_undelete_phone_number_rest_unset_required_fields(): + transport = transports.PhoneNumbersRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.undelete_phone_number._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +def test_undelete_phone_number_rest_flattened(): + client = PhoneNumbersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = phone_number.PhoneNumber() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/phoneNumbers/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = phone_number.PhoneNumber.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.undelete_phone_number(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v2beta1/{name=projects/*/phoneNumbers/*}:undelete" + % client.transport._host, + args[1], + ) + + +def test_undelete_phone_number_rest_flattened_error(transport: str = "rest"): + client = PhoneNumbersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.undelete_phone_number( + phone_number.UndeletePhoneNumberRequest(), + name="name_value", + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.PhoneNumbersGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = PhoneNumbersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.PhoneNumbersGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = PhoneNumbersClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.PhoneNumbersGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = PhoneNumbersClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = PhoneNumbersClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.PhoneNumbersGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = PhoneNumbersClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. 
+ transport = transports.PhoneNumbersGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = PhoneNumbersClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.PhoneNumbersGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.PhoneNumbersGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.PhoneNumbersGrpcTransport, + transports.PhoneNumbersGrpcAsyncIOTransport, + transports.PhoneNumbersRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +def test_transport_kind_grpc(): + transport = PhoneNumbersClient.get_transport_class("grpc")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "grpc" + + +def test_initialize_client_w_grpc(): + client = PhoneNumbersClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_phone_numbers_empty_call_grpc(): + client = PhoneNumbersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_phone_numbers), "__call__" + ) as call: + call.return_value = phone_number.ListPhoneNumbersResponse() + client.list_phone_numbers(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = phone_number.ListPhoneNumbersRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_phone_number_empty_call_grpc(): + client = PhoneNumbersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_phone_number), "__call__" + ) as call: + call.return_value = gcd_phone_number.PhoneNumber() + client.update_phone_number(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gcd_phone_number.UpdatePhoneNumberRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_phone_number_empty_call_grpc(): + client = PhoneNumbersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_phone_number), "__call__" + ) as call: + call.return_value = phone_number.PhoneNumber() + client.delete_phone_number(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = phone_number.DeletePhoneNumberRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_undelete_phone_number_empty_call_grpc(): + client = PhoneNumbersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.undelete_phone_number), "__call__" + ) as call: + call.return_value = phone_number.PhoneNumber() + client.undelete_phone_number(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = phone_number.UndeletePhoneNumberRequest() + + assert args[0] == request_msg + + +def test_transport_kind_grpc_asyncio(): + transport = PhoneNumbersAsyncClient.get_transport_class("grpc_asyncio")( + credentials=async_anonymous_credentials() + ) + assert transport.kind == "grpc_asyncio" + + +def test_initialize_client_w_grpc_asyncio(): + client = PhoneNumbersAsyncClient( + credentials=async_anonymous_credentials(), transport="grpc_asyncio" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_phone_numbers_empty_call_grpc_asyncio(): + client = PhoneNumbersAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_phone_numbers), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + phone_number.ListPhoneNumbersResponse( + next_page_token="next_page_token_value", + ) + ) + await client.list_phone_numbers(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = phone_number.ListPhoneNumbersRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_phone_number_empty_call_grpc_asyncio(): + client = PhoneNumbersAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_phone_number), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gcd_phone_number.PhoneNumber( + name="name_value", + phone_number="phone_number_value", + conversation_profile="conversation_profile_value", + lifecycle_state=gcd_phone_number.PhoneNumber.LifecycleState.ACTIVE, + ) + ) + await client.update_phone_number(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gcd_phone_number.UpdatePhoneNumberRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_phone_number_empty_call_grpc_asyncio(): + client = PhoneNumbersAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_phone_number), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + phone_number.PhoneNumber( + name="name_value", + phone_number="phone_number_value", + conversation_profile="conversation_profile_value", + lifecycle_state=phone_number.PhoneNumber.LifecycleState.ACTIVE, + ) + ) + await client.delete_phone_number(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = phone_number.DeletePhoneNumberRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_undelete_phone_number_empty_call_grpc_asyncio(): + client = PhoneNumbersAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.undelete_phone_number), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + phone_number.PhoneNumber( + name="name_value", + phone_number="phone_number_value", + conversation_profile="conversation_profile_value", + lifecycle_state=phone_number.PhoneNumber.LifecycleState.ACTIVE, + ) + ) + await client.undelete_phone_number(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = phone_number.UndeletePhoneNumberRequest() + + assert args[0] == request_msg + + +def test_transport_kind_rest(): + transport = PhoneNumbersClient.get_transport_class("rest")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "rest" + + +def test_list_phone_numbers_rest_bad_request( + request_type=phone_number.ListPhoneNumbersRequest, +): + client = PhoneNumbersClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_phone_numbers(request) + + +@pytest.mark.parametrize( + "request_type", + [ + phone_number.ListPhoneNumbersRequest, + dict, + ], +) +def test_list_phone_numbers_rest_call_success(request_type): + client = PhoneNumbersClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = phone_number.ListPhoneNumbersResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = phone_number.ListPhoneNumbersResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.list_phone_numbers(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListPhoneNumbersPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_phone_numbers_rest_interceptors(null_interceptor): + transport = transports.PhoneNumbersRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.PhoneNumbersRestInterceptor(), + ) + client = PhoneNumbersClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.PhoneNumbersRestInterceptor, "post_list_phone_numbers" + ) as post, mock.patch.object( + transports.PhoneNumbersRestInterceptor, "post_list_phone_numbers_with_metadata" + ) as post_with_metadata, mock.patch.object( + transports.PhoneNumbersRestInterceptor, "pre_list_phone_numbers" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = phone_number.ListPhoneNumbersRequest.pb( + phone_number.ListPhoneNumbersRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = phone_number.ListPhoneNumbersResponse.to_json( + phone_number.ListPhoneNumbersResponse() + ) + req.return_value.content = return_value + + request = phone_number.ListPhoneNumbersRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = phone_number.ListPhoneNumbersResponse() + post_with_metadata.return_value = ( + phone_number.ListPhoneNumbersResponse(), + metadata, + ) + + client.list_phone_numbers( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_update_phone_number_rest_bad_request( + request_type=gcd_phone_number.UpdatePhoneNumberRequest, +): + client = PhoneNumbersClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"phone_number": {"name": "projects/sample1/phoneNumbers/sample2"}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.update_phone_number(request) + + +@pytest.mark.parametrize( + "request_type", + [ + gcd_phone_number.UpdatePhoneNumberRequest, + dict, + ], +) +def test_update_phone_number_rest_call_success(request_type): + client = PhoneNumbersClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"phone_number": {"name": "projects/sample1/phoneNumbers/sample2"}} + request_init["phone_number"] = { + "name": "projects/sample1/phoneNumbers/sample2", + "phone_number": "phone_number_value", + "conversation_profile": "conversation_profile_value", + "lifecycle_state": 1, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = gcd_phone_number.UpdatePhoneNumberRequest.meta.fields["phone_number"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["phone_number"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + 
if subfield: + if field_repeated: + for i in range(0, len(request_init["phone_number"][field])): + del request_init["phone_number"][field][i][subfield] + else: + del request_init["phone_number"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = gcd_phone_number.PhoneNumber( + name="name_value", + phone_number="phone_number_value", + conversation_profile="conversation_profile_value", + lifecycle_state=gcd_phone_number.PhoneNumber.LifecycleState.ACTIVE, + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = gcd_phone_number.PhoneNumber.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.update_phone_number(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, gcd_phone_number.PhoneNumber) + assert response.name == "name_value" + assert response.phone_number == "phone_number_value" + assert response.conversation_profile == "conversation_profile_value" + assert ( + response.lifecycle_state == gcd_phone_number.PhoneNumber.LifecycleState.ACTIVE + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_phone_number_rest_interceptors(null_interceptor): + transport = transports.PhoneNumbersRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.PhoneNumbersRestInterceptor(), + ) + client = PhoneNumbersClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.PhoneNumbersRestInterceptor, "post_update_phone_number" + ) as post, mock.patch.object( + transports.PhoneNumbersRestInterceptor, "post_update_phone_number_with_metadata" + ) as post_with_metadata, mock.patch.object( + transports.PhoneNumbersRestInterceptor, "pre_update_phone_number" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = gcd_phone_number.UpdatePhoneNumberRequest.pb( + gcd_phone_number.UpdatePhoneNumberRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = gcd_phone_number.PhoneNumber.to_json( + gcd_phone_number.PhoneNumber() + ) + req.return_value.content = return_value + + request = gcd_phone_number.UpdatePhoneNumberRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = gcd_phone_number.PhoneNumber() + post_with_metadata.return_value = gcd_phone_number.PhoneNumber(), metadata + + client.update_phone_number( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + 
post_with_metadata.assert_called_once() + + +def test_delete_phone_number_rest_bad_request( + request_type=phone_number.DeletePhoneNumberRequest, +): + client = PhoneNumbersClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/phoneNumbers/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.delete_phone_number(request) + + +@pytest.mark.parametrize( + "request_type", + [ + phone_number.DeletePhoneNumberRequest, + dict, + ], +) +def test_delete_phone_number_rest_call_success(request_type): + client = PhoneNumbersClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/phoneNumbers/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = phone_number.PhoneNumber( + name="name_value", + phone_number="phone_number_value", + conversation_profile="conversation_profile_value", + lifecycle_state=phone_number.PhoneNumber.LifecycleState.ACTIVE, + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = phone_number.PhoneNumber.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.delete_phone_number(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, phone_number.PhoneNumber) + assert response.name == "name_value" + assert response.phone_number == "phone_number_value" + assert response.conversation_profile == "conversation_profile_value" + assert response.lifecycle_state == phone_number.PhoneNumber.LifecycleState.ACTIVE + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_phone_number_rest_interceptors(null_interceptor): + transport = transports.PhoneNumbersRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.PhoneNumbersRestInterceptor(), + ) + client = PhoneNumbersClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.PhoneNumbersRestInterceptor, "post_delete_phone_number" + ) as post, mock.patch.object( + transports.PhoneNumbersRestInterceptor, "post_delete_phone_number_with_metadata" + ) as post_with_metadata, mock.patch.object( + transports.PhoneNumbersRestInterceptor, "pre_delete_phone_number" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = phone_number.DeletePhoneNumberRequest.pb( + phone_number.DeletePhoneNumberRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = phone_number.PhoneNumber.to_json(phone_number.PhoneNumber()) + req.return_value.content = return_value + + request = phone_number.DeletePhoneNumberRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = phone_number.PhoneNumber() + post_with_metadata.return_value = phone_number.PhoneNumber(), metadata + + client.delete_phone_number( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_undelete_phone_number_rest_bad_request( + request_type=phone_number.UndeletePhoneNumberRequest, +): + client = PhoneNumbersClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/phoneNumbers/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.undelete_phone_number(request) + + +@pytest.mark.parametrize( + "request_type", + [ + phone_number.UndeletePhoneNumberRequest, + dict, + ], +) +def test_undelete_phone_number_rest_call_success(request_type): + client = PhoneNumbersClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/phoneNumbers/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = phone_number.PhoneNumber( + name="name_value", + phone_number="phone_number_value", + conversation_profile="conversation_profile_value", + lifecycle_state=phone_number.PhoneNumber.LifecycleState.ACTIVE, + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = phone_number.PhoneNumber.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.undelete_phone_number(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, phone_number.PhoneNumber) + assert response.name == "name_value" + assert response.phone_number == "phone_number_value" + assert response.conversation_profile == "conversation_profile_value" + assert response.lifecycle_state == phone_number.PhoneNumber.LifecycleState.ACTIVE + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_undelete_phone_number_rest_interceptors(null_interceptor): + transport = transports.PhoneNumbersRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.PhoneNumbersRestInterceptor(), + ) + client = PhoneNumbersClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.PhoneNumbersRestInterceptor, "post_undelete_phone_number" + ) as post, mock.patch.object( + transports.PhoneNumbersRestInterceptor, + "post_undelete_phone_number_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.PhoneNumbersRestInterceptor, "pre_undelete_phone_number" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = phone_number.UndeletePhoneNumberRequest.pb( + phone_number.UndeletePhoneNumberRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = phone_number.PhoneNumber.to_json(phone_number.PhoneNumber()) + req.return_value.content = return_value + + request = phone_number.UndeletePhoneNumberRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = phone_number.PhoneNumber() + post_with_metadata.return_value = phone_number.PhoneNumber(), metadata + + client.undelete_phone_number( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationRequest): + client = PhoneNumbersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_location(request) + + +@pytest.mark.parametrize( + "request_type", + [ + locations_pb2.GetLocationRequest, + dict, + ], +) +def test_get_location_rest(request_type): + client = PhoneNumbersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {"name": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # Designate an appropriate value for the returned response. + return_value = locations_pb2.Location() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_location(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) + + +def test_list_locations_rest_bad_request( + request_type=locations_pb2.ListLocationsRequest, +): + client = PhoneNumbersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict({"name": "projects/sample1"}, request) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_locations(request) + + +@pytest.mark.parametrize( + "request_type", + [ + locations_pb2.ListLocationsRequest, + dict, + ], +) +def test_list_locations_rest(request_type): + client = PhoneNumbersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {"name": "projects/sample1"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # Designate an appropriate value for the returned response. + return_value = locations_pb2.ListLocationsResponse() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_locations(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, locations_pb2.ListLocationsResponse) + + +def test_cancel_operation_rest_bad_request( + request_type=operations_pb2.CancelOperationRequest, +): + client = PhoneNumbersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/operations/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.cancel_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.CancelOperationRequest, + dict, + ], +) +def test_cancel_operation_rest(request_type): + client = PhoneNumbersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {"name": "projects/sample1/operations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = "{}" + response_value.content = json_return_value.encode("UTF-8") + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.cancel_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_get_operation_rest_bad_request( + request_type=operations_pb2.GetOperationRequest, +): + client = PhoneNumbersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/operations/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.GetOperationRequest, + dict, + ], +) +def test_get_operation_rest(request_type): + client = PhoneNumbersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {"name": "projects/sample1/operations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_operation(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_list_operations_rest_bad_request( + request_type=operations_pb2.ListOperationsRequest, +): + client = PhoneNumbersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict({"name": "projects/sample1"}, request) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_operations(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.ListOperationsRequest, + dict, + ], +) +def test_list_operations_rest(request_type): + client = PhoneNumbersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {"name": "projects/sample1"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.ListOperationsResponse() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_operations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_initialize_client_w_rest(): + client = PhoneNumbersClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_phone_numbers_empty_call_rest(): + client = PhoneNumbersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_phone_numbers), "__call__" + ) as call: + client.list_phone_numbers(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = phone_number.ListPhoneNumbersRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_update_phone_number_empty_call_rest(): + client = PhoneNumbersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_phone_number), "__call__" + ) as call: + client.update_phone_number(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gcd_phone_number.UpdatePhoneNumberRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_phone_number_empty_call_rest(): + client = PhoneNumbersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_phone_number), "__call__" + ) as call: + client.delete_phone_number(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = phone_number.DeletePhoneNumberRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_undelete_phone_number_empty_call_rest(): + client = PhoneNumbersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.undelete_phone_number), "__call__" + ) as call: + client.undelete_phone_number(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = phone_number.UndeletePhoneNumberRequest() + + assert args[0] == request_msg + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = PhoneNumbersClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.PhoneNumbersGrpcTransport, + ) + + +def test_phone_numbers_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.PhoneNumbersTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_phone_numbers_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.dialogflow_v2beta1.services.phone_numbers.transports.PhoneNumbersTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.PhoneNumbersTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + "list_phone_numbers", + "update_phone_number", + "delete_phone_number", + "undelete_phone_number", + "get_location", + "list_locations", + "get_operation", + "cancel_operation", + "list_operations", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_phone_numbers_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.dialogflow_v2beta1.services.phone_numbers.transports.PhoneNumbersTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.PhoneNumbersTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/dialogflow", + ), + quota_project_id="octopus", + ) + + +def test_phone_numbers_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.cloud.dialogflow_v2beta1.services.phone_numbers.transports.PhoneNumbersTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.PhoneNumbersTransport() + adc.assert_called_once() + + +def test_phone_numbers_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + PhoneNumbersClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/dialogflow", + ), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.PhoneNumbersGrpcTransport, + transports.PhoneNumbersGrpcAsyncIOTransport, + ], +) +def test_phone_numbers_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/dialogflow", + ), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.PhoneNumbersGrpcTransport, + transports.PhoneNumbersGrpcAsyncIOTransport, + transports.PhoneNumbersRestTransport, + ], +) +def test_phone_numbers_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.PhoneNumbersGrpcTransport, grpc_helpers), + (transports.PhoneNumbersGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +def test_phone_numbers_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "dialogflow.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/dialogflow", + ), + scopes=["1", "2"], + default_host="dialogflow.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [transports.PhoneNumbersGrpcTransport, transports.PhoneNumbersGrpcAsyncIOTransport], +) +def test_phone_numbers_grpc_transport_client_cert_source_for_mtls(transport_class): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + +def test_phone_numbers_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.PhoneNumbersRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_phone_numbers_host_no_port(transport_name): + client = PhoneNumbersClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="dialogflow.googleapis.com" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "dialogflow.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://dialogflow.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_phone_numbers_host_with_port(transport_name): + client = PhoneNumbersClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="dialogflow.googleapis.com:8000" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "dialogflow.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://dialogflow.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_phone_numbers_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = PhoneNumbersClient( + credentials=creds1, + transport=transport_name, + ) + client2 = PhoneNumbersClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.list_phone_numbers._session + session2 = client2.transport.list_phone_numbers._session + assert session1 != session2 + session1 = client1.transport.update_phone_number._session + session2 = client2.transport.update_phone_number._session + assert session1 != session2 + session1 = client1.transport.delete_phone_number._session + session2 = client2.transport.delete_phone_number._session + assert session1 != session2 + session1 = client1.transport.undelete_phone_number._session + session2 = client2.transport.undelete_phone_number._session + assert session1 != session2 + + +def test_phone_numbers_grpc_transport_channel(): + channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. 
+ transport = transports.PhoneNumbersGrpcTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +def test_phone_numbers_grpc_asyncio_transport_channel(): + channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.PhoneNumbersGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize( + "transport_class", + [transports.PhoneNumbersGrpcTransport, transports.PhoneNumbersGrpcAsyncIOTransport], +) +def test_phone_numbers_transport_channel_mtls_with_client_cert_source(transport_class): + with mock.patch( + "grpc.ssl_channel_credentials", autospec=True + ) as grpc_ssl_channel_cred: + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. 
+@pytest.mark.parametrize( + "transport_class", + [transports.PhoneNumbersGrpcTransport, transports.PhoneNumbersGrpcAsyncIOTransport], +) +def test_phone_numbers_transport_channel_mtls_with_adc(transport_class): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_phone_number_path(): + project = "squid" + phone_number = "clam" + expected = "projects/{project}/phoneNumbers/{phone_number}".format( + project=project, + phone_number=phone_number, + ) + actual = PhoneNumbersClient.phone_number_path(project, phone_number) + assert expected == actual + + +def test_parse_phone_number_path(): + expected = { + "project": "whelk", + "phone_number": "octopus", + } + path = PhoneNumbersClient.phone_number_path(**expected) + + # Check that the path construction is reversible. + actual = PhoneNumbersClient.parse_phone_number_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "oyster" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = PhoneNumbersClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "nudibranch", + } + path = PhoneNumbersClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = PhoneNumbersClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "cuttlefish" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = PhoneNumbersClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "mussel", + } + path = PhoneNumbersClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = PhoneNumbersClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "winkle" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = PhoneNumbersClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nautilus", + } + path = PhoneNumbersClient.common_organization_path(**expected) + + # Check that the path construction is reversible. 
+ actual = PhoneNumbersClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "scallop" + expected = "projects/{project}".format( + project=project, + ) + actual = PhoneNumbersClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "abalone", + } + path = PhoneNumbersClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = PhoneNumbersClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "squid" + location = "clam" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = PhoneNumbersClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "whelk", + "location": "octopus", + } + path = PhoneNumbersClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = PhoneNumbersClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.PhoneNumbersTransport, "_prep_wrapped_messages" + ) as prep: + client = PhoneNumbersClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.PhoneNumbersTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = PhoneNumbersClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +def test_cancel_operation(transport: str = "grpc"): + client = PhoneNumbersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_cancel_operation_async(transport: str = "grpc_asyncio"): + client = PhoneNumbersAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_cancel_operation_field_headers(): + client = PhoneNumbersClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = None + + client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_cancel_operation_field_headers_async(): + client = PhoneNumbersAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_cancel_operation_from_dict(): + client = PhoneNumbersClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_cancel_operation_from_dict_async(): + client = PhoneNumbersAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_get_operation(transport: str = "grpc"): + client = PhoneNumbersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + response = client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +@pytest.mark.asyncio +async def test_get_operation_async(transport: str = "grpc_asyncio"): + client = PhoneNumbersAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_get_operation_field_headers(): + client = PhoneNumbersClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = operations_pb2.Operation() + + client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_operation_field_headers_async(): + client = PhoneNumbersAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_get_operation_from_dict(): + client = PhoneNumbersClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + + response = client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_get_operation_from_dict_async(): + client = PhoneNumbersAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_operations(transport: str = "grpc"): + client = PhoneNumbersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + response = client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +@pytest.mark.asyncio +async def test_list_operations_async(transport: str = "grpc_asyncio"): + client = PhoneNumbersAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_list_operations_field_headers(): + client = PhoneNumbersClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = operations_pb2.ListOperationsResponse() + + client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_operations_field_headers_async(): + client = PhoneNumbersAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_list_operations_from_dict(): + client = PhoneNumbersClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + + response = client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_list_operations_from_dict_async(): + client = PhoneNumbersAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_locations(transport: str = "grpc"): + client = PhoneNumbersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.ListLocationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.ListLocationsResponse() + response = client.list_locations(request) + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.ListLocationsResponse) + + +@pytest.mark.asyncio +async def test_list_locations_async(transport: str = "grpc_asyncio"): + client = PhoneNumbersAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.ListLocationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + response = await client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.ListLocationsResponse) + + +def test_list_locations_field_headers(): + client = PhoneNumbersClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.ListLocationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + call.return_value = locations_pb2.ListLocationsResponse() + + client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_locations_field_headers_async(): + client = PhoneNumbersAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.ListLocationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + await client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_list_locations_from_dict(): + client = PhoneNumbersClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = locations_pb2.ListLocationsResponse() + + response = client.list_locations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_list_locations_from_dict_async(): + client = PhoneNumbersAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + response = await client.list_locations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_get_location(transport: str = "grpc"): + client = PhoneNumbersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.GetLocationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.Location() + response = client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) + + +@pytest.mark.asyncio +async def test_get_location_async(transport: str = "grpc_asyncio"): + client = PhoneNumbersAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.GetLocationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.Location() + ) + response = await client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) + + +def test_get_location_field_headers(): + client = PhoneNumbersClient(credentials=ga_credentials.AnonymousCredentials()) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.GetLocationRequest() + request.name = "locations/abc" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + call.return_value = locations_pb2.Location() + + client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations/abc", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_location_field_headers_async(): + client = PhoneNumbersAsyncClient(credentials=async_anonymous_credentials()) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.GetLocationRequest() + request.name = "locations/abc" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.Location() + ) + await client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations/abc", + ) in kw["metadata"] + + +def test_get_location_from_dict(): + client = PhoneNumbersClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.Location() + + response = client.get_location( + request={ + "name": "locations/abc", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_get_location_from_dict_async(): + client = PhoneNumbersAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.Location() + ) + response = await client.get_location( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_transport_close_grpc(): + client = PhoneNumbersClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc" + ) + with mock.patch.object( + type(getattr(client.transport, "_grpc_channel")), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +@pytest.mark.asyncio +async def test_transport_close_grpc_asyncio(): + client = PhoneNumbersAsyncClient( + credentials=async_anonymous_credentials(), transport="grpc_asyncio" + ) + with mock.patch.object( + type(getattr(client.transport, "_grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_transport_close_rest(): + client = PhoneNumbersClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + with mock.patch.object( + type(getattr(client.transport, "_session")), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + "grpc", + ] + for transport in transports: + client = PhoneNumbersClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (PhoneNumbersClient, transports.PhoneNumbersGrpcTransport), + (PhoneNumbersAsyncClient, transports.PhoneNumbersGrpcAsyncIOTransport), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_session_entity_types.py b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_session_entity_types.py index c38d1e16ab58..b07a5138fddd 100644 --- a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_session_entity_types.py +++ b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_session_entity_types.py @@ -67,6 +67,13 @@ from google.cloud.dialogflow_v2beta1.types import entity_type from google.cloud.dialogflow_v2beta1.types import session_entity_type +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -338,6 +345,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
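# A minimal illustrative sketch (not emitted by the generator) of the behaviour that the
# new test__add_cred_info_for_auth_errors cases added below verify, assuming the
# _add_cred_info_for_auth_errors helper shown later in this diff for
# CompletionServiceClient is generated identically for SessionEntityTypesClient:
#
#     cred = mock.Mock(["get_cred_info"])
#     cred.get_cred_info = mock.Mock(return_value=CRED_INFO_JSON)
#     client = SessionEntityTypesClient(credentials=cred)
#     client._transport._credentials = cred
#
#     error = core_exceptions.GoogleAPICallError("message", details=["foo"])
#     error.code = 403  # only 401/403/404 gain the extra detail; e.g. 500 is left untouched
#     client._add_cred_info_for_auth_errors(error)
#     assert error.details == ["foo", json.dumps(CRED_INFO_JSON)]
#
# i.e. the serialized credentials.get_cred_info() payload (available in google-auth >= 2.35.0)
# is appended to error.details for UNAUTHORIZED/FORBIDDEN/NOT_FOUND responses only.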
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = SessionEntityTypesClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = SessionEntityTypesClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -4579,10 +4629,14 @@ def test_list_session_entity_types_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.SessionEntityTypesRestInterceptor, "post_list_session_entity_types" ) as post, mock.patch.object( + transports.SessionEntityTypesRestInterceptor, + "post_list_session_entity_types_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.SessionEntityTypesRestInterceptor, "pre_list_session_entity_types" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = session_entity_type.ListSessionEntityTypesRequest.pb( session_entity_type.ListSessionEntityTypesRequest() ) @@ -4608,6 +4662,10 @@ def test_list_session_entity_types_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = session_entity_type.ListSessionEntityTypesResponse() + post_with_metadata.return_value = ( + session_entity_type.ListSessionEntityTypesResponse(), + metadata, + ) client.list_session_entity_types( request, @@ -4619,6 +4677,7 @@ def test_list_session_entity_types_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_session_entity_type_rest_bad_request( @@ -4712,10 +4771,14 @@ def test_get_session_entity_type_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.SessionEntityTypesRestInterceptor, "post_get_session_entity_type" ) as post, mock.patch.object( + transports.SessionEntityTypesRestInterceptor, + "post_get_session_entity_type_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.SessionEntityTypesRestInterceptor, "pre_get_session_entity_type" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = session_entity_type.GetSessionEntityTypeRequest.pb( session_entity_type.GetSessionEntityTypeRequest() ) @@ -4741,6 +4804,10 @@ def test_get_session_entity_type_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = session_entity_type.SessionEntityType() + 
post_with_metadata.return_value = ( + session_entity_type.SessionEntityType(), + metadata, + ) client.get_session_entity_type( request, @@ -4752,6 +4819,7 @@ def test_get_session_entity_type_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_session_entity_type_rest_bad_request( @@ -4917,10 +4985,14 @@ def test_create_session_entity_type_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.SessionEntityTypesRestInterceptor, "post_create_session_entity_type" ) as post, mock.patch.object( + transports.SessionEntityTypesRestInterceptor, + "post_create_session_entity_type_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.SessionEntityTypesRestInterceptor, "pre_create_session_entity_type" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gcd_session_entity_type.CreateSessionEntityTypeRequest.pb( gcd_session_entity_type.CreateSessionEntityTypeRequest() ) @@ -4946,6 +5018,10 @@ def test_create_session_entity_type_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gcd_session_entity_type.SessionEntityType() + post_with_metadata.return_value = ( + gcd_session_entity_type.SessionEntityType(), + metadata, + ) client.create_session_entity_type( request, @@ -4957,6 +5033,7 @@ def test_create_session_entity_type_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_session_entity_type_rest_bad_request( @@ -5130,10 +5207,14 @@ def test_update_session_entity_type_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.SessionEntityTypesRestInterceptor, "post_update_session_entity_type" ) as post, mock.patch.object( + transports.SessionEntityTypesRestInterceptor, + "post_update_session_entity_type_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.SessionEntityTypesRestInterceptor, "pre_update_session_entity_type" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gcd_session_entity_type.UpdateSessionEntityTypeRequest.pb( gcd_session_entity_type.UpdateSessionEntityTypeRequest() ) @@ -5159,6 +5240,10 @@ def test_update_session_entity_type_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gcd_session_entity_type.SessionEntityType() + post_with_metadata.return_value = ( + gcd_session_entity_type.SessionEntityType(), + metadata, + ) client.update_session_entity_type( request, @@ -5170,6 +5255,7 @@ def test_update_session_entity_type_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_session_entity_type_rest_bad_request( diff --git a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_sessions.py b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_sessions.py index deadb5739a04..9c781dd99a87 100644 --- a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_sessions.py +++ b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_sessions.py @@ -74,6 +74,13 @@ from google.cloud.dialogflow_v2beta1.types import session as gcd_session from google.cloud.dialogflow_v2beta1.types import session_entity_type +CRED_INFO_JSON = { + 
"credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -304,6 +311,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = SessionsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = SessionsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -2017,10 +2067,13 @@ def test_detect_intent_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.SessionsRestInterceptor, "post_detect_intent" ) as post, mock.patch.object( + transports.SessionsRestInterceptor, "post_detect_intent_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.SessionsRestInterceptor, "pre_detect_intent" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gcd_session.DetectIntentRequest.pb( gcd_session.DetectIntentRequest() ) @@ -2046,6 +2099,7 @@ def test_detect_intent_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gcd_session.DetectIntentResponse() + post_with_metadata.return_value = gcd_session.DetectIntentResponse(), metadata client.detect_intent( request, @@ -2057,6 +2111,7 @@ def test_detect_intent_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_streaming_detect_intent_rest_error(): diff --git a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_sip_trunks.py b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_sip_trunks.py index feddae956bdc..48666fe48e28 100644 --- a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_sip_trunks.py +++ b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_sip_trunks.py @@ -65,6 +65,13 @@ from google.cloud.dialogflow_v2beta1.types import sip_trunk from google.cloud.dialogflow_v2beta1.types import sip_trunk as gcd_sip_trunk +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": 
"service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -298,6 +305,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = SipTrunksClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = SipTrunksClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -4386,10 +4436,13 @@ def test_create_sip_trunk_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.SipTrunksRestInterceptor, "post_create_sip_trunk" ) as post, mock.patch.object( + transports.SipTrunksRestInterceptor, "post_create_sip_trunk_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.SipTrunksRestInterceptor, "pre_create_sip_trunk" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gcd_sip_trunk.CreateSipTrunkRequest.pb( gcd_sip_trunk.CreateSipTrunkRequest() ) @@ -4413,6 +4466,7 @@ def test_create_sip_trunk_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gcd_sip_trunk.SipTrunk() + post_with_metadata.return_value = gcd_sip_trunk.SipTrunk(), metadata client.create_sip_trunk( request, @@ -4424,6 +4478,7 @@ def test_create_sip_trunk_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_sip_trunk_rest_bad_request( @@ -4611,10 +4666,13 @@ def test_list_sip_trunks_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.SipTrunksRestInterceptor, "post_list_sip_trunks" ) as post, mock.patch.object( + transports.SipTrunksRestInterceptor, "post_list_sip_trunks_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.SipTrunksRestInterceptor, "pre_list_sip_trunks" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = sip_trunk.ListSipTrunksRequest.pb(sip_trunk.ListSipTrunksRequest()) transcode.return_value = { "method": "post", @@ -4638,6 +4696,7 @@ def test_list_sip_trunks_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = sip_trunk.ListSipTrunksResponse() + 
post_with_metadata.return_value = sip_trunk.ListSipTrunksResponse(), metadata client.list_sip_trunks( request, @@ -4649,6 +4708,7 @@ def test_list_sip_trunks_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_sip_trunk_rest_bad_request(request_type=sip_trunk.GetSipTrunkRequest): @@ -4733,10 +4793,13 @@ def test_get_sip_trunk_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.SipTrunksRestInterceptor, "post_get_sip_trunk" ) as post, mock.patch.object( + transports.SipTrunksRestInterceptor, "post_get_sip_trunk_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.SipTrunksRestInterceptor, "pre_get_sip_trunk" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = sip_trunk.GetSipTrunkRequest.pb(sip_trunk.GetSipTrunkRequest()) transcode.return_value = { "method": "post", @@ -4758,6 +4821,7 @@ def test_get_sip_trunk_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = sip_trunk.SipTrunk() + post_with_metadata.return_value = sip_trunk.SipTrunk(), metadata client.get_sip_trunk( request, @@ -4769,6 +4833,7 @@ def test_get_sip_trunk_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_sip_trunk_rest_bad_request( @@ -4942,10 +5007,13 @@ def test_update_sip_trunk_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.SipTrunksRestInterceptor, "post_update_sip_trunk" ) as post, mock.patch.object( + transports.SipTrunksRestInterceptor, "post_update_sip_trunk_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.SipTrunksRestInterceptor, "pre_update_sip_trunk" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gcd_sip_trunk.UpdateSipTrunkRequest.pb( gcd_sip_trunk.UpdateSipTrunkRequest() ) @@ -4969,6 +5037,7 @@ def test_update_sip_trunk_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gcd_sip_trunk.SipTrunk() + post_with_metadata.return_value = gcd_sip_trunk.SipTrunk(), metadata client.update_sip_trunk( request, @@ -4980,6 +5049,7 @@ def test_update_sip_trunk_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationRequest): diff --git a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_versions.py b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_versions.py index 5559904b5510..5520dd5088e4 100644 --- a/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_versions.py +++ b/packages/google-cloud-dialogflow/tests/unit/gapic/dialogflow_v2beta1/test_versions.py @@ -65,6 +65,13 @@ from google.cloud.dialogflow_v2beta1.types import version from google.cloud.dialogflow_v2beta1.types import version as gcd_version +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -295,6 +302,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain 
cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = VersionsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = VersionsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -4273,10 +4323,13 @@ def test_list_versions_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.VersionsRestInterceptor, "post_list_versions" ) as post, mock.patch.object( + transports.VersionsRestInterceptor, "post_list_versions_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.VersionsRestInterceptor, "pre_list_versions" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = version.ListVersionsRequest.pb(version.ListVersionsRequest()) transcode.return_value = { "method": "post", @@ -4300,6 +4353,7 @@ def test_list_versions_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = version.ListVersionsResponse() + post_with_metadata.return_value = version.ListVersionsResponse(), metadata client.list_versions( request, @@ -4311,6 +4365,7 @@ def test_list_versions_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_version_rest_bad_request(request_type=version.GetVersionRequest): @@ -4397,10 +4452,13 @@ def test_get_version_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.VersionsRestInterceptor, "post_get_version" ) as post, mock.patch.object( + transports.VersionsRestInterceptor, "post_get_version_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.VersionsRestInterceptor, "pre_get_version" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = version.GetVersionRequest.pb(version.GetVersionRequest()) transcode.return_value = { "method": "post", @@ -4422,6 +4480,7 @@ def test_get_version_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = version.Version() + post_with_metadata.return_value = version.Version(), metadata client.get_version( request, @@ -4433,6 +4492,7 @@ def test_get_version_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def 
test_create_version_rest_bad_request(request_type=gcd_version.CreateVersionRequest): @@ -4593,10 +4653,13 @@ def test_create_version_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.VersionsRestInterceptor, "post_create_version" ) as post, mock.patch.object( + transports.VersionsRestInterceptor, "post_create_version_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.VersionsRestInterceptor, "pre_create_version" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gcd_version.CreateVersionRequest.pb( gcd_version.CreateVersionRequest() ) @@ -4620,6 +4683,7 @@ def test_create_version_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gcd_version.Version() + post_with_metadata.return_value = gcd_version.Version(), metadata client.create_version( request, @@ -4631,6 +4695,7 @@ def test_create_version_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_version_rest_bad_request(request_type=gcd_version.UpdateVersionRequest): @@ -4791,10 +4856,13 @@ def test_update_version_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.VersionsRestInterceptor, "post_update_version" ) as post, mock.patch.object( + transports.VersionsRestInterceptor, "post_update_version_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.VersionsRestInterceptor, "pre_update_version" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gcd_version.UpdateVersionRequest.pb( gcd_version.UpdateVersionRequest() ) @@ -4818,6 +4886,7 @@ def test_update_version_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gcd_version.Version() + post_with_metadata.return_value = gcd_version.Version(), metadata client.update_version( request, @@ -4829,6 +4898,7 @@ def test_update_version_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_version_rest_bad_request(request_type=version.DeleteVersionRequest): diff --git a/packages/google-cloud-discoveryengine/CHANGELOG.md b/packages/google-cloud-discoveryengine/CHANGELOG.md index 97fb67650f8a..f67f78d4d2d1 100644 --- a/packages/google-cloud-discoveryengine/CHANGELOG.md +++ b/packages/google-cloud-discoveryengine/CHANGELOG.md @@ -1,5 +1,13 @@ # Changelog +## [0.13.6](https://github.com/googleapis/google-cloud-python/compare/google-cloud-discoveryengine-v0.13.5...google-cloud-discoveryengine-v0.13.6) (2025-02-12) + + +### Features + +* Add REST Interceptors which support reading metadata ([e92d527](https://github.com/googleapis/google-cloud-python/commit/e92d52797ffbce45d033eb81af24e0cad32baa55)) +* Add support for reading selective GAPIC generation methods from service YAML ([e92d527](https://github.com/googleapis/google-cloud-python/commit/e92d52797ffbce45d033eb81af24e0cad32baa55)) + ## [0.13.5](https://github.com/googleapis/google-cloud-python/compare/google-cloud-discoveryengine-v0.13.4...google-cloud-discoveryengine-v0.13.5) (2024-12-12) diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine/gapic_version.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine/gapic_version.py index 458413f5c30f..154c54fd3917 100644 --- 
a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine/gapic_version.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.13.5" # {x-release-please-version} +__version__ = "0.13.6" # {x-release-please-version} diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/gapic_version.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/gapic_version.py index 458413f5c30f..154c54fd3917 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/gapic_version.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.13.5" # {x-release-please-version} +__version__ = "0.13.6" # {x-release-please-version} diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/completion_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/completion_service/client.py index b0a10091075a..c220728633b4 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/completion_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/completion_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -492,6 +494,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -1279,16 +1308,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -1334,16 +1367,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. 
- response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def cancel_operation( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/completion_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/completion_service/transports/rest.py index d4d346f19cac..50716645d965 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/completion_service/transports/rest.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/completion_service/transports/rest.py @@ -139,12 +139,38 @@ def post_complete_query( ) -> completion_service.CompleteQueryResponse: """Post-rpc interceptor for complete_query - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_complete_query_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CompletionService server but before - it is returned to user code. + it is returned to user code. This `post_complete_query` interceptor runs + before the `post_complete_query_with_metadata` interceptor. """ return response + def post_complete_query_with_metadata( + self, + response: completion_service.CompleteQueryResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + completion_service.CompleteQueryResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for complete_query + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CompletionService server but before it is returned to user code. + + We recommend only using this `post_complete_query_with_metadata` + interceptor in new development instead of the `post_complete_query` interceptor. + When both interceptors are used, this `post_complete_query_with_metadata` interceptor runs after the + `post_complete_query` interceptor. The (possibly modified) response returned by + `post_complete_query` will be passed to + `post_complete_query_with_metadata`. + """ + return response, metadata + def pre_import_completion_suggestions( self, request: import_config.ImportCompletionSuggestionsRequest, @@ -165,12 +191,35 @@ def post_import_completion_suggestions( ) -> operations_pb2.Operation: """Post-rpc interceptor for import_completion_suggestions - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_import_completion_suggestions_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CompletionService server but before - it is returned to user code. + it is returned to user code. This `post_import_completion_suggestions` interceptor runs + before the `post_import_completion_suggestions_with_metadata` interceptor. 
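(Illustrative example, not part of the generated diff.) The new `*_with_metadata` hooks let an application read the HTTP response headers that the REST transport now forwards alongside the response. A minimal sketch, assuming the generated interceptor and transport classes are named CompletionServiceRestInterceptor and CompletionServiceRestTransport and that the transport accepts an `interceptor` argument, following the <Service>RestInterceptor convention used for VersionsRestInterceptor in the tests above:

from typing import Sequence, Tuple, Union

from google.cloud import discoveryengine_v1
from google.cloud.discoveryengine_v1.services.completion_service.transports import rest
from google.cloud.discoveryengine_v1.types import completion_service


class HeaderLoggingInterceptor(rest.CompletionServiceRestInterceptor):  # assumed class name
    def post_complete_query_with_metadata(
        self,
        response: completion_service.CompleteQueryResponse,
        metadata: Sequence[Tuple[str, Union[str, bytes]]],
    ) -> Tuple[
        completion_service.CompleteQueryResponse,
        Sequence[Tuple[str, Union[str, bytes]]],
    ]:
        # `metadata` holds the HTTP response headers as (key, value) pairs.
        for key, value in metadata:
            print(f"complete_query header: {key}={value}")
        return response, metadata


# Assumed wiring: generated REST transports accept an `interceptor` argument
# (requires application default credentials to construct the client).
transport = rest.CompletionServiceRestTransport(interceptor=HeaderLoggingInterceptor())
client = discoveryengine_v1.CompletionServiceClient(transport=transport)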
""" return response + def post_import_completion_suggestions_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for import_completion_suggestions + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CompletionService server but before it is returned to user code. + + We recommend only using this `post_import_completion_suggestions_with_metadata` + interceptor in new development instead of the `post_import_completion_suggestions` interceptor. + When both interceptors are used, this `post_import_completion_suggestions_with_metadata` interceptor runs after the + `post_import_completion_suggestions` interceptor. The (possibly modified) response returned by + `post_import_completion_suggestions` will be passed to + `post_import_completion_suggestions_with_metadata`. + """ + return response, metadata + def pre_import_suggestion_deny_list_entries( self, request: import_config.ImportSuggestionDenyListEntriesRequest, @@ -191,12 +240,35 @@ def post_import_suggestion_deny_list_entries( ) -> operations_pb2.Operation: """Post-rpc interceptor for import_suggestion_deny_list_entries - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_import_suggestion_deny_list_entries_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CompletionService server but before - it is returned to user code. + it is returned to user code. This `post_import_suggestion_deny_list_entries` interceptor runs + before the `post_import_suggestion_deny_list_entries_with_metadata` interceptor. """ return response + def post_import_suggestion_deny_list_entries_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for import_suggestion_deny_list_entries + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CompletionService server but before it is returned to user code. + + We recommend only using this `post_import_suggestion_deny_list_entries_with_metadata` + interceptor in new development instead of the `post_import_suggestion_deny_list_entries` interceptor. + When both interceptors are used, this `post_import_suggestion_deny_list_entries_with_metadata` interceptor runs after the + `post_import_suggestion_deny_list_entries` interceptor. The (possibly modified) response returned by + `post_import_suggestion_deny_list_entries` will be passed to + `post_import_suggestion_deny_list_entries_with_metadata`. + """ + return response, metadata + def pre_purge_completion_suggestions( self, request: purge_config.PurgeCompletionSuggestionsRequest, @@ -217,12 +289,35 @@ def post_purge_completion_suggestions( ) -> operations_pb2.Operation: """Post-rpc interceptor for purge_completion_suggestions - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_purge_completion_suggestions_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CompletionService server but before - it is returned to user code. + it is returned to user code. 
This `post_purge_completion_suggestions` interceptor runs + before the `post_purge_completion_suggestions_with_metadata` interceptor. """ return response + def post_purge_completion_suggestions_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for purge_completion_suggestions + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CompletionService server but before it is returned to user code. + + We recommend only using this `post_purge_completion_suggestions_with_metadata` + interceptor in new development instead of the `post_purge_completion_suggestions` interceptor. + When both interceptors are used, this `post_purge_completion_suggestions_with_metadata` interceptor runs after the + `post_purge_completion_suggestions` interceptor. The (possibly modified) response returned by + `post_purge_completion_suggestions` will be passed to + `post_purge_completion_suggestions_with_metadata`. + """ + return response, metadata + def pre_purge_suggestion_deny_list_entries( self, request: purge_config.PurgeSuggestionDenyListEntriesRequest, @@ -243,12 +338,35 @@ def post_purge_suggestion_deny_list_entries( ) -> operations_pb2.Operation: """Post-rpc interceptor for purge_suggestion_deny_list_entries - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_purge_suggestion_deny_list_entries_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CompletionService server but before - it is returned to user code. + it is returned to user code. This `post_purge_suggestion_deny_list_entries` interceptor runs + before the `post_purge_suggestion_deny_list_entries_with_metadata` interceptor. """ return response + def post_purge_suggestion_deny_list_entries_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for purge_suggestion_deny_list_entries + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CompletionService server but before it is returned to user code. + + We recommend only using this `post_purge_suggestion_deny_list_entries_with_metadata` + interceptor in new development instead of the `post_purge_suggestion_deny_list_entries` interceptor. + When both interceptors are used, this `post_purge_suggestion_deny_list_entries_with_metadata` interceptor runs after the + `post_purge_suggestion_deny_list_entries` interceptor. The (possibly modified) response returned by + `post_purge_suggestion_deny_list_entries` will be passed to + `post_purge_suggestion_deny_list_entries_with_metadata`. 
+ """ + return response, metadata + def pre_cancel_operation( self, request: operations_pb2.CancelOperationRequest, @@ -710,6 +828,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_complete_query(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_complete_query_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -865,6 +987,13 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_import_completion_suggestions(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_import_completion_suggestions_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1023,6 +1152,13 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_import_suggestion_deny_list_entries(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_import_suggestion_deny_list_entries_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1176,6 +1312,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_purge_completion_suggestions(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_purge_completion_suggestions_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1332,6 +1472,13 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_purge_suggestion_deny_list_entries(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_purge_suggestion_deny_list_entries_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/control_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/control_service/client.py index 78ee6e7b3636..0a341d204c99 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/control_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/control_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -519,6 +521,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. 
+ """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -1379,16 +1408,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -1434,16 +1467,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def cancel_operation( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/control_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/control_service/transports/rest.py index c98e3ca97a24..b7e75fb0eecc 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/control_service/transports/rest.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/control_service/transports/rest.py @@ -132,12 +132,35 @@ def pre_create_control( def post_create_control(self, response: gcd_control.Control) -> gcd_control.Control: """Post-rpc interceptor for create_control - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_control_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ControlService server but before - it is returned to user code. + it is returned to user code. This `post_create_control` interceptor runs + before the `post_create_control_with_metadata` interceptor. """ return response + def post_create_control_with_metadata( + self, + response: gcd_control.Control, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gcd_control.Control, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_control + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ControlService server but before it is returned to user code. + + We recommend only using this `post_create_control_with_metadata` + interceptor in new development instead of the `post_create_control` interceptor. 
+ When both interceptors are used, this `post_create_control_with_metadata` interceptor runs after the + `post_create_control` interceptor. The (possibly modified) response returned by + `post_create_control` will be passed to + `post_create_control_with_metadata`. + """ + return response, metadata + def pre_delete_control( self, request: control_service.DeleteControlRequest, @@ -169,12 +192,35 @@ def pre_get_control( def post_get_control(self, response: control.Control) -> control.Control: """Post-rpc interceptor for get_control - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_control_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ControlService server but before - it is returned to user code. + it is returned to user code. This `post_get_control` interceptor runs + before the `post_get_control_with_metadata` interceptor. """ return response + def post_get_control_with_metadata( + self, + response: control.Control, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[control.Control, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_control + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ControlService server but before it is returned to user code. + + We recommend only using this `post_get_control_with_metadata` + interceptor in new development instead of the `post_get_control` interceptor. + When both interceptors are used, this `post_get_control_with_metadata` interceptor runs after the + `post_get_control` interceptor. The (possibly modified) response returned by + `post_get_control` will be passed to + `post_get_control_with_metadata`. + """ + return response, metadata + def pre_list_controls( self, request: control_service.ListControlsRequest, @@ -194,12 +240,37 @@ def post_list_controls( ) -> control_service.ListControlsResponse: """Post-rpc interceptor for list_controls - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_controls_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ControlService server but before - it is returned to user code. + it is returned to user code. This `post_list_controls` interceptor runs + before the `post_list_controls_with_metadata` interceptor. """ return response + def post_list_controls_with_metadata( + self, + response: control_service.ListControlsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + control_service.ListControlsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_controls + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ControlService server but before it is returned to user code. + + We recommend only using this `post_list_controls_with_metadata` + interceptor in new development instead of the `post_list_controls` interceptor. + When both interceptors are used, this `post_list_controls_with_metadata` interceptor runs after the + `post_list_controls` interceptor. The (possibly modified) response returned by + `post_list_controls` will be passed to + `post_list_controls_with_metadata`. 
+ """ + return response, metadata + def pre_update_control( self, request: control_service.UpdateControlRequest, @@ -217,12 +288,35 @@ def pre_update_control( def post_update_control(self, response: gcd_control.Control) -> gcd_control.Control: """Post-rpc interceptor for update_control - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_control_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ControlService server but before - it is returned to user code. + it is returned to user code. This `post_update_control` interceptor runs + before the `post_update_control_with_metadata` interceptor. """ return response + def post_update_control_with_metadata( + self, + response: gcd_control.Control, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gcd_control.Control, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_control + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ControlService server but before it is returned to user code. + + We recommend only using this `post_update_control_with_metadata` + interceptor in new development instead of the `post_update_control` interceptor. + When both interceptors are used, this `post_update_control_with_metadata` interceptor runs after the + `post_update_control` interceptor. The (possibly modified) response returned by + `post_update_control` will be passed to + `post_update_control_with_metadata`. + """ + return response, metadata + def pre_cancel_operation( self, request: operations_pb2.CancelOperationRequest, @@ -514,6 +608,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_control(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_control_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -767,6 +865,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_control(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_control_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -907,6 +1009,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_controls(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_controls_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1060,6 +1166,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_control(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_control_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/conversational_search_service/client.py 
b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/conversational_search_service/client.py index 886076fa0b0d..d86f07e4dff8 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/conversational_search_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/conversational_search_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -657,6 +659,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -2392,16 +2421,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -2447,16 +2480,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def cancel_operation( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/conversational_search_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/conversational_search_service/transports/rest.py index aa3fe27111cc..56f0b2e148a1 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/conversational_search_service/transports/rest.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/conversational_search_service/transports/rest.py @@ -198,12 +198,38 @@ def post_answer_query( ) -> conversational_search_service.AnswerQueryResponse: """Post-rpc interceptor for answer_query - Override in a subclass to manipulate the response + DEPRECATED. 
Please use the `post_answer_query_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ConversationalSearchService server but before - it is returned to user code. + it is returned to user code. This `post_answer_query` interceptor runs + before the `post_answer_query_with_metadata` interceptor. """ return response + def post_answer_query_with_metadata( + self, + response: conversational_search_service.AnswerQueryResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + conversational_search_service.AnswerQueryResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for answer_query + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ConversationalSearchService server but before it is returned to user code. + + We recommend only using this `post_answer_query_with_metadata` + interceptor in new development instead of the `post_answer_query` interceptor. + When both interceptors are used, this `post_answer_query_with_metadata` interceptor runs after the + `post_answer_query` interceptor. The (possibly modified) response returned by + `post_answer_query` will be passed to + `post_answer_query_with_metadata`. + """ + return response, metadata + def pre_converse_conversation( self, request: conversational_search_service.ConverseConversationRequest, @@ -224,12 +250,38 @@ def post_converse_conversation( ) -> conversational_search_service.ConverseConversationResponse: """Post-rpc interceptor for converse_conversation - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_converse_conversation_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ConversationalSearchService server but before - it is returned to user code. + it is returned to user code. This `post_converse_conversation` interceptor runs + before the `post_converse_conversation_with_metadata` interceptor. """ return response + def post_converse_conversation_with_metadata( + self, + response: conversational_search_service.ConverseConversationResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + conversational_search_service.ConverseConversationResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for converse_conversation + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ConversationalSearchService server but before it is returned to user code. + + We recommend only using this `post_converse_conversation_with_metadata` + interceptor in new development instead of the `post_converse_conversation` interceptor. + When both interceptors are used, this `post_converse_conversation_with_metadata` interceptor runs after the + `post_converse_conversation` interceptor. The (possibly modified) response returned by + `post_converse_conversation` will be passed to + `post_converse_conversation_with_metadata`. + """ + return response, metadata + def pre_create_conversation( self, request: conversational_search_service.CreateConversationRequest, @@ -250,12 +302,35 @@ def post_create_conversation( ) -> gcd_conversation.Conversation: """Post-rpc interceptor for create_conversation - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_conversation_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response after it is returned by the ConversationalSearchService server but before - it is returned to user code. + it is returned to user code. This `post_create_conversation` interceptor runs + before the `post_create_conversation_with_metadata` interceptor. """ return response + def post_create_conversation_with_metadata( + self, + response: gcd_conversation.Conversation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gcd_conversation.Conversation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_conversation + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ConversationalSearchService server but before it is returned to user code. + + We recommend only using this `post_create_conversation_with_metadata` + interceptor in new development instead of the `post_create_conversation` interceptor. + When both interceptors are used, this `post_create_conversation_with_metadata` interceptor runs after the + `post_create_conversation` interceptor. The (possibly modified) response returned by + `post_create_conversation` will be passed to + `post_create_conversation_with_metadata`. + """ + return response, metadata + def pre_create_session( self, request: conversational_search_service.CreateSessionRequest, @@ -274,12 +349,35 @@ def pre_create_session( def post_create_session(self, response: gcd_session.Session) -> gcd_session.Session: """Post-rpc interceptor for create_session - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_session_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ConversationalSearchService server but before - it is returned to user code. + it is returned to user code. This `post_create_session` interceptor runs + before the `post_create_session_with_metadata` interceptor. """ return response + def post_create_session_with_metadata( + self, + response: gcd_session.Session, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gcd_session.Session, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_session + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ConversationalSearchService server but before it is returned to user code. + + We recommend only using this `post_create_session_with_metadata` + interceptor in new development instead of the `post_create_session` interceptor. + When both interceptors are used, this `post_create_session_with_metadata` interceptor runs after the + `post_create_session` interceptor. The (possibly modified) response returned by + `post_create_session` will be passed to + `post_create_session_with_metadata`. + """ + return response, metadata + def pre_delete_conversation( self, request: conversational_search_service.DeleteConversationRequest, @@ -328,12 +426,33 @@ def pre_get_answer( def post_get_answer(self, response: answer.Answer) -> answer.Answer: """Post-rpc interceptor for get_answer - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_answer_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ConversationalSearchService server but before - it is returned to user code. + it is returned to user code. 
This `post_get_answer` interceptor runs + before the `post_get_answer_with_metadata` interceptor. """ return response + def post_get_answer_with_metadata( + self, response: answer.Answer, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[answer.Answer, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_answer + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ConversationalSearchService server but before it is returned to user code. + + We recommend only using this `post_get_answer_with_metadata` + interceptor in new development instead of the `post_get_answer` interceptor. + When both interceptors are used, this `post_get_answer_with_metadata` interceptor runs after the + `post_get_answer` interceptor. The (possibly modified) response returned by + `post_get_answer` will be passed to + `post_get_answer_with_metadata`. + """ + return response, metadata + def pre_get_conversation( self, request: conversational_search_service.GetConversationRequest, @@ -354,12 +473,35 @@ def post_get_conversation( ) -> conversation.Conversation: """Post-rpc interceptor for get_conversation - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_conversation_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ConversationalSearchService server but before - it is returned to user code. + it is returned to user code. This `post_get_conversation` interceptor runs + before the `post_get_conversation_with_metadata` interceptor. """ return response + def post_get_conversation_with_metadata( + self, + response: conversation.Conversation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[conversation.Conversation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_conversation + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ConversationalSearchService server but before it is returned to user code. + + We recommend only using this `post_get_conversation_with_metadata` + interceptor in new development instead of the `post_get_conversation` interceptor. + When both interceptors are used, this `post_get_conversation_with_metadata` interceptor runs after the + `post_get_conversation` interceptor. The (possibly modified) response returned by + `post_get_conversation` will be passed to + `post_get_conversation_with_metadata`. + """ + return response, metadata + def pre_get_session( self, request: conversational_search_service.GetSessionRequest, @@ -378,12 +520,35 @@ def pre_get_session( def post_get_session(self, response: session.Session) -> session.Session: """Post-rpc interceptor for get_session - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_session_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ConversationalSearchService server but before - it is returned to user code. + it is returned to user code. This `post_get_session` interceptor runs + before the `post_get_session_with_metadata` interceptor. 
""" return response + def post_get_session_with_metadata( + self, + response: session.Session, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[session.Session, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_session + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ConversationalSearchService server but before it is returned to user code. + + We recommend only using this `post_get_session_with_metadata` + interceptor in new development instead of the `post_get_session` interceptor. + When both interceptors are used, this `post_get_session_with_metadata` interceptor runs after the + `post_get_session` interceptor. The (possibly modified) response returned by + `post_get_session` will be passed to + `post_get_session_with_metadata`. + """ + return response, metadata + def pre_list_conversations( self, request: conversational_search_service.ListConversationsRequest, @@ -404,12 +569,38 @@ def post_list_conversations( ) -> conversational_search_service.ListConversationsResponse: """Post-rpc interceptor for list_conversations - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_conversations_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ConversationalSearchService server but before - it is returned to user code. + it is returned to user code. This `post_list_conversations` interceptor runs + before the `post_list_conversations_with_metadata` interceptor. """ return response + def post_list_conversations_with_metadata( + self, + response: conversational_search_service.ListConversationsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + conversational_search_service.ListConversationsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_conversations + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ConversationalSearchService server but before it is returned to user code. + + We recommend only using this `post_list_conversations_with_metadata` + interceptor in new development instead of the `post_list_conversations` interceptor. + When both interceptors are used, this `post_list_conversations_with_metadata` interceptor runs after the + `post_list_conversations` interceptor. The (possibly modified) response returned by + `post_list_conversations` will be passed to + `post_list_conversations_with_metadata`. + """ + return response, metadata + def pre_list_sessions( self, request: conversational_search_service.ListSessionsRequest, @@ -430,12 +621,38 @@ def post_list_sessions( ) -> conversational_search_service.ListSessionsResponse: """Post-rpc interceptor for list_sessions - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_sessions_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ConversationalSearchService server but before - it is returned to user code. + it is returned to user code. This `post_list_sessions` interceptor runs + before the `post_list_sessions_with_metadata` interceptor. 
""" return response + def post_list_sessions_with_metadata( + self, + response: conversational_search_service.ListSessionsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + conversational_search_service.ListSessionsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_sessions + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ConversationalSearchService server but before it is returned to user code. + + We recommend only using this `post_list_sessions_with_metadata` + interceptor in new development instead of the `post_list_sessions` interceptor. + When both interceptors are used, this `post_list_sessions_with_metadata` interceptor runs after the + `post_list_sessions` interceptor. The (possibly modified) response returned by + `post_list_sessions` will be passed to + `post_list_sessions_with_metadata`. + """ + return response, metadata + def pre_update_conversation( self, request: conversational_search_service.UpdateConversationRequest, @@ -456,12 +673,35 @@ def post_update_conversation( ) -> gcd_conversation.Conversation: """Post-rpc interceptor for update_conversation - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_conversation_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ConversationalSearchService server but before - it is returned to user code. + it is returned to user code. This `post_update_conversation` interceptor runs + before the `post_update_conversation_with_metadata` interceptor. """ return response + def post_update_conversation_with_metadata( + self, + response: gcd_conversation.Conversation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gcd_conversation.Conversation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_conversation + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ConversationalSearchService server but before it is returned to user code. + + We recommend only using this `post_update_conversation_with_metadata` + interceptor in new development instead of the `post_update_conversation` interceptor. + When both interceptors are used, this `post_update_conversation_with_metadata` interceptor runs after the + `post_update_conversation` interceptor. The (possibly modified) response returned by + `post_update_conversation` will be passed to + `post_update_conversation_with_metadata`. + """ + return response, metadata + def pre_update_session( self, request: conversational_search_service.UpdateSessionRequest, @@ -480,12 +720,35 @@ def pre_update_session( def post_update_session(self, response: gcd_session.Session) -> gcd_session.Session: """Post-rpc interceptor for update_session - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_session_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ConversationalSearchService server but before - it is returned to user code. + it is returned to user code. This `post_update_session` interceptor runs + before the `post_update_session_with_metadata` interceptor. 
""" return response + def post_update_session_with_metadata( + self, + response: gcd_session.Session, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gcd_session.Session, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_session + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ConversationalSearchService server but before it is returned to user code. + + We recommend only using this `post_update_session_with_metadata` + interceptor in new development instead of the `post_update_session` interceptor. + When both interceptors are used, this `post_update_session_with_metadata` interceptor runs after the + `post_update_session` interceptor. The (possibly modified) response returned by + `post_update_session` will be passed to + `post_update_session_with_metadata`. + """ + return response, metadata + def pre_cancel_operation( self, request: operations_pb2.CancelOperationRequest, @@ -779,6 +1042,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_answer_query(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_answer_query_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -939,6 +1206,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_converse_conversation(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_converse_conversation_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1093,6 +1364,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_conversation(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_conversation_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1242,6 +1517,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_session(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_session_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1604,6 +1883,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_answer(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_answer_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1751,6 +2034,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_conversation(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_conversation_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1894,6 +2181,10 @@ def __call__( 
json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_session(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_session_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2039,6 +2330,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_conversations(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_conversations_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2186,6 +2481,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_sessions(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_sessions_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2342,6 +2641,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_conversation(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_conversation_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2491,6 +2794,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_session(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_session_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/data_store_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/data_store_service/client.py index fa43ce47c295..e4c15b458c7c 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/data_store_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/data_store_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -572,6 +574,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. 
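(Sketch of the pattern, not the generated code itself.) Each updated `__call__` hunk applies the same recipe after the protobuf response is parsed: run the legacy post-RPC hook, convert the HTTP response headers into `(key, value)` metadata, run the new `_with_metadata` hook, and keep only the response (the metadata it returns is discarded via `resp, _ = ...`). In plain form, for one of the RPCs shown above:

def _apply_post_hooks(interceptor, http_response, resp):
    # Legacy hook first (kept for backwards compatibility).
    resp = interceptor.post_get_session(resp)
    # Response headers become the metadata handed to the new hook.
    response_metadata = [(k, str(v)) for k, v in http_response.headers.items()]
    resp, _ = interceptor.post_get_session_with_metadata(resp, response_metadata)
    return resp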
+ """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -1519,16 +1548,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -1574,16 +1607,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def cancel_operation( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/data_store_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/data_store_service/transports/rest.py index 33d3286ea2b0..954f6aeb7030 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/data_store_service/transports/rest.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/data_store_service/transports/rest.py @@ -138,12 +138,35 @@ def post_create_data_store( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_data_store - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_data_store_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DataStoreService server but before - it is returned to user code. + it is returned to user code. This `post_create_data_store` interceptor runs + before the `post_create_data_store_with_metadata` interceptor. """ return response + def post_create_data_store_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_data_store + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataStoreService server but before it is returned to user code. + + We recommend only using this `post_create_data_store_with_metadata` + interceptor in new development instead of the `post_create_data_store` interceptor. 
+ When both interceptors are used, this `post_create_data_store_with_metadata` interceptor runs after the + `post_create_data_store` interceptor. The (possibly modified) response returned by + `post_create_data_store` will be passed to + `post_create_data_store_with_metadata`. + """ + return response, metadata + def pre_delete_data_store( self, request: data_store_service.DeleteDataStoreRequest, @@ -164,12 +187,35 @@ def post_delete_data_store( ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_data_store - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_data_store_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DataStoreService server but before - it is returned to user code. + it is returned to user code. This `post_delete_data_store` interceptor runs + before the `post_delete_data_store_with_metadata` interceptor. """ return response + def post_delete_data_store_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_data_store + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataStoreService server but before it is returned to user code. + + We recommend only using this `post_delete_data_store_with_metadata` + interceptor in new development instead of the `post_delete_data_store` interceptor. + When both interceptors are used, this `post_delete_data_store_with_metadata` interceptor runs after the + `post_delete_data_store` interceptor. The (possibly modified) response returned by + `post_delete_data_store` will be passed to + `post_delete_data_store_with_metadata`. + """ + return response, metadata + def pre_get_data_store( self, request: data_store_service.GetDataStoreRequest, @@ -189,12 +235,35 @@ def post_get_data_store( ) -> data_store.DataStore: """Post-rpc interceptor for get_data_store - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_data_store_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DataStoreService server but before - it is returned to user code. + it is returned to user code. This `post_get_data_store` interceptor runs + before the `post_get_data_store_with_metadata` interceptor. """ return response + def post_get_data_store_with_metadata( + self, + response: data_store.DataStore, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[data_store.DataStore, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_data_store + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataStoreService server but before it is returned to user code. + + We recommend only using this `post_get_data_store_with_metadata` + interceptor in new development instead of the `post_get_data_store` interceptor. + When both interceptors are used, this `post_get_data_store_with_metadata` interceptor runs after the + `post_get_data_store` interceptor. The (possibly modified) response returned by + `post_get_data_store` will be passed to + `post_get_data_store_with_metadata`. 
+ """ + return response, metadata + def pre_list_data_stores( self, request: data_store_service.ListDataStoresRequest, @@ -215,12 +284,38 @@ def post_list_data_stores( ) -> data_store_service.ListDataStoresResponse: """Post-rpc interceptor for list_data_stores - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_data_stores_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DataStoreService server but before - it is returned to user code. + it is returned to user code. This `post_list_data_stores` interceptor runs + before the `post_list_data_stores_with_metadata` interceptor. """ return response + def post_list_data_stores_with_metadata( + self, + response: data_store_service.ListDataStoresResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + data_store_service.ListDataStoresResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_data_stores + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataStoreService server but before it is returned to user code. + + We recommend only using this `post_list_data_stores_with_metadata` + interceptor in new development instead of the `post_list_data_stores` interceptor. + When both interceptors are used, this `post_list_data_stores_with_metadata` interceptor runs after the + `post_list_data_stores` interceptor. The (possibly modified) response returned by + `post_list_data_stores` will be passed to + `post_list_data_stores_with_metadata`. + """ + return response, metadata + def pre_update_data_store( self, request: data_store_service.UpdateDataStoreRequest, @@ -241,12 +336,35 @@ def post_update_data_store( ) -> gcd_data_store.DataStore: """Post-rpc interceptor for update_data_store - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_data_store_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DataStoreService server but before - it is returned to user code. + it is returned to user code. This `post_update_data_store` interceptor runs + before the `post_update_data_store_with_metadata` interceptor. """ return response + def post_update_data_store_with_metadata( + self, + response: gcd_data_store.DataStore, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gcd_data_store.DataStore, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_data_store + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataStoreService server but before it is returned to user code. + + We recommend only using this `post_update_data_store_with_metadata` + interceptor in new development instead of the `post_update_data_store` interceptor. + When both interceptors are used, this `post_update_data_store_with_metadata` interceptor runs after the + `post_update_data_store` interceptor. The (possibly modified) response returned by + `post_update_data_store` will be passed to + `post_update_data_store_with_metadata`. 
+ """ + return response, metadata + def pre_cancel_operation( self, request: operations_pb2.CancelOperationRequest, @@ -716,6 +834,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_data_store(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_data_store_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -862,6 +984,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_data_store(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_data_store_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1006,6 +1132,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_data_store(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_data_store_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1153,6 +1283,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_data_stores(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_data_stores_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1308,6 +1442,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_data_store(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_data_store_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/document_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/document_service/client.py index 641902a60428..413697f53938 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/document_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/document_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -581,6 +583,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. 
+ """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -1813,16 +1842,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -1868,16 +1901,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def cancel_operation( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/document_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/document_service/transports/rest.py index 4e2c3b994a8b..7e00ed76fd0f 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/document_service/transports/rest.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/document_service/transports/rest.py @@ -163,12 +163,38 @@ def post_batch_get_documents_metadata( ) -> document_service.BatchGetDocumentsMetadataResponse: """Post-rpc interceptor for batch_get_documents_metadata - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_batch_get_documents_metadata_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DocumentService server but before - it is returned to user code. + it is returned to user code. This `post_batch_get_documents_metadata` interceptor runs + before the `post_batch_get_documents_metadata_with_metadata` interceptor. """ return response + def post_batch_get_documents_metadata_with_metadata( + self, + response: document_service.BatchGetDocumentsMetadataResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + document_service.BatchGetDocumentsMetadataResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for batch_get_documents_metadata + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DocumentService server but before it is returned to user code. 
+ + We recommend only using this `post_batch_get_documents_metadata_with_metadata` + interceptor in new development instead of the `post_batch_get_documents_metadata` interceptor. + When both interceptors are used, this `post_batch_get_documents_metadata_with_metadata` interceptor runs after the + `post_batch_get_documents_metadata` interceptor. The (possibly modified) response returned by + `post_batch_get_documents_metadata` will be passed to + `post_batch_get_documents_metadata_with_metadata`. + """ + return response, metadata + def pre_create_document( self, request: document_service.CreateDocumentRequest, @@ -188,12 +214,35 @@ def post_create_document( ) -> gcd_document.Document: """Post-rpc interceptor for create_document - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_document_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DocumentService server but before - it is returned to user code. + it is returned to user code. This `post_create_document` interceptor runs + before the `post_create_document_with_metadata` interceptor. """ return response + def post_create_document_with_metadata( + self, + response: gcd_document.Document, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gcd_document.Document, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_document + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DocumentService server but before it is returned to user code. + + We recommend only using this `post_create_document_with_metadata` + interceptor in new development instead of the `post_create_document` interceptor. + When both interceptors are used, this `post_create_document_with_metadata` interceptor runs after the + `post_create_document` interceptor. The (possibly modified) response returned by + `post_create_document` will be passed to + `post_create_document_with_metadata`. + """ + return response, metadata + def pre_delete_document( self, request: document_service.DeleteDocumentRequest, @@ -225,12 +274,35 @@ def pre_get_document( def post_get_document(self, response: document.Document) -> document.Document: """Post-rpc interceptor for get_document - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_document_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DocumentService server but before - it is returned to user code. + it is returned to user code. This `post_get_document` interceptor runs + before the `post_get_document_with_metadata` interceptor. """ return response + def post_get_document_with_metadata( + self, + response: document.Document, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[document.Document, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_document + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DocumentService server but before it is returned to user code. + + We recommend only using this `post_get_document_with_metadata` + interceptor in new development instead of the `post_get_document` interceptor. + When both interceptors are used, this `post_get_document_with_metadata` interceptor runs after the + `post_get_document` interceptor. 
The (possibly modified) response returned by + `post_get_document` will be passed to + `post_get_document_with_metadata`. + """ + return response, metadata + def pre_import_documents( self, request: import_config.ImportDocumentsRequest, @@ -250,12 +322,35 @@ def post_import_documents( ) -> operations_pb2.Operation: """Post-rpc interceptor for import_documents - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_import_documents_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DocumentService server but before - it is returned to user code. + it is returned to user code. This `post_import_documents` interceptor runs + before the `post_import_documents_with_metadata` interceptor. """ return response + def post_import_documents_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for import_documents + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DocumentService server but before it is returned to user code. + + We recommend only using this `post_import_documents_with_metadata` + interceptor in new development instead of the `post_import_documents` interceptor. + When both interceptors are used, this `post_import_documents_with_metadata` interceptor runs after the + `post_import_documents` interceptor. The (possibly modified) response returned by + `post_import_documents` will be passed to + `post_import_documents_with_metadata`. + """ + return response, metadata + def pre_list_documents( self, request: document_service.ListDocumentsRequest, @@ -275,12 +370,37 @@ def post_list_documents( ) -> document_service.ListDocumentsResponse: """Post-rpc interceptor for list_documents - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_documents_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DocumentService server but before - it is returned to user code. + it is returned to user code. This `post_list_documents` interceptor runs + before the `post_list_documents_with_metadata` interceptor. """ return response + def post_list_documents_with_metadata( + self, + response: document_service.ListDocumentsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + document_service.ListDocumentsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_documents + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DocumentService server but before it is returned to user code. + + We recommend only using this `post_list_documents_with_metadata` + interceptor in new development instead of the `post_list_documents` interceptor. + When both interceptors are used, this `post_list_documents_with_metadata` interceptor runs after the + `post_list_documents` interceptor. The (possibly modified) response returned by + `post_list_documents` will be passed to + `post_list_documents_with_metadata`. 
+ """ + return response, metadata + def pre_purge_documents( self, request: purge_config.PurgeDocumentsRequest, @@ -300,12 +420,35 @@ def post_purge_documents( ) -> operations_pb2.Operation: """Post-rpc interceptor for purge_documents - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_purge_documents_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DocumentService server but before - it is returned to user code. + it is returned to user code. This `post_purge_documents` interceptor runs + before the `post_purge_documents_with_metadata` interceptor. """ return response + def post_purge_documents_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for purge_documents + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DocumentService server but before it is returned to user code. + + We recommend only using this `post_purge_documents_with_metadata` + interceptor in new development instead of the `post_purge_documents` interceptor. + When both interceptors are used, this `post_purge_documents_with_metadata` interceptor runs after the + `post_purge_documents` interceptor. The (possibly modified) response returned by + `post_purge_documents` will be passed to + `post_purge_documents_with_metadata`. + """ + return response, metadata + def pre_update_document( self, request: document_service.UpdateDocumentRequest, @@ -325,12 +468,35 @@ def post_update_document( ) -> gcd_document.Document: """Post-rpc interceptor for update_document - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_document_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DocumentService server but before - it is returned to user code. + it is returned to user code. This `post_update_document` interceptor runs + before the `post_update_document_with_metadata` interceptor. """ return response + def post_update_document_with_metadata( + self, + response: gcd_document.Document, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gcd_document.Document, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_document + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DocumentService server but before it is returned to user code. + + We recommend only using this `post_update_document_with_metadata` + interceptor in new development instead of the `post_update_document` interceptor. + When both interceptors are used, this `post_update_document_with_metadata` interceptor runs after the + `post_update_document` interceptor. The (possibly modified) response returned by + `post_update_document` will be passed to + `post_update_document_with_metadata`. 
+ """ + return response, metadata + def pre_cancel_operation( self, request: operations_pb2.CancelOperationRequest, @@ -799,6 +965,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_batch_get_documents_metadata(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_batch_get_documents_metadata_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -954,6 +1124,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_document(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_document_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1207,6 +1381,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_document(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_document_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1356,6 +1534,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_import_documents(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_import_documents_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1501,6 +1683,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_documents(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_documents_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1652,6 +1838,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_purge_documents(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_purge_documents_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1803,6 +1993,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_document(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_document_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/engine_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/engine_service/client.py index d64184effa1a..ba41703167c5 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/engine_service/client.py +++ 
b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/engine_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -521,6 +523,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -1433,16 +1462,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -1488,16 +1521,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def cancel_operation( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/engine_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/engine_service/transports/rest.py index 1309d3b56ae3..c3981c1e9ffb 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/engine_service/transports/rest.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/engine_service/transports/rest.py @@ -137,12 +137,35 @@ def post_create_engine( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_engine - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_engine_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the EngineService server but before - it is returned to user code. + it is returned to user code. This `post_create_engine` interceptor runs + before the `post_create_engine_with_metadata` interceptor. 
""" return response + def post_create_engine_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_engine + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the EngineService server but before it is returned to user code. + + We recommend only using this `post_create_engine_with_metadata` + interceptor in new development instead of the `post_create_engine` interceptor. + When both interceptors are used, this `post_create_engine_with_metadata` interceptor runs after the + `post_create_engine` interceptor. The (possibly modified) response returned by + `post_create_engine` will be passed to + `post_create_engine_with_metadata`. + """ + return response, metadata + def pre_delete_engine( self, request: engine_service.DeleteEngineRequest, @@ -162,12 +185,35 @@ def post_delete_engine( ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_engine - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_engine_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the EngineService server but before - it is returned to user code. + it is returned to user code. This `post_delete_engine` interceptor runs + before the `post_delete_engine_with_metadata` interceptor. """ return response + def post_delete_engine_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_engine + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the EngineService server but before it is returned to user code. + + We recommend only using this `post_delete_engine_with_metadata` + interceptor in new development instead of the `post_delete_engine` interceptor. + When both interceptors are used, this `post_delete_engine_with_metadata` interceptor runs after the + `post_delete_engine` interceptor. The (possibly modified) response returned by + `post_delete_engine` will be passed to + `post_delete_engine_with_metadata`. + """ + return response, metadata + def pre_get_engine( self, request: engine_service.GetEngineRequest, @@ -185,12 +231,33 @@ def pre_get_engine( def post_get_engine(self, response: engine.Engine) -> engine.Engine: """Post-rpc interceptor for get_engine - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_engine_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the EngineService server but before - it is returned to user code. + it is returned to user code. This `post_get_engine` interceptor runs + before the `post_get_engine_with_metadata` interceptor. """ return response + def post_get_engine_with_metadata( + self, response: engine.Engine, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[engine.Engine, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_engine + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the EngineService server but before it is returned to user code. 
+ + We recommend only using this `post_get_engine_with_metadata` + interceptor in new development instead of the `post_get_engine` interceptor. + When both interceptors are used, this `post_get_engine_with_metadata` interceptor runs after the + `post_get_engine` interceptor. The (possibly modified) response returned by + `post_get_engine` will be passed to + `post_get_engine_with_metadata`. + """ + return response, metadata + def pre_list_engines( self, request: engine_service.ListEnginesRequest, @@ -210,12 +277,37 @@ def post_list_engines( ) -> engine_service.ListEnginesResponse: """Post-rpc interceptor for list_engines - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_engines_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the EngineService server but before - it is returned to user code. + it is returned to user code. This `post_list_engines` interceptor runs + before the `post_list_engines_with_metadata` interceptor. """ return response + def post_list_engines_with_metadata( + self, + response: engine_service.ListEnginesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + engine_service.ListEnginesResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_engines + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the EngineService server but before it is returned to user code. + + We recommend only using this `post_list_engines_with_metadata` + interceptor in new development instead of the `post_list_engines` interceptor. + When both interceptors are used, this `post_list_engines_with_metadata` interceptor runs after the + `post_list_engines` interceptor. The (possibly modified) response returned by + `post_list_engines` will be passed to + `post_list_engines_with_metadata`. + """ + return response, metadata + def pre_update_engine( self, request: engine_service.UpdateEngineRequest, @@ -233,12 +325,35 @@ def pre_update_engine( def post_update_engine(self, response: gcd_engine.Engine) -> gcd_engine.Engine: """Post-rpc interceptor for update_engine - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_engine_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the EngineService server but before - it is returned to user code. + it is returned to user code. This `post_update_engine` interceptor runs + before the `post_update_engine_with_metadata` interceptor. """ return response + def post_update_engine_with_metadata( + self, + response: gcd_engine.Engine, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gcd_engine.Engine, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_engine + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the EngineService server but before it is returned to user code. + + We recommend only using this `post_update_engine_with_metadata` + interceptor in new development instead of the `post_update_engine` interceptor. + When both interceptors are used, this `post_update_engine_with_metadata` interceptor runs after the + `post_update_engine` interceptor. The (possibly modified) response returned by + `post_update_engine` will be passed to + `post_update_engine_with_metadata`. 
+ """ + return response, metadata + def pre_cancel_operation( self, request: operations_pb2.CancelOperationRequest, @@ -704,6 +819,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_engine(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_engine_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -847,6 +966,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_engine(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_engine_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -996,6 +1119,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_engine(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_engine_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1143,6 +1270,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_engines(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_engines_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1296,6 +1427,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_engine(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_engine_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/grounded_generation_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/grounded_generation_service/client.py index 103c667a79ec..f43125276d83 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/grounded_generation_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/grounded_generation_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -531,6 +533,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. 
+ """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -1061,16 +1090,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -1116,16 +1149,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def cancel_operation( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/grounded_generation_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/grounded_generation_service/transports/rest.py index 9b7b9e98fbcb..63a382de86bc 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/grounded_generation_service/transports/rest.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/grounded_generation_service/transports/rest.py @@ -112,12 +112,38 @@ def post_check_grounding( ) -> grounded_generation_service.CheckGroundingResponse: """Post-rpc interceptor for check_grounding - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_check_grounding_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the GroundedGenerationService server but before - it is returned to user code. + it is returned to user code. This `post_check_grounding` interceptor runs + before the `post_check_grounding_with_metadata` interceptor. """ return response + def post_check_grounding_with_metadata( + self, + response: grounded_generation_service.CheckGroundingResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + grounded_generation_service.CheckGroundingResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for check_grounding + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the GroundedGenerationService server but before it is returned to user code. 
+ + We recommend only using this `post_check_grounding_with_metadata` + interceptor in new development instead of the `post_check_grounding` interceptor. + When both interceptors are used, this `post_check_grounding_with_metadata` interceptor runs after the + `post_check_grounding` interceptor. The (possibly modified) response returned by + `post_check_grounding` will be passed to + `post_check_grounding_with_metadata`. + """ + return response, metadata + def pre_generate_grounded_content( self, request: grounded_generation_service.GenerateGroundedContentRequest, @@ -138,12 +164,38 @@ def post_generate_grounded_content( ) -> grounded_generation_service.GenerateGroundedContentResponse: """Post-rpc interceptor for generate_grounded_content - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_generate_grounded_content_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the GroundedGenerationService server but before - it is returned to user code. + it is returned to user code. This `post_generate_grounded_content` interceptor runs + before the `post_generate_grounded_content_with_metadata` interceptor. """ return response + def post_generate_grounded_content_with_metadata( + self, + response: grounded_generation_service.GenerateGroundedContentResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + grounded_generation_service.GenerateGroundedContentResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for generate_grounded_content + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the GroundedGenerationService server but before it is returned to user code. + + We recommend only using this `post_generate_grounded_content_with_metadata` + interceptor in new development instead of the `post_generate_grounded_content` interceptor. + When both interceptors are used, this `post_generate_grounded_content_with_metadata` interceptor runs after the + `post_generate_grounded_content` interceptor. The (possibly modified) response returned by + `post_generate_grounded_content` will be passed to + `post_generate_grounded_content_with_metadata`. 
+ """ + return response, metadata + def pre_cancel_operation( self, request: operations_pb2.CancelOperationRequest, @@ -437,6 +489,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_check_grounding(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_check_grounding_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -595,6 +651,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_generate_grounded_content(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_generate_grounded_content_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/project_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/project_service/client.py index 73cd5a91dbd5..f4eb201ca72b 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/project_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/project_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -483,6 +485,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -867,16 +896,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -922,16 +955,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def cancel_operation( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/project_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/project_service/transports/rest.py index 280f111b3be5..c0ac7275aac5 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/project_service/transports/rest.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/project_service/transports/rest.py @@ -103,12 +103,35 @@ def post_provision_project( ) -> operations_pb2.Operation: """Post-rpc interceptor for provision_project - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_provision_project_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ProjectService server but before - it is returned to user code. + it is returned to user code. This `post_provision_project` interceptor runs + before the `post_provision_project_with_metadata` interceptor. """ return response + def post_provision_project_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for provision_project + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ProjectService server but before it is returned to user code. + + We recommend only using this `post_provision_project_with_metadata` + interceptor in new development instead of the `post_provision_project` interceptor. + When both interceptors are used, this `post_provision_project_with_metadata` interceptor runs after the + `post_provision_project` interceptor. The (possibly modified) response returned by + `post_provision_project` will be passed to + `post_provision_project_with_metadata`. + """ + return response, metadata + def pre_cancel_operation( self, request: operations_pb2.CancelOperationRequest, @@ -576,6 +599,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_provision_project(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_provision_project_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/rank_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/rank_service/client.py index f475c2211c8b..6d2c31aa1956 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/rank_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/rank_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. 
# from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -484,6 +486,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -823,16 +852,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -878,16 +911,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def cancel_operation( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/rank_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/rank_service/transports/rest.py index 04d8e0b8bcf6..3107d51a859b 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/rank_service/transports/rest.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/rank_service/transports/rest.py @@ -101,12 +101,35 @@ def post_rank( ) -> rank_service.RankResponse: """Post-rpc interceptor for rank - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_rank_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RankService server but before - it is returned to user code. + it is returned to user code. This `post_rank` interceptor runs + before the `post_rank_with_metadata` interceptor. 
""" return response + def post_rank_with_metadata( + self, + response: rank_service.RankResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[rank_service.RankResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for rank + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RankService server but before it is returned to user code. + + We recommend only using this `post_rank_with_metadata` + interceptor in new development instead of the `post_rank` interceptor. + When both interceptors are used, this `post_rank_with_metadata` interceptor runs after the + `post_rank` interceptor. The (possibly modified) response returned by + `post_rank` will be passed to + `post_rank_with_metadata`. + """ + return response, metadata + def pre_cancel_operation( self, request: operations_pb2.CancelOperationRequest, @@ -395,6 +418,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_rank(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_rank_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/recommendation_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/recommendation_service/client.py index b37910f058fa..bdeca9a78d99 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/recommendation_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/recommendation_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -562,6 +564,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -911,16 +940,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. 
+ return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -966,16 +999,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def cancel_operation( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/recommendation_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/recommendation_service/transports/rest.py index 01fe3f63615d..863143a7e3ef 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/recommendation_service/transports/rest.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/recommendation_service/transports/rest.py @@ -103,12 +103,38 @@ def post_recommend( ) -> recommendation_service.RecommendResponse: """Post-rpc interceptor for recommend - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_recommend_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RecommendationService server but before - it is returned to user code. + it is returned to user code. This `post_recommend` interceptor runs + before the `post_recommend_with_metadata` interceptor. """ return response + def post_recommend_with_metadata( + self, + response: recommendation_service.RecommendResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + recommendation_service.RecommendResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for recommend + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RecommendationService server but before it is returned to user code. + + We recommend only using this `post_recommend_with_metadata` + interceptor in new development instead of the `post_recommend` interceptor. + When both interceptors are used, this `post_recommend_with_metadata` interceptor runs after the + `post_recommend` interceptor. The (possibly modified) response returned by + `post_recommend` will be passed to + `post_recommend_with_metadata`. 
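The transport changes above wire both hooks in a fixed order: the deprecated post-RPC hook runs first, then the HTTP response headers are converted to (key, str(value)) pairs and handed to the `*_with_metadata` hook. A condensed, self-contained sketch of that call order, with stand-in objects in place of the generated transport pieces:

class _FakeHTTPResponse:
    """Stand-in for the underlying HTTP response object."""

    headers = {"content-type": "application/json", "x-goog-request-id": "abc123"}


class _FakeInterceptor:
    def post_recommend(self, response):
        # Deprecated hook: runs first.
        return response

    def post_recommend_with_metadata(self, response, metadata):
        # Metadata-aware hook: receives the header pairs, runs second.
        return response, metadata


def finish_call(resp, http_response, interceptor):
    resp = interceptor.post_recommend(resp)
    response_metadata = [(k, str(v)) for k, v in http_response.headers.items()]
    resp, _ = interceptor.post_recommend_with_metadata(resp, response_metadata)
    return resp


finish_call({"results": []}, _FakeHTTPResponse(), _FakeInterceptor())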
+ """ + return response, metadata + def pre_cancel_operation( self, request: operations_pb2.CancelOperationRequest, @@ -395,6 +421,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_recommend(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_recommend_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/schema_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/schema_service/client.py index f24399b38f93..d8ad7a3575d0 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/schema_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/schema_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -517,6 +519,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -1383,16 +1412,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -1438,16 +1471,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. 
+ return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def cancel_operation( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/schema_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/schema_service/transports/rest.py index 9e33bef7723d..693ea25901a8 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/schema_service/transports/rest.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/schema_service/transports/rest.py @@ -135,12 +135,35 @@ def post_create_schema( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_schema - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_schema_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the SchemaService server but before - it is returned to user code. + it is returned to user code. This `post_create_schema` interceptor runs + before the `post_create_schema_with_metadata` interceptor. """ return response + def post_create_schema_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_schema + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SchemaService server but before it is returned to user code. + + We recommend only using this `post_create_schema_with_metadata` + interceptor in new development instead of the `post_create_schema` interceptor. + When both interceptors are used, this `post_create_schema_with_metadata` interceptor runs after the + `post_create_schema` interceptor. The (possibly modified) response returned by + `post_create_schema` will be passed to + `post_create_schema_with_metadata`. + """ + return response, metadata + def pre_delete_schema( self, request: schema_service.DeleteSchemaRequest, @@ -160,12 +183,35 @@ def post_delete_schema( ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_schema - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_schema_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the SchemaService server but before - it is returned to user code. + it is returned to user code. This `post_delete_schema` interceptor runs + before the `post_delete_schema_with_metadata` interceptor. """ return response + def post_delete_schema_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_schema + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SchemaService server but before it is returned to user code. + + We recommend only using this `post_delete_schema_with_metadata` + interceptor in new development instead of the `post_delete_schema` interceptor. + When both interceptors are used, this `post_delete_schema_with_metadata` interceptor runs after the + `post_delete_schema` interceptor. 
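The `_add_cred_info_for_auth_errors` helper and the try/except wrapping added around these operation calls mean that, for 401, 403 and 404 responses, a JSON-serialized credential summary is appended to the exception's details before the error is re-raised (and only when the installed google-auth exposes `get_cred_info`, i.e. google-auth>=2.35.0). A caller-side sketch of what that looks like, with the operation name purely hypothetical:

from google.api_core import exceptions as core_exceptions
from google.cloud import discoveryengine_v1
from google.longrunning import operations_pb2

client = discoveryengine_v1.SchemaServiceClient()

try:
    # Hypothetical operation name, used only to exercise the error path.
    client.get_operation(
        request=operations_pb2.GetOperationRequest(name="operations/example-op")
    )
except core_exceptions.GoogleAPICallError as e:
    # For 401/403/404 errors, e.details may now also include a JSON string
    # describing the credential that was used, which helps debug auth failures.
    print(e.code, e.details)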
The (possibly modified) response returned by + `post_delete_schema` will be passed to + `post_delete_schema_with_metadata`. + """ + return response, metadata + def pre_get_schema( self, request: schema_service.GetSchemaRequest, @@ -183,12 +229,33 @@ def pre_get_schema( def post_get_schema(self, response: schema.Schema) -> schema.Schema: """Post-rpc interceptor for get_schema - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_schema_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the SchemaService server but before - it is returned to user code. + it is returned to user code. This `post_get_schema` interceptor runs + before the `post_get_schema_with_metadata` interceptor. """ return response + def post_get_schema_with_metadata( + self, response: schema.Schema, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[schema.Schema, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_schema + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SchemaService server but before it is returned to user code. + + We recommend only using this `post_get_schema_with_metadata` + interceptor in new development instead of the `post_get_schema` interceptor. + When both interceptors are used, this `post_get_schema_with_metadata` interceptor runs after the + `post_get_schema` interceptor. The (possibly modified) response returned by + `post_get_schema` will be passed to + `post_get_schema_with_metadata`. + """ + return response, metadata + def pre_list_schemas( self, request: schema_service.ListSchemasRequest, @@ -208,12 +275,37 @@ def post_list_schemas( ) -> schema_service.ListSchemasResponse: """Post-rpc interceptor for list_schemas - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_schemas_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the SchemaService server but before - it is returned to user code. + it is returned to user code. This `post_list_schemas` interceptor runs + before the `post_list_schemas_with_metadata` interceptor. """ return response + def post_list_schemas_with_metadata( + self, + response: schema_service.ListSchemasResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + schema_service.ListSchemasResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_schemas + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SchemaService server but before it is returned to user code. + + We recommend only using this `post_list_schemas_with_metadata` + interceptor in new development instead of the `post_list_schemas` interceptor. + When both interceptors are used, this `post_list_schemas_with_metadata` interceptor runs after the + `post_list_schemas` interceptor. The (possibly modified) response returned by + `post_list_schemas` will be passed to + `post_list_schemas_with_metadata`. + """ + return response, metadata + def pre_update_schema( self, request: schema_service.UpdateSchemaRequest, @@ -233,12 +325,35 @@ def post_update_schema( ) -> operations_pb2.Operation: """Post-rpc interceptor for update_schema - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_schema_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response after it is returned by the SchemaService server but before - it is returned to user code. + it is returned to user code. This `post_update_schema` interceptor runs + before the `post_update_schema_with_metadata` interceptor. """ return response + def post_update_schema_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_schema + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SchemaService server but before it is returned to user code. + + We recommend only using this `post_update_schema_with_metadata` + interceptor in new development instead of the `post_update_schema` interceptor. + When both interceptors are used, this `post_update_schema_with_metadata` interceptor runs after the + `post_update_schema` interceptor. The (possibly modified) response returned by + `post_update_schema` will be passed to + `post_update_schema_with_metadata`. + """ + return response, metadata + def pre_cancel_operation( self, request: operations_pb2.CancelOperationRequest, @@ -704,6 +819,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_schema(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_schema_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -847,6 +966,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_schema(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_schema_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -995,6 +1118,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_schema(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_schema_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1142,6 +1269,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_schemas(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_schemas_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1293,6 +1424,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_schema(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_schema_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/search_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/search_service/client.py 
index de36fe6fcde3..f2abeafec42f 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/search_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/search_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -611,6 +613,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -1084,16 +1113,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -1139,16 +1172,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def cancel_operation( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/search_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/search_service/transports/rest.py index 88ee2feedd8b..29755bad44db 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/search_service/transports/rest.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/search_service/transports/rest.py @@ -109,12 +109,35 @@ def post_search( ) -> search_service.SearchResponse: """Post-rpc interceptor for search - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_search_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the SearchService server but before - it is returned to user code. + it is returned to user code. 
This `post_search` interceptor runs + before the `post_search_with_metadata` interceptor. """ return response + def post_search_with_metadata( + self, + response: search_service.SearchResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[search_service.SearchResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for search + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SearchService server but before it is returned to user code. + + We recommend only using this `post_search_with_metadata` + interceptor in new development instead of the `post_search` interceptor. + When both interceptors are used, this `post_search_with_metadata` interceptor runs after the + `post_search` interceptor. The (possibly modified) response returned by + `post_search` will be passed to + `post_search_with_metadata`. + """ + return response, metadata + def pre_search_lite( self, request: search_service.SearchRequest, @@ -132,12 +155,35 @@ def post_search_lite( ) -> search_service.SearchResponse: """Post-rpc interceptor for search_lite - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_search_lite_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the SearchService server but before - it is returned to user code. + it is returned to user code. This `post_search_lite` interceptor runs + before the `post_search_lite_with_metadata` interceptor. """ return response + def post_search_lite_with_metadata( + self, + response: search_service.SearchResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[search_service.SearchResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for search_lite + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SearchService server but before it is returned to user code. + + We recommend only using this `post_search_lite_with_metadata` + interceptor in new development instead of the `post_search_lite` interceptor. + When both interceptors are used, this `post_search_lite_with_metadata` interceptor runs after the + `post_search_lite` interceptor. The (possibly modified) response returned by + `post_search_lite` will be passed to + `post_search_lite_with_metadata`. 
+ """ + return response, metadata + def pre_cancel_operation( self, request: operations_pb2.CancelOperationRequest, @@ -428,6 +474,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_search(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_search_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -585,6 +635,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_search_lite(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_search_lite_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/search_tuning_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/search_tuning_service/client.py index c5e377e3a91f..34f4d69f35be 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/search_tuning_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/search_tuning_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -515,6 +517,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -968,16 +997,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -1023,16 +1056,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def cancel_operation( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/search_tuning_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/search_tuning_service/transports/rest.py index 9d8d10a495be..54b9defa25f1 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/search_tuning_service/transports/rest.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/search_tuning_service/transports/rest.py @@ -112,12 +112,38 @@ def post_list_custom_models( ) -> search_tuning_service.ListCustomModelsResponse: """Post-rpc interceptor for list_custom_models - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_custom_models_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the SearchTuningService server but before - it is returned to user code. + it is returned to user code. This `post_list_custom_models` interceptor runs + before the `post_list_custom_models_with_metadata` interceptor. """ return response + def post_list_custom_models_with_metadata( + self, + response: search_tuning_service.ListCustomModelsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + search_tuning_service.ListCustomModelsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_custom_models + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SearchTuningService server but before it is returned to user code. + + We recommend only using this `post_list_custom_models_with_metadata` + interceptor in new development instead of the `post_list_custom_models` interceptor. + When both interceptors are used, this `post_list_custom_models_with_metadata` interceptor runs after the + `post_list_custom_models` interceptor. The (possibly modified) response returned by + `post_list_custom_models` will be passed to + `post_list_custom_models_with_metadata`. + """ + return response, metadata + def pre_train_custom_model( self, request: search_tuning_service.TrainCustomModelRequest, @@ -138,12 +164,35 @@ def post_train_custom_model( ) -> operations_pb2.Operation: """Post-rpc interceptor for train_custom_model - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_train_custom_model_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the SearchTuningService server but before - it is returned to user code. + it is returned to user code. This `post_train_custom_model` interceptor runs + before the `post_train_custom_model_with_metadata` interceptor. 
""" return response + def post_train_custom_model_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for train_custom_model + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SearchTuningService server but before it is returned to user code. + + We recommend only using this `post_train_custom_model_with_metadata` + interceptor in new development instead of the `post_train_custom_model` interceptor. + When both interceptors are used, this `post_train_custom_model_with_metadata` interceptor runs after the + `post_train_custom_model` interceptor. The (possibly modified) response returned by + `post_train_custom_model` will be passed to + `post_train_custom_model_with_metadata`. + """ + return response, metadata + def pre_cancel_operation( self, request: operations_pb2.CancelOperationRequest, @@ -607,6 +656,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_custom_models(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_custom_models_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -761,6 +814,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_train_custom_model(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_train_custom_model_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/site_search_engine_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/site_search_engine_service/client.py index f4ab9ab7f612..a5cbd3d6ac64 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/site_search_engine_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/site_search_engine_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -520,6 +522,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. 
+ """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -2229,16 +2258,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -2284,16 +2317,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def cancel_operation( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/site_search_engine_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/site_search_engine_service/transports/rest.py index 47d095386599..e91563826a8a 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/site_search_engine_service/transports/rest.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/site_search_engine_service/transports/rest.py @@ -195,12 +195,35 @@ def post_batch_create_target_sites( ) -> operations_pb2.Operation: """Post-rpc interceptor for batch_create_target_sites - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_batch_create_target_sites_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the SiteSearchEngineService server but before - it is returned to user code. + it is returned to user code. This `post_batch_create_target_sites` interceptor runs + before the `post_batch_create_target_sites_with_metadata` interceptor. """ return response + def post_batch_create_target_sites_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for batch_create_target_sites + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SiteSearchEngineService server but before it is returned to user code. 
+ + We recommend only using this `post_batch_create_target_sites_with_metadata` + interceptor in new development instead of the `post_batch_create_target_sites` interceptor. + When both interceptors are used, this `post_batch_create_target_sites_with_metadata` interceptor runs after the + `post_batch_create_target_sites` interceptor. The (possibly modified) response returned by + `post_batch_create_target_sites` will be passed to + `post_batch_create_target_sites_with_metadata`. + """ + return response, metadata + def pre_batch_verify_target_sites( self, request: site_search_engine_service.BatchVerifyTargetSitesRequest, @@ -221,12 +244,35 @@ def post_batch_verify_target_sites( ) -> operations_pb2.Operation: """Post-rpc interceptor for batch_verify_target_sites - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_batch_verify_target_sites_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the SiteSearchEngineService server but before - it is returned to user code. + it is returned to user code. This `post_batch_verify_target_sites` interceptor runs + before the `post_batch_verify_target_sites_with_metadata` interceptor. """ return response + def post_batch_verify_target_sites_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for batch_verify_target_sites + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SiteSearchEngineService server but before it is returned to user code. + + We recommend only using this `post_batch_verify_target_sites_with_metadata` + interceptor in new development instead of the `post_batch_verify_target_sites` interceptor. + When both interceptors are used, this `post_batch_verify_target_sites_with_metadata` interceptor runs after the + `post_batch_verify_target_sites` interceptor. The (possibly modified) response returned by + `post_batch_verify_target_sites` will be passed to + `post_batch_verify_target_sites_with_metadata`. + """ + return response, metadata + def pre_create_target_site( self, request: site_search_engine_service.CreateTargetSiteRequest, @@ -247,12 +293,35 @@ def post_create_target_site( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_target_site - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_target_site_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the SiteSearchEngineService server but before - it is returned to user code. + it is returned to user code. This `post_create_target_site` interceptor runs + before the `post_create_target_site_with_metadata` interceptor. """ return response + def post_create_target_site_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_target_site + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SiteSearchEngineService server but before it is returned to user code. + + We recommend only using this `post_create_target_site_with_metadata` + interceptor in new development instead of the `post_create_target_site` interceptor. 
+ When both interceptors are used, this `post_create_target_site_with_metadata` interceptor runs after the + `post_create_target_site` interceptor. The (possibly modified) response returned by + `post_create_target_site` will be passed to + `post_create_target_site_with_metadata`. + """ + return response, metadata + def pre_delete_target_site( self, request: site_search_engine_service.DeleteTargetSiteRequest, @@ -273,12 +342,35 @@ def post_delete_target_site( ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_target_site - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_target_site_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the SiteSearchEngineService server but before - it is returned to user code. + it is returned to user code. This `post_delete_target_site` interceptor runs + before the `post_delete_target_site_with_metadata` interceptor. """ return response + def post_delete_target_site_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_target_site + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SiteSearchEngineService server but before it is returned to user code. + + We recommend only using this `post_delete_target_site_with_metadata` + interceptor in new development instead of the `post_delete_target_site` interceptor. + When both interceptors are used, this `post_delete_target_site_with_metadata` interceptor runs after the + `post_delete_target_site` interceptor. The (possibly modified) response returned by + `post_delete_target_site` will be passed to + `post_delete_target_site_with_metadata`. + """ + return response, metadata + def pre_disable_advanced_site_search( self, request: site_search_engine_service.DisableAdvancedSiteSearchRequest, @@ -299,12 +391,35 @@ def post_disable_advanced_site_search( ) -> operations_pb2.Operation: """Post-rpc interceptor for disable_advanced_site_search - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_disable_advanced_site_search_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the SiteSearchEngineService server but before - it is returned to user code. + it is returned to user code. This `post_disable_advanced_site_search` interceptor runs + before the `post_disable_advanced_site_search_with_metadata` interceptor. """ return response + def post_disable_advanced_site_search_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for disable_advanced_site_search + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SiteSearchEngineService server but before it is returned to user code. + + We recommend only using this `post_disable_advanced_site_search_with_metadata` + interceptor in new development instead of the `post_disable_advanced_site_search` interceptor. + When both interceptors are used, this `post_disable_advanced_site_search_with_metadata` interceptor runs after the + `post_disable_advanced_site_search` interceptor. 
The (possibly modified) response returned by + `post_disable_advanced_site_search` will be passed to + `post_disable_advanced_site_search_with_metadata`. + """ + return response, metadata + def pre_enable_advanced_site_search( self, request: site_search_engine_service.EnableAdvancedSiteSearchRequest, @@ -325,12 +440,35 @@ def post_enable_advanced_site_search( ) -> operations_pb2.Operation: """Post-rpc interceptor for enable_advanced_site_search - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_enable_advanced_site_search_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the SiteSearchEngineService server but before - it is returned to user code. + it is returned to user code. This `post_enable_advanced_site_search` interceptor runs + before the `post_enable_advanced_site_search_with_metadata` interceptor. """ return response + def post_enable_advanced_site_search_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for enable_advanced_site_search + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SiteSearchEngineService server but before it is returned to user code. + + We recommend only using this `post_enable_advanced_site_search_with_metadata` + interceptor in new development instead of the `post_enable_advanced_site_search` interceptor. + When both interceptors are used, this `post_enable_advanced_site_search_with_metadata` interceptor runs after the + `post_enable_advanced_site_search` interceptor. The (possibly modified) response returned by + `post_enable_advanced_site_search` will be passed to + `post_enable_advanced_site_search_with_metadata`. + """ + return response, metadata + def pre_fetch_domain_verification_status( self, request: site_search_engine_service.FetchDomainVerificationStatusRequest, @@ -351,12 +489,38 @@ def post_fetch_domain_verification_status( ) -> site_search_engine_service.FetchDomainVerificationStatusResponse: """Post-rpc interceptor for fetch_domain_verification_status - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_fetch_domain_verification_status_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the SiteSearchEngineService server but before - it is returned to user code. + it is returned to user code. This `post_fetch_domain_verification_status` interceptor runs + before the `post_fetch_domain_verification_status_with_metadata` interceptor. """ return response + def post_fetch_domain_verification_status_with_metadata( + self, + response: site_search_engine_service.FetchDomainVerificationStatusResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + site_search_engine_service.FetchDomainVerificationStatusResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for fetch_domain_verification_status + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SiteSearchEngineService server but before it is returned to user code. + + We recommend only using this `post_fetch_domain_verification_status_with_metadata` + interceptor in new development instead of the `post_fetch_domain_verification_status` interceptor. 
+ When both interceptors are used, this `post_fetch_domain_verification_status_with_metadata` interceptor runs after the + `post_fetch_domain_verification_status` interceptor. The (possibly modified) response returned by + `post_fetch_domain_verification_status` will be passed to + `post_fetch_domain_verification_status_with_metadata`. + """ + return response, metadata + def pre_get_site_search_engine( self, request: site_search_engine_service.GetSiteSearchEngineRequest, @@ -377,12 +541,37 @@ def post_get_site_search_engine( ) -> site_search_engine.SiteSearchEngine: """Post-rpc interceptor for get_site_search_engine - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_site_search_engine_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the SiteSearchEngineService server but before - it is returned to user code. + it is returned to user code. This `post_get_site_search_engine` interceptor runs + before the `post_get_site_search_engine_with_metadata` interceptor. """ return response + def post_get_site_search_engine_with_metadata( + self, + response: site_search_engine.SiteSearchEngine, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + site_search_engine.SiteSearchEngine, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for get_site_search_engine + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SiteSearchEngineService server but before it is returned to user code. + + We recommend only using this `post_get_site_search_engine_with_metadata` + interceptor in new development instead of the `post_get_site_search_engine` interceptor. + When both interceptors are used, this `post_get_site_search_engine_with_metadata` interceptor runs after the + `post_get_site_search_engine` interceptor. The (possibly modified) response returned by + `post_get_site_search_engine` will be passed to + `post_get_site_search_engine_with_metadata`. + """ + return response, metadata + def pre_get_target_site( self, request: site_search_engine_service.GetTargetSiteRequest, @@ -403,12 +592,35 @@ def post_get_target_site( ) -> site_search_engine.TargetSite: """Post-rpc interceptor for get_target_site - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_target_site_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the SiteSearchEngineService server but before - it is returned to user code. + it is returned to user code. This `post_get_target_site` interceptor runs + before the `post_get_target_site_with_metadata` interceptor. """ return response + def post_get_target_site_with_metadata( + self, + response: site_search_engine.TargetSite, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[site_search_engine.TargetSite, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_target_site + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SiteSearchEngineService server but before it is returned to user code. + + We recommend only using this `post_get_target_site_with_metadata` + interceptor in new development instead of the `post_get_target_site` interceptor. + When both interceptors are used, this `post_get_target_site_with_metadata` interceptor runs after the + `post_get_target_site` interceptor. 
The (possibly modified) response returned by + `post_get_target_site` will be passed to + `post_get_target_site_with_metadata`. + """ + return response, metadata + def pre_list_target_sites( self, request: site_search_engine_service.ListTargetSitesRequest, @@ -429,12 +641,38 @@ def post_list_target_sites( ) -> site_search_engine_service.ListTargetSitesResponse: """Post-rpc interceptor for list_target_sites - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_target_sites_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the SiteSearchEngineService server but before - it is returned to user code. + it is returned to user code. This `post_list_target_sites` interceptor runs + before the `post_list_target_sites_with_metadata` interceptor. """ return response + def post_list_target_sites_with_metadata( + self, + response: site_search_engine_service.ListTargetSitesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + site_search_engine_service.ListTargetSitesResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_target_sites + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SiteSearchEngineService server but before it is returned to user code. + + We recommend only using this `post_list_target_sites_with_metadata` + interceptor in new development instead of the `post_list_target_sites` interceptor. + When both interceptors are used, this `post_list_target_sites_with_metadata` interceptor runs after the + `post_list_target_sites` interceptor. The (possibly modified) response returned by + `post_list_target_sites` will be passed to + `post_list_target_sites_with_metadata`. + """ + return response, metadata + def pre_recrawl_uris( self, request: site_search_engine_service.RecrawlUrisRequest, @@ -455,12 +693,35 @@ def post_recrawl_uris( ) -> operations_pb2.Operation: """Post-rpc interceptor for recrawl_uris - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_recrawl_uris_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the SiteSearchEngineService server but before - it is returned to user code. + it is returned to user code. This `post_recrawl_uris` interceptor runs + before the `post_recrawl_uris_with_metadata` interceptor. """ return response + def post_recrawl_uris_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for recrawl_uris + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SiteSearchEngineService server but before it is returned to user code. + + We recommend only using this `post_recrawl_uris_with_metadata` + interceptor in new development instead of the `post_recrawl_uris` interceptor. + When both interceptors are used, this `post_recrawl_uris_with_metadata` interceptor runs after the + `post_recrawl_uris` interceptor. The (possibly modified) response returned by + `post_recrawl_uris` will be passed to + `post_recrawl_uris_with_metadata`. 
+ """ + return response, metadata + def pre_update_target_site( self, request: site_search_engine_service.UpdateTargetSiteRequest, @@ -481,12 +742,35 @@ def post_update_target_site( ) -> operations_pb2.Operation: """Post-rpc interceptor for update_target_site - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_target_site_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the SiteSearchEngineService server but before - it is returned to user code. + it is returned to user code. This `post_update_target_site` interceptor runs + before the `post_update_target_site_with_metadata` interceptor. """ return response + def post_update_target_site_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_target_site + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SiteSearchEngineService server but before it is returned to user code. + + We recommend only using this `post_update_target_site_with_metadata` + interceptor in new development instead of the `post_update_target_site` interceptor. + When both interceptors are used, this `post_update_target_site_with_metadata` interceptor runs after the + `post_update_target_site` interceptor. The (possibly modified) response returned by + `post_update_target_site` will be passed to + `post_update_target_site_with_metadata`. + """ + return response, metadata + def pre_cancel_operation( self, request: operations_pb2.CancelOperationRequest, @@ -954,6 +1238,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_batch_create_target_sites(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_batch_create_target_sites_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1106,6 +1394,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_batch_verify_target_sites(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_batch_verify_target_sites_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1260,6 +1552,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_target_site(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_target_site_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1408,6 +1704,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_target_site(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_target_site_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1563,6 +1863,10 @@ def __call__( json_format.Parse(response.content, resp, 
ignore_unknown_fields=True) resp = self._interceptor.post_disable_advanced_site_search(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_disable_advanced_site_search_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1716,6 +2020,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_enable_advanced_site_search(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_enable_advanced_site_search_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1871,6 +2179,13 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_fetch_domain_verification_status(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_fetch_domain_verification_status_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2024,6 +2339,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_site_search_engine(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_site_search_engine_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2173,6 +2492,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_target_site(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_target_site_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2323,6 +2646,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_target_sites(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_target_sites_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2477,6 +2804,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_recrawl_uris(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_recrawl_uris_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2631,6 +2962,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_target_site(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_target_site_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/user_event_service/client.py 
b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/user_event_service/client.py index 1fe7caa085df..3f6297d88125 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/user_event_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/user_event_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -549,6 +551,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -1245,16 +1274,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -1300,16 +1333,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def cancel_operation( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/user_event_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/user_event_service/transports/rest.py index 0c2f6955f404..ca635d0a5725 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/user_event_service/transports/rest.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1/services/user_event_service/transports/rest.py @@ -134,12 +134,35 @@ def post_collect_user_event( ) -> httpbody_pb2.HttpBody: """Post-rpc interceptor for collect_user_event - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_collect_user_event_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response after it is returned by the UserEventService server but before - it is returned to user code. + it is returned to user code. This `post_collect_user_event` interceptor runs + before the `post_collect_user_event_with_metadata` interceptor. """ return response + def post_collect_user_event_with_metadata( + self, + response: httpbody_pb2.HttpBody, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[httpbody_pb2.HttpBody, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for collect_user_event + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the UserEventService server but before it is returned to user code. + + We recommend only using this `post_collect_user_event_with_metadata` + interceptor in new development instead of the `post_collect_user_event` interceptor. + When both interceptors are used, this `post_collect_user_event_with_metadata` interceptor runs after the + `post_collect_user_event` interceptor. The (possibly modified) response returned by + `post_collect_user_event` will be passed to + `post_collect_user_event_with_metadata`. + """ + return response, metadata + def pre_import_user_events( self, request: import_config.ImportUserEventsRequest, @@ -159,12 +182,35 @@ def post_import_user_events( ) -> operations_pb2.Operation: """Post-rpc interceptor for import_user_events - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_import_user_events_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the UserEventService server but before - it is returned to user code. + it is returned to user code. This `post_import_user_events` interceptor runs + before the `post_import_user_events_with_metadata` interceptor. """ return response + def post_import_user_events_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for import_user_events + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the UserEventService server but before it is returned to user code. + + We recommend only using this `post_import_user_events_with_metadata` + interceptor in new development instead of the `post_import_user_events` interceptor. + When both interceptors are used, this `post_import_user_events_with_metadata` interceptor runs after the + `post_import_user_events` interceptor. The (possibly modified) response returned by + `post_import_user_events` will be passed to + `post_import_user_events_with_metadata`. + """ + return response, metadata + def pre_purge_user_events( self, request: purge_config.PurgeUserEventsRequest, @@ -184,12 +230,35 @@ def post_purge_user_events( ) -> operations_pb2.Operation: """Post-rpc interceptor for purge_user_events - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_purge_user_events_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the UserEventService server but before - it is returned to user code. + it is returned to user code. This `post_purge_user_events` interceptor runs + before the `post_purge_user_events_with_metadata` interceptor. 
""" return response + def post_purge_user_events_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for purge_user_events + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the UserEventService server but before it is returned to user code. + + We recommend only using this `post_purge_user_events_with_metadata` + interceptor in new development instead of the `post_purge_user_events` interceptor. + When both interceptors are used, this `post_purge_user_events_with_metadata` interceptor runs after the + `post_purge_user_events` interceptor. The (possibly modified) response returned by + `post_purge_user_events` will be passed to + `post_purge_user_events_with_metadata`. + """ + return response, metadata + def pre_write_user_event( self, request: user_event_service.WriteUserEventRequest, @@ -210,12 +279,35 @@ def post_write_user_event( ) -> user_event.UserEvent: """Post-rpc interceptor for write_user_event - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_write_user_event_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the UserEventService server but before - it is returned to user code. + it is returned to user code. This `post_write_user_event` interceptor runs + before the `post_write_user_event_with_metadata` interceptor. """ return response + def post_write_user_event_with_metadata( + self, + response: user_event.UserEvent, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[user_event.UserEvent, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for write_user_event + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the UserEventService server but before it is returned to user code. + + We recommend only using this `post_write_user_event_with_metadata` + interceptor in new development instead of the `post_write_user_event` interceptor. + When both interceptors are used, this `post_write_user_event_with_metadata` interceptor runs after the + `post_write_user_event` interceptor. The (possibly modified) response returned by + `post_write_user_event` will be passed to + `post_write_user_event_with_metadata`. 
+ """ + return response, metadata + def pre_cancel_operation( self, request: operations_pb2.CancelOperationRequest, @@ -725,6 +817,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_collect_user_event(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_collect_user_event_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -876,6 +972,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_import_user_events(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_import_user_events_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1027,6 +1127,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_purge_user_events(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_purge_user_events_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1180,6 +1284,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_write_user_event(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_write_user_event_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/gapic_version.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/gapic_version.py index 458413f5c30f..154c54fd3917 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/gapic_version.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.13.5" # {x-release-please-version} +__version__ = "0.13.6" # {x-release-please-version} diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/acl_config_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/acl_config_service/client.py index 918f2fdf61cf..f41c2fe62140 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/acl_config_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/acl_config_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -487,6 +489,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. 
+ + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -941,16 +970,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -996,16 +1029,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def cancel_operation( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/acl_config_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/acl_config_service/transports/rest.py index 726dcae7e136..23723a13f14a 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/acl_config_service/transports/rest.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/acl_config_service/transports/rest.py @@ -111,12 +111,35 @@ def post_get_acl_config( ) -> acl_config.AclConfig: """Post-rpc interceptor for get_acl_config - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_acl_config_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AclConfigService server but before - it is returned to user code. + it is returned to user code. This `post_get_acl_config` interceptor runs + before the `post_get_acl_config_with_metadata` interceptor. """ return response + def post_get_acl_config_with_metadata( + self, + response: acl_config.AclConfig, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[acl_config.AclConfig, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_acl_config + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AclConfigService server but before it is returned to user code. + + We recommend only using this `post_get_acl_config_with_metadata` + interceptor in new development instead of the `post_get_acl_config` interceptor. 
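The try/except wrapping of `list_operations` and `get_operation` shown above means auth-related failures (401/403/404) are enriched by `_add_cred_info_for_auth_errors` before being re-raised, provided the credentials expose `get_cred_info` (google-auth >= 2.35.0). From the caller's side the appended credential info appears in the exception details. A hedged sketch follows; the operation name is a placeholder and constructing the client requires application default credentials.

from google.api_core import exceptions as core_exceptions
from google.cloud import discoveryengine_v1alpha
from google.longrunning import operations_pb2

client = discoveryengine_v1alpha.AclConfigServiceClient()  # uses application default credentials

try:
    # "operations/example" is a placeholder name for illustration only.
    client.get_operation(operations_pb2.GetOperationRequest(name="operations/example"))
except core_exceptions.GoogleAPICallError as err:
    # For 401/403/404 responses, _add_cred_info_for_auth_errors may have appended a
    # JSON-encoded credential-info string to the error details before re-raising.
    for detail in err.details or []:
        print(detail)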
+ When both interceptors are used, this `post_get_acl_config_with_metadata` interceptor runs after the + `post_get_acl_config` interceptor. The (possibly modified) response returned by + `post_get_acl_config` will be passed to + `post_get_acl_config_with_metadata`. + """ + return response, metadata + def pre_update_acl_config( self, request: acl_config_service.UpdateAclConfigRequest, @@ -137,12 +160,35 @@ def post_update_acl_config( ) -> acl_config.AclConfig: """Post-rpc interceptor for update_acl_config - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_acl_config_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AclConfigService server but before - it is returned to user code. + it is returned to user code. This `post_update_acl_config` interceptor runs + before the `post_update_acl_config_with_metadata` interceptor. """ return response + def post_update_acl_config_with_metadata( + self, + response: acl_config.AclConfig, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[acl_config.AclConfig, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_acl_config + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AclConfigService server but before it is returned to user code. + + We recommend only using this `post_update_acl_config_with_metadata` + interceptor in new development instead of the `post_update_acl_config` interceptor. + When both interceptors are used, this `post_update_acl_config_with_metadata` interceptor runs after the + `post_update_acl_config` interceptor. The (possibly modified) response returned by + `post_update_acl_config` will be passed to + `post_update_acl_config_with_metadata`. + """ + return response, metadata + def pre_cancel_operation( self, request: operations_pb2.CancelOperationRequest, @@ -421,6 +467,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_acl_config(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_acl_config_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -571,6 +621,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_acl_config(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_acl_config_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/chunk_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/chunk_service/client.py index f7ee8eca8c23..ea229ac434ec 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/chunk_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/chunk_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. 
# from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -521,6 +523,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -1019,16 +1048,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -1074,16 +1107,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def cancel_operation( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/chunk_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/chunk_service/transports/rest.py index e59aee5e4361..e8816968c282 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/chunk_service/transports/rest.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/chunk_service/transports/rest.py @@ -107,12 +107,33 @@ def pre_get_chunk( def post_get_chunk(self, response: chunk.Chunk) -> chunk.Chunk: """Post-rpc interceptor for get_chunk - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_chunk_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ChunkService server but before - it is returned to user code. + it is returned to user code. This `post_get_chunk` interceptor runs + before the `post_get_chunk_with_metadata` interceptor. 
""" return response + def post_get_chunk_with_metadata( + self, response: chunk.Chunk, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[chunk.Chunk, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_chunk + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ChunkService server but before it is returned to user code. + + We recommend only using this `post_get_chunk_with_metadata` + interceptor in new development instead of the `post_get_chunk` interceptor. + When both interceptors are used, this `post_get_chunk_with_metadata` interceptor runs after the + `post_get_chunk` interceptor. The (possibly modified) response returned by + `post_get_chunk` will be passed to + `post_get_chunk_with_metadata`. + """ + return response, metadata + def pre_list_chunks( self, request: chunk_service.ListChunksRequest, @@ -132,12 +153,37 @@ def post_list_chunks( ) -> chunk_service.ListChunksResponse: """Post-rpc interceptor for list_chunks - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_chunks_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ChunkService server but before - it is returned to user code. + it is returned to user code. This `post_list_chunks` interceptor runs + before the `post_list_chunks_with_metadata` interceptor. """ return response + def post_list_chunks_with_metadata( + self, + response: chunk_service.ListChunksResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + chunk_service.ListChunksResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_chunks + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ChunkService server but before it is returned to user code. + + We recommend only using this `post_list_chunks_with_metadata` + interceptor in new development instead of the `post_list_chunks` interceptor. + When both interceptors are used, this `post_list_chunks_with_metadata` interceptor runs after the + `post_list_chunks` interceptor. The (possibly modified) response returned by + `post_list_chunks` will be passed to + `post_list_chunks_with_metadata`. 
+ """ + return response, metadata + def pre_cancel_operation( self, request: operations_pb2.CancelOperationRequest, @@ -424,6 +470,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_chunk(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_chunk_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -573,6 +623,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_chunks(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_chunks_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/completion_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/completion_service/client.py index 4d7f618a5724..7b6ce61a0c43 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/completion_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/completion_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -492,6 +494,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -1279,16 +1308,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -1334,16 +1367,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def cancel_operation( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/completion_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/completion_service/transports/rest.py index 8699c3653d94..ef7dfa07beeb 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/completion_service/transports/rest.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/completion_service/transports/rest.py @@ -139,12 +139,38 @@ def post_complete_query( ) -> completion_service.CompleteQueryResponse: """Post-rpc interceptor for complete_query - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_complete_query_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CompletionService server but before - it is returned to user code. + it is returned to user code. This `post_complete_query` interceptor runs + before the `post_complete_query_with_metadata` interceptor. """ return response + def post_complete_query_with_metadata( + self, + response: completion_service.CompleteQueryResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + completion_service.CompleteQueryResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for complete_query + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CompletionService server but before it is returned to user code. + + We recommend only using this `post_complete_query_with_metadata` + interceptor in new development instead of the `post_complete_query` interceptor. + When both interceptors are used, this `post_complete_query_with_metadata` interceptor runs after the + `post_complete_query` interceptor. The (possibly modified) response returned by + `post_complete_query` will be passed to + `post_complete_query_with_metadata`. + """ + return response, metadata + def pre_import_completion_suggestions( self, request: import_config.ImportCompletionSuggestionsRequest, @@ -165,12 +191,35 @@ def post_import_completion_suggestions( ) -> operations_pb2.Operation: """Post-rpc interceptor for import_completion_suggestions - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_import_completion_suggestions_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CompletionService server but before - it is returned to user code. + it is returned to user code. This `post_import_completion_suggestions` interceptor runs + before the `post_import_completion_suggestions_with_metadata` interceptor. 
""" return response + def post_import_completion_suggestions_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for import_completion_suggestions + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CompletionService server but before it is returned to user code. + + We recommend only using this `post_import_completion_suggestions_with_metadata` + interceptor in new development instead of the `post_import_completion_suggestions` interceptor. + When both interceptors are used, this `post_import_completion_suggestions_with_metadata` interceptor runs after the + `post_import_completion_suggestions` interceptor. The (possibly modified) response returned by + `post_import_completion_suggestions` will be passed to + `post_import_completion_suggestions_with_metadata`. + """ + return response, metadata + def pre_import_suggestion_deny_list_entries( self, request: import_config.ImportSuggestionDenyListEntriesRequest, @@ -191,12 +240,35 @@ def post_import_suggestion_deny_list_entries( ) -> operations_pb2.Operation: """Post-rpc interceptor for import_suggestion_deny_list_entries - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_import_suggestion_deny_list_entries_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CompletionService server but before - it is returned to user code. + it is returned to user code. This `post_import_suggestion_deny_list_entries` interceptor runs + before the `post_import_suggestion_deny_list_entries_with_metadata` interceptor. """ return response + def post_import_suggestion_deny_list_entries_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for import_suggestion_deny_list_entries + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CompletionService server but before it is returned to user code. + + We recommend only using this `post_import_suggestion_deny_list_entries_with_metadata` + interceptor in new development instead of the `post_import_suggestion_deny_list_entries` interceptor. + When both interceptors are used, this `post_import_suggestion_deny_list_entries_with_metadata` interceptor runs after the + `post_import_suggestion_deny_list_entries` interceptor. The (possibly modified) response returned by + `post_import_suggestion_deny_list_entries` will be passed to + `post_import_suggestion_deny_list_entries_with_metadata`. + """ + return response, metadata + def pre_purge_completion_suggestions( self, request: purge_config.PurgeCompletionSuggestionsRequest, @@ -217,12 +289,35 @@ def post_purge_completion_suggestions( ) -> operations_pb2.Operation: """Post-rpc interceptor for purge_completion_suggestions - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_purge_completion_suggestions_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CompletionService server but before - it is returned to user code. + it is returned to user code. 
This `post_purge_completion_suggestions` interceptor runs + before the `post_purge_completion_suggestions_with_metadata` interceptor. """ return response + def post_purge_completion_suggestions_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for purge_completion_suggestions + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CompletionService server but before it is returned to user code. + + We recommend only using this `post_purge_completion_suggestions_with_metadata` + interceptor in new development instead of the `post_purge_completion_suggestions` interceptor. + When both interceptors are used, this `post_purge_completion_suggestions_with_metadata` interceptor runs after the + `post_purge_completion_suggestions` interceptor. The (possibly modified) response returned by + `post_purge_completion_suggestions` will be passed to + `post_purge_completion_suggestions_with_metadata`. + """ + return response, metadata + def pre_purge_suggestion_deny_list_entries( self, request: purge_config.PurgeSuggestionDenyListEntriesRequest, @@ -243,12 +338,35 @@ def post_purge_suggestion_deny_list_entries( ) -> operations_pb2.Operation: """Post-rpc interceptor for purge_suggestion_deny_list_entries - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_purge_suggestion_deny_list_entries_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CompletionService server but before - it is returned to user code. + it is returned to user code. This `post_purge_suggestion_deny_list_entries` interceptor runs + before the `post_purge_suggestion_deny_list_entries_with_metadata` interceptor. """ return response + def post_purge_suggestion_deny_list_entries_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for purge_suggestion_deny_list_entries + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CompletionService server but before it is returned to user code. + + We recommend only using this `post_purge_suggestion_deny_list_entries_with_metadata` + interceptor in new development instead of the `post_purge_suggestion_deny_list_entries` interceptor. + When both interceptors are used, this `post_purge_suggestion_deny_list_entries_with_metadata` interceptor runs after the + `post_purge_suggestion_deny_list_entries` interceptor. The (possibly modified) response returned by + `post_purge_suggestion_deny_list_entries` will be passed to + `post_purge_suggestion_deny_list_entries_with_metadata`. 
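One detail worth noticing in every transport hunk: the tuple returned by the new hook is unpacked as `resp, _ = ...`, so the metadata a subclass returns is currently discarded and only the (possibly modified) response object travels onward to user code. Restated as a small standalone sketch (not the literal generated code):

from typing import Any, Dict, List, Tuple


def dispatch_post_hooks(interceptor: Any, resp: Any, headers: Dict[str, str]) -> Any:
    """Simplified restatement of the generated dispatch step shown in the hunks above."""
    # Headers become (name, str(value)) metadata for the new hook.
    response_metadata: List[Tuple[str, str]] = [(k, str(v)) for k, v in headers.items()]
    resp = interceptor.post_purge_completion_suggestions(resp)
    # The second element of the returned tuple is dropped, so only changes
    # made to `resp` propagate to the caller.
    resp, _ = interceptor.post_purge_completion_suggestions_with_metadata(resp, response_metadata)
    return resp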
+ """ + return response, metadata + def pre_cancel_operation( self, request: operations_pb2.CancelOperationRequest, @@ -705,6 +823,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_complete_query(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_complete_query_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -860,6 +982,13 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_import_completion_suggestions(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_import_completion_suggestions_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1018,6 +1147,13 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_import_suggestion_deny_list_entries(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_import_suggestion_deny_list_entries_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1171,6 +1307,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_purge_completion_suggestions(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_purge_completion_suggestions_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1327,6 +1467,13 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_purge_suggestion_deny_list_entries(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_purge_suggestion_deny_list_entries_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/control_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/control_service/client.py index 635bf733158a..32b31d756ced 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/control_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/control_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -519,6 +521,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. 
+ """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -1379,16 +1408,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -1434,16 +1467,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def cancel_operation( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/control_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/control_service/transports/rest.py index 328215d3f487..55ebc98b86d9 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/control_service/transports/rest.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/control_service/transports/rest.py @@ -132,12 +132,35 @@ def pre_create_control( def post_create_control(self, response: gcd_control.Control) -> gcd_control.Control: """Post-rpc interceptor for create_control - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_control_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ControlService server but before - it is returned to user code. + it is returned to user code. This `post_create_control` interceptor runs + before the `post_create_control_with_metadata` interceptor. """ return response + def post_create_control_with_metadata( + self, + response: gcd_control.Control, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gcd_control.Control, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_control + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ControlService server but before it is returned to user code. + + We recommend only using this `post_create_control_with_metadata` + interceptor in new development instead of the `post_create_control` interceptor. 
+ When both interceptors are used, this `post_create_control_with_metadata` interceptor runs after the + `post_create_control` interceptor. The (possibly modified) response returned by + `post_create_control` will be passed to + `post_create_control_with_metadata`. + """ + return response, metadata + def pre_delete_control( self, request: control_service.DeleteControlRequest, @@ -169,12 +192,35 @@ def pre_get_control( def post_get_control(self, response: control.Control) -> control.Control: """Post-rpc interceptor for get_control - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_control_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ControlService server but before - it is returned to user code. + it is returned to user code. This `post_get_control` interceptor runs + before the `post_get_control_with_metadata` interceptor. """ return response + def post_get_control_with_metadata( + self, + response: control.Control, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[control.Control, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_control + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ControlService server but before it is returned to user code. + + We recommend only using this `post_get_control_with_metadata` + interceptor in new development instead of the `post_get_control` interceptor. + When both interceptors are used, this `post_get_control_with_metadata` interceptor runs after the + `post_get_control` interceptor. The (possibly modified) response returned by + `post_get_control` will be passed to + `post_get_control_with_metadata`. + """ + return response, metadata + def pre_list_controls( self, request: control_service.ListControlsRequest, @@ -194,12 +240,37 @@ def post_list_controls( ) -> control_service.ListControlsResponse: """Post-rpc interceptor for list_controls - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_controls_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ControlService server but before - it is returned to user code. + it is returned to user code. This `post_list_controls` interceptor runs + before the `post_list_controls_with_metadata` interceptor. """ return response + def post_list_controls_with_metadata( + self, + response: control_service.ListControlsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + control_service.ListControlsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_controls + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ControlService server but before it is returned to user code. + + We recommend only using this `post_list_controls_with_metadata` + interceptor in new development instead of the `post_list_controls` interceptor. + When both interceptors are used, this `post_list_controls_with_metadata` interceptor runs after the + `post_list_controls` interceptor. The (possibly modified) response returned by + `post_list_controls` will be passed to + `post_list_controls_with_metadata`. 
+ """ + return response, metadata + def pre_update_control( self, request: control_service.UpdateControlRequest, @@ -217,12 +288,35 @@ def pre_update_control( def post_update_control(self, response: gcd_control.Control) -> gcd_control.Control: """Post-rpc interceptor for update_control - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_control_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ControlService server but before - it is returned to user code. + it is returned to user code. This `post_update_control` interceptor runs + before the `post_update_control_with_metadata` interceptor. """ return response + def post_update_control_with_metadata( + self, + response: gcd_control.Control, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gcd_control.Control, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_control + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ControlService server but before it is returned to user code. + + We recommend only using this `post_update_control_with_metadata` + interceptor in new development instead of the `post_update_control` interceptor. + When both interceptors are used, this `post_update_control_with_metadata` interceptor runs after the + `post_update_control` interceptor. The (possibly modified) response returned by + `post_update_control` will be passed to + `post_update_control_with_metadata`. + """ + return response, metadata + def pre_cancel_operation( self, request: operations_pb2.CancelOperationRequest, @@ -514,6 +608,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_control(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_control_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -767,6 +865,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_control(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_control_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -907,6 +1009,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_controls(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_controls_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1060,6 +1166,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_control(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_control_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/conversational_search_service/client.py 
b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/conversational_search_service/client.py index 93bb1c4b7184..9d925b66b803 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/conversational_search_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/conversational_search_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -657,6 +659,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -2392,16 +2421,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -2447,16 +2480,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. 
+ return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def cancel_operation( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/conversational_search_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/conversational_search_service/transports/rest.py index 5c8af6b0c029..afa86200b662 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/conversational_search_service/transports/rest.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/conversational_search_service/transports/rest.py @@ -198,12 +198,38 @@ def post_answer_query( ) -> conversational_search_service.AnswerQueryResponse: """Post-rpc interceptor for answer_query - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_answer_query_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ConversationalSearchService server but before - it is returned to user code. + it is returned to user code. This `post_answer_query` interceptor runs + before the `post_answer_query_with_metadata` interceptor. """ return response + def post_answer_query_with_metadata( + self, + response: conversational_search_service.AnswerQueryResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + conversational_search_service.AnswerQueryResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for answer_query + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ConversationalSearchService server but before it is returned to user code. + + We recommend only using this `post_answer_query_with_metadata` + interceptor in new development instead of the `post_answer_query` interceptor. + When both interceptors are used, this `post_answer_query_with_metadata` interceptor runs after the + `post_answer_query` interceptor. The (possibly modified) response returned by + `post_answer_query` will be passed to + `post_answer_query_with_metadata`. + """ + return response, metadata + def pre_converse_conversation( self, request: conversational_search_service.ConverseConversationRequest, @@ -224,12 +250,38 @@ def post_converse_conversation( ) -> conversational_search_service.ConverseConversationResponse: """Post-rpc interceptor for converse_conversation - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_converse_conversation_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ConversationalSearchService server but before - it is returned to user code. + it is returned to user code. This `post_converse_conversation` interceptor runs + before the `post_converse_conversation_with_metadata` interceptor. 
""" return response + def post_converse_conversation_with_metadata( + self, + response: conversational_search_service.ConverseConversationResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + conversational_search_service.ConverseConversationResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for converse_conversation + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ConversationalSearchService server but before it is returned to user code. + + We recommend only using this `post_converse_conversation_with_metadata` + interceptor in new development instead of the `post_converse_conversation` interceptor. + When both interceptors are used, this `post_converse_conversation_with_metadata` interceptor runs after the + `post_converse_conversation` interceptor. The (possibly modified) response returned by + `post_converse_conversation` will be passed to + `post_converse_conversation_with_metadata`. + """ + return response, metadata + def pre_create_conversation( self, request: conversational_search_service.CreateConversationRequest, @@ -250,12 +302,35 @@ def post_create_conversation( ) -> gcd_conversation.Conversation: """Post-rpc interceptor for create_conversation - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_conversation_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ConversationalSearchService server but before - it is returned to user code. + it is returned to user code. This `post_create_conversation` interceptor runs + before the `post_create_conversation_with_metadata` interceptor. """ return response + def post_create_conversation_with_metadata( + self, + response: gcd_conversation.Conversation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gcd_conversation.Conversation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_conversation + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ConversationalSearchService server but before it is returned to user code. + + We recommend only using this `post_create_conversation_with_metadata` + interceptor in new development instead of the `post_create_conversation` interceptor. + When both interceptors are used, this `post_create_conversation_with_metadata` interceptor runs after the + `post_create_conversation` interceptor. The (possibly modified) response returned by + `post_create_conversation` will be passed to + `post_create_conversation_with_metadata`. + """ + return response, metadata + def pre_create_session( self, request: conversational_search_service.CreateSessionRequest, @@ -274,12 +349,35 @@ def pre_create_session( def post_create_session(self, response: gcd_session.Session) -> gcd_session.Session: """Post-rpc interceptor for create_session - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_session_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ConversationalSearchService server but before - it is returned to user code. + it is returned to user code. This `post_create_session` interceptor runs + before the `post_create_session_with_metadata` interceptor. 
""" return response + def post_create_session_with_metadata( + self, + response: gcd_session.Session, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gcd_session.Session, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_session + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ConversationalSearchService server but before it is returned to user code. + + We recommend only using this `post_create_session_with_metadata` + interceptor in new development instead of the `post_create_session` interceptor. + When both interceptors are used, this `post_create_session_with_metadata` interceptor runs after the + `post_create_session` interceptor. The (possibly modified) response returned by + `post_create_session` will be passed to + `post_create_session_with_metadata`. + """ + return response, metadata + def pre_delete_conversation( self, request: conversational_search_service.DeleteConversationRequest, @@ -328,12 +426,33 @@ def pre_get_answer( def post_get_answer(self, response: answer.Answer) -> answer.Answer: """Post-rpc interceptor for get_answer - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_answer_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ConversationalSearchService server but before - it is returned to user code. + it is returned to user code. This `post_get_answer` interceptor runs + before the `post_get_answer_with_metadata` interceptor. """ return response + def post_get_answer_with_metadata( + self, response: answer.Answer, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[answer.Answer, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_answer + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ConversationalSearchService server but before it is returned to user code. + + We recommend only using this `post_get_answer_with_metadata` + interceptor in new development instead of the `post_get_answer` interceptor. + When both interceptors are used, this `post_get_answer_with_metadata` interceptor runs after the + `post_get_answer` interceptor. The (possibly modified) response returned by + `post_get_answer` will be passed to + `post_get_answer_with_metadata`. + """ + return response, metadata + def pre_get_conversation( self, request: conversational_search_service.GetConversationRequest, @@ -354,12 +473,35 @@ def post_get_conversation( ) -> conversation.Conversation: """Post-rpc interceptor for get_conversation - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_conversation_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ConversationalSearchService server but before - it is returned to user code. + it is returned to user code. This `post_get_conversation` interceptor runs + before the `post_get_conversation_with_metadata` interceptor. 
""" return response + def post_get_conversation_with_metadata( + self, + response: conversation.Conversation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[conversation.Conversation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_conversation + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ConversationalSearchService server but before it is returned to user code. + + We recommend only using this `post_get_conversation_with_metadata` + interceptor in new development instead of the `post_get_conversation` interceptor. + When both interceptors are used, this `post_get_conversation_with_metadata` interceptor runs after the + `post_get_conversation` interceptor. The (possibly modified) response returned by + `post_get_conversation` will be passed to + `post_get_conversation_with_metadata`. + """ + return response, metadata + def pre_get_session( self, request: conversational_search_service.GetSessionRequest, @@ -378,12 +520,35 @@ def pre_get_session( def post_get_session(self, response: session.Session) -> session.Session: """Post-rpc interceptor for get_session - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_session_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ConversationalSearchService server but before - it is returned to user code. + it is returned to user code. This `post_get_session` interceptor runs + before the `post_get_session_with_metadata` interceptor. """ return response + def post_get_session_with_metadata( + self, + response: session.Session, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[session.Session, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_session + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ConversationalSearchService server but before it is returned to user code. + + We recommend only using this `post_get_session_with_metadata` + interceptor in new development instead of the `post_get_session` interceptor. + When both interceptors are used, this `post_get_session_with_metadata` interceptor runs after the + `post_get_session` interceptor. The (possibly modified) response returned by + `post_get_session` will be passed to + `post_get_session_with_metadata`. + """ + return response, metadata + def pre_list_conversations( self, request: conversational_search_service.ListConversationsRequest, @@ -404,12 +569,38 @@ def post_list_conversations( ) -> conversational_search_service.ListConversationsResponse: """Post-rpc interceptor for list_conversations - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_conversations_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ConversationalSearchService server but before - it is returned to user code. + it is returned to user code. This `post_list_conversations` interceptor runs + before the `post_list_conversations_with_metadata` interceptor. 
""" return response + def post_list_conversations_with_metadata( + self, + response: conversational_search_service.ListConversationsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + conversational_search_service.ListConversationsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_conversations + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ConversationalSearchService server but before it is returned to user code. + + We recommend only using this `post_list_conversations_with_metadata` + interceptor in new development instead of the `post_list_conversations` interceptor. + When both interceptors are used, this `post_list_conversations_with_metadata` interceptor runs after the + `post_list_conversations` interceptor. The (possibly modified) response returned by + `post_list_conversations` will be passed to + `post_list_conversations_with_metadata`. + """ + return response, metadata + def pre_list_sessions( self, request: conversational_search_service.ListSessionsRequest, @@ -430,12 +621,38 @@ def post_list_sessions( ) -> conversational_search_service.ListSessionsResponse: """Post-rpc interceptor for list_sessions - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_sessions_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ConversationalSearchService server but before - it is returned to user code. + it is returned to user code. This `post_list_sessions` interceptor runs + before the `post_list_sessions_with_metadata` interceptor. """ return response + def post_list_sessions_with_metadata( + self, + response: conversational_search_service.ListSessionsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + conversational_search_service.ListSessionsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_sessions + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ConversationalSearchService server but before it is returned to user code. + + We recommend only using this `post_list_sessions_with_metadata` + interceptor in new development instead of the `post_list_sessions` interceptor. + When both interceptors are used, this `post_list_sessions_with_metadata` interceptor runs after the + `post_list_sessions` interceptor. The (possibly modified) response returned by + `post_list_sessions` will be passed to + `post_list_sessions_with_metadata`. + """ + return response, metadata + def pre_update_conversation( self, request: conversational_search_service.UpdateConversationRequest, @@ -456,12 +673,35 @@ def post_update_conversation( ) -> gcd_conversation.Conversation: """Post-rpc interceptor for update_conversation - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_conversation_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ConversationalSearchService server but before - it is returned to user code. + it is returned to user code. This `post_update_conversation` interceptor runs + before the `post_update_conversation_with_metadata` interceptor. 
""" return response + def post_update_conversation_with_metadata( + self, + response: gcd_conversation.Conversation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gcd_conversation.Conversation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_conversation + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ConversationalSearchService server but before it is returned to user code. + + We recommend only using this `post_update_conversation_with_metadata` + interceptor in new development instead of the `post_update_conversation` interceptor. + When both interceptors are used, this `post_update_conversation_with_metadata` interceptor runs after the + `post_update_conversation` interceptor. The (possibly modified) response returned by + `post_update_conversation` will be passed to + `post_update_conversation_with_metadata`. + """ + return response, metadata + def pre_update_session( self, request: conversational_search_service.UpdateSessionRequest, @@ -480,12 +720,35 @@ def pre_update_session( def post_update_session(self, response: gcd_session.Session) -> gcd_session.Session: """Post-rpc interceptor for update_session - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_session_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ConversationalSearchService server but before - it is returned to user code. + it is returned to user code. This `post_update_session` interceptor runs + before the `post_update_session_with_metadata` interceptor. """ return response + def post_update_session_with_metadata( + self, + response: gcd_session.Session, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gcd_session.Session, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_session + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ConversationalSearchService server but before it is returned to user code. + + We recommend only using this `post_update_session_with_metadata` + interceptor in new development instead of the `post_update_session` interceptor. + When both interceptors are used, this `post_update_session_with_metadata` interceptor runs after the + `post_update_session` interceptor. The (possibly modified) response returned by + `post_update_session` will be passed to + `post_update_session_with_metadata`. 
+ """ + return response, metadata + def pre_cancel_operation( self, request: operations_pb2.CancelOperationRequest, @@ -779,6 +1042,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_answer_query(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_answer_query_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -939,6 +1206,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_converse_conversation(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_converse_conversation_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1093,6 +1364,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_conversation(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_conversation_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1242,6 +1517,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_session(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_session_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1604,6 +1883,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_answer(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_answer_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1751,6 +2034,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_conversation(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_conversation_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1894,6 +2181,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_session(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_session_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2039,6 +2330,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_conversations(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_conversations_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2186,6 +2481,10 @@ def __call__( 
json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_sessions(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_sessions_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2342,6 +2641,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_conversation(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_conversation_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2491,6 +2794,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_session(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_session_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/data_store_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/data_store_service/client.py index 8d406ec85e0e..c3971eb3d937 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/data_store_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/data_store_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -573,6 +575,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -1800,16 +1829,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. 
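A minimal sketch of how the new `*_with_metadata` interceptor hooks introduced above can be used, taking `post_answer_query_with_metadata` as the example. The `ConversationalSearchServiceRestInterceptor` and `ConversationalSearchServiceRestTransport` names follow the generator's usual naming convention and are assumed here (only their methods appear in the hunks above); the REST `__call__` paths build `metadata` from the HTTP response headers and pass both the response and that sequence to the hook.

from typing import Sequence, Tuple, Union

from google.cloud import discoveryengine_v1alpha
from google.cloud.discoveryengine_v1alpha.services.conversational_search_service.transports.rest import (
    ConversationalSearchServiceRestInterceptor,
    ConversationalSearchServiceRestTransport,
)
from google.cloud.discoveryengine_v1alpha.types import conversational_search_service


class HeaderLoggingInterceptor(ConversationalSearchServiceRestInterceptor):
    """Reads the HTTP response headers surfaced through the new hook."""

    def post_answer_query_with_metadata(
        self,
        response: conversational_search_service.AnswerQueryResponse,
        metadata: Sequence[Tuple[str, Union[str, bytes]]],
    ) -> Tuple[
        conversational_search_service.AnswerQueryResponse,
        Sequence[Tuple[str, Union[str, bytes]]],
    ]:
        # `metadata` is the [(header, value), ...] sequence built in __call__;
        # whatever is returned here is what later layers observe.
        print("answer_query response headers:", sorted(k for k, _ in metadata))
        return response, metadata


# Assumes Application Default Credentials are available.
client = discoveryengine_v1alpha.ConversationalSearchServiceClient(
    transport=ConversationalSearchServiceRestTransport(
        interceptor=HeaderLoggingInterceptor()
    )
)

Overriding the older `post_answer_query` hook still works; per the new docstrings it runs first, and its (possibly modified) response is what `post_answer_query_with_metadata` receives.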
+ return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -1855,16 +1888,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def cancel_operation( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/data_store_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/data_store_service/transports/rest.py index 517ce2a29575..800edaea0d33 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/data_store_service/transports/rest.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/data_store_service/transports/rest.py @@ -158,12 +158,35 @@ def post_create_data_store( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_data_store - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_data_store_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DataStoreService server but before - it is returned to user code. + it is returned to user code. This `post_create_data_store` interceptor runs + before the `post_create_data_store_with_metadata` interceptor. """ return response + def post_create_data_store_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_data_store + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataStoreService server but before it is returned to user code. + + We recommend only using this `post_create_data_store_with_metadata` + interceptor in new development instead of the `post_create_data_store` interceptor. + When both interceptors are used, this `post_create_data_store_with_metadata` interceptor runs after the + `post_create_data_store` interceptor. The (possibly modified) response returned by + `post_create_data_store` will be passed to + `post_create_data_store_with_metadata`. + """ + return response, metadata + def pre_delete_data_store( self, request: data_store_service.DeleteDataStoreRequest, @@ -184,12 +207,35 @@ def post_delete_data_store( ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_data_store - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_data_store_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DataStoreService server but before - it is returned to user code. + it is returned to user code. This `post_delete_data_store` interceptor runs + before the `post_delete_data_store_with_metadata` interceptor. 
""" return response + def post_delete_data_store_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_data_store + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataStoreService server but before it is returned to user code. + + We recommend only using this `post_delete_data_store_with_metadata` + interceptor in new development instead of the `post_delete_data_store` interceptor. + When both interceptors are used, this `post_delete_data_store_with_metadata` interceptor runs after the + `post_delete_data_store` interceptor. The (possibly modified) response returned by + `post_delete_data_store` will be passed to + `post_delete_data_store_with_metadata`. + """ + return response, metadata + def pre_get_data_store( self, request: data_store_service.GetDataStoreRequest, @@ -209,12 +255,35 @@ def post_get_data_store( ) -> data_store.DataStore: """Post-rpc interceptor for get_data_store - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_data_store_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DataStoreService server but before - it is returned to user code. + it is returned to user code. This `post_get_data_store` interceptor runs + before the `post_get_data_store_with_metadata` interceptor. """ return response + def post_get_data_store_with_metadata( + self, + response: data_store.DataStore, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[data_store.DataStore, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_data_store + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataStoreService server but before it is returned to user code. + + We recommend only using this `post_get_data_store_with_metadata` + interceptor in new development instead of the `post_get_data_store` interceptor. + When both interceptors are used, this `post_get_data_store_with_metadata` interceptor runs after the + `post_get_data_store` interceptor. The (possibly modified) response returned by + `post_get_data_store` will be passed to + `post_get_data_store_with_metadata`. + """ + return response, metadata + def pre_get_document_processing_config( self, request: data_store_service.GetDocumentProcessingConfigRequest, @@ -235,12 +304,38 @@ def post_get_document_processing_config( ) -> document_processing_config.DocumentProcessingConfig: """Post-rpc interceptor for get_document_processing_config - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_document_processing_config_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DataStoreService server but before - it is returned to user code. + it is returned to user code. This `post_get_document_processing_config` interceptor runs + before the `post_get_document_processing_config_with_metadata` interceptor. 
""" return response + def post_get_document_processing_config_with_metadata( + self, + response: document_processing_config.DocumentProcessingConfig, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + document_processing_config.DocumentProcessingConfig, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for get_document_processing_config + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataStoreService server but before it is returned to user code. + + We recommend only using this `post_get_document_processing_config_with_metadata` + interceptor in new development instead of the `post_get_document_processing_config` interceptor. + When both interceptors are used, this `post_get_document_processing_config_with_metadata` interceptor runs after the + `post_get_document_processing_config` interceptor. The (possibly modified) response returned by + `post_get_document_processing_config` will be passed to + `post_get_document_processing_config_with_metadata`. + """ + return response, metadata + def pre_list_data_stores( self, request: data_store_service.ListDataStoresRequest, @@ -261,12 +356,38 @@ def post_list_data_stores( ) -> data_store_service.ListDataStoresResponse: """Post-rpc interceptor for list_data_stores - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_data_stores_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DataStoreService server but before - it is returned to user code. + it is returned to user code. This `post_list_data_stores` interceptor runs + before the `post_list_data_stores_with_metadata` interceptor. """ return response + def post_list_data_stores_with_metadata( + self, + response: data_store_service.ListDataStoresResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + data_store_service.ListDataStoresResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_data_stores + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataStoreService server but before it is returned to user code. + + We recommend only using this `post_list_data_stores_with_metadata` + interceptor in new development instead of the `post_list_data_stores` interceptor. + When both interceptors are used, this `post_list_data_stores_with_metadata` interceptor runs after the + `post_list_data_stores` interceptor. The (possibly modified) response returned by + `post_list_data_stores` will be passed to + `post_list_data_stores_with_metadata`. + """ + return response, metadata + def pre_update_data_store( self, request: data_store_service.UpdateDataStoreRequest, @@ -287,12 +408,35 @@ def post_update_data_store( ) -> gcd_data_store.DataStore: """Post-rpc interceptor for update_data_store - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_data_store_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DataStoreService server but before - it is returned to user code. + it is returned to user code. This `post_update_data_store` interceptor runs + before the `post_update_data_store_with_metadata` interceptor. 
""" return response + def post_update_data_store_with_metadata( + self, + response: gcd_data_store.DataStore, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gcd_data_store.DataStore, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_data_store + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataStoreService server but before it is returned to user code. + + We recommend only using this `post_update_data_store_with_metadata` + interceptor in new development instead of the `post_update_data_store` interceptor. + When both interceptors are used, this `post_update_data_store_with_metadata` interceptor runs after the + `post_update_data_store` interceptor. The (possibly modified) response returned by + `post_update_data_store` will be passed to + `post_update_data_store_with_metadata`. + """ + return response, metadata + def pre_update_document_processing_config( self, request: data_store_service.UpdateDocumentProcessingConfigRequest, @@ -313,12 +457,38 @@ def post_update_document_processing_config( ) -> gcd_document_processing_config.DocumentProcessingConfig: """Post-rpc interceptor for update_document_processing_config - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_document_processing_config_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DataStoreService server but before - it is returned to user code. + it is returned to user code. This `post_update_document_processing_config` interceptor runs + before the `post_update_document_processing_config_with_metadata` interceptor. """ return response + def post_update_document_processing_config_with_metadata( + self, + response: gcd_document_processing_config.DocumentProcessingConfig, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + gcd_document_processing_config.DocumentProcessingConfig, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for update_document_processing_config + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataStoreService server but before it is returned to user code. + + We recommend only using this `post_update_document_processing_config_with_metadata` + interceptor in new development instead of the `post_update_document_processing_config` interceptor. + When both interceptors are used, this `post_update_document_processing_config_with_metadata` interceptor runs after the + `post_update_document_processing_config` interceptor. The (possibly modified) response returned by + `post_update_document_processing_config` will be passed to + `post_update_document_processing_config_with_metadata`. 
+ """ + return response, metadata + def pre_cancel_operation( self, request: operations_pb2.CancelOperationRequest, @@ -783,6 +953,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_data_store(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_data_store_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -929,6 +1103,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_data_store(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_data_store_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1073,6 +1251,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_data_store(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_data_store_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1227,6 +1409,13 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_document_processing_config(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_get_document_processing_config_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1378,6 +1567,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_data_stores(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_data_stores_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1533,6 +1726,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_data_store(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_data_store_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1693,6 +1890,13 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_document_processing_config(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_update_document_processing_config_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/document_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/document_service/client.py index dae45c98012c..f76039ec91a5 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/document_service/client.py +++ 
b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/document_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -553,6 +555,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -1912,16 +1941,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -1967,16 +2000,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def cancel_operation( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/document_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/document_service/transports/rest.py index dc6056ab8d50..b7fd7d2105d4 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/document_service/transports/rest.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/document_service/transports/rest.py @@ -171,12 +171,38 @@ def post_batch_get_documents_metadata( ) -> document_service.BatchGetDocumentsMetadataResponse: """Post-rpc interceptor for batch_get_documents_metadata - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_batch_get_documents_metadata_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DocumentService server but before - it is returned to user code. + it is returned to user code. 
This `post_batch_get_documents_metadata` interceptor runs + before the `post_batch_get_documents_metadata_with_metadata` interceptor. """ return response + def post_batch_get_documents_metadata_with_metadata( + self, + response: document_service.BatchGetDocumentsMetadataResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + document_service.BatchGetDocumentsMetadataResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for batch_get_documents_metadata + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DocumentService server but before it is returned to user code. + + We recommend only using this `post_batch_get_documents_metadata_with_metadata` + interceptor in new development instead of the `post_batch_get_documents_metadata` interceptor. + When both interceptors are used, this `post_batch_get_documents_metadata_with_metadata` interceptor runs after the + `post_batch_get_documents_metadata` interceptor. The (possibly modified) response returned by + `post_batch_get_documents_metadata` will be passed to + `post_batch_get_documents_metadata_with_metadata`. + """ + return response, metadata + def pre_create_document( self, request: document_service.CreateDocumentRequest, @@ -196,12 +222,35 @@ def post_create_document( ) -> gcd_document.Document: """Post-rpc interceptor for create_document - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_document_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DocumentService server but before - it is returned to user code. + it is returned to user code. This `post_create_document` interceptor runs + before the `post_create_document_with_metadata` interceptor. """ return response + def post_create_document_with_metadata( + self, + response: gcd_document.Document, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gcd_document.Document, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_document + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DocumentService server but before it is returned to user code. + + We recommend only using this `post_create_document_with_metadata` + interceptor in new development instead of the `post_create_document` interceptor. + When both interceptors are used, this `post_create_document_with_metadata` interceptor runs after the + `post_create_document` interceptor. The (possibly modified) response returned by + `post_create_document` will be passed to + `post_create_document_with_metadata`. + """ + return response, metadata + def pre_delete_document( self, request: document_service.DeleteDocumentRequest, @@ -233,12 +282,35 @@ def pre_get_document( def post_get_document(self, response: document.Document) -> document.Document: """Post-rpc interceptor for get_document - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_document_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DocumentService server but before - it is returned to user code. + it is returned to user code. This `post_get_document` interceptor runs + before the `post_get_document_with_metadata` interceptor. 
""" return response + def post_get_document_with_metadata( + self, + response: document.Document, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[document.Document, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_document + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DocumentService server but before it is returned to user code. + + We recommend only using this `post_get_document_with_metadata` + interceptor in new development instead of the `post_get_document` interceptor. + When both interceptors are used, this `post_get_document_with_metadata` interceptor runs after the + `post_get_document` interceptor. The (possibly modified) response returned by + `post_get_document` will be passed to + `post_get_document_with_metadata`. + """ + return response, metadata + def pre_get_processed_document( self, request: document_service.GetProcessedDocumentRequest, @@ -259,12 +331,35 @@ def post_get_processed_document( ) -> document.ProcessedDocument: """Post-rpc interceptor for get_processed_document - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_processed_document_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DocumentService server but before - it is returned to user code. + it is returned to user code. This `post_get_processed_document` interceptor runs + before the `post_get_processed_document_with_metadata` interceptor. """ return response + def post_get_processed_document_with_metadata( + self, + response: document.ProcessedDocument, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[document.ProcessedDocument, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_processed_document + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DocumentService server but before it is returned to user code. + + We recommend only using this `post_get_processed_document_with_metadata` + interceptor in new development instead of the `post_get_processed_document` interceptor. + When both interceptors are used, this `post_get_processed_document_with_metadata` interceptor runs after the + `post_get_processed_document` interceptor. The (possibly modified) response returned by + `post_get_processed_document` will be passed to + `post_get_processed_document_with_metadata`. + """ + return response, metadata + def pre_import_documents( self, request: import_config.ImportDocumentsRequest, @@ -284,12 +379,35 @@ def post_import_documents( ) -> operations_pb2.Operation: """Post-rpc interceptor for import_documents - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_import_documents_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DocumentService server but before - it is returned to user code. + it is returned to user code. This `post_import_documents` interceptor runs + before the `post_import_documents_with_metadata` interceptor. 
""" return response + def post_import_documents_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for import_documents + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DocumentService server but before it is returned to user code. + + We recommend only using this `post_import_documents_with_metadata` + interceptor in new development instead of the `post_import_documents` interceptor. + When both interceptors are used, this `post_import_documents_with_metadata` interceptor runs after the + `post_import_documents` interceptor. The (possibly modified) response returned by + `post_import_documents` will be passed to + `post_import_documents_with_metadata`. + """ + return response, metadata + def pre_list_documents( self, request: document_service.ListDocumentsRequest, @@ -309,12 +427,37 @@ def post_list_documents( ) -> document_service.ListDocumentsResponse: """Post-rpc interceptor for list_documents - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_documents_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DocumentService server but before - it is returned to user code. + it is returned to user code. This `post_list_documents` interceptor runs + before the `post_list_documents_with_metadata` interceptor. """ return response + def post_list_documents_with_metadata( + self, + response: document_service.ListDocumentsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + document_service.ListDocumentsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_documents + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DocumentService server but before it is returned to user code. + + We recommend only using this `post_list_documents_with_metadata` + interceptor in new development instead of the `post_list_documents` interceptor. + When both interceptors are used, this `post_list_documents_with_metadata` interceptor runs after the + `post_list_documents` interceptor. The (possibly modified) response returned by + `post_list_documents` will be passed to + `post_list_documents_with_metadata`. + """ + return response, metadata + def pre_purge_documents( self, request: purge_config.PurgeDocumentsRequest, @@ -334,12 +477,35 @@ def post_purge_documents( ) -> operations_pb2.Operation: """Post-rpc interceptor for purge_documents - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_purge_documents_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DocumentService server but before - it is returned to user code. + it is returned to user code. This `post_purge_documents` interceptor runs + before the `post_purge_documents_with_metadata` interceptor. 
""" return response + def post_purge_documents_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for purge_documents + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DocumentService server but before it is returned to user code. + + We recommend only using this `post_purge_documents_with_metadata` + interceptor in new development instead of the `post_purge_documents` interceptor. + When both interceptors are used, this `post_purge_documents_with_metadata` interceptor runs after the + `post_purge_documents` interceptor. The (possibly modified) response returned by + `post_purge_documents` will be passed to + `post_purge_documents_with_metadata`. + """ + return response, metadata + def pre_update_document( self, request: document_service.UpdateDocumentRequest, @@ -359,12 +525,35 @@ def post_update_document( ) -> gcd_document.Document: """Post-rpc interceptor for update_document - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_document_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DocumentService server but before - it is returned to user code. + it is returned to user code. This `post_update_document` interceptor runs + before the `post_update_document_with_metadata` interceptor. """ return response + def post_update_document_with_metadata( + self, + response: gcd_document.Document, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gcd_document.Document, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_document + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DocumentService server but before it is returned to user code. + + We recommend only using this `post_update_document_with_metadata` + interceptor in new development instead of the `post_update_document` interceptor. + When both interceptors are used, this `post_update_document_with_metadata` interceptor runs after the + `post_update_document` interceptor. The (possibly modified) response returned by + `post_update_document` will be passed to + `post_update_document_with_metadata`. 
+ """ + return response, metadata + def pre_cancel_operation( self, request: operations_pb2.CancelOperationRequest, @@ -828,6 +1017,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_batch_get_documents_metadata(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_batch_get_documents_metadata_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -983,6 +1176,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_document(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_document_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1236,6 +1433,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_document(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_document_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1384,6 +1585,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_processed_document(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_processed_document_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1533,6 +1738,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_import_documents(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_import_documents_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1678,6 +1887,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_documents(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_documents_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1829,6 +2042,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_purge_documents(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_purge_documents_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1980,6 +2197,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_document(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_document_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git 
a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/engine_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/engine_service/client.py index 991c6a051619..0b3439a6aa59 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/engine_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/engine_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -521,6 +523,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -1778,16 +1807,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -1833,16 +1866,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def cancel_operation( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/engine_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/engine_service/transports/rest.py index 5017fd21f1f4..400c78f3c948 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/engine_service/transports/rest.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/engine_service/transports/rest.py @@ -161,12 +161,35 @@ def post_create_engine( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_engine - Override in a subclass to manipulate the response + DEPRECATED. 
Please use the `post_create_engine_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the EngineService server but before - it is returned to user code. + it is returned to user code. This `post_create_engine` interceptor runs + before the `post_create_engine_with_metadata` interceptor. """ return response + def post_create_engine_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_engine + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the EngineService server but before it is returned to user code. + + We recommend only using this `post_create_engine_with_metadata` + interceptor in new development instead of the `post_create_engine` interceptor. + When both interceptors are used, this `post_create_engine_with_metadata` interceptor runs after the + `post_create_engine` interceptor. The (possibly modified) response returned by + `post_create_engine` will be passed to + `post_create_engine_with_metadata`. + """ + return response, metadata + def pre_delete_engine( self, request: engine_service.DeleteEngineRequest, @@ -186,12 +209,35 @@ def post_delete_engine( ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_engine - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_engine_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the EngineService server but before - it is returned to user code. + it is returned to user code. This `post_delete_engine` interceptor runs + before the `post_delete_engine_with_metadata` interceptor. """ return response + def post_delete_engine_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_engine + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the EngineService server but before it is returned to user code. + + We recommend only using this `post_delete_engine_with_metadata` + interceptor in new development instead of the `post_delete_engine` interceptor. + When both interceptors are used, this `post_delete_engine_with_metadata` interceptor runs after the + `post_delete_engine` interceptor. The (possibly modified) response returned by + `post_delete_engine` will be passed to + `post_delete_engine_with_metadata`. + """ + return response, metadata + def pre_get_engine( self, request: engine_service.GetEngineRequest, @@ -209,12 +255,33 @@ def pre_get_engine( def post_get_engine(self, response: engine.Engine) -> engine.Engine: """Post-rpc interceptor for get_engine - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_engine_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the EngineService server but before - it is returned to user code. + it is returned to user code. This `post_get_engine` interceptor runs + before the `post_get_engine_with_metadata` interceptor. 
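The deprecation notes above spell out the ordering: when both hooks are overridden, the plain `post_<rpc>` runs first and its (possibly modified) return value is what `post_<rpc>_with_metadata` receives. A short sketch with both hooks overridden on the EngineService interceptor from the hunks above; the `display_name` tweak is a hypothetical edit used only to show that it is visible in the second hook.

# Sketch only: both hooks overridden to illustrate the documented call order.
from google.cloud.discoveryengine_v1alpha.services.engine_service.transports.rest import (
    EngineServiceRestInterceptor,
)


class OrderingDemoInterceptor(EngineServiceRestInterceptor):
    def post_get_engine(self, response):
        # Runs first (deprecated path); its return value feeds the next hook.
        response.display_name = response.display_name.strip()
        return response

    def post_get_engine_with_metadata(self, response, metadata):
        # Runs second; `response` already reflects the change made above and
        # `metadata` carries the HTTP response headers.
        return response, metadata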
""" return response + def post_get_engine_with_metadata( + self, response: engine.Engine, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[engine.Engine, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_engine + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the EngineService server but before it is returned to user code. + + We recommend only using this `post_get_engine_with_metadata` + interceptor in new development instead of the `post_get_engine` interceptor. + When both interceptors are used, this `post_get_engine_with_metadata` interceptor runs after the + `post_get_engine` interceptor. The (possibly modified) response returned by + `post_get_engine` will be passed to + `post_get_engine_with_metadata`. + """ + return response, metadata + def pre_list_engines( self, request: engine_service.ListEnginesRequest, @@ -234,12 +301,37 @@ def post_list_engines( ) -> engine_service.ListEnginesResponse: """Post-rpc interceptor for list_engines - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_engines_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the EngineService server but before - it is returned to user code. + it is returned to user code. This `post_list_engines` interceptor runs + before the `post_list_engines_with_metadata` interceptor. """ return response + def post_list_engines_with_metadata( + self, + response: engine_service.ListEnginesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + engine_service.ListEnginesResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_engines + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the EngineService server but before it is returned to user code. + + We recommend only using this `post_list_engines_with_metadata` + interceptor in new development instead of the `post_list_engines` interceptor. + When both interceptors are used, this `post_list_engines_with_metadata` interceptor runs after the + `post_list_engines` interceptor. The (possibly modified) response returned by + `post_list_engines` will be passed to + `post_list_engines_with_metadata`. + """ + return response, metadata + def pre_pause_engine( self, request: engine_service.PauseEngineRequest, @@ -257,12 +349,33 @@ def pre_pause_engine( def post_pause_engine(self, response: engine.Engine) -> engine.Engine: """Post-rpc interceptor for pause_engine - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_pause_engine_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the EngineService server but before - it is returned to user code. + it is returned to user code. This `post_pause_engine` interceptor runs + before the `post_pause_engine_with_metadata` interceptor. """ return response + def post_pause_engine_with_metadata( + self, response: engine.Engine, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[engine.Engine, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for pause_engine + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the EngineService server but before it is returned to user code. 
+ + We recommend only using this `post_pause_engine_with_metadata` + interceptor in new development instead of the `post_pause_engine` interceptor. + When both interceptors are used, this `post_pause_engine_with_metadata` interceptor runs after the + `post_pause_engine` interceptor. The (possibly modified) response returned by + `post_pause_engine` will be passed to + `post_pause_engine_with_metadata`. + """ + return response, metadata + def pre_resume_engine( self, request: engine_service.ResumeEngineRequest, @@ -280,12 +393,33 @@ def pre_resume_engine( def post_resume_engine(self, response: engine.Engine) -> engine.Engine: """Post-rpc interceptor for resume_engine - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_resume_engine_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the EngineService server but before - it is returned to user code. + it is returned to user code. This `post_resume_engine` interceptor runs + before the `post_resume_engine_with_metadata` interceptor. """ return response + def post_resume_engine_with_metadata( + self, response: engine.Engine, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[engine.Engine, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for resume_engine + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the EngineService server but before it is returned to user code. + + We recommend only using this `post_resume_engine_with_metadata` + interceptor in new development instead of the `post_resume_engine` interceptor. + When both interceptors are used, this `post_resume_engine_with_metadata` interceptor runs after the + `post_resume_engine` interceptor. The (possibly modified) response returned by + `post_resume_engine` will be passed to + `post_resume_engine_with_metadata`. + """ + return response, metadata + def pre_tune_engine( self, request: engine_service.TuneEngineRequest, @@ -305,12 +439,35 @@ def post_tune_engine( ) -> operations_pb2.Operation: """Post-rpc interceptor for tune_engine - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_tune_engine_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the EngineService server but before - it is returned to user code. + it is returned to user code. This `post_tune_engine` interceptor runs + before the `post_tune_engine_with_metadata` interceptor. """ return response + def post_tune_engine_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for tune_engine + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the EngineService server but before it is returned to user code. + + We recommend only using this `post_tune_engine_with_metadata` + interceptor in new development instead of the `post_tune_engine` interceptor. + When both interceptors are used, this `post_tune_engine_with_metadata` interceptor runs after the + `post_tune_engine` interceptor. The (possibly modified) response returned by + `post_tune_engine` will be passed to + `post_tune_engine_with_metadata`. 
+ """ + return response, metadata + def pre_update_engine( self, request: engine_service.UpdateEngineRequest, @@ -328,12 +485,35 @@ def pre_update_engine( def post_update_engine(self, response: gcd_engine.Engine) -> gcd_engine.Engine: """Post-rpc interceptor for update_engine - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_engine_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the EngineService server but before - it is returned to user code. + it is returned to user code. This `post_update_engine` interceptor runs + before the `post_update_engine_with_metadata` interceptor. """ return response + def post_update_engine_with_metadata( + self, + response: gcd_engine.Engine, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gcd_engine.Engine, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_engine + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the EngineService server but before it is returned to user code. + + We recommend only using this `post_update_engine_with_metadata` + interceptor in new development instead of the `post_update_engine` interceptor. + When both interceptors are used, this `post_update_engine_with_metadata` interceptor runs after the + `post_update_engine` interceptor. The (possibly modified) response returned by + `post_update_engine` will be passed to + `post_update_engine_with_metadata`. + """ + return response, metadata + def pre_cancel_operation( self, request: operations_pb2.CancelOperationRequest, @@ -794,6 +974,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_engine(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_engine_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -937,6 +1121,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_engine(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_engine_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1086,6 +1274,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_engine(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_engine_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1233,6 +1425,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_engines(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_engines_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1389,6 +1585,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_pause_engine(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + 
resp, _ = self._interceptor.post_pause_engine_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1539,6 +1739,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_resume_engine(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_resume_engine_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1695,6 +1899,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_tune_engine(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_tune_engine_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1846,6 +2054,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_engine(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_engine_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/estimate_billing_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/estimate_billing_service/client.py index 4b70618b5684..79a0afbf55c4 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/estimate_billing_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/estimate_billing_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -485,6 +487,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -847,16 +876,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. 
- return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -902,16 +935,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def cancel_operation( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/estimate_billing_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/estimate_billing_service/transports/rest.py index ca7d96da4fd6..442b78676720 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/estimate_billing_service/transports/rest.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/estimate_billing_service/transports/rest.py @@ -104,12 +104,35 @@ def post_estimate_data_size( ) -> operations_pb2.Operation: """Post-rpc interceptor for estimate_data_size - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_estimate_data_size_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the EstimateBillingService server but before - it is returned to user code. + it is returned to user code. This `post_estimate_data_size` interceptor runs + before the `post_estimate_data_size_with_metadata` interceptor. """ return response + def post_estimate_data_size_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for estimate_data_size + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the EstimateBillingService server but before it is returned to user code. + + We recommend only using this `post_estimate_data_size_with_metadata` + interceptor in new development instead of the `post_estimate_data_size` interceptor. + When both interceptors are used, this `post_estimate_data_size_with_metadata` interceptor runs after the + `post_estimate_data_size` interceptor. The (possibly modified) response returned by + `post_estimate_data_size` will be passed to + `post_estimate_data_size_with_metadata`. 
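Throughout the transports, each `__call__` now builds the metadata it hands to the new hooks with `response_metadata = [(k, str(v)) for k, v in response.headers.items()]`. A tiny illustration of the resulting shape; the header names and values below are made up.

# Illustration only: the shape of the metadata sequence passed to *_with_metadata hooks.
headers = {
    "content-type": "application/json; charset=UTF-8",
    "x-goog-request-params": "name=projects/example",
}
response_metadata = [(k, str(v)) for k, v in headers.items()]
# [('content-type', 'application/json; charset=UTF-8'),
#  ('x-goog-request-params', 'name=projects/example')]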
+ """ + return response, metadata + def pre_cancel_operation( self, request: operations_pb2.CancelOperationRequest, @@ -574,6 +597,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_estimate_data_size(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_estimate_data_size_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/evaluation_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/evaluation_service/client.py index a478f8307575..d0083bc2ee24 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/evaluation_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/evaluation_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -654,6 +656,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -1438,16 +1467,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -1493,16 +1526,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. 
+ return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def cancel_operation( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/evaluation_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/evaluation_service/transports/rest.py index bdcb6826075e..964b6bfc0425 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/evaluation_service/transports/rest.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/evaluation_service/transports/rest.py @@ -128,12 +128,35 @@ def post_create_evaluation( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_evaluation - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_evaluation_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the EvaluationService server but before - it is returned to user code. + it is returned to user code. This `post_create_evaluation` interceptor runs + before the `post_create_evaluation_with_metadata` interceptor. """ return response + def post_create_evaluation_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_evaluation + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the EvaluationService server but before it is returned to user code. + + We recommend only using this `post_create_evaluation_with_metadata` + interceptor in new development instead of the `post_create_evaluation` interceptor. + When both interceptors are used, this `post_create_evaluation_with_metadata` interceptor runs after the + `post_create_evaluation` interceptor. The (possibly modified) response returned by + `post_create_evaluation` will be passed to + `post_create_evaluation_with_metadata`. + """ + return response, metadata + def pre_get_evaluation( self, request: evaluation_service.GetEvaluationRequest, @@ -153,12 +176,35 @@ def post_get_evaluation( ) -> evaluation.Evaluation: """Post-rpc interceptor for get_evaluation - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_evaluation_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the EvaluationService server but before - it is returned to user code. + it is returned to user code. This `post_get_evaluation` interceptor runs + before the `post_get_evaluation_with_metadata` interceptor. """ return response + def post_get_evaluation_with_metadata( + self, + response: evaluation.Evaluation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[evaluation.Evaluation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_evaluation + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the EvaluationService server but before it is returned to user code. + + We recommend only using this `post_get_evaluation_with_metadata` + interceptor in new development instead of the `post_get_evaluation` interceptor. 
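On the client side, the new try/except blocks above wrap `list_operations` and `get_operation`, enrich 401/403/404 errors with credential information via `_add_cred_info_for_auth_errors`, and re-raise. A caller-side sketch of what that can look like; the client choice, the operation name, and the printing are illustrative, and Application Default Credentials are assumed.

# Sketch only: inspect the enriched error details after the changes above.
from google.api_core import exceptions as core_exceptions
from google.cloud import discoveryengine_v1alpha
from google.longrunning import operations_pb2

client = discoveryengine_v1alpha.EvaluationServiceClient()
try:
    client.get_operation(
        request=operations_pb2.GetOperationRequest(
            name="projects/example/locations/global/operations/op-123"
        )
    )
except core_exceptions.GoogleAPICallError as exc:
    # For 401/403/404 responses the details may now include a JSON string
    # describing the credential that was used (requires google-auth >= 2.35.0).
    for detail in exc.details:
        print(detail)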
+ When both interceptors are used, this `post_get_evaluation_with_metadata` interceptor runs after the + `post_get_evaluation` interceptor. The (possibly modified) response returned by + `post_get_evaluation` will be passed to + `post_get_evaluation_with_metadata`. + """ + return response, metadata + def pre_list_evaluation_results( self, request: evaluation_service.ListEvaluationResultsRequest, @@ -179,12 +225,38 @@ def post_list_evaluation_results( ) -> evaluation_service.ListEvaluationResultsResponse: """Post-rpc interceptor for list_evaluation_results - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_evaluation_results_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the EvaluationService server but before - it is returned to user code. + it is returned to user code. This `post_list_evaluation_results` interceptor runs + before the `post_list_evaluation_results_with_metadata` interceptor. """ return response + def post_list_evaluation_results_with_metadata( + self, + response: evaluation_service.ListEvaluationResultsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + evaluation_service.ListEvaluationResultsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_evaluation_results + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the EvaluationService server but before it is returned to user code. + + We recommend only using this `post_list_evaluation_results_with_metadata` + interceptor in new development instead of the `post_list_evaluation_results` interceptor. + When both interceptors are used, this `post_list_evaluation_results_with_metadata` interceptor runs after the + `post_list_evaluation_results` interceptor. The (possibly modified) response returned by + `post_list_evaluation_results` will be passed to + `post_list_evaluation_results_with_metadata`. + """ + return response, metadata + def pre_list_evaluations( self, request: evaluation_service.ListEvaluationsRequest, @@ -205,12 +277,38 @@ def post_list_evaluations( ) -> evaluation_service.ListEvaluationsResponse: """Post-rpc interceptor for list_evaluations - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_evaluations_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the EvaluationService server but before - it is returned to user code. + it is returned to user code. This `post_list_evaluations` interceptor runs + before the `post_list_evaluations_with_metadata` interceptor. """ return response + def post_list_evaluations_with_metadata( + self, + response: evaluation_service.ListEvaluationsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + evaluation_service.ListEvaluationsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_evaluations + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the EvaluationService server but before it is returned to user code. + + We recommend only using this `post_list_evaluations_with_metadata` + interceptor in new development instead of the `post_list_evaluations` interceptor. + When both interceptors are used, this `post_list_evaluations_with_metadata` interceptor runs after the + `post_list_evaluations` interceptor. 
The (possibly modified) response returned by + `post_list_evaluations` will be passed to + `post_list_evaluations_with_metadata`. + """ + return response, metadata + def pre_cancel_operation( self, request: operations_pb2.CancelOperationRequest, @@ -674,6 +772,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_evaluation(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_evaluation_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -821,6 +923,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_evaluation(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_evaluation_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -971,6 +1077,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_evaluation_results(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_evaluation_results_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1123,6 +1233,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_evaluations(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_evaluations_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/grounded_generation_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/grounded_generation_service/client.py index 71f9203f5418..0793fbc77a5a 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/grounded_generation_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/grounded_generation_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -491,6 +493,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. 
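The `_add_cred_info_for_auth_errors` helper whose signature and docstring appear just above is copied into every client in this change. It is restated in isolation below so the control flow is easier to follow; the standalone form and the explicit `credentials` parameter are editorial, the body mirrors the diff, and a credentials object exposing `get_cred_info()` (google-auth >= 2.35.0) is assumed.

# Restatement of the helper added in the hunks above; illustrative, not part of the diff.
import json
from http import HTTPStatus

from google.api_core import exceptions as core_exceptions


def add_cred_info_for_auth_errors(
    error: core_exceptions.GoogleAPICallError, credentials
) -> None:
    """Append credential info to the error details for 401/403/404 errors."""
    if error.code not in [
        HTTPStatus.UNAUTHORIZED,
        HTTPStatus.FORBIDDEN,
        HTTPStatus.NOT_FOUND,
    ]:
        return
    # get_cred_info is only available in google-auth >= 2.35.0.
    if not hasattr(credentials, "get_cred_info"):
        return
    cred_info = credentials.get_cred_info()
    if cred_info and hasattr(error._details, "append"):
        error._details.append(json.dumps(cred_info))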
+ """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -841,16 +870,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -896,16 +929,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def cancel_operation( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/grounded_generation_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/grounded_generation_service/transports/rest.py index b116b81a73ba..3fd56ba5c903 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/grounded_generation_service/transports/rest.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/grounded_generation_service/transports/rest.py @@ -104,12 +104,38 @@ def post_check_grounding( ) -> grounded_generation_service.CheckGroundingResponse: """Post-rpc interceptor for check_grounding - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_check_grounding_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the GroundedGenerationService server but before - it is returned to user code. + it is returned to user code. This `post_check_grounding` interceptor runs + before the `post_check_grounding_with_metadata` interceptor. """ return response + def post_check_grounding_with_metadata( + self, + response: grounded_generation_service.CheckGroundingResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + grounded_generation_service.CheckGroundingResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for check_grounding + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the GroundedGenerationService server but before it is returned to user code. 
+ + We recommend only using this `post_check_grounding_with_metadata` + interceptor in new development instead of the `post_check_grounding` interceptor. + When both interceptors are used, this `post_check_grounding_with_metadata` interceptor runs after the + `post_check_grounding` interceptor. The (possibly modified) response returned by + `post_check_grounding` will be passed to + `post_check_grounding_with_metadata`. + """ + return response, metadata + def pre_cancel_operation( self, request: operations_pb2.CancelOperationRequest, @@ -403,6 +429,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_check_grounding(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_check_grounding_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/project_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/project_service/client.py index 69a62d2b1fde..c77064ba5d02 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/project_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/project_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -486,6 +488,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -1139,16 +1168,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -1194,16 +1227,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def cancel_operation( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/project_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/project_service/transports/rest.py index dfed7b617f50..672f0809a5f9 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/project_service/transports/rest.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/project_service/transports/rest.py @@ -119,12 +119,35 @@ def pre_get_project( def post_get_project(self, response: project.Project) -> project.Project: """Post-rpc interceptor for get_project - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_project_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ProjectService server but before - it is returned to user code. + it is returned to user code. This `post_get_project` interceptor runs + before the `post_get_project_with_metadata` interceptor. """ return response + def post_get_project_with_metadata( + self, + response: project.Project, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[project.Project, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_project + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ProjectService server but before it is returned to user code. + + We recommend only using this `post_get_project_with_metadata` + interceptor in new development instead of the `post_get_project` interceptor. + When both interceptors are used, this `post_get_project_with_metadata` interceptor runs after the + `post_get_project` interceptor. The (possibly modified) response returned by + `post_get_project` will be passed to + `post_get_project_with_metadata`. + """ + return response, metadata + def pre_provision_project( self, request: project_service.ProvisionProjectRequest, @@ -144,12 +167,35 @@ def post_provision_project( ) -> operations_pb2.Operation: """Post-rpc interceptor for provision_project - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_provision_project_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ProjectService server but before - it is returned to user code. + it is returned to user code. This `post_provision_project` interceptor runs + before the `post_provision_project_with_metadata` interceptor. """ return response + def post_provision_project_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for provision_project + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ProjectService server but before it is returned to user code. 
+ + We recommend only using this `post_provision_project_with_metadata` + interceptor in new development instead of the `post_provision_project` interceptor. + When both interceptors are used, this `post_provision_project_with_metadata` interceptor runs after the + `post_provision_project` interceptor. The (possibly modified) response returned by + `post_provision_project` will be passed to + `post_provision_project_with_metadata`. + """ + return response, metadata + def pre_report_consent_change( self, request: project_service.ReportConsentChangeRequest, @@ -170,12 +216,35 @@ def post_report_consent_change( ) -> gcd_project.Project: """Post-rpc interceptor for report_consent_change - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_report_consent_change_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ProjectService server but before - it is returned to user code. + it is returned to user code. This `post_report_consent_change` interceptor runs + before the `post_report_consent_change_with_metadata` interceptor. """ return response + def post_report_consent_change_with_metadata( + self, + response: gcd_project.Project, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gcd_project.Project, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for report_consent_change + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ProjectService server but before it is returned to user code. + + We recommend only using this `post_report_consent_change_with_metadata` + interceptor in new development instead of the `post_report_consent_change` interceptor. + When both interceptors are used, this `post_report_consent_change_with_metadata` interceptor runs after the + `post_report_consent_change` interceptor. The (possibly modified) response returned by + `post_report_consent_change` will be passed to + `post_report_consent_change_with_metadata`. 
+ """ + return response, metadata + def pre_cancel_operation( self, request: operations_pb2.CancelOperationRequest, @@ -633,6 +702,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_project(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_project_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -784,6 +857,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_provision_project(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_provision_project_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -936,6 +1013,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_report_consent_change(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_report_consent_change_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/rank_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/rank_service/client.py index d6b5797f9682..91452ffdeefe 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/rank_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/rank_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -484,6 +486,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -823,16 +852,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. 
+ return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -878,16 +911,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def cancel_operation( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/rank_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/rank_service/transports/rest.py index 89fd6747b0e4..79b8a40a7ad1 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/rank_service/transports/rest.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/rank_service/transports/rest.py @@ -101,12 +101,35 @@ def post_rank( ) -> rank_service.RankResponse: """Post-rpc interceptor for rank - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_rank_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RankService server but before - it is returned to user code. + it is returned to user code. This `post_rank` interceptor runs + before the `post_rank_with_metadata` interceptor. """ return response + def post_rank_with_metadata( + self, + response: rank_service.RankResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[rank_service.RankResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for rank + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RankService server but before it is returned to user code. + + We recommend only using this `post_rank_with_metadata` + interceptor in new development instead of the `post_rank` interceptor. + When both interceptors are used, this `post_rank_with_metadata` interceptor runs after the + `post_rank` interceptor. The (possibly modified) response returned by + `post_rank` will be passed to + `post_rank_with_metadata`. 
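Note that the transport hunks keep only the response half of what the new hooks return, for example `resp, _ = self._interceptor.post_rank_with_metadata(resp, response_metadata)`, so metadata returned by the hook is dropped and only changes made to the response object propagate to the caller. A short sketch against the RankService interceptor from the hunks above; the extra tuple appended below is illustrative and never reaches user code.

# Sketch only: extra metadata returned by the hook is discarded by the transport.
from google.cloud.discoveryengine_v1alpha.services.rank_service.transports.rest import (
    RankServiceRestInterceptor,
)


class MetadataProbeInterceptor(RankServiceRestInterceptor):
    def post_rank_with_metadata(self, response, metadata):
        # The transport unpacks the return value as `resp, _ = ...`, so this
        # appended tuple is dropped; only `response` is seen by the caller.
        return response, list(metadata) + [("x-demo-header", "ignored")]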
+ """ + return response, metadata + def pre_cancel_operation( self, request: operations_pb2.CancelOperationRequest, @@ -395,6 +418,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_rank(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_rank_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/recommendation_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/recommendation_service/client.py index 5eeba0346cd3..b3274ab4aaa0 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/recommendation_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/recommendation_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -562,6 +564,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -911,16 +940,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -966,16 +999,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. 
+ return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def cancel_operation( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/recommendation_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/recommendation_service/transports/rest.py index 69a77197e128..4c165c141afe 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/recommendation_service/transports/rest.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/recommendation_service/transports/rest.py @@ -103,12 +103,38 @@ def post_recommend( ) -> recommendation_service.RecommendResponse: """Post-rpc interceptor for recommend - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_recommend_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RecommendationService server but before - it is returned to user code. + it is returned to user code. This `post_recommend` interceptor runs + before the `post_recommend_with_metadata` interceptor. """ return response + def post_recommend_with_metadata( + self, + response: recommendation_service.RecommendResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + recommendation_service.RecommendResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for recommend + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RecommendationService server but before it is returned to user code. + + We recommend only using this `post_recommend_with_metadata` + interceptor in new development instead of the `post_recommend` interceptor. + When both interceptors are used, this `post_recommend_with_metadata` interceptor runs after the + `post_recommend` interceptor. The (possibly modified) response returned by + `post_recommend` will be passed to + `post_recommend_with_metadata`. + """ + return response, metadata + def pre_cancel_operation( self, request: operations_pb2.CancelOperationRequest, @@ -395,6 +421,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_recommend(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_recommend_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/sample_query_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/sample_query_service/client.py index 7b048b486d3c..31fa66505510 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/sample_query_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/sample_query_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -520,6 +522,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. 
return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -1540,16 +1569,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -1595,16 +1628,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def cancel_operation( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/sample_query_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/sample_query_service/transports/rest.py index d8084dcb7cba..d20ce3d47973 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/sample_query_service/transports/rest.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/sample_query_service/transports/rest.py @@ -144,12 +144,35 @@ def post_create_sample_query( ) -> gcd_sample_query.SampleQuery: """Post-rpc interceptor for create_sample_query - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_sample_query_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the SampleQueryService server but before - it is returned to user code. + it is returned to user code. This `post_create_sample_query` interceptor runs + before the `post_create_sample_query_with_metadata` interceptor. 
""" return response + def post_create_sample_query_with_metadata( + self, + response: gcd_sample_query.SampleQuery, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gcd_sample_query.SampleQuery, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_sample_query + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SampleQueryService server but before it is returned to user code. + + We recommend only using this `post_create_sample_query_with_metadata` + interceptor in new development instead of the `post_create_sample_query` interceptor. + When both interceptors are used, this `post_create_sample_query_with_metadata` interceptor runs after the + `post_create_sample_query` interceptor. The (possibly modified) response returned by + `post_create_sample_query` will be passed to + `post_create_sample_query_with_metadata`. + """ + return response, metadata + def pre_delete_sample_query( self, request: sample_query_service.DeleteSampleQueryRequest, @@ -185,12 +208,35 @@ def post_get_sample_query( ) -> sample_query.SampleQuery: """Post-rpc interceptor for get_sample_query - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_sample_query_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the SampleQueryService server but before - it is returned to user code. + it is returned to user code. This `post_get_sample_query` interceptor runs + before the `post_get_sample_query_with_metadata` interceptor. """ return response + def post_get_sample_query_with_metadata( + self, + response: sample_query.SampleQuery, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[sample_query.SampleQuery, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_sample_query + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SampleQueryService server but before it is returned to user code. + + We recommend only using this `post_get_sample_query_with_metadata` + interceptor in new development instead of the `post_get_sample_query` interceptor. + When both interceptors are used, this `post_get_sample_query_with_metadata` interceptor runs after the + `post_get_sample_query` interceptor. The (possibly modified) response returned by + `post_get_sample_query` will be passed to + `post_get_sample_query_with_metadata`. + """ + return response, metadata + def pre_import_sample_queries( self, request: import_config.ImportSampleQueriesRequest, @@ -211,12 +257,35 @@ def post_import_sample_queries( ) -> operations_pb2.Operation: """Post-rpc interceptor for import_sample_queries - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_import_sample_queries_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the SampleQueryService server but before - it is returned to user code. + it is returned to user code. This `post_import_sample_queries` interceptor runs + before the `post_import_sample_queries_with_metadata` interceptor. 
""" return response + def post_import_sample_queries_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for import_sample_queries + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SampleQueryService server but before it is returned to user code. + + We recommend only using this `post_import_sample_queries_with_metadata` + interceptor in new development instead of the `post_import_sample_queries` interceptor. + When both interceptors are used, this `post_import_sample_queries_with_metadata` interceptor runs after the + `post_import_sample_queries` interceptor. The (possibly modified) response returned by + `post_import_sample_queries` will be passed to + `post_import_sample_queries_with_metadata`. + """ + return response, metadata + def pre_list_sample_queries( self, request: sample_query_service.ListSampleQueriesRequest, @@ -237,12 +306,38 @@ def post_list_sample_queries( ) -> sample_query_service.ListSampleQueriesResponse: """Post-rpc interceptor for list_sample_queries - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_sample_queries_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the SampleQueryService server but before - it is returned to user code. + it is returned to user code. This `post_list_sample_queries` interceptor runs + before the `post_list_sample_queries_with_metadata` interceptor. """ return response + def post_list_sample_queries_with_metadata( + self, + response: sample_query_service.ListSampleQueriesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + sample_query_service.ListSampleQueriesResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_sample_queries + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SampleQueryService server but before it is returned to user code. + + We recommend only using this `post_list_sample_queries_with_metadata` + interceptor in new development instead of the `post_list_sample_queries` interceptor. + When both interceptors are used, this `post_list_sample_queries_with_metadata` interceptor runs after the + `post_list_sample_queries` interceptor. The (possibly modified) response returned by + `post_list_sample_queries` will be passed to + `post_list_sample_queries_with_metadata`. + """ + return response, metadata + def pre_update_sample_query( self, request: sample_query_service.UpdateSampleQueryRequest, @@ -263,12 +358,35 @@ def post_update_sample_query( ) -> gcd_sample_query.SampleQuery: """Post-rpc interceptor for update_sample_query - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_sample_query_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the SampleQueryService server but before - it is returned to user code. + it is returned to user code. This `post_update_sample_query` interceptor runs + before the `post_update_sample_query_with_metadata` interceptor. 
""" return response + def post_update_sample_query_with_metadata( + self, + response: gcd_sample_query.SampleQuery, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gcd_sample_query.SampleQuery, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_sample_query + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SampleQueryService server but before it is returned to user code. + + We recommend only using this `post_update_sample_query_with_metadata` + interceptor in new development instead of the `post_update_sample_query` interceptor. + When both interceptors are used, this `post_update_sample_query_with_metadata` interceptor runs after the + `post_update_sample_query` interceptor. The (possibly modified) response returned by + `post_update_sample_query` will be passed to + `post_update_sample_query_with_metadata`. + """ + return response, metadata + def pre_cancel_operation( self, request: operations_pb2.CancelOperationRequest, @@ -733,6 +851,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_sample_query(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_sample_query_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -991,6 +1113,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_sample_query(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_sample_query_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1145,6 +1271,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_import_sample_queries(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_import_sample_queries_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1293,6 +1423,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_sample_queries(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_sample_queries_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1448,6 +1582,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_sample_query(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_sample_query_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/sample_query_set_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/sample_query_set_service/client.py index 7c541642e547..8dba3e40cd20 100644 --- 
a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/sample_query_set_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/sample_query_set_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -516,6 +518,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -1434,16 +1463,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -1489,16 +1522,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def cancel_operation( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/sample_query_set_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/sample_query_set_service/transports/rest.py index ce064e1bb6bd..19a90990d89d 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/sample_query_set_service/transports/rest.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/sample_query_set_service/transports/rest.py @@ -137,12 +137,37 @@ def post_create_sample_query_set( ) -> gcd_sample_query_set.SampleQuerySet: """Post-rpc interceptor for create_sample_query_set - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_sample_query_set_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response after it is returned by the SampleQuerySetService server but before - it is returned to user code. + it is returned to user code. This `post_create_sample_query_set` interceptor runs + before the `post_create_sample_query_set_with_metadata` interceptor. """ return response + def post_create_sample_query_set_with_metadata( + self, + response: gcd_sample_query_set.SampleQuerySet, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + gcd_sample_query_set.SampleQuerySet, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for create_sample_query_set + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SampleQuerySetService server but before it is returned to user code. + + We recommend only using this `post_create_sample_query_set_with_metadata` + interceptor in new development instead of the `post_create_sample_query_set` interceptor. + When both interceptors are used, this `post_create_sample_query_set_with_metadata` interceptor runs after the + `post_create_sample_query_set` interceptor. The (possibly modified) response returned by + `post_create_sample_query_set` will be passed to + `post_create_sample_query_set_with_metadata`. + """ + return response, metadata + def pre_delete_sample_query_set( self, request: sample_query_set_service.DeleteSampleQuerySetRequest, @@ -178,12 +203,37 @@ def post_get_sample_query_set( ) -> sample_query_set.SampleQuerySet: """Post-rpc interceptor for get_sample_query_set - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_sample_query_set_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the SampleQuerySetService server but before - it is returned to user code. + it is returned to user code. This `post_get_sample_query_set` interceptor runs + before the `post_get_sample_query_set_with_metadata` interceptor. """ return response + def post_get_sample_query_set_with_metadata( + self, + response: sample_query_set.SampleQuerySet, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + sample_query_set.SampleQuerySet, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for get_sample_query_set + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SampleQuerySetService server but before it is returned to user code. + + We recommend only using this `post_get_sample_query_set_with_metadata` + interceptor in new development instead of the `post_get_sample_query_set` interceptor. + When both interceptors are used, this `post_get_sample_query_set_with_metadata` interceptor runs after the + `post_get_sample_query_set` interceptor. The (possibly modified) response returned by + `post_get_sample_query_set` will be passed to + `post_get_sample_query_set_with_metadata`. + """ + return response, metadata + def pre_list_sample_query_sets( self, request: sample_query_set_service.ListSampleQuerySetsRequest, @@ -204,12 +254,38 @@ def post_list_sample_query_sets( ) -> sample_query_set_service.ListSampleQuerySetsResponse: """Post-rpc interceptor for list_sample_query_sets - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_sample_query_sets_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response after it is returned by the SampleQuerySetService server but before - it is returned to user code. + it is returned to user code. This `post_list_sample_query_sets` interceptor runs + before the `post_list_sample_query_sets_with_metadata` interceptor. """ return response + def post_list_sample_query_sets_with_metadata( + self, + response: sample_query_set_service.ListSampleQuerySetsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + sample_query_set_service.ListSampleQuerySetsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_sample_query_sets + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SampleQuerySetService server but before it is returned to user code. + + We recommend only using this `post_list_sample_query_sets_with_metadata` + interceptor in new development instead of the `post_list_sample_query_sets` interceptor. + When both interceptors are used, this `post_list_sample_query_sets_with_metadata` interceptor runs after the + `post_list_sample_query_sets` interceptor. The (possibly modified) response returned by + `post_list_sample_query_sets` will be passed to + `post_list_sample_query_sets_with_metadata`. + """ + return response, metadata + def pre_update_sample_query_set( self, request: sample_query_set_service.UpdateSampleQuerySetRequest, @@ -230,12 +306,37 @@ def post_update_sample_query_set( ) -> gcd_sample_query_set.SampleQuerySet: """Post-rpc interceptor for update_sample_query_set - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_sample_query_set_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the SampleQuerySetService server but before - it is returned to user code. + it is returned to user code. This `post_update_sample_query_set` interceptor runs + before the `post_update_sample_query_set_with_metadata` interceptor. """ return response + def post_update_sample_query_set_with_metadata( + self, + response: gcd_sample_query_set.SampleQuerySet, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + gcd_sample_query_set.SampleQuerySet, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for update_sample_query_set + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SampleQuerySetService server but before it is returned to user code. + + We recommend only using this `post_update_sample_query_set_with_metadata` + interceptor in new development instead of the `post_update_sample_query_set` interceptor. + When both interceptors are used, this `post_update_sample_query_set_with_metadata` interceptor runs after the + `post_update_sample_query_set` interceptor. The (possibly modified) response returned by + `post_update_sample_query_set` will be passed to + `post_update_sample_query_set_with_metadata`. 
+ """ + return response, metadata + def pre_cancel_operation( self, request: operations_pb2.CancelOperationRequest, @@ -531,6 +632,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_sample_query_set(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_sample_query_set_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -797,6 +902,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_sample_query_set(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_sample_query_set_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -947,6 +1056,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_sample_query_sets(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_sample_query_sets_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1108,6 +1221,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_sample_query_set(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_sample_query_set_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/schema_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/schema_service/client.py index 49690a412cd7..bdd60bd0940a 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/schema_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/schema_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -517,6 +519,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. 
+ """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -1383,16 +1412,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -1438,16 +1471,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def cancel_operation( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/schema_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/schema_service/transports/rest.py index c162de309e52..cc807e2e2c8c 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/schema_service/transports/rest.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/schema_service/transports/rest.py @@ -135,12 +135,35 @@ def post_create_schema( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_schema - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_schema_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the SchemaService server but before - it is returned to user code. + it is returned to user code. This `post_create_schema` interceptor runs + before the `post_create_schema_with_metadata` interceptor. """ return response + def post_create_schema_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_schema + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SchemaService server but before it is returned to user code. + + We recommend only using this `post_create_schema_with_metadata` + interceptor in new development instead of the `post_create_schema` interceptor. + When both interceptors are used, this `post_create_schema_with_metadata` interceptor runs after the + `post_create_schema` interceptor. 
The (possibly modified) response returned by + `post_create_schema` will be passed to + `post_create_schema_with_metadata`. + """ + return response, metadata + def pre_delete_schema( self, request: schema_service.DeleteSchemaRequest, @@ -160,12 +183,35 @@ def post_delete_schema( ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_schema - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_schema_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the SchemaService server but before - it is returned to user code. + it is returned to user code. This `post_delete_schema` interceptor runs + before the `post_delete_schema_with_metadata` interceptor. """ return response + def post_delete_schema_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_schema + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SchemaService server but before it is returned to user code. + + We recommend only using this `post_delete_schema_with_metadata` + interceptor in new development instead of the `post_delete_schema` interceptor. + When both interceptors are used, this `post_delete_schema_with_metadata` interceptor runs after the + `post_delete_schema` interceptor. The (possibly modified) response returned by + `post_delete_schema` will be passed to + `post_delete_schema_with_metadata`. + """ + return response, metadata + def pre_get_schema( self, request: schema_service.GetSchemaRequest, @@ -183,12 +229,33 @@ def pre_get_schema( def post_get_schema(self, response: schema.Schema) -> schema.Schema: """Post-rpc interceptor for get_schema - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_schema_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the SchemaService server but before - it is returned to user code. + it is returned to user code. This `post_get_schema` interceptor runs + before the `post_get_schema_with_metadata` interceptor. """ return response + def post_get_schema_with_metadata( + self, response: schema.Schema, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[schema.Schema, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_schema + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SchemaService server but before it is returned to user code. + + We recommend only using this `post_get_schema_with_metadata` + interceptor in new development instead of the `post_get_schema` interceptor. + When both interceptors are used, this `post_get_schema_with_metadata` interceptor runs after the + `post_get_schema` interceptor. The (possibly modified) response returned by + `post_get_schema` will be passed to + `post_get_schema_with_metadata`. + """ + return response, metadata + def pre_list_schemas( self, request: schema_service.ListSchemasRequest, @@ -208,12 +275,37 @@ def post_list_schemas( ) -> schema_service.ListSchemasResponse: """Post-rpc interceptor for list_schemas - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_schemas_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response after it is returned by the SchemaService server but before - it is returned to user code. + it is returned to user code. This `post_list_schemas` interceptor runs + before the `post_list_schemas_with_metadata` interceptor. """ return response + def post_list_schemas_with_metadata( + self, + response: schema_service.ListSchemasResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + schema_service.ListSchemasResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_schemas + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SchemaService server but before it is returned to user code. + + We recommend only using this `post_list_schemas_with_metadata` + interceptor in new development instead of the `post_list_schemas` interceptor. + When both interceptors are used, this `post_list_schemas_with_metadata` interceptor runs after the + `post_list_schemas` interceptor. The (possibly modified) response returned by + `post_list_schemas` will be passed to + `post_list_schemas_with_metadata`. + """ + return response, metadata + def pre_update_schema( self, request: schema_service.UpdateSchemaRequest, @@ -233,12 +325,35 @@ def post_update_schema( ) -> operations_pb2.Operation: """Post-rpc interceptor for update_schema - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_schema_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the SchemaService server but before - it is returned to user code. + it is returned to user code. This `post_update_schema` interceptor runs + before the `post_update_schema_with_metadata` interceptor. """ return response + def post_update_schema_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_schema + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SchemaService server but before it is returned to user code. + + We recommend only using this `post_update_schema_with_metadata` + interceptor in new development instead of the `post_update_schema` interceptor. + When both interceptors are used, this `post_update_schema_with_metadata` interceptor runs after the + `post_update_schema` interceptor. The (possibly modified) response returned by + `post_update_schema` will be passed to + `post_update_schema_with_metadata`. 
+ """ + return response, metadata + def pre_cancel_operation( self, request: operations_pb2.CancelOperationRequest, @@ -699,6 +814,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_schema(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_schema_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -842,6 +961,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_schema(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_schema_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -990,6 +1113,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_schema(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_schema_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1137,6 +1264,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_schemas(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_schemas_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1288,6 +1419,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_schema(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_schema_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/search_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/search_service/client.py index 9ab02e50d87d..62f31b196180 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/search_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/search_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -611,6 +613,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. 
+ """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -967,16 +996,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -1022,16 +1055,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def cancel_operation( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/search_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/search_service/transports/rest.py index f192c9a4f4f1..81d8b3dcc3ae 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/search_service/transports/rest.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/search_service/transports/rest.py @@ -101,12 +101,35 @@ def post_search( ) -> search_service.SearchResponse: """Post-rpc interceptor for search - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_search_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the SearchService server but before - it is returned to user code. + it is returned to user code. This `post_search` interceptor runs + before the `post_search_with_metadata` interceptor. """ return response + def post_search_with_metadata( + self, + response: search_service.SearchResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[search_service.SearchResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for search + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SearchService server but before it is returned to user code. + + We recommend only using this `post_search_with_metadata` + interceptor in new development instead of the `post_search` interceptor. + When both interceptors are used, this `post_search_with_metadata` interceptor runs after the + `post_search` interceptor. 
The (possibly modified) response returned by + `post_search` will be passed to + `post_search_with_metadata`. + """ + return response, metadata + def pre_cancel_operation( self, request: operations_pb2.CancelOperationRequest, @@ -397,6 +420,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_search(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_search_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/search_tuning_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/search_tuning_service/client.py index be4d72c8e69e..3068ff9bdf8f 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/search_tuning_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/search_tuning_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -515,6 +517,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -968,16 +997,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -1023,16 +1056,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. 
+ return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def cancel_operation( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/search_tuning_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/search_tuning_service/transports/rest.py index 22f51036803d..cdd5867f6aff 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/search_tuning_service/transports/rest.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/search_tuning_service/transports/rest.py @@ -112,12 +112,38 @@ def post_list_custom_models( ) -> search_tuning_service.ListCustomModelsResponse: """Post-rpc interceptor for list_custom_models - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_custom_models_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the SearchTuningService server but before - it is returned to user code. + it is returned to user code. This `post_list_custom_models` interceptor runs + before the `post_list_custom_models_with_metadata` interceptor. """ return response + def post_list_custom_models_with_metadata( + self, + response: search_tuning_service.ListCustomModelsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + search_tuning_service.ListCustomModelsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_custom_models + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SearchTuningService server but before it is returned to user code. + + We recommend only using this `post_list_custom_models_with_metadata` + interceptor in new development instead of the `post_list_custom_models` interceptor. + When both interceptors are used, this `post_list_custom_models_with_metadata` interceptor runs after the + `post_list_custom_models` interceptor. The (possibly modified) response returned by + `post_list_custom_models` will be passed to + `post_list_custom_models_with_metadata`. + """ + return response, metadata + def pre_train_custom_model( self, request: search_tuning_service.TrainCustomModelRequest, @@ -138,12 +164,35 @@ def post_train_custom_model( ) -> operations_pb2.Operation: """Post-rpc interceptor for train_custom_model - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_train_custom_model_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the SearchTuningService server but before - it is returned to user code. + it is returned to user code. This `post_train_custom_model` interceptor runs + before the `post_train_custom_model_with_metadata` interceptor. """ return response + def post_train_custom_model_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for train_custom_model + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SearchTuningService server but before it is returned to user code. 
+ + We recommend only using this `post_train_custom_model_with_metadata` + interceptor in new development instead of the `post_train_custom_model` interceptor. + When both interceptors are used, this `post_train_custom_model_with_metadata` interceptor runs after the + `post_train_custom_model` interceptor. The (possibly modified) response returned by + `post_train_custom_model` will be passed to + `post_train_custom_model_with_metadata`. + """ + return response, metadata + def pre_cancel_operation( self, request: operations_pb2.CancelOperationRequest, @@ -602,6 +651,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_custom_models(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_custom_models_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -756,6 +809,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_train_custom_model(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_train_custom_model_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/serving_config_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/serving_config_service/client.py index 3d74a72d9aac..4a8b402e45e4 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/serving_config_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/serving_config_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -498,6 +500,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -1128,16 +1157,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. 
- return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -1183,16 +1216,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def cancel_operation( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/serving_config_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/serving_config_service/transports/rest.py index a41d8c9d08d2..5a308a95b1cb 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/serving_config_service/transports/rest.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/serving_config_service/transports/rest.py @@ -124,12 +124,35 @@ def post_get_serving_config( ) -> serving_config.ServingConfig: """Post-rpc interceptor for get_serving_config - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_serving_config_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ServingConfigService server but before - it is returned to user code. + it is returned to user code. This `post_get_serving_config` interceptor runs + before the `post_get_serving_config_with_metadata` interceptor. """ return response + def post_get_serving_config_with_metadata( + self, + response: serving_config.ServingConfig, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[serving_config.ServingConfig, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_serving_config + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ServingConfigService server but before it is returned to user code. + + We recommend only using this `post_get_serving_config_with_metadata` + interceptor in new development instead of the `post_get_serving_config` interceptor. + When both interceptors are used, this `post_get_serving_config_with_metadata` interceptor runs after the + `post_get_serving_config` interceptor. The (possibly modified) response returned by + `post_get_serving_config` will be passed to + `post_get_serving_config_with_metadata`. + """ + return response, metadata + def pre_list_serving_configs( self, request: serving_config_service.ListServingConfigsRequest, @@ -150,12 +173,38 @@ def post_list_serving_configs( ) -> serving_config_service.ListServingConfigsResponse: """Post-rpc interceptor for list_serving_configs - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_serving_configs_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ServingConfigService server but before - it is returned to user code. + it is returned to user code. 
This `post_list_serving_configs` interceptor runs + before the `post_list_serving_configs_with_metadata` interceptor. """ return response + def post_list_serving_configs_with_metadata( + self, + response: serving_config_service.ListServingConfigsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + serving_config_service.ListServingConfigsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_serving_configs + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ServingConfigService server but before it is returned to user code. + + We recommend only using this `post_list_serving_configs_with_metadata` + interceptor in new development instead of the `post_list_serving_configs` interceptor. + When both interceptors are used, this `post_list_serving_configs_with_metadata` interceptor runs after the + `post_list_serving_configs` interceptor. The (possibly modified) response returned by + `post_list_serving_configs` will be passed to + `post_list_serving_configs_with_metadata`. + """ + return response, metadata + def pre_update_serving_config( self, request: serving_config_service.UpdateServingConfigRequest, @@ -176,12 +225,37 @@ def post_update_serving_config( ) -> gcd_serving_config.ServingConfig: """Post-rpc interceptor for update_serving_config - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_serving_config_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ServingConfigService server but before - it is returned to user code. + it is returned to user code. This `post_update_serving_config` interceptor runs + before the `post_update_serving_config_with_metadata` interceptor. """ return response + def post_update_serving_config_with_metadata( + self, + response: gcd_serving_config.ServingConfig, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + gcd_serving_config.ServingConfig, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for update_serving_config + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ServingConfigService server but before it is returned to user code. + + We recommend only using this `post_update_serving_config_with_metadata` + interceptor in new development instead of the `post_update_serving_config` interceptor. + When both interceptors are used, this `post_update_serving_config_with_metadata` interceptor runs after the + `post_update_serving_config` interceptor. The (possibly modified) response returned by + `post_update_serving_config` will be passed to + `post_update_serving_config_with_metadata`. 
+ """ + return response, metadata + def pre_cancel_operation( self, request: operations_pb2.CancelOperationRequest, @@ -471,6 +545,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_serving_config(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_serving_config_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -619,6 +697,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_serving_configs(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_serving_configs_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -781,6 +863,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_serving_config(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_serving_config_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/site_search_engine_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/site_search_engine_service/client.py index be00111936d1..ca2f705cb6b5 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/site_search_engine_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/site_search_engine_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -522,6 +524,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -2441,16 +2470,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. 
- return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -2496,16 +2529,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def cancel_operation( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/site_search_engine_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/site_search_engine_service/transports/rest.py index 2ddcd8c300c5..a940b44e3030 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/site_search_engine_service/transports/rest.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/site_search_engine_service/transports/rest.py @@ -211,12 +211,35 @@ def post_batch_create_target_sites( ) -> operations_pb2.Operation: """Post-rpc interceptor for batch_create_target_sites - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_batch_create_target_sites_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the SiteSearchEngineService server but before - it is returned to user code. + it is returned to user code. This `post_batch_create_target_sites` interceptor runs + before the `post_batch_create_target_sites_with_metadata` interceptor. """ return response + def post_batch_create_target_sites_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for batch_create_target_sites + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SiteSearchEngineService server but before it is returned to user code. + + We recommend only using this `post_batch_create_target_sites_with_metadata` + interceptor in new development instead of the `post_batch_create_target_sites` interceptor. + When both interceptors are used, this `post_batch_create_target_sites_with_metadata` interceptor runs after the + `post_batch_create_target_sites` interceptor. The (possibly modified) response returned by + `post_batch_create_target_sites` will be passed to + `post_batch_create_target_sites_with_metadata`. + """ + return response, metadata + def pre_batch_verify_target_sites( self, request: site_search_engine_service.BatchVerifyTargetSitesRequest, @@ -237,12 +260,35 @@ def post_batch_verify_target_sites( ) -> operations_pb2.Operation: """Post-rpc interceptor for batch_verify_target_sites - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_batch_verify_target_sites_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the SiteSearchEngineService server but before - it is returned to user code. 
+ it is returned to user code. This `post_batch_verify_target_sites` interceptor runs + before the `post_batch_verify_target_sites_with_metadata` interceptor. """ return response + def post_batch_verify_target_sites_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for batch_verify_target_sites + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SiteSearchEngineService server but before it is returned to user code. + + We recommend only using this `post_batch_verify_target_sites_with_metadata` + interceptor in new development instead of the `post_batch_verify_target_sites` interceptor. + When both interceptors are used, this `post_batch_verify_target_sites_with_metadata` interceptor runs after the + `post_batch_verify_target_sites` interceptor. The (possibly modified) response returned by + `post_batch_verify_target_sites` will be passed to + `post_batch_verify_target_sites_with_metadata`. + """ + return response, metadata + def pre_create_target_site( self, request: site_search_engine_service.CreateTargetSiteRequest, @@ -263,12 +309,35 @@ def post_create_target_site( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_target_site - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_target_site_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the SiteSearchEngineService server but before - it is returned to user code. + it is returned to user code. This `post_create_target_site` interceptor runs + before the `post_create_target_site_with_metadata` interceptor. """ return response + def post_create_target_site_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_target_site + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SiteSearchEngineService server but before it is returned to user code. + + We recommend only using this `post_create_target_site_with_metadata` + interceptor in new development instead of the `post_create_target_site` interceptor. + When both interceptors are used, this `post_create_target_site_with_metadata` interceptor runs after the + `post_create_target_site` interceptor. The (possibly modified) response returned by + `post_create_target_site` will be passed to + `post_create_target_site_with_metadata`. + """ + return response, metadata + def pre_delete_target_site( self, request: site_search_engine_service.DeleteTargetSiteRequest, @@ -289,12 +358,35 @@ def post_delete_target_site( ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_target_site - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_target_site_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the SiteSearchEngineService server but before - it is returned to user code. + it is returned to user code. This `post_delete_target_site` interceptor runs + before the `post_delete_target_site_with_metadata` interceptor. 
""" return response + def post_delete_target_site_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_target_site + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SiteSearchEngineService server but before it is returned to user code. + + We recommend only using this `post_delete_target_site_with_metadata` + interceptor in new development instead of the `post_delete_target_site` interceptor. + When both interceptors are used, this `post_delete_target_site_with_metadata` interceptor runs after the + `post_delete_target_site` interceptor. The (possibly modified) response returned by + `post_delete_target_site` will be passed to + `post_delete_target_site_with_metadata`. + """ + return response, metadata + def pre_disable_advanced_site_search( self, request: site_search_engine_service.DisableAdvancedSiteSearchRequest, @@ -315,12 +407,35 @@ def post_disable_advanced_site_search( ) -> operations_pb2.Operation: """Post-rpc interceptor for disable_advanced_site_search - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_disable_advanced_site_search_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the SiteSearchEngineService server but before - it is returned to user code. + it is returned to user code. This `post_disable_advanced_site_search` interceptor runs + before the `post_disable_advanced_site_search_with_metadata` interceptor. """ return response + def post_disable_advanced_site_search_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for disable_advanced_site_search + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SiteSearchEngineService server but before it is returned to user code. + + We recommend only using this `post_disable_advanced_site_search_with_metadata` + interceptor in new development instead of the `post_disable_advanced_site_search` interceptor. + When both interceptors are used, this `post_disable_advanced_site_search_with_metadata` interceptor runs after the + `post_disable_advanced_site_search` interceptor. The (possibly modified) response returned by + `post_disable_advanced_site_search` will be passed to + `post_disable_advanced_site_search_with_metadata`. + """ + return response, metadata + def pre_enable_advanced_site_search( self, request: site_search_engine_service.EnableAdvancedSiteSearchRequest, @@ -341,12 +456,35 @@ def post_enable_advanced_site_search( ) -> operations_pb2.Operation: """Post-rpc interceptor for enable_advanced_site_search - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_enable_advanced_site_search_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the SiteSearchEngineService server but before - it is returned to user code. + it is returned to user code. This `post_enable_advanced_site_search` interceptor runs + before the `post_enable_advanced_site_search_with_metadata` interceptor. 
""" return response + def post_enable_advanced_site_search_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for enable_advanced_site_search + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SiteSearchEngineService server but before it is returned to user code. + + We recommend only using this `post_enable_advanced_site_search_with_metadata` + interceptor in new development instead of the `post_enable_advanced_site_search` interceptor. + When both interceptors are used, this `post_enable_advanced_site_search_with_metadata` interceptor runs after the + `post_enable_advanced_site_search` interceptor. The (possibly modified) response returned by + `post_enable_advanced_site_search` will be passed to + `post_enable_advanced_site_search_with_metadata`. + """ + return response, metadata + def pre_fetch_domain_verification_status( self, request: site_search_engine_service.FetchDomainVerificationStatusRequest, @@ -367,12 +505,38 @@ def post_fetch_domain_verification_status( ) -> site_search_engine_service.FetchDomainVerificationStatusResponse: """Post-rpc interceptor for fetch_domain_verification_status - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_fetch_domain_verification_status_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the SiteSearchEngineService server but before - it is returned to user code. + it is returned to user code. This `post_fetch_domain_verification_status` interceptor runs + before the `post_fetch_domain_verification_status_with_metadata` interceptor. """ return response + def post_fetch_domain_verification_status_with_metadata( + self, + response: site_search_engine_service.FetchDomainVerificationStatusResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + site_search_engine_service.FetchDomainVerificationStatusResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for fetch_domain_verification_status + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SiteSearchEngineService server but before it is returned to user code. + + We recommend only using this `post_fetch_domain_verification_status_with_metadata` + interceptor in new development instead of the `post_fetch_domain_verification_status` interceptor. + When both interceptors are used, this `post_fetch_domain_verification_status_with_metadata` interceptor runs after the + `post_fetch_domain_verification_status` interceptor. The (possibly modified) response returned by + `post_fetch_domain_verification_status` will be passed to + `post_fetch_domain_verification_status_with_metadata`. + """ + return response, metadata + def pre_get_site_search_engine( self, request: site_search_engine_service.GetSiteSearchEngineRequest, @@ -393,12 +557,37 @@ def post_get_site_search_engine( ) -> site_search_engine.SiteSearchEngine: """Post-rpc interceptor for get_site_search_engine - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_site_search_engine_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the SiteSearchEngineService server but before - it is returned to user code. 
+ it is returned to user code. This `post_get_site_search_engine` interceptor runs + before the `post_get_site_search_engine_with_metadata` interceptor. """ return response + def post_get_site_search_engine_with_metadata( + self, + response: site_search_engine.SiteSearchEngine, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + site_search_engine.SiteSearchEngine, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for get_site_search_engine + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SiteSearchEngineService server but before it is returned to user code. + + We recommend only using this `post_get_site_search_engine_with_metadata` + interceptor in new development instead of the `post_get_site_search_engine` interceptor. + When both interceptors are used, this `post_get_site_search_engine_with_metadata` interceptor runs after the + `post_get_site_search_engine` interceptor. The (possibly modified) response returned by + `post_get_site_search_engine` will be passed to + `post_get_site_search_engine_with_metadata`. + """ + return response, metadata + def pre_get_target_site( self, request: site_search_engine_service.GetTargetSiteRequest, @@ -419,12 +608,35 @@ def post_get_target_site( ) -> site_search_engine.TargetSite: """Post-rpc interceptor for get_target_site - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_target_site_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the SiteSearchEngineService server but before - it is returned to user code. + it is returned to user code. This `post_get_target_site` interceptor runs + before the `post_get_target_site_with_metadata` interceptor. """ return response + def post_get_target_site_with_metadata( + self, + response: site_search_engine.TargetSite, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[site_search_engine.TargetSite, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_target_site + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SiteSearchEngineService server but before it is returned to user code. + + We recommend only using this `post_get_target_site_with_metadata` + interceptor in new development instead of the `post_get_target_site` interceptor. + When both interceptors are used, this `post_get_target_site_with_metadata` interceptor runs after the + `post_get_target_site` interceptor. The (possibly modified) response returned by + `post_get_target_site` will be passed to + `post_get_target_site_with_metadata`. + """ + return response, metadata + def pre_get_uri_pattern_document_data( self, request: site_search_engine_service.GetUriPatternDocumentDataRequest, @@ -445,12 +657,38 @@ def post_get_uri_pattern_document_data( ) -> site_search_engine_service.GetUriPatternDocumentDataResponse: """Post-rpc interceptor for get_uri_pattern_document_data - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_uri_pattern_document_data_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the SiteSearchEngineService server but before - it is returned to user code. + it is returned to user code. This `post_get_uri_pattern_document_data` interceptor runs + before the `post_get_uri_pattern_document_data_with_metadata` interceptor. 
""" return response + def post_get_uri_pattern_document_data_with_metadata( + self, + response: site_search_engine_service.GetUriPatternDocumentDataResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + site_search_engine_service.GetUriPatternDocumentDataResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for get_uri_pattern_document_data + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SiteSearchEngineService server but before it is returned to user code. + + We recommend only using this `post_get_uri_pattern_document_data_with_metadata` + interceptor in new development instead of the `post_get_uri_pattern_document_data` interceptor. + When both interceptors are used, this `post_get_uri_pattern_document_data_with_metadata` interceptor runs after the + `post_get_uri_pattern_document_data` interceptor. The (possibly modified) response returned by + `post_get_uri_pattern_document_data` will be passed to + `post_get_uri_pattern_document_data_with_metadata`. + """ + return response, metadata + def pre_list_target_sites( self, request: site_search_engine_service.ListTargetSitesRequest, @@ -471,12 +709,38 @@ def post_list_target_sites( ) -> site_search_engine_service.ListTargetSitesResponse: """Post-rpc interceptor for list_target_sites - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_target_sites_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the SiteSearchEngineService server but before - it is returned to user code. + it is returned to user code. This `post_list_target_sites` interceptor runs + before the `post_list_target_sites_with_metadata` interceptor. """ return response + def post_list_target_sites_with_metadata( + self, + response: site_search_engine_service.ListTargetSitesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + site_search_engine_service.ListTargetSitesResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_target_sites + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SiteSearchEngineService server but before it is returned to user code. + + We recommend only using this `post_list_target_sites_with_metadata` + interceptor in new development instead of the `post_list_target_sites` interceptor. + When both interceptors are used, this `post_list_target_sites_with_metadata` interceptor runs after the + `post_list_target_sites` interceptor. The (possibly modified) response returned by + `post_list_target_sites` will be passed to + `post_list_target_sites_with_metadata`. + """ + return response, metadata + def pre_recrawl_uris( self, request: site_search_engine_service.RecrawlUrisRequest, @@ -497,12 +761,35 @@ def post_recrawl_uris( ) -> operations_pb2.Operation: """Post-rpc interceptor for recrawl_uris - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_recrawl_uris_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the SiteSearchEngineService server but before - it is returned to user code. + it is returned to user code. This `post_recrawl_uris` interceptor runs + before the `post_recrawl_uris_with_metadata` interceptor. 
""" return response + def post_recrawl_uris_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for recrawl_uris + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SiteSearchEngineService server but before it is returned to user code. + + We recommend only using this `post_recrawl_uris_with_metadata` + interceptor in new development instead of the `post_recrawl_uris` interceptor. + When both interceptors are used, this `post_recrawl_uris_with_metadata` interceptor runs after the + `post_recrawl_uris` interceptor. The (possibly modified) response returned by + `post_recrawl_uris` will be passed to + `post_recrawl_uris_with_metadata`. + """ + return response, metadata + def pre_set_uri_pattern_document_data( self, request: site_search_engine_service.SetUriPatternDocumentDataRequest, @@ -523,12 +810,35 @@ def post_set_uri_pattern_document_data( ) -> operations_pb2.Operation: """Post-rpc interceptor for set_uri_pattern_document_data - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_set_uri_pattern_document_data_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the SiteSearchEngineService server but before - it is returned to user code. + it is returned to user code. This `post_set_uri_pattern_document_data` interceptor runs + before the `post_set_uri_pattern_document_data_with_metadata` interceptor. """ return response + def post_set_uri_pattern_document_data_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for set_uri_pattern_document_data + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SiteSearchEngineService server but before it is returned to user code. + + We recommend only using this `post_set_uri_pattern_document_data_with_metadata` + interceptor in new development instead of the `post_set_uri_pattern_document_data` interceptor. + When both interceptors are used, this `post_set_uri_pattern_document_data_with_metadata` interceptor runs after the + `post_set_uri_pattern_document_data` interceptor. The (possibly modified) response returned by + `post_set_uri_pattern_document_data` will be passed to + `post_set_uri_pattern_document_data_with_metadata`. + """ + return response, metadata + def pre_update_target_site( self, request: site_search_engine_service.UpdateTargetSiteRequest, @@ -549,12 +859,35 @@ def post_update_target_site( ) -> operations_pb2.Operation: """Post-rpc interceptor for update_target_site - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_target_site_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the SiteSearchEngineService server but before - it is returned to user code. + it is returned to user code. This `post_update_target_site` interceptor runs + before the `post_update_target_site_with_metadata` interceptor. 
""" return response + def post_update_target_site_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_target_site + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SiteSearchEngineService server but before it is returned to user code. + + We recommend only using this `post_update_target_site_with_metadata` + interceptor in new development instead of the `post_update_target_site` interceptor. + When both interceptors are used, this `post_update_target_site_with_metadata` interceptor runs after the + `post_update_target_site` interceptor. The (possibly modified) response returned by + `post_update_target_site` will be passed to + `post_update_target_site_with_metadata`. + """ + return response, metadata + def pre_cancel_operation( self, request: operations_pb2.CancelOperationRequest, @@ -1017,6 +1350,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_batch_create_target_sites(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_batch_create_target_sites_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1169,6 +1506,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_batch_verify_target_sites(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_batch_verify_target_sites_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1323,6 +1664,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_target_site(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_target_site_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1471,6 +1816,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_target_site(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_target_site_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1626,6 +1975,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_disable_advanced_site_search(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_disable_advanced_site_search_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1779,6 +2132,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_enable_advanced_site_search(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_enable_advanced_site_search_with_metadata( + resp, response_metadata + ) if 
CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1934,6 +2291,13 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_fetch_domain_verification_status(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_fetch_domain_verification_status_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2087,6 +2451,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_site_search_engine(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_site_search_engine_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2236,6 +2604,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_target_site(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_target_site_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2389,6 +2761,13 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_uri_pattern_document_data(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_get_uri_pattern_document_data_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2541,6 +2920,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_target_sites(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_target_sites_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2695,6 +3078,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_recrawl_uris(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_recrawl_uris_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2850,6 +3237,13 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_set_uri_pattern_document_data(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_set_uri_pattern_document_data_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3004,6 +3398,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_target_site(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_target_site_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( 
logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/user_event_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/user_event_service/client.py index 3a83d17becd1..e9b23dcc52b9 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/user_event_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/user_event_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -549,6 +551,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -1245,16 +1274,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -1300,16 +1333,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. 
+ return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def cancel_operation( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/user_event_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/user_event_service/transports/rest.py index b77472f62a1e..d4d4a297d3d3 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/user_event_service/transports/rest.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/services/user_event_service/transports/rest.py @@ -134,12 +134,35 @@ def post_collect_user_event( ) -> httpbody_pb2.HttpBody: """Post-rpc interceptor for collect_user_event - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_collect_user_event_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the UserEventService server but before - it is returned to user code. + it is returned to user code. This `post_collect_user_event` interceptor runs + before the `post_collect_user_event_with_metadata` interceptor. """ return response + def post_collect_user_event_with_metadata( + self, + response: httpbody_pb2.HttpBody, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[httpbody_pb2.HttpBody, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for collect_user_event + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the UserEventService server but before it is returned to user code. + + We recommend only using this `post_collect_user_event_with_metadata` + interceptor in new development instead of the `post_collect_user_event` interceptor. + When both interceptors are used, this `post_collect_user_event_with_metadata` interceptor runs after the + `post_collect_user_event` interceptor. The (possibly modified) response returned by + `post_collect_user_event` will be passed to + `post_collect_user_event_with_metadata`. + """ + return response, metadata + def pre_import_user_events( self, request: import_config.ImportUserEventsRequest, @@ -159,12 +182,35 @@ def post_import_user_events( ) -> operations_pb2.Operation: """Post-rpc interceptor for import_user_events - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_import_user_events_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the UserEventService server but before - it is returned to user code. + it is returned to user code. This `post_import_user_events` interceptor runs + before the `post_import_user_events_with_metadata` interceptor. """ return response + def post_import_user_events_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for import_user_events + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the UserEventService server but before it is returned to user code. + + We recommend only using this `post_import_user_events_with_metadata` + interceptor in new development instead of the `post_import_user_events` interceptor. 
+ When both interceptors are used, this `post_import_user_events_with_metadata` interceptor runs after the + `post_import_user_events` interceptor. The (possibly modified) response returned by + `post_import_user_events` will be passed to + `post_import_user_events_with_metadata`. + """ + return response, metadata + def pre_purge_user_events( self, request: purge_config.PurgeUserEventsRequest, @@ -184,12 +230,35 @@ def post_purge_user_events( ) -> operations_pb2.Operation: """Post-rpc interceptor for purge_user_events - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_purge_user_events_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the UserEventService server but before - it is returned to user code. + it is returned to user code. This `post_purge_user_events` interceptor runs + before the `post_purge_user_events_with_metadata` interceptor. """ return response + def post_purge_user_events_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for purge_user_events + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the UserEventService server but before it is returned to user code. + + We recommend only using this `post_purge_user_events_with_metadata` + interceptor in new development instead of the `post_purge_user_events` interceptor. + When both interceptors are used, this `post_purge_user_events_with_metadata` interceptor runs after the + `post_purge_user_events` interceptor. The (possibly modified) response returned by + `post_purge_user_events` will be passed to + `post_purge_user_events_with_metadata`. + """ + return response, metadata + def pre_write_user_event( self, request: user_event_service.WriteUserEventRequest, @@ -210,12 +279,35 @@ def post_write_user_event( ) -> user_event.UserEvent: """Post-rpc interceptor for write_user_event - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_write_user_event_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the UserEventService server but before - it is returned to user code. + it is returned to user code. This `post_write_user_event` interceptor runs + before the `post_write_user_event_with_metadata` interceptor. """ return response + def post_write_user_event_with_metadata( + self, + response: user_event.UserEvent, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[user_event.UserEvent, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for write_user_event + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the UserEventService server but before it is returned to user code. + + We recommend only using this `post_write_user_event_with_metadata` + interceptor in new development instead of the `post_write_user_event` interceptor. + When both interceptors are used, this `post_write_user_event_with_metadata` interceptor runs after the + `post_write_user_event` interceptor. The (possibly modified) response returned by + `post_write_user_event` will be passed to + `post_write_user_event_with_metadata`. 
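# ----------------------------------------------------------------------------
# Illustrative sketch, not part of the diff: the ordering described in the
# docstrings above. When both hooks are overridden, the deprecated
# `post_write_user_event` runs first and its (possibly modified) response is
# what `post_write_user_event_with_metadata` receives, together with the
# header-derived metadata. Class and type names are assumed from the files
# changed above.
from typing import Sequence, Tuple, Union

from google.cloud.discoveryengine_v1alpha.services.user_event_service.transports.rest import (
    UserEventServiceRestInterceptor,
)
from google.cloud.discoveryengine_v1alpha.types import UserEvent


class ChainedInterceptor(UserEventServiceRestInterceptor):
    def post_write_user_event(self, response: UserEvent) -> UserEvent:
        # Runs first; kept only for backward compatibility.
        return response

    def post_write_user_event_with_metadata(
        self,
        response: UserEvent,
        metadata: Sequence[Tuple[str, Union[str, bytes]]],
    ) -> Tuple[UserEvent, Sequence[Tuple[str, Union[str, bytes]]]]:
        # Runs second; receives whatever post_write_user_event returned. It may
        # also return adjusted metadata, although in the __call__ hunks shown
        # here the transport discards that second element (`resp, _ = ...`).
        return response, metadata
# ----------------------------------------------------------------------------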
+ """ + return response, metadata + def pre_cancel_operation( self, request: operations_pb2.CancelOperationRequest, @@ -720,6 +812,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_collect_user_event(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_collect_user_event_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -871,6 +967,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_import_user_events(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_import_user_events_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1022,6 +1122,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_purge_user_events(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_purge_user_events_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1175,6 +1279,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_write_user_event(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_write_user_event_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/gapic_version.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/gapic_version.py index 458413f5c30f..154c54fd3917 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/gapic_version.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.13.5" # {x-release-please-version} +__version__ = "0.13.6" # {x-release-please-version} diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/completion_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/completion_service/client.py index b24f8435d2f7..dd11c714ebac 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/completion_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/completion_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -540,6 +542,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. 
+ + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -1419,16 +1448,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -1474,16 +1507,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def cancel_operation( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/completion_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/completion_service/transports/rest.py index b12df6e5c386..e059ef643e44 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/completion_service/transports/rest.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/completion_service/transports/rest.py @@ -148,12 +148,38 @@ def post_advanced_complete_query( ) -> completion_service.AdvancedCompleteQueryResponse: """Post-rpc interceptor for advanced_complete_query - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_advanced_complete_query_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CompletionService server but before - it is returned to user code. + it is returned to user code. This `post_advanced_complete_query` interceptor runs + before the `post_advanced_complete_query_with_metadata` interceptor. """ return response + def post_advanced_complete_query_with_metadata( + self, + response: completion_service.AdvancedCompleteQueryResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + completion_service.AdvancedCompleteQueryResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for advanced_complete_query + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CompletionService server but before it is returned to user code. 
+ + We recommend only using this `post_advanced_complete_query_with_metadata` + interceptor in new development instead of the `post_advanced_complete_query` interceptor. + When both interceptors are used, this `post_advanced_complete_query_with_metadata` interceptor runs after the + `post_advanced_complete_query` interceptor. The (possibly modified) response returned by + `post_advanced_complete_query` will be passed to + `post_advanced_complete_query_with_metadata`. + """ + return response, metadata + def pre_complete_query( self, request: completion_service.CompleteQueryRequest, @@ -173,12 +199,38 @@ def post_complete_query( ) -> completion_service.CompleteQueryResponse: """Post-rpc interceptor for complete_query - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_complete_query_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CompletionService server but before - it is returned to user code. + it is returned to user code. This `post_complete_query` interceptor runs + before the `post_complete_query_with_metadata` interceptor. """ return response + def post_complete_query_with_metadata( + self, + response: completion_service.CompleteQueryResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + completion_service.CompleteQueryResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for complete_query + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CompletionService server but before it is returned to user code. + + We recommend only using this `post_complete_query_with_metadata` + interceptor in new development instead of the `post_complete_query` interceptor. + When both interceptors are used, this `post_complete_query_with_metadata` interceptor runs after the + `post_complete_query` interceptor. The (possibly modified) response returned by + `post_complete_query` will be passed to + `post_complete_query_with_metadata`. + """ + return response, metadata + def pre_import_completion_suggestions( self, request: import_config.ImportCompletionSuggestionsRequest, @@ -199,12 +251,35 @@ def post_import_completion_suggestions( ) -> operations_pb2.Operation: """Post-rpc interceptor for import_completion_suggestions - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_import_completion_suggestions_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CompletionService server but before - it is returned to user code. + it is returned to user code. This `post_import_completion_suggestions` interceptor runs + before the `post_import_completion_suggestions_with_metadata` interceptor. """ return response + def post_import_completion_suggestions_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for import_completion_suggestions + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CompletionService server but before it is returned to user code. + + We recommend only using this `post_import_completion_suggestions_with_metadata` + interceptor in new development instead of the `post_import_completion_suggestions` interceptor. 
+ When both interceptors are used, this `post_import_completion_suggestions_with_metadata` interceptor runs after the + `post_import_completion_suggestions` interceptor. The (possibly modified) response returned by + `post_import_completion_suggestions` will be passed to + `post_import_completion_suggestions_with_metadata`. + """ + return response, metadata + def pre_import_suggestion_deny_list_entries( self, request: import_config.ImportSuggestionDenyListEntriesRequest, @@ -225,12 +300,35 @@ def post_import_suggestion_deny_list_entries( ) -> operations_pb2.Operation: """Post-rpc interceptor for import_suggestion_deny_list_entries - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_import_suggestion_deny_list_entries_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CompletionService server but before - it is returned to user code. + it is returned to user code. This `post_import_suggestion_deny_list_entries` interceptor runs + before the `post_import_suggestion_deny_list_entries_with_metadata` interceptor. """ return response + def post_import_suggestion_deny_list_entries_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for import_suggestion_deny_list_entries + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CompletionService server but before it is returned to user code. + + We recommend only using this `post_import_suggestion_deny_list_entries_with_metadata` + interceptor in new development instead of the `post_import_suggestion_deny_list_entries` interceptor. + When both interceptors are used, this `post_import_suggestion_deny_list_entries_with_metadata` interceptor runs after the + `post_import_suggestion_deny_list_entries` interceptor. The (possibly modified) response returned by + `post_import_suggestion_deny_list_entries` will be passed to + `post_import_suggestion_deny_list_entries_with_metadata`. + """ + return response, metadata + def pre_purge_completion_suggestions( self, request: purge_config.PurgeCompletionSuggestionsRequest, @@ -251,12 +349,35 @@ def post_purge_completion_suggestions( ) -> operations_pb2.Operation: """Post-rpc interceptor for purge_completion_suggestions - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_purge_completion_suggestions_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CompletionService server but before - it is returned to user code. + it is returned to user code. This `post_purge_completion_suggestions` interceptor runs + before the `post_purge_completion_suggestions_with_metadata` interceptor. """ return response + def post_purge_completion_suggestions_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for purge_completion_suggestions + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CompletionService server but before it is returned to user code. 
+ + We recommend only using this `post_purge_completion_suggestions_with_metadata` + interceptor in new development instead of the `post_purge_completion_suggestions` interceptor. + When both interceptors are used, this `post_purge_completion_suggestions_with_metadata` interceptor runs after the + `post_purge_completion_suggestions` interceptor. The (possibly modified) response returned by + `post_purge_completion_suggestions` will be passed to + `post_purge_completion_suggestions_with_metadata`. + """ + return response, metadata + def pre_purge_suggestion_deny_list_entries( self, request: purge_config.PurgeSuggestionDenyListEntriesRequest, @@ -277,12 +398,35 @@ def post_purge_suggestion_deny_list_entries( ) -> operations_pb2.Operation: """Post-rpc interceptor for purge_suggestion_deny_list_entries - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_purge_suggestion_deny_list_entries_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CompletionService server but before - it is returned to user code. + it is returned to user code. This `post_purge_suggestion_deny_list_entries` interceptor runs + before the `post_purge_suggestion_deny_list_entries_with_metadata` interceptor. """ return response + def post_purge_suggestion_deny_list_entries_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for purge_suggestion_deny_list_entries + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CompletionService server but before it is returned to user code. + + We recommend only using this `post_purge_suggestion_deny_list_entries_with_metadata` + interceptor in new development instead of the `post_purge_suggestion_deny_list_entries` interceptor. + When both interceptors are used, this `post_purge_suggestion_deny_list_entries_with_metadata` interceptor runs after the + `post_purge_suggestion_deny_list_entries` interceptor. The (possibly modified) response returned by + `post_purge_suggestion_deny_list_entries` will be passed to + `post_purge_suggestion_deny_list_entries_with_metadata`. 
+ """ + return response, metadata + def pre_cancel_operation( self, request: operations_pb2.CancelOperationRequest, @@ -741,6 +885,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_advanced_complete_query(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_advanced_complete_query_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -891,6 +1039,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_complete_query(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_complete_query_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1046,6 +1198,13 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_import_completion_suggestions(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_import_completion_suggestions_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1204,6 +1363,13 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_import_suggestion_deny_list_entries(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_import_suggestion_deny_list_entries_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1357,6 +1523,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_purge_completion_suggestions(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_purge_completion_suggestions_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1513,6 +1683,13 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_purge_suggestion_deny_list_entries(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_purge_suggestion_deny_list_entries_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/control_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/control_service/client.py index 7d8f75591dec..9ec6499c37ad 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/control_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/control_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. 
# from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -519,6 +521,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -1379,16 +1408,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -1434,16 +1467,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def cancel_operation( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/control_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/control_service/transports/rest.py index e60174444857..c9efc66e0c96 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/control_service/transports/rest.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/control_service/transports/rest.py @@ -132,12 +132,35 @@ def pre_create_control( def post_create_control(self, response: gcd_control.Control) -> gcd_control.Control: """Post-rpc interceptor for create_control - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_control_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ControlService server but before - it is returned to user code. + it is returned to user code. This `post_create_control` interceptor runs + before the `post_create_control_with_metadata` interceptor. 
""" return response + def post_create_control_with_metadata( + self, + response: gcd_control.Control, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gcd_control.Control, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_control + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ControlService server but before it is returned to user code. + + We recommend only using this `post_create_control_with_metadata` + interceptor in new development instead of the `post_create_control` interceptor. + When both interceptors are used, this `post_create_control_with_metadata` interceptor runs after the + `post_create_control` interceptor. The (possibly modified) response returned by + `post_create_control` will be passed to + `post_create_control_with_metadata`. + """ + return response, metadata + def pre_delete_control( self, request: control_service.DeleteControlRequest, @@ -169,12 +192,35 @@ def pre_get_control( def post_get_control(self, response: control.Control) -> control.Control: """Post-rpc interceptor for get_control - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_control_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ControlService server but before - it is returned to user code. + it is returned to user code. This `post_get_control` interceptor runs + before the `post_get_control_with_metadata` interceptor. """ return response + def post_get_control_with_metadata( + self, + response: control.Control, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[control.Control, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_control + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ControlService server but before it is returned to user code. + + We recommend only using this `post_get_control_with_metadata` + interceptor in new development instead of the `post_get_control` interceptor. + When both interceptors are used, this `post_get_control_with_metadata` interceptor runs after the + `post_get_control` interceptor. The (possibly modified) response returned by + `post_get_control` will be passed to + `post_get_control_with_metadata`. + """ + return response, metadata + def pre_list_controls( self, request: control_service.ListControlsRequest, @@ -194,12 +240,37 @@ def post_list_controls( ) -> control_service.ListControlsResponse: """Post-rpc interceptor for list_controls - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_controls_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ControlService server but before - it is returned to user code. + it is returned to user code. This `post_list_controls` interceptor runs + before the `post_list_controls_with_metadata` interceptor. """ return response + def post_list_controls_with_metadata( + self, + response: control_service.ListControlsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + control_service.ListControlsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_controls + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ControlService server but before it is returned to user code. 
+ + We recommend only using this `post_list_controls_with_metadata` + interceptor in new development instead of the `post_list_controls` interceptor. + When both interceptors are used, this `post_list_controls_with_metadata` interceptor runs after the + `post_list_controls` interceptor. The (possibly modified) response returned by + `post_list_controls` will be passed to + `post_list_controls_with_metadata`. + """ + return response, metadata + def pre_update_control( self, request: control_service.UpdateControlRequest, @@ -217,12 +288,35 @@ def pre_update_control( def post_update_control(self, response: gcd_control.Control) -> gcd_control.Control: """Post-rpc interceptor for update_control - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_control_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ControlService server but before - it is returned to user code. + it is returned to user code. This `post_update_control` interceptor runs + before the `post_update_control_with_metadata` interceptor. """ return response + def post_update_control_with_metadata( + self, + response: gcd_control.Control, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gcd_control.Control, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_control + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ControlService server but before it is returned to user code. + + We recommend only using this `post_update_control_with_metadata` + interceptor in new development instead of the `post_update_control` interceptor. + When both interceptors are used, this `post_update_control_with_metadata` interceptor runs after the + `post_update_control` interceptor. The (possibly modified) response returned by + `post_update_control` will be passed to + `post_update_control_with_metadata`. 
+ """ + return response, metadata + def pre_cancel_operation( self, request: operations_pb2.CancelOperationRequest, @@ -514,6 +608,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_control(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_control_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -767,6 +865,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_control(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_control_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -907,6 +1009,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_controls(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_controls_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1060,6 +1166,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_control(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_control_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/conversational_search_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/conversational_search_service/client.py index 0be5f6265ed2..46d4349edf39 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/conversational_search_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/conversational_search_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -657,6 +659,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. 
@@ -2392,16 +2421,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -2447,16 +2480,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def cancel_operation( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/conversational_search_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/conversational_search_service/transports/rest.py index 92909d7aeb25..8d02bdf0d96d 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/conversational_search_service/transports/rest.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/conversational_search_service/transports/rest.py @@ -198,12 +198,38 @@ def post_answer_query( ) -> conversational_search_service.AnswerQueryResponse: """Post-rpc interceptor for answer_query - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_answer_query_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ConversationalSearchService server but before - it is returned to user code. + it is returned to user code. This `post_answer_query` interceptor runs + before the `post_answer_query_with_metadata` interceptor. """ return response + def post_answer_query_with_metadata( + self, + response: conversational_search_service.AnswerQueryResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + conversational_search_service.AnswerQueryResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for answer_query + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ConversationalSearchService server but before it is returned to user code. + + We recommend only using this `post_answer_query_with_metadata` + interceptor in new development instead of the `post_answer_query` interceptor. + When both interceptors are used, this `post_answer_query_with_metadata` interceptor runs after the + `post_answer_query` interceptor. The (possibly modified) response returned by + `post_answer_query` will be passed to + `post_answer_query_with_metadata`. 
+ """ + return response, metadata + def pre_converse_conversation( self, request: conversational_search_service.ConverseConversationRequest, @@ -224,12 +250,38 @@ def post_converse_conversation( ) -> conversational_search_service.ConverseConversationResponse: """Post-rpc interceptor for converse_conversation - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_converse_conversation_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ConversationalSearchService server but before - it is returned to user code. + it is returned to user code. This `post_converse_conversation` interceptor runs + before the `post_converse_conversation_with_metadata` interceptor. """ return response + def post_converse_conversation_with_metadata( + self, + response: conversational_search_service.ConverseConversationResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + conversational_search_service.ConverseConversationResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for converse_conversation + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ConversationalSearchService server but before it is returned to user code. + + We recommend only using this `post_converse_conversation_with_metadata` + interceptor in new development instead of the `post_converse_conversation` interceptor. + When both interceptors are used, this `post_converse_conversation_with_metadata` interceptor runs after the + `post_converse_conversation` interceptor. The (possibly modified) response returned by + `post_converse_conversation` will be passed to + `post_converse_conversation_with_metadata`. + """ + return response, metadata + def pre_create_conversation( self, request: conversational_search_service.CreateConversationRequest, @@ -250,12 +302,35 @@ def post_create_conversation( ) -> gcd_conversation.Conversation: """Post-rpc interceptor for create_conversation - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_conversation_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ConversationalSearchService server but before - it is returned to user code. + it is returned to user code. This `post_create_conversation` interceptor runs + before the `post_create_conversation_with_metadata` interceptor. """ return response + def post_create_conversation_with_metadata( + self, + response: gcd_conversation.Conversation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gcd_conversation.Conversation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_conversation + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ConversationalSearchService server but before it is returned to user code. + + We recommend only using this `post_create_conversation_with_metadata` + interceptor in new development instead of the `post_create_conversation` interceptor. + When both interceptors are used, this `post_create_conversation_with_metadata` interceptor runs after the + `post_create_conversation` interceptor. The (possibly modified) response returned by + `post_create_conversation` will be passed to + `post_create_conversation_with_metadata`. 
+ """ + return response, metadata + def pre_create_session( self, request: conversational_search_service.CreateSessionRequest, @@ -274,12 +349,35 @@ def pre_create_session( def post_create_session(self, response: gcd_session.Session) -> gcd_session.Session: """Post-rpc interceptor for create_session - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_session_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ConversationalSearchService server but before - it is returned to user code. + it is returned to user code. This `post_create_session` interceptor runs + before the `post_create_session_with_metadata` interceptor. """ return response + def post_create_session_with_metadata( + self, + response: gcd_session.Session, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gcd_session.Session, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_session + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ConversationalSearchService server but before it is returned to user code. + + We recommend only using this `post_create_session_with_metadata` + interceptor in new development instead of the `post_create_session` interceptor. + When both interceptors are used, this `post_create_session_with_metadata` interceptor runs after the + `post_create_session` interceptor. The (possibly modified) response returned by + `post_create_session` will be passed to + `post_create_session_with_metadata`. + """ + return response, metadata + def pre_delete_conversation( self, request: conversational_search_service.DeleteConversationRequest, @@ -328,12 +426,33 @@ def pre_get_answer( def post_get_answer(self, response: answer.Answer) -> answer.Answer: """Post-rpc interceptor for get_answer - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_answer_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ConversationalSearchService server but before - it is returned to user code. + it is returned to user code. This `post_get_answer` interceptor runs + before the `post_get_answer_with_metadata` interceptor. """ return response + def post_get_answer_with_metadata( + self, response: answer.Answer, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[answer.Answer, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_answer + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ConversationalSearchService server but before it is returned to user code. + + We recommend only using this `post_get_answer_with_metadata` + interceptor in new development instead of the `post_get_answer` interceptor. + When both interceptors are used, this `post_get_answer_with_metadata` interceptor runs after the + `post_get_answer` interceptor. The (possibly modified) response returned by + `post_get_answer` will be passed to + `post_get_answer_with_metadata`. + """ + return response, metadata + def pre_get_conversation( self, request: conversational_search_service.GetConversationRequest, @@ -354,12 +473,35 @@ def post_get_conversation( ) -> conversation.Conversation: """Post-rpc interceptor for get_conversation - Override in a subclass to manipulate the response + DEPRECATED. 
Please use the `post_get_conversation_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ConversationalSearchService server but before - it is returned to user code. + it is returned to user code. This `post_get_conversation` interceptor runs + before the `post_get_conversation_with_metadata` interceptor. """ return response + def post_get_conversation_with_metadata( + self, + response: conversation.Conversation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[conversation.Conversation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_conversation + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ConversationalSearchService server but before it is returned to user code. + + We recommend only using this `post_get_conversation_with_metadata` + interceptor in new development instead of the `post_get_conversation` interceptor. + When both interceptors are used, this `post_get_conversation_with_metadata` interceptor runs after the + `post_get_conversation` interceptor. The (possibly modified) response returned by + `post_get_conversation` will be passed to + `post_get_conversation_with_metadata`. + """ + return response, metadata + def pre_get_session( self, request: conversational_search_service.GetSessionRequest, @@ -378,12 +520,35 @@ def pre_get_session( def post_get_session(self, response: session.Session) -> session.Session: """Post-rpc interceptor for get_session - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_session_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ConversationalSearchService server but before - it is returned to user code. + it is returned to user code. This `post_get_session` interceptor runs + before the `post_get_session_with_metadata` interceptor. """ return response + def post_get_session_with_metadata( + self, + response: session.Session, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[session.Session, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_session + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ConversationalSearchService server but before it is returned to user code. + + We recommend only using this `post_get_session_with_metadata` + interceptor in new development instead of the `post_get_session` interceptor. + When both interceptors are used, this `post_get_session_with_metadata` interceptor runs after the + `post_get_session` interceptor. The (possibly modified) response returned by + `post_get_session` will be passed to + `post_get_session_with_metadata`. + """ + return response, metadata + def pre_list_conversations( self, request: conversational_search_service.ListConversationsRequest, @@ -404,12 +569,38 @@ def post_list_conversations( ) -> conversational_search_service.ListConversationsResponse: """Post-rpc interceptor for list_conversations - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_conversations_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ConversationalSearchService server but before - it is returned to user code. + it is returned to user code. 
This `post_list_conversations` interceptor runs + before the `post_list_conversations_with_metadata` interceptor. """ return response + def post_list_conversations_with_metadata( + self, + response: conversational_search_service.ListConversationsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + conversational_search_service.ListConversationsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_conversations + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ConversationalSearchService server but before it is returned to user code. + + We recommend only using this `post_list_conversations_with_metadata` + interceptor in new development instead of the `post_list_conversations` interceptor. + When both interceptors are used, this `post_list_conversations_with_metadata` interceptor runs after the + `post_list_conversations` interceptor. The (possibly modified) response returned by + `post_list_conversations` will be passed to + `post_list_conversations_with_metadata`. + """ + return response, metadata + def pre_list_sessions( self, request: conversational_search_service.ListSessionsRequest, @@ -430,12 +621,38 @@ def post_list_sessions( ) -> conversational_search_service.ListSessionsResponse: """Post-rpc interceptor for list_sessions - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_sessions_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ConversationalSearchService server but before - it is returned to user code. + it is returned to user code. This `post_list_sessions` interceptor runs + before the `post_list_sessions_with_metadata` interceptor. """ return response + def post_list_sessions_with_metadata( + self, + response: conversational_search_service.ListSessionsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + conversational_search_service.ListSessionsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_sessions + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ConversationalSearchService server but before it is returned to user code. + + We recommend only using this `post_list_sessions_with_metadata` + interceptor in new development instead of the `post_list_sessions` interceptor. + When both interceptors are used, this `post_list_sessions_with_metadata` interceptor runs after the + `post_list_sessions` interceptor. The (possibly modified) response returned by + `post_list_sessions` will be passed to + `post_list_sessions_with_metadata`. + """ + return response, metadata + def pre_update_conversation( self, request: conversational_search_service.UpdateConversationRequest, @@ -456,12 +673,35 @@ def post_update_conversation( ) -> gcd_conversation.Conversation: """Post-rpc interceptor for update_conversation - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_conversation_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ConversationalSearchService server but before - it is returned to user code. + it is returned to user code. This `post_update_conversation` interceptor runs + before the `post_update_conversation_with_metadata` interceptor. 
""" return response + def post_update_conversation_with_metadata( + self, + response: gcd_conversation.Conversation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gcd_conversation.Conversation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_conversation + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ConversationalSearchService server but before it is returned to user code. + + We recommend only using this `post_update_conversation_with_metadata` + interceptor in new development instead of the `post_update_conversation` interceptor. + When both interceptors are used, this `post_update_conversation_with_metadata` interceptor runs after the + `post_update_conversation` interceptor. The (possibly modified) response returned by + `post_update_conversation` will be passed to + `post_update_conversation_with_metadata`. + """ + return response, metadata + def pre_update_session( self, request: conversational_search_service.UpdateSessionRequest, @@ -480,12 +720,35 @@ def pre_update_session( def post_update_session(self, response: gcd_session.Session) -> gcd_session.Session: """Post-rpc interceptor for update_session - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_session_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ConversationalSearchService server but before - it is returned to user code. + it is returned to user code. This `post_update_session` interceptor runs + before the `post_update_session_with_metadata` interceptor. """ return response + def post_update_session_with_metadata( + self, + response: gcd_session.Session, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gcd_session.Session, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_session + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ConversationalSearchService server but before it is returned to user code. + + We recommend only using this `post_update_session_with_metadata` + interceptor in new development instead of the `post_update_session` interceptor. + When both interceptors are used, this `post_update_session_with_metadata` interceptor runs after the + `post_update_session` interceptor. The (possibly modified) response returned by + `post_update_session` will be passed to + `post_update_session_with_metadata`. 
+ """ + return response, metadata + def pre_cancel_operation( self, request: operations_pb2.CancelOperationRequest, @@ -779,6 +1042,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_answer_query(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_answer_query_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -939,6 +1206,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_converse_conversation(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_converse_conversation_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1093,6 +1364,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_conversation(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_conversation_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1242,6 +1517,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_session(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_session_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1604,6 +1883,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_answer(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_answer_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1751,6 +2034,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_conversation(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_conversation_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1894,6 +2181,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_session(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_session_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2039,6 +2330,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_conversations(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_conversations_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2186,6 +2481,10 @@ def __call__( 
json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_sessions(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_sessions_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2342,6 +2641,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_conversation(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_conversation_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2491,6 +2794,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_session(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_session_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/data_store_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/data_store_service/client.py index daeae1ace236..c601957251e5 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/data_store_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/data_store_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -572,6 +574,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -1521,16 +1550,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. 
+ return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -1576,16 +1609,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def cancel_operation( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/data_store_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/data_store_service/transports/rest.py index 0e35259551ed..1eaa80b45d62 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/data_store_service/transports/rest.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/data_store_service/transports/rest.py @@ -138,12 +138,35 @@ def post_create_data_store( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_data_store - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_data_store_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DataStoreService server but before - it is returned to user code. + it is returned to user code. This `post_create_data_store` interceptor runs + before the `post_create_data_store_with_metadata` interceptor. """ return response + def post_create_data_store_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_data_store + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataStoreService server but before it is returned to user code. + + We recommend only using this `post_create_data_store_with_metadata` + interceptor in new development instead of the `post_create_data_store` interceptor. + When both interceptors are used, this `post_create_data_store_with_metadata` interceptor runs after the + `post_create_data_store` interceptor. The (possibly modified) response returned by + `post_create_data_store` will be passed to + `post_create_data_store_with_metadata`. + """ + return response, metadata + def pre_delete_data_store( self, request: data_store_service.DeleteDataStoreRequest, @@ -164,12 +187,35 @@ def post_delete_data_store( ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_data_store - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_data_store_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DataStoreService server but before - it is returned to user code. + it is returned to user code. This `post_delete_data_store` interceptor runs + before the `post_delete_data_store_with_metadata` interceptor. 
""" return response + def post_delete_data_store_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_data_store + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataStoreService server but before it is returned to user code. + + We recommend only using this `post_delete_data_store_with_metadata` + interceptor in new development instead of the `post_delete_data_store` interceptor. + When both interceptors are used, this `post_delete_data_store_with_metadata` interceptor runs after the + `post_delete_data_store` interceptor. The (possibly modified) response returned by + `post_delete_data_store` will be passed to + `post_delete_data_store_with_metadata`. + """ + return response, metadata + def pre_get_data_store( self, request: data_store_service.GetDataStoreRequest, @@ -189,12 +235,35 @@ def post_get_data_store( ) -> data_store.DataStore: """Post-rpc interceptor for get_data_store - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_data_store_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DataStoreService server but before - it is returned to user code. + it is returned to user code. This `post_get_data_store` interceptor runs + before the `post_get_data_store_with_metadata` interceptor. """ return response + def post_get_data_store_with_metadata( + self, + response: data_store.DataStore, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[data_store.DataStore, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_data_store + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataStoreService server but before it is returned to user code. + + We recommend only using this `post_get_data_store_with_metadata` + interceptor in new development instead of the `post_get_data_store` interceptor. + When both interceptors are used, this `post_get_data_store_with_metadata` interceptor runs after the + `post_get_data_store` interceptor. The (possibly modified) response returned by + `post_get_data_store` will be passed to + `post_get_data_store_with_metadata`. + """ + return response, metadata + def pre_list_data_stores( self, request: data_store_service.ListDataStoresRequest, @@ -215,12 +284,38 @@ def post_list_data_stores( ) -> data_store_service.ListDataStoresResponse: """Post-rpc interceptor for list_data_stores - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_data_stores_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DataStoreService server but before - it is returned to user code. + it is returned to user code. This `post_list_data_stores` interceptor runs + before the `post_list_data_stores_with_metadata` interceptor. 
""" return response + def post_list_data_stores_with_metadata( + self, + response: data_store_service.ListDataStoresResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + data_store_service.ListDataStoresResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_data_stores + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataStoreService server but before it is returned to user code. + + We recommend only using this `post_list_data_stores_with_metadata` + interceptor in new development instead of the `post_list_data_stores` interceptor. + When both interceptors are used, this `post_list_data_stores_with_metadata` interceptor runs after the + `post_list_data_stores` interceptor. The (possibly modified) response returned by + `post_list_data_stores` will be passed to + `post_list_data_stores_with_metadata`. + """ + return response, metadata + def pre_update_data_store( self, request: data_store_service.UpdateDataStoreRequest, @@ -241,12 +336,35 @@ def post_update_data_store( ) -> gcd_data_store.DataStore: """Post-rpc interceptor for update_data_store - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_data_store_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DataStoreService server but before - it is returned to user code. + it is returned to user code. This `post_update_data_store` interceptor runs + before the `post_update_data_store_with_metadata` interceptor. """ return response + def post_update_data_store_with_metadata( + self, + response: gcd_data_store.DataStore, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gcd_data_store.DataStore, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_data_store + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataStoreService server but before it is returned to user code. + + We recommend only using this `post_update_data_store_with_metadata` + interceptor in new development instead of the `post_update_data_store` interceptor. + When both interceptors are used, this `post_update_data_store_with_metadata` interceptor runs after the + `post_update_data_store` interceptor. The (possibly modified) response returned by + `post_update_data_store` will be passed to + `post_update_data_store_with_metadata`. 
+ """ + return response, metadata + def pre_cancel_operation( self, request: operations_pb2.CancelOperationRequest, @@ -703,6 +821,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_data_store(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_data_store_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -849,6 +971,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_data_store(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_data_store_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -993,6 +1119,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_data_store(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_data_store_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1140,6 +1270,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_data_stores(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_data_stores_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1295,6 +1429,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_data_store(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_data_store_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/document_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/document_service/client.py index 78d2dbb375d7..e3c7d1609741 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/document_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/document_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -581,6 +583,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. 
+ """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -1817,16 +1846,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -1872,16 +1905,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def cancel_operation( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/document_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/document_service/transports/rest.py index 3818376f82dd..96556c75b02a 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/document_service/transports/rest.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/document_service/transports/rest.py @@ -163,12 +163,38 @@ def post_batch_get_documents_metadata( ) -> document_service.BatchGetDocumentsMetadataResponse: """Post-rpc interceptor for batch_get_documents_metadata - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_batch_get_documents_metadata_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DocumentService server but before - it is returned to user code. + it is returned to user code. This `post_batch_get_documents_metadata` interceptor runs + before the `post_batch_get_documents_metadata_with_metadata` interceptor. """ return response + def post_batch_get_documents_metadata_with_metadata( + self, + response: document_service.BatchGetDocumentsMetadataResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + document_service.BatchGetDocumentsMetadataResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for batch_get_documents_metadata + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DocumentService server but before it is returned to user code. 
+ + We recommend only using this `post_batch_get_documents_metadata_with_metadata` + interceptor in new development instead of the `post_batch_get_documents_metadata` interceptor. + When both interceptors are used, this `post_batch_get_documents_metadata_with_metadata` interceptor runs after the + `post_batch_get_documents_metadata` interceptor. The (possibly modified) response returned by + `post_batch_get_documents_metadata` will be passed to + `post_batch_get_documents_metadata_with_metadata`. + """ + return response, metadata + def pre_create_document( self, request: document_service.CreateDocumentRequest, @@ -188,12 +214,35 @@ def post_create_document( ) -> gcd_document.Document: """Post-rpc interceptor for create_document - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_document_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DocumentService server but before - it is returned to user code. + it is returned to user code. This `post_create_document` interceptor runs + before the `post_create_document_with_metadata` interceptor. """ return response + def post_create_document_with_metadata( + self, + response: gcd_document.Document, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gcd_document.Document, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_document + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DocumentService server but before it is returned to user code. + + We recommend only using this `post_create_document_with_metadata` + interceptor in new development instead of the `post_create_document` interceptor. + When both interceptors are used, this `post_create_document_with_metadata` interceptor runs after the + `post_create_document` interceptor. The (possibly modified) response returned by + `post_create_document` will be passed to + `post_create_document_with_metadata`. + """ + return response, metadata + def pre_delete_document( self, request: document_service.DeleteDocumentRequest, @@ -225,12 +274,35 @@ def pre_get_document( def post_get_document(self, response: document.Document) -> document.Document: """Post-rpc interceptor for get_document - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_document_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DocumentService server but before - it is returned to user code. + it is returned to user code. This `post_get_document` interceptor runs + before the `post_get_document_with_metadata` interceptor. """ return response + def post_get_document_with_metadata( + self, + response: document.Document, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[document.Document, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_document + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DocumentService server but before it is returned to user code. + + We recommend only using this `post_get_document_with_metadata` + interceptor in new development instead of the `post_get_document` interceptor. + When both interceptors are used, this `post_get_document_with_metadata` interceptor runs after the + `post_get_document` interceptor. 
The (possibly modified) response returned by + `post_get_document` will be passed to + `post_get_document_with_metadata`. + """ + return response, metadata + def pre_import_documents( self, request: import_config.ImportDocumentsRequest, @@ -250,12 +322,35 @@ def post_import_documents( ) -> operations_pb2.Operation: """Post-rpc interceptor for import_documents - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_import_documents_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DocumentService server but before - it is returned to user code. + it is returned to user code. This `post_import_documents` interceptor runs + before the `post_import_documents_with_metadata` interceptor. """ return response + def post_import_documents_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for import_documents + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DocumentService server but before it is returned to user code. + + We recommend only using this `post_import_documents_with_metadata` + interceptor in new development instead of the `post_import_documents` interceptor. + When both interceptors are used, this `post_import_documents_with_metadata` interceptor runs after the + `post_import_documents` interceptor. The (possibly modified) response returned by + `post_import_documents` will be passed to + `post_import_documents_with_metadata`. + """ + return response, metadata + def pre_list_documents( self, request: document_service.ListDocumentsRequest, @@ -275,12 +370,37 @@ def post_list_documents( ) -> document_service.ListDocumentsResponse: """Post-rpc interceptor for list_documents - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_documents_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DocumentService server but before - it is returned to user code. + it is returned to user code. This `post_list_documents` interceptor runs + before the `post_list_documents_with_metadata` interceptor. """ return response + def post_list_documents_with_metadata( + self, + response: document_service.ListDocumentsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + document_service.ListDocumentsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_documents + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DocumentService server but before it is returned to user code. + + We recommend only using this `post_list_documents_with_metadata` + interceptor in new development instead of the `post_list_documents` interceptor. + When both interceptors are used, this `post_list_documents_with_metadata` interceptor runs after the + `post_list_documents` interceptor. The (possibly modified) response returned by + `post_list_documents` will be passed to + `post_list_documents_with_metadata`. 
+ """ + return response, metadata + def pre_purge_documents( self, request: purge_config.PurgeDocumentsRequest, @@ -300,12 +420,35 @@ def post_purge_documents( ) -> operations_pb2.Operation: """Post-rpc interceptor for purge_documents - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_purge_documents_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DocumentService server but before - it is returned to user code. + it is returned to user code. This `post_purge_documents` interceptor runs + before the `post_purge_documents_with_metadata` interceptor. """ return response + def post_purge_documents_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for purge_documents + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DocumentService server but before it is returned to user code. + + We recommend only using this `post_purge_documents_with_metadata` + interceptor in new development instead of the `post_purge_documents` interceptor. + When both interceptors are used, this `post_purge_documents_with_metadata` interceptor runs after the + `post_purge_documents` interceptor. The (possibly modified) response returned by + `post_purge_documents` will be passed to + `post_purge_documents_with_metadata`. + """ + return response, metadata + def pre_update_document( self, request: document_service.UpdateDocumentRequest, @@ -325,12 +468,35 @@ def post_update_document( ) -> gcd_document.Document: """Post-rpc interceptor for update_document - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_document_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DocumentService server but before - it is returned to user code. + it is returned to user code. This `post_update_document` interceptor runs + before the `post_update_document_with_metadata` interceptor. """ return response + def post_update_document_with_metadata( + self, + response: gcd_document.Document, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gcd_document.Document, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_document + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DocumentService server but before it is returned to user code. + + We recommend only using this `post_update_document_with_metadata` + interceptor in new development instead of the `post_update_document` interceptor. + When both interceptors are used, this `post_update_document_with_metadata` interceptor runs after the + `post_update_document` interceptor. The (possibly modified) response returned by + `post_update_document` will be passed to + `post_update_document_with_metadata`. 
+ """ + return response, metadata + def pre_cancel_operation( self, request: operations_pb2.CancelOperationRequest, @@ -786,6 +952,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_batch_get_documents_metadata(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_batch_get_documents_metadata_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -941,6 +1111,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_document(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_document_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1194,6 +1368,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_document(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_document_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1343,6 +1521,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_import_documents(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_import_documents_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1488,6 +1670,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_documents(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_documents_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1639,6 +1825,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_purge_documents(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_purge_documents_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1790,6 +1980,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_document(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_document_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/engine_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/engine_service/client.py index bcd8087a94c0..349add7a2ffd 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/engine_service/client.py +++ 
b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/engine_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -521,6 +523,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -1778,16 +1807,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -1833,16 +1866,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def cancel_operation( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/engine_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/engine_service/transports/rest.py index 4457f32edcc4..8d83098395d9 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/engine_service/transports/rest.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/engine_service/transports/rest.py @@ -161,12 +161,35 @@ def post_create_engine( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_engine - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_engine_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the EngineService server but before - it is returned to user code. + it is returned to user code. This `post_create_engine` interceptor runs + before the `post_create_engine_with_metadata` interceptor. 
""" return response + def post_create_engine_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_engine + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the EngineService server but before it is returned to user code. + + We recommend only using this `post_create_engine_with_metadata` + interceptor in new development instead of the `post_create_engine` interceptor. + When both interceptors are used, this `post_create_engine_with_metadata` interceptor runs after the + `post_create_engine` interceptor. The (possibly modified) response returned by + `post_create_engine` will be passed to + `post_create_engine_with_metadata`. + """ + return response, metadata + def pre_delete_engine( self, request: engine_service.DeleteEngineRequest, @@ -186,12 +209,35 @@ def post_delete_engine( ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_engine - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_engine_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the EngineService server but before - it is returned to user code. + it is returned to user code. This `post_delete_engine` interceptor runs + before the `post_delete_engine_with_metadata` interceptor. """ return response + def post_delete_engine_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_engine + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the EngineService server but before it is returned to user code. + + We recommend only using this `post_delete_engine_with_metadata` + interceptor in new development instead of the `post_delete_engine` interceptor. + When both interceptors are used, this `post_delete_engine_with_metadata` interceptor runs after the + `post_delete_engine` interceptor. The (possibly modified) response returned by + `post_delete_engine` will be passed to + `post_delete_engine_with_metadata`. + """ + return response, metadata + def pre_get_engine( self, request: engine_service.GetEngineRequest, @@ -209,12 +255,33 @@ def pre_get_engine( def post_get_engine(self, response: engine.Engine) -> engine.Engine: """Post-rpc interceptor for get_engine - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_engine_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the EngineService server but before - it is returned to user code. + it is returned to user code. This `post_get_engine` interceptor runs + before the `post_get_engine_with_metadata` interceptor. """ return response + def post_get_engine_with_metadata( + self, response: engine.Engine, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[engine.Engine, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_engine + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the EngineService server but before it is returned to user code. 
+ + We recommend only using this `post_get_engine_with_metadata` + interceptor in new development instead of the `post_get_engine` interceptor. + When both interceptors are used, this `post_get_engine_with_metadata` interceptor runs after the + `post_get_engine` interceptor. The (possibly modified) response returned by + `post_get_engine` will be passed to + `post_get_engine_with_metadata`. + """ + return response, metadata + def pre_list_engines( self, request: engine_service.ListEnginesRequest, @@ -234,12 +301,37 @@ def post_list_engines( ) -> engine_service.ListEnginesResponse: """Post-rpc interceptor for list_engines - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_engines_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the EngineService server but before - it is returned to user code. + it is returned to user code. This `post_list_engines` interceptor runs + before the `post_list_engines_with_metadata` interceptor. """ return response + def post_list_engines_with_metadata( + self, + response: engine_service.ListEnginesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + engine_service.ListEnginesResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_engines + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the EngineService server but before it is returned to user code. + + We recommend only using this `post_list_engines_with_metadata` + interceptor in new development instead of the `post_list_engines` interceptor. + When both interceptors are used, this `post_list_engines_with_metadata` interceptor runs after the + `post_list_engines` interceptor. The (possibly modified) response returned by + `post_list_engines` will be passed to + `post_list_engines_with_metadata`. + """ + return response, metadata + def pre_pause_engine( self, request: engine_service.PauseEngineRequest, @@ -257,12 +349,33 @@ def pre_pause_engine( def post_pause_engine(self, response: engine.Engine) -> engine.Engine: """Post-rpc interceptor for pause_engine - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_pause_engine_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the EngineService server but before - it is returned to user code. + it is returned to user code. This `post_pause_engine` interceptor runs + before the `post_pause_engine_with_metadata` interceptor. """ return response + def post_pause_engine_with_metadata( + self, response: engine.Engine, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[engine.Engine, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for pause_engine + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the EngineService server but before it is returned to user code. + + We recommend only using this `post_pause_engine_with_metadata` + interceptor in new development instead of the `post_pause_engine` interceptor. + When both interceptors are used, this `post_pause_engine_with_metadata` interceptor runs after the + `post_pause_engine` interceptor. The (possibly modified) response returned by + `post_pause_engine` will be passed to + `post_pause_engine_with_metadata`. 
+ """ + return response, metadata + def pre_resume_engine( self, request: engine_service.ResumeEngineRequest, @@ -280,12 +393,33 @@ def pre_resume_engine( def post_resume_engine(self, response: engine.Engine) -> engine.Engine: """Post-rpc interceptor for resume_engine - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_resume_engine_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the EngineService server but before - it is returned to user code. + it is returned to user code. This `post_resume_engine` interceptor runs + before the `post_resume_engine_with_metadata` interceptor. """ return response + def post_resume_engine_with_metadata( + self, response: engine.Engine, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[engine.Engine, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for resume_engine + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the EngineService server but before it is returned to user code. + + We recommend only using this `post_resume_engine_with_metadata` + interceptor in new development instead of the `post_resume_engine` interceptor. + When both interceptors are used, this `post_resume_engine_with_metadata` interceptor runs after the + `post_resume_engine` interceptor. The (possibly modified) response returned by + `post_resume_engine` will be passed to + `post_resume_engine_with_metadata`. + """ + return response, metadata + def pre_tune_engine( self, request: engine_service.TuneEngineRequest, @@ -305,12 +439,35 @@ def post_tune_engine( ) -> operations_pb2.Operation: """Post-rpc interceptor for tune_engine - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_tune_engine_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the EngineService server but before - it is returned to user code. + it is returned to user code. This `post_tune_engine` interceptor runs + before the `post_tune_engine_with_metadata` interceptor. """ return response + def post_tune_engine_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for tune_engine + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the EngineService server but before it is returned to user code. + + We recommend only using this `post_tune_engine_with_metadata` + interceptor in new development instead of the `post_tune_engine` interceptor. + When both interceptors are used, this `post_tune_engine_with_metadata` interceptor runs after the + `post_tune_engine` interceptor. The (possibly modified) response returned by + `post_tune_engine` will be passed to + `post_tune_engine_with_metadata`. + """ + return response, metadata + def pre_update_engine( self, request: engine_service.UpdateEngineRequest, @@ -328,12 +485,35 @@ def pre_update_engine( def post_update_engine(self, response: gcd_engine.Engine) -> gcd_engine.Engine: """Post-rpc interceptor for update_engine - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_engine_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response after it is returned by the EngineService server but before - it is returned to user code. + it is returned to user code. This `post_update_engine` interceptor runs + before the `post_update_engine_with_metadata` interceptor. """ return response + def post_update_engine_with_metadata( + self, + response: gcd_engine.Engine, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gcd_engine.Engine, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_engine + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the EngineService server but before it is returned to user code. + + We recommend only using this `post_update_engine_with_metadata` + interceptor in new development instead of the `post_update_engine` interceptor. + When both interceptors are used, this `post_update_engine_with_metadata` interceptor runs after the + `post_update_engine` interceptor. The (possibly modified) response returned by + `post_update_engine` will be passed to + `post_update_engine_with_metadata`. + """ + return response, metadata + def pre_cancel_operation( self, request: operations_pb2.CancelOperationRequest, @@ -786,6 +966,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_engine(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_engine_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -929,6 +1113,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_engine(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_engine_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1078,6 +1266,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_engine(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_engine_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1225,6 +1417,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_engines(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_engines_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1381,6 +1577,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_pause_engine(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_pause_engine_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1531,6 +1731,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_resume_engine(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + 
resp, _ = self._interceptor.post_resume_engine_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1687,6 +1891,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_tune_engine(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_tune_engine_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1838,6 +2046,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_engine(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_engine_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/evaluation_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/evaluation_service/client.py index 023fd8fc2c42..6c05e376e4a9 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/evaluation_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/evaluation_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -654,6 +656,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -1438,16 +1467,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -1493,16 +1526,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def cancel_operation( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/evaluation_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/evaluation_service/transports/rest.py index df57fcb5b490..eb2bd54e24c8 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/evaluation_service/transports/rest.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/evaluation_service/transports/rest.py @@ -128,12 +128,35 @@ def post_create_evaluation( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_evaluation - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_evaluation_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the EvaluationService server but before - it is returned to user code. + it is returned to user code. This `post_create_evaluation` interceptor runs + before the `post_create_evaluation_with_metadata` interceptor. """ return response + def post_create_evaluation_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_evaluation + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the EvaluationService server but before it is returned to user code. + + We recommend only using this `post_create_evaluation_with_metadata` + interceptor in new development instead of the `post_create_evaluation` interceptor. + When both interceptors are used, this `post_create_evaluation_with_metadata` interceptor runs after the + `post_create_evaluation` interceptor. The (possibly modified) response returned by + `post_create_evaluation` will be passed to + `post_create_evaluation_with_metadata`. + """ + return response, metadata + def pre_get_evaluation( self, request: evaluation_service.GetEvaluationRequest, @@ -153,12 +176,35 @@ def post_get_evaluation( ) -> evaluation.Evaluation: """Post-rpc interceptor for get_evaluation - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_evaluation_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the EvaluationService server but before - it is returned to user code. + it is returned to user code. This `post_get_evaluation` interceptor runs + before the `post_get_evaluation_with_metadata` interceptor. """ return response + def post_get_evaluation_with_metadata( + self, + response: evaluation.Evaluation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[evaluation.Evaluation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_evaluation + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the EvaluationService server but before it is returned to user code. 
+ + We recommend only using this `post_get_evaluation_with_metadata` + interceptor in new development instead of the `post_get_evaluation` interceptor. + When both interceptors are used, this `post_get_evaluation_with_metadata` interceptor runs after the + `post_get_evaluation` interceptor. The (possibly modified) response returned by + `post_get_evaluation` will be passed to + `post_get_evaluation_with_metadata`. + """ + return response, metadata + def pre_list_evaluation_results( self, request: evaluation_service.ListEvaluationResultsRequest, @@ -179,12 +225,38 @@ def post_list_evaluation_results( ) -> evaluation_service.ListEvaluationResultsResponse: """Post-rpc interceptor for list_evaluation_results - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_evaluation_results_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the EvaluationService server but before - it is returned to user code. + it is returned to user code. This `post_list_evaluation_results` interceptor runs + before the `post_list_evaluation_results_with_metadata` interceptor. """ return response + def post_list_evaluation_results_with_metadata( + self, + response: evaluation_service.ListEvaluationResultsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + evaluation_service.ListEvaluationResultsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_evaluation_results + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the EvaluationService server but before it is returned to user code. + + We recommend only using this `post_list_evaluation_results_with_metadata` + interceptor in new development instead of the `post_list_evaluation_results` interceptor. + When both interceptors are used, this `post_list_evaluation_results_with_metadata` interceptor runs after the + `post_list_evaluation_results` interceptor. The (possibly modified) response returned by + `post_list_evaluation_results` will be passed to + `post_list_evaluation_results_with_metadata`. + """ + return response, metadata + def pre_list_evaluations( self, request: evaluation_service.ListEvaluationsRequest, @@ -205,12 +277,38 @@ def post_list_evaluations( ) -> evaluation_service.ListEvaluationsResponse: """Post-rpc interceptor for list_evaluations - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_evaluations_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the EvaluationService server but before - it is returned to user code. + it is returned to user code. This `post_list_evaluations` interceptor runs + before the `post_list_evaluations_with_metadata` interceptor. """ return response + def post_list_evaluations_with_metadata( + self, + response: evaluation_service.ListEvaluationsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + evaluation_service.ListEvaluationsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_evaluations + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the EvaluationService server but before it is returned to user code. + + We recommend only using this `post_list_evaluations_with_metadata` + interceptor in new development instead of the `post_list_evaluations` interceptor. 
+ When both interceptors are used, this `post_list_evaluations_with_metadata` interceptor runs after the + `post_list_evaluations` interceptor. The (possibly modified) response returned by + `post_list_evaluations` will be passed to + `post_list_evaluations_with_metadata`. + """ + return response, metadata + def pre_cancel_operation( self, request: operations_pb2.CancelOperationRequest, @@ -666,6 +764,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_evaluation(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_evaluation_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -813,6 +915,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_evaluation(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_evaluation_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -963,6 +1069,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_evaluation_results(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_evaluation_results_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1115,6 +1225,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_evaluations(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_evaluations_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/grounded_generation_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/grounded_generation_service/client.py index 35f8e2c35eb6..cfc79955f1cb 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/grounded_generation_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/grounded_generation_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -534,6 +536,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. 
+ """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -1064,16 +1093,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -1119,16 +1152,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def cancel_operation( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/grounded_generation_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/grounded_generation_service/transports/rest.py index c3d7ede2fd68..859a8cbc4f7e 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/grounded_generation_service/transports/rest.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/grounded_generation_service/transports/rest.py @@ -112,12 +112,38 @@ def post_check_grounding( ) -> grounded_generation_service.CheckGroundingResponse: """Post-rpc interceptor for check_grounding - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_check_grounding_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the GroundedGenerationService server but before - it is returned to user code. + it is returned to user code. This `post_check_grounding` interceptor runs + before the `post_check_grounding_with_metadata` interceptor. """ return response + def post_check_grounding_with_metadata( + self, + response: grounded_generation_service.CheckGroundingResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + grounded_generation_service.CheckGroundingResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for check_grounding + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the GroundedGenerationService server but before it is returned to user code. 
+ + We recommend only using this `post_check_grounding_with_metadata` + interceptor in new development instead of the `post_check_grounding` interceptor. + When both interceptors are used, this `post_check_grounding_with_metadata` interceptor runs after the + `post_check_grounding` interceptor. The (possibly modified) response returned by + `post_check_grounding` will be passed to + `post_check_grounding_with_metadata`. + """ + return response, metadata + def pre_generate_grounded_content( self, request: grounded_generation_service.GenerateGroundedContentRequest, @@ -138,12 +164,38 @@ def post_generate_grounded_content( ) -> grounded_generation_service.GenerateGroundedContentResponse: """Post-rpc interceptor for generate_grounded_content - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_generate_grounded_content_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the GroundedGenerationService server but before - it is returned to user code. + it is returned to user code. This `post_generate_grounded_content` interceptor runs + before the `post_generate_grounded_content_with_metadata` interceptor. """ return response + def post_generate_grounded_content_with_metadata( + self, + response: grounded_generation_service.GenerateGroundedContentResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + grounded_generation_service.GenerateGroundedContentResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for generate_grounded_content + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the GroundedGenerationService server but before it is returned to user code. + + We recommend only using this `post_generate_grounded_content_with_metadata` + interceptor in new development instead of the `post_generate_grounded_content` interceptor. + When both interceptors are used, this `post_generate_grounded_content_with_metadata` interceptor runs after the + `post_generate_grounded_content` interceptor. The (possibly modified) response returned by + `post_generate_grounded_content` will be passed to + `post_generate_grounded_content_with_metadata`. 
+ """ + return response, metadata + def pre_cancel_operation( self, request: operations_pb2.CancelOperationRequest, @@ -437,6 +489,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_check_grounding(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_check_grounding_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -595,6 +651,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_generate_grounded_content(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_generate_grounded_content_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/project_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/project_service/client.py index 369b68f41e0f..8ec1200ae477 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/project_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/project_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -483,6 +485,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -867,16 +896,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -922,16 +955,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def cancel_operation( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/project_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/project_service/transports/rest.py index c8a8b9231e9f..34d79de7c32f 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/project_service/transports/rest.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/project_service/transports/rest.py @@ -103,12 +103,35 @@ def post_provision_project( ) -> operations_pb2.Operation: """Post-rpc interceptor for provision_project - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_provision_project_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ProjectService server but before - it is returned to user code. + it is returned to user code. This `post_provision_project` interceptor runs + before the `post_provision_project_with_metadata` interceptor. """ return response + def post_provision_project_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for provision_project + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ProjectService server but before it is returned to user code. + + We recommend only using this `post_provision_project_with_metadata` + interceptor in new development instead of the `post_provision_project` interceptor. + When both interceptors are used, this `post_provision_project_with_metadata` interceptor runs after the + `post_provision_project` interceptor. The (possibly modified) response returned by + `post_provision_project` will be passed to + `post_provision_project_with_metadata`. + """ + return response, metadata + def pre_cancel_operation( self, request: operations_pb2.CancelOperationRequest, @@ -563,6 +586,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_provision_project(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_provision_project_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/rank_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/rank_service/client.py index 472456d31cda..c768562a8466 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/rank_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/rank_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. 
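From the caller's perspective, the try/except wrapping added to `list_operations` and `get_operation` above means an auth-related failure can now carry an extra JSON detail describing the credentials that were used. A sketch, assuming Application Default Credentials and a hypothetical operation name:

    from google.api_core import exceptions as core_exceptions
    from google.cloud import discoveryengine_v1beta
    from google.longrunning import operations_pb2

    client = discoveryengine_v1beta.ProjectServiceClient()

    try:
        # Hypothetical operation name; any 401/403/404 raised here is what the new wrapping targets.
        client.get_operation(operations_pb2.GetOperationRequest(name="operations/example-op"))
    except core_exceptions.GoogleAPICallError as exc:
        # On auth-related errors, details may now end with a JSON string describing the
        # credentials in use (only when google-auth >= 2.35.0 is installed).
        for detail in exc.details:
            print(detail)
        raise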
# from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -484,6 +486,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -823,16 +852,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -878,16 +911,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def cancel_operation( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/rank_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/rank_service/transports/rest.py index fc5c1b60ce30..5bc33308cca5 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/rank_service/transports/rest.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/rank_service/transports/rest.py @@ -101,12 +101,35 @@ def post_rank( ) -> rank_service.RankResponse: """Post-rpc interceptor for rank - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_rank_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RankService server but before - it is returned to user code. + it is returned to user code. This `post_rank` interceptor runs + before the `post_rank_with_metadata` interceptor. 
""" return response + def post_rank_with_metadata( + self, + response: rank_service.RankResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[rank_service.RankResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for rank + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RankService server but before it is returned to user code. + + We recommend only using this `post_rank_with_metadata` + interceptor in new development instead of the `post_rank` interceptor. + When both interceptors are used, this `post_rank_with_metadata` interceptor runs after the + `post_rank` interceptor. The (possibly modified) response returned by + `post_rank` will be passed to + `post_rank_with_metadata`. + """ + return response, metadata + def pre_cancel_operation( self, request: operations_pb2.CancelOperationRequest, @@ -395,6 +418,8 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_rank(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_rank_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/recommendation_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/recommendation_service/client.py index 93efc2d0bb0d..3f84e6471637 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/recommendation_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/recommendation_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -562,6 +564,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -911,16 +940,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. 
+ return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -966,16 +999,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def cancel_operation( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/recommendation_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/recommendation_service/transports/rest.py index ff298830189b..a192875982a0 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/recommendation_service/transports/rest.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/recommendation_service/transports/rest.py @@ -103,12 +103,38 @@ def post_recommend( ) -> recommendation_service.RecommendResponse: """Post-rpc interceptor for recommend - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_recommend_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the RecommendationService server but before - it is returned to user code. + it is returned to user code. This `post_recommend` interceptor runs + before the `post_recommend_with_metadata` interceptor. """ return response + def post_recommend_with_metadata( + self, + response: recommendation_service.RecommendResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + recommendation_service.RecommendResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for recommend + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RecommendationService server but before it is returned to user code. + + We recommend only using this `post_recommend_with_metadata` + interceptor in new development instead of the `post_recommend` interceptor. + When both interceptors are used, this `post_recommend_with_metadata` interceptor runs after the + `post_recommend` interceptor. The (possibly modified) response returned by + `post_recommend` will be passed to + `post_recommend_with_metadata`. 
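When both the deprecated and the metadata-aware hooks are overridden, the documented order means `post_recommend` runs first and its return value is what `post_recommend_with_metadata` receives. A sketch of that chaining, with the interceptor class name assumed to follow the generated `RecommendationServiceRestInterceptor` naming:

    from google.cloud.discoveryengine_v1beta.services.recommendation_service.transports import (
        rest as rec_rest,
    )

    class AuditingInterceptor(rec_rest.RecommendationServiceRestInterceptor):
        def post_recommend(self, response):
            # Deprecated hook: runs first; whatever it returns is handed to the new hook.
            print("post_recommend saw:", type(response).__name__)
            return response

        def post_recommend_with_metadata(self, response, metadata):
            # New hook: runs second, receiving post_recommend's (possibly modified) response
            # together with the response-header metadata.
            print("post_recommend_with_metadata saw", len(list(metadata)), "metadata entries")
            return response, metadata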
+ """ + return response, metadata + def pre_cancel_operation( self, request: operations_pb2.CancelOperationRequest, @@ -395,6 +421,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_recommend(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_recommend_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/sample_query_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/sample_query_service/client.py index aa76fdfc8f2c..4b33b0a4d68d 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/sample_query_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/sample_query_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -520,6 +522,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -1540,16 +1569,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -1595,16 +1628,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. 
+ return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def cancel_operation( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/sample_query_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/sample_query_service/transports/rest.py index 65604dea9693..98d752825c34 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/sample_query_service/transports/rest.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/sample_query_service/transports/rest.py @@ -144,12 +144,35 @@ def post_create_sample_query( ) -> gcd_sample_query.SampleQuery: """Post-rpc interceptor for create_sample_query - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_sample_query_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the SampleQueryService server but before - it is returned to user code. + it is returned to user code. This `post_create_sample_query` interceptor runs + before the `post_create_sample_query_with_metadata` interceptor. """ return response + def post_create_sample_query_with_metadata( + self, + response: gcd_sample_query.SampleQuery, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gcd_sample_query.SampleQuery, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_sample_query + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SampleQueryService server but before it is returned to user code. + + We recommend only using this `post_create_sample_query_with_metadata` + interceptor in new development instead of the `post_create_sample_query` interceptor. + When both interceptors are used, this `post_create_sample_query_with_metadata` interceptor runs after the + `post_create_sample_query` interceptor. The (possibly modified) response returned by + `post_create_sample_query` will be passed to + `post_create_sample_query_with_metadata`. + """ + return response, metadata + def pre_delete_sample_query( self, request: sample_query_service.DeleteSampleQueryRequest, @@ -185,12 +208,35 @@ def post_get_sample_query( ) -> sample_query.SampleQuery: """Post-rpc interceptor for get_sample_query - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_sample_query_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the SampleQueryService server but before - it is returned to user code. + it is returned to user code. This `post_get_sample_query` interceptor runs + before the `post_get_sample_query_with_metadata` interceptor. """ return response + def post_get_sample_query_with_metadata( + self, + response: sample_query.SampleQuery, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[sample_query.SampleQuery, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_sample_query + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SampleQueryService server but before it is returned to user code. + + We recommend only using this `post_get_sample_query_with_metadata` + interceptor in new development instead of the `post_get_sample_query` interceptor. 
+ When both interceptors are used, this `post_get_sample_query_with_metadata` interceptor runs after the + `post_get_sample_query` interceptor. The (possibly modified) response returned by + `post_get_sample_query` will be passed to + `post_get_sample_query_with_metadata`. + """ + return response, metadata + def pre_import_sample_queries( self, request: import_config.ImportSampleQueriesRequest, @@ -211,12 +257,35 @@ def post_import_sample_queries( ) -> operations_pb2.Operation: """Post-rpc interceptor for import_sample_queries - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_import_sample_queries_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the SampleQueryService server but before - it is returned to user code. + it is returned to user code. This `post_import_sample_queries` interceptor runs + before the `post_import_sample_queries_with_metadata` interceptor. """ return response + def post_import_sample_queries_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for import_sample_queries + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SampleQueryService server but before it is returned to user code. + + We recommend only using this `post_import_sample_queries_with_metadata` + interceptor in new development instead of the `post_import_sample_queries` interceptor. + When both interceptors are used, this `post_import_sample_queries_with_metadata` interceptor runs after the + `post_import_sample_queries` interceptor. The (possibly modified) response returned by + `post_import_sample_queries` will be passed to + `post_import_sample_queries_with_metadata`. + """ + return response, metadata + def pre_list_sample_queries( self, request: sample_query_service.ListSampleQueriesRequest, @@ -237,12 +306,38 @@ def post_list_sample_queries( ) -> sample_query_service.ListSampleQueriesResponse: """Post-rpc interceptor for list_sample_queries - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_sample_queries_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the SampleQueryService server but before - it is returned to user code. + it is returned to user code. This `post_list_sample_queries` interceptor runs + before the `post_list_sample_queries_with_metadata` interceptor. """ return response + def post_list_sample_queries_with_metadata( + self, + response: sample_query_service.ListSampleQueriesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + sample_query_service.ListSampleQueriesResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_sample_queries + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SampleQueryService server but before it is returned to user code. + + We recommend only using this `post_list_sample_queries_with_metadata` + interceptor in new development instead of the `post_list_sample_queries` interceptor. + When both interceptors are used, this `post_list_sample_queries_with_metadata` interceptor runs after the + `post_list_sample_queries` interceptor. 
The (possibly modified) response returned by + `post_list_sample_queries` will be passed to + `post_list_sample_queries_with_metadata`. + """ + return response, metadata + def pre_update_sample_query( self, request: sample_query_service.UpdateSampleQueryRequest, @@ -263,12 +358,35 @@ def post_update_sample_query( ) -> gcd_sample_query.SampleQuery: """Post-rpc interceptor for update_sample_query - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_sample_query_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the SampleQueryService server but before - it is returned to user code. + it is returned to user code. This `post_update_sample_query` interceptor runs + before the `post_update_sample_query_with_metadata` interceptor. """ return response + def post_update_sample_query_with_metadata( + self, + response: gcd_sample_query.SampleQuery, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gcd_sample_query.SampleQuery, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_sample_query + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SampleQueryService server but before it is returned to user code. + + We recommend only using this `post_update_sample_query_with_metadata` + interceptor in new development instead of the `post_update_sample_query` interceptor. + When both interceptors are used, this `post_update_sample_query_with_metadata` interceptor runs after the + `post_update_sample_query` interceptor. The (possibly modified) response returned by + `post_update_sample_query` will be passed to + `post_update_sample_query_with_metadata`. + """ + return response, metadata + def pre_cancel_operation( self, request: operations_pb2.CancelOperationRequest, @@ -725,6 +843,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_sample_query(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_sample_query_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -983,6 +1105,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_sample_query(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_sample_query_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1137,6 +1263,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_import_sample_queries(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_import_sample_queries_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1285,6 +1415,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_sample_queries(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_sample_queries_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and 
_LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1440,6 +1574,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_sample_query(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_sample_query_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/sample_query_set_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/sample_query_set_service/client.py index 79d1433af75a..1ebc5a3350dd 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/sample_query_set_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/sample_query_set_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -514,6 +516,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -1432,16 +1461,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -1487,16 +1520,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. 
+ return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def cancel_operation( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/sample_query_set_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/sample_query_set_service/transports/rest.py index 71d6ec3172ab..045bebe076a8 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/sample_query_set_service/transports/rest.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/sample_query_set_service/transports/rest.py @@ -137,12 +137,37 @@ def post_create_sample_query_set( ) -> gcd_sample_query_set.SampleQuerySet: """Post-rpc interceptor for create_sample_query_set - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_sample_query_set_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the SampleQuerySetService server but before - it is returned to user code. + it is returned to user code. This `post_create_sample_query_set` interceptor runs + before the `post_create_sample_query_set_with_metadata` interceptor. """ return response + def post_create_sample_query_set_with_metadata( + self, + response: gcd_sample_query_set.SampleQuerySet, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + gcd_sample_query_set.SampleQuerySet, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for create_sample_query_set + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SampleQuerySetService server but before it is returned to user code. + + We recommend only using this `post_create_sample_query_set_with_metadata` + interceptor in new development instead of the `post_create_sample_query_set` interceptor. + When both interceptors are used, this `post_create_sample_query_set_with_metadata` interceptor runs after the + `post_create_sample_query_set` interceptor. The (possibly modified) response returned by + `post_create_sample_query_set` will be passed to + `post_create_sample_query_set_with_metadata`. + """ + return response, metadata + def pre_delete_sample_query_set( self, request: sample_query_set_service.DeleteSampleQuerySetRequest, @@ -178,12 +203,37 @@ def post_get_sample_query_set( ) -> sample_query_set.SampleQuerySet: """Post-rpc interceptor for get_sample_query_set - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_sample_query_set_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the SampleQuerySetService server but before - it is returned to user code. + it is returned to user code. This `post_get_sample_query_set` interceptor runs + before the `post_get_sample_query_set_with_metadata` interceptor. """ return response + def post_get_sample_query_set_with_metadata( + self, + response: sample_query_set.SampleQuerySet, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + sample_query_set.SampleQuerySet, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for get_sample_query_set + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SampleQuerySetService server but before it is returned to user code. 
+ + We recommend only using this `post_get_sample_query_set_with_metadata` + interceptor in new development instead of the `post_get_sample_query_set` interceptor. + When both interceptors are used, this `post_get_sample_query_set_with_metadata` interceptor runs after the + `post_get_sample_query_set` interceptor. The (possibly modified) response returned by + `post_get_sample_query_set` will be passed to + `post_get_sample_query_set_with_metadata`. + """ + return response, metadata + def pre_list_sample_query_sets( self, request: sample_query_set_service.ListSampleQuerySetsRequest, @@ -204,12 +254,38 @@ def post_list_sample_query_sets( ) -> sample_query_set_service.ListSampleQuerySetsResponse: """Post-rpc interceptor for list_sample_query_sets - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_sample_query_sets_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the SampleQuerySetService server but before - it is returned to user code. + it is returned to user code. This `post_list_sample_query_sets` interceptor runs + before the `post_list_sample_query_sets_with_metadata` interceptor. """ return response + def post_list_sample_query_sets_with_metadata( + self, + response: sample_query_set_service.ListSampleQuerySetsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + sample_query_set_service.ListSampleQuerySetsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_sample_query_sets + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SampleQuerySetService server but before it is returned to user code. + + We recommend only using this `post_list_sample_query_sets_with_metadata` + interceptor in new development instead of the `post_list_sample_query_sets` interceptor. + When both interceptors are used, this `post_list_sample_query_sets_with_metadata` interceptor runs after the + `post_list_sample_query_sets` interceptor. The (possibly modified) response returned by + `post_list_sample_query_sets` will be passed to + `post_list_sample_query_sets_with_metadata`. + """ + return response, metadata + def pre_update_sample_query_set( self, request: sample_query_set_service.UpdateSampleQuerySetRequest, @@ -230,12 +306,37 @@ def post_update_sample_query_set( ) -> gcd_sample_query_set.SampleQuerySet: """Post-rpc interceptor for update_sample_query_set - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_sample_query_set_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the SampleQuerySetService server but before - it is returned to user code. + it is returned to user code. This `post_update_sample_query_set` interceptor runs + before the `post_update_sample_query_set_with_metadata` interceptor. """ return response + def post_update_sample_query_set_with_metadata( + self, + response: gcd_sample_query_set.SampleQuerySet, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + gcd_sample_query_set.SampleQuerySet, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for update_sample_query_set + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SampleQuerySetService server but before it is returned to user code. 
+ + We recommend only using this `post_update_sample_query_set_with_metadata` + interceptor in new development instead of the `post_update_sample_query_set` interceptor. + When both interceptors are used, this `post_update_sample_query_set_with_metadata` interceptor runs after the + `post_update_sample_query_set` interceptor. The (possibly modified) response returned by + `post_update_sample_query_set` will be passed to + `post_update_sample_query_set_with_metadata`. + """ + return response, metadata + def pre_cancel_operation( self, request: operations_pb2.CancelOperationRequest, @@ -531,6 +632,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_sample_query_set(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_sample_query_set_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -797,6 +902,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_sample_query_set(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_sample_query_set_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -947,6 +1056,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_sample_query_sets(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_sample_query_sets_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1108,6 +1221,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_sample_query_set(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_sample_query_set_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/schema_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/schema_service/client.py index d470edadbe9d..b3aaa38ccfb0 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/schema_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/schema_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -517,6 +519,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. 
+ """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -1383,16 +1412,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -1438,16 +1471,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def cancel_operation( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/schema_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/schema_service/transports/rest.py index 367ac4f6da22..e23b28f44a51 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/schema_service/transports/rest.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/schema_service/transports/rest.py @@ -135,12 +135,35 @@ def post_create_schema( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_schema - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_schema_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the SchemaService server but before - it is returned to user code. + it is returned to user code. This `post_create_schema` interceptor runs + before the `post_create_schema_with_metadata` interceptor. """ return response + def post_create_schema_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_schema + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SchemaService server but before it is returned to user code. + + We recommend only using this `post_create_schema_with_metadata` + interceptor in new development instead of the `post_create_schema` interceptor. + When both interceptors are used, this `post_create_schema_with_metadata` interceptor runs after the + `post_create_schema` interceptor. 
The (possibly modified) response returned by + `post_create_schema` will be passed to + `post_create_schema_with_metadata`. + """ + return response, metadata + def pre_delete_schema( self, request: schema_service.DeleteSchemaRequest, @@ -160,12 +183,35 @@ def post_delete_schema( ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_schema - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_schema_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the SchemaService server but before - it is returned to user code. + it is returned to user code. This `post_delete_schema` interceptor runs + before the `post_delete_schema_with_metadata` interceptor. """ return response + def post_delete_schema_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_schema + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SchemaService server but before it is returned to user code. + + We recommend only using this `post_delete_schema_with_metadata` + interceptor in new development instead of the `post_delete_schema` interceptor. + When both interceptors are used, this `post_delete_schema_with_metadata` interceptor runs after the + `post_delete_schema` interceptor. The (possibly modified) response returned by + `post_delete_schema` will be passed to + `post_delete_schema_with_metadata`. + """ + return response, metadata + def pre_get_schema( self, request: schema_service.GetSchemaRequest, @@ -183,12 +229,33 @@ def pre_get_schema( def post_get_schema(self, response: schema.Schema) -> schema.Schema: """Post-rpc interceptor for get_schema - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_schema_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the SchemaService server but before - it is returned to user code. + it is returned to user code. This `post_get_schema` interceptor runs + before the `post_get_schema_with_metadata` interceptor. """ return response + def post_get_schema_with_metadata( + self, response: schema.Schema, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[schema.Schema, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_schema + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SchemaService server but before it is returned to user code. + + We recommend only using this `post_get_schema_with_metadata` + interceptor in new development instead of the `post_get_schema` interceptor. + When both interceptors are used, this `post_get_schema_with_metadata` interceptor runs after the + `post_get_schema` interceptor. The (possibly modified) response returned by + `post_get_schema` will be passed to + `post_get_schema_with_metadata`. + """ + return response, metadata + def pre_list_schemas( self, request: schema_service.ListSchemasRequest, @@ -208,12 +275,37 @@ def post_list_schemas( ) -> schema_service.ListSchemasResponse: """Post-rpc interceptor for list_schemas - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_schemas_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response after it is returned by the SchemaService server but before - it is returned to user code. + it is returned to user code. This `post_list_schemas` interceptor runs + before the `post_list_schemas_with_metadata` interceptor. """ return response + def post_list_schemas_with_metadata( + self, + response: schema_service.ListSchemasResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + schema_service.ListSchemasResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_schemas + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SchemaService server but before it is returned to user code. + + We recommend only using this `post_list_schemas_with_metadata` + interceptor in new development instead of the `post_list_schemas` interceptor. + When both interceptors are used, this `post_list_schemas_with_metadata` interceptor runs after the + `post_list_schemas` interceptor. The (possibly modified) response returned by + `post_list_schemas` will be passed to + `post_list_schemas_with_metadata`. + """ + return response, metadata + def pre_update_schema( self, request: schema_service.UpdateSchemaRequest, @@ -233,12 +325,35 @@ def post_update_schema( ) -> operations_pb2.Operation: """Post-rpc interceptor for update_schema - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_schema_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the SchemaService server but before - it is returned to user code. + it is returned to user code. This `post_update_schema` interceptor runs + before the `post_update_schema_with_metadata` interceptor. """ return response + def post_update_schema_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_schema + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SchemaService server but before it is returned to user code. + + We recommend only using this `post_update_schema_with_metadata` + interceptor in new development instead of the `post_update_schema` interceptor. + When both interceptors are used, this `post_update_schema_with_metadata` interceptor runs after the + `post_update_schema` interceptor. The (possibly modified) response returned by + `post_update_schema` will be passed to + `post_update_schema_with_metadata`. 
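For reference, the metadata handed to every `*_with_metadata` hook in these REST transports is built the same way throughout this change: each HTTP response header becomes a `(name, str(value))` tuple. A small standalone sketch of that conversion:

    from typing import List, Tuple

    def headers_to_metadata(headers) -> List[Tuple[str, str]]:
        # Same conversion the generated __call__ methods perform before invoking the
        # *_with_metadata hooks: every HTTP response header becomes a (name, str(value)) pair.
        return [(k, str(v)) for k, v in headers.items()]

    print(headers_to_metadata({"content-type": "application/json", "x-goog-request-id": "abc123"}))
    # [('content-type', 'application/json'), ('x-goog-request-id', 'abc123')]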
+ """ + return response, metadata + def pre_cancel_operation( self, request: operations_pb2.CancelOperationRequest, @@ -691,6 +806,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_schema(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_schema_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -834,6 +953,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_schema(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_schema_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -982,6 +1105,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_schema(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_schema_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1129,6 +1256,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_schemas(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_schemas_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1280,6 +1411,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_schema(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_schema_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/search_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/search_service/client.py index 7fc4c027965c..d92f58cd021b 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/search_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/search_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -611,6 +613,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. 
+ """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -1084,16 +1113,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -1139,16 +1172,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def cancel_operation( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/search_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/search_service/transports/rest.py index 8d4cb105fd0b..8ffbd9f89c3b 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/search_service/transports/rest.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/search_service/transports/rest.py @@ -109,12 +109,35 @@ def post_search( ) -> search_service.SearchResponse: """Post-rpc interceptor for search - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_search_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the SearchService server but before - it is returned to user code. + it is returned to user code. This `post_search` interceptor runs + before the `post_search_with_metadata` interceptor. """ return response + def post_search_with_metadata( + self, + response: search_service.SearchResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[search_service.SearchResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for search + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SearchService server but before it is returned to user code. + + We recommend only using this `post_search_with_metadata` + interceptor in new development instead of the `post_search` interceptor. + When both interceptors are used, this `post_search_with_metadata` interceptor runs after the + `post_search` interceptor. 
The (possibly modified) response returned by + `post_search` will be passed to + `post_search_with_metadata`. + """ + return response, metadata + def pre_search_lite( self, request: search_service.SearchRequest, @@ -132,12 +155,35 @@ def post_search_lite( ) -> search_service.SearchResponse: """Post-rpc interceptor for search_lite - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_search_lite_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the SearchService server but before - it is returned to user code. + it is returned to user code. This `post_search_lite` interceptor runs + before the `post_search_lite_with_metadata` interceptor. """ return response + def post_search_lite_with_metadata( + self, + response: search_service.SearchResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[search_service.SearchResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for search_lite + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SearchService server but before it is returned to user code. + + We recommend only using this `post_search_lite_with_metadata` + interceptor in new development instead of the `post_search_lite` interceptor. + When both interceptors are used, this `post_search_lite_with_metadata` interceptor runs after the + `post_search_lite` interceptor. The (possibly modified) response returned by + `post_search_lite` will be passed to + `post_search_lite_with_metadata`. + """ + return response, metadata + def pre_cancel_operation( self, request: operations_pb2.CancelOperationRequest, @@ -428,6 +474,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_search(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_search_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -585,6 +635,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_search_lite(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_search_lite_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/search_tuning_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/search_tuning_service/client.py index 6f111fe8fc34..94e200d0f0e7 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/search_tuning_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/search_tuning_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -515,6 +517,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. 
return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -968,16 +997,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -1023,16 +1056,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def cancel_operation( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/search_tuning_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/search_tuning_service/transports/rest.py index 63696693afe4..01479945d7a7 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/search_tuning_service/transports/rest.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/search_tuning_service/transports/rest.py @@ -112,12 +112,38 @@ def post_list_custom_models( ) -> search_tuning_service.ListCustomModelsResponse: """Post-rpc interceptor for list_custom_models - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_custom_models_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the SearchTuningService server but before - it is returned to user code. + it is returned to user code. This `post_list_custom_models` interceptor runs + before the `post_list_custom_models_with_metadata` interceptor. 
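A hedged sketch of adopting the metadata-aware hook added just below (`post_list_custom_models_with_metadata`). The `SearchTuningServiceRestInterceptor` / `SearchTuningServiceRestTransport` names and the `interceptor=` constructor argument follow the usual generated-transport conventions and are assumptions here, not spelled out in this hunk.

from typing import Sequence, Tuple, Union

from google.cloud import discoveryengine_v1beta
from google.cloud.discoveryengine_v1beta.services.search_tuning_service.transports.rest import (
    SearchTuningServiceRestInterceptor,
    SearchTuningServiceRestTransport,
)
from google.cloud.discoveryengine_v1beta.types import search_tuning_service


class HeaderLoggingInterceptor(SearchTuningServiceRestInterceptor):
    def post_list_custom_models_with_metadata(
        self,
        response: search_tuning_service.ListCustomModelsResponse,
        metadata: Sequence[Tuple[str, Union[str, bytes]]],
    ) -> Tuple[
        search_tuning_service.ListCustomModelsResponse,
        Sequence[Tuple[str, Union[str, bytes]]],
    ]:
        # metadata is the flattened HTTP response headers; log them and pass through.
        print(dict(metadata))
        return response, metadata


# Wiring assumed from the standard generated REST transport signature;
# constructing the transport picks up Application Default Credentials.
client = discoveryengine_v1beta.SearchTuningServiceClient(
    transport=SearchTuningServiceRestTransport(interceptor=HeaderLoggingInterceptor())
)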
""" return response + def post_list_custom_models_with_metadata( + self, + response: search_tuning_service.ListCustomModelsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + search_tuning_service.ListCustomModelsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_custom_models + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SearchTuningService server but before it is returned to user code. + + We recommend only using this `post_list_custom_models_with_metadata` + interceptor in new development instead of the `post_list_custom_models` interceptor. + When both interceptors are used, this `post_list_custom_models_with_metadata` interceptor runs after the + `post_list_custom_models` interceptor. The (possibly modified) response returned by + `post_list_custom_models` will be passed to + `post_list_custom_models_with_metadata`. + """ + return response, metadata + def pre_train_custom_model( self, request: search_tuning_service.TrainCustomModelRequest, @@ -138,12 +164,35 @@ def post_train_custom_model( ) -> operations_pb2.Operation: """Post-rpc interceptor for train_custom_model - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_train_custom_model_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the SearchTuningService server but before - it is returned to user code. + it is returned to user code. This `post_train_custom_model` interceptor runs + before the `post_train_custom_model_with_metadata` interceptor. """ return response + def post_train_custom_model_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for train_custom_model + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SearchTuningService server but before it is returned to user code. + + We recommend only using this `post_train_custom_model_with_metadata` + interceptor in new development instead of the `post_train_custom_model` interceptor. + When both interceptors are used, this `post_train_custom_model_with_metadata` interceptor runs after the + `post_train_custom_model` interceptor. The (possibly modified) response returned by + `post_train_custom_model` will be passed to + `post_train_custom_model_with_metadata`. 
+ """ + return response, metadata + def pre_cancel_operation( self, request: operations_pb2.CancelOperationRequest, @@ -594,6 +643,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_custom_models(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_custom_models_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -748,6 +801,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_train_custom_model(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_train_custom_model_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/serving_config_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/serving_config_service/client.py index 320a24c75476..4d9a2e4a7855 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/serving_config_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/serving_config_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -498,6 +500,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -1128,16 +1157,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -1183,16 +1216,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def cancel_operation( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/serving_config_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/serving_config_service/transports/rest.py index 61757debef56..4dd5b791d672 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/serving_config_service/transports/rest.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/serving_config_service/transports/rest.py @@ -124,12 +124,35 @@ def post_get_serving_config( ) -> serving_config.ServingConfig: """Post-rpc interceptor for get_serving_config - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_serving_config_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ServingConfigService server but before - it is returned to user code. + it is returned to user code. This `post_get_serving_config` interceptor runs + before the `post_get_serving_config_with_metadata` interceptor. """ return response + def post_get_serving_config_with_metadata( + self, + response: serving_config.ServingConfig, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[serving_config.ServingConfig, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_serving_config + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ServingConfigService server but before it is returned to user code. + + We recommend only using this `post_get_serving_config_with_metadata` + interceptor in new development instead of the `post_get_serving_config` interceptor. + When both interceptors are used, this `post_get_serving_config_with_metadata` interceptor runs after the + `post_get_serving_config` interceptor. The (possibly modified) response returned by + `post_get_serving_config` will be passed to + `post_get_serving_config_with_metadata`. + """ + return response, metadata + def pre_list_serving_configs( self, request: serving_config_service.ListServingConfigsRequest, @@ -150,12 +173,38 @@ def post_list_serving_configs( ) -> serving_config_service.ListServingConfigsResponse: """Post-rpc interceptor for list_serving_configs - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_serving_configs_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ServingConfigService server but before - it is returned to user code. + it is returned to user code. This `post_list_serving_configs` interceptor runs + before the `post_list_serving_configs_with_metadata` interceptor. 
""" return response + def post_list_serving_configs_with_metadata( + self, + response: serving_config_service.ListServingConfigsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + serving_config_service.ListServingConfigsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_serving_configs + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ServingConfigService server but before it is returned to user code. + + We recommend only using this `post_list_serving_configs_with_metadata` + interceptor in new development instead of the `post_list_serving_configs` interceptor. + When both interceptors are used, this `post_list_serving_configs_with_metadata` interceptor runs after the + `post_list_serving_configs` interceptor. The (possibly modified) response returned by + `post_list_serving_configs` will be passed to + `post_list_serving_configs_with_metadata`. + """ + return response, metadata + def pre_update_serving_config( self, request: serving_config_service.UpdateServingConfigRequest, @@ -176,12 +225,37 @@ def post_update_serving_config( ) -> gcd_serving_config.ServingConfig: """Post-rpc interceptor for update_serving_config - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_serving_config_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ServingConfigService server but before - it is returned to user code. + it is returned to user code. This `post_update_serving_config` interceptor runs + before the `post_update_serving_config_with_metadata` interceptor. """ return response + def post_update_serving_config_with_metadata( + self, + response: gcd_serving_config.ServingConfig, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + gcd_serving_config.ServingConfig, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for update_serving_config + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ServingConfigService server but before it is returned to user code. + + We recommend only using this `post_update_serving_config_with_metadata` + interceptor in new development instead of the `post_update_serving_config` interceptor. + When both interceptors are used, this `post_update_serving_config_with_metadata` interceptor runs after the + `post_update_serving_config` interceptor. The (possibly modified) response returned by + `post_update_serving_config` will be passed to + `post_update_serving_config_with_metadata`. 
+ """ + return response, metadata + def pre_cancel_operation( self, request: operations_pb2.CancelOperationRequest, @@ -471,6 +545,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_serving_config(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_serving_config_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -619,6 +697,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_serving_configs(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_serving_configs_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -781,6 +863,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_serving_config(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_serving_config_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/site_search_engine_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/site_search_engine_service/client.py index 0f7530f5eaa3..63a884053a38 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/site_search_engine_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/site_search_engine_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -546,6 +548,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -2652,16 +2681,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. 
- return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -2707,16 +2740,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def cancel_operation( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/site_search_engine_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/site_search_engine_service/transports/rest.py index f3e51c0403a7..1a8e6a8cf6f4 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/site_search_engine_service/transports/rest.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/site_search_engine_service/transports/rest.py @@ -219,12 +219,35 @@ def post_batch_create_target_sites( ) -> operations_pb2.Operation: """Post-rpc interceptor for batch_create_target_sites - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_batch_create_target_sites_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the SiteSearchEngineService server but before - it is returned to user code. + it is returned to user code. This `post_batch_create_target_sites` interceptor runs + before the `post_batch_create_target_sites_with_metadata` interceptor. """ return response + def post_batch_create_target_sites_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for batch_create_target_sites + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SiteSearchEngineService server but before it is returned to user code. + + We recommend only using this `post_batch_create_target_sites_with_metadata` + interceptor in new development instead of the `post_batch_create_target_sites` interceptor. + When both interceptors are used, this `post_batch_create_target_sites_with_metadata` interceptor runs after the + `post_batch_create_target_sites` interceptor. The (possibly modified) response returned by + `post_batch_create_target_sites` will be passed to + `post_batch_create_target_sites_with_metadata`. + """ + return response, metadata + def pre_batch_verify_target_sites( self, request: site_search_engine_service.BatchVerifyTargetSitesRequest, @@ -245,12 +268,35 @@ def post_batch_verify_target_sites( ) -> operations_pb2.Operation: """Post-rpc interceptor for batch_verify_target_sites - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_batch_verify_target_sites_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the SiteSearchEngineService server but before - it is returned to user code. + it is returned to user code. 
This `post_batch_verify_target_sites` interceptor runs + before the `post_batch_verify_target_sites_with_metadata` interceptor. """ return response + def post_batch_verify_target_sites_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for batch_verify_target_sites + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SiteSearchEngineService server but before it is returned to user code. + + We recommend only using this `post_batch_verify_target_sites_with_metadata` + interceptor in new development instead of the `post_batch_verify_target_sites` interceptor. + When both interceptors are used, this `post_batch_verify_target_sites_with_metadata` interceptor runs after the + `post_batch_verify_target_sites` interceptor. The (possibly modified) response returned by + `post_batch_verify_target_sites` will be passed to + `post_batch_verify_target_sites_with_metadata`. + """ + return response, metadata + def pre_create_sitemap( self, request: site_search_engine_service.CreateSitemapRequest, @@ -271,12 +317,35 @@ def post_create_sitemap( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_sitemap - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_sitemap_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the SiteSearchEngineService server but before - it is returned to user code. + it is returned to user code. This `post_create_sitemap` interceptor runs + before the `post_create_sitemap_with_metadata` interceptor. """ return response + def post_create_sitemap_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_sitemap + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SiteSearchEngineService server but before it is returned to user code. + + We recommend only using this `post_create_sitemap_with_metadata` + interceptor in new development instead of the `post_create_sitemap` interceptor. + When both interceptors are used, this `post_create_sitemap_with_metadata` interceptor runs after the + `post_create_sitemap` interceptor. The (possibly modified) response returned by + `post_create_sitemap` will be passed to + `post_create_sitemap_with_metadata`. + """ + return response, metadata + def pre_create_target_site( self, request: site_search_engine_service.CreateTargetSiteRequest, @@ -297,12 +366,35 @@ def post_create_target_site( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_target_site - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_target_site_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the SiteSearchEngineService server but before - it is returned to user code. + it is returned to user code. This `post_create_target_site` interceptor runs + before the `post_create_target_site_with_metadata` interceptor. 
""" return response + def post_create_target_site_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_target_site + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SiteSearchEngineService server but before it is returned to user code. + + We recommend only using this `post_create_target_site_with_metadata` + interceptor in new development instead of the `post_create_target_site` interceptor. + When both interceptors are used, this `post_create_target_site_with_metadata` interceptor runs after the + `post_create_target_site` interceptor. The (possibly modified) response returned by + `post_create_target_site` will be passed to + `post_create_target_site_with_metadata`. + """ + return response, metadata + def pre_delete_sitemap( self, request: site_search_engine_service.DeleteSitemapRequest, @@ -323,12 +415,35 @@ def post_delete_sitemap( ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_sitemap - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_sitemap_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the SiteSearchEngineService server but before - it is returned to user code. + it is returned to user code. This `post_delete_sitemap` interceptor runs + before the `post_delete_sitemap_with_metadata` interceptor. """ return response + def post_delete_sitemap_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_sitemap + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SiteSearchEngineService server but before it is returned to user code. + + We recommend only using this `post_delete_sitemap_with_metadata` + interceptor in new development instead of the `post_delete_sitemap` interceptor. + When both interceptors are used, this `post_delete_sitemap_with_metadata` interceptor runs after the + `post_delete_sitemap` interceptor. The (possibly modified) response returned by + `post_delete_sitemap` will be passed to + `post_delete_sitemap_with_metadata`. + """ + return response, metadata + def pre_delete_target_site( self, request: site_search_engine_service.DeleteTargetSiteRequest, @@ -349,12 +464,35 @@ def post_delete_target_site( ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_target_site - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_target_site_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the SiteSearchEngineService server but before - it is returned to user code. + it is returned to user code. This `post_delete_target_site` interceptor runs + before the `post_delete_target_site_with_metadata` interceptor. 
""" return response + def post_delete_target_site_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_target_site + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SiteSearchEngineService server but before it is returned to user code. + + We recommend only using this `post_delete_target_site_with_metadata` + interceptor in new development instead of the `post_delete_target_site` interceptor. + When both interceptors are used, this `post_delete_target_site_with_metadata` interceptor runs after the + `post_delete_target_site` interceptor. The (possibly modified) response returned by + `post_delete_target_site` will be passed to + `post_delete_target_site_with_metadata`. + """ + return response, metadata + def pre_disable_advanced_site_search( self, request: site_search_engine_service.DisableAdvancedSiteSearchRequest, @@ -375,12 +513,35 @@ def post_disable_advanced_site_search( ) -> operations_pb2.Operation: """Post-rpc interceptor for disable_advanced_site_search - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_disable_advanced_site_search_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the SiteSearchEngineService server but before - it is returned to user code. + it is returned to user code. This `post_disable_advanced_site_search` interceptor runs + before the `post_disable_advanced_site_search_with_metadata` interceptor. """ return response + def post_disable_advanced_site_search_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for disable_advanced_site_search + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SiteSearchEngineService server but before it is returned to user code. + + We recommend only using this `post_disable_advanced_site_search_with_metadata` + interceptor in new development instead of the `post_disable_advanced_site_search` interceptor. + When both interceptors are used, this `post_disable_advanced_site_search_with_metadata` interceptor runs after the + `post_disable_advanced_site_search` interceptor. The (possibly modified) response returned by + `post_disable_advanced_site_search` will be passed to + `post_disable_advanced_site_search_with_metadata`. + """ + return response, metadata + def pre_enable_advanced_site_search( self, request: site_search_engine_service.EnableAdvancedSiteSearchRequest, @@ -401,12 +562,35 @@ def post_enable_advanced_site_search( ) -> operations_pb2.Operation: """Post-rpc interceptor for enable_advanced_site_search - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_enable_advanced_site_search_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the SiteSearchEngineService server but before - it is returned to user code. + it is returned to user code. This `post_enable_advanced_site_search` interceptor runs + before the `post_enable_advanced_site_search_with_metadata` interceptor. 
""" return response + def post_enable_advanced_site_search_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for enable_advanced_site_search + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SiteSearchEngineService server but before it is returned to user code. + + We recommend only using this `post_enable_advanced_site_search_with_metadata` + interceptor in new development instead of the `post_enable_advanced_site_search` interceptor. + When both interceptors are used, this `post_enable_advanced_site_search_with_metadata` interceptor runs after the + `post_enable_advanced_site_search` interceptor. The (possibly modified) response returned by + `post_enable_advanced_site_search` will be passed to + `post_enable_advanced_site_search_with_metadata`. + """ + return response, metadata + def pre_fetch_domain_verification_status( self, request: site_search_engine_service.FetchDomainVerificationStatusRequest, @@ -427,12 +611,38 @@ def post_fetch_domain_verification_status( ) -> site_search_engine_service.FetchDomainVerificationStatusResponse: """Post-rpc interceptor for fetch_domain_verification_status - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_fetch_domain_verification_status_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the SiteSearchEngineService server but before - it is returned to user code. + it is returned to user code. This `post_fetch_domain_verification_status` interceptor runs + before the `post_fetch_domain_verification_status_with_metadata` interceptor. """ return response + def post_fetch_domain_verification_status_with_metadata( + self, + response: site_search_engine_service.FetchDomainVerificationStatusResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + site_search_engine_service.FetchDomainVerificationStatusResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for fetch_domain_verification_status + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SiteSearchEngineService server but before it is returned to user code. + + We recommend only using this `post_fetch_domain_verification_status_with_metadata` + interceptor in new development instead of the `post_fetch_domain_verification_status` interceptor. + When both interceptors are used, this `post_fetch_domain_verification_status_with_metadata` interceptor runs after the + `post_fetch_domain_verification_status` interceptor. The (possibly modified) response returned by + `post_fetch_domain_verification_status` will be passed to + `post_fetch_domain_verification_status_with_metadata`. + """ + return response, metadata + def pre_fetch_sitemaps( self, request: site_search_engine_service.FetchSitemapsRequest, @@ -453,12 +663,38 @@ def post_fetch_sitemaps( ) -> site_search_engine_service.FetchSitemapsResponse: """Post-rpc interceptor for fetch_sitemaps - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_fetch_sitemaps_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the SiteSearchEngineService server but before - it is returned to user code. + it is returned to user code. 
This `post_fetch_sitemaps` interceptor runs + before the `post_fetch_sitemaps_with_metadata` interceptor. """ return response + def post_fetch_sitemaps_with_metadata( + self, + response: site_search_engine_service.FetchSitemapsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + site_search_engine_service.FetchSitemapsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for fetch_sitemaps + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SiteSearchEngineService server but before it is returned to user code. + + We recommend only using this `post_fetch_sitemaps_with_metadata` + interceptor in new development instead of the `post_fetch_sitemaps` interceptor. + When both interceptors are used, this `post_fetch_sitemaps_with_metadata` interceptor runs after the + `post_fetch_sitemaps` interceptor. The (possibly modified) response returned by + `post_fetch_sitemaps` will be passed to + `post_fetch_sitemaps_with_metadata`. + """ + return response, metadata + def pre_get_site_search_engine( self, request: site_search_engine_service.GetSiteSearchEngineRequest, @@ -479,12 +715,37 @@ def post_get_site_search_engine( ) -> site_search_engine.SiteSearchEngine: """Post-rpc interceptor for get_site_search_engine - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_site_search_engine_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the SiteSearchEngineService server but before - it is returned to user code. + it is returned to user code. This `post_get_site_search_engine` interceptor runs + before the `post_get_site_search_engine_with_metadata` interceptor. """ return response + def post_get_site_search_engine_with_metadata( + self, + response: site_search_engine.SiteSearchEngine, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + site_search_engine.SiteSearchEngine, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for get_site_search_engine + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SiteSearchEngineService server but before it is returned to user code. + + We recommend only using this `post_get_site_search_engine_with_metadata` + interceptor in new development instead of the `post_get_site_search_engine` interceptor. + When both interceptors are used, this `post_get_site_search_engine_with_metadata` interceptor runs after the + `post_get_site_search_engine` interceptor. The (possibly modified) response returned by + `post_get_site_search_engine` will be passed to + `post_get_site_search_engine_with_metadata`. + """ + return response, metadata + def pre_get_target_site( self, request: site_search_engine_service.GetTargetSiteRequest, @@ -505,12 +766,35 @@ def post_get_target_site( ) -> site_search_engine.TargetSite: """Post-rpc interceptor for get_target_site - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_target_site_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the SiteSearchEngineService server but before - it is returned to user code. + it is returned to user code. This `post_get_target_site` interceptor runs + before the `post_get_target_site_with_metadata` interceptor. 
""" return response + def post_get_target_site_with_metadata( + self, + response: site_search_engine.TargetSite, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[site_search_engine.TargetSite, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_target_site + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SiteSearchEngineService server but before it is returned to user code. + + We recommend only using this `post_get_target_site_with_metadata` + interceptor in new development instead of the `post_get_target_site` interceptor. + When both interceptors are used, this `post_get_target_site_with_metadata` interceptor runs after the + `post_get_target_site` interceptor. The (possibly modified) response returned by + `post_get_target_site` will be passed to + `post_get_target_site_with_metadata`. + """ + return response, metadata + def pre_list_target_sites( self, request: site_search_engine_service.ListTargetSitesRequest, @@ -531,12 +815,38 @@ def post_list_target_sites( ) -> site_search_engine_service.ListTargetSitesResponse: """Post-rpc interceptor for list_target_sites - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_target_sites_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the SiteSearchEngineService server but before - it is returned to user code. + it is returned to user code. This `post_list_target_sites` interceptor runs + before the `post_list_target_sites_with_metadata` interceptor. """ return response + def post_list_target_sites_with_metadata( + self, + response: site_search_engine_service.ListTargetSitesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + site_search_engine_service.ListTargetSitesResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_target_sites + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SiteSearchEngineService server but before it is returned to user code. + + We recommend only using this `post_list_target_sites_with_metadata` + interceptor in new development instead of the `post_list_target_sites` interceptor. + When both interceptors are used, this `post_list_target_sites_with_metadata` interceptor runs after the + `post_list_target_sites` interceptor. The (possibly modified) response returned by + `post_list_target_sites` will be passed to + `post_list_target_sites_with_metadata`. + """ + return response, metadata + def pre_recrawl_uris( self, request: site_search_engine_service.RecrawlUrisRequest, @@ -557,12 +867,35 @@ def post_recrawl_uris( ) -> operations_pb2.Operation: """Post-rpc interceptor for recrawl_uris - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_recrawl_uris_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the SiteSearchEngineService server but before - it is returned to user code. + it is returned to user code. This `post_recrawl_uris` interceptor runs + before the `post_recrawl_uris_with_metadata` interceptor. 
""" return response + def post_recrawl_uris_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for recrawl_uris + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SiteSearchEngineService server but before it is returned to user code. + + We recommend only using this `post_recrawl_uris_with_metadata` + interceptor in new development instead of the `post_recrawl_uris` interceptor. + When both interceptors are used, this `post_recrawl_uris_with_metadata` interceptor runs after the + `post_recrawl_uris` interceptor. The (possibly modified) response returned by + `post_recrawl_uris` will be passed to + `post_recrawl_uris_with_metadata`. + """ + return response, metadata + def pre_update_target_site( self, request: site_search_engine_service.UpdateTargetSiteRequest, @@ -583,12 +916,35 @@ def post_update_target_site( ) -> operations_pb2.Operation: """Post-rpc interceptor for update_target_site - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_target_site_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the SiteSearchEngineService server but before - it is returned to user code. + it is returned to user code. This `post_update_target_site` interceptor runs + before the `post_update_target_site_with_metadata` interceptor. """ return response + def post_update_target_site_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_target_site + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SiteSearchEngineService server but before it is returned to user code. + + We recommend only using this `post_update_target_site_with_metadata` + interceptor in new development instead of the `post_update_target_site` interceptor. + When both interceptors are used, this `post_update_target_site_with_metadata` interceptor runs after the + `post_update_target_site` interceptor. The (possibly modified) response returned by + `post_update_target_site` will be passed to + `post_update_target_site_with_metadata`. 
+ """ + return response, metadata + def pre_cancel_operation( self, request: operations_pb2.CancelOperationRequest, @@ -1043,6 +1399,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_batch_create_target_sites(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_batch_create_target_sites_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1195,6 +1555,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_batch_verify_target_sites(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_batch_verify_target_sites_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1347,6 +1711,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_sitemap(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_sitemap_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1501,6 +1869,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_target_site(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_target_site_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1647,6 +2019,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_sitemap(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_sitemap_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1795,6 +2171,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_target_site(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_target_site_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1950,6 +2330,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_disable_advanced_site_search(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_disable_advanced_site_search_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2103,6 +2487,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_enable_advanced_site_search(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_enable_advanced_site_search_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO 
COVER @@ -2258,6 +2646,13 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_fetch_domain_verification_status(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_fetch_domain_verification_status_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2408,6 +2803,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_fetch_sitemaps(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_fetch_sitemaps_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2563,6 +2962,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_site_search_engine(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_site_search_engine_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2712,6 +3115,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_target_site(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_target_site_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2862,6 +3269,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_target_sites(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_target_sites_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3016,6 +3427,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_recrawl_uris(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_recrawl_uris_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3170,6 +3585,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_target_site(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_target_site_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/user_event_service/client.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/user_event_service/client.py index 3cab989ead94..2bbf91a98e7c 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/user_event_service/client.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/user_event_service/client.py @@ -14,6 +14,8 @@ # limitations 
under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -549,6 +551,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -1245,16 +1274,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -1300,16 +1333,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def cancel_operation( self, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/user_event_service/transports/rest.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/user_event_service/transports/rest.py index c7b1e70e05cc..f91b390a3f30 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/user_event_service/transports/rest.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/services/user_event_service/transports/rest.py @@ -134,12 +134,35 @@ def post_collect_user_event( ) -> httpbody_pb2.HttpBody: """Post-rpc interceptor for collect_user_event - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_collect_user_event_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the UserEventService server but before - it is returned to user code. + it is returned to user code. This `post_collect_user_event` interceptor runs + before the `post_collect_user_event_with_metadata` interceptor. 
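`collect_user_event` is unusual in that it returns a raw `google.api.HttpBody` rather than a typed response message, so the metadata-aware hook added just below sees that proto directly. A hedged sketch (the `UserEventServiceRestInterceptor` base-class name is assumed):

from google.api import httpbody_pb2
from google.cloud.discoveryengine_v1beta.services.user_event_service.transports.rest import (
    UserEventServiceRestInterceptor,
)


class CollectAuditInterceptor(UserEventServiceRestInterceptor):
    def post_collect_user_event_with_metadata(
        self, response: httpbody_pb2.HttpBody, metadata
    ):
        # content_type and data are available directly on the HttpBody proto.
        print(response.content_type, len(response.data))
        return response, metadata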
""" return response + def post_collect_user_event_with_metadata( + self, + response: httpbody_pb2.HttpBody, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[httpbody_pb2.HttpBody, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for collect_user_event + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the UserEventService server but before it is returned to user code. + + We recommend only using this `post_collect_user_event_with_metadata` + interceptor in new development instead of the `post_collect_user_event` interceptor. + When both interceptors are used, this `post_collect_user_event_with_metadata` interceptor runs after the + `post_collect_user_event` interceptor. The (possibly modified) response returned by + `post_collect_user_event` will be passed to + `post_collect_user_event_with_metadata`. + """ + return response, metadata + def pre_import_user_events( self, request: import_config.ImportUserEventsRequest, @@ -159,12 +182,35 @@ def post_import_user_events( ) -> operations_pb2.Operation: """Post-rpc interceptor for import_user_events - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_import_user_events_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the UserEventService server but before - it is returned to user code. + it is returned to user code. This `post_import_user_events` interceptor runs + before the `post_import_user_events_with_metadata` interceptor. """ return response + def post_import_user_events_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for import_user_events + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the UserEventService server but before it is returned to user code. + + We recommend only using this `post_import_user_events_with_metadata` + interceptor in new development instead of the `post_import_user_events` interceptor. + When both interceptors are used, this `post_import_user_events_with_metadata` interceptor runs after the + `post_import_user_events` interceptor. The (possibly modified) response returned by + `post_import_user_events` will be passed to + `post_import_user_events_with_metadata`. + """ + return response, metadata + def pre_purge_user_events( self, request: purge_config.PurgeUserEventsRequest, @@ -184,12 +230,35 @@ def post_purge_user_events( ) -> operations_pb2.Operation: """Post-rpc interceptor for purge_user_events - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_purge_user_events_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the UserEventService server but before - it is returned to user code. + it is returned to user code. This `post_purge_user_events` interceptor runs + before the `post_purge_user_events_with_metadata` interceptor. 
""" return response + def post_purge_user_events_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for purge_user_events + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the UserEventService server but before it is returned to user code. + + We recommend only using this `post_purge_user_events_with_metadata` + interceptor in new development instead of the `post_purge_user_events` interceptor. + When both interceptors are used, this `post_purge_user_events_with_metadata` interceptor runs after the + `post_purge_user_events` interceptor. The (possibly modified) response returned by + `post_purge_user_events` will be passed to + `post_purge_user_events_with_metadata`. + """ + return response, metadata + def pre_write_user_event( self, request: user_event_service.WriteUserEventRequest, @@ -210,12 +279,35 @@ def post_write_user_event( ) -> user_event.UserEvent: """Post-rpc interceptor for write_user_event - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_write_user_event_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the UserEventService server but before - it is returned to user code. + it is returned to user code. This `post_write_user_event` interceptor runs + before the `post_write_user_event_with_metadata` interceptor. """ return response + def post_write_user_event_with_metadata( + self, + response: user_event.UserEvent, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[user_event.UserEvent, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for write_user_event + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the UserEventService server but before it is returned to user code. + + We recommend only using this `post_write_user_event_with_metadata` + interceptor in new development instead of the `post_write_user_event` interceptor. + When both interceptors are used, this `post_write_user_event_with_metadata` interceptor runs after the + `post_write_user_event` interceptor. The (possibly modified) response returned by + `post_write_user_event` will be passed to + `post_write_user_event_with_metadata`. 
+ """ + return response, metadata + def pre_cancel_operation( self, request: operations_pb2.CancelOperationRequest, @@ -712,6 +804,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_collect_user_event(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_collect_user_event_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -863,6 +959,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_import_user_events(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_import_user_events_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1014,6 +1114,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_purge_user_events(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_purge_user_events_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1167,6 +1271,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_write_user_event(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_write_user_event_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/snippet_metadata_google.cloud.discoveryengine.v1.json b/packages/google-cloud-discoveryengine/samples/generated_samples/snippet_metadata_google.cloud.discoveryengine.v1.json index 558b61c7883b..a085d1bdaf15 100644 --- a/packages/google-cloud-discoveryengine/samples/generated_samples/snippet_metadata_google.cloud.discoveryengine.v1.json +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/snippet_metadata_google.cloud.discoveryengine.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-discoveryengine", - "version": "0.13.5" + "version": "0.13.6" }, "snippets": [ { diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/snippet_metadata_google.cloud.discoveryengine.v1alpha.json b/packages/google-cloud-discoveryengine/samples/generated_samples/snippet_metadata_google.cloud.discoveryengine.v1alpha.json index f23bdf041eb2..2c52610259fd 100644 --- a/packages/google-cloud-discoveryengine/samples/generated_samples/snippet_metadata_google.cloud.discoveryengine.v1alpha.json +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/snippet_metadata_google.cloud.discoveryengine.v1alpha.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-discoveryengine", - "version": "0.13.5" + "version": "0.13.6" }, "snippets": [ { diff --git a/packages/google-cloud-discoveryengine/samples/generated_samples/snippet_metadata_google.cloud.discoveryengine.v1beta.json b/packages/google-cloud-discoveryengine/samples/generated_samples/snippet_metadata_google.cloud.discoveryengine.v1beta.json index 1dc95a67874e..752d3fac4a17 100644 --- 
a/packages/google-cloud-discoveryengine/samples/generated_samples/snippet_metadata_google.cloud.discoveryengine.v1beta.json +++ b/packages/google-cloud-discoveryengine/samples/generated_samples/snippet_metadata_google.cloud.discoveryengine.v1beta.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-discoveryengine", - "version": "0.13.5" + "version": "0.13.6" }, "snippets": [ { diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_completion_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_completion_service.py index 3d66399da10d..6a5e2854bdd3 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_completion_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_completion_service.py @@ -76,6 +76,13 @@ purge_config, ) +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -343,6 +350,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = CompletionServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = CompletionServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -3538,10 +3588,13 @@ def test_complete_query_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.CompletionServiceRestInterceptor, "post_complete_query" ) as post, mock.patch.object( + transports.CompletionServiceRestInterceptor, "post_complete_query_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.CompletionServiceRestInterceptor, "pre_complete_query" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = completion_service.CompleteQueryRequest.pb( completion_service.CompleteQueryRequest() ) @@ -3567,6 +3620,10 @@ def test_complete_query_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = completion_service.CompleteQueryResponse() + post_with_metadata.return_value = ( + 
completion_service.CompleteQueryResponse(), + metadata, + ) client.complete_query( request, @@ -3578,6 +3635,7 @@ def test_complete_query_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_import_suggestion_deny_list_entries_rest_bad_request( @@ -3663,11 +3721,15 @@ def test_import_suggestion_deny_list_entries_rest_interceptors(null_interceptor) transports.CompletionServiceRestInterceptor, "post_import_suggestion_deny_list_entries", ) as post, mock.patch.object( + transports.CompletionServiceRestInterceptor, + "post_import_suggestion_deny_list_entries_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.CompletionServiceRestInterceptor, "pre_import_suggestion_deny_list_entries", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = import_config.ImportSuggestionDenyListEntriesRequest.pb( import_config.ImportSuggestionDenyListEntriesRequest() ) @@ -3691,6 +3753,7 @@ def test_import_suggestion_deny_list_entries_rest_interceptors(null_interceptor) ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.import_suggestion_deny_list_entries( request, @@ -3702,6 +3765,7 @@ def test_import_suggestion_deny_list_entries_rest_interceptors(null_interceptor) pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_purge_suggestion_deny_list_entries_rest_bad_request( @@ -3787,11 +3851,15 @@ def test_purge_suggestion_deny_list_entries_rest_interceptors(null_interceptor): transports.CompletionServiceRestInterceptor, "post_purge_suggestion_deny_list_entries", ) as post, mock.patch.object( + transports.CompletionServiceRestInterceptor, + "post_purge_suggestion_deny_list_entries_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.CompletionServiceRestInterceptor, "pre_purge_suggestion_deny_list_entries", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = purge_config.PurgeSuggestionDenyListEntriesRequest.pb( purge_config.PurgeSuggestionDenyListEntriesRequest() ) @@ -3815,6 +3883,7 @@ def test_purge_suggestion_deny_list_entries_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.purge_suggestion_deny_list_entries( request, @@ -3826,6 +3895,7 @@ def test_purge_suggestion_deny_list_entries_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_import_completion_suggestions_rest_bad_request( @@ -3911,10 +3981,14 @@ def test_import_completion_suggestions_rest_interceptors(null_interceptor): transports.CompletionServiceRestInterceptor, "post_import_completion_suggestions", ) as post, mock.patch.object( + transports.CompletionServiceRestInterceptor, + "post_import_completion_suggestions_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.CompletionServiceRestInterceptor, "pre_import_completion_suggestions" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = import_config.ImportCompletionSuggestionsRequest.pb( import_config.ImportCompletionSuggestionsRequest() ) @@ -3938,6 +4012,7 @@ def 
test_import_completion_suggestions_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.import_completion_suggestions( request, @@ -3949,6 +4024,7 @@ def test_import_completion_suggestions_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_purge_completion_suggestions_rest_bad_request( @@ -4033,10 +4109,14 @@ def test_purge_completion_suggestions_rest_interceptors(null_interceptor): ), mock.patch.object( transports.CompletionServiceRestInterceptor, "post_purge_completion_suggestions" ) as post, mock.patch.object( + transports.CompletionServiceRestInterceptor, + "post_purge_completion_suggestions_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.CompletionServiceRestInterceptor, "pre_purge_completion_suggestions" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = purge_config.PurgeCompletionSuggestionsRequest.pb( purge_config.PurgeCompletionSuggestionsRequest() ) @@ -4060,6 +4140,7 @@ def test_purge_completion_suggestions_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.purge_completion_suggestions( request, @@ -4071,6 +4152,7 @@ def test_purge_completion_suggestions_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_cancel_operation_rest_bad_request( diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_control_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_control_service.py index 1c69a48b17f3..762e2480c1b7 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_control_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_control_service.py @@ -67,6 +67,13 @@ from google.cloud.discoveryengine_v1.types import control as gcd_control from google.cloud.discoveryengine_v1.types import control_service +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -325,6 +332,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
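# --- Editor's illustration (not part of the generated diff) -----------------
# The _add_cred_info_for_auth_errors change above means 401/403/404 failures
# can surface credential details to the caller. A hedged sketch of what an
# application might observe; the resource name is hypothetical, and the extra
# detail only appears when the active credential implements get_cred_info()
# (google-auth >= 2.35.0).
from google.api_core import exceptions as core_exceptions
from google.cloud import discoveryengine_v1

client = discoveryengine_v1.ControlServiceClient()
try:
    client.get_control(
        name="projects/my-project/locations/global/dataStores/my-store/controls/my-control"
    )
except core_exceptions.PermissionDenied as exc:
    # exc.details may now end with a JSON string shaped like
    # {"credential_source": ..., "credential_type": ..., "principal": ...}.
    for detail in exc.details:
        print(detail)
# -----------------------------------------------------------------------------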
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = ControlServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = ControlServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -4545,10 +4595,13 @@ def test_create_control_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ControlServiceRestInterceptor, "post_create_control" ) as post, mock.patch.object( + transports.ControlServiceRestInterceptor, "post_create_control_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ControlServiceRestInterceptor, "pre_create_control" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = control_service.CreateControlRequest.pb( control_service.CreateControlRequest() ) @@ -4572,6 +4625,7 @@ def test_create_control_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gcd_control.Control() + post_with_metadata.return_value = gcd_control.Control(), metadata client.create_control( request, @@ -4583,6 +4637,7 @@ def test_create_control_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_control_rest_bad_request( @@ -4892,10 +4947,13 @@ def test_update_control_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ControlServiceRestInterceptor, "post_update_control" ) as post, mock.patch.object( + transports.ControlServiceRestInterceptor, "post_update_control_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ControlServiceRestInterceptor, "pre_update_control" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = control_service.UpdateControlRequest.pb( control_service.UpdateControlRequest() ) @@ -4919,6 +4977,7 @@ def test_update_control_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gcd_control.Control() + post_with_metadata.return_value = gcd_control.Control(), metadata client.update_control( request, @@ -4930,6 +4989,7 @@ def test_update_control_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def 
test_get_control_rest_bad_request(request_type=control_service.GetControlRequest): @@ -5026,10 +5086,13 @@ def test_get_control_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ControlServiceRestInterceptor, "post_get_control" ) as post, mock.patch.object( + transports.ControlServiceRestInterceptor, "post_get_control_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ControlServiceRestInterceptor, "pre_get_control" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = control_service.GetControlRequest.pb( control_service.GetControlRequest() ) @@ -5053,6 +5116,7 @@ def test_get_control_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = control.Control() + post_with_metadata.return_value = control.Control(), metadata client.get_control( request, @@ -5064,6 +5128,7 @@ def test_get_control_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_controls_rest_bad_request( @@ -5148,10 +5213,13 @@ def test_list_controls_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ControlServiceRestInterceptor, "post_list_controls" ) as post, mock.patch.object( + transports.ControlServiceRestInterceptor, "post_list_controls_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ControlServiceRestInterceptor, "pre_list_controls" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = control_service.ListControlsRequest.pb( control_service.ListControlsRequest() ) @@ -5177,6 +5245,10 @@ def test_list_controls_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = control_service.ListControlsResponse() + post_with_metadata.return_value = ( + control_service.ListControlsResponse(), + metadata, + ) client.list_controls( request, @@ -5188,6 +5260,7 @@ def test_list_controls_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_cancel_operation_rest_bad_request( diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_conversational_search_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_conversational_search_service.py index b0894d71fab3..a126189e51b5 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_conversational_search_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_conversational_search_service.py @@ -72,6 +72,13 @@ from google.cloud.discoveryengine_v1.types import session from google.cloud.discoveryengine_v1.types import session as gcd_session +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -355,6 +362,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
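# --- Editor's illustration (not part of the generated diff) -----------------
# The new *_with_metadata interceptors receive the response together with the
# HTTP response headers, so a subclass can read headers without overriding the
# now-deprecated post_* hooks. A minimal sketch; the header handling below is
# an example, not generated behaviour.
from typing import Sequence, Tuple, Union

from google.cloud.discoveryengine_v1.services.conversational_search_service import (
    transports,
)
from google.cloud.discoveryengine_v1.types import conversational_search_service


class HeaderAwareInterceptor(transports.ConversationalSearchServiceRestInterceptor):
    def post_answer_query_with_metadata(
        self,
        response: conversational_search_service.AnswerQueryResponse,
        metadata: Sequence[Tuple[str, Union[str, bytes]]],
    ) -> Tuple[
        conversational_search_service.AnswerQueryResponse,
        Sequence[Tuple[str, Union[str, bytes]]],
    ]:
        # metadata carries the response headers as (key, value) pairs.
        headers = dict(metadata)
        print("answer_query returned content-type:", headers.get("content-type"))
        return response, metadata
# -----------------------------------------------------------------------------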
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = ConversationalSearchServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = ConversationalSearchServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -9253,11 +9303,15 @@ def test_converse_conversation_rest_interceptors(null_interceptor): transports.ConversationalSearchServiceRestInterceptor, "post_converse_conversation", ) as post, mock.patch.object( + transports.ConversationalSearchServiceRestInterceptor, + "post_converse_conversation_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ConversationalSearchServiceRestInterceptor, "pre_converse_conversation", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = conversational_search_service.ConverseConversationRequest.pb( conversational_search_service.ConverseConversationRequest() ) @@ -9285,6 +9339,10 @@ def test_converse_conversation_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = conversational_search_service.ConverseConversationResponse() + post_with_metadata.return_value = ( + conversational_search_service.ConverseConversationResponse(), + metadata, + ) client.converse_conversation( request, @@ -9296,6 +9354,7 @@ def test_converse_conversation_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_conversation_rest_bad_request( @@ -9511,10 +9570,14 @@ def test_create_conversation_rest_interceptors(null_interceptor): transports.ConversationalSearchServiceRestInterceptor, "post_create_conversation", ) as post, mock.patch.object( + transports.ConversationalSearchServiceRestInterceptor, + "post_create_conversation_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ConversationalSearchServiceRestInterceptor, "pre_create_conversation" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = conversational_search_service.CreateConversationRequest.pb( conversational_search_service.CreateConversationRequest() ) @@ -9540,6 +9603,7 @@ def test_create_conversation_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gcd_conversation.Conversation() + post_with_metadata.return_value = 
gcd_conversation.Conversation(), metadata client.create_conversation( request, @@ -9551,6 +9615,7 @@ def test_create_conversation_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_conversation_rest_bad_request( @@ -9887,10 +9952,14 @@ def test_update_conversation_rest_interceptors(null_interceptor): transports.ConversationalSearchServiceRestInterceptor, "post_update_conversation", ) as post, mock.patch.object( + transports.ConversationalSearchServiceRestInterceptor, + "post_update_conversation_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ConversationalSearchServiceRestInterceptor, "pre_update_conversation" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = conversational_search_service.UpdateConversationRequest.pb( conversational_search_service.UpdateConversationRequest() ) @@ -9916,6 +9985,7 @@ def test_update_conversation_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gcd_conversation.Conversation() + post_with_metadata.return_value = gcd_conversation.Conversation(), metadata client.update_conversation( request, @@ -9927,6 +9997,7 @@ def test_update_conversation_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_conversation_rest_bad_request( @@ -10019,10 +10090,14 @@ def test_get_conversation_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ConversationalSearchServiceRestInterceptor, "post_get_conversation" ) as post, mock.patch.object( + transports.ConversationalSearchServiceRestInterceptor, + "post_get_conversation_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ConversationalSearchServiceRestInterceptor, "pre_get_conversation" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = conversational_search_service.GetConversationRequest.pb( conversational_search_service.GetConversationRequest() ) @@ -10046,6 +10121,7 @@ def test_get_conversation_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = conversation.Conversation() + post_with_metadata.return_value = conversation.Conversation(), metadata client.get_conversation( request, @@ -10057,6 +10133,7 @@ def test_get_conversation_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_conversations_rest_bad_request( @@ -10143,10 +10220,14 @@ def test_list_conversations_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ConversationalSearchServiceRestInterceptor, "post_list_conversations" ) as post, mock.patch.object( + transports.ConversationalSearchServiceRestInterceptor, + "post_list_conversations_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ConversationalSearchServiceRestInterceptor, "pre_list_conversations" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = conversational_search_service.ListConversationsRequest.pb( conversational_search_service.ListConversationsRequest() ) @@ -10172,6 +10253,10 @@ def test_list_conversations_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = 
conversational_search_service.ListConversationsResponse() + post_with_metadata.return_value = ( + conversational_search_service.ListConversationsResponse(), + metadata, + ) client.list_conversations( request, @@ -10183,6 +10268,7 @@ def test_list_conversations_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_answer_query_rest_bad_request( @@ -10273,10 +10359,14 @@ def test_answer_query_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ConversationalSearchServiceRestInterceptor, "post_answer_query" ) as post, mock.patch.object( + transports.ConversationalSearchServiceRestInterceptor, + "post_answer_query_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ConversationalSearchServiceRestInterceptor, "pre_answer_query" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = conversational_search_service.AnswerQueryRequest.pb( conversational_search_service.AnswerQueryRequest() ) @@ -10302,6 +10392,10 @@ def test_answer_query_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = conversational_search_service.AnswerQueryResponse() + post_with_metadata.return_value = ( + conversational_search_service.AnswerQueryResponse(), + metadata, + ) client.answer_query( request, @@ -10313,6 +10407,7 @@ def test_answer_query_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_answer_rest_bad_request( @@ -10413,10 +10508,14 @@ def test_get_answer_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ConversationalSearchServiceRestInterceptor, "post_get_answer" ) as post, mock.patch.object( + transports.ConversationalSearchServiceRestInterceptor, + "post_get_answer_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ConversationalSearchServiceRestInterceptor, "pre_get_answer" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = conversational_search_service.GetAnswerRequest.pb( conversational_search_service.GetAnswerRequest() ) @@ -10440,6 +10539,7 @@ def test_get_answer_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = answer.Answer() + post_with_metadata.return_value = answer.Answer(), metadata client.get_answer( request, @@ -10451,6 +10551,7 @@ def test_get_answer_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_session_rest_bad_request( @@ -10621,10 +10722,14 @@ def test_create_session_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ConversationalSearchServiceRestInterceptor, "post_create_session" ) as post, mock.patch.object( + transports.ConversationalSearchServiceRestInterceptor, + "post_create_session_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ConversationalSearchServiceRestInterceptor, "pre_create_session" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = conversational_search_service.CreateSessionRequest.pb( conversational_search_service.CreateSessionRequest() ) @@ -10648,6 +10753,7 @@ def test_create_session_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata 
post.return_value = gcd_session.Session() + post_with_metadata.return_value = gcd_session.Session(), metadata client.create_session( request, @@ -10659,6 +10765,7 @@ def test_create_session_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_session_rest_bad_request( @@ -10950,10 +11057,14 @@ def test_update_session_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ConversationalSearchServiceRestInterceptor, "post_update_session" ) as post, mock.patch.object( + transports.ConversationalSearchServiceRestInterceptor, + "post_update_session_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ConversationalSearchServiceRestInterceptor, "pre_update_session" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = conversational_search_service.UpdateSessionRequest.pb( conversational_search_service.UpdateSessionRequest() ) @@ -10977,6 +11088,7 @@ def test_update_session_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gcd_session.Session() + post_with_metadata.return_value = gcd_session.Session(), metadata client.update_session( request, @@ -10988,6 +11100,7 @@ def test_update_session_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_session_rest_bad_request( @@ -11080,10 +11193,14 @@ def test_get_session_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ConversationalSearchServiceRestInterceptor, "post_get_session" ) as post, mock.patch.object( + transports.ConversationalSearchServiceRestInterceptor, + "post_get_session_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ConversationalSearchServiceRestInterceptor, "pre_get_session" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = conversational_search_service.GetSessionRequest.pb( conversational_search_service.GetSessionRequest() ) @@ -11107,6 +11224,7 @@ def test_get_session_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = session.Session() + post_with_metadata.return_value = session.Session(), metadata client.get_session( request, @@ -11118,6 +11236,7 @@ def test_get_session_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_sessions_rest_bad_request( @@ -11204,10 +11323,14 @@ def test_list_sessions_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ConversationalSearchServiceRestInterceptor, "post_list_sessions" ) as post, mock.patch.object( + transports.ConversationalSearchServiceRestInterceptor, + "post_list_sessions_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ConversationalSearchServiceRestInterceptor, "pre_list_sessions" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = conversational_search_service.ListSessionsRequest.pb( conversational_search_service.ListSessionsRequest() ) @@ -11233,6 +11356,10 @@ def test_list_sessions_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = conversational_search_service.ListSessionsResponse() + post_with_metadata.return_value = ( + 
conversational_search_service.ListSessionsResponse(), + metadata, + ) client.list_sessions( request, @@ -11244,6 +11371,7 @@ def test_list_sessions_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_cancel_operation_rest_bad_request( diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_data_store_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_data_store_service.py index 11bf2a84feee..26aad45e31ec 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_data_store_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_data_store_service.py @@ -82,6 +82,13 @@ from google.cloud.discoveryengine_v1.types import data_store from google.cloud.discoveryengine_v1.types import data_store as gcd_data_store +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -341,6 +348,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = DataStoreServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = DataStoreServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -4655,10 +4705,14 @@ def test_create_data_store_rest_interceptors(null_interceptor): ), mock.patch.object( transports.DataStoreServiceRestInterceptor, "post_create_data_store" ) as post, mock.patch.object( + transports.DataStoreServiceRestInterceptor, + "post_create_data_store_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DataStoreServiceRestInterceptor, "pre_create_data_store" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = data_store_service.CreateDataStoreRequest.pb( data_store_service.CreateDataStoreRequest() ) @@ -4682,6 +4736,7 @@ def test_create_data_store_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + 
post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_data_store( request, @@ -4693,6 +4748,7 @@ def test_create_data_store_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_data_store_rest_bad_request( @@ -4787,10 +4843,13 @@ def test_get_data_store_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DataStoreServiceRestInterceptor, "post_get_data_store" ) as post, mock.patch.object( + transports.DataStoreServiceRestInterceptor, "post_get_data_store_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DataStoreServiceRestInterceptor, "pre_get_data_store" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = data_store_service.GetDataStoreRequest.pb( data_store_service.GetDataStoreRequest() ) @@ -4814,6 +4873,7 @@ def test_get_data_store_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = data_store.DataStore() + post_with_metadata.return_value = data_store.DataStore(), metadata client.get_data_store( request, @@ -4825,6 +4885,7 @@ def test_get_data_store_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_data_stores_rest_bad_request( @@ -4909,10 +4970,14 @@ def test_list_data_stores_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DataStoreServiceRestInterceptor, "post_list_data_stores" ) as post, mock.patch.object( + transports.DataStoreServiceRestInterceptor, + "post_list_data_stores_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DataStoreServiceRestInterceptor, "pre_list_data_stores" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = data_store_service.ListDataStoresRequest.pb( data_store_service.ListDataStoresRequest() ) @@ -4938,6 +5003,10 @@ def test_list_data_stores_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = data_store_service.ListDataStoresResponse() + post_with_metadata.return_value = ( + data_store_service.ListDataStoresResponse(), + metadata, + ) client.list_data_stores( request, @@ -4949,6 +5018,7 @@ def test_list_data_stores_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_data_store_rest_bad_request( @@ -5029,10 +5099,14 @@ def test_delete_data_store_rest_interceptors(null_interceptor): ), mock.patch.object( transports.DataStoreServiceRestInterceptor, "post_delete_data_store" ) as post, mock.patch.object( + transports.DataStoreServiceRestInterceptor, + "post_delete_data_store_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DataStoreServiceRestInterceptor, "pre_delete_data_store" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = data_store_service.DeleteDataStoreRequest.pb( data_store_service.DeleteDataStoreRequest() ) @@ -5056,6 +5130,7 @@ def test_delete_data_store_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.delete_data_store( request, @@ -5067,6 +5142,7 @@ def 
test_delete_data_store_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_data_store_rest_bad_request( @@ -5281,10 +5357,14 @@ def test_update_data_store_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DataStoreServiceRestInterceptor, "post_update_data_store" ) as post, mock.patch.object( + transports.DataStoreServiceRestInterceptor, + "post_update_data_store_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DataStoreServiceRestInterceptor, "pre_update_data_store" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = data_store_service.UpdateDataStoreRequest.pb( data_store_service.UpdateDataStoreRequest() ) @@ -5308,6 +5388,7 @@ def test_update_data_store_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gcd_data_store.DataStore() + post_with_metadata.return_value = gcd_data_store.DataStore(), metadata client.update_data_store( request, @@ -5319,6 +5400,7 @@ def test_update_data_store_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_cancel_operation_rest_bad_request( diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_document_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_document_service.py index cfd86eff0c52..b827f3658547 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_document_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_document_service.py @@ -83,6 +83,13 @@ from google.cloud.discoveryengine_v1.types import document from google.cloud.discoveryengine_v1.types import document as gcd_document +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -341,6 +348,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
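# --- Editor's illustration (not part of the generated diff) -----------------
# The CRED_INFO_JSON fixture above mirrors the dictionary shape that google-auth
# credentials may return from get_cred_info() (available in google-auth >= 2.35.0,
# per the client change earlier in this diff). A hedged sketch of inspecting it
# on application-default credentials; some credential types return None or do
# not define the method at all.
import google.auth

credentials, _ = google.auth.default()
if hasattr(credentials, "get_cred_info"):
    info = credentials.get_cred_info()  # dict like CRED_INFO_JSON, or None
    print(info)
else:
    print("this credential type does not expose get_cred_info()")
# -----------------------------------------------------------------------------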
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = DocumentServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = DocumentServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -6013,10 +6063,13 @@ def test_get_document_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DocumentServiceRestInterceptor, "post_get_document" ) as post, mock.patch.object( + transports.DocumentServiceRestInterceptor, "post_get_document_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DocumentServiceRestInterceptor, "pre_get_document" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = document_service.GetDocumentRequest.pb( document_service.GetDocumentRequest() ) @@ -6040,6 +6093,7 @@ def test_get_document_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = document.Document() + post_with_metadata.return_value = document.Document(), metadata client.get_document( request, @@ -6051,6 +6105,7 @@ def test_get_document_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_documents_rest_bad_request( @@ -6139,10 +6194,13 @@ def test_list_documents_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DocumentServiceRestInterceptor, "post_list_documents" ) as post, mock.patch.object( + transports.DocumentServiceRestInterceptor, "post_list_documents_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DocumentServiceRestInterceptor, "pre_list_documents" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = document_service.ListDocumentsRequest.pb( document_service.ListDocumentsRequest() ) @@ -6168,6 +6226,10 @@ def test_list_documents_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = document_service.ListDocumentsResponse() + post_with_metadata.return_value = ( + document_service.ListDocumentsResponse(), + metadata, + ) client.list_documents( request, @@ -6179,6 +6241,7 @@ def test_list_documents_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def 
test_create_document_rest_bad_request( @@ -6371,10 +6434,13 @@ def test_create_document_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DocumentServiceRestInterceptor, "post_create_document" ) as post, mock.patch.object( + transports.DocumentServiceRestInterceptor, "post_create_document_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DocumentServiceRestInterceptor, "pre_create_document" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = document_service.CreateDocumentRequest.pb( document_service.CreateDocumentRequest() ) @@ -6398,6 +6464,7 @@ def test_create_document_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gcd_document.Document() + post_with_metadata.return_value = gcd_document.Document(), metadata client.create_document( request, @@ -6409,6 +6476,7 @@ def test_create_document_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_document_rest_bad_request( @@ -6605,10 +6673,13 @@ def test_update_document_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DocumentServiceRestInterceptor, "post_update_document" ) as post, mock.patch.object( + transports.DocumentServiceRestInterceptor, "post_update_document_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DocumentServiceRestInterceptor, "pre_update_document" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = document_service.UpdateDocumentRequest.pb( document_service.UpdateDocumentRequest() ) @@ -6632,6 +6703,7 @@ def test_update_document_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gcd_document.Document() + post_with_metadata.return_value = gcd_document.Document(), metadata client.update_document( request, @@ -6643,6 +6715,7 @@ def test_update_document_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_document_rest_bad_request( @@ -6840,10 +6913,13 @@ def test_import_documents_rest_interceptors(null_interceptor): ), mock.patch.object( transports.DocumentServiceRestInterceptor, "post_import_documents" ) as post, mock.patch.object( + transports.DocumentServiceRestInterceptor, "post_import_documents_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DocumentServiceRestInterceptor, "pre_import_documents" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = import_config.ImportDocumentsRequest.pb( import_config.ImportDocumentsRequest() ) @@ -6867,6 +6943,7 @@ def test_import_documents_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.import_documents( request, @@ -6878,6 +6955,7 @@ def test_import_documents_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_purge_documents_rest_bad_request( @@ -6962,10 +7040,13 @@ def test_purge_documents_rest_interceptors(null_interceptor): ), mock.patch.object( transports.DocumentServiceRestInterceptor, "post_purge_documents" ) as post, mock.patch.object( + 
transports.DocumentServiceRestInterceptor, "post_purge_documents_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DocumentServiceRestInterceptor, "pre_purge_documents" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = purge_config.PurgeDocumentsRequest.pb( purge_config.PurgeDocumentsRequest() ) @@ -6989,6 +7070,7 @@ def test_purge_documents_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.purge_documents( request, @@ -7000,6 +7082,7 @@ def test_purge_documents_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_batch_get_documents_metadata_rest_bad_request( @@ -7087,10 +7170,14 @@ def test_batch_get_documents_metadata_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DocumentServiceRestInterceptor, "post_batch_get_documents_metadata" ) as post, mock.patch.object( + transports.DocumentServiceRestInterceptor, + "post_batch_get_documents_metadata_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DocumentServiceRestInterceptor, "pre_batch_get_documents_metadata" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = document_service.BatchGetDocumentsMetadataRequest.pb( document_service.BatchGetDocumentsMetadataRequest() ) @@ -7116,6 +7203,10 @@ def test_batch_get_documents_metadata_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = document_service.BatchGetDocumentsMetadataResponse() + post_with_metadata.return_value = ( + document_service.BatchGetDocumentsMetadataResponse(), + metadata, + ) client.batch_get_documents_metadata( request, @@ -7127,6 +7218,7 @@ def test_batch_get_documents_metadata_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_cancel_operation_rest_bad_request( diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_engine_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_engine_service.py index ca2de3026666..282af8a5bd44 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_engine_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_engine_service.py @@ -77,6 +77,13 @@ from google.cloud.discoveryengine_v1.types import engine as gcd_engine from google.cloud.discoveryengine_v1.types import engine_service +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -331,6 +338,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
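# --- Editor's illustration (not part of the generated diff) -----------------
# Sketch of wiring a custom interceptor (like the subclass sketched earlier)
# into a client: the generated REST transport accepts an `interceptor`
# argument. Anonymous credentials are used only to keep the sketch
# self-contained; a real application would rely on its normal credentials.
from google.auth.credentials import AnonymousCredentials

from google.cloud.discoveryengine_v1 import EngineServiceClient
from google.cloud.discoveryengine_v1.services.engine_service import transports


class LoggingEngineInterceptor(transports.EngineServiceRestInterceptor):
    def post_get_engine_with_metadata(self, response, metadata):
        # Log the response headers delivered alongside the Engine resource.
        print("get_engine response headers:", dict(metadata))
        return response, metadata


transport = transports.EngineServiceRestTransport(
    credentials=AnonymousCredentials(),
    interceptor=LoggingEngineInterceptor(),
)
client = EngineServiceClient(transport=transport)
# -----------------------------------------------------------------------------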
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = EngineServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = EngineServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -4597,10 +4647,13 @@ def test_create_engine_rest_interceptors(null_interceptor): ), mock.patch.object( transports.EngineServiceRestInterceptor, "post_create_engine" ) as post, mock.patch.object( + transports.EngineServiceRestInterceptor, "post_create_engine_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.EngineServiceRestInterceptor, "pre_create_engine" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = engine_service.CreateEngineRequest.pb( engine_service.CreateEngineRequest() ) @@ -4624,6 +4677,7 @@ def test_create_engine_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_engine( request, @@ -4635,6 +4689,7 @@ def test_create_engine_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_engine_rest_bad_request( @@ -4719,10 +4774,13 @@ def test_delete_engine_rest_interceptors(null_interceptor): ), mock.patch.object( transports.EngineServiceRestInterceptor, "post_delete_engine" ) as post, mock.patch.object( + transports.EngineServiceRestInterceptor, "post_delete_engine_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.EngineServiceRestInterceptor, "pre_delete_engine" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = engine_service.DeleteEngineRequest.pb( engine_service.DeleteEngineRequest() ) @@ -4746,6 +4804,7 @@ def test_delete_engine_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.delete_engine( request, @@ -4757,6 +4816,7 @@ def test_delete_engine_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_engine_rest_bad_request( @@ -4948,10 +5008,13 @@ def 
test_update_engine_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.EngineServiceRestInterceptor, "post_update_engine" ) as post, mock.patch.object( + transports.EngineServiceRestInterceptor, "post_update_engine_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.EngineServiceRestInterceptor, "pre_update_engine" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = engine_service.UpdateEngineRequest.pb( engine_service.UpdateEngineRequest() ) @@ -4975,6 +5038,7 @@ def test_update_engine_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gcd_engine.Engine() + post_with_metadata.return_value = gcd_engine.Engine(), metadata client.update_engine( request, @@ -4986,6 +5050,7 @@ def test_update_engine_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_engine_rest_bad_request(request_type=engine_service.GetEngineRequest): @@ -5082,10 +5147,13 @@ def test_get_engine_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.EngineServiceRestInterceptor, "post_get_engine" ) as post, mock.patch.object( + transports.EngineServiceRestInterceptor, "post_get_engine_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.EngineServiceRestInterceptor, "pre_get_engine" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = engine_service.GetEngineRequest.pb( engine_service.GetEngineRequest() ) @@ -5109,6 +5177,7 @@ def test_get_engine_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = engine.Engine() + post_with_metadata.return_value = engine.Engine(), metadata client.get_engine( request, @@ -5120,6 +5189,7 @@ def test_get_engine_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_engines_rest_bad_request(request_type=engine_service.ListEnginesRequest): @@ -5202,10 +5272,13 @@ def test_list_engines_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.EngineServiceRestInterceptor, "post_list_engines" ) as post, mock.patch.object( + transports.EngineServiceRestInterceptor, "post_list_engines_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.EngineServiceRestInterceptor, "pre_list_engines" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = engine_service.ListEnginesRequest.pb( engine_service.ListEnginesRequest() ) @@ -5231,6 +5304,7 @@ def test_list_engines_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = engine_service.ListEnginesResponse() + post_with_metadata.return_value = engine_service.ListEnginesResponse(), metadata client.list_engines( request, @@ -5242,6 +5316,7 @@ def test_list_engines_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_cancel_operation_rest_bad_request( diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_grounded_generation_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_grounded_generation_service.py index 0d1cf496db15..53d7497fda5b 100644 --- 
a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_grounded_generation_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_grounded_generation_service.py @@ -61,6 +61,13 @@ ) from google.cloud.discoveryengine_v1.types import grounded_generation_service, grounding +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -340,6 +347,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = GroundedGenerationServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = GroundedGenerationServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -2393,11 +2443,15 @@ def test_generate_grounded_content_rest_interceptors(null_interceptor): transports.GroundedGenerationServiceRestInterceptor, "post_generate_grounded_content", ) as post, mock.patch.object( + transports.GroundedGenerationServiceRestInterceptor, + "post_generate_grounded_content_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.GroundedGenerationServiceRestInterceptor, "pre_generate_grounded_content", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = grounded_generation_service.GenerateGroundedContentRequest.pb( grounded_generation_service.GenerateGroundedContentRequest() ) @@ -2427,6 +2481,10 @@ def test_generate_grounded_content_rest_interceptors(null_interceptor): post.return_value = ( grounded_generation_service.GenerateGroundedContentResponse() ) + post_with_metadata.return_value = ( + grounded_generation_service.GenerateGroundedContentResponse(), + metadata, + ) client.generate_grounded_content( request, @@ -2438,6 +2496,7 @@ def test_generate_grounded_content_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_check_grounding_rest_bad_request( @@ -2528,10 +2587,14 @@ def test_check_grounding_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( 
transports.GroundedGenerationServiceRestInterceptor, "post_check_grounding" ) as post, mock.patch.object( + transports.GroundedGenerationServiceRestInterceptor, + "post_check_grounding_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.GroundedGenerationServiceRestInterceptor, "pre_check_grounding" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = grounded_generation_service.CheckGroundingRequest.pb( grounded_generation_service.CheckGroundingRequest() ) @@ -2557,6 +2620,10 @@ def test_check_grounding_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = grounded_generation_service.CheckGroundingResponse() + post_with_metadata.return_value = ( + grounded_generation_service.CheckGroundingResponse(), + metadata, + ) client.check_grounding( request, @@ -2568,6 +2635,7 @@ def test_check_grounding_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_cancel_operation_rest_bad_request( diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_project_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_project_service.py index 53292f403612..5b515f2539ab 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_project_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_project_service.py @@ -70,6 +70,13 @@ ) from google.cloud.discoveryengine_v1.types import project, project_service +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -328,6 +335,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
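Alongside the credential-info tests, every REST interceptor test in this change patches a third hook, post_<rpc>_with_metadata, and the mocked return values show it is expected to hand back a (response, metadata) tuple rather than the bare response returned by the existing post_<rpc> hook. A rough sketch of a user-written interceptor overriding one of these hooks follows; the exact signature is an assumption inferred from these tests, not copied from the generated transport code:

import logging

from google.cloud.discoveryengine_v1.services.search_service import transports

class LoggingSearchInterceptor(transports.SearchServiceRestInterceptor):
    # Assumed signature: the tests only show that the hook is invoked once per
    # call and must return a (response, metadata) tuple.
    def post_search_with_metadata(self, response, metadata):
        logging.info("search returned %d results, metadata=%r",
                     len(response.results), metadata)
        return response, metadata

Such an interceptor would typically be wired in through the REST transport's interceptor argument, mirroring the null_interceptor parameter these tests are parametrized over.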
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = ProjectServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = ProjectServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -1866,10 +1916,13 @@ def test_provision_project_rest_interceptors(null_interceptor): ), mock.patch.object( transports.ProjectServiceRestInterceptor, "post_provision_project" ) as post, mock.patch.object( + transports.ProjectServiceRestInterceptor, "post_provision_project_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ProjectServiceRestInterceptor, "pre_provision_project" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = project_service.ProvisionProjectRequest.pb( project_service.ProvisionProjectRequest() ) @@ -1893,6 +1946,7 @@ def test_provision_project_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.provision_project( request, @@ -1904,6 +1958,7 @@ def test_provision_project_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_cancel_operation_rest_bad_request( diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_rank_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_rank_service.py index 5e446f3a7bf3..87fe0b150723 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_rank_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_rank_service.py @@ -61,6 +61,13 @@ ) from google.cloud.discoveryengine_v1.types import rank_service +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -300,6 +307,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = RankServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = RankServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -1641,10 +1691,13 @@ def test_rank_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RankServiceRestInterceptor, "post_rank" ) as post, mock.patch.object( + transports.RankServiceRestInterceptor, "post_rank_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.RankServiceRestInterceptor, "pre_rank" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = rank_service.RankRequest.pb(rank_service.RankRequest()) transcode.return_value = { "method": "post", @@ -1666,6 +1719,7 @@ def test_rank_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = rank_service.RankResponse() + post_with_metadata.return_value = rank_service.RankResponse(), metadata client.rank( request, @@ -1677,6 +1731,7 @@ def test_rank_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_cancel_operation_rest_bad_request( diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_recommendation_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_recommendation_service.py index 9c854fe0cab4..16a1cdda4bdc 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_recommendation_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_recommendation_service.py @@ -68,6 +68,13 @@ user_event, ) +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -343,6 +350,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = RecommendationServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = RecommendationServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -1776,10 +1826,13 @@ def test_recommend_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RecommendationServiceRestInterceptor, "post_recommend" ) as post, mock.patch.object( + transports.RecommendationServiceRestInterceptor, "post_recommend_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.RecommendationServiceRestInterceptor, "pre_recommend" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = recommendation_service.RecommendRequest.pb( recommendation_service.RecommendRequest() ) @@ -1805,6 +1858,10 @@ def test_recommend_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = recommendation_service.RecommendResponse() + post_with_metadata.return_value = ( + recommendation_service.RecommendResponse(), + metadata, + ) client.recommend( request, @@ -1816,6 +1873,7 @@ def test_recommend_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_cancel_operation_rest_bad_request( diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_schema_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_schema_service.py index 17025bf91f2c..4cd86d2e0e77 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_schema_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_schema_service.py @@ -75,6 +75,13 @@ from google.cloud.discoveryengine_v1.types import schema as gcd_schema from google.cloud.discoveryengine_v1.types import schema_service +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -329,6 +336,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = SchemaServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = SchemaServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -4279,10 +4329,13 @@ def test_get_schema_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.SchemaServiceRestInterceptor, "post_get_schema" ) as post, mock.patch.object( + transports.SchemaServiceRestInterceptor, "post_get_schema_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.SchemaServiceRestInterceptor, "pre_get_schema" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = schema_service.GetSchemaRequest.pb( schema_service.GetSchemaRequest() ) @@ -4306,6 +4359,7 @@ def test_get_schema_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = schema.Schema() + post_with_metadata.return_value = schema.Schema(), metadata client.get_schema( request, @@ -4317,6 +4371,7 @@ def test_get_schema_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_schemas_rest_bad_request(request_type=schema_service.ListSchemasRequest): @@ -4399,10 +4454,13 @@ def test_list_schemas_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.SchemaServiceRestInterceptor, "post_list_schemas" ) as post, mock.patch.object( + transports.SchemaServiceRestInterceptor, "post_list_schemas_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.SchemaServiceRestInterceptor, "pre_list_schemas" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = schema_service.ListSchemasRequest.pb( schema_service.ListSchemasRequest() ) @@ -4428,6 +4486,7 @@ def test_list_schemas_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = schema_service.ListSchemasResponse() + post_with_metadata.return_value = schema_service.ListSchemasResponse(), metadata client.list_schemas( request, @@ -4439,6 +4498,7 @@ def test_list_schemas_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_schema_rest_bad_request( @@ -4591,10 +4651,13 @@ def 
test_create_schema_rest_interceptors(null_interceptor): ), mock.patch.object( transports.SchemaServiceRestInterceptor, "post_create_schema" ) as post, mock.patch.object( + transports.SchemaServiceRestInterceptor, "post_create_schema_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.SchemaServiceRestInterceptor, "pre_create_schema" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = schema_service.CreateSchemaRequest.pb( schema_service.CreateSchemaRequest() ) @@ -4618,6 +4681,7 @@ def test_create_schema_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_schema( request, @@ -4629,6 +4693,7 @@ def test_create_schema_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_schema_rest_bad_request( @@ -4789,10 +4854,13 @@ def test_update_schema_rest_interceptors(null_interceptor): ), mock.patch.object( transports.SchemaServiceRestInterceptor, "post_update_schema" ) as post, mock.patch.object( + transports.SchemaServiceRestInterceptor, "post_update_schema_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.SchemaServiceRestInterceptor, "pre_update_schema" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = schema_service.UpdateSchemaRequest.pb( schema_service.UpdateSchemaRequest() ) @@ -4816,6 +4884,7 @@ def test_update_schema_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.update_schema( request, @@ -4827,6 +4896,7 @@ def test_update_schema_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_schema_rest_bad_request( @@ -4911,10 +4981,13 @@ def test_delete_schema_rest_interceptors(null_interceptor): ), mock.patch.object( transports.SchemaServiceRestInterceptor, "post_delete_schema" ) as post, mock.patch.object( + transports.SchemaServiceRestInterceptor, "post_delete_schema_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.SchemaServiceRestInterceptor, "pre_delete_schema" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = schema_service.DeleteSchemaRequest.pb( schema_service.DeleteSchemaRequest() ) @@ -4938,6 +5011,7 @@ def test_delete_schema_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.delete_schema( request, @@ -4949,6 +5023,7 @@ def test_delete_schema_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_cancel_operation_rest_bad_request( diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_search_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_search_service.py index f3b5c71d262f..3c5b33b5fcdf 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_search_service.py +++ 
b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_search_service.py @@ -63,6 +63,13 @@ ) from google.cloud.discoveryengine_v1.types import common, search_service +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -317,6 +324,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = SearchServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = SearchServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -2692,10 +2742,13 @@ def test_search_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.SearchServiceRestInterceptor, "post_search" ) as post, mock.patch.object( + transports.SearchServiceRestInterceptor, "post_search_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.SearchServiceRestInterceptor, "pre_search" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = search_service.SearchRequest.pb(search_service.SearchRequest()) transcode.return_value = { "method": "post", @@ -2719,6 +2772,7 @@ def test_search_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = search_service.SearchResponse() + post_with_metadata.return_value = search_service.SearchResponse(), metadata client.search( request, @@ -2730,6 +2784,7 @@ def test_search_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_search_lite_rest_bad_request(request_type=search_service.SearchRequest): @@ -2824,10 +2879,13 @@ def test_search_lite_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.SearchServiceRestInterceptor, "post_search_lite" ) as post, mock.patch.object( + transports.SearchServiceRestInterceptor, "post_search_lite_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.SearchServiceRestInterceptor, "pre_search_lite" ) as pre: pre.assert_not_called() post.assert_not_called() + 
post_with_metadata.assert_not_called() pb_message = search_service.SearchRequest.pb(search_service.SearchRequest()) transcode.return_value = { "method": "post", @@ -2851,6 +2909,7 @@ def test_search_lite_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = search_service.SearchResponse() + post_with_metadata.return_value = search_service.SearchResponse(), metadata client.search_lite( request, @@ -2862,6 +2921,7 @@ def test_search_lite_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_cancel_operation_rest_bad_request( diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_search_tuning_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_search_tuning_service.py index 6e20c9d9a6c3..a5bce0bacb9c 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_search_tuning_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_search_tuning_service.py @@ -74,6 +74,13 @@ search_tuning_service, ) +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -347,6 +354,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = SearchTuningServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = SearchTuningServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -2209,10 +2259,14 @@ def test_train_custom_model_rest_interceptors(null_interceptor): ), mock.patch.object( transports.SearchTuningServiceRestInterceptor, "post_train_custom_model" ) as post, mock.patch.object( + transports.SearchTuningServiceRestInterceptor, + "post_train_custom_model_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.SearchTuningServiceRestInterceptor, "pre_train_custom_model" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = 
search_tuning_service.TrainCustomModelRequest.pb( search_tuning_service.TrainCustomModelRequest() ) @@ -2236,6 +2290,7 @@ def test_train_custom_model_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.train_custom_model( request, @@ -2247,6 +2302,7 @@ def test_train_custom_model_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_custom_models_rest_bad_request( @@ -2332,10 +2388,14 @@ def test_list_custom_models_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.SearchTuningServiceRestInterceptor, "post_list_custom_models" ) as post, mock.patch.object( + transports.SearchTuningServiceRestInterceptor, + "post_list_custom_models_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.SearchTuningServiceRestInterceptor, "pre_list_custom_models" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = search_tuning_service.ListCustomModelsRequest.pb( search_tuning_service.ListCustomModelsRequest() ) @@ -2361,6 +2421,10 @@ def test_list_custom_models_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = search_tuning_service.ListCustomModelsResponse() + post_with_metadata.return_value = ( + search_tuning_service.ListCustomModelsResponse(), + metadata, + ) client.list_custom_models( request, @@ -2372,6 +2436,7 @@ def test_list_custom_models_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_cancel_operation_rest_bad_request( diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_site_search_engine_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_site_search_engine_service.py index 8e5abfd09483..f8d0fd78b211 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_site_search_engine_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_site_search_engine_service.py @@ -76,6 +76,13 @@ site_search_engine_service, ) +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -351,6 +358,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = SiteSearchEngineServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = SiteSearchEngineServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -8069,10 +8119,14 @@ def test_get_site_search_engine_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.SiteSearchEngineServiceRestInterceptor, "post_get_site_search_engine" ) as post, mock.patch.object( + transports.SiteSearchEngineServiceRestInterceptor, + "post_get_site_search_engine_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.SiteSearchEngineServiceRestInterceptor, "pre_get_site_search_engine" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = site_search_engine_service.GetSiteSearchEngineRequest.pb( site_search_engine_service.GetSiteSearchEngineRequest() ) @@ -8098,6 +8152,10 @@ def test_get_site_search_engine_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = site_search_engine.SiteSearchEngine() + post_with_metadata.return_value = ( + site_search_engine.SiteSearchEngine(), + metadata, + ) client.get_site_search_engine( request, @@ -8109,6 +8167,7 @@ def test_get_site_search_engine_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_target_site_rest_bad_request( @@ -8277,10 +8336,14 @@ def test_create_target_site_rest_interceptors(null_interceptor): ), mock.patch.object( transports.SiteSearchEngineServiceRestInterceptor, "post_create_target_site" ) as post, mock.patch.object( + transports.SiteSearchEngineServiceRestInterceptor, + "post_create_target_site_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.SiteSearchEngineServiceRestInterceptor, "pre_create_target_site" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = site_search_engine_service.CreateTargetSiteRequest.pb( site_search_engine_service.CreateTargetSiteRequest() ) @@ -8304,6 +8367,7 @@ def test_create_target_site_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata 
client.create_target_site( request, @@ -8315,6 +8379,7 @@ def test_create_target_site_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_batch_create_target_sites_rest_bad_request( @@ -8400,11 +8465,15 @@ def test_batch_create_target_sites_rest_interceptors(null_interceptor): transports.SiteSearchEngineServiceRestInterceptor, "post_batch_create_target_sites", ) as post, mock.patch.object( + transports.SiteSearchEngineServiceRestInterceptor, + "post_batch_create_target_sites_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.SiteSearchEngineServiceRestInterceptor, "pre_batch_create_target_sites", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = site_search_engine_service.BatchCreateTargetSitesRequest.pb( site_search_engine_service.BatchCreateTargetSitesRequest() ) @@ -8428,6 +8497,7 @@ def test_batch_create_target_sites_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.batch_create_target_sites( request, @@ -8439,6 +8509,7 @@ def test_batch_create_target_sites_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_target_site_rest_bad_request( @@ -8541,10 +8612,14 @@ def test_get_target_site_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.SiteSearchEngineServiceRestInterceptor, "post_get_target_site" ) as post, mock.patch.object( + transports.SiteSearchEngineServiceRestInterceptor, + "post_get_target_site_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.SiteSearchEngineServiceRestInterceptor, "pre_get_target_site" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = site_search_engine_service.GetTargetSiteRequest.pb( site_search_engine_service.GetTargetSiteRequest() ) @@ -8570,6 +8645,7 @@ def test_get_target_site_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = site_search_engine.TargetSite() + post_with_metadata.return_value = site_search_engine.TargetSite(), metadata client.get_target_site( request, @@ -8581,6 +8657,7 @@ def test_get_target_site_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_target_site_rest_bad_request( @@ -8753,10 +8830,14 @@ def test_update_target_site_rest_interceptors(null_interceptor): ), mock.patch.object( transports.SiteSearchEngineServiceRestInterceptor, "post_update_target_site" ) as post, mock.patch.object( + transports.SiteSearchEngineServiceRestInterceptor, + "post_update_target_site_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.SiteSearchEngineServiceRestInterceptor, "pre_update_target_site" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = site_search_engine_service.UpdateTargetSiteRequest.pb( site_search_engine_service.UpdateTargetSiteRequest() ) @@ -8780,6 +8861,7 @@ def test_update_target_site_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata 
client.update_target_site( request, @@ -8791,6 +8873,7 @@ def test_update_target_site_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_target_site_rest_bad_request( @@ -8875,10 +8958,14 @@ def test_delete_target_site_rest_interceptors(null_interceptor): ), mock.patch.object( transports.SiteSearchEngineServiceRestInterceptor, "post_delete_target_site" ) as post, mock.patch.object( + transports.SiteSearchEngineServiceRestInterceptor, + "post_delete_target_site_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.SiteSearchEngineServiceRestInterceptor, "pre_delete_target_site" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = site_search_engine_service.DeleteTargetSiteRequest.pb( site_search_engine_service.DeleteTargetSiteRequest() ) @@ -8902,6 +8989,7 @@ def test_delete_target_site_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.delete_target_site( request, @@ -8913,6 +9001,7 @@ def test_delete_target_site_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_target_sites_rest_bad_request( @@ -9005,10 +9094,14 @@ def test_list_target_sites_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.SiteSearchEngineServiceRestInterceptor, "post_list_target_sites" ) as post, mock.patch.object( + transports.SiteSearchEngineServiceRestInterceptor, + "post_list_target_sites_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.SiteSearchEngineServiceRestInterceptor, "pre_list_target_sites" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = site_search_engine_service.ListTargetSitesRequest.pb( site_search_engine_service.ListTargetSitesRequest() ) @@ -9034,6 +9127,10 @@ def test_list_target_sites_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = site_search_engine_service.ListTargetSitesResponse() + post_with_metadata.return_value = ( + site_search_engine_service.ListTargetSitesResponse(), + metadata, + ) client.list_target_sites( request, @@ -9045,6 +9142,7 @@ def test_list_target_sites_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_enable_advanced_site_search_rest_bad_request( @@ -9130,11 +9228,15 @@ def test_enable_advanced_site_search_rest_interceptors(null_interceptor): transports.SiteSearchEngineServiceRestInterceptor, "post_enable_advanced_site_search", ) as post, mock.patch.object( + transports.SiteSearchEngineServiceRestInterceptor, + "post_enable_advanced_site_search_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.SiteSearchEngineServiceRestInterceptor, "pre_enable_advanced_site_search", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = site_search_engine_service.EnableAdvancedSiteSearchRequest.pb( site_search_engine_service.EnableAdvancedSiteSearchRequest() ) @@ -9158,6 +9260,7 @@ def test_enable_advanced_site_search_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + 
post_with_metadata.return_value = operations_pb2.Operation(), metadata client.enable_advanced_site_search( request, @@ -9169,6 +9272,7 @@ def test_enable_advanced_site_search_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_disable_advanced_site_search_rest_bad_request( @@ -9254,11 +9358,15 @@ def test_disable_advanced_site_search_rest_interceptors(null_interceptor): transports.SiteSearchEngineServiceRestInterceptor, "post_disable_advanced_site_search", ) as post, mock.patch.object( + transports.SiteSearchEngineServiceRestInterceptor, + "post_disable_advanced_site_search_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.SiteSearchEngineServiceRestInterceptor, "pre_disable_advanced_site_search", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = site_search_engine_service.DisableAdvancedSiteSearchRequest.pb( site_search_engine_service.DisableAdvancedSiteSearchRequest() ) @@ -9282,6 +9390,7 @@ def test_disable_advanced_site_search_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.disable_advanced_site_search( request, @@ -9293,6 +9402,7 @@ def test_disable_advanced_site_search_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_recrawl_uris_rest_bad_request( @@ -9377,10 +9487,14 @@ def test_recrawl_uris_rest_interceptors(null_interceptor): ), mock.patch.object( transports.SiteSearchEngineServiceRestInterceptor, "post_recrawl_uris" ) as post, mock.patch.object( + transports.SiteSearchEngineServiceRestInterceptor, + "post_recrawl_uris_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.SiteSearchEngineServiceRestInterceptor, "pre_recrawl_uris" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = site_search_engine_service.RecrawlUrisRequest.pb( site_search_engine_service.RecrawlUrisRequest() ) @@ -9404,6 +9518,7 @@ def test_recrawl_uris_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.recrawl_uris( request, @@ -9415,6 +9530,7 @@ def test_recrawl_uris_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_batch_verify_target_sites_rest_bad_request( @@ -9500,11 +9616,15 @@ def test_batch_verify_target_sites_rest_interceptors(null_interceptor): transports.SiteSearchEngineServiceRestInterceptor, "post_batch_verify_target_sites", ) as post, mock.patch.object( + transports.SiteSearchEngineServiceRestInterceptor, + "post_batch_verify_target_sites_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.SiteSearchEngineServiceRestInterceptor, "pre_batch_verify_target_sites", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = site_search_engine_service.BatchVerifyTargetSitesRequest.pb( site_search_engine_service.BatchVerifyTargetSitesRequest() ) @@ -9528,6 +9648,7 @@ def test_batch_verify_target_sites_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = 
operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.batch_verify_target_sites( request, @@ -9539,6 +9660,7 @@ def test_batch_verify_target_sites_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_fetch_domain_verification_status_rest_bad_request( @@ -9634,11 +9756,15 @@ def test_fetch_domain_verification_status_rest_interceptors(null_interceptor): transports.SiteSearchEngineServiceRestInterceptor, "post_fetch_domain_verification_status", ) as post, mock.patch.object( + transports.SiteSearchEngineServiceRestInterceptor, + "post_fetch_domain_verification_status_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.SiteSearchEngineServiceRestInterceptor, "pre_fetch_domain_verification_status", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = site_search_engine_service.FetchDomainVerificationStatusRequest.pb( site_search_engine_service.FetchDomainVerificationStatusRequest() ) @@ -9668,6 +9794,10 @@ def test_fetch_domain_verification_status_rest_interceptors(null_interceptor): post.return_value = ( site_search_engine_service.FetchDomainVerificationStatusResponse() ) + post_with_metadata.return_value = ( + site_search_engine_service.FetchDomainVerificationStatusResponse(), + metadata, + ) client.fetch_domain_verification_status( request, @@ -9679,6 +9809,7 @@ def test_fetch_domain_verification_status_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_cancel_operation_rest_bad_request( diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_user_event_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_user_event_service.py index 213b446b8237..775fdc8af782 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_user_event_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1/test_user_event_service.py @@ -81,6 +81,13 @@ user_event_service, ) +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -340,6 +347,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
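The companion *_no_get_cred_info tests lean on a detail of unittest.mock: passing a list of names as the first (spec) argument restricts the mock to exactly those attributes, so mock.Mock([]) genuinely fails the hasattr check instead of auto-creating get_cred_info the way an unspecced Mock would. A quick illustration:

from unittest import mock

cred_with_info = mock.Mock(["get_cred_info"])  # spec limited to one attribute
cred_without = mock.Mock([])                   # spec with no attributes at all

assert hasattr(cred_with_info, "get_cred_info")
assert not hasattr(cred_without, "get_cred_info")  # AttributeError -> False
assert hasattr(mock.Mock(), "get_cred_info")       # unspecced Mock invents it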
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = UserEventServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = UserEventServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -3287,10 +3337,14 @@ def test_write_user_event_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.UserEventServiceRestInterceptor, "post_write_user_event" ) as post, mock.patch.object( + transports.UserEventServiceRestInterceptor, + "post_write_user_event_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.UserEventServiceRestInterceptor, "pre_write_user_event" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = user_event_service.WriteUserEventRequest.pb( user_event_service.WriteUserEventRequest() ) @@ -3314,6 +3368,7 @@ def test_write_user_event_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = user_event.UserEvent() + post_with_metadata.return_value = user_event.UserEvent(), metadata client.write_user_event( request, @@ -3325,6 +3380,7 @@ def test_write_user_event_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_collect_user_event_rest_bad_request( @@ -3408,10 +3464,14 @@ def test_collect_user_event_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.UserEventServiceRestInterceptor, "post_collect_user_event" ) as post, mock.patch.object( + transports.UserEventServiceRestInterceptor, + "post_collect_user_event_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.UserEventServiceRestInterceptor, "pre_collect_user_event" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = user_event_service.CollectUserEventRequest.pb( user_event_service.CollectUserEventRequest() ) @@ -3435,6 +3495,7 @@ def test_collect_user_event_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = httpbody_pb2.HttpBody() + post_with_metadata.return_value = httpbody_pb2.HttpBody(), metadata client.collect_user_event( request, @@ -3446,6 +3507,7 @@ def test_collect_user_event_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + 
post_with_metadata.assert_called_once() def test_purge_user_events_rest_bad_request( @@ -3526,10 +3588,14 @@ def test_purge_user_events_rest_interceptors(null_interceptor): ), mock.patch.object( transports.UserEventServiceRestInterceptor, "post_purge_user_events" ) as post, mock.patch.object( + transports.UserEventServiceRestInterceptor, + "post_purge_user_events_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.UserEventServiceRestInterceptor, "pre_purge_user_events" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = purge_config.PurgeUserEventsRequest.pb( purge_config.PurgeUserEventsRequest() ) @@ -3553,6 +3619,7 @@ def test_purge_user_events_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.purge_user_events( request, @@ -3564,6 +3631,7 @@ def test_purge_user_events_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_import_user_events_rest_bad_request( @@ -3644,10 +3712,14 @@ def test_import_user_events_rest_interceptors(null_interceptor): ), mock.patch.object( transports.UserEventServiceRestInterceptor, "post_import_user_events" ) as post, mock.patch.object( + transports.UserEventServiceRestInterceptor, + "post_import_user_events_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.UserEventServiceRestInterceptor, "pre_import_user_events" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = import_config.ImportUserEventsRequest.pb( import_config.ImportUserEventsRequest() ) @@ -3671,6 +3743,7 @@ def test_import_user_events_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.import_user_events( request, @@ -3682,6 +3755,7 @@ def test_import_user_events_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_cancel_operation_rest_bad_request( diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_acl_config_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_acl_config_service.py index e863108bf2a9..0fb4b08a0443 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_acl_config_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_acl_config_service.py @@ -65,6 +65,13 @@ common, ) +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -324,6 +331,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = AclConfigServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = AclConfigServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -2351,10 +2401,14 @@ def test_update_acl_config_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AclConfigServiceRestInterceptor, "post_update_acl_config" ) as post, mock.patch.object( + transports.AclConfigServiceRestInterceptor, + "post_update_acl_config_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AclConfigServiceRestInterceptor, "pre_update_acl_config" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = acl_config_service.UpdateAclConfigRequest.pb( acl_config_service.UpdateAclConfigRequest() ) @@ -2378,6 +2432,7 @@ def test_update_acl_config_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = acl_config.AclConfig() + post_with_metadata.return_value = acl_config.AclConfig(), metadata client.update_acl_config( request, @@ -2389,6 +2444,7 @@ def test_update_acl_config_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_acl_config_rest_bad_request( @@ -2473,10 +2529,13 @@ def test_get_acl_config_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AclConfigServiceRestInterceptor, "post_get_acl_config" ) as post, mock.patch.object( + transports.AclConfigServiceRestInterceptor, "post_get_acl_config_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AclConfigServiceRestInterceptor, "pre_get_acl_config" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = acl_config_service.GetAclConfigRequest.pb( acl_config_service.GetAclConfigRequest() ) @@ -2500,6 +2559,7 @@ def test_get_acl_config_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = acl_config.AclConfig() + post_with_metadata.return_value = acl_config.AclConfig(), metadata client.get_acl_config( request, @@ -2511,6 +2571,7 @@ def test_get_acl_config_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def 
test_cancel_operation_rest_bad_request( diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_chunk_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_chunk_service.py index f39e961c6f4d..bf0e71f98009 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_chunk_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_chunk_service.py @@ -63,6 +63,13 @@ ) from google.cloud.discoveryengine_v1alpha.types import chunk, chunk_service +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -306,6 +313,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = ChunkServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = ChunkServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -2638,10 +2688,13 @@ def test_get_chunk_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ChunkServiceRestInterceptor, "post_get_chunk" ) as post, mock.patch.object( + transports.ChunkServiceRestInterceptor, "post_get_chunk_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ChunkServiceRestInterceptor, "pre_get_chunk" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = chunk_service.GetChunkRequest.pb(chunk_service.GetChunkRequest()) transcode.return_value = { "method": "post", @@ -2663,6 +2716,7 @@ def test_get_chunk_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = chunk.Chunk() + post_with_metadata.return_value = chunk.Chunk(), metadata client.get_chunk( request, @@ -2674,6 +2728,7 @@ def test_get_chunk_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_chunks_rest_bad_request(request_type=chunk_service.ListChunksRequest): @@ -2760,10 +2815,13 @@ def 
test_list_chunks_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ChunkServiceRestInterceptor, "post_list_chunks" ) as post, mock.patch.object( + transports.ChunkServiceRestInterceptor, "post_list_chunks_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ChunkServiceRestInterceptor, "pre_list_chunks" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = chunk_service.ListChunksRequest.pb( chunk_service.ListChunksRequest() ) @@ -2789,6 +2847,7 @@ def test_list_chunks_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = chunk_service.ListChunksResponse() + post_with_metadata.return_value = chunk_service.ListChunksResponse(), metadata client.list_chunks( request, @@ -2800,6 +2859,7 @@ def test_list_chunks_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_cancel_operation_rest_bad_request( diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_completion_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_completion_service.py index 4ac78f140bdb..e04d1b81df03 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_completion_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_completion_service.py @@ -76,6 +76,13 @@ purge_config, ) +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -343,6 +350,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = CompletionServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = CompletionServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -3538,10 +3588,13 @@ def test_complete_query_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.CompletionServiceRestInterceptor, "post_complete_query" ) as post, mock.patch.object( + transports.CompletionServiceRestInterceptor, "post_complete_query_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.CompletionServiceRestInterceptor, "pre_complete_query" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = completion_service.CompleteQueryRequest.pb( completion_service.CompleteQueryRequest() ) @@ -3567,6 +3620,10 @@ def test_complete_query_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = completion_service.CompleteQueryResponse() + post_with_metadata.return_value = ( + completion_service.CompleteQueryResponse(), + metadata, + ) client.complete_query( request, @@ -3578,6 +3635,7 @@ def test_complete_query_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_import_suggestion_deny_list_entries_rest_bad_request( @@ -3663,11 +3721,15 @@ def test_import_suggestion_deny_list_entries_rest_interceptors(null_interceptor) transports.CompletionServiceRestInterceptor, "post_import_suggestion_deny_list_entries", ) as post, mock.patch.object( + transports.CompletionServiceRestInterceptor, + "post_import_suggestion_deny_list_entries_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.CompletionServiceRestInterceptor, "pre_import_suggestion_deny_list_entries", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = import_config.ImportSuggestionDenyListEntriesRequest.pb( import_config.ImportSuggestionDenyListEntriesRequest() ) @@ -3691,6 +3753,7 @@ def test_import_suggestion_deny_list_entries_rest_interceptors(null_interceptor) ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.import_suggestion_deny_list_entries( request, 
@@ -3702,6 +3765,7 @@ def test_import_suggestion_deny_list_entries_rest_interceptors(null_interceptor) pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_purge_suggestion_deny_list_entries_rest_bad_request( @@ -3787,11 +3851,15 @@ def test_purge_suggestion_deny_list_entries_rest_interceptors(null_interceptor): transports.CompletionServiceRestInterceptor, "post_purge_suggestion_deny_list_entries", ) as post, mock.patch.object( + transports.CompletionServiceRestInterceptor, + "post_purge_suggestion_deny_list_entries_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.CompletionServiceRestInterceptor, "pre_purge_suggestion_deny_list_entries", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = purge_config.PurgeSuggestionDenyListEntriesRequest.pb( purge_config.PurgeSuggestionDenyListEntriesRequest() ) @@ -3815,6 +3883,7 @@ def test_purge_suggestion_deny_list_entries_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.purge_suggestion_deny_list_entries( request, @@ -3826,6 +3895,7 @@ def test_purge_suggestion_deny_list_entries_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_import_completion_suggestions_rest_bad_request( @@ -3911,10 +3981,14 @@ def test_import_completion_suggestions_rest_interceptors(null_interceptor): transports.CompletionServiceRestInterceptor, "post_import_completion_suggestions", ) as post, mock.patch.object( + transports.CompletionServiceRestInterceptor, + "post_import_completion_suggestions_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.CompletionServiceRestInterceptor, "pre_import_completion_suggestions" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = import_config.ImportCompletionSuggestionsRequest.pb( import_config.ImportCompletionSuggestionsRequest() ) @@ -3938,6 +4012,7 @@ def test_import_completion_suggestions_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.import_completion_suggestions( request, @@ -3949,6 +4024,7 @@ def test_import_completion_suggestions_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_purge_completion_suggestions_rest_bad_request( @@ -4033,10 +4109,14 @@ def test_purge_completion_suggestions_rest_interceptors(null_interceptor): ), mock.patch.object( transports.CompletionServiceRestInterceptor, "post_purge_completion_suggestions" ) as post, mock.patch.object( + transports.CompletionServiceRestInterceptor, + "post_purge_completion_suggestions_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.CompletionServiceRestInterceptor, "pre_purge_completion_suggestions" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = purge_config.PurgeCompletionSuggestionsRequest.pb( purge_config.PurgeCompletionSuggestionsRequest() ) @@ -4060,6 +4140,7 @@ def test_purge_completion_suggestions_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = 
operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.purge_completion_suggestions( request, @@ -4071,6 +4152,7 @@ def test_purge_completion_suggestions_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_cancel_operation_rest_bad_request( diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_control_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_control_service.py index 6104722ee1dd..c1095ac5eb90 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_control_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_control_service.py @@ -67,6 +67,13 @@ from google.cloud.discoveryengine_v1alpha.types import control as gcd_control from google.cloud.discoveryengine_v1alpha.types import control_service +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -325,6 +332,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = ControlServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = ControlServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -4544,10 +4594,13 @@ def test_create_control_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ControlServiceRestInterceptor, "post_create_control" ) as post, mock.patch.object( + transports.ControlServiceRestInterceptor, "post_create_control_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ControlServiceRestInterceptor, "pre_create_control" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = control_service.CreateControlRequest.pb( control_service.CreateControlRequest() ) @@ -4571,6 +4624,7 @@ def test_create_control_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata 
post.return_value = gcd_control.Control() + post_with_metadata.return_value = gcd_control.Control(), metadata client.create_control( request, @@ -4582,6 +4636,7 @@ def test_create_control_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_control_rest_bad_request( @@ -4890,10 +4945,13 @@ def test_update_control_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ControlServiceRestInterceptor, "post_update_control" ) as post, mock.patch.object( + transports.ControlServiceRestInterceptor, "post_update_control_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ControlServiceRestInterceptor, "pre_update_control" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = control_service.UpdateControlRequest.pb( control_service.UpdateControlRequest() ) @@ -4917,6 +4975,7 @@ def test_update_control_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gcd_control.Control() + post_with_metadata.return_value = gcd_control.Control(), metadata client.update_control( request, @@ -4928,6 +4987,7 @@ def test_update_control_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_control_rest_bad_request(request_type=control_service.GetControlRequest): @@ -5024,10 +5084,13 @@ def test_get_control_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ControlServiceRestInterceptor, "post_get_control" ) as post, mock.patch.object( + transports.ControlServiceRestInterceptor, "post_get_control_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ControlServiceRestInterceptor, "pre_get_control" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = control_service.GetControlRequest.pb( control_service.GetControlRequest() ) @@ -5051,6 +5114,7 @@ def test_get_control_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = control.Control() + post_with_metadata.return_value = control.Control(), metadata client.get_control( request, @@ -5062,6 +5126,7 @@ def test_get_control_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_controls_rest_bad_request( @@ -5146,10 +5211,13 @@ def test_list_controls_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ControlServiceRestInterceptor, "post_list_controls" ) as post, mock.patch.object( + transports.ControlServiceRestInterceptor, "post_list_controls_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ControlServiceRestInterceptor, "pre_list_controls" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = control_service.ListControlsRequest.pb( control_service.ListControlsRequest() ) @@ -5175,6 +5243,10 @@ def test_list_controls_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = control_service.ListControlsResponse() + post_with_metadata.return_value = ( + control_service.ListControlsResponse(), + metadata, + ) client.list_controls( request, @@ -5186,6 +5258,7 @@ def test_list_controls_rest_interceptors(null_interceptor): pre.assert_called_once() 
post.assert_called_once() + post_with_metadata.assert_called_once() def test_cancel_operation_rest_bad_request( diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_conversational_search_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_conversational_search_service.py index 217e8e57d1f6..500c1b1bdc34 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_conversational_search_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_conversational_search_service.py @@ -72,6 +72,13 @@ from google.cloud.discoveryengine_v1alpha.types import session from google.cloud.discoveryengine_v1alpha.types import session as gcd_session +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -355,6 +362,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = ConversationalSearchServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = ConversationalSearchServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -9264,11 +9314,15 @@ def test_converse_conversation_rest_interceptors(null_interceptor): transports.ConversationalSearchServiceRestInterceptor, "post_converse_conversation", ) as post, mock.patch.object( + transports.ConversationalSearchServiceRestInterceptor, + "post_converse_conversation_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ConversationalSearchServiceRestInterceptor, "pre_converse_conversation", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = conversational_search_service.ConverseConversationRequest.pb( conversational_search_service.ConverseConversationRequest() ) @@ -9296,6 +9350,10 @@ def test_converse_conversation_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = conversational_search_service.ConverseConversationResponse() + 
post_with_metadata.return_value = ( + conversational_search_service.ConverseConversationResponse(), + metadata, + ) client.converse_conversation( request, @@ -9307,6 +9365,7 @@ def test_converse_conversation_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_conversation_rest_bad_request( @@ -9531,10 +9590,14 @@ def test_create_conversation_rest_interceptors(null_interceptor): transports.ConversationalSearchServiceRestInterceptor, "post_create_conversation", ) as post, mock.patch.object( + transports.ConversationalSearchServiceRestInterceptor, + "post_create_conversation_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ConversationalSearchServiceRestInterceptor, "pre_create_conversation" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = conversational_search_service.CreateConversationRequest.pb( conversational_search_service.CreateConversationRequest() ) @@ -9560,6 +9623,7 @@ def test_create_conversation_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gcd_conversation.Conversation() + post_with_metadata.return_value = gcd_conversation.Conversation(), metadata client.create_conversation( request, @@ -9571,6 +9635,7 @@ def test_create_conversation_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_conversation_rest_bad_request( @@ -9916,10 +9981,14 @@ def test_update_conversation_rest_interceptors(null_interceptor): transports.ConversationalSearchServiceRestInterceptor, "post_update_conversation", ) as post, mock.patch.object( + transports.ConversationalSearchServiceRestInterceptor, + "post_update_conversation_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ConversationalSearchServiceRestInterceptor, "pre_update_conversation" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = conversational_search_service.UpdateConversationRequest.pb( conversational_search_service.UpdateConversationRequest() ) @@ -9945,6 +10014,7 @@ def test_update_conversation_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gcd_conversation.Conversation() + post_with_metadata.return_value = gcd_conversation.Conversation(), metadata client.update_conversation( request, @@ -9956,6 +10026,7 @@ def test_update_conversation_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_conversation_rest_bad_request( @@ -10048,10 +10119,14 @@ def test_get_conversation_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ConversationalSearchServiceRestInterceptor, "post_get_conversation" ) as post, mock.patch.object( + transports.ConversationalSearchServiceRestInterceptor, + "post_get_conversation_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ConversationalSearchServiceRestInterceptor, "pre_get_conversation" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = conversational_search_service.GetConversationRequest.pb( conversational_search_service.GetConversationRequest() ) @@ -10075,6 +10150,7 @@ def test_get_conversation_rest_interceptors(null_interceptor): ] pre.return_value = request, 
metadata post.return_value = conversation.Conversation() + post_with_metadata.return_value = conversation.Conversation(), metadata client.get_conversation( request, @@ -10086,6 +10162,7 @@ def test_get_conversation_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_conversations_rest_bad_request( @@ -10172,10 +10249,14 @@ def test_list_conversations_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ConversationalSearchServiceRestInterceptor, "post_list_conversations" ) as post, mock.patch.object( + transports.ConversationalSearchServiceRestInterceptor, + "post_list_conversations_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ConversationalSearchServiceRestInterceptor, "pre_list_conversations" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = conversational_search_service.ListConversationsRequest.pb( conversational_search_service.ListConversationsRequest() ) @@ -10201,6 +10282,10 @@ def test_list_conversations_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = conversational_search_service.ListConversationsResponse() + post_with_metadata.return_value = ( + conversational_search_service.ListConversationsResponse(), + metadata, + ) client.list_conversations( request, @@ -10212,6 +10297,7 @@ def test_list_conversations_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_answer_query_rest_bad_request( @@ -10302,10 +10388,14 @@ def test_answer_query_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ConversationalSearchServiceRestInterceptor, "post_answer_query" ) as post, mock.patch.object( + transports.ConversationalSearchServiceRestInterceptor, + "post_answer_query_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ConversationalSearchServiceRestInterceptor, "pre_answer_query" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = conversational_search_service.AnswerQueryRequest.pb( conversational_search_service.AnswerQueryRequest() ) @@ -10331,6 +10421,10 @@ def test_answer_query_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = conversational_search_service.AnswerQueryResponse() + post_with_metadata.return_value = ( + conversational_search_service.AnswerQueryResponse(), + metadata, + ) client.answer_query( request, @@ -10342,6 +10436,7 @@ def test_answer_query_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_answer_rest_bad_request( @@ -10442,10 +10537,14 @@ def test_get_answer_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ConversationalSearchServiceRestInterceptor, "post_get_answer" ) as post, mock.patch.object( + transports.ConversationalSearchServiceRestInterceptor, + "post_get_answer_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ConversationalSearchServiceRestInterceptor, "pre_get_answer" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = conversational_search_service.GetAnswerRequest.pb( conversational_search_service.GetAnswerRequest() ) @@ -10469,6 +10568,7 @@ def 
test_get_answer_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = answer.Answer() + post_with_metadata.return_value = answer.Answer(), metadata client.get_answer( request, @@ -10480,6 +10580,7 @@ def test_get_answer_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_session_rest_bad_request( @@ -10650,10 +10751,14 @@ def test_create_session_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ConversationalSearchServiceRestInterceptor, "post_create_session" ) as post, mock.patch.object( + transports.ConversationalSearchServiceRestInterceptor, + "post_create_session_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ConversationalSearchServiceRestInterceptor, "pre_create_session" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = conversational_search_service.CreateSessionRequest.pb( conversational_search_service.CreateSessionRequest() ) @@ -10677,6 +10782,7 @@ def test_create_session_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gcd_session.Session() + post_with_metadata.return_value = gcd_session.Session(), metadata client.create_session( request, @@ -10688,6 +10794,7 @@ def test_create_session_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_session_rest_bad_request( @@ -10979,10 +11086,14 @@ def test_update_session_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ConversationalSearchServiceRestInterceptor, "post_update_session" ) as post, mock.patch.object( + transports.ConversationalSearchServiceRestInterceptor, + "post_update_session_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ConversationalSearchServiceRestInterceptor, "pre_update_session" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = conversational_search_service.UpdateSessionRequest.pb( conversational_search_service.UpdateSessionRequest() ) @@ -11006,6 +11117,7 @@ def test_update_session_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gcd_session.Session() + post_with_metadata.return_value = gcd_session.Session(), metadata client.update_session( request, @@ -11017,6 +11129,7 @@ def test_update_session_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_session_rest_bad_request( @@ -11109,10 +11222,14 @@ def test_get_session_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ConversationalSearchServiceRestInterceptor, "post_get_session" ) as post, mock.patch.object( + transports.ConversationalSearchServiceRestInterceptor, + "post_get_session_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ConversationalSearchServiceRestInterceptor, "pre_get_session" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = conversational_search_service.GetSessionRequest.pb( conversational_search_service.GetSessionRequest() ) @@ -11136,6 +11253,7 @@ def test_get_session_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = session.Session() 
+ post_with_metadata.return_value = session.Session(), metadata client.get_session( request, @@ -11147,6 +11265,7 @@ def test_get_session_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_sessions_rest_bad_request( @@ -11233,10 +11352,14 @@ def test_list_sessions_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ConversationalSearchServiceRestInterceptor, "post_list_sessions" ) as post, mock.patch.object( + transports.ConversationalSearchServiceRestInterceptor, + "post_list_sessions_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ConversationalSearchServiceRestInterceptor, "pre_list_sessions" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = conversational_search_service.ListSessionsRequest.pb( conversational_search_service.ListSessionsRequest() ) @@ -11262,6 +11385,10 @@ def test_list_sessions_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = conversational_search_service.ListSessionsResponse() + post_with_metadata.return_value = ( + conversational_search_service.ListSessionsResponse(), + metadata, + ) client.list_sessions( request, @@ -11273,6 +11400,7 @@ def test_list_sessions_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_cancel_operation_rest_bad_request( diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_data_store_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_data_store_service.py index 8e958e81ea40..5fdecaef364e 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_data_store_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_data_store_service.py @@ -83,6 +83,13 @@ from google.cloud.discoveryengine_v1alpha.types import data_store_service from google.cloud.discoveryengine_v1alpha.types import schema +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -342,6 +349,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = DataStoreServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = DataStoreServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -5889,10 +5939,14 @@ def test_create_data_store_rest_interceptors(null_interceptor): ), mock.patch.object( transports.DataStoreServiceRestInterceptor, "post_create_data_store" ) as post, mock.patch.object( + transports.DataStoreServiceRestInterceptor, + "post_create_data_store_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DataStoreServiceRestInterceptor, "pre_create_data_store" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = data_store_service.CreateDataStoreRequest.pb( data_store_service.CreateDataStoreRequest() ) @@ -5916,6 +5970,7 @@ def test_create_data_store_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_data_store( request, @@ -5927,6 +5982,7 @@ def test_create_data_store_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_data_store_rest_bad_request( @@ -6023,10 +6079,13 @@ def test_get_data_store_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DataStoreServiceRestInterceptor, "post_get_data_store" ) as post, mock.patch.object( + transports.DataStoreServiceRestInterceptor, "post_get_data_store_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DataStoreServiceRestInterceptor, "pre_get_data_store" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = data_store_service.GetDataStoreRequest.pb( data_store_service.GetDataStoreRequest() ) @@ -6050,6 +6109,7 @@ def test_get_data_store_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = data_store.DataStore() + post_with_metadata.return_value = data_store.DataStore(), metadata client.get_data_store( request, @@ -6061,6 +6121,7 @@ def test_get_data_store_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def 
test_list_data_stores_rest_bad_request( @@ -6145,10 +6206,14 @@ def test_list_data_stores_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DataStoreServiceRestInterceptor, "post_list_data_stores" ) as post, mock.patch.object( + transports.DataStoreServiceRestInterceptor, + "post_list_data_stores_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DataStoreServiceRestInterceptor, "pre_list_data_stores" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = data_store_service.ListDataStoresRequest.pb( data_store_service.ListDataStoresRequest() ) @@ -6174,6 +6239,10 @@ def test_list_data_stores_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = data_store_service.ListDataStoresResponse() + post_with_metadata.return_value = ( + data_store_service.ListDataStoresResponse(), + metadata, + ) client.list_data_stores( request, @@ -6185,6 +6254,7 @@ def test_list_data_stores_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_data_store_rest_bad_request( @@ -6265,10 +6335,14 @@ def test_delete_data_store_rest_interceptors(null_interceptor): ), mock.patch.object( transports.DataStoreServiceRestInterceptor, "post_delete_data_store" ) as post, mock.patch.object( + transports.DataStoreServiceRestInterceptor, + "post_delete_data_store_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DataStoreServiceRestInterceptor, "pre_delete_data_store" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = data_store_service.DeleteDataStoreRequest.pb( data_store_service.DeleteDataStoreRequest() ) @@ -6292,6 +6366,7 @@ def test_delete_data_store_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.delete_data_store( request, @@ -6303,6 +6378,7 @@ def test_delete_data_store_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_data_store_rest_bad_request( @@ -6538,10 +6614,14 @@ def test_update_data_store_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DataStoreServiceRestInterceptor, "post_update_data_store" ) as post, mock.patch.object( + transports.DataStoreServiceRestInterceptor, + "post_update_data_store_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DataStoreServiceRestInterceptor, "pre_update_data_store" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = data_store_service.UpdateDataStoreRequest.pb( data_store_service.UpdateDataStoreRequest() ) @@ -6565,6 +6645,7 @@ def test_update_data_store_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gcd_data_store.DataStore() + post_with_metadata.return_value = gcd_data_store.DataStore(), metadata client.update_data_store( request, @@ -6576,6 +6657,7 @@ def test_update_data_store_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_document_processing_config_rest_bad_request( @@ -6667,10 +6749,14 @@ def 
test_get_document_processing_config_rest_interceptors(null_interceptor): transports.DataStoreServiceRestInterceptor, "post_get_document_processing_config", ) as post, mock.patch.object( + transports.DataStoreServiceRestInterceptor, + "post_get_document_processing_config_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DataStoreServiceRestInterceptor, "pre_get_document_processing_config" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = data_store_service.GetDocumentProcessingConfigRequest.pb( data_store_service.GetDocumentProcessingConfigRequest() ) @@ -6696,6 +6782,10 @@ def test_get_document_processing_config_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = document_processing_config.DocumentProcessingConfig() + post_with_metadata.return_value = ( + document_processing_config.DocumentProcessingConfig(), + metadata, + ) client.get_document_processing_config( request, @@ -6707,6 +6797,7 @@ def test_get_document_processing_config_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_document_processing_config_rest_bad_request( @@ -6896,11 +6987,15 @@ def test_update_document_processing_config_rest_interceptors(null_interceptor): transports.DataStoreServiceRestInterceptor, "post_update_document_processing_config", ) as post, mock.patch.object( + transports.DataStoreServiceRestInterceptor, + "post_update_document_processing_config_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DataStoreServiceRestInterceptor, "pre_update_document_processing_config", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = data_store_service.UpdateDocumentProcessingConfigRequest.pb( data_store_service.UpdateDocumentProcessingConfigRequest() ) @@ -6926,6 +7021,10 @@ def test_update_document_processing_config_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gcd_document_processing_config.DocumentProcessingConfig() + post_with_metadata.return_value = ( + gcd_document_processing_config.DocumentProcessingConfig(), + metadata, + ) client.update_document_processing_config( request, @@ -6937,6 +7036,7 @@ def test_update_document_processing_config_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_cancel_operation_rest_bad_request( diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_document_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_document_service.py index 1750f8d53480..5abf0a13f646 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_document_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_document_service.py @@ -84,6 +84,13 @@ from google.cloud.discoveryengine_v1alpha.types import document from google.cloud.discoveryengine_v1alpha.types import document as gcd_document +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -342,6 +349,49 @@ 
def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = DocumentServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = DocumentServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -6615,10 +6665,13 @@ def test_get_document_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DocumentServiceRestInterceptor, "post_get_document" ) as post, mock.patch.object( + transports.DocumentServiceRestInterceptor, "post_get_document_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DocumentServiceRestInterceptor, "pre_get_document" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = document_service.GetDocumentRequest.pb( document_service.GetDocumentRequest() ) @@ -6642,6 +6695,7 @@ def test_get_document_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = document.Document() + post_with_metadata.return_value = document.Document(), metadata client.get_document( request, @@ -6653,6 +6707,7 @@ def test_get_document_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_documents_rest_bad_request( @@ -6741,10 +6796,13 @@ def test_list_documents_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DocumentServiceRestInterceptor, "post_list_documents" ) as post, mock.patch.object( + transports.DocumentServiceRestInterceptor, "post_list_documents_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DocumentServiceRestInterceptor, "pre_list_documents" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = document_service.ListDocumentsRequest.pb( document_service.ListDocumentsRequest() ) @@ -6770,6 +6828,10 @@ def test_list_documents_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = document_service.ListDocumentsResponse() + post_with_metadata.return_value = ( + document_service.ListDocumentsResponse(), + metadata, + ) client.list_documents( request, @@ -6781,6 +6843,7 @@ def test_list_documents_rest_interceptors(null_interceptor): 
pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_document_rest_bad_request( @@ -6983,10 +7046,13 @@ def test_create_document_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DocumentServiceRestInterceptor, "post_create_document" ) as post, mock.patch.object( + transports.DocumentServiceRestInterceptor, "post_create_document_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DocumentServiceRestInterceptor, "pre_create_document" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = document_service.CreateDocumentRequest.pb( document_service.CreateDocumentRequest() ) @@ -7010,6 +7076,7 @@ def test_create_document_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gcd_document.Document() + post_with_metadata.return_value = gcd_document.Document(), metadata client.create_document( request, @@ -7021,6 +7088,7 @@ def test_create_document_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_document_rest_bad_request( @@ -7227,10 +7295,13 @@ def test_update_document_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DocumentServiceRestInterceptor, "post_update_document" ) as post, mock.patch.object( + transports.DocumentServiceRestInterceptor, "post_update_document_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DocumentServiceRestInterceptor, "pre_update_document" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = document_service.UpdateDocumentRequest.pb( document_service.UpdateDocumentRequest() ) @@ -7254,6 +7325,7 @@ def test_update_document_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gcd_document.Document() + post_with_metadata.return_value = gcd_document.Document(), metadata client.update_document( request, @@ -7265,6 +7337,7 @@ def test_update_document_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_document_rest_bad_request( @@ -7462,10 +7535,13 @@ def test_import_documents_rest_interceptors(null_interceptor): ), mock.patch.object( transports.DocumentServiceRestInterceptor, "post_import_documents" ) as post, mock.patch.object( + transports.DocumentServiceRestInterceptor, "post_import_documents_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DocumentServiceRestInterceptor, "pre_import_documents" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = import_config.ImportDocumentsRequest.pb( import_config.ImportDocumentsRequest() ) @@ -7489,6 +7565,7 @@ def test_import_documents_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.import_documents( request, @@ -7500,6 +7577,7 @@ def test_import_documents_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_purge_documents_rest_bad_request( @@ -7584,10 +7662,13 @@ def test_purge_documents_rest_interceptors(null_interceptor): ), mock.patch.object( 
transports.DocumentServiceRestInterceptor, "post_purge_documents" ) as post, mock.patch.object( + transports.DocumentServiceRestInterceptor, "post_purge_documents_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DocumentServiceRestInterceptor, "pre_purge_documents" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = purge_config.PurgeDocumentsRequest.pb( purge_config.PurgeDocumentsRequest() ) @@ -7611,6 +7692,7 @@ def test_purge_documents_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.purge_documents( request, @@ -7622,6 +7704,7 @@ def test_purge_documents_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_processed_document_rest_bad_request( @@ -7711,10 +7794,14 @@ def test_get_processed_document_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DocumentServiceRestInterceptor, "post_get_processed_document" ) as post, mock.patch.object( + transports.DocumentServiceRestInterceptor, + "post_get_processed_document_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DocumentServiceRestInterceptor, "pre_get_processed_document" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = document_service.GetProcessedDocumentRequest.pb( document_service.GetProcessedDocumentRequest() ) @@ -7738,6 +7825,7 @@ def test_get_processed_document_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = document.ProcessedDocument() + post_with_metadata.return_value = document.ProcessedDocument(), metadata client.get_processed_document( request, @@ -7749,6 +7837,7 @@ def test_get_processed_document_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_batch_get_documents_metadata_rest_bad_request( @@ -7836,10 +7925,14 @@ def test_batch_get_documents_metadata_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DocumentServiceRestInterceptor, "post_batch_get_documents_metadata" ) as post, mock.patch.object( + transports.DocumentServiceRestInterceptor, + "post_batch_get_documents_metadata_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DocumentServiceRestInterceptor, "pre_batch_get_documents_metadata" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = document_service.BatchGetDocumentsMetadataRequest.pb( document_service.BatchGetDocumentsMetadataRequest() ) @@ -7865,6 +7958,10 @@ def test_batch_get_documents_metadata_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = document_service.BatchGetDocumentsMetadataResponse() + post_with_metadata.return_value = ( + document_service.BatchGetDocumentsMetadataResponse(), + metadata, + ) client.batch_get_documents_metadata( request, @@ -7876,6 +7973,7 @@ def test_batch_get_documents_metadata_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_cancel_operation_rest_bad_request( diff --git 
a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_engine_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_engine_service.py index 8a47c5b87a19..be4470be1ac4 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_engine_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_engine_service.py @@ -77,6 +77,13 @@ from google.cloud.discoveryengine_v1alpha.types import engine as gcd_engine from google.cloud.discoveryengine_v1alpha.types import engine_service +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -331,6 +338,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = EngineServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = EngineServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -6195,10 +6245,13 @@ def test_create_engine_rest_interceptors(null_interceptor): ), mock.patch.object( transports.EngineServiceRestInterceptor, "post_create_engine" ) as post, mock.patch.object( + transports.EngineServiceRestInterceptor, "post_create_engine_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.EngineServiceRestInterceptor, "pre_create_engine" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = engine_service.CreateEngineRequest.pb( engine_service.CreateEngineRequest() ) @@ -6222,6 +6275,7 @@ def test_create_engine_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_engine( request, @@ -6233,6 +6287,7 @@ def test_create_engine_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_engine_rest_bad_request( @@ -6317,10 +6372,13 @@ def 
test_delete_engine_rest_interceptors(null_interceptor): ), mock.patch.object( transports.EngineServiceRestInterceptor, "post_delete_engine" ) as post, mock.patch.object( + transports.EngineServiceRestInterceptor, "post_delete_engine_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.EngineServiceRestInterceptor, "pre_delete_engine" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = engine_service.DeleteEngineRequest.pb( engine_service.DeleteEngineRequest() ) @@ -6344,6 +6402,7 @@ def test_delete_engine_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.delete_engine( request, @@ -6355,6 +6414,7 @@ def test_delete_engine_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_engine_rest_bad_request( @@ -6559,10 +6619,13 @@ def test_update_engine_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.EngineServiceRestInterceptor, "post_update_engine" ) as post, mock.patch.object( + transports.EngineServiceRestInterceptor, "post_update_engine_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.EngineServiceRestInterceptor, "pre_update_engine" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = engine_service.UpdateEngineRequest.pb( engine_service.UpdateEngineRequest() ) @@ -6586,6 +6649,7 @@ def test_update_engine_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gcd_engine.Engine() + post_with_metadata.return_value = gcd_engine.Engine(), metadata client.update_engine( request, @@ -6597,6 +6661,7 @@ def test_update_engine_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_engine_rest_bad_request(request_type=engine_service.GetEngineRequest): @@ -6691,10 +6756,13 @@ def test_get_engine_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.EngineServiceRestInterceptor, "post_get_engine" ) as post, mock.patch.object( + transports.EngineServiceRestInterceptor, "post_get_engine_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.EngineServiceRestInterceptor, "pre_get_engine" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = engine_service.GetEngineRequest.pb( engine_service.GetEngineRequest() ) @@ -6718,6 +6786,7 @@ def test_get_engine_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = engine.Engine() + post_with_metadata.return_value = engine.Engine(), metadata client.get_engine( request, @@ -6729,6 +6798,7 @@ def test_get_engine_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_engines_rest_bad_request(request_type=engine_service.ListEnginesRequest): @@ -6811,10 +6881,13 @@ def test_list_engines_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.EngineServiceRestInterceptor, "post_list_engines" ) as post, mock.patch.object( + transports.EngineServiceRestInterceptor, "post_list_engines_with_metadata" + ) as post_with_metadata, mock.patch.object( 
transports.EngineServiceRestInterceptor, "pre_list_engines" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = engine_service.ListEnginesRequest.pb( engine_service.ListEnginesRequest() ) @@ -6840,6 +6913,7 @@ def test_list_engines_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = engine_service.ListEnginesResponse() + post_with_metadata.return_value = engine_service.ListEnginesResponse(), metadata client.list_engines( request, @@ -6851,6 +6925,7 @@ def test_list_engines_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_pause_engine_rest_bad_request(request_type=engine_service.PauseEngineRequest): @@ -6945,10 +7020,13 @@ def test_pause_engine_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.EngineServiceRestInterceptor, "post_pause_engine" ) as post, mock.patch.object( + transports.EngineServiceRestInterceptor, "post_pause_engine_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.EngineServiceRestInterceptor, "pre_pause_engine" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = engine_service.PauseEngineRequest.pb( engine_service.PauseEngineRequest() ) @@ -6972,6 +7050,7 @@ def test_pause_engine_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = engine.Engine() + post_with_metadata.return_value = engine.Engine(), metadata client.pause_engine( request, @@ -6983,6 +7062,7 @@ def test_pause_engine_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_resume_engine_rest_bad_request( @@ -7079,10 +7159,13 @@ def test_resume_engine_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.EngineServiceRestInterceptor, "post_resume_engine" ) as post, mock.patch.object( + transports.EngineServiceRestInterceptor, "post_resume_engine_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.EngineServiceRestInterceptor, "pre_resume_engine" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = engine_service.ResumeEngineRequest.pb( engine_service.ResumeEngineRequest() ) @@ -7106,6 +7189,7 @@ def test_resume_engine_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = engine.Engine() + post_with_metadata.return_value = engine.Engine(), metadata client.resume_engine( request, @@ -7117,6 +7201,7 @@ def test_resume_engine_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_tune_engine_rest_bad_request(request_type=engine_service.TuneEngineRequest): @@ -7199,10 +7284,13 @@ def test_tune_engine_rest_interceptors(null_interceptor): ), mock.patch.object( transports.EngineServiceRestInterceptor, "post_tune_engine" ) as post, mock.patch.object( + transports.EngineServiceRestInterceptor, "post_tune_engine_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.EngineServiceRestInterceptor, "pre_tune_engine" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = engine_service.TuneEngineRequest.pb( engine_service.TuneEngineRequest() ) @@ -7226,6 +7314,7 @@ def 
test_tune_engine_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.tune_engine( request, @@ -7237,6 +7326,7 @@ def test_tune_engine_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_cancel_operation_rest_bad_request( diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_estimate_billing_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_estimate_billing_service.py index 554a71d31cb7..f2603322c67b 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_estimate_billing_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_estimate_billing_service.py @@ -74,6 +74,13 @@ import_config, ) +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -349,6 +356,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = EstimateBillingServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = EstimateBillingServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -1779,10 +1829,14 @@ def test_estimate_data_size_rest_interceptors(null_interceptor): ), mock.patch.object( transports.EstimateBillingServiceRestInterceptor, "post_estimate_data_size" ) as post, mock.patch.object( + transports.EstimateBillingServiceRestInterceptor, + "post_estimate_data_size_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.EstimateBillingServiceRestInterceptor, "pre_estimate_data_size" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = estimate_billing_service.EstimateDataSizeRequest.pb( estimate_billing_service.EstimateDataSizeRequest() ) @@ -1806,6 +1860,7 @@ def 
test_estimate_data_size_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.estimate_data_size( request, @@ -1817,6 +1872,7 @@ def test_estimate_data_size_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_cancel_operation_rest_bad_request( diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_evaluation_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_evaluation_service.py index 161c3f0fae67..015a243bdb52 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_evaluation_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_evaluation_service.py @@ -81,6 +81,13 @@ from google.cloud.discoveryengine_v1alpha.types import common from google.cloud.discoveryengine_v1alpha.types import evaluation +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -348,6 +355,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = EvaluationServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = EvaluationServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -4191,10 +4241,13 @@ def test_get_evaluation_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.EvaluationServiceRestInterceptor, "post_get_evaluation" ) as post, mock.patch.object( + transports.EvaluationServiceRestInterceptor, "post_get_evaluation_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.EvaluationServiceRestInterceptor, "pre_get_evaluation" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = evaluation_service.GetEvaluationRequest.pb( evaluation_service.GetEvaluationRequest() ) @@ -4218,6 
+4271,7 @@ def test_get_evaluation_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = evaluation.Evaluation() + post_with_metadata.return_value = evaluation.Evaluation(), metadata client.get_evaluation( request, @@ -4229,6 +4283,7 @@ def test_get_evaluation_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_evaluations_rest_bad_request( @@ -4313,10 +4368,14 @@ def test_list_evaluations_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.EvaluationServiceRestInterceptor, "post_list_evaluations" ) as post, mock.patch.object( + transports.EvaluationServiceRestInterceptor, + "post_list_evaluations_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.EvaluationServiceRestInterceptor, "pre_list_evaluations" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = evaluation_service.ListEvaluationsRequest.pb( evaluation_service.ListEvaluationsRequest() ) @@ -4342,6 +4401,10 @@ def test_list_evaluations_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = evaluation_service.ListEvaluationsResponse() + post_with_metadata.return_value = ( + evaluation_service.ListEvaluationsResponse(), + metadata, + ) client.list_evaluations( request, @@ -4353,6 +4416,7 @@ def test_list_evaluations_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_evaluation_rest_bad_request( @@ -4659,10 +4723,14 @@ def test_create_evaluation_rest_interceptors(null_interceptor): ), mock.patch.object( transports.EvaluationServiceRestInterceptor, "post_create_evaluation" ) as post, mock.patch.object( + transports.EvaluationServiceRestInterceptor, + "post_create_evaluation_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.EvaluationServiceRestInterceptor, "pre_create_evaluation" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = evaluation_service.CreateEvaluationRequest.pb( evaluation_service.CreateEvaluationRequest() ) @@ -4686,6 +4754,7 @@ def test_create_evaluation_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_evaluation( request, @@ -4697,6 +4766,7 @@ def test_create_evaluation_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_evaluation_results_rest_bad_request( @@ -4785,10 +4855,14 @@ def test_list_evaluation_results_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.EvaluationServiceRestInterceptor, "post_list_evaluation_results" ) as post, mock.patch.object( + transports.EvaluationServiceRestInterceptor, + "post_list_evaluation_results_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.EvaluationServiceRestInterceptor, "pre_list_evaluation_results" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = evaluation_service.ListEvaluationResultsRequest.pb( evaluation_service.ListEvaluationResultsRequest() ) @@ -4814,6 +4888,10 @@ def test_list_evaluation_results_rest_interceptors(null_interceptor): ] 
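# --- Editorial note (not part of the patch) ----------------------------------
# Throughout these interceptor hunks the generated tests gain a second mock:
# next to the existing `post_<rpc>` hook, a `post_<rpc>_with_metadata` hook is
# patched, given a `(response, metadata)` tuple as its return value, and
# asserted to be called exactly once.  The new hooks let a custom REST
# interceptor read or rewrite the response together with its metadata.  A
# minimal sketch of such an override follows; the class and method names mirror
# the pattern exercised here and are illustrative, not a specific generated
# base class.
class LoggingInterceptor:
    def post_list_evaluation_results_with_metadata(self, response, metadata):
        # Inspect or adjust the response/metadata pair, then return both so the
        # transport can continue processing them.
        print(f"received {type(response).__name__} with metadata={metadata!r}")
        return response, metadata
# ------------------------------------------------------------------------------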
pre.return_value = request, metadata post.return_value = evaluation_service.ListEvaluationResultsResponse() + post_with_metadata.return_value = ( + evaluation_service.ListEvaluationResultsResponse(), + metadata, + ) client.list_evaluation_results( request, @@ -4825,6 +4903,7 @@ def test_list_evaluation_results_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_cancel_operation_rest_bad_request( diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_grounded_generation_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_grounded_generation_service.py index 720cd375b33d..c466c8932b8c 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_grounded_generation_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_grounded_generation_service.py @@ -64,6 +64,13 @@ grounding, ) +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -343,6 +350,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = GroundedGenerationServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = GroundedGenerationServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -1770,10 +1820,14 @@ def test_check_grounding_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.GroundedGenerationServiceRestInterceptor, "post_check_grounding" ) as post, mock.patch.object( + transports.GroundedGenerationServiceRestInterceptor, + "post_check_grounding_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.GroundedGenerationServiceRestInterceptor, "pre_check_grounding" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = grounded_generation_service.CheckGroundingRequest.pb( grounded_generation_service.CheckGroundingRequest() ) @@ -1799,6 +1853,10 
@@ def test_check_grounding_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = grounded_generation_service.CheckGroundingResponse() + post_with_metadata.return_value = ( + grounded_generation_service.CheckGroundingResponse(), + metadata, + ) client.check_grounding( request, @@ -1810,6 +1868,7 @@ def test_check_grounding_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_cancel_operation_rest_bad_request( diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_project_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_project_service.py index 6b0d67012a6d..0e0dd84cdccb 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_project_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_project_service.py @@ -73,6 +73,13 @@ from google.cloud.discoveryengine_v1alpha.types import project as gcd_project from google.cloud.discoveryengine_v1alpha.types import project_service +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -331,6 +338,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = ProjectServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = ProjectServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -3047,10 +3097,13 @@ def test_get_project_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ProjectServiceRestInterceptor, "post_get_project" ) as post, mock.patch.object( + transports.ProjectServiceRestInterceptor, "post_get_project_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ProjectServiceRestInterceptor, "pre_get_project" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = project_service.GetProjectRequest.pb( 
project_service.GetProjectRequest() ) @@ -3074,6 +3127,7 @@ def test_get_project_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = project.Project() + post_with_metadata.return_value = project.Project(), metadata client.get_project( request, @@ -3085,6 +3139,7 @@ def test_get_project_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_provision_project_rest_bad_request( @@ -3165,10 +3220,13 @@ def test_provision_project_rest_interceptors(null_interceptor): ), mock.patch.object( transports.ProjectServiceRestInterceptor, "post_provision_project" ) as post, mock.patch.object( + transports.ProjectServiceRestInterceptor, "post_provision_project_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ProjectServiceRestInterceptor, "pre_provision_project" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = project_service.ProvisionProjectRequest.pb( project_service.ProvisionProjectRequest() ) @@ -3192,6 +3250,7 @@ def test_provision_project_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.provision_project( request, @@ -3203,6 +3262,7 @@ def test_provision_project_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_report_consent_change_rest_bad_request( @@ -3287,10 +3347,14 @@ def test_report_consent_change_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ProjectServiceRestInterceptor, "post_report_consent_change" ) as post, mock.patch.object( + transports.ProjectServiceRestInterceptor, + "post_report_consent_change_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ProjectServiceRestInterceptor, "pre_report_consent_change" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = project_service.ReportConsentChangeRequest.pb( project_service.ReportConsentChangeRequest() ) @@ -3314,6 +3378,7 @@ def test_report_consent_change_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gcd_project.Project() + post_with_metadata.return_value = gcd_project.Project(), metadata client.report_consent_change( request, @@ -3325,6 +3390,7 @@ def test_report_consent_change_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_cancel_operation_rest_bad_request( diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_rank_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_rank_service.py index 321c64930ac6..7f90ab420f2a 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_rank_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_rank_service.py @@ -61,6 +61,13 @@ ) from google.cloud.discoveryengine_v1alpha.types import rank_service +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def 
mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -300,6 +307,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = RankServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = RankServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -1641,10 +1691,13 @@ def test_rank_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RankServiceRestInterceptor, "post_rank" ) as post, mock.patch.object( + transports.RankServiceRestInterceptor, "post_rank_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.RankServiceRestInterceptor, "pre_rank" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = rank_service.RankRequest.pb(rank_service.RankRequest()) transcode.return_value = { "method": "post", @@ -1666,6 +1719,7 @@ def test_rank_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = rank_service.RankResponse() + post_with_metadata.return_value = rank_service.RankResponse(), metadata client.rank( request, @@ -1677,6 +1731,7 @@ def test_rank_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_cancel_operation_rest_bad_request( diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_recommendation_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_recommendation_service.py index 3c6cff8b1248..93a646558da4 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_recommendation_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_recommendation_service.py @@ -68,6 +68,13 @@ user_event, ) +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -343,6 +350,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe 
Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = RecommendationServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = RecommendationServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -1776,10 +1826,13 @@ def test_recommend_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RecommendationServiceRestInterceptor, "post_recommend" ) as post, mock.patch.object( + transports.RecommendationServiceRestInterceptor, "post_recommend_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.RecommendationServiceRestInterceptor, "pre_recommend" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = recommendation_service.RecommendRequest.pb( recommendation_service.RecommendRequest() ) @@ -1805,6 +1858,10 @@ def test_recommend_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = recommendation_service.RecommendResponse() + post_with_metadata.return_value = ( + recommendation_service.RecommendResponse(), + metadata, + ) client.recommend( request, @@ -1816,6 +1873,7 @@ def test_recommend_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_cancel_operation_rest_bad_request( diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_sample_query_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_sample_query_service.py index ef1e369354e0..f6df20d70dfb 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_sample_query_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_sample_query_service.py @@ -77,6 +77,13 @@ from google.cloud.discoveryengine_v1alpha.types import sample_query from google.cloud.discoveryengine_v1alpha.types import sample_query_service +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -348,6 +355,49 @@ def 
test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = SampleQueryServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = SampleQueryServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -4994,10 +5044,14 @@ def test_get_sample_query_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.SampleQueryServiceRestInterceptor, "post_get_sample_query" ) as post, mock.patch.object( + transports.SampleQueryServiceRestInterceptor, + "post_get_sample_query_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.SampleQueryServiceRestInterceptor, "pre_get_sample_query" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = sample_query_service.GetSampleQueryRequest.pb( sample_query_service.GetSampleQueryRequest() ) @@ -5021,6 +5075,7 @@ def test_get_sample_query_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = sample_query.SampleQuery() + post_with_metadata.return_value = sample_query.SampleQuery(), metadata client.get_sample_query( request, @@ -5032,6 +5087,7 @@ def test_get_sample_query_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_sample_queries_rest_bad_request( @@ -5120,10 +5176,14 @@ def test_list_sample_queries_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.SampleQueryServiceRestInterceptor, "post_list_sample_queries" ) as post, mock.patch.object( + transports.SampleQueryServiceRestInterceptor, + "post_list_sample_queries_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.SampleQueryServiceRestInterceptor, "pre_list_sample_queries" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = sample_query_service.ListSampleQueriesRequest.pb( sample_query_service.ListSampleQueriesRequest() ) @@ -5149,6 +5209,10 @@ def test_list_sample_queries_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = sample_query_service.ListSampleQueriesResponse() + post_with_metadata.return_value = ( + sample_query_service.ListSampleQueriesResponse(), 
+ metadata, + ) client.list_sample_queries( request, @@ -5160,6 +5224,7 @@ def test_list_sample_queries_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_sample_query_rest_bad_request( @@ -5327,10 +5392,14 @@ def test_create_sample_query_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.SampleQueryServiceRestInterceptor, "post_create_sample_query" ) as post, mock.patch.object( + transports.SampleQueryServiceRestInterceptor, + "post_create_sample_query_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.SampleQueryServiceRestInterceptor, "pre_create_sample_query" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = sample_query_service.CreateSampleQueryRequest.pb( sample_query_service.CreateSampleQueryRequest() ) @@ -5356,6 +5425,7 @@ def test_create_sample_query_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gcd_sample_query.SampleQuery() + post_with_metadata.return_value = gcd_sample_query.SampleQuery(), metadata client.create_sample_query( request, @@ -5367,6 +5437,7 @@ def test_create_sample_query_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_sample_query_rest_bad_request( @@ -5538,10 +5609,14 @@ def test_update_sample_query_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.SampleQueryServiceRestInterceptor, "post_update_sample_query" ) as post, mock.patch.object( + transports.SampleQueryServiceRestInterceptor, + "post_update_sample_query_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.SampleQueryServiceRestInterceptor, "pre_update_sample_query" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = sample_query_service.UpdateSampleQueryRequest.pb( sample_query_service.UpdateSampleQueryRequest() ) @@ -5567,6 +5642,7 @@ def test_update_sample_query_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gcd_sample_query.SampleQuery() + post_with_metadata.return_value = gcd_sample_query.SampleQuery(), metadata client.update_sample_query( request, @@ -5578,6 +5654,7 @@ def test_update_sample_query_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_sample_query_rest_bad_request( @@ -5775,10 +5852,14 @@ def test_import_sample_queries_rest_interceptors(null_interceptor): ), mock.patch.object( transports.SampleQueryServiceRestInterceptor, "post_import_sample_queries" ) as post, mock.patch.object( + transports.SampleQueryServiceRestInterceptor, + "post_import_sample_queries_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.SampleQueryServiceRestInterceptor, "pre_import_sample_queries" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = import_config.ImportSampleQueriesRequest.pb( import_config.ImportSampleQueriesRequest() ) @@ -5802,6 +5883,7 @@ def test_import_sample_queries_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.import_sample_queries( request, @@ 
-5813,6 +5895,7 @@ def test_import_sample_queries_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_cancel_operation_rest_bad_request( diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_sample_query_set_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_sample_query_set_service.py index 9d0ff245dbde..e6315a0db6d9 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_sample_query_set_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_sample_query_set_service.py @@ -68,6 +68,13 @@ from google.cloud.discoveryengine_v1alpha.types import sample_query_set from google.cloud.discoveryengine_v1alpha.types import sample_query_set_service +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -343,6 +350,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = SampleQuerySetServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = SampleQuerySetServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -4598,10 +4648,14 @@ def test_get_sample_query_set_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.SampleQuerySetServiceRestInterceptor, "post_get_sample_query_set" ) as post, mock.patch.object( + transports.SampleQuerySetServiceRestInterceptor, + "post_get_sample_query_set_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.SampleQuerySetServiceRestInterceptor, "pre_get_sample_query_set" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = sample_query_set_service.GetSampleQuerySetRequest.pb( sample_query_set_service.GetSampleQuerySetRequest() ) @@ -4627,6 +4681,7 @@ def test_get_sample_query_set_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = 
sample_query_set.SampleQuerySet() + post_with_metadata.return_value = sample_query_set.SampleQuerySet(), metadata client.get_sample_query_set( request, @@ -4638,6 +4693,7 @@ def test_get_sample_query_set_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_sample_query_sets_rest_bad_request( @@ -4724,10 +4780,14 @@ def test_list_sample_query_sets_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.SampleQuerySetServiceRestInterceptor, "post_list_sample_query_sets" ) as post, mock.patch.object( + transports.SampleQuerySetServiceRestInterceptor, + "post_list_sample_query_sets_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.SampleQuerySetServiceRestInterceptor, "pre_list_sample_query_sets" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = sample_query_set_service.ListSampleQuerySetsRequest.pb( sample_query_set_service.ListSampleQuerySetsRequest() ) @@ -4753,6 +4813,10 @@ def test_list_sample_query_sets_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = sample_query_set_service.ListSampleQuerySetsResponse() + post_with_metadata.return_value = ( + sample_query_set_service.ListSampleQuerySetsResponse(), + metadata, + ) client.list_sample_query_sets( request, @@ -4764,6 +4828,7 @@ def test_list_sample_query_sets_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_sample_query_set_rest_bad_request( @@ -4927,10 +4992,14 @@ def test_create_sample_query_set_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.SampleQuerySetServiceRestInterceptor, "post_create_sample_query_set" ) as post, mock.patch.object( + transports.SampleQuerySetServiceRestInterceptor, + "post_create_sample_query_set_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.SampleQuerySetServiceRestInterceptor, "pre_create_sample_query_set" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = sample_query_set_service.CreateSampleQuerySetRequest.pb( sample_query_set_service.CreateSampleQuerySetRequest() ) @@ -4956,6 +5025,10 @@ def test_create_sample_query_set_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gcd_sample_query_set.SampleQuerySet() + post_with_metadata.return_value = ( + gcd_sample_query_set.SampleQuerySet(), + metadata, + ) client.create_sample_query_set( request, @@ -4967,6 +5040,7 @@ def test_create_sample_query_set_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_sample_query_set_rest_bad_request( @@ -5138,10 +5212,14 @@ def test_update_sample_query_set_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.SampleQuerySetServiceRestInterceptor, "post_update_sample_query_set" ) as post, mock.patch.object( + transports.SampleQuerySetServiceRestInterceptor, + "post_update_sample_query_set_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.SampleQuerySetServiceRestInterceptor, "pre_update_sample_query_set" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = sample_query_set_service.UpdateSampleQuerySetRequest.pb( 
sample_query_set_service.UpdateSampleQuerySetRequest() ) @@ -5167,6 +5245,10 @@ def test_update_sample_query_set_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gcd_sample_query_set.SampleQuerySet() + post_with_metadata.return_value = ( + gcd_sample_query_set.SampleQuerySet(), + metadata, + ) client.update_sample_query_set( request, @@ -5178,6 +5260,7 @@ def test_update_sample_query_set_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_sample_query_set_rest_bad_request( diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_schema_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_schema_service.py index 150b5e7975e8..3980a030cf61 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_schema_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_schema_service.py @@ -75,6 +75,13 @@ from google.cloud.discoveryengine_v1alpha.types import schema as gcd_schema from google.cloud.discoveryengine_v1alpha.types import schema_service +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -329,6 +336,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = SchemaServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = SchemaServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -4279,10 +4329,13 @@ def test_get_schema_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.SchemaServiceRestInterceptor, "post_get_schema" ) as post, mock.patch.object( + transports.SchemaServiceRestInterceptor, "post_get_schema_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.SchemaServiceRestInterceptor, "pre_get_schema" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message 
= schema_service.GetSchemaRequest.pb( schema_service.GetSchemaRequest() ) @@ -4306,6 +4359,7 @@ def test_get_schema_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = schema.Schema() + post_with_metadata.return_value = schema.Schema(), metadata client.get_schema( request, @@ -4317,6 +4371,7 @@ def test_get_schema_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_schemas_rest_bad_request(request_type=schema_service.ListSchemasRequest): @@ -4399,10 +4454,13 @@ def test_list_schemas_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.SchemaServiceRestInterceptor, "post_list_schemas" ) as post, mock.patch.object( + transports.SchemaServiceRestInterceptor, "post_list_schemas_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.SchemaServiceRestInterceptor, "pre_list_schemas" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = schema_service.ListSchemasRequest.pb( schema_service.ListSchemasRequest() ) @@ -4428,6 +4486,7 @@ def test_list_schemas_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = schema_service.ListSchemasResponse() + post_with_metadata.return_value = schema_service.ListSchemasResponse(), metadata client.list_schemas( request, @@ -4439,6 +4498,7 @@ def test_list_schemas_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_schema_rest_bad_request( @@ -4609,10 +4669,13 @@ def test_create_schema_rest_interceptors(null_interceptor): ), mock.patch.object( transports.SchemaServiceRestInterceptor, "post_create_schema" ) as post, mock.patch.object( + transports.SchemaServiceRestInterceptor, "post_create_schema_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.SchemaServiceRestInterceptor, "pre_create_schema" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = schema_service.CreateSchemaRequest.pb( schema_service.CreateSchemaRequest() ) @@ -4636,6 +4699,7 @@ def test_create_schema_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_schema( request, @@ -4647,6 +4711,7 @@ def test_create_schema_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_schema_rest_bad_request( @@ -4825,10 +4890,13 @@ def test_update_schema_rest_interceptors(null_interceptor): ), mock.patch.object( transports.SchemaServiceRestInterceptor, "post_update_schema" ) as post, mock.patch.object( + transports.SchemaServiceRestInterceptor, "post_update_schema_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.SchemaServiceRestInterceptor, "pre_update_schema" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = schema_service.UpdateSchemaRequest.pb( schema_service.UpdateSchemaRequest() ) @@ -4852,6 +4920,7 @@ def test_update_schema_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata 
client.update_schema( request, @@ -4863,6 +4932,7 @@ def test_update_schema_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_schema_rest_bad_request( @@ -4947,10 +5017,13 @@ def test_delete_schema_rest_interceptors(null_interceptor): ), mock.patch.object( transports.SchemaServiceRestInterceptor, "post_delete_schema" ) as post, mock.patch.object( + transports.SchemaServiceRestInterceptor, "post_delete_schema_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.SchemaServiceRestInterceptor, "pre_delete_schema" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = schema_service.DeleteSchemaRequest.pb( schema_service.DeleteSchemaRequest() ) @@ -4974,6 +5047,7 @@ def test_delete_schema_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.delete_schema( request, @@ -4985,6 +5059,7 @@ def test_delete_schema_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_cancel_operation_rest_bad_request( diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_search_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_search_service.py index d0bb2e9e4f5d..d2c16e7b4288 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_search_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_search_service.py @@ -63,6 +63,13 @@ ) from google.cloud.discoveryengine_v1alpha.types import common, search_service +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -317,6 +324,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = SearchServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = SearchServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -1994,10 +2044,13 @@ def test_search_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.SearchServiceRestInterceptor, "post_search" ) as post, mock.patch.object( + transports.SearchServiceRestInterceptor, "post_search_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.SearchServiceRestInterceptor, "pre_search" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = search_service.SearchRequest.pb(search_service.SearchRequest()) transcode.return_value = { "method": "post", @@ -2021,6 +2074,7 @@ def test_search_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = search_service.SearchResponse() + post_with_metadata.return_value = search_service.SearchResponse(), metadata client.search( request, @@ -2032,6 +2086,7 @@ def test_search_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_cancel_operation_rest_bad_request( diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_search_tuning_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_search_tuning_service.py index b942fead1757..5fe8f8bf9b5a 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_search_tuning_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_search_tuning_service.py @@ -74,6 +74,13 @@ search_tuning_service, ) +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -347,6 +354,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = SearchTuningServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = SearchTuningServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -2209,10 +2259,14 @@ def test_train_custom_model_rest_interceptors(null_interceptor): ), mock.patch.object( transports.SearchTuningServiceRestInterceptor, "post_train_custom_model" ) as post, mock.patch.object( + transports.SearchTuningServiceRestInterceptor, + "post_train_custom_model_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.SearchTuningServiceRestInterceptor, "pre_train_custom_model" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = search_tuning_service.TrainCustomModelRequest.pb( search_tuning_service.TrainCustomModelRequest() ) @@ -2236,6 +2290,7 @@ def test_train_custom_model_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.train_custom_model( request, @@ -2247,6 +2302,7 @@ def test_train_custom_model_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_custom_models_rest_bad_request( @@ -2332,10 +2388,14 @@ def test_list_custom_models_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.SearchTuningServiceRestInterceptor, "post_list_custom_models" ) as post, mock.patch.object( + transports.SearchTuningServiceRestInterceptor, + "post_list_custom_models_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.SearchTuningServiceRestInterceptor, "pre_list_custom_models" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = search_tuning_service.ListCustomModelsRequest.pb( search_tuning_service.ListCustomModelsRequest() ) @@ -2361,6 +2421,10 @@ def test_list_custom_models_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = search_tuning_service.ListCustomModelsResponse() + post_with_metadata.return_value = ( + search_tuning_service.ListCustomModelsResponse(), + metadata, + ) client.list_custom_models( request, @@ -2372,6 +2436,7 @@ def 
test_list_custom_models_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_cancel_operation_rest_bad_request( diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_serving_config_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_serving_config_service.py index 0b24f0fab49f..d11889938903 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_serving_config_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_serving_config_service.py @@ -69,6 +69,13 @@ from google.cloud.discoveryengine_v1alpha.types import serving_config from google.cloud.discoveryengine_v1alpha.types import serving_config_service +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -342,6 +349,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = ServingConfigServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = ServingConfigServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -3717,10 +3767,14 @@ def test_update_serving_config_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ServingConfigServiceRestInterceptor, "post_update_serving_config" ) as post, mock.patch.object( + transports.ServingConfigServiceRestInterceptor, + "post_update_serving_config_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ServingConfigServiceRestInterceptor, "pre_update_serving_config" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = serving_config_service.UpdateServingConfigRequest.pb( serving_config_service.UpdateServingConfigRequest() ) @@ -3746,6 +3800,7 @@ def test_update_serving_config_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gcd_serving_config.ServingConfig() + 
post_with_metadata.return_value = gcd_serving_config.ServingConfig(), metadata client.update_serving_config( request, @@ -3757,6 +3812,7 @@ def test_update_serving_config_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_serving_config_rest_bad_request( @@ -3871,10 +3927,14 @@ def test_get_serving_config_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ServingConfigServiceRestInterceptor, "post_get_serving_config" ) as post, mock.patch.object( + transports.ServingConfigServiceRestInterceptor, + "post_get_serving_config_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ServingConfigServiceRestInterceptor, "pre_get_serving_config" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = serving_config_service.GetServingConfigRequest.pb( serving_config_service.GetServingConfigRequest() ) @@ -3900,6 +3960,7 @@ def test_get_serving_config_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = serving_config.ServingConfig() + post_with_metadata.return_value = serving_config.ServingConfig(), metadata client.get_serving_config( request, @@ -3911,6 +3972,7 @@ def test_get_serving_config_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_serving_configs_rest_bad_request( @@ -3997,10 +4059,14 @@ def test_list_serving_configs_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ServingConfigServiceRestInterceptor, "post_list_serving_configs" ) as post, mock.patch.object( + transports.ServingConfigServiceRestInterceptor, + "post_list_serving_configs_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ServingConfigServiceRestInterceptor, "pre_list_serving_configs" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = serving_config_service.ListServingConfigsRequest.pb( serving_config_service.ListServingConfigsRequest() ) @@ -4026,6 +4092,10 @@ def test_list_serving_configs_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = serving_config_service.ListServingConfigsResponse() + post_with_metadata.return_value = ( + serving_config_service.ListServingConfigsResponse(), + metadata, + ) client.list_serving_configs( request, @@ -4037,6 +4107,7 @@ def test_list_serving_configs_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_cancel_operation_rest_bad_request( diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_site_search_engine_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_site_search_engine_service.py index 9514fd863ff3..7b4a35740c34 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_site_search_engine_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_site_search_engine_service.py @@ -77,6 +77,13 @@ site_search_engine_service, ) +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def 
mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -352,6 +359,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = SiteSearchEngineServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = SiteSearchEngineServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -8950,10 +9000,14 @@ def test_get_site_search_engine_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.SiteSearchEngineServiceRestInterceptor, "post_get_site_search_engine" ) as post, mock.patch.object( + transports.SiteSearchEngineServiceRestInterceptor, + "post_get_site_search_engine_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.SiteSearchEngineServiceRestInterceptor, "pre_get_site_search_engine" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = site_search_engine_service.GetSiteSearchEngineRequest.pb( site_search_engine_service.GetSiteSearchEngineRequest() ) @@ -8979,6 +9033,10 @@ def test_get_site_search_engine_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = site_search_engine.SiteSearchEngine() + post_with_metadata.return_value = ( + site_search_engine.SiteSearchEngine(), + metadata, + ) client.get_site_search_engine( request, @@ -8990,6 +9048,7 @@ def test_get_site_search_engine_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_target_site_rest_bad_request( @@ -9158,10 +9217,14 @@ def test_create_target_site_rest_interceptors(null_interceptor): ), mock.patch.object( transports.SiteSearchEngineServiceRestInterceptor, "post_create_target_site" ) as post, mock.patch.object( + transports.SiteSearchEngineServiceRestInterceptor, + "post_create_target_site_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.SiteSearchEngineServiceRestInterceptor, "pre_create_target_site" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = site_search_engine_service.CreateTargetSiteRequest.pb( site_search_engine_service.CreateTargetSiteRequest() ) @@ -9185,6 +9248,7 @@ def 
test_create_target_site_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_target_site( request, @@ -9196,6 +9260,7 @@ def test_create_target_site_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_batch_create_target_sites_rest_bad_request( @@ -9281,11 +9346,15 @@ def test_batch_create_target_sites_rest_interceptors(null_interceptor): transports.SiteSearchEngineServiceRestInterceptor, "post_batch_create_target_sites", ) as post, mock.patch.object( + transports.SiteSearchEngineServiceRestInterceptor, + "post_batch_create_target_sites_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.SiteSearchEngineServiceRestInterceptor, "pre_batch_create_target_sites", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = site_search_engine_service.BatchCreateTargetSitesRequest.pb( site_search_engine_service.BatchCreateTargetSitesRequest() ) @@ -9309,6 +9378,7 @@ def test_batch_create_target_sites_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.batch_create_target_sites( request, @@ -9320,6 +9390,7 @@ def test_batch_create_target_sites_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_target_site_rest_bad_request( @@ -9422,10 +9493,14 @@ def test_get_target_site_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.SiteSearchEngineServiceRestInterceptor, "post_get_target_site" ) as post, mock.patch.object( + transports.SiteSearchEngineServiceRestInterceptor, + "post_get_target_site_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.SiteSearchEngineServiceRestInterceptor, "pre_get_target_site" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = site_search_engine_service.GetTargetSiteRequest.pb( site_search_engine_service.GetTargetSiteRequest() ) @@ -9451,6 +9526,7 @@ def test_get_target_site_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = site_search_engine.TargetSite() + post_with_metadata.return_value = site_search_engine.TargetSite(), metadata client.get_target_site( request, @@ -9462,6 +9538,7 @@ def test_get_target_site_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_target_site_rest_bad_request( @@ -9634,10 +9711,14 @@ def test_update_target_site_rest_interceptors(null_interceptor): ), mock.patch.object( transports.SiteSearchEngineServiceRestInterceptor, "post_update_target_site" ) as post, mock.patch.object( + transports.SiteSearchEngineServiceRestInterceptor, + "post_update_target_site_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.SiteSearchEngineServiceRestInterceptor, "pre_update_target_site" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = site_search_engine_service.UpdateTargetSiteRequest.pb( site_search_engine_service.UpdateTargetSiteRequest() ) @@ -9661,6 +9742,7 @@ def 
test_update_target_site_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.update_target_site( request, @@ -9672,6 +9754,7 @@ def test_update_target_site_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_target_site_rest_bad_request( @@ -9756,10 +9839,14 @@ def test_delete_target_site_rest_interceptors(null_interceptor): ), mock.patch.object( transports.SiteSearchEngineServiceRestInterceptor, "post_delete_target_site" ) as post, mock.patch.object( + transports.SiteSearchEngineServiceRestInterceptor, + "post_delete_target_site_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.SiteSearchEngineServiceRestInterceptor, "pre_delete_target_site" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = site_search_engine_service.DeleteTargetSiteRequest.pb( site_search_engine_service.DeleteTargetSiteRequest() ) @@ -9783,6 +9870,7 @@ def test_delete_target_site_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.delete_target_site( request, @@ -9794,6 +9882,7 @@ def test_delete_target_site_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_target_sites_rest_bad_request( @@ -9886,10 +9975,14 @@ def test_list_target_sites_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.SiteSearchEngineServiceRestInterceptor, "post_list_target_sites" ) as post, mock.patch.object( + transports.SiteSearchEngineServiceRestInterceptor, + "post_list_target_sites_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.SiteSearchEngineServiceRestInterceptor, "pre_list_target_sites" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = site_search_engine_service.ListTargetSitesRequest.pb( site_search_engine_service.ListTargetSitesRequest() ) @@ -9915,6 +10008,10 @@ def test_list_target_sites_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = site_search_engine_service.ListTargetSitesResponse() + post_with_metadata.return_value = ( + site_search_engine_service.ListTargetSitesResponse(), + metadata, + ) client.list_target_sites( request, @@ -9926,6 +10023,7 @@ def test_list_target_sites_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_enable_advanced_site_search_rest_bad_request( @@ -10011,11 +10109,15 @@ def test_enable_advanced_site_search_rest_interceptors(null_interceptor): transports.SiteSearchEngineServiceRestInterceptor, "post_enable_advanced_site_search", ) as post, mock.patch.object( + transports.SiteSearchEngineServiceRestInterceptor, + "post_enable_advanced_site_search_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.SiteSearchEngineServiceRestInterceptor, "pre_enable_advanced_site_search", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = site_search_engine_service.EnableAdvancedSiteSearchRequest.pb( 
site_search_engine_service.EnableAdvancedSiteSearchRequest() ) @@ -10039,6 +10141,7 @@ def test_enable_advanced_site_search_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.enable_advanced_site_search( request, @@ -10050,6 +10153,7 @@ def test_enable_advanced_site_search_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_disable_advanced_site_search_rest_bad_request( @@ -10135,11 +10239,15 @@ def test_disable_advanced_site_search_rest_interceptors(null_interceptor): transports.SiteSearchEngineServiceRestInterceptor, "post_disable_advanced_site_search", ) as post, mock.patch.object( + transports.SiteSearchEngineServiceRestInterceptor, + "post_disable_advanced_site_search_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.SiteSearchEngineServiceRestInterceptor, "pre_disable_advanced_site_search", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = site_search_engine_service.DisableAdvancedSiteSearchRequest.pb( site_search_engine_service.DisableAdvancedSiteSearchRequest() ) @@ -10163,6 +10271,7 @@ def test_disable_advanced_site_search_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.disable_advanced_site_search( request, @@ -10174,6 +10283,7 @@ def test_disable_advanced_site_search_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_recrawl_uris_rest_bad_request( @@ -10258,10 +10368,14 @@ def test_recrawl_uris_rest_interceptors(null_interceptor): ), mock.patch.object( transports.SiteSearchEngineServiceRestInterceptor, "post_recrawl_uris" ) as post, mock.patch.object( + transports.SiteSearchEngineServiceRestInterceptor, + "post_recrawl_uris_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.SiteSearchEngineServiceRestInterceptor, "pre_recrawl_uris" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = site_search_engine_service.RecrawlUrisRequest.pb( site_search_engine_service.RecrawlUrisRequest() ) @@ -10285,6 +10399,7 @@ def test_recrawl_uris_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.recrawl_uris( request, @@ -10296,6 +10411,7 @@ def test_recrawl_uris_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_batch_verify_target_sites_rest_bad_request( @@ -10381,11 +10497,15 @@ def test_batch_verify_target_sites_rest_interceptors(null_interceptor): transports.SiteSearchEngineServiceRestInterceptor, "post_batch_verify_target_sites", ) as post, mock.patch.object( + transports.SiteSearchEngineServiceRestInterceptor, + "post_batch_verify_target_sites_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.SiteSearchEngineServiceRestInterceptor, "pre_batch_verify_target_sites", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = 
site_search_engine_service.BatchVerifyTargetSitesRequest.pb( site_search_engine_service.BatchVerifyTargetSitesRequest() ) @@ -10409,6 +10529,7 @@ def test_batch_verify_target_sites_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.batch_verify_target_sites( request, @@ -10420,6 +10541,7 @@ def test_batch_verify_target_sites_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_fetch_domain_verification_status_rest_bad_request( @@ -10515,11 +10637,15 @@ def test_fetch_domain_verification_status_rest_interceptors(null_interceptor): transports.SiteSearchEngineServiceRestInterceptor, "post_fetch_domain_verification_status", ) as post, mock.patch.object( + transports.SiteSearchEngineServiceRestInterceptor, + "post_fetch_domain_verification_status_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.SiteSearchEngineServiceRestInterceptor, "pre_fetch_domain_verification_status", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = site_search_engine_service.FetchDomainVerificationStatusRequest.pb( site_search_engine_service.FetchDomainVerificationStatusRequest() ) @@ -10549,6 +10675,10 @@ def test_fetch_domain_verification_status_rest_interceptors(null_interceptor): post.return_value = ( site_search_engine_service.FetchDomainVerificationStatusResponse() ) + post_with_metadata.return_value = ( + site_search_engine_service.FetchDomainVerificationStatusResponse(), + metadata, + ) client.fetch_domain_verification_status( request, @@ -10560,6 +10690,7 @@ def test_fetch_domain_verification_status_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_set_uri_pattern_document_data_rest_bad_request( @@ -10645,11 +10776,15 @@ def test_set_uri_pattern_document_data_rest_interceptors(null_interceptor): transports.SiteSearchEngineServiceRestInterceptor, "post_set_uri_pattern_document_data", ) as post, mock.patch.object( + transports.SiteSearchEngineServiceRestInterceptor, + "post_set_uri_pattern_document_data_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.SiteSearchEngineServiceRestInterceptor, "pre_set_uri_pattern_document_data", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = site_search_engine_service.SetUriPatternDocumentDataRequest.pb( site_search_engine_service.SetUriPatternDocumentDataRequest() ) @@ -10673,6 +10808,7 @@ def test_set_uri_pattern_document_data_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.set_uri_pattern_document_data( request, @@ -10684,6 +10820,7 @@ def test_set_uri_pattern_document_data_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_uri_pattern_document_data_rest_bad_request( @@ -10774,11 +10911,15 @@ def test_get_uri_pattern_document_data_rest_interceptors(null_interceptor): transports.SiteSearchEngineServiceRestInterceptor, "post_get_uri_pattern_document_data", ) as post, mock.patch.object( + transports.SiteSearchEngineServiceRestInterceptor, + 
"post_get_uri_pattern_document_data_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.SiteSearchEngineServiceRestInterceptor, "pre_get_uri_pattern_document_data", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = site_search_engine_service.GetUriPatternDocumentDataRequest.pb( site_search_engine_service.GetUriPatternDocumentDataRequest() ) @@ -10808,6 +10949,10 @@ def test_get_uri_pattern_document_data_rest_interceptors(null_interceptor): post.return_value = ( site_search_engine_service.GetUriPatternDocumentDataResponse() ) + post_with_metadata.return_value = ( + site_search_engine_service.GetUriPatternDocumentDataResponse(), + metadata, + ) client.get_uri_pattern_document_data( request, @@ -10819,6 +10964,7 @@ def test_get_uri_pattern_document_data_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_cancel_operation_rest_bad_request( diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_user_event_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_user_event_service.py index 44f87f1e4b5a..9bd3257ab529 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_user_event_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1alpha/test_user_event_service.py @@ -81,6 +81,13 @@ user_event_service, ) +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -340,6 +347,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = UserEventServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = UserEventServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -3287,10 +3337,14 @@ def test_write_user_event_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.UserEventServiceRestInterceptor, "post_write_user_event" ) as post, mock.patch.object( + transports.UserEventServiceRestInterceptor, + "post_write_user_event_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.UserEventServiceRestInterceptor, "pre_write_user_event" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = user_event_service.WriteUserEventRequest.pb( user_event_service.WriteUserEventRequest() ) @@ -3314,6 +3368,7 @@ def test_write_user_event_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = user_event.UserEvent() + post_with_metadata.return_value = user_event.UserEvent(), metadata client.write_user_event( request, @@ -3325,6 +3380,7 @@ def test_write_user_event_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_collect_user_event_rest_bad_request( @@ -3408,10 +3464,14 @@ def test_collect_user_event_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.UserEventServiceRestInterceptor, "post_collect_user_event" ) as post, mock.patch.object( + transports.UserEventServiceRestInterceptor, + "post_collect_user_event_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.UserEventServiceRestInterceptor, "pre_collect_user_event" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = user_event_service.CollectUserEventRequest.pb( user_event_service.CollectUserEventRequest() ) @@ -3435,6 +3495,7 @@ def test_collect_user_event_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = httpbody_pb2.HttpBody() + post_with_metadata.return_value = httpbody_pb2.HttpBody(), metadata client.collect_user_event( request, @@ -3446,6 +3507,7 @@ def test_collect_user_event_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + 
post_with_metadata.assert_called_once() def test_purge_user_events_rest_bad_request( @@ -3526,10 +3588,14 @@ def test_purge_user_events_rest_interceptors(null_interceptor): ), mock.patch.object( transports.UserEventServiceRestInterceptor, "post_purge_user_events" ) as post, mock.patch.object( + transports.UserEventServiceRestInterceptor, + "post_purge_user_events_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.UserEventServiceRestInterceptor, "pre_purge_user_events" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = purge_config.PurgeUserEventsRequest.pb( purge_config.PurgeUserEventsRequest() ) @@ -3553,6 +3619,7 @@ def test_purge_user_events_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.purge_user_events( request, @@ -3564,6 +3631,7 @@ def test_purge_user_events_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_import_user_events_rest_bad_request( @@ -3644,10 +3712,14 @@ def test_import_user_events_rest_interceptors(null_interceptor): ), mock.patch.object( transports.UserEventServiceRestInterceptor, "post_import_user_events" ) as post, mock.patch.object( + transports.UserEventServiceRestInterceptor, + "post_import_user_events_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.UserEventServiceRestInterceptor, "pre_import_user_events" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = import_config.ImportUserEventsRequest.pb( import_config.ImportUserEventsRequest() ) @@ -3671,6 +3743,7 @@ def test_import_user_events_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.import_user_events( request, @@ -3682,6 +3755,7 @@ def test_import_user_events_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_cancel_operation_rest_bad_request( diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_completion_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_completion_service.py index 6c341d20fa37..fde7ef4caddf 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_completion_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_completion_service.py @@ -77,6 +77,13 @@ purge_config, ) +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -344,6 +351,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = CompletionServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = CompletionServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -3994,10 +4044,13 @@ def test_complete_query_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.CompletionServiceRestInterceptor, "post_complete_query" ) as post, mock.patch.object( + transports.CompletionServiceRestInterceptor, "post_complete_query_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.CompletionServiceRestInterceptor, "pre_complete_query" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = completion_service.CompleteQueryRequest.pb( completion_service.CompleteQueryRequest() ) @@ -4023,6 +4076,10 @@ def test_complete_query_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = completion_service.CompleteQueryResponse() + post_with_metadata.return_value = ( + completion_service.CompleteQueryResponse(), + metadata, + ) client.complete_query( request, @@ -4034,6 +4091,7 @@ def test_complete_query_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_advanced_complete_query_rest_bad_request( @@ -4122,10 +4180,14 @@ def test_advanced_complete_query_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.CompletionServiceRestInterceptor, "post_advanced_complete_query" ) as post, mock.patch.object( + transports.CompletionServiceRestInterceptor, + "post_advanced_complete_query_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.CompletionServiceRestInterceptor, "pre_advanced_complete_query" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = completion_service.AdvancedCompleteQueryRequest.pb( completion_service.AdvancedCompleteQueryRequest() ) @@ -4151,6 +4213,10 @@ def test_advanced_complete_query_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = completion_service.AdvancedCompleteQueryResponse() + post_with_metadata.return_value = ( + completion_service.AdvancedCompleteQueryResponse(), + metadata, + ) client.advanced_complete_query( request, 
@@ -4162,6 +4228,7 @@ def test_advanced_complete_query_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_import_suggestion_deny_list_entries_rest_bad_request( @@ -4247,11 +4314,15 @@ def test_import_suggestion_deny_list_entries_rest_interceptors(null_interceptor) transports.CompletionServiceRestInterceptor, "post_import_suggestion_deny_list_entries", ) as post, mock.patch.object( + transports.CompletionServiceRestInterceptor, + "post_import_suggestion_deny_list_entries_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.CompletionServiceRestInterceptor, "pre_import_suggestion_deny_list_entries", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = import_config.ImportSuggestionDenyListEntriesRequest.pb( import_config.ImportSuggestionDenyListEntriesRequest() ) @@ -4275,6 +4346,7 @@ def test_import_suggestion_deny_list_entries_rest_interceptors(null_interceptor) ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.import_suggestion_deny_list_entries( request, @@ -4286,6 +4358,7 @@ def test_import_suggestion_deny_list_entries_rest_interceptors(null_interceptor) pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_purge_suggestion_deny_list_entries_rest_bad_request( @@ -4371,11 +4444,15 @@ def test_purge_suggestion_deny_list_entries_rest_interceptors(null_interceptor): transports.CompletionServiceRestInterceptor, "post_purge_suggestion_deny_list_entries", ) as post, mock.patch.object( + transports.CompletionServiceRestInterceptor, + "post_purge_suggestion_deny_list_entries_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.CompletionServiceRestInterceptor, "pre_purge_suggestion_deny_list_entries", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = purge_config.PurgeSuggestionDenyListEntriesRequest.pb( purge_config.PurgeSuggestionDenyListEntriesRequest() ) @@ -4399,6 +4476,7 @@ def test_purge_suggestion_deny_list_entries_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.purge_suggestion_deny_list_entries( request, @@ -4410,6 +4488,7 @@ def test_purge_suggestion_deny_list_entries_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_import_completion_suggestions_rest_bad_request( @@ -4495,10 +4574,14 @@ def test_import_completion_suggestions_rest_interceptors(null_interceptor): transports.CompletionServiceRestInterceptor, "post_import_completion_suggestions", ) as post, mock.patch.object( + transports.CompletionServiceRestInterceptor, + "post_import_completion_suggestions_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.CompletionServiceRestInterceptor, "pre_import_completion_suggestions" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = import_config.ImportCompletionSuggestionsRequest.pb( import_config.ImportCompletionSuggestionsRequest() ) @@ -4522,6 +4605,7 @@ def test_import_completion_suggestions_rest_interceptors(null_interceptor): ] pre.return_value = request, 
metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.import_completion_suggestions( request, @@ -4533,6 +4617,7 @@ def test_import_completion_suggestions_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_purge_completion_suggestions_rest_bad_request( @@ -4617,10 +4702,14 @@ def test_purge_completion_suggestions_rest_interceptors(null_interceptor): ), mock.patch.object( transports.CompletionServiceRestInterceptor, "post_purge_completion_suggestions" ) as post, mock.patch.object( + transports.CompletionServiceRestInterceptor, + "post_purge_completion_suggestions_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.CompletionServiceRestInterceptor, "pre_purge_completion_suggestions" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = purge_config.PurgeCompletionSuggestionsRequest.pb( purge_config.PurgeCompletionSuggestionsRequest() ) @@ -4644,6 +4733,7 @@ def test_purge_completion_suggestions_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.purge_completion_suggestions( request, @@ -4655,6 +4745,7 @@ def test_purge_completion_suggestions_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_cancel_operation_rest_bad_request( diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_control_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_control_service.py index 903f83bd63b7..31a45a829931 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_control_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_control_service.py @@ -67,6 +67,13 @@ from google.cloud.discoveryengine_v1beta.types import control as gcd_control from google.cloud.discoveryengine_v1beta.types import control_service +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -325,6 +332,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
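Note on the test__add_cred_info_for_auth_errors blocks added above and below: each client's parametrized cases pin down the same contract — credential info is appended to an error's details only for auth-related status codes (401, 403, 404) and only when the credentials object actually exposes get_cred_info() returning something truthy; a 500, a missing method, or an empty payload leaves details untouched. A minimal, self-contained sketch of that contract follows; _FakeAPIError and add_cred_info_for_auth_errors are illustrative stand-ins written for this note, not the generated client's private helper.

import json

class _FakeAPIError:
    # Stand-in for google.api_core.exceptions.GoogleAPICallError in this sketch.
    def __init__(self, code, details):
        self.code = code
        self.details = details

def add_cred_info_for_auth_errors(credentials, error):
    # Only auth-related codes carry credential info; everything else is untouched.
    if error.code not in (401, 403, 404):
        return
    get_cred_info = getattr(credentials, "get_cred_info", None)
    if get_cred_info is None:
        return
    cred_info = get_cred_info()
    if cred_info:
        error.details.append(json.dumps(cred_info))

For example, with err = _FakeAPIError(403, ["foo"]) and a credential whose get_cred_info() returns CRED_INFO_JSON, calling add_cred_info_for_auth_errors(cred, err) leaves err.details == ["foo", CRED_INFO_STRING], which is exactly what the True-flagged rows in the parametrization assert.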
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = ControlServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = ControlServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -4545,10 +4595,13 @@ def test_create_control_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ControlServiceRestInterceptor, "post_create_control" ) as post, mock.patch.object( + transports.ControlServiceRestInterceptor, "post_create_control_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ControlServiceRestInterceptor, "pre_create_control" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = control_service.CreateControlRequest.pb( control_service.CreateControlRequest() ) @@ -4572,6 +4625,7 @@ def test_create_control_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gcd_control.Control() + post_with_metadata.return_value = gcd_control.Control(), metadata client.create_control( request, @@ -4583,6 +4637,7 @@ def test_create_control_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_control_rest_bad_request( @@ -4892,10 +4947,13 @@ def test_update_control_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ControlServiceRestInterceptor, "post_update_control" ) as post, mock.patch.object( + transports.ControlServiceRestInterceptor, "post_update_control_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ControlServiceRestInterceptor, "pre_update_control" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = control_service.UpdateControlRequest.pb( control_service.UpdateControlRequest() ) @@ -4919,6 +4977,7 @@ def test_update_control_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gcd_control.Control() + post_with_metadata.return_value = gcd_control.Control(), metadata client.update_control( request, @@ -4930,6 +4989,7 @@ def test_update_control_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def 
test_get_control_rest_bad_request(request_type=control_service.GetControlRequest): @@ -5026,10 +5086,13 @@ def test_get_control_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ControlServiceRestInterceptor, "post_get_control" ) as post, mock.patch.object( + transports.ControlServiceRestInterceptor, "post_get_control_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ControlServiceRestInterceptor, "pre_get_control" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = control_service.GetControlRequest.pb( control_service.GetControlRequest() ) @@ -5053,6 +5116,7 @@ def test_get_control_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = control.Control() + post_with_metadata.return_value = control.Control(), metadata client.get_control( request, @@ -5064,6 +5128,7 @@ def test_get_control_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_controls_rest_bad_request( @@ -5148,10 +5213,13 @@ def test_list_controls_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ControlServiceRestInterceptor, "post_list_controls" ) as post, mock.patch.object( + transports.ControlServiceRestInterceptor, "post_list_controls_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ControlServiceRestInterceptor, "pre_list_controls" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = control_service.ListControlsRequest.pb( control_service.ListControlsRequest() ) @@ -5177,6 +5245,10 @@ def test_list_controls_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = control_service.ListControlsResponse() + post_with_metadata.return_value = ( + control_service.ListControlsResponse(), + metadata, + ) client.list_controls( request, @@ -5188,6 +5260,7 @@ def test_list_controls_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_cancel_operation_rest_bad_request( diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_conversational_search_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_conversational_search_service.py index bb2dc1ee5523..9c54f5455ccd 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_conversational_search_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_conversational_search_service.py @@ -72,6 +72,13 @@ from google.cloud.discoveryengine_v1beta.types import session from google.cloud.discoveryengine_v1beta.types import session as gcd_session +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -355,6 +362,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
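The *_rest_interceptors tests in these files patch both the existing post_<rpc> hook and the new post_<rpc>_with_metadata hook on the generated REST interceptor class, then assert that a successful call invokes each exactly once and that the metadata-aware hook hands back a (response, metadata) pair. A hedged sketch of how a caller might override one of these hooks — the import path and method name mirror the generated discoveryengine_v1beta control-service transport exercised here, but the logging body is purely illustrative:

import logging

from google.cloud.discoveryengine_v1beta.services.control_service import transports


class LoggingControlInterceptor(transports.ControlServiceRestInterceptor):
    def post_list_controls_with_metadata(self, response, metadata):
        # The _with_metadata variant receives the deserialized response plus the
        # trailing metadata and must return the (response, metadata) pair.
        logging.debug("ListControls returned %d controls", len(response.controls))
        return response, metadata

An instance of such a subclass is attached through the REST transport's interceptor argument (the null_interceptor parametrization in these tests toggles exactly that), so the hook runs after the response is decoded but before it is returned to the caller.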
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = ConversationalSearchServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = ConversationalSearchServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -9264,11 +9314,15 @@ def test_converse_conversation_rest_interceptors(null_interceptor): transports.ConversationalSearchServiceRestInterceptor, "post_converse_conversation", ) as post, mock.patch.object( + transports.ConversationalSearchServiceRestInterceptor, + "post_converse_conversation_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ConversationalSearchServiceRestInterceptor, "pre_converse_conversation", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = conversational_search_service.ConverseConversationRequest.pb( conversational_search_service.ConverseConversationRequest() ) @@ -9296,6 +9350,10 @@ def test_converse_conversation_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = conversational_search_service.ConverseConversationResponse() + post_with_metadata.return_value = ( + conversational_search_service.ConverseConversationResponse(), + metadata, + ) client.converse_conversation( request, @@ -9307,6 +9365,7 @@ def test_converse_conversation_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_conversation_rest_bad_request( @@ -9531,10 +9590,14 @@ def test_create_conversation_rest_interceptors(null_interceptor): transports.ConversationalSearchServiceRestInterceptor, "post_create_conversation", ) as post, mock.patch.object( + transports.ConversationalSearchServiceRestInterceptor, + "post_create_conversation_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ConversationalSearchServiceRestInterceptor, "pre_create_conversation" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = conversational_search_service.CreateConversationRequest.pb( conversational_search_service.CreateConversationRequest() ) @@ -9560,6 +9623,7 @@ def test_create_conversation_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gcd_conversation.Conversation() + post_with_metadata.return_value = 
gcd_conversation.Conversation(), metadata client.create_conversation( request, @@ -9571,6 +9635,7 @@ def test_create_conversation_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_conversation_rest_bad_request( @@ -9916,10 +9981,14 @@ def test_update_conversation_rest_interceptors(null_interceptor): transports.ConversationalSearchServiceRestInterceptor, "post_update_conversation", ) as post, mock.patch.object( + transports.ConversationalSearchServiceRestInterceptor, + "post_update_conversation_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ConversationalSearchServiceRestInterceptor, "pre_update_conversation" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = conversational_search_service.UpdateConversationRequest.pb( conversational_search_service.UpdateConversationRequest() ) @@ -9945,6 +10014,7 @@ def test_update_conversation_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gcd_conversation.Conversation() + post_with_metadata.return_value = gcd_conversation.Conversation(), metadata client.update_conversation( request, @@ -9956,6 +10026,7 @@ def test_update_conversation_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_conversation_rest_bad_request( @@ -10048,10 +10119,14 @@ def test_get_conversation_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ConversationalSearchServiceRestInterceptor, "post_get_conversation" ) as post, mock.patch.object( + transports.ConversationalSearchServiceRestInterceptor, + "post_get_conversation_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ConversationalSearchServiceRestInterceptor, "pre_get_conversation" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = conversational_search_service.GetConversationRequest.pb( conversational_search_service.GetConversationRequest() ) @@ -10075,6 +10150,7 @@ def test_get_conversation_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = conversation.Conversation() + post_with_metadata.return_value = conversation.Conversation(), metadata client.get_conversation( request, @@ -10086,6 +10162,7 @@ def test_get_conversation_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_conversations_rest_bad_request( @@ -10172,10 +10249,14 @@ def test_list_conversations_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ConversationalSearchServiceRestInterceptor, "post_list_conversations" ) as post, mock.patch.object( + transports.ConversationalSearchServiceRestInterceptor, + "post_list_conversations_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ConversationalSearchServiceRestInterceptor, "pre_list_conversations" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = conversational_search_service.ListConversationsRequest.pb( conversational_search_service.ListConversationsRequest() ) @@ -10201,6 +10282,10 @@ def test_list_conversations_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = 
conversational_search_service.ListConversationsResponse() + post_with_metadata.return_value = ( + conversational_search_service.ListConversationsResponse(), + metadata, + ) client.list_conversations( request, @@ -10212,6 +10297,7 @@ def test_list_conversations_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_answer_query_rest_bad_request( @@ -10302,10 +10388,14 @@ def test_answer_query_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ConversationalSearchServiceRestInterceptor, "post_answer_query" ) as post, mock.patch.object( + transports.ConversationalSearchServiceRestInterceptor, + "post_answer_query_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ConversationalSearchServiceRestInterceptor, "pre_answer_query" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = conversational_search_service.AnswerQueryRequest.pb( conversational_search_service.AnswerQueryRequest() ) @@ -10331,6 +10421,10 @@ def test_answer_query_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = conversational_search_service.AnswerQueryResponse() + post_with_metadata.return_value = ( + conversational_search_service.AnswerQueryResponse(), + metadata, + ) client.answer_query( request, @@ -10342,6 +10436,7 @@ def test_answer_query_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_answer_rest_bad_request( @@ -10442,10 +10537,14 @@ def test_get_answer_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ConversationalSearchServiceRestInterceptor, "post_get_answer" ) as post, mock.patch.object( + transports.ConversationalSearchServiceRestInterceptor, + "post_get_answer_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ConversationalSearchServiceRestInterceptor, "pre_get_answer" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = conversational_search_service.GetAnswerRequest.pb( conversational_search_service.GetAnswerRequest() ) @@ -10469,6 +10568,7 @@ def test_get_answer_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = answer.Answer() + post_with_metadata.return_value = answer.Answer(), metadata client.get_answer( request, @@ -10480,6 +10580,7 @@ def test_get_answer_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_session_rest_bad_request( @@ -10650,10 +10751,14 @@ def test_create_session_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ConversationalSearchServiceRestInterceptor, "post_create_session" ) as post, mock.patch.object( + transports.ConversationalSearchServiceRestInterceptor, + "post_create_session_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ConversationalSearchServiceRestInterceptor, "pre_create_session" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = conversational_search_service.CreateSessionRequest.pb( conversational_search_service.CreateSessionRequest() ) @@ -10677,6 +10782,7 @@ def test_create_session_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata 
post.return_value = gcd_session.Session() + post_with_metadata.return_value = gcd_session.Session(), metadata client.create_session( request, @@ -10688,6 +10794,7 @@ def test_create_session_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_session_rest_bad_request( @@ -10979,10 +11086,14 @@ def test_update_session_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ConversationalSearchServiceRestInterceptor, "post_update_session" ) as post, mock.patch.object( + transports.ConversationalSearchServiceRestInterceptor, + "post_update_session_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ConversationalSearchServiceRestInterceptor, "pre_update_session" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = conversational_search_service.UpdateSessionRequest.pb( conversational_search_service.UpdateSessionRequest() ) @@ -11006,6 +11117,7 @@ def test_update_session_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gcd_session.Session() + post_with_metadata.return_value = gcd_session.Session(), metadata client.update_session( request, @@ -11017,6 +11129,7 @@ def test_update_session_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_session_rest_bad_request( @@ -11109,10 +11222,14 @@ def test_get_session_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ConversationalSearchServiceRestInterceptor, "post_get_session" ) as post, mock.patch.object( + transports.ConversationalSearchServiceRestInterceptor, + "post_get_session_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ConversationalSearchServiceRestInterceptor, "pre_get_session" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = conversational_search_service.GetSessionRequest.pb( conversational_search_service.GetSessionRequest() ) @@ -11136,6 +11253,7 @@ def test_get_session_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = session.Session() + post_with_metadata.return_value = session.Session(), metadata client.get_session( request, @@ -11147,6 +11265,7 @@ def test_get_session_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_sessions_rest_bad_request( @@ -11233,10 +11352,14 @@ def test_list_sessions_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ConversationalSearchServiceRestInterceptor, "post_list_sessions" ) as post, mock.patch.object( + transports.ConversationalSearchServiceRestInterceptor, + "post_list_sessions_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ConversationalSearchServiceRestInterceptor, "pre_list_sessions" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = conversational_search_service.ListSessionsRequest.pb( conversational_search_service.ListSessionsRequest() ) @@ -11262,6 +11385,10 @@ def test_list_sessions_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = conversational_search_service.ListSessionsResponse() + post_with_metadata.return_value = ( + 
conversational_search_service.ListSessionsResponse(), + metadata, + ) client.list_sessions( request, @@ -11273,6 +11400,7 @@ def test_list_sessions_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_cancel_operation_rest_bad_request( diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_data_store_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_data_store_service.py index 4e5f3a2d51ec..a0cc6e0179d1 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_data_store_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_data_store_service.py @@ -82,6 +82,13 @@ from google.cloud.discoveryengine_v1beta.types import common from google.cloud.discoveryengine_v1beta.types import data_store +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -341,6 +348,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = DataStoreServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = DataStoreServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -4667,10 +4717,14 @@ def test_create_data_store_rest_interceptors(null_interceptor): ), mock.patch.object( transports.DataStoreServiceRestInterceptor, "post_create_data_store" ) as post, mock.patch.object( + transports.DataStoreServiceRestInterceptor, + "post_create_data_store_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DataStoreServiceRestInterceptor, "pre_create_data_store" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = data_store_service.CreateDataStoreRequest.pb( data_store_service.CreateDataStoreRequest() ) @@ -4694,6 +4748,7 @@ def test_create_data_store_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + 
post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_data_store( request, @@ -4705,6 +4760,7 @@ def test_create_data_store_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_data_store_rest_bad_request( @@ -4799,10 +4855,13 @@ def test_get_data_store_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DataStoreServiceRestInterceptor, "post_get_data_store" ) as post, mock.patch.object( + transports.DataStoreServiceRestInterceptor, "post_get_data_store_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DataStoreServiceRestInterceptor, "pre_get_data_store" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = data_store_service.GetDataStoreRequest.pb( data_store_service.GetDataStoreRequest() ) @@ -4826,6 +4885,7 @@ def test_get_data_store_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = data_store.DataStore() + post_with_metadata.return_value = data_store.DataStore(), metadata client.get_data_store( request, @@ -4837,6 +4897,7 @@ def test_get_data_store_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_data_stores_rest_bad_request( @@ -4921,10 +4982,14 @@ def test_list_data_stores_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DataStoreServiceRestInterceptor, "post_list_data_stores" ) as post, mock.patch.object( + transports.DataStoreServiceRestInterceptor, + "post_list_data_stores_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DataStoreServiceRestInterceptor, "pre_list_data_stores" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = data_store_service.ListDataStoresRequest.pb( data_store_service.ListDataStoresRequest() ) @@ -4950,6 +5015,10 @@ def test_list_data_stores_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = data_store_service.ListDataStoresResponse() + post_with_metadata.return_value = ( + data_store_service.ListDataStoresResponse(), + metadata, + ) client.list_data_stores( request, @@ -4961,6 +5030,7 @@ def test_list_data_stores_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_data_store_rest_bad_request( @@ -5041,10 +5111,14 @@ def test_delete_data_store_rest_interceptors(null_interceptor): ), mock.patch.object( transports.DataStoreServiceRestInterceptor, "post_delete_data_store" ) as post, mock.patch.object( + transports.DataStoreServiceRestInterceptor, + "post_delete_data_store_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DataStoreServiceRestInterceptor, "pre_delete_data_store" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = data_store_service.DeleteDataStoreRequest.pb( data_store_service.DeleteDataStoreRequest() ) @@ -5068,6 +5142,7 @@ def test_delete_data_store_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.delete_data_store( request, @@ -5079,6 +5154,7 @@ def 
test_delete_data_store_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_data_store_rest_bad_request( @@ -5301,10 +5377,14 @@ def test_update_data_store_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DataStoreServiceRestInterceptor, "post_update_data_store" ) as post, mock.patch.object( + transports.DataStoreServiceRestInterceptor, + "post_update_data_store_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DataStoreServiceRestInterceptor, "pre_update_data_store" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = data_store_service.UpdateDataStoreRequest.pb( data_store_service.UpdateDataStoreRequest() ) @@ -5328,6 +5408,7 @@ def test_update_data_store_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gcd_data_store.DataStore() + post_with_metadata.return_value = gcd_data_store.DataStore(), metadata client.update_data_store( request, @@ -5339,6 +5420,7 @@ def test_update_data_store_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_cancel_operation_rest_bad_request( diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_document_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_document_service.py index 625f966dd7d3..520fb56eaf0b 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_document_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_document_service.py @@ -83,6 +83,13 @@ from google.cloud.discoveryengine_v1beta.types import document from google.cloud.discoveryengine_v1beta.types import document as gcd_document +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -341,6 +348,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
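One small detail worth calling out before the next batch of credential tests: the credential fixtures are built with mock.Mock([...]), and that first positional argument is Mock's spec. Passing ["get_cred_info"] limits the mock to that single attribute, while passing [] yields a mock exposing no attributes at all, which is why the no_get_cred_info variants can assert not hasattr(cred, "get_cred_info"). A quick standalone illustration:

from unittest import mock

cred_with_info = mock.Mock(["get_cred_info"])  # spec restricted to one attribute
cred_without_info = mock.Mock([])              # empty spec: no attributes exposed

assert hasattr(cred_with_info, "get_cred_info")
assert not hasattr(cred_without_info, "get_cred_info")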
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = DocumentServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = DocumentServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -6013,10 +6063,13 @@ def test_get_document_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DocumentServiceRestInterceptor, "post_get_document" ) as post, mock.patch.object( + transports.DocumentServiceRestInterceptor, "post_get_document_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DocumentServiceRestInterceptor, "pre_get_document" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = document_service.GetDocumentRequest.pb( document_service.GetDocumentRequest() ) @@ -6040,6 +6093,7 @@ def test_get_document_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = document.Document() + post_with_metadata.return_value = document.Document(), metadata client.get_document( request, @@ -6051,6 +6105,7 @@ def test_get_document_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_documents_rest_bad_request( @@ -6139,10 +6194,13 @@ def test_list_documents_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DocumentServiceRestInterceptor, "post_list_documents" ) as post, mock.patch.object( + transports.DocumentServiceRestInterceptor, "post_list_documents_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DocumentServiceRestInterceptor, "pre_list_documents" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = document_service.ListDocumentsRequest.pb( document_service.ListDocumentsRequest() ) @@ -6168,6 +6226,10 @@ def test_list_documents_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = document_service.ListDocumentsResponse() + post_with_metadata.return_value = ( + document_service.ListDocumentsResponse(), + metadata, + ) client.list_documents( request, @@ -6179,6 +6241,7 @@ def test_list_documents_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def 
test_create_document_rest_bad_request( @@ -6371,10 +6434,13 @@ def test_create_document_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DocumentServiceRestInterceptor, "post_create_document" ) as post, mock.patch.object( + transports.DocumentServiceRestInterceptor, "post_create_document_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DocumentServiceRestInterceptor, "pre_create_document" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = document_service.CreateDocumentRequest.pb( document_service.CreateDocumentRequest() ) @@ -6398,6 +6464,7 @@ def test_create_document_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gcd_document.Document() + post_with_metadata.return_value = gcd_document.Document(), metadata client.create_document( request, @@ -6409,6 +6476,7 @@ def test_create_document_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_document_rest_bad_request( @@ -6605,10 +6673,13 @@ def test_update_document_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DocumentServiceRestInterceptor, "post_update_document" ) as post, mock.patch.object( + transports.DocumentServiceRestInterceptor, "post_update_document_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DocumentServiceRestInterceptor, "pre_update_document" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = document_service.UpdateDocumentRequest.pb( document_service.UpdateDocumentRequest() ) @@ -6632,6 +6703,7 @@ def test_update_document_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gcd_document.Document() + post_with_metadata.return_value = gcd_document.Document(), metadata client.update_document( request, @@ -6643,6 +6715,7 @@ def test_update_document_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_document_rest_bad_request( @@ -6840,10 +6913,13 @@ def test_import_documents_rest_interceptors(null_interceptor): ), mock.patch.object( transports.DocumentServiceRestInterceptor, "post_import_documents" ) as post, mock.patch.object( + transports.DocumentServiceRestInterceptor, "post_import_documents_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DocumentServiceRestInterceptor, "pre_import_documents" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = import_config.ImportDocumentsRequest.pb( import_config.ImportDocumentsRequest() ) @@ -6867,6 +6943,7 @@ def test_import_documents_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.import_documents( request, @@ -6878,6 +6955,7 @@ def test_import_documents_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_purge_documents_rest_bad_request( @@ -6962,10 +7040,13 @@ def test_purge_documents_rest_interceptors(null_interceptor): ), mock.patch.object( transports.DocumentServiceRestInterceptor, "post_purge_documents" ) as post, mock.patch.object( + 
transports.DocumentServiceRestInterceptor, "post_purge_documents_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DocumentServiceRestInterceptor, "pre_purge_documents" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = purge_config.PurgeDocumentsRequest.pb( purge_config.PurgeDocumentsRequest() ) @@ -6989,6 +7070,7 @@ def test_purge_documents_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.purge_documents( request, @@ -7000,6 +7082,7 @@ def test_purge_documents_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_batch_get_documents_metadata_rest_bad_request( @@ -7087,10 +7170,14 @@ def test_batch_get_documents_metadata_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DocumentServiceRestInterceptor, "post_batch_get_documents_metadata" ) as post, mock.patch.object( + transports.DocumentServiceRestInterceptor, + "post_batch_get_documents_metadata_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DocumentServiceRestInterceptor, "pre_batch_get_documents_metadata" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = document_service.BatchGetDocumentsMetadataRequest.pb( document_service.BatchGetDocumentsMetadataRequest() ) @@ -7116,6 +7203,10 @@ def test_batch_get_documents_metadata_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = document_service.BatchGetDocumentsMetadataResponse() + post_with_metadata.return_value = ( + document_service.BatchGetDocumentsMetadataResponse(), + metadata, + ) client.batch_get_documents_metadata( request, @@ -7127,6 +7218,7 @@ def test_batch_get_documents_metadata_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_cancel_operation_rest_bad_request( diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_engine_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_engine_service.py index 057afab34583..f53860eda10c 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_engine_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_engine_service.py @@ -77,6 +77,13 @@ from google.cloud.discoveryengine_v1beta.types import engine as gcd_engine from google.cloud.discoveryengine_v1beta.types import engine_service +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -331,6 +338,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = EngineServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = EngineServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -6296,10 +6346,13 @@ def test_create_engine_rest_interceptors(null_interceptor): ), mock.patch.object( transports.EngineServiceRestInterceptor, "post_create_engine" ) as post, mock.patch.object( + transports.EngineServiceRestInterceptor, "post_create_engine_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.EngineServiceRestInterceptor, "pre_create_engine" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = engine_service.CreateEngineRequest.pb( engine_service.CreateEngineRequest() ) @@ -6323,6 +6376,7 @@ def test_create_engine_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_engine( request, @@ -6334,6 +6388,7 @@ def test_create_engine_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_engine_rest_bad_request( @@ -6418,10 +6473,13 @@ def test_delete_engine_rest_interceptors(null_interceptor): ), mock.patch.object( transports.EngineServiceRestInterceptor, "post_delete_engine" ) as post, mock.patch.object( + transports.EngineServiceRestInterceptor, "post_delete_engine_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.EngineServiceRestInterceptor, "pre_delete_engine" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = engine_service.DeleteEngineRequest.pb( engine_service.DeleteEngineRequest() ) @@ -6445,6 +6503,7 @@ def test_delete_engine_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.delete_engine( request, @@ -6456,6 +6515,7 @@ def test_delete_engine_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_engine_rest_bad_request( @@ -6647,10 +6707,13 @@ def 
test_update_engine_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.EngineServiceRestInterceptor, "post_update_engine" ) as post, mock.patch.object( + transports.EngineServiceRestInterceptor, "post_update_engine_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.EngineServiceRestInterceptor, "pre_update_engine" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = engine_service.UpdateEngineRequest.pb( engine_service.UpdateEngineRequest() ) @@ -6674,6 +6737,7 @@ def test_update_engine_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gcd_engine.Engine() + post_with_metadata.return_value = gcd_engine.Engine(), metadata client.update_engine( request, @@ -6685,6 +6749,7 @@ def test_update_engine_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_engine_rest_bad_request(request_type=engine_service.GetEngineRequest): @@ -6781,10 +6846,13 @@ def test_get_engine_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.EngineServiceRestInterceptor, "post_get_engine" ) as post, mock.patch.object( + transports.EngineServiceRestInterceptor, "post_get_engine_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.EngineServiceRestInterceptor, "pre_get_engine" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = engine_service.GetEngineRequest.pb( engine_service.GetEngineRequest() ) @@ -6808,6 +6876,7 @@ def test_get_engine_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = engine.Engine() + post_with_metadata.return_value = engine.Engine(), metadata client.get_engine( request, @@ -6819,6 +6888,7 @@ def test_get_engine_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_engines_rest_bad_request(request_type=engine_service.ListEnginesRequest): @@ -6901,10 +6971,13 @@ def test_list_engines_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.EngineServiceRestInterceptor, "post_list_engines" ) as post, mock.patch.object( + transports.EngineServiceRestInterceptor, "post_list_engines_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.EngineServiceRestInterceptor, "pre_list_engines" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = engine_service.ListEnginesRequest.pb( engine_service.ListEnginesRequest() ) @@ -6930,6 +7003,7 @@ def test_list_engines_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = engine_service.ListEnginesResponse() + post_with_metadata.return_value = engine_service.ListEnginesResponse(), metadata client.list_engines( request, @@ -6941,6 +7015,7 @@ def test_list_engines_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_pause_engine_rest_bad_request(request_type=engine_service.PauseEngineRequest): @@ -7037,10 +7112,13 @@ def test_pause_engine_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.EngineServiceRestInterceptor, "post_pause_engine" ) as post, mock.patch.object( + transports.EngineServiceRestInterceptor, 
"post_pause_engine_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.EngineServiceRestInterceptor, "pre_pause_engine" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = engine_service.PauseEngineRequest.pb( engine_service.PauseEngineRequest() ) @@ -7064,6 +7142,7 @@ def test_pause_engine_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = engine.Engine() + post_with_metadata.return_value = engine.Engine(), metadata client.pause_engine( request, @@ -7075,6 +7154,7 @@ def test_pause_engine_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_resume_engine_rest_bad_request( @@ -7173,10 +7253,13 @@ def test_resume_engine_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.EngineServiceRestInterceptor, "post_resume_engine" ) as post, mock.patch.object( + transports.EngineServiceRestInterceptor, "post_resume_engine_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.EngineServiceRestInterceptor, "pre_resume_engine" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = engine_service.ResumeEngineRequest.pb( engine_service.ResumeEngineRequest() ) @@ -7200,6 +7283,7 @@ def test_resume_engine_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = engine.Engine() + post_with_metadata.return_value = engine.Engine(), metadata client.resume_engine( request, @@ -7211,6 +7295,7 @@ def test_resume_engine_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_tune_engine_rest_bad_request(request_type=engine_service.TuneEngineRequest): @@ -7293,10 +7378,13 @@ def test_tune_engine_rest_interceptors(null_interceptor): ), mock.patch.object( transports.EngineServiceRestInterceptor, "post_tune_engine" ) as post, mock.patch.object( + transports.EngineServiceRestInterceptor, "post_tune_engine_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.EngineServiceRestInterceptor, "pre_tune_engine" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = engine_service.TuneEngineRequest.pb( engine_service.TuneEngineRequest() ) @@ -7320,6 +7408,7 @@ def test_tune_engine_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.tune_engine( request, @@ -7331,6 +7420,7 @@ def test_tune_engine_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_cancel_operation_rest_bad_request( diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_evaluation_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_evaluation_service.py index 1d2a7ae21374..e93b9fb8fa00 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_evaluation_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_evaluation_service.py @@ -78,6 +78,13 @@ from google.cloud.discoveryengine_v1beta.types import common from google.cloud.discoveryengine_v1beta.types 
import evaluation +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -345,6 +352,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = EvaluationServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = EvaluationServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -4188,10 +4238,13 @@ def test_get_evaluation_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.EvaluationServiceRestInterceptor, "post_get_evaluation" ) as post, mock.patch.object( + transports.EvaluationServiceRestInterceptor, "post_get_evaluation_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.EvaluationServiceRestInterceptor, "pre_get_evaluation" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = evaluation_service.GetEvaluationRequest.pb( evaluation_service.GetEvaluationRequest() ) @@ -4215,6 +4268,7 @@ def test_get_evaluation_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = evaluation.Evaluation() + post_with_metadata.return_value = evaluation.Evaluation(), metadata client.get_evaluation( request, @@ -4226,6 +4280,7 @@ def test_get_evaluation_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_evaluations_rest_bad_request( @@ -4310,10 +4365,14 @@ def test_list_evaluations_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.EvaluationServiceRestInterceptor, "post_list_evaluations" ) as post, mock.patch.object( + transports.EvaluationServiceRestInterceptor, + "post_list_evaluations_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.EvaluationServiceRestInterceptor, "pre_list_evaluations" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = evaluation_service.ListEvaluationsRequest.pb( evaluation_service.ListEvaluationsRequest() 
) @@ -4339,6 +4398,10 @@ def test_list_evaluations_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = evaluation_service.ListEvaluationsResponse() + post_with_metadata.return_value = ( + evaluation_service.ListEvaluationsResponse(), + metadata, + ) client.list_evaluations( request, @@ -4350,6 +4413,7 @@ def test_list_evaluations_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_evaluation_rest_bad_request( @@ -4660,10 +4724,14 @@ def test_create_evaluation_rest_interceptors(null_interceptor): ), mock.patch.object( transports.EvaluationServiceRestInterceptor, "post_create_evaluation" ) as post, mock.patch.object( + transports.EvaluationServiceRestInterceptor, + "post_create_evaluation_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.EvaluationServiceRestInterceptor, "pre_create_evaluation" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = evaluation_service.CreateEvaluationRequest.pb( evaluation_service.CreateEvaluationRequest() ) @@ -4687,6 +4755,7 @@ def test_create_evaluation_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_evaluation( request, @@ -4698,6 +4767,7 @@ def test_create_evaluation_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_evaluation_results_rest_bad_request( @@ -4786,10 +4856,14 @@ def test_list_evaluation_results_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.EvaluationServiceRestInterceptor, "post_list_evaluation_results" ) as post, mock.patch.object( + transports.EvaluationServiceRestInterceptor, + "post_list_evaluation_results_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.EvaluationServiceRestInterceptor, "pre_list_evaluation_results" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = evaluation_service.ListEvaluationResultsRequest.pb( evaluation_service.ListEvaluationResultsRequest() ) @@ -4815,6 +4889,10 @@ def test_list_evaluation_results_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = evaluation_service.ListEvaluationResultsResponse() + post_with_metadata.return_value = ( + evaluation_service.ListEvaluationResultsResponse(), + metadata, + ) client.list_evaluation_results( request, @@ -4826,6 +4904,7 @@ def test_list_evaluation_results_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_cancel_operation_rest_bad_request( diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_grounded_generation_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_grounded_generation_service.py index c2dd3c928427..c77ee91fdd33 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_grounded_generation_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_grounded_generation_service.py @@ -64,6 +64,13 @@ grounding, ) +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + 
"credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -343,6 +350,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = GroundedGenerationServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = GroundedGenerationServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -2396,11 +2446,15 @@ def test_generate_grounded_content_rest_interceptors(null_interceptor): transports.GroundedGenerationServiceRestInterceptor, "post_generate_grounded_content", ) as post, mock.patch.object( + transports.GroundedGenerationServiceRestInterceptor, + "post_generate_grounded_content_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.GroundedGenerationServiceRestInterceptor, "pre_generate_grounded_content", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = grounded_generation_service.GenerateGroundedContentRequest.pb( grounded_generation_service.GenerateGroundedContentRequest() ) @@ -2430,6 +2484,10 @@ def test_generate_grounded_content_rest_interceptors(null_interceptor): post.return_value = ( grounded_generation_service.GenerateGroundedContentResponse() ) + post_with_metadata.return_value = ( + grounded_generation_service.GenerateGroundedContentResponse(), + metadata, + ) client.generate_grounded_content( request, @@ -2441,6 +2499,7 @@ def test_generate_grounded_content_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_check_grounding_rest_bad_request( @@ -2531,10 +2590,14 @@ def test_check_grounding_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.GroundedGenerationServiceRestInterceptor, "post_check_grounding" ) as post, mock.patch.object( + transports.GroundedGenerationServiceRestInterceptor, + "post_check_grounding_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.GroundedGenerationServiceRestInterceptor, "pre_check_grounding" ) as pre: pre.assert_not_called() post.assert_not_called() + 
post_with_metadata.assert_not_called() pb_message = grounded_generation_service.CheckGroundingRequest.pb( grounded_generation_service.CheckGroundingRequest() ) @@ -2560,6 +2623,10 @@ def test_check_grounding_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = grounded_generation_service.CheckGroundingResponse() + post_with_metadata.return_value = ( + grounded_generation_service.CheckGroundingResponse(), + metadata, + ) client.check_grounding( request, @@ -2571,6 +2638,7 @@ def test_check_grounding_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_cancel_operation_rest_bad_request( diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_project_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_project_service.py index 279035c4b685..d0960ca28352 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_project_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_project_service.py @@ -70,6 +70,13 @@ ) from google.cloud.discoveryengine_v1beta.types import project, project_service +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -328,6 +335,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = ProjectServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = ProjectServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -1866,10 +1916,13 @@ def test_provision_project_rest_interceptors(null_interceptor): ), mock.patch.object( transports.ProjectServiceRestInterceptor, "post_provision_project" ) as post, mock.patch.object( + transports.ProjectServiceRestInterceptor, "post_provision_project_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ProjectServiceRestInterceptor, "pre_provision_project" ) as pre: pre.assert_not_called() 
post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = project_service.ProvisionProjectRequest.pb( project_service.ProvisionProjectRequest() ) @@ -1893,6 +1946,7 @@ def test_provision_project_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.provision_project( request, @@ -1904,6 +1958,7 @@ def test_provision_project_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_cancel_operation_rest_bad_request( diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_rank_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_rank_service.py index 8f587ba6b6a0..36752e2d20b4 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_rank_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_rank_service.py @@ -61,6 +61,13 @@ ) from google.cloud.discoveryengine_v1beta.types import rank_service +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -300,6 +307,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = RankServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = RankServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -1641,10 +1691,13 @@ def test_rank_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RankServiceRestInterceptor, "post_rank" ) as post, mock.patch.object( + transports.RankServiceRestInterceptor, "post_rank_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.RankServiceRestInterceptor, "pre_rank" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = rank_service.RankRequest.pb(rank_service.RankRequest()) 
transcode.return_value = { "method": "post", @@ -1666,6 +1719,7 @@ def test_rank_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = rank_service.RankResponse() + post_with_metadata.return_value = rank_service.RankResponse(), metadata client.rank( request, @@ -1677,6 +1731,7 @@ def test_rank_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_cancel_operation_rest_bad_request( diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_recommendation_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_recommendation_service.py index 8b96cb441be8..31346677c6a4 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_recommendation_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_recommendation_service.py @@ -68,6 +68,13 @@ user_event, ) +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -343,6 +350,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = RecommendationServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = RecommendationServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -1776,10 +1826,13 @@ def test_recommend_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.RecommendationServiceRestInterceptor, "post_recommend" ) as post, mock.patch.object( + transports.RecommendationServiceRestInterceptor, "post_recommend_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.RecommendationServiceRestInterceptor, "pre_recommend" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = recommendation_service.RecommendRequest.pb( recommendation_service.RecommendRequest() ) @@ -1805,6 +1858,10 @@ def test_recommend_rest_interceptors(null_interceptor): ] 
pre.return_value = request, metadata post.return_value = recommendation_service.RecommendResponse() + post_with_metadata.return_value = ( + recommendation_service.RecommendResponse(), + metadata, + ) client.recommend( request, @@ -1816,6 +1873,7 @@ def test_recommend_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_cancel_operation_rest_bad_request( diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_sample_query_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_sample_query_service.py index 65dc921f35c6..0f54db70fafd 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_sample_query_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_sample_query_service.py @@ -77,6 +77,13 @@ from google.cloud.discoveryengine_v1beta.types import sample_query from google.cloud.discoveryengine_v1beta.types import sample_query_service +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -348,6 +355,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = SampleQueryServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = SampleQueryServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -4994,10 +5044,14 @@ def test_get_sample_query_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.SampleQueryServiceRestInterceptor, "post_get_sample_query" ) as post, mock.patch.object( + transports.SampleQueryServiceRestInterceptor, + "post_get_sample_query_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.SampleQueryServiceRestInterceptor, "pre_get_sample_query" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = sample_query_service.GetSampleQueryRequest.pb( sample_query_service.GetSampleQueryRequest() ) @@ -5021,6 
+5075,7 @@ def test_get_sample_query_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = sample_query.SampleQuery() + post_with_metadata.return_value = sample_query.SampleQuery(), metadata client.get_sample_query( request, @@ -5032,6 +5087,7 @@ def test_get_sample_query_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_sample_queries_rest_bad_request( @@ -5120,10 +5176,14 @@ def test_list_sample_queries_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.SampleQueryServiceRestInterceptor, "post_list_sample_queries" ) as post, mock.patch.object( + transports.SampleQueryServiceRestInterceptor, + "post_list_sample_queries_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.SampleQueryServiceRestInterceptor, "pre_list_sample_queries" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = sample_query_service.ListSampleQueriesRequest.pb( sample_query_service.ListSampleQueriesRequest() ) @@ -5149,6 +5209,10 @@ def test_list_sample_queries_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = sample_query_service.ListSampleQueriesResponse() + post_with_metadata.return_value = ( + sample_query_service.ListSampleQueriesResponse(), + metadata, + ) client.list_sample_queries( request, @@ -5160,6 +5224,7 @@ def test_list_sample_queries_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_sample_query_rest_bad_request( @@ -5327,10 +5392,14 @@ def test_create_sample_query_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.SampleQueryServiceRestInterceptor, "post_create_sample_query" ) as post, mock.patch.object( + transports.SampleQueryServiceRestInterceptor, + "post_create_sample_query_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.SampleQueryServiceRestInterceptor, "pre_create_sample_query" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = sample_query_service.CreateSampleQueryRequest.pb( sample_query_service.CreateSampleQueryRequest() ) @@ -5356,6 +5425,7 @@ def test_create_sample_query_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gcd_sample_query.SampleQuery() + post_with_metadata.return_value = gcd_sample_query.SampleQuery(), metadata client.create_sample_query( request, @@ -5367,6 +5437,7 @@ def test_create_sample_query_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_sample_query_rest_bad_request( @@ -5538,10 +5609,14 @@ def test_update_sample_query_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.SampleQueryServiceRestInterceptor, "post_update_sample_query" ) as post, mock.patch.object( + transports.SampleQueryServiceRestInterceptor, + "post_update_sample_query_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.SampleQueryServiceRestInterceptor, "pre_update_sample_query" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = sample_query_service.UpdateSampleQueryRequest.pb( sample_query_service.UpdateSampleQueryRequest() ) @@ -5567,6 +5642,7 @@ 
def test_update_sample_query_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gcd_sample_query.SampleQuery() + post_with_metadata.return_value = gcd_sample_query.SampleQuery(), metadata client.update_sample_query( request, @@ -5578,6 +5654,7 @@ def test_update_sample_query_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_sample_query_rest_bad_request( @@ -5775,10 +5852,14 @@ def test_import_sample_queries_rest_interceptors(null_interceptor): ), mock.patch.object( transports.SampleQueryServiceRestInterceptor, "post_import_sample_queries" ) as post, mock.patch.object( + transports.SampleQueryServiceRestInterceptor, + "post_import_sample_queries_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.SampleQueryServiceRestInterceptor, "pre_import_sample_queries" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = import_config.ImportSampleQueriesRequest.pb( import_config.ImportSampleQueriesRequest() ) @@ -5802,6 +5883,7 @@ def test_import_sample_queries_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.import_sample_queries( request, @@ -5813,6 +5895,7 @@ def test_import_sample_queries_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_cancel_operation_rest_bad_request( diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_sample_query_set_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_sample_query_set_service.py index 0fbcaa71b50b..aab62fe7fb47 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_sample_query_set_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_sample_query_set_service.py @@ -68,6 +68,13 @@ from google.cloud.discoveryengine_v1beta.types import sample_query_set from google.cloud.discoveryengine_v1beta.types import sample_query_set_service +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -343,6 +350,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+@pytest.mark.parametrize(
+    "error_code,cred_info_json,show_cred_info",
+    [
+        (401, CRED_INFO_JSON, True),
+        (403, CRED_INFO_JSON, True),
+        (404, CRED_INFO_JSON, True),
+        (500, CRED_INFO_JSON, False),
+        (401, None, False),
+        (403, None, False),
+        (404, None, False),
+        (500, None, False),
+    ],
+)
+def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info):
+    cred = mock.Mock(["get_cred_info"])
+    cred.get_cred_info = mock.Mock(return_value=cred_info_json)
+    client = SampleQuerySetServiceClient(credentials=cred)
+    client._transport._credentials = cred
+
+    error = core_exceptions.GoogleAPICallError("message", details=["foo"])
+    error.code = error_code
+
+    client._add_cred_info_for_auth_errors(error)
+    if show_cred_info:
+        assert error.details == ["foo", CRED_INFO_STRING]
+    else:
+        assert error.details == ["foo"]
+
+
+@pytest.mark.parametrize("error_code", [401, 403, 404, 500])
+def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code):
+    cred = mock.Mock([])
+    assert not hasattr(cred, "get_cred_info")
+    client = SampleQuerySetServiceClient(credentials=cred)
+    client._transport._credentials = cred
+
+    error = core_exceptions.GoogleAPICallError("message", details=[])
+    error.code = error_code
+
+    client._add_cred_info_for_auth_errors(error)
+    assert error.details == []
+
+
 @pytest.mark.parametrize(
     "client_class,transport_name",
     [
@@ -4598,10 +4648,14 @@ def test_get_sample_query_set_rest_interceptors(null_interceptor):
     ) as transcode, mock.patch.object(
         transports.SampleQuerySetServiceRestInterceptor, "post_get_sample_query_set"
     ) as post, mock.patch.object(
+        transports.SampleQuerySetServiceRestInterceptor,
+        "post_get_sample_query_set_with_metadata",
+    ) as post_with_metadata, mock.patch.object(
         transports.SampleQuerySetServiceRestInterceptor, "pre_get_sample_query_set"
     ) as pre:
         pre.assert_not_called()
         post.assert_not_called()
+        post_with_metadata.assert_not_called()
         pb_message = sample_query_set_service.GetSampleQuerySetRequest.pb(
             sample_query_set_service.GetSampleQuerySetRequest()
         )
@@ -4627,6 +4681,7 @@ def test_get_sample_query_set_rest_interceptors(null_interceptor):
         ]
         pre.return_value = request, metadata
         post.return_value = sample_query_set.SampleQuerySet()
+        post_with_metadata.return_value = sample_query_set.SampleQuerySet(), metadata

         client.get_sample_query_set(
             request,
@@ -4638,6 +4693,7 @@ def test_get_sample_query_set_rest_interceptors(null_interceptor):

         pre.assert_called_once()
         post.assert_called_once()
+        post_with_metadata.assert_called_once()


 def test_list_sample_query_sets_rest_bad_request(
@@ -4724,10 +4780,14 @@ def test_list_sample_query_sets_rest_interceptors(null_interceptor):
     ) as transcode, mock.patch.object(
         transports.SampleQuerySetServiceRestInterceptor, "post_list_sample_query_sets"
     ) as post, mock.patch.object(
+        transports.SampleQuerySetServiceRestInterceptor,
+        "post_list_sample_query_sets_with_metadata",
+    ) as post_with_metadata, mock.patch.object(
         transports.SampleQuerySetServiceRestInterceptor, "pre_list_sample_query_sets"
     ) as pre:
         pre.assert_not_called()
         post.assert_not_called()
+        post_with_metadata.assert_not_called()
         pb_message = sample_query_set_service.ListSampleQuerySetsRequest.pb(
             sample_query_set_service.ListSampleQuerySetsRequest()
         )
@@ -4753,6 +4813,10 @@ def test_list_sample_query_sets_rest_interceptors(null_interceptor):
         ]
         pre.return_value = request, metadata
         post.return_value = sample_query_set_service.ListSampleQuerySetsResponse()
+        post_with_metadata.return_value = (
+
sample_query_set_service.ListSampleQuerySetsResponse(), + metadata, + ) client.list_sample_query_sets( request, @@ -4764,6 +4828,7 @@ def test_list_sample_query_sets_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_sample_query_set_rest_bad_request( @@ -4927,10 +4992,14 @@ def test_create_sample_query_set_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.SampleQuerySetServiceRestInterceptor, "post_create_sample_query_set" ) as post, mock.patch.object( + transports.SampleQuerySetServiceRestInterceptor, + "post_create_sample_query_set_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.SampleQuerySetServiceRestInterceptor, "pre_create_sample_query_set" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = sample_query_set_service.CreateSampleQuerySetRequest.pb( sample_query_set_service.CreateSampleQuerySetRequest() ) @@ -4956,6 +5025,10 @@ def test_create_sample_query_set_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gcd_sample_query_set.SampleQuerySet() + post_with_metadata.return_value = ( + gcd_sample_query_set.SampleQuerySet(), + metadata, + ) client.create_sample_query_set( request, @@ -4967,6 +5040,7 @@ def test_create_sample_query_set_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_sample_query_set_rest_bad_request( @@ -5138,10 +5212,14 @@ def test_update_sample_query_set_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.SampleQuerySetServiceRestInterceptor, "post_update_sample_query_set" ) as post, mock.patch.object( + transports.SampleQuerySetServiceRestInterceptor, + "post_update_sample_query_set_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.SampleQuerySetServiceRestInterceptor, "pre_update_sample_query_set" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = sample_query_set_service.UpdateSampleQuerySetRequest.pb( sample_query_set_service.UpdateSampleQuerySetRequest() ) @@ -5167,6 +5245,10 @@ def test_update_sample_query_set_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gcd_sample_query_set.SampleQuerySet() + post_with_metadata.return_value = ( + gcd_sample_query_set.SampleQuerySet(), + metadata, + ) client.update_sample_query_set( request, @@ -5178,6 +5260,7 @@ def test_update_sample_query_set_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_sample_query_set_rest_bad_request( diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_schema_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_schema_service.py index 23af64632394..15ed039ae838 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_schema_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_schema_service.py @@ -75,6 +75,13 @@ from google.cloud.discoveryengine_v1beta.types import schema as gcd_schema from google.cloud.discoveryengine_v1beta.types import schema_service +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + 
"credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -329,6 +336,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = SchemaServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = SchemaServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -4279,10 +4329,13 @@ def test_get_schema_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.SchemaServiceRestInterceptor, "post_get_schema" ) as post, mock.patch.object( + transports.SchemaServiceRestInterceptor, "post_get_schema_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.SchemaServiceRestInterceptor, "pre_get_schema" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = schema_service.GetSchemaRequest.pb( schema_service.GetSchemaRequest() ) @@ -4306,6 +4359,7 @@ def test_get_schema_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = schema.Schema() + post_with_metadata.return_value = schema.Schema(), metadata client.get_schema( request, @@ -4317,6 +4371,7 @@ def test_get_schema_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_schemas_rest_bad_request(request_type=schema_service.ListSchemasRequest): @@ -4399,10 +4454,13 @@ def test_list_schemas_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.SchemaServiceRestInterceptor, "post_list_schemas" ) as post, mock.patch.object( + transports.SchemaServiceRestInterceptor, "post_list_schemas_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.SchemaServiceRestInterceptor, "pre_list_schemas" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = schema_service.ListSchemasRequest.pb( schema_service.ListSchemasRequest() ) @@ -4428,6 +4486,7 @@ def test_list_schemas_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = 
schema_service.ListSchemasResponse() + post_with_metadata.return_value = schema_service.ListSchemasResponse(), metadata client.list_schemas( request, @@ -4439,6 +4498,7 @@ def test_list_schemas_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_schema_rest_bad_request( @@ -4591,10 +4651,13 @@ def test_create_schema_rest_interceptors(null_interceptor): ), mock.patch.object( transports.SchemaServiceRestInterceptor, "post_create_schema" ) as post, mock.patch.object( + transports.SchemaServiceRestInterceptor, "post_create_schema_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.SchemaServiceRestInterceptor, "pre_create_schema" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = schema_service.CreateSchemaRequest.pb( schema_service.CreateSchemaRequest() ) @@ -4618,6 +4681,7 @@ def test_create_schema_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_schema( request, @@ -4629,6 +4693,7 @@ def test_create_schema_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_schema_rest_bad_request( @@ -4789,10 +4854,13 @@ def test_update_schema_rest_interceptors(null_interceptor): ), mock.patch.object( transports.SchemaServiceRestInterceptor, "post_update_schema" ) as post, mock.patch.object( + transports.SchemaServiceRestInterceptor, "post_update_schema_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.SchemaServiceRestInterceptor, "pre_update_schema" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = schema_service.UpdateSchemaRequest.pb( schema_service.UpdateSchemaRequest() ) @@ -4816,6 +4884,7 @@ def test_update_schema_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.update_schema( request, @@ -4827,6 +4896,7 @@ def test_update_schema_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_schema_rest_bad_request( @@ -4911,10 +4981,13 @@ def test_delete_schema_rest_interceptors(null_interceptor): ), mock.patch.object( transports.SchemaServiceRestInterceptor, "post_delete_schema" ) as post, mock.patch.object( + transports.SchemaServiceRestInterceptor, "post_delete_schema_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.SchemaServiceRestInterceptor, "pre_delete_schema" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = schema_service.DeleteSchemaRequest.pb( schema_service.DeleteSchemaRequest() ) @@ -4938,6 +5011,7 @@ def test_delete_schema_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.delete_schema( request, @@ -4949,6 +5023,7 @@ def test_delete_schema_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_cancel_operation_rest_bad_request( 
diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_search_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_search_service.py
index 20856a1cd905..91e48c5c6b3e 100644
--- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_search_service.py
+++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_search_service.py
@@ -63,6 +63,13 @@
 )
 from google.cloud.discoveryengine_v1beta.types import common, search_service

+CRED_INFO_JSON = {
+    "credential_source": "/path/to/file",
+    "credential_type": "service account credentials",
+    "principal": "service-account@example.com",
+}
+CRED_INFO_STRING = json.dumps(CRED_INFO_JSON)
+

 async def mock_async_gen(data, chunk_size=1):
     for i in range(0, len(data)):  # pragma: NO COVER
@@ -317,6 +324,49 @@ def test__get_universe_domain():
     assert str(excinfo.value) == "Universe Domain cannot be an empty string."


+@pytest.mark.parametrize(
+    "error_code,cred_info_json,show_cred_info",
+    [
+        (401, CRED_INFO_JSON, True),
+        (403, CRED_INFO_JSON, True),
+        (404, CRED_INFO_JSON, True),
+        (500, CRED_INFO_JSON, False),
+        (401, None, False),
+        (403, None, False),
+        (404, None, False),
+        (500, None, False),
+    ],
+)
+def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info):
+    cred = mock.Mock(["get_cred_info"])
+    cred.get_cred_info = mock.Mock(return_value=cred_info_json)
+    client = SearchServiceClient(credentials=cred)
+    client._transport._credentials = cred
+
+    error = core_exceptions.GoogleAPICallError("message", details=["foo"])
+    error.code = error_code
+
+    client._add_cred_info_for_auth_errors(error)
+    if show_cred_info:
+        assert error.details == ["foo", CRED_INFO_STRING]
+    else:
+        assert error.details == ["foo"]
+
+
+@pytest.mark.parametrize("error_code", [401, 403, 404, 500])
+def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code):
+    cred = mock.Mock([])
+    assert not hasattr(cred, "get_cred_info")
+    client = SearchServiceClient(credentials=cred)
+    client._transport._credentials = cred
+
+    error = core_exceptions.GoogleAPICallError("message", details=[])
+    error.code = error_code
+
+    client._add_cred_info_for_auth_errors(error)
+    assert error.details == []
+
+
 @pytest.mark.parametrize(
     "client_class,transport_name",
     [
@@ -2712,10 +2762,13 @@ def test_search_rest_interceptors(null_interceptor):
     ) as transcode, mock.patch.object(
         transports.SearchServiceRestInterceptor, "post_search"
     ) as post, mock.patch.object(
+        transports.SearchServiceRestInterceptor, "post_search_with_metadata"
+    ) as post_with_metadata, mock.patch.object(
         transports.SearchServiceRestInterceptor, "pre_search"
     ) as pre:
         pre.assert_not_called()
         post.assert_not_called()
+        post_with_metadata.assert_not_called()
         pb_message = search_service.SearchRequest.pb(search_service.SearchRequest())
         transcode.return_value = {
             "method": "post",
@@ -2739,6 +2792,7 @@ def test_search_rest_interceptors(null_interceptor):
         ]
         pre.return_value = request, metadata
         post.return_value = search_service.SearchResponse()
+        post_with_metadata.return_value = search_service.SearchResponse(), metadata

         client.search(
             request,
@@ -2750,6 +2804,7 @@ def test_search_rest_interceptors(null_interceptor):

         pre.assert_called_once()
         post.assert_called_once()
+        post_with_metadata.assert_called_once()


 def test_search_lite_rest_bad_request(request_type=search_service.SearchRequest):
@@ -2846,10 +2901,13 @@ def test_search_lite_rest_interceptors(null_interceptor):
     ) as transcode, mock.patch.object(
         transports.SearchServiceRestInterceptor, "post_search_lite"
     ) as post, mock.patch.object(
+        transports.SearchServiceRestInterceptor, "post_search_lite_with_metadata"
+    ) as post_with_metadata, mock.patch.object(
         transports.SearchServiceRestInterceptor, "pre_search_lite"
     ) as pre:
         pre.assert_not_called()
         post.assert_not_called()
+        post_with_metadata.assert_not_called()
         pb_message = search_service.SearchRequest.pb(search_service.SearchRequest())
         transcode.return_value = {
             "method": "post",
@@ -2873,6 +2931,7 @@ def test_search_lite_rest_interceptors(null_interceptor):
         ]
         pre.return_value = request, metadata
         post.return_value = search_service.SearchResponse()
+        post_with_metadata.return_value = search_service.SearchResponse(), metadata

         client.search_lite(
             request,
@@ -2884,6 +2943,7 @@ def test_search_lite_rest_interceptors(null_interceptor):

         pre.assert_called_once()
         post.assert_called_once()
+        post_with_metadata.assert_called_once()


 def test_cancel_operation_rest_bad_request(
diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_search_tuning_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_search_tuning_service.py
index 801bfbda6422..7d8522748933 100644
--- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_search_tuning_service.py
+++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_search_tuning_service.py
@@ -74,6 +74,13 @@
     search_tuning_service,
 )

+CRED_INFO_JSON = {
+    "credential_source": "/path/to/file",
+    "credential_type": "service account credentials",
+    "principal": "service-account@example.com",
+}
+CRED_INFO_STRING = json.dumps(CRED_INFO_JSON)
+

 async def mock_async_gen(data, chunk_size=1):
     for i in range(0, len(data)):  # pragma: NO COVER
@@ -347,6 +354,49 @@ def test__get_universe_domain():
     assert str(excinfo.value) == "Universe Domain cannot be an empty string."
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = SearchTuningServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = SearchTuningServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -2209,10 +2259,14 @@ def test_train_custom_model_rest_interceptors(null_interceptor): ), mock.patch.object( transports.SearchTuningServiceRestInterceptor, "post_train_custom_model" ) as post, mock.patch.object( + transports.SearchTuningServiceRestInterceptor, + "post_train_custom_model_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.SearchTuningServiceRestInterceptor, "pre_train_custom_model" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = search_tuning_service.TrainCustomModelRequest.pb( search_tuning_service.TrainCustomModelRequest() ) @@ -2236,6 +2290,7 @@ def test_train_custom_model_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.train_custom_model( request, @@ -2247,6 +2302,7 @@ def test_train_custom_model_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_custom_models_rest_bad_request( @@ -2332,10 +2388,14 @@ def test_list_custom_models_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.SearchTuningServiceRestInterceptor, "post_list_custom_models" ) as post, mock.patch.object( + transports.SearchTuningServiceRestInterceptor, + "post_list_custom_models_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.SearchTuningServiceRestInterceptor, "pre_list_custom_models" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = search_tuning_service.ListCustomModelsRequest.pb( search_tuning_service.ListCustomModelsRequest() ) @@ -2361,6 +2421,10 @@ def test_list_custom_models_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = search_tuning_service.ListCustomModelsResponse() + post_with_metadata.return_value = ( + search_tuning_service.ListCustomModelsResponse(), + metadata, + ) client.list_custom_models( request, @@ -2372,6 +2436,7 @@ def 
test_list_custom_models_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_cancel_operation_rest_bad_request( diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_serving_config_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_serving_config_service.py index 8d7e0b3863ff..ca255027b712 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_serving_config_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_serving_config_service.py @@ -69,6 +69,13 @@ from google.cloud.discoveryengine_v1beta.types import serving_config from google.cloud.discoveryengine_v1beta.types import serving_config_service +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -342,6 +349,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = ServingConfigServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = ServingConfigServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -3714,10 +3764,14 @@ def test_update_serving_config_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ServingConfigServiceRestInterceptor, "post_update_serving_config" ) as post, mock.patch.object( + transports.ServingConfigServiceRestInterceptor, + "post_update_serving_config_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ServingConfigServiceRestInterceptor, "pre_update_serving_config" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = serving_config_service.UpdateServingConfigRequest.pb( serving_config_service.UpdateServingConfigRequest() ) @@ -3743,6 +3797,7 @@ def test_update_serving_config_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gcd_serving_config.ServingConfig() + 
post_with_metadata.return_value = gcd_serving_config.ServingConfig(), metadata client.update_serving_config( request, @@ -3754,6 +3809,7 @@ def test_update_serving_config_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_serving_config_rest_bad_request( @@ -3868,10 +3924,14 @@ def test_get_serving_config_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ServingConfigServiceRestInterceptor, "post_get_serving_config" ) as post, mock.patch.object( + transports.ServingConfigServiceRestInterceptor, + "post_get_serving_config_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ServingConfigServiceRestInterceptor, "pre_get_serving_config" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = serving_config_service.GetServingConfigRequest.pb( serving_config_service.GetServingConfigRequest() ) @@ -3897,6 +3957,7 @@ def test_get_serving_config_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = serving_config.ServingConfig() + post_with_metadata.return_value = serving_config.ServingConfig(), metadata client.get_serving_config( request, @@ -3908,6 +3969,7 @@ def test_get_serving_config_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_serving_configs_rest_bad_request( @@ -3994,10 +4056,14 @@ def test_list_serving_configs_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ServingConfigServiceRestInterceptor, "post_list_serving_configs" ) as post, mock.patch.object( + transports.ServingConfigServiceRestInterceptor, + "post_list_serving_configs_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ServingConfigServiceRestInterceptor, "pre_list_serving_configs" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = serving_config_service.ListServingConfigsRequest.pb( serving_config_service.ListServingConfigsRequest() ) @@ -4023,6 +4089,10 @@ def test_list_serving_configs_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = serving_config_service.ListServingConfigsResponse() + post_with_metadata.return_value = ( + serving_config_service.ListServingConfigsResponse(), + metadata, + ) client.list_serving_configs( request, @@ -4034,6 +4104,7 @@ def test_list_serving_configs_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_cancel_operation_rest_bad_request( diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_site_search_engine_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_site_search_engine_service.py index 3ccf42fdac52..15d74781cda2 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_site_search_engine_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_site_search_engine_service.py @@ -76,6 +76,13 @@ site_search_engine_service, ) +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def 
mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -351,6 +358,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = SiteSearchEngineServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = SiteSearchEngineServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -9744,10 +9794,14 @@ def test_get_site_search_engine_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.SiteSearchEngineServiceRestInterceptor, "post_get_site_search_engine" ) as post, mock.patch.object( + transports.SiteSearchEngineServiceRestInterceptor, + "post_get_site_search_engine_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.SiteSearchEngineServiceRestInterceptor, "pre_get_site_search_engine" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = site_search_engine_service.GetSiteSearchEngineRequest.pb( site_search_engine_service.GetSiteSearchEngineRequest() ) @@ -9773,6 +9827,10 @@ def test_get_site_search_engine_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = site_search_engine.SiteSearchEngine() + post_with_metadata.return_value = ( + site_search_engine.SiteSearchEngine(), + metadata, + ) client.get_site_search_engine( request, @@ -9784,6 +9842,7 @@ def test_get_site_search_engine_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_target_site_rest_bad_request( @@ -9952,10 +10011,14 @@ def test_create_target_site_rest_interceptors(null_interceptor): ), mock.patch.object( transports.SiteSearchEngineServiceRestInterceptor, "post_create_target_site" ) as post, mock.patch.object( + transports.SiteSearchEngineServiceRestInterceptor, + "post_create_target_site_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.SiteSearchEngineServiceRestInterceptor, "pre_create_target_site" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = site_search_engine_service.CreateTargetSiteRequest.pb( site_search_engine_service.CreateTargetSiteRequest() ) @@ -9979,6 +10042,7 @@ def 
test_create_target_site_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_target_site( request, @@ -9990,6 +10054,7 @@ def test_create_target_site_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_batch_create_target_sites_rest_bad_request( @@ -10075,11 +10140,15 @@ def test_batch_create_target_sites_rest_interceptors(null_interceptor): transports.SiteSearchEngineServiceRestInterceptor, "post_batch_create_target_sites", ) as post, mock.patch.object( + transports.SiteSearchEngineServiceRestInterceptor, + "post_batch_create_target_sites_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.SiteSearchEngineServiceRestInterceptor, "pre_batch_create_target_sites", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = site_search_engine_service.BatchCreateTargetSitesRequest.pb( site_search_engine_service.BatchCreateTargetSitesRequest() ) @@ -10103,6 +10172,7 @@ def test_batch_create_target_sites_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.batch_create_target_sites( request, @@ -10114,6 +10184,7 @@ def test_batch_create_target_sites_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_target_site_rest_bad_request( @@ -10216,10 +10287,14 @@ def test_get_target_site_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.SiteSearchEngineServiceRestInterceptor, "post_get_target_site" ) as post, mock.patch.object( + transports.SiteSearchEngineServiceRestInterceptor, + "post_get_target_site_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.SiteSearchEngineServiceRestInterceptor, "pre_get_target_site" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = site_search_engine_service.GetTargetSiteRequest.pb( site_search_engine_service.GetTargetSiteRequest() ) @@ -10245,6 +10320,7 @@ def test_get_target_site_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = site_search_engine.TargetSite() + post_with_metadata.return_value = site_search_engine.TargetSite(), metadata client.get_target_site( request, @@ -10256,6 +10332,7 @@ def test_get_target_site_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_target_site_rest_bad_request( @@ -10428,10 +10505,14 @@ def test_update_target_site_rest_interceptors(null_interceptor): ), mock.patch.object( transports.SiteSearchEngineServiceRestInterceptor, "post_update_target_site" ) as post, mock.patch.object( + transports.SiteSearchEngineServiceRestInterceptor, + "post_update_target_site_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.SiteSearchEngineServiceRestInterceptor, "pre_update_target_site" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = site_search_engine_service.UpdateTargetSiteRequest.pb( site_search_engine_service.UpdateTargetSiteRequest() ) @@ -10455,6 +10536,7 @@ 
def test_update_target_site_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.update_target_site( request, @@ -10466,6 +10548,7 @@ def test_update_target_site_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_target_site_rest_bad_request( @@ -10550,10 +10633,14 @@ def test_delete_target_site_rest_interceptors(null_interceptor): ), mock.patch.object( transports.SiteSearchEngineServiceRestInterceptor, "post_delete_target_site" ) as post, mock.patch.object( + transports.SiteSearchEngineServiceRestInterceptor, + "post_delete_target_site_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.SiteSearchEngineServiceRestInterceptor, "pre_delete_target_site" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = site_search_engine_service.DeleteTargetSiteRequest.pb( site_search_engine_service.DeleteTargetSiteRequest() ) @@ -10577,6 +10664,7 @@ def test_delete_target_site_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.delete_target_site( request, @@ -10588,6 +10676,7 @@ def test_delete_target_site_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_target_sites_rest_bad_request( @@ -10680,10 +10769,14 @@ def test_list_target_sites_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.SiteSearchEngineServiceRestInterceptor, "post_list_target_sites" ) as post, mock.patch.object( + transports.SiteSearchEngineServiceRestInterceptor, + "post_list_target_sites_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.SiteSearchEngineServiceRestInterceptor, "pre_list_target_sites" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = site_search_engine_service.ListTargetSitesRequest.pb( site_search_engine_service.ListTargetSitesRequest() ) @@ -10709,6 +10802,10 @@ def test_list_target_sites_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = site_search_engine_service.ListTargetSitesResponse() + post_with_metadata.return_value = ( + site_search_engine_service.ListTargetSitesResponse(), + metadata, + ) client.list_target_sites( request, @@ -10720,6 +10817,7 @@ def test_list_target_sites_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_sitemap_rest_bad_request( @@ -10876,10 +10974,14 @@ def test_create_sitemap_rest_interceptors(null_interceptor): ), mock.patch.object( transports.SiteSearchEngineServiceRestInterceptor, "post_create_sitemap" ) as post, mock.patch.object( + transports.SiteSearchEngineServiceRestInterceptor, + "post_create_sitemap_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.SiteSearchEngineServiceRestInterceptor, "pre_create_sitemap" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = site_search_engine_service.CreateSitemapRequest.pb( site_search_engine_service.CreateSitemapRequest() ) @@ -10903,6 
+11005,7 @@ def test_create_sitemap_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_sitemap( request, @@ -10914,6 +11017,7 @@ def test_create_sitemap_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_sitemap_rest_bad_request( @@ -10998,10 +11102,14 @@ def test_delete_sitemap_rest_interceptors(null_interceptor): ), mock.patch.object( transports.SiteSearchEngineServiceRestInterceptor, "post_delete_sitemap" ) as post, mock.patch.object( + transports.SiteSearchEngineServiceRestInterceptor, + "post_delete_sitemap_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.SiteSearchEngineServiceRestInterceptor, "pre_delete_sitemap" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = site_search_engine_service.DeleteSitemapRequest.pb( site_search_engine_service.DeleteSitemapRequest() ) @@ -11025,6 +11133,7 @@ def test_delete_sitemap_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.delete_sitemap( request, @@ -11036,6 +11145,7 @@ def test_delete_sitemap_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_fetch_sitemaps_rest_bad_request( @@ -11121,10 +11231,14 @@ def test_fetch_sitemaps_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.SiteSearchEngineServiceRestInterceptor, "post_fetch_sitemaps" ) as post, mock.patch.object( + transports.SiteSearchEngineServiceRestInterceptor, + "post_fetch_sitemaps_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.SiteSearchEngineServiceRestInterceptor, "pre_fetch_sitemaps" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = site_search_engine_service.FetchSitemapsRequest.pb( site_search_engine_service.FetchSitemapsRequest() ) @@ -11150,6 +11264,10 @@ def test_fetch_sitemaps_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = site_search_engine_service.FetchSitemapsResponse() + post_with_metadata.return_value = ( + site_search_engine_service.FetchSitemapsResponse(), + metadata, + ) client.fetch_sitemaps( request, @@ -11161,6 +11279,7 @@ def test_fetch_sitemaps_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_enable_advanced_site_search_rest_bad_request( @@ -11246,11 +11365,15 @@ def test_enable_advanced_site_search_rest_interceptors(null_interceptor): transports.SiteSearchEngineServiceRestInterceptor, "post_enable_advanced_site_search", ) as post, mock.patch.object( + transports.SiteSearchEngineServiceRestInterceptor, + "post_enable_advanced_site_search_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.SiteSearchEngineServiceRestInterceptor, "pre_enable_advanced_site_search", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = site_search_engine_service.EnableAdvancedSiteSearchRequest.pb( site_search_engine_service.EnableAdvancedSiteSearchRequest() ) @@ -11274,6 +11397,7 
@@ def test_enable_advanced_site_search_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.enable_advanced_site_search( request, @@ -11285,6 +11409,7 @@ def test_enable_advanced_site_search_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_disable_advanced_site_search_rest_bad_request( @@ -11370,11 +11495,15 @@ def test_disable_advanced_site_search_rest_interceptors(null_interceptor): transports.SiteSearchEngineServiceRestInterceptor, "post_disable_advanced_site_search", ) as post, mock.patch.object( + transports.SiteSearchEngineServiceRestInterceptor, + "post_disable_advanced_site_search_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.SiteSearchEngineServiceRestInterceptor, "pre_disable_advanced_site_search", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = site_search_engine_service.DisableAdvancedSiteSearchRequest.pb( site_search_engine_service.DisableAdvancedSiteSearchRequest() ) @@ -11398,6 +11527,7 @@ def test_disable_advanced_site_search_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.disable_advanced_site_search( request, @@ -11409,6 +11539,7 @@ def test_disable_advanced_site_search_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_recrawl_uris_rest_bad_request( @@ -11493,10 +11624,14 @@ def test_recrawl_uris_rest_interceptors(null_interceptor): ), mock.patch.object( transports.SiteSearchEngineServiceRestInterceptor, "post_recrawl_uris" ) as post, mock.patch.object( + transports.SiteSearchEngineServiceRestInterceptor, + "post_recrawl_uris_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.SiteSearchEngineServiceRestInterceptor, "pre_recrawl_uris" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = site_search_engine_service.RecrawlUrisRequest.pb( site_search_engine_service.RecrawlUrisRequest() ) @@ -11520,6 +11655,7 @@ def test_recrawl_uris_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.recrawl_uris( request, @@ -11531,6 +11667,7 @@ def test_recrawl_uris_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_batch_verify_target_sites_rest_bad_request( @@ -11616,11 +11753,15 @@ def test_batch_verify_target_sites_rest_interceptors(null_interceptor): transports.SiteSearchEngineServiceRestInterceptor, "post_batch_verify_target_sites", ) as post, mock.patch.object( + transports.SiteSearchEngineServiceRestInterceptor, + "post_batch_verify_target_sites_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.SiteSearchEngineServiceRestInterceptor, "pre_batch_verify_target_sites", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = site_search_engine_service.BatchVerifyTargetSitesRequest.pb( 
site_search_engine_service.BatchVerifyTargetSitesRequest() ) @@ -11644,6 +11785,7 @@ def test_batch_verify_target_sites_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.batch_verify_target_sites( request, @@ -11655,6 +11797,7 @@ def test_batch_verify_target_sites_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_fetch_domain_verification_status_rest_bad_request( @@ -11750,11 +11893,15 @@ def test_fetch_domain_verification_status_rest_interceptors(null_interceptor): transports.SiteSearchEngineServiceRestInterceptor, "post_fetch_domain_verification_status", ) as post, mock.patch.object( + transports.SiteSearchEngineServiceRestInterceptor, + "post_fetch_domain_verification_status_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.SiteSearchEngineServiceRestInterceptor, "pre_fetch_domain_verification_status", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = site_search_engine_service.FetchDomainVerificationStatusRequest.pb( site_search_engine_service.FetchDomainVerificationStatusRequest() ) @@ -11784,6 +11931,10 @@ def test_fetch_domain_verification_status_rest_interceptors(null_interceptor): post.return_value = ( site_search_engine_service.FetchDomainVerificationStatusResponse() ) + post_with_metadata.return_value = ( + site_search_engine_service.FetchDomainVerificationStatusResponse(), + metadata, + ) client.fetch_domain_verification_status( request, @@ -11795,6 +11946,7 @@ def test_fetch_domain_verification_status_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_cancel_operation_rest_bad_request( diff --git a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_user_event_service.py b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_user_event_service.py index 0031c6081dd4..e4f911dfff5b 100644 --- a/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_user_event_service.py +++ b/packages/google-cloud-discoveryengine/tests/unit/gapic/discoveryengine_v1beta/test_user_event_service.py @@ -81,6 +81,13 @@ user_event_service, ) +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -340,6 +347,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
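
A minimal sketch of the behavior the `test__add_cred_info_for_auth_errors` cases below assert, assuming the top-level `google.cloud.discoveryengine_v1beta` import path for `UserEventServiceClient` and credentials that expose `get_cred_info` (only available in google-auth >= 2.35.0); the payload simply reuses the illustrative CRED_INFO_JSON values from this test file:

import json
from unittest import mock

from google.api_core import exceptions as core_exceptions
from google.cloud.discoveryengine_v1beta import UserEventServiceClient

# Illustrative credential info; shape and values mirror CRED_INFO_JSON above.
cred_info = {
    "credential_source": "/path/to/file",
    "credential_type": "service account credentials",
    "principal": "service-account@example.com",
}

cred = mock.Mock(["get_cred_info"])
cred.get_cred_info = mock.Mock(return_value=cred_info)
client = UserEventServiceClient(credentials=cred)
client._transport._credentials = cred

error = core_exceptions.GoogleAPICallError("permission denied", details=["foo"])
error.code = 403  # 401, 403 and 404 trigger enrichment; other codes do not

client._add_cred_info_for_auth_errors(error)
assert error.details == ["foo", json.dumps(cred_info)]

With a 500 status, or with credentials that do not implement `get_cred_info`, the details list is left unchanged, which is what the remaining parametrized cases and `test__add_cred_info_for_auth_errors_no_get_cred_info` verify.
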
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = UserEventServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = UserEventServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -3289,10 +3339,14 @@ def test_write_user_event_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.UserEventServiceRestInterceptor, "post_write_user_event" ) as post, mock.patch.object( + transports.UserEventServiceRestInterceptor, + "post_write_user_event_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.UserEventServiceRestInterceptor, "pre_write_user_event" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = user_event_service.WriteUserEventRequest.pb( user_event_service.WriteUserEventRequest() ) @@ -3316,6 +3370,7 @@ def test_write_user_event_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = user_event.UserEvent() + post_with_metadata.return_value = user_event.UserEvent(), metadata client.write_user_event( request, @@ -3327,6 +3382,7 @@ def test_write_user_event_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_collect_user_event_rest_bad_request( @@ -3410,10 +3466,14 @@ def test_collect_user_event_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.UserEventServiceRestInterceptor, "post_collect_user_event" ) as post, mock.patch.object( + transports.UserEventServiceRestInterceptor, + "post_collect_user_event_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.UserEventServiceRestInterceptor, "pre_collect_user_event" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = user_event_service.CollectUserEventRequest.pb( user_event_service.CollectUserEventRequest() ) @@ -3437,6 +3497,7 @@ def test_collect_user_event_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = httpbody_pb2.HttpBody() + post_with_metadata.return_value = httpbody_pb2.HttpBody(), metadata client.collect_user_event( request, @@ -3448,6 +3509,7 @@ def test_collect_user_event_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + 
post_with_metadata.assert_called_once() def test_purge_user_events_rest_bad_request( @@ -3528,10 +3590,14 @@ def test_purge_user_events_rest_interceptors(null_interceptor): ), mock.patch.object( transports.UserEventServiceRestInterceptor, "post_purge_user_events" ) as post, mock.patch.object( + transports.UserEventServiceRestInterceptor, + "post_purge_user_events_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.UserEventServiceRestInterceptor, "pre_purge_user_events" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = purge_config.PurgeUserEventsRequest.pb( purge_config.PurgeUserEventsRequest() ) @@ -3555,6 +3621,7 @@ def test_purge_user_events_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.purge_user_events( request, @@ -3566,6 +3633,7 @@ def test_purge_user_events_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_import_user_events_rest_bad_request( @@ -3646,10 +3714,14 @@ def test_import_user_events_rest_interceptors(null_interceptor): ), mock.patch.object( transports.UserEventServiceRestInterceptor, "post_import_user_events" ) as post, mock.patch.object( + transports.UserEventServiceRestInterceptor, + "post_import_user_events_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.UserEventServiceRestInterceptor, "pre_import_user_events" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = import_config.ImportUserEventsRequest.pb( import_config.ImportUserEventsRequest() ) @@ -3673,6 +3745,7 @@ def test_import_user_events_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.import_user_events( request, @@ -3684,6 +3757,7 @@ def test_import_user_events_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_cancel_operation_rest_bad_request( diff --git a/packages/google-cloud-dlp/CHANGELOG.md b/packages/google-cloud-dlp/CHANGELOG.md index 281bf3859673..bfd69e3099f1 100644 --- a/packages/google-cloud-dlp/CHANGELOG.md +++ b/packages/google-cloud-dlp/CHANGELOG.md @@ -4,6 +4,14 @@ [1]: https://pypi.org/project/google-cloud-dlp/#history +## [3.27.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-dlp-v3.26.0...google-cloud-dlp-v3.27.0) (2025-02-12) + + +### Features + +* Add REST Interceptors which support reading metadata ([e92d527](https://github.com/googleapis/google-cloud-python/commit/e92d52797ffbce45d033eb81af24e0cad32baa55)) +* Add support for reading selective GAPIC generation methods from service YAML ([e92d527](https://github.com/googleapis/google-cloud-python/commit/e92d52797ffbce45d033eb81af24e0cad32baa55)) + ## [3.26.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-dlp-v3.25.1...google-cloud-dlp-v3.26.0) (2024-12-12) diff --git a/packages/google-cloud-dlp/google/cloud/dlp/gapic_version.py b/packages/google-cloud-dlp/google/cloud/dlp/gapic_version.py index 1fe6836b543e..3d01951c9fa8 100644 --- a/packages/google-cloud-dlp/google/cloud/dlp/gapic_version.py +++ 
b/packages/google-cloud-dlp/google/cloud/dlp/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "3.26.0" # {x-release-please-version} +__version__ = "3.27.0" # {x-release-please-version} diff --git a/packages/google-cloud-dlp/google/cloud/dlp_v2/gapic_version.py b/packages/google-cloud-dlp/google/cloud/dlp_v2/gapic_version.py index 1fe6836b543e..3d01951c9fa8 100644 --- a/packages/google-cloud-dlp/google/cloud/dlp_v2/gapic_version.py +++ b/packages/google-cloud-dlp/google/cloud/dlp_v2/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "3.26.0" # {x-release-please-version} +__version__ = "3.27.0" # {x-release-please-version} diff --git a/packages/google-cloud-dlp/google/cloud/dlp_v2/services/dlp_service/client.py b/packages/google-cloud-dlp/google/cloud/dlp_v2/services/dlp_service/client.py index cc1f3fa4f386..f647d8cd0d60 100644 --- a/packages/google-cloud-dlp/google/cloud/dlp_v2/services/dlp_service/client.py +++ b/packages/google-cloud-dlp/google/cloud/dlp_v2/services/dlp_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -739,6 +741,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. diff --git a/packages/google-cloud-dlp/google/cloud/dlp_v2/services/dlp_service/transports/rest.py b/packages/google-cloud-dlp/google/cloud/dlp_v2/services/dlp_service/transports/rest.py index ccb233cc7ffb..4029cdc5eb90 100644 --- a/packages/google-cloud-dlp/google/cloud/dlp_v2/services/dlp_service/transports/rest.py +++ b/packages/google-cloud-dlp/google/cloud/dlp_v2/services/dlp_service/transports/rest.py @@ -487,12 +487,33 @@ def pre_activate_job_trigger( def post_activate_job_trigger(self, response: dlp.DlpJob) -> dlp.DlpJob: """Post-rpc interceptor for activate_job_trigger - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_activate_job_trigger_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DlpService server but before - it is returned to user code. + it is returned to user code. This `post_activate_job_trigger` interceptor runs + before the `post_activate_job_trigger_with_metadata` interceptor. 
""" return response + def post_activate_job_trigger_with_metadata( + self, response: dlp.DlpJob, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[dlp.DlpJob, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for activate_job_trigger + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DlpService server but before it is returned to user code. + + We recommend only using this `post_activate_job_trigger_with_metadata` + interceptor in new development instead of the `post_activate_job_trigger` interceptor. + When both interceptors are used, this `post_activate_job_trigger_with_metadata` interceptor runs after the + `post_activate_job_trigger` interceptor. The (possibly modified) response returned by + `post_activate_job_trigger` will be passed to + `post_activate_job_trigger_with_metadata`. + """ + return response, metadata + def pre_cancel_dlp_job( self, request: dlp.CancelDlpJobRequest, @@ -520,12 +541,35 @@ def pre_create_connection( def post_create_connection(self, response: dlp.Connection) -> dlp.Connection: """Post-rpc interceptor for create_connection - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_connection_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DlpService server but before - it is returned to user code. + it is returned to user code. This `post_create_connection` interceptor runs + before the `post_create_connection_with_metadata` interceptor. """ return response + def post_create_connection_with_metadata( + self, + response: dlp.Connection, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[dlp.Connection, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_connection + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DlpService server but before it is returned to user code. + + We recommend only using this `post_create_connection_with_metadata` + interceptor in new development instead of the `post_create_connection` interceptor. + When both interceptors are used, this `post_create_connection_with_metadata` interceptor runs after the + `post_create_connection` interceptor. The (possibly modified) response returned by + `post_create_connection` will be passed to + `post_create_connection_with_metadata`. + """ + return response, metadata + def pre_create_deidentify_template( self, request: dlp.CreateDeidentifyTemplateRequest, @@ -545,12 +589,35 @@ def post_create_deidentify_template( ) -> dlp.DeidentifyTemplate: """Post-rpc interceptor for create_deidentify_template - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_deidentify_template_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DlpService server but before - it is returned to user code. + it is returned to user code. This `post_create_deidentify_template` interceptor runs + before the `post_create_deidentify_template_with_metadata` interceptor. 
""" return response + def post_create_deidentify_template_with_metadata( + self, + response: dlp.DeidentifyTemplate, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[dlp.DeidentifyTemplate, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_deidentify_template + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DlpService server but before it is returned to user code. + + We recommend only using this `post_create_deidentify_template_with_metadata` + interceptor in new development instead of the `post_create_deidentify_template` interceptor. + When both interceptors are used, this `post_create_deidentify_template_with_metadata` interceptor runs after the + `post_create_deidentify_template` interceptor. The (possibly modified) response returned by + `post_create_deidentify_template` will be passed to + `post_create_deidentify_template_with_metadata`. + """ + return response, metadata + def pre_create_discovery_config( self, request: dlp.CreateDiscoveryConfigRequest, @@ -570,12 +637,35 @@ def post_create_discovery_config( ) -> dlp.DiscoveryConfig: """Post-rpc interceptor for create_discovery_config - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_discovery_config_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DlpService server but before - it is returned to user code. + it is returned to user code. This `post_create_discovery_config` interceptor runs + before the `post_create_discovery_config_with_metadata` interceptor. """ return response + def post_create_discovery_config_with_metadata( + self, + response: dlp.DiscoveryConfig, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[dlp.DiscoveryConfig, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_discovery_config + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DlpService server but before it is returned to user code. + + We recommend only using this `post_create_discovery_config_with_metadata` + interceptor in new development instead of the `post_create_discovery_config` interceptor. + When both interceptors are used, this `post_create_discovery_config_with_metadata` interceptor runs after the + `post_create_discovery_config` interceptor. The (possibly modified) response returned by + `post_create_discovery_config` will be passed to + `post_create_discovery_config_with_metadata`. + """ + return response, metadata + def pre_create_dlp_job( self, request: dlp.CreateDlpJobRequest, @@ -591,12 +681,33 @@ def pre_create_dlp_job( def post_create_dlp_job(self, response: dlp.DlpJob) -> dlp.DlpJob: """Post-rpc interceptor for create_dlp_job - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_dlp_job_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DlpService server but before - it is returned to user code. + it is returned to user code. This `post_create_dlp_job` interceptor runs + before the `post_create_dlp_job_with_metadata` interceptor. 
""" return response + def post_create_dlp_job_with_metadata( + self, response: dlp.DlpJob, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[dlp.DlpJob, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_dlp_job + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DlpService server but before it is returned to user code. + + We recommend only using this `post_create_dlp_job_with_metadata` + interceptor in new development instead of the `post_create_dlp_job` interceptor. + When both interceptors are used, this `post_create_dlp_job_with_metadata` interceptor runs after the + `post_create_dlp_job` interceptor. The (possibly modified) response returned by + `post_create_dlp_job` will be passed to + `post_create_dlp_job_with_metadata`. + """ + return response, metadata + def pre_create_inspect_template( self, request: dlp.CreateInspectTemplateRequest, @@ -616,12 +727,35 @@ def post_create_inspect_template( ) -> dlp.InspectTemplate: """Post-rpc interceptor for create_inspect_template - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_inspect_template_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DlpService server but before - it is returned to user code. + it is returned to user code. This `post_create_inspect_template` interceptor runs + before the `post_create_inspect_template_with_metadata` interceptor. """ return response + def post_create_inspect_template_with_metadata( + self, + response: dlp.InspectTemplate, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[dlp.InspectTemplate, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_inspect_template + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DlpService server but before it is returned to user code. + + We recommend only using this `post_create_inspect_template_with_metadata` + interceptor in new development instead of the `post_create_inspect_template` interceptor. + When both interceptors are used, this `post_create_inspect_template_with_metadata` interceptor runs after the + `post_create_inspect_template` interceptor. The (possibly modified) response returned by + `post_create_inspect_template` will be passed to + `post_create_inspect_template_with_metadata`. + """ + return response, metadata + def pre_create_job_trigger( self, request: dlp.CreateJobTriggerRequest, @@ -637,12 +771,35 @@ def pre_create_job_trigger( def post_create_job_trigger(self, response: dlp.JobTrigger) -> dlp.JobTrigger: """Post-rpc interceptor for create_job_trigger - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_job_trigger_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DlpService server but before - it is returned to user code. + it is returned to user code. This `post_create_job_trigger` interceptor runs + before the `post_create_job_trigger_with_metadata` interceptor. 
""" return response + def post_create_job_trigger_with_metadata( + self, + response: dlp.JobTrigger, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[dlp.JobTrigger, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_job_trigger + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DlpService server but before it is returned to user code. + + We recommend only using this `post_create_job_trigger_with_metadata` + interceptor in new development instead of the `post_create_job_trigger` interceptor. + When both interceptors are used, this `post_create_job_trigger_with_metadata` interceptor runs after the + `post_create_job_trigger` interceptor. The (possibly modified) response returned by + `post_create_job_trigger` will be passed to + `post_create_job_trigger_with_metadata`. + """ + return response, metadata + def pre_create_stored_info_type( self, request: dlp.CreateStoredInfoTypeRequest, @@ -662,12 +819,35 @@ def post_create_stored_info_type( ) -> dlp.StoredInfoType: """Post-rpc interceptor for create_stored_info_type - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_stored_info_type_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DlpService server but before - it is returned to user code. + it is returned to user code. This `post_create_stored_info_type` interceptor runs + before the `post_create_stored_info_type_with_metadata` interceptor. """ return response + def post_create_stored_info_type_with_metadata( + self, + response: dlp.StoredInfoType, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[dlp.StoredInfoType, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_stored_info_type + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DlpService server but before it is returned to user code. + + We recommend only using this `post_create_stored_info_type_with_metadata` + interceptor in new development instead of the `post_create_stored_info_type` interceptor. + When both interceptors are used, this `post_create_stored_info_type_with_metadata` interceptor runs after the + `post_create_stored_info_type` interceptor. The (possibly modified) response returned by + `post_create_stored_info_type` will be passed to + `post_create_stored_info_type_with_metadata`. + """ + return response, metadata + def pre_deidentify_content( self, request: dlp.DeidentifyContentRequest, @@ -685,12 +865,35 @@ def post_deidentify_content( ) -> dlp.DeidentifyContentResponse: """Post-rpc interceptor for deidentify_content - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_deidentify_content_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DlpService server but before - it is returned to user code. + it is returned to user code. This `post_deidentify_content` interceptor runs + before the `post_deidentify_content_with_metadata` interceptor. 
""" return response + def post_deidentify_content_with_metadata( + self, + response: dlp.DeidentifyContentResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[dlp.DeidentifyContentResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for deidentify_content + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DlpService server but before it is returned to user code. + + We recommend only using this `post_deidentify_content_with_metadata` + interceptor in new development instead of the `post_deidentify_content` interceptor. + When both interceptors are used, this `post_deidentify_content_with_metadata` interceptor runs after the + `post_deidentify_content` interceptor. The (possibly modified) response returned by + `post_deidentify_content` will be passed to + `post_deidentify_content_with_metadata`. + """ + return response, metadata + def pre_delete_connection( self, request: dlp.DeleteConnectionRequest, @@ -842,12 +1045,35 @@ def post_get_column_data_profile( ) -> dlp.ColumnDataProfile: """Post-rpc interceptor for get_column_data_profile - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_column_data_profile_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DlpService server but before - it is returned to user code. + it is returned to user code. This `post_get_column_data_profile` interceptor runs + before the `post_get_column_data_profile_with_metadata` interceptor. """ return response + def post_get_column_data_profile_with_metadata( + self, + response: dlp.ColumnDataProfile, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[dlp.ColumnDataProfile, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_column_data_profile + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DlpService server but before it is returned to user code. + + We recommend only using this `post_get_column_data_profile_with_metadata` + interceptor in new development instead of the `post_get_column_data_profile` interceptor. + When both interceptors are used, this `post_get_column_data_profile_with_metadata` interceptor runs after the + `post_get_column_data_profile` interceptor. The (possibly modified) response returned by + `post_get_column_data_profile` will be passed to + `post_get_column_data_profile_with_metadata`. + """ + return response, metadata + def pre_get_connection( self, request: dlp.GetConnectionRequest, @@ -863,12 +1089,35 @@ def pre_get_connection( def post_get_connection(self, response: dlp.Connection) -> dlp.Connection: """Post-rpc interceptor for get_connection - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_connection_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DlpService server but before - it is returned to user code. + it is returned to user code. This `post_get_connection` interceptor runs + before the `post_get_connection_with_metadata` interceptor. 
""" return response + def post_get_connection_with_metadata( + self, + response: dlp.Connection, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[dlp.Connection, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_connection + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DlpService server but before it is returned to user code. + + We recommend only using this `post_get_connection_with_metadata` + interceptor in new development instead of the `post_get_connection` interceptor. + When both interceptors are used, this `post_get_connection_with_metadata` interceptor runs after the + `post_get_connection` interceptor. The (possibly modified) response returned by + `post_get_connection` will be passed to + `post_get_connection_with_metadata`. + """ + return response, metadata + def pre_get_deidentify_template( self, request: dlp.GetDeidentifyTemplateRequest, @@ -888,12 +1137,35 @@ def post_get_deidentify_template( ) -> dlp.DeidentifyTemplate: """Post-rpc interceptor for get_deidentify_template - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_deidentify_template_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DlpService server but before - it is returned to user code. + it is returned to user code. This `post_get_deidentify_template` interceptor runs + before the `post_get_deidentify_template_with_metadata` interceptor. """ return response + def post_get_deidentify_template_with_metadata( + self, + response: dlp.DeidentifyTemplate, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[dlp.DeidentifyTemplate, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_deidentify_template + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DlpService server but before it is returned to user code. + + We recommend only using this `post_get_deidentify_template_with_metadata` + interceptor in new development instead of the `post_get_deidentify_template` interceptor. + When both interceptors are used, this `post_get_deidentify_template_with_metadata` interceptor runs after the + `post_get_deidentify_template` interceptor. The (possibly modified) response returned by + `post_get_deidentify_template` will be passed to + `post_get_deidentify_template_with_metadata`. + """ + return response, metadata + def pre_get_discovery_config( self, request: dlp.GetDiscoveryConfigRequest, @@ -911,12 +1183,35 @@ def post_get_discovery_config( ) -> dlp.DiscoveryConfig: """Post-rpc interceptor for get_discovery_config - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_discovery_config_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DlpService server but before - it is returned to user code. + it is returned to user code. This `post_get_discovery_config` interceptor runs + before the `post_get_discovery_config_with_metadata` interceptor. 
""" return response + def post_get_discovery_config_with_metadata( + self, + response: dlp.DiscoveryConfig, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[dlp.DiscoveryConfig, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_discovery_config + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DlpService server but before it is returned to user code. + + We recommend only using this `post_get_discovery_config_with_metadata` + interceptor in new development instead of the `post_get_discovery_config` interceptor. + When both interceptors are used, this `post_get_discovery_config_with_metadata` interceptor runs after the + `post_get_discovery_config` interceptor. The (possibly modified) response returned by + `post_get_discovery_config` will be passed to + `post_get_discovery_config_with_metadata`. + """ + return response, metadata + def pre_get_dlp_job( self, request: dlp.GetDlpJobRequest, @@ -932,12 +1227,33 @@ def pre_get_dlp_job( def post_get_dlp_job(self, response: dlp.DlpJob) -> dlp.DlpJob: """Post-rpc interceptor for get_dlp_job - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_dlp_job_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DlpService server but before - it is returned to user code. + it is returned to user code. This `post_get_dlp_job` interceptor runs + before the `post_get_dlp_job_with_metadata` interceptor. """ return response + def post_get_dlp_job_with_metadata( + self, response: dlp.DlpJob, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[dlp.DlpJob, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_dlp_job + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DlpService server but before it is returned to user code. + + We recommend only using this `post_get_dlp_job_with_metadata` + interceptor in new development instead of the `post_get_dlp_job` interceptor. + When both interceptors are used, this `post_get_dlp_job_with_metadata` interceptor runs after the + `post_get_dlp_job` interceptor. The (possibly modified) response returned by + `post_get_dlp_job` will be passed to + `post_get_dlp_job_with_metadata`. + """ + return response, metadata + def pre_get_file_store_data_profile( self, request: dlp.GetFileStoreDataProfileRequest, @@ -957,12 +1273,35 @@ def post_get_file_store_data_profile( ) -> dlp.FileStoreDataProfile: """Post-rpc interceptor for get_file_store_data_profile - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_file_store_data_profile_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DlpService server but before - it is returned to user code. + it is returned to user code. This `post_get_file_store_data_profile` interceptor runs + before the `post_get_file_store_data_profile_with_metadata` interceptor. 
""" return response + def post_get_file_store_data_profile_with_metadata( + self, + response: dlp.FileStoreDataProfile, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[dlp.FileStoreDataProfile, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_file_store_data_profile + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DlpService server but before it is returned to user code. + + We recommend only using this `post_get_file_store_data_profile_with_metadata` + interceptor in new development instead of the `post_get_file_store_data_profile` interceptor. + When both interceptors are used, this `post_get_file_store_data_profile_with_metadata` interceptor runs after the + `post_get_file_store_data_profile` interceptor. The (possibly modified) response returned by + `post_get_file_store_data_profile` will be passed to + `post_get_file_store_data_profile_with_metadata`. + """ + return response, metadata + def pre_get_inspect_template( self, request: dlp.GetInspectTemplateRequest, @@ -980,12 +1319,35 @@ def post_get_inspect_template( ) -> dlp.InspectTemplate: """Post-rpc interceptor for get_inspect_template - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_inspect_template_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DlpService server but before - it is returned to user code. + it is returned to user code. This `post_get_inspect_template` interceptor runs + before the `post_get_inspect_template_with_metadata` interceptor. """ return response + def post_get_inspect_template_with_metadata( + self, + response: dlp.InspectTemplate, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[dlp.InspectTemplate, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_inspect_template + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DlpService server but before it is returned to user code. + + We recommend only using this `post_get_inspect_template_with_metadata` + interceptor in new development instead of the `post_get_inspect_template` interceptor. + When both interceptors are used, this `post_get_inspect_template_with_metadata` interceptor runs after the + `post_get_inspect_template` interceptor. The (possibly modified) response returned by + `post_get_inspect_template` will be passed to + `post_get_inspect_template_with_metadata`. + """ + return response, metadata + def pre_get_job_trigger( self, request: dlp.GetJobTriggerRequest, @@ -1001,12 +1363,35 @@ def pre_get_job_trigger( def post_get_job_trigger(self, response: dlp.JobTrigger) -> dlp.JobTrigger: """Post-rpc interceptor for get_job_trigger - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_job_trigger_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DlpService server but before - it is returned to user code. + it is returned to user code. This `post_get_job_trigger` interceptor runs + before the `post_get_job_trigger_with_metadata` interceptor. 
""" return response + def post_get_job_trigger_with_metadata( + self, + response: dlp.JobTrigger, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[dlp.JobTrigger, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_job_trigger + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DlpService server but before it is returned to user code. + + We recommend only using this `post_get_job_trigger_with_metadata` + interceptor in new development instead of the `post_get_job_trigger` interceptor. + When both interceptors are used, this `post_get_job_trigger_with_metadata` interceptor runs after the + `post_get_job_trigger` interceptor. The (possibly modified) response returned by + `post_get_job_trigger` will be passed to + `post_get_job_trigger_with_metadata`. + """ + return response, metadata + def pre_get_project_data_profile( self, request: dlp.GetProjectDataProfileRequest, @@ -1026,12 +1411,35 @@ def post_get_project_data_profile( ) -> dlp.ProjectDataProfile: """Post-rpc interceptor for get_project_data_profile - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_project_data_profile_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DlpService server but before - it is returned to user code. + it is returned to user code. This `post_get_project_data_profile` interceptor runs + before the `post_get_project_data_profile_with_metadata` interceptor. """ return response + def post_get_project_data_profile_with_metadata( + self, + response: dlp.ProjectDataProfile, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[dlp.ProjectDataProfile, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_project_data_profile + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DlpService server but before it is returned to user code. + + We recommend only using this `post_get_project_data_profile_with_metadata` + interceptor in new development instead of the `post_get_project_data_profile` interceptor. + When both interceptors are used, this `post_get_project_data_profile_with_metadata` interceptor runs after the + `post_get_project_data_profile` interceptor. The (possibly modified) response returned by + `post_get_project_data_profile` will be passed to + `post_get_project_data_profile_with_metadata`. + """ + return response, metadata + def pre_get_stored_info_type( self, request: dlp.GetStoredInfoTypeRequest, @@ -1049,12 +1457,35 @@ def post_get_stored_info_type( ) -> dlp.StoredInfoType: """Post-rpc interceptor for get_stored_info_type - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_stored_info_type_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DlpService server but before - it is returned to user code. + it is returned to user code. This `post_get_stored_info_type` interceptor runs + before the `post_get_stored_info_type_with_metadata` interceptor. 
""" return response + def post_get_stored_info_type_with_metadata( + self, + response: dlp.StoredInfoType, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[dlp.StoredInfoType, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_stored_info_type + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DlpService server but before it is returned to user code. + + We recommend only using this `post_get_stored_info_type_with_metadata` + interceptor in new development instead of the `post_get_stored_info_type` interceptor. + When both interceptors are used, this `post_get_stored_info_type_with_metadata` interceptor runs after the + `post_get_stored_info_type` interceptor. The (possibly modified) response returned by + `post_get_stored_info_type` will be passed to + `post_get_stored_info_type_with_metadata`. + """ + return response, metadata + def pre_get_table_data_profile( self, request: dlp.GetTableDataProfileRequest, @@ -1072,12 +1503,35 @@ def post_get_table_data_profile( ) -> dlp.TableDataProfile: """Post-rpc interceptor for get_table_data_profile - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_table_data_profile_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DlpService server but before - it is returned to user code. + it is returned to user code. This `post_get_table_data_profile` interceptor runs + before the `post_get_table_data_profile_with_metadata` interceptor. """ return response + def post_get_table_data_profile_with_metadata( + self, + response: dlp.TableDataProfile, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[dlp.TableDataProfile, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_table_data_profile + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DlpService server but before it is returned to user code. + + We recommend only using this `post_get_table_data_profile_with_metadata` + interceptor in new development instead of the `post_get_table_data_profile` interceptor. + When both interceptors are used, this `post_get_table_data_profile_with_metadata` interceptor runs after the + `post_get_table_data_profile` interceptor. The (possibly modified) response returned by + `post_get_table_data_profile` will be passed to + `post_get_table_data_profile_with_metadata`. + """ + return response, metadata + def pre_hybrid_inspect_dlp_job( self, request: dlp.HybridInspectDlpJobRequest, @@ -1095,12 +1549,35 @@ def post_hybrid_inspect_dlp_job( ) -> dlp.HybridInspectResponse: """Post-rpc interceptor for hybrid_inspect_dlp_job - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_hybrid_inspect_dlp_job_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DlpService server but before - it is returned to user code. + it is returned to user code. This `post_hybrid_inspect_dlp_job` interceptor runs + before the `post_hybrid_inspect_dlp_job_with_metadata` interceptor. 
""" return response + def post_hybrid_inspect_dlp_job_with_metadata( + self, + response: dlp.HybridInspectResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[dlp.HybridInspectResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for hybrid_inspect_dlp_job + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DlpService server but before it is returned to user code. + + We recommend only using this `post_hybrid_inspect_dlp_job_with_metadata` + interceptor in new development instead of the `post_hybrid_inspect_dlp_job` interceptor. + When both interceptors are used, this `post_hybrid_inspect_dlp_job_with_metadata` interceptor runs after the + `post_hybrid_inspect_dlp_job` interceptor. The (possibly modified) response returned by + `post_hybrid_inspect_dlp_job` will be passed to + `post_hybrid_inspect_dlp_job_with_metadata`. + """ + return response, metadata + def pre_hybrid_inspect_job_trigger( self, request: dlp.HybridInspectJobTriggerRequest, @@ -1120,12 +1597,35 @@ def post_hybrid_inspect_job_trigger( ) -> dlp.HybridInspectResponse: """Post-rpc interceptor for hybrid_inspect_job_trigger - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_hybrid_inspect_job_trigger_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DlpService server but before - it is returned to user code. + it is returned to user code. This `post_hybrid_inspect_job_trigger` interceptor runs + before the `post_hybrid_inspect_job_trigger_with_metadata` interceptor. """ return response + def post_hybrid_inspect_job_trigger_with_metadata( + self, + response: dlp.HybridInspectResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[dlp.HybridInspectResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for hybrid_inspect_job_trigger + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DlpService server but before it is returned to user code. + + We recommend only using this `post_hybrid_inspect_job_trigger_with_metadata` + interceptor in new development instead of the `post_hybrid_inspect_job_trigger` interceptor. + When both interceptors are used, this `post_hybrid_inspect_job_trigger_with_metadata` interceptor runs after the + `post_hybrid_inspect_job_trigger` interceptor. The (possibly modified) response returned by + `post_hybrid_inspect_job_trigger` will be passed to + `post_hybrid_inspect_job_trigger_with_metadata`. + """ + return response, metadata + def pre_inspect_content( self, request: dlp.InspectContentRequest, @@ -1143,12 +1643,35 @@ def post_inspect_content( ) -> dlp.InspectContentResponse: """Post-rpc interceptor for inspect_content - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_inspect_content_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DlpService server but before - it is returned to user code. + it is returned to user code. This `post_inspect_content` interceptor runs + before the `post_inspect_content_with_metadata` interceptor. 
""" return response + def post_inspect_content_with_metadata( + self, + response: dlp.InspectContentResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[dlp.InspectContentResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for inspect_content + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DlpService server but before it is returned to user code. + + We recommend only using this `post_inspect_content_with_metadata` + interceptor in new development instead of the `post_inspect_content` interceptor. + When both interceptors are used, this `post_inspect_content_with_metadata` interceptor runs after the + `post_inspect_content` interceptor. The (possibly modified) response returned by + `post_inspect_content` will be passed to + `post_inspect_content_with_metadata`. + """ + return response, metadata + def pre_list_column_data_profiles( self, request: dlp.ListColumnDataProfilesRequest, @@ -1168,12 +1691,37 @@ def post_list_column_data_profiles( ) -> dlp.ListColumnDataProfilesResponse: """Post-rpc interceptor for list_column_data_profiles - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_column_data_profiles_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DlpService server but before - it is returned to user code. + it is returned to user code. This `post_list_column_data_profiles` interceptor runs + before the `post_list_column_data_profiles_with_metadata` interceptor. """ return response + def post_list_column_data_profiles_with_metadata( + self, + response: dlp.ListColumnDataProfilesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + dlp.ListColumnDataProfilesResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_column_data_profiles + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DlpService server but before it is returned to user code. + + We recommend only using this `post_list_column_data_profiles_with_metadata` + interceptor in new development instead of the `post_list_column_data_profiles` interceptor. + When both interceptors are used, this `post_list_column_data_profiles_with_metadata` interceptor runs after the + `post_list_column_data_profiles` interceptor. The (possibly modified) response returned by + `post_list_column_data_profiles` will be passed to + `post_list_column_data_profiles_with_metadata`. + """ + return response, metadata + def pre_list_connections( self, request: dlp.ListConnectionsRequest, @@ -1191,12 +1739,35 @@ def post_list_connections( ) -> dlp.ListConnectionsResponse: """Post-rpc interceptor for list_connections - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_connections_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DlpService server but before - it is returned to user code. + it is returned to user code. This `post_list_connections` interceptor runs + before the `post_list_connections_with_metadata` interceptor. 
""" return response + def post_list_connections_with_metadata( + self, + response: dlp.ListConnectionsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[dlp.ListConnectionsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_connections + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DlpService server but before it is returned to user code. + + We recommend only using this `post_list_connections_with_metadata` + interceptor in new development instead of the `post_list_connections` interceptor. + When both interceptors are used, this `post_list_connections_with_metadata` interceptor runs after the + `post_list_connections` interceptor. The (possibly modified) response returned by + `post_list_connections` will be passed to + `post_list_connections_with_metadata`. + """ + return response, metadata + def pre_list_deidentify_templates( self, request: dlp.ListDeidentifyTemplatesRequest, @@ -1216,12 +1787,37 @@ def post_list_deidentify_templates( ) -> dlp.ListDeidentifyTemplatesResponse: """Post-rpc interceptor for list_deidentify_templates - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_deidentify_templates_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DlpService server but before - it is returned to user code. + it is returned to user code. This `post_list_deidentify_templates` interceptor runs + before the `post_list_deidentify_templates_with_metadata` interceptor. """ return response + def post_list_deidentify_templates_with_metadata( + self, + response: dlp.ListDeidentifyTemplatesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + dlp.ListDeidentifyTemplatesResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_deidentify_templates + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DlpService server but before it is returned to user code. + + We recommend only using this `post_list_deidentify_templates_with_metadata` + interceptor in new development instead of the `post_list_deidentify_templates` interceptor. + When both interceptors are used, this `post_list_deidentify_templates_with_metadata` interceptor runs after the + `post_list_deidentify_templates` interceptor. The (possibly modified) response returned by + `post_list_deidentify_templates` will be passed to + `post_list_deidentify_templates_with_metadata`. + """ + return response, metadata + def pre_list_discovery_configs( self, request: dlp.ListDiscoveryConfigsRequest, @@ -1241,12 +1837,37 @@ def post_list_discovery_configs( ) -> dlp.ListDiscoveryConfigsResponse: """Post-rpc interceptor for list_discovery_configs - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_discovery_configs_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DlpService server but before - it is returned to user code. + it is returned to user code. This `post_list_discovery_configs` interceptor runs + before the `post_list_discovery_configs_with_metadata` interceptor. 
""" return response + def post_list_discovery_configs_with_metadata( + self, + response: dlp.ListDiscoveryConfigsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + dlp.ListDiscoveryConfigsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_discovery_configs + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DlpService server but before it is returned to user code. + + We recommend only using this `post_list_discovery_configs_with_metadata` + interceptor in new development instead of the `post_list_discovery_configs` interceptor. + When both interceptors are used, this `post_list_discovery_configs_with_metadata` interceptor runs after the + `post_list_discovery_configs` interceptor. The (possibly modified) response returned by + `post_list_discovery_configs` will be passed to + `post_list_discovery_configs_with_metadata`. + """ + return response, metadata + def pre_list_dlp_jobs( self, request: dlp.ListDlpJobsRequest, @@ -1264,12 +1885,35 @@ def post_list_dlp_jobs( ) -> dlp.ListDlpJobsResponse: """Post-rpc interceptor for list_dlp_jobs - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_dlp_jobs_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DlpService server but before - it is returned to user code. + it is returned to user code. This `post_list_dlp_jobs` interceptor runs + before the `post_list_dlp_jobs_with_metadata` interceptor. """ return response + def post_list_dlp_jobs_with_metadata( + self, + response: dlp.ListDlpJobsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[dlp.ListDlpJobsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_dlp_jobs + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DlpService server but before it is returned to user code. + + We recommend only using this `post_list_dlp_jobs_with_metadata` + interceptor in new development instead of the `post_list_dlp_jobs` interceptor. + When both interceptors are used, this `post_list_dlp_jobs_with_metadata` interceptor runs after the + `post_list_dlp_jobs` interceptor. The (possibly modified) response returned by + `post_list_dlp_jobs` will be passed to + `post_list_dlp_jobs_with_metadata`. + """ + return response, metadata + def pre_list_file_store_data_profiles( self, request: dlp.ListFileStoreDataProfilesRequest, @@ -1289,12 +1933,37 @@ def post_list_file_store_data_profiles( ) -> dlp.ListFileStoreDataProfilesResponse: """Post-rpc interceptor for list_file_store_data_profiles - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_file_store_data_profiles_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DlpService server but before - it is returned to user code. + it is returned to user code. This `post_list_file_store_data_profiles` interceptor runs + before the `post_list_file_store_data_profiles_with_metadata` interceptor. 
""" return response + def post_list_file_store_data_profiles_with_metadata( + self, + response: dlp.ListFileStoreDataProfilesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + dlp.ListFileStoreDataProfilesResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_file_store_data_profiles + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DlpService server but before it is returned to user code. + + We recommend only using this `post_list_file_store_data_profiles_with_metadata` + interceptor in new development instead of the `post_list_file_store_data_profiles` interceptor. + When both interceptors are used, this `post_list_file_store_data_profiles_with_metadata` interceptor runs after the + `post_list_file_store_data_profiles` interceptor. The (possibly modified) response returned by + `post_list_file_store_data_profiles` will be passed to + `post_list_file_store_data_profiles_with_metadata`. + """ + return response, metadata + def pre_list_info_types( self, request: dlp.ListInfoTypesRequest, @@ -1312,12 +1981,35 @@ def post_list_info_types( ) -> dlp.ListInfoTypesResponse: """Post-rpc interceptor for list_info_types - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_info_types_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DlpService server but before - it is returned to user code. + it is returned to user code. This `post_list_info_types` interceptor runs + before the `post_list_info_types_with_metadata` interceptor. """ return response + def post_list_info_types_with_metadata( + self, + response: dlp.ListInfoTypesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[dlp.ListInfoTypesResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_info_types + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DlpService server but before it is returned to user code. + + We recommend only using this `post_list_info_types_with_metadata` + interceptor in new development instead of the `post_list_info_types` interceptor. + When both interceptors are used, this `post_list_info_types_with_metadata` interceptor runs after the + `post_list_info_types` interceptor. The (possibly modified) response returned by + `post_list_info_types` will be passed to + `post_list_info_types_with_metadata`. + """ + return response, metadata + def pre_list_inspect_templates( self, request: dlp.ListInspectTemplatesRequest, @@ -1337,12 +2029,37 @@ def post_list_inspect_templates( ) -> dlp.ListInspectTemplatesResponse: """Post-rpc interceptor for list_inspect_templates - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_inspect_templates_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DlpService server but before - it is returned to user code. + it is returned to user code. This `post_list_inspect_templates` interceptor runs + before the `post_list_inspect_templates_with_metadata` interceptor. 
""" return response + def post_list_inspect_templates_with_metadata( + self, + response: dlp.ListInspectTemplatesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + dlp.ListInspectTemplatesResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_inspect_templates + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DlpService server but before it is returned to user code. + + We recommend only using this `post_list_inspect_templates_with_metadata` + interceptor in new development instead of the `post_list_inspect_templates` interceptor. + When both interceptors are used, this `post_list_inspect_templates_with_metadata` interceptor runs after the + `post_list_inspect_templates` interceptor. The (possibly modified) response returned by + `post_list_inspect_templates` will be passed to + `post_list_inspect_templates_with_metadata`. + """ + return response, metadata + def pre_list_job_triggers( self, request: dlp.ListJobTriggersRequest, @@ -1360,12 +2077,35 @@ def post_list_job_triggers( ) -> dlp.ListJobTriggersResponse: """Post-rpc interceptor for list_job_triggers - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_job_triggers_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DlpService server but before - it is returned to user code. + it is returned to user code. This `post_list_job_triggers` interceptor runs + before the `post_list_job_triggers_with_metadata` interceptor. """ return response + def post_list_job_triggers_with_metadata( + self, + response: dlp.ListJobTriggersResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[dlp.ListJobTriggersResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_job_triggers + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DlpService server but before it is returned to user code. + + We recommend only using this `post_list_job_triggers_with_metadata` + interceptor in new development instead of the `post_list_job_triggers` interceptor. + When both interceptors are used, this `post_list_job_triggers_with_metadata` interceptor runs after the + `post_list_job_triggers` interceptor. The (possibly modified) response returned by + `post_list_job_triggers` will be passed to + `post_list_job_triggers_with_metadata`. + """ + return response, metadata + def pre_list_project_data_profiles( self, request: dlp.ListProjectDataProfilesRequest, @@ -1385,12 +2125,37 @@ def post_list_project_data_profiles( ) -> dlp.ListProjectDataProfilesResponse: """Post-rpc interceptor for list_project_data_profiles - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_project_data_profiles_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DlpService server but before - it is returned to user code. + it is returned to user code. This `post_list_project_data_profiles` interceptor runs + before the `post_list_project_data_profiles_with_metadata` interceptor. 
""" return response + def post_list_project_data_profiles_with_metadata( + self, + response: dlp.ListProjectDataProfilesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + dlp.ListProjectDataProfilesResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_project_data_profiles + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DlpService server but before it is returned to user code. + + We recommend only using this `post_list_project_data_profiles_with_metadata` + interceptor in new development instead of the `post_list_project_data_profiles` interceptor. + When both interceptors are used, this `post_list_project_data_profiles_with_metadata` interceptor runs after the + `post_list_project_data_profiles` interceptor. The (possibly modified) response returned by + `post_list_project_data_profiles` will be passed to + `post_list_project_data_profiles_with_metadata`. + """ + return response, metadata + def pre_list_stored_info_types( self, request: dlp.ListStoredInfoTypesRequest, @@ -1408,12 +2173,37 @@ def post_list_stored_info_types( ) -> dlp.ListStoredInfoTypesResponse: """Post-rpc interceptor for list_stored_info_types - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_stored_info_types_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DlpService server but before - it is returned to user code. + it is returned to user code. This `post_list_stored_info_types` interceptor runs + before the `post_list_stored_info_types_with_metadata` interceptor. """ return response + def post_list_stored_info_types_with_metadata( + self, + response: dlp.ListStoredInfoTypesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + dlp.ListStoredInfoTypesResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_stored_info_types + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DlpService server but before it is returned to user code. + + We recommend only using this `post_list_stored_info_types_with_metadata` + interceptor in new development instead of the `post_list_stored_info_types` interceptor. + When both interceptors are used, this `post_list_stored_info_types_with_metadata` interceptor runs after the + `post_list_stored_info_types` interceptor. The (possibly modified) response returned by + `post_list_stored_info_types` will be passed to + `post_list_stored_info_types_with_metadata`. + """ + return response, metadata + def pre_list_table_data_profiles( self, request: dlp.ListTableDataProfilesRequest, @@ -1433,12 +2223,37 @@ def post_list_table_data_profiles( ) -> dlp.ListTableDataProfilesResponse: """Post-rpc interceptor for list_table_data_profiles - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_table_data_profiles_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DlpService server but before - it is returned to user code. + it is returned to user code. This `post_list_table_data_profiles` interceptor runs + before the `post_list_table_data_profiles_with_metadata` interceptor. 
""" return response + def post_list_table_data_profiles_with_metadata( + self, + response: dlp.ListTableDataProfilesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + dlp.ListTableDataProfilesResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_table_data_profiles + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DlpService server but before it is returned to user code. + + We recommend only using this `post_list_table_data_profiles_with_metadata` + interceptor in new development instead of the `post_list_table_data_profiles` interceptor. + When both interceptors are used, this `post_list_table_data_profiles_with_metadata` interceptor runs after the + `post_list_table_data_profiles` interceptor. The (possibly modified) response returned by + `post_list_table_data_profiles` will be passed to + `post_list_table_data_profiles_with_metadata`. + """ + return response, metadata + def pre_redact_image( self, request: dlp.RedactImageRequest, @@ -1456,12 +2271,35 @@ def post_redact_image( ) -> dlp.RedactImageResponse: """Post-rpc interceptor for redact_image - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_redact_image_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DlpService server but before - it is returned to user code. + it is returned to user code. This `post_redact_image` interceptor runs + before the `post_redact_image_with_metadata` interceptor. """ return response + def post_redact_image_with_metadata( + self, + response: dlp.RedactImageResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[dlp.RedactImageResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for redact_image + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DlpService server but before it is returned to user code. + + We recommend only using this `post_redact_image_with_metadata` + interceptor in new development instead of the `post_redact_image` interceptor. + When both interceptors are used, this `post_redact_image_with_metadata` interceptor runs after the + `post_redact_image` interceptor. The (possibly modified) response returned by + `post_redact_image` will be passed to + `post_redact_image_with_metadata`. + """ + return response, metadata + def pre_reidentify_content( self, request: dlp.ReidentifyContentRequest, @@ -1479,12 +2317,35 @@ def post_reidentify_content( ) -> dlp.ReidentifyContentResponse: """Post-rpc interceptor for reidentify_content - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_reidentify_content_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DlpService server but before - it is returned to user code. + it is returned to user code. This `post_reidentify_content` interceptor runs + before the `post_reidentify_content_with_metadata` interceptor. 
""" return response + def post_reidentify_content_with_metadata( + self, + response: dlp.ReidentifyContentResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[dlp.ReidentifyContentResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for reidentify_content + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DlpService server but before it is returned to user code. + + We recommend only using this `post_reidentify_content_with_metadata` + interceptor in new development instead of the `post_reidentify_content` interceptor. + When both interceptors are used, this `post_reidentify_content_with_metadata` interceptor runs after the + `post_reidentify_content` interceptor. The (possibly modified) response returned by + `post_reidentify_content` will be passed to + `post_reidentify_content_with_metadata`. + """ + return response, metadata + def pre_search_connections( self, request: dlp.SearchConnectionsRequest, @@ -1502,12 +2363,35 @@ def post_search_connections( ) -> dlp.SearchConnectionsResponse: """Post-rpc interceptor for search_connections - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_search_connections_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DlpService server but before - it is returned to user code. + it is returned to user code. This `post_search_connections` interceptor runs + before the `post_search_connections_with_metadata` interceptor. """ return response + def post_search_connections_with_metadata( + self, + response: dlp.SearchConnectionsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[dlp.SearchConnectionsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for search_connections + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DlpService server but before it is returned to user code. + + We recommend only using this `post_search_connections_with_metadata` + interceptor in new development instead of the `post_search_connections` interceptor. + When both interceptors are used, this `post_search_connections_with_metadata` interceptor runs after the + `post_search_connections` interceptor. The (possibly modified) response returned by + `post_search_connections` will be passed to + `post_search_connections_with_metadata`. + """ + return response, metadata + def pre_update_connection( self, request: dlp.UpdateConnectionRequest, @@ -1523,12 +2407,35 @@ def pre_update_connection( def post_update_connection(self, response: dlp.Connection) -> dlp.Connection: """Post-rpc interceptor for update_connection - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_connection_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DlpService server but before - it is returned to user code. + it is returned to user code. This `post_update_connection` interceptor runs + before the `post_update_connection_with_metadata` interceptor. 
""" return response + def post_update_connection_with_metadata( + self, + response: dlp.Connection, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[dlp.Connection, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_connection + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DlpService server but before it is returned to user code. + + We recommend only using this `post_update_connection_with_metadata` + interceptor in new development instead of the `post_update_connection` interceptor. + When both interceptors are used, this `post_update_connection_with_metadata` interceptor runs after the + `post_update_connection` interceptor. The (possibly modified) response returned by + `post_update_connection` will be passed to + `post_update_connection_with_metadata`. + """ + return response, metadata + def pre_update_deidentify_template( self, request: dlp.UpdateDeidentifyTemplateRequest, @@ -1548,12 +2455,35 @@ def post_update_deidentify_template( ) -> dlp.DeidentifyTemplate: """Post-rpc interceptor for update_deidentify_template - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_deidentify_template_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DlpService server but before - it is returned to user code. + it is returned to user code. This `post_update_deidentify_template` interceptor runs + before the `post_update_deidentify_template_with_metadata` interceptor. """ return response + def post_update_deidentify_template_with_metadata( + self, + response: dlp.DeidentifyTemplate, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[dlp.DeidentifyTemplate, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_deidentify_template + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DlpService server but before it is returned to user code. + + We recommend only using this `post_update_deidentify_template_with_metadata` + interceptor in new development instead of the `post_update_deidentify_template` interceptor. + When both interceptors are used, this `post_update_deidentify_template_with_metadata` interceptor runs after the + `post_update_deidentify_template` interceptor. The (possibly modified) response returned by + `post_update_deidentify_template` will be passed to + `post_update_deidentify_template_with_metadata`. + """ + return response, metadata + def pre_update_discovery_config( self, request: dlp.UpdateDiscoveryConfigRequest, @@ -1573,12 +2503,35 @@ def post_update_discovery_config( ) -> dlp.DiscoveryConfig: """Post-rpc interceptor for update_discovery_config - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_discovery_config_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DlpService server but before - it is returned to user code. + it is returned to user code. This `post_update_discovery_config` interceptor runs + before the `post_update_discovery_config_with_metadata` interceptor. 
""" return response + def post_update_discovery_config_with_metadata( + self, + response: dlp.DiscoveryConfig, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[dlp.DiscoveryConfig, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_discovery_config + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DlpService server but before it is returned to user code. + + We recommend only using this `post_update_discovery_config_with_metadata` + interceptor in new development instead of the `post_update_discovery_config` interceptor. + When both interceptors are used, this `post_update_discovery_config_with_metadata` interceptor runs after the + `post_update_discovery_config` interceptor. The (possibly modified) response returned by + `post_update_discovery_config` will be passed to + `post_update_discovery_config_with_metadata`. + """ + return response, metadata + def pre_update_inspect_template( self, request: dlp.UpdateInspectTemplateRequest, @@ -1598,12 +2551,35 @@ def post_update_inspect_template( ) -> dlp.InspectTemplate: """Post-rpc interceptor for update_inspect_template - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_inspect_template_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DlpService server but before - it is returned to user code. + it is returned to user code. This `post_update_inspect_template` interceptor runs + before the `post_update_inspect_template_with_metadata` interceptor. """ return response + def post_update_inspect_template_with_metadata( + self, + response: dlp.InspectTemplate, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[dlp.InspectTemplate, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_inspect_template + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DlpService server but before it is returned to user code. + + We recommend only using this `post_update_inspect_template_with_metadata` + interceptor in new development instead of the `post_update_inspect_template` interceptor. + When both interceptors are used, this `post_update_inspect_template_with_metadata` interceptor runs after the + `post_update_inspect_template` interceptor. The (possibly modified) response returned by + `post_update_inspect_template` will be passed to + `post_update_inspect_template_with_metadata`. + """ + return response, metadata + def pre_update_job_trigger( self, request: dlp.UpdateJobTriggerRequest, @@ -1619,12 +2595,35 @@ def pre_update_job_trigger( def post_update_job_trigger(self, response: dlp.JobTrigger) -> dlp.JobTrigger: """Post-rpc interceptor for update_job_trigger - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_job_trigger_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DlpService server but before - it is returned to user code. + it is returned to user code. This `post_update_job_trigger` interceptor runs + before the `post_update_job_trigger_with_metadata` interceptor. 
""" return response + def post_update_job_trigger_with_metadata( + self, + response: dlp.JobTrigger, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[dlp.JobTrigger, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_job_trigger + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DlpService server but before it is returned to user code. + + We recommend only using this `post_update_job_trigger_with_metadata` + interceptor in new development instead of the `post_update_job_trigger` interceptor. + When both interceptors are used, this `post_update_job_trigger_with_metadata` interceptor runs after the + `post_update_job_trigger` interceptor. The (possibly modified) response returned by + `post_update_job_trigger` will be passed to + `post_update_job_trigger_with_metadata`. + """ + return response, metadata + def pre_update_stored_info_type( self, request: dlp.UpdateStoredInfoTypeRequest, @@ -1644,12 +2643,35 @@ def post_update_stored_info_type( ) -> dlp.StoredInfoType: """Post-rpc interceptor for update_stored_info_type - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_stored_info_type_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DlpService server but before - it is returned to user code. + it is returned to user code. This `post_update_stored_info_type` interceptor runs + before the `post_update_stored_info_type_with_metadata` interceptor. """ return response + def post_update_stored_info_type_with_metadata( + self, + response: dlp.StoredInfoType, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[dlp.StoredInfoType, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_stored_info_type + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DlpService server but before it is returned to user code. + + We recommend only using this `post_update_stored_info_type_with_metadata` + interceptor in new development instead of the `post_update_stored_info_type` interceptor. + When both interceptors are used, this `post_update_stored_info_type_with_metadata` interceptor runs after the + `post_update_stored_info_type` interceptor. The (possibly modified) response returned by + `post_update_stored_info_type` will be passed to + `post_update_stored_info_type_with_metadata`. 
+ """ + return response, metadata + @dataclasses.dataclass class DlpServiceRestStub: @@ -1870,6 +2892,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_activate_job_trigger(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_activate_job_trigger_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2140,6 +3166,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_connection(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_connection_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2295,6 +3325,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_deidentify_template(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_deidentify_template_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2451,6 +3485,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_discovery_config(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_discovery_config_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2609,6 +3647,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_dlp_job(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_dlp_job_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2765,6 +3807,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_inspect_template(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_inspect_template_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2917,6 +3963,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_job_trigger(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_job_trigger_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3069,6 +4119,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_stored_info_type(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_stored_info_type_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3219,6 
+4273,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_deidentify_content(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_deidentify_content_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4467,6 +5525,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_column_data_profile(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_column_data_profile_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4614,6 +5676,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_connection(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_connection_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4762,6 +5828,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_deidentify_template(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_deidentify_template_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4912,6 +5982,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_discovery_config(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_discovery_config_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -5056,6 +6130,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_dlp_job(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_dlp_job_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -5204,6 +6282,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_file_store_data_profile(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_file_store_data_profile_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -5354,6 +6436,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_inspect_template(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_inspect_template_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -5502,6 +6588,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = 
self._interceptor.post_get_job_trigger(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_job_trigger_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -5648,6 +6738,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_project_data_profile(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_project_data_profile_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -5794,6 +6888,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_stored_info_type(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_stored_info_type_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -5936,6 +7034,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_table_data_profile(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_table_data_profile_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -6087,6 +7189,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_hybrid_inspect_dlp_job(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_hybrid_inspect_dlp_job_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -6239,6 +7345,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_hybrid_inspect_job_trigger(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_hybrid_inspect_job_trigger_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -6390,6 +7500,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_inspect_content(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_inspect_content_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -6536,6 +7650,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_column_data_profiles(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_column_data_profiles_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -6680,6 +7798,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_connections(resp) + response_metadata = [(k, str(v)) for k, 
v in response.headers.items()] + resp, _ = self._interceptor.post_list_connections_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -6825,6 +7947,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_deidentify_templates(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_deidentify_templates_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -6972,6 +8098,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_discovery_configs(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_discovery_configs_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -7121,6 +8251,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_dlp_jobs(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_dlp_jobs_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -7269,6 +8403,13 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_file_store_data_profiles(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_list_file_store_data_profiles_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -7417,6 +8558,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_info_types(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_info_types_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -7562,6 +8707,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_inspect_templates(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_inspect_templates_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -7706,6 +8855,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_job_triggers(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_job_triggers_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -7853,6 +9006,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_project_data_profiles(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = 
self._interceptor.post_list_project_data_profiles_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -8000,6 +9157,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_stored_info_types(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_stored_info_types_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -8146,6 +9307,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_table_data_profiles(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_table_data_profiles_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -8300,6 +9465,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_redact_image(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_redact_image_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -8448,6 +9617,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_reidentify_content(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_reidentify_content_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -8593,6 +9766,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_search_connections(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_search_connections_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -8744,6 +9921,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_connection(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_connection_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -8899,6 +10080,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_deidentify_template(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_deidentify_template_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -9055,6 +10240,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_discovery_config(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_discovery_config_with_metadata( + resp, response_metadata + ) if 
CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -9211,6 +10400,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_inspect_template(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_inspect_template_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -9363,6 +10556,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_job_trigger(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_job_trigger_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -9515,6 +10712,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_stored_info_type(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_stored_info_type_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-dlp/samples/generated_samples/snippet_metadata_google.privacy.dlp.v2.json b/packages/google-cloud-dlp/samples/generated_samples/snippet_metadata_google.privacy.dlp.v2.json index 0d79612ba0ab..d8f531fb2f58 100644 --- a/packages/google-cloud-dlp/samples/generated_samples/snippet_metadata_google.privacy.dlp.v2.json +++ b/packages/google-cloud-dlp/samples/generated_samples/snippet_metadata_google.privacy.dlp.v2.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-dlp", - "version": "3.26.0" + "version": "3.27.0" }, "snippets": [ { diff --git a/packages/google-cloud-dlp/tests/unit/gapic/dlp_v2/test_dlp_service.py b/packages/google-cloud-dlp/tests/unit/gapic/dlp_v2/test_dlp_service.py index c472606b7c0f..dfa52befa7bc 100644 --- a/packages/google-cloud-dlp/tests/unit/gapic/dlp_v2/test_dlp_service.py +++ b/packages/google-cloud-dlp/tests/unit/gapic/dlp_v2/test_dlp_service.py @@ -70,6 +70,13 @@ ) from google.cloud.dlp_v2.types import dlp, storage +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -305,6 +312,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = DlpServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = DlpServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -35158,10 +35208,13 @@ def test_inspect_content_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DlpServiceRestInterceptor, "post_inspect_content" ) as post, mock.patch.object( + transports.DlpServiceRestInterceptor, "post_inspect_content_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DlpServiceRestInterceptor, "pre_inspect_content" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = dlp.InspectContentRequest.pb(dlp.InspectContentRequest()) transcode.return_value = { "method": "post", @@ -35183,6 +35236,7 @@ def test_inspect_content_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = dlp.InspectContentResponse() + post_with_metadata.return_value = dlp.InspectContentResponse(), metadata client.inspect_content( request, @@ -35194,6 +35248,7 @@ def test_inspect_content_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_redact_image_rest_bad_request(request_type=dlp.RedactImageRequest): @@ -35278,10 +35333,13 @@ def test_redact_image_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DlpServiceRestInterceptor, "post_redact_image" ) as post, mock.patch.object( + transports.DlpServiceRestInterceptor, "post_redact_image_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DlpServiceRestInterceptor, "pre_redact_image" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = dlp.RedactImageRequest.pb(dlp.RedactImageRequest()) transcode.return_value = { "method": "post", @@ -35303,6 +35361,7 @@ def test_redact_image_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = dlp.RedactImageResponse() + post_with_metadata.return_value = dlp.RedactImageResponse(), metadata client.redact_image( request, @@ -35314,6 +35373,7 @@ def test_redact_image_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() 
def test_deidentify_content_rest_bad_request(request_type=dlp.DeidentifyContentRequest): @@ -35393,10 +35453,13 @@ def test_deidentify_content_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DlpServiceRestInterceptor, "post_deidentify_content" ) as post, mock.patch.object( + transports.DlpServiceRestInterceptor, "post_deidentify_content_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DlpServiceRestInterceptor, "pre_deidentify_content" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = dlp.DeidentifyContentRequest.pb(dlp.DeidentifyContentRequest()) transcode.return_value = { "method": "post", @@ -35420,6 +35483,7 @@ def test_deidentify_content_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = dlp.DeidentifyContentResponse() + post_with_metadata.return_value = dlp.DeidentifyContentResponse(), metadata client.deidentify_content( request, @@ -35431,6 +35495,7 @@ def test_deidentify_content_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_reidentify_content_rest_bad_request(request_type=dlp.ReidentifyContentRequest): @@ -35510,10 +35575,13 @@ def test_reidentify_content_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DlpServiceRestInterceptor, "post_reidentify_content" ) as post, mock.patch.object( + transports.DlpServiceRestInterceptor, "post_reidentify_content_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DlpServiceRestInterceptor, "pre_reidentify_content" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = dlp.ReidentifyContentRequest.pb(dlp.ReidentifyContentRequest()) transcode.return_value = { "method": "post", @@ -35537,6 +35605,7 @@ def test_reidentify_content_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = dlp.ReidentifyContentResponse() + post_with_metadata.return_value = dlp.ReidentifyContentResponse(), metadata client.reidentify_content( request, @@ -35548,6 +35617,7 @@ def test_reidentify_content_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_info_types_rest_bad_request(request_type=dlp.ListInfoTypesRequest): @@ -35627,10 +35697,13 @@ def test_list_info_types_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DlpServiceRestInterceptor, "post_list_info_types" ) as post, mock.patch.object( + transports.DlpServiceRestInterceptor, "post_list_info_types_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DlpServiceRestInterceptor, "pre_list_info_types" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = dlp.ListInfoTypesRequest.pb(dlp.ListInfoTypesRequest()) transcode.return_value = { "method": "post", @@ -35652,6 +35725,7 @@ def test_list_info_types_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = dlp.ListInfoTypesResponse() + post_with_metadata.return_value = dlp.ListInfoTypesResponse(), metadata client.list_info_types( request, @@ -35663,6 +35737,7 @@ def test_list_info_types_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def 
test_create_inspect_template_rest_bad_request( @@ -35751,10 +35826,14 @@ def test_create_inspect_template_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DlpServiceRestInterceptor, "post_create_inspect_template" ) as post, mock.patch.object( + transports.DlpServiceRestInterceptor, + "post_create_inspect_template_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DlpServiceRestInterceptor, "pre_create_inspect_template" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = dlp.CreateInspectTemplateRequest.pb( dlp.CreateInspectTemplateRequest() ) @@ -35778,6 +35857,7 @@ def test_create_inspect_template_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = dlp.InspectTemplate() + post_with_metadata.return_value = dlp.InspectTemplate(), metadata client.create_inspect_template( request, @@ -35789,6 +35869,7 @@ def test_create_inspect_template_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_inspect_template_rest_bad_request( @@ -35881,10 +35962,14 @@ def test_update_inspect_template_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DlpServiceRestInterceptor, "post_update_inspect_template" ) as post, mock.patch.object( + transports.DlpServiceRestInterceptor, + "post_update_inspect_template_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DlpServiceRestInterceptor, "pre_update_inspect_template" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = dlp.UpdateInspectTemplateRequest.pb( dlp.UpdateInspectTemplateRequest() ) @@ -35908,6 +35993,7 @@ def test_update_inspect_template_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = dlp.InspectTemplate() + post_with_metadata.return_value = dlp.InspectTemplate(), metadata client.update_inspect_template( request, @@ -35919,6 +36005,7 @@ def test_update_inspect_template_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_inspect_template_rest_bad_request( @@ -36011,10 +36098,13 @@ def test_get_inspect_template_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DlpServiceRestInterceptor, "post_get_inspect_template" ) as post, mock.patch.object( + transports.DlpServiceRestInterceptor, "post_get_inspect_template_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DlpServiceRestInterceptor, "pre_get_inspect_template" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = dlp.GetInspectTemplateRequest.pb(dlp.GetInspectTemplateRequest()) transcode.return_value = { "method": "post", @@ -36036,6 +36126,7 @@ def test_get_inspect_template_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = dlp.InspectTemplate() + post_with_metadata.return_value = dlp.InspectTemplate(), metadata client.get_inspect_template( request, @@ -36047,6 +36138,7 @@ def test_get_inspect_template_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_inspect_templates_rest_bad_request( @@ -36131,10 +36223,14 @@ def 
test_list_inspect_templates_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DlpServiceRestInterceptor, "post_list_inspect_templates" ) as post, mock.patch.object( + transports.DlpServiceRestInterceptor, + "post_list_inspect_templates_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DlpServiceRestInterceptor, "pre_list_inspect_templates" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = dlp.ListInspectTemplatesRequest.pb( dlp.ListInspectTemplatesRequest() ) @@ -36160,6 +36256,7 @@ def test_list_inspect_templates_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = dlp.ListInspectTemplatesResponse() + post_with_metadata.return_value = dlp.ListInspectTemplatesResponse(), metadata client.list_inspect_templates( request, @@ -36171,6 +36268,7 @@ def test_list_inspect_templates_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_inspect_template_rest_bad_request( @@ -36372,10 +36470,14 @@ def test_create_deidentify_template_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DlpServiceRestInterceptor, "post_create_deidentify_template" ) as post, mock.patch.object( + transports.DlpServiceRestInterceptor, + "post_create_deidentify_template_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DlpServiceRestInterceptor, "pre_create_deidentify_template" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = dlp.CreateDeidentifyTemplateRequest.pb( dlp.CreateDeidentifyTemplateRequest() ) @@ -36399,6 +36501,7 @@ def test_create_deidentify_template_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = dlp.DeidentifyTemplate() + post_with_metadata.return_value = dlp.DeidentifyTemplate(), metadata client.create_deidentify_template( request, @@ -36410,6 +36513,7 @@ def test_create_deidentify_template_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_deidentify_template_rest_bad_request( @@ -36498,10 +36602,14 @@ def test_update_deidentify_template_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DlpServiceRestInterceptor, "post_update_deidentify_template" ) as post, mock.patch.object( + transports.DlpServiceRestInterceptor, + "post_update_deidentify_template_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DlpServiceRestInterceptor, "pre_update_deidentify_template" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = dlp.UpdateDeidentifyTemplateRequest.pb( dlp.UpdateDeidentifyTemplateRequest() ) @@ -36525,6 +36633,7 @@ def test_update_deidentify_template_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = dlp.DeidentifyTemplate() + post_with_metadata.return_value = dlp.DeidentifyTemplate(), metadata client.update_deidentify_template( request, @@ -36536,6 +36645,7 @@ def test_update_deidentify_template_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_deidentify_template_rest_bad_request( @@ -36624,10 +36734,14 @@ def 
test_get_deidentify_template_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DlpServiceRestInterceptor, "post_get_deidentify_template" ) as post, mock.patch.object( + transports.DlpServiceRestInterceptor, + "post_get_deidentify_template_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DlpServiceRestInterceptor, "pre_get_deidentify_template" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = dlp.GetDeidentifyTemplateRequest.pb( dlp.GetDeidentifyTemplateRequest() ) @@ -36651,6 +36765,7 @@ def test_get_deidentify_template_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = dlp.DeidentifyTemplate() + post_with_metadata.return_value = dlp.DeidentifyTemplate(), metadata client.get_deidentify_template( request, @@ -36662,6 +36777,7 @@ def test_get_deidentify_template_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_deidentify_templates_rest_bad_request( @@ -36746,10 +36862,14 @@ def test_list_deidentify_templates_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DlpServiceRestInterceptor, "post_list_deidentify_templates" ) as post, mock.patch.object( + transports.DlpServiceRestInterceptor, + "post_list_deidentify_templates_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DlpServiceRestInterceptor, "pre_list_deidentify_templates" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = dlp.ListDeidentifyTemplatesRequest.pb( dlp.ListDeidentifyTemplatesRequest() ) @@ -36775,6 +36895,10 @@ def test_list_deidentify_templates_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = dlp.ListDeidentifyTemplatesResponse() + post_with_metadata.return_value = ( + dlp.ListDeidentifyTemplatesResponse(), + metadata, + ) client.list_deidentify_templates( request, @@ -36786,6 +36910,7 @@ def test_list_deidentify_templates_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_deidentify_template_rest_bad_request( @@ -36983,10 +37108,13 @@ def test_create_job_trigger_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DlpServiceRestInterceptor, "post_create_job_trigger" ) as post, mock.patch.object( + transports.DlpServiceRestInterceptor, "post_create_job_trigger_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DlpServiceRestInterceptor, "pre_create_job_trigger" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = dlp.CreateJobTriggerRequest.pb(dlp.CreateJobTriggerRequest()) transcode.return_value = { "method": "post", @@ -37008,6 +37136,7 @@ def test_create_job_trigger_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = dlp.JobTrigger() + post_with_metadata.return_value = dlp.JobTrigger(), metadata client.create_job_trigger( request, @@ -37019,6 +37148,7 @@ def test_create_job_trigger_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_job_trigger_rest_bad_request(request_type=dlp.UpdateJobTriggerRequest): @@ -37107,10 +37237,13 @@ def 
test_update_job_trigger_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DlpServiceRestInterceptor, "post_update_job_trigger" ) as post, mock.patch.object( + transports.DlpServiceRestInterceptor, "post_update_job_trigger_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DlpServiceRestInterceptor, "pre_update_job_trigger" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = dlp.UpdateJobTriggerRequest.pb(dlp.UpdateJobTriggerRequest()) transcode.return_value = { "method": "post", @@ -37132,6 +37265,7 @@ def test_update_job_trigger_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = dlp.JobTrigger() + post_with_metadata.return_value = dlp.JobTrigger(), metadata client.update_job_trigger( request, @@ -37143,6 +37277,7 @@ def test_update_job_trigger_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_hybrid_inspect_job_trigger_rest_bad_request( @@ -37224,10 +37359,14 @@ def test_hybrid_inspect_job_trigger_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DlpServiceRestInterceptor, "post_hybrid_inspect_job_trigger" ) as post, mock.patch.object( + transports.DlpServiceRestInterceptor, + "post_hybrid_inspect_job_trigger_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DlpServiceRestInterceptor, "pre_hybrid_inspect_job_trigger" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = dlp.HybridInspectJobTriggerRequest.pb( dlp.HybridInspectJobTriggerRequest() ) @@ -37251,6 +37390,7 @@ def test_hybrid_inspect_job_trigger_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = dlp.HybridInspectResponse() + post_with_metadata.return_value = dlp.HybridInspectResponse(), metadata client.hybrid_inspect_job_trigger( request, @@ -37262,6 +37402,7 @@ def test_hybrid_inspect_job_trigger_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_job_trigger_rest_bad_request(request_type=dlp.GetJobTriggerRequest): @@ -37350,10 +37491,13 @@ def test_get_job_trigger_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DlpServiceRestInterceptor, "post_get_job_trigger" ) as post, mock.patch.object( + transports.DlpServiceRestInterceptor, "post_get_job_trigger_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DlpServiceRestInterceptor, "pre_get_job_trigger" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = dlp.GetJobTriggerRequest.pb(dlp.GetJobTriggerRequest()) transcode.return_value = { "method": "post", @@ -37375,6 +37519,7 @@ def test_get_job_trigger_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = dlp.JobTrigger() + post_with_metadata.return_value = dlp.JobTrigger(), metadata client.get_job_trigger( request, @@ -37386,6 +37531,7 @@ def test_get_job_trigger_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_job_triggers_rest_bad_request(request_type=dlp.ListJobTriggersRequest): @@ -37468,10 +37614,13 @@ def test_list_job_triggers_rest_interceptors(null_interceptor): ) as transcode, 
mock.patch.object( transports.DlpServiceRestInterceptor, "post_list_job_triggers" ) as post, mock.patch.object( + transports.DlpServiceRestInterceptor, "post_list_job_triggers_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DlpServiceRestInterceptor, "pre_list_job_triggers" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = dlp.ListJobTriggersRequest.pb(dlp.ListJobTriggersRequest()) transcode.return_value = { "method": "post", @@ -37495,6 +37644,7 @@ def test_list_job_triggers_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = dlp.ListJobTriggersResponse() + post_with_metadata.return_value = dlp.ListJobTriggersResponse(), metadata client.list_job_triggers( request, @@ -37506,6 +37656,7 @@ def test_list_job_triggers_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_job_trigger_rest_bad_request(request_type=dlp.DeleteJobTriggerRequest): @@ -37701,10 +37852,13 @@ def test_activate_job_trigger_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DlpServiceRestInterceptor, "post_activate_job_trigger" ) as post, mock.patch.object( + transports.DlpServiceRestInterceptor, "post_activate_job_trigger_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DlpServiceRestInterceptor, "pre_activate_job_trigger" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = dlp.ActivateJobTriggerRequest.pb(dlp.ActivateJobTriggerRequest()) transcode.return_value = { "method": "post", @@ -37726,6 +37880,7 @@ def test_activate_job_trigger_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = dlp.DlpJob() + post_with_metadata.return_value = dlp.DlpJob(), metadata client.activate_job_trigger( request, @@ -37737,6 +37892,7 @@ def test_activate_job_trigger_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_discovery_config_rest_bad_request( @@ -37827,10 +37983,14 @@ def test_create_discovery_config_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DlpServiceRestInterceptor, "post_create_discovery_config" ) as post, mock.patch.object( + transports.DlpServiceRestInterceptor, + "post_create_discovery_config_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DlpServiceRestInterceptor, "pre_create_discovery_config" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = dlp.CreateDiscoveryConfigRequest.pb( dlp.CreateDiscoveryConfigRequest() ) @@ -37854,6 +38014,7 @@ def test_create_discovery_config_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = dlp.DiscoveryConfig() + post_with_metadata.return_value = dlp.DiscoveryConfig(), metadata client.create_discovery_config( request, @@ -37865,6 +38026,7 @@ def test_create_discovery_config_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_discovery_config_rest_bad_request( @@ -37959,10 +38121,14 @@ def test_update_discovery_config_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DlpServiceRestInterceptor, "post_update_discovery_config" ) 
as post, mock.patch.object( + transports.DlpServiceRestInterceptor, + "post_update_discovery_config_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DlpServiceRestInterceptor, "pre_update_discovery_config" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = dlp.UpdateDiscoveryConfigRequest.pb( dlp.UpdateDiscoveryConfigRequest() ) @@ -37986,6 +38152,7 @@ def test_update_discovery_config_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = dlp.DiscoveryConfig() + post_with_metadata.return_value = dlp.DiscoveryConfig(), metadata client.update_discovery_config( request, @@ -37997,6 +38164,7 @@ def test_update_discovery_config_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_discovery_config_rest_bad_request( @@ -38091,10 +38259,13 @@ def test_get_discovery_config_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DlpServiceRestInterceptor, "post_get_discovery_config" ) as post, mock.patch.object( + transports.DlpServiceRestInterceptor, "post_get_discovery_config_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DlpServiceRestInterceptor, "pre_get_discovery_config" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = dlp.GetDiscoveryConfigRequest.pb(dlp.GetDiscoveryConfigRequest()) transcode.return_value = { "method": "post", @@ -38116,6 +38287,7 @@ def test_get_discovery_config_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = dlp.DiscoveryConfig() + post_with_metadata.return_value = dlp.DiscoveryConfig(), metadata client.get_discovery_config( request, @@ -38127,6 +38299,7 @@ def test_get_discovery_config_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_discovery_configs_rest_bad_request( @@ -38211,10 +38384,14 @@ def test_list_discovery_configs_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DlpServiceRestInterceptor, "post_list_discovery_configs" ) as post, mock.patch.object( + transports.DlpServiceRestInterceptor, + "post_list_discovery_configs_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DlpServiceRestInterceptor, "pre_list_discovery_configs" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = dlp.ListDiscoveryConfigsRequest.pb( dlp.ListDiscoveryConfigsRequest() ) @@ -38240,6 +38417,7 @@ def test_list_discovery_configs_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = dlp.ListDiscoveryConfigsResponse() + post_with_metadata.return_value = dlp.ListDiscoveryConfigsResponse(), metadata client.list_discovery_configs( request, @@ -38251,6 +38429,7 @@ def test_list_discovery_configs_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_discovery_config_rest_bad_request( @@ -38452,10 +38631,13 @@ def test_create_dlp_job_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DlpServiceRestInterceptor, "post_create_dlp_job" ) as post, mock.patch.object( + transports.DlpServiceRestInterceptor, "post_create_dlp_job_with_metadata" + ) as 
post_with_metadata, mock.patch.object( transports.DlpServiceRestInterceptor, "pre_create_dlp_job" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = dlp.CreateDlpJobRequest.pb(dlp.CreateDlpJobRequest()) transcode.return_value = { "method": "post", @@ -38477,6 +38659,7 @@ def test_create_dlp_job_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = dlp.DlpJob() + post_with_metadata.return_value = dlp.DlpJob(), metadata client.create_dlp_job( request, @@ -38488,6 +38671,7 @@ def test_create_dlp_job_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_dlp_jobs_rest_bad_request(request_type=dlp.ListDlpJobsRequest): @@ -38570,10 +38754,13 @@ def test_list_dlp_jobs_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DlpServiceRestInterceptor, "post_list_dlp_jobs" ) as post, mock.patch.object( + transports.DlpServiceRestInterceptor, "post_list_dlp_jobs_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DlpServiceRestInterceptor, "pre_list_dlp_jobs" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = dlp.ListDlpJobsRequest.pb(dlp.ListDlpJobsRequest()) transcode.return_value = { "method": "post", @@ -38595,6 +38782,7 @@ def test_list_dlp_jobs_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = dlp.ListDlpJobsResponse() + post_with_metadata.return_value = dlp.ListDlpJobsResponse(), metadata client.list_dlp_jobs( request, @@ -38606,6 +38794,7 @@ def test_list_dlp_jobs_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_dlp_job_rest_bad_request(request_type=dlp.GetDlpJobRequest): @@ -38694,10 +38883,13 @@ def test_get_dlp_job_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DlpServiceRestInterceptor, "post_get_dlp_job" ) as post, mock.patch.object( + transports.DlpServiceRestInterceptor, "post_get_dlp_job_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DlpServiceRestInterceptor, "pre_get_dlp_job" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = dlp.GetDlpJobRequest.pb(dlp.GetDlpJobRequest()) transcode.return_value = { "method": "post", @@ -38719,6 +38911,7 @@ def test_get_dlp_job_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = dlp.DlpJob() + post_with_metadata.return_value = dlp.DlpJob(), metadata client.get_dlp_job( request, @@ -38730,6 +38923,7 @@ def test_get_dlp_job_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_dlp_job_rest_bad_request(request_type=dlp.DeleteDlpJobRequest): @@ -39024,10 +39218,14 @@ def test_create_stored_info_type_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DlpServiceRestInterceptor, "post_create_stored_info_type" ) as post, mock.patch.object( + transports.DlpServiceRestInterceptor, + "post_create_stored_info_type_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DlpServiceRestInterceptor, "pre_create_stored_info_type" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() 
pb_message = dlp.CreateStoredInfoTypeRequest.pb( dlp.CreateStoredInfoTypeRequest() ) @@ -39051,6 +39249,7 @@ def test_create_stored_info_type_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = dlp.StoredInfoType() + post_with_metadata.return_value = dlp.StoredInfoType(), metadata client.create_stored_info_type( request, @@ -39062,6 +39261,7 @@ def test_create_stored_info_type_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_stored_info_type_rest_bad_request( @@ -39146,10 +39346,14 @@ def test_update_stored_info_type_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DlpServiceRestInterceptor, "post_update_stored_info_type" ) as post, mock.patch.object( + transports.DlpServiceRestInterceptor, + "post_update_stored_info_type_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DlpServiceRestInterceptor, "pre_update_stored_info_type" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = dlp.UpdateStoredInfoTypeRequest.pb( dlp.UpdateStoredInfoTypeRequest() ) @@ -39173,6 +39377,7 @@ def test_update_stored_info_type_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = dlp.StoredInfoType() + post_with_metadata.return_value = dlp.StoredInfoType(), metadata client.update_stored_info_type( request, @@ -39184,6 +39389,7 @@ def test_update_stored_info_type_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_stored_info_type_rest_bad_request( @@ -39268,10 +39474,13 @@ def test_get_stored_info_type_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DlpServiceRestInterceptor, "post_get_stored_info_type" ) as post, mock.patch.object( + transports.DlpServiceRestInterceptor, "post_get_stored_info_type_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DlpServiceRestInterceptor, "pre_get_stored_info_type" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = dlp.GetStoredInfoTypeRequest.pb(dlp.GetStoredInfoTypeRequest()) transcode.return_value = { "method": "post", @@ -39293,6 +39502,7 @@ def test_get_stored_info_type_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = dlp.StoredInfoType() + post_with_metadata.return_value = dlp.StoredInfoType(), metadata client.get_stored_info_type( request, @@ -39304,6 +39514,7 @@ def test_get_stored_info_type_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_stored_info_types_rest_bad_request( @@ -39388,10 +39599,14 @@ def test_list_stored_info_types_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DlpServiceRestInterceptor, "post_list_stored_info_types" ) as post, mock.patch.object( + transports.DlpServiceRestInterceptor, + "post_list_stored_info_types_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DlpServiceRestInterceptor, "pre_list_stored_info_types" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = dlp.ListStoredInfoTypesRequest.pb(dlp.ListStoredInfoTypesRequest()) transcode.return_value = { "method": "post", @@ -39415,6 
+39630,7 @@ def test_list_stored_info_types_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = dlp.ListStoredInfoTypesResponse() + post_with_metadata.return_value = dlp.ListStoredInfoTypesResponse(), metadata client.list_stored_info_types( request, @@ -39426,6 +39642,7 @@ def test_list_stored_info_types_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_stored_info_type_rest_bad_request( @@ -39619,10 +39836,14 @@ def test_list_project_data_profiles_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DlpServiceRestInterceptor, "post_list_project_data_profiles" ) as post, mock.patch.object( + transports.DlpServiceRestInterceptor, + "post_list_project_data_profiles_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DlpServiceRestInterceptor, "pre_list_project_data_profiles" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = dlp.ListProjectDataProfilesRequest.pb( dlp.ListProjectDataProfilesRequest() ) @@ -39648,6 +39869,10 @@ def test_list_project_data_profiles_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = dlp.ListProjectDataProfilesResponse() + post_with_metadata.return_value = ( + dlp.ListProjectDataProfilesResponse(), + metadata, + ) client.list_project_data_profiles( request, @@ -39659,6 +39884,7 @@ def test_list_project_data_profiles_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_table_data_profiles_rest_bad_request( @@ -39743,10 +39969,14 @@ def test_list_table_data_profiles_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DlpServiceRestInterceptor, "post_list_table_data_profiles" ) as post, mock.patch.object( + transports.DlpServiceRestInterceptor, + "post_list_table_data_profiles_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DlpServiceRestInterceptor, "pre_list_table_data_profiles" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = dlp.ListTableDataProfilesRequest.pb( dlp.ListTableDataProfilesRequest() ) @@ -39772,6 +40002,7 @@ def test_list_table_data_profiles_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = dlp.ListTableDataProfilesResponse() + post_with_metadata.return_value = dlp.ListTableDataProfilesResponse(), metadata client.list_table_data_profiles( request, @@ -39783,6 +40014,7 @@ def test_list_table_data_profiles_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_column_data_profiles_rest_bad_request( @@ -39867,10 +40099,14 @@ def test_list_column_data_profiles_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DlpServiceRestInterceptor, "post_list_column_data_profiles" ) as post, mock.patch.object( + transports.DlpServiceRestInterceptor, + "post_list_column_data_profiles_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DlpServiceRestInterceptor, "pre_list_column_data_profiles" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = dlp.ListColumnDataProfilesRequest.pb( dlp.ListColumnDataProfilesRequest() ) @@ 
-39896,6 +40132,7 @@ def test_list_column_data_profiles_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = dlp.ListColumnDataProfilesResponse() + post_with_metadata.return_value = dlp.ListColumnDataProfilesResponse(), metadata client.list_column_data_profiles( request, @@ -39907,6 +40144,7 @@ def test_list_column_data_profiles_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_project_data_profile_rest_bad_request( @@ -40001,10 +40239,14 @@ def test_get_project_data_profile_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DlpServiceRestInterceptor, "post_get_project_data_profile" ) as post, mock.patch.object( + transports.DlpServiceRestInterceptor, + "post_get_project_data_profile_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DlpServiceRestInterceptor, "pre_get_project_data_profile" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = dlp.GetProjectDataProfileRequest.pb( dlp.GetProjectDataProfileRequest() ) @@ -40028,6 +40270,7 @@ def test_get_project_data_profile_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = dlp.ProjectDataProfile() + post_with_metadata.return_value = dlp.ProjectDataProfile(), metadata client.get_project_data_profile( request, @@ -40039,6 +40282,7 @@ def test_get_project_data_profile_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_file_store_data_profiles_rest_bad_request( @@ -40123,10 +40367,14 @@ def test_list_file_store_data_profiles_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DlpServiceRestInterceptor, "post_list_file_store_data_profiles" ) as post, mock.patch.object( + transports.DlpServiceRestInterceptor, + "post_list_file_store_data_profiles_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DlpServiceRestInterceptor, "pre_list_file_store_data_profiles" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = dlp.ListFileStoreDataProfilesRequest.pb( dlp.ListFileStoreDataProfilesRequest() ) @@ -40152,6 +40400,10 @@ def test_list_file_store_data_profiles_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = dlp.ListFileStoreDataProfilesResponse() + post_with_metadata.return_value = ( + dlp.ListFileStoreDataProfilesResponse(), + metadata, + ) client.list_file_store_data_profiles( request, @@ -40163,6 +40415,7 @@ def test_list_file_store_data_profiles_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_file_store_data_profile_rest_bad_request( @@ -40274,10 +40527,14 @@ def test_get_file_store_data_profile_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DlpServiceRestInterceptor, "post_get_file_store_data_profile" ) as post, mock.patch.object( + transports.DlpServiceRestInterceptor, + "post_get_file_store_data_profile_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DlpServiceRestInterceptor, "pre_get_file_store_data_profile" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = 
dlp.GetFileStoreDataProfileRequest.pb( dlp.GetFileStoreDataProfileRequest() ) @@ -40301,6 +40558,7 @@ def test_get_file_store_data_profile_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = dlp.FileStoreDataProfile() + post_with_metadata.return_value = dlp.FileStoreDataProfile(), metadata client.get_file_store_data_profile( request, @@ -40312,6 +40570,7 @@ def test_get_file_store_data_profile_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_file_store_data_profile_rest_bad_request( @@ -40542,10 +40801,14 @@ def test_get_table_data_profile_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DlpServiceRestInterceptor, "post_get_table_data_profile" ) as post, mock.patch.object( + transports.DlpServiceRestInterceptor, + "post_get_table_data_profile_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DlpServiceRestInterceptor, "pre_get_table_data_profile" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = dlp.GetTableDataProfileRequest.pb(dlp.GetTableDataProfileRequest()) transcode.return_value = { "method": "post", @@ -40567,6 +40830,7 @@ def test_get_table_data_profile_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = dlp.TableDataProfile() + post_with_metadata.return_value = dlp.TableDataProfile(), metadata client.get_table_data_profile( request, @@ -40578,6 +40842,7 @@ def test_get_table_data_profile_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_column_data_profile_rest_bad_request( @@ -40701,10 +40966,14 @@ def test_get_column_data_profile_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DlpServiceRestInterceptor, "post_get_column_data_profile" ) as post, mock.patch.object( + transports.DlpServiceRestInterceptor, + "post_get_column_data_profile_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DlpServiceRestInterceptor, "pre_get_column_data_profile" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = dlp.GetColumnDataProfileRequest.pb( dlp.GetColumnDataProfileRequest() ) @@ -40728,6 +40997,7 @@ def test_get_column_data_profile_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = dlp.ColumnDataProfile() + post_with_metadata.return_value = dlp.ColumnDataProfile(), metadata client.get_column_data_profile( request, @@ -40739,6 +41009,7 @@ def test_get_column_data_profile_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_table_data_profile_rest_bad_request( @@ -40933,10 +41204,14 @@ def test_hybrid_inspect_dlp_job_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DlpServiceRestInterceptor, "post_hybrid_inspect_dlp_job" ) as post, mock.patch.object( + transports.DlpServiceRestInterceptor, + "post_hybrid_inspect_dlp_job_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DlpServiceRestInterceptor, "pre_hybrid_inspect_dlp_job" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = 
dlp.HybridInspectDlpJobRequest.pb(dlp.HybridInspectDlpJobRequest()) transcode.return_value = { "method": "post", @@ -40958,6 +41233,7 @@ def test_hybrid_inspect_dlp_job_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = dlp.HybridInspectResponse() + post_with_metadata.return_value = dlp.HybridInspectResponse(), metadata client.hybrid_inspect_dlp_job( request, @@ -40969,6 +41245,7 @@ def test_hybrid_inspect_dlp_job_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_finish_dlp_job_rest_bad_request(request_type=dlp.FinishDlpJobRequest): @@ -41158,10 +41435,13 @@ def test_create_connection_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DlpServiceRestInterceptor, "post_create_connection" ) as post, mock.patch.object( + transports.DlpServiceRestInterceptor, "post_create_connection_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DlpServiceRestInterceptor, "pre_create_connection" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = dlp.CreateConnectionRequest.pb(dlp.CreateConnectionRequest()) transcode.return_value = { "method": "post", @@ -41183,6 +41463,7 @@ def test_create_connection_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = dlp.Connection() + post_with_metadata.return_value = dlp.Connection(), metadata client.create_connection( request, @@ -41194,6 +41475,7 @@ def test_create_connection_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_connection_rest_bad_request(request_type=dlp.GetConnectionRequest): @@ -41278,10 +41560,13 @@ def test_get_connection_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DlpServiceRestInterceptor, "post_get_connection" ) as post, mock.patch.object( + transports.DlpServiceRestInterceptor, "post_get_connection_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DlpServiceRestInterceptor, "pre_get_connection" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = dlp.GetConnectionRequest.pb(dlp.GetConnectionRequest()) transcode.return_value = { "method": "post", @@ -41303,6 +41588,7 @@ def test_get_connection_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = dlp.Connection() + post_with_metadata.return_value = dlp.Connection(), metadata client.get_connection( request, @@ -41314,6 +41600,7 @@ def test_get_connection_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_connections_rest_bad_request(request_type=dlp.ListConnectionsRequest): @@ -41396,10 +41683,13 @@ def test_list_connections_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DlpServiceRestInterceptor, "post_list_connections" ) as post, mock.patch.object( + transports.DlpServiceRestInterceptor, "post_list_connections_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DlpServiceRestInterceptor, "pre_list_connections" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = dlp.ListConnectionsRequest.pb(dlp.ListConnectionsRequest()) transcode.return_value = { 
"method": "post", @@ -41423,6 +41713,7 @@ def test_list_connections_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = dlp.ListConnectionsResponse() + post_with_metadata.return_value = dlp.ListConnectionsResponse(), metadata client.list_connections( request, @@ -41434,6 +41725,7 @@ def test_list_connections_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_search_connections_rest_bad_request(request_type=dlp.SearchConnectionsRequest): @@ -41516,10 +41808,13 @@ def test_search_connections_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DlpServiceRestInterceptor, "post_search_connections" ) as post, mock.patch.object( + transports.DlpServiceRestInterceptor, "post_search_connections_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DlpServiceRestInterceptor, "pre_search_connections" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = dlp.SearchConnectionsRequest.pb(dlp.SearchConnectionsRequest()) transcode.return_value = { "method": "post", @@ -41543,6 +41838,7 @@ def test_search_connections_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = dlp.SearchConnectionsResponse() + post_with_metadata.return_value = dlp.SearchConnectionsResponse(), metadata client.search_connections( request, @@ -41554,6 +41850,7 @@ def test_search_connections_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_connection_rest_bad_request(request_type=dlp.DeleteConnectionRequest): @@ -41743,10 +42040,13 @@ def test_update_connection_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DlpServiceRestInterceptor, "post_update_connection" ) as post, mock.patch.object( + transports.DlpServiceRestInterceptor, "post_update_connection_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DlpServiceRestInterceptor, "pre_update_connection" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = dlp.UpdateConnectionRequest.pb(dlp.UpdateConnectionRequest()) transcode.return_value = { "method": "post", @@ -41768,6 +42068,7 @@ def test_update_connection_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = dlp.Connection() + post_with_metadata.return_value = dlp.Connection(), metadata client.update_connection( request, @@ -41779,6 +42080,7 @@ def test_update_connection_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_initialize_client_w_rest(): diff --git a/packages/google-cloud-dms/CHANGELOG.md b/packages/google-cloud-dms/CHANGELOG.md index 64e9a5ba10ab..16bd97e9455d 100644 --- a/packages/google-cloud-dms/CHANGELOG.md +++ b/packages/google-cloud-dms/CHANGELOG.md @@ -1,5 +1,13 @@ # Changelog +## [1.12.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-dms-v1.11.0...google-cloud-dms-v1.12.0) (2025-02-12) + + +### Features + +* Add REST Interceptors which support reading metadata ([e92d527](https://github.com/googleapis/google-cloud-python/commit/e92d52797ffbce45d033eb81af24e0cad32baa55)) +* Add support for reading selective GAPIC generation methods from service YAML 
([e92d527](https://github.com/googleapis/google-cloud-python/commit/e92d52797ffbce45d033eb81af24e0cad32baa55)) + ## [1.11.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-dms-v1.10.1...google-cloud-dms-v1.11.0) (2024-12-12) diff --git a/packages/google-cloud-dms/google/cloud/clouddms/gapic_version.py b/packages/google-cloud-dms/google/cloud/clouddms/gapic_version.py index 50d842f376d0..739fdfae141c 100644 --- a/packages/google-cloud-dms/google/cloud/clouddms/gapic_version.py +++ b/packages/google-cloud-dms/google/cloud/clouddms/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.11.0" # {x-release-please-version} +__version__ = "1.12.0" # {x-release-please-version} diff --git a/packages/google-cloud-dms/google/cloud/clouddms_v1/gapic_version.py b/packages/google-cloud-dms/google/cloud/clouddms_v1/gapic_version.py index 50d842f376d0..739fdfae141c 100644 --- a/packages/google-cloud-dms/google/cloud/clouddms_v1/gapic_version.py +++ b/packages/google-cloud-dms/google/cloud/clouddms_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.11.0" # {x-release-please-version} +__version__ = "1.12.0" # {x-release-please-version} diff --git a/packages/google-cloud-dms/google/cloud/clouddms_v1/services/data_migration_service/client.py b/packages/google-cloud-dms/google/cloud/clouddms_v1/services/data_migration_service/client.py index 22f99798d05c..186edb7eec08 100644 --- a/packages/google-cloud-dms/google/cloud/clouddms_v1/services/data_migration_service/client.py +++ b/packages/google-cloud-dms/google/cloud/clouddms_v1/services/data_migration_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -607,6 +609,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -5633,16 +5662,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. 
+ return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -5688,16 +5721,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def delete_operation( self, @@ -5920,16 +5957,20 @@ def set_iam_policy( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_iam_policy( self, @@ -6042,16 +6083,20 @@ def get_iam_policy( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def test_iam_permissions( self, @@ -6102,16 +6147,20 @@ def test_iam_permissions( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_location( self, @@ -6157,16 +6206,20 @@ def get_location( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def list_locations( self, @@ -6212,16 +6265,20 @@ def list_locations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. 
+ return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-dms/samples/generated_samples/snippet_metadata_google.cloud.clouddms.v1.json b/packages/google-cloud-dms/samples/generated_samples/snippet_metadata_google.cloud.clouddms.v1.json index 66d78478488e..d03d6fd9b3de 100644 --- a/packages/google-cloud-dms/samples/generated_samples/snippet_metadata_google.cloud.clouddms.v1.json +++ b/packages/google-cloud-dms/samples/generated_samples/snippet_metadata_google.cloud.clouddms.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-dms", - "version": "1.11.0" + "version": "1.12.0" }, "snippets": [ { diff --git a/packages/google-cloud-dms/tests/unit/gapic/clouddms_v1/test_data_migration_service.py b/packages/google-cloud-dms/tests/unit/gapic/clouddms_v1/test_data_migration_service.py index 2bfff7d057f1..2fbeeb5f117b 100644 --- a/packages/google-cloud-dms/tests/unit/gapic/clouddms_v1/test_data_migration_service.py +++ b/packages/google-cloud-dms/tests/unit/gapic/clouddms_v1/test_data_migration_service.py @@ -22,6 +22,7 @@ except ImportError: # pragma: NO COVER import mock +import json import math from google.api_core import api_core_version @@ -81,6 +82,13 @@ conversionworkspace_resources, ) +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -354,6 +362,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
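A minimal usage sketch (not part of the diff) of how the credential-info enrichment added to `client.py` above can surface to a caller. The project and location names are placeholders, and `get_location` is just one of the calls wrapped in the new try/except; any request failing with 401/403/404 is handled the same way, provided the credentials expose `get_cred_info` (google-auth>=2.35.0).

```python
# Illustrative only: observe the credential info appended to auth errors.
from google.api_core import exceptions as core_exceptions
from google.cloud import clouddms_v1

client = clouddms_v1.DataMigrationServiceClient()

try:
    # Placeholder resource name; assumes the caller lacks permission, so the
    # request fails with 403 and goes through _add_cred_info_for_auth_errors.
    client.get_location(
        request={"name": "projects/example-project/locations/us-central1"}
    )
except core_exceptions.PermissionDenied as exc:
    # With google-auth>=2.35.0 the last detail entry is a JSON string like
    # {"credential_source": ..., "credential_type": ..., "principal": ...}.
    for detail in exc.details:
        print(detail)
```

The appended detail mirrors the `CRED_INFO_JSON` fixture exercised by the unit tests that follow.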
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = DataMigrationServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = DataMigrationServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-documentai/CHANGELOG.md b/packages/google-cloud-documentai/CHANGELOG.md index 4ac14e25c350..72f7c21d92db 100644 --- a/packages/google-cloud-documentai/CHANGELOG.md +++ b/packages/google-cloud-documentai/CHANGELOG.md @@ -1,5 +1,18 @@ # Changelog +## [3.2.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-documentai-v3.1.0...google-cloud-documentai-v3.2.0) (2025-02-12) + + +### Features + +* Add REST Interceptors which support reading metadata ([b0e1f43](https://github.com/googleapis/google-cloud-python/commit/b0e1f4303b6ceff89b022986f9d87eda736e7235)) +* Add support for reading selective GAPIC generation methods from service YAML ([b0e1f43](https://github.com/googleapis/google-cloud-python/commit/b0e1f4303b6ceff89b022986f9d87eda736e7235)) + + +### Documentation + +* mark fields as unused ([b0e1f43](https://github.com/googleapis/google-cloud-python/commit/b0e1f4303b6ceff89b022986f9d87eda736e7235)) + ## [3.1.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-documentai-v3.0.1...google-cloud-documentai-v3.1.0) (2024-12-12) diff --git a/packages/google-cloud-documentai/google/cloud/documentai/gapic_version.py b/packages/google-cloud-documentai/google/cloud/documentai/gapic_version.py index bfbe15797e84..349033e81d71 100644 --- a/packages/google-cloud-documentai/google/cloud/documentai/gapic_version.py +++ b/packages/google-cloud-documentai/google/cloud/documentai/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "3.1.0" # {x-release-please-version} +__version__ = "3.2.0" # {x-release-please-version} diff --git a/packages/google-cloud-documentai/google/cloud/documentai_v1/gapic_version.py b/packages/google-cloud-documentai/google/cloud/documentai_v1/gapic_version.py index bfbe15797e84..349033e81d71 100644 --- a/packages/google-cloud-documentai/google/cloud/documentai_v1/gapic_version.py +++ b/packages/google-cloud-documentai/google/cloud/documentai_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "3.1.0" # {x-release-please-version} +__version__ = "3.2.0" # {x-release-please-version} diff --git a/packages/google-cloud-documentai/google/cloud/documentai_v1/services/document_processor_service/client.py b/packages/google-cloud-documentai/google/cloud/documentai_v1/services/document_processor_service/client.py index f36a2bfb23c8..1155f9052204 100644 --- a/packages/google-cloud-documentai/google/cloud/documentai_v1/services/document_processor_service/client.py +++ b/packages/google-cloud-documentai/google/cloud/documentai_v1/services/document_processor_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -601,6 +603,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -3549,16 +3578,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -3604,16 +3637,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. 
+ return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def cancel_operation( self, @@ -3714,16 +3751,20 @@ def get_location( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def list_locations( self, @@ -3769,16 +3810,20 @@ def list_locations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-documentai/google/cloud/documentai_v1/services/document_processor_service/transports/rest.py b/packages/google-cloud-documentai/google/cloud/documentai_v1/services/document_processor_service/transports/rest.py index 469b08a597be..e0a12066fe3b 100644 --- a/packages/google-cloud-documentai/google/cloud/documentai_v1/services/document_processor_service/transports/rest.py +++ b/packages/google-cloud-documentai/google/cloud/documentai_v1/services/document_processor_service/transports/rest.py @@ -275,12 +275,35 @@ def post_batch_process_documents( ) -> operations_pb2.Operation: """Post-rpc interceptor for batch_process_documents - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_batch_process_documents_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DocumentProcessorService server but before - it is returned to user code. + it is returned to user code. This `post_batch_process_documents` interceptor runs + before the `post_batch_process_documents_with_metadata` interceptor. """ return response + def post_batch_process_documents_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for batch_process_documents + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DocumentProcessorService server but before it is returned to user code. + + We recommend only using this `post_batch_process_documents_with_metadata` + interceptor in new development instead of the `post_batch_process_documents` interceptor. + When both interceptors are used, this `post_batch_process_documents_with_metadata` interceptor runs after the + `post_batch_process_documents` interceptor. The (possibly modified) response returned by + `post_batch_process_documents` will be passed to + `post_batch_process_documents_with_metadata`. 
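For readers of the generated docstrings above, here is a minimal sketch (not part of the diff) of overriding one of the new `*_with_metadata` interceptors to read response headers. The subclass name `HeaderLoggingInterceptor` is made up, and the assumption that the Document AI base class is `DocumentProcessorServiceRestInterceptor` (mirroring the `DlpServiceRestInterceptor` referenced earlier) and is attached via the REST transport's `interceptor` argument follows the generated GAPIC pattern rather than this diff; only the method name and signature are taken from the change.

```python
# Illustrative sketch only. Base-class name and transport wiring are assumed
# from the generated GAPIC pattern; the method signature matches the diff.
from typing import Sequence, Tuple, Union

from google.longrunning import operations_pb2

from google.cloud.documentai_v1.services.document_processor_service.transports import (
    rest as documentai_rest,
)


class HeaderLoggingInterceptor(documentai_rest.DocumentProcessorServiceRestInterceptor):
    def post_batch_process_documents_with_metadata(
        self,
        response: operations_pb2.Operation,
        metadata: Sequence[Tuple[str, Union[str, bytes]]],
    ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]:
        # `metadata` holds the HTTP response headers collected by the transport.
        # This hook runs after the (deprecated) post_batch_process_documents
        # hook and receives whatever response that hook returned.
        for key, value in metadata:
            print(f"response header: {key}: {value}")
        return response, metadata
```

When both hooks are overridden, the plain `post_batch_process_documents` interceptor runs first and its (possibly modified) response is passed to this one, as the docstring above describes.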
+ """ + return response, metadata + def pre_create_processor( self, request: document_processor_service.CreateProcessorRequest, @@ -301,12 +324,35 @@ def post_create_processor( ) -> gcd_processor.Processor: """Post-rpc interceptor for create_processor - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_processor_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DocumentProcessorService server but before - it is returned to user code. + it is returned to user code. This `post_create_processor` interceptor runs + before the `post_create_processor_with_metadata` interceptor. """ return response + def post_create_processor_with_metadata( + self, + response: gcd_processor.Processor, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gcd_processor.Processor, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_processor + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DocumentProcessorService server but before it is returned to user code. + + We recommend only using this `post_create_processor_with_metadata` + interceptor in new development instead of the `post_create_processor` interceptor. + When both interceptors are used, this `post_create_processor_with_metadata` interceptor runs after the + `post_create_processor` interceptor. The (possibly modified) response returned by + `post_create_processor` will be passed to + `post_create_processor_with_metadata`. + """ + return response, metadata + def pre_delete_processor( self, request: document_processor_service.DeleteProcessorRequest, @@ -327,12 +373,35 @@ def post_delete_processor( ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_processor - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_processor_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DocumentProcessorService server but before - it is returned to user code. + it is returned to user code. This `post_delete_processor` interceptor runs + before the `post_delete_processor_with_metadata` interceptor. """ return response + def post_delete_processor_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_processor + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DocumentProcessorService server but before it is returned to user code. + + We recommend only using this `post_delete_processor_with_metadata` + interceptor in new development instead of the `post_delete_processor` interceptor. + When both interceptors are used, this `post_delete_processor_with_metadata` interceptor runs after the + `post_delete_processor` interceptor. The (possibly modified) response returned by + `post_delete_processor` will be passed to + `post_delete_processor_with_metadata`. + """ + return response, metadata + def pre_delete_processor_version( self, request: document_processor_service.DeleteProcessorVersionRequest, @@ -353,12 +422,35 @@ def post_delete_processor_version( ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_processor_version - Override in a subclass to manipulate the response + DEPRECATED. 
Please use the `post_delete_processor_version_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DocumentProcessorService server but before - it is returned to user code. + it is returned to user code. This `post_delete_processor_version` interceptor runs + before the `post_delete_processor_version_with_metadata` interceptor. """ return response + def post_delete_processor_version_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_processor_version + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DocumentProcessorService server but before it is returned to user code. + + We recommend only using this `post_delete_processor_version_with_metadata` + interceptor in new development instead of the `post_delete_processor_version` interceptor. + When both interceptors are used, this `post_delete_processor_version_with_metadata` interceptor runs after the + `post_delete_processor_version` interceptor. The (possibly modified) response returned by + `post_delete_processor_version` will be passed to + `post_delete_processor_version_with_metadata`. + """ + return response, metadata + def pre_deploy_processor_version( self, request: document_processor_service.DeployProcessorVersionRequest, @@ -379,12 +471,35 @@ def post_deploy_processor_version( ) -> operations_pb2.Operation: """Post-rpc interceptor for deploy_processor_version - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_deploy_processor_version_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DocumentProcessorService server but before - it is returned to user code. + it is returned to user code. This `post_deploy_processor_version` interceptor runs + before the `post_deploy_processor_version_with_metadata` interceptor. """ return response + def post_deploy_processor_version_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for deploy_processor_version + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DocumentProcessorService server but before it is returned to user code. + + We recommend only using this `post_deploy_processor_version_with_metadata` + interceptor in new development instead of the `post_deploy_processor_version` interceptor. + When both interceptors are used, this `post_deploy_processor_version_with_metadata` interceptor runs after the + `post_deploy_processor_version` interceptor. The (possibly modified) response returned by + `post_deploy_processor_version` will be passed to + `post_deploy_processor_version_with_metadata`. + """ + return response, metadata + def pre_disable_processor( self, request: document_processor_service.DisableProcessorRequest, @@ -405,12 +520,35 @@ def post_disable_processor( ) -> operations_pb2.Operation: """Post-rpc interceptor for disable_processor - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_disable_processor_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response after it is returned by the DocumentProcessorService server but before - it is returned to user code. + it is returned to user code. This `post_disable_processor` interceptor runs + before the `post_disable_processor_with_metadata` interceptor. """ return response + def post_disable_processor_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for disable_processor + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DocumentProcessorService server but before it is returned to user code. + + We recommend only using this `post_disable_processor_with_metadata` + interceptor in new development instead of the `post_disable_processor` interceptor. + When both interceptors are used, this `post_disable_processor_with_metadata` interceptor runs after the + `post_disable_processor` interceptor. The (possibly modified) response returned by + `post_disable_processor` will be passed to + `post_disable_processor_with_metadata`. + """ + return response, metadata + def pre_enable_processor( self, request: document_processor_service.EnableProcessorRequest, @@ -431,12 +569,35 @@ def post_enable_processor( ) -> operations_pb2.Operation: """Post-rpc interceptor for enable_processor - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_enable_processor_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DocumentProcessorService server but before - it is returned to user code. + it is returned to user code. This `post_enable_processor` interceptor runs + before the `post_enable_processor_with_metadata` interceptor. """ return response + def post_enable_processor_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for enable_processor + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DocumentProcessorService server but before it is returned to user code. + + We recommend only using this `post_enable_processor_with_metadata` + interceptor in new development instead of the `post_enable_processor` interceptor. + When both interceptors are used, this `post_enable_processor_with_metadata` interceptor runs after the + `post_enable_processor` interceptor. The (possibly modified) response returned by + `post_enable_processor` will be passed to + `post_enable_processor_with_metadata`. + """ + return response, metadata + def pre_evaluate_processor_version( self, request: document_processor_service.EvaluateProcessorVersionRequest, @@ -457,12 +618,35 @@ def post_evaluate_processor_version( ) -> operations_pb2.Operation: """Post-rpc interceptor for evaluate_processor_version - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_evaluate_processor_version_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DocumentProcessorService server but before - it is returned to user code. + it is returned to user code. 
This `post_evaluate_processor_version` interceptor runs + before the `post_evaluate_processor_version_with_metadata` interceptor. """ return response + def post_evaluate_processor_version_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for evaluate_processor_version + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DocumentProcessorService server but before it is returned to user code. + + We recommend only using this `post_evaluate_processor_version_with_metadata` + interceptor in new development instead of the `post_evaluate_processor_version` interceptor. + When both interceptors are used, this `post_evaluate_processor_version_with_metadata` interceptor runs after the + `post_evaluate_processor_version` interceptor. The (possibly modified) response returned by + `post_evaluate_processor_version` will be passed to + `post_evaluate_processor_version_with_metadata`. + """ + return response, metadata + def pre_fetch_processor_types( self, request: document_processor_service.FetchProcessorTypesRequest, @@ -483,12 +667,38 @@ def post_fetch_processor_types( ) -> document_processor_service.FetchProcessorTypesResponse: """Post-rpc interceptor for fetch_processor_types - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_fetch_processor_types_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DocumentProcessorService server but before - it is returned to user code. + it is returned to user code. This `post_fetch_processor_types` interceptor runs + before the `post_fetch_processor_types_with_metadata` interceptor. """ return response + def post_fetch_processor_types_with_metadata( + self, + response: document_processor_service.FetchProcessorTypesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + document_processor_service.FetchProcessorTypesResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for fetch_processor_types + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DocumentProcessorService server but before it is returned to user code. + + We recommend only using this `post_fetch_processor_types_with_metadata` + interceptor in new development instead of the `post_fetch_processor_types` interceptor. + When both interceptors are used, this `post_fetch_processor_types_with_metadata` interceptor runs after the + `post_fetch_processor_types` interceptor. The (possibly modified) response returned by + `post_fetch_processor_types` will be passed to + `post_fetch_processor_types_with_metadata`. + """ + return response, metadata + def pre_get_evaluation( self, request: document_processor_service.GetEvaluationRequest, @@ -509,12 +719,35 @@ def post_get_evaluation( ) -> evaluation.Evaluation: """Post-rpc interceptor for get_evaluation - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_evaluation_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DocumentProcessorService server but before - it is returned to user code. + it is returned to user code. 
This `post_get_evaluation` interceptor runs + before the `post_get_evaluation_with_metadata` interceptor. """ return response + def post_get_evaluation_with_metadata( + self, + response: evaluation.Evaluation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[evaluation.Evaluation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_evaluation + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DocumentProcessorService server but before it is returned to user code. + + We recommend only using this `post_get_evaluation_with_metadata` + interceptor in new development instead of the `post_get_evaluation` interceptor. + When both interceptors are used, this `post_get_evaluation_with_metadata` interceptor runs after the + `post_get_evaluation` interceptor. The (possibly modified) response returned by + `post_get_evaluation` will be passed to + `post_get_evaluation_with_metadata`. + """ + return response, metadata + def pre_get_processor( self, request: document_processor_service.GetProcessorRequest, @@ -533,12 +766,35 @@ def pre_get_processor( def post_get_processor(self, response: processor.Processor) -> processor.Processor: """Post-rpc interceptor for get_processor - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_processor_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DocumentProcessorService server but before - it is returned to user code. + it is returned to user code. This `post_get_processor` interceptor runs + before the `post_get_processor_with_metadata` interceptor. """ return response + def post_get_processor_with_metadata( + self, + response: processor.Processor, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[processor.Processor, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_processor + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DocumentProcessorService server but before it is returned to user code. + + We recommend only using this `post_get_processor_with_metadata` + interceptor in new development instead of the `post_get_processor` interceptor. + When both interceptors are used, this `post_get_processor_with_metadata` interceptor runs after the + `post_get_processor` interceptor. The (possibly modified) response returned by + `post_get_processor` will be passed to + `post_get_processor_with_metadata`. + """ + return response, metadata + def pre_get_processor_type( self, request: document_processor_service.GetProcessorTypeRequest, @@ -559,12 +815,35 @@ def post_get_processor_type( ) -> processor_type.ProcessorType: """Post-rpc interceptor for get_processor_type - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_processor_type_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DocumentProcessorService server but before - it is returned to user code. + it is returned to user code. This `post_get_processor_type` interceptor runs + before the `post_get_processor_type_with_metadata` interceptor. 
""" return response + def post_get_processor_type_with_metadata( + self, + response: processor_type.ProcessorType, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[processor_type.ProcessorType, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_processor_type + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DocumentProcessorService server but before it is returned to user code. + + We recommend only using this `post_get_processor_type_with_metadata` + interceptor in new development instead of the `post_get_processor_type` interceptor. + When both interceptors are used, this `post_get_processor_type_with_metadata` interceptor runs after the + `post_get_processor_type` interceptor. The (possibly modified) response returned by + `post_get_processor_type` will be passed to + `post_get_processor_type_with_metadata`. + """ + return response, metadata + def pre_get_processor_version( self, request: document_processor_service.GetProcessorVersionRequest, @@ -585,12 +864,35 @@ def post_get_processor_version( ) -> processor.ProcessorVersion: """Post-rpc interceptor for get_processor_version - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_processor_version_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DocumentProcessorService server but before - it is returned to user code. + it is returned to user code. This `post_get_processor_version` interceptor runs + before the `post_get_processor_version_with_metadata` interceptor. """ return response + def post_get_processor_version_with_metadata( + self, + response: processor.ProcessorVersion, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[processor.ProcessorVersion, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_processor_version + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DocumentProcessorService server but before it is returned to user code. + + We recommend only using this `post_get_processor_version_with_metadata` + interceptor in new development instead of the `post_get_processor_version` interceptor. + When both interceptors are used, this `post_get_processor_version_with_metadata` interceptor runs after the + `post_get_processor_version` interceptor. The (possibly modified) response returned by + `post_get_processor_version` will be passed to + `post_get_processor_version_with_metadata`. + """ + return response, metadata + def pre_list_evaluations( self, request: document_processor_service.ListEvaluationsRequest, @@ -611,12 +913,38 @@ def post_list_evaluations( ) -> document_processor_service.ListEvaluationsResponse: """Post-rpc interceptor for list_evaluations - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_evaluations_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DocumentProcessorService server but before - it is returned to user code. + it is returned to user code. This `post_list_evaluations` interceptor runs + before the `post_list_evaluations_with_metadata` interceptor. 
""" return response + def post_list_evaluations_with_metadata( + self, + response: document_processor_service.ListEvaluationsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + document_processor_service.ListEvaluationsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_evaluations + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DocumentProcessorService server but before it is returned to user code. + + We recommend only using this `post_list_evaluations_with_metadata` + interceptor in new development instead of the `post_list_evaluations` interceptor. + When both interceptors are used, this `post_list_evaluations_with_metadata` interceptor runs after the + `post_list_evaluations` interceptor. The (possibly modified) response returned by + `post_list_evaluations` will be passed to + `post_list_evaluations_with_metadata`. + """ + return response, metadata + def pre_list_processors( self, request: document_processor_service.ListProcessorsRequest, @@ -637,12 +965,38 @@ def post_list_processors( ) -> document_processor_service.ListProcessorsResponse: """Post-rpc interceptor for list_processors - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_processors_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DocumentProcessorService server but before - it is returned to user code. + it is returned to user code. This `post_list_processors` interceptor runs + before the `post_list_processors_with_metadata` interceptor. """ return response + def post_list_processors_with_metadata( + self, + response: document_processor_service.ListProcessorsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + document_processor_service.ListProcessorsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_processors + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DocumentProcessorService server but before it is returned to user code. + + We recommend only using this `post_list_processors_with_metadata` + interceptor in new development instead of the `post_list_processors` interceptor. + When both interceptors are used, this `post_list_processors_with_metadata` interceptor runs after the + `post_list_processors` interceptor. The (possibly modified) response returned by + `post_list_processors` will be passed to + `post_list_processors_with_metadata`. + """ + return response, metadata + def pre_list_processor_types( self, request: document_processor_service.ListProcessorTypesRequest, @@ -663,12 +1017,38 @@ def post_list_processor_types( ) -> document_processor_service.ListProcessorTypesResponse: """Post-rpc interceptor for list_processor_types - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_processor_types_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DocumentProcessorService server but before - it is returned to user code. + it is returned to user code. This `post_list_processor_types` interceptor runs + before the `post_list_processor_types_with_metadata` interceptor. 
""" return response + def post_list_processor_types_with_metadata( + self, + response: document_processor_service.ListProcessorTypesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + document_processor_service.ListProcessorTypesResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_processor_types + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DocumentProcessorService server but before it is returned to user code. + + We recommend only using this `post_list_processor_types_with_metadata` + interceptor in new development instead of the `post_list_processor_types` interceptor. + When both interceptors are used, this `post_list_processor_types_with_metadata` interceptor runs after the + `post_list_processor_types` interceptor. The (possibly modified) response returned by + `post_list_processor_types` will be passed to + `post_list_processor_types_with_metadata`. + """ + return response, metadata + def pre_list_processor_versions( self, request: document_processor_service.ListProcessorVersionsRequest, @@ -689,12 +1069,38 @@ def post_list_processor_versions( ) -> document_processor_service.ListProcessorVersionsResponse: """Post-rpc interceptor for list_processor_versions - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_processor_versions_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DocumentProcessorService server but before - it is returned to user code. + it is returned to user code. This `post_list_processor_versions` interceptor runs + before the `post_list_processor_versions_with_metadata` interceptor. """ return response + def post_list_processor_versions_with_metadata( + self, + response: document_processor_service.ListProcessorVersionsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + document_processor_service.ListProcessorVersionsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_processor_versions + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DocumentProcessorService server but before it is returned to user code. + + We recommend only using this `post_list_processor_versions_with_metadata` + interceptor in new development instead of the `post_list_processor_versions` interceptor. + When both interceptors are used, this `post_list_processor_versions_with_metadata` interceptor runs after the + `post_list_processor_versions` interceptor. The (possibly modified) response returned by + `post_list_processor_versions` will be passed to + `post_list_processor_versions_with_metadata`. + """ + return response, metadata + def pre_process_document( self, request: document_processor_service.ProcessRequest, @@ -715,12 +1121,38 @@ def post_process_document( ) -> document_processor_service.ProcessResponse: """Post-rpc interceptor for process_document - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_process_document_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DocumentProcessorService server but before - it is returned to user code. + it is returned to user code. This `post_process_document` interceptor runs + before the `post_process_document_with_metadata` interceptor. 
""" return response + def post_process_document_with_metadata( + self, + response: document_processor_service.ProcessResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + document_processor_service.ProcessResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for process_document + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DocumentProcessorService server but before it is returned to user code. + + We recommend only using this `post_process_document_with_metadata` + interceptor in new development instead of the `post_process_document` interceptor. + When both interceptors are used, this `post_process_document_with_metadata` interceptor runs after the + `post_process_document` interceptor. The (possibly modified) response returned by + `post_process_document` will be passed to + `post_process_document_with_metadata`. + """ + return response, metadata + def pre_review_document( self, request: document_processor_service.ReviewDocumentRequest, @@ -741,12 +1173,35 @@ def post_review_document( ) -> operations_pb2.Operation: """Post-rpc interceptor for review_document - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_review_document_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DocumentProcessorService server but before - it is returned to user code. + it is returned to user code. This `post_review_document` interceptor runs + before the `post_review_document_with_metadata` interceptor. """ return response + def post_review_document_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for review_document + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DocumentProcessorService server but before it is returned to user code. + + We recommend only using this `post_review_document_with_metadata` + interceptor in new development instead of the `post_review_document` interceptor. + When both interceptors are used, this `post_review_document_with_metadata` interceptor runs after the + `post_review_document` interceptor. The (possibly modified) response returned by + `post_review_document` will be passed to + `post_review_document_with_metadata`. + """ + return response, metadata + def pre_set_default_processor_version( self, request: document_processor_service.SetDefaultProcessorVersionRequest, @@ -767,12 +1222,35 @@ def post_set_default_processor_version( ) -> operations_pb2.Operation: """Post-rpc interceptor for set_default_processor_version - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_set_default_processor_version_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DocumentProcessorService server but before - it is returned to user code. + it is returned to user code. This `post_set_default_processor_version` interceptor runs + before the `post_set_default_processor_version_with_metadata` interceptor. 
""" return response + def post_set_default_processor_version_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for set_default_processor_version + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DocumentProcessorService server but before it is returned to user code. + + We recommend only using this `post_set_default_processor_version_with_metadata` + interceptor in new development instead of the `post_set_default_processor_version` interceptor. + When both interceptors are used, this `post_set_default_processor_version_with_metadata` interceptor runs after the + `post_set_default_processor_version` interceptor. The (possibly modified) response returned by + `post_set_default_processor_version` will be passed to + `post_set_default_processor_version_with_metadata`. + """ + return response, metadata + def pre_train_processor_version( self, request: document_processor_service.TrainProcessorVersionRequest, @@ -793,12 +1271,35 @@ def post_train_processor_version( ) -> operations_pb2.Operation: """Post-rpc interceptor for train_processor_version - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_train_processor_version_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DocumentProcessorService server but before - it is returned to user code. + it is returned to user code. This `post_train_processor_version` interceptor runs + before the `post_train_processor_version_with_metadata` interceptor. """ return response + def post_train_processor_version_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for train_processor_version + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DocumentProcessorService server but before it is returned to user code. + + We recommend only using this `post_train_processor_version_with_metadata` + interceptor in new development instead of the `post_train_processor_version` interceptor. + When both interceptors are used, this `post_train_processor_version_with_metadata` interceptor runs after the + `post_train_processor_version` interceptor. The (possibly modified) response returned by + `post_train_processor_version` will be passed to + `post_train_processor_version_with_metadata`. + """ + return response, metadata + def pre_undeploy_processor_version( self, request: document_processor_service.UndeployProcessorVersionRequest, @@ -819,12 +1320,35 @@ def post_undeploy_processor_version( ) -> operations_pb2.Operation: """Post-rpc interceptor for undeploy_processor_version - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_undeploy_processor_version_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DocumentProcessorService server but before - it is returned to user code. + it is returned to user code. This `post_undeploy_processor_version` interceptor runs + before the `post_undeploy_processor_version_with_metadata` interceptor. 
""" return response + def post_undeploy_processor_version_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for undeploy_processor_version + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DocumentProcessorService server but before it is returned to user code. + + We recommend only using this `post_undeploy_processor_version_with_metadata` + interceptor in new development instead of the `post_undeploy_processor_version` interceptor. + When both interceptors are used, this `post_undeploy_processor_version_with_metadata` interceptor runs after the + `post_undeploy_processor_version` interceptor. The (possibly modified) response returned by + `post_undeploy_processor_version` will be passed to + `post_undeploy_processor_version_with_metadata`. + """ + return response, metadata + def pre_get_location( self, request: locations_pb2.GetLocationRequest, @@ -1230,6 +1754,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_batch_process_documents(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_batch_process_documents_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1390,6 +1918,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_processor(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_processor_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1538,6 +2070,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_processor(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_processor_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1684,6 +2220,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_processor_version(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_processor_version_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1836,6 +2376,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_deploy_processor_version(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_deploy_processor_version_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1990,6 +2534,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_disable_processor(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_disable_processor_with_metadata( + resp, 
response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2144,6 +2692,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_enable_processor(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_enable_processor_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2299,6 +2851,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_evaluate_processor_version(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_evaluate_processor_version_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2448,6 +3004,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_fetch_processor_types(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_fetch_processor_types_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2597,6 +3157,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_evaluation(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_evaluation_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2746,6 +3310,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_processor(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_processor_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2897,6 +3465,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_processor_type(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_processor_type_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3050,6 +3622,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_processor_version(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_processor_version_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3196,6 +3772,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_evaluations(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_evaluations_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3347,6 +3927,10 @@ def 
__call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_processors(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_processors_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3502,6 +4086,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_processor_types(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_processor_types_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3654,6 +4242,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_processor_versions(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_processor_versions_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3812,6 +4404,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_process_document(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_process_document_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3966,6 +4562,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_review_document(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_review_document_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4121,6 +4721,13 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_set_default_processor_version(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_set_default_processor_version_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4273,6 +4880,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_train_processor_version(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_train_processor_version_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4428,6 +5039,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_undeploy_processor_version(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_undeploy_processor_version_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-documentai/google/cloud/documentai_v1beta3/gapic_version.py 
b/packages/google-cloud-documentai/google/cloud/documentai_v1beta3/gapic_version.py index bfbe15797e84..349033e81d71 100644 --- a/packages/google-cloud-documentai/google/cloud/documentai_v1beta3/gapic_version.py +++ b/packages/google-cloud-documentai/google/cloud/documentai_v1beta3/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "3.1.0" # {x-release-please-version} +__version__ = "3.2.0" # {x-release-please-version} diff --git a/packages/google-cloud-documentai/google/cloud/documentai_v1beta3/services/document_processor_service/client.py b/packages/google-cloud-documentai/google/cloud/documentai_v1beta3/services/document_processor_service/client.py index 3cdae6731137..1fed115bbf83 100644 --- a/packages/google-cloud-documentai/google/cloud/documentai_v1beta3/services/document_processor_service/client.py +++ b/packages/google-cloud-documentai/google/cloud/documentai_v1beta3/services/document_processor_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -601,6 +603,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -3693,16 +3722,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -3748,16 +3781,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def cancel_operation( self, @@ -3858,16 +3895,20 @@ def get_location( # Validate the universe domain. self._validate_universe_domain() - # Send the request. 
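The _add_cred_info_for_auth_errors helper introduced above only fires for UNAUTHORIZED, FORBIDDEN and NOT_FOUND responses, and only when the credentials object exposes get_cred_info (google-auth>=2.35.0). From calling code the effect is visible in the caught exception's details. A hedged sketch using one of the wrapped mixin methods shown above; the operation name is a placeholder, and the extra detail is only present when the conditions above hold:

from google.api_core import exceptions as core_exceptions
from google.cloud import documentai_v1beta3
from google.longrunning import operations_pb2

client = documentai_v1beta3.DocumentProcessorServiceClient()

request = operations_pb2.GetOperationRequest(
    name="projects/my-project/locations/us/operations/123"  # placeholder
)
try:
    client.get_operation(request=request)
except core_exceptions.GoogleAPICallError as e:
    # For 401/403/404 responses the client may have appended a JSON string
    # describing the active credentials to the error details.
    for detail in e.details:
        print(detail)
    raise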
- response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def list_locations( self, @@ -3913,16 +3954,20 @@ def list_locations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-documentai/google/cloud/documentai_v1beta3/services/document_processor_service/transports/rest.py b/packages/google-cloud-documentai/google/cloud/documentai_v1beta3/services/document_processor_service/transports/rest.py index 6ab8b04b20ab..1d4b80446628 100644 --- a/packages/google-cloud-documentai/google/cloud/documentai_v1beta3/services/document_processor_service/transports/rest.py +++ b/packages/google-cloud-documentai/google/cloud/documentai_v1beta3/services/document_processor_service/transports/rest.py @@ -283,12 +283,35 @@ def post_batch_process_documents( ) -> operations_pb2.Operation: """Post-rpc interceptor for batch_process_documents - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_batch_process_documents_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DocumentProcessorService server but before - it is returned to user code. + it is returned to user code. This `post_batch_process_documents` interceptor runs + before the `post_batch_process_documents_with_metadata` interceptor. """ return response + def post_batch_process_documents_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for batch_process_documents + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DocumentProcessorService server but before it is returned to user code. + + We recommend only using this `post_batch_process_documents_with_metadata` + interceptor in new development instead of the `post_batch_process_documents` interceptor. + When both interceptors are used, this `post_batch_process_documents_with_metadata` interceptor runs after the + `post_batch_process_documents` interceptor. The (possibly modified) response returned by + `post_batch_process_documents` will be passed to + `post_batch_process_documents_with_metadata`. + """ + return response, metadata + def pre_create_processor( self, request: document_processor_service.CreateProcessorRequest, @@ -309,12 +332,35 @@ def post_create_processor( ) -> gcd_processor.Processor: """Post-rpc interceptor for create_processor - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_processor_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response after it is returned by the DocumentProcessorService server but before - it is returned to user code. + it is returned to user code. This `post_create_processor` interceptor runs + before the `post_create_processor_with_metadata` interceptor. """ return response + def post_create_processor_with_metadata( + self, + response: gcd_processor.Processor, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gcd_processor.Processor, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_processor + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DocumentProcessorService server but before it is returned to user code. + + We recommend only using this `post_create_processor_with_metadata` + interceptor in new development instead of the `post_create_processor` interceptor. + When both interceptors are used, this `post_create_processor_with_metadata` interceptor runs after the + `post_create_processor` interceptor. The (possibly modified) response returned by + `post_create_processor` will be passed to + `post_create_processor_with_metadata`. + """ + return response, metadata + def pre_delete_processor( self, request: document_processor_service.DeleteProcessorRequest, @@ -335,12 +381,35 @@ def post_delete_processor( ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_processor - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_processor_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DocumentProcessorService server but before - it is returned to user code. + it is returned to user code. This `post_delete_processor` interceptor runs + before the `post_delete_processor_with_metadata` interceptor. """ return response + def post_delete_processor_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_processor + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DocumentProcessorService server but before it is returned to user code. + + We recommend only using this `post_delete_processor_with_metadata` + interceptor in new development instead of the `post_delete_processor` interceptor. + When both interceptors are used, this `post_delete_processor_with_metadata` interceptor runs after the + `post_delete_processor` interceptor. The (possibly modified) response returned by + `post_delete_processor` will be passed to + `post_delete_processor_with_metadata`. + """ + return response, metadata + def pre_delete_processor_version( self, request: document_processor_service.DeleteProcessorVersionRequest, @@ -361,12 +430,35 @@ def post_delete_processor_version( ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_processor_version - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_processor_version_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DocumentProcessorService server but before - it is returned to user code. + it is returned to user code. This `post_delete_processor_version` interceptor runs + before the `post_delete_processor_version_with_metadata` interceptor. 
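The docstrings above spell out the ordering when both hooks are overridden: the deprecated post_* interceptor runs first, and whatever it returns is what the new post_*_with_metadata interceptor receives. A standalone sketch of that ordering using stand-in classes (not the generated base class) and a dict in place of the real response message:

class _GeneratedDefaults:
    # Mimics the generated defaults: both hooks are pass-throughs.
    def post_create_processor(self, response):
        return response

    def post_create_processor_with_metadata(self, response, metadata):
        return response, metadata


class MyInterceptor(_GeneratedDefaults):
    def post_create_processor(self, response):
        response["touched_by"] = ["post_create_processor"]
        return response

    def post_create_processor_with_metadata(self, response, metadata):
        # Receives the (possibly modified) response from post_create_processor.
        response["touched_by"].append("post_create_processor_with_metadata")
        return response, metadata


def _simulate_call(interceptor, raw_response, headers):
    # Mirrors the order used in the generated __call__ bodies.
    resp = interceptor.post_create_processor(raw_response)
    metadata = [(k, str(v)) for k, v in headers.items()]
    resp, _ = interceptor.post_create_processor_with_metadata(resp, metadata)
    return resp


print(_simulate_call(MyInterceptor(), {}, {"x-request-id": "abc"}))
# {'touched_by': ['post_create_processor', 'post_create_processor_with_metadata']}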
""" return response + def post_delete_processor_version_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_processor_version + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DocumentProcessorService server but before it is returned to user code. + + We recommend only using this `post_delete_processor_version_with_metadata` + interceptor in new development instead of the `post_delete_processor_version` interceptor. + When both interceptors are used, this `post_delete_processor_version_with_metadata` interceptor runs after the + `post_delete_processor_version` interceptor. The (possibly modified) response returned by + `post_delete_processor_version` will be passed to + `post_delete_processor_version_with_metadata`. + """ + return response, metadata + def pre_deploy_processor_version( self, request: document_processor_service.DeployProcessorVersionRequest, @@ -387,12 +479,35 @@ def post_deploy_processor_version( ) -> operations_pb2.Operation: """Post-rpc interceptor for deploy_processor_version - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_deploy_processor_version_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DocumentProcessorService server but before - it is returned to user code. + it is returned to user code. This `post_deploy_processor_version` interceptor runs + before the `post_deploy_processor_version_with_metadata` interceptor. """ return response + def post_deploy_processor_version_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for deploy_processor_version + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DocumentProcessorService server but before it is returned to user code. + + We recommend only using this `post_deploy_processor_version_with_metadata` + interceptor in new development instead of the `post_deploy_processor_version` interceptor. + When both interceptors are used, this `post_deploy_processor_version_with_metadata` interceptor runs after the + `post_deploy_processor_version` interceptor. The (possibly modified) response returned by + `post_deploy_processor_version` will be passed to + `post_deploy_processor_version_with_metadata`. + """ + return response, metadata + def pre_disable_processor( self, request: document_processor_service.DisableProcessorRequest, @@ -413,12 +528,35 @@ def post_disable_processor( ) -> operations_pb2.Operation: """Post-rpc interceptor for disable_processor - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_disable_processor_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DocumentProcessorService server but before - it is returned to user code. + it is returned to user code. This `post_disable_processor` interceptor runs + before the `post_disable_processor_with_metadata` interceptor. 
""" return response + def post_disable_processor_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for disable_processor + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DocumentProcessorService server but before it is returned to user code. + + We recommend only using this `post_disable_processor_with_metadata` + interceptor in new development instead of the `post_disable_processor` interceptor. + When both interceptors are used, this `post_disable_processor_with_metadata` interceptor runs after the + `post_disable_processor` interceptor. The (possibly modified) response returned by + `post_disable_processor` will be passed to + `post_disable_processor_with_metadata`. + """ + return response, metadata + def pre_enable_processor( self, request: document_processor_service.EnableProcessorRequest, @@ -439,12 +577,35 @@ def post_enable_processor( ) -> operations_pb2.Operation: """Post-rpc interceptor for enable_processor - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_enable_processor_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DocumentProcessorService server but before - it is returned to user code. + it is returned to user code. This `post_enable_processor` interceptor runs + before the `post_enable_processor_with_metadata` interceptor. """ return response + def post_enable_processor_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for enable_processor + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DocumentProcessorService server but before it is returned to user code. + + We recommend only using this `post_enable_processor_with_metadata` + interceptor in new development instead of the `post_enable_processor` interceptor. + When both interceptors are used, this `post_enable_processor_with_metadata` interceptor runs after the + `post_enable_processor` interceptor. The (possibly modified) response returned by + `post_enable_processor` will be passed to + `post_enable_processor_with_metadata`. + """ + return response, metadata + def pre_evaluate_processor_version( self, request: document_processor_service.EvaluateProcessorVersionRequest, @@ -465,12 +626,35 @@ def post_evaluate_processor_version( ) -> operations_pb2.Operation: """Post-rpc interceptor for evaluate_processor_version - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_evaluate_processor_version_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DocumentProcessorService server but before - it is returned to user code. + it is returned to user code. This `post_evaluate_processor_version` interceptor runs + before the `post_evaluate_processor_version_with_metadata` interceptor. 
""" return response + def post_evaluate_processor_version_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for evaluate_processor_version + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DocumentProcessorService server but before it is returned to user code. + + We recommend only using this `post_evaluate_processor_version_with_metadata` + interceptor in new development instead of the `post_evaluate_processor_version` interceptor. + When both interceptors are used, this `post_evaluate_processor_version_with_metadata` interceptor runs after the + `post_evaluate_processor_version` interceptor. The (possibly modified) response returned by + `post_evaluate_processor_version` will be passed to + `post_evaluate_processor_version_with_metadata`. + """ + return response, metadata + def pre_fetch_processor_types( self, request: document_processor_service.FetchProcessorTypesRequest, @@ -491,12 +675,38 @@ def post_fetch_processor_types( ) -> document_processor_service.FetchProcessorTypesResponse: """Post-rpc interceptor for fetch_processor_types - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_fetch_processor_types_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DocumentProcessorService server but before - it is returned to user code. + it is returned to user code. This `post_fetch_processor_types` interceptor runs + before the `post_fetch_processor_types_with_metadata` interceptor. """ return response + def post_fetch_processor_types_with_metadata( + self, + response: document_processor_service.FetchProcessorTypesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + document_processor_service.FetchProcessorTypesResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for fetch_processor_types + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DocumentProcessorService server but before it is returned to user code. + + We recommend only using this `post_fetch_processor_types_with_metadata` + interceptor in new development instead of the `post_fetch_processor_types` interceptor. + When both interceptors are used, this `post_fetch_processor_types_with_metadata` interceptor runs after the + `post_fetch_processor_types` interceptor. The (possibly modified) response returned by + `post_fetch_processor_types` will be passed to + `post_fetch_processor_types_with_metadata`. + """ + return response, metadata + def pre_get_evaluation( self, request: document_processor_service.GetEvaluationRequest, @@ -517,12 +727,35 @@ def post_get_evaluation( ) -> evaluation.Evaluation: """Post-rpc interceptor for get_evaluation - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_evaluation_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DocumentProcessorService server but before - it is returned to user code. + it is returned to user code. This `post_get_evaluation` interceptor runs + before the `post_get_evaluation_with_metadata` interceptor. 
""" return response + def post_get_evaluation_with_metadata( + self, + response: evaluation.Evaluation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[evaluation.Evaluation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_evaluation + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DocumentProcessorService server but before it is returned to user code. + + We recommend only using this `post_get_evaluation_with_metadata` + interceptor in new development instead of the `post_get_evaluation` interceptor. + When both interceptors are used, this `post_get_evaluation_with_metadata` interceptor runs after the + `post_get_evaluation` interceptor. The (possibly modified) response returned by + `post_get_evaluation` will be passed to + `post_get_evaluation_with_metadata`. + """ + return response, metadata + def pre_get_processor( self, request: document_processor_service.GetProcessorRequest, @@ -541,12 +774,35 @@ def pre_get_processor( def post_get_processor(self, response: processor.Processor) -> processor.Processor: """Post-rpc interceptor for get_processor - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_processor_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DocumentProcessorService server but before - it is returned to user code. + it is returned to user code. This `post_get_processor` interceptor runs + before the `post_get_processor_with_metadata` interceptor. """ return response + def post_get_processor_with_metadata( + self, + response: processor.Processor, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[processor.Processor, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_processor + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DocumentProcessorService server but before it is returned to user code. + + We recommend only using this `post_get_processor_with_metadata` + interceptor in new development instead of the `post_get_processor` interceptor. + When both interceptors are used, this `post_get_processor_with_metadata` interceptor runs after the + `post_get_processor` interceptor. The (possibly modified) response returned by + `post_get_processor` will be passed to + `post_get_processor_with_metadata`. + """ + return response, metadata + def pre_get_processor_type( self, request: document_processor_service.GetProcessorTypeRequest, @@ -567,12 +823,35 @@ def post_get_processor_type( ) -> processor_type.ProcessorType: """Post-rpc interceptor for get_processor_type - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_processor_type_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DocumentProcessorService server but before - it is returned to user code. + it is returned to user code. This `post_get_processor_type` interceptor runs + before the `post_get_processor_type_with_metadata` interceptor. 
""" return response + def post_get_processor_type_with_metadata( + self, + response: processor_type.ProcessorType, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[processor_type.ProcessorType, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_processor_type + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DocumentProcessorService server but before it is returned to user code. + + We recommend only using this `post_get_processor_type_with_metadata` + interceptor in new development instead of the `post_get_processor_type` interceptor. + When both interceptors are used, this `post_get_processor_type_with_metadata` interceptor runs after the + `post_get_processor_type` interceptor. The (possibly modified) response returned by + `post_get_processor_type` will be passed to + `post_get_processor_type_with_metadata`. + """ + return response, metadata + def pre_get_processor_version( self, request: document_processor_service.GetProcessorVersionRequest, @@ -593,12 +872,35 @@ def post_get_processor_version( ) -> processor.ProcessorVersion: """Post-rpc interceptor for get_processor_version - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_processor_version_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DocumentProcessorService server but before - it is returned to user code. + it is returned to user code. This `post_get_processor_version` interceptor runs + before the `post_get_processor_version_with_metadata` interceptor. """ return response + def post_get_processor_version_with_metadata( + self, + response: processor.ProcessorVersion, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[processor.ProcessorVersion, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_processor_version + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DocumentProcessorService server but before it is returned to user code. + + We recommend only using this `post_get_processor_version_with_metadata` + interceptor in new development instead of the `post_get_processor_version` interceptor. + When both interceptors are used, this `post_get_processor_version_with_metadata` interceptor runs after the + `post_get_processor_version` interceptor. The (possibly modified) response returned by + `post_get_processor_version` will be passed to + `post_get_processor_version_with_metadata`. + """ + return response, metadata + def pre_import_processor_version( self, request: document_processor_service.ImportProcessorVersionRequest, @@ -619,12 +921,35 @@ def post_import_processor_version( ) -> operations_pb2.Operation: """Post-rpc interceptor for import_processor_version - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_import_processor_version_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DocumentProcessorService server but before - it is returned to user code. + it is returned to user code. This `post_import_processor_version` interceptor runs + before the `post_import_processor_version_with_metadata` interceptor. 
""" return response + def post_import_processor_version_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for import_processor_version + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DocumentProcessorService server but before it is returned to user code. + + We recommend only using this `post_import_processor_version_with_metadata` + interceptor in new development instead of the `post_import_processor_version` interceptor. + When both interceptors are used, this `post_import_processor_version_with_metadata` interceptor runs after the + `post_import_processor_version` interceptor. The (possibly modified) response returned by + `post_import_processor_version` will be passed to + `post_import_processor_version_with_metadata`. + """ + return response, metadata + def pre_list_evaluations( self, request: document_processor_service.ListEvaluationsRequest, @@ -645,12 +970,38 @@ def post_list_evaluations( ) -> document_processor_service.ListEvaluationsResponse: """Post-rpc interceptor for list_evaluations - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_evaluations_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DocumentProcessorService server but before - it is returned to user code. + it is returned to user code. This `post_list_evaluations` interceptor runs + before the `post_list_evaluations_with_metadata` interceptor. """ return response + def post_list_evaluations_with_metadata( + self, + response: document_processor_service.ListEvaluationsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + document_processor_service.ListEvaluationsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_evaluations + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DocumentProcessorService server but before it is returned to user code. + + We recommend only using this `post_list_evaluations_with_metadata` + interceptor in new development instead of the `post_list_evaluations` interceptor. + When both interceptors are used, this `post_list_evaluations_with_metadata` interceptor runs after the + `post_list_evaluations` interceptor. The (possibly modified) response returned by + `post_list_evaluations` will be passed to + `post_list_evaluations_with_metadata`. + """ + return response, metadata + def pre_list_processors( self, request: document_processor_service.ListProcessorsRequest, @@ -671,12 +1022,38 @@ def post_list_processors( ) -> document_processor_service.ListProcessorsResponse: """Post-rpc interceptor for list_processors - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_processors_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DocumentProcessorService server but before - it is returned to user code. + it is returned to user code. This `post_list_processors` interceptor runs + before the `post_list_processors_with_metadata` interceptor. 
""" return response + def post_list_processors_with_metadata( + self, + response: document_processor_service.ListProcessorsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + document_processor_service.ListProcessorsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_processors + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DocumentProcessorService server but before it is returned to user code. + + We recommend only using this `post_list_processors_with_metadata` + interceptor in new development instead of the `post_list_processors` interceptor. + When both interceptors are used, this `post_list_processors_with_metadata` interceptor runs after the + `post_list_processors` interceptor. The (possibly modified) response returned by + `post_list_processors` will be passed to + `post_list_processors_with_metadata`. + """ + return response, metadata + def pre_list_processor_types( self, request: document_processor_service.ListProcessorTypesRequest, @@ -697,12 +1074,38 @@ def post_list_processor_types( ) -> document_processor_service.ListProcessorTypesResponse: """Post-rpc interceptor for list_processor_types - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_processor_types_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DocumentProcessorService server but before - it is returned to user code. + it is returned to user code. This `post_list_processor_types` interceptor runs + before the `post_list_processor_types_with_metadata` interceptor. """ return response + def post_list_processor_types_with_metadata( + self, + response: document_processor_service.ListProcessorTypesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + document_processor_service.ListProcessorTypesResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_processor_types + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DocumentProcessorService server but before it is returned to user code. + + We recommend only using this `post_list_processor_types_with_metadata` + interceptor in new development instead of the `post_list_processor_types` interceptor. + When both interceptors are used, this `post_list_processor_types_with_metadata` interceptor runs after the + `post_list_processor_types` interceptor. The (possibly modified) response returned by + `post_list_processor_types` will be passed to + `post_list_processor_types_with_metadata`. + """ + return response, metadata + def pre_list_processor_versions( self, request: document_processor_service.ListProcessorVersionsRequest, @@ -723,12 +1126,38 @@ def post_list_processor_versions( ) -> document_processor_service.ListProcessorVersionsResponse: """Post-rpc interceptor for list_processor_versions - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_processor_versions_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DocumentProcessorService server but before - it is returned to user code. + it is returned to user code. This `post_list_processor_versions` interceptor runs + before the `post_list_processor_versions_with_metadata` interceptor. 
""" return response + def post_list_processor_versions_with_metadata( + self, + response: document_processor_service.ListProcessorVersionsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + document_processor_service.ListProcessorVersionsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_processor_versions + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DocumentProcessorService server but before it is returned to user code. + + We recommend only using this `post_list_processor_versions_with_metadata` + interceptor in new development instead of the `post_list_processor_versions` interceptor. + When both interceptors are used, this `post_list_processor_versions_with_metadata` interceptor runs after the + `post_list_processor_versions` interceptor. The (possibly modified) response returned by + `post_list_processor_versions` will be passed to + `post_list_processor_versions_with_metadata`. + """ + return response, metadata + def pre_process_document( self, request: document_processor_service.ProcessRequest, @@ -749,12 +1178,38 @@ def post_process_document( ) -> document_processor_service.ProcessResponse: """Post-rpc interceptor for process_document - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_process_document_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DocumentProcessorService server but before - it is returned to user code. + it is returned to user code. This `post_process_document` interceptor runs + before the `post_process_document_with_metadata` interceptor. """ return response + def post_process_document_with_metadata( + self, + response: document_processor_service.ProcessResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + document_processor_service.ProcessResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for process_document + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DocumentProcessorService server but before it is returned to user code. + + We recommend only using this `post_process_document_with_metadata` + interceptor in new development instead of the `post_process_document` interceptor. + When both interceptors are used, this `post_process_document_with_metadata` interceptor runs after the + `post_process_document` interceptor. The (possibly modified) response returned by + `post_process_document` will be passed to + `post_process_document_with_metadata`. + """ + return response, metadata + def pre_review_document( self, request: document_processor_service.ReviewDocumentRequest, @@ -775,12 +1230,35 @@ def post_review_document( ) -> operations_pb2.Operation: """Post-rpc interceptor for review_document - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_review_document_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DocumentProcessorService server but before - it is returned to user code. + it is returned to user code. This `post_review_document` interceptor runs + before the `post_review_document_with_metadata` interceptor. 
""" return response + def post_review_document_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for review_document + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DocumentProcessorService server but before it is returned to user code. + + We recommend only using this `post_review_document_with_metadata` + interceptor in new development instead of the `post_review_document` interceptor. + When both interceptors are used, this `post_review_document_with_metadata` interceptor runs after the + `post_review_document` interceptor. The (possibly modified) response returned by + `post_review_document` will be passed to + `post_review_document_with_metadata`. + """ + return response, metadata + def pre_set_default_processor_version( self, request: document_processor_service.SetDefaultProcessorVersionRequest, @@ -801,12 +1279,35 @@ def post_set_default_processor_version( ) -> operations_pb2.Operation: """Post-rpc interceptor for set_default_processor_version - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_set_default_processor_version_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DocumentProcessorService server but before - it is returned to user code. + it is returned to user code. This `post_set_default_processor_version` interceptor runs + before the `post_set_default_processor_version_with_metadata` interceptor. """ return response + def post_set_default_processor_version_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for set_default_processor_version + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DocumentProcessorService server but before it is returned to user code. + + We recommend only using this `post_set_default_processor_version_with_metadata` + interceptor in new development instead of the `post_set_default_processor_version` interceptor. + When both interceptors are used, this `post_set_default_processor_version_with_metadata` interceptor runs after the + `post_set_default_processor_version` interceptor. The (possibly modified) response returned by + `post_set_default_processor_version` will be passed to + `post_set_default_processor_version_with_metadata`. + """ + return response, metadata + def pre_train_processor_version( self, request: document_processor_service.TrainProcessorVersionRequest, @@ -827,12 +1328,35 @@ def post_train_processor_version( ) -> operations_pb2.Operation: """Post-rpc interceptor for train_processor_version - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_train_processor_version_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DocumentProcessorService server but before - it is returned to user code. + it is returned to user code. This `post_train_processor_version` interceptor runs + before the `post_train_processor_version_with_metadata` interceptor. 
""" return response + def post_train_processor_version_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for train_processor_version + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DocumentProcessorService server but before it is returned to user code. + + We recommend only using this `post_train_processor_version_with_metadata` + interceptor in new development instead of the `post_train_processor_version` interceptor. + When both interceptors are used, this `post_train_processor_version_with_metadata` interceptor runs after the + `post_train_processor_version` interceptor. The (possibly modified) response returned by + `post_train_processor_version` will be passed to + `post_train_processor_version_with_metadata`. + """ + return response, metadata + def pre_undeploy_processor_version( self, request: document_processor_service.UndeployProcessorVersionRequest, @@ -853,12 +1377,35 @@ def post_undeploy_processor_version( ) -> operations_pb2.Operation: """Post-rpc interceptor for undeploy_processor_version - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_undeploy_processor_version_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DocumentProcessorService server but before - it is returned to user code. + it is returned to user code. This `post_undeploy_processor_version` interceptor runs + before the `post_undeploy_processor_version_with_metadata` interceptor. """ return response + def post_undeploy_processor_version_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for undeploy_processor_version + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DocumentProcessorService server but before it is returned to user code. + + We recommend only using this `post_undeploy_processor_version_with_metadata` + interceptor in new development instead of the `post_undeploy_processor_version` interceptor. + When both interceptors are used, this `post_undeploy_processor_version_with_metadata` interceptor runs after the + `post_undeploy_processor_version` interceptor. The (possibly modified) response returned by + `post_undeploy_processor_version` will be passed to + `post_undeploy_processor_version_with_metadata`. 
+ """ + return response, metadata + def pre_get_location( self, request: locations_pb2.GetLocationRequest, @@ -1260,6 +1807,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_batch_process_documents(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_batch_process_documents_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1420,6 +1971,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_processor(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_processor_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1568,6 +2123,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_processor(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_processor_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1714,6 +2273,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_processor_version(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_processor_version_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1866,6 +2429,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_deploy_processor_version(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_deploy_processor_version_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2020,6 +2587,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_disable_processor(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_disable_processor_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2174,6 +2745,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_enable_processor(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_enable_processor_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2329,6 +2904,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_evaluate_processor_version(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_evaluate_processor_version_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2478,6 
+3057,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_fetch_processor_types(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_fetch_processor_types_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2627,6 +3210,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_evaluation(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_evaluation_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2776,6 +3363,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_processor(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_processor_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2927,6 +3518,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_processor_type(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_processor_type_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3080,6 +3675,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_processor_version(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_processor_version_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3246,6 +3845,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_import_processor_version(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_import_processor_version_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3392,6 +3995,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_evaluations(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_evaluations_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3543,6 +4150,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_processors(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_processors_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3698,6 +4309,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = 
self._interceptor.post_list_processor_types(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_processor_types_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3850,6 +4465,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_processor_versions(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_processor_versions_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4008,6 +4627,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_process_document(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_process_document_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4162,6 +4785,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_review_document(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_review_document_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4317,6 +4944,13 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_set_default_processor_version(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_set_default_processor_version_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4469,6 +5103,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_train_processor_version(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_train_processor_version_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4624,6 +5262,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_undeploy_processor_version(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_undeploy_processor_version_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-documentai/google/cloud/documentai_v1beta3/services/document_service/client.py b/packages/google-cloud-documentai/google/cloud/documentai_v1beta3/services/document_service/client.py index aa61faff9bf9..6856a534a9e0 100644 --- a/packages/google-cloud-documentai/google/cloud/documentai_v1beta3/services/document_service/client.py +++ b/packages/google-cloud-documentai/google/cloud/documentai_v1beta3/services/document_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. 
# from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -542,6 +544,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -1636,16 +1665,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -1691,16 +1724,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def cancel_operation( self, @@ -1801,16 +1838,20 @@ def get_location( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def list_locations( self, @@ -1856,16 +1897,20 @@ def list_locations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. 
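The document_service client gains the same _add_cred_info_for_auth_errors helper as the document_processor_service client above. A standalone sketch of its decision logic using stand-in objects (_FakeCredentials and _FakeError are illustrative, not the real classes):

import json
from http import HTTPStatus


class _FakeCredentials:
    def get_cred_info(self):  # only available in google-auth>=2.35.0
        return {"credential_source": "metadata server", "principal": "sa@example.com"}


class _FakeError:
    def __init__(self, code):
        self.code = code
        self._details = []


def add_cred_info_for_auth_errors(error, cred):
    # Mirrors the generated helper: only 401/403/404, only when get_cred_info exists.
    if error.code not in (HTTPStatus.UNAUTHORIZED, HTTPStatus.FORBIDDEN, HTTPStatus.NOT_FOUND):
        return
    if not hasattr(cred, "get_cred_info"):
        return
    cred_info = cred.get_cred_info()
    if cred_info and hasattr(error._details, "append"):
        error._details.append(json.dumps(cred_info))


denied = _FakeError(HTTPStatus.FORBIDDEN)
add_cred_info_for_auth_errors(denied, _FakeCredentials())
print(denied._details)  # one JSON string describing the credentials

server_error = _FakeError(HTTPStatus.INTERNAL_SERVER_ERROR)
add_cred_info_for_auth_errors(server_error, _FakeCredentials())
print(server_error._details)  # [] -- untouched for non-auth errors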
+ return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-documentai/google/cloud/documentai_v1beta3/services/document_service/transports/rest.py b/packages/google-cloud-documentai/google/cloud/documentai_v1beta3/services/document_service/transports/rest.py index 09f25ecb8c93..35f507fc5a71 100644 --- a/packages/google-cloud-documentai/google/cloud/documentai_v1beta3/services/document_service/transports/rest.py +++ b/packages/google-cloud-documentai/google/cloud/documentai_v1beta3/services/document_service/transports/rest.py @@ -152,12 +152,35 @@ def post_batch_delete_documents( ) -> operations_pb2.Operation: """Post-rpc interceptor for batch_delete_documents - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_batch_delete_documents_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DocumentService server but before - it is returned to user code. + it is returned to user code. This `post_batch_delete_documents` interceptor runs + before the `post_batch_delete_documents_with_metadata` interceptor. """ return response + def post_batch_delete_documents_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for batch_delete_documents + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DocumentService server but before it is returned to user code. + + We recommend only using this `post_batch_delete_documents_with_metadata` + interceptor in new development instead of the `post_batch_delete_documents` interceptor. + When both interceptors are used, this `post_batch_delete_documents_with_metadata` interceptor runs after the + `post_batch_delete_documents` interceptor. The (possibly modified) response returned by + `post_batch_delete_documents` will be passed to + `post_batch_delete_documents_with_metadata`. + """ + return response, metadata + def pre_get_dataset_schema( self, request: document_service.GetDatasetSchemaRequest, @@ -178,12 +201,35 @@ def post_get_dataset_schema( ) -> dataset.DatasetSchema: """Post-rpc interceptor for get_dataset_schema - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_dataset_schema_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DocumentService server but before - it is returned to user code. + it is returned to user code. This `post_get_dataset_schema` interceptor runs + before the `post_get_dataset_schema_with_metadata` interceptor. """ return response + def post_get_dataset_schema_with_metadata( + self, + response: dataset.DatasetSchema, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[dataset.DatasetSchema, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_dataset_schema + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DocumentService server but before it is returned to user code. + + We recommend only using this `post_get_dataset_schema_with_metadata` + interceptor in new development instead of the `post_get_dataset_schema` interceptor. 
+ When both interceptors are used, this `post_get_dataset_schema_with_metadata` interceptor runs after the + `post_get_dataset_schema` interceptor. The (possibly modified) response returned by + `post_get_dataset_schema` will be passed to + `post_get_dataset_schema_with_metadata`. + """ + return response, metadata + def pre_get_document( self, request: document_service.GetDocumentRequest, @@ -203,12 +249,37 @@ def post_get_document( ) -> document_service.GetDocumentResponse: """Post-rpc interceptor for get_document - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_document_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DocumentService server but before - it is returned to user code. + it is returned to user code. This `post_get_document` interceptor runs + before the `post_get_document_with_metadata` interceptor. """ return response + def post_get_document_with_metadata( + self, + response: document_service.GetDocumentResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + document_service.GetDocumentResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for get_document + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DocumentService server but before it is returned to user code. + + We recommend only using this `post_get_document_with_metadata` + interceptor in new development instead of the `post_get_document` interceptor. + When both interceptors are used, this `post_get_document_with_metadata` interceptor runs after the + `post_get_document` interceptor. The (possibly modified) response returned by + `post_get_document` will be passed to + `post_get_document_with_metadata`. + """ + return response, metadata + def pre_import_documents( self, request: document_service.ImportDocumentsRequest, @@ -228,12 +299,35 @@ def post_import_documents( ) -> operations_pb2.Operation: """Post-rpc interceptor for import_documents - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_import_documents_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DocumentService server but before - it is returned to user code. + it is returned to user code. This `post_import_documents` interceptor runs + before the `post_import_documents_with_metadata` interceptor. """ return response + def post_import_documents_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for import_documents + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DocumentService server but before it is returned to user code. + + We recommend only using this `post_import_documents_with_metadata` + interceptor in new development instead of the `post_import_documents` interceptor. + When both interceptors are used, this `post_import_documents_with_metadata` interceptor runs after the + `post_import_documents` interceptor. The (possibly modified) response returned by + `post_import_documents` will be passed to + `post_import_documents_with_metadata`. 
+ """ + return response, metadata + def pre_list_documents( self, request: document_service.ListDocumentsRequest, @@ -253,12 +347,37 @@ def post_list_documents( ) -> document_service.ListDocumentsResponse: """Post-rpc interceptor for list_documents - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_documents_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DocumentService server but before - it is returned to user code. + it is returned to user code. This `post_list_documents` interceptor runs + before the `post_list_documents_with_metadata` interceptor. """ return response + def post_list_documents_with_metadata( + self, + response: document_service.ListDocumentsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + document_service.ListDocumentsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_documents + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DocumentService server but before it is returned to user code. + + We recommend only using this `post_list_documents_with_metadata` + interceptor in new development instead of the `post_list_documents` interceptor. + When both interceptors are used, this `post_list_documents_with_metadata` interceptor runs after the + `post_list_documents` interceptor. The (possibly modified) response returned by + `post_list_documents` will be passed to + `post_list_documents_with_metadata`. + """ + return response, metadata + def pre_update_dataset( self, request: document_service.UpdateDatasetRequest, @@ -278,12 +397,35 @@ def post_update_dataset( ) -> operations_pb2.Operation: """Post-rpc interceptor for update_dataset - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_dataset_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the DocumentService server but before - it is returned to user code. + it is returned to user code. This `post_update_dataset` interceptor runs + before the `post_update_dataset_with_metadata` interceptor. """ return response + def post_update_dataset_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_dataset + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DocumentService server but before it is returned to user code. + + We recommend only using this `post_update_dataset_with_metadata` + interceptor in new development instead of the `post_update_dataset` interceptor. + When both interceptors are used, this `post_update_dataset_with_metadata` interceptor runs after the + `post_update_dataset` interceptor. The (possibly modified) response returned by + `post_update_dataset` will be passed to + `post_update_dataset_with_metadata`. + """ + return response, metadata + def pre_update_dataset_schema( self, request: document_service.UpdateDatasetSchemaRequest, @@ -304,12 +446,35 @@ def post_update_dataset_schema( ) -> dataset.DatasetSchema: """Post-rpc interceptor for update_dataset_schema - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_dataset_schema_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response after it is returned by the DocumentService server but before - it is returned to user code. + it is returned to user code. This `post_update_dataset_schema` interceptor runs + before the `post_update_dataset_schema_with_metadata` interceptor. """ return response + def post_update_dataset_schema_with_metadata( + self, + response: dataset.DatasetSchema, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[dataset.DatasetSchema, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_dataset_schema + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DocumentService server but before it is returned to user code. + + We recommend only using this `post_update_dataset_schema_with_metadata` + interceptor in new development instead of the `post_update_dataset_schema` interceptor. + When both interceptors are used, this `post_update_dataset_schema_with_metadata` interceptor runs after the + `post_update_dataset_schema` interceptor. The (possibly modified) response returned by + `post_update_dataset_schema` will be passed to + `post_update_dataset_schema_with_metadata`. + """ + return response, metadata + def pre_get_location( self, request: locations_pb2.GetLocationRequest, @@ -707,6 +872,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_batch_delete_documents(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_batch_delete_documents_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -849,6 +1018,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_dataset_schema(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_dataset_schema_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -989,6 +1162,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_document(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_document_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1140,6 +1317,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_import_documents(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_import_documents_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1286,6 +1467,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_documents(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_documents_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1435,6 +1620,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp 
= self._interceptor.post_update_dataset(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_dataset_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1584,6 +1773,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_dataset_schema(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_dataset_schema_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-documentai/google/cloud/documentai_v1beta3/types/document_processor_service.py b/packages/google-cloud-documentai/google/cloud/documentai_v1beta3/types/document_processor_service.py index 6739bf59742c..b286fd2e7ce3 100644 --- a/packages/google-cloud-documentai/google/cloud/documentai_v1beta3/types/document_processor_service.py +++ b/packages/google-cloud-documentai/google/cloud/documentai_v1beta3/types/document_processor_service.py @@ -153,11 +153,13 @@ class ChunkingConfig(proto.Message): semantic_chunking_group_size (bool): Optional. The number of tokens to group together when evaluating semantic similarity. + THIS FIELD IS NOT YET USED. breakpoint_percentile_threshold (int): Optional. The percentile of cosine dissimilarity that must be exceeded between a group of tokens and the next. The smaller this number is, the more chunks will be generated. + THIS FIELD IS NOT YET USED. """ chunk_size: int = proto.Field( diff --git a/packages/google-cloud-documentai/samples/generated_samples/snippet_metadata_google.cloud.documentai.v1.json b/packages/google-cloud-documentai/samples/generated_samples/snippet_metadata_google.cloud.documentai.v1.json index 593f36774f27..0d6147e074ab 100644 --- a/packages/google-cloud-documentai/samples/generated_samples/snippet_metadata_google.cloud.documentai.v1.json +++ b/packages/google-cloud-documentai/samples/generated_samples/snippet_metadata_google.cloud.documentai.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-documentai", - "version": "3.1.0" + "version": "3.2.0" }, "snippets": [ { diff --git a/packages/google-cloud-documentai/samples/generated_samples/snippet_metadata_google.cloud.documentai.v1beta3.json b/packages/google-cloud-documentai/samples/generated_samples/snippet_metadata_google.cloud.documentai.v1beta3.json index 9eae10785554..275a622a9219 100644 --- a/packages/google-cloud-documentai/samples/generated_samples/snippet_metadata_google.cloud.documentai.v1beta3.json +++ b/packages/google-cloud-documentai/samples/generated_samples/snippet_metadata_google.cloud.documentai.v1beta3.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-documentai", - "version": "3.1.0" + "version": "3.2.0" }, "snippets": [ { diff --git a/packages/google-cloud-documentai/tests/unit/gapic/documentai_v1/test_document_processor_service.py b/packages/google-cloud-documentai/tests/unit/gapic/documentai_v1/test_document_processor_service.py index 48064cacf2de..55aefbdb6344 100644 --- a/packages/google-cloud-documentai/tests/unit/gapic/documentai_v1/test_document_processor_service.py +++ b/packages/google-cloud-documentai/tests/unit/gapic/documentai_v1/test_document_processor_service.py @@ -95,6 +95,13 @@ from google.cloud.documentai_v1.types import processor as gcd_processor from 
google.cloud.documentai_v1.types import processor_type +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -370,6 +377,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = DocumentProcessorServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = DocumentProcessorServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -14813,10 +14863,14 @@ def test_process_document_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DocumentProcessorServiceRestInterceptor, "post_process_document" ) as post, mock.patch.object( + transports.DocumentProcessorServiceRestInterceptor, + "post_process_document_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DocumentProcessorServiceRestInterceptor, "pre_process_document" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = document_processor_service.ProcessRequest.pb( document_processor_service.ProcessRequest() ) @@ -14842,6 +14896,10 @@ def test_process_document_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = document_processor_service.ProcessResponse() + post_with_metadata.return_value = ( + document_processor_service.ProcessResponse(), + metadata, + ) client.process_document( request, @@ -14853,6 +14911,7 @@ def test_process_document_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_batch_process_documents_rest_bad_request( @@ -14934,11 +14993,15 @@ def test_batch_process_documents_rest_interceptors(null_interceptor): transports.DocumentProcessorServiceRestInterceptor, "post_batch_process_documents", ) as post, mock.patch.object( + transports.DocumentProcessorServiceRestInterceptor, + "post_batch_process_documents_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DocumentProcessorServiceRestInterceptor, "pre_batch_process_documents", ) as pre: 
pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = document_processor_service.BatchProcessRequest.pb( document_processor_service.BatchProcessRequest() ) @@ -14962,6 +15025,7 @@ def test_batch_process_documents_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.batch_process_documents( request, @@ -14973,6 +15037,7 @@ def test_batch_process_documents_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_fetch_processor_types_rest_bad_request( @@ -15056,10 +15121,14 @@ def test_fetch_processor_types_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DocumentProcessorServiceRestInterceptor, "post_fetch_processor_types" ) as post, mock.patch.object( + transports.DocumentProcessorServiceRestInterceptor, + "post_fetch_processor_types_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DocumentProcessorServiceRestInterceptor, "pre_fetch_processor_types" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = document_processor_service.FetchProcessorTypesRequest.pb( document_processor_service.FetchProcessorTypesRequest() ) @@ -15085,6 +15154,10 @@ def test_fetch_processor_types_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = document_processor_service.FetchProcessorTypesResponse() + post_with_metadata.return_value = ( + document_processor_service.FetchProcessorTypesResponse(), + metadata, + ) client.fetch_processor_types( request, @@ -15096,6 +15169,7 @@ def test_fetch_processor_types_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_processor_types_rest_bad_request( @@ -15182,10 +15256,14 @@ def test_list_processor_types_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DocumentProcessorServiceRestInterceptor, "post_list_processor_types" ) as post, mock.patch.object( + transports.DocumentProcessorServiceRestInterceptor, + "post_list_processor_types_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DocumentProcessorServiceRestInterceptor, "pre_list_processor_types" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = document_processor_service.ListProcessorTypesRequest.pb( document_processor_service.ListProcessorTypesRequest() ) @@ -15211,6 +15289,10 @@ def test_list_processor_types_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = document_processor_service.ListProcessorTypesResponse() + post_with_metadata.return_value = ( + document_processor_service.ListProcessorTypesResponse(), + metadata, + ) client.list_processor_types( request, @@ -15222,6 +15304,7 @@ def test_list_processor_types_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_processor_type_rest_bad_request( @@ -15316,10 +15399,14 @@ def test_get_processor_type_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DocumentProcessorServiceRestInterceptor, "post_get_processor_type" ) as post, mock.patch.object( + 
transports.DocumentProcessorServiceRestInterceptor, + "post_get_processor_type_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DocumentProcessorServiceRestInterceptor, "pre_get_processor_type" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = document_processor_service.GetProcessorTypeRequest.pb( document_processor_service.GetProcessorTypeRequest() ) @@ -15345,6 +15432,7 @@ def test_get_processor_type_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = processor_type.ProcessorType() + post_with_metadata.return_value = processor_type.ProcessorType(), metadata client.get_processor_type( request, @@ -15356,6 +15444,7 @@ def test_get_processor_type_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_processors_rest_bad_request( @@ -15442,10 +15531,14 @@ def test_list_processors_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DocumentProcessorServiceRestInterceptor, "post_list_processors" ) as post, mock.patch.object( + transports.DocumentProcessorServiceRestInterceptor, + "post_list_processors_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DocumentProcessorServiceRestInterceptor, "pre_list_processors" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = document_processor_service.ListProcessorsRequest.pb( document_processor_service.ListProcessorsRequest() ) @@ -15471,6 +15564,10 @@ def test_list_processors_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = document_processor_service.ListProcessorsResponse() + post_with_metadata.return_value = ( + document_processor_service.ListProcessorsResponse(), + metadata, + ) client.list_processors( request, @@ -15482,6 +15579,7 @@ def test_list_processors_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_processor_rest_bad_request( @@ -15582,10 +15680,14 @@ def test_get_processor_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DocumentProcessorServiceRestInterceptor, "post_get_processor" ) as post, mock.patch.object( + transports.DocumentProcessorServiceRestInterceptor, + "post_get_processor_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DocumentProcessorServiceRestInterceptor, "pre_get_processor" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = document_processor_service.GetProcessorRequest.pb( document_processor_service.GetProcessorRequest() ) @@ -15609,6 +15711,7 @@ def test_get_processor_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = processor.Processor() + post_with_metadata.return_value = processor.Processor(), metadata client.get_processor( request, @@ -15620,6 +15723,7 @@ def test_get_processor_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_train_processor_version_rest_bad_request( @@ -15701,11 +15805,15 @@ def test_train_processor_version_rest_interceptors(null_interceptor): transports.DocumentProcessorServiceRestInterceptor, "post_train_processor_version", ) as post, mock.patch.object( + 
transports.DocumentProcessorServiceRestInterceptor, + "post_train_processor_version_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DocumentProcessorServiceRestInterceptor, "pre_train_processor_version", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = document_processor_service.TrainProcessorVersionRequest.pb( document_processor_service.TrainProcessorVersionRequest() ) @@ -15729,6 +15837,7 @@ def test_train_processor_version_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.train_processor_version( request, @@ -15740,6 +15849,7 @@ def test_train_processor_version_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_processor_version_rest_bad_request( @@ -15847,10 +15957,14 @@ def test_get_processor_version_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DocumentProcessorServiceRestInterceptor, "post_get_processor_version" ) as post, mock.patch.object( + transports.DocumentProcessorServiceRestInterceptor, + "post_get_processor_version_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DocumentProcessorServiceRestInterceptor, "pre_get_processor_version" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = document_processor_service.GetProcessorVersionRequest.pb( document_processor_service.GetProcessorVersionRequest() ) @@ -15874,6 +15988,7 @@ def test_get_processor_version_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = processor.ProcessorVersion() + post_with_metadata.return_value = processor.ProcessorVersion(), metadata client.get_processor_version( request, @@ -15885,6 +16000,7 @@ def test_get_processor_version_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_processor_versions_rest_bad_request( @@ -15972,11 +16088,15 @@ def test_list_processor_versions_rest_interceptors(null_interceptor): transports.DocumentProcessorServiceRestInterceptor, "post_list_processor_versions", ) as post, mock.patch.object( + transports.DocumentProcessorServiceRestInterceptor, + "post_list_processor_versions_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DocumentProcessorServiceRestInterceptor, "pre_list_processor_versions", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = document_processor_service.ListProcessorVersionsRequest.pb( document_processor_service.ListProcessorVersionsRequest() ) @@ -16002,6 +16122,10 @@ def test_list_processor_versions_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = document_processor_service.ListProcessorVersionsResponse() + post_with_metadata.return_value = ( + document_processor_service.ListProcessorVersionsResponse(), + metadata, + ) client.list_processor_versions( request, @@ -16013,6 +16137,7 @@ def test_list_processor_versions_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_processor_version_rest_bad_request( @@ -16098,11 +16223,15 @@ def 
test_delete_processor_version_rest_interceptors(null_interceptor): transports.DocumentProcessorServiceRestInterceptor, "post_delete_processor_version", ) as post, mock.patch.object( + transports.DocumentProcessorServiceRestInterceptor, + "post_delete_processor_version_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DocumentProcessorServiceRestInterceptor, "pre_delete_processor_version", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = document_processor_service.DeleteProcessorVersionRequest.pb( document_processor_service.DeleteProcessorVersionRequest() ) @@ -16126,6 +16255,7 @@ def test_delete_processor_version_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.delete_processor_version( request, @@ -16137,6 +16267,7 @@ def test_delete_processor_version_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_deploy_processor_version_rest_bad_request( @@ -16222,11 +16353,15 @@ def test_deploy_processor_version_rest_interceptors(null_interceptor): transports.DocumentProcessorServiceRestInterceptor, "post_deploy_processor_version", ) as post, mock.patch.object( + transports.DocumentProcessorServiceRestInterceptor, + "post_deploy_processor_version_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DocumentProcessorServiceRestInterceptor, "pre_deploy_processor_version", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = document_processor_service.DeployProcessorVersionRequest.pb( document_processor_service.DeployProcessorVersionRequest() ) @@ -16250,6 +16385,7 @@ def test_deploy_processor_version_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.deploy_processor_version( request, @@ -16261,6 +16397,7 @@ def test_deploy_processor_version_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_undeploy_processor_version_rest_bad_request( @@ -16346,11 +16483,15 @@ def test_undeploy_processor_version_rest_interceptors(null_interceptor): transports.DocumentProcessorServiceRestInterceptor, "post_undeploy_processor_version", ) as post, mock.patch.object( + transports.DocumentProcessorServiceRestInterceptor, + "post_undeploy_processor_version_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DocumentProcessorServiceRestInterceptor, "pre_undeploy_processor_version", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = document_processor_service.UndeployProcessorVersionRequest.pb( document_processor_service.UndeployProcessorVersionRequest() ) @@ -16374,6 +16515,7 @@ def test_undeploy_processor_version_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.undeploy_processor_version( request, @@ -16385,6 +16527,7 @@ def test_undeploy_processor_version_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + 
post_with_metadata.assert_called_once() def test_create_processor_rest_bad_request( @@ -16569,10 +16712,14 @@ def test_create_processor_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DocumentProcessorServiceRestInterceptor, "post_create_processor" ) as post, mock.patch.object( + transports.DocumentProcessorServiceRestInterceptor, + "post_create_processor_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DocumentProcessorServiceRestInterceptor, "pre_create_processor" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = document_processor_service.CreateProcessorRequest.pb( document_processor_service.CreateProcessorRequest() ) @@ -16596,6 +16743,7 @@ def test_create_processor_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gcd_processor.Processor() + post_with_metadata.return_value = gcd_processor.Processor(), metadata client.create_processor( request, @@ -16607,6 +16755,7 @@ def test_create_processor_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_processor_rest_bad_request( @@ -16687,10 +16836,14 @@ def test_delete_processor_rest_interceptors(null_interceptor): ), mock.patch.object( transports.DocumentProcessorServiceRestInterceptor, "post_delete_processor" ) as post, mock.patch.object( + transports.DocumentProcessorServiceRestInterceptor, + "post_delete_processor_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DocumentProcessorServiceRestInterceptor, "pre_delete_processor" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = document_processor_service.DeleteProcessorRequest.pb( document_processor_service.DeleteProcessorRequest() ) @@ -16714,6 +16867,7 @@ def test_delete_processor_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.delete_processor( request, @@ -16725,6 +16879,7 @@ def test_delete_processor_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_enable_processor_rest_bad_request( @@ -16805,10 +16960,14 @@ def test_enable_processor_rest_interceptors(null_interceptor): ), mock.patch.object( transports.DocumentProcessorServiceRestInterceptor, "post_enable_processor" ) as post, mock.patch.object( + transports.DocumentProcessorServiceRestInterceptor, + "post_enable_processor_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DocumentProcessorServiceRestInterceptor, "pre_enable_processor" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = document_processor_service.EnableProcessorRequest.pb( document_processor_service.EnableProcessorRequest() ) @@ -16832,6 +16991,7 @@ def test_enable_processor_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.enable_processor( request, @@ -16843,6 +17003,7 @@ def test_enable_processor_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def 
test_disable_processor_rest_bad_request( @@ -16923,10 +17084,14 @@ def test_disable_processor_rest_interceptors(null_interceptor): ), mock.patch.object( transports.DocumentProcessorServiceRestInterceptor, "post_disable_processor" ) as post, mock.patch.object( + transports.DocumentProcessorServiceRestInterceptor, + "post_disable_processor_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DocumentProcessorServiceRestInterceptor, "pre_disable_processor" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = document_processor_service.DisableProcessorRequest.pb( document_processor_service.DisableProcessorRequest() ) @@ -16950,6 +17115,7 @@ def test_disable_processor_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.disable_processor( request, @@ -16961,6 +17127,7 @@ def test_disable_processor_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_set_default_processor_version_rest_bad_request( @@ -17046,11 +17213,15 @@ def test_set_default_processor_version_rest_interceptors(null_interceptor): transports.DocumentProcessorServiceRestInterceptor, "post_set_default_processor_version", ) as post, mock.patch.object( + transports.DocumentProcessorServiceRestInterceptor, + "post_set_default_processor_version_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DocumentProcessorServiceRestInterceptor, "pre_set_default_processor_version", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = document_processor_service.SetDefaultProcessorVersionRequest.pb( document_processor_service.SetDefaultProcessorVersionRequest() ) @@ -17074,6 +17245,7 @@ def test_set_default_processor_version_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.set_default_processor_version( request, @@ -17085,6 +17257,7 @@ def test_set_default_processor_version_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_review_document_rest_bad_request( @@ -17169,10 +17342,14 @@ def test_review_document_rest_interceptors(null_interceptor): ), mock.patch.object( transports.DocumentProcessorServiceRestInterceptor, "post_review_document" ) as post, mock.patch.object( + transports.DocumentProcessorServiceRestInterceptor, + "post_review_document_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DocumentProcessorServiceRestInterceptor, "pre_review_document" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = document_processor_service.ReviewDocumentRequest.pb( document_processor_service.ReviewDocumentRequest() ) @@ -17196,6 +17373,7 @@ def test_review_document_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.review_document( request, @@ -17207,6 +17385,7 @@ def test_review_document_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + 
post_with_metadata.assert_called_once() def test_evaluate_processor_version_rest_bad_request( @@ -17292,11 +17471,15 @@ def test_evaluate_processor_version_rest_interceptors(null_interceptor): transports.DocumentProcessorServiceRestInterceptor, "post_evaluate_processor_version", ) as post, mock.patch.object( + transports.DocumentProcessorServiceRestInterceptor, + "post_evaluate_processor_version_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DocumentProcessorServiceRestInterceptor, "pre_evaluate_processor_version", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = document_processor_service.EvaluateProcessorVersionRequest.pb( document_processor_service.EvaluateProcessorVersionRequest() ) @@ -17320,6 +17503,7 @@ def test_evaluate_processor_version_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.evaluate_processor_version( request, @@ -17331,6 +17515,7 @@ def test_evaluate_processor_version_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_evaluation_rest_bad_request( @@ -17423,10 +17608,14 @@ def test_get_evaluation_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DocumentProcessorServiceRestInterceptor, "post_get_evaluation" ) as post, mock.patch.object( + transports.DocumentProcessorServiceRestInterceptor, + "post_get_evaluation_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DocumentProcessorServiceRestInterceptor, "pre_get_evaluation" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = document_processor_service.GetEvaluationRequest.pb( document_processor_service.GetEvaluationRequest() ) @@ -17450,6 +17639,7 @@ def test_get_evaluation_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = evaluation.Evaluation() + post_with_metadata.return_value = evaluation.Evaluation(), metadata client.get_evaluation( request, @@ -17461,6 +17651,7 @@ def test_get_evaluation_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_evaluations_rest_bad_request( @@ -17551,10 +17742,14 @@ def test_list_evaluations_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DocumentProcessorServiceRestInterceptor, "post_list_evaluations" ) as post, mock.patch.object( + transports.DocumentProcessorServiceRestInterceptor, + "post_list_evaluations_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DocumentProcessorServiceRestInterceptor, "pre_list_evaluations" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = document_processor_service.ListEvaluationsRequest.pb( document_processor_service.ListEvaluationsRequest() ) @@ -17580,6 +17775,10 @@ def test_list_evaluations_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = document_processor_service.ListEvaluationsResponse() + post_with_metadata.return_value = ( + document_processor_service.ListEvaluationsResponse(), + metadata, + ) client.list_evaluations( request, @@ -17591,6 +17790,7 @@ def 
test_list_evaluations_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationRequest): diff --git a/packages/google-cloud-documentai/tests/unit/gapic/documentai_v1beta3/test_document_processor_service.py b/packages/google-cloud-documentai/tests/unit/gapic/documentai_v1beta3/test_document_processor_service.py index 1a6b6c2b7aa7..8e07cb99326f 100644 --- a/packages/google-cloud-documentai/tests/unit/gapic/documentai_v1beta3/test_document_processor_service.py +++ b/packages/google-cloud-documentai/tests/unit/gapic/documentai_v1beta3/test_document_processor_service.py @@ -95,6 +95,13 @@ from google.cloud.documentai_v1beta3.types import processor as gcd_processor from google.cloud.documentai_v1beta3.types import processor_type +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -370,6 +377,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = DocumentProcessorServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = DocumentProcessorServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -15412,10 +15462,14 @@ def test_process_document_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DocumentProcessorServiceRestInterceptor, "post_process_document" ) as post, mock.patch.object( + transports.DocumentProcessorServiceRestInterceptor, + "post_process_document_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DocumentProcessorServiceRestInterceptor, "pre_process_document" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = document_processor_service.ProcessRequest.pb( document_processor_service.ProcessRequest() ) @@ -15441,6 +15495,10 @@ def test_process_document_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = document_processor_service.ProcessResponse() + 
post_with_metadata.return_value = ( + document_processor_service.ProcessResponse(), + metadata, + ) client.process_document( request, @@ -15452,6 +15510,7 @@ def test_process_document_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_batch_process_documents_rest_bad_request( @@ -15533,11 +15592,15 @@ def test_batch_process_documents_rest_interceptors(null_interceptor): transports.DocumentProcessorServiceRestInterceptor, "post_batch_process_documents", ) as post, mock.patch.object( + transports.DocumentProcessorServiceRestInterceptor, + "post_batch_process_documents_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DocumentProcessorServiceRestInterceptor, "pre_batch_process_documents", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = document_processor_service.BatchProcessRequest.pb( document_processor_service.BatchProcessRequest() ) @@ -15561,6 +15624,7 @@ def test_batch_process_documents_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.batch_process_documents( request, @@ -15572,6 +15636,7 @@ def test_batch_process_documents_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_fetch_processor_types_rest_bad_request( @@ -15655,10 +15720,14 @@ def test_fetch_processor_types_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DocumentProcessorServiceRestInterceptor, "post_fetch_processor_types" ) as post, mock.patch.object( + transports.DocumentProcessorServiceRestInterceptor, + "post_fetch_processor_types_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DocumentProcessorServiceRestInterceptor, "pre_fetch_processor_types" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = document_processor_service.FetchProcessorTypesRequest.pb( document_processor_service.FetchProcessorTypesRequest() ) @@ -15684,6 +15753,10 @@ def test_fetch_processor_types_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = document_processor_service.FetchProcessorTypesResponse() + post_with_metadata.return_value = ( + document_processor_service.FetchProcessorTypesResponse(), + metadata, + ) client.fetch_processor_types( request, @@ -15695,6 +15768,7 @@ def test_fetch_processor_types_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_processor_types_rest_bad_request( @@ -15781,10 +15855,14 @@ def test_list_processor_types_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DocumentProcessorServiceRestInterceptor, "post_list_processor_types" ) as post, mock.patch.object( + transports.DocumentProcessorServiceRestInterceptor, + "post_list_processor_types_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DocumentProcessorServiceRestInterceptor, "pre_list_processor_types" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = document_processor_service.ListProcessorTypesRequest.pb( document_processor_service.ListProcessorTypesRequest() ) @@ -15810,6 +15888,10 @@ 
def test_list_processor_types_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = document_processor_service.ListProcessorTypesResponse() + post_with_metadata.return_value = ( + document_processor_service.ListProcessorTypesResponse(), + metadata, + ) client.list_processor_types( request, @@ -15821,6 +15903,7 @@ def test_list_processor_types_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_processor_type_rest_bad_request( @@ -15915,10 +15998,14 @@ def test_get_processor_type_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DocumentProcessorServiceRestInterceptor, "post_get_processor_type" ) as post, mock.patch.object( + transports.DocumentProcessorServiceRestInterceptor, + "post_get_processor_type_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DocumentProcessorServiceRestInterceptor, "pre_get_processor_type" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = document_processor_service.GetProcessorTypeRequest.pb( document_processor_service.GetProcessorTypeRequest() ) @@ -15944,6 +16031,7 @@ def test_get_processor_type_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = processor_type.ProcessorType() + post_with_metadata.return_value = processor_type.ProcessorType(), metadata client.get_processor_type( request, @@ -15955,6 +16043,7 @@ def test_get_processor_type_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_processors_rest_bad_request( @@ -16041,10 +16130,14 @@ def test_list_processors_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DocumentProcessorServiceRestInterceptor, "post_list_processors" ) as post, mock.patch.object( + transports.DocumentProcessorServiceRestInterceptor, + "post_list_processors_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DocumentProcessorServiceRestInterceptor, "pre_list_processors" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = document_processor_service.ListProcessorsRequest.pb( document_processor_service.ListProcessorsRequest() ) @@ -16070,6 +16163,10 @@ def test_list_processors_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = document_processor_service.ListProcessorsResponse() + post_with_metadata.return_value = ( + document_processor_service.ListProcessorsResponse(), + metadata, + ) client.list_processors( request, @@ -16081,6 +16178,7 @@ def test_list_processors_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_processor_rest_bad_request( @@ -16181,10 +16279,14 @@ def test_get_processor_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DocumentProcessorServiceRestInterceptor, "post_get_processor" ) as post, mock.patch.object( + transports.DocumentProcessorServiceRestInterceptor, + "post_get_processor_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DocumentProcessorServiceRestInterceptor, "pre_get_processor" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = 
document_processor_service.GetProcessorRequest.pb( document_processor_service.GetProcessorRequest() ) @@ -16208,6 +16310,7 @@ def test_get_processor_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = processor.Processor() + post_with_metadata.return_value = processor.Processor(), metadata client.get_processor( request, @@ -16219,6 +16322,7 @@ def test_get_processor_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_train_processor_version_rest_bad_request( @@ -16300,11 +16404,15 @@ def test_train_processor_version_rest_interceptors(null_interceptor): transports.DocumentProcessorServiceRestInterceptor, "post_train_processor_version", ) as post, mock.patch.object( + transports.DocumentProcessorServiceRestInterceptor, + "post_train_processor_version_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DocumentProcessorServiceRestInterceptor, "pre_train_processor_version", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = document_processor_service.TrainProcessorVersionRequest.pb( document_processor_service.TrainProcessorVersionRequest() ) @@ -16328,6 +16436,7 @@ def test_train_processor_version_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.train_processor_version( request, @@ -16339,6 +16448,7 @@ def test_train_processor_version_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_processor_version_rest_bad_request( @@ -16446,10 +16556,14 @@ def test_get_processor_version_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DocumentProcessorServiceRestInterceptor, "post_get_processor_version" ) as post, mock.patch.object( + transports.DocumentProcessorServiceRestInterceptor, + "post_get_processor_version_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DocumentProcessorServiceRestInterceptor, "pre_get_processor_version" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = document_processor_service.GetProcessorVersionRequest.pb( document_processor_service.GetProcessorVersionRequest() ) @@ -16473,6 +16587,7 @@ def test_get_processor_version_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = processor.ProcessorVersion() + post_with_metadata.return_value = processor.ProcessorVersion(), metadata client.get_processor_version( request, @@ -16484,6 +16599,7 @@ def test_get_processor_version_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_processor_versions_rest_bad_request( @@ -16571,11 +16687,15 @@ def test_list_processor_versions_rest_interceptors(null_interceptor): transports.DocumentProcessorServiceRestInterceptor, "post_list_processor_versions", ) as post, mock.patch.object( + transports.DocumentProcessorServiceRestInterceptor, + "post_list_processor_versions_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DocumentProcessorServiceRestInterceptor, "pre_list_processor_versions", ) as pre: pre.assert_not_called() post.assert_not_called() + 
post_with_metadata.assert_not_called() pb_message = document_processor_service.ListProcessorVersionsRequest.pb( document_processor_service.ListProcessorVersionsRequest() ) @@ -16601,6 +16721,10 @@ def test_list_processor_versions_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = document_processor_service.ListProcessorVersionsResponse() + post_with_metadata.return_value = ( + document_processor_service.ListProcessorVersionsResponse(), + metadata, + ) client.list_processor_versions( request, @@ -16612,6 +16736,7 @@ def test_list_processor_versions_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_processor_version_rest_bad_request( @@ -16697,11 +16822,15 @@ def test_delete_processor_version_rest_interceptors(null_interceptor): transports.DocumentProcessorServiceRestInterceptor, "post_delete_processor_version", ) as post, mock.patch.object( + transports.DocumentProcessorServiceRestInterceptor, + "post_delete_processor_version_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DocumentProcessorServiceRestInterceptor, "pre_delete_processor_version", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = document_processor_service.DeleteProcessorVersionRequest.pb( document_processor_service.DeleteProcessorVersionRequest() ) @@ -16725,6 +16854,7 @@ def test_delete_processor_version_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.delete_processor_version( request, @@ -16736,6 +16866,7 @@ def test_delete_processor_version_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_deploy_processor_version_rest_bad_request( @@ -16821,11 +16952,15 @@ def test_deploy_processor_version_rest_interceptors(null_interceptor): transports.DocumentProcessorServiceRestInterceptor, "post_deploy_processor_version", ) as post, mock.patch.object( + transports.DocumentProcessorServiceRestInterceptor, + "post_deploy_processor_version_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DocumentProcessorServiceRestInterceptor, "pre_deploy_processor_version", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = document_processor_service.DeployProcessorVersionRequest.pb( document_processor_service.DeployProcessorVersionRequest() ) @@ -16849,6 +16984,7 @@ def test_deploy_processor_version_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.deploy_processor_version( request, @@ -16860,6 +16996,7 @@ def test_deploy_processor_version_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_undeploy_processor_version_rest_bad_request( @@ -16945,11 +17082,15 @@ def test_undeploy_processor_version_rest_interceptors(null_interceptor): transports.DocumentProcessorServiceRestInterceptor, "post_undeploy_processor_version", ) as post, mock.patch.object( + transports.DocumentProcessorServiceRestInterceptor, + "post_undeploy_processor_version_with_metadata", + ) as 
post_with_metadata, mock.patch.object( transports.DocumentProcessorServiceRestInterceptor, "pre_undeploy_processor_version", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = document_processor_service.UndeployProcessorVersionRequest.pb( document_processor_service.UndeployProcessorVersionRequest() ) @@ -16973,6 +17114,7 @@ def test_undeploy_processor_version_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.undeploy_processor_version( request, @@ -16984,6 +17126,7 @@ def test_undeploy_processor_version_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_processor_rest_bad_request( @@ -17168,10 +17311,14 @@ def test_create_processor_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DocumentProcessorServiceRestInterceptor, "post_create_processor" ) as post, mock.patch.object( + transports.DocumentProcessorServiceRestInterceptor, + "post_create_processor_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DocumentProcessorServiceRestInterceptor, "pre_create_processor" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = document_processor_service.CreateProcessorRequest.pb( document_processor_service.CreateProcessorRequest() ) @@ -17195,6 +17342,7 @@ def test_create_processor_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gcd_processor.Processor() + post_with_metadata.return_value = gcd_processor.Processor(), metadata client.create_processor( request, @@ -17206,6 +17354,7 @@ def test_create_processor_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_processor_rest_bad_request( @@ -17286,10 +17435,14 @@ def test_delete_processor_rest_interceptors(null_interceptor): ), mock.patch.object( transports.DocumentProcessorServiceRestInterceptor, "post_delete_processor" ) as post, mock.patch.object( + transports.DocumentProcessorServiceRestInterceptor, + "post_delete_processor_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DocumentProcessorServiceRestInterceptor, "pre_delete_processor" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = document_processor_service.DeleteProcessorRequest.pb( document_processor_service.DeleteProcessorRequest() ) @@ -17313,6 +17466,7 @@ def test_delete_processor_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.delete_processor( request, @@ -17324,6 +17478,7 @@ def test_delete_processor_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_enable_processor_rest_bad_request( @@ -17404,10 +17559,14 @@ def test_enable_processor_rest_interceptors(null_interceptor): ), mock.patch.object( transports.DocumentProcessorServiceRestInterceptor, "post_enable_processor" ) as post, mock.patch.object( + transports.DocumentProcessorServiceRestInterceptor, + "post_enable_processor_with_metadata", + ) as 
post_with_metadata, mock.patch.object( transports.DocumentProcessorServiceRestInterceptor, "pre_enable_processor" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = document_processor_service.EnableProcessorRequest.pb( document_processor_service.EnableProcessorRequest() ) @@ -17431,6 +17590,7 @@ def test_enable_processor_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.enable_processor( request, @@ -17442,6 +17602,7 @@ def test_enable_processor_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_disable_processor_rest_bad_request( @@ -17522,10 +17683,14 @@ def test_disable_processor_rest_interceptors(null_interceptor): ), mock.patch.object( transports.DocumentProcessorServiceRestInterceptor, "post_disable_processor" ) as post, mock.patch.object( + transports.DocumentProcessorServiceRestInterceptor, + "post_disable_processor_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DocumentProcessorServiceRestInterceptor, "pre_disable_processor" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = document_processor_service.DisableProcessorRequest.pb( document_processor_service.DisableProcessorRequest() ) @@ -17549,6 +17714,7 @@ def test_disable_processor_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.disable_processor( request, @@ -17560,6 +17726,7 @@ def test_disable_processor_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_set_default_processor_version_rest_bad_request( @@ -17645,11 +17812,15 @@ def test_set_default_processor_version_rest_interceptors(null_interceptor): transports.DocumentProcessorServiceRestInterceptor, "post_set_default_processor_version", ) as post, mock.patch.object( + transports.DocumentProcessorServiceRestInterceptor, + "post_set_default_processor_version_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DocumentProcessorServiceRestInterceptor, "pre_set_default_processor_version", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = document_processor_service.SetDefaultProcessorVersionRequest.pb( document_processor_service.SetDefaultProcessorVersionRequest() ) @@ -17673,6 +17844,7 @@ def test_set_default_processor_version_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.set_default_processor_version( request, @@ -17684,6 +17856,7 @@ def test_set_default_processor_version_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_review_document_rest_bad_request( @@ -17768,10 +17941,14 @@ def test_review_document_rest_interceptors(null_interceptor): ), mock.patch.object( transports.DocumentProcessorServiceRestInterceptor, "post_review_document" ) as post, mock.patch.object( + transports.DocumentProcessorServiceRestInterceptor, + 
"post_review_document_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DocumentProcessorServiceRestInterceptor, "pre_review_document" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = document_processor_service.ReviewDocumentRequest.pb( document_processor_service.ReviewDocumentRequest() ) @@ -17795,6 +17972,7 @@ def test_review_document_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.review_document( request, @@ -17806,6 +17984,7 @@ def test_review_document_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_evaluate_processor_version_rest_bad_request( @@ -17891,11 +18070,15 @@ def test_evaluate_processor_version_rest_interceptors(null_interceptor): transports.DocumentProcessorServiceRestInterceptor, "post_evaluate_processor_version", ) as post, mock.patch.object( + transports.DocumentProcessorServiceRestInterceptor, + "post_evaluate_processor_version_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DocumentProcessorServiceRestInterceptor, "pre_evaluate_processor_version", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = document_processor_service.EvaluateProcessorVersionRequest.pb( document_processor_service.EvaluateProcessorVersionRequest() ) @@ -17919,6 +18102,7 @@ def test_evaluate_processor_version_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.evaluate_processor_version( request, @@ -17930,6 +18114,7 @@ def test_evaluate_processor_version_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_evaluation_rest_bad_request( @@ -18022,10 +18207,14 @@ def test_get_evaluation_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DocumentProcessorServiceRestInterceptor, "post_get_evaluation" ) as post, mock.patch.object( + transports.DocumentProcessorServiceRestInterceptor, + "post_get_evaluation_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DocumentProcessorServiceRestInterceptor, "pre_get_evaluation" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = document_processor_service.GetEvaluationRequest.pb( document_processor_service.GetEvaluationRequest() ) @@ -18049,6 +18238,7 @@ def test_get_evaluation_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = evaluation.Evaluation() + post_with_metadata.return_value = evaluation.Evaluation(), metadata client.get_evaluation( request, @@ -18060,6 +18250,7 @@ def test_get_evaluation_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_evaluations_rest_bad_request( @@ -18150,10 +18341,14 @@ def test_list_evaluations_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DocumentProcessorServiceRestInterceptor, "post_list_evaluations" ) as post, mock.patch.object( + transports.DocumentProcessorServiceRestInterceptor, + 
"post_list_evaluations_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DocumentProcessorServiceRestInterceptor, "pre_list_evaluations" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = document_processor_service.ListEvaluationsRequest.pb( document_processor_service.ListEvaluationsRequest() ) @@ -18179,6 +18374,10 @@ def test_list_evaluations_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = document_processor_service.ListEvaluationsResponse() + post_with_metadata.return_value = ( + document_processor_service.ListEvaluationsResponse(), + metadata, + ) client.list_evaluations( request, @@ -18190,6 +18389,7 @@ def test_list_evaluations_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_import_processor_version_rest_bad_request( @@ -18271,11 +18471,15 @@ def test_import_processor_version_rest_interceptors(null_interceptor): transports.DocumentProcessorServiceRestInterceptor, "post_import_processor_version", ) as post, mock.patch.object( + transports.DocumentProcessorServiceRestInterceptor, + "post_import_processor_version_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DocumentProcessorServiceRestInterceptor, "pre_import_processor_version", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = document_processor_service.ImportProcessorVersionRequest.pb( document_processor_service.ImportProcessorVersionRequest() ) @@ -18299,6 +18503,7 @@ def test_import_processor_version_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.import_processor_version( request, @@ -18310,6 +18515,7 @@ def test_import_processor_version_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationRequest): diff --git a/packages/google-cloud-documentai/tests/unit/gapic/documentai_v1beta3/test_document_service.py b/packages/google-cloud-documentai/tests/unit/gapic/documentai_v1beta3/test_document_service.py index 63571e6d7d04..6800162d68a3 100644 --- a/packages/google-cloud-documentai/tests/unit/gapic/documentai_v1beta3/test_document_service.py +++ b/packages/google-cloud-documentai/tests/unit/gapic/documentai_v1beta3/test_document_service.py @@ -79,6 +79,13 @@ from google.cloud.documentai_v1beta3.types import dataset from google.cloud.documentai_v1beta3.types import dataset as gcd_dataset +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -337,6 +344,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = DocumentServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = DocumentServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -5731,10 +5781,13 @@ def test_update_dataset_rest_interceptors(null_interceptor): ), mock.patch.object( transports.DocumentServiceRestInterceptor, "post_update_dataset" ) as post, mock.patch.object( + transports.DocumentServiceRestInterceptor, "post_update_dataset_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DocumentServiceRestInterceptor, "pre_update_dataset" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = document_service.UpdateDatasetRequest.pb( document_service.UpdateDatasetRequest() ) @@ -5758,6 +5811,7 @@ def test_update_dataset_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.update_dataset( request, @@ -5769,6 +5823,7 @@ def test_update_dataset_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_import_documents_rest_bad_request( @@ -5853,10 +5908,13 @@ def test_import_documents_rest_interceptors(null_interceptor): ), mock.patch.object( transports.DocumentServiceRestInterceptor, "post_import_documents" ) as post, mock.patch.object( + transports.DocumentServiceRestInterceptor, "post_import_documents_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DocumentServiceRestInterceptor, "pre_import_documents" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = document_service.ImportDocumentsRequest.pb( document_service.ImportDocumentsRequest() ) @@ -5880,6 +5938,7 @@ def test_import_documents_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.import_documents( request, @@ -5891,6 +5950,7 @@ def test_import_documents_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_document_rest_bad_request( 
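Every interceptor test in this file now follows the same shape: the new `post_*_with_metadata` hook is patched alongside the existing `post_*` hook, seeded with a `(response, metadata)` tuple, and asserted to have been called exactly once. Outside the test suite, a custom interceptor would override the new hook directly; the sketch below is illustrative only (module paths follow the usual generated layout, and the logging behaviour is an assumption, not part of this change):

    import logging
    from typing import Sequence, Tuple, Union

    from google.cloud.documentai_v1beta3.services.document_service import transports
    from google.cloud.documentai_v1beta3.types import document_service


    class LoggingInterceptor(transports.DocumentServiceRestInterceptor):
        """Reads HTTP response headers through the new *_with_metadata hook."""

        def post_get_document_with_metadata(
            self,
            response: document_service.GetDocumentResponse,
            metadata: Sequence[Tuple[str, Union[str, bytes]]],
        ) -> Tuple[
            document_service.GetDocumentResponse,
            Sequence[Tuple[str, Union[str, bytes]]],
        ]:
            # Runs after post_get_document; both the response and the
            # response headers (surfaced as metadata) are visible here.
            logging.debug("get_document returned %d metadata entries", len(metadata))
            return response, metadata

Such an interceptor is normally attached via the REST transport's `interceptor` argument when the client is constructed; that wiring is unchanged by this diff.
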
@@ -5976,10 +6036,13 @@ def test_get_document_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DocumentServiceRestInterceptor, "post_get_document" ) as post, mock.patch.object( + transports.DocumentServiceRestInterceptor, "post_get_document_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DocumentServiceRestInterceptor, "pre_get_document" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = document_service.GetDocumentRequest.pb( document_service.GetDocumentRequest() ) @@ -6005,6 +6068,10 @@ def test_get_document_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = document_service.GetDocumentResponse() + post_with_metadata.return_value = ( + document_service.GetDocumentResponse(), + metadata, + ) client.get_document( request, @@ -6016,6 +6083,7 @@ def test_get_document_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_documents_rest_bad_request( @@ -6106,10 +6174,13 @@ def test_list_documents_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DocumentServiceRestInterceptor, "post_list_documents" ) as post, mock.patch.object( + transports.DocumentServiceRestInterceptor, "post_list_documents_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DocumentServiceRestInterceptor, "pre_list_documents" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = document_service.ListDocumentsRequest.pb( document_service.ListDocumentsRequest() ) @@ -6135,6 +6206,10 @@ def test_list_documents_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = document_service.ListDocumentsResponse() + post_with_metadata.return_value = ( + document_service.ListDocumentsResponse(), + metadata, + ) client.list_documents( request, @@ -6146,6 +6221,7 @@ def test_list_documents_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_batch_delete_documents_rest_bad_request( @@ -6230,10 +6306,14 @@ def test_batch_delete_documents_rest_interceptors(null_interceptor): ), mock.patch.object( transports.DocumentServiceRestInterceptor, "post_batch_delete_documents" ) as post, mock.patch.object( + transports.DocumentServiceRestInterceptor, + "post_batch_delete_documents_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DocumentServiceRestInterceptor, "pre_batch_delete_documents" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = document_service.BatchDeleteDocumentsRequest.pb( document_service.BatchDeleteDocumentsRequest() ) @@ -6257,6 +6337,7 @@ def test_batch_delete_documents_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.batch_delete_documents( request, @@ -6268,6 +6349,7 @@ def test_batch_delete_documents_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_dataset_schema_rest_bad_request( @@ -6360,10 +6442,14 @@ def test_get_dataset_schema_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( 
transports.DocumentServiceRestInterceptor, "post_get_dataset_schema" ) as post, mock.patch.object( + transports.DocumentServiceRestInterceptor, + "post_get_dataset_schema_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DocumentServiceRestInterceptor, "pre_get_dataset_schema" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = document_service.GetDatasetSchemaRequest.pb( document_service.GetDatasetSchemaRequest() ) @@ -6387,6 +6473,7 @@ def test_get_dataset_schema_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = dataset.DatasetSchema() + post_with_metadata.return_value = dataset.DatasetSchema(), metadata client.get_dataset_schema( request, @@ -6398,6 +6485,7 @@ def test_get_dataset_schema_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_dataset_schema_rest_bad_request( @@ -6603,10 +6691,14 @@ def test_update_dataset_schema_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DocumentServiceRestInterceptor, "post_update_dataset_schema" ) as post, mock.patch.object( + transports.DocumentServiceRestInterceptor, + "post_update_dataset_schema_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DocumentServiceRestInterceptor, "pre_update_dataset_schema" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = document_service.UpdateDatasetSchemaRequest.pb( document_service.UpdateDatasetSchemaRequest() ) @@ -6630,6 +6722,7 @@ def test_update_dataset_schema_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = dataset.DatasetSchema() + post_with_metadata.return_value = dataset.DatasetSchema(), metadata client.update_dataset_schema( request, @@ -6641,6 +6734,7 @@ def test_update_dataset_schema_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationRequest): diff --git a/packages/google-cloud-domains/CHANGELOG.md b/packages/google-cloud-domains/CHANGELOG.md index 6b52b056b2b1..a551b336e01a 100644 --- a/packages/google-cloud-domains/CHANGELOG.md +++ b/packages/google-cloud-domains/CHANGELOG.md @@ -1,5 +1,13 @@ # Changelog +## [1.10.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-domains-v1.9.0...google-cloud-domains-v1.10.0) (2025-02-12) + + +### Features + +* Add REST Interceptors which support reading metadata ([e92d527](https://github.com/googleapis/google-cloud-python/commit/e92d52797ffbce45d033eb81af24e0cad32baa55)) +* Add support for reading selective GAPIC generation methods from service YAML ([e92d527](https://github.com/googleapis/google-cloud-python/commit/e92d52797ffbce45d033eb81af24e0cad32baa55)) + ## [1.9.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-domains-v1.8.1...google-cloud-domains-v1.9.0) (2024-12-12) diff --git a/packages/google-cloud-domains/google/cloud/domains/gapic_version.py b/packages/google-cloud-domains/google/cloud/domains/gapic_version.py index 1c08bcbd1569..d1d2a9e60a97 100644 --- a/packages/google-cloud-domains/google/cloud/domains/gapic_version.py +++ b/packages/google-cloud-domains/google/cloud/domains/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing 
permissions and # limitations under the License. # -__version__ = "1.9.0" # {x-release-please-version} +__version__ = "1.10.0" # {x-release-please-version} diff --git a/packages/google-cloud-domains/google/cloud/domains_v1/gapic_version.py b/packages/google-cloud-domains/google/cloud/domains_v1/gapic_version.py index 1c08bcbd1569..d1d2a9e60a97 100644 --- a/packages/google-cloud-domains/google/cloud/domains_v1/gapic_version.py +++ b/packages/google-cloud-domains/google/cloud/domains_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.9.0" # {x-release-please-version} +__version__ = "1.10.0" # {x-release-please-version} diff --git a/packages/google-cloud-domains/google/cloud/domains_v1/services/domains/client.py b/packages/google-cloud-domains/google/cloud/domains_v1/services/domains/client.py index ca29e1c6619e..48bff31dfc26 100644 --- a/packages/google-cloud-domains/google/cloud/domains_v1/services/domains/client.py +++ b/packages/google-cloud-domains/google/cloud/domains_v1/services/domains/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -491,6 +493,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. diff --git a/packages/google-cloud-domains/google/cloud/domains_v1/services/domains/transports/rest.py b/packages/google-cloud-domains/google/cloud/domains_v1/services/domains/transports/rest.py index 86aa9bccdcc9..e808770e2e7b 100644 --- a/packages/google-cloud-domains/google/cloud/domains_v1/services/domains/transports/rest.py +++ b/packages/google-cloud-domains/google/cloud/domains_v1/services/domains/transports/rest.py @@ -214,12 +214,35 @@ def post_configure_contact_settings( ) -> operations_pb2.Operation: """Post-rpc interceptor for configure_contact_settings - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_configure_contact_settings_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Domains server but before - it is returned to user code. + it is returned to user code. This `post_configure_contact_settings` interceptor runs + before the `post_configure_contact_settings_with_metadata` interceptor. 
""" return response + def post_configure_contact_settings_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for configure_contact_settings + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Domains server but before it is returned to user code. + + We recommend only using this `post_configure_contact_settings_with_metadata` + interceptor in new development instead of the `post_configure_contact_settings` interceptor. + When both interceptors are used, this `post_configure_contact_settings_with_metadata` interceptor runs after the + `post_configure_contact_settings` interceptor. The (possibly modified) response returned by + `post_configure_contact_settings` will be passed to + `post_configure_contact_settings_with_metadata`. + """ + return response, metadata + def pre_configure_dns_settings( self, request: domains.ConfigureDnsSettingsRequest, @@ -239,12 +262,35 @@ def post_configure_dns_settings( ) -> operations_pb2.Operation: """Post-rpc interceptor for configure_dns_settings - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_configure_dns_settings_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Domains server but before - it is returned to user code. + it is returned to user code. This `post_configure_dns_settings` interceptor runs + before the `post_configure_dns_settings_with_metadata` interceptor. """ return response + def post_configure_dns_settings_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for configure_dns_settings + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Domains server but before it is returned to user code. + + We recommend only using this `post_configure_dns_settings_with_metadata` + interceptor in new development instead of the `post_configure_dns_settings` interceptor. + When both interceptors are used, this `post_configure_dns_settings_with_metadata` interceptor runs after the + `post_configure_dns_settings` interceptor. The (possibly modified) response returned by + `post_configure_dns_settings` will be passed to + `post_configure_dns_settings_with_metadata`. + """ + return response, metadata + def pre_configure_management_settings( self, request: domains.ConfigureManagementSettingsRequest, @@ -265,12 +311,35 @@ def post_configure_management_settings( ) -> operations_pb2.Operation: """Post-rpc interceptor for configure_management_settings - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_configure_management_settings_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Domains server but before - it is returned to user code. + it is returned to user code. This `post_configure_management_settings` interceptor runs + before the `post_configure_management_settings_with_metadata` interceptor. 
""" return response + def post_configure_management_settings_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for configure_management_settings + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Domains server but before it is returned to user code. + + We recommend only using this `post_configure_management_settings_with_metadata` + interceptor in new development instead of the `post_configure_management_settings` interceptor. + When both interceptors are used, this `post_configure_management_settings_with_metadata` interceptor runs after the + `post_configure_management_settings` interceptor. The (possibly modified) response returned by + `post_configure_management_settings` will be passed to + `post_configure_management_settings_with_metadata`. + """ + return response, metadata + def pre_delete_registration( self, request: domains.DeleteRegistrationRequest, @@ -290,12 +359,35 @@ def post_delete_registration( ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_registration - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_registration_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Domains server but before - it is returned to user code. + it is returned to user code. This `post_delete_registration` interceptor runs + before the `post_delete_registration_with_metadata` interceptor. """ return response + def post_delete_registration_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_registration + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Domains server but before it is returned to user code. + + We recommend only using this `post_delete_registration_with_metadata` + interceptor in new development instead of the `post_delete_registration` interceptor. + When both interceptors are used, this `post_delete_registration_with_metadata` interceptor runs after the + `post_delete_registration` interceptor. The (possibly modified) response returned by + `post_delete_registration` will be passed to + `post_delete_registration_with_metadata`. + """ + return response, metadata + def pre_export_registration( self, request: domains.ExportRegistrationRequest, @@ -315,12 +407,35 @@ def post_export_registration( ) -> operations_pb2.Operation: """Post-rpc interceptor for export_registration - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_export_registration_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Domains server but before - it is returned to user code. + it is returned to user code. This `post_export_registration` interceptor runs + before the `post_export_registration_with_metadata` interceptor. 
""" return response + def post_export_registration_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for export_registration + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Domains server but before it is returned to user code. + + We recommend only using this `post_export_registration_with_metadata` + interceptor in new development instead of the `post_export_registration` interceptor. + When both interceptors are used, this `post_export_registration_with_metadata` interceptor runs after the + `post_export_registration` interceptor. The (possibly modified) response returned by + `post_export_registration` will be passed to + `post_export_registration_with_metadata`. + """ + return response, metadata + def pre_get_registration( self, request: domains.GetRegistrationRequest, @@ -338,12 +453,35 @@ def post_get_registration( ) -> domains.Registration: """Post-rpc interceptor for get_registration - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_registration_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Domains server but before - it is returned to user code. + it is returned to user code. This `post_get_registration` interceptor runs + before the `post_get_registration_with_metadata` interceptor. """ return response + def post_get_registration_with_metadata( + self, + response: domains.Registration, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[domains.Registration, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_registration + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Domains server but before it is returned to user code. + + We recommend only using this `post_get_registration_with_metadata` + interceptor in new development instead of the `post_get_registration` interceptor. + When both interceptors are used, this `post_get_registration_with_metadata` interceptor runs after the + `post_get_registration` interceptor. The (possibly modified) response returned by + `post_get_registration` will be passed to + `post_get_registration_with_metadata`. + """ + return response, metadata + def pre_list_registrations( self, request: domains.ListRegistrationsRequest, @@ -363,12 +501,37 @@ def post_list_registrations( ) -> domains.ListRegistrationsResponse: """Post-rpc interceptor for list_registrations - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_registrations_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Domains server but before - it is returned to user code. + it is returned to user code. This `post_list_registrations` interceptor runs + before the `post_list_registrations_with_metadata` interceptor. 
""" return response + def post_list_registrations_with_metadata( + self, + response: domains.ListRegistrationsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + domains.ListRegistrationsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_registrations + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Domains server but before it is returned to user code. + + We recommend only using this `post_list_registrations_with_metadata` + interceptor in new development instead of the `post_list_registrations` interceptor. + When both interceptors are used, this `post_list_registrations_with_metadata` interceptor runs after the + `post_list_registrations` interceptor. The (possibly modified) response returned by + `post_list_registrations` will be passed to + `post_list_registrations_with_metadata`. + """ + return response, metadata + def pre_register_domain( self, request: domains.RegisterDomainRequest, @@ -386,12 +549,35 @@ def post_register_domain( ) -> operations_pb2.Operation: """Post-rpc interceptor for register_domain - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_register_domain_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Domains server but before - it is returned to user code. + it is returned to user code. This `post_register_domain` interceptor runs + before the `post_register_domain_with_metadata` interceptor. """ return response + def post_register_domain_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for register_domain + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Domains server but before it is returned to user code. + + We recommend only using this `post_register_domain_with_metadata` + interceptor in new development instead of the `post_register_domain` interceptor. + When both interceptors are used, this `post_register_domain_with_metadata` interceptor runs after the + `post_register_domain` interceptor. The (possibly modified) response returned by + `post_register_domain` will be passed to + `post_register_domain_with_metadata`. + """ + return response, metadata + def pre_reset_authorization_code( self, request: domains.ResetAuthorizationCodeRequest, @@ -411,12 +597,35 @@ def post_reset_authorization_code( ) -> domains.AuthorizationCode: """Post-rpc interceptor for reset_authorization_code - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_reset_authorization_code_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Domains server but before - it is returned to user code. + it is returned to user code. This `post_reset_authorization_code` interceptor runs + before the `post_reset_authorization_code_with_metadata` interceptor. 
""" return response + def post_reset_authorization_code_with_metadata( + self, + response: domains.AuthorizationCode, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[domains.AuthorizationCode, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for reset_authorization_code + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Domains server but before it is returned to user code. + + We recommend only using this `post_reset_authorization_code_with_metadata` + interceptor in new development instead of the `post_reset_authorization_code` interceptor. + When both interceptors are used, this `post_reset_authorization_code_with_metadata` interceptor runs after the + `post_reset_authorization_code` interceptor. The (possibly modified) response returned by + `post_reset_authorization_code` will be passed to + `post_reset_authorization_code_with_metadata`. + """ + return response, metadata + def pre_retrieve_authorization_code( self, request: domains.RetrieveAuthorizationCodeRequest, @@ -437,12 +646,35 @@ def post_retrieve_authorization_code( ) -> domains.AuthorizationCode: """Post-rpc interceptor for retrieve_authorization_code - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_retrieve_authorization_code_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Domains server but before - it is returned to user code. + it is returned to user code. This `post_retrieve_authorization_code` interceptor runs + before the `post_retrieve_authorization_code_with_metadata` interceptor. """ return response + def post_retrieve_authorization_code_with_metadata( + self, + response: domains.AuthorizationCode, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[domains.AuthorizationCode, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for retrieve_authorization_code + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Domains server but before it is returned to user code. + + We recommend only using this `post_retrieve_authorization_code_with_metadata` + interceptor in new development instead of the `post_retrieve_authorization_code` interceptor. + When both interceptors are used, this `post_retrieve_authorization_code_with_metadata` interceptor runs after the + `post_retrieve_authorization_code` interceptor. The (possibly modified) response returned by + `post_retrieve_authorization_code` will be passed to + `post_retrieve_authorization_code_with_metadata`. + """ + return response, metadata + def pre_retrieve_register_parameters( self, request: domains.RetrieveRegisterParametersRequest, @@ -463,12 +695,38 @@ def post_retrieve_register_parameters( ) -> domains.RetrieveRegisterParametersResponse: """Post-rpc interceptor for retrieve_register_parameters - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_retrieve_register_parameters_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Domains server but before - it is returned to user code. + it is returned to user code. This `post_retrieve_register_parameters` interceptor runs + before the `post_retrieve_register_parameters_with_metadata` interceptor. 
""" return response + def post_retrieve_register_parameters_with_metadata( + self, + response: domains.RetrieveRegisterParametersResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + domains.RetrieveRegisterParametersResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for retrieve_register_parameters + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Domains server but before it is returned to user code. + + We recommend only using this `post_retrieve_register_parameters_with_metadata` + interceptor in new development instead of the `post_retrieve_register_parameters` interceptor. + When both interceptors are used, this `post_retrieve_register_parameters_with_metadata` interceptor runs after the + `post_retrieve_register_parameters` interceptor. The (possibly modified) response returned by + `post_retrieve_register_parameters` will be passed to + `post_retrieve_register_parameters_with_metadata`. + """ + return response, metadata + def pre_retrieve_transfer_parameters( self, request: domains.RetrieveTransferParametersRequest, @@ -489,12 +747,38 @@ def post_retrieve_transfer_parameters( ) -> domains.RetrieveTransferParametersResponse: """Post-rpc interceptor for retrieve_transfer_parameters - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_retrieve_transfer_parameters_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Domains server but before - it is returned to user code. + it is returned to user code. This `post_retrieve_transfer_parameters` interceptor runs + before the `post_retrieve_transfer_parameters_with_metadata` interceptor. """ return response + def post_retrieve_transfer_parameters_with_metadata( + self, + response: domains.RetrieveTransferParametersResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + domains.RetrieveTransferParametersResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for retrieve_transfer_parameters + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Domains server but before it is returned to user code. + + We recommend only using this `post_retrieve_transfer_parameters_with_metadata` + interceptor in new development instead of the `post_retrieve_transfer_parameters` interceptor. + When both interceptors are used, this `post_retrieve_transfer_parameters_with_metadata` interceptor runs after the + `post_retrieve_transfer_parameters` interceptor. The (possibly modified) response returned by + `post_retrieve_transfer_parameters` will be passed to + `post_retrieve_transfer_parameters_with_metadata`. + """ + return response, metadata + def pre_search_domains( self, request: domains.SearchDomainsRequest, @@ -512,12 +796,35 @@ def post_search_domains( ) -> domains.SearchDomainsResponse: """Post-rpc interceptor for search_domains - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_search_domains_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Domains server but before - it is returned to user code. + it is returned to user code. This `post_search_domains` interceptor runs + before the `post_search_domains_with_metadata` interceptor. 
""" return response + def post_search_domains_with_metadata( + self, + response: domains.SearchDomainsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[domains.SearchDomainsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for search_domains + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Domains server but before it is returned to user code. + + We recommend only using this `post_search_domains_with_metadata` + interceptor in new development instead of the `post_search_domains` interceptor. + When both interceptors are used, this `post_search_domains_with_metadata` interceptor runs after the + `post_search_domains` interceptor. The (possibly modified) response returned by + `post_search_domains` will be passed to + `post_search_domains_with_metadata`. + """ + return response, metadata + def pre_transfer_domain( self, request: domains.TransferDomainRequest, @@ -535,12 +842,35 @@ def post_transfer_domain( ) -> operations_pb2.Operation: """Post-rpc interceptor for transfer_domain - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_transfer_domain_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Domains server but before - it is returned to user code. + it is returned to user code. This `post_transfer_domain` interceptor runs + before the `post_transfer_domain_with_metadata` interceptor. """ return response + def post_transfer_domain_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for transfer_domain + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Domains server but before it is returned to user code. + + We recommend only using this `post_transfer_domain_with_metadata` + interceptor in new development instead of the `post_transfer_domain` interceptor. + When both interceptors are used, this `post_transfer_domain_with_metadata` interceptor runs after the + `post_transfer_domain` interceptor. The (possibly modified) response returned by + `post_transfer_domain` will be passed to + `post_transfer_domain_with_metadata`. + """ + return response, metadata + def pre_update_registration( self, request: domains.UpdateRegistrationRequest, @@ -560,12 +890,35 @@ def post_update_registration( ) -> operations_pb2.Operation: """Post-rpc interceptor for update_registration - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_registration_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Domains server but before - it is returned to user code. + it is returned to user code. This `post_update_registration` interceptor runs + before the `post_update_registration_with_metadata` interceptor. """ return response + def post_update_registration_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_registration + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Domains server but before it is returned to user code. 
+ + We recommend only using this `post_update_registration_with_metadata` + interceptor in new development instead of the `post_update_registration` interceptor. + When both interceptors are used, this `post_update_registration_with_metadata` interceptor runs after the + `post_update_registration` interceptor. The (possibly modified) response returned by + `post_update_registration` will be passed to + `post_update_registration_with_metadata`. + """ + return response, metadata + @dataclasses.dataclass class DomainsRestStub: @@ -822,6 +1175,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_configure_contact_settings(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_configure_contact_settings_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -971,6 +1328,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_configure_dns_settings(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_configure_dns_settings_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1121,6 +1482,13 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_configure_management_settings(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_configure_management_settings_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1264,6 +1632,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_registration(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_registration_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1413,6 +1785,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_export_registration(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_export_registration_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1581,6 +1957,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_registration(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_registration_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1725,6 +2105,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_registrations(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_registrations_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO 
COVER @@ -1878,6 +2262,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_register_domain(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_register_domain_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2026,6 +2414,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_reset_authorization_code(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_reset_authorization_code_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2169,6 +2561,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_retrieve_authorization_code(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_retrieve_authorization_code_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2312,6 +2708,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_retrieve_register_parameters(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_retrieve_register_parameters_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2457,6 +2857,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_retrieve_transfer_parameters(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_retrieve_transfer_parameters_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2601,6 +3005,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_search_domains(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_search_domains_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2752,6 +3160,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_transfer_domain(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_transfer_domain_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2901,6 +3313,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_registration(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_registration_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-domains/google/cloud/domains_v1beta1/gapic_version.py 
b/packages/google-cloud-domains/google/cloud/domains_v1beta1/gapic_version.py index 1c08bcbd1569..d1d2a9e60a97 100644 --- a/packages/google-cloud-domains/google/cloud/domains_v1beta1/gapic_version.py +++ b/packages/google-cloud-domains/google/cloud/domains_v1beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.9.0" # {x-release-please-version} +__version__ = "1.10.0" # {x-release-please-version} diff --git a/packages/google-cloud-domains/google/cloud/domains_v1beta1/services/domains/client.py b/packages/google-cloud-domains/google/cloud/domains_v1beta1/services/domains/client.py index 3d6f5f03acfe..bc8c11e981b4 100644 --- a/packages/google-cloud-domains/google/cloud/domains_v1beta1/services/domains/client.py +++ b/packages/google-cloud-domains/google/cloud/domains_v1beta1/services/domains/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -491,6 +493,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. diff --git a/packages/google-cloud-domains/google/cloud/domains_v1beta1/services/domains/transports/rest.py b/packages/google-cloud-domains/google/cloud/domains_v1beta1/services/domains/transports/rest.py index 84bc362c351e..a57473444380 100644 --- a/packages/google-cloud-domains/google/cloud/domains_v1beta1/services/domains/transports/rest.py +++ b/packages/google-cloud-domains/google/cloud/domains_v1beta1/services/domains/transports/rest.py @@ -214,12 +214,35 @@ def post_configure_contact_settings( ) -> operations_pb2.Operation: """Post-rpc interceptor for configure_contact_settings - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_configure_contact_settings_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Domains server but before - it is returned to user code. + it is returned to user code. This `post_configure_contact_settings` interceptor runs + before the `post_configure_contact_settings_with_metadata` interceptor. 
""" return response + def post_configure_contact_settings_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for configure_contact_settings + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Domains server but before it is returned to user code. + + We recommend only using this `post_configure_contact_settings_with_metadata` + interceptor in new development instead of the `post_configure_contact_settings` interceptor. + When both interceptors are used, this `post_configure_contact_settings_with_metadata` interceptor runs after the + `post_configure_contact_settings` interceptor. The (possibly modified) response returned by + `post_configure_contact_settings` will be passed to + `post_configure_contact_settings_with_metadata`. + """ + return response, metadata + def pre_configure_dns_settings( self, request: domains.ConfigureDnsSettingsRequest, @@ -239,12 +262,35 @@ def post_configure_dns_settings( ) -> operations_pb2.Operation: """Post-rpc interceptor for configure_dns_settings - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_configure_dns_settings_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Domains server but before - it is returned to user code. + it is returned to user code. This `post_configure_dns_settings` interceptor runs + before the `post_configure_dns_settings_with_metadata` interceptor. """ return response + def post_configure_dns_settings_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for configure_dns_settings + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Domains server but before it is returned to user code. + + We recommend only using this `post_configure_dns_settings_with_metadata` + interceptor in new development instead of the `post_configure_dns_settings` interceptor. + When both interceptors are used, this `post_configure_dns_settings_with_metadata` interceptor runs after the + `post_configure_dns_settings` interceptor. The (possibly modified) response returned by + `post_configure_dns_settings` will be passed to + `post_configure_dns_settings_with_metadata`. + """ + return response, metadata + def pre_configure_management_settings( self, request: domains.ConfigureManagementSettingsRequest, @@ -265,12 +311,35 @@ def post_configure_management_settings( ) -> operations_pb2.Operation: """Post-rpc interceptor for configure_management_settings - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_configure_management_settings_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Domains server but before - it is returned to user code. + it is returned to user code. This `post_configure_management_settings` interceptor runs + before the `post_configure_management_settings_with_metadata` interceptor. 
""" return response + def post_configure_management_settings_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for configure_management_settings + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Domains server but before it is returned to user code. + + We recommend only using this `post_configure_management_settings_with_metadata` + interceptor in new development instead of the `post_configure_management_settings` interceptor. + When both interceptors are used, this `post_configure_management_settings_with_metadata` interceptor runs after the + `post_configure_management_settings` interceptor. The (possibly modified) response returned by + `post_configure_management_settings` will be passed to + `post_configure_management_settings_with_metadata`. + """ + return response, metadata + def pre_delete_registration( self, request: domains.DeleteRegistrationRequest, @@ -290,12 +359,35 @@ def post_delete_registration( ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_registration - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_registration_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Domains server but before - it is returned to user code. + it is returned to user code. This `post_delete_registration` interceptor runs + before the `post_delete_registration_with_metadata` interceptor. """ return response + def post_delete_registration_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_registration + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Domains server but before it is returned to user code. + + We recommend only using this `post_delete_registration_with_metadata` + interceptor in new development instead of the `post_delete_registration` interceptor. + When both interceptors are used, this `post_delete_registration_with_metadata` interceptor runs after the + `post_delete_registration` interceptor. The (possibly modified) response returned by + `post_delete_registration` will be passed to + `post_delete_registration_with_metadata`. + """ + return response, metadata + def pre_export_registration( self, request: domains.ExportRegistrationRequest, @@ -315,12 +407,35 @@ def post_export_registration( ) -> operations_pb2.Operation: """Post-rpc interceptor for export_registration - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_export_registration_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Domains server but before - it is returned to user code. + it is returned to user code. This `post_export_registration` interceptor runs + before the `post_export_registration_with_metadata` interceptor. 
""" return response + def post_export_registration_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for export_registration + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Domains server but before it is returned to user code. + + We recommend only using this `post_export_registration_with_metadata` + interceptor in new development instead of the `post_export_registration` interceptor. + When both interceptors are used, this `post_export_registration_with_metadata` interceptor runs after the + `post_export_registration` interceptor. The (possibly modified) response returned by + `post_export_registration` will be passed to + `post_export_registration_with_metadata`. + """ + return response, metadata + def pre_get_registration( self, request: domains.GetRegistrationRequest, @@ -338,12 +453,35 @@ def post_get_registration( ) -> domains.Registration: """Post-rpc interceptor for get_registration - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_registration_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Domains server but before - it is returned to user code. + it is returned to user code. This `post_get_registration` interceptor runs + before the `post_get_registration_with_metadata` interceptor. """ return response + def post_get_registration_with_metadata( + self, + response: domains.Registration, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[domains.Registration, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_registration + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Domains server but before it is returned to user code. + + We recommend only using this `post_get_registration_with_metadata` + interceptor in new development instead of the `post_get_registration` interceptor. + When both interceptors are used, this `post_get_registration_with_metadata` interceptor runs after the + `post_get_registration` interceptor. The (possibly modified) response returned by + `post_get_registration` will be passed to + `post_get_registration_with_metadata`. + """ + return response, metadata + def pre_list_registrations( self, request: domains.ListRegistrationsRequest, @@ -363,12 +501,37 @@ def post_list_registrations( ) -> domains.ListRegistrationsResponse: """Post-rpc interceptor for list_registrations - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_registrations_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Domains server but before - it is returned to user code. + it is returned to user code. This `post_list_registrations` interceptor runs + before the `post_list_registrations_with_metadata` interceptor. 
""" return response + def post_list_registrations_with_metadata( + self, + response: domains.ListRegistrationsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + domains.ListRegistrationsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_registrations + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Domains server but before it is returned to user code. + + We recommend only using this `post_list_registrations_with_metadata` + interceptor in new development instead of the `post_list_registrations` interceptor. + When both interceptors are used, this `post_list_registrations_with_metadata` interceptor runs after the + `post_list_registrations` interceptor. The (possibly modified) response returned by + `post_list_registrations` will be passed to + `post_list_registrations_with_metadata`. + """ + return response, metadata + def pre_register_domain( self, request: domains.RegisterDomainRequest, @@ -386,12 +549,35 @@ def post_register_domain( ) -> operations_pb2.Operation: """Post-rpc interceptor for register_domain - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_register_domain_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Domains server but before - it is returned to user code. + it is returned to user code. This `post_register_domain` interceptor runs + before the `post_register_domain_with_metadata` interceptor. """ return response + def post_register_domain_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for register_domain + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Domains server but before it is returned to user code. + + We recommend only using this `post_register_domain_with_metadata` + interceptor in new development instead of the `post_register_domain` interceptor. + When both interceptors are used, this `post_register_domain_with_metadata` interceptor runs after the + `post_register_domain` interceptor. The (possibly modified) response returned by + `post_register_domain` will be passed to + `post_register_domain_with_metadata`. + """ + return response, metadata + def pre_reset_authorization_code( self, request: domains.ResetAuthorizationCodeRequest, @@ -411,12 +597,35 @@ def post_reset_authorization_code( ) -> domains.AuthorizationCode: """Post-rpc interceptor for reset_authorization_code - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_reset_authorization_code_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Domains server but before - it is returned to user code. + it is returned to user code. This `post_reset_authorization_code` interceptor runs + before the `post_reset_authorization_code_with_metadata` interceptor. 
""" return response + def post_reset_authorization_code_with_metadata( + self, + response: domains.AuthorizationCode, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[domains.AuthorizationCode, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for reset_authorization_code + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Domains server but before it is returned to user code. + + We recommend only using this `post_reset_authorization_code_with_metadata` + interceptor in new development instead of the `post_reset_authorization_code` interceptor. + When both interceptors are used, this `post_reset_authorization_code_with_metadata` interceptor runs after the + `post_reset_authorization_code` interceptor. The (possibly modified) response returned by + `post_reset_authorization_code` will be passed to + `post_reset_authorization_code_with_metadata`. + """ + return response, metadata + def pre_retrieve_authorization_code( self, request: domains.RetrieveAuthorizationCodeRequest, @@ -437,12 +646,35 @@ def post_retrieve_authorization_code( ) -> domains.AuthorizationCode: """Post-rpc interceptor for retrieve_authorization_code - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_retrieve_authorization_code_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Domains server but before - it is returned to user code. + it is returned to user code. This `post_retrieve_authorization_code` interceptor runs + before the `post_retrieve_authorization_code_with_metadata` interceptor. """ return response + def post_retrieve_authorization_code_with_metadata( + self, + response: domains.AuthorizationCode, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[domains.AuthorizationCode, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for retrieve_authorization_code + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Domains server but before it is returned to user code. + + We recommend only using this `post_retrieve_authorization_code_with_metadata` + interceptor in new development instead of the `post_retrieve_authorization_code` interceptor. + When both interceptors are used, this `post_retrieve_authorization_code_with_metadata` interceptor runs after the + `post_retrieve_authorization_code` interceptor. The (possibly modified) response returned by + `post_retrieve_authorization_code` will be passed to + `post_retrieve_authorization_code_with_metadata`. + """ + return response, metadata + def pre_retrieve_register_parameters( self, request: domains.RetrieveRegisterParametersRequest, @@ -463,12 +695,38 @@ def post_retrieve_register_parameters( ) -> domains.RetrieveRegisterParametersResponse: """Post-rpc interceptor for retrieve_register_parameters - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_retrieve_register_parameters_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Domains server but before - it is returned to user code. + it is returned to user code. This `post_retrieve_register_parameters` interceptor runs + before the `post_retrieve_register_parameters_with_metadata` interceptor. 
""" return response + def post_retrieve_register_parameters_with_metadata( + self, + response: domains.RetrieveRegisterParametersResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + domains.RetrieveRegisterParametersResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for retrieve_register_parameters + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Domains server but before it is returned to user code. + + We recommend only using this `post_retrieve_register_parameters_with_metadata` + interceptor in new development instead of the `post_retrieve_register_parameters` interceptor. + When both interceptors are used, this `post_retrieve_register_parameters_with_metadata` interceptor runs after the + `post_retrieve_register_parameters` interceptor. The (possibly modified) response returned by + `post_retrieve_register_parameters` will be passed to + `post_retrieve_register_parameters_with_metadata`. + """ + return response, metadata + def pre_retrieve_transfer_parameters( self, request: domains.RetrieveTransferParametersRequest, @@ -489,12 +747,38 @@ def post_retrieve_transfer_parameters( ) -> domains.RetrieveTransferParametersResponse: """Post-rpc interceptor for retrieve_transfer_parameters - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_retrieve_transfer_parameters_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Domains server but before - it is returned to user code. + it is returned to user code. This `post_retrieve_transfer_parameters` interceptor runs + before the `post_retrieve_transfer_parameters_with_metadata` interceptor. """ return response + def post_retrieve_transfer_parameters_with_metadata( + self, + response: domains.RetrieveTransferParametersResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + domains.RetrieveTransferParametersResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for retrieve_transfer_parameters + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Domains server but before it is returned to user code. + + We recommend only using this `post_retrieve_transfer_parameters_with_metadata` + interceptor in new development instead of the `post_retrieve_transfer_parameters` interceptor. + When both interceptors are used, this `post_retrieve_transfer_parameters_with_metadata` interceptor runs after the + `post_retrieve_transfer_parameters` interceptor. The (possibly modified) response returned by + `post_retrieve_transfer_parameters` will be passed to + `post_retrieve_transfer_parameters_with_metadata`. + """ + return response, metadata + def pre_search_domains( self, request: domains.SearchDomainsRequest, @@ -512,12 +796,35 @@ def post_search_domains( ) -> domains.SearchDomainsResponse: """Post-rpc interceptor for search_domains - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_search_domains_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Domains server but before - it is returned to user code. + it is returned to user code. This `post_search_domains` interceptor runs + before the `post_search_domains_with_metadata` interceptor. 
""" return response + def post_search_domains_with_metadata( + self, + response: domains.SearchDomainsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[domains.SearchDomainsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for search_domains + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Domains server but before it is returned to user code. + + We recommend only using this `post_search_domains_with_metadata` + interceptor in new development instead of the `post_search_domains` interceptor. + When both interceptors are used, this `post_search_domains_with_metadata` interceptor runs after the + `post_search_domains` interceptor. The (possibly modified) response returned by + `post_search_domains` will be passed to + `post_search_domains_with_metadata`. + """ + return response, metadata + def pre_transfer_domain( self, request: domains.TransferDomainRequest, @@ -535,12 +842,35 @@ def post_transfer_domain( ) -> operations_pb2.Operation: """Post-rpc interceptor for transfer_domain - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_transfer_domain_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Domains server but before - it is returned to user code. + it is returned to user code. This `post_transfer_domain` interceptor runs + before the `post_transfer_domain_with_metadata` interceptor. """ return response + def post_transfer_domain_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for transfer_domain + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Domains server but before it is returned to user code. + + We recommend only using this `post_transfer_domain_with_metadata` + interceptor in new development instead of the `post_transfer_domain` interceptor. + When both interceptors are used, this `post_transfer_domain_with_metadata` interceptor runs after the + `post_transfer_domain` interceptor. The (possibly modified) response returned by + `post_transfer_domain` will be passed to + `post_transfer_domain_with_metadata`. + """ + return response, metadata + def pre_update_registration( self, request: domains.UpdateRegistrationRequest, @@ -560,12 +890,35 @@ def post_update_registration( ) -> operations_pb2.Operation: """Post-rpc interceptor for update_registration - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_registration_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Domains server but before - it is returned to user code. + it is returned to user code. This `post_update_registration` interceptor runs + before the `post_update_registration_with_metadata` interceptor. """ return response + def post_update_registration_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_registration + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Domains server but before it is returned to user code. 
+ + We recommend only using this `post_update_registration_with_metadata` + interceptor in new development instead of the `post_update_registration` interceptor. + When both interceptors are used, this `post_update_registration_with_metadata` interceptor runs after the + `post_update_registration` interceptor. The (possibly modified) response returned by + `post_update_registration` will be passed to + `post_update_registration_with_metadata`. + """ + return response, metadata + @dataclasses.dataclass class DomainsRestStub: @@ -822,6 +1175,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_configure_contact_settings(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_configure_contact_settings_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -971,6 +1328,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_configure_dns_settings(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_configure_dns_settings_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1121,6 +1482,13 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_configure_management_settings(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_configure_management_settings_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1264,6 +1632,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_registration(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_registration_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1413,6 +1785,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_export_registration(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_export_registration_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1581,6 +1957,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_registration(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_registration_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1725,6 +2105,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_registrations(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_registrations_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO 
COVER @@ -1878,6 +2262,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_register_domain(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_register_domain_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2026,6 +2414,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_reset_authorization_code(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_reset_authorization_code_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2169,6 +2561,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_retrieve_authorization_code(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_retrieve_authorization_code_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2312,6 +2708,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_retrieve_register_parameters(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_retrieve_register_parameters_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2457,6 +2857,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_retrieve_transfer_parameters(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_retrieve_transfer_parameters_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2601,6 +3005,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_search_domains(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_search_domains_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2752,6 +3160,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_transfer_domain(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_transfer_domain_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2901,6 +3313,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_registration(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_registration_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git 
a/packages/google-cloud-domains/samples/generated_samples/snippet_metadata_google.cloud.domains.v1.json b/packages/google-cloud-domains/samples/generated_samples/snippet_metadata_google.cloud.domains.v1.json index db2d9e3e9be6..708884c0af9f 100644 --- a/packages/google-cloud-domains/samples/generated_samples/snippet_metadata_google.cloud.domains.v1.json +++ b/packages/google-cloud-domains/samples/generated_samples/snippet_metadata_google.cloud.domains.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-domains", - "version": "1.9.0" + "version": "1.10.0" }, "snippets": [ { diff --git a/packages/google-cloud-domains/samples/generated_samples/snippet_metadata_google.cloud.domains.v1beta1.json b/packages/google-cloud-domains/samples/generated_samples/snippet_metadata_google.cloud.domains.v1beta1.json index b47d940bd529..0c21ed88e78a 100644 --- a/packages/google-cloud-domains/samples/generated_samples/snippet_metadata_google.cloud.domains.v1beta1.json +++ b/packages/google-cloud-domains/samples/generated_samples/snippet_metadata_google.cloud.domains.v1beta1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-domains", - "version": "1.9.0" + "version": "1.10.0" }, "snippets": [ { diff --git a/packages/google-cloud-domains/tests/unit/gapic/domains_v1/test_domains.py b/packages/google-cloud-domains/tests/unit/gapic/domains_v1/test_domains.py index 18a6ea1a606c..44ad86c67345 100644 --- a/packages/google-cloud-domains/tests/unit/gapic/domains_v1/test_domains.py +++ b/packages/google-cloud-domains/tests/unit/gapic/domains_v1/test_domains.py @@ -75,6 +75,13 @@ ) from google.cloud.domains_v1.types import domains +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -298,6 +305,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
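
[Editor's note, not part of the patch] The new unit tests added just below exercise the `_add_cred_info_for_auth_errors` helper introduced in `client.py` earlier in this diff. As a minimal, hedged sketch of how the appended credential info could surface to calling code: the import path `google.cloud.domains_v1`, the resource name, and the header of the printed message are illustrative assumptions, and the JSON detail only appears on 401/403/404 errors when the installed credentials (google-auth >= 2.35.0) expose `get_cred_info()`.

```python
# Hedged sketch, not part of the patch: reading the credential info that
# _add_cred_info_for_auth_errors appends to 401/403/404 error details.
import json

from google.api_core import exceptions as core_exceptions
from google.cloud import domains_v1  # assumed public import path

client = domains_v1.DomainsClient()

try:
    client.get_registration(
        # illustrative resource name
        name="projects/my-project/locations/global/registrations/example.dev"
    )
except core_exceptions.PermissionDenied as exc:  # HTTP 403
    for detail in exc.details:
        # When the credentials implement get_cred_info(), one detail is a JSON
        # string like the CRED_INFO_JSON fixture used in the tests below:
        # {"credential_source": ..., "credential_type": ..., "principal": ...}
        try:
            info = json.loads(detail)
        except (TypeError, ValueError):
            continue
        if isinstance(info, dict) and "credential_type" in info:
            print("request was authenticated as", info.get("principal"))
```
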
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = DomainsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = DomainsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -10424,10 +10474,13 @@ def test_search_domains_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DomainsRestInterceptor, "post_search_domains" ) as post, mock.patch.object( + transports.DomainsRestInterceptor, "post_search_domains_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DomainsRestInterceptor, "pre_search_domains" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = domains.SearchDomainsRequest.pb(domains.SearchDomainsRequest()) transcode.return_value = { "method": "post", @@ -10451,6 +10504,7 @@ def test_search_domains_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = domains.SearchDomainsResponse() + post_with_metadata.return_value = domains.SearchDomainsResponse(), metadata client.search_domains( request, @@ -10462,6 +10516,7 @@ def test_search_domains_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_retrieve_register_parameters_rest_bad_request( @@ -10541,10 +10596,14 @@ def test_retrieve_register_parameters_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DomainsRestInterceptor, "post_retrieve_register_parameters" ) as post, mock.patch.object( + transports.DomainsRestInterceptor, + "post_retrieve_register_parameters_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DomainsRestInterceptor, "pre_retrieve_register_parameters" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = domains.RetrieveRegisterParametersRequest.pb( domains.RetrieveRegisterParametersRequest() ) @@ -10570,6 +10629,10 @@ def test_retrieve_register_parameters_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = domains.RetrieveRegisterParametersResponse() + post_with_metadata.return_value = ( + domains.RetrieveRegisterParametersResponse(), + metadata, + ) client.retrieve_register_parameters( request, @@ -10581,6 +10644,7 @@ def 
test_retrieve_register_parameters_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_register_domain_rest_bad_request(request_type=domains.RegisterDomainRequest): @@ -10657,10 +10721,13 @@ def test_register_domain_rest_interceptors(null_interceptor): ), mock.patch.object( transports.DomainsRestInterceptor, "post_register_domain" ) as post, mock.patch.object( + transports.DomainsRestInterceptor, "post_register_domain_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DomainsRestInterceptor, "pre_register_domain" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = domains.RegisterDomainRequest.pb(domains.RegisterDomainRequest()) transcode.return_value = { "method": "post", @@ -10682,6 +10749,7 @@ def test_register_domain_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.register_domain( request, @@ -10693,6 +10761,7 @@ def test_register_domain_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_retrieve_transfer_parameters_rest_bad_request( @@ -10772,10 +10841,14 @@ def test_retrieve_transfer_parameters_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DomainsRestInterceptor, "post_retrieve_transfer_parameters" ) as post, mock.patch.object( + transports.DomainsRestInterceptor, + "post_retrieve_transfer_parameters_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DomainsRestInterceptor, "pre_retrieve_transfer_parameters" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = domains.RetrieveTransferParametersRequest.pb( domains.RetrieveTransferParametersRequest() ) @@ -10801,6 +10874,10 @@ def test_retrieve_transfer_parameters_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = domains.RetrieveTransferParametersResponse() + post_with_metadata.return_value = ( + domains.RetrieveTransferParametersResponse(), + metadata, + ) client.retrieve_transfer_parameters( request, @@ -10812,6 +10889,7 @@ def test_retrieve_transfer_parameters_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_transfer_domain_rest_bad_request(request_type=domains.TransferDomainRequest): @@ -10888,10 +10966,13 @@ def test_transfer_domain_rest_interceptors(null_interceptor): ), mock.patch.object( transports.DomainsRestInterceptor, "post_transfer_domain" ) as post, mock.patch.object( + transports.DomainsRestInterceptor, "post_transfer_domain_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DomainsRestInterceptor, "pre_transfer_domain" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = domains.TransferDomainRequest.pb(domains.TransferDomainRequest()) transcode.return_value = { "method": "post", @@ -10913,6 +10994,7 @@ def test_transfer_domain_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.transfer_domain( request, @@ -10924,6 +11006,7 @@ def 
test_transfer_domain_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_registrations_rest_bad_request( @@ -11006,10 +11089,13 @@ def test_list_registrations_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DomainsRestInterceptor, "post_list_registrations" ) as post, mock.patch.object( + transports.DomainsRestInterceptor, "post_list_registrations_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DomainsRestInterceptor, "pre_list_registrations" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = domains.ListRegistrationsRequest.pb( domains.ListRegistrationsRequest() ) @@ -11035,6 +11121,7 @@ def test_list_registrations_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = domains.ListRegistrationsResponse() + post_with_metadata.return_value = domains.ListRegistrationsResponse(), metadata client.list_registrations( request, @@ -11046,6 +11133,7 @@ def test_list_registrations_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_registration_rest_bad_request(request_type=domains.GetRegistrationRequest): @@ -11134,10 +11222,13 @@ def test_get_registration_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DomainsRestInterceptor, "post_get_registration" ) as post, mock.patch.object( + transports.DomainsRestInterceptor, "post_get_registration_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DomainsRestInterceptor, "pre_get_registration" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = domains.GetRegistrationRequest.pb(domains.GetRegistrationRequest()) transcode.return_value = { "method": "post", @@ -11159,6 +11250,7 @@ def test_get_registration_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = domains.Registration() + post_with_metadata.return_value = domains.Registration(), metadata client.get_registration( request, @@ -11170,6 +11262,7 @@ def test_get_registration_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_registration_rest_bad_request( @@ -11389,10 +11482,13 @@ def test_update_registration_rest_interceptors(null_interceptor): ), mock.patch.object( transports.DomainsRestInterceptor, "post_update_registration" ) as post, mock.patch.object( + transports.DomainsRestInterceptor, "post_update_registration_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DomainsRestInterceptor, "pre_update_registration" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = domains.UpdateRegistrationRequest.pb( domains.UpdateRegistrationRequest() ) @@ -11416,6 +11512,7 @@ def test_update_registration_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.update_registration( request, @@ -11427,6 +11524,7 @@ def test_update_registration_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def 
test_configure_management_settings_rest_bad_request( @@ -11509,10 +11607,14 @@ def test_configure_management_settings_rest_interceptors(null_interceptor): ), mock.patch.object( transports.DomainsRestInterceptor, "post_configure_management_settings" ) as post, mock.patch.object( + transports.DomainsRestInterceptor, + "post_configure_management_settings_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DomainsRestInterceptor, "pre_configure_management_settings" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = domains.ConfigureManagementSettingsRequest.pb( domains.ConfigureManagementSettingsRequest() ) @@ -11536,6 +11638,7 @@ def test_configure_management_settings_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.configure_management_settings( request, @@ -11547,6 +11650,7 @@ def test_configure_management_settings_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_configure_dns_settings_rest_bad_request( @@ -11629,10 +11733,13 @@ def test_configure_dns_settings_rest_interceptors(null_interceptor): ), mock.patch.object( transports.DomainsRestInterceptor, "post_configure_dns_settings" ) as post, mock.patch.object( + transports.DomainsRestInterceptor, "post_configure_dns_settings_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DomainsRestInterceptor, "pre_configure_dns_settings" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = domains.ConfigureDnsSettingsRequest.pb( domains.ConfigureDnsSettingsRequest() ) @@ -11656,6 +11763,7 @@ def test_configure_dns_settings_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.configure_dns_settings( request, @@ -11667,6 +11775,7 @@ def test_configure_dns_settings_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_configure_contact_settings_rest_bad_request( @@ -11749,10 +11858,14 @@ def test_configure_contact_settings_rest_interceptors(null_interceptor): ), mock.patch.object( transports.DomainsRestInterceptor, "post_configure_contact_settings" ) as post, mock.patch.object( + transports.DomainsRestInterceptor, + "post_configure_contact_settings_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DomainsRestInterceptor, "pre_configure_contact_settings" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = domains.ConfigureContactSettingsRequest.pb( domains.ConfigureContactSettingsRequest() ) @@ -11776,6 +11889,7 @@ def test_configure_contact_settings_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.configure_contact_settings( request, @@ -11787,6 +11901,7 @@ def test_configure_contact_settings_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_export_registration_rest_bad_request( @@ -11865,10 +11980,13 
@@ def test_export_registration_rest_interceptors(null_interceptor): ), mock.patch.object( transports.DomainsRestInterceptor, "post_export_registration" ) as post, mock.patch.object( + transports.DomainsRestInterceptor, "post_export_registration_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DomainsRestInterceptor, "pre_export_registration" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = domains.ExportRegistrationRequest.pb( domains.ExportRegistrationRequest() ) @@ -11892,6 +12010,7 @@ def test_export_registration_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.export_registration( request, @@ -11903,6 +12022,7 @@ def test_export_registration_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_registration_rest_bad_request( @@ -11981,10 +12101,13 @@ def test_delete_registration_rest_interceptors(null_interceptor): ), mock.patch.object( transports.DomainsRestInterceptor, "post_delete_registration" ) as post, mock.patch.object( + transports.DomainsRestInterceptor, "post_delete_registration_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DomainsRestInterceptor, "pre_delete_registration" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = domains.DeleteRegistrationRequest.pb( domains.DeleteRegistrationRequest() ) @@ -12008,6 +12131,7 @@ def test_delete_registration_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.delete_registration( request, @@ -12019,6 +12143,7 @@ def test_delete_registration_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_retrieve_authorization_code_rest_bad_request( @@ -12105,10 +12230,14 @@ def test_retrieve_authorization_code_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DomainsRestInterceptor, "post_retrieve_authorization_code" ) as post, mock.patch.object( + transports.DomainsRestInterceptor, + "post_retrieve_authorization_code_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DomainsRestInterceptor, "pre_retrieve_authorization_code" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = domains.RetrieveAuthorizationCodeRequest.pb( domains.RetrieveAuthorizationCodeRequest() ) @@ -12132,6 +12261,7 @@ def test_retrieve_authorization_code_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = domains.AuthorizationCode() + post_with_metadata.return_value = domains.AuthorizationCode(), metadata client.retrieve_authorization_code( request, @@ -12143,6 +12273,7 @@ def test_retrieve_authorization_code_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_reset_authorization_code_rest_bad_request( @@ -12229,10 +12360,13 @@ def test_reset_authorization_code_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DomainsRestInterceptor, 
"post_reset_authorization_code" ) as post, mock.patch.object( + transports.DomainsRestInterceptor, "post_reset_authorization_code_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DomainsRestInterceptor, "pre_reset_authorization_code" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = domains.ResetAuthorizationCodeRequest.pb( domains.ResetAuthorizationCodeRequest() ) @@ -12256,6 +12390,7 @@ def test_reset_authorization_code_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = domains.AuthorizationCode() + post_with_metadata.return_value = domains.AuthorizationCode(), metadata client.reset_authorization_code( request, @@ -12267,6 +12402,7 @@ def test_reset_authorization_code_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_initialize_client_w_rest(): diff --git a/packages/google-cloud-domains/tests/unit/gapic/domains_v1beta1/test_domains.py b/packages/google-cloud-domains/tests/unit/gapic/domains_v1beta1/test_domains.py index c5f9ab7872d1..610e5ffb0691 100644 --- a/packages/google-cloud-domains/tests/unit/gapic/domains_v1beta1/test_domains.py +++ b/packages/google-cloud-domains/tests/unit/gapic/domains_v1beta1/test_domains.py @@ -75,6 +75,13 @@ ) from google.cloud.domains_v1beta1.types import domains +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -298,6 +305,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = DomainsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = DomainsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -10424,10 +10474,13 @@ def test_search_domains_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DomainsRestInterceptor, "post_search_domains" ) as post, mock.patch.object( + transports.DomainsRestInterceptor, "post_search_domains_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DomainsRestInterceptor, "pre_search_domains" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = domains.SearchDomainsRequest.pb(domains.SearchDomainsRequest()) transcode.return_value = { "method": "post", @@ -10451,6 +10504,7 @@ def test_search_domains_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = domains.SearchDomainsResponse() + post_with_metadata.return_value = domains.SearchDomainsResponse(), metadata client.search_domains( request, @@ -10462,6 +10516,7 @@ def test_search_domains_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_retrieve_register_parameters_rest_bad_request( @@ -10541,10 +10596,14 @@ def test_retrieve_register_parameters_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DomainsRestInterceptor, "post_retrieve_register_parameters" ) as post, mock.patch.object( + transports.DomainsRestInterceptor, + "post_retrieve_register_parameters_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DomainsRestInterceptor, "pre_retrieve_register_parameters" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = domains.RetrieveRegisterParametersRequest.pb( domains.RetrieveRegisterParametersRequest() ) @@ -10570,6 +10629,10 @@ def test_retrieve_register_parameters_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = domains.RetrieveRegisterParametersResponse() + post_with_metadata.return_value = ( + domains.RetrieveRegisterParametersResponse(), + metadata, + ) client.retrieve_register_parameters( request, @@ -10581,6 +10644,7 @@ def 
test_retrieve_register_parameters_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_register_domain_rest_bad_request(request_type=domains.RegisterDomainRequest): @@ -10657,10 +10721,13 @@ def test_register_domain_rest_interceptors(null_interceptor): ), mock.patch.object( transports.DomainsRestInterceptor, "post_register_domain" ) as post, mock.patch.object( + transports.DomainsRestInterceptor, "post_register_domain_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DomainsRestInterceptor, "pre_register_domain" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = domains.RegisterDomainRequest.pb(domains.RegisterDomainRequest()) transcode.return_value = { "method": "post", @@ -10682,6 +10749,7 @@ def test_register_domain_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.register_domain( request, @@ -10693,6 +10761,7 @@ def test_register_domain_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_retrieve_transfer_parameters_rest_bad_request( @@ -10772,10 +10841,14 @@ def test_retrieve_transfer_parameters_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DomainsRestInterceptor, "post_retrieve_transfer_parameters" ) as post, mock.patch.object( + transports.DomainsRestInterceptor, + "post_retrieve_transfer_parameters_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DomainsRestInterceptor, "pre_retrieve_transfer_parameters" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = domains.RetrieveTransferParametersRequest.pb( domains.RetrieveTransferParametersRequest() ) @@ -10801,6 +10874,10 @@ def test_retrieve_transfer_parameters_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = domains.RetrieveTransferParametersResponse() + post_with_metadata.return_value = ( + domains.RetrieveTransferParametersResponse(), + metadata, + ) client.retrieve_transfer_parameters( request, @@ -10812,6 +10889,7 @@ def test_retrieve_transfer_parameters_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_transfer_domain_rest_bad_request(request_type=domains.TransferDomainRequest): @@ -10888,10 +10966,13 @@ def test_transfer_domain_rest_interceptors(null_interceptor): ), mock.patch.object( transports.DomainsRestInterceptor, "post_transfer_domain" ) as post, mock.patch.object( + transports.DomainsRestInterceptor, "post_transfer_domain_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DomainsRestInterceptor, "pre_transfer_domain" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = domains.TransferDomainRequest.pb(domains.TransferDomainRequest()) transcode.return_value = { "method": "post", @@ -10913,6 +10994,7 @@ def test_transfer_domain_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.transfer_domain( request, @@ -10924,6 +11006,7 @@ def 
test_transfer_domain_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_registrations_rest_bad_request( @@ -11006,10 +11089,13 @@ def test_list_registrations_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DomainsRestInterceptor, "post_list_registrations" ) as post, mock.patch.object( + transports.DomainsRestInterceptor, "post_list_registrations_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DomainsRestInterceptor, "pre_list_registrations" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = domains.ListRegistrationsRequest.pb( domains.ListRegistrationsRequest() ) @@ -11035,6 +11121,7 @@ def test_list_registrations_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = domains.ListRegistrationsResponse() + post_with_metadata.return_value = domains.ListRegistrationsResponse(), metadata client.list_registrations( request, @@ -11046,6 +11133,7 @@ def test_list_registrations_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_registration_rest_bad_request(request_type=domains.GetRegistrationRequest): @@ -11134,10 +11222,13 @@ def test_get_registration_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DomainsRestInterceptor, "post_get_registration" ) as post, mock.patch.object( + transports.DomainsRestInterceptor, "post_get_registration_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DomainsRestInterceptor, "pre_get_registration" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = domains.GetRegistrationRequest.pb(domains.GetRegistrationRequest()) transcode.return_value = { "method": "post", @@ -11159,6 +11250,7 @@ def test_get_registration_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = domains.Registration() + post_with_metadata.return_value = domains.Registration(), metadata client.get_registration( request, @@ -11170,6 +11262,7 @@ def test_get_registration_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_registration_rest_bad_request( @@ -11389,10 +11482,13 @@ def test_update_registration_rest_interceptors(null_interceptor): ), mock.patch.object( transports.DomainsRestInterceptor, "post_update_registration" ) as post, mock.patch.object( + transports.DomainsRestInterceptor, "post_update_registration_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DomainsRestInterceptor, "pre_update_registration" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = domains.UpdateRegistrationRequest.pb( domains.UpdateRegistrationRequest() ) @@ -11416,6 +11512,7 @@ def test_update_registration_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.update_registration( request, @@ -11427,6 +11524,7 @@ def test_update_registration_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def 
test_configure_management_settings_rest_bad_request( @@ -11509,10 +11607,14 @@ def test_configure_management_settings_rest_interceptors(null_interceptor): ), mock.patch.object( transports.DomainsRestInterceptor, "post_configure_management_settings" ) as post, mock.patch.object( + transports.DomainsRestInterceptor, + "post_configure_management_settings_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DomainsRestInterceptor, "pre_configure_management_settings" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = domains.ConfigureManagementSettingsRequest.pb( domains.ConfigureManagementSettingsRequest() ) @@ -11536,6 +11638,7 @@ def test_configure_management_settings_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.configure_management_settings( request, @@ -11547,6 +11650,7 @@ def test_configure_management_settings_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_configure_dns_settings_rest_bad_request( @@ -11629,10 +11733,13 @@ def test_configure_dns_settings_rest_interceptors(null_interceptor): ), mock.patch.object( transports.DomainsRestInterceptor, "post_configure_dns_settings" ) as post, mock.patch.object( + transports.DomainsRestInterceptor, "post_configure_dns_settings_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DomainsRestInterceptor, "pre_configure_dns_settings" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = domains.ConfigureDnsSettingsRequest.pb( domains.ConfigureDnsSettingsRequest() ) @@ -11656,6 +11763,7 @@ def test_configure_dns_settings_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.configure_dns_settings( request, @@ -11667,6 +11775,7 @@ def test_configure_dns_settings_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_configure_contact_settings_rest_bad_request( @@ -11749,10 +11858,14 @@ def test_configure_contact_settings_rest_interceptors(null_interceptor): ), mock.patch.object( transports.DomainsRestInterceptor, "post_configure_contact_settings" ) as post, mock.patch.object( + transports.DomainsRestInterceptor, + "post_configure_contact_settings_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DomainsRestInterceptor, "pre_configure_contact_settings" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = domains.ConfigureContactSettingsRequest.pb( domains.ConfigureContactSettingsRequest() ) @@ -11776,6 +11889,7 @@ def test_configure_contact_settings_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.configure_contact_settings( request, @@ -11787,6 +11901,7 @@ def test_configure_contact_settings_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_export_registration_rest_bad_request( @@ -11865,10 +11980,13 
@@ def test_export_registration_rest_interceptors(null_interceptor): ), mock.patch.object( transports.DomainsRestInterceptor, "post_export_registration" ) as post, mock.patch.object( + transports.DomainsRestInterceptor, "post_export_registration_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DomainsRestInterceptor, "pre_export_registration" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = domains.ExportRegistrationRequest.pb( domains.ExportRegistrationRequest() ) @@ -11892,6 +12010,7 @@ def test_export_registration_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.export_registration( request, @@ -11903,6 +12022,7 @@ def test_export_registration_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_registration_rest_bad_request( @@ -11981,10 +12101,13 @@ def test_delete_registration_rest_interceptors(null_interceptor): ), mock.patch.object( transports.DomainsRestInterceptor, "post_delete_registration" ) as post, mock.patch.object( + transports.DomainsRestInterceptor, "post_delete_registration_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DomainsRestInterceptor, "pre_delete_registration" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = domains.DeleteRegistrationRequest.pb( domains.DeleteRegistrationRequest() ) @@ -12008,6 +12131,7 @@ def test_delete_registration_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.delete_registration( request, @@ -12019,6 +12143,7 @@ def test_delete_registration_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_retrieve_authorization_code_rest_bad_request( @@ -12105,10 +12230,14 @@ def test_retrieve_authorization_code_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DomainsRestInterceptor, "post_retrieve_authorization_code" ) as post, mock.patch.object( + transports.DomainsRestInterceptor, + "post_retrieve_authorization_code_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.DomainsRestInterceptor, "pre_retrieve_authorization_code" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = domains.RetrieveAuthorizationCodeRequest.pb( domains.RetrieveAuthorizationCodeRequest() ) @@ -12132,6 +12261,7 @@ def test_retrieve_authorization_code_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = domains.AuthorizationCode() + post_with_metadata.return_value = domains.AuthorizationCode(), metadata client.retrieve_authorization_code( request, @@ -12143,6 +12273,7 @@ def test_retrieve_authorization_code_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_reset_authorization_code_rest_bad_request( @@ -12229,10 +12360,13 @@ def test_reset_authorization_code_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.DomainsRestInterceptor, 
"post_reset_authorization_code" ) as post, mock.patch.object( + transports.DomainsRestInterceptor, "post_reset_authorization_code_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.DomainsRestInterceptor, "pre_reset_authorization_code" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = domains.ResetAuthorizationCodeRequest.pb( domains.ResetAuthorizationCodeRequest() ) @@ -12256,6 +12390,7 @@ def test_reset_authorization_code_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = domains.AuthorizationCode() + post_with_metadata.return_value = domains.AuthorizationCode(), metadata client.reset_authorization_code( request, @@ -12267,6 +12402,7 @@ def test_reset_authorization_code_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_initialize_client_w_rest(): diff --git a/packages/google-cloud-edgecontainer/CHANGELOG.md b/packages/google-cloud-edgecontainer/CHANGELOG.md index 6e931cd02cfe..19ea332bbff7 100644 --- a/packages/google-cloud-edgecontainer/CHANGELOG.md +++ b/packages/google-cloud-edgecontainer/CHANGELOG.md @@ -1,5 +1,13 @@ # Changelog +## [0.5.16](https://github.com/googleapis/google-cloud-python/compare/google-cloud-edgecontainer-v0.5.15...google-cloud-edgecontainer-v0.5.16) (2025-02-12) + + +### Features + +* Add REST Interceptors which support reading metadata ([e92d527](https://github.com/googleapis/google-cloud-python/commit/e92d52797ffbce45d033eb81af24e0cad32baa55)) +* Add support for reading selective GAPIC generation methods from service YAML ([e92d527](https://github.com/googleapis/google-cloud-python/commit/e92d52797ffbce45d033eb81af24e0cad32baa55)) + ## [0.5.15](https://github.com/googleapis/google-cloud-python/compare/google-cloud-edgecontainer-v0.5.14...google-cloud-edgecontainer-v0.5.15) (2024-12-12) diff --git a/packages/google-cloud-edgecontainer/google/cloud/edgecontainer/gapic_version.py b/packages/google-cloud-edgecontainer/google/cloud/edgecontainer/gapic_version.py index 35c9af734238..02b0cbec08ac 100644 --- a/packages/google-cloud-edgecontainer/google/cloud/edgecontainer/gapic_version.py +++ b/packages/google-cloud-edgecontainer/google/cloud/edgecontainer/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.5.15" # {x-release-please-version} +__version__ = "0.5.16" # {x-release-please-version} diff --git a/packages/google-cloud-edgecontainer/google/cloud/edgecontainer_v1/gapic_version.py b/packages/google-cloud-edgecontainer/google/cloud/edgecontainer_v1/gapic_version.py index 35c9af734238..02b0cbec08ac 100644 --- a/packages/google-cloud-edgecontainer/google/cloud/edgecontainer_v1/gapic_version.py +++ b/packages/google-cloud-edgecontainer/google/cloud/edgecontainer_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.5.15" # {x-release-please-version} +__version__ = "0.5.16" # {x-release-please-version} diff --git a/packages/google-cloud-edgecontainer/google/cloud/edgecontainer_v1/services/edge_container/client.py b/packages/google-cloud-edgecontainer/google/cloud/edgecontainer_v1/services/edge_container/client.py index c186937568dc..6d60b37b82ba 100644 --- a/packages/google-cloud-edgecontainer/google/cloud/edgecontainer_v1/services/edge_container/client.py +++ b/packages/google-cloud-edgecontainer/google/cloud/edgecontainer_v1/services/edge_container/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -610,6 +612,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -3283,16 +3312,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -3338,16 +3371,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def delete_operation( self, @@ -3504,16 +3541,20 @@ def get_location( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def list_locations( self, @@ -3559,16 +3600,20 @@ def list_locations( # Validate the universe domain. 
self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-edgecontainer/google/cloud/edgecontainer_v1/services/edge_container/transports/rest.py b/packages/google-cloud-edgecontainer/google/cloud/edgecontainer_v1/services/edge_container/transports/rest.py index ec50899ad0b0..33b5e96c3bd2 100644 --- a/packages/google-cloud-edgecontainer/google/cloud/edgecontainer_v1/services/edge_container/transports/rest.py +++ b/packages/google-cloud-edgecontainer/google/cloud/edgecontainer_v1/services/edge_container/transports/rest.py @@ -253,12 +253,35 @@ def post_create_cluster( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_cluster - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_cluster_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the EdgeContainer server but before - it is returned to user code. + it is returned to user code. This `post_create_cluster` interceptor runs + before the `post_create_cluster_with_metadata` interceptor. """ return response + def post_create_cluster_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_cluster + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the EdgeContainer server but before it is returned to user code. + + We recommend only using this `post_create_cluster_with_metadata` + interceptor in new development instead of the `post_create_cluster` interceptor. + When both interceptors are used, this `post_create_cluster_with_metadata` interceptor runs after the + `post_create_cluster` interceptor. The (possibly modified) response returned by + `post_create_cluster` will be passed to + `post_create_cluster_with_metadata`. + """ + return response, metadata + def pre_create_node_pool( self, request: service.CreateNodePoolRequest, @@ -276,12 +299,35 @@ def post_create_node_pool( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_node_pool - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_node_pool_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the EdgeContainer server but before - it is returned to user code. + it is returned to user code. This `post_create_node_pool` interceptor runs + before the `post_create_node_pool_with_metadata` interceptor. 
""" return response + def post_create_node_pool_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_node_pool + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the EdgeContainer server but before it is returned to user code. + + We recommend only using this `post_create_node_pool_with_metadata` + interceptor in new development instead of the `post_create_node_pool` interceptor. + When both interceptors are used, this `post_create_node_pool_with_metadata` interceptor runs after the + `post_create_node_pool` interceptor. The (possibly modified) response returned by + `post_create_node_pool` will be passed to + `post_create_node_pool_with_metadata`. + """ + return response, metadata + def pre_create_vpn_connection( self, request: service.CreateVpnConnectionRequest, @@ -301,12 +347,35 @@ def post_create_vpn_connection( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_vpn_connection - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_vpn_connection_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the EdgeContainer server but before - it is returned to user code. + it is returned to user code. This `post_create_vpn_connection` interceptor runs + before the `post_create_vpn_connection_with_metadata` interceptor. """ return response + def post_create_vpn_connection_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_vpn_connection + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the EdgeContainer server but before it is returned to user code. + + We recommend only using this `post_create_vpn_connection_with_metadata` + interceptor in new development instead of the `post_create_vpn_connection` interceptor. + When both interceptors are used, this `post_create_vpn_connection_with_metadata` interceptor runs after the + `post_create_vpn_connection` interceptor. The (possibly modified) response returned by + `post_create_vpn_connection` will be passed to + `post_create_vpn_connection_with_metadata`. + """ + return response, metadata + def pre_delete_cluster( self, request: service.DeleteClusterRequest, @@ -324,12 +393,35 @@ def post_delete_cluster( ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_cluster - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_cluster_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the EdgeContainer server but before - it is returned to user code. + it is returned to user code. This `post_delete_cluster` interceptor runs + before the `post_delete_cluster_with_metadata` interceptor. 
""" return response + def post_delete_cluster_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_cluster + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the EdgeContainer server but before it is returned to user code. + + We recommend only using this `post_delete_cluster_with_metadata` + interceptor in new development instead of the `post_delete_cluster` interceptor. + When both interceptors are used, this `post_delete_cluster_with_metadata` interceptor runs after the + `post_delete_cluster` interceptor. The (possibly modified) response returned by + `post_delete_cluster` will be passed to + `post_delete_cluster_with_metadata`. + """ + return response, metadata + def pre_delete_node_pool( self, request: service.DeleteNodePoolRequest, @@ -347,12 +439,35 @@ def post_delete_node_pool( ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_node_pool - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_node_pool_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the EdgeContainer server but before - it is returned to user code. + it is returned to user code. This `post_delete_node_pool` interceptor runs + before the `post_delete_node_pool_with_metadata` interceptor. """ return response + def post_delete_node_pool_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_node_pool + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the EdgeContainer server but before it is returned to user code. + + We recommend only using this `post_delete_node_pool_with_metadata` + interceptor in new development instead of the `post_delete_node_pool` interceptor. + When both interceptors are used, this `post_delete_node_pool_with_metadata` interceptor runs after the + `post_delete_node_pool` interceptor. The (possibly modified) response returned by + `post_delete_node_pool` will be passed to + `post_delete_node_pool_with_metadata`. + """ + return response, metadata + def pre_delete_vpn_connection( self, request: service.DeleteVpnConnectionRequest, @@ -372,12 +487,35 @@ def post_delete_vpn_connection( ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_vpn_connection - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_vpn_connection_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the EdgeContainer server but before - it is returned to user code. + it is returned to user code. This `post_delete_vpn_connection` interceptor runs + before the `post_delete_vpn_connection_with_metadata` interceptor. 
""" return response + def post_delete_vpn_connection_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_vpn_connection + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the EdgeContainer server but before it is returned to user code. + + We recommend only using this `post_delete_vpn_connection_with_metadata` + interceptor in new development instead of the `post_delete_vpn_connection` interceptor. + When both interceptors are used, this `post_delete_vpn_connection_with_metadata` interceptor runs after the + `post_delete_vpn_connection` interceptor. The (possibly modified) response returned by + `post_delete_vpn_connection` will be passed to + `post_delete_vpn_connection_with_metadata`. + """ + return response, metadata + def pre_generate_access_token( self, request: service.GenerateAccessTokenRequest, @@ -397,12 +535,37 @@ def post_generate_access_token( ) -> service.GenerateAccessTokenResponse: """Post-rpc interceptor for generate_access_token - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_generate_access_token_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the EdgeContainer server but before - it is returned to user code. + it is returned to user code. This `post_generate_access_token` interceptor runs + before the `post_generate_access_token_with_metadata` interceptor. """ return response + def post_generate_access_token_with_metadata( + self, + response: service.GenerateAccessTokenResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + service.GenerateAccessTokenResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for generate_access_token + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the EdgeContainer server but before it is returned to user code. + + We recommend only using this `post_generate_access_token_with_metadata` + interceptor in new development instead of the `post_generate_access_token` interceptor. + When both interceptors are used, this `post_generate_access_token_with_metadata` interceptor runs after the + `post_generate_access_token` interceptor. The (possibly modified) response returned by + `post_generate_access_token` will be passed to + `post_generate_access_token_with_metadata`. + """ + return response, metadata + def pre_generate_offline_credential( self, request: service.GenerateOfflineCredentialRequest, @@ -423,12 +586,38 @@ def post_generate_offline_credential( ) -> service.GenerateOfflineCredentialResponse: """Post-rpc interceptor for generate_offline_credential - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_generate_offline_credential_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the EdgeContainer server but before - it is returned to user code. + it is returned to user code. This `post_generate_offline_credential` interceptor runs + before the `post_generate_offline_credential_with_metadata` interceptor. 
""" return response + def post_generate_offline_credential_with_metadata( + self, + response: service.GenerateOfflineCredentialResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + service.GenerateOfflineCredentialResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for generate_offline_credential + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the EdgeContainer server but before it is returned to user code. + + We recommend only using this `post_generate_offline_credential_with_metadata` + interceptor in new development instead of the `post_generate_offline_credential` interceptor. + When both interceptors are used, this `post_generate_offline_credential_with_metadata` interceptor runs after the + `post_generate_offline_credential` interceptor. The (possibly modified) response returned by + `post_generate_offline_credential` will be passed to + `post_generate_offline_credential_with_metadata`. + """ + return response, metadata + def pre_get_cluster( self, request: service.GetClusterRequest, @@ -444,12 +633,35 @@ def pre_get_cluster( def post_get_cluster(self, response: resources.Cluster) -> resources.Cluster: """Post-rpc interceptor for get_cluster - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_cluster_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the EdgeContainer server but before - it is returned to user code. + it is returned to user code. This `post_get_cluster` interceptor runs + before the `post_get_cluster_with_metadata` interceptor. """ return response + def post_get_cluster_with_metadata( + self, + response: resources.Cluster, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.Cluster, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_cluster + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the EdgeContainer server but before it is returned to user code. + + We recommend only using this `post_get_cluster_with_metadata` + interceptor in new development instead of the `post_get_cluster` interceptor. + When both interceptors are used, this `post_get_cluster_with_metadata` interceptor runs after the + `post_get_cluster` interceptor. The (possibly modified) response returned by + `post_get_cluster` will be passed to + `post_get_cluster_with_metadata`. + """ + return response, metadata + def pre_get_machine( self, request: service.GetMachineRequest, @@ -465,12 +677,35 @@ def pre_get_machine( def post_get_machine(self, response: resources.Machine) -> resources.Machine: """Post-rpc interceptor for get_machine - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_machine_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the EdgeContainer server but before - it is returned to user code. + it is returned to user code. This `post_get_machine` interceptor runs + before the `post_get_machine_with_metadata` interceptor. 
""" return response + def post_get_machine_with_metadata( + self, + response: resources.Machine, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.Machine, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_machine + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the EdgeContainer server but before it is returned to user code. + + We recommend only using this `post_get_machine_with_metadata` + interceptor in new development instead of the `post_get_machine` interceptor. + When both interceptors are used, this `post_get_machine_with_metadata` interceptor runs after the + `post_get_machine` interceptor. The (possibly modified) response returned by + `post_get_machine` will be passed to + `post_get_machine_with_metadata`. + """ + return response, metadata + def pre_get_node_pool( self, request: service.GetNodePoolRequest, @@ -486,12 +721,35 @@ def pre_get_node_pool( def post_get_node_pool(self, response: resources.NodePool) -> resources.NodePool: """Post-rpc interceptor for get_node_pool - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_node_pool_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the EdgeContainer server but before - it is returned to user code. + it is returned to user code. This `post_get_node_pool` interceptor runs + before the `post_get_node_pool_with_metadata` interceptor. """ return response + def post_get_node_pool_with_metadata( + self, + response: resources.NodePool, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.NodePool, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_node_pool + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the EdgeContainer server but before it is returned to user code. + + We recommend only using this `post_get_node_pool_with_metadata` + interceptor in new development instead of the `post_get_node_pool` interceptor. + When both interceptors are used, this `post_get_node_pool_with_metadata` interceptor runs after the + `post_get_node_pool` interceptor. The (possibly modified) response returned by + `post_get_node_pool` will be passed to + `post_get_node_pool_with_metadata`. + """ + return response, metadata + def pre_get_server_config( self, request: service.GetServerConfigRequest, @@ -509,12 +767,35 @@ def post_get_server_config( ) -> resources.ServerConfig: """Post-rpc interceptor for get_server_config - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_server_config_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the EdgeContainer server but before - it is returned to user code. + it is returned to user code. This `post_get_server_config` interceptor runs + before the `post_get_server_config_with_metadata` interceptor. """ return response + def post_get_server_config_with_metadata( + self, + response: resources.ServerConfig, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.ServerConfig, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_server_config + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the EdgeContainer server but before it is returned to user code. 
+ + We recommend only using this `post_get_server_config_with_metadata` + interceptor in new development instead of the `post_get_server_config` interceptor. + When both interceptors are used, this `post_get_server_config_with_metadata` interceptor runs after the + `post_get_server_config` interceptor. The (possibly modified) response returned by + `post_get_server_config` will be passed to + `post_get_server_config_with_metadata`. + """ + return response, metadata + def pre_get_vpn_connection( self, request: service.GetVpnConnectionRequest, @@ -534,12 +815,35 @@ def post_get_vpn_connection( ) -> resources.VpnConnection: """Post-rpc interceptor for get_vpn_connection - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_vpn_connection_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the EdgeContainer server but before - it is returned to user code. + it is returned to user code. This `post_get_vpn_connection` interceptor runs + before the `post_get_vpn_connection_with_metadata` interceptor. """ return response + def post_get_vpn_connection_with_metadata( + self, + response: resources.VpnConnection, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.VpnConnection, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_vpn_connection + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the EdgeContainer server but before it is returned to user code. + + We recommend only using this `post_get_vpn_connection_with_metadata` + interceptor in new development instead of the `post_get_vpn_connection` interceptor. + When both interceptors are used, this `post_get_vpn_connection_with_metadata` interceptor runs after the + `post_get_vpn_connection` interceptor. The (possibly modified) response returned by + `post_get_vpn_connection` will be passed to + `post_get_vpn_connection_with_metadata`. + """ + return response, metadata + def pre_list_clusters( self, request: service.ListClustersRequest, @@ -557,12 +861,35 @@ def post_list_clusters( ) -> service.ListClustersResponse: """Post-rpc interceptor for list_clusters - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_clusters_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the EdgeContainer server but before - it is returned to user code. + it is returned to user code. This `post_list_clusters` interceptor runs + before the `post_list_clusters_with_metadata` interceptor. """ return response + def post_list_clusters_with_metadata( + self, + response: service.ListClustersResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[service.ListClustersResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_clusters + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the EdgeContainer server but before it is returned to user code. + + We recommend only using this `post_list_clusters_with_metadata` + interceptor in new development instead of the `post_list_clusters` interceptor. + When both interceptors are used, this `post_list_clusters_with_metadata` interceptor runs after the + `post_list_clusters` interceptor. The (possibly modified) response returned by + `post_list_clusters` will be passed to + `post_list_clusters_with_metadata`. 
+ """ + return response, metadata + def pre_list_machines( self, request: service.ListMachinesRequest, @@ -580,12 +907,35 @@ def post_list_machines( ) -> service.ListMachinesResponse: """Post-rpc interceptor for list_machines - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_machines_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the EdgeContainer server but before - it is returned to user code. + it is returned to user code. This `post_list_machines` interceptor runs + before the `post_list_machines_with_metadata` interceptor. """ return response + def post_list_machines_with_metadata( + self, + response: service.ListMachinesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[service.ListMachinesResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_machines + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the EdgeContainer server but before it is returned to user code. + + We recommend only using this `post_list_machines_with_metadata` + interceptor in new development instead of the `post_list_machines` interceptor. + When both interceptors are used, this `post_list_machines_with_metadata` interceptor runs after the + `post_list_machines` interceptor. The (possibly modified) response returned by + `post_list_machines` will be passed to + `post_list_machines_with_metadata`. + """ + return response, metadata + def pre_list_node_pools( self, request: service.ListNodePoolsRequest, @@ -603,12 +953,35 @@ def post_list_node_pools( ) -> service.ListNodePoolsResponse: """Post-rpc interceptor for list_node_pools - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_node_pools_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the EdgeContainer server but before - it is returned to user code. + it is returned to user code. This `post_list_node_pools` interceptor runs + before the `post_list_node_pools_with_metadata` interceptor. """ return response + def post_list_node_pools_with_metadata( + self, + response: service.ListNodePoolsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[service.ListNodePoolsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_node_pools + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the EdgeContainer server but before it is returned to user code. + + We recommend only using this `post_list_node_pools_with_metadata` + interceptor in new development instead of the `post_list_node_pools` interceptor. + When both interceptors are used, this `post_list_node_pools_with_metadata` interceptor runs after the + `post_list_node_pools` interceptor. The (possibly modified) response returned by + `post_list_node_pools` will be passed to + `post_list_node_pools_with_metadata`. + """ + return response, metadata + def pre_list_vpn_connections( self, request: service.ListVpnConnectionsRequest, @@ -628,12 +1001,37 @@ def post_list_vpn_connections( ) -> service.ListVpnConnectionsResponse: """Post-rpc interceptor for list_vpn_connections - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_vpn_connections_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response after it is returned by the EdgeContainer server but before - it is returned to user code. + it is returned to user code. This `post_list_vpn_connections` interceptor runs + before the `post_list_vpn_connections_with_metadata` interceptor. """ return response + def post_list_vpn_connections_with_metadata( + self, + response: service.ListVpnConnectionsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + service.ListVpnConnectionsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_vpn_connections + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the EdgeContainer server but before it is returned to user code. + + We recommend only using this `post_list_vpn_connections_with_metadata` + interceptor in new development instead of the `post_list_vpn_connections` interceptor. + When both interceptors are used, this `post_list_vpn_connections_with_metadata` interceptor runs after the + `post_list_vpn_connections` interceptor. The (possibly modified) response returned by + `post_list_vpn_connections` will be passed to + `post_list_vpn_connections_with_metadata`. + """ + return response, metadata + def pre_update_cluster( self, request: service.UpdateClusterRequest, @@ -651,12 +1049,35 @@ def post_update_cluster( ) -> operations_pb2.Operation: """Post-rpc interceptor for update_cluster - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_cluster_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the EdgeContainer server but before - it is returned to user code. + it is returned to user code. This `post_update_cluster` interceptor runs + before the `post_update_cluster_with_metadata` interceptor. """ return response + def post_update_cluster_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_cluster + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the EdgeContainer server but before it is returned to user code. + + We recommend only using this `post_update_cluster_with_metadata` + interceptor in new development instead of the `post_update_cluster` interceptor. + When both interceptors are used, this `post_update_cluster_with_metadata` interceptor runs after the + `post_update_cluster` interceptor. The (possibly modified) response returned by + `post_update_cluster` will be passed to + `post_update_cluster_with_metadata`. + """ + return response, metadata + def pre_update_node_pool( self, request: service.UpdateNodePoolRequest, @@ -674,12 +1095,35 @@ def post_update_node_pool( ) -> operations_pb2.Operation: """Post-rpc interceptor for update_node_pool - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_node_pool_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the EdgeContainer server but before - it is returned to user code. + it is returned to user code. This `post_update_node_pool` interceptor runs + before the `post_update_node_pool_with_metadata` interceptor. 
""" return response + def post_update_node_pool_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_node_pool + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the EdgeContainer server but before it is returned to user code. + + We recommend only using this `post_update_node_pool_with_metadata` + interceptor in new development instead of the `post_update_node_pool` interceptor. + When both interceptors are used, this `post_update_node_pool_with_metadata` interceptor runs after the + `post_update_node_pool` interceptor. The (possibly modified) response returned by + `post_update_node_pool` will be passed to + `post_update_node_pool_with_metadata`. + """ + return response, metadata + def pre_upgrade_cluster( self, request: service.UpgradeClusterRequest, @@ -697,12 +1141,35 @@ def post_upgrade_cluster( ) -> operations_pb2.Operation: """Post-rpc interceptor for upgrade_cluster - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_upgrade_cluster_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the EdgeContainer server but before - it is returned to user code. + it is returned to user code. This `post_upgrade_cluster` interceptor runs + before the `post_upgrade_cluster_with_metadata` interceptor. """ return response + def post_upgrade_cluster_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for upgrade_cluster + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the EdgeContainer server but before it is returned to user code. + + We recommend only using this `post_upgrade_cluster_with_metadata` + interceptor in new development instead of the `post_upgrade_cluster` interceptor. + When both interceptors are used, this `post_upgrade_cluster_with_metadata` interceptor runs after the + `post_upgrade_cluster` interceptor. The (possibly modified) response returned by + `post_upgrade_cluster` will be passed to + `post_upgrade_cluster_with_metadata`. 
+ """ + return response, metadata + def pre_get_location( self, request: locations_pb2.GetLocationRequest, @@ -1115,6 +1582,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_cluster(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_cluster_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1264,6 +1735,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_node_pool(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_node_pool_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1413,6 +1888,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_vpn_connection(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_vpn_connection_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1554,6 +2033,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_cluster(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_cluster_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1697,6 +2180,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_node_pool(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_node_pool_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1840,6 +2327,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_vpn_connection(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_vpn_connection_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1983,6 +2474,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_generate_access_token(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_generate_access_token_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2132,6 +2627,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_generate_offline_credential(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_generate_offline_credential_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2280,6 +2779,10 @@ def __call__( 
json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_cluster(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_cluster_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2427,6 +2930,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_machine(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_machine_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2572,6 +3079,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_node_pool(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_node_pool_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2716,6 +3227,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_server_config(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_server_config_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2858,6 +3373,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_vpn_connection(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_vpn_connection_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2998,6 +3517,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_clusters(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_clusters_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3138,6 +3661,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_machines(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_machines_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3278,6 +3805,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_node_pools(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_node_pools_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3422,6 +3953,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_vpn_connections(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + 
resp, _ = self._interceptor.post_list_vpn_connections_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3571,6 +4106,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_cluster(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_cluster_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3720,6 +4259,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_node_pool(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_node_pool_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3867,6 +4410,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_upgrade_cluster(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_upgrade_cluster_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-edgecontainer/samples/generated_samples/snippet_metadata_google.cloud.edgecontainer.v1.json b/packages/google-cloud-edgecontainer/samples/generated_samples/snippet_metadata_google.cloud.edgecontainer.v1.json index 06ba325e5cbe..07de18735fa1 100644 --- a/packages/google-cloud-edgecontainer/samples/generated_samples/snippet_metadata_google.cloud.edgecontainer.v1.json +++ b/packages/google-cloud-edgecontainer/samples/generated_samples/snippet_metadata_google.cloud.edgecontainer.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-edgecontainer", - "version": "0.5.15" + "version": "0.5.16" }, "snippets": [ { diff --git a/packages/google-cloud-edgecontainer/tests/unit/gapic/edgecontainer_v1/test_edge_container.py b/packages/google-cloud-edgecontainer/tests/unit/gapic/edgecontainer_v1/test_edge_container.py index e136119c8b89..bd2f1a7d8c62 100644 --- a/packages/google-cloud-edgecontainer/tests/unit/gapic/edgecontainer_v1/test_edge_container.py +++ b/packages/google-cloud-edgecontainer/tests/unit/gapic/edgecontainer_v1/test_edge_container.py @@ -77,6 +77,13 @@ ) from google.cloud.edgecontainer_v1.types import resources, service +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -331,6 +338,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
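The transport hunks above pair the parsed response with the HTTP response headers (collected as `[(key, str(value))]` tuples) and route both through the new `post_*_with_metadata` interceptor hooks. A minimal sketch of a subclass that reads that metadata, using the `EdgeContainerRestInterceptor` names that appear in this diff; the import path and the logging are illustrative assumptions, not part of the generated code:

# Illustrative only -- not part of the generated code. A custom interceptor
# that reads the response headers handed to the new *_with_metadata hook.
import logging
from typing import Sequence, Tuple, Union

from google.cloud.edgecontainer_v1.services.edge_container.transports.rest import (
    EdgeContainerRestInterceptor,
)
from google.cloud.edgecontainer_v1.types import service


class HeaderLoggingInterceptor(EdgeContainerRestInterceptor):
    def post_list_clusters_with_metadata(
        self,
        response: service.ListClustersResponse,
        metadata: Sequence[Tuple[str, Union[str, bytes]]],
    ) -> Tuple[service.ListClustersResponse, Sequence[Tuple[str, Union[str, bytes]]]]:
        # `metadata` carries the HTTP response headers collected by the transport.
        for key, value in metadata:
            logging.debug("list_clusters response header %s=%s", key, value)
        # Return the pair unchanged to keep the default behaviour.
        return response, metadata

An instance of such a subclass would typically be supplied through the REST transport's `interceptor` argument; the transport changes above ensure the `_with_metadata` hook runs after the plain `post_list_clusters` hook and receives whatever that hook returned.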
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = EdgeContainerClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = EdgeContainerClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -13870,10 +13920,13 @@ def test_list_clusters_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.EdgeContainerRestInterceptor, "post_list_clusters" ) as post, mock.patch.object( + transports.EdgeContainerRestInterceptor, "post_list_clusters_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.EdgeContainerRestInterceptor, "pre_list_clusters" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.ListClustersRequest.pb(service.ListClustersRequest()) transcode.return_value = { "method": "post", @@ -13897,6 +13950,7 @@ def test_list_clusters_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = service.ListClustersResponse() + post_with_metadata.return_value = service.ListClustersResponse(), metadata client.list_clusters( request, @@ -13908,6 +13962,7 @@ def test_list_clusters_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_cluster_rest_bad_request(request_type=service.GetClusterRequest): @@ -14020,10 +14075,13 @@ def test_get_cluster_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.EdgeContainerRestInterceptor, "post_get_cluster" ) as post, mock.patch.object( + transports.EdgeContainerRestInterceptor, "post_get_cluster_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.EdgeContainerRestInterceptor, "pre_get_cluster" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.GetClusterRequest.pb(service.GetClusterRequest()) transcode.return_value = { "method": "post", @@ -14045,6 +14103,7 @@ def test_get_cluster_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.Cluster() + post_with_metadata.return_value = resources.Cluster(), metadata client.get_cluster( request, @@ -14056,6 +14115,7 @@ def test_get_cluster_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + 
post_with_metadata.assert_called_once() def test_create_cluster_rest_bad_request(request_type=service.CreateClusterRequest): @@ -14291,10 +14351,13 @@ def test_create_cluster_rest_interceptors(null_interceptor): ), mock.patch.object( transports.EdgeContainerRestInterceptor, "post_create_cluster" ) as post, mock.patch.object( + transports.EdgeContainerRestInterceptor, "post_create_cluster_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.EdgeContainerRestInterceptor, "pre_create_cluster" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.CreateClusterRequest.pb(service.CreateClusterRequest()) transcode.return_value = { "method": "post", @@ -14316,6 +14379,7 @@ def test_create_cluster_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_cluster( request, @@ -14327,6 +14391,7 @@ def test_create_cluster_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_cluster_rest_bad_request(request_type=service.UpdateClusterRequest): @@ -14566,10 +14631,13 @@ def test_update_cluster_rest_interceptors(null_interceptor): ), mock.patch.object( transports.EdgeContainerRestInterceptor, "post_update_cluster" ) as post, mock.patch.object( + transports.EdgeContainerRestInterceptor, "post_update_cluster_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.EdgeContainerRestInterceptor, "pre_update_cluster" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.UpdateClusterRequest.pb(service.UpdateClusterRequest()) transcode.return_value = { "method": "post", @@ -14591,6 +14659,7 @@ def test_update_cluster_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.update_cluster( request, @@ -14602,6 +14671,7 @@ def test_update_cluster_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_upgrade_cluster_rest_bad_request(request_type=service.UpgradeClusterRequest): @@ -14680,10 +14750,13 @@ def test_upgrade_cluster_rest_interceptors(null_interceptor): ), mock.patch.object( transports.EdgeContainerRestInterceptor, "post_upgrade_cluster" ) as post, mock.patch.object( + transports.EdgeContainerRestInterceptor, "post_upgrade_cluster_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.EdgeContainerRestInterceptor, "pre_upgrade_cluster" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.UpgradeClusterRequest.pb(service.UpgradeClusterRequest()) transcode.return_value = { "method": "post", @@ -14705,6 +14778,7 @@ def test_upgrade_cluster_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.upgrade_cluster( request, @@ -14716,6 +14790,7 @@ def test_upgrade_cluster_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def 
test_delete_cluster_rest_bad_request(request_type=service.DeleteClusterRequest): @@ -14794,10 +14869,13 @@ def test_delete_cluster_rest_interceptors(null_interceptor): ), mock.patch.object( transports.EdgeContainerRestInterceptor, "post_delete_cluster" ) as post, mock.patch.object( + transports.EdgeContainerRestInterceptor, "post_delete_cluster_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.EdgeContainerRestInterceptor, "pre_delete_cluster" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.DeleteClusterRequest.pb(service.DeleteClusterRequest()) transcode.return_value = { "method": "post", @@ -14819,6 +14897,7 @@ def test_delete_cluster_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.delete_cluster( request, @@ -14830,6 +14909,7 @@ def test_delete_cluster_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_generate_access_token_rest_bad_request( @@ -14914,10 +14994,14 @@ def test_generate_access_token_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.EdgeContainerRestInterceptor, "post_generate_access_token" ) as post, mock.patch.object( + transports.EdgeContainerRestInterceptor, + "post_generate_access_token_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.EdgeContainerRestInterceptor, "pre_generate_access_token" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.GenerateAccessTokenRequest.pb( service.GenerateAccessTokenRequest() ) @@ -14943,6 +15027,10 @@ def test_generate_access_token_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = service.GenerateAccessTokenResponse() + post_with_metadata.return_value = ( + service.GenerateAccessTokenResponse(), + metadata, + ) client.generate_access_token( request, @@ -14954,6 +15042,7 @@ def test_generate_access_token_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_generate_offline_credential_rest_bad_request( @@ -15042,10 +15131,14 @@ def test_generate_offline_credential_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.EdgeContainerRestInterceptor, "post_generate_offline_credential" ) as post, mock.patch.object( + transports.EdgeContainerRestInterceptor, + "post_generate_offline_credential_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.EdgeContainerRestInterceptor, "pre_generate_offline_credential" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.GenerateOfflineCredentialRequest.pb( service.GenerateOfflineCredentialRequest() ) @@ -15071,6 +15164,10 @@ def test_generate_offline_credential_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = service.GenerateOfflineCredentialResponse() + post_with_metadata.return_value = ( + service.GenerateOfflineCredentialResponse(), + metadata, + ) client.generate_offline_credential( request, @@ -15082,6 +15179,7 @@ def test_generate_offline_credential_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + 
post_with_metadata.assert_called_once() def test_list_node_pools_rest_bad_request(request_type=service.ListNodePoolsRequest): @@ -15166,10 +15264,13 @@ def test_list_node_pools_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.EdgeContainerRestInterceptor, "post_list_node_pools" ) as post, mock.patch.object( + transports.EdgeContainerRestInterceptor, "post_list_node_pools_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.EdgeContainerRestInterceptor, "pre_list_node_pools" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.ListNodePoolsRequest.pb(service.ListNodePoolsRequest()) transcode.return_value = { "method": "post", @@ -15193,6 +15294,7 @@ def test_list_node_pools_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = service.ListNodePoolsResponse() + post_with_metadata.return_value = service.ListNodePoolsResponse(), metadata client.list_node_pools( request, @@ -15204,6 +15306,7 @@ def test_list_node_pools_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_node_pool_rest_bad_request(request_type=service.GetNodePoolRequest): @@ -15298,10 +15401,13 @@ def test_get_node_pool_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.EdgeContainerRestInterceptor, "post_get_node_pool" ) as post, mock.patch.object( + transports.EdgeContainerRestInterceptor, "post_get_node_pool_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.EdgeContainerRestInterceptor, "pre_get_node_pool" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.GetNodePoolRequest.pb(service.GetNodePoolRequest()) transcode.return_value = { "method": "post", @@ -15323,6 +15429,7 @@ def test_get_node_pool_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.NodePool() + post_with_metadata.return_value = resources.NodePool(), metadata client.get_node_pool( request, @@ -15334,6 +15441,7 @@ def test_get_node_pool_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_node_pool_rest_bad_request(request_type=service.CreateNodePoolRequest): @@ -15509,10 +15617,13 @@ def test_create_node_pool_rest_interceptors(null_interceptor): ), mock.patch.object( transports.EdgeContainerRestInterceptor, "post_create_node_pool" ) as post, mock.patch.object( + transports.EdgeContainerRestInterceptor, "post_create_node_pool_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.EdgeContainerRestInterceptor, "pre_create_node_pool" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.CreateNodePoolRequest.pb(service.CreateNodePoolRequest()) transcode.return_value = { "method": "post", @@ -15534,6 +15645,7 @@ def test_create_node_pool_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_node_pool( request, @@ -15545,6 +15657,7 @@ def test_create_node_pool_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def 
test_update_node_pool_rest_bad_request(request_type=service.UpdateNodePoolRequest): @@ -15728,10 +15841,13 @@ def test_update_node_pool_rest_interceptors(null_interceptor): ), mock.patch.object( transports.EdgeContainerRestInterceptor, "post_update_node_pool" ) as post, mock.patch.object( + transports.EdgeContainerRestInterceptor, "post_update_node_pool_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.EdgeContainerRestInterceptor, "pre_update_node_pool" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.UpdateNodePoolRequest.pb(service.UpdateNodePoolRequest()) transcode.return_value = { "method": "post", @@ -15753,6 +15869,7 @@ def test_update_node_pool_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.update_node_pool( request, @@ -15764,6 +15881,7 @@ def test_update_node_pool_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_node_pool_rest_bad_request(request_type=service.DeleteNodePoolRequest): @@ -15846,10 +15964,13 @@ def test_delete_node_pool_rest_interceptors(null_interceptor): ), mock.patch.object( transports.EdgeContainerRestInterceptor, "post_delete_node_pool" ) as post, mock.patch.object( + transports.EdgeContainerRestInterceptor, "post_delete_node_pool_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.EdgeContainerRestInterceptor, "pre_delete_node_pool" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.DeleteNodePoolRequest.pb(service.DeleteNodePoolRequest()) transcode.return_value = { "method": "post", @@ -15871,6 +15992,7 @@ def test_delete_node_pool_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.delete_node_pool( request, @@ -15882,6 +16004,7 @@ def test_delete_node_pool_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_machines_rest_bad_request(request_type=service.ListMachinesRequest): @@ -15966,10 +16089,13 @@ def test_list_machines_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.EdgeContainerRestInterceptor, "post_list_machines" ) as post, mock.patch.object( + transports.EdgeContainerRestInterceptor, "post_list_machines_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.EdgeContainerRestInterceptor, "pre_list_machines" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.ListMachinesRequest.pb(service.ListMachinesRequest()) transcode.return_value = { "method": "post", @@ -15993,6 +16119,7 @@ def test_list_machines_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = service.ListMachinesResponse() + post_with_metadata.return_value = service.ListMachinesResponse(), metadata client.list_machines( request, @@ -16004,6 +16131,7 @@ def test_list_machines_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def 
test_get_machine_rest_bad_request(request_type=service.GetMachineRequest): @@ -16094,10 +16222,13 @@ def test_get_machine_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.EdgeContainerRestInterceptor, "post_get_machine" ) as post, mock.patch.object( + transports.EdgeContainerRestInterceptor, "post_get_machine_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.EdgeContainerRestInterceptor, "pre_get_machine" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.GetMachineRequest.pb(service.GetMachineRequest()) transcode.return_value = { "method": "post", @@ -16119,6 +16250,7 @@ def test_get_machine_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.Machine() + post_with_metadata.return_value = resources.Machine(), metadata client.get_machine( request, @@ -16130,6 +16262,7 @@ def test_get_machine_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_vpn_connections_rest_bad_request( @@ -16216,10 +16349,14 @@ def test_list_vpn_connections_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.EdgeContainerRestInterceptor, "post_list_vpn_connections" ) as post, mock.patch.object( + transports.EdgeContainerRestInterceptor, + "post_list_vpn_connections_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.EdgeContainerRestInterceptor, "pre_list_vpn_connections" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.ListVpnConnectionsRequest.pb( service.ListVpnConnectionsRequest() ) @@ -16245,6 +16382,7 @@ def test_list_vpn_connections_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = service.ListVpnConnectionsResponse() + post_with_metadata.return_value = service.ListVpnConnectionsResponse(), metadata client.list_vpn_connections( request, @@ -16256,6 +16394,7 @@ def test_list_vpn_connections_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_vpn_connection_rest_bad_request( @@ -16352,10 +16491,13 @@ def test_get_vpn_connection_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.EdgeContainerRestInterceptor, "post_get_vpn_connection" ) as post, mock.patch.object( + transports.EdgeContainerRestInterceptor, "post_get_vpn_connection_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.EdgeContainerRestInterceptor, "pre_get_vpn_connection" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.GetVpnConnectionRequest.pb( service.GetVpnConnectionRequest() ) @@ -16379,6 +16521,7 @@ def test_get_vpn_connection_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.VpnConnection() + post_with_metadata.return_value = resources.VpnConnection(), metadata client.get_vpn_connection( request, @@ -16390,6 +16533,7 @@ def test_get_vpn_connection_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_vpn_connection_rest_bad_request( @@ -16559,10 +16703,14 @@ def test_create_vpn_connection_rest_interceptors(null_interceptor): ), mock.patch.object( 
transports.EdgeContainerRestInterceptor, "post_create_vpn_connection" ) as post, mock.patch.object( + transports.EdgeContainerRestInterceptor, + "post_create_vpn_connection_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.EdgeContainerRestInterceptor, "pre_create_vpn_connection" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.CreateVpnConnectionRequest.pb( service.CreateVpnConnectionRequest() ) @@ -16586,6 +16734,7 @@ def test_create_vpn_connection_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_vpn_connection( request, @@ -16597,6 +16746,7 @@ def test_create_vpn_connection_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_vpn_connection_rest_bad_request( @@ -16677,10 +16827,14 @@ def test_delete_vpn_connection_rest_interceptors(null_interceptor): ), mock.patch.object( transports.EdgeContainerRestInterceptor, "post_delete_vpn_connection" ) as post, mock.patch.object( + transports.EdgeContainerRestInterceptor, + "post_delete_vpn_connection_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.EdgeContainerRestInterceptor, "pre_delete_vpn_connection" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.DeleteVpnConnectionRequest.pb( service.DeleteVpnConnectionRequest() ) @@ -16704,6 +16858,7 @@ def test_delete_vpn_connection_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.delete_vpn_connection( request, @@ -16715,6 +16870,7 @@ def test_delete_vpn_connection_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_server_config_rest_bad_request( @@ -16799,10 +16955,13 @@ def test_get_server_config_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.EdgeContainerRestInterceptor, "post_get_server_config" ) as post, mock.patch.object( + transports.EdgeContainerRestInterceptor, "post_get_server_config_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.EdgeContainerRestInterceptor, "pre_get_server_config" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.GetServerConfigRequest.pb(service.GetServerConfigRequest()) transcode.return_value = { "method": "post", @@ -16824,6 +16983,7 @@ def test_get_server_config_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.ServerConfig() + post_with_metadata.return_value = resources.ServerConfig(), metadata client.get_server_config( request, @@ -16835,6 +16995,7 @@ def test_get_server_config_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationRequest): diff --git a/packages/google-cloud-edgenetwork/CHANGELOG.md b/packages/google-cloud-edgenetwork/CHANGELOG.md index 2d8a925f2188..7a880ed4a27c 100644 --- a/packages/google-cloud-edgenetwork/CHANGELOG.md +++ 
b/packages/google-cloud-edgenetwork/CHANGELOG.md @@ -1,5 +1,13 @@ # Changelog +## [0.1.15](https://github.com/googleapis/google-cloud-python/compare/google-cloud-edgenetwork-v0.1.14...google-cloud-edgenetwork-v0.1.15) (2025-02-12) + + +### Features + +* Add REST Interceptors which support reading metadata ([e92d527](https://github.com/googleapis/google-cloud-python/commit/e92d52797ffbce45d033eb81af24e0cad32baa55)) +* Add support for reading selective GAPIC generation methods from service YAML ([e92d527](https://github.com/googleapis/google-cloud-python/commit/e92d52797ffbce45d033eb81af24e0cad32baa55)) + ## [0.1.14](https://github.com/googleapis/google-cloud-python/compare/google-cloud-edgenetwork-v0.1.13...google-cloud-edgenetwork-v0.1.14) (2024-12-12) diff --git a/packages/google-cloud-edgenetwork/google/cloud/edgenetwork/gapic_version.py b/packages/google-cloud-edgenetwork/google/cloud/edgenetwork/gapic_version.py index 7a4d810a47da..564cdfade642 100644 --- a/packages/google-cloud-edgenetwork/google/cloud/edgenetwork/gapic_version.py +++ b/packages/google-cloud-edgenetwork/google/cloud/edgenetwork/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.1.14" # {x-release-please-version} +__version__ = "0.1.15" # {x-release-please-version} diff --git a/packages/google-cloud-edgenetwork/google/cloud/edgenetwork_v1/gapic_version.py b/packages/google-cloud-edgenetwork/google/cloud/edgenetwork_v1/gapic_version.py index 7a4d810a47da..564cdfade642 100644 --- a/packages/google-cloud-edgenetwork/google/cloud/edgenetwork_v1/gapic_version.py +++ b/packages/google-cloud-edgenetwork/google/cloud/edgenetwork_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.1.14" # {x-release-please-version} +__version__ = "0.1.15" # {x-release-please-version} diff --git a/packages/google-cloud-edgenetwork/google/cloud/edgenetwork_v1/services/edge_network/client.py b/packages/google-cloud-edgenetwork/google/cloud/edgenetwork_v1/services/edge_network/client.py index ed6c052c1936..037270a0f72c 100644 --- a/packages/google-cloud-edgenetwork/google/cloud/edgenetwork_v1/services/edge_network/client.py +++ b/packages/google-cloud-edgenetwork/google/cloud/edgenetwork_v1/services/edge_network/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -615,6 +617,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. 
+ """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -3981,16 +4010,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -4036,16 +4069,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def delete_operation( self, @@ -4202,16 +4239,20 @@ def get_location( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def list_locations( self, @@ -4257,16 +4298,20 @@ def list_locations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. 
+ return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-edgenetwork/google/cloud/edgenetwork_v1/services/edge_network/transports/rest.py b/packages/google-cloud-edgenetwork/google/cloud/edgenetwork_v1/services/edge_network/transports/rest.py index 15271f590f44..888d1c7e9c82 100644 --- a/packages/google-cloud-edgenetwork/google/cloud/edgenetwork_v1/services/edge_network/transports/rest.py +++ b/packages/google-cloud-edgenetwork/google/cloud/edgenetwork_v1/services/edge_network/transports/rest.py @@ -304,12 +304,35 @@ def post_create_interconnect_attachment( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_interconnect_attachment - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_interconnect_attachment_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the EdgeNetwork server but before - it is returned to user code. + it is returned to user code. This `post_create_interconnect_attachment` interceptor runs + before the `post_create_interconnect_attachment_with_metadata` interceptor. """ return response + def post_create_interconnect_attachment_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_interconnect_attachment + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the EdgeNetwork server but before it is returned to user code. + + We recommend only using this `post_create_interconnect_attachment_with_metadata` + interceptor in new development instead of the `post_create_interconnect_attachment` interceptor. + When both interceptors are used, this `post_create_interconnect_attachment_with_metadata` interceptor runs after the + `post_create_interconnect_attachment` interceptor. The (possibly modified) response returned by + `post_create_interconnect_attachment` will be passed to + `post_create_interconnect_attachment_with_metadata`. + """ + return response, metadata + def pre_create_network( self, request: service.CreateNetworkRequest, @@ -327,12 +350,35 @@ def post_create_network( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_network - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_network_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the EdgeNetwork server but before - it is returned to user code. + it is returned to user code. This `post_create_network` interceptor runs + before the `post_create_network_with_metadata` interceptor. """ return response + def post_create_network_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_network + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the EdgeNetwork server but before it is returned to user code. + + We recommend only using this `post_create_network_with_metadata` + interceptor in new development instead of the `post_create_network` interceptor. 
+ When both interceptors are used, this `post_create_network_with_metadata` interceptor runs after the + `post_create_network` interceptor. The (possibly modified) response returned by + `post_create_network` will be passed to + `post_create_network_with_metadata`. + """ + return response, metadata + def pre_create_router( self, request: service.CreateRouterRequest, @@ -350,12 +396,35 @@ def post_create_router( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_router - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_router_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the EdgeNetwork server but before - it is returned to user code. + it is returned to user code. This `post_create_router` interceptor runs + before the `post_create_router_with_metadata` interceptor. """ return response + def post_create_router_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_router + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the EdgeNetwork server but before it is returned to user code. + + We recommend only using this `post_create_router_with_metadata` + interceptor in new development instead of the `post_create_router` interceptor. + When both interceptors are used, this `post_create_router_with_metadata` interceptor runs after the + `post_create_router` interceptor. The (possibly modified) response returned by + `post_create_router` will be passed to + `post_create_router_with_metadata`. + """ + return response, metadata + def pre_create_subnet( self, request: service.CreateSubnetRequest, @@ -373,12 +442,35 @@ def post_create_subnet( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_subnet - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_subnet_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the EdgeNetwork server but before - it is returned to user code. + it is returned to user code. This `post_create_subnet` interceptor runs + before the `post_create_subnet_with_metadata` interceptor. """ return response + def post_create_subnet_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_subnet + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the EdgeNetwork server but before it is returned to user code. + + We recommend only using this `post_create_subnet_with_metadata` + interceptor in new development instead of the `post_create_subnet` interceptor. + When both interceptors are used, this `post_create_subnet_with_metadata` interceptor runs after the + `post_create_subnet` interceptor. The (possibly modified) response returned by + `post_create_subnet` will be passed to + `post_create_subnet_with_metadata`. 
+ """ + return response, metadata + def pre_delete_interconnect_attachment( self, request: service.DeleteInterconnectAttachmentRequest, @@ -399,12 +491,35 @@ def post_delete_interconnect_attachment( ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_interconnect_attachment - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_interconnect_attachment_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the EdgeNetwork server but before - it is returned to user code. + it is returned to user code. This `post_delete_interconnect_attachment` interceptor runs + before the `post_delete_interconnect_attachment_with_metadata` interceptor. """ return response + def post_delete_interconnect_attachment_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_interconnect_attachment + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the EdgeNetwork server but before it is returned to user code. + + We recommend only using this `post_delete_interconnect_attachment_with_metadata` + interceptor in new development instead of the `post_delete_interconnect_attachment` interceptor. + When both interceptors are used, this `post_delete_interconnect_attachment_with_metadata` interceptor runs after the + `post_delete_interconnect_attachment` interceptor. The (possibly modified) response returned by + `post_delete_interconnect_attachment` will be passed to + `post_delete_interconnect_attachment_with_metadata`. + """ + return response, metadata + def pre_delete_network( self, request: service.DeleteNetworkRequest, @@ -422,12 +537,35 @@ def post_delete_network( ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_network - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_network_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the EdgeNetwork server but before - it is returned to user code. + it is returned to user code. This `post_delete_network` interceptor runs + before the `post_delete_network_with_metadata` interceptor. """ return response + def post_delete_network_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_network + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the EdgeNetwork server but before it is returned to user code. + + We recommend only using this `post_delete_network_with_metadata` + interceptor in new development instead of the `post_delete_network` interceptor. + When both interceptors are used, this `post_delete_network_with_metadata` interceptor runs after the + `post_delete_network` interceptor. The (possibly modified) response returned by + `post_delete_network` will be passed to + `post_delete_network_with_metadata`. 
+ """ + return response, metadata + def pre_delete_router( self, request: service.DeleteRouterRequest, @@ -445,12 +583,35 @@ def post_delete_router( ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_router - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_router_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the EdgeNetwork server but before - it is returned to user code. + it is returned to user code. This `post_delete_router` interceptor runs + before the `post_delete_router_with_metadata` interceptor. """ return response + def post_delete_router_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_router + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the EdgeNetwork server but before it is returned to user code. + + We recommend only using this `post_delete_router_with_metadata` + interceptor in new development instead of the `post_delete_router` interceptor. + When both interceptors are used, this `post_delete_router_with_metadata` interceptor runs after the + `post_delete_router` interceptor. The (possibly modified) response returned by + `post_delete_router` will be passed to + `post_delete_router_with_metadata`. + """ + return response, metadata + def pre_delete_subnet( self, request: service.DeleteSubnetRequest, @@ -468,12 +629,35 @@ def post_delete_subnet( ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_subnet - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_subnet_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the EdgeNetwork server but before - it is returned to user code. + it is returned to user code. This `post_delete_subnet` interceptor runs + before the `post_delete_subnet_with_metadata` interceptor. """ return response + def post_delete_subnet_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_subnet + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the EdgeNetwork server but before it is returned to user code. + + We recommend only using this `post_delete_subnet_with_metadata` + interceptor in new development instead of the `post_delete_subnet` interceptor. + When both interceptors are used, this `post_delete_subnet_with_metadata` interceptor runs after the + `post_delete_subnet` interceptor. The (possibly modified) response returned by + `post_delete_subnet` will be passed to + `post_delete_subnet_with_metadata`. + """ + return response, metadata + def pre_diagnose_interconnect( self, request: service.DiagnoseInterconnectRequest, @@ -493,12 +677,37 @@ def post_diagnose_interconnect( ) -> service.DiagnoseInterconnectResponse: """Post-rpc interceptor for diagnose_interconnect - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_diagnose_interconnect_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response after it is returned by the EdgeNetwork server but before - it is returned to user code. + it is returned to user code. This `post_diagnose_interconnect` interceptor runs + before the `post_diagnose_interconnect_with_metadata` interceptor. """ return response + def post_diagnose_interconnect_with_metadata( + self, + response: service.DiagnoseInterconnectResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + service.DiagnoseInterconnectResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for diagnose_interconnect + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the EdgeNetwork server but before it is returned to user code. + + We recommend only using this `post_diagnose_interconnect_with_metadata` + interceptor in new development instead of the `post_diagnose_interconnect` interceptor. + When both interceptors are used, this `post_diagnose_interconnect_with_metadata` interceptor runs after the + `post_diagnose_interconnect` interceptor. The (possibly modified) response returned by + `post_diagnose_interconnect` will be passed to + `post_diagnose_interconnect_with_metadata`. + """ + return response, metadata + def pre_diagnose_network( self, request: service.DiagnoseNetworkRequest, @@ -516,12 +725,37 @@ def post_diagnose_network( ) -> service.DiagnoseNetworkResponse: """Post-rpc interceptor for diagnose_network - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_diagnose_network_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the EdgeNetwork server but before - it is returned to user code. + it is returned to user code. This `post_diagnose_network` interceptor runs + before the `post_diagnose_network_with_metadata` interceptor. """ return response + def post_diagnose_network_with_metadata( + self, + response: service.DiagnoseNetworkResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + service.DiagnoseNetworkResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for diagnose_network + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the EdgeNetwork server but before it is returned to user code. + + We recommend only using this `post_diagnose_network_with_metadata` + interceptor in new development instead of the `post_diagnose_network` interceptor. + When both interceptors are used, this `post_diagnose_network_with_metadata` interceptor runs after the + `post_diagnose_network` interceptor. The (possibly modified) response returned by + `post_diagnose_network` will be passed to + `post_diagnose_network_with_metadata`. + """ + return response, metadata + def pre_diagnose_router( self, request: service.DiagnoseRouterRequest, @@ -539,12 +773,35 @@ def post_diagnose_router( ) -> service.DiagnoseRouterResponse: """Post-rpc interceptor for diagnose_router - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_diagnose_router_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the EdgeNetwork server but before - it is returned to user code. + it is returned to user code. This `post_diagnose_router` interceptor runs + before the `post_diagnose_router_with_metadata` interceptor. 
""" return response + def post_diagnose_router_with_metadata( + self, + response: service.DiagnoseRouterResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[service.DiagnoseRouterResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for diagnose_router + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the EdgeNetwork server but before it is returned to user code. + + We recommend only using this `post_diagnose_router_with_metadata` + interceptor in new development instead of the `post_diagnose_router` interceptor. + When both interceptors are used, this `post_diagnose_router_with_metadata` interceptor runs after the + `post_diagnose_router` interceptor. The (possibly modified) response returned by + `post_diagnose_router` will be passed to + `post_diagnose_router_with_metadata`. + """ + return response, metadata + def pre_get_interconnect( self, request: service.GetInterconnectRequest, @@ -562,12 +819,35 @@ def post_get_interconnect( ) -> resources.Interconnect: """Post-rpc interceptor for get_interconnect - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_interconnect_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the EdgeNetwork server but before - it is returned to user code. + it is returned to user code. This `post_get_interconnect` interceptor runs + before the `post_get_interconnect_with_metadata` interceptor. """ return response + def post_get_interconnect_with_metadata( + self, + response: resources.Interconnect, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.Interconnect, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_interconnect + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the EdgeNetwork server but before it is returned to user code. + + We recommend only using this `post_get_interconnect_with_metadata` + interceptor in new development instead of the `post_get_interconnect` interceptor. + When both interceptors are used, this `post_get_interconnect_with_metadata` interceptor runs after the + `post_get_interconnect` interceptor. The (possibly modified) response returned by + `post_get_interconnect` will be passed to + `post_get_interconnect_with_metadata`. + """ + return response, metadata + def pre_get_interconnect_attachment( self, request: service.GetInterconnectAttachmentRequest, @@ -588,12 +868,37 @@ def post_get_interconnect_attachment( ) -> resources.InterconnectAttachment: """Post-rpc interceptor for get_interconnect_attachment - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_interconnect_attachment_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the EdgeNetwork server but before - it is returned to user code. + it is returned to user code. This `post_get_interconnect_attachment` interceptor runs + before the `post_get_interconnect_attachment_with_metadata` interceptor. 
""" return response + def post_get_interconnect_attachment_with_metadata( + self, + response: resources.InterconnectAttachment, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + resources.InterconnectAttachment, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for get_interconnect_attachment + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the EdgeNetwork server but before it is returned to user code. + + We recommend only using this `post_get_interconnect_attachment_with_metadata` + interceptor in new development instead of the `post_get_interconnect_attachment` interceptor. + When both interceptors are used, this `post_get_interconnect_attachment_with_metadata` interceptor runs after the + `post_get_interconnect_attachment` interceptor. The (possibly modified) response returned by + `post_get_interconnect_attachment` will be passed to + `post_get_interconnect_attachment_with_metadata`. + """ + return response, metadata + def pre_get_network( self, request: service.GetNetworkRequest, @@ -609,12 +914,35 @@ def pre_get_network( def post_get_network(self, response: resources.Network) -> resources.Network: """Post-rpc interceptor for get_network - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_network_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the EdgeNetwork server but before - it is returned to user code. + it is returned to user code. This `post_get_network` interceptor runs + before the `post_get_network_with_metadata` interceptor. """ return response + def post_get_network_with_metadata( + self, + response: resources.Network, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.Network, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_network + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the EdgeNetwork server but before it is returned to user code. + + We recommend only using this `post_get_network_with_metadata` + interceptor in new development instead of the `post_get_network` interceptor. + When both interceptors are used, this `post_get_network_with_metadata` interceptor runs after the + `post_get_network` interceptor. The (possibly modified) response returned by + `post_get_network` will be passed to + `post_get_network_with_metadata`. + """ + return response, metadata + def pre_get_router( self, request: service.GetRouterRequest, @@ -630,12 +958,35 @@ def pre_get_router( def post_get_router(self, response: resources.Router) -> resources.Router: """Post-rpc interceptor for get_router - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_router_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the EdgeNetwork server but before - it is returned to user code. + it is returned to user code. This `post_get_router` interceptor runs + before the `post_get_router_with_metadata` interceptor. 
""" return response + def post_get_router_with_metadata( + self, + response: resources.Router, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.Router, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_router + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the EdgeNetwork server but before it is returned to user code. + + We recommend only using this `post_get_router_with_metadata` + interceptor in new development instead of the `post_get_router` interceptor. + When both interceptors are used, this `post_get_router_with_metadata` interceptor runs after the + `post_get_router` interceptor. The (possibly modified) response returned by + `post_get_router` will be passed to + `post_get_router_with_metadata`. + """ + return response, metadata + def pre_get_subnet( self, request: service.GetSubnetRequest, @@ -651,12 +1002,35 @@ def pre_get_subnet( def post_get_subnet(self, response: resources.Subnet) -> resources.Subnet: """Post-rpc interceptor for get_subnet - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_subnet_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the EdgeNetwork server but before - it is returned to user code. + it is returned to user code. This `post_get_subnet` interceptor runs + before the `post_get_subnet_with_metadata` interceptor. """ return response + def post_get_subnet_with_metadata( + self, + response: resources.Subnet, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.Subnet, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_subnet + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the EdgeNetwork server but before it is returned to user code. + + We recommend only using this `post_get_subnet_with_metadata` + interceptor in new development instead of the `post_get_subnet` interceptor. + When both interceptors are used, this `post_get_subnet_with_metadata` interceptor runs after the + `post_get_subnet` interceptor. The (possibly modified) response returned by + `post_get_subnet` will be passed to + `post_get_subnet_with_metadata`. + """ + return response, metadata + def pre_get_zone( self, request: service.GetZoneRequest, @@ -672,12 +1046,35 @@ def pre_get_zone( def post_get_zone(self, response: resources.Zone) -> resources.Zone: """Post-rpc interceptor for get_zone - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_zone_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the EdgeNetwork server but before - it is returned to user code. + it is returned to user code. This `post_get_zone` interceptor runs + before the `post_get_zone_with_metadata` interceptor. """ return response + def post_get_zone_with_metadata( + self, + response: resources.Zone, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.Zone, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_zone + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the EdgeNetwork server but before it is returned to user code. + + We recommend only using this `post_get_zone_with_metadata` + interceptor in new development instead of the `post_get_zone` interceptor. 
+ When both interceptors are used, this `post_get_zone_with_metadata` interceptor runs after the + `post_get_zone` interceptor. The (possibly modified) response returned by + `post_get_zone` will be passed to + `post_get_zone_with_metadata`. + """ + return response, metadata + def pre_initialize_zone( self, request: service.InitializeZoneRequest, @@ -695,12 +1092,35 @@ def post_initialize_zone( ) -> service.InitializeZoneResponse: """Post-rpc interceptor for initialize_zone - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_initialize_zone_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the EdgeNetwork server but before - it is returned to user code. + it is returned to user code. This `post_initialize_zone` interceptor runs + before the `post_initialize_zone_with_metadata` interceptor. """ return response + def post_initialize_zone_with_metadata( + self, + response: service.InitializeZoneResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[service.InitializeZoneResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for initialize_zone + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the EdgeNetwork server but before it is returned to user code. + + We recommend only using this `post_initialize_zone_with_metadata` + interceptor in new development instead of the `post_initialize_zone` interceptor. + When both interceptors are used, this `post_initialize_zone_with_metadata` interceptor runs after the + `post_initialize_zone` interceptor. The (possibly modified) response returned by + `post_initialize_zone` will be passed to + `post_initialize_zone_with_metadata`. + """ + return response, metadata + def pre_list_interconnect_attachments( self, request: service.ListInterconnectAttachmentsRequest, @@ -721,12 +1141,38 @@ def post_list_interconnect_attachments( ) -> service.ListInterconnectAttachmentsResponse: """Post-rpc interceptor for list_interconnect_attachments - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_interconnect_attachments_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the EdgeNetwork server but before - it is returned to user code. + it is returned to user code. This `post_list_interconnect_attachments` interceptor runs + before the `post_list_interconnect_attachments_with_metadata` interceptor. """ return response + def post_list_interconnect_attachments_with_metadata( + self, + response: service.ListInterconnectAttachmentsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + service.ListInterconnectAttachmentsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_interconnect_attachments + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the EdgeNetwork server but before it is returned to user code. + + We recommend only using this `post_list_interconnect_attachments_with_metadata` + interceptor in new development instead of the `post_list_interconnect_attachments` interceptor. + When both interceptors are used, this `post_list_interconnect_attachments_with_metadata` interceptor runs after the + `post_list_interconnect_attachments` interceptor. 
The (possibly modified) response returned by + `post_list_interconnect_attachments` will be passed to + `post_list_interconnect_attachments_with_metadata`. + """ + return response, metadata + def pre_list_interconnects( self, request: service.ListInterconnectsRequest, @@ -746,12 +1192,37 @@ def post_list_interconnects( ) -> service.ListInterconnectsResponse: """Post-rpc interceptor for list_interconnects - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_interconnects_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the EdgeNetwork server but before - it is returned to user code. + it is returned to user code. This `post_list_interconnects` interceptor runs + before the `post_list_interconnects_with_metadata` interceptor. """ return response + def post_list_interconnects_with_metadata( + self, + response: service.ListInterconnectsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + service.ListInterconnectsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_interconnects + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the EdgeNetwork server but before it is returned to user code. + + We recommend only using this `post_list_interconnects_with_metadata` + interceptor in new development instead of the `post_list_interconnects` interceptor. + When both interceptors are used, this `post_list_interconnects_with_metadata` interceptor runs after the + `post_list_interconnects` interceptor. The (possibly modified) response returned by + `post_list_interconnects` will be passed to + `post_list_interconnects_with_metadata`. + """ + return response, metadata + def pre_list_networks( self, request: service.ListNetworksRequest, @@ -769,12 +1240,35 @@ def post_list_networks( ) -> service.ListNetworksResponse: """Post-rpc interceptor for list_networks - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_networks_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the EdgeNetwork server but before - it is returned to user code. + it is returned to user code. This `post_list_networks` interceptor runs + before the `post_list_networks_with_metadata` interceptor. """ return response + def post_list_networks_with_metadata( + self, + response: service.ListNetworksResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[service.ListNetworksResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_networks + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the EdgeNetwork server but before it is returned to user code. + + We recommend only using this `post_list_networks_with_metadata` + interceptor in new development instead of the `post_list_networks` interceptor. + When both interceptors are used, this `post_list_networks_with_metadata` interceptor runs after the + `post_list_networks` interceptor. The (possibly modified) response returned by + `post_list_networks` will be passed to + `post_list_networks_with_metadata`. 
+ """ + return response, metadata + def pre_list_routers( self, request: service.ListRoutersRequest, @@ -792,12 +1286,35 @@ def post_list_routers( ) -> service.ListRoutersResponse: """Post-rpc interceptor for list_routers - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_routers_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the EdgeNetwork server but before - it is returned to user code. + it is returned to user code. This `post_list_routers` interceptor runs + before the `post_list_routers_with_metadata` interceptor. """ return response + def post_list_routers_with_metadata( + self, + response: service.ListRoutersResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[service.ListRoutersResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_routers + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the EdgeNetwork server but before it is returned to user code. + + We recommend only using this `post_list_routers_with_metadata` + interceptor in new development instead of the `post_list_routers` interceptor. + When both interceptors are used, this `post_list_routers_with_metadata` interceptor runs after the + `post_list_routers` interceptor. The (possibly modified) response returned by + `post_list_routers` will be passed to + `post_list_routers_with_metadata`. + """ + return response, metadata + def pre_list_subnets( self, request: service.ListSubnetsRequest, @@ -815,12 +1332,35 @@ def post_list_subnets( ) -> service.ListSubnetsResponse: """Post-rpc interceptor for list_subnets - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_subnets_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the EdgeNetwork server but before - it is returned to user code. + it is returned to user code. This `post_list_subnets` interceptor runs + before the `post_list_subnets_with_metadata` interceptor. """ return response + def post_list_subnets_with_metadata( + self, + response: service.ListSubnetsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[service.ListSubnetsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_subnets + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the EdgeNetwork server but before it is returned to user code. + + We recommend only using this `post_list_subnets_with_metadata` + interceptor in new development instead of the `post_list_subnets` interceptor. + When both interceptors are used, this `post_list_subnets_with_metadata` interceptor runs after the + `post_list_subnets` interceptor. The (possibly modified) response returned by + `post_list_subnets` will be passed to + `post_list_subnets_with_metadata`. + """ + return response, metadata + def pre_list_zones( self, request: service.ListZonesRequest, @@ -838,12 +1378,35 @@ def post_list_zones( ) -> service.ListZonesResponse: """Post-rpc interceptor for list_zones - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_zones_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the EdgeNetwork server but before - it is returned to user code. + it is returned to user code. 
This `post_list_zones` interceptor runs + before the `post_list_zones_with_metadata` interceptor. """ return response + def post_list_zones_with_metadata( + self, + response: service.ListZonesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[service.ListZonesResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_zones + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the EdgeNetwork server but before it is returned to user code. + + We recommend only using this `post_list_zones_with_metadata` + interceptor in new development instead of the `post_list_zones` interceptor. + When both interceptors are used, this `post_list_zones_with_metadata` interceptor runs after the + `post_list_zones` interceptor. The (possibly modified) response returned by + `post_list_zones` will be passed to + `post_list_zones_with_metadata`. + """ + return response, metadata + def pre_update_router( self, request: service.UpdateRouterRequest, @@ -861,12 +1424,35 @@ def post_update_router( ) -> operations_pb2.Operation: """Post-rpc interceptor for update_router - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_router_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the EdgeNetwork server but before - it is returned to user code. + it is returned to user code. This `post_update_router` interceptor runs + before the `post_update_router_with_metadata` interceptor. """ return response + def post_update_router_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_router + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the EdgeNetwork server but before it is returned to user code. + + We recommend only using this `post_update_router_with_metadata` + interceptor in new development instead of the `post_update_router` interceptor. + When both interceptors are used, this `post_update_router_with_metadata` interceptor runs after the + `post_update_router` interceptor. The (possibly modified) response returned by + `post_update_router` will be passed to + `post_update_router_with_metadata`. + """ + return response, metadata + def pre_update_subnet( self, request: service.UpdateSubnetRequest, @@ -884,12 +1470,35 @@ def post_update_subnet( ) -> operations_pb2.Operation: """Post-rpc interceptor for update_subnet - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_subnet_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the EdgeNetwork server but before - it is returned to user code. + it is returned to user code. This `post_update_subnet` interceptor runs + before the `post_update_subnet_with_metadata` interceptor. """ return response + def post_update_subnet_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_subnet + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the EdgeNetwork server but before it is returned to user code. 
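As a usage sketch for the interceptors defined above (the `interceptor` constructor argument and module path are assumptions based on the usual generated REST transport API, not something introduced by this diff), a custom interceptor such as the `LoggingInterceptor` sketched earlier would be installed by building the REST transport with it and handing the transport to the client; the `__call__` changes below then feed it both the parsed response and the response headers:

# Sketch only: wiring a custom interceptor into the REST client.
from google.cloud import edgenetwork_v1
from google.cloud.edgenetwork_v1.services.edge_network.transports.rest import (
    EdgeNetworkRestTransport,
)

# Assumed constructor argument name (`interceptor`), per the usual GAPIC REST transport.
transport = EdgeNetworkRestTransport(interceptor=LoggingInterceptor())
client = edgenetwork_v1.EdgeNetworkClient(transport=transport)

# For every RPC, the transport now runs the deprecated post_* hook first and then the
# *_with_metadata hook, passing the response headers as (key, str(value)) tuples.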
+ + We recommend only using this `post_update_subnet_with_metadata` + interceptor in new development instead of the `post_update_subnet` interceptor. + When both interceptors are used, this `post_update_subnet_with_metadata` interceptor runs after the + `post_update_subnet` interceptor. The (possibly modified) response returned by + `post_update_subnet` will be passed to + `post_update_subnet_with_metadata`. + """ + return response, metadata + def pre_get_location( self, request: locations_pb2.GetLocationRequest, @@ -1312,6 +1921,13 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_interconnect_attachment(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_create_interconnect_attachment_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1463,6 +2079,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_network(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_network_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1616,6 +2236,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_router(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_router_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1769,6 +2393,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_subnet(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_subnet_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1917,6 +2545,13 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_interconnect_attachment(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_delete_interconnect_attachment_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2060,6 +2695,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_network(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_network_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2205,6 +2844,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_router(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_router_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2350,6 +2993,10 @@ def __call__( 
json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_subnet(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_subnet_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2497,6 +3144,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_diagnose_interconnect(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_diagnose_interconnect_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2645,6 +3296,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_diagnose_network(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_diagnose_network_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2788,6 +3443,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_diagnose_router(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_diagnose_router_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2932,6 +3591,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_interconnect(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_interconnect_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3081,6 +3744,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_interconnect_attachment(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_interconnect_attachment_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3227,6 +3894,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_network(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_network_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3369,6 +4040,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_router(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_router_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3511,6 +4186,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_subnet(resp) + response_metadata = [(k, str(v)) for k, v in 
response.headers.items()] + resp, _ = self._interceptor.post_get_subnet_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3654,6 +4333,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_zone(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_zone_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3801,6 +4484,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_initialize_zone(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_initialize_zone_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3950,6 +4637,13 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_interconnect_attachments(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_list_interconnect_attachments_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4097,6 +4791,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_interconnects(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_interconnects_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4246,6 +4944,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_networks(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_networks_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4393,6 +5095,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_routers(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_routers_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4540,6 +5246,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_subnets(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_subnets_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4685,6 +5395,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_zones(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_zones_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( 
logging.DEBUG ): # pragma: NO COVER @@ -4838,6 +5552,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_router(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_router_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4991,6 +5709,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_subnet(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_subnet_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-edgenetwork/samples/generated_samples/snippet_metadata_google.cloud.edgenetwork.v1.json b/packages/google-cloud-edgenetwork/samples/generated_samples/snippet_metadata_google.cloud.edgenetwork.v1.json index c64210ad1d46..896e8ac5a600 100644 --- a/packages/google-cloud-edgenetwork/samples/generated_samples/snippet_metadata_google.cloud.edgenetwork.v1.json +++ b/packages/google-cloud-edgenetwork/samples/generated_samples/snippet_metadata_google.cloud.edgenetwork.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-edgenetwork", - "version": "0.1.14" + "version": "0.1.15" }, "snippets": [ { diff --git a/packages/google-cloud-edgenetwork/tests/unit/gapic/edgenetwork_v1/test_edge_network.py b/packages/google-cloud-edgenetwork/tests/unit/gapic/edgenetwork_v1/test_edge_network.py index 549dcf28e633..8699c408fccb 100644 --- a/packages/google-cloud-edgenetwork/tests/unit/gapic/edgenetwork_v1/test_edge_network.py +++ b/packages/google-cloud-edgenetwork/tests/unit/gapic/edgenetwork_v1/test_edge_network.py @@ -74,6 +74,13 @@ ) from google.cloud.edgenetwork_v1.types import resources, service +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -313,6 +320,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
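The CRED_INFO_JSON / CRED_INFO_STRING constants added above drive the parametrized tests that follow. As a rough illustration of the behaviour those tests exercise (a sketch only: the helper is internal to the client, and the resource name used here is hypothetical), credential information is appended to an error's details only for 401, 403, and 404 responses, and only when the credentials object exposes get_cred_info:

# Sketch only, mirroring the assertions in the tests below; not a documented public API.
from google.api_core import exceptions as core_exceptions
from google.cloud import edgenetwork_v1

client = edgenetwork_v1.EdgeNetworkClient()  # uses application-default credentials

try:
    # Hypothetical resource name, for illustration only.
    client.get_network(name="projects/p/locations/l/zones/z/networks/n")
except core_exceptions.GoogleAPICallError as err:
    # On 401/403/404 with cred-info-capable credentials, err.details now also carries
    # a JSON string describing the credential type, principal, and credential source;
    # for other codes (e.g. 500) the details are left untouched.
    print(err.details)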
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = EdgeNetworkClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = EdgeNetworkClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -17841,10 +17891,13 @@ def test_initialize_zone_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.EdgeNetworkRestInterceptor, "post_initialize_zone" ) as post, mock.patch.object( + transports.EdgeNetworkRestInterceptor, "post_initialize_zone_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.EdgeNetworkRestInterceptor, "pre_initialize_zone" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.InitializeZoneRequest.pb(service.InitializeZoneRequest()) transcode.return_value = { "method": "post", @@ -17868,6 +17921,7 @@ def test_initialize_zone_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = service.InitializeZoneResponse() + post_with_metadata.return_value = service.InitializeZoneResponse(), metadata client.initialize_zone( request, @@ -17879,6 +17933,7 @@ def test_initialize_zone_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_zones_rest_bad_request(request_type=service.ListZonesRequest): @@ -17963,10 +18018,13 @@ def test_list_zones_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.EdgeNetworkRestInterceptor, "post_list_zones" ) as post, mock.patch.object( + transports.EdgeNetworkRestInterceptor, "post_list_zones_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.EdgeNetworkRestInterceptor, "pre_list_zones" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.ListZonesRequest.pb(service.ListZonesRequest()) transcode.return_value = { "method": "post", @@ -17988,6 +18046,7 @@ def test_list_zones_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = service.ListZonesResponse() + post_with_metadata.return_value = service.ListZonesResponse(), metadata client.list_zones( request, @@ -17999,6 +18058,7 @@ def test_list_zones_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + 
post_with_metadata.assert_called_once() def test_get_zone_rest_bad_request(request_type=service.GetZoneRequest): @@ -18083,10 +18143,13 @@ def test_get_zone_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.EdgeNetworkRestInterceptor, "post_get_zone" ) as post, mock.patch.object( + transports.EdgeNetworkRestInterceptor, "post_get_zone_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.EdgeNetworkRestInterceptor, "pre_get_zone" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.GetZoneRequest.pb(service.GetZoneRequest()) transcode.return_value = { "method": "post", @@ -18108,6 +18171,7 @@ def test_get_zone_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.Zone() + post_with_metadata.return_value = resources.Zone(), metadata client.get_zone( request, @@ -18119,6 +18183,7 @@ def test_get_zone_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_networks_rest_bad_request(request_type=service.ListNetworksRequest): @@ -18203,10 +18268,13 @@ def test_list_networks_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.EdgeNetworkRestInterceptor, "post_list_networks" ) as post, mock.patch.object( + transports.EdgeNetworkRestInterceptor, "post_list_networks_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.EdgeNetworkRestInterceptor, "pre_list_networks" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.ListNetworksRequest.pb(service.ListNetworksRequest()) transcode.return_value = { "method": "post", @@ -18230,6 +18298,7 @@ def test_list_networks_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = service.ListNetworksResponse() + post_with_metadata.return_value = service.ListNetworksResponse(), metadata client.list_networks( request, @@ -18241,6 +18310,7 @@ def test_list_networks_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_network_rest_bad_request(request_type=service.GetNetworkRequest): @@ -18331,10 +18401,13 @@ def test_get_network_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.EdgeNetworkRestInterceptor, "post_get_network" ) as post, mock.patch.object( + transports.EdgeNetworkRestInterceptor, "post_get_network_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.EdgeNetworkRestInterceptor, "pre_get_network" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.GetNetworkRequest.pb(service.GetNetworkRequest()) transcode.return_value = { "method": "post", @@ -18356,6 +18429,7 @@ def test_get_network_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.Network() + post_with_metadata.return_value = resources.Network(), metadata client.get_network( request, @@ -18367,6 +18441,7 @@ def test_get_network_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_diagnose_network_rest_bad_request(request_type=service.DiagnoseNetworkRequest): @@ -18450,10 +18525,13 @@ def test_diagnose_network_rest_interceptors(null_interceptor): ) 
as transcode, mock.patch.object( transports.EdgeNetworkRestInterceptor, "post_diagnose_network" ) as post, mock.patch.object( + transports.EdgeNetworkRestInterceptor, "post_diagnose_network_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.EdgeNetworkRestInterceptor, "pre_diagnose_network" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.DiagnoseNetworkRequest.pb(service.DiagnoseNetworkRequest()) transcode.return_value = { "method": "post", @@ -18477,6 +18555,7 @@ def test_diagnose_network_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = service.DiagnoseNetworkResponse() + post_with_metadata.return_value = service.DiagnoseNetworkResponse(), metadata client.diagnose_network( request, @@ -18488,6 +18567,7 @@ def test_diagnose_network_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_network_rest_bad_request(request_type=service.CreateNetworkRequest): @@ -18641,10 +18721,13 @@ def test_create_network_rest_interceptors(null_interceptor): ), mock.patch.object( transports.EdgeNetworkRestInterceptor, "post_create_network" ) as post, mock.patch.object( + transports.EdgeNetworkRestInterceptor, "post_create_network_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.EdgeNetworkRestInterceptor, "pre_create_network" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.CreateNetworkRequest.pb(service.CreateNetworkRequest()) transcode.return_value = { "method": "post", @@ -18666,6 +18749,7 @@ def test_create_network_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_network( request, @@ -18677,6 +18761,7 @@ def test_create_network_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_network_rest_bad_request(request_type=service.DeleteNetworkRequest): @@ -18759,10 +18844,13 @@ def test_delete_network_rest_interceptors(null_interceptor): ), mock.patch.object( transports.EdgeNetworkRestInterceptor, "post_delete_network" ) as post, mock.patch.object( + transports.EdgeNetworkRestInterceptor, "post_delete_network_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.EdgeNetworkRestInterceptor, "pre_delete_network" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.DeleteNetworkRequest.pb(service.DeleteNetworkRequest()) transcode.return_value = { "method": "post", @@ -18784,6 +18872,7 @@ def test_delete_network_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.delete_network( request, @@ -18795,6 +18884,7 @@ def test_delete_network_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_subnets_rest_bad_request(request_type=service.ListSubnetsRequest): @@ -18879,10 +18969,13 @@ def test_list_subnets_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.EdgeNetworkRestInterceptor, 
"post_list_subnets" ) as post, mock.patch.object( + transports.EdgeNetworkRestInterceptor, "post_list_subnets_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.EdgeNetworkRestInterceptor, "pre_list_subnets" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.ListSubnetsRequest.pb(service.ListSubnetsRequest()) transcode.return_value = { "method": "post", @@ -18906,6 +18999,7 @@ def test_list_subnets_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = service.ListSubnetsResponse() + post_with_metadata.return_value = service.ListSubnetsResponse(), metadata client.list_subnets( request, @@ -18917,6 +19011,7 @@ def test_list_subnets_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_subnet_rest_bad_request(request_type=service.GetSubnetRequest): @@ -19017,10 +19112,13 @@ def test_get_subnet_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.EdgeNetworkRestInterceptor, "post_get_subnet" ) as post, mock.patch.object( + transports.EdgeNetworkRestInterceptor, "post_get_subnet_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.EdgeNetworkRestInterceptor, "pre_get_subnet" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.GetSubnetRequest.pb(service.GetSubnetRequest()) transcode.return_value = { "method": "post", @@ -19042,6 +19140,7 @@ def test_get_subnet_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.Subnet() + post_with_metadata.return_value = resources.Subnet(), metadata client.get_subnet( request, @@ -19053,6 +19152,7 @@ def test_get_subnet_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_subnet_rest_bad_request(request_type=service.CreateSubnetRequest): @@ -19211,10 +19311,13 @@ def test_create_subnet_rest_interceptors(null_interceptor): ), mock.patch.object( transports.EdgeNetworkRestInterceptor, "post_create_subnet" ) as post, mock.patch.object( + transports.EdgeNetworkRestInterceptor, "post_create_subnet_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.EdgeNetworkRestInterceptor, "pre_create_subnet" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.CreateSubnetRequest.pb(service.CreateSubnetRequest()) transcode.return_value = { "method": "post", @@ -19236,6 +19339,7 @@ def test_create_subnet_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_subnet( request, @@ -19247,6 +19351,7 @@ def test_create_subnet_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_subnet_rest_bad_request(request_type=service.UpdateSubnetRequest): @@ -19413,10 +19518,13 @@ def test_update_subnet_rest_interceptors(null_interceptor): ), mock.patch.object( transports.EdgeNetworkRestInterceptor, "post_update_subnet" ) as post, mock.patch.object( + transports.EdgeNetworkRestInterceptor, "post_update_subnet_with_metadata" + ) as post_with_metadata, mock.patch.object( 
transports.EdgeNetworkRestInterceptor, "pre_update_subnet" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.UpdateSubnetRequest.pb(service.UpdateSubnetRequest()) transcode.return_value = { "method": "post", @@ -19438,6 +19546,7 @@ def test_update_subnet_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.update_subnet( request, @@ -19449,6 +19558,7 @@ def test_update_subnet_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_subnet_rest_bad_request(request_type=service.DeleteSubnetRequest): @@ -19531,10 +19641,13 @@ def test_delete_subnet_rest_interceptors(null_interceptor): ), mock.patch.object( transports.EdgeNetworkRestInterceptor, "post_delete_subnet" ) as post, mock.patch.object( + transports.EdgeNetworkRestInterceptor, "post_delete_subnet_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.EdgeNetworkRestInterceptor, "pre_delete_subnet" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.DeleteSubnetRequest.pb(service.DeleteSubnetRequest()) transcode.return_value = { "method": "post", @@ -19556,6 +19669,7 @@ def test_delete_subnet_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.delete_subnet( request, @@ -19567,6 +19681,7 @@ def test_delete_subnet_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_interconnects_rest_bad_request( @@ -19653,10 +19768,13 @@ def test_list_interconnects_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.EdgeNetworkRestInterceptor, "post_list_interconnects" ) as post, mock.patch.object( + transports.EdgeNetworkRestInterceptor, "post_list_interconnects_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.EdgeNetworkRestInterceptor, "pre_list_interconnects" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.ListInterconnectsRequest.pb( service.ListInterconnectsRequest() ) @@ -19682,6 +19800,7 @@ def test_list_interconnects_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = service.ListInterconnectsResponse() + post_with_metadata.return_value = service.ListInterconnectsResponse(), metadata client.list_interconnects( request, @@ -19693,6 +19812,7 @@ def test_list_interconnects_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_interconnect_rest_bad_request(request_type=service.GetInterconnectRequest): @@ -19791,10 +19911,13 @@ def test_get_interconnect_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.EdgeNetworkRestInterceptor, "post_get_interconnect" ) as post, mock.patch.object( + transports.EdgeNetworkRestInterceptor, "post_get_interconnect_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.EdgeNetworkRestInterceptor, "pre_get_interconnect" ) as pre: pre.assert_not_called() post.assert_not_called() + 
post_with_metadata.assert_not_called() pb_message = service.GetInterconnectRequest.pb(service.GetInterconnectRequest()) transcode.return_value = { "method": "post", @@ -19816,6 +19939,7 @@ def test_get_interconnect_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.Interconnect() + post_with_metadata.return_value = resources.Interconnect(), metadata client.get_interconnect( request, @@ -19827,6 +19951,7 @@ def test_get_interconnect_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_diagnose_interconnect_rest_bad_request( @@ -19912,10 +20037,14 @@ def test_diagnose_interconnect_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.EdgeNetworkRestInterceptor, "post_diagnose_interconnect" ) as post, mock.patch.object( + transports.EdgeNetworkRestInterceptor, + "post_diagnose_interconnect_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.EdgeNetworkRestInterceptor, "pre_diagnose_interconnect" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.DiagnoseInterconnectRequest.pb( service.DiagnoseInterconnectRequest() ) @@ -19941,6 +20070,10 @@ def test_diagnose_interconnect_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = service.DiagnoseInterconnectResponse() + post_with_metadata.return_value = ( + service.DiagnoseInterconnectResponse(), + metadata, + ) client.diagnose_interconnect( request, @@ -19952,6 +20085,7 @@ def test_diagnose_interconnect_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_interconnect_attachments_rest_bad_request( @@ -20038,10 +20172,14 @@ def test_list_interconnect_attachments_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.EdgeNetworkRestInterceptor, "post_list_interconnect_attachments" ) as post, mock.patch.object( + transports.EdgeNetworkRestInterceptor, + "post_list_interconnect_attachments_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.EdgeNetworkRestInterceptor, "pre_list_interconnect_attachments" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.ListInterconnectAttachmentsRequest.pb( service.ListInterconnectAttachmentsRequest() ) @@ -20067,6 +20205,10 @@ def test_list_interconnect_attachments_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = service.ListInterconnectAttachmentsResponse() + post_with_metadata.return_value = ( + service.ListInterconnectAttachmentsResponse(), + metadata, + ) client.list_interconnect_attachments( request, @@ -20078,6 +20220,7 @@ def test_list_interconnect_attachments_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_interconnect_attachment_rest_bad_request( @@ -20178,10 +20321,14 @@ def test_get_interconnect_attachment_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.EdgeNetworkRestInterceptor, "post_get_interconnect_attachment" ) as post, mock.patch.object( + transports.EdgeNetworkRestInterceptor, + "post_get_interconnect_attachment_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.EdgeNetworkRestInterceptor, 
"pre_get_interconnect_attachment" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.GetInterconnectAttachmentRequest.pb( service.GetInterconnectAttachmentRequest() ) @@ -20207,6 +20354,7 @@ def test_get_interconnect_attachment_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.InterconnectAttachment() + post_with_metadata.return_value = resources.InterconnectAttachment(), metadata client.get_interconnect_attachment( request, @@ -20218,6 +20366,7 @@ def test_get_interconnect_attachment_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_interconnect_attachment_rest_bad_request( @@ -20381,10 +20530,14 @@ def test_create_interconnect_attachment_rest_interceptors(null_interceptor): ), mock.patch.object( transports.EdgeNetworkRestInterceptor, "post_create_interconnect_attachment" ) as post, mock.patch.object( + transports.EdgeNetworkRestInterceptor, + "post_create_interconnect_attachment_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.EdgeNetworkRestInterceptor, "pre_create_interconnect_attachment" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.CreateInterconnectAttachmentRequest.pb( service.CreateInterconnectAttachmentRequest() ) @@ -20408,6 +20561,7 @@ def test_create_interconnect_attachment_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_interconnect_attachment( request, @@ -20419,6 +20573,7 @@ def test_create_interconnect_attachment_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_interconnect_attachment_rest_bad_request( @@ -20503,10 +20658,14 @@ def test_delete_interconnect_attachment_rest_interceptors(null_interceptor): ), mock.patch.object( transports.EdgeNetworkRestInterceptor, "post_delete_interconnect_attachment" ) as post, mock.patch.object( + transports.EdgeNetworkRestInterceptor, + "post_delete_interconnect_attachment_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.EdgeNetworkRestInterceptor, "pre_delete_interconnect_attachment" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.DeleteInterconnectAttachmentRequest.pb( service.DeleteInterconnectAttachmentRequest() ) @@ -20530,6 +20689,7 @@ def test_delete_interconnect_attachment_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.delete_interconnect_attachment( request, @@ -20541,6 +20701,7 @@ def test_delete_interconnect_attachment_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_routers_rest_bad_request(request_type=service.ListRoutersRequest): @@ -20625,10 +20786,13 @@ def test_list_routers_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.EdgeNetworkRestInterceptor, "post_list_routers" ) as post, mock.patch.object( + transports.EdgeNetworkRestInterceptor, "post_list_routers_with_metadata" + ) 
as post_with_metadata, mock.patch.object( transports.EdgeNetworkRestInterceptor, "pre_list_routers" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.ListRoutersRequest.pb(service.ListRoutersRequest()) transcode.return_value = { "method": "post", @@ -20652,6 +20816,7 @@ def test_list_routers_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = service.ListRoutersResponse() + post_with_metadata.return_value = service.ListRoutersResponse(), metadata client.list_routers( request, @@ -20663,6 +20828,7 @@ def test_list_routers_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_router_rest_bad_request(request_type=service.GetRouterRequest): @@ -20757,10 +20923,13 @@ def test_get_router_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.EdgeNetworkRestInterceptor, "post_get_router" ) as post, mock.patch.object( + transports.EdgeNetworkRestInterceptor, "post_get_router_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.EdgeNetworkRestInterceptor, "pre_get_router" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.GetRouterRequest.pb(service.GetRouterRequest()) transcode.return_value = { "method": "post", @@ -20782,6 +20951,7 @@ def test_get_router_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.Router() + post_with_metadata.return_value = resources.Router(), metadata client.get_router( request, @@ -20793,6 +20963,7 @@ def test_get_router_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_diagnose_router_rest_bad_request(request_type=service.DiagnoseRouterRequest): @@ -20876,10 +21047,13 @@ def test_diagnose_router_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.EdgeNetworkRestInterceptor, "post_diagnose_router" ) as post, mock.patch.object( + transports.EdgeNetworkRestInterceptor, "post_diagnose_router_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.EdgeNetworkRestInterceptor, "pre_diagnose_router" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.DiagnoseRouterRequest.pb(service.DiagnoseRouterRequest()) transcode.return_value = { "method": "post", @@ -20903,6 +21077,7 @@ def test_diagnose_router_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = service.DiagnoseRouterResponse() + post_with_metadata.return_value = service.DiagnoseRouterResponse(), metadata client.diagnose_router( request, @@ -20914,6 +21089,7 @@ def test_diagnose_router_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_router_rest_bad_request(request_type=service.CreateRouterRequest): @@ -21098,10 +21274,13 @@ def test_create_router_rest_interceptors(null_interceptor): ), mock.patch.object( transports.EdgeNetworkRestInterceptor, "post_create_router" ) as post, mock.patch.object( + transports.EdgeNetworkRestInterceptor, "post_create_router_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.EdgeNetworkRestInterceptor, "pre_create_router" ) as pre: pre.assert_not_called() 
post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.CreateRouterRequest.pb(service.CreateRouterRequest()) transcode.return_value = { "method": "post", @@ -21123,6 +21302,7 @@ def test_create_router_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_router( request, @@ -21134,6 +21314,7 @@ def test_create_router_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_router_rest_bad_request(request_type=service.UpdateRouterRequest): @@ -21326,10 +21507,13 @@ def test_update_router_rest_interceptors(null_interceptor): ), mock.patch.object( transports.EdgeNetworkRestInterceptor, "post_update_router" ) as post, mock.patch.object( + transports.EdgeNetworkRestInterceptor, "post_update_router_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.EdgeNetworkRestInterceptor, "pre_update_router" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.UpdateRouterRequest.pb(service.UpdateRouterRequest()) transcode.return_value = { "method": "post", @@ -21351,6 +21535,7 @@ def test_update_router_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.update_router( request, @@ -21362,6 +21547,7 @@ def test_update_router_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_router_rest_bad_request(request_type=service.DeleteRouterRequest): @@ -21444,10 +21630,13 @@ def test_delete_router_rest_interceptors(null_interceptor): ), mock.patch.object( transports.EdgeNetworkRestInterceptor, "post_delete_router" ) as post, mock.patch.object( + transports.EdgeNetworkRestInterceptor, "post_delete_router_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.EdgeNetworkRestInterceptor, "pre_delete_router" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.DeleteRouterRequest.pb(service.DeleteRouterRequest()) transcode.return_value = { "method": "post", @@ -21469,6 +21658,7 @@ def test_delete_router_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.delete_router( request, @@ -21480,6 +21670,7 @@ def test_delete_router_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationRequest): diff --git a/packages/google-cloud-enterpriseknowledgegraph/CHANGELOG.md b/packages/google-cloud-enterpriseknowledgegraph/CHANGELOG.md index d6e1f5dd9539..df19180a1acf 100644 --- a/packages/google-cloud-enterpriseknowledgegraph/CHANGELOG.md +++ b/packages/google-cloud-enterpriseknowledgegraph/CHANGELOG.md @@ -1,5 +1,13 @@ # Changelog +## [0.3.15](https://github.com/googleapis/google-cloud-python/compare/google-cloud-enterpriseknowledgegraph-v0.3.14...google-cloud-enterpriseknowledgegraph-v0.3.15) (2025-02-12) + + +### Features + +* Add REST Interceptors 
which support reading metadata ([e92d527](https://github.com/googleapis/google-cloud-python/commit/e92d52797ffbce45d033eb81af24e0cad32baa55)) +* Add support for reading selective GAPIC generation methods from service YAML ([e92d527](https://github.com/googleapis/google-cloud-python/commit/e92d52797ffbce45d033eb81af24e0cad32baa55)) + ## [0.3.14](https://github.com/googleapis/google-cloud-python/compare/google-cloud-enterpriseknowledgegraph-v0.3.13...google-cloud-enterpriseknowledgegraph-v0.3.14) (2024-12-12) diff --git a/packages/google-cloud-enterpriseknowledgegraph/google/cloud/enterpriseknowledgegraph/gapic_version.py b/packages/google-cloud-enterpriseknowledgegraph/google/cloud/enterpriseknowledgegraph/gapic_version.py index 0106eadcd8d9..7d28791e7569 100644 --- a/packages/google-cloud-enterpriseknowledgegraph/google/cloud/enterpriseknowledgegraph/gapic_version.py +++ b/packages/google-cloud-enterpriseknowledgegraph/google/cloud/enterpriseknowledgegraph/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.3.14" # {x-release-please-version} +__version__ = "0.3.15" # {x-release-please-version} diff --git a/packages/google-cloud-enterpriseknowledgegraph/google/cloud/enterpriseknowledgegraph_v1/gapic_version.py b/packages/google-cloud-enterpriseknowledgegraph/google/cloud/enterpriseknowledgegraph_v1/gapic_version.py index 0106eadcd8d9..7d28791e7569 100644 --- a/packages/google-cloud-enterpriseknowledgegraph/google/cloud/enterpriseknowledgegraph_v1/gapic_version.py +++ b/packages/google-cloud-enterpriseknowledgegraph/google/cloud/enterpriseknowledgegraph_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.3.14" # {x-release-please-version} +__version__ = "0.3.15" # {x-release-please-version} diff --git a/packages/google-cloud-enterpriseknowledgegraph/google/cloud/enterpriseknowledgegraph_v1/services/enterprise_knowledge_graph_service/client.py b/packages/google-cloud-enterpriseknowledgegraph/google/cloud/enterpriseknowledgegraph_v1/services/enterprise_knowledge_graph_service/client.py index 209d1b899e30..5f8b9d2d40f5 100644 --- a/packages/google-cloud-enterpriseknowledgegraph/google/cloud/enterpriseknowledgegraph_v1/services/enterprise_knowledge_graph_service/client.py +++ b/packages/google-cloud-enterpriseknowledgegraph/google/cloud/enterpriseknowledgegraph_v1/services/enterprise_knowledge_graph_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -583,6 +585,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. 
+ """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. diff --git a/packages/google-cloud-enterpriseknowledgegraph/google/cloud/enterpriseknowledgegraph_v1/services/enterprise_knowledge_graph_service/transports/rest.py b/packages/google-cloud-enterpriseknowledgegraph/google/cloud/enterpriseknowledgegraph_v1/services/enterprise_knowledge_graph_service/transports/rest.py index 2df11d1722ab..3a5fa42d565e 100644 --- a/packages/google-cloud-enterpriseknowledgegraph/google/cloud/enterpriseknowledgegraph_v1/services/enterprise_knowledge_graph_service/transports/rest.py +++ b/packages/google-cloud-enterpriseknowledgegraph/google/cloud/enterpriseknowledgegraph_v1/services/enterprise_knowledge_graph_service/transports/rest.py @@ -175,12 +175,37 @@ def post_create_entity_reconciliation_job( ) -> service.EntityReconciliationJob: """Post-rpc interceptor for create_entity_reconciliation_job - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_entity_reconciliation_job_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the EnterpriseKnowledgeGraphService server but before - it is returned to user code. + it is returned to user code. This `post_create_entity_reconciliation_job` interceptor runs + before the `post_create_entity_reconciliation_job_with_metadata` interceptor. """ return response + def post_create_entity_reconciliation_job_with_metadata( + self, + response: service.EntityReconciliationJob, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + service.EntityReconciliationJob, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for create_entity_reconciliation_job + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the EnterpriseKnowledgeGraphService server but before it is returned to user code. + + We recommend only using this `post_create_entity_reconciliation_job_with_metadata` + interceptor in new development instead of the `post_create_entity_reconciliation_job` interceptor. + When both interceptors are used, this `post_create_entity_reconciliation_job_with_metadata` interceptor runs after the + `post_create_entity_reconciliation_job` interceptor. The (possibly modified) response returned by + `post_create_entity_reconciliation_job` will be passed to + `post_create_entity_reconciliation_job_with_metadata`. + """ + return response, metadata + def pre_delete_entity_reconciliation_job( self, request: service.DeleteEntityReconciliationJobRequest, @@ -216,12 +241,37 @@ def post_get_entity_reconciliation_job( ) -> service.EntityReconciliationJob: """Post-rpc interceptor for get_entity_reconciliation_job - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_entity_reconciliation_job_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response after it is returned by the EnterpriseKnowledgeGraphService server but before - it is returned to user code. + it is returned to user code. This `post_get_entity_reconciliation_job` interceptor runs + before the `post_get_entity_reconciliation_job_with_metadata` interceptor. """ return response + def post_get_entity_reconciliation_job_with_metadata( + self, + response: service.EntityReconciliationJob, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + service.EntityReconciliationJob, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for get_entity_reconciliation_job + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the EnterpriseKnowledgeGraphService server but before it is returned to user code. + + We recommend only using this `post_get_entity_reconciliation_job_with_metadata` + interceptor in new development instead of the `post_get_entity_reconciliation_job` interceptor. + When both interceptors are used, this `post_get_entity_reconciliation_job_with_metadata` interceptor runs after the + `post_get_entity_reconciliation_job` interceptor. The (possibly modified) response returned by + `post_get_entity_reconciliation_job` will be passed to + `post_get_entity_reconciliation_job_with_metadata`. + """ + return response, metadata + def pre_list_entity_reconciliation_jobs( self, request: service.ListEntityReconciliationJobsRequest, @@ -242,12 +292,38 @@ def post_list_entity_reconciliation_jobs( ) -> service.ListEntityReconciliationJobsResponse: """Post-rpc interceptor for list_entity_reconciliation_jobs - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_entity_reconciliation_jobs_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the EnterpriseKnowledgeGraphService server but before - it is returned to user code. + it is returned to user code. This `post_list_entity_reconciliation_jobs` interceptor runs + before the `post_list_entity_reconciliation_jobs_with_metadata` interceptor. """ return response + def post_list_entity_reconciliation_jobs_with_metadata( + self, + response: service.ListEntityReconciliationJobsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + service.ListEntityReconciliationJobsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_entity_reconciliation_jobs + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the EnterpriseKnowledgeGraphService server but before it is returned to user code. + + We recommend only using this `post_list_entity_reconciliation_jobs_with_metadata` + interceptor in new development instead of the `post_list_entity_reconciliation_jobs` interceptor. + When both interceptors are used, this `post_list_entity_reconciliation_jobs_with_metadata` interceptor runs after the + `post_list_entity_reconciliation_jobs` interceptor. The (possibly modified) response returned by + `post_list_entity_reconciliation_jobs` will be passed to + `post_list_entity_reconciliation_jobs_with_metadata`. 
+ """ + return response, metadata + def pre_lookup( self, request: service.LookupRequest, @@ -263,12 +339,35 @@ def pre_lookup( def post_lookup(self, response: service.LookupResponse) -> service.LookupResponse: """Post-rpc interceptor for lookup - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_lookup_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the EnterpriseKnowledgeGraphService server but before - it is returned to user code. + it is returned to user code. This `post_lookup` interceptor runs + before the `post_lookup_with_metadata` interceptor. """ return response + def post_lookup_with_metadata( + self, + response: service.LookupResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[service.LookupResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for lookup + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the EnterpriseKnowledgeGraphService server but before it is returned to user code. + + We recommend only using this `post_lookup_with_metadata` + interceptor in new development instead of the `post_lookup` interceptor. + When both interceptors are used, this `post_lookup_with_metadata` interceptor runs after the + `post_lookup` interceptor. The (possibly modified) response returned by + `post_lookup` will be passed to + `post_lookup_with_metadata`. + """ + return response, metadata + def pre_lookup_public_kg( self, request: service.LookupPublicKgRequest, @@ -286,12 +385,35 @@ def post_lookup_public_kg( ) -> service.LookupPublicKgResponse: """Post-rpc interceptor for lookup_public_kg - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_lookup_public_kg_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the EnterpriseKnowledgeGraphService server but before - it is returned to user code. + it is returned to user code. This `post_lookup_public_kg` interceptor runs + before the `post_lookup_public_kg_with_metadata` interceptor. """ return response + def post_lookup_public_kg_with_metadata( + self, + response: service.LookupPublicKgResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[service.LookupPublicKgResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for lookup_public_kg + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the EnterpriseKnowledgeGraphService server but before it is returned to user code. + + We recommend only using this `post_lookup_public_kg_with_metadata` + interceptor in new development instead of the `post_lookup_public_kg` interceptor. + When both interceptors are used, this `post_lookup_public_kg_with_metadata` interceptor runs after the + `post_lookup_public_kg` interceptor. The (possibly modified) response returned by + `post_lookup_public_kg` will be passed to + `post_lookup_public_kg_with_metadata`. + """ + return response, metadata + def pre_search( self, request: service.SearchRequest, @@ -307,12 +429,35 @@ def pre_search( def post_search(self, response: service.SearchResponse) -> service.SearchResponse: """Post-rpc interceptor for search - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_search_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response after it is returned by the EnterpriseKnowledgeGraphService server but before - it is returned to user code. + it is returned to user code. This `post_search` interceptor runs + before the `post_search_with_metadata` interceptor. """ return response + def post_search_with_metadata( + self, + response: service.SearchResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[service.SearchResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for search + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the EnterpriseKnowledgeGraphService server but before it is returned to user code. + + We recommend only using this `post_search_with_metadata` + interceptor in new development instead of the `post_search` interceptor. + When both interceptors are used, this `post_search_with_metadata` interceptor runs after the + `post_search` interceptor. The (possibly modified) response returned by + `post_search` will be passed to + `post_search_with_metadata`. + """ + return response, metadata + def pre_search_public_kg( self, request: service.SearchPublicKgRequest, @@ -330,12 +475,35 @@ def post_search_public_kg( ) -> service.SearchPublicKgResponse: """Post-rpc interceptor for search_public_kg - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_search_public_kg_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the EnterpriseKnowledgeGraphService server but before - it is returned to user code. + it is returned to user code. This `post_search_public_kg` interceptor runs + before the `post_search_public_kg_with_metadata` interceptor. """ return response + def post_search_public_kg_with_metadata( + self, + response: service.SearchPublicKgResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[service.SearchPublicKgResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for search_public_kg + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the EnterpriseKnowledgeGraphService server but before it is returned to user code. + + We recommend only using this `post_search_public_kg_with_metadata` + interceptor in new development instead of the `post_search_public_kg` interceptor. + When both interceptors are used, this `post_search_public_kg_with_metadata` interceptor runs after the + `post_search_public_kg` interceptor. The (possibly modified) response returned by + `post_search_public_kg` will be passed to + `post_search_public_kg_with_metadata`. 
+ """ + return response, metadata + @dataclasses.dataclass class EnterpriseKnowledgeGraphServiceRestStub: @@ -676,6 +844,13 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_entity_reconciliation_job(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_create_entity_reconciliation_job_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -936,6 +1111,13 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_entity_reconciliation_job(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_get_entity_reconciliation_job_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1085,6 +1267,13 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_entity_reconciliation_jobs(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_list_entity_reconciliation_jobs_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1233,6 +1422,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_lookup(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_lookup_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1379,6 +1572,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_lookup_public_kg(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_lookup_public_kg_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1525,6 +1722,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_search(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_search_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1671,6 +1872,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_search_public_kg(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_search_public_kg_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-enterpriseknowledgegraph/samples/generated_samples/snippet_metadata_google.cloud.enterpriseknowledgegraph.v1.json b/packages/google-cloud-enterpriseknowledgegraph/samples/generated_samples/snippet_metadata_google.cloud.enterpriseknowledgegraph.v1.json index 4e07ff14c814..9902f695486b 100644 --- 
a/packages/google-cloud-enterpriseknowledgegraph/samples/generated_samples/snippet_metadata_google.cloud.enterpriseknowledgegraph.v1.json +++ b/packages/google-cloud-enterpriseknowledgegraph/samples/generated_samples/snippet_metadata_google.cloud.enterpriseknowledgegraph.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-enterpriseknowledgegraph", - "version": "0.3.14" + "version": "0.3.15" }, "snippets": [ { diff --git a/packages/google-cloud-enterpriseknowledgegraph/tests/unit/gapic/enterpriseknowledgegraph_v1/test_enterprise_knowledge_graph_service.py b/packages/google-cloud-enterpriseknowledgegraph/tests/unit/gapic/enterpriseknowledgegraph_v1/test_enterprise_knowledge_graph_service.py index b5e9bfca2832..4968b334a5b3 100644 --- a/packages/google-cloud-enterpriseknowledgegraph/tests/unit/gapic/enterpriseknowledgegraph_v1/test_enterprise_knowledge_graph_service.py +++ b/packages/google-cloud-enterpriseknowledgegraph/tests/unit/gapic/enterpriseknowledgegraph_v1/test_enterprise_knowledge_graph_service.py @@ -66,6 +66,13 @@ ) from google.cloud.enterpriseknowledgegraph_v1.types import job_state, service +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -360,6 +367,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = EnterpriseKnowledgeGraphServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = EnterpriseKnowledgeGraphServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -7035,11 +7085,15 @@ def test_create_entity_reconciliation_job_rest_interceptors(null_interceptor): transports.EnterpriseKnowledgeGraphServiceRestInterceptor, "post_create_entity_reconciliation_job", ) as post, mock.patch.object( + transports.EnterpriseKnowledgeGraphServiceRestInterceptor, + "post_create_entity_reconciliation_job_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.EnterpriseKnowledgeGraphServiceRestInterceptor, "pre_create_entity_reconciliation_job", ) as pre: pre.assert_not_called() post.assert_not_called() + 
post_with_metadata.assert_not_called() pb_message = service.CreateEntityReconciliationJobRequest.pb( service.CreateEntityReconciliationJobRequest() ) @@ -7065,6 +7119,7 @@ def test_create_entity_reconciliation_job_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = service.EntityReconciliationJob() + post_with_metadata.return_value = service.EntityReconciliationJob(), metadata client.create_entity_reconciliation_job( request, @@ -7076,6 +7131,7 @@ def test_create_entity_reconciliation_job_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_entity_reconciliation_job_rest_bad_request( @@ -7167,11 +7223,15 @@ def test_get_entity_reconciliation_job_rest_interceptors(null_interceptor): transports.EnterpriseKnowledgeGraphServiceRestInterceptor, "post_get_entity_reconciliation_job", ) as post, mock.patch.object( + transports.EnterpriseKnowledgeGraphServiceRestInterceptor, + "post_get_entity_reconciliation_job_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.EnterpriseKnowledgeGraphServiceRestInterceptor, "pre_get_entity_reconciliation_job", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.GetEntityReconciliationJobRequest.pb( service.GetEntityReconciliationJobRequest() ) @@ -7197,6 +7257,7 @@ def test_get_entity_reconciliation_job_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = service.EntityReconciliationJob() + post_with_metadata.return_value = service.EntityReconciliationJob(), metadata client.get_entity_reconciliation_job( request, @@ -7208,6 +7269,7 @@ def test_get_entity_reconciliation_job_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_entity_reconciliation_jobs_rest_bad_request( @@ -7293,11 +7355,15 @@ def test_list_entity_reconciliation_jobs_rest_interceptors(null_interceptor): transports.EnterpriseKnowledgeGraphServiceRestInterceptor, "post_list_entity_reconciliation_jobs", ) as post, mock.patch.object( + transports.EnterpriseKnowledgeGraphServiceRestInterceptor, + "post_list_entity_reconciliation_jobs_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.EnterpriseKnowledgeGraphServiceRestInterceptor, "pre_list_entity_reconciliation_jobs", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.ListEntityReconciliationJobsRequest.pb( service.ListEntityReconciliationJobsRequest() ) @@ -7323,6 +7389,10 @@ def test_list_entity_reconciliation_jobs_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = service.ListEntityReconciliationJobsResponse() + post_with_metadata.return_value = ( + service.ListEntityReconciliationJobsResponse(), + metadata, + ) client.list_entity_reconciliation_jobs( request, @@ -7334,6 +7404,7 @@ def test_list_entity_reconciliation_jobs_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_cancel_entity_reconciliation_job_rest_bad_request( @@ -7641,10 +7712,14 @@ def test_lookup_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.EnterpriseKnowledgeGraphServiceRestInterceptor, "post_lookup" ) as post, mock.patch.object( + 
transports.EnterpriseKnowledgeGraphServiceRestInterceptor, + "post_lookup_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.EnterpriseKnowledgeGraphServiceRestInterceptor, "pre_lookup" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.LookupRequest.pb(service.LookupRequest()) transcode.return_value = { "method": "post", @@ -7666,6 +7741,7 @@ def test_lookup_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = service.LookupResponse() + post_with_metadata.return_value = service.LookupResponse(), metadata client.lookup( request, @@ -7677,6 +7753,7 @@ def test_lookup_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_search_rest_bad_request(request_type=service.SearchRequest): @@ -7756,10 +7833,14 @@ def test_search_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.EnterpriseKnowledgeGraphServiceRestInterceptor, "post_search" ) as post, mock.patch.object( + transports.EnterpriseKnowledgeGraphServiceRestInterceptor, + "post_search_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.EnterpriseKnowledgeGraphServiceRestInterceptor, "pre_search" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.SearchRequest.pb(service.SearchRequest()) transcode.return_value = { "method": "post", @@ -7781,6 +7862,7 @@ def test_search_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = service.SearchResponse() + post_with_metadata.return_value = service.SearchResponse(), metadata client.search( request, @@ -7792,6 +7874,7 @@ def test_search_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_lookup_public_kg_rest_bad_request(request_type=service.LookupPublicKgRequest): @@ -7872,11 +7955,15 @@ def test_lookup_public_kg_rest_interceptors(null_interceptor): transports.EnterpriseKnowledgeGraphServiceRestInterceptor, "post_lookup_public_kg", ) as post, mock.patch.object( + transports.EnterpriseKnowledgeGraphServiceRestInterceptor, + "post_lookup_public_kg_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.EnterpriseKnowledgeGraphServiceRestInterceptor, "pre_lookup_public_kg", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.LookupPublicKgRequest.pb(service.LookupPublicKgRequest()) transcode.return_value = { "method": "post", @@ -7900,6 +7987,7 @@ def test_lookup_public_kg_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = service.LookupPublicKgResponse() + post_with_metadata.return_value = service.LookupPublicKgResponse(), metadata client.lookup_public_kg( request, @@ -7911,6 +7999,7 @@ def test_lookup_public_kg_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_search_public_kg_rest_bad_request(request_type=service.SearchPublicKgRequest): @@ -7991,11 +8080,15 @@ def test_search_public_kg_rest_interceptors(null_interceptor): transports.EnterpriseKnowledgeGraphServiceRestInterceptor, "post_search_public_kg", ) as post, mock.patch.object( + transports.EnterpriseKnowledgeGraphServiceRestInterceptor, + 
"post_search_public_kg_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.EnterpriseKnowledgeGraphServiceRestInterceptor, "pre_search_public_kg", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.SearchPublicKgRequest.pb(service.SearchPublicKgRequest()) transcode.return_value = { "method": "post", @@ -8019,6 +8112,7 @@ def test_search_public_kg_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = service.SearchPublicKgResponse() + post_with_metadata.return_value = service.SearchPublicKgResponse(), metadata client.search_public_kg( request, @@ -8030,6 +8124,7 @@ def test_search_public_kg_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_initialize_client_w_rest(): diff --git a/packages/google-cloud-essential-contacts/CHANGELOG.md b/packages/google-cloud-essential-contacts/CHANGELOG.md index 6a47e17ac529..3a037d1a50b1 100644 --- a/packages/google-cloud-essential-contacts/CHANGELOG.md +++ b/packages/google-cloud-essential-contacts/CHANGELOG.md @@ -1,5 +1,13 @@ # Changelog +## [1.10.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-essential-contacts-v1.9.0...google-cloud-essential-contacts-v1.10.0) (2025-02-12) + + +### Features + +* Add REST Interceptors which support reading metadata ([908d742](https://github.com/googleapis/google-cloud-python/commit/908d7421a4adadd7407df7ec2a25e25688ff180f)) +* Add support for reading selective GAPIC generation methods from service YAML ([908d742](https://github.com/googleapis/google-cloud-python/commit/908d7421a4adadd7407df7ec2a25e25688ff180f)) + ## [1.9.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-essential-contacts-v1.8.1...google-cloud-essential-contacts-v1.9.0) (2024-12-12) diff --git a/packages/google-cloud-essential-contacts/google/cloud/essential_contacts/gapic_version.py b/packages/google-cloud-essential-contacts/google/cloud/essential_contacts/gapic_version.py index 1c08bcbd1569..d1d2a9e60a97 100644 --- a/packages/google-cloud-essential-contacts/google/cloud/essential_contacts/gapic_version.py +++ b/packages/google-cloud-essential-contacts/google/cloud/essential_contacts/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.9.0" # {x-release-please-version} +__version__ = "1.10.0" # {x-release-please-version} diff --git a/packages/google-cloud-essential-contacts/google/cloud/essential_contacts_v1/gapic_version.py b/packages/google-cloud-essential-contacts/google/cloud/essential_contacts_v1/gapic_version.py index 1c08bcbd1569..d1d2a9e60a97 100644 --- a/packages/google-cloud-essential-contacts/google/cloud/essential_contacts_v1/gapic_version.py +++ b/packages/google-cloud-essential-contacts/google/cloud/essential_contacts_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "1.9.0" # {x-release-please-version} +__version__ = "1.10.0" # {x-release-please-version} diff --git a/packages/google-cloud-essential-contacts/google/cloud/essential_contacts_v1/services/essential_contacts_service/client.py b/packages/google-cloud-essential-contacts/google/cloud/essential_contacts_v1/services/essential_contacts_service/client.py index a6285d076150..698b0d5817df 100644 --- a/packages/google-cloud-essential-contacts/google/cloud/essential_contacts_v1/services/essential_contacts_service/client.py +++ b/packages/google-cloud-essential-contacts/google/cloud/essential_contacts_v1/services/essential_contacts_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -486,6 +488,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. diff --git a/packages/google-cloud-essential-contacts/google/cloud/essential_contacts_v1/services/essential_contacts_service/transports/rest.py b/packages/google-cloud-essential-contacts/google/cloud/essential_contacts_v1/services/essential_contacts_service/transports/rest.py index 26bd3a41ab62..aec636750405 100644 --- a/packages/google-cloud-essential-contacts/google/cloud/essential_contacts_v1/services/essential_contacts_service/transports/rest.py +++ b/packages/google-cloud-essential-contacts/google/cloud/essential_contacts_v1/services/essential_contacts_service/transports/rest.py @@ -140,12 +140,37 @@ def post_compute_contacts( ) -> service.ComputeContactsResponse: """Post-rpc interceptor for compute_contacts - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_compute_contacts_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the EssentialContactsService server but before - it is returned to user code. + it is returned to user code. This `post_compute_contacts` interceptor runs + before the `post_compute_contacts_with_metadata` interceptor. """ return response + def post_compute_contacts_with_metadata( + self, + response: service.ComputeContactsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + service.ComputeContactsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for compute_contacts + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the EssentialContactsService server but before it is returned to user code. 
+ + We recommend only using this `post_compute_contacts_with_metadata` + interceptor in new development instead of the `post_compute_contacts` interceptor. + When both interceptors are used, this `post_compute_contacts_with_metadata` interceptor runs after the + `post_compute_contacts` interceptor. The (possibly modified) response returned by + `post_compute_contacts` will be passed to + `post_compute_contacts_with_metadata`. + """ + return response, metadata + def pre_create_contact( self, request: service.CreateContactRequest, @@ -161,12 +186,35 @@ def pre_create_contact( def post_create_contact(self, response: service.Contact) -> service.Contact: """Post-rpc interceptor for create_contact - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_contact_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the EssentialContactsService server but before - it is returned to user code. + it is returned to user code. This `post_create_contact` interceptor runs + before the `post_create_contact_with_metadata` interceptor. """ return response + def post_create_contact_with_metadata( + self, + response: service.Contact, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[service.Contact, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_contact + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the EssentialContactsService server but before it is returned to user code. + + We recommend only using this `post_create_contact_with_metadata` + interceptor in new development instead of the `post_create_contact` interceptor. + When both interceptors are used, this `post_create_contact_with_metadata` interceptor runs after the + `post_create_contact` interceptor. The (possibly modified) response returned by + `post_create_contact` will be passed to + `post_create_contact_with_metadata`. + """ + return response, metadata + def pre_delete_contact( self, request: service.DeleteContactRequest, @@ -194,12 +242,35 @@ def pre_get_contact( def post_get_contact(self, response: service.Contact) -> service.Contact: """Post-rpc interceptor for get_contact - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_contact_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the EssentialContactsService server but before - it is returned to user code. + it is returned to user code. This `post_get_contact` interceptor runs + before the `post_get_contact_with_metadata` interceptor. """ return response + def post_get_contact_with_metadata( + self, + response: service.Contact, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[service.Contact, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_contact + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the EssentialContactsService server but before it is returned to user code. + + We recommend only using this `post_get_contact_with_metadata` + interceptor in new development instead of the `post_get_contact` interceptor. + When both interceptors are used, this `post_get_contact_with_metadata` interceptor runs after the + `post_get_contact` interceptor. The (possibly modified) response returned by + `post_get_contact` will be passed to + `post_get_contact_with_metadata`. 
+ """ + return response, metadata + def pre_list_contacts( self, request: service.ListContactsRequest, @@ -217,12 +288,35 @@ def post_list_contacts( ) -> service.ListContactsResponse: """Post-rpc interceptor for list_contacts - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_contacts_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the EssentialContactsService server but before - it is returned to user code. + it is returned to user code. This `post_list_contacts` interceptor runs + before the `post_list_contacts_with_metadata` interceptor. """ return response + def post_list_contacts_with_metadata( + self, + response: service.ListContactsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[service.ListContactsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_contacts + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the EssentialContactsService server but before it is returned to user code. + + We recommend only using this `post_list_contacts_with_metadata` + interceptor in new development instead of the `post_list_contacts` interceptor. + When both interceptors are used, this `post_list_contacts_with_metadata` interceptor runs after the + `post_list_contacts` interceptor. The (possibly modified) response returned by + `post_list_contacts` will be passed to + `post_list_contacts_with_metadata`. + """ + return response, metadata + def pre_send_test_message( self, request: service.SendTestMessageRequest, @@ -250,12 +344,35 @@ def pre_update_contact( def post_update_contact(self, response: service.Contact) -> service.Contact: """Post-rpc interceptor for update_contact - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_contact_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the EssentialContactsService server but before - it is returned to user code. + it is returned to user code. This `post_update_contact` interceptor runs + before the `post_update_contact_with_metadata` interceptor. """ return response + def post_update_contact_with_metadata( + self, + response: service.Contact, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[service.Contact, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_contact + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the EssentialContactsService server but before it is returned to user code. + + We recommend only using this `post_update_contact_with_metadata` + interceptor in new development instead of the `post_update_contact` interceptor. + When both interceptors are used, this `post_update_contact_with_metadata` interceptor runs after the + `post_update_contact` interceptor. The (possibly modified) response returned by + `post_update_contact` will be passed to + `post_update_contact_with_metadata`. 
+ """ + return response, metadata + @dataclasses.dataclass class EssentialContactsServiceRestStub: @@ -468,6 +585,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_compute_contacts(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_compute_contacts_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -620,6 +741,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_contact(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_contact_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -874,6 +999,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_contact(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_contact_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1020,6 +1149,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_contacts(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_contacts_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1290,6 +1423,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_contact(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_contact_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-essential-contacts/samples/generated_samples/snippet_metadata_google.cloud.essentialcontacts.v1.json b/packages/google-cloud-essential-contacts/samples/generated_samples/snippet_metadata_google.cloud.essentialcontacts.v1.json index 0c856a9a6e6c..950b2f19aff5 100644 --- a/packages/google-cloud-essential-contacts/samples/generated_samples/snippet_metadata_google.cloud.essentialcontacts.v1.json +++ b/packages/google-cloud-essential-contacts/samples/generated_samples/snippet_metadata_google.cloud.essentialcontacts.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-essential-contacts", - "version": "1.9.0" + "version": "1.10.0" }, "snippets": [ { diff --git a/packages/google-cloud-essential-contacts/tests/unit/gapic/essential_contacts_v1/test_essential_contacts_service.py b/packages/google-cloud-essential-contacts/tests/unit/gapic/essential_contacts_v1/test_essential_contacts_service.py index 641bd3bb6974..ace4985938b0 100644 --- a/packages/google-cloud-essential-contacts/tests/unit/gapic/essential_contacts_v1/test_essential_contacts_service.py +++ b/packages/google-cloud-essential-contacts/tests/unit/gapic/essential_contacts_v1/test_essential_contacts_service.py @@ -62,6 +62,13 @@ ) from google.cloud.essential_contacts_v1.types import enums, service +CRED_INFO_JSON = { + "credential_source": 
"/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -337,6 +344,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = EssentialContactsServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = EssentialContactsServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -5599,10 +5649,14 @@ def test_create_contact_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.EssentialContactsServiceRestInterceptor, "post_create_contact" ) as post, mock.patch.object( + transports.EssentialContactsServiceRestInterceptor, + "post_create_contact_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.EssentialContactsServiceRestInterceptor, "pre_create_contact" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.CreateContactRequest.pb(service.CreateContactRequest()) transcode.return_value = { "method": "post", @@ -5624,6 +5678,7 @@ def test_create_contact_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = service.Contact() + post_with_metadata.return_value = service.Contact(), metadata client.create_contact( request, @@ -5635,6 +5690,7 @@ def test_create_contact_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_contact_rest_bad_request(request_type=service.UpdateContactRequest): @@ -5802,10 +5858,14 @@ def test_update_contact_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.EssentialContactsServiceRestInterceptor, "post_update_contact" ) as post, mock.patch.object( + transports.EssentialContactsServiceRestInterceptor, + "post_update_contact_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.EssentialContactsServiceRestInterceptor, "pre_update_contact" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = 
service.UpdateContactRequest.pb(service.UpdateContactRequest()) transcode.return_value = { "method": "post", @@ -5827,6 +5887,7 @@ def test_update_contact_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = service.Contact() + post_with_metadata.return_value = service.Contact(), metadata client.update_contact( request, @@ -5838,6 +5899,7 @@ def test_update_contact_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_contacts_rest_bad_request(request_type=service.ListContactsRequest): @@ -5920,10 +5982,14 @@ def test_list_contacts_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.EssentialContactsServiceRestInterceptor, "post_list_contacts" ) as post, mock.patch.object( + transports.EssentialContactsServiceRestInterceptor, + "post_list_contacts_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.EssentialContactsServiceRestInterceptor, "pre_list_contacts" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.ListContactsRequest.pb(service.ListContactsRequest()) transcode.return_value = { "method": "post", @@ -5947,6 +6013,7 @@ def test_list_contacts_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = service.ListContactsResponse() + post_with_metadata.return_value = service.ListContactsResponse(), metadata client.list_contacts( request, @@ -5958,6 +6025,7 @@ def test_list_contacts_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_contact_rest_bad_request(request_type=service.GetContactRequest): @@ -6050,10 +6118,14 @@ def test_get_contact_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.EssentialContactsServiceRestInterceptor, "post_get_contact" ) as post, mock.patch.object( + transports.EssentialContactsServiceRestInterceptor, + "post_get_contact_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.EssentialContactsServiceRestInterceptor, "pre_get_contact" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.GetContactRequest.pb(service.GetContactRequest()) transcode.return_value = { "method": "post", @@ -6075,6 +6147,7 @@ def test_get_contact_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = service.Contact() + post_with_metadata.return_value = service.Contact(), metadata client.get_contact( request, @@ -6086,6 +6159,7 @@ def test_get_contact_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_contact_rest_bad_request(request_type=service.DeleteContactRequest): @@ -6273,10 +6347,14 @@ def test_compute_contacts_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.EssentialContactsServiceRestInterceptor, "post_compute_contacts" ) as post, mock.patch.object( + transports.EssentialContactsServiceRestInterceptor, + "post_compute_contacts_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.EssentialContactsServiceRestInterceptor, "pre_compute_contacts" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = 
service.ComputeContactsRequest.pb(service.ComputeContactsRequest()) transcode.return_value = { "method": "post", @@ -6300,6 +6378,7 @@ def test_compute_contacts_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = service.ComputeContactsResponse() + post_with_metadata.return_value = service.ComputeContactsResponse(), metadata client.compute_contacts( request, @@ -6311,6 +6390,7 @@ def test_compute_contacts_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_send_test_message_rest_bad_request( diff --git a/packages/google-cloud-eventarc-publishing/CHANGELOG.md b/packages/google-cloud-eventarc-publishing/CHANGELOG.md index 9746329fa9c9..28c1aaf4be47 100644 --- a/packages/google-cloud-eventarc-publishing/CHANGELOG.md +++ b/packages/google-cloud-eventarc-publishing/CHANGELOG.md @@ -1,5 +1,13 @@ # Changelog +## [0.6.16](https://github.com/googleapis/google-cloud-python/compare/google-cloud-eventarc-publishing-v0.6.15...google-cloud-eventarc-publishing-v0.6.16) (2025-02-12) + + +### Features + +* Add REST Interceptors which support reading metadata ([908d742](https://github.com/googleapis/google-cloud-python/commit/908d7421a4adadd7407df7ec2a25e25688ff180f)) +* Add support for reading selective GAPIC generation methods from service YAML ([908d742](https://github.com/googleapis/google-cloud-python/commit/908d7421a4adadd7407df7ec2a25e25688ff180f)) + ## [0.6.15](https://github.com/googleapis/google-cloud-python/compare/google-cloud-eventarc-publishing-v0.6.14...google-cloud-eventarc-publishing-v0.6.15) (2024-12-12) diff --git a/packages/google-cloud-eventarc-publishing/google/cloud/eventarc_publishing/gapic_version.py b/packages/google-cloud-eventarc-publishing/google/cloud/eventarc_publishing/gapic_version.py index e51340f75942..a22e7bbe7e4a 100644 --- a/packages/google-cloud-eventarc-publishing/google/cloud/eventarc_publishing/gapic_version.py +++ b/packages/google-cloud-eventarc-publishing/google/cloud/eventarc_publishing/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.6.15" # {x-release-please-version} +__version__ = "0.6.16" # {x-release-please-version} diff --git a/packages/google-cloud-eventarc-publishing/google/cloud/eventarc_publishing_v1/gapic_version.py b/packages/google-cloud-eventarc-publishing/google/cloud/eventarc_publishing_v1/gapic_version.py index e51340f75942..a22e7bbe7e4a 100644 --- a/packages/google-cloud-eventarc-publishing/google/cloud/eventarc_publishing_v1/gapic_version.py +++ b/packages/google-cloud-eventarc-publishing/google/cloud/eventarc_publishing_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.6.15" # {x-release-please-version} +__version__ = "0.6.16" # {x-release-please-version} diff --git a/packages/google-cloud-eventarc-publishing/google/cloud/eventarc_publishing_v1/services/publisher/client.py b/packages/google-cloud-eventarc-publishing/google/cloud/eventarc_publishing_v1/services/publisher/client.py index e0434e6e921d..6384fed5badc 100644 --- a/packages/google-cloud-eventarc-publishing/google/cloud/eventarc_publishing_v1/services/publisher/client.py +++ b/packages/google-cloud-eventarc-publishing/google/cloud/eventarc_publishing_v1/services/publisher/client.py @@ -14,6 +14,8 @@ # limitations under the License. 
# from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -487,6 +489,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. diff --git a/packages/google-cloud-eventarc-publishing/google/cloud/eventarc_publishing_v1/services/publisher/transports/rest.py b/packages/google-cloud-eventarc-publishing/google/cloud/eventarc_publishing_v1/services/publisher/transports/rest.py index 4157fc7276de..ad92f3d9fcd6 100644 --- a/packages/google-cloud-eventarc-publishing/google/cloud/eventarc_publishing_v1/services/publisher/transports/rest.py +++ b/packages/google-cloud-eventarc-publishing/google/cloud/eventarc_publishing_v1/services/publisher/transports/rest.py @@ -115,12 +115,35 @@ def post_publish( ) -> publisher.PublishResponse: """Post-rpc interceptor for publish - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_publish_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Publisher server but before - it is returned to user code. + it is returned to user code. This `post_publish` interceptor runs + before the `post_publish_with_metadata` interceptor. """ return response + def post_publish_with_metadata( + self, + response: publisher.PublishResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[publisher.PublishResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for publish + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Publisher server but before it is returned to user code. + + We recommend only using this `post_publish_with_metadata` + interceptor in new development instead of the `post_publish` interceptor. + When both interceptors are used, this `post_publish_with_metadata` interceptor runs after the + `post_publish` interceptor. The (possibly modified) response returned by + `post_publish` will be passed to + `post_publish_with_metadata`. + """ + return response, metadata + def pre_publish_channel_connection_events( self, request: publisher.PublishChannelConnectionEventsRequest, @@ -141,12 +164,38 @@ def post_publish_channel_connection_events( ) -> publisher.PublishChannelConnectionEventsResponse: """Post-rpc interceptor for publish_channel_connection_events - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_publish_channel_connection_events_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response after it is returned by the Publisher server but before - it is returned to user code. + it is returned to user code. This `post_publish_channel_connection_events` interceptor runs + before the `post_publish_channel_connection_events_with_metadata` interceptor. """ return response + def post_publish_channel_connection_events_with_metadata( + self, + response: publisher.PublishChannelConnectionEventsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + publisher.PublishChannelConnectionEventsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for publish_channel_connection_events + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Publisher server but before it is returned to user code. + + We recommend only using this `post_publish_channel_connection_events_with_metadata` + interceptor in new development instead of the `post_publish_channel_connection_events` interceptor. + When both interceptors are used, this `post_publish_channel_connection_events_with_metadata` interceptor runs after the + `post_publish_channel_connection_events` interceptor. The (possibly modified) response returned by + `post_publish_channel_connection_events` will be passed to + `post_publish_channel_connection_events_with_metadata`. + """ + return response, metadata + def pre_publish_events( self, request: publisher.PublishEventsRequest, @@ -164,12 +213,37 @@ def post_publish_events( ) -> publisher.PublishEventsResponse: """Post-rpc interceptor for publish_events - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_publish_events_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Publisher server but before - it is returned to user code. + it is returned to user code. This `post_publish_events` interceptor runs + before the `post_publish_events_with_metadata` interceptor. """ return response + def post_publish_events_with_metadata( + self, + response: publisher.PublishEventsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + publisher.PublishEventsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for publish_events + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Publisher server but before it is returned to user code. + + We recommend only using this `post_publish_events_with_metadata` + interceptor in new development instead of the `post_publish_events` interceptor. + When both interceptors are used, this `post_publish_events_with_metadata` interceptor runs after the + `post_publish_events` interceptor. The (possibly modified) response returned by + `post_publish_events` will be passed to + `post_publish_events_with_metadata`. 
+ """ + return response, metadata + @dataclasses.dataclass class PublisherRestStub: @@ -410,6 +484,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_publish(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_publish_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -565,6 +643,13 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_publish_channel_connection_events(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_publish_channel_connection_events_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -724,6 +809,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_publish_events(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_publish_events_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-eventarc-publishing/samples/generated_samples/snippet_metadata_google.cloud.eventarc.publishing.v1.json b/packages/google-cloud-eventarc-publishing/samples/generated_samples/snippet_metadata_google.cloud.eventarc.publishing.v1.json index 418db6cb175e..03f03623b4f5 100644 --- a/packages/google-cloud-eventarc-publishing/samples/generated_samples/snippet_metadata_google.cloud.eventarc.publishing.v1.json +++ b/packages/google-cloud-eventarc-publishing/samples/generated_samples/snippet_metadata_google.cloud.eventarc.publishing.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-eventarc-publishing", - "version": "0.6.15" + "version": "0.6.16" }, "snippets": [ { diff --git a/packages/google-cloud-eventarc-publishing/tests/unit/gapic/eventarc_publishing_v1/test_publisher.py b/packages/google-cloud-eventarc-publishing/tests/unit/gapic/eventarc_publishing_v1/test_publisher.py index 55ea48dabf99..b35827aa1335 100644 --- a/packages/google-cloud-eventarc-publishing/tests/unit/gapic/eventarc_publishing_v1/test_publisher.py +++ b/packages/google-cloud-eventarc-publishing/tests/unit/gapic/eventarc_publishing_v1/test_publisher.py @@ -61,6 +61,13 @@ ) from google.cloud.eventarc_publishing_v1.types import cloud_event, publisher +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -294,6 +301,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = PublisherClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = PublisherClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -2283,10 +2333,14 @@ def test_publish_channel_connection_events_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.PublisherRestInterceptor, "post_publish_channel_connection_events" ) as post, mock.patch.object( + transports.PublisherRestInterceptor, + "post_publish_channel_connection_events_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.PublisherRestInterceptor, "pre_publish_channel_connection_events" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = publisher.PublishChannelConnectionEventsRequest.pb( publisher.PublishChannelConnectionEventsRequest() ) @@ -2312,6 +2366,10 @@ def test_publish_channel_connection_events_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = publisher.PublishChannelConnectionEventsResponse() + post_with_metadata.return_value = ( + publisher.PublishChannelConnectionEventsResponse(), + metadata, + ) client.publish_channel_connection_events( request, @@ -2323,6 +2381,7 @@ def test_publish_channel_connection_events_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_publish_events_rest_bad_request(request_type=publisher.PublishEventsRequest): @@ -2400,10 +2459,13 @@ def test_publish_events_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.PublisherRestInterceptor, "post_publish_events" ) as post, mock.patch.object( + transports.PublisherRestInterceptor, "post_publish_events_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.PublisherRestInterceptor, "pre_publish_events" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = publisher.PublishEventsRequest.pb(publisher.PublishEventsRequest()) transcode.return_value = { "method": "post", @@ -2427,6 +2489,7 @@ def test_publish_events_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = publisher.PublishEventsResponse() + post_with_metadata.return_value = 
publisher.PublishEventsResponse(), metadata client.publish_events( request, @@ -2438,6 +2501,7 @@ def test_publish_events_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_publish_rest_bad_request(request_type=publisher.PublishRequest): @@ -2519,10 +2583,13 @@ def test_publish_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.PublisherRestInterceptor, "post_publish" ) as post, mock.patch.object( + transports.PublisherRestInterceptor, "post_publish_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.PublisherRestInterceptor, "pre_publish" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = publisher.PublishRequest.pb(publisher.PublishRequest()) transcode.return_value = { "method": "post", @@ -2544,6 +2611,7 @@ def test_publish_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = publisher.PublishResponse() + post_with_metadata.return_value = publisher.PublishResponse(), metadata client.publish( request, @@ -2555,6 +2623,7 @@ def test_publish_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_initialize_client_w_rest(): diff --git a/packages/google-cloud-eventarc/CHANGELOG.md b/packages/google-cloud-eventarc/CHANGELOG.md index fe85b0ca81c2..b05efbf7e450 100644 --- a/packages/google-cloud-eventarc/CHANGELOG.md +++ b/packages/google-cloud-eventarc/CHANGELOG.md @@ -1,5 +1,13 @@ # Changelog +## [1.15.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-eventarc-v1.14.0...google-cloud-eventarc-v1.15.0) (2025-02-12) + + +### Features + +* Add REST Interceptors which support reading metadata ([908d742](https://github.com/googleapis/google-cloud-python/commit/908d7421a4adadd7407df7ec2a25e25688ff180f)) +* Add support for reading selective GAPIC generation methods from service YAML ([908d742](https://github.com/googleapis/google-cloud-python/commit/908d7421a4adadd7407df7ec2a25e25688ff180f)) + ## [1.14.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-eventarc-v1.13.2...google-cloud-eventarc-v1.14.0) (2024-12-12) diff --git a/packages/google-cloud-eventarc/google/cloud/eventarc/gapic_version.py b/packages/google-cloud-eventarc/google/cloud/eventarc/gapic_version.py index 2159c8af6f8e..cf18a472a8a2 100644 --- a/packages/google-cloud-eventarc/google/cloud/eventarc/gapic_version.py +++ b/packages/google-cloud-eventarc/google/cloud/eventarc/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.14.0" # {x-release-please-version} +__version__ = "1.15.0" # {x-release-please-version} diff --git a/packages/google-cloud-eventarc/google/cloud/eventarc_v1/gapic_version.py b/packages/google-cloud-eventarc/google/cloud/eventarc_v1/gapic_version.py index 2159c8af6f8e..cf18a472a8a2 100644 --- a/packages/google-cloud-eventarc/google/cloud/eventarc_v1/gapic_version.py +++ b/packages/google-cloud-eventarc/google/cloud/eventarc_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "1.14.0" # {x-release-please-version} +__version__ = "1.15.0" # {x-release-please-version} diff --git a/packages/google-cloud-eventarc/google/cloud/eventarc_v1/services/eventarc/client.py b/packages/google-cloud-eventarc/google/cloud/eventarc_v1/services/eventarc/client.py index 1f319a69dfd1..77f5a09c4ec4 100644 --- a/packages/google-cloud-eventarc/google/cloud/eventarc_v1/services/eventarc/client.py +++ b/packages/google-cloud-eventarc/google/cloud/eventarc_v1/services/eventarc/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -830,6 +832,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -6030,16 +6059,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -6085,16 +6118,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def delete_operation( self, @@ -6317,16 +6354,20 @@ def set_iam_policy( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_iam_policy( self, @@ -6439,16 +6480,20 @@ def get_iam_policy( # Validate the universe domain. self._validate_universe_domain() - # Send the request. 
- response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def test_iam_permissions( self, @@ -6499,16 +6544,20 @@ def test_iam_permissions( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_location( self, @@ -6554,16 +6603,20 @@ def get_location( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def list_locations( self, @@ -6609,16 +6662,20 @@ def list_locations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-eventarc/google/cloud/eventarc_v1/services/eventarc/transports/rest.py b/packages/google-cloud-eventarc/google/cloud/eventarc_v1/services/eventarc/transports/rest.py index f0a0b6ede923..5a40d3b84910 100644 --- a/packages/google-cloud-eventarc/google/cloud/eventarc_v1/services/eventarc/transports/rest.py +++ b/packages/google-cloud-eventarc/google/cloud/eventarc_v1/services/eventarc/transports/rest.py @@ -419,12 +419,35 @@ def post_create_channel( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_channel - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_channel_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Eventarc server but before - it is returned to user code. + it is returned to user code. This `post_create_channel` interceptor runs + before the `post_create_channel_with_metadata` interceptor. 
""" return response + def post_create_channel_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_channel + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Eventarc server but before it is returned to user code. + + We recommend only using this `post_create_channel_with_metadata` + interceptor in new development instead of the `post_create_channel` interceptor. + When both interceptors are used, this `post_create_channel_with_metadata` interceptor runs after the + `post_create_channel` interceptor. The (possibly modified) response returned by + `post_create_channel` will be passed to + `post_create_channel_with_metadata`. + """ + return response, metadata + def pre_create_channel_connection( self, request: eventarc.CreateChannelConnectionRequest, @@ -444,12 +467,35 @@ def post_create_channel_connection( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_channel_connection - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_channel_connection_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Eventarc server but before - it is returned to user code. + it is returned to user code. This `post_create_channel_connection` interceptor runs + before the `post_create_channel_connection_with_metadata` interceptor. """ return response + def post_create_channel_connection_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_channel_connection + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Eventarc server but before it is returned to user code. + + We recommend only using this `post_create_channel_connection_with_metadata` + interceptor in new development instead of the `post_create_channel_connection` interceptor. + When both interceptors are used, this `post_create_channel_connection_with_metadata` interceptor runs after the + `post_create_channel_connection` interceptor. The (possibly modified) response returned by + `post_create_channel_connection` will be passed to + `post_create_channel_connection_with_metadata`. + """ + return response, metadata + def pre_create_enrollment( self, request: eventarc.CreateEnrollmentRequest, @@ -469,12 +515,35 @@ def post_create_enrollment( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_enrollment - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_enrollment_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Eventarc server but before - it is returned to user code. + it is returned to user code. This `post_create_enrollment` interceptor runs + before the `post_create_enrollment_with_metadata` interceptor. 
""" return response + def post_create_enrollment_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_enrollment + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Eventarc server but before it is returned to user code. + + We recommend only using this `post_create_enrollment_with_metadata` + interceptor in new development instead of the `post_create_enrollment` interceptor. + When both interceptors are used, this `post_create_enrollment_with_metadata` interceptor runs after the + `post_create_enrollment` interceptor. The (possibly modified) response returned by + `post_create_enrollment` will be passed to + `post_create_enrollment_with_metadata`. + """ + return response, metadata + def pre_create_google_api_source( self, request: eventarc.CreateGoogleApiSourceRequest, @@ -494,12 +563,35 @@ def post_create_google_api_source( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_google_api_source - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_google_api_source_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Eventarc server but before - it is returned to user code. + it is returned to user code. This `post_create_google_api_source` interceptor runs + before the `post_create_google_api_source_with_metadata` interceptor. """ return response + def post_create_google_api_source_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_google_api_source + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Eventarc server but before it is returned to user code. + + We recommend only using this `post_create_google_api_source_with_metadata` + interceptor in new development instead of the `post_create_google_api_source` interceptor. + When both interceptors are used, this `post_create_google_api_source_with_metadata` interceptor runs after the + `post_create_google_api_source` interceptor. The (possibly modified) response returned by + `post_create_google_api_source` will be passed to + `post_create_google_api_source_with_metadata`. + """ + return response, metadata + def pre_create_message_bus( self, request: eventarc.CreateMessageBusRequest, @@ -519,12 +611,35 @@ def post_create_message_bus( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_message_bus - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_message_bus_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Eventarc server but before - it is returned to user code. + it is returned to user code. This `post_create_message_bus` interceptor runs + before the `post_create_message_bus_with_metadata` interceptor. 
""" return response + def post_create_message_bus_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_message_bus + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Eventarc server but before it is returned to user code. + + We recommend only using this `post_create_message_bus_with_metadata` + interceptor in new development instead of the `post_create_message_bus` interceptor. + When both interceptors are used, this `post_create_message_bus_with_metadata` interceptor runs after the + `post_create_message_bus` interceptor. The (possibly modified) response returned by + `post_create_message_bus` will be passed to + `post_create_message_bus_with_metadata`. + """ + return response, metadata + def pre_create_pipeline( self, request: eventarc.CreatePipelineRequest, @@ -542,12 +657,35 @@ def post_create_pipeline( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_pipeline - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_pipeline_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Eventarc server but before - it is returned to user code. + it is returned to user code. This `post_create_pipeline` interceptor runs + before the `post_create_pipeline_with_metadata` interceptor. """ return response + def post_create_pipeline_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_pipeline + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Eventarc server but before it is returned to user code. + + We recommend only using this `post_create_pipeline_with_metadata` + interceptor in new development instead of the `post_create_pipeline` interceptor. + When both interceptors are used, this `post_create_pipeline_with_metadata` interceptor runs after the + `post_create_pipeline` interceptor. The (possibly modified) response returned by + `post_create_pipeline` will be passed to + `post_create_pipeline_with_metadata`. + """ + return response, metadata + def pre_create_trigger( self, request: eventarc.CreateTriggerRequest, @@ -565,12 +703,35 @@ def post_create_trigger( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_trigger - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_trigger_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Eventarc server but before - it is returned to user code. + it is returned to user code. This `post_create_trigger` interceptor runs + before the `post_create_trigger_with_metadata` interceptor. """ return response + def post_create_trigger_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_trigger + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Eventarc server but before it is returned to user code. 
+ + We recommend only using this `post_create_trigger_with_metadata` + interceptor in new development instead of the `post_create_trigger` interceptor. + When both interceptors are used, this `post_create_trigger_with_metadata` interceptor runs after the + `post_create_trigger` interceptor. The (possibly modified) response returned by + `post_create_trigger` will be passed to + `post_create_trigger_with_metadata`. + """ + return response, metadata + def pre_delete_channel( self, request: eventarc.DeleteChannelRequest, @@ -588,12 +749,35 @@ def post_delete_channel( ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_channel - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_channel_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Eventarc server but before - it is returned to user code. + it is returned to user code. This `post_delete_channel` interceptor runs + before the `post_delete_channel_with_metadata` interceptor. """ return response + def post_delete_channel_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_channel + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Eventarc server but before it is returned to user code. + + We recommend only using this `post_delete_channel_with_metadata` + interceptor in new development instead of the `post_delete_channel` interceptor. + When both interceptors are used, this `post_delete_channel_with_metadata` interceptor runs after the + `post_delete_channel` interceptor. The (possibly modified) response returned by + `post_delete_channel` will be passed to + `post_delete_channel_with_metadata`. + """ + return response, metadata + def pre_delete_channel_connection( self, request: eventarc.DeleteChannelConnectionRequest, @@ -613,12 +797,35 @@ def post_delete_channel_connection( ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_channel_connection - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_channel_connection_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Eventarc server but before - it is returned to user code. + it is returned to user code. This `post_delete_channel_connection` interceptor runs + before the `post_delete_channel_connection_with_metadata` interceptor. """ return response + def post_delete_channel_connection_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_channel_connection + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Eventarc server but before it is returned to user code. + + We recommend only using this `post_delete_channel_connection_with_metadata` + interceptor in new development instead of the `post_delete_channel_connection` interceptor. + When both interceptors are used, this `post_delete_channel_connection_with_metadata` interceptor runs after the + `post_delete_channel_connection` interceptor. 
The (possibly modified) response returned by + `post_delete_channel_connection` will be passed to + `post_delete_channel_connection_with_metadata`. + """ + return response, metadata + def pre_delete_enrollment( self, request: eventarc.DeleteEnrollmentRequest, @@ -638,12 +845,35 @@ def post_delete_enrollment( ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_enrollment - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_enrollment_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Eventarc server but before - it is returned to user code. + it is returned to user code. This `post_delete_enrollment` interceptor runs + before the `post_delete_enrollment_with_metadata` interceptor. """ return response + def post_delete_enrollment_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_enrollment + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Eventarc server but before it is returned to user code. + + We recommend only using this `post_delete_enrollment_with_metadata` + interceptor in new development instead of the `post_delete_enrollment` interceptor. + When both interceptors are used, this `post_delete_enrollment_with_metadata` interceptor runs after the + `post_delete_enrollment` interceptor. The (possibly modified) response returned by + `post_delete_enrollment` will be passed to + `post_delete_enrollment_with_metadata`. + """ + return response, metadata + def pre_delete_google_api_source( self, request: eventarc.DeleteGoogleApiSourceRequest, @@ -663,12 +893,35 @@ def post_delete_google_api_source( ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_google_api_source - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_google_api_source_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Eventarc server but before - it is returned to user code. + it is returned to user code. This `post_delete_google_api_source` interceptor runs + before the `post_delete_google_api_source_with_metadata` interceptor. """ return response + def post_delete_google_api_source_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_google_api_source + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Eventarc server but before it is returned to user code. + + We recommend only using this `post_delete_google_api_source_with_metadata` + interceptor in new development instead of the `post_delete_google_api_source` interceptor. + When both interceptors are used, this `post_delete_google_api_source_with_metadata` interceptor runs after the + `post_delete_google_api_source` interceptor. The (possibly modified) response returned by + `post_delete_google_api_source` will be passed to + `post_delete_google_api_source_with_metadata`. 
+ """ + return response, metadata + def pre_delete_message_bus( self, request: eventarc.DeleteMessageBusRequest, @@ -688,12 +941,35 @@ def post_delete_message_bus( ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_message_bus - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_message_bus_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Eventarc server but before - it is returned to user code. + it is returned to user code. This `post_delete_message_bus` interceptor runs + before the `post_delete_message_bus_with_metadata` interceptor. """ return response + def post_delete_message_bus_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_message_bus + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Eventarc server but before it is returned to user code. + + We recommend only using this `post_delete_message_bus_with_metadata` + interceptor in new development instead of the `post_delete_message_bus` interceptor. + When both interceptors are used, this `post_delete_message_bus_with_metadata` interceptor runs after the + `post_delete_message_bus` interceptor. The (possibly modified) response returned by + `post_delete_message_bus` will be passed to + `post_delete_message_bus_with_metadata`. + """ + return response, metadata + def pre_delete_pipeline( self, request: eventarc.DeletePipelineRequest, @@ -711,12 +987,35 @@ def post_delete_pipeline( ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_pipeline - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_pipeline_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Eventarc server but before - it is returned to user code. + it is returned to user code. This `post_delete_pipeline` interceptor runs + before the `post_delete_pipeline_with_metadata` interceptor. """ return response + def post_delete_pipeline_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_pipeline + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Eventarc server but before it is returned to user code. + + We recommend only using this `post_delete_pipeline_with_metadata` + interceptor in new development instead of the `post_delete_pipeline` interceptor. + When both interceptors are used, this `post_delete_pipeline_with_metadata` interceptor runs after the + `post_delete_pipeline` interceptor. The (possibly modified) response returned by + `post_delete_pipeline` will be passed to + `post_delete_pipeline_with_metadata`. + """ + return response, metadata + def pre_delete_trigger( self, request: eventarc.DeleteTriggerRequest, @@ -734,12 +1033,35 @@ def post_delete_trigger( ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_trigger - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_trigger_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response after it is returned by the Eventarc server but before - it is returned to user code. + it is returned to user code. This `post_delete_trigger` interceptor runs + before the `post_delete_trigger_with_metadata` interceptor. """ return response + def post_delete_trigger_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_trigger + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Eventarc server but before it is returned to user code. + + We recommend only using this `post_delete_trigger_with_metadata` + interceptor in new development instead of the `post_delete_trigger` interceptor. + When both interceptors are used, this `post_delete_trigger_with_metadata` interceptor runs after the + `post_delete_trigger` interceptor. The (possibly modified) response returned by + `post_delete_trigger` will be passed to + `post_delete_trigger_with_metadata`. + """ + return response, metadata + def pre_get_channel( self, request: eventarc.GetChannelRequest, @@ -755,12 +1077,35 @@ def pre_get_channel( def post_get_channel(self, response: channel.Channel) -> channel.Channel: """Post-rpc interceptor for get_channel - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_channel_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Eventarc server but before - it is returned to user code. + it is returned to user code. This `post_get_channel` interceptor runs + before the `post_get_channel_with_metadata` interceptor. """ return response + def post_get_channel_with_metadata( + self, + response: channel.Channel, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[channel.Channel, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_channel + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Eventarc server but before it is returned to user code. + + We recommend only using this `post_get_channel_with_metadata` + interceptor in new development instead of the `post_get_channel` interceptor. + When both interceptors are used, this `post_get_channel_with_metadata` interceptor runs after the + `post_get_channel` interceptor. The (possibly modified) response returned by + `post_get_channel` will be passed to + `post_get_channel_with_metadata`. + """ + return response, metadata + def pre_get_channel_connection( self, request: eventarc.GetChannelConnectionRequest, @@ -780,12 +1125,37 @@ def post_get_channel_connection( ) -> channel_connection.ChannelConnection: """Post-rpc interceptor for get_channel_connection - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_channel_connection_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Eventarc server but before - it is returned to user code. + it is returned to user code. This `post_get_channel_connection` interceptor runs + before the `post_get_channel_connection_with_metadata` interceptor. 
""" return response + def post_get_channel_connection_with_metadata( + self, + response: channel_connection.ChannelConnection, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + channel_connection.ChannelConnection, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for get_channel_connection + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Eventarc server but before it is returned to user code. + + We recommend only using this `post_get_channel_connection_with_metadata` + interceptor in new development instead of the `post_get_channel_connection` interceptor. + When both interceptors are used, this `post_get_channel_connection_with_metadata` interceptor runs after the + `post_get_channel_connection` interceptor. The (possibly modified) response returned by + `post_get_channel_connection` will be passed to + `post_get_channel_connection_with_metadata`. + """ + return response, metadata + def pre_get_enrollment( self, request: eventarc.GetEnrollmentRequest, @@ -803,12 +1173,35 @@ def post_get_enrollment( ) -> enrollment.Enrollment: """Post-rpc interceptor for get_enrollment - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_enrollment_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Eventarc server but before - it is returned to user code. + it is returned to user code. This `post_get_enrollment` interceptor runs + before the `post_get_enrollment_with_metadata` interceptor. """ return response + def post_get_enrollment_with_metadata( + self, + response: enrollment.Enrollment, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[enrollment.Enrollment, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_enrollment + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Eventarc server but before it is returned to user code. + + We recommend only using this `post_get_enrollment_with_metadata` + interceptor in new development instead of the `post_get_enrollment` interceptor. + When both interceptors are used, this `post_get_enrollment_with_metadata` interceptor runs after the + `post_get_enrollment` interceptor. The (possibly modified) response returned by + `post_get_enrollment` will be passed to + `post_get_enrollment_with_metadata`. + """ + return response, metadata + def pre_get_google_api_source( self, request: eventarc.GetGoogleApiSourceRequest, @@ -828,12 +1221,37 @@ def post_get_google_api_source( ) -> google_api_source.GoogleApiSource: """Post-rpc interceptor for get_google_api_source - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_google_api_source_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Eventarc server but before - it is returned to user code. + it is returned to user code. This `post_get_google_api_source` interceptor runs + before the `post_get_google_api_source_with_metadata` interceptor. 
""" return response + def post_get_google_api_source_with_metadata( + self, + response: google_api_source.GoogleApiSource, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + google_api_source.GoogleApiSource, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for get_google_api_source + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Eventarc server but before it is returned to user code. + + We recommend only using this `post_get_google_api_source_with_metadata` + interceptor in new development instead of the `post_get_google_api_source` interceptor. + When both interceptors are used, this `post_get_google_api_source_with_metadata` interceptor runs after the + `post_get_google_api_source` interceptor. The (possibly modified) response returned by + `post_get_google_api_source` will be passed to + `post_get_google_api_source_with_metadata`. + """ + return response, metadata + def pre_get_google_channel_config( self, request: eventarc.GetGoogleChannelConfigRequest, @@ -853,12 +1271,38 @@ def post_get_google_channel_config( ) -> google_channel_config.GoogleChannelConfig: """Post-rpc interceptor for get_google_channel_config - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_google_channel_config_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Eventarc server but before - it is returned to user code. + it is returned to user code. This `post_get_google_channel_config` interceptor runs + before the `post_get_google_channel_config_with_metadata` interceptor. """ return response + def post_get_google_channel_config_with_metadata( + self, + response: google_channel_config.GoogleChannelConfig, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + google_channel_config.GoogleChannelConfig, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for get_google_channel_config + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Eventarc server but before it is returned to user code. + + We recommend only using this `post_get_google_channel_config_with_metadata` + interceptor in new development instead of the `post_get_google_channel_config` interceptor. + When both interceptors are used, this `post_get_google_channel_config_with_metadata` interceptor runs after the + `post_get_google_channel_config` interceptor. The (possibly modified) response returned by + `post_get_google_channel_config` will be passed to + `post_get_google_channel_config_with_metadata`. + """ + return response, metadata + def pre_get_message_bus( self, request: eventarc.GetMessageBusRequest, @@ -876,12 +1320,35 @@ def post_get_message_bus( ) -> message_bus.MessageBus: """Post-rpc interceptor for get_message_bus - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_message_bus_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Eventarc server but before - it is returned to user code. + it is returned to user code. This `post_get_message_bus` interceptor runs + before the `post_get_message_bus_with_metadata` interceptor. 
""" return response + def post_get_message_bus_with_metadata( + self, + response: message_bus.MessageBus, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[message_bus.MessageBus, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_message_bus + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Eventarc server but before it is returned to user code. + + We recommend only using this `post_get_message_bus_with_metadata` + interceptor in new development instead of the `post_get_message_bus` interceptor. + When both interceptors are used, this `post_get_message_bus_with_metadata` interceptor runs after the + `post_get_message_bus` interceptor. The (possibly modified) response returned by + `post_get_message_bus` will be passed to + `post_get_message_bus_with_metadata`. + """ + return response, metadata + def pre_get_pipeline( self, request: eventarc.GetPipelineRequest, @@ -897,12 +1364,35 @@ def pre_get_pipeline( def post_get_pipeline(self, response: pipeline.Pipeline) -> pipeline.Pipeline: """Post-rpc interceptor for get_pipeline - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_pipeline_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Eventarc server but before - it is returned to user code. + it is returned to user code. This `post_get_pipeline` interceptor runs + before the `post_get_pipeline_with_metadata` interceptor. """ return response + def post_get_pipeline_with_metadata( + self, + response: pipeline.Pipeline, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[pipeline.Pipeline, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_pipeline + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Eventarc server but before it is returned to user code. + + We recommend only using this `post_get_pipeline_with_metadata` + interceptor in new development instead of the `post_get_pipeline` interceptor. + When both interceptors are used, this `post_get_pipeline_with_metadata` interceptor runs after the + `post_get_pipeline` interceptor. The (possibly modified) response returned by + `post_get_pipeline` will be passed to + `post_get_pipeline_with_metadata`. + """ + return response, metadata + def pre_get_provider( self, request: eventarc.GetProviderRequest, @@ -918,12 +1408,35 @@ def pre_get_provider( def post_get_provider(self, response: discovery.Provider) -> discovery.Provider: """Post-rpc interceptor for get_provider - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_provider_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Eventarc server but before - it is returned to user code. + it is returned to user code. This `post_get_provider` interceptor runs + before the `post_get_provider_with_metadata` interceptor. """ return response + def post_get_provider_with_metadata( + self, + response: discovery.Provider, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[discovery.Provider, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_provider + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Eventarc server but before it is returned to user code. 
+ + We recommend only using this `post_get_provider_with_metadata` + interceptor in new development instead of the `post_get_provider` interceptor. + When both interceptors are used, this `post_get_provider_with_metadata` interceptor runs after the + `post_get_provider` interceptor. The (possibly modified) response returned by + `post_get_provider` will be passed to + `post_get_provider_with_metadata`. + """ + return response, metadata + def pre_get_trigger( self, request: eventarc.GetTriggerRequest, @@ -939,12 +1452,35 @@ def pre_get_trigger( def post_get_trigger(self, response: trigger.Trigger) -> trigger.Trigger: """Post-rpc interceptor for get_trigger - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_trigger_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Eventarc server but before - it is returned to user code. + it is returned to user code. This `post_get_trigger` interceptor runs + before the `post_get_trigger_with_metadata` interceptor. """ return response + def post_get_trigger_with_metadata( + self, + response: trigger.Trigger, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[trigger.Trigger, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_trigger + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Eventarc server but before it is returned to user code. + + We recommend only using this `post_get_trigger_with_metadata` + interceptor in new development instead of the `post_get_trigger` interceptor. + When both interceptors are used, this `post_get_trigger_with_metadata` interceptor runs after the + `post_get_trigger` interceptor. The (possibly modified) response returned by + `post_get_trigger` will be passed to + `post_get_trigger_with_metadata`. + """ + return response, metadata + def pre_list_channel_connections( self, request: eventarc.ListChannelConnectionsRequest, @@ -964,12 +1500,37 @@ def post_list_channel_connections( ) -> eventarc.ListChannelConnectionsResponse: """Post-rpc interceptor for list_channel_connections - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_channel_connections_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Eventarc server but before - it is returned to user code. + it is returned to user code. This `post_list_channel_connections` interceptor runs + before the `post_list_channel_connections_with_metadata` interceptor. """ return response + def post_list_channel_connections_with_metadata( + self, + response: eventarc.ListChannelConnectionsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + eventarc.ListChannelConnectionsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_channel_connections + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Eventarc server but before it is returned to user code. + + We recommend only using this `post_list_channel_connections_with_metadata` + interceptor in new development instead of the `post_list_channel_connections` interceptor. + When both interceptors are used, this `post_list_channel_connections_with_metadata` interceptor runs after the + `post_list_channel_connections` interceptor. 
The (possibly modified) response returned by + `post_list_channel_connections` will be passed to + `post_list_channel_connections_with_metadata`. + """ + return response, metadata + def pre_list_channels( self, request: eventarc.ListChannelsRequest, @@ -987,12 +1548,35 @@ def post_list_channels( ) -> eventarc.ListChannelsResponse: """Post-rpc interceptor for list_channels - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_channels_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Eventarc server but before - it is returned to user code. + it is returned to user code. This `post_list_channels` interceptor runs + before the `post_list_channels_with_metadata` interceptor. """ return response + def post_list_channels_with_metadata( + self, + response: eventarc.ListChannelsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[eventarc.ListChannelsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_channels + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Eventarc server but before it is returned to user code. + + We recommend only using this `post_list_channels_with_metadata` + interceptor in new development instead of the `post_list_channels` interceptor. + When both interceptors are used, this `post_list_channels_with_metadata` interceptor runs after the + `post_list_channels` interceptor. The (possibly modified) response returned by + `post_list_channels` will be passed to + `post_list_channels_with_metadata`. + """ + return response, metadata + def pre_list_enrollments( self, request: eventarc.ListEnrollmentsRequest, @@ -1012,12 +1596,37 @@ def post_list_enrollments( ) -> eventarc.ListEnrollmentsResponse: """Post-rpc interceptor for list_enrollments - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_enrollments_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Eventarc server but before - it is returned to user code. + it is returned to user code. This `post_list_enrollments` interceptor runs + before the `post_list_enrollments_with_metadata` interceptor. """ return response + def post_list_enrollments_with_metadata( + self, + response: eventarc.ListEnrollmentsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + eventarc.ListEnrollmentsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_enrollments + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Eventarc server but before it is returned to user code. + + We recommend only using this `post_list_enrollments_with_metadata` + interceptor in new development instead of the `post_list_enrollments` interceptor. + When both interceptors are used, this `post_list_enrollments_with_metadata` interceptor runs after the + `post_list_enrollments` interceptor. The (possibly modified) response returned by + `post_list_enrollments` will be passed to + `post_list_enrollments_with_metadata`. 
+ """ + return response, metadata + def pre_list_google_api_sources( self, request: eventarc.ListGoogleApiSourcesRequest, @@ -1037,12 +1646,37 @@ def post_list_google_api_sources( ) -> eventarc.ListGoogleApiSourcesResponse: """Post-rpc interceptor for list_google_api_sources - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_google_api_sources_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Eventarc server but before - it is returned to user code. + it is returned to user code. This `post_list_google_api_sources` interceptor runs + before the `post_list_google_api_sources_with_metadata` interceptor. """ return response + def post_list_google_api_sources_with_metadata( + self, + response: eventarc.ListGoogleApiSourcesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + eventarc.ListGoogleApiSourcesResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_google_api_sources + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Eventarc server but before it is returned to user code. + + We recommend only using this `post_list_google_api_sources_with_metadata` + interceptor in new development instead of the `post_list_google_api_sources` interceptor. + When both interceptors are used, this `post_list_google_api_sources_with_metadata` interceptor runs after the + `post_list_google_api_sources` interceptor. The (possibly modified) response returned by + `post_list_google_api_sources` will be passed to + `post_list_google_api_sources_with_metadata`. + """ + return response, metadata + def pre_list_message_bus_enrollments( self, request: eventarc.ListMessageBusEnrollmentsRequest, @@ -1063,12 +1697,38 @@ def post_list_message_bus_enrollments( ) -> eventarc.ListMessageBusEnrollmentsResponse: """Post-rpc interceptor for list_message_bus_enrollments - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_message_bus_enrollments_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Eventarc server but before - it is returned to user code. + it is returned to user code. This `post_list_message_bus_enrollments` interceptor runs + before the `post_list_message_bus_enrollments_with_metadata` interceptor. """ return response + def post_list_message_bus_enrollments_with_metadata( + self, + response: eventarc.ListMessageBusEnrollmentsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + eventarc.ListMessageBusEnrollmentsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_message_bus_enrollments + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Eventarc server but before it is returned to user code. + + We recommend only using this `post_list_message_bus_enrollments_with_metadata` + interceptor in new development instead of the `post_list_message_bus_enrollments` interceptor. + When both interceptors are used, this `post_list_message_bus_enrollments_with_metadata` interceptor runs after the + `post_list_message_bus_enrollments` interceptor. The (possibly modified) response returned by + `post_list_message_bus_enrollments` will be passed to + `post_list_message_bus_enrollments_with_metadata`. 
+ """ + return response, metadata + def pre_list_message_buses( self, request: eventarc.ListMessageBusesRequest, @@ -1088,12 +1748,37 @@ def post_list_message_buses( ) -> eventarc.ListMessageBusesResponse: """Post-rpc interceptor for list_message_buses - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_message_buses_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Eventarc server but before - it is returned to user code. + it is returned to user code. This `post_list_message_buses` interceptor runs + before the `post_list_message_buses_with_metadata` interceptor. """ return response + def post_list_message_buses_with_metadata( + self, + response: eventarc.ListMessageBusesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + eventarc.ListMessageBusesResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_message_buses + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Eventarc server but before it is returned to user code. + + We recommend only using this `post_list_message_buses_with_metadata` + interceptor in new development instead of the `post_list_message_buses` interceptor. + When both interceptors are used, this `post_list_message_buses_with_metadata` interceptor runs after the + `post_list_message_buses` interceptor. The (possibly modified) response returned by + `post_list_message_buses` will be passed to + `post_list_message_buses_with_metadata`. + """ + return response, metadata + def pre_list_pipelines( self, request: eventarc.ListPipelinesRequest, @@ -1111,12 +1796,35 @@ def post_list_pipelines( ) -> eventarc.ListPipelinesResponse: """Post-rpc interceptor for list_pipelines - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_pipelines_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Eventarc server but before - it is returned to user code. + it is returned to user code. This `post_list_pipelines` interceptor runs + before the `post_list_pipelines_with_metadata` interceptor. """ return response + def post_list_pipelines_with_metadata( + self, + response: eventarc.ListPipelinesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[eventarc.ListPipelinesResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_pipelines + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Eventarc server but before it is returned to user code. + + We recommend only using this `post_list_pipelines_with_metadata` + interceptor in new development instead of the `post_list_pipelines` interceptor. + When both interceptors are used, this `post_list_pipelines_with_metadata` interceptor runs after the + `post_list_pipelines` interceptor. The (possibly modified) response returned by + `post_list_pipelines` will be passed to + `post_list_pipelines_with_metadata`. + """ + return response, metadata + def pre_list_providers( self, request: eventarc.ListProvidersRequest, @@ -1134,12 +1842,35 @@ def post_list_providers( ) -> eventarc.ListProvidersResponse: """Post-rpc interceptor for list_providers - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_providers_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response after it is returned by the Eventarc server but before - it is returned to user code. + it is returned to user code. This `post_list_providers` interceptor runs + before the `post_list_providers_with_metadata` interceptor. """ return response + def post_list_providers_with_metadata( + self, + response: eventarc.ListProvidersResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[eventarc.ListProvidersResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_providers + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Eventarc server but before it is returned to user code. + + We recommend only using this `post_list_providers_with_metadata` + interceptor in new development instead of the `post_list_providers` interceptor. + When both interceptors are used, this `post_list_providers_with_metadata` interceptor runs after the + `post_list_providers` interceptor. The (possibly modified) response returned by + `post_list_providers` will be passed to + `post_list_providers_with_metadata`. + """ + return response, metadata + def pre_list_triggers( self, request: eventarc.ListTriggersRequest, @@ -1157,12 +1888,35 @@ def post_list_triggers( ) -> eventarc.ListTriggersResponse: """Post-rpc interceptor for list_triggers - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_triggers_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Eventarc server but before - it is returned to user code. + it is returned to user code. This `post_list_triggers` interceptor runs + before the `post_list_triggers_with_metadata` interceptor. """ return response + def post_list_triggers_with_metadata( + self, + response: eventarc.ListTriggersResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[eventarc.ListTriggersResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_triggers + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Eventarc server but before it is returned to user code. + + We recommend only using this `post_list_triggers_with_metadata` + interceptor in new development instead of the `post_list_triggers` interceptor. + When both interceptors are used, this `post_list_triggers_with_metadata` interceptor runs after the + `post_list_triggers` interceptor. The (possibly modified) response returned by + `post_list_triggers` will be passed to + `post_list_triggers_with_metadata`. + """ + return response, metadata + def pre_update_channel( self, request: eventarc.UpdateChannelRequest, @@ -1180,12 +1934,35 @@ def post_update_channel( ) -> operations_pb2.Operation: """Post-rpc interceptor for update_channel - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_channel_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Eventarc server but before - it is returned to user code. + it is returned to user code. This `post_update_channel` interceptor runs + before the `post_update_channel_with_metadata` interceptor. 
""" return response + def post_update_channel_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_channel + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Eventarc server but before it is returned to user code. + + We recommend only using this `post_update_channel_with_metadata` + interceptor in new development instead of the `post_update_channel` interceptor. + When both interceptors are used, this `post_update_channel_with_metadata` interceptor runs after the + `post_update_channel` interceptor. The (possibly modified) response returned by + `post_update_channel` will be passed to + `post_update_channel_with_metadata`. + """ + return response, metadata + def pre_update_enrollment( self, request: eventarc.UpdateEnrollmentRequest, @@ -1205,12 +1982,35 @@ def post_update_enrollment( ) -> operations_pb2.Operation: """Post-rpc interceptor for update_enrollment - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_enrollment_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Eventarc server but before - it is returned to user code. + it is returned to user code. This `post_update_enrollment` interceptor runs + before the `post_update_enrollment_with_metadata` interceptor. """ return response + def post_update_enrollment_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_enrollment + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Eventarc server but before it is returned to user code. + + We recommend only using this `post_update_enrollment_with_metadata` + interceptor in new development instead of the `post_update_enrollment` interceptor. + When both interceptors are used, this `post_update_enrollment_with_metadata` interceptor runs after the + `post_update_enrollment` interceptor. The (possibly modified) response returned by + `post_update_enrollment` will be passed to + `post_update_enrollment_with_metadata`. + """ + return response, metadata + def pre_update_google_api_source( self, request: eventarc.UpdateGoogleApiSourceRequest, @@ -1230,12 +2030,35 @@ def post_update_google_api_source( ) -> operations_pb2.Operation: """Post-rpc interceptor for update_google_api_source - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_google_api_source_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Eventarc server but before - it is returned to user code. + it is returned to user code. This `post_update_google_api_source` interceptor runs + before the `post_update_google_api_source_with_metadata` interceptor. 
""" return response + def post_update_google_api_source_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_google_api_source + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Eventarc server but before it is returned to user code. + + We recommend only using this `post_update_google_api_source_with_metadata` + interceptor in new development instead of the `post_update_google_api_source` interceptor. + When both interceptors are used, this `post_update_google_api_source_with_metadata` interceptor runs after the + `post_update_google_api_source` interceptor. The (possibly modified) response returned by + `post_update_google_api_source` will be passed to + `post_update_google_api_source_with_metadata`. + """ + return response, metadata + def pre_update_google_channel_config( self, request: eventarc.UpdateGoogleChannelConfigRequest, @@ -1256,12 +2079,38 @@ def post_update_google_channel_config( ) -> gce_google_channel_config.GoogleChannelConfig: """Post-rpc interceptor for update_google_channel_config - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_google_channel_config_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Eventarc server but before - it is returned to user code. + it is returned to user code. This `post_update_google_channel_config` interceptor runs + before the `post_update_google_channel_config_with_metadata` interceptor. """ return response + def post_update_google_channel_config_with_metadata( + self, + response: gce_google_channel_config.GoogleChannelConfig, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + gce_google_channel_config.GoogleChannelConfig, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for update_google_channel_config + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Eventarc server but before it is returned to user code. + + We recommend only using this `post_update_google_channel_config_with_metadata` + interceptor in new development instead of the `post_update_google_channel_config` interceptor. + When both interceptors are used, this `post_update_google_channel_config_with_metadata` interceptor runs after the + `post_update_google_channel_config` interceptor. The (possibly modified) response returned by + `post_update_google_channel_config` will be passed to + `post_update_google_channel_config_with_metadata`. + """ + return response, metadata + def pre_update_message_bus( self, request: eventarc.UpdateMessageBusRequest, @@ -1281,12 +2130,35 @@ def post_update_message_bus( ) -> operations_pb2.Operation: """Post-rpc interceptor for update_message_bus - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_message_bus_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Eventarc server but before - it is returned to user code. + it is returned to user code. This `post_update_message_bus` interceptor runs + before the `post_update_message_bus_with_metadata` interceptor. 
""" return response + def post_update_message_bus_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_message_bus + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Eventarc server but before it is returned to user code. + + We recommend only using this `post_update_message_bus_with_metadata` + interceptor in new development instead of the `post_update_message_bus` interceptor. + When both interceptors are used, this `post_update_message_bus_with_metadata` interceptor runs after the + `post_update_message_bus` interceptor. The (possibly modified) response returned by + `post_update_message_bus` will be passed to + `post_update_message_bus_with_metadata`. + """ + return response, metadata + def pre_update_pipeline( self, request: eventarc.UpdatePipelineRequest, @@ -1304,12 +2176,35 @@ def post_update_pipeline( ) -> operations_pb2.Operation: """Post-rpc interceptor for update_pipeline - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_pipeline_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Eventarc server but before - it is returned to user code. + it is returned to user code. This `post_update_pipeline` interceptor runs + before the `post_update_pipeline_with_metadata` interceptor. """ return response + def post_update_pipeline_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_pipeline + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Eventarc server but before it is returned to user code. + + We recommend only using this `post_update_pipeline_with_metadata` + interceptor in new development instead of the `post_update_pipeline` interceptor. + When both interceptors are used, this `post_update_pipeline_with_metadata` interceptor runs after the + `post_update_pipeline` interceptor. The (possibly modified) response returned by + `post_update_pipeline` will be passed to + `post_update_pipeline_with_metadata`. + """ + return response, metadata + def pre_update_trigger( self, request: eventarc.UpdateTriggerRequest, @@ -1327,12 +2222,35 @@ def post_update_trigger( ) -> operations_pb2.Operation: """Post-rpc interceptor for update_trigger - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_trigger_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Eventarc server but before - it is returned to user code. + it is returned to user code. This `post_update_trigger` interceptor runs + before the `post_update_trigger_with_metadata` interceptor. """ return response + def post_update_trigger_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_trigger + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Eventarc server but before it is returned to user code. 
+ + We recommend only using this `post_update_trigger_with_metadata` + interceptor in new development instead of the `post_update_trigger` interceptor. + When both interceptors are used, this `post_update_trigger_with_metadata` interceptor runs after the + `post_update_trigger` interceptor. The (possibly modified) response returned by + `post_update_trigger` will be passed to + `post_update_trigger_with_metadata`. + """ + return response, metadata + def pre_get_location( self, request: locations_pb2.GetLocationRequest, @@ -1823,6 +2741,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_channel(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_channel_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1973,6 +2895,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_channel_connection(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_channel_connection_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2127,6 +3053,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_enrollment(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_enrollment_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2277,6 +3207,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_google_api_source(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_google_api_source_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2431,6 +3365,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_message_bus(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_message_bus_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2585,6 +3523,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_pipeline(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_pipeline_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2737,6 +3679,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_trigger(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_trigger_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2883,6 +3829,10 @@ def __call__( 
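# --- Editor's note: illustrative sketch, not part of the generated diff --------
# The `post_*_with_metadata` hooks introduced above give a subclassed
# interceptor access to the trailing metadata alongside the RPC response; the
# REST transport wiring further below fills that metadata from the HTTP
# response headers and keeps only the (possibly modified) response. A minimal
# subclass might look like the following. The class name, the logging
# behaviour, and the import path of `EventarcRestInterceptor` are assumptions
# for illustration only, not part of this patch.
import logging
from typing import Sequence, Tuple, Union

from google.cloud.eventarc_v1.services.eventarc import transports
from google.cloud.eventarc_v1.types import trigger


class LoggingEventarcInterceptor(transports.EventarcRestInterceptor):
    """Hypothetical interceptor that inspects get_trigger response metadata."""

    def post_get_trigger_with_metadata(
        self,
        response: trigger.Trigger,
        metadata: Sequence[Tuple[str, Union[str, bytes]]],
    ) -> Tuple[trigger.Trigger, Sequence[Tuple[str, Union[str, bytes]]]]:
        # `metadata` is a sequence of (key, value) pairs; for the REST
        # transport it carries the HTTP response headers.
        logging.getLogger(__name__).debug(
            "get_trigger response metadata: %s", dict(metadata)
        )
        # Returning the pair unchanged preserves the default behaviour.
        return response, metadata
# --------------------------------------------------------------------------------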
json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_channel(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_channel_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3027,6 +3977,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_channel_connection(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_channel_connection_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3173,6 +4127,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_enrollment(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_enrollment_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3317,6 +4275,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_google_api_source(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_google_api_source_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3463,6 +4425,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_message_bus(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_message_bus_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3609,6 +4575,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_pipeline(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_pipeline_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3755,6 +4725,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_trigger(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_trigger_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3906,6 +4880,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_channel(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_channel_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4056,6 +5034,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_channel_connection(resp) + response_metadata = [(k, str(v)) for 
k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_channel_connection_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4209,6 +5191,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_enrollment(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_enrollment_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4355,6 +5341,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_google_api_source(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_google_api_source_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4508,6 +5498,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_google_channel_config(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_google_channel_config_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4663,6 +5657,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_message_bus(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_message_bus_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4808,6 +5806,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_pipeline(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_pipeline_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4953,6 +5955,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_provider(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_provider_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -5098,6 +6104,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_trigger(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_trigger_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -5243,6 +6253,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_channel_connections(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_channel_connections_with_metadata( + resp, response_metadata + ) if 
CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -5388,6 +6402,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_channels(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_channels_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -5535,6 +6553,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_enrollments(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_enrollments_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -5682,6 +6704,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_google_api_sources(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_google_api_sources_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -5830,6 +6856,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_message_bus_enrollments(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_message_bus_enrollments_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -5979,6 +7009,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_message_buses(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_message_buses_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -6128,6 +7162,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_pipelines(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_pipelines_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -6273,6 +7311,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_providers(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_providers_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -6416,6 +7458,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_triggers(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_triggers_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -6568,6 +7614,10 @@ def __call__( 
json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_channel(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_channel_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -6722,6 +7772,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_enrollment(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_enrollment_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -6872,6 +7926,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_google_api_source(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_google_api_source_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -7030,6 +8088,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_google_channel_config(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_google_channel_config_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -7186,6 +8248,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_message_bus(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_message_bus_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -7340,6 +8406,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_pipeline(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_pipeline_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -7492,6 +8562,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_trigger(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_trigger_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-eventarc/samples/generated_samples/snippet_metadata_google.cloud.eventarc.v1.json b/packages/google-cloud-eventarc/samples/generated_samples/snippet_metadata_google.cloud.eventarc.v1.json index 5822f5d5795f..0c4167f3cfdb 100644 --- a/packages/google-cloud-eventarc/samples/generated_samples/snippet_metadata_google.cloud.eventarc.v1.json +++ b/packages/google-cloud-eventarc/samples/generated_samples/snippet_metadata_google.cloud.eventarc.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-eventarc", - "version": "1.14.0" + "version": "1.15.0" }, 
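# --- Editor's note: illustrative sketch, not part of the generated diff --------
# Each __call__ above now converts the HTTP response headers into the metadata
# sequence handed to the matching `post_*_with_metadata` interceptor, and then
# discards the metadata the interceptor returns (`resp, _ = ...`), so only the
# (possibly modified) response flows onward. The header names and values below
# are made up purely to demonstrate the shape of that conversion.
sample_headers = {"content-type": "application/json", "x-sample-request-id": "abc123"}
response_metadata = [(k, str(v)) for k, v in sample_headers.items()]
assert response_metadata == [
    ("content-type", "application/json"),
    ("x-sample-request-id", "abc123"),
]
# --------------------------------------------------------------------------------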
"snippets": [ { diff --git a/packages/google-cloud-eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py b/packages/google-cloud-eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py index 8bbd1174cac8..033a57437c9b 100644 --- a/packages/google-cloud-eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py +++ b/packages/google-cloud-eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py @@ -99,6 +99,13 @@ from google.cloud.eventarc_v1.types import trigger from google.cloud.eventarc_v1.types import trigger as gce_trigger +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -329,6 +336,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = EventarcClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = EventarcClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -26554,10 +26604,13 @@ def test_get_trigger_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.EventarcRestInterceptor, "post_get_trigger" ) as post, mock.patch.object( + transports.EventarcRestInterceptor, "post_get_trigger_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.EventarcRestInterceptor, "pre_get_trigger" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = eventarc.GetTriggerRequest.pb(eventarc.GetTriggerRequest()) transcode.return_value = { "method": "post", @@ -26579,6 +26632,7 @@ def test_get_trigger_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = trigger.Trigger() + post_with_metadata.return_value = trigger.Trigger(), metadata client.get_trigger( request, @@ -26590,6 +26644,7 @@ def test_get_trigger_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_triggers_rest_bad_request(request_type=eventarc.ListTriggersRequest): @@ -26672,10 +26727,13 @@ def test_list_triggers_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( 
transports.EventarcRestInterceptor, "post_list_triggers" ) as post, mock.patch.object( + transports.EventarcRestInterceptor, "post_list_triggers_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.EventarcRestInterceptor, "pre_list_triggers" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = eventarc.ListTriggersRequest.pb(eventarc.ListTriggersRequest()) transcode.return_value = { "method": "post", @@ -26699,6 +26757,7 @@ def test_list_triggers_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = eventarc.ListTriggersResponse() + post_with_metadata.return_value = eventarc.ListTriggersResponse(), metadata client.list_triggers( request, @@ -26710,6 +26769,7 @@ def test_list_triggers_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_trigger_rest_bad_request(request_type=eventarc.CreateTriggerRequest): @@ -26894,10 +26954,13 @@ def test_create_trigger_rest_interceptors(null_interceptor): ), mock.patch.object( transports.EventarcRestInterceptor, "post_create_trigger" ) as post, mock.patch.object( + transports.EventarcRestInterceptor, "post_create_trigger_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.EventarcRestInterceptor, "pre_create_trigger" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = eventarc.CreateTriggerRequest.pb(eventarc.CreateTriggerRequest()) transcode.return_value = { "method": "post", @@ -26919,6 +26982,7 @@ def test_create_trigger_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_trigger( request, @@ -26930,6 +26994,7 @@ def test_create_trigger_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_trigger_rest_bad_request(request_type=eventarc.UpdateTriggerRequest): @@ -27118,10 +27183,13 @@ def test_update_trigger_rest_interceptors(null_interceptor): ), mock.patch.object( transports.EventarcRestInterceptor, "post_update_trigger" ) as post, mock.patch.object( + transports.EventarcRestInterceptor, "post_update_trigger_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.EventarcRestInterceptor, "pre_update_trigger" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = eventarc.UpdateTriggerRequest.pb(eventarc.UpdateTriggerRequest()) transcode.return_value = { "method": "post", @@ -27143,6 +27211,7 @@ def test_update_trigger_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.update_trigger( request, @@ -27154,6 +27223,7 @@ def test_update_trigger_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_trigger_rest_bad_request(request_type=eventarc.DeleteTriggerRequest): @@ -27230,10 +27300,13 @@ def test_delete_trigger_rest_interceptors(null_interceptor): ), mock.patch.object( transports.EventarcRestInterceptor, "post_delete_trigger" ) as post, mock.patch.object( + transports.EventarcRestInterceptor, 
"post_delete_trigger_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.EventarcRestInterceptor, "pre_delete_trigger" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = eventarc.DeleteTriggerRequest.pb(eventarc.DeleteTriggerRequest()) transcode.return_value = { "method": "post", @@ -27255,6 +27328,7 @@ def test_delete_trigger_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.delete_trigger( request, @@ -27266,6 +27340,7 @@ def test_delete_trigger_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_channel_rest_bad_request(request_type=eventarc.GetChannelRequest): @@ -27359,10 +27434,13 @@ def test_get_channel_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.EventarcRestInterceptor, "post_get_channel" ) as post, mock.patch.object( + transports.EventarcRestInterceptor, "post_get_channel_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.EventarcRestInterceptor, "pre_get_channel" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = eventarc.GetChannelRequest.pb(eventarc.GetChannelRequest()) transcode.return_value = { "method": "post", @@ -27384,6 +27462,7 @@ def test_get_channel_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = channel.Channel() + post_with_metadata.return_value = channel.Channel(), metadata client.get_channel( request, @@ -27395,6 +27474,7 @@ def test_get_channel_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_channels_rest_bad_request(request_type=eventarc.ListChannelsRequest): @@ -27477,10 +27557,13 @@ def test_list_channels_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.EventarcRestInterceptor, "post_list_channels" ) as post, mock.patch.object( + transports.EventarcRestInterceptor, "post_list_channels_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.EventarcRestInterceptor, "pre_list_channels" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = eventarc.ListChannelsRequest.pb(eventarc.ListChannelsRequest()) transcode.return_value = { "method": "post", @@ -27504,6 +27587,7 @@ def test_list_channels_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = eventarc.ListChannelsResponse() + post_with_metadata.return_value = eventarc.ListChannelsResponse(), metadata client.list_channels( request, @@ -27515,6 +27599,7 @@ def test_list_channels_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_channel_rest_bad_request(request_type=eventarc.CreateChannelRequest): @@ -27670,10 +27755,13 @@ def test_create_channel_rest_interceptors(null_interceptor): ), mock.patch.object( transports.EventarcRestInterceptor, "post_create_channel" ) as post, mock.patch.object( + transports.EventarcRestInterceptor, "post_create_channel_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.EventarcRestInterceptor, "pre_create_channel" ) as pre: 
pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = eventarc.CreateChannelRequest.pb(eventarc.CreateChannelRequest()) transcode.return_value = { "method": "post", @@ -27695,6 +27783,7 @@ def test_create_channel_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_channel( request, @@ -27706,6 +27795,7 @@ def test_create_channel_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_channel_rest_bad_request(request_type=eventarc.UpdateChannelRequest): @@ -27865,10 +27955,13 @@ def test_update_channel_rest_interceptors(null_interceptor): ), mock.patch.object( transports.EventarcRestInterceptor, "post_update_channel" ) as post, mock.patch.object( + transports.EventarcRestInterceptor, "post_update_channel_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.EventarcRestInterceptor, "pre_update_channel" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = eventarc.UpdateChannelRequest.pb(eventarc.UpdateChannelRequest()) transcode.return_value = { "method": "post", @@ -27890,6 +27983,7 @@ def test_update_channel_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.update_channel( request, @@ -27901,6 +27995,7 @@ def test_update_channel_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_channel_rest_bad_request(request_type=eventarc.DeleteChannelRequest): @@ -27977,10 +28072,13 @@ def test_delete_channel_rest_interceptors(null_interceptor): ), mock.patch.object( transports.EventarcRestInterceptor, "post_delete_channel" ) as post, mock.patch.object( + transports.EventarcRestInterceptor, "post_delete_channel_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.EventarcRestInterceptor, "pre_delete_channel" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = eventarc.DeleteChannelRequest.pb(eventarc.DeleteChannelRequest()) transcode.return_value = { "method": "post", @@ -28002,6 +28100,7 @@ def test_delete_channel_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.delete_channel( request, @@ -28013,6 +28112,7 @@ def test_delete_channel_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_provider_rest_bad_request(request_type=eventarc.GetProviderRequest): @@ -28095,10 +28195,13 @@ def test_get_provider_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.EventarcRestInterceptor, "post_get_provider" ) as post, mock.patch.object( + transports.EventarcRestInterceptor, "post_get_provider_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.EventarcRestInterceptor, "pre_get_provider" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = 
eventarc.GetProviderRequest.pb(eventarc.GetProviderRequest()) transcode.return_value = { "method": "post", @@ -28120,6 +28223,7 @@ def test_get_provider_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = discovery.Provider() + post_with_metadata.return_value = discovery.Provider(), metadata client.get_provider( request, @@ -28131,6 +28235,7 @@ def test_get_provider_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_providers_rest_bad_request(request_type=eventarc.ListProvidersRequest): @@ -28213,10 +28318,13 @@ def test_list_providers_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.EventarcRestInterceptor, "post_list_providers" ) as post, mock.patch.object( + transports.EventarcRestInterceptor, "post_list_providers_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.EventarcRestInterceptor, "pre_list_providers" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = eventarc.ListProvidersRequest.pb(eventarc.ListProvidersRequest()) transcode.return_value = { "method": "post", @@ -28240,6 +28348,7 @@ def test_list_providers_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = eventarc.ListProvidersResponse() + post_with_metadata.return_value = eventarc.ListProvidersResponse(), metadata client.list_providers( request, @@ -28251,6 +28360,7 @@ def test_list_providers_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_channel_connection_rest_bad_request( @@ -28343,10 +28453,13 @@ def test_get_channel_connection_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.EventarcRestInterceptor, "post_get_channel_connection" ) as post, mock.patch.object( + transports.EventarcRestInterceptor, "post_get_channel_connection_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.EventarcRestInterceptor, "pre_get_channel_connection" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = eventarc.GetChannelConnectionRequest.pb( eventarc.GetChannelConnectionRequest() ) @@ -28372,6 +28485,10 @@ def test_get_channel_connection_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = channel_connection.ChannelConnection() + post_with_metadata.return_value = ( + channel_connection.ChannelConnection(), + metadata, + ) client.get_channel_connection( request, @@ -28383,6 +28500,7 @@ def test_get_channel_connection_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_channel_connections_rest_bad_request( @@ -28467,10 +28585,14 @@ def test_list_channel_connections_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.EventarcRestInterceptor, "post_list_channel_connections" ) as post, mock.patch.object( + transports.EventarcRestInterceptor, + "post_list_channel_connections_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.EventarcRestInterceptor, "pre_list_channel_connections" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = eventarc.ListChannelConnectionsRequest.pb( 
eventarc.ListChannelConnectionsRequest() ) @@ -28496,6 +28618,10 @@ def test_list_channel_connections_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = eventarc.ListChannelConnectionsResponse() + post_with_metadata.return_value = ( + eventarc.ListChannelConnectionsResponse(), + metadata, + ) client.list_channel_connections( request, @@ -28507,6 +28633,7 @@ def test_list_channel_connections_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_channel_connection_rest_bad_request( @@ -28662,10 +28789,14 @@ def test_create_channel_connection_rest_interceptors(null_interceptor): ), mock.patch.object( transports.EventarcRestInterceptor, "post_create_channel_connection" ) as post, mock.patch.object( + transports.EventarcRestInterceptor, + "post_create_channel_connection_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.EventarcRestInterceptor, "pre_create_channel_connection" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = eventarc.CreateChannelConnectionRequest.pb( eventarc.CreateChannelConnectionRequest() ) @@ -28689,6 +28820,7 @@ def test_create_channel_connection_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_channel_connection( request, @@ -28700,6 +28832,7 @@ def test_create_channel_connection_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_channel_connection_rest_bad_request( @@ -28782,10 +28915,14 @@ def test_delete_channel_connection_rest_interceptors(null_interceptor): ), mock.patch.object( transports.EventarcRestInterceptor, "post_delete_channel_connection" ) as post, mock.patch.object( + transports.EventarcRestInterceptor, + "post_delete_channel_connection_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.EventarcRestInterceptor, "pre_delete_channel_connection" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = eventarc.DeleteChannelConnectionRequest.pb( eventarc.DeleteChannelConnectionRequest() ) @@ -28809,6 +28946,7 @@ def test_delete_channel_connection_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.delete_channel_connection( request, @@ -28820,6 +28958,7 @@ def test_delete_channel_connection_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_google_channel_config_rest_bad_request( @@ -28904,10 +29043,14 @@ def test_get_google_channel_config_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.EventarcRestInterceptor, "post_get_google_channel_config" ) as post, mock.patch.object( + transports.EventarcRestInterceptor, + "post_get_google_channel_config_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.EventarcRestInterceptor, "pre_get_google_channel_config" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = eventarc.GetGoogleChannelConfigRequest.pb( 
eventarc.GetGoogleChannelConfigRequest() ) @@ -28933,6 +29076,10 @@ def test_get_google_channel_config_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = google_channel_config.GoogleChannelConfig() + post_with_metadata.return_value = ( + google_channel_config.GoogleChannelConfig(), + metadata, + ) client.get_google_channel_config( request, @@ -28944,6 +29091,7 @@ def test_get_google_channel_config_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_google_channel_config_rest_bad_request( @@ -29112,10 +29260,14 @@ def test_update_google_channel_config_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.EventarcRestInterceptor, "post_update_google_channel_config" ) as post, mock.patch.object( + transports.EventarcRestInterceptor, + "post_update_google_channel_config_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.EventarcRestInterceptor, "pre_update_google_channel_config" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = eventarc.UpdateGoogleChannelConfigRequest.pb( eventarc.UpdateGoogleChannelConfigRequest() ) @@ -29141,6 +29293,10 @@ def test_update_google_channel_config_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gce_google_channel_config.GoogleChannelConfig() + post_with_metadata.return_value = ( + gce_google_channel_config.GoogleChannelConfig(), + metadata, + ) client.update_google_channel_config( request, @@ -29152,6 +29308,7 @@ def test_update_google_channel_config_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_message_bus_rest_bad_request(request_type=eventarc.GetMessageBusRequest): @@ -29240,10 +29397,13 @@ def test_get_message_bus_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.EventarcRestInterceptor, "post_get_message_bus" ) as post, mock.patch.object( + transports.EventarcRestInterceptor, "post_get_message_bus_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.EventarcRestInterceptor, "pre_get_message_bus" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = eventarc.GetMessageBusRequest.pb(eventarc.GetMessageBusRequest()) transcode.return_value = { "method": "post", @@ -29265,6 +29425,7 @@ def test_get_message_bus_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = message_bus.MessageBus() + post_with_metadata.return_value = message_bus.MessageBus(), metadata client.get_message_bus( request, @@ -29276,6 +29437,7 @@ def test_get_message_bus_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_message_buses_rest_bad_request( @@ -29360,10 +29522,13 @@ def test_list_message_buses_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.EventarcRestInterceptor, "post_list_message_buses" ) as post, mock.patch.object( + transports.EventarcRestInterceptor, "post_list_message_buses_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.EventarcRestInterceptor, "pre_list_message_buses" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = 
eventarc.ListMessageBusesRequest.pb( eventarc.ListMessageBusesRequest() ) @@ -29389,6 +29554,7 @@ def test_list_message_buses_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = eventarc.ListMessageBusesResponse() + post_with_metadata.return_value = eventarc.ListMessageBusesResponse(), metadata client.list_message_buses( request, @@ -29400,6 +29566,7 @@ def test_list_message_buses_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_message_bus_enrollments_rest_bad_request( @@ -29486,10 +29653,14 @@ def test_list_message_bus_enrollments_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.EventarcRestInterceptor, "post_list_message_bus_enrollments" ) as post, mock.patch.object( + transports.EventarcRestInterceptor, + "post_list_message_bus_enrollments_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.EventarcRestInterceptor, "pre_list_message_bus_enrollments" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = eventarc.ListMessageBusEnrollmentsRequest.pb( eventarc.ListMessageBusEnrollmentsRequest() ) @@ -29515,6 +29686,10 @@ def test_list_message_bus_enrollments_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = eventarc.ListMessageBusEnrollmentsResponse() + post_with_metadata.return_value = ( + eventarc.ListMessageBusEnrollmentsResponse(), + metadata, + ) client.list_message_bus_enrollments( request, @@ -29526,6 +29701,7 @@ def test_list_message_bus_enrollments_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_message_bus_rest_bad_request( @@ -29683,10 +29859,13 @@ def test_create_message_bus_rest_interceptors(null_interceptor): ), mock.patch.object( transports.EventarcRestInterceptor, "post_create_message_bus" ) as post, mock.patch.object( + transports.EventarcRestInterceptor, "post_create_message_bus_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.EventarcRestInterceptor, "pre_create_message_bus" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = eventarc.CreateMessageBusRequest.pb( eventarc.CreateMessageBusRequest() ) @@ -29710,6 +29889,7 @@ def test_create_message_bus_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_message_bus( request, @@ -29721,6 +29901,7 @@ def test_create_message_bus_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_message_bus_rest_bad_request( @@ -29886,10 +30067,13 @@ def test_update_message_bus_rest_interceptors(null_interceptor): ), mock.patch.object( transports.EventarcRestInterceptor, "post_update_message_bus" ) as post, mock.patch.object( + transports.EventarcRestInterceptor, "post_update_message_bus_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.EventarcRestInterceptor, "pre_update_message_bus" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = eventarc.UpdateMessageBusRequest.pb( eventarc.UpdateMessageBusRequest() ) @@ -29913,6 +30097,7 @@ def 
test_update_message_bus_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.update_message_bus( request, @@ -29924,6 +30109,7 @@ def test_update_message_bus_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_message_bus_rest_bad_request( @@ -30002,10 +30188,13 @@ def test_delete_message_bus_rest_interceptors(null_interceptor): ), mock.patch.object( transports.EventarcRestInterceptor, "post_delete_message_bus" ) as post, mock.patch.object( + transports.EventarcRestInterceptor, "post_delete_message_bus_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.EventarcRestInterceptor, "pre_delete_message_bus" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = eventarc.DeleteMessageBusRequest.pb( eventarc.DeleteMessageBusRequest() ) @@ -30029,6 +30218,7 @@ def test_delete_message_bus_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.delete_message_bus( request, @@ -30040,6 +30230,7 @@ def test_delete_message_bus_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_enrollment_rest_bad_request(request_type=eventarc.GetEnrollmentRequest): @@ -30132,10 +30323,13 @@ def test_get_enrollment_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.EventarcRestInterceptor, "post_get_enrollment" ) as post, mock.patch.object( + transports.EventarcRestInterceptor, "post_get_enrollment_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.EventarcRestInterceptor, "pre_get_enrollment" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = eventarc.GetEnrollmentRequest.pb(eventarc.GetEnrollmentRequest()) transcode.return_value = { "method": "post", @@ -30157,6 +30351,7 @@ def test_get_enrollment_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = enrollment.Enrollment() + post_with_metadata.return_value = enrollment.Enrollment(), metadata client.get_enrollment( request, @@ -30168,6 +30363,7 @@ def test_get_enrollment_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_enrollments_rest_bad_request( @@ -30252,10 +30448,13 @@ def test_list_enrollments_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.EventarcRestInterceptor, "post_list_enrollments" ) as post, mock.patch.object( + transports.EventarcRestInterceptor, "post_list_enrollments_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.EventarcRestInterceptor, "pre_list_enrollments" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = eventarc.ListEnrollmentsRequest.pb( eventarc.ListEnrollmentsRequest() ) @@ -30281,6 +30480,7 @@ def test_list_enrollments_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = eventarc.ListEnrollmentsResponse() + post_with_metadata.return_value = eventarc.ListEnrollmentsResponse(), 
metadata client.list_enrollments( request, @@ -30292,6 +30492,7 @@ def test_list_enrollments_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_enrollment_rest_bad_request( @@ -30450,10 +30651,13 @@ def test_create_enrollment_rest_interceptors(null_interceptor): ), mock.patch.object( transports.EventarcRestInterceptor, "post_create_enrollment" ) as post, mock.patch.object( + transports.EventarcRestInterceptor, "post_create_enrollment_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.EventarcRestInterceptor, "pre_create_enrollment" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = eventarc.CreateEnrollmentRequest.pb( eventarc.CreateEnrollmentRequest() ) @@ -30477,6 +30681,7 @@ def test_create_enrollment_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_enrollment( request, @@ -30488,6 +30693,7 @@ def test_create_enrollment_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_enrollment_rest_bad_request( @@ -30650,10 +30856,13 @@ def test_update_enrollment_rest_interceptors(null_interceptor): ), mock.patch.object( transports.EventarcRestInterceptor, "post_update_enrollment" ) as post, mock.patch.object( + transports.EventarcRestInterceptor, "post_update_enrollment_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.EventarcRestInterceptor, "pre_update_enrollment" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = eventarc.UpdateEnrollmentRequest.pb( eventarc.UpdateEnrollmentRequest() ) @@ -30677,6 +30886,7 @@ def test_update_enrollment_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.update_enrollment( request, @@ -30688,6 +30898,7 @@ def test_update_enrollment_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_enrollment_rest_bad_request( @@ -30766,10 +30977,13 @@ def test_delete_enrollment_rest_interceptors(null_interceptor): ), mock.patch.object( transports.EventarcRestInterceptor, "post_delete_enrollment" ) as post, mock.patch.object( + transports.EventarcRestInterceptor, "post_delete_enrollment_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.EventarcRestInterceptor, "pre_delete_enrollment" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = eventarc.DeleteEnrollmentRequest.pb( eventarc.DeleteEnrollmentRequest() ) @@ -30793,6 +31007,7 @@ def test_delete_enrollment_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.delete_enrollment( request, @@ -30804,6 +31019,7 @@ def test_delete_enrollment_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_pipeline_rest_bad_request(request_type=eventarc.GetPipelineRequest): @@ 
-30892,10 +31108,13 @@ def test_get_pipeline_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.EventarcRestInterceptor, "post_get_pipeline" ) as post, mock.patch.object( + transports.EventarcRestInterceptor, "post_get_pipeline_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.EventarcRestInterceptor, "pre_get_pipeline" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = eventarc.GetPipelineRequest.pb(eventarc.GetPipelineRequest()) transcode.return_value = { "method": "post", @@ -30917,6 +31136,7 @@ def test_get_pipeline_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = pipeline.Pipeline() + post_with_metadata.return_value = pipeline.Pipeline(), metadata client.get_pipeline( request, @@ -30928,6 +31148,7 @@ def test_get_pipeline_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_pipelines_rest_bad_request(request_type=eventarc.ListPipelinesRequest): @@ -31010,10 +31231,13 @@ def test_list_pipelines_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.EventarcRestInterceptor, "post_list_pipelines" ) as post, mock.patch.object( + transports.EventarcRestInterceptor, "post_list_pipelines_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.EventarcRestInterceptor, "pre_list_pipelines" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = eventarc.ListPipelinesRequest.pb(eventarc.ListPipelinesRequest()) transcode.return_value = { "method": "post", @@ -31037,6 +31261,7 @@ def test_list_pipelines_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = eventarc.ListPipelinesResponse() + post_with_metadata.return_value = eventarc.ListPipelinesResponse(), metadata client.list_pipelines( request, @@ -31048,6 +31273,7 @@ def test_list_pipelines_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_pipeline_rest_bad_request(request_type=eventarc.CreatePipelineRequest): @@ -31243,10 +31469,13 @@ def test_create_pipeline_rest_interceptors(null_interceptor): ), mock.patch.object( transports.EventarcRestInterceptor, "post_create_pipeline" ) as post, mock.patch.object( + transports.EventarcRestInterceptor, "post_create_pipeline_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.EventarcRestInterceptor, "pre_create_pipeline" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = eventarc.CreatePipelineRequest.pb(eventarc.CreatePipelineRequest()) transcode.return_value = { "method": "post", @@ -31268,6 +31497,7 @@ def test_create_pipeline_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_pipeline( request, @@ -31279,6 +31509,7 @@ def test_create_pipeline_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_pipeline_rest_bad_request(request_type=eventarc.UpdatePipelineRequest): @@ -31478,10 +31709,13 @@ def test_update_pipeline_rest_interceptors(null_interceptor): ), mock.patch.object( 
transports.EventarcRestInterceptor, "post_update_pipeline" ) as post, mock.patch.object( + transports.EventarcRestInterceptor, "post_update_pipeline_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.EventarcRestInterceptor, "pre_update_pipeline" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = eventarc.UpdatePipelineRequest.pb(eventarc.UpdatePipelineRequest()) transcode.return_value = { "method": "post", @@ -31503,6 +31737,7 @@ def test_update_pipeline_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.update_pipeline( request, @@ -31514,6 +31749,7 @@ def test_update_pipeline_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_pipeline_rest_bad_request(request_type=eventarc.DeletePipelineRequest): @@ -31590,10 +31826,13 @@ def test_delete_pipeline_rest_interceptors(null_interceptor): ), mock.patch.object( transports.EventarcRestInterceptor, "post_delete_pipeline" ) as post, mock.patch.object( + transports.EventarcRestInterceptor, "post_delete_pipeline_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.EventarcRestInterceptor, "pre_delete_pipeline" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = eventarc.DeletePipelineRequest.pb(eventarc.DeletePipelineRequest()) transcode.return_value = { "method": "post", @@ -31615,6 +31854,7 @@ def test_delete_pipeline_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.delete_pipeline( request, @@ -31626,6 +31866,7 @@ def test_delete_pipeline_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_google_api_source_rest_bad_request( @@ -31722,10 +31963,13 @@ def test_get_google_api_source_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.EventarcRestInterceptor, "post_get_google_api_source" ) as post, mock.patch.object( + transports.EventarcRestInterceptor, "post_get_google_api_source_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.EventarcRestInterceptor, "pre_get_google_api_source" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = eventarc.GetGoogleApiSourceRequest.pb( eventarc.GetGoogleApiSourceRequest() ) @@ -31751,6 +31995,7 @@ def test_get_google_api_source_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = google_api_source.GoogleApiSource() + post_with_metadata.return_value = google_api_source.GoogleApiSource(), metadata client.get_google_api_source( request, @@ -31762,6 +32007,7 @@ def test_get_google_api_source_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_google_api_sources_rest_bad_request( @@ -31846,10 +32092,13 @@ def test_list_google_api_sources_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.EventarcRestInterceptor, "post_list_google_api_sources" ) as post, mock.patch.object( + 
transports.EventarcRestInterceptor, "post_list_google_api_sources_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.EventarcRestInterceptor, "pre_list_google_api_sources" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = eventarc.ListGoogleApiSourcesRequest.pb( eventarc.ListGoogleApiSourcesRequest() ) @@ -31875,6 +32124,10 @@ def test_list_google_api_sources_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = eventarc.ListGoogleApiSourcesResponse() + post_with_metadata.return_value = ( + eventarc.ListGoogleApiSourcesResponse(), + metadata, + ) client.list_google_api_sources( request, @@ -31886,6 +32139,7 @@ def test_list_google_api_sources_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_google_api_source_rest_bad_request( @@ -32044,10 +32298,14 @@ def test_create_google_api_source_rest_interceptors(null_interceptor): ), mock.patch.object( transports.EventarcRestInterceptor, "post_create_google_api_source" ) as post, mock.patch.object( + transports.EventarcRestInterceptor, + "post_create_google_api_source_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.EventarcRestInterceptor, "pre_create_google_api_source" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = eventarc.CreateGoogleApiSourceRequest.pb( eventarc.CreateGoogleApiSourceRequest() ) @@ -32071,6 +32329,7 @@ def test_create_google_api_source_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_google_api_source( request, @@ -32082,6 +32341,7 @@ def test_create_google_api_source_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_google_api_source_rest_bad_request( @@ -32248,10 +32508,14 @@ def test_update_google_api_source_rest_interceptors(null_interceptor): ), mock.patch.object( transports.EventarcRestInterceptor, "post_update_google_api_source" ) as post, mock.patch.object( + transports.EventarcRestInterceptor, + "post_update_google_api_source_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.EventarcRestInterceptor, "pre_update_google_api_source" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = eventarc.UpdateGoogleApiSourceRequest.pb( eventarc.UpdateGoogleApiSourceRequest() ) @@ -32275,6 +32539,7 @@ def test_update_google_api_source_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.update_google_api_source( request, @@ -32286,6 +32551,7 @@ def test_update_google_api_source_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_google_api_source_rest_bad_request( @@ -32368,10 +32634,14 @@ def test_delete_google_api_source_rest_interceptors(null_interceptor): ), mock.patch.object( transports.EventarcRestInterceptor, "post_delete_google_api_source" ) as post, mock.patch.object( + transports.EventarcRestInterceptor, + 
"post_delete_google_api_source_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.EventarcRestInterceptor, "pre_delete_google_api_source" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = eventarc.DeleteGoogleApiSourceRequest.pb( eventarc.DeleteGoogleApiSourceRequest() ) @@ -32395,6 +32665,7 @@ def test_delete_google_api_source_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.delete_google_api_source( request, @@ -32406,6 +32677,7 @@ def test_delete_google_api_source_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationRequest): diff --git a/packages/google-cloud-filestore/CHANGELOG.md b/packages/google-cloud-filestore/CHANGELOG.md index 1b707b3e7bd1..14dffdbdeed0 100644 --- a/packages/google-cloud-filestore/CHANGELOG.md +++ b/packages/google-cloud-filestore/CHANGELOG.md @@ -1,5 +1,13 @@ # Changelog +## [1.12.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-filestore-v1.11.0...google-cloud-filestore-v1.12.0) (2025-02-12) + + +### Features + +* Add REST Interceptors which support reading metadata ([908d742](https://github.com/googleapis/google-cloud-python/commit/908d7421a4adadd7407df7ec2a25e25688ff180f)) +* Add support for reading selective GAPIC generation methods from service YAML ([908d742](https://github.com/googleapis/google-cloud-python/commit/908d7421a4adadd7407df7ec2a25e25688ff180f)) + ## [1.11.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-filestore-v1.10.1...google-cloud-filestore-v1.11.0) (2024-12-12) diff --git a/packages/google-cloud-filestore/google/cloud/filestore/gapic_version.py b/packages/google-cloud-filestore/google/cloud/filestore/gapic_version.py index 50d842f376d0..739fdfae141c 100644 --- a/packages/google-cloud-filestore/google/cloud/filestore/gapic_version.py +++ b/packages/google-cloud-filestore/google/cloud/filestore/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.11.0" # {x-release-please-version} +__version__ = "1.12.0" # {x-release-please-version} diff --git a/packages/google-cloud-filestore/google/cloud/filestore_v1/gapic_version.py b/packages/google-cloud-filestore/google/cloud/filestore_v1/gapic_version.py index 50d842f376d0..739fdfae141c 100644 --- a/packages/google-cloud-filestore/google/cloud/filestore_v1/gapic_version.py +++ b/packages/google-cloud-filestore/google/cloud/filestore_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "1.11.0" # {x-release-please-version} +__version__ = "1.12.0" # {x-release-please-version} diff --git a/packages/google-cloud-filestore/google/cloud/filestore_v1/services/cloud_filestore_manager/client.py b/packages/google-cloud-filestore/google/cloud/filestore_v1/services/cloud_filestore_manager/client.py index 273d0c676af5..ebb46394e114 100644 --- a/packages/google-cloud-filestore/google/cloud/filestore_v1/services/cloud_filestore_manager/client.py +++ b/packages/google-cloud-filestore/google/cloud/filestore_v1/services/cloud_filestore_manager/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -565,6 +567,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -2960,16 +2989,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -3015,16 +3048,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def delete_operation( self, @@ -3181,16 +3218,20 @@ def get_location( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def list_locations( self, @@ -3236,16 +3277,20 @@ def list_locations( # Validate the universe domain. 
self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-filestore/google/cloud/filestore_v1/services/cloud_filestore_manager/transports/rest.py b/packages/google-cloud-filestore/google/cloud/filestore_v1/services/cloud_filestore_manager/transports/rest.py index 6c7a98c40c74..f5e9e508aa19 100644 --- a/packages/google-cloud-filestore/google/cloud/filestore_v1/services/cloud_filestore_manager/transports/rest.py +++ b/packages/google-cloud-filestore/google/cloud/filestore_v1/services/cloud_filestore_manager/transports/rest.py @@ -232,12 +232,35 @@ def post_create_backup( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_backup - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_backup_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CloudFilestoreManager server but before - it is returned to user code. + it is returned to user code. This `post_create_backup` interceptor runs + before the `post_create_backup_with_metadata` interceptor. """ return response + def post_create_backup_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_backup + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudFilestoreManager server but before it is returned to user code. + + We recommend only using this `post_create_backup_with_metadata` + interceptor in new development instead of the `post_create_backup` interceptor. + When both interceptors are used, this `post_create_backup_with_metadata` interceptor runs after the + `post_create_backup` interceptor. The (possibly modified) response returned by + `post_create_backup` will be passed to + `post_create_backup_with_metadata`. + """ + return response, metadata + def pre_create_instance( self, request: cloud_filestore_service.CreateInstanceRequest, @@ -258,12 +281,35 @@ def post_create_instance( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_instance - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_instance_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CloudFilestoreManager server but before - it is returned to user code. + it is returned to user code. This `post_create_instance` interceptor runs + before the `post_create_instance_with_metadata` interceptor. 
""" return response + def post_create_instance_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_instance + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudFilestoreManager server but before it is returned to user code. + + We recommend only using this `post_create_instance_with_metadata` + interceptor in new development instead of the `post_create_instance` interceptor. + When both interceptors are used, this `post_create_instance_with_metadata` interceptor runs after the + `post_create_instance` interceptor. The (possibly modified) response returned by + `post_create_instance` will be passed to + `post_create_instance_with_metadata`. + """ + return response, metadata + def pre_create_snapshot( self, request: cloud_filestore_service.CreateSnapshotRequest, @@ -284,12 +330,35 @@ def post_create_snapshot( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_snapshot - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_snapshot_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CloudFilestoreManager server but before - it is returned to user code. + it is returned to user code. This `post_create_snapshot` interceptor runs + before the `post_create_snapshot_with_metadata` interceptor. """ return response + def post_create_snapshot_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_snapshot + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudFilestoreManager server but before it is returned to user code. + + We recommend only using this `post_create_snapshot_with_metadata` + interceptor in new development instead of the `post_create_snapshot` interceptor. + When both interceptors are used, this `post_create_snapshot_with_metadata` interceptor runs after the + `post_create_snapshot` interceptor. The (possibly modified) response returned by + `post_create_snapshot` will be passed to + `post_create_snapshot_with_metadata`. + """ + return response, metadata + def pre_delete_backup( self, request: cloud_filestore_service.DeleteBackupRequest, @@ -310,12 +379,35 @@ def post_delete_backup( ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_backup - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_backup_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CloudFilestoreManager server but before - it is returned to user code. + it is returned to user code. This `post_delete_backup` interceptor runs + before the `post_delete_backup_with_metadata` interceptor. 
""" return response + def post_delete_backup_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_backup + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudFilestoreManager server but before it is returned to user code. + + We recommend only using this `post_delete_backup_with_metadata` + interceptor in new development instead of the `post_delete_backup` interceptor. + When both interceptors are used, this `post_delete_backup_with_metadata` interceptor runs after the + `post_delete_backup` interceptor. The (possibly modified) response returned by + `post_delete_backup` will be passed to + `post_delete_backup_with_metadata`. + """ + return response, metadata + def pre_delete_instance( self, request: cloud_filestore_service.DeleteInstanceRequest, @@ -336,12 +428,35 @@ def post_delete_instance( ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_instance - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_instance_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CloudFilestoreManager server but before - it is returned to user code. + it is returned to user code. This `post_delete_instance` interceptor runs + before the `post_delete_instance_with_metadata` interceptor. """ return response + def post_delete_instance_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_instance + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudFilestoreManager server but before it is returned to user code. + + We recommend only using this `post_delete_instance_with_metadata` + interceptor in new development instead of the `post_delete_instance` interceptor. + When both interceptors are used, this `post_delete_instance_with_metadata` interceptor runs after the + `post_delete_instance` interceptor. The (possibly modified) response returned by + `post_delete_instance` will be passed to + `post_delete_instance_with_metadata`. + """ + return response, metadata + def pre_delete_snapshot( self, request: cloud_filestore_service.DeleteSnapshotRequest, @@ -362,12 +477,35 @@ def post_delete_snapshot( ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_snapshot - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_snapshot_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CloudFilestoreManager server but before - it is returned to user code. + it is returned to user code. This `post_delete_snapshot` interceptor runs + before the `post_delete_snapshot_with_metadata` interceptor. 
""" return response + def post_delete_snapshot_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_snapshot + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudFilestoreManager server but before it is returned to user code. + + We recommend only using this `post_delete_snapshot_with_metadata` + interceptor in new development instead of the `post_delete_snapshot` interceptor. + When both interceptors are used, this `post_delete_snapshot_with_metadata` interceptor runs after the + `post_delete_snapshot` interceptor. The (possibly modified) response returned by + `post_delete_snapshot` will be passed to + `post_delete_snapshot_with_metadata`. + """ + return response, metadata + def pre_get_backup( self, request: cloud_filestore_service.GetBackupRequest, @@ -388,12 +526,35 @@ def post_get_backup( ) -> cloud_filestore_service.Backup: """Post-rpc interceptor for get_backup - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_backup_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CloudFilestoreManager server but before - it is returned to user code. + it is returned to user code. This `post_get_backup` interceptor runs + before the `post_get_backup_with_metadata` interceptor. """ return response + def post_get_backup_with_metadata( + self, + response: cloud_filestore_service.Backup, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[cloud_filestore_service.Backup, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_backup + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudFilestoreManager server but before it is returned to user code. + + We recommend only using this `post_get_backup_with_metadata` + interceptor in new development instead of the `post_get_backup` interceptor. + When both interceptors are used, this `post_get_backup_with_metadata` interceptor runs after the + `post_get_backup` interceptor. The (possibly modified) response returned by + `post_get_backup` will be passed to + `post_get_backup_with_metadata`. + """ + return response, metadata + def pre_get_instance( self, request: cloud_filestore_service.GetInstanceRequest, @@ -414,12 +575,37 @@ def post_get_instance( ) -> cloud_filestore_service.Instance: """Post-rpc interceptor for get_instance - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_instance_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CloudFilestoreManager server but before - it is returned to user code. + it is returned to user code. This `post_get_instance` interceptor runs + before the `post_get_instance_with_metadata` interceptor. """ return response + def post_get_instance_with_metadata( + self, + response: cloud_filestore_service.Instance, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + cloud_filestore_service.Instance, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for get_instance + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudFilestoreManager server but before it is returned to user code. 
+ + We recommend only using this `post_get_instance_with_metadata` + interceptor in new development instead of the `post_get_instance` interceptor. + When both interceptors are used, this `post_get_instance_with_metadata` interceptor runs after the + `post_get_instance` interceptor. The (possibly modified) response returned by + `post_get_instance` will be passed to + `post_get_instance_with_metadata`. + """ + return response, metadata + def pre_get_snapshot( self, request: cloud_filestore_service.GetSnapshotRequest, @@ -440,12 +626,37 @@ def post_get_snapshot( ) -> cloud_filestore_service.Snapshot: """Post-rpc interceptor for get_snapshot - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_snapshot_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CloudFilestoreManager server but before - it is returned to user code. + it is returned to user code. This `post_get_snapshot` interceptor runs + before the `post_get_snapshot_with_metadata` interceptor. """ return response + def post_get_snapshot_with_metadata( + self, + response: cloud_filestore_service.Snapshot, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + cloud_filestore_service.Snapshot, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for get_snapshot + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudFilestoreManager server but before it is returned to user code. + + We recommend only using this `post_get_snapshot_with_metadata` + interceptor in new development instead of the `post_get_snapshot` interceptor. + When both interceptors are used, this `post_get_snapshot_with_metadata` interceptor runs after the + `post_get_snapshot` interceptor. The (possibly modified) response returned by + `post_get_snapshot` will be passed to + `post_get_snapshot_with_metadata`. + """ + return response, metadata + def pre_list_backups( self, request: cloud_filestore_service.ListBackupsRequest, @@ -466,12 +677,38 @@ def post_list_backups( ) -> cloud_filestore_service.ListBackupsResponse: """Post-rpc interceptor for list_backups - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_backups_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CloudFilestoreManager server but before - it is returned to user code. + it is returned to user code. This `post_list_backups` interceptor runs + before the `post_list_backups_with_metadata` interceptor. """ return response + def post_list_backups_with_metadata( + self, + response: cloud_filestore_service.ListBackupsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + cloud_filestore_service.ListBackupsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_backups + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudFilestoreManager server but before it is returned to user code. + + We recommend only using this `post_list_backups_with_metadata` + interceptor in new development instead of the `post_list_backups` interceptor. + When both interceptors are used, this `post_list_backups_with_metadata` interceptor runs after the + `post_list_backups` interceptor. 
The (possibly modified) response returned by + `post_list_backups` will be passed to + `post_list_backups_with_metadata`. + """ + return response, metadata + def pre_list_instances( self, request: cloud_filestore_service.ListInstancesRequest, @@ -492,12 +729,38 @@ def post_list_instances( ) -> cloud_filestore_service.ListInstancesResponse: """Post-rpc interceptor for list_instances - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_instances_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CloudFilestoreManager server but before - it is returned to user code. + it is returned to user code. This `post_list_instances` interceptor runs + before the `post_list_instances_with_metadata` interceptor. """ return response + def post_list_instances_with_metadata( + self, + response: cloud_filestore_service.ListInstancesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + cloud_filestore_service.ListInstancesResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_instances + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudFilestoreManager server but before it is returned to user code. + + We recommend only using this `post_list_instances_with_metadata` + interceptor in new development instead of the `post_list_instances` interceptor. + When both interceptors are used, this `post_list_instances_with_metadata` interceptor runs after the + `post_list_instances` interceptor. The (possibly modified) response returned by + `post_list_instances` will be passed to + `post_list_instances_with_metadata`. + """ + return response, metadata + def pre_list_snapshots( self, request: cloud_filestore_service.ListSnapshotsRequest, @@ -518,12 +781,38 @@ def post_list_snapshots( ) -> cloud_filestore_service.ListSnapshotsResponse: """Post-rpc interceptor for list_snapshots - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_snapshots_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CloudFilestoreManager server but before - it is returned to user code. + it is returned to user code. This `post_list_snapshots` interceptor runs + before the `post_list_snapshots_with_metadata` interceptor. """ return response + def post_list_snapshots_with_metadata( + self, + response: cloud_filestore_service.ListSnapshotsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + cloud_filestore_service.ListSnapshotsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_snapshots + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudFilestoreManager server but before it is returned to user code. + + We recommend only using this `post_list_snapshots_with_metadata` + interceptor in new development instead of the `post_list_snapshots` interceptor. + When both interceptors are used, this `post_list_snapshots_with_metadata` interceptor runs after the + `post_list_snapshots` interceptor. The (possibly modified) response returned by + `post_list_snapshots` will be passed to + `post_list_snapshots_with_metadata`. 
+ """ + return response, metadata + def pre_restore_instance( self, request: cloud_filestore_service.RestoreInstanceRequest, @@ -544,12 +833,35 @@ def post_restore_instance( ) -> operations_pb2.Operation: """Post-rpc interceptor for restore_instance - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_restore_instance_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CloudFilestoreManager server but before - it is returned to user code. + it is returned to user code. This `post_restore_instance` interceptor runs + before the `post_restore_instance_with_metadata` interceptor. """ return response + def post_restore_instance_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for restore_instance + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudFilestoreManager server but before it is returned to user code. + + We recommend only using this `post_restore_instance_with_metadata` + interceptor in new development instead of the `post_restore_instance` interceptor. + When both interceptors are used, this `post_restore_instance_with_metadata` interceptor runs after the + `post_restore_instance` interceptor. The (possibly modified) response returned by + `post_restore_instance` will be passed to + `post_restore_instance_with_metadata`. + """ + return response, metadata + def pre_revert_instance( self, request: cloud_filestore_service.RevertInstanceRequest, @@ -570,12 +882,35 @@ def post_revert_instance( ) -> operations_pb2.Operation: """Post-rpc interceptor for revert_instance - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_revert_instance_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CloudFilestoreManager server but before - it is returned to user code. + it is returned to user code. This `post_revert_instance` interceptor runs + before the `post_revert_instance_with_metadata` interceptor. """ return response + def post_revert_instance_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for revert_instance + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudFilestoreManager server but before it is returned to user code. + + We recommend only using this `post_revert_instance_with_metadata` + interceptor in new development instead of the `post_revert_instance` interceptor. + When both interceptors are used, this `post_revert_instance_with_metadata` interceptor runs after the + `post_revert_instance` interceptor. The (possibly modified) response returned by + `post_revert_instance` will be passed to + `post_revert_instance_with_metadata`. + """ + return response, metadata + def pre_update_backup( self, request: cloud_filestore_service.UpdateBackupRequest, @@ -596,12 +931,35 @@ def post_update_backup( ) -> operations_pb2.Operation: """Post-rpc interceptor for update_backup - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_backup_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response after it is returned by the CloudFilestoreManager server but before - it is returned to user code. + it is returned to user code. This `post_update_backup` interceptor runs + before the `post_update_backup_with_metadata` interceptor. """ return response + def post_update_backup_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_backup + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudFilestoreManager server but before it is returned to user code. + + We recommend only using this `post_update_backup_with_metadata` + interceptor in new development instead of the `post_update_backup` interceptor. + When both interceptors are used, this `post_update_backup_with_metadata` interceptor runs after the + `post_update_backup` interceptor. The (possibly modified) response returned by + `post_update_backup` will be passed to + `post_update_backup_with_metadata`. + """ + return response, metadata + def pre_update_instance( self, request: cloud_filestore_service.UpdateInstanceRequest, @@ -622,12 +980,35 @@ def post_update_instance( ) -> operations_pb2.Operation: """Post-rpc interceptor for update_instance - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_instance_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CloudFilestoreManager server but before - it is returned to user code. + it is returned to user code. This `post_update_instance` interceptor runs + before the `post_update_instance_with_metadata` interceptor. """ return response + def post_update_instance_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_instance + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudFilestoreManager server but before it is returned to user code. + + We recommend only using this `post_update_instance_with_metadata` + interceptor in new development instead of the `post_update_instance` interceptor. + When both interceptors are used, this `post_update_instance_with_metadata` interceptor runs after the + `post_update_instance` interceptor. The (possibly modified) response returned by + `post_update_instance` will be passed to + `post_update_instance_with_metadata`. + """ + return response, metadata + def pre_update_snapshot( self, request: cloud_filestore_service.UpdateSnapshotRequest, @@ -648,12 +1029,35 @@ def post_update_snapshot( ) -> operations_pb2.Operation: """Post-rpc interceptor for update_snapshot - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_snapshot_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CloudFilestoreManager server but before - it is returned to user code. + it is returned to user code. This `post_update_snapshot` interceptor runs + before the `post_update_snapshot_with_metadata` interceptor. 
""" return response + def post_update_snapshot_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_snapshot + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudFilestoreManager server but before it is returned to user code. + + We recommend only using this `post_update_snapshot_with_metadata` + interceptor in new development instead of the `post_update_snapshot` interceptor. + When both interceptors are used, this `post_update_snapshot_with_metadata` interceptor runs after the + `post_update_snapshot` interceptor. The (possibly modified) response returned by + `post_update_snapshot` will be passed to + `post_update_snapshot_with_metadata`. + """ + return response, metadata + def pre_get_location( self, request: locations_pb2.GetLocationRequest, @@ -1088,6 +1492,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_backup(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_backup_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1237,6 +1645,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_instance(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_instance_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1386,6 +1798,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_snapshot(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_snapshot_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1528,6 +1944,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_backup(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_backup_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1671,6 +2091,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_instance(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_instance_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1814,6 +2238,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_snapshot(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_snapshot_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1956,6 +2384,10 @@ def __call__( 
json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_backup(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_backup_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2098,6 +2530,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_instance(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_instance_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2242,6 +2678,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_snapshot(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_snapshot_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2387,6 +2827,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_backups(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_backups_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2532,6 +2976,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_instances(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_instances_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2677,6 +3125,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_snapshots(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_snapshots_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2833,6 +3285,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_restore_instance(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_restore_instance_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2983,6 +3439,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_revert_instance(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_revert_instance_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3132,6 +3592,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_backup(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = 
self._interceptor.post_update_backup_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3281,6 +3745,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_instance(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_instance_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3431,6 +3899,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_snapshot(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_snapshot_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-filestore/samples/generated_samples/snippet_metadata_google.cloud.filestore.v1.json b/packages/google-cloud-filestore/samples/generated_samples/snippet_metadata_google.cloud.filestore.v1.json index 0d0ed54e673b..ec658b504a7a 100644 --- a/packages/google-cloud-filestore/samples/generated_samples/snippet_metadata_google.cloud.filestore.v1.json +++ b/packages/google-cloud-filestore/samples/generated_samples/snippet_metadata_google.cloud.filestore.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-filestore", - "version": "1.11.0" + "version": "1.12.0" }, "snippets": [ { diff --git a/packages/google-cloud-filestore/tests/unit/gapic/filestore_v1/test_cloud_filestore_manager.py b/packages/google-cloud-filestore/tests/unit/gapic/filestore_v1/test_cloud_filestore_manager.py index 12173798f328..c97c064f5c87 100644 --- a/packages/google-cloud-filestore/tests/unit/gapic/filestore_v1/test_cloud_filestore_manager.py +++ b/packages/google-cloud-filestore/tests/unit/gapic/filestore_v1/test_cloud_filestore_manager.py @@ -76,6 +76,13 @@ ) from google.cloud.filestore_v1.types import cloud_filestore_service +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -351,6 +358,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
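Illustrative sketch (not part of the generated change): the new `*_with_metadata` hooks receive the deserialized response together with the HTTP response headers that the REST transport now collects (the `response_metadata = [(k, str(v)) for k, v in response.headers.items()]` lines above). A minimal subscriber, assuming the usual GAPIC module layout for the filestore REST transport; the `HeaderLoggingInterceptor` name and its print-based logging are hypothetical, while the method signature is taken from the diff:

    # Hypothetical example only; import path assumed from the standard GAPIC layout.
    from typing import Sequence, Tuple, Union

    from google.cloud.filestore_v1.services.cloud_filestore_manager.transports.rest import (
        CloudFilestoreManagerRestInterceptor,
    )
    from google.cloud.filestore_v1.types import cloud_filestore_service


    class HeaderLoggingInterceptor(CloudFilestoreManagerRestInterceptor):
        """Hypothetical subclass that inspects response headers for get_backup."""

        def post_get_backup_with_metadata(
            self,
            response: cloud_filestore_service.Backup,
            metadata: Sequence[Tuple[str, Union[str, bytes]]],
        ) -> Tuple[
            cloud_filestore_service.Backup, Sequence[Tuple[str, Union[str, bytes]]]
        ]:
            # `metadata` carries the HTTP response headers gathered by the
            # transport (see the `response_metadata` wiring above).
            for key, value in metadata:
                print(f"response header: {key}={value}")
            # Return both values; the transport assigns `resp, _ = ...`, so only
            # the (possibly modified) response flows back to user code.
            return response, metadata

As the docstrings above note, when both the legacy `post_get_backup` and the new `post_get_backup_with_metadata` hooks are overridden, the legacy hook runs first and its (possibly modified) response is what the `_with_metadata` hook receives.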
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = CloudFilestoreManagerClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = CloudFilestoreManagerClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -11571,10 +11621,14 @@ def test_list_instances_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.CloudFilestoreManagerRestInterceptor, "post_list_instances" ) as post, mock.patch.object( + transports.CloudFilestoreManagerRestInterceptor, + "post_list_instances_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.CloudFilestoreManagerRestInterceptor, "pre_list_instances" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = cloud_filestore_service.ListInstancesRequest.pb( cloud_filestore_service.ListInstancesRequest() ) @@ -11600,6 +11654,10 @@ def test_list_instances_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = cloud_filestore_service.ListInstancesResponse() + post_with_metadata.return_value = ( + cloud_filestore_service.ListInstancesResponse(), + metadata, + ) client.list_instances( request, @@ -11611,6 +11669,7 @@ def test_list_instances_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_instance_rest_bad_request( @@ -11715,10 +11774,14 @@ def test_get_instance_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.CloudFilestoreManagerRestInterceptor, "post_get_instance" ) as post, mock.patch.object( + transports.CloudFilestoreManagerRestInterceptor, + "post_get_instance_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.CloudFilestoreManagerRestInterceptor, "pre_get_instance" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = cloud_filestore_service.GetInstanceRequest.pb( cloud_filestore_service.GetInstanceRequest() ) @@ -11744,6 +11807,7 @@ def test_get_instance_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = cloud_filestore_service.Instance() + post_with_metadata.return_value = cloud_filestore_service.Instance(), metadata client.get_instance( request, @@ -11755,6 +11819,7 @@ def 
test_get_instance_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_instance_rest_bad_request( @@ -11941,10 +12006,14 @@ def test_create_instance_rest_interceptors(null_interceptor): ), mock.patch.object( transports.CloudFilestoreManagerRestInterceptor, "post_create_instance" ) as post, mock.patch.object( + transports.CloudFilestoreManagerRestInterceptor, + "post_create_instance_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.CloudFilestoreManagerRestInterceptor, "pre_create_instance" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = cloud_filestore_service.CreateInstanceRequest.pb( cloud_filestore_service.CreateInstanceRequest() ) @@ -11968,6 +12037,7 @@ def test_create_instance_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_instance( request, @@ -11979,6 +12049,7 @@ def test_create_instance_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_instance_rest_bad_request( @@ -12169,10 +12240,14 @@ def test_update_instance_rest_interceptors(null_interceptor): ), mock.patch.object( transports.CloudFilestoreManagerRestInterceptor, "post_update_instance" ) as post, mock.patch.object( + transports.CloudFilestoreManagerRestInterceptor, + "post_update_instance_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.CloudFilestoreManagerRestInterceptor, "pre_update_instance" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = cloud_filestore_service.UpdateInstanceRequest.pb( cloud_filestore_service.UpdateInstanceRequest() ) @@ -12196,6 +12271,7 @@ def test_update_instance_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.update_instance( request, @@ -12207,6 +12283,7 @@ def test_update_instance_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_restore_instance_rest_bad_request( @@ -12287,10 +12364,14 @@ def test_restore_instance_rest_interceptors(null_interceptor): ), mock.patch.object( transports.CloudFilestoreManagerRestInterceptor, "post_restore_instance" ) as post, mock.patch.object( + transports.CloudFilestoreManagerRestInterceptor, + "post_restore_instance_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.CloudFilestoreManagerRestInterceptor, "pre_restore_instance" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = cloud_filestore_service.RestoreInstanceRequest.pb( cloud_filestore_service.RestoreInstanceRequest() ) @@ -12314,6 +12395,7 @@ def test_restore_instance_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.restore_instance( request, @@ -12325,6 +12407,7 @@ def test_restore_instance_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + 
post_with_metadata.assert_called_once() def test_revert_instance_rest_bad_request( @@ -12405,10 +12488,14 @@ def test_revert_instance_rest_interceptors(null_interceptor): ), mock.patch.object( transports.CloudFilestoreManagerRestInterceptor, "post_revert_instance" ) as post, mock.patch.object( + transports.CloudFilestoreManagerRestInterceptor, + "post_revert_instance_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.CloudFilestoreManagerRestInterceptor, "pre_revert_instance" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = cloud_filestore_service.RevertInstanceRequest.pb( cloud_filestore_service.RevertInstanceRequest() ) @@ -12432,6 +12519,7 @@ def test_revert_instance_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.revert_instance( request, @@ -12443,6 +12531,7 @@ def test_revert_instance_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_instance_rest_bad_request( @@ -12523,10 +12612,14 @@ def test_delete_instance_rest_interceptors(null_interceptor): ), mock.patch.object( transports.CloudFilestoreManagerRestInterceptor, "post_delete_instance" ) as post, mock.patch.object( + transports.CloudFilestoreManagerRestInterceptor, + "post_delete_instance_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.CloudFilestoreManagerRestInterceptor, "pre_delete_instance" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = cloud_filestore_service.DeleteInstanceRequest.pb( cloud_filestore_service.DeleteInstanceRequest() ) @@ -12550,6 +12643,7 @@ def test_delete_instance_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.delete_instance( request, @@ -12561,6 +12655,7 @@ def test_delete_instance_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_snapshots_rest_bad_request( @@ -12645,10 +12740,14 @@ def test_list_snapshots_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.CloudFilestoreManagerRestInterceptor, "post_list_snapshots" ) as post, mock.patch.object( + transports.CloudFilestoreManagerRestInterceptor, + "post_list_snapshots_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.CloudFilestoreManagerRestInterceptor, "pre_list_snapshots" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = cloud_filestore_service.ListSnapshotsRequest.pb( cloud_filestore_service.ListSnapshotsRequest() ) @@ -12674,6 +12773,10 @@ def test_list_snapshots_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = cloud_filestore_service.ListSnapshotsResponse() + post_with_metadata.return_value = ( + cloud_filestore_service.ListSnapshotsResponse(), + metadata, + ) client.list_snapshots( request, @@ -12685,6 +12788,7 @@ def test_list_snapshots_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_snapshot_rest_bad_request( @@ -12779,10 
+12883,14 @@ def test_get_snapshot_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.CloudFilestoreManagerRestInterceptor, "post_get_snapshot" ) as post, mock.patch.object( + transports.CloudFilestoreManagerRestInterceptor, + "post_get_snapshot_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.CloudFilestoreManagerRestInterceptor, "pre_get_snapshot" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = cloud_filestore_service.GetSnapshotRequest.pb( cloud_filestore_service.GetSnapshotRequest() ) @@ -12808,6 +12916,7 @@ def test_get_snapshot_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = cloud_filestore_service.Snapshot() + post_with_metadata.return_value = cloud_filestore_service.Snapshot(), metadata client.get_snapshot( request, @@ -12819,6 +12928,7 @@ def test_get_snapshot_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_snapshot_rest_bad_request( @@ -12974,10 +13084,14 @@ def test_create_snapshot_rest_interceptors(null_interceptor): ), mock.patch.object( transports.CloudFilestoreManagerRestInterceptor, "post_create_snapshot" ) as post, mock.patch.object( + transports.CloudFilestoreManagerRestInterceptor, + "post_create_snapshot_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.CloudFilestoreManagerRestInterceptor, "pre_create_snapshot" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = cloud_filestore_service.CreateSnapshotRequest.pb( cloud_filestore_service.CreateSnapshotRequest() ) @@ -13001,6 +13115,7 @@ def test_create_snapshot_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_snapshot( request, @@ -13012,6 +13127,7 @@ def test_create_snapshot_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_snapshot_rest_bad_request( @@ -13096,10 +13212,14 @@ def test_delete_snapshot_rest_interceptors(null_interceptor): ), mock.patch.object( transports.CloudFilestoreManagerRestInterceptor, "post_delete_snapshot" ) as post, mock.patch.object( + transports.CloudFilestoreManagerRestInterceptor, + "post_delete_snapshot_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.CloudFilestoreManagerRestInterceptor, "pre_delete_snapshot" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = cloud_filestore_service.DeleteSnapshotRequest.pb( cloud_filestore_service.DeleteSnapshotRequest() ) @@ -13123,6 +13243,7 @@ def test_delete_snapshot_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.delete_snapshot( request, @@ -13134,6 +13255,7 @@ def test_delete_snapshot_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_snapshot_rest_bad_request( @@ -13297,10 +13419,14 @@ def test_update_snapshot_rest_interceptors(null_interceptor): ), mock.patch.object( transports.CloudFilestoreManagerRestInterceptor, 
"post_update_snapshot" ) as post, mock.patch.object( + transports.CloudFilestoreManagerRestInterceptor, + "post_update_snapshot_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.CloudFilestoreManagerRestInterceptor, "pre_update_snapshot" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = cloud_filestore_service.UpdateSnapshotRequest.pb( cloud_filestore_service.UpdateSnapshotRequest() ) @@ -13324,6 +13450,7 @@ def test_update_snapshot_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.update_snapshot( request, @@ -13335,6 +13462,7 @@ def test_update_snapshot_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_backups_rest_bad_request( @@ -13421,10 +13549,14 @@ def test_list_backups_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.CloudFilestoreManagerRestInterceptor, "post_list_backups" ) as post, mock.patch.object( + transports.CloudFilestoreManagerRestInterceptor, + "post_list_backups_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.CloudFilestoreManagerRestInterceptor, "pre_list_backups" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = cloud_filestore_service.ListBackupsRequest.pb( cloud_filestore_service.ListBackupsRequest() ) @@ -13450,6 +13582,10 @@ def test_list_backups_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = cloud_filestore_service.ListBackupsResponse() + post_with_metadata.return_value = ( + cloud_filestore_service.ListBackupsResponse(), + metadata, + ) client.list_backups( request, @@ -13461,6 +13597,7 @@ def test_list_backups_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_backup_rest_bad_request( @@ -13567,10 +13704,13 @@ def test_get_backup_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.CloudFilestoreManagerRestInterceptor, "post_get_backup" ) as post, mock.patch.object( + transports.CloudFilestoreManagerRestInterceptor, "post_get_backup_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.CloudFilestoreManagerRestInterceptor, "pre_get_backup" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = cloud_filestore_service.GetBackupRequest.pb( cloud_filestore_service.GetBackupRequest() ) @@ -13596,6 +13736,7 @@ def test_get_backup_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = cloud_filestore_service.Backup() + post_with_metadata.return_value = cloud_filestore_service.Backup(), metadata client.get_backup( request, @@ -13607,6 +13748,7 @@ def test_get_backup_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_backup_rest_bad_request( @@ -13770,10 +13912,14 @@ def test_create_backup_rest_interceptors(null_interceptor): ), mock.patch.object( transports.CloudFilestoreManagerRestInterceptor, "post_create_backup" ) as post, mock.patch.object( + transports.CloudFilestoreManagerRestInterceptor, + "post_create_backup_with_metadata", + ) as 
post_with_metadata, mock.patch.object( transports.CloudFilestoreManagerRestInterceptor, "pre_create_backup" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = cloud_filestore_service.CreateBackupRequest.pb( cloud_filestore_service.CreateBackupRequest() ) @@ -13797,6 +13943,7 @@ def test_create_backup_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_backup( request, @@ -13808,6 +13955,7 @@ def test_create_backup_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_backup_rest_bad_request( @@ -13888,10 +14036,14 @@ def test_delete_backup_rest_interceptors(null_interceptor): ), mock.patch.object( transports.CloudFilestoreManagerRestInterceptor, "post_delete_backup" ) as post, mock.patch.object( + transports.CloudFilestoreManagerRestInterceptor, + "post_delete_backup_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.CloudFilestoreManagerRestInterceptor, "pre_delete_backup" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = cloud_filestore_service.DeleteBackupRequest.pb( cloud_filestore_service.DeleteBackupRequest() ) @@ -13915,6 +14067,7 @@ def test_delete_backup_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.delete_backup( request, @@ -13926,6 +14079,7 @@ def test_delete_backup_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_backup_rest_bad_request( @@ -14093,10 +14247,14 @@ def test_update_backup_rest_interceptors(null_interceptor): ), mock.patch.object( transports.CloudFilestoreManagerRestInterceptor, "post_update_backup" ) as post, mock.patch.object( + transports.CloudFilestoreManagerRestInterceptor, + "post_update_backup_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.CloudFilestoreManagerRestInterceptor, "pre_update_backup" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = cloud_filestore_service.UpdateBackupRequest.pb( cloud_filestore_service.UpdateBackupRequest() ) @@ -14120,6 +14278,7 @@ def test_update_backup_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.update_backup( request, @@ -14131,6 +14290,7 @@ def test_update_backup_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationRequest): diff --git a/packages/google-cloud-functions/CHANGELOG.md b/packages/google-cloud-functions/CHANGELOG.md index 0055f6e0f4f5..42162abe5c6c 100644 --- a/packages/google-cloud-functions/CHANGELOG.md +++ b/packages/google-cloud-functions/CHANGELOG.md @@ -1,5 +1,13 @@ # Changelog +## [1.20.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-functions-v1.19.0...google-cloud-functions-v1.20.0) (2025-02-12) + + +### Features + +* Add REST Interceptors 
which support reading metadata ([908d742](https://github.com/googleapis/google-cloud-python/commit/908d7421a4adadd7407df7ec2a25e25688ff180f)) +* Add support for reading selective GAPIC generation methods from service YAML ([908d742](https://github.com/googleapis/google-cloud-python/commit/908d7421a4adadd7407df7ec2a25e25688ff180f)) + ## [1.19.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-functions-v1.18.1...google-cloud-functions-v1.19.0) (2024-12-12) diff --git a/packages/google-cloud-functions/google/cloud/functions/gapic_version.py b/packages/google-cloud-functions/google/cloud/functions/gapic_version.py index f1337c609ff8..c8ba2b4c6a4f 100644 --- a/packages/google-cloud-functions/google/cloud/functions/gapic_version.py +++ b/packages/google-cloud-functions/google/cloud/functions/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.19.0" # {x-release-please-version} +__version__ = "1.20.0" # {x-release-please-version} diff --git a/packages/google-cloud-functions/google/cloud/functions_v1/gapic_version.py b/packages/google-cloud-functions/google/cloud/functions_v1/gapic_version.py index f1337c609ff8..c8ba2b4c6a4f 100644 --- a/packages/google-cloud-functions/google/cloud/functions_v1/gapic_version.py +++ b/packages/google-cloud-functions/google/cloud/functions_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.19.0" # {x-release-please-version} +__version__ = "1.20.0" # {x-release-please-version} diff --git a/packages/google-cloud-functions/google/cloud/functions_v1/services/cloud_functions_service/client.py b/packages/google-cloud-functions/google/cloud/functions_v1/services/cloud_functions_service/client.py index 338ce446332c..ddc39973f00f 100644 --- a/packages/google-cloud-functions/google/cloud/functions_v1/services/cloud_functions_service/client.py +++ b/packages/google-cloud-functions/google/cloud/functions_v1/services/cloud_functions_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -546,6 +548,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -2025,16 +2054,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. 
- response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -2080,16 +2113,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def list_locations( self, @@ -2135,16 +2172,20 @@ def list_locations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-functions/google/cloud/functions_v1/services/cloud_functions_service/transports/rest.py b/packages/google-cloud-functions/google/cloud/functions_v1/services/cloud_functions_service/transports/rest.py index 1fa2cb722fd6..15860d47d143 100644 --- a/packages/google-cloud-functions/google/cloud/functions_v1/services/cloud_functions_service/transports/rest.py +++ b/packages/google-cloud-functions/google/cloud/functions_v1/services/cloud_functions_service/transports/rest.py @@ -183,12 +183,35 @@ def post_call_function( ) -> functions.CallFunctionResponse: """Post-rpc interceptor for call_function - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_call_function_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CloudFunctionsService server but before - it is returned to user code. + it is returned to user code. This `post_call_function` interceptor runs + before the `post_call_function_with_metadata` interceptor. """ return response + def post_call_function_with_metadata( + self, + response: functions.CallFunctionResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[functions.CallFunctionResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for call_function + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudFunctionsService server but before it is returned to user code. + + We recommend only using this `post_call_function_with_metadata` + interceptor in new development instead of the `post_call_function` interceptor. + When both interceptors are used, this `post_call_function_with_metadata` interceptor runs after the + `post_call_function` interceptor. The (possibly modified) response returned by + `post_call_function` will be passed to + `post_call_function_with_metadata`. 
+ """ + return response, metadata + def pre_create_function( self, request: functions.CreateFunctionRequest, @@ -208,12 +231,35 @@ def post_create_function( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_function - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_function_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CloudFunctionsService server but before - it is returned to user code. + it is returned to user code. This `post_create_function` interceptor runs + before the `post_create_function_with_metadata` interceptor. """ return response + def post_create_function_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_function + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudFunctionsService server but before it is returned to user code. + + We recommend only using this `post_create_function_with_metadata` + interceptor in new development instead of the `post_create_function` interceptor. + When both interceptors are used, this `post_create_function_with_metadata` interceptor runs after the + `post_create_function` interceptor. The (possibly modified) response returned by + `post_create_function` will be passed to + `post_create_function_with_metadata`. + """ + return response, metadata + def pre_delete_function( self, request: functions.DeleteFunctionRequest, @@ -233,12 +279,35 @@ def post_delete_function( ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_function - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_function_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CloudFunctionsService server but before - it is returned to user code. + it is returned to user code. This `post_delete_function` interceptor runs + before the `post_delete_function_with_metadata` interceptor. """ return response + def post_delete_function_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_function + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudFunctionsService server but before it is returned to user code. + + We recommend only using this `post_delete_function_with_metadata` + interceptor in new development instead of the `post_delete_function` interceptor. + When both interceptors are used, this `post_delete_function_with_metadata` interceptor runs after the + `post_delete_function` interceptor. The (possibly modified) response returned by + `post_delete_function` will be passed to + `post_delete_function_with_metadata`. + """ + return response, metadata + def pre_generate_download_url( self, request: functions.GenerateDownloadUrlRequest, @@ -258,12 +327,37 @@ def post_generate_download_url( ) -> functions.GenerateDownloadUrlResponse: """Post-rpc interceptor for generate_download_url - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_generate_download_url_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response after it is returned by the CloudFunctionsService server but before - it is returned to user code. + it is returned to user code. This `post_generate_download_url` interceptor runs + before the `post_generate_download_url_with_metadata` interceptor. """ return response + def post_generate_download_url_with_metadata( + self, + response: functions.GenerateDownloadUrlResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + functions.GenerateDownloadUrlResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for generate_download_url + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudFunctionsService server but before it is returned to user code. + + We recommend only using this `post_generate_download_url_with_metadata` + interceptor in new development instead of the `post_generate_download_url` interceptor. + When both interceptors are used, this `post_generate_download_url_with_metadata` interceptor runs after the + `post_generate_download_url` interceptor. The (possibly modified) response returned by + `post_generate_download_url` will be passed to + `post_generate_download_url_with_metadata`. + """ + return response, metadata + def pre_generate_upload_url( self, request: functions.GenerateUploadUrlRequest, @@ -283,12 +377,37 @@ def post_generate_upload_url( ) -> functions.GenerateUploadUrlResponse: """Post-rpc interceptor for generate_upload_url - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_generate_upload_url_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CloudFunctionsService server but before - it is returned to user code. + it is returned to user code. This `post_generate_upload_url` interceptor runs + before the `post_generate_upload_url_with_metadata` interceptor. """ return response + def post_generate_upload_url_with_metadata( + self, + response: functions.GenerateUploadUrlResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + functions.GenerateUploadUrlResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for generate_upload_url + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudFunctionsService server but before it is returned to user code. + + We recommend only using this `post_generate_upload_url_with_metadata` + interceptor in new development instead of the `post_generate_upload_url` interceptor. + When both interceptors are used, this `post_generate_upload_url_with_metadata` interceptor runs after the + `post_generate_upload_url` interceptor. The (possibly modified) response returned by + `post_generate_upload_url` will be passed to + `post_generate_upload_url_with_metadata`. + """ + return response, metadata + def pre_get_function( self, request: functions.GetFunctionRequest, @@ -306,12 +425,35 @@ def post_get_function( ) -> functions.CloudFunction: """Post-rpc interceptor for get_function - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_function_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CloudFunctionsService server but before - it is returned to user code. + it is returned to user code. 
This `post_get_function` interceptor runs + before the `post_get_function_with_metadata` interceptor. """ return response + def post_get_function_with_metadata( + self, + response: functions.CloudFunction, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[functions.CloudFunction, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_function + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudFunctionsService server but before it is returned to user code. + + We recommend only using this `post_get_function_with_metadata` + interceptor in new development instead of the `post_get_function` interceptor. + When both interceptors are used, this `post_get_function_with_metadata` interceptor runs after the + `post_get_function` interceptor. The (possibly modified) response returned by + `post_get_function` will be passed to + `post_get_function_with_metadata`. + """ + return response, metadata + def pre_get_iam_policy( self, request: iam_policy_pb2.GetIamPolicyRequest, @@ -329,12 +471,35 @@ def pre_get_iam_policy( def post_get_iam_policy(self, response: policy_pb2.Policy) -> policy_pb2.Policy: """Post-rpc interceptor for get_iam_policy - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_iam_policy_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CloudFunctionsService server but before - it is returned to user code. + it is returned to user code. This `post_get_iam_policy` interceptor runs + before the `post_get_iam_policy_with_metadata` interceptor. """ return response + def post_get_iam_policy_with_metadata( + self, + response: policy_pb2.Policy, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[policy_pb2.Policy, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_iam_policy + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudFunctionsService server but before it is returned to user code. + + We recommend only using this `post_get_iam_policy_with_metadata` + interceptor in new development instead of the `post_get_iam_policy` interceptor. + When both interceptors are used, this `post_get_iam_policy_with_metadata` interceptor runs after the + `post_get_iam_policy` interceptor. The (possibly modified) response returned by + `post_get_iam_policy` will be passed to + `post_get_iam_policy_with_metadata`. + """ + return response, metadata + def pre_list_functions( self, request: functions.ListFunctionsRequest, @@ -352,12 +517,37 @@ def post_list_functions( ) -> functions.ListFunctionsResponse: """Post-rpc interceptor for list_functions - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_functions_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CloudFunctionsService server but before - it is returned to user code. + it is returned to user code. This `post_list_functions` interceptor runs + before the `post_list_functions_with_metadata` interceptor. 
""" return response + def post_list_functions_with_metadata( + self, + response: functions.ListFunctionsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + functions.ListFunctionsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_functions + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudFunctionsService server but before it is returned to user code. + + We recommend only using this `post_list_functions_with_metadata` + interceptor in new development instead of the `post_list_functions` interceptor. + When both interceptors are used, this `post_list_functions_with_metadata` interceptor runs after the + `post_list_functions` interceptor. The (possibly modified) response returned by + `post_list_functions` will be passed to + `post_list_functions_with_metadata`. + """ + return response, metadata + def pre_set_iam_policy( self, request: iam_policy_pb2.SetIamPolicyRequest, @@ -375,12 +565,35 @@ def pre_set_iam_policy( def post_set_iam_policy(self, response: policy_pb2.Policy) -> policy_pb2.Policy: """Post-rpc interceptor for set_iam_policy - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_set_iam_policy_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CloudFunctionsService server but before - it is returned to user code. + it is returned to user code. This `post_set_iam_policy` interceptor runs + before the `post_set_iam_policy_with_metadata` interceptor. """ return response + def post_set_iam_policy_with_metadata( + self, + response: policy_pb2.Policy, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[policy_pb2.Policy, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for set_iam_policy + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudFunctionsService server but before it is returned to user code. + + We recommend only using this `post_set_iam_policy_with_metadata` + interceptor in new development instead of the `post_set_iam_policy` interceptor. + When both interceptors are used, this `post_set_iam_policy_with_metadata` interceptor runs after the + `post_set_iam_policy` interceptor. The (possibly modified) response returned by + `post_set_iam_policy` will be passed to + `post_set_iam_policy_with_metadata`. + """ + return response, metadata + def pre_test_iam_permissions( self, request: iam_policy_pb2.TestIamPermissionsRequest, @@ -401,12 +614,38 @@ def post_test_iam_permissions( ) -> iam_policy_pb2.TestIamPermissionsResponse: """Post-rpc interceptor for test_iam_permissions - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_test_iam_permissions_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CloudFunctionsService server but before - it is returned to user code. + it is returned to user code. This `post_test_iam_permissions` interceptor runs + before the `post_test_iam_permissions_with_metadata` interceptor. 
""" return response + def post_test_iam_permissions_with_metadata( + self, + response: iam_policy_pb2.TestIamPermissionsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + iam_policy_pb2.TestIamPermissionsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for test_iam_permissions + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudFunctionsService server but before it is returned to user code. + + We recommend only using this `post_test_iam_permissions_with_metadata` + interceptor in new development instead of the `post_test_iam_permissions` interceptor. + When both interceptors are used, this `post_test_iam_permissions_with_metadata` interceptor runs after the + `post_test_iam_permissions` interceptor. The (possibly modified) response returned by + `post_test_iam_permissions` will be passed to + `post_test_iam_permissions_with_metadata`. + """ + return response, metadata + def pre_update_function( self, request: functions.UpdateFunctionRequest, @@ -426,12 +665,35 @@ def post_update_function( ) -> operations_pb2.Operation: """Post-rpc interceptor for update_function - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_function_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CloudFunctionsService server but before - it is returned to user code. + it is returned to user code. This `post_update_function` interceptor runs + before the `post_update_function_with_metadata` interceptor. """ return response + def post_update_function_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_function + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudFunctionsService server but before it is returned to user code. + + We recommend only using this `post_update_function_with_metadata` + interceptor in new development instead of the `post_update_function` interceptor. + When both interceptors are used, this `post_update_function_with_metadata` interceptor runs after the + `post_update_function` interceptor. The (possibly modified) response returned by + `post_update_function` will be passed to + `post_update_function_with_metadata`. 
+ """ + return response, metadata + def pre_list_locations( self, request: locations_pb2.ListLocationsRequest, @@ -760,6 +1022,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_call_function(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_call_function_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -908,6 +1174,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_function(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_function_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1050,6 +1320,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_function(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_function_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1201,6 +1475,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_generate_download_url(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_generate_download_url_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1354,6 +1632,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_generate_upload_url(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_generate_upload_url_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1501,6 +1783,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_function(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_function_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1717,6 +2003,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_iam_policy(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_iam_policy_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1858,6 +2148,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_functions(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_functions_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2080,6 +2374,10 @@ def __call__( json_format.Parse(response.content, 
pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_set_iam_policy(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_set_iam_policy_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2231,6 +2529,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_test_iam_permissions(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_test_iam_permissions_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2379,6 +2681,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_function(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_function_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-functions/google/cloud/functions_v2/gapic_version.py b/packages/google-cloud-functions/google/cloud/functions_v2/gapic_version.py index f1337c609ff8..c8ba2b4c6a4f 100644 --- a/packages/google-cloud-functions/google/cloud/functions_v2/gapic_version.py +++ b/packages/google-cloud-functions/google/cloud/functions_v2/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.19.0" # {x-release-please-version} +__version__ = "1.20.0" # {x-release-please-version} diff --git a/packages/google-cloud-functions/google/cloud/functions_v2/services/function_service/client.py b/packages/google-cloud-functions/google/cloud/functions_v2/services/function_service/client.py index b64e1bc3073c..863515105970 100644 --- a/packages/google-cloud-functions/google/cloud/functions_v2/services/function_service/client.py +++ b/packages/google-cloud-functions/google/cloud/functions_v2/services/function_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -699,6 +701,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -1885,16 +1914,20 @@ def list_operations( # Validate the universe domain. 
self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -1940,16 +1973,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def set_iam_policy( self, @@ -2061,16 +2098,20 @@ def set_iam_policy( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_iam_policy( self, @@ -2183,16 +2224,20 @@ def get_iam_policy( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def test_iam_permissions( self, @@ -2243,16 +2288,20 @@ def test_iam_permissions( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def list_locations( self, @@ -2298,16 +2347,20 @@ def list_locations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. 
+ return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-functions/google/cloud/functions_v2/services/function_service/transports/rest.py b/packages/google-cloud-functions/google/cloud/functions_v2/services/function_service/transports/rest.py index 94b2adba6a04..0d4059bbc2c8 100644 --- a/packages/google-cloud-functions/google/cloud/functions_v2/services/function_service/transports/rest.py +++ b/packages/google-cloud-functions/google/cloud/functions_v2/services/function_service/transports/rest.py @@ -161,12 +161,35 @@ def post_create_function( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_function - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_function_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the FunctionService server but before - it is returned to user code. + it is returned to user code. This `post_create_function` interceptor runs + before the `post_create_function_with_metadata` interceptor. """ return response + def post_create_function_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_function + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the FunctionService server but before it is returned to user code. + + We recommend only using this `post_create_function_with_metadata` + interceptor in new development instead of the `post_create_function` interceptor. + When both interceptors are used, this `post_create_function_with_metadata` interceptor runs after the + `post_create_function` interceptor. The (possibly modified) response returned by + `post_create_function` will be passed to + `post_create_function_with_metadata`. + """ + return response, metadata + def pre_delete_function( self, request: functions.DeleteFunctionRequest, @@ -186,12 +209,35 @@ def post_delete_function( ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_function - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_function_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the FunctionService server but before - it is returned to user code. + it is returned to user code. This `post_delete_function` interceptor runs + before the `post_delete_function_with_metadata` interceptor. """ return response + def post_delete_function_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_function + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the FunctionService server but before it is returned to user code. + + We recommend only using this `post_delete_function_with_metadata` + interceptor in new development instead of the `post_delete_function` interceptor. + When both interceptors are used, this `post_delete_function_with_metadata` interceptor runs after the + `post_delete_function` interceptor. 
The (possibly modified) response returned by + `post_delete_function` will be passed to + `post_delete_function_with_metadata`. + """ + return response, metadata + def pre_generate_download_url( self, request: functions.GenerateDownloadUrlRequest, @@ -211,12 +257,37 @@ def post_generate_download_url( ) -> functions.GenerateDownloadUrlResponse: """Post-rpc interceptor for generate_download_url - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_generate_download_url_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the FunctionService server but before - it is returned to user code. + it is returned to user code. This `post_generate_download_url` interceptor runs + before the `post_generate_download_url_with_metadata` interceptor. """ return response + def post_generate_download_url_with_metadata( + self, + response: functions.GenerateDownloadUrlResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + functions.GenerateDownloadUrlResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for generate_download_url + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the FunctionService server but before it is returned to user code. + + We recommend only using this `post_generate_download_url_with_metadata` + interceptor in new development instead of the `post_generate_download_url` interceptor. + When both interceptors are used, this `post_generate_download_url_with_metadata` interceptor runs after the + `post_generate_download_url` interceptor. The (possibly modified) response returned by + `post_generate_download_url` will be passed to + `post_generate_download_url_with_metadata`. + """ + return response, metadata + def pre_generate_upload_url( self, request: functions.GenerateUploadUrlRequest, @@ -236,12 +307,37 @@ def post_generate_upload_url( ) -> functions.GenerateUploadUrlResponse: """Post-rpc interceptor for generate_upload_url - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_generate_upload_url_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the FunctionService server but before - it is returned to user code. + it is returned to user code. This `post_generate_upload_url` interceptor runs + before the `post_generate_upload_url_with_metadata` interceptor. """ return response + def post_generate_upload_url_with_metadata( + self, + response: functions.GenerateUploadUrlResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + functions.GenerateUploadUrlResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for generate_upload_url + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the FunctionService server but before it is returned to user code. + + We recommend only using this `post_generate_upload_url_with_metadata` + interceptor in new development instead of the `post_generate_upload_url` interceptor. + When both interceptors are used, this `post_generate_upload_url_with_metadata` interceptor runs after the + `post_generate_upload_url` interceptor. The (possibly modified) response returned by + `post_generate_upload_url` will be passed to + `post_generate_upload_url_with_metadata`. 
+ """ + return response, metadata + def pre_get_function( self, request: functions.GetFunctionRequest, @@ -257,12 +353,35 @@ def pre_get_function( def post_get_function(self, response: functions.Function) -> functions.Function: """Post-rpc interceptor for get_function - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_function_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the FunctionService server but before - it is returned to user code. + it is returned to user code. This `post_get_function` interceptor runs + before the `post_get_function_with_metadata` interceptor. """ return response + def post_get_function_with_metadata( + self, + response: functions.Function, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[functions.Function, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_function + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the FunctionService server but before it is returned to user code. + + We recommend only using this `post_get_function_with_metadata` + interceptor in new development instead of the `post_get_function` interceptor. + When both interceptors are used, this `post_get_function_with_metadata` interceptor runs after the + `post_get_function` interceptor. The (possibly modified) response returned by + `post_get_function` will be passed to + `post_get_function_with_metadata`. + """ + return response, metadata + def pre_list_functions( self, request: functions.ListFunctionsRequest, @@ -280,12 +399,37 @@ def post_list_functions( ) -> functions.ListFunctionsResponse: """Post-rpc interceptor for list_functions - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_functions_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the FunctionService server but before - it is returned to user code. + it is returned to user code. This `post_list_functions` interceptor runs + before the `post_list_functions_with_metadata` interceptor. """ return response + def post_list_functions_with_metadata( + self, + response: functions.ListFunctionsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + functions.ListFunctionsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_functions + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the FunctionService server but before it is returned to user code. + + We recommend only using this `post_list_functions_with_metadata` + interceptor in new development instead of the `post_list_functions` interceptor. + When both interceptors are used, this `post_list_functions_with_metadata` interceptor runs after the + `post_list_functions` interceptor. The (possibly modified) response returned by + `post_list_functions` will be passed to + `post_list_functions_with_metadata`. + """ + return response, metadata + def pre_list_runtimes( self, request: functions.ListRuntimesRequest, @@ -303,12 +447,35 @@ def post_list_runtimes( ) -> functions.ListRuntimesResponse: """Post-rpc interceptor for list_runtimes - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_runtimes_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response after it is returned by the FunctionService server but before - it is returned to user code. + it is returned to user code. This `post_list_runtimes` interceptor runs + before the `post_list_runtimes_with_metadata` interceptor. """ return response + def post_list_runtimes_with_metadata( + self, + response: functions.ListRuntimesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[functions.ListRuntimesResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_runtimes + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the FunctionService server but before it is returned to user code. + + We recommend only using this `post_list_runtimes_with_metadata` + interceptor in new development instead of the `post_list_runtimes` interceptor. + When both interceptors are used, this `post_list_runtimes_with_metadata` interceptor runs after the + `post_list_runtimes` interceptor. The (possibly modified) response returned by + `post_list_runtimes` will be passed to + `post_list_runtimes_with_metadata`. + """ + return response, metadata + def pre_update_function( self, request: functions.UpdateFunctionRequest, @@ -328,12 +495,35 @@ def post_update_function( ) -> operations_pb2.Operation: """Post-rpc interceptor for update_function - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_function_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the FunctionService server but before - it is returned to user code. + it is returned to user code. This `post_update_function` interceptor runs + before the `post_update_function_with_metadata` interceptor. """ return response + def post_update_function_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_function + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the FunctionService server but before it is returned to user code. + + We recommend only using this `post_update_function_with_metadata` + interceptor in new development instead of the `post_update_function` interceptor. + When both interceptors are used, this `post_update_function_with_metadata` interceptor runs after the + `post_update_function` interceptor. The (possibly modified) response returned by + `post_update_function` will be passed to + `post_update_function_with_metadata`. 
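For the REST transport, the metadata passed to these hooks is derived from the HTTP response headers; each `__call__` body further down gains a `response_metadata = [(k, str(v)) for k, v in response.headers.items()]` line before invoking the `*_with_metadata` hook, so an override can do a straightforward header lookup. An illustrative sketch only (not part of this diff; the import path is assumed from the standard GAPIC layout, and the header consulted is just an example):

from typing import Sequence, Tuple, Union

from google.cloud.functions_v2.services.function_service.transports.rest import (
    FunctionServiceRestInterceptor,
)
from google.cloud.functions_v2.types import functions


class HeaderPeekingInterceptor(FunctionServiceRestInterceptor):
    def post_get_function_with_metadata(
        self,
        response: functions.Function,
        metadata: Sequence[Tuple[str, Union[str, bytes]]],
    ) -> Tuple[functions.Function, Sequence[Tuple[str, Union[str, bytes]]]]:
        # Header names come straight from the HTTP response, so normalize the
        # case before looking one up.
        headers = {str(k).lower(): v for k, v in metadata}
        content_type = headers.get("content-type")
        if content_type:
            print(f"get_function response content-type: {content_type}")
        return response, metadata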
+ """ + return response, metadata + def pre_list_locations( self, request: locations_pb2.ListLocationsRequest, @@ -738,6 +928,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_function(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_function_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -879,6 +1073,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_function(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_function_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1028,6 +1226,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_generate_download_url(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_generate_download_url_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1179,6 +1381,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_generate_upload_url(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_generate_upload_url_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1325,6 +1531,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_function(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_function_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1465,6 +1675,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_functions(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_functions_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1605,6 +1819,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_runtimes(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_runtimes_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1752,6 +1970,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_function(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_function_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git 
a/packages/google-cloud-functions/samples/generated_samples/snippet_metadata_google.cloud.functions.v1.json b/packages/google-cloud-functions/samples/generated_samples/snippet_metadata_google.cloud.functions.v1.json index 23ca7290d59e..51c2aa20b98a 100644 --- a/packages/google-cloud-functions/samples/generated_samples/snippet_metadata_google.cloud.functions.v1.json +++ b/packages/google-cloud-functions/samples/generated_samples/snippet_metadata_google.cloud.functions.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-functions", - "version": "1.19.0" + "version": "1.20.0" }, "snippets": [ { diff --git a/packages/google-cloud-functions/samples/generated_samples/snippet_metadata_google.cloud.functions.v2.json b/packages/google-cloud-functions/samples/generated_samples/snippet_metadata_google.cloud.functions.v2.json index 1ff5f17e7a51..ad3141327543 100644 --- a/packages/google-cloud-functions/samples/generated_samples/snippet_metadata_google.cloud.functions.v2.json +++ b/packages/google-cloud-functions/samples/generated_samples/snippet_metadata_google.cloud.functions.v2.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-functions", - "version": "1.19.0" + "version": "1.20.0" }, "snippets": [ { diff --git a/packages/google-cloud-functions/tests/unit/gapic/functions_v1/test_cloud_functions_service.py b/packages/google-cloud-functions/tests/unit/gapic/functions_v1/test_cloud_functions_service.py index fe0d1b2df197..c3441a30fd75 100644 --- a/packages/google-cloud-functions/tests/unit/gapic/functions_v1/test_cloud_functions_service.py +++ b/packages/google-cloud-functions/tests/unit/gapic/functions_v1/test_cloud_functions_service.py @@ -79,6 +79,13 @@ ) from google.cloud.functions_v1.types import functions, operations +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -354,6 +361,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
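The parametrized cases added below exercise the new `_add_cred_info_for_auth_errors` helper: for 401/403/404 responses it appends the credentials' `get_cred_info()` payload to the exception's `details` as a JSON string, and for other status codes or credentials without `get_cred_info()` it leaves the details untouched. As a rough sketch of how calling code might surface that extra detail (illustrative only, not part of this diff; the function name `describe_auth_failure` and its output format are invented for the example):

import json

from google.api_core import exceptions as core_exceptions


def describe_auth_failure(exc: core_exceptions.GoogleAPICallError) -> str:
    """Summarize an auth-related failure, including credential info when present."""
    lines = [f"{exc.code}: {exc.message}"]
    for detail in exc.details:
        if not isinstance(detail, str):
            continue
        try:
            info = json.loads(detail)
        except ValueError:
            # Plain-string details are not the credential-info JSON blob.
            continue
        if isinstance(info, dict) and "credential_type" in info:
            lines.append(
                f"credentials: {info['credential_type']} "
                f"(principal: {info.get('principal', 'unknown')})"
            )
    return "\n".join(lines)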
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = CloudFunctionsServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = CloudFunctionsServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -6889,10 +6939,14 @@ def test_list_functions_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.CloudFunctionsServiceRestInterceptor, "post_list_functions" ) as post, mock.patch.object( + transports.CloudFunctionsServiceRestInterceptor, + "post_list_functions_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.CloudFunctionsServiceRestInterceptor, "pre_list_functions" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = functions.ListFunctionsRequest.pb(functions.ListFunctionsRequest()) transcode.return_value = { "method": "post", @@ -6916,6 +6970,7 @@ def test_list_functions_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = functions.ListFunctionsResponse() + post_with_metadata.return_value = functions.ListFunctionsResponse(), metadata client.list_functions( request, @@ -6927,6 +6982,7 @@ def test_list_functions_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_function_rest_bad_request(request_type=functions.GetFunctionRequest): @@ -7060,10 +7116,14 @@ def test_get_function_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.CloudFunctionsServiceRestInterceptor, "post_get_function" ) as post, mock.patch.object( + transports.CloudFunctionsServiceRestInterceptor, + "post_get_function_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.CloudFunctionsServiceRestInterceptor, "pre_get_function" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = functions.GetFunctionRequest.pb(functions.GetFunctionRequest()) transcode.return_value = { "method": "post", @@ -7085,6 +7145,7 @@ def test_get_function_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = functions.CloudFunction() + post_with_metadata.return_value = functions.CloudFunction(), metadata client.get_function( request, @@ -7096,6 +7157,7 @@ def 
test_get_function_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_function_rest_bad_request(request_type=functions.CreateFunctionRequest): @@ -7298,10 +7360,14 @@ def test_create_function_rest_interceptors(null_interceptor): ), mock.patch.object( transports.CloudFunctionsServiceRestInterceptor, "post_create_function" ) as post, mock.patch.object( + transports.CloudFunctionsServiceRestInterceptor, + "post_create_function_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.CloudFunctionsServiceRestInterceptor, "pre_create_function" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = functions.CreateFunctionRequest.pb( functions.CreateFunctionRequest() ) @@ -7325,6 +7391,7 @@ def test_create_function_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_function( request, @@ -7336,6 +7403,7 @@ def test_create_function_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_function_rest_bad_request(request_type=functions.UpdateFunctionRequest): @@ -7542,10 +7610,14 @@ def test_update_function_rest_interceptors(null_interceptor): ), mock.patch.object( transports.CloudFunctionsServiceRestInterceptor, "post_update_function" ) as post, mock.patch.object( + transports.CloudFunctionsServiceRestInterceptor, + "post_update_function_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.CloudFunctionsServiceRestInterceptor, "pre_update_function" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = functions.UpdateFunctionRequest.pb( functions.UpdateFunctionRequest() ) @@ -7569,6 +7641,7 @@ def test_update_function_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.update_function( request, @@ -7580,6 +7653,7 @@ def test_update_function_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_function_rest_bad_request(request_type=functions.DeleteFunctionRequest): @@ -7658,10 +7732,14 @@ def test_delete_function_rest_interceptors(null_interceptor): ), mock.patch.object( transports.CloudFunctionsServiceRestInterceptor, "post_delete_function" ) as post, mock.patch.object( + transports.CloudFunctionsServiceRestInterceptor, + "post_delete_function_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.CloudFunctionsServiceRestInterceptor, "pre_delete_function" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = functions.DeleteFunctionRequest.pb( functions.DeleteFunctionRequest() ) @@ -7685,6 +7763,7 @@ def test_delete_function_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.delete_function( request, @@ -7696,6 +7775,7 @@ def test_delete_function_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + 
post_with_metadata.assert_called_once() def test_call_function_rest_bad_request(request_type=functions.CallFunctionRequest): @@ -7782,10 +7862,14 @@ def test_call_function_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.CloudFunctionsServiceRestInterceptor, "post_call_function" ) as post, mock.patch.object( + transports.CloudFunctionsServiceRestInterceptor, + "post_call_function_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.CloudFunctionsServiceRestInterceptor, "pre_call_function" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = functions.CallFunctionRequest.pb(functions.CallFunctionRequest()) transcode.return_value = { "method": "post", @@ -7809,6 +7893,7 @@ def test_call_function_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = functions.CallFunctionResponse() + post_with_metadata.return_value = functions.CallFunctionResponse(), metadata client.call_function( request, @@ -7820,6 +7905,7 @@ def test_call_function_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_generate_upload_url_rest_bad_request( @@ -7904,10 +7990,14 @@ def test_generate_upload_url_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.CloudFunctionsServiceRestInterceptor, "post_generate_upload_url" ) as post, mock.patch.object( + transports.CloudFunctionsServiceRestInterceptor, + "post_generate_upload_url_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.CloudFunctionsServiceRestInterceptor, "pre_generate_upload_url" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = functions.GenerateUploadUrlRequest.pb( functions.GenerateUploadUrlRequest() ) @@ -7933,6 +8023,10 @@ def test_generate_upload_url_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = functions.GenerateUploadUrlResponse() + post_with_metadata.return_value = ( + functions.GenerateUploadUrlResponse(), + metadata, + ) client.generate_upload_url( request, @@ -7944,6 +8038,7 @@ def test_generate_upload_url_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_generate_download_url_rest_bad_request( @@ -8028,10 +8123,14 @@ def test_generate_download_url_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.CloudFunctionsServiceRestInterceptor, "post_generate_download_url" ) as post, mock.patch.object( + transports.CloudFunctionsServiceRestInterceptor, + "post_generate_download_url_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.CloudFunctionsServiceRestInterceptor, "pre_generate_download_url" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = functions.GenerateDownloadUrlRequest.pb( functions.GenerateDownloadUrlRequest() ) @@ -8057,6 +8156,10 @@ def test_generate_download_url_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = functions.GenerateDownloadUrlResponse() + post_with_metadata.return_value = ( + functions.GenerateDownloadUrlResponse(), + metadata, + ) client.generate_download_url( request, @@ -8068,6 +8171,7 @@ def test_generate_download_url_rest_interceptors(null_interceptor): pre.assert_called_once() 
post.assert_called_once() + post_with_metadata.assert_called_once() def test_set_iam_policy_rest_bad_request( @@ -8151,10 +8255,14 @@ def test_set_iam_policy_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.CloudFunctionsServiceRestInterceptor, "post_set_iam_policy" ) as post, mock.patch.object( + transports.CloudFunctionsServiceRestInterceptor, + "post_set_iam_policy_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.CloudFunctionsServiceRestInterceptor, "pre_set_iam_policy" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = iam_policy_pb2.SetIamPolicyRequest() transcode.return_value = { "method": "post", @@ -8176,6 +8284,7 @@ def test_set_iam_policy_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = policy_pb2.Policy() + post_with_metadata.return_value = policy_pb2.Policy(), metadata client.set_iam_policy( request, @@ -8187,6 +8296,7 @@ def test_set_iam_policy_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_iam_policy_rest_bad_request( @@ -8270,10 +8380,14 @@ def test_get_iam_policy_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.CloudFunctionsServiceRestInterceptor, "post_get_iam_policy" ) as post, mock.patch.object( + transports.CloudFunctionsServiceRestInterceptor, + "post_get_iam_policy_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.CloudFunctionsServiceRestInterceptor, "pre_get_iam_policy" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = iam_policy_pb2.GetIamPolicyRequest() transcode.return_value = { "method": "post", @@ -8295,6 +8409,7 @@ def test_get_iam_policy_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = policy_pb2.Policy() + post_with_metadata.return_value = policy_pb2.Policy(), metadata client.get_iam_policy( request, @@ -8306,6 +8421,7 @@ def test_get_iam_policy_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_test_iam_permissions_rest_bad_request( @@ -8387,10 +8503,14 @@ def test_test_iam_permissions_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.CloudFunctionsServiceRestInterceptor, "post_test_iam_permissions" ) as post, mock.patch.object( + transports.CloudFunctionsServiceRestInterceptor, + "post_test_iam_permissions_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.CloudFunctionsServiceRestInterceptor, "pre_test_iam_permissions" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = iam_policy_pb2.TestIamPermissionsRequest() transcode.return_value = { "method": "post", @@ -8414,6 +8534,10 @@ def test_test_iam_permissions_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = iam_policy_pb2.TestIamPermissionsResponse() + post_with_metadata.return_value = ( + iam_policy_pb2.TestIamPermissionsResponse(), + metadata, + ) client.test_iam_permissions( request, @@ -8425,6 +8549,7 @@ def test_test_iam_permissions_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_locations_rest_bad_request( diff --git 
a/packages/google-cloud-functions/tests/unit/gapic/functions_v2/test_function_service.py b/packages/google-cloud-functions/tests/unit/gapic/functions_v2/test_function_service.py index c0d372f29107..4427ef0ac3a5 100644 --- a/packages/google-cloud-functions/tests/unit/gapic/functions_v2/test_function_service.py +++ b/packages/google-cloud-functions/tests/unit/gapic/functions_v2/test_function_service.py @@ -77,6 +77,13 @@ ) from google.cloud.functions_v2.types import functions +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -335,6 +342,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = FunctionServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = FunctionServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -5836,10 +5886,13 @@ def test_get_function_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.FunctionServiceRestInterceptor, "post_get_function" ) as post, mock.patch.object( + transports.FunctionServiceRestInterceptor, "post_get_function_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.FunctionServiceRestInterceptor, "pre_get_function" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = functions.GetFunctionRequest.pb(functions.GetFunctionRequest()) transcode.return_value = { "method": "post", @@ -5861,6 +5914,7 @@ def test_get_function_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = functions.Function() + post_with_metadata.return_value = functions.Function(), metadata client.get_function( request, @@ -5872,6 +5926,7 @@ def test_get_function_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_functions_rest_bad_request(request_type=functions.ListFunctionsRequest): @@ -5956,10 +6011,13 @@ def test_list_functions_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( 
transports.FunctionServiceRestInterceptor, "post_list_functions" ) as post, mock.patch.object( + transports.FunctionServiceRestInterceptor, "post_list_functions_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.FunctionServiceRestInterceptor, "pre_list_functions" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = functions.ListFunctionsRequest.pb(functions.ListFunctionsRequest()) transcode.return_value = { "method": "post", @@ -5983,6 +6041,7 @@ def test_list_functions_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = functions.ListFunctionsResponse() + post_with_metadata.return_value = functions.ListFunctionsResponse(), metadata client.list_functions( request, @@ -5994,6 +6053,7 @@ def test_list_functions_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_function_rest_bad_request(request_type=functions.CreateFunctionRequest): @@ -6241,10 +6301,13 @@ def test_create_function_rest_interceptors(null_interceptor): ), mock.patch.object( transports.FunctionServiceRestInterceptor, "post_create_function" ) as post, mock.patch.object( + transports.FunctionServiceRestInterceptor, "post_create_function_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.FunctionServiceRestInterceptor, "pre_create_function" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = functions.CreateFunctionRequest.pb( functions.CreateFunctionRequest() ) @@ -6268,6 +6331,7 @@ def test_create_function_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_function( request, @@ -6279,6 +6343,7 @@ def test_create_function_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_function_rest_bad_request(request_type=functions.UpdateFunctionRequest): @@ -6530,10 +6595,13 @@ def test_update_function_rest_interceptors(null_interceptor): ), mock.patch.object( transports.FunctionServiceRestInterceptor, "post_update_function" ) as post, mock.patch.object( + transports.FunctionServiceRestInterceptor, "post_update_function_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.FunctionServiceRestInterceptor, "pre_update_function" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = functions.UpdateFunctionRequest.pb( functions.UpdateFunctionRequest() ) @@ -6557,6 +6625,7 @@ def test_update_function_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.update_function( request, @@ -6568,6 +6637,7 @@ def test_update_function_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_function_rest_bad_request(request_type=functions.DeleteFunctionRequest): @@ -6646,10 +6716,13 @@ def test_delete_function_rest_interceptors(null_interceptor): ), mock.patch.object( transports.FunctionServiceRestInterceptor, "post_delete_function" ) as post, mock.patch.object( + 
transports.FunctionServiceRestInterceptor, "post_delete_function_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.FunctionServiceRestInterceptor, "pre_delete_function" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = functions.DeleteFunctionRequest.pb( functions.DeleteFunctionRequest() ) @@ -6673,6 +6746,7 @@ def test_delete_function_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.delete_function( request, @@ -6684,6 +6758,7 @@ def test_delete_function_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_generate_upload_url_rest_bad_request( @@ -6768,10 +6843,14 @@ def test_generate_upload_url_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.FunctionServiceRestInterceptor, "post_generate_upload_url" ) as post, mock.patch.object( + transports.FunctionServiceRestInterceptor, + "post_generate_upload_url_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.FunctionServiceRestInterceptor, "pre_generate_upload_url" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = functions.GenerateUploadUrlRequest.pb( functions.GenerateUploadUrlRequest() ) @@ -6797,6 +6876,10 @@ def test_generate_upload_url_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = functions.GenerateUploadUrlResponse() + post_with_metadata.return_value = ( + functions.GenerateUploadUrlResponse(), + metadata, + ) client.generate_upload_url( request, @@ -6808,6 +6891,7 @@ def test_generate_upload_url_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_generate_download_url_rest_bad_request( @@ -6892,10 +6976,14 @@ def test_generate_download_url_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.FunctionServiceRestInterceptor, "post_generate_download_url" ) as post, mock.patch.object( + transports.FunctionServiceRestInterceptor, + "post_generate_download_url_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.FunctionServiceRestInterceptor, "pre_generate_download_url" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = functions.GenerateDownloadUrlRequest.pb( functions.GenerateDownloadUrlRequest() ) @@ -6921,6 +7009,10 @@ def test_generate_download_url_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = functions.GenerateDownloadUrlResponse() + post_with_metadata.return_value = ( + functions.GenerateDownloadUrlResponse(), + metadata, + ) client.generate_download_url( request, @@ -6932,6 +7024,7 @@ def test_generate_download_url_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_runtimes_rest_bad_request(request_type=functions.ListRuntimesRequest): @@ -7011,10 +7104,13 @@ def test_list_runtimes_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.FunctionServiceRestInterceptor, "post_list_runtimes" ) as post, mock.patch.object( + transports.FunctionServiceRestInterceptor, 
"post_list_runtimes_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.FunctionServiceRestInterceptor, "pre_list_runtimes" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = functions.ListRuntimesRequest.pb(functions.ListRuntimesRequest()) transcode.return_value = { "method": "post", @@ -7038,6 +7134,7 @@ def test_list_runtimes_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = functions.ListRuntimesResponse() + post_with_metadata.return_value = functions.ListRuntimesResponse(), metadata client.list_runtimes( request, @@ -7049,6 +7146,7 @@ def test_list_runtimes_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_locations_rest_bad_request( diff --git a/packages/google-cloud-gdchardwaremanagement/CHANGELOG.md b/packages/google-cloud-gdchardwaremanagement/CHANGELOG.md index 8554165dd9b4..158fc8c11db4 100644 --- a/packages/google-cloud-gdchardwaremanagement/CHANGELOG.md +++ b/packages/google-cloud-gdchardwaremanagement/CHANGELOG.md @@ -1,5 +1,13 @@ # Changelog +## [0.1.10](https://github.com/googleapis/google-cloud-python/compare/google-cloud-gdchardwaremanagement-v0.1.9...google-cloud-gdchardwaremanagement-v0.1.10) (2025-02-12) + + +### Features + +* Add REST Interceptors which support reading metadata ([908d742](https://github.com/googleapis/google-cloud-python/commit/908d7421a4adadd7407df7ec2a25e25688ff180f)) +* Add support for reading selective GAPIC generation methods from service YAML ([908d742](https://github.com/googleapis/google-cloud-python/commit/908d7421a4adadd7407df7ec2a25e25688ff180f)) + ## [0.1.9](https://github.com/googleapis/google-cloud-python/compare/google-cloud-gdchardwaremanagement-v0.1.8...google-cloud-gdchardwaremanagement-v0.1.9) (2024-12-12) diff --git a/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement/gapic_version.py b/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement/gapic_version.py index f8ea948a9c30..9413c3341313 100644 --- a/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement/gapic_version.py +++ b/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.1.9" # {x-release-please-version} +__version__ = "0.1.10" # {x-release-please-version} diff --git a/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/gapic_version.py b/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/gapic_version.py index f8ea948a9c30..9413c3341313 100644 --- a/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/gapic_version.py +++ b/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.1.9" # {x-release-please-version} +__version__ = "0.1.10" # {x-release-please-version} diff --git a/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/services/gdc_hardware_management/client.py b/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/services/gdc_hardware_management/client.py index 4dc6254247cb..effea1d094ae 100644 --- a/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/services/gdc_hardware_management/client.py +++ b/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/services/gdc_hardware_management/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -657,6 +659,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -5316,16 +5345,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -5371,16 +5404,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def delete_operation( self, @@ -5537,16 +5574,20 @@ def get_location( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. 
+ return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def list_locations( self, @@ -5592,16 +5633,20 @@ def list_locations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/services/gdc_hardware_management/transports/rest.py b/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/services/gdc_hardware_management/transports/rest.py index 3687ef86b9c3..7929bc4eee30 100644 --- a/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/services/gdc_hardware_management/transports/rest.py +++ b/packages/google-cloud-gdchardwaremanagement/google/cloud/gdchardwaremanagement_v1alpha/services/gdc_hardware_management/transports/rest.py @@ -373,12 +373,35 @@ def post_create_comment( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_comment - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_comment_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the GDCHardwareManagement server but before - it is returned to user code. + it is returned to user code. This `post_create_comment` interceptor runs + before the `post_create_comment_with_metadata` interceptor. """ return response + def post_create_comment_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_comment + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the GDCHardwareManagement server but before it is returned to user code. + + We recommend only using this `post_create_comment_with_metadata` + interceptor in new development instead of the `post_create_comment` interceptor. + When both interceptors are used, this `post_create_comment_with_metadata` interceptor runs after the + `post_create_comment` interceptor. The (possibly modified) response returned by + `post_create_comment` will be passed to + `post_create_comment_with_metadata`. + """ + return response, metadata + def pre_create_hardware( self, request: service.CreateHardwareRequest, @@ -396,12 +419,35 @@ def post_create_hardware( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_hardware - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_hardware_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the GDCHardwareManagement server but before - it is returned to user code. + it is returned to user code. This `post_create_hardware` interceptor runs + before the `post_create_hardware_with_metadata` interceptor. 
""" return response + def post_create_hardware_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_hardware + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the GDCHardwareManagement server but before it is returned to user code. + + We recommend only using this `post_create_hardware_with_metadata` + interceptor in new development instead of the `post_create_hardware` interceptor. + When both interceptors are used, this `post_create_hardware_with_metadata` interceptor runs after the + `post_create_hardware` interceptor. The (possibly modified) response returned by + `post_create_hardware` will be passed to + `post_create_hardware_with_metadata`. + """ + return response, metadata + def pre_create_hardware_group( self, request: service.CreateHardwareGroupRequest, @@ -421,12 +467,35 @@ def post_create_hardware_group( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_hardware_group - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_hardware_group_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the GDCHardwareManagement server but before - it is returned to user code. + it is returned to user code. This `post_create_hardware_group` interceptor runs + before the `post_create_hardware_group_with_metadata` interceptor. """ return response + def post_create_hardware_group_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_hardware_group + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the GDCHardwareManagement server but before it is returned to user code. + + We recommend only using this `post_create_hardware_group_with_metadata` + interceptor in new development instead of the `post_create_hardware_group` interceptor. + When both interceptors are used, this `post_create_hardware_group_with_metadata` interceptor runs after the + `post_create_hardware_group` interceptor. The (possibly modified) response returned by + `post_create_hardware_group` will be passed to + `post_create_hardware_group_with_metadata`. + """ + return response, metadata + def pre_create_order( self, request: service.CreateOrderRequest, @@ -444,12 +513,35 @@ def post_create_order( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_order - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_order_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the GDCHardwareManagement server but before - it is returned to user code. + it is returned to user code. This `post_create_order` interceptor runs + before the `post_create_order_with_metadata` interceptor. 
""" return response + def post_create_order_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_order + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the GDCHardwareManagement server but before it is returned to user code. + + We recommend only using this `post_create_order_with_metadata` + interceptor in new development instead of the `post_create_order` interceptor. + When both interceptors are used, this `post_create_order_with_metadata` interceptor runs after the + `post_create_order` interceptor. The (possibly modified) response returned by + `post_create_order` will be passed to + `post_create_order_with_metadata`. + """ + return response, metadata + def pre_create_site( self, request: service.CreateSiteRequest, @@ -467,12 +559,35 @@ def post_create_site( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_site - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_site_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the GDCHardwareManagement server but before - it is returned to user code. + it is returned to user code. This `post_create_site` interceptor runs + before the `post_create_site_with_metadata` interceptor. """ return response + def post_create_site_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_site + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the GDCHardwareManagement server but before it is returned to user code. + + We recommend only using this `post_create_site_with_metadata` + interceptor in new development instead of the `post_create_site` interceptor. + When both interceptors are used, this `post_create_site_with_metadata` interceptor runs after the + `post_create_site` interceptor. The (possibly modified) response returned by + `post_create_site` will be passed to + `post_create_site_with_metadata`. + """ + return response, metadata + def pre_create_zone( self, request: service.CreateZoneRequest, @@ -490,12 +605,35 @@ def post_create_zone( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_zone - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_zone_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the GDCHardwareManagement server but before - it is returned to user code. + it is returned to user code. This `post_create_zone` interceptor runs + before the `post_create_zone_with_metadata` interceptor. """ return response + def post_create_zone_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_zone + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the GDCHardwareManagement server but before it is returned to user code. 
+ + We recommend only using this `post_create_zone_with_metadata` + interceptor in new development instead of the `post_create_zone` interceptor. + When both interceptors are used, this `post_create_zone_with_metadata` interceptor runs after the + `post_create_zone` interceptor. The (possibly modified) response returned by + `post_create_zone` will be passed to + `post_create_zone_with_metadata`. + """ + return response, metadata + def pre_delete_hardware( self, request: service.DeleteHardwareRequest, @@ -513,12 +651,35 @@ def post_delete_hardware( ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_hardware - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_hardware_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the GDCHardwareManagement server but before - it is returned to user code. + it is returned to user code. This `post_delete_hardware` interceptor runs + before the `post_delete_hardware_with_metadata` interceptor. """ return response + def post_delete_hardware_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_hardware + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the GDCHardwareManagement server but before it is returned to user code. + + We recommend only using this `post_delete_hardware_with_metadata` + interceptor in new development instead of the `post_delete_hardware` interceptor. + When both interceptors are used, this `post_delete_hardware_with_metadata` interceptor runs after the + `post_delete_hardware` interceptor. The (possibly modified) response returned by + `post_delete_hardware` will be passed to + `post_delete_hardware_with_metadata`. + """ + return response, metadata + def pre_delete_hardware_group( self, request: service.DeleteHardwareGroupRequest, @@ -538,12 +699,35 @@ def post_delete_hardware_group( ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_hardware_group - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_hardware_group_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the GDCHardwareManagement server but before - it is returned to user code. + it is returned to user code. This `post_delete_hardware_group` interceptor runs + before the `post_delete_hardware_group_with_metadata` interceptor. """ return response + def post_delete_hardware_group_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_hardware_group + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the GDCHardwareManagement server but before it is returned to user code. + + We recommend only using this `post_delete_hardware_group_with_metadata` + interceptor in new development instead of the `post_delete_hardware_group` interceptor. + When both interceptors are used, this `post_delete_hardware_group_with_metadata` interceptor runs after the + `post_delete_hardware_group` interceptor. 
The (possibly modified) response returned by + `post_delete_hardware_group` will be passed to + `post_delete_hardware_group_with_metadata`. + """ + return response, metadata + def pre_delete_order( self, request: service.DeleteOrderRequest, @@ -561,12 +745,35 @@ def post_delete_order( ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_order - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_order_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the GDCHardwareManagement server but before - it is returned to user code. + it is returned to user code. This `post_delete_order` interceptor runs + before the `post_delete_order_with_metadata` interceptor. """ return response + def post_delete_order_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_order + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the GDCHardwareManagement server but before it is returned to user code. + + We recommend only using this `post_delete_order_with_metadata` + interceptor in new development instead of the `post_delete_order` interceptor. + When both interceptors are used, this `post_delete_order_with_metadata` interceptor runs after the + `post_delete_order` interceptor. The (possibly modified) response returned by + `post_delete_order` will be passed to + `post_delete_order_with_metadata`. + """ + return response, metadata + def pre_delete_site( self, request: service.DeleteSiteRequest, @@ -584,12 +791,35 @@ def post_delete_site( ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_site - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_site_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the GDCHardwareManagement server but before - it is returned to user code. + it is returned to user code. This `post_delete_site` interceptor runs + before the `post_delete_site_with_metadata` interceptor. """ return response + def post_delete_site_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_site + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the GDCHardwareManagement server but before it is returned to user code. + + We recommend only using this `post_delete_site_with_metadata` + interceptor in new development instead of the `post_delete_site` interceptor. + When both interceptors are used, this `post_delete_site_with_metadata` interceptor runs after the + `post_delete_site` interceptor. The (possibly modified) response returned by + `post_delete_site` will be passed to + `post_delete_site_with_metadata`. + """ + return response, metadata + def pre_delete_zone( self, request: service.DeleteZoneRequest, @@ -607,12 +837,35 @@ def post_delete_zone( ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_zone - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_zone_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response after it is returned by the GDCHardwareManagement server but before - it is returned to user code. + it is returned to user code. This `post_delete_zone` interceptor runs + before the `post_delete_zone_with_metadata` interceptor. """ return response + def post_delete_zone_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_zone + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the GDCHardwareManagement server but before it is returned to user code. + + We recommend only using this `post_delete_zone_with_metadata` + interceptor in new development instead of the `post_delete_zone` interceptor. + When both interceptors are used, this `post_delete_zone_with_metadata` interceptor runs after the + `post_delete_zone` interceptor. The (possibly modified) response returned by + `post_delete_zone` will be passed to + `post_delete_zone_with_metadata`. + """ + return response, metadata + def pre_get_change_log_entry( self, request: service.GetChangeLogEntryRequest, @@ -632,12 +885,35 @@ def post_get_change_log_entry( ) -> resources.ChangeLogEntry: """Post-rpc interceptor for get_change_log_entry - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_change_log_entry_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the GDCHardwareManagement server but before - it is returned to user code. + it is returned to user code. This `post_get_change_log_entry` interceptor runs + before the `post_get_change_log_entry_with_metadata` interceptor. """ return response + def post_get_change_log_entry_with_metadata( + self, + response: resources.ChangeLogEntry, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.ChangeLogEntry, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_change_log_entry + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the GDCHardwareManagement server but before it is returned to user code. + + We recommend only using this `post_get_change_log_entry_with_metadata` + interceptor in new development instead of the `post_get_change_log_entry` interceptor. + When both interceptors are used, this `post_get_change_log_entry_with_metadata` interceptor runs after the + `post_get_change_log_entry` interceptor. The (possibly modified) response returned by + `post_get_change_log_entry` will be passed to + `post_get_change_log_entry_with_metadata`. + """ + return response, metadata + def pre_get_comment( self, request: service.GetCommentRequest, @@ -653,12 +929,35 @@ def pre_get_comment( def post_get_comment(self, response: resources.Comment) -> resources.Comment: """Post-rpc interceptor for get_comment - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_comment_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the GDCHardwareManagement server but before - it is returned to user code. + it is returned to user code. This `post_get_comment` interceptor runs + before the `post_get_comment_with_metadata` interceptor. 
""" return response + def post_get_comment_with_metadata( + self, + response: resources.Comment, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.Comment, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_comment + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the GDCHardwareManagement server but before it is returned to user code. + + We recommend only using this `post_get_comment_with_metadata` + interceptor in new development instead of the `post_get_comment` interceptor. + When both interceptors are used, this `post_get_comment_with_metadata` interceptor runs after the + `post_get_comment` interceptor. The (possibly modified) response returned by + `post_get_comment` will be passed to + `post_get_comment_with_metadata`. + """ + return response, metadata + def pre_get_hardware( self, request: service.GetHardwareRequest, @@ -674,12 +973,35 @@ def pre_get_hardware( def post_get_hardware(self, response: resources.Hardware) -> resources.Hardware: """Post-rpc interceptor for get_hardware - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_hardware_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the GDCHardwareManagement server but before - it is returned to user code. + it is returned to user code. This `post_get_hardware` interceptor runs + before the `post_get_hardware_with_metadata` interceptor. """ return response + def post_get_hardware_with_metadata( + self, + response: resources.Hardware, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.Hardware, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_hardware + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the GDCHardwareManagement server but before it is returned to user code. + + We recommend only using this `post_get_hardware_with_metadata` + interceptor in new development instead of the `post_get_hardware` interceptor. + When both interceptors are used, this `post_get_hardware_with_metadata` interceptor runs after the + `post_get_hardware` interceptor. The (possibly modified) response returned by + `post_get_hardware` will be passed to + `post_get_hardware_with_metadata`. + """ + return response, metadata + def pre_get_hardware_group( self, request: service.GetHardwareGroupRequest, @@ -699,12 +1021,35 @@ def post_get_hardware_group( ) -> resources.HardwareGroup: """Post-rpc interceptor for get_hardware_group - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_hardware_group_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the GDCHardwareManagement server but before - it is returned to user code. + it is returned to user code. This `post_get_hardware_group` interceptor runs + before the `post_get_hardware_group_with_metadata` interceptor. """ return response + def post_get_hardware_group_with_metadata( + self, + response: resources.HardwareGroup, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.HardwareGroup, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_hardware_group + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the GDCHardwareManagement server but before it is returned to user code. 
+ + We recommend only using this `post_get_hardware_group_with_metadata` + interceptor in new development instead of the `post_get_hardware_group` interceptor. + When both interceptors are used, this `post_get_hardware_group_with_metadata` interceptor runs after the + `post_get_hardware_group` interceptor. The (possibly modified) response returned by + `post_get_hardware_group` will be passed to + `post_get_hardware_group_with_metadata`. + """ + return response, metadata + def pre_get_order( self, request: service.GetOrderRequest, @@ -720,12 +1065,35 @@ def pre_get_order( def post_get_order(self, response: resources.Order) -> resources.Order: """Post-rpc interceptor for get_order - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_order_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the GDCHardwareManagement server but before - it is returned to user code. + it is returned to user code. This `post_get_order` interceptor runs + before the `post_get_order_with_metadata` interceptor. """ return response + def post_get_order_with_metadata( + self, + response: resources.Order, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.Order, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_order + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the GDCHardwareManagement server but before it is returned to user code. + + We recommend only using this `post_get_order_with_metadata` + interceptor in new development instead of the `post_get_order` interceptor. + When both interceptors are used, this `post_get_order_with_metadata` interceptor runs after the + `post_get_order` interceptor. The (possibly modified) response returned by + `post_get_order` will be passed to + `post_get_order_with_metadata`. + """ + return response, metadata + def pre_get_site( self, request: service.GetSiteRequest, @@ -741,12 +1109,35 @@ def pre_get_site( def post_get_site(self, response: resources.Site) -> resources.Site: """Post-rpc interceptor for get_site - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_site_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the GDCHardwareManagement server but before - it is returned to user code. + it is returned to user code. This `post_get_site` interceptor runs + before the `post_get_site_with_metadata` interceptor. """ return response + def post_get_site_with_metadata( + self, + response: resources.Site, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.Site, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_site + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the GDCHardwareManagement server but before it is returned to user code. + + We recommend only using this `post_get_site_with_metadata` + interceptor in new development instead of the `post_get_site` interceptor. + When both interceptors are used, this `post_get_site_with_metadata` interceptor runs after the + `post_get_site` interceptor. The (possibly modified) response returned by + `post_get_site` will be passed to + `post_get_site_with_metadata`. 
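# ---------------------------------------------------------------------------
# Illustrative sketch (editor's addition, not part of the diff): overriding one
# of the new *_with_metadata hooks recommended in the docstrings above. Only
# the method name and signature come from this diff; the interceptor/transport
# class names and import paths are assumptions based on the usual GAPIC layout.
from typing import Sequence, Tuple, Union

from google.cloud.gdchardwaremanagement_v1alpha.services.gdc_hardware_management.transports import (
    rest as gdc_rest,  # module path taken from the diff header above
)
from google.cloud.gdchardwaremanagement_v1alpha.types import resources  # path assumed


class LoggingInterceptor(gdc_rest.GDCHardwareManagementRestInterceptor):  # class name assumed
    def post_get_order_with_metadata(
        self,
        response: resources.Order,
        metadata: Sequence[Tuple[str, Union[str, bytes]]],
    ) -> Tuple[resources.Order, Sequence[Tuple[str, Union[str, bytes]]]]:
        # Runs after the (deprecated) post_get_order hook: the response seen
        # here is whatever post_get_order returned, and both the response and
        # the trailing metadata can be read or replaced before user code sees them.
        print("GetOrder trailing metadata:", metadata)
        return response, metadata


# Wiring (names assumed): pass the interceptor to the REST transport, e.g.
#   transport = gdc_rest.GDCHardwareManagementRestTransport(interceptor=LoggingInterceptor())
#   client = GDCHardwareManagementClient(transport=transport)
# ---------------------------------------------------------------------------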
+ """ + return response, metadata + def pre_get_sku( self, request: service.GetSkuRequest, @@ -762,12 +1153,33 @@ def pre_get_sku( def post_get_sku(self, response: resources.Sku) -> resources.Sku: """Post-rpc interceptor for get_sku - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_sku_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the GDCHardwareManagement server but before - it is returned to user code. + it is returned to user code. This `post_get_sku` interceptor runs + before the `post_get_sku_with_metadata` interceptor. """ return response + def post_get_sku_with_metadata( + self, response: resources.Sku, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[resources.Sku, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_sku + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the GDCHardwareManagement server but before it is returned to user code. + + We recommend only using this `post_get_sku_with_metadata` + interceptor in new development instead of the `post_get_sku` interceptor. + When both interceptors are used, this `post_get_sku_with_metadata` interceptor runs after the + `post_get_sku` interceptor. The (possibly modified) response returned by + `post_get_sku` will be passed to + `post_get_sku_with_metadata`. + """ + return response, metadata + def pre_get_zone( self, request: service.GetZoneRequest, @@ -783,12 +1195,35 @@ def pre_get_zone( def post_get_zone(self, response: resources.Zone) -> resources.Zone: """Post-rpc interceptor for get_zone - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_zone_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the GDCHardwareManagement server but before - it is returned to user code. + it is returned to user code. This `post_get_zone` interceptor runs + before the `post_get_zone_with_metadata` interceptor. """ return response + def post_get_zone_with_metadata( + self, + response: resources.Zone, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.Zone, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_zone + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the GDCHardwareManagement server but before it is returned to user code. + + We recommend only using this `post_get_zone_with_metadata` + interceptor in new development instead of the `post_get_zone` interceptor. + When both interceptors are used, this `post_get_zone_with_metadata` interceptor runs after the + `post_get_zone` interceptor. The (possibly modified) response returned by + `post_get_zone` will be passed to + `post_get_zone_with_metadata`. + """ + return response, metadata + def pre_list_change_log_entries( self, request: service.ListChangeLogEntriesRequest, @@ -808,12 +1243,37 @@ def post_list_change_log_entries( ) -> service.ListChangeLogEntriesResponse: """Post-rpc interceptor for list_change_log_entries - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_change_log_entries_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the GDCHardwareManagement server but before - it is returned to user code. + it is returned to user code. 
This `post_list_change_log_entries` interceptor runs + before the `post_list_change_log_entries_with_metadata` interceptor. """ return response + def post_list_change_log_entries_with_metadata( + self, + response: service.ListChangeLogEntriesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + service.ListChangeLogEntriesResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_change_log_entries + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the GDCHardwareManagement server but before it is returned to user code. + + We recommend only using this `post_list_change_log_entries_with_metadata` + interceptor in new development instead of the `post_list_change_log_entries` interceptor. + When both interceptors are used, this `post_list_change_log_entries_with_metadata` interceptor runs after the + `post_list_change_log_entries` interceptor. The (possibly modified) response returned by + `post_list_change_log_entries` will be passed to + `post_list_change_log_entries_with_metadata`. + """ + return response, metadata + def pre_list_comments( self, request: service.ListCommentsRequest, @@ -831,12 +1291,35 @@ def post_list_comments( ) -> service.ListCommentsResponse: """Post-rpc interceptor for list_comments - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_comments_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the GDCHardwareManagement server but before - it is returned to user code. + it is returned to user code. This `post_list_comments` interceptor runs + before the `post_list_comments_with_metadata` interceptor. """ return response + def post_list_comments_with_metadata( + self, + response: service.ListCommentsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[service.ListCommentsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_comments + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the GDCHardwareManagement server but before it is returned to user code. + + We recommend only using this `post_list_comments_with_metadata` + interceptor in new development instead of the `post_list_comments` interceptor. + When both interceptors are used, this `post_list_comments_with_metadata` interceptor runs after the + `post_list_comments` interceptor. The (possibly modified) response returned by + `post_list_comments` will be passed to + `post_list_comments_with_metadata`. + """ + return response, metadata + def pre_list_hardware( self, request: service.ListHardwareRequest, @@ -854,12 +1337,35 @@ def post_list_hardware( ) -> service.ListHardwareResponse: """Post-rpc interceptor for list_hardware - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_hardware_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the GDCHardwareManagement server but before - it is returned to user code. + it is returned to user code. This `post_list_hardware` interceptor runs + before the `post_list_hardware_with_metadata` interceptor. 
""" return response + def post_list_hardware_with_metadata( + self, + response: service.ListHardwareResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[service.ListHardwareResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_hardware + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the GDCHardwareManagement server but before it is returned to user code. + + We recommend only using this `post_list_hardware_with_metadata` + interceptor in new development instead of the `post_list_hardware` interceptor. + When both interceptors are used, this `post_list_hardware_with_metadata` interceptor runs after the + `post_list_hardware` interceptor. The (possibly modified) response returned by + `post_list_hardware` will be passed to + `post_list_hardware_with_metadata`. + """ + return response, metadata + def pre_list_hardware_groups( self, request: service.ListHardwareGroupsRequest, @@ -879,12 +1385,37 @@ def post_list_hardware_groups( ) -> service.ListHardwareGroupsResponse: """Post-rpc interceptor for list_hardware_groups - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_hardware_groups_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the GDCHardwareManagement server but before - it is returned to user code. + it is returned to user code. This `post_list_hardware_groups` interceptor runs + before the `post_list_hardware_groups_with_metadata` interceptor. """ return response + def post_list_hardware_groups_with_metadata( + self, + response: service.ListHardwareGroupsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + service.ListHardwareGroupsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_hardware_groups + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the GDCHardwareManagement server but before it is returned to user code. + + We recommend only using this `post_list_hardware_groups_with_metadata` + interceptor in new development instead of the `post_list_hardware_groups` interceptor. + When both interceptors are used, this `post_list_hardware_groups_with_metadata` interceptor runs after the + `post_list_hardware_groups` interceptor. The (possibly modified) response returned by + `post_list_hardware_groups` will be passed to + `post_list_hardware_groups_with_metadata`. + """ + return response, metadata + def pre_list_orders( self, request: service.ListOrdersRequest, @@ -902,12 +1433,35 @@ def post_list_orders( ) -> service.ListOrdersResponse: """Post-rpc interceptor for list_orders - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_orders_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the GDCHardwareManagement server but before - it is returned to user code. + it is returned to user code. This `post_list_orders` interceptor runs + before the `post_list_orders_with_metadata` interceptor. 
""" return response + def post_list_orders_with_metadata( + self, + response: service.ListOrdersResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[service.ListOrdersResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_orders + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the GDCHardwareManagement server but before it is returned to user code. + + We recommend only using this `post_list_orders_with_metadata` + interceptor in new development instead of the `post_list_orders` interceptor. + When both interceptors are used, this `post_list_orders_with_metadata` interceptor runs after the + `post_list_orders` interceptor. The (possibly modified) response returned by + `post_list_orders` will be passed to + `post_list_orders_with_metadata`. + """ + return response, metadata + def pre_list_sites( self, request: service.ListSitesRequest, @@ -925,12 +1479,35 @@ def post_list_sites( ) -> service.ListSitesResponse: """Post-rpc interceptor for list_sites - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_sites_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the GDCHardwareManagement server but before - it is returned to user code. + it is returned to user code. This `post_list_sites` interceptor runs + before the `post_list_sites_with_metadata` interceptor. """ return response + def post_list_sites_with_metadata( + self, + response: service.ListSitesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[service.ListSitesResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_sites + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the GDCHardwareManagement server but before it is returned to user code. + + We recommend only using this `post_list_sites_with_metadata` + interceptor in new development instead of the `post_list_sites` interceptor. + When both interceptors are used, this `post_list_sites_with_metadata` interceptor runs after the + `post_list_sites` interceptor. The (possibly modified) response returned by + `post_list_sites` will be passed to + `post_list_sites_with_metadata`. + """ + return response, metadata + def pre_list_skus( self, request: service.ListSkusRequest, @@ -948,12 +1525,35 @@ def post_list_skus( ) -> service.ListSkusResponse: """Post-rpc interceptor for list_skus - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_skus_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the GDCHardwareManagement server but before - it is returned to user code. + it is returned to user code. This `post_list_skus` interceptor runs + before the `post_list_skus_with_metadata` interceptor. """ return response + def post_list_skus_with_metadata( + self, + response: service.ListSkusResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[service.ListSkusResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_skus + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the GDCHardwareManagement server but before it is returned to user code. 
+ + We recommend only using this `post_list_skus_with_metadata` + interceptor in new development instead of the `post_list_skus` interceptor. + When both interceptors are used, this `post_list_skus_with_metadata` interceptor runs after the + `post_list_skus` interceptor. The (possibly modified) response returned by + `post_list_skus` will be passed to + `post_list_skus_with_metadata`. + """ + return response, metadata + def pre_list_zones( self, request: service.ListZonesRequest, @@ -971,12 +1571,35 @@ def post_list_zones( ) -> service.ListZonesResponse: """Post-rpc interceptor for list_zones - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_zones_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the GDCHardwareManagement server but before - it is returned to user code. + it is returned to user code. This `post_list_zones` interceptor runs + before the `post_list_zones_with_metadata` interceptor. """ return response + def post_list_zones_with_metadata( + self, + response: service.ListZonesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[service.ListZonesResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_zones + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the GDCHardwareManagement server but before it is returned to user code. + + We recommend only using this `post_list_zones_with_metadata` + interceptor in new development instead of the `post_list_zones` interceptor. + When both interceptors are used, this `post_list_zones_with_metadata` interceptor runs after the + `post_list_zones` interceptor. The (possibly modified) response returned by + `post_list_zones` will be passed to + `post_list_zones_with_metadata`. + """ + return response, metadata + def pre_record_action_on_comment( self, request: service.RecordActionOnCommentRequest, @@ -996,12 +1619,35 @@ def post_record_action_on_comment( ) -> resources.Comment: """Post-rpc interceptor for record_action_on_comment - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_record_action_on_comment_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the GDCHardwareManagement server but before - it is returned to user code. + it is returned to user code. This `post_record_action_on_comment` interceptor runs + before the `post_record_action_on_comment_with_metadata` interceptor. """ return response + def post_record_action_on_comment_with_metadata( + self, + response: resources.Comment, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.Comment, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for record_action_on_comment + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the GDCHardwareManagement server but before it is returned to user code. + + We recommend only using this `post_record_action_on_comment_with_metadata` + interceptor in new development instead of the `post_record_action_on_comment` interceptor. + When both interceptors are used, this `post_record_action_on_comment_with_metadata` interceptor runs after the + `post_record_action_on_comment` interceptor. The (possibly modified) response returned by + `post_record_action_on_comment` will be passed to + `post_record_action_on_comment_with_metadata`. 
+ """ + return response, metadata + def pre_signal_zone_state( self, request: service.SignalZoneStateRequest, @@ -1019,12 +1665,35 @@ def post_signal_zone_state( ) -> operations_pb2.Operation: """Post-rpc interceptor for signal_zone_state - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_signal_zone_state_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the GDCHardwareManagement server but before - it is returned to user code. + it is returned to user code. This `post_signal_zone_state` interceptor runs + before the `post_signal_zone_state_with_metadata` interceptor. """ return response + def post_signal_zone_state_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for signal_zone_state + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the GDCHardwareManagement server but before it is returned to user code. + + We recommend only using this `post_signal_zone_state_with_metadata` + interceptor in new development instead of the `post_signal_zone_state` interceptor. + When both interceptors are used, this `post_signal_zone_state_with_metadata` interceptor runs after the + `post_signal_zone_state` interceptor. The (possibly modified) response returned by + `post_signal_zone_state` will be passed to + `post_signal_zone_state_with_metadata`. + """ + return response, metadata + def pre_submit_order( self, request: service.SubmitOrderRequest, @@ -1042,12 +1711,35 @@ def post_submit_order( ) -> operations_pb2.Operation: """Post-rpc interceptor for submit_order - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_submit_order_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the GDCHardwareManagement server but before - it is returned to user code. + it is returned to user code. This `post_submit_order` interceptor runs + before the `post_submit_order_with_metadata` interceptor. """ return response + def post_submit_order_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for submit_order + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the GDCHardwareManagement server but before it is returned to user code. + + We recommend only using this `post_submit_order_with_metadata` + interceptor in new development instead of the `post_submit_order` interceptor. + When both interceptors are used, this `post_submit_order_with_metadata` interceptor runs after the + `post_submit_order` interceptor. The (possibly modified) response returned by + `post_submit_order` will be passed to + `post_submit_order_with_metadata`. + """ + return response, metadata + def pre_update_hardware( self, request: service.UpdateHardwareRequest, @@ -1065,12 +1757,35 @@ def post_update_hardware( ) -> operations_pb2.Operation: """Post-rpc interceptor for update_hardware - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_hardware_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response after it is returned by the GDCHardwareManagement server but before - it is returned to user code. + it is returned to user code. This `post_update_hardware` interceptor runs + before the `post_update_hardware_with_metadata` interceptor. """ return response + def post_update_hardware_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_hardware + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the GDCHardwareManagement server but before it is returned to user code. + + We recommend only using this `post_update_hardware_with_metadata` + interceptor in new development instead of the `post_update_hardware` interceptor. + When both interceptors are used, this `post_update_hardware_with_metadata` interceptor runs after the + `post_update_hardware` interceptor. The (possibly modified) response returned by + `post_update_hardware` will be passed to + `post_update_hardware_with_metadata`. + """ + return response, metadata + def pre_update_hardware_group( self, request: service.UpdateHardwareGroupRequest, @@ -1090,12 +1805,35 @@ def post_update_hardware_group( ) -> operations_pb2.Operation: """Post-rpc interceptor for update_hardware_group - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_hardware_group_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the GDCHardwareManagement server but before - it is returned to user code. + it is returned to user code. This `post_update_hardware_group` interceptor runs + before the `post_update_hardware_group_with_metadata` interceptor. """ return response + def post_update_hardware_group_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_hardware_group + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the GDCHardwareManagement server but before it is returned to user code. + + We recommend only using this `post_update_hardware_group_with_metadata` + interceptor in new development instead of the `post_update_hardware_group` interceptor. + When both interceptors are used, this `post_update_hardware_group_with_metadata` interceptor runs after the + `post_update_hardware_group` interceptor. The (possibly modified) response returned by + `post_update_hardware_group` will be passed to + `post_update_hardware_group_with_metadata`. + """ + return response, metadata + def pre_update_order( self, request: service.UpdateOrderRequest, @@ -1113,12 +1851,35 @@ def post_update_order( ) -> operations_pb2.Operation: """Post-rpc interceptor for update_order - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_order_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the GDCHardwareManagement server but before - it is returned to user code. + it is returned to user code. This `post_update_order` interceptor runs + before the `post_update_order_with_metadata` interceptor. 
""" return response + def post_update_order_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_order + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the GDCHardwareManagement server but before it is returned to user code. + + We recommend only using this `post_update_order_with_metadata` + interceptor in new development instead of the `post_update_order` interceptor. + When both interceptors are used, this `post_update_order_with_metadata` interceptor runs after the + `post_update_order` interceptor. The (possibly modified) response returned by + `post_update_order` will be passed to + `post_update_order_with_metadata`. + """ + return response, metadata + def pre_update_site( self, request: service.UpdateSiteRequest, @@ -1136,12 +1897,35 @@ def post_update_site( ) -> operations_pb2.Operation: """Post-rpc interceptor for update_site - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_site_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the GDCHardwareManagement server but before - it is returned to user code. + it is returned to user code. This `post_update_site` interceptor runs + before the `post_update_site_with_metadata` interceptor. """ return response + def post_update_site_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_site + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the GDCHardwareManagement server but before it is returned to user code. + + We recommend only using this `post_update_site_with_metadata` + interceptor in new development instead of the `post_update_site` interceptor. + When both interceptors are used, this `post_update_site_with_metadata` interceptor runs after the + `post_update_site` interceptor. The (possibly modified) response returned by + `post_update_site` will be passed to + `post_update_site_with_metadata`. + """ + return response, metadata + def pre_update_zone( self, request: service.UpdateZoneRequest, @@ -1159,12 +1943,35 @@ def post_update_zone( ) -> operations_pb2.Operation: """Post-rpc interceptor for update_zone - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_zone_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the GDCHardwareManagement server but before - it is returned to user code. + it is returned to user code. This `post_update_zone` interceptor runs + before the `post_update_zone_with_metadata` interceptor. """ return response + def post_update_zone_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_zone + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the GDCHardwareManagement server but before it is returned to user code. 
+ + We recommend only using this `post_update_zone_with_metadata` + interceptor in new development instead of the `post_update_zone` interceptor. + When both interceptors are used, this `post_update_zone_with_metadata` interceptor runs after the + `post_update_zone` interceptor. The (possibly modified) response returned by + `post_update_zone` will be passed to + `post_update_zone_with_metadata`. + """ + return response, metadata + def pre_get_location( self, request: locations_pb2.GetLocationRequest, @@ -1577,6 +2384,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_comment(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_comment_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1725,6 +2536,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_hardware(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_hardware_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1877,6 +2692,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_hardware_group(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_hardware_group_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2025,6 +2844,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_order(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_order_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2173,6 +2996,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_site(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_site_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2321,6 +3148,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_zone(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_zone_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2463,6 +3294,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_hardware(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_hardware_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2609,6 +3444,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = 
self._interceptor.post_delete_hardware_group(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_hardware_group_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2751,6 +3590,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_order(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_order_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2893,6 +3736,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_site(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_site_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3035,6 +3882,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_zone(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_zone_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3182,6 +4033,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_change_log_entry(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_change_log_entry_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3323,6 +4178,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_comment(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_comment_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3466,6 +4325,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_hardware(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_hardware_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3614,6 +4477,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_hardware_group(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_hardware_group_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3755,6 +4622,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_order(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_order_with_metadata( + resp, response_metadata + ) 
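Each REST `__call__` above now turns the HTTP response headers into metadata with the same one-line comprehension before invoking the corresponding `_with_metadata` hook. A standalone sketch of that conversion, using a plain dict in place of the real response object (an assumption for illustration only):

# Stand-in for response.headers on a requests-style HTTP response (illustrative).
headers = {
    "content-type": "application/json; charset=UTF-8",
    "x-goog-request-params": "name=projects/p/locations/l",
}

# The same comprehension the transport uses: every header value becomes a string.
response_metadata = [(k, str(v)) for k, v in headers.items()]

print(response_metadata)
# [('content-type', 'application/json; charset=UTF-8'),
#  ('x-goog-request-params', 'name=projects/p/locations/l')]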
if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3898,6 +4769,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_site(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_site_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4041,6 +4916,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_sku(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_sku_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4182,6 +5061,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_zone(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_zone_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4327,6 +5210,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_change_log_entries(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_change_log_entries_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4470,6 +5357,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_comments(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_comments_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4611,6 +5502,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_hardware(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_hardware_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4756,6 +5651,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_hardware_groups(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_hardware_groups_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4899,6 +5798,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_orders(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_orders_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -5040,6 +5943,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = 
self._interceptor.post_list_sites(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_sites_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -5181,6 +6088,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_skus(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_skus_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -5322,6 +6233,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_zones(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_zones_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -5474,6 +6389,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_record_action_on_comment(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_record_action_on_comment_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -5627,6 +6546,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_signal_zone_state(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_signal_zone_state_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -5775,6 +6698,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_submit_order(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_submit_order_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -5923,6 +6850,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_hardware(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_hardware_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -6075,6 +7006,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_hardware_group(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_hardware_group_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -6223,6 +7158,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_order(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_order_with_metadata( + resp, response_metadata + 
) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -6371,6 +7310,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_site(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_site_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -6519,6 +7462,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_zone(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_zone_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/snippet_metadata_google.cloud.gdchardwaremanagement.v1alpha.json b/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/snippet_metadata_google.cloud.gdchardwaremanagement.v1alpha.json index f140f2290e72..61627aa117e3 100644 --- a/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/snippet_metadata_google.cloud.gdchardwaremanagement.v1alpha.json +++ b/packages/google-cloud-gdchardwaremanagement/samples/generated_samples/snippet_metadata_google.cloud.gdchardwaremanagement.v1alpha.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-gdchardwaremanagement", - "version": "0.1.9" + "version": "0.1.10" }, "snippets": [ { diff --git a/packages/google-cloud-gdchardwaremanagement/tests/unit/gapic/gdchardwaremanagement_v1alpha/test_gdc_hardware_management.py b/packages/google-cloud-gdchardwaremanagement/tests/unit/gapic/gdchardwaremanagement_v1alpha/test_gdc_hardware_management.py index 7dbb7ddae2a9..f810ff2f674f 100644 --- a/packages/google-cloud-gdchardwaremanagement/tests/unit/gapic/gdchardwaremanagement_v1alpha/test_gdc_hardware_management.py +++ b/packages/google-cloud-gdchardwaremanagement/tests/unit/gapic/gdchardwaremanagement_v1alpha/test_gdc_hardware_management.py @@ -79,6 +79,13 @@ ) from google.cloud.gdchardwaremanagement_v1alpha.types import resources, service +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -354,6 +361,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
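The `CRED_INFO_JSON` and `CRED_INFO_STRING` constants added above feed the parametrized auth-error tests that follow. Those tests assert that credential info is appended to `error.details` only for 401/403/404 responses, and only when the credential exposes `get_cred_info`; a minimal standalone sketch of that behavior (the real logic lives in the client's `_add_cred_info_for_auth_errors` helper, so treat this as an approximation):

import json


def add_cred_info_for_auth_errors(error, credentials):
    """Append serialized credential info to error.details for 401/403/404 only."""
    if error.code not in (401, 403, 404):
        return
    # Credentials without get_cred_info() are left untouched, matching the tests.
    get_cred_info = getattr(credentials, "get_cred_info", None)
    if get_cred_info is None:
        return
    cred_info = get_cred_info()
    if cred_info:
        error.details.append(json.dumps(cred_info))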
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = GDCHardwareManagementClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = GDCHardwareManagementClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -23747,10 +23797,14 @@ def test_list_orders_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.GDCHardwareManagementRestInterceptor, "post_list_orders" ) as post, mock.patch.object( + transports.GDCHardwareManagementRestInterceptor, + "post_list_orders_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.GDCHardwareManagementRestInterceptor, "pre_list_orders" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.ListOrdersRequest.pb(service.ListOrdersRequest()) transcode.return_value = { "method": "post", @@ -23772,6 +23826,7 @@ def test_list_orders_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = service.ListOrdersResponse() + post_with_metadata.return_value = service.ListOrdersResponse(), metadata client.list_orders( request, @@ -23783,6 +23838,7 @@ def test_list_orders_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_order_rest_bad_request(request_type=service.GetOrderRequest): @@ -23881,10 +23937,13 @@ def test_get_order_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.GDCHardwareManagementRestInterceptor, "post_get_order" ) as post, mock.patch.object( + transports.GDCHardwareManagementRestInterceptor, "post_get_order_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.GDCHardwareManagementRestInterceptor, "pre_get_order" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.GetOrderRequest.pb(service.GetOrderRequest()) transcode.return_value = { "method": "post", @@ -23906,6 +23965,7 @@ def test_get_order_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.Order() + post_with_metadata.return_value = resources.Order(), metadata client.get_order( request, @@ -23917,6 +23977,7 @@ def test_get_order_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + 
post_with_metadata.assert_called_once() def test_create_order_rest_bad_request(request_type=service.CreateOrderRequest): @@ -24123,10 +24184,14 @@ def test_create_order_rest_interceptors(null_interceptor): ), mock.patch.object( transports.GDCHardwareManagementRestInterceptor, "post_create_order" ) as post, mock.patch.object( + transports.GDCHardwareManagementRestInterceptor, + "post_create_order_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.GDCHardwareManagementRestInterceptor, "pre_create_order" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.CreateOrderRequest.pb(service.CreateOrderRequest()) transcode.return_value = { "method": "post", @@ -24148,6 +24213,7 @@ def test_create_order_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_order( request, @@ -24159,6 +24225,7 @@ def test_create_order_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_order_rest_bad_request(request_type=service.UpdateOrderRequest): @@ -24369,10 +24436,14 @@ def test_update_order_rest_interceptors(null_interceptor): ), mock.patch.object( transports.GDCHardwareManagementRestInterceptor, "post_update_order" ) as post, mock.patch.object( + transports.GDCHardwareManagementRestInterceptor, + "post_update_order_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.GDCHardwareManagementRestInterceptor, "pre_update_order" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.UpdateOrderRequest.pb(service.UpdateOrderRequest()) transcode.return_value = { "method": "post", @@ -24394,6 +24465,7 @@ def test_update_order_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.update_order( request, @@ -24405,6 +24477,7 @@ def test_update_order_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_order_rest_bad_request(request_type=service.DeleteOrderRequest): @@ -24483,10 +24556,14 @@ def test_delete_order_rest_interceptors(null_interceptor): ), mock.patch.object( transports.GDCHardwareManagementRestInterceptor, "post_delete_order" ) as post, mock.patch.object( + transports.GDCHardwareManagementRestInterceptor, + "post_delete_order_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.GDCHardwareManagementRestInterceptor, "pre_delete_order" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.DeleteOrderRequest.pb(service.DeleteOrderRequest()) transcode.return_value = { "method": "post", @@ -24508,6 +24585,7 @@ def test_delete_order_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.delete_order( request, @@ -24519,6 +24597,7 @@ def test_delete_order_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def 
test_submit_order_rest_bad_request(request_type=service.SubmitOrderRequest): @@ -24597,10 +24676,14 @@ def test_submit_order_rest_interceptors(null_interceptor): ), mock.patch.object( transports.GDCHardwareManagementRestInterceptor, "post_submit_order" ) as post, mock.patch.object( + transports.GDCHardwareManagementRestInterceptor, + "post_submit_order_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.GDCHardwareManagementRestInterceptor, "pre_submit_order" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.SubmitOrderRequest.pb(service.SubmitOrderRequest()) transcode.return_value = { "method": "post", @@ -24622,6 +24705,7 @@ def test_submit_order_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.submit_order( request, @@ -24633,6 +24717,7 @@ def test_submit_order_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_sites_rest_bad_request(request_type=service.ListSitesRequest): @@ -24717,10 +24802,13 @@ def test_list_sites_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.GDCHardwareManagementRestInterceptor, "post_list_sites" ) as post, mock.patch.object( + transports.GDCHardwareManagementRestInterceptor, "post_list_sites_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.GDCHardwareManagementRestInterceptor, "pre_list_sites" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.ListSitesRequest.pb(service.ListSitesRequest()) transcode.return_value = { "method": "post", @@ -24742,6 +24830,7 @@ def test_list_sites_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = service.ListSitesResponse() + post_with_metadata.return_value = service.ListSitesResponse(), metadata client.list_sites( request, @@ -24753,6 +24842,7 @@ def test_list_sites_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_site_rest_bad_request(request_type=service.GetSiteRequest): @@ -24845,10 +24935,13 @@ def test_get_site_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.GDCHardwareManagementRestInterceptor, "post_get_site" ) as post, mock.patch.object( + transports.GDCHardwareManagementRestInterceptor, "post_get_site_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.GDCHardwareManagementRestInterceptor, "pre_get_site" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.GetSiteRequest.pb(service.GetSiteRequest()) transcode.return_value = { "method": "post", @@ -24870,6 +24963,7 @@ def test_get_site_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.Site() + post_with_metadata.return_value = resources.Site(), metadata client.get_site( request, @@ -24881,6 +24975,7 @@ def test_get_site_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_site_rest_bad_request(request_type=service.CreateSiteRequest): @@ -25076,10 +25171,14 @@ def 
test_create_site_rest_interceptors(null_interceptor): ), mock.patch.object( transports.GDCHardwareManagementRestInterceptor, "post_create_site" ) as post, mock.patch.object( + transports.GDCHardwareManagementRestInterceptor, + "post_create_site_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.GDCHardwareManagementRestInterceptor, "pre_create_site" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.CreateSiteRequest.pb(service.CreateSiteRequest()) transcode.return_value = { "method": "post", @@ -25101,6 +25200,7 @@ def test_create_site_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_site( request, @@ -25112,6 +25212,7 @@ def test_create_site_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_site_rest_bad_request(request_type=service.UpdateSiteRequest): @@ -25311,10 +25412,14 @@ def test_update_site_rest_interceptors(null_interceptor): ), mock.patch.object( transports.GDCHardwareManagementRestInterceptor, "post_update_site" ) as post, mock.patch.object( + transports.GDCHardwareManagementRestInterceptor, + "post_update_site_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.GDCHardwareManagementRestInterceptor, "pre_update_site" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.UpdateSiteRequest.pb(service.UpdateSiteRequest()) transcode.return_value = { "method": "post", @@ -25336,6 +25441,7 @@ def test_update_site_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.update_site( request, @@ -25347,6 +25453,7 @@ def test_update_site_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_site_rest_bad_request(request_type=service.DeleteSiteRequest): @@ -25425,10 +25532,14 @@ def test_delete_site_rest_interceptors(null_interceptor): ), mock.patch.object( transports.GDCHardwareManagementRestInterceptor, "post_delete_site" ) as post, mock.patch.object( + transports.GDCHardwareManagementRestInterceptor, + "post_delete_site_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.GDCHardwareManagementRestInterceptor, "pre_delete_site" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.DeleteSiteRequest.pb(service.DeleteSiteRequest()) transcode.return_value = { "method": "post", @@ -25450,6 +25561,7 @@ def test_delete_site_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.delete_site( request, @@ -25461,6 +25573,7 @@ def test_delete_site_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_hardware_groups_rest_bad_request( @@ -25547,10 +25660,14 @@ def test_list_hardware_groups_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.GDCHardwareManagementRestInterceptor, 
"post_list_hardware_groups" ) as post, mock.patch.object( + transports.GDCHardwareManagementRestInterceptor, + "post_list_hardware_groups_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.GDCHardwareManagementRestInterceptor, "pre_list_hardware_groups" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.ListHardwareGroupsRequest.pb( service.ListHardwareGroupsRequest() ) @@ -25576,6 +25693,7 @@ def test_list_hardware_groups_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = service.ListHardwareGroupsResponse() + post_with_metadata.return_value = service.ListHardwareGroupsResponse(), metadata client.list_hardware_groups( request, @@ -25587,6 +25705,7 @@ def test_list_hardware_groups_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_hardware_group_rest_bad_request( @@ -25683,10 +25802,14 @@ def test_get_hardware_group_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.GDCHardwareManagementRestInterceptor, "post_get_hardware_group" ) as post, mock.patch.object( + transports.GDCHardwareManagementRestInterceptor, + "post_get_hardware_group_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.GDCHardwareManagementRestInterceptor, "pre_get_hardware_group" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.GetHardwareGroupRequest.pb( service.GetHardwareGroupRequest() ) @@ -25710,6 +25833,7 @@ def test_get_hardware_group_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.HardwareGroup() + post_with_metadata.return_value = resources.HardwareGroup(), metadata client.get_hardware_group( request, @@ -25721,6 +25845,7 @@ def test_get_hardware_group_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_hardware_group_rest_bad_request( @@ -25884,10 +26009,14 @@ def test_create_hardware_group_rest_interceptors(null_interceptor): ), mock.patch.object( transports.GDCHardwareManagementRestInterceptor, "post_create_hardware_group" ) as post, mock.patch.object( + transports.GDCHardwareManagementRestInterceptor, + "post_create_hardware_group_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.GDCHardwareManagementRestInterceptor, "pre_create_hardware_group" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.CreateHardwareGroupRequest.pb( service.CreateHardwareGroupRequest() ) @@ -25911,6 +26040,7 @@ def test_create_hardware_group_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_hardware_group( request, @@ -25922,6 +26052,7 @@ def test_create_hardware_group_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_hardware_group_rest_bad_request( @@ -26093,10 +26224,14 @@ def test_update_hardware_group_rest_interceptors(null_interceptor): ), mock.patch.object( transports.GDCHardwareManagementRestInterceptor, "post_update_hardware_group" ) as post, mock.patch.object( + 
transports.GDCHardwareManagementRestInterceptor, + "post_update_hardware_group_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.GDCHardwareManagementRestInterceptor, "pre_update_hardware_group" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.UpdateHardwareGroupRequest.pb( service.UpdateHardwareGroupRequest() ) @@ -26120,6 +26255,7 @@ def test_update_hardware_group_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.update_hardware_group( request, @@ -26131,6 +26267,7 @@ def test_update_hardware_group_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_hardware_group_rest_bad_request( @@ -26215,10 +26352,14 @@ def test_delete_hardware_group_rest_interceptors(null_interceptor): ), mock.patch.object( transports.GDCHardwareManagementRestInterceptor, "post_delete_hardware_group" ) as post, mock.patch.object( + transports.GDCHardwareManagementRestInterceptor, + "post_delete_hardware_group_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.GDCHardwareManagementRestInterceptor, "pre_delete_hardware_group" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.DeleteHardwareGroupRequest.pb( service.DeleteHardwareGroupRequest() ) @@ -26242,6 +26383,7 @@ def test_delete_hardware_group_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.delete_hardware_group( request, @@ -26253,6 +26395,7 @@ def test_delete_hardware_group_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_hardware_rest_bad_request(request_type=service.ListHardwareRequest): @@ -26337,10 +26480,14 @@ def test_list_hardware_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.GDCHardwareManagementRestInterceptor, "post_list_hardware" ) as post, mock.patch.object( + transports.GDCHardwareManagementRestInterceptor, + "post_list_hardware_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.GDCHardwareManagementRestInterceptor, "pre_list_hardware" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.ListHardwareRequest.pb(service.ListHardwareRequest()) transcode.return_value = { "method": "post", @@ -26364,6 +26511,7 @@ def test_list_hardware_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = service.ListHardwareResponse() + post_with_metadata.return_value = service.ListHardwareResponse(), metadata client.list_hardware( request, @@ -26375,6 +26523,7 @@ def test_list_hardware_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_hardware_rest_bad_request(request_type=service.GetHardwareRequest): @@ -26471,10 +26620,14 @@ def test_get_hardware_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.GDCHardwareManagementRestInterceptor, "post_get_hardware" ) as post, mock.patch.object( + 
transports.GDCHardwareManagementRestInterceptor, + "post_get_hardware_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.GDCHardwareManagementRestInterceptor, "pre_get_hardware" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.GetHardwareRequest.pb(service.GetHardwareRequest()) transcode.return_value = { "method": "post", @@ -26496,6 +26649,7 @@ def test_get_hardware_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.Hardware() + post_with_metadata.return_value = resources.Hardware(), metadata client.get_hardware( request, @@ -26507,6 +26661,7 @@ def test_get_hardware_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_hardware_rest_bad_request(request_type=service.CreateHardwareRequest): @@ -26714,10 +26869,14 @@ def test_create_hardware_rest_interceptors(null_interceptor): ), mock.patch.object( transports.GDCHardwareManagementRestInterceptor, "post_create_hardware" ) as post, mock.patch.object( + transports.GDCHardwareManagementRestInterceptor, + "post_create_hardware_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.GDCHardwareManagementRestInterceptor, "pre_create_hardware" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.CreateHardwareRequest.pb(service.CreateHardwareRequest()) transcode.return_value = { "method": "post", @@ -26739,6 +26898,7 @@ def test_create_hardware_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_hardware( request, @@ -26750,6 +26910,7 @@ def test_create_hardware_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_hardware_rest_bad_request(request_type=service.UpdateHardwareRequest): @@ -26961,10 +27122,14 @@ def test_update_hardware_rest_interceptors(null_interceptor): ), mock.patch.object( transports.GDCHardwareManagementRestInterceptor, "post_update_hardware" ) as post, mock.patch.object( + transports.GDCHardwareManagementRestInterceptor, + "post_update_hardware_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.GDCHardwareManagementRestInterceptor, "pre_update_hardware" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.UpdateHardwareRequest.pb(service.UpdateHardwareRequest()) transcode.return_value = { "method": "post", @@ -26986,6 +27151,7 @@ def test_update_hardware_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.update_hardware( request, @@ -26997,6 +27163,7 @@ def test_update_hardware_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_hardware_rest_bad_request(request_type=service.DeleteHardwareRequest): @@ -27075,10 +27242,14 @@ def test_delete_hardware_rest_interceptors(null_interceptor): ), mock.patch.object( transports.GDCHardwareManagementRestInterceptor, "post_delete_hardware" ) as post, mock.patch.object( + 
transports.GDCHardwareManagementRestInterceptor, + "post_delete_hardware_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.GDCHardwareManagementRestInterceptor, "pre_delete_hardware" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.DeleteHardwareRequest.pb(service.DeleteHardwareRequest()) transcode.return_value = { "method": "post", @@ -27100,6 +27271,7 @@ def test_delete_hardware_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.delete_hardware( request, @@ -27111,6 +27283,7 @@ def test_delete_hardware_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_comments_rest_bad_request(request_type=service.ListCommentsRequest): @@ -27195,10 +27368,14 @@ def test_list_comments_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.GDCHardwareManagementRestInterceptor, "post_list_comments" ) as post, mock.patch.object( + transports.GDCHardwareManagementRestInterceptor, + "post_list_comments_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.GDCHardwareManagementRestInterceptor, "pre_list_comments" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.ListCommentsRequest.pb(service.ListCommentsRequest()) transcode.return_value = { "method": "post", @@ -27222,6 +27399,7 @@ def test_list_comments_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = service.ListCommentsResponse() + post_with_metadata.return_value = service.ListCommentsResponse(), metadata client.list_comments( request, @@ -27233,6 +27411,7 @@ def test_list_comments_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_comment_rest_bad_request(request_type=service.GetCommentRequest): @@ -27325,10 +27504,14 @@ def test_get_comment_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.GDCHardwareManagementRestInterceptor, "post_get_comment" ) as post, mock.patch.object( + transports.GDCHardwareManagementRestInterceptor, + "post_get_comment_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.GDCHardwareManagementRestInterceptor, "pre_get_comment" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.GetCommentRequest.pb(service.GetCommentRequest()) transcode.return_value = { "method": "post", @@ -27350,6 +27533,7 @@ def test_get_comment_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.Comment() + post_with_metadata.return_value = resources.Comment(), metadata client.get_comment( request, @@ -27361,6 +27545,7 @@ def test_get_comment_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_comment_rest_bad_request(request_type=service.CreateCommentRequest): @@ -27515,10 +27700,14 @@ def test_create_comment_rest_interceptors(null_interceptor): ), mock.patch.object( transports.GDCHardwareManagementRestInterceptor, "post_create_comment" ) as post, mock.patch.object( + 
transports.GDCHardwareManagementRestInterceptor, + "post_create_comment_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.GDCHardwareManagementRestInterceptor, "pre_create_comment" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.CreateCommentRequest.pb(service.CreateCommentRequest()) transcode.return_value = { "method": "post", @@ -27540,6 +27729,7 @@ def test_create_comment_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_comment( request, @@ -27551,6 +27741,7 @@ def test_create_comment_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_record_action_on_comment_rest_bad_request( @@ -27645,10 +27836,14 @@ def test_record_action_on_comment_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.GDCHardwareManagementRestInterceptor, "post_record_action_on_comment" ) as post, mock.patch.object( + transports.GDCHardwareManagementRestInterceptor, + "post_record_action_on_comment_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.GDCHardwareManagementRestInterceptor, "pre_record_action_on_comment" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.RecordActionOnCommentRequest.pb( service.RecordActionOnCommentRequest() ) @@ -27672,6 +27867,7 @@ def test_record_action_on_comment_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.Comment() + post_with_metadata.return_value = resources.Comment(), metadata client.record_action_on_comment( request, @@ -27683,6 +27879,7 @@ def test_record_action_on_comment_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_change_log_entries_rest_bad_request( @@ -27769,10 +27966,14 @@ def test_list_change_log_entries_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.GDCHardwareManagementRestInterceptor, "post_list_change_log_entries" ) as post, mock.patch.object( + transports.GDCHardwareManagementRestInterceptor, + "post_list_change_log_entries_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.GDCHardwareManagementRestInterceptor, "pre_list_change_log_entries" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.ListChangeLogEntriesRequest.pb( service.ListChangeLogEntriesRequest() ) @@ -27798,6 +27999,10 @@ def test_list_change_log_entries_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = service.ListChangeLogEntriesResponse() + post_with_metadata.return_value = ( + service.ListChangeLogEntriesResponse(), + metadata, + ) client.list_change_log_entries( request, @@ -27809,6 +28014,7 @@ def test_list_change_log_entries_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_change_log_entry_rest_bad_request( @@ -27899,10 +28105,14 @@ def test_get_change_log_entry_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.GDCHardwareManagementRestInterceptor, "post_get_change_log_entry" ) as 
post, mock.patch.object( + transports.GDCHardwareManagementRestInterceptor, + "post_get_change_log_entry_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.GDCHardwareManagementRestInterceptor, "pre_get_change_log_entry" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.GetChangeLogEntryRequest.pb( service.GetChangeLogEntryRequest() ) @@ -27926,6 +28136,7 @@ def test_get_change_log_entry_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.ChangeLogEntry() + post_with_metadata.return_value = resources.ChangeLogEntry(), metadata client.get_change_log_entry( request, @@ -27937,6 +28148,7 @@ def test_get_change_log_entry_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_skus_rest_bad_request(request_type=service.ListSkusRequest): @@ -28021,10 +28233,13 @@ def test_list_skus_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.GDCHardwareManagementRestInterceptor, "post_list_skus" ) as post, mock.patch.object( + transports.GDCHardwareManagementRestInterceptor, "post_list_skus_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.GDCHardwareManagementRestInterceptor, "pre_list_skus" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.ListSkusRequest.pb(service.ListSkusRequest()) transcode.return_value = { "method": "post", @@ -28046,6 +28261,7 @@ def test_list_skus_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = service.ListSkusResponse() + post_with_metadata.return_value = service.ListSkusResponse(), metadata client.list_skus( request, @@ -28057,6 +28273,7 @@ def test_list_skus_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_sku_rest_bad_request(request_type=service.GetSkuRequest): @@ -28151,10 +28368,13 @@ def test_get_sku_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.GDCHardwareManagementRestInterceptor, "post_get_sku" ) as post, mock.patch.object( + transports.GDCHardwareManagementRestInterceptor, "post_get_sku_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.GDCHardwareManagementRestInterceptor, "pre_get_sku" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.GetSkuRequest.pb(service.GetSkuRequest()) transcode.return_value = { "method": "post", @@ -28176,6 +28396,7 @@ def test_get_sku_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.Sku() + post_with_metadata.return_value = resources.Sku(), metadata client.get_sku( request, @@ -28187,6 +28408,7 @@ def test_get_sku_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_zones_rest_bad_request(request_type=service.ListZonesRequest): @@ -28271,10 +28493,13 @@ def test_list_zones_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.GDCHardwareManagementRestInterceptor, "post_list_zones" ) as post, mock.patch.object( + transports.GDCHardwareManagementRestInterceptor, "post_list_zones_with_metadata" + ) as post_with_metadata, mock.patch.object( 
transports.GDCHardwareManagementRestInterceptor, "pre_list_zones" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.ListZonesRequest.pb(service.ListZonesRequest()) transcode.return_value = { "method": "post", @@ -28296,6 +28521,7 @@ def test_list_zones_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = service.ListZonesResponse() + post_with_metadata.return_value = service.ListZonesResponse(), metadata client.list_zones( request, @@ -28307,6 +28533,7 @@ def test_list_zones_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_zone_rest_bad_request(request_type=service.GetZoneRequest): @@ -28402,10 +28629,13 @@ def test_get_zone_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.GDCHardwareManagementRestInterceptor, "post_get_zone" ) as post, mock.patch.object( + transports.GDCHardwareManagementRestInterceptor, "post_get_zone_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.GDCHardwareManagementRestInterceptor, "pre_get_zone" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.GetZoneRequest.pb(service.GetZoneRequest()) transcode.return_value = { "method": "post", @@ -28427,6 +28657,7 @@ def test_get_zone_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.Zone() + post_with_metadata.return_value = resources.Zone(), metadata client.get_zone( request, @@ -28438,6 +28669,7 @@ def test_get_zone_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_zone_rest_bad_request(request_type=service.CreateZoneRequest): @@ -28637,10 +28869,14 @@ def test_create_zone_rest_interceptors(null_interceptor): ), mock.patch.object( transports.GDCHardwareManagementRestInterceptor, "post_create_zone" ) as post, mock.patch.object( + transports.GDCHardwareManagementRestInterceptor, + "post_create_zone_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.GDCHardwareManagementRestInterceptor, "pre_create_zone" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.CreateZoneRequest.pb(service.CreateZoneRequest()) transcode.return_value = { "method": "post", @@ -28662,6 +28898,7 @@ def test_create_zone_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_zone( request, @@ -28673,6 +28910,7 @@ def test_create_zone_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_zone_rest_bad_request(request_type=service.UpdateZoneRequest): @@ -28876,10 +29114,14 @@ def test_update_zone_rest_interceptors(null_interceptor): ), mock.patch.object( transports.GDCHardwareManagementRestInterceptor, "post_update_zone" ) as post, mock.patch.object( + transports.GDCHardwareManagementRestInterceptor, + "post_update_zone_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.GDCHardwareManagementRestInterceptor, "pre_update_zone" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() 
pb_message = service.UpdateZoneRequest.pb(service.UpdateZoneRequest()) transcode.return_value = { "method": "post", @@ -28901,6 +29143,7 @@ def test_update_zone_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.update_zone( request, @@ -28912,6 +29155,7 @@ def test_update_zone_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_zone_rest_bad_request(request_type=service.DeleteZoneRequest): @@ -28990,10 +29234,14 @@ def test_delete_zone_rest_interceptors(null_interceptor): ), mock.patch.object( transports.GDCHardwareManagementRestInterceptor, "post_delete_zone" ) as post, mock.patch.object( + transports.GDCHardwareManagementRestInterceptor, + "post_delete_zone_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.GDCHardwareManagementRestInterceptor, "pre_delete_zone" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.DeleteZoneRequest.pb(service.DeleteZoneRequest()) transcode.return_value = { "method": "post", @@ -29015,6 +29263,7 @@ def test_delete_zone_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.delete_zone( request, @@ -29026,6 +29275,7 @@ def test_delete_zone_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_signal_zone_state_rest_bad_request( @@ -29106,10 +29356,14 @@ def test_signal_zone_state_rest_interceptors(null_interceptor): ), mock.patch.object( transports.GDCHardwareManagementRestInterceptor, "post_signal_zone_state" ) as post, mock.patch.object( + transports.GDCHardwareManagementRestInterceptor, + "post_signal_zone_state_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.GDCHardwareManagementRestInterceptor, "pre_signal_zone_state" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.SignalZoneStateRequest.pb(service.SignalZoneStateRequest()) transcode.return_value = { "method": "post", @@ -29131,6 +29385,7 @@ def test_signal_zone_state_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.signal_zone_state( request, @@ -29142,6 +29397,7 @@ def test_signal_zone_state_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationRequest): diff --git a/packages/google-cloud-gke-backup/CHANGELOG.md b/packages/google-cloud-gke-backup/CHANGELOG.md index a11417857d9f..68b1dbbfa5ae 100644 --- a/packages/google-cloud-gke-backup/CHANGELOG.md +++ b/packages/google-cloud-gke-backup/CHANGELOG.md @@ -1,5 +1,13 @@ # Changelog +## [0.5.15](https://github.com/googleapis/google-cloud-python/compare/google-cloud-gke-backup-v0.5.14...google-cloud-gke-backup-v0.5.15) (2025-02-12) + + +### Features + +* Add REST Interceptors which support reading metadata 
([908d742](https://github.com/googleapis/google-cloud-python/commit/908d7421a4adadd7407df7ec2a25e25688ff180f)) +* Add support for reading selective GAPIC generation methods from service YAML ([908d742](https://github.com/googleapis/google-cloud-python/commit/908d7421a4adadd7407df7ec2a25e25688ff180f)) + ## [0.5.14](https://github.com/googleapis/google-cloud-python/compare/google-cloud-gke-backup-v0.5.13...google-cloud-gke-backup-v0.5.14) (2024-12-12) diff --git a/packages/google-cloud-gke-backup/google/cloud/gke_backup/gapic_version.py b/packages/google-cloud-gke-backup/google/cloud/gke_backup/gapic_version.py index 0f3dcb10f73a..35c9af734238 100644 --- a/packages/google-cloud-gke-backup/google/cloud/gke_backup/gapic_version.py +++ b/packages/google-cloud-gke-backup/google/cloud/gke_backup/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.5.14" # {x-release-please-version} +__version__ = "0.5.15" # {x-release-please-version} diff --git a/packages/google-cloud-gke-backup/google/cloud/gke_backup_v1/gapic_version.py b/packages/google-cloud-gke-backup/google/cloud/gke_backup_v1/gapic_version.py index 0f3dcb10f73a..35c9af734238 100644 --- a/packages/google-cloud-gke-backup/google/cloud/gke_backup_v1/gapic_version.py +++ b/packages/google-cloud-gke-backup/google/cloud/gke_backup_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.5.14" # {x-release-please-version} +__version__ = "0.5.15" # {x-release-please-version} diff --git a/packages/google-cloud-gke-backup/google/cloud/gke_backup_v1/services/backup_for_gke/client.py b/packages/google-cloud-gke-backup/google/cloud/gke_backup_v1/services/backup_for_gke/client.py index 81546677aa41..41300044f0b3 100644 --- a/packages/google-cloud-gke-backup/google/cloud/gke_backup_v1/services/backup_for_gke/client.py +++ b/packages/google-cloud-gke-backup/google/cloud/gke_backup_v1/services/backup_for_gke/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -674,6 +676,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -4088,16 +4117,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -4143,16 +4176,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def delete_operation( self, @@ -4375,16 +4412,20 @@ def set_iam_policy( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_iam_policy( self, @@ -4497,16 +4538,20 @@ def get_iam_policy( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def test_iam_permissions( self, @@ -4557,16 +4602,20 @@ def test_iam_permissions( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_location( self, @@ -4612,16 +4661,20 @@ def get_location( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def list_locations( self, @@ -4667,16 +4720,20 @@ def list_locations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. 
+ return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-gke-backup/google/cloud/gke_backup_v1/services/backup_for_gke/transports/rest.py b/packages/google-cloud-gke-backup/google/cloud/gke_backup_v1/services/backup_for_gke/transports/rest.py index 501b63887084..46690fbf9119 100644 --- a/packages/google-cloud-gke-backup/google/cloud/gke_backup_v1/services/backup_for_gke/transports/rest.py +++ b/packages/google-cloud-gke-backup/google/cloud/gke_backup_v1/services/backup_for_gke/transports/rest.py @@ -302,12 +302,35 @@ def post_create_backup( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_backup - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_backup_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the BackupForGKE server but before - it is returned to user code. + it is returned to user code. This `post_create_backup` interceptor runs + before the `post_create_backup_with_metadata` interceptor. """ return response + def post_create_backup_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_backup + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BackupForGKE server but before it is returned to user code. + + We recommend only using this `post_create_backup_with_metadata` + interceptor in new development instead of the `post_create_backup` interceptor. + When both interceptors are used, this `post_create_backup_with_metadata` interceptor runs after the + `post_create_backup` interceptor. The (possibly modified) response returned by + `post_create_backup` will be passed to + `post_create_backup_with_metadata`. + """ + return response, metadata + def pre_create_backup_plan( self, request: gkebackup.CreateBackupPlanRequest, @@ -327,12 +350,35 @@ def post_create_backup_plan( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_backup_plan - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_backup_plan_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the BackupForGKE server but before - it is returned to user code. + it is returned to user code. This `post_create_backup_plan` interceptor runs + before the `post_create_backup_plan_with_metadata` interceptor. """ return response + def post_create_backup_plan_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_backup_plan + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BackupForGKE server but before it is returned to user code. + + We recommend only using this `post_create_backup_plan_with_metadata` + interceptor in new development instead of the `post_create_backup_plan` interceptor. + When both interceptors are used, this `post_create_backup_plan_with_metadata` interceptor runs after the + `post_create_backup_plan` interceptor. 
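Stepping back to the client.py hunks above: every mixin call is now wrapped in try/except, and `_add_cred_info_for_auth_errors` appends a JSON credential-info string to the error's details for 401/403/404 responses when the installed google-auth (>= 2.35.0) exposes `get_cred_info`. A hedged sketch of where that extra detail surfaces for a caller; the resource name is made up and the flattened `name` argument is an assumption.

```python
# Sketch only: requires real credentials and network access to actually run.
from google.api_core import exceptions as core_exceptions
from google.cloud import gke_backup_v1

client = gke_backup_v1.BackupForGKEClient()

try:
    client.get_backup(
        name="projects/my-proj/locations/us-central1/backupPlans/plan/backups/b1"
    )
except core_exceptions.PermissionDenied as exc:
    # For 401/403/404 errors, the client now appends a JSON credential-info
    # string to the error details (google-auth >= 2.35.0 only).
    for detail in exc.details:
        print(detail)
```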
The (possibly modified) response returned by + `post_create_backup_plan` will be passed to + `post_create_backup_plan_with_metadata`. + """ + return response, metadata + def pre_create_restore( self, request: gkebackup.CreateRestoreRequest, @@ -350,12 +396,35 @@ def post_create_restore( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_restore - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_restore_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the BackupForGKE server but before - it is returned to user code. + it is returned to user code. This `post_create_restore` interceptor runs + before the `post_create_restore_with_metadata` interceptor. """ return response + def post_create_restore_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_restore + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BackupForGKE server but before it is returned to user code. + + We recommend only using this `post_create_restore_with_metadata` + interceptor in new development instead of the `post_create_restore` interceptor. + When both interceptors are used, this `post_create_restore_with_metadata` interceptor runs after the + `post_create_restore` interceptor. The (possibly modified) response returned by + `post_create_restore` will be passed to + `post_create_restore_with_metadata`. + """ + return response, metadata + def pre_create_restore_plan( self, request: gkebackup.CreateRestorePlanRequest, @@ -375,12 +444,35 @@ def post_create_restore_plan( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_restore_plan - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_restore_plan_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the BackupForGKE server but before - it is returned to user code. + it is returned to user code. This `post_create_restore_plan` interceptor runs + before the `post_create_restore_plan_with_metadata` interceptor. """ return response + def post_create_restore_plan_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_restore_plan + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BackupForGKE server but before it is returned to user code. + + We recommend only using this `post_create_restore_plan_with_metadata` + interceptor in new development instead of the `post_create_restore_plan` interceptor. + When both interceptors are used, this `post_create_restore_plan_with_metadata` interceptor runs after the + `post_create_restore_plan` interceptor. The (possibly modified) response returned by + `post_create_restore_plan` will be passed to + `post_create_restore_plan_with_metadata`. + """ + return response, metadata + def pre_delete_backup( self, request: gkebackup.DeleteBackupRequest, @@ -398,12 +490,35 @@ def post_delete_backup( ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_backup - Override in a subclass to manipulate the response + DEPRECATED. 
Please use the `post_delete_backup_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the BackupForGKE server but before - it is returned to user code. + it is returned to user code. This `post_delete_backup` interceptor runs + before the `post_delete_backup_with_metadata` interceptor. """ return response + def post_delete_backup_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_backup + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BackupForGKE server but before it is returned to user code. + + We recommend only using this `post_delete_backup_with_metadata` + interceptor in new development instead of the `post_delete_backup` interceptor. + When both interceptors are used, this `post_delete_backup_with_metadata` interceptor runs after the + `post_delete_backup` interceptor. The (possibly modified) response returned by + `post_delete_backup` will be passed to + `post_delete_backup_with_metadata`. + """ + return response, metadata + def pre_delete_backup_plan( self, request: gkebackup.DeleteBackupPlanRequest, @@ -423,12 +538,35 @@ def post_delete_backup_plan( ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_backup_plan - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_backup_plan_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the BackupForGKE server but before - it is returned to user code. + it is returned to user code. This `post_delete_backup_plan` interceptor runs + before the `post_delete_backup_plan_with_metadata` interceptor. """ return response + def post_delete_backup_plan_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_backup_plan + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BackupForGKE server but before it is returned to user code. + + We recommend only using this `post_delete_backup_plan_with_metadata` + interceptor in new development instead of the `post_delete_backup_plan` interceptor. + When both interceptors are used, this `post_delete_backup_plan_with_metadata` interceptor runs after the + `post_delete_backup_plan` interceptor. The (possibly modified) response returned by + `post_delete_backup_plan` will be passed to + `post_delete_backup_plan_with_metadata`. + """ + return response, metadata + def pre_delete_restore( self, request: gkebackup.DeleteRestoreRequest, @@ -446,12 +584,35 @@ def post_delete_restore( ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_restore - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_restore_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the BackupForGKE server but before - it is returned to user code. + it is returned to user code. This `post_delete_restore` interceptor runs + before the `post_delete_restore_with_metadata` interceptor. 
""" return response + def post_delete_restore_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_restore + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BackupForGKE server but before it is returned to user code. + + We recommend only using this `post_delete_restore_with_metadata` + interceptor in new development instead of the `post_delete_restore` interceptor. + When both interceptors are used, this `post_delete_restore_with_metadata` interceptor runs after the + `post_delete_restore` interceptor. The (possibly modified) response returned by + `post_delete_restore` will be passed to + `post_delete_restore_with_metadata`. + """ + return response, metadata + def pre_delete_restore_plan( self, request: gkebackup.DeleteRestorePlanRequest, @@ -471,12 +632,35 @@ def post_delete_restore_plan( ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_restore_plan - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_restore_plan_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the BackupForGKE server but before - it is returned to user code. + it is returned to user code. This `post_delete_restore_plan` interceptor runs + before the `post_delete_restore_plan_with_metadata` interceptor. """ return response + def post_delete_restore_plan_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_restore_plan + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BackupForGKE server but before it is returned to user code. + + We recommend only using this `post_delete_restore_plan_with_metadata` + interceptor in new development instead of the `post_delete_restore_plan` interceptor. + When both interceptors are used, this `post_delete_restore_plan_with_metadata` interceptor runs after the + `post_delete_restore_plan` interceptor. The (possibly modified) response returned by + `post_delete_restore_plan` will be passed to + `post_delete_restore_plan_with_metadata`. + """ + return response, metadata + def pre_get_backup( self, request: gkebackup.GetBackupRequest, @@ -492,12 +676,33 @@ def pre_get_backup( def post_get_backup(self, response: backup.Backup) -> backup.Backup: """Post-rpc interceptor for get_backup - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_backup_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the BackupForGKE server but before - it is returned to user code. + it is returned to user code. This `post_get_backup` interceptor runs + before the `post_get_backup_with_metadata` interceptor. """ return response + def post_get_backup_with_metadata( + self, response: backup.Backup, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[backup.Backup, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_backup + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BackupForGKE server but before it is returned to user code. 
+ + We recommend only using this `post_get_backup_with_metadata` + interceptor in new development instead of the `post_get_backup` interceptor. + When both interceptors are used, this `post_get_backup_with_metadata` interceptor runs after the + `post_get_backup` interceptor. The (possibly modified) response returned by + `post_get_backup` will be passed to + `post_get_backup_with_metadata`. + """ + return response, metadata + def pre_get_backup_index_download_url( self, request: gkebackup.GetBackupIndexDownloadUrlRequest, @@ -518,12 +723,38 @@ def post_get_backup_index_download_url( ) -> gkebackup.GetBackupIndexDownloadUrlResponse: """Post-rpc interceptor for get_backup_index_download_url - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_backup_index_download_url_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the BackupForGKE server but before - it is returned to user code. + it is returned to user code. This `post_get_backup_index_download_url` interceptor runs + before the `post_get_backup_index_download_url_with_metadata` interceptor. """ return response + def post_get_backup_index_download_url_with_metadata( + self, + response: gkebackup.GetBackupIndexDownloadUrlResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + gkebackup.GetBackupIndexDownloadUrlResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for get_backup_index_download_url + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BackupForGKE server but before it is returned to user code. + + We recommend only using this `post_get_backup_index_download_url_with_metadata` + interceptor in new development instead of the `post_get_backup_index_download_url` interceptor. + When both interceptors are used, this `post_get_backup_index_download_url_with_metadata` interceptor runs after the + `post_get_backup_index_download_url` interceptor. The (possibly modified) response returned by + `post_get_backup_index_download_url` will be passed to + `post_get_backup_index_download_url_with_metadata`. + """ + return response, metadata + def pre_get_backup_plan( self, request: gkebackup.GetBackupPlanRequest, @@ -541,12 +772,35 @@ def post_get_backup_plan( ) -> backup_plan.BackupPlan: """Post-rpc interceptor for get_backup_plan - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_backup_plan_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the BackupForGKE server but before - it is returned to user code. + it is returned to user code. This `post_get_backup_plan` interceptor runs + before the `post_get_backup_plan_with_metadata` interceptor. """ return response + def post_get_backup_plan_with_metadata( + self, + response: backup_plan.BackupPlan, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[backup_plan.BackupPlan, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_backup_plan + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BackupForGKE server but before it is returned to user code. + + We recommend only using this `post_get_backup_plan_with_metadata` + interceptor in new development instead of the `post_get_backup_plan` interceptor. 
+ When both interceptors are used, this `post_get_backup_plan_with_metadata` interceptor runs after the + `post_get_backup_plan` interceptor. The (possibly modified) response returned by + `post_get_backup_plan` will be passed to + `post_get_backup_plan_with_metadata`. + """ + return response, metadata + def pre_get_restore( self, request: gkebackup.GetRestoreRequest, @@ -562,12 +816,35 @@ def pre_get_restore( def post_get_restore(self, response: restore.Restore) -> restore.Restore: """Post-rpc interceptor for get_restore - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_restore_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the BackupForGKE server but before - it is returned to user code. + it is returned to user code. This `post_get_restore` interceptor runs + before the `post_get_restore_with_metadata` interceptor. """ return response + def post_get_restore_with_metadata( + self, + response: restore.Restore, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[restore.Restore, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_restore + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BackupForGKE server but before it is returned to user code. + + We recommend only using this `post_get_restore_with_metadata` + interceptor in new development instead of the `post_get_restore` interceptor. + When both interceptors are used, this `post_get_restore_with_metadata` interceptor runs after the + `post_get_restore` interceptor. The (possibly modified) response returned by + `post_get_restore` will be passed to + `post_get_restore_with_metadata`. + """ + return response, metadata + def pre_get_restore_plan( self, request: gkebackup.GetRestorePlanRequest, @@ -587,12 +864,35 @@ def post_get_restore_plan( ) -> restore_plan.RestorePlan: """Post-rpc interceptor for get_restore_plan - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_restore_plan_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the BackupForGKE server but before - it is returned to user code. + it is returned to user code. This `post_get_restore_plan` interceptor runs + before the `post_get_restore_plan_with_metadata` interceptor. """ return response + def post_get_restore_plan_with_metadata( + self, + response: restore_plan.RestorePlan, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[restore_plan.RestorePlan, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_restore_plan + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BackupForGKE server but before it is returned to user code. + + We recommend only using this `post_get_restore_plan_with_metadata` + interceptor in new development instead of the `post_get_restore_plan` interceptor. + When both interceptors are used, this `post_get_restore_plan_with_metadata` interceptor runs after the + `post_get_restore_plan` interceptor. The (possibly modified) response returned by + `post_get_restore_plan` will be passed to + `post_get_restore_plan_with_metadata`. 
+ """ + return response, metadata + def pre_get_volume_backup( self, request: gkebackup.GetVolumeBackupRequest, @@ -612,12 +912,35 @@ def post_get_volume_backup( ) -> volume.VolumeBackup: """Post-rpc interceptor for get_volume_backup - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_volume_backup_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the BackupForGKE server but before - it is returned to user code. + it is returned to user code. This `post_get_volume_backup` interceptor runs + before the `post_get_volume_backup_with_metadata` interceptor. """ return response + def post_get_volume_backup_with_metadata( + self, + response: volume.VolumeBackup, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[volume.VolumeBackup, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_volume_backup + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BackupForGKE server but before it is returned to user code. + + We recommend only using this `post_get_volume_backup_with_metadata` + interceptor in new development instead of the `post_get_volume_backup` interceptor. + When both interceptors are used, this `post_get_volume_backup_with_metadata` interceptor runs after the + `post_get_volume_backup` interceptor. The (possibly modified) response returned by + `post_get_volume_backup` will be passed to + `post_get_volume_backup_with_metadata`. + """ + return response, metadata + def pre_get_volume_restore( self, request: gkebackup.GetVolumeRestoreRequest, @@ -637,12 +960,35 @@ def post_get_volume_restore( ) -> volume.VolumeRestore: """Post-rpc interceptor for get_volume_restore - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_volume_restore_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the BackupForGKE server but before - it is returned to user code. + it is returned to user code. This `post_get_volume_restore` interceptor runs + before the `post_get_volume_restore_with_metadata` interceptor. """ return response + def post_get_volume_restore_with_metadata( + self, + response: volume.VolumeRestore, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[volume.VolumeRestore, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_volume_restore + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BackupForGKE server but before it is returned to user code. + + We recommend only using this `post_get_volume_restore_with_metadata` + interceptor in new development instead of the `post_get_volume_restore` interceptor. + When both interceptors are used, this `post_get_volume_restore_with_metadata` interceptor runs after the + `post_get_volume_restore` interceptor. The (possibly modified) response returned by + `post_get_volume_restore` will be passed to + `post_get_volume_restore_with_metadata`. + """ + return response, metadata + def pre_list_backup_plans( self, request: gkebackup.ListBackupPlansRequest, @@ -662,12 +1008,37 @@ def post_list_backup_plans( ) -> gkebackup.ListBackupPlansResponse: """Post-rpc interceptor for list_backup_plans - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_backup_plans_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response after it is returned by the BackupForGKE server but before - it is returned to user code. + it is returned to user code. This `post_list_backup_plans` interceptor runs + before the `post_list_backup_plans_with_metadata` interceptor. """ return response + def post_list_backup_plans_with_metadata( + self, + response: gkebackup.ListBackupPlansResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + gkebackup.ListBackupPlansResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_backup_plans + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BackupForGKE server but before it is returned to user code. + + We recommend only using this `post_list_backup_plans_with_metadata` + interceptor in new development instead of the `post_list_backup_plans` interceptor. + When both interceptors are used, this `post_list_backup_plans_with_metadata` interceptor runs after the + `post_list_backup_plans` interceptor. The (possibly modified) response returned by + `post_list_backup_plans` will be passed to + `post_list_backup_plans_with_metadata`. + """ + return response, metadata + def pre_list_backups( self, request: gkebackup.ListBackupsRequest, @@ -685,12 +1056,35 @@ def post_list_backups( ) -> gkebackup.ListBackupsResponse: """Post-rpc interceptor for list_backups - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_backups_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the BackupForGKE server but before - it is returned to user code. + it is returned to user code. This `post_list_backups` interceptor runs + before the `post_list_backups_with_metadata` interceptor. """ return response + def post_list_backups_with_metadata( + self, + response: gkebackup.ListBackupsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gkebackup.ListBackupsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_backups + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BackupForGKE server but before it is returned to user code. + + We recommend only using this `post_list_backups_with_metadata` + interceptor in new development instead of the `post_list_backups` interceptor. + When both interceptors are used, this `post_list_backups_with_metadata` interceptor runs after the + `post_list_backups` interceptor. The (possibly modified) response returned by + `post_list_backups` will be passed to + `post_list_backups_with_metadata`. + """ + return response, metadata + def pre_list_restore_plans( self, request: gkebackup.ListRestorePlansRequest, @@ -710,12 +1104,37 @@ def post_list_restore_plans( ) -> gkebackup.ListRestorePlansResponse: """Post-rpc interceptor for list_restore_plans - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_restore_plans_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the BackupForGKE server but before - it is returned to user code. + it is returned to user code. This `post_list_restore_plans` interceptor runs + before the `post_list_restore_plans_with_metadata` interceptor. 
""" return response + def post_list_restore_plans_with_metadata( + self, + response: gkebackup.ListRestorePlansResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + gkebackup.ListRestorePlansResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_restore_plans + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BackupForGKE server but before it is returned to user code. + + We recommend only using this `post_list_restore_plans_with_metadata` + interceptor in new development instead of the `post_list_restore_plans` interceptor. + When both interceptors are used, this `post_list_restore_plans_with_metadata` interceptor runs after the + `post_list_restore_plans` interceptor. The (possibly modified) response returned by + `post_list_restore_plans` will be passed to + `post_list_restore_plans_with_metadata`. + """ + return response, metadata + def pre_list_restores( self, request: gkebackup.ListRestoresRequest, @@ -733,12 +1152,35 @@ def post_list_restores( ) -> gkebackup.ListRestoresResponse: """Post-rpc interceptor for list_restores - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_restores_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the BackupForGKE server but before - it is returned to user code. + it is returned to user code. This `post_list_restores` interceptor runs + before the `post_list_restores_with_metadata` interceptor. """ return response + def post_list_restores_with_metadata( + self, + response: gkebackup.ListRestoresResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gkebackup.ListRestoresResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_restores + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BackupForGKE server but before it is returned to user code. + + We recommend only using this `post_list_restores_with_metadata` + interceptor in new development instead of the `post_list_restores` interceptor. + When both interceptors are used, this `post_list_restores_with_metadata` interceptor runs after the + `post_list_restores` interceptor. The (possibly modified) response returned by + `post_list_restores` will be passed to + `post_list_restores_with_metadata`. + """ + return response, metadata + def pre_list_volume_backups( self, request: gkebackup.ListVolumeBackupsRequest, @@ -758,12 +1200,37 @@ def post_list_volume_backups( ) -> gkebackup.ListVolumeBackupsResponse: """Post-rpc interceptor for list_volume_backups - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_volume_backups_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the BackupForGKE server but before - it is returned to user code. + it is returned to user code. This `post_list_volume_backups` interceptor runs + before the `post_list_volume_backups_with_metadata` interceptor. 
""" return response + def post_list_volume_backups_with_metadata( + self, + response: gkebackup.ListVolumeBackupsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + gkebackup.ListVolumeBackupsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_volume_backups + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BackupForGKE server but before it is returned to user code. + + We recommend only using this `post_list_volume_backups_with_metadata` + interceptor in new development instead of the `post_list_volume_backups` interceptor. + When both interceptors are used, this `post_list_volume_backups_with_metadata` interceptor runs after the + `post_list_volume_backups` interceptor. The (possibly modified) response returned by + `post_list_volume_backups` will be passed to + `post_list_volume_backups_with_metadata`. + """ + return response, metadata + def pre_list_volume_restores( self, request: gkebackup.ListVolumeRestoresRequest, @@ -783,12 +1250,37 @@ def post_list_volume_restores( ) -> gkebackup.ListVolumeRestoresResponse: """Post-rpc interceptor for list_volume_restores - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_volume_restores_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the BackupForGKE server but before - it is returned to user code. + it is returned to user code. This `post_list_volume_restores` interceptor runs + before the `post_list_volume_restores_with_metadata` interceptor. """ return response + def post_list_volume_restores_with_metadata( + self, + response: gkebackup.ListVolumeRestoresResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + gkebackup.ListVolumeRestoresResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_volume_restores + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BackupForGKE server but before it is returned to user code. + + We recommend only using this `post_list_volume_restores_with_metadata` + interceptor in new development instead of the `post_list_volume_restores` interceptor. + When both interceptors are used, this `post_list_volume_restores_with_metadata` interceptor runs after the + `post_list_volume_restores` interceptor. The (possibly modified) response returned by + `post_list_volume_restores` will be passed to + `post_list_volume_restores_with_metadata`. + """ + return response, metadata + def pre_update_backup( self, request: gkebackup.UpdateBackupRequest, @@ -806,12 +1298,35 @@ def post_update_backup( ) -> operations_pb2.Operation: """Post-rpc interceptor for update_backup - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_backup_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the BackupForGKE server but before - it is returned to user code. + it is returned to user code. This `post_update_backup` interceptor runs + before the `post_update_backup_with_metadata` interceptor. 
""" return response + def post_update_backup_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_backup + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BackupForGKE server but before it is returned to user code. + + We recommend only using this `post_update_backup_with_metadata` + interceptor in new development instead of the `post_update_backup` interceptor. + When both interceptors are used, this `post_update_backup_with_metadata` interceptor runs after the + `post_update_backup` interceptor. The (possibly modified) response returned by + `post_update_backup` will be passed to + `post_update_backup_with_metadata`. + """ + return response, metadata + def pre_update_backup_plan( self, request: gkebackup.UpdateBackupPlanRequest, @@ -831,12 +1346,35 @@ def post_update_backup_plan( ) -> operations_pb2.Operation: """Post-rpc interceptor for update_backup_plan - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_backup_plan_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the BackupForGKE server but before - it is returned to user code. + it is returned to user code. This `post_update_backup_plan` interceptor runs + before the `post_update_backup_plan_with_metadata` interceptor. """ return response + def post_update_backup_plan_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_backup_plan + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BackupForGKE server but before it is returned to user code. + + We recommend only using this `post_update_backup_plan_with_metadata` + interceptor in new development instead of the `post_update_backup_plan` interceptor. + When both interceptors are used, this `post_update_backup_plan_with_metadata` interceptor runs after the + `post_update_backup_plan` interceptor. The (possibly modified) response returned by + `post_update_backup_plan` will be passed to + `post_update_backup_plan_with_metadata`. + """ + return response, metadata + def pre_update_restore( self, request: gkebackup.UpdateRestoreRequest, @@ -854,12 +1392,35 @@ def post_update_restore( ) -> operations_pb2.Operation: """Post-rpc interceptor for update_restore - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_restore_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the BackupForGKE server but before - it is returned to user code. + it is returned to user code. This `post_update_restore` interceptor runs + before the `post_update_restore_with_metadata` interceptor. """ return response + def post_update_restore_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_restore + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BackupForGKE server but before it is returned to user code. 
+ + We recommend only using this `post_update_restore_with_metadata` + interceptor in new development instead of the `post_update_restore` interceptor. + When both interceptors are used, this `post_update_restore_with_metadata` interceptor runs after the + `post_update_restore` interceptor. The (possibly modified) response returned by + `post_update_restore` will be passed to + `post_update_restore_with_metadata`. + """ + return response, metadata + def pre_update_restore_plan( self, request: gkebackup.UpdateRestorePlanRequest, @@ -879,12 +1440,35 @@ def post_update_restore_plan( ) -> operations_pb2.Operation: """Post-rpc interceptor for update_restore_plan - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_restore_plan_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the BackupForGKE server but before - it is returned to user code. + it is returned to user code. This `post_update_restore_plan` interceptor runs + before the `post_update_restore_plan_with_metadata` interceptor. """ return response + def post_update_restore_plan_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_restore_plan + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BackupForGKE server but before it is returned to user code. + + We recommend only using this `post_update_restore_plan_with_metadata` + interceptor in new development instead of the `post_update_restore_plan` interceptor. + When both interceptors are used, this `post_update_restore_plan_with_metadata` interceptor runs after the + `post_update_restore_plan` interceptor. The (possibly modified) response returned by + `post_update_restore_plan` will be passed to + `post_update_restore_plan_with_metadata`. 
+ """ + return response, metadata + def pre_get_location( self, request: locations_pb2.GetLocationRequest, @@ -1374,6 +1958,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_backup(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_backup_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1523,6 +2111,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_backup_plan(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_backup_plan_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1670,6 +2262,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_restore(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_restore_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1820,6 +2416,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_restore_plan(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_restore_plan_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1963,6 +2563,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_backup(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_backup_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2106,6 +2710,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_backup_plan(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_backup_plan_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2247,6 +2855,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_restore(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_restore_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2391,6 +3003,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_restore_plan(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_restore_plan_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2542,6 +3158,10 @@ def __call__( json_format.Parse(response.content, pb_resp, 
ignore_unknown_fields=True) resp = self._interceptor.post_get_backup(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_backup_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2691,6 +3311,13 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_backup_index_download_url(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_get_backup_index_download_url_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2835,6 +3462,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_backup_plan(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_backup_plan_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2983,6 +3614,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_restore(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_restore_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3129,6 +3764,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_restore_plan(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_restore_plan_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3276,6 +3915,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_volume_backup(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_volume_backup_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3420,6 +4063,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_volume_restore(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_volume_restore_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3562,6 +4209,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_backup_plans(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_backup_plans_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3708,6 +4359,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_backups(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] 
+ resp, _ = self._interceptor.post_list_backups_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3852,6 +4507,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_restore_plans(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_restore_plans_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3996,6 +4655,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_restores(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_restores_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4141,6 +4804,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_volume_backups(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_volume_backups_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4288,6 +4955,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_volume_restores(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_volume_restores_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4441,6 +5112,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_backup(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_backup_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4590,6 +5265,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_backup_plan(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_backup_plan_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4737,6 +5416,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_restore(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_restore_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4887,6 +5570,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_restore_plan(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_restore_plan_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # 
pragma: NO COVER diff --git a/packages/google-cloud-gke-backup/samples/generated_samples/snippet_metadata_google.cloud.gkebackup.v1.json b/packages/google-cloud-gke-backup/samples/generated_samples/snippet_metadata_google.cloud.gkebackup.v1.json index 5bb20f5d0447..3acaeae1c152 100644 --- a/packages/google-cloud-gke-backup/samples/generated_samples/snippet_metadata_google.cloud.gkebackup.v1.json +++ b/packages/google-cloud-gke-backup/samples/generated_samples/snippet_metadata_google.cloud.gkebackup.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-gke-backup", - "version": "0.5.14" + "version": "0.5.15" }, "snippets": [ { diff --git a/packages/google-cloud-gke-backup/tests/unit/gapic/gke_backup_v1/test_backup_for_gke.py b/packages/google-cloud-gke-backup/tests/unit/gapic/gke_backup_v1/test_backup_for_gke.py index 09b5c43fbd45..d8d7ffc73421 100644 --- a/packages/google-cloud-gke-backup/tests/unit/gapic/gke_backup_v1/test_backup_for_gke.py +++ b/packages/google-cloud-gke-backup/tests/unit/gapic/gke_backup_v1/test_backup_for_gke.py @@ -90,6 +90,13 @@ from google.cloud.gke_backup_v1.types import restore_plan as gcg_restore_plan from google.cloud.gke_backup_v1.types import volume +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -333,6 +340,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = BackupForGKEClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = BackupForGKEClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -17803,10 +17853,13 @@ def test_create_backup_plan_rest_interceptors(null_interceptor): ), mock.patch.object( transports.BackupForGKERestInterceptor, "post_create_backup_plan" ) as post, mock.patch.object( + transports.BackupForGKERestInterceptor, "post_create_backup_plan_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.BackupForGKERestInterceptor, "pre_create_backup_plan" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = 
gkebackup.CreateBackupPlanRequest.pb( gkebackup.CreateBackupPlanRequest() ) @@ -17830,6 +17883,7 @@ def test_create_backup_plan_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_backup_plan( request, @@ -17841,6 +17895,7 @@ def test_create_backup_plan_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_backup_plans_rest_bad_request( @@ -17927,10 +17982,13 @@ def test_list_backup_plans_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.BackupForGKERestInterceptor, "post_list_backup_plans" ) as post, mock.patch.object( + transports.BackupForGKERestInterceptor, "post_list_backup_plans_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.BackupForGKERestInterceptor, "pre_list_backup_plans" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gkebackup.ListBackupPlansRequest.pb( gkebackup.ListBackupPlansRequest() ) @@ -17956,6 +18014,7 @@ def test_list_backup_plans_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gkebackup.ListBackupPlansResponse() + post_with_metadata.return_value = gkebackup.ListBackupPlansResponse(), metadata client.list_backup_plans( request, @@ -17967,6 +18026,7 @@ def test_list_backup_plans_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_backup_plan_rest_bad_request(request_type=gkebackup.GetBackupPlanRequest): @@ -18069,10 +18129,13 @@ def test_get_backup_plan_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.BackupForGKERestInterceptor, "post_get_backup_plan" ) as post, mock.patch.object( + transports.BackupForGKERestInterceptor, "post_get_backup_plan_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.BackupForGKERestInterceptor, "pre_get_backup_plan" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gkebackup.GetBackupPlanRequest.pb(gkebackup.GetBackupPlanRequest()) transcode.return_value = { "method": "post", @@ -18094,6 +18157,7 @@ def test_get_backup_plan_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = backup_plan.BackupPlan() + post_with_metadata.return_value = backup_plan.BackupPlan(), metadata client.get_backup_plan( request, @@ -18105,6 +18169,7 @@ def test_get_backup_plan_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_backup_plan_rest_bad_request( @@ -18324,10 +18389,13 @@ def test_update_backup_plan_rest_interceptors(null_interceptor): ), mock.patch.object( transports.BackupForGKERestInterceptor, "post_update_backup_plan" ) as post, mock.patch.object( + transports.BackupForGKERestInterceptor, "post_update_backup_plan_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.BackupForGKERestInterceptor, "pre_update_backup_plan" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gkebackup.UpdateBackupPlanRequest.pb( gkebackup.UpdateBackupPlanRequest() ) @@ -18351,6 +18419,7 @@ def 
test_update_backup_plan_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.update_backup_plan( request, @@ -18362,6 +18431,7 @@ def test_update_backup_plan_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_backup_plan_rest_bad_request( @@ -18442,10 +18512,13 @@ def test_delete_backup_plan_rest_interceptors(null_interceptor): ), mock.patch.object( transports.BackupForGKERestInterceptor, "post_delete_backup_plan" ) as post, mock.patch.object( + transports.BackupForGKERestInterceptor, "post_delete_backup_plan_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.BackupForGKERestInterceptor, "pre_delete_backup_plan" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gkebackup.DeleteBackupPlanRequest.pb( gkebackup.DeleteBackupPlanRequest() ) @@ -18469,6 +18542,7 @@ def test_delete_backup_plan_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.delete_backup_plan( request, @@ -18480,6 +18554,7 @@ def test_delete_backup_plan_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_backup_rest_bad_request(request_type=gkebackup.CreateBackupRequest): @@ -18665,10 +18740,13 @@ def test_create_backup_rest_interceptors(null_interceptor): ), mock.patch.object( transports.BackupForGKERestInterceptor, "post_create_backup" ) as post, mock.patch.object( + transports.BackupForGKERestInterceptor, "post_create_backup_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.BackupForGKERestInterceptor, "pre_create_backup" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gkebackup.CreateBackupRequest.pb(gkebackup.CreateBackupRequest()) transcode.return_value = { "method": "post", @@ -18690,6 +18768,7 @@ def test_create_backup_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_backup( request, @@ -18701,6 +18780,7 @@ def test_create_backup_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_backups_rest_bad_request(request_type=gkebackup.ListBackupsRequest): @@ -18783,10 +18863,13 @@ def test_list_backups_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.BackupForGKERestInterceptor, "post_list_backups" ) as post, mock.patch.object( + transports.BackupForGKERestInterceptor, "post_list_backups_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.BackupForGKERestInterceptor, "pre_list_backups" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gkebackup.ListBackupsRequest.pb(gkebackup.ListBackupsRequest()) transcode.return_value = { "method": "post", @@ -18810,6 +18893,7 @@ def test_list_backups_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gkebackup.ListBackupsResponse() + 
post_with_metadata.return_value = gkebackup.ListBackupsResponse(), metadata client.list_backups( request, @@ -18821,6 +18905,7 @@ def test_list_backups_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_backup_rest_bad_request(request_type=gkebackup.GetBackupRequest): @@ -18940,10 +19025,13 @@ def test_get_backup_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.BackupForGKERestInterceptor, "post_get_backup" ) as post, mock.patch.object( + transports.BackupForGKERestInterceptor, "post_get_backup_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.BackupForGKERestInterceptor, "pre_get_backup" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gkebackup.GetBackupRequest.pb(gkebackup.GetBackupRequest()) transcode.return_value = { "method": "post", @@ -18965,6 +19053,7 @@ def test_get_backup_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = backup.Backup() + post_with_metadata.return_value = backup.Backup(), metadata client.get_backup( request, @@ -18976,6 +19065,7 @@ def test_get_backup_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_backup_rest_bad_request(request_type=gkebackup.UpdateBackupRequest): @@ -19169,10 +19259,13 @@ def test_update_backup_rest_interceptors(null_interceptor): ), mock.patch.object( transports.BackupForGKERestInterceptor, "post_update_backup" ) as post, mock.patch.object( + transports.BackupForGKERestInterceptor, "post_update_backup_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.BackupForGKERestInterceptor, "pre_update_backup" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gkebackup.UpdateBackupRequest.pb(gkebackup.UpdateBackupRequest()) transcode.return_value = { "method": "post", @@ -19194,6 +19287,7 @@ def test_update_backup_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.update_backup( request, @@ -19205,6 +19299,7 @@ def test_update_backup_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_backup_rest_bad_request(request_type=gkebackup.DeleteBackupRequest): @@ -19287,10 +19382,13 @@ def test_delete_backup_rest_interceptors(null_interceptor): ), mock.patch.object( transports.BackupForGKERestInterceptor, "post_delete_backup" ) as post, mock.patch.object( + transports.BackupForGKERestInterceptor, "post_delete_backup_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.BackupForGKERestInterceptor, "pre_delete_backup" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gkebackup.DeleteBackupRequest.pb(gkebackup.DeleteBackupRequest()) transcode.return_value = { "method": "post", @@ -19312,6 +19410,7 @@ def test_delete_backup_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.delete_backup( request, @@ -19323,6 +19422,7 @@ def 
test_delete_backup_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_volume_backups_rest_bad_request( @@ -19411,10 +19511,13 @@ def test_list_volume_backups_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.BackupForGKERestInterceptor, "post_list_volume_backups" ) as post, mock.patch.object( + transports.BackupForGKERestInterceptor, "post_list_volume_backups_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.BackupForGKERestInterceptor, "pre_list_volume_backups" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gkebackup.ListVolumeBackupsRequest.pb( gkebackup.ListVolumeBackupsRequest() ) @@ -19440,6 +19543,10 @@ def test_list_volume_backups_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gkebackup.ListVolumeBackupsResponse() + post_with_metadata.return_value = ( + gkebackup.ListVolumeBackupsResponse(), + metadata, + ) client.list_volume_backups( request, @@ -19451,6 +19558,7 @@ def test_list_volume_backups_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_volume_backup_rest_bad_request( @@ -19557,10 +19665,13 @@ def test_get_volume_backup_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.BackupForGKERestInterceptor, "post_get_volume_backup" ) as post, mock.patch.object( + transports.BackupForGKERestInterceptor, "post_get_volume_backup_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.BackupForGKERestInterceptor, "pre_get_volume_backup" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gkebackup.GetVolumeBackupRequest.pb( gkebackup.GetVolumeBackupRequest() ) @@ -19584,6 +19695,7 @@ def test_get_volume_backup_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = volume.VolumeBackup() + post_with_metadata.return_value = volume.VolumeBackup(), metadata client.get_volume_backup( request, @@ -19595,6 +19707,7 @@ def test_get_volume_backup_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_restore_plan_rest_bad_request( @@ -19816,10 +19929,13 @@ def test_create_restore_plan_rest_interceptors(null_interceptor): ), mock.patch.object( transports.BackupForGKERestInterceptor, "post_create_restore_plan" ) as post, mock.patch.object( + transports.BackupForGKERestInterceptor, "post_create_restore_plan_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.BackupForGKERestInterceptor, "pre_create_restore_plan" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gkebackup.CreateRestorePlanRequest.pb( gkebackup.CreateRestorePlanRequest() ) @@ -19843,6 +19959,7 @@ def test_create_restore_plan_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_restore_plan( request, @@ -19854,6 +19971,7 @@ def test_create_restore_plan_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def 
test_list_restore_plans_rest_bad_request( @@ -19940,10 +20058,13 @@ def test_list_restore_plans_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.BackupForGKERestInterceptor, "post_list_restore_plans" ) as post, mock.patch.object( + transports.BackupForGKERestInterceptor, "post_list_restore_plans_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.BackupForGKERestInterceptor, "pre_list_restore_plans" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gkebackup.ListRestorePlansRequest.pb( gkebackup.ListRestorePlansRequest() ) @@ -19969,6 +20090,7 @@ def test_list_restore_plans_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gkebackup.ListRestorePlansResponse() + post_with_metadata.return_value = gkebackup.ListRestorePlansResponse(), metadata client.list_restore_plans( request, @@ -19980,6 +20102,7 @@ def test_list_restore_plans_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_restore_plan_rest_bad_request( @@ -20078,10 +20201,13 @@ def test_get_restore_plan_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.BackupForGKERestInterceptor, "post_get_restore_plan" ) as post, mock.patch.object( + transports.BackupForGKERestInterceptor, "post_get_restore_plan_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.BackupForGKERestInterceptor, "pre_get_restore_plan" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gkebackup.GetRestorePlanRequest.pb( gkebackup.GetRestorePlanRequest() ) @@ -20105,6 +20231,7 @@ def test_get_restore_plan_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = restore_plan.RestorePlan() + post_with_metadata.return_value = restore_plan.RestorePlan(), metadata client.get_restore_plan( request, @@ -20116,6 +20243,7 @@ def test_get_restore_plan_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_restore_plan_rest_bad_request( @@ -20345,10 +20473,13 @@ def test_update_restore_plan_rest_interceptors(null_interceptor): ), mock.patch.object( transports.BackupForGKERestInterceptor, "post_update_restore_plan" ) as post, mock.patch.object( + transports.BackupForGKERestInterceptor, "post_update_restore_plan_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.BackupForGKERestInterceptor, "pre_update_restore_plan" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gkebackup.UpdateRestorePlanRequest.pb( gkebackup.UpdateRestorePlanRequest() ) @@ -20372,6 +20503,7 @@ def test_update_restore_plan_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.update_restore_plan( request, @@ -20383,6 +20515,7 @@ def test_update_restore_plan_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_restore_plan_rest_bad_request( @@ -20463,10 +20596,13 @@ def test_delete_restore_plan_rest_interceptors(null_interceptor): ), mock.patch.object( transports.BackupForGKERestInterceptor, 
"post_delete_restore_plan" ) as post, mock.patch.object( + transports.BackupForGKERestInterceptor, "post_delete_restore_plan_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.BackupForGKERestInterceptor, "pre_delete_restore_plan" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gkebackup.DeleteRestorePlanRequest.pb( gkebackup.DeleteRestorePlanRequest() ) @@ -20490,6 +20626,7 @@ def test_delete_restore_plan_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.delete_restore_plan( request, @@ -20501,6 +20638,7 @@ def test_delete_restore_plan_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_restore_rest_bad_request(request_type=gkebackup.CreateRestoreRequest): @@ -20737,10 +20875,13 @@ def test_create_restore_rest_interceptors(null_interceptor): ), mock.patch.object( transports.BackupForGKERestInterceptor, "post_create_restore" ) as post, mock.patch.object( + transports.BackupForGKERestInterceptor, "post_create_restore_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.BackupForGKERestInterceptor, "pre_create_restore" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gkebackup.CreateRestoreRequest.pb(gkebackup.CreateRestoreRequest()) transcode.return_value = { "method": "post", @@ -20762,6 +20903,7 @@ def test_create_restore_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_restore( request, @@ -20773,6 +20915,7 @@ def test_create_restore_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_restores_rest_bad_request(request_type=gkebackup.ListRestoresRequest): @@ -20857,10 +21000,13 @@ def test_list_restores_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.BackupForGKERestInterceptor, "post_list_restores" ) as post, mock.patch.object( + transports.BackupForGKERestInterceptor, "post_list_restores_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.BackupForGKERestInterceptor, "pre_list_restores" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gkebackup.ListRestoresRequest.pb(gkebackup.ListRestoresRequest()) transcode.return_value = { "method": "post", @@ -20884,6 +21030,7 @@ def test_list_restores_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gkebackup.ListRestoresResponse() + post_with_metadata.return_value = gkebackup.ListRestoresResponse(), metadata client.list_restores( request, @@ -20895,6 +21042,7 @@ def test_list_restores_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_restore_rest_bad_request(request_type=gkebackup.GetRestoreRequest): @@ -21003,10 +21151,13 @@ def test_get_restore_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.BackupForGKERestInterceptor, "post_get_restore" ) as post, mock.patch.object( + 
transports.BackupForGKERestInterceptor, "post_get_restore_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.BackupForGKERestInterceptor, "pre_get_restore" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gkebackup.GetRestoreRequest.pb(gkebackup.GetRestoreRequest()) transcode.return_value = { "method": "post", @@ -21028,6 +21179,7 @@ def test_get_restore_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = restore.Restore() + post_with_metadata.return_value = restore.Restore(), metadata client.get_restore( request, @@ -21039,6 +21191,7 @@ def test_get_restore_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_restore_rest_bad_request(request_type=gkebackup.UpdateRestoreRequest): @@ -21283,10 +21436,13 @@ def test_update_restore_rest_interceptors(null_interceptor): ), mock.patch.object( transports.BackupForGKERestInterceptor, "post_update_restore" ) as post, mock.patch.object( + transports.BackupForGKERestInterceptor, "post_update_restore_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.BackupForGKERestInterceptor, "pre_update_restore" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gkebackup.UpdateRestoreRequest.pb(gkebackup.UpdateRestoreRequest()) transcode.return_value = { "method": "post", @@ -21308,6 +21464,7 @@ def test_update_restore_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.update_restore( request, @@ -21319,6 +21476,7 @@ def test_update_restore_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_restore_rest_bad_request(request_type=gkebackup.DeleteRestoreRequest): @@ -21401,10 +21559,13 @@ def test_delete_restore_rest_interceptors(null_interceptor): ), mock.patch.object( transports.BackupForGKERestInterceptor, "post_delete_restore" ) as post, mock.patch.object( + transports.BackupForGKERestInterceptor, "post_delete_restore_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.BackupForGKERestInterceptor, "pre_delete_restore" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gkebackup.DeleteRestoreRequest.pb(gkebackup.DeleteRestoreRequest()) transcode.return_value = { "method": "post", @@ -21426,6 +21587,7 @@ def test_delete_restore_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.delete_restore( request, @@ -21437,6 +21599,7 @@ def test_delete_restore_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_volume_restores_rest_bad_request( @@ -21525,10 +21688,14 @@ def test_list_volume_restores_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.BackupForGKERestInterceptor, "post_list_volume_restores" ) as post, mock.patch.object( + transports.BackupForGKERestInterceptor, + "post_list_volume_restores_with_metadata", + ) as post_with_metadata, mock.patch.object( 
transports.BackupForGKERestInterceptor, "pre_list_volume_restores" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gkebackup.ListVolumeRestoresRequest.pb( gkebackup.ListVolumeRestoresRequest() ) @@ -21554,6 +21721,10 @@ def test_list_volume_restores_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gkebackup.ListVolumeRestoresResponse() + post_with_metadata.return_value = ( + gkebackup.ListVolumeRestoresResponse(), + metadata, + ) client.list_volume_restores( request, @@ -21565,6 +21736,7 @@ def test_list_volume_restores_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_volume_restore_rest_bad_request( @@ -21667,10 +21839,13 @@ def test_get_volume_restore_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.BackupForGKERestInterceptor, "post_get_volume_restore" ) as post, mock.patch.object( + transports.BackupForGKERestInterceptor, "post_get_volume_restore_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.BackupForGKERestInterceptor, "pre_get_volume_restore" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gkebackup.GetVolumeRestoreRequest.pb( gkebackup.GetVolumeRestoreRequest() ) @@ -21694,6 +21869,7 @@ def test_get_volume_restore_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = volume.VolumeRestore() + post_with_metadata.return_value = volume.VolumeRestore(), metadata client.get_volume_restore( request, @@ -21705,6 +21881,7 @@ def test_get_volume_restore_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_backup_index_download_url_rest_bad_request( @@ -21793,10 +21970,14 @@ def test_get_backup_index_download_url_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.BackupForGKERestInterceptor, "post_get_backup_index_download_url" ) as post, mock.patch.object( + transports.BackupForGKERestInterceptor, + "post_get_backup_index_download_url_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.BackupForGKERestInterceptor, "pre_get_backup_index_download_url" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gkebackup.GetBackupIndexDownloadUrlRequest.pb( gkebackup.GetBackupIndexDownloadUrlRequest() ) @@ -21822,6 +22003,10 @@ def test_get_backup_index_download_url_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gkebackup.GetBackupIndexDownloadUrlResponse() + post_with_metadata.return_value = ( + gkebackup.GetBackupIndexDownloadUrlResponse(), + metadata, + ) client.get_backup_index_download_url( request, @@ -21833,6 +22018,7 @@ def test_get_backup_index_download_url_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationRequest): diff --git a/packages/google-cloud-gke-connect-gateway/CHANGELOG.md b/packages/google-cloud-gke-connect-gateway/CHANGELOG.md index ead9f1fcf345..412bb0b56389 100644 --- a/packages/google-cloud-gke-connect-gateway/CHANGELOG.md +++ b/packages/google-cloud-gke-connect-gateway/CHANGELOG.md @@ -1,5 +1,13 @@ # Changelog 
+## [0.10.2](https://github.com/googleapis/google-cloud-python/compare/google-cloud-gke-connect-gateway-v0.10.1...google-cloud-gke-connect-gateway-v0.10.2) (2025-02-12) + + +### Features + +* Add REST Interceptors which support reading metadata ([908d742](https://github.com/googleapis/google-cloud-python/commit/908d7421a4adadd7407df7ec2a25e25688ff180f)) +* Add support for reading selective GAPIC generation methods from service YAML ([908d742](https://github.com/googleapis/google-cloud-python/commit/908d7421a4adadd7407df7ec2a25e25688ff180f)) + ## [0.10.1](https://github.com/googleapis/google-cloud-python/compare/google-cloud-gke-connect-gateway-v0.10.0...google-cloud-gke-connect-gateway-v0.10.1) (2024-12-12) diff --git a/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway/gapic_version.py b/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway/gapic_version.py index 1ccbb45b89d1..4ed3049a7cd1 100644 --- a/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway/gapic_version.py +++ b/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.10.1" # {x-release-please-version} +__version__ = "0.10.2" # {x-release-please-version} diff --git a/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1/gapic_version.py b/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1/gapic_version.py index 1ccbb45b89d1..4ed3049a7cd1 100644 --- a/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1/gapic_version.py +++ b/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.10.1" # {x-release-please-version} +__version__ = "0.10.2" # {x-release-please-version} diff --git a/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1/services/gateway_control/client.py b/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1/services/gateway_control/client.py index bf73e65baf23..d779aa2ccb3b 100644 --- a/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1/services/gateway_control/client.py +++ b/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1/services/gateway_control/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -457,6 +459,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. 
+ """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. diff --git a/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1/services/gateway_control/transports/rest.py b/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1/services/gateway_control/transports/rest.py index 1650f0577c2e..a5ea07c43dbe 100644 --- a/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1/services/gateway_control/transports/rest.py +++ b/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1/services/gateway_control/transports/rest.py @@ -101,12 +101,37 @@ def post_generate_credentials( ) -> control.GenerateCredentialsResponse: """Post-rpc interceptor for generate_credentials - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_generate_credentials_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the GatewayControl server but before - it is returned to user code. + it is returned to user code. This `post_generate_credentials` interceptor runs + before the `post_generate_credentials_with_metadata` interceptor. """ return response + def post_generate_credentials_with_metadata( + self, + response: control.GenerateCredentialsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + control.GenerateCredentialsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for generate_credentials + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the GatewayControl server but before it is returned to user code. + + We recommend only using this `post_generate_credentials_with_metadata` + interceptor in new development instead of the `post_generate_credentials` interceptor. + When both interceptors are used, this `post_generate_credentials_with_metadata` interceptor runs after the + `post_generate_credentials` interceptor. The (possibly modified) response returned by + `post_generate_credentials` will be passed to + `post_generate_credentials_with_metadata`. 
+ """ + return response, metadata + @dataclasses.dataclass class GatewayControlRestStub: @@ -317,6 +342,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_generate_credentials(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_generate_credentials_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1beta1/gapic_version.py b/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1beta1/gapic_version.py index 1ccbb45b89d1..4ed3049a7cd1 100644 --- a/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1beta1/gapic_version.py +++ b/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.10.1" # {x-release-please-version} +__version__ = "0.10.2" # {x-release-please-version} diff --git a/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1beta1/services/gateway_control/client.py b/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1beta1/services/gateway_control/client.py index 3b304055fc84..c1e114934db0 100644 --- a/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1beta1/services/gateway_control/client.py +++ b/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1beta1/services/gateway_control/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -457,6 +459,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. 
diff --git a/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1beta1/services/gateway_control/transports/rest.py b/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1beta1/services/gateway_control/transports/rest.py index ca453da213a7..3537fb393450 100644 --- a/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1beta1/services/gateway_control/transports/rest.py +++ b/packages/google-cloud-gke-connect-gateway/google/cloud/gkeconnect/gateway_v1beta1/services/gateway_control/transports/rest.py @@ -101,12 +101,37 @@ def post_generate_credentials( ) -> control.GenerateCredentialsResponse: """Post-rpc interceptor for generate_credentials - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_generate_credentials_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the GatewayControl server but before - it is returned to user code. + it is returned to user code. This `post_generate_credentials` interceptor runs + before the `post_generate_credentials_with_metadata` interceptor. """ return response + def post_generate_credentials_with_metadata( + self, + response: control.GenerateCredentialsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + control.GenerateCredentialsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for generate_credentials + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the GatewayControl server but before it is returned to user code. + + We recommend only using this `post_generate_credentials_with_metadata` + interceptor in new development instead of the `post_generate_credentials` interceptor. + When both interceptors are used, this `post_generate_credentials_with_metadata` interceptor runs after the + `post_generate_credentials` interceptor. The (possibly modified) response returned by + `post_generate_credentials` will be passed to + `post_generate_credentials_with_metadata`. 
+ """ + return response, metadata + @dataclasses.dataclass class GatewayControlRestStub: @@ -321,6 +346,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_generate_credentials(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_generate_credentials_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-gke-connect-gateway/samples/generated_samples/snippet_metadata_google.cloud.gkeconnect.gateway.v1.json b/packages/google-cloud-gke-connect-gateway/samples/generated_samples/snippet_metadata_google.cloud.gkeconnect.gateway.v1.json index 426a7ec4de6a..220dd01c4d3c 100644 --- a/packages/google-cloud-gke-connect-gateway/samples/generated_samples/snippet_metadata_google.cloud.gkeconnect.gateway.v1.json +++ b/packages/google-cloud-gke-connect-gateway/samples/generated_samples/snippet_metadata_google.cloud.gkeconnect.gateway.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-gke-connect-gateway", - "version": "0.10.1" + "version": "0.10.2" }, "snippets": [ { diff --git a/packages/google-cloud-gke-connect-gateway/samples/generated_samples/snippet_metadata_google.cloud.gkeconnect.gateway.v1beta1.json b/packages/google-cloud-gke-connect-gateway/samples/generated_samples/snippet_metadata_google.cloud.gkeconnect.gateway.v1beta1.json index 66be1a6d4cef..36c958bcff1f 100644 --- a/packages/google-cloud-gke-connect-gateway/samples/generated_samples/snippet_metadata_google.cloud.gkeconnect.gateway.v1beta1.json +++ b/packages/google-cloud-gke-connect-gateway/samples/generated_samples/snippet_metadata_google.cloud.gkeconnect.gateway.v1beta1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-gke-connect-gateway", - "version": "0.10.1" + "version": "0.10.2" }, "snippets": [ { diff --git a/packages/google-cloud-gke-connect-gateway/tests/unit/gapic/gateway_v1/test_gateway_control.py b/packages/google-cloud-gke-connect-gateway/tests/unit/gapic/gateway_v1/test_gateway_control.py index e671b17a415f..86b997e7227c 100644 --- a/packages/google-cloud-gke-connect-gateway/tests/unit/gapic/gateway_v1/test_gateway_control.py +++ b/packages/google-cloud-gke-connect-gateway/tests/unit/gapic/gateway_v1/test_gateway_control.py @@ -58,6 +58,13 @@ ) from google.cloud.gkeconnect.gateway_v1.types import control +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -311,6 +318,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = GatewayControlClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = GatewayControlClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -1203,10 +1253,14 @@ def test_generate_credentials_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.GatewayControlRestInterceptor, "post_generate_credentials" ) as post, mock.patch.object( + transports.GatewayControlRestInterceptor, + "post_generate_credentials_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.GatewayControlRestInterceptor, "pre_generate_credentials" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = control.GenerateCredentialsRequest.pb( control.GenerateCredentialsRequest() ) @@ -1232,6 +1286,10 @@ def test_generate_credentials_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = control.GenerateCredentialsResponse() + post_with_metadata.return_value = ( + control.GenerateCredentialsResponse(), + metadata, + ) client.generate_credentials( request, @@ -1243,6 +1301,7 @@ def test_generate_credentials_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_initialize_client_w_rest(): diff --git a/packages/google-cloud-gke-connect-gateway/tests/unit/gapic/gateway_v1beta1/test_gateway_control.py b/packages/google-cloud-gke-connect-gateway/tests/unit/gapic/gateway_v1beta1/test_gateway_control.py index 9f83dd709980..88b9101c95fd 100644 --- a/packages/google-cloud-gke-connect-gateway/tests/unit/gapic/gateway_v1beta1/test_gateway_control.py +++ b/packages/google-cloud-gke-connect-gateway/tests/unit/gapic/gateway_v1beta1/test_gateway_control.py @@ -58,6 +58,13 @@ ) from google.cloud.gkeconnect.gateway_v1beta1.types import control +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -311,6 +318,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
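The gate exercised by the parametrized test below can be summarized in a small standalone sketch. This is only an illustration of the behavior visible in the `_add_cred_info_for_auth_errors` helper added above, not the generated client method itself: 401/403/404 errors have the credential info appended to their details as a JSON string, while other status codes are left untouched.

import json
from http import HTTPStatus

def sketch_add_cred_info(error_code, details, cred_info):
    # Same gate as the generated helper: only auth-related statuses are
    # enriched, and only when the credentials actually expose cred info
    # (google-auth >= 2.35.0 provides get_cred_info()).
    auth_statuses = (
        HTTPStatus.UNAUTHORIZED,
        HTTPStatus.FORBIDDEN,
        HTTPStatus.NOT_FOUND,
    )
    if error_code in auth_statuses and cred_info:
        details.append(json.dumps(cred_info))
    return details

# 403 with cred info -> details gain the JSON string; 500 stays as-is.
print(sketch_add_cred_info(403, ["foo"], {"principal": "service-account@example.com"}))
print(sketch_add_cred_info(500, ["foo"], {"principal": "service-account@example.com"}))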
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = GatewayControlClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = GatewayControlClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -1203,10 +1253,14 @@ def test_generate_credentials_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.GatewayControlRestInterceptor, "post_generate_credentials" ) as post, mock.patch.object( + transports.GatewayControlRestInterceptor, + "post_generate_credentials_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.GatewayControlRestInterceptor, "pre_generate_credentials" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = control.GenerateCredentialsRequest.pb( control.GenerateCredentialsRequest() ) @@ -1232,6 +1286,10 @@ def test_generate_credentials_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = control.GenerateCredentialsResponse() + post_with_metadata.return_value = ( + control.GenerateCredentialsResponse(), + metadata, + ) client.generate_credentials( request, @@ -1243,6 +1301,7 @@ def test_generate_credentials_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_initialize_client_w_rest(): diff --git a/packages/google-cloud-gke-hub/CHANGELOG.md b/packages/google-cloud-gke-hub/CHANGELOG.md index e7b9b140fb68..5413e7fde20b 100644 --- a/packages/google-cloud-gke-hub/CHANGELOG.md +++ b/packages/google-cloud-gke-hub/CHANGELOG.md @@ -1,5 +1,13 @@ # Changelog +## [1.17.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-gke-hub-v1.16.0...google-cloud-gke-hub-v1.17.0) (2025-02-12) + + +### Features + +* Add REST Interceptors which support reading metadata ([908d742](https://github.com/googleapis/google-cloud-python/commit/908d7421a4adadd7407df7ec2a25e25688ff180f)) +* Add support for reading selective GAPIC generation methods from service YAML ([908d742](https://github.com/googleapis/google-cloud-python/commit/908d7421a4adadd7407df7ec2a25e25688ff180f)) + ## [1.16.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-gke-hub-v1.15.1...google-cloud-gke-hub-v1.16.0) (2024-12-12) diff --git a/packages/google-cloud-gke-hub/google/cloud/gkehub/gapic_version.py 
b/packages/google-cloud-gke-hub/google/cloud/gkehub/gapic_version.py index 3e0ea3b28f0a..007d6040cbe0 100644 --- a/packages/google-cloud-gke-hub/google/cloud/gkehub/gapic_version.py +++ b/packages/google-cloud-gke-hub/google/cloud/gkehub/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.16.0" # {x-release-please-version} +__version__ = "1.17.0" # {x-release-please-version} diff --git a/packages/google-cloud-gke-hub/google/cloud/gkehub_v1/configmanagement_v1/gapic_version.py b/packages/google-cloud-gke-hub/google/cloud/gkehub_v1/configmanagement_v1/gapic_version.py index 3e0ea3b28f0a..007d6040cbe0 100644 --- a/packages/google-cloud-gke-hub/google/cloud/gkehub_v1/configmanagement_v1/gapic_version.py +++ b/packages/google-cloud-gke-hub/google/cloud/gkehub_v1/configmanagement_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.16.0" # {x-release-please-version} +__version__ = "1.17.0" # {x-release-please-version} diff --git a/packages/google-cloud-gke-hub/google/cloud/gkehub_v1/gapic_version.py b/packages/google-cloud-gke-hub/google/cloud/gkehub_v1/gapic_version.py index 3e0ea3b28f0a..007d6040cbe0 100644 --- a/packages/google-cloud-gke-hub/google/cloud/gkehub_v1/gapic_version.py +++ b/packages/google-cloud-gke-hub/google/cloud/gkehub_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.16.0" # {x-release-please-version} +__version__ = "1.17.0" # {x-release-please-version} diff --git a/packages/google-cloud-gke-hub/google/cloud/gkehub_v1/multiclusteringress_v1/gapic_version.py b/packages/google-cloud-gke-hub/google/cloud/gkehub_v1/multiclusteringress_v1/gapic_version.py index 3e0ea3b28f0a..007d6040cbe0 100644 --- a/packages/google-cloud-gke-hub/google/cloud/gkehub_v1/multiclusteringress_v1/gapic_version.py +++ b/packages/google-cloud-gke-hub/google/cloud/gkehub_v1/multiclusteringress_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.16.0" # {x-release-please-version} +__version__ = "1.17.0" # {x-release-please-version} diff --git a/packages/google-cloud-gke-hub/google/cloud/gkehub_v1/services/gke_hub/client.py b/packages/google-cloud-gke-hub/google/cloud/gkehub_v1/services/gke_hub/client.py index 0ac2de2daecf..29fa98fd5bc9 100644 --- a/packages/google-cloud-gke-hub/google/cloud/gkehub_v1/services/gke_hub/client.py +++ b/packages/google-cloud-gke-hub/google/cloud/gkehub_v1/services/gke_hub/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -529,6 +531,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. 
+ """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. diff --git a/packages/google-cloud-gke-hub/google/cloud/gkehub_v1/services/gke_hub/transports/rest.py b/packages/google-cloud-gke-hub/google/cloud/gkehub_v1/services/gke_hub/transports/rest.py index 93bf74b1275f..1cd5d673f042 100644 --- a/packages/google-cloud-gke-hub/google/cloud/gkehub_v1/services/gke_hub/transports/rest.py +++ b/packages/google-cloud-gke-hub/google/cloud/gkehub_v1/services/gke_hub/transports/rest.py @@ -180,12 +180,35 @@ def post_create_feature( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_feature - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_feature_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the GkeHub server but before - it is returned to user code. + it is returned to user code. This `post_create_feature` interceptor runs + before the `post_create_feature_with_metadata` interceptor. """ return response + def post_create_feature_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_feature + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the GkeHub server but before it is returned to user code. + + We recommend only using this `post_create_feature_with_metadata` + interceptor in new development instead of the `post_create_feature` interceptor. + When both interceptors are used, this `post_create_feature_with_metadata` interceptor runs after the + `post_create_feature` interceptor. The (possibly modified) response returned by + `post_create_feature` will be passed to + `post_create_feature_with_metadata`. + """ + return response, metadata + def pre_create_membership( self, request: service.CreateMembershipRequest, @@ -205,12 +228,35 @@ def post_create_membership( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_membership - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_membership_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the GkeHub server but before - it is returned to user code. + it is returned to user code. This `post_create_membership` interceptor runs + before the `post_create_membership_with_metadata` interceptor. """ return response + def post_create_membership_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_membership + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the GkeHub server but before it is returned to user code. 
+ + We recommend only using this `post_create_membership_with_metadata` + interceptor in new development instead of the `post_create_membership` interceptor. + When both interceptors are used, this `post_create_membership_with_metadata` interceptor runs after the + `post_create_membership` interceptor. The (possibly modified) response returned by + `post_create_membership` will be passed to + `post_create_membership_with_metadata`. + """ + return response, metadata + def pre_delete_feature( self, request: service.DeleteFeatureRequest, @@ -228,12 +274,35 @@ def post_delete_feature( ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_feature - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_feature_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the GkeHub server but before - it is returned to user code. + it is returned to user code. This `post_delete_feature` interceptor runs + before the `post_delete_feature_with_metadata` interceptor. """ return response + def post_delete_feature_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_feature + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the GkeHub server but before it is returned to user code. + + We recommend only using this `post_delete_feature_with_metadata` + interceptor in new development instead of the `post_delete_feature` interceptor. + When both interceptors are used, this `post_delete_feature_with_metadata` interceptor runs after the + `post_delete_feature` interceptor. The (possibly modified) response returned by + `post_delete_feature` will be passed to + `post_delete_feature_with_metadata`. + """ + return response, metadata + def pre_delete_membership( self, request: service.DeleteMembershipRequest, @@ -253,12 +322,35 @@ def post_delete_membership( ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_membership - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_membership_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the GkeHub server but before - it is returned to user code. + it is returned to user code. This `post_delete_membership` interceptor runs + before the `post_delete_membership_with_metadata` interceptor. """ return response + def post_delete_membership_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_membership + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the GkeHub server but before it is returned to user code. + + We recommend only using this `post_delete_membership_with_metadata` + interceptor in new development instead of the `post_delete_membership` interceptor. + When both interceptors are used, this `post_delete_membership_with_metadata` interceptor runs after the + `post_delete_membership` interceptor. The (possibly modified) response returned by + `post_delete_membership` will be passed to + `post_delete_membership_with_metadata`. 
+ """ + return response, metadata + def pre_generate_connect_manifest( self, request: service.GenerateConnectManifestRequest, @@ -278,12 +370,37 @@ def post_generate_connect_manifest( ) -> service.GenerateConnectManifestResponse: """Post-rpc interceptor for generate_connect_manifest - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_generate_connect_manifest_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the GkeHub server but before - it is returned to user code. + it is returned to user code. This `post_generate_connect_manifest` interceptor runs + before the `post_generate_connect_manifest_with_metadata` interceptor. """ return response + def post_generate_connect_manifest_with_metadata( + self, + response: service.GenerateConnectManifestResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + service.GenerateConnectManifestResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for generate_connect_manifest + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the GkeHub server but before it is returned to user code. + + We recommend only using this `post_generate_connect_manifest_with_metadata` + interceptor in new development instead of the `post_generate_connect_manifest` interceptor. + When both interceptors are used, this `post_generate_connect_manifest_with_metadata` interceptor runs after the + `post_generate_connect_manifest` interceptor. The (possibly modified) response returned by + `post_generate_connect_manifest` will be passed to + `post_generate_connect_manifest_with_metadata`. + """ + return response, metadata + def pre_get_feature( self, request: service.GetFeatureRequest, @@ -299,12 +416,35 @@ def pre_get_feature( def post_get_feature(self, response: feature.Feature) -> feature.Feature: """Post-rpc interceptor for get_feature - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_feature_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the GkeHub server but before - it is returned to user code. + it is returned to user code. This `post_get_feature` interceptor runs + before the `post_get_feature_with_metadata` interceptor. """ return response + def post_get_feature_with_metadata( + self, + response: feature.Feature, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[feature.Feature, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_feature + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the GkeHub server but before it is returned to user code. + + We recommend only using this `post_get_feature_with_metadata` + interceptor in new development instead of the `post_get_feature` interceptor. + When both interceptors are used, this `post_get_feature_with_metadata` interceptor runs after the + `post_get_feature` interceptor. The (possibly modified) response returned by + `post_get_feature` will be passed to + `post_get_feature_with_metadata`. + """ + return response, metadata + def pre_get_membership( self, request: service.GetMembershipRequest, @@ -322,12 +462,35 @@ def post_get_membership( ) -> membership.Membership: """Post-rpc interceptor for get_membership - Override in a subclass to manipulate the response + DEPRECATED. 
Please use the `post_get_membership_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the GkeHub server but before - it is returned to user code. + it is returned to user code. This `post_get_membership` interceptor runs + before the `post_get_membership_with_metadata` interceptor. """ return response + def post_get_membership_with_metadata( + self, + response: membership.Membership, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[membership.Membership, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_membership + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the GkeHub server but before it is returned to user code. + + We recommend only using this `post_get_membership_with_metadata` + interceptor in new development instead of the `post_get_membership` interceptor. + When both interceptors are used, this `post_get_membership_with_metadata` interceptor runs after the + `post_get_membership` interceptor. The (possibly modified) response returned by + `post_get_membership` will be passed to + `post_get_membership_with_metadata`. + """ + return response, metadata + def pre_list_features( self, request: service.ListFeaturesRequest, @@ -345,12 +508,35 @@ def post_list_features( ) -> service.ListFeaturesResponse: """Post-rpc interceptor for list_features - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_features_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the GkeHub server but before - it is returned to user code. + it is returned to user code. This `post_list_features` interceptor runs + before the `post_list_features_with_metadata` interceptor. """ return response + def post_list_features_with_metadata( + self, + response: service.ListFeaturesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[service.ListFeaturesResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_features + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the GkeHub server but before it is returned to user code. + + We recommend only using this `post_list_features_with_metadata` + interceptor in new development instead of the `post_list_features` interceptor. + When both interceptors are used, this `post_list_features_with_metadata` interceptor runs after the + `post_list_features` interceptor. The (possibly modified) response returned by + `post_list_features` will be passed to + `post_list_features_with_metadata`. + """ + return response, metadata + def pre_list_memberships( self, request: service.ListMembershipsRequest, @@ -368,12 +554,37 @@ def post_list_memberships( ) -> service.ListMembershipsResponse: """Post-rpc interceptor for list_memberships - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_memberships_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the GkeHub server but before - it is returned to user code. + it is returned to user code. This `post_list_memberships` interceptor runs + before the `post_list_memberships_with_metadata` interceptor. 
""" return response + def post_list_memberships_with_metadata( + self, + response: service.ListMembershipsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + service.ListMembershipsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_memberships + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the GkeHub server but before it is returned to user code. + + We recommend only using this `post_list_memberships_with_metadata` + interceptor in new development instead of the `post_list_memberships` interceptor. + When both interceptors are used, this `post_list_memberships_with_metadata` interceptor runs after the + `post_list_memberships` interceptor. The (possibly modified) response returned by + `post_list_memberships` will be passed to + `post_list_memberships_with_metadata`. + """ + return response, metadata + def pre_update_feature( self, request: service.UpdateFeatureRequest, @@ -391,12 +602,35 @@ def post_update_feature( ) -> operations_pb2.Operation: """Post-rpc interceptor for update_feature - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_feature_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the GkeHub server but before - it is returned to user code. + it is returned to user code. This `post_update_feature` interceptor runs + before the `post_update_feature_with_metadata` interceptor. """ return response + def post_update_feature_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_feature + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the GkeHub server but before it is returned to user code. + + We recommend only using this `post_update_feature_with_metadata` + interceptor in new development instead of the `post_update_feature` interceptor. + When both interceptors are used, this `post_update_feature_with_metadata` interceptor runs after the + `post_update_feature` interceptor. The (possibly modified) response returned by + `post_update_feature` will be passed to + `post_update_feature_with_metadata`. + """ + return response, metadata + def pre_update_membership( self, request: service.UpdateMembershipRequest, @@ -416,12 +650,35 @@ def post_update_membership( ) -> operations_pb2.Operation: """Post-rpc interceptor for update_membership - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_membership_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the GkeHub server but before - it is returned to user code. + it is returned to user code. This `post_update_membership` interceptor runs + before the `post_update_membership_with_metadata` interceptor. """ return response + def post_update_membership_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_membership + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the GkeHub server but before it is returned to user code. 
+ + We recommend only using this `post_update_membership_with_metadata` + interceptor in new development instead of the `post_update_membership` interceptor. + When both interceptors are used, this `post_update_membership_with_metadata` interceptor runs after the + `post_update_membership` interceptor. The (possibly modified) response returned by + `post_update_membership` will be passed to + `post_update_membership_with_metadata`. + """ + return response, metadata + @dataclasses.dataclass class GkeHubRestStub: @@ -705,6 +962,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_feature(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_feature_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -861,6 +1122,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_membership(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_membership_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1004,6 +1269,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_feature(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_feature_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1151,6 +1420,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_membership(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_membership_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1297,6 +1570,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_generate_connect_manifest(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_generate_connect_manifest_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1441,6 +1718,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_feature(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_feature_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1585,6 +1866,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_membership(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_membership_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1727,6 +2012,10 @@ def __call__( json_format.Parse(response.content, pb_resp, 
ignore_unknown_fields=True) resp = self._interceptor.post_list_features(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_features_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1875,6 +2164,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_memberships(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_memberships_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2024,6 +2317,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_feature(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_feature_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2179,6 +2476,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_membership(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_membership_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-gke-hub/google/cloud/gkehub_v1beta1/gapic_version.py b/packages/google-cloud-gke-hub/google/cloud/gkehub_v1beta1/gapic_version.py index 3e0ea3b28f0a..007d6040cbe0 100644 --- a/packages/google-cloud-gke-hub/google/cloud/gkehub_v1beta1/gapic_version.py +++ b/packages/google-cloud-gke-hub/google/cloud/gkehub_v1beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.16.0" # {x-release-please-version} +__version__ = "1.17.0" # {x-release-please-version} diff --git a/packages/google-cloud-gke-hub/google/cloud/gkehub_v1beta1/services/gke_hub_membership_service/client.py b/packages/google-cloud-gke-hub/google/cloud/gkehub_v1beta1/services/gke_hub_membership_service/client.py index 5571143ca50c..8c6b649482fe 100644 --- a/packages/google-cloud-gke-hub/google/cloud/gkehub_v1beta1/services/gke_hub_membership_service/client.py +++ b/packages/google-cloud-gke-hub/google/cloud/gkehub_v1beta1/services/gke_hub_membership_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -509,6 +511,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. 
+ """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -1714,16 +1743,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -1769,16 +1802,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def delete_operation( self, @@ -2001,16 +2038,20 @@ def set_iam_policy( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_iam_policy( self, @@ -2123,16 +2164,20 @@ def get_iam_policy( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def test_iam_permissions( self, @@ -2183,16 +2228,20 @@ def test_iam_permissions( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_location( self, @@ -2238,16 +2287,20 @@ def get_location( # Validate the universe domain. self._validate_universe_domain() - # Send the request. 
- response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def list_locations( self, @@ -2293,16 +2346,20 @@ def list_locations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-gke-hub/google/cloud/gkehub_v1beta1/services/gke_hub_membership_service/transports/rest.py b/packages/google-cloud-gke-hub/google/cloud/gkehub_v1beta1/services/gke_hub_membership_service/transports/rest.py index 70bf55c4c422..39cb5149260e 100644 --- a/packages/google-cloud-gke-hub/google/cloud/gkehub_v1beta1/services/gke_hub_membership_service/transports/rest.py +++ b/packages/google-cloud-gke-hub/google/cloud/gkehub_v1beta1/services/gke_hub_membership_service/transports/rest.py @@ -161,12 +161,35 @@ def post_create_membership( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_membership - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_membership_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the GkeHubMembershipService server but before - it is returned to user code. + it is returned to user code. This `post_create_membership` interceptor runs + before the `post_create_membership_with_metadata` interceptor. """ return response + def post_create_membership_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_membership + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the GkeHubMembershipService server but before it is returned to user code. + + We recommend only using this `post_create_membership_with_metadata` + interceptor in new development instead of the `post_create_membership` interceptor. + When both interceptors are used, this `post_create_membership_with_metadata` interceptor runs after the + `post_create_membership` interceptor. The (possibly modified) response returned by + `post_create_membership` will be passed to + `post_create_membership_with_metadata`. + """ + return response, metadata + def pre_delete_membership( self, request: membership.DeleteMembershipRequest, @@ -186,12 +209,35 @@ def post_delete_membership( ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_membership - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_membership_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response after it is returned by the GkeHubMembershipService server but before - it is returned to user code. + it is returned to user code. This `post_delete_membership` interceptor runs + before the `post_delete_membership_with_metadata` interceptor. """ return response + def post_delete_membership_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_membership + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the GkeHubMembershipService server but before it is returned to user code. + + We recommend only using this `post_delete_membership_with_metadata` + interceptor in new development instead of the `post_delete_membership` interceptor. + When both interceptors are used, this `post_delete_membership_with_metadata` interceptor runs after the + `post_delete_membership` interceptor. The (possibly modified) response returned by + `post_delete_membership` will be passed to + `post_delete_membership_with_metadata`. + """ + return response, metadata + def pre_generate_connect_manifest( self, request: membership.GenerateConnectManifestRequest, @@ -212,12 +258,38 @@ def post_generate_connect_manifest( ) -> membership.GenerateConnectManifestResponse: """Post-rpc interceptor for generate_connect_manifest - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_generate_connect_manifest_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the GkeHubMembershipService server but before - it is returned to user code. + it is returned to user code. This `post_generate_connect_manifest` interceptor runs + before the `post_generate_connect_manifest_with_metadata` interceptor. """ return response + def post_generate_connect_manifest_with_metadata( + self, + response: membership.GenerateConnectManifestResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + membership.GenerateConnectManifestResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for generate_connect_manifest + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the GkeHubMembershipService server but before it is returned to user code. + + We recommend only using this `post_generate_connect_manifest_with_metadata` + interceptor in new development instead of the `post_generate_connect_manifest` interceptor. + When both interceptors are used, this `post_generate_connect_manifest_with_metadata` interceptor runs after the + `post_generate_connect_manifest` interceptor. The (possibly modified) response returned by + `post_generate_connect_manifest` will be passed to + `post_generate_connect_manifest_with_metadata`. + """ + return response, metadata + def pre_generate_exclusivity_manifest( self, request: membership.GenerateExclusivityManifestRequest, @@ -238,12 +310,38 @@ def post_generate_exclusivity_manifest( ) -> membership.GenerateExclusivityManifestResponse: """Post-rpc interceptor for generate_exclusivity_manifest - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_generate_exclusivity_manifest_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response after it is returned by the GkeHubMembershipService server but before - it is returned to user code. + it is returned to user code. This `post_generate_exclusivity_manifest` interceptor runs + before the `post_generate_exclusivity_manifest_with_metadata` interceptor. """ return response + def post_generate_exclusivity_manifest_with_metadata( + self, + response: membership.GenerateExclusivityManifestResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + membership.GenerateExclusivityManifestResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for generate_exclusivity_manifest + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the GkeHubMembershipService server but before it is returned to user code. + + We recommend only using this `post_generate_exclusivity_manifest_with_metadata` + interceptor in new development instead of the `post_generate_exclusivity_manifest` interceptor. + When both interceptors are used, this `post_generate_exclusivity_manifest_with_metadata` interceptor runs after the + `post_generate_exclusivity_manifest` interceptor. The (possibly modified) response returned by + `post_generate_exclusivity_manifest` will be passed to + `post_generate_exclusivity_manifest_with_metadata`. + """ + return response, metadata + def pre_get_membership( self, request: membership.GetMembershipRequest, @@ -263,12 +361,35 @@ def post_get_membership( ) -> membership.Membership: """Post-rpc interceptor for get_membership - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_membership_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the GkeHubMembershipService server but before - it is returned to user code. + it is returned to user code. This `post_get_membership` interceptor runs + before the `post_get_membership_with_metadata` interceptor. """ return response + def post_get_membership_with_metadata( + self, + response: membership.Membership, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[membership.Membership, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_membership + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the GkeHubMembershipService server but before it is returned to user code. + + We recommend only using this `post_get_membership_with_metadata` + interceptor in new development instead of the `post_get_membership` interceptor. + When both interceptors are used, this `post_get_membership_with_metadata` interceptor runs after the + `post_get_membership` interceptor. The (possibly modified) response returned by + `post_get_membership` will be passed to + `post_get_membership_with_metadata`. + """ + return response, metadata + def pre_list_memberships( self, request: membership.ListMembershipsRequest, @@ -288,12 +409,37 @@ def post_list_memberships( ) -> membership.ListMembershipsResponse: """Post-rpc interceptor for list_memberships - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_memberships_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the GkeHubMembershipService server but before - it is returned to user code. + it is returned to user code. 
This `post_list_memberships` interceptor runs + before the `post_list_memberships_with_metadata` interceptor. """ return response + def post_list_memberships_with_metadata( + self, + response: membership.ListMembershipsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + membership.ListMembershipsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_memberships + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the GkeHubMembershipService server but before it is returned to user code. + + We recommend only using this `post_list_memberships_with_metadata` + interceptor in new development instead of the `post_list_memberships` interceptor. + When both interceptors are used, this `post_list_memberships_with_metadata` interceptor runs after the + `post_list_memberships` interceptor. The (possibly modified) response returned by + `post_list_memberships` will be passed to + `post_list_memberships_with_metadata`. + """ + return response, metadata + def pre_update_membership( self, request: membership.UpdateMembershipRequest, @@ -313,12 +459,35 @@ def post_update_membership( ) -> operations_pb2.Operation: """Post-rpc interceptor for update_membership - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_membership_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the GkeHubMembershipService server but before - it is returned to user code. + it is returned to user code. This `post_update_membership` interceptor runs + before the `post_update_membership_with_metadata` interceptor. """ return response + def post_update_membership_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_membership + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the GkeHubMembershipService server but before it is returned to user code. + + We recommend only using this `post_update_membership_with_metadata` + interceptor in new development instead of the `post_update_membership` interceptor. + When both interceptors are used, this `post_update_membership_with_metadata` interceptor runs after the + `post_update_membership` interceptor. The (possibly modified) response returned by + `post_update_membership` will be passed to + `post_update_membership_with_metadata`. + """ + return response, metadata + def pre_validate_exclusivity( self, request: membership.ValidateExclusivityRequest, @@ -338,12 +507,37 @@ def post_validate_exclusivity( ) -> membership.ValidateExclusivityResponse: """Post-rpc interceptor for validate_exclusivity - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_validate_exclusivity_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the GkeHubMembershipService server but before - it is returned to user code. + it is returned to user code. This `post_validate_exclusivity` interceptor runs + before the `post_validate_exclusivity_with_metadata` interceptor. 
""" return response + def post_validate_exclusivity_with_metadata( + self, + response: membership.ValidateExclusivityResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + membership.ValidateExclusivityResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for validate_exclusivity + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the GkeHubMembershipService server but before it is returned to user code. + + We recommend only using this `post_validate_exclusivity_with_metadata` + interceptor in new development instead of the `post_validate_exclusivity` interceptor. + When both interceptors are used, this `post_validate_exclusivity_with_metadata` interceptor runs after the + `post_validate_exclusivity` interceptor. The (possibly modified) response returned by + `post_validate_exclusivity` will be passed to + `post_validate_exclusivity_with_metadata`. + """ + return response, metadata + def pre_get_location( self, request: locations_pb2.GetLocationRequest, @@ -842,6 +1036,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_membership(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_membership_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -989,6 +1187,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_membership(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_membership_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1137,6 +1339,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_generate_connect_manifest(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_generate_connect_manifest_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1289,6 +1495,13 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_generate_exclusivity_manifest(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_generate_exclusivity_manifest_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1437,6 +1650,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_membership(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_membership_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1585,6 +1802,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_memberships(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_memberships_with_metadata( + resp, 
response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1740,6 +1961,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_membership(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_membership_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1889,6 +2114,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_validate_exclusivity(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_validate_exclusivity_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-gke-hub/samples/generated_samples/snippet_metadata_google.cloud.gkehub.v1.json b/packages/google-cloud-gke-hub/samples/generated_samples/snippet_metadata_google.cloud.gkehub.v1.json index a66564b1700a..793370c23006 100644 --- a/packages/google-cloud-gke-hub/samples/generated_samples/snippet_metadata_google.cloud.gkehub.v1.json +++ b/packages/google-cloud-gke-hub/samples/generated_samples/snippet_metadata_google.cloud.gkehub.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-gke-hub", - "version": "1.16.0" + "version": "1.17.0" }, "snippets": [ { diff --git a/packages/google-cloud-gke-hub/samples/generated_samples/snippet_metadata_google.cloud.gkehub.v1beta1.json b/packages/google-cloud-gke-hub/samples/generated_samples/snippet_metadata_google.cloud.gkehub.v1beta1.json index fb45110c83ae..f68b495fcd01 100644 --- a/packages/google-cloud-gke-hub/samples/generated_samples/snippet_metadata_google.cloud.gkehub.v1beta1.json +++ b/packages/google-cloud-gke-hub/samples/generated_samples/snippet_metadata_google.cloud.gkehub.v1beta1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-gke-hub", - "version": "1.16.0" + "version": "1.17.0" }, "snippets": [ { diff --git a/packages/google-cloud-gke-hub/tests/unit/gapic/gkehub_v1/test_gke_hub.py b/packages/google-cloud-gke-hub/tests/unit/gapic/gkehub_v1/test_gke_hub.py index 2643827165af..6dc315152599 100644 --- a/packages/google-cloud-gke-hub/tests/unit/gapic/gkehub_v1/test_gke_hub.py +++ b/packages/google-cloud-gke-hub/tests/unit/gapic/gkehub_v1/test_gke_hub.py @@ -73,6 +73,13 @@ ) from google.cloud.gkehub_v1.types import feature, membership, service +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -295,6 +302,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
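The new *_with_metadata interceptors introduced throughout this change are meant to be overridden in a subclass, as the docstrings above describe. A minimal sketch follows; it is not part of the generated code, the logging body is invented, and the import layout and transport wiring are assumptions based on the usual generated GAPIC REST pattern.

from typing import Sequence, Tuple, Union

from google.cloud.gkehub_v1 import GkeHubClient
from google.cloud.gkehub_v1.services.gke_hub.transports import (
    GkeHubRestInterceptor,
    GkeHubRestTransport,
)
from google.cloud.gkehub_v1.types import feature


class HeaderLoggingInterceptor(GkeHubRestInterceptor):
    def post_get_feature_with_metadata(
        self,
        response: feature.Feature,
        metadata: Sequence[Tuple[str, Union[str, bytes]]],
    ) -> Tuple[feature.Feature, Sequence[Tuple[str, Union[str, bytes]]]]:
        # `metadata` carries the HTTP response headers as (key, value) pairs;
        # both the response and the metadata may be read or modified here.
        for key, value in metadata:
            print(f"get_feature response header: {key}={value}")
        return response, metadata


# Hypothetical wiring; credentials and endpoint configuration omitted.
transport = GkeHubRestTransport(interceptor=HeaderLoggingInterceptor())
client = GkeHubClient(transport=transport)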
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = GkeHubClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = GkeHubClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -7649,10 +7699,13 @@ def test_list_memberships_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.GkeHubRestInterceptor, "post_list_memberships" ) as post, mock.patch.object( + transports.GkeHubRestInterceptor, "post_list_memberships_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.GkeHubRestInterceptor, "pre_list_memberships" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.ListMembershipsRequest.pb(service.ListMembershipsRequest()) transcode.return_value = { "method": "post", @@ -7676,6 +7729,7 @@ def test_list_memberships_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = service.ListMembershipsResponse() + post_with_metadata.return_value = service.ListMembershipsResponse(), metadata client.list_memberships( request, @@ -7687,6 +7741,7 @@ def test_list_memberships_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_features_rest_bad_request(request_type=service.ListFeaturesRequest): @@ -7767,10 +7822,13 @@ def test_list_features_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.GkeHubRestInterceptor, "post_list_features" ) as post, mock.patch.object( + transports.GkeHubRestInterceptor, "post_list_features_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.GkeHubRestInterceptor, "pre_list_features" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.ListFeaturesRequest.pb(service.ListFeaturesRequest()) transcode.return_value = { "method": "post", @@ -7794,6 +7852,7 @@ def test_list_features_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = service.ListFeaturesResponse() + post_with_metadata.return_value = service.ListFeaturesResponse(), metadata client.list_features( request, @@ -7805,6 +7864,7 @@ def test_list_features_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + 
post_with_metadata.assert_called_once() def test_get_membership_rest_bad_request(request_type=service.GetMembershipRequest): @@ -7891,10 +7951,13 @@ def test_get_membership_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.GkeHubRestInterceptor, "post_get_membership" ) as post, mock.patch.object( + transports.GkeHubRestInterceptor, "post_get_membership_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.GkeHubRestInterceptor, "pre_get_membership" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.GetMembershipRequest.pb(service.GetMembershipRequest()) transcode.return_value = { "method": "post", @@ -7916,6 +7979,7 @@ def test_get_membership_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = membership.Membership() + post_with_metadata.return_value = membership.Membership(), metadata client.get_membership( request, @@ -7927,6 +7991,7 @@ def test_get_membership_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_feature_rest_bad_request(request_type=service.GetFeatureRequest): @@ -8007,10 +8072,13 @@ def test_get_feature_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.GkeHubRestInterceptor, "post_get_feature" ) as post, mock.patch.object( + transports.GkeHubRestInterceptor, "post_get_feature_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.GkeHubRestInterceptor, "pre_get_feature" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.GetFeatureRequest.pb(service.GetFeatureRequest()) transcode.return_value = { "method": "post", @@ -8032,6 +8100,7 @@ def test_get_feature_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = feature.Feature() + post_with_metadata.return_value = feature.Feature(), metadata client.get_feature( request, @@ -8043,6 +8112,7 @@ def test_get_feature_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_membership_rest_bad_request( @@ -8240,10 +8310,13 @@ def test_create_membership_rest_interceptors(null_interceptor): ), mock.patch.object( transports.GkeHubRestInterceptor, "post_create_membership" ) as post, mock.patch.object( + transports.GkeHubRestInterceptor, "post_create_membership_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.GkeHubRestInterceptor, "pre_create_membership" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.CreateMembershipRequest.pb( service.CreateMembershipRequest() ) @@ -8267,6 +8340,7 @@ def test_create_membership_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_membership( request, @@ -8278,6 +8352,7 @@ def test_create_membership_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_feature_rest_bad_request(request_type=service.CreateFeatureRequest): @@ -8441,10 +8516,13 @@ def test_create_feature_rest_interceptors(null_interceptor): ), mock.patch.object( transports.GkeHubRestInterceptor, 
"post_create_feature" ) as post, mock.patch.object( + transports.GkeHubRestInterceptor, "post_create_feature_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.GkeHubRestInterceptor, "pre_create_feature" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.CreateFeatureRequest.pb(service.CreateFeatureRequest()) transcode.return_value = { "method": "post", @@ -8466,6 +8544,7 @@ def test_create_feature_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_feature( request, @@ -8477,6 +8556,7 @@ def test_create_feature_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_membership_rest_bad_request( @@ -8555,10 +8635,13 @@ def test_delete_membership_rest_interceptors(null_interceptor): ), mock.patch.object( transports.GkeHubRestInterceptor, "post_delete_membership" ) as post, mock.patch.object( + transports.GkeHubRestInterceptor, "post_delete_membership_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.GkeHubRestInterceptor, "pre_delete_membership" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.DeleteMembershipRequest.pb( service.DeleteMembershipRequest() ) @@ -8582,6 +8665,7 @@ def test_delete_membership_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.delete_membership( request, @@ -8593,6 +8677,7 @@ def test_delete_membership_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_feature_rest_bad_request(request_type=service.DeleteFeatureRequest): @@ -8669,10 +8754,13 @@ def test_delete_feature_rest_interceptors(null_interceptor): ), mock.patch.object( transports.GkeHubRestInterceptor, "post_delete_feature" ) as post, mock.patch.object( + transports.GkeHubRestInterceptor, "post_delete_feature_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.GkeHubRestInterceptor, "pre_delete_feature" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.DeleteFeatureRequest.pb(service.DeleteFeatureRequest()) transcode.return_value = { "method": "post", @@ -8694,6 +8782,7 @@ def test_delete_feature_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.delete_feature( request, @@ -8705,6 +8794,7 @@ def test_delete_feature_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_membership_rest_bad_request( @@ -8902,10 +8992,13 @@ def test_update_membership_rest_interceptors(null_interceptor): ), mock.patch.object( transports.GkeHubRestInterceptor, "post_update_membership" ) as post, mock.patch.object( + transports.GkeHubRestInterceptor, "post_update_membership_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.GkeHubRestInterceptor, "pre_update_membership" ) as pre: pre.assert_not_called() 
post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.UpdateMembershipRequest.pb( service.UpdateMembershipRequest() ) @@ -8929,6 +9022,7 @@ def test_update_membership_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.update_membership( request, @@ -8940,6 +9034,7 @@ def test_update_membership_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_feature_rest_bad_request(request_type=service.UpdateFeatureRequest): @@ -9103,10 +9198,13 @@ def test_update_feature_rest_interceptors(null_interceptor): ), mock.patch.object( transports.GkeHubRestInterceptor, "post_update_feature" ) as post, mock.patch.object( + transports.GkeHubRestInterceptor, "post_update_feature_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.GkeHubRestInterceptor, "pre_update_feature" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.UpdateFeatureRequest.pb(service.UpdateFeatureRequest()) transcode.return_value = { "method": "post", @@ -9128,6 +9226,7 @@ def test_update_feature_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.update_feature( request, @@ -9139,6 +9238,7 @@ def test_update_feature_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_generate_connect_manifest_rest_bad_request( @@ -9218,10 +9318,13 @@ def test_generate_connect_manifest_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.GkeHubRestInterceptor, "post_generate_connect_manifest" ) as post, mock.patch.object( + transports.GkeHubRestInterceptor, "post_generate_connect_manifest_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.GkeHubRestInterceptor, "pre_generate_connect_manifest" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.GenerateConnectManifestRequest.pb( service.GenerateConnectManifestRequest() ) @@ -9247,6 +9350,10 @@ def test_generate_connect_manifest_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = service.GenerateConnectManifestResponse() + post_with_metadata.return_value = ( + service.GenerateConnectManifestResponse(), + metadata, + ) client.generate_connect_manifest( request, @@ -9258,6 +9365,7 @@ def test_generate_connect_manifest_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_initialize_client_w_rest(): diff --git a/packages/google-cloud-gke-hub/tests/unit/gapic/gkehub_v1beta1/test_gke_hub_membership_service.py b/packages/google-cloud-gke-hub/tests/unit/gapic/gkehub_v1beta1/test_gke_hub_membership_service.py index 252a41e9047d..0935863daf35 100644 --- a/packages/google-cloud-gke-hub/tests/unit/gapic/gkehub_v1beta1/test_gke_hub_membership_service.py +++ b/packages/google-cloud-gke-hub/tests/unit/gapic/gkehub_v1beta1/test_gke_hub_membership_service.py @@ -78,6 +78,13 @@ ) from google.cloud.gkehub_v1beta1.types import membership +CRED_INFO_JSON = { + "credential_source": 
"/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -353,6 +360,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = GkeHubMembershipServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = GkeHubMembershipServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -5991,10 +6041,14 @@ def test_list_memberships_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.GkeHubMembershipServiceRestInterceptor, "post_list_memberships" ) as post, mock.patch.object( + transports.GkeHubMembershipServiceRestInterceptor, + "post_list_memberships_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.GkeHubMembershipServiceRestInterceptor, "pre_list_memberships" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = membership.ListMembershipsRequest.pb( membership.ListMembershipsRequest() ) @@ -6020,6 +6074,7 @@ def test_list_memberships_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = membership.ListMembershipsResponse() + post_with_metadata.return_value = membership.ListMembershipsResponse(), metadata client.list_memberships( request, @@ -6031,6 +6086,7 @@ def test_list_memberships_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_membership_rest_bad_request(request_type=membership.GetMembershipRequest): @@ -6123,10 +6179,14 @@ def test_get_membership_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.GkeHubMembershipServiceRestInterceptor, "post_get_membership" ) as post, mock.patch.object( + transports.GkeHubMembershipServiceRestInterceptor, + "post_get_membership_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.GkeHubMembershipServiceRestInterceptor, "pre_get_membership" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = membership.GetMembershipRequest.pb( 
membership.GetMembershipRequest() ) @@ -6150,6 +6210,7 @@ def test_get_membership_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = membership.Membership() + post_with_metadata.return_value = membership.Membership(), metadata client.get_membership( request, @@ -6161,6 +6222,7 @@ def test_get_membership_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_membership_rest_bad_request( @@ -6372,10 +6434,14 @@ def test_create_membership_rest_interceptors(null_interceptor): ), mock.patch.object( transports.GkeHubMembershipServiceRestInterceptor, "post_create_membership" ) as post, mock.patch.object( + transports.GkeHubMembershipServiceRestInterceptor, + "post_create_membership_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.GkeHubMembershipServiceRestInterceptor, "pre_create_membership" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = membership.CreateMembershipRequest.pb( membership.CreateMembershipRequest() ) @@ -6399,6 +6465,7 @@ def test_create_membership_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_membership( request, @@ -6410,6 +6477,7 @@ def test_create_membership_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_membership_rest_bad_request( @@ -6490,10 +6558,14 @@ def test_delete_membership_rest_interceptors(null_interceptor): ), mock.patch.object( transports.GkeHubMembershipServiceRestInterceptor, "post_delete_membership" ) as post, mock.patch.object( + transports.GkeHubMembershipServiceRestInterceptor, + "post_delete_membership_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.GkeHubMembershipServiceRestInterceptor, "pre_delete_membership" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = membership.DeleteMembershipRequest.pb( membership.DeleteMembershipRequest() ) @@ -6517,6 +6589,7 @@ def test_delete_membership_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.delete_membership( request, @@ -6528,6 +6601,7 @@ def test_delete_membership_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_membership_rest_bad_request( @@ -6739,10 +6813,14 @@ def test_update_membership_rest_interceptors(null_interceptor): ), mock.patch.object( transports.GkeHubMembershipServiceRestInterceptor, "post_update_membership" ) as post, mock.patch.object( + transports.GkeHubMembershipServiceRestInterceptor, + "post_update_membership_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.GkeHubMembershipServiceRestInterceptor, "pre_update_membership" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = membership.UpdateMembershipRequest.pb( membership.UpdateMembershipRequest() ) @@ -6766,6 +6844,7 @@ def test_update_membership_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = 
operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.update_membership( request, @@ -6777,6 +6856,7 @@ def test_update_membership_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_generate_connect_manifest_rest_bad_request( @@ -6859,11 +6939,15 @@ def test_generate_connect_manifest_rest_interceptors(null_interceptor): transports.GkeHubMembershipServiceRestInterceptor, "post_generate_connect_manifest", ) as post, mock.patch.object( + transports.GkeHubMembershipServiceRestInterceptor, + "post_generate_connect_manifest_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.GkeHubMembershipServiceRestInterceptor, "pre_generate_connect_manifest", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = membership.GenerateConnectManifestRequest.pb( membership.GenerateConnectManifestRequest() ) @@ -6889,6 +6973,10 @@ def test_generate_connect_manifest_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = membership.GenerateConnectManifestResponse() + post_with_metadata.return_value = ( + membership.GenerateConnectManifestResponse(), + metadata, + ) client.generate_connect_manifest( request, @@ -6900,6 +6988,7 @@ def test_generate_connect_manifest_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_validate_exclusivity_rest_bad_request( @@ -6981,10 +7070,14 @@ def test_validate_exclusivity_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.GkeHubMembershipServiceRestInterceptor, "post_validate_exclusivity" ) as post, mock.patch.object( + transports.GkeHubMembershipServiceRestInterceptor, + "post_validate_exclusivity_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.GkeHubMembershipServiceRestInterceptor, "pre_validate_exclusivity" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = membership.ValidateExclusivityRequest.pb( membership.ValidateExclusivityRequest() ) @@ -7010,6 +7103,10 @@ def test_validate_exclusivity_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = membership.ValidateExclusivityResponse() + post_with_metadata.return_value = ( + membership.ValidateExclusivityResponse(), + metadata, + ) client.validate_exclusivity( request, @@ -7021,6 +7118,7 @@ def test_validate_exclusivity_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_generate_exclusivity_manifest_rest_bad_request( @@ -7108,11 +7206,15 @@ def test_generate_exclusivity_manifest_rest_interceptors(null_interceptor): transports.GkeHubMembershipServiceRestInterceptor, "post_generate_exclusivity_manifest", ) as post, mock.patch.object( + transports.GkeHubMembershipServiceRestInterceptor, + "post_generate_exclusivity_manifest_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.GkeHubMembershipServiceRestInterceptor, "pre_generate_exclusivity_manifest", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = membership.GenerateExclusivityManifestRequest.pb( membership.GenerateExclusivityManifestRequest() ) @@ -7138,6 +7240,10 @@ def 
test_generate_exclusivity_manifest_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = membership.GenerateExclusivityManifestResponse() + post_with_metadata.return_value = ( + membership.GenerateExclusivityManifestResponse(), + metadata, + ) client.generate_exclusivity_manifest( request, @@ -7149,6 +7255,7 @@ def test_generate_exclusivity_manifest_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationRequest): diff --git a/packages/google-cloud-gke-multicloud/CHANGELOG.md b/packages/google-cloud-gke-multicloud/CHANGELOG.md index d5a675f23d8c..a968151de4f4 100644 --- a/packages/google-cloud-gke-multicloud/CHANGELOG.md +++ b/packages/google-cloud-gke-multicloud/CHANGELOG.md @@ -1,5 +1,33 @@ # Changelog +## [0.6.19](https://github.com/googleapis/google-cloud-python/compare/google-cloud-gke-multicloud-v0.6.18...google-cloud-gke-multicloud-v0.6.19) (2025-02-12) + + +### Features + +* Add REST Interceptors which support reading metadata ([908d742](https://github.com/googleapis/google-cloud-python/commit/908d7421a4adadd7407df7ec2a25e25688ff180f)) +* Add support for reading selective GAPIC generation methods from service YAML ([908d742](https://github.com/googleapis/google-cloud-python/commit/908d7421a4adadd7407df7ec2a25e25688ff180f)) + +## [0.6.18](https://github.com/googleapis/google-cloud-python/compare/google-cloud-gke-multicloud-v0.6.17...google-cloud-gke-multicloud-v0.6.18) (2025-01-13) + + +### Documentation + +* [google-cloud-gke-multicloud] fix comments of existing field ([#13417](https://github.com/googleapis/google-cloud-python/issues/13417)) ([3a9a8fb](https://github.com/googleapis/google-cloud-python/commit/3a9a8fb2be1304ff8d4593320236e1ea008ee696)) + +## [0.6.17](https://github.com/googleapis/google-cloud-python/compare/google-cloud-gke-multicloud-v0.6.16...google-cloud-gke-multicloud-v0.6.17) (2025-01-02) + + +### Features + +* added support for optionally disabling built-in GKE metrics ([57232b6](https://github.com/googleapis/google-cloud-python/commit/57232b6b38c004c5136a8ad8051fa1f667d2353d)) +* added tag bindings support for Attached Clusters ([57232b6](https://github.com/googleapis/google-cloud-python/commit/57232b6b38c004c5136a8ad8051fa1f667d2353d)) + + +### Documentation + +* updated comments of existing fields ([57232b6](https://github.com/googleapis/google-cloud-python/commit/57232b6b38c004c5136a8ad8051fa1f667d2353d)) + ## [0.6.16](https://github.com/googleapis/google-cloud-python/compare/google-cloud-gke-multicloud-v0.6.15...google-cloud-gke-multicloud-v0.6.16) (2024-12-12) diff --git a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud/__init__.py b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud/__init__.py index bd6a6e31b887..92ba1ed33ad7 100644 --- a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud/__init__.py +++ b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud/__init__.py @@ -168,6 +168,7 @@ ) from google.cloud.gke_multicloud_v1.types.common_resources import ( BinaryAuthorization, + CloudMonitoringConfig, Fleet, Jwk, LoggingComponentConfig, @@ -308,6 +309,7 @@ "UpdateAzureClusterRequest", "UpdateAzureNodePoolRequest", "BinaryAuthorization", + "CloudMonitoringConfig", "Fleet", "Jwk", "LoggingComponentConfig", diff --git a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud/gapic_version.py 
b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud/gapic_version.py index a22e7bbe7e4a..f5e2b06d8dfc 100644 --- a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud/gapic_version.py +++ b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.6.16" # {x-release-please-version} +__version__ = "0.6.19" # {x-release-please-version} diff --git a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/__init__.py b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/__init__.py index 88648899fa1c..64e8356572a3 100644 --- a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/__init__.py +++ b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/__init__.py @@ -156,6 +156,7 @@ ) from .types.common_resources import ( BinaryAuthorization, + CloudMonitoringConfig, Fleet, Jwk, LoggingComponentConfig, @@ -235,6 +236,7 @@ "AzureServicesAuthentication", "AzureSshConfig", "BinaryAuthorization", + "CloudMonitoringConfig", "CreateAttachedClusterRequest", "CreateAwsClusterRequest", "CreateAwsNodePoolRequest", diff --git a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/gapic_version.py b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/gapic_version.py index a22e7bbe7e4a..f5e2b06d8dfc 100644 --- a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/gapic_version.py +++ b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.6.16" # {x-release-please-version} +__version__ = "0.6.19" # {x-release-please-version} diff --git a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/services/attached_clusters/async_client.py b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/services/attached_clusters/async_client.py index b077a13a26cc..2c8b1c35160c 100644 --- a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/services/attached_clusters/async_client.py +++ b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/services/attached_clusters/async_client.py @@ -568,6 +568,7 @@ async def sample_update_attached_cluster(): - ``proxy_config.kubernetes_secret.name``. - ``proxy_config.kubernetes_secret.namespace``. - ``security_posture_config.vulnerability_mode`` + - ``monitoring_config.cloud_monitoring_config.enabled`` This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this diff --git a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/services/attached_clusters/client.py b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/services/attached_clusters/client.py index a03435a90514..3e0495aafae3 100644 --- a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/services/attached_clusters/client.py +++ b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/services/attached_clusters/client.py @@ -14,6 +14,8 @@ # limitations under the License. 
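[Editor's note] The client.py hunks that follow add the same `_add_cred_info_for_auth_errors` helper exercised by the parametrized tests earlier in this diff: for 401/403/404 responses, and only when the credential exposes `get_cred_info()` (available in google-auth >= 2.35.0), a JSON description of the credential is appended to the error's details before the error is re-raised. From the caller's side the effect looks roughly like this sketch; the resource name is a placeholder.

    # Sketch only: shows how the enriched details surface to calling code.
    from google.api_core import exceptions as core_exceptions

    try:
        client.get_membership(name="projects/p/locations/l/memberships/m")
    except core_exceptions.GoogleAPICallError as e:
        # For 401/403/404 errors, e.details may now end with a JSON string such as
        # '{"credential_source": ..., "credential_type": ..., "principal": ...}'
        # when the active credential implements get_cred_info(); other status
        # codes, or older google-auth versions, leave the details unchanged.
        print(e.details)
        raise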
# from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -518,6 +520,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -972,6 +1001,7 @@ def sample_update_attached_cluster(): - ``proxy_config.kubernetes_secret.name``. - ``proxy_config.kubernetes_secret.namespace``. - ``security_posture_config.vulnerability_mode`` + - ``monitoring_config.cloud_monitoring_config.enabled`` This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this @@ -2017,16 +2047,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -2072,16 +2106,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def delete_operation( self, diff --git a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/services/attached_clusters/transports/rest.py b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/services/attached_clusters/transports/rest.py index a0abb9a1531f..950b1abb1c49 100644 --- a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/services/attached_clusters/transports/rest.py +++ b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/services/attached_clusters/transports/rest.py @@ -167,12 +167,35 @@ def post_create_attached_cluster( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_attached_cluster - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_attached_cluster_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response after it is returned by the AttachedClusters server but before - it is returned to user code. + it is returned to user code. This `post_create_attached_cluster` interceptor runs + before the `post_create_attached_cluster_with_metadata` interceptor. """ return response + def post_create_attached_cluster_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_attached_cluster + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AttachedClusters server but before it is returned to user code. + + We recommend only using this `post_create_attached_cluster_with_metadata` + interceptor in new development instead of the `post_create_attached_cluster` interceptor. + When both interceptors are used, this `post_create_attached_cluster_with_metadata` interceptor runs after the + `post_create_attached_cluster` interceptor. The (possibly modified) response returned by + `post_create_attached_cluster` will be passed to + `post_create_attached_cluster_with_metadata`. + """ + return response, metadata + def pre_delete_attached_cluster( self, request: attached_service.DeleteAttachedClusterRequest, @@ -193,12 +216,35 @@ def post_delete_attached_cluster( ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_attached_cluster - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_attached_cluster_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AttachedClusters server but before - it is returned to user code. + it is returned to user code. This `post_delete_attached_cluster` interceptor runs + before the `post_delete_attached_cluster_with_metadata` interceptor. """ return response + def post_delete_attached_cluster_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_attached_cluster + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AttachedClusters server but before it is returned to user code. + + We recommend only using this `post_delete_attached_cluster_with_metadata` + interceptor in new development instead of the `post_delete_attached_cluster` interceptor. + When both interceptors are used, this `post_delete_attached_cluster_with_metadata` interceptor runs after the + `post_delete_attached_cluster` interceptor. The (possibly modified) response returned by + `post_delete_attached_cluster` will be passed to + `post_delete_attached_cluster_with_metadata`. + """ + return response, metadata + def pre_generate_attached_cluster_agent_token( self, request: attached_service.GenerateAttachedClusterAgentTokenRequest, @@ -219,12 +265,38 @@ def post_generate_attached_cluster_agent_token( ) -> attached_service.GenerateAttachedClusterAgentTokenResponse: """Post-rpc interceptor for generate_attached_cluster_agent_token - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_generate_attached_cluster_agent_token_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response after it is returned by the AttachedClusters server but before - it is returned to user code. + it is returned to user code. This `post_generate_attached_cluster_agent_token` interceptor runs + before the `post_generate_attached_cluster_agent_token_with_metadata` interceptor. """ return response + def post_generate_attached_cluster_agent_token_with_metadata( + self, + response: attached_service.GenerateAttachedClusterAgentTokenResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + attached_service.GenerateAttachedClusterAgentTokenResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for generate_attached_cluster_agent_token + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AttachedClusters server but before it is returned to user code. + + We recommend only using this `post_generate_attached_cluster_agent_token_with_metadata` + interceptor in new development instead of the `post_generate_attached_cluster_agent_token` interceptor. + When both interceptors are used, this `post_generate_attached_cluster_agent_token_with_metadata` interceptor runs after the + `post_generate_attached_cluster_agent_token` interceptor. The (possibly modified) response returned by + `post_generate_attached_cluster_agent_token` will be passed to + `post_generate_attached_cluster_agent_token_with_metadata`. + """ + return response, metadata + def pre_generate_attached_cluster_install_manifest( self, request: attached_service.GenerateAttachedClusterInstallManifestRequest, @@ -245,12 +317,38 @@ def post_generate_attached_cluster_install_manifest( ) -> attached_service.GenerateAttachedClusterInstallManifestResponse: """Post-rpc interceptor for generate_attached_cluster_install_manifest - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_generate_attached_cluster_install_manifest_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AttachedClusters server but before - it is returned to user code. + it is returned to user code. This `post_generate_attached_cluster_install_manifest` interceptor runs + before the `post_generate_attached_cluster_install_manifest_with_metadata` interceptor. """ return response + def post_generate_attached_cluster_install_manifest_with_metadata( + self, + response: attached_service.GenerateAttachedClusterInstallManifestResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + attached_service.GenerateAttachedClusterInstallManifestResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for generate_attached_cluster_install_manifest + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AttachedClusters server but before it is returned to user code. + + We recommend only using this `post_generate_attached_cluster_install_manifest_with_metadata` + interceptor in new development instead of the `post_generate_attached_cluster_install_manifest` interceptor. + When both interceptors are used, this `post_generate_attached_cluster_install_manifest_with_metadata` interceptor runs after the + `post_generate_attached_cluster_install_manifest` interceptor. The (possibly modified) response returned by + `post_generate_attached_cluster_install_manifest` will be passed to + `post_generate_attached_cluster_install_manifest_with_metadata`. 
+ """ + return response, metadata + def pre_get_attached_cluster( self, request: attached_service.GetAttachedClusterRequest, @@ -271,12 +369,37 @@ def post_get_attached_cluster( ) -> attached_resources.AttachedCluster: """Post-rpc interceptor for get_attached_cluster - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_attached_cluster_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AttachedClusters server but before - it is returned to user code. + it is returned to user code. This `post_get_attached_cluster` interceptor runs + before the `post_get_attached_cluster_with_metadata` interceptor. """ return response + def post_get_attached_cluster_with_metadata( + self, + response: attached_resources.AttachedCluster, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + attached_resources.AttachedCluster, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for get_attached_cluster + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AttachedClusters server but before it is returned to user code. + + We recommend only using this `post_get_attached_cluster_with_metadata` + interceptor in new development instead of the `post_get_attached_cluster` interceptor. + When both interceptors are used, this `post_get_attached_cluster_with_metadata` interceptor runs after the + `post_get_attached_cluster` interceptor. The (possibly modified) response returned by + `post_get_attached_cluster` will be passed to + `post_get_attached_cluster_with_metadata`. + """ + return response, metadata + def pre_get_attached_server_config( self, request: attached_service.GetAttachedServerConfigRequest, @@ -297,12 +420,37 @@ def post_get_attached_server_config( ) -> attached_resources.AttachedServerConfig: """Post-rpc interceptor for get_attached_server_config - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_attached_server_config_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AttachedClusters server but before - it is returned to user code. + it is returned to user code. This `post_get_attached_server_config` interceptor runs + before the `post_get_attached_server_config_with_metadata` interceptor. """ return response + def post_get_attached_server_config_with_metadata( + self, + response: attached_resources.AttachedServerConfig, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + attached_resources.AttachedServerConfig, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for get_attached_server_config + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AttachedClusters server but before it is returned to user code. + + We recommend only using this `post_get_attached_server_config_with_metadata` + interceptor in new development instead of the `post_get_attached_server_config` interceptor. + When both interceptors are used, this `post_get_attached_server_config_with_metadata` interceptor runs after the + `post_get_attached_server_config` interceptor. The (possibly modified) response returned by + `post_get_attached_server_config` will be passed to + `post_get_attached_server_config_with_metadata`. 
+ """ + return response, metadata + def pre_import_attached_cluster( self, request: attached_service.ImportAttachedClusterRequest, @@ -323,12 +471,35 @@ def post_import_attached_cluster( ) -> operations_pb2.Operation: """Post-rpc interceptor for import_attached_cluster - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_import_attached_cluster_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AttachedClusters server but before - it is returned to user code. + it is returned to user code. This `post_import_attached_cluster` interceptor runs + before the `post_import_attached_cluster_with_metadata` interceptor. """ return response + def post_import_attached_cluster_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for import_attached_cluster + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AttachedClusters server but before it is returned to user code. + + We recommend only using this `post_import_attached_cluster_with_metadata` + interceptor in new development instead of the `post_import_attached_cluster` interceptor. + When both interceptors are used, this `post_import_attached_cluster_with_metadata` interceptor runs after the + `post_import_attached_cluster` interceptor. The (possibly modified) response returned by + `post_import_attached_cluster` will be passed to + `post_import_attached_cluster_with_metadata`. + """ + return response, metadata + def pre_list_attached_clusters( self, request: attached_service.ListAttachedClustersRequest, @@ -349,12 +520,38 @@ def post_list_attached_clusters( ) -> attached_service.ListAttachedClustersResponse: """Post-rpc interceptor for list_attached_clusters - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_attached_clusters_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AttachedClusters server but before - it is returned to user code. + it is returned to user code. This `post_list_attached_clusters` interceptor runs + before the `post_list_attached_clusters_with_metadata` interceptor. """ return response + def post_list_attached_clusters_with_metadata( + self, + response: attached_service.ListAttachedClustersResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + attached_service.ListAttachedClustersResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_attached_clusters + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AttachedClusters server but before it is returned to user code. + + We recommend only using this `post_list_attached_clusters_with_metadata` + interceptor in new development instead of the `post_list_attached_clusters` interceptor. + When both interceptors are used, this `post_list_attached_clusters_with_metadata` interceptor runs after the + `post_list_attached_clusters` interceptor. The (possibly modified) response returned by + `post_list_attached_clusters` will be passed to + `post_list_attached_clusters_with_metadata`. 
+ """ + return response, metadata + def pre_update_attached_cluster( self, request: attached_service.UpdateAttachedClusterRequest, @@ -375,12 +572,35 @@ def post_update_attached_cluster( ) -> operations_pb2.Operation: """Post-rpc interceptor for update_attached_cluster - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_attached_cluster_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AttachedClusters server but before - it is returned to user code. + it is returned to user code. This `post_update_attached_cluster` interceptor runs + before the `post_update_attached_cluster_with_metadata` interceptor. """ return response + def post_update_attached_cluster_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_attached_cluster + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AttachedClusters server but before it is returned to user code. + + We recommend only using this `post_update_attached_cluster_with_metadata` + interceptor in new development instead of the `post_update_attached_cluster` interceptor. + When both interceptors are used, this `post_update_attached_cluster_with_metadata` interceptor runs after the + `post_update_attached_cluster` interceptor. The (possibly modified) response returned by + `post_update_attached_cluster` will be passed to + `post_update_attached_cluster_with_metadata`. + """ + return response, metadata + def pre_cancel_operation( self, request: operations_pb2.CancelOperationRequest, @@ -754,6 +974,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_attached_cluster(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_attached_cluster_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -901,6 +1125,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_attached_cluster(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_attached_cluster_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1058,6 +1286,13 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_generate_attached_cluster_agent_token(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_generate_attached_cluster_agent_token_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1218,6 +1453,13 @@ def __call__( resp = self._interceptor.post_generate_attached_cluster_install_manifest( resp ) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_generate_attached_cluster_install_manifest_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ 
-1366,6 +1608,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_attached_cluster(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_attached_cluster_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1518,6 +1764,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_attached_server_config(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_attached_server_config_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1673,6 +1923,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_import_attached_cluster(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_import_attached_cluster_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1821,6 +2075,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_attached_clusters(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_attached_clusters_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1976,6 +2234,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_attached_cluster(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_attached_cluster_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/services/aws_clusters/client.py b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/services/aws_clusters/client.py index 76659710f47f..da147a019a06 100644 --- a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/services/aws_clusters/client.py +++ b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/services/aws_clusters/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -542,6 +544,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. 
+ """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -2891,16 +2920,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -2946,16 +2979,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def delete_operation( self, diff --git a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/services/aws_clusters/transports/rest.py b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/services/aws_clusters/transports/rest.py index 85da61b76edd..a7a1c3ab23e1 100644 --- a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/services/aws_clusters/transports/rest.py +++ b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/services/aws_clusters/transports/rest.py @@ -222,12 +222,35 @@ def post_create_aws_cluster( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_aws_cluster - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_aws_cluster_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AwsClusters server but before - it is returned to user code. + it is returned to user code. This `post_create_aws_cluster` interceptor runs + before the `post_create_aws_cluster_with_metadata` interceptor. """ return response + def post_create_aws_cluster_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_aws_cluster + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AwsClusters server but before it is returned to user code. + + We recommend only using this `post_create_aws_cluster_with_metadata` + interceptor in new development instead of the `post_create_aws_cluster` interceptor. 
+ When both interceptors are used, this `post_create_aws_cluster_with_metadata` interceptor runs after the + `post_create_aws_cluster` interceptor. The (possibly modified) response returned by + `post_create_aws_cluster` will be passed to + `post_create_aws_cluster_with_metadata`. + """ + return response, metadata + def pre_create_aws_node_pool( self, request: aws_service.CreateAwsNodePoolRequest, @@ -247,12 +270,35 @@ def post_create_aws_node_pool( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_aws_node_pool - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_aws_node_pool_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AwsClusters server but before - it is returned to user code. + it is returned to user code. This `post_create_aws_node_pool` interceptor runs + before the `post_create_aws_node_pool_with_metadata` interceptor. """ return response + def post_create_aws_node_pool_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_aws_node_pool + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AwsClusters server but before it is returned to user code. + + We recommend only using this `post_create_aws_node_pool_with_metadata` + interceptor in new development instead of the `post_create_aws_node_pool` interceptor. + When both interceptors are used, this `post_create_aws_node_pool_with_metadata` interceptor runs after the + `post_create_aws_node_pool` interceptor. The (possibly modified) response returned by + `post_create_aws_node_pool` will be passed to + `post_create_aws_node_pool_with_metadata`. + """ + return response, metadata + def pre_delete_aws_cluster( self, request: aws_service.DeleteAwsClusterRequest, @@ -272,12 +318,35 @@ def post_delete_aws_cluster( ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_aws_cluster - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_aws_cluster_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AwsClusters server but before - it is returned to user code. + it is returned to user code. This `post_delete_aws_cluster` interceptor runs + before the `post_delete_aws_cluster_with_metadata` interceptor. """ return response + def post_delete_aws_cluster_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_aws_cluster + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AwsClusters server but before it is returned to user code. + + We recommend only using this `post_delete_aws_cluster_with_metadata` + interceptor in new development instead of the `post_delete_aws_cluster` interceptor. + When both interceptors are used, this `post_delete_aws_cluster_with_metadata` interceptor runs after the + `post_delete_aws_cluster` interceptor. The (possibly modified) response returned by + `post_delete_aws_cluster` will be passed to + `post_delete_aws_cluster_with_metadata`. 
+ """ + return response, metadata + def pre_delete_aws_node_pool( self, request: aws_service.DeleteAwsNodePoolRequest, @@ -297,12 +366,35 @@ def post_delete_aws_node_pool( ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_aws_node_pool - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_aws_node_pool_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AwsClusters server but before - it is returned to user code. + it is returned to user code. This `post_delete_aws_node_pool` interceptor runs + before the `post_delete_aws_node_pool_with_metadata` interceptor. """ return response + def post_delete_aws_node_pool_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_aws_node_pool + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AwsClusters server but before it is returned to user code. + + We recommend only using this `post_delete_aws_node_pool_with_metadata` + interceptor in new development instead of the `post_delete_aws_node_pool` interceptor. + When both interceptors are used, this `post_delete_aws_node_pool_with_metadata` interceptor runs after the + `post_delete_aws_node_pool` interceptor. The (possibly modified) response returned by + `post_delete_aws_node_pool` will be passed to + `post_delete_aws_node_pool_with_metadata`. + """ + return response, metadata + def pre_generate_aws_access_token( self, request: aws_service.GenerateAwsAccessTokenRequest, @@ -323,12 +415,38 @@ def post_generate_aws_access_token( ) -> aws_service.GenerateAwsAccessTokenResponse: """Post-rpc interceptor for generate_aws_access_token - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_generate_aws_access_token_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AwsClusters server but before - it is returned to user code. + it is returned to user code. This `post_generate_aws_access_token` interceptor runs + before the `post_generate_aws_access_token_with_metadata` interceptor. """ return response + def post_generate_aws_access_token_with_metadata( + self, + response: aws_service.GenerateAwsAccessTokenResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + aws_service.GenerateAwsAccessTokenResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for generate_aws_access_token + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AwsClusters server but before it is returned to user code. + + We recommend only using this `post_generate_aws_access_token_with_metadata` + interceptor in new development instead of the `post_generate_aws_access_token` interceptor. + When both interceptors are used, this `post_generate_aws_access_token_with_metadata` interceptor runs after the + `post_generate_aws_access_token` interceptor. The (possibly modified) response returned by + `post_generate_aws_access_token` will be passed to + `post_generate_aws_access_token_with_metadata`. 
+ """ + return response, metadata + def pre_generate_aws_cluster_agent_token( self, request: aws_service.GenerateAwsClusterAgentTokenRequest, @@ -349,12 +467,38 @@ def post_generate_aws_cluster_agent_token( ) -> aws_service.GenerateAwsClusterAgentTokenResponse: """Post-rpc interceptor for generate_aws_cluster_agent_token - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_generate_aws_cluster_agent_token_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AwsClusters server but before - it is returned to user code. + it is returned to user code. This `post_generate_aws_cluster_agent_token` interceptor runs + before the `post_generate_aws_cluster_agent_token_with_metadata` interceptor. """ return response + def post_generate_aws_cluster_agent_token_with_metadata( + self, + response: aws_service.GenerateAwsClusterAgentTokenResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + aws_service.GenerateAwsClusterAgentTokenResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for generate_aws_cluster_agent_token + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AwsClusters server but before it is returned to user code. + + We recommend only using this `post_generate_aws_cluster_agent_token_with_metadata` + interceptor in new development instead of the `post_generate_aws_cluster_agent_token` interceptor. + When both interceptors are used, this `post_generate_aws_cluster_agent_token_with_metadata` interceptor runs after the + `post_generate_aws_cluster_agent_token` interceptor. The (possibly modified) response returned by + `post_generate_aws_cluster_agent_token` will be passed to + `post_generate_aws_cluster_agent_token_with_metadata`. + """ + return response, metadata + def pre_get_aws_cluster( self, request: aws_service.GetAwsClusterRequest, @@ -374,12 +518,35 @@ def post_get_aws_cluster( ) -> aws_resources.AwsCluster: """Post-rpc interceptor for get_aws_cluster - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_aws_cluster_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AwsClusters server but before - it is returned to user code. + it is returned to user code. This `post_get_aws_cluster` interceptor runs + before the `post_get_aws_cluster_with_metadata` interceptor. """ return response + def post_get_aws_cluster_with_metadata( + self, + response: aws_resources.AwsCluster, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[aws_resources.AwsCluster, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_aws_cluster + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AwsClusters server but before it is returned to user code. + + We recommend only using this `post_get_aws_cluster_with_metadata` + interceptor in new development instead of the `post_get_aws_cluster` interceptor. + When both interceptors are used, this `post_get_aws_cluster_with_metadata` interceptor runs after the + `post_get_aws_cluster` interceptor. The (possibly modified) response returned by + `post_get_aws_cluster` will be passed to + `post_get_aws_cluster_with_metadata`. 
+ """ + return response, metadata + def pre_get_aws_json_web_keys( self, request: aws_service.GetAwsJsonWebKeysRequest, @@ -399,12 +566,35 @@ def post_get_aws_json_web_keys( ) -> aws_resources.AwsJsonWebKeys: """Post-rpc interceptor for get_aws_json_web_keys - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_aws_json_web_keys_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AwsClusters server but before - it is returned to user code. + it is returned to user code. This `post_get_aws_json_web_keys` interceptor runs + before the `post_get_aws_json_web_keys_with_metadata` interceptor. """ return response + def post_get_aws_json_web_keys_with_metadata( + self, + response: aws_resources.AwsJsonWebKeys, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[aws_resources.AwsJsonWebKeys, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_aws_json_web_keys + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AwsClusters server but before it is returned to user code. + + We recommend only using this `post_get_aws_json_web_keys_with_metadata` + interceptor in new development instead of the `post_get_aws_json_web_keys` interceptor. + When both interceptors are used, this `post_get_aws_json_web_keys_with_metadata` interceptor runs after the + `post_get_aws_json_web_keys` interceptor. The (possibly modified) response returned by + `post_get_aws_json_web_keys` will be passed to + `post_get_aws_json_web_keys_with_metadata`. + """ + return response, metadata + def pre_get_aws_node_pool( self, request: aws_service.GetAwsNodePoolRequest, @@ -424,12 +614,35 @@ def post_get_aws_node_pool( ) -> aws_resources.AwsNodePool: """Post-rpc interceptor for get_aws_node_pool - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_aws_node_pool_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AwsClusters server but before - it is returned to user code. + it is returned to user code. This `post_get_aws_node_pool` interceptor runs + before the `post_get_aws_node_pool_with_metadata` interceptor. """ return response + def post_get_aws_node_pool_with_metadata( + self, + response: aws_resources.AwsNodePool, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[aws_resources.AwsNodePool, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_aws_node_pool + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AwsClusters server but before it is returned to user code. + + We recommend only using this `post_get_aws_node_pool_with_metadata` + interceptor in new development instead of the `post_get_aws_node_pool` interceptor. + When both interceptors are used, this `post_get_aws_node_pool_with_metadata` interceptor runs after the + `post_get_aws_node_pool` interceptor. The (possibly modified) response returned by + `post_get_aws_node_pool` will be passed to + `post_get_aws_node_pool_with_metadata`. + """ + return response, metadata + def pre_get_aws_open_id_config( self, request: aws_service.GetAwsOpenIdConfigRequest, @@ -449,12 +662,35 @@ def post_get_aws_open_id_config( ) -> aws_resources.AwsOpenIdConfig: """Post-rpc interceptor for get_aws_open_id_config - Override in a subclass to manipulate the response + DEPRECATED. 
Please use the `post_get_aws_open_id_config_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AwsClusters server but before - it is returned to user code. + it is returned to user code. This `post_get_aws_open_id_config` interceptor runs + before the `post_get_aws_open_id_config_with_metadata` interceptor. """ return response + def post_get_aws_open_id_config_with_metadata( + self, + response: aws_resources.AwsOpenIdConfig, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[aws_resources.AwsOpenIdConfig, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_aws_open_id_config + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AwsClusters server but before it is returned to user code. + + We recommend only using this `post_get_aws_open_id_config_with_metadata` + interceptor in new development instead of the `post_get_aws_open_id_config` interceptor. + When both interceptors are used, this `post_get_aws_open_id_config_with_metadata` interceptor runs after the + `post_get_aws_open_id_config` interceptor. The (possibly modified) response returned by + `post_get_aws_open_id_config` will be passed to + `post_get_aws_open_id_config_with_metadata`. + """ + return response, metadata + def pre_get_aws_server_config( self, request: aws_service.GetAwsServerConfigRequest, @@ -474,12 +710,35 @@ def post_get_aws_server_config( ) -> aws_resources.AwsServerConfig: """Post-rpc interceptor for get_aws_server_config - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_aws_server_config_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AwsClusters server but before - it is returned to user code. + it is returned to user code. This `post_get_aws_server_config` interceptor runs + before the `post_get_aws_server_config_with_metadata` interceptor. """ return response + def post_get_aws_server_config_with_metadata( + self, + response: aws_resources.AwsServerConfig, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[aws_resources.AwsServerConfig, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_aws_server_config + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AwsClusters server but before it is returned to user code. + + We recommend only using this `post_get_aws_server_config_with_metadata` + interceptor in new development instead of the `post_get_aws_server_config` interceptor. + When both interceptors are used, this `post_get_aws_server_config_with_metadata` interceptor runs after the + `post_get_aws_server_config` interceptor. The (possibly modified) response returned by + `post_get_aws_server_config` will be passed to + `post_get_aws_server_config_with_metadata`. + """ + return response, metadata + def pre_list_aws_clusters( self, request: aws_service.ListAwsClustersRequest, @@ -499,12 +758,37 @@ def post_list_aws_clusters( ) -> aws_service.ListAwsClustersResponse: """Post-rpc interceptor for list_aws_clusters - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_aws_clusters_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AwsClusters server but before - it is returned to user code. + it is returned to user code. 
This `post_list_aws_clusters` interceptor runs + before the `post_list_aws_clusters_with_metadata` interceptor. """ return response + def post_list_aws_clusters_with_metadata( + self, + response: aws_service.ListAwsClustersResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + aws_service.ListAwsClustersResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_aws_clusters + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AwsClusters server but before it is returned to user code. + + We recommend only using this `post_list_aws_clusters_with_metadata` + interceptor in new development instead of the `post_list_aws_clusters` interceptor. + When both interceptors are used, this `post_list_aws_clusters_with_metadata` interceptor runs after the + `post_list_aws_clusters` interceptor. The (possibly modified) response returned by + `post_list_aws_clusters` will be passed to + `post_list_aws_clusters_with_metadata`. + """ + return response, metadata + def pre_list_aws_node_pools( self, request: aws_service.ListAwsNodePoolsRequest, @@ -524,12 +808,37 @@ def post_list_aws_node_pools( ) -> aws_service.ListAwsNodePoolsResponse: """Post-rpc interceptor for list_aws_node_pools - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_aws_node_pools_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AwsClusters server but before - it is returned to user code. + it is returned to user code. This `post_list_aws_node_pools` interceptor runs + before the `post_list_aws_node_pools_with_metadata` interceptor. """ return response + def post_list_aws_node_pools_with_metadata( + self, + response: aws_service.ListAwsNodePoolsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + aws_service.ListAwsNodePoolsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_aws_node_pools + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AwsClusters server but before it is returned to user code. + + We recommend only using this `post_list_aws_node_pools_with_metadata` + interceptor in new development instead of the `post_list_aws_node_pools` interceptor. + When both interceptors are used, this `post_list_aws_node_pools_with_metadata` interceptor runs after the + `post_list_aws_node_pools` interceptor. The (possibly modified) response returned by + `post_list_aws_node_pools` will be passed to + `post_list_aws_node_pools_with_metadata`. + """ + return response, metadata + def pre_rollback_aws_node_pool_update( self, request: aws_service.RollbackAwsNodePoolUpdateRequest, @@ -550,12 +859,35 @@ def post_rollback_aws_node_pool_update( ) -> operations_pb2.Operation: """Post-rpc interceptor for rollback_aws_node_pool_update - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_rollback_aws_node_pool_update_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AwsClusters server but before - it is returned to user code. + it is returned to user code. This `post_rollback_aws_node_pool_update` interceptor runs + before the `post_rollback_aws_node_pool_update_with_metadata` interceptor. 
""" return response + def post_rollback_aws_node_pool_update_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for rollback_aws_node_pool_update + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AwsClusters server but before it is returned to user code. + + We recommend only using this `post_rollback_aws_node_pool_update_with_metadata` + interceptor in new development instead of the `post_rollback_aws_node_pool_update` interceptor. + When both interceptors are used, this `post_rollback_aws_node_pool_update_with_metadata` interceptor runs after the + `post_rollback_aws_node_pool_update` interceptor. The (possibly modified) response returned by + `post_rollback_aws_node_pool_update` will be passed to + `post_rollback_aws_node_pool_update_with_metadata`. + """ + return response, metadata + def pre_update_aws_cluster( self, request: aws_service.UpdateAwsClusterRequest, @@ -575,12 +907,35 @@ def post_update_aws_cluster( ) -> operations_pb2.Operation: """Post-rpc interceptor for update_aws_cluster - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_aws_cluster_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AwsClusters server but before - it is returned to user code. + it is returned to user code. This `post_update_aws_cluster` interceptor runs + before the `post_update_aws_cluster_with_metadata` interceptor. """ return response + def post_update_aws_cluster_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_aws_cluster + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AwsClusters server but before it is returned to user code. + + We recommend only using this `post_update_aws_cluster_with_metadata` + interceptor in new development instead of the `post_update_aws_cluster` interceptor. + When both interceptors are used, this `post_update_aws_cluster_with_metadata` interceptor runs after the + `post_update_aws_cluster` interceptor. The (possibly modified) response returned by + `post_update_aws_cluster` will be passed to + `post_update_aws_cluster_with_metadata`. + """ + return response, metadata + def pre_update_aws_node_pool( self, request: aws_service.UpdateAwsNodePoolRequest, @@ -600,12 +955,35 @@ def post_update_aws_node_pool( ) -> operations_pb2.Operation: """Post-rpc interceptor for update_aws_node_pool - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_aws_node_pool_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AwsClusters server but before - it is returned to user code. + it is returned to user code. This `post_update_aws_node_pool` interceptor runs + before the `post_update_aws_node_pool_with_metadata` interceptor. 
""" return response + def post_update_aws_node_pool_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_aws_node_pool + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AwsClusters server but before it is returned to user code. + + We recommend only using this `post_update_aws_node_pool_with_metadata` + interceptor in new development instead of the `post_update_aws_node_pool` interceptor. + When both interceptors are used, this `post_update_aws_node_pool_with_metadata` interceptor runs after the + `post_update_aws_node_pool` interceptor. The (possibly modified) response returned by + `post_update_aws_node_pool` will be passed to + `post_update_aws_node_pool_with_metadata`. + """ + return response, metadata + def pre_cancel_operation( self, request: operations_pb2.CancelOperationRequest, @@ -976,6 +1354,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_aws_cluster(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_aws_cluster_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1126,6 +1508,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_aws_node_pool(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_aws_node_pool_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1270,6 +1656,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_aws_cluster(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_aws_cluster_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1414,6 +1804,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_aws_node_pool(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_aws_node_pool_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1559,6 +1953,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_generate_aws_access_token(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_generate_aws_access_token_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1713,6 +2111,13 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_generate_aws_cluster_agent_token(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_generate_aws_cluster_agent_token_with_metadata( + resp, response_metadata + ) if 
CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1860,6 +2265,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_aws_cluster(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_aws_cluster_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2008,6 +2417,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_aws_json_web_keys(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_aws_json_web_keys_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2151,6 +2564,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_aws_node_pool(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_aws_node_pool_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2300,6 +2717,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_aws_open_id_config(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_aws_open_id_config_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2445,6 +2866,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_aws_server_config(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_aws_server_config_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2590,6 +3015,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_aws_clusters(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_aws_clusters_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2737,6 +3166,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_aws_node_pools(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_aws_node_pools_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2893,6 +3326,13 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_rollback_aws_node_pool_update(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_rollback_aws_node_pool_update_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ 
-3043,6 +3483,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_aws_cluster(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_aws_cluster_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3193,6 +3637,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_aws_node_pool(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_aws_node_pool_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/services/azure_clusters/client.py b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/services/azure_clusters/client.py index dff3306181ee..bbe92d996a0f 100644 --- a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/services/azure_clusters/client.py +++ b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/services/azure_clusters/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -562,6 +564,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -3387,16 +3416,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -3442,16 +3475,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. 
+ return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def delete_operation( self, diff --git a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/services/azure_clusters/transports/rest.py b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/services/azure_clusters/transports/rest.py index 24c6888187ee..aaae045fe490 100644 --- a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/services/azure_clusters/transports/rest.py +++ b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/services/azure_clusters/transports/rest.py @@ -246,12 +246,35 @@ def post_create_azure_client( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_azure_client - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_azure_client_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AzureClusters server but before - it is returned to user code. + it is returned to user code. This `post_create_azure_client` interceptor runs + before the `post_create_azure_client_with_metadata` interceptor. """ return response + def post_create_azure_client_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_azure_client + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AzureClusters server but before it is returned to user code. + + We recommend only using this `post_create_azure_client_with_metadata` + interceptor in new development instead of the `post_create_azure_client` interceptor. + When both interceptors are used, this `post_create_azure_client_with_metadata` interceptor runs after the + `post_create_azure_client` interceptor. The (possibly modified) response returned by + `post_create_azure_client` will be passed to + `post_create_azure_client_with_metadata`. + """ + return response, metadata + def pre_create_azure_cluster( self, request: azure_service.CreateAzureClusterRequest, @@ -271,12 +294,35 @@ def post_create_azure_cluster( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_azure_cluster - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_azure_cluster_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AzureClusters server but before - it is returned to user code. + it is returned to user code. This `post_create_azure_cluster` interceptor runs + before the `post_create_azure_cluster_with_metadata` interceptor. """ return response + def post_create_azure_cluster_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_azure_cluster + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AzureClusters server but before it is returned to user code. + + We recommend only using this `post_create_azure_cluster_with_metadata` + interceptor in new development instead of the `post_create_azure_cluster` interceptor. 
+ When both interceptors are used, this `post_create_azure_cluster_with_metadata` interceptor runs after the + `post_create_azure_cluster` interceptor. The (possibly modified) response returned by + `post_create_azure_cluster` will be passed to + `post_create_azure_cluster_with_metadata`. + """ + return response, metadata + def pre_create_azure_node_pool( self, request: azure_service.CreateAzureNodePoolRequest, @@ -297,12 +343,35 @@ def post_create_azure_node_pool( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_azure_node_pool - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_azure_node_pool_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AzureClusters server but before - it is returned to user code. + it is returned to user code. This `post_create_azure_node_pool` interceptor runs + before the `post_create_azure_node_pool_with_metadata` interceptor. """ return response + def post_create_azure_node_pool_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_azure_node_pool + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AzureClusters server but before it is returned to user code. + + We recommend only using this `post_create_azure_node_pool_with_metadata` + interceptor in new development instead of the `post_create_azure_node_pool` interceptor. + When both interceptors are used, this `post_create_azure_node_pool_with_metadata` interceptor runs after the + `post_create_azure_node_pool` interceptor. The (possibly modified) response returned by + `post_create_azure_node_pool` will be passed to + `post_create_azure_node_pool_with_metadata`. + """ + return response, metadata + def pre_delete_azure_client( self, request: azure_service.DeleteAzureClientRequest, @@ -322,12 +391,35 @@ def post_delete_azure_client( ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_azure_client - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_azure_client_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AzureClusters server but before - it is returned to user code. + it is returned to user code. This `post_delete_azure_client` interceptor runs + before the `post_delete_azure_client_with_metadata` interceptor. """ return response + def post_delete_azure_client_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_azure_client + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AzureClusters server but before it is returned to user code. + + We recommend only using this `post_delete_azure_client_with_metadata` + interceptor in new development instead of the `post_delete_azure_client` interceptor. + When both interceptors are used, this `post_delete_azure_client_with_metadata` interceptor runs after the + `post_delete_azure_client` interceptor. The (possibly modified) response returned by + `post_delete_azure_client` will be passed to + `post_delete_azure_client_with_metadata`. 
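Earlier in this diff, the azure_clusters `client.py` also gains `_add_cred_info_for_auth_errors`: wrappers such as `list_operations` and `get_operation` now catch `GoogleAPICallError`, append a JSON description of the active credential for 401/403/404 responses, and re-raise. A hedged sketch of what a caller might observe follows; the client construction, the operation name, and the exact shape of the appended JSON are assumptions, and `get_cred_info` is only available with google-auth >= 2.35.0.

    # Illustration only -- assumes AzureClustersClient is exported from the
    # versioned package and that the credentials support get_cred_info().
    import json

    from google.api_core import exceptions as core_exceptions
    from google.cloud import gke_multicloud_v1
    from google.longrunning import operations_pb2

    client = gke_multicloud_v1.AzureClustersClient()
    request = operations_pb2.GetOperationRequest(
        name="projects/p/locations/l/operations/op-123"  # hypothetical name
    )
    try:
        client.get_operation(request=request)
    except core_exceptions.GoogleAPICallError as e:
        # On UNAUTHORIZED / FORBIDDEN / NOT_FOUND, the error details may now end
        # with a JSON string describing the credential that was used.
        for detail in e.details:
            if isinstance(detail, str):
                try:
                    print(json.loads(detail))
                except ValueError:
                    pass
        raise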
+ """ + return response, metadata + def pre_delete_azure_cluster( self, request: azure_service.DeleteAzureClusterRequest, @@ -347,12 +439,35 @@ def post_delete_azure_cluster( ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_azure_cluster - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_azure_cluster_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AzureClusters server but before - it is returned to user code. + it is returned to user code. This `post_delete_azure_cluster` interceptor runs + before the `post_delete_azure_cluster_with_metadata` interceptor. """ return response + def post_delete_azure_cluster_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_azure_cluster + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AzureClusters server but before it is returned to user code. + + We recommend only using this `post_delete_azure_cluster_with_metadata` + interceptor in new development instead of the `post_delete_azure_cluster` interceptor. + When both interceptors are used, this `post_delete_azure_cluster_with_metadata` interceptor runs after the + `post_delete_azure_cluster` interceptor. The (possibly modified) response returned by + `post_delete_azure_cluster` will be passed to + `post_delete_azure_cluster_with_metadata`. + """ + return response, metadata + def pre_delete_azure_node_pool( self, request: azure_service.DeleteAzureNodePoolRequest, @@ -373,12 +488,35 @@ def post_delete_azure_node_pool( ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_azure_node_pool - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_azure_node_pool_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AzureClusters server but before - it is returned to user code. + it is returned to user code. This `post_delete_azure_node_pool` interceptor runs + before the `post_delete_azure_node_pool_with_metadata` interceptor. """ return response + def post_delete_azure_node_pool_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_azure_node_pool + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AzureClusters server but before it is returned to user code. + + We recommend only using this `post_delete_azure_node_pool_with_metadata` + interceptor in new development instead of the `post_delete_azure_node_pool` interceptor. + When both interceptors are used, this `post_delete_azure_node_pool_with_metadata` interceptor runs after the + `post_delete_azure_node_pool` interceptor. The (possibly modified) response returned by + `post_delete_azure_node_pool` will be passed to + `post_delete_azure_node_pool_with_metadata`. 
+ """ + return response, metadata + def pre_generate_azure_access_token( self, request: azure_service.GenerateAzureAccessTokenRequest, @@ -399,12 +537,38 @@ def post_generate_azure_access_token( ) -> azure_service.GenerateAzureAccessTokenResponse: """Post-rpc interceptor for generate_azure_access_token - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_generate_azure_access_token_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AzureClusters server but before - it is returned to user code. + it is returned to user code. This `post_generate_azure_access_token` interceptor runs + before the `post_generate_azure_access_token_with_metadata` interceptor. """ return response + def post_generate_azure_access_token_with_metadata( + self, + response: azure_service.GenerateAzureAccessTokenResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + azure_service.GenerateAzureAccessTokenResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for generate_azure_access_token + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AzureClusters server but before it is returned to user code. + + We recommend only using this `post_generate_azure_access_token_with_metadata` + interceptor in new development instead of the `post_generate_azure_access_token` interceptor. + When both interceptors are used, this `post_generate_azure_access_token_with_metadata` interceptor runs after the + `post_generate_azure_access_token` interceptor. The (possibly modified) response returned by + `post_generate_azure_access_token` will be passed to + `post_generate_azure_access_token_with_metadata`. + """ + return response, metadata + def pre_generate_azure_cluster_agent_token( self, request: azure_service.GenerateAzureClusterAgentTokenRequest, @@ -425,12 +589,38 @@ def post_generate_azure_cluster_agent_token( ) -> azure_service.GenerateAzureClusterAgentTokenResponse: """Post-rpc interceptor for generate_azure_cluster_agent_token - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_generate_azure_cluster_agent_token_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AzureClusters server but before - it is returned to user code. + it is returned to user code. This `post_generate_azure_cluster_agent_token` interceptor runs + before the `post_generate_azure_cluster_agent_token_with_metadata` interceptor. """ return response + def post_generate_azure_cluster_agent_token_with_metadata( + self, + response: azure_service.GenerateAzureClusterAgentTokenResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + azure_service.GenerateAzureClusterAgentTokenResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for generate_azure_cluster_agent_token + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AzureClusters server but before it is returned to user code. + + We recommend only using this `post_generate_azure_cluster_agent_token_with_metadata` + interceptor in new development instead of the `post_generate_azure_cluster_agent_token` interceptor. + When both interceptors are used, this `post_generate_azure_cluster_agent_token_with_metadata` interceptor runs after the + `post_generate_azure_cluster_agent_token` interceptor. 
The (possibly modified) response returned by + `post_generate_azure_cluster_agent_token` will be passed to + `post_generate_azure_cluster_agent_token_with_metadata`. + """ + return response, metadata + def pre_get_azure_client( self, request: azure_service.GetAzureClientRequest, @@ -450,12 +640,35 @@ def post_get_azure_client( ) -> azure_resources.AzureClient: """Post-rpc interceptor for get_azure_client - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_azure_client_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AzureClusters server but before - it is returned to user code. + it is returned to user code. This `post_get_azure_client` interceptor runs + before the `post_get_azure_client_with_metadata` interceptor. """ return response + def post_get_azure_client_with_metadata( + self, + response: azure_resources.AzureClient, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[azure_resources.AzureClient, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_azure_client + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AzureClusters server but before it is returned to user code. + + We recommend only using this `post_get_azure_client_with_metadata` + interceptor in new development instead of the `post_get_azure_client` interceptor. + When both interceptors are used, this `post_get_azure_client_with_metadata` interceptor runs after the + `post_get_azure_client` interceptor. The (possibly modified) response returned by + `post_get_azure_client` will be passed to + `post_get_azure_client_with_metadata`. + """ + return response, metadata + def pre_get_azure_cluster( self, request: azure_service.GetAzureClusterRequest, @@ -475,12 +688,35 @@ def post_get_azure_cluster( ) -> azure_resources.AzureCluster: """Post-rpc interceptor for get_azure_cluster - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_azure_cluster_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AzureClusters server but before - it is returned to user code. + it is returned to user code. This `post_get_azure_cluster` interceptor runs + before the `post_get_azure_cluster_with_metadata` interceptor. """ return response + def post_get_azure_cluster_with_metadata( + self, + response: azure_resources.AzureCluster, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[azure_resources.AzureCluster, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_azure_cluster + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AzureClusters server but before it is returned to user code. + + We recommend only using this `post_get_azure_cluster_with_metadata` + interceptor in new development instead of the `post_get_azure_cluster` interceptor. + When both interceptors are used, this `post_get_azure_cluster_with_metadata` interceptor runs after the + `post_get_azure_cluster` interceptor. The (possibly modified) response returned by + `post_get_azure_cluster` will be passed to + `post_get_azure_cluster_with_metadata`. 
+ """ + return response, metadata + def pre_get_azure_json_web_keys( self, request: azure_service.GetAzureJsonWebKeysRequest, @@ -501,12 +737,37 @@ def post_get_azure_json_web_keys( ) -> azure_resources.AzureJsonWebKeys: """Post-rpc interceptor for get_azure_json_web_keys - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_azure_json_web_keys_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AzureClusters server but before - it is returned to user code. + it is returned to user code. This `post_get_azure_json_web_keys` interceptor runs + before the `post_get_azure_json_web_keys_with_metadata` interceptor. """ return response + def post_get_azure_json_web_keys_with_metadata( + self, + response: azure_resources.AzureJsonWebKeys, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + azure_resources.AzureJsonWebKeys, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for get_azure_json_web_keys + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AzureClusters server but before it is returned to user code. + + We recommend only using this `post_get_azure_json_web_keys_with_metadata` + interceptor in new development instead of the `post_get_azure_json_web_keys` interceptor. + When both interceptors are used, this `post_get_azure_json_web_keys_with_metadata` interceptor runs after the + `post_get_azure_json_web_keys` interceptor. The (possibly modified) response returned by + `post_get_azure_json_web_keys` will be passed to + `post_get_azure_json_web_keys_with_metadata`. + """ + return response, metadata + def pre_get_azure_node_pool( self, request: azure_service.GetAzureNodePoolRequest, @@ -526,12 +787,35 @@ def post_get_azure_node_pool( ) -> azure_resources.AzureNodePool: """Post-rpc interceptor for get_azure_node_pool - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_azure_node_pool_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AzureClusters server but before - it is returned to user code. + it is returned to user code. This `post_get_azure_node_pool` interceptor runs + before the `post_get_azure_node_pool_with_metadata` interceptor. """ return response + def post_get_azure_node_pool_with_metadata( + self, + response: azure_resources.AzureNodePool, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[azure_resources.AzureNodePool, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_azure_node_pool + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AzureClusters server but before it is returned to user code. + + We recommend only using this `post_get_azure_node_pool_with_metadata` + interceptor in new development instead of the `post_get_azure_node_pool` interceptor. + When both interceptors are used, this `post_get_azure_node_pool_with_metadata` interceptor runs after the + `post_get_azure_node_pool` interceptor. The (possibly modified) response returned by + `post_get_azure_node_pool` will be passed to + `post_get_azure_node_pool_with_metadata`. 
+ """ + return response, metadata + def pre_get_azure_open_id_config( self, request: azure_service.GetAzureOpenIdConfigRequest, @@ -552,12 +836,37 @@ def post_get_azure_open_id_config( ) -> azure_resources.AzureOpenIdConfig: """Post-rpc interceptor for get_azure_open_id_config - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_azure_open_id_config_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AzureClusters server but before - it is returned to user code. + it is returned to user code. This `post_get_azure_open_id_config` interceptor runs + before the `post_get_azure_open_id_config_with_metadata` interceptor. """ return response + def post_get_azure_open_id_config_with_metadata( + self, + response: azure_resources.AzureOpenIdConfig, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + azure_resources.AzureOpenIdConfig, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for get_azure_open_id_config + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AzureClusters server but before it is returned to user code. + + We recommend only using this `post_get_azure_open_id_config_with_metadata` + interceptor in new development instead of the `post_get_azure_open_id_config` interceptor. + When both interceptors are used, this `post_get_azure_open_id_config_with_metadata` interceptor runs after the + `post_get_azure_open_id_config` interceptor. The (possibly modified) response returned by + `post_get_azure_open_id_config` will be passed to + `post_get_azure_open_id_config_with_metadata`. + """ + return response, metadata + def pre_get_azure_server_config( self, request: azure_service.GetAzureServerConfigRequest, @@ -578,12 +887,37 @@ def post_get_azure_server_config( ) -> azure_resources.AzureServerConfig: """Post-rpc interceptor for get_azure_server_config - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_azure_server_config_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AzureClusters server but before - it is returned to user code. + it is returned to user code. This `post_get_azure_server_config` interceptor runs + before the `post_get_azure_server_config_with_metadata` interceptor. """ return response + def post_get_azure_server_config_with_metadata( + self, + response: azure_resources.AzureServerConfig, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + azure_resources.AzureServerConfig, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for get_azure_server_config + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AzureClusters server but before it is returned to user code. + + We recommend only using this `post_get_azure_server_config_with_metadata` + interceptor in new development instead of the `post_get_azure_server_config` interceptor. + When both interceptors are used, this `post_get_azure_server_config_with_metadata` interceptor runs after the + `post_get_azure_server_config` interceptor. The (possibly modified) response returned by + `post_get_azure_server_config` will be passed to + `post_get_azure_server_config_with_metadata`. 
+ """ + return response, metadata + def pre_list_azure_clients( self, request: azure_service.ListAzureClientsRequest, @@ -603,12 +937,37 @@ def post_list_azure_clients( ) -> azure_service.ListAzureClientsResponse: """Post-rpc interceptor for list_azure_clients - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_azure_clients_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AzureClusters server but before - it is returned to user code. + it is returned to user code. This `post_list_azure_clients` interceptor runs + before the `post_list_azure_clients_with_metadata` interceptor. """ return response + def post_list_azure_clients_with_metadata( + self, + response: azure_service.ListAzureClientsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + azure_service.ListAzureClientsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_azure_clients + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AzureClusters server but before it is returned to user code. + + We recommend only using this `post_list_azure_clients_with_metadata` + interceptor in new development instead of the `post_list_azure_clients` interceptor. + When both interceptors are used, this `post_list_azure_clients_with_metadata` interceptor runs after the + `post_list_azure_clients` interceptor. The (possibly modified) response returned by + `post_list_azure_clients` will be passed to + `post_list_azure_clients_with_metadata`. + """ + return response, metadata + def pre_list_azure_clusters( self, request: azure_service.ListAzureClustersRequest, @@ -628,12 +987,37 @@ def post_list_azure_clusters( ) -> azure_service.ListAzureClustersResponse: """Post-rpc interceptor for list_azure_clusters - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_azure_clusters_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AzureClusters server but before - it is returned to user code. + it is returned to user code. This `post_list_azure_clusters` interceptor runs + before the `post_list_azure_clusters_with_metadata` interceptor. """ return response + def post_list_azure_clusters_with_metadata( + self, + response: azure_service.ListAzureClustersResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + azure_service.ListAzureClustersResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_azure_clusters + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AzureClusters server but before it is returned to user code. + + We recommend only using this `post_list_azure_clusters_with_metadata` + interceptor in new development instead of the `post_list_azure_clusters` interceptor. + When both interceptors are used, this `post_list_azure_clusters_with_metadata` interceptor runs after the + `post_list_azure_clusters` interceptor. The (possibly modified) response returned by + `post_list_azure_clusters` will be passed to + `post_list_azure_clusters_with_metadata`. 
+ """ + return response, metadata + def pre_list_azure_node_pools( self, request: azure_service.ListAzureNodePoolsRequest, @@ -653,12 +1037,38 @@ def post_list_azure_node_pools( ) -> azure_service.ListAzureNodePoolsResponse: """Post-rpc interceptor for list_azure_node_pools - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_azure_node_pools_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AzureClusters server but before - it is returned to user code. + it is returned to user code. This `post_list_azure_node_pools` interceptor runs + before the `post_list_azure_node_pools_with_metadata` interceptor. """ return response + def post_list_azure_node_pools_with_metadata( + self, + response: azure_service.ListAzureNodePoolsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + azure_service.ListAzureNodePoolsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_azure_node_pools + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AzureClusters server but before it is returned to user code. + + We recommend only using this `post_list_azure_node_pools_with_metadata` + interceptor in new development instead of the `post_list_azure_node_pools` interceptor. + When both interceptors are used, this `post_list_azure_node_pools_with_metadata` interceptor runs after the + `post_list_azure_node_pools` interceptor. The (possibly modified) response returned by + `post_list_azure_node_pools` will be passed to + `post_list_azure_node_pools_with_metadata`. + """ + return response, metadata + def pre_update_azure_cluster( self, request: azure_service.UpdateAzureClusterRequest, @@ -678,12 +1088,35 @@ def post_update_azure_cluster( ) -> operations_pb2.Operation: """Post-rpc interceptor for update_azure_cluster - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_azure_cluster_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AzureClusters server but before - it is returned to user code. + it is returned to user code. This `post_update_azure_cluster` interceptor runs + before the `post_update_azure_cluster_with_metadata` interceptor. """ return response + def post_update_azure_cluster_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_azure_cluster + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AzureClusters server but before it is returned to user code. + + We recommend only using this `post_update_azure_cluster_with_metadata` + interceptor in new development instead of the `post_update_azure_cluster` interceptor. + When both interceptors are used, this `post_update_azure_cluster_with_metadata` interceptor runs after the + `post_update_azure_cluster` interceptor. The (possibly modified) response returned by + `post_update_azure_cluster` will be passed to + `post_update_azure_cluster_with_metadata`. 
+ """ + return response, metadata + def pre_update_azure_node_pool( self, request: azure_service.UpdateAzureNodePoolRequest, @@ -704,12 +1137,35 @@ def post_update_azure_node_pool( ) -> operations_pb2.Operation: """Post-rpc interceptor for update_azure_node_pool - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_azure_node_pool_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AzureClusters server but before - it is returned to user code. + it is returned to user code. This `post_update_azure_node_pool` interceptor runs + before the `post_update_azure_node_pool_with_metadata` interceptor. """ return response + def post_update_azure_node_pool_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_azure_node_pool + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AzureClusters server but before it is returned to user code. + + We recommend only using this `post_update_azure_node_pool_with_metadata` + interceptor in new development instead of the `post_update_azure_node_pool` interceptor. + When both interceptors are used, this `post_update_azure_node_pool_with_metadata` interceptor runs after the + `post_update_azure_node_pool` interceptor. The (possibly modified) response returned by + `post_update_azure_node_pool` will be passed to + `post_update_azure_node_pool_with_metadata`. + """ + return response, metadata + def pre_cancel_operation( self, request: operations_pb2.CancelOperationRequest, @@ -1080,6 +1536,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_azure_client(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_azure_client_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1230,6 +1690,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_azure_cluster(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_azure_cluster_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1380,6 +1844,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_azure_node_pool(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_azure_node_pool_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1524,6 +1992,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_azure_client(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_azure_client_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1668,6 +2140,10 @@ def __call__( json_format.Parse(response.content, 
resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_azure_cluster(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_azure_cluster_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1812,6 +2288,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_azure_node_pool(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_azure_node_pool_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1961,6 +2441,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_generate_azure_access_token(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_generate_azure_access_token_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2116,6 +2600,13 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_generate_azure_cluster_agent_token(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_generate_azure_cluster_agent_token_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2279,6 +2770,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_azure_client(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_azure_client_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2422,6 +2917,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_azure_cluster(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_azure_cluster_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2570,6 +3069,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_azure_json_web_keys(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_azure_json_web_keys_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2715,6 +3218,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_azure_node_pool(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_azure_node_pool_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2864,6 +3371,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = 
self._interceptor.post_get_azure_open_id_config(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_azure_open_id_config_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3013,6 +3524,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_azure_server_config(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_azure_server_config_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3160,6 +3675,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_azure_clients(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_azure_clients_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3307,6 +3826,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_azure_clusters(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_azure_clusters_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3454,6 +3977,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_azure_node_pools(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_azure_node_pools_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3606,6 +4133,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_azure_cluster(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_azure_cluster_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3756,6 +4287,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_azure_node_pool(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_azure_node_pool_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/types/__init__.py b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/types/__init__.py index 1f7c9d3c38e8..5c8f43585423 100644 --- a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/types/__init__.py +++ b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/types/__init__.py @@ -145,6 +145,7 @@ ) from .common_resources import ( BinaryAuthorization, + CloudMonitoringConfig, Fleet, Jwk, LoggingComponentConfig, @@ -279,6 +280,7 @@ "UpdateAzureClusterRequest", "UpdateAzureNodePoolRequest", 
"BinaryAuthorization", + "CloudMonitoringConfig", "Fleet", "Jwk", "LoggingComponentConfig", diff --git a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/types/attached_resources.py b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/types/attached_resources.py index de4ea8f3bddc..72960a4d07c8 100644 --- a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/types/attached_resources.py +++ b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/types/attached_resources.py @@ -137,6 +137,22 @@ class AttachedCluster(proto.Message): security_posture_config (google.cloud.gke_multicloud_v1.types.SecurityPostureConfig): Optional. Security Posture configuration for this cluster. + tags (MutableMapping[str, str]): + Optional. Input only. Tag keys/values directly bound to this + resource. + + Tag key must be specified in the format / where the tag + namespace is the ID of the organization or name of the + project that the tag key is defined in. The short name of a + tag key or value can have a maximum length of 256 + characters. The permitted character set for the short name + includes UTF-8 encoded Unicode characters except single + quotes ('), double quotes ("), backslashes (), and forward + slashes (/). + + See + `Tags `__ + for more details on Google Cloud Platform tags. """ class State(proto.Enum): @@ -280,6 +296,11 @@ class State(proto.Enum): number=26, message=common_resources.SecurityPostureConfig, ) + tags: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=27, + ) class AttachedClustersAuthorization(proto.Message): @@ -449,6 +470,10 @@ class AttachedProxyConfig(proto.Message): the HTTP(S) proxy configuration. The secret must be a JSON encoded proxy configuration as described in + https://cloud.google.com/kubernetes-engine/multi-cloud/docs/attached/eks/how-to/use-a-proxy#configure-proxy-support + for EKS clusters and + https://cloud.google.com/kubernetes-engine/multi-cloud/docs/attached/aks/how-to/use-a-proxy#configure-proxy-support + for AKS clusters. """ kubernetes_secret: "KubernetesSecret" = proto.Field( diff --git a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/types/attached_service.py b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/types/attached_service.py index e0791affdff6..83b8dabe186d 100644 --- a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/types/attached_service.py +++ b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/types/attached_service.py @@ -273,6 +273,7 @@ class UpdateAttachedClusterRequest(proto.Message): - ``proxy_config.kubernetes_secret.name``. - ``proxy_config.kubernetes_secret.namespace``. 
- ``security_posture_config.vulnerability_mode`` + - ``monitoring_config.cloud_monitoring_config.enabled`` """ attached_cluster: attached_resources.AttachedCluster = proto.Field( diff --git a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/types/common_resources.py b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/types/common_resources.py index 58f053bff12c..17caf8d9ee8b 100644 --- a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/types/common_resources.py +++ b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/types/common_resources.py @@ -34,6 +34,7 @@ "LoggingComponentConfig", "MonitoringConfig", "ManagedPrometheusConfig", + "CloudMonitoringConfig", "BinaryAuthorization", "SecurityPostureConfig", }, @@ -172,9 +173,11 @@ class OperationMetadata(proto.Message): requested_cancellation (bool): Output only. Identifies whether it has been requested cancellation for the operation. Operations that have - successfully been cancelled have [Operation.error][] value - with a [google.rpc.Status.code][google.rpc.Status.code] of - 1, corresponding to ``Code.CANCELLED``. + successfully been cancelled have + [google.longrunning.Operation.error][google.longrunning.Operation.error] + value with a + [google.rpc.Status.code][google.rpc.Status.code] of 1, + corresponding to ``Code.CANCELLED``. """ create_time: timestamp_pb2.Timestamp = proto.Field( @@ -448,6 +451,9 @@ class MonitoringConfig(proto.Message): managed_prometheus_config (google.cloud.gke_multicloud_v1.types.ManagedPrometheusConfig): Enable Google Cloud Managed Service for Prometheus in the cluster. + cloud_monitoring_config (google.cloud.gke_multicloud_v1.types.CloudMonitoringConfig): + Optionally enable GKE metrics. + Only for Attached Clusters. """ managed_prometheus_config: "ManagedPrometheusConfig" = proto.Field( @@ -455,6 +461,11 @@ class MonitoringConfig(proto.Message): number=2, message="ManagedPrometheusConfig", ) + cloud_monitoring_config: "CloudMonitoringConfig" = proto.Field( + proto.MESSAGE, + number=4, + message="CloudMonitoringConfig", + ) class ManagedPrometheusConfig(proto.Message): @@ -472,6 +483,29 @@ class ManagedPrometheusConfig(proto.Message): ) +class CloudMonitoringConfig(proto.Message): + r"""CloudMonitoringConfig defines the configuration for + built-in Cloud Logging and Monitoring. + Only for Attached Clusters. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + enabled (bool): + Enable GKE-native logging and metrics. + Only for Attached Clusters. + + This field is a member of `oneof`_ ``_enabled``. + """ + + enabled: bool = proto.Field( + proto.BOOL, + number=1, + optional=True, + ) + + class BinaryAuthorization(proto.Message): r"""Configuration for Binary Authorization. 
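The types change above adds CloudMonitoringConfig and hooks it into MonitoringConfig as field 4; together with the new monitoring_config.cloud_monitoring_config.enabled update-mask path documented in UpdateAttachedClusterRequest, this is what lets callers toggle GKE-native metrics on attached clusters. A minimal sketch of that call, illustrative only: it assumes the usual generated client surface (package-level re-exports of the message types and flattened attached_cluster/update_mask arguments) and uses a placeholder cluster name.

# Illustrative sketch -- not part of this diff.
from google.cloud import gke_multicloud_v1
from google.protobuf import field_mask_pb2


def enable_cloud_monitoring(cluster_name: str):
    client = gke_multicloud_v1.AttachedClustersClient()
    cluster = gke_multicloud_v1.AttachedCluster(
        name=cluster_name,  # placeholder: projects/{p}/locations/{l}/attachedClusters/{c}
        monitoring_config=gke_multicloud_v1.MonitoringConfig(
            cloud_monitoring_config=gke_multicloud_v1.CloudMonitoringConfig(
                enabled=True,
            ),
        ),
    )
    # The mask path below is the one added to the UpdateAttachedClusterRequest docstring.
    operation = client.update_attached_cluster(
        attached_cluster=cluster,
        update_mask=field_mask_pb2.FieldMask(
            paths=["monitoring_config.cloud_monitoring_config.enabled"]
        ),
    )
    return operation.result()

update_attached_cluster returns a long-running operation, so result() blocks until the cluster update finishes.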
diff --git a/packages/google-cloud-gke-multicloud/samples/generated_samples/snippet_metadata_google.cloud.gkemulticloud.v1.json b/packages/google-cloud-gke-multicloud/samples/generated_samples/snippet_metadata_google.cloud.gkemulticloud.v1.json index 778818fc5c8a..27344a9f8aad 100644 --- a/packages/google-cloud-gke-multicloud/samples/generated_samples/snippet_metadata_google.cloud.gkemulticloud.v1.json +++ b/packages/google-cloud-gke-multicloud/samples/generated_samples/snippet_metadata_google.cloud.gkemulticloud.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-gke-multicloud", - "version": "0.6.16" + "version": "0.6.19" }, "snippets": [ { diff --git a/packages/google-cloud-gke-multicloud/tests/unit/gapic/gke_multicloud_v1/test_attached_clusters.py b/packages/google-cloud-gke-multicloud/tests/unit/gapic/gke_multicloud_v1/test_attached_clusters.py index 9fd037b70d5d..a66cc321cabd 100644 --- a/packages/google-cloud-gke-multicloud/tests/unit/gapic/gke_multicloud_v1/test_attached_clusters.py +++ b/packages/google-cloud-gke-multicloud/tests/unit/gapic/gke_multicloud_v1/test_attached_clusters.py @@ -77,6 +77,13 @@ common_resources, ) +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -336,6 +343,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = AttachedClustersClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = AttachedClustersClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -7006,12 +7056,16 @@ def test_create_attached_cluster_rest_call_success(request_type): "admin_users": [{"username": "username_value"}], "admin_groups": [{"group": "group_value"}], }, - "monitoring_config": {"managed_prometheus_config": {"enabled": True}}, + "monitoring_config": { + "managed_prometheus_config": {"enabled": True}, + "cloud_monitoring_config": {"enabled": True}, + }, "proxy_config": { "kubernetes_secret": {"name": "name_value", "namespace": "namespace_value"} }, "binary_authorization": {"evaluation_mode": 1}, "security_posture_config": 
{"vulnerability_mode": 1}, + "tags": {}, } # The version of a generated dependency at test runtime may differ from the version used during generation. # Delete any fields which are not present in the current runtime dependency @@ -7121,10 +7175,14 @@ def test_create_attached_cluster_rest_interceptors(null_interceptor): ), mock.patch.object( transports.AttachedClustersRestInterceptor, "post_create_attached_cluster" ) as post, mock.patch.object( + transports.AttachedClustersRestInterceptor, + "post_create_attached_cluster_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AttachedClustersRestInterceptor, "pre_create_attached_cluster" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = attached_service.CreateAttachedClusterRequest.pb( attached_service.CreateAttachedClusterRequest() ) @@ -7148,6 +7206,7 @@ def test_create_attached_cluster_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_attached_cluster( request, @@ -7159,6 +7218,7 @@ def test_create_attached_cluster_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_attached_cluster_rest_bad_request( @@ -7235,12 +7295,16 @@ def test_update_attached_cluster_rest_call_success(request_type): "admin_users": [{"username": "username_value"}], "admin_groups": [{"group": "group_value"}], }, - "monitoring_config": {"managed_prometheus_config": {"enabled": True}}, + "monitoring_config": { + "managed_prometheus_config": {"enabled": True}, + "cloud_monitoring_config": {"enabled": True}, + }, "proxy_config": { "kubernetes_secret": {"name": "name_value", "namespace": "namespace_value"} }, "binary_authorization": {"evaluation_mode": 1}, "security_posture_config": {"vulnerability_mode": 1}, + "tags": {}, } # The version of a generated dependency at test runtime may differ from the version used during generation. 
# Delete any fields which are not present in the current runtime dependency @@ -7350,10 +7414,14 @@ def test_update_attached_cluster_rest_interceptors(null_interceptor): ), mock.patch.object( transports.AttachedClustersRestInterceptor, "post_update_attached_cluster" ) as post, mock.patch.object( + transports.AttachedClustersRestInterceptor, + "post_update_attached_cluster_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AttachedClustersRestInterceptor, "pre_update_attached_cluster" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = attached_service.UpdateAttachedClusterRequest.pb( attached_service.UpdateAttachedClusterRequest() ) @@ -7377,6 +7445,7 @@ def test_update_attached_cluster_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.update_attached_cluster( request, @@ -7388,6 +7457,7 @@ def test_update_attached_cluster_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_import_attached_cluster_rest_bad_request( @@ -7468,10 +7538,14 @@ def test_import_attached_cluster_rest_interceptors(null_interceptor): ), mock.patch.object( transports.AttachedClustersRestInterceptor, "post_import_attached_cluster" ) as post, mock.patch.object( + transports.AttachedClustersRestInterceptor, + "post_import_attached_cluster_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AttachedClustersRestInterceptor, "pre_import_attached_cluster" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = attached_service.ImportAttachedClusterRequest.pb( attached_service.ImportAttachedClusterRequest() ) @@ -7495,6 +7569,7 @@ def test_import_attached_cluster_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.import_attached_cluster( request, @@ -7506,6 +7581,7 @@ def test_import_attached_cluster_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_attached_cluster_rest_bad_request( @@ -7612,10 +7688,14 @@ def test_get_attached_cluster_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AttachedClustersRestInterceptor, "post_get_attached_cluster" ) as post, mock.patch.object( + transports.AttachedClustersRestInterceptor, + "post_get_attached_cluster_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AttachedClustersRestInterceptor, "pre_get_attached_cluster" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = attached_service.GetAttachedClusterRequest.pb( attached_service.GetAttachedClusterRequest() ) @@ -7641,6 +7721,7 @@ def test_get_attached_cluster_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = attached_resources.AttachedCluster() + post_with_metadata.return_value = attached_resources.AttachedCluster(), metadata client.get_attached_cluster( request, @@ -7652,6 +7733,7 @@ def test_get_attached_cluster_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + 
post_with_metadata.assert_called_once() def test_list_attached_clusters_rest_bad_request( @@ -7736,10 +7818,14 @@ def test_list_attached_clusters_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AttachedClustersRestInterceptor, "post_list_attached_clusters" ) as post, mock.patch.object( + transports.AttachedClustersRestInterceptor, + "post_list_attached_clusters_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AttachedClustersRestInterceptor, "pre_list_attached_clusters" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = attached_service.ListAttachedClustersRequest.pb( attached_service.ListAttachedClustersRequest() ) @@ -7765,6 +7851,10 @@ def test_list_attached_clusters_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = attached_service.ListAttachedClustersResponse() + post_with_metadata.return_value = ( + attached_service.ListAttachedClustersResponse(), + metadata, + ) client.list_attached_clusters( request, @@ -7776,6 +7866,7 @@ def test_list_attached_clusters_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_attached_cluster_rest_bad_request( @@ -7860,10 +7951,14 @@ def test_delete_attached_cluster_rest_interceptors(null_interceptor): ), mock.patch.object( transports.AttachedClustersRestInterceptor, "post_delete_attached_cluster" ) as post, mock.patch.object( + transports.AttachedClustersRestInterceptor, + "post_delete_attached_cluster_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AttachedClustersRestInterceptor, "pre_delete_attached_cluster" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = attached_service.DeleteAttachedClusterRequest.pb( attached_service.DeleteAttachedClusterRequest() ) @@ -7887,6 +7982,7 @@ def test_delete_attached_cluster_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.delete_attached_cluster( request, @@ -7898,6 +7994,7 @@ def test_delete_attached_cluster_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_attached_server_config_rest_bad_request( @@ -7982,10 +8079,14 @@ def test_get_attached_server_config_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AttachedClustersRestInterceptor, "post_get_attached_server_config" ) as post, mock.patch.object( + transports.AttachedClustersRestInterceptor, + "post_get_attached_server_config_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AttachedClustersRestInterceptor, "pre_get_attached_server_config" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = attached_service.GetAttachedServerConfigRequest.pb( attached_service.GetAttachedServerConfigRequest() ) @@ -8011,6 +8112,10 @@ def test_get_attached_server_config_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = attached_resources.AttachedServerConfig() + post_with_metadata.return_value = ( + attached_resources.AttachedServerConfig(), + metadata, + ) client.get_attached_server_config( request, @@ -8022,6 +8127,7 @@ def 
test_get_attached_server_config_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_generate_attached_cluster_install_manifest_rest_bad_request( @@ -8113,11 +8219,15 @@ def test_generate_attached_cluster_install_manifest_rest_interceptors(null_inter transports.AttachedClustersRestInterceptor, "post_generate_attached_cluster_install_manifest", ) as post, mock.patch.object( + transports.AttachedClustersRestInterceptor, + "post_generate_attached_cluster_install_manifest_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AttachedClustersRestInterceptor, "pre_generate_attached_cluster_install_manifest", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = attached_service.GenerateAttachedClusterInstallManifestRequest.pb( attached_service.GenerateAttachedClusterInstallManifestRequest() ) @@ -8147,6 +8257,10 @@ def test_generate_attached_cluster_install_manifest_rest_interceptors(null_inter post.return_value = ( attached_service.GenerateAttachedClusterInstallManifestResponse() ) + post_with_metadata.return_value = ( + attached_service.GenerateAttachedClusterInstallManifestResponse(), + metadata, + ) client.generate_attached_cluster_install_manifest( request, @@ -8158,6 +8272,7 @@ def test_generate_attached_cluster_install_manifest_rest_interceptors(null_inter pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_generate_attached_cluster_agent_token_rest_bad_request( @@ -8255,11 +8370,15 @@ def test_generate_attached_cluster_agent_token_rest_interceptors(null_intercepto transports.AttachedClustersRestInterceptor, "post_generate_attached_cluster_agent_token", ) as post, mock.patch.object( + transports.AttachedClustersRestInterceptor, + "post_generate_attached_cluster_agent_token_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AttachedClustersRestInterceptor, "pre_generate_attached_cluster_agent_token", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = attached_service.GenerateAttachedClusterAgentTokenRequest.pb( attached_service.GenerateAttachedClusterAgentTokenRequest() ) @@ -8287,6 +8406,10 @@ def test_generate_attached_cluster_agent_token_rest_interceptors(null_intercepto ] pre.return_value = request, metadata post.return_value = attached_service.GenerateAttachedClusterAgentTokenResponse() + post_with_metadata.return_value = ( + attached_service.GenerateAttachedClusterAgentTokenResponse(), + metadata, + ) client.generate_attached_cluster_agent_token( request, @@ -8298,6 +8421,7 @@ def test_generate_attached_cluster_agent_token_rest_interceptors(null_intercepto pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_cancel_operation_rest_bad_request( diff --git a/packages/google-cloud-gke-multicloud/tests/unit/gapic/gke_multicloud_v1/test_aws_clusters.py b/packages/google-cloud-gke-multicloud/tests/unit/gapic/gke_multicloud_v1/test_aws_clusters.py index 390452b49155..0480f36c41d1 100644 --- a/packages/google-cloud-gke-multicloud/tests/unit/gapic/gke_multicloud_v1/test_aws_clusters.py +++ b/packages/google-cloud-gke-multicloud/tests/unit/gapic/gke_multicloud_v1/test_aws_clusters.py @@ -77,6 +77,13 @@ common_resources, ) +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": 
"service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -316,6 +323,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = AwsClustersClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = AwsClustersClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -10849,7 +10899,10 @@ def test_create_aws_cluster_rest_call_success(request_type): "fleet": {"project": "project_value", "membership": "membership_value"}, "logging_config": {"component_config": {"enable_components": [1]}}, "errors": [{"message": "message_value"}], - "monitoring_config": {"managed_prometheus_config": {"enabled": True}}, + "monitoring_config": { + "managed_prometheus_config": {"enabled": True}, + "cloud_monitoring_config": {"enabled": True}, + }, "binary_authorization": {"evaluation_mode": 1}, } # The version of a generated dependency at test runtime may differ from the version used during generation. 
@@ -10958,10 +11011,13 @@ def test_create_aws_cluster_rest_interceptors(null_interceptor): ), mock.patch.object( transports.AwsClustersRestInterceptor, "post_create_aws_cluster" ) as post, mock.patch.object( + transports.AwsClustersRestInterceptor, "post_create_aws_cluster_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AwsClustersRestInterceptor, "pre_create_aws_cluster" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = aws_service.CreateAwsClusterRequest.pb( aws_service.CreateAwsClusterRequest() ) @@ -10985,6 +11041,7 @@ def test_create_aws_cluster_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_aws_cluster( request, @@ -10996,6 +11053,7 @@ def test_create_aws_cluster_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_aws_cluster_rest_bad_request( @@ -11113,7 +11171,10 @@ def test_update_aws_cluster_rest_call_success(request_type): "fleet": {"project": "project_value", "membership": "membership_value"}, "logging_config": {"component_config": {"enable_components": [1]}}, "errors": [{"message": "message_value"}], - "monitoring_config": {"managed_prometheus_config": {"enabled": True}}, + "monitoring_config": { + "managed_prometheus_config": {"enabled": True}, + "cloud_monitoring_config": {"enabled": True}, + }, "binary_authorization": {"evaluation_mode": 1}, } # The version of a generated dependency at test runtime may differ from the version used during generation. @@ -11222,10 +11283,13 @@ def test_update_aws_cluster_rest_interceptors(null_interceptor): ), mock.patch.object( transports.AwsClustersRestInterceptor, "post_update_aws_cluster" ) as post, mock.patch.object( + transports.AwsClustersRestInterceptor, "post_update_aws_cluster_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AwsClustersRestInterceptor, "pre_update_aws_cluster" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = aws_service.UpdateAwsClusterRequest.pb( aws_service.UpdateAwsClusterRequest() ) @@ -11249,6 +11313,7 @@ def test_update_aws_cluster_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.update_aws_cluster( request, @@ -11260,6 +11325,7 @@ def test_update_aws_cluster_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_aws_cluster_rest_bad_request( @@ -11360,10 +11426,13 @@ def test_get_aws_cluster_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AwsClustersRestInterceptor, "post_get_aws_cluster" ) as post, mock.patch.object( + transports.AwsClustersRestInterceptor, "post_get_aws_cluster_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AwsClustersRestInterceptor, "pre_get_aws_cluster" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = aws_service.GetAwsClusterRequest.pb( aws_service.GetAwsClusterRequest() ) @@ -11387,6 +11456,7 @@ def test_get_aws_cluster_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata 
post.return_value = aws_resources.AwsCluster() + post_with_metadata.return_value = aws_resources.AwsCluster(), metadata client.get_aws_cluster( request, @@ -11398,6 +11468,7 @@ def test_get_aws_cluster_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_aws_clusters_rest_bad_request( @@ -11482,10 +11553,13 @@ def test_list_aws_clusters_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AwsClustersRestInterceptor, "post_list_aws_clusters" ) as post, mock.patch.object( + transports.AwsClustersRestInterceptor, "post_list_aws_clusters_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AwsClustersRestInterceptor, "pre_list_aws_clusters" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = aws_service.ListAwsClustersRequest.pb( aws_service.ListAwsClustersRequest() ) @@ -11511,6 +11585,10 @@ def test_list_aws_clusters_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = aws_service.ListAwsClustersResponse() + post_with_metadata.return_value = ( + aws_service.ListAwsClustersResponse(), + metadata, + ) client.list_aws_clusters( request, @@ -11522,6 +11600,7 @@ def test_list_aws_clusters_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_aws_cluster_rest_bad_request( @@ -11602,10 +11681,13 @@ def test_delete_aws_cluster_rest_interceptors(null_interceptor): ), mock.patch.object( transports.AwsClustersRestInterceptor, "post_delete_aws_cluster" ) as post, mock.patch.object( + transports.AwsClustersRestInterceptor, "post_delete_aws_cluster_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AwsClustersRestInterceptor, "pre_delete_aws_cluster" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = aws_service.DeleteAwsClusterRequest.pb( aws_service.DeleteAwsClusterRequest() ) @@ -11629,6 +11711,7 @@ def test_delete_aws_cluster_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.delete_aws_cluster( request, @@ -11640,6 +11723,7 @@ def test_delete_aws_cluster_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_generate_aws_cluster_agent_token_rest_bad_request( @@ -11732,10 +11816,14 @@ def test_generate_aws_cluster_agent_token_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AwsClustersRestInterceptor, "post_generate_aws_cluster_agent_token" ) as post, mock.patch.object( + transports.AwsClustersRestInterceptor, + "post_generate_aws_cluster_agent_token_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AwsClustersRestInterceptor, "pre_generate_aws_cluster_agent_token" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = aws_service.GenerateAwsClusterAgentTokenRequest.pb( aws_service.GenerateAwsClusterAgentTokenRequest() ) @@ -11761,6 +11849,10 @@ def test_generate_aws_cluster_agent_token_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = aws_service.GenerateAwsClusterAgentTokenResponse() + 
post_with_metadata.return_value = ( + aws_service.GenerateAwsClusterAgentTokenResponse(), + metadata, + ) client.generate_aws_cluster_agent_token( request, @@ -11772,6 +11864,7 @@ def test_generate_aws_cluster_agent_token_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_generate_aws_access_token_rest_bad_request( @@ -11860,10 +11953,14 @@ def test_generate_aws_access_token_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AwsClustersRestInterceptor, "post_generate_aws_access_token" ) as post, mock.patch.object( + transports.AwsClustersRestInterceptor, + "post_generate_aws_access_token_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AwsClustersRestInterceptor, "pre_generate_aws_access_token" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = aws_service.GenerateAwsAccessTokenRequest.pb( aws_service.GenerateAwsAccessTokenRequest() ) @@ -11889,6 +11986,10 @@ def test_generate_aws_access_token_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = aws_service.GenerateAwsAccessTokenResponse() + post_with_metadata.return_value = ( + aws_service.GenerateAwsAccessTokenResponse(), + metadata, + ) client.generate_aws_access_token( request, @@ -11900,6 +12001,7 @@ def test_generate_aws_access_token_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_aws_node_pool_rest_bad_request( @@ -12106,10 +12208,13 @@ def test_create_aws_node_pool_rest_interceptors(null_interceptor): ), mock.patch.object( transports.AwsClustersRestInterceptor, "post_create_aws_node_pool" ) as post, mock.patch.object( + transports.AwsClustersRestInterceptor, "post_create_aws_node_pool_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AwsClustersRestInterceptor, "pre_create_aws_node_pool" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = aws_service.CreateAwsNodePoolRequest.pb( aws_service.CreateAwsNodePoolRequest() ) @@ -12133,6 +12238,7 @@ def test_create_aws_node_pool_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_aws_node_pool( request, @@ -12144,6 +12250,7 @@ def test_create_aws_node_pool_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_aws_node_pool_rest_bad_request( @@ -12358,10 +12465,13 @@ def test_update_aws_node_pool_rest_interceptors(null_interceptor): ), mock.patch.object( transports.AwsClustersRestInterceptor, "post_update_aws_node_pool" ) as post, mock.patch.object( + transports.AwsClustersRestInterceptor, "post_update_aws_node_pool_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AwsClustersRestInterceptor, "pre_update_aws_node_pool" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = aws_service.UpdateAwsNodePoolRequest.pb( aws_service.UpdateAwsNodePoolRequest() ) @@ -12385,6 +12495,7 @@ def test_update_aws_node_pool_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + 
post_with_metadata.return_value = operations_pb2.Operation(), metadata client.update_aws_node_pool( request, @@ -12396,6 +12507,7 @@ def test_update_aws_node_pool_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_rollback_aws_node_pool_update_rest_bad_request( @@ -12480,10 +12592,14 @@ def test_rollback_aws_node_pool_update_rest_interceptors(null_interceptor): ), mock.patch.object( transports.AwsClustersRestInterceptor, "post_rollback_aws_node_pool_update" ) as post, mock.patch.object( + transports.AwsClustersRestInterceptor, + "post_rollback_aws_node_pool_update_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AwsClustersRestInterceptor, "pre_rollback_aws_node_pool_update" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = aws_service.RollbackAwsNodePoolUpdateRequest.pb( aws_service.RollbackAwsNodePoolUpdateRequest() ) @@ -12507,6 +12623,7 @@ def test_rollback_aws_node_pool_update_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.rollback_aws_node_pool_update( request, @@ -12518,6 +12635,7 @@ def test_rollback_aws_node_pool_update_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_aws_node_pool_rest_bad_request( @@ -12618,10 +12736,13 @@ def test_get_aws_node_pool_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AwsClustersRestInterceptor, "post_get_aws_node_pool" ) as post, mock.patch.object( + transports.AwsClustersRestInterceptor, "post_get_aws_node_pool_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AwsClustersRestInterceptor, "pre_get_aws_node_pool" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = aws_service.GetAwsNodePoolRequest.pb( aws_service.GetAwsNodePoolRequest() ) @@ -12645,6 +12766,7 @@ def test_get_aws_node_pool_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = aws_resources.AwsNodePool() + post_with_metadata.return_value = aws_resources.AwsNodePool(), metadata client.get_aws_node_pool( request, @@ -12656,6 +12778,7 @@ def test_get_aws_node_pool_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_aws_node_pools_rest_bad_request( @@ -12740,10 +12863,13 @@ def test_list_aws_node_pools_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AwsClustersRestInterceptor, "post_list_aws_node_pools" ) as post, mock.patch.object( + transports.AwsClustersRestInterceptor, "post_list_aws_node_pools_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AwsClustersRestInterceptor, "pre_list_aws_node_pools" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = aws_service.ListAwsNodePoolsRequest.pb( aws_service.ListAwsNodePoolsRequest() ) @@ -12769,6 +12895,10 @@ def test_list_aws_node_pools_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = aws_service.ListAwsNodePoolsResponse() + post_with_metadata.return_value = ( + aws_service.ListAwsNodePoolsResponse(), + metadata, + ) 
client.list_aws_node_pools( request, @@ -12780,6 +12910,7 @@ def test_list_aws_node_pools_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_aws_node_pool_rest_bad_request( @@ -12864,10 +12995,13 @@ def test_delete_aws_node_pool_rest_interceptors(null_interceptor): ), mock.patch.object( transports.AwsClustersRestInterceptor, "post_delete_aws_node_pool" ) as post, mock.patch.object( + transports.AwsClustersRestInterceptor, "post_delete_aws_node_pool_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AwsClustersRestInterceptor, "pre_delete_aws_node_pool" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = aws_service.DeleteAwsNodePoolRequest.pb( aws_service.DeleteAwsNodePoolRequest() ) @@ -12891,6 +13025,7 @@ def test_delete_aws_node_pool_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.delete_aws_node_pool( request, @@ -12902,6 +13037,7 @@ def test_delete_aws_node_pool_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_aws_open_id_config_rest_bad_request( @@ -13006,10 +13142,14 @@ def test_get_aws_open_id_config_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AwsClustersRestInterceptor, "post_get_aws_open_id_config" ) as post, mock.patch.object( + transports.AwsClustersRestInterceptor, + "post_get_aws_open_id_config_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AwsClustersRestInterceptor, "pre_get_aws_open_id_config" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = aws_service.GetAwsOpenIdConfigRequest.pb( aws_service.GetAwsOpenIdConfigRequest() ) @@ -13035,6 +13175,7 @@ def test_get_aws_open_id_config_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = aws_resources.AwsOpenIdConfig() + post_with_metadata.return_value = aws_resources.AwsOpenIdConfig(), metadata client.get_aws_open_id_config( request, @@ -13046,6 +13187,7 @@ def test_get_aws_open_id_config_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_aws_json_web_keys_rest_bad_request( @@ -13131,10 +13273,14 @@ def test_get_aws_json_web_keys_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AwsClustersRestInterceptor, "post_get_aws_json_web_keys" ) as post, mock.patch.object( + transports.AwsClustersRestInterceptor, + "post_get_aws_json_web_keys_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AwsClustersRestInterceptor, "pre_get_aws_json_web_keys" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = aws_service.GetAwsJsonWebKeysRequest.pb( aws_service.GetAwsJsonWebKeysRequest() ) @@ -13160,6 +13306,7 @@ def test_get_aws_json_web_keys_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = aws_resources.AwsJsonWebKeys() + post_with_metadata.return_value = aws_resources.AwsJsonWebKeys(), metadata client.get_aws_json_web_keys( request, @@ -13171,6 +13318,7 @@ def 
test_get_aws_json_web_keys_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_aws_server_config_rest_bad_request( @@ -13257,10 +13405,14 @@ def test_get_aws_server_config_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AwsClustersRestInterceptor, "post_get_aws_server_config" ) as post, mock.patch.object( + transports.AwsClustersRestInterceptor, + "post_get_aws_server_config_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AwsClustersRestInterceptor, "pre_get_aws_server_config" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = aws_service.GetAwsServerConfigRequest.pb( aws_service.GetAwsServerConfigRequest() ) @@ -13286,6 +13438,7 @@ def test_get_aws_server_config_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = aws_resources.AwsServerConfig() + post_with_metadata.return_value = aws_resources.AwsServerConfig(), metadata client.get_aws_server_config( request, @@ -13297,6 +13450,7 @@ def test_get_aws_server_config_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_cancel_operation_rest_bad_request( diff --git a/packages/google-cloud-gke-multicloud/tests/unit/gapic/gke_multicloud_v1/test_azure_clusters.py b/packages/google-cloud-gke-multicloud/tests/unit/gapic/gke_multicloud_v1/test_azure_clusters.py index bea7ba54bd5c..4f9e2d40831f 100644 --- a/packages/google-cloud-gke-multicloud/tests/unit/gapic/gke_multicloud_v1/test_azure_clusters.py +++ b/packages/google-cloud-gke-multicloud/tests/unit/gapic/gke_multicloud_v1/test_azure_clusters.py @@ -77,6 +77,13 @@ common_resources, ) +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -331,6 +338,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
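The test added below is the AzureClustersClient variant of the same credential-info check: for 401/403/404 responses, a credential that exposes get_cred_info() has its info JSON-encoded and appended to the error details, while 500s and credentials without that hook leave the details untouched. From calling code that looks roughly like the following sketch (placeholder resource name; the detail payload is whatever get_cred_info() returned, serialized with json.dumps).

# Illustrative sketch -- not part of this change.
import json

from google.api_core import exceptions as core_exceptions
from google.cloud import gke_multicloud_v1

client = gke_multicloud_v1.AzureClustersClient()
try:
    client.get_azure_cluster(
        name="projects/my-project/locations/us-west1/azureClusters/my-cluster"
    )
except core_exceptions.GoogleAPICallError as exc:
    for detail in exc.details:
        # Credential info, when appended, arrives as a JSON string.
        if isinstance(detail, str) and detail.startswith("{"):
            print("credential info:", json.loads(detail))
    raise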
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = AzureClustersClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = AzureClustersClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -13372,10 +13422,14 @@ def test_create_azure_client_rest_interceptors(null_interceptor): ), mock.patch.object( transports.AzureClustersRestInterceptor, "post_create_azure_client" ) as post, mock.patch.object( + transports.AzureClustersRestInterceptor, + "post_create_azure_client_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AzureClustersRestInterceptor, "pre_create_azure_client" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = azure_service.CreateAzureClientRequest.pb( azure_service.CreateAzureClientRequest() ) @@ -13399,6 +13453,7 @@ def test_create_azure_client_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_azure_client( request, @@ -13410,6 +13465,7 @@ def test_create_azure_client_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_azure_client_rest_bad_request( @@ -13504,10 +13560,13 @@ def test_get_azure_client_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AzureClustersRestInterceptor, "post_get_azure_client" ) as post, mock.patch.object( + transports.AzureClustersRestInterceptor, "post_get_azure_client_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AzureClustersRestInterceptor, "pre_get_azure_client" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = azure_service.GetAzureClientRequest.pb( azure_service.GetAzureClientRequest() ) @@ -13533,6 +13592,7 @@ def test_get_azure_client_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = azure_resources.AzureClient() + post_with_metadata.return_value = azure_resources.AzureClient(), metadata client.get_azure_client( request, @@ -13544,6 +13604,7 @@ def test_get_azure_client_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + 
post_with_metadata.assert_called_once() def test_list_azure_clients_rest_bad_request( @@ -13628,10 +13689,13 @@ def test_list_azure_clients_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AzureClustersRestInterceptor, "post_list_azure_clients" ) as post, mock.patch.object( + transports.AzureClustersRestInterceptor, "post_list_azure_clients_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AzureClustersRestInterceptor, "pre_list_azure_clients" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = azure_service.ListAzureClientsRequest.pb( azure_service.ListAzureClientsRequest() ) @@ -13657,6 +13721,10 @@ def test_list_azure_clients_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = azure_service.ListAzureClientsResponse() + post_with_metadata.return_value = ( + azure_service.ListAzureClientsResponse(), + metadata, + ) client.list_azure_clients( request, @@ -13668,6 +13736,7 @@ def test_list_azure_clients_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_azure_client_rest_bad_request( @@ -13748,10 +13817,14 @@ def test_delete_azure_client_rest_interceptors(null_interceptor): ), mock.patch.object( transports.AzureClustersRestInterceptor, "post_delete_azure_client" ) as post, mock.patch.object( + transports.AzureClustersRestInterceptor, + "post_delete_azure_client_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AzureClustersRestInterceptor, "pre_delete_azure_client" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = azure_service.DeleteAzureClientRequest.pb( azure_service.DeleteAzureClientRequest() ) @@ -13775,6 +13848,7 @@ def test_delete_azure_client_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.delete_azure_client( request, @@ -13786,6 +13860,7 @@ def test_delete_azure_client_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_azure_cluster_rest_bad_request( @@ -13899,7 +13974,10 @@ def test_create_azure_cluster_rest_call_success(request_type): }, "logging_config": {"component_config": {"enable_components": [1]}}, "errors": [{"message": "message_value"}], - "monitoring_config": {"managed_prometheus_config": {"enabled": True}}, + "monitoring_config": { + "managed_prometheus_config": {"enabled": True}, + "cloud_monitoring_config": {"enabled": True}, + }, } # The version of a generated dependency at test runtime may differ from the version used during generation. 
# Delete any fields which are not present in the current runtime dependency @@ -14007,10 +14085,14 @@ def test_create_azure_cluster_rest_interceptors(null_interceptor): ), mock.patch.object( transports.AzureClustersRestInterceptor, "post_create_azure_cluster" ) as post, mock.patch.object( + transports.AzureClustersRestInterceptor, + "post_create_azure_cluster_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AzureClustersRestInterceptor, "pre_create_azure_cluster" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = azure_service.CreateAzureClusterRequest.pb( azure_service.CreateAzureClusterRequest() ) @@ -14034,6 +14116,7 @@ def test_create_azure_cluster_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_azure_cluster( request, @@ -14045,6 +14128,7 @@ def test_create_azure_cluster_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_azure_cluster_rest_bad_request( @@ -14166,7 +14250,10 @@ def test_update_azure_cluster_rest_call_success(request_type): }, "logging_config": {"component_config": {"enable_components": [1]}}, "errors": [{"message": "message_value"}], - "monitoring_config": {"managed_prometheus_config": {"enabled": True}}, + "monitoring_config": { + "managed_prometheus_config": {"enabled": True}, + "cloud_monitoring_config": {"enabled": True}, + }, } # The version of a generated dependency at test runtime may differ from the version used during generation. # Delete any fields which are not present in the current runtime dependency @@ -14274,10 +14361,14 @@ def test_update_azure_cluster_rest_interceptors(null_interceptor): ), mock.patch.object( transports.AzureClustersRestInterceptor, "post_update_azure_cluster" ) as post, mock.patch.object( + transports.AzureClustersRestInterceptor, + "post_update_azure_cluster_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AzureClustersRestInterceptor, "pre_update_azure_cluster" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = azure_service.UpdateAzureClusterRequest.pb( azure_service.UpdateAzureClusterRequest() ) @@ -14301,6 +14392,7 @@ def test_update_azure_cluster_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.update_azure_cluster( request, @@ -14312,6 +14404,7 @@ def test_update_azure_cluster_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_azure_cluster_rest_bad_request( @@ -14416,10 +14509,13 @@ def test_get_azure_cluster_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AzureClustersRestInterceptor, "post_get_azure_cluster" ) as post, mock.patch.object( + transports.AzureClustersRestInterceptor, "post_get_azure_cluster_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AzureClustersRestInterceptor, "pre_get_azure_cluster" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = azure_service.GetAzureClusterRequest.pb( azure_service.GetAzureClusterRequest() ) @@ 
-14445,6 +14541,7 @@ def test_get_azure_cluster_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = azure_resources.AzureCluster() + post_with_metadata.return_value = azure_resources.AzureCluster(), metadata client.get_azure_cluster( request, @@ -14456,6 +14553,7 @@ def test_get_azure_cluster_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_azure_clusters_rest_bad_request( @@ -14540,10 +14638,14 @@ def test_list_azure_clusters_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AzureClustersRestInterceptor, "post_list_azure_clusters" ) as post, mock.patch.object( + transports.AzureClustersRestInterceptor, + "post_list_azure_clusters_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AzureClustersRestInterceptor, "pre_list_azure_clusters" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = azure_service.ListAzureClustersRequest.pb( azure_service.ListAzureClustersRequest() ) @@ -14569,6 +14671,10 @@ def test_list_azure_clusters_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = azure_service.ListAzureClustersResponse() + post_with_metadata.return_value = ( + azure_service.ListAzureClustersResponse(), + metadata, + ) client.list_azure_clusters( request, @@ -14580,6 +14686,7 @@ def test_list_azure_clusters_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_azure_cluster_rest_bad_request( @@ -14660,10 +14767,14 @@ def test_delete_azure_cluster_rest_interceptors(null_interceptor): ), mock.patch.object( transports.AzureClustersRestInterceptor, "post_delete_azure_cluster" ) as post, mock.patch.object( + transports.AzureClustersRestInterceptor, + "post_delete_azure_cluster_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AzureClustersRestInterceptor, "pre_delete_azure_cluster" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = azure_service.DeleteAzureClusterRequest.pb( azure_service.DeleteAzureClusterRequest() ) @@ -14687,6 +14798,7 @@ def test_delete_azure_cluster_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.delete_azure_cluster( request, @@ -14698,6 +14810,7 @@ def test_delete_azure_cluster_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_generate_azure_cluster_agent_token_rest_bad_request( @@ -14793,11 +14906,15 @@ def test_generate_azure_cluster_agent_token_rest_interceptors(null_interceptor): transports.AzureClustersRestInterceptor, "post_generate_azure_cluster_agent_token", ) as post, mock.patch.object( + transports.AzureClustersRestInterceptor, + "post_generate_azure_cluster_agent_token_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AzureClustersRestInterceptor, "pre_generate_azure_cluster_agent_token", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = azure_service.GenerateAzureClusterAgentTokenRequest.pb( azure_service.GenerateAzureClusterAgentTokenRequest() ) @@ -14823,6 +14940,10 @@ def 
test_generate_azure_cluster_agent_token_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = azure_service.GenerateAzureClusterAgentTokenResponse() + post_with_metadata.return_value = ( + azure_service.GenerateAzureClusterAgentTokenResponse(), + metadata, + ) client.generate_azure_cluster_agent_token( request, @@ -14834,6 +14955,7 @@ def test_generate_azure_cluster_agent_token_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_generate_azure_access_token_rest_bad_request( @@ -14922,10 +15044,14 @@ def test_generate_azure_access_token_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AzureClustersRestInterceptor, "post_generate_azure_access_token" ) as post, mock.patch.object( + transports.AzureClustersRestInterceptor, + "post_generate_azure_access_token_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AzureClustersRestInterceptor, "pre_generate_azure_access_token" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = azure_service.GenerateAzureAccessTokenRequest.pb( azure_service.GenerateAzureAccessTokenRequest() ) @@ -14951,6 +15077,10 @@ def test_generate_azure_access_token_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = azure_service.GenerateAzureAccessTokenResponse() + post_with_metadata.return_value = ( + azure_service.GenerateAzureAccessTokenResponse(), + metadata, + ) client.generate_azure_access_token( request, @@ -14962,6 +15092,7 @@ def test_generate_azure_access_token_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_azure_node_pool_rest_bad_request( @@ -15147,10 +15278,14 @@ def test_create_azure_node_pool_rest_interceptors(null_interceptor): ), mock.patch.object( transports.AzureClustersRestInterceptor, "post_create_azure_node_pool" ) as post, mock.patch.object( + transports.AzureClustersRestInterceptor, + "post_create_azure_node_pool_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AzureClustersRestInterceptor, "pre_create_azure_node_pool" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = azure_service.CreateAzureNodePoolRequest.pb( azure_service.CreateAzureNodePoolRequest() ) @@ -15174,6 +15309,7 @@ def test_create_azure_node_pool_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_azure_node_pool( request, @@ -15185,6 +15321,7 @@ def test_create_azure_node_pool_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_azure_node_pool_rest_bad_request( @@ -15374,10 +15511,14 @@ def test_update_azure_node_pool_rest_interceptors(null_interceptor): ), mock.patch.object( transports.AzureClustersRestInterceptor, "post_update_azure_node_pool" ) as post, mock.patch.object( + transports.AzureClustersRestInterceptor, + "post_update_azure_node_pool_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AzureClustersRestInterceptor, "pre_update_azure_node_pool" ) as pre: pre.assert_not_called() post.assert_not_called() + 
post_with_metadata.assert_not_called() pb_message = azure_service.UpdateAzureNodePoolRequest.pb( azure_service.UpdateAzureNodePoolRequest() ) @@ -15401,6 +15542,7 @@ def test_update_azure_node_pool_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.update_azure_node_pool( request, @@ -15412,6 +15554,7 @@ def test_update_azure_node_pool_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_azure_node_pool_rest_bad_request( @@ -15514,10 +15657,14 @@ def test_get_azure_node_pool_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AzureClustersRestInterceptor, "post_get_azure_node_pool" ) as post, mock.patch.object( + transports.AzureClustersRestInterceptor, + "post_get_azure_node_pool_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AzureClustersRestInterceptor, "pre_get_azure_node_pool" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = azure_service.GetAzureNodePoolRequest.pb( azure_service.GetAzureNodePoolRequest() ) @@ -15543,6 +15690,7 @@ def test_get_azure_node_pool_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = azure_resources.AzureNodePool() + post_with_metadata.return_value = azure_resources.AzureNodePool(), metadata client.get_azure_node_pool( request, @@ -15554,6 +15702,7 @@ def test_get_azure_node_pool_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_azure_node_pools_rest_bad_request( @@ -15642,10 +15791,14 @@ def test_list_azure_node_pools_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AzureClustersRestInterceptor, "post_list_azure_node_pools" ) as post, mock.patch.object( + transports.AzureClustersRestInterceptor, + "post_list_azure_node_pools_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AzureClustersRestInterceptor, "pre_list_azure_node_pools" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = azure_service.ListAzureNodePoolsRequest.pb( azure_service.ListAzureNodePoolsRequest() ) @@ -15671,6 +15824,10 @@ def test_list_azure_node_pools_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = azure_service.ListAzureNodePoolsResponse() + post_with_metadata.return_value = ( + azure_service.ListAzureNodePoolsResponse(), + metadata, + ) client.list_azure_node_pools( request, @@ -15682,6 +15839,7 @@ def test_list_azure_node_pools_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_azure_node_pool_rest_bad_request( @@ -15766,10 +15924,14 @@ def test_delete_azure_node_pool_rest_interceptors(null_interceptor): ), mock.patch.object( transports.AzureClustersRestInterceptor, "post_delete_azure_node_pool" ) as post, mock.patch.object( + transports.AzureClustersRestInterceptor, + "post_delete_azure_node_pool_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AzureClustersRestInterceptor, "pre_delete_azure_node_pool" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message 
= azure_service.DeleteAzureNodePoolRequest.pb( azure_service.DeleteAzureNodePoolRequest() ) @@ -15793,6 +15955,7 @@ def test_delete_azure_node_pool_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.delete_azure_node_pool( request, @@ -15804,6 +15967,7 @@ def test_delete_azure_node_pool_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_azure_open_id_config_rest_bad_request( @@ -15908,10 +16072,14 @@ def test_get_azure_open_id_config_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AzureClustersRestInterceptor, "post_get_azure_open_id_config" ) as post, mock.patch.object( + transports.AzureClustersRestInterceptor, + "post_get_azure_open_id_config_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AzureClustersRestInterceptor, "pre_get_azure_open_id_config" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = azure_service.GetAzureOpenIdConfigRequest.pb( azure_service.GetAzureOpenIdConfigRequest() ) @@ -15937,6 +16105,7 @@ def test_get_azure_open_id_config_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = azure_resources.AzureOpenIdConfig() + post_with_metadata.return_value = azure_resources.AzureOpenIdConfig(), metadata client.get_azure_open_id_config( request, @@ -15948,6 +16117,7 @@ def test_get_azure_open_id_config_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_azure_json_web_keys_rest_bad_request( @@ -16033,10 +16203,14 @@ def test_get_azure_json_web_keys_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AzureClustersRestInterceptor, "post_get_azure_json_web_keys" ) as post, mock.patch.object( + transports.AzureClustersRestInterceptor, + "post_get_azure_json_web_keys_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AzureClustersRestInterceptor, "pre_get_azure_json_web_keys" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = azure_service.GetAzureJsonWebKeysRequest.pb( azure_service.GetAzureJsonWebKeysRequest() ) @@ -16062,6 +16236,7 @@ def test_get_azure_json_web_keys_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = azure_resources.AzureJsonWebKeys() + post_with_metadata.return_value = azure_resources.AzureJsonWebKeys(), metadata client.get_azure_json_web_keys( request, @@ -16073,6 +16248,7 @@ def test_get_azure_json_web_keys_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_azure_server_config_rest_bad_request( @@ -16159,10 +16335,14 @@ def test_get_azure_server_config_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AzureClustersRestInterceptor, "post_get_azure_server_config" ) as post, mock.patch.object( + transports.AzureClustersRestInterceptor, + "post_get_azure_server_config_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AzureClustersRestInterceptor, "pre_get_azure_server_config" ) as pre: pre.assert_not_called() post.assert_not_called() + 
post_with_metadata.assert_not_called() pb_message = azure_service.GetAzureServerConfigRequest.pb( azure_service.GetAzureServerConfigRequest() ) @@ -16188,6 +16368,7 @@ def test_get_azure_server_config_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = azure_resources.AzureServerConfig() + post_with_metadata.return_value = azure_resources.AzureServerConfig(), metadata client.get_azure_server_config( request, @@ -16199,6 +16380,7 @@ def test_get_azure_server_config_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_cancel_operation_rest_bad_request( diff --git a/packages/google-cloud-gsuiteaddons/CHANGELOG.md b/packages/google-cloud-gsuiteaddons/CHANGELOG.md index 942db15ff451..deda8c352087 100644 --- a/packages/google-cloud-gsuiteaddons/CHANGELOG.md +++ b/packages/google-cloud-gsuiteaddons/CHANGELOG.md @@ -1,5 +1,20 @@ # Changelog +## [0.3.15](https://github.com/googleapis/google-cloud-python/compare/google-cloud-gsuiteaddons-v0.3.14...google-cloud-gsuiteaddons-v0.3.15) (2025-02-12) + + +### Features + +* Add REST Interceptors which support reading metadata ([908d742](https://github.com/googleapis/google-cloud-python/commit/908d7421a4adadd7407df7ec2a25e25688ff180f)) +* Add support for reading selective GAPIC generation methods from service YAML ([908d742](https://github.com/googleapis/google-cloud-python/commit/908d7421a4adadd7407df7ec2a25e25688ff180f)) + +## [0.3.14](https://github.com/googleapis/google-cloud-python/compare/google-cloud-gsuiteaddons-v0.3.13...google-cloud-gsuiteaddons-v0.3.14) (2025-01-27) + + +### Documentation + +* [google-cloud-gsuiteaddons] Minor documentation edits ([#13463](https://github.com/googleapis/google-cloud-python/issues/13463)) ([a2b6d21](https://github.com/googleapis/google-cloud-python/commit/a2b6d219070f85878ad0eac626cca565789d0764)) + ## [0.3.13](https://github.com/googleapis/google-cloud-python/compare/google-cloud-gsuiteaddons-v0.3.12...google-cloud-gsuiteaddons-v0.3.13) (2024-12-12) diff --git a/packages/google-cloud-gsuiteaddons/google/cloud/gsuiteaddons/gapic_version.py b/packages/google-cloud-gsuiteaddons/google/cloud/gsuiteaddons/gapic_version.py index fb3463bbb3c2..7d28791e7569 100644 --- a/packages/google-cloud-gsuiteaddons/google/cloud/gsuiteaddons/gapic_version.py +++ b/packages/google-cloud-gsuiteaddons/google/cloud/gsuiteaddons/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.3.13" # {x-release-please-version} +__version__ = "0.3.15" # {x-release-please-version} diff --git a/packages/google-cloud-gsuiteaddons/google/cloud/gsuiteaddons_v1/gapic_version.py b/packages/google-cloud-gsuiteaddons/google/cloud/gsuiteaddons_v1/gapic_version.py index fb3463bbb3c2..7d28791e7569 100644 --- a/packages/google-cloud-gsuiteaddons/google/cloud/gsuiteaddons_v1/gapic_version.py +++ b/packages/google-cloud-gsuiteaddons/google/cloud/gsuiteaddons_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.3.13" # {x-release-please-version} +__version__ = "0.3.15" # {x-release-please-version} diff --git a/packages/google-cloud-gsuiteaddons/google/cloud/gsuiteaddons_v1/services/g_suite_add_ons/async_client.py b/packages/google-cloud-gsuiteaddons/google/cloud/gsuiteaddons_v1/services/g_suite_add_ons/async_client.py index 68b23a4a4dcd..0cff718f8da7 100644 --- a/packages/google-cloud-gsuiteaddons/google/cloud/gsuiteaddons_v1/services/g_suite_add_ons/async_client.py +++ b/packages/google-cloud-gsuiteaddons/google/cloud/gsuiteaddons_v1/services/g_suite_add_ons/async_client.py @@ -63,36 +63,36 @@ class GSuiteAddOnsAsyncClient: - """A service for managing Google Workspace Add-ons deployments. + """A service for managing Google Workspace add-ons deployments. - A Google Workspace Add-on is a third-party embedded component + A Google Workspace add-on is a third-party embedded component that can be installed in Google Workspace Applications like Gmail, Calendar, Drive, and the Google Docs, Sheets, and Slides - editors. Google Workspace Add-ons can display UI cards, receive + editors. Google Workspace add-ons can display UI cards, receive contextual information from the host application, and perform actions in the host application (See: https://developers.google.com/gsuite/add-ons/overview for more information). - A Google Workspace Add-on deployment resource specifies metadata + A Google Workspace add-on deployment resource specifies metadata about the add-on, including a specification of the entry points in the host application that trigger add-on executions (see: https://developers.google.com/gsuite/add-ons/concepts/gsuite-manifests). - Add-on deployments defined via the Google Workspace Add-ons API + Add-on deployments defined via the Google Workspace add-ons API define their entrypoints using HTTPS URLs (See: https://developers.google.com/gsuite/add-ons/guides/alternate-runtimes), - A Google Workspace Add-on deployment can be installed in + A Google Workspace add-on deployment can be installed in developer mode, which allows an add-on developer to test the experience an end-user would see when installing and running the add-on in their G Suite applications. When running in developer mode, more detailed error messages are exposed in the add-on UI to aid in debugging. - A Google Workspace Add-on deployment can be published to Google + A Google Workspace add-on deployment can be published to Google Workspace Marketplace, which allows other Google Workspace users to discover and install the add-on. See: @@ -366,11 +366,11 @@ async def sample_get_authorization(): Args: request (Optional[Union[google.cloud.gsuiteaddons_v1.types.GetAuthorizationRequest, dict]]): The request object. Request message to get Google - Workspace Add-ons authorization + Workspace add-ons authorization information. name (:class:`str`): Required. Name of the project for which to get the - Google Workspace Add-ons authorization information. + Google Workspace add-ons authorization information. Example: ``projects/my_project/authorization``. @@ -516,7 +516,7 @@ async def sample_create_deployment(): Returns: google.cloud.gsuiteaddons_v1.types.Deployment: - A Google Workspace Add-on deployment + A Google Workspace add-on deployment """ # Create or coerce a protobuf request object. 
# - Quick check: If we got a request object, we should *not* have @@ -626,7 +626,7 @@ async def sample_replace_deployment(): Returns: google.cloud.gsuiteaddons_v1.types.Deployment: - A Google Workspace Add-on deployment + A Google Workspace add-on deployment """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have @@ -736,7 +736,7 @@ async def sample_get_deployment(): Returns: google.cloud.gsuiteaddons_v1.types.Deployment: - A Google Workspace Add-on deployment + A Google Workspace add-on deployment """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have diff --git a/packages/google-cloud-gsuiteaddons/google/cloud/gsuiteaddons_v1/services/g_suite_add_ons/client.py b/packages/google-cloud-gsuiteaddons/google/cloud/gsuiteaddons_v1/services/g_suite_add_ons/client.py index f5f3d6f64111..33107560b1c0 100644 --- a/packages/google-cloud-gsuiteaddons/google/cloud/gsuiteaddons_v1/services/g_suite_add_ons/client.py +++ b/packages/google-cloud-gsuiteaddons/google/cloud/gsuiteaddons_v1/services/g_suite_add_ons/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -105,36 +107,36 @@ def get_transport_class( class GSuiteAddOnsClient(metaclass=GSuiteAddOnsClientMeta): - """A service for managing Google Workspace Add-ons deployments. + """A service for managing Google Workspace add-ons deployments. - A Google Workspace Add-on is a third-party embedded component + A Google Workspace add-on is a third-party embedded component that can be installed in Google Workspace Applications like Gmail, Calendar, Drive, and the Google Docs, Sheets, and Slides - editors. Google Workspace Add-ons can display UI cards, receive + editors. Google Workspace add-ons can display UI cards, receive contextual information from the host application, and perform actions in the host application (See: https://developers.google.com/gsuite/add-ons/overview for more information). - A Google Workspace Add-on deployment resource specifies metadata + A Google Workspace add-on deployment resource specifies metadata about the add-on, including a specification of the entry points in the host application that trigger add-on executions (see: https://developers.google.com/gsuite/add-ons/concepts/gsuite-manifests). - Add-on deployments defined via the Google Workspace Add-ons API + Add-on deployments defined via the Google Workspace add-ons API define their entrypoints using HTTPS URLs (See: https://developers.google.com/gsuite/add-ons/guides/alternate-runtimes), - A Google Workspace Add-on deployment can be installed in + A Google Workspace add-on deployment can be installed in developer mode, which allows an add-on developer to test the experience an end-user would see when installing and running the add-on in their G Suite applications. When running in developer mode, more detailed error messages are exposed in the add-on UI to aid in debugging. - A Google Workspace Add-on deployment can be published to Google + A Google Workspace add-on deployment can be published to Google Workspace Marketplace, which allows other Google Workspace users to discover and install the add-on. See: @@ -551,6 +553,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. 
return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -785,11 +814,11 @@ def sample_get_authorization(): Args: request (Union[google.cloud.gsuiteaddons_v1.types.GetAuthorizationRequest, dict]): The request object. Request message to get Google - Workspace Add-ons authorization + Workspace add-ons authorization information. name (str): Required. Name of the project for which to get the - Google Workspace Add-ons authorization information. + Google Workspace add-ons authorization information. Example: ``projects/my_project/authorization``. @@ -932,7 +961,7 @@ def sample_create_deployment(): Returns: google.cloud.gsuiteaddons_v1.types.Deployment: - A Google Workspace Add-on deployment + A Google Workspace add-on deployment """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have @@ -1039,7 +1068,7 @@ def sample_replace_deployment(): Returns: google.cloud.gsuiteaddons_v1.types.Deployment: - A Google Workspace Add-on deployment + A Google Workspace add-on deployment """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have @@ -1146,7 +1175,7 @@ def sample_get_deployment(): Returns: google.cloud.gsuiteaddons_v1.types.Deployment: - A Google Workspace Add-on deployment + A Google Workspace add-on deployment """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have diff --git a/packages/google-cloud-gsuiteaddons/google/cloud/gsuiteaddons_v1/services/g_suite_add_ons/transports/grpc.py b/packages/google-cloud-gsuiteaddons/google/cloud/gsuiteaddons_v1/services/g_suite_add_ons/transports/grpc.py index 53dc4e066b90..87dd98108297 100644 --- a/packages/google-cloud-gsuiteaddons/google/cloud/gsuiteaddons_v1/services/g_suite_add_ons/transports/grpc.py +++ b/packages/google-cloud-gsuiteaddons/google/cloud/gsuiteaddons_v1/services/g_suite_add_ons/transports/grpc.py @@ -112,36 +112,36 @@ def intercept_unary_unary(self, continuation, client_call_details, request): class GSuiteAddOnsGrpcTransport(GSuiteAddOnsTransport): """gRPC backend transport for GSuiteAddOns. - A service for managing Google Workspace Add-ons deployments. + A service for managing Google Workspace add-ons deployments. - A Google Workspace Add-on is a third-party embedded component + A Google Workspace add-on is a third-party embedded component that can be installed in Google Workspace Applications like Gmail, Calendar, Drive, and the Google Docs, Sheets, and Slides - editors. Google Workspace Add-ons can display UI cards, receive + editors. 
Google Workspace add-ons can display UI cards, receive contextual information from the host application, and perform actions in the host application (See: https://developers.google.com/gsuite/add-ons/overview for more information). - A Google Workspace Add-on deployment resource specifies metadata + A Google Workspace add-on deployment resource specifies metadata about the add-on, including a specification of the entry points in the host application that trigger add-on executions (see: https://developers.google.com/gsuite/add-ons/concepts/gsuite-manifests). - Add-on deployments defined via the Google Workspace Add-ons API + Add-on deployments defined via the Google Workspace add-ons API define their entrypoints using HTTPS URLs (See: https://developers.google.com/gsuite/add-ons/guides/alternate-runtimes), - A Google Workspace Add-on deployment can be installed in + A Google Workspace add-on deployment can be installed in developer mode, which allows an add-on developer to test the experience an end-user would see when installing and running the add-on in their G Suite applications. When running in developer mode, more detailed error messages are exposed in the add-on UI to aid in debugging. - A Google Workspace Add-on deployment can be published to Google + A Google Workspace add-on deployment can be published to Google Workspace Marketplace, which allows other Google Workspace users to discover and install the add-on. See: diff --git a/packages/google-cloud-gsuiteaddons/google/cloud/gsuiteaddons_v1/services/g_suite_add_ons/transports/grpc_asyncio.py b/packages/google-cloud-gsuiteaddons/google/cloud/gsuiteaddons_v1/services/g_suite_add_ons/transports/grpc_asyncio.py index b4c9d2fa9767..db0c2f97bcca 100644 --- a/packages/google-cloud-gsuiteaddons/google/cloud/gsuiteaddons_v1/services/g_suite_add_ons/transports/grpc_asyncio.py +++ b/packages/google-cloud-gsuiteaddons/google/cloud/gsuiteaddons_v1/services/g_suite_add_ons/transports/grpc_asyncio.py @@ -117,36 +117,36 @@ async def intercept_unary_unary(self, continuation, client_call_details, request class GSuiteAddOnsGrpcAsyncIOTransport(GSuiteAddOnsTransport): """gRPC AsyncIO backend transport for GSuiteAddOns. - A service for managing Google Workspace Add-ons deployments. + A service for managing Google Workspace add-ons deployments. - A Google Workspace Add-on is a third-party embedded component + A Google Workspace add-on is a third-party embedded component that can be installed in Google Workspace Applications like Gmail, Calendar, Drive, and the Google Docs, Sheets, and Slides - editors. Google Workspace Add-ons can display UI cards, receive + editors. Google Workspace add-ons can display UI cards, receive contextual information from the host application, and perform actions in the host application (See: https://developers.google.com/gsuite/add-ons/overview for more information). - A Google Workspace Add-on deployment resource specifies metadata + A Google Workspace add-on deployment resource specifies metadata about the add-on, including a specification of the entry points in the host application that trigger add-on executions (see: https://developers.google.com/gsuite/add-ons/concepts/gsuite-manifests). 
- Add-on deployments defined via the Google Workspace Add-ons API + Add-on deployments defined via the Google Workspace add-ons API define their entrypoints using HTTPS URLs (See: https://developers.google.com/gsuite/add-ons/guides/alternate-runtimes), - A Google Workspace Add-on deployment can be installed in + A Google Workspace add-on deployment can be installed in developer mode, which allows an add-on developer to test the experience an end-user would see when installing and running the add-on in their G Suite applications. When running in developer mode, more detailed error messages are exposed in the add-on UI to aid in debugging. - A Google Workspace Add-on deployment can be published to Google + A Google Workspace add-on deployment can be published to Google Workspace Marketplace, which allows other Google Workspace users to discover and install the add-on. See: diff --git a/packages/google-cloud-gsuiteaddons/google/cloud/gsuiteaddons_v1/services/g_suite_add_ons/transports/rest.py b/packages/google-cloud-gsuiteaddons/google/cloud/gsuiteaddons_v1/services/g_suite_add_ons/transports/rest.py index 3875b737d098..c867e865dc4b 100644 --- a/packages/google-cloud-gsuiteaddons/google/cloud/gsuiteaddons_v1/services/g_suite_add_ons/transports/rest.py +++ b/packages/google-cloud-gsuiteaddons/google/cloud/gsuiteaddons_v1/services/g_suite_add_ons/transports/rest.py @@ -154,12 +154,35 @@ def post_create_deployment( ) -> gsuiteaddons.Deployment: """Post-rpc interceptor for create_deployment - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_deployment_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the GSuiteAddOns server but before - it is returned to user code. + it is returned to user code. This `post_create_deployment` interceptor runs + before the `post_create_deployment_with_metadata` interceptor. """ return response + def post_create_deployment_with_metadata( + self, + response: gsuiteaddons.Deployment, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gsuiteaddons.Deployment, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_deployment + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the GSuiteAddOns server but before it is returned to user code. + + We recommend only using this `post_create_deployment_with_metadata` + interceptor in new development instead of the `post_create_deployment` interceptor. + When both interceptors are used, this `post_create_deployment_with_metadata` interceptor runs after the + `post_create_deployment` interceptor. The (possibly modified) response returned by + `post_create_deployment` will be passed to + `post_create_deployment_with_metadata`. + """ + return response, metadata + def pre_delete_deployment( self, request: gsuiteaddons.DeleteDeploymentRequest, @@ -193,12 +216,35 @@ def post_get_authorization( ) -> gsuiteaddons.Authorization: """Post-rpc interceptor for get_authorization - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_authorization_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the GSuiteAddOns server but before - it is returned to user code. + it is returned to user code. This `post_get_authorization` interceptor runs + before the `post_get_authorization_with_metadata` interceptor. 
""" return response + def post_get_authorization_with_metadata( + self, + response: gsuiteaddons.Authorization, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gsuiteaddons.Authorization, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_authorization + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the GSuiteAddOns server but before it is returned to user code. + + We recommend only using this `post_get_authorization_with_metadata` + interceptor in new development instead of the `post_get_authorization` interceptor. + When both interceptors are used, this `post_get_authorization_with_metadata` interceptor runs after the + `post_get_authorization` interceptor. The (possibly modified) response returned by + `post_get_authorization` will be passed to + `post_get_authorization_with_metadata`. + """ + return response, metadata + def pre_get_deployment( self, request: gsuiteaddons.GetDeploymentRequest, @@ -218,12 +264,35 @@ def post_get_deployment( ) -> gsuiteaddons.Deployment: """Post-rpc interceptor for get_deployment - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_deployment_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the GSuiteAddOns server but before - it is returned to user code. + it is returned to user code. This `post_get_deployment` interceptor runs + before the `post_get_deployment_with_metadata` interceptor. """ return response + def post_get_deployment_with_metadata( + self, + response: gsuiteaddons.Deployment, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gsuiteaddons.Deployment, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_deployment + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the GSuiteAddOns server but before it is returned to user code. + + We recommend only using this `post_get_deployment_with_metadata` + interceptor in new development instead of the `post_get_deployment` interceptor. + When both interceptors are used, this `post_get_deployment_with_metadata` interceptor runs after the + `post_get_deployment` interceptor. The (possibly modified) response returned by + `post_get_deployment` will be passed to + `post_get_deployment_with_metadata`. + """ + return response, metadata + def pre_get_install_status( self, request: gsuiteaddons.GetInstallStatusRequest, @@ -243,12 +312,35 @@ def post_get_install_status( ) -> gsuiteaddons.InstallStatus: """Post-rpc interceptor for get_install_status - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_install_status_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the GSuiteAddOns server but before - it is returned to user code. + it is returned to user code. This `post_get_install_status` interceptor runs + before the `post_get_install_status_with_metadata` interceptor. 
""" return response + def post_get_install_status_with_metadata( + self, + response: gsuiteaddons.InstallStatus, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gsuiteaddons.InstallStatus, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_install_status + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the GSuiteAddOns server but before it is returned to user code. + + We recommend only using this `post_get_install_status_with_metadata` + interceptor in new development instead of the `post_get_install_status` interceptor. + When both interceptors are used, this `post_get_install_status_with_metadata` interceptor runs after the + `post_get_install_status` interceptor. The (possibly modified) response returned by + `post_get_install_status` will be passed to + `post_get_install_status_with_metadata`. + """ + return response, metadata + def pre_install_deployment( self, request: gsuiteaddons.InstallDeploymentRequest, @@ -282,12 +374,37 @@ def post_list_deployments( ) -> gsuiteaddons.ListDeploymentsResponse: """Post-rpc interceptor for list_deployments - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_deployments_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the GSuiteAddOns server but before - it is returned to user code. + it is returned to user code. This `post_list_deployments` interceptor runs + before the `post_list_deployments_with_metadata` interceptor. """ return response + def post_list_deployments_with_metadata( + self, + response: gsuiteaddons.ListDeploymentsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + gsuiteaddons.ListDeploymentsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_deployments + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the GSuiteAddOns server but before it is returned to user code. + + We recommend only using this `post_list_deployments_with_metadata` + interceptor in new development instead of the `post_list_deployments` interceptor. + When both interceptors are used, this `post_list_deployments_with_metadata` interceptor runs after the + `post_list_deployments` interceptor. The (possibly modified) response returned by + `post_list_deployments` will be passed to + `post_list_deployments_with_metadata`. + """ + return response, metadata + def pre_replace_deployment( self, request: gsuiteaddons.ReplaceDeploymentRequest, @@ -307,12 +424,35 @@ def post_replace_deployment( ) -> gsuiteaddons.Deployment: """Post-rpc interceptor for replace_deployment - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_replace_deployment_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the GSuiteAddOns server but before - it is returned to user code. + it is returned to user code. This `post_replace_deployment` interceptor runs + before the `post_replace_deployment_with_metadata` interceptor. 
""" return response + def post_replace_deployment_with_metadata( + self, + response: gsuiteaddons.Deployment, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gsuiteaddons.Deployment, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for replace_deployment + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the GSuiteAddOns server but before it is returned to user code. + + We recommend only using this `post_replace_deployment_with_metadata` + interceptor in new development instead of the `post_replace_deployment` interceptor. + When both interceptors are used, this `post_replace_deployment_with_metadata` interceptor runs after the + `post_replace_deployment` interceptor. The (possibly modified) response returned by + `post_replace_deployment` will be passed to + `post_replace_deployment_with_metadata`. + """ + return response, metadata + def pre_uninstall_deployment( self, request: gsuiteaddons.UninstallDeploymentRequest, @@ -338,36 +478,36 @@ class GSuiteAddOnsRestStub: class GSuiteAddOnsRestTransport(_BaseGSuiteAddOnsRestTransport): """REST backend synchronous transport for GSuiteAddOns. - A service for managing Google Workspace Add-ons deployments. + A service for managing Google Workspace add-ons deployments. - A Google Workspace Add-on is a third-party embedded component + A Google Workspace add-on is a third-party embedded component that can be installed in Google Workspace Applications like Gmail, Calendar, Drive, and the Google Docs, Sheets, and Slides - editors. Google Workspace Add-ons can display UI cards, receive + editors. Google Workspace add-ons can display UI cards, receive contextual information from the host application, and perform actions in the host application (See: https://developers.google.com/gsuite/add-ons/overview for more information). - A Google Workspace Add-on deployment resource specifies metadata + A Google Workspace add-on deployment resource specifies metadata about the add-on, including a specification of the entry points in the host application that trigger add-on executions (see: https://developers.google.com/gsuite/add-ons/concepts/gsuite-manifests). - Add-on deployments defined via the Google Workspace Add-ons API + Add-on deployments defined via the Google Workspace add-ons API define their entrypoints using HTTPS URLs (See: https://developers.google.com/gsuite/add-ons/guides/alternate-runtimes), - A Google Workspace Add-on deployment can be installed in + A Google Workspace add-on deployment can be installed in developer mode, which allows an add-on developer to test the experience an end-user would see when installing and running the add-on in their G Suite applications. When running in developer mode, more detailed error messages are exposed in the add-on UI to aid in debugging. - A Google Workspace Add-on deployment can be published to Google + A Google Workspace add-on deployment can be published to Google Workspace Marketplace, which allows other Google Workspace users to discover and install the add-on. 
See: @@ -501,7 +641,7 @@ def __call__( Returns: ~.gsuiteaddons.Deployment: - A Google Workspace Add-on deployment + A Google Workspace add-on deployment """ http_options = ( @@ -574,6 +714,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_deployment(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_deployment_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -747,7 +891,7 @@ def __call__( Args: request (~.gsuiteaddons.GetAuthorizationRequest): The request object. Request message to get Google - Workspace Add-ons authorization + Workspace add-ons authorization information. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. @@ -829,6 +973,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_authorization(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_authorization_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -903,7 +1051,7 @@ def __call__( Returns: ~.gsuiteaddons.Deployment: - A Google Workspace Add-on deployment + A Google Workspace add-on deployment """ http_options = ( @@ -969,6 +1117,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_deployment(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_deployment_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1114,6 +1266,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_install_status(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_install_status_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1372,6 +1528,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_deployments(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_deployments_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1450,7 +1610,7 @@ def __call__( Returns: ~.gsuiteaddons.Deployment: - A Google Workspace Add-on deployment + A Google Workspace add-on deployment """ http_options = ( @@ -1523,6 +1683,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_replace_deployment(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_replace_deployment_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-gsuiteaddons/google/cloud/gsuiteaddons_v1/types/gsuiteaddons.py 
b/packages/google-cloud-gsuiteaddons/google/cloud/gsuiteaddons_v1/types/gsuiteaddons.py index ed4ddb75e077..0bed49df183f 100644 --- a/packages/google-cloud-gsuiteaddons/google/cloud/gsuiteaddons_v1/types/gsuiteaddons.py +++ b/packages/google-cloud-gsuiteaddons/google/cloud/gsuiteaddons_v1/types/gsuiteaddons.py @@ -49,13 +49,13 @@ class GetAuthorizationRequest(proto.Message): - r"""Request message to get Google Workspace Add-ons authorization + r"""Request message to get Google Workspace add-ons authorization information. Attributes: name (str): Required. Name of the project for which to get the Google - Workspace Add-ons authorization information. + Workspace add-ons authorization information. Example: ``projects/my_project/authorization``. """ @@ -332,7 +332,7 @@ class InstallStatus(proto.Message): class Deployment(proto.Message): - r"""A Google Workspace Add-on deployment + r"""A Google Workspace add-on deployment Attributes: name (str): @@ -343,7 +343,7 @@ class Deployment(proto.Message): request consent from the end user before executing an add-on endpoint. add_ons (google.cloud.gsuiteaddons_v1.types.AddOns): - The Google Workspace Add-on configuration. + The Google Workspace add-on configuration. etag (str): This value is computed by the server based on the version of the deployment in storage, and @@ -372,12 +372,12 @@ class Deployment(proto.Message): class AddOns(proto.Message): - r"""A Google Workspace Add-on configuration. + r"""A Google Workspace add-on configuration. Attributes: common (google.apps.script.type.types.CommonAddOnManifest): Configuration that is common across all - Google Workspace Add-ons. + Google Workspace add-ons. gmail (google.apps.script.type.gmail.types.GmailAddOnManifest): Gmail add-on configuration. drive (google.apps.script.type.drive.types.DriveAddOnManifest): diff --git a/packages/google-cloud-gsuiteaddons/samples/generated_samples/snippet_metadata_google.cloud.gsuiteaddons.v1.json b/packages/google-cloud-gsuiteaddons/samples/generated_samples/snippet_metadata_google.cloud.gsuiteaddons.v1.json index 0d71d3245ec1..b8be415308bf 100644 --- a/packages/google-cloud-gsuiteaddons/samples/generated_samples/snippet_metadata_google.cloud.gsuiteaddons.v1.json +++ b/packages/google-cloud-gsuiteaddons/samples/generated_samples/snippet_metadata_google.cloud.gsuiteaddons.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-gsuiteaddons", - "version": "0.3.13" + "version": "0.3.15" }, "snippets": [ { diff --git a/packages/google-cloud-gsuiteaddons/tests/unit/gapic/gsuiteaddons_v1/test_g_suite_add_ons.py b/packages/google-cloud-gsuiteaddons/tests/unit/gapic/gsuiteaddons_v1/test_g_suite_add_ons.py index 602fcee11d4e..a34a2a9ec487 100644 --- a/packages/google-cloud-gsuiteaddons/tests/unit/gapic/gsuiteaddons_v1/test_g_suite_add_ons.py +++ b/packages/google-cloud-gsuiteaddons/tests/unit/gapic/gsuiteaddons_v1/test_g_suite_add_ons.py @@ -73,6 +73,13 @@ ) from google.cloud.gsuiteaddons_v1.types import gsuiteaddons +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -316,6 +323,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
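The interceptor tests that follow assert that the REST transport now invokes the new `post_*_with_metadata` hooks after the corresponding `post_*` hooks, passing the HTTP response headers as metadata. A hedged sketch of how a user-defined interceptor might read that metadata follows; the base class, hook signature, and types are taken from the diff above, while the `HeaderLoggingInterceptor` subclass and its logging body are purely illustrative.

from typing import Sequence, Tuple, Union

from google.cloud.gsuiteaddons_v1.services.g_suite_add_ons.transports.rest import (
    GSuiteAddOnsRestInterceptor,
)
from google.cloud.gsuiteaddons_v1.types import gsuiteaddons


class HeaderLoggingInterceptor(GSuiteAddOnsRestInterceptor):
    """Reads response metadata via the new *_with_metadata hook."""

    def post_get_authorization_with_metadata(
        self,
        response: gsuiteaddons.Authorization,
        metadata: Sequence[Tuple[str, Union[str, bytes]]],
    ) -> Tuple[gsuiteaddons.Authorization, Sequence[Tuple[str, Union[str, bytes]]]]:
        # The transport calls post_get_authorization first, then this hook,
        # passing the HTTP response headers as (key, value) metadata pairs.
        for key, value in metadata:
            print(f"get_authorization response header {key!s}: {value!s}")
        return response, metadata

Such an interceptor would then, presumably, be wired in through the REST transport's `interceptor=` constructor argument (the pattern the `null_interceptor` test fixtures rely on) when constructing the client.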
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = GSuiteAddOnsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = GSuiteAddOnsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -6668,10 +6718,13 @@ def test_get_authorization_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.GSuiteAddOnsRestInterceptor, "post_get_authorization" ) as post, mock.patch.object( + transports.GSuiteAddOnsRestInterceptor, "post_get_authorization_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.GSuiteAddOnsRestInterceptor, "pre_get_authorization" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gsuiteaddons.GetAuthorizationRequest.pb( gsuiteaddons.GetAuthorizationRequest() ) @@ -6695,6 +6748,7 @@ def test_get_authorization_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gsuiteaddons.Authorization() + post_with_metadata.return_value = gsuiteaddons.Authorization(), metadata client.get_authorization( request, @@ -6706,6 +6760,7 @@ def test_get_authorization_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_deployment_rest_bad_request( @@ -6960,10 +7015,13 @@ def test_create_deployment_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.GSuiteAddOnsRestInterceptor, "post_create_deployment" ) as post, mock.patch.object( + transports.GSuiteAddOnsRestInterceptor, "post_create_deployment_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.GSuiteAddOnsRestInterceptor, "pre_create_deployment" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gsuiteaddons.CreateDeploymentRequest.pb( gsuiteaddons.CreateDeploymentRequest() ) @@ -6987,6 +7045,7 @@ def test_create_deployment_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gsuiteaddons.Deployment() + post_with_metadata.return_value = gsuiteaddons.Deployment(), metadata client.create_deployment( request, @@ -6998,6 +7057,7 @@ def test_create_deployment_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def 
test_replace_deployment_rest_bad_request( @@ -7252,10 +7312,13 @@ def test_replace_deployment_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.GSuiteAddOnsRestInterceptor, "post_replace_deployment" ) as post, mock.patch.object( + transports.GSuiteAddOnsRestInterceptor, "post_replace_deployment_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.GSuiteAddOnsRestInterceptor, "pre_replace_deployment" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gsuiteaddons.ReplaceDeploymentRequest.pb( gsuiteaddons.ReplaceDeploymentRequest() ) @@ -7279,6 +7342,7 @@ def test_replace_deployment_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gsuiteaddons.Deployment() + post_with_metadata.return_value = gsuiteaddons.Deployment(), metadata client.replace_deployment( request, @@ -7290,6 +7354,7 @@ def test_replace_deployment_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_deployment_rest_bad_request( @@ -7378,10 +7443,13 @@ def test_get_deployment_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.GSuiteAddOnsRestInterceptor, "post_get_deployment" ) as post, mock.patch.object( + transports.GSuiteAddOnsRestInterceptor, "post_get_deployment_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.GSuiteAddOnsRestInterceptor, "pre_get_deployment" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gsuiteaddons.GetDeploymentRequest.pb( gsuiteaddons.GetDeploymentRequest() ) @@ -7405,6 +7473,7 @@ def test_get_deployment_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gsuiteaddons.Deployment() + post_with_metadata.return_value = gsuiteaddons.Deployment(), metadata client.get_deployment( request, @@ -7416,6 +7485,7 @@ def test_get_deployment_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_deployments_rest_bad_request( @@ -7500,10 +7570,13 @@ def test_list_deployments_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.GSuiteAddOnsRestInterceptor, "post_list_deployments" ) as post, mock.patch.object( + transports.GSuiteAddOnsRestInterceptor, "post_list_deployments_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.GSuiteAddOnsRestInterceptor, "pre_list_deployments" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gsuiteaddons.ListDeploymentsRequest.pb( gsuiteaddons.ListDeploymentsRequest() ) @@ -7529,6 +7602,10 @@ def test_list_deployments_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gsuiteaddons.ListDeploymentsResponse() + post_with_metadata.return_value = ( + gsuiteaddons.ListDeploymentsResponse(), + metadata, + ) client.list_deployments( request, @@ -7540,6 +7617,7 @@ def test_list_deployments_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_deployment_rest_bad_request( @@ -7951,10 +8029,13 @@ def test_get_install_status_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.GSuiteAddOnsRestInterceptor, 
"post_get_install_status" ) as post, mock.patch.object( + transports.GSuiteAddOnsRestInterceptor, "post_get_install_status_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.GSuiteAddOnsRestInterceptor, "pre_get_install_status" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gsuiteaddons.GetInstallStatusRequest.pb( gsuiteaddons.GetInstallStatusRequest() ) @@ -7978,6 +8059,7 @@ def test_get_install_status_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gsuiteaddons.InstallStatus() + post_with_metadata.return_value = gsuiteaddons.InstallStatus(), metadata client.get_install_status( request, @@ -7989,6 +8071,7 @@ def test_get_install_status_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_initialize_client_w_rest(): diff --git a/packages/google-cloud-iam-logging/.OwlBot.yaml b/packages/google-cloud-iam-logging/.OwlBot.yaml index 65814a90de9f..858f266f2b4b 100644 --- a/packages/google-cloud-iam-logging/.OwlBot.yaml +++ b/packages/google-cloud-iam-logging/.OwlBot.yaml @@ -15,6 +15,9 @@ deep-remove-regex: - /owl-bot-staging +deep-preserve-regex: + - /owl-bot-staging/google-cloud-iam-logging/v1/.*_pb2.py.* + deep-copy-regex: - source: /google/iam/(v.*)/logging/.*-py dest: /owl-bot-staging/google-cloud-iam-logging/$1 diff --git a/packages/google-cloud-iam-logging/google/cloud/iam_logging/gapic_version.py b/packages/google-cloud-iam-logging/google/cloud/iam_logging/gapic_version.py index 4660de83cb4b..558c8aab67c5 100644 --- a/packages/google-cloud-iam-logging/google/cloud/iam_logging/gapic_version.py +++ b/packages/google-cloud-iam-logging/google/cloud/iam_logging/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.4.0" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-iam-logging/google/cloud/iam_logging_v1/gapic_version.py b/packages/google-cloud-iam-logging/google/cloud/iam_logging_v1/gapic_version.py index 4660de83cb4b..558c8aab67c5 100644 --- a/packages/google-cloud-iam-logging/google/cloud/iam_logging_v1/gapic_version.py +++ b/packages/google-cloud-iam-logging/google/cloud/iam_logging_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "1.4.0" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-iam/CHANGELOG.md b/packages/google-cloud-iam/CHANGELOG.md index 0229790dddaa..3c76fb84e230 100644 --- a/packages/google-cloud-iam/CHANGELOG.md +++ b/packages/google-cloud-iam/CHANGELOG.md @@ -4,6 +4,14 @@ [1]: https://pypi.org/project/google-cloud-iam/#history +## [2.18.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-iam-v2.17.0...google-cloud-iam-v2.18.0) (2025-02-12) + + +### Features + +* Add REST Interceptors which support reading metadata ([908d742](https://github.com/googleapis/google-cloud-python/commit/908d7421a4adadd7407df7ec2a25e25688ff180f)) +* Add support for reading selective GAPIC generation methods from service YAML ([908d742](https://github.com/googleapis/google-cloud-python/commit/908d7421a4adadd7407df7ec2a25e25688ff180f)) + ## [2.17.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-iam-v2.16.1...google-cloud-iam-v2.17.0) (2024-12-12) diff --git a/packages/google-cloud-iam/google/cloud/iam/gapic_version.py b/packages/google-cloud-iam/google/cloud/iam/gapic_version.py index 6053ad2404bf..1074c4de1723 100644 --- a/packages/google-cloud-iam/google/cloud/iam/gapic_version.py +++ b/packages/google-cloud-iam/google/cloud/iam/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.17.0" # {x-release-please-version} +__version__ = "2.18.0" # {x-release-please-version} diff --git a/packages/google-cloud-iam/google/cloud/iam_admin/gapic_version.py b/packages/google-cloud-iam/google/cloud/iam_admin/gapic_version.py index 6053ad2404bf..1074c4de1723 100644 --- a/packages/google-cloud-iam/google/cloud/iam_admin/gapic_version.py +++ b/packages/google-cloud-iam/google/cloud/iam_admin/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.17.0" # {x-release-please-version} +__version__ = "2.18.0" # {x-release-please-version} diff --git a/packages/google-cloud-iam/google/cloud/iam_admin_v1/gapic_version.py b/packages/google-cloud-iam/google/cloud/iam_admin_v1/gapic_version.py index 6053ad2404bf..1074c4de1723 100644 --- a/packages/google-cloud-iam/google/cloud/iam_admin_v1/gapic_version.py +++ b/packages/google-cloud-iam/google/cloud/iam_admin_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.17.0" # {x-release-please-version} +__version__ = "2.18.0" # {x-release-please-version} diff --git a/packages/google-cloud-iam/google/cloud/iam_admin_v1/services/iam/client.py b/packages/google-cloud-iam/google/cloud/iam_admin_v1/services/iam/client.py index 617c345dc559..6920d1686203 100644 --- a/packages/google-cloud-iam/google/cloud/iam_admin_v1/services/iam/client.py +++ b/packages/google-cloud-iam/google/cloud/iam_admin_v1/services/iam/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -534,6 +536,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. 
return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. diff --git a/packages/google-cloud-iam/google/cloud/iam_credentials/gapic_version.py b/packages/google-cloud-iam/google/cloud/iam_credentials/gapic_version.py index 6053ad2404bf..1074c4de1723 100644 --- a/packages/google-cloud-iam/google/cloud/iam_credentials/gapic_version.py +++ b/packages/google-cloud-iam/google/cloud/iam_credentials/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.17.0" # {x-release-please-version} +__version__ = "2.18.0" # {x-release-please-version} diff --git a/packages/google-cloud-iam/google/cloud/iam_credentials_v1/gapic_version.py b/packages/google-cloud-iam/google/cloud/iam_credentials_v1/gapic_version.py index 6053ad2404bf..1074c4de1723 100644 --- a/packages/google-cloud-iam/google/cloud/iam_credentials_v1/gapic_version.py +++ b/packages/google-cloud-iam/google/cloud/iam_credentials_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.17.0" # {x-release-please-version} +__version__ = "2.18.0" # {x-release-please-version} diff --git a/packages/google-cloud-iam/google/cloud/iam_credentials_v1/services/iam_credentials/client.py b/packages/google-cloud-iam/google/cloud/iam_credentials_v1/services/iam_credentials/client.py index c70e13c42795..316425d6dc73 100644 --- a/packages/google-cloud-iam/google/cloud/iam_credentials_v1/services/iam_credentials/client.py +++ b/packages/google-cloud-iam/google/cloud/iam_credentials_v1/services/iam_credentials/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -494,6 +496,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. 
+ """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. diff --git a/packages/google-cloud-iam/google/cloud/iam_credentials_v1/services/iam_credentials/transports/rest.py b/packages/google-cloud-iam/google/cloud/iam_credentials_v1/services/iam_credentials/transports/rest.py index 9747740a6baa..47bcfd38e59d 100644 --- a/packages/google-cloud-iam/google/cloud/iam_credentials_v1/services/iam_credentials/transports/rest.py +++ b/packages/google-cloud-iam/google/cloud/iam_credentials_v1/services/iam_credentials/transports/rest.py @@ -125,12 +125,37 @@ def post_generate_access_token( ) -> common.GenerateAccessTokenResponse: """Post-rpc interceptor for generate_access_token - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_generate_access_token_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the IAMCredentials server but before - it is returned to user code. + it is returned to user code. This `post_generate_access_token` interceptor runs + before the `post_generate_access_token_with_metadata` interceptor. """ return response + def post_generate_access_token_with_metadata( + self, + response: common.GenerateAccessTokenResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + common.GenerateAccessTokenResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for generate_access_token + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the IAMCredentials server but before it is returned to user code. + + We recommend only using this `post_generate_access_token_with_metadata` + interceptor in new development instead of the `post_generate_access_token` interceptor. + When both interceptors are used, this `post_generate_access_token_with_metadata` interceptor runs after the + `post_generate_access_token` interceptor. The (possibly modified) response returned by + `post_generate_access_token` will be passed to + `post_generate_access_token_with_metadata`. + """ + return response, metadata + def pre_generate_id_token( self, request: common.GenerateIdTokenRequest, @@ -148,12 +173,35 @@ def post_generate_id_token( ) -> common.GenerateIdTokenResponse: """Post-rpc interceptor for generate_id_token - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_generate_id_token_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the IAMCredentials server but before - it is returned to user code. + it is returned to user code. This `post_generate_id_token` interceptor runs + before the `post_generate_id_token_with_metadata` interceptor. 
""" return response + def post_generate_id_token_with_metadata( + self, + response: common.GenerateIdTokenResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[common.GenerateIdTokenResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for generate_id_token + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the IAMCredentials server but before it is returned to user code. + + We recommend only using this `post_generate_id_token_with_metadata` + interceptor in new development instead of the `post_generate_id_token` interceptor. + When both interceptors are used, this `post_generate_id_token_with_metadata` interceptor runs after the + `post_generate_id_token` interceptor. The (possibly modified) response returned by + `post_generate_id_token` will be passed to + `post_generate_id_token_with_metadata`. + """ + return response, metadata + def pre_sign_blob( self, request: common.SignBlobRequest, @@ -171,12 +219,35 @@ def post_sign_blob( ) -> common.SignBlobResponse: """Post-rpc interceptor for sign_blob - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_sign_blob_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the IAMCredentials server but before - it is returned to user code. + it is returned to user code. This `post_sign_blob` interceptor runs + before the `post_sign_blob_with_metadata` interceptor. """ return response + def post_sign_blob_with_metadata( + self, + response: common.SignBlobResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[common.SignBlobResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for sign_blob + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the IAMCredentials server but before it is returned to user code. + + We recommend only using this `post_sign_blob_with_metadata` + interceptor in new development instead of the `post_sign_blob` interceptor. + When both interceptors are used, this `post_sign_blob_with_metadata` interceptor runs after the + `post_sign_blob` interceptor. The (possibly modified) response returned by + `post_sign_blob` will be passed to + `post_sign_blob_with_metadata`. + """ + return response, metadata + def pre_sign_jwt( self, request: common.SignJwtRequest, @@ -192,12 +263,35 @@ def pre_sign_jwt( def post_sign_jwt(self, response: common.SignJwtResponse) -> common.SignJwtResponse: """Post-rpc interceptor for sign_jwt - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_sign_jwt_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the IAMCredentials server but before - it is returned to user code. + it is returned to user code. This `post_sign_jwt` interceptor runs + before the `post_sign_jwt_with_metadata` interceptor. """ return response + def post_sign_jwt_with_metadata( + self, + response: common.SignJwtResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[common.SignJwtResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for sign_jwt + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the IAMCredentials server but before it is returned to user code. 
+ + We recommend only using this `post_sign_jwt_with_metadata` + interceptor in new development instead of the `post_sign_jwt` interceptor. + When both interceptors are used, this `post_sign_jwt_with_metadata` interceptor runs after the + `post_sign_jwt` interceptor. The (possibly modified) response returned by + `post_sign_jwt` will be passed to + `post_sign_jwt_with_metadata`. + """ + return response, metadata + @dataclasses.dataclass class IAMCredentialsRestStub: @@ -420,6 +514,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_generate_access_token(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_generate_access_token_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -570,6 +668,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_generate_id_token(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_generate_id_token_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -722,6 +824,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_sign_blob(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_sign_blob_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -872,6 +978,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_sign_jwt(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_sign_jwt_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-iam/google/cloud/iam_v2/gapic_version.py b/packages/google-cloud-iam/google/cloud/iam_v2/gapic_version.py index 6053ad2404bf..1074c4de1723 100644 --- a/packages/google-cloud-iam/google/cloud/iam_v2/gapic_version.py +++ b/packages/google-cloud-iam/google/cloud/iam_v2/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.17.0" # {x-release-please-version} +__version__ = "2.18.0" # {x-release-please-version} diff --git a/packages/google-cloud-iam/google/cloud/iam_v2/services/policies/client.py b/packages/google-cloud-iam/google/cloud/iam_v2/services/policies/client.py index 3fe88405f8c4..11e3af4ab9dc 100644 --- a/packages/google-cloud-iam/google/cloud/iam_v2/services/policies/client.py +++ b/packages/google-cloud-iam/google/cloud/iam_v2/services/policies/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -468,6 +470,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. 
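Note: the IAMCredentials interceptor docstrings above describe the split between the legacy post_* hooks and the new post_*_with_metadata variants. As a hedged illustration (not part of this diff), a custom interceptor might read the response headers through the new hook like this; the print sink is a placeholder:

from typing import Sequence, Tuple, Union

from google.cloud.iam_credentials_v1.services.iam_credentials.transports.rest import (
    IAMCredentialsRestInterceptor,
)
from google.cloud.iam_credentials_v1.types import common


class HeaderLoggingInterceptor(IAMCredentialsRestInterceptor):
    def post_sign_blob_with_metadata(
        self,
        response: common.SignBlobResponse,
        metadata: Sequence[Tuple[str, Union[str, bytes]]],
    ) -> Tuple[common.SignBlobResponse, Sequence[Tuple[str, Union[str, bytes]]]]:
        # metadata carries the HTTP response headers as (key, value) pairs,
        # as built by the transport changes in this diff.
        for key, value in metadata:
            print(f"sign_blob response header: {key}={value}")
        return response, metadata

Such a subclass would typically be supplied through the REST transport's interceptor argument when constructing the client; that wiring is unchanged by this diff.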
+ + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -1355,16 +1384,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-iam/google/cloud/iam_v2/services/policies/transports/rest.py b/packages/google-cloud-iam/google/cloud/iam_v2/services/policies/transports/rest.py index 292a9f3ebc49..ac61f25441ca 100644 --- a/packages/google-cloud-iam/google/cloud/iam_v2/services/policies/transports/rest.py +++ b/packages/google-cloud-iam/google/cloud/iam_v2/services/policies/transports/rest.py @@ -133,12 +133,35 @@ def post_create_policy( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_policy - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_policy_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Policies server but before - it is returned to user code. + it is returned to user code. This `post_create_policy` interceptor runs + before the `post_create_policy_with_metadata` interceptor. """ return response + def post_create_policy_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_policy + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Policies server but before it is returned to user code. + + We recommend only using this `post_create_policy_with_metadata` + interceptor in new development instead of the `post_create_policy` interceptor. + When both interceptors are used, this `post_create_policy_with_metadata` interceptor runs after the + `post_create_policy` interceptor. The (possibly modified) response returned by + `post_create_policy` will be passed to + `post_create_policy_with_metadata`. + """ + return response, metadata + def pre_delete_policy( self, request: policy.DeletePolicyRequest, @@ -156,12 +179,35 @@ def post_delete_policy( ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_policy - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_policy_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response after it is returned by the Policies server but before - it is returned to user code. + it is returned to user code. This `post_delete_policy` interceptor runs + before the `post_delete_policy_with_metadata` interceptor. """ return response + def post_delete_policy_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_policy + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Policies server but before it is returned to user code. + + We recommend only using this `post_delete_policy_with_metadata` + interceptor in new development instead of the `post_delete_policy` interceptor. + When both interceptors are used, this `post_delete_policy_with_metadata` interceptor runs after the + `post_delete_policy` interceptor. The (possibly modified) response returned by + `post_delete_policy` will be passed to + `post_delete_policy_with_metadata`. + """ + return response, metadata + def pre_get_policy( self, request: policy.GetPolicyRequest, @@ -177,12 +223,33 @@ def pre_get_policy( def post_get_policy(self, response: policy.Policy) -> policy.Policy: """Post-rpc interceptor for get_policy - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_policy_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Policies server but before - it is returned to user code. + it is returned to user code. This `post_get_policy` interceptor runs + before the `post_get_policy_with_metadata` interceptor. """ return response + def post_get_policy_with_metadata( + self, response: policy.Policy, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[policy.Policy, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_policy + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Policies server but before it is returned to user code. + + We recommend only using this `post_get_policy_with_metadata` + interceptor in new development instead of the `post_get_policy` interceptor. + When both interceptors are used, this `post_get_policy_with_metadata` interceptor runs after the + `post_get_policy` interceptor. The (possibly modified) response returned by + `post_get_policy` will be passed to + `post_get_policy_with_metadata`. + """ + return response, metadata + def pre_list_policies( self, request: policy.ListPoliciesRequest, @@ -200,12 +267,35 @@ def post_list_policies( ) -> policy.ListPoliciesResponse: """Post-rpc interceptor for list_policies - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_policies_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Policies server but before - it is returned to user code. + it is returned to user code. This `post_list_policies` interceptor runs + before the `post_list_policies_with_metadata` interceptor. 
""" return response + def post_list_policies_with_metadata( + self, + response: policy.ListPoliciesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[policy.ListPoliciesResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_policies + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Policies server but before it is returned to user code. + + We recommend only using this `post_list_policies_with_metadata` + interceptor in new development instead of the `post_list_policies` interceptor. + When both interceptors are used, this `post_list_policies_with_metadata` interceptor runs after the + `post_list_policies` interceptor. The (possibly modified) response returned by + `post_list_policies` will be passed to + `post_list_policies_with_metadata`. + """ + return response, metadata + def pre_update_policy( self, request: policy.UpdatePolicyRequest, @@ -223,12 +313,35 @@ def post_update_policy( ) -> operations_pb2.Operation: """Post-rpc interceptor for update_policy - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_policy_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Policies server but before - it is returned to user code. + it is returned to user code. This `post_update_policy` interceptor runs + before the `post_update_policy_with_metadata` interceptor. """ return response + def post_update_policy_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_policy + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Policies server but before it is returned to user code. + + We recommend only using this `post_update_policy_with_metadata` + interceptor in new development instead of the `post_update_policy` interceptor. + When both interceptors are used, this `post_update_policy_with_metadata` interceptor runs after the + `post_update_policy` interceptor. The (possibly modified) response returned by + `post_update_policy` will be passed to + `post_update_policy_with_metadata`. 
+ """ + return response, metadata + def pre_get_operation( self, request: operations_pb2.GetOperationRequest, @@ -503,6 +616,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_policy(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_policy_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -646,6 +763,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_policy(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_policy_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -786,6 +907,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_policy(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_policy_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -928,6 +1053,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_policies(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_policies_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1077,6 +1206,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_policy(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_policy_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-iam/google/cloud/iam_v2beta/gapic_version.py b/packages/google-cloud-iam/google/cloud/iam_v2beta/gapic_version.py index 6053ad2404bf..1074c4de1723 100644 --- a/packages/google-cloud-iam/google/cloud/iam_v2beta/gapic_version.py +++ b/packages/google-cloud-iam/google/cloud/iam_v2beta/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.17.0" # {x-release-please-version} +__version__ = "2.18.0" # {x-release-please-version} diff --git a/packages/google-cloud-iam/google/cloud/iam_v2beta/services/policies/client.py b/packages/google-cloud-iam/google/cloud/iam_v2beta/services/policies/client.py index b342263b222e..f7ee3d9fec91 100644 --- a/packages/google-cloud-iam/google/cloud/iam_v2beta/services/policies/client.py +++ b/packages/google-cloud-iam/google/cloud/iam_v2beta/services/policies/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -466,6 +468,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. 
return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -1353,16 +1382,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-iam/samples/generated_samples/snippet_metadata_google.iam.admin.v1.json b/packages/google-cloud-iam/samples/generated_samples/snippet_metadata_google.iam.admin.v1.json index a3c3f92bcc73..192b430ca62a 100644 --- a/packages/google-cloud-iam/samples/generated_samples/snippet_metadata_google.iam.admin.v1.json +++ b/packages/google-cloud-iam/samples/generated_samples/snippet_metadata_google.iam.admin.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-iam", - "version": "2.17.0" + "version": "2.18.0" }, "snippets": [ { diff --git a/packages/google-cloud-iam/samples/generated_samples/snippet_metadata_google.iam.credentials.v1.json b/packages/google-cloud-iam/samples/generated_samples/snippet_metadata_google.iam.credentials.v1.json index d746b41f0de7..df1a76d87a7b 100644 --- a/packages/google-cloud-iam/samples/generated_samples/snippet_metadata_google.iam.credentials.v1.json +++ b/packages/google-cloud-iam/samples/generated_samples/snippet_metadata_google.iam.credentials.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-iam", - "version": "2.17.0" + "version": "2.18.0" }, "snippets": [ { diff --git a/packages/google-cloud-iam/samples/generated_samples/snippet_metadata_google.iam.v2.json b/packages/google-cloud-iam/samples/generated_samples/snippet_metadata_google.iam.v2.json index 661552fe25d0..ec3fa5534d89 100644 --- a/packages/google-cloud-iam/samples/generated_samples/snippet_metadata_google.iam.v2.json +++ b/packages/google-cloud-iam/samples/generated_samples/snippet_metadata_google.iam.v2.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-iam", - "version": "2.17.0" + "version": "2.18.0" }, "snippets": [ { diff --git a/packages/google-cloud-iam/samples/generated_samples/snippet_metadata_google.iam.v2beta.json b/packages/google-cloud-iam/samples/generated_samples/snippet_metadata_google.iam.v2beta.json index a551d8628fec..8247dcabe73c 100644 --- a/packages/google-cloud-iam/samples/generated_samples/snippet_metadata_google.iam.v2beta.json +++ 
b/packages/google-cloud-iam/samples/generated_samples/snippet_metadata_google.iam.v2beta.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-iam", - "version": "2.17.0" + "version": "2.18.0" }, "snippets": [ { diff --git a/packages/google-cloud-iam/tests/unit/gapic/iam_admin_v1/test_iam.py b/packages/google-cloud-iam/tests/unit/gapic/iam_admin_v1/test_iam.py index 8ca140d38bf8..2c8cd0204536 100644 --- a/packages/google-cloud-iam/tests/unit/gapic/iam_admin_v1/test_iam.py +++ b/packages/google-cloud-iam/tests/unit/gapic/iam_admin_v1/test_iam.py @@ -22,6 +22,7 @@ except ImportError: # pragma: NO COVER import mock +import json import math from google.api_core import api_core_version @@ -61,6 +62,13 @@ ) from google.cloud.iam_admin_v1.types import iam +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -272,6 +280,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = IAMClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = IAMClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-iam/tests/unit/gapic/iam_credentials_v1/test_iam_credentials.py b/packages/google-cloud-iam/tests/unit/gapic/iam_credentials_v1/test_iam_credentials.py index dbf92be1a19e..248dd816df21 100644 --- a/packages/google-cloud-iam/tests/unit/gapic/iam_credentials_v1/test_iam_credentials.py +++ b/packages/google-cloud-iam/tests/unit/gapic/iam_credentials_v1/test_iam_credentials.py @@ -61,6 +61,13 @@ ) from google.cloud.iam_credentials_v1.types import common +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -319,6 +326,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = IAMCredentialsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = IAMCredentialsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -3694,10 +3744,14 @@ def test_generate_access_token_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.IAMCredentialsRestInterceptor, "post_generate_access_token" ) as post, mock.patch.object( + transports.IAMCredentialsRestInterceptor, + "post_generate_access_token_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.IAMCredentialsRestInterceptor, "pre_generate_access_token" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = common.GenerateAccessTokenRequest.pb( common.GenerateAccessTokenRequest() ) @@ -3723,6 +3777,7 @@ def test_generate_access_token_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = common.GenerateAccessTokenResponse() + post_with_metadata.return_value = common.GenerateAccessTokenResponse(), metadata client.generate_access_token( request, @@ -3734,6 +3789,7 @@ def test_generate_access_token_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_generate_id_token_rest_bad_request(request_type=common.GenerateIdTokenRequest): @@ -3816,10 +3872,13 @@ def test_generate_id_token_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.IAMCredentialsRestInterceptor, "post_generate_id_token" ) as post, mock.patch.object( + transports.IAMCredentialsRestInterceptor, "post_generate_id_token_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.IAMCredentialsRestInterceptor, "pre_generate_id_token" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = common.GenerateIdTokenRequest.pb(common.GenerateIdTokenRequest()) transcode.return_value = { "method": "post", @@ -3843,6 +3902,7 @@ def test_generate_id_token_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = common.GenerateIdTokenResponse() + post_with_metadata.return_value = common.GenerateIdTokenResponse(), metadata client.generate_id_token( request, @@ -3854,6 +3914,7 @@ def 
test_generate_id_token_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_sign_blob_rest_bad_request(request_type=common.SignBlobRequest): @@ -3938,10 +3999,13 @@ def test_sign_blob_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.IAMCredentialsRestInterceptor, "post_sign_blob" ) as post, mock.patch.object( + transports.IAMCredentialsRestInterceptor, "post_sign_blob_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.IAMCredentialsRestInterceptor, "pre_sign_blob" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = common.SignBlobRequest.pb(common.SignBlobRequest()) transcode.return_value = { "method": "post", @@ -3963,6 +4027,7 @@ def test_sign_blob_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = common.SignBlobResponse() + post_with_metadata.return_value = common.SignBlobResponse(), metadata client.sign_blob( request, @@ -3974,6 +4039,7 @@ def test_sign_blob_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_sign_jwt_rest_bad_request(request_type=common.SignJwtRequest): @@ -4058,10 +4124,13 @@ def test_sign_jwt_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.IAMCredentialsRestInterceptor, "post_sign_jwt" ) as post, mock.patch.object( + transports.IAMCredentialsRestInterceptor, "post_sign_jwt_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.IAMCredentialsRestInterceptor, "pre_sign_jwt" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = common.SignJwtRequest.pb(common.SignJwtRequest()) transcode.return_value = { "method": "post", @@ -4083,6 +4152,7 @@ def test_sign_jwt_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = common.SignJwtResponse() + post_with_metadata.return_value = common.SignJwtResponse(), metadata client.sign_jwt( request, @@ -4094,6 +4164,7 @@ def test_sign_jwt_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_initialize_client_w_rest(): diff --git a/packages/google-cloud-iam/tests/unit/gapic/iam_v2/test_policies.py b/packages/google-cloud-iam/tests/unit/gapic/iam_v2/test_policies.py index 6e312b1dce02..acc78c820a18 100644 --- a/packages/google-cloud-iam/tests/unit/gapic/iam_v2/test_policies.py +++ b/packages/google-cloud-iam/tests/unit/gapic/iam_v2/test_policies.py @@ -74,6 +74,13 @@ from google.cloud.iam_v2.types import policy from google.cloud.iam_v2.types import policy as gi_policy +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -304,6 +311,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
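Note: the new tests in this and the other test modules rely on mock.Mock's spec-list form to simulate credentials with and without get_cred_info. A small standalone illustration of that behavior (independent of any client):

from unittest import mock

# Passing a list of names as the spec restricts the mock's attribute surface:
# only the listed attributes exist, so hasattr() behaves as it would on a real
# credentials object that does or does not expose get_cred_info().
cred_with_info = mock.Mock(["get_cred_info"])
cred_without_info = mock.Mock([])

assert hasattr(cred_with_info, "get_cred_info")
assert not hasattr(cred_without_info, "get_cred_info")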
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = PoliciesClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = PoliciesClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -4139,10 +4189,13 @@ def test_list_policies_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.PoliciesRestInterceptor, "post_list_policies" ) as post, mock.patch.object( + transports.PoliciesRestInterceptor, "post_list_policies_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.PoliciesRestInterceptor, "pre_list_policies" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = policy.ListPoliciesRequest.pb(policy.ListPoliciesRequest()) transcode.return_value = { "method": "post", @@ -4166,6 +4219,7 @@ def test_list_policies_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = policy.ListPoliciesResponse() + post_with_metadata.return_value = policy.ListPoliciesResponse(), metadata client.list_policies( request, @@ -4177,6 +4231,7 @@ def test_list_policies_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_policy_rest_bad_request(request_type=policy.GetPolicyRequest): @@ -4267,10 +4322,13 @@ def test_get_policy_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.PoliciesRestInterceptor, "post_get_policy" ) as post, mock.patch.object( + transports.PoliciesRestInterceptor, "post_get_policy_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.PoliciesRestInterceptor, "pre_get_policy" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = policy.GetPolicyRequest.pb(policy.GetPolicyRequest()) transcode.return_value = { "method": "post", @@ -4292,6 +4350,7 @@ def test_get_policy_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = policy.Policy() + post_with_metadata.return_value = policy.Policy(), metadata client.get_policy( request, @@ -4303,6 +4362,7 @@ def test_get_policy_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def 
test_create_policy_rest_bad_request(request_type=gi_policy.CreatePolicyRequest): @@ -4487,10 +4547,13 @@ def test_create_policy_rest_interceptors(null_interceptor): ), mock.patch.object( transports.PoliciesRestInterceptor, "post_create_policy" ) as post, mock.patch.object( + transports.PoliciesRestInterceptor, "post_create_policy_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.PoliciesRestInterceptor, "pre_create_policy" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gi_policy.CreatePolicyRequest.pb(gi_policy.CreatePolicyRequest()) transcode.return_value = { "method": "post", @@ -4512,6 +4575,7 @@ def test_create_policy_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_policy( request, @@ -4523,6 +4587,7 @@ def test_create_policy_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_policy_rest_bad_request(request_type=policy.UpdatePolicyRequest): @@ -4707,10 +4772,13 @@ def test_update_policy_rest_interceptors(null_interceptor): ), mock.patch.object( transports.PoliciesRestInterceptor, "post_update_policy" ) as post, mock.patch.object( + transports.PoliciesRestInterceptor, "post_update_policy_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.PoliciesRestInterceptor, "pre_update_policy" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = policy.UpdatePolicyRequest.pb(policy.UpdatePolicyRequest()) transcode.return_value = { "method": "post", @@ -4732,6 +4800,7 @@ def test_update_policy_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.update_policy( request, @@ -4743,6 +4812,7 @@ def test_update_policy_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_policy_rest_bad_request(request_type=policy.DeletePolicyRequest): @@ -4819,10 +4889,13 @@ def test_delete_policy_rest_interceptors(null_interceptor): ), mock.patch.object( transports.PoliciesRestInterceptor, "post_delete_policy" ) as post, mock.patch.object( + transports.PoliciesRestInterceptor, "post_delete_policy_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.PoliciesRestInterceptor, "pre_delete_policy" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = policy.DeletePolicyRequest.pb(policy.DeletePolicyRequest()) transcode.return_value = { "method": "post", @@ -4844,6 +4917,7 @@ def test_delete_policy_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.delete_policy( request, @@ -4855,6 +4929,7 @@ def test_delete_policy_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_operation_rest_bad_request( diff --git a/packages/google-cloud-iam/tests/unit/gapic/iam_v2beta/test_policies.py b/packages/google-cloud-iam/tests/unit/gapic/iam_v2beta/test_policies.py index 
b91e4db5e8c3..e0d3c4d46c8b 100644 --- a/packages/google-cloud-iam/tests/unit/gapic/iam_v2beta/test_policies.py +++ b/packages/google-cloud-iam/tests/unit/gapic/iam_v2beta/test_policies.py @@ -22,6 +22,7 @@ except ImportError: # pragma: NO COVER import mock +import json import math from google.api_core import api_core_version @@ -69,6 +70,13 @@ from google.cloud.iam_v2beta.types import policy from google.cloud.iam_v2beta.types import policy as gi_policy +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -299,6 +307,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = PoliciesClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = PoliciesClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-iap/CHANGELOG.md b/packages/google-cloud-iap/CHANGELOG.md index 810e950391d7..1a00ebfa71d8 100644 --- a/packages/google-cloud-iap/CHANGELOG.md +++ b/packages/google-cloud-iap/CHANGELOG.md @@ -1,5 +1,13 @@ # Changelog +## [1.16.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-iap-v1.15.0...google-cloud-iap-v1.16.0) (2025-02-12) + + +### Features + +* Add REST Interceptors which support reading metadata ([908d742](https://github.com/googleapis/google-cloud-python/commit/908d7421a4adadd7407df7ec2a25e25688ff180f)) +* Add support for reading selective GAPIC generation methods from service YAML ([908d742](https://github.com/googleapis/google-cloud-python/commit/908d7421a4adadd7407df7ec2a25e25688ff180f)) + ## [1.15.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-iap-v1.14.1...google-cloud-iap-v1.15.0) (2024-12-12) diff --git a/packages/google-cloud-iap/google/cloud/iap/gapic_version.py b/packages/google-cloud-iap/google/cloud/iap/gapic_version.py index cf18a472a8a2..3e0ea3b28f0a 100644 --- a/packages/google-cloud-iap/google/cloud/iap/gapic_version.py +++ b/packages/google-cloud-iap/google/cloud/iap/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # 
limitations under the License. # -__version__ = "1.15.0" # {x-release-please-version} +__version__ = "1.16.0" # {x-release-please-version} diff --git a/packages/google-cloud-iap/google/cloud/iap_v1/gapic_version.py b/packages/google-cloud-iap/google/cloud/iap_v1/gapic_version.py index cf18a472a8a2..3e0ea3b28f0a 100644 --- a/packages/google-cloud-iap/google/cloud/iap_v1/gapic_version.py +++ b/packages/google-cloud-iap/google/cloud/iap_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.15.0" # {x-release-please-version} +__version__ = "1.16.0" # {x-release-please-version} diff --git a/packages/google-cloud-iap/google/cloud/iap_v1/services/identity_aware_proxy_admin_service/client.py b/packages/google-cloud-iap/google/cloud/iap_v1/services/identity_aware_proxy_admin_service/client.py index afd98d1ab4ab..411192f82c19 100644 --- a/packages/google-cloud-iap/google/cloud/iap_v1/services/identity_aware_proxy_admin_service/client.py +++ b/packages/google-cloud-iap/google/cloud/iap_v1/services/identity_aware_proxy_admin_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -516,6 +518,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. diff --git a/packages/google-cloud-iap/google/cloud/iap_v1/services/identity_aware_proxy_admin_service/transports/rest.py b/packages/google-cloud-iap/google/cloud/iap_v1/services/identity_aware_proxy_admin_service/transports/rest.py index 1657cd0ab0d0..4b12c9b0318b 100644 --- a/packages/google-cloud-iap/google/cloud/iap_v1/services/identity_aware_proxy_admin_service/transports/rest.py +++ b/packages/google-cloud-iap/google/cloud/iap_v1/services/identity_aware_proxy_admin_service/transports/rest.py @@ -172,12 +172,35 @@ def post_create_tunnel_dest_group( ) -> service.TunnelDestGroup: """Post-rpc interceptor for create_tunnel_dest_group - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_tunnel_dest_group_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the IdentityAwareProxyAdminService server but before - it is returned to user code. + it is returned to user code. This `post_create_tunnel_dest_group` interceptor runs + before the `post_create_tunnel_dest_group_with_metadata` interceptor. 
""" return response + def post_create_tunnel_dest_group_with_metadata( + self, + response: service.TunnelDestGroup, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[service.TunnelDestGroup, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_tunnel_dest_group + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the IdentityAwareProxyAdminService server but before it is returned to user code. + + We recommend only using this `post_create_tunnel_dest_group_with_metadata` + interceptor in new development instead of the `post_create_tunnel_dest_group` interceptor. + When both interceptors are used, this `post_create_tunnel_dest_group_with_metadata` interceptor runs after the + `post_create_tunnel_dest_group` interceptor. The (possibly modified) response returned by + `post_create_tunnel_dest_group` will be passed to + `post_create_tunnel_dest_group_with_metadata`. + """ + return response, metadata + def pre_delete_tunnel_dest_group( self, request: service.DeleteTunnelDestGroupRequest, @@ -209,12 +232,35 @@ def pre_get_iam_policy( def post_get_iam_policy(self, response: policy_pb2.Policy) -> policy_pb2.Policy: """Post-rpc interceptor for get_iam_policy - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_iam_policy_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the IdentityAwareProxyAdminService server but before - it is returned to user code. + it is returned to user code. This `post_get_iam_policy` interceptor runs + before the `post_get_iam_policy_with_metadata` interceptor. """ return response + def post_get_iam_policy_with_metadata( + self, + response: policy_pb2.Policy, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[policy_pb2.Policy, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_iam_policy + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the IdentityAwareProxyAdminService server but before it is returned to user code. + + We recommend only using this `post_get_iam_policy_with_metadata` + interceptor in new development instead of the `post_get_iam_policy` interceptor. + When both interceptors are used, this `post_get_iam_policy_with_metadata` interceptor runs after the + `post_get_iam_policy` interceptor. The (possibly modified) response returned by + `post_get_iam_policy` will be passed to + `post_get_iam_policy_with_metadata`. + """ + return response, metadata + def pre_get_iap_settings( self, request: service.GetIapSettingsRequest, @@ -232,12 +278,35 @@ def post_get_iap_settings( ) -> service.IapSettings: """Post-rpc interceptor for get_iap_settings - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_iap_settings_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the IdentityAwareProxyAdminService server but before - it is returned to user code. + it is returned to user code. This `post_get_iap_settings` interceptor runs + before the `post_get_iap_settings_with_metadata` interceptor. 
""" return response + def post_get_iap_settings_with_metadata( + self, + response: service.IapSettings, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[service.IapSettings, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_iap_settings + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the IdentityAwareProxyAdminService server but before it is returned to user code. + + We recommend only using this `post_get_iap_settings_with_metadata` + interceptor in new development instead of the `post_get_iap_settings` interceptor. + When both interceptors are used, this `post_get_iap_settings_with_metadata` interceptor runs after the + `post_get_iap_settings` interceptor. The (possibly modified) response returned by + `post_get_iap_settings` will be passed to + `post_get_iap_settings_with_metadata`. + """ + return response, metadata + def pre_get_tunnel_dest_group( self, request: service.GetTunnelDestGroupRequest, @@ -257,12 +326,35 @@ def post_get_tunnel_dest_group( ) -> service.TunnelDestGroup: """Post-rpc interceptor for get_tunnel_dest_group - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_tunnel_dest_group_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the IdentityAwareProxyAdminService server but before - it is returned to user code. + it is returned to user code. This `post_get_tunnel_dest_group` interceptor runs + before the `post_get_tunnel_dest_group_with_metadata` interceptor. """ return response + def post_get_tunnel_dest_group_with_metadata( + self, + response: service.TunnelDestGroup, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[service.TunnelDestGroup, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_tunnel_dest_group + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the IdentityAwareProxyAdminService server but before it is returned to user code. + + We recommend only using this `post_get_tunnel_dest_group_with_metadata` + interceptor in new development instead of the `post_get_tunnel_dest_group` interceptor. + When both interceptors are used, this `post_get_tunnel_dest_group_with_metadata` interceptor runs after the + `post_get_tunnel_dest_group` interceptor. The (possibly modified) response returned by + `post_get_tunnel_dest_group` will be passed to + `post_get_tunnel_dest_group_with_metadata`. + """ + return response, metadata + def pre_list_tunnel_dest_groups( self, request: service.ListTunnelDestGroupsRequest, @@ -282,12 +374,37 @@ def post_list_tunnel_dest_groups( ) -> service.ListTunnelDestGroupsResponse: """Post-rpc interceptor for list_tunnel_dest_groups - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_tunnel_dest_groups_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the IdentityAwareProxyAdminService server but before - it is returned to user code. + it is returned to user code. This `post_list_tunnel_dest_groups` interceptor runs + before the `post_list_tunnel_dest_groups_with_metadata` interceptor. 
""" return response + def post_list_tunnel_dest_groups_with_metadata( + self, + response: service.ListTunnelDestGroupsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + service.ListTunnelDestGroupsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_tunnel_dest_groups + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the IdentityAwareProxyAdminService server but before it is returned to user code. + + We recommend only using this `post_list_tunnel_dest_groups_with_metadata` + interceptor in new development instead of the `post_list_tunnel_dest_groups` interceptor. + When both interceptors are used, this `post_list_tunnel_dest_groups_with_metadata` interceptor runs after the + `post_list_tunnel_dest_groups` interceptor. The (possibly modified) response returned by + `post_list_tunnel_dest_groups` will be passed to + `post_list_tunnel_dest_groups_with_metadata`. + """ + return response, metadata + def pre_set_iam_policy( self, request: iam_policy_pb2.SetIamPolicyRequest, @@ -305,12 +422,35 @@ def pre_set_iam_policy( def post_set_iam_policy(self, response: policy_pb2.Policy) -> policy_pb2.Policy: """Post-rpc interceptor for set_iam_policy - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_set_iam_policy_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the IdentityAwareProxyAdminService server but before - it is returned to user code. + it is returned to user code. This `post_set_iam_policy` interceptor runs + before the `post_set_iam_policy_with_metadata` interceptor. """ return response + def post_set_iam_policy_with_metadata( + self, + response: policy_pb2.Policy, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[policy_pb2.Policy, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for set_iam_policy + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the IdentityAwareProxyAdminService server but before it is returned to user code. + + We recommend only using this `post_set_iam_policy_with_metadata` + interceptor in new development instead of the `post_set_iam_policy` interceptor. + When both interceptors are used, this `post_set_iam_policy_with_metadata` interceptor runs after the + `post_set_iam_policy` interceptor. The (possibly modified) response returned by + `post_set_iam_policy` will be passed to + `post_set_iam_policy_with_metadata`. + """ + return response, metadata + def pre_test_iam_permissions( self, request: iam_policy_pb2.TestIamPermissionsRequest, @@ -331,12 +471,38 @@ def post_test_iam_permissions( ) -> iam_policy_pb2.TestIamPermissionsResponse: """Post-rpc interceptor for test_iam_permissions - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_test_iam_permissions_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the IdentityAwareProxyAdminService server but before - it is returned to user code. + it is returned to user code. This `post_test_iam_permissions` interceptor runs + before the `post_test_iam_permissions_with_metadata` interceptor. 
""" return response + def post_test_iam_permissions_with_metadata( + self, + response: iam_policy_pb2.TestIamPermissionsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + iam_policy_pb2.TestIamPermissionsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for test_iam_permissions + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the IdentityAwareProxyAdminService server but before it is returned to user code. + + We recommend only using this `post_test_iam_permissions_with_metadata` + interceptor in new development instead of the `post_test_iam_permissions` interceptor. + When both interceptors are used, this `post_test_iam_permissions_with_metadata` interceptor runs after the + `post_test_iam_permissions` interceptor. The (possibly modified) response returned by + `post_test_iam_permissions` will be passed to + `post_test_iam_permissions_with_metadata`. + """ + return response, metadata + def pre_update_iap_settings( self, request: service.UpdateIapSettingsRequest, @@ -356,12 +522,35 @@ def post_update_iap_settings( ) -> service.IapSettings: """Post-rpc interceptor for update_iap_settings - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_iap_settings_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the IdentityAwareProxyAdminService server but before - it is returned to user code. + it is returned to user code. This `post_update_iap_settings` interceptor runs + before the `post_update_iap_settings_with_metadata` interceptor. """ return response + def post_update_iap_settings_with_metadata( + self, + response: service.IapSettings, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[service.IapSettings, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_iap_settings + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the IdentityAwareProxyAdminService server but before it is returned to user code. + + We recommend only using this `post_update_iap_settings_with_metadata` + interceptor in new development instead of the `post_update_iap_settings` interceptor. + When both interceptors are used, this `post_update_iap_settings_with_metadata` interceptor runs after the + `post_update_iap_settings` interceptor. The (possibly modified) response returned by + `post_update_iap_settings` will be passed to + `post_update_iap_settings_with_metadata`. + """ + return response, metadata + def pre_update_tunnel_dest_group( self, request: service.UpdateTunnelDestGroupRequest, @@ -381,12 +570,35 @@ def post_update_tunnel_dest_group( ) -> service.TunnelDestGroup: """Post-rpc interceptor for update_tunnel_dest_group - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_tunnel_dest_group_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the IdentityAwareProxyAdminService server but before - it is returned to user code. + it is returned to user code. This `post_update_tunnel_dest_group` interceptor runs + before the `post_update_tunnel_dest_group_with_metadata` interceptor. 
""" return response + def post_update_tunnel_dest_group_with_metadata( + self, + response: service.TunnelDestGroup, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[service.TunnelDestGroup, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_tunnel_dest_group + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the IdentityAwareProxyAdminService server but before it is returned to user code. + + We recommend only using this `post_update_tunnel_dest_group_with_metadata` + interceptor in new development instead of the `post_update_tunnel_dest_group` interceptor. + When both interceptors are used, this `post_update_tunnel_dest_group_with_metadata` interceptor runs after the + `post_update_tunnel_dest_group` interceptor. The (possibly modified) response returned by + `post_update_tunnel_dest_group` will be passed to + `post_update_tunnel_dest_group_with_metadata`. + """ + return response, metadata + @dataclasses.dataclass class IdentityAwareProxyAdminServiceRestStub: @@ -606,6 +818,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_tunnel_dest_group(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_tunnel_dest_group_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -941,6 +1157,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_iam_policy(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_iam_policy_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1084,6 +1304,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_iap_settings(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_iap_settings_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1229,6 +1453,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_tunnel_dest_group(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_tunnel_dest_group_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1376,6 +1604,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_tunnel_dest_groups(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_tunnel_dest_groups_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1602,6 +1834,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_set_iam_policy(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_set_iam_policy_with_metadata( + resp, response_metadata + ) if 
CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1753,6 +1989,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_test_iam_permissions(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_test_iam_permissions_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1903,6 +2143,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_iap_settings(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_iap_settings_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2054,6 +2298,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_tunnel_dest_group(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_tunnel_dest_group_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-iap/google/cloud/iap_v1/services/identity_aware_proxy_o_auth_service/client.py b/packages/google-cloud-iap/google/cloud/iap_v1/services/identity_aware_proxy_o_auth_service/client.py index b9462790468c..1877c163c5fd 100644 --- a/packages/google-cloud-iap/google/cloud/iap_v1/services/identity_aware_proxy_o_auth_service/client.py +++ b/packages/google-cloud-iap/google/cloud/iap_v1/services/identity_aware_proxy_o_auth_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -474,6 +476,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. 
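Taken together, the transport changes above and the interceptor additions in the files that follow implement the "REST Interceptors which support reading metadata" feature from the changelog: every legacy post_<rpc> hook gains a post_<rpc>_with_metadata companion that also receives the response headers flattened into (key, value) pairs, and the generated __call__ wiring invokes the legacy hook first and then the metadata-aware one. The sketch below is an illustrative stand-alone analogue, not the generated GAPIC classes; ExampleRestInterceptor, call_rpc, and LoggingInterceptor are invented names, and the dict-based response body is a simplifying assumption.

# Minimal stand-in sketch (not the generated classes) showing how the new
# `post_*_with_metadata` hook composes with the legacy `post_*` hook.
from typing import Dict, List, Sequence, Tuple


class ExampleRestInterceptor:
    """Toy analogue of a *RestInterceptor exposing both post hooks for one RPC."""

    def post_get_iap_settings(self, response: dict) -> dict:
        # Legacy hook: sees only the decoded response body.
        return response

    def post_get_iap_settings_with_metadata(
        self, response: dict, metadata: Sequence[Tuple[str, str]]
    ) -> Tuple[dict, Sequence[Tuple[str, str]]]:
        # New hook: also sees the response headers as metadata, and runs after
        # the legacy hook, receiving its (possibly modified) result.
        return response, metadata


def call_rpc(
    interceptor: ExampleRestInterceptor, body: dict, headers: Dict[str, str]
) -> dict:
    """Mirrors the wiring added to the transport __call__ methods above."""
    resp = interceptor.post_get_iap_settings(body)
    # Headers are flattened into (key, str(value)) pairs, as in the diff.
    response_metadata: List[Tuple[str, str]] = [(k, str(v)) for k, v in headers.items()]
    resp, _ = interceptor.post_get_iap_settings_with_metadata(resp, response_metadata)
    return resp


class LoggingInterceptor(ExampleRestInterceptor):
    # New development is steered toward the *_with_metadata hook.
    def post_get_iap_settings_with_metadata(self, response, metadata):
        print("server returned headers:", dict(metadata))
        return response, metadata


if __name__ == "__main__":
    out = call_rpc(LoggingInterceptor(), {"name": "iapSettings"}, {"x-request-id": "abc123"})
    print(out)

Note that in the generated wiring the metadata returned by the hook is discarded (resp, _ = ...), so for now the hook is primarily a read/modify point for the response with visibility into the response headers.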
diff --git a/packages/google-cloud-iap/google/cloud/iap_v1/services/identity_aware_proxy_o_auth_service/transports/rest.py b/packages/google-cloud-iap/google/cloud/iap_v1/services/identity_aware_proxy_o_auth_service/transports/rest.py index 3e2aac7d0528..9b2552545d1f 100644 --- a/packages/google-cloud-iap/google/cloud/iap_v1/services/identity_aware_proxy_o_auth_service/transports/rest.py +++ b/packages/google-cloud-iap/google/cloud/iap_v1/services/identity_aware_proxy_o_auth_service/transports/rest.py @@ -150,12 +150,33 @@ def pre_create_brand( def post_create_brand(self, response: service.Brand) -> service.Brand: """Post-rpc interceptor for create_brand - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_brand_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the IdentityAwareProxyOAuthService server but before - it is returned to user code. + it is returned to user code. This `post_create_brand` interceptor runs + before the `post_create_brand_with_metadata` interceptor. """ return response + def post_create_brand_with_metadata( + self, response: service.Brand, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[service.Brand, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_brand + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the IdentityAwareProxyOAuthService server but before it is returned to user code. + + We recommend only using this `post_create_brand_with_metadata` + interceptor in new development instead of the `post_create_brand` interceptor. + When both interceptors are used, this `post_create_brand_with_metadata` interceptor runs after the + `post_create_brand` interceptor. The (possibly modified) response returned by + `post_create_brand` will be passed to + `post_create_brand_with_metadata`. + """ + return response, metadata + def pre_create_identity_aware_proxy_client( self, request: service.CreateIdentityAwareProxyClientRequest, @@ -176,12 +197,37 @@ def post_create_identity_aware_proxy_client( ) -> service.IdentityAwareProxyClient: """Post-rpc interceptor for create_identity_aware_proxy_client - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_identity_aware_proxy_client_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the IdentityAwareProxyOAuthService server but before - it is returned to user code. + it is returned to user code. This `post_create_identity_aware_proxy_client` interceptor runs + before the `post_create_identity_aware_proxy_client_with_metadata` interceptor. """ return response + def post_create_identity_aware_proxy_client_with_metadata( + self, + response: service.IdentityAwareProxyClient, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + service.IdentityAwareProxyClient, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for create_identity_aware_proxy_client + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the IdentityAwareProxyOAuthService server but before it is returned to user code. + + We recommend only using this `post_create_identity_aware_proxy_client_with_metadata` + interceptor in new development instead of the `post_create_identity_aware_proxy_client` interceptor. 
+ When both interceptors are used, this `post_create_identity_aware_proxy_client_with_metadata` interceptor runs after the + `post_create_identity_aware_proxy_client` interceptor. The (possibly modified) response returned by + `post_create_identity_aware_proxy_client` will be passed to + `post_create_identity_aware_proxy_client_with_metadata`. + """ + return response, metadata + def pre_delete_identity_aware_proxy_client( self, request: service.DeleteIdentityAwareProxyClientRequest, @@ -212,12 +258,33 @@ def pre_get_brand( def post_get_brand(self, response: service.Brand) -> service.Brand: """Post-rpc interceptor for get_brand - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_brand_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the IdentityAwareProxyOAuthService server but before - it is returned to user code. + it is returned to user code. This `post_get_brand` interceptor runs + before the `post_get_brand_with_metadata` interceptor. """ return response + def post_get_brand_with_metadata( + self, response: service.Brand, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[service.Brand, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_brand + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the IdentityAwareProxyOAuthService server but before it is returned to user code. + + We recommend only using this `post_get_brand_with_metadata` + interceptor in new development instead of the `post_get_brand` interceptor. + When both interceptors are used, this `post_get_brand_with_metadata` interceptor runs after the + `post_get_brand` interceptor. The (possibly modified) response returned by + `post_get_brand` will be passed to + `post_get_brand_with_metadata`. + """ + return response, metadata + def pre_get_identity_aware_proxy_client( self, request: service.GetIdentityAwareProxyClientRequest, @@ -238,12 +305,37 @@ def post_get_identity_aware_proxy_client( ) -> service.IdentityAwareProxyClient: """Post-rpc interceptor for get_identity_aware_proxy_client - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_identity_aware_proxy_client_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the IdentityAwareProxyOAuthService server but before - it is returned to user code. + it is returned to user code. This `post_get_identity_aware_proxy_client` interceptor runs + before the `post_get_identity_aware_proxy_client_with_metadata` interceptor. """ return response + def post_get_identity_aware_proxy_client_with_metadata( + self, + response: service.IdentityAwareProxyClient, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + service.IdentityAwareProxyClient, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for get_identity_aware_proxy_client + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the IdentityAwareProxyOAuthService server but before it is returned to user code. + + We recommend only using this `post_get_identity_aware_proxy_client_with_metadata` + interceptor in new development instead of the `post_get_identity_aware_proxy_client` interceptor. 
+ When both interceptors are used, this `post_get_identity_aware_proxy_client_with_metadata` interceptor runs after the + `post_get_identity_aware_proxy_client` interceptor. The (possibly modified) response returned by + `post_get_identity_aware_proxy_client` will be passed to + `post_get_identity_aware_proxy_client_with_metadata`. + """ + return response, metadata + def pre_list_brands( self, request: service.ListBrandsRequest, @@ -261,12 +353,35 @@ def post_list_brands( ) -> service.ListBrandsResponse: """Post-rpc interceptor for list_brands - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_brands_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the IdentityAwareProxyOAuthService server but before - it is returned to user code. + it is returned to user code. This `post_list_brands` interceptor runs + before the `post_list_brands_with_metadata` interceptor. """ return response + def post_list_brands_with_metadata( + self, + response: service.ListBrandsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[service.ListBrandsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_brands + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the IdentityAwareProxyOAuthService server but before it is returned to user code. + + We recommend only using this `post_list_brands_with_metadata` + interceptor in new development instead of the `post_list_brands` interceptor. + When both interceptors are used, this `post_list_brands_with_metadata` interceptor runs after the + `post_list_brands` interceptor. The (possibly modified) response returned by + `post_list_brands` will be passed to + `post_list_brands_with_metadata`. + """ + return response, metadata + def pre_list_identity_aware_proxy_clients( self, request: service.ListIdentityAwareProxyClientsRequest, @@ -287,12 +402,38 @@ def post_list_identity_aware_proxy_clients( ) -> service.ListIdentityAwareProxyClientsResponse: """Post-rpc interceptor for list_identity_aware_proxy_clients - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_identity_aware_proxy_clients_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the IdentityAwareProxyOAuthService server but before - it is returned to user code. + it is returned to user code. This `post_list_identity_aware_proxy_clients` interceptor runs + before the `post_list_identity_aware_proxy_clients_with_metadata` interceptor. """ return response + def post_list_identity_aware_proxy_clients_with_metadata( + self, + response: service.ListIdentityAwareProxyClientsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + service.ListIdentityAwareProxyClientsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_identity_aware_proxy_clients + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the IdentityAwareProxyOAuthService server but before it is returned to user code. + + We recommend only using this `post_list_identity_aware_proxy_clients_with_metadata` + interceptor in new development instead of the `post_list_identity_aware_proxy_clients` interceptor. 
+ When both interceptors are used, this `post_list_identity_aware_proxy_clients_with_metadata` interceptor runs after the + `post_list_identity_aware_proxy_clients` interceptor. The (possibly modified) response returned by + `post_list_identity_aware_proxy_clients` will be passed to + `post_list_identity_aware_proxy_clients_with_metadata`. + """ + return response, metadata + def pre_reset_identity_aware_proxy_client_secret( self, request: service.ResetIdentityAwareProxyClientSecretRequest, @@ -313,12 +454,37 @@ def post_reset_identity_aware_proxy_client_secret( ) -> service.IdentityAwareProxyClient: """Post-rpc interceptor for reset_identity_aware_proxy_client_secret - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_reset_identity_aware_proxy_client_secret_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the IdentityAwareProxyOAuthService server but before - it is returned to user code. + it is returned to user code. This `post_reset_identity_aware_proxy_client_secret` interceptor runs + before the `post_reset_identity_aware_proxy_client_secret_with_metadata` interceptor. """ return response + def post_reset_identity_aware_proxy_client_secret_with_metadata( + self, + response: service.IdentityAwareProxyClient, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + service.IdentityAwareProxyClient, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for reset_identity_aware_proxy_client_secret + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the IdentityAwareProxyOAuthService server but before it is returned to user code. + + We recommend only using this `post_reset_identity_aware_proxy_client_secret_with_metadata` + interceptor in new development instead of the `post_reset_identity_aware_proxy_client_secret` interceptor. + When both interceptors are used, this `post_reset_identity_aware_proxy_client_secret_with_metadata` interceptor runs after the + `post_reset_identity_aware_proxy_client_secret` interceptor. The (possibly modified) response returned by + `post_reset_identity_aware_proxy_client_secret` will be passed to + `post_reset_identity_aware_proxy_client_secret_with_metadata`. 
+ """ + return response, metadata + @dataclasses.dataclass class IdentityAwareProxyOAuthServiceRestStub: @@ -541,6 +707,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_brand(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_brand_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -699,6 +869,13 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_identity_aware_proxy_client(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_create_identity_aware_proxy_client_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -963,6 +1140,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_brand(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_brand_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1112,6 +1293,13 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_identity_aware_proxy_client(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_get_identity_aware_proxy_client_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1257,6 +1445,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_brands(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_brands_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1406,6 +1598,13 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_identity_aware_proxy_clients(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_list_identity_aware_proxy_clients_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1566,6 +1765,13 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_reset_identity_aware_proxy_client_secret(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_reset_identity_aware_proxy_client_secret_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-iap/samples/generated_samples/snippet_metadata_google.cloud.iap.v1.json b/packages/google-cloud-iap/samples/generated_samples/snippet_metadata_google.cloud.iap.v1.json index b2206e8f39fd..879911c88b81 100644 --- 
a/packages/google-cloud-iap/samples/generated_samples/snippet_metadata_google.cloud.iap.v1.json +++ b/packages/google-cloud-iap/samples/generated_samples/snippet_metadata_google.cloud.iap.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-iap", - "version": "1.15.0" + "version": "1.16.0" }, "snippets": [ { diff --git a/packages/google-cloud-iap/tests/unit/gapic/iap_v1/test_identity_aware_proxy_admin_service.py b/packages/google-cloud-iap/tests/unit/gapic/iap_v1/test_identity_aware_proxy_admin_service.py index 1e4992bcf69d..4a9d126d0990 100644 --- a/packages/google-cloud-iap/tests/unit/gapic/iap_v1/test_identity_aware_proxy_admin_service.py +++ b/packages/google-cloud-iap/tests/unit/gapic/iap_v1/test_identity_aware_proxy_admin_service.py @@ -67,6 +67,13 @@ ) from google.cloud.iap_v1.types import service +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -357,6 +364,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = IdentityAwareProxyAdminServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = IdentityAwareProxyAdminServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -6768,10 +6818,14 @@ def test_set_iam_policy_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.IdentityAwareProxyAdminServiceRestInterceptor, "post_set_iam_policy" ) as post, mock.patch.object( + transports.IdentityAwareProxyAdminServiceRestInterceptor, + "post_set_iam_policy_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.IdentityAwareProxyAdminServiceRestInterceptor, "pre_set_iam_policy" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = iam_policy_pb2.SetIamPolicyRequest() transcode.return_value = { "method": "post", @@ -6793,6 +6847,7 @@ def test_set_iam_policy_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = policy_pb2.Policy() + post_with_metadata.return_value = policy_pb2.Policy(), metadata 
client.set_iam_policy( request, @@ -6804,6 +6859,7 @@ def test_set_iam_policy_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_iam_policy_rest_bad_request( @@ -6887,10 +6943,14 @@ def test_get_iam_policy_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.IdentityAwareProxyAdminServiceRestInterceptor, "post_get_iam_policy" ) as post, mock.patch.object( + transports.IdentityAwareProxyAdminServiceRestInterceptor, + "post_get_iam_policy_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.IdentityAwareProxyAdminServiceRestInterceptor, "pre_get_iam_policy" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = iam_policy_pb2.GetIamPolicyRequest() transcode.return_value = { "method": "post", @@ -6912,6 +6972,7 @@ def test_get_iam_policy_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = policy_pb2.Policy() + post_with_metadata.return_value = policy_pb2.Policy(), metadata client.get_iam_policy( request, @@ -6923,6 +6984,7 @@ def test_get_iam_policy_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_test_iam_permissions_rest_bad_request( @@ -7005,11 +7067,15 @@ def test_test_iam_permissions_rest_interceptors(null_interceptor): transports.IdentityAwareProxyAdminServiceRestInterceptor, "post_test_iam_permissions", ) as post, mock.patch.object( + transports.IdentityAwareProxyAdminServiceRestInterceptor, + "post_test_iam_permissions_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.IdentityAwareProxyAdminServiceRestInterceptor, "pre_test_iam_permissions", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = iam_policy_pb2.TestIamPermissionsRequest() transcode.return_value = { "method": "post", @@ -7033,6 +7099,10 @@ def test_test_iam_permissions_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = iam_policy_pb2.TestIamPermissionsResponse() + post_with_metadata.return_value = ( + iam_policy_pb2.TestIamPermissionsResponse(), + metadata, + ) client.test_iam_permissions( request, @@ -7044,6 +7114,7 @@ def test_test_iam_permissions_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_iap_settings_rest_bad_request(request_type=service.GetIapSettingsRequest): @@ -7127,10 +7198,14 @@ def test_get_iap_settings_rest_interceptors(null_interceptor): transports.IdentityAwareProxyAdminServiceRestInterceptor, "post_get_iap_settings", ) as post, mock.patch.object( + transports.IdentityAwareProxyAdminServiceRestInterceptor, + "post_get_iap_settings_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.IdentityAwareProxyAdminServiceRestInterceptor, "pre_get_iap_settings" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.GetIapSettingsRequest.pb(service.GetIapSettingsRequest()) transcode.return_value = { "method": "post", @@ -7152,6 +7227,7 @@ def test_get_iap_settings_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = service.IapSettings() + post_with_metadata.return_value = service.IapSettings(), metadata client.get_iap_settings( request, @@ 
-7163,6 +7239,7 @@ def test_get_iap_settings_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_iap_settings_rest_bad_request( @@ -7355,11 +7432,15 @@ def test_update_iap_settings_rest_interceptors(null_interceptor): transports.IdentityAwareProxyAdminServiceRestInterceptor, "post_update_iap_settings", ) as post, mock.patch.object( + transports.IdentityAwareProxyAdminServiceRestInterceptor, + "post_update_iap_settings_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.IdentityAwareProxyAdminServiceRestInterceptor, "pre_update_iap_settings", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.UpdateIapSettingsRequest.pb( service.UpdateIapSettingsRequest() ) @@ -7383,6 +7464,7 @@ def test_update_iap_settings_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = service.IapSettings() + post_with_metadata.return_value = service.IapSettings(), metadata client.update_iap_settings( request, @@ -7394,6 +7476,7 @@ def test_update_iap_settings_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_tunnel_dest_groups_rest_bad_request( @@ -7479,11 +7562,15 @@ def test_list_tunnel_dest_groups_rest_interceptors(null_interceptor): transports.IdentityAwareProxyAdminServiceRestInterceptor, "post_list_tunnel_dest_groups", ) as post, mock.patch.object( + transports.IdentityAwareProxyAdminServiceRestInterceptor, + "post_list_tunnel_dest_groups_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.IdentityAwareProxyAdminServiceRestInterceptor, "pre_list_tunnel_dest_groups", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.ListTunnelDestGroupsRequest.pb( service.ListTunnelDestGroupsRequest() ) @@ -7509,6 +7596,10 @@ def test_list_tunnel_dest_groups_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = service.ListTunnelDestGroupsResponse() + post_with_metadata.return_value = ( + service.ListTunnelDestGroupsResponse(), + metadata, + ) client.list_tunnel_dest_groups( request, @@ -7520,6 +7611,7 @@ def test_list_tunnel_dest_groups_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_tunnel_dest_group_rest_bad_request( @@ -7681,11 +7773,15 @@ def test_create_tunnel_dest_group_rest_interceptors(null_interceptor): transports.IdentityAwareProxyAdminServiceRestInterceptor, "post_create_tunnel_dest_group", ) as post, mock.patch.object( + transports.IdentityAwareProxyAdminServiceRestInterceptor, + "post_create_tunnel_dest_group_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.IdentityAwareProxyAdminServiceRestInterceptor, "pre_create_tunnel_dest_group", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.CreateTunnelDestGroupRequest.pb( service.CreateTunnelDestGroupRequest() ) @@ -7709,6 +7805,7 @@ def test_create_tunnel_dest_group_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = service.TunnelDestGroup() + post_with_metadata.return_value = service.TunnelDestGroup(), metadata client.create_tunnel_dest_group( request, @@ -7720,6 +7817,7 @@ def 
test_create_tunnel_dest_group_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_tunnel_dest_group_rest_bad_request( @@ -7813,11 +7911,15 @@ def test_get_tunnel_dest_group_rest_interceptors(null_interceptor): transports.IdentityAwareProxyAdminServiceRestInterceptor, "post_get_tunnel_dest_group", ) as post, mock.patch.object( + transports.IdentityAwareProxyAdminServiceRestInterceptor, + "post_get_tunnel_dest_group_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.IdentityAwareProxyAdminServiceRestInterceptor, "pre_get_tunnel_dest_group", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.GetTunnelDestGroupRequest.pb( service.GetTunnelDestGroupRequest() ) @@ -7841,6 +7943,7 @@ def test_get_tunnel_dest_group_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = service.TunnelDestGroup() + post_with_metadata.return_value = service.TunnelDestGroup(), metadata client.get_tunnel_dest_group( request, @@ -7852,6 +7955,7 @@ def test_get_tunnel_dest_group_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_tunnel_dest_group_rest_bad_request( @@ -8135,11 +8239,15 @@ def test_update_tunnel_dest_group_rest_interceptors(null_interceptor): transports.IdentityAwareProxyAdminServiceRestInterceptor, "post_update_tunnel_dest_group", ) as post, mock.patch.object( + transports.IdentityAwareProxyAdminServiceRestInterceptor, + "post_update_tunnel_dest_group_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.IdentityAwareProxyAdminServiceRestInterceptor, "pre_update_tunnel_dest_group", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.UpdateTunnelDestGroupRequest.pb( service.UpdateTunnelDestGroupRequest() ) @@ -8163,6 +8271,7 @@ def test_update_tunnel_dest_group_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = service.TunnelDestGroup() + post_with_metadata.return_value = service.TunnelDestGroup(), metadata client.update_tunnel_dest_group( request, @@ -8174,6 +8283,7 @@ def test_update_tunnel_dest_group_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_initialize_client_w_rest(): diff --git a/packages/google-cloud-iap/tests/unit/gapic/iap_v1/test_identity_aware_proxy_o_auth_service.py b/packages/google-cloud-iap/tests/unit/gapic/iap_v1/test_identity_aware_proxy_o_auth_service.py index 6f795a66c6b9..41220715c7f0 100644 --- a/packages/google-cloud-iap/tests/unit/gapic/iap_v1/test_identity_aware_proxy_o_auth_service.py +++ b/packages/google-cloud-iap/tests/unit/gapic/iap_v1/test_identity_aware_proxy_o_auth_service.py @@ -60,6 +60,13 @@ ) from google.cloud.iap_v1.types import service +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -350,6 +357,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
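The parametrized tests added just below exercise the new _add_cred_info_for_auth_errors client helper: for 401/403/404 responses it appends the JSON-encoded result of credentials.get_cred_info() (available from google-auth 2.35.0 onwards) to the error's details, and it leaves other status codes, or credentials without get_cred_info, untouched. The sketch below reproduces that gate in isolation so the test expectations are easy to follow; FakeError and Creds are hypothetical stand-ins for google.api_core.exceptions.GoogleAPICallError and a google-auth credentials object, modeling only the attributes the helper touches.

# Self-contained sketch of the credential-info gate these tests verify.
import json
from http import HTTPStatus


class FakeError(Exception):
    """Stand-in for GoogleAPICallError: only `code` and `_details` are modeled."""

    def __init__(self, message, details=None, code=None):
        super().__init__(message)
        self._details = list(details or [])
        self.code = code

    @property
    def details(self):
        return self._details


def add_cred_info_for_auth_errors(error: FakeError, credentials) -> None:
    """Mirrors the logic of the generated _add_cred_info_for_auth_errors."""
    # Only auth-related failures get the extra detail.
    if error.code not in (
        HTTPStatus.UNAUTHORIZED,  # 401
        HTTPStatus.FORBIDDEN,     # 403
        HTTPStatus.NOT_FOUND,     # 404
    ):
        return
    # get_cred_info() only exists on newer google-auth credentials.
    if not hasattr(credentials, "get_cred_info"):
        return
    cred_info = credentials.get_cred_info()
    if cred_info and hasattr(error._details, "append"):
        error._details.append(json.dumps(cred_info))


if __name__ == "__main__":
    class Creds:
        def get_cred_info(self):
            return {"credential_type": "service account credentials"}

    err = FakeError("denied", details=["foo"], code=403)
    add_cred_info_for_auth_errors(err, Creds())
    assert err.details == ["foo", json.dumps({"credential_type": "service account credentials"})]

    err500 = FakeError("server error", details=["foo"], code=500)
    add_cred_info_for_auth_errors(err500, Creds())
    assert err500.details == ["foo"]
    print("ok")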
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = IdentityAwareProxyOAuthServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = IdentityAwareProxyOAuthServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -5098,10 +5148,14 @@ def test_list_brands_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.IdentityAwareProxyOAuthServiceRestInterceptor, "post_list_brands" ) as post, mock.patch.object( + transports.IdentityAwareProxyOAuthServiceRestInterceptor, + "post_list_brands_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.IdentityAwareProxyOAuthServiceRestInterceptor, "pre_list_brands" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.ListBrandsRequest.pb(service.ListBrandsRequest()) transcode.return_value = { "method": "post", @@ -5123,6 +5177,7 @@ def test_list_brands_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = service.ListBrandsResponse() + post_with_metadata.return_value = service.ListBrandsResponse(), metadata client.list_brands( request, @@ -5134,6 +5189,7 @@ def test_list_brands_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_brand_rest_bad_request(request_type=service.CreateBrandRequest): @@ -5295,10 +5351,14 @@ def test_create_brand_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.IdentityAwareProxyOAuthServiceRestInterceptor, "post_create_brand" ) as post, mock.patch.object( + transports.IdentityAwareProxyOAuthServiceRestInterceptor, + "post_create_brand_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.IdentityAwareProxyOAuthServiceRestInterceptor, "pre_create_brand" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.CreateBrandRequest.pb(service.CreateBrandRequest()) transcode.return_value = { "method": "post", @@ -5320,6 +5380,7 @@ def test_create_brand_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = service.Brand() + post_with_metadata.return_value = service.Brand(), metadata client.create_brand( request, @@ -5331,6 +5392,7 @@ def 
test_create_brand_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_brand_rest_bad_request(request_type=service.GetBrandRequest): @@ -5419,10 +5481,14 @@ def test_get_brand_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.IdentityAwareProxyOAuthServiceRestInterceptor, "post_get_brand" ) as post, mock.patch.object( + transports.IdentityAwareProxyOAuthServiceRestInterceptor, + "post_get_brand_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.IdentityAwareProxyOAuthServiceRestInterceptor, "pre_get_brand" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.GetBrandRequest.pb(service.GetBrandRequest()) transcode.return_value = { "method": "post", @@ -5444,6 +5510,7 @@ def test_get_brand_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = service.Brand() + post_with_metadata.return_value = service.Brand(), metadata client.get_brand( request, @@ -5455,6 +5522,7 @@ def test_get_brand_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_identity_aware_proxy_client_rest_bad_request( @@ -5622,11 +5690,15 @@ def test_create_identity_aware_proxy_client_rest_interceptors(null_interceptor): transports.IdentityAwareProxyOAuthServiceRestInterceptor, "post_create_identity_aware_proxy_client", ) as post, mock.patch.object( + transports.IdentityAwareProxyOAuthServiceRestInterceptor, + "post_create_identity_aware_proxy_client_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.IdentityAwareProxyOAuthServiceRestInterceptor, "pre_create_identity_aware_proxy_client", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.CreateIdentityAwareProxyClientRequest.pb( service.CreateIdentityAwareProxyClientRequest() ) @@ -5652,6 +5724,7 @@ def test_create_identity_aware_proxy_client_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = service.IdentityAwareProxyClient() + post_with_metadata.return_value = service.IdentityAwareProxyClient(), metadata client.create_identity_aware_proxy_client( request, @@ -5663,6 +5736,7 @@ def test_create_identity_aware_proxy_client_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_identity_aware_proxy_clients_rest_bad_request( @@ -5748,11 +5822,15 @@ def test_list_identity_aware_proxy_clients_rest_interceptors(null_interceptor): transports.IdentityAwareProxyOAuthServiceRestInterceptor, "post_list_identity_aware_proxy_clients", ) as post, mock.patch.object( + transports.IdentityAwareProxyOAuthServiceRestInterceptor, + "post_list_identity_aware_proxy_clients_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.IdentityAwareProxyOAuthServiceRestInterceptor, "pre_list_identity_aware_proxy_clients", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.ListIdentityAwareProxyClientsRequest.pb( service.ListIdentityAwareProxyClientsRequest() ) @@ -5778,6 +5856,10 @@ def test_list_identity_aware_proxy_clients_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = 
service.ListIdentityAwareProxyClientsResponse() + post_with_metadata.return_value = ( + service.ListIdentityAwareProxyClientsResponse(), + metadata, + ) client.list_identity_aware_proxy_clients( request, @@ -5789,6 +5871,7 @@ def test_list_identity_aware_proxy_clients_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_identity_aware_proxy_client_rest_bad_request( @@ -5882,11 +5965,15 @@ def test_get_identity_aware_proxy_client_rest_interceptors(null_interceptor): transports.IdentityAwareProxyOAuthServiceRestInterceptor, "post_get_identity_aware_proxy_client", ) as post, mock.patch.object( + transports.IdentityAwareProxyOAuthServiceRestInterceptor, + "post_get_identity_aware_proxy_client_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.IdentityAwareProxyOAuthServiceRestInterceptor, "pre_get_identity_aware_proxy_client", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.GetIdentityAwareProxyClientRequest.pb( service.GetIdentityAwareProxyClientRequest() ) @@ -5912,6 +5999,7 @@ def test_get_identity_aware_proxy_client_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = service.IdentityAwareProxyClient() + post_with_metadata.return_value = service.IdentityAwareProxyClient(), metadata client.get_identity_aware_proxy_client( request, @@ -5923,6 +6011,7 @@ def test_get_identity_aware_proxy_client_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_reset_identity_aware_proxy_client_secret_rest_bad_request( @@ -6016,11 +6105,15 @@ def test_reset_identity_aware_proxy_client_secret_rest_interceptors(null_interce transports.IdentityAwareProxyOAuthServiceRestInterceptor, "post_reset_identity_aware_proxy_client_secret", ) as post, mock.patch.object( + transports.IdentityAwareProxyOAuthServiceRestInterceptor, + "post_reset_identity_aware_proxy_client_secret_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.IdentityAwareProxyOAuthServiceRestInterceptor, "pre_reset_identity_aware_proxy_client_secret", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.ResetIdentityAwareProxyClientSecretRequest.pb( service.ResetIdentityAwareProxyClientSecretRequest() ) @@ -6046,6 +6139,7 @@ def test_reset_identity_aware_proxy_client_secret_rest_interceptors(null_interce ] pre.return_value = request, metadata post.return_value = service.IdentityAwareProxyClient() + post_with_metadata.return_value = service.IdentityAwareProxyClient(), metadata client.reset_identity_aware_proxy_client_secret( request, @@ -6057,6 +6151,7 @@ def test_reset_identity_aware_proxy_client_secret_rest_interceptors(null_interce pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_identity_aware_proxy_client_rest_bad_request( diff --git a/packages/google-cloud-ids/CHANGELOG.md b/packages/google-cloud-ids/CHANGELOG.md index 4c1d2c8ac32c..185304d2f17c 100644 --- a/packages/google-cloud-ids/CHANGELOG.md +++ b/packages/google-cloud-ids/CHANGELOG.md @@ -1,5 +1,13 @@ # Changelog +## [1.10.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-ids-v1.9.0...google-cloud-ids-v1.10.0) (2025-02-12) + + +### Features + +* Add REST Interceptors which support reading metadata 
([908d742](https://github.com/googleapis/google-cloud-python/commit/908d7421a4adadd7407df7ec2a25e25688ff180f)) +* Add support for reading selective GAPIC generation methods from service YAML ([908d742](https://github.com/googleapis/google-cloud-python/commit/908d7421a4adadd7407df7ec2a25e25688ff180f)) + ## [1.9.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-ids-v1.8.1...google-cloud-ids-v1.9.0) (2024-12-12) diff --git a/packages/google-cloud-ids/google/cloud/ids/gapic_version.py b/packages/google-cloud-ids/google/cloud/ids/gapic_version.py index 1c08bcbd1569..d1d2a9e60a97 100644 --- a/packages/google-cloud-ids/google/cloud/ids/gapic_version.py +++ b/packages/google-cloud-ids/google/cloud/ids/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.9.0" # {x-release-please-version} +__version__ = "1.10.0" # {x-release-please-version} diff --git a/packages/google-cloud-ids/google/cloud/ids_v1/gapic_version.py b/packages/google-cloud-ids/google/cloud/ids_v1/gapic_version.py index 1c08bcbd1569..d1d2a9e60a97 100644 --- a/packages/google-cloud-ids/google/cloud/ids_v1/gapic_version.py +++ b/packages/google-cloud-ids/google/cloud/ids_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.9.0" # {x-release-please-version} +__version__ = "1.10.0" # {x-release-please-version} diff --git a/packages/google-cloud-ids/google/cloud/ids_v1/services/ids/client.py b/packages/google-cloud-ids/google/cloud/ids_v1/services/ids/client.py index afad378e5089..7a04194312ef 100644 --- a/packages/google-cloud-ids/google/cloud/ids_v1/services/ids/client.py +++ b/packages/google-cloud-ids/google/cloud/ids_v1/services/ids/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -487,6 +489,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. 
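Editorial note (not part of the diff): the `_add_cred_info_for_auth_errors` helper added to `IDSClient` above appends the JSON-encoded credential info to an error's details when a call fails with 401, 403, or 404 and the transport's credentials expose `get_cred_info()` (available in google-auth>=2.35.0). A minimal sketch of that behavior, mirroring the unit tests added later in this diff and using mock credentials rather than real ones:

import json
from unittest import mock

from google.api_core import exceptions as core_exceptions
from google.cloud.ids_v1 import IDSClient

# Credential info of the shape reported by google-auth's get_cred_info().
cred_info = {
    "credential_type": "service account credentials",
    "principal": "service-account@example.com",
}
cred = mock.Mock(["get_cred_info"])
cred.get_cred_info = mock.Mock(return_value=cred_info)

client = IDSClient(credentials=cred)
client._transport._credentials = cred

# 401/403/404 are the codes the helper reacts to; 403 (FORBIDDEN) shown here.
error = core_exceptions.GoogleAPICallError("message", details=["foo"])
error.code = 403

client._add_cred_info_for_auth_errors(error)
assert error.details == ["foo", json.dumps(cred_info)]  # cred info appended

A 500 error, or a credential object without `get_cred_info`, leaves `error.details` unchanged, as the parametrized tests in this diff check.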
diff --git a/packages/google-cloud-ids/google/cloud/ids_v1/services/ids/transports/rest.py b/packages/google-cloud-ids/google/cloud/ids_v1/services/ids/transports/rest.py index 7b86fb706db8..c4e8b2e35792 100644 --- a/packages/google-cloud-ids/google/cloud/ids_v1/services/ids/transports/rest.py +++ b/packages/google-cloud-ids/google/cloud/ids_v1/services/ids/transports/rest.py @@ -124,12 +124,35 @@ def post_create_endpoint( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_endpoint - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_endpoint_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the IDS server but before - it is returned to user code. + it is returned to user code. This `post_create_endpoint` interceptor runs + before the `post_create_endpoint_with_metadata` interceptor. """ return response + def post_create_endpoint_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_endpoint + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the IDS server but before it is returned to user code. + + We recommend only using this `post_create_endpoint_with_metadata` + interceptor in new development instead of the `post_create_endpoint` interceptor. + When both interceptors are used, this `post_create_endpoint_with_metadata` interceptor runs after the + `post_create_endpoint` interceptor. The (possibly modified) response returned by + `post_create_endpoint` will be passed to + `post_create_endpoint_with_metadata`. + """ + return response, metadata + def pre_delete_endpoint( self, request: ids.DeleteEndpointRequest, @@ -147,12 +170,35 @@ def post_delete_endpoint( ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_endpoint - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_endpoint_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the IDS server but before - it is returned to user code. + it is returned to user code. This `post_delete_endpoint` interceptor runs + before the `post_delete_endpoint_with_metadata` interceptor. """ return response + def post_delete_endpoint_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_endpoint + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the IDS server but before it is returned to user code. + + We recommend only using this `post_delete_endpoint_with_metadata` + interceptor in new development instead of the `post_delete_endpoint` interceptor. + When both interceptors are used, this `post_delete_endpoint_with_metadata` interceptor runs after the + `post_delete_endpoint` interceptor. The (possibly modified) response returned by + `post_delete_endpoint` will be passed to + `post_delete_endpoint_with_metadata`. 
+ """ + return response, metadata + def pre_get_endpoint( self, request: ids.GetEndpointRequest, @@ -168,12 +214,33 @@ def pre_get_endpoint( def post_get_endpoint(self, response: ids.Endpoint) -> ids.Endpoint: """Post-rpc interceptor for get_endpoint - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_endpoint_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the IDS server but before - it is returned to user code. + it is returned to user code. This `post_get_endpoint` interceptor runs + before the `post_get_endpoint_with_metadata` interceptor. """ return response + def post_get_endpoint_with_metadata( + self, response: ids.Endpoint, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[ids.Endpoint, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_endpoint + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the IDS server but before it is returned to user code. + + We recommend only using this `post_get_endpoint_with_metadata` + interceptor in new development instead of the `post_get_endpoint` interceptor. + When both interceptors are used, this `post_get_endpoint_with_metadata` interceptor runs after the + `post_get_endpoint` interceptor. The (possibly modified) response returned by + `post_get_endpoint` will be passed to + `post_get_endpoint_with_metadata`. + """ + return response, metadata + def pre_list_endpoints( self, request: ids.ListEndpointsRequest, @@ -191,12 +258,35 @@ def post_list_endpoints( ) -> ids.ListEndpointsResponse: """Post-rpc interceptor for list_endpoints - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_endpoints_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the IDS server but before - it is returned to user code. + it is returned to user code. This `post_list_endpoints` interceptor runs + before the `post_list_endpoints_with_metadata` interceptor. """ return response + def post_list_endpoints_with_metadata( + self, + response: ids.ListEndpointsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ids.ListEndpointsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_endpoints + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the IDS server but before it is returned to user code. + + We recommend only using this `post_list_endpoints_with_metadata` + interceptor in new development instead of the `post_list_endpoints` interceptor. + When both interceptors are used, this `post_list_endpoints_with_metadata` interceptor runs after the + `post_list_endpoints` interceptor. The (possibly modified) response returned by + `post_list_endpoints` will be passed to + `post_list_endpoints_with_metadata`. 
+ """ + return response, metadata + @dataclasses.dataclass class IDSRestStub: @@ -462,6 +552,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_endpoint(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_endpoint_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -603,6 +697,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_endpoint(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_endpoint_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -747,6 +845,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_endpoint(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_endpoint_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -887,6 +989,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_endpoints(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_endpoints_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-ids/samples/generated_samples/snippet_metadata_google.cloud.ids.v1.json b/packages/google-cloud-ids/samples/generated_samples/snippet_metadata_google.cloud.ids.v1.json index 7450209ba0d1..2417873644f3 100644 --- a/packages/google-cloud-ids/samples/generated_samples/snippet_metadata_google.cloud.ids.v1.json +++ b/packages/google-cloud-ids/samples/generated_samples/snippet_metadata_google.cloud.ids.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-ids", - "version": "1.9.0" + "version": "1.10.0" }, "snippets": [ { diff --git a/packages/google-cloud-ids/tests/unit/gapic/ids_v1/test_ids.py b/packages/google-cloud-ids/tests/unit/gapic/ids_v1/test_ids.py index 5e71ca99976c..48095965cd2e 100644 --- a/packages/google-cloud-ids/tests/unit/gapic/ids_v1/test_ids.py +++ b/packages/google-cloud-ids/tests/unit/gapic/ids_v1/test_ids.py @@ -72,6 +72,13 @@ ) from google.cloud.ids_v1.types import ids +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -283,6 +290,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = IDSClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = IDSClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -3755,10 +3805,13 @@ def test_list_endpoints_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.IDSRestInterceptor, "post_list_endpoints" ) as post, mock.patch.object( + transports.IDSRestInterceptor, "post_list_endpoints_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.IDSRestInterceptor, "pre_list_endpoints" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = ids.ListEndpointsRequest.pb(ids.ListEndpointsRequest()) transcode.return_value = { "method": "post", @@ -3780,6 +3833,7 @@ def test_list_endpoints_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = ids.ListEndpointsResponse() + post_with_metadata.return_value = ids.ListEndpointsResponse(), metadata client.list_endpoints( request, @@ -3791,6 +3845,7 @@ def test_list_endpoints_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_endpoint_rest_bad_request(request_type=ids.GetEndpointRequest): @@ -3885,10 +3940,13 @@ def test_get_endpoint_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.IDSRestInterceptor, "post_get_endpoint" ) as post, mock.patch.object( + transports.IDSRestInterceptor, "post_get_endpoint_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.IDSRestInterceptor, "pre_get_endpoint" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = ids.GetEndpointRequest.pb(ids.GetEndpointRequest()) transcode.return_value = { "method": "post", @@ -3910,6 +3968,7 @@ def test_get_endpoint_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = ids.Endpoint() + post_with_metadata.return_value = ids.Endpoint(), metadata client.get_endpoint( request, @@ -3921,6 +3980,7 @@ def test_get_endpoint_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_endpoint_rest_bad_request(request_type=ids.CreateEndpointRequest): @@ -4077,10 
+4137,13 @@ def test_create_endpoint_rest_interceptors(null_interceptor): ), mock.patch.object( transports.IDSRestInterceptor, "post_create_endpoint" ) as post, mock.patch.object( + transports.IDSRestInterceptor, "post_create_endpoint_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.IDSRestInterceptor, "pre_create_endpoint" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = ids.CreateEndpointRequest.pb(ids.CreateEndpointRequest()) transcode.return_value = { "method": "post", @@ -4102,6 +4165,7 @@ def test_create_endpoint_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_endpoint( request, @@ -4113,6 +4177,7 @@ def test_create_endpoint_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_endpoint_rest_bad_request(request_type=ids.DeleteEndpointRequest): @@ -4189,10 +4254,13 @@ def test_delete_endpoint_rest_interceptors(null_interceptor): ), mock.patch.object( transports.IDSRestInterceptor, "post_delete_endpoint" ) as post, mock.patch.object( + transports.IDSRestInterceptor, "post_delete_endpoint_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.IDSRestInterceptor, "pre_delete_endpoint" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = ids.DeleteEndpointRequest.pb(ids.DeleteEndpointRequest()) transcode.return_value = { "method": "post", @@ -4214,6 +4282,7 @@ def test_delete_endpoint_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.delete_endpoint( request, @@ -4225,6 +4294,7 @@ def test_delete_endpoint_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_initialize_client_w_rest(): diff --git a/packages/google-cloud-kms-inventory/CHANGELOG.md b/packages/google-cloud-kms-inventory/CHANGELOG.md index c364daea3fbf..821e8a6fe605 100644 --- a/packages/google-cloud-kms-inventory/CHANGELOG.md +++ b/packages/google-cloud-kms-inventory/CHANGELOG.md @@ -1,5 +1,13 @@ # Changelog +## [0.2.13](https://github.com/googleapis/google-cloud-python/compare/google-cloud-kms-inventory-v0.2.12...google-cloud-kms-inventory-v0.2.13) (2025-02-12) + + +### Features + +* Add REST Interceptors which support reading metadata ([908d742](https://github.com/googleapis/google-cloud-python/commit/908d7421a4adadd7407df7ec2a25e25688ff180f)) +* Add support for reading selective GAPIC generation methods from service YAML ([908d742](https://github.com/googleapis/google-cloud-python/commit/908d7421a4adadd7407df7ec2a25e25688ff180f)) + ## [0.2.12](https://github.com/googleapis/google-cloud-python/compare/google-cloud-kms-inventory-v0.2.11...google-cloud-kms-inventory-v0.2.12) (2024-12-12) diff --git a/packages/google-cloud-kms-inventory/google/cloud/kms_inventory/gapic_version.py b/packages/google-cloud-kms-inventory/google/cloud/kms_inventory/gapic_version.py index adf4f4805354..92b9be206b5f 100644 --- a/packages/google-cloud-kms-inventory/google/cloud/kms_inventory/gapic_version.py +++ b/packages/google-cloud-kms-inventory/google/cloud/kms_inventory/gapic_version.py @@ -13,4 +13,4 
@@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.2.12" # {x-release-please-version} +__version__ = "0.2.13" # {x-release-please-version} diff --git a/packages/google-cloud-kms-inventory/google/cloud/kms_inventory_v1/gapic_version.py b/packages/google-cloud-kms-inventory/google/cloud/kms_inventory_v1/gapic_version.py index adf4f4805354..92b9be206b5f 100644 --- a/packages/google-cloud-kms-inventory/google/cloud/kms_inventory_v1/gapic_version.py +++ b/packages/google-cloud-kms-inventory/google/cloud/kms_inventory_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.2.12" # {x-release-please-version} +__version__ = "0.2.13" # {x-release-please-version} diff --git a/packages/google-cloud-kms-inventory/google/cloud/kms_inventory_v1/services/key_dashboard_service/client.py b/packages/google-cloud-kms-inventory/google/cloud/kms_inventory_v1/services/key_dashboard_service/client.py index f66d5c301c47..798569c131bf 100644 --- a/packages/google-cloud-kms-inventory/google/cloud/kms_inventory_v1/services/key_dashboard_service/client.py +++ b/packages/google-cloud-kms-inventory/google/cloud/kms_inventory_v1/services/key_dashboard_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -516,6 +518,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. diff --git a/packages/google-cloud-kms-inventory/google/cloud/kms_inventory_v1/services/key_dashboard_service/transports/rest.py b/packages/google-cloud-kms-inventory/google/cloud/kms_inventory_v1/services/key_dashboard_service/transports/rest.py index 6867969724be..29b3b29c3f2c 100644 --- a/packages/google-cloud-kms-inventory/google/cloud/kms_inventory_v1/services/key_dashboard_service/transports/rest.py +++ b/packages/google-cloud-kms-inventory/google/cloud/kms_inventory_v1/services/key_dashboard_service/transports/rest.py @@ -102,12 +102,38 @@ def post_list_crypto_keys( ) -> key_dashboard_service.ListCryptoKeysResponse: """Post-rpc interceptor for list_crypto_keys - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_crypto_keys_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the KeyDashboardService server but before - it is returned to user code. + it is returned to user code. 
This `post_list_crypto_keys` interceptor runs + before the `post_list_crypto_keys_with_metadata` interceptor. """ return response + def post_list_crypto_keys_with_metadata( + self, + response: key_dashboard_service.ListCryptoKeysResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + key_dashboard_service.ListCryptoKeysResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_crypto_keys + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the KeyDashboardService server but before it is returned to user code. + + We recommend only using this `post_list_crypto_keys_with_metadata` + interceptor in new development instead of the `post_list_crypto_keys` interceptor. + When both interceptors are used, this `post_list_crypto_keys_with_metadata` interceptor runs after the + `post_list_crypto_keys` interceptor. The (possibly modified) response returned by + `post_list_crypto_keys` will be passed to + `post_list_crypto_keys_with_metadata`. + """ + return response, metadata + @dataclasses.dataclass class KeyDashboardServiceRestStub: @@ -319,6 +345,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_crypto_keys(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_crypto_keys_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-kms-inventory/google/cloud/kms_inventory_v1/services/key_tracking_service/client.py b/packages/google-cloud-kms-inventory/google/cloud/kms_inventory_v1/services/key_tracking_service/client.py index e51d13829907..1c8170e10382 100644 --- a/packages/google-cloud-kms-inventory/google/cloud/kms_inventory_v1/services/key_tracking_service/client.py +++ b/packages/google-cloud-kms-inventory/google/cloud/kms_inventory_v1/services/key_tracking_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -525,6 +527,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. 
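Editorial note (not part of the diff): the `*_with_metadata` hooks added throughout this change let an interceptor subclass observe both the decoded response and the HTTP response headers, which the transport passes as a sequence of `(header, value)` tuples. A hedged sketch of overriding the new `post_list_crypto_keys_with_metadata` hook on `KeyDashboardServiceRestInterceptor`; the transport wiring (the `interceptor=` argument and anonymous credentials) is an assumption for illustration and is not shown in this diff:

from google.auth.credentials import AnonymousCredentials
from google.cloud import kms_inventory_v1
from google.cloud.kms_inventory_v1.services.key_dashboard_service import transports


class HeaderLoggingInterceptor(transports.KeyDashboardServiceRestInterceptor):
    def post_list_crypto_keys_with_metadata(self, response, metadata):
        # `metadata` is built by the transport from the HTTP response headers
        # as [(header, value), ...]; the response is passed through unchanged.
        for key, value in metadata:
            print(f"list_crypto_keys response header: {key}={value}")
        return response, metadata


# Assumed wiring: generated REST transports accept an `interceptor` argument,
# and the client uses the supplied transport for all calls.
transport = transports.KeyDashboardServiceRestTransport(
    credentials=AnonymousCredentials(),
    interceptor=HeaderLoggingInterceptor(),
)
client = kms_inventory_v1.KeyDashboardServiceClient(transport=transport)

Because the deprecated `post_list_crypto_keys` hook still runs first, its (possibly modified) response is what reaches the `_with_metadata` hook, matching the ordering described in the docstrings above.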
diff --git a/packages/google-cloud-kms-inventory/google/cloud/kms_inventory_v1/services/key_tracking_service/transports/rest.py b/packages/google-cloud-kms-inventory/google/cloud/kms_inventory_v1/services/key_tracking_service/transports/rest.py index 30bada80f35f..a6601efb28cc 100644 --- a/packages/google-cloud-kms-inventory/google/cloud/kms_inventory_v1/services/key_tracking_service/transports/rest.py +++ b/packages/google-cloud-kms-inventory/google/cloud/kms_inventory_v1/services/key_tracking_service/transports/rest.py @@ -110,12 +110,38 @@ def post_get_protected_resources_summary( ) -> key_tracking_service.ProtectedResourcesSummary: """Post-rpc interceptor for get_protected_resources_summary - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_protected_resources_summary_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the KeyTrackingService server but before - it is returned to user code. + it is returned to user code. This `post_get_protected_resources_summary` interceptor runs + before the `post_get_protected_resources_summary_with_metadata` interceptor. """ return response + def post_get_protected_resources_summary_with_metadata( + self, + response: key_tracking_service.ProtectedResourcesSummary, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + key_tracking_service.ProtectedResourcesSummary, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for get_protected_resources_summary + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the KeyTrackingService server but before it is returned to user code. + + We recommend only using this `post_get_protected_resources_summary_with_metadata` + interceptor in new development instead of the `post_get_protected_resources_summary` interceptor. + When both interceptors are used, this `post_get_protected_resources_summary_with_metadata` interceptor runs after the + `post_get_protected_resources_summary` interceptor. The (possibly modified) response returned by + `post_get_protected_resources_summary` will be passed to + `post_get_protected_resources_summary_with_metadata`. + """ + return response, metadata + def pre_search_protected_resources( self, request: key_tracking_service.SearchProtectedResourcesRequest, @@ -136,12 +162,38 @@ def post_search_protected_resources( ) -> key_tracking_service.SearchProtectedResourcesResponse: """Post-rpc interceptor for search_protected_resources - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_search_protected_resources_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the KeyTrackingService server but before - it is returned to user code. + it is returned to user code. This `post_search_protected_resources` interceptor runs + before the `post_search_protected_resources_with_metadata` interceptor. 
""" return response + def post_search_protected_resources_with_metadata( + self, + response: key_tracking_service.SearchProtectedResourcesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + key_tracking_service.SearchProtectedResourcesResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for search_protected_resources + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the KeyTrackingService server but before it is returned to user code. + + We recommend only using this `post_search_protected_resources_with_metadata` + interceptor in new development instead of the `post_search_protected_resources` interceptor. + When both interceptors are used, this `post_search_protected_resources_with_metadata` interceptor runs after the + `post_search_protected_resources` interceptor. The (possibly modified) response returned by + `post_search_protected_resources` will be passed to + `post_search_protected_resources_with_metadata`. + """ + return response, metadata + @dataclasses.dataclass class KeyTrackingServiceRestStub: @@ -356,6 +408,13 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_protected_resources_summary(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_get_protected_resources_summary_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -507,6 +566,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_search_protected_resources(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_search_protected_resources_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-kms-inventory/samples/generated_samples/snippet_metadata_google.cloud.kms.inventory.v1.json b/packages/google-cloud-kms-inventory/samples/generated_samples/snippet_metadata_google.cloud.kms.inventory.v1.json index 7a866f768823..7f05da24ad82 100644 --- a/packages/google-cloud-kms-inventory/samples/generated_samples/snippet_metadata_google.cloud.kms.inventory.v1.json +++ b/packages/google-cloud-kms-inventory/samples/generated_samples/snippet_metadata_google.cloud.kms.inventory.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-kms-inventory", - "version": "0.2.12" + "version": "0.2.13" }, "snippets": [ { diff --git a/packages/google-cloud-kms-inventory/tests/unit/gapic/kms_inventory_v1/test_key_dashboard_service.py b/packages/google-cloud-kms-inventory/tests/unit/gapic/kms_inventory_v1/test_key_dashboard_service.py index 7a90eedfa5f4..bef4068b0454 100644 --- a/packages/google-cloud-kms-inventory/tests/unit/gapic/kms_inventory_v1/test_key_dashboard_service.py +++ b/packages/google-cloud-kms-inventory/tests/unit/gapic/kms_inventory_v1/test_key_dashboard_service.py @@ -61,6 +61,13 @@ ) from google.cloud.kms_inventory_v1.types import key_dashboard_service +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO 
COVER @@ -334,6 +341,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = KeyDashboardServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = KeyDashboardServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -2156,10 +2206,14 @@ def test_list_crypto_keys_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.KeyDashboardServiceRestInterceptor, "post_list_crypto_keys" ) as post, mock.patch.object( + transports.KeyDashboardServiceRestInterceptor, + "post_list_crypto_keys_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.KeyDashboardServiceRestInterceptor, "pre_list_crypto_keys" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = key_dashboard_service.ListCryptoKeysRequest.pb( key_dashboard_service.ListCryptoKeysRequest() ) @@ -2185,6 +2239,10 @@ def test_list_crypto_keys_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = key_dashboard_service.ListCryptoKeysResponse() + post_with_metadata.return_value = ( + key_dashboard_service.ListCryptoKeysResponse(), + metadata, + ) client.list_crypto_keys( request, @@ -2196,6 +2254,7 @@ def test_list_crypto_keys_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_initialize_client_w_rest(): diff --git a/packages/google-cloud-kms-inventory/tests/unit/gapic/kms_inventory_v1/test_key_tracking_service.py b/packages/google-cloud-kms-inventory/tests/unit/gapic/kms_inventory_v1/test_key_tracking_service.py index 5899124ebb9b..8d1a8e43e313 100644 --- a/packages/google-cloud-kms-inventory/tests/unit/gapic/kms_inventory_v1/test_key_tracking_service.py +++ b/packages/google-cloud-kms-inventory/tests/unit/gapic/kms_inventory_v1/test_key_tracking_service.py @@ -60,6 +60,13 @@ ) from google.cloud.kms_inventory_v1.types import key_tracking_service +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER 
@@ -331,6 +338,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = KeyTrackingServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = KeyTrackingServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -2823,11 +2873,15 @@ def test_get_protected_resources_summary_rest_interceptors(null_interceptor): transports.KeyTrackingServiceRestInterceptor, "post_get_protected_resources_summary", ) as post, mock.patch.object( + transports.KeyTrackingServiceRestInterceptor, + "post_get_protected_resources_summary_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.KeyTrackingServiceRestInterceptor, "pre_get_protected_resources_summary", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = key_tracking_service.GetProtectedResourcesSummaryRequest.pb( key_tracking_service.GetProtectedResourcesSummaryRequest() ) @@ -2853,6 +2907,10 @@ def test_get_protected_resources_summary_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = key_tracking_service.ProtectedResourcesSummary() + post_with_metadata.return_value = ( + key_tracking_service.ProtectedResourcesSummary(), + metadata, + ) client.get_protected_resources_summary( request, @@ -2864,6 +2922,7 @@ def test_get_protected_resources_summary_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_search_protected_resources_rest_bad_request( @@ -2950,10 +3009,14 @@ def test_search_protected_resources_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.KeyTrackingServiceRestInterceptor, "post_search_protected_resources" ) as post, mock.patch.object( + transports.KeyTrackingServiceRestInterceptor, + "post_search_protected_resources_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.KeyTrackingServiceRestInterceptor, "pre_search_protected_resources" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = key_tracking_service.SearchProtectedResourcesRequest.pb( key_tracking_service.SearchProtectedResourcesRequest() ) @@ -2979,6 +3042,10 @@ def 
test_search_protected_resources_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = key_tracking_service.SearchProtectedResourcesResponse() + post_with_metadata.return_value = ( + key_tracking_service.SearchProtectedResourcesResponse(), + metadata, + ) client.search_protected_resources( request, @@ -2990,6 +3057,7 @@ def test_search_protected_resources_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_initialize_client_w_rest(): diff --git a/packages/google-cloud-kms/CHANGELOG.md b/packages/google-cloud-kms/CHANGELOG.md index 908c736453a8..c1fb061f940e 100644 --- a/packages/google-cloud-kms/CHANGELOG.md +++ b/packages/google-cloud-kms/CHANGELOG.md @@ -4,6 +4,28 @@ [1]: https://pypi.org/project/google-cloud-kms/#history +## [3.3.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-kms-v3.2.2...google-cloud-kms-v3.3.0) (2025-02-12) + + +### Features + +* Add REST Interceptors which support reading metadata ([908d742](https://github.com/googleapis/google-cloud-python/commit/908d7421a4adadd7407df7ec2a25e25688ff180f)) +* Add support for reading selective GAPIC generation methods from service YAML ([908d742](https://github.com/googleapis/google-cloud-python/commit/908d7421a4adadd7407df7ec2a25e25688ff180f)) + +## [3.2.2](https://github.com/googleapis/google-cloud-python/compare/google-cloud-kms-v3.2.1...google-cloud-kms-v3.2.2) (2025-01-13) + + +### Documentation + +* [google-cloud-kms] modify enum comment ([#13410](https://github.com/googleapis/google-cloud-python/issues/13410)) ([129140e](https://github.com/googleapis/google-cloud-python/commit/129140eeb51c96459b22d9e8fedb26a432d36ff6)) + +## [3.2.1](https://github.com/googleapis/google-cloud-python/compare/google-cloud-kms-v3.2.0...google-cloud-kms-v3.2.1) (2024-12-18) + + +### Documentation + +* [google-cloud-kms] code documentation improvements ([#13366](https://github.com/googleapis/google-cloud-python/issues/13366)) ([0c0f37d](https://github.com/googleapis/google-cloud-python/commit/0c0f37d415a844d29d97a5dba59258d181c8bcc3)) + ## [3.2.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-kms-v3.1.1...google-cloud-kms-v3.2.0) (2024-12-12) diff --git a/packages/google-cloud-kms/google/cloud/kms/gapic_version.py b/packages/google-cloud-kms/google/cloud/kms/gapic_version.py index 349033e81d71..57b675ccdf88 100644 --- a/packages/google-cloud-kms/google/cloud/kms/gapic_version.py +++ b/packages/google-cloud-kms/google/cloud/kms/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "3.2.0" # {x-release-please-version} +__version__ = "3.3.0" # {x-release-please-version} diff --git a/packages/google-cloud-kms/google/cloud/kms_v1/gapic_version.py b/packages/google-cloud-kms/google/cloud/kms_v1/gapic_version.py index 349033e81d71..57b675ccdf88 100644 --- a/packages/google-cloud-kms/google/cloud/kms_v1/gapic_version.py +++ b/packages/google-cloud-kms/google/cloud/kms_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "3.2.0" # {x-release-please-version} +__version__ = "3.3.0" # {x-release-please-version} diff --git a/packages/google-cloud-kms/google/cloud/kms_v1/services/autokey/async_client.py b/packages/google-cloud-kms/google/cloud/kms_v1/services/autokey/async_client.py index eae39a711229..db34c2aabfef 100644 --- a/packages/google-cloud-kms/google/cloud/kms_v1/services/autokey/async_client.py +++ b/packages/google-cloud-kms/google/cloud/kms_v1/services/autokey/async_client.py @@ -321,10 +321,11 @@ async def create_key_handle( triggering the provisioning of a new [CryptoKey][google.cloud.kms.v1.CryptoKey] for CMEK use with the given resource type in the configured key project and the same - location. [GetOperation][Operations.GetOperation] should be used - to resolve the resulting long-running operation and get the - resulting [KeyHandle][google.cloud.kms.v1.KeyHandle] and - [CryptoKey][google.cloud.kms.v1.CryptoKey]. + location. + [GetOperation][google.longrunning.Operations.GetOperation] + should be used to resolve the resulting long-running operation + and get the resulting [KeyHandle][google.cloud.kms.v1.KeyHandle] + and [CryptoKey][google.cloud.kms.v1.CryptoKey]. .. code-block:: python diff --git a/packages/google-cloud-kms/google/cloud/kms_v1/services/autokey/client.py b/packages/google-cloud-kms/google/cloud/kms_v1/services/autokey/client.py index 3bd717e10c60..ec95c1434ae8 100644 --- a/packages/google-cloud-kms/google/cloud/kms_v1/services/autokey/client.py +++ b/packages/google-cloud-kms/google/cloud/kms_v1/services/autokey/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -532,6 +534,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -740,10 +769,11 @@ def create_key_handle( triggering the provisioning of a new [CryptoKey][google.cloud.kms.v1.CryptoKey] for CMEK use with the given resource type in the configured key project and the same - location. [GetOperation][Operations.GetOperation] should be used - to resolve the resulting long-running operation and get the - resulting [KeyHandle][google.cloud.kms.v1.KeyHandle] and - [CryptoKey][google.cloud.kms.v1.CryptoKey]. + location. + [GetOperation][google.longrunning.Operations.GetOperation] + should be used to resolve the resulting long-running operation + and get the resulting [KeyHandle][google.cloud.kms.v1.KeyHandle] + and [CryptoKey][google.cloud.kms.v1.CryptoKey]. .. 
code-block:: python @@ -1166,16 +1196,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_location( self, @@ -1221,16 +1255,20 @@ def get_location( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def list_locations( self, @@ -1276,16 +1314,20 @@ def list_locations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def set_iam_policy( self, @@ -1401,16 +1443,20 @@ def set_iam_policy( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_iam_policy( self, @@ -1527,16 +1573,20 @@ def get_iam_policy( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def test_iam_permissions( self, @@ -1591,16 +1641,20 @@ def test_iam_permissions( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. 
+ return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-kms/google/cloud/kms_v1/services/autokey/transports/grpc.py b/packages/google-cloud-kms/google/cloud/kms_v1/services/autokey/transports/grpc.py index d63dcd61a920..1f68fb308aa5 100644 --- a/packages/google-cloud-kms/google/cloud/kms_v1/services/autokey/transports/grpc.py +++ b/packages/google-cloud-kms/google/cloud/kms_v1/services/autokey/transports/grpc.py @@ -369,10 +369,11 @@ def create_key_handle( triggering the provisioning of a new [CryptoKey][google.cloud.kms.v1.CryptoKey] for CMEK use with the given resource type in the configured key project and the same - location. [GetOperation][Operations.GetOperation] should be used - to resolve the resulting long-running operation and get the - resulting [KeyHandle][google.cloud.kms.v1.KeyHandle] and - [CryptoKey][google.cloud.kms.v1.CryptoKey]. + location. + [GetOperation][google.longrunning.Operations.GetOperation] + should be used to resolve the resulting long-running operation + and get the resulting [KeyHandle][google.cloud.kms.v1.KeyHandle] + and [CryptoKey][google.cloud.kms.v1.CryptoKey]. Returns: Callable[[~.CreateKeyHandleRequest], diff --git a/packages/google-cloud-kms/google/cloud/kms_v1/services/autokey/transports/grpc_asyncio.py b/packages/google-cloud-kms/google/cloud/kms_v1/services/autokey/transports/grpc_asyncio.py index 34bd7a85963c..41243db7a99c 100644 --- a/packages/google-cloud-kms/google/cloud/kms_v1/services/autokey/transports/grpc_asyncio.py +++ b/packages/google-cloud-kms/google/cloud/kms_v1/services/autokey/transports/grpc_asyncio.py @@ -378,10 +378,11 @@ def create_key_handle( triggering the provisioning of a new [CryptoKey][google.cloud.kms.v1.CryptoKey] for CMEK use with the given resource type in the configured key project and the same - location. [GetOperation][Operations.GetOperation] should be used - to resolve the resulting long-running operation and get the - resulting [KeyHandle][google.cloud.kms.v1.KeyHandle] and - [CryptoKey][google.cloud.kms.v1.CryptoKey]. + location. + [GetOperation][google.longrunning.Operations.GetOperation] + should be used to resolve the resulting long-running operation + and get the resulting [KeyHandle][google.cloud.kms.v1.KeyHandle] + and [CryptoKey][google.cloud.kms.v1.CryptoKey]. Returns: Callable[[~.CreateKeyHandleRequest], diff --git a/packages/google-cloud-kms/google/cloud/kms_v1/services/autokey/transports/rest.py b/packages/google-cloud-kms/google/cloud/kms_v1/services/autokey/transports/rest.py index d551a862ad52..7eccdfaa5d73 100644 --- a/packages/google-cloud-kms/google/cloud/kms_v1/services/autokey/transports/rest.py +++ b/packages/google-cloud-kms/google/cloud/kms_v1/services/autokey/transports/rest.py @@ -119,12 +119,35 @@ def post_create_key_handle( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_key_handle - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_key_handle_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Autokey server but before - it is returned to user code. + it is returned to user code. This `post_create_key_handle` interceptor runs + before the `post_create_key_handle_with_metadata` interceptor. 
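The corrected cross-reference above clarifies that `CreateKeyHandle` returns a long-running operation which must be resolved (via `GetOperation`) to obtain the resulting `KeyHandle` and `CryptoKey`. The following is a minimal sketch of how that resolution typically looks through the generated client's LRO wrapper; the project, location, and field values are illustrative assumptions, not taken from this diff.

```python
# Sketch: resolving the CreateKeyHandle long-running operation.
# Resource names and field values below are illustrative assumptions.
from google.cloud import kms_v1

client = kms_v1.AutokeyClient()

request = kms_v1.CreateKeyHandleRequest(
    parent="projects/my-project/locations/us-central1",
    key_handle_id="my-key-handle",
    key_handle=kms_v1.KeyHandle(
        resource_type_selector="compute.googleapis.com/Disk",
    ),
)

# The RPC returns a google.api_core.operation.Operation; result() polls the
# operation (GetOperation under the hood) until the KeyHandle is provisioned.
operation = client.create_key_handle(request=request)
key_handle = operation.result()
print(key_handle.kms_key)  # Name of the CryptoKey provisioned for CMEK use.
```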
""" return response + def post_create_key_handle_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_key_handle + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Autokey server but before it is returned to user code. + + We recommend only using this `post_create_key_handle_with_metadata` + interceptor in new development instead of the `post_create_key_handle` interceptor. + When both interceptors are used, this `post_create_key_handle_with_metadata` interceptor runs after the + `post_create_key_handle` interceptor. The (possibly modified) response returned by + `post_create_key_handle` will be passed to + `post_create_key_handle_with_metadata`. + """ + return response, metadata + def pre_get_key_handle( self, request: autokey.GetKeyHandleRequest, @@ -140,12 +163,35 @@ def pre_get_key_handle( def post_get_key_handle(self, response: autokey.KeyHandle) -> autokey.KeyHandle: """Post-rpc interceptor for get_key_handle - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_key_handle_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Autokey server but before - it is returned to user code. + it is returned to user code. This `post_get_key_handle` interceptor runs + before the `post_get_key_handle_with_metadata` interceptor. """ return response + def post_get_key_handle_with_metadata( + self, + response: autokey.KeyHandle, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[autokey.KeyHandle, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_key_handle + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Autokey server but before it is returned to user code. + + We recommend only using this `post_get_key_handle_with_metadata` + interceptor in new development instead of the `post_get_key_handle` interceptor. + When both interceptors are used, this `post_get_key_handle_with_metadata` interceptor runs after the + `post_get_key_handle` interceptor. The (possibly modified) response returned by + `post_get_key_handle` will be passed to + `post_get_key_handle_with_metadata`. + """ + return response, metadata + def pre_list_key_handles( self, request: autokey.ListKeyHandlesRequest, @@ -163,12 +209,35 @@ def post_list_key_handles( ) -> autokey.ListKeyHandlesResponse: """Post-rpc interceptor for list_key_handles - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_key_handles_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Autokey server but before - it is returned to user code. + it is returned to user code. This `post_list_key_handles` interceptor runs + before the `post_list_key_handles_with_metadata` interceptor. 
""" return response + def post_list_key_handles_with_metadata( + self, + response: autokey.ListKeyHandlesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[autokey.ListKeyHandlesResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_key_handles + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Autokey server but before it is returned to user code. + + We recommend only using this `post_list_key_handles_with_metadata` + interceptor in new development instead of the `post_list_key_handles` interceptor. + When both interceptors are used, this `post_list_key_handles_with_metadata` interceptor runs after the + `post_list_key_handles` interceptor. The (possibly modified) response returned by + `post_list_key_handles` will be passed to + `post_list_key_handles_with_metadata`. + """ + return response, metadata + def pre_get_location( self, request: locations_pb2.GetLocationRequest, @@ -589,6 +658,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_key_handle(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_key_handle_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -735,6 +808,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_key_handle(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_key_handle_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -884,6 +961,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_key_handles(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_key_handles_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-kms/google/cloud/kms_v1/services/autokey_admin/client.py b/packages/google-cloud-kms/google/cloud/kms_v1/services/autokey_admin/client.py index bd64f271589d..b4a57fa5ffc5 100644 --- a/packages/google-cloud-kms/google/cloud/kms_v1/services/autokey_admin/client.py +++ b/packages/google-cloud-kms/google/cloud/kms_v1/services/autokey_admin/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -489,6 +491,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. 
+ """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -1087,16 +1116,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_location( self, @@ -1142,16 +1175,20 @@ def get_location( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def list_locations( self, @@ -1197,16 +1234,20 @@ def list_locations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def set_iam_policy( self, @@ -1322,16 +1363,20 @@ def set_iam_policy( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_iam_policy( self, @@ -1448,16 +1493,20 @@ def get_iam_policy( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def test_iam_permissions( self, @@ -1512,16 +1561,20 @@ def test_iam_permissions( # Validate the universe domain. self._validate_universe_domain() - # Send the request. 
- response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-kms/google/cloud/kms_v1/services/autokey_admin/transports/rest.py b/packages/google-cloud-kms/google/cloud/kms_v1/services/autokey_admin/transports/rest.py index aa01fd2da28c..502f783daf53 100644 --- a/packages/google-cloud-kms/google/cloud/kms_v1/services/autokey_admin/transports/rest.py +++ b/packages/google-cloud-kms/google/cloud/kms_v1/services/autokey_admin/transports/rest.py @@ -121,12 +121,35 @@ def post_get_autokey_config( ) -> autokey_admin.AutokeyConfig: """Post-rpc interceptor for get_autokey_config - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_autokey_config_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AutokeyAdmin server but before - it is returned to user code. + it is returned to user code. This `post_get_autokey_config` interceptor runs + before the `post_get_autokey_config_with_metadata` interceptor. """ return response + def post_get_autokey_config_with_metadata( + self, + response: autokey_admin.AutokeyConfig, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[autokey_admin.AutokeyConfig, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_autokey_config + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AutokeyAdmin server but before it is returned to user code. + + We recommend only using this `post_get_autokey_config_with_metadata` + interceptor in new development instead of the `post_get_autokey_config` interceptor. + When both interceptors are used, this `post_get_autokey_config_with_metadata` interceptor runs after the + `post_get_autokey_config` interceptor. The (possibly modified) response returned by + `post_get_autokey_config` will be passed to + `post_get_autokey_config_with_metadata`. + """ + return response, metadata + def pre_show_effective_autokey_config( self, request: autokey_admin.ShowEffectiveAutokeyConfigRequest, @@ -147,12 +170,38 @@ def post_show_effective_autokey_config( ) -> autokey_admin.ShowEffectiveAutokeyConfigResponse: """Post-rpc interceptor for show_effective_autokey_config - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_show_effective_autokey_config_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AutokeyAdmin server but before - it is returned to user code. + it is returned to user code. This `post_show_effective_autokey_config` interceptor runs + before the `post_show_effective_autokey_config_with_metadata` interceptor. 
""" return response + def post_show_effective_autokey_config_with_metadata( + self, + response: autokey_admin.ShowEffectiveAutokeyConfigResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + autokey_admin.ShowEffectiveAutokeyConfigResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for show_effective_autokey_config + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AutokeyAdmin server but before it is returned to user code. + + We recommend only using this `post_show_effective_autokey_config_with_metadata` + interceptor in new development instead of the `post_show_effective_autokey_config` interceptor. + When both interceptors are used, this `post_show_effective_autokey_config_with_metadata` interceptor runs after the + `post_show_effective_autokey_config` interceptor. The (possibly modified) response returned by + `post_show_effective_autokey_config` will be passed to + `post_show_effective_autokey_config_with_metadata`. + """ + return response, metadata + def pre_update_autokey_config( self, request: autokey_admin.UpdateAutokeyConfigRequest, @@ -173,12 +222,35 @@ def post_update_autokey_config( ) -> autokey_admin.AutokeyConfig: """Post-rpc interceptor for update_autokey_config - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_autokey_config_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AutokeyAdmin server but before - it is returned to user code. + it is returned to user code. This `post_update_autokey_config` interceptor runs + before the `post_update_autokey_config_with_metadata` interceptor. """ return response + def post_update_autokey_config_with_metadata( + self, + response: autokey_admin.AutokeyConfig, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[autokey_admin.AutokeyConfig, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_autokey_config + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AutokeyAdmin server but before it is returned to user code. + + We recommend only using this `post_update_autokey_config_with_metadata` + interceptor in new development instead of the `post_update_autokey_config` interceptor. + When both interceptors are used, this `post_update_autokey_config_with_metadata` interceptor runs after the + `post_update_autokey_config` interceptor. The (possibly modified) response returned by + `post_update_autokey_config` will be passed to + `post_update_autokey_config_with_metadata`. 
+ """ + return response, metadata + def pre_get_location( self, request: locations_pb2.GetLocationRequest, @@ -543,6 +615,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_autokey_config(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_autokey_config_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -692,6 +768,13 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_show_effective_autokey_config(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_show_effective_autokey_config_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -847,6 +930,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_autokey_config(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_autokey_config_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-kms/google/cloud/kms_v1/services/ekm_service/async_client.py b/packages/google-cloud-kms/google/cloud/kms_v1/services/ekm_service/async_client.py index c6ad17cb2c9a..05e74b5822e2 100644 --- a/packages/google-cloud-kms/google/cloud/kms_v1/services/ekm_service/async_client.py +++ b/packages/google-cloud-kms/google/cloud/kms_v1/services/ekm_service/async_client.py @@ -491,7 +491,7 @@ async def sample_get_ekm_connection(): with a [ProtectionLevel][google.cloud.kms.v1.ProtectionLevel] of - [EXTERNAL_VPC][CryptoKeyVersion.ProtectionLevel.EXTERNAL_VPC], + [EXTERNAL_VPC][google.cloud.kms.v1.ProtectionLevel.EXTERNAL_VPC], as well as performing cryptographic operations using keys created within the [EkmConnection][google.cloud.kms.v1.EkmConnection]. @@ -630,7 +630,7 @@ async def sample_create_ekm_connection(): with a [ProtectionLevel][google.cloud.kms.v1.ProtectionLevel] of - [EXTERNAL_VPC][CryptoKeyVersion.ProtectionLevel.EXTERNAL_VPC], + [EXTERNAL_VPC][google.cloud.kms.v1.ProtectionLevel.EXTERNAL_VPC], as well as performing cryptographic operations using keys created within the [EkmConnection][google.cloud.kms.v1.EkmConnection]. @@ -761,7 +761,7 @@ async def sample_update_ekm_connection(): with a [ProtectionLevel][google.cloud.kms.v1.ProtectionLevel] of - [EXTERNAL_VPC][CryptoKeyVersion.ProtectionLevel.EXTERNAL_VPC], + [EXTERNAL_VPC][google.cloud.kms.v1.ProtectionLevel.EXTERNAL_VPC], as well as performing cryptographic operations using keys created within the [EkmConnection][google.cloud.kms.v1.EkmConnection]. @@ -884,7 +884,7 @@ async def sample_get_ekm_config(): with a [ProtectionLevel][google.cloud.kms.v1.ProtectionLevel] of - [EXTERNAL_VPC][CryptoKeyVersion.ProtectionLevel.EXTERNAL_VPC] + [EXTERNAL_VPC][google.cloud.kms.v1.ProtectionLevel.EXTERNAL_VPC] in a given project and location. 
""" @@ -1007,7 +1007,7 @@ async def sample_update_ekm_config(): with a [ProtectionLevel][google.cloud.kms.v1.ProtectionLevel] of - [EXTERNAL_VPC][CryptoKeyVersion.ProtectionLevel.EXTERNAL_VPC] + [EXTERNAL_VPC][google.cloud.kms.v1.ProtectionLevel.EXTERNAL_VPC] in a given project and location. """ diff --git a/packages/google-cloud-kms/google/cloud/kms_v1/services/ekm_service/client.py b/packages/google-cloud-kms/google/cloud/kms_v1/services/ekm_service/client.py index b6b539f536d0..095873754ca2 100644 --- a/packages/google-cloud-kms/google/cloud/kms_v1/services/ekm_service/client.py +++ b/packages/google-cloud-kms/google/cloud/kms_v1/services/ekm_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -538,6 +540,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -922,7 +951,7 @@ def sample_get_ekm_connection(): with a [ProtectionLevel][google.cloud.kms.v1.ProtectionLevel] of - [EXTERNAL_VPC][CryptoKeyVersion.ProtectionLevel.EXTERNAL_VPC], + [EXTERNAL_VPC][google.cloud.kms.v1.ProtectionLevel.EXTERNAL_VPC], as well as performing cryptographic operations using keys created within the [EkmConnection][google.cloud.kms.v1.EkmConnection]. @@ -1058,7 +1087,7 @@ def sample_create_ekm_connection(): with a [ProtectionLevel][google.cloud.kms.v1.ProtectionLevel] of - [EXTERNAL_VPC][CryptoKeyVersion.ProtectionLevel.EXTERNAL_VPC], + [EXTERNAL_VPC][google.cloud.kms.v1.ProtectionLevel.EXTERNAL_VPC], as well as performing cryptographic operations using keys created within the [EkmConnection][google.cloud.kms.v1.EkmConnection]. @@ -1186,7 +1215,7 @@ def sample_update_ekm_connection(): with a [ProtectionLevel][google.cloud.kms.v1.ProtectionLevel] of - [EXTERNAL_VPC][CryptoKeyVersion.ProtectionLevel.EXTERNAL_VPC], + [EXTERNAL_VPC][google.cloud.kms.v1.ProtectionLevel.EXTERNAL_VPC], as well as performing cryptographic operations using keys created within the [EkmConnection][google.cloud.kms.v1.EkmConnection]. @@ -1306,7 +1335,7 @@ def sample_get_ekm_config(): with a [ProtectionLevel][google.cloud.kms.v1.ProtectionLevel] of - [EXTERNAL_VPC][CryptoKeyVersion.ProtectionLevel.EXTERNAL_VPC] + [EXTERNAL_VPC][google.cloud.kms.v1.ProtectionLevel.EXTERNAL_VPC] in a given project and location. 
""" @@ -1426,7 +1455,7 @@ def sample_update_ekm_config(): with a [ProtectionLevel][google.cloud.kms.v1.ProtectionLevel] of - [EXTERNAL_VPC][CryptoKeyVersion.ProtectionLevel.EXTERNAL_VPC] + [EXTERNAL_VPC][google.cloud.kms.v1.ProtectionLevel.EXTERNAL_VPC] in a given project and location. """ @@ -1647,16 +1676,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_location( self, @@ -1702,16 +1735,20 @@ def get_location( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def list_locations( self, @@ -1757,16 +1794,20 @@ def list_locations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def set_iam_policy( self, @@ -1882,16 +1923,20 @@ def set_iam_policy( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_iam_policy( self, @@ -2008,16 +2053,20 @@ def get_iam_policy( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def test_iam_permissions( self, @@ -2072,16 +2121,20 @@ def test_iam_permissions( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. 
+ return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-kms/google/cloud/kms_v1/services/ekm_service/transports/rest.py b/packages/google-cloud-kms/google/cloud/kms_v1/services/ekm_service/transports/rest.py index 996e36202328..3f67a11b005a 100644 --- a/packages/google-cloud-kms/google/cloud/kms_v1/services/ekm_service/transports/rest.py +++ b/packages/google-cloud-kms/google/cloud/kms_v1/services/ekm_service/transports/rest.py @@ -153,12 +153,35 @@ def post_create_ekm_connection( ) -> ekm_service.EkmConnection: """Post-rpc interceptor for create_ekm_connection - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_ekm_connection_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the EkmService server but before - it is returned to user code. + it is returned to user code. This `post_create_ekm_connection` interceptor runs + before the `post_create_ekm_connection_with_metadata` interceptor. """ return response + def post_create_ekm_connection_with_metadata( + self, + response: ekm_service.EkmConnection, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ekm_service.EkmConnection, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_ekm_connection + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the EkmService server but before it is returned to user code. + + We recommend only using this `post_create_ekm_connection_with_metadata` + interceptor in new development instead of the `post_create_ekm_connection` interceptor. + When both interceptors are used, this `post_create_ekm_connection_with_metadata` interceptor runs after the + `post_create_ekm_connection` interceptor. The (possibly modified) response returned by + `post_create_ekm_connection` will be passed to + `post_create_ekm_connection_with_metadata`. + """ + return response, metadata + def pre_get_ekm_config( self, request: ekm_service.GetEkmConfigRequest, @@ -178,12 +201,35 @@ def post_get_ekm_config( ) -> ekm_service.EkmConfig: """Post-rpc interceptor for get_ekm_config - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_ekm_config_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the EkmService server but before - it is returned to user code. + it is returned to user code. This `post_get_ekm_config` interceptor runs + before the `post_get_ekm_config_with_metadata` interceptor. """ return response + def post_get_ekm_config_with_metadata( + self, + response: ekm_service.EkmConfig, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ekm_service.EkmConfig, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_ekm_config + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the EkmService server but before it is returned to user code. + + We recommend only using this `post_get_ekm_config_with_metadata` + interceptor in new development instead of the `post_get_ekm_config` interceptor. + When both interceptors are used, this `post_get_ekm_config_with_metadata` interceptor runs after the + `post_get_ekm_config` interceptor. 
The (possibly modified) response returned by + `post_get_ekm_config` will be passed to + `post_get_ekm_config_with_metadata`. + """ + return response, metadata + def pre_get_ekm_connection( self, request: ekm_service.GetEkmConnectionRequest, @@ -203,12 +249,35 @@ def post_get_ekm_connection( ) -> ekm_service.EkmConnection: """Post-rpc interceptor for get_ekm_connection - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_ekm_connection_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the EkmService server but before - it is returned to user code. + it is returned to user code. This `post_get_ekm_connection` interceptor runs + before the `post_get_ekm_connection_with_metadata` interceptor. """ return response + def post_get_ekm_connection_with_metadata( + self, + response: ekm_service.EkmConnection, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ekm_service.EkmConnection, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_ekm_connection + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the EkmService server but before it is returned to user code. + + We recommend only using this `post_get_ekm_connection_with_metadata` + interceptor in new development instead of the `post_get_ekm_connection` interceptor. + When both interceptors are used, this `post_get_ekm_connection_with_metadata` interceptor runs after the + `post_get_ekm_connection` interceptor. The (possibly modified) response returned by + `post_get_ekm_connection` will be passed to + `post_get_ekm_connection_with_metadata`. + """ + return response, metadata + def pre_list_ekm_connections( self, request: ekm_service.ListEkmConnectionsRequest, @@ -228,12 +297,37 @@ def post_list_ekm_connections( ) -> ekm_service.ListEkmConnectionsResponse: """Post-rpc interceptor for list_ekm_connections - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_ekm_connections_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the EkmService server but before - it is returned to user code. + it is returned to user code. This `post_list_ekm_connections` interceptor runs + before the `post_list_ekm_connections_with_metadata` interceptor. """ return response + def post_list_ekm_connections_with_metadata( + self, + response: ekm_service.ListEkmConnectionsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + ekm_service.ListEkmConnectionsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_ekm_connections + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the EkmService server but before it is returned to user code. + + We recommend only using this `post_list_ekm_connections_with_metadata` + interceptor in new development instead of the `post_list_ekm_connections` interceptor. + When both interceptors are used, this `post_list_ekm_connections_with_metadata` interceptor runs after the + `post_list_ekm_connections` interceptor. The (possibly modified) response returned by + `post_list_ekm_connections` will be passed to + `post_list_ekm_connections_with_metadata`. 
+ """ + return response, metadata + def pre_update_ekm_config( self, request: ekm_service.UpdateEkmConfigRequest, @@ -253,12 +347,35 @@ def post_update_ekm_config( ) -> ekm_service.EkmConfig: """Post-rpc interceptor for update_ekm_config - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_ekm_config_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the EkmService server but before - it is returned to user code. + it is returned to user code. This `post_update_ekm_config` interceptor runs + before the `post_update_ekm_config_with_metadata` interceptor. """ return response + def post_update_ekm_config_with_metadata( + self, + response: ekm_service.EkmConfig, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ekm_service.EkmConfig, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_ekm_config + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the EkmService server but before it is returned to user code. + + We recommend only using this `post_update_ekm_config_with_metadata` + interceptor in new development instead of the `post_update_ekm_config` interceptor. + When both interceptors are used, this `post_update_ekm_config_with_metadata` interceptor runs after the + `post_update_ekm_config` interceptor. The (possibly modified) response returned by + `post_update_ekm_config` will be passed to + `post_update_ekm_config_with_metadata`. + """ + return response, metadata + def pre_update_ekm_connection( self, request: ekm_service.UpdateEkmConnectionRequest, @@ -278,12 +395,35 @@ def post_update_ekm_connection( ) -> ekm_service.EkmConnection: """Post-rpc interceptor for update_ekm_connection - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_ekm_connection_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the EkmService server but before - it is returned to user code. + it is returned to user code. This `post_update_ekm_connection` interceptor runs + before the `post_update_ekm_connection_with_metadata` interceptor. """ return response + def post_update_ekm_connection_with_metadata( + self, + response: ekm_service.EkmConnection, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ekm_service.EkmConnection, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_ekm_connection + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the EkmService server but before it is returned to user code. + + We recommend only using this `post_update_ekm_connection_with_metadata` + interceptor in new development instead of the `post_update_ekm_connection` interceptor. + When both interceptors are used, this `post_update_ekm_connection_with_metadata` interceptor runs after the + `post_update_ekm_connection` interceptor. The (possibly modified) response returned by + `post_update_ekm_connection` will be passed to + `post_update_ekm_connection_with_metadata`. + """ + return response, metadata + def pre_verify_connectivity( self, request: ekm_service.VerifyConnectivityRequest, @@ -303,12 +443,37 @@ def post_verify_connectivity( ) -> ekm_service.VerifyConnectivityResponse: """Post-rpc interceptor for verify_connectivity - Override in a subclass to manipulate the response + DEPRECATED. 
Please use the `post_verify_connectivity_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the EkmService server but before - it is returned to user code. + it is returned to user code. This `post_verify_connectivity` interceptor runs + before the `post_verify_connectivity_with_metadata` interceptor. """ return response + def post_verify_connectivity_with_metadata( + self, + response: ekm_service.VerifyConnectivityResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + ekm_service.VerifyConnectivityResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for verify_connectivity + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the EkmService server but before it is returned to user code. + + We recommend only using this `post_verify_connectivity_with_metadata` + interceptor in new development instead of the `post_verify_connectivity` interceptor. + When both interceptors are used, this `post_verify_connectivity_with_metadata` interceptor runs after the + `post_verify_connectivity` interceptor. The (possibly modified) response returned by + `post_verify_connectivity` will be passed to + `post_verify_connectivity_with_metadata`. + """ + return response, metadata + def pre_get_location( self, request: locations_pb2.GetLocationRequest, @@ -609,7 +774,7 @@ def __call__( with a [ProtectionLevel][google.cloud.kms.v1.ProtectionLevel] of - [EXTERNAL_VPC][CryptoKeyVersion.ProtectionLevel.EXTERNAL_VPC], + [EXTERNAL_VPC][google.cloud.kms.v1.ProtectionLevel.EXTERNAL_VPC], as well as performing cryptographic operations using keys created within the [EkmConnection][google.cloud.kms.v1.EkmConnection]. @@ -686,6 +851,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_ekm_connection(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_ekm_connection_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -769,7 +938,7 @@ def __call__( with a [ProtectionLevel][google.cloud.kms.v1.ProtectionLevel] of - [EXTERNAL_VPC][CryptoKeyVersion.ProtectionLevel.EXTERNAL_VPC] + [EXTERNAL_VPC][google.cloud.kms.v1.ProtectionLevel.EXTERNAL_VPC] in a given project and location. """ @@ -841,6 +1010,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_ekm_config(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_ekm_config_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -924,7 +1097,7 @@ def __call__( with a [ProtectionLevel][google.cloud.kms.v1.ProtectionLevel] of - [EXTERNAL_VPC][CryptoKeyVersion.ProtectionLevel.EXTERNAL_VPC], + [EXTERNAL_VPC][google.cloud.kms.v1.ProtectionLevel.EXTERNAL_VPC], as well as performing cryptographic operations using keys created within the [EkmConnection][google.cloud.kms.v1.EkmConnection]. 
@@ -996,6 +1169,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_ekm_connection(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_ekm_connection_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1141,6 +1318,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_ekm_connections(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_ekm_connections_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1227,7 +1408,7 @@ def __call__( with a [ProtectionLevel][google.cloud.kms.v1.ProtectionLevel] of - [EXTERNAL_VPC][CryptoKeyVersion.ProtectionLevel.EXTERNAL_VPC] + [EXTERNAL_VPC][google.cloud.kms.v1.ProtectionLevel.EXTERNAL_VPC] in a given project and location. """ @@ -1302,6 +1483,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_ekm_config(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_ekm_config_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1386,7 +1571,7 @@ def __call__( with a [ProtectionLevel][google.cloud.kms.v1.ProtectionLevel] of - [EXTERNAL_VPC][CryptoKeyVersion.ProtectionLevel.EXTERNAL_VPC], + [EXTERNAL_VPC][google.cloud.kms.v1.ProtectionLevel.EXTERNAL_VPC], as well as performing cryptographic operations using keys created within the [EkmConnection][google.cloud.kms.v1.EkmConnection]. @@ -1463,6 +1648,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_ekm_connection(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_ekm_connection_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1608,6 +1797,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_verify_connectivity(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_verify_connectivity_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-kms/google/cloud/kms_v1/services/key_management_service/client.py b/packages/google-cloud-kms/google/cloud/kms_v1/services/key_management_service/client.py index 1412bf644c8a..7025098b9883 100644 --- a/packages/google-cloud-kms/google/cloud/kms_v1/services/key_management_service/client.py +++ b/packages/google-cloud-kms/google/cloud/kms_v1/services/key_management_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -605,6 +607,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. 
return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -4399,16 +4428,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_location( self, @@ -4454,16 +4487,20 @@ def get_location( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def list_locations( self, @@ -4509,16 +4546,20 @@ def list_locations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def set_iam_policy( self, @@ -4634,16 +4675,20 @@ def set_iam_policy( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_iam_policy( self, @@ -4760,16 +4805,20 @@ def get_iam_policy( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. 
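From the caller's side, the wrapping shown above means that an auth-related failure on one of these calls may carry an extra JSON entry describing the credential that was used. A hedged sketch of observing it on `get_iam_policy` (one of the methods wrapped in this hunk); the resource name is illustrative and the appended content depends on the credential type and on google-auth >= 2.35.0 being installed:

```python
# Sketch: inspecting credential info appended to a 401/403/404 error.
from google.api_core import exceptions as core_exceptions
from google.cloud import kms_v1
from google.iam.v1 import iam_policy_pb2

client = kms_v1.KeyManagementServiceClient()
request = iam_policy_pb2.GetIamPolicyRequest(
    resource="projects/my-project/locations/us-central1/keyRings/example-ring"
)

try:
    client.get_iam_policy(request=request)
except core_exceptions.GoogleAPICallError as e:
    # With the new wrapping, auth errors may include a JSON string
    # describing the credential that was used.
    for detail in e.details:
        print(detail)
    raise
```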
+ return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def test_iam_permissions( self, @@ -4824,16 +4873,20 @@ def test_iam_permissions( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-kms/google/cloud/kms_v1/services/key_management_service/transports/rest.py b/packages/google-cloud-kms/google/cloud/kms_v1/services/key_management_service/transports/rest.py index c4a13452857d..50f4a1bcc1dd 100644 --- a/packages/google-cloud-kms/google/cloud/kms_v1/services/key_management_service/transports/rest.py +++ b/packages/google-cloud-kms/google/cloud/kms_v1/services/key_management_service/transports/rest.py @@ -321,12 +321,37 @@ def post_asymmetric_decrypt( ) -> service.AsymmetricDecryptResponse: """Post-rpc interceptor for asymmetric_decrypt - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_asymmetric_decrypt_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the KeyManagementService server but before - it is returned to user code. + it is returned to user code. This `post_asymmetric_decrypt` interceptor runs + before the `post_asymmetric_decrypt_with_metadata` interceptor. """ return response + def post_asymmetric_decrypt_with_metadata( + self, + response: service.AsymmetricDecryptResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + service.AsymmetricDecryptResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for asymmetric_decrypt + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the KeyManagementService server but before it is returned to user code. + + We recommend only using this `post_asymmetric_decrypt_with_metadata` + interceptor in new development instead of the `post_asymmetric_decrypt` interceptor. + When both interceptors are used, this `post_asymmetric_decrypt_with_metadata` interceptor runs after the + `post_asymmetric_decrypt` interceptor. The (possibly modified) response returned by + `post_asymmetric_decrypt` will be passed to + `post_asymmetric_decrypt_with_metadata`. + """ + return response, metadata + def pre_asymmetric_sign( self, request: service.AsymmetricSignRequest, @@ -344,12 +369,35 @@ def post_asymmetric_sign( ) -> service.AsymmetricSignResponse: """Post-rpc interceptor for asymmetric_sign - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_asymmetric_sign_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the KeyManagementService server but before - it is returned to user code. + it is returned to user code. This `post_asymmetric_sign` interceptor runs + before the `post_asymmetric_sign_with_metadata` interceptor. 
""" return response + def post_asymmetric_sign_with_metadata( + self, + response: service.AsymmetricSignResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[service.AsymmetricSignResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for asymmetric_sign + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the KeyManagementService server but before it is returned to user code. + + We recommend only using this `post_asymmetric_sign_with_metadata` + interceptor in new development instead of the `post_asymmetric_sign` interceptor. + When both interceptors are used, this `post_asymmetric_sign_with_metadata` interceptor runs after the + `post_asymmetric_sign` interceptor. The (possibly modified) response returned by + `post_asymmetric_sign` will be passed to + `post_asymmetric_sign_with_metadata`. + """ + return response, metadata + def pre_create_crypto_key( self, request: service.CreateCryptoKeyRequest, @@ -367,12 +415,35 @@ def post_create_crypto_key( ) -> resources.CryptoKey: """Post-rpc interceptor for create_crypto_key - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_crypto_key_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the KeyManagementService server but before - it is returned to user code. + it is returned to user code. This `post_create_crypto_key` interceptor runs + before the `post_create_crypto_key_with_metadata` interceptor. """ return response + def post_create_crypto_key_with_metadata( + self, + response: resources.CryptoKey, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.CryptoKey, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_crypto_key + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the KeyManagementService server but before it is returned to user code. + + We recommend only using this `post_create_crypto_key_with_metadata` + interceptor in new development instead of the `post_create_crypto_key` interceptor. + When both interceptors are used, this `post_create_crypto_key_with_metadata` interceptor runs after the + `post_create_crypto_key` interceptor. The (possibly modified) response returned by + `post_create_crypto_key` will be passed to + `post_create_crypto_key_with_metadata`. + """ + return response, metadata + def pre_create_crypto_key_version( self, request: service.CreateCryptoKeyVersionRequest, @@ -392,12 +463,35 @@ def post_create_crypto_key_version( ) -> resources.CryptoKeyVersion: """Post-rpc interceptor for create_crypto_key_version - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_crypto_key_version_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the KeyManagementService server but before - it is returned to user code. + it is returned to user code. This `post_create_crypto_key_version` interceptor runs + before the `post_create_crypto_key_version_with_metadata` interceptor. 
""" return response + def post_create_crypto_key_version_with_metadata( + self, + response: resources.CryptoKeyVersion, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.CryptoKeyVersion, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_crypto_key_version + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the KeyManagementService server but before it is returned to user code. + + We recommend only using this `post_create_crypto_key_version_with_metadata` + interceptor in new development instead of the `post_create_crypto_key_version` interceptor. + When both interceptors are used, this `post_create_crypto_key_version_with_metadata` interceptor runs after the + `post_create_crypto_key_version` interceptor. The (possibly modified) response returned by + `post_create_crypto_key_version` will be passed to + `post_create_crypto_key_version_with_metadata`. + """ + return response, metadata + def pre_create_import_job( self, request: service.CreateImportJobRequest, @@ -415,12 +509,35 @@ def post_create_import_job( ) -> resources.ImportJob: """Post-rpc interceptor for create_import_job - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_import_job_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the KeyManagementService server but before - it is returned to user code. + it is returned to user code. This `post_create_import_job` interceptor runs + before the `post_create_import_job_with_metadata` interceptor. """ return response + def post_create_import_job_with_metadata( + self, + response: resources.ImportJob, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.ImportJob, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_import_job + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the KeyManagementService server but before it is returned to user code. + + We recommend only using this `post_create_import_job_with_metadata` + interceptor in new development instead of the `post_create_import_job` interceptor. + When both interceptors are used, this `post_create_import_job_with_metadata` interceptor runs after the + `post_create_import_job` interceptor. The (possibly modified) response returned by + `post_create_import_job` will be passed to + `post_create_import_job_with_metadata`. + """ + return response, metadata + def pre_create_key_ring( self, request: service.CreateKeyRingRequest, @@ -436,12 +553,35 @@ def pre_create_key_ring( def post_create_key_ring(self, response: resources.KeyRing) -> resources.KeyRing: """Post-rpc interceptor for create_key_ring - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_key_ring_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the KeyManagementService server but before - it is returned to user code. + it is returned to user code. This `post_create_key_ring` interceptor runs + before the `post_create_key_ring_with_metadata` interceptor. 
""" return response + def post_create_key_ring_with_metadata( + self, + response: resources.KeyRing, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.KeyRing, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_key_ring + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the KeyManagementService server but before it is returned to user code. + + We recommend only using this `post_create_key_ring_with_metadata` + interceptor in new development instead of the `post_create_key_ring` interceptor. + When both interceptors are used, this `post_create_key_ring_with_metadata` interceptor runs after the + `post_create_key_ring` interceptor. The (possibly modified) response returned by + `post_create_key_ring` will be passed to + `post_create_key_ring_with_metadata`. + """ + return response, metadata + def pre_decrypt( self, request: service.DecryptRequest, @@ -459,12 +599,35 @@ def post_decrypt( ) -> service.DecryptResponse: """Post-rpc interceptor for decrypt - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_decrypt_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the KeyManagementService server but before - it is returned to user code. + it is returned to user code. This `post_decrypt` interceptor runs + before the `post_decrypt_with_metadata` interceptor. """ return response + def post_decrypt_with_metadata( + self, + response: service.DecryptResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[service.DecryptResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for decrypt + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the KeyManagementService server but before it is returned to user code. + + We recommend only using this `post_decrypt_with_metadata` + interceptor in new development instead of the `post_decrypt` interceptor. + When both interceptors are used, this `post_decrypt_with_metadata` interceptor runs after the + `post_decrypt` interceptor. The (possibly modified) response returned by + `post_decrypt` will be passed to + `post_decrypt_with_metadata`. + """ + return response, metadata + def pre_destroy_crypto_key_version( self, request: service.DestroyCryptoKeyVersionRequest, @@ -484,12 +647,35 @@ def post_destroy_crypto_key_version( ) -> resources.CryptoKeyVersion: """Post-rpc interceptor for destroy_crypto_key_version - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_destroy_crypto_key_version_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the KeyManagementService server but before - it is returned to user code. + it is returned to user code. This `post_destroy_crypto_key_version` interceptor runs + before the `post_destroy_crypto_key_version_with_metadata` interceptor. """ return response + def post_destroy_crypto_key_version_with_metadata( + self, + response: resources.CryptoKeyVersion, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.CryptoKeyVersion, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for destroy_crypto_key_version + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the KeyManagementService server but before it is returned to user code. 
+ + We recommend only using this `post_destroy_crypto_key_version_with_metadata` + interceptor in new development instead of the `post_destroy_crypto_key_version` interceptor. + When both interceptors are used, this `post_destroy_crypto_key_version_with_metadata` interceptor runs after the + `post_destroy_crypto_key_version` interceptor. The (possibly modified) response returned by + `post_destroy_crypto_key_version` will be passed to + `post_destroy_crypto_key_version_with_metadata`. + """ + return response, metadata + def pre_encrypt( self, request: service.EncryptRequest, @@ -507,12 +693,35 @@ def post_encrypt( ) -> service.EncryptResponse: """Post-rpc interceptor for encrypt - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_encrypt_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the KeyManagementService server but before - it is returned to user code. + it is returned to user code. This `post_encrypt` interceptor runs + before the `post_encrypt_with_metadata` interceptor. """ return response + def post_encrypt_with_metadata( + self, + response: service.EncryptResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[service.EncryptResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for encrypt + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the KeyManagementService server but before it is returned to user code. + + We recommend only using this `post_encrypt_with_metadata` + interceptor in new development instead of the `post_encrypt` interceptor. + When both interceptors are used, this `post_encrypt_with_metadata` interceptor runs after the + `post_encrypt` interceptor. The (possibly modified) response returned by + `post_encrypt` will be passed to + `post_encrypt_with_metadata`. + """ + return response, metadata + def pre_generate_random_bytes( self, request: service.GenerateRandomBytesRequest, @@ -532,12 +741,37 @@ def post_generate_random_bytes( ) -> service.GenerateRandomBytesResponse: """Post-rpc interceptor for generate_random_bytes - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_generate_random_bytes_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the KeyManagementService server but before - it is returned to user code. + it is returned to user code. This `post_generate_random_bytes` interceptor runs + before the `post_generate_random_bytes_with_metadata` interceptor. """ return response + def post_generate_random_bytes_with_metadata( + self, + response: service.GenerateRandomBytesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + service.GenerateRandomBytesResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for generate_random_bytes + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the KeyManagementService server but before it is returned to user code. + + We recommend only using this `post_generate_random_bytes_with_metadata` + interceptor in new development instead of the `post_generate_random_bytes` interceptor. + When both interceptors are used, this `post_generate_random_bytes_with_metadata` interceptor runs after the + `post_generate_random_bytes` interceptor. 
The (possibly modified) response returned by + `post_generate_random_bytes` will be passed to + `post_generate_random_bytes_with_metadata`. + """ + return response, metadata + def pre_get_crypto_key( self, request: service.GetCryptoKeyRequest, @@ -553,12 +787,35 @@ def pre_get_crypto_key( def post_get_crypto_key(self, response: resources.CryptoKey) -> resources.CryptoKey: """Post-rpc interceptor for get_crypto_key - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_crypto_key_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the KeyManagementService server but before - it is returned to user code. + it is returned to user code. This `post_get_crypto_key` interceptor runs + before the `post_get_crypto_key_with_metadata` interceptor. """ return response + def post_get_crypto_key_with_metadata( + self, + response: resources.CryptoKey, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.CryptoKey, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_crypto_key + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the KeyManagementService server but before it is returned to user code. + + We recommend only using this `post_get_crypto_key_with_metadata` + interceptor in new development instead of the `post_get_crypto_key` interceptor. + When both interceptors are used, this `post_get_crypto_key_with_metadata` interceptor runs after the + `post_get_crypto_key` interceptor. The (possibly modified) response returned by + `post_get_crypto_key` will be passed to + `post_get_crypto_key_with_metadata`. + """ + return response, metadata + def pre_get_crypto_key_version( self, request: service.GetCryptoKeyVersionRequest, @@ -578,12 +835,35 @@ def post_get_crypto_key_version( ) -> resources.CryptoKeyVersion: """Post-rpc interceptor for get_crypto_key_version - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_crypto_key_version_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the KeyManagementService server but before - it is returned to user code. + it is returned to user code. This `post_get_crypto_key_version` interceptor runs + before the `post_get_crypto_key_version_with_metadata` interceptor. """ return response + def post_get_crypto_key_version_with_metadata( + self, + response: resources.CryptoKeyVersion, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.CryptoKeyVersion, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_crypto_key_version + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the KeyManagementService server but before it is returned to user code. + + We recommend only using this `post_get_crypto_key_version_with_metadata` + interceptor in new development instead of the `post_get_crypto_key_version` interceptor. + When both interceptors are used, this `post_get_crypto_key_version_with_metadata` interceptor runs after the + `post_get_crypto_key_version` interceptor. The (possibly modified) response returned by + `post_get_crypto_key_version` will be passed to + `post_get_crypto_key_version_with_metadata`. 
+ """ + return response, metadata + def pre_get_import_job( self, request: service.GetImportJobRequest, @@ -599,12 +879,35 @@ def pre_get_import_job( def post_get_import_job(self, response: resources.ImportJob) -> resources.ImportJob: """Post-rpc interceptor for get_import_job - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_import_job_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the KeyManagementService server but before - it is returned to user code. + it is returned to user code. This `post_get_import_job` interceptor runs + before the `post_get_import_job_with_metadata` interceptor. """ return response + def post_get_import_job_with_metadata( + self, + response: resources.ImportJob, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.ImportJob, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_import_job + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the KeyManagementService server but before it is returned to user code. + + We recommend only using this `post_get_import_job_with_metadata` + interceptor in new development instead of the `post_get_import_job` interceptor. + When both interceptors are used, this `post_get_import_job_with_metadata` interceptor runs after the + `post_get_import_job` interceptor. The (possibly modified) response returned by + `post_get_import_job` will be passed to + `post_get_import_job_with_metadata`. + """ + return response, metadata + def pre_get_key_ring( self, request: service.GetKeyRingRequest, @@ -620,12 +923,35 @@ def pre_get_key_ring( def post_get_key_ring(self, response: resources.KeyRing) -> resources.KeyRing: """Post-rpc interceptor for get_key_ring - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_key_ring_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the KeyManagementService server but before - it is returned to user code. + it is returned to user code. This `post_get_key_ring` interceptor runs + before the `post_get_key_ring_with_metadata` interceptor. """ return response + def post_get_key_ring_with_metadata( + self, + response: resources.KeyRing, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.KeyRing, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_key_ring + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the KeyManagementService server but before it is returned to user code. + + We recommend only using this `post_get_key_ring_with_metadata` + interceptor in new development instead of the `post_get_key_ring` interceptor. + When both interceptors are used, this `post_get_key_ring_with_metadata` interceptor runs after the + `post_get_key_ring` interceptor. The (possibly modified) response returned by + `post_get_key_ring` will be passed to + `post_get_key_ring_with_metadata`. + """ + return response, metadata + def pre_get_public_key( self, request: service.GetPublicKeyRequest, @@ -641,12 +967,35 @@ def pre_get_public_key( def post_get_public_key(self, response: resources.PublicKey) -> resources.PublicKey: """Post-rpc interceptor for get_public_key - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_public_key_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response after it is returned by the KeyManagementService server but before - it is returned to user code. + it is returned to user code. This `post_get_public_key` interceptor runs + before the `post_get_public_key_with_metadata` interceptor. """ return response + def post_get_public_key_with_metadata( + self, + response: resources.PublicKey, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.PublicKey, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_public_key + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the KeyManagementService server but before it is returned to user code. + + We recommend only using this `post_get_public_key_with_metadata` + interceptor in new development instead of the `post_get_public_key` interceptor. + When both interceptors are used, this `post_get_public_key_with_metadata` interceptor runs after the + `post_get_public_key` interceptor. The (possibly modified) response returned by + `post_get_public_key` will be passed to + `post_get_public_key_with_metadata`. + """ + return response, metadata + def pre_import_crypto_key_version( self, request: service.ImportCryptoKeyVersionRequest, @@ -666,12 +1015,35 @@ def post_import_crypto_key_version( ) -> resources.CryptoKeyVersion: """Post-rpc interceptor for import_crypto_key_version - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_import_crypto_key_version_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the KeyManagementService server but before - it is returned to user code. + it is returned to user code. This `post_import_crypto_key_version` interceptor runs + before the `post_import_crypto_key_version_with_metadata` interceptor. """ return response + def post_import_crypto_key_version_with_metadata( + self, + response: resources.CryptoKeyVersion, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.CryptoKeyVersion, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for import_crypto_key_version + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the KeyManagementService server but before it is returned to user code. + + We recommend only using this `post_import_crypto_key_version_with_metadata` + interceptor in new development instead of the `post_import_crypto_key_version` interceptor. + When both interceptors are used, this `post_import_crypto_key_version_with_metadata` interceptor runs after the + `post_import_crypto_key_version` interceptor. The (possibly modified) response returned by + `post_import_crypto_key_version` will be passed to + `post_import_crypto_key_version_with_metadata`. + """ + return response, metadata + def pre_list_crypto_keys( self, request: service.ListCryptoKeysRequest, @@ -689,12 +1061,35 @@ def post_list_crypto_keys( ) -> service.ListCryptoKeysResponse: """Post-rpc interceptor for list_crypto_keys - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_crypto_keys_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the KeyManagementService server but before - it is returned to user code. + it is returned to user code. This `post_list_crypto_keys` interceptor runs + before the `post_list_crypto_keys_with_metadata` interceptor. 
""" return response + def post_list_crypto_keys_with_metadata( + self, + response: service.ListCryptoKeysResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[service.ListCryptoKeysResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_crypto_keys + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the KeyManagementService server but before it is returned to user code. + + We recommend only using this `post_list_crypto_keys_with_metadata` + interceptor in new development instead of the `post_list_crypto_keys` interceptor. + When both interceptors are used, this `post_list_crypto_keys_with_metadata` interceptor runs after the + `post_list_crypto_keys` interceptor. The (possibly modified) response returned by + `post_list_crypto_keys` will be passed to + `post_list_crypto_keys_with_metadata`. + """ + return response, metadata + def pre_list_crypto_key_versions( self, request: service.ListCryptoKeyVersionsRequest, @@ -714,12 +1109,37 @@ def post_list_crypto_key_versions( ) -> service.ListCryptoKeyVersionsResponse: """Post-rpc interceptor for list_crypto_key_versions - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_crypto_key_versions_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the KeyManagementService server but before - it is returned to user code. + it is returned to user code. This `post_list_crypto_key_versions` interceptor runs + before the `post_list_crypto_key_versions_with_metadata` interceptor. """ return response + def post_list_crypto_key_versions_with_metadata( + self, + response: service.ListCryptoKeyVersionsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + service.ListCryptoKeyVersionsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_crypto_key_versions + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the KeyManagementService server but before it is returned to user code. + + We recommend only using this `post_list_crypto_key_versions_with_metadata` + interceptor in new development instead of the `post_list_crypto_key_versions` interceptor. + When both interceptors are used, this `post_list_crypto_key_versions_with_metadata` interceptor runs after the + `post_list_crypto_key_versions` interceptor. The (possibly modified) response returned by + `post_list_crypto_key_versions` will be passed to + `post_list_crypto_key_versions_with_metadata`. + """ + return response, metadata + def pre_list_import_jobs( self, request: service.ListImportJobsRequest, @@ -737,12 +1157,35 @@ def post_list_import_jobs( ) -> service.ListImportJobsResponse: """Post-rpc interceptor for list_import_jobs - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_import_jobs_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the KeyManagementService server but before - it is returned to user code. + it is returned to user code. This `post_list_import_jobs` interceptor runs + before the `post_list_import_jobs_with_metadata` interceptor. 
""" return response + def post_list_import_jobs_with_metadata( + self, + response: service.ListImportJobsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[service.ListImportJobsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_import_jobs + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the KeyManagementService server but before it is returned to user code. + + We recommend only using this `post_list_import_jobs_with_metadata` + interceptor in new development instead of the `post_list_import_jobs` interceptor. + When both interceptors are used, this `post_list_import_jobs_with_metadata` interceptor runs after the + `post_list_import_jobs` interceptor. The (possibly modified) response returned by + `post_list_import_jobs` will be passed to + `post_list_import_jobs_with_metadata`. + """ + return response, metadata + def pre_list_key_rings( self, request: service.ListKeyRingsRequest, @@ -760,12 +1203,35 @@ def post_list_key_rings( ) -> service.ListKeyRingsResponse: """Post-rpc interceptor for list_key_rings - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_key_rings_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the KeyManagementService server but before - it is returned to user code. + it is returned to user code. This `post_list_key_rings` interceptor runs + before the `post_list_key_rings_with_metadata` interceptor. """ return response + def post_list_key_rings_with_metadata( + self, + response: service.ListKeyRingsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[service.ListKeyRingsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_key_rings + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the KeyManagementService server but before it is returned to user code. + + We recommend only using this `post_list_key_rings_with_metadata` + interceptor in new development instead of the `post_list_key_rings` interceptor. + When both interceptors are used, this `post_list_key_rings_with_metadata` interceptor runs after the + `post_list_key_rings` interceptor. The (possibly modified) response returned by + `post_list_key_rings` will be passed to + `post_list_key_rings_with_metadata`. + """ + return response, metadata + def pre_mac_sign( self, request: service.MacSignRequest, @@ -783,12 +1249,35 @@ def post_mac_sign( ) -> service.MacSignResponse: """Post-rpc interceptor for mac_sign - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_mac_sign_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the KeyManagementService server but before - it is returned to user code. + it is returned to user code. This `post_mac_sign` interceptor runs + before the `post_mac_sign_with_metadata` interceptor. """ return response + def post_mac_sign_with_metadata( + self, + response: service.MacSignResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[service.MacSignResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for mac_sign + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the KeyManagementService server but before it is returned to user code. 
+ + We recommend only using this `post_mac_sign_with_metadata` + interceptor in new development instead of the `post_mac_sign` interceptor. + When both interceptors are used, this `post_mac_sign_with_metadata` interceptor runs after the + `post_mac_sign` interceptor. The (possibly modified) response returned by + `post_mac_sign` will be passed to + `post_mac_sign_with_metadata`. + """ + return response, metadata + def pre_mac_verify( self, request: service.MacVerifyRequest, @@ -806,12 +1295,35 @@ def post_mac_verify( ) -> service.MacVerifyResponse: """Post-rpc interceptor for mac_verify - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_mac_verify_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the KeyManagementService server but before - it is returned to user code. + it is returned to user code. This `post_mac_verify` interceptor runs + before the `post_mac_verify_with_metadata` interceptor. """ return response + def post_mac_verify_with_metadata( + self, + response: service.MacVerifyResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[service.MacVerifyResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for mac_verify + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the KeyManagementService server but before it is returned to user code. + + We recommend only using this `post_mac_verify_with_metadata` + interceptor in new development instead of the `post_mac_verify` interceptor. + When both interceptors are used, this `post_mac_verify_with_metadata` interceptor runs after the + `post_mac_verify` interceptor. The (possibly modified) response returned by + `post_mac_verify` will be passed to + `post_mac_verify_with_metadata`. + """ + return response, metadata + def pre_raw_decrypt( self, request: service.RawDecryptRequest, @@ -829,12 +1341,35 @@ def post_raw_decrypt( ) -> service.RawDecryptResponse: """Post-rpc interceptor for raw_decrypt - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_raw_decrypt_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the KeyManagementService server but before - it is returned to user code. + it is returned to user code. This `post_raw_decrypt` interceptor runs + before the `post_raw_decrypt_with_metadata` interceptor. """ return response + def post_raw_decrypt_with_metadata( + self, + response: service.RawDecryptResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[service.RawDecryptResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for raw_decrypt + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the KeyManagementService server but before it is returned to user code. + + We recommend only using this `post_raw_decrypt_with_metadata` + interceptor in new development instead of the `post_raw_decrypt` interceptor. + When both interceptors are used, this `post_raw_decrypt_with_metadata` interceptor runs after the + `post_raw_decrypt` interceptor. The (possibly modified) response returned by + `post_raw_decrypt` will be passed to + `post_raw_decrypt_with_metadata`. 
+ """ + return response, metadata + def pre_raw_encrypt( self, request: service.RawEncryptRequest, @@ -852,12 +1387,35 @@ def post_raw_encrypt( ) -> service.RawEncryptResponse: """Post-rpc interceptor for raw_encrypt - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_raw_encrypt_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the KeyManagementService server but before - it is returned to user code. + it is returned to user code. This `post_raw_encrypt` interceptor runs + before the `post_raw_encrypt_with_metadata` interceptor. """ return response + def post_raw_encrypt_with_metadata( + self, + response: service.RawEncryptResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[service.RawEncryptResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for raw_encrypt + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the KeyManagementService server but before it is returned to user code. + + We recommend only using this `post_raw_encrypt_with_metadata` + interceptor in new development instead of the `post_raw_encrypt` interceptor. + When both interceptors are used, this `post_raw_encrypt_with_metadata` interceptor runs after the + `post_raw_encrypt` interceptor. The (possibly modified) response returned by + `post_raw_encrypt` will be passed to + `post_raw_encrypt_with_metadata`. + """ + return response, metadata + def pre_restore_crypto_key_version( self, request: service.RestoreCryptoKeyVersionRequest, @@ -877,12 +1435,35 @@ def post_restore_crypto_key_version( ) -> resources.CryptoKeyVersion: """Post-rpc interceptor for restore_crypto_key_version - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_restore_crypto_key_version_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the KeyManagementService server but before - it is returned to user code. + it is returned to user code. This `post_restore_crypto_key_version` interceptor runs + before the `post_restore_crypto_key_version_with_metadata` interceptor. """ return response + def post_restore_crypto_key_version_with_metadata( + self, + response: resources.CryptoKeyVersion, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.CryptoKeyVersion, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for restore_crypto_key_version + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the KeyManagementService server but before it is returned to user code. + + We recommend only using this `post_restore_crypto_key_version_with_metadata` + interceptor in new development instead of the `post_restore_crypto_key_version` interceptor. + When both interceptors are used, this `post_restore_crypto_key_version_with_metadata` interceptor runs after the + `post_restore_crypto_key_version` interceptor. The (possibly modified) response returned by + `post_restore_crypto_key_version` will be passed to + `post_restore_crypto_key_version_with_metadata`. + """ + return response, metadata + def pre_update_crypto_key( self, request: service.UpdateCryptoKeyRequest, @@ -900,12 +1481,35 @@ def post_update_crypto_key( ) -> resources.CryptoKey: """Post-rpc interceptor for update_crypto_key - Override in a subclass to manipulate the response + DEPRECATED. 
Please use the `post_update_crypto_key_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the KeyManagementService server but before - it is returned to user code. + it is returned to user code. This `post_update_crypto_key` interceptor runs + before the `post_update_crypto_key_with_metadata` interceptor. """ return response + def post_update_crypto_key_with_metadata( + self, + response: resources.CryptoKey, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.CryptoKey, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_crypto_key + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the KeyManagementService server but before it is returned to user code. + + We recommend only using this `post_update_crypto_key_with_metadata` + interceptor in new development instead of the `post_update_crypto_key` interceptor. + When both interceptors are used, this `post_update_crypto_key_with_metadata` interceptor runs after the + `post_update_crypto_key` interceptor. The (possibly modified) response returned by + `post_update_crypto_key` will be passed to + `post_update_crypto_key_with_metadata`. + """ + return response, metadata + def pre_update_crypto_key_primary_version( self, request: service.UpdateCryptoKeyPrimaryVersionRequest, @@ -926,12 +1530,35 @@ def post_update_crypto_key_primary_version( ) -> resources.CryptoKey: """Post-rpc interceptor for update_crypto_key_primary_version - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_crypto_key_primary_version_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the KeyManagementService server but before - it is returned to user code. + it is returned to user code. This `post_update_crypto_key_primary_version` interceptor runs + before the `post_update_crypto_key_primary_version_with_metadata` interceptor. """ return response + def post_update_crypto_key_primary_version_with_metadata( + self, + response: resources.CryptoKey, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.CryptoKey, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_crypto_key_primary_version + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the KeyManagementService server but before it is returned to user code. + + We recommend only using this `post_update_crypto_key_primary_version_with_metadata` + interceptor in new development instead of the `post_update_crypto_key_primary_version` interceptor. + When both interceptors are used, this `post_update_crypto_key_primary_version_with_metadata` interceptor runs after the + `post_update_crypto_key_primary_version` interceptor. The (possibly modified) response returned by + `post_update_crypto_key_primary_version` will be passed to + `post_update_crypto_key_primary_version_with_metadata`. + """ + return response, metadata + def pre_update_crypto_key_version( self, request: service.UpdateCryptoKeyVersionRequest, @@ -951,12 +1578,35 @@ def post_update_crypto_key_version( ) -> resources.CryptoKeyVersion: """Post-rpc interceptor for update_crypto_key_version - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_crypto_key_version_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response after it is returned by the KeyManagementService server but before - it is returned to user code. + it is returned to user code. This `post_update_crypto_key_version` interceptor runs + before the `post_update_crypto_key_version_with_metadata` interceptor. """ return response + def post_update_crypto_key_version_with_metadata( + self, + response: resources.CryptoKeyVersion, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.CryptoKeyVersion, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_crypto_key_version + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the KeyManagementService server but before it is returned to user code. + + We recommend only using this `post_update_crypto_key_version_with_metadata` + interceptor in new development instead of the `post_update_crypto_key_version` interceptor. + When both interceptors are used, this `post_update_crypto_key_version_with_metadata` interceptor runs after the + `post_update_crypto_key_version` interceptor. The (possibly modified) response returned by + `post_update_crypto_key_version` will be passed to + `post_update_crypto_key_version_with_metadata`. + """ + return response, metadata + def pre_get_location( self, request: locations_pb2.GetLocationRequest, @@ -1333,6 +1983,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_asymmetric_decrypt(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_asymmetric_decrypt_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1485,6 +2139,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_asymmetric_sign(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_asymmetric_sign_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1644,6 +2302,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_crypto_key(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_crypto_key_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1811,6 +2473,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_crypto_key_version(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_crypto_key_version_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2006,6 +2672,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_import_job(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_import_job_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2157,6 +2827,10 
@@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_key_ring(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_key_ring_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2307,6 +2981,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_decrypt(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_decrypt_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2473,6 +3151,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_destroy_crypto_key_version(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_destroy_crypto_key_version_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2623,6 +3305,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_encrypt(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_encrypt_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2777,6 +3463,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_generate_random_bytes(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_generate_random_bytes_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2930,6 +3620,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_crypto_key(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_crypto_key_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3091,6 +3785,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_crypto_key_version(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_crypto_key_version_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3278,6 +3976,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_import_job(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_import_job_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3423,6 +4125,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_key_ring(resp) + response_metadata = [(k, str(v)) 
for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_key_ring_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3569,6 +4275,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_public_key(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_public_key_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3736,6 +4446,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_import_crypto_key_version(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_import_crypto_key_version_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3882,6 +4596,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_crypto_keys(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_crypto_keys_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4030,6 +4748,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_crypto_key_versions(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_crypto_key_versions_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4178,6 +4900,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_import_jobs(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_import_jobs_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4322,6 +5048,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_key_rings(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_key_rings_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4472,6 +5202,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_mac_sign(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_mac_sign_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4622,6 +5356,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_mac_verify(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_mac_verify_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and 
_LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4772,6 +5510,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_raw_decrypt(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_raw_decrypt_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4922,6 +5664,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_raw_encrypt(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_raw_encrypt_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -5088,6 +5834,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_restore_crypto_key_version(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_restore_crypto_key_version_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -5247,6 +5997,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_crypto_key(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_crypto_key_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -5409,6 +6163,13 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_crypto_key_primary_version(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_update_crypto_key_primary_version_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -5576,6 +6337,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_crypto_key_version(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_crypto_key_version_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-kms/google/cloud/kms_v1/types/ekm_service.py b/packages/google-cloud-kms/google/cloud/kms_v1/types/ekm_service.py index 8fda5560ca60..c25cf04da1eb 100644 --- a/packages/google-cloud-kms/google/cloud/kms_v1/types/ekm_service.py +++ b/packages/google-cloud-kms/google/cloud/kms_v1/types/ekm_service.py @@ -334,7 +334,7 @@ class EkmConnection(proto.Message): [CryptoKeys][google.cloud.kms.v1.CryptoKey] and [CryptoKeyVersions][google.cloud.kms.v1.CryptoKeyVersion] with a [ProtectionLevel][google.cloud.kms.v1.ProtectionLevel] of - [EXTERNAL_VPC][CryptoKeyVersion.ProtectionLevel.EXTERNAL_VPC], as + [EXTERNAL_VPC][google.cloud.kms.v1.ProtectionLevel.EXTERNAL_VPC], as well as performing cryptographic operations using keys created within the [EkmConnection][google.cloud.kms.v1.EkmConnection]. 
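For orientation, here is a minimal sketch of how the new `*_with_metadata` interceptor hooks introduced in this diff might be consumed from user code. It assumes the generated class names (`KeyManagementServiceRestInterceptor`, `KeyManagementServiceRestTransport`) and the `interceptor=` constructor argument follow the usual GAPIC REST transport surface shown in `transports/rest.py` above; the header-logging behavior is purely illustrative, not part of the generated library.

```python
# Illustrative sketch only: a custom REST interceptor that reads the response
# headers surfaced through the new *_with_metadata hooks. Names are assumed to
# match the generated surface in this diff.
from typing import Sequence, Tuple, Union

from google.cloud import kms_v1
from google.cloud.kms_v1.services.key_management_service.transports.rest import (
    KeyManagementServiceRestInterceptor,
    KeyManagementServiceRestTransport,
)
from google.cloud.kms_v1.types import service


class HeaderLoggingInterceptor(KeyManagementServiceRestInterceptor):
    def post_encrypt_with_metadata(
        self,
        response: service.EncryptResponse,
        metadata: Sequence[Tuple[str, Union[str, bytes]]],
    ) -> Tuple[service.EncryptResponse, Sequence[Tuple[str, Union[str, bytes]]]]:
        # `metadata` carries the HTTP response headers that __call__ collects
        # into `response_metadata` before invoking this hook; it runs after the
        # (deprecated) post_encrypt interceptor and receives its return value.
        for key, value in metadata:
            print(f"encrypt response header: {key}={value}")
        return response, metadata


# Hypothetical wiring; construction will look up application default
# credentials, so this is a sketch rather than a drop-in snippet.
transport = KeyManagementServiceRestTransport(interceptor=HeaderLoggingInterceptor())
client = kms_v1.KeyManagementServiceClient(transport=transport)
```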
@@ -412,7 +412,7 @@ class KeyManagementMode(proto.Enum): - Destruction of external key material associated with this [EkmConnection][google.cloud.kms.v1.EkmConnection] can be requested by calling - [DestroyCryptoKeyVersion][EkmService.DestroyCryptoKeyVersion]. + [DestroyCryptoKeyVersion][google.cloud.kms.v1.KeyManagementService.DestroyCryptoKeyVersion]. - Automatic rotation of key material is supported. """ KEY_MANAGEMENT_MODE_UNSPECIFIED = 0 @@ -502,8 +502,8 @@ class EkmConfig(proto.Message): [CryptoKeys][google.cloud.kms.v1.CryptoKey] and [CryptoKeyVersions][google.cloud.kms.v1.CryptoKeyVersion] with a [ProtectionLevel][google.cloud.kms.v1.ProtectionLevel] of - [EXTERNAL_VPC][CryptoKeyVersion.ProtectionLevel.EXTERNAL_VPC] in a - given project and location. + [EXTERNAL_VPC][google.cloud.kms.v1.ProtectionLevel.EXTERNAL_VPC] in + a given project and location. Attributes: name (str): diff --git a/packages/google-cloud-kms/google/cloud/kms_v1/types/resources.py b/packages/google-cloud-kms/google/cloud/kms_v1/types/resources.py index b30a8ca4ed64..295acbbaa8f4 100644 --- a/packages/google-cloud-kms/google/cloud/kms_v1/types/resources.py +++ b/packages/google-cloud-kms/google/cloud/kms_v1/types/resources.py @@ -288,7 +288,7 @@ class CryptoKey(proto.Message): [CryptoKeyVersions][google.cloud.kms.v1.CryptoKeyVersion] have a [ProtectionLevel][google.cloud.kms.v1.ProtectionLevel] of - [EXTERNAL_VPC][CryptoKeyVersion.ProtectionLevel.EXTERNAL_VPC], + [EXTERNAL_VPC][google.cloud.kms.v1.ProtectionLevel.EXTERNAL_VPC], with the resource name in the format ``projects/*/locations/*/ekmConnections/*``. Note, this list is non-exhaustive and may apply to additional @@ -693,8 +693,6 @@ class CryptoKeyVersionAlgorithm(proto.Enum): The suffix following ``HMAC_`` corresponds to the hash algorithm being used (eg. SHA256). - Algorithms beginning with ``PQ_`` are post-quantum. - For more information, see [Key purposes and algorithms] (https://cloud.google.com/kms/docs/algorithms). diff --git a/packages/google-cloud-kms/google/cloud/kms_v1/types/service.py b/packages/google-cloud-kms/google/cloud/kms_v1/types/service.py index eb0280ea01c5..c7b959dbf498 100644 --- a/packages/google-cloud-kms/google/cloud/kms_v1/types/service.py +++ b/packages/google-cloud-kms/google/cloud/kms_v1/types/service.py @@ -676,7 +676,8 @@ class ImportCryptoKeyVersionRequest(proto.Message): the [CryptoKeyVersion][google.cloud.kms.v1.CryptoKeyVersion] must be a child of [ImportCryptoKeyVersionRequest.parent][google.cloud.kms.v1.ImportCryptoKeyVersionRequest.parent], - have been previously created via [ImportCryptoKeyVersion][], + have been previously created via + [ImportCryptoKeyVersion][google.cloud.kms.v1.KeyManagementService.ImportCryptoKeyVersion], and be in [DESTROYED][google.cloud.kms.v1.CryptoKeyVersion.CryptoKeyVersionState.DESTROYED] or @@ -1614,7 +1615,8 @@ class MacVerifyRequest(proto.Message): [KeyManagementService][google.cloud.kms.v1.KeyManagementService] will report an error if the checksum verification fails. If you receive a checksum error, your client should verify that - CRC32C([MacVerifyRequest.tag][]) is equal to + CRC32C([MacVerifyRequest.mac][google.cloud.kms.v1.MacVerifyRequest.mac]) + is equal to [MacVerifyRequest.mac_crc32c][google.cloud.kms.v1.MacVerifyRequest.mac_crc32c], and if so, perform a limited number of retries. 
A persistent mismatch may indicate an issue in your computation of the diff --git a/packages/google-cloud-kms/samples/generated_samples/snippet_metadata_google.cloud.kms.v1.json b/packages/google-cloud-kms/samples/generated_samples/snippet_metadata_google.cloud.kms.v1.json index 64091c5630ff..a657757ff4e2 100644 --- a/packages/google-cloud-kms/samples/generated_samples/snippet_metadata_google.cloud.kms.v1.json +++ b/packages/google-cloud-kms/samples/generated_samples/snippet_metadata_google.cloud.kms.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-kms", - "version": "3.2.0" + "version": "3.3.0" }, "snippets": [ { diff --git a/packages/google-cloud-kms/tests/unit/gapic/kms_v1/test_autokey.py b/packages/google-cloud-kms/tests/unit/gapic/kms_v1/test_autokey.py index 171a511c6720..fc591aa42d1c 100644 --- a/packages/google-cloud-kms/tests/unit/gapic/kms_v1/test_autokey.py +++ b/packages/google-cloud-kms/tests/unit/gapic/kms_v1/test_autokey.py @@ -74,6 +74,13 @@ ) from google.cloud.kms_v1.types import autokey +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -297,6 +304,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = AutokeyClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = AutokeyClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -3275,10 +3325,13 @@ def test_create_key_handle_rest_interceptors(null_interceptor): ), mock.patch.object( transports.AutokeyRestInterceptor, "post_create_key_handle" ) as post, mock.patch.object( + transports.AutokeyRestInterceptor, "post_create_key_handle_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AutokeyRestInterceptor, "pre_create_key_handle" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = autokey.CreateKeyHandleRequest.pb(autokey.CreateKeyHandleRequest()) transcode.return_value = { "method": "post", @@ -3300,6 +3353,7 @@ def test_create_key_handle_rest_interceptors(null_interceptor): ] pre.return_value = request, 
metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_key_handle( request, @@ -3311,6 +3365,7 @@ def test_create_key_handle_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_key_handle_rest_bad_request(request_type=autokey.GetKeyHandleRequest): @@ -3395,10 +3450,13 @@ def test_get_key_handle_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AutokeyRestInterceptor, "post_get_key_handle" ) as post, mock.patch.object( + transports.AutokeyRestInterceptor, "post_get_key_handle_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AutokeyRestInterceptor, "pre_get_key_handle" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = autokey.GetKeyHandleRequest.pb(autokey.GetKeyHandleRequest()) transcode.return_value = { "method": "post", @@ -3420,6 +3478,7 @@ def test_get_key_handle_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = autokey.KeyHandle() + post_with_metadata.return_value = autokey.KeyHandle(), metadata client.get_key_handle( request, @@ -3431,6 +3490,7 @@ def test_get_key_handle_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_key_handles_rest_bad_request(request_type=autokey.ListKeyHandlesRequest): @@ -3511,10 +3571,13 @@ def test_list_key_handles_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AutokeyRestInterceptor, "post_list_key_handles" ) as post, mock.patch.object( + transports.AutokeyRestInterceptor, "post_list_key_handles_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AutokeyRestInterceptor, "pre_list_key_handles" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = autokey.ListKeyHandlesRequest.pb(autokey.ListKeyHandlesRequest()) transcode.return_value = { "method": "post", @@ -3538,6 +3601,7 @@ def test_list_key_handles_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = autokey.ListKeyHandlesResponse() + post_with_metadata.return_value = autokey.ListKeyHandlesResponse(), metadata client.list_key_handles( request, @@ -3549,6 +3613,7 @@ def test_list_key_handles_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationRequest): diff --git a/packages/google-cloud-kms/tests/unit/gapic/kms_v1/test_autokey_admin.py b/packages/google-cloud-kms/tests/unit/gapic/kms_v1/test_autokey_admin.py index 79c9e8a5b8f9..c2f7c7dc8d73 100644 --- a/packages/google-cloud-kms/tests/unit/gapic/kms_v1/test_autokey_admin.py +++ b/packages/google-cloud-kms/tests/unit/gapic/kms_v1/test_autokey_admin.py @@ -65,6 +65,13 @@ ) from google.cloud.kms_v1.types import autokey_admin +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -308,6 +315,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain 
cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = AutokeyAdminClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = AutokeyAdminClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -3097,10 +3147,14 @@ def test_update_autokey_config_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AutokeyAdminRestInterceptor, "post_update_autokey_config" ) as post, mock.patch.object( + transports.AutokeyAdminRestInterceptor, + "post_update_autokey_config_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AutokeyAdminRestInterceptor, "pre_update_autokey_config" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = autokey_admin.UpdateAutokeyConfigRequest.pb( autokey_admin.UpdateAutokeyConfigRequest() ) @@ -3126,6 +3180,7 @@ def test_update_autokey_config_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = autokey_admin.AutokeyConfig() + post_with_metadata.return_value = autokey_admin.AutokeyConfig(), metadata client.update_autokey_config( request, @@ -3137,6 +3192,7 @@ def test_update_autokey_config_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_autokey_config_rest_bad_request( @@ -3225,10 +3281,13 @@ def test_get_autokey_config_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AutokeyAdminRestInterceptor, "post_get_autokey_config" ) as post, mock.patch.object( + transports.AutokeyAdminRestInterceptor, "post_get_autokey_config_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.AutokeyAdminRestInterceptor, "pre_get_autokey_config" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = autokey_admin.GetAutokeyConfigRequest.pb( autokey_admin.GetAutokeyConfigRequest() ) @@ -3254,6 +3313,7 @@ def test_get_autokey_config_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = autokey_admin.AutokeyConfig() + post_with_metadata.return_value = autokey_admin.AutokeyConfig(), metadata client.get_autokey_config( request, @@ -3265,6 +3325,7 @@ def test_get_autokey_config_rest_interceptors(null_interceptor): 
pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_show_effective_autokey_config_rest_bad_request( @@ -3349,10 +3410,14 @@ def test_show_effective_autokey_config_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.AutokeyAdminRestInterceptor, "post_show_effective_autokey_config" ) as post, mock.patch.object( + transports.AutokeyAdminRestInterceptor, + "post_show_effective_autokey_config_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.AutokeyAdminRestInterceptor, "pre_show_effective_autokey_config" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = autokey_admin.ShowEffectiveAutokeyConfigRequest.pb( autokey_admin.ShowEffectiveAutokeyConfigRequest() ) @@ -3378,6 +3443,10 @@ def test_show_effective_autokey_config_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = autokey_admin.ShowEffectiveAutokeyConfigResponse() + post_with_metadata.return_value = ( + autokey_admin.ShowEffectiveAutokeyConfigResponse(), + metadata, + ) client.show_effective_autokey_config( request, @@ -3389,6 +3458,7 @@ def test_show_effective_autokey_config_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationRequest): diff --git a/packages/google-cloud-kms/tests/unit/gapic/kms_v1/test_ekm_service.py b/packages/google-cloud-kms/tests/unit/gapic/kms_v1/test_ekm_service.py index f3aa0be88d17..dbcdf6b5262f 100644 --- a/packages/google-cloud-kms/tests/unit/gapic/kms_v1/test_ekm_service.py +++ b/packages/google-cloud-kms/tests/unit/gapic/kms_v1/test_ekm_service.py @@ -67,6 +67,13 @@ ) from google.cloud.kms_v1.types import ekm_service +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -302,6 +309,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = EkmServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = EkmServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -5687,10 +5737,13 @@ def test_list_ekm_connections_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.EkmServiceRestInterceptor, "post_list_ekm_connections" ) as post, mock.patch.object( + transports.EkmServiceRestInterceptor, "post_list_ekm_connections_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.EkmServiceRestInterceptor, "pre_list_ekm_connections" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = ekm_service.ListEkmConnectionsRequest.pb( ekm_service.ListEkmConnectionsRequest() ) @@ -5716,6 +5769,10 @@ def test_list_ekm_connections_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = ekm_service.ListEkmConnectionsResponse() + post_with_metadata.return_value = ( + ekm_service.ListEkmConnectionsResponse(), + metadata, + ) client.list_ekm_connections( request, @@ -5727,6 +5784,7 @@ def test_list_ekm_connections_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_ekm_connection_rest_bad_request( @@ -5820,10 +5878,13 @@ def test_get_ekm_connection_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.EkmServiceRestInterceptor, "post_get_ekm_connection" ) as post, mock.patch.object( + transports.EkmServiceRestInterceptor, "post_get_ekm_connection_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.EkmServiceRestInterceptor, "pre_get_ekm_connection" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = ekm_service.GetEkmConnectionRequest.pb( ekm_service.GetEkmConnectionRequest() ) @@ -5847,6 +5908,7 @@ def test_get_ekm_connection_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = ekm_service.EkmConnection() + post_with_metadata.return_value = ekm_service.EkmConnection(), metadata client.get_ekm_connection( request, @@ -5858,6 +5920,7 @@ def test_get_ekm_connection_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + 
post_with_metadata.assert_called_once() def test_create_ekm_connection_rest_bad_request( @@ -6048,10 +6111,13 @@ def test_create_ekm_connection_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.EkmServiceRestInterceptor, "post_create_ekm_connection" ) as post, mock.patch.object( + transports.EkmServiceRestInterceptor, "post_create_ekm_connection_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.EkmServiceRestInterceptor, "pre_create_ekm_connection" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = ekm_service.CreateEkmConnectionRequest.pb( ekm_service.CreateEkmConnectionRequest() ) @@ -6075,6 +6141,7 @@ def test_create_ekm_connection_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = ekm_service.EkmConnection() + post_with_metadata.return_value = ekm_service.EkmConnection(), metadata client.create_ekm_connection( request, @@ -6086,6 +6153,7 @@ def test_create_ekm_connection_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_ekm_connection_rest_bad_request( @@ -6284,10 +6352,13 @@ def test_update_ekm_connection_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.EkmServiceRestInterceptor, "post_update_ekm_connection" ) as post, mock.patch.object( + transports.EkmServiceRestInterceptor, "post_update_ekm_connection_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.EkmServiceRestInterceptor, "pre_update_ekm_connection" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = ekm_service.UpdateEkmConnectionRequest.pb( ekm_service.UpdateEkmConnectionRequest() ) @@ -6311,6 +6382,7 @@ def test_update_ekm_connection_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = ekm_service.EkmConnection() + post_with_metadata.return_value = ekm_service.EkmConnection(), metadata client.update_ekm_connection( request, @@ -6322,6 +6394,7 @@ def test_update_ekm_connection_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_ekm_config_rest_bad_request(request_type=ekm_service.GetEkmConfigRequest): @@ -6406,10 +6479,13 @@ def test_get_ekm_config_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.EkmServiceRestInterceptor, "post_get_ekm_config" ) as post, mock.patch.object( + transports.EkmServiceRestInterceptor, "post_get_ekm_config_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.EkmServiceRestInterceptor, "pre_get_ekm_config" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = ekm_service.GetEkmConfigRequest.pb( ekm_service.GetEkmConfigRequest() ) @@ -6433,6 +6509,7 @@ def test_get_ekm_config_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = ekm_service.EkmConfig() + post_with_metadata.return_value = ekm_service.EkmConfig(), metadata client.get_ekm_config( request, @@ -6444,6 +6521,7 @@ def test_get_ekm_config_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_ekm_config_rest_bad_request( @@ -6605,10 +6683,13 @@ def 
test_update_ekm_config_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.EkmServiceRestInterceptor, "post_update_ekm_config" ) as post, mock.patch.object( + transports.EkmServiceRestInterceptor, "post_update_ekm_config_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.EkmServiceRestInterceptor, "pre_update_ekm_config" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = ekm_service.UpdateEkmConfigRequest.pb( ekm_service.UpdateEkmConfigRequest() ) @@ -6632,6 +6713,7 @@ def test_update_ekm_config_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = ekm_service.EkmConfig() + post_with_metadata.return_value = ekm_service.EkmConfig(), metadata client.update_ekm_config( request, @@ -6643,6 +6725,7 @@ def test_update_ekm_config_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_verify_connectivity_rest_bad_request( @@ -6724,10 +6807,13 @@ def test_verify_connectivity_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.EkmServiceRestInterceptor, "post_verify_connectivity" ) as post, mock.patch.object( + transports.EkmServiceRestInterceptor, "post_verify_connectivity_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.EkmServiceRestInterceptor, "pre_verify_connectivity" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = ekm_service.VerifyConnectivityRequest.pb( ekm_service.VerifyConnectivityRequest() ) @@ -6753,6 +6839,10 @@ def test_verify_connectivity_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = ekm_service.VerifyConnectivityResponse() + post_with_metadata.return_value = ( + ekm_service.VerifyConnectivityResponse(), + metadata, + ) client.verify_connectivity( request, @@ -6764,6 +6854,7 @@ def test_verify_connectivity_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationRequest): diff --git a/packages/google-cloud-kms/tests/unit/gapic/kms_v1/test_key_management_service.py b/packages/google-cloud-kms/tests/unit/gapic/kms_v1/test_key_management_service.py index c7081cc8ad3b..24ecbb7b2ad8 100644 --- a/packages/google-cloud-kms/tests/unit/gapic/kms_v1/test_key_management_service.py +++ b/packages/google-cloud-kms/tests/unit/gapic/kms_v1/test_key_management_service.py @@ -69,6 +69,13 @@ ) from google.cloud.kms_v1.types import resources, service +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -342,6 +349,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = KeyManagementServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = KeyManagementServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -18796,10 +18846,14 @@ def test_list_key_rings_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.KeyManagementServiceRestInterceptor, "post_list_key_rings" ) as post, mock.patch.object( + transports.KeyManagementServiceRestInterceptor, + "post_list_key_rings_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.KeyManagementServiceRestInterceptor, "pre_list_key_rings" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.ListKeyRingsRequest.pb(service.ListKeyRingsRequest()) transcode.return_value = { "method": "post", @@ -18823,6 +18877,7 @@ def test_list_key_rings_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = service.ListKeyRingsResponse() + post_with_metadata.return_value = service.ListKeyRingsResponse(), metadata client.list_key_rings( request, @@ -18834,6 +18889,7 @@ def test_list_key_rings_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_crypto_keys_rest_bad_request(request_type=service.ListCryptoKeysRequest): @@ -18918,10 +18974,14 @@ def test_list_crypto_keys_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.KeyManagementServiceRestInterceptor, "post_list_crypto_keys" ) as post, mock.patch.object( + transports.KeyManagementServiceRestInterceptor, + "post_list_crypto_keys_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.KeyManagementServiceRestInterceptor, "pre_list_crypto_keys" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.ListCryptoKeysRequest.pb(service.ListCryptoKeysRequest()) transcode.return_value = { "method": "post", @@ -18945,6 +19005,7 @@ def test_list_crypto_keys_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = service.ListCryptoKeysResponse() + post_with_metadata.return_value = service.ListCryptoKeysResponse(), metadata client.list_crypto_keys( request, @@ -18956,6 +19017,7 @@ 
def test_list_crypto_keys_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_crypto_key_versions_rest_bad_request( @@ -19046,10 +19108,14 @@ def test_list_crypto_key_versions_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.KeyManagementServiceRestInterceptor, "post_list_crypto_key_versions" ) as post, mock.patch.object( + transports.KeyManagementServiceRestInterceptor, + "post_list_crypto_key_versions_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.KeyManagementServiceRestInterceptor, "pre_list_crypto_key_versions" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.ListCryptoKeyVersionsRequest.pb( service.ListCryptoKeyVersionsRequest() ) @@ -19075,6 +19141,10 @@ def test_list_crypto_key_versions_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = service.ListCryptoKeyVersionsResponse() + post_with_metadata.return_value = ( + service.ListCryptoKeyVersionsResponse(), + metadata, + ) client.list_crypto_key_versions( request, @@ -19086,6 +19156,7 @@ def test_list_crypto_key_versions_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_import_jobs_rest_bad_request(request_type=service.ListImportJobsRequest): @@ -19170,10 +19241,14 @@ def test_list_import_jobs_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.KeyManagementServiceRestInterceptor, "post_list_import_jobs" ) as post, mock.patch.object( + transports.KeyManagementServiceRestInterceptor, + "post_list_import_jobs_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.KeyManagementServiceRestInterceptor, "pre_list_import_jobs" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.ListImportJobsRequest.pb(service.ListImportJobsRequest()) transcode.return_value = { "method": "post", @@ -19197,6 +19272,7 @@ def test_list_import_jobs_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = service.ListImportJobsResponse() + post_with_metadata.return_value = service.ListImportJobsResponse(), metadata client.list_import_jobs( request, @@ -19208,6 +19284,7 @@ def test_list_import_jobs_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_key_ring_rest_bad_request(request_type=service.GetKeyRingRequest): @@ -19290,10 +19367,14 @@ def test_get_key_ring_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.KeyManagementServiceRestInterceptor, "post_get_key_ring" ) as post, mock.patch.object( + transports.KeyManagementServiceRestInterceptor, + "post_get_key_ring_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.KeyManagementServiceRestInterceptor, "pre_get_key_ring" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.GetKeyRingRequest.pb(service.GetKeyRingRequest()) transcode.return_value = { "method": "post", @@ -19315,6 +19396,7 @@ def test_get_key_ring_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.KeyRing() + post_with_metadata.return_value = resources.KeyRing(), metadata 
client.get_key_ring( request, @@ -19326,6 +19408,7 @@ def test_get_key_ring_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_crypto_key_rest_bad_request(request_type=service.GetCryptoKeyRequest): @@ -19418,10 +19501,14 @@ def test_get_crypto_key_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.KeyManagementServiceRestInterceptor, "post_get_crypto_key" ) as post, mock.patch.object( + transports.KeyManagementServiceRestInterceptor, + "post_get_crypto_key_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.KeyManagementServiceRestInterceptor, "pre_get_crypto_key" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.GetCryptoKeyRequest.pb(service.GetCryptoKeyRequest()) transcode.return_value = { "method": "post", @@ -19443,6 +19530,7 @@ def test_get_crypto_key_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.CryptoKey() + post_with_metadata.return_value = resources.CryptoKey(), metadata client.get_crypto_key( request, @@ -19454,6 +19542,7 @@ def test_get_crypto_key_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_crypto_key_version_rest_bad_request( @@ -19567,10 +19656,14 @@ def test_get_crypto_key_version_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.KeyManagementServiceRestInterceptor, "post_get_crypto_key_version" ) as post, mock.patch.object( + transports.KeyManagementServiceRestInterceptor, + "post_get_crypto_key_version_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.KeyManagementServiceRestInterceptor, "pre_get_crypto_key_version" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.GetCryptoKeyVersionRequest.pb( service.GetCryptoKeyVersionRequest() ) @@ -19594,6 +19687,7 @@ def test_get_crypto_key_version_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.CryptoKeyVersion() + post_with_metadata.return_value = resources.CryptoKeyVersion(), metadata client.get_crypto_key_version( request, @@ -19605,6 +19699,7 @@ def test_get_crypto_key_version_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_public_key_rest_bad_request(request_type=service.GetPublicKeyRequest): @@ -19700,10 +19795,14 @@ def test_get_public_key_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.KeyManagementServiceRestInterceptor, "post_get_public_key" ) as post, mock.patch.object( + transports.KeyManagementServiceRestInterceptor, + "post_get_public_key_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.KeyManagementServiceRestInterceptor, "pre_get_public_key" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.GetPublicKeyRequest.pb(service.GetPublicKeyRequest()) transcode.return_value = { "method": "post", @@ -19725,6 +19824,7 @@ def test_get_public_key_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.PublicKey() + post_with_metadata.return_value = resources.PublicKey(), metadata client.get_public_key( 
request, @@ -19736,6 +19836,7 @@ def test_get_public_key_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_import_job_rest_bad_request(request_type=service.GetImportJobRequest): @@ -19831,10 +19932,14 @@ def test_get_import_job_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.KeyManagementServiceRestInterceptor, "post_get_import_job" ) as post, mock.patch.object( + transports.KeyManagementServiceRestInterceptor, + "post_get_import_job_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.KeyManagementServiceRestInterceptor, "pre_get_import_job" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.GetImportJobRequest.pb(service.GetImportJobRequest()) transcode.return_value = { "method": "post", @@ -19856,6 +19961,7 @@ def test_get_import_job_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.ImportJob() + post_with_metadata.return_value = resources.ImportJob(), metadata client.get_import_job( request, @@ -19867,6 +19973,7 @@ def test_get_import_job_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_key_ring_rest_bad_request(request_type=service.CreateKeyRingRequest): @@ -20020,10 +20127,14 @@ def test_create_key_ring_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.KeyManagementServiceRestInterceptor, "post_create_key_ring" ) as post, mock.patch.object( + transports.KeyManagementServiceRestInterceptor, + "post_create_key_ring_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.KeyManagementServiceRestInterceptor, "pre_create_key_ring" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.CreateKeyRingRequest.pb(service.CreateKeyRingRequest()) transcode.return_value = { "method": "post", @@ -20045,6 +20156,7 @@ def test_create_key_ring_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.KeyRing() + post_with_metadata.return_value = resources.KeyRing(), metadata client.create_key_ring( request, @@ -20056,6 +20168,7 @@ def test_create_key_ring_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_crypto_key_rest_bad_request( @@ -20261,10 +20374,14 @@ def test_create_crypto_key_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.KeyManagementServiceRestInterceptor, "post_create_crypto_key" ) as post, mock.patch.object( + transports.KeyManagementServiceRestInterceptor, + "post_create_crypto_key_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.KeyManagementServiceRestInterceptor, "pre_create_crypto_key" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.CreateCryptoKeyRequest.pb(service.CreateCryptoKeyRequest()) transcode.return_value = { "method": "post", @@ -20286,6 +20403,7 @@ def test_create_crypto_key_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.CryptoKey() + post_with_metadata.return_value = resources.CryptoKey(), metadata client.create_crypto_key( request, @@ -20297,6 +20415,7 @@ def 
test_create_crypto_key_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_crypto_key_version_rest_bad_request( @@ -20512,10 +20631,14 @@ def test_create_crypto_key_version_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.KeyManagementServiceRestInterceptor, "post_create_crypto_key_version" ) as post, mock.patch.object( + transports.KeyManagementServiceRestInterceptor, + "post_create_crypto_key_version_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.KeyManagementServiceRestInterceptor, "pre_create_crypto_key_version" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.CreateCryptoKeyVersionRequest.pb( service.CreateCryptoKeyVersionRequest() ) @@ -20539,6 +20662,7 @@ def test_create_crypto_key_version_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.CryptoKeyVersion() + post_with_metadata.return_value = resources.CryptoKeyVersion(), metadata client.create_crypto_key_version( request, @@ -20550,6 +20674,7 @@ def test_create_crypto_key_version_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_import_crypto_key_version_rest_bad_request( @@ -20663,10 +20788,14 @@ def test_import_crypto_key_version_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.KeyManagementServiceRestInterceptor, "post_import_crypto_key_version" ) as post, mock.patch.object( + transports.KeyManagementServiceRestInterceptor, + "post_import_crypto_key_version_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.KeyManagementServiceRestInterceptor, "pre_import_crypto_key_version" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.ImportCryptoKeyVersionRequest.pb( service.ImportCryptoKeyVersionRequest() ) @@ -20690,6 +20819,7 @@ def test_import_crypto_key_version_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.CryptoKeyVersion() + post_with_metadata.return_value = resources.CryptoKeyVersion(), metadata client.import_crypto_key_version( request, @@ -20701,6 +20831,7 @@ def test_import_crypto_key_version_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_import_job_rest_bad_request( @@ -20887,10 +21018,14 @@ def test_create_import_job_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.KeyManagementServiceRestInterceptor, "post_create_import_job" ) as post, mock.patch.object( + transports.KeyManagementServiceRestInterceptor, + "post_create_import_job_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.KeyManagementServiceRestInterceptor, "pre_create_import_job" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.CreateImportJobRequest.pb(service.CreateImportJobRequest()) transcode.return_value = { "method": "post", @@ -20912,6 +21047,7 @@ def test_create_import_job_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.ImportJob() + post_with_metadata.return_value = resources.ImportJob(), metadata client.create_import_job( 
request, @@ -20923,6 +21059,7 @@ def test_create_import_job_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_crypto_key_rest_bad_request( @@ -21136,10 +21273,14 @@ def test_update_crypto_key_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.KeyManagementServiceRestInterceptor, "post_update_crypto_key" ) as post, mock.patch.object( + transports.KeyManagementServiceRestInterceptor, + "post_update_crypto_key_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.KeyManagementServiceRestInterceptor, "pre_update_crypto_key" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.UpdateCryptoKeyRequest.pb(service.UpdateCryptoKeyRequest()) transcode.return_value = { "method": "post", @@ -21161,6 +21302,7 @@ def test_update_crypto_key_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.CryptoKey() + post_with_metadata.return_value = resources.CryptoKey(), metadata client.update_crypto_key( request, @@ -21172,6 +21314,7 @@ def test_update_crypto_key_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_crypto_key_version_rest_bad_request( @@ -21391,10 +21534,14 @@ def test_update_crypto_key_version_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.KeyManagementServiceRestInterceptor, "post_update_crypto_key_version" ) as post, mock.patch.object( + transports.KeyManagementServiceRestInterceptor, + "post_update_crypto_key_version_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.KeyManagementServiceRestInterceptor, "pre_update_crypto_key_version" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.UpdateCryptoKeyVersionRequest.pb( service.UpdateCryptoKeyVersionRequest() ) @@ -21418,6 +21565,7 @@ def test_update_crypto_key_version_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.CryptoKeyVersion() + post_with_metadata.return_value = resources.CryptoKeyVersion(), metadata client.update_crypto_key_version( request, @@ -21429,6 +21577,7 @@ def test_update_crypto_key_version_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_crypto_key_primary_version_rest_bad_request( @@ -21524,11 +21673,15 @@ def test_update_crypto_key_primary_version_rest_interceptors(null_interceptor): transports.KeyManagementServiceRestInterceptor, "post_update_crypto_key_primary_version", ) as post, mock.patch.object( + transports.KeyManagementServiceRestInterceptor, + "post_update_crypto_key_primary_version_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.KeyManagementServiceRestInterceptor, "pre_update_crypto_key_primary_version", ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.UpdateCryptoKeyPrimaryVersionRequest.pb( service.UpdateCryptoKeyPrimaryVersionRequest() ) @@ -21552,6 +21705,7 @@ def test_update_crypto_key_primary_version_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.CryptoKey() + post_with_metadata.return_value = resources.CryptoKey(), 
metadata client.update_crypto_key_primary_version( request, @@ -21563,6 +21717,7 @@ def test_update_crypto_key_primary_version_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_destroy_crypto_key_version_rest_bad_request( @@ -21677,10 +21832,14 @@ def test_destroy_crypto_key_version_rest_interceptors(null_interceptor): transports.KeyManagementServiceRestInterceptor, "post_destroy_crypto_key_version", ) as post, mock.patch.object( + transports.KeyManagementServiceRestInterceptor, + "post_destroy_crypto_key_version_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.KeyManagementServiceRestInterceptor, "pre_destroy_crypto_key_version" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.DestroyCryptoKeyVersionRequest.pb( service.DestroyCryptoKeyVersionRequest() ) @@ -21704,6 +21863,7 @@ def test_destroy_crypto_key_version_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.CryptoKeyVersion() + post_with_metadata.return_value = resources.CryptoKeyVersion(), metadata client.destroy_crypto_key_version( request, @@ -21715,6 +21875,7 @@ def test_destroy_crypto_key_version_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_restore_crypto_key_version_rest_bad_request( @@ -21829,10 +21990,14 @@ def test_restore_crypto_key_version_rest_interceptors(null_interceptor): transports.KeyManagementServiceRestInterceptor, "post_restore_crypto_key_version", ) as post, mock.patch.object( + transports.KeyManagementServiceRestInterceptor, + "post_restore_crypto_key_version_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.KeyManagementServiceRestInterceptor, "pre_restore_crypto_key_version" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.RestoreCryptoKeyVersionRequest.pb( service.RestoreCryptoKeyVersionRequest() ) @@ -21856,6 +22021,7 @@ def test_restore_crypto_key_version_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.CryptoKeyVersion() + post_with_metadata.return_value = resources.CryptoKeyVersion(), metadata client.restore_crypto_key_version( request, @@ -21867,6 +22033,7 @@ def test_restore_crypto_key_version_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_encrypt_rest_bad_request(request_type=service.EncryptRequest): @@ -21961,10 +22128,13 @@ def test_encrypt_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.KeyManagementServiceRestInterceptor, "post_encrypt" ) as post, mock.patch.object( + transports.KeyManagementServiceRestInterceptor, "post_encrypt_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.KeyManagementServiceRestInterceptor, "pre_encrypt" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.EncryptRequest.pb(service.EncryptRequest()) transcode.return_value = { "method": "post", @@ -21986,6 +22156,7 @@ def test_encrypt_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = service.EncryptResponse() + post_with_metadata.return_value = service.EncryptResponse(), metadata 
client.encrypt( request, @@ -21997,6 +22168,7 @@ def test_encrypt_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_decrypt_rest_bad_request(request_type=service.DecryptRequest): @@ -22087,10 +22259,13 @@ def test_decrypt_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.KeyManagementServiceRestInterceptor, "post_decrypt" ) as post, mock.patch.object( + transports.KeyManagementServiceRestInterceptor, "post_decrypt_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.KeyManagementServiceRestInterceptor, "pre_decrypt" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.DecryptRequest.pb(service.DecryptRequest()) transcode.return_value = { "method": "post", @@ -22112,6 +22287,7 @@ def test_decrypt_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = service.DecryptResponse() + post_with_metadata.return_value = service.DecryptResponse(), metadata client.decrypt( request, @@ -22123,6 +22299,7 @@ def test_decrypt_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_raw_encrypt_rest_bad_request(request_type=service.RawEncryptRequest): @@ -22223,10 +22400,13 @@ def test_raw_encrypt_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.KeyManagementServiceRestInterceptor, "post_raw_encrypt" ) as post, mock.patch.object( + transports.KeyManagementServiceRestInterceptor, "post_raw_encrypt_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.KeyManagementServiceRestInterceptor, "pre_raw_encrypt" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.RawEncryptRequest.pb(service.RawEncryptRequest()) transcode.return_value = { "method": "post", @@ -22248,6 +22428,7 @@ def test_raw_encrypt_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = service.RawEncryptResponse() + post_with_metadata.return_value = service.RawEncryptResponse(), metadata client.raw_encrypt( request, @@ -22259,6 +22440,7 @@ def test_raw_encrypt_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_raw_decrypt_rest_bad_request(request_type=service.RawDecryptRequest): @@ -22353,10 +22535,13 @@ def test_raw_decrypt_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.KeyManagementServiceRestInterceptor, "post_raw_decrypt" ) as post, mock.patch.object( + transports.KeyManagementServiceRestInterceptor, "post_raw_decrypt_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.KeyManagementServiceRestInterceptor, "pre_raw_decrypt" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.RawDecryptRequest.pb(service.RawDecryptRequest()) transcode.return_value = { "method": "post", @@ -22378,6 +22563,7 @@ def test_raw_decrypt_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = service.RawDecryptResponse() + post_with_metadata.return_value = service.RawDecryptResponse(), metadata client.raw_decrypt( request, @@ -22389,6 +22575,7 @@ def test_raw_decrypt_rest_interceptors(null_interceptor): pre.assert_called_once() 
post.assert_called_once() + post_with_metadata.assert_called_once() def test_asymmetric_sign_rest_bad_request(request_type=service.AsymmetricSignRequest): @@ -22483,10 +22670,14 @@ def test_asymmetric_sign_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.KeyManagementServiceRestInterceptor, "post_asymmetric_sign" ) as post, mock.patch.object( + transports.KeyManagementServiceRestInterceptor, + "post_asymmetric_sign_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.KeyManagementServiceRestInterceptor, "pre_asymmetric_sign" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.AsymmetricSignRequest.pb(service.AsymmetricSignRequest()) transcode.return_value = { "method": "post", @@ -22510,6 +22701,7 @@ def test_asymmetric_sign_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = service.AsymmetricSignResponse() + post_with_metadata.return_value = service.AsymmetricSignResponse(), metadata client.asymmetric_sign( request, @@ -22521,6 +22713,7 @@ def test_asymmetric_sign_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_asymmetric_decrypt_rest_bad_request( @@ -22613,10 +22806,14 @@ def test_asymmetric_decrypt_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.KeyManagementServiceRestInterceptor, "post_asymmetric_decrypt" ) as post, mock.patch.object( + transports.KeyManagementServiceRestInterceptor, + "post_asymmetric_decrypt_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.KeyManagementServiceRestInterceptor, "pre_asymmetric_decrypt" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.AsymmetricDecryptRequest.pb( service.AsymmetricDecryptRequest() ) @@ -22642,6 +22839,7 @@ def test_asymmetric_decrypt_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = service.AsymmetricDecryptResponse() + post_with_metadata.return_value = service.AsymmetricDecryptResponse(), metadata client.asymmetric_decrypt( request, @@ -22653,6 +22851,7 @@ def test_asymmetric_decrypt_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_mac_sign_rest_bad_request(request_type=service.MacSignRequest): @@ -22745,10 +22944,13 @@ def test_mac_sign_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.KeyManagementServiceRestInterceptor, "post_mac_sign" ) as post, mock.patch.object( + transports.KeyManagementServiceRestInterceptor, "post_mac_sign_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.KeyManagementServiceRestInterceptor, "pre_mac_sign" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.MacSignRequest.pb(service.MacSignRequest()) transcode.return_value = { "method": "post", @@ -22770,6 +22972,7 @@ def test_mac_sign_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = service.MacSignResponse() + post_with_metadata.return_value = service.MacSignResponse(), metadata client.mac_sign( request, @@ -22781,6 +22984,7 @@ def test_mac_sign_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() 
def test_mac_verify_rest_bad_request(request_type=service.MacVerifyRequest): @@ -22877,10 +23081,13 @@ def test_mac_verify_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.KeyManagementServiceRestInterceptor, "post_mac_verify" ) as post, mock.patch.object( + transports.KeyManagementServiceRestInterceptor, "post_mac_verify_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.KeyManagementServiceRestInterceptor, "pre_mac_verify" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.MacVerifyRequest.pb(service.MacVerifyRequest()) transcode.return_value = { "method": "post", @@ -22902,6 +23109,7 @@ def test_mac_verify_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = service.MacVerifyResponse() + post_with_metadata.return_value = service.MacVerifyResponse(), metadata client.mac_verify( request, @@ -22913,6 +23121,7 @@ def test_mac_verify_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_generate_random_bytes_rest_bad_request( @@ -22997,10 +23206,14 @@ def test_generate_random_bytes_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.KeyManagementServiceRestInterceptor, "post_generate_random_bytes" ) as post, mock.patch.object( + transports.KeyManagementServiceRestInterceptor, + "post_generate_random_bytes_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.KeyManagementServiceRestInterceptor, "pre_generate_random_bytes" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = service.GenerateRandomBytesRequest.pb( service.GenerateRandomBytesRequest() ) @@ -23026,6 +23239,10 @@ def test_generate_random_bytes_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = service.GenerateRandomBytesResponse() + post_with_metadata.return_value = ( + service.GenerateRandomBytesResponse(), + metadata, + ) client.generate_random_bytes( request, @@ -23037,6 +23254,7 @@ def test_generate_random_bytes_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationRequest): diff --git a/packages/google-cloud-language/CHANGELOG.md b/packages/google-cloud-language/CHANGELOG.md index f70694f77a6c..9ec8682e7599 100644 --- a/packages/google-cloud-language/CHANGELOG.md +++ b/packages/google-cloud-language/CHANGELOG.md @@ -4,6 +4,14 @@ [1]: https://pypi.org/project/google-cloud-language/#history +## [2.17.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-language-v2.16.0...google-cloud-language-v2.17.0) (2025-02-12) + + +### Features + +* Add REST Interceptors which support reading metadata ([908d742](https://github.com/googleapis/google-cloud-python/commit/908d7421a4adadd7407df7ec2a25e25688ff180f)) +* Add support for reading selective GAPIC generation methods from service YAML ([908d742](https://github.com/googleapis/google-cloud-python/commit/908d7421a4adadd7407df7ec2a25e25688ff180f)) + ## [2.16.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-language-v2.15.1...google-cloud-language-v2.16.0) (2024-12-12) diff --git a/packages/google-cloud-language/google/cloud/language/gapic_version.py 
b/packages/google-cloud-language/google/cloud/language/gapic_version.py index e154065d8da8..6053ad2404bf 100644 --- a/packages/google-cloud-language/google/cloud/language/gapic_version.py +++ b/packages/google-cloud-language/google/cloud/language/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.16.0" # {x-release-please-version} +__version__ = "2.17.0" # {x-release-please-version} diff --git a/packages/google-cloud-language/google/cloud/language_v1/gapic_version.py b/packages/google-cloud-language/google/cloud/language_v1/gapic_version.py index e154065d8da8..6053ad2404bf 100644 --- a/packages/google-cloud-language/google/cloud/language_v1/gapic_version.py +++ b/packages/google-cloud-language/google/cloud/language_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.16.0" # {x-release-please-version} +__version__ = "2.17.0" # {x-release-please-version} diff --git a/packages/google-cloud-language/google/cloud/language_v1/services/language_service/client.py b/packages/google-cloud-language/google/cloud/language_v1/services/language_service/client.py index 2058479edad8..9f14d5f9cb02 100644 --- a/packages/google-cloud-language/google/cloud/language_v1/services/language_service/client.py +++ b/packages/google-cloud-language/google/cloud/language_v1/services/language_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -463,6 +465,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. diff --git a/packages/google-cloud-language/google/cloud/language_v1/services/language_service/transports/rest.py b/packages/google-cloud-language/google/cloud/language_v1/services/language_service/transports/rest.py index 73a3c51d80e6..75907eaf35fc 100644 --- a/packages/google-cloud-language/google/cloud/language_v1/services/language_service/transports/rest.py +++ b/packages/google-cloud-language/google/cloud/language_v1/services/language_service/transports/rest.py @@ -149,12 +149,38 @@ def post_analyze_entities( ) -> language_service.AnalyzeEntitiesResponse: """Post-rpc interceptor for analyze_entities - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_analyze_entities_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response after it is returned by the LanguageService server but before - it is returned to user code. + it is returned to user code. This `post_analyze_entities` interceptor runs + before the `post_analyze_entities_with_metadata` interceptor. """ return response + def post_analyze_entities_with_metadata( + self, + response: language_service.AnalyzeEntitiesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + language_service.AnalyzeEntitiesResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for analyze_entities + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the LanguageService server but before it is returned to user code. + + We recommend only using this `post_analyze_entities_with_metadata` + interceptor in new development instead of the `post_analyze_entities` interceptor. + When both interceptors are used, this `post_analyze_entities_with_metadata` interceptor runs after the + `post_analyze_entities` interceptor. The (possibly modified) response returned by + `post_analyze_entities` will be passed to + `post_analyze_entities_with_metadata`. + """ + return response, metadata + def pre_analyze_entity_sentiment( self, request: language_service.AnalyzeEntitySentimentRequest, @@ -175,12 +201,38 @@ def post_analyze_entity_sentiment( ) -> language_service.AnalyzeEntitySentimentResponse: """Post-rpc interceptor for analyze_entity_sentiment - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_analyze_entity_sentiment_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the LanguageService server but before - it is returned to user code. + it is returned to user code. This `post_analyze_entity_sentiment` interceptor runs + before the `post_analyze_entity_sentiment_with_metadata` interceptor. """ return response + def post_analyze_entity_sentiment_with_metadata( + self, + response: language_service.AnalyzeEntitySentimentResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + language_service.AnalyzeEntitySentimentResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for analyze_entity_sentiment + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the LanguageService server but before it is returned to user code. + + We recommend only using this `post_analyze_entity_sentiment_with_metadata` + interceptor in new development instead of the `post_analyze_entity_sentiment` interceptor. + When both interceptors are used, this `post_analyze_entity_sentiment_with_metadata` interceptor runs after the + `post_analyze_entity_sentiment` interceptor. The (possibly modified) response returned by + `post_analyze_entity_sentiment` will be passed to + `post_analyze_entity_sentiment_with_metadata`. + """ + return response, metadata + def pre_analyze_sentiment( self, request: language_service.AnalyzeSentimentRequest, @@ -201,12 +253,38 @@ def post_analyze_sentiment( ) -> language_service.AnalyzeSentimentResponse: """Post-rpc interceptor for analyze_sentiment - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_analyze_sentiment_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the LanguageService server but before - it is returned to user code. 
+ it is returned to user code. This `post_analyze_sentiment` interceptor runs + before the `post_analyze_sentiment_with_metadata` interceptor. """ return response + def post_analyze_sentiment_with_metadata( + self, + response: language_service.AnalyzeSentimentResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + language_service.AnalyzeSentimentResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for analyze_sentiment + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the LanguageService server but before it is returned to user code. + + We recommend only using this `post_analyze_sentiment_with_metadata` + interceptor in new development instead of the `post_analyze_sentiment` interceptor. + When both interceptors are used, this `post_analyze_sentiment_with_metadata` interceptor runs after the + `post_analyze_sentiment` interceptor. The (possibly modified) response returned by + `post_analyze_sentiment` will be passed to + `post_analyze_sentiment_with_metadata`. + """ + return response, metadata + def pre_analyze_syntax( self, request: language_service.AnalyzeSyntaxRequest, @@ -226,12 +304,37 @@ def post_analyze_syntax( ) -> language_service.AnalyzeSyntaxResponse: """Post-rpc interceptor for analyze_syntax - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_analyze_syntax_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the LanguageService server but before - it is returned to user code. + it is returned to user code. This `post_analyze_syntax` interceptor runs + before the `post_analyze_syntax_with_metadata` interceptor. """ return response + def post_analyze_syntax_with_metadata( + self, + response: language_service.AnalyzeSyntaxResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + language_service.AnalyzeSyntaxResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for analyze_syntax + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the LanguageService server but before it is returned to user code. + + We recommend only using this `post_analyze_syntax_with_metadata` + interceptor in new development instead of the `post_analyze_syntax` interceptor. + When both interceptors are used, this `post_analyze_syntax_with_metadata` interceptor runs after the + `post_analyze_syntax` interceptor. The (possibly modified) response returned by + `post_analyze_syntax` will be passed to + `post_analyze_syntax_with_metadata`. + """ + return response, metadata + def pre_annotate_text( self, request: language_service.AnnotateTextRequest, @@ -251,12 +354,37 @@ def post_annotate_text( ) -> language_service.AnnotateTextResponse: """Post-rpc interceptor for annotate_text - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_annotate_text_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the LanguageService server but before - it is returned to user code. + it is returned to user code. This `post_annotate_text` interceptor runs + before the `post_annotate_text_with_metadata` interceptor. 
""" return response + def post_annotate_text_with_metadata( + self, + response: language_service.AnnotateTextResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + language_service.AnnotateTextResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for annotate_text + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the LanguageService server but before it is returned to user code. + + We recommend only using this `post_annotate_text_with_metadata` + interceptor in new development instead of the `post_annotate_text` interceptor. + When both interceptors are used, this `post_annotate_text_with_metadata` interceptor runs after the + `post_annotate_text` interceptor. The (possibly modified) response returned by + `post_annotate_text` will be passed to + `post_annotate_text_with_metadata`. + """ + return response, metadata + def pre_classify_text( self, request: language_service.ClassifyTextRequest, @@ -276,12 +404,37 @@ def post_classify_text( ) -> language_service.ClassifyTextResponse: """Post-rpc interceptor for classify_text - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_classify_text_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the LanguageService server but before - it is returned to user code. + it is returned to user code. This `post_classify_text` interceptor runs + before the `post_classify_text_with_metadata` interceptor. """ return response + def post_classify_text_with_metadata( + self, + response: language_service.ClassifyTextResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + language_service.ClassifyTextResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for classify_text + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the LanguageService server but before it is returned to user code. + + We recommend only using this `post_classify_text_with_metadata` + interceptor in new development instead of the `post_classify_text` interceptor. + When both interceptors are used, this `post_classify_text_with_metadata` interceptor runs after the + `post_classify_text` interceptor. The (possibly modified) response returned by + `post_classify_text` will be passed to + `post_classify_text_with_metadata`. + """ + return response, metadata + def pre_moderate_text( self, request: language_service.ModerateTextRequest, @@ -301,12 +454,37 @@ def post_moderate_text( ) -> language_service.ModerateTextResponse: """Post-rpc interceptor for moderate_text - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_moderate_text_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the LanguageService server but before - it is returned to user code. + it is returned to user code. This `post_moderate_text` interceptor runs + before the `post_moderate_text_with_metadata` interceptor. 
""" return response + def post_moderate_text_with_metadata( + self, + response: language_service.ModerateTextResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + language_service.ModerateTextResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for moderate_text + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the LanguageService server but before it is returned to user code. + + We recommend only using this `post_moderate_text_with_metadata` + interceptor in new development instead of the `post_moderate_text` interceptor. + When both interceptors are used, this `post_moderate_text_with_metadata` interceptor runs after the + `post_moderate_text` interceptor. The (possibly modified) response returned by + `post_moderate_text` will be passed to + `post_moderate_text_with_metadata`. + """ + return response, metadata + @dataclasses.dataclass class LanguageServiceRestStub: @@ -520,6 +698,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_analyze_entities(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_analyze_entities_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -676,6 +858,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_analyze_entity_sentiment(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_analyze_entity_sentiment_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -831,6 +1017,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_analyze_sentiment(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_analyze_sentiment_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -979,6 +1169,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_analyze_syntax(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_analyze_syntax_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1132,6 +1326,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_annotate_text(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_annotate_text_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1283,6 +1481,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_classify_text(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_classify_text_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1434,6 +1636,10 @@ def 
__call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_moderate_text(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_moderate_text_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-language/google/cloud/language_v1beta2/gapic_version.py b/packages/google-cloud-language/google/cloud/language_v1beta2/gapic_version.py index e154065d8da8..6053ad2404bf 100644 --- a/packages/google-cloud-language/google/cloud/language_v1beta2/gapic_version.py +++ b/packages/google-cloud-language/google/cloud/language_v1beta2/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.16.0" # {x-release-please-version} +__version__ = "2.17.0" # {x-release-please-version} diff --git a/packages/google-cloud-language/google/cloud/language_v1beta2/services/language_service/client.py b/packages/google-cloud-language/google/cloud/language_v1beta2/services/language_service/client.py index 5d8084a08c12..502d26195fba 100644 --- a/packages/google-cloud-language/google/cloud/language_v1beta2/services/language_service/client.py +++ b/packages/google-cloud-language/google/cloud/language_v1beta2/services/language_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -463,6 +465,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. diff --git a/packages/google-cloud-language/google/cloud/language_v1beta2/services/language_service/transports/rest.py b/packages/google-cloud-language/google/cloud/language_v1beta2/services/language_service/transports/rest.py index 6f6988dfb252..4330b43743a2 100644 --- a/packages/google-cloud-language/google/cloud/language_v1beta2/services/language_service/transports/rest.py +++ b/packages/google-cloud-language/google/cloud/language_v1beta2/services/language_service/transports/rest.py @@ -149,12 +149,38 @@ def post_analyze_entities( ) -> language_service.AnalyzeEntitiesResponse: """Post-rpc interceptor for analyze_entities - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_analyze_entities_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response after it is returned by the LanguageService server but before - it is returned to user code. + it is returned to user code. This `post_analyze_entities` interceptor runs + before the `post_analyze_entities_with_metadata` interceptor. """ return response + def post_analyze_entities_with_metadata( + self, + response: language_service.AnalyzeEntitiesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + language_service.AnalyzeEntitiesResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for analyze_entities + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the LanguageService server but before it is returned to user code. + + We recommend only using this `post_analyze_entities_with_metadata` + interceptor in new development instead of the `post_analyze_entities` interceptor. + When both interceptors are used, this `post_analyze_entities_with_metadata` interceptor runs after the + `post_analyze_entities` interceptor. The (possibly modified) response returned by + `post_analyze_entities` will be passed to + `post_analyze_entities_with_metadata`. + """ + return response, metadata + def pre_analyze_entity_sentiment( self, request: language_service.AnalyzeEntitySentimentRequest, @@ -175,12 +201,38 @@ def post_analyze_entity_sentiment( ) -> language_service.AnalyzeEntitySentimentResponse: """Post-rpc interceptor for analyze_entity_sentiment - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_analyze_entity_sentiment_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the LanguageService server but before - it is returned to user code. + it is returned to user code. This `post_analyze_entity_sentiment` interceptor runs + before the `post_analyze_entity_sentiment_with_metadata` interceptor. """ return response + def post_analyze_entity_sentiment_with_metadata( + self, + response: language_service.AnalyzeEntitySentimentResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + language_service.AnalyzeEntitySentimentResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for analyze_entity_sentiment + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the LanguageService server but before it is returned to user code. + + We recommend only using this `post_analyze_entity_sentiment_with_metadata` + interceptor in new development instead of the `post_analyze_entity_sentiment` interceptor. + When both interceptors are used, this `post_analyze_entity_sentiment_with_metadata` interceptor runs after the + `post_analyze_entity_sentiment` interceptor. The (possibly modified) response returned by + `post_analyze_entity_sentiment` will be passed to + `post_analyze_entity_sentiment_with_metadata`. + """ + return response, metadata + def pre_analyze_sentiment( self, request: language_service.AnalyzeSentimentRequest, @@ -201,12 +253,38 @@ def post_analyze_sentiment( ) -> language_service.AnalyzeSentimentResponse: """Post-rpc interceptor for analyze_sentiment - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_analyze_sentiment_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the LanguageService server but before - it is returned to user code. 
+ it is returned to user code. This `post_analyze_sentiment` interceptor runs + before the `post_analyze_sentiment_with_metadata` interceptor. """ return response + def post_analyze_sentiment_with_metadata( + self, + response: language_service.AnalyzeSentimentResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + language_service.AnalyzeSentimentResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for analyze_sentiment + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the LanguageService server but before it is returned to user code. + + We recommend only using this `post_analyze_sentiment_with_metadata` + interceptor in new development instead of the `post_analyze_sentiment` interceptor. + When both interceptors are used, this `post_analyze_sentiment_with_metadata` interceptor runs after the + `post_analyze_sentiment` interceptor. The (possibly modified) response returned by + `post_analyze_sentiment` will be passed to + `post_analyze_sentiment_with_metadata`. + """ + return response, metadata + def pre_analyze_syntax( self, request: language_service.AnalyzeSyntaxRequest, @@ -226,12 +304,37 @@ def post_analyze_syntax( ) -> language_service.AnalyzeSyntaxResponse: """Post-rpc interceptor for analyze_syntax - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_analyze_syntax_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the LanguageService server but before - it is returned to user code. + it is returned to user code. This `post_analyze_syntax` interceptor runs + before the `post_analyze_syntax_with_metadata` interceptor. """ return response + def post_analyze_syntax_with_metadata( + self, + response: language_service.AnalyzeSyntaxResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + language_service.AnalyzeSyntaxResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for analyze_syntax + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the LanguageService server but before it is returned to user code. + + We recommend only using this `post_analyze_syntax_with_metadata` + interceptor in new development instead of the `post_analyze_syntax` interceptor. + When both interceptors are used, this `post_analyze_syntax_with_metadata` interceptor runs after the + `post_analyze_syntax` interceptor. The (possibly modified) response returned by + `post_analyze_syntax` will be passed to + `post_analyze_syntax_with_metadata`. + """ + return response, metadata + def pre_annotate_text( self, request: language_service.AnnotateTextRequest, @@ -251,12 +354,37 @@ def post_annotate_text( ) -> language_service.AnnotateTextResponse: """Post-rpc interceptor for annotate_text - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_annotate_text_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the LanguageService server but before - it is returned to user code. + it is returned to user code. This `post_annotate_text` interceptor runs + before the `post_annotate_text_with_metadata` interceptor. 
""" return response + def post_annotate_text_with_metadata( + self, + response: language_service.AnnotateTextResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + language_service.AnnotateTextResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for annotate_text + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the LanguageService server but before it is returned to user code. + + We recommend only using this `post_annotate_text_with_metadata` + interceptor in new development instead of the `post_annotate_text` interceptor. + When both interceptors are used, this `post_annotate_text_with_metadata` interceptor runs after the + `post_annotate_text` interceptor. The (possibly modified) response returned by + `post_annotate_text` will be passed to + `post_annotate_text_with_metadata`. + """ + return response, metadata + def pre_classify_text( self, request: language_service.ClassifyTextRequest, @@ -276,12 +404,37 @@ def post_classify_text( ) -> language_service.ClassifyTextResponse: """Post-rpc interceptor for classify_text - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_classify_text_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the LanguageService server but before - it is returned to user code. + it is returned to user code. This `post_classify_text` interceptor runs + before the `post_classify_text_with_metadata` interceptor. """ return response + def post_classify_text_with_metadata( + self, + response: language_service.ClassifyTextResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + language_service.ClassifyTextResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for classify_text + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the LanguageService server but before it is returned to user code. + + We recommend only using this `post_classify_text_with_metadata` + interceptor in new development instead of the `post_classify_text` interceptor. + When both interceptors are used, this `post_classify_text_with_metadata` interceptor runs after the + `post_classify_text` interceptor. The (possibly modified) response returned by + `post_classify_text` will be passed to + `post_classify_text_with_metadata`. + """ + return response, metadata + def pre_moderate_text( self, request: language_service.ModerateTextRequest, @@ -301,12 +454,37 @@ def post_moderate_text( ) -> language_service.ModerateTextResponse: """Post-rpc interceptor for moderate_text - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_moderate_text_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the LanguageService server but before - it is returned to user code. + it is returned to user code. This `post_moderate_text` interceptor runs + before the `post_moderate_text_with_metadata` interceptor. 
""" return response + def post_moderate_text_with_metadata( + self, + response: language_service.ModerateTextResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + language_service.ModerateTextResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for moderate_text + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the LanguageService server but before it is returned to user code. + + We recommend only using this `post_moderate_text_with_metadata` + interceptor in new development instead of the `post_moderate_text` interceptor. + When both interceptors are used, this `post_moderate_text_with_metadata` interceptor runs after the + `post_moderate_text` interceptor. The (possibly modified) response returned by + `post_moderate_text` will be passed to + `post_moderate_text_with_metadata`. + """ + return response, metadata + @dataclasses.dataclass class LanguageServiceRestStub: @@ -520,6 +698,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_analyze_entities(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_analyze_entities_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -676,6 +858,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_analyze_entity_sentiment(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_analyze_entity_sentiment_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -831,6 +1017,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_analyze_sentiment(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_analyze_sentiment_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -979,6 +1169,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_analyze_syntax(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_analyze_syntax_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1132,6 +1326,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_annotate_text(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_annotate_text_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1283,6 +1481,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_classify_text(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_classify_text_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1434,6 +1636,10 @@ def 
__call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_moderate_text(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_moderate_text_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-language/google/cloud/language_v2/gapic_version.py b/packages/google-cloud-language/google/cloud/language_v2/gapic_version.py index e154065d8da8..6053ad2404bf 100644 --- a/packages/google-cloud-language/google/cloud/language_v2/gapic_version.py +++ b/packages/google-cloud-language/google/cloud/language_v2/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.16.0" # {x-release-please-version} +__version__ = "2.17.0" # {x-release-please-version} diff --git a/packages/google-cloud-language/google/cloud/language_v2/services/language_service/client.py b/packages/google-cloud-language/google/cloud/language_v2/services/language_service/client.py index 313ab60a80ff..a544854b8dad 100644 --- a/packages/google-cloud-language/google/cloud/language_v2/services/language_service/client.py +++ b/packages/google-cloud-language/google/cloud/language_v2/services/language_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -463,6 +465,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. diff --git a/packages/google-cloud-language/google/cloud/language_v2/services/language_service/transports/rest.py b/packages/google-cloud-language/google/cloud/language_v2/services/language_service/transports/rest.py index c15066427335..66e3e0e5725b 100644 --- a/packages/google-cloud-language/google/cloud/language_v2/services/language_service/transports/rest.py +++ b/packages/google-cloud-language/google/cloud/language_v2/services/language_service/transports/rest.py @@ -133,12 +133,38 @@ def post_analyze_entities( ) -> language_service.AnalyzeEntitiesResponse: """Post-rpc interceptor for analyze_entities - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_analyze_entities_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the LanguageService server but before - it is returned to user code. 
+ it is returned to user code. This `post_analyze_entities` interceptor runs + before the `post_analyze_entities_with_metadata` interceptor. """ return response + def post_analyze_entities_with_metadata( + self, + response: language_service.AnalyzeEntitiesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + language_service.AnalyzeEntitiesResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for analyze_entities + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the LanguageService server but before it is returned to user code. + + We recommend only using this `post_analyze_entities_with_metadata` + interceptor in new development instead of the `post_analyze_entities` interceptor. + When both interceptors are used, this `post_analyze_entities_with_metadata` interceptor runs after the + `post_analyze_entities` interceptor. The (possibly modified) response returned by + `post_analyze_entities` will be passed to + `post_analyze_entities_with_metadata`. + """ + return response, metadata + def pre_analyze_sentiment( self, request: language_service.AnalyzeSentimentRequest, @@ -159,12 +185,38 @@ def post_analyze_sentiment( ) -> language_service.AnalyzeSentimentResponse: """Post-rpc interceptor for analyze_sentiment - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_analyze_sentiment_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the LanguageService server but before - it is returned to user code. + it is returned to user code. This `post_analyze_sentiment` interceptor runs + before the `post_analyze_sentiment_with_metadata` interceptor. """ return response + def post_analyze_sentiment_with_metadata( + self, + response: language_service.AnalyzeSentimentResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + language_service.AnalyzeSentimentResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for analyze_sentiment + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the LanguageService server but before it is returned to user code. + + We recommend only using this `post_analyze_sentiment_with_metadata` + interceptor in new development instead of the `post_analyze_sentiment` interceptor. + When both interceptors are used, this `post_analyze_sentiment_with_metadata` interceptor runs after the + `post_analyze_sentiment` interceptor. The (possibly modified) response returned by + `post_analyze_sentiment` will be passed to + `post_analyze_sentiment_with_metadata`. + """ + return response, metadata + def pre_annotate_text( self, request: language_service.AnnotateTextRequest, @@ -184,12 +236,37 @@ def post_annotate_text( ) -> language_service.AnnotateTextResponse: """Post-rpc interceptor for annotate_text - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_annotate_text_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the LanguageService server but before - it is returned to user code. + it is returned to user code. This `post_annotate_text` interceptor runs + before the `post_annotate_text_with_metadata` interceptor. 
""" return response + def post_annotate_text_with_metadata( + self, + response: language_service.AnnotateTextResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + language_service.AnnotateTextResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for annotate_text + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the LanguageService server but before it is returned to user code. + + We recommend only using this `post_annotate_text_with_metadata` + interceptor in new development instead of the `post_annotate_text` interceptor. + When both interceptors are used, this `post_annotate_text_with_metadata` interceptor runs after the + `post_annotate_text` interceptor. The (possibly modified) response returned by + `post_annotate_text` will be passed to + `post_annotate_text_with_metadata`. + """ + return response, metadata + def pre_classify_text( self, request: language_service.ClassifyTextRequest, @@ -209,12 +286,37 @@ def post_classify_text( ) -> language_service.ClassifyTextResponse: """Post-rpc interceptor for classify_text - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_classify_text_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the LanguageService server but before - it is returned to user code. + it is returned to user code. This `post_classify_text` interceptor runs + before the `post_classify_text_with_metadata` interceptor. """ return response + def post_classify_text_with_metadata( + self, + response: language_service.ClassifyTextResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + language_service.ClassifyTextResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for classify_text + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the LanguageService server but before it is returned to user code. + + We recommend only using this `post_classify_text_with_metadata` + interceptor in new development instead of the `post_classify_text` interceptor. + When both interceptors are used, this `post_classify_text_with_metadata` interceptor runs after the + `post_classify_text` interceptor. The (possibly modified) response returned by + `post_classify_text` will be passed to + `post_classify_text_with_metadata`. + """ + return response, metadata + def pre_moderate_text( self, request: language_service.ModerateTextRequest, @@ -234,12 +336,37 @@ def post_moderate_text( ) -> language_service.ModerateTextResponse: """Post-rpc interceptor for moderate_text - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_moderate_text_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the LanguageService server but before - it is returned to user code. + it is returned to user code. This `post_moderate_text` interceptor runs + before the `post_moderate_text_with_metadata` interceptor. 
""" return response + def post_moderate_text_with_metadata( + self, + response: language_service.ModerateTextResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + language_service.ModerateTextResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for moderate_text + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the LanguageService server but before it is returned to user code. + + We recommend only using this `post_moderate_text_with_metadata` + interceptor in new development instead of the `post_moderate_text` interceptor. + When both interceptors are used, this `post_moderate_text_with_metadata` interceptor runs after the + `post_moderate_text` interceptor. The (possibly modified) response returned by + `post_moderate_text` will be passed to + `post_moderate_text_with_metadata`. + """ + return response, metadata + @dataclasses.dataclass class LanguageServiceRestStub: @@ -453,6 +580,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_analyze_entities(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_analyze_entities_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -606,6 +737,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_analyze_sentiment(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_analyze_sentiment_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -758,6 +893,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_annotate_text(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_annotate_text_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -909,6 +1048,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_classify_text(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_classify_text_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1060,6 +1203,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_moderate_text(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_moderate_text_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-language/samples/generated_samples/snippet_metadata_google.cloud.language.v1.json b/packages/google-cloud-language/samples/generated_samples/snippet_metadata_google.cloud.language.v1.json index a59b9c7dac35..e51a5dcaa4ce 100644 --- a/packages/google-cloud-language/samples/generated_samples/snippet_metadata_google.cloud.language.v1.json +++ 
b/packages/google-cloud-language/samples/generated_samples/snippet_metadata_google.cloud.language.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-language", - "version": "2.16.0" + "version": "2.17.0" }, "snippets": [ { diff --git a/packages/google-cloud-language/samples/generated_samples/snippet_metadata_google.cloud.language.v1beta2.json b/packages/google-cloud-language/samples/generated_samples/snippet_metadata_google.cloud.language.v1beta2.json index 4aaea43d83d6..08b1e309a9d8 100644 --- a/packages/google-cloud-language/samples/generated_samples/snippet_metadata_google.cloud.language.v1beta2.json +++ b/packages/google-cloud-language/samples/generated_samples/snippet_metadata_google.cloud.language.v1beta2.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-language", - "version": "2.16.0" + "version": "2.17.0" }, "snippets": [ { diff --git a/packages/google-cloud-language/samples/generated_samples/snippet_metadata_google.cloud.language.v2.json b/packages/google-cloud-language/samples/generated_samples/snippet_metadata_google.cloud.language.v2.json index c7575a563b47..fe048b55b3bc 100644 --- a/packages/google-cloud-language/samples/generated_samples/snippet_metadata_google.cloud.language.v2.json +++ b/packages/google-cloud-language/samples/generated_samples/snippet_metadata_google.cloud.language.v2.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-language", - "version": "2.16.0" + "version": "2.17.0" }, "snippets": [ { diff --git a/packages/google-cloud-language/tests/unit/gapic/language_v1/test_language_service.py b/packages/google-cloud-language/tests/unit/gapic/language_v1/test_language_service.py index 85e241a13e72..fe42c55828e4 100644 --- a/packages/google-cloud-language/tests/unit/gapic/language_v1/test_language_service.py +++ b/packages/google-cloud-language/tests/unit/gapic/language_v1/test_language_service.py @@ -59,6 +59,13 @@ ) from google.cloud.language_v1.types import language_service +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -317,6 +324,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
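# Aside: a minimal sketch of how client code could adopt the new *_with_metadata
# interceptors exercised by the tests below. The subclass name and the print-based
# handling are illustrative assumptions; `LanguageServiceRestInterceptor` and the
# (response, metadata) contract come from the generated rest.py in this patch, while
# the transport's `interceptor=` argument and `LanguageServiceClient(transport=...)`
# follow the usual GAPIC REST wiring and are assumed here rather than shown above.
from google.cloud import language_v1
from google.cloud.language_v1.services.language_service.transports.rest import (
    LanguageServiceRestInterceptor,
    LanguageServiceRestTransport,
)


class HeaderLoggingInterceptor(LanguageServiceRestInterceptor):
    def post_analyze_entities_with_metadata(self, response, metadata):
        # `metadata` is the list of (header, value) pairs built from the HTTP
        # response headers; the returned response is what the caller receives.
        for key, value in metadata:
            print(f"analyze_entities response header: {key}={value}")
        return response, metadata


transport = LanguageServiceRestTransport(interceptor=HeaderLoggingInterceptor())
client = language_v1.LanguageServiceClient(transport=transport)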
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = LanguageServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = LanguageServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -4868,10 +4918,14 @@ def test_analyze_sentiment_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.LanguageServiceRestInterceptor, "post_analyze_sentiment" ) as post, mock.patch.object( + transports.LanguageServiceRestInterceptor, + "post_analyze_sentiment_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.LanguageServiceRestInterceptor, "pre_analyze_sentiment" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = language_service.AnalyzeSentimentRequest.pb( language_service.AnalyzeSentimentRequest() ) @@ -4897,6 +4951,10 @@ def test_analyze_sentiment_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = language_service.AnalyzeSentimentResponse() + post_with_metadata.return_value = ( + language_service.AnalyzeSentimentResponse(), + metadata, + ) client.analyze_sentiment( request, @@ -4908,6 +4966,7 @@ def test_analyze_sentiment_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_analyze_entities_rest_bad_request( @@ -4992,10 +5051,13 @@ def test_analyze_entities_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.LanguageServiceRestInterceptor, "post_analyze_entities" ) as post, mock.patch.object( + transports.LanguageServiceRestInterceptor, "post_analyze_entities_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.LanguageServiceRestInterceptor, "pre_analyze_entities" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = language_service.AnalyzeEntitiesRequest.pb( language_service.AnalyzeEntitiesRequest() ) @@ -5021,6 +5083,10 @@ def test_analyze_entities_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = language_service.AnalyzeEntitiesResponse() + post_with_metadata.return_value = ( + language_service.AnalyzeEntitiesResponse(), + metadata, + ) client.analyze_entities( request, @@ -5032,6 +5098,7 @@ def 
test_analyze_entities_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_analyze_entity_sentiment_rest_bad_request( @@ -5116,10 +5183,14 @@ def test_analyze_entity_sentiment_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.LanguageServiceRestInterceptor, "post_analyze_entity_sentiment" ) as post, mock.patch.object( + transports.LanguageServiceRestInterceptor, + "post_analyze_entity_sentiment_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.LanguageServiceRestInterceptor, "pre_analyze_entity_sentiment" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = language_service.AnalyzeEntitySentimentRequest.pb( language_service.AnalyzeEntitySentimentRequest() ) @@ -5145,6 +5216,10 @@ def test_analyze_entity_sentiment_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = language_service.AnalyzeEntitySentimentResponse() + post_with_metadata.return_value = ( + language_service.AnalyzeEntitySentimentResponse(), + metadata, + ) client.analyze_entity_sentiment( request, @@ -5156,6 +5231,7 @@ def test_analyze_entity_sentiment_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_analyze_syntax_rest_bad_request( @@ -5240,10 +5316,13 @@ def test_analyze_syntax_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.LanguageServiceRestInterceptor, "post_analyze_syntax" ) as post, mock.patch.object( + transports.LanguageServiceRestInterceptor, "post_analyze_syntax_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.LanguageServiceRestInterceptor, "pre_analyze_syntax" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = language_service.AnalyzeSyntaxRequest.pb( language_service.AnalyzeSyntaxRequest() ) @@ -5269,6 +5348,10 @@ def test_analyze_syntax_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = language_service.AnalyzeSyntaxResponse() + post_with_metadata.return_value = ( + language_service.AnalyzeSyntaxResponse(), + metadata, + ) client.analyze_syntax( request, @@ -5280,6 +5363,7 @@ def test_analyze_syntax_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_classify_text_rest_bad_request( @@ -5361,10 +5445,13 @@ def test_classify_text_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.LanguageServiceRestInterceptor, "post_classify_text" ) as post, mock.patch.object( + transports.LanguageServiceRestInterceptor, "post_classify_text_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.LanguageServiceRestInterceptor, "pre_classify_text" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = language_service.ClassifyTextRequest.pb( language_service.ClassifyTextRequest() ) @@ -5390,6 +5477,10 @@ def test_classify_text_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = language_service.ClassifyTextResponse() + post_with_metadata.return_value = ( + language_service.ClassifyTextResponse(), + metadata, + ) client.classify_text( request, @@ -5401,6 +5492,7 @@ def 
test_classify_text_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_moderate_text_rest_bad_request( @@ -5482,10 +5574,13 @@ def test_moderate_text_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.LanguageServiceRestInterceptor, "post_moderate_text" ) as post, mock.patch.object( + transports.LanguageServiceRestInterceptor, "post_moderate_text_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.LanguageServiceRestInterceptor, "pre_moderate_text" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = language_service.ModerateTextRequest.pb( language_service.ModerateTextRequest() ) @@ -5511,6 +5606,10 @@ def test_moderate_text_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = language_service.ModerateTextResponse() + post_with_metadata.return_value = ( + language_service.ModerateTextResponse(), + metadata, + ) client.moderate_text( request, @@ -5522,6 +5621,7 @@ def test_moderate_text_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_annotate_text_rest_bad_request( @@ -5606,10 +5706,13 @@ def test_annotate_text_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.LanguageServiceRestInterceptor, "post_annotate_text" ) as post, mock.patch.object( + transports.LanguageServiceRestInterceptor, "post_annotate_text_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.LanguageServiceRestInterceptor, "pre_annotate_text" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = language_service.AnnotateTextRequest.pb( language_service.AnnotateTextRequest() ) @@ -5635,6 +5738,10 @@ def test_annotate_text_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = language_service.AnnotateTextResponse() + post_with_metadata.return_value = ( + language_service.AnnotateTextResponse(), + metadata, + ) client.annotate_text( request, @@ -5646,6 +5753,7 @@ def test_annotate_text_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_initialize_client_w_rest(): diff --git a/packages/google-cloud-language/tests/unit/gapic/language_v1beta2/test_language_service.py b/packages/google-cloud-language/tests/unit/gapic/language_v1beta2/test_language_service.py index 8dc21f885917..5f0fee4f9b06 100644 --- a/packages/google-cloud-language/tests/unit/gapic/language_v1beta2/test_language_service.py +++ b/packages/google-cloud-language/tests/unit/gapic/language_v1beta2/test_language_service.py @@ -59,6 +59,13 @@ ) from google.cloud.language_v1beta2.types import language_service +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -317,6 +324,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
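# Aside: a sketch of the caller-facing effect of the new _add_cred_info_for_auth_errors
# helper verified by the tests below. It assumes the generated client invokes the helper
# on 401/403/404 failures before surfacing them (only the helper and its tests appear in
# this patch) and that the active google-auth>=2.35.0 credentials expose get_cred_info().
import json

from google.api_core import exceptions as core_exceptions
from google.cloud import language_v1beta2

client = language_v1beta2.LanguageServiceClient()
document = language_v1beta2.Document(
    content="Hello, world!", type_=language_v1beta2.Document.Type.PLAIN_TEXT
)
try:
    client.analyze_sentiment(document=document)
except core_exceptions.PermissionDenied as exc:
    # The helper appends json.dumps(cred_info) as an extra string entry in details,
    # carrying keys such as credential_source, credential_type and principal.
    cred_entries = [d for d in exc.details if isinstance(d, str) and d.startswith("{")]
    if cred_entries:
        print(json.loads(cred_entries[-1]).get("principal"))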
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = LanguageServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = LanguageServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -4869,10 +4919,14 @@ def test_analyze_sentiment_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.LanguageServiceRestInterceptor, "post_analyze_sentiment" ) as post, mock.patch.object( + transports.LanguageServiceRestInterceptor, + "post_analyze_sentiment_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.LanguageServiceRestInterceptor, "pre_analyze_sentiment" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = language_service.AnalyzeSentimentRequest.pb( language_service.AnalyzeSentimentRequest() ) @@ -4898,6 +4952,10 @@ def test_analyze_sentiment_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = language_service.AnalyzeSentimentResponse() + post_with_metadata.return_value = ( + language_service.AnalyzeSentimentResponse(), + metadata, + ) client.analyze_sentiment( request, @@ -4909,6 +4967,7 @@ def test_analyze_sentiment_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_analyze_entities_rest_bad_request( @@ -4993,10 +5052,13 @@ def test_analyze_entities_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.LanguageServiceRestInterceptor, "post_analyze_entities" ) as post, mock.patch.object( + transports.LanguageServiceRestInterceptor, "post_analyze_entities_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.LanguageServiceRestInterceptor, "pre_analyze_entities" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = language_service.AnalyzeEntitiesRequest.pb( language_service.AnalyzeEntitiesRequest() ) @@ -5022,6 +5084,10 @@ def test_analyze_entities_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = language_service.AnalyzeEntitiesResponse() + post_with_metadata.return_value = ( + language_service.AnalyzeEntitiesResponse(), + metadata, + ) client.analyze_entities( request, @@ -5033,6 +5099,7 @@ def 
test_analyze_entities_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_analyze_entity_sentiment_rest_bad_request( @@ -5117,10 +5184,14 @@ def test_analyze_entity_sentiment_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.LanguageServiceRestInterceptor, "post_analyze_entity_sentiment" ) as post, mock.patch.object( + transports.LanguageServiceRestInterceptor, + "post_analyze_entity_sentiment_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.LanguageServiceRestInterceptor, "pre_analyze_entity_sentiment" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = language_service.AnalyzeEntitySentimentRequest.pb( language_service.AnalyzeEntitySentimentRequest() ) @@ -5146,6 +5217,10 @@ def test_analyze_entity_sentiment_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = language_service.AnalyzeEntitySentimentResponse() + post_with_metadata.return_value = ( + language_service.AnalyzeEntitySentimentResponse(), + metadata, + ) client.analyze_entity_sentiment( request, @@ -5157,6 +5232,7 @@ def test_analyze_entity_sentiment_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_analyze_syntax_rest_bad_request( @@ -5241,10 +5317,13 @@ def test_analyze_syntax_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.LanguageServiceRestInterceptor, "post_analyze_syntax" ) as post, mock.patch.object( + transports.LanguageServiceRestInterceptor, "post_analyze_syntax_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.LanguageServiceRestInterceptor, "pre_analyze_syntax" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = language_service.AnalyzeSyntaxRequest.pb( language_service.AnalyzeSyntaxRequest() ) @@ -5270,6 +5349,10 @@ def test_analyze_syntax_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = language_service.AnalyzeSyntaxResponse() + post_with_metadata.return_value = ( + language_service.AnalyzeSyntaxResponse(), + metadata, + ) client.analyze_syntax( request, @@ -5281,6 +5364,7 @@ def test_analyze_syntax_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_classify_text_rest_bad_request( @@ -5362,10 +5446,13 @@ def test_classify_text_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.LanguageServiceRestInterceptor, "post_classify_text" ) as post, mock.patch.object( + transports.LanguageServiceRestInterceptor, "post_classify_text_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.LanguageServiceRestInterceptor, "pre_classify_text" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = language_service.ClassifyTextRequest.pb( language_service.ClassifyTextRequest() ) @@ -5391,6 +5478,10 @@ def test_classify_text_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = language_service.ClassifyTextResponse() + post_with_metadata.return_value = ( + language_service.ClassifyTextResponse(), + metadata, + ) client.classify_text( request, @@ -5402,6 +5493,7 @@ def 
test_classify_text_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_moderate_text_rest_bad_request( @@ -5483,10 +5575,13 @@ def test_moderate_text_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.LanguageServiceRestInterceptor, "post_moderate_text" ) as post, mock.patch.object( + transports.LanguageServiceRestInterceptor, "post_moderate_text_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.LanguageServiceRestInterceptor, "pre_moderate_text" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = language_service.ModerateTextRequest.pb( language_service.ModerateTextRequest() ) @@ -5512,6 +5607,10 @@ def test_moderate_text_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = language_service.ModerateTextResponse() + post_with_metadata.return_value = ( + language_service.ModerateTextResponse(), + metadata, + ) client.moderate_text( request, @@ -5523,6 +5622,7 @@ def test_moderate_text_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_annotate_text_rest_bad_request( @@ -5607,10 +5707,13 @@ def test_annotate_text_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.LanguageServiceRestInterceptor, "post_annotate_text" ) as post, mock.patch.object( + transports.LanguageServiceRestInterceptor, "post_annotate_text_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.LanguageServiceRestInterceptor, "pre_annotate_text" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = language_service.AnnotateTextRequest.pb( language_service.AnnotateTextRequest() ) @@ -5636,6 +5739,10 @@ def test_annotate_text_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = language_service.AnnotateTextResponse() + post_with_metadata.return_value = ( + language_service.AnnotateTextResponse(), + metadata, + ) client.annotate_text( request, @@ -5647,6 +5754,7 @@ def test_annotate_text_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_initialize_client_w_rest(): diff --git a/packages/google-cloud-language/tests/unit/gapic/language_v2/test_language_service.py b/packages/google-cloud-language/tests/unit/gapic/language_v2/test_language_service.py index b67a51eac44a..db75d07dc4b9 100644 --- a/packages/google-cloud-language/tests/unit/gapic/language_v2/test_language_service.py +++ b/packages/google-cloud-language/tests/unit/gapic/language_v2/test_language_service.py @@ -59,6 +59,13 @@ ) from google.cloud.language_v2.types import language_service +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -317,6 +324,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = LanguageServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = LanguageServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -3884,10 +3934,14 @@ def test_analyze_sentiment_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.LanguageServiceRestInterceptor, "post_analyze_sentiment" ) as post, mock.patch.object( + transports.LanguageServiceRestInterceptor, + "post_analyze_sentiment_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.LanguageServiceRestInterceptor, "pre_analyze_sentiment" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = language_service.AnalyzeSentimentRequest.pb( language_service.AnalyzeSentimentRequest() ) @@ -3913,6 +3967,10 @@ def test_analyze_sentiment_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = language_service.AnalyzeSentimentResponse() + post_with_metadata.return_value = ( + language_service.AnalyzeSentimentResponse(), + metadata, + ) client.analyze_sentiment( request, @@ -3924,6 +3982,7 @@ def test_analyze_sentiment_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_analyze_entities_rest_bad_request( @@ -4010,10 +4069,13 @@ def test_analyze_entities_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.LanguageServiceRestInterceptor, "post_analyze_entities" ) as post, mock.patch.object( + transports.LanguageServiceRestInterceptor, "post_analyze_entities_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.LanguageServiceRestInterceptor, "pre_analyze_entities" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = language_service.AnalyzeEntitiesRequest.pb( language_service.AnalyzeEntitiesRequest() ) @@ -4039,6 +4101,10 @@ def test_analyze_entities_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = language_service.AnalyzeEntitiesResponse() + post_with_metadata.return_value = ( + language_service.AnalyzeEntitiesResponse(), + metadata, + ) client.analyze_entities( request, @@ -4050,6 +4116,7 @@ def 
test_analyze_entities_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_classify_text_rest_bad_request( @@ -4136,10 +4203,13 @@ def test_classify_text_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.LanguageServiceRestInterceptor, "post_classify_text" ) as post, mock.patch.object( + transports.LanguageServiceRestInterceptor, "post_classify_text_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.LanguageServiceRestInterceptor, "pre_classify_text" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = language_service.ClassifyTextRequest.pb( language_service.ClassifyTextRequest() ) @@ -4165,6 +4235,10 @@ def test_classify_text_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = language_service.ClassifyTextResponse() + post_with_metadata.return_value = ( + language_service.ClassifyTextResponse(), + metadata, + ) client.classify_text( request, @@ -4176,6 +4250,7 @@ def test_classify_text_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_moderate_text_rest_bad_request( @@ -4262,10 +4337,13 @@ def test_moderate_text_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.LanguageServiceRestInterceptor, "post_moderate_text" ) as post, mock.patch.object( + transports.LanguageServiceRestInterceptor, "post_moderate_text_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.LanguageServiceRestInterceptor, "pre_moderate_text" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = language_service.ModerateTextRequest.pb( language_service.ModerateTextRequest() ) @@ -4291,6 +4369,10 @@ def test_moderate_text_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = language_service.ModerateTextResponse() + post_with_metadata.return_value = ( + language_service.ModerateTextResponse(), + metadata, + ) client.moderate_text( request, @@ -4302,6 +4384,7 @@ def test_moderate_text_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_annotate_text_rest_bad_request( @@ -4388,10 +4471,13 @@ def test_annotate_text_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.LanguageServiceRestInterceptor, "post_annotate_text" ) as post, mock.patch.object( + transports.LanguageServiceRestInterceptor, "post_annotate_text_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.LanguageServiceRestInterceptor, "pre_annotate_text" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = language_service.AnnotateTextRequest.pb( language_service.AnnotateTextRequest() ) @@ -4417,6 +4503,10 @@ def test_annotate_text_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = language_service.AnnotateTextResponse() + post_with_metadata.return_value = ( + language_service.AnnotateTextResponse(), + metadata, + ) client.annotate_text( request, @@ -4428,6 +4518,7 @@ def test_annotate_text_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def 
test_initialize_client_w_rest(): diff --git a/packages/google-cloud-life-sciences/CHANGELOG.md b/packages/google-cloud-life-sciences/CHANGELOG.md index 0acb2d0e7c26..f6804f867141 100644 --- a/packages/google-cloud-life-sciences/CHANGELOG.md +++ b/packages/google-cloud-life-sciences/CHANGELOG.md @@ -1,5 +1,13 @@ # Changelog +## [0.9.16](https://github.com/googleapis/google-cloud-python/compare/google-cloud-life-sciences-v0.9.15...google-cloud-life-sciences-v0.9.16) (2025-02-12) + + +### Features + +* Add REST Interceptors which support reading metadata ([908d742](https://github.com/googleapis/google-cloud-python/commit/908d7421a4adadd7407df7ec2a25e25688ff180f)) +* Add support for reading selective GAPIC generation methods from service YAML ([908d742](https://github.com/googleapis/google-cloud-python/commit/908d7421a4adadd7407df7ec2a25e25688ff180f)) + ## [0.9.15](https://github.com/googleapis/google-cloud-python/compare/google-cloud-life-sciences-v0.9.14...google-cloud-life-sciences-v0.9.15) (2024-12-12) diff --git a/packages/google-cloud-life-sciences/google/cloud/lifesciences/gapic_version.py b/packages/google-cloud-life-sciences/google/cloud/lifesciences/gapic_version.py index 956522e5b1bb..5d73c1716ca4 100644 --- a/packages/google-cloud-life-sciences/google/cloud/lifesciences/gapic_version.py +++ b/packages/google-cloud-life-sciences/google/cloud/lifesciences/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.9.15" # {x-release-please-version} +__version__ = "0.9.16" # {x-release-please-version} diff --git a/packages/google-cloud-life-sciences/google/cloud/lifesciences_v2beta/gapic_version.py b/packages/google-cloud-life-sciences/google/cloud/lifesciences_v2beta/gapic_version.py index 956522e5b1bb..5d73c1716ca4 100644 --- a/packages/google-cloud-life-sciences/google/cloud/lifesciences_v2beta/gapic_version.py +++ b/packages/google-cloud-life-sciences/google/cloud/lifesciences_v2beta/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.9.15" # {x-release-please-version} +__version__ = "0.9.16" # {x-release-please-version} diff --git a/packages/google-cloud-life-sciences/google/cloud/lifesciences_v2beta/services/workflows_service_v2_beta/client.py b/packages/google-cloud-life-sciences/google/cloud/lifesciences_v2beta/services/workflows_service_v2_beta/client.py index d3bab5e50c9c..960b7b636b26 100644 --- a/packages/google-cloud-life-sciences/google/cloud/lifesciences_v2beta/services/workflows_service_v2_beta/client.py +++ b/packages/google-cloud-life-sciences/google/cloud/lifesciences_v2beta/services/workflows_service_v2_beta/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -470,6 +472,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. 
+ """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -844,16 +873,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -899,16 +932,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def cancel_operation( self, @@ -1009,16 +1046,20 @@ def get_location( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def list_locations( self, @@ -1064,16 +1105,20 @@ def list_locations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. 
+ return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-life-sciences/google/cloud/lifesciences_v2beta/services/workflows_service_v2_beta/transports/rest.py b/packages/google-cloud-life-sciences/google/cloud/lifesciences_v2beta/services/workflows_service_v2_beta/transports/rest.py index 715e0ac658a6..b1e5e45e4b87 100644 --- a/packages/google-cloud-life-sciences/google/cloud/lifesciences_v2beta/services/workflows_service_v2_beta/transports/rest.py +++ b/packages/google-cloud-life-sciences/google/cloud/lifesciences_v2beta/services/workflows_service_v2_beta/transports/rest.py @@ -101,12 +101,35 @@ def post_run_pipeline( ) -> operations_pb2.Operation: """Post-rpc interceptor for run_pipeline - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_run_pipeline_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the WorkflowsServiceV2Beta server but before - it is returned to user code. + it is returned to user code. This `post_run_pipeline` interceptor runs + before the `post_run_pipeline_with_metadata` interceptor. """ return response + def post_run_pipeline_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for run_pipeline + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the WorkflowsServiceV2Beta server but before it is returned to user code. + + We recommend only using this `post_run_pipeline_with_metadata` + interceptor in new development instead of the `post_run_pipeline` interceptor. + When both interceptors are used, this `post_run_pipeline_with_metadata` interceptor runs after the + `post_run_pipeline` interceptor. The (possibly modified) response returned by + `post_run_pipeline` will be passed to + `post_run_pipeline_with_metadata`. 
+ """ + return response, metadata + def pre_get_location( self, request: locations_pb2.GetLocationRequest, @@ -494,6 +517,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_run_pipeline(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_run_pipeline_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-life-sciences/samples/generated_samples/snippet_metadata_google.cloud.lifesciences.v2beta.json b/packages/google-cloud-life-sciences/samples/generated_samples/snippet_metadata_google.cloud.lifesciences.v2beta.json index e6a22e747c46..37db755e02ef 100644 --- a/packages/google-cloud-life-sciences/samples/generated_samples/snippet_metadata_google.cloud.lifesciences.v2beta.json +++ b/packages/google-cloud-life-sciences/samples/generated_samples/snippet_metadata_google.cloud.lifesciences.v2beta.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-life-sciences", - "version": "0.9.15" + "version": "0.9.16" }, "snippets": [ { diff --git a/packages/google-cloud-life-sciences/tests/unit/gapic/lifesciences_v2beta/test_workflows_service_v2_beta.py b/packages/google-cloud-life-sciences/tests/unit/gapic/lifesciences_v2beta/test_workflows_service_v2_beta.py index b45fadc77aec..78b669312dbb 100644 --- a/packages/google-cloud-life-sciences/tests/unit/gapic/lifesciences_v2beta/test_workflows_service_v2_beta.py +++ b/packages/google-cloud-life-sciences/tests/unit/gapic/lifesciences_v2beta/test_workflows_service_v2_beta.py @@ -71,6 +71,13 @@ ) from google.cloud.lifesciences_v2beta.types import workflows +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -346,6 +353,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = WorkflowsServiceV2BetaClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = WorkflowsServiceV2BetaClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -1746,10 +1796,14 @@ def test_run_pipeline_rest_interceptors(null_interceptor): ), mock.patch.object( transports.WorkflowsServiceV2BetaRestInterceptor, "post_run_pipeline" ) as post, mock.patch.object( + transports.WorkflowsServiceV2BetaRestInterceptor, + "post_run_pipeline_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.WorkflowsServiceV2BetaRestInterceptor, "pre_run_pipeline" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = workflows.RunPipelineRequest.pb(workflows.RunPipelineRequest()) transcode.return_value = { "method": "post", @@ -1771,6 +1825,7 @@ def test_run_pipeline_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.run_pipeline( request, @@ -1782,6 +1837,7 @@ def test_run_pipeline_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationRequest): diff --git a/packages/google-cloud-managed-identities/CHANGELOG.md b/packages/google-cloud-managed-identities/CHANGELOG.md index c2f5ec852961..57666208819e 100644 --- a/packages/google-cloud-managed-identities/CHANGELOG.md +++ b/packages/google-cloud-managed-identities/CHANGELOG.md @@ -1,5 +1,13 @@ # Changelog +## [1.12.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-managed-identities-v1.11.0...google-cloud-managed-identities-v1.12.0) (2025-02-12) + + +### Features + +* Add REST Interceptors which support reading metadata ([908d742](https://github.com/googleapis/google-cloud-python/commit/908d7421a4adadd7407df7ec2a25e25688ff180f)) +* Add support for reading selective GAPIC generation methods from service YAML ([908d742](https://github.com/googleapis/google-cloud-python/commit/908d7421a4adadd7407df7ec2a25e25688ff180f)) + ## 
[1.11.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-managed-identities-v1.10.1...google-cloud-managed-identities-v1.11.0) (2024-12-12) diff --git a/packages/google-cloud-managed-identities/google/cloud/managedidentities/gapic_version.py b/packages/google-cloud-managed-identities/google/cloud/managedidentities/gapic_version.py index 50d842f376d0..739fdfae141c 100644 --- a/packages/google-cloud-managed-identities/google/cloud/managedidentities/gapic_version.py +++ b/packages/google-cloud-managed-identities/google/cloud/managedidentities/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.11.0" # {x-release-please-version} +__version__ = "1.12.0" # {x-release-please-version} diff --git a/packages/google-cloud-managed-identities/google/cloud/managedidentities_v1/gapic_version.py b/packages/google-cloud-managed-identities/google/cloud/managedidentities_v1/gapic_version.py index 50d842f376d0..739fdfae141c 100644 --- a/packages/google-cloud-managed-identities/google/cloud/managedidentities_v1/gapic_version.py +++ b/packages/google-cloud-managed-identities/google/cloud/managedidentities_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.11.0" # {x-release-please-version} +__version__ = "1.12.0" # {x-release-please-version} diff --git a/packages/google-cloud-managed-identities/google/cloud/managedidentities_v1/services/managed_identities_service/client.py b/packages/google-cloud-managed-identities/google/cloud/managedidentities_v1/services/managed_identities_service/client.py index dc8c514e6864..7b06d8cb80ca 100644 --- a/packages/google-cloud-managed-identities/google/cloud/managedidentities_v1/services/managed_identities_service/client.py +++ b/packages/google-cloud-managed-identities/google/cloud/managedidentities_v1/services/managed_identities_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -525,6 +527,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. 
diff --git a/packages/google-cloud-managed-identities/samples/generated_samples/snippet_metadata_google.cloud.managedidentities.v1.json b/packages/google-cloud-managed-identities/samples/generated_samples/snippet_metadata_google.cloud.managedidentities.v1.json index bd3edb2af418..3f27c28af958 100644 --- a/packages/google-cloud-managed-identities/samples/generated_samples/snippet_metadata_google.cloud.managedidentities.v1.json +++ b/packages/google-cloud-managed-identities/samples/generated_samples/snippet_metadata_google.cloud.managedidentities.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-managed-identities", - "version": "1.11.0" + "version": "1.12.0" }, "snippets": [ { diff --git a/packages/google-cloud-managed-identities/tests/unit/gapic/managedidentities_v1/test_managed_identities_service.py b/packages/google-cloud-managed-identities/tests/unit/gapic/managedidentities_v1/test_managed_identities_service.py index 3961248e3092..270ef81bf2db 100644 --- a/packages/google-cloud-managed-identities/tests/unit/gapic/managedidentities_v1/test_managed_identities_service.py +++ b/packages/google-cloud-managed-identities/tests/unit/gapic/managedidentities_v1/test_managed_identities_service.py @@ -22,6 +22,7 @@ except ImportError: # pragma: NO COVER import mock +import json import math from google.api_core import api_core_version @@ -68,6 +69,13 @@ ) from google.cloud.managedidentities_v1.types import managed_identities_service, resource +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -343,6 +351,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
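The managed-identities client above gains the same `_add_cred_info_for_auth_errors` helper, and, as the lifesciences and managedkafka client.py hunks show, the generated clients now route RPC failures through it before re-raising. For readers skimming the repeated hunks, here is a minimal sketch of that wrap-and-re-raise shape with placeholder names (not part of the diff):

```python
# Minimal sketch of the try/except pattern the generated clients now wrap
# around RPC invocations: auth-related errors (401/403/404) reach the caller
# with credential info appended to their details; all errors are re-raised.
from google.api_core import exceptions as core_exceptions


def send_with_cred_info(client, rpc, request, retry=None, timeout=None, metadata=()):
    """Placeholder helper mirroring the generated wrap-and-re-raise shape."""
    try:
        # Send the request.
        return rpc(request, retry=retry, timeout=timeout, metadata=metadata)
    except core_exceptions.GoogleAPICallError as e:
        client._add_cred_info_for_auth_errors(e)
        raise e
```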
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = ManagedIdentitiesServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = ManagedIdentitiesServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-managedkafka/CHANGELOG.md b/packages/google-cloud-managedkafka/CHANGELOG.md index 3ab3962f1d1c..6dc9726a85a7 100644 --- a/packages/google-cloud-managedkafka/CHANGELOG.md +++ b/packages/google-cloud-managedkafka/CHANGELOG.md @@ -1,5 +1,13 @@ # Changelog +## [0.1.7](https://github.com/googleapis/google-cloud-python/compare/google-cloud-managedkafka-v0.1.6...google-cloud-managedkafka-v0.1.7) (2025-02-12) + + +### Features + +* Add REST Interceptors which support reading metadata ([a0910dd](https://github.com/googleapis/google-cloud-python/commit/a0910dd51541d238bc5fcf10159066ddfd928579)) +* Add support for reading selective GAPIC generation methods from service YAML ([a0910dd](https://github.com/googleapis/google-cloud-python/commit/a0910dd51541d238bc5fcf10159066ddfd928579)) + ## [0.1.6](https://github.com/googleapis/google-cloud-python/compare/google-cloud-managedkafka-v0.1.5...google-cloud-managedkafka-v0.1.6) (2024-12-12) diff --git a/packages/google-cloud-managedkafka/google/cloud/managedkafka/gapic_version.py b/packages/google-cloud-managedkafka/google/cloud/managedkafka/gapic_version.py index 51d2795b9d6b..cf5493b86bbc 100644 --- a/packages/google-cloud-managedkafka/google/cloud/managedkafka/gapic_version.py +++ b/packages/google-cloud-managedkafka/google/cloud/managedkafka/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.1.6" # {x-release-please-version} +__version__ = "0.1.7" # {x-release-please-version} diff --git a/packages/google-cloud-managedkafka/google/cloud/managedkafka_v1/gapic_version.py b/packages/google-cloud-managedkafka/google/cloud/managedkafka_v1/gapic_version.py index 51d2795b9d6b..cf5493b86bbc 100644 --- a/packages/google-cloud-managedkafka/google/cloud/managedkafka_v1/gapic_version.py +++ b/packages/google-cloud-managedkafka/google/cloud/managedkafka_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.1.6" # {x-release-please-version} +__version__ = "0.1.7" # {x-release-please-version} diff --git a/packages/google-cloud-managedkafka/google/cloud/managedkafka_v1/services/managed_kafka/client.py b/packages/google-cloud-managedkafka/google/cloud/managedkafka_v1/services/managed_kafka/client.py index a0dc8fcefc11..3bac52352d6e 100644 --- a/packages/google-cloud-managedkafka/google/cloud/managedkafka_v1/services/managed_kafka/client.py +++ b/packages/google-cloud-managedkafka/google/cloud/managedkafka_v1/services/managed_kafka/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -564,6 +566,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -2463,16 +2492,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -2518,16 +2551,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def delete_operation( self, @@ -2684,16 +2721,20 @@ def get_location( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def list_locations( self, @@ -2739,16 +2780,20 @@ def list_locations( # Validate the universe domain. 
self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-managedkafka/google/cloud/managedkafka_v1/services/managed_kafka/transports/rest.py b/packages/google-cloud-managedkafka/google/cloud/managedkafka_v1/services/managed_kafka/transports/rest.py index 04cf0f0bfb5a..d729f56b6a2e 100644 --- a/packages/google-cloud-managedkafka/google/cloud/managedkafka_v1/services/managed_kafka/transports/rest.py +++ b/packages/google-cloud-managedkafka/google/cloud/managedkafka_v1/services/managed_kafka/transports/rest.py @@ -200,12 +200,35 @@ def post_create_cluster( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_cluster - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_cluster_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ManagedKafka server but before - it is returned to user code. + it is returned to user code. This `post_create_cluster` interceptor runs + before the `post_create_cluster_with_metadata` interceptor. """ return response + def post_create_cluster_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_cluster + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ManagedKafka server but before it is returned to user code. + + We recommend only using this `post_create_cluster_with_metadata` + interceptor in new development instead of the `post_create_cluster` interceptor. + When both interceptors are used, this `post_create_cluster_with_metadata` interceptor runs after the + `post_create_cluster` interceptor. The (possibly modified) response returned by + `post_create_cluster` will be passed to + `post_create_cluster_with_metadata`. + """ + return response, metadata + def pre_create_topic( self, request: managed_kafka.CreateTopicRequest, @@ -223,12 +246,35 @@ def pre_create_topic( def post_create_topic(self, response: resources.Topic) -> resources.Topic: """Post-rpc interceptor for create_topic - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_topic_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ManagedKafka server but before - it is returned to user code. + it is returned to user code. This `post_create_topic` interceptor runs + before the `post_create_topic_with_metadata` interceptor. """ return response + def post_create_topic_with_metadata( + self, + response: resources.Topic, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.Topic, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_topic + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ManagedKafka server but before it is returned to user code. 
+ + We recommend only using this `post_create_topic_with_metadata` + interceptor in new development instead of the `post_create_topic` interceptor. + When both interceptors are used, this `post_create_topic_with_metadata` interceptor runs after the + `post_create_topic` interceptor. The (possibly modified) response returned by + `post_create_topic` will be passed to + `post_create_topic_with_metadata`. + """ + return response, metadata + def pre_delete_cluster( self, request: managed_kafka.DeleteClusterRequest, @@ -248,12 +294,35 @@ def post_delete_cluster( ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_cluster - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_cluster_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ManagedKafka server but before - it is returned to user code. + it is returned to user code. This `post_delete_cluster` interceptor runs + before the `post_delete_cluster_with_metadata` interceptor. """ return response + def post_delete_cluster_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_cluster + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ManagedKafka server but before it is returned to user code. + + We recommend only using this `post_delete_cluster_with_metadata` + interceptor in new development instead of the `post_delete_cluster` interceptor. + When both interceptors are used, this `post_delete_cluster_with_metadata` interceptor runs after the + `post_delete_cluster` interceptor. The (possibly modified) response returned by + `post_delete_cluster` will be passed to + `post_delete_cluster_with_metadata`. + """ + return response, metadata + def pre_delete_consumer_group( self, request: managed_kafka.DeleteConsumerGroupRequest, @@ -300,12 +369,35 @@ def pre_get_cluster( def post_get_cluster(self, response: resources.Cluster) -> resources.Cluster: """Post-rpc interceptor for get_cluster - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_cluster_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ManagedKafka server but before - it is returned to user code. + it is returned to user code. This `post_get_cluster` interceptor runs + before the `post_get_cluster_with_metadata` interceptor. """ return response + def post_get_cluster_with_metadata( + self, + response: resources.Cluster, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.Cluster, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_cluster + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ManagedKafka server but before it is returned to user code. + + We recommend only using this `post_get_cluster_with_metadata` + interceptor in new development instead of the `post_get_cluster` interceptor. + When both interceptors are used, this `post_get_cluster_with_metadata` interceptor runs after the + `post_get_cluster` interceptor. The (possibly modified) response returned by + `post_get_cluster` will be passed to + `post_get_cluster_with_metadata`. 
+ """ + return response, metadata + def pre_get_consumer_group( self, request: managed_kafka.GetConsumerGroupRequest, @@ -325,12 +417,35 @@ def post_get_consumer_group( ) -> resources.ConsumerGroup: """Post-rpc interceptor for get_consumer_group - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_consumer_group_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ManagedKafka server but before - it is returned to user code. + it is returned to user code. This `post_get_consumer_group` interceptor runs + before the `post_get_consumer_group_with_metadata` interceptor. """ return response + def post_get_consumer_group_with_metadata( + self, + response: resources.ConsumerGroup, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.ConsumerGroup, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_consumer_group + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ManagedKafka server but before it is returned to user code. + + We recommend only using this `post_get_consumer_group_with_metadata` + interceptor in new development instead of the `post_get_consumer_group` interceptor. + When both interceptors are used, this `post_get_consumer_group_with_metadata` interceptor runs after the + `post_get_consumer_group` interceptor. The (possibly modified) response returned by + `post_get_consumer_group` will be passed to + `post_get_consumer_group_with_metadata`. + """ + return response, metadata + def pre_get_topic( self, request: managed_kafka.GetTopicRequest, @@ -346,12 +461,35 @@ def pre_get_topic( def post_get_topic(self, response: resources.Topic) -> resources.Topic: """Post-rpc interceptor for get_topic - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_topic_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ManagedKafka server but before - it is returned to user code. + it is returned to user code. This `post_get_topic` interceptor runs + before the `post_get_topic_with_metadata` interceptor. """ return response + def post_get_topic_with_metadata( + self, + response: resources.Topic, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.Topic, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_topic + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ManagedKafka server but before it is returned to user code. + + We recommend only using this `post_get_topic_with_metadata` + interceptor in new development instead of the `post_get_topic` interceptor. + When both interceptors are used, this `post_get_topic_with_metadata` interceptor runs after the + `post_get_topic` interceptor. The (possibly modified) response returned by + `post_get_topic` will be passed to + `post_get_topic_with_metadata`. + """ + return response, metadata + def pre_list_clusters( self, request: managed_kafka.ListClustersRequest, @@ -371,12 +509,37 @@ def post_list_clusters( ) -> managed_kafka.ListClustersResponse: """Post-rpc interceptor for list_clusters - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_clusters_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response after it is returned by the ManagedKafka server but before - it is returned to user code. + it is returned to user code. This `post_list_clusters` interceptor runs + before the `post_list_clusters_with_metadata` interceptor. """ return response + def post_list_clusters_with_metadata( + self, + response: managed_kafka.ListClustersResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + managed_kafka.ListClustersResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_clusters + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ManagedKafka server but before it is returned to user code. + + We recommend only using this `post_list_clusters_with_metadata` + interceptor in new development instead of the `post_list_clusters` interceptor. + When both interceptors are used, this `post_list_clusters_with_metadata` interceptor runs after the + `post_list_clusters` interceptor. The (possibly modified) response returned by + `post_list_clusters` will be passed to + `post_list_clusters_with_metadata`. + """ + return response, metadata + def pre_list_consumer_groups( self, request: managed_kafka.ListConsumerGroupsRequest, @@ -396,12 +559,38 @@ def post_list_consumer_groups( ) -> managed_kafka.ListConsumerGroupsResponse: """Post-rpc interceptor for list_consumer_groups - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_consumer_groups_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ManagedKafka server but before - it is returned to user code. + it is returned to user code. This `post_list_consumer_groups` interceptor runs + before the `post_list_consumer_groups_with_metadata` interceptor. """ return response + def post_list_consumer_groups_with_metadata( + self, + response: managed_kafka.ListConsumerGroupsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + managed_kafka.ListConsumerGroupsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_consumer_groups + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ManagedKafka server but before it is returned to user code. + + We recommend only using this `post_list_consumer_groups_with_metadata` + interceptor in new development instead of the `post_list_consumer_groups` interceptor. + When both interceptors are used, this `post_list_consumer_groups_with_metadata` interceptor runs after the + `post_list_consumer_groups` interceptor. The (possibly modified) response returned by + `post_list_consumer_groups` will be passed to + `post_list_consumer_groups_with_metadata`. + """ + return response, metadata + def pre_list_topics( self, request: managed_kafka.ListTopicsRequest, @@ -421,12 +610,37 @@ def post_list_topics( ) -> managed_kafka.ListTopicsResponse: """Post-rpc interceptor for list_topics - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_topics_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ManagedKafka server but before - it is returned to user code. + it is returned to user code. This `post_list_topics` interceptor runs + before the `post_list_topics_with_metadata` interceptor. 
""" return response + def post_list_topics_with_metadata( + self, + response: managed_kafka.ListTopicsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + managed_kafka.ListTopicsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_topics + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ManagedKafka server but before it is returned to user code. + + We recommend only using this `post_list_topics_with_metadata` + interceptor in new development instead of the `post_list_topics` interceptor. + When both interceptors are used, this `post_list_topics_with_metadata` interceptor runs after the + `post_list_topics` interceptor. The (possibly modified) response returned by + `post_list_topics` will be passed to + `post_list_topics_with_metadata`. + """ + return response, metadata + def pre_update_cluster( self, request: managed_kafka.UpdateClusterRequest, @@ -446,12 +660,35 @@ def post_update_cluster( ) -> operations_pb2.Operation: """Post-rpc interceptor for update_cluster - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_cluster_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ManagedKafka server but before - it is returned to user code. + it is returned to user code. This `post_update_cluster` interceptor runs + before the `post_update_cluster_with_metadata` interceptor. """ return response + def post_update_cluster_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_cluster + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ManagedKafka server but before it is returned to user code. + + We recommend only using this `post_update_cluster_with_metadata` + interceptor in new development instead of the `post_update_cluster` interceptor. + When both interceptors are used, this `post_update_cluster_with_metadata` interceptor runs after the + `post_update_cluster` interceptor. The (possibly modified) response returned by + `post_update_cluster` will be passed to + `post_update_cluster_with_metadata`. + """ + return response, metadata + def pre_update_consumer_group( self, request: managed_kafka.UpdateConsumerGroupRequest, @@ -472,12 +709,35 @@ def post_update_consumer_group( ) -> resources.ConsumerGroup: """Post-rpc interceptor for update_consumer_group - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_consumer_group_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ManagedKafka server but before - it is returned to user code. + it is returned to user code. This `post_update_consumer_group` interceptor runs + before the `post_update_consumer_group_with_metadata` interceptor. 
""" return response + def post_update_consumer_group_with_metadata( + self, + response: resources.ConsumerGroup, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.ConsumerGroup, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_consumer_group + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ManagedKafka server but before it is returned to user code. + + We recommend only using this `post_update_consumer_group_with_metadata` + interceptor in new development instead of the `post_update_consumer_group` interceptor. + When both interceptors are used, this `post_update_consumer_group_with_metadata` interceptor runs after the + `post_update_consumer_group` interceptor. The (possibly modified) response returned by + `post_update_consumer_group` will be passed to + `post_update_consumer_group_with_metadata`. + """ + return response, metadata + def pre_update_topic( self, request: managed_kafka.UpdateTopicRequest, @@ -495,12 +755,35 @@ def pre_update_topic( def post_update_topic(self, response: resources.Topic) -> resources.Topic: """Post-rpc interceptor for update_topic - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_topic_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ManagedKafka server but before - it is returned to user code. + it is returned to user code. This `post_update_topic` interceptor runs + before the `post_update_topic_with_metadata` interceptor. """ return response + def post_update_topic_with_metadata( + self, + response: resources.Topic, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.Topic, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_topic + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ManagedKafka server but before it is returned to user code. + + We recommend only using this `post_update_topic_with_metadata` + interceptor in new development instead of the `post_update_topic` interceptor. + When both interceptors are used, this `post_update_topic_with_metadata` interceptor runs after the + `post_update_topic` interceptor. The (possibly modified) response returned by + `post_update_topic` will be passed to + `post_update_topic_with_metadata`. 
+ """ + return response, metadata + def pre_get_location( self, request: locations_pb2.GetLocationRequest, @@ -913,6 +1196,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_cluster(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_cluster_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1065,6 +1352,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_topic(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_topic_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1206,6 +1497,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_cluster(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_cluster_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1570,6 +1865,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_cluster(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_cluster_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1714,6 +2013,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_consumer_group(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_consumer_group_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1856,6 +2159,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_topic(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_topic_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1998,6 +2305,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_clusters(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_clusters_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2142,6 +2453,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_consumer_groups(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_consumer_groups_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2288,6 +2603,10 @@ def __call__( json_format.Parse(response.content, pb_resp, 
ignore_unknown_fields=True) resp = self._interceptor.post_list_topics(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_topics_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2437,6 +2756,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_cluster(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_cluster_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2587,6 +2910,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_consumer_group(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_consumer_group_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2739,6 +3066,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_topic(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_topic_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-managedkafka/samples/generated_samples/snippet_metadata_google.cloud.managedkafka.v1.json b/packages/google-cloud-managedkafka/samples/generated_samples/snippet_metadata_google.cloud.managedkafka.v1.json index d1c28f5baa40..66a49c6ed27b 100644 --- a/packages/google-cloud-managedkafka/samples/generated_samples/snippet_metadata_google.cloud.managedkafka.v1.json +++ b/packages/google-cloud-managedkafka/samples/generated_samples/snippet_metadata_google.cloud.managedkafka.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-managedkafka", - "version": "0.1.6" + "version": "0.1.7" }, "snippets": [ { diff --git a/packages/google-cloud-managedkafka/tests/unit/gapic/managedkafka_v1/test_managed_kafka.py b/packages/google-cloud-managedkafka/tests/unit/gapic/managedkafka_v1/test_managed_kafka.py index 1d21974d4df8..b6ece5693e83 100644 --- a/packages/google-cloud-managedkafka/tests/unit/gapic/managedkafka_v1/test_managed_kafka.py +++ b/packages/google-cloud-managedkafka/tests/unit/gapic/managedkafka_v1/test_managed_kafka.py @@ -74,6 +74,13 @@ ) from google.cloud.managedkafka_v1.types import managed_kafka, resources +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -317,6 +324,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
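The interceptor hunks above deprecate the plain `post_*` hooks on `ManagedKafkaRestInterceptor` in favour of `post_*_with_metadata` variants, and the `__call__` changes feed each new hook the HTTP response headers as `(key, str(value))` pairs. A minimal sketch of how a subclass might use one of the new hooks follows; only the interceptor class, the `post_get_topic_with_metadata` signature, and the `resources.Topic` type come from the diff, while the subclass name, the inspected header, and the printing are illustrative, and installing the subclass on the REST transport (typically via the transport's `interceptor=` constructor argument, not shown in these hunks) is assumed.

from typing import Sequence, Tuple, Union

from google.cloud.managedkafka_v1.services.managed_kafka import transports
from google.cloud.managedkafka_v1.types import resources


class HeaderLoggingInterceptor(transports.ManagedKafkaRestInterceptor):
    """Illustrative subclass that inspects response metadata for get_topic."""

    def post_get_topic_with_metadata(
        self,
        response: resources.Topic,
        metadata: Sequence[Tuple[str, Union[str, bytes]]],
    ) -> Tuple[resources.Topic, Sequence[Tuple[str, Union[str, bytes]]]]:
        # metadata holds the HTTP response headers collected by __call__.
        for key, value in metadata:
            if key.lower() == "content-type":
                print(f"get_topic responded with {key}={value!r}")
        # Returning both values unchanged keeps the default behaviour.
        return response, metadata

Per the docstrings above, when both hooks are overridden this method runs after `post_get_topic` and receives that hook's (possibly modified) response, so existing `post_get_topic` overrides keep working unchanged.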
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = ManagedKafkaClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = ManagedKafkaClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -10166,10 +10216,13 @@ def test_list_clusters_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ManagedKafkaRestInterceptor, "post_list_clusters" ) as post, mock.patch.object( + transports.ManagedKafkaRestInterceptor, "post_list_clusters_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ManagedKafkaRestInterceptor, "pre_list_clusters" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = managed_kafka.ListClustersRequest.pb( managed_kafka.ListClustersRequest() ) @@ -10195,6 +10248,7 @@ def test_list_clusters_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = managed_kafka.ListClustersResponse() + post_with_metadata.return_value = managed_kafka.ListClustersResponse(), metadata client.list_clusters( request, @@ -10206,6 +10260,7 @@ def test_list_clusters_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_cluster_rest_bad_request(request_type=managed_kafka.GetClusterRequest): @@ -10294,10 +10349,13 @@ def test_get_cluster_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ManagedKafkaRestInterceptor, "post_get_cluster" ) as post, mock.patch.object( + transports.ManagedKafkaRestInterceptor, "post_get_cluster_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ManagedKafkaRestInterceptor, "pre_get_cluster" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = managed_kafka.GetClusterRequest.pb( managed_kafka.GetClusterRequest() ) @@ -10321,6 +10379,7 @@ def test_get_cluster_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.Cluster() + post_with_metadata.return_value = resources.Cluster(), metadata client.get_cluster( request, @@ -10332,6 +10391,7 @@ def test_get_cluster_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_cluster_rest_bad_request( @@ 
-10494,10 +10554,13 @@ def test_create_cluster_rest_interceptors(null_interceptor): ), mock.patch.object( transports.ManagedKafkaRestInterceptor, "post_create_cluster" ) as post, mock.patch.object( + transports.ManagedKafkaRestInterceptor, "post_create_cluster_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ManagedKafkaRestInterceptor, "pre_create_cluster" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = managed_kafka.CreateClusterRequest.pb( managed_kafka.CreateClusterRequest() ) @@ -10521,6 +10584,7 @@ def test_create_cluster_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_cluster( request, @@ -10532,6 +10596,7 @@ def test_create_cluster_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_cluster_rest_bad_request( @@ -10698,10 +10763,13 @@ def test_update_cluster_rest_interceptors(null_interceptor): ), mock.patch.object( transports.ManagedKafkaRestInterceptor, "post_update_cluster" ) as post, mock.patch.object( + transports.ManagedKafkaRestInterceptor, "post_update_cluster_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ManagedKafkaRestInterceptor, "pre_update_cluster" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = managed_kafka.UpdateClusterRequest.pb( managed_kafka.UpdateClusterRequest() ) @@ -10725,6 +10793,7 @@ def test_update_cluster_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.update_cluster( request, @@ -10736,6 +10805,7 @@ def test_update_cluster_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_cluster_rest_bad_request( @@ -10816,10 +10886,13 @@ def test_delete_cluster_rest_interceptors(null_interceptor): ), mock.patch.object( transports.ManagedKafkaRestInterceptor, "post_delete_cluster" ) as post, mock.patch.object( + transports.ManagedKafkaRestInterceptor, "post_delete_cluster_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ManagedKafkaRestInterceptor, "pre_delete_cluster" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = managed_kafka.DeleteClusterRequest.pb( managed_kafka.DeleteClusterRequest() ) @@ -10843,6 +10916,7 @@ def test_delete_cluster_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.delete_cluster( request, @@ -10854,6 +10928,7 @@ def test_delete_cluster_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_topics_rest_bad_request(request_type=managed_kafka.ListTopicsRequest): @@ -10936,10 +11011,13 @@ def test_list_topics_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ManagedKafkaRestInterceptor, "post_list_topics" ) as post, mock.patch.object( + transports.ManagedKafkaRestInterceptor, "post_list_topics_with_metadata" + ) as 
post_with_metadata, mock.patch.object( transports.ManagedKafkaRestInterceptor, "pre_list_topics" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = managed_kafka.ListTopicsRequest.pb( managed_kafka.ListTopicsRequest() ) @@ -10965,6 +11043,7 @@ def test_list_topics_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = managed_kafka.ListTopicsResponse() + post_with_metadata.return_value = managed_kafka.ListTopicsResponse(), metadata client.list_topics( request, @@ -10976,6 +11055,7 @@ def test_list_topics_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_topic_rest_bad_request(request_type=managed_kafka.GetTopicRequest): @@ -11066,10 +11146,13 @@ def test_get_topic_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ManagedKafkaRestInterceptor, "post_get_topic" ) as post, mock.patch.object( + transports.ManagedKafkaRestInterceptor, "post_get_topic_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ManagedKafkaRestInterceptor, "pre_get_topic" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = managed_kafka.GetTopicRequest.pb(managed_kafka.GetTopicRequest()) transcode.return_value = { "method": "post", @@ -11091,6 +11174,7 @@ def test_get_topic_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.Topic() + post_with_metadata.return_value = resources.Topic(), metadata client.get_topic( request, @@ -11102,6 +11186,7 @@ def test_get_topic_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_topic_rest_bad_request(request_type=managed_kafka.CreateTopicRequest): @@ -11261,10 +11346,13 @@ def test_create_topic_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ManagedKafkaRestInterceptor, "post_create_topic" ) as post, mock.patch.object( + transports.ManagedKafkaRestInterceptor, "post_create_topic_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ManagedKafkaRestInterceptor, "pre_create_topic" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = managed_kafka.CreateTopicRequest.pb( managed_kafka.CreateTopicRequest() ) @@ -11288,6 +11376,7 @@ def test_create_topic_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.Topic() + post_with_metadata.return_value = resources.Topic(), metadata client.create_topic( request, @@ -11299,6 +11388,7 @@ def test_create_topic_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_topic_rest_bad_request(request_type=managed_kafka.UpdateTopicRequest): @@ -11466,10 +11556,13 @@ def test_update_topic_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ManagedKafkaRestInterceptor, "post_update_topic" ) as post, mock.patch.object( + transports.ManagedKafkaRestInterceptor, "post_update_topic_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ManagedKafkaRestInterceptor, "pre_update_topic" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = 
managed_kafka.UpdateTopicRequest.pb( managed_kafka.UpdateTopicRequest() ) @@ -11493,6 +11586,7 @@ def test_update_topic_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.Topic() + post_with_metadata.return_value = resources.Topic(), metadata client.update_topic( request, @@ -11504,6 +11598,7 @@ def test_update_topic_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_topic_rest_bad_request(request_type=managed_kafka.DeleteTopicRequest): @@ -11699,10 +11794,14 @@ def test_list_consumer_groups_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ManagedKafkaRestInterceptor, "post_list_consumer_groups" ) as post, mock.patch.object( + transports.ManagedKafkaRestInterceptor, + "post_list_consumer_groups_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ManagedKafkaRestInterceptor, "pre_list_consumer_groups" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = managed_kafka.ListConsumerGroupsRequest.pb( managed_kafka.ListConsumerGroupsRequest() ) @@ -11728,6 +11827,10 @@ def test_list_consumer_groups_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = managed_kafka.ListConsumerGroupsResponse() + post_with_metadata.return_value = ( + managed_kafka.ListConsumerGroupsResponse(), + metadata, + ) client.list_consumer_groups( request, @@ -11739,6 +11842,7 @@ def test_list_consumer_groups_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_consumer_group_rest_bad_request( @@ -11827,10 +11931,13 @@ def test_get_consumer_group_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ManagedKafkaRestInterceptor, "post_get_consumer_group" ) as post, mock.patch.object( + transports.ManagedKafkaRestInterceptor, "post_get_consumer_group_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ManagedKafkaRestInterceptor, "pre_get_consumer_group" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = managed_kafka.GetConsumerGroupRequest.pb( managed_kafka.GetConsumerGroupRequest() ) @@ -11854,6 +11961,7 @@ def test_get_consumer_group_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.ConsumerGroup() + post_with_metadata.return_value = resources.ConsumerGroup(), metadata client.get_consumer_group( request, @@ -11865,6 +11973,7 @@ def test_get_consumer_group_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_consumer_group_rest_bad_request( @@ -12028,10 +12137,14 @@ def test_update_consumer_group_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ManagedKafkaRestInterceptor, "post_update_consumer_group" ) as post, mock.patch.object( + transports.ManagedKafkaRestInterceptor, + "post_update_consumer_group_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.ManagedKafkaRestInterceptor, "pre_update_consumer_group" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = managed_kafka.UpdateConsumerGroupRequest.pb( managed_kafka.UpdateConsumerGroupRequest() ) @@ -12055,6 
+12168,7 @@ def test_update_consumer_group_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = resources.ConsumerGroup() + post_with_metadata.return_value = resources.ConsumerGroup(), metadata client.update_consumer_group( request, @@ -12066,6 +12180,7 @@ def test_update_consumer_group_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_consumer_group_rest_bad_request( diff --git a/packages/google-cloud-media-translation/CHANGELOG.md b/packages/google-cloud-media-translation/CHANGELOG.md index 5939a42c38bf..3a8677361879 100644 --- a/packages/google-cloud-media-translation/CHANGELOG.md +++ b/packages/google-cloud-media-translation/CHANGELOG.md @@ -1,5 +1,13 @@ # Changelog +## [0.11.15](https://github.com/googleapis/google-cloud-python/compare/google-cloud-media-translation-v0.11.14...google-cloud-media-translation-v0.11.15) (2025-02-12) + + +### Features + +* Add REST Interceptors which support reading metadata ([a0910dd](https://github.com/googleapis/google-cloud-python/commit/a0910dd51541d238bc5fcf10159066ddfd928579)) +* Add support for reading selective GAPIC generation methods from service YAML ([a0910dd](https://github.com/googleapis/google-cloud-python/commit/a0910dd51541d238bc5fcf10159066ddfd928579)) + ## [0.11.14](https://github.com/googleapis/google-cloud-python/compare/google-cloud-media-translation-v0.11.13...google-cloud-media-translation-v0.11.14) (2024-12-12) diff --git a/packages/google-cloud-media-translation/google/cloud/mediatranslation/gapic_version.py b/packages/google-cloud-media-translation/google/cloud/mediatranslation/gapic_version.py index 9d5fd39a4c03..53cb05e18cb5 100644 --- a/packages/google-cloud-media-translation/google/cloud/mediatranslation/gapic_version.py +++ b/packages/google-cloud-media-translation/google/cloud/mediatranslation/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.11.14" # {x-release-please-version} +__version__ = "0.11.15" # {x-release-please-version} diff --git a/packages/google-cloud-media-translation/google/cloud/mediatranslation_v1beta1/gapic_version.py b/packages/google-cloud-media-translation/google/cloud/mediatranslation_v1beta1/gapic_version.py index 9d5fd39a4c03..53cb05e18cb5 100644 --- a/packages/google-cloud-media-translation/google/cloud/mediatranslation_v1beta1/gapic_version.py +++ b/packages/google-cloud-media-translation/google/cloud/mediatranslation_v1beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.11.14" # {x-release-please-version} +__version__ = "0.11.15" # {x-release-please-version} diff --git a/packages/google-cloud-media-translation/google/cloud/mediatranslation_v1beta1/services/speech_translation_service/client.py b/packages/google-cloud-media-translation/google/cloud/mediatranslation_v1beta1/services/speech_translation_service/client.py index 8263a23d88d8..45a2f23e4f24 100644 --- a/packages/google-cloud-media-translation/google/cloud/mediatranslation_v1beta1/services/speech_translation_service/client.py +++ b/packages/google-cloud-media-translation/google/cloud/mediatranslation_v1beta1/services/speech_translation_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. 
# from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -465,6 +467,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. diff --git a/packages/google-cloud-media-translation/samples/generated_samples/snippet_metadata_google.cloud.mediatranslation.v1beta1.json b/packages/google-cloud-media-translation/samples/generated_samples/snippet_metadata_google.cloud.mediatranslation.v1beta1.json index 4712731b72ea..4bd1a0721905 100644 --- a/packages/google-cloud-media-translation/samples/generated_samples/snippet_metadata_google.cloud.mediatranslation.v1beta1.json +++ b/packages/google-cloud-media-translation/samples/generated_samples/snippet_metadata_google.cloud.mediatranslation.v1beta1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-media-translation", - "version": "0.11.14" + "version": "0.11.15" }, "snippets": [ { diff --git a/packages/google-cloud-media-translation/tests/unit/gapic/mediatranslation_v1beta1/test_speech_translation_service.py b/packages/google-cloud-media-translation/tests/unit/gapic/mediatranslation_v1beta1/test_speech_translation_service.py index 34c3d68d8545..442b837d46e8 100644 --- a/packages/google-cloud-media-translation/tests/unit/gapic/mediatranslation_v1beta1/test_speech_translation_service.py +++ b/packages/google-cloud-media-translation/tests/unit/gapic/mediatranslation_v1beta1/test_speech_translation_service.py @@ -22,6 +22,7 @@ except ImportError: # pragma: NO COVER import mock +import json import math from google.api_core import api_core_version @@ -55,6 +56,13 @@ ) from google.cloud.mediatranslation_v1beta1.types import media_translation +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -330,6 +338,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
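The `_add_cred_info_for_auth_errors` helper added to the clients above appends the JSON-serialized credential info to an error's details, but only for 401/403/404 responses and only when the credential object exposes `get_cred_info` (available in google-auth >= 2.35.0). The parametrized tests that follow pin down that behaviour; a small standalone sketch of the same flow is shown here, with the stub credential and the hand-built error as illustrative stand-ins for what a real RPC failure would produce.

import json
from unittest import mock

from google.api_core import exceptions as core_exceptions

from google.cloud.mediatranslation_v1beta1.services.speech_translation_service import (
    SpeechTranslationServiceClient,
)

# Stub credential that reports its provenance, mimicking google-auth
# credentials that implement get_cred_info().
cred = mock.Mock(["get_cred_info"])
cred.get_cred_info = mock.Mock(
    return_value={"credential_type": "service account credentials"}
)

client = SpeechTranslationServiceClient(credentials=cred)
client._transport._credentials = cred

# A 403 caught from a failed RPC would be enriched like this.
error = core_exceptions.GoogleAPICallError("permission denied", details=[])
error.code = 403
client._add_cred_info_for_auth_errors(error)

# The appended detail is the JSON string produced from get_cred_info().
assert json.loads(error.details[-1])["credential_type"] == "service account credentials"

A 500, or any credential without `get_cred_info`, leaves the details untouched, which is exactly what the `show_cred_info=False` rows in the parametrized test below assert.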
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = SpeechTranslationServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = SpeechTranslationServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ diff --git a/packages/google-cloud-memcache/CHANGELOG.md b/packages/google-cloud-memcache/CHANGELOG.md index 918441d026de..171a63a5f9b4 100644 --- a/packages/google-cloud-memcache/CHANGELOG.md +++ b/packages/google-cloud-memcache/CHANGELOG.md @@ -1,5 +1,13 @@ # Changelog +## [1.12.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-memcache-v1.11.0...google-cloud-memcache-v1.12.0) (2025-02-12) + + +### Features + +* Add REST Interceptors which support reading metadata ([a0910dd](https://github.com/googleapis/google-cloud-python/commit/a0910dd51541d238bc5fcf10159066ddfd928579)) +* Add support for reading selective GAPIC generation methods from service YAML ([a0910dd](https://github.com/googleapis/google-cloud-python/commit/a0910dd51541d238bc5fcf10159066ddfd928579)) + ## [1.11.0](https://github.com/googleapis/google-cloud-python/compare/google-cloud-memcache-v1.10.1...google-cloud-memcache-v1.11.0) (2024-12-12) diff --git a/packages/google-cloud-memcache/google/cloud/memcache/gapic_version.py b/packages/google-cloud-memcache/google/cloud/memcache/gapic_version.py index 50d842f376d0..739fdfae141c 100644 --- a/packages/google-cloud-memcache/google/cloud/memcache/gapic_version.py +++ b/packages/google-cloud-memcache/google/cloud/memcache/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.11.0" # {x-release-please-version} +__version__ = "1.12.0" # {x-release-please-version} diff --git a/packages/google-cloud-memcache/google/cloud/memcache_v1/gapic_version.py b/packages/google-cloud-memcache/google/cloud/memcache_v1/gapic_version.py index 50d842f376d0..739fdfae141c 100644 --- a/packages/google-cloud-memcache/google/cloud/memcache_v1/gapic_version.py +++ b/packages/google-cloud-memcache/google/cloud/memcache_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "1.11.0" # {x-release-please-version} +__version__ = "1.12.0" # {x-release-please-version} diff --git a/packages/google-cloud-memcache/google/cloud/memcache_v1/services/cloud_memcache/client.py b/packages/google-cloud-memcache/google/cloud/memcache_v1/services/cloud_memcache/client.py index 40653d35a55a..0fc7794069a6 100644 --- a/packages/google-cloud-memcache/google/cloud/memcache_v1/services/cloud_memcache/client.py +++ b/packages/google-cloud-memcache/google/cloud/memcache_v1/services/cloud_memcache/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -509,6 +511,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -1851,16 +1880,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -1906,16 +1939,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def delete_operation( self, @@ -2072,16 +2109,20 @@ def get_location( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def list_locations( self, @@ -2127,16 +2168,20 @@ def list_locations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. 
- response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-memcache/google/cloud/memcache_v1/services/cloud_memcache/transports/rest.py b/packages/google-cloud-memcache/google/cloud/memcache_v1/services/cloud_memcache/transports/rest.py index 6a7f774341f3..eaff450e5cb2 100644 --- a/packages/google-cloud-memcache/google/cloud/memcache_v1/services/cloud_memcache/transports/rest.py +++ b/packages/google-cloud-memcache/google/cloud/memcache_v1/services/cloud_memcache/transports/rest.py @@ -159,12 +159,35 @@ def post_apply_parameters( ) -> operations_pb2.Operation: """Post-rpc interceptor for apply_parameters - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_apply_parameters_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CloudMemcache server but before - it is returned to user code. + it is returned to user code. This `post_apply_parameters` interceptor runs + before the `post_apply_parameters_with_metadata` interceptor. """ return response + def post_apply_parameters_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for apply_parameters + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudMemcache server but before it is returned to user code. + + We recommend only using this `post_apply_parameters_with_metadata` + interceptor in new development instead of the `post_apply_parameters` interceptor. + When both interceptors are used, this `post_apply_parameters_with_metadata` interceptor runs after the + `post_apply_parameters` interceptor. The (possibly modified) response returned by + `post_apply_parameters` will be passed to + `post_apply_parameters_with_metadata`. + """ + return response, metadata + def pre_create_instance( self, request: cloud_memcache.CreateInstanceRequest, @@ -184,12 +207,35 @@ def post_create_instance( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_instance - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_instance_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CloudMemcache server but before - it is returned to user code. + it is returned to user code. This `post_create_instance` interceptor runs + before the `post_create_instance_with_metadata` interceptor. """ return response + def post_create_instance_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_instance + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudMemcache server but before it is returned to user code. 
+ + We recommend only using this `post_create_instance_with_metadata` + interceptor in new development instead of the `post_create_instance` interceptor. + When both interceptors are used, this `post_create_instance_with_metadata` interceptor runs after the + `post_create_instance` interceptor. The (possibly modified) response returned by + `post_create_instance` will be passed to + `post_create_instance_with_metadata`. + """ + return response, metadata + def pre_delete_instance( self, request: cloud_memcache.DeleteInstanceRequest, @@ -209,12 +255,35 @@ def post_delete_instance( ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_instance - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_instance_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CloudMemcache server but before - it is returned to user code. + it is returned to user code. This `post_delete_instance` interceptor runs + before the `post_delete_instance_with_metadata` interceptor. """ return response + def post_delete_instance_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_instance + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudMemcache server but before it is returned to user code. + + We recommend only using this `post_delete_instance_with_metadata` + interceptor in new development instead of the `post_delete_instance` interceptor. + When both interceptors are used, this `post_delete_instance_with_metadata` interceptor runs after the + `post_delete_instance` interceptor. The (possibly modified) response returned by + `post_delete_instance` will be passed to + `post_delete_instance_with_metadata`. + """ + return response, metadata + def pre_get_instance( self, request: cloud_memcache.GetInstanceRequest, @@ -234,12 +303,35 @@ def post_get_instance( ) -> cloud_memcache.Instance: """Post-rpc interceptor for get_instance - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_instance_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CloudMemcache server but before - it is returned to user code. + it is returned to user code. This `post_get_instance` interceptor runs + before the `post_get_instance_with_metadata` interceptor. """ return response + def post_get_instance_with_metadata( + self, + response: cloud_memcache.Instance, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[cloud_memcache.Instance, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_instance + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudMemcache server but before it is returned to user code. + + We recommend only using this `post_get_instance_with_metadata` + interceptor in new development instead of the `post_get_instance` interceptor. + When both interceptors are used, this `post_get_instance_with_metadata` interceptor runs after the + `post_get_instance` interceptor. The (possibly modified) response returned by + `post_get_instance` will be passed to + `post_get_instance_with_metadata`. 
+ """ + return response, metadata + def pre_list_instances( self, request: cloud_memcache.ListInstancesRequest, @@ -259,12 +351,37 @@ def post_list_instances( ) -> cloud_memcache.ListInstancesResponse: """Post-rpc interceptor for list_instances - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_instances_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CloudMemcache server but before - it is returned to user code. + it is returned to user code. This `post_list_instances` interceptor runs + before the `post_list_instances_with_metadata` interceptor. """ return response + def post_list_instances_with_metadata( + self, + response: cloud_memcache.ListInstancesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + cloud_memcache.ListInstancesResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_instances + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudMemcache server but before it is returned to user code. + + We recommend only using this `post_list_instances_with_metadata` + interceptor in new development instead of the `post_list_instances` interceptor. + When both interceptors are used, this `post_list_instances_with_metadata` interceptor runs after the + `post_list_instances` interceptor. The (possibly modified) response returned by + `post_list_instances` will be passed to + `post_list_instances_with_metadata`. + """ + return response, metadata + def pre_reschedule_maintenance( self, request: cloud_memcache.RescheduleMaintenanceRequest, @@ -285,12 +402,35 @@ def post_reschedule_maintenance( ) -> operations_pb2.Operation: """Post-rpc interceptor for reschedule_maintenance - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_reschedule_maintenance_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CloudMemcache server but before - it is returned to user code. + it is returned to user code. This `post_reschedule_maintenance` interceptor runs + before the `post_reschedule_maintenance_with_metadata` interceptor. """ return response + def post_reschedule_maintenance_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for reschedule_maintenance + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudMemcache server but before it is returned to user code. + + We recommend only using this `post_reschedule_maintenance_with_metadata` + interceptor in new development instead of the `post_reschedule_maintenance` interceptor. + When both interceptors are used, this `post_reschedule_maintenance_with_metadata` interceptor runs after the + `post_reschedule_maintenance` interceptor. The (possibly modified) response returned by + `post_reschedule_maintenance` will be passed to + `post_reschedule_maintenance_with_metadata`. + """ + return response, metadata + def pre_update_instance( self, request: cloud_memcache.UpdateInstanceRequest, @@ -310,12 +450,35 @@ def post_update_instance( ) -> operations_pb2.Operation: """Post-rpc interceptor for update_instance - Override in a subclass to manipulate the response + DEPRECATED. 
Please use the `post_update_instance_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CloudMemcache server but before - it is returned to user code. + it is returned to user code. This `post_update_instance` interceptor runs + before the `post_update_instance_with_metadata` interceptor. """ return response + def post_update_instance_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_instance + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudMemcache server but before it is returned to user code. + + We recommend only using this `post_update_instance_with_metadata` + interceptor in new development instead of the `post_update_instance` interceptor. + When both interceptors are used, this `post_update_instance_with_metadata` interceptor runs after the + `post_update_instance` interceptor. The (possibly modified) response returned by + `post_update_instance` will be passed to + `post_update_instance_with_metadata`. + """ + return response, metadata + def pre_update_parameters( self, request: cloud_memcache.UpdateParametersRequest, @@ -335,12 +498,35 @@ def post_update_parameters( ) -> operations_pb2.Operation: """Post-rpc interceptor for update_parameters - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_parameters_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CloudMemcache server but before - it is returned to user code. + it is returned to user code. This `post_update_parameters` interceptor runs + before the `post_update_parameters_with_metadata` interceptor. """ return response + def post_update_parameters_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_parameters + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudMemcache server but before it is returned to user code. + + We recommend only using this `post_update_parameters_with_metadata` + interceptor in new development instead of the `post_update_parameters` interceptor. + When both interceptors are used, this `post_update_parameters_with_metadata` interceptor runs after the + `post_update_parameters` interceptor. The (possibly modified) response returned by + `post_update_parameters` will be passed to + `post_update_parameters_with_metadata`. 
+ """ + return response, metadata + def pre_get_location( self, request: locations_pb2.GetLocationRequest, @@ -773,6 +959,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_apply_parameters(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_apply_parameters_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -921,6 +1111,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_instance(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_instance_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1063,6 +1257,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_instance(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_instance_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1206,6 +1404,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_instance(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_instance_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1349,6 +1551,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_instances(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_instances_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1502,6 +1708,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_reschedule_maintenance(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_reschedule_maintenance_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1650,6 +1860,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_instance(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_instance_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1800,6 +2014,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_parameters(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_parameters_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git 
a/packages/google-cloud-memcache/google/cloud/memcache_v1beta2/gapic_version.py b/packages/google-cloud-memcache/google/cloud/memcache_v1beta2/gapic_version.py index 50d842f376d0..739fdfae141c 100644 --- a/packages/google-cloud-memcache/google/cloud/memcache_v1beta2/gapic_version.py +++ b/packages/google-cloud-memcache/google/cloud/memcache_v1beta2/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.11.0" # {x-release-please-version} +__version__ = "1.12.0" # {x-release-please-version} diff --git a/packages/google-cloud-memcache/google/cloud/memcache_v1beta2/services/cloud_memcache/client.py b/packages/google-cloud-memcache/google/cloud/memcache_v1beta2/services/cloud_memcache/client.py index ca05def23d9c..d70dc74ef65f 100644 --- a/packages/google-cloud-memcache/google/cloud/memcache_v1beta2/services/cloud_memcache/client.py +++ b/packages/google-cloud-memcache/google/cloud/memcache_v1beta2/services/cloud_memcache/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -509,6 +511,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -2001,16 +2030,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -2056,16 +2089,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def delete_operation( self, @@ -2222,16 +2259,20 @@ def get_location( # Validate the universe domain. self._validate_universe_domain() - # Send the request. 
- response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def list_locations( self, @@ -2277,16 +2318,20 @@ def list_locations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-memcache/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/rest.py b/packages/google-cloud-memcache/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/rest.py index 6307363f1cd6..962dfaf512ec 100644 --- a/packages/google-cloud-memcache/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/rest.py +++ b/packages/google-cloud-memcache/google/cloud/memcache_v1beta2/services/cloud_memcache/transports/rest.py @@ -167,12 +167,35 @@ def post_apply_parameters( ) -> operations_pb2.Operation: """Post-rpc interceptor for apply_parameters - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_apply_parameters_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CloudMemcache server but before - it is returned to user code. + it is returned to user code. This `post_apply_parameters` interceptor runs + before the `post_apply_parameters_with_metadata` interceptor. """ return response + def post_apply_parameters_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for apply_parameters + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudMemcache server but before it is returned to user code. + + We recommend only using this `post_apply_parameters_with_metadata` + interceptor in new development instead of the `post_apply_parameters` interceptor. + When both interceptors are used, this `post_apply_parameters_with_metadata` interceptor runs after the + `post_apply_parameters` interceptor. The (possibly modified) response returned by + `post_apply_parameters` will be passed to + `post_apply_parameters_with_metadata`. + """ + return response, metadata + def pre_apply_software_update( self, request: cloud_memcache.ApplySoftwareUpdateRequest, @@ -193,12 +216,35 @@ def post_apply_software_update( ) -> operations_pb2.Operation: """Post-rpc interceptor for apply_software_update - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_apply_software_update_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CloudMemcache server but before - it is returned to user code. + it is returned to user code. 
This `post_apply_software_update` interceptor runs + before the `post_apply_software_update_with_metadata` interceptor. """ return response + def post_apply_software_update_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for apply_software_update + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudMemcache server but before it is returned to user code. + + We recommend only using this `post_apply_software_update_with_metadata` + interceptor in new development instead of the `post_apply_software_update` interceptor. + When both interceptors are used, this `post_apply_software_update_with_metadata` interceptor runs after the + `post_apply_software_update` interceptor. The (possibly modified) response returned by + `post_apply_software_update` will be passed to + `post_apply_software_update_with_metadata`. + """ + return response, metadata + def pre_create_instance( self, request: cloud_memcache.CreateInstanceRequest, @@ -218,12 +264,35 @@ def post_create_instance( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_instance - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_instance_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CloudMemcache server but before - it is returned to user code. + it is returned to user code. This `post_create_instance` interceptor runs + before the `post_create_instance_with_metadata` interceptor. """ return response + def post_create_instance_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_instance + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudMemcache server but before it is returned to user code. + + We recommend only using this `post_create_instance_with_metadata` + interceptor in new development instead of the `post_create_instance` interceptor. + When both interceptors are used, this `post_create_instance_with_metadata` interceptor runs after the + `post_create_instance` interceptor. The (possibly modified) response returned by + `post_create_instance` will be passed to + `post_create_instance_with_metadata`. + """ + return response, metadata + def pre_delete_instance( self, request: cloud_memcache.DeleteInstanceRequest, @@ -243,12 +312,35 @@ def post_delete_instance( ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_instance - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_instance_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CloudMemcache server but before - it is returned to user code. + it is returned to user code. This `post_delete_instance` interceptor runs + before the `post_delete_instance_with_metadata` interceptor. 
""" return response + def post_delete_instance_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_instance + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudMemcache server but before it is returned to user code. + + We recommend only using this `post_delete_instance_with_metadata` + interceptor in new development instead of the `post_delete_instance` interceptor. + When both interceptors are used, this `post_delete_instance_with_metadata` interceptor runs after the + `post_delete_instance` interceptor. The (possibly modified) response returned by + `post_delete_instance` will be passed to + `post_delete_instance_with_metadata`. + """ + return response, metadata + def pre_get_instance( self, request: cloud_memcache.GetInstanceRequest, @@ -268,12 +360,35 @@ def post_get_instance( ) -> cloud_memcache.Instance: """Post-rpc interceptor for get_instance - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_instance_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CloudMemcache server but before - it is returned to user code. + it is returned to user code. This `post_get_instance` interceptor runs + before the `post_get_instance_with_metadata` interceptor. """ return response + def post_get_instance_with_metadata( + self, + response: cloud_memcache.Instance, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[cloud_memcache.Instance, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_instance + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudMemcache server but before it is returned to user code. + + We recommend only using this `post_get_instance_with_metadata` + interceptor in new development instead of the `post_get_instance` interceptor. + When both interceptors are used, this `post_get_instance_with_metadata` interceptor runs after the + `post_get_instance` interceptor. The (possibly modified) response returned by + `post_get_instance` will be passed to + `post_get_instance_with_metadata`. + """ + return response, metadata + def pre_list_instances( self, request: cloud_memcache.ListInstancesRequest, @@ -293,12 +408,37 @@ def post_list_instances( ) -> cloud_memcache.ListInstancesResponse: """Post-rpc interceptor for list_instances - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_instances_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CloudMemcache server but before - it is returned to user code. + it is returned to user code. This `post_list_instances` interceptor runs + before the `post_list_instances_with_metadata` interceptor. """ return response + def post_list_instances_with_metadata( + self, + response: cloud_memcache.ListInstancesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + cloud_memcache.ListInstancesResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_instances + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudMemcache server but before it is returned to user code. 
+ + We recommend only using this `post_list_instances_with_metadata` + interceptor in new development instead of the `post_list_instances` interceptor. + When both interceptors are used, this `post_list_instances_with_metadata` interceptor runs after the + `post_list_instances` interceptor. The (possibly modified) response returned by + `post_list_instances` will be passed to + `post_list_instances_with_metadata`. + """ + return response, metadata + def pre_reschedule_maintenance( self, request: cloud_memcache.RescheduleMaintenanceRequest, @@ -319,12 +459,35 @@ def post_reschedule_maintenance( ) -> operations_pb2.Operation: """Post-rpc interceptor for reschedule_maintenance - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_reschedule_maintenance_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CloudMemcache server but before - it is returned to user code. + it is returned to user code. This `post_reschedule_maintenance` interceptor runs + before the `post_reschedule_maintenance_with_metadata` interceptor. """ return response + def post_reschedule_maintenance_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for reschedule_maintenance + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudMemcache server but before it is returned to user code. + + We recommend only using this `post_reschedule_maintenance_with_metadata` + interceptor in new development instead of the `post_reschedule_maintenance` interceptor. + When both interceptors are used, this `post_reschedule_maintenance_with_metadata` interceptor runs after the + `post_reschedule_maintenance` interceptor. The (possibly modified) response returned by + `post_reschedule_maintenance` will be passed to + `post_reschedule_maintenance_with_metadata`. + """ + return response, metadata + def pre_update_instance( self, request: cloud_memcache.UpdateInstanceRequest, @@ -344,12 +507,35 @@ def post_update_instance( ) -> operations_pb2.Operation: """Post-rpc interceptor for update_instance - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_instance_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CloudMemcache server but before - it is returned to user code. + it is returned to user code. This `post_update_instance` interceptor runs + before the `post_update_instance_with_metadata` interceptor. """ return response + def post_update_instance_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_instance + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudMemcache server but before it is returned to user code. + + We recommend only using this `post_update_instance_with_metadata` + interceptor in new development instead of the `post_update_instance` interceptor. + When both interceptors are used, this `post_update_instance_with_metadata` interceptor runs after the + `post_update_instance` interceptor. 
The (possibly modified) response returned by + `post_update_instance` will be passed to + `post_update_instance_with_metadata`. + """ + return response, metadata + def pre_update_parameters( self, request: cloud_memcache.UpdateParametersRequest, @@ -369,12 +555,35 @@ def post_update_parameters( ) -> operations_pb2.Operation: """Post-rpc interceptor for update_parameters - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_parameters_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CloudMemcache server but before - it is returned to user code. + it is returned to user code. This `post_update_parameters` interceptor runs + before the `post_update_parameters_with_metadata` interceptor. """ return response + def post_update_parameters_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_parameters + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudMemcache server but before it is returned to user code. + + We recommend only using this `post_update_parameters_with_metadata` + interceptor in new development instead of the `post_update_parameters` interceptor. + When both interceptors are used, this `post_update_parameters_with_metadata` interceptor runs after the + `post_update_parameters` interceptor. The (possibly modified) response returned by + `post_update_parameters` will be passed to + `post_update_parameters_with_metadata`. + """ + return response, metadata + def pre_get_location( self, request: locations_pb2.GetLocationRequest, @@ -807,6 +1016,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_apply_parameters(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_apply_parameters_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -957,6 +1170,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_apply_software_update(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_apply_software_update_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1105,6 +1322,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_instance(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_instance_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1247,6 +1468,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_instance(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_instance_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1390,6 +1615,10 @@ def __call__( 
json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_instance(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_instance_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1533,6 +1762,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_instances(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_instances_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1686,6 +1919,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_reschedule_maintenance(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_reschedule_maintenance_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1834,6 +2071,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_instance(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_instance_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1984,6 +2225,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_parameters(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_parameters_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-memcache/samples/generated_samples/snippet_metadata_google.cloud.memcache.v1.json b/packages/google-cloud-memcache/samples/generated_samples/snippet_metadata_google.cloud.memcache.v1.json index 55ac1347ba4a..a03ff897b510 100644 --- a/packages/google-cloud-memcache/samples/generated_samples/snippet_metadata_google.cloud.memcache.v1.json +++ b/packages/google-cloud-memcache/samples/generated_samples/snippet_metadata_google.cloud.memcache.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-memcache", - "version": "1.11.0" + "version": "1.12.0" }, "snippets": [ { diff --git a/packages/google-cloud-memcache/samples/generated_samples/snippet_metadata_google.cloud.memcache.v1beta2.json b/packages/google-cloud-memcache/samples/generated_samples/snippet_metadata_google.cloud.memcache.v1beta2.json index b1b3c5373e18..ebf7108c8120 100644 --- a/packages/google-cloud-memcache/samples/generated_samples/snippet_metadata_google.cloud.memcache.v1beta2.json +++ b/packages/google-cloud-memcache/samples/generated_samples/snippet_metadata_google.cloud.memcache.v1beta2.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-memcache", - "version": "1.11.0" + "version": "1.12.0" }, "snippets": [ { diff --git a/packages/google-cloud-memcache/tests/unit/gapic/memcache_v1/test_cloud_memcache.py b/packages/google-cloud-memcache/tests/unit/gapic/memcache_v1/test_cloud_memcache.py index 291103c615c3..b10577533cfd 100644 --- 
a/packages/google-cloud-memcache/tests/unit/gapic/memcache_v1/test_cloud_memcache.py +++ b/packages/google-cloud-memcache/tests/unit/gapic/memcache_v1/test_cloud_memcache.py @@ -77,6 +77,13 @@ ) from google.cloud.memcache_v1.types import cloud_memcache +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -331,6 +338,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = CloudMemcacheClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = CloudMemcacheClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -6228,10 +6278,13 @@ def test_list_instances_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.CloudMemcacheRestInterceptor, "post_list_instances" ) as post, mock.patch.object( + transports.CloudMemcacheRestInterceptor, "post_list_instances_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.CloudMemcacheRestInterceptor, "pre_list_instances" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = cloud_memcache.ListInstancesRequest.pb( cloud_memcache.ListInstancesRequest() ) @@ -6257,6 +6310,10 @@ def test_list_instances_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = cloud_memcache.ListInstancesResponse() + post_with_metadata.return_value = ( + cloud_memcache.ListInstancesResponse(), + metadata, + ) client.list_instances( request, @@ -6268,6 +6325,7 @@ def test_list_instances_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_instance_rest_bad_request(request_type=cloud_memcache.GetInstanceRequest): @@ -6366,10 +6424,13 @@ def test_get_instance_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.CloudMemcacheRestInterceptor, "post_get_instance" ) as post, mock.patch.object( + transports.CloudMemcacheRestInterceptor, "post_get_instance_with_metadata" + ) as post_with_metadata, mock.patch.object( 
transports.CloudMemcacheRestInterceptor, "pre_get_instance" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = cloud_memcache.GetInstanceRequest.pb( cloud_memcache.GetInstanceRequest() ) @@ -6393,6 +6454,7 @@ def test_get_instance_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = cloud_memcache.Instance() + post_with_metadata.return_value = cloud_memcache.Instance(), metadata client.get_instance( request, @@ -6404,6 +6466,7 @@ def test_get_instance_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_instance_rest_bad_request( @@ -6600,10 +6663,13 @@ def test_create_instance_rest_interceptors(null_interceptor): ), mock.patch.object( transports.CloudMemcacheRestInterceptor, "post_create_instance" ) as post, mock.patch.object( + transports.CloudMemcacheRestInterceptor, "post_create_instance_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.CloudMemcacheRestInterceptor, "pre_create_instance" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = cloud_memcache.CreateInstanceRequest.pb( cloud_memcache.CreateInstanceRequest() ) @@ -6627,6 +6693,7 @@ def test_create_instance_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_instance( request, @@ -6638,6 +6705,7 @@ def test_create_instance_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_instance_rest_bad_request( @@ -6838,10 +6906,13 @@ def test_update_instance_rest_interceptors(null_interceptor): ), mock.patch.object( transports.CloudMemcacheRestInterceptor, "post_update_instance" ) as post, mock.patch.object( + transports.CloudMemcacheRestInterceptor, "post_update_instance_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.CloudMemcacheRestInterceptor, "pre_update_instance" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = cloud_memcache.UpdateInstanceRequest.pb( cloud_memcache.UpdateInstanceRequest() ) @@ -6865,6 +6936,7 @@ def test_update_instance_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.update_instance( request, @@ -6876,6 +6948,7 @@ def test_update_instance_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_parameters_rest_bad_request( @@ -6956,10 +7029,13 @@ def test_update_parameters_rest_interceptors(null_interceptor): ), mock.patch.object( transports.CloudMemcacheRestInterceptor, "post_update_parameters" ) as post, mock.patch.object( + transports.CloudMemcacheRestInterceptor, "post_update_parameters_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.CloudMemcacheRestInterceptor, "pre_update_parameters" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = cloud_memcache.UpdateParametersRequest.pb( cloud_memcache.UpdateParametersRequest() ) @@ -6983,6 +7059,7 @@ def 
test_update_parameters_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.update_parameters( request, @@ -6994,6 +7071,7 @@ def test_update_parameters_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_instance_rest_bad_request( @@ -7074,10 +7152,13 @@ def test_delete_instance_rest_interceptors(null_interceptor): ), mock.patch.object( transports.CloudMemcacheRestInterceptor, "post_delete_instance" ) as post, mock.patch.object( + transports.CloudMemcacheRestInterceptor, "post_delete_instance_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.CloudMemcacheRestInterceptor, "pre_delete_instance" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = cloud_memcache.DeleteInstanceRequest.pb( cloud_memcache.DeleteInstanceRequest() ) @@ -7101,6 +7182,7 @@ def test_delete_instance_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.delete_instance( request, @@ -7112,6 +7194,7 @@ def test_delete_instance_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_apply_parameters_rest_bad_request( @@ -7192,10 +7275,13 @@ def test_apply_parameters_rest_interceptors(null_interceptor): ), mock.patch.object( transports.CloudMemcacheRestInterceptor, "post_apply_parameters" ) as post, mock.patch.object( + transports.CloudMemcacheRestInterceptor, "post_apply_parameters_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.CloudMemcacheRestInterceptor, "pre_apply_parameters" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = cloud_memcache.ApplyParametersRequest.pb( cloud_memcache.ApplyParametersRequest() ) @@ -7219,6 +7305,7 @@ def test_apply_parameters_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.apply_parameters( request, @@ -7230,6 +7317,7 @@ def test_apply_parameters_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_reschedule_maintenance_rest_bad_request( @@ -7310,10 +7398,14 @@ def test_reschedule_maintenance_rest_interceptors(null_interceptor): ), mock.patch.object( transports.CloudMemcacheRestInterceptor, "post_reschedule_maintenance" ) as post, mock.patch.object( + transports.CloudMemcacheRestInterceptor, + "post_reschedule_maintenance_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.CloudMemcacheRestInterceptor, "pre_reschedule_maintenance" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = cloud_memcache.RescheduleMaintenanceRequest.pb( cloud_memcache.RescheduleMaintenanceRequest() ) @@ -7337,6 +7429,7 @@ def test_reschedule_maintenance_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata 
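The tests above patch the new post_*_with_metadata hooks directly on CloudMemcacheRestInterceptor; in application code the same hooks are reached by subclassing the interceptor. A minimal sketch, assuming the standard GAPIC module layout, that the REST transport accepts an interceptor argument, and that Application Default Credentials are available; the logging behaviour is purely illustrative:

    import logging
    from typing import Sequence, Tuple, Union

    from google.cloud import memcache_v1
    from google.cloud.memcache_v1.services.cloud_memcache.transports.rest import (
        CloudMemcacheRestInterceptor,
        CloudMemcacheRestTransport,
    )
    from google.cloud.memcache_v1.types import cloud_memcache

    class LoggingInterceptor(CloudMemcacheRestInterceptor):
        def post_get_instance_with_metadata(
            self,
            response: cloud_memcache.Instance,
            metadata: Sequence[Tuple[str, Union[str, bytes]]],
        ) -> Tuple[cloud_memcache.Instance, Sequence[Tuple[str, Union[str, bytes]]]]:
            # Response headers arrive as (key, value) tuples; log them and pass
            # both the response and the metadata through unchanged.
            logging.info("get_instance response metadata: %s", metadata)
            return response, metadata

    # Assumed wiring: the REST transport takes an `interceptor` argument,
    # as in other GAPIC REST transports.
    client = memcache_v1.CloudMemcacheClient(
        transport=CloudMemcacheRestTransport(interceptor=LoggingInterceptor())
    )
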
client.reschedule_maintenance( request, @@ -7348,6 +7441,7 @@ def test_reschedule_maintenance_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationRequest): diff --git a/packages/google-cloud-memcache/tests/unit/gapic/memcache_v1beta2/test_cloud_memcache.py b/packages/google-cloud-memcache/tests/unit/gapic/memcache_v1beta2/test_cloud_memcache.py index 937e128efa28..4c6ea690a5d5 100644 --- a/packages/google-cloud-memcache/tests/unit/gapic/memcache_v1beta2/test_cloud_memcache.py +++ b/packages/google-cloud-memcache/tests/unit/gapic/memcache_v1beta2/test_cloud_memcache.py @@ -77,6 +77,13 @@ ) from google.cloud.memcache_v1beta2.types import cloud_memcache +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -331,6 +338,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = CloudMemcacheClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = CloudMemcacheClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -6843,10 +6893,13 @@ def test_list_instances_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.CloudMemcacheRestInterceptor, "post_list_instances" ) as post, mock.patch.object( + transports.CloudMemcacheRestInterceptor, "post_list_instances_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.CloudMemcacheRestInterceptor, "pre_list_instances" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = cloud_memcache.ListInstancesRequest.pb( cloud_memcache.ListInstancesRequest() ) @@ -6872,6 +6925,10 @@ def test_list_instances_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = cloud_memcache.ListInstancesResponse() + post_with_metadata.return_value = ( + cloud_memcache.ListInstancesResponse(), + metadata, + ) client.list_instances( request, @@ -6883,6 +6940,7 @@ def 
test_list_instances_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_instance_rest_bad_request(request_type=cloud_memcache.GetInstanceRequest): @@ -6983,10 +7041,13 @@ def test_get_instance_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.CloudMemcacheRestInterceptor, "post_get_instance" ) as post, mock.patch.object( + transports.CloudMemcacheRestInterceptor, "post_get_instance_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.CloudMemcacheRestInterceptor, "pre_get_instance" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = cloud_memcache.GetInstanceRequest.pb( cloud_memcache.GetInstanceRequest() ) @@ -7010,6 +7071,7 @@ def test_get_instance_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = cloud_memcache.Instance() + post_with_metadata.return_value = cloud_memcache.Instance(), metadata client.get_instance( request, @@ -7021,6 +7083,7 @@ def test_get_instance_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_instance_rest_bad_request( @@ -7219,10 +7282,13 @@ def test_create_instance_rest_interceptors(null_interceptor): ), mock.patch.object( transports.CloudMemcacheRestInterceptor, "post_create_instance" ) as post, mock.patch.object( + transports.CloudMemcacheRestInterceptor, "post_create_instance_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.CloudMemcacheRestInterceptor, "pre_create_instance" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = cloud_memcache.CreateInstanceRequest.pb( cloud_memcache.CreateInstanceRequest() ) @@ -7246,6 +7312,7 @@ def test_create_instance_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_instance( request, @@ -7257,6 +7324,7 @@ def test_create_instance_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_instance_rest_bad_request( @@ -7459,10 +7527,13 @@ def test_update_instance_rest_interceptors(null_interceptor): ), mock.patch.object( transports.CloudMemcacheRestInterceptor, "post_update_instance" ) as post, mock.patch.object( + transports.CloudMemcacheRestInterceptor, "post_update_instance_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.CloudMemcacheRestInterceptor, "pre_update_instance" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = cloud_memcache.UpdateInstanceRequest.pb( cloud_memcache.UpdateInstanceRequest() ) @@ -7486,6 +7557,7 @@ def test_update_instance_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.update_instance( request, @@ -7497,6 +7569,7 @@ def test_update_instance_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_parameters_rest_bad_request( @@ -7577,10 +7650,13 @@ def 
test_update_parameters_rest_interceptors(null_interceptor): ), mock.patch.object( transports.CloudMemcacheRestInterceptor, "post_update_parameters" ) as post, mock.patch.object( + transports.CloudMemcacheRestInterceptor, "post_update_parameters_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.CloudMemcacheRestInterceptor, "pre_update_parameters" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = cloud_memcache.UpdateParametersRequest.pb( cloud_memcache.UpdateParametersRequest() ) @@ -7604,6 +7680,7 @@ def test_update_parameters_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.update_parameters( request, @@ -7615,6 +7692,7 @@ def test_update_parameters_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_instance_rest_bad_request( @@ -7695,10 +7773,13 @@ def test_delete_instance_rest_interceptors(null_interceptor): ), mock.patch.object( transports.CloudMemcacheRestInterceptor, "post_delete_instance" ) as post, mock.patch.object( + transports.CloudMemcacheRestInterceptor, "post_delete_instance_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.CloudMemcacheRestInterceptor, "pre_delete_instance" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = cloud_memcache.DeleteInstanceRequest.pb( cloud_memcache.DeleteInstanceRequest() ) @@ -7722,6 +7803,7 @@ def test_delete_instance_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.delete_instance( request, @@ -7733,6 +7815,7 @@ def test_delete_instance_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_apply_parameters_rest_bad_request( @@ -7813,10 +7896,13 @@ def test_apply_parameters_rest_interceptors(null_interceptor): ), mock.patch.object( transports.CloudMemcacheRestInterceptor, "post_apply_parameters" ) as post, mock.patch.object( + transports.CloudMemcacheRestInterceptor, "post_apply_parameters_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.CloudMemcacheRestInterceptor, "pre_apply_parameters" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = cloud_memcache.ApplyParametersRequest.pb( cloud_memcache.ApplyParametersRequest() ) @@ -7840,6 +7926,7 @@ def test_apply_parameters_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.apply_parameters( request, @@ -7851,6 +7938,7 @@ def test_apply_parameters_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_apply_software_update_rest_bad_request( @@ -7931,10 +8019,14 @@ def test_apply_software_update_rest_interceptors(null_interceptor): ), mock.patch.object( transports.CloudMemcacheRestInterceptor, "post_apply_software_update" ) as post, mock.patch.object( + transports.CloudMemcacheRestInterceptor, + "post_apply_software_update_with_metadata", 
+ ) as post_with_metadata, mock.patch.object( transports.CloudMemcacheRestInterceptor, "pre_apply_software_update" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = cloud_memcache.ApplySoftwareUpdateRequest.pb( cloud_memcache.ApplySoftwareUpdateRequest() ) @@ -7958,6 +8050,7 @@ def test_apply_software_update_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.apply_software_update( request, @@ -7969,6 +8062,7 @@ def test_apply_software_update_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_reschedule_maintenance_rest_bad_request( @@ -8049,10 +8143,14 @@ def test_reschedule_maintenance_rest_interceptors(null_interceptor): ), mock.patch.object( transports.CloudMemcacheRestInterceptor, "post_reschedule_maintenance" ) as post, mock.patch.object( + transports.CloudMemcacheRestInterceptor, + "post_reschedule_maintenance_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.CloudMemcacheRestInterceptor, "pre_reschedule_maintenance" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = cloud_memcache.RescheduleMaintenanceRequest.pb( cloud_memcache.RescheduleMaintenanceRequest() ) @@ -8076,6 +8174,7 @@ def test_reschedule_maintenance_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.reschedule_maintenance( request, @@ -8087,6 +8186,7 @@ def test_reschedule_maintenance_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationRequest): diff --git a/packages/google-cloud-memorystore/CHANGELOG.md b/packages/google-cloud-memorystore/CHANGELOG.md index 2f08cf67c551..44bde2c8f47c 100644 --- a/packages/google-cloud-memorystore/CHANGELOG.md +++ b/packages/google-cloud-memorystore/CHANGELOG.md @@ -1,5 +1,19 @@ # Changelog +## [0.1.1](https://github.com/googleapis/google-cloud-python/compare/google-cloud-memorystore-v0.1.0...google-cloud-memorystore-v0.1.1) (2025-02-12) + + +### Features + +* add Instance.Mode.CLUSTER_DISABLED value, and deprecate STANDALONE ([3f8ab82](https://github.com/googleapis/google-cloud-python/commit/3f8ab82aa97dd47b79bcf52343e6764ff159e961)) +* Add REST Interceptors which support reading metadata ([a0910dd](https://github.com/googleapis/google-cloud-python/commit/a0910dd51541d238bc5fcf10159066ddfd928579)) +* Add support for reading selective GAPIC generation methods from service YAML ([a0910dd](https://github.com/googleapis/google-cloud-python/commit/a0910dd51541d238bc5fcf10159066ddfd928579)) + + +### Documentation + +* A comment for enum value `STANDALONE` in enum `Mode` is changed ([3f8ab82](https://github.com/googleapis/google-cloud-python/commit/3f8ab82aa97dd47b79bcf52343e6764ff159e961)) + ## 0.1.0 (2024-12-12) diff --git a/packages/google-cloud-memorystore/google/cloud/memorystore/gapic_version.py b/packages/google-cloud-memorystore/google/cloud/memorystore/gapic_version.py index 33d37a7b677b..0c7cc68730c4 100644 --- a/packages/google-cloud-memorystore/google/cloud/memorystore/gapic_version.py +++ 
b/packages/google-cloud-memorystore/google/cloud/memorystore/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.1.0" # {x-release-please-version} +__version__ = "0.1.1" # {x-release-please-version} diff --git a/packages/google-cloud-memorystore/google/cloud/memorystore_v1/gapic_version.py b/packages/google-cloud-memorystore/google/cloud/memorystore_v1/gapic_version.py index 33d37a7b677b..0c7cc68730c4 100644 --- a/packages/google-cloud-memorystore/google/cloud/memorystore_v1/gapic_version.py +++ b/packages/google-cloud-memorystore/google/cloud/memorystore_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.1.0" # {x-release-please-version} +__version__ = "0.1.1" # {x-release-please-version} diff --git a/packages/google-cloud-memorystore/google/cloud/memorystore_v1/services/memorystore/client.py b/packages/google-cloud-memorystore/google/cloud/memorystore_v1/services/memorystore/client.py index c7d248033f1c..5b4f420b1e21 100644 --- a/packages/google-cloud-memorystore/google/cloud/memorystore_v1/services/memorystore/client.py +++ b/packages/google-cloud-memorystore/google/cloud/memorystore_v1/services/memorystore/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -571,6 +573,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -1572,16 +1601,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -1627,16 +1660,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. 
+ return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def delete_operation( self, @@ -1793,16 +1830,20 @@ def get_location( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def list_locations( self, @@ -1848,16 +1889,20 @@ def list_locations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-memorystore/google/cloud/memorystore_v1/services/memorystore/transports/rest.py b/packages/google-cloud-memorystore/google/cloud/memorystore_v1/services/memorystore/transports/rest.py index 7c0e1bfe20dc..11c76b298c9b 100644 --- a/packages/google-cloud-memorystore/google/cloud/memorystore_v1/services/memorystore/transports/rest.py +++ b/packages/google-cloud-memorystore/google/cloud/memorystore_v1/services/memorystore/transports/rest.py @@ -143,12 +143,35 @@ def post_create_instance( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_instance - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_instance_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Memorystore server but before - it is returned to user code. + it is returned to user code. This `post_create_instance` interceptor runs + before the `post_create_instance_with_metadata` interceptor. """ return response + def post_create_instance_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_instance + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Memorystore server but before it is returned to user code. + + We recommend only using this `post_create_instance_with_metadata` + interceptor in new development instead of the `post_create_instance` interceptor. + When both interceptors are used, this `post_create_instance_with_metadata` interceptor runs after the + `post_create_instance` interceptor. The (possibly modified) response returned by + `post_create_instance` will be passed to + `post_create_instance_with_metadata`. + """ + return response, metadata + def pre_delete_instance( self, request: memorystore.DeleteInstanceRequest, @@ -168,12 +191,35 @@ def post_delete_instance( ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_instance - Override in a subclass to manipulate the response + DEPRECATED. 
Please use the `post_delete_instance_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Memorystore server but before - it is returned to user code. + it is returned to user code. This `post_delete_instance` interceptor runs + before the `post_delete_instance_with_metadata` interceptor. """ return response + def post_delete_instance_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_instance + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Memorystore server but before it is returned to user code. + + We recommend only using this `post_delete_instance_with_metadata` + interceptor in new development instead of the `post_delete_instance` interceptor. + When both interceptors are used, this `post_delete_instance_with_metadata` interceptor runs after the + `post_delete_instance` interceptor. The (possibly modified) response returned by + `post_delete_instance` will be passed to + `post_delete_instance_with_metadata`. + """ + return response, metadata + def pre_get_certificate_authority( self, request: memorystore.GetCertificateAuthorityRequest, @@ -194,12 +240,37 @@ def post_get_certificate_authority( ) -> memorystore.CertificateAuthority: """Post-rpc interceptor for get_certificate_authority - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_certificate_authority_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Memorystore server but before - it is returned to user code. + it is returned to user code. This `post_get_certificate_authority` interceptor runs + before the `post_get_certificate_authority_with_metadata` interceptor. """ return response + def post_get_certificate_authority_with_metadata( + self, + response: memorystore.CertificateAuthority, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + memorystore.CertificateAuthority, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for get_certificate_authority + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Memorystore server but before it is returned to user code. + + We recommend only using this `post_get_certificate_authority_with_metadata` + interceptor in new development instead of the `post_get_certificate_authority` interceptor. + When both interceptors are used, this `post_get_certificate_authority_with_metadata` interceptor runs after the + `post_get_certificate_authority` interceptor. The (possibly modified) response returned by + `post_get_certificate_authority` will be passed to + `post_get_certificate_authority_with_metadata`. + """ + return response, metadata + def pre_get_instance( self, request: memorystore.GetInstanceRequest, @@ -215,12 +286,35 @@ def pre_get_instance( def post_get_instance(self, response: memorystore.Instance) -> memorystore.Instance: """Post-rpc interceptor for get_instance - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_instance_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Memorystore server but before - it is returned to user code. + it is returned to user code. 
This `post_get_instance` interceptor runs + before the `post_get_instance_with_metadata` interceptor. """ return response + def post_get_instance_with_metadata( + self, + response: memorystore.Instance, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[memorystore.Instance, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_instance + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Memorystore server but before it is returned to user code. + + We recommend only using this `post_get_instance_with_metadata` + interceptor in new development instead of the `post_get_instance` interceptor. + When both interceptors are used, this `post_get_instance_with_metadata` interceptor runs after the + `post_get_instance` interceptor. The (possibly modified) response returned by + `post_get_instance` will be passed to + `post_get_instance_with_metadata`. + """ + return response, metadata + def pre_list_instances( self, request: memorystore.ListInstancesRequest, @@ -240,12 +334,37 @@ def post_list_instances( ) -> memorystore.ListInstancesResponse: """Post-rpc interceptor for list_instances - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_instances_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Memorystore server but before - it is returned to user code. + it is returned to user code. This `post_list_instances` interceptor runs + before the `post_list_instances_with_metadata` interceptor. """ return response + def post_list_instances_with_metadata( + self, + response: memorystore.ListInstancesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + memorystore.ListInstancesResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_instances + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Memorystore server but before it is returned to user code. + + We recommend only using this `post_list_instances_with_metadata` + interceptor in new development instead of the `post_list_instances` interceptor. + When both interceptors are used, this `post_list_instances_with_metadata` interceptor runs after the + `post_list_instances` interceptor. The (possibly modified) response returned by + `post_list_instances` will be passed to + `post_list_instances_with_metadata`. + """ + return response, metadata + def pre_update_instance( self, request: memorystore.UpdateInstanceRequest, @@ -265,12 +384,35 @@ def post_update_instance( ) -> operations_pb2.Operation: """Post-rpc interceptor for update_instance - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_instance_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Memorystore server but before - it is returned to user code. + it is returned to user code. This `post_update_instance` interceptor runs + before the `post_update_instance_with_metadata` interceptor. 
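These docstrings spell out an ordering contract: the deprecated hook runs first, and whatever it returns is what the _with_metadata hook receives. A sketch of a subclass that relies on that order, assuming the Memorystore interceptor class is named MemorystoreRestInterceptor and lives under the usual transports.rest module:

    import logging
    from typing import Sequence, Tuple, Union

    from google.cloud.memorystore_v1.services.memorystore.transports.rest import (
        MemorystoreRestInterceptor,
    )
    from google.cloud.memorystore_v1.types import memorystore

    class OrderingDemoInterceptor(MemorystoreRestInterceptor):
        def post_get_instance(self, response: memorystore.Instance) -> memorystore.Instance:
            # Deprecated hook: runs first and may modify the response.
            logging.info("deprecated post_get_instance hook ran first")
            return response

        def post_get_instance_with_metadata(
            self,
            response: memorystore.Instance,
            metadata: Sequence[Tuple[str, Union[str, bytes]]],
        ) -> Tuple[memorystore.Instance, Sequence[Tuple[str, Union[str, bytes]]]]:
            # Runs second; `response` is whatever post_get_instance returned above.
            logging.info("metadata-aware hook ran second with %d metadata entries", len(metadata))
            return response, metadata
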
""" return response + def post_update_instance_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_instance + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Memorystore server but before it is returned to user code. + + We recommend only using this `post_update_instance_with_metadata` + interceptor in new development instead of the `post_update_instance` interceptor. + When both interceptors are used, this `post_update_instance_with_metadata` interceptor runs after the + `post_update_instance` interceptor. The (possibly modified) response returned by + `post_update_instance` will be passed to + `post_update_instance_with_metadata`. + """ + return response, metadata + def pre_get_location( self, request: locations_pb2.GetLocationRequest, @@ -681,6 +823,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_instance(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_instance_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -822,6 +968,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_instance(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_instance_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -966,6 +1116,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_certificate_authority(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_certificate_authority_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1112,6 +1266,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_instance(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_instance_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1254,6 +1412,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_instances(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_instances_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1403,6 +1565,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_instance(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_instance_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git 
a/packages/google-cloud-memorystore/google/cloud/memorystore_v1/types/memorystore.py b/packages/google-cloud-memorystore/google/cloud/memorystore_v1/types/memorystore.py index 840f7254c853..e63935186106 100644 --- a/packages/google-cloud-memorystore/google/cloud/memorystore_v1/types/memorystore.py +++ b/packages/google-cloud-memorystore/google/cloud/memorystore_v1/types/memorystore.py @@ -244,13 +244,16 @@ class Mode(proto.Enum): MODE_UNSPECIFIED (0): Mode is not specified. STANDALONE (1): - Instance is in standalone mode. + Deprecated: Use CLUSTER_DISABLED instead. CLUSTER (2): Instance is in cluster mode. + CLUSTER_DISABLED (4): + Cluster mode is disabled for the instance. """ MODE_UNSPECIFIED = 0 STANDALONE = 1 CLUSTER = 2 + CLUSTER_DISABLED = 4 class StateInfo(proto.Message): r"""Additional information about the state of the instance. diff --git a/packages/google-cloud-memorystore/google/cloud/memorystore_v1beta/gapic_version.py b/packages/google-cloud-memorystore/google/cloud/memorystore_v1beta/gapic_version.py index 33d37a7b677b..0c7cc68730c4 100644 --- a/packages/google-cloud-memorystore/google/cloud/memorystore_v1beta/gapic_version.py +++ b/packages/google-cloud-memorystore/google/cloud/memorystore_v1beta/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.1.0" # {x-release-please-version} +__version__ = "0.1.1" # {x-release-please-version} diff --git a/packages/google-cloud-memorystore/google/cloud/memorystore_v1beta/services/memorystore/client.py b/packages/google-cloud-memorystore/google/cloud/memorystore_v1beta/services/memorystore/client.py index 61d182c33ad8..319e49c132f2 100644 --- a/packages/google-cloud-memorystore/google/cloud/memorystore_v1beta/services/memorystore/client.py +++ b/packages/google-cloud-memorystore/google/cloud/memorystore_v1beta/services/memorystore/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -571,6 +573,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -1572,16 +1601,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. 
+ return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -1627,16 +1660,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def delete_operation( self, @@ -1793,16 +1830,20 @@ def get_location( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def list_locations( self, @@ -1848,16 +1889,20 @@ def list_locations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-memorystore/google/cloud/memorystore_v1beta/services/memorystore/transports/rest.py b/packages/google-cloud-memorystore/google/cloud/memorystore_v1beta/services/memorystore/transports/rest.py index 4adb50bc5686..dcc5303a8172 100644 --- a/packages/google-cloud-memorystore/google/cloud/memorystore_v1beta/services/memorystore/transports/rest.py +++ b/packages/google-cloud-memorystore/google/cloud/memorystore_v1beta/services/memorystore/transports/rest.py @@ -143,12 +143,35 @@ def post_create_instance( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_instance - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_instance_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Memorystore server but before - it is returned to user code. + it is returned to user code. This `post_create_instance` interceptor runs + before the `post_create_instance_with_metadata` interceptor. """ return response + def post_create_instance_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_instance + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Memorystore server but before it is returned to user code. + + We recommend only using this `post_create_instance_with_metadata` + interceptor in new development instead of the `post_create_instance` interceptor. 
+ When both interceptors are used, this `post_create_instance_with_metadata` interceptor runs after the + `post_create_instance` interceptor. The (possibly modified) response returned by + `post_create_instance` will be passed to + `post_create_instance_with_metadata`. + """ + return response, metadata + def pre_delete_instance( self, request: memorystore.DeleteInstanceRequest, @@ -168,12 +191,35 @@ def post_delete_instance( ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_instance - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_instance_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Memorystore server but before - it is returned to user code. + it is returned to user code. This `post_delete_instance` interceptor runs + before the `post_delete_instance_with_metadata` interceptor. """ return response + def post_delete_instance_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_instance + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Memorystore server but before it is returned to user code. + + We recommend only using this `post_delete_instance_with_metadata` + interceptor in new development instead of the `post_delete_instance` interceptor. + When both interceptors are used, this `post_delete_instance_with_metadata` interceptor runs after the + `post_delete_instance` interceptor. The (possibly modified) response returned by + `post_delete_instance` will be passed to + `post_delete_instance_with_metadata`. + """ + return response, metadata + def pre_get_certificate_authority( self, request: memorystore.GetCertificateAuthorityRequest, @@ -194,12 +240,37 @@ def post_get_certificate_authority( ) -> memorystore.CertificateAuthority: """Post-rpc interceptor for get_certificate_authority - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_certificate_authority_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Memorystore server but before - it is returned to user code. + it is returned to user code. This `post_get_certificate_authority` interceptor runs + before the `post_get_certificate_authority_with_metadata` interceptor. """ return response + def post_get_certificate_authority_with_metadata( + self, + response: memorystore.CertificateAuthority, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + memorystore.CertificateAuthority, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for get_certificate_authority + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Memorystore server but before it is returned to user code. + + We recommend only using this `post_get_certificate_authority_with_metadata` + interceptor in new development instead of the `post_get_certificate_authority` interceptor. + When both interceptors are used, this `post_get_certificate_authority_with_metadata` interceptor runs after the + `post_get_certificate_authority` interceptor. The (possibly modified) response returned by + `post_get_certificate_authority` will be passed to + `post_get_certificate_authority_with_metadata`. 
+ """ + return response, metadata + def pre_get_instance( self, request: memorystore.GetInstanceRequest, @@ -215,12 +286,35 @@ def pre_get_instance( def post_get_instance(self, response: memorystore.Instance) -> memorystore.Instance: """Post-rpc interceptor for get_instance - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_instance_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Memorystore server but before - it is returned to user code. + it is returned to user code. This `post_get_instance` interceptor runs + before the `post_get_instance_with_metadata` interceptor. """ return response + def post_get_instance_with_metadata( + self, + response: memorystore.Instance, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[memorystore.Instance, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_instance + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Memorystore server but before it is returned to user code. + + We recommend only using this `post_get_instance_with_metadata` + interceptor in new development instead of the `post_get_instance` interceptor. + When both interceptors are used, this `post_get_instance_with_metadata` interceptor runs after the + `post_get_instance` interceptor. The (possibly modified) response returned by + `post_get_instance` will be passed to + `post_get_instance_with_metadata`. + """ + return response, metadata + def pre_list_instances( self, request: memorystore.ListInstancesRequest, @@ -240,12 +334,37 @@ def post_list_instances( ) -> memorystore.ListInstancesResponse: """Post-rpc interceptor for list_instances - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_instances_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Memorystore server but before - it is returned to user code. + it is returned to user code. This `post_list_instances` interceptor runs + before the `post_list_instances_with_metadata` interceptor. """ return response + def post_list_instances_with_metadata( + self, + response: memorystore.ListInstancesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + memorystore.ListInstancesResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_instances + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Memorystore server but before it is returned to user code. + + We recommend only using this `post_list_instances_with_metadata` + interceptor in new development instead of the `post_list_instances` interceptor. + When both interceptors are used, this `post_list_instances_with_metadata` interceptor runs after the + `post_list_instances` interceptor. The (possibly modified) response returned by + `post_list_instances` will be passed to + `post_list_instances_with_metadata`. + """ + return response, metadata + def pre_update_instance( self, request: memorystore.UpdateInstanceRequest, @@ -265,12 +384,35 @@ def post_update_instance( ) -> operations_pb2.Operation: """Post-rpc interceptor for update_instance - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_instance_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response after it is returned by the Memorystore server but before - it is returned to user code. + it is returned to user code. This `post_update_instance` interceptor runs + before the `post_update_instance_with_metadata` interceptor. """ return response + def post_update_instance_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_instance + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Memorystore server but before it is returned to user code. + + We recommend only using this `post_update_instance_with_metadata` + interceptor in new development instead of the `post_update_instance` interceptor. + When both interceptors are used, this `post_update_instance_with_metadata` interceptor runs after the + `post_update_instance` interceptor. The (possibly modified) response returned by + `post_update_instance` will be passed to + `post_update_instance_with_metadata`. + """ + return response, metadata + def pre_get_location( self, request: locations_pb2.GetLocationRequest, @@ -681,6 +823,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_instance(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_instance_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -822,6 +968,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_instance(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_instance_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -966,6 +1116,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_certificate_authority(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_certificate_authority_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1112,6 +1266,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_instance(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_instance_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1254,6 +1412,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_instances(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_instances_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -1403,6 +1565,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_instance(resp) + 
response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_instance_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-memorystore/google/cloud/memorystore_v1beta/types/memorystore.py b/packages/google-cloud-memorystore/google/cloud/memorystore_v1beta/types/memorystore.py index 4cfef649dfae..49b0dd994ffc 100644 --- a/packages/google-cloud-memorystore/google/cloud/memorystore_v1beta/types/memorystore.py +++ b/packages/google-cloud-memorystore/google/cloud/memorystore_v1beta/types/memorystore.py @@ -244,13 +244,16 @@ class Mode(proto.Enum): MODE_UNSPECIFIED (0): Mode is not specified. STANDALONE (1): - Instance is in standalone mode. + Deprecated: Use CLUSTER_DISABLED instead. CLUSTER (2): Instance is in cluster mode. + CLUSTER_DISABLED (4): + Cluster mode is disabled for the instance. """ MODE_UNSPECIFIED = 0 STANDALONE = 1 CLUSTER = 2 + CLUSTER_DISABLED = 4 class StateInfo(proto.Message): r"""Additional information about the state of the instance. diff --git a/packages/google-cloud-memorystore/samples/generated_samples/snippet_metadata_google.cloud.memorystore.v1.json b/packages/google-cloud-memorystore/samples/generated_samples/snippet_metadata_google.cloud.memorystore.v1.json index efdd1702a8b0..7d539a3bb992 100644 --- a/packages/google-cloud-memorystore/samples/generated_samples/snippet_metadata_google.cloud.memorystore.v1.json +++ b/packages/google-cloud-memorystore/samples/generated_samples/snippet_metadata_google.cloud.memorystore.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-memorystore", - "version": "0.1.0" + "version": "0.1.1" }, "snippets": [ { diff --git a/packages/google-cloud-memorystore/samples/generated_samples/snippet_metadata_google.cloud.memorystore.v1beta.json b/packages/google-cloud-memorystore/samples/generated_samples/snippet_metadata_google.cloud.memorystore.v1beta.json index b63d6de8440f..768b6745192f 100644 --- a/packages/google-cloud-memorystore/samples/generated_samples/snippet_metadata_google.cloud.memorystore.v1beta.json +++ b/packages/google-cloud-memorystore/samples/generated_samples/snippet_metadata_google.cloud.memorystore.v1beta.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-memorystore", - "version": "0.1.0" + "version": "0.1.1" }, "snippets": [ { diff --git a/packages/google-cloud-memorystore/tests/unit/gapic/memorystore_v1/test_memorystore.py b/packages/google-cloud-memorystore/tests/unit/gapic/memorystore_v1/test_memorystore.py index 186eb816f37f..299cd635cd07 100644 --- a/packages/google-cloud-memorystore/tests/unit/gapic/memorystore_v1/test_memorystore.py +++ b/packages/google-cloud-memorystore/tests/unit/gapic/memorystore_v1/test_memorystore.py @@ -73,6 +73,13 @@ ) from google.cloud.memorystore_v1.types import memorystore +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -307,6 +314,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
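The credential-info behaviour exercised by the tests below can also be seen from the caller's side. A minimal sketch, assuming google-auth>=2.35.0 (so the credentials object exposes get_cred_info()) and a placeholder project name; the except branch and printed output are illustrative only, not part of this diff.

from google.api_core import exceptions as core_exceptions
from google.cloud import memorystore_v1
from google.cloud.location import locations_pb2

client = memorystore_v1.MemorystoreClient()

try:
    # list_locations is one of the methods this change wraps in try/except.
    client.list_locations(
        locations_pb2.ListLocationsRequest(name="projects/my-project")
    )
except core_exceptions.PermissionDenied as e:
    # For 401/403/404 responses the client now calls _add_cred_info_for_auth_errors(),
    # which appends a JSON string describing the credential source, type, and
    # principal to e.details before re-raising; the parametrized tests below
    # verify exactly this behaviour.
    print(e.details)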
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = MemorystoreClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = MemorystoreClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -2258,10 +2308,13 @@ def test_list_instances_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.MemorystoreRestInterceptor, "post_list_instances" ) as post, mock.patch.object( + transports.MemorystoreRestInterceptor, "post_list_instances_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.MemorystoreRestInterceptor, "pre_list_instances" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = memorystore.ListInstancesRequest.pb( memorystore.ListInstancesRequest() ) @@ -2287,6 +2340,7 @@ def test_list_instances_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = memorystore.ListInstancesResponse() + post_with_metadata.return_value = memorystore.ListInstancesResponse(), metadata client.list_instances( request, @@ -2298,6 +2352,7 @@ def test_list_instances_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_instance_rest_bad_request(request_type=memorystore.GetInstanceRequest): @@ -2406,10 +2461,13 @@ def test_get_instance_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.MemorystoreRestInterceptor, "post_get_instance" ) as post, mock.patch.object( + transports.MemorystoreRestInterceptor, "post_get_instance_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.MemorystoreRestInterceptor, "pre_get_instance" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = memorystore.GetInstanceRequest.pb(memorystore.GetInstanceRequest()) transcode.return_value = { "method": "post", @@ -2431,6 +2489,7 @@ def test_get_instance_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = memorystore.Instance() + post_with_metadata.return_value = memorystore.Instance(), metadata client.get_instance( request, @@ -2442,6 +2501,7 @@ def test_get_instance_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def 
test_create_instance_rest_bad_request( @@ -2651,10 +2711,13 @@ def test_create_instance_rest_interceptors(null_interceptor): ), mock.patch.object( transports.MemorystoreRestInterceptor, "post_create_instance" ) as post, mock.patch.object( + transports.MemorystoreRestInterceptor, "post_create_instance_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.MemorystoreRestInterceptor, "pre_create_instance" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = memorystore.CreateInstanceRequest.pb( memorystore.CreateInstanceRequest() ) @@ -2678,6 +2741,7 @@ def test_create_instance_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_instance( request, @@ -2689,6 +2753,7 @@ def test_create_instance_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_instance_rest_bad_request( @@ -2902,10 +2967,13 @@ def test_update_instance_rest_interceptors(null_interceptor): ), mock.patch.object( transports.MemorystoreRestInterceptor, "post_update_instance" ) as post, mock.patch.object( + transports.MemorystoreRestInterceptor, "post_update_instance_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.MemorystoreRestInterceptor, "pre_update_instance" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = memorystore.UpdateInstanceRequest.pb( memorystore.UpdateInstanceRequest() ) @@ -2929,6 +2997,7 @@ def test_update_instance_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.update_instance( request, @@ -2940,6 +3009,7 @@ def test_update_instance_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_instance_rest_bad_request( @@ -3020,10 +3090,13 @@ def test_delete_instance_rest_interceptors(null_interceptor): ), mock.patch.object( transports.MemorystoreRestInterceptor, "post_delete_instance" ) as post, mock.patch.object( + transports.MemorystoreRestInterceptor, "post_delete_instance_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.MemorystoreRestInterceptor, "pre_delete_instance" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = memorystore.DeleteInstanceRequest.pb( memorystore.DeleteInstanceRequest() ) @@ -3047,6 +3120,7 @@ def test_delete_instance_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.delete_instance( request, @@ -3058,6 +3132,7 @@ def test_delete_instance_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_certificate_authority_rest_bad_request( @@ -3142,10 +3217,14 @@ def test_get_certificate_authority_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.MemorystoreRestInterceptor, "post_get_certificate_authority" ) as post, mock.patch.object( + transports.MemorystoreRestInterceptor, + 
"post_get_certificate_authority_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.MemorystoreRestInterceptor, "pre_get_certificate_authority" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = memorystore.GetCertificateAuthorityRequest.pb( memorystore.GetCertificateAuthorityRequest() ) @@ -3171,6 +3250,7 @@ def test_get_certificate_authority_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = memorystore.CertificateAuthority() + post_with_metadata.return_value = memorystore.CertificateAuthority(), metadata client.get_certificate_authority( request, @@ -3182,6 +3262,7 @@ def test_get_certificate_authority_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationRequest): diff --git a/packages/google-cloud-memorystore/tests/unit/gapic/memorystore_v1beta/test_memorystore.py b/packages/google-cloud-memorystore/tests/unit/gapic/memorystore_v1beta/test_memorystore.py index aab87ed5d30b..080d6afdac26 100644 --- a/packages/google-cloud-memorystore/tests/unit/gapic/memorystore_v1beta/test_memorystore.py +++ b/packages/google-cloud-memorystore/tests/unit/gapic/memorystore_v1beta/test_memorystore.py @@ -73,6 +73,13 @@ ) from google.cloud.memorystore_v1beta.types import memorystore +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -307,6 +314,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = MemorystoreClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = MemorystoreClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -2262,10 +2312,13 @@ def test_list_instances_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.MemorystoreRestInterceptor, "post_list_instances" ) as post, mock.patch.object( + transports.MemorystoreRestInterceptor, "post_list_instances_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.MemorystoreRestInterceptor, "pre_list_instances" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = memorystore.ListInstancesRequest.pb( memorystore.ListInstancesRequest() ) @@ -2291,6 +2344,7 @@ def test_list_instances_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = memorystore.ListInstancesResponse() + post_with_metadata.return_value = memorystore.ListInstancesResponse(), metadata client.list_instances( request, @@ -2302,6 +2356,7 @@ def test_list_instances_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_instance_rest_bad_request(request_type=memorystore.GetInstanceRequest): @@ -2410,10 +2465,13 @@ def test_get_instance_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.MemorystoreRestInterceptor, "post_get_instance" ) as post, mock.patch.object( + transports.MemorystoreRestInterceptor, "post_get_instance_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.MemorystoreRestInterceptor, "pre_get_instance" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = memorystore.GetInstanceRequest.pb(memorystore.GetInstanceRequest()) transcode.return_value = { "method": "post", @@ -2435,6 +2493,7 @@ def test_get_instance_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = memorystore.Instance() + post_with_metadata.return_value = memorystore.Instance(), metadata client.get_instance( request, @@ -2446,6 +2505,7 @@ def test_get_instance_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def 
test_create_instance_rest_bad_request( @@ -2655,10 +2715,13 @@ def test_create_instance_rest_interceptors(null_interceptor): ), mock.patch.object( transports.MemorystoreRestInterceptor, "post_create_instance" ) as post, mock.patch.object( + transports.MemorystoreRestInterceptor, "post_create_instance_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.MemorystoreRestInterceptor, "pre_create_instance" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = memorystore.CreateInstanceRequest.pb( memorystore.CreateInstanceRequest() ) @@ -2682,6 +2745,7 @@ def test_create_instance_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_instance( request, @@ -2693,6 +2757,7 @@ def test_create_instance_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_instance_rest_bad_request( @@ -2906,10 +2971,13 @@ def test_update_instance_rest_interceptors(null_interceptor): ), mock.patch.object( transports.MemorystoreRestInterceptor, "post_update_instance" ) as post, mock.patch.object( + transports.MemorystoreRestInterceptor, "post_update_instance_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.MemorystoreRestInterceptor, "pre_update_instance" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = memorystore.UpdateInstanceRequest.pb( memorystore.UpdateInstanceRequest() ) @@ -2933,6 +3001,7 @@ def test_update_instance_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.update_instance( request, @@ -2944,6 +3013,7 @@ def test_update_instance_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_instance_rest_bad_request( @@ -3024,10 +3094,13 @@ def test_delete_instance_rest_interceptors(null_interceptor): ), mock.patch.object( transports.MemorystoreRestInterceptor, "post_delete_instance" ) as post, mock.patch.object( + transports.MemorystoreRestInterceptor, "post_delete_instance_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.MemorystoreRestInterceptor, "pre_delete_instance" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = memorystore.DeleteInstanceRequest.pb( memorystore.DeleteInstanceRequest() ) @@ -3051,6 +3124,7 @@ def test_delete_instance_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.delete_instance( request, @@ -3062,6 +3136,7 @@ def test_delete_instance_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_certificate_authority_rest_bad_request( @@ -3146,10 +3221,14 @@ def test_get_certificate_authority_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.MemorystoreRestInterceptor, "post_get_certificate_authority" ) as post, mock.patch.object( + transports.MemorystoreRestInterceptor, + 
"post_get_certificate_authority_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.MemorystoreRestInterceptor, "pre_get_certificate_authority" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = memorystore.GetCertificateAuthorityRequest.pb( memorystore.GetCertificateAuthorityRequest() ) @@ -3175,6 +3254,7 @@ def test_get_certificate_authority_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = memorystore.CertificateAuthority() + post_with_metadata.return_value = memorystore.CertificateAuthority(), metadata client.get_certificate_authority( request, @@ -3186,6 +3266,7 @@ def test_get_certificate_authority_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationRequest): diff --git a/packages/google-cloud-migrationcenter/CHANGELOG.md b/packages/google-cloud-migrationcenter/CHANGELOG.md index 96ad30f75600..f455533dad04 100644 --- a/packages/google-cloud-migrationcenter/CHANGELOG.md +++ b/packages/google-cloud-migrationcenter/CHANGELOG.md @@ -1,5 +1,13 @@ # Changelog +## [0.1.13](https://github.com/googleapis/google-cloud-python/compare/google-cloud-migrationcenter-v0.1.12...google-cloud-migrationcenter-v0.1.13) (2025-02-12) + + +### Features + +* Add REST Interceptors which support reading metadata ([a0910dd](https://github.com/googleapis/google-cloud-python/commit/a0910dd51541d238bc5fcf10159066ddfd928579)) +* Add support for reading selective GAPIC generation methods from service YAML ([a0910dd](https://github.com/googleapis/google-cloud-python/commit/a0910dd51541d238bc5fcf10159066ddfd928579)) + ## [0.1.12](https://github.com/googleapis/google-cloud-python/compare/google-cloud-migrationcenter-v0.1.11...google-cloud-migrationcenter-v0.1.12) (2024-12-12) diff --git a/packages/google-cloud-migrationcenter/google/cloud/migrationcenter/gapic_version.py b/packages/google-cloud-migrationcenter/google/cloud/migrationcenter/gapic_version.py index 17bbab4c1877..7daf9a1dd221 100644 --- a/packages/google-cloud-migrationcenter/google/cloud/migrationcenter/gapic_version.py +++ b/packages/google-cloud-migrationcenter/google/cloud/migrationcenter/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.1.12" # {x-release-please-version} +__version__ = "0.1.13" # {x-release-please-version} diff --git a/packages/google-cloud-migrationcenter/google/cloud/migrationcenter_v1/gapic_version.py b/packages/google-cloud-migrationcenter/google/cloud/migrationcenter_v1/gapic_version.py index 17bbab4c1877..7daf9a1dd221 100644 --- a/packages/google-cloud-migrationcenter/google/cloud/migrationcenter_v1/gapic_version.py +++ b/packages/google-cloud-migrationcenter/google/cloud/migrationcenter_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.1.12" # {x-release-please-version} +__version__ = "0.1.13" # {x-release-please-version} diff --git a/packages/google-cloud-migrationcenter/google/cloud/migrationcenter_v1/services/migration_center/client.py b/packages/google-cloud-migrationcenter/google/cloud/migrationcenter_v1/services/migration_center/client.py index a8eec9a81222..7dcaa5aa7e1a 100644 --- a/packages/google-cloud-migrationcenter/google/cloud/migrationcenter_v1/services/migration_center/client.py +++ b/packages/google-cloud-migrationcenter/google/cloud/migrationcenter_v1/services/migration_center/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -693,6 +695,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -6705,16 +6734,20 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -6760,16 +6793,20 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def delete_operation( self, @@ -6926,16 +6963,20 @@ def get_location( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. 
+ return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def list_locations( self, @@ -6981,16 +7022,20 @@ def list_locations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-migrationcenter/google/cloud/migrationcenter_v1/services/migration_center/transports/rest.py b/packages/google-cloud-migrationcenter/google/cloud/migrationcenter_v1/services/migration_center/transports/rest.py index 8b07267760d3..6e8828c2716e 100644 --- a/packages/google-cloud-migrationcenter/google/cloud/migrationcenter_v1/services/migration_center/transports/rest.py +++ b/packages/google-cloud-migrationcenter/google/cloud/migrationcenter_v1/services/migration_center/transports/rest.py @@ -472,12 +472,35 @@ def post_add_assets_to_group( ) -> operations_pb2.Operation: """Post-rpc interceptor for add_assets_to_group - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_add_assets_to_group_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the MigrationCenter server but before - it is returned to user code. + it is returned to user code. This `post_add_assets_to_group` interceptor runs + before the `post_add_assets_to_group_with_metadata` interceptor. """ return response + def post_add_assets_to_group_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for add_assets_to_group + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the MigrationCenter server but before it is returned to user code. + + We recommend only using this `post_add_assets_to_group_with_metadata` + interceptor in new development instead of the `post_add_assets_to_group` interceptor. + When both interceptors are used, this `post_add_assets_to_group_with_metadata` interceptor runs after the + `post_add_assets_to_group` interceptor. The (possibly modified) response returned by + `post_add_assets_to_group` will be passed to + `post_add_assets_to_group_with_metadata`. + """ + return response, metadata + def pre_aggregate_assets_values( self, request: migrationcenter.AggregateAssetsValuesRequest, @@ -498,12 +521,38 @@ def post_aggregate_assets_values( ) -> migrationcenter.AggregateAssetsValuesResponse: """Post-rpc interceptor for aggregate_assets_values - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_aggregate_assets_values_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the MigrationCenter server but before - it is returned to user code. + it is returned to user code. This `post_aggregate_assets_values` interceptor runs + before the `post_aggregate_assets_values_with_metadata` interceptor. 
""" return response + def post_aggregate_assets_values_with_metadata( + self, + response: migrationcenter.AggregateAssetsValuesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + migrationcenter.AggregateAssetsValuesResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for aggregate_assets_values + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the MigrationCenter server but before it is returned to user code. + + We recommend only using this `post_aggregate_assets_values_with_metadata` + interceptor in new development instead of the `post_aggregate_assets_values` interceptor. + When both interceptors are used, this `post_aggregate_assets_values_with_metadata` interceptor runs after the + `post_aggregate_assets_values` interceptor. The (possibly modified) response returned by + `post_aggregate_assets_values` will be passed to + `post_aggregate_assets_values_with_metadata`. + """ + return response, metadata + def pre_batch_delete_assets( self, request: migrationcenter.BatchDeleteAssetsRequest, @@ -539,12 +588,38 @@ def post_batch_update_assets( ) -> migrationcenter.BatchUpdateAssetsResponse: """Post-rpc interceptor for batch_update_assets - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_batch_update_assets_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the MigrationCenter server but before - it is returned to user code. + it is returned to user code. This `post_batch_update_assets` interceptor runs + before the `post_batch_update_assets_with_metadata` interceptor. """ return response + def post_batch_update_assets_with_metadata( + self, + response: migrationcenter.BatchUpdateAssetsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + migrationcenter.BatchUpdateAssetsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for batch_update_assets + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the MigrationCenter server but before it is returned to user code. + + We recommend only using this `post_batch_update_assets_with_metadata` + interceptor in new development instead of the `post_batch_update_assets` interceptor. + When both interceptors are used, this `post_batch_update_assets_with_metadata` interceptor runs after the + `post_batch_update_assets` interceptor. The (possibly modified) response returned by + `post_batch_update_assets` will be passed to + `post_batch_update_assets_with_metadata`. + """ + return response, metadata + def pre_create_group( self, request: migrationcenter.CreateGroupRequest, @@ -564,12 +639,35 @@ def post_create_group( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_group - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_group_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the MigrationCenter server but before - it is returned to user code. + it is returned to user code. This `post_create_group` interceptor runs + before the `post_create_group_with_metadata` interceptor. 
""" return response + def post_create_group_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_group + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the MigrationCenter server but before it is returned to user code. + + We recommend only using this `post_create_group_with_metadata` + interceptor in new development instead of the `post_create_group` interceptor. + When both interceptors are used, this `post_create_group_with_metadata` interceptor runs after the + `post_create_group` interceptor. The (possibly modified) response returned by + `post_create_group` will be passed to + `post_create_group_with_metadata`. + """ + return response, metadata + def pre_create_import_data_file( self, request: migrationcenter.CreateImportDataFileRequest, @@ -590,12 +688,35 @@ def post_create_import_data_file( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_import_data_file - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_import_data_file_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the MigrationCenter server but before - it is returned to user code. + it is returned to user code. This `post_create_import_data_file` interceptor runs + before the `post_create_import_data_file_with_metadata` interceptor. """ return response + def post_create_import_data_file_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_import_data_file + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the MigrationCenter server but before it is returned to user code. + + We recommend only using this `post_create_import_data_file_with_metadata` + interceptor in new development instead of the `post_create_import_data_file` interceptor. + When both interceptors are used, this `post_create_import_data_file_with_metadata` interceptor runs after the + `post_create_import_data_file` interceptor. The (possibly modified) response returned by + `post_create_import_data_file` will be passed to + `post_create_import_data_file_with_metadata`. + """ + return response, metadata + def pre_create_import_job( self, request: migrationcenter.CreateImportJobRequest, @@ -615,12 +736,35 @@ def post_create_import_job( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_import_job - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_import_job_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the MigrationCenter server but before - it is returned to user code. + it is returned to user code. This `post_create_import_job` interceptor runs + before the `post_create_import_job_with_metadata` interceptor. 
""" return response + def post_create_import_job_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_import_job + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the MigrationCenter server but before it is returned to user code. + + We recommend only using this `post_create_import_job_with_metadata` + interceptor in new development instead of the `post_create_import_job` interceptor. + When both interceptors are used, this `post_create_import_job_with_metadata` interceptor runs after the + `post_create_import_job` interceptor. The (possibly modified) response returned by + `post_create_import_job` will be passed to + `post_create_import_job_with_metadata`. + """ + return response, metadata + def pre_create_preference_set( self, request: migrationcenter.CreatePreferenceSetRequest, @@ -641,12 +785,35 @@ def post_create_preference_set( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_preference_set - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_preference_set_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the MigrationCenter server but before - it is returned to user code. + it is returned to user code. This `post_create_preference_set` interceptor runs + before the `post_create_preference_set_with_metadata` interceptor. """ return response + def post_create_preference_set_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_preference_set + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the MigrationCenter server but before it is returned to user code. + + We recommend only using this `post_create_preference_set_with_metadata` + interceptor in new development instead of the `post_create_preference_set` interceptor. + When both interceptors are used, this `post_create_preference_set_with_metadata` interceptor runs after the + `post_create_preference_set` interceptor. The (possibly modified) response returned by + `post_create_preference_set` will be passed to + `post_create_preference_set_with_metadata`. + """ + return response, metadata + def pre_create_report( self, request: migrationcenter.CreateReportRequest, @@ -666,12 +833,35 @@ def post_create_report( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_report - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_report_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the MigrationCenter server but before - it is returned to user code. + it is returned to user code. This `post_create_report` interceptor runs + before the `post_create_report_with_metadata` interceptor. 
""" return response + def post_create_report_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_report + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the MigrationCenter server but before it is returned to user code. + + We recommend only using this `post_create_report_with_metadata` + interceptor in new development instead of the `post_create_report` interceptor. + When both interceptors are used, this `post_create_report_with_metadata` interceptor runs after the + `post_create_report` interceptor. The (possibly modified) response returned by + `post_create_report` will be passed to + `post_create_report_with_metadata`. + """ + return response, metadata + def pre_create_report_config( self, request: migrationcenter.CreateReportConfigRequest, @@ -692,12 +882,35 @@ def post_create_report_config( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_report_config - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_report_config_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the MigrationCenter server but before - it is returned to user code. + it is returned to user code. This `post_create_report_config` interceptor runs + before the `post_create_report_config_with_metadata` interceptor. """ return response + def post_create_report_config_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_report_config + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the MigrationCenter server but before it is returned to user code. + + We recommend only using this `post_create_report_config_with_metadata` + interceptor in new development instead of the `post_create_report_config` interceptor. + When both interceptors are used, this `post_create_report_config_with_metadata` interceptor runs after the + `post_create_report_config` interceptor. The (possibly modified) response returned by + `post_create_report_config` will be passed to + `post_create_report_config_with_metadata`. + """ + return response, metadata + def pre_create_source( self, request: migrationcenter.CreateSourceRequest, @@ -717,12 +930,35 @@ def post_create_source( ) -> operations_pb2.Operation: """Post-rpc interceptor for create_source - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_source_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the MigrationCenter server but before - it is returned to user code. + it is returned to user code. This `post_create_source` interceptor runs + before the `post_create_source_with_metadata` interceptor. 
""" return response + def post_create_source_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_source + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the MigrationCenter server but before it is returned to user code. + + We recommend only using this `post_create_source_with_metadata` + interceptor in new development instead of the `post_create_source` interceptor. + When both interceptors are used, this `post_create_source_with_metadata` interceptor runs after the + `post_create_source` interceptor. The (possibly modified) response returned by + `post_create_source` will be passed to + `post_create_source_with_metadata`. + """ + return response, metadata + def pre_delete_asset( self, request: migrationcenter.DeleteAssetRequest, @@ -756,12 +992,35 @@ def post_delete_group( ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_group - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_group_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the MigrationCenter server but before - it is returned to user code. + it is returned to user code. This `post_delete_group` interceptor runs + before the `post_delete_group_with_metadata` interceptor. """ return response + def post_delete_group_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_group + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the MigrationCenter server but before it is returned to user code. + + We recommend only using this `post_delete_group_with_metadata` + interceptor in new development instead of the `post_delete_group` interceptor. + When both interceptors are used, this `post_delete_group_with_metadata` interceptor runs after the + `post_delete_group` interceptor. The (possibly modified) response returned by + `post_delete_group` will be passed to + `post_delete_group_with_metadata`. + """ + return response, metadata + def pre_delete_import_data_file( self, request: migrationcenter.DeleteImportDataFileRequest, @@ -782,12 +1041,35 @@ def post_delete_import_data_file( ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_import_data_file - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_import_data_file_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the MigrationCenter server but before - it is returned to user code. + it is returned to user code. This `post_delete_import_data_file` interceptor runs + before the `post_delete_import_data_file_with_metadata` interceptor. 
""" return response + def post_delete_import_data_file_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_import_data_file + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the MigrationCenter server but before it is returned to user code. + + We recommend only using this `post_delete_import_data_file_with_metadata` + interceptor in new development instead of the `post_delete_import_data_file` interceptor. + When both interceptors are used, this `post_delete_import_data_file_with_metadata` interceptor runs after the + `post_delete_import_data_file` interceptor. The (possibly modified) response returned by + `post_delete_import_data_file` will be passed to + `post_delete_import_data_file_with_metadata`. + """ + return response, metadata + def pre_delete_import_job( self, request: migrationcenter.DeleteImportJobRequest, @@ -807,12 +1089,35 @@ def post_delete_import_job( ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_import_job - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_import_job_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the MigrationCenter server but before - it is returned to user code. + it is returned to user code. This `post_delete_import_job` interceptor runs + before the `post_delete_import_job_with_metadata` interceptor. """ return response + def post_delete_import_job_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_import_job + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the MigrationCenter server but before it is returned to user code. + + We recommend only using this `post_delete_import_job_with_metadata` + interceptor in new development instead of the `post_delete_import_job` interceptor. + When both interceptors are used, this `post_delete_import_job_with_metadata` interceptor runs after the + `post_delete_import_job` interceptor. The (possibly modified) response returned by + `post_delete_import_job` will be passed to + `post_delete_import_job_with_metadata`. + """ + return response, metadata + def pre_delete_preference_set( self, request: migrationcenter.DeletePreferenceSetRequest, @@ -833,12 +1138,35 @@ def post_delete_preference_set( ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_preference_set - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_preference_set_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the MigrationCenter server but before - it is returned to user code. + it is returned to user code. This `post_delete_preference_set` interceptor runs + before the `post_delete_preference_set_with_metadata` interceptor. 
""" return response + def post_delete_preference_set_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_preference_set + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the MigrationCenter server but before it is returned to user code. + + We recommend only using this `post_delete_preference_set_with_metadata` + interceptor in new development instead of the `post_delete_preference_set` interceptor. + When both interceptors are used, this `post_delete_preference_set_with_metadata` interceptor runs after the + `post_delete_preference_set` interceptor. The (possibly modified) response returned by + `post_delete_preference_set` will be passed to + `post_delete_preference_set_with_metadata`. + """ + return response, metadata + def pre_delete_report( self, request: migrationcenter.DeleteReportRequest, @@ -858,12 +1186,35 @@ def post_delete_report( ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_report - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_report_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the MigrationCenter server but before - it is returned to user code. + it is returned to user code. This `post_delete_report` interceptor runs + before the `post_delete_report_with_metadata` interceptor. """ return response + def post_delete_report_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_report + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the MigrationCenter server but before it is returned to user code. + + We recommend only using this `post_delete_report_with_metadata` + interceptor in new development instead of the `post_delete_report` interceptor. + When both interceptors are used, this `post_delete_report_with_metadata` interceptor runs after the + `post_delete_report` interceptor. The (possibly modified) response returned by + `post_delete_report` will be passed to + `post_delete_report_with_metadata`. + """ + return response, metadata + def pre_delete_report_config( self, request: migrationcenter.DeleteReportConfigRequest, @@ -884,12 +1235,35 @@ def post_delete_report_config( ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_report_config - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_report_config_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the MigrationCenter server but before - it is returned to user code. + it is returned to user code. This `post_delete_report_config` interceptor runs + before the `post_delete_report_config_with_metadata` interceptor. 
""" return response + def post_delete_report_config_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_report_config + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the MigrationCenter server but before it is returned to user code. + + We recommend only using this `post_delete_report_config_with_metadata` + interceptor in new development instead of the `post_delete_report_config` interceptor. + When both interceptors are used, this `post_delete_report_config_with_metadata` interceptor runs after the + `post_delete_report_config` interceptor. The (possibly modified) response returned by + `post_delete_report_config` will be passed to + `post_delete_report_config_with_metadata`. + """ + return response, metadata + def pre_delete_source( self, request: migrationcenter.DeleteSourceRequest, @@ -909,12 +1283,35 @@ def post_delete_source( ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_source - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_source_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the MigrationCenter server but before - it is returned to user code. + it is returned to user code. This `post_delete_source` interceptor runs + before the `post_delete_source_with_metadata` interceptor. """ return response + def post_delete_source_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_source + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the MigrationCenter server but before it is returned to user code. + + We recommend only using this `post_delete_source_with_metadata` + interceptor in new development instead of the `post_delete_source` interceptor. + When both interceptors are used, this `post_delete_source_with_metadata` interceptor runs after the + `post_delete_source` interceptor. The (possibly modified) response returned by + `post_delete_source` will be passed to + `post_delete_source_with_metadata`. + """ + return response, metadata + def pre_get_asset( self, request: migrationcenter.GetAssetRequest, @@ -932,12 +1329,35 @@ def pre_get_asset( def post_get_asset(self, response: migrationcenter.Asset) -> migrationcenter.Asset: """Post-rpc interceptor for get_asset - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_asset_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the MigrationCenter server but before - it is returned to user code. + it is returned to user code. This `post_get_asset` interceptor runs + before the `post_get_asset_with_metadata` interceptor. 
""" return response + def post_get_asset_with_metadata( + self, + response: migrationcenter.Asset, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[migrationcenter.Asset, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_asset + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the MigrationCenter server but before it is returned to user code. + + We recommend only using this `post_get_asset_with_metadata` + interceptor in new development instead of the `post_get_asset` interceptor. + When both interceptors are used, this `post_get_asset_with_metadata` interceptor runs after the + `post_get_asset` interceptor. The (possibly modified) response returned by + `post_get_asset` will be passed to + `post_get_asset_with_metadata`. + """ + return response, metadata + def pre_get_error_frame( self, request: migrationcenter.GetErrorFrameRequest, @@ -957,12 +1377,35 @@ def post_get_error_frame( ) -> migrationcenter.ErrorFrame: """Post-rpc interceptor for get_error_frame - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_error_frame_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the MigrationCenter server but before - it is returned to user code. + it is returned to user code. This `post_get_error_frame` interceptor runs + before the `post_get_error_frame_with_metadata` interceptor. """ return response + def post_get_error_frame_with_metadata( + self, + response: migrationcenter.ErrorFrame, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[migrationcenter.ErrorFrame, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_error_frame + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the MigrationCenter server but before it is returned to user code. + + We recommend only using this `post_get_error_frame_with_metadata` + interceptor in new development instead of the `post_get_error_frame` interceptor. + When both interceptors are used, this `post_get_error_frame_with_metadata` interceptor runs after the + `post_get_error_frame` interceptor. The (possibly modified) response returned by + `post_get_error_frame` will be passed to + `post_get_error_frame_with_metadata`. + """ + return response, metadata + def pre_get_group( self, request: migrationcenter.GetGroupRequest, @@ -980,12 +1423,35 @@ def pre_get_group( def post_get_group(self, response: migrationcenter.Group) -> migrationcenter.Group: """Post-rpc interceptor for get_group - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_group_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the MigrationCenter server but before - it is returned to user code. + it is returned to user code. This `post_get_group` interceptor runs + before the `post_get_group_with_metadata` interceptor. """ return response + def post_get_group_with_metadata( + self, + response: migrationcenter.Group, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[migrationcenter.Group, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_group + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the MigrationCenter server but before it is returned to user code. 
+ + We recommend only using this `post_get_group_with_metadata` + interceptor in new development instead of the `post_get_group` interceptor. + When both interceptors are used, this `post_get_group_with_metadata` interceptor runs after the + `post_get_group` interceptor. The (possibly modified) response returned by + `post_get_group` will be passed to + `post_get_group_with_metadata`. + """ + return response, metadata + def pre_get_import_data_file( self, request: migrationcenter.GetImportDataFileRequest, @@ -1006,12 +1472,35 @@ def post_get_import_data_file( ) -> migrationcenter.ImportDataFile: """Post-rpc interceptor for get_import_data_file - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_import_data_file_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the MigrationCenter server but before - it is returned to user code. + it is returned to user code. This `post_get_import_data_file` interceptor runs + before the `post_get_import_data_file_with_metadata` interceptor. """ return response + def post_get_import_data_file_with_metadata( + self, + response: migrationcenter.ImportDataFile, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[migrationcenter.ImportDataFile, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_import_data_file + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the MigrationCenter server but before it is returned to user code. + + We recommend only using this `post_get_import_data_file_with_metadata` + interceptor in new development instead of the `post_get_import_data_file` interceptor. + When both interceptors are used, this `post_get_import_data_file_with_metadata` interceptor runs after the + `post_get_import_data_file` interceptor. The (possibly modified) response returned by + `post_get_import_data_file` will be passed to + `post_get_import_data_file_with_metadata`. + """ + return response, metadata + def pre_get_import_job( self, request: migrationcenter.GetImportJobRequest, @@ -1031,12 +1520,35 @@ def post_get_import_job( ) -> migrationcenter.ImportJob: """Post-rpc interceptor for get_import_job - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_import_job_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the MigrationCenter server but before - it is returned to user code. + it is returned to user code. This `post_get_import_job` interceptor runs + before the `post_get_import_job_with_metadata` interceptor. """ return response + def post_get_import_job_with_metadata( + self, + response: migrationcenter.ImportJob, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[migrationcenter.ImportJob, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_import_job + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the MigrationCenter server but before it is returned to user code. + + We recommend only using this `post_get_import_job_with_metadata` + interceptor in new development instead of the `post_get_import_job` interceptor. + When both interceptors are used, this `post_get_import_job_with_metadata` interceptor runs after the + `post_get_import_job` interceptor. 
The (possibly modified) response returned by + `post_get_import_job` will be passed to + `post_get_import_job_with_metadata`. + """ + return response, metadata + def pre_get_preference_set( self, request: migrationcenter.GetPreferenceSetRequest, @@ -1056,12 +1568,35 @@ def post_get_preference_set( ) -> migrationcenter.PreferenceSet: """Post-rpc interceptor for get_preference_set - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_preference_set_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the MigrationCenter server but before - it is returned to user code. + it is returned to user code. This `post_get_preference_set` interceptor runs + before the `post_get_preference_set_with_metadata` interceptor. """ return response + def post_get_preference_set_with_metadata( + self, + response: migrationcenter.PreferenceSet, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[migrationcenter.PreferenceSet, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_preference_set + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the MigrationCenter server but before it is returned to user code. + + We recommend only using this `post_get_preference_set_with_metadata` + interceptor in new development instead of the `post_get_preference_set` interceptor. + When both interceptors are used, this `post_get_preference_set_with_metadata` interceptor runs after the + `post_get_preference_set` interceptor. The (possibly modified) response returned by + `post_get_preference_set` will be passed to + `post_get_preference_set_with_metadata`. + """ + return response, metadata + def pre_get_report( self, request: migrationcenter.GetReportRequest, @@ -1081,12 +1616,35 @@ def post_get_report( ) -> migrationcenter.Report: """Post-rpc interceptor for get_report - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_report_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the MigrationCenter server but before - it is returned to user code. + it is returned to user code. This `post_get_report` interceptor runs + before the `post_get_report_with_metadata` interceptor. """ return response + def post_get_report_with_metadata( + self, + response: migrationcenter.Report, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[migrationcenter.Report, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_report + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the MigrationCenter server but before it is returned to user code. + + We recommend only using this `post_get_report_with_metadata` + interceptor in new development instead of the `post_get_report` interceptor. + When both interceptors are used, this `post_get_report_with_metadata` interceptor runs after the + `post_get_report` interceptor. The (possibly modified) response returned by + `post_get_report` will be passed to + `post_get_report_with_metadata`. + """ + return response, metadata + def pre_get_report_config( self, request: migrationcenter.GetReportConfigRequest, @@ -1106,12 +1664,35 @@ def post_get_report_config( ) -> migrationcenter.ReportConfig: """Post-rpc interceptor for get_report_config - Override in a subclass to manipulate the response + DEPRECATED. 
Please use the `post_get_report_config_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the MigrationCenter server but before - it is returned to user code. + it is returned to user code. This `post_get_report_config` interceptor runs + before the `post_get_report_config_with_metadata` interceptor. """ return response + def post_get_report_config_with_metadata( + self, + response: migrationcenter.ReportConfig, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[migrationcenter.ReportConfig, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_report_config + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the MigrationCenter server but before it is returned to user code. + + We recommend only using this `post_get_report_config_with_metadata` + interceptor in new development instead of the `post_get_report_config` interceptor. + When both interceptors are used, this `post_get_report_config_with_metadata` interceptor runs after the + `post_get_report_config` interceptor. The (possibly modified) response returned by + `post_get_report_config` will be passed to + `post_get_report_config_with_metadata`. + """ + return response, metadata + def pre_get_settings( self, request: migrationcenter.GetSettingsRequest, @@ -1131,12 +1712,35 @@ def post_get_settings( ) -> migrationcenter.Settings: """Post-rpc interceptor for get_settings - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_settings_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the MigrationCenter server but before - it is returned to user code. + it is returned to user code. This `post_get_settings` interceptor runs + before the `post_get_settings_with_metadata` interceptor. """ return response + def post_get_settings_with_metadata( + self, + response: migrationcenter.Settings, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[migrationcenter.Settings, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_settings + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the MigrationCenter server but before it is returned to user code. + + We recommend only using this `post_get_settings_with_metadata` + interceptor in new development instead of the `post_get_settings` interceptor. + When both interceptors are used, this `post_get_settings_with_metadata` interceptor runs after the + `post_get_settings` interceptor. The (possibly modified) response returned by + `post_get_settings` will be passed to + `post_get_settings_with_metadata`. + """ + return response, metadata + def pre_get_source( self, request: migrationcenter.GetSourceRequest, @@ -1156,12 +1760,35 @@ def post_get_source( ) -> migrationcenter.Source: """Post-rpc interceptor for get_source - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_source_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the MigrationCenter server but before - it is returned to user code. + it is returned to user code. This `post_get_source` interceptor runs + before the `post_get_source_with_metadata` interceptor. 
""" return response + def post_get_source_with_metadata( + self, + response: migrationcenter.Source, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[migrationcenter.Source, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_source + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the MigrationCenter server but before it is returned to user code. + + We recommend only using this `post_get_source_with_metadata` + interceptor in new development instead of the `post_get_source` interceptor. + When both interceptors are used, this `post_get_source_with_metadata` interceptor runs after the + `post_get_source` interceptor. The (possibly modified) response returned by + `post_get_source` will be passed to + `post_get_source_with_metadata`. + """ + return response, metadata + def pre_list_assets( self, request: migrationcenter.ListAssetsRequest, @@ -1181,11 +1808,36 @@ def post_list_assets( ) -> migrationcenter.ListAssetsResponse: """Post-rpc interceptor for list_assets - Override in a subclass to manipulate the response - after it is returned by the MigrationCenter server but before - it is returned to user code. + DEPRECATED. Please use the `post_list_assets_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the MigrationCenter server but before + it is returned to user code. This `post_list_assets` interceptor runs + before the `post_list_assets_with_metadata` interceptor. + """ + return response + + def post_list_assets_with_metadata( + self, + response: migrationcenter.ListAssetsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + migrationcenter.ListAssetsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_assets + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the MigrationCenter server but before it is returned to user code. + + We recommend only using this `post_list_assets_with_metadata` + interceptor in new development instead of the `post_list_assets` interceptor. + When both interceptors are used, this `post_list_assets_with_metadata` interceptor runs after the + `post_list_assets` interceptor. The (possibly modified) response returned by + `post_list_assets` will be passed to + `post_list_assets_with_metadata`. """ - return response + return response, metadata def pre_list_error_frames( self, @@ -1206,12 +1858,37 @@ def post_list_error_frames( ) -> migrationcenter.ListErrorFramesResponse: """Post-rpc interceptor for list_error_frames - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_error_frames_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the MigrationCenter server but before - it is returned to user code. + it is returned to user code. This `post_list_error_frames` interceptor runs + before the `post_list_error_frames_with_metadata` interceptor. 
""" return response + def post_list_error_frames_with_metadata( + self, + response: migrationcenter.ListErrorFramesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + migrationcenter.ListErrorFramesResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_error_frames + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the MigrationCenter server but before it is returned to user code. + + We recommend only using this `post_list_error_frames_with_metadata` + interceptor in new development instead of the `post_list_error_frames` interceptor. + When both interceptors are used, this `post_list_error_frames_with_metadata` interceptor runs after the + `post_list_error_frames` interceptor. The (possibly modified) response returned by + `post_list_error_frames` will be passed to + `post_list_error_frames_with_metadata`. + """ + return response, metadata + def pre_list_groups( self, request: migrationcenter.ListGroupsRequest, @@ -1231,12 +1908,37 @@ def post_list_groups( ) -> migrationcenter.ListGroupsResponse: """Post-rpc interceptor for list_groups - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_groups_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the MigrationCenter server but before - it is returned to user code. + it is returned to user code. This `post_list_groups` interceptor runs + before the `post_list_groups_with_metadata` interceptor. """ return response + def post_list_groups_with_metadata( + self, + response: migrationcenter.ListGroupsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + migrationcenter.ListGroupsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_groups + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the MigrationCenter server but before it is returned to user code. + + We recommend only using this `post_list_groups_with_metadata` + interceptor in new development instead of the `post_list_groups` interceptor. + When both interceptors are used, this `post_list_groups_with_metadata` interceptor runs after the + `post_list_groups` interceptor. The (possibly modified) response returned by + `post_list_groups` will be passed to + `post_list_groups_with_metadata`. + """ + return response, metadata + def pre_list_import_data_files( self, request: migrationcenter.ListImportDataFilesRequest, @@ -1257,12 +1959,38 @@ def post_list_import_data_files( ) -> migrationcenter.ListImportDataFilesResponse: """Post-rpc interceptor for list_import_data_files - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_import_data_files_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the MigrationCenter server but before - it is returned to user code. + it is returned to user code. This `post_list_import_data_files` interceptor runs + before the `post_list_import_data_files_with_metadata` interceptor. 
""" return response + def post_list_import_data_files_with_metadata( + self, + response: migrationcenter.ListImportDataFilesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + migrationcenter.ListImportDataFilesResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_import_data_files + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the MigrationCenter server but before it is returned to user code. + + We recommend only using this `post_list_import_data_files_with_metadata` + interceptor in new development instead of the `post_list_import_data_files` interceptor. + When both interceptors are used, this `post_list_import_data_files_with_metadata` interceptor runs after the + `post_list_import_data_files` interceptor. The (possibly modified) response returned by + `post_list_import_data_files` will be passed to + `post_list_import_data_files_with_metadata`. + """ + return response, metadata + def pre_list_import_jobs( self, request: migrationcenter.ListImportJobsRequest, @@ -1282,12 +2010,37 @@ def post_list_import_jobs( ) -> migrationcenter.ListImportJobsResponse: """Post-rpc interceptor for list_import_jobs - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_import_jobs_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the MigrationCenter server but before - it is returned to user code. + it is returned to user code. This `post_list_import_jobs` interceptor runs + before the `post_list_import_jobs_with_metadata` interceptor. """ return response + def post_list_import_jobs_with_metadata( + self, + response: migrationcenter.ListImportJobsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + migrationcenter.ListImportJobsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_import_jobs + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the MigrationCenter server but before it is returned to user code. + + We recommend only using this `post_list_import_jobs_with_metadata` + interceptor in new development instead of the `post_list_import_jobs` interceptor. + When both interceptors are used, this `post_list_import_jobs_with_metadata` interceptor runs after the + `post_list_import_jobs` interceptor. The (possibly modified) response returned by + `post_list_import_jobs` will be passed to + `post_list_import_jobs_with_metadata`. + """ + return response, metadata + def pre_list_preference_sets( self, request: migrationcenter.ListPreferenceSetsRequest, @@ -1308,12 +2061,38 @@ def post_list_preference_sets( ) -> migrationcenter.ListPreferenceSetsResponse: """Post-rpc interceptor for list_preference_sets - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_preference_sets_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the MigrationCenter server but before - it is returned to user code. + it is returned to user code. This `post_list_preference_sets` interceptor runs + before the `post_list_preference_sets_with_metadata` interceptor. 
""" return response + def post_list_preference_sets_with_metadata( + self, + response: migrationcenter.ListPreferenceSetsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + migrationcenter.ListPreferenceSetsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_preference_sets + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the MigrationCenter server but before it is returned to user code. + + We recommend only using this `post_list_preference_sets_with_metadata` + interceptor in new development instead of the `post_list_preference_sets` interceptor. + When both interceptors are used, this `post_list_preference_sets_with_metadata` interceptor runs after the + `post_list_preference_sets` interceptor. The (possibly modified) response returned by + `post_list_preference_sets` will be passed to + `post_list_preference_sets_with_metadata`. + """ + return response, metadata + def pre_list_report_configs( self, request: migrationcenter.ListReportConfigsRequest, @@ -1334,12 +2113,38 @@ def post_list_report_configs( ) -> migrationcenter.ListReportConfigsResponse: """Post-rpc interceptor for list_report_configs - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_report_configs_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the MigrationCenter server but before - it is returned to user code. + it is returned to user code. This `post_list_report_configs` interceptor runs + before the `post_list_report_configs_with_metadata` interceptor. """ return response + def post_list_report_configs_with_metadata( + self, + response: migrationcenter.ListReportConfigsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + migrationcenter.ListReportConfigsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_report_configs + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the MigrationCenter server but before it is returned to user code. + + We recommend only using this `post_list_report_configs_with_metadata` + interceptor in new development instead of the `post_list_report_configs` interceptor. + When both interceptors are used, this `post_list_report_configs_with_metadata` interceptor runs after the + `post_list_report_configs` interceptor. The (possibly modified) response returned by + `post_list_report_configs` will be passed to + `post_list_report_configs_with_metadata`. + """ + return response, metadata + def pre_list_reports( self, request: migrationcenter.ListReportsRequest, @@ -1359,12 +2164,37 @@ def post_list_reports( ) -> migrationcenter.ListReportsResponse: """Post-rpc interceptor for list_reports - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_reports_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the MigrationCenter server but before - it is returned to user code. + it is returned to user code. This `post_list_reports` interceptor runs + before the `post_list_reports_with_metadata` interceptor. 
""" return response + def post_list_reports_with_metadata( + self, + response: migrationcenter.ListReportsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + migrationcenter.ListReportsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_reports + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the MigrationCenter server but before it is returned to user code. + + We recommend only using this `post_list_reports_with_metadata` + interceptor in new development instead of the `post_list_reports` interceptor. + When both interceptors are used, this `post_list_reports_with_metadata` interceptor runs after the + `post_list_reports` interceptor. The (possibly modified) response returned by + `post_list_reports` will be passed to + `post_list_reports_with_metadata`. + """ + return response, metadata + def pre_list_sources( self, request: migrationcenter.ListSourcesRequest, @@ -1384,12 +2214,37 @@ def post_list_sources( ) -> migrationcenter.ListSourcesResponse: """Post-rpc interceptor for list_sources - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_sources_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the MigrationCenter server but before - it is returned to user code. + it is returned to user code. This `post_list_sources` interceptor runs + before the `post_list_sources_with_metadata` interceptor. """ return response + def post_list_sources_with_metadata( + self, + response: migrationcenter.ListSourcesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + migrationcenter.ListSourcesResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_sources + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the MigrationCenter server but before it is returned to user code. + + We recommend only using this `post_list_sources_with_metadata` + interceptor in new development instead of the `post_list_sources` interceptor. + When both interceptors are used, this `post_list_sources_with_metadata` interceptor runs after the + `post_list_sources` interceptor. The (possibly modified) response returned by + `post_list_sources` will be passed to + `post_list_sources_with_metadata`. + """ + return response, metadata + def pre_remove_assets_from_group( self, request: migrationcenter.RemoveAssetsFromGroupRequest, @@ -1410,12 +2265,35 @@ def post_remove_assets_from_group( ) -> operations_pb2.Operation: """Post-rpc interceptor for remove_assets_from_group - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_remove_assets_from_group_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the MigrationCenter server but before - it is returned to user code. + it is returned to user code. This `post_remove_assets_from_group` interceptor runs + before the `post_remove_assets_from_group_with_metadata` interceptor. 
""" return response + def post_remove_assets_from_group_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for remove_assets_from_group + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the MigrationCenter server but before it is returned to user code. + + We recommend only using this `post_remove_assets_from_group_with_metadata` + interceptor in new development instead of the `post_remove_assets_from_group` interceptor. + When both interceptors are used, this `post_remove_assets_from_group_with_metadata` interceptor runs after the + `post_remove_assets_from_group` interceptor. The (possibly modified) response returned by + `post_remove_assets_from_group` will be passed to + `post_remove_assets_from_group_with_metadata`. + """ + return response, metadata + def pre_report_asset_frames( self, request: migrationcenter.ReportAssetFramesRequest, @@ -1436,12 +2314,38 @@ def post_report_asset_frames( ) -> migrationcenter.ReportAssetFramesResponse: """Post-rpc interceptor for report_asset_frames - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_report_asset_frames_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the MigrationCenter server but before - it is returned to user code. + it is returned to user code. This `post_report_asset_frames` interceptor runs + before the `post_report_asset_frames_with_metadata` interceptor. """ return response + def post_report_asset_frames_with_metadata( + self, + response: migrationcenter.ReportAssetFramesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + migrationcenter.ReportAssetFramesResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for report_asset_frames + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the MigrationCenter server but before it is returned to user code. + + We recommend only using this `post_report_asset_frames_with_metadata` + interceptor in new development instead of the `post_report_asset_frames` interceptor. + When both interceptors are used, this `post_report_asset_frames_with_metadata` interceptor runs after the + `post_report_asset_frames` interceptor. The (possibly modified) response returned by + `post_report_asset_frames` will be passed to + `post_report_asset_frames_with_metadata`. + """ + return response, metadata + def pre_run_import_job( self, request: migrationcenter.RunImportJobRequest, @@ -1461,12 +2365,35 @@ def post_run_import_job( ) -> operations_pb2.Operation: """Post-rpc interceptor for run_import_job - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_run_import_job_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the MigrationCenter server but before - it is returned to user code. + it is returned to user code. This `post_run_import_job` interceptor runs + before the `post_run_import_job_with_metadata` interceptor. 
""" return response + def post_run_import_job_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for run_import_job + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the MigrationCenter server but before it is returned to user code. + + We recommend only using this `post_run_import_job_with_metadata` + interceptor in new development instead of the `post_run_import_job` interceptor. + When both interceptors are used, this `post_run_import_job_with_metadata` interceptor runs after the + `post_run_import_job` interceptor. The (possibly modified) response returned by + `post_run_import_job` will be passed to + `post_run_import_job_with_metadata`. + """ + return response, metadata + def pre_update_asset( self, request: migrationcenter.UpdateAssetRequest, @@ -1486,12 +2413,35 @@ def post_update_asset( ) -> migrationcenter.Asset: """Post-rpc interceptor for update_asset - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_asset_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the MigrationCenter server but before - it is returned to user code. + it is returned to user code. This `post_update_asset` interceptor runs + before the `post_update_asset_with_metadata` interceptor. """ return response + def post_update_asset_with_metadata( + self, + response: migrationcenter.Asset, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[migrationcenter.Asset, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_asset + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the MigrationCenter server but before it is returned to user code. + + We recommend only using this `post_update_asset_with_metadata` + interceptor in new development instead of the `post_update_asset` interceptor. + When both interceptors are used, this `post_update_asset_with_metadata` interceptor runs after the + `post_update_asset` interceptor. The (possibly modified) response returned by + `post_update_asset` will be passed to + `post_update_asset_with_metadata`. + """ + return response, metadata + def pre_update_group( self, request: migrationcenter.UpdateGroupRequest, @@ -1511,12 +2461,35 @@ def post_update_group( ) -> operations_pb2.Operation: """Post-rpc interceptor for update_group - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_group_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the MigrationCenter server but before - it is returned to user code. + it is returned to user code. This `post_update_group` interceptor runs + before the `post_update_group_with_metadata` interceptor. """ return response + def post_update_group_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_group + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the MigrationCenter server but before it is returned to user code. 
+ + We recommend only using this `post_update_group_with_metadata` + interceptor in new development instead of the `post_update_group` interceptor. + When both interceptors are used, this `post_update_group_with_metadata` interceptor runs after the + `post_update_group` interceptor. The (possibly modified) response returned by + `post_update_group` will be passed to + `post_update_group_with_metadata`. + """ + return response, metadata + def pre_update_import_job( self, request: migrationcenter.UpdateImportJobRequest, @@ -1536,12 +2509,35 @@ def post_update_import_job( ) -> operations_pb2.Operation: """Post-rpc interceptor for update_import_job - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_import_job_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the MigrationCenter server but before - it is returned to user code. + it is returned to user code. This `post_update_import_job` interceptor runs + before the `post_update_import_job_with_metadata` interceptor. """ return response + def post_update_import_job_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_import_job + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the MigrationCenter server but before it is returned to user code. + + We recommend only using this `post_update_import_job_with_metadata` + interceptor in new development instead of the `post_update_import_job` interceptor. + When both interceptors are used, this `post_update_import_job_with_metadata` interceptor runs after the + `post_update_import_job` interceptor. The (possibly modified) response returned by + `post_update_import_job` will be passed to + `post_update_import_job_with_metadata`. + """ + return response, metadata + def pre_update_preference_set( self, request: migrationcenter.UpdatePreferenceSetRequest, @@ -1562,12 +2558,35 @@ def post_update_preference_set( ) -> operations_pb2.Operation: """Post-rpc interceptor for update_preference_set - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_preference_set_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the MigrationCenter server but before - it is returned to user code. + it is returned to user code. This `post_update_preference_set` interceptor runs + before the `post_update_preference_set_with_metadata` interceptor. """ return response + def post_update_preference_set_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_preference_set + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the MigrationCenter server but before it is returned to user code. + + We recommend only using this `post_update_preference_set_with_metadata` + interceptor in new development instead of the `post_update_preference_set` interceptor. + When both interceptors are used, this `post_update_preference_set_with_metadata` interceptor runs after the + `post_update_preference_set` interceptor. 
The (possibly modified) response returned by + `post_update_preference_set` will be passed to + `post_update_preference_set_with_metadata`. + """ + return response, metadata + def pre_update_settings( self, request: migrationcenter.UpdateSettingsRequest, @@ -1587,12 +2606,35 @@ def post_update_settings( ) -> operations_pb2.Operation: """Post-rpc interceptor for update_settings - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_settings_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the MigrationCenter server but before - it is returned to user code. + it is returned to user code. This `post_update_settings` interceptor runs + before the `post_update_settings_with_metadata` interceptor. """ return response + def post_update_settings_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_settings + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the MigrationCenter server but before it is returned to user code. + + We recommend only using this `post_update_settings_with_metadata` + interceptor in new development instead of the `post_update_settings` interceptor. + When both interceptors are used, this `post_update_settings_with_metadata` interceptor runs after the + `post_update_settings` interceptor. The (possibly modified) response returned by + `post_update_settings` will be passed to + `post_update_settings_with_metadata`. + """ + return response, metadata + def pre_update_source( self, request: migrationcenter.UpdateSourceRequest, @@ -1612,12 +2654,35 @@ def post_update_source( ) -> operations_pb2.Operation: """Post-rpc interceptor for update_source - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_source_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the MigrationCenter server but before - it is returned to user code. + it is returned to user code. This `post_update_source` interceptor runs + before the `post_update_source_with_metadata` interceptor. """ return response + def post_update_source_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_source + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the MigrationCenter server but before it is returned to user code. + + We recommend only using this `post_update_source_with_metadata` + interceptor in new development instead of the `post_update_source` interceptor. + When both interceptors are used, this `post_update_source_with_metadata` interceptor runs after the + `post_update_source` interceptor. The (possibly modified) response returned by + `post_update_source` will be passed to + `post_update_source_with_metadata`. + """ + return response, metadata + def pre_validate_import_job( self, request: migrationcenter.ValidateImportJobRequest, @@ -1638,12 +2703,35 @@ def post_validate_import_job( ) -> operations_pb2.Operation: """Post-rpc interceptor for validate_import_job - Override in a subclass to manipulate the response + DEPRECATED. 
Please use the `post_validate_import_job_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the MigrationCenter server but before - it is returned to user code. + it is returned to user code. This `post_validate_import_job` interceptor runs + before the `post_validate_import_job_with_metadata` interceptor. """ return response + def post_validate_import_job_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for validate_import_job + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the MigrationCenter server but before it is returned to user code. + + We recommend only using this `post_validate_import_job_with_metadata` + interceptor in new development instead of the `post_validate_import_job` interceptor. + When both interceptors are used, this `post_validate_import_job_with_metadata` interceptor runs after the + `post_validate_import_job` interceptor. The (possibly modified) response returned by + `post_validate_import_job` will be passed to + `post_validate_import_job_with_metadata`. + """ + return response, metadata + def pre_get_location( self, request: locations_pb2.GetLocationRequest, @@ -2057,6 +3145,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_add_assets_to_group(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_add_assets_to_group_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2211,6 +3303,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_aggregate_assets_values(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_aggregate_assets_values_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2479,6 +3575,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_batch_update_assets(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_batch_update_assets_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2628,6 +3728,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_group(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_group_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2778,6 +3882,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_import_data_file(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_import_data_file_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -2927,6 
+4035,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_import_job(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_import_job_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3077,6 +4189,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_preference_set(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_preference_set_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3224,6 +4340,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_report(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_report_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3374,6 +4494,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_report_config(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_report_config_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3521,6 +4645,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_source(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_source_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3768,6 +4896,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_group(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_group_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -3912,6 +5044,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_import_data_file(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_import_data_file_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4055,6 +5191,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_import_job(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_import_job_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4199,6 +5339,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_preference_set(resp) + response_metadata = 
[(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_preference_set_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4340,6 +5484,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_report(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_report_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4484,6 +5632,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_report_config(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_report_config_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4625,6 +5777,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_source(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_source_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4772,6 +5928,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_asset(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_asset_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -4914,6 +6074,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_error_frame(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_error_frame_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -5064,6 +6228,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_group(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_group_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -5209,6 +6377,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_import_data_file(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_import_data_file_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -5352,6 +6524,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_import_job(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_import_job_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( 
logging.DEBUG ): # pragma: NO COVER @@ -5496,6 +6672,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_preference_set(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_preference_set_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -5640,6 +6820,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_report(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_report_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -5784,6 +6968,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_report_config(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_report_config_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -5926,6 +7114,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_settings(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_settings_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -6071,6 +7263,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_source(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_source_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -6212,6 +7408,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_assets(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_assets_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -6357,6 +7557,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_error_frames(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_error_frames_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -6499,6 +7703,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_groups(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_groups_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -6647,6 +7855,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_import_data_files(resp) + 
response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_import_data_files_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -6791,6 +8003,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_import_jobs(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_import_jobs_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -6938,6 +8154,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_preference_sets(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_preference_sets_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -7085,6 +8305,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_report_configs(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_report_configs_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -7227,6 +8451,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_reports(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_reports_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -7369,6 +8597,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_sources(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_sources_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -7524,6 +8756,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_remove_assets_from_group(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_remove_assets_from_group_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -7674,6 +8910,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_report_asset_frames(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_report_asset_frames_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -7823,6 +9063,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_run_import_job(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_run_import_job_with_metadata( + resp, 
response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -7972,6 +9216,10 @@ def __call__( json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_asset(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_asset_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -8119,6 +9367,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_group(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_group_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -8268,6 +9520,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_import_job(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_import_job_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -8418,6 +9674,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_preference_set(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_preference_set_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -8565,6 +9825,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_settings(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_settings_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -8712,6 +9976,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_source(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_source_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER @@ -8862,6 +10130,10 @@ def __call__( json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_validate_import_job(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_validate_import_job_with_metadata( + resp, response_metadata + ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER diff --git a/packages/google-cloud-migrationcenter/samples/generated_samples/snippet_metadata_google.cloud.migrationcenter.v1.json b/packages/google-cloud-migrationcenter/samples/generated_samples/snippet_metadata_google.cloud.migrationcenter.v1.json index bd4d87aa6973..b4c992410014 100644 --- a/packages/google-cloud-migrationcenter/samples/generated_samples/snippet_metadata_google.cloud.migrationcenter.v1.json +++ 
b/packages/google-cloud-migrationcenter/samples/generated_samples/snippet_metadata_google.cloud.migrationcenter.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-migrationcenter", - "version": "0.1.12" + "version": "0.1.13" }, "snippets": [ { diff --git a/packages/google-cloud-migrationcenter/tests/unit/gapic/migrationcenter_v1/test_migration_center.py b/packages/google-cloud-migrationcenter/tests/unit/gapic/migrationcenter_v1/test_migration_center.py index c0b2f35815fc..c257e542d230 100644 --- a/packages/google-cloud-migrationcenter/tests/unit/gapic/migrationcenter_v1/test_migration_center.py +++ b/packages/google-cloud-migrationcenter/tests/unit/gapic/migrationcenter_v1/test_migration_center.py @@ -76,6 +76,13 @@ ) from google.cloud.migrationcenter_v1.types import migrationcenter +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER @@ -334,6 +341,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = MigrationCenterClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = MigrationCenterClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -31579,10 +31629,13 @@ def test_list_assets_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.MigrationCenterRestInterceptor, "post_list_assets" ) as post, mock.patch.object( + transports.MigrationCenterRestInterceptor, "post_list_assets_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.MigrationCenterRestInterceptor, "pre_list_assets" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = migrationcenter.ListAssetsRequest.pb( migrationcenter.ListAssetsRequest() ) @@ -31608,6 +31661,7 @@ def test_list_assets_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = migrationcenter.ListAssetsResponse() + post_with_metadata.return_value = migrationcenter.ListAssetsResponse(), metadata client.list_assets( request, @@ -31619,6 +31673,7 @@ def 
test_list_assets_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_asset_rest_bad_request(request_type=migrationcenter.GetAssetRequest): @@ -31705,10 +31760,13 @@ def test_get_asset_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.MigrationCenterRestInterceptor, "post_get_asset" ) as post, mock.patch.object( + transports.MigrationCenterRestInterceptor, "post_get_asset_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.MigrationCenterRestInterceptor, "pre_get_asset" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = migrationcenter.GetAssetRequest.pb( migrationcenter.GetAssetRequest() ) @@ -31732,6 +31790,7 @@ def test_get_asset_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = migrationcenter.Asset() + post_with_metadata.return_value = migrationcenter.Asset(), metadata client.get_asset( request, @@ -31743,6 +31802,7 @@ def test_get_asset_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_asset_rest_bad_request(request_type=migrationcenter.UpdateAssetRequest): @@ -32166,10 +32226,13 @@ def test_update_asset_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.MigrationCenterRestInterceptor, "post_update_asset" ) as post, mock.patch.object( + transports.MigrationCenterRestInterceptor, "post_update_asset_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.MigrationCenterRestInterceptor, "pre_update_asset" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = migrationcenter.UpdateAssetRequest.pb( migrationcenter.UpdateAssetRequest() ) @@ -32193,6 +32256,7 @@ def test_update_asset_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = migrationcenter.Asset() + post_with_metadata.return_value = migrationcenter.Asset(), metadata client.update_asset( request, @@ -32204,6 +32268,7 @@ def test_update_asset_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_batch_update_assets_rest_bad_request( @@ -32285,10 +32350,14 @@ def test_batch_update_assets_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.MigrationCenterRestInterceptor, "post_batch_update_assets" ) as post, mock.patch.object( + transports.MigrationCenterRestInterceptor, + "post_batch_update_assets_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.MigrationCenterRestInterceptor, "pre_batch_update_assets" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = migrationcenter.BatchUpdateAssetsRequest.pb( migrationcenter.BatchUpdateAssetsRequest() ) @@ -32314,6 +32383,10 @@ def test_batch_update_assets_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = migrationcenter.BatchUpdateAssetsResponse() + post_with_metadata.return_value = ( + migrationcenter.BatchUpdateAssetsResponse(), + metadata, + ) client.batch_update_assets( request, @@ -32325,6 +32398,7 @@ def test_batch_update_assets_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() 
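Note (editorial aside, not part of the generated diff): the pattern these interceptor tests exercise is the one described in the docstrings earlier in this change — each new `post_<rpc>_with_metadata` hook receives the decoded response plus the HTTP response headers converted to metadata pairs, runs after the corresponding (now deprecated) `post_<rpc>` hook, and must return the `(response, metadata)` tuple. A minimal sketch of a user-defined interceptor built on this surface, assuming the import paths already used by the test file in this diff (`transports.MigrationCenterRestInterceptor`, `migrationcenter` types); the class name, print statement, and the commented attachment step are illustrative only, not part of the generated code:

from typing import Sequence, Tuple, Union

from google.cloud.migrationcenter_v1.services.migration_center import transports
from google.cloud.migrationcenter_v1.types import migrationcenter


class MetadataLoggingInterceptor(transports.MigrationCenterRestInterceptor):
    """Hypothetical interceptor that inspects response metadata for update_asset."""

    def post_update_asset_with_metadata(
        self,
        response: migrationcenter.Asset,
        metadata: Sequence[Tuple[str, Union[str, bytes]]],
    ) -> Tuple[migrationcenter.Asset, Sequence[Tuple[str, Union[str, bytes]]]]:
        # Runs after the deprecated post_update_asset hook; `metadata` holds the
        # HTTP response headers as (key, value) pairs, as built in the transport
        # __call__ changes above.
        for key, value in metadata:
            print(f"update_asset response header {key}: {value}")
        return response, metadata


# Attaching the interceptor would follow the usual generated REST-client pattern
# (sketch, under the same assumptions):
#   transport = transports.MigrationCenterRestTransport(interceptor=MetadataLoggingInterceptor())
#   client = migrationcenter_v1.MigrationCenterClient(transport=transport)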
def test_delete_asset_rest_bad_request(request_type=migrationcenter.DeleteAssetRequest): @@ -32922,10 +32996,14 @@ def test_report_asset_frames_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.MigrationCenterRestInterceptor, "post_report_asset_frames" ) as post, mock.patch.object( + transports.MigrationCenterRestInterceptor, + "post_report_asset_frames_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.MigrationCenterRestInterceptor, "pre_report_asset_frames" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = migrationcenter.ReportAssetFramesRequest.pb( migrationcenter.ReportAssetFramesRequest() ) @@ -32951,6 +33029,10 @@ def test_report_asset_frames_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = migrationcenter.ReportAssetFramesResponse() + post_with_metadata.return_value = ( + migrationcenter.ReportAssetFramesResponse(), + metadata, + ) client.report_asset_frames( request, @@ -32962,6 +33044,7 @@ def test_report_asset_frames_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_aggregate_assets_values_rest_bad_request( @@ -33043,10 +33126,14 @@ def test_aggregate_assets_values_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.MigrationCenterRestInterceptor, "post_aggregate_assets_values" ) as post, mock.patch.object( + transports.MigrationCenterRestInterceptor, + "post_aggregate_assets_values_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.MigrationCenterRestInterceptor, "pre_aggregate_assets_values" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = migrationcenter.AggregateAssetsValuesRequest.pb( migrationcenter.AggregateAssetsValuesRequest() ) @@ -33072,6 +33159,10 @@ def test_aggregate_assets_values_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = migrationcenter.AggregateAssetsValuesResponse() + post_with_metadata.return_value = ( + migrationcenter.AggregateAssetsValuesResponse(), + metadata, + ) client.aggregate_assets_values( request, @@ -33083,6 +33174,7 @@ def test_aggregate_assets_values_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_import_job_rest_bad_request( @@ -33265,10 +33357,14 @@ def test_create_import_job_rest_interceptors(null_interceptor): ), mock.patch.object( transports.MigrationCenterRestInterceptor, "post_create_import_job" ) as post, mock.patch.object( + transports.MigrationCenterRestInterceptor, + "post_create_import_job_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.MigrationCenterRestInterceptor, "pre_create_import_job" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = migrationcenter.CreateImportJobRequest.pb( migrationcenter.CreateImportJobRequest() ) @@ -33292,6 +33388,7 @@ def test_create_import_job_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_import_job( request, @@ -33303,6 +33400,7 @@ def test_create_import_job_rest_interceptors(null_interceptor): pre.assert_called_once() 
post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_import_jobs_rest_bad_request( @@ -33389,10 +33487,13 @@ def test_list_import_jobs_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.MigrationCenterRestInterceptor, "post_list_import_jobs" ) as post, mock.patch.object( + transports.MigrationCenterRestInterceptor, "post_list_import_jobs_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.MigrationCenterRestInterceptor, "pre_list_import_jobs" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = migrationcenter.ListImportJobsRequest.pb( migrationcenter.ListImportJobsRequest() ) @@ -33418,6 +33519,10 @@ def test_list_import_jobs_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = migrationcenter.ListImportJobsResponse() + post_with_metadata.return_value = ( + migrationcenter.ListImportJobsResponse(), + metadata, + ) client.list_import_jobs( request, @@ -33429,6 +33534,7 @@ def test_list_import_jobs_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_import_job_rest_bad_request( @@ -33522,10 +33628,13 @@ def test_get_import_job_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.MigrationCenterRestInterceptor, "post_get_import_job" ) as post, mock.patch.object( + transports.MigrationCenterRestInterceptor, "post_get_import_job_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.MigrationCenterRestInterceptor, "pre_get_import_job" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = migrationcenter.GetImportJobRequest.pb( migrationcenter.GetImportJobRequest() ) @@ -33549,6 +33658,7 @@ def test_get_import_job_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = migrationcenter.ImportJob() + post_with_metadata.return_value = migrationcenter.ImportJob(), metadata client.get_import_job( request, @@ -33560,6 +33670,7 @@ def test_get_import_job_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_import_job_rest_bad_request( @@ -33640,10 +33751,14 @@ def test_delete_import_job_rest_interceptors(null_interceptor): ), mock.patch.object( transports.MigrationCenterRestInterceptor, "post_delete_import_job" ) as post, mock.patch.object( + transports.MigrationCenterRestInterceptor, + "post_delete_import_job_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.MigrationCenterRestInterceptor, "pre_delete_import_job" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = migrationcenter.DeleteImportJobRequest.pb( migrationcenter.DeleteImportJobRequest() ) @@ -33667,6 +33782,7 @@ def test_delete_import_job_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.delete_import_job( request, @@ -33678,6 +33794,7 @@ def test_delete_import_job_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_import_job_rest_bad_request( @@ -33864,10 +33981,14 @@ def 
test_update_import_job_rest_interceptors(null_interceptor): ), mock.patch.object( transports.MigrationCenterRestInterceptor, "post_update_import_job" ) as post, mock.patch.object( + transports.MigrationCenterRestInterceptor, + "post_update_import_job_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.MigrationCenterRestInterceptor, "pre_update_import_job" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = migrationcenter.UpdateImportJobRequest.pb( migrationcenter.UpdateImportJobRequest() ) @@ -33891,6 +34012,7 @@ def test_update_import_job_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.update_import_job( request, @@ -33902,6 +34024,7 @@ def test_update_import_job_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_validate_import_job_rest_bad_request( @@ -33982,10 +34105,14 @@ def test_validate_import_job_rest_interceptors(null_interceptor): ), mock.patch.object( transports.MigrationCenterRestInterceptor, "post_validate_import_job" ) as post, mock.patch.object( + transports.MigrationCenterRestInterceptor, + "post_validate_import_job_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.MigrationCenterRestInterceptor, "pre_validate_import_job" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = migrationcenter.ValidateImportJobRequest.pb( migrationcenter.ValidateImportJobRequest() ) @@ -34009,6 +34136,7 @@ def test_validate_import_job_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.validate_import_job( request, @@ -34020,6 +34148,7 @@ def test_validate_import_job_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_run_import_job_rest_bad_request( @@ -34100,10 +34229,13 @@ def test_run_import_job_rest_interceptors(null_interceptor): ), mock.patch.object( transports.MigrationCenterRestInterceptor, "post_run_import_job" ) as post, mock.patch.object( + transports.MigrationCenterRestInterceptor, "post_run_import_job_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.MigrationCenterRestInterceptor, "pre_run_import_job" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = migrationcenter.RunImportJobRequest.pb( migrationcenter.RunImportJobRequest() ) @@ -34127,6 +34259,7 @@ def test_run_import_job_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.run_import_job( request, @@ -34138,6 +34271,7 @@ def test_run_import_job_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_import_data_file_rest_bad_request( @@ -34235,10 +34369,14 @@ def test_get_import_data_file_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.MigrationCenterRestInterceptor, "post_get_import_data_file" ) as post, mock.patch.object( + 
transports.MigrationCenterRestInterceptor, + "post_get_import_data_file_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.MigrationCenterRestInterceptor, "pre_get_import_data_file" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = migrationcenter.GetImportDataFileRequest.pb( migrationcenter.GetImportDataFileRequest() ) @@ -34264,6 +34402,7 @@ def test_get_import_data_file_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = migrationcenter.ImportDataFile() + post_with_metadata.return_value = migrationcenter.ImportDataFile(), metadata client.get_import_data_file( request, @@ -34275,6 +34414,7 @@ def test_get_import_data_file_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_import_data_files_rest_bad_request( @@ -34361,10 +34501,14 @@ def test_list_import_data_files_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.MigrationCenterRestInterceptor, "post_list_import_data_files" ) as post, mock.patch.object( + transports.MigrationCenterRestInterceptor, + "post_list_import_data_files_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.MigrationCenterRestInterceptor, "pre_list_import_data_files" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = migrationcenter.ListImportDataFilesRequest.pb( migrationcenter.ListImportDataFilesRequest() ) @@ -34390,6 +34534,10 @@ def test_list_import_data_files_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = migrationcenter.ListImportDataFilesResponse() + post_with_metadata.return_value = ( + migrationcenter.ListImportDataFilesResponse(), + metadata, + ) client.list_import_data_files( request, @@ -34401,6 +34549,7 @@ def test_list_import_data_files_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_import_data_file_rest_bad_request( @@ -34562,10 +34711,14 @@ def test_create_import_data_file_rest_interceptors(null_interceptor): ), mock.patch.object( transports.MigrationCenterRestInterceptor, "post_create_import_data_file" ) as post, mock.patch.object( + transports.MigrationCenterRestInterceptor, + "post_create_import_data_file_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.MigrationCenterRestInterceptor, "pre_create_import_data_file" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = migrationcenter.CreateImportDataFileRequest.pb( migrationcenter.CreateImportDataFileRequest() ) @@ -34589,6 +34742,7 @@ def test_create_import_data_file_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_import_data_file( request, @@ -34600,6 +34754,7 @@ def test_create_import_data_file_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_import_data_file_rest_bad_request( @@ -34684,10 +34839,14 @@ def test_delete_import_data_file_rest_interceptors(null_interceptor): ), mock.patch.object( transports.MigrationCenterRestInterceptor, "post_delete_import_data_file" ) as post, 
mock.patch.object( + transports.MigrationCenterRestInterceptor, + "post_delete_import_data_file_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.MigrationCenterRestInterceptor, "pre_delete_import_data_file" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = migrationcenter.DeleteImportDataFileRequest.pb( migrationcenter.DeleteImportDataFileRequest() ) @@ -34711,6 +34870,7 @@ def test_delete_import_data_file_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.delete_import_data_file( request, @@ -34722,6 +34882,7 @@ def test_delete_import_data_file_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_groups_rest_bad_request(request_type=migrationcenter.ListGroupsRequest): @@ -34806,10 +34967,13 @@ def test_list_groups_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.MigrationCenterRestInterceptor, "post_list_groups" ) as post, mock.patch.object( + transports.MigrationCenterRestInterceptor, "post_list_groups_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.MigrationCenterRestInterceptor, "pre_list_groups" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = migrationcenter.ListGroupsRequest.pb( migrationcenter.ListGroupsRequest() ) @@ -34835,6 +34999,7 @@ def test_list_groups_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = migrationcenter.ListGroupsResponse() + post_with_metadata.return_value = migrationcenter.ListGroupsResponse(), metadata client.list_groups( request, @@ -34846,6 +35011,7 @@ def test_list_groups_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_group_rest_bad_request(request_type=migrationcenter.GetGroupRequest): @@ -34932,10 +35098,13 @@ def test_get_group_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.MigrationCenterRestInterceptor, "post_get_group" ) as post, mock.patch.object( + transports.MigrationCenterRestInterceptor, "post_get_group_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.MigrationCenterRestInterceptor, "pre_get_group" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = migrationcenter.GetGroupRequest.pb( migrationcenter.GetGroupRequest() ) @@ -34959,6 +35128,7 @@ def test_get_group_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = migrationcenter.Group() + post_with_metadata.return_value = migrationcenter.Group(), metadata client.get_group( request, @@ -34970,6 +35140,7 @@ def test_get_group_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_group_rest_bad_request(request_type=migrationcenter.CreateGroupRequest): @@ -35123,10 +35294,13 @@ def test_create_group_rest_interceptors(null_interceptor): ), mock.patch.object( transports.MigrationCenterRestInterceptor, "post_create_group" ) as post, mock.patch.object( + transports.MigrationCenterRestInterceptor, "post_create_group_with_metadata" + ) as post_with_metadata, 
mock.patch.object( transports.MigrationCenterRestInterceptor, "pre_create_group" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = migrationcenter.CreateGroupRequest.pb( migrationcenter.CreateGroupRequest() ) @@ -35150,6 +35324,7 @@ def test_create_group_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_group( request, @@ -35161,6 +35336,7 @@ def test_create_group_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_group_rest_bad_request(request_type=migrationcenter.UpdateGroupRequest): @@ -35318,10 +35494,13 @@ def test_update_group_rest_interceptors(null_interceptor): ), mock.patch.object( transports.MigrationCenterRestInterceptor, "post_update_group" ) as post, mock.patch.object( + transports.MigrationCenterRestInterceptor, "post_update_group_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.MigrationCenterRestInterceptor, "pre_update_group" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = migrationcenter.UpdateGroupRequest.pb( migrationcenter.UpdateGroupRequest() ) @@ -35345,6 +35524,7 @@ def test_update_group_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.update_group( request, @@ -35356,6 +35536,7 @@ def test_update_group_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_group_rest_bad_request(request_type=migrationcenter.DeleteGroupRequest): @@ -35434,10 +35615,13 @@ def test_delete_group_rest_interceptors(null_interceptor): ), mock.patch.object( transports.MigrationCenterRestInterceptor, "post_delete_group" ) as post, mock.patch.object( + transports.MigrationCenterRestInterceptor, "post_delete_group_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.MigrationCenterRestInterceptor, "pre_delete_group" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = migrationcenter.DeleteGroupRequest.pb( migrationcenter.DeleteGroupRequest() ) @@ -35461,6 +35645,7 @@ def test_delete_group_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.delete_group( request, @@ -35472,6 +35657,7 @@ def test_delete_group_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_add_assets_to_group_rest_bad_request( @@ -35552,10 +35738,14 @@ def test_add_assets_to_group_rest_interceptors(null_interceptor): ), mock.patch.object( transports.MigrationCenterRestInterceptor, "post_add_assets_to_group" ) as post, mock.patch.object( + transports.MigrationCenterRestInterceptor, + "post_add_assets_to_group_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.MigrationCenterRestInterceptor, "pre_add_assets_to_group" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = 
migrationcenter.AddAssetsToGroupRequest.pb( migrationcenter.AddAssetsToGroupRequest() ) @@ -35579,6 +35769,7 @@ def test_add_assets_to_group_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.add_assets_to_group( request, @@ -35590,6 +35781,7 @@ def test_add_assets_to_group_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_remove_assets_from_group_rest_bad_request( @@ -35670,10 +35862,14 @@ def test_remove_assets_from_group_rest_interceptors(null_interceptor): ), mock.patch.object( transports.MigrationCenterRestInterceptor, "post_remove_assets_from_group" ) as post, mock.patch.object( + transports.MigrationCenterRestInterceptor, + "post_remove_assets_from_group_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.MigrationCenterRestInterceptor, "pre_remove_assets_from_group" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = migrationcenter.RemoveAssetsFromGroupRequest.pb( migrationcenter.RemoveAssetsFromGroupRequest() ) @@ -35697,6 +35893,7 @@ def test_remove_assets_from_group_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.remove_assets_from_group( request, @@ -35708,6 +35905,7 @@ def test_remove_assets_from_group_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_error_frames_rest_bad_request( @@ -35794,10 +35992,14 @@ def test_list_error_frames_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.MigrationCenterRestInterceptor, "post_list_error_frames" ) as post, mock.patch.object( + transports.MigrationCenterRestInterceptor, + "post_list_error_frames_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.MigrationCenterRestInterceptor, "pre_list_error_frames" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = migrationcenter.ListErrorFramesRequest.pb( migrationcenter.ListErrorFramesRequest() ) @@ -35823,6 +36025,10 @@ def test_list_error_frames_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = migrationcenter.ListErrorFramesResponse() + post_with_metadata.return_value = ( + migrationcenter.ListErrorFramesResponse(), + metadata, + ) client.list_error_frames( request, @@ -35834,6 +36040,7 @@ def test_list_error_frames_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_error_frame_rest_bad_request( @@ -35922,10 +36129,13 @@ def test_get_error_frame_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.MigrationCenterRestInterceptor, "post_get_error_frame" ) as post, mock.patch.object( + transports.MigrationCenterRestInterceptor, "post_get_error_frame_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.MigrationCenterRestInterceptor, "pre_get_error_frame" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = migrationcenter.GetErrorFrameRequest.pb( 
migrationcenter.GetErrorFrameRequest() ) @@ -35949,6 +36159,7 @@ def test_get_error_frame_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = migrationcenter.ErrorFrame() + post_with_metadata.return_value = migrationcenter.ErrorFrame(), metadata client.get_error_frame( request, @@ -35960,6 +36171,7 @@ def test_get_error_frame_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_sources_rest_bad_request(request_type=migrationcenter.ListSourcesRequest): @@ -36044,10 +36256,13 @@ def test_list_sources_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.MigrationCenterRestInterceptor, "post_list_sources" ) as post, mock.patch.object( + transports.MigrationCenterRestInterceptor, "post_list_sources_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.MigrationCenterRestInterceptor, "pre_list_sources" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = migrationcenter.ListSourcesRequest.pb( migrationcenter.ListSourcesRequest() ) @@ -36073,6 +36288,10 @@ def test_list_sources_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = migrationcenter.ListSourcesResponse() + post_with_metadata.return_value = ( + migrationcenter.ListSourcesResponse(), + metadata, + ) client.list_sources( request, @@ -36084,6 +36303,7 @@ def test_list_sources_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_source_rest_bad_request(request_type=migrationcenter.GetSourceRequest): @@ -36182,10 +36402,13 @@ def test_get_source_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.MigrationCenterRestInterceptor, "post_get_source" ) as post, mock.patch.object( + transports.MigrationCenterRestInterceptor, "post_get_source_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.MigrationCenterRestInterceptor, "pre_get_source" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = migrationcenter.GetSourceRequest.pb( migrationcenter.GetSourceRequest() ) @@ -36209,6 +36432,7 @@ def test_get_source_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = migrationcenter.Source() + post_with_metadata.return_value = migrationcenter.Source(), metadata client.get_source( request, @@ -36220,6 +36444,7 @@ def test_get_source_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_source_rest_bad_request( @@ -36380,10 +36605,13 @@ def test_create_source_rest_interceptors(null_interceptor): ), mock.patch.object( transports.MigrationCenterRestInterceptor, "post_create_source" ) as post, mock.patch.object( + transports.MigrationCenterRestInterceptor, "post_create_source_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.MigrationCenterRestInterceptor, "pre_create_source" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = migrationcenter.CreateSourceRequest.pb( migrationcenter.CreateSourceRequest() ) @@ -36407,6 +36635,7 @@ def test_create_source_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = 
operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_source( request, @@ -36418,6 +36647,7 @@ def test_create_source_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_source_rest_bad_request( @@ -36582,10 +36812,13 @@ def test_update_source_rest_interceptors(null_interceptor): ), mock.patch.object( transports.MigrationCenterRestInterceptor, "post_update_source" ) as post, mock.patch.object( + transports.MigrationCenterRestInterceptor, "post_update_source_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.MigrationCenterRestInterceptor, "pre_update_source" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = migrationcenter.UpdateSourceRequest.pb( migrationcenter.UpdateSourceRequest() ) @@ -36609,6 +36842,7 @@ def test_update_source_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.update_source( request, @@ -36620,6 +36854,7 @@ def test_update_source_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_source_rest_bad_request( @@ -36700,10 +36935,13 @@ def test_delete_source_rest_interceptors(null_interceptor): ), mock.patch.object( transports.MigrationCenterRestInterceptor, "post_delete_source" ) as post, mock.patch.object( + transports.MigrationCenterRestInterceptor, "post_delete_source_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.MigrationCenterRestInterceptor, "pre_delete_source" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = migrationcenter.DeleteSourceRequest.pb( migrationcenter.DeleteSourceRequest() ) @@ -36727,6 +36965,7 @@ def test_delete_source_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.delete_source( request, @@ -36738,6 +36977,7 @@ def test_delete_source_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_preference_sets_rest_bad_request( @@ -36824,10 +37064,14 @@ def test_list_preference_sets_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.MigrationCenterRestInterceptor, "post_list_preference_sets" ) as post, mock.patch.object( + transports.MigrationCenterRestInterceptor, + "post_list_preference_sets_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.MigrationCenterRestInterceptor, "pre_list_preference_sets" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = migrationcenter.ListPreferenceSetsRequest.pb( migrationcenter.ListPreferenceSetsRequest() ) @@ -36853,6 +37097,10 @@ def test_list_preference_sets_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = migrationcenter.ListPreferenceSetsResponse() + post_with_metadata.return_value = ( + migrationcenter.ListPreferenceSetsResponse(), + metadata, + ) client.list_preference_sets( request, @@ -36864,6 +37112,7 @@ def 
test_list_preference_sets_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_preference_set_rest_bad_request( @@ -36952,10 +37201,14 @@ def test_get_preference_set_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.MigrationCenterRestInterceptor, "post_get_preference_set" ) as post, mock.patch.object( + transports.MigrationCenterRestInterceptor, + "post_get_preference_set_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.MigrationCenterRestInterceptor, "pre_get_preference_set" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = migrationcenter.GetPreferenceSetRequest.pb( migrationcenter.GetPreferenceSetRequest() ) @@ -36981,6 +37234,7 @@ def test_get_preference_set_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = migrationcenter.PreferenceSet() + post_with_metadata.return_value = migrationcenter.PreferenceSet(), metadata client.get_preference_set( request, @@ -36992,6 +37246,7 @@ def test_get_preference_set_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_preference_set_rest_bad_request( @@ -37177,10 +37432,14 @@ def test_create_preference_set_rest_interceptors(null_interceptor): ), mock.patch.object( transports.MigrationCenterRestInterceptor, "post_create_preference_set" ) as post, mock.patch.object( + transports.MigrationCenterRestInterceptor, + "post_create_preference_set_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.MigrationCenterRestInterceptor, "pre_create_preference_set" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = migrationcenter.CreatePreferenceSetRequest.pb( migrationcenter.CreatePreferenceSetRequest() ) @@ -37204,6 +37463,7 @@ def test_create_preference_set_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_preference_set( request, @@ -37215,6 +37475,7 @@ def test_create_preference_set_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_preference_set_rest_bad_request( @@ -37408,10 +37669,14 @@ def test_update_preference_set_rest_interceptors(null_interceptor): ), mock.patch.object( transports.MigrationCenterRestInterceptor, "post_update_preference_set" ) as post, mock.patch.object( + transports.MigrationCenterRestInterceptor, + "post_update_preference_set_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.MigrationCenterRestInterceptor, "pre_update_preference_set" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = migrationcenter.UpdatePreferenceSetRequest.pb( migrationcenter.UpdatePreferenceSetRequest() ) @@ -37435,6 +37700,7 @@ def test_update_preference_set_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.update_preference_set( request, @@ -37446,6 +37712,7 @@ def test_update_preference_set_rest_interceptors(null_interceptor): pre.assert_called_once() 
post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_preference_set_rest_bad_request( @@ -37526,10 +37793,14 @@ def test_delete_preference_set_rest_interceptors(null_interceptor): ), mock.patch.object( transports.MigrationCenterRestInterceptor, "post_delete_preference_set" ) as post, mock.patch.object( + transports.MigrationCenterRestInterceptor, + "post_delete_preference_set_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.MigrationCenterRestInterceptor, "pre_delete_preference_set" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = migrationcenter.DeletePreferenceSetRequest.pb( migrationcenter.DeletePreferenceSetRequest() ) @@ -37553,6 +37824,7 @@ def test_delete_preference_set_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.delete_preference_set( request, @@ -37564,6 +37836,7 @@ def test_delete_preference_set_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_settings_rest_bad_request(request_type=migrationcenter.GetSettingsRequest): @@ -37648,10 +37921,13 @@ def test_get_settings_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.MigrationCenterRestInterceptor, "post_get_settings" ) as post, mock.patch.object( + transports.MigrationCenterRestInterceptor, "post_get_settings_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.MigrationCenterRestInterceptor, "pre_get_settings" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = migrationcenter.GetSettingsRequest.pb( migrationcenter.GetSettingsRequest() ) @@ -37675,6 +37951,7 @@ def test_get_settings_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = migrationcenter.Settings() + post_with_metadata.return_value = migrationcenter.Settings(), metadata client.get_settings( request, @@ -37686,6 +37963,7 @@ def test_get_settings_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_settings_rest_bad_request( @@ -37837,10 +38115,13 @@ def test_update_settings_rest_interceptors(null_interceptor): ), mock.patch.object( transports.MigrationCenterRestInterceptor, "post_update_settings" ) as post, mock.patch.object( + transports.MigrationCenterRestInterceptor, "post_update_settings_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.MigrationCenterRestInterceptor, "pre_update_settings" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = migrationcenter.UpdateSettingsRequest.pb( migrationcenter.UpdateSettingsRequest() ) @@ -37864,6 +38145,7 @@ def test_update_settings_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.update_settings( request, @@ -37875,6 +38157,7 @@ def test_update_settings_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_report_config_rest_bad_request( @@ -38032,10 +38315,14 @@ def 
test_create_report_config_rest_interceptors(null_interceptor): ), mock.patch.object( transports.MigrationCenterRestInterceptor, "post_create_report_config" ) as post, mock.patch.object( + transports.MigrationCenterRestInterceptor, + "post_create_report_config_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.MigrationCenterRestInterceptor, "pre_create_report_config" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = migrationcenter.CreateReportConfigRequest.pb( migrationcenter.CreateReportConfigRequest() ) @@ -38059,6 +38346,7 @@ def test_create_report_config_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_report_config( request, @@ -38070,6 +38358,7 @@ def test_create_report_config_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_report_config_rest_bad_request( @@ -38158,10 +38447,14 @@ def test_get_report_config_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.MigrationCenterRestInterceptor, "post_get_report_config" ) as post, mock.patch.object( + transports.MigrationCenterRestInterceptor, + "post_get_report_config_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.MigrationCenterRestInterceptor, "pre_get_report_config" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = migrationcenter.GetReportConfigRequest.pb( migrationcenter.GetReportConfigRequest() ) @@ -38187,6 +38480,7 @@ def test_get_report_config_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = migrationcenter.ReportConfig() + post_with_metadata.return_value = migrationcenter.ReportConfig(), metadata client.get_report_config( request, @@ -38198,6 +38492,7 @@ def test_get_report_config_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_report_configs_rest_bad_request( @@ -38284,10 +38579,14 @@ def test_list_report_configs_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.MigrationCenterRestInterceptor, "post_list_report_configs" ) as post, mock.patch.object( + transports.MigrationCenterRestInterceptor, + "post_list_report_configs_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.MigrationCenterRestInterceptor, "pre_list_report_configs" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = migrationcenter.ListReportConfigsRequest.pb( migrationcenter.ListReportConfigsRequest() ) @@ -38313,6 +38612,10 @@ def test_list_report_configs_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = migrationcenter.ListReportConfigsResponse() + post_with_metadata.return_value = ( + migrationcenter.ListReportConfigsResponse(), + metadata, + ) client.list_report_configs( request, @@ -38324,6 +38627,7 @@ def test_list_report_configs_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_report_config_rest_bad_request( @@ -38404,10 +38708,14 @@ def test_delete_report_config_rest_interceptors(null_interceptor): ), 
mock.patch.object( transports.MigrationCenterRestInterceptor, "post_delete_report_config" ) as post, mock.patch.object( + transports.MigrationCenterRestInterceptor, + "post_delete_report_config_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.MigrationCenterRestInterceptor, "pre_delete_report_config" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = migrationcenter.DeleteReportConfigRequest.pb( migrationcenter.DeleteReportConfigRequest() ) @@ -38431,6 +38739,7 @@ def test_delete_report_config_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.delete_report_config( request, @@ -38442,6 +38751,7 @@ def test_delete_report_config_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_report_rest_bad_request( @@ -38719,10 +39029,13 @@ def test_create_report_rest_interceptors(null_interceptor): ), mock.patch.object( transports.MigrationCenterRestInterceptor, "post_create_report" ) as post, mock.patch.object( + transports.MigrationCenterRestInterceptor, "post_create_report_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.MigrationCenterRestInterceptor, "pre_create_report" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = migrationcenter.CreateReportRequest.pb( migrationcenter.CreateReportRequest() ) @@ -38746,6 +39059,7 @@ def test_create_report_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_report( request, @@ -38757,6 +39071,7 @@ def test_create_report_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_report_rest_bad_request(request_type=migrationcenter.GetReportRequest): @@ -38851,10 +39166,13 @@ def test_get_report_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.MigrationCenterRestInterceptor, "post_get_report" ) as post, mock.patch.object( + transports.MigrationCenterRestInterceptor, "post_get_report_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.MigrationCenterRestInterceptor, "pre_get_report" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = migrationcenter.GetReportRequest.pb( migrationcenter.GetReportRequest() ) @@ -38878,6 +39196,7 @@ def test_get_report_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = migrationcenter.Report() + post_with_metadata.return_value = migrationcenter.Report(), metadata client.get_report( request, @@ -38889,6 +39208,7 @@ def test_get_report_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_reports_rest_bad_request(request_type=migrationcenter.ListReportsRequest): @@ -38977,10 +39297,13 @@ def test_list_reports_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.MigrationCenterRestInterceptor, "post_list_reports" ) as post, mock.patch.object( + transports.MigrationCenterRestInterceptor, 
"post_list_reports_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.MigrationCenterRestInterceptor, "pre_list_reports" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = migrationcenter.ListReportsRequest.pb( migrationcenter.ListReportsRequest() ) @@ -39006,6 +39329,10 @@ def test_list_reports_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = migrationcenter.ListReportsResponse() + post_with_metadata.return_value = ( + migrationcenter.ListReportsResponse(), + metadata, + ) client.list_reports( request, @@ -39017,6 +39344,7 @@ def test_list_reports_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_report_rest_bad_request( @@ -39101,10 +39429,13 @@ def test_delete_report_rest_interceptors(null_interceptor): ), mock.patch.object( transports.MigrationCenterRestInterceptor, "post_delete_report" ) as post, mock.patch.object( + transports.MigrationCenterRestInterceptor, "post_delete_report_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.MigrationCenterRestInterceptor, "pre_delete_report" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = migrationcenter.DeleteReportRequest.pb( migrationcenter.DeleteReportRequest() ) @@ -39128,6 +39459,7 @@ def test_delete_report_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.delete_report( request, @@ -39139,6 +39471,7 @@ def test_delete_report_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationRequest): diff --git a/.kokoro/release-single.sh b/packages/google-cloud-modelarmor/.OwlBot.yaml old mode 100755 new mode 100644 similarity index 54% rename from .kokoro/release-single.sh rename to packages/google-cloud-modelarmor/.OwlBot.yaml index a9969e47aaa6..b6c2e1fa64ed --- a/.kokoro/release-single.sh +++ b/packages/google-cloud-modelarmor/.OwlBot.yaml @@ -1,4 +1,3 @@ -#!/bin/bash # Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -13,15 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -# `-e` enables the script to automatically fail when a command fails -# `-o pipefail` sets the exit code to non-zero if any command fails, -# or zero if all commands in the pipeline exit successfully. -set -eo pipefail - -pwd - -# Move into the package, build the distribution and upload. 
-TWINE_PASSWORD=$(cat "${KOKORO_KEYSTORE_DIR}/73713_google-cloud-pypi-token-keystore-3") - -python3 setup.py sdist bdist_wheel -twine upload --username __token__ --password "${TWINE_PASSWORD}" dist/* +deep-copy-regex: + - source: /google/cloud/modelarmor/(v.*)/.*-py + dest: /owl-bot-staging/google-cloud-modelarmor/$1 +api-name: google-cloud-modelarmor diff --git a/packages/google-cloud-modelarmor/.coveragerc b/packages/google-cloud-modelarmor/.coveragerc new file mode 100644 index 000000000000..63fc14a43357 --- /dev/null +++ b/packages/google-cloud-modelarmor/.coveragerc @@ -0,0 +1,13 @@ +[run] +branch = True + +[report] +show_missing = True +omit = + google/cloud/modelarmor/__init__.py + google/cloud/modelarmor/gapic_version.py +exclude_lines = + # Re-enable the standard pragma + pragma: NO COVER + # Ignore debug-only repr + def __repr__ diff --git a/packages/google-cloud-modelarmor/.flake8 b/packages/google-cloud-modelarmor/.flake8 new file mode 100644 index 000000000000..32986c79287a --- /dev/null +++ b/packages/google-cloud-modelarmor/.flake8 @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! +[flake8] +ignore = E203, E231, E266, E501, W503 +exclude = + # Exclude generated code. + **/proto/** + **/gapic/** + **/services/** + **/types/** + *_pb2.py + + # Standard linting exemptions. + **/.nox/** + __pycache__, + .git, + *.pyc, + conf.py diff --git a/packages/google-cloud-modelarmor/.gitignore b/packages/google-cloud-modelarmor/.gitignore new file mode 100644 index 000000000000..b4243ced74e4 --- /dev/null +++ b/packages/google-cloud-modelarmor/.gitignore @@ -0,0 +1,63 @@ +*.py[cod] +*.sw[op] + +# C extensions +*.so + +# Packages +*.egg +*.egg-info +dist +build +eggs +.eggs +parts +bin +var +sdist +develop-eggs +.installed.cfg +lib +lib64 +__pycache__ + +# Installer logs +pip-log.txt + +# Unit test / coverage reports +.coverage +.nox +.cache +.pytest_cache + + +# Mac +.DS_Store + +# JetBrains +.idea + +# VS Code +.vscode + +# emacs +*~ + +# Built documentation +docs/_build +bigquery/docs/generated +docs.metadata + +# Virtual environment +env/ + +# Test logs +coverage.xml +*sponge_log.xml + +# System test environment variables. +system_tests/local_test_setup + +# Make sure a generated file isn't accidentally committed. 
+pylintrc +pylintrc.test diff --git a/packages/google-cloud-modelarmor/.repo-metadata.json b/packages/google-cloud-modelarmor/.repo-metadata.json new file mode 100644 index 000000000000..a583d9c21cfa --- /dev/null +++ b/packages/google-cloud-modelarmor/.repo-metadata.json @@ -0,0 +1,17 @@ +{ + "name": "google-cloud-modelarmor", + "name_pretty": "Model Armor API", + "api_description": "Model Armor helps you protect against risks like prompt injection, harmful content, and data leakage in generative AI applications by letting you define policies that filter user prompts and model responses.", + "product_documentation": "https://cloud.google.com/security-command-center/docs/model-armor-overview", + "client_documentation": "https://cloud.google.com/python/docs/reference/google-cloud-modelarmor/latest", + "issue_tracker": "https://issuetracker.google.com/issues/new?component=1514910&template=0", + "release_level": "preview", + "language": "python", + "library_type": "GAPIC_AUTO", + "repo": "googleapis/google-cloud-python", + "distribution_name": "google-cloud-modelarmor", + "api_id": "modelarmor.googleapis.com", + "default_version": "v1", + "codeowner_team": "", + "api_shortname": "modelarmor" +} diff --git a/packages/google-cloud-modelarmor/CHANGELOG.md b/packages/google-cloud-modelarmor/CHANGELOG.md new file mode 100644 index 000000000000..9f2d4df743e4 --- /dev/null +++ b/packages/google-cloud-modelarmor/CHANGELOG.md @@ -0,0 +1,18 @@ +# Changelog + +## [0.1.1](https://github.com/googleapis/google-cloud-python/compare/google-cloud-modelarmor-v0.1.0...google-cloud-modelarmor-v0.1.1) (2025-02-12) + + +### Features + +* Add REST Interceptors which support reading metadata ([a0910dd](https://github.com/googleapis/google-cloud-python/commit/a0910dd51541d238bc5fcf10159066ddfd928579)) +* Add support for reading selective GAPIC generation methods from service YAML ([a0910dd](https://github.com/googleapis/google-cloud-python/commit/a0910dd51541d238bc5fcf10159066ddfd928579)) + +## 0.1.0 (2025-01-29) + + +### Features + +* add initial files for google.cloud.modelarmor.v1 ([#13435](https://github.com/googleapis/google-cloud-python/issues/13435)) ([659f13b](https://github.com/googleapis/google-cloud-python/commit/659f13b2b1acacb20869663696860f63dcbbdf2f)) + +## Changelog diff --git a/packages/google-cloud-modelarmor/CODE_OF_CONDUCT.md b/packages/google-cloud-modelarmor/CODE_OF_CONDUCT.md new file mode 100644 index 000000000000..039f43681204 --- /dev/null +++ b/packages/google-cloud-modelarmor/CODE_OF_CONDUCT.md @@ -0,0 +1,95 @@ + +# Code of Conduct + +## Our Pledge + +In the interest of fostering an open and welcoming environment, we as +contributors and maintainers pledge to making participation in our project and +our community a harassment-free experience for everyone, regardless of age, body +size, disability, ethnicity, gender identity and expression, level of +experience, education, socio-economic status, nationality, personal appearance, +race, religion, or sexual identity and orientation. 
+ +## Our Standards + +Examples of behavior that contributes to creating a positive environment +include: + +* Using welcoming and inclusive language +* Being respectful of differing viewpoints and experiences +* Gracefully accepting constructive criticism +* Focusing on what is best for the community +* Showing empathy towards other community members + +Examples of unacceptable behavior by participants include: + +* The use of sexualized language or imagery and unwelcome sexual attention or + advances +* Trolling, insulting/derogatory comments, and personal or political attacks +* Public or private harassment +* Publishing others' private information, such as a physical or electronic + address, without explicit permission +* Other conduct which could reasonably be considered inappropriate in a + professional setting + +## Our Responsibilities + +Project maintainers are responsible for clarifying the standards of acceptable +behavior and are expected to take appropriate and fair corrective action in +response to any instances of unacceptable behavior. + +Project maintainers have the right and responsibility to remove, edit, or reject +comments, commits, code, wiki edits, issues, and other contributions that are +not aligned to this Code of Conduct, or to ban temporarily or permanently any +contributor for other behaviors that they deem inappropriate, threatening, +offensive, or harmful. + +## Scope + +This Code of Conduct applies both within project spaces and in public spaces +when an individual is representing the project or its community. Examples of +representing a project or community include using an official project e-mail +address, posting via an official social media account, or acting as an appointed +representative at an online or offline event. Representation of a project may be +further defined and clarified by project maintainers. + +This Code of Conduct also applies outside the project spaces when the Project +Steward has a reasonable belief that an individual's behavior may have a +negative impact on the project or its community. + +## Conflict Resolution + +We do not believe that all conflict is bad; healthy debate and disagreement +often yield positive results. However, it is never okay to be disrespectful or +to engage in behavior that violates the project’s code of conduct. + +If you see someone violating the code of conduct, you are encouraged to address +the behavior directly with those involved. Many issues can be resolved quickly +and easily, and this gives people more control over the outcome of their +dispute. If you are unable to resolve the matter for any reason, or if the +behavior is threatening or harassing, report it. We are dedicated to providing +an environment where participants feel welcome and safe. + + +Reports should be directed to *googleapis-stewards@google.com*, the +Project Steward(s) for *Google Cloud Client Libraries*. It is the Project Steward’s duty to +receive and address reported violations of the code of conduct. They will then +work with a committee consisting of representatives from the Open Source +Programs Office and the Google Open Source Strategy team. If for any reason you +are uncomfortable reaching out to the Project Steward, please email +opensource@google.com. + +We will investigate every complaint, but you may not receive a direct response. 
+We will use our discretion in determining when and how to follow up on reported +incidents, which may range from not taking action to permanent expulsion from +the project and project-sponsored spaces. We will notify the accused of the +report and provide them an opportunity to discuss it before any action is taken. +The identity of the reporter will be omitted from the details of the report +supplied to the accused. In potentially harmful situations, such as ongoing +harassment or threats to anyone's safety, we may take action without notice. + +## Attribution + +This Code of Conduct is adapted from the Contributor Covenant, version 1.4, +available at +https://www.contributor-covenant.org/version/1/4/code-of-conduct.html \ No newline at end of file diff --git a/packages/google-cloud-modelarmor/CONTRIBUTING.rst b/packages/google-cloud-modelarmor/CONTRIBUTING.rst new file mode 100644 index 000000000000..c0ae9f34b391 --- /dev/null +++ b/packages/google-cloud-modelarmor/CONTRIBUTING.rst @@ -0,0 +1,273 @@ +.. Generated by synthtool. DO NOT EDIT! +############ +Contributing +############ + +#. **Please sign one of the contributor license agreements below.** +#. Fork the repo, develop and test your code changes, add docs. +#. Make sure that your commit messages clearly describe the changes. +#. Send a pull request. (Please Read: `Faster Pull Request Reviews`_) + +.. _Faster Pull Request Reviews: https://github.com/kubernetes/community/blob/master/contributors/guide/pull-requests.md#best-practices-for-faster-reviews + +.. contents:: Here are some guidelines for hacking on the Google Cloud Client libraries. + +*************** +Adding Features +*************** + +In order to add a feature: + +- The feature must be documented in both the API and narrative + documentation. + +- The feature must work fully on the following CPython versions: + 3.7, 3.8, 3.9, 3.10, 3.11, 3.12 and 3.13 on both UNIX and Windows. + +- The feature must not add unnecessary dependencies (where + "unnecessary" is of course subjective, but new dependencies should + be discussed). + +**************************** +Using a Development Checkout +**************************** + +You'll have to create a development environment using a Git checkout: + +- While logged into your GitHub account, navigate to the + ``google-cloud-python`` `repo`_ on GitHub. + +- Fork and clone the ``google-cloud-python`` repository to your GitHub account by + clicking the "Fork" button. + +- Clone your fork of ``google-cloud-python`` from your GitHub account to your local + computer, substituting your account username and specifying the destination + as ``hack-on-google-cloud-python``. E.g.:: + + $ cd ${HOME} + $ git clone git@github.com:USERNAME/google-cloud-python.git hack-on-google-cloud-python + $ cd hack-on-google-cloud-python + # Configure remotes such that you can pull changes from the googleapis/google-cloud-python + # repository into your local repository. + $ git remote add upstream git@github.com:googleapis/google-cloud-python.git + # fetch and merge changes from upstream into main + $ git fetch upstream + $ git merge upstream/main + +Now your local repo is set up such that you will push changes to your GitHub +repo, from which you can submit a pull request. + +To work on the codebase and run the tests, we recommend using ``nox``, +but you can also use a ``virtualenv`` of your own creation. + +.. _repo: https://github.com/googleapis/google-cloud-python + +Using ``nox`` +============= + +We use `nox `__ to instrument our tests. 
+ +- To test your changes, run unit tests with ``nox``:: + $ nox -s unit + +- To run a single unit test:: + + $ nox -s unit-3.13 -- -k + + + .. note:: + + The unit tests and system tests are described in the + ``noxfile.py`` files in each directory. + +.. nox: https://pypi.org/project/nox/ + +***************************************** +I'm getting weird errors... Can you help? +***************************************** + +If the error mentions ``Python.h`` not being found, +install ``python-dev`` and try again. +On Debian/Ubuntu:: + + $ sudo apt-get install python-dev + +************ +Coding Style +************ +- We use the automatic code formatter ``black``. You can run it using + the nox session ``blacken``. This will eliminate many lint errors. Run via:: + + $ nox -s blacken + +- PEP8 compliance is required, with exceptions defined in the linter configuration. + If you have ``nox`` installed, you can test that you have not introduced + any non-compliant code via:: + + $ nox -s lint + +- In order to make ``nox -s lint`` run faster, you can set some environment + variables:: + + export GOOGLE_CLOUD_TESTING_REMOTE="upstream" + export GOOGLE_CLOUD_TESTING_BRANCH="main" + + By doing this, you are specifying the location of the most up-to-date + version of ``google-cloud-python``. The + remote name ``upstream`` should point to the official ``googleapis`` + checkout and the branch should be the default branch on that remote (``main``). + +- This repository contains configuration for the + `pre-commit `__ tool, which automates checking + our linters during a commit. If you have it installed on your ``$PATH``, + you can enable enforcing those checks via: + +.. code-block:: bash + + $ pre-commit install + pre-commit installed at .git/hooks/pre-commit + +Exceptions to PEP8: + +- Many unit tests use a helper method, ``_call_fut`` ("FUT" is short for + "Function-Under-Test"), which is PEP8-incompliant, but more readable. + Some also use a local variable, ``MUT`` (short for "Module-Under-Test"). + +******************** +Running System Tests +******************** + +- To run system tests, you can execute:: + + # Run all system tests + $ nox -s system + + # Run a single system test + $ nox -s system-3.13 -- -k + + + .. note:: + + System tests are only configured to run under Python 3.8, 3.9, 3.10, 3.11, 3.12 and 3.13. + For expediency, we do not run them in older versions of Python 3. + + This alone will not run the tests. You'll need to change some local + auth settings and change some configuration in your project to + run all the tests. + +- System tests will be run against an actual project. You should use local credentials from gcloud when possible. See `Best practices for application authentication `__. Some tests require a service account. For those tests see `Authenticating as a service account `__. + +************* +Test Coverage +************* + +- The codebase *must* have 100% test statement coverage after each commit. + You can test coverage via ``nox -s cover``. + +****************************************************** +Documentation Coverage and Building HTML Documentation +****************************************************** + +If you fix a bug, and the bug requires an API or behavior modification, all +documentation in this package which references that API or behavior must be +changed to reflect the bug fix, ideally in the same commit that fixes the bug +or adds the feature. 
+ +Build the docs via: + + $ nox -s docs + +************************* +Samples and code snippets +************************* + +Code samples and snippets live in the `samples/` catalogue. Feel free to +provide more examples, but make sure to write tests for those examples. +Each folder containing example code requires its own `noxfile.py` script +which automates testing. If you decide to create a new folder, you can +base it on the `samples/snippets` folder (providing `noxfile.py` and +the requirements files). + +The tests will run against a real Google Cloud Project, so you should +configure them just like the System Tests. + +- To run sample tests, you can execute:: + + # Run all tests in a folder + $ cd samples/snippets + $ nox -s py-3.8 + + # Run a single sample test + $ cd samples/snippets + $ nox -s py-3.8 -- -k + +******************************************** +Note About ``README`` as it pertains to PyPI +******************************************** + +The `description on PyPI`_ for the project comes directly from the +``README``. Due to the reStructuredText (``rst``) parser used by +PyPI, relative links which will work on GitHub (e.g. ``CONTRIBUTING.rst`` +instead of +``https://github.com/googleapis/google-cloud-python/blob/main/CONTRIBUTING.rst``) +may cause problems creating links or rendering the description. + +.. _description on PyPI: https://pypi.org/project/google-cloud-modelarmor + + +************************* +Supported Python Versions +************************* + +We support: + +- `Python 3.7`_ +- `Python 3.8`_ +- `Python 3.9`_ +- `Python 3.10`_ +- `Python 3.11`_ +- `Python 3.12`_ +- `Python 3.13`_ + +.. _Python 3.7: https://docs.python.org/3.7/ +.. _Python 3.8: https://docs.python.org/3.8/ +.. _Python 3.9: https://docs.python.org/3.9/ +.. _Python 3.10: https://docs.python.org/3.10/ +.. _Python 3.11: https://docs.python.org/3.11/ +.. _Python 3.12: https://docs.python.org/3.12/ +.. _Python 3.13: https://docs.python.org/3.13/ + + +Supported versions can be found in our ``noxfile.py`` `config`_. + +.. _config: https://github.com/googleapis/google-cloud-python/blob/main/packages/google-cloud-modelarmor/noxfile.py + + +********** +Versioning +********** + +This library follows `Semantic Versioning`_. + +.. _Semantic Versioning: http://semver.org/ + +Some packages are currently in major version zero (``0.y.z``), which means that +anything may change at any time and the public API should not be considered +stable. + +****************************** +Contributor License Agreements +****************************** + +Before we can accept your pull requests you'll need to sign a Contributor +License Agreement (CLA): + +- **If you are an individual writing original source code** and **you own the + intellectual property**, then you'll need to sign an + `individual CLA `__. +- **If you work for a company that wants to allow you to contribute your work**, + then you'll need to sign a + `corporate CLA `__. + +You can sign these electronically (just scroll to the bottom). After that, +we'll be able to accept your pull requests. diff --git a/packages/google-cloud-modelarmor/LICENSE b/packages/google-cloud-modelarmor/LICENSE new file mode 100644 index 000000000000..d64569567334 --- /dev/null +++ b/packages/google-cloud-modelarmor/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. 
+ + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/packages/google-cloud-modelarmor/MANIFEST.in b/packages/google-cloud-modelarmor/MANIFEST.in new file mode 100644 index 000000000000..d6814cd60037 --- /dev/null +++ b/packages/google-cloud-modelarmor/MANIFEST.in @@ -0,0 +1,25 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! +include README.rst LICENSE +recursive-include google *.json *.proto py.typed +recursive-include tests * +global-exclude *.py[co] +global-exclude __pycache__ + +# Exclude scripts for samples readmegen +prune scripts/readme-gen diff --git a/packages/google-cloud-modelarmor/README.rst b/packages/google-cloud-modelarmor/README.rst new file mode 100644 index 000000000000..ff77efa07e13 --- /dev/null +++ b/packages/google-cloud-modelarmor/README.rst @@ -0,0 +1,108 @@ +Python Client for Model Armor API +================================= + +|preview| |pypi| |versions| + +`Model Armor API`_: Model Armor helps you protect against risks like prompt injection, harmful content, and data leakage in generative AI applications by letting you define policies that filter user prompts and model responses. + +- `Client Library Documentation`_ +- `Product Documentation`_ + +.. |preview| image:: https://img.shields.io/badge/support-preview-orange.svg + :target: https://github.com/googleapis/google-cloud-python/blob/main/README.rst#stability-levels +.. |pypi| image:: https://img.shields.io/pypi/v/google-cloud-modelarmor.svg + :target: https://pypi.org/project/google-cloud-modelarmor/ +.. |versions| image:: https://img.shields.io/pypi/pyversions/google-cloud-modelarmor.svg + :target: https://pypi.org/project/google-cloud-modelarmor/ +.. _Model Armor API: https://cloud.google.com/security-command-center/docs/model-armor-overview +.. _Client Library Documentation: https://cloud.google.com/python/docs/reference/google-cloud-modelarmor/latest/summary_overview +.. _Product Documentation: https://cloud.google.com/security-command-center/docs/model-armor-overview + +Quick Start +----------- + +In order to use this library, you first need to go through the following steps: + +1. `Select or create a Cloud Platform project.`_ +2. `Enable billing for your project.`_ +3. `Enable the Model Armor API.`_ +4. `Setup Authentication.`_ + +.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project +.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project +.. _Enable the Model Armor API.: https://cloud.google.com/security-command-center/docs/model-armor-overview +.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html + +Installation +~~~~~~~~~~~~ + +Install this library in a virtual environment using `venv`_. `venv`_ is a tool that +creates isolated Python environments. 
These isolated environments can have separate
+versions of Python packages, which allows you to isolate one project's dependencies
+from the dependencies of other projects.
+
+With `venv`_, it's possible to install this library without needing system
+install permissions, and without clashing with the installed system
+dependencies.
+
+.. _`venv`: https://docs.python.org/3/library/venv.html
+
+
+Code samples and snippets
+~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Code samples and snippets live in the `samples/`_ folder.
+
+.. _samples/: https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-modelarmor/samples
+
+
+Supported Python Versions
+^^^^^^^^^^^^^^^^^^^^^^^^^
+Our client libraries are compatible with all current `active`_ and `maintenance`_ versions of
+Python.
+
+Python >= 3.7
+
+.. _active: https://devguide.python.org/devcycle/#in-development-main-branch
+.. _maintenance: https://devguide.python.org/devcycle/#maintenance-branches
+
+Unsupported Python Versions
+^^^^^^^^^^^^^^^^^^^^^^^^^^^
+Python <= 3.6
+
+If you are using an `end-of-life`_
+version of Python, we recommend that you update as soon as possible to an actively supported version.
+
+.. _end-of-life: https://devguide.python.org/devcycle/#end-of-life-branches
+
+Mac/Linux
+^^^^^^^^^
+
+.. code-block:: console
+
+    python3 -m venv <your-env>
+    source <your-env>/bin/activate
+    pip install google-cloud-modelarmor
+
+
+Windows
+^^^^^^^
+
+.. code-block:: console
+
+    py -m venv <your-env>
+    .\<your-env>\Scripts\activate
+    pip install google-cloud-modelarmor
+
+Next Steps
+~~~~~~~~~~
+
+- Read the `Client Library Documentation`_ for Model Armor API
+  to see other available methods on the client.
+- Read the `Model Armor API Product documentation`_ to learn
+  more about the product and see How-to Guides.
+- View this `README`_ to see the full list of Cloud
+  APIs that we cover.
+
+.. _Model Armor API Product documentation: https://cloud.google.com/security-command-center/docs/model-armor-overview
_README: https://github.com/googleapis/google-cloud-python/blob/main/README.rst diff --git a/packages/google-cloud-modelarmor/docs/CHANGELOG.md b/packages/google-cloud-modelarmor/docs/CHANGELOG.md new file mode 120000 index 000000000000..04c99a55caae --- /dev/null +++ b/packages/google-cloud-modelarmor/docs/CHANGELOG.md @@ -0,0 +1 @@ +../CHANGELOG.md \ No newline at end of file diff --git a/packages/google-cloud-modelarmor/docs/README.rst b/packages/google-cloud-modelarmor/docs/README.rst new file mode 120000 index 000000000000..89a0106941ff --- /dev/null +++ b/packages/google-cloud-modelarmor/docs/README.rst @@ -0,0 +1 @@ +../README.rst \ No newline at end of file diff --git a/packages/google-cloud-modelarmor/docs/_static/custom.css b/packages/google-cloud-modelarmor/docs/_static/custom.css new file mode 100644 index 000000000000..b0a295464b23 --- /dev/null +++ b/packages/google-cloud-modelarmor/docs/_static/custom.css @@ -0,0 +1,20 @@ +div#python2-eol { + border-color: red; + border-width: medium; +} + +/* Ensure minimum width for 'Parameters' / 'Returns' column */ +dl.field-list > dt { + min-width: 100px +} + +/* Insert space between methods for readability */ +dl.method { + padding-top: 10px; + padding-bottom: 10px +} + +/* Insert empty space between classes */ +dl.class { + padding-bottom: 50px +} diff --git a/packages/google-cloud-modelarmor/docs/_templates/layout.html b/packages/google-cloud-modelarmor/docs/_templates/layout.html new file mode 100644 index 000000000000..6316a537f72b --- /dev/null +++ b/packages/google-cloud-modelarmor/docs/_templates/layout.html @@ -0,0 +1,50 @@ + +{% extends "!layout.html" %} +{%- block content %} +{%- if theme_fixed_sidebar|lower == 'true' %} +
+  <div class="document">
+    {{ sidebar() }}
+    {%- block document %}
+      <div class="documentwrapper">
+      {%- if render_sidebar %}
+        <div class="bodywrapper">
+      {%- endif %}
+
+          {%- block relbar_top %}
+            {%- if theme_show_relbar_top|tobool %}
+              <div class="related top">
+                &nbsp;
+                {{- rellink_markup () }}
+              </div>
+            {%- endif %}
+          {% endblock %}
+
+          <div class="body" role="main">
+            <div class="admonition" id="python2-eol">
+            As of January 1, 2020 this library no longer supports Python 2 on the latest released version.
+            Library versions released prior to that date will continue to be available. For more information please
+            visit <a href="https://cloud.google.com/python/docs/python2-sunset/">Python 2 support on Google Cloud</a>.
+            </div>
+            {% block body %} {% endblock %}
+          </div>
+
+          {%- block relbar_bottom %}
+            {%- if theme_show_relbar_bottom|tobool %}
+              <div class="related bottom">
+                &nbsp;
+                {{- rellink_markup () }}
+              </div>
+            {%- endif %}
+          {% endblock %}
+
+      {%- if render_sidebar %}
+        </div>
+      {%- endif %}
+      </div>
+    {%- endblock %}
+    <div class="clearer"></div>
+  </div>
+{%- else %} +{{ super() }} +{%- endif %} +{%- endblock %} diff --git a/packages/google-cloud-modelarmor/docs/conf.py b/packages/google-cloud-modelarmor/docs/conf.py new file mode 100644 index 000000000000..3fad77c2784f --- /dev/null +++ b/packages/google-cloud-modelarmor/docs/conf.py @@ -0,0 +1,384 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# google-cloud-modelarmor documentation build configuration file +# +# This file is execfile()d with the current directory set to its +# containing dir. +# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. + +import os +import shlex +import sys + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +sys.path.insert(0, os.path.abspath("..")) + +# For plugins that can not read conf.py. +# See also: https://github.com/docascode/sphinx-docfx-yaml/issues/85 +sys.path.insert(0, os.path.abspath(".")) + +__version__ = "" + +# -- General configuration ------------------------------------------------ + +# If your documentation needs a minimal Sphinx version, state it here. +needs_sphinx = "1.5.5" + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = [ + "sphinx.ext.autodoc", + "sphinx.ext.autosummary", + "sphinx.ext.intersphinx", + "sphinx.ext.coverage", + "sphinx.ext.doctest", + "sphinx.ext.napoleon", + "sphinx.ext.todo", + "sphinx.ext.viewcode", + "recommonmark", +] + +# autodoc/autosummary flags +autoclass_content = "both" +autodoc_default_options = {"members": True} +autosummary_generate = True + + +# Add any paths that contain templates here, relative to this directory. +templates_path = ["_templates"] + +# The suffix(es) of source filenames. +# You can specify multiple suffix as a list of string: +# source_suffix = ['.rst', '.md'] +source_suffix = [".rst", ".md"] + +# The encoding of source files. +# source_encoding = 'utf-8-sig' + +# The root toctree document. +root_doc = "index" + +# General information about the project. +project = "google-cloud-modelarmor" +copyright = "2019, Google" +author = "Google APIs" + +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. +# +# The full version, including alpha/beta/rc tags. +release = __version__ +# The short X.Y version. +version = ".".join(release.split(".")[0:2]) + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. +# +# This is also used if you do content translation via gettext catalogs. 
+# Usually you set "language" from the command line for these cases. +language = None + +# There are two options for replacing |today|: either, you set today to some +# non-false value, then it is used: +# today = '' +# Else, today_fmt is used as the format for a strftime call. +# today_fmt = '%B %d, %Y' + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +exclude_patterns = [ + "_build", + "**/.nox/**/*", + "samples/AUTHORING_GUIDE.md", + "samples/CONTRIBUTING.md", + "samples/snippets/README.rst", +] + +# The reST default role (used for this markup: `text`) to use for all +# documents. +# default_role = None + +# If true, '()' will be appended to :func: etc. cross-reference text. +# add_function_parentheses = True + +# If true, the current module name will be prepended to all description +# unit titles (such as .. function::). +# add_module_names = True + +# If true, sectionauthor and moduleauthor directives will be shown in the +# output. They are ignored by default. +# show_authors = False + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = "sphinx" + +# A list of ignored prefixes for module index sorting. +# modindex_common_prefix = [] + +# If true, keep warnings as "system message" paragraphs in the built documents. +# keep_warnings = False + +# If true, `todo` and `todoList` produce output, else they produce nothing. +todo_include_todos = True + + +# -- Options for HTML output ---------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +html_theme = "alabaster" + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. +html_theme_options = { + "description": "Google Cloud Client Libraries for google-cloud-modelarmor", + "github_user": "googleapis", + "github_repo": "google-cloud-python", + "github_banner": True, + "font_family": "'Roboto', Georgia, sans", + "head_font_family": "'Roboto', Georgia, serif", + "code_font_family": "'Roboto Mono', 'Consolas', monospace", +} + +# Add any paths that contain custom themes here, relative to this directory. +# html_theme_path = [] + +# The name for this set of Sphinx documents. If None, it defaults to +# " v documentation". +# html_title = None + +# A shorter title for the navigation bar. Default is the same as html_title. +# html_short_title = None + +# The name of an image file (relative to this directory) to place at the top +# of the sidebar. +# html_logo = None + +# The name of an image file (within the static path) to use as favicon of the +# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 +# pixels large. +# html_favicon = None + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ["_static"] + +# Add any extra paths that contain custom files (such as robots.txt or +# .htaccess) here, relative to this directory. These files are copied +# directly to the root of the documentation. +# html_extra_path = [] + +# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, +# using the given strftime format. 
+# html_last_updated_fmt = '%b %d, %Y' + +# If true, SmartyPants will be used to convert quotes and dashes to +# typographically correct entities. +# html_use_smartypants = True + +# Custom sidebar templates, maps document names to template names. +# html_sidebars = {} + +# Additional templates that should be rendered to pages, maps page names to +# template names. +# html_additional_pages = {} + +# If false, no module index is generated. +# html_domain_indices = True + +# If false, no index is generated. +# html_use_index = True + +# If true, the index is split into individual pages for each letter. +# html_split_index = False + +# If true, links to the reST sources are added to the pages. +# html_show_sourcelink = True + +# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. +# html_show_sphinx = True + +# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. +# html_show_copyright = True + +# If true, an OpenSearch description file will be output, and all pages will +# contain a tag referring to it. The value of this option must be the +# base URL from which the finished HTML is served. +# html_use_opensearch = '' + +# This is the file name suffix for HTML files (e.g. ".xhtml"). +# html_file_suffix = None + +# Language to be used for generating the HTML full-text search index. +# Sphinx supports the following languages: +# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' +# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' +# html_search_language = 'en' + +# A dictionary with options for the search language support, empty by default. +# Now only 'ja' uses this config value +# html_search_options = {'type': 'default'} + +# The name of a javascript file (relative to the configuration directory) that +# implements a search results scorer. If empty, the default will be used. +# html_search_scorer = 'scorer.js' + +# Output file base name for HTML help builder. +htmlhelp_basename = "google-cloud-modelarmor-doc" + +# -- Options for warnings ------------------------------------------------------ + + +suppress_warnings = [ + # Temporarily suppress this to avoid "more than one target found for + # cross-reference" warning, which are intractable for us to avoid while in + # a mono-repo. + # See https://github.com/sphinx-doc/sphinx/blob + # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 + "ref.python" +] + +# -- Options for LaTeX output --------------------------------------------- + +latex_elements = { + # The paper size ('letterpaper' or 'a4paper'). + #'papersize': 'letterpaper', + # The font size ('10pt', '11pt' or '12pt'). + #'pointsize': '10pt', + # Additional stuff for the LaTeX preamble. + #'preamble': '', + # Latex figure (float) alignment + #'figure_align': 'htbp', +} + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). +latex_documents = [ + ( + root_doc, + "google-cloud-modelarmor.tex", + "google-cloud-modelarmor Documentation", + author, + "manual", + ) +] + +# The name of an image file (relative to this directory) to place at the top of +# the title page. +# latex_logo = None + +# For "manual" documents, if this is true, then toplevel headings are parts, +# not chapters. +# latex_use_parts = False + +# If true, show page references after internal links. +# latex_show_pagerefs = False + +# If true, show URL addresses after external links. 
+# latex_show_urls = False + +# Documents to append as an appendix to all manuals. +# latex_appendices = [] + +# If false, no module index is generated. +# latex_domain_indices = True + + +# -- Options for manual page output --------------------------------------- + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). +man_pages = [ + ( + root_doc, + "google-cloud-modelarmor", + "google-cloud-modelarmor Documentation", + [author], + 1, + ) +] + +# If true, show URL addresses after external links. +# man_show_urls = False + + +# -- Options for Texinfo output ------------------------------------------- + +# Grouping the document tree into Texinfo files. List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + ( + root_doc, + "google-cloud-modelarmor", + "google-cloud-modelarmor Documentation", + author, + "google-cloud-modelarmor", + "google-cloud-modelarmor Library", + "APIs", + ) +] + +# Documents to append as an appendix to all manuals. +# texinfo_appendices = [] + +# If false, no module index is generated. +# texinfo_domain_indices = True + +# How to display URL addresses: 'footnote', 'no', or 'inline'. +# texinfo_show_urls = 'footnote' + +# If true, do not generate a @detailmenu in the "Top" node's menu. +# texinfo_no_detailmenu = False + + +# Example configuration for intersphinx: refer to the Python standard library. +intersphinx_mapping = { + "python": ("https://python.readthedocs.org/en/latest/", None), + "google-auth": ("https://googleapis.dev/python/google-auth/latest/", None), + "google.api_core": ( + "https://googleapis.dev/python/google-api-core/latest/", + None, + ), + "grpc": ("https://grpc.github.io/grpc/python/", None), + "proto-plus": ("https://proto-plus-python.readthedocs.io/en/latest/", None), + "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), +} + + +# Napoleon settings +napoleon_google_docstring = True +napoleon_numpy_docstring = True +napoleon_include_private_with_doc = False +napoleon_include_special_with_doc = True +napoleon_use_admonition_for_examples = False +napoleon_use_admonition_for_notes = False +napoleon_use_admonition_for_references = False +napoleon_use_ivar = False +napoleon_use_param = True +napoleon_use_rtype = True diff --git a/packages/google-cloud-modelarmor/docs/index.rst b/packages/google-cloud-modelarmor/docs/index.rst new file mode 100644 index 000000000000..4e93f045588a --- /dev/null +++ b/packages/google-cloud-modelarmor/docs/index.rst @@ -0,0 +1,28 @@ +.. include:: README.rst + +.. include:: multiprocessing.rst + + +API Reference +------------- +.. toctree:: + :maxdepth: 2 + + modelarmor_v1/services_ + modelarmor_v1/types_ + + +Changelog +--------- + +For a list of all ``google-cloud-modelarmor`` releases: + +.. toctree:: + :maxdepth: 2 + + CHANGELOG + +.. toctree:: + :hidden: + + summary_overview.md diff --git a/packages/google-cloud-modelarmor/docs/modelarmor_v1/model_armor.rst b/packages/google-cloud-modelarmor/docs/modelarmor_v1/model_armor.rst new file mode 100644 index 000000000000..20a6f66d12fa --- /dev/null +++ b/packages/google-cloud-modelarmor/docs/modelarmor_v1/model_armor.rst @@ -0,0 +1,10 @@ +ModelArmor +---------------------------- + +.. automodule:: google.cloud.modelarmor_v1.services.model_armor + :members: + :inherited-members: + +.. 
automodule:: google.cloud.modelarmor_v1.services.model_armor.pagers + :members: + :inherited-members: diff --git a/packages/google-cloud-modelarmor/docs/modelarmor_v1/services_.rst b/packages/google-cloud-modelarmor/docs/modelarmor_v1/services_.rst new file mode 100644 index 000000000000..17642b96fd46 --- /dev/null +++ b/packages/google-cloud-modelarmor/docs/modelarmor_v1/services_.rst @@ -0,0 +1,6 @@ +Services for Google Cloud Modelarmor v1 API +=========================================== +.. toctree:: + :maxdepth: 2 + + model_armor diff --git a/packages/google-cloud-modelarmor/docs/modelarmor_v1/types_.rst b/packages/google-cloud-modelarmor/docs/modelarmor_v1/types_.rst new file mode 100644 index 000000000000..5405311617c3 --- /dev/null +++ b/packages/google-cloud-modelarmor/docs/modelarmor_v1/types_.rst @@ -0,0 +1,6 @@ +Types for Google Cloud Modelarmor v1 API +======================================== + +.. automodule:: google.cloud.modelarmor_v1.types + :members: + :show-inheritance: diff --git a/packages/google-cloud-modelarmor/docs/multiprocessing.rst b/packages/google-cloud-modelarmor/docs/multiprocessing.rst new file mode 100644 index 000000000000..536d17b2ea65 --- /dev/null +++ b/packages/google-cloud-modelarmor/docs/multiprocessing.rst @@ -0,0 +1,7 @@ +.. note:: + + Because this client uses :mod:`grpc` library, it is safe to + share instances across threads. In multiprocessing scenarios, the best + practice is to create client instances *after* the invocation of + :func:`os.fork` by :class:`multiprocessing.pool.Pool` or + :class:`multiprocessing.Process`. diff --git a/packages/google-cloud-modelarmor/docs/summary_overview.md b/packages/google-cloud-modelarmor/docs/summary_overview.md new file mode 100644 index 000000000000..6da636b0a43b --- /dev/null +++ b/packages/google-cloud-modelarmor/docs/summary_overview.md @@ -0,0 +1,22 @@ +[ +This is a templated file. Adding content to this file may result in it being +reverted. Instead, if you want to place additional content, create an +"overview_content.md" file in `docs/` directory. The Sphinx tool will +pick up on the content and merge the content. +]: # + +# Model Armor API API + +Overview of the APIs available for Model Armor API API. + +## All entries + +Classes, methods and properties & attributes for +Model Armor API API. + +[classes](https://cloud.google.com/python/docs/reference/google-cloud-modelarmor/latest/summary_class.html) + +[methods](https://cloud.google.com/python/docs/reference/google-cloud-modelarmor/latest/summary_method.html) + +[properties and +attributes](https://cloud.google.com/python/docs/reference/google-cloud-modelarmor/latest/summary_property.html) diff --git a/packages/google-cloud-modelarmor/google/cloud/modelarmor/__init__.py b/packages/google-cloud-modelarmor/google/cloud/modelarmor/__init__.py new file mode 100644 index 000000000000..c0e9a35834e6 --- /dev/null +++ b/packages/google-cloud-modelarmor/google/cloud/modelarmor/__init__.py @@ -0,0 +1,117 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +from google.cloud.modelarmor import gapic_version as package_version + +__version__ = package_version.__version__ + + +from google.cloud.modelarmor_v1.services.model_armor.async_client import ( + ModelArmorAsyncClient, +) +from google.cloud.modelarmor_v1.services.model_armor.client import ModelArmorClient +from google.cloud.modelarmor_v1.types.service import ( + ByteDataItem, + CreateTemplateRequest, + CsamFilterResult, + DataItem, + DeleteTemplateRequest, + DetectionConfidenceLevel, + FilterConfig, + FilterExecutionState, + FilterMatchState, + FilterResult, + FloorSetting, + GetFloorSettingRequest, + GetTemplateRequest, + InvocationResult, + ListTemplatesRequest, + ListTemplatesResponse, + MaliciousUriFilterResult, + MaliciousUriFilterSettings, + MessageItem, + PiAndJailbreakFilterResult, + PiAndJailbreakFilterSettings, + RaiFilterResult, + RaiFilterSettings, + RaiFilterType, + RangeInfo, + SanitizationResult, + SanitizeModelResponseRequest, + SanitizeModelResponseResponse, + SanitizeUserPromptRequest, + SanitizeUserPromptResponse, + SdpAdvancedConfig, + SdpBasicConfig, + SdpDeidentifyResult, + SdpFilterResult, + SdpFilterSettings, + SdpFinding, + SdpFindingLikelihood, + SdpInspectResult, + Template, + UpdateFloorSettingRequest, + UpdateTemplateRequest, + VirusDetail, + VirusScanFilterResult, +) + +__all__ = ( + "ModelArmorClient", + "ModelArmorAsyncClient", + "ByteDataItem", + "CreateTemplateRequest", + "CsamFilterResult", + "DataItem", + "DeleteTemplateRequest", + "FilterConfig", + "FilterResult", + "FloorSetting", + "GetFloorSettingRequest", + "GetTemplateRequest", + "ListTemplatesRequest", + "ListTemplatesResponse", + "MaliciousUriFilterResult", + "MaliciousUriFilterSettings", + "MessageItem", + "PiAndJailbreakFilterResult", + "PiAndJailbreakFilterSettings", + "RaiFilterResult", + "RaiFilterSettings", + "RangeInfo", + "SanitizationResult", + "SanitizeModelResponseRequest", + "SanitizeModelResponseResponse", + "SanitizeUserPromptRequest", + "SanitizeUserPromptResponse", + "SdpAdvancedConfig", + "SdpBasicConfig", + "SdpDeidentifyResult", + "SdpFilterResult", + "SdpFilterSettings", + "SdpFinding", + "SdpInspectResult", + "Template", + "UpdateFloorSettingRequest", + "UpdateTemplateRequest", + "VirusDetail", + "VirusScanFilterResult", + "DetectionConfidenceLevel", + "FilterExecutionState", + "FilterMatchState", + "InvocationResult", + "RaiFilterType", + "SdpFindingLikelihood", +) diff --git a/packages/google-cloud-modelarmor/google/cloud/modelarmor/gapic_version.py b/packages/google-cloud-modelarmor/google/cloud/modelarmor/gapic_version.py new file mode 100644 index 000000000000..0c7cc68730c4 --- /dev/null +++ b/packages/google-cloud-modelarmor/google/cloud/modelarmor/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +__version__ = "0.1.1" # {x-release-please-version} diff --git a/packages/google-cloud-modelarmor/google/cloud/modelarmor/py.typed b/packages/google-cloud-modelarmor/google/cloud/modelarmor/py.typed new file mode 100644 index 000000000000..82eb7144ecd6 --- /dev/null +++ b/packages/google-cloud-modelarmor/google/cloud/modelarmor/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-modelarmor package uses inline types. diff --git a/packages/google-cloud-modelarmor/google/cloud/modelarmor_v1/__init__.py b/packages/google-cloud-modelarmor/google/cloud/modelarmor_v1/__init__.py new file mode 100644 index 000000000000..3dc502eaccf7 --- /dev/null +++ b/packages/google-cloud-modelarmor/google/cloud/modelarmor_v1/__init__.py @@ -0,0 +1,114 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from google.cloud.modelarmor_v1 import gapic_version as package_version + +__version__ = package_version.__version__ + + +from .services.model_armor import ModelArmorAsyncClient, ModelArmorClient +from .types.service import ( + ByteDataItem, + CreateTemplateRequest, + CsamFilterResult, + DataItem, + DeleteTemplateRequest, + DetectionConfidenceLevel, + FilterConfig, + FilterExecutionState, + FilterMatchState, + FilterResult, + FloorSetting, + GetFloorSettingRequest, + GetTemplateRequest, + InvocationResult, + ListTemplatesRequest, + ListTemplatesResponse, + MaliciousUriFilterResult, + MaliciousUriFilterSettings, + MessageItem, + PiAndJailbreakFilterResult, + PiAndJailbreakFilterSettings, + RaiFilterResult, + RaiFilterSettings, + RaiFilterType, + RangeInfo, + SanitizationResult, + SanitizeModelResponseRequest, + SanitizeModelResponseResponse, + SanitizeUserPromptRequest, + SanitizeUserPromptResponse, + SdpAdvancedConfig, + SdpBasicConfig, + SdpDeidentifyResult, + SdpFilterResult, + SdpFilterSettings, + SdpFinding, + SdpFindingLikelihood, + SdpInspectResult, + Template, + UpdateFloorSettingRequest, + UpdateTemplateRequest, + VirusDetail, + VirusScanFilterResult, +) + +__all__ = ( + "ModelArmorAsyncClient", + "ByteDataItem", + "CreateTemplateRequest", + "CsamFilterResult", + "DataItem", + "DeleteTemplateRequest", + "DetectionConfidenceLevel", + "FilterConfig", + "FilterExecutionState", + "FilterMatchState", + "FilterResult", + "FloorSetting", + "GetFloorSettingRequest", + "GetTemplateRequest", + "InvocationResult", + "ListTemplatesRequest", + "ListTemplatesResponse", + "MaliciousUriFilterResult", + "MaliciousUriFilterSettings", + "MessageItem", + "ModelArmorClient", + "PiAndJailbreakFilterResult", + "PiAndJailbreakFilterSettings", + "RaiFilterResult", + "RaiFilterSettings", + "RaiFilterType", + "RangeInfo", + "SanitizationResult", + "SanitizeModelResponseRequest", + "SanitizeModelResponseResponse", + "SanitizeUserPromptRequest", + "SanitizeUserPromptResponse", + "SdpAdvancedConfig", + "SdpBasicConfig", + "SdpDeidentifyResult", + "SdpFilterResult", + "SdpFilterSettings", + "SdpFinding", + "SdpFindingLikelihood", + "SdpInspectResult", + 
"Template", + "UpdateFloorSettingRequest", + "UpdateTemplateRequest", + "VirusDetail", + "VirusScanFilterResult", +) diff --git a/packages/google-cloud-modelarmor/google/cloud/modelarmor_v1/gapic_metadata.json b/packages/google-cloud-modelarmor/google/cloud/modelarmor_v1/gapic_metadata.json new file mode 100644 index 000000000000..1597fcac7630 --- /dev/null +++ b/packages/google-cloud-modelarmor/google/cloud/modelarmor_v1/gapic_metadata.json @@ -0,0 +1,163 @@ + { + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "python", + "libraryPackage": "google.cloud.modelarmor_v1", + "protoPackage": "google.cloud.modelarmor.v1", + "schema": "1.0", + "services": { + "ModelArmor": { + "clients": { + "grpc": { + "libraryClient": "ModelArmorClient", + "rpcs": { + "CreateTemplate": { + "methods": [ + "create_template" + ] + }, + "DeleteTemplate": { + "methods": [ + "delete_template" + ] + }, + "GetFloorSetting": { + "methods": [ + "get_floor_setting" + ] + }, + "GetTemplate": { + "methods": [ + "get_template" + ] + }, + "ListTemplates": { + "methods": [ + "list_templates" + ] + }, + "SanitizeModelResponse": { + "methods": [ + "sanitize_model_response" + ] + }, + "SanitizeUserPrompt": { + "methods": [ + "sanitize_user_prompt" + ] + }, + "UpdateFloorSetting": { + "methods": [ + "update_floor_setting" + ] + }, + "UpdateTemplate": { + "methods": [ + "update_template" + ] + } + } + }, + "grpc-async": { + "libraryClient": "ModelArmorAsyncClient", + "rpcs": { + "CreateTemplate": { + "methods": [ + "create_template" + ] + }, + "DeleteTemplate": { + "methods": [ + "delete_template" + ] + }, + "GetFloorSetting": { + "methods": [ + "get_floor_setting" + ] + }, + "GetTemplate": { + "methods": [ + "get_template" + ] + }, + "ListTemplates": { + "methods": [ + "list_templates" + ] + }, + "SanitizeModelResponse": { + "methods": [ + "sanitize_model_response" + ] + }, + "SanitizeUserPrompt": { + "methods": [ + "sanitize_user_prompt" + ] + }, + "UpdateFloorSetting": { + "methods": [ + "update_floor_setting" + ] + }, + "UpdateTemplate": { + "methods": [ + "update_template" + ] + } + } + }, + "rest": { + "libraryClient": "ModelArmorClient", + "rpcs": { + "CreateTemplate": { + "methods": [ + "create_template" + ] + }, + "DeleteTemplate": { + "methods": [ + "delete_template" + ] + }, + "GetFloorSetting": { + "methods": [ + "get_floor_setting" + ] + }, + "GetTemplate": { + "methods": [ + "get_template" + ] + }, + "ListTemplates": { + "methods": [ + "list_templates" + ] + }, + "SanitizeModelResponse": { + "methods": [ + "sanitize_model_response" + ] + }, + "SanitizeUserPrompt": { + "methods": [ + "sanitize_user_prompt" + ] + }, + "UpdateFloorSetting": { + "methods": [ + "update_floor_setting" + ] + }, + "UpdateTemplate": { + "methods": [ + "update_template" + ] + } + } + } + } + } + } +} diff --git a/packages/google-cloud-modelarmor/google/cloud/modelarmor_v1/gapic_version.py b/packages/google-cloud-modelarmor/google/cloud/modelarmor_v1/gapic_version.py new file mode 100644 index 000000000000..0c7cc68730c4 --- /dev/null +++ b/packages/google-cloud-modelarmor/google/cloud/modelarmor_v1/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "0.1.1" # {x-release-please-version} diff --git a/packages/google-cloud-modelarmor/google/cloud/modelarmor_v1/py.typed b/packages/google-cloud-modelarmor/google/cloud/modelarmor_v1/py.typed new file mode 100644 index 000000000000..82eb7144ecd6 --- /dev/null +++ b/packages/google-cloud-modelarmor/google/cloud/modelarmor_v1/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-modelarmor package uses inline types. diff --git a/packages/google-cloud-modelarmor/google/cloud/modelarmor_v1/services/__init__.py b/packages/google-cloud-modelarmor/google/cloud/modelarmor_v1/services/__init__.py new file mode 100644 index 000000000000..8f6cf068242c --- /dev/null +++ b/packages/google-cloud-modelarmor/google/cloud/modelarmor_v1/services/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-modelarmor/google/cloud/modelarmor_v1/services/model_armor/__init__.py b/packages/google-cloud-modelarmor/google/cloud/modelarmor_v1/services/model_armor/__init__.py new file mode 100644 index 000000000000..d8a2b249a208 --- /dev/null +++ b/packages/google-cloud-modelarmor/google/cloud/modelarmor_v1/services/model_armor/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .async_client import ModelArmorAsyncClient +from .client import ModelArmorClient + +__all__ = ( + "ModelArmorClient", + "ModelArmorAsyncClient", +) diff --git a/packages/google-cloud-modelarmor/google/cloud/modelarmor_v1/services/model_armor/async_client.py b/packages/google-cloud-modelarmor/google/cloud/modelarmor_v1/services/model_armor/async_client.py new file mode 100644 index 000000000000..96b452b5dca4 --- /dev/null +++ b/packages/google-cloud-modelarmor/google/cloud/modelarmor_v1/services/model_armor/async_client.py @@ -0,0 +1,1377 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import logging as std_logging +import re +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry_async as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.modelarmor_v1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore + +from google.cloud.location import locations_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + +from google.cloud.modelarmor_v1.services.model_armor import pagers +from google.cloud.modelarmor_v1.types import service + +from .client import ModelArmorClient +from .transports.base import DEFAULT_CLIENT_INFO, ModelArmorTransport +from .transports.grpc_asyncio import ModelArmorGrpcAsyncIOTransport + +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + + +class ModelArmorAsyncClient: + """Service describing handlers for resources""" + + _client: ModelArmorClient + + # Copy defaults from the synchronous client for use here. + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. 
+ DEFAULT_ENDPOINT = ModelArmorClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = ModelArmorClient.DEFAULT_MTLS_ENDPOINT + _DEFAULT_ENDPOINT_TEMPLATE = ModelArmorClient._DEFAULT_ENDPOINT_TEMPLATE + _DEFAULT_UNIVERSE = ModelArmorClient._DEFAULT_UNIVERSE + + floor_setting_path = staticmethod(ModelArmorClient.floor_setting_path) + parse_floor_setting_path = staticmethod(ModelArmorClient.parse_floor_setting_path) + template_path = staticmethod(ModelArmorClient.template_path) + parse_template_path = staticmethod(ModelArmorClient.parse_template_path) + common_billing_account_path = staticmethod( + ModelArmorClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + ModelArmorClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(ModelArmorClient.common_folder_path) + parse_common_folder_path = staticmethod(ModelArmorClient.parse_common_folder_path) + common_organization_path = staticmethod(ModelArmorClient.common_organization_path) + parse_common_organization_path = staticmethod( + ModelArmorClient.parse_common_organization_path + ) + common_project_path = staticmethod(ModelArmorClient.common_project_path) + parse_common_project_path = staticmethod(ModelArmorClient.parse_common_project_path) + common_location_path = staticmethod(ModelArmorClient.common_location_path) + parse_common_location_path = staticmethod( + ModelArmorClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ModelArmorAsyncClient: The constructed client. + """ + return ModelArmorClient.from_service_account_info.__func__(ModelArmorAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ModelArmorAsyncClient: The constructed client. + """ + return ModelArmorClient.from_service_account_file.__func__(ModelArmorAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. 
+ (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return ModelArmorClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> ModelArmorTransport: + """Returns the transport used by the client instance. + + Returns: + ModelArmorTransport: The transport used by the client instance. + """ + return self._client.transport + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._client._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used + by the client instance. + """ + return self._client._universe_domain + + get_transport_class = ModelArmorClient.get_transport_class + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[str, ModelArmorTransport, Callable[..., ModelArmorTransport]] + ] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the model armor async client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,ModelArmorTransport,Callable[..., ModelArmorTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport to use. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the ModelArmorTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. 
If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = ModelArmorClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ): # pragma: NO COVER + _LOGGER.debug( + "Created client `google.cloud.modelarmor_v1.ModelArmorAsyncClient`.", + extra={ + "serviceName": "google.cloud.modelarmor.v1.ModelArmor", + "universeDomain": getattr( + self._client._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._client._transport, "_credentials") + else { + "serviceName": "google.cloud.modelarmor.v1.ModelArmor", + "credentialsType": None, + }, + ) + + async def list_templates( + self, + request: Optional[Union[service.ListTemplatesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListTemplatesAsyncPager: + r"""Lists Templates in a given project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import modelarmor_v1 + + async def sample_list_templates(): + # Create a client + client = modelarmor_v1.ModelArmorAsyncClient() + + # Initialize request argument(s) + request = modelarmor_v1.ListTemplatesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_templates(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.modelarmor_v1.types.ListTemplatesRequest, dict]]): + The request object. Message for requesting list of + Templates + parent (:class:`str`): + Required. Parent value for + ListTemplatesRequest + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.modelarmor_v1.services.model_armor.pagers.ListTemplatesAsyncPager: + Message for response to listing + Templates + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.ListTemplatesRequest): + request = service.ListTemplatesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_templates + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListTemplatesAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_template( + self, + request: Optional[Union[service.GetTemplateRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> service.Template: + r"""Gets details of a single Template. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import modelarmor_v1 + + async def sample_get_template(): + # Create a client + client = modelarmor_v1.ModelArmorAsyncClient() + + # Initialize request argument(s) + request = modelarmor_v1.GetTemplateRequest( + name="name_value", + ) + + # Make the request + response = await client.get_template(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.modelarmor_v1.types.GetTemplateRequest, dict]]): + The request object. 
Message for getting a Template + name (:class:`str`): + Required. Name of the resource + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.modelarmor_v1.types.Template: + Message describing Template resource + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.GetTemplateRequest): + request = service.GetTemplateRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_template + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_template( + self, + request: Optional[Union[service.CreateTemplateRequest, dict]] = None, + *, + parent: Optional[str] = None, + template: Optional[service.Template] = None, + template_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> service.Template: + r"""Creates a new Template in a given project and + location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import modelarmor_v1 + + async def sample_create_template(): + # Create a client + client = modelarmor_v1.ModelArmorAsyncClient() + + # Initialize request argument(s) + request = modelarmor_v1.CreateTemplateRequest( + parent="parent_value", + template_id="template_id_value", + ) + + # Make the request + response = await client.create_template(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.modelarmor_v1.types.CreateTemplateRequest, dict]]): + The request object. Message for creating a Template + parent (:class:`str`): + Required. Value for parent. + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + template (:class:`google.cloud.modelarmor_v1.types.Template`): + Required. The resource being created + This corresponds to the ``template`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + template_id (:class:`str`): + Required. Id of the requesting object If auto-generating + Id server-side, remove this field and template_id from + the method_signature of Create RPC + + This corresponds to the ``template_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.modelarmor_v1.types.Template: + Message describing Template resource + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, template, template_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.CreateTemplateRequest): + request = service.CreateTemplateRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if template is not None: + request.template = template + if template_id is not None: + request.template_id = template_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_template + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. 
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def update_template( + self, + request: Optional[Union[service.UpdateTemplateRequest, dict]] = None, + *, + template: Optional[service.Template] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> service.Template: + r"""Updates the parameters of a single Template. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import modelarmor_v1 + + async def sample_update_template(): + # Create a client + client = modelarmor_v1.ModelArmorAsyncClient() + + # Initialize request argument(s) + request = modelarmor_v1.UpdateTemplateRequest( + ) + + # Make the request + response = await client.update_template(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.modelarmor_v1.types.UpdateTemplateRequest, dict]]): + The request object. Message for updating a Template + template (:class:`google.cloud.modelarmor_v1.types.Template`): + Required. The resource being updated + This corresponds to the ``template`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. Field mask is used to specify the fields to be + overwritten in the Template resource by the update. The + fields specified in the update_mask are relative to the + resource, not the full request. A field will be + overwritten if it is in the mask. If the user does not + provide a mask then all fields will be overwritten. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.modelarmor_v1.types.Template: + Message describing Template resource + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([template, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance(request, service.UpdateTemplateRequest): + request = service.UpdateTemplateRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if template is not None: + request.template = template + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_template + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("template.name", request.template.name),) + ), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_template( + self, + request: Optional[Union[service.DeleteTemplateRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Deletes a single Template. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import modelarmor_v1 + + async def sample_delete_template(): + # Create a client + client = modelarmor_v1.ModelArmorAsyncClient() + + # Initialize request argument(s) + request = modelarmor_v1.DeleteTemplateRequest( + name="name_value", + ) + + # Make the request + await client.delete_template(request=request) + + Args: + request (Optional[Union[google.cloud.modelarmor_v1.types.DeleteTemplateRequest, dict]]): + The request object. Message for deleting a Template + name (:class:`str`): + Required. Name of the resource + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance(request, service.DeleteTemplateRequest): + request = service.DeleteTemplateRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_template + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def get_floor_setting( + self, + request: Optional[Union[service.GetFloorSettingRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> service.FloorSetting: + r"""Gets details of a single floor setting of a project + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import modelarmor_v1 + + async def sample_get_floor_setting(): + # Create a client + client = modelarmor_v1.ModelArmorAsyncClient() + + # Initialize request argument(s) + request = modelarmor_v1.GetFloorSettingRequest( + name="name_value", + ) + + # Make the request + response = await client.get_floor_setting(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.modelarmor_v1.types.GetFloorSettingRequest, dict]]): + The request object. Message for getting a Floor Setting + name (:class:`str`): + Required. The name of the floor + setting to get, example + projects/123/floorsetting. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.modelarmor_v1.types.FloorSetting: + Message describing FloorSetting + resource + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance(request, service.GetFloorSettingRequest): + request = service.GetFloorSettingRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_floor_setting + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def update_floor_setting( + self, + request: Optional[Union[service.UpdateFloorSettingRequest, dict]] = None, + *, + floor_setting: Optional[service.FloorSetting] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> service.FloorSetting: + r"""Updates the parameters of a single floor setting of a + project + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import modelarmor_v1 + + async def sample_update_floor_setting(): + # Create a client + client = modelarmor_v1.ModelArmorAsyncClient() + + # Initialize request argument(s) + request = modelarmor_v1.UpdateFloorSettingRequest( + ) + + # Make the request + response = await client.update_floor_setting(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.modelarmor_v1.types.UpdateFloorSettingRequest, dict]]): + The request object. Message for Updating a Floor Setting + floor_setting (:class:`google.cloud.modelarmor_v1.types.FloorSetting`): + Required. The floor setting being + updated. + + This corresponds to the ``floor_setting`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Optional. Field mask is used to specify the fields to be + overwritten in the FloorSetting resource by the update. + The fields specified in the update_mask are relative to + the resource, not the full request. A field will be + overwritten if it is in the mask. If the user does not + provide a mask then all fields will be overwritten. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.modelarmor_v1.types.FloorSetting: + Message describing FloorSetting + resource + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([floor_setting, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.UpdateFloorSettingRequest): + request = service.UpdateFloorSettingRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if floor_setting is not None: + request.floor_setting = floor_setting + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_floor_setting + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("floor_setting.name", request.floor_setting.name),) + ), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def sanitize_user_prompt( + self, + request: Optional[Union[service.SanitizeUserPromptRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> service.SanitizeUserPromptResponse: + r"""Sanitizes User Prompt. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import modelarmor_v1 + + async def sample_sanitize_user_prompt(): + # Create a client + client = modelarmor_v1.ModelArmorAsyncClient() + + # Initialize request argument(s) + user_prompt_data = modelarmor_v1.DataItem() + user_prompt_data.text = "text_value" + + request = modelarmor_v1.SanitizeUserPromptRequest( + name="name_value", + user_prompt_data=user_prompt_data, + ) + + # Make the request + response = await client.sanitize_user_prompt(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.modelarmor_v1.types.SanitizeUserPromptRequest, dict]]): + The request object. Sanitize User Prompt request. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.modelarmor_v1.types.SanitizeUserPromptResponse: + Sanitized User Prompt Response. + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.SanitizeUserPromptRequest): + request = service.SanitizeUserPromptRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.sanitize_user_prompt + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def sanitize_model_response( + self, + request: Optional[Union[service.SanitizeModelResponseRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> service.SanitizeModelResponseResponse: + r"""Sanitizes Model Response. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import modelarmor_v1 + + async def sample_sanitize_model_response(): + # Create a client + client = modelarmor_v1.ModelArmorAsyncClient() + + # Initialize request argument(s) + model_response_data = modelarmor_v1.DataItem() + model_response_data.text = "text_value" + + request = modelarmor_v1.SanitizeModelResponseRequest( + name="name_value", + model_response_data=model_response_data, + ) + + # Make the request + response = await client.sanitize_model_response(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.modelarmor_v1.types.SanitizeModelResponseRequest, dict]]): + The request object. Sanitize Model Response request. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.modelarmor_v1.types.SanitizeModelResponseResponse: + Sanitized Model Response Response. + """ + # Create or coerce a protobuf request object. 
+ # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.SanitizeModelResponseRequest): + request = service.SanitizeModelResponseRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.sanitize_model_response + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_location( + self, + request: Optional[locations_pb2.GetLocationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.get_location] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_locations( + self, + request: Optional[locations_pb2.ListLocationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. + + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.list_locations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def __aenter__(self) -> "ModelArmorAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("ModelArmorAsyncClient",) diff --git a/packages/google-cloud-modelarmor/google/cloud/modelarmor_v1/services/model_armor/client.py b/packages/google-cloud-modelarmor/google/cloud/modelarmor_v1/services/model_armor/client.py new file mode 100644 index 000000000000..6c10190bddbb --- /dev/null +++ b/packages/google-cloud-modelarmor/google/cloud/modelarmor_v1/services/model_armor/client.py @@ -0,0 +1,1809 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +from http import HTTPStatus +import json +import logging as std_logging +import os +import re +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) +import warnings + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.modelarmor_v1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + +from google.cloud.location import locations_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + +from google.cloud.modelarmor_v1.services.model_armor import pagers +from google.cloud.modelarmor_v1.types import service + +from .transports.base import DEFAULT_CLIENT_INFO, ModelArmorTransport +from .transports.grpc import ModelArmorGrpcTransport +from .transports.grpc_asyncio import ModelArmorGrpcAsyncIOTransport +from .transports.rest import ModelArmorRestTransport + + +class ModelArmorClientMeta(type): + """Metaclass for the ModelArmor client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + + _transport_registry = OrderedDict() # type: Dict[str, Type[ModelArmorTransport]] + _transport_registry["grpc"] = ModelArmorGrpcTransport + _transport_registry["grpc_asyncio"] = ModelArmorGrpcAsyncIOTransport + _transport_registry["rest"] = ModelArmorRestTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[ModelArmorTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class ModelArmorClient(metaclass=ModelArmorClientMeta): + """Service describing handlers for resources""" + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. 
+        """
+        if not api_endpoint:
+            return api_endpoint
+
+        mtls_endpoint_re = re.compile(
+            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
+        )
+
+        m = mtls_endpoint_re.match(api_endpoint)
+        name, mtls, sandbox, googledomain = m.groups()
+        if mtls or not googledomain:
+            return api_endpoint
+
+        if sandbox:
+            return api_endpoint.replace(
+                "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
+            )
+
+        return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
+
+    # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead.
+    DEFAULT_ENDPOINT = "modelarmor.googleapis.com"
+    DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__(  # type: ignore
+        DEFAULT_ENDPOINT
+    )
+
+    _DEFAULT_ENDPOINT_TEMPLATE = "modelarmor.{UNIVERSE_DOMAIN}"
+    _DEFAULT_UNIVERSE = "googleapis.com"
+
+    @classmethod
+    def from_service_account_info(cls, info: dict, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            info.
+
+        Args:
+            info (dict): The service account private key info.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            ModelArmorClient: The constructed client.
+        """
+        credentials = service_account.Credentials.from_service_account_info(info)
+        kwargs["credentials"] = credentials
+        return cls(*args, **kwargs)
+
+    @classmethod
+    def from_service_account_file(cls, filename: str, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            file.
+
+        Args:
+            filename (str): The path to the service account private key json
+                file.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            ModelArmorClient: The constructed client.
+        """
+        credentials = service_account.Credentials.from_service_account_file(filename)
+        kwargs["credentials"] = credentials
+        return cls(*args, **kwargs)
+
+    from_service_account_json = from_service_account_file
+
+    @property
+    def transport(self) -> ModelArmorTransport:
+        """Returns the transport used by the client instance.
+
+        Returns:
+            ModelArmorTransport: The transport used by the client
+                instance.
+        """
+        return self._transport
+
+    @staticmethod
+    def floor_setting_path(
+        project: str,
+        location: str,
+    ) -> str:
+        """Returns a fully-qualified floor_setting string."""
+        return "projects/{project}/locations/{location}/floorSetting".format(
+            project=project,
+            location=location,
+        )
+
+    @staticmethod
+    def parse_floor_setting_path(path: str) -> Dict[str, str]:
+        """Parses a floor_setting path into its component segments."""
+        m = re.match(
+            r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/floorSetting$",
+            path,
+        )
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def template_path(
+        project: str,
+        location: str,
+        template: str,
+    ) -> str:
+        """Returns a fully-qualified template string."""
+        return "projects/{project}/locations/{location}/templates/{template}".format(
+            project=project,
+            location=location,
+            template=template,
+        )
+
+    @staticmethod
+    def parse_template_path(path: str) -> Dict[str, str]:
+        """Parses a template path into its component segments."""
+        m = re.match(
+            r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/templates/(?P